diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 00000000000..f09b08660e7
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,36 @@
+917b41d6d73535c090fc312668dff353cdaef906 # Blacken docs/html/conf.py
+ed383dd8afa8fe0250dcf9b8962927ada0e21c89 # Blacken docs/pip_sphinxext.py
+228405e62451abe8a66233573035007df4be575f # Blacken noxfile.py
+f477a9f490e978177b71c9dbaa5465c51ea21129 # Blacken setup.py
+e59ba23468390217479465019f8d78e724a23550 # Blacken src/pip/__main__.py
+d7013db084e9a52242354ee5754dc5d19ccf062e # Blacken src/pip/_internal/build_env.py
+30e9ffacae75378fc3e3df48f754dabad037edb9 # Blacken src/pip/_internal/cache.py
+8341d56b46776a805286218ac5fb0e7850fd9341 # Blacken src/pip/_internal/cli/autocompletion.py
+3d3461ed65208656358b3595e25d8c31c5c89470 # Blacken src/pip/_internal/cli/base_command.py
+d489b0f1b104bc936b0fb17e6c33633664ebdc0e # Blacken src/pip/_internal/cli/cmdoptions.py
+591fe4841aefe9befa0530f2a54f820c4ecbb392 # Blacken src/pip/_internal/cli/command_context.py
+9265b28ef7248ae1847a80384dbeeb8119c3e2f5 # Blacken src/pip/_internal/cli/main.py
+847a369364878c38d210c90beed2737bb6fb3a85 # Blacken src/pip/_internal/cli/main_parser.py
+ec97119067041ae58b963935ff5f0e5d9fead80c # Blacken src/pip/_internal/cli/parser.py
+6e3b8de22fa39fa3073599ecf9db61367f4b3b32 # Blacken src/pip/_internal/cli/progress_bars.py
+55405227de983c5bd5bf0858ea12dbe537d3e490 # Blacken src/pip/_internal/cli/req_command.py
+d5ca5c850cae9a0c64882a8f49d3a318699a7e2e # Blacken src/pip/_internal/cli/spinners.py
+9747cb48f8430a7a91b36fe697dd18dbddb319f0 # Blacken src/pip/_internal/commands/__init__.py
+1c09fd6f124df08ca36bed68085ad68e89bb1957 # Blacken src/pip/_internal/commands/cache.py
+315e93d7eb87cd476afcc4eaf0f01a7b56a5037f # Blacken src/pip/_internal/commands/check.py
+8ae3b96ed7d24fd24024ccce4840da0dcf635f26 # Blacken src/pip/_internal/commands/completion.py
+42ca4792202f26a293ee48380718743a80bbee37 # Blacken src/pip/_internal/commands/configuration.py
+790ad78fcd43d41a5bef9dca34a3c128d05eb02c # Blacken src/pip/_internal/commands/debug.py
+a6fcc8f045afe257ce321f4012fc8fcb4be01eb3 # Blacken src/pip/_internal/commands/download.py
+920e735dfc60109351fbe2f4c483c2f6ede9e52d # Blacken src/pip/_internal/commands/freeze.py
+053004e0fcf0851238b1064fbce13aea87b24e9c # Blacken src/pip/_internal/commands/hash.py
+a6b6ae487e52c2242045b64cb8962e0a992cfd76 # Blacken src/pip/_internal/commands/help.py
+2495cf95a6c7eb61ccf1f9f0e8b8d736af914e53 # Blacken __main__.py
+c7ee560e00b85f7486b452c14ff49e4737996eda # Blacken tools/
+8e2e1964a4f0a060f7299a96a911c9e116b2283d # Blacken src/pip/_internal/commands/
+1bc0eef05679e87f45540ab0a294667cb3c6a88e # Blacken src/pip/_internal/network/
+069b01932a7d64a81c708c6254cc93e1f89e6783 # Blacken src/pip/_internal/req
+1897784d59e0d5fcda2dd75fea54ddd8be3d502a # Blacken src/pip/_internal/index
+94999255d5ede440c37137d210666fdf64302e75 # Reformat the codebase, with black
+585037a80a1177f1fa92e159a7079855782e543e # Cleanup implicit string concatenation
+8a6f6ac19b80a6dc35900a47016c851d9fcd2ee2 # Blacken src/pip/_internal/resolution directory
diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml
index e28e5408208..20e0a5dd991
--- a/.github/ISSUE_TEMPLATE/bug-report.yml
+++ b/.github/ISSUE_TEMPLATE/bug-report.yml
@@ -61,11 +61,13 @@ body:
       description: >-
         Provide the output of the steps above, including the commands
         themselves and pip's output/traceback etc.
+        (The output will be auto-rendered as code, no need for backticks.)
         If you want to present output from multiple commands, please prefix
         the line containing the command with `$ `. Please also ensure that
         the "How to reproduce" section contains matching instructions for
         reproducing this.
+      render: shell
   - type: checkboxes
     attributes:
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000000..8ac6b8c4984
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,6 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3b35e93b21f..57273ff45bd 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -11,6 +11,11 @@ on:
   schedule:
   - cron: 0 0 * * MON  # Run every Monday at 00:00 UTC

+env:
+  # The "FORCE_COLOR" variable, when set to 1,
+  # tells Nox to colorize itself.
+  FORCE_COLOR: "1"
+
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
   cancel-in-progress: true
@@ -21,8 +26,8 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-python@v4
+    - uses: actions/checkout@v4
+    - uses: actions/setup-python@v5
       with:
         python-version: "3.x"
     - run: pip install nox
@@ -57,8 +62,8 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-python@v4
+    - uses: actions/checkout@v4
+    - uses: actions/setup-python@v5
       with:
         python-version: "3.x"
     - name: Set up git credentials
@@ -81,8 +86,8 @@ jobs:
       github.event_name != 'pull_request'

     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-python@v4
+    - uses: actions/checkout@v4
+    - uses: actions/setup-python@v5
       with:
         python-version: "3.x"

@@ -91,7 +96,7 @@
     - run: git diff --exit-code

   tests-unix:
-    name: tests / ${{ matrix.python }} / ${{ matrix.os }}
+    name: tests / ${{ matrix.python.key || matrix.python }} / ${{ matrix.os }}
     runs-on: ${{ matrix.os }}-latest

     needs: [packaging, determine-changes]
@@ -104,21 +109,24 @@
       matrix:
         os: [Ubuntu, MacOS]
         python:
-        - "3.7"
         - "3.8"
         - "3.9"
         - "3.10"
         - "3.11"
+        - "3.12"

     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-python@v4
+    - uses: actions/checkout@v4
+    - uses: actions/setup-python@v5
       with:
         python-version: ${{ matrix.python }}
+        allow-prereleases: true

     - name: Install Ubuntu dependencies
       if: matrix.os == 'Ubuntu'
-      run: sudo apt-get install bzr
+      run: |
+        sudo apt-get update
+        sudo apt-get install bzr

     - name: Install MacOS dependencies
       if: matrix.os == 'MacOS'
@@ -129,12 +137,12 @@
     # Main check
     - name: Run unit tests
       run: >-
-        nox -s test-${{ matrix.python }} --
+        nox -s test-${{ matrix.python.key || matrix.python }} --
         -m unit
         --verbose --numprocesses auto --showlocals
     - name: Run integration tests
       run: >-
-        nox -s test-${{ matrix.python }} --
+        nox -s test-${{ matrix.python.key || matrix.python }} --
         -m integration
         --verbose --numprocesses auto --showlocals
         --durations=5
@@ -153,38 +161,28 @@
       matrix:
         os: [Windows]
         python:
-        - "3.7"
-        # Commented out, since Windows tests are expensively slow.
- # - "3.8" + - "3.8" + # Commented out, since Windows tests are expensively slow, + # only test the oldest and newest Python supported by pip # - "3.9" # - "3.10" - - "3.11" + # - "3.11" + - "3.12" group: [1, 2] steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - # We use a RAMDisk on Windows, since filesystem IO is a big slowdown - # for our tests. - - name: Create a RAMDisk - run: ./tools/ci/New-RAMDisk.ps1 -Drive R -Size 1GB - - - name: Setup RAMDisk permissions - run: | - mkdir R:\Temp - $acl = Get-Acl "R:\Temp" - $rule = New-Object System.Security.AccessControl.FileSystemAccessRule( - "Everyone", "FullControl", "ContainerInherit,ObjectInherit", "None", "Allow" - ) - $acl.AddAccessRule($rule) - Set-Acl "R:\Temp" $acl - + # We use C:\Temp (which is already available on the worker) + # as a temporary directory for all of the tests because the + # default value (under the user dir) is more deeply nested + # and causes tests to fail with "path too long" errors. - run: pip install nox env: - TEMP: "R:\\Temp" + TEMP: "C:\\Temp" # Main check - name: Run unit tests @@ -194,7 +192,7 @@ jobs: -m unit --verbose --numprocesses auto --showlocals env: - TEMP: "R:\\Temp" + TEMP: "C:\\Temp" - name: Run integration tests (group 1) if: matrix.group == 1 @@ -203,7 +201,7 @@ jobs: -m integration -k "not test_install" --verbose --numprocesses auto --showlocals env: - TEMP: "R:\\Temp" + TEMP: "C:\\Temp" - name: Run integration tests (group 2) if: matrix.group == 2 @@ -212,7 +210,7 @@ jobs: -m integration -k "test_install" --verbose --numprocesses auto --showlocals env: - TEMP: "R:\\Temp" + TEMP: "C:\\Temp" tests-zipapp: name: tests / zipapp @@ -224,15 +222,17 @@ jobs: github.event_name != 'pull_request' steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install Ubuntu dependencies - run: sudo apt-get install bzr + run: | + sudo apt-get update + sudo apt-get install bzr - - run: pip install nox 'virtualenv<20' 'setuptools != 60.6.0' + - run: pip install nox # Main check - name: Run integration tests diff --git a/.github/workflows/lock-threads.yml b/.github/workflows/lock-threads.yml index 990440dd6c8..dc68b683bef 100644 --- a/.github/workflows/lock-threads.yml +++ b/.github/workflows/lock-threads.yml @@ -17,7 +17,7 @@ jobs: if: github.repository_owner == 'pypa' runs-on: ubuntu-latest steps: - - uses: dessant/lock-threads@v3 + - uses: dessant/lock-threads@v4 with: issue-inactive-days: '30' pr-inactive-days: '15' diff --git a/.github/workflows/news-file.yml b/.github/workflows/news-file.yml index 371e12fd755..398ad1b7e67 100644 --- a/.github/workflows/news-file.yml +++ b/.github/workflows/news-file.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: # `towncrier check` runs `git diff --name-only origin/main...`, which # needs a non-shallow clone. diff --git a/.github/workflows/no-response.yml b/.github/workflows/no-response.yml deleted file mode 100644 index 939290b93e5..00000000000 --- a/.github/workflows/no-response.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: No Response - -# Both `issue_comment` and `scheduled` event types are required for this Action -# to work properly. 
-on:
-  issue_comment:
-    types: [created]
-  schedule:
-    # Schedule for five minutes after the hour, every hour
-    - cron: '5 * * * *'
-
-jobs:
-  noResponse:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: lee-dohm/no-response@v0.5.0
-        with:
-          token: ${{ github.token }}
-          responseRequiredLabel: "S: awaiting response"
diff --git a/.github/workflows/update-rtd-redirects.yml b/.github/workflows/update-rtd-redirects.yml
index 8259b6c0b6a..0beb2b84b97 100644
--- a/.github/workflows/update-rtd-redirects.yml
+++ b/.github/workflows/update-rtd-redirects.yml
@@ -18,8 +18,8 @@ jobs:
     runs-on: ubuntu-latest
     environment: RTD Deploys
     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-python@v4
+    - uses: actions/checkout@v4
+    - uses: actions/setup-python@v5
       with:
         python-version: "3.11"
     - run: pip install httpx pyyaml rich
diff --git a/.mailmap b/.mailmap
index d0c64300fd2..875dba24ed3 100644
--- a/.mailmap
+++ b/.mailmap
@@ -27,6 +27,7 @@ Hugo van Kemenade
 hugovk
 Ilya Baryshev
 Jakub Stasiak
+Jean Abou Samra
 John-Scott Atlakson
 Jorge Niedbalski
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2fc455b9d64..922e1440d45 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,7 +2,7 @@ exclude: 'src/pip/_vendor/'

 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.4.0
+  rev: v4.6.0
   hooks:
   - id: check-builtin-literals
   - id: check-added-large-files
@@ -16,43 +16,32 @@ repos:
   - id: trailing-whitespace
     exclude: .patch

-- repo: https://github.com/psf/black
-  rev: 23.1.0
+- repo: https://github.com/psf/black-pre-commit-mirror
+  rev: 24.4.0
   hooks:
   - id: black

-- repo: https://github.com/PyCQA/flake8
-  rev: 6.0.0
+- repo: https://github.com/astral-sh/ruff-pre-commit
+  rev: v0.4.1
   hooks:
-  - id: flake8
-    additional_dependencies: [
-      'flake8-bugbear',
-      'flake8-logging-format',
-      'flake8-implicit-str-concat',
-    ]
-    exclude: tests/data
-
-- repo: https://github.com/PyCQA/isort
-  rev: 5.12.0
-  hooks:
-  - id: isort
-    files: \.py$
+  - id: ruff
+    args: [--fix, --exit-non-zero-on-fix]

 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v0.961
+  rev: v1.9.0
   hooks:
   - id: mypy
     exclude: tests/data
     args: ["--pretty", "--show-error-codes"]
     additional_dependencies: [
-      'keyring==23.0.1',
-      'nox==2021.6.12',
+      'keyring==24.2.0',
+      'nox==2023.4.22',
       'pytest',
-      'types-docutils==0.18.3',
-      'types-setuptools==57.4.14',
-      'types-freezegun==1.1.9',
-      'types-six==1.16.15',
-      'types-pyyaml==6.0.12.2',
+      'types-docutils==0.20.0.3',
+      'types-setuptools==68.2.0.0',
+      'types-freezegun==1.1.10',
+      'types-six==1.16.21.9',
+      'types-pyyaml==6.0.12.12',
     ]

 - repo: https://github.com/pre-commit/pygrep-hooks
@@ -65,6 +54,13 @@ repos:
     types: [file]
     exclude: NEWS.rst  # The errors flagged in NEWS.rst are old.
+- repo: https://github.com/codespell-project/codespell
+  rev: v2.2.6
+  hooks:
+  - id: codespell
+    exclude: AUTHORS.txt|tests/data
+    args: ["--ignore-words", tools/codespell-ignore.txt]
+
 - repo: local
   hooks:
   - id: news-fragment-filenames
diff --git a/.readthedocs.yml b/.readthedocs.yml
index b6453d8f0b3..c0d2bba55e9 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -6,7 +6,7 @@ build:
     python: "3.11"

 sphinx:
-  builder: htmldir
+  builder: dirhtml
   configuration: docs/html/conf.py

 python:
diff --git a/AUTHORS.txt b/AUTHORS.txt
index e9d3c38916f..6b54ae3b17e 100644
--- a/AUTHORS.txt
+++ b/AUTHORS.txt
@@ -20,6 +20,7 @@ Albert-Guan
 albertg
 Alberto Sottile
 Aleks Bunin
+Ales Erjavec
 Alethea Flowers
 Alex Gaynor
 Alex Grönholm
@@ -30,6 +31,7 @@ Alex Stachowiak
 Alexander Shtyrov
 Alexandre Conrad
 Alexey Popravka
+Aleš Erjavec
 Alli
 Ami Fischman
 Ananya Maiti
@@ -71,6 +73,7 @@ atse
 Atsushi Odagiri
 Avinash Karhana
 Avner Cohen
+Awit (Ah-Wit) Ghirmai
 Baptiste Mispelon
 Barney Gale
 barneygale
@@ -126,6 +129,7 @@ Chih-Hsuan Yen
 Chris Brinker
 Chris Hunt
 Chris Jerdonek
+Chris Kuehl
 Chris McDonough
 Chris Pawley
 Chris Pryer
@@ -156,6 +160,7 @@ Cristina Muñoz
 Curtis Doty
 cytolentino
 Daan De Meyer
+Dale
 Damian
 Damian Quiroga
 Damian Shaw
@@ -194,9 +199,11 @@ David Runge
 David Tucker
 David Wales
 Davidovich
+ddelange
 Deepak Sharma
 Deepyaman Datta
 Denise Yu
+dependabot[bot]
 derwolfe
 Desetude
 Devesh Kumar Singh
@@ -220,7 +227,10 @@ Dustin Ingram
 Dwayne Bailey
 Ed Morley
 Edgar Ramírez
+Edgar Ramírez Mondragón
 Ee Durbin
+Efflam Lemaillet
+efflamlemaillet
 Eitan Adler
 ekristina
 elainechan
@@ -250,6 +260,7 @@ Filip Kokosiński
 Filipe Laíns
 Finn Womack
 finnagin
+Flavio Amurrio
 Florian Briand
 Florian Rathgeber
 Francesco
@@ -310,7 +321,9 @@ Ilya Baryshev
 Inada Naoki
 Ionel Cristian Mărieș
 Ionel Maries Cristian
+Itamar Turner-Trauring
 Ivan Pozdeev
+J. Nick Koston
 Jacob Kim
 Jacob Walls
 Jaime Sanz
@@ -330,10 +343,14 @@ Jarek Potiuk
 jarondl
 Jason Curtis
 Jason R. Coombs
+JasonMo
+JasonMo1
 Jay Graves
+Jean Abou Samra
 Jean-Christophe Fillion-Robin
 Jeff Barber
 Jeff Dairiki
+Jeff Widman
 Jelmer Vernooij
 jenix21
 Jeremy Stanley
@@ -344,6 +361,7 @@ Jim Fisher
 Jim Garrison
 Jiun Bae
 Jivan Amara
+Joe Bylund
 Joe Michelini
 John Paton
 John T. Wodder II
@@ -362,6 +380,7 @@ Joseph Long
 Josh Bronson
 Josh Hansen
 Josh Schneier
+Joshua
 Juan Luis Cano Rodríguez
 Juanjo Bazán
 Judah Rand
@@ -392,6 +411,7 @@ KOLANICH
 kpinc
 Krishna Oza
 Kumar McMillan
+Kurt McKee
 Kyle Persohn
 lakshmanaram
 Laszlo Kiss-Kollar
@@ -408,6 +428,7 @@ lorddavidiii
 Loren Carvalho
 Lucas Cimon
 Ludovic Gasc
+Lukas Geiger
 Lukas Juhrich
 Luke Macken
 Luo Jiebin
@@ -424,7 +445,7 @@ Mark Williams
 Markus Hametner
 Martey Dodoo
 Martin Fischer
-Martin Häcker
+Martin Häcker
 Martin Pavlasek
 Masaki
 Masklinn
@@ -441,6 +462,7 @@ Matthew Einhorn
 Matthew Feickert
 Matthew Gilliard
 Matthew Iversen
+Matthew Treinish
 Matthew Trumbell
 Matthew Willson
 Matthias Bussonnier
@@ -473,7 +495,7 @@ Miro Hrončok
 Monica Baluna
 montefra
 Monty Taylor
-Muha Ajjan‮
+Muha Ajjan
 Nadav Wexler
 Nahuel Ambrosini
 Nate Coraor
@@ -523,6 +545,7 @@ Patrick Jenkins
 Patrick Lawson
 patricktokeeffe
 Patrik Kopkan
+Paul Ganssle
 Paul Kehrer
 Paul Moore
 Paul Nasrat
@@ -563,6 +586,7 @@ Przemek Wrzos
 Pulkit Goyal
 q0w
 Qiangning Hong
+Qiming Xu
 Quentin Lee
 Quentin Pradet
 R. David Murray
@@ -582,6 +606,7 @@ Rishi
 RobberPhex
 Robert Collins
 Robert McGibbon
+Robert Pollak
 Robert T. McGibbon
 robin elisha robinson
 Roey Berman
@@ -602,6 +627,7 @@ ryneeverett
 Sachi King
 Salvatore Rinchiera
 sandeepkiran-js
+Sander Van Balen
 Savio Jomton
 schlamar
 Scott Kitterman
@@ -614,6 +640,8 @@ SeongSoo Cho
 Sergey Vasilyev
 Seth Michael Larson
 Seth Woodworth
+Shahar Epstein
+Shantanu
 shireenrao
 Shivansh-007
 Shlomi Fish
@@ -638,7 +666,9 @@ Steve Barnes
 Steve Dower
 Steve Kowalik
 Steven Myint
+Steven Silvester
 stonebig
+studioj
 Stéphane Bidoul
 Stéphane Bidoul (ACSONE)
 Stéphane Klein
@@ -695,6 +725,7 @@ Vincent Philippon
 Vinicyus Macedo
 Vipul Kumar
 Vitaly Babiy
+Vladimir Fokow
 Vladimir Rutsky
 W. Trevor King
 Wil Tan
@@ -707,6 +738,7 @@ Wilson Mo
 wim glenn
 Winson Luk
 Wolfgang Maier
+Wu Zhenyu
 XAMES3
 Xavier Fernandez
 xoviat
@@ -725,4 +757,3 @@ Zvezdan Petkovic
 Łukasz Langa
 Роман Донченко
 Семён Марьясин
-‮rekcäH nitraM‮
diff --git a/MANIFEST.in b/MANIFEST.in
index 4716f415730..f896c0258e6 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -14,6 +14,7 @@ recursive-include src/pip/_vendor *COPYING*

 include docs/docutils.conf
 include docs/requirements.txt
+exclude .git-blame-ignore-revs
 exclude .coveragerc
 exclude .mailmap
 exclude .appveyor.yml
diff --git a/NEWS.rst b/NEWS.rst
index b0ae642634d..ce6d8e6dd3d 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -9,6 +9,187 @@

 .. towncrier release notes start

+24.0 (2024-02-03)
+=================
+
+Features
+--------
+
+- Retry on HTTP status code 502 (`#11843 `_)
+- Automatically use the setuptools PEP 517 build backend when ``--config-settings`` is
+  used for projects without ``pyproject.toml``. (`#11915 `_)
+- Make pip freeze and pip uninstall of legacy editable installs of packages whose name
+  contains ``_`` compatible with ``setuptools>=69.0.3``. (`#12477 `_)
+- Support per requirement ``--config-settings`` for editable installs. (`#12480 `_)
+
+Bug Fixes
+---------
+
+- Optimized usage of ``--find-links=``, by only scanning the relevant directory once, only considering file names that are valid wheel or sdist names, and only considering files in the directory that are related to the install. (`#12327 `_)
+- Removed ``wheel`` from the ``[build-system].requires`` list fallback
+  that is used when ``pyproject.toml`` is absent. (`#12449 `_)
+
+Vendored Libraries
+------------------
+
+- Upgrade distlib to 0.3.8
+
+Improved Documentation
+----------------------
+
+- Fix explanation of how PIP_CONFIG_FILE works (`#11815 `_)
+- Fix outdated pip install argument description in documentation. (`#12417 `_)
+- Replace some links to PEPs with links to the canonical specifications on the :doc:`pypug:index` (`#12434 `_)
+- Updated the ``pyproject.toml`` document to stop suggesting
+  to depend on ``wheel`` as a build dependency directly. (`#12449 `_)
+- Update supported interpreters in development docs (`#12475 `_)
+
+Process
+-------
+
+- Most project metadata is now defined statically via pip's ``pyproject.toml`` file.
+
+23.3.2 (2023-12-17)
+===================
+
+Bug Fixes
+---------
+
+- Fix a bug in extras handling for link requirements (`#12372 `_)
+- Fix mercurial revision "parse error": use ``--rev={ref}`` instead of ``-r={ref}`` (`#12373 `_)
+
+
+23.3.1 (2023-10-21)
+===================
+
+Bug Fixes
+---------
+
+- Handle a timezone indicator of Z when parsing dates in the self check. (`#12338 `_)
+- Fix bug where installing the same package at the same time with multiple pip processes could fail. (`#12361 `_)
+
+
+23.3 (2023-10-15)
+=================
+
+Process
+-------
+
+- Added reference to `vulnerability reporting guidelines `_ to pip's security policy.
+
+Deprecations and Removals
+-------------------------
+
+- Drop a fallback to using SecureTransport on macOS. It was useful when pip detected OpenSSL older than 1.0.1, but the current pip does not support any Python version supporting such old OpenSSL versions. (`#12175 `_)
+
+Features
+--------
+
+- Improve extras resolution for multiple constraints on same base package. (`#11924 `_)
+- Improve use of datastructures to make candidate selection 1.6x faster. (`#12204 `_)
+- Allow ``pip install --dry-run`` to use platform and ABI overriding options. (`#12215 `_)
+- Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index, but was still selected by pip conform to :pep:`592`. (`#12224 `_)
+
+Bug Fixes
+---------
+
+- Ignore errors in temporary directory cleanup (show a warning instead). (`#11394 `_)
+- Normalize extras according to :pep:`685` from package metadata in the resolver
+  for comparison. This ensures extras are correctly compared and merged as long
+  as the package providing the extra(s) is built with values normalized according
+  to the standard. Note, however, that this *does not* solve cases where the
+  package itself contains unnormalized extra values in the metadata. (`#11649 `_)
+- Prevent downloading sdists twice when :pep:`658` metadata is present. (`#11847 `_)
+- Include all requested extras in the install report (``--report``). (`#11924 `_)
+- Removed uses of ``datetime.datetime.utcnow`` from non-vendored code. (`#12005 `_)
+- Consistently report whether a dependency comes from an extra. (`#12095 `_)
+- Fix completion script for zsh (`#12166 `_)
+- Fix improper handling of the new onexc argument of ``shutil.rmtree()`` in Python 3.12. (`#12187 `_)
+- Filter out yanked links from the available versions error message: "(from versions: 1.0, 2.0, 3.0)" will not contain yanked versions conform PEP 592. The yanked versions (if any) will be mentioned in a separate error message. (`#12225 `_)
+- Fix crash when the git version number contains something else than digits and dots. (`#12280 `_)
+- Use ``-r=...`` instead of ``-r ...`` to specify references with Mercurial. (`#12306 `_)
+- Redact password from URLs in some additional places. (`#12350 `_)
+- pip uses less memory when caching large packages. As a result, there is a new on-disk cache format stored in a new directory ($PIP_CACHE_DIR/http-v2). (`#2984 `_)
+
+Vendored Libraries
+------------------
+
+- Upgrade certifi to 2023.7.22
+- Add truststore 0.8.0
+- Upgrade urllib3 to 1.26.17
+
+Improved Documentation
+----------------------
+
+- Document that ``pip search`` support has been removed from PyPI (`#12059 `_)
+- Clarify --prefer-binary in CLI and docs (`#12122 `_)
+- Document that using OS-provided Python can cause pip's test suite to report false failures. (`#12334 `_)
+
+
+23.2.1 (2023-07-22)
+===================
+
+Bug Fixes
+---------
+
+- Disable :pep:`658` metadata fetching with the legacy resolver. (`#12156 `_)
+
+
+23.2 (2023-07-15)
+=================
+
+Process
+-------
+
+- Deprecate support for eggs for Python 3.11 or later, when the new ``importlib.metadata`` backend is used to load distribution metadata. This only affects the egg *distribution format* (with the ``.egg`` extension); distributions using the ``.egg-info`` *metadata format* (but are not actually eggs) are not affected. For more information about eggs, see `relevant section in the setuptools documentation `__.
+
+Deprecations and Removals
+-------------------------
+
+- Deprecate legacy version and version specifiers that don't conform to the
+  :ref:`specification `.
+  (`#12063 `_)
+- ``freeze`` no longer excludes the ``setuptools``, ``distribute``, and ``wheel``
+  from the output when running on Python 3.12 or later, where they are not
+  included in a virtual environment by default. Use ``--exclude`` if you wish to
+  exclude any of these packages. (`#4256 `_)
+
+Features
+--------
+
+- make rejection messages slightly different between 1 and 8, so the user can make the difference. (`#12040 `_)
+
+Bug Fixes
+---------
+
+- Fix ``pip completion --zsh``. (`#11417 `_)
+- Prevent downloading files twice when :pep:`658` metadata is present (`#11847 `_)
+- Add permission check before configuration (`#11920 `_)
+- Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree (`#11957 `_)
+- Ignore invalid or unreadable ``origin.json`` files in the cache of locally built wheels. (`#11985 `_)
+- Fix installation of packages with :pep:`658` metadata using non-canonicalized names (`#12038 `_)
+- Correctly parse ``dist-info-metadata`` values from JSON-format index data. (`#12042 `_)
+- Fail with an error if the ``--python`` option is specified after the subcommand name. (`#12067 `_)
+- Fix slowness when using ``importlib.metadata`` (the default way for pip to read metadata in Python 3.11+) and there is a large overlap between already installed and to-be-installed packages. (`#12079 `_)
+- Pass the ``-r`` flag to mercurial to be explicit that a revision is passed and protect
+  against ``hg`` options injection as part of VCS URLs. Users that do not have control on
+  VCS URLs passed to pip are advised to upgrade. (`#12119 `_)
+
+Vendored Libraries
+------------------
+
+- Upgrade certifi to 2023.5.7
+- Upgrade platformdirs to 3.8.1
+- Upgrade pygments to 2.15.1
+- Upgrade pyparsing to 3.1.0
+- Upgrade Requests to 2.31.0
+- Upgrade rich to 13.4.2
+- Upgrade setuptools to 68.0.0
+- Updated typing_extensions to 4.6.0
+- Upgrade typing_extensions to 4.7.1
+- Upgrade urllib3 to 1.26.16
+
+
 23.1.2 (2023-04-26)
 ===================

@@ -53,7 +234,7 @@
   ``--config-settings``. (`#11859 `_)
 - Using ``--config-settings`` with projects that don't have a ``pyproject.toml`` now prints
   a deprecation warning. In the future the presence of config settings will automatically
-  enable the default build backend for legacy projects and pass the setttings to it. (`#11915 `_)
+  enable the default build backend for legacy projects and pass the settings to it. (`#11915 `_)
 - Remove ``setup.py install`` fallback when building a wheel failed for projects without
   ``pyproject.toml``. (`#8368 `_)
 - When the ``wheel`` package is not installed, pip now uses the default build backend
@@ -123,7 +304,7 @@
 - Cross-reference the ``--python`` flag from the ``--prefix`` flag,
   and mention limitations of ``--prefix`` regarding script installation. (`#11775 `_)
-- Add SECURITY.md to make the policy offical. (`#11809 `_)
+- Add SECURITY.md to make the policy official. (`#11809 `_)
 - Add username to Git over SSH example. (`#11838 `_)
 - Quote extras in the pip install docs to guard shells with default glob
   qualifiers, like zsh. (`#11842 `_)
@@ -157,7 +338,7 @@
 Features
 --------

 - Change the hashes in the installation report to be a mapping. Emit the
   ``archive_info.hashes`` dictionary in ``direct_url.json``. (`#11312 `_)
-- Implement logic to read the ``EXTERNALLY-MANAGED`` file as specified in PEP 668.
+- Implement logic to read the ``EXTERNALLY-MANAGED`` file as specified in :pep:`668`.
   This allows a downstream Python distributor to prevent users from using pip to
   modify the externally managed environment. (`#11381 `_)
 - Enable the use of ``keyring`` found on ``PATH``. This allows ``keyring``
@@ -173,7 +354,7 @@
 - Use the "venv" scheme if available to obtain prefixed lib paths. (`#11598 `_)
 - Deprecated a historical ambiguity in how ``egg`` fragments in URL-style
   requirements are formatted and handled. ``egg`` fragments that do not look
-  like PEP 508 names now produce a deprecation warning. (`#11617 `_)
+  like :pep:`508` names now produce a deprecation warning. (`#11617 `_)
 - Fix scripts path in isolated build environment on Debian. (`#11623 `_)
 - Make ``pip show`` show the editable location if package is editable (`#11638 `_)
 - Stop checking that ``wheel`` is present when ``build-system.requires``
diff --git a/README.rst b/README.rst
index 7e08f857c4c..479ddfd7ba1 100644
--- a/README.rst
+++ b/README.rst
@@ -1,11 +1,19 @@
 pip - The Python Package Installer
 ==================================

-.. image:: https://img.shields.io/pypi/v/pip.svg
+.. |pypi-version| image:: https://img.shields.io/pypi/v/pip.svg
    :target: https://pypi.org/project/pip/
+   :alt: PyPI

-.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
+.. |python-versions| image:: https://img.shields.io/pypi/pyversions/pip
+   :target: https://pypi.org/project/pip
+   :alt: PyPI - Python Version
+
+.. |docs-badge| image:: https://readthedocs.org/projects/pip/badge/?version=latest
    :target: https://pip.pypa.io/en/latest
+   :alt: Documentation
+
+|pypi-version| |python-versions| |docs-badge|

 pip is the `package installer`_ for Python. You can use pip to install
 packages from the `Python Package Index`_ and other indexes.
@@ -19,10 +27,6 @@ We release updates regularly, with a new version every 3 months. Find more detai
 * `Release notes`_
 * `Release process`_

-In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right.
-
-**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3.
-
 If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:

 * `Issue tracking`_
@@ -49,10 +53,6 @@ rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
 .. _Release process: https://pip.pypa.io/en/latest/development/release-process/
 .. _GitHub page: https://github.com/pypa/pip
 .. _Development documentation: https://pip.pypa.io/en/latest/development
-.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html
-.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020
-.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html
-.. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support
 .. _Issue tracking: https://github.com/pypa/pip/issues
 .. _Discourse channel: https://discuss.python.org/c/packaging
 .. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
diff --git a/SECURITY.md b/SECURITY.md
index 4e423805aee..e75a1c0de68 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -1,3 +1,10 @@
-# Security and Vulnerability Reporting
+# Security Policy

-If you find any security issues, please report to [security@python.org](mailto:security@python.org)
+## Reporting a Vulnerability
+
+Please read the guidelines on reporting security issues [on the
+official website](https://www.python.org/dev/security/) for
+instructions on how to report a security-related problem to
+the Python Security Response Team responsibly.
+
+To reach the response team, email `security at python dot org`.
diff --git a/docs/html/cli/pip_install.rst b/docs/html/cli/pip_install.rst
index 951dc2705a3..d893fb7c8ef 100644
--- a/docs/html/cli/pip_install.rst
+++ b/docs/html/cli/pip_install.rst
@@ -45,11 +45,11 @@
 When looking at the items to be installed, pip checks what type of item
 each is, in the following order:

 1. Project or archive URL.
-2. Local directory (which must contain a ``setup.py``, or pip will report
-   an error).
+2. Local directory (which must contain a ``pyproject.toml`` or ``setup.py``,
+   otherwise pip will report an error).
 3. Local file (a sdist or wheel format archive, following the naming
    conventions for those formats).
-4. A requirement, as specified in :pep:`440`.
+4. A :ref:`version specifier `.

 Each item identified is added to the set of requirements to be satisfied by
 the install.
@@ -97,7 +97,8 @@
 .. note::

    This section is only about installation order of runtime dependencies, and
-   does not apply to build dependencies (those are specified using PEP 518).
+   does not apply to build dependencies (those are specified using the
+   :ref:`[build-system] table `).

 As of v6.1.0, pip installs dependencies before their dependents, i.e. in
 "topological order." This is the only commitment pip currently makes related
@@ -181,8 +182,9 @@
 Pre-release Versions
 --------------------

 Starting with v1.4, pip will only install stable versions as specified by
-`pre-releases`_ by default. If a version cannot be parsed as a compliant :pep:`440`
-version then it is assumed to be a pre-release.
+`pre-releases`_ by default. If a version cannot be parsed as a
+:ref:`compliant ` version then it is assumed to be
+a pre-release.

 If a Requirement specifier includes a pre-release or development version
 (e.g. ``>=0.0.dev0``) then pip will allow pre-release and development versions
@@ -210,12 +212,13 @@
 and `there `_.

 pip offers a number of package index options for modifying how packages are
 found.

-pip looks for packages in a number of places: on PyPI (if not disabled via
-``--no-index``), in the local filesystem, and in any additional repositories
-specified via ``--find-links`` or ``--index-url``. There is no ordering in
-the locations that are searched. Rather they are all checked, and the "best"
-match for the requirements (in terms of version number - see :pep:`440` for
-details) is selected.
+pip looks for packages in a number of places: on PyPI (or the index given as
+``--index-url``, if not disabled via ``--no-index``), in the local filesystem,
+and in any additional repositories specified via ``--find-links`` or
+``--extra-index-url``. There is no priority in the locations that are searched.
+Rather they are all checked, and the "best" match for the requirements (in
+terms of version number - see the
+:ref:`specification ` for details) is selected.

 See the :ref:`pip install Examples`.
@@ -380,7 +383,8 @@
       py -m pip install -e "git+https://git.repo/some_pkg.git@feature#egg=SomePackage"          # from 'feature' branch
       py -m pip install -e "git+https://git.repo/some_repo.git#egg=subdir&subdirectory=subdir_path" # install a python package from a repo subdirectory

-#. Install a package with `extras`_.
+#. Install a package with extras, i.e., optional dependencies
+   (:ref:`specification `).

    .. tab:: Unix/macOS
@@ -418,7 +422,8 @@
       py -m pip install "./downloads/SomePackage-1.0.4.tar.gz"
       py -m pip install "http://my.package.repo/SomePackage-1.0.4.zip"

-#. Install a particular source archive file following :pep:`440` direct references.
+#. Install a particular source archive file following direct references
+   (:ref:`specification `).

    .. tab:: Unix/macOS
@@ -539,5 +544,4 @@
       py -m pip install SomePackage1 SomePackage2 --no-binary SomePackage1

-.. _extras: https://www.python.org/dev/peps/pep-0508/#extras
 .. _PyPI: https://pypi.org/
diff --git a/docs/html/cli/pip_search.rst b/docs/html/cli/pip_search.rst
index 9905a1bafac..93ddab3fa78 100644
--- a/docs/html/cli/pip_search.rst
+++ b/docs/html/cli/pip_search.rst
@@ -21,6 +21,12 @@ Usage
 =====

 Description
 ===========

+.. attention::
+   PyPI no longer supports ``pip search`` (or XML-RPC search). Please use https://pypi.org/search (via a browser)
+   instead. See https://warehouse.pypa.io/api-reference/xml-rpc.html#deprecated-methods for more information.
+
+   However, XML-RPC search (and this command) may still be supported by indexes other than PyPI.
+
 .. pip-command-description:: search
diff --git a/docs/html/cli/pip_wheel.rst b/docs/html/cli/pip_wheel.rst
index bfd19a0ccb1..ba749529c0c 100644
--- a/docs/html/cli/pip_wheel.rst
+++ b/docs/html/cli/pip_wheel.rst
@@ -34,7 +34,8 @@
 Differences to ``build``
 ------------------------

-`build `_ is a simple tool which can among other things build
-wheels for projects using PEP 517. It is comparable to the execution of ``pip wheel --no-deps .``.
+`build `_ is a simple tool which can among other things build
+wheels for projects using the standard ``pyproject.toml``-based build interface. It
+is comparable to the execution of ``pip wheel --no-deps .``.
 It can also build source distributions which is not possible with ``pip``.
 ``pip wheel`` covers the wheel scope of ``build`` but offers many additional features.
diff --git a/docs/html/development/architecture/anatomy.rst b/docs/html/development/architecture/anatomy.rst
index 98708f2afeb..d5e205654ff 100644
--- a/docs/html/development/architecture/anatomy.rst
+++ b/docs/html/development/architecture/anatomy.rst
@@ -14,7 +14,7 @@
 Root and tools
 ==============

-The ``README``, license, ``pyproject.toml``, ``setup.py``, and so on are in the top level.
+The ``README``, license, ``pyproject.toml``, and so on are in the top level.

 * ``AUTHORS.txt``
 * ``LICENSE.txt``
@@ -22,8 +22,6 @@
 * ``NEWS.rst``
 * ``pyproject.toml``
 * ``README.rst``
-* ``setup.cfg``
-* ``setup.py``
 * ``noxfile.py`` -- ``pip`` uses Nox, an automation tool, configured by this file. ``noxfile.py`` describes a few environments ``pip`` uses during development for simplifying how tests are run (complicated situation there). Example: ``nox -s lint``, ``nox -s test-3.10``. We can run tests for different versions of Python by changing “3.10” to “3.7” or similar.
 * ``.gitattributes``
 * ``.gitignore``
diff --git a/docs/html/development/architecture/package-finding.rst b/docs/html/development/architecture/package-finding.rst
index 0b64d420d93..4885d925ee3 100644
--- a/docs/html/development/architecture/package-finding.rst
+++ b/docs/html/development/architecture/package-finding.rst
@@ -182,8 +182,9 @@
 example, whether a pre-release is eligible for selection or whether a file
 whose hash doesn't match is eligible depends on properties of the collection
 as a whole.

-The ``CandidateEvaluator`` class uses information like the list of `PEP 425`_
-tags compatible with the target Python interpreter, hashes provided by the
+The ``CandidateEvaluator`` class uses information like the list of
+:ref:`platform tags `
+compatible with the target Python interpreter, hashes provided by the
 user, and other user preferences, etc.

 Specifically, the class has a ``get_applicable_candidates()`` method.
@@ -236,5 +237,4 @@
 The class is the return type of both the ``CandidateEvaluator`` class's
 ``find_best_candidate()`` method.

-.. _`PEP 425`: https://www.python.org/dev/peps/pep-0425/
 .. _`PEP 503`: https://www.python.org/dev/peps/pep-0503/
diff --git a/docs/html/development/ci.rst b/docs/html/development/ci.rst
index ac65f816594..2e950232fca 100644
--- a/docs/html/development/ci.rst
+++ b/docs/html/development/ci.rst
@@ -18,10 +18,11 @@
 Supported interpreters
 ======================

 pip support a variety of Python interpreters:

-- CPython 3.7
 - CPython 3.8
 - CPython 3.9
 - CPython 3.10
+- CPython 3.11
+- CPython 3.12
 - Latest PyPy3

 on different operating systems:
@@ -88,61 +89,75 @@
 Actual testing

 +------------------------------+---------------+-----------------+
 | **interpreter**              | **unit**      | **integration** |
 +-----------+----------+-------+---------------+-----------------+
-|           | x86      | CP3.7 |               |                 |
-|           |          +-------+---------------+-----------------+
-|           |          | CP3.8 |               |                 |
+|           | x86      | CP3.8 |               |                 |
 |           |          +-------+---------------+-----------------+
 |           |          | CP3.9 |               |                 |
 |           |          +-------+---------------+-----------------+
 |           |          | CP3.10|               |                 |
 |           |          +-------+---------------+-----------------+
+|           |          | CP3.11|               |                 |
+|           |          +-------+---------------+-----------------+
+|           |          | CP3.12|               |                 |
+|           |          +-------+---------------+-----------------+
 |           |          | PyPy3 |               |                 |
 | Windows   +----------+-------+---------------+-----------------+
-|           | x64      | CP3.7 | GitHub        | GitHub          |
-|           |          +-------+---------------+-----------------+
-|           |          | CP3.8 |               |                 |
+|           | x64      | CP3.8 | GitHub        | GitHub          |
 |           |          +-------+---------------+-----------------+
 |           |          | CP3.9 |               |                 |
 |           |          +-------+---------------+-----------------+
-|           |          | CP3.10| GitHub        | GitHub          |
+|           |          | CP3.10|               |                 |
+|           |          +-------+---------------+-----------------+
+|           |          | CP3.11| GitHub        | GitHub          |
+|           |          +-------+---------------+-----------------+
+|           |          | CP3.12|               |                 |
 |           |          +-------+---------------+-----------------+
 |           |          | PyPy3 |               |                 |
 +-----------+----------+-------+---------------+-----------------+
-|           | x86      | CP3.7 |               |                 |
-|           |          +-------+---------------+-----------------+
-|           |          | CP3.8 |               |                 |
+|           | x86      | CP3.8 |               |                 |
 |           |          +-------+---------------+-----------------+
 |           |          | CP3.9 |               |                 |
 |           |          +-------+---------------+-----------------+
+|           |          | CP3.10|               |                 |
+|           |          +-------+---------------+-----------------+
+|           |          | CP3.11|               |                 |
+|           |          +-------+---------------+-----------------+
+|           |          | CP3.12|               |                 |
+|           |          +-------+---------------+-----------------+
 |           |          | PyPy3 |               |                 |
 | Linux     +----------+-------+---------------+-----------------+
-|           | x64      | CP3.7 | GitHub        | GitHub          |
-|           |          +-------+---------------+-----------------+
-|           |          | CP3.8 | GitHub        | GitHub          |
+|           | x64      | CP3.8 | GitHub        | GitHub          |
 |           |          +-------+---------------+-----------------+
 |           |          | CP3.9 | GitHub        | GitHub          |
 |           |          +-------+---------------+-----------------+
 |           |          | CP3.10| GitHub        | GitHub          |
 |           |          +-------+---------------+-----------------+
+|           |          | CP3.11| GitHub        | GitHub          |
+|           |          +-------+---------------+-----------------+
+|           |          | CP3.12| GitHub        | GitHub          |
+|           |          +-------+---------------+-----------------+
 |           |          | PyPy3 |               |                 |
 +-----------+----------+-------+---------------+-----------------+
-|           | arm64    | CP3.7 |               |                 |
-|           |          +-------+---------------+-----------------+
-|           |          | CP3.8 |               |                 |
+|           | arm64    | CP3.8 |               |                 |
 |           |          +-------+---------------+-----------------+
 |           |          | CP3.9 |               |                 |
 |           |          +-------+---------------+-----------------+
 |           |          | CP3.10|               |                 |
 |           |          +-------+---------------+-----------------+
+|           |          | CP3.11|               |                 |
+|           |          +-------+---------------+-----------------+
+|           |          | CP3.12|               |                 |
+|           |          +-------+---------------+-----------------+
 |           |          | PyPy3 |               |                 |
 | macOS     +----------+-------+---------------+-----------------+
-|           | x64      | CP3.7 | GitHub        | GitHub          |
-|           |          +-------+---------------+-----------------+
-|           |          | CP3.8 | GitHub        | GitHub          |
+|           | x64      | CP3.8 | GitHub        | GitHub          |
 |           |          +-------+---------------+-----------------+
 |           |          | CP3.9 | GitHub        | GitHub          |
 |           |          +-------+---------------+-----------------+
 |           |          | CP3.10| GitHub        | GitHub          |
 |           |          +-------+---------------+-----------------+
+|           |          | CP3.11| GitHub        | GitHub          |
+|           |          +-------+---------------+-----------------+
+|           |          | CP3.12| GitHub        | GitHub          |
+|           |          +-------+---------------+-----------------+
 |           |          | PyPy3 |               |                 |
 +-----------+----------+-------+---------------+-----------------+
diff --git a/docs/html/development/contributing.rst b/docs/html/development/contributing.rst
index 87734ee4d55..b2f6f1d1378 100644
--- a/docs/html/development/contributing.rst
+++ b/docs/html/development/contributing.rst
@@ -112,7 +112,7 @@
 the ``news/`` directory with the extension of ``.trivial.rst``. If you are on a
 POSIX like operating system, one can be added by running
 ``touch news/$(uuidgen).trivial.rst``. On Windows, the same result can be
 achieved in Powershell using ``New-Item "news/$([guid]::NewGuid()).trivial.rst"``.
-Core committers may also add a "trivial" label to the PR which will accomplish
+Core committers may also add a "skip news" label to the PR which will accomplish
 the same thing.

 Upgrading, removing, or adding a new vendored library gets a special mention
diff --git a/docs/html/development/getting-started.rst b/docs/html/development/getting-started.rst
index e248259f08d..bc483997a64 100644
--- a/docs/html/development/getting-started.rst
+++ b/docs/html/development/getting-started.rst
@@ -73,7 +73,7 @@
 pip's tests are written using the :pypi:`pytest` test framework and
 :mod:`unittest.mock`. :pypi:`nox` is used to automate the setup and execution
 of pip's tests.

-It is preferable to run the tests in parallel for better experience during development,
+It is preferable to run the tests in parallel for a better experience during development,
 since the tests can take a long time to finish when run sequentially.

 To run tests:
@@ -104,6 +104,15 @@
 can select tests using the various ways that pytest provides:

 .. code-block:: console

    $ # Using keywords
    $ nox -s test-3.10 -- -k "install and not wheel"

+.. note::
+
+   When running pip's tests with OS distribution Python versions, be aware that some
+   functional tests may fail due to potential patches introduced by the distribution.
+   For all tests to pass consider:
+
+   - Installing Python from `python.org`_ or compile from source
+   - Or, using `pyenv`_ to assist with source compilation
+
 Running pip's entire test suite requires supported version control tools
 (subversion, bazaar, git, and mercurial) to be installed. If you are missing
 any of these VCS, those tests should be skipped automatically. You can also
 explicitly tell pytest to skip those tests:

 .. code-block:: console

    $ nox -s test-3.10 -- -k "not svn"
    $ nox -s test-3.10 -- -k "not (svn or git)"

+.. _python.org: https://www.python.org/downloads/
+.. _pyenv: https://github.com/pyenv/pyenv
+
 Running Linters
 ===============
@@ -194,7 +206,6 @@
 in order to start contributing.

 .. _`open an issue`: https://github.com/pypa/pip/issues/new?title=Trouble+with+pip+development+environment
 .. _`install Python`: https://realpython.com/installing-python/
-.. _`PEP 484 type-comments`: https://www.python.org/dev/peps/pep-0484/#suggested-syntax-for-python-2-7-and-straddling-code
 .. _`rich CLI`: https://docs.pytest.org/en/latest/usage.html#specifying-tests-selecting-tests
 .. _`GitHub`: https://github.com/pypa/pip
 .. _`good first issues`: https://github.com/pypa/pip/labels/good%20first%20issue
diff --git a/docs/html/development/release-process.rst b/docs/html/development/release-process.rst
index b71e2820bd2..21a6ca5622f 100644
--- a/docs/html/development/release-process.rst
+++ b/docs/html/development/release-process.rst
@@ -93,8 +93,13 @@
 issues by pip's maintainers.

 Python Support Policy
 ---------------------

-In general, a given Python version is supported until its usage on PyPI falls below 5%.
-This is at the maintainers' discretion, in case extraordinary circumstances arise.
+pip supports `CPython versions that are not end-of-life`_. Older versions of CPython may
+be supported at the discretion of pip maintainers (based on criteria such as download
+statistics on PyPI, Python versions supported by the vendored dependencies and
+maintenance burden).
+
+pip maintainers accept pull requests to support other Python implementations, but the
+pip CI does not test for compatibility with them.

 .. _`Feature Flags`:
@@ -145,8 +150,8 @@
 #. Push the tag created by ``prepare-release``.
 #. Regenerate the ``get-pip.py`` script in the `get-pip repository`_ (as
    documented there) and commit the results.
-#. Submit a Pull Request to `CPython`_ adding the new version of pip (and upgrading
-   setuptools) to ``Lib/ensurepip/_bundled``, removing the existing version, and
+#. Submit a Pull Request to `CPython`_ adding the new version of pip
+   to ``Lib/ensurepip/_bundled``, removing the existing version, and
    adjusting the versions listed in ``Lib/ensurepip/__init__.py``.
@@ -196,3 +201,4 @@
 for creating a new release can be used, simply changing the version number.

 .. _`get-pip repository`: https://github.com/pypa/get-pip
 .. _`psf-salt repository`: https://github.com/python/psf-salt
 .. _`CPython`: https://github.com/python/cpython
+.. _`CPython versions that are not end-of-life`: https://devguide.python.org/versions/
diff --git a/docs/html/index.md b/docs/html/index.md
index ab0b40dc180..dad5d94f2bf 100644
--- a/docs/html/index.md
+++ b/docs/html/index.md
@@ -23,7 +23,7 @@ cli/index
 :hidden:

 development/index
-ux_research_design
+ux-research-design/index
 news
 Code of Conduct
 GitHub
diff --git a/docs/html/installation.md b/docs/html/installation.md
index 036a91397a5..80a09f39dba 100644
--- a/docs/html/installation.md
+++ b/docs/html/installation.md
@@ -67,11 +67,34 @@
 $ python pip.pyz --help

 If run directly:

-```{pip-cli}
-$ pip.pyz --help
+````{tab} Linux
+```console
+$ chmod +x ./pip.pyz
+$ ./pip.pyz
+```
+
+then the currently active Python interpreter will be used.
+````
+
+````{tab} MacOS
+```console
+$ chmod +x ./pip.pyz
+$ ./pip.pyz
 ```

 then the currently active Python interpreter will be used.
+````
+
+````{tab} Windows
+```doscon
+C:> .\pip.pyz
+```
+
+then the currently active Python interpreter will be used.
+
+You may need to configure your system to recognise the ``.pyz`` extension
+before this will work.
+````

 ## Alternative Methods
@@ -102,8 +125,8 @@
 $ pip install --upgrade pip

 The current version of pip works on:

-- Windows, Linux and MacOS.
-- CPython 3.7, 3.8, 3.9, 3.10 and latest PyPy3.
+- Windows, Linux and macOS.
+- CPython 3.8, 3.9, 3.10, 3.11, 3.12, and latest PyPy3.

 pip is tested to work on the latest patch version of the Python interpreter,
 for each of the minor versions listed above. Previous patch versions are
diff --git a/docs/html/reference/build-system/pyproject-toml.md b/docs/html/reference/build-system/pyproject-toml.md
index a42a3b8c484..778584de397 100644
--- a/docs/html/reference/build-system/pyproject-toml.md
+++ b/docs/html/reference/build-system/pyproject-toml.md
@@ -29,7 +29,7 @@
 that build requirements are handled independently of the user's runtime
 environment.

 For example, a project that needs an older version of setuptools to build can
-still be installed, even if the user has an newer version installed (and
+still be installed, even if the user has a newer version installed (and
 without silently replacing that version).

 ### Build-time dependencies
@@ -130,18 +130,25 @@
 dealing with
 [the same challenges as pip has for legacy builds](build-output).

 ## Fallback Behaviour

+```{warning}
+The following snippet merely describes the fallback behavior. For valid
+examples of `pyproject.toml` to use with setuptools, please refer to
+[the setuptools documentation](
+https://setuptools.pypa.io/en/stable/userguide/quickstart.html#basic-use).
+```
+
 If a project does not have a `pyproject.toml` file containing a `build-system`
 section, it will be assumed to have the following backend settings:

 ```toml
 [build-system]
-requires = ["setuptools>=40.8.0", "wheel"]
+requires = ["setuptools>=40.8.0"]
 build-backend = "setuptools.build_meta:__legacy__"
 ```

 If a project has a `build-system` section but no `build-backend`, then:

-- It is expected to include `setuptools` and `wheel` as build requirements. An
+- It is expected to include `setuptools` as a build requirement. An
   error is reported if the available version of `setuptools` is not recent
   enough.
diff --git a/docs/html/reference/inspect-report.md b/docs/html/reference/inspect-report.md
index 1355e5d4274..ad8263c6742 100644
--- a/docs/html/reference/inspect-report.md
+++ b/docs/html/reference/inspect-report.md
@@ -27,9 +27,8 @@
 The report is a JSON object with the following properties:

   distribution packages that are installed.
 - `environment`: an object describing the environment where the installation report was
-  generated. See [PEP 508 environment
-  markers](https://peps.python.org/pep-0508/#environment-markers) for more information.
-  Values have a string type.
+  generated. See the section on environment markers in the {ref}`pypug:dependency-specifiers`
+  specification for more information. Values have a string type.

 (InspectReportItem)=
diff --git a/docs/html/reference/installation-report.md b/docs/html/reference/installation-report.md
index 5823205f977..e0cfcd97e8b 100644
--- a/docs/html/reference/installation-report.md
+++ b/docs/html/reference/installation-report.md
@@ -56,6 +56,9 @@
 package with the following properties:

   URL reference. `false` if the requirements was provided as a name and version
   specifier.

+- `is_yanked`: `true` if the requirement was yanked from the index, but was still
+  selected by pip conform to [PEP 592](https://peps.python.org/pep-0592/#installers).
+
 - `download_info`: Information about the artifact (to be) downloaded for installation,
   using the [direct URL data structure](https://packaging.python.org/en/latest/specifications/direct-url-data-structure/).
@@ -106,6 +109,7 @@
         }
       },
       "is_direct": false,
+      "is_yanked": false,
       "requested": true,
       "metadata": {
         "name": "pydantic",
@@ -133,6 +137,7 @@
         }
       },
       "is_direct": true,
+      "is_yanked": false,
       "requested": true,
       "metadata": {
         "name": "packaging",
diff --git a/docs/html/topics/authentication.md b/docs/html/topics/authentication.md
index 966ac3e7a0d..a2649071762 100644
--- a/docs/html/topics/authentication.md
+++ b/docs/html/topics/authentication.md
@@ -68,7 +68,7 @@
 man pages][netrc-docs].

 pip supports loading credentials stored in your keyring using the
 {pypi}`keyring` library, which can be enabled py passing `--keyring-provider`
 with a value of `auto`, `disabled`, `import`, or `subprocess`. The default
-value `auto` respects `--no-input` and not query keyring at all if the option
+value `auto` respects `--no-input` and does not query keyring at all if the option
 is used; otherwise it tries the `import`, `subprocess`, and `disabled`
 providers (in this order) and uses the first one that works.
diff --git a/docs/html/topics/caching.md b/docs/html/topics/caching.md
index 954cebe402d..8d6c40f112d 100644
--- a/docs/html/topics/caching.md
+++ b/docs/html/topics/caching.md
@@ -27,6 +27,13 @@
 While this cache attempts to minimize network activity, it does not prevent
 network access altogether. If you want a local install solution that
 circumvents accessing PyPI, see {ref}`Installing from local packages`.

+```{versionchanged} 23.3
+A new cache format is now used, stored in a directory called `http-v2` (see
+below for this directory's location). Previously this cache was stored in a
+directory called `http` in the main cache directory. If you have completely
+switched to newer versions of `pip`, you may wish to delete the old directory.
+```
+
 (wheel-caching)=

 ### Locally built wheels
@@ -124,11 +131,11 @@
 The {ref}`pip cache` command can be used to manage pip's cache.

 ### Removing a single package

-`pip cache remove setuptools` removes all wheel files related to setuptools from pip's cache.
+`pip cache remove setuptools` removes all wheel files related to setuptools from pip's cache. HTTP cache files are not removed at this time.
 ### Removing the cache

-`pip cache purge` will clear all wheel files from pip's cache.
+`pip cache purge` will clear all files from pip's wheel and HTTP caches.

 ### Listing cached files
diff --git a/docs/html/topics/configuration.md b/docs/html/topics/configuration.md
index e4aafcd2b98..12bad0ad7a4 100644
--- a/docs/html/topics/configuration.md
+++ b/docs/html/topics/configuration.md
@@ -19,8 +19,8 @@
 and how they are related to pip's various command line options.

 ## Configuration Files

-Configuration files can change the default values for command line option.
-They are written using a standard INI style configuration files.
+Configuration files can change the default values for command line options.
+The files are written using standard INI format.

 pip has 3 "levels" of configuration files:

 - `global`: system-wide configuration file, shared across users.
 - `user`: per-user configuration file.
 - `site`: per-environment configuration file; i.e. per-virtualenv.

+Additionally, environment variables can be specified which will override any of the above.
+
 ### Location

 pip's configuration files are located in fairly standard locations. This
 location is different on different operating systems, and has some additional
-complexity for backwards compatibility reasons.
+complexity for backwards compatibility reasons. Note that if user config files
+exist in both the legacy and current locations, values in the current file
+will override values in the legacy file.

 ```{tab} Unix
@@ -88,9 +92,10 @@
 Site
 ### `PIP_CONFIG_FILE`

 Additionally, the environment variable `PIP_CONFIG_FILE` can be used to specify
-a configuration file that's loaded first, and whose values are overridden by
-the values set in the aforementioned files. Setting this to {any}`os.devnull`
-disables the loading of _all_ configuration files.
+a configuration file that's loaded last, and whose values override the values
+set in the aforementioned files. Setting this to {any}`os.devnull`
+disables the loading of _all_ configuration files. Note that if a file exists
+at the location that this is set to, the user config file will not be loaded.

 (config-precedence)=

@@ -99,10 +104,10 @@
 When multiple configuration files are found, pip combines them in the following
 order:

-- `PIP_CONFIG_FILE`, if given.
 - Global
 - User
 - Site
+- `PIP_CONFIG_FILE`, if given.

 Each file read overrides any values read from previous files, so if the
 global timeout is specified in both the global file and the per-user file
 then the latter value will be used.
@@ -113,7 +118,7 @@
 The names of the settings are derived from the long command line option.

 As an example, if you want to use a different package index (`--index-url`) and
-set the HTTP timeout (`--default-timeout`) to 60 seconds, your config file would
+set the HTTP timeout (`--timeout`) to 60 seconds, your config file would
 look like this:

 ```ini
@@ -200,7 +205,7 @@
 pip's command line options can be set with environment variables using the
 format `PIP_` . Dashes (`-`) have to be replaced with underscores (`_`).
-- `PIP_DEFAULT_TIMEOUT=60` is the same as `--default-timeout=60` +- `PIP_TIMEOUT=60` is the same as `--timeout=60` - ``` PIP_FIND_LINKS="http://mirror1.example.com http://mirror2.example.com" ``` diff --git a/docs/html/topics/dependency-resolution.md b/docs/html/topics/dependency-resolution.md index 03f276baa2f..b932a2cdaf8 100644 --- a/docs/html/topics/dependency-resolution.md +++ b/docs/html/topics/dependency-resolution.md @@ -32,8 +32,8 @@ been done, and going back to choose another path. This can look like pip downloading multiple versions of the same package, since pip explicitly presents each download to the user. The backtracking of -choices made during is not unexpected behaviour or a bug. It is part of how -dependency resolution for Python packages works. +choices made during this step is not unexpected behaviour or a bug. It is part +of how dependency resolution for Python packages works. ````{admonition} Example The user requests `pip install tea`. The package `tea` declares a dependency on @@ -175,22 +175,24 @@ When you get a `ResolutionImpossible` error, you might see something like this: ```{pip-cli} -$ pip install "pytest < 4.6" pytest-cov==2.12.1 +$ pip install package_coffee==0.44.1 package_tea==4.3.0 [regular pip output] -ERROR: Cannot install pytest-cov==2.12.1 and pytest<4.6 because these package versions have conflicting dependencies. +ERROR: Cannot install package_coffee==0.44.1 and package_tea==4.3.0 because these package versions have conflicting dependencies. The conflict is caused by: - The user requested pytest<4.6 - pytest-cov 2.12.1 depends on pytest>=4.6 + package_coffee 0.44.1 depends on package_water<3.0.0,>=2.4.2 + package_tea 4.3.0 depends on package_water==2.3.1 ``` -In this example, pip cannot install the packages requested because they are -asking for conflicting versions of pytest. +In this example, pip cannot install the packages you have requested, +because they each depend on different versions of the same package +(``package_water``): -- `pytest-cov` version `2.12.1`, requires `pytest` with a version or equal to - `4.6`. -- `package_tea` version `4.3.0` depends on version `2.3.1` of - `package_water` +- ``package_coffee`` version ``0.44.1`` depends on a version of + ``package_water`` that is less than ``3.0.0`` but greater than or equal to + ``2.4.2`` +- ``package_tea`` version ``4.3.0`` depends on version ``2.3.1`` of + ``package_water`` Sometimes these messages are straightforward to read, because they use commonly understood comparison operators to specify the required version @@ -199,16 +201,16 @@ commonly understood comparison operators to specify the required version However, Python packaging also supports some more complex ways for specifying package versions (e.g. `~=` or `*`): -| Operator | Description | Example | -| -------- | -------------------------------------------------------------- | --------------------------------------------------- | -| `>` | Any version greater than the specified version. | `>3.1`: any version greater than `3.1`. | -| `<` | Any version less than the specified version. | `<3.1`: any version less than `3.1`. | -| `<=` | Any version less than or equal to the specified version. | `<=3.1`: any version less than or equal to `3.1`. | -| `>=` | Any version greater than or equal to the specified version. | `>=3.1`: version `3.1` and greater. | -| `==` | Exactly the specified version. | `==3.1`: only `3.1`. | -| `!=` | Any version not equal to the specified version. | `!=3.1`: any version other than `3.1`. 
| -| `~=` | Any compatible{sup}`1` version. | `~=3.1`: any version compatible{sup}`1` with `3.1`. | -| `*` | Can be used at the end of a version number to represent _all_. | `==3.1.*`: any version that starts with `3.1`. | +| Operator | Description | Example | +| -------- | -------------------------------------------------------------- | ---------------------------------------------------- | +| `>` | Any version greater than the specified version. | `>3.1`: any version greater than `3.1`. | +| `<` | Any version less than the specified version. | `<3.1`: any version less than `3.1`. | +| `<=` | Any version less than or equal to the specified version. | `<=3.1`: any version less than or equal to `3.1`. | +| `>=` | Any version greater than or equal to the specified version. | `>=3.1`: any version greater than or equal to `3.1`. | +| `==` | Exactly the specified version. | `==3.1`: only version `3.1`. | +| `!=` | Any version not equal to the specified version. | `!=3.1`: any version other than `3.1`. | +| `~=` | Any compatible{sup}`1` version. | `~=3.1`: any version compatible{sup}`1` with `3.1`. | +| `*` | Can be used at the end of a version number to represent _all_. | `==3.1.*`: any version that starts with `3.1`. | {sup}`1` Compatible versions are higher versions that only differ in the final segment. `~=3.1.2` is equivalent to `>=3.1.2, ==3.1.*`. `~=3.1` is equivalent to `>=3.1, ==3.*`. @@ -237,7 +239,7 @@ package version. In our first example both `package_coffee` and `package_tea` have been _pinned_ to use specific versions -(`package_coffee==0.44.1b0 package_tea==4.3.0`). +(`package_coffee==0.44.1 package_tea==4.3.0`). To find a version of both `package_coffee` and `package_tea` that depend on the same version of `package_water`, you might consider: @@ -252,20 +254,20 @@ In the second case, pip will automatically find a version of both `package_coffee` and `package_tea` that depend on the same version of `package_water`, installing: -- `package_coffee 0.46.0b0`, which depends on `package_water 2.6.1` -- `package_tea 4.3.0` which _also_ depends on `package_water 2.6.1` +- `package_coffee 0.44.1`, which depends on `package_water 2.6.1` +- `package_tea 4.4.3` which _also_ depends on `package_water 2.6.1` If you want to prioritize one package over another, you can add version specifiers to _only_ the more important package: ```{pip-cli} -$ pip install package_coffee==0.44.1b0 package_tea +$ pip install package_coffee==0.44.1 package_tea ``` This will result in: -- `package_coffee 0.44.1b0`, which depends on `package_water 2.6.1` -- `package_tea 4.1.3` which also depends on `package_water 2.6.1` +- `package_coffee 0.44.1`, which depends on `package_water 2.6.1` +- `package_tea 4.4.3` which _also_ depends on `package_water 2.6.1` Now that you have resolved the issue, you can repin the compatible package versions as required. diff --git a/docs/html/topics/https-certificates.md b/docs/html/topics/https-certificates.md index b42c463e6cc..341cfc632de 100644 --- a/docs/html/topics/https-certificates.md +++ b/docs/html/topics/https-certificates.md @@ -28,19 +28,9 @@ It is possible to use the system trust store, instead of the bundled certifi certificates for verifying HTTPS certificates. This approach will typically support corporate proxy certificates without additional configuration. -In order to use system trust stores, you need to: - -- Use Python 3.10 or newer. -- Install the {pypi}`truststore` package, in the Python environment you're - running pip in. 
- - This is typically done by installing this package using a system package - manager or by using pip in {ref}`Hash-checking mode` for this package and - trusting the network using the `--trusted-host` flag. +In order to use system trust stores, you need to use Python 3.10 or newer. ```{pip-cli} - $ python -m pip install truststore - [...] $ python -m pip install SomePackage --use-feature=truststore [...] Successfully installed SomePackage diff --git a/docs/html/topics/index.md b/docs/html/topics/index.md index ad467615090..40fd1527944 100644 --- a/docs/html/topics/index.md +++ b/docs/html/topics/index.md @@ -21,4 +21,5 @@ repeatable-installs secure-installs vcs-support python-option +workflow ``` diff --git a/docs/html/topics/more-dependency-resolution.md b/docs/html/topics/more-dependency-resolution.md index 31967a6a920..b955e2ec114 100644 --- a/docs/html/topics/more-dependency-resolution.md +++ b/docs/html/topics/more-dependency-resolution.md @@ -8,7 +8,7 @@ and this article is intended to help readers understand what is happening ```{note} This document is a work in progress. The details included are accurate (at the time of writing), but there is additional information, in particular around -pip's interface with resolvelib, which have not yet been included. +pip's interface with resolvelib, which has not yet been included. Contributions to improve this document are welcome. ``` @@ -26,7 +26,7 @@ The practical implication of that is that there will always be some situations where pip cannot determine what to install in a reasonable length of time. We make every effort to ensure that such situations happen rarely, but eliminating them altogether isn't even theoretically possible. We'll discuss what options -yopu have if you hit a problem situation like this a little later. +you have if you hit a problem situation like this a little later. ## Python specific issues @@ -97,10 +97,10 @@ feeding candidates to the resolver, and has a key role to play in selecting suitable candidates. Note that the resolver is *only* relevant for packages fetched from an index. -Candidates coming from other sources (local source directories, PEP 508 -direct URL references) do *not* go through the finder, and are merged with the -candidates provided by the finder as part of the resolver's "provider" -implementation. +Candidates coming from other sources (local source directories, {ref}`direct +URL references `) do *not* go through the finder, +and are merged with the candidates provided by the finder as part of the resolver's +"provider" implementation. As well as determining what versions exist in the index for a given project, the finder selects the best distribution file to use for that candidate. This @@ -136,7 +136,7 @@ operations: that satisfy them. This is essentially where the finder interacts with the resolver. * `is_satisfied_by` - checks if a candidate satisfies a requirement. This is - basically the implementation of what a requirement meams. + basically the implementation of what a requirement means. * `get_dependencies` - get the dependency metadata for a candidate. This is the implementation of the process of getting and reading package metadata. diff --git a/docs/html/topics/vcs-support.md b/docs/html/topics/vcs-support.md index 465d5ecb78c..c8169dbe24c 100644 --- a/docs/html/topics/vcs-support.md +++ b/docs/html/topics/vcs-support.md @@ -140,9 +140,8 @@ pip also looks at the `egg` fragment specifying the "project name". In practice mode. 
In all other circumstances, the `egg` fragment is not necessary and its use is discouraged. -The `egg` fragment **should** be a bare -[PEP 508](https://peps.python.org/pep-0508/) project name. Anything else -is not guaranteed to work. +The `egg` fragment **should** be a bare {ref}`project name `. +Anything else is not guaranteed to work. ````{admonition} Example If your repository layout is: diff --git a/docs/html/topics/workflow.md b/docs/html/topics/workflow.md new file mode 100644 index 00000000000..c7c1159adc1 --- /dev/null +++ b/docs/html/topics/workflow.md @@ -0,0 +1,40 @@ +# Pip is not a workflow management tool + +The core purpose of pip is to *manage the packages installed in your +environment*. Whilst package management is an important part of most Python +development workflows, it is only one part. Tasks like creating and managing +environments, configuring and running development tasks, managing the Python +interpreter itself, and managing the overall "project", are not part of pip's +scope. Managing a development workflow as a whole is a complex task and one +where there are many views on the "correct approach". + +Pip has a number of features which make it useful in development workflows - for +example, the ability to install the current project via `pip install .`, +editable installs, and requirements files. However, there is no intention that +pip will manage the workflow as a whole. + +As an example, pip provides the `pip wheel` command, which can be used to build +a wheel for your project. However, there is no corresponding command to build a +source distribution. This is because building a wheel is a fundamental step in +installing a package (if that package is only available as source code), whereas +building a source distribution is never needed when installing. Users who need a +tool to build their project should use a dedicated tool like `build`, which +provides commands to build wheels and source distributions. + + +## The role of `ensurepip` + +Pip is available in a standard Python installation, via the `ensurepip` stdlib +module. This provides users with an "out of the box" installer, which can be +used to gain access to all of the various tools and libraries available on PyPI. +In particular, this enables the installation of a number of workflow tools. + +This "bootstrapping" mechanism was proposed (and accepted) in [PEP +453](https://peps.python.org/pep-0453/). + + +## Further information + +The [Packaging User Guide](https://packaging.python.org) discusses Python +project development, and includes tool recommendations for people looking for +further information on how to manage their development workflow. diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst index 9a6f2901cd5..aa2e3ad8e26 100644 --- a/docs/html/user_guide.rst +++ b/docs/html/user_guide.rst @@ -264,7 +264,7 @@ Installing from Wheels "Wheel" is a built, archive format that can greatly speed installation compared to building and installing from source archives. For more information, see the -`Wheel docs `_ , :pep:`427`, and :pep:`425`. +:ref:`specification `. pip prefers Wheels where they are available. To disable this, use the :ref:`--no-binary ` flag for :ref:`pip install`. @@ -306,7 +306,8 @@ name: .. note:: In the future, the ``path[extras]`` syntax may become deprecated. It is - recommended to use PEP 508 syntax wherever possible. + recommended to use :ref:`standard ` + syntax wherever possible. 
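As an illustration of the standard syntax, a local project with extras that
might otherwise be installed as ``./proj[dev]`` can be written as a direct
reference instead. This is a minimal sketch, assuming a project directory
``proj`` whose distribution name is also ``proj`` (both names are
placeholders):

.. code-block:: python

    import pathlib
    import subprocess
    import sys

    # Standard form: "name[extras] @ file:///absolute/path" instead of
    # the non-standard "./proj[dev]" path-with-extras spelling.
    requirement = f"proj[dev] @ {pathlib.Path('proj').resolve().as_uri()}"

    subprocess.check_call([sys.executable, "-m", "pip", "install", requirement])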
For the cases where wheels are not available, pip offers :ref:`pip wheel` as a
convenience, to build wheels for all your requirements and dependencies.
@@ -855,6 +856,12 @@ We are using `freeze`_ here which outputs installed packages in requirements for
     reqs = subprocess.check_output([sys.executable, '-m', 'pip', 'freeze'])

+To programmatically monitor download progress, use the ``--progress-bar=raw`` option.
+This will print lines to stdout in the format ``Progress CURRENT of TOTAL``, where
+``CURRENT`` and ``TOTAL`` are integers and the unit is bytes.
+If the real total is unknown then ``TOTAL`` is set to ``0``. Be aware that the
+specific formatting of pip's output is *not* guaranteed to be the same in future versions.
+
 If you don't want to use pip's command line functionality, but are rather
 trying to implement code that works with Python packages, their metadata, or
 PyPI, then you should consider other, supported, packages that offer this type
@@ -1133,6 +1140,13 @@ Since this work will not change user-visible behavior described in the pip
 documentation, this change is not covered by the :ref:`Deprecation Policy`.

+.. attention::
+
+   The legacy resolver is deprecated and unsupported. New features, such
+   as :doc:`reference/installation-report`, will not work with the
+   legacy resolver and this resolver will be removed in a future
+   release.
+
 Context and followup
 --------------------

diff --git a/docs/html/ux-research-design/contribute.md b/docs/html/ux-research-design/contribute.md
new file mode 100644
index 00000000000..48aab68e636
--- /dev/null
+++ b/docs/html/ux-research-design/contribute.md
@@ -0,0 +1,24 @@
+# How to Contribute
+
+## Participate in UX Research
+
+It is important that we hear from pip users so that we can:
+
+- Understand how pip is currently used by the Python community
+- Understand how pip users _need_ pip to behave
+- Understand how pip users _would like_ pip to behave
+- Understand pip’s strengths and shortcomings
+- Make useful design recommendations for improving pip
+
+If you are interested in participating in pip user research, please [join pip’s user panel](https://mail.python.org/mailman3/lists/pip-ux-studies.python.org/).
+
+## Test New Features
+
+You can help the team by testing new features as they are released to the community.
+
+## Report and Work on UX Issues
+
+If you believe that you have found a user experience bug in pip, or you have ideas for how pip could be made better for all users, please file an issue on the [pip issue tracker](https://github.com/pypa/pip/issues/new).
+
+You can also help improve pip’s user experience by [working on UX issues](https://github.com/pypa/pip/issues?q=is%3Aissue+label%3AUX+is%3Aopen). Issues that are ideal for new contributors are marked with “[good first issue](https://github.com/pypa/pip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)”. Explore the
+[UX Guidance](guidance) if you have questions.
diff --git a/docs/html/ux-research-design/guidance.md b/docs/html/ux-research-design/guidance.md
new file mode 100644
index 00000000000..035df4c734b
--- /dev/null
+++ b/docs/html/ux-research-design/guidance.md
@@ -0,0 +1,412 @@
+# UX Guidance
+
+This section of the documentation is intended for contributors who wish to work on improving pip's user experience, including pip's documentation.
+
+## What is User Centered Design?
+
+User-centered design (UCD) or human-centered design (HCD) is an iterative process in which design decisions are informed by an understanding of users and their needs.
There are many terms used to describe this type of work; in this document we will use "user experience (UX) research and design".
+
+For the pip project, UX research and design can be used to:
+
+- Develop a deeper understanding of pip's users, the context in which they use pip and the challenges that they face
+- Inform the design of new or existing pip features, so that pip is more usable and accessible. This may include improving pip's output (including error messages), controls (e.g. commands and flags) and documentation
+- Help pip's development team prioritize feature requests based on user needs
+
+At a high level, the UX research and design process consists of:
+
+1. **[Research](#conducting-research-for-pip)**, where a variety of techniques are used (e.g. [surveys](#surveys) and [interviews](#interviews)) to learn about users and what they want from the tools they use
+2. **[Design](#user-interface-design)**, where solutions are proposed in response to the research conducted. UX research and design is conducted iteratively, with design proposals or prototypes tested with users to validate that they are effective in meeting users' needs. Often, it is necessary to complete several cycles of research, design and validation to find a solution that works:
+
+![Graphic showing an iterative process of Research, Make (Design), Validate, around user goals and needs.](https://user-images.githubusercontent.com/3323703/124515613-c5bae880-ddd7-11eb-99d6-35c0a7522c7a.png)
+
+For more information on how this process has been applied to the pip project, see [research results](research-results/index).
+
+See also:
+
+- [Introduction to user centered design from the interaction design foundation](https://www.interaction-design.org/literature/topics/user-centered-design)
+- [User-Centered Design Basics from usability.gov](https://www.usability.gov/what-and-why/user-centered-design.html)
+- [User-centered design articles and videos from Nielsen Norman Group](https://www.nngroup.com/topic/user-centered-design/)
+
+## Conducting Research for pip
+
+User research can be used to answer a few different types of questions:
+
+- _Understanding the context generally_ — e.g. how is pip used by people? What different environments and contexts is pip used in?
+- _Understanding the users more broadly_ — e.g. who uses pip? How much experience do they have typically? How do they learn how to use pip? Are there any common characteristics between pip users? How diverse are the needs of pip's users?
+- _Evaluating a specific need or challenge_ — e.g. how are pip users encountering a given issue? When does it come up? Do pip users regularly encounter this issue? How would a new feature address this issue?
+
+During the research process, it is important to engage users for input, and incorporate their feedback into decision making.
+
+Input and feedback from users is as valuable to an open source project as code contributions; end users may not be ready yet to submit a pull request or make fixes into the code directly, but their feedback can help to shape pip's priorities and direction.
+
+There are many ways to engage users in open source projects, and sometimes input from community members can feel overwhelming! Providing a structure, such as surveys and interviews, can make it easier to collect and understand feedback.
Some examples of how to engage users are:
+
+- _Surveys_ — good for targeted feedback about specific issues and broad community context and understanding
+- _Interviews_ — good for in-depth conversations to understand or explore a topic
+- _Testing_ — good to evaluate an issue or validate a design idea
+- _Open issue queues_ (e.g. GitHub issues) & support ticket systems — great data source to understand common challenges
+- _Forums or discussion tools_ — great data source to understand common challenges or engage broader community in open discussion
+- _Conferences and events_ — great opportunity to do lightweight interviews or testing of specific features
+
+When running [UX research on pip in 2020](research-results/index), we found that surveys and interviews were particularly useful tools to engage with pip's users. Some general guidelines, as well as pip-specific recommendations, are below.
+
+### Surveys
+
+Surveys are great for collecting broad, large-scale input, e.g. learning more about pip's user community as a whole, or for getting targeted feedback about a specific issue.
+
+Surveys can also be leveraged to get in-situ feedback with early releases of new tools, e.g. prompting users on the command line if they are using a beta version of a feature or asking people for feedback on a documentation page.
+
+As an example, in 2020, the pip UX team published several surveys to learn about pip and pip's users. This included:
+
+- Understanding 'who uses pip'
+- Collecting feedback about pip's documentation
+- Collecting feedback about pip's beta release of the 2020 dependency resolver
+- Asking users how specific parts of pip's 2020 dependency resolver should behave
+
+A full list of the surveys published in 2020 and their results [can be found here](research-results/index).
+
+#### Designing Surveys
+
+When designing surveys, it is important to first establish what you want to learn. It can be useful to write this down as research questions. Example pip research questions [can be found here](https://github.com/pypa/pip/issues/8518).
+
+If you find that your topic is large, or you have many research questions, consider publishing several separate surveys, as long surveys risk a low response / high dropoff rate.
+
+Below is a brief guide to building a survey for pip:
+1. **Introduce your survey**
+   Explain the motivation for the survey, or (for surveys about pip's behaviour) set the scene with a scenario.
+2. **Design your questions**
+   - Limit the number of questions you ask to avoid a low response rate. A good rule of thumb is: 3-4 questions about the specific topic, 2-3 questions about users level of experience / what they use Python or pip for.
+   - When asking about years of experience use the following groupings as options:
+     - < 1 Year
+     - 1-3 Years
+     - 4-6 Years
+     - 7-10 Years
+     - 11-15 Years
+     - 16+ Years
+   - Use closed questions with a fixed number of possible responses (e.g. yes/no, multiple choice, checkboxes, or likert scale) for measuring behaviour, opinion or preferences
+   - Use open questions to learn about reasoning. If you are using a lot of closed questions in your survey, it is useful to include some open questions to "fish" for less expected answers - e.g. asking a user "why?" they chose a particular option
+3. **Pilot your survey and modify it based on feedback**
+   This could be as simple as sharing it with 1-2 people to see if it makes sense.
+4. **Determine where to do outreach**
+   Establish who you want to hear from and where you should post the survey. Are there community members or groups that can help you reach more people?
+   - Does the survey need to be translated into other languages to reach a broader set of the community?
+   - Are you able to compensate people for their time?
+   - Do participants want to be acknowledged as contributors?
+5. **Launch and promote your survey**
+   See survey and interview outreach for recommendations on how to do outreach for pip based on the UX research conducted in 2020.
+ +#### Survey Case Study + +The process described above was followed in 2020, when we wanted to establish whether pip [should install packages with conflicting dependencies](https://github.com/pypa/pip/issues/8452). + +First, we introduced the purpose of the survey, with a scenario: + +![survey introduction with scenario with packages that conflict](https://user-images.githubusercontent.com/3323703/124516502-b046be00-ddd9-11eb-830c-62b8a6fb6182.png) + +Next, we asked a closed question to establish what the user prefers: + +![survey question asking whether pip should allow users to install packages when there are conflicting dependencies](https://user-images.githubusercontent.com/3323703/124516576-e5eba700-ddd9-11eb-8baf-e07773e75742.png) + +Following this, we qualified the response with an open question: + +![survey question asking respondents why pip should allow users to install packages with conflicting dependencies](https://user-images.githubusercontent.com/3323703/124516646-129fbe80-ddda-11eb-9c8a-da127f19fccd.png) + +This was followed by further questions about workarounds, syntax and behaviour preferences. + +Finally, we asked survey participants about themselves, including how much Python experience they have, and what they use Python for. This was to find out if different types of Python users answered the questions differently. + +This survey was shared with the pip team and improved several times, before it was published and promoted using a variety of [outreach channels](#survey-and-interview-outreach). + +In total, we received 415 responses, with [clear results](research-results/override-conflicting-dependencies) that helped us to make strong recommendations on how to move forward with this feature. + +#### Analysing Survey Results + +Surveys are particularly useful for being able to quickly understand trends from a larger population of responses. If your questions are designed well, then you should be able to easily aggregate the data and make statements such as: `X% of respondents said that Option B was the best option.` + +#### Contextualizing the Responses + +It's important to remember that the responses to your survey will be biased by the way that you did outreach for your survey, so unless you can be sure that the people who responded to your survey are representative of all of your users, then you need to be sure to contextualize the results to the participants. Within your survey responses it can be helpful to see if there is variation in the responses by different aspects of your users or your user community, e.g. + +- By experience level — Are responses consistent across experience level or do they vary? E.g. Do newer or more junior experience users have different responses, needs or challenges? +- By background/context — Are responses consistent across background or context? E.g. Do users in a corporate context have similar responses to hobbyist/independent users? Do data analysts have similar responses to software engineers? + +#### How many responses is enough? + +It depends! This is a hard question to answer in research like this — Traditional statistics would suggest that "enough" depends on the total population you need the survey to represent. In UX research, the answer tends to be more around when you see variation in responses level out, and so it's more about signals and trends in the data. 
+
+If you are finding that there aren't patterns in the data, it might mean that your questions weren't clear or provided too many options, or it might mean that you need to reach out to more people.
+
+See also:
+
+- [28 Tips for Creating Great Qualitative Surveys from Nielsen Norman Group](https://www.nngroup.com/articles/qualitative-surveys/)
+- [Open vs. Closed Questions in User Research from Nielsen Norman Group](https://www.nngroup.com/videos/open-vs-closed-questions/)
+- [Survey questions 101: over 70 survey question examples + types of surveys and FAQs - from HotJar](https://www.hotjar.com/blog/survey-questions/)
+
+### Interviews
+
+Interviews are a great way to have more in-depth conversations with users to better understand or explore a topic. Unlike surveys, they are not a great way to understand overall patterns, as it is hard to engage with a large number of people due to the time involved. It can be particularly useful to plan around conferences and events as a way to connect with many users in a more informal setting.
+
+#### Designing Interviews
+
+As with surveys, it's important to establish what you want to learn before you begin.
+
+Often, interviews are conducted with a script; this helps the interview run smoothly by providing some structure. However, it is also OK to go "off script" if the conversation is moving in an interesting or insightful direction.
+
+Below is a brief guide to running an interview for pip:
+1. **Write your script**
+   This should include an introduction that sets the scene for the participant, explaining what the interview is about, how you (or observers) will take notes, how long it will take, how their feedback will be used (and shared) and any other pointers you want to share.
+   Next, design your questions. Limit the number of questions, so that you have enough time to cover key points and the interview does not run for too long. Like in surveys, a good rule of thumb is 2-3 questions about users' level of experience, and what they use Python/pip for, plus 3-4 questions about the specific topic.
+   There are four different types of interview questions:
+   1. Descriptive — This type of question gives you concrete, specific stories and details. It also helps your interviewee "arrive" at the interview, resurfacing their relevant experiences and memories. E.g.
+      - Tell me about a time…
+      - Tell me about the first time…
+      - Tell me about the last time…
+      - Tell me about the worst/best time…
+      - Walk me through how you…
+   2. Reflective — These questions allow the interviewee to revisit and think more deeply about their experiences. Helping the interviewee reflect is at the heart of your interview. Don't rush – give them lots of space to put their thoughts together.
+      - What do you think about…
+      - How do you feel about…
+      - Why do you do…
+      - Why do you think…
+      - What effects did it have when…
+      - How has ... changed over time?
+   3. Clarifying — This type of question gives interviewees the opportunity to expand on key points. Skillful clarifying questions also let you subtly direct the interviewee's storytelling towards the areas you find most intriguing and relevant.
+      - What do you mean when you say…
+      - So, in other words…
+      - It sounds like you're saying [...]. Is that right?
+      - Can you tell me more about that?
+   4. Exploratory — These questions are an invitation to the interviewee to think creatively about their situation, and are best left for the end of the interview. Careful, though – suggestions from a single person are rarely the answer to your design problem, and you need to be clear to them that you're just collecting ideas at this point.
+      - How would you change…
+      - What would happen if…
+      - If you had a magic wand...
+2. **Pilot interview with 1-2 people & modify based on their feedback**
+3. **Determine how to do outreach for interviews**
+   - Who do you want to be sure to hear from? Where do you need to post to contact people for interviews? Are there community members or groups that can help you reach specific people?
+   - Do the interviews need to be translated into other languages to reach a broader set of the community or a specific community?
+   - How will people sign up for your interview?
+   - Are you able to compensate people for their time?
+   - Do participants want to be acknowledged as contributors?
+4. **Start outreach!**
+   See survey and interview outreach for recommendations on how to do outreach for pip based on the UX research conducted in 2020.
+
+Here is an example user interview script used for speaking to users about pip's documentation:
+
+> **Introduction**
+>
+> - Firstly, thank you for giving me your time and for your continued involvement.
+> - The purpose of this interview is to better understand how pip's documentation is perceived and used by Python community members
+> - The interview will take approximately 30 minutes. If you don't understand any of the questions, please ask me to repeat or rephrase. If you don't have a good answer, feel free to tell me to skip.
+> - I will be taking notes. These will be shared on GitHub or the pip docs, but we will remove any identifying data to
+>   protect your anonymity
+> - Please be honest - your feedback can help us make pip better. I won't be offended by anything you have to say :)
+> - (optional) Do you mind if I record this session?
+>
+> **Opening questions**
+>
+> - Can you tell me a bit about how you use Python?
+> - How long have you been using pip?
+>
+> **Solving problems**
+>
+> - Can you tell me about a time you had a problem when using pip?
+> - What happened?
+> - What did you do?
+> - Where did you go?
+> - How did you resolve your problem?
+> - Please go to [https://pip.pypa.io/en/stable/](https://pip.pypa.io/en/stable/)
+> - Have you ever used this documentation?
+> - On a scale of 1-10 how useful was it?
+> - Why?
+> - Are there any projects that you use that you'd like us to look at when thinking about improving pip's docs?
+> - What makes that documentation good/useful?
+>
+> **Conclusion**
+>
+> - What one thing could the pip team do to help users troubleshoot pip problems?
+> - Do you have any questions?
+
+#### How many interviews is enough?
+
+This depends on the complexity of the issue you are discussing, and whether or not you feel that you have gained enough insight from the interviews you have conducted. It also depends on whether you feel you have heard from a wide enough range of people. For example, you may wish to stop interviewing only after you have heard from both expert _and_ novice pip users.
+
+Often, conducting just a few interviews will uncover so many problems that there is enough material to make recommendations to the team.
+
+#### Analyzing Interview Data
+
+Formal interview analysis typically uses a process called "coding" where multiple researchers review interview transcripts and label different statements or comments based on a code system or typology that has been developed to align with the research. This is a great practice and a great way to make sure that the researchers' bias is addressed as part of the process, but most teams do not have the staffing or resources to do this practice.
+
+Instead many smaller teams use lightweight processes of capturing interview statements into **themes**, e.g. specific topics or issue areas around needs or challenges. Interviews are also a great source for **quotes**, which can be helpful for providing an example of why something is important or when/how something comes up for users.
+
+Interview analysis is frequently done using sticky notes, where you can write a quote, issue or finding on a sticky note and then move the sticky notes around into clusters that can be labeled or categorized into the themes. Remotely this can be facilitated by any number of tools, e.g.
digital sticky board tools like [Miro](https://miro.com/) or [Mural](https://www.mural.co/), or even kanban board tools like [Trello](https://trello.com/), [Wekan](https://wekan.github.io/) or [Cryptpad](https://cryptpad.fr/), or this can be done just with text documents or spreadsheets, using lists and categories. It can be helpful to use a [worksheet for debriefing](https://simplysecure.org/resources/interview_synthesis.pdf) at the end of each interview to capture insights and themes quickly before you forget topics from the specific interview.
+
+See also:
+
+- [User Interviews: How, When, and Why to Conduct Them from Nielsen Norman Group](https://www.nngroup.com/articles/user-interviews/)
+- [Interviewing Users from Nielsen Norman Group](https://www.nngroup.com/articles/interviewing-users/)
+
+### Survey and Interview Outreach
+
+The following is a list of outreach platforms that the pip team used when conducting research in 2020. Some were more successful than others:
+
+#### Recommended: UX Research Panel
+
+As part of the [2020 UX Work](research-results/index), we published a form that asked people to join a research panel and be regularly contacted about surveys and interview opportunities. This is now a [mailing list that users can sign up for](https://mail.python.org/mailman3/lists/pip-ux-studies.python.org/), and will be used in an ongoing way in addition to broad public outreach.
+
+#### Recommended: Twitter
+
+We found Twitter to be a very effective platform for engaging with the Python community and driving participation in UX research. We recommend:
+
+1. Asking [ThePSF](https://twitter.com/ThePSF), [PyPA](https://twitter.com/ThePyPA) and [PyPI](https://twitter.com/pypi) to retweet calls for survey and interview participation
+2. Asking specific individuals (who have reach within specific communities, or general followings within the Python community) to retweet
+3. Explicitly asking for retweets within tweets
+4. Responding to users within Twitter
+
+#### Recommended: Specific Interest Groups
+
+We engaged with the [PyLadies](https://pyladies.com/) community via their [Slack channel](https://slackin.pyladies.com/) to drive more participation from women using pip, as we found this demographic more difficult to reach via other channels.
+
+#### Recommended: Conference Communities
+
+Due to the 2020 Global Pandemic we were unable to engage with users via PyCon (or other regional conferences) as we would have liked. However, we would still recommend this channel as a fast and insightful way to engage with large groups of interested people.
+
+#### Worth Exploring: Adding a prompt/path into pip's 'help' command
+
+We didn't have a chance to explore this opportunity, but the idea came up during workshops in December 2020 with PyPA maintainers, and could be a great way to engage users and help point them towards opportunities to contribute.
+
+#### Not recommended: Forums (Discourse, etc)
+
+We used [discuss.python.org](https://discuss.python.org/) several times, posting to the [packaging forum](https://discuss.python.org/c/packaging/14) to ask packaging maintainers about their views on pip's functionality. Unfortunately, this was not as fruitful as we hoped, with very few responses. We found that engaging with packaging maintainers via Twitter was more effective.
+
+Posting surveys on Reddit was also not as useful as we had expected.
If the user posting the survey or call for research participation does not have significant credit on Reddit, then the posting process itself can be challenging. Overall we did not see as much engagement in surveys or interviews come from Reddit relative to other outreach means.
+
+## User Interface Design
+
+Many people associate the term "user interface" with websites or applications; however, it is important to remember that a CLI is a user interface too, and deserves the same design consideration as graphical user interfaces.
+
+Designing for pip includes:
+
+- Designing pip's _input_ - establishing the best way to group functionality under commands, and how to name those commands so that they make sense to the user
+- Writing pip's _output_ - establishing how pip responds to commands and what information it provides the user. This includes writing success and error messages.
+- Providing supplemental materials - e.g. documentation that helps users understand pip's operation
+
+### Design Principles / Usability Heuristics
+
+There are many interaction design principles that help designers design great experiences. Nielsen Norman's [10 Usability Heuristics for User Interface Design](https://www.nngroup.com/articles/ten-usability-heuristics) is a great place to start. Here are some of the ways these principles apply to pip:
+
+- Visibility of system status: ensure all commands result in clear feedback that is relevant to the user - but do not overload the user with too much information (see "Aesthetic and minimalist design")
+- Consistency and standards: when writing interfaces, strive for consistency with the rest of the Python packaging ecosystem, and (where possible) adopt familiar patterns from other CLI tools
+- Aesthetic and minimalist design: remove noise from CLI output to ensure the user can find the most important information
+- Help users recognize, diagnose, and recover from errors: clearly label and explain errors: what happened, why, and what the user can do to try and fix the error. Link to documentation where you need to provide a detailed explanation.
+- Help and documentation: provide help in context and ensure that documentation is task-focussed
+
+#### Additional Resources
+
+- [Command Line Interface Guidelines](https://clig.dev)
+- [10 design principles for delightful CLIs](https://blog.developer.atlassian.com/10-design-principles-for-delightful-clis/)
+
+### Design Tools
+
+Tools that are frequently used in the design process are personas and guidelines, but also wireframing, prototyping, and testing, as well as creating flow diagrams or models.
+
+#### Personas
+
+_For a more in-depth overview of personas and using them in open source projects, this [resource from Simply Secure](https://simplysecure.org/blog/personas) may be helpful._
+
+Personas are abstractions or archetypes of people who might use your tool. A persona often takes the form of a quick portrait including things like — name, age range, job title, enough to give you a sense of who this person is. You can capture this information into a [persona template](https://simplysecure.org/resources/persona-template-tech.pdf) and share them with your open source community as a resource; see [examples from the GitLab UX Team](https://about.gitlab.com/handbook/marketing/strategic-marketing/roles-personas/).
+
+Personas are particularly useful to help ground a feature design in priorities for specific needs of specific users.
This helps provide useful constraints for the design process, so that you can focus your work, and not try to make every feature a Swiss Army knife of solutions for every user.
+
+In 2020, the pip UX team developed the following personas for the pip project:
+
+- Python Software User
+- Python Software Maker
+- Python Package Maintainer
+
+An in-depth write up on how the pip personas were created, and how they can be applied to future pip UX work can be [found here](research-results/personas).
+
+#### Prototyping
+
+In any UX project, it is important to prototype and test interfaces with real users. This provides the team with a feedback loop, and ensures that the solution shipped to the end user meets their needs.
+
+Prototyping CLIs can be a challenge. See [Creating rapid CLI prototypes with cli-output](https://www.ei8fdb.org/prototyping-command-line-interfaces-with-cli-output/) for recommendations.
+
+#### Copywriting Style Guides
+
+Given pip's interface is text, it is particularly important that clear and consistent language is used.
+
+The following copywriting style guides may be useful to the pip team:
+
+- [Warehouse (PyPI) copywriting styleguide and glossary of terms](https://warehouse.readthedocs.io/ui-principles.html#write-clearly-with-consistent-style-and-terminology)
+- Firefox:
+  - [Voice and Tone](https://design.firefox.com/photon/copy/voice-and-tone.html)
+  - [Writing for users](https://design.firefox.com/photon/copy/writing-for-users.html)
+- [Heroku CLI](https://devcenter.heroku.com/articles/cli-style-guide) (very specific to Heroku's CLI tools)
+- [Red Hat PatternFly style guide](https://www.patternfly.org/v4/ux-writing/about)
+- [Writing for UIs from Simply Secure](https://simplysecure.org/blog/writing-for-uis)
+
+### General Resources
+
+- Heroku talk on design of their CLI tools ([video](https://www.youtube.com/watch?v=PHiDG-_XoRk) transcript)
+- [Simply Secure: UX Starter Pack](https://simplysecure.org/ux-starter-pack/)
+- [Simply Secure: Feedback Gathering Guide](https://simplysecure.org/blog/feedback-gathering-guide)
+- [Simply Secure: Getting Quick Tool Feedback](https://simplysecure.org/blog/design-spot-tool-feedback)
+- [Internews: UX Feedback Collection Guidebook](https://globaltech.internews.org/our-resources/ux-feedback-collection-guidebook)
+- [Simply Secure: Knowledge Base](http://simplysecure.org/knowledge-base/)
+- [Open Source Design](https://opensourcedesign.net/resources/)
+- [Nielsen Norman Group](https://www.nngroup.com/articles/)
+- [Interaction Design Foundation](https://www.interaction-design.org/literature)
diff --git a/docs/html/ux-research-design/index.md b/docs/html/ux-research-design/index.md
new file mode 100644
index 00000000000..0d9efd07ac4
--- /dev/null
+++ b/docs/html/ux-research-design/index.md
@@ -0,0 +1,15 @@
+# UX Research & Design
+
+```{toctree}
+:hidden:
+
+contribute
+guidance
+research-results/index
+```
+
+Welcome to pip’s UX research and design documentation.
The purpose of this section of the documentation is to: + +- [Identify where new contributors can participate in or lead UX research and design activities](contribute) +- [Share pip UX guidelines](guidance), including an introduction to User Centered Design practices, and how they can be applied to the pip project +- Share [results of user research](research-results/index) that the pip team has already conducted diff --git a/docs/html/ux-research-design/research-results/about-our-users.md b/docs/html/ux-research-design/research-results/about-our-users.md new file mode 100644 index 00000000000..81a0878d9fe --- /dev/null +++ b/docs/html/ux-research-design/research-results/about-our-users.md @@ -0,0 +1,291 @@ +# About pip's Users + +## Problem + +We want to understand users' background, their cultural environment, and how they experience the world, so that we can find better ways to serve them. + +[Skip to recommendations](#recommendations) + +## Research + +To develop our understanding about pip's users, we published a "Who uses pip?" survey that asked users about: + +- Their location in the world +- Their spoken language +- If they identified as members of an underrepresented group in the Python community +- Their disabilities and if those disabilities affected their usage of pip + +## Results + +164 people responded to the survey, with 40% of these coming from English speaking countries. 80% of participants came from Europe or North America. + +Approx. 60% did not identify as members of an underrepresented group. The majority of participants who did identify as underrepresented did so for gender reasons. + +The majority of participants (94%) responded that they did not have a disability. Of those that did have a disability, the majority were cognitive disabilities (Attention Deficit Hyperactivity Disorder aka ADHD, Autism, Aspergers, Dyslexia) or a hearing disability. + +### Participant Demographics + +#### Location + +The majority of participants came from North America and Western Europe. Participation from pip users in Africa, Asia, and the Middle-East was low. + +![Map of world showing distribution of participants as per table below](https://i.imgur.com/U2MYiK7.png) + +Fig. X: Global distribution of pip research participants. + +| Country Name | Number of participants | +| ------------------------ | ---------------------- | +| United States of America | 42 | +| United Kingdom | 17 | +| France | 12 | +| Germany | 11 | +| Canada | 10 | +| Netherlands | 8 | +| Spain | 6 | +| Switzerland | 5 | +| Nigeria | 4 | +| India | 4 | +| Czech Republic | 4 | +| Argentina | 4 | +| Sweden | 3 | +| Australia | 3 | +| Ukraine | 2 | +| Taiwan | 2 | +| Russia | 2 | +| Greece | 2 | +| Colombia | 2 | +| Chile | 2 | +| Brazil | 2 | +| Belgium | 2 | +| Uganda | 1 | +| Turkey | 1 | +| Singapore | 1 | +| Serbia | 1 | +| Norway | 1 | +| Luxembourg | 1 | +| Japan | 1 | +| Italy | 1 | +| Israel | 1 | +| Ireland | 1 | +| Hungary | 1 | +| Ghana | 1 | +| Finland | 1 | +| Bulgaria | 1 | +| Austria | 1 | +| Total | 164 | + +#### Participant's First Language + +Even though the research was carried out mainly in English, 51% of participants spoke languages other than English. + +| What spoken language do you feel is your first? 
| Number of participants | +| ----------------------------------------------- | ---------------------- | +| English | 79 | +| French | 18 | +| Spanish | 12 | +| German | 11 | +| Russian | 5 | +| Czech | 4 | +| Italian | 4 | +| Portuguese | 3 | +| Dutch | 3 | +| Ukrainian | 2 | +| Swedish | 2 | +| Greek | 2 | +| Catalan | 2 | +| Mandarin | 2 | +| Hungarian | 2 | +| Bengali | 1 | +| Luxembourgish | 1 | +| Bulgarian | 1 | +| Romanian | 1 | +| Chinese | 1 | +| Norwegian | 1 | +| Serbian | 1 | +| Polish | 1 | +| Hebrew | 1 | +| Indonesian | 1 | +| Malayalam | 1 | + +NB: English includes British English and American English. Some participants gave more than one answer, their first answer is included here. + +### Participants who identified as underrepresented in the Python community + +We asked research participants if they identified as members of an underrepresented group within the Python community. + +The wording of this question was deliberately broad to discover participants' understanding of the term "underrepresented" - we listed gender, age, educational background, spoken language, and what they use Python for as a non-exhaustive list of examples. + +![Pie chart showing 17.9% answering - I am not sure, 22.6% answering -yes, 59.4% answering - No to the question - Do you identify as an underrepresented group in the Python community](https://i.imgur.com/ghwzxg9.png) + +Of the 22.6% that responded "Yes" the answers were classified as follows : + +| Underrepresentation category | Count | +| ---------------------------- | ----- | +| Gender | 9 | +| Cultural | 3 | +| Age | 3 | +| Immigration status | 2 | +| Neurodiversity | 3 | +| Other | 6 | +| No answer | 8 | + +NB: This question was included after the survey was published. Total participants was 106, as opposed to all other questions which had 164. + +The majority of participants did not identify as part of an underrepresented group. However, due to the small sample size these results cannot be seen as representative of the whole pip user base. + +#### Participant comments about identifying (or not) as under-represented + +Here is a sample of noteworthy comments from these different groups: + +##### Related to Gender + +> "(I am) LGBTQ/IA+" **- Participant 242608909** + +> "I am a 25 year old female Colombian developer." **- Participant 242611698** + +> "Female, 39, no computer science background whatsoever, self taught." **- Participant 242614039** + +##### Related to Culture + +> "The hispanic community is quite underrepresented in the web in general" **- Participant 242599212** + +> "I am a 1st generation Dominican-American. My parents are from the Dominican Republic." **- Participant 242769361** + +##### Related to Age + +> "Older age, I am 50 now." **- Participant 242769743** + +##### Related to Neurodiversity + +> "I'm a woman. And autistic. But the latter might not be underrepresented ;)" **- Participant 243428773** + +##### Other Noteworthy Comments + +> "Veterans who entered tech post-military" **- Participant 243524784** + +> "I'm a young white cis male, so by far not a minority in those aspects. But at the same time I'm from a third world country, Argentina, and that sometimes (and I emphasize, only sometimes) makes me feel like a minority. When participating in our local communities (Python Argentina), I feel clearly not-minority, and with the responsibility of helping minorities, trying to build a more welcoming and fair environment for them. 
But when I participate in the broader global community, at times I feel underrepresented, seeing it mostly guided by english-s[p]eaking people from first world countries. But if I have to choose, I would say I mostly feel not-minority, because I mostly interact with people from our local communities, where I'm not part of a minority." **- Participant 242592869** + +> "As a CIS male I conform the majoritarian group in the IT world. I'm hopeful that things are changing everywhere, and will keep changing: inclusion is getting bigger and better, more and more people are starting their careers as devs or similar, disregarding ethnicity and/or sexual orientation and that's great! And we need to keep fighting for that." **- Participant 243455292** + +### Participant Disabilities + +Disabilities - physical, motor, cognitive, hearing, speech - alter how people perceive and interact with the world around them - software included. We asked participants about their disabilities and how it affected their usage of pip. + +Understanding these disabilities is important particularly when designing pip command structures, and designing pip output. + +The majority of participants (91%) responded that they did not have a disability. Of those that told us that have a disability, the majority were cognitive disabilities (Attention Deficit Hyperactivity Disorder (ADHD), Autism, Aspergers, Dyslexia, or a hearing disability. + +#### How many participants identified as having a disability? + +| Do you self-identify as someone who has a disability? | Number of responses | +| ----------------------------------------------------- | ------------------- | +| No | 150 | +| Yes | 14 | +| Grand Total | 164 | + +#### Vision + +Participants who answered yes to this question were partially sighted. Their vision disability was not corrected by glasses, but did not significantly affect their usage of pip. + +#### Hearing + +Five participants identified as having a hearing impairment, or hearing loss. While this disability made participants lives more difficult, it did not affect their usage of pip: + +> "Being hard of hearing/impaired makes my life much harder, but so far it never has impacted my usage of pip. Perhaps because I haven‘t used parts of it that would?" **- Participant 242934019** + +> "Not at all given that everything happens by text in my console." **- Participant 243455292** + +However it did affect the way they consume pip learning materials: if video is being used for learning or support, they should have captions/subtitles/transcriptions available. + +> "any videos released, it is so helpful if there is either a) transcripts, or b) captions." **- Participant 243524784** + +#### Cognitive Disabilities + +Nine participants expressed cognitive disabilities including undefined mental health conditions, Attention Deficit Hyperactivity Disorder (ADHD), Autism, Aspergers, Dyslexia. + +These participants did not explain how their cognitive disabilities affect their usage of pip, however there are guidelines and best practices for designing for people with cognitive disabilities. + +#### Physical or Mobility Disability + +One participant responded that they had a physical or mobility disability, but did not give detail about it in relation to their usage of pip. 
+
+### Participants' use of Assistive Technology
+
+The term "assistive technology" (AT) is used to describe products or systems that support and assist individuals with disabilities, restricted mobility or other impairments to perform functions that might otherwise be difficult or impossible. A subset of these are used to make computer technology - hardware and software - more accessible. Common examples of AT used with computer technology are screen readers, text-to-speech output and speech-to-text input.
+
+The majority of participants (94%) said that they have never used assistive technology.
+
+| Do you use assistive technology (AT) when using computers? | Number of responses |
+| ---------------------------------------------------------- | ------------------- |
+| No, I have never used it | 128 |
+| I only use it when needed | 3 |
+| I use it every day | 1 |
+| I have used it in the past, but not anymore | 4 |
+
+Of the eight participants who have used assistive technology, one participant uses assistive technology every day with:
+
+- Text-to-speech output, as "text to speech allow(s) me to listen and learn when my eyes get strained."
+- Speech-to-text input, as they like using their "tablet and makes typing easier"
+- On-screen keyboards
+- Input switches/touch screens
+
+A further seven participants use, or have used, assistive technology as needed:
+
+> "I use custom display filter software to do things like colorize key lines of output automatically (to draw my eye/attention), and provide digit dilimination (I.E. help me tell 1000 and 10000 apart) when using a text console application."
+
+> "The standard Mac user interface design contains enough assistive technology without my needing to use any features which are specifically intended solely as assistive functions."
+
+> "I sometimes use it to make sure that my code will work correctly with AT."
+
+#### Operating systems used with assistive technology
+
+Participants use assistive technology across the three most popular desktop operating systems - Linux (most popular), Windows (2nd most popular), and Mac.
+
+![Pie chart showing breakdown of most popular operating systems for pip users using assistive technology](https://i.imgur.com/CD2ev5P.png)
+
+#### Assistive technology when using pip
+
+We asked participants how well their assistive technologies worked when they use pip. All participants using assistive technology with pip said it worked well for them.
+
+However, we received some feedback about screen readers not coping well with long output, with users experiencing difficulties accessing content at the top of the current terminal window. Commands or actions (e.g. pip help, pip install, failed builds) that generate a lot of content can therefore be a problem for screen reader users.
+
+## Recommendations
+
+### Supporting languages other than English
+
+As 51% of participants speak a language other than English, we recommend that the pip team add localization support to the pip documentation and reach out to the community to find pip users who might be willing to contribute translations. Translators who have [contributed translations to PyPI](https://hosted.weblate.org/projects/pypa/warehouse/) may be a good starting point.
+
+If this is not possible, we recommend linking to useful resources in languages other than English from the pip documentation, as we know from our other research that users use a mixture of the official documentation, search engine searches, Stack Overflow and blogs/websites to find solutions to their problems.
+
+### Supporting pip users with disabilities
+
+Pip's operation is generally very good for users with disabilities. Being a command line application, there are no distracting images or ancillary content, and the user has a large amount of control over how they experience pip via customisation of interface visual preferences (to use contrasting colours, font size and type) and visual and auditory alerts.
+
+To better support pip's users with disabilities, the pip team should:
+
+- Ensure any future video or audio support materials are provided with captions
+- Improve pip's output (see below)
+
+### Improving pip output
+
+Pip's output is currently too verbose, generating an unhelpful amount of output during its operation. This causes usability issues for all users - especially users with cognitive disabilities.
+
+Pip's output should be improved by:
+
+- Retaining only the information that is important to users in their current moment (e.g. at install of a package)
+- Removing unimportant information from the terminal output. The information can still be logged to the log files if needed.
+- Reducing the number of verbosity levels to three. Right now there are effectively seven levels of verbosity (three `--quiet` levels, the default, and three `--verbose` levels), which is overwhelming and in no way useful. We recommend:
+  - Verbosity 0 - shows only what packages are to be installed, notifications identified as important about the operation, any errors and the final outcome
+  - Verbosity 1 - shows more detail about the packages being installed
+  - Verbosity 2 - shows full information, which is also logged to the log files
+
+## Further reading
+
+Designing for people with disabilities:
+
+- [An Introduction to inclusive design](https://www.nomensa.com/blog/2011/introduction-inclusive-design)
+- [How ADHD and dyslexia teach you to do better UX design](https://themasters.io/blog/posts/how-adhd-dyslexia-teach-better-ux-design)
+- [Improve User Experience by Designing with Cognitive Differences in Mind](https://noti.st/elizabethschafer/fg3BR4)
+- [Designing accessible software - guidelines for different disabilities](https://ukhomeoffice.github.io/accessibility-posters/)
+- [Designing for Children with ADHD: The Search for Guidelines for Non-Experts](https://uxpamagazine.org/designing_children_adhd/) (written for children, but generally applicable)
+- [Designing for dyslexia](https://uxplanet.org/designing-for-dyslexia-6d12e8c41cd7)
diff --git a/docs/html/ux-research-design/research-results/ci-cd.md b/docs/html/ux-research-design/research-results/ci-cd.md
new file mode 100644
index 00000000000..f136ddb16ab
--- /dev/null
+++ b/docs/html/ux-research-design/research-results/ci-cd.md
@@ -0,0 +1,42 @@
+# How pip is used in interactive and automated environments (e.g. CI/CD)
+
+## Problem
+
+We want to know about the contexts in which pip users use pip - interactively (i.e. typing pip commands at the command line terminal) and in an automated environment (i.e. as part of continuous software integration or continuous software deployment pipelines).
+
+Different contexts of use mean that users have different important and common tasks, and that when, where and how they complete these tasks differ.
+
+Each of these contexts brings different needs: interactive usage requires the right feedback/output at the right time, whereas an automated environment requires little or no feedback in the moment but detailed feedback after the task has finished.
+
+We also wanted to know what users used pip for - as part of their software development toolchain, or purely as a software installer (analogous to Ubuntu's Aptitude or the Mac App Store). We also asked about their need for pip to build packages from source.
+
+## Research
+
+We created a survey and asked users to respond to the following statements:
+
+- I use pip in an automated environment (e.g. CI/CD pipelines)
+- I have problems with pip in CI/CD pipelines
+- I use pip interactively (e.g. typing pip commands on the command line)
+- I make software and use pip as part of my software development workflow
+- I use pip only to install and use Python packages
+- I need pip to build software packages from source
+
+## Results
+
+Using pip interactively makes up the majority of pip usage (91%); the majority (73%) of this usage is basic usage - to only install and use Python packages.
+
+Half (51%) of all participants used pip in an automated environment, with only 9% having issues with pip in that automated environment. This points to a good user experience for these users.
+
+71% use pip as part of their software toolchain, with only 29% needing pip to build from source.
+
+These results show that the main context of use is interactive - users either writing code or installing software at the command line - and we know from other research that interactive usage has its issues, e.g. pip output being too verbose.
+
+While it is important to provide automated environment users with a good experience, interactive mode users are being underserved.
+
+![Answer to question - I use pip in an automated environment](https://i.imgur.com/pLHqBpN.png)
+
+![Answer to question - I use pip interactively](https://i.imgur.com/8ETVMYS.png)
+
+91% of users said they used pip interactively. This does not preclude them from automated usage.
+
+![Answer to the question - What do you use Python for?](https://i.imgur.com/ySlo2Es.png)
diff --git a/docs/html/ux-research-design/research-results/improving-pips-documentation.md b/docs/html/ux-research-design/research-results/improving-pips-documentation.md
new file mode 100644
index 00000000000..765a8f1069a
--- /dev/null
+++ b/docs/html/ux-research-design/research-results/improving-pips-documentation.md
@@ -0,0 +1,531 @@
+# Improving pip's Documentation
+
+## Problem
+
+We want to establish whether or not the [official pip documentation](https://pip.pypa.io/en/stable/) helps users to solve their pip problems. We also want to identify possible improvements to the content and structure of the docs.
+
+[Skip to recommendations](#recommendations)
+
+## Research
+
+### Interviews
+
+We conducted interviews with pip users specifically discussing documentation. During these interviews we asked about:
+
+- Problems they had experienced while using pip, and how they solved them (with a focus on what information sources they used)
+- How they rate pip's documentation, and what we could do to make the docs more useful
+- What documentation (from other projects or languages) they find valuable, and why
+
+### Surveys
+
+We collected documentation feedback via two surveys:
+
+- In our survey that profiled pip users, we asked "What would be your ideal way of getting help with pip?"
+- We also published a survey specific to pip's docs:
+
+![Screenshot of survey](https://i.imgur.com/dtTnTQJ.png)
+
+### Keyword research
+
+We used keyword research tools to understand what words ("keywords") people use when using search engines to troubleshoot pip problems.
+
+### Other research methods
+
+We also:
+
+1. Asked for volunteers to participate in a diary study, documenting their experience solving pip problems. Unfortunately this was not completed due to lack of interest from the community.
+2. Asked for user feedback on the pip documentation site:
+   ![screenshot of user feedback mechanism on pip docs](https://i.imgur.com/WJVjl8N.png)
+   Unfortunately, we did not gather any useful feedback via this effort.
+3. [Installed analytics on the pip docs](https://github.com/pypa/pip/pull/9146). We are waiting for this to be merged and start providing useful data.
+
+## Results
+
+In total, we:
+
+- Conducted 5 user interviews about pip's documentation
+- Received 141 responses to the question "What would be your ideal way of getting help with pip?"
+- Received 159 responses to the documentation survey
+
+In general, we found that pip's documentation is underutilized by the community, with many users not knowing that it exists. Instead, most users turn to common tools (Google, Stack Overflow) to solve their pip problems.
+
+In response to the question "When you have a problem using pip, what do you do?" (multiselect):
+
+- 81.9% of respondents Google it
+- 56.9% of respondents search or ask on Stack Overflow
+- 33.8% of respondents use pip help from the command line
+- **25.6% of respondents go to the pip docs**
+- 20.6% of respondents go to the Python Packaging User Guide
+- 8.1% of respondents ask on a forum, community board, or chat channel
+
+![screenshot of survey results](https://i.imgur.com/qlt1b4n.png)
+
+Based on survey results, users find pip's docs:
+
+- Marginally more useful than not useful
+- Marginally more clear than unclear
+- Not opinionated enough
+
+Common feedback that emerged from both surveys and user interviews includes:
+
+- The documentation performs poorly in search engine results
+- The style and layout is dated (note: this feedback was collected before the [new theme was deployed](https://github.com/pypa/pip/pull/9012))
+- There is not enough guidance/examples on how to resolve common problems, or achieve specific goals
+- The documentation information architecture is difficult to navigate (the monolithic structure of the user guide is a problem) and does not prioritise the most useful content
+- There should be more instructions specific to each user's different situation (e.g. what operating system they are using)
+- The scope of the documentation is unclear
+- The documentation should recognise that pip exists within an ecosystem of other packaging tools
+- ["There should be one-- and preferably only one --obvious way to do it."](https://www.python.org/dev/peps/pep-0020/) - i.e. the documentation should provide stronger recommendations
+
+While some users mentioned that video would be helpful, more said that video was too long, or inappropriate for the kind of problems they experience using pip.
+
+Some users mentioned that in-person support, forums or chat would be helpful, with many unaware of existing support/community channels.
+
+Several users also noted that improving pip's error messages would reduce the need for better documentation.
+
+From our keyword research we identified seven _query types_: "about pip", "install pip", "uninstall pip", "update pip", "using pip", "errors", and "other".
+
+<details>
+<summary>See keyword research results</summary>
+
+### About pip
+
+- what is pip
+- what is pip in python
+- what is pip python
+- what does pip mean
+- what does pip stand for
+- what does pip stand for python
+- pip meaning
+
+### Install pip
+
+- get pip
+- python install pip
+- install pip
+- installing pip
+- how to install pip python
+- how to install pip
+- how to download pip
+- how to get pip
+- how to check if pip is installed
+- install pip mac
+- how to install pip on mac
+- install pip on mac
+- install pip linux
+- how to install pip linux
+- how to install pip on linux
+- how to install pip in ubuntu
+- how to install pip ubuntu
+- install pip ubuntu
+- ubuntu install pip
+- pip windows
+- install pip windows
+- pip install windows
+- how to install pip windows
+- how to install pip in windows
+- how to install pip on windows
+- how to pip install on windows
+- how to install pip on windows 10
+- how to run pip on windows
+
+### Uninstall pip
+
+- how to uninstall pip
+- uninstall pip
+- pip uninstall
+
+### Update pip
+
+- how to update pip
+- how to upgrade pip
+- pip update
+- pip upgrade
+- upgrade pip
+- how to upgrade pip on windows
+
+### Using pip
+
+- how to use pip
+- how to use pip install
+- how to pip install
+- how to use pip python
+- how to install with pip
+- how to run pip
+- python how to use pip
+- pip install requirements.txt
+- pip requirements.txt
+- pip freeze
+- pip update package
+- pip install specific version
+- pip upgrade package
+- pip uninstall package
+
+### Errors
+
+- no module named pip
+- pip command not found
+- pip is not recognized
+- 'pip' is not recognized as an internal or external command, operable program or batch file.
+- -bash: pip: command not found
+- pip is not recognized as an internal or external command
+- pip install invalid syntax
+
+### Other
+
+- how to add pip to path
+- how to check pip version
+- how does pip work
+- where does pip install packages
+- pip vs pip3
+- where is pip installed
+
+</details>
+
+The prevalence of "install pip" queries strongly suggests that the current installation documentation should be improved and that users are searching for solutions specific to their operating system.
+
+The "about pip" queries also suggest that beginners would benefit from documentation that better explains pip basics - e.g. what pip is and what it does.
+
+## Recommendations
+
+Based on our research, we recommend that the pip team:
+
+- Revise the structure of the documentation:
+  - Break monolithic pages into standalone pages on different subjects, with appropriate meta tags. This will help the docs appear higher in search results for the 81.9% of users who use Google to troubleshoot their pip problems.
+  - Prioritise the most used features (see "[buy a feature](prioritizing-features)" results for guidance)
+- Add a "troubleshooting" section to the documentation that addresses common questions, explains error messages and tells users where they can find more help
+- Provide more context about pip's role in the Python packaging ecosystem by:
+  - Introducing packaging concepts that users need to understand in order to use pip
+  - Explaining pip's role/scope within the packaging ecosystem
+  - Comparing pip to other tools
+- Develop a beginner's guide that walks new pip users through everything they need to know to use pip's most basic functionality. This should include addressing concepts outside of pip's scope (e.g. how to open and use a terminal, how to set up a virtual environment) that may block users from being successful
+- For each page (where appropriate), add sections for:
+  - "tips and tricks" - things to know / gotchas
+  - "troubleshooting" - possible error messages and recommended solutions. Where appropriate, this should link to content in the troubleshooting section.
+  - "see also" (links to external resources - e.g. useful Stack Overflow questions, blog articles, etc.)
+- In general, write content that:
+  - Is opinionated. Prioritize solutions that will work in the majority of cases, while pointing to possible edge cases and workarounds in "tips and tricks", "troubleshooting" and "see also" content
+  - Uses keywords to increase search results visibility
+  - Provides instructions for different contexts - e.g. for users on Windows, Linux, macOS
+  - Increases interlinking with external sources, including packaging.python.org
+
+### Suggested site map
+
+Based on the above user input, we have developed a proposed [site map](https://i.imgur.com/UP5q09W.png) (link opens larger format image) to help guide the redevelopment of pip's documentation in line with the above recommendations.
+
+![sitemap. for details see summary below](https://i.imgur.com/UP5q09W.png)
+
+<details>
+<summary>See notes for this site map</summary>
+
+#### Node 1.0: Quick reference
+
+_Page purpose:_
+
+- To give pip users a quick overview of how to install pip, and use pip's main functionality
+- To link to other (more detailed) areas of the documentation
+
+_Suggested content:_
+
+- Quick installation guide, including how to use a virtual environment. This is necessary for users who want to install more than one Python project on their machine.
+- Common commands / tasks (based on [buy a feature](prioritizing-features) data)
+
+---
+
+#### Node 2.0: About pip
+
+_Page purpose:_
+
+- To introduce pip to new users
+
+_Suggested content:_
+
+- Introduce pip as a command line program
+- Explain what the command line is and how to use it in different operating systems
+- Explain what pip is/does, and what it stands for
+- Link to packaging concepts (node 2.1)
+- Explain pip's scope (e.g. to install and uninstall packages) and link to other tools (node 2.2)
+
+#### Node 2.1: Packaging concepts
+
+_Page purpose:_
+
+- To introduce packaging concepts for new pip users
+
+_Suggested content:_
+
+- What is a package?
+- What types of packages are there? e.g. file types
+- What is package versioning / what are requirement specifiers? (note: talk about potential dependency conflicts here)
+- Where do I get packages from?
+- How should I control how packages are installed on my system (e.g. virtualenv and environment isolation)
+- How can I reproduce an environment / ensure repeatability? (e.g. requirements files)
+- What do I need to know about security? (e.g. hash checking, PyPI name squatting)
+- Link to node 2.2 ("pip vs other packaging tools")
+
+#### Node 2.2: pip vs other packaging tools
+
+_Page purpose:_
+
+- To compare pip to other tools with the same scope
+- To highlight that pip exists within a _packaging ecosystem_ and link to other packaging tools
+
+_Suggested content:_
+
+- Compare pip to other installation tools - e.g. poetry, pipenv, conda. What are the features, pros and cons of each? Why do packaging users choose one over the other?
+- Briefly introduce other packaging projects. Link to https://packaging.python.org/key_projects/
+
+---
+
+#### Node 3.0: Installing pip
+
+_Page purpose:_
+
+- To help pip users install pip
+
+_Suggested content:_
+
+- Refactor current page, emphasising pathways for different operating systems
+- Add "tips and tricks", "troubleshooting" and "see also" (link to external resources) sections to provide additional help
+
+---
+
+#### Node 4.0: Tutorials
+
+_Page purpose:_
+
+- To provide a jumping off place into pip's tutorials
+
+_Suggested content:_
+
+- Link to tutorials, including sub pages, where appropriate
+
+#### Node 4.1: Using pip to install your first package
+
+_Page purpose:_
+
+- To help new pip users get started with pip
+
+_Suggested content:_
+Step by step tutorial (possibly broken into several pages) that covers:
+
+- Using the command line
+- Installing pip (or checking pip is installed)
+- Creating/activating a virtual env (use venv for this, but point to alternatives)
+- Installing a package
+- Showing where the package has been installed
+- Deactivating/reactivating virtualenv
+- Uninstalling a package
+
+(A compressed sketch of this flow appears after Node 4.2, below.)
+
+#### Node 4.2: Advanced tutorial - using pip behind a proxy
+
+_Page purpose:_
+
+- To help advanced pip users achieve specific goals
+
+_Suggested content:_
+
+- Step by step tutorial for using pip behind a proxy
+
+NB: other advanced tutorials should be added as identified by the team and/or requested by the community.
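+
+By way of illustration for Node 4.1, a compressed sketch of the command-line flow such a tutorial might walk through (Unix-style commands; a Windows variant would also be needed, and PyPA's `sampleproject` is used only as a convenient example package):
+
+```console
+$ python -m venv .venv                         # create a virtual environment
+$ source .venv/bin/activate                    # activate it
+(.venv) $ python -m pip install sampleproject
+(.venv) $ python -m pip show sampleproject     # show where and what was installed
+(.venv) $ python -m pip uninstall sampleproject
+(.venv) $ deactivate
+```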
+
+---
+
+#### 5.0: Using pip
+
+_Page purpose:_
+
+- To provide a jumping off point for the user guide and reference guide
+
+_Suggested content:_
+
+- Link to each subject in the user guide
+- Link to reference guide
+
+#### 5.1: User guide
+
+_Page purpose:_
+
+- To provide users with specific detailed instructions on pip's key features
+
+_Suggested content:_
+Break down current user guide into separate pages, or pages linked by subject. Suggested order:
+
+- Running pip
+- Installing Packages
+- Uninstalling Packages
+- Environment recreation with requirements files
+  - sub heading: "pinned version numbers"
+  - sub heading: "hash checking mode"
+- Listing Packages
+- Searching for Packages
+- Installing from local packages
+- Installing from Wheels
+- Wheel bundles
+- “Only if needed” Recursive Upgrade
+- Configuration
+- User Installs
+- Command Completion
+- Basic Authentication Credentials
+- Using a Proxy Server (includes link to tutorial)
+- Constraints Files
+- Using pip from your program
+
+Where possible, each page should include:
+
+- "tips and tricks" for workarounds, common _gotchas_ and edge use cases
+- "troubleshooting" information, linking to content in node 6.2 ("Troubleshooting error messages") where applicable
+- "see also", linking to external resources (e.g. Stack Overflow questions, useful threads on message boards, blog posts, etc.)
+
+Note: the following content should be moved:
+
+- Fixing conflicting dependencies (move to node 6.2 - "Troubleshooting error messages")
+- Dependency resolution backtracking (move to node 6.2 - "Troubleshooting error messages")
+- Changes to the pip dependency resolver in 20.3 (move to node 7.0 - "News, changelog and roadmap")
+
+#### 5.2: Reference guide
+
+_Page purpose:_
+
+- To document pip's CLI
+
+_Suggested content:_
+
+- https://pip.pypa.io/en/stable/reference/
+
+---
+
+#### 6.0: Help
+
+_Page purpose:_
+
+- To provide a jumping off place for users to find answers to their pip questions
+
+_Suggested content:_
+
+- Links to
+  - 6.1 "FAQs"
+  - 6.2 "Troubleshooting error messages"
+  - 6.3 "Finding more help"
+
+#### 6.1: FAQs
+
+_Page purpose:_
+
+- To answer common pip questions / search terms
+
+_Suggested content:_
+
+- What is the difference between pip and pip3?
+- Where does pip install packages?
+- How can I check pip's version?
+- How can I add pip to my path?
+- Where is pip installed?
+- What does pip stand for?
+
+See [popular questions on Stack Overflow](https://stackoverflow.com/search?q=pip&s=ec4ee117-277a-4c5d-a3f5-c921ca6c5da6) for more examples.
+
+#### 6.2: Troubleshooting error messages
+
+_Page purpose:_
+
+- To help pip users solve their problem when they experience an error using pip
+
+_Suggested content:_
+For each (common) error message:
+
+- Explain what happened
+- Explain why it happened
+- Explain what the user can do to resolve the problem
+
+Note: the [ResolutionImpossible](https://pip.pypa.io/en/stable/user_guide/#fixing-conflicting-dependencies) and [dependency resolution backtracking](https://pip.pypa.io/en/stable/user_guide/#dependency-resolution-backtracking)
+documentation should both be moved here.
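+
+By way of illustration, this is the kind of error - taken from the keyword research above - that such an entry would walk through, together with a common first workaround (the output shown is illustrative):
+
+```console
+$ pip install sampleproject
+bash: pip: command not found
+$ python -m pip install sampleproject   # invoke pip via the interpreter instead
+```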
+
+#### 6.3: Finding more help
+
+_Page purpose:_
+
+- To point pip users to other resources if they cannot find the information they need within the pip documentation
+
+_Suggested content:_
+
+- See [getting help](https://pip.pypa.io/en/stable/user_guide/#getting-help)
+
+---
+
+#### 7.0: News, changelog and roadmap
+
+_Page purpose:_
+
+- To share information about:
+  - Recent changes to pip
+  - Upcoming changes to pip
+  - Ideas for improving pip, specifically highlighting where funding would be useful
+
+_Suggested content:_
+
+- [Changes to the pip dependency resolver in 20.3 (2020)](https://pip.pypa.io/en/stable/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020)
+- Links to PSF blog posts about pip
+- Link to [fundable packaging improvements](https://github.com/psf/fundable-packaging-improvements/blob/master/FUNDABLES.md)
+
+---
+
+#### 8.0: Contributing
+
+_Page purpose:_
+
+- To encourage new people to contribute to the pip project
+- To demonstrate that the project values different _types_ of contributions, e.g. not just development
+- To recognise past and current contributors
+
+_Suggested content:_
+
+- Introduction to pip as an open source project
+- Contributors code of conduct
+- Recognition of the different types of contributions that are valued
+- Credit list of contributors, including pip maintainers
+
+#### 8.1: Development
+
+_Page purpose:_
+
+- To onboard people who want to contribute code to pip
+
+_Suggested content:_
+
+- https://pip.pypa.io/en/stable/development/
+
+#### 8.2: UX design
+
+_Page purpose:_
+
+- To onboard people who want to contribute UX (research or design) to pip
+- To share UX knowledge and research results with the pip team
+
+_Suggested content:_
+
+- UX guidelines, and how they apply to the pip project
+- Current UX initiatives (e.g. open surveys, interview slots, etc.)
+- Previous research and results, including UX artifacts (e.g. personas)
+
+#### 8.3: Documentation
+
+_Page purpose:_
+
+- To onboard people who want to contribute to pip's docs
+- To share previous research and recommendations related to pip's docs
+
+_Suggested content:_
+
+- This guide
+- Writing styleguide / glossary of terms - see the [Warehouse documentation](https://warehouse.readthedocs.io/ui-principles.html#write-clearly-with-consistent-style-and-terminology) for an example.
+
+</details>
+
+### Future research suggestions
+
+To continue to improve pip's documentation, we suggest:
+
+- Conducting [card sorting](https://www.nngroup.com/articles/card-sorting-definition/) with pip users to establish the ideal order and grouping of pages
+- Regularly reviewing the documentation analytics, to understand which pages are most/least visited
+- Regularly reviewing Stack Overflow to identify questions for the FAQ
+- Setting up a mechanism for collecting user feedback while users are on the documentation site
diff --git a/docs/html/ux-research-design/research-results/index.md b/docs/html/ux-research-design/research-results/index.md
new file mode 100644
index 00000000000..3cbd73570dd
--- /dev/null
+++ b/docs/html/ux-research-design/research-results/index.md
@@ -0,0 +1,208 @@
+# UX Research Results
+
+Over the course of 2020, the pip team worked on improving pip's user experience, developing a better understanding of pip's UX challenges and opportunities, with a particular focus on pip's new dependency resolver. The [Simply Secure](https://simplysecure.org/) team focused on 4 key areas:
+
+- [Understanding who uses pip](https://github.com/pypa/pip/issues/8518)
+- [Understanding how pip compares to other package managers, and supports other Python packaging tools](https://github.com/pypa/pip/issues/8515)
+- [Understanding how pip's functionality is used, and how it could be improved](https://github.com/pypa/pip/issues/8516), and
+- [Understanding how pip's documentation is used, and how it could be improved](https://github.com/pypa/pip/issues/8517)
+
+Some key outcomes from the 2020 work are:
+
+- This documentation & resource section!
+- A pip UX research panel ([Sign up here!](https://mail.python.org/mailman3/lists/pip-ux-studies.python.org/))
+- New and expanded GitHub issues
+- UX improvements in 2020
+  - UX work supporting the dependency resolver
+  - Improved error messaging
+  - Supporting documentation
+- UX training for the PyPA and pip maintainers
+
+This work was made possible through the [pip donor funded roadmap](https://wiki.python.org/psf/Pip2020DonorFundedRoadmap).
+
+## Outreach
+
+We [recruited participants](https://www.ei8fdb.org/thoughts/2020/03/pip-ux-study-recruitment/) for a user research panel that we could contact when we wanted to run surveys and interviews about pip. In total 472 people signed up to the panel, although some unsubscribed during the research period.
+
+At the end of the 2020 research, we asked users to opt in to a [long-term panel](https://mail.python.org/mailman3/lists/pip-ux-studies.python.org/), where they can be contacted for future UX studies. Should the pip team wish to continue to build this panel, we recommend translating the sign-up form into multiple languages and better leveraging local communities and outreach groups (e.g. PyLadies) to increase the diversity of the participants.
+
+## User Interviews
+
+In total, we **interviewed 48 pip users**, recruited from the user panel, and through social media channels.
+ +During the interviews, we asked users about: + +- How they use Python +- How long they have been using pip +- Whether or not they use a virtual environment +- If and how they address security issues associated with pip +- Which pip commands they regularly use +- How they install packages with pip +- Their experience using pip list, pip show and pip freeze +- Their experience using pip wheel +- Whether or not they use other package managers, and how pip compares to their experience with these other tools +- What the pip team could do to improve pip +- Problems they have experienced while using pip, and how they solved these problems +- Their perception and use of the pip documentation +- What other technical documentation they value, and how the pip docs could take inspiration from these +- What other resources the pip team could provide to help pip users solve their problems + +## Surveys + +We **published 10 surveys** to gather feedback about pip's users and their preferences: + +
+| Title | Purpose | Results |
+| ----- | ------- | ------- |
+| Pip research panel survey | Recruit pip users to participate in user research, user tests and future surveys. See the associated blog post for more information. | 472 full sign-ups |
+| Feedback for testing the new pip resolver | Understand use cases where the new resolver fails due to dependency conflicts. See the associated blog post for more information. | 459 responses via the feedback form, approx. 8 issues transferred to the issue tracker |
+| How should pip handle conflicts with already installed packages when updating other packages? | Determine if the way that pip handles package upgrades is in line with users' expectations/needs. See the related blog post and GitHub issue for more information. | See write up, including recommendations |
+| Learning about our users | Learn about pip's users, including: their usage of Python and pip; why and how they started using Python; if they are living with any disabilities, and if so what effect (if any) this has on their usage of Python and pip; if they use assistive technologies when using Python and pip, and how these work for them; and where they get support when they have issues with pip | See write up |
+| Buy a pip feature | Establish which features are most important to pip's users | See write up |
+| Should pip install conflicting dependencies? | Establish whether pip should provide an override that allows users to install packages with conflicting dependencies | See write up |
+| How should pip force reinstall work? | Establish whether pip force reinstall should continue to behave the way it currently does, whether the functionality should be changed, or whether the option should be removed | See write up |
+| Feedback on pip search | Establish whether or not to remove or redesign pip search. See this GitHub issue for more information. | See write up |
+| Feedback on pip's docs | Gather feedback on pip's docs, supplementing feedback gathered in user interviews | See write up |
+
+## All Results
+
+```{toctree}
+:maxdepth: 1
+
+about-our-users
+mental-models
+users-and-security
+ci-cd
+personas
+prioritizing-features
+override-conflicting-dependencies
+pip-force-reinstall
+pip-search
+pip-upgrade-conflict
+improving-pips-documentation
+```
+
+## Read More
+
+- [Pip team midyear report (blog, July 2020)](https://pyfound.blogspot.com/2020/07/pip-team-midyear-report.html)
+- [Creating rapid CLI prototypes with cli-output (blog, Oct 2020)](https://www.ei8fdb.org/prototyping-command-line-interfaces-with-cli-output/)
+- [Changes are coming to pip (video)](https://www.youtube.com/watch?v=B4GQCBBsuNU)
+- [How should pip handle dependency conflicts when updating already installed packages? (blog, July 2020)](https://www.ei8fdb.org/how-should-pip-handle-conflicts-when-updating-already-installed-packages/)
+- [Test pip's alpha resolver and help us document dependency conflicts (blog, May 2020)](https://www.ei8fdb.org/test-pips-alpha-resolver-and-help-us-document-dependency-conflicts/)
+- [How do you deal with conflicting dependencies caused by pip installs? (blog, April 2020)](https://www.ei8fdb.org/how-do-you-deal-with-conflicting-dependencies-caused-by-pip-installs/)
+- [pip UX studies: response data (blog, March 2020)](https://www.ei8fdb.org/pip-ux-studies-response-data/)
+
+Other PyPA UX work:
+
+- [PyPI User Research (blog, July 2018)](https://whoisnicoleharris.com/2018/07/22/pypi-user-research.html)
+- [Warehouse - The Future of PyPI](https://whoisnicoleharris.com/warehouse/)
+- [Accessibility on Warehouse (PyPI) (blog, May 2018)](https://whoisnicoleharris.com/2018/05/17/warehouse-accessibility.html)
+- [User Testing Warehouse (blog, Mar 2018)](https://whoisnicoleharris.com/2018/03/13/user-testing-warehouse.html)
+- [Designing Warehouse - An Overview (blog, Dec 2015)](https://whoisnicoleharris.com/2015/12/31/designing-warehouse-an-overview.html)
diff --git a/docs/html/ux-research-design/research-results/mental-models.md b/docs/html/ux-research-design/research-results/mental-models.md
new file mode 100644
index 00000000000..0491586efab
--- /dev/null
+++ b/docs/html/ux-research-design/research-results/mental-models.md
@@ -0,0 +1,70 @@
+# How Users Understand pip
+
+## Problem
+
+We want to understand how pip's users understand pip as a tool: what they think it is and what it does.
+
+[Skip to recommendations](#recommendations)
+
+## Research
+
+In order to capture participants' mental models of pip and of how package management works, we asked participants the following questions:
+
+- In your own words, explain what pip is
+- In your own words, explain what happens when pip installs a software package
+- In your own words, explain what a Python package dependency is
+
+When we talk about mental models, we talk about "deep" or "shallow" mental models. When a user has a deep mental model of something, they have a detailed understanding of it; a shallow model is the opposite.
+
+In order to evaluate those mental models - do they match the reality of pip and package management? - we worked with the maintainers to identify: 1. pip's behaviours and activities (18 aspects), 2. what happens when pip installs a package (13 aspects), and 3. what a Python package dependency is (10 aspects). We then scored participants' answers against these.
+
+## Results
+
+The analysis focused on participants with between 2 and 10 years of Python experience.
+
+Over 90% of participants did not have a deep understanding of pip, with limited understanding of what pip is, what it does during the install process, and of package management in general.
+However, while participants' understanding was low, only 4 participants had factually incorrect understandings of what pip is and does.
+
+Participants had a slightly deeper understanding of what happens during a pip install process. The most in-depth answer included 7 of the 13 identified aspects. The median was 3. Answers focused on resolving dependencies, finding possible package names, downloading assets and installing the package.
+
+Participants' understanding of software dependencies was again shallow - the most in-depth answer included 8 identified aspects. The median was 3. Answers focused on the fact that software dependencies were a result of code reuse, and that constraining package versions reduced the possibility of dependency conflicts.
+
+The full data is available in [this spreadsheet](https://docs.google.com/spreadsheets/d/1HBiNyehaILxhzZKWcBavkKXDzJr6gIt_Y8Jm8RRgJYg/edit#gid=0).
+
+### Responses to "In your own words, explain what pip is"
+
+> "pip is a standard command-line tool for managing python packages. It has three primary functions: (1) obtaining & caching python packages and/or their dependencies from a repository (typically pypi), (2) building (if needed) and installing python packages--and related dependencies--to a 'site-packages' location in the python path, and (optionally) (3) uninstalling previously-installed packages." **- participant 242608909 (Scientist, Professor in the Earth and Atmospheric Sciences Department, using Python for 7 - 10 years)**
+
+> "Pip is a package management system for python. Kind of like apt in linux, it can be used to install packages in public or private repositories into the current version or environment of Python that invoked the pip command." **- participant 240364032 (Professional software developer using Python for 7-10 years)**
+
+> "pip allows to install/update remove python libraries in your environment. pip manage the library. you will need something else to manage your environment. To use it the easiest is pip install `package-name` I recommend using a requirements.txt and add as you go the library and do pip install -r requirements.txt each time. it avoid to forget a library at the end of the project :)" **- participant 241178995 (Data scientist working in software engineering)**
+
+> "python's npm/cargo/opam... dedicated package manager and ecosystem for python libraries and applications" **- participant 240306262 (self-taught Python creative artist and web developer, using Python for 5-6 years)**
+
+> "A tool to download & install packages and resolve dependencies. I see it in the same area as yum, zypper or apt-get install in the Linux world." **- participant 240306204 (Using Python for scientific research and data analysis for 3 - 4 years)**
+
+> "Pip is the tool primarily used in the Python community to install packages. ("Package" means two different things in Python; it can be a target of the `import` statement that includes modules and other packages, or it can mean a collection of code with a defined interface that can be installed for reuse. I'm referring to the second thing here.) Pip's implementation defines what it means for a package to be installed in a Python environment. Any other tool that wishes to install software into a Python environment (e.g. conda) must match Pip's implementation."
**- participant 240313922 (Computer security researcher at a university, using Python for 7-10 years)** + +### Responses to "In your own words, explain what happens when pip installs a software package" + +> "I think pip looks up package "tea" in the repository of packages (PyPI by default, but can be changed). If it doesn't find it, it gives an error. If it exists, it downloads some information about the package, like what form it exists in. This can be a wheel, or a package that needs to be built. If it is a wheel, it checks the dependencies and installs them, then it installs the wheel (not sure what this means, probably it extracts it). The wheel is specific to a python distribution and base OS, so it might be available on certain platforms but not others. If it is a package that needs to be built, pip downloads the package source (or clones the repository), and runs setup.py, which installs dependencies and other packages, then the package itself. I forgot to mention that before installing there is some check for checking compatibility of the version required and the versions required by other packages." **- participant 240426799 (Scientific researcher - data analysis and computer vision models, using Python for 5-6 years)** + +> "pip searches for a package source (and for me uses the default, so Pypi), then ask the package source for a package with the given name and versions (if specified), then if the package is available download the package in the most appropriate format (depending on my platform), then unzip the package and runs the installer (most probably calls setuptools with the included setup.py file) which will perform the required installation steps. This installation process may contain dependencies (typically specified in setup.py), which will trigger the same process for the dependencies, and so on until all dependencies are installed (if everything is OK)." **- participant 240670292 (Software developer industrial systems control, using Python for 5-6 years)** + +> "Pip checks PyPI (default package index, assuming that wasn't overridden) for the package matching `tea`. It uses the various specifiers (eg. OS compatibility, Python compatibility, etc) to find the latest version of `tea` compatible with my system. Within that version, it finds the best possible installation match (eg. a `wheel`, if supported on my system and my version of `pip` contains the relevant versioned support [eg. most recently manylinux2010], potentially falling back all the way to a source distribution). After downloading the relevant distribution, it performs the same operations recursively up the dependency chain as specified by the `install_requires` of the `setuptools.setup()` method. After grabbing all relevant packages, it performs the installations as specified in their setup methods -- generally, this involves extracting python files to specific system paths, but various levels of complexity may be added as need be such as compilations, system library bindings, etc. I believe the new resolver changes the above by performing all the lookups simultaneously (eg. by building and solving a dependency graph rather than traversing incrementally) but have not yet read the PEP to learn more. 
I've answered the above with setuptools in mind -- I believe there was a step added recently to check pyproject.toml first to allow for alternate systems here, but I find the added customization to be a net negative to the ecosystem and have not yet played with it -- the entire Poetry/Pipenv/Pipfile.lock/Flit thing just seems to be adding unnecessary complexity; users who know what they're doing have solved all these issues years ago for their packages and users who find the porcelain makes their lives easier are likely going to run into UX trouble no matter the veneer." **- participant 241463652 (Using Python for 5-6 years)** + +> "pip accesses the tea package from pypi (guessing that's where, online at least) and downloads a copy of the files into my local venv" **- participant 243434435 (Data analysis & machine learning, using Python for 1-2 years)** + +> "Looking up the latest version of of the package from pypi" **- participant 243897973 (Software testing/writing automated tests using Python 3 - 4 years)** + +> "Download, unpack, sometimes compile a module for my target arch" **- participant 243428875 (System administration using Python 7 - 10 years)** + +## Recommendations + +It's difficult to know what to recommend. Some ideas: + +- Question: Is it actually necessary for users to know everything that pip is doing? +- Better documentation: + - Describing the "blocks of functionality" that pip carries out and how to deal with them when it breaks + - Curating package manager training and help + - Improving pip output to expose the different pip functionality blocks diff --git a/docs/html/ux-research-design/research-results/override-conflicting-dependencies.md b/docs/html/ux-research-design/research-results/override-conflicting-dependencies.md new file mode 100644 index 00000000000..b087a8070b9 --- /dev/null +++ b/docs/html/ux-research-design/research-results/override-conflicting-dependencies.md @@ -0,0 +1,62 @@ +# Providing an override to install packages with conflicting dependencies + +## Problem + +Currently, when a user has dependency conflicts in their project they may be unaware there is a problem, because pip will install conflicting packages without raising an error. + +The new pip resolver is more strict and will no longer allow users to install packages that have conflicting dependencies. + +As a result, some users may feel that newer versions of pip are "broken" when pip refuses to install conflicting packages. + +For this reason, the pip team wanted to know if they should provide an override that allows users to install conflicting packages. + +[Skip to recommendations](#recommendations) + +## Research + +We published a survey with the following introduction: + +
+> Imagine you have packages tea and coffee:
+>
+> tea 1.0.0 depends on water <1.12.
+> coffee 1.0.0 depends on water >=1.12.
+>
+> Installing tea 1.0.0 and coffee 1.0.0 will cause a conflict because they each rely on different versions of water - this is known as a "dependency conflict".
+>
+> The pip team has recently changed the way that pip resolves dependency conflicts. The new implementation is stricter than before: pip will no longer install packages where there is a dependency conflict - instead it will show an error.
+>
+> The purpose of this survey is to gather feedback on providing a way to override this behaviour.
+>
+> All questions are optional - please provide as much information as you can.
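+
+For context, the error that the new resolver reports in this situation looks roughly like the following (output abridged and illustrative, reusing the tea/coffee/water example):
+
+```console
+$ pip install tea coffee
+ERROR: Cannot install coffee and tea because these package versions
+have conflicting dependencies.
+
+The conflict is caused by:
+    tea 1.0.0 depends on water<1.12
+    coffee 1.0.0 depends on water>=1.12
+```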
+
+We then asked users:
+
+- If pip should provide an override that allows users to install packages when there are dependency conflicts
+- Why they answered yes or no
+- For users who answered yes, we asked:
+  - When they would use the override
+  - How often they would use the override
+  - How easy it would be to find a workaround, if pip did not provide an override
+  - What syntax they prefer
+
+## Results
+
+In total, we received 415 responses to the survey.
+
+An overwhelming majority (>70%) of respondents indicated that they want some kind of override that allows them to install packages when there are dependency conflicts. Despite desiring this feature, most respondents said if it exists they would use it "not often" - this indicates that it is an advanced feature that is not critical to day-to-day usage. Nevertheless, because it would be difficult or very difficult to find a workaround (>60%), we suggest that pip should offer an override feature (see recommendations, below).
+
+Over half of the respondents said that `pip install tea coffee --ignore-conflicts` was the most ideal syntax for this command when installing multiple packages at once with a conflicting dependency. When using the `pip install --ignore-conflicts` command, a plurality (>48%) of respondents said they would prefer pip to install the most recent version of the conflicted dependency.
+
+Most respondents suggested that installing the latest version by default is safer, because it could include security fixes or features that would be difficult to replicate on their own. They also trust that dependencies will be largely backwards-compatible. However, they said it was very important to have a way to override this default behavior, in case they need to use an older version of the conflicted package.
+
+## Recommendations
+
+Based on this research we recommend that the pip team:
+
+- Implement an `--ignore-conflicts` option that allows users to install packages with conflicting dependencies
+- Ensure that `--ignore-conflicts` installs the most recent version of the conflicting package. For example, for the conflicting requirements `water<1.12` and `water>=1.12`, pip should prefer to install `water>=1.12`.
+- Allow users to override this default behavior by specifying the version of the conflicting packages. For example, `pip install tea coffee water==1.11 --ignore-conflicts`
+- Warn users that they used the `--ignore-conflicts` flag and that this may cause unexpected behavior in their program
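+
+To make the above recommendations concrete, a hypothetical session with the proposed option might look like this (`--ignore-conflicts` does not exist in pip today - this is only a sketch of the recommended behaviour, reusing the tea/coffee/water example):
+
+```console
+$ pip install tea coffee --ignore-conflicts
+WARNING: Ignoring conflict: tea 1.0.0 requires water<1.12, but the
+more recent water 1.12 will be installed.
+
+$ pip install tea coffee water==1.11 --ignore-conflicts
+# pins the conflicted package to an older version instead
+```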
diff --git a/docs/html/ux-research-design/research-results/personas.md b/docs/html/ux-research-design/research-results/personas.md
new file mode 100644
index 00000000000..e4f1a3df44b
--- /dev/null
+++ b/docs/html/ux-research-design/research-results/personas.md
@@ -0,0 +1,250 @@
+# pip Personas
+
+## Problem
+
+We want to develop personas for pip's users to facilitate faster user-centered decision making for the pip development team.
+
+[Skip to recommendations](#recommendations)
+
+## Research
+
+From early interviews with pip users, and from desk research into the different communities that use Python, it was our expectation that there were large communities who were not professional software developers. For example, the SciPy library is widely used in the science and engineering communities for mathematical analysis, signal and image processing.
+
+Based on this, we expected a lot of these users would have different expectations, challenges and needs from pip.
+
+Our hypothesis was that:
+
+1. Python users fall into 3 main user types - a software user, a software maker and a software/package maintainer
+2. The majority (over 60%) would define themselves as Python software users
+3. The minority would define themselves as Python software maintainers
+
+### Usertype definitions
+
+During the research we met different user types in the Python community. The 3 types of Python users we proposed were:
+
+#### The Python Software User
+
+"I use Python software mainly as a tool to help me do what I want to do. This might be running scientific experiments, making music or analysing data with Python software I install with pip. I don't write Python software for others."
+
+#### The Python Software Maker
+
+"I use the Python language and Python software packages to make software, mostly for other people. An example might be - building web applications for my customers. To make this web application I might use the Django framework, and a number of Python packages and libraries."
+
+#### The Python Package Maintainer
+
+"I spend a lot of my time creating Python software packages and libraries for other people to use in the software they make. I might make Python packages and libraries and then publish them on pypi.org or other software repositories."
+
+## Results
+
+During our research we found that these user types did fit with participants' sense of their usage of Python. Participants did not identify significantly different Python user types when asked.
+
+Each of these user types is a spectrum. Over time, with experience/training and a need to use code more than once, some Python users started to make their own Python software.
+
+Identifying as one of these user types does not preclude users from also being another user type. Python users were more likely to also be Python software makers, but rarely Python software maintainers.
+
+Most (86%) participants identified as being a Python software user. This ranged a) from using Python applications - SciPy, Scikit-Learn - as a tool, with no knowledge of, or interest in, doing more, to b) more advanced usage of Python involving modifying others' code/scripts, possibly using libraries to create code specifically for their needs.
+
+75% identified as a Python software maker - as with the Python software user type, this ranged from writing basic scripts and code to being a professional software developer.
+
+40% identified as a Python software maintainer - the activities of a maintainer were seen as only available to someone who had many years of Python experience, was heavily involved in a particular package or application, or did it as part of their job.
+
+### I am a Python software user
+
+As expected, almost all participants identified as a Python software user (86%). This was the most fundamental user type - both trained software developers and those who came to Python as a result of their job were users.
+
+Non-software developer users identified Python as a language to get stuff done:
+
+> "Almost everyone falls into the user (category) - that’s the target. It's not an obscure language that's meant for specific domains - it's a broad general purpose language designed to get stuff done. It's used by many who don't know other languages, they just need a language to get what they're doing finished." **- Participant 240312164**
+
+However, "using Python software" meant different things depending on who you ask - participants identified as a Python software user on a spectrum.
+
+| I am a Python software user | Number of responses |
+| --------------------------- | ------------------- |
+| I agree | 50 |
+| I disagree | 4 |
+| I have no opinion | 11 |
+| I strongly agree | 70 |
+| I strongly disagree | 4 |
+| Grand Total | 140 |
+
+![Pie chart with responses to question - I am a Python software user](https://i.imgur.com/ir3tP3B.png)
+
+#### Low end of the spectrum
+
+Python software applications were identified by some as a tool they use to do their "actual" work - the scientist, the data analyst, the engineer, the journalist.
+
+Here, they were "using" Python applications like SciPy, PsychPi, NumPy, to run scientific experiments, to gather data, to analyse data, with the objective of creating knowledge to make decisions, to identify what to do next.
+
+These were users who 1) were new to Python software, or 2) came across these Python applications in their profession and used them as tools.
+
+They describe NumPy or SciPy as a Python software application in itself, analogous to being a Windows user or a Mac user.
+
+These users are not "classically trained programmers", as one participant identified themselves. As a result, they may not have the training or knowledge of programming concepts like software dependencies. When they are expected to deal with complex or confusing error messages or instructions, they have problems that often stop them.
+
+#### High end of the spectrum
+
+Python users who "move up the spectrum" to more advanced Python usage had been using Python for longer periods - many years.
+
+Again, they may not have been classically trained developers, but through exposure - from work colleagues and their own usage - they started to experiment. This experimentation was in the form of modifying others' scripts, taking classes, and reading books so they could use code for other purposes.
+
+This was _making_ software - this software could be used by them as part of their day job, but it could also be used by many others.
+
+We asked participants to explain the progression on this user spectrum - what is the difference between a user and a maker?
+
+Participants spoke about "are you working on something reusable or are you using the tools to achieve a one time task?"
+
+> "I didn't have classic software development training, more statistical analysis experience. I was clueless to the idea that it was a repository that anyone could upload packages to and become a maintainer." **- Participant 240396891 (Data scientist at an applied research lab using Python to do network traffic analysis/parsing or Machine Learning)**
+
+> "Firstly I use my own software written in Python, I use Python libraries from pip. I use Django, Flask, libraries like requests." **- Participant 240302171**
+
+> "I am not a classically trained programmer, so it's a great way for me to learn and keep current in techniques. Not being a classically trained programmer, in some cases it detracts, I have a reasonable knowledge of the way to use hashes, but if I wanted to change Python's hash I'd have to read books. I can find information out there." **- Participant 240312164 (Nuclear physicist using Python for computer simulations, designing experimental methods)**
+
+### I am a Python software maker
+
+Being a "Python software maker" was a natural progression for some Python users, particularly those who had software development training - either on the job, through personal learning or through formal education. This training was important for understanding fundamental programming concepts.
+
+As discussed earlier, some participants identified as "advanced" Python users, using Python software to modify or create other software. These users were likely to progress to being software makers.
+ +55% of participants who identified as a software maker had between 5 and 20+ years of experience with Python. Only 18% of software makers had less than 2 years of experience. + +![Pie chart with responses to question - I am a Python software maker](https://i.imgur.com/aqg1kaL.png) + +We did not ask these participants about the "quality" of the software they created, but apart from the professional software developers, the opinion of these users was that they were not software developers.
| I am a Python software maker | Number of responses |
| ---------------------------- | ------------------- |
| I agree | 50 |
| I disagree | 9 |
| I have no opinion | 14 |
| I strongly agree | 56 |
| I strongly disagree | 10 |
| **Grand Total** | **140** |
+ +Making software was defined earlier as "are you working on something reusable or are you using the tools to achieve a one time task?" + +> "I'm using Python software and libraries to make this product I'm working on, it's foundation is based on Python, with React, D3 and all built on Python. The cloud assets are Python and testing is Python." **- Participant 240315927 (a professional IT developer building a Python based data analysis application)** + +> "I make software in Python. My day job is making software in python. Mainly Django web design. I work for a retail company, where I write calculating orders, creating data in other inventory management systems. Data analysis." **- Participant 240393825** + +> "I have written software, sometimes for business and personal reasons. At one point I worked on a django website project, that was being used by 1000s of people. I don't think any of my live projects are based. + +> "Most of it is for sysadmin, automation. I [like] to use python instead of shell scripting. I manage a server with wordpress sites. I wrote a script to update these sites, mailman list and sql DB management, and for different utilities." **- Participant 240313542** + +> "I use Python for creating things - like outputs for data scientist, software engineer. I make software to look at patterns, and analyse stuff. I think I'm a maker because someone else is using - they are colleagues. Usually its non-technical colleagues. I produce outputs - make data understandable. They use the results, or a package it behind a flask app. Or analyse graphs." **- Participant 240426799** + +### I am a Python software maintainer + +The Python software/package maintainer user type was seen as requiring a significant amount of time and experience - domain experience, as the software could be very specific (e.g. SciKit Learn, SciPy, etc.), technical/coding experience, and experience in the community. You need to have spent time doing the other jobs before you can become a maintainer. + +For large projects it was seen as necessary to have core code contributors and maintainers. Maintainers did not always write code - they could be more involved with technical architecture and technical design than with writing code. + +One aspect of the software maintainer role that was rarely mentioned was community management. + +![Pie chart with responses to question - I am a Python software maintainer](https://i.imgur.com/gXPc946.png)
| I am a Python package maintainer | Number of responses |
| -------------------------------- | ------------------- |
| I agree | 39 |
| I disagree | 24 |
| I have no opinion | 20 |
| I strongly agree | 18 |
| I strongly disagree | 38 |
| **Grand Total** | **140** |
+ +> "You can become a maintainer once you get past a certain level of experience." **- Participant 240278297** + +> "To be a package maintainer, you'd have to spend a lot of time fixing issues, e.g. your package is on Github and you'd be looking at issues, reviewing PRs, writing documentation. A package maintainer is someone heavily involved in the project. They deal with more support calls, they do more thinking about issues to get your package into multiple environments. That's the good thing about the Python community - I was trying to use a Python package but there was an issue with the documentation. I said, there's a better way of doing this example. They answered and said "great, do you want to do it? Doing package maintaining, it doesn't interest me, I don't have time for it really - if I have a specific issue I will focus on it. It'd be nice (to do more)." **- Participant 240278297 (professional Python software developer)** + +> "I am a core developer of scikit-learn, I spend time writing code. These days strictly speaking - writing code is the least thing I do - mostly I do reviews of other people's code. There is a lot of API design work, it can translate into writing code. I may be the one writing the code or not. I am involved with the CI every now and then. [...] I have been the release manager for the last 2 releases. There are different types of maintainer - writing code maintainers, but you do need core devs writing code. But being a maintainer and building a community -that is about communication and PRs, and mentoring people." **- Participant 240306385 (core maintainer of SciKit-Learn)** + +## Recommendations + +### Provide documentation recommending "best/recommended ways" + +The majority of participants were using Python as a tool, as a participant said: "it's a broad general purpose language designed to get stuff done." + +The majority of participants - scientists, product/electronic engineers, data analysts, nuclear physicists - used Python for their work - they may write Python software, for themselves, possibly for colleagues. A smaller number are maintainers of widely used Python packages. + +As a result they are not classically trained software developers and so may not have "the right" understanding of important software programming concepts. + +Users of all types, and experience struggled with knowing the "right" way to do something. They often spoke about the "recommended way" to do something - to start a new project, to make a package: + +> "As a new comer, it's not easy to figure out what should be in the right way to structure a _setup.py_ or _pyproject.toml_. There is a good guide, but it's not easy to figure out what to use. I wish there was a guide like 'Make an application (or library) in 30 minutes'." diff --git a/docs/html/ux-research-design/research-results/pip-force-reinstall.md b/docs/html/ux-research-design/research-results/pip-force-reinstall.md new file mode 100644 index 00000000000..4ba84b876d3 --- /dev/null +++ b/docs/html/ux-research-design/research-results/pip-force-reinstall.md @@ -0,0 +1,102 @@ +# pip --force-reinstall + +## Problem + +Currently, when `pip install [package-name] --force-reinstall` is executed, instead of reinstalling the package at the version previously installed, pip installs the package at the newest version available. + +i.e. `pip install [package name] --force-reinstall` acts as `pip [package name] --upgrade` + +We want to find out if users understand (or desire) this implicit behaviour. 
+ +More information can be found on [this GitHub issue](https://github.com/pypa/pip/issues/8238). + +[Skip to recommendations](#recommendations) + +## Research + +To help us understand what users want from the `--force-reinstall` option, we launched a survey with the following scenario: + +
+You have the requests package and its dependencies installed: + +requests==2.22.0
+asgiref==3.2.10
+certifi==2020.6.20
+chardet==3.0.4
+Django==3.1
+idna==2.8
+pytz==2020.1
+sqlparse==0.3.1
+urllib3==1.25.10
+ +You run 'pip install requests --force-reinstall'. What should happen?
+ +Respondents could choose from one of the following options: + +- pip reinstalls the same version of requests. pip does not reinstall requests' dependencies. +- pip reinstalls requests and its dependencies, updating all these packages to the latest compatible versions +- pip reinstalls requests and its dependencies, keeping every package on the same version +- pip reinstalls requests, updating it to the latest version. pip updates requests' dependencies where necessary to support the newer version. +- I don't know what pip should do +- I don't understand the question +- Other (allows respondent to provide their own answer) + +We also asked how useful `pip --force-reinstall` is, and how often it is used. + +## Results + +In total, we received 190 responses to our survey, with 186 people telling us what pip should do when the `--force-reinstall` option is executed. + +![pie chart with survey results](https://i.imgur.com/yoN02o9.png) + +- **31.7%** (59/186) of respondents said that pip should reinstall requests and its dependencies, keeping every package on the same version +- **28%** (52/186) of respondents said that pip should reinstall requests, updating it to the latest version, with pip updating requests' dependencies where necessary to support the newer version. +- **15.6%** (29/186) of respondents said that pip should reinstall requests and its dependencies, updating all these packages to the latest compatible versions +- **14%** (26/186) of respondents said that pip should reinstall the same version of requests, and not reinstall requests' dependencies + +If we group responses into "upgrade" or "do not upgrade" (ignoring responses that could not be grouped), we find: + +- 46.32% (88/186) of respondents thought that pip should install the same version of requests - i.e. that `--force-reinstall` should _not_ implicitly upgrade +- 43.16% (82/186) of respondents thought that pip should upgrade requests to the latest version - i.e. that `--force-reinstall` _should_ implicitly upgrade + +Most respondents use `--force-reinstall` "almost never" (65.6%): + +![screenshot of survey question of how often users use --force-reinstall](https://i.imgur.com/fjLQUPV.png) +![bar chart of how often users use --force-reinstall](https://i.imgur.com/Xe1XDkI.png) + +Amongst respondents who said they use `--force-reinstall` often or very often: + +- 54.54% (6/11) of respondents thought that pip should install the same version of requests - i.e. that `--force-reinstall` should _not_ implicitly upgrade +- 45.45% (5/11) of respondents thought that pip should upgrade requests to the latest version - i.e. that `--force-reinstall` _should_ implicitly upgrade + +Respondents find `--force-reinstall` less useful than useful: + +![screenshot of survey question of how useful users find --force-reinstall](https://i.imgur.com/6cv4lFn.png) +![bar chart of how useful users find --force-reinstall](https://i.imgur.com/gMUBDBo.png) + +Amongst respondents who said they find `--force-reinstall` useful or very useful: + +- 38.46% (20/52) of respondents thought that pip should install the same version of requests - i.e.
that `--force-reinstall` should _not_ implicitly upgrade +- 50% (26/52) of respondents thought that pip should upgrade requests to the latest version - i.e. that `--force-reinstall` _should_ implicitly upgrade + +## Recommendations + +Given that this option is not regularly used and not strongly rated as useful, we recommend that the development team consider removing `--force-reinstall` _should they wish to reduce maintenance overhead_. + +In this case, we recommend showing the following message when a user tries to use `--force-reinstall`: + +> Error: the pip install --force-reinstall option no longer exists. Use pip uninstall then pip install to replace up-to-date packages, or pip install --upgrade to update your packages to the latest available versions. + +Should the pip development team wish to keep `--force-reinstall`, we recommend maintaining the current (implicit upgrade) behaviour, as pip's users have not expressed a clear preference for a different behaviour. + +In this case, we recommend upgrading the [help text](https://pip.pypa.io/en/stable/reference/pip_install/#cmdoption-force-reinstall) to be more explicit: + +Old help text: + +> Reinstall all packages even if they are already up-to-date. + +New help text: + +> Reinstall package(s), and their dependencies, even if they are already up-to-date. Where package(s) are not up-to-date, upgrade these to the latest version (unless version specifiers are used). diff --git a/docs/html/ux-research-design/research-results/pip-search.md b/docs/html/ux-research-design/research-results/pip-search.md new file mode 100644 index 00000000000..641b30ca673 --- /dev/null +++ b/docs/html/ux-research-design/research-results/pip-search.md @@ -0,0 +1,145 @@ +# pip search + +## Problem + +By default, `pip search` searches packages on PyPI.org from the command line. However, the team are [considering removing it](https://github.com/pypa/pip/issues/5216), because they think it's not that useful and it uses too many resources on PyPI ([PyPI XMLRPC search has been disabled](https://status.python.org/incidents/grk0k7sz6zkp) because of abuse/overuse). + +[Skip to recommendations](#recommendations) + +## Research + +Prior to PyPI XMLRPC search being disabled, we: + +- Gathered feedback on pip search via the "buy a feature" survey +- Published a survey specifically about pip search, asking users about: + - Their current use of pip search + - How useful they find pip search results + - How clear they find pip search results + - Where users expect pip to search (e.g. PyPI vs private index) + - What data pip should search _other_ than project name + - What changes or additions they would make to pip search + +## Results + +In total, we received 1070 responses to the buy a feature survey, with 541 (50.4%) respondents selecting "Search pypi.org for packages" in their top 10 features. + +However, search ranked lower than the following features: + +1. Run pip without requiring any user input (e.g. in CI) _718_ +2. Show information about all installed packages _707_ +3. Show information about a single installed package _596_ + +We received 302 responses to the pip search survey, with 62 of the 302 (20.5%) respondents either not knowing that the command existed, never using it, or using it "rarely".
+ +We found that the remaining ~80% of respondents who do use pip search use it to: + +- Find/search for the right/new/alternate packages to install: + - Checking the package name (verify correct spelling) + - Assessing functionality (check a package's description) + - Verifying availability (check if such a package exists) +- Search for the latest version of a package (verify version) +- Find package libraries and new modules + +In general, pip search is regarded as: + +- more useful than not useful +- more clear than not clear + +When asked if pip should search on items _other_ than the package name, respondents most commonly asked to search the package description: + +![wordcloud of common search terms](https://i.imgur.com/lxS2TG6.png) + +Some users also mentioned that they would like the search to be configurable, e.g. by passing flags/options. + +When asked how they would improve pip search, users said they would improve: + +**1. Search methods:** + +- fuzzy search and case-insensitive matching should be accepted +- users should have the option to filter/sort by description, name, tag + +**2. Search results:** + +- relevancy: the results should show both the exact match and the closest match +- order/category: the results should display items in a certain order, e.g. highest number of downloads (popularity), development status (last updated/latest version), etc. +- there should be a limited number of search results + +**3. User interface:** + +- link each package to its PyPI page +- use a color-coding system for better clarity +- distinguish exact match search results from others: by highlighting, or using a different color +- indicate version compatibility + +## Recommendations + +### Deprecation strategy + +Given that the [PyPI](https://pypi.org/pypi) search API is currently disabled (as of 1st Jan, 2021) for technical and sustainability reasons, we recommend that the pip team display a clear error message to users who use the command: + +``` +The PyPI search API has been disabled due to unmanageable load. +To search PyPI, open your browser to search for packages at https://pypi.org +Alternatively, you can search a different index using the --index option. +``` + +In the longer term, **we recommend that the PyPI team investigate alternative methods of serving search results (e.g. via caching)** that would enable pip search to work again. This recommendation is supported by our research, which suggests that many pip users find this functionality useful. + +If this is not possible, the pip team should create clear instructions that tell users what to use instead. Some suggestions (based on common user flows) are listed below: + +#### Finding a new package based on tags and keywords + +This is the most common feature that you would expect from `pip search`, and likely the hardest to replace after deprecation. + +As mentioned above, the pip CLI should - as soon as possible - hide the full-trace error message shown when a user types `pip search`. Instead, pip should show a message that encourages users to use the search index on the website itself (in their browser) by providing a link directly to [https://pypi.org](https://pypi.org). Also, pip should provide a short hint on how to use an alternative index. + +``` +$ pip search pytest + +The PyPI search API has been disabled due to unmanageable load. + +Please open your browser to search for packages at https://pypi.org + +Alternatively, you can use a different index using the --index option.
+ + pip search pytest --index +``` + +In addition, the pip team could implement an alternative to the PyPI search API that works without a hard dependency on a centralized service. Similar to other distribution systems like `apt` and `yum`, the metadata of all package names could be downloaded on the user's machine with an opt-in workflow: + +``` +$ pip search pytest +Using pip search on the command line requires you to download the index first. +Alternatively, you can open your browser to search for packages at https://pypi.org + +Download the index to /System/Library/Frameworks/Python.framework/ +Versions/2.7/Resources/Python.app/Contents/MacOS/search.db? (y/n) y +......... done! + + + +$ pip search pytest + +``` + +This is a more complex route that will require more engineering time, but can aim to provide command line users with a similar functionality to the old `pip search` command. It can also check the age of the local index and show a warning if it is getting old. + +#### Verifying the latest version of a package + +Users also use the `pip search` command to find or verify a particular package's version. + +As a replacement, the pip team could do either of the following: + +1. Extend the `pip show` feature to include known latest versions of the package; +2. Create a `pip outdated` command which scans the current dependency tree and outputs the packages that are outdated (compared to the latest versions on the configured index). + +### UX improvements + +Should it be possible to continue to support pip search, we strongly recommend the following UX improvements: + +- Adding support for [fuzzy search](https://en.wikipedia.org/wiki/Approximate_string_matching), or suggesting alternative/related search terms +- Adding support for case insensitive search +- Searching based on a package's description +- Linking search results to a package's PyPI page (where appropriate) + +Other user feedback (as detailed above) should also be considered by the team on a case-by-case basis. diff --git a/docs/html/ux-research-design/research-results/pip-upgrade-conflict.md b/docs/html/ux-research-design/research-results/pip-upgrade-conflict.md new file mode 100644 index 00000000000..9261a318518 --- /dev/null +++ b/docs/html/ux-research-design/research-results/pip-upgrade-conflict.md @@ -0,0 +1,68 @@ +# pip Upgrade Conflict + +## Problem + +Currently, pip does _not_ take into account packages that are already installed when a user asks pip to upgrade a package. This can cause dependency conflicts for pip's users. + +[Skip to recommendations](#recommendations) + +## Research + +We published a [survey](https://bit.ly/2ZqJijr) asking users how they would solve the following scenario: + +
+Imagine you have packages tea and coffee with the following dependencies: + +tea 1.0.0 - depends on water<1.12
+tea 2.0.0 - depends on water>=1.12
+coffee 1.0.0 - depends on water<1.12
+coffee 2.0.0 - depends on water>=1.12
+ +You have the following packages installed: + +tea 1.0.0
+coffee 1.0.0
+water 1.11.0
+ +You ask pip to upgrade tea. What should pip do? + +If pip upgrades tea to 2.0.0, water needs to be upgraded as well, creating a conflict with coffee...
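The constraint arithmetic behind the conflict can be spelled out with the `packaging` library - a hedged sketch, not pip's actual resolver (`packaging` is assumed to be installed):

```python
# Hedged sketch: combine the "water" constraints that upgrading tea creates.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

tea_2_needs = SpecifierSet(">=1.12")    # tea 2.0.0 requires water>=1.12
coffee_1_needs = SpecifierSet("<1.12")  # installed coffee 1.0.0 requires water<1.12

combined = tea_2_needs & coffee_1_needs  # water would have to satisfy both at once
candidates = [Version("1.11"), Version("1.12")]
print(list(combined.filter(candidates)))  # [] - no water version satisfies both
```

An empty result is exactly the deadlock the survey asked about: any choice pip makes must either break a constraint or install nothing.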
+ +We gave users four choices: + +1. Upgrade tea and water. Show a warning explaining that coffee now has unsatisfied requirements. +2. Upgrade coffee automatically to 2.0.0 +3. Install nothing. Tell the user that everything is up-to-date (since the version of tea they have installed is the latest version without conflicts). +4. Install nothing. Show an error explaining that the upgrade would cause incompatibilities. + +We allowed users to post their own solution, and asked why they came to their decision. + +## Results + +In total, we received 693 responses, 407 of which included an explanation of why a particular solution was best. + +![bar chart of responses to the upgrade conflict survey](https://i.imgur.com/UdBWkaQ.png) + +- 497 responses (71.7%) preferred option 4: that pip should install nothing and raise an error message +- 102 responses (14.7%) preferred option 2: that pip should upgrade coffee automatically +- 79 responses (11.4%) preferred option 1: that pip should upgrade tea and water +- 15 responses (2.2%) preferred option 3: that pip should install nothing and tell the user that everything is up to date + +From the 407 responses that answered "why" a particular solution was best, the following key themes emerged: + +- "explicit is better than implicit" - pip should not create "side effects" that the user does not understand, has not anticipated, and has not consented to +- pip should do everything in its power to avoid introducing conflicts (pip should not "break" the development environment) +- Telling the user that everything is up to date (option 3) is misleading / dishonest +- pip could be more flexible by: + - allowing the user to choose how they want to resolve the situation + - allowing the user to override the default behaviour (using flags) + +## Recommendations + +Based on the results of this research, the pip UX team has made the following recommendations to the development team: + +- While the current behaviour exists, [warn the user when conflicts are introduced](https://github.com/pypa/pip/issues/7744#issuecomment-717573440) +- [Change the current behaviour](https://github.com/pypa/pip/issues/9094), so that pip takes into account packages that are already installed when upgrading other packages. Show the user a warning when pip anticipates a dependency conflict (as per option 4) +- Explore [the possibility of adding additional flags to the upgrade command](https://github.com/pypa/pip/issues/9095), to give users more control diff --git a/docs/html/ux-research-design/research-results/prioritizing-features.md b/docs/html/ux-research-design/research-results/prioritizing-features.md new file mode 100644 index 00000000000..3642042f333 --- /dev/null +++ b/docs/html/ux-research-design/research-results/prioritizing-features.md @@ -0,0 +1,156 @@ +# Prioritizing pip Features + +## Problem + +The pip development team is small, and has limited time and energy to work on issues reported via the [issue tracker](https://github.com/pypa/pip/issues). There is also a significant backlog of issues (782 as of November 2020) for the team to respond to. +For the team to prioritize their work based on what will have the most impact, we need to develop a better understanding of what users want from pip. + +[Skip to recommendations](#recommendations) + +## Research + +To help answer this question, we developed a "buy a feature" survey, with the following scenario: + +
+Help us to understand what's important to you by participating in our "buy a feature" game: + +You have an allocated budget of $200 to spend on redesigning pip. + +With your $200 budget, "buy" the functionality you'd most like to keep. + +You don't have to spend the whole $200, but you should also not overspend your budget! + +
+ +We asked users to spend their first $100 on features related to `pip install`, and to spend their remaining $100 on other pip features. We also gave users an additional $10 to suggest a new feature: + +![survey question where users are asked to buy features for pip install](https://i.imgur.com/2QShgYo.png) + +![survey question where users are asked to buy features other than pip install](https://i.imgur.com/sY8gdXD.png) + +![survey question where users are asked to spend an additional ten dollars](https://i.imgur.com/hvgjdEG.png) + +## Results + +We received 1076 responses, 1070 of which were valid. The most popular features included the core competencies of pip: + +- Recreating an environment from a list of installed dependencies; +- Install, uninstall, and upgrade packages from a version control system, file, or local directory; +- Warn about broken or conflicting dependencies. + +### pip install + +The top ten features related to pip install were: + +![pip install results](https://i.imgur.com/1rNIOB7.png) + +1. Install and uninstall packages +2. Upgrade packages to the latest version +3. Warn about broken dependencies +4. Install a package from a version control system (e.g. Git, Mercurial, etc.) +5. Install packages as specified in a file +6. Install a package from a local directory +7. Verify downloaded packages against hashes +8. Install packages from an alternative package index, or indexes (default is PyPI only) +9. Install a package from wheels (no need for compiling code) +10. Control where you want your installed package to live on your computer + +### Other pip functionality + +The top ten features related to other pip functionality were: + +![other pip functionality results](https://i.imgur.com/xrp9XWw.png) + +1. Generate a list of installed packages that can be used to recreate the environment +2. Check that your installed packages do not have dependency conflicts +3. Run pip without requiring any user input (e.g. in CI) +4. Show information about all installed packages +5. Show information about a single installed package +6. Search pypi.org for packages +7. Show information about pip (version information, help information, etc.) +8. Download packages, build wheels and keep them in a directory for offline use +9. Manage pip's default configuration (e.g. by using configuration files) +10. Customise pip's output (e.g. reduce or increase verbosity, suppress colors, send output to a log) + +Results varied by the amount of Python experience the user had.
+See how likely users are to select a feature based on their experience level + +#### Verify downloaded packages against hashes + +![screenshot of verify downloaded packages against hashes](https://i.imgur.com/oVHOGBQ.png) + +#### Warn about broken dependencies + +![Screenshot of Warn about broken dependencies](https://i.imgur.com/uNv2tnG.png) + +#### Upgrade packages to the latest version + +![Screenshot of Upgrade packages to the latest version](https://i.imgur.com/pQgCLBO.png) + +#### Install packages from an alternative package index, or indexes + +![Screenshot of Install packages from an alternative package index, or indexes](https://i.imgur.com/E1LnTBt.png) + +#### Install packages as specified in a file + +![Screenshot of Install packages as specified in a file](https://i.imgur.com/87uh4xp.png) + +#### Install and uninstall packages + +![Screenshot of Install and uninstall packages](https://i.imgur.com/GRsazBy.png) + +#### Install packages from a version control system + +![Screenshot of Install packages from a version control system](https://i.imgur.com/iW7d0Sq.png) + +#### Install a package from wheels + +![Screenshot of Install a package from wheels](https://i.imgur.com/9DMBfNL.png) + +#### Install a package from a local directory + +![Screenshot of Install a package from a local directory](https://i.imgur.com/Jp95rak.png) + +#### Control where you want your installed package to live on your computer + +![Screenshot of Control where you want your installed package to live on your computer](https://i.imgur.com/32fpww2.png)
+ +## Recommendations + +### Environment recreation + +Environment recreation is already included in pip as part of the `requirements.txt` feature; however, given its popularity and demand, we recommend that **pip should improve its support of this feature.** + +- Improve environment recreation user output and help guides directly in the pip CLI; +- Improve pip documentation & user guide to prominently feature environment recreation as a core feature of pip; +- Improve the environment recreation process itself by considering virtual environments as a core competency "built-in" to pip. + +**Recreating an environment from a list of installed dependencies was the most valued feature request overall** as well as in each user group, _except for those with less than 6 months of experience and those with 16-19 years of experience (for which it was the second most valued)._ + +When asked to enter a feature request as free text, users placed the words 'built-in,' 'virtual,' 'automatic,' and 'isolation' alongside the word 'environment,' which suggests that users expect pip to recreate environments with a high level of intelligence and usability. + +**Selected direct quotes** + +> Make pip warn you when you are not in virtualenv + +> Automatic virtual env creation with a command line argument + +> Eliminate virtual environments. Just use ./python_modules/ like everyone else + +> I would love to see pip manage the python version and virtual env similar to the miniconda + +> Would spend all my $200 on this: Integrate pipenv or venv into pip so installing an application doesn't install it's dependencies in the system package store. And allow pinning dependency versions for application packages (like how pip-compile does it) + +### Dependency management + +We recommend that the pip team improve warning and error messages related to dependencies (e.g., conflicts) with practical hints for resolution. This can be rolled out over multiple timescales, including: + +- Give hints to the user on how to resolve the issue directly alongside the error message; +- Prominently include virtual environment creation in the documentation, upon `pip install` conflict errors, and if possible as a built-in feature of pip; +- Upgrading the dependency resolver (in progress). + +It is clear that dependency management, including warning about conflicting packages and upgrades, is important for pip users. By helping users better manage their dependencies through virtual environments, pip can reduce the overall warnings and conflict messages that users encounter. diff --git a/docs/html/ux-research-design/research-results/users-and-security.md b/docs/html/ux-research-design/research-results/users-and-security.md new file mode 100644 index 00000000000..fbc8f492f3c --- /dev/null +++ b/docs/html/ux-research-design/research-results/users-and-security.md @@ -0,0 +1,172 @@ +# How pip users think about security + +## Problem + +We wanted to understand how pip users think about security when installing packages with pip. + +[Skip to recommendations](#recommendations) + +## Research + +We asked participants about their behaviours and practices in terms of the security and integrity of the Python packages they install with pip, and of the software they create. + +We asked participants to tell us how often they: + +1. Carry out a code audit of the Python software they install with pip +2. Think about the security and integrity of the (Python) software they install (with pip) +3.
Think about the security and integrity of the (Python) code they create + +## Results + +While the security and integrity of the software users install (51%) and make (71%) is important to research participants, less than 7% do code audits of the packages or code they install with pip. + +This is due to a lack of time to audit large packages, lack of expertise, reliance on widely adopted Python packages, the expectation that pip automatically checks hashes, and reliance on the wider Python community to act as a canary in the coal mine. + +This behaviour was common across all user types and all levels of software development experience. + +These results - particularly the lack of expertise in auditing packages - fit in with the overall finding that the majority of pip users are not "classically trained" software developers (i.e. having formally learned software development), and so lack the expertise and/or formal training in software development practices. + +There is a gulf between what the maintainers expect users to think and worry about, and what the users actually worry and think about. Right now, pip leaves users to "fend for themselves" in terms of providing them with assurance of the software they install. This isn't meant as a criticism, but an observation. + +### Responses to question: before I install any Python software with pip, I carry out a code audit + +The vast majority of participants, 82%, do not (rarely or never) do a code audit of the software packages they install using pip; the reasons are explained below. + +| Before I install any Python software with pip, I carry out a code audit: | Number of responses | +| ------------------------------------------------------------------------ | ------------------- | +| Always | 3 | +| Frequently | 9 | +| Rarely | 66 | +| Never | 68 | +| I'm not sure what this means | 5 | +| No opinion | 13 | +| **Total number of participants** | **164** | + +### Responses to question: I think about the security and integrity of the software I install + +![screenshot of responses to question about security](https://i.imgur.com/wy4lGwJ.png) + +The vast majority of participants did think about the security and integrity of the software they installed - and unlike responses about code audits, in some cases participants made attempts to verify the security and integrity of the software they installed. + +Most attempts were made by those who had experience in software development; however, in some cases, people gave up. + +Those who were not classically trained software developers did not know where to start. + +Both of these groups identified their "sphere of influence" and did their best to cover this. + +### User thoughts about security + +#### Responsibility as author + +Participants who spent a lot of their time writing Python code - either for the community or as part of their job - expressed a responsibility to their users for the code they wrote - people who wrote code which was made public expressed a stronger responsibility. + +They thought about where the software would be used, who would use it, and possible attack surfaces. + +> "On the basic point, I have to think about attack surfaces. If I am writing the thing (software), I have to give a crap. I have to answer the emails! In the code I push to [pypi.org](http://pypi.org/) I think about it doubly. What could people do with this code? Whether I do a good job, that's different!
I am aware of it when publishing it or making it [publicly] available. I rely on community resources - Python security related, I follow security people blogs, Twitter. I use Hypothesis for fuzz-testing. I also rely on having security policies in place and a reporting mechanism. I steer clear of crypto, I rely on other peoples. There's a certain amount of knowledge in the Python community, I am actively involved in it. If something happens, I will hear about it. I use Twitter, if something happens, in the morning it can take me awhile to figure out what's happened. I have a lot of trust in the ecosystem to be self healing. As long as you don't stray too far-off the reservation (into using odd or uncommon or new packages), it's a better sense of security." **- Participant (data scientist turned Python developer)** + +> Yes, because I'm liable for that. If the problem is my code, and I deliver something and they get attacked, I'm screwed. **- Participant (professional Python developer and trainer)** + +#### Reliance on software packages + +Participants also explained that they rely on code security scanning and checking software packages. + +> "I use linters (Bandit), I scan the code I have created and when there is an issue I raise a red flag." + +> "I use Hypothesis for fuzz-testing." + +#### Reliance on good software development practices + +A small number of participants explained they have good software practices in place, which help with writing secure software. + +> "We have a book about ethics of code - we have mandatory certification." + +> "I also rely on having security policies in place and a reporting mechanism. I steer clear of crypto, I rely on other peoples." + +Of the users who have used pip's hash checking functionality: + +- One finds the error messages "too annoying and loud", and has difficulty matching the file name to the hash +- Another finds the process of explicitly pinning hashes to be too tiresome (especially for dependencies) + +One user mentioned that he likes [NPM audit](https://docs.npmjs.com/cli/v6/commands/npm-audit) and would like to see something similar in the Python ecosystem. + +#### Lack of time + +The lack of time to carry out an audit of the package code, and that of the dependencies, was cited as a very common reason. In most cases participants used Python code as a means of achieving their goal. + +#### Lack of expertise to carry out the audit + +The lack of expertise or knowledge to audit software was mainly due to participants' expertise not being software development. However, even where participants were "classically trained" software developers, lack of expertise was also a commonly given reason for not carrying out audits. + +#### Use of only widely used, well-established packages + +Use of well-established, high-quality packages was a common reason amongst all types of participants - professional Python software developers and those who used Python as a tool.
+ +"Well-established, high-quality packages" were defined by users as packages that: + +- have been in existence for many years +- are popular, or commonly used by those in their community or industry +- have responsive maintainers +- maintained by people the participant has heard of +- have many hundreds or thousands of users +- are in active development (many open issues, many forks, Github stars) +- are developed in the open, and transparently +- their history is known, or can be found out publicly + +#### Reliance on the Python community to find issues + +There was a reliance on the community to find issues and make them know publicly - "Many eyes shallow bugs". + +> "I rarely do code audits. Most of the time I rely on the opinions of the community. I look at how many maintainers there are. Maybe it's not good practice but I don't have time to go through the code." **- Participant 240315091** + +#### Use of only internal packages + +> "I only install internal packages, so I don't need to worry about this." + +This theme was not that common, mainly in large software development environments or where security was of high importance. + +#### Expectation that pip audits packages + +Some users expect/assume that pip (and PyPI) should "protect" them from malicious actors - e.g. by automatically checking hashes, or detecting malicious packages. + +> "If I was downloading a package on my own I check the hash, if it's installed by pip, then no. I expect pip to do it. If it doesn't do it, it does surprise me. Every package manager checks the hash against what it downloads. The hashes are already known on pypi." **- Participant 240312164 (Nuclear physicist)** + +#### Other notable comments + +> "Never. I should but I never do [audit code]. I don't stray, I am risk adverse. I install packages that are good already. I consider my risk surface small. I don't have time or resources to audit them. I have sufficient faith in the ecosystem to be self-auditing. If something turned up in a well known package, the community is well known for making a stink. And anyway a code audit wouldn't pick it up." **- Participant 240326752 (professional Python developer)** + +> "On the private level (work) the code is developed internally. I don't audit the code on pypi - due to lack of time auditing the dependencies, and I trust it. I know they had a security breach a few years ago, but it doesn't happen that often. I know they don't audit anything but I still don't audit the code." + +> "I wouldn't know how to [audit code], also I'm writing this stuff for myself. It'll work or not. Sometimes I end up installing 2 or 3 packages and find out that I need to install something else. I move on if it doesn't work. The last resort is I will write the code myself." + +> "I'm quite trusting - Python is open source, I'm assuming that if a package is on[ pypi.org](http://pypi.org/) - it must be alright. I install the package first, then I look at it. I find a package by figuring out - we need to do a certain task, we search for it on the Internet, look at the documentation, we install it and then see if it is what we want" **- Participant 240278297** + +> "If I want to install a package, it's for a reason. I want to calculate the azimuth and elevation of the moon with PyEphem. Do a code audit? Pffff. Most of the stuff I do is banal. It needs to meet a dependency, so I install it. I'm not going to do a code audit. I don't care. Never, but this is one of the things - is the package on pypi the exact source I see on Github? 
You could end up with files that are distributed differently. Probably (I don't do it) because I am too scared to look. There is this thing that pip verifies (the packages) hash - so that is a feature to guard against this. What is the hash of? No idea. It's located in the local python install." **- Participant 240426799 (systems administrator)** + +> "No [I don't audit code]. [laughs] Coz, I'm not going to read thousands of lines of code before I install a package. Oh my God. [..] I wouldn't be able to find it. I'm trading off - honestly how popular the package is, number of stars on GH. pypi doesn't have any UI way to tell me how many downloads it has. If it did I would use that." **- Participant 240386315 (IT administrator)** + +> "Well, I don't have the background to do a code audit of something like Numerical Python. Most packages I use are huge. Most people aren't doing code [audits] of those packages, except the maintainer. I am relying on whatever is built into pip to do package security. I also assume if there is an exploit someone will find it and let the world know. I'm really lazy." **- Participant 240312164 (Nuclear physicist)** + +> "I would like some security advisor, [like in npm](https://docs.npmjs.com/auditing-package-dependencies-for-security-vulnerabilities) - it works very well, when you install a package "there are security vulns. with this package - 1 low, 5 medium, 8 high". I haven't come across security issues with Python packages." **- CZI convening research participant** + +## Recommendations + +### Provide package security guidance or auditing mechanism + +A small number of participants (3-4) over the research period mentioned the [NPM audit command](https://docs.npmjs.com/auditing-package-dependencies-for-security-vulnerabilities) as an example of a good way to assess package security. It may provide a model for how to approach this user need. + +### Automatically check package hashes + +pip should **by default** check package hashes during install, providing a way for users to turn this behaviour off. + +In the case of no hash being available, pip should warn users and provide recommendations - from simplest to most advanced. + +### Mechanism to report suspicious packages + +Users should have a mechanism to report suspicious, or malicious, packages/behaviour. Where this mechanism should exist is open to discussion. The minimum should be a mechanism for users to flag packages on pypi.org. + +### Make the output of pip's activities easier to understand + +Right now pip's output is overwhelming, and while it contains a lot of information, little of it is perceivable to the user - meaning is lost in "the wall of text". + +pip's output must be redesigned to provide users with the right information - including security warnings - at the right time. diff --git a/docs/html/ux_research_design.rst b/docs/html/ux_research_design.rst deleted file mode 100644 index 165b6949670..00000000000 --- a/docs/html/ux_research_design.rst +++ /dev/null @@ -1,81 +0,0 @@ -==================== -UX Research & Design -==================== - -Over the course of 2020, the pip team has been working on improving pip's user -experience. - -Currently, our focus is on: - -1. `Understanding who uses pip`_ -2. `Understanding how pip compares to other package managers, and how pip supports other Python packaging tools`_ -3. `Understanding how pip's functionality is used, and how it could be improved`_ -4.
`Understanding how pip's documentation is used, and how it could be improved`_ - -You can read the `overall plan`_ and the `mid-year update`_ to learn more about -our work. - -How to contribute ------------------ - -Participate in UX research -========================== - -It is important that we hear from pip users so that we can: - -- Understand how pip is currently used by the Python community -- Understand how pip users *need* pip to behave -- Understand how pip users *would like* pip to behave -- Understand pip's strengths and shortcomings -- Make useful design recommendations for improving pip - -If you are interested in participating in pip user research, please -`join pip's user panel`_. -You can `read more information about the user panel here`_. - -We are also looking for users to: - -- `Give us feedback about pip's new resolver`_ -- `Tell us how pip should handle conflicts with already installed packages when updating other packages`_ - -Report UX issues -================ - -If you believe that you have found a user experience bug in pip, or you have -ideas for how pip could be made better for all users, you please file an issue -on the `pip issue tracker`_. - -Work on UX issues -================= - -You can help improve pip's user experience by `working on UX issues`_. -Issues that are ideal for new contributors are marked with "good first issue". - -Test new features -================= - -You can help the team by testing new features as they are released to the -community. Currently, we are looking for users to -`test pip's new dependency resolver`_. - -Next steps ----------- - -In the coming months we will extend this documentation to include: - -1. Summaries of our user research, including recommendations for how to improve pip -2. Tools for the pip team to continue to practice user centered design (e.g. user personas, etc.) - -.. _Understanding who uses pip: https://github.com/pypa/pip/issues/8518 -.. _Understanding how pip compares to other package managers, and how pip supports other Python packaging tools: https://github.com/pypa/pip/issues/8515 -.. _Understanding how pip's functionality is used, and how it could be improved: https://github.com/pypa/pip/issues/8516 -.. _Understanding how pip's documentation is used, and how it could be improved: https://github.com/pypa/pip/issues/8517 -.. _overall plan: https://wiki.python.org/psf/Pip2020DonorFundedRoadmap -.. _mid-year update: http://pyfound.blogspot.com/2020/07/pip-team-midyear-report.html -.. _join pip's user panel: https://tools.simplysecure.org/survey/index.php?r=survey/index&sid=827389&lang=en -.. _read more information about the user panel here: https://bit.ly/pip-ux-studies -.. _Give us feedback about pip's new resolver: https://tools.simplysecure.org/survey/index.php?r=survey/index&sid=989272&lang=en -.. _Tell us how pip should handle conflicts with already installed packages when updating other packages: https://docs.google.com/forms/d/1KtejgZnK-6NPTmAJ-7aWox4iktcezQauW-Mh3gbnydQ/edit -.. _pip issue tracker: https://github.com/pypa/pip/issues/new -.. _working on UX issues: https://github.com/pypa/pip/issues?q=is%3Aissue+is%3Aopen+label%3A%22K%3A+UX%22 -.. 
_test pip's new dependency resolver: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-2-2020 diff --git a/docs/pip_sphinxext.py b/docs/pip_sphinxext.py index 2e559702294..16a3206da50 100644 --- a/docs/pip_sphinxext.py +++ b/docs/pip_sphinxext.py @@ -14,9 +14,22 @@ from pip._internal.cli import cmdoptions from pip._internal.commands import commands_dict, create_command +from pip._internal.configuration import _normalize_name from pip._internal.req.req_file import SUPPORTED_OPTIONS +def convert_cli_option_to_envvar(opt_name: str) -> str: + undashed_opt_name = _normalize_name(opt_name) + normalized_opt_name = undashed_opt_name.upper().replace("-", "_") + return f"PIP_{normalized_opt_name}" + + +def convert_cli_opt_names_to_envvars(original_cli_opt_names: List[str]) -> List[str]: + return [ + convert_cli_option_to_envvar(opt_name) for opt_name in original_cli_opt_names + ] + + class PipNewsInclude(rst.Directive): required_arguments = 1 @@ -130,7 +143,18 @@ def _format_option( opt_help = option.help.replace("%default", str(option.default)) # fix paths with sys.prefix opt_help = opt_help.replace(sys.prefix, "") - return [bookmark_line, "", line, "", " " + opt_help, ""] + env_var_names = convert_cli_opt_names_to_envvars(option._long_opts) + env_var_names_src = ", ".join(f"``{env_var}``" for env_var in env_var_names) + return [ + bookmark_line, + "", + line, + "", + f" {opt_help}", + "", + f" (environment variable: {env_var_names_src})", + "", + ] def _format_options( self, options: Iterable[optparse.Option], cmd_name: Optional[str] = None @@ -194,22 +218,17 @@ def process_options(self) -> None: opt = option() opt_name = opt._long_opts[0] if opt._short_opts: - short_opt_name = "{}, ".format(opt._short_opts[0]) + short_opt_name = f"{opt._short_opts[0]}, " else: short_opt_name = "" if option in cmdoptions.general_group["options"]: prefix = "" else: - prefix = "{}_".format(self.determine_opt_prefix(opt_name)) + prefix = f"{self.determine_opt_prefix(opt_name)}_" self.view_list.append( - "* :ref:`{short}{long}<{prefix}{opt_name}>`".format( - short=short_opt_name, - long=opt_name, - prefix=prefix, - opt_name=opt_name, - ), + f"* :ref:`{short_opt_name}{opt_name}<{prefix}{opt_name}>`", "\n", ) diff --git a/docs/requirements.txt b/docs/requirements.txt index ef72c8fb722..debfa632b7a 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,4 +1,4 @@ -sphinx ~= 6.0 +sphinx ~= 7.0 towncrier furo myst_parser diff --git a/news/10685.bugfix.rst b/news/10685.bugfix.rst new file mode 100644 index 00000000000..a0bc7ed5285 --- /dev/null +++ b/news/10685.bugfix.rst @@ -0,0 +1 @@ +Make the ``--proxy`` parameter take precedence over environment variables. diff --git a/news/10745.doc.rst b/news/10745.doc.rst new file mode 100644 index 00000000000..4094f41405c --- /dev/null +++ b/news/10745.doc.rst @@ -0,0 +1 @@ +Start using Rich for presenting error messages in a consistent format. diff --git a/news/10751.trivial.rst b/news/10751.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/news/11050.bugfix.rst b/news/11050.bugfix.rst new file mode 100644 index 00000000000..01aa5ea7b1d --- /dev/null +++ b/news/11050.bugfix.rst @@ -0,0 +1 @@ +Fix error on checkout for subversion and bazaar with verbose mode on. 
diff --git a/news/11221.feature.rst b/news/11221.feature.rst new file mode 100644 index 00000000000..319cc02c4a5 --- /dev/null +++ b/news/11221.feature.rst @@ -0,0 +1,3 @@ +Display the Project-URL value under key "Home-page" in ``pip show`` when the Home-Page metadata field is not set. + +The Project-URL key detection is case-insensitive, and ignores any dashes and underscores. diff --git a/news/11508.feature.rst b/news/11508.feature.rst new file mode 100644 index 00000000000..2f0d7e2d04d --- /dev/null +++ b/news/11508.feature.rst @@ -0,0 +1 @@ +Add a 'raw' progress_bar type for simple and parsable download progress reports diff --git a/news/11934.removal.rst b/news/11934.removal.rst new file mode 100644 index 00000000000..bf146d23baa --- /dev/null +++ b/news/11934.removal.rst @@ -0,0 +1 @@ +Drop support for EOL Python 3.7. diff --git a/news/12043.doc.rst b/news/12043.doc.rst new file mode 100644 index 00000000000..2c77f9b59dc --- /dev/null +++ b/news/12043.doc.rst @@ -0,0 +1 @@ +Fix the direct usage of zipapp showing up as ``python -m pip.pyz`` rather than ``./pip.pyz`` / ``.\pip.pyz`` diff --git a/news/12122.doc.rst b/news/12122.doc.rst new file mode 100644 index 00000000000..dbc0ebb53fd --- /dev/null +++ b/news/12122.doc.rst @@ -0,0 +1,3 @@ +Add a warning explaining that the snippet in "Fallback behavior" is not a valid +``pyproject.toml`` snippet for projects, and link to setuptools documentation +instead. diff --git a/news/12165.bugfix.rst b/news/12165.bugfix.rst new file mode 100644 index 00000000000..dd09baf60dc --- /dev/null +++ b/news/12165.bugfix.rst @@ -0,0 +1 @@ +This change will deduplicate entries in the ``Requires`` field of ``pip show``. diff --git a/news/12401.bugfix.rst b/news/12401.bugfix.rst new file mode 100644 index 00000000000..371f80011b3 --- /dev/null +++ b/news/12401.bugfix.rst @@ -0,0 +1 @@ +Fix exception with completions when COMP_CWORD is not set diff --git a/news/12453.feature.rst b/news/12453.feature.rst new file mode 100644 index 00000000000..704cd012a90 --- /dev/null +++ b/news/12453.feature.rst @@ -0,0 +1 @@ +Improve performance of resolution of large dependency trees, with more caching. diff --git a/news/12510.trivial.rst b/news/12510.trivial.rst new file mode 100644 index 00000000000..d41d5425b56 --- /dev/null +++ b/news/12510.trivial.rst @@ -0,0 +1 @@ +Update ruff to 0.2.0 and update ruff config to reflect diff --git a/news/12529.doc.rst b/news/12529.doc.rst new file mode 100644 index 00000000000..20f049ac384 --- /dev/null +++ b/news/12529.doc.rst @@ -0,0 +1 @@ +The Python Support Policy has been updated. diff --git a/news/12533.trivial.rst b/news/12533.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/news/12536.bugfix.rst b/news/12536.bugfix.rst new file mode 100644 index 00000000000..248d93bdba5 --- /dev/null +++ b/news/12536.bugfix.rst @@ -0,0 +1,2 @@ +Remove the incorrect pip3.x console entrypoint from the pip wheel. This console +script continues to be generated by pip when it installs itself. diff --git a/news/12537.process.rst b/news/12537.process.rst new file mode 100644 index 00000000000..5fd08a84867 --- /dev/null +++ b/news/12537.process.rst @@ -0,0 +1,2 @@ +Remove ``setup.py`` since all the pip project metadata is now declared in +``pyproject.toml``. diff --git a/news/12538.process.rst b/news/12538.process.rst new file mode 100644 index 00000000000..53d4496972b --- /dev/null +++ b/news/12538.process.rst @@ -0,0 +1 @@ +Move remaining pip development tools configurations to ``pyproject.toml``. 
diff --git a/news/12545.trivial.rst b/news/12545.trivial.rst new file mode 100644 index 00000000000..f373e869cfd --- /dev/null +++ b/news/12545.trivial.rst @@ -0,0 +1,4 @@ +This change will use ``build`` to create the ``pip`` sdist for testing. + +It will also remove a direct ``setup.py`` invocation to install ``pip`` in +editable mode to run from tests. diff --git a/news/12559.trivial.rst b/news/12559.trivial.rst new file mode 100644 index 00000000000..80b07d0d00d --- /dev/null +++ b/news/12559.trivial.rst @@ -0,0 +1 @@ +Fix Ubuntu CI Tests diff --git a/news/12561.trivial.rst b/news/12561.trivial.rst new file mode 100644 index 00000000000..ecc9e4a9afa --- /dev/null +++ b/news/12561.trivial.rst @@ -0,0 +1 @@ +Remove virtualenv and setuptools installs from zipapp CI tests diff --git a/news/12562.trivial.rst b/news/12562.trivial.rst new file mode 100644 index 00000000000..ab88c4077bd --- /dev/null +++ b/news/12562.trivial.rst @@ -0,0 +1 @@ +Update CI tests for Windows to run on Python 3.12 diff --git a/news/12576.doc.rst b/news/12576.doc.rst new file mode 100644 index 00000000000..b82dd3d583f --- /dev/null +++ b/news/12576.doc.rst @@ -0,0 +1 @@ +Document the environment variables that correspond with CLI options. diff --git a/news/12577.bugfix.rst b/news/12577.bugfix.rst new file mode 100644 index 00000000000..b408be6a8c9 --- /dev/null +++ b/news/12577.bugfix.rst @@ -0,0 +1 @@ +Ensure ``-vv`` gets passed to any ``pip install`` build environment subprocesses. diff --git a/news/12579.bugfix.rst b/news/12579.bugfix.rst new file mode 100644 index 00000000000..df189e8fbff --- /dev/null +++ b/news/12579.bugfix.rst @@ -0,0 +1 @@ +Remove duplication in invalid wheel error message diff --git a/news/12594.trivial.rst b/news/12594.trivial.rst new file mode 100644 index 00000000000..3d4a67b887d --- /dev/null +++ b/news/12594.trivial.rst @@ -0,0 +1 @@ +Update Black pre-commit to 24.4.0 diff --git a/news/12595.trivial.rst b/news/12595.trivial.rst new file mode 100644 index 00000000000..3aad5da6745 --- /dev/null +++ b/news/12595.trivial.rst @@ -0,0 +1 @@ +Update ruff pre-commit to v0.3.6 diff --git a/news/12615.trivial.rst b/news/12615.trivial.rst new file mode 100644 index 00000000000..ace9836ed43 --- /dev/null +++ b/news/12615.trivial.rst @@ -0,0 +1 @@ +uses RST substitution to put badges in 1 line. diff --git a/news/12630.trivial.rst b/news/12630.trivial.rst new file mode 100644 index 00000000000..8beb79ec366 --- /dev/null +++ b/news/12630.trivial.rst @@ -0,0 +1 @@ +Add ``render: shell`` to the bug report template to format output as code diff --git a/news/23f96da0-5535-40c4-ad79-3feb7f694ec2.trivial.rst b/news/23f96da0-5535-40c4-ad79-3feb7f694ec2.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/news/287f037c-108f-48dd-80a0-489921a6b2f3.trivial.rst b/news/287f037c-108f-48dd-80a0-489921a6b2f3.trivial.rst new file mode 100644 index 00000000000..446d4f9fe9a --- /dev/null +++ b/news/287f037c-108f-48dd-80a0-489921a6b2f3.trivial.rst @@ -0,0 +1 @@ +Add codespell pre-commit hook to catch common misspellings. diff --git a/news/4768.feature.rst b/news/4768.feature.rst new file mode 100644 index 00000000000..df69a245a65 --- /dev/null +++ b/news/4768.feature.rst @@ -0,0 +1 @@ +Reduce startup time of commands (e.g. show, freeze) that do not access the network by 15-30%. 
diff --git a/news/4CCE4788-B8B3-402E-9A88-2981AD074999.trivial.rst b/news/4CCE4788-B8B3-402E-9A88-2981AD074999.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/news/514f0c13-84ee-4d81-8465-bae74e370d0b.trivial.rst b/news/514f0c13-84ee-4d81-8465-bae74e370d0b.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/news/5a771372-fb26-11ee-8cc4-f72623cb6607.trivial.rst b/news/5a771372-fb26-11ee-8cc4-f72623cb6607.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/news/7ae28a10-04c4-4a1f-a276-4c9e04f2e0c1.trivial.rst b/news/7ae28a10-04c4-4a1f-a276-4c9e04f2e0c1.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/news/7f9639a2-df21-4e0c-9023-80f00fd71d20.trivial.rst b/news/7f9639a2-df21-4e0c-9023-80f00fd71d20.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/news/91d23d4d-a9cc-442f-a569-c46e0bdc3e64.trivial.rst b/news/91d23d4d-a9cc-442f-a569-c46e0bdc3e64.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/news/a870a527-a6ea-46fd-b4aa-c0b0d9b669b0.trivial.rst b/news/a870a527-a6ea-46fd-b4aa-c0b0d9b669b0.trivial.rst new file mode 100644 index 00000000000..d9cbf94d387 --- /dev/null +++ b/news/a870a527-a6ea-46fd-b4aa-c0b0d9b669b0.trivial.rst @@ -0,0 +1 @@ +Bump pre-commit hooks. diff --git a/news/b2631650-69cc-4747-8a50-24e574a0ae57.trivial.rst b/news/b2631650-69cc-4747-8a50-24e574a0ae57.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/news/c678d9e3-4844-4298-a46c-80768b38f652.trivial.rst b/news/c678d9e3-4844-4298-a46c-80768b38f652.trivial.rst new file mode 100644 index 00000000000..8837b629849 --- /dev/null +++ b/news/c678d9e3-4844-4298-a46c-80768b38f652.trivial.rst @@ -0,0 +1 @@ +Replace ``captured_output()`` and ``get_url_scheme()`` with stdlib alternatives. diff --git a/news/certifi.vendor.rst b/news/certifi.vendor.rst new file mode 100644 index 00000000000..d307502900d --- /dev/null +++ b/news/certifi.vendor.rst @@ -0,0 +1 @@ +Upgrade certifi to 2024.2.2 diff --git a/news/chardet.vendor.rst b/news/chardet.vendor.rst new file mode 100644 index 00000000000..8471ff6bb36 --- /dev/null +++ b/news/chardet.vendor.rst @@ -0,0 +1 @@ +Upgrade chardet to 5.2.0 diff --git a/news/distro.vendor.rst b/news/distro.vendor.rst new file mode 100644 index 00000000000..9929155edbe --- /dev/null +++ b/news/distro.vendor.rst @@ -0,0 +1 @@ +Upgrade distro to 1.9.0 diff --git a/news/f14947e7-deea-4e17-bdc2-dd8dab2a1fa5.trivial.rst b/news/f14947e7-deea-4e17-bdc2-dd8dab2a1fa5.trivial.rst new file mode 100644 index 00000000000..001cec34342 --- /dev/null +++ b/news/f14947e7-deea-4e17-bdc2-dd8dab2a1fa5.trivial.rst @@ -0,0 +1,5 @@ +Convert numerous internal classes to dataclasses for readability and stricter +enforcement of immutability across the codebase. A conservative approach was +taken in selecting which classes to convert. Classes that did not convert +cleanly into a dataclass or were "too complex" (e.g. maintaining interconnected +state) were left alone.
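To make the dataclass conversions mentioned in the f14947e7 entry concrete, here is a hypothetical before/after shape; the class name and fields are invented for illustration and are not one of the converted classes:

    from dataclasses import dataclass

    # frozen=True makes the generated __init__ the only way to set fields,
    # turning the immutability a hand-written class could only document
    # into something the runtime enforces.
    @dataclass(frozen=True)
    class CacheRecord:
        url: str
        persistent: bool

    record = CacheRecord(url="https://example.com/a.whl", persistent=True)
    # record.persistent = False  # raises dataclasses.FrozenInstanceError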
diff --git a/news/idna.vendor.rst b/news/idna.vendor.rst new file mode 100644 index 00000000000..229b1f3568a --- /dev/null +++ b/news/idna.vendor.rst @@ -0,0 +1 @@ +Upgrade idna to 3.6 diff --git a/news/msgpack.vendor.rst b/news/msgpack.vendor.rst new file mode 100644 index 00000000000..cc45383eaa4 --- /dev/null +++ b/news/msgpack.vendor.rst @@ -0,0 +1 @@ +Upgrade msgpack to 1.0.8 diff --git a/news/no-issue.trivial.rst b/news/no-issue.trivial.rst deleted file mode 100644 index 6440f668716..00000000000 --- a/news/no-issue.trivial.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added seperate instructions for installing ``nox`` in the ``docs/development/getting-started.rst`` doc. and slight update -to the below ``Running pip From Source Tree`` section. diff --git a/news/platformdirs.vendor.rst b/news/platformdirs.vendor.rst new file mode 100644 index 00000000000..fb749d1ab8d --- /dev/null +++ b/news/platformdirs.vendor.rst @@ -0,0 +1 @@ +Upgrade platformdirs to 4.2.0 diff --git a/news/pygments.vendor.rst b/news/pygments.vendor.rst new file mode 100644 index 00000000000..f3e9a6f67df --- /dev/null +++ b/news/pygments.vendor.rst @@ -0,0 +1 @@ +Upgrade pygments to 2.17.2 diff --git a/news/rich.vendor.rst b/news/rich.vendor.rst new file mode 100644 index 00000000000..586a4617228 --- /dev/null +++ b/news/rich.vendor.rst @@ -0,0 +1 @@ +Upgrade rich to 13.7.0 diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst new file mode 100644 index 00000000000..135850e0a97 --- /dev/null +++ b/news/setuptools.vendor.rst @@ -0,0 +1 @@ +Upgrade setuptools to 69.1.1 diff --git a/news/tenacity.vendor.rst b/news/tenacity.vendor.rst new file mode 100644 index 00000000000..5f37bc3f116 --- /dev/null +++ b/news/tenacity.vendor.rst @@ -0,0 +1 @@ +Upgrade tenacity to 8.2.3 diff --git a/news/typing_extensions.vendor.rst b/news/typing_extensions.vendor.rst new file mode 100644 index 00000000000..d23d4bbced0 --- /dev/null +++ b/news/typing_extensions.vendor.rst @@ -0,0 +1 @@ +Upgrade typing_extensions to 4.9.0 diff --git a/news/urllib3.vendor.rst b/news/urllib3.vendor.rst new file mode 100644 index 00000000000..87c7ccfa0b5 --- /dev/null +++ b/news/urllib3.vendor.rst @@ -0,0 +1 @@ +Upgrade urllib3 to 1.26.18 diff --git a/noxfile.py b/noxfile.py index 565a5039955..dc74654da53 100644 --- a/noxfile.py +++ b/noxfile.py @@ -13,7 +13,7 @@ # fmt: off sys.path.append(".") -from tools import release # isort:skip # noqa +from tools import release # isort:skip sys.path.pop() # fmt: on @@ -67,7 +67,7 @@ def should_update_common_wheels() -> bool: # ----------------------------------------------------------------------------- # Development Commands # ----------------------------------------------------------------------------- -@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "pypy3"]) +@nox.session(python=["3.8", "3.9", "3.10", "3.11", "3.12", "pypy3"]) def test(session: nox.Session) -> None: # Get the common wheels. if should_update_common_wheels(): @@ -89,8 +89,9 @@ def test(session: nox.Session) -> None: shutil.rmtree(sdist_dir, ignore_errors=True) # fmt: off + session.install("build") session.run( - "python", "setup.py", "sdist", "--formats=zip", "--dist-dir", sdist_dir, + "python", "-I", "-m", "build", "--sdist", "--outdir", sdist_dir, silent=True, ) # fmt: on @@ -183,6 +184,12 @@ def lint(session: nox.Session) -> None: # git reset --hard origin/main @nox.session def vendoring(session: nox.Session) -> None: + # Ensure that the session Python is running 3.10+ + # so that truststore can be installed correctly. 
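# For reference, the subprocess spawned by the session.run() below
# performs this check (it must run in the session's interpreter, not
# the one executing noxfile.py, hence the subprocess):
#
#     import sys
#     sys.exit(1 if sys.version_info < (3, 10) else 0)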
+ session.run( + "python", "-c", "import sys; sys.exit(1 if sys.version_info < (3, 10) else 0)" + ) + session.install("vendoring~=1.2.0") parser = argparse.ArgumentParser(prog="nox -s vendoring") @@ -219,7 +226,7 @@ def pinned_requirements(path: Path) -> Iterator[Tuple[str, str]]: new_version = old_version for inner_name, inner_version in pinned_requirements(vendor_txt): if inner_name == name: - # this is a dedicated assignment, to make flake8 happy + # this is a dedicated assignment, to make lint happy new_version = inner_version break else: @@ -315,7 +322,7 @@ def build_release(session: nox.Session) -> None: ) session.log("# Install dependencies") - session.install("setuptools", "wheel", "twine") + session.install("build", "twine") with release.isolated_temporary_checkout(session, version) as build_dir: session.log( @@ -351,7 +358,7 @@ def build_dists(session: nox.Session) -> List[str]: ) session.log("# Build distributions") - session.run("python", "setup.py", "sdist", "bdist_wheel", silent=True) + session.run("python", "-m", "build", silent=True) produced_dists = glob.glob("dist/*") session.log(f"# Verify distributions: {', '.join(produced_dists)}") diff --git a/pyproject.toml b/pyproject.toml index 139c37e18d7..74a7f71ca59 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,79 @@ +[project] +dynamic = ["version"] + +name = "pip" +description = "The PyPA recommended tool for installing Python packages." +readme = "README.rst" +license = {text = "MIT"} +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Topic :: Software Development :: Build Tools", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", +] +authors = [ + {name = "The pip developers", email = "distutils-sig@python.org"}, +] + +# NOTE: requires-python is duplicated in __pip-runner__.py. +# When changing this value, please change the other copy as well. +requires-python = ">=3.8" + +[project.scripts] +pip = "pip._internal.cli.main:main" +pip3 = "pip._internal.cli.main:main" + +[project.urls] +Homepage = "https://pip.pypa.io/" +Documentation = "https://pip.pypa.io" +Source = "https://github.com/pypa/pip" +Changelog = "https://pip.pypa.io/en/stable/news/" + [build-system] -requires = ["setuptools", "wheel"] +# The lower bound is for . 
+requires = ["setuptools>=67.6.1", "wheel"] build-backend = "setuptools.build_meta" +[tool.setuptools] +package-dir = {"" = "src"} +include-package-data = false + +[tool.setuptools.dynamic] +version = {attr = "pip.__version__"} + +[tool.setuptools.packages.find] +where = ["src"] +exclude = ["contrib", "docs", "tests*", "tasks"] + +[tool.setuptools.package-data] +"pip" = ["py.typed"] +"pip._vendor" = ["vendor.txt"] +"pip._vendor.certifi" = ["*.pem"] +"pip._vendor.requests" = ["*.pem"] +"pip._vendor.distlib._backport" = ["sysconfig.cfg"] +"pip._vendor.distlib" = [ + "t32.exe", + "t64.exe", + "t64-arm.exe", + "w32.exe", + "w64.exe", + "w64-arm.exe", +] + +###################################################################################### +# towncrier +# + [tool.towncrier] # For finding the __version__ package = "pip" @@ -17,15 +89,19 @@ template = "tools/news/template.rst" # Grouping of entries, within our changelog type = [ - { name = "Process", directory = "process", showcontent = true }, { name = "Deprecations and Removals", directory = "removal", showcontent = true }, { name = "Features", directory = "feature", showcontent = true }, { name = "Bug Fixes", directory = "bugfix", showcontent = true }, { name = "Vendored Libraries", directory = "vendor", showcontent = true }, { name = "Improved Documentation", directory = "doc", showcontent = true }, + { name = "Process", directory = "process", showcontent = true }, { name = "Trivial Changes", directory = "trivial", showcontent = false }, ] +###################################################################################### +# vendoring +# + [tool.vendoring] destination = "src/pip/_vendor/" requirements = "src/pip/_vendor/vendor.txt" @@ -71,3 +147,160 @@ setuptools = "pkg_resources" CacheControl = "https://raw.githubusercontent.com/ionrock/cachecontrol/v0.12.6/LICENSE.txt" distlib = "https://bitbucket.org/pypa/distlib/raw/master/LICENSE.txt" webencodings = "https://github.com/SimonSapin/python-webencodings/raw/master/LICENSE" + +###################################################################################### +# ruff +# + +[tool.ruff] +src = ["src"] +target-version = "py38" +line-length = 88 +extend-exclude = [ + "_vendor", + "./build", + ".scratch", + "data", +] + +[tool.ruff.lint] +ignore = [ + "B019", + "B020", + "B904", # Ruff enables opinionated warnings by default + "B905", # Ruff enables opinionated warnings by default +] +select = [ + "ASYNC", + "B", + "C4", + "C90", + "E", + "F", + "G", + "I", + "ISC", + "PERF", + "PLE", + "PLR0", + "W", + "RUF100", + "UP032", +] + +[tool.ruff.lint.isort] +# Explicitly make tests "first party" as it's not in the "src" directory +known-first-party = ["tests"] +known-third-party = ["pip._vendor"] + +[tool.ruff.lint.mccabe] +max-complexity = 33 # default is 10 + +[tool.ruff.lint.per-file-ignores] +"noxfile.py" = ["G"] +"src/pip/_internal/*" = ["PERF203"] +"tests/*" = ["B011"] +"tests/unit/test_finder.py" = ["C414"] +"src/pip/__pip-runner__.py" = ["UP"] # Must be compatible with Python 2.7 + +[tool.ruff.lint.pylint] +max-args = 15 # default is 5 +max-branches = 28 # default is 12 +max-returns = 13 # default is 6 +max-statements = 134 # default is 50 + +###################################################################################### +# mypy +# + +[tool.mypy] +mypy_path = "$MYPY_CONFIG_FILE_DIR/src" +strict = true +no_implicit_reexport = false +disallow_subclassing_any = false +disallow_untyped_calls = false +warn_return_any = false +ignore_missing_imports = true + +[[tool.mypy.overrides]] 
+module = "pip._internal.utils._jaraco_text" +ignore_errors = true + +[[tool.mypy.overrides]] +module = "pip._vendor.*" +ignore_errors = true + +# These vendored libraries use runtime magic to populate things and don't sit +# well with static typing out of the box. Eventually we should provide correct +# typing information for their public interface and remove these configs. + +[[tool.mypy.overrides]] +module = "pip._vendor.pkg_resources" +follow_imports = "skip" + +[[tool.mypy.overrides]] +module = "pip._vendor.requests.*" +follow_imports = "skip" + +###################################################################################### +# pytest +# + +[tool.pytest.ini_options] +addopts = "--ignore src/pip/_vendor --ignore tests/tests_cache -r aR --color=yes" +xfail_strict = true +markers = [ + "network: tests that need network", + "incompatible_with_sysconfig", + "incompatible_with_venv", + "no_auto_tempdir_manager", + "unit: unit tests", + "integration: integration tests", + "bzr: VCS: Bazaar", + "svn: VCS: Subversion", + "mercurial: VCS: Mercurial", + "git: VCS: git", + "search: tests for 'pip search'", +] + +###################################################################################### +# coverage +# + +[tool.coverage.run] +branch = true +# Do not gather coverage for vendored libraries. +omit = "*/_vendor/*" +# Centralized absolute file prefix for coverage files. +data_file = "${COVERAGE_OUTPUT_DIR}/.coverage" +# By default, each covered process will try to truncate and then write to +# `data_file`, but with `parallel`, they will write to separate files suffixed +# with hostname, pid, and a timestamp. +parallel = true +# If not set, then at the termination of each worker (when using pytest-xdist), +# the following is traced: "Coverage.py warning: Module pip was previously +# imported, but not measured (module-not-measured)" +disable_warnings = "module-not-measured" + +[tool.coverage.paths] +# We intentionally use "source0" here because pytest-cov unconditionally sets +# "source" after loading the config. +source0 = [ + # The primary source code path which other paths will be combined into. + "src/pip/", + # Unit test source directory e.g. + # `.tox/coverage-py3/lib/pythonX.Y/site-packages/pip/...` + "*/site-packages/pip/", + # Functional test virtual environment directories, which look like + # `tmpdir/pip0/pip/src/pip/...` + "*/pip/src/pip/", +] + +[tool.coverage.report] +exclude_lines = [ + # We must re-state the default because the `exclude_lines` option overrides + # it. + "pragma: no cover", + # This excludes typing-specific code, which will be validated by mypy anyway. + "if TYPE_CHECKING", +] diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 2e35be30dd6..00000000000 --- a/setup.cfg +++ /dev/null @@ -1,113 +0,0 @@ -[isort] -profile = black -skip = - ./build, - .nox, - .tox, - .scratch, - _vendor, - data -known_third_party = - pip._vendor - -[flake8] -max-line-length = 88 -exclude = - ./build, - .nox, - .tox, - .scratch, - _vendor, - data -enable-extensions = G -extend-ignore = - G200, G202, - # black adds spaces around ':' - E203, - # using a cache - B019, - # reassigning variables in a loop - B020, -per-file-ignores = - # G: The plugin logging-format treats every .log and .error as logging. 
- noxfile.py: G - # B011: Do not call assert False since python -O removes these calls - tests/*: B011 - -[mypy] -mypy_path = $MYPY_CONFIG_FILE_DIR/src -ignore_missing_imports = True -disallow_untyped_defs = True -disallow_any_generics = True -warn_unused_ignores = True -no_implicit_optional = True - -[mypy-pip._internal.utils._jaraco_text] -ignore_errors = True - -[mypy-pip._vendor.*] -ignore_errors = True - -# These vendored libraries use runtime magic to populate things and don't sit -# well with static typing out of the box. Eventually we should provide correct -# typing information for their public interface and remove these configs. -[mypy-pip._vendor.colorama] -follow_imports = skip -[mypy-pip._vendor.pkg_resources] -follow_imports = skip -[mypy-pip._vendor.progress.*] -follow_imports = skip -[mypy-pip._vendor.requests.*] -follow_imports = skip - -[tool:pytest] -addopts = --ignore src/pip/_vendor --ignore tests/tests_cache -r aR --color=yes -xfail_strict = True -markers = - network: tests that need network - incompatible_with_sysconfig - incompatible_with_venv - no_auto_tempdir_manager - unit: unit tests - integration: integration tests - bzr: VCS: Bazaar - svn: VCS: Subversion - mercurial: VCS: Mercurial - git: VCS: git - search: tests for 'pip search' - -[coverage:run] -branch = True -# Do not gather coverage for vendored libraries. -omit = */_vendor/* -# Centralized absolute file prefix for coverage files. -data_file = ${COVERAGE_OUTPUT_DIR}/.coverage -# By default, each covered process will try to truncate and then write to -# `data_file`, but with `parallel`, they will write to separate files suffixed -# with hostname, pid, and a timestamp. -parallel = True -# If not set, then at the termination of each worker (when using pytest-xdist), -# the following is traced: "Coverage.py warning: Module pip was previously -# imported, but not measured (module-not-measured)" -disable_warnings = module-not-measured - -[coverage:paths] -# We intentionally use "source0" here because pytest-cov unconditionally sets -# "source" after loading the config. -source0 = - # The primary source code path which other paths will be combined into. - src/pip/ - # Unit test source directory e.g. - # `.tox/coverage-py3/lib/pythonX.Y/site-packages/pip/...` - */site-packages/pip/ - # Functional test virtual environment directories, which look like - # `tmpdir/pip0/pip/src/pip/...` - */pip/src/pip/ - -[coverage:report] -exclude_lines = - # We must re-state the default because the `exclude_lines` option overrides - # it. - pragma: no cover - # This excludes typing-specific code, which will be validated by mypy anyway. 
- if TYPE_CHECKING diff --git a/setup.py b/setup.py deleted file mode 100644 index d73c77b7346..00000000000 --- a/setup.py +++ /dev/null @@ -1,88 +0,0 @@ -import os -import sys - -from setuptools import find_packages, setup - - -def read(rel_path: str) -> str: - here = os.path.abspath(os.path.dirname(__file__)) - # intentionally *not* adding an encoding option to open, See: - # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690 - with open(os.path.join(here, rel_path)) as fp: - return fp.read() - - -def get_version(rel_path: str) -> str: - for line in read(rel_path).splitlines(): - if line.startswith("__version__"): - # __version__ = "0.9" - delim = '"' if '"' in line else "'" - return line.split(delim)[1] - raise RuntimeError("Unable to find version string.") - - -long_description = read("README.rst") - -setup( - name="pip", - version=get_version("src/pip/__init__.py"), - description="The PyPA recommended tool for installing Python packages.", - long_description=long_description, - license="MIT", - classifiers=[ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Topic :: Software Development :: Build Tools", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", - ], - url="https://pip.pypa.io/", - project_urls={ - "Documentation": "https://pip.pypa.io", - "Source": "https://github.com/pypa/pip", - "Changelog": "https://pip.pypa.io/en/stable/news/", - }, - author="The pip developers", - author_email="distutils-sig@python.org", - package_dir={"": "src"}, - packages=find_packages( - where="src", - exclude=["contrib", "docs", "tests*", "tasks"], - ), - package_data={ - "pip": ["py.typed"], - "pip._vendor": ["vendor.txt"], - "pip._vendor.certifi": ["*.pem"], - "pip._vendor.requests": ["*.pem"], - "pip._vendor.distlib._backport": ["sysconfig.cfg"], - "pip._vendor.distlib": [ - "t32.exe", - "t64.exe", - "t64-arm.exe", - "w32.exe", - "w64.exe", - "w64-arm.exe", - ], - }, - entry_points={ - "console_scripts": [ - "pip=pip._internal.cli.main:main", - "pip{}=pip._internal.cli.main:main".format(sys.version_info[0]), - "pip{}.{}=pip._internal.cli.main:main".format(*sys.version_info[:2]), - ], - }, - zip_safe=False, - # NOTE: python_requires is duplicated in __pip-runner__.py. - # When changing this value, please change the other copy as well. 
- python_requires=">=3.7", -) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 20d8bf56c17..13523d261f0 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.2.dev0" +__version__ = "24.1.dev0" def main(args: Optional[List[str]] = None) -> int: diff --git a/src/pip/__main__.py b/src/pip/__main__.py index fe34a7b7772..5991326115f 100644 --- a/src/pip/__main__.py +++ b/src/pip/__main__.py @@ -1,6 +1,5 @@ import os import sys -import warnings # Remove '' and current working directory from the first entry # of sys.path, if present to avoid using current directory @@ -20,12 +19,6 @@ sys.path.insert(0, path) if __name__ == "__main__": - # Work around the error reported in #9540, pending a proper fix. - # Note: It is essential the warning filter is set *before* importing - # pip, as the deprecation happens at import time, not runtime. - warnings.filterwarnings( - "ignore", category=DeprecationWarning, module=".*packaging\\.version" - ) from pip._internal.cli.main import main as _main sys.exit(_main()) diff --git a/src/pip/__pip-runner__.py b/src/pip/__pip-runner__.py index 49a148a097e..c633787fced 100644 --- a/src/pip/__pip-runner__.py +++ b/src/pip/__pip-runner__.py @@ -8,8 +8,8 @@ import sys -# Copied from setup.py -PYTHON_REQUIRES = (3, 7) +# Copied from pyproject.toml +PYTHON_REQUIRES = (3, 8) def version_str(version): # type: ignore diff --git a/src/pip/_internal/__init__.py b/src/pip/_internal/__init__.py index 6afb5c627ce..1a5b7f87f97 100755 --- a/src/pip/_internal/__init__.py +++ b/src/pip/_internal/__init__.py @@ -1,6 +1,5 @@ from typing import List, Optional -import pip._internal.utils.inject_securetransport # noqa from pip._internal.utils import _log # init_logging() must be called before any call to logging.getLogger() @@ -8,7 +7,7 @@ _log.init_logging() -def main(args: (Optional[List[str]]) = None) -> int: +def main(args: Optional[List[str]] = None) -> int: """This is preserved for old console scripts that may still be referencing it. diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py index 4f704a3547d..838de86474f 100644 --- a/src/pip/_internal/build_env.py +++ b/src/pip/_internal/build_env.py @@ -19,6 +19,7 @@ from pip._internal.cli.spinners import open_spinner from pip._internal.locations import get_platlib, get_purelib, get_scheme from pip._internal.metadata import get_default_environment, get_environment +from pip._internal.utils.logging import VERBOSE from pip._internal.utils.subprocess import call_subprocess from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds @@ -242,6 +243,8 @@ def _install_requirements( "--no-warn-script-location", ] if logger.getEffectiveLevel() <= logging.DEBUG: + args.append("-vv") + elif logger.getEffectiveLevel() <= VERBOSE: args.append("-v") for format_control in ("no_binary", "only_binary"): formats = getattr(finder.format_control, format_control) diff --git a/src/pip/_internal/cache.py b/src/pip/_internal/cache.py index 05f0a9acb24..6b4512672db 100644 --- a/src/pip/_internal/cache.py +++ b/src/pip/_internal/cache.py @@ -44,7 +44,7 @@ def _get_cache_path_parts(self, link: Link) -> List[str]: """Get parts of part that must be os.path.joined with cache_dir""" # We want to generate an url to use as our cache key, we don't want to - # just re-use the URL because it might have other items in the fragment + # just reuse the URL because it might have other items in the fragment # and we don't care about those. 
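# For example, a link like "https://host/pkg-1.0.tar.gz#sha256=abc123"
# keys on "https://host/pkg-1.0.tar.gz", with the hash re-added as an
# explicit key part below, so unrelated fragment data cannot split the
# cache.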
key_parts = {"url": link.url_without_fragment} if link.hash_name is not None and link.hash is not None: @@ -78,12 +78,10 @@ def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]: if can_not_cache: return [] - candidates = [] path = self.get_path_for_link(link) if os.path.isdir(path): - for candidate in os.listdir(path): - candidates.append((candidate, path)) - return candidates + return [(candidate, path) for candidate in os.listdir(path)] + return [] def get_path_for_link(self, link: Link) -> str: """Return a directory to store cached items in for link.""" @@ -194,7 +192,17 @@ def __init__( self.origin: Optional[DirectUrl] = None origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME if origin_direct_url_path.exists(): - self.origin = DirectUrl.from_json(origin_direct_url_path.read_text()) + try: + self.origin = DirectUrl.from_json( + origin_direct_url_path.read_text(encoding="utf-8") + ) + except Exception as e: + logger.warning( + "Ignoring invalid cache entry origin file %s for %s (%s)", + origin_direct_url_path, + link.filename, + e, + ) class WheelCache(Cache): @@ -257,16 +265,26 @@ def get_cache_entry( @staticmethod def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None: origin_path = Path(cache_dir) / ORIGIN_JSON_NAME - if origin_path.is_file(): - origin = DirectUrl.from_json(origin_path.read_text()) - # TODO: use DirectUrl.equivalent when https://github.com/pypa/pip/pull/10564 - # is merged. - if origin.url != download_info.url: + if origin_path.exists(): + try: + origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8")) + except Exception as e: logger.warning( - "Origin URL %s in cache entry %s does not match download URL %s. " - "This is likely a pip bug or a cache corruption issue.", - origin.url, - cache_dir, - download_info.url, + "Could not read origin file %s in cache entry (%s). " + "Will attempt to overwrite it.", + origin_path, + e, ) + else: + # TODO: use DirectUrl.equivalent when + # https://github.com/pypa/pip/pull/10564 is merged. + if origin.url != download_info.url: + logger.warning( + "Origin URL %s in cache entry %s does not match download URL " + "%s. This is likely a pip bug or a cache corruption issue. " + "Will overwrite it with the new value.", + origin.url, + cache_dir, + download_info.url, + ) origin_path.write_text(download_info.to_json(), encoding="utf-8") diff --git a/src/pip/_internal/cli/autocompletion.py b/src/pip/_internal/cli/autocompletion.py index 226fe84dc0d..f3f70ac8553 100644 --- a/src/pip/_internal/cli/autocompletion.py +++ b/src/pip/_internal/cli/autocompletion.py @@ -17,6 +17,10 @@ def autocomplete() -> None: # Don't complete if user hasn't sourced bash_completion file. 
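# Background for the guard added below: the shell hook installed by
# `pip completion` re-invokes pip with COMP_WORDS (the words typed so
# far) and COMP_CWORD (the index of the word being completed) in the
# environment, alongside PIP_AUTO_COMPLETE=1. If PIP_AUTO_COMPLETE is
# set manually, those two variables can be absent, and the
# os.environ["COMP_WORDS"] lookup below would raise KeyError.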
if "PIP_AUTO_COMPLETE" not in os.environ: return + # Don't complete if autocompletion environment variables + # are not present + if not os.environ.get("COMP_WORDS") or not os.environ.get("COMP_CWORD"): + return cwords = os.environ["COMP_WORDS"].split()[1:] cword = int(os.environ["COMP_CWORD"]) try: @@ -71,8 +75,9 @@ def autocomplete() -> None: for opt in subcommand.parser.option_list_all: if opt.help != optparse.SUPPRESS_HELP: - for opt_str in opt._long_opts + opt._short_opts: - options.append((opt_str, opt.nargs)) + options += [ + (opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts + ] # filter out previously specified options from available options prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]] diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 637fba18cfc..db9d5cc6624 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -131,6 +131,17 @@ def _main(self, args: List[str]) -> int: ", ".join(sorted(always_enabled_features)), ) + # Make sure that the --python argument isn't specified after the + # subcommand. We can tell, because if --python was specified, + # we should only reach this point if we're running in the created + # subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment + # variable set. + if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ: + logger.critical( + "The --python option must be placed before the pip subcommand name" + ) + sys.exit(ERROR) + # TODO: Try to get these passing down from the command? # without resorting to os.environ to hold these. # This also affects isolated builds and it should. @@ -170,7 +181,7 @@ def exc_logging_wrapper(*args: Any) -> int: assert isinstance(status, int) return status except DiagnosticPipError as exc: - logger.error("[present-rich] %s", exc) + logger.error("%s", exc, extra={"rich": True}) logger.debug("Exception information:", exc_info=True) return ERROR diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index a81e952bae1..c3a8792317f 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -92,10 +92,10 @@ def check_dist_restriction(options: Values, check_target: bool = False) -> None: ) if check_target: - if dist_restriction_set and not options.target_dir: + if not options.dry_run and dist_restriction_set and not options.target_dir: raise CommandError( "Can not use any platform or abi specific options unless " - "installing via '--target'" + "installing via '--target' or using '--dry-run'" ) @@ -226,9 +226,9 @@ class PipOption(Option): "--progress-bar", dest="progress_bar", type="choice", - choices=["on", "off"], + choices=["on", "off", "raw"], default="on", - help="Specify whether the progress bar should be used [on, off] (default: on)", + help="Specify whether the progress bar should be used [on, off, raw] (default: on)", ) log: Callable[..., Option] = partial( @@ -591,10 +591,7 @@ def _handle_python_version( """ version_info, error_msg = _convert_python_version(value) if error_msg is not None: - msg = "invalid --python-version value: {!r}: {}".format( - value, - error_msg, - ) + msg = f"invalid --python-version value: {value!r}: {error_msg}" raise_option_error(parser, option=option, msg=msg) parser.values.python_version = version_info @@ -679,7 +676,10 @@ def prefer_binary() -> Option: dest="prefer_binary", action="store_true", default=False, - help="Prefer older binary packages over newer source packages.", + 
help=( + "Prefer binary packages over source packages, even if the " + "source packages are newer." + ), ) @@ -832,7 +832,7 @@ def _handle_config_settings( ) -> None: key, sep, val = value.partition("=") if sep != "=": - parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") # noqa + parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") dest = getattr(parser.values, option.dest) if dest is None: dest = {} @@ -912,7 +912,7 @@ def _handle_config_settings( dest="root_user_action", default="warn", choices=["warn", "ignore"], - help="Action if pip is run as a root user. By default, a warning message is shown.", + help="Action if pip is run as a root user [warn, ignore] (default: warn)", ) @@ -927,13 +927,13 @@ def _handle_merge_hash( algo, digest = value.split(":", 1) except ValueError: parser.error( - "Arguments to {} must be a hash name " # noqa + f"Arguments to {opt_str} must be a hash name " "followed by a value, like --hash=sha256:" - "abcde...".format(opt_str) + "abcde..." ) if algo not in STRONG_HASHES: parser.error( - "Allowed hash algorithms for {} are {}.".format( # noqa + "Allowed hash algorithms for {} are {}.".format( opt_str, ", ".join(STRONG_HASHES) ) ) diff --git a/src/pip/_internal/cli/main.py b/src/pip/_internal/cli/main.py index 7e061f5b390..563ac79c984 100644 --- a/src/pip/_internal/cli/main.py +++ b/src/pip/_internal/cli/main.py @@ -1,5 +1,6 @@ """Primary application entrypoint. """ + import locale import logging import os diff --git a/src/pip/_internal/cli/parser.py b/src/pip/_internal/cli/parser.py index c762cf2781d..ae554b24cae 100644 --- a/src/pip/_internal/cli/parser.py +++ b/src/pip/_internal/cli/parser.py @@ -229,9 +229,9 @@ def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]: val = strtobool(val) except ValueError: self.error( - "{} is not a valid value for {} option, " # noqa + f"{val} is not a valid value for {key} option, " "please specify a boolean value like yes/no, " - "true/false or 1/0 instead.".format(val, key) + "true/false or 1/0 instead." ) elif option.action == "count": with suppress(ValueError): @@ -240,10 +240,10 @@ def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]: val = int(val) if not isinstance(val, int) or val < 0: self.error( - "{} is not a valid value for {} option, " # noqa + f"{val} is not a valid value for {key} option, " "please instead specify either a non-negative integer " "or a boolean value like yes/no or false/true " - "which is equivalent to 1/0.".format(val, key) + "which is equivalent to 1/0." 
) elif option.action == "append": val = val.split() diff --git a/src/pip/_internal/cli/progress_bars.py b/src/pip/_internal/cli/progress_bars.py index 0ad14031ca5..b842b1b316a 100644 --- a/src/pip/_internal/cli/progress_bars.py +++ b/src/pip/_internal/cli/progress_bars.py @@ -1,4 +1,5 @@ import functools +import sys from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple from pip._vendor.rich.progress import ( @@ -14,6 +15,7 @@ TransferSpeedColumn, ) +from pip._internal.cli.spinners import RateLimiter from pip._internal.utils.logging import get_indentation DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]] @@ -55,6 +57,28 @@ def _rich_progress_bar( progress.update(task_id, advance=len(chunk)) +def _raw_progress_bar( + iterable: Iterable[bytes], + *, + size: Optional[int], +) -> Generator[bytes, None, None]: + def write_progress(current: int, total: int) -> None: + sys.stdout.write("Progress %d of %d\n" % (current, total)) + sys.stdout.flush() + + current = 0 + total = size or 0 + rate_limiter = RateLimiter(0.25) + + write_progress(current, total) + for chunk in iterable: + current += len(chunk) + if rate_limiter.ready() or current == total: + write_progress(current, total) + rate_limiter.reset() + yield chunk + + def get_download_progress_renderer( *, bar_type: str, size: Optional[int] = None ) -> DownloadProgressRenderer: @@ -64,5 +88,7 @@ def get_download_progress_renderer( """ if bar_type == "on": return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size) + elif bar_type == "raw": + return functools.partial(_raw_progress_bar, size=size) else: return iter # no-op, when passed an iterator diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 6202c379ad1..b60bbb56dd4 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -58,18 +58,14 @@ def _create_truststore_ssl_context() -> Optional["SSLContext"]: return None try: - import truststore - except ImportError: - raise CommandError( - "To use the truststore feature, 'truststore' must be installed into " - "pip's current environment." - ) + from pip._vendor import truststore + except ImportError as e: + raise CommandError(f"The truststore feature is unavailable: {e}") return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT) class SessionCommandMixin(CommandContextMixIn): - """ A class mixin for command classes needing _build_session(). """ @@ -123,7 +119,7 @@ def _build_session( ssl_context = None session = PipSession( - cache=os.path.join(cache_dir, "http") if cache_dir else None, + cache=os.path.join(cache_dir, "http-v2") if cache_dir else None, retries=retries if retries is not None else options.retries, trusted_hosts=options.trusted_hosts, index_urls=self._get_index_urls(options), @@ -148,6 +144,7 @@ def _build_session( "http": options.proxy, "https": options.proxy, } + session.trust_env = False # Handle no proxy option if options.no_proxy: @@ -165,7 +162,6 @@ def _build_session( class IndexGroupCommand(Command, SessionCommandMixin): - """ Abstract base class for commands with the index_group options. @@ -230,9 +226,12 @@ def warn_if_run_as_root() -> None: logger.warning( "Running pip as the 'root' user can result in broken permissions and " - "conflicting behaviour with the system package manager. " + "conflicting behaviour with the system package manager, possibly " + "rendering your system unusable." 
"It is recommended to use a virtual environment instead: " - "https://pip.pypa.io/warnings/venv" + "https://pip.pypa.io/warnings/venv. " + "Use the --root-user-action option if you know what you are doing and " + "want to suppress this warning." ) @@ -276,7 +275,7 @@ def determine_resolver_variant(options: Values) -> str: if "legacy-resolver" in options.deprecated_features_enabled: return "legacy" - return "2020-resolver" + return "resolvelib" @classmethod def make_requirement_preparer( @@ -295,9 +294,10 @@ def make_requirement_preparer( """ temp_build_dir_path = temp_build_dir.path assert temp_build_dir_path is not None + legacy_resolver = False resolver_variant = cls.determine_resolver_variant(options) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": lazy_wheel = "fast-deps" in options.features_enabled if lazy_wheel: logger.warning( @@ -308,6 +308,7 @@ def make_requirement_preparer( "production." ) else: + legacy_resolver = True lazy_wheel = False if "fast-deps" in options.features_enabled: logger.warning( @@ -328,6 +329,7 @@ def make_requirement_preparer( use_user_site=use_user_site, lazy_wheel=lazy_wheel, verbosity=verbosity, + legacy_resolver=legacy_resolver, ) @classmethod @@ -357,7 +359,7 @@ def make_resolver( # The long import name and duplicated invocation is needed to convince # Mypy into correctly typechecking. Otherwise it would complain the # "Resolver" class being redefined. - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": import pip._internal.resolution.resolvelib.resolver return pip._internal.resolution.resolvelib.resolver.Resolver( @@ -446,9 +448,11 @@ def get_requirements( isolated=options.isolated_mode, use_pep517=options.use_pep517, user_supplied=True, - config_settings=parsed_req.options.get("config_settings") - if parsed_req.options - else None, + config_settings=( + parsed_req.options.get("config_settings") + if parsed_req.options + else None + ), ) requirements.append(req_to_add) diff --git a/src/pip/_internal/commands/cache.py b/src/pip/_internal/commands/cache.py index e96d2b4924c..328336152cc 100644 --- a/src/pip/_internal/commands/cache.py +++ b/src/pip/_internal/commands/cache.py @@ -3,10 +3,10 @@ from optparse import Values from typing import Any, List -import pip._internal.utils.filesystem as filesystem from pip._internal.cli.base_command import Command from pip._internal.cli.status_codes import ERROR, SUCCESS from pip._internal.exceptions import CommandError, PipError +from pip._internal.utils import filesystem from pip._internal.utils.logging import getLogger logger = getLogger(__name__) @@ -93,24 +93,30 @@ def get_cache_info(self, options: Values, args: List[Any]) -> None: num_http_files = len(self._find_http_files(options)) num_packages = len(self._find_wheels(options, "*")) - http_cache_location = self._cache_dir(options, "http") + http_cache_location = self._cache_dir(options, "http-v2") + old_http_cache_location = self._cache_dir(options, "http") wheels_cache_location = self._cache_dir(options, "wheels") - http_cache_size = filesystem.format_directory_size(http_cache_location) + http_cache_size = filesystem.format_size( + filesystem.directory_size(http_cache_location) + + filesystem.directory_size(old_http_cache_location) + ) wheels_cache_size = filesystem.format_directory_size(wheels_cache_location) message = ( textwrap.dedent( """ - Package index page cache location: {http_cache_location} + Package index page cache location (pip v23.3+): {http_cache_location} + Package index page cache 
location (older pips): {old_http_cache_location} Package index page cache size: {http_cache_size} Number of HTTP files: {num_http_files} Locally built wheels location: {wheels_cache_location} Locally built wheels size: {wheels_cache_size} Number of locally built wheels: {package_count} - """ + """ # noqa: E501 ) .format( http_cache_location=http_cache_location, + old_http_cache_location=old_http_cache_location, http_cache_size=http_cache_size, num_http_files=num_http_files, wheels_cache_location=wheels_cache_location, @@ -151,14 +157,8 @@ def format_for_human(self, files: List[str]) -> None: logger.info("\n".join(sorted(results))) def format_for_abspath(self, files: List[str]) -> None: - if not files: - return - - results = [] - for filename in files: - results.append(filename) - - logger.info("\n".join(sorted(results))) + if files: + logger.info("\n".join(sorted(files))) def remove_cache_items(self, options: Values, args: List[Any]) -> None: if len(args) > 1: @@ -175,7 +175,7 @@ def remove_cache_items(self, options: Values, args: List[Any]) -> None: files += self._find_http_files(options) else: # Add the pattern to the log message - no_matching_msg += ' for pattern "{}"'.format(args[0]) + no_matching_msg += f' for pattern "{args[0]}"' if not files: logger.warning(no_matching_msg) @@ -195,8 +195,11 @@ def _cache_dir(self, options: Values, subdir: str) -> str: return os.path.join(options.cache_dir, subdir) def _find_http_files(self, options: Values) -> List[str]: - http_dir = self._cache_dir(options, "http") - return filesystem.find_files(http_dir, "*") + old_http_dir = self._cache_dir(options, "http") + new_http_dir = self._cache_dir(options, "http-v2") + return filesystem.find_files(old_http_dir, "*") + filesystem.find_files( + new_http_dir, "*" + ) def _find_wheels(self, options: Values, pattern: str) -> List[str]: wheel_dir = self._cache_dir(options, "wheels") diff --git a/src/pip/_internal/commands/check.py b/src/pip/_internal/commands/check.py index 584df9f55c5..5efd0a34160 100644 --- a/src/pip/_internal/commands/check.py +++ b/src/pip/_internal/commands/check.py @@ -7,6 +7,7 @@ from pip._internal.operations.check import ( check_package_set, create_package_set_from_installed, + warn_legacy_versions_and_specifiers, ) from pip._internal.utils.misc import write_output @@ -21,6 +22,7 @@ class CheckCommand(Command): def run(self, options: Values, args: List[str]) -> int: package_set, parsing_probs = create_package_set_from_installed() + warn_legacy_versions_and_specifiers(package_set) missing, conflicting = check_package_set(package_set) for project_name in missing: diff --git a/src/pip/_internal/commands/completion.py b/src/pip/_internal/commands/completion.py index deaa30899e6..9e89e279883 100644 --- a/src/pip/_internal/commands/completion.py +++ b/src/pip/_internal/commands/completion.py @@ -22,15 +22,19 @@ complete -o default -F _pip_completion {prog} """, "zsh": """ - function _pip_completion {{ - local words cword - read -Ac words - read -cn cword - reply=( $( COMP_WORDS="$words[*]" \\ - COMP_CWORD=$(( cword-1 )) \\ - PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )) + #compdef -P pip[0-9.]# + __pip() {{ + compadd $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$((CURRENT-1)) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ) }} - compctl -K _pip_completion {prog} + if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + __pip "$@" + else + # eval/source/. 
command, register function for later + compdef __pip -P 'pip[0-9.]#' + fi """, "fish": """ function __fish_complete_pip diff --git a/src/pip/_internal/commands/configuration.py b/src/pip/_internal/commands/configuration.py index 84b134e490b..1a1dc6b6cd8 100644 --- a/src/pip/_internal/commands/configuration.py +++ b/src/pip/_internal/commands/configuration.py @@ -242,17 +242,15 @@ def open_in_editor(self, options: Values, args: List[str]) -> None: e.filename = editor raise except subprocess.CalledProcessError as e: - raise PipError( - "Editor Subprocess exited with exit code {}".format(e.returncode) - ) + raise PipError(f"Editor Subprocess exited with exit code {e.returncode}") def _get_n_args(self, args: List[str], example: str, n: int) -> Any: """Helper to make sure the command got the right number of arguments""" if len(args) != n: msg = ( - "Got unexpected number of arguments, expected {}. " - '(example: "{} config {}")' - ).format(n, get_prog(), example) + f"Got unexpected number of arguments, expected {n}. " + f'(example: "{get_prog()} config {example}")' + ) raise PipError(msg) if n == 1: diff --git a/src/pip/_internal/commands/debug.py b/src/pip/_internal/commands/debug.py index 2a3e7d298f3..567ca967e5b 100644 --- a/src/pip/_internal/commands/debug.py +++ b/src/pip/_internal/commands/debug.py @@ -1,4 +1,3 @@ -import importlib.resources import locale import logging import os @@ -17,6 +16,7 @@ from pip._internal.cli.status_codes import SUCCESS from pip._internal.configuration import Configuration from pip._internal.metadata import get_environment +from pip._internal.utils.compat import open_text_resource from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import get_pip_version @@ -35,7 +35,7 @@ def show_sys_implementation() -> None: def create_vendor_txt_map() -> Dict[str, str]: - with importlib.resources.open_text("pip._vendor", "vendor.txt") as f: + with open_text_resource("pip._vendor", "vendor.txt") as f: # Purge non version specifying lines. # Also, remove any space prefix or suffixes (including comments). lines = [ @@ -46,22 +46,29 @@ def create_vendor_txt_map() -> Dict[str, str]: return dict(line.split("==", 1) for line in lines) -def get_module_from_module_name(module_name: str) -> ModuleType: +def get_module_from_module_name(module_name: str) -> Optional[ModuleType]: # Module name can be uppercase in vendor.txt for some reason... module_name = module_name.lower().replace("-", "_") # PATCH: setuptools is actually only pkg_resources. if module_name == "setuptools": module_name = "pkg_resources" - __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0) - return getattr(pip._vendor, module_name) + try: + __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0) + return getattr(pip._vendor, module_name) + except ImportError: + # We allow 'truststore' to fail to import due + # to being unavailable on Python 3.9 and earlier. + if module_name == "truststore" and sys.version_info < (3, 10): + return None + raise def get_vendor_version_from_module(module_name: str) -> Optional[str]: module = get_module_from_module_name(module_name) version = getattr(module, "__version__", None) - if not version: + if module and not version: # Try to find version in debundled module info. 
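# (Context: some redistributors "debundle" pip so that the vendored
# packages are ordinary site-packages installs; the version then has to
# be read from installed metadata next to the module file, as below.)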
assert module.__file__ is not None env = get_environment([os.path.dirname(module.__file__)]) @@ -88,7 +95,7 @@ def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None: elif parse_version(actual_version) != parse_version(expected_version): extra_message = ( " (CONFLICT: vendor.txt suggests version should" - " be {})".format(expected_version) + f" be {expected_version})" ) logger.info("%s==%s%s", module_name, actual_version, extra_message) @@ -105,7 +112,7 @@ def show_tags(options: Values) -> None: tag_limit = 10 target_python = make_target_python(options) - tags = target_python.get_tags() + tags = target_python.get_sorted_tags() # Display the target options that were explicitly provided. formatted_target = target_python.format_given() @@ -113,7 +120,7 @@ def show_tags(options: Values) -> None: if formatted_target: suffix = f" (target: {formatted_target})" - msg = "Compatible tags: {}{}".format(len(tags), suffix) + msg = f"Compatible tags: {len(tags)}{suffix}" logger.info(msg) if options.verbose < 1 and len(tags) > tag_limit: @@ -127,17 +134,12 @@ def show_tags(options: Values) -> None: logger.info(str(tag)) if tags_limited: - msg = ( - "...\n[First {tag_limit} tags shown. Pass --verbose to show all.]" - ).format(tag_limit=tag_limit) + msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]" logger.info(msg) def ca_bundle_info(config: Configuration) -> str: - levels = set() - for key, _ in config.items(): - levels.add(key.split(".")[0]) - + levels = {key.split(".", 1)[0] for key, _ in config.items()} if not levels: return "Not specified" diff --git a/src/pip/_internal/commands/download.py b/src/pip/_internal/commands/download.py index 36e947c8c05..54247a78a65 100644 --- a/src/pip/_internal/commands/download.py +++ b/src/pip/_internal/commands/download.py @@ -137,6 +137,10 @@ def run(self, options: Values, args: List[str]) -> int: assert req.name is not None preparer.save_linked_requirement(req) downloaded.append(req.name) + + preparer.prepare_linked_requirements_more(requirement_set.requirements.values()) + requirement_set.warn_legacy_versions_and_specifiers() + if downloaded: write_output("Successfully downloaded %s", " ".join(downloaded)) diff --git a/src/pip/_internal/commands/freeze.py b/src/pip/_internal/commands/freeze.py index 5fa6d39b2c7..fd9d88a8b01 100644 --- a/src/pip/_internal/commands/freeze.py +++ b/src/pip/_internal/commands/freeze.py @@ -1,6 +1,6 @@ import sys from optparse import Values -from typing import List +from typing import AbstractSet, List from pip._internal.cli import cmdoptions from pip._internal.cli.base_command import Command @@ -8,7 +8,18 @@ from pip._internal.operations.freeze import freeze from pip._internal.utils.compat import stdlib_pkgs -DEV_PKGS = {"pip", "setuptools", "distribute", "wheel"} + +def _should_suppress_build_backends() -> bool: + return sys.version_info < (3, 12) + + +def _dev_pkgs() -> AbstractSet[str]: + pkgs = {"pip"} + + if _should_suppress_build_backends(): + pkgs |= {"setuptools", "distribute", "wheel"} + + return pkgs class FreezeCommand(Command): @@ -61,7 +72,7 @@ def add_options(self) -> None: action="store_true", help=( "Do not skip these packages in the output:" - " {}".format(", ".join(DEV_PKGS)) + " {}".format(", ".join(_dev_pkgs())) ), ) self.cmd_opts.add_option( @@ -77,7 +88,7 @@ def add_options(self) -> None: def run(self, options: Values, args: List[str]) -> int: skip = set(stdlib_pkgs) if not options.freeze_all: - skip.update(DEV_PKGS) + skip.update(_dev_pkgs()) if options.excludes: 
skip.update(options.excludes) diff --git a/src/pip/_internal/commands/index.py b/src/pip/_internal/commands/index.py index 7267effed24..f55e9e49974 100644 --- a/src/pip/_internal/commands/index.py +++ b/src/pip/_internal/commands/index.py @@ -128,12 +128,12 @@ def get_available_package_versions(self, options: Values, args: List[Any]) -> No if not versions: raise DistributionNotFound( - "No matching distribution found for {}".format(query) + f"No matching distribution found for {query}" ) formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)] latest = formatted_versions[0] - write_output("{} ({})".format(query, latest)) + write_output(f"{query} ({latest})") write_output("Available versions: {}".format(", ".join(formatted_versions))) print_dist_installation_info(query, latest) diff --git a/src/pip/_internal/commands/inspect.py b/src/pip/_internal/commands/inspect.py index 27c8fa3d5b6..e810c13166b 100644 --- a/src/pip/_internal/commands/inspect.py +++ b/src/pip/_internal/commands/inspect.py @@ -7,7 +7,7 @@ from pip import __version__ from pip._internal.cli import cmdoptions -from pip._internal.cli.req_command import Command +from pip._internal.cli.base_command import Command from pip._internal.cli.status_codes import SUCCESS from pip._internal.metadata import BaseDistribution, get_environment from pip._internal.utils.compat import stdlib_pkgs diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index 3c15ed4158c..6cf7571e4a6 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -387,6 +387,9 @@ def run(self, options: Values, args: List[str]) -> int: json.dump(report.to_dict(), f, indent=2, ensure_ascii=False) if options.dry_run: + # In non dry-run mode, the legacy versions and specifiers check + # will be done as part of conflict detection. + requirement_set.warn_legacy_versions_and_specifiers() would_install_items = sorted( (r.metadata["name"], r.metadata["version"]) for r in requirement_set.requirements_to_install @@ -424,8 +427,8 @@ def run(self, options: Values, args: List[str]) -> int: if build_failures: raise InstallationError( - "Could not build wheels for {}, which is required to " - "install pyproject.toml-based projects".format( + "ERROR: Failed to build installable wheels for some " + "pyproject.toml based projects ({})".format( ", ".join(r.name for r in build_failures) # type: ignore ) ) @@ -498,7 +501,7 @@ def run(self, options: Values, args: List[str]) -> int: show_traceback, options.use_user_site, ) - logger.error(message, exc_info=show_traceback) # noqa + logger.error(message, exc_info=show_traceback) return ERROR @@ -592,7 +595,7 @@ def _warn_about_conflicts( "source of the following dependency conflicts." ) else: - assert resolver_variant == "2020-resolver" + assert resolver_variant == "resolvelib" parts.append( "pip's dependency resolver does not currently take into account " "all the packages that are installed. This behaviour is the " @@ -604,12 +607,8 @@ def _warn_about_conflicts( version = package_set[project_name][0] for dependency in missing[project_name]: message = ( - "{name} {version} requires {requirement}, " + f"{project_name} {version} requires {dependency[1]}, " "which is not installed." 
- ).format( - name=project_name, - version=version, - requirement=dependency[1], ) parts.append(message) @@ -625,7 +624,7 @@ def _warn_about_conflicts( requirement=req, dep_name=dep_name, dep_version=dep_version, - you=("you" if resolver_variant == "2020-resolver" else "you'll"), + you=("you" if resolver_variant == "resolvelib" else "you'll"), ) parts.append(message) diff --git a/src/pip/_internal/commands/list.py b/src/pip/_internal/commands/list.py index 8e1426dbb6c..e551dda9a96 100644 --- a/src/pip/_internal/commands/list.py +++ b/src/pip/_internal/commands/list.py @@ -103,7 +103,10 @@ def add_options(self) -> None: dest="list_format", default="columns", choices=("columns", "freeze", "json"), - help="Select the output format among: columns (default), freeze, or json", + help=( + "Select the output format among: columns (default), freeze, or json. " + "The 'freeze' format cannot be used with the --outdated option." + ), ) self.cmd_opts.add_option( @@ -157,7 +160,7 @@ def run(self, options: Values, args: List[str]) -> int: if options.outdated and options.list_format == "freeze": raise CommandError( - "List format 'freeze' can not be used with the --outdated option." + "List format 'freeze' cannot be used with the --outdated option." ) cmdoptions.check_list_path_option(options) @@ -294,7 +297,7 @@ def output_package_listing_columns( # Create and add a separator. if len(data) > 0: - pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes))) + pkg_strings.insert(1, " ".join("-" * x for x in sizes)) for val in pkg_strings: write_output(val) diff --git a/src/pip/_internal/commands/search.py b/src/pip/_internal/commands/search.py index 03ed925b246..e0d329d58ad 100644 --- a/src/pip/_internal/commands/search.py +++ b/src/pip/_internal/commands/search.py @@ -5,7 +5,7 @@ import xmlrpc.client from collections import OrderedDict from optparse import Values -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING, Dict, List, Optional, TypedDict from pip._vendor.packaging.version import parse as parse_version @@ -20,7 +20,6 @@ from pip._internal.utils.misc import write_output if TYPE_CHECKING: - from typing import TypedDict class TransformedHit(TypedDict): name: str @@ -76,9 +75,8 @@ def search(self, query: List[str], options: Values) -> List[Dict[str, str]]: try: hits = pypi.search({"name": query, "summary": query}, "or") except xmlrpc.client.Fault as fault: - message = "XMLRPC request failed [code: {code}]\n{string}".format( - code=fault.faultCode, - string=fault.faultString, + message = ( + f"XMLRPC request failed [code: {fault.faultCode}]\n{fault.faultString}" ) raise CommandError(message) assert isinstance(hits, list) diff --git a/src/pip/_internal/commands/show.py b/src/pip/_internal/commands/show.py index 3f10701f6b2..b7894ce1f9c 100644 --- a/src/pip/_internal/commands/show.py +++ b/src/pip/_internal/commands/show.py @@ -100,7 +100,11 @@ def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]: except KeyError: continue - requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower) + requires = sorted( + # Avoid duplicates in requirements (e.g. due to environment markers). 
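# For example, metadata may list the same dependency name twice under
# different environment markers; the set comprehension below collapses
# those rows to a single name before sorting.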
+ {req.name for req in dist.iter_dependencies()}, + key=str.lower, + ) required_by = sorted(_get_requiring_packages(dist), key=str.lower) try: @@ -117,6 +121,22 @@ def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]: metadata = dist.metadata + project_urls = metadata.get_all("Project-URL", []) + homepage = metadata.get("Home-page", "") + if not homepage: + # It's common that there is a "homepage" Project-URL, but Home-page + # remains unset (especially as PEP 621 doesn't surface the field). + # + # This logic was taken from PyPI's codebase. + for url in project_urls: + url_label, url = url.split(",", maxsplit=1) + normalized_label = ( + url_label.casefold().replace("-", "").replace("_", "").strip() + ) + if normalized_label == "homepage": + homepage = url.strip() + break + yield _PackageInfo( name=dist.raw_name, version=str(dist.version), @@ -128,8 +148,8 @@ def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]: metadata_version=dist.metadata_version or "", classifiers=metadata.get_all("Classifier", []), summary=metadata.get("Summary", ""), - homepage=metadata.get("Home-page", ""), - project_urls=metadata.get_all("Project-URL", []), + homepage=homepage, + project_urls=project_urls, author=metadata.get("Author", ""), author_email=metadata.get("Author-email", ""), license=metadata.get("License", ""), diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index c6a588ff09b..ed578aa2500 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py @@ -153,6 +153,9 @@ def run(self, options: Values, args: List[str]) -> int: elif should_build_for_wheel_command(req): reqs_to_build.append(req) + preparer.prepare_linked_requirements_more(requirement_set.requirements.values()) + requirement_set.warn_legacy_versions_and_specifiers() + # build wheels build_successes, build_failures = build( reqs_to_build, diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 8fd46c9b8e0..c25273d5f0b 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -59,8 +59,8 @@ def _disassemble_key(name: str) -> List[str]: if "." not in name: error_message = ( "Key does not contain dot separated section and key. " - "Perhaps you wanted to use 'global.{}' instead?" - ).format(name) + f"Perhaps you wanted to use 'global.{name}' instead?" + ) raise ConfigurationError(error_message) return name.split(".", 1) @@ -210,8 +210,15 @@ def save(self) -> None: # Ensure directory exists. ensure_dir(os.path.dirname(fname)) - with open(fname, "w") as f: - parser.write(f) + # Ensure directory's permission(need to be writeable) + try: + with open(fname, "w") as f: + parser.write(f) + except OSError as error: + raise ConfigurationError( + f"An error occurred while writing to the configuration file " + f"{fname}: {error}" + ) # # Private routines @@ -320,33 +327,35 @@ def get_environ_vars(self) -> Iterable[Tuple[str, str]]: def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]: """Yields variant and configuration files associated with it. - This should be treated like items of a dictionary. + This should be treated like items of a dictionary. The order + here doesn't affect what gets overridden. That is controlled + by OVERRIDE_ORDER. However this does control the order they are + displayed to the user. 
It's probably most ergonomic to display + things in the same order as OVERRIDE_ORDER. """ # SMELL: Move the conditions out of this function - # environment variables have the lowest priority - config_file = os.environ.get("PIP_CONFIG_FILE", None) - if config_file is not None: - yield kinds.ENV, [config_file] - else: - yield kinds.ENV, [] - + env_config_file = os.environ.get("PIP_CONFIG_FILE", None) config_files = get_configuration_files() - # at the base we have any global configuration yield kinds.GLOBAL, config_files[kinds.GLOBAL] - # per-user configuration next + # per-user config is not loaded when env_config_file exists should_load_user_config = not self.isolated and not ( - config_file and os.path.exists(config_file) + env_config_file and os.path.exists(env_config_file) ) if should_load_user_config: # The legacy config file is overridden by the new config file yield kinds.USER, config_files[kinds.USER] - # finally virtualenv configuration first trumping others + # virtualenv config yield kinds.SITE, config_files[kinds.SITE] + if env_config_file is not None: + yield kinds.ENV, [env_config_file] + else: + yield kinds.ENV, [] + def get_values_in_config(self, variant: Kind) -> Dict[str, Any]: """Get values present in a config file""" return self._config[variant] diff --git a/src/pip/_internal/distributions/base.py b/src/pip/_internal/distributions/base.py index 75ce2dc9057..6e4d0c91a90 100644 --- a/src/pip/_internal/distributions/base.py +++ b/src/pip/_internal/distributions/base.py @@ -1,9 +1,12 @@ import abc +from typing import TYPE_CHECKING, Optional -from pip._internal.index.package_finder import PackageFinder from pip._internal.metadata.base import BaseDistribution from pip._internal.req import InstallRequirement +if TYPE_CHECKING: + from pip._internal.index.package_finder import PackageFinder + class AbstractDistribution(metaclass=abc.ABCMeta): """A base class for handling installable artifacts. @@ -19,12 +22,23 @@ class AbstractDistribution(metaclass=abc.ABCMeta): - we must be able to create a Distribution object exposing the above metadata. + + - if we need to do work in the build tracker, we must be able to generate a unique + string to identify the requirement in the build tracker. """ def __init__(self, req: InstallRequirement) -> None: super().__init__() self.req = req + @abc.abstractproperty + def build_tracker_id(self) -> Optional[str]: + """A string that uniquely identifies this requirement to the build tracker. + + If None, then this dist has no work to do in the build tracker, and + ``.prepare_distribution_metadata()`` will not be called.""" + raise NotImplementedError() + @abc.abstractmethod def get_metadata_distribution(self) -> BaseDistribution: raise NotImplementedError() @@ -32,7 +46,7 @@ def get_metadata_distribution(self) -> BaseDistribution: @abc.abstractmethod def prepare_distribution_metadata( self, - finder: PackageFinder, + finder: "PackageFinder", build_isolation: bool, check_build_deps: bool, ) -> None: diff --git a/src/pip/_internal/distributions/installed.py b/src/pip/_internal/distributions/installed.py index edb38aa1a6c..ab8d53be740 100644 --- a/src/pip/_internal/distributions/installed.py +++ b/src/pip/_internal/distributions/installed.py @@ -1,3 +1,5 @@ +from typing import Optional + from pip._internal.distributions.base import AbstractDistribution from pip._internal.index.package_finder import PackageFinder from pip._internal.metadata import BaseDistribution @@ -10,6 +12,10 @@ class InstalledDistribution(AbstractDistribution): been computed.
""" + @property + def build_tracker_id(self) -> Optional[str]: + return None + def get_metadata_distribution(self) -> BaseDistribution: assert self.req.satisfied_by is not None, "not actually installed" return self.req.satisfied_by diff --git a/src/pip/_internal/distributions/sdist.py b/src/pip/_internal/distributions/sdist.py index 4c25647930c..28ea5cea16c 100644 --- a/src/pip/_internal/distributions/sdist.py +++ b/src/pip/_internal/distributions/sdist.py @@ -1,13 +1,15 @@ import logging -from typing import Iterable, Set, Tuple +from typing import TYPE_CHECKING, Iterable, Optional, Set, Tuple from pip._internal.build_env import BuildEnvironment from pip._internal.distributions.base import AbstractDistribution from pip._internal.exceptions import InstallationError -from pip._internal.index.package_finder import PackageFinder from pip._internal.metadata import BaseDistribution from pip._internal.utils.subprocess import runner_with_spinner_message +if TYPE_CHECKING: + from pip._internal.index.package_finder import PackageFinder + logger = logging.getLogger(__name__) @@ -18,12 +20,18 @@ class SourceDistribution(AbstractDistribution): generated, either using PEP 517 or using the legacy `setup.py egg_info`. """ + @property + def build_tracker_id(self) -> Optional[str]: + """Identify this requirement uniquely by its link.""" + assert self.req.link + return self.req.link.url_without_fragment + def get_metadata_distribution(self) -> BaseDistribution: return self.req.get_dist() def prepare_distribution_metadata( self, - finder: PackageFinder, + finder: "PackageFinder", build_isolation: bool, check_build_deps: bool, ) -> None: @@ -60,7 +68,7 @@ def prepare_distribution_metadata( self._raise_missing_reqs(missing) self.req.prepare_metadata() - def _prepare_build_backend(self, finder: PackageFinder) -> None: + def _prepare_build_backend(self, finder: "PackageFinder") -> None: # Isolate in a BuildEnvironment and install the build-time # requirements. pyproject_requires = self.req.pyproject_requires @@ -104,14 +112,14 @@ def _get_build_requires_editable(self) -> Iterable[str]: with backend.subprocess_runner(runner): return backend.get_requires_for_build_editable() - def _install_build_reqs(self, finder: PackageFinder) -> None: + def _install_build_reqs(self, finder: "PackageFinder") -> None: # Install any extra build dependencies that the backend requests. # This must be done in a second pass, as the pyproject.toml # dependencies must be installed before we can call the backend. if ( self.req.editable and self.req.permit_editable_wheels - and self.req.supports_pyproject_editable() + and self.req.supports_pyproject_editable ): build_reqs = self._get_build_requires_editable() else: diff --git a/src/pip/_internal/distributions/wheel.py b/src/pip/_internal/distributions/wheel.py index 03aac775b53..bfadd39dcb7 100644 --- a/src/pip/_internal/distributions/wheel.py +++ b/src/pip/_internal/distributions/wheel.py @@ -1,13 +1,17 @@ +from typing import TYPE_CHECKING, Optional + from pip._vendor.packaging.utils import canonicalize_name from pip._internal.distributions.base import AbstractDistribution -from pip._internal.index.package_finder import PackageFinder from pip._internal.metadata import ( BaseDistribution, FilesystemWheel, get_wheel_distribution, ) +if TYPE_CHECKING: + from pip._internal.index.package_finder import PackageFinder + class WheelDistribution(AbstractDistribution): """Represents a wheel distribution. 
@@ -15,6 +19,10 @@ class WheelDistribution(AbstractDistribution): This does not need any preparation as wheels can be directly unpacked. """ + @property + def build_tracker_id(self) -> Optional[str]: + return None + def get_metadata_distribution(self) -> BaseDistribution: """Loads the metadata from the wheel file into memory and returns a Distribution that uses it, not relying on the wheel file or @@ -27,7 +35,7 @@ def get_metadata_distribution(self) -> BaseDistribution: def prepare_distribution_metadata( self, - finder: PackageFinder, + finder: "PackageFinder", build_isolation: bool, check_build_deps: bool, ) -> None: diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index 7d92ba69983..0609e450683 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -13,16 +13,16 @@ import re import sys from itertools import chain, groupby, repeat -from typing import TYPE_CHECKING, Dict, Iterator, List, Optional, Union +from typing import TYPE_CHECKING, Dict, Iterator, List, Literal, Optional, Union -from pip._vendor.requests.models import Request, Response from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult from pip._vendor.rich.markup import escape from pip._vendor.rich.text import Text if TYPE_CHECKING: from hashlib import _Hash - from typing import Literal + + from pip._vendor.requests.models import Request, Response from pip._internal.metadata import BaseDistribution from pip._internal.req.req_install import InstallRequirement @@ -247,10 +247,7 @@ def __init__( def __str__(self) -> str: # Use `dist` in the error message because its stringification # includes more information, like the version and location. - return "None {} metadata found for distribution: {}".format( - self.metadata_name, - self.dist, - ) + return f"None {self.metadata_name} metadata found for distribution: {self.dist}" class UserInstallationInvalid(InstallationError): @@ -297,8 +294,8 @@ class NetworkConnectionError(PipError): def __init__( self, error_msg: str, - response: Optional[Response] = None, - request: Optional[Request] = None, + response: Optional["Response"] = None, + request: Optional["Request"] = None, ) -> None: """ Initialize NetworkConnectionError with `request` and `response` @@ -544,7 +541,7 @@ def body(self) -> str: # so the output can be directly copied into the requirements file. package = ( self.req.original_link - if self.req.original_link + if self.req.is_direct # In case someone feeds something downright stupid # to InstallRequirement's constructor. 
else getattr(self.req, "req", None) @@ -594,7 +591,7 @@ def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> N self.gots = gots def body(self) -> str: - return " {}:\n{}".format(self._requirement_name(), self._hash_comparison()) + return f" {self._requirement_name()}:\n{self._hash_comparison()}" def _hash_comparison(self) -> str: """ @@ -616,11 +613,9 @@ def hash_then_or(hash_name: str) -> "chain[str]": lines: List[str] = [] for hash_name, expecteds in self.allowed.items(): prefix = hash_then_or(hash_name) - lines.extend( - (" Expected {} {}".format(next(prefix), e)) for e in expecteds - ) + lines.extend((f" Expected {next(prefix)} {e}") for e in expecteds) lines.append( - " Got {}\n".format(self.gots[hash_name].hexdigest()) + f" Got {self.gots[hash_name].hexdigest()}\n" ) return "\n".join(lines) diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py index b3e293ea3a5..5f8fdee3d46 100644 --- a/src/pip/_internal/index/collector.py +++ b/src/pip/_internal/index/collector.py @@ -11,10 +11,10 @@ import os import urllib.parse import urllib.request +from dataclasses import dataclass from html.parser import HTMLParser from optparse import Values from typing import ( - TYPE_CHECKING, Callable, Dict, Iterable, @@ -22,6 +22,7 @@ MutableMapping, NamedTuple, Optional, + Protocol, Sequence, Tuple, Union, @@ -42,11 +43,6 @@ from .sources import CandidatesFromPage, LinkSource, build_source -if TYPE_CHECKING: - from typing import Protocol -else: - Protocol = object - logger = logging.getLogger(__name__) ResponseHeaders = MutableMapping[str, str] @@ -201,8 +197,7 @@ def __hash__(self) -> int: class ParseLinks(Protocol): - def __call__(self, page: "IndexContent") -> Iterable[Link]: - ... + def __call__(self, page: "IndexContent") -> Iterable[Link]: ... def with_cached_index_content(fn: ParseLinks) -> ParseLinks: @@ -254,29 +249,22 @@ def parse_links(page: "IndexContent") -> Iterable[Link]: yield link +@dataclass(frozen=True) class IndexContent: - """Represents one response (or page), along with its URL""" + """Represents one response (or page), along with its URL. - def __init__( - self, - content: bytes, - content_type: str, - encoding: Optional[str], - url: str, - cache_link_parsing: bool = True, - ) -> None: - """ - :param encoding: the encoding to decode the given content. - :param url: the URL from which the HTML was downloaded. - :param cache_link_parsing: whether links parsed from this page's url - should be cached. PyPI index urls should - have this set to False, for example. - """ - self.content = content - self.content_type = content_type - self.encoding = encoding - self.url = url - self.cache_link_parsing = cache_link_parsing + :param encoding: the encoding to decode the given content. + :param url: the URL from which the HTML was downloaded. + :param cache_link_parsing: whether links parsed from this page's url + should be cached. PyPI index urls should + have this set to False, for example. + """ + + content: bytes + content_type: str + encoding: Optional[str] + url: str + cache_link_parsing: bool = True def __str__(self) -> str: return redact_auth_from_url(self.url) @@ -400,7 +388,6 @@ class CollectedSources(NamedTuple): class LinkCollector: - """ Responsible for collecting Link objects from all configured locations, making network requests as needed. 
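The IndexContent rewrite above is one instance of a refactor repeated throughout this diff: a class with a hand-written __init__ becomes a frozen dataclass, with the parameter documentation folded into the class docstring. A minimal sketch of the pattern, using a hypothetical Page class rather than pip's actual IndexContent:

from dataclasses import dataclass
from typing import Optional


@dataclass(frozen=True)
class Page:
    """One fetched index response.

    :param encoding: the encoding to decode the given content.
    :param cache_link_parsing: whether links parsed from this page should
        be cached.
    """

    content: bytes
    content_type: str
    encoding: Optional[str]
    url: str
    cache_link_parsing: bool = True


page = Page(b"<html></html>", "text/html", "utf-8", "https://example.com/simple/")
# Frozen dataclasses supply __init__, __repr__, __eq__ and __hash__ for
# free, and any attempt to mutate a field raises FrozenInstanceError.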
@@ -473,6 +460,7 @@ def collect_sources( page_validator=self.session.is_secure_origin, expand_dir=False, cache_link_parsing=False, + project_name=project_name, ) for loc in self.search_scope.get_index_urls_locations(project_name) ).values() @@ -483,6 +471,7 @@ def collect_sources( page_validator=self.session.is_secure_origin, expand_dir=True, cache_link_parsing=True, + project_name=project_name, ) for loc in self.find_links ).values() diff --git a/src/pip/_internal/index/package_finder.py b/src/pip/_internal/index/package_finder.py index b6f8d57e854..98ee6315567 100644 --- a/src/pip/_internal/index/package_finder.py +++ b/src/pip/_internal/index/package_finder.py @@ -5,6 +5,7 @@ import itertools import logging import re +from dataclasses import dataclass from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union from pip._vendor.packaging import specifiers @@ -106,7 +107,6 @@ class LinkType(enum.Enum): class LinkEvaluator: - """ Responsible for evaluating links for a particular project. """ @@ -198,7 +198,7 @@ def evaluate_link(self, link: Link) -> Tuple[LinkType, str]: reason = f"wrong project name (not {self.project_name})" return (LinkType.different_project, reason) - supported_tags = self._target_python.get_tags() + supported_tags = self._target_python.get_unsorted_tags() if not wheel.supported(supported_tags): # Include the wheel's tags in the reason string to # simplify troubleshooting compatibility issues. @@ -323,23 +323,15 @@ def filter_unallowed_hashes( return filtered +@dataclass class CandidatePreferences: - """ Encapsulates some of the preferences for filtering and sorting InstallationCandidate objects. """ - def __init__( - self, - prefer_binary: bool = False, - allow_all_prereleases: bool = False, - ) -> None: - """ - :param allow_all_prereleases: Whether to allow all pre-releases. - """ - self.allow_all_prereleases = allow_all_prereleases - self.prefer_binary = prefer_binary + prefer_binary: bool = False + allow_all_prereleases: bool = False class BestCandidateResult: @@ -383,7 +375,6 @@ def iter_applicable(self) -> Iterable[InstallationCandidate]: class CandidateEvaluator: - """ Responsible for filtering and sorting candidates for installation based on what tags are valid. @@ -414,7 +405,7 @@ def create( if specifier is None: specifier = specifiers.SpecifierSet() - supported_tags = target_python.get_tags() + supported_tags = target_python.get_sorted_tags() return cls( project_name=project_name, @@ -533,8 +524,8 @@ def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey: ) except ValueError: raise UnsupportedWheel( - "{} is not a supported wheel for this platform. It " - "can't be sorted.".format(wheel.filename) + f"{wheel.filename} is not a supported wheel for this platform. It " + "can't be sorted." 
) if self._prefer_binary: binary_preference = 1 @@ -939,9 +930,7 @@ def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str: _format_versions(best_candidate_result.iter_all()), ) - raise DistributionNotFound( - "No matching distribution found for {}".format(req) - ) + raise DistributionNotFound(f"No matching distribution found for {req}") def _should_install_candidate( candidate: Optional[InstallationCandidate], diff --git a/src/pip/_internal/index/sources.py b/src/pip/_internal/index/sources.py index cd9cb8d40f1..f4626d71ab4 100644 --- a/src/pip/_internal/index/sources.py +++ b/src/pip/_internal/index/sources.py @@ -1,8 +1,17 @@ import logging import mimetypes import os -import pathlib -from typing import Callable, Iterable, Optional, Tuple +from collections import defaultdict +from typing import Callable, Dict, Iterable, List, Optional, Tuple + +from pip._vendor.packaging.utils import ( + InvalidSdistFilename, + InvalidVersion, + InvalidWheelFilename, + canonicalize_name, + parse_sdist_filename, + parse_wheel_filename, +) from pip._internal.models.candidate import InstallationCandidate from pip._internal.models.link import Link @@ -36,6 +45,53 @@ def _is_html_file(file_url: str) -> bool: return mimetypes.guess_type(file_url, strict=False)[0] == "text/html" +class _FlatDirectoryToUrls: + """Scans directory and caches results""" + + def __init__(self, path: str) -> None: + self._path = path + self._page_candidates: List[str] = [] + self._project_name_to_urls: Dict[str, List[str]] = defaultdict(list) + self._scanned_directory = False + + def _scan_directory(self) -> None: + """Scans directory once and populates both page_candidates + and project_name_to_urls at the same time + """ + for entry in os.scandir(self._path): + url = path_to_url(entry.path) + if _is_html_file(url): + self._page_candidates.append(url) + continue + + # File must have a valid wheel or sdist name, + # otherwise not worth considering as a package + try: + project_filename = parse_wheel_filename(entry.name)[0] + except (InvalidWheelFilename, InvalidVersion): + try: + project_filename = parse_sdist_filename(entry.name)[0] + except (InvalidSdistFilename, InvalidVersion): + continue + + self._project_name_to_urls[project_filename].append(url) + self._scanned_directory = True + + @property + def page_candidates(self) -> List[str]: + if not self._scanned_directory: + self._scan_directory() + + return self._page_candidates + + @property + def project_name_to_urls(self) -> Dict[str, List[str]]: + if not self._scanned_directory: + self._scan_directory() + + return self._project_name_to_urls + + class _FlatDirectorySource(LinkSource): """Link source specified by ``--find-links=``. @@ -45,30 +101,34 @@ class _FlatDirectorySource(LinkSource): * ``file_candidates``: Archives in the directory. 
""" + _paths_to_urls: Dict[str, _FlatDirectoryToUrls] = {} + def __init__( self, candidates_from_page: CandidatesFromPage, path: str, + project_name: str, ) -> None: self._candidates_from_page = candidates_from_page - self._path = pathlib.Path(os.path.realpath(path)) + self._project_name = canonicalize_name(project_name) + + # Get existing instance of _FlatDirectoryToUrls if it exists + if path in self._paths_to_urls: + self._path_to_urls = self._paths_to_urls[path] + else: + self._path_to_urls = _FlatDirectoryToUrls(path=path) + self._paths_to_urls[path] = self._path_to_urls @property def link(self) -> Optional[Link]: return None def page_candidates(self) -> FoundCandidates: - for path in self._path.iterdir(): - url = path_to_url(str(path)) - if not _is_html_file(url): - continue + for url in self._path_to_urls.page_candidates: yield from self._candidates_from_page(Link(url)) def file_links(self) -> FoundLinks: - for path in self._path.iterdir(): - url = path_to_url(str(path)) - if _is_html_file(url): - continue + for url in self._path_to_urls.project_name_to_urls[self._project_name]: yield Link(url) @@ -170,6 +230,7 @@ def build_source( page_validator: PageValidator, expand_dir: bool, cache_link_parsing: bool, + project_name: str, ) -> Tuple[Optional[str], Optional[LinkSource]]: path: Optional[str] = None url: Optional[str] = None @@ -203,6 +264,7 @@ def build_source( source = _FlatDirectorySource( candidates_from_page=candidates_from_page, path=path, + project_name=project_name, ) else: source = _IndexDirectorySource( diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index d54bc63eba3..32382be7fe5 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -336,17 +336,6 @@ def get_scheme( if skip_linux_system_special_case: continue - # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in - # the "pythonX.Y" part of the path, but distutils does. - skip_sysconfig_abiflag_bug = ( - sys.version_info < (3, 8) - and not WINDOWS - and k in ("headers", "platlib", "purelib") - and tuple(_fix_abiflags(old_v.parts)) == new_v.parts - ) - if skip_sysconfig_abiflag_bug: - continue - # MSYS2 MINGW's sysconfig patch does not include the "site-packages" # part of the path. This is incorrect and will be fixed in MSYS. skip_msys2_mingw_bug = ( diff --git a/src/pip/_internal/locations/_distutils.py b/src/pip/_internal/locations/_distutils.py index 92bd93179c5..0e18c6e1e14 100644 --- a/src/pip/_internal/locations/_distutils.py +++ b/src/pip/_internal/locations/_distutils.py @@ -56,8 +56,7 @@ def distutils_scheme( try: d.parse_config_files() except UnicodeDecodeError: - # Typeshed does not include find_config_files() for some reason. 
- paths = d.find_config_files() # type: ignore + paths = d.find_config_files() logger.warning( "Ignore distutils configs in %s due to encoding errors.", ", ".join(os.path.basename(p) for p in paths), @@ -89,7 +88,7 @@ def distutils_scheme( # finalize_options(); we only want to override here if the user # has explicitly requested it hence going back to the config if "install_lib" in d.get_option_dict("install"): - scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) + scheme.update({"purelib": i.install_lib, "platlib": i.install_lib}) if running_under_virtualenv(): if home: diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index 97aef1f1ac2..ca860ea562c 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -192,9 +192,10 @@ def get_scheme( data=paths["data"], ) if root is not None: + converted_keys = {} for key in SCHEME_KEYS: - value = change_root(root, getattr(scheme, key)) - setattr(scheme, key, value) + converted_keys[key] = change_root(root, getattr(scheme, key)) + scheme = Scheme(**converted_keys) return scheme diff --git a/src/pip/_internal/metadata/__init__.py b/src/pip/_internal/metadata/__init__.py index 9f73ca7105f..aa232b6cabd 100644 --- a/src/pip/_internal/metadata/__init__.py +++ b/src/pip/_internal/metadata/__init__.py @@ -9,7 +9,7 @@ from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel if TYPE_CHECKING: - from typing import Protocol + from typing import Literal, Protocol else: Protocol = object @@ -50,6 +50,7 @@ def _should_use_importlib_metadata() -> bool: class Backend(Protocol): + NAME: 'Literal["importlib", "pkg_resources"]' Distribution: Type[BaseDistribution] Environment: Type[BaseEnvironment] diff --git a/src/pip/_internal/metadata/_json.py b/src/pip/_internal/metadata/_json.py index 336b52f1efd..9097dd58590 100644 --- a/src/pip/_internal/metadata/_json.py +++ b/src/pip/_internal/metadata/_json.py @@ -2,7 +2,7 @@ from email.header import Header, decode_header, make_header from email.message import Message -from typing import Any, Dict, List, Union +from typing import Any, Dict, List, Union, cast METADATA_FIELDS = [ # Name, Multiple-Use @@ -64,10 +64,10 @@ def sanitise_header(h: Union[Header, str]) -> str: key = json_name(field) if multi: value: Union[str, List[str]] = [ - sanitise_header(v) for v in msg.get_all(field) + sanitise_header(v) for v in msg.get_all(field) # type: ignore ] else: - value = sanitise_header(msg.get(field)) + value = sanitise_header(msg.get(field)) # type: ignore if key == "keywords": # Accept both comma-separated and space-separated # forms, for better compatibility with old data. 
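The _sysconfig.py hunk above also follows from the dataclass conversions: once Scheme becomes a frozen dataclass (see models/scheme.py later in this diff), setattr() on it raises, so a re-rooted copy must be built through the constructor. A rough sketch of the idea; the paths are invented and the join/lstrip combination is only a POSIX stand-in for distutils.util.change_root:

import dataclasses
import os


@dataclasses.dataclass(frozen=True)
class Scheme:
    platlib: str
    purelib: str
    headers: str
    scripts: str
    data: str


scheme = Scheme("/usr/lib", "/usr/lib", "/usr/include", "/usr/bin", "/usr")
root = "/tmp/root"
# Rebuild every field under the new root instead of mutating in place.
rerooted = Scheme(
    **{
        f.name: os.path.join(root, getattr(scheme, f.name).lstrip(os.sep))
        for f in dataclasses.fields(scheme)
    }
)
assert rerooted.platlib == "/tmp/root/usr/lib"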
@@ -77,7 +77,7 @@ def sanitise_header(h: Union[Header, str]) -> str: value = value.split() result[key] = value - payload = msg.get_payload() + payload = cast(str, msg.get_payload()) if payload: result["description"] = payload diff --git a/src/pip/_internal/metadata/base.py b/src/pip/_internal/metadata/base.py index cafb79fb3dc..af0412c819c 100644 --- a/src/pip/_internal/metadata/base.py +++ b/src/pip/_internal/metadata/base.py @@ -8,7 +8,6 @@ import zipfile from typing import ( IO, - TYPE_CHECKING, Any, Collection, Container, @@ -18,13 +17,14 @@ List, NamedTuple, Optional, + Protocol, Tuple, Union, ) from pip._vendor.packaging.requirements import Requirement from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet -from pip._vendor.packaging.utils import NormalizedName +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name from pip._vendor.packaging.version import LegacyVersion, Version from pip._internal.exceptions import NoneMetadataError @@ -37,16 +37,10 @@ from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here. from pip._internal.utils.egg_link import egg_link_path_from_sys_path from pip._internal.utils.misc import is_local, normalize_path -from pip._internal.utils.packaging import safe_extra from pip._internal.utils.urls import url_to_path from ._json import msg_to_json -if TYPE_CHECKING: - from typing import Protocol -else: - Protocol = object - DistributionVersion = Union[LegacyVersion, Version] InfoPath = Union[str, pathlib.PurePath] @@ -386,15 +380,7 @@ def iter_entry_points(self) -> Iterable[BaseEntryPoint]: def _metadata_impl(self) -> email.message.Message: raise NotImplementedError() - @functools.lru_cache(maxsize=1) - def _metadata_cached(self) -> email.message.Message: - # When we drop python 3.7 support, move this to the metadata property and use - # functools.cached_property instead of lru_cache. - metadata = self._metadata_impl() - self._add_egg_info_requires(metadata) - return metadata - - @property + @functools.cached_property def metadata(self) -> email.message.Message: """Metadata of distribution parsed from e.g. METADATA or PKG-INFO. @@ -403,7 +389,9 @@ def metadata(self) -> email.message.Message: :raises NoneMetadataError: If the metadata file is available, but does not contain valid metadata. """ - return self._metadata_cached() + metadata = self._metadata_impl() + self._add_egg_info_requires(metadata) + return metadata @property def metadata_dict(self) -> Dict[str, Any]: @@ -460,6 +448,19 @@ def iter_provided_extras(self) -> Iterable[str]: For modern .dist-info distributions, this is the collection of "Provides-Extra:" entries in distribution metadata. + + The return value of this function is not particularly useful other than + display purposes due to backward compatibility issues and the extra + names being poorly normalized prior to PEP 685. If you want to perform + logic operations on extras, use :func:`is_extra_provided` instead. + """ + raise NotImplementedError() + + def is_extra_provided(self, extra: str) -> bool: + """Check whether an extra is provided by this distribution. + + This is needed mostly for compatibility issues with pkg_resources not + following the extra normalization rules defined in PEP 685. 
""" raise NotImplementedError() @@ -537,10 +538,11 @@ def _iter_egg_info_extras(self) -> Iterable[str]: """Get extras from the egg-info directory.""" known_extras = {""} for entry in self._iter_requires_txt_entries(): - if entry.extra in known_extras: + extra = canonicalize_name(entry.extra) + if extra in known_extras: continue - known_extras.add(entry.extra) - yield entry.extra + known_extras.add(extra) + yield extra def _iter_egg_info_dependencies(self) -> Iterable[str]: """Get distribution dependencies from the egg-info directory. @@ -556,10 +558,11 @@ def _iter_egg_info_dependencies(self) -> Iterable[str]: all currently available PEP 517 backends, although not standardized. """ for entry in self._iter_requires_txt_entries(): - if entry.extra and entry.marker: - marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"' - elif entry.extra: - marker = f'extra == "{safe_extra(entry.extra)}"' + extra = canonicalize_name(entry.extra) + if extra and entry.marker: + marker = f'({entry.marker}) and extra == "{extra}"' + elif extra: + marker = f'extra == "{extra}"' elif entry.marker: marker = entry.marker else: diff --git a/src/pip/_internal/metadata/importlib/__init__.py b/src/pip/_internal/metadata/importlib/__init__.py index 5e7af9fe521..a779138db10 100644 --- a/src/pip/_internal/metadata/importlib/__init__.py +++ b/src/pip/_internal/metadata/importlib/__init__.py @@ -1,4 +1,6 @@ from ._dists import Distribution from ._envs import Environment -__all__ = ["Distribution", "Environment"] +__all__ = ["NAME", "Distribution", "Environment"] + +NAME = "importlib" diff --git a/src/pip/_internal/metadata/importlib/_dists.py b/src/pip/_internal/metadata/importlib/_dists.py index 65c043c87ef..8591029f16e 100644 --- a/src/pip/_internal/metadata/importlib/_dists.py +++ b/src/pip/_internal/metadata/importlib/_dists.py @@ -27,7 +27,6 @@ Wheel, ) from pip._internal.utils.misc import normalize_path -from pip._internal.utils.packaging import safe_extra from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file @@ -134,8 +133,6 @@ def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution: dist = WheelDistribution.from_zipfile(zf, name, wheel.location) except zipfile.BadZipFile as e: raise InvalidWheel(wheel.location, name) from e - except UnsupportedWheel as e: - raise UnsupportedWheel(f"{name} has an invalid wheel, {e}") return cls(dist, dist.info_location, pathlib.PurePosixPath(wheel.location)) @property @@ -208,12 +205,16 @@ def _metadata_impl(self) -> email.message.Message: return cast(email.message.Message, self._dist.metadata) def iter_provided_extras(self) -> Iterable[str]: - return ( - safe_extra(extra) for extra in self.metadata.get_all("Provides-Extra", []) + return self.metadata.get_all("Provides-Extra", []) + + def is_extra_provided(self, extra: str) -> bool: + return any( + canonicalize_name(provided_extra) == canonicalize_name(extra) + for provided_extra in self.metadata.get_all("Provides-Extra", []) ) def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: - contexts: Sequence[Dict[str, str]] = [{"extra": safe_extra(e)} for e in extras] + contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras] for req_string in self.metadata.get_all("Requires-Dist", []): req = Requirement(req_string) if not req.marker: diff --git a/src/pip/_internal/metadata/importlib/_envs.py b/src/pip/_internal/metadata/importlib/_envs.py index cbec59e2c6d..048dc55dcb2 100644 --- 
a/src/pip/_internal/metadata/importlib/_envs.py +++ b/src/pip/_internal/metadata/importlib/_envs.py @@ -151,7 +151,8 @@ def _emit_egg_deprecation(location: Optional[str]) -> None: deprecated( reason=f"Loading egg at {location} is deprecated.", replacement="to use pip for package installation.", - gone_in=None, + gone_in="24.3", + issue=12330, ) @@ -174,7 +175,7 @@ def _iter_distributions(self) -> Iterator[BaseDistribution]: for location in self._paths: yield from finder.find(location) for dist in finder.find_eggs(location): - # _emit_egg_deprecation(dist.location) # TODO: Enable this. + _emit_egg_deprecation(dist.location) yield dist # This must go last because that's how pkg_resources tie-breaks. yield from finder.find_linked(location) diff --git a/src/pip/_internal/metadata/pkg_resources.py b/src/pip/_internal/metadata/pkg_resources.py index f330ef12a2c..bb11e5bd8a5 100644 --- a/src/pip/_internal/metadata/pkg_resources.py +++ b/src/pip/_internal/metadata/pkg_resources.py @@ -24,8 +24,12 @@ Wheel, ) +__all__ = ["NAME", "Distribution", "Environment"] + logger = logging.getLogger(__name__) +NAME = "pkg_resources" + class EntryPoint(NamedTuple): name: str @@ -212,12 +216,16 @@ def _metadata_impl(self) -> email.message.Message: def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: if extras: # pkg_resources raises on invalid extras, so we sanitize. - extras = frozenset(extras).intersection(self._dist.extras) + extras = frozenset(pkg_resources.safe_extra(e) for e in extras) + extras = extras.intersection(self._dist.extras) return self._dist.requires(extras) def iter_provided_extras(self) -> Iterable[str]: return self._dist.extras + def is_extra_provided(self, extra: str) -> bool: + return pkg_resources.safe_extra(extra) in self._dist.extras + class Environment(BaseEnvironment): def __init__(self, ws: pkg_resources.WorkingSet) -> None: diff --git a/src/pip/_internal/models/candidate.py b/src/pip/_internal/models/candidate.py index a4963aec638..f27f283154a 100644 --- a/src/pip/_internal/models/candidate.py +++ b/src/pip/_internal/models/candidate.py @@ -1,34 +1,25 @@ +from dataclasses import dataclass + +from pip._vendor.packaging.version import Version from pip._vendor.packaging.version import parse as parse_version from pip._internal.models.link import Link -from pip._internal.utils.models import KeyBasedCompareMixin -class InstallationCandidate(KeyBasedCompareMixin): +@dataclass(frozen=True) +class InstallationCandidate: """Represents a potential "candidate" for installation.""" __slots__ = ["name", "version", "link"] + name: str + version: Version + link: Link + def __init__(self, name: str, version: str, link: Link) -> None: - self.name = name - self.version = parse_version(version) - self.link = link - - super().__init__( - key=(self.name, self.version, self.link), - defining_class=InstallationCandidate, - ) - - def __repr__(self) -> str: - return "".format( - self.name, - self.version, - self.link, - ) + object.__setattr__(self, "name", name) + object.__setattr__(self, "version", parse_version(version)) + object.__setattr__(self, "link", link) def __str__(self) -> str: - return "{!r} candidate (version {} at {})".format( - self.name, - self.version, - self.link, - ) + return f"{self.name!r} candidate (version {self.version} at {self.link})" diff --git a/src/pip/_internal/models/direct_url.py b/src/pip/_internal/models/direct_url.py index e219d73849b..fc5ec8d4aa9 100644 --- a/src/pip/_internal/models/direct_url.py +++ b/src/pip/_internal/models/direct_url.py 
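The InstallationCandidate change above shows the one wrinkle in converting to frozen dataclasses: a custom __init__ that transforms its arguments (here, parsing the version string) must assign through object.__setattr__, because the generated __setattr__ refuses all mutation. A sketch with a hypothetical Candidate class, using the standalone packaging distribution instead of pip's vendored copy:

from dataclasses import dataclass

from packaging.version import Version, parse


@dataclass(frozen=True)
class Candidate:
    name: str
    version: Version

    def __init__(self, name: str, version: str) -> None:
        # Bypass the frozen __setattr__; the instance is still immutable
        # to ordinary attribute assignment afterwards.
        object.__setattr__(self, "name", name)
        object.__setattr__(self, "version", parse(version))


c = Candidate("pip", "23.2.1")
assert c.version == Version("23.2.1")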
@@ -1,8 +1,10 @@ """ PEP 610 """ + import json import re import urllib.parse -from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union +from dataclasses import dataclass +from typing import Any, ClassVar, Dict, Iterable, Optional, Type, TypeVar, Union __all__ = [ "DirectUrl", @@ -31,9 +33,7 @@ def _get( value = d[key] if not isinstance(value, expected_type): raise DirectUrlValidationError( - "{!r} has unexpected type for {} (expected {})".format( - value, key, expected_type - ) + f"{value!r} has unexpected type for {key} (expected {expected_type})" ) return value @@ -66,18 +66,13 @@ def _filter_none(**kwargs: Any) -> Dict[str, Any]: return {k: v for k, v in kwargs.items() if v is not None} +@dataclass class VcsInfo: - name = "vcs_info" + name: ClassVar = "vcs_info" - def __init__( - self, - vcs: str, - commit_id: str, - requested_revision: Optional[str] = None, - ) -> None: - self.vcs = vcs - self.requested_revision = requested_revision - self.commit_id = commit_id + vcs: str + commit_id: str + requested_revision: Optional[str] = None @classmethod def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]: @@ -141,14 +136,11 @@ def _to_dict(self) -> Dict[str, Any]: return _filter_none(hash=self.hash, hashes=self.hashes) +@dataclass class DirInfo: - name = "dir_info" + name: ClassVar = "dir_info" - def __init__( - self, - editable: bool = False, - ) -> None: - self.editable = editable + editable: bool = False @classmethod def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]: @@ -163,16 +155,11 @@ def _to_dict(self) -> Dict[str, Any]: InfoType = Union[ArchiveInfo, DirInfo, VcsInfo] +@dataclass class DirectUrl: - def __init__( - self, - url: str, - info: InfoType, - subdirectory: Optional[str] = None, - ) -> None: - self.url = url - self.info = info - self.subdirectory = subdirectory + url: str + info: InfoType + subdirectory: Optional[str] = None def _remove_auth_from_netloc(self, netloc: str) -> str: if "@" not in netloc: diff --git a/src/pip/_internal/models/format_control.py b/src/pip/_internal/models/format_control.py index db3995eac9f..ccd11272c03 100644 --- a/src/pip/_internal/models/format_control.py +++ b/src/pip/_internal/models/format_control.py @@ -33,9 +33,7 @@ def __eq__(self, other: object) -> bool: return all(getattr(self, k) == getattr(other, k) for k in self.__slots__) def __repr__(self) -> str: - return "{}({}, {})".format( - self.__class__.__name__, self.no_binary, self.only_binary - ) + return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})" @staticmethod def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None: diff --git a/src/pip/_internal/models/installation_report.py b/src/pip/_internal/models/installation_report.py index fef3757f222..b9c6330df32 100644 --- a/src/pip/_internal/models/installation_report.py +++ b/src/pip/_internal/models/installation_report.py @@ -22,7 +22,10 @@ def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]: # is_direct is true if the requirement was a direct URL reference (which # includes editable requirements), and false if the requirement was # downloaded from a PEP 503 index or --find-links. - "is_direct": bool(ireq.original_link), + "is_direct": ireq.is_direct, + # is_yanked is true if the requirement was yanked from the index, but + # was still selected by pip to conform to PEP 592. 
+ "is_yanked": ireq.link.is_yanked if ireq.link else False, # requested is true if the requirement was specified by the user (aka # top level requirement), and false if it was installed as a dependency of a # requirement. https://peps.python.org/pep-0376/#requested @@ -33,7 +36,7 @@ def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]: } if ireq.user_supplied and ireq.extras: # For top level requirements, the list of requested extras, if any. - res["requested_extras"] = list(sorted(ireq.extras)) + res["requested_extras"] = sorted(ireq.extras) return res def to_dict(self) -> Dict[str, Any]: diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index e741c3283cd..2f41f2f6a09 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -27,7 +27,6 @@ split_auth_from_netloc, splitext, ) -from pip._internal.utils.models import KeyBasedCompareMixin from pip._internal.utils.urls import path_to_url, url_to_path if TYPE_CHECKING: @@ -69,18 +68,6 @@ class LinkHash: def __post_init__(self) -> None: assert self.name in _SUPPORTED_HASHES - @classmethod - def parse_pep658_hash(cls, dist_info_metadata: str) -> Optional["LinkHash"]: - """Parse a PEP 658 data-dist-info-metadata hash.""" - if dist_info_metadata == "true": - return None - name, sep, value = dist_info_metadata.partition("=") - if not sep: - return None - if name not in _SUPPORTED_HASHES: - return None - return cls(name=name, value=value) - @classmethod @functools.lru_cache(maxsize=None) def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]: @@ -107,6 +94,28 @@ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool: return hashes.is_hash_allowed(self.name, hex_digest=self.value) +@dataclass(frozen=True) +class MetadataFile: + """Information about a core metadata file associated with a distribution.""" + + hashes: Optional[Dict[str, str]] + + def __post_init__(self) -> None: + if self.hashes is not None: + assert all(name in _SUPPORTED_HASHES for name in self.hashes) + + +def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]: + # Remove any unsupported hash types from the mapping. If this leaves no + # supported hashes, return None + if hashes is None: + return None + hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES} + if not hashes: + return None + return hashes + + def _clean_url_path_part(part: str) -> str: """ Clean a "part" of a URL path (i.e. after splitting on "@" characters). @@ -169,7 +178,8 @@ def _ensure_quoted_url(url: str) -> str: return urllib.parse.urlunparse(result._replace(path=path)) -class Link(KeyBasedCompareMixin): +@functools.total_ordering +class Link: """Represents a parsed link from a Package Index's simple URL""" __slots__ = [ @@ -179,7 +189,7 @@ class Link(KeyBasedCompareMixin): "comes_from", "requires_python", "yanked_reason", - "dist_info_metadata", + "metadata_file_data", "cache_link_parsing", "egg_fragment", ] @@ -190,7 +200,7 @@ def __init__( comes_from: Optional[Union[str, "IndexContent"]] = None, requires_python: Optional[str] = None, yanked_reason: Optional[str] = None, - dist_info_metadata: Optional[str] = None, + metadata_file_data: Optional[MetadataFile] = None, cache_link_parsing: bool = True, hashes: Optional[Mapping[str, str]] = None, ) -> None: @@ -208,11 +218,10 @@ def __init__( a simple repository HTML link. If the file has been yanked but no reason was provided, this should be the empty string. See PEP 592 for more information and the specification. 
- :param dist_info_metadata: the metadata attached to the file, or None if no such - metadata is provided. This is the value of the "data-dist-info-metadata" - attribute, if present, in a simple repository HTML link. This may be parsed - into its own `Link` by `self.metadata_link()`. See PEP 658 for more - information and the specification. + :param metadata_file_data: the metadata attached to the file, or None if + no such metadata is provided. This argument, if not None, indicates + that a separate metadata file exists, and also optionally supplies + hashes for that file. :param cache_link_parsing: A flag that is used elsewhere to determine whether resources retrieved from this link should be cached. PyPI URLs should generally have this set to False, for example. @@ -220,6 +229,10 @@ def __init__( determine the validity of a download. """ + # The comes_from, requires_python, and metadata_file_data arguments are + # only used by classmethods of this class, and are not used in client + # code directly. + # url can be a UNC windows share if url.startswith("\\\\"): url = path_to_url(url) @@ -239,9 +252,7 @@ def __init__( self.comes_from = comes_from self.requires_python = requires_python if requires_python else None self.yanked_reason = yanked_reason - self.dist_info_metadata = dist_info_metadata - - super().__init__(key=url, defining_class=Link) + self.metadata_file_data = metadata_file_data self.cache_link_parsing = cache_link_parsing self.egg_fragment = self._egg_fragment() @@ -262,9 +273,25 @@ def from_json( url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url)) pyrequire = file_data.get("requires-python") yanked_reason = file_data.get("yanked") - dist_info_metadata = file_data.get("dist-info-metadata") hashes = file_data.get("hashes", {}) + # PEP 714: Indexes must use the name core-metadata, but + # clients should support the old name as a fallback for compatibility. + metadata_info = file_data.get("core-metadata") + if metadata_info is None: + metadata_info = file_data.get("dist-info-metadata") + + # The metadata info value may be a boolean, or a dict of hashes. + if isinstance(metadata_info, dict): + # The file exists, and hashes have been supplied + metadata_file_data = MetadataFile(supported_hashes(metadata_info)) + elif metadata_info: + # The file exists, but there are no hashes + metadata_file_data = MetadataFile(None) + else: + # False or not present: the file does not exist + metadata_file_data = None + # The Link.yanked_reason expects an empty string instead of a boolean. if yanked_reason and not isinstance(yanked_reason, str): yanked_reason = "" @@ -278,7 +305,7 @@ def from_json( requires_python=pyrequire, yanked_reason=yanked_reason, hashes=hashes, - dist_info_metadata=dist_info_metadata, + metadata_file_data=metadata_file_data, ) @classmethod @@ -298,14 +325,39 @@ def from_element( url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href)) pyrequire = anchor_attribs.get("data-requires-python") yanked_reason = anchor_attribs.get("data-yanked") - dist_info_metadata = anchor_attribs.get("data-dist-info-metadata") + + # PEP 714: Indexes must use the name data-core-metadata, but + # clients should support the old name as a fallback for compatibility. 
+ metadata_info = anchor_attribs.get("data-core-metadata") + if metadata_info is None: + metadata_info = anchor_attribs.get("data-dist-info-metadata") + # The metadata info value may be the string "true", or a string of + # the form "hashname=hashval" + if metadata_info == "true": + # The file exists, but there are no hashes + metadata_file_data = MetadataFile(None) + elif metadata_info is None: + # The file does not exist + metadata_file_data = None + else: + # The file exists, and hashes have been supplied + hashname, sep, hashval = metadata_info.partition("=") + if sep == "=": + metadata_file_data = MetadataFile(supported_hashes({hashname: hashval})) + else: + # Error - data is wrong. Treat as no hashes supplied. + logger.debug( + "Index returned invalid data-dist-info-metadata value: %s", + metadata_info, + ) + metadata_file_data = MetadataFile(None) return cls( url, comes_from=page_url, requires_python=pyrequire, yanked_reason=yanked_reason, - dist_info_metadata=dist_info_metadata, + metadata_file_data=metadata_file_data, ) def __str__(self) -> str: @@ -314,15 +366,26 @@ def __str__(self) -> str: else: rp = "" if self.comes_from: - return "{} (from {}){}".format( - redact_auth_from_url(self._url), self.comes_from, rp - ) + return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}" else: return redact_auth_from_url(str(self._url)) def __repr__(self) -> str: return f"" + def __hash__(self) -> int: + return hash(self.url) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Link): + return NotImplemented + return self.url == other.url + + def __lt__(self, other: Any) -> bool: + if not isinstance(other, Link): + return NotImplemented + return self.url < other.url + @property def url(self) -> str: return self._url @@ -407,17 +470,13 @@ def subdirectory_fragment(self) -> Optional[str]: return match.group(1) def metadata_link(self) -> Optional["Link"]: - """Implementation of PEP 658 parsing.""" - # Note that Link.from_element() parsing the "data-dist-info-metadata" attribute - # from an HTML anchor tag is typically how the Link.dist_info_metadata attribute - # gets set. - if self.dist_info_metadata is None: + """Return a link to the associated core metadata file (if any).""" + if self.metadata_file_data is None: return None metadata_url = f"{self.url_without_fragment}.metadata" - metadata_link_hash = LinkHash.parse_pep658_hash(self.dist_info_metadata) - if metadata_link_hash is None: + if self.metadata_file_data.hashes is None: return Link(metadata_url) - return Link(metadata_url, hashes=metadata_link_hash.as_dict()) + return Link(metadata_url, hashes=self.metadata_file_data.hashes) def as_hashes(self) -> Hashes: return Hashes({k: [v] for k, v in self._hashes.items()}) diff --git a/src/pip/_internal/models/scheme.py b/src/pip/_internal/models/scheme.py index f51190ac603..06a9a550e34 100644 --- a/src/pip/_internal/models/scheme.py +++ b/src/pip/_internal/models/scheme.py @@ -5,10 +5,12 @@ https://docs.python.org/3/install/index.html#alternate-installation. """ +from dataclasses import dataclass SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"] +@dataclass(frozen=True) class Scheme: """A Scheme holds paths which are used as the base directories for artifacts associated with a Python package. 
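The Link.from_element hunk above implements PEP 714's attribute fallback and the three-way value convention ("true", absent, or "hashname=hashval"). A condensed sketch of just that parsing, as a standalone helper; the supported-hash set and the {}-for-no-hashes convention are simplifications of pip's MetadataFile handling:

from typing import Dict, Optional

_SUPPORTED_HASHES = {"sha256", "sha384", "sha512", "sha224", "sha1", "md5"}


def parse_metadata_attr(value: Optional[str]) -> Optional[Dict[str, str]]:
    """Return a hash mapping ({} meaning "file exists, no usable hashes"),
    or None when no metadata file is advertised at all."""
    if value is None:
        return None
    if value == "true":
        return {}
    name, sep, digest = value.partition("=")
    if sep == "=" and name in _SUPPORTED_HASHES:
        return {name: digest}
    # Malformed or unsupported value: treat as present but hashless.
    return {}


assert parse_metadata_attr("sha256=abc123") == {"sha256": "abc123"}
assert parse_metadata_attr("true") == {}
assert parse_metadata_attr(None) is None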
@@ -16,16 +18,8 @@ class Scheme: __slots__ = SCHEME_KEYS - def __init__( - self, - platlib: str, - purelib: str, - headers: str, - scripts: str, - data: str, - ) -> None: - self.platlib = platlib - self.purelib = purelib - self.headers = headers - self.scripts = scripts - self.data = data + platlib: str + purelib: str + headers: str + scripts: str + data: str diff --git a/src/pip/_internal/models/search_scope.py b/src/pip/_internal/models/search_scope.py index fe61e8116b7..ee7bc86229a 100644 --- a/src/pip/_internal/models/search_scope.py +++ b/src/pip/_internal/models/search_scope.py @@ -3,6 +3,7 @@ import os import posixpath import urllib.parse +from dataclasses import dataclass from typing import List from pip._vendor.packaging.utils import canonicalize_name @@ -14,14 +15,18 @@ logger = logging.getLogger(__name__) +@dataclass(frozen=True) class SearchScope: - """ Encapsulates the locations that pip is configured to search. """ __slots__ = ["find_links", "index_urls", "no_index"] + find_links: List[str] + index_urls: List[str] + no_index: bool + @classmethod def create( cls, @@ -64,16 +69,6 @@ def create( no_index=no_index, ) - def __init__( - self, - find_links: List[str], - index_urls: List[str], - no_index: bool, - ) -> None: - self.find_links = find_links - self.index_urls = index_urls - self.no_index = no_index - def get_formatted_locations(self) -> str: lines = [] redacted_index_urls = [] diff --git a/src/pip/_internal/models/selection_prefs.py b/src/pip/_internal/models/selection_prefs.py index 977bc4caa75..e9b50aa5175 100644 --- a/src/pip/_internal/models/selection_prefs.py +++ b/src/pip/_internal/models/selection_prefs.py @@ -3,6 +3,8 @@ from pip._internal.models.format_control import FormatControl +# TODO: This needs Python 3.10's improved slots support for dataclasses +# to be converted into a dataclass. class SelectionPreferences: """ Encapsulates the candidate selection preferences for downloading diff --git a/src/pip/_internal/models/target_python.py b/src/pip/_internal/models/target_python.py index 744bd7ef58b..88925a9fd01 100644 --- a/src/pip/_internal/models/target_python.py +++ b/src/pip/_internal/models/target_python.py @@ -1,5 +1,5 @@ import sys -from typing import List, Optional, Tuple +from typing import List, Optional, Set, Tuple from pip._vendor.packaging.tags import Tag @@ -8,7 +8,6 @@ class TargetPython: - """ Encapsulates the properties of a Python interpreter one is targeting for a package install, download, etc. @@ -22,6 +21,7 @@ class TargetPython: "py_version", "py_version_info", "_valid_tags", + "_valid_tags_set", ] def __init__( @@ -61,8 +61,9 @@ def __init__( self.py_version = py_version self.py_version_info = py_version_info - # This is used to cache the return value of get_tags(). + # This is used to cache the return value of get_(un)sorted_tags. self._valid_tags: Optional[List[Tag]] = None + self._valid_tags_set: Optional[Set[Tag]] = None def format_given(self) -> str: """ @@ -84,7 +85,7 @@ def format_given(self) -> str: f"{key}={value!r}" for key, value in key_values if value is not None ) - def get_tags(self) -> List[Tag]: + def get_sorted_tags(self) -> List[Tag]: """ Return the supported PEP 425 tags to check wheel candidates against. @@ -108,3 +109,13 @@ def get_tags(self) -> List[Tag]: self._valid_tags = tags return self._valid_tags + + def get_unsorted_tags(self) -> Set[Tag]: + """Exactly the same as get_sorted_tags, but returns a set. + + This is important for performance. 
+ """ + if self._valid_tags_set is None: + self._valid_tags_set = set(self.get_sorted_tags()) + + return self._valid_tags_set diff --git a/src/pip/_internal/models/wheel.py b/src/pip/_internal/models/wheel.py index a5dc12bdd63..36d4d2e785c 100644 --- a/src/pip/_internal/models/wheel.py +++ b/src/pip/_internal/models/wheel.py @@ -1,6 +1,7 @@ """Represents a wheel file and provides access to the various parts of the name that have meaning. """ + import re from typing import Dict, Iterable, List diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index c0efa765c85..4705b55a7aa 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -3,6 +3,7 @@ Contains interface (MultiDomainBasicAuth) and associated glue code for providing credentials in the context of network requests. """ + import logging import os import shutil @@ -47,12 +48,12 @@ class KeyRingBaseProvider(ABC): has_keyring: bool @abstractmethod - def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: - ... + def get_auth_info( + self, url: str, username: Optional[str] + ) -> Optional[AuthInfo]: ... @abstractmethod - def save_auth_info(self, url: str, username: str, password: str) -> None: - ... + def save_auth_info(self, url: str, username: str, password: str) -> None: ... class KeyRingNullProvider(KeyRingBaseProvider): @@ -151,7 +152,7 @@ def _set_password(self, service_name: str, username: str, password: str) -> None env["PYTHONIOENCODING"] = "utf-8" subprocess.run( [self.keyring, "set", service_name, username], - input=f"{password}{os.linesep}".encode("utf-8"), + input=f"{password}{os.linesep}".encode(), env=env, check=True, ) @@ -514,7 +515,9 @@ def handle_401(self, resp: Response, **kwargs: Any) -> Response: # Consume content and release the original connection to allow our new # request to reuse the same one. - resp.content + # The result of the assignment isn't used, it's just needed to consume + # the content. + _ = resp.content resp.raw.release_conn() # Add our new username and password to the request diff --git a/src/pip/_internal/network/cache.py b/src/pip/_internal/network/cache.py index a81a2398519..4d0fb545dc2 100644 --- a/src/pip/_internal/network/cache.py +++ b/src/pip/_internal/network/cache.py @@ -3,10 +3,11 @@ import os from contextlib import contextmanager -from typing import Generator, Optional +from datetime import datetime +from typing import BinaryIO, Generator, Optional, Union -from pip._vendor.cachecontrol.cache import BaseCache -from pip._vendor.cachecontrol.caches import FileCache +from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache +from pip._vendor.cachecontrol.caches import SeparateBodyFileCache from pip._vendor.requests.models import Response from pip._internal.utils.filesystem import adjacent_tmp_file, replace @@ -28,10 +29,22 @@ def suppressed_cache_errors() -> Generator[None, None, None]: pass -class SafeFileCache(BaseCache): +class SafeFileCache(SeparateBodyBaseCache): """ A file based cache which is safe to use even when the target directory may not be accessible or writable. + + There is a race condition when two processes try to write and/or read the + same entry at the same time, since each entry consists of two separate + files (https://github.com/psf/cachecontrol/issues/324). We therefore have + additional logic that makes sure that both files to be present before + returning an entry; this fixes the read side of the race condition. 
+ + For the write side, we assume that the server will only ever return the + same data for the same URL, which ought to be the case for files pip is + downloading. PyPI does not have a mechanism to swap out a wheel for + another wheel, for example. If this assumption is not true, the + CacheControl issue will need to be fixed. """ def __init__(self, directory: str) -> None: @@ -43,27 +56,51 @@ def _get_cache_path(self, name: str) -> str: # From cachecontrol.caches.file_cache.FileCache._fn, brought into our # class for backwards-compatibility and to avoid using a non-public # method. - hashed = FileCache.encode(name) + hashed = SeparateBodyFileCache.encode(name) parts = list(hashed[:5]) + [hashed] return os.path.join(self.directory, *parts) def get(self, key: str) -> Optional[bytes]: - path = self._get_cache_path(key) + # The cache entry is only valid if both metadata and body exist. + metadata_path = self._get_cache_path(key) + body_path = metadata_path + ".body" + if not (os.path.exists(metadata_path) and os.path.exists(body_path)): + return None with suppressed_cache_errors(): - with open(path, "rb") as f: + with open(metadata_path, "rb") as f: return f.read() - def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None: - path = self._get_cache_path(key) + def _write(self, path: str, data: bytes) -> None: with suppressed_cache_errors(): ensure_dir(os.path.dirname(path)) with adjacent_tmp_file(path) as f: - f.write(value) + f.write(data) replace(f.name, path) + def set( + self, key: str, value: bytes, expires: Union[int, datetime, None] = None + ) -> None: + path = self._get_cache_path(key) + self._write(path, value) + def delete(self, key: str) -> None: path = self._get_cache_path(key) with suppressed_cache_errors(): os.remove(path) + with suppressed_cache_errors(): + os.remove(path + ".body") + + def get_body(self, key: str) -> Optional[BinaryIO]: + # The cache entry is only valid if both metadata and body exist. + metadata_path = self._get_cache_path(key) + body_path = metadata_path + ".body" + if not (os.path.exists(metadata_path) and os.path.exists(body_path)): + return None + with suppressed_cache_errors(): + return open(body_path, "rb") + + def set_body(self, key: str, body: bytes) -> None: + path = self._get_cache_path(key) + ".body" + self._write(path, body) diff --git a/src/pip/_internal/network/download.py b/src/pip/_internal/network/download.py index 79b82a570e5..032fdd0314f 100644 --- a/src/pip/_internal/network/download.py +++ b/src/pip/_internal/network/download.py @@ -1,5 +1,6 @@ """Download files with progress indicators. """ + import email.message import logging import mimetypes @@ -42,7 +43,7 @@ def _prepare_download( logged_url = redact_auth_from_url(url) if total_length: - logged_url = "{} ({})".format(logged_url, format_size(total_length)) + logged_url = f"{logged_url} ({format_size(total_length)})" if is_from_cache(resp): logger.info("Using cached %s", logged_url) diff --git a/src/pip/_internal/network/session.py b/src/pip/_internal/network/session.py index 6c40ade1595..1765b4f6bd7 100644 --- a/src/pip/_internal/network/session.py +++ b/src/pip/_internal/network/session.py @@ -3,6 +3,7 @@ """ import email.utils +import functools import io import ipaddress import json @@ -106,6 +107,7 @@ def looks_like_ci() -> bool: return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES) +@functools.lru_cache(maxsize=1) def user_agent() -> str: """ Return a string representing the user agent. 
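The new functools.lru_cache(maxsize=1) decorator memoizes the user-agent string, which is deterministic for the life of the process but moderately expensive to assemble. A self-contained sketch of the same pattern (the function name and payload are stand-ins, not pip's code):

import functools

@functools.lru_cache(maxsize=1)
def build_banner() -> str:
    print("assembled once")  # visible side effect: runs on the first call only
    return "pip/XX.Y (...)"  # stand-in for the real JSON-laden user-agent string

build_banner()  # prints "assembled once" and caches the result
build_banner()  # served from the cache; nothing is printed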
@@ -230,7 +232,7 @@ def send( # to return a better error message: resp.status_code = 404 resp.reason = type(exc).__name__ - resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8")) + resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode()) else: modified = email.utils.formatdate(stats.st_mtime, usegmt=True) content_type = mimetypes.guess_type(pathname)[0] or "text/plain" @@ -355,8 +357,9 @@ def __init__( # is typically considered a transient error so we'll go ahead and # retry it. # A 500 may indicate transient error in Amazon S3 + # A 502 may be a transient error from a CDN like CloudFlare or CloudFront # A 520 or 527 - may indicate transient error in CloudFlare - status_forcelist=[500, 503, 520, 527], + status_forcelist=[500, 502, 503, 520, 527], # Add a small amount of back off between failed requests in # order to prevent hammering the service. backoff_factor=0.25, @@ -419,15 +422,17 @@ def add_trusted_host( msg += f" (from {source})" logger.info(msg) - host_port = parse_netloc(host) - if host_port not in self.pip_trusted_origins: - self.pip_trusted_origins.append(host_port) + parsed_host, parsed_port = parse_netloc(host) + if parsed_host is None: + raise ValueError(f"Trusted host URL must include a host part: {host!r}") + if (parsed_host, parsed_port) not in self.pip_trusted_origins: + self.pip_trusted_origins.append((parsed_host, parsed_port)) self.mount( build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter ) self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter) - if not host_port[1]: + if not parsed_port: self.mount( build_url_from_netloc(host, scheme="http") + ":", self._trusted_host_adapter, diff --git a/src/pip/_internal/network/xmlrpc.py b/src/pip/_internal/network/xmlrpc.py index 4a7d55d0e50..22ec8d2f4a6 100644 --- a/src/pip/_internal/network/xmlrpc.py +++ b/src/pip/_internal/network/xmlrpc.py @@ -13,6 +13,8 @@ if TYPE_CHECKING: from xmlrpc.client import _HostType, _Marshallable + from _typeshed import SizedBuffer + logger = logging.getLogger(__name__) @@ -33,7 +35,7 @@ def request( self, host: "_HostType", handler: str, - request_body: bytes, + request_body: "SizedBuffer", verbose: bool = False, ) -> Tuple["_Marshallable", ...]: assert isinstance(host, str) diff --git a/src/pip/_internal/operations/build/build_tracker.py b/src/pip/_internal/operations/build/build_tracker.py index 6621549b844..0ed8dd23596 100644 --- a/src/pip/_internal/operations/build/build_tracker.py +++ b/src/pip/_internal/operations/build/build_tracker.py @@ -3,9 +3,8 @@ import logging import os from types import TracebackType -from typing import Dict, Generator, Optional, Set, Type, Union +from typing import Dict, Generator, Optional, Type, Union -from pip._internal.models.link import Link from pip._internal.req.req_install import InstallRequirement from pip._internal.utils.temp_dir import TempDirectory @@ -51,10 +50,22 @@ def get_build_tracker() -> Generator["BuildTracker", None, None]: yield tracker +class TrackerId(str): + """Uniquely identifying string provided to the build tracker.""" + + class BuildTracker: + """Ensure that an sdist cannot request itself as a setup requirement. + + When an sdist is prepared, it identifies its setup requirements in the + context of ``BuildTracker.track()``. If a requirement shows up recursively, this + raises an exception. 
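The guard reduces to a re-entrancy check keyed on the build. A toy version of the idea (assumed shape only, not pip's implementation, which also persists entries to disk so parallel builds can see each other):

import contextlib
from typing import Iterator, Set

_building: Set[str] = set()

@contextlib.contextmanager
def track(key: str) -> Iterator[None]:
    if key in _building:  # already seen in a parent invocation
        raise LookupError(f"{key} is already being built")
    _building.add(key)
    try:
        yield
    finally:
        _building.discard(key)

with track("pkg-a==1.0"):
    try:
        with track("pkg-a==1.0"):  # a recursive self-build
            pass
    except LookupError:
        pass  # the cycle is refused instead of recursing forever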
+ + This stops fork bombs embedded in malicious packages.""" + def __init__(self, root: str) -> None: self._root = root - self._entries: Set[InstallRequirement] = set() + self._entries: Dict[TrackerId, InstallRequirement] = {} logger.debug("Created build tracker: %s", self._root) def __enter__(self) -> "BuildTracker": @@ -69,16 +80,15 @@ def __exit__( ) -> None: self.cleanup() - def _entry_path(self, link: Link) -> str: - hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest() + def _entry_path(self, key: TrackerId) -> str: + hashed = hashlib.sha224(key.encode()).hexdigest() return os.path.join(self._root, hashed) - def add(self, req: InstallRequirement) -> None: + def add(self, req: InstallRequirement, key: TrackerId) -> None: """Add an InstallRequirement to build tracking.""" - assert req.link # Get the file to write information about this requirement. - entry_path = self._entry_path(req.link) + entry_path = self._entry_path(key) # Try reading from the file. If it exists and can be read from, a build # is already in progress, so a LookupError is raised. @@ -88,37 +98,41 @@ def add(self, req: InstallRequirement) -> None: except FileNotFoundError: pass else: - message = "{} is already being built: {}".format(req.link, contents) + message = f"{req.link} is already being built: {contents}" raise LookupError(message) # If we're here, req should really not be building already. - assert req not in self._entries + assert key not in self._entries # Start tracking this requirement. with open(entry_path, "w", encoding="utf-8") as fp: fp.write(str(req)) - self._entries.add(req) + self._entries[key] = req logger.debug("Added %s to build tracker %r", req, self._root) - def remove(self, req: InstallRequirement) -> None: + def remove(self, req: InstallRequirement, key: TrackerId) -> None: """Remove an InstallRequirement from build tracking.""" - assert req.link - # Delete the created file and the corresponding entries. - os.unlink(self._entry_path(req.link)) - self._entries.remove(req) + # Delete the created file and the corresponding entry. + os.unlink(self._entry_path(key)) + del self._entries[key] logger.debug("Removed %s from build tracker %r", req, self._root) def cleanup(self) -> None: - for req in set(self._entries): - self.remove(req) + for key, req in list(self._entries.items()): + self.remove(req, key) logger.debug("Removed build tracker: %r", self._root) @contextlib.contextmanager - def track(self, req: InstallRequirement) -> Generator[None, None, None]: - self.add(req) + def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]: + """Ensure that `key` cannot install itself as a setup requirement. 
+ + :raises LookupError: If `key` was already provided in a parent invocation of + the context introduced by this method.""" + tracker_id = TrackerId(key) + self.add(req, tracker_id) yield - self.remove(req) + self.remove(req, tracker_id) diff --git a/src/pip/_internal/operations/build/metadata_legacy.py b/src/pip/_internal/operations/build/metadata_legacy.py index e60988d643e..c01dd1c678a 100644 --- a/src/pip/_internal/operations/build/metadata_legacy.py +++ b/src/pip/_internal/operations/build/metadata_legacy.py @@ -27,7 +27,7 @@ def _find_egg_info(directory: str) -> str: if len(filenames) > 1: raise InstallationError( - "More than one .egg-info directory found in {}".format(directory) + f"More than one .egg-info directory found in {directory}" ) return os.path.join(directory, filenames[0]) diff --git a/src/pip/_internal/operations/build/wheel_legacy.py b/src/pip/_internal/operations/build/wheel_legacy.py index c5f0492ccbe..3ee2a7058d3 100644 --- a/src/pip/_internal/operations/build/wheel_legacy.py +++ b/src/pip/_internal/operations/build/wheel_legacy.py @@ -40,16 +40,16 @@ def get_legacy_build_wheel_path( # Sort for determinism. names = sorted(names) if not names: - msg = ("Legacy build of wheel for {!r} created no files.\n").format(name) + msg = f"Legacy build of wheel for {name!r} created no files.\n" msg += format_command_result(command_args, command_output) logger.warning(msg) return None if len(names) > 1: msg = ( - "Legacy build of wheel for {!r} created more than one file.\n" - "Filenames (choosing first): {}\n" - ).format(name, names) + f"Legacy build of wheel for {name!r} created more than one file.\n" + f"Filenames (choosing first): {names}\n" + ) msg += format_command_result(command_args, command_output) logger.warning(msg) diff --git a/src/pip/_internal/operations/check.py b/src/pip/_internal/operations/check.py index e3bce69b204..90c6a58a55e 100644 --- a/src/pip/_internal/operations/check.py +++ b/src/pip/_internal/operations/check.py @@ -5,12 +5,15 @@ from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.specifiers import LegacySpecifier from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import LegacyVersion from pip._internal.distributions import make_distribution_for_install_requirement from pip._internal.metadata import get_default_environment from pip._internal.metadata.base import DistributionVersion from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.deprecation import deprecated logger = logging.getLogger(__name__) @@ -57,6 +60,8 @@ def check_package_set( package name and returns a boolean. """ + warn_legacy_versions_and_specifiers(package_set) + missing = {} conflicting = {} @@ -147,3 +152,36 @@ def _create_whitelist( break return packages_affected + + +def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None: + for project_name, package_details in package_set.items(): + if isinstance(package_details.version, LegacyVersion): + deprecated( + reason=( + f"{project_name} {package_details.version} " + f"has a non-standard version number." 
+ ), + replacement=( + f"to upgrade to a newer version of {project_name} " + f"or contact the author to suggest that they " + f"release a version with a conforming version number" + ), + issue=12063, + gone_in="24.1", + ) + for dep in package_details.dependencies: + if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier): + deprecated( + reason=( + f"{project_name} {package_details.version} " + f"has a non-standard dependency specifier {dep}." + ), + replacement=( + f"to upgrade to a newer version of {project_name} " + f"or contact the author to suggest that they " + f"release a version with conforming dependency specifiers" + ), + issue=12063, + gone_in="24.1", + ) diff --git a/src/pip/_internal/operations/install/editable_legacy.py b/src/pip/_internal/operations/install/editable_legacy.py index bebe24e6d3a..9aaa699a645 100644 --- a/src/pip/_internal/operations/install/editable_legacy.py +++ b/src/pip/_internal/operations/install/editable_legacy.py @@ -1,5 +1,6 @@ """Legacy editable installation process, i.e. `setup.py develop`. """ + import logging from typing import Optional, Sequence diff --git a/src/pip/_internal/operations/install/wheel.py b/src/pip/_internal/operations/install/wheel.py index a8cd1330f0f..a02a193d226 100644 --- a/src/pip/_internal/operations/install/wheel.py +++ b/src/pip/_internal/operations/install/wheel.py @@ -28,6 +28,7 @@ List, NewType, Optional, + Protocol, Sequence, Set, Tuple, @@ -50,7 +51,7 @@ from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl from pip._internal.models.scheme import SCHEME_KEYS, Scheme from pip._internal.utils.filesystem import adjacent_tmp_file, replace -from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file, partition +from pip._internal.utils.misc import StreamWrapper, ensure_dir, hash_file, partition from pip._internal.utils.unpacking import ( current_umask, is_within_directory, @@ -60,7 +61,6 @@ from pip._internal.utils.wheel import parse_wheel if TYPE_CHECKING: - from typing import Protocol class File(Protocol): src_record_path: "RecordPath" @@ -164,16 +164,14 @@ def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]: for parent_dir, dir_scripts in warn_for.items(): sorted_scripts: List[str] = sorted(dir_scripts) if len(sorted_scripts) == 1: - start_text = "script {} is".format(sorted_scripts[0]) + start_text = f"script {sorted_scripts[0]} is" else: start_text = "scripts {} are".format( ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] ) msg_lines.append( - "The {} installed in '{}' which is not on PATH.".format( - start_text, parent_dir - ) + f"The {start_text} installed in '{parent_dir}' which is not on PATH." ) last_line_fmt = ( @@ -267,9 +265,9 @@ def get_csv_rows_for_installed( path = _fs_to_record_path(f, lib_dir) digest, length = rehash(f) installed_rows.append((path, digest, length)) - for installed_record_path in installed.values(): - installed_rows.append((installed_record_path, "", "")) - return installed_rows + return installed_rows + [ + (installed_record_path, "", "") for installed_record_path in installed.values() + ] def get_console_script_specs(console: Dict[str, str]) -> List[str]: @@ -290,17 +288,15 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]: # the wheel metadata at build time, and so if the wheel is installed with # a *different* version of Python the entry points will be wrong.
The # correct fix for this is to enhance the metadata to be able to describe - # such versioned entry points, but that won't happen till Metadata 2.0 is - # available. - # In the meantime, projects using versioned entry points will either have + # such versioned entry points. + # Currently, projects using versioned entry points will either have # incorrect versioned entry points, or they will not be able to distribute # "universal" wheels (i.e., they will need a wheel per Python version). # # Because setuptools and pip are bundled with _ensurepip and virtualenv, - # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we + # we need to use universal wheels. As a workaround, we # override the versioned entry points in the wheel and generate the - # correct ones. This code is purely a short-term measure until Metadata 2.0 - # is available. + # correct ones. # # To add the level of hack in this section of code, in order to support # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment @@ -321,9 +317,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]: scripts_to_generate.append("pip = " + pip_script) if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": - scripts_to_generate.append( - "pip{} = {}".format(sys.version_info[0], pip_script) - ) + scripts_to_generate.append(f"pip{sys.version_info[0]} = {pip_script}") scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}") # Delete any other versioned pip entry points @@ -336,9 +330,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]: scripts_to_generate.append("easy_install = " + easy_install_script) scripts_to_generate.append( - "easy_install-{} = {}".format( - get_major_minor_version(), easy_install_script - ) + f"easy_install-{get_major_minor_version()} = {easy_install_script}" ) # Delete any other versioned easy_install entry points easy_install_ep = [ @@ -408,10 +400,10 @@ def save(self) -> None: class MissingCallableSuffix(InstallationError): def __init__(self, entry_point: str) -> None: super().__init__( - "Invalid script entry point: {} - A callable " + f"Invalid script entry point: {entry_point} - A callable " "suffix is required. Cf https://packaging.python.org/" "specifications/entry-points/#use-for-scripts for more " - "information.".format(entry_point) + "information." ) @@ -513,9 +505,9 @@ def make_data_scheme_file(record_path: RecordPath) -> "File": _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2) except ValueError: message = ( - "Unexpected file in {}: {!r}. .data directory contents" - " should be named like: '<scheme key>/<path>'." - ).format(wheel_path, record_path) + f"Unexpected file in {wheel_path}: {record_path!r}. .data directory" + " contents should be named like: '<scheme key>/<path>'." + ) raise InstallationError(message) try: @@ -523,10 +515,11 @@ def make_data_scheme_file(record_path: RecordPath) -> "File": except KeyError: valid_scheme_keys = ", ".join(sorted(scheme_paths)) message = ( - "Unknown scheme key used in {}: {} (for file {!r}). .data" - " directory contents should be in subdirectories named" - " with a valid scheme key ({})" - ).format(wheel_path, scheme_key, record_path, valid_scheme_keys) + f"Unknown scheme key used in {wheel_path}: {scheme_key} " + f"(for file {record_path!r}). 
.data directory contents " + f"should be in subdirectories named with a valid scheme " + f"key ({valid_scheme_keys})" + ) raise InstallationError(message) dest_path = os.path.join(scheme_path, dest_subpath) @@ -610,7 +603,9 @@ def pyc_output_path(path: str) -> str: # Compile all of the pyc files for the installed files if pycompile: - with captured_stdout() as stdout: + with contextlib.redirect_stdout( + StreamWrapper.from_stream(sys.stdout) + ) as stdout: with warnings.catch_warnings(): warnings.filterwarnings("ignore") for path in pyc_source_file_paths(): @@ -712,7 +707,7 @@ def req_error_context(req_description: str) -> Generator[None, None, None]: try: yield except InstallationError as e: - message = "For req: {}. {}".format(req_description, e.args[0]) + message = f"For req: {req_description}. {e.args[0]}" raise InstallationError(message) from e diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 2273315234d..e6aa3447200 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -4,10 +4,11 @@ # The following comment should be removed at some point in the future. # mypy: strict-optional=False -import logging import mimetypes import os import shutil +from dataclasses import dataclass +from pathlib import Path from typing import Dict, Iterable, List, Optional from pip._vendor.packaging.utils import canonicalize_name @@ -21,7 +22,6 @@ InstallationError, MetadataInconsistent, NetworkConnectionError, - PreviousBuildDirError, VcsHashUnsupported, ) from pip._internal.index.package_finder import PackageFinder @@ -37,6 +37,7 @@ from pip._internal.network.session import PipSession from pip._internal.operations.build.build_tracker import BuildTracker from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils._log import getLogger from pip._internal.utils.direct_url_helpers import ( direct_url_for_editable, direct_url_from_link, @@ -47,13 +48,13 @@ display_path, hash_file, hide_url, - is_installable_dir, + redact_auth_from_requirement, ) from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.unpacking import unpack_file from pip._internal.vcs import vcs -logger = logging.getLogger(__name__) +logger = getLogger(__name__) def _get_prepared_distribution( @@ -65,10 +66,12 @@ def _get_prepared_distribution( ) -> BaseDistribution: """Prepare a distribution for installation.""" abstract_dist = make_distribution_for_install_requirement(req) - with build_tracker.track(req): - abstract_dist.prepare_distribution_metadata( - finder, build_isolation, check_build_deps - ) + tracker_id = abstract_dist.build_tracker_id + if tracker_id is not None: + with build_tracker.track(req, tracker_id): + abstract_dist.prepare_distribution_metadata( + finder, build_isolation, check_build_deps + ) return abstract_dist.get_metadata_distribution() @@ -78,13 +81,14 @@ def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None: vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity) +@dataclass class File: - def __init__(self, path: str, content_type: Optional[str]) -> None: - self.path = path - if content_type is None: - self.content_type = mimetypes.guess_type(path)[0] - else: - self.content_type = content_type + path: str + content_type: Optional[str] = None + + def __post_init__(self) -> None: + if self.content_type is None: + self.content_type = mimetypes.guess_type(self.path)[0] def get_http_url( @@ -226,6 +230,7 @@ def __init__( use_user_site: bool, 
lazy_wheel: bool, verbosity: int, + legacy_resolver: bool, ) -> None: super().__init__() @@ -259,6 +264,9 @@ def __init__( # How verbose should underlying tooling be? self.verbosity = verbosity + # Are we using the legacy resolver? + self.legacy_resolver = legacy_resolver + # Memoized downloaded files, as mapping of url: path. self._downloaded: Dict[str, str] = {} @@ -272,7 +280,7 @@ def _log_preparing_link(self, req: InstallRequirement) -> None: information = str(display_path(req.link.file_path)) else: message = "Collecting %s" - information = str(req.req or req) + information = redact_auth_from_requirement(req.req) if req.req else str(req) # If we used req.req, inject requirement source if available (this # would already be included if we used req directly) @@ -313,21 +321,7 @@ def _ensure_link_req_src_dir( autodelete=True, parallel_builds=parallel_builds, ) - - # If a checkout exists, it's unwise to keep going. version - # inconsistencies are logged later, but do not fail the - # installation. - # FIXME: this won't upgrade when there's an existing - # package unpacked in `req.source_dir` - # TODO: this check is now probably dead code - if is_installable_dir(req.source_dir): - raise PreviousBuildDirError( - "pip can't proceed with requirements '{}' due to a" - "pre-existing build directory ({}). This is likely " - "due to a previous installation that failed . pip is " - "being responsible and not assuming it can delete this. " - "Please delete it and try again.".format(req, req.source_dir) - ) + req.ensure_pristine_source_checkout() def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes: # By the time this is called, the requirement's link should have @@ -352,7 +346,7 @@ def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes: # a surprising hash mismatch in the future. # file:/// URLs aren't pinnable, so don't complain about them # not being pinned. - if req.original_link is None and not req.is_pinned: + if not req.is_direct and not req.is_pinned: raise HashUnpinned() # If known-good hashes are missing for this requirement, @@ -365,6 +359,11 @@ def _fetch_metadata_only( self, req: InstallRequirement, ) -> Optional[BaseDistribution]: + if self.legacy_resolver: + logger.debug( + "Metadata-only fetching is not used in the legacy resolver", + ) + return None if self.require_hashes: logger.debug( "Metadata-only fetching is not used as hash checking is required", @@ -385,7 +384,7 @@ def _fetch_metadata_using_link_data_attr( if metadata_link is None: return None assert req.req is not None - logger.info( + logger.verbose( "Obtaining dependency information for %s from %s", req.req, metadata_link, @@ -410,7 +409,7 @@ def _fetch_metadata_using_link_data_attr( # NB: raw_name will fall back to the name from the install requirement if # the Name: field is not present, but it's noted in the raw_name docstring # that that should NEVER happen anyway. - if metadata_dist.raw_name != req.req.name: + if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name): raise MetadataInconsistent( req, "Name", req.req.name, metadata_dist.raw_name ) @@ -470,7 +469,19 @@ def _complete_partial_requirements( for link, (filepath, _) in batch_download: logger.debug("Downloading link %s to %s", link, filepath) req = links_to_fully_download[link] + # Record the downloaded file path so wheel reqs can extract a Distribution + # in .get_dist(). req.local_file_path = filepath + # Record that the file is downloaded so we don't do it again in + # _prepare_linked_requirement(). 
+ self._downloaded[req.link.url] = filepath + + # If this is an sdist, we need to unpack it after downloading, but the + # .source_dir won't be set up until we are in _prepare_linked_requirement(). + # Add the downloaded archive to the install requirement to unpack after + # preparing the source dir. + if not req.is_wheel: + req.needs_unpacked_archive(Path(filepath)) # This step is necessary to ensure all lazy wheels are processed # successfully by the 'download', 'wheel', and 'install' commands. @@ -594,8 +605,8 @@ def _prepare_linked_requirement( ) except NetworkConnectionError as exc: raise InstallationError( - "Could not install requirement {} because of HTTP " - "error {} for URL {}".format(req, exc, link) + f"Could not install requirement {req} because of HTTP " + f"error {exc} for URL {link}" ) else: file_path = self._downloaded[link.url] @@ -675,9 +686,9 @@ def prepare_editable_requirement( with indent_log(): if self.require_hashes: raise InstallationError( - "The editable requirement {} cannot be installed when " + f"The editable requirement {req} cannot be installed when " "requiring hashes, because there is no single file to " - "hash.".format(req) + "hash." ) req.ensure_has_source_dir(self.src_dir) req.update_editable() @@ -705,7 +716,7 @@ def prepare_installed_requirement( assert req.satisfied_by, "req should have been satisfied but isn't" assert skip_reason is not None, ( "did not get skip reason skipped but req.satisfied_by " - "is set to {}".format(req.satisfied_by) + f"is set to {req.satisfied_by}" ) logger.info( "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version diff --git a/src/pip/_internal/pyproject.py b/src/pip/_internal/pyproject.py index eb8e12b2dec..8de36b873ed 100644 --- a/src/pip/_internal/pyproject.py +++ b/src/pip/_internal/pyproject.py @@ -123,7 +123,7 @@ def load_pyproject_toml( # a version of setuptools that supports that backend. build_system = { - "requires": ["setuptools>=40.8.0", "wheel"], + "requires": ["setuptools>=40.8.0"], "build-backend": "setuptools.build_meta:__legacy__", } diff --git a/src/pip/_internal/req/__init__.py b/src/pip/_internal/req/__init__.py index 16de903a44c..422d851d729 100644 --- a/src/pip/_internal/req/__init__.py +++ b/src/pip/_internal/req/__init__.py @@ -1,5 +1,6 @@ import collections import logging +from dataclasses import dataclass from typing import Generator, List, Optional, Sequence, Tuple from pip._internal.utils.logging import indent_log @@ -18,12 +19,9 @@ logger = logging.getLogger(__name__) +@dataclass(frozen=True) class InstallationResult: - def __init__(self, name: str) -> None: - self.name = name - - def __repr__(self) -> str: - return f"InstallationResult(name={self.name!r})" + name: str def _validate_requirements( diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index c5ca2d85d51..36f517e599d 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -8,10 +8,12 @@ InstallRequirement. 
""" +import copy import logging import os import re -from typing import Dict, List, Optional, Set, Tuple, Union +from dataclasses import dataclass +from typing import Collection, Dict, List, Optional, Set, Tuple, Union from pip._vendor.packaging.markers import Marker from pip._vendor.packaging.requirements import InvalidRequirement, Requirement @@ -57,6 +59,31 @@ def convert_extras(extras: Optional[str]) -> Set[str]: return get_requirement("placeholder" + extras.lower()).extras +def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requirement: + """ + Returns a new requirement based on the given one, with the supplied extras. If the + given requirement already has extras those are replaced (or dropped if no new extras + are given). + """ + match: Optional[re.Match[str]] = re.fullmatch( + # see https://peps.python.org/pep-0508/#complete-grammar + r"([\w\t .-]+)(\[[^\]]*\])?(.*)", + str(req), + flags=re.ASCII, + ) + # ireq.req is a valid requirement so the regex should always match + assert ( + match is not None + ), f"regex match on requirement {req} failed, this should never happen" + pre: Optional[str] = match.group(1) + post: Optional[str] = match.group(3) + assert ( + pre is not None and post is not None + ), f"regex group selection for requirement {req} failed, this should never happen" + extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else "" + return Requirement(f"{pre}{extras}{post}") + + def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]: """Parses an editable requirement into: - a requirement name @@ -106,8 +133,8 @@ def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]: package_name = link.egg_fragment if not package_name: raise InstallationError( - "Could not detect requirement name for '{}', please specify one " - "with #egg=your_package_name".format(editable_req) + f"Could not detect requirement name for '{editable_req}', " + "please specify one with #egg=your_package_name" ) return package_name, url, set() @@ -165,18 +192,12 @@ def deduce_helpful_msg(req: str) -> str: return msg +@dataclass(frozen=True) class RequirementParts: - def __init__( - self, - requirement: Optional[Requirement], - link: Optional[Link], - markers: Optional[Marker], - extras: Set[str], - ): - self.requirement = requirement - self.link = link - self.markers = markers - self.extras = extras + requirement: Optional[Requirement] + link: Optional[Link] + markers: Optional[Marker] + extras: Set[str] def parse_req_from_editable(editable_req: str) -> RequirementParts: @@ -436,7 +457,7 @@ def install_req_from_req_string( raise InstallationError( "Packages installed from PyPI cannot depend on packages " "which are not also hosted on PyPI.\n" - "{} depends on {} ".format(comes_from.name, req) + f"{comes_from.name} depends on {req} " ) return InstallRequirement( @@ -504,3 +525,47 @@ def install_req_from_link_and_ireq( config_settings=ireq.config_settings, user_supplied=ireq.user_supplied, ) + + +def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement: + """ + Creates a new InstallationRequirement using the given template but without + any extras. Sets the original requirement as the new one's parent + (comes_from). 
+ """ + return InstallRequirement( + req=( + _set_requirement_extras(ireq.req, set()) if ireq.req is not None else None + ), + comes_from=ireq, + editable=ireq.editable, + link=ireq.link, + markers=ireq.markers, + use_pep517=ireq.use_pep517, + isolated=ireq.isolated, + global_options=ireq.global_options, + hash_options=ireq.hash_options, + constraint=ireq.constraint, + extras=[], + config_settings=ireq.config_settings, + user_supplied=ireq.user_supplied, + permit_editable_wheels=ireq.permit_editable_wheels, + ) + + +def install_req_extend_extras( + ireq: InstallRequirement, + extras: Collection[str], +) -> InstallRequirement: + """ + Returns a copy of an installation requirement with some additional extras. + Makes a shallow copy of the ireq object. + """ + result = copy.copy(ireq) + result.extras = {*ireq.extras, *extras} + result.req = ( + _set_requirement_extras(ireq.req, result.extras) + if ireq.req is not None + else None + ) + return result diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py index f717c1ccc79..53ad8674cd8 100644 --- a/src/pip/_internal/req/req_file.py +++ b/src/pip/_internal/req/req_file.py @@ -17,6 +17,7 @@ Generator, Iterable, List, + NoReturn, Optional, Tuple, ) @@ -24,17 +25,11 @@ from pip._internal.cli import cmdoptions from pip._internal.exceptions import InstallationError, RequirementsFileParseError from pip._internal.models.search_scope import SearchScope -from pip._internal.network.session import PipSession -from pip._internal.network.utils import raise_for_status from pip._internal.utils.encoding import auto_decode -from pip._internal.utils.urls import get_url_scheme if TYPE_CHECKING: - # NoReturn introduced in 3.6.2; imported only for type checking to maintain - # pip compatibility with older patch versions of Python 3.6 - from typing import NoReturn - from pip._internal.index.package_finder import PackageFinder + from pip._internal.network.session import PipSession __all__ = ["parse_requirements"] @@ -75,8 +70,16 @@ cmdoptions.config_settings, ] +SUPPORTED_OPTIONS_EDITABLE_REQ: List[Callable[..., optparse.Option]] = [ + cmdoptions.config_settings, +] + + # the 'dest' string values SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ] +SUPPORTED_OPTIONS_EDITABLE_REQ_DEST = [ + str(o().dest) for o in SUPPORTED_OPTIONS_EDITABLE_REQ +] logger = logging.getLogger(__name__) @@ -128,7 +131,7 @@ def __init__( def parse_requirements( filename: str, - session: PipSession, + session: "PipSession", finder: Optional["PackageFinder"] = None, options: Optional[optparse.Values] = None, constraint: bool = False, @@ -178,31 +181,25 @@ def handle_requirement_line( assert line.is_requirement + # get the options that apply to requirements if line.is_editable: - # For editable requirements, we don't support per-requirement - # options, so just return the parsed requirement. 
- return ParsedRequirement( - requirement=line.requirement, - is_editable=line.is_editable, - comes_from=line_comes_from, - constraint=line.constraint, - ) + supported_dest = SUPPORTED_OPTIONS_EDITABLE_REQ_DEST else: - # get the options that apply to requirements - req_options = {} - for dest in SUPPORTED_OPTIONS_REQ_DEST: - if dest in line.opts.__dict__ and line.opts.__dict__[dest]: - req_options[dest] = line.opts.__dict__[dest] - - line_source = f"line {line.lineno} of {line.filename}" - return ParsedRequirement( - requirement=line.requirement, - is_editable=line.is_editable, - comes_from=line_comes_from, - constraint=line.constraint, - options=req_options, - line_source=line_source, - ) + supported_dest = SUPPORTED_OPTIONS_REQ_DEST + req_options = {} + for dest in supported_dest: + if dest in line.opts.__dict__ and line.opts.__dict__[dest]: + req_options[dest] = line.opts.__dict__[dest] + + line_source = f"line {line.lineno} of {line.filename}" + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + options=req_options, + line_source=line_source, + ) def handle_option_line( @@ -211,7 +208,7 @@ def handle_option_line( lineno: int, finder: Optional["PackageFinder"] = None, options: Optional[optparse.Values] = None, - session: Optional[PipSession] = None, + session: Optional["PipSession"] = None, ) -> None: if opts.hashes: logger.warning( @@ -279,7 +276,7 @@ def handle_line( line: ParsedLine, options: Optional[optparse.Values] = None, finder: Optional["PackageFinder"] = None, - session: Optional[PipSession] = None, + session: Optional["PipSession"] = None, ) -> Optional[ParsedRequirement]: """Handle a single parsed requirements line; This can result in creating/yielding requirements, or updating the finder. @@ -322,7 +319,7 @@ def handle_line( class RequirementsFileParser: def __init__( self, - session: PipSession, + session: "PipSession", line_parser: LineParser, ) -> None: self._session = session @@ -527,7 +524,7 @@ def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines: yield line_number, line -def get_file_content(url: str, session: PipSession) -> Tuple[str, str]: +def get_file_content(url: str, session: "PipSession") -> Tuple[str, str]: """Gets the content of a file; it may be a filename, file: URL, or http: URL. Returns (location, content). Content is unicode. Respects # -*- coding: declarations on the retrieved files. @@ -535,10 +532,12 @@ def get_file_content(url: str, session: PipSession) -> Tuple[str, str]: :param url: File path or url. :param session: PipSession instance. """ - scheme = get_url_scheme(url) - + scheme = urllib.parse.urlsplit(url).scheme # Pip has special support for file:// URLs (LocalFSAdapter). if scheme in ["http", "https", "file"]: + # Delay importing heavy network modules until absolutely necessary. + from pip._internal.network.utils import raise_for_status + resp = session.get(url) raise_for_status(resp) return resp.url, resp.text diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index d01b24a9189..7d527959e81 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. 
-# mypy: strict-optional=False - import functools import logging import os @@ -9,6 +6,7 @@ import uuid import zipfile from optparse import Values +from pathlib import Path from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union from pip._vendor.packaging.markers import Marker @@ -20,7 +18,7 @@ from pip._vendor.pyproject_hooks import BuildBackendHookCaller from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment -from pip._internal.exceptions import InstallationError +from pip._internal.exceptions import InstallationError, PreviousBuildDirError from pip._internal.locations import get_scheme from pip._internal.metadata import ( BaseDistribution, @@ -50,11 +48,14 @@ backup_dir, display_path, hide_url, + is_installable_dir, + redact_auth_from_requirement, redact_auth_from_url, ) from pip._internal.utils.packaging import safe_extra from pip._internal.utils.subprocess import runner_with_spinner_message from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds +from pip._internal.utils.unpacking import unpack_file from pip._internal.utils.virtualenv import running_under_virtualenv from pip._internal.vcs import vcs @@ -104,6 +105,8 @@ def __init__( if link.is_file: self.source_dir = os.path.normpath(os.path.abspath(link.file_path)) + # original_link is the direct URL that was provided by the user for the + # requirement, either directly or via a constraints file. if link is None and req and req.url: # PEP 508 URL requirement link = Link(req.url) @@ -126,7 +129,7 @@ def __init__( if extras: self.extras = extras elif req: - self.extras = {safe_extra(extra) for extra in req.extras} + self.extras = req.extras else: self.extras = set() if markers is None and req: @@ -178,14 +181,27 @@ def __init__( # but after loading this flag should be treated as read only. self.use_pep517 = use_pep517 + # If config settings are provided, enforce PEP 517. + if self.config_settings: + if self.use_pep517 is False: + logger.warning( + "--no-use-pep517 ignored for %s " + "because --config-settings are specified.", + self, + ) + self.use_pep517 = True + # This requirement needs more preparation before it can be built self.needs_more_preparation = False + # This requirement needs to be unpacked before it can be installed. 
+ self._archive_source: Optional[Path] = None + def __str__(self) -> str: if self.req: - s = str(self.req) + s = redact_auth_from_requirement(self.req) if self.link: - s += " from {}".format(redact_auth_from_url(self.link.url)) + s += f" from {redact_auth_from_url(self.link.url)}" elif self.link: s = redact_auth_from_url(self.link.url) else: @@ -206,8 +222,9 @@ def __str__(self) -> str: return s def __repr__(self) -> str: - return "<{} object: {} editable={!r}>".format( - self.__class__.__name__, str(self), self.editable + return ( + f"<{self.__class__.__name__} object: " + f"{str(self)} editable={self.editable!r}>" ) def format_debug(self) -> str: @@ -215,7 +232,7 @@ def format_debug(self) -> str: attributes = vars(self) names = sorted(attributes) - state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)) + state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names)) return "<{name} object: {{{state}}}>".format( name=self.__class__.__name__, state=", ".join(state), @@ -228,7 +245,7 @@ def name(self) -> Optional[str]: return None return self.req.name - @functools.lru_cache() # use cached_property in python 3.8+ + @functools.cached_property def supports_pyproject_editable(self) -> bool: if not self.use_pep517: return False @@ -242,15 +259,22 @@ def supports_pyproject_editable(self) -> bool: @property def specifier(self) -> SpecifierSet: + assert self.req is not None return self.req.specifier + @property + def is_direct(self) -> bool: + """Whether this requirement was specified as a direct URL.""" + return self.original_link is not None + @property def is_pinned(self) -> bool: """Return whether I am pinned to an exact version. For example, some-package==1.2 is pinned; some-package>1.2 is not. """ - specifiers = self.specifier + assert self.req is not None + specifiers = self.req.specifier return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="} def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool: @@ -260,7 +284,12 @@ def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> boo extras_requested = ("",) if self.markers is not None: return any( - self.markers.evaluate({"extra": extra}) for extra in extras_requested + self.markers.evaluate({"extra": extra}) + # TODO: Remove these two variants when packaging is upgraded to + # support the marker comparison logic specified in PEP 685. + or self.markers.evaluate({"extra": safe_extra(extra)}) + or self.markers.evaluate({"extra": canonicalize_name(extra)}) + for extra in extras_requested ) else: return True @@ -293,11 +322,12 @@ def hashes(self, trust_internet: bool = True) -> Hashes: good_hashes = self.hash_options.copy() if trust_internet: link = self.link - elif self.original_link and self.user_supplied: + elif self.is_direct and self.user_supplied: link = self.original_link else: link = None if link and link.hash: + assert link.hash_name is not None good_hashes.setdefault(link.hash_name, []).append(link.hash) return Hashes(good_hashes) @@ -307,6 +337,7 @@ def from_path(self) -> Optional[str]: return None s = str(self.req) if self.comes_from: + comes_from: Optional[str] if isinstance(self.comes_from, str): comes_from = self.comes_from else: @@ -338,7 +369,7 @@ def ensure_build_location( # When parallel builds are enabled, add a UUID to the build directory # name so multiple builds do not interfere with each other. 
- dir_name: str = canonicalize_name(self.name) + dir_name: str = canonicalize_name(self.req.name) if parallel_builds: dir_name = f"{dir_name}_{uuid.uuid4().hex}" @@ -381,6 +412,7 @@ def _set_requirement(self) -> None: ) def warn_on_mismatching_name(self) -> None: + assert self.req is not None metadata_name = canonicalize_name(self.metadata["Name"]) if canonicalize_name(self.req.name) == metadata_name: # Everything is fine. @@ -450,6 +482,7 @@ def is_wheel_from_cache(self) -> bool: # Things valid for sdists @property def unpacked_source_directory(self) -> str: + assert self.source_dir, f"No source dir for {self}" return os.path.join( self.source_dir, self.link and self.link.subdirectory_fragment or "" ) @@ -486,15 +519,7 @@ def load_pyproject_toml(self) -> None: ) if pyproject_toml_data is None: - if self.config_settings: - deprecated( - reason=f"Config settings are ignored for project {self}.", - replacement=( - "to use --use-pep517 or add a " - "pyproject.toml file to the project" - ), - gone_in="23.3", - ) + assert not self.config_settings self.use_pep517 = False return @@ -518,7 +543,7 @@ def isolated_editable_sanity_check(self) -> None: if ( self.editable and self.use_pep517 - and not self.supports_pyproject_editable() + and not self.supports_pyproject_editable and not os.path.isfile(self.setup_py_path) and not os.path.isfile(self.setup_cfg_path) ): @@ -536,7 +561,7 @@ def prepare_metadata(self) -> None: Under PEP 517 and PEP 660, call the backend hook to prepare the metadata. Under legacy processing, call setup.py egg-info. """ - assert self.source_dir + assert self.source_dir, f"No source dir for {self}" details = self.name or f"from {self.link}" if self.use_pep517: @@ -544,7 +569,7 @@ def prepare_metadata(self) -> None: if ( self.editable and self.permit_editable_wheels - and self.supports_pyproject_editable() + and self.supports_pyproject_editable ): self.metadata_directory = generate_editable_metadata( build_env=self.build_env, @@ -585,8 +610,10 @@ def get_dist(self) -> BaseDistribution: if self.metadata_directory: return get_directory_distribution(self.metadata_directory) elif self.local_file_path and self.is_wheel: + assert self.req is not None return get_wheel_distribution( - FilesystemWheel(self.local_file_path), canonicalize_name(self.name) + FilesystemWheel(self.local_file_path), + canonicalize_name(self.req.name), ) raise AssertionError( f"InstallRequirement {self} has no metadata directory and no wheel: " @@ -594,9 +621,9 @@ def get_dist(self) -> BaseDistribution: ) def assert_source_matches_version(self) -> None: - assert self.source_dir + assert self.source_dir, f"No source dir for {self}" version = self.metadata["version"] - if self.req.specifier and version not in self.req.specifier: + if self.req and self.req.specifier and version not in self.req.specifier: logger.warning( "Requested %s, but installing version %s", self, @@ -633,6 +660,27 @@ def ensure_has_source_dir( parallel_builds=parallel_builds, ) + def needs_unpacked_archive(self, archive_source: Path) -> None: + assert self._archive_source is None + self._archive_source = archive_source + + def ensure_pristine_source_checkout(self) -> None: + """Ensure the source directory has not yet been built in.""" + assert self.source_dir is not None + if self._archive_source is not None: + unpack_file(str(self._archive_source), self.source_dir) + elif is_installable_dir(self.source_dir): + # If a checkout exists, it's unwise to keep going. + # version inconsistencies are logged later, but do not fail + # the installation. 
+ raise PreviousBuildDirError( + f"pip can't proceed with requirements '{self}' due to a " + f"pre-existing build directory ({self.source_dir}). This is likely " + "due to a previous installation that failed. pip is " + "being responsible and not assuming it can delete this. " + "Please delete it and try again." + ) + # For editable installations def update_editable(self) -> None: if not self.link: @@ -689,9 +737,10 @@ def _clean_zip_name(name: str, prefix: str) -> str: name = name.replace(os.path.sep, "/") return name + assert self.req is not None path = os.path.join(parentdir, path) name = _clean_zip_name(path, rootdir) - return self.name + "/" + name + return self.req.name + "/" + name def archive(self, build_dir: Optional[str]) -> None: """Saves archive to provided build_dir. @@ -708,8 +757,8 @@ def archive(self, build_dir: Optional[str]) -> None: if os.path.exists(archive_path): response = ask_path_exists( - "The file {} exists. (i)gnore, (w)ipe, " - "(b)ackup, (a)bort ".format(display_path(archive_path)), + f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, " + "(b)ackup, (a)bort ", ("i", "w", "b", "a"), ) if response == "i": @@ -770,8 +819,9 @@ def install( use_user_site: bool = False, pycompile: bool = True, ) -> None: + assert self.req is not None scheme = get_scheme( - self.name, + self.req.name, user=use_user_site, home=home, root=root, @@ -780,12 +830,19 @@ def install( ) if self.editable and not self.is_wheel: + if self.config_settings: + logger.warning( + "--config-settings ignored for legacy editable install of %s. " + "Consider upgrading to a version of setuptools " + "that supports PEP 660 (>= 64).", + self, + ) install_editable_legacy( global_options=global_options if global_options is not None else [], prefix=prefix, home=home, use_user_site=use_user_site, - name=self.name, + name=self.req.name, setup_py_path=self.setup_py_path, isolated=self.isolated, build_env=self.build_env, @@ -798,13 +855,13 @@ def install( assert self.local_file_path install_wheel( - self.name, + self.req.name, self.local_file_path, scheme=scheme, req_description=str(self.req), pycompile=pycompile, warn_script_location=warn_script_location, - direct_url=self.download_info if self.original_link else None, + direct_url=self.download_info if self.is_direct else None, requested=self.user_supplied, ) self.install_succeeded = True @@ -858,7 +915,7 @@ def check_legacy_setup_py_options( reason="--build-option and --global-option are deprecated.", issue=11859, replacement="to use --config-settings", - gone_in="23.3", + gone_in="24.2", ) logger.warning( "Implying --no-binary=:all: due to the presence of " diff --git a/src/pip/_internal/req/req_set.py b/src/pip/_internal/req/req_set.py index ec7a6e07a25..bf36114e802 100644 --- a/src/pip/_internal/req/req_set.py +++ b/src/pip/_internal/req/req_set.py @@ -2,9 +2,12 @@ from collections import OrderedDict from typing import Dict, List +from pip._vendor.packaging.specifiers import LegacySpecifier from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import LegacyVersion from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.deprecation import deprecated logger = logging.getLogger(__name__) @@ -80,3 +83,37 @@ def requirements_to_install(self) -> List[InstallRequirement]: for install_req in self.all_requirements if not install_req.constraint and not install_req.satisfied_by ] + + def warn_legacy_versions_and_specifiers(self) -> None: + for req in self.requirements_to_install: + 
version = req.get_dist().version + if isinstance(version, LegacyVersion): + deprecated( + reason=( + f"pip has selected the non-standard version {version} " + f"of {req}. In the future this version will be " + f"ignored as it isn't standards compliant." + ), + replacement=( + "set or update constraints to select another version " + "or contact the package author to fix the version number" + ), + issue=12063, + gone_in="24.1", + ) + for dep in req.get_dist().iter_dependencies(): + if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier): + deprecated( + reason=( + f"pip has selected {req} {version} which has the non-standard " + f"dependency specifier {dep}. " + f"In the future this version of {req} will be " + f"ignored as it isn't standards compliant." + ), + replacement=( + "set or update constraints to select another version " + "or contact the package author to fix the version number" + ), + issue=12063, + gone_in="24.1", + ) diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index ad5178e76ff..3a9ae2b1097 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -71,16 +71,16 @@ def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]: entries = dist.iter_declared_entries() if entries is None: - msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist) + msg = f"Cannot uninstall {dist}, RECORD file not found." installer = dist.installer if not installer or installer == "pip": - dep = "{}=={}".format(dist.raw_name, dist.version) + dep = f"{dist.raw_name}=={dist.version}" msg += ( " You might be able to recover from this via: " - "'pip install --force-reinstall --no-deps {}'.".format(dep) + f"'pip install --force-reinstall --no-deps {dep}'." ) else: - msg += " Hint: The package was installed by {}.".format(installer) + msg += f" Hint: The package was installed by {installer}." raise UninstallationError(msg) for entry in entries: @@ -172,8 +172,7 @@ def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str folders.add(os.path.dirname(path)) files.add(path) - # probably this one https://github.com/python/mypy/issues/390 - _normcased_files = set(map(os.path.normcase, files)) # type: ignore + _normcased_files = set(map(os.path.normcase, files)) folders = compact(folders) @@ -274,7 +273,7 @@ def stash(self, path: str) -> str: def commit(self) -> None: """Commits the uninstall by removing stashed files.""" - for _, save_dir in self._save_dirs.items(): + for save_dir in self._save_dirs.values(): save_dir.cleanup() self._moves = [] self._save_dirs = {} @@ -316,7 +315,7 @@ def __init__(self, dist: BaseDistribution) -> None: # Create local cache of normalize_path results. Creating an UninstallPathSet # can result in hundreds/thousands of redundant calls to normalize_path with # the same args, which hurts performance. - self._normalize_path_cached = functools.lru_cache()(normalize_path) + self._normalize_path_cached = functools.lru_cache(normalize_path) def _permitted(self, path: str) -> bool: """ @@ -511,11 +510,9 @@ def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet": elif dist.installed_by_distutils: raise UninstallationError( - "Cannot uninstall {!r}. It is a distutils installed project " - "and thus we cannot accurately determine which files belong " - "to it which would lead to only a partial uninstall.".format( - dist.raw_name, - ) + f"Cannot uninstall {dist.raw_name!r}. 
It is a distutils installed " + "project and thus we cannot accurately determine which files belong " + "to it, which would lead to only a partial uninstall." ) elif dist.installed_as_egg: diff --git a/src/pip/_internal/resolution/legacy/resolver.py b/src/pip/_internal/resolution/legacy/resolver.py index b17b7e4530b..1dd0d7041bb 100644 --- a/src/pip/_internal/resolution/legacy/resolver.py +++ b/src/pip/_internal/resolution/legacy/resolver.py @@ -10,9 +10,6 @@ a. "first found, wins" (where the order is breadth first) """ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - import logging import sys from collections import defaultdict @@ -52,7 +49,7 @@ logger = logging.getLogger(__name__) -DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]] +DiscoveredDependencies = DefaultDict[Optional[str], List[InstallRequirement]] def _check_dist_requires_python( @@ -104,9 +101,8 @@ def _check_dist_requires_python( return raise UnsupportedPythonVersion( - "Package {!r} requires a different Python: {} not in {!r}".format( - dist.raw_name, version, requires_python - ) + f"Package {dist.raw_name!r} requires a different Python: " + f"{version} not in {requires_python!r}" ) @@ -231,9 +227,7 @@ def _add_requirement_to_set( tags = compatibility_tags.get_supported() if requirement_set.check_supported_wheels and not wheel.supported(tags): raise InstallationError( - "{} is not a supported wheel on this platform.".format( - wheel.filename - ) + f"{wheel.filename} is not a supported wheel on this platform." ) # This next bit is really a sanity check. @@ -248,9 +242,9 @@ def _add_requirement_to_set( return [install_req], None try: - existing_req: Optional[ - InstallRequirement - ] = requirement_set.get_requirement(install_req.name) + existing_req: Optional[InstallRequirement] = ( + requirement_set.get_requirement(install_req.name) + ) except KeyError: existing_req = None @@ -265,9 +259,8 @@ def _add_requirement_to_set( ) if has_conflicting_requirement: raise InstallationError( - "Double requirement given: {} (already in {}, name={!r})".format( - install_req, existing_req, install_req.name - ) + f"Double requirement given: {install_req} " + f"(already in {existing_req}, name={install_req.name!r})" ) # When no existing requirement exists, add the requirement as a @@ -287,9 +280,9 @@ def _add_requirement_to_set( ) if does_not_satisfy_constraint: raise InstallationError( - "Could not satisfy constraints for '{}': " + f"Could not satisfy constraints for '{install_req.name}': " "installation from path or url cannot be " - "constrained to a version".format(install_req.name) + "constrained to a version" ) # If we're now installing a constraint, mark the existing # object for real installation. @@ -325,6 +318,7 @@ def _set_req_to_reinstall(self, req: InstallRequirement) -> None: """ # Don't uninstall the conflict if doing a user install and the # conflict is not a user install. + assert req.satisfied_by is not None if not self.use_user_site or req.satisfied_by.in_usersite: req.should_reinstall = True req.satisfied_by = None @@ -398,9 +392,9 @@ def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]: # "UnicodeEncodeError: 'ascii' codec can't encode character" # in Python 2 when the reason contains non-ascii characters. 
"The candidate selected for download or install is a " - "yanked version: {candidate}\n" - "Reason for being yanked: {reason}" - ).format(candidate=best_candidate, reason=reason) + f"yanked version: {best_candidate}\n" + f"Reason for being yanked: {reason}" + ) logger.warning(msg) return link @@ -423,6 +417,8 @@ def _populate_link(self, req: InstallRequirement) -> None: if self.wheel_cache is None or self.preparer.require_hashes: return + + assert req.link is not None, "_find_requirement_link unexpectedly returned None" cache_entry = self.wheel_cache.get_cache_entry( link=req.link, package_name=req.name, @@ -536,6 +532,7 @@ def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None: with indent_log(): # We add req_to_install before its dependencies, so that we # can refer to it when adding dependencies. + assert req_to_install.name is not None if not requirement_set.has_requirement(req_to_install.name): # 'unnamed' requirements will get added here # 'unnamed' requirements can only come from being directly diff --git a/src/pip/_internal/resolution/resolvelib/base.py b/src/pip/_internal/resolution/resolvelib/base.py index b206692a0a9..4269c7fbecc 100644 --- a/src/pip/_internal/resolution/resolvelib/base.py +++ b/src/pip/_internal/resolution/resolvelib/base.py @@ -1,7 +1,8 @@ +from dataclasses import dataclass from typing import FrozenSet, Iterable, Optional, Tuple, Union from pip._vendor.packaging.specifiers import SpecifierSet -from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.utils import NormalizedName from pip._vendor.packaging.version import LegacyVersion, Version from pip._internal.models.link import Link, links_equivalent @@ -12,20 +13,18 @@ CandidateVersion = Union[LegacyVersion, Version] -def format_name(project: str, extras: FrozenSet[str]) -> str: +def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str: if not extras: return project - canonical_extras = sorted(canonicalize_name(e) for e in extras) - return "{}[{}]".format(project, ",".join(canonical_extras)) + extras_expr = ",".join(sorted(extras)) + return f"{project}[{extras_expr}]" +@dataclass(frozen=True) class Constraint: - def __init__( - self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link] - ) -> None: - self.specifier = specifier - self.hashes = hashes - self.links = links + specifier: SpecifierSet + hashes: Hashes + links: FrozenSet[Link] @classmethod def empty(cls) -> "Constraint": diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 31020e27ad1..9d15b8fda0a 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -159,10 +159,7 @@ def __str__(self) -> str: return f"{self.name} {self.version}" def __repr__(self) -> str: - return "{class_name}({link!r})".format( - class_name=self.__class__.__name__, - link=str(self._link), - ) + return f"{self.__class__.__name__}({str(self._link)!r})" def __hash__(self) -> int: return hash((self.__class__, self._link)) @@ -194,10 +191,9 @@ def version(self) -> CandidateVersion: return self._version def format_for_error(self) -> str: - return "{} {} (from {})".format( - self.name, - self.version, - self._link.file_path if self._link.is_file else self._link, + return ( + f"{self.name} {self.version} " + f"(from {self._link.file_path if self._link.is_file else self._link})" ) def _prepare_distribution(self) -> BaseDistribution: @@ 
-240,7 +236,7 @@ def _prepare(self) -> BaseDistribution: def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: requires = self.dist.iter_dependencies() if with_requires else () for r in requires: - yield self._factory.make_requirement_from_spec(str(r), self._ireq) + yield from self._factory.make_requirements_from_spec(str(r), self._ireq) yield self._factory.make_requires_python_requirement(self.dist.requires_python) def get_install_requirement(self) -> Optional[InstallRequirement]: @@ -272,9 +268,9 @@ def __init__( # Version may not be present for PEP 508 direct URLs if version is not None: wheel_version = Version(wheel.version) - assert version == wheel_version, "{!r} != {!r} for wheel {}".format( - version, wheel_version, name - ) + assert ( + version == wheel_version + ), f"{version!r} != {wheel_version!r} for wheel {name}" if cache_entry is not None: assert ireq.link.is_wheel @@ -341,6 +337,7 @@ def __init__( self.dist = dist self._ireq = _make_install_req_from_dist(dist, template) self._factory = factory + self._version = None # This is just logging some messages, so we can do it eagerly. # The returned dist would be exactly the same as self.dist because we @@ -353,18 +350,15 @@ def __str__(self) -> str: return str(self.dist) def __repr__(self) -> str: - return "{class_name}({distribution!r})".format( - class_name=self.__class__.__name__, - distribution=self.dist, - ) + return f"{self.__class__.__name__}({self.dist!r})" - def __hash__(self) -> int: - return hash((self.__class__, self.name, self.version)) + def __eq__(self, other: object) -> bool: + if not isinstance(other, AlreadyInstalledCandidate): + return NotImplemented + return self.name == other.name and self.version == other.version - def __eq__(self, other: Any) -> bool: - if isinstance(other, self.__class__): - return self.name == other.name and self.version == other.version - return False + def __hash__(self) -> int: + return hash((self.name, self.version)) @property def project_name(self) -> NormalizedName: @@ -376,7 +370,9 @@ def name(self) -> str: @property def version(self) -> CandidateVersion: - return self.dist.version + if self._version is None: + self._version = self.dist.version + return self._version @property def is_editable(self) -> bool: @@ -389,7 +385,7 @@ def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requiremen if not with_requires: return for r in self.dist.iter_dependencies(): - yield self._factory.make_requirement_from_spec(str(r), self._ireq) + yield from self._factory.make_requirements_from_spec(str(r), self._ireq) def get_install_requirement(self) -> Optional[InstallRequirement]: return None @@ -424,20 +420,35 @@ def __init__( self, base: BaseCandidate, extras: FrozenSet[str], + *, + comes_from: Optional[InstallRequirement] = None, ) -> None: + """ + :param comes_from: the InstallRequirement that led to this candidate if it + differs from the base's InstallRequirement. This will often be the + case in the sense that this candidate's requirement has the extras + while the base's does not. Unlike the InstallRequirement backed + candidates, this requirement is used solely for reporting purposes, + it does not do any leg work. + """ self.base = base - self.extras = extras + self.extras = frozenset(canonicalize_name(e) for e in extras) + # If any extras are requested in their non-normalized forms, keep track + # of their raw values. 
This is needed when we look up dependencies + # since PEP 685 has not been implemented for marker-matching, and using + # the non-normalized extra for lookup ensures the user can select a + # non-normalized extra in a package with its non-normalized form. + # TODO: Remove this attribute when packaging is upgraded to support the + # marker comparison logic specified in PEP 685. + self._unnormalized_extras = extras.difference(self.extras) + self._comes_from = comes_from if comes_from is not None else self.base._ireq def __str__(self) -> str: name, rest = str(self.base).split(" ", 1) return "{}[{}] {}".format(name, ",".join(self.extras), rest) def __repr__(self) -> str: - return "{class_name}(base={base!r}, extras={extras!r})".format( - class_name=self.__class__.__name__, - base=self.base, - extras=self.extras, - ) + return f"{self.__class__.__name__}(base={self.base!r}, extras={self.extras!r})" def __hash__(self) -> int: return hash((self.base, self.extras)) @@ -477,6 +488,50 @@ def is_editable(self) -> bool: def source_link(self) -> Optional[Link]: return self.base.source_link + def _warn_invalid_extras( + self, + requested: FrozenSet[str], + valid: FrozenSet[str], + ) -> None: + """Emit warnings for invalid extras being requested. + + This emits a warning for each requested extra that is not in the + candidate's ``Provides-Extra`` list. + """ + invalid_extras_to_warn = frozenset( + extra + for extra in requested + if extra not in valid + # If an extra is requested in an unnormalized form, skip warning + # about the normalized form being missing. + and extra in self.extras + ) + if not invalid_extras_to_warn: + return + for extra in sorted(invalid_extras_to_warn): + logger.warning( + "%s %s does not provide the extra '%s'", + self.base.name, + self.version, + extra, + ) + + def _calculate_valid_requested_extras(self) -> FrozenSet[str]: + """Get a list of valid extras requested by this candidate. + + The user (or upstream dependent) may have specified extras that the + candidate doesn't support. Any unsupported extras are dropped, and each + cause a warning to be logged here. + """ + requested_extras = self.extras.union(self._unnormalized_extras) + valid_extras = frozenset( + extra + for extra in requested_extras + if self.base.dist.is_extra_provided(extra) + ) + self._warn_invalid_extras(requested_extras, valid_extras) + return valid_extras + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: factory = self.base._factory @@ -486,24 +541,13 @@ def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requiremen if not with_requires: return - # The user may have specified extras that the candidate doesn't - # support. We ignore any unsupported extras here. 
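# [Editor's note] A minimal sketch (not part of the patch) of the extras
# normalization performed in __init__ above, assuming only pip's vendored
# packaging helper. PEP 685 normalizes extras the same way as project names,
# so the raw spellings that survive `difference()` are exactly the ones kept
# in _unnormalized_extras for marker matching.
from pip._vendor.packaging.utils import canonicalize_name

requested = frozenset({"Dev_Tests", "socks"})
normalized = frozenset(canonicalize_name(e) for e in requested)
assert normalized == {"dev-tests", "socks"}
assert requested.difference(normalized) == {"Dev_Tests"}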
- valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras()) - invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras()) - for extra in sorted(invalid_extras): - logger.warning( - "%s %s does not provide the extra '%s'", - self.base.name, - self.version, - extra, - ) - + valid_extras = self._calculate_valid_requested_extras() for r in self.base.dist.iter_dependencies(valid_extras): - requirement = factory.make_requirement_from_spec( - str(r), self.base._ireq, valid_extras + yield from factory.make_requirements_from_spec( + str(r), + self._comes_from, + valid_extras, ) - if requirement: - yield requirement def get_install_requirement(self) -> Optional[InstallRequirement]: # We don't return anything here, because we always diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 0331297b85b..e36df15d459 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -3,6 +3,7 @@ import logging from typing import ( TYPE_CHECKING, + Callable, Dict, FrozenSet, Iterable, @@ -11,6 +12,7 @@ Mapping, NamedTuple, Optional, + Protocol, Sequence, Set, Tuple, @@ -36,7 +38,10 @@ from pip._internal.models.link import Link from pip._internal.models.wheel import Wheel from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req.constructors import install_req_from_link_and_ireq +from pip._internal.req.constructors import ( + install_req_drop_extras, + install_req_from_link_and_ireq, +) from pip._internal.req.req_install import ( InstallRequirement, check_invalid_constraint_type, @@ -62,11 +67,11 @@ ExplicitRequirement, RequiresPythonRequirement, SpecifierRequirement, + SpecifierWithoutExtrasRequirement, UnsatisfiableRequirement, ) if TYPE_CHECKING: - from typing import Protocol class ConflictCause(Protocol): requirement: RequiresPythonRequirement @@ -112,7 +117,7 @@ def __init__( self._editable_candidate_cache: Cache[EditableCandidate] = {} self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {} self._extras_candidate_cache: Dict[ - Tuple[int, FrozenSet[str]], ExtrasCandidate + Tuple[int, FrozenSet[NormalizedName]], ExtrasCandidate ] = {} if not ignore_installed: @@ -132,19 +137,23 @@ def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: if not link.is_wheel: return wheel = Wheel(link.filename) - if wheel.supported(self._finder.target_python.get_tags()): + if wheel.supported(self._finder.target_python.get_unsorted_tags()): return msg = f"{link.filename} is not a supported wheel on this platform." 
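# [Editor's note] A sketch (not part of the patch) of why the switch to
# get_unsorted_tags above is safe: Wheel.supported() is a pure membership
# test, so tag order only matters when *ranking* candidate wheels. The module
# paths below are pip's own; the filename is a made-up example.
from pip._internal.models.wheel import Wheel
from pip._vendor.packaging.tags import sys_tags

w = Wheel("example-1.0-py3-none-any.whl")
print(w.supported(set(sys_tags())))  # an order-free set suffices for a yes/no check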
raise UnsupportedWheel(msg) def _make_extras_candidate( - self, base: BaseCandidate, extras: FrozenSet[str] + self, + base: BaseCandidate, + extras: FrozenSet[str], + *, + comes_from: Optional[InstallRequirement] = None, ) -> ExtrasCandidate: - cache_key = (id(base), extras) + cache_key = (id(base), frozenset(canonicalize_name(e) for e in extras)) try: candidate = self._extras_candidate_cache[cache_key] except KeyError: - candidate = ExtrasCandidate(base, extras) + candidate = ExtrasCandidate(base, extras, comes_from=comes_from) self._extras_candidate_cache[cache_key] = candidate return candidate @@ -161,7 +170,7 @@ def _make_candidate_from_dist( self._installed_candidate_cache[dist.canonical_name] = base if not extras: return base - return self._make_extras_candidate(base, extras) + return self._make_extras_candidate(base, extras, comes_from=template) def _make_candidate_from_link( self, @@ -171,6 +180,20 @@ def _make_candidate_from_link( name: Optional[NormalizedName], version: Optional[CandidateVersion], ) -> Optional[Candidate]: + base: Optional[BaseCandidate] = self._make_base_candidate_from_link( + link, template, name, version + ) + if not extras or base is None: + return base + return self._make_extras_candidate(base, extras, comes_from=template) + + def _make_base_candidate_from_link( + self, + link: Link, + template: InstallRequirement, + name: Optional[NormalizedName], + version: Optional[CandidateVersion], + ) -> Optional[BaseCandidate]: # TODO: Check already installed candidate, and use it if the link and # editable flag match. @@ -199,7 +222,7 @@ def _make_candidate_from_link( self._build_failures[link] = e return None - base: BaseCandidate = self._editable_candidate_cache[link] + return self._editable_candidate_cache[link] else: if link not in self._link_candidate_cache: try: @@ -219,11 +242,7 @@ def _make_candidate_from_link( ) self._build_failures[link] = e return None - base = self._link_candidate_cache[link] - - if not extras: - return base - return self._make_extras_candidate(base, extras) + return self._link_candidate_cache[link] def _iter_found_candidates( self, @@ -357,9 +376,8 @@ def _iter_candidates_from_constraints( """ for link in constraint.links: self._fail_if_link_is_unsupported_wheel(link) - candidate = self._make_candidate_from_link( + candidate = self._make_base_candidate_from_link( link, - extras=frozenset(), template=install_req_from_link_and_ireq(link, template), name=canonicalize_name(identifier), version=None, @@ -374,6 +392,7 @@ def find_candidates( incompatibilities: Mapping[str, Iterator[Candidate]], constraint: Constraint, prefers_installed: bool, + is_satisfied_by: Callable[[Requirement, Candidate], bool], ) -> Iterable[Candidate]: # Collect basic lookup information from the requirements. explicit_candidates: Set[Candidate] = set() @@ -385,16 +404,21 @@ def find_candidates( if ireq is not None: ireqs.append(ireq) - # If the current identifier contains extras, add explicit candidates - # from entries from extra-less identifier. + # If the current identifier contains extras, add requires and explicit + # candidates from entries from extra-less identifier. 
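# [Editor's note] A sketch (not part of the patch) of the identifier probing
# done just below, using the packaging parser that get_requirement wraps:
from pip._vendor.packaging.requirements import Requirement

parsed = Requirement("requests[socks]")
assert (parsed.name, parsed.extras) == ("requests", {"socks"})
# For an extra-less identifier the parsed name equals the identifier itself,
# which is the cheap "does this have extras?" test used in find_candidates.
assert Requirement("requests").name == "requests"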
with contextlib.suppress(InvalidRequirement): parsed_requirement = get_requirement(identifier) - explicit_candidates.update( - self._iter_explicit_candidates_from_base( - requirements.get(parsed_requirement.name, ()), - frozenset(parsed_requirement.extras), - ), - ) + if parsed_requirement.name != identifier: + explicit_candidates.update( + self._iter_explicit_candidates_from_base( + requirements.get(parsed_requirement.name, ()), + frozenset(parsed_requirement.extras), + ), + ) + for req in requirements.get(parsed_requirement.name, []): + _, ireq = req.get_candidate_lookup() + if ireq is not None: + ireqs.append(ireq) # Add explicit candidates from constraints. We only do this if there are # known ireqs, which represent requirements not already explicit. If @@ -434,40 +458,61 @@ def find_candidates( for c in explicit_candidates if id(c) not in incompat_ids and constraint.is_satisfied_by(c) - and all(req.is_satisfied_by(c) for req in requirements[identifier]) + and all(is_satisfied_by(req, c) for req in requirements[identifier]) ) - def _make_requirement_from_install_req( + def _make_requirements_from_install_req( self, ireq: InstallRequirement, requested_extras: Iterable[str] - ) -> Optional[Requirement]: + ) -> Iterator[Requirement]: + """ + Returns requirement objects associated with the given InstallRequirement. In + most cases this will be a single object but the following special cases exist: + - the InstallRequirement has markers that do not apply -> result is empty + - the InstallRequirement has both a constraint (or link) and extras + -> result is split in two requirement objects: one with the constraint + (or link) and one with the extra. This allows centralized constraint + handling for the base, resulting in fewer candidate rejections. + """ if not ireq.match_markers(requested_extras): logger.info( "Ignoring %s: markers '%s' don't match your environment", ireq.name, ireq.markers, ) - return None - if not ireq.link: - return SpecifierRequirement(ireq) - self._fail_if_link_is_unsupported_wheel(ireq.link) - cand = self._make_candidate_from_link( - ireq.link, - extras=frozenset(ireq.extras), - template=ireq, - name=canonicalize_name(ireq.name) if ireq.name else None, - version=None, - ) - if cand is None: - # There's no way we can satisfy a URL requirement if the underlying - # candidate fails to build. An unnamed URL must be user-supplied, so - # we fail eagerly. If the URL is named, an unsatisfiable requirement - # can make the resolver do the right thing, either backtrack (and - # maybe find some other requirement that's buildable) or raise a - # ResolutionImpossible eventually. - if not ireq.name: - raise self._build_failures[ireq.link] - return UnsatisfiableRequirement(canonicalize_name(ireq.name)) - return self.make_requirement_from_candidate(cand) + elif not ireq.link: + if ireq.extras and ireq.req is not None and ireq.req.specifier: + yield SpecifierWithoutExtrasRequirement(ireq) + yield SpecifierRequirement(ireq) + else: + self._fail_if_link_is_unsupported_wheel(ireq.link) + # Always make the link candidate for the base requirement to make it + # available to `find_candidates` for explicit candidate lookup for any + # set of extras. + # The extras are required separately via a second requirement. 
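# [Editor's note] A sketch (not part of the patch) of the base/extras split
# described in the comment above, expressed in plain packaging terms: one
# requirement carries the version constraint on the base distribution, and a
# second merely layers the extra on top of the chosen base.
from pip._vendor.packaging.requirements import Requirement

req = Requirement("requests[socks]>=2.25")
base = Requirement(f"{req.name}{req.specifier}")             # requests>=2.25
extras = Requirement(f"{req.name}[{','.join(req.extras)}]")  # requests[socks]
print(base, "+", extras)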
+ cand = self._make_base_candidate_from_link( + ireq.link, + template=install_req_drop_extras(ireq) if ireq.extras else ireq, + name=canonicalize_name(ireq.name) if ireq.name else None, + version=None, + ) + if cand is None: + # There's no way we can satisfy a URL requirement if the underlying + # candidate fails to build. An unnamed URL must be user-supplied, so + # we fail eagerly. If the URL is named, an unsatisfiable requirement + # can make the resolver do the right thing, either backtrack (and + # maybe find some other requirement that's buildable) or raise a + # ResolutionImpossible eventually. + if not ireq.name: + raise self._build_failures[ireq.link] + yield UnsatisfiableRequirement(canonicalize_name(ireq.name)) + else: + # require the base from the link + yield self.make_requirement_from_candidate(cand) + if ireq.extras: + # require the extras on top of the base candidate + yield self.make_requirement_from_candidate( + self._make_extras_candidate(cand, frozenset(ireq.extras)) + ) def collect_root_requirements( self, root_ireqs: List[InstallRequirement] @@ -488,15 +533,27 @@ def collect_root_requirements( else: collected.constraints[name] = Constraint.from_ireq(ireq) else: - req = self._make_requirement_from_install_req( - ireq, - requested_extras=(), + reqs = list( + self._make_requirements_from_install_req( + ireq, + requested_extras=(), + ) ) - if req is None: + if not reqs: continue - if ireq.user_supplied and req.name not in collected.user_requested: - collected.user_requested[req.name] = i - collected.requirements.append(req) + template = reqs[0] + if ireq.user_supplied and template.name not in collected.user_requested: + collected.user_requested[template.name] = i + collected.requirements.extend(reqs) + # Put requirements with extras at the end of the root requires. This does not + # affect resolvelib's picking preference but it does affect its initial criteria + # population: by putting extras at the end we enable the candidate finder to + # present resolvelib with a smaller set of candidates to resolvelib, already + # taking into account any non-transient constraints on the associated base. This + # means resolvelib will have fewer candidates to visit and reject. + # Python's list sort is stable, meaning relative order is kept for objects with + # the same key. + collected.requirements.sort(key=lambda r: r.name != r.project_name) return collected def make_requirement_from_candidate( @@ -504,14 +561,23 @@ def make_requirement_from_candidate( ) -> ExplicitRequirement: return ExplicitRequirement(candidate) - def make_requirement_from_spec( + def make_requirements_from_spec( self, specifier: str, comes_from: Optional[InstallRequirement], requested_extras: Iterable[str] = (), - ) -> Optional[Requirement]: + ) -> Iterator[Requirement]: + """ + Returns requirement objects associated with the given specifier. In most cases + this will be a single object but the following special cases exist: + - the specifier has markers that do not apply -> result is empty + - the specifier has both a constraint and extras -> result is split + in two requirement objects: one with the constraint and one with the + extra. This allows centralized constraint handling for the base, + resulting in fewer candidate rejections. 
+ """ ireq = self._make_install_req_from_spec(specifier, comes_from) - return self._make_requirement_from_install_req(ireq, requested_extras) + return self._make_requirements_from_install_req(ireq, requested_extras) def make_requires_python_requirement( self, @@ -603,8 +669,26 @@ def _report_single_requirement_conflict( cands = self._finder.find_all_candidates(req.project_name) skipped_by_requires_python = self._finder.requires_python_skipped_reasons() - versions = [str(v) for v in sorted({c.version for c in cands})] + versions_set: Set[CandidateVersion] = set() + yanked_versions_set: Set[CandidateVersion] = set() + for c in cands: + is_yanked = c.link.is_yanked if c.link else False + if is_yanked: + yanked_versions_set.add(c.version) + else: + versions_set.add(c.version) + + versions = [str(v) for v in sorted(versions_set)] + yanked_versions = [str(v) for v in sorted(yanked_versions_set)] + + if yanked_versions: + # Saying "version X is yanked" isn't entirely accurate. + # https://github.com/pypa/pip/issues/11745#issuecomment-1402805842 + logger.critical( + "Ignored the following yanked versions: %s", + ", ".join(yanked_versions) or "none", + ) if skipped_by_requires_python: logger.critical( "Ignored the following versions that require a different python " @@ -692,8 +776,8 @@ def describe_trigger(parent: Candidate) -> str: info = "the requested packages" msg = ( - "Cannot install {} because these package versions " - "have conflicting dependencies.".format(info) + f"Cannot install {info} because these package versions " + "have conflicting dependencies." ) logger.critical(msg) msg = "\nThe conflict is caused by:" @@ -717,7 +801,7 @@ def describe_trigger(parent: Candidate) -> str: + "\n\n" + "To fix this you could try to:\n" + "1. loosen the range of package versions you've specified\n" - + "2. remove package versions to allow pip attempt to solve " + + "2. remove package versions to allow pip to attempt to solve " + "the dependency conflict\n" ) diff --git a/src/pip/_internal/resolution/resolvelib/provider.py b/src/pip/_internal/resolution/resolvelib/provider.py index 315fb9c8902..fb0dd85f112 100644 --- a/src/pip/_internal/resolution/resolvelib/provider.py +++ b/src/pip/_internal/resolution/resolvelib/provider.py @@ -1,5 +1,6 @@ import collections import math +from functools import lru_cache from typing import ( TYPE_CHECKING, Dict, @@ -234,8 +235,10 @@ def _eligible_for_upgrade(identifier: str) -> bool: constraint=constraint, prefers_installed=(not _eligible_for_upgrade(identifier)), incompatibilities=incompatibilities, + is_satisfied_by=self.is_satisfied_by, ) + @lru_cache(maxsize=None) def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool: return requirement.is_satisfied_by(candidate) diff --git a/src/pip/_internal/resolution/resolvelib/reporter.py b/src/pip/_internal/resolution/resolvelib/reporter.py index 3c724238a1e..12adeff7b6e 100644 --- a/src/pip/_internal/resolution/resolvelib/reporter.py +++ b/src/pip/_internal/resolution/resolvelib/reporter.py @@ -20,7 +20,7 @@ def __init__(self) -> None: "requirements. This could take a while." ), 8: ( - "pip is looking at multiple versions of {package_name} to " + "pip is still looking at multiple versions of {package_name} to " "determine which version is compatible with other " "requirements. This could take a while." 
), diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index 06addc0ddce..f980a356f18 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -1,6 +1,9 @@ +from typing import Any + from pip._vendor.packaging.specifiers import SpecifierSet from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._internal.req.constructors import install_req_drop_extras from pip._internal.req.req_install import InstallRequirement from .base import Candidate, CandidateLookup, Requirement, format_name @@ -14,10 +17,15 @@ def __str__(self) -> str: return str(self.candidate) def __repr__(self) -> str: - return "{class_name}({candidate!r})".format( - class_name=self.__class__.__name__, - candidate=self.candidate, - ) + return f"{self.__class__.__name__}({self.candidate!r})" + + def __hash__(self) -> int: + return hash(self.candidate) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ExplicitRequirement): + return False + return self.candidate == other.candidate @property def project_name(self) -> NormalizedName: @@ -43,16 +51,21 @@ class SpecifierRequirement(Requirement): def __init__(self, ireq: InstallRequirement) -> None: assert ireq.link is None, "This is a link, not a specifier" self._ireq = ireq - self._extras = frozenset(ireq.extras) + self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras) def __str__(self) -> str: return str(self._ireq.req) def __repr__(self) -> str: - return "{class_name}({requirement!r})".format( - class_name=self.__class__.__name__, - requirement=str(self._ireq.req), - ) + return f"{self.__class__.__name__}({str(self._ireq.req)!r})" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SpecifierRequirement): + return NotImplemented + return str(self._ireq) == str(other._ireq) + + def __hash__(self) -> int: + return hash(str(self._ireq)) @property def project_name(self) -> NormalizedName: @@ -92,20 +105,49 @@ def is_satisfied_by(self, candidate: Candidate) -> bool: return spec.contains(candidate.version, prereleases=True) +class SpecifierWithoutExtrasRequirement(SpecifierRequirement): + """ + Requirement backed by an install requirement on a base package. + Trims extras from its install requirement if there are any. 
+ """ + + def __init__(self, ireq: InstallRequirement) -> None: + assert ireq.link is None, "This is a link, not a specifier" + self._ireq = install_req_drop_extras(ireq) + self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, SpecifierWithoutExtrasRequirement): + return NotImplemented + return str(self._ireq) == str(other._ireq) + + def __hash__(self) -> int: + return hash(str(self._ireq)) + + class RequiresPythonRequirement(Requirement): """A requirement representing Requires-Python metadata.""" def __init__(self, specifier: SpecifierSet, match: Candidate) -> None: self.specifier = specifier + self._specifier_string = str(specifier) # for faster __eq__ self._candidate = match def __str__(self) -> str: return f"Python {self.specifier}" def __repr__(self) -> str: - return "{class_name}({specifier!r})".format( - class_name=self.__class__.__name__, - specifier=str(self.specifier), + return f"{self.__class__.__name__}({str(self.specifier)!r})" + + def __hash__(self) -> int: + return hash((self._specifier_string, self._candidate)) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, RequiresPythonRequirement): + return False + return ( + self._specifier_string == other._specifier_string + and self._candidate == other._candidate ) @property @@ -142,10 +184,15 @@ def __str__(self) -> str: return f"{self._name} (unavailable)" def __repr__(self) -> str: - return "{class_name}({name!r})".format( - class_name=self.__class__.__name__, - name=str(self._name), - ) + return f"{self.__class__.__name__}({str(self._name)!r})" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, UnsatisfiableRequirement): + return NotImplemented + return self._name == other._name + + def __hash__(self) -> int: + return hash(self._name) @property def project_name(self) -> NormalizedName: diff --git a/src/pip/_internal/resolution/resolvelib/resolver.py b/src/pip/_internal/resolution/resolvelib/resolver.py index 47bbfecce36..c12beef0b2a 100644 --- a/src/pip/_internal/resolution/resolvelib/resolver.py +++ b/src/pip/_internal/resolution/resolvelib/resolver.py @@ -1,3 +1,4 @@ +import contextlib import functools import logging import os @@ -11,6 +12,7 @@ from pip._internal.cache import WheelCache from pip._internal.index.package_finder import PackageFinder from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import install_req_extend_extras from pip._internal.req.req_install import InstallRequirement from pip._internal.req.req_set import RequirementSet from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider @@ -19,6 +21,7 @@ PipDebuggingReporter, PipReporter, ) +from pip._internal.utils.packaging import get_requirement from .base import Candidate, Requirement from .factory import Factory @@ -101,9 +104,24 @@ def resolve( raise error from e req_set = RequirementSet(check_supported_wheels=check_supported_wheels) - for candidate in result.mapping.values(): + # process candidates with extras last to ensure their base equivalent is + # already in the req_set if appropriate. + # Python's sort is stable so using a binary key function keeps relative order + # within both subsets. 
+ for candidate in sorted( + result.mapping.values(), key=lambda c: c.name != c.project_name + ): ireq = candidate.get_install_requirement() if ireq is None: + if candidate.name != candidate.project_name: + # extend existing req's extras + with contextlib.suppress(KeyError): + req = req_set.get_requirement(candidate.project_name) + req_set.add_named_requirement( + install_req_extend_extras( + req, get_requirement(candidate.name).extras + ) + ) continue # Check if there is already an installation under the same name, @@ -159,6 +177,9 @@ def resolve( reqs = req_set.all_requirements self.factory.preparer.prepare_linked_requirements_more(reqs) + for req in reqs: + req.prepared = True + req.needs_more_preparation = False return req_set def get_installation_order( diff --git a/src/pip/_internal/self_outdated_check.py b/src/pip/_internal/self_outdated_check.py index 41cc42c5677..0f64ae0e614 100644 --- a/src/pip/_internal/self_outdated_check.py +++ b/src/pip/_internal/self_outdated_check.py @@ -28,8 +28,7 @@ from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace from pip._internal.utils.misc import ensure_dir -_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" - +_WEEK = datetime.timedelta(days=7) logger = logging.getLogger(__name__) @@ -40,6 +39,15 @@ def _get_statefile_name(key: str) -> str: return name +def _convert_date(isodate: str) -> datetime.datetime: + """Convert an ISO format string to a date. + + Handles the format 2020-01-22T14:24:01Z (trailing Z) + which is not supported by older versions of fromisoformat. + """ + return datetime.datetime.fromisoformat(isodate.replace("Z", "+00:00")) + + class SelfCheckState: def __init__(self, cache_dir: str) -> None: self._state: Dict[str, Any] = {} @@ -73,12 +81,10 @@ def get(self, current_time: datetime.datetime) -> Optional[str]: if "pypi_version" not in self._state: return None - seven_days_in_seconds = 7 * 24 * 60 * 60 - # Determine if we need to refresh the state - last_check = datetime.datetime.strptime(self._state["last_check"], _DATE_FMT) - seconds_since_last_check = (current_time - last_check).total_seconds() - if seconds_since_last_check > seven_days_in_seconds: + last_check = _convert_date(self._state["last_check"]) + time_since_last_check = current_time - last_check + if time_since_last_check > _WEEK: return None return self._state["pypi_version"] @@ -100,7 +106,7 @@ def set(self, pypi_version: str, current_time: datetime.datetime) -> None: # Include the key so it's easy to tell which pip wrote the # file. 
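# [Editor's note] A sketch (not part of the patch) of the timestamp handling
# in _convert_date above: datetime.fromisoformat only accepts a trailing "Z"
# from Python 3.11 on, hence the "+00:00" rewrite; writing uses isoformat(),
# which round-trips cleanly.
import datetime

parsed = datetime.datetime.fromisoformat("2023-06-01T10:30:00Z".replace("Z", "+00:00"))
assert parsed.tzinfo == datetime.timezone.utc
assert datetime.datetime.fromisoformat(parsed.isoformat()) == parsed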
"key": self.key, - "last_check": current_time.strftime(_DATE_FMT), + "last_check": current_time.isoformat(), "pypi_version": pypi_version, } @@ -229,14 +235,14 @@ def pip_self_version_check(session: PipSession, options: optparse.Values) -> Non try: upgrade_prompt = _self_version_check_logic( state=SelfCheckState(cache_dir=options.cache_dir), - current_time=datetime.datetime.utcnow(), + current_time=datetime.datetime.now(datetime.timezone.utc), local_version=installed_dist.version, get_remote_version=functools.partial( _get_current_remote_pip_version, session, options ), ) if upgrade_prompt is not None: - logger.warning("[present-rich] %s", upgrade_prompt) + logger.warning("%s", upgrade_prompt, extra={"rich": True}) except Exception: logger.warning("There was an error checking the latest version of pip.") logger.debug("See below for error", exc_info=True) diff --git a/src/pip/_internal/utils/_jaraco_text.py b/src/pip/_internal/utils/_jaraco_text.py index e06947c051a..6ccf53b7ac5 100644 --- a/src/pip/_internal/utils/_jaraco_text.py +++ b/src/pip/_internal/utils/_jaraco_text.py @@ -88,7 +88,7 @@ def join_continuation(lines): ['foobarbaz'] Not sure why, but... - The character preceeding the backslash is also elided. + The character preceding the backslash is also elided. >>> list(join_continuation(['goo\\', 'dly'])) ['godly'] diff --git a/src/pip/_internal/utils/compat.py b/src/pip/_internal/utils/compat.py index 3f4d300cef0..d8b54e4ee51 100644 --- a/src/pip/_internal/utils/compat.py +++ b/src/pip/_internal/utils/compat.py @@ -1,9 +1,11 @@ """Stuff that differs in different Python versions and platform distributions.""" +import importlib.resources import logging import os import sys +from typing import IO __all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"] @@ -51,6 +53,20 @@ def get_path_uid(path: str) -> int: return file_uid +# The importlib.resources.open_text function was deprecated in 3.11 with suggested +# replacement we use below. +if sys.version_info < (3, 11): + open_text_resource = importlib.resources.open_text +else: + + def open_text_resource( + package: str, resource: str, encoding: str = "utf-8", errors: str = "strict" + ) -> IO[str]: + return (importlib.resources.files(package) / resource).open( + "r", encoding=encoding, errors=errors + ) + + # packages in the stdlib that may have installation metadata, but should not be # considered 'installed'. this theoretically could be determined based on # dist.location (py27:`sysconfig.get_paths()['stdlib']`, diff --git a/src/pip/_internal/utils/deprecation.py b/src/pip/_internal/utils/deprecation.py index 72bd6f25a55..0911147e784 100644 --- a/src/pip/_internal/utils/deprecation.py +++ b/src/pip/_internal/utils/deprecation.py @@ -87,9 +87,11 @@ def deprecated( (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"), ( gone_in, - "pip {} will enforce this behaviour change." - if not is_gone - else "Since pip {}, this is no longer supported.", + ( + "pip {} will enforce this behaviour change." + if not is_gone + else "Since pip {}, this is no longer supported." + ), ), ( replacement, @@ -97,9 +99,11 @@ def deprecated( ), ( feature_flag, - "You can use the flag --use-feature={} to test the upcoming behaviour." - if not is_gone - else None, + ( + "You can use the flag --use-feature={} to test the upcoming behaviour." 
+ if not is_gone + else None + ), ), ( issue, diff --git a/src/pip/_internal/utils/direct_url_helpers.py b/src/pip/_internal/utils/direct_url_helpers.py index 0e8e5e1608b..66020d3964a 100644 --- a/src/pip/_internal/utils/direct_url_helpers.py +++ b/src/pip/_internal/utils/direct_url_helpers.py @@ -12,8 +12,8 @@ def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> s requirement = name + " @ " fragments = [] if isinstance(direct_url.info, VcsInfo): - requirement += "{}+{}@{}".format( - direct_url.info.vcs, direct_url.url, direct_url.info.commit_id + requirement += ( + f"{direct_url.info.vcs}+{direct_url.url}@{direct_url.info.commit_id}" ) elif isinstance(direct_url.info, ArchiveInfo): requirement += direct_url.url diff --git a/src/pip/_internal/utils/egg_link.py b/src/pip/_internal/utils/egg_link.py index eb57ed1519f..4a384a63682 100644 --- a/src/pip/_internal/utils/egg_link.py +++ b/src/pip/_internal/utils/egg_link.py @@ -15,24 +15,31 @@ ] -def _egg_link_name(raw_name: str) -> str: +def _egg_link_names(raw_name: str) -> List[str]: """ Convert a Name metadata value to a .egg-link name, by applying the same substitution as pkg_resources's safe_name function. Note: we cannot use canonicalize_name because it has a different logic. + + We also look for the raw name (without normalization) as setuptools 69 changed + the way it names .egg-link files (https://github.com/pypa/setuptools/issues/4167). """ - return re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link" + return [ + re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link", + f"{raw_name}.egg-link", + ] def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]: """ Look for a .egg-link file for project name, by walking sys.path. """ - egg_link_name = _egg_link_name(raw_name) + egg_link_names = _egg_link_names(raw_name) for path_item in sys.path: - egg_link = os.path.join(path_item, egg_link_name) - if os.path.isfile(egg_link): - return egg_link + for egg_link_name in egg_link_names: + egg_link = os.path.join(path_item, egg_link_name) + if os.path.isfile(egg_link): + return egg_link return None @@ -64,9 +71,10 @@ def egg_link_path_from_location(raw_name: str) -> Optional[str]: sites.append(user_site) sites.append(site_packages) - egg_link_name = _egg_link_name(raw_name) + egg_link_names = _egg_link_names(raw_name) for site in sites: - egglink = os.path.join(site, egg_link_name) - if os.path.isfile(egglink): - return egglink + for egg_link_name in egg_link_names: + egglink = os.path.join(site, egg_link_name) + if os.path.isfile(egglink): + return egglink return None diff --git a/src/pip/_internal/utils/glibc.py b/src/pip/_internal/utils/glibc.py index 7bd3c20681d..81342afa447 100644 --- a/src/pip/_internal/utils/glibc.py +++ b/src/pip/_internal/utils/glibc.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - import os import sys from typing import Optional, Tuple @@ -20,8 +17,11 @@ def glibc_version_string_confstr() -> Optional[str]: if sys.platform == "win32": return None try: + gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION") + if gnu_libc_version is None: + return None # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": - _, version = os.confstr("CS_GNU_LIBC_VERSION").split() + _, version = gnu_libc_version.split() except (AttributeError, OSError, ValueError): # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... 
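# [Editor's note] A sketch (not part of the patch) of the confstr edge cases
# the new guard above handles: the call can raise, but it can also return
# None when the name is known and merely unset, which an unguarded .split()
# would crash on.
import os

try:
    raw = os.confstr("CS_GNU_LIBC_VERSION")  # e.g. "glibc 2.17", or None
except (AttributeError, ValueError, OSError):
    raw = None  # non-glibc platform or unknown configuration name
print(raw.split()[1] if raw else None)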
return None diff --git a/src/pip/_internal/utils/hashes.py b/src/pip/_internal/utils/hashes.py index 843cffc6b3d..c073b09dd98 100644 --- a/src/pip/_internal/utils/hashes.py +++ b/src/pip/_internal/utils/hashes.py @@ -1,5 +1,5 @@ import hashlib -from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional +from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, NoReturn, Optional from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError from pip._internal.utils.misc import read_chunks @@ -7,10 +7,6 @@ if TYPE_CHECKING: from hashlib import _Hash - # NoReturn introduced in 3.6.2; imported only for type checking to maintain - # pip compatibility with older patch versions of Python 3.6 - from typing import NoReturn - # The recommended hash algo of the moment. Change this whenever the state of # the art changes; it won't hurt backward compatibility. diff --git a/src/pip/_internal/utils/inject_securetransport.py b/src/pip/_internal/utils/inject_securetransport.py deleted file mode 100644 index 276aa79bb81..00000000000 --- a/src/pip/_internal/utils/inject_securetransport.py +++ /dev/null @@ -1,35 +0,0 @@ -"""A helper module that injects SecureTransport, on import. - -The import should be done as early as possible, to ensure all requests and -sessions (or whatever) are created after injecting SecureTransport. - -Note that we only do the injection on macOS, when the linked OpenSSL is too -old to handle TLSv1.2. -""" - -import sys - - -def inject_securetransport() -> None: - # Only relevant on macOS - if sys.platform != "darwin": - return - - try: - import ssl - except ImportError: - return - - # Checks for OpenSSL 1.0.1 - if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100F: - return - - try: - from pip._vendor.urllib3.contrib import securetransport - except (ImportError, OSError): - return - - securetransport.inject_into_urllib3() - - -inject_securetransport() diff --git a/src/pip/_internal/utils/logging.py b/src/pip/_internal/utils/logging.py index c10e1f4ced6..90df257821e 100644 --- a/src/pip/_internal/utils/logging.py +++ b/src/pip/_internal/utils/logging.py @@ -155,8 +155,8 @@ def emit(self, record: logging.LogRecord) -> None: # If we are given a diagnostic error to present, present it with indentation. assert isinstance(record.args, tuple) - if record.msg == "[present-rich] %s" and len(record.args) == 1: - rich_renderable = record.args[0] + if getattr(record, "rich", False): + (rich_renderable,) = record.args assert isinstance( rich_renderable, (ConsoleRenderable, RichCast, str) ), f"{rich_renderable} is not rich-console-renderable" @@ -212,7 +212,6 @@ def filter(self, record: logging.LogRecord) -> bool: class ExcludeLoggerFilter(Filter): - """ A logging Filter that excludes records from a logger (or its children). """ diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index bfed8270252..82b6261322d 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -1,7 +1,3 @@ -# The following comment should be removed at some point in the future. 
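# [Editor's note] A sketch (not part of the patch) of the mechanism behind
# the extra={"rich": True} protocol used by the logging.py handler change
# above: the stdlib copies the `extra` mapping onto the LogRecord, so a
# handler can probe a plain attribute instead of matching the old
# "[present-rich] %s" message prefix.
import logging

logger = logging.getLogger("demo")
record = logger.makeRecord(
    "demo", logging.WARNING, __file__, 0, "%s", ("hello",), None,
    extra={"rich": True},
)
assert getattr(record, "rich", False)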
-# mypy: strict-optional=False
-
-import contextlib
 import errno
 import getpass
 import hashlib
@@ -14,14 +10,16 @@
 import sys
 import sysconfig
 import urllib.parse
+from dataclasses import dataclass
+from functools import partial
 from io import StringIO
 from itertools import filterfalse, tee, zip_longest
-from types import TracebackType
+from pathlib import Path
+from types import FunctionType, TracebackType
 from typing import (
     Any,
     BinaryIO,
     Callable,
-    ContextManager,
     Dict,
     Generator,
     Iterable,
@@ -36,6 +34,7 @@
     cast,
 )

+from pip._vendor.packaging.requirements import Requirement
 from pip._vendor.pyproject_hooks import BuildBackendHookCaller
 from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed

@@ -56,7 +55,6 @@
     "normalize_path",
     "renames",
     "get_prog",
-    "captured_stdout",
     "ensure_dir",
     "remove_auth_from_url",
     "check_externally_managed",
@@ -69,17 +67,15 @@
 ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
 VersionInfo = Tuple[int, int, int]
 NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
+OnExc = Callable[[FunctionType, Path, BaseException], Any]
+OnErr = Callable[[FunctionType, Path, ExcInfo], Any]


 def get_pip_version() -> str:
     pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
     pip_pkg_dir = os.path.abspath(pip_pkg_dir)

-    return "pip {} from {} (python {})".format(
-        __version__,
-        pip_pkg_dir,
-        get_major_minor_version(),
-    )
+    return f"pip {__version__} from {pip_pkg_dir} (python {get_major_minor_version()})"


 def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
@@ -126,28 +122,75 @@ def get_prog() -> str:
 # Retry every half second for up to 3 seconds
 # Tenacity raises RetryError by default, explicitly raise the original exception
 @retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
-def rmtree(dir: str, ignore_errors: bool = False) -> None:
-    shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler)
+def rmtree(
+    dir: str,
+    ignore_errors: bool = False,
+    onexc: Optional[OnExc] = None,
+) -> None:
+    if ignore_errors:
+        onexc = _onerror_ignore
+    if onexc is None:
+        onexc = _onerror_reraise
+    handler: OnErr = partial(
+        # `[func, path, Union[ExcInfo, BaseException]] -> Any` is equivalent to
+        # `Union[([func, path, ExcInfo] -> Any), ([func, path, BaseException] -> Any)]`.
+        cast(Union[OnExc, OnErr], rmtree_errorhandler),
+        onexc=onexc,
+    )
+    if sys.version_info >= (3, 12):
+        # See https://docs.python.org/3.12/whatsnew/3.12.html#shutil.
+        shutil.rmtree(dir, onexc=handler)  # type: ignore
+    else:
+        shutil.rmtree(dir, onerror=handler)  # type: ignore
+
+
+def _onerror_ignore(*_args: Any) -> None:
+    pass
+
+
+def _onerror_reraise(*_args: Any) -> None:
+    raise

+def rmtree_errorhandler(
+    func: FunctionType,
+    path: Path,
+    exc_info: Union[ExcInfo, BaseException],
+    *,
+    onexc: OnExc = _onerror_reraise,
+) -> None:
+    """
+    `rmtree` error handler to 'force' a file remove (i.e. like `rm -f`).
+
+    * If a file is readonly then its write flag is set and the operation is
+      retried.

-def rmtree_errorhandler(func: Callable[..., Any], path: str, exc_info: ExcInfo) -> None:
-    """On Windows, the files in .svn are read-only, so when rmtree() tries to
-    remove them, an exception is thrown. We catch that here, remove the
-    read-only attribute, and hopefully continue without problems."""
+    * `onexc` is the original callback from `rmtree(... onexc=onexc)`
+      that is chained at the end if the "rm -f" still fails.
+ """ try: - has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE) + st_mode = os.stat(path).st_mode except OSError: # it's equivalent to os.path.exists return - if has_attr_readonly: + if not st_mode & stat.S_IWRITE: # convert to read/write - os.chmod(path, stat.S_IWRITE) - # use the original function to repeat the operation - func(path) - return - else: - raise + try: + os.chmod(path, st_mode | stat.S_IWRITE) + except OSError: + pass + else: + # use the original function to repeat the operation + try: + func(path) + return + except OSError: + pass + + if not isinstance(exc_info, BaseException): + _, exc_info, _ = exc_info + onexc(func, path, exc_info) def display_path(path: str) -> str: @@ -230,13 +273,13 @@ def strtobool(val: str) -> int: def format_size(bytes: float) -> str: if bytes > 1000 * 1000: - return "{:.1f} MB".format(bytes / 1000.0 / 1000) + return f"{bytes / 1000.0 / 1000:.1f} MB" elif bytes > 10 * 1000: - return "{} kB".format(int(bytes / 1000)) + return f"{int(bytes / 1000)} kB" elif bytes > 1000: - return "{:.1f} kB".format(bytes / 1000.0) + return f"{bytes / 1000.0:.1f} kB" else: - return "{} bytes".format(int(bytes)) + return f"{int(bytes)} bytes" def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]: @@ -339,54 +382,21 @@ def write_output(msg: Any, *args: Any) -> None: class StreamWrapper(StringIO): - orig_stream: TextIO = None + orig_stream: TextIO @classmethod def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper": - cls.orig_stream = orig_stream - return cls() + ret = cls() + ret.orig_stream = orig_stream + return ret # compileall.compile_dir() needs stdout.encoding to print to stdout - # https://github.com/python/mypy/issues/4125 + # type ignore is because TextIOBase.encoding is writeable @property - def encoding(self): # type: ignore + def encoding(self) -> str: # type: ignore return self.orig_stream.encoding -@contextlib.contextmanager -def captured_output(stream_name: str) -> Generator[StreamWrapper, None, None]: - """Return a context manager used by captured_stdout/stdin/stderr - that temporarily replaces the sys stream *stream_name* with a StringIO. - - Taken from Lib/support/__init__.py in the CPython repo. - """ - orig_stdout = getattr(sys, stream_name) - setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout)) - try: - yield getattr(sys, stream_name) - finally: - setattr(sys, stream_name, orig_stdout) - - -def captured_stdout() -> ContextManager[StreamWrapper]: - """Capture the output of sys.stdout: - - with captured_stdout() as stdout: - print('hello') - self.assertEqual(stdout.getvalue(), 'hello\n') - - Taken from Lib/support/__init__.py in the CPython repo. - """ - return captured_output("stdout") - - -def captured_stderr() -> ContextManager[StreamWrapper]: - """ - See captured_stdout(). - """ - return captured_output("stderr") - - # Simulates an enum def enum(*sequential: Any, **named: Any) -> Type[Any]: enums = dict(zip(sequential, range(len(sequential))), **named) @@ -417,7 +427,7 @@ def build_url_from_netloc(netloc: str, scheme: str = "https") -> str: return f"{scheme}://{netloc}" -def parse_netloc(netloc: str) -> Tuple[str, Optional[int]]: +def parse_netloc(netloc: str) -> Tuple[Optional[str], Optional[int]]: """ Return the host-port pair from a netloc. 
""" @@ -472,9 +482,7 @@ def redact_netloc(netloc: str) -> str: else: user = urllib.parse.quote(user) password = ":****" - return "{user}{password}@{netloc}".format( - user=user, password=password, netloc=netloc - ) + return f"{user}{password}@{netloc}" def _transform_url( @@ -505,7 +513,9 @@ def _redact_netloc(netloc: str) -> Tuple[str]: return (redact_netloc(netloc),) -def split_auth_netloc_from_url(url: str) -> Tuple[str, str, Tuple[str, str]]: +def split_auth_netloc_from_url( + url: str, +) -> Tuple[str, str, Tuple[Optional[str], Optional[str]]]: """ Parse a url into separate netloc, auth, and url with no auth. @@ -527,13 +537,20 @@ def redact_auth_from_url(url: str) -> str: return _transform_url(url, _redact_netloc)[0] +def redact_auth_from_requirement(req: Requirement) -> str: + """Replace the password in a given requirement url with ****.""" + if not req.url: + return str(req) + return str(req).replace(req.url, redact_auth_from_url(req.url)) + + +@dataclass(frozen=True) class HiddenText: - def __init__(self, secret: str, redacted: str) -> None: - self.secret = secret - self.redacted = redacted + secret: str + redacted: str def __repr__(self) -> str: - return "".format(str(self)) + return f"" def __str__(self) -> str: return self.redacted diff --git a/src/pip/_internal/utils/models.py b/src/pip/_internal/utils/models.py deleted file mode 100644 index b6bb21a8b26..00000000000 --- a/src/pip/_internal/utils/models.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Utilities for defining models -""" - -import operator -from typing import Any, Callable, Type - - -class KeyBasedCompareMixin: - """Provides comparison capabilities that is based on a key""" - - __slots__ = ["_compare_key", "_defining_class"] - - def __init__(self, key: Any, defining_class: Type["KeyBasedCompareMixin"]) -> None: - self._compare_key = key - self._defining_class = defining_class - - def __hash__(self) -> int: - return hash(self._compare_key) - - def __lt__(self, other: Any) -> bool: - return self._compare(other, operator.__lt__) - - def __le__(self, other: Any) -> bool: - return self._compare(other, operator.__le__) - - def __gt__(self, other: Any) -> bool: - return self._compare(other, operator.__gt__) - - def __ge__(self, other: Any) -> bool: - return self._compare(other, operator.__ge__) - - def __eq__(self, other: Any) -> bool: - return self._compare(other, operator.__eq__) - - def _compare(self, other: Any, method: Callable[[Any, Any], bool]) -> bool: - if not isinstance(other, self._defining_class): - return NotImplemented - - return method(self._compare_key, other._compare_key) diff --git a/src/pip/_internal/utils/subprocess.py b/src/pip/_internal/utils/subprocess.py index 1e8ff50edfb..cb2e23f007a 100644 --- a/src/pip/_internal/utils/subprocess.py +++ b/src/pip/_internal/utils/subprocess.py @@ -2,16 +2,7 @@ import os import shlex import subprocess -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Iterable, - List, - Mapping, - Optional, - Union, -) +from typing import Any, Callable, Iterable, List, Literal, Mapping, Optional, Union from pip._vendor.rich.markup import escape @@ -20,12 +11,6 @@ from pip._internal.utils.logging import VERBOSE, subprocess_logger from pip._internal.utils.misc import HiddenText -if TYPE_CHECKING: - # Literal was introduced in Python 3.8. - # - # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7. 
- from typing import Literal - CommandArgs = List[Union[str, HiddenText]] @@ -209,7 +194,7 @@ def call_subprocess( output_lines=all_output if not showing_subprocess else None, ) if log_failed_cmd: - subprocess_logger.error("[present-rich] %s", error) + subprocess_logger.error("%s", error, extra={"rich": True}) subprocess_logger.verbose( "[bold magenta]full command[/]: [blue]%s[/]", escape(format_command_args(cmd)), diff --git a/src/pip/_internal/utils/temp_dir.py b/src/pip/_internal/utils/temp_dir.py index 8ee8a1cb180..4eec5f37f76 100644 --- a/src/pip/_internal/utils/temp_dir.py +++ b/src/pip/_internal/utils/temp_dir.py @@ -3,8 +3,19 @@ import logging import os.path import tempfile +import traceback from contextlib import ExitStack, contextmanager -from typing import Any, Dict, Generator, Optional, TypeVar, Union +from pathlib import Path +from typing import ( + Any, + Callable, + Dict, + Generator, + List, + Optional, + TypeVar, + Union, +) from pip._internal.utils.misc import enum, rmtree @@ -106,6 +117,7 @@ def __init__( delete: Union[bool, None, _Default] = _default, kind: str = "temp", globally_managed: bool = False, + ignore_cleanup_errors: bool = True, ): super().__init__() @@ -128,6 +140,7 @@ def __init__( self._deleted = False self.delete = delete self.kind = kind + self.ignore_cleanup_errors = ignore_cleanup_errors if globally_managed: assert _tempdir_manager is not None @@ -170,7 +183,44 @@ def cleanup(self) -> None: self._deleted = True if not os.path.exists(self._path): return - rmtree(self._path) + + errors: List[BaseException] = [] + + def onerror( + func: Callable[..., Any], + path: Path, + exc_val: BaseException, + ) -> None: + """Log a warning for a `rmtree` error and continue""" + formatted_exc = "\n".join( + traceback.format_exception_only(type(exc_val), exc_val) + ) + formatted_exc = formatted_exc.rstrip() # remove trailing new line + if func in (os.unlink, os.remove, os.rmdir): + logger.debug( + "Failed to remove a temporary file '%s' due to %s.\n", + path, + formatted_exc, + ) + else: + logger.debug("%s failed with %s.", func.__qualname__, formatted_exc) + errors.append(exc_val) + + if self.ignore_cleanup_errors: + try: + # first try with tenacity; retrying to handle ephemeral errors + rmtree(self._path, ignore_errors=False) + except OSError: + # last pass ignore/log all errors + rmtree(self._path, onexc=onerror) + if errors: + logger.warning( + "Failed to remove contents in a temporary directory '%s'.\n" + "You can safely remove it manually.", + self._path, + ) + else: + rmtree(self._path) class AdjacentTempDirectory(TempDirectory): diff --git a/src/pip/_internal/utils/urls.py b/src/pip/_internal/utils/urls.py index 6ba2e04f350..9f34f882a1a 100644 --- a/src/pip/_internal/utils/urls.py +++ b/src/pip/_internal/utils/urls.py @@ -2,17 +2,10 @@ import string import urllib.parse import urllib.request -from typing import Optional from .compat import WINDOWS -def get_url_scheme(url: str) -> Optional[str]: - if ":" not in url: - return None - return url.split(":", 1)[0].lower() - - def path_to_url(path: str) -> str: """ Convert a path to a file: URL. 
The path will be made absolute and have diff --git a/src/pip/_internal/utils/wheel.py b/src/pip/_internal/utils/wheel.py index e5e3f34ed81..f85aee8a3f9 100644 --- a/src/pip/_internal/utils/wheel.py +++ b/src/pip/_internal/utils/wheel.py @@ -28,7 +28,7 @@ def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]: metadata = wheel_metadata(wheel_zip, info_dir) version = wheel_version(metadata) except UnsupportedWheel as e: - raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e))) + raise UnsupportedWheel(f"{name} has an invalid wheel, {e}") check_compatibility(version, name) @@ -60,9 +60,7 @@ def wheel_dist_info_dir(source: ZipFile, name: str) -> str: canonical_name = canonicalize_name(name) if not info_dir_name.startswith(canonical_name): raise UnsupportedWheel( - ".dist-info directory {!r} does not start with {!r}".format( - info_dir, canonical_name - ) + f".dist-info directory {info_dir!r} does not start with {canonical_name!r}" ) return info_dir diff --git a/src/pip/_internal/vcs/bazaar.py b/src/pip/_internal/vcs/bazaar.py index 20a17ed0927..c754b7cc5c0 100644 --- a/src/pip/_internal/vcs/bazaar.py +++ b/src/pip/_internal/vcs/bazaar.py @@ -44,13 +44,13 @@ def fetch_new( display_path(dest), ) if verbosity <= 0: - flag = "--quiet" + flags = ["--quiet"] elif verbosity == 1: - flag = "" + flags = [] else: - flag = f"-{'v'*verbosity}" + flags = [f"-{'v'*verbosity}"] cmd_args = make_command( - "checkout", "--lightweight", flag, rev_options.to_args(), url, dest + "checkout", "--lightweight", *flags, rev_options.to_args(), url, dest ) self.run_command(cmd_args) diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index 8d1d4993767..0425debb3ae 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -4,6 +4,7 @@ import re import urllib.parse import urllib.request +from dataclasses import replace from typing import List, Optional, Tuple from pip._internal.exceptions import BadCommand, InstallationError @@ -101,7 +102,7 @@ def get_git_version(self) -> Tuple[int, ...]: if not match: logger.warning("Can't parse git version: %s", version) return () - return tuple(int(c) for c in match.groups()) + return (int(match.group(1)), int(match.group(2))) @classmethod def get_current_branch(cls, location: str) -> Optional[str]: @@ -217,7 +218,7 @@ def resolve_revision( if sha is not None: rev_options = rev_options.make_new(sha) - rev_options.branch_name = rev if is_branch else None + rev_options = replace(rev_options, branch_name=(rev if is_branch else None)) return rev_options diff --git a/src/pip/_internal/vcs/mercurial.py b/src/pip/_internal/vcs/mercurial.py index 2a005e0aff2..c183d41d09c 100644 --- a/src/pip/_internal/vcs/mercurial.py +++ b/src/pip/_internal/vcs/mercurial.py @@ -31,7 +31,7 @@ class Mercurial(VersionControl): @staticmethod def get_base_rev_args(rev: str) -> List[str]: - return [rev] + return [f"--rev={rev}"] def fetch_new( self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py index 16d93a67b7b..f359266d9c0 100644 --- a/src/pip/_internal/vcs/subversion.py +++ b/src/pip/_internal/vcs/subversion.py @@ -288,12 +288,12 @@ def fetch_new( display_path(dest), ) if verbosity <= 0: - flag = "--quiet" + flags = ["--quiet"] else: - flag = "" + flags = [] cmd_args = make_command( "checkout", - flag, + *flags, self.get_remote_call_options(), rev_options.to_args(), url, diff --git a/src/pip/_internal/vcs/versioncontrol.py 
b/src/pip/_internal/vcs/versioncontrol.py index 02bbf68e7ad..bd7d509a3ea 100644 --- a/src/pip/_internal/vcs/versioncontrol.py +++ b/src/pip/_internal/vcs/versioncontrol.py @@ -5,13 +5,14 @@ import shutil import sys import urllib.parse +from dataclasses import dataclass, field from typing import ( - TYPE_CHECKING, Any, Dict, Iterable, Iterator, List, + Literal, Mapping, Optional, Tuple, @@ -37,14 +38,6 @@ format_command_args, make_command, ) -from pip._internal.utils.urls import get_url_scheme - -if TYPE_CHECKING: - # Literal was introduced in Python 3.8. - # - # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7. - from typing import Literal - __all__ = ["vcs"] @@ -58,8 +51,8 @@ def is_url(name: str) -> bool: """ Return true if the name looks like a URL. """ - scheme = get_url_scheme(name) - if scheme is None: + scheme = urllib.parse.urlsplit(name).scheme + if not scheme: return False return scheme in ["http", "https", "file", "ftp"] + vcs.all_schemes @@ -121,34 +114,22 @@ def __init__(self, url: str): self.url = url +@dataclass(frozen=True) class RevOptions: - """ Encapsulates a VCS-specific revision to install, along with any VCS install options. - Instances of this class should be treated as if immutable. + Args: + vc_class: a VersionControl subclass. + rev: the name of the revision to install. + extra_args: a list of extra options. """ - def __init__( - self, - vc_class: Type["VersionControl"], - rev: Optional[str] = None, - extra_args: Optional[CommandArgs] = None, - ) -> None: - """ - Args: - vc_class: a VersionControl subclass. - rev: the name of the revision to install. - extra_args: a list of extra options. - """ - if extra_args is None: - extra_args = [] - - self.extra_args = extra_args - self.rev = rev - self.vc_class = vc_class - self.branch_name: Optional[str] = None + vc_class: Type["VersionControl"] + rev: Optional[str] = None + extra_args: CommandArgs = field(default_factory=list) + branch_name: Optional[str] = None def __repr__(self) -> str: return f"<RevOptions {self.vc_class.name}: rev={self.rev!r}>" @@ -362,7 +343,7 @@ def make_rev_options( rev: the name of a revision to install. extra_args: a list of extra options. """ - return RevOptions(cls, rev, extra_args=extra_args) + return RevOptions(cls, rev, extra_args=extra_args or []) @classmethod def _is_local_repository(cls, repo: str) -> bool: @@ -405,9 +386,9 @@ def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: scheme, netloc, path, query, frag = urllib.parse.urlsplit(url) if "+" not in scheme: raise ValueError( - "Sorry, {!r} is a malformed VCS url. " + f"Sorry, {url!r} is a malformed VCS url. " "The format is <vcs>+<protocol>://<url>, " - "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url) + "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp" ) # Remove the vcs prefix. scheme = scheme.split("+", 1)[1] @@ -417,9 +398,9 @@ def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: path, rev = path.rsplit("@", 1) if not rev: raise InstallationError( - "The URL {!r} has an empty revision (after @) " + f"The URL {url!r} has an empty revision (after @) " "which is not supported. Include a revision after @ " - "or remove @ from the URL.".format(url) + "or remove @ from the URL." ) url = urllib.parse.urlunsplit((scheme, netloc, path, query, "")) return url, rev, user_pass @@ -566,7 +547,7 @@ def obtain(self, dest: str, url: HiddenText, verbosity: int) -> None: self.name, url, ) - response = ask_path_exists("What to do? {}".format(prompt[0]), prompt[1]) + response = ask_path_exists(f"What to do? 
{prompt[0]}", prompt[1]) if response == "a": sys.exit(-1) diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py index 60d75dd18ef..93f8e1f5b2f 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py @@ -70,7 +70,7 @@ def _should_build( if req.editable: # we only build PEP 660 editable requirements - return req.supports_pyproject_editable() + return req.supports_pyproject_editable return True @@ -140,15 +140,15 @@ def _verify_one(req: InstallRequirement, wheel_path: str) -> None: w = Wheel(os.path.basename(wheel_path)) if canonicalize_name(w.name) != canonical_name: raise InvalidWheelFilename( - "Wheel has unexpected file name: expected {!r}, " - "got {!r}".format(canonical_name, w.name), + f"Wheel has unexpected file name: expected {canonical_name!r}, " + f"got {w.name!r}", ) dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name) dist_verstr = str(dist.version) if canonicalize_version(dist_verstr) != canonicalize_version(w.version): raise InvalidWheelFilename( - "Wheel has unexpected file name: expected {!r}, " - "got {!r}".format(dist_verstr, w.version), + f"Wheel has unexpected file name: expected {dist_verstr!r}, " + f"got {w.version!r}", ) metadata_version_value = dist.metadata_version if metadata_version_value is None: @@ -160,8 +160,7 @@ def _verify_one(req: InstallRequirement, wheel_path: str) -> None: raise UnsupportedWheel(msg) if metadata_version >= Version("1.2") and not isinstance(dist.version, Version): raise UnsupportedWheel( - "Metadata 1.2 mandates PEP 440 version, " - "but {!r} is not".format(dist_verstr) + f"Metadata 1.2 mandates PEP 440 version, but {dist_verstr!r} is not" ) diff --git a/src/pip/_vendor/__init__.py b/src/pip/_vendor/__init__.py index b22f7abb93b..c1884baf3d1 100644 --- a/src/pip/_vendor/__init__.py +++ b/src/pip/_vendor/__init__.py @@ -117,4 +117,5 @@ def vendored(modulename): vendored("rich.traceback") vendored("tenacity") vendored("tomli") + vendored("truststore") vendored("urllib3") diff --git a/src/pip/_vendor/cachecontrol.pyi b/src/pip/_vendor/cachecontrol.pyi deleted file mode 100644 index 636a66bacaf..00000000000 --- a/src/pip/_vendor/cachecontrol.pyi +++ /dev/null @@ -1 +0,0 @@ -from cachecontrol import * \ No newline at end of file diff --git a/src/pip/_vendor/cachecontrol/__init__.py b/src/pip/_vendor/cachecontrol/__init__.py index f631ae6df47..4d20bc9b12a 100644 --- a/src/pip/_vendor/cachecontrol/__init__.py +++ b/src/pip/_vendor/cachecontrol/__init__.py @@ -8,11 +8,21 @@ """ __author__ = "Eric Larson" __email__ = "eric@ionrock.org" -__version__ = "0.12.11" +__version__ = "0.13.1" -from .wrapper import CacheControl -from .adapter import CacheControlAdapter -from .controller import CacheController +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.controller import CacheController +from pip._vendor.cachecontrol.wrapper import CacheControl + +__all__ = [ + "__author__", + "__email__", + "__version__", + "CacheControlAdapter", + "CacheController", + "CacheControl", +] import logging + logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/src/pip/_vendor/cachecontrol/_cmd.py b/src/pip/_vendor/cachecontrol/_cmd.py index 4266b5ee92a..2c84208a5d8 100644 --- a/src/pip/_vendor/cachecontrol/_cmd.py +++ b/src/pip/_vendor/cachecontrol/_cmd.py @@ -1,8 +1,11 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations import logging +from argparse 
import ArgumentParser +from typing import TYPE_CHECKING from pip._vendor import requests @@ -10,16 +13,19 @@ from pip._vendor.cachecontrol.cache import DictCache from pip._vendor.cachecontrol.controller import logger -from argparse import ArgumentParser +if TYPE_CHECKING: + from argparse import Namespace + from pip._vendor.cachecontrol.controller import CacheController -def setup_logging(): + +def setup_logging() -> None: logger.setLevel(logging.DEBUG) handler = logging.StreamHandler() logger.addHandler(handler) -def get_session(): +def get_session() -> requests.Session: adapter = CacheControlAdapter( DictCache(), cache_etags=True, serializer=None, heuristic=None ) @@ -27,17 +33,17 @@ def get_session(): sess.mount("http://", adapter) sess.mount("https://", adapter) - sess.cache_controller = adapter.controller + sess.cache_controller = adapter.controller # type: ignore[attr-defined] return sess -def get_args(): +def get_args() -> Namespace: parser = ArgumentParser() parser.add_argument("url", help="The URL to try and cache") return parser.parse_args() -def main(args=None): +def main() -> None: args = get_args() sess = get_session() @@ -48,10 +54,13 @@ def main(args=None): setup_logging() # try setting the cache - sess.cache_controller.cache_response(resp.request, resp.raw) + cache_controller: CacheController = ( + sess.cache_controller # type: ignore[attr-defined] + ) + cache_controller.cache_response(resp.request, resp.raw) # Now try to get it - if sess.cache_controller.cached_request(resp.request): + if cache_controller.cached_request(resp.request): print("Cached!") else: print("Not cached :(") diff --git a/src/pip/_vendor/cachecontrol/adapter.py b/src/pip/_vendor/cachecontrol/adapter.py index 94c75e1a05b..3e83e308dba 100644 --- a/src/pip/_vendor/cachecontrol/adapter.py +++ b/src/pip/_vendor/cachecontrol/adapter.py @@ -1,16 +1,26 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -import types import functools +import types import zlib +from typing import TYPE_CHECKING, Any, Collection, Mapping from pip._vendor.requests.adapters import HTTPAdapter -from .controller import CacheController, PERMANENT_REDIRECT_STATUSES -from .cache import DictCache -from .filewrapper import CallbackFileWrapper +from pip._vendor.cachecontrol.cache import DictCache +from pip._vendor.cachecontrol.controller import PERMANENT_REDIRECT_STATUSES, CacheController +from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper + +if TYPE_CHECKING: + from pip._vendor.requests import PreparedRequest, Response + from pip._vendor.urllib3 import HTTPResponse + + from pip._vendor.cachecontrol.cache import BaseCache + from pip._vendor.cachecontrol.heuristics import BaseHeuristic + from pip._vendor.cachecontrol.serialize import Serializer class CacheControlAdapter(HTTPAdapter): @@ -18,16 +28,16 @@ class CacheControlAdapter(HTTPAdapter): def __init__( self, - cache=None, - cache_etags=True, - controller_class=None, - serializer=None, - heuristic=None, - cacheable_methods=None, - *args, - **kw - ): - super(CacheControlAdapter, self).__init__(*args, **kw) + cache: BaseCache | None = None, + cache_etags: bool = True, + controller_class: type[CacheController] | None = None, + serializer: Serializer | None = None, + heuristic: BaseHeuristic | None = None, + cacheable_methods: Collection[str] | None = None, + *args: Any, + **kw: Any, + ) -> None: + super().__init__(*args, **kw) self.cache = DictCache() if cache is None else cache self.heuristic = heuristic 
self.cacheable_methods = cacheable_methods or ("GET",) @@ -37,7 +47,16 @@ def __init__( self.cache, cache_etags=cache_etags, serializer=serializer ) - def send(self, request, cacheable_methods=None, **kw): + def send( + self, + request: PreparedRequest, + stream: bool = False, + timeout: None | float | tuple[float, float] | tuple[float, None] = None, + verify: bool | str = True, + cert: (None | bytes | str | tuple[bytes | str, bytes | str]) = None, + proxies: Mapping[str, str] | None = None, + cacheable_methods: Collection[str] | None = None, + ) -> Response: """ Send a request. Use the request information to see if it exists in the cache and cache the response if we need to and can. @@ -54,13 +73,17 @@ def send(self, request, cacheable_methods=None, **kw): # check for etags and add headers if appropriate request.headers.update(self.controller.conditional_headers(request)) - resp = super(CacheControlAdapter, self).send(request, **kw) + resp = super().send(request, stream, timeout, verify, cert, proxies) return resp def build_response( - self, request, response, from_cache=False, cacheable_methods=None - ): + self, + request: PreparedRequest, + response: HTTPResponse, + from_cache: bool = False, + cacheable_methods: Collection[str] | None = None, + ) -> Response: """ Build a response by making a request or using the cache. @@ -102,36 +125,37 @@ def build_response( else: # Wrap the response file with a wrapper that will cache the # response when the stream has been consumed. - response._fp = CallbackFileWrapper( - response._fp, + response._fp = CallbackFileWrapper( # type: ignore[attr-defined] + response._fp, # type: ignore[attr-defined] functools.partial( self.controller.cache_response, request, response ), ) if response.chunked: - super_update_chunk_length = response._update_chunk_length + super_update_chunk_length = response._update_chunk_length # type: ignore[attr-defined] - def _update_chunk_length(self): + def _update_chunk_length(self: HTTPResponse) -> None: super_update_chunk_length() if self.chunk_left == 0: - self._fp._close() + self._fp._close() # type: ignore[attr-defined] - response._update_chunk_length = types.MethodType( + response._update_chunk_length = types.MethodType( # type: ignore[attr-defined] _update_chunk_length, response ) - resp = super(CacheControlAdapter, self).build_response(request, response) + resp: Response = super().build_response(request, response) # type: ignore[no-untyped-call] # See if we should invalidate the cache. if request.method in self.invalidating_methods and resp.ok: + assert request.url is not None cache_url = self.controller.cache_url(request.url) self.cache.delete(cache_url) # Give the request a from_cache attr to let people use it - resp.from_cache = from_cache + resp.from_cache = from_cache # type: ignore[attr-defined] return resp - def close(self): + def close(self) -> None: self.cache.close() - super(CacheControlAdapter, self).close() + super().close() # type: ignore[no-untyped-call] diff --git a/src/pip/_vendor/cachecontrol/cache.py b/src/pip/_vendor/cachecontrol/cache.py index 2a965f595ff..3293b0057c7 100644 --- a/src/pip/_vendor/cachecontrol/cache.py +++ b/src/pip/_vendor/cachecontrol/cache.py @@ -6,38 +6,46 @@ The cache object API for implementing caches. The default is a thread safe in-memory dictionary. 
""" +from __future__ import annotations + from threading import Lock +from typing import IO, TYPE_CHECKING, MutableMapping +if TYPE_CHECKING: + from datetime import datetime -class BaseCache(object): - def get(self, key): +class BaseCache: + def get(self, key: str) -> bytes | None: raise NotImplementedError() - def set(self, key, value, expires=None): + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: raise NotImplementedError() - def delete(self, key): + def delete(self, key: str) -> None: raise NotImplementedError() - def close(self): + def close(self) -> None: pass class DictCache(BaseCache): - - def __init__(self, init_dict=None): + def __init__(self, init_dict: MutableMapping[str, bytes] | None = None) -> None: self.lock = Lock() self.data = init_dict or {} - def get(self, key): + def get(self, key: str) -> bytes | None: return self.data.get(key, None) - def set(self, key, value, expires=None): + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: with self.lock: self.data.update({key: value}) - def delete(self, key): + def delete(self, key: str) -> None: with self.lock: if key in self.data: self.data.pop(key) @@ -55,10 +63,11 @@ class SeparateBodyBaseCache(BaseCache): Similarly, the body should be loaded separately via ``get_body()``. """ - def set_body(self, key, body): + + def set_body(self, key: str, body: bytes) -> None: raise NotImplementedError() - def get_body(self, key): + def get_body(self, key: str) -> IO[bytes] | None: """ Return the body as file-like object. """ diff --git a/src/pip/_vendor/cachecontrol/caches/__init__.py b/src/pip/_vendor/cachecontrol/caches/__init__.py index 37827291fb5..24ff469ff98 100644 --- a/src/pip/_vendor/cachecontrol/caches/__init__.py +++ b/src/pip/_vendor/cachecontrol/caches/__init__.py @@ -2,8 +2,7 @@ # # SPDX-License-Identifier: Apache-2.0 -from .file_cache import FileCache, SeparateBodyFileCache -from .redis_cache import RedisCache - +from pip._vendor.cachecontrol.caches.file_cache import FileCache, SeparateBodyFileCache +from pip._vendor.cachecontrol.caches.redis_cache import RedisCache __all__ = ["FileCache", "SeparateBodyFileCache", "RedisCache"] diff --git a/src/pip/_vendor/cachecontrol/caches/file_cache.py b/src/pip/_vendor/cachecontrol/caches/file_cache.py index f1ddb2ebdf9..1fd28013084 100644 --- a/src/pip/_vendor/cachecontrol/caches/file_cache.py +++ b/src/pip/_vendor/cachecontrol/caches/file_cache.py @@ -1,22 +1,23 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations import hashlib import os from textwrap import dedent +from typing import IO, TYPE_CHECKING -from ..cache import BaseCache, SeparateBodyBaseCache -from ..controller import CacheController +from pip._vendor.cachecontrol.cache import BaseCache, SeparateBodyBaseCache +from pip._vendor.cachecontrol.controller import CacheController -try: - FileNotFoundError -except NameError: - # py2.X - FileNotFoundError = (IOError, OSError) +if TYPE_CHECKING: + from datetime import datetime + from filelock import BaseFileLock -def _secure_open_write(filename, fmode): + +def _secure_open_write(filename: str, fmode: int) -> IO[bytes]: # We only want to write to this file, so open it in write only mode flags = os.O_WRONLY @@ -39,7 +40,7 @@ def _secure_open_write(filename, fmode): # there try: os.remove(filename) - except (IOError, OSError): + except OSError: # The file must not exist already, so we can just skip ahead to opening pass @@ -62,37 
+63,27 @@ class _FileCacheMixin: def __init__( self, - directory, - forever=False, - filemode=0o0600, - dirmode=0o0700, - use_dir_lock=None, - lock_class=None, - ): - - if use_dir_lock is not None and lock_class is not None: - raise ValueError("Cannot use use_dir_lock and lock_class together") - + directory: str, + forever: bool = False, + filemode: int = 0o0600, + dirmode: int = 0o0700, + lock_class: type[BaseFileLock] | None = None, + ) -> None: try: - from lockfile import LockFile - from lockfile.mkdirlockfile import MkdirLockFile + if lock_class is None: + from filelock import FileLock + + lock_class = FileLock except ImportError: notice = dedent( """ NOTE: In order to use the FileCache you must have - lockfile installed. You can install it via pip: - pip install lockfile + filelock installed. You can install it via pip: + pip install filelock """ ) raise ImportError(notice) - else: - if use_dir_lock: - lock_class = MkdirLockFile - - elif lock_class is None: - lock_class = LockFile - self.directory = directory self.forever = forever self.filemode = filemode @@ -100,17 +91,17 @@ def __init__( self.lock_class = lock_class @staticmethod - def encode(x): + def encode(x: str) -> str: return hashlib.sha224(x.encode()).hexdigest() - def _fn(self, name): + def _fn(self, name: str) -> str: # NOTE: This method should not change as some may depend on it. # See: https://github.com/ionrock/cachecontrol/issues/63 hashed = self.encode(name) parts = list(hashed[:5]) + [hashed] return os.path.join(self.directory, *parts) - def get(self, key): + def get(self, key: str) -> bytes | None: name = self._fn(key) try: with open(name, "rb") as fh: @@ -119,26 +110,28 @@ def get(self, key): except FileNotFoundError: return None - def set(self, key, value, expires=None): + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: name = self._fn(key) self._write(name, value) - def _write(self, path, data: bytes): + def _write(self, path: str, data: bytes) -> None: """ Safely write the data to the given path. """ # Make sure the directory exists try: os.makedirs(os.path.dirname(path), self.dirmode) - except (IOError, OSError): + except OSError: pass - with self.lock_class(path) as lock: + with self.lock_class(path + ".lock"): # Write our actual file - with _secure_open_write(lock.path, self.filemode) as fh: + with _secure_open_write(path, self.filemode) as fh: fh.write(data) - def _delete(self, key, suffix): + def _delete(self, key: str, suffix: str) -> None: name = self._fn(key) + suffix if not self.forever: try: @@ -153,7 +146,7 @@ class FileCache(_FileCacheMixin, BaseCache): downloads. """ - def delete(self, key): + def delete(self, key: str) -> None: self._delete(key, "") @@ -163,23 +156,23 @@ class SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache): peak memory usage. """ - def get_body(self, key): + def get_body(self, key: str) -> IO[bytes] | None: name = self._fn(key) + ".body" try: return open(name, "rb") except FileNotFoundError: return None - def set_body(self, key, body): + def set_body(self, key: str, body: bytes) -> None: name = self._fn(key) + ".body" self._write(name, body) - def delete(self, key): + def delete(self, key: str) -> None: self._delete(key, "") self._delete(key, ".body") -def url_to_file_path(url, filecache): +def url_to_file_path(url: str, filecache: FileCache) -> str: """Return the file cache path based on the URL. This does not ensure the file exists! 
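The file_cache.py hunks above swap the unmaintained `lockfile` dependency for `filelock`, and the lock is now taken on a sibling `<path>.lock` file before the payload itself is written. A minimal sketch of that lock-then-write pattern, assuming only that `filelock` is installed; `locked_write` is an illustrative name, not cachecontrol's API:

```python
import os

from filelock import FileLock  # third-party; assumed installed (pip install filelock)


def locked_write(path: str, data: bytes, dirmode: int = 0o0700) -> None:
    """Write `data` to `path` while holding a sidecar file lock."""
    parent = os.path.dirname(path)
    if parent:
        os.makedirs(parent, dirmode, exist_ok=True)
    # Lock a neighbouring "<path>.lock" file rather than the payload itself,
    # so the target file is only ever opened while the lock is held.
    with FileLock(path + ".lock"):
        with open(path, "wb") as fh:
            fh.write(data)
```

Locking a sidecar file keeps the lock's lifetime independent of the payload, which can then be created or replaced freely under the lock.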
diff --git a/src/pip/_vendor/cachecontrol/caches/redis_cache.py b/src/pip/_vendor/cachecontrol/caches/redis_cache.py index 2cba4b07080..f4f68c47bf6 100644 --- a/src/pip/_vendor/cachecontrol/caches/redis_cache.py +++ b/src/pip/_vendor/cachecontrol/caches/redis_cache.py @@ -1,39 +1,48 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -from __future__ import division -from datetime import datetime +from datetime import datetime, timezone +from typing import TYPE_CHECKING + from pip._vendor.cachecontrol.cache import BaseCache +if TYPE_CHECKING: + from redis import Redis -class RedisCache(BaseCache): - def __init__(self, conn): +class RedisCache(BaseCache): + def __init__(self, conn: Redis[bytes]) -> None: self.conn = conn - def get(self, key): + def get(self, key: str) -> bytes | None: return self.conn.get(key) - def set(self, key, value, expires=None): + def set( + self, key: str, value: bytes, expires: int | datetime | None = None + ) -> None: if not expires: self.conn.set(key, value) elif isinstance(expires, datetime): - expires = expires - datetime.utcnow() - self.conn.setex(key, int(expires.total_seconds()), value) + now_utc = datetime.now(timezone.utc) + if expires.tzinfo is None: + now_utc = now_utc.replace(tzinfo=None) + delta = expires - now_utc + self.conn.setex(key, int(delta.total_seconds()), value) else: self.conn.setex(key, expires, value) - def delete(self, key): + def delete(self, key: str) -> None: self.conn.delete(key) - def clear(self): + def clear(self) -> None: """Helper for clearing all the keys in a database. Use with caution!""" for key in self.conn.keys(): self.conn.delete(key) - def close(self): + def close(self) -> None: """Redis uses connection pooling, no need to close the connection.""" pass diff --git a/src/pip/_vendor/cachecontrol/compat.py b/src/pip/_vendor/cachecontrol/compat.py deleted file mode 100644 index ccec9379dba..00000000000 --- a/src/pip/_vendor/cachecontrol/compat.py +++ /dev/null @@ -1,32 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -try: - from urllib.parse import urljoin -except ImportError: - from urlparse import urljoin - - -try: - import cPickle as pickle -except ImportError: - import pickle - -# Handle the case where the requests module has been patched to not have -# urllib3 bundled as part of its source. -try: - from pip._vendor.requests.packages.urllib3.response import HTTPResponse -except ImportError: - from pip._vendor.urllib3.response import HTTPResponse - -try: - from pip._vendor.requests.packages.urllib3.util import is_fp_closed -except ImportError: - from pip._vendor.urllib3.util import is_fp_closed - -# Replicate some six behaviour -try: - text_type = unicode -except NameError: - text_type = str diff --git a/src/pip/_vendor/cachecontrol/controller.py b/src/pip/_vendor/cachecontrol/controller.py index 7f23529f115..586b9f97b80 100644 --- a/src/pip/_vendor/cachecontrol/controller.py +++ b/src/pip/_vendor/cachecontrol/controller.py @@ -5,17 +5,27 @@ """ The httplib2 algorithms ported for use with requests. 
""" +from __future__ import annotations + +import calendar import logging import re -import calendar import time from email.utils import parsedate_tz +from typing import TYPE_CHECKING, Collection, Mapping from pip._vendor.requests.structures import CaseInsensitiveDict -from .cache import DictCache, SeparateBodyBaseCache -from .serialize import Serializer +from pip._vendor.cachecontrol.cache import DictCache, SeparateBodyBaseCache +from pip._vendor.cachecontrol.serialize import Serializer + +if TYPE_CHECKING: + from typing import Literal + + from pip._vendor.requests import PreparedRequest + from pip._vendor.urllib3 import HTTPResponse + from pip._vendor.cachecontrol.cache import BaseCache logger = logging.getLogger(__name__) @@ -24,20 +34,26 @@ PERMANENT_REDIRECT_STATUSES = (301, 308) -def parse_uri(uri): +def parse_uri(uri: str) -> tuple[str, str, str, str, str]: """Parses a URI using the regex given in Appendix B of RFC 3986. (scheme, authority, path, query, fragment) = parse_uri(uri) """ - groups = URI.match(uri).groups() + match = URI.match(uri) + assert match is not None + groups = match.groups() return (groups[1], groups[3], groups[4], groups[6], groups[8]) -class CacheController(object): +class CacheController: """An interface to see if request should cached or not.""" def __init__( - self, cache=None, cache_etags=True, serializer=None, status_codes=None + self, + cache: BaseCache | None = None, + cache_etags: bool = True, + serializer: Serializer | None = None, + status_codes: Collection[int] | None = None, ): self.cache = DictCache() if cache is None else cache self.cache_etags = cache_etags @@ -45,7 +61,7 @@ def __init__( self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308) @classmethod - def _urlnorm(cls, uri): + def _urlnorm(cls, uri: str) -> str: """Normalize the URL to create a safe key for the cache""" (scheme, authority, path, query, fragment) = parse_uri(uri) if not scheme or not authority: @@ -65,10 +81,10 @@ def _urlnorm(cls, uri): return defrag_uri @classmethod - def cache_url(cls, uri): + def cache_url(cls, uri: str) -> str: return cls._urlnorm(uri) - def parse_cache_control(self, headers): + def parse_cache_control(self, headers: Mapping[str, str]) -> dict[str, int | None]: known_directives = { # https://tools.ietf.org/html/rfc7234#section-5.2 "max-age": (int, True), @@ -87,7 +103,7 @@ def parse_cache_control(self, headers): cc_headers = headers.get("cache-control", headers.get("Cache-Control", "")) - retval = {} + retval: dict[str, int | None] = {} for cc_directive in cc_headers.split(","): if not cc_directive.strip(): @@ -122,11 +138,33 @@ def parse_cache_control(self, headers): return retval - def cached_request(self, request): + def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None: + """ + Load a cached response, or return None if it's not available. + """ + cache_url = request.url + assert cache_url is not None + cache_data = self.cache.get(cache_url) + if cache_data is None: + logger.debug("No cache entry available") + return None + + if isinstance(self.cache, SeparateBodyBaseCache): + body_file = self.cache.get_body(cache_url) + else: + body_file = None + + result = self.serializer.loads(request, cache_data, body_file) + if result is None: + logger.warning("Cache entry deserialization failed, entry ignored") + return result + + def cached_request(self, request: PreparedRequest) -> HTTPResponse | Literal[False]: """ Return a cached response if it exists in the cache, otherwise return False. 
""" + assert request.url is not None cache_url = self.cache_url(request.url) logger.debug('Looking up "%s" in the cache', cache_url) cc = self.parse_cache_control(request.headers) @@ -140,21 +178,9 @@ def cached_request(self, request): logger.debug('Request header has "max_age" as 0, cache bypassed') return False - # Request allows serving from the cache, let's see if we find something - cache_data = self.cache.get(cache_url) - if cache_data is None: - logger.debug("No cache entry available") - return False - - if isinstance(self.cache, SeparateBodyBaseCache): - body_file = self.cache.get_body(cache_url) - else: - body_file = None - - # Check whether it can be deserialized - resp = self.serializer.loads(request, cache_data, body_file) + # Check whether we can load the response from the cache: + resp = self._load_from_cache(request) if not resp: - logger.warning("Cache entry deserialization failed, entry ignored") return False # If we have a cached permanent redirect, return it immediately. We @@ -174,7 +200,7 @@ def cached_request(self, request): logger.debug(msg) return resp - headers = CaseInsensitiveDict(resp.headers) + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers) if not headers or "date" not in headers: if "etag" not in headers: # Without date or etag, the cached response can never be used @@ -185,7 +211,9 @@ def cached_request(self, request): return False now = time.time() - date = calendar.timegm(parsedate_tz(headers["date"])) + time_tuple = parsedate_tz(headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) current_age = max(0, now - date) logger.debug("Current age based on date: %i", current_age) @@ -199,28 +227,30 @@ def cached_request(self, request): freshness_lifetime = 0 # Check the max-age pragma in the cache control header - if "max-age" in resp_cc: - freshness_lifetime = resp_cc["max-age"] + max_age = resp_cc.get("max-age") + if max_age is not None: + freshness_lifetime = max_age logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime) # If there isn't a max-age, check for an expires header elif "expires" in headers: expires = parsedate_tz(headers["expires"]) if expires is not None: - expire_time = calendar.timegm(expires) - date + expire_time = calendar.timegm(expires[:6]) - date freshness_lifetime = max(0, expire_time) logger.debug("Freshness lifetime from expires: %i", freshness_lifetime) # Determine if we are setting freshness limit in the # request. Note, this overrides what was in the response. 
- if "max-age" in cc: - freshness_lifetime = cc["max-age"] + max_age = cc.get("max-age") + if max_age is not None: + freshness_lifetime = max_age logger.debug( "Freshness lifetime from request max-age: %i", freshness_lifetime ) - if "min-fresh" in cc: - min_fresh = cc["min-fresh"] + min_fresh = cc.get("min-fresh") + if min_fresh is not None: # adjust our current age by our min fresh current_age += min_fresh logger.debug("Adjusted current age from min-fresh: %i", current_age) @@ -239,13 +269,12 @@ def cached_request(self, request): # return the original handler return False - def conditional_headers(self, request): - cache_url = self.cache_url(request.url) - resp = self.serializer.loads(request, self.cache.get(cache_url)) + def conditional_headers(self, request: PreparedRequest) -> dict[str, str]: + resp = self._load_from_cache(request) new_headers = {} if resp: - headers = CaseInsensitiveDict(resp.headers) + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers) if "etag" in headers: new_headers["If-None-Match"] = headers["ETag"] @@ -255,7 +284,14 @@ def conditional_headers(self, request): return new_headers - def _cache_set(self, cache_url, request, response, body=None, expires_time=None): + def _cache_set( + self, + cache_url: str, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + expires_time: int | None = None, + ) -> None: """ Store the data in the cache. """ @@ -267,7 +303,10 @@ def _cache_set(self, cache_url, request, response, body=None, expires_time=None) self.serializer.dumps(request, response, b""), expires=expires_time, ) - self.cache.set_body(cache_url, body) + # body is None can happen when, for example, we're only updating + # headers, as is the case in update_cached_response(). + if body is not None: + self.cache.set_body(cache_url, body) else: self.cache.set( cache_url, @@ -275,7 +314,13 @@ def _cache_set(self, cache_url, request, response, body=None, expires_time=None) expires=expires_time, ) - def cache_response(self, request, response, body=None, status_codes=None): + def cache_response( + self, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + status_codes: Collection[int] | None = None, + ) -> None: """ Algorithm for caching requests. 
@@ -290,10 +335,14 @@ def cache_response(self, request, response, body=None, status_codes=None): ) return - response_headers = CaseInsensitiveDict(response.headers) + response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + response.headers + ) if "date" in response_headers: - date = calendar.timegm(parsedate_tz(response_headers["date"])) + time_tuple = parsedate_tz(response_headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) else: date = 0 @@ -312,6 +361,7 @@ def cache_response(self, request, response, body=None, status_codes=None): cc_req = self.parse_cache_control(request.headers) cc = self.parse_cache_control(response_headers) + assert request.url is not None cache_url = self.cache_url(request.url) logger.debug('Updating cache with response from "%s"', cache_url) @@ -344,11 +394,11 @@ def cache_response(self, request, response, body=None, status_codes=None): if response_headers.get("expires"): expires = parsedate_tz(response_headers["expires"]) if expires is not None: - expires_time = calendar.timegm(expires) - date + expires_time = calendar.timegm(expires[:6]) - date expires_time = max(expires_time, 14 * 86400) - logger.debug("etag object cached for {0} seconds".format(expires_time)) + logger.debug(f"etag object cached for {expires_time} seconds") logger.debug("Caching due to etag") self._cache_set(cache_url, request, response, body, expires_time) @@ -362,11 +412,14 @@ def cache_response(self, request, response, body=None, status_codes=None): # is no date header then we can't do anything about expiring # the cache. elif "date" in response_headers: - date = calendar.timegm(parsedate_tz(response_headers["date"])) + time_tuple = parsedate_tz(response_headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) # cache when there is a max-age > 0 - if "max-age" in cc and cc["max-age"] > 0: + max_age = cc.get("max-age") + if max_age is not None and max_age > 0: logger.debug("Caching b/c date exists and max-age > 0") - expires_time = cc["max-age"] + expires_time = max_age self._cache_set( cache_url, request, @@ -381,12 +434,12 @@ def cache_response(self, request, response, body=None, status_codes=None): if response_headers["expires"]: expires = parsedate_tz(response_headers["expires"]) if expires is not None: - expires_time = calendar.timegm(expires) - date + expires_time = calendar.timegm(expires[:6]) - date else: expires_time = None logger.debug( - "Caching b/c of expires header. expires in {0} seconds".format( + "Caching b/c of expires header. expires in {} seconds".format( expires_time ) ) @@ -398,16 +451,18 @@ def cache_response(self, request, response, body=None, status_codes=None): expires_time, ) - def update_cached_response(self, request, response): + def update_cached_response( + self, request: PreparedRequest, response: HTTPResponse + ) -> HTTPResponse: """On a 304 we will get a new set of headers that we want to update our cached value with, assuming we have one. This should only ever be called when we've sent an ETag and gotten a 304 as the response. 
""" + assert request.url is not None cache_url = self.cache_url(request.url) - - cached_response = self.serializer.loads(request, self.cache.get(cache_url)) + cached_response = self._load_from_cache(request) if not cached_response: # we didn't have a cached response @@ -423,11 +478,11 @@ def update_cached_response(self, request, response): excluded_headers = ["content-length"] cached_response.headers.update( - dict( - (k, v) - for k, v in response.headers.items() + { + k: v + for k, v in response.headers.items() # type: ignore[no-untyped-call] if k.lower() not in excluded_headers - ) + } ) # we want a 200 b/c we have content via the cache diff --git a/src/pip/_vendor/cachecontrol/filewrapper.py b/src/pip/_vendor/cachecontrol/filewrapper.py index f5ed5f6f6ec..25143902a26 100644 --- a/src/pip/_vendor/cachecontrol/filewrapper.py +++ b/src/pip/_vendor/cachecontrol/filewrapper.py @@ -1,12 +1,17 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -from tempfile import NamedTemporaryFile import mmap +from tempfile import NamedTemporaryFile +from typing import TYPE_CHECKING, Any, Callable + +if TYPE_CHECKING: + from http.client import HTTPResponse -class CallbackFileWrapper(object): +class CallbackFileWrapper: """ Small wrapper around a fp object which will tee everything read into a buffer, and when that file is closed it will execute a callback with the @@ -25,12 +30,14 @@ class CallbackFileWrapper(object): performance impact. """ - def __init__(self, fp, callback): + def __init__( + self, fp: HTTPResponse, callback: Callable[[bytes], None] | None + ) -> None: self.__buf = NamedTemporaryFile("rb+", delete=True) self.__fp = fp self.__callback = callback - def __getattr__(self, name): + def __getattr__(self, name: str) -> Any: # The vaguaries of garbage collection means that self.__fp is # not always set. By using __getattribute__ and the private # name[0] allows looking up the attribute value and raising an @@ -42,7 +49,7 @@ def __getattr__(self, name): fp = self.__getattribute__("_CallbackFileWrapper__fp") return getattr(fp, name) - def __is_fp_closed(self): + def __is_fp_closed(self) -> bool: try: return self.__fp.fp is None @@ -50,7 +57,8 @@ def __is_fp_closed(self): pass try: - return self.__fp.closed + closed: bool = self.__fp.closed + return closed except AttributeError: pass @@ -59,7 +67,7 @@ def __is_fp_closed(self): # TODO: Add some logging here... return False - def _close(self): + def _close(self) -> None: if self.__callback: if self.__buf.tell() == 0: # Empty file: @@ -86,8 +94,8 @@ def _close(self): # Important when caching big files. self.__buf.close() - def read(self, amt=None): - data = self.__fp.read(amt) + def read(self, amt: int | None = None) -> bytes: + data: bytes = self.__fp.read(amt) if data: # We may be dealing with b'', a sign that things are over: # it's passed e.g. after we've already closed self.__buf. @@ -97,8 +105,8 @@ def read(self, amt=None): return data - def _safe_read(self, amt): - data = self.__fp._safe_read(amt) + def _safe_read(self, amt: int) -> bytes: + data: bytes = self.__fp._safe_read(amt) # type: ignore[attr-defined] if amt == 2 and data == b"\r\n": # urllib executes this read to toss the CRLF at the end # of the chunk. 
diff --git a/src/pip/_vendor/cachecontrol/heuristics.py b/src/pip/_vendor/cachecontrol/heuristics.py index ebe4a96f589..b9d72ca4ac5 100644 --- a/src/pip/_vendor/cachecontrol/heuristics.py +++ b/src/pip/_vendor/cachecontrol/heuristics.py @@ -1,29 +1,31 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations import calendar import time - +from datetime import datetime, timedelta, timezone from email.utils import formatdate, parsedate, parsedate_tz +from typing import TYPE_CHECKING, Any, Mapping -from datetime import datetime, timedelta +if TYPE_CHECKING: + from pip._vendor.urllib3 import HTTPResponse TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT" -def expire_after(delta, date=None): - date = date or datetime.utcnow() +def expire_after(delta: timedelta, date: datetime | None = None) -> datetime: + date = date or datetime.now(timezone.utc) return date + delta -def datetime_to_header(dt): +def datetime_to_header(dt: datetime) -> str: return formatdate(calendar.timegm(dt.timetuple())) -class BaseHeuristic(object): - - def warning(self, response): +class BaseHeuristic: + def warning(self, response: HTTPResponse) -> str | None: """ Return a valid 1xx warning header value describing the cache adjustments. @@ -34,7 +36,7 @@ def warning(self, response): """ return '110 - "Response is Stale"' - def update_headers(self, response): + def update_headers(self, response: HTTPResponse) -> dict[str, str]: """Update the response headers with any new headers. NOTE: This SHOULD always include some Warning header to @@ -43,7 +45,7 @@ def update_headers(self, response): """ return {} - def apply(self, response): + def apply(self, response: HTTPResponse) -> HTTPResponse: updated_headers = self.update_headers(response) if updated_headers: @@ -61,12 +63,12 @@ class OneDayCache(BaseHeuristic): future. """ - def update_headers(self, response): + def update_headers(self, response: HTTPResponse) -> dict[str, str]: headers = {} if "expires" not in response.headers: date = parsedate(response.headers["date"]) - expires = expire_after(timedelta(days=1), date=datetime(*date[:6])) + expires = expire_after(timedelta(days=1), date=datetime(*date[:6], tzinfo=timezone.utc)) # type: ignore[misc] headers["expires"] = datetime_to_header(expires) headers["cache-control"] = "public" return headers @@ -77,14 +79,14 @@ class ExpiresAfter(BaseHeuristic): Cache **all** requests for a defined time period. """ - def __init__(self, **kw): + def __init__(self, **kw: Any) -> None: self.delta = timedelta(**kw) - def update_headers(self, response): + def update_headers(self, response: HTTPResponse) -> dict[str, str]: expires = expire_after(self.delta) return {"expires": datetime_to_header(expires), "cache-control": "public"} - def warning(self, response): + def warning(self, response: HTTPResponse) -> str | None: tmpl = "110 - Automatically cached for %s. Response might be stale" return tmpl % self.delta @@ -101,12 +103,23 @@ class LastModified(BaseHeuristic): http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 Unlike mozilla we limit this to 24-hr. 
""" + cacheable_by_default_statuses = { - 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 + 200, + 203, + 204, + 206, + 300, + 301, + 404, + 405, + 410, + 414, + 501, } - def update_headers(self, resp): - headers = resp.headers + def update_headers(self, resp: HTTPResponse) -> dict[str, str]: + headers: Mapping[str, str] = resp.headers if "expires" in headers: return {} @@ -120,9 +133,11 @@ def update_headers(self, resp): if "date" not in headers or "last-modified" not in headers: return {} - date = calendar.timegm(parsedate_tz(headers["date"])) + time_tuple = parsedate_tz(headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) last_modified = parsedate(headers["last-modified"]) - if date is None or last_modified is None: + if last_modified is None: return {} now = time.time() @@ -135,5 +150,5 @@ def update_headers(self, resp): expires = date + freshness_lifetime return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))} - def warning(self, resp): + def warning(self, resp: HTTPResponse) -> str | None: return None diff --git a/src/pip/_vendor/cachecontrol/py.typed b/src/pip/_vendor/cachecontrol/py.typed new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/pip/_vendor/cachecontrol/serialize.py b/src/pip/_vendor/cachecontrol/serialize.py index 7fe1a3e33a3..f9e967c3c34 100644 --- a/src/pip/_vendor/cachecontrol/serialize.py +++ b/src/pip/_vendor/cachecontrol/serialize.py @@ -1,78 +1,76 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -import base64 import io -import json -import zlib +from typing import IO, TYPE_CHECKING, Any, Mapping, cast from pip._vendor import msgpack from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.urllib3 import HTTPResponse -from .compat import HTTPResponse, pickle, text_type +if TYPE_CHECKING: + from pip._vendor.requests import PreparedRequest -def _b64_decode_bytes(b): - return base64.b64decode(b.encode("ascii")) +class Serializer: + serde_version = "4" - -def _b64_decode_str(s): - return _b64_decode_bytes(s).decode("utf8") - - -_default_body_read = object() - - -class Serializer(object): - def dumps(self, request, response, body=None): - response_headers = CaseInsensitiveDict(response.headers) + def dumps( + self, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + ) -> bytes: + response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + response.headers + ) if body is None: # When a body isn't passed in, we'll read the response. We # also update the response with a new file handler to be # sure it acts as though it was never read. body = response.read(decode_content=False) - response._fp = io.BytesIO(body) - - # NOTE: This is all a bit weird, but it's really important that on - # Python 2.x these objects are unicode and not str, even when - # they contain only ascii. The problem here is that msgpack - # understands the difference between unicode and bytes and we - # have it set to differentiate between them, however Python 2 - # doesn't know the difference. Forcing these to unicode will be - # enough to have msgpack know the difference. 
+ response._fp = io.BytesIO(body) # type: ignore[attr-defined] + response.length_remaining = len(body) + data = { - u"response": { - u"body": body, # Empty bytestring if body is stored separately - u"headers": dict( - (text_type(k), text_type(v)) for k, v in response.headers.items() - ), - u"status": response.status, - u"version": response.version, - u"reason": text_type(response.reason), - u"strict": response.strict, - u"decode_content": response.decode_content, + "response": { + "body": body, # Empty bytestring if body is stored separately + "headers": {str(k): str(v) for k, v in response.headers.items()}, # type: ignore[no-untyped-call] + "status": response.status, + "version": response.version, + "reason": str(response.reason), + "decode_content": response.decode_content, } } # Construct our vary headers - data[u"vary"] = {} - if u"vary" in response_headers: - varied_headers = response_headers[u"vary"].split(",") + data["vary"] = {} + if "vary" in response_headers: + varied_headers = response_headers["vary"].split(",") for header in varied_headers: - header = text_type(header).strip() + header = str(header).strip() header_value = request.headers.get(header, None) if header_value is not None: - header_value = text_type(header_value) - data[u"vary"][header] = header_value + header_value = str(header_value) + data["vary"][header] = header_value + + return b",".join([f"cc={self.serde_version}".encode(), self.serialize(data)]) - return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)]) + def serialize(self, data: dict[str, Any]) -> bytes: + return cast(bytes, msgpack.dumps(data, use_bin_type=True)) - def loads(self, request, data, body_file=None): + def loads( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: # Short circuit if we've been given an empty set of data if not data: - return + return None # Determine what version of the serializer the data was serialized # with @@ -88,18 +86,23 @@ def loads(self, request, data, body_file=None): ver = b"cc=0" # Get the version number out of the cc=N - ver = ver.split(b"=", 1)[-1].decode("ascii") + verstr = ver.split(b"=", 1)[-1].decode("ascii") # Dispatch to the actual load method for the given version try: - return getattr(self, "_loads_v{}".format(ver))(request, data, body_file) + return getattr(self, f"_loads_v{verstr}")(request, data, body_file) # type: ignore[no-any-return] except AttributeError: # This is a version we don't have a loads function for, so we'll # just treat it as a miss and return None - return - - def prepare_response(self, request, cached, body_file=None): + return None + + def prepare_response( + self, + request: PreparedRequest, + cached: Mapping[str, Any], + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: """Verify our vary headers match and construct a real urllib3 HTTPResponse object. """ @@ -108,23 +111,26 @@ def prepare_response(self, request, cached, body_file=None): # This case is also handled in the controller code when creating # a cache entry, but is left here for backwards compatibility. 
if "*" in cached.get("vary", {}): - return + return None # Ensure that the Vary headers for the cached response match our # request for header, value in cached.get("vary", {}).items(): if request.headers.get(header, None) != value: - return + return None body_raw = cached["response"].pop("body") - headers = CaseInsensitiveDict(data=cached["response"]["headers"]) + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + data=cached["response"]["headers"] + ) if headers.get("transfer-encoding", "") == "chunked": headers.pop("transfer-encoding") cached["response"]["headers"] = headers try: + body: IO[bytes] if body_file is None: body = io.BytesIO(body_raw) else: @@ -138,53 +144,63 @@ def prepare_response(self, request, cached, body_file=None): # TypeError: 'str' does not support the buffer interface body = io.BytesIO(body_raw.encode("utf8")) + # Discard any `strict` parameter serialized by older version of cachecontrol. + cached["response"].pop("strict", None) + return HTTPResponse(body=body, preload_content=False, **cached["response"]) - def _loads_v0(self, request, data, body_file=None): + def _loads_v0( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> None: # The original legacy cache data. This doesn't contain enough # information to construct everything we need, so we'll treat this as # a miss. - return - - def _loads_v1(self, request, data, body_file=None): - try: - cached = pickle.loads(data) - except ValueError: - return - - return self.prepare_response(request, cached, body_file) - - def _loads_v2(self, request, data, body_file=None): - assert body_file is None - try: - cached = json.loads(zlib.decompress(data).decode("utf8")) - except (ValueError, zlib.error): - return - - # We need to decode the items that we've base64 encoded - cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"]) - cached["response"]["headers"] = dict( - (_b64_decode_str(k), _b64_decode_str(v)) - for k, v in cached["response"]["headers"].items() - ) - cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"]) - cached["vary"] = dict( - (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) - for k, v in cached["vary"].items() - ) - - return self.prepare_response(request, cached, body_file) - - def _loads_v3(self, request, data, body_file): + return None + + def _loads_v1( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + # The "v1" pickled cache format. This is no longer supported + # for security reasons, so we treat it as a miss. + return None + + def _loads_v2( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + # The "v2" compressed base64 cache format. + # This has been removed due to age and poor size/performance + # characteristics, so we treat it as a miss. + return None + + def _loads_v3( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> None: # Due to Python 2 encoding issues, it's impossible to know for sure # exactly how to load v3 entries, thus we'll treat these as a miss so # that they get rewritten out as v4 entries. 
- return - - def _loads_v4(self, request, data, body_file=None): + return None + + def _loads_v4( + self, + request: PreparedRequest, + data: bytes, + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: try: cached = msgpack.loads(data, raw=False) except ValueError: - return + return None return self.prepare_response(request, cached, body_file) diff --git a/src/pip/_vendor/cachecontrol/wrapper.py b/src/pip/_vendor/cachecontrol/wrapper.py index b6ee7f20398..f618bc363f1 100644 --- a/src/pip/_vendor/cachecontrol/wrapper.py +++ b/src/pip/_vendor/cachecontrol/wrapper.py @@ -1,22 +1,32 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -from .adapter import CacheControlAdapter -from .cache import DictCache +from typing import TYPE_CHECKING, Collection +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.cache import DictCache -def CacheControl( - sess, - cache=None, - cache_etags=True, - serializer=None, - heuristic=None, - controller_class=None, - adapter_class=None, - cacheable_methods=None, -): +if TYPE_CHECKING: + from pip._vendor import requests + + from pip._vendor.cachecontrol.cache import BaseCache + from pip._vendor.cachecontrol.controller import CacheController + from pip._vendor.cachecontrol.heuristics import BaseHeuristic + from pip._vendor.cachecontrol.serialize import Serializer + +def CacheControl( + sess: requests.Session, + cache: BaseCache | None = None, + cache_etags: bool = True, + serializer: Serializer | None = None, + heuristic: BaseHeuristic | None = None, + controller_class: type[CacheController] | None = None, + adapter_class: type[CacheControlAdapter] | None = None, + cacheable_methods: Collection[str] | None = None, +) -> requests.Session: cache = DictCache() if cache is None else cache adapter_class = adapter_class or CacheControlAdapter adapter = adapter_class( diff --git a/src/pip/_vendor/certifi/LICENSE b/src/pip/_vendor/certifi/LICENSE index 0a64774eabe..62b076cdee5 100644 --- a/src/pip/_vendor/certifi/LICENSE +++ b/src/pip/_vendor/certifi/LICENSE @@ -2,7 +2,6 @@ This package contains a modified version of ca-bundle.crt: ca-bundle.crt -- Bundle of CA Root Certificates -Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# This is a bundle of X.509 certificates of public Certificate Authorities (CA). These were automatically extracted from Mozilla's root certificates file (certdata.txt). 
This file can be found in the mozilla source tree: diff --git a/src/pip/_vendor/certifi/__init__.py b/src/pip/_vendor/certifi/__init__.py index a3546f12555..1c91f3ec932 100644 --- a/src/pip/_vendor/certifi/__init__.py +++ b/src/pip/_vendor/certifi/__init__.py @@ -1,4 +1,4 @@ from .core import contents, where __all__ = ["contents", "where"] -__version__ = "2022.12.07" +__version__ = "2024.02.02" diff --git a/src/pip/_vendor/certifi/cacert.pem b/src/pip/_vendor/certifi/cacert.pem index df9e4e3c755..fac3c31909b 100644 --- a/src/pip/_vendor/certifi/cacert.pem +++ b/src/pip/_vendor/certifi/cacert.pem @@ -245,34 +245,6 @@ mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK 4SVhM7JZG+Ju1zdXtg2pEto= -----END CERTIFICATE----- -# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 -# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 -# Label: "Security Communication Root CA" -# Serial: 0 -# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a -# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 -# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c ------BEGIN CERTIFICATE----- -MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY -MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t -dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 -WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD -VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 -9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ -DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 -Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N -QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ -xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G -A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG -kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr -Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 -Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU -JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot -RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== ------END CERTIFICATE----- - # Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com # Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com # Label: "XRamp Global CA Root" @@ -791,34 +763,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END CERTIFICATE----- -# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Label: "Hongkong Post Root CA 1" -# Serial: 1000 -# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca -# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 -# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 ------BEGIN CERTIFICATE----- -MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx -FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg -Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG 
-A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr -b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ -jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn -PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh -ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 -nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h -q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED -MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC -mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 -7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB -oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs -EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO -fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi -AmvZWg== ------END CERTIFICATE----- - # Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. # Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. # Label: "SecureSign RootCA11" @@ -909,49 +853,6 @@ Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH WD9f -----END CERTIFICATE----- -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 6047274297262753887 -# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 -# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa -# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy -MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD -VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp -cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv -ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl -AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF -661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 -am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 -ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 -PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS 
-3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k -SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF -3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM -ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g -StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz -Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB -jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V ------END CERTIFICATE----- - # Issuer: CN=Izenpe.com O=IZENPE S.A. # Subject: CN=Izenpe.com O=IZENPE S.A. # Label: "Izenpe.com" @@ -1676,50 +1577,6 @@ HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= -----END CERTIFICATE----- -# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi -# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi -# Label: "E-Tugra Certification Authority" -# Serial: 7667447206703254355 -# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 -# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 -# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV -BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC -aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV -BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 -Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz -MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ -BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp -em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN -ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY -B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH -D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF -Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo -q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D -k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH -fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut -dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM -ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 -zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn -rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX -U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 -Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 -XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF -Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR -HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY -GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c -77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 -+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK -vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 -FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl -yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P -AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD 
-y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d -NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== ------END CERTIFICATE----- - # Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center # Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center # Label: "T-TeleSec GlobalRoot Class 2" @@ -4397,73 +4254,6 @@ ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR -----END CERTIFICATE----- -# Issuer: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center -# Subject: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center -# Label: "E-Tugra Global Root CA RSA v3" -# Serial: 75951268308633135324246244059508261641472512052 -# MD5 Fingerprint: 22:be:10:f6:c2:f8:03:88:73:5f:33:29:47:28:47:a4 -# SHA1 Fingerprint: e9:a8:5d:22:14:52:1c:5b:aa:0a:b4:be:24:6a:23:8a:c9:ba:e2:a9 -# SHA256 Fingerprint: ef:66:b0:b1:0a:3c:db:9f:2e:36:48:c7:6b:d2:af:18:ea:d2:bf:e6:f1:17:65:5e:28:c4:06:0d:a1:a3:f4:c2 ------BEGIN CERTIFICATE----- -MIIF8zCCA9ugAwIBAgIUDU3FzRYilZYIfrgLfxUGNPt5EDQwDQYJKoZIhvcNAQEL -BQAwgYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUt -VHVncmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYw -JAYDVQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIFJTQSB2MzAeFw0yMDAzMTgw -OTA3MTdaFw00NTAzMTIwOTA3MTdaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMG -QW5rYXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1 -Z3JhIFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBD -QSBSU0EgdjMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCiZvCJt3J7 -7gnJY9LTQ91ew6aEOErxjYG7FL1H6EAX8z3DeEVypi6Q3po61CBxyryfHUuXCscx -uj7X/iWpKo429NEvx7epXTPcMHD4QGxLsqYxYdE0PD0xesevxKenhOGXpOhL9hd8 -7jwH7eKKV9y2+/hDJVDqJ4GohryPUkqWOmAalrv9c/SF/YP9f4RtNGx/ardLAQO/ -rWm31zLZ9Vdq6YaCPqVmMbMWPcLzJmAy01IesGykNz709a/r4d+ABs8qQedmCeFL -l+d3vSFtKbZnwy1+7dZ5ZdHPOrbRsV5WYVB6Ws5OUDGAA5hH5+QYfERaxqSzO8bG -wzrwbMOLyKSRBfP12baqBqG3q+Sx6iEUXIOk/P+2UNOMEiaZdnDpwA+mdPy70Bt4 -znKS4iicvObpCdg604nmvi533wEKb5b25Y08TVJ2Glbhc34XrD2tbKNSEhhw5oBO -M/J+JjKsBY04pOZ2PJ8QaQ5tndLBeSBrW88zjdGUdjXnXVXHt6woq0bM5zshtQoK -5EpZ3IE1S0SVEgpnpaH/WwAH0sDM+T/8nzPyAPiMbIedBi3x7+PmBvrFZhNb/FAH -nnGGstpvdDDPk1Po3CLW3iAfYY2jLqN4MpBs3KwytQXk9TwzDdbgh3cXTJ2w2Amo -DVf3RIXwyAS+XF1a4xeOVGNpf0l0ZAWMowIDAQABo2MwYTAPBgNVHRMBAf8EBTAD -AQH/MB8GA1UdIwQYMBaAFLK0ruYt9ybVqnUtdkvAG1Mh0EjvMB0GA1UdDgQWBBSy -tK7mLfcm1ap1LXZLwBtTIdBI7zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEL -BQADggIBAImocn+M684uGMQQgC0QDP/7FM0E4BQ8Tpr7nym/Ip5XuYJzEmMmtcyQ -6dIqKe6cLcwsmb5FJ+Sxce3kOJUxQfJ9emN438o2Fi+CiJ+8EUdPdk3ILY7r3y18 -Tjvarvbj2l0Upq7ohUSdBm6O++96SmotKygY/r+QLHUWnw/qln0F7psTpURs+APQ -3SPh/QMSEgj0GDSz4DcLdxEBSL9htLX4GdnLTeqjjO/98Aa1bZL0SmFQhO3sSdPk -vmjmLuMxC1QLGpLWgti2omU8ZgT5Vdps+9u1FGZNlIM7zR6mK7L+d0CGq+ffCsn9 -9t2HVhjYsCxVYJb6CH5SkPVLpi6HfMsg2wY+oF0Dd32iPBMbKaITVaA9FCKvb7jQ -mhty3QUBjYZgv6Rn7rWlDdF/5horYmbDB7rnoEgcOMPpRfunf/ztAmgayncSd6YA -VSgU7NbHEqIbZULpkejLPoeJVF3Zr52XnGnnCv8PWniLYypMfUeUP95L6VPQMPHF -9p5J3zugkaOj/s1YzOrfr28oO6Bpm4/srK4rVJ2bBLFHIK+WEj5jlB0E5y67hscM -moi/dkfv97ALl2bSRM9gUgfh1SxKOidhd8rXj+eHDjD/DLsE4mHDosiXYY60MGo8 -bcIHX0pzLz/5FooBZu+6kcpSV3uu1OYP3Qt6f4ueJiDPO++BcYNZ ------END CERTIFICATE----- - -# Issuer: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center -# Subject: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. 
OU=E-Tugra Trust Center -# Label: "E-Tugra Global Root CA ECC v3" -# Serial: 218504919822255052842371958738296604628416471745 -# MD5 Fingerprint: 46:bc:81:bb:f1:b5:1e:f7:4b:96:bc:14:e2:e7:27:64 -# SHA1 Fingerprint: 8a:2f:af:57:53:b1:b0:e6:a1:04:ec:5b:6a:69:71:6d:f6:1c:e2:84 -# SHA256 Fingerprint: 87:3f:46:85:fa:7f:56:36:25:25:2e:6d:36:bc:d7:f1:6f:c2:49:51:f2:64:e4:7e:1b:95:4f:49:08:cd:ca:13 ------BEGIN CERTIFICATE----- -MIICpTCCAiqgAwIBAgIUJkYZdzHhT28oNt45UYbm1JeIIsEwCgYIKoZIzj0EAwMw -gYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUtVHVn -cmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYwJAYD -VQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIEVDQyB2MzAeFw0yMDAzMTgwOTQ2 -NThaFw00NTAzMTIwOTQ2NThaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMGQW5r -YXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1Z3Jh -IFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBDQSBF -Q0MgdjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASOmCm/xxAeJ9urA8woLNheSBkQ -KczLWYHMjLiSF4mDKpL2w6QdTGLVn9agRtwcvHbB40fQWxPa56WzZkjnIZpKT4YK -fWzqTTKACrJ6CZtpS5iB4i7sAnCWH/31Rs7K3IKjYzBhMA8GA1UdEwEB/wQFMAMB -Af8wHwYDVR0jBBgwFoAU/4Ixcj75xGZsrTie0bBRiKWQzPUwHQYDVR0OBBYEFP+C -MXI++cRmbK04ntGwUYilkMz1MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNp -ADBmAjEA5gVYaWHlLcoNy/EZCL3W/VGSGn5jVASQkZo1kTmZ+gepZpO6yGjUij/6 -7W4WAie3AjEA3VoXK3YdZUKWpqxdinlW2Iob35reX8dQj7FbcQwm32pAAOwzkSFx -vmjkI6TZraE3 ------END CERTIFICATE----- - # Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. # Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. # Label: "Security Communication RootCA3" @@ -4525,3 +4315,500 @@ BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu 9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= -----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE 
+AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 +YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo +P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 +eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ +TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: 
CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins 
+S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 +PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j +BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 
+MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF +Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G3" +# Serial: 576386314500428537169965010905813481816650257167 +# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 +# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 +# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 +-----BEGIN CERTIFICATE----- +MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM +BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe +Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw +IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU +cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS +T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK +AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 +nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep +qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA +yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs +hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX +zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv +kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT +f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA +uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB +o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih +MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E +BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 +wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 +XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 +JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j +ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV +VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx +xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on +AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d +7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj +gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV ++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo +FGWsJwt0ivKH +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G4" +# Serial: 451799571007117016466790293371524403291602933463 +# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb +# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a +# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c +-----BEGIN CERTIFICATE----- +MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw +WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y +MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD +VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz +dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx +s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw +LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD +pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE +AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR +UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj +/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Label: "CommScope Public Trust ECC Root-01" +# Serial: 385011430473757362783587124273108818652468453534 +# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16 +# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d +# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b +-----BEGIN CERTIFICATE----- +MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa +Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C +flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE +hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq +hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg +2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS +Um9poIyNStDuiw7LR47QjRE= +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Label: "CommScope Public Trust ECC Root-02" +# Serial: 234015080301808452132356021271193974922492992893 +# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2 +# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5 +# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a +-----BEGIN CERTIFICATE----- +MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa +Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL +j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU 
+v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq +hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n +ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV +mkzw5l4lIhVtwodZ0LKOag== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Label: "CommScope Public Trust RSA Root-01" +# Serial: 354030733275608256394402989253558293562031411421 +# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8 +# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93 +# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1 +NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk +YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh +suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al +DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj +WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl +P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547 +KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p +UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/ +kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO +Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB +Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U +CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ +KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6 +NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ +nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+ +QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v +trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a +aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD +j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4 +Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w +lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn +YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc +icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Label: "CommScope Public Trust RSA Root-02" +# Serial: 480062499834624527752716769107743131258796508494 +# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa +# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae +# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2 
+NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE +NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0 +kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C +rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz +hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2 +LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs +n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku +FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5 +kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3 +wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v +wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs +5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ +KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB +KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3 ++VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme +APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq +pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT +6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF +sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt +PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d +lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670 +v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O +rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7 +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS ECC Root 2020" +# Serial: 72082518505882327255703894282316633856 +# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd +# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec +# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 +-----BEGIN CERTIFICATE----- +MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw +CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH +bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw +MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx +JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE +AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O +tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP +f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA +MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di +z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn +27iQ7t0l +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS RSA Root 2023" +# Serial: 44676229530606711399881795178081572759 +# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 +# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 +# 
SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj +MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 +eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy +MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC +REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG +A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 +cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV +cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA +U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 +Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug +BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy +8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J +co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg +8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 +rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 +mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg ++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX +gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 +p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ +pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm +9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw +M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd +GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ +CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t +xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ +w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK +L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj +X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q +ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm +dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= +-----END CERTIFICATE----- diff --git a/src/pip/_vendor/certifi/core.py b/src/pip/_vendor/certifi/core.py index c3e546604c8..70e0c3bdbd2 100644 --- a/src/pip/_vendor/certifi/core.py +++ b/src/pip/_vendor/certifi/core.py @@ -5,6 +5,10 @@ This module returns the installation location of cacert.pem or its contents. """ import sys +import atexit + +def exit_cacert_ctx() -> None: + _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] if sys.version_info >= (3, 11): @@ -35,6 +39,7 @@ def where() -> str: # we will also store that at the global level as well. _CACERT_CTX = as_file(files("pip._vendor.certifi").joinpath("cacert.pem")) _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) return _CACERT_PATH @@ -70,6 +75,7 @@ def where() -> str: # we will also store that at the global level as well. 
_CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem") _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) return _CACERT_PATH diff --git a/src/pip/_vendor/chardet/__main__.py b/src/pip/_vendor/chardet/__main__.py new file mode 100644 index 00000000000..c19b0d2d7a3 --- /dev/null +++ b/src/pip/_vendor/chardet/__main__.py @@ -0,0 +1,6 @@ +"""Wrapper so people can run python -m chardet""" + +from .cli.chardetect import main + +if __name__ == "__main__": + main() diff --git a/src/pip/_vendor/chardet/version.py b/src/pip/_vendor/chardet/version.py index c5e9d85cd75..19dd01e0301 100644 --- a/src/pip/_vendor/chardet/version.py +++ b/src/pip/_vendor/chardet/version.py @@ -5,5 +5,5 @@ :author: Dan Blanchard (dan.blanchard@gmail.com) """ -__version__ = "5.1.0" +__version__ = "5.2.0" VERSION = __version__.split(".") diff --git a/src/pip/_vendor/distlib/__init__.py b/src/pip/_vendor/distlib/__init__.py index 962173c8d0a..e999438fe94 100644 --- a/src/pip/_vendor/distlib/__init__.py +++ b/src/pip/_vendor/distlib/__init__.py @@ -1,23 +1,33 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2022 Vinay Sajip. +# Copyright (C) 2012-2023 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # import logging -__version__ = '0.3.6' +__version__ = '0.3.8' + class DistlibException(Exception): pass + try: from logging import NullHandler -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover + class NullHandler(logging.Handler): - def handle(self, record): pass - def emit(self, record): pass - def createLock(self): self.lock = None + + def handle(self, record): + pass + + def emit(self, record): + pass + + def createLock(self): + self.lock = None + logger = logging.getLogger(__name__) logger.addHandler(NullHandler()) diff --git a/src/pip/_vendor/distlib/compat.py b/src/pip/_vendor/distlib/compat.py index 1fe3d225acb..e93dc27a3eb 100644 --- a/src/pip/_vendor/distlib/compat.py +++ b/src/pip/_vendor/distlib/compat.py @@ -8,6 +8,7 @@ import os import re +import shutil import sys try: @@ -33,9 +34,8 @@ def quote(s): import urllib2 from urllib2 import (Request, urlopen, URLError, HTTPError, - HTTPBasicAuthHandler, HTTPPasswordMgr, - HTTPHandler, HTTPRedirectHandler, - build_opener) + HTTPBasicAuthHandler, HTTPPasswordMgr, HTTPHandler, + HTTPRedirectHandler, build_opener) if ssl: from urllib2 import HTTPSHandler import httplib @@ -50,15 +50,15 @@ def quote(s): # Leaving this around for now, in case it needs resurrecting in some way # _userprog = None # def splituser(host): - # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" - # global _userprog - # if _userprog is None: - # import re - # _userprog = re.compile('^(.*)@(.*)$') + # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + # global _userprog + # if _userprog is None: + # import re + # _userprog = re.compile('^(.*)@(.*)$') - # match = _userprog.match(host) - # if match: return match.group(1, 2) - # return None, host + # match = _userprog.match(host) + # if match: return match.group(1, 2) + # return None, host else: # pragma: no cover from io import StringIO @@ -67,14 +67,12 @@ def quote(s): from io import TextIOWrapper as file_type import builtins import configparser - import shutil - from urllib.parse import (urlparse, urlunparse, urljoin, quote, - unquote, urlsplit, urlunsplit, splittype) + from urllib.parse import (urlparse, urlunparse, urljoin, quote, unquote, + 
urlsplit, urlunsplit, splittype) from urllib.request import (urlopen, urlretrieve, Request, url2pathname, - pathname2url, - HTTPBasicAuthHandler, HTTPPasswordMgr, - HTTPHandler, HTTPRedirectHandler, - build_opener) + pathname2url, HTTPBasicAuthHandler, + HTTPPasswordMgr, HTTPHandler, + HTTPRedirectHandler, build_opener) if ssl: from urllib.request import HTTPSHandler from urllib.error import HTTPError, URLError, ContentTooShortError @@ -88,14 +86,13 @@ def quote(s): from itertools import filterfalse filter = filter - try: from ssl import match_hostname, CertificateError -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover + class CertificateError(ValueError): pass - def _dnsname_match(dn, hostname, max_wildcards=1): """Matching according to RFC 6125, section 6.4.3 @@ -145,7 +142,6 @@ def _dnsname_match(dn, hostname, max_wildcards=1): pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) return pat.match(hostname) - def match_hostname(cert, hostname): """Verify that *cert* (in decoded format as returned by SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 @@ -178,24 +174,26 @@ def match_hostname(cert, hostname): dnsnames.append(value) if len(dnsnames) > 1: raise CertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) + "doesn't match either of %s" % + (hostname, ', '.join(map(repr, dnsnames)))) elif len(dnsnames) == 1: raise CertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) + "doesn't match %r" % + (hostname, dnsnames[0])) else: raise CertificateError("no appropriate commonName or " - "subjectAltName fields were found") + "subjectAltName fields were found") try: from types import SimpleNamespace as Container except ImportError: # pragma: no cover + class Container(object): """ A generic container for when multiple values need to be returned """ + def __init__(self, **kwargs): self.__dict__.update(kwargs) @@ -214,6 +212,7 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None): path. """ + # Check that a given file can be accessed with the correct mode. # Additionally check that `file` is not a directory, as on Windows # directories pass the os.access check. @@ -237,7 +236,7 @@ def _access_check(fn, mode): if sys.platform == "win32": # The current directory takes precedence on Windows. - if not os.curdir in path: + if os.curdir not in path: path.insert(0, os.curdir) # PATHEXT is necessary to check on Windows. 
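The compat.py hunks above reflow distlib's backport of shutil.which(), which searches PATH (prepending the current directory and expanding PATHEXT on Windows) for an executable, non-directory match. A minimal POSIX-flavoured sketch of that lookup, not the vendored implementation itself ("git" is just an example command):

import os

def find_executable(cmd, path=None):
    """Sketch of a shutil.which-style search: return the first PATH entry
    containing an executable regular file named cmd, else None."""
    if path is None:
        path = os.environ.get("PATH", os.defpath)
    for directory in path.split(os.pathsep):
        candidate = os.path.join(directory, cmd)
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            return candidate
    return None

print(find_executable("git"))  # e.g. /usr/bin/git, or None if absent

The real backport additionally honours the mode argument and skips directories it has already seen after normalisation, as the next hunk shows.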
@@ -258,7 +257,7 @@ def _access_check(fn, mode): seen = set() for dir in path: normdir = os.path.normcase(dir) - if not normdir in seen: + if normdir not in seen: seen.add(normdir) for thefile in files: name = os.path.join(dir, thefile) @@ -277,6 +276,7 @@ def _access_check(fn, mode): from zipfile import ZipExtFile as BaseZipExtFile class ZipExtFile(BaseZipExtFile): + def __init__(self, base): self.__dict__.update(base.__dict__) @@ -288,6 +288,7 @@ def __exit__(self, *exc_info): # return None, so if an exception occurred, it will propagate class ZipFile(BaseZipFile): + def __enter__(self): return self @@ -299,9 +300,11 @@ def open(self, *args, **kwargs): base = BaseZipFile.open(self, *args, **kwargs) return ZipExtFile(base) + try: from platform import python_implementation -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover + def python_implementation(): """Return a string identifying the Python implementation.""" if 'PyPy' in sys.version: @@ -312,12 +315,12 @@ def python_implementation(): return 'IronPython' return 'CPython' -import shutil + import sysconfig try: callable = callable -except NameError: # pragma: no cover +except NameError: # pragma: no cover from collections.abc import Callable def callable(obj): @@ -358,11 +361,11 @@ def fsdecode(filename): raise TypeError("expect bytes or str, not %s" % type(filename).__name__) + try: from tokenize import detect_encoding -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover from codecs import BOM_UTF8, lookup - import re cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") @@ -401,6 +404,7 @@ def detect_encoding(readline): bom_found = False encoding = None default = 'utf-8' + def read_or_stop(): try: return readline() @@ -430,8 +434,8 @@ def find_cookie(line): if filename is None: msg = "unknown encoding: " + encoding else: - msg = "unknown encoding for {!r}: {}".format(filename, - encoding) + msg = "unknown encoding for {!r}: {}".format( + filename, encoding) raise SyntaxError(msg) if bom_found: @@ -440,7 +444,8 @@ def find_cookie(line): if filename is None: msg = 'encoding problem: utf-8' else: - msg = 'encoding problem for {!r}: utf-8'.format(filename) + msg = 'encoding problem for {!r}: utf-8'.format( + filename) raise SyntaxError(msg) encoding += '-sig' return encoding @@ -467,6 +472,7 @@ def find_cookie(line): return default, [first, second] + # For converting & <-> & etc. try: from html import escape @@ -479,12 +485,13 @@ def find_cookie(line): try: from collections import ChainMap -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover from collections import MutableMapping try: from reprlib import recursive_repr as _recursive_repr except ImportError: + def _recursive_repr(fillvalue='...'): ''' Decorator to make a repr function return fillvalue for a recursive @@ -509,13 +516,15 @@ def wrapper(self): wrapper.__module__ = getattr(user_function, '__module__') wrapper.__doc__ = getattr(user_function, '__doc__') wrapper.__name__ = getattr(user_function, '__name__') - wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + wrapper.__annotations__ = getattr(user_function, + '__annotations__', {}) return wrapper return decorating_function class ChainMap(MutableMapping): - ''' A ChainMap groups multiple dicts (or other mappings) together + ''' + A ChainMap groups multiple dicts (or other mappings) together to create a single, updateable view. The underlying mappings are stored in a list. 
That list is public and can @@ -524,7 +533,6 @@ class ChainMap(MutableMapping): Lookups search the underlying mappings successively until a key is found. In contrast, writes, updates, and deletions only operate on the first mapping. - ''' def __init__(self, *maps): @@ -532,7 +540,7 @@ def __init__(self, *maps): If no mappings are provided, a single empty dictionary is used. ''' - self.maps = list(maps) or [{}] # always at least one map + self.maps = list(maps) or [{}] # always at least one map def __missing__(self, key): raise KeyError(key) @@ -540,16 +548,19 @@ def __missing__(self, key): def __getitem__(self, key): for mapping in self.maps: try: - return mapping[key] # can't use 'key in mapping' with defaultdict + return mapping[ + key] # can't use 'key in mapping' with defaultdict except KeyError: pass - return self.__missing__(key) # support subclasses that define __missing__ + return self.__missing__( + key) # support subclasses that define __missing__ def get(self, key, default=None): return self[key] if key in self else default def __len__(self): - return len(set().union(*self.maps)) # reuses stored hash values if possible + return len(set().union( + *self.maps)) # reuses stored hash values if possible def __iter__(self): return iter(set().union(*self.maps)) @@ -576,12 +587,12 @@ def copy(self): __copy__ = copy - def new_child(self): # like Django's Context.push() + def new_child(self): # like Django's Context.push() 'New ChainMap with a new dict followed by all previous maps.' return self.__class__({}, *self.maps) @property - def parents(self): # like Django's Context.pop() + def parents(self): # like Django's Context.pop() 'New ChainMap from maps[1:].' return self.__class__(*self.maps[1:]) @@ -592,7 +603,8 @@ def __delitem__(self, key): try: del self.maps[0][key] except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + raise KeyError( + 'Key not found in the first mapping: {!r}'.format(key)) def popitem(self): 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' @@ -606,15 +618,18 @@ def pop(self, key, *args): try: return self.maps[0].pop(key, *args) except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + raise KeyError( + 'Key not found in the first mapping: {!r}'.format(key)) def clear(self): 'Clear maps[0], leaving maps[1:] intact.' self.maps[0].clear() + try: from importlib.util import cache_from_source # Python >= 3.4 except ImportError: # pragma: no cover + def cache_from_source(path, debug_override=None): assert path.endswith('.py') if debug_override is None: @@ -625,12 +640,13 @@ def cache_from_source(path, debug_override=None): suffix = 'o' return path + suffix + try: from collections import OrderedDict -except ImportError: # pragma: no cover -## {{{ http://code.activestate.com/recipes/576693/ (r9) -# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. -# Passes Python2.7's test suite and incorporates all the latest updates. +except ImportError: # pragma: no cover + # {{{ http://code.activestate.com/recipes/576693/ (r9) + # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. + # Passes Python2.7's test suite and incorporates all the latest updates. 
try: from thread import get_ident as _get_ident except ImportError: @@ -641,9 +657,9 @@ def cache_from_source(path, debug_override=None): except ImportError: pass - class OrderedDict(dict): 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. # The inherited dict provides __getitem__, __len__, __contains__, and get. # The remaining methods are order-aware. @@ -661,11 +677,12 @@ def __init__(self, *args, **kwds): ''' if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) + raise TypeError('expected at most 1 arguments, got %d' % + len(args)) try: self.__root except AttributeError: - self.__root = root = [] # sentinel node + self.__root = root = [] # sentinel node root[:] = [root, root, None] self.__map = {} self.__update(*args, **kwds) @@ -779,7 +796,7 @@ def update(*args, **kwds): ''' if len(args) > 2: raise TypeError('update() takes at most 2 positional ' - 'arguments (%d given)' % (len(args),)) + 'arguments (%d given)' % (len(args), )) elif not args: raise TypeError('update() takes at least 1 argument (0 given)') self = args[0] @@ -825,14 +842,15 @@ def setdefault(self, key, default=None): def __repr__(self, _repr_running=None): 'od.__repr__() <==> repr(od)' - if not _repr_running: _repr_running = {} + if not _repr_running: + _repr_running = {} call_key = id(self), _get_ident() if call_key in _repr_running: return '...' _repr_running[call_key] = 1 try: if not self: - return '%s()' % (self.__class__.__name__,) + return '%s()' % (self.__class__.__name__, ) return '%s(%r)' % (self.__class__.__name__, self.items()) finally: del _repr_running[call_key] @@ -844,8 +862,8 @@ def __reduce__(self): for k in vars(OrderedDict()): inst_dict.pop(k, None) if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) + return (self.__class__, (items, ), inst_dict) + return self.__class__, (items, ) def copy(self): 'od.copy() -> a shallow copy of od' @@ -868,7 +886,8 @@ def __eq__(self, other): ''' if isinstance(other, OrderedDict): - return len(self)==len(other) and self.items() == other.items() + return len(self) == len( + other) and self.items() == other.items() return dict.__eq__(self, other) def __ne__(self, other): @@ -888,19 +907,18 @@ def viewitems(self): "od.viewitems() -> a set-like object providing a view on od's items" return ItemsView(self) + try: from logging.config import BaseConfigurator, valid_ident -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) - def valid_ident(s): m = IDENTIFIER.match(s) if not m: raise ValueError('Not a valid Python identifier: %r' % s) return True - # The ConvertingXXX classes are wrappers around standard Python containers, # and they serve to convert any suitable values in the container. 
The # conversion converts base dicts, lists and tuples to their wrapped @@ -916,7 +934,7 @@ class ConvertingDict(dict): def __getitem__(self, key): value = dict.__getitem__(self, key) result = self.configurator.convert(value) - #If the converted value is different, save for next time + # If the converted value is different, save for next time if value is not result: self[key] = result if type(result) in (ConvertingDict, ConvertingList, @@ -928,7 +946,7 @@ def __getitem__(self, key): def get(self, key, default=None): value = dict.get(self, key, default) result = self.configurator.convert(value) - #If the converted value is different, save for next time + # If the converted value is different, save for next time if value is not result: self[key] = result if type(result) in (ConvertingDict, ConvertingList, @@ -949,10 +967,11 @@ def pop(self, key, default=None): class ConvertingList(list): """A converting list wrapper.""" + def __getitem__(self, key): value = list.__getitem__(self, key) result = self.configurator.convert(value) - #If the converted value is different, save for next time + # If the converted value is different, save for next time if value is not result: self[key] = result if type(result) in (ConvertingDict, ConvertingList, @@ -972,6 +991,7 @@ def pop(self, idx=-1): class ConvertingTuple(tuple): """A converting tuple wrapper.""" + def __getitem__(self, key): value = tuple.__getitem__(self, key) result = self.configurator.convert(value) @@ -995,8 +1015,8 @@ class BaseConfigurator(object): DIGIT_PATTERN = re.compile(r'^\d+$') value_converters = { - 'ext' : 'ext_convert', - 'cfg' : 'cfg_convert', + 'ext': 'ext_convert', + 'cfg': 'cfg_convert', } # We might want to use a different one, e.g. importlib @@ -1042,7 +1062,6 @@ def cfg_convert(self, value): else: rest = rest[m.end():] d = self.config[m.groups()[0]] - #print d, rest while rest: m = self.DOT_PATTERN.match(rest) if m: @@ -1055,7 +1074,9 @@ def cfg_convert(self, value): d = d[idx] else: try: - n = int(idx) # try as number first (most likely) + n = int( + idx + ) # try as number first (most likely) d = d[n] except TypeError: d = d[idx] @@ -1064,7 +1085,7 @@ def cfg_convert(self, value): else: raise ValueError('Unable to convert ' '%r at %r' % (value, rest)) - #rest should be empty + # rest should be empty return d def convert(self, value): @@ -1073,14 +1094,15 @@ def convert(self, value): replaced by their converting alternatives. Strings are checked to see if they have a conversion format and are converted if they do. """ - if not isinstance(value, ConvertingDict) and isinstance(value, dict): + if not isinstance(value, ConvertingDict) and isinstance( + value, dict): value = ConvertingDict(value) value.configurator = self - elif not isinstance(value, ConvertingList) and isinstance(value, list): + elif not isinstance(value, ConvertingList) and isinstance( + value, list): value = ConvertingList(value) value.configurator = self - elif not isinstance(value, ConvertingTuple) and\ - isinstance(value, tuple): + elif not isinstance(value, ConvertingTuple) and isinstance(value, tuple): value = ConvertingTuple(value) value.configurator = self elif isinstance(value, string_types): diff --git a/src/pip/_vendor/distlib/database.py b/src/pip/_vendor/distlib/database.py index 5db5d7f507c..eb3765f193b 100644 --- a/src/pip/_vendor/distlib/database.py +++ b/src/pip/_vendor/distlib/database.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2017 The Python Software Foundation. 
+# Copyright (C) 2012-2023 The Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # """PEP 376 implementation.""" @@ -25,11 +25,10 @@ from .util import (parse_requirement, cached_property, parse_name_and_version, read_exports, write_exports, CSVReader, CSVWriter) - -__all__ = ['Distribution', 'BaseInstalledDistribution', - 'InstalledDistribution', 'EggInfoDistribution', - 'DistributionPath'] - +__all__ = [ + 'Distribution', 'BaseInstalledDistribution', 'InstalledDistribution', + 'EggInfoDistribution', 'DistributionPath' +] logger = logging.getLogger(__name__) @@ -46,6 +45,7 @@ class _Cache(object): """ A simple cache mapping names and .dist-info paths to distributions """ + def __init__(self): """ Initialise an instance. There is normally one for each DistributionPath. @@ -76,6 +76,7 @@ class DistributionPath(object): """ Represents a set of distributions installed on a path (typically sys.path). """ + def __init__(self, path=None, include_egg=False): """ Create an instance from a path, optionally including legacy (distutils/ @@ -111,7 +112,6 @@ def clear_cache(self): self._cache.clear() self._cache_egg.clear() - def _yield_distributions(self): """ Yield .dist-info and/or .egg(-info) distributions. @@ -134,11 +134,13 @@ def _yield_distributions(self): continue try: if self._include_dist and entry.endswith(DISTINFO_EXT): - possible_filenames = [METADATA_FILENAME, - WHEEL_METADATA_FILENAME, - LEGACY_METADATA_FILENAME] + possible_filenames = [ + METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME + ] for metadata_filename in possible_filenames: - metadata_path = posixpath.join(entry, metadata_filename) + metadata_path = posixpath.join( + entry, metadata_filename) pydist = finder.find(metadata_path) if pydist: break @@ -146,13 +148,15 @@ def _yield_distributions(self): continue with contextlib.closing(pydist.as_stream()) as stream: - metadata = Metadata(fileobj=stream, scheme='legacy') + metadata = Metadata(fileobj=stream, + scheme='legacy') logger.debug('Found %s', r.path) seen.add(r.path) - yield new_dist_class(r.path, metadata=metadata, + yield new_dist_class(r.path, + metadata=metadata, env=self) - elif self._include_egg and entry.endswith(('.egg-info', - '.egg')): + elif self._include_egg and entry.endswith( + ('.egg-info', '.egg')): logger.debug('Found %s', r.path) seen.add(r.path) yield old_dist_class(r.path, self) @@ -271,7 +275,7 @@ def provides_distribution(self, name, version=None): matcher = self._scheme.matcher('%s (%s)' % (name, version)) except ValueError: raise DistlibException('invalid name or version: %r, %r' % - (name, version)) + (name, version)) for dist in self.get_distributions(): # We hit a problem on Travis where enum34 was installed and doesn't @@ -346,12 +350,12 @@ def __init__(self, metadata): """ self.metadata = metadata self.name = metadata.name - self.key = self.name.lower() # for case-insensitive comparisons + self.key = self.name.lower() # for case-insensitive comparisons self.version = metadata.version self.locator = None self.digest = None - self.extras = None # additional features requested - self.context = None # environment marker overrides + self.extras = None # additional features requested + self.context = None # environment marker overrides self.download_urls = set() self.digests = {} @@ -362,7 +366,7 @@ def source_url(self): """ return self.metadata.source_url - download_url = source_url # Backward compatibility + download_url = source_url # Backward compatibility @property def name_and_version(self): @@ -386,10 
+390,10 @@ def provides(self): def _get_requirements(self, req_attr): md = self.metadata reqts = getattr(md, req_attr) - logger.debug('%s: got requirements %r from metadata: %r', self.name, req_attr, - reqts) - return set(md.get_requirements(reqts, extras=self.extras, - env=self.context)) + logger.debug('%s: got requirements %r from metadata: %r', self.name, + req_attr, reqts) + return set( + md.get_requirements(reqts, extras=self.extras, env=self.context)) @property def run_requires(self): @@ -426,12 +430,11 @@ def matches_requirement(self, req): matcher = scheme.matcher(r.requirement) except UnsupportedVersionError: # XXX compat-mode if cannot read the version - logger.warning('could not read version %r - using name only', - req) + logger.warning('could not read version %r - using name only', req) name = req.split()[0] matcher = scheme.matcher(name) - name = matcher.key # case-insensitive + name = matcher.key # case-insensitive result = False for p in self.provides: @@ -466,9 +469,8 @@ def __eq__(self, other): if type(other) is not type(self): result = False else: - result = (self.name == other.name and - self.version == other.version and - self.source_url == other.source_url) + result = (self.name == other.name and self.version == other.version + and self.source_url == other.source_url) return result def __hash__(self): @@ -559,8 +561,8 @@ def __init__(self, path, metadata=None, env=None): if r is None: r = finder.find(LEGACY_METADATA_FILENAME) if r is None: - raise ValueError('no %s found in %s' % (METADATA_FILENAME, - path)) + raise ValueError('no %s found in %s' % + (METADATA_FILENAME, path)) with contextlib.closing(r.as_stream()) as stream: metadata = Metadata(fileobj=stream, scheme='legacy') @@ -571,7 +573,7 @@ def __init__(self, path, metadata=None, env=None): r = finder.find('REQUESTED') self.requested = r is not None - p = os.path.join(path, 'top_level.txt') + p = os.path.join(path, 'top_level.txt') if os.path.exists(p): with open(p, 'rb') as f: data = f.read().decode('utf-8') @@ -596,14 +598,14 @@ def _get_records(self): with contextlib.closing(r.as_stream()) as stream: with CSVReader(stream=stream) as record_reader: # Base location is parent dir of .dist-info dir - #base_location = os.path.dirname(self.path) - #base_location = os.path.abspath(base_location) + # base_location = os.path.dirname(self.path) + # base_location = os.path.abspath(base_location) for row in record_reader: missing = [None for i in range(len(row), 3)] path, checksum, size = row + missing - #if not os.path.isabs(path): - # path = path.replace('/', os.sep) - # path = os.path.join(base_location, path) + # if not os.path.isabs(path): + # path = path.replace('/', os.sep) + # path = os.path.join(base_location, path) results.append((path, checksum, size)) return results @@ -701,8 +703,8 @@ def write_installed_files(self, paths, prefix, dry_run=False): size = '%d' % os.path.getsize(path) with open(path, 'rb') as fp: hash_value = self.get_hash(fp.read()) - if path.startswith(base) or (base_under_prefix and - path.startswith(prefix)): + if path.startswith(base) or (base_under_prefix + and path.startswith(prefix)): path = os.path.relpath(path, base) writer.writerow((path, hash_value, size)) @@ -744,7 +746,8 @@ def check_installed_files(self): with open(path, 'rb') as f: actual_hash = self.get_hash(f.read(), hasher) if actual_hash != hash_value: - mismatches.append((path, 'hash', hash_value, actual_hash)) + mismatches.append( + (path, 'hash', hash_value, actual_hash)) return mismatches @cached_property @@ -791,7 
+794,7 @@ def write_shared_locations(self, paths, dry_run=False): for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): path = paths[key] if os.path.isdir(paths[key]): - lines.append('%s=%s' % (key, path)) + lines.append('%s=%s' % (key, path)) for ns in paths.get('namespace', ()): lines.append('namespace=%s' % ns) @@ -854,8 +857,8 @@ def list_distinfo_files(self): yield path def __eq__(self, other): - return (isinstance(other, InstalledDistribution) and - self.path == other.path) + return (isinstance(other, InstalledDistribution) + and self.path == other.path) # See http://docs.python.org/reference/datamodel#object.__hash__ __hash__ = object.__hash__ @@ -867,13 +870,14 @@ class EggInfoDistribution(BaseInstalledDistribution): if the given path happens to be a directory, the metadata is read from the file ``PKG-INFO`` under that directory.""" - requested = True # as we have no way of knowing, assume it was + requested = True # as we have no way of knowing, assume it was shared_locations = {} def __init__(self, path, env=None): + def set_name_and_version(s, n, v): s.name = n - s.key = n.lower() # for case-insensitive comparisons + s.key = n.lower() # for case-insensitive comparisons s.version = v self.path = path @@ -903,15 +907,18 @@ def parse_requires_data(data): lines = data.splitlines() for line in lines: line = line.strip() - if line.startswith('['): - logger.warning('Unexpected line: quitting requirement scan: %r', - line) + # sectioned files have bare newlines (separating sections) + if not line: # pragma: no cover + continue + if line.startswith('['): # pragma: no cover + logger.warning( + 'Unexpected line: quitting requirement scan: %r', line) break r = parse_requirement(line) - if not r: + if not r: # pragma: no cover logger.warning('Not recognised as a requirement: %r', line) continue - if r.extras: + if r.extras: # pragma: no cover logger.warning('extra requirements in requires.txt are ' 'not supported') if not r.constraints: @@ -952,7 +959,8 @@ def parse_requires_path(req_path): metadata = Metadata(fileobj=fileobj, scheme='legacy') try: data = zipf.get_data('EGG-INFO/requires.txt') - tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8') + tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode( + 'utf-8') requires = parse_requires_data(data.decode('utf-8')) except IOError: requires = None @@ -982,8 +990,8 @@ def parse_requires_path(req_path): return metadata def __repr__(self): - return '' % ( - self.name, self.version, self.path) + return '' % (self.name, self.version, + self.path) def __str__(self): return "%s %s" % (self.name, self.version) @@ -1039,7 +1047,7 @@ def _size(path): logger.warning('Non-existent file: %s', p) if p.endswith(('.pyc', '.pyo')): continue - #otherwise fall through and fail + # otherwise fall through and fail if not os.path.isdir(p): result.append((p, _md5(p), _size(p))) result.append((record_path, None, None)) @@ -1075,12 +1083,13 @@ def list_distinfo_files(self, absolute=False): yield line def __eq__(self, other): - return (isinstance(other, EggInfoDistribution) and - self.path == other.path) + return (isinstance(other, EggInfoDistribution) + and self.path == other.path) # See http://docs.python.org/reference/datamodel#object.__hash__ __hash__ = object.__hash__ + new_dist_class = InstalledDistribution old_dist_class = EggInfoDistribution @@ -1114,7 +1123,7 @@ def add_distribution(self, distribution): """ self.adjacency_list[distribution] = [] self.reverse_list[distribution] = [] - #self.missing[distribution] = [] + # 
self.missing[distribution] = [] def add_edge(self, x, y, label=None): """Add an edge from distribution *x* to distribution *y* with the given @@ -1174,7 +1183,7 @@ def to_dot(self, f, skip_disconnected=True): if len(adjs) == 0 and not skip_disconnected: disconnected.append(dist) for other, label in adjs: - if not label is None: + if label is not None: f.write('"%s" -> "%s" [label="%s"]\n' % (dist.name, other.name, label)) else: @@ -1252,8 +1261,8 @@ def make_graph(dists, scheme='default'): # now make the edges for dist in dists: - requires = (dist.run_requires | dist.meta_requires | - dist.build_requires | dist.dev_requires) + requires = (dist.run_requires | dist.meta_requires + | dist.build_requires | dist.dev_requires) for req in requires: try: matcher = scheme.matcher(req) @@ -1264,7 +1273,7 @@ def make_graph(dists, scheme='default'): name = req.split()[0] matcher = scheme.matcher(name) - name = matcher.key # case-insensitive + name = matcher.key # case-insensitive matched = False if name in provided: @@ -1324,7 +1333,7 @@ def get_required_dists(dists, dist): req = set() # required distributions todo = graph.adjacency_list[dist] # list of nodes we should inspect - seen = set(t[0] for t in todo) # already added to todo + seen = set(t[0] for t in todo) # already added to todo while todo: d = todo.pop()[0] diff --git a/src/pip/_vendor/distlib/index.py b/src/pip/_vendor/distlib/index.py index 9b6d129ed69..56cd2867145 100644 --- a/src/pip/_vendor/distlib/index.py +++ b/src/pip/_vendor/distlib/index.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2013 Vinay Sajip. +# Copyright (C) 2013-2023 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # @@ -25,6 +25,7 @@ DEFAULT_INDEX = 'https://pypi.org/pypi' DEFAULT_REALM = 'pypi' + class PackageIndex(object): """ This class represents a package index compatible with PyPI, the Python @@ -119,7 +120,7 @@ def register(self, metadata): # pragma: no cover d = metadata.todict() d[':action'] = 'verify' request = self.encode_request(d.items(), []) - response = self.send_request(request) + self.send_request(request) d[':action'] = 'submit' request = self.encode_request(d.items(), []) return self.send_request(request) @@ -358,8 +359,7 @@ def verify_signature(self, signature_filename, data_filename, keystore) rc, stdout, stderr = self.run_command(cmd) if rc not in (0, 1): - raise DistlibException('verify command failed with error ' - 'code %s' % rc) + raise DistlibException('verify command failed with error code %s' % rc) return rc == 0 def download_file(self, url, destfile, digest=None, reporthook=None): diff --git a/src/pip/_vendor/distlib/locators.py b/src/pip/_vendor/distlib/locators.py index 966ebc0e37d..f9f0788fc2a 100644 --- a/src/pip/_vendor/distlib/locators.py +++ b/src/pip/_vendor/distlib/locators.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2015 Vinay Sajip. +# Copyright (C) 2012-2023 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # @@ -38,6 +38,7 @@ HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') DEFAULT_INDEX = 'https://pypi.org/pypi' + def get_all_distribution_names(url=None): """ Return all distribution names known by an index. @@ -52,6 +53,7 @@ def get_all_distribution_names(url=None): finally: client('close')() + class RedirectHandler(BaseRedirectHandler): """ A class to work around a bug in some Python 3.2.x releases. 
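Just above, locators.py exposes get_all_distribution_names(), which asks an index for every project it knows about over XML-RPC. A hedged usage sketch (network-dependent, and PyPI has been deprecating its XML-RPC API, so treat this as illustrative only):

    from pip._vendor.distlib.locators import get_all_distribution_names

    # With url=None this falls back to the DEFAULT_INDEX defined in the module.
    names = get_all_distribution_names()
    print(len(names), 'projects known to the index')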
@@ -83,6 +85,7 @@ def http_error_302(self, req, fp, code, msg, headers): http_error_301 = http_error_303 = http_error_307 = http_error_302 + class Locator(object): """ A base class for locators - things that locate distributions. @@ -272,7 +275,7 @@ def same_project(name1, name2): 'python-version': ', '.join( ['.'.join(list(v[2:])) for v in wheel.pyver]), } - except Exception as e: # pragma: no cover + except Exception: # pragma: no cover logger.warning('invalid path for wheel: %s', path) elif not path.endswith(self.downloadable_extensions): # pragma: no cover logger.debug('Not downloadable: %s', path) @@ -293,7 +296,6 @@ def same_project(name1, name2): 'filename': filename, 'url': urlunparse((scheme, netloc, origpath, params, query, '')), - #'packagetype': 'sdist', } if pyver: # pragma: no cover result['python-version'] = pyver @@ -382,12 +384,9 @@ def locate(self, requirement, prereleases=False): else: if prereleases or not vcls(k).is_prerelease: slist.append(k) - # else: - # logger.debug('skipping pre-release ' - # 'version %s of %s', k, matcher.name) except Exception: # pragma: no cover logger.warning('error matching %s with %r', matcher, k) - pass # slist.append(k) + pass # slist.append(k) if len(slist) > 1: slist = sorted(slist, key=scheme.key) if slist: @@ -456,6 +455,7 @@ def _get_project(self, name): result['digests'][url] = digest return result + class PyPIJSONLocator(Locator): """ This locator uses PyPI's JSON interface. It's very limited in functionality @@ -476,7 +476,7 @@ def _get_project(self, name): url = urljoin(self.base_url, '%s/json' % quote(name)) try: resp = self.opener.open(url) - data = resp.read().decode() # for now + data = resp.read().decode() # for now d = json.loads(data) md = Metadata(scheme=self.scheme) data = d['info'] @@ -487,7 +487,7 @@ def _get_project(self, name): md.summary = data.get('summary') dist = Distribution(md) dist.locator = self - urls = d['urls'] + # urls = d['urls'] result[md.version] = dist for info in d['urls']: url = info['url'] @@ -745,7 +745,7 @@ def _fetch(self): try: self._seen.add(link) if (not self._process_download(link) and - self._should_queue(link, url, rel)): + self._should_queue(link, url, rel)): logger.debug('Queueing %s from %s', link, url) self._to_fetch.put(link) except MetadataInvalidError: # e.g. invalid versions @@ -756,7 +756,7 @@ def _fetch(self): # always do this, to avoid hangs :-) self._to_fetch.task_done() if not url: - #logger.debug('Sentinel seen, quitting.') + # logger.debug('Sentinel seen, quitting.') break def get_page(self, url): @@ -832,6 +832,7 @@ def get_distribution_names(self): result.add(match.group(1)) return result + class DirectoryLocator(Locator): """ This class locates distributions in a directory tree. @@ -897,6 +898,7 @@ def get_distribution_names(self): break return result + class JSONLocator(Locator): """ This locator uses special extended metadata (not available on PyPI) and is @@ -935,6 +937,7 @@ def _get_project(self, name): result['urls'].setdefault(dist.version, set()).add(info['url']) return result + class DistPathLocator(Locator): """ This locator finds installed distributions in a path. 
It can be useful for @@ -1245,7 +1248,7 @@ def find(self, requirement, meta_extras=None, prereleases=False): if name not in self.dists_by_name: self.add_distribution(dist) else: - #import pdb; pdb.set_trace() + # import pdb; pdb.set_trace() other = self.dists_by_name[name] if other != dist: self.try_to_replace(dist, other, problems) diff --git a/src/pip/_vendor/distlib/manifest.py b/src/pip/_vendor/distlib/manifest.py index ca0fe442d9c..420dcf12ed2 100644 --- a/src/pip/_vendor/distlib/manifest.py +++ b/src/pip/_vendor/distlib/manifest.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2013 Python Software Foundation. +# Copyright (C) 2012-2023 Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # """ @@ -34,9 +34,11 @@ # _PYTHON_VERSION = sys.version_info[:2] + class Manifest(object): - """A list of files built by on exploring the filesystem and filtered by - applying various patterns to what we find there. + """ + A list of files built by exploring the filesystem and filtered by applying various + patterns to what we find there. """ def __init__(self, base=None): @@ -154,10 +156,7 @@ def process_directive(self, directive): elif action == 'exclude': for pattern in patterns: - found = self._exclude_pattern(pattern, anchor=True) - #if not found: - # logger.warning('no previously-included files ' - # 'found matching %r', pattern) + self._exclude_pattern(pattern, anchor=True) elif action == 'global-include': for pattern in patterns: @@ -167,11 +166,7 @@ def process_directive(self, directive): elif action == 'global-exclude': for pattern in patterns: - found = self._exclude_pattern(pattern, anchor=False) - #if not found: - # logger.warning('no previously-included files ' - # 'matching %r found anywhere in ' - # 'distribution', pattern) + self._exclude_pattern(pattern, anchor=False) elif action == 'recursive-include': for pattern in patterns: @@ -181,11 +176,7 @@ def process_directive(self, directive): elif action == 'recursive-exclude': for pattern in patterns: - found = self._exclude_pattern(pattern, prefix=thedir) - #if not found: - # logger.warning('no previously-included files ' - # 'matching %r found under directory %r', - # pattern, thedir) + self._exclude_pattern(pattern, prefix=thedir) elif action == 'graft': if not self._include_pattern(None, prefix=dirpattern): diff --git a/src/pip/_vendor/distlib/markers.py b/src/pip/_vendor/distlib/markers.py index 9dc68410337..1514d460e70 100644 --- a/src/pip/_vendor/distlib/markers.py +++ b/src/pip/_vendor/distlib/markers.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2017 Vinay Sajip. +# Copyright (C) 2012-2023 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. 
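The process_directive() branches above implement MANIFEST.in-style commands (include, exclude, global-include, recursive-exclude, graft, prune) against a scanned file list; the diff simply drops the unused `found` results along with their commented-out warnings. A hedged sketch of feeding such directives in (the project path and file names are hypothetical, and findall()/files are assumptions based on the distutils-style API this class mirrors):

    from pip._vendor.distlib.manifest import Manifest

    m = Manifest(base='/tmp/project')    # hypothetical project root
    m.findall()                          # scan the tree under base
    m.process_directive('include README.rst')
    m.process_directive('recursive-exclude tests *.pyc')
    print(sorted(m.files))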
# @@ -19,26 +19,32 @@ from .compat import string_types from .util import in_venv, parse_marker -from .version import NormalizedVersion as NV +from .version import LegacyVersion as LV __all__ = ['interpret'] -_VERSION_PATTERN = re.compile(r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")') +_VERSION_PATTERN = re.compile( + r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")') +_VERSION_MARKERS = {'python_version', 'python_full_version'} + + +def _is_version_marker(s): + return isinstance(s, string_types) and s in _VERSION_MARKERS + def _is_literal(o): if not isinstance(o, string_types) or not o: return False return o[0] in '\'"' + def _get_versions(s): - result = [] - for m in _VERSION_PATTERN.finditer(s): - result.append(NV(m.groups()[0])) - return set(result) + return {LV(m.groups()[0]) for m in _VERSION_PATTERN.finditer(s)} + class Evaluator(object): """ - This class is used to evaluate marker expessions. + This class is used to evaluate marker expressions. """ operations = { @@ -46,10 +52,10 @@ class Evaluator(object): '===': lambda x, y: x == y, '~=': lambda x, y: x == y or x > y, '!=': lambda x, y: x != y, - '<': lambda x, y: x < y, - '<=': lambda x, y: x == y or x < y, - '>': lambda x, y: x > y, - '>=': lambda x, y: x == y or x > y, + '<': lambda x, y: x < y, + '<=': lambda x, y: x == y or x < y, + '>': lambda x, y: x > y, + '>=': lambda x, y: x == y or x > y, 'and': lambda x, y: x and y, 'or': lambda x, y: x or y, 'in': lambda x, y: x in y, @@ -76,23 +82,27 @@ def evaluate(self, expr, context): elhs = expr['lhs'] erhs = expr['rhs'] if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): - raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) + raise SyntaxError('invalid comparison: %s %s %s' % + (elhs, op, erhs)) lhs = self.evaluate(elhs, context) rhs = self.evaluate(erhs, context) - if ((elhs == 'python_version' or erhs == 'python_version') and - op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')): - lhs = NV(lhs) - rhs = NV(rhs) - elif elhs == 'python_version' and op in ('in', 'not in'): - lhs = NV(lhs) + if ((_is_version_marker(elhs) or _is_version_marker(erhs)) + and op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')): + lhs = LV(lhs) + rhs = LV(rhs) + elif _is_version_marker(elhs) and op in ('in', 'not in'): + lhs = LV(lhs) rhs = _get_versions(rhs) result = self.operations[op](lhs, rhs) return result + _DIGITS = re.compile(r'\d+\.\d+') + def default_context(): + def format_full_version(info): version = '%s.%s.%s' % (info.major, info.minor, info.micro) kind = info.releaselevel @@ -101,7 +111,8 @@ def format_full_version(info): return version if hasattr(sys, 'implementation'): - implementation_version = format_full_version(sys.implementation.version) + implementation_version = format_full_version( + sys.implementation.version) implementation_name = sys.implementation.name else: implementation_version = '0' @@ -126,11 +137,13 @@ def format_full_version(info): } return result + DEFAULT_CONTEXT = default_context() del default_context evaluator = Evaluator() + def interpret(marker, execution_context=None): """ Interpret a marker and return a result depending on environment. 
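Since the evaluator above now routes python_version/python_full_version comparisons through LegacyVersion, the observable entry point is interpret(), whose signature appears just above. A quick sketch of evaluating markers both against the running interpreter and against an explicit execution context (which, per the code below, is layered over DEFAULT_CONTEXT):

    from pip._vendor.distlib.markers import interpret

    print(interpret('python_version >= "3.8" and os_name == "posix"'))
    # Override selected variables instead of using the running interpreter's:
    print(interpret('python_version < "3.0"', {'python_version': '2.7'}))  # True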
@@ -143,9 +156,11 @@ def interpret(marker, execution_context=None): try: expr, rest = parse_marker(marker) except Exception as e: - raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) + raise SyntaxError('Unable to interpret marker syntax: %s: %s' % + (marker, e)) if rest and rest[0] != '#': - raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) + raise SyntaxError('unexpected trailing data in marker: %s: %s' % + (marker, rest)) context = dict(DEFAULT_CONTEXT) if execution_context: context.update(execution_context) diff --git a/src/pip/_vendor/distlib/metadata.py b/src/pip/_vendor/distlib/metadata.py index c329e1977fd..7189aeef229 100644 --- a/src/pip/_vendor/distlib/metadata.py +++ b/src/pip/_vendor/distlib/metadata.py @@ -136,17 +136,9 @@ def _version2fieldlist(version): def _best_version(fields): """Detect the best version depending on the fields used.""" def _has_marker(keys, markers): - for marker in markers: - if marker in keys: - return True - return False - - keys = [] - for key, value in fields.items(): - if value in ([], 'UNKNOWN', None): - continue - keys.append(key) + return any(marker in keys for marker in markers) + keys = [key for key, value in fields.items() if value not in ([], 'UNKNOWN', None)] possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.1', '2.2'] # 2.0 removed # first let's try to see if a field is not part of one of the version diff --git a/src/pip/_vendor/distlib/scripts.py b/src/pip/_vendor/distlib/scripts.py index d2706242b8a..cfa45d2af18 100644 --- a/src/pip/_vendor/distlib/scripts.py +++ b/src/pip/_vendor/distlib/scripts.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2013-2015 Vinay Sajip. +# Copyright (C) 2013-2023 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # @@ -65,9 +65,11 @@ def enquote_executable(executable): executable = '"%s"' % executable return executable + # Keep the old name around (for now), as there is at least one project using it! _enquote_executable = enquote_executable + class ScriptMaker(object): """ A class to copy or create scripts from source scripts or callable @@ -77,21 +79,25 @@ class ScriptMaker(object): executable = None # for shebangs - def __init__(self, source_dir, target_dir, add_launchers=True, - dry_run=False, fileop=None): + def __init__(self, + source_dir, + target_dir, + add_launchers=True, + dry_run=False, + fileop=None): self.source_dir = source_dir self.target_dir = target_dir self.add_launchers = add_launchers self.force = False self.clobber = False # It only makes sense to set mode bits on POSIX. 
- self.set_mode = (os.name == 'posix') or (os.name == 'java' and - os._name == 'posix') + self.set_mode = (os.name == 'posix') or (os.name == 'java' + and os._name == 'posix') self.variants = set(('', 'X.Y')) self._fileop = fileop or FileOperator(dry_run) - self._is_nt = os.name == 'nt' or ( - os.name == 'java' and os._name == 'nt') + self._is_nt = os.name == 'nt' or (os.name == 'java' + and os._name == 'nt') self.version_info = sys.version_info def _get_alternate_executable(self, executable, options): @@ -102,6 +108,7 @@ def _get_alternate_executable(self, executable, options): return executable if sys.platform.startswith('java'): # pragma: no cover + def _is_shell(self, executable): """ Determine if the specified executable is a script @@ -146,8 +153,8 @@ def _build_shebang(self, executable, post_interp): max_shebang_length = 512 else: max_shebang_length = 127 - simple_shebang = ((b' ' not in executable) and - (shebang_length <= max_shebang_length)) + simple_shebang = ((b' ' not in executable) + and (shebang_length <= max_shebang_length)) if simple_shebang: result = b'#!' + executable + post_interp + b'\n' @@ -161,22 +168,25 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): enquote = True if self.executable: executable = self.executable - enquote = False # assume this will be taken care of + enquote = False # assume this will be taken care of elif not sysconfig.is_python_build(): executable = get_executable() elif in_venv(): # pragma: no cover - executable = os.path.join(sysconfig.get_path('scripts'), - 'python%s' % sysconfig.get_config_var('EXE')) - else: # pragma: no cover executable = os.path.join( - sysconfig.get_config_var('BINDIR'), - 'python%s%s' % (sysconfig.get_config_var('VERSION'), - sysconfig.get_config_var('EXE'))) - if not os.path.isfile(executable): + sysconfig.get_path('scripts'), + 'python%s' % sysconfig.get_config_var('EXE')) + else: # pragma: no cover + if os.name == 'nt': # for Python builds from source on Windows, no Python executables with # a version suffix are created, so we use python.exe - executable = os.path.join(sysconfig.get_config_var('BINDIR'), - 'python%s' % (sysconfig.get_config_var('EXE'))) + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s' % (sysconfig.get_config_var('EXE'))) + else: + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s%s' % (sysconfig.get_config_var('VERSION'), + sysconfig.get_config_var('EXE'))) if options: executable = self._get_alternate_executable(executable, options) @@ -201,7 +211,7 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): executable = executable.encode('utf-8') # in case of IronPython, play safe and enable frames support if (sys.platform == 'cli' and '-X:Frames' not in post_interp - and '-X:FullFrames' not in post_interp): # pragma: no cover + and '-X:FullFrames' not in post_interp): # pragma: no cover post_interp += b' -X:Frames' shebang = self._build_shebang(executable, post_interp) # Python parser starts to read a script using UTF-8 until @@ -212,8 +222,8 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): try: shebang.decode('utf-8') except UnicodeDecodeError: # pragma: no cover - raise ValueError( - 'The shebang (%r) is not decodable from utf-8' % shebang) + raise ValueError('The shebang (%r) is not decodable from utf-8' % + shebang) # If the script is encoded to a custom encoding (use a # #coding:xxx cookie), the shebang has to be decodable from # the script encoding too. 
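_build_shebang() above only emits a plain '#!' line when the interpreter path contains no spaces and the whole line fits the limit used here (512 bytes on Windows, 127 elsewhere); otherwise it wraps the interpreter in a /bin/sh trampoline. A standalone sketch of that decision, not the vendored implementation itself:

    def build_shebang(executable: bytes, post_interp: bytes = b'',
                      max_len: int = 127) -> bytes:
        # '#!' plus the trailing newline account for the extra 3 bytes.
        if b' ' not in executable and len(executable) + len(post_interp) + 3 <= max_len:
            return b'#!' + executable + post_interp + b'\n'
        # /bin/sh trampoline: sh exec's the real interpreter with the script's args.
        return (b'#!/bin/sh\n' + b"'''exec' " + executable + post_interp +
                b' "$0" "$@"\n' + b"' '''")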
@@ -221,15 +231,16 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): try: shebang.decode(encoding) except UnicodeDecodeError: # pragma: no cover - raise ValueError( - 'The shebang (%r) is not decodable ' - 'from the script encoding (%r)' % (shebang, encoding)) + raise ValueError('The shebang (%r) is not decodable ' + 'from the script encoding (%r)' % + (shebang, encoding)) return shebang def _get_script_text(self, entry): - return self.script_template % dict(module=entry.prefix, - import_name=entry.suffix.split('.')[0], - func=entry.suffix) + return self.script_template % dict( + module=entry.prefix, + import_name=entry.suffix.split('.')[0], + func=entry.suffix) manifest = _DEFAULT_MANIFEST @@ -254,7 +265,8 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH') if source_date_epoch: date_time = time.gmtime(int(source_date_epoch))[:6] - zinfo = ZipInfo(filename='__main__.py', date_time=date_time) + zinfo = ZipInfo(filename='__main__.py', + date_time=date_time) zf.writestr(zinfo, script_bytes) else: zf.writestr('__main__.py', script_bytes) @@ -275,7 +287,7 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): 'use .deleteme logic') dfname = '%s.deleteme' % outname if os.path.exists(dfname): - os.remove(dfname) # Not allowed to fail here + os.remove(dfname) # Not allowed to fail here os.rename(outname, dfname) # nor here self._fileop.write_binary_file(outname, script_bytes) logger.debug('Able to replace executable using ' @@ -283,9 +295,10 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): try: os.remove(dfname) except Exception: - pass # still in use - ignore error + pass # still in use - ignore error else: - if self._is_nt and not outname.endswith('.' + ext): # pragma: no cover + if self._is_nt and not outname.endswith( + '.' + ext): # pragma: no cover outname = '%s.%s' % (outname, ext) if os.path.exists(outname) and not self.clobber: logger.warning('Skipping existing file %s', outname) @@ -304,8 +317,9 @@ def get_script_filenames(self, name): if 'X' in self.variants: result.add('%s%s' % (name, self.version_info[0])) if 'X.Y' in self.variants: - result.add('%s%s%s.%s' % (name, self.variant_separator, - self.version_info[0], self.version_info[1])) + result.add('%s%s%s.%s' % + (name, self.variant_separator, self.version_info[0], + self.version_info[1])) return result def _make_script(self, entry, filenames, options=None): @@ -383,12 +397,13 @@ def dry_run(self): def dry_run(self, value): self._fileop.dry_run = value - if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover + if os.name == 'nt' or (os.name == 'java' + and os._name == 'nt'): # pragma: no cover # Executable launcher support. 
# Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ def _get_launcher(self, kind): - if struct.calcsize('P') == 8: # 64-bit + if struct.calcsize('P') == 8: # 64-bit bits = '64' else: bits = '32' @@ -399,8 +414,8 @@ def _get_launcher(self, kind): distlib_package = __name__.rsplit('.', 1)[0] resource = finder(distlib_package).find(name) if not resource: - msg = ('Unable to find resource %s in package %s' % (name, - distlib_package)) + msg = ('Unable to find resource %s in package %s' % + (name, distlib_package)) raise ValueError(msg) return resource.bytes diff --git a/src/pip/_vendor/distlib/util.py b/src/pip/_vendor/distlib/util.py index dd01849d997..ba58858d0fb 100644 --- a/src/pip/_vendor/distlib/util.py +++ b/src/pip/_vendor/distlib/util.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2012-2021 The Python Software Foundation. +# Copyright (C) 2012-2023 The Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # import codecs @@ -33,7 +33,7 @@ from . import DistlibException from .compat import (string_types, text_type, shutil, raw_input, StringIO, cache_from_source, urlopen, urljoin, httplib, xmlrpclib, - splittype, HTTPHandler, BaseConfigurator, valid_ident, + HTTPHandler, BaseConfigurator, valid_ident, Container, configparser, URLError, ZipFile, fsdecode, unquote, urlparse) @@ -62,6 +62,7 @@ def parse_marker(marker_string): interpreted as a literal string, and a string not contained in quotes is a variable (such as os_name). """ + def marker_var(remaining): # either identifier, or literal string m = IDENTIFIER.match(remaining) @@ -87,7 +88,8 @@ def marker_var(remaining): else: m = STRING_CHUNK.match(remaining) if not m: - raise SyntaxError('error in string literal: %s' % remaining) + raise SyntaxError('error in string literal: %s' % + remaining) parts.append(m.groups()[0]) remaining = remaining[m.end():] else: @@ -95,7 +97,7 @@ def marker_var(remaining): raise SyntaxError('unterminated string: %s' % s) parts.append(q) result = ''.join(parts) - remaining = remaining[1:].lstrip() # skip past closing quote + remaining = remaining[1:].lstrip() # skip past closing quote return result, remaining def marker_expr(remaining): @@ -208,7 +210,8 @@ def get_versions(ver_remaining): ver_remaining = ver_remaining[m.end():] m = VERSION_IDENTIFIER.match(ver_remaining) if not m: - raise SyntaxError('invalid version: %s' % ver_remaining) + raise SyntaxError('invalid version: %s' % + ver_remaining) v = m.groups()[0] versions.append((op, v)) ver_remaining = ver_remaining[m.end():] @@ -221,7 +224,8 @@ def get_versions(ver_remaining): break m = COMPARE_OP.match(ver_remaining) if not m: - raise SyntaxError('invalid constraint: %s' % ver_remaining) + raise SyntaxError('invalid constraint: %s' % + ver_remaining) if not versions: versions = None return versions, ver_remaining @@ -231,7 +235,8 @@ def get_versions(ver_remaining): else: i = remaining.find(')', 1) if i < 0: - raise SyntaxError('unterminated parenthesis: %s' % remaining) + raise SyntaxError('unterminated parenthesis: %s' % + remaining) s = remaining[1:i] remaining = remaining[i + 1:].lstrip() # As a special diversion from PEP 508, allow a version number @@ -262,9 +267,14 @@ def get_versions(ver_remaining): if not versions: rs = distname else: - rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) - return Container(name=distname, extras=extras, constraints=versions, - marker=mark_expr, url=uri, requirement=rs) + rs = '%s %s' % (distname, ', '.join( + ['%s %s' % con for con in versions])) + return 
Container(name=distname, + extras=extras, + constraints=versions, + marker=mark_expr, + url=uri, + requirement=rs) def get_resources_dests(resources_root, rules): @@ -304,15 +314,15 @@ def in_venv(): def get_executable(): -# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as -# changes to the stub launcher mean that sys.executable always points -# to the stub on OS X -# if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' -# in os.environ): -# result = os.environ['__PYVENV_LAUNCHER__'] -# else: -# result = sys.executable -# return result + # The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as + # changes to the stub launcher mean that sys.executable always points + # to the stub on OS X + # if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' + # in os.environ): + # result = os.environ['__PYVENV_LAUNCHER__'] + # else: + # result = sys.executable + # return result # Avoid normcasing: see issue #143 # result = os.path.normcase(sys.executable) result = sys.executable @@ -346,6 +356,7 @@ def extract_by_key(d, keys): result[key] = d[key] return result + def read_exports(stream): if sys.version_info[0] >= 3: # needs to be a text stream @@ -388,7 +399,7 @@ def read_stream(cp, stream): s = '%s = %s' % (name, value) entry = get_export_entry(s) assert entry is not None - #entry.dist = self + # entry.dist = self entries[name] = entry return result @@ -420,6 +431,7 @@ def tempdir(): finally: shutil.rmtree(td) + @contextlib.contextmanager def chdir(d): cwd = os.getcwd() @@ -441,19 +453,21 @@ def socket_timeout(seconds=15): class cached_property(object): + def __init__(self, func): self.func = func - #for attr in ('__name__', '__module__', '__doc__'): - # setattr(self, attr, getattr(func, attr, None)) + # for attr in ('__name__', '__module__', '__doc__'): + # setattr(self, attr, getattr(func, attr, None)) def __get__(self, obj, cls=None): if obj is None: return self value = self.func(obj) object.__setattr__(obj, self.func.__name__, value) - #obj.__dict__[self.func.__name__] = value = self.func(obj) + # obj.__dict__[self.func.__name__] = value = self.func(obj) return value + def convert_path(pathname): """Return 'pathname' as a name that will work on the native filesystem. 
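The cached_property above is a non-data descriptor: __get__ runs the wrapped function once and plants the result on the instance under the same name via object.__setattr__, so later lookups hit the instance attribute and bypass the descriptor entirely. A minimal usage sketch:

    from pip._vendor.distlib.util import cached_property

    class Dataset:
        def __init__(self, rows):
            self.rows = rows

        @cached_property
        def total(self):
            print('computing...')    # printed only on first access
            return sum(self.rows)

    d = Dataset([1, 2, 3])
    print(d.total)   # computing... 6
    print(d.total)   # 6 (served from the instance attribute)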
@@ -482,6 +496,7 @@ def convert_path(pathname): class FileOperator(object): + def __init__(self, dry_run=False): self.dry_run = dry_run self.ensured = set() @@ -586,7 +601,12 @@ def ensure_dir(self, path): if self.record: self.dirs_created.add(path) - def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): + def byte_compile(self, + path, + optimize=False, + force=False, + prefix=None, + hashed_invalidation=False): dpath = cache_from_source(path, not optimize) logger.info('Byte-compiling %s to %s', path, dpath) if not self.dry_run: @@ -597,9 +617,12 @@ def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_in assert path.startswith(prefix) diagpath = path[len(prefix):] compile_kwargs = {} - if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): - compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH - py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error + if hashed_invalidation and hasattr(py_compile, + 'PycInvalidationMode'): + compile_kwargs[ + 'invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH + py_compile.compile(path, dpath, diagpath, True, + **compile_kwargs) # raise error self.record_as_written(dpath) return dpath @@ -661,9 +684,10 @@ def rollback(self): assert flist == ['__pycache__'] sd = os.path.join(d, flist[0]) os.rmdir(sd) - os.rmdir(d) # should fail if non-empty + os.rmdir(d) # should fail if non-empty self._init_record() + def resolve(module_name, dotted_path): if module_name in sys.modules: mod = sys.modules[module_name] @@ -680,6 +704,7 @@ def resolve(module_name, dotted_path): class ExportEntry(object): + def __init__(self, name, prefix, suffix, flags): self.name = name self.prefix = prefix @@ -698,20 +723,21 @@ def __eq__(self, other): if not isinstance(other, ExportEntry): result = False else: - result = (self.name == other.name and - self.prefix == other.prefix and - self.suffix == other.suffix and - self.flags == other.flags) + result = (self.name == other.name and self.prefix == other.prefix + and self.suffix == other.suffix + and self.flags == other.flags) return result __hash__ = object.__hash__ -ENTRY_RE = re.compile(r'''(?P(\w|[-.+])+) +ENTRY_RE = re.compile( + r'''(?P([^\[]\S*)) \s*=\s*(?P(\w+)([:\.]\w+)*) \s*(\[\s*(?P[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? ''', re.VERBOSE) + def get_export_entry(specification): m = ENTRY_RE.search(specification) if not m: @@ -827,6 +853,7 @@ def get_process_umask(): os.umask(result) return result + def is_string_sequence(seq): result = True i = None @@ -837,8 +864,10 @@ def is_string_sequence(seq): assert i is not None return result -PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' - '([a-z0-9_.+-]+)', re.I) + +PROJECT_NAME_AND_VERSION = re.compile( + '([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' + '([a-z0-9_.+-]+)', re.I) PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') @@ -866,10 +895,12 @@ def split_filename(filename, project_name=None): result = m.group(1), m.group(3), pyver return result + # Allow spaces in name because of legacy dists like "Twisted Core" NAME_VERSION_RE = re.compile(r'(?P[\w .-]+)\s*' r'\(\s*(?P[^\s)]+)\)$') + def parse_name_and_version(p): """ A utility method used to get name and version from a string. 
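The ENTRY_RE change above relaxes the exported name from word characters plus '-.+' to any non-space run that does not open with '[', while the callable part keeps its dotted/colon form. A quick sketch of parsing an export (entry-point style) specification with the get_export_entry() helper defined just below the pattern; the commented values are what the regex groups suggest, not verified output:

    from pip._vendor.distlib.util import get_export_entry

    entry = get_export_entry('pip = pip._internal.cli.main:main')
    print(entry.name)    # 'pip'
    print(entry.prefix)  # module path, e.g. 'pip._internal.cli.main'
    print(entry.suffix)  # attribute within the module, e.g. 'main'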
@@ -885,6 +916,7 @@ def parse_name_and_version(p): d = m.groupdict() return d['name'].strip().lower(), d['ver'] + def get_extras(requested, available): result = set() requested = set(requested or []) @@ -906,10 +938,13 @@ def get_extras(requested, available): logger.warning('undeclared extra: %s' % r) result.add(r) return result + + # # Extended metadata functionality # + def _get_external_data(url): result = {} try: @@ -923,21 +958,24 @@ def _get_external_data(url): logger.debug('Unexpected response for JSON request: %s', ct) else: reader = codecs.getreader('utf-8')(resp) - #data = reader.read().decode('utf-8') - #result = json.loads(data) + # data = reader.read().decode('utf-8') + # result = json.loads(data) result = json.load(reader) except Exception as e: logger.exception('Failed to get external data for %s: %s', url, e) return result + _external_data_base_url = 'https://www.red-dove.com/pypi/projects/' + def get_project_data(name): url = '%s/%s/project.json' % (name[0].upper(), name) url = urljoin(_external_data_base_url, url) result = _get_external_data(url) return result + def get_package_data(name, version): url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) url = urljoin(_external_data_base_url, url) @@ -992,6 +1030,7 @@ class EventMixin(object): """ A very simple publish/subscribe system. """ + def __init__(self): self._subscribers = {} @@ -1053,18 +1092,20 @@ def publish(self, event, *args, **kwargs): logger.exception('Exception during event publication') value = None result.append(value) - logger.debug('publish %s: args = %s, kwargs = %s, result = %s', - event, args, kwargs, result) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event, + args, kwargs, result) return result + # # Simple sequencing # class Sequencer(object): + def __init__(self): self._preds = {} self._succs = {} - self._nodes = set() # nodes with no preds/succs + self._nodes = set() # nodes with no preds/succs def add_node(self, node): self._nodes.add(node) @@ -1104,8 +1145,8 @@ def remove(self, pred, succ): raise ValueError('%r not a successor of %r' % (succ, pred)) def is_step(self, step): - return (step in self._preds or step in self._succs or - step in self._nodes) + return (step in self._preds or step in self._succs + or step in self._nodes) def get_steps(self, final): if not self.is_step(final): @@ -1134,7 +1175,7 @@ def get_steps(self, final): @property def strong_connections(self): - #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm + # http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm index_counter = [0] stack = [] lowlinks = {} @@ -1159,11 +1200,11 @@ def strongconnect(node): if successor not in lowlinks: # Successor has not yet been visited strongconnect(successor) - lowlinks[node] = min(lowlinks[node],lowlinks[successor]) + lowlinks[node] = min(lowlinks[node], lowlinks[successor]) elif successor in stack: # the successor is in the stack and hence in the current # strongly connected component (SCC) - lowlinks[node] = min(lowlinks[node],index[successor]) + lowlinks[node] = min(lowlinks[node], index[successor]) # If `node` is a root node, pop the stack and generate an SCC if lowlinks[node] == index[node]: @@ -1172,7 +1213,8 @@ def strongconnect(node): while True: successor = stack.pop() connected_component.append(successor) - if successor == node: break + if successor == node: + break component = tuple(connected_component) # storing the result result.append(component) @@ -1195,12 +1237,14 @@ def dot(self): 
result.append('}') return '\n'.join(result) + # # Unarchiving functionality for zip, tar, tgz, tbz, whl # -ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', - '.tgz', '.tbz', '.whl') +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz', + '.whl') + def unarchive(archive_filename, dest_dir, format=None, check=True): @@ -1249,6 +1293,20 @@ def check_path(path): for tarinfo in archive.getmembers(): if not isinstance(tarinfo.name, text_type): tarinfo.name = tarinfo.name.decode('utf-8') + + # Limit extraction of dangerous items, if this Python + # allows it easily. If not, just trust the input. + # See: https://docs.python.org/3/library/tarfile.html#extraction-filters + def extraction_filter(member, path): + """Run tarfile.tar_filter, but raise the expected ValueError""" + # This is only called if the current Python has tarfile filters + try: + return tarfile.tar_filter(member, path) + except tarfile.FilterError as exc: + raise ValueError(str(exc)) + + archive.extraction_filter = extraction_filter + archive.extractall(dest_dir) finally: @@ -1269,11 +1327,12 @@ def zip_dir(directory): zf.write(full, dest) return result + # # Simple progress bar # -UNITS = ('', 'K', 'M', 'G','T','P') +UNITS = ('', 'K', 'M', 'G', 'T', 'P') class Progress(object): @@ -1328,8 +1387,8 @@ def percentage(self): def format_duration(self, duration): if (duration <= 0) and self.max is None or self.cur == self.min: result = '??:??:??' - #elif duration < 1: - # result = '--:--:--' + # elif duration < 1: + # result = '--:--:--' else: result = time.strftime('%H:%M:%S', time.gmtime(duration)) return result @@ -1339,7 +1398,7 @@ def ETA(self): if self.done: prefix = 'Done' t = self.elapsed - #import pdb; pdb.set_trace() + # import pdb; pdb.set_trace() else: prefix = 'ETA ' if self.max is None: @@ -1347,7 +1406,7 @@ def ETA(self): elif self.elapsed == 0 or (self.cur == self.min): t = 0 else: - #import pdb; pdb.set_trace() + # import pdb; pdb.set_trace() t = float(self.max - self.min) t /= self.cur - self.min t = (t - 1) * self.elapsed @@ -1365,6 +1424,7 @@ def speed(self): result /= 1000.0 return '%d %sB/s' % (result, unit) + # # Glob functionality # @@ -1412,22 +1472,23 @@ def _iglob(path_glob): for fn in _iglob(os.path.join(path, radical)): yield fn + if ssl: from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, CertificateError) - -# -# HTTPSConnection which verifies certificates/matches domains -# + # + # HTTPSConnection which verifies certificates/matches domains + # class HTTPSConnection(httplib.HTTPSConnection): - ca_certs = None # set this to the path to the certs file (.pem) - check_domain = True # only used if ca_certs is not None + ca_certs = None # set this to the path to the certs file (.pem) + check_domain = True # only used if ca_certs is not None # noinspection PyPropertyAccess def connect(self): - sock = socket.create_connection((self.host, self.port), self.timeout) + sock = socket.create_connection((self.host, self.port), + self.timeout) if getattr(self, '_tunnel_host', False): self.sock = sock self._tunnel() @@ -1435,7 +1496,7 @@ def connect(self): context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) if hasattr(ssl, 'OP_NO_SSLv2'): context.options |= ssl.OP_NO_SSLv2 - if self.cert_file: + if getattr(self, 'cert_file', None): context.load_cert_chain(self.cert_file, self.key_file) kwargs = {} if self.ca_certs: @@ -1455,6 +1516,7 @@ def connect(self): raise class HTTPSHandler(BaseHTTPSHandler): + def __init__(self, ca_certs, check_domain=True): 
BaseHTTPSHandler.__init__(self) self.ca_certs = ca_certs @@ -1481,8 +1543,9 @@ def https_open(self, req): return self.do_open(self._conn_maker, req) except URLError as e: if 'certificate verify failed' in str(e.reason): - raise CertificateError('Unable to verify server certificate ' - 'for %s' % req.host) + raise CertificateError( + 'Unable to verify server certificate ' + 'for %s' % req.host) else: raise @@ -1496,14 +1559,18 @@ def https_open(self, req): # handler for HTTP itself. # class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): + def http_open(self, req): - raise URLError('Unexpected HTTP request on what should be a secure ' - 'connection: %s' % req) + raise URLError( + 'Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) + # # XML-RPC with timeouts # class Transport(xmlrpclib.Transport): + def __init__(self, timeout, use_datetime=0): self.timeout = timeout xmlrpclib.Transport.__init__(self, use_datetime) @@ -1515,8 +1582,11 @@ def make_connection(self, host): self._connection = host, httplib.HTTPConnection(h) return self._connection[1] + if ssl: + class SafeTransport(xmlrpclib.SafeTransport): + def __init__(self, timeout, use_datetime=0): self.timeout = timeout xmlrpclib.SafeTransport.__init__(self, use_datetime) @@ -1528,12 +1598,13 @@ def make_connection(self, host): kwargs['timeout'] = self.timeout if not self._connection or host != self._connection[0]: self._extra_headers = eh - self._connection = host, httplib.HTTPSConnection(h, None, - **kwargs) + self._connection = host, httplib.HTTPSConnection( + h, None, **kwargs) return self._connection[1] class ServerProxy(xmlrpclib.ServerProxy): + def __init__(self, uri, **kwargs): self.timeout = timeout = kwargs.pop('timeout', None) # The above classes only come into play if a timeout @@ -1550,11 +1621,13 @@ def __init__(self, uri, **kwargs): self.transport = t xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) + # # CSV functionality. This is provided because on 2.x, the csv module can't # handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. 
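The CSV helpers below exist because the csv module on 2.x could not handle Unicode, while RECORD files must; CSVBase pins the dialect to ',', '"' and '\n'. A hedged round-trip sketch (the 'path' keyword for CSVReader is an assumption based on how _csv_open is used here):

    from pip._vendor.distlib.util import CSVReader, CSVWriter

    with CSVWriter('RECORD') as writer:
        writer.writerow(('pkg/__init__.py', 'sha256=abc123', '1234'))

    with CSVReader(path='RECORD') as reader:
        for row in reader:
            print(row)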
# + def _csv_open(fn, mode, **kwargs): if sys.version_info[0] < 3: mode += 'b' @@ -1568,9 +1641,9 @@ def _csv_open(fn, mode, **kwargs): class CSVBase(object): defaults = { - 'delimiter': str(','), # The strs are used because we need native - 'quotechar': str('"'), # str in the csv API (2.x won't take - 'lineterminator': str('\n') # Unicode) + 'delimiter': str(','), # The strs are used because we need native + 'quotechar': str('"'), # str in the csv API (2.x won't take + 'lineterminator': str('\n') # Unicode) } def __enter__(self): @@ -1581,6 +1654,7 @@ def __exit__(self, *exc_info): class CSVReader(CSVBase): + def __init__(self, **kwargs): if 'stream' in kwargs: stream = kwargs['stream'] @@ -1605,7 +1679,9 @@ def next(self): __next__ = next + class CSVWriter(CSVBase): + def __init__(self, fn, **kwargs): self.stream = _csv_open(fn, 'w') self.writer = csv.writer(self.stream, **self.defaults) @@ -1620,10 +1696,12 @@ def writerow(self, row): row = r self.writer.writerow(row) + # # Configurator functionality # + class Configurator(BaseConfigurator): value_converters = dict(BaseConfigurator.value_converters) @@ -1634,6 +1712,7 @@ def __init__(self, config, base=None): self.base = base or os.getcwd() def configure_custom(self, config): + def convert(o): if isinstance(o, (list, tuple)): result = type(o)([convert(i) for i in o]) @@ -1683,6 +1762,7 @@ class SubprocessMixin(object): """ Mixin for running subprocesses and capturing their output """ + def __init__(self, verbose=False, progress=None): self.verbose = verbose self.progress = progress @@ -1709,8 +1789,10 @@ def reader(self, stream, context): stream.close() def run_command(self, cmd, **kwargs): - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, **kwargs) + p = subprocess.Popen(cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + **kwargs) t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) t1.start() t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) @@ -1730,15 +1812,17 @@ def normalize_name(name): # https://www.python.org/dev/peps/pep-0503/#normalized-names return re.sub('[-_.]+', '-', name).lower() + # def _get_pypirc_command(): - # """ - # Get the distutils command for interacting with PyPI configurations. - # :return: the command. - # """ - # from distutils.core import Distribution - # from distutils.config import PyPIRCCommand - # d = Distribution() - # return PyPIRCCommand(d) +# """ +# Get the distutils command for interacting with PyPI configurations. +# :return: the command. 
+# """ +# from distutils.core import Distribution +# from distutils.config import PyPIRCCommand +# d = Distribution() +# return PyPIRCCommand(d) + class PyPIRCFile(object): @@ -1763,9 +1847,10 @@ def read(self): if 'distutils' in sections: # let's get the list of servers index_servers = config.get('distutils', 'index-servers') - _servers = [server.strip() for server in - index_servers.split('\n') - if server.strip() != ''] + _servers = [ + server.strip() for server in index_servers.split('\n') + if server.strip() != '' + ] if _servers == []: # nothing set, let's try to get the default pypi if 'pypi' in sections: @@ -1776,7 +1861,8 @@ def read(self): result['username'] = config.get(server, 'username') # optional params - for key, default in (('repository', self.DEFAULT_REPOSITORY), + for key, default in (('repository', + self.DEFAULT_REPOSITORY), ('realm', self.DEFAULT_REALM), ('password', None)): if config.has_option(server, key): @@ -1787,11 +1873,11 @@ def read(self): # work around people having "repository" for the "pypi" # section of their config set to the HTTP (rather than # HTTPS) URL - if (server == 'pypi' and - repository in (self.DEFAULT_REPOSITORY, 'pypi')): + if (server == 'pypi' and repository + in (self.DEFAULT_REPOSITORY, 'pypi')): result['repository'] = self.DEFAULT_REPOSITORY - elif (result['server'] != repository and - result['repository'] != repository): + elif (result['server'] != repository + and result['repository'] != repository): result = {} elif 'server-login' in sections: # old format @@ -1821,20 +1907,24 @@ def update(self, username, password): with open(fn, 'w') as f: config.write(f) + def _load_pypirc(index): """ Read the PyPI access configuration as supported by distutils. """ return PyPIRCFile(url=index.url).read() + def _store_pypirc(index): PyPIRCFile().update(index.username, index.password) + # # get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor # tweaks # + def get_host_platform(): """Return a string that identifies the current platform. This is used mainly to distinguish platform-specific build directories and platform-specific built @@ -1886,16 +1976,16 @@ def get_host_platform(): # At least on Linux/Intel, 'machine' is the processor -- # i386, etc. # XXX what about Alpha, SPARC, etc? - return "%s-%s" % (osname, machine) + return "%s-%s" % (osname, machine) elif osname[:5] == 'sunos': - if release[0] >= '5': # SunOS 5 == Solaris 2 + if release[0] >= '5': # SunOS 5 == Solaris 2 osname = 'solaris' release = '%d.%s' % (int(release[0]) - 3, release[2:]) # We can't use 'platform.architecture()[0]' because a # bootstrap problem. We use a dict to get an error # if some suspicious happens. 
- bitness = {2147483647:'32bit', 9223372036854775807:'64bit'} + bitness = {2147483647: '32bit', 9223372036854775807: '64bit'} machine += '.%s' % bitness[sys.maxsize] # fall through to standard osname-release-machine representation elif osname[:3] == 'aix': @@ -1903,23 +1993,26 @@ def get_host_platform(): return aix_platform() elif osname[:6] == 'cygwin': osname = 'cygwin' - rel_re = re.compile (r'[\d.]+', re.ASCII) + rel_re = re.compile(r'[\d.]+', re.ASCII) m = rel_re.match(release) if m: release = m.group() elif osname[:6] == 'darwin': - import _osx_support, distutils.sysconfig + import _osx_support + try: + from distutils import sysconfig + except ImportError: + import sysconfig osname, release, machine = _osx_support.get_platform_osx( - distutils.sysconfig.get_config_vars(), - osname, release, machine) + sysconfig.get_config_vars(), osname, release, machine) return '%s-%s-%s' % (osname, release, machine) _TARGET_TO_PLAT = { - 'x86' : 'win32', - 'x64' : 'win-amd64', - 'arm' : 'win-arm32', + 'x86': 'win32', + 'x64': 'win-amd64', + 'arm': 'win-arm32', } diff --git a/src/pip/_vendor/distlib/version.py b/src/pip/_vendor/distlib/version.py index c7c8bb6ff4f..14171ac938d 100644 --- a/src/pip/_vendor/distlib/version.py +++ b/src/pip/_vendor/distlib/version.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2017 The Python Software Foundation. +# Copyright (C) 2012-2023 The Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # """ @@ -176,9 +176,9 @@ def __str__(self): return self._string -PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?' - r'(\.(post)(\d+))?(\.(dev)(\d+))?' - r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$') +PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|alpha|b|beta|c|rc|pre|preview)(\d+)?)?' + r'(\.(post|r|rev)(\d+)?)?([._-]?(dev)(\d+)?)?' + r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$', re.I) def _pep_440_key(s): @@ -202,15 +202,24 @@ def _pep_440_key(s): if pre == (None, None): pre = () else: - pre = pre[0], int(pre[1]) + if pre[1] is None: + pre = pre[0], 0 + else: + pre = pre[0], int(pre[1]) if post == (None, None): post = () else: - post = post[0], int(post[1]) + if post[1] is None: + post = post[0], 0 + else: + post = post[0], int(post[1]) if dev == (None, None): dev = () else: - dev = dev[0], int(dev[1]) + if dev[1] is None: + dev = dev[0], 0 + else: + dev = dev[0], int(dev[1]) if local is None: local = () else: @@ -238,7 +247,6 @@ def _pep_440_key(s): if not dev: dev = ('final',) - #print('%s -> %s' % (s, m.groups())) return epoch, nums, pre, post, dev, local @@ -378,6 +386,7 @@ def _match_compatible(self, version, constraint, prefix): pfx = '.'.join([str(i) for i in release_clause]) return _match_prefix(version, pfx) + _REPLACEMENTS = ( (re.compile('[.+-]$'), ''), # remove trailing puncts (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start @@ -388,7 +397,7 @@ def _match_compatible(self, version, constraint, prefix): (re.compile('[.]{2,}'), '.'), # multiple runs of '.' (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha (re.compile(r'\b(pre-alpha|prealpha)\b'), - 'pre.alpha'), # standardise + 'pre.alpha'), # standardise (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses ) @@ -416,7 +425,7 @@ def _suggest_semantic_version(s): # Now look for numeric prefix, and separate it out from # the rest. 
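The PEP440_VERSION_RE change above is behavioral, not just a reflow: alternate pre-release spellings (alpha, beta, pre, preview), the r/rev post-release aliases, and suffixes without an explicit number now match, case-insensitively, and the _pep_440_key hunks after it default those missing numbers to 0. A self-contained check, with the pattern copied verbatim from the new hunk:

    import re

    PEP440_VERSION_RE = re.compile(
        r'^v?(\d+!)?(\d+(\.\d+)*)((a|alpha|b|beta|c|rc|pre|preview)(\d+)?)?'
        r'(\.(post|r|rev)(\d+)?)?([._-]?(dev)(\d+)?)?'
        r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$', re.I)

    # All of these match the new pattern; only '1.0rc1' matched the old one.
    for s in ('1.0rc1', '1.0alpha2', '2.0.post', '3.1.rev4', '1.0-dev'):
        assert PEP440_VERSION_RE.match(s), s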
- #import pdb; pdb.set_trace() + # import pdb; pdb.set_trace() m = _NUMERIC_PREFIX.match(result) if not m: prefix = '0.0.0' @@ -434,7 +443,7 @@ def _suggest_semantic_version(s): prefix = '.'.join([str(i) for i in prefix]) suffix = suffix.strip() if suffix: - #import pdb; pdb.set_trace() + # import pdb; pdb.set_trace() # massage the suffix. for pat, repl in _SUFFIX_REPLACEMENTS: suffix = pat.sub(repl, suffix) @@ -504,7 +513,7 @@ def _suggest_normalized_version(s): rs = rs[1:] # Clean leading '0's on numbers. - #TODO: unintended side-effect on, e.g., "2003.05.09" + # TODO: unintended side-effect on, e.g., "2003.05.09" # PyPI stats: 77 (~2%) better rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) @@ -563,6 +572,7 @@ def _suggest_normalized_version(s): # Legacy version processing (distribute-compatible) # + _VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) _VERSION_REPLACE = { 'pre': 'c', @@ -610,7 +620,7 @@ def is_prerelease(self): result = False for x in self._parts: if (isinstance(x, string_types) and x.startswith('*') and - x < '*final'): + x < '*final'): result = True break return result @@ -641,6 +651,7 @@ def _match_compatible(self, version, constraint, prefix): # Semantic versioning # + _SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I) @@ -722,6 +733,7 @@ def suggest(self, s): result = self.suggester(s) return result + _SCHEMES = { 'normalized': VersionScheme(_normalized_key, NormalizedMatcher, _suggest_normalized_version), diff --git a/src/pip/_vendor/distlib/wheel.py b/src/pip/_vendor/distlib/wheel.py index 028c2d99b57..4a5a30e1d8d 100644 --- a/src/pip/_vendor/distlib/wheel.py +++ b/src/pip/_vendor/distlib/wheel.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2013-2020 Vinay Sajip. +# Copyright (C) 2013-2023 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # @@ -24,8 +24,7 @@ from . import __version__, DistlibException from .compat import sysconfig, ZipFile, fsdecode, text_type, filter from .database import InstalledDistribution -from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, - LEGACY_METADATA_FILENAME) +from .metadata import Metadata, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, cached_property, get_cache_base, read_exports, tempdir, get_platform) @@ -33,7 +32,7 @@ logger = logging.getLogger(__name__) -cache = None # created when needed +cache = None # created when needed if hasattr(sys, 'pypy_version_info'): # pragma: no cover IMP_PREFIX = 'pp' @@ -45,7 +44,7 @@ IMP_PREFIX = 'cp' VER_SUFFIX = sysconfig.get_config_var('py_version_nodot') -if not VER_SUFFIX: # pragma: no cover +if not VER_SUFFIX: # pragma: no cover VER_SUFFIX = '%s%s' % sys.version_info[:2] PYVER = 'py' + VER_SUFFIX IMPVER = IMP_PREFIX + VER_SUFFIX @@ -56,6 +55,7 @@ if ABI and ABI.startswith('cpython-'): ABI = ABI.replace('cpython-', 'cp').split('-')[0] else: + def _derive_abi(): parts = ['cp', VER_SUFFIX] if sysconfig.get_config_var('Py_DEBUG'): @@ -73,10 +73,12 @@ def _derive_abi(): if us == 4 or (us is None and sys.maxunicode == 0x10FFFF): parts.append('u') return ''.join(parts) + ABI = _derive_abi() del _derive_abi -FILENAME_RE = re.compile(r''' +FILENAME_RE = re.compile( + r''' (?P<nm>[^-]+) -(?P<vn>\d+[^-]*) (-(?P<bn>\d+[^-]*))?
@@ -86,7 +88,8 @@ def _derive_abi(): \.whl$ ''', re.IGNORECASE | re.VERBOSE) -NAME_VERSION_RE = re.compile(r''' +NAME_VERSION_RE = re.compile( + r''' (?P<nm>[^-]+) -(?P<vn>\d+[^-]*) (-(?P<bn>\d+[^-]*))?$ @@ -109,12 +112,14 @@ def _derive_abi(): import importlib.machinery import importlib.util + def _get_suffixes(): if imp: return [s[0] for s in imp.get_suffixes()] else: return importlib.machinery.EXTENSION_SUFFIXES + def _load_dynamic(name, path): # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly if imp: @@ -126,7 +131,9 @@ def _load_dynamic(name, path): spec.loader.exec_module(module) return module + class Mounter(object): + def __init__(self): self.impure_wheels = {} self.libs = {} @@ -161,6 +168,7 @@ def load_module(self, fullname): result.__package__ = parts[0] return result + _hook = Mounter() @@ -227,8 +235,8 @@ def filename(self): arch = '.'.join(self.arch) # replace - with _ as a local version separator version = self.version.replace('-', '_') - return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, - pyver, abi, arch) + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, pyver, + abi, arch) @property def exists(self): @@ -249,14 +257,14 @@ def metadata(self): info_dir = '%s.dist-info' % name_ver wrapper = codecs.getreader('utf-8') with ZipFile(pathname, 'r') as zf: - wheel_metadata = self.get_wheel_metadata(zf) - wv = wheel_metadata['Wheel-Version'].split('.', 1) - file_version = tuple([int(i) for i in wv]) + self.get_wheel_metadata(zf) + # wv = wheel_metadata['Wheel-Version'].split('.', 1) + # file_version = tuple([int(i) for i in wv]) # if file_version < (1, 1): - # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, - # LEGACY_METADATA_FILENAME] + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, + # LEGACY_METADATA_FILENAME] # else: - # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] result = None for fn in fns: @@ -326,13 +334,14 @@ def get_hash(self, data, hash_kind=None): try: hasher = getattr(hashlib, hash_kind) except AttributeError: - raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) + raise DistlibException('Unsupported hash algorithm: %r' % + hash_kind) result = hasher(data).digest() result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') return hash_kind, result def write_record(self, records, record_path, archive_record_path): - records = list(records) # make a copy, as mutated + records = list(records) # make a copy, as mutated records.append((archive_record_path, '', '')) with CSVWriter(record_path) as writer: for row in records: @@ -341,7 +350,7 @@ def write_records(self, info, libdir, archive_paths): records = [] distinfo, info_dir = info - hasher = getattr(hashlib, self.hash_kind) + # hasher = getattr(hashlib, self.hash_kind) for ap, p in archive_paths: with open(p, 'rb') as f: data = f.read() @@ -466,6 +475,7 @@ def sorter(t): if '.dist-info' in ap: n += 10000 return (n, ap) + archive_paths = sorted(archive_paths, key=sorter) # Now, at last, RECORD.
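The named groups above (nm, vn, bn) drive wheel-filename parsing for the Wheel class whose hunks follow. A short sketch of the surface they feed, using a hypothetical filename and assuming pip's vendored distlib is importable:

    from pip._vendor.distlib.wheel import Wheel, is_compatible

    w = Wheel('example_pkg-1.2.3-py3-none-any.whl')  # hypothetical wheel name
    print(w.name, w.version)       # example_pkg 1.2.3
    print(w.pyver, w.abi, w.arch)  # ['py3'] ['none'] ['any']
    print(is_compatible(w))        # True on any CPython 3.x interpreter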
@@ -512,7 +522,8 @@ def install(self, paths, maker, **kwargs): dry_run = maker.dry_run warner = kwargs.get('warner') lib_only = kwargs.get('lib_only', False) - bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) + bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', + False) pathname = os.path.join(self.dirname, self.filename) name_ver = '%s-%s' % (self.name, self.version) @@ -553,11 +564,11 @@ def install(self, paths, maker, **kwargs): # make a new instance rather than a copy of maker's, # as we mutate it fileop = FileOperator(dry_run=dry_run) - fileop.record = True # so we can rollback if needed + fileop.record = True # so we can rollback if needed - bc = not sys.dont_write_bytecode # Double negatives. Lovely! + bc = not sys.dont_write_bytecode # Double negatives. Lovely! - outfiles = [] # for RECORD writing + outfiles = [] # for RECORD writing # for script copying/shebang processing workdir = tempfile.mkdtemp() @@ -611,7 +622,8 @@ def install(self, paths, maker, **kwargs): # So ... manually preserve permission bits as given in zinfo if os.name == 'posix': # just set the normal permission bits - os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF) + os.chmod(outfile, + (zinfo.external_attr >> 16) & 0x1FF) outfiles.append(outfile) # Double check the digest of the written file if not dry_run and row[1]: @@ -624,8 +636,9 @@ def install(self, paths, maker, **kwargs): '%s' % outfile) if bc and outfile.endswith('.py'): try: - pyc = fileop.byte_compile(outfile, - hashed_invalidation=bc_hashed_invalidation) + pyc = fileop.byte_compile( + outfile, + hashed_invalidation=bc_hashed_invalidation) outfiles.append(pyc) except Exception: # Don't give up if byte-compilation fails, @@ -700,7 +713,7 @@ def install(self, paths, maker, **kwargs): fileop.set_executable_mode(filenames) if gui_scripts: - options = {'gui': True } + options = {'gui': True} for k, v in gui_scripts.items(): script = '%s = %s' % (k, v) filenames = maker.make(script, options) @@ -710,7 +723,7 @@ def install(self, paths, maker, **kwargs): dist = InstalledDistribution(p) # Write SHARED - paths = dict(paths) # don't change passed in dict + paths = dict(paths) # don't change passed in dict del paths['purelib'] del paths['platlib'] paths['lib'] = libdir @@ -761,7 +774,8 @@ def _get_extensions(self): extract = True else: file_time = os.stat(dest).st_mtime - file_time = datetime.datetime.fromtimestamp(file_time) + file_time = datetime.datetime.fromtimestamp( + file_time) info = zf.getinfo(relpath) wheel_time = datetime.datetime(*info.date_time) extract = wheel_time > file_time @@ -782,7 +796,7 @@ def is_mountable(self): """ Determine if a wheel is asserted as mountable by its metadata. 
""" - return True # for now - metadata details TBD + return True # for now - metadata details TBD def mount(self, append=False): pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) @@ -820,10 +834,10 @@ def unmount(self): def verify(self): pathname = os.path.join(self.dirname, self.filename) name_ver = '%s-%s' % (self.name, self.version) - data_dir = '%s.data' % name_ver + # data_dir = '%s.data' % name_ver info_dir = '%s.dist-info' % name_ver - metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + # metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') record_name = posixpath.join(info_dir, 'RECORD') @@ -832,9 +846,9 @@ def verify(self): with ZipFile(pathname, 'r') as zf: with zf.open(wheel_metadata_name) as bwf: wf = wrapper(bwf) - message = message_from_file(wf) - wv = message['Wheel-Version'].split('.', 1) - file_version = tuple([int(i) for i in wv]) + message_from_file(wf) + # wv = message['Wheel-Version'].split('.', 1) + # file_version = tuple([int(i) for i in wv]) # TODO version verification records = {} @@ -903,25 +917,25 @@ def get_version(path_map, info_dir): def update_version(version, path): updated = None try: - v = NormalizedVersion(version) + NormalizedVersion(version) i = version.find('-') if i < 0: updated = '%s+1' % version else: parts = [int(s) for s in version[i + 1:].split('.')] parts[-1] += 1 - updated = '%s+%s' % (version[:i], - '.'.join(str(i) for i in parts)) + updated = '%s+%s' % (version[:i], '.'.join( + str(i) for i in parts)) except UnsupportedVersionError: - logger.debug('Cannot update non-compliant (PEP-440) ' - 'version %r', version) + logger.debug( + 'Cannot update non-compliant (PEP-440) ' + 'version %r', version) if updated: md = Metadata(path=path) md.version = updated legacy = path.endswith(LEGACY_METADATA_FILENAME) md.write(path=path, legacy=legacy) - logger.debug('Version updated from %r to %r', version, - updated) + logger.debug('Version updated from %r to %r', version, updated) pathname = os.path.join(self.dirname, self.filename) name_ver = '%s-%s' % (self.name, self.version) @@ -963,7 +977,8 @@ def update_version(version, path): os.close(fd) else: if not os.path.isdir(dest_dir): - raise DistlibException('Not a directory: %r' % dest_dir) + raise DistlibException('Not a directory: %r' % + dest_dir) newpath = os.path.join(dest_dir, self.filename) archive_paths = list(path_map.items()) distinfo = os.path.join(workdir, info_dir) @@ -974,6 +989,7 @@ def update_version(version, path): shutil.copyfile(newpath, pathname) return modified + def _get_glibc_version(): import platform ver = platform.libc_ver() @@ -984,13 +1000,14 @@ def _get_glibc_version(): result = tuple(result) return result + def compatible_tags(): """ Return (pyver, abi, arch) tuples compatible with this Python. 
""" versions = [VER_SUFFIX] major = VER_SUFFIX[0] - for minor in range(sys.version_info[1] - 1, - 1, -1): + for minor in range(sys.version_info[1] - 1, -1, -1): versions.append(''.join([major, str(minor)])) abis = [] @@ -1023,7 +1040,7 @@ def compatible_tags(): while minor >= 0: for match in matches: s = '%s_%s_%s_%s' % (name, major, minor, match) - if s != ARCH: # already there + if s != ARCH: # already there arches.append(s) minor -= 1 @@ -1045,9 +1062,9 @@ def compatible_tags(): if parts >= (2, 17): result.append((''.join((IMP_PREFIX, versions[0])), abi, 'manylinux2014_%s' % arch)) - result.append((''.join((IMP_PREFIX, versions[0])), abi, - 'manylinux_%s_%s_%s' % (parts[0], parts[1], - arch))) + result.append( + (''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux_%s_%s_%s' % (parts[0], parts[1], arch))) # where no ABI / arch dependency, but IMP_PREFIX dependency for i, version in enumerate(versions): @@ -1071,7 +1088,7 @@ def compatible_tags(): def is_compatible(wheel, tags=None): if not isinstance(wheel, Wheel): - wheel = Wheel(wheel) # assume it's a filename + wheel = Wheel(wheel) # assume it's a filename result = False if tags is None: tags = COMPATIBLE_TAGS diff --git a/src/pip/_vendor/distro/distro.py b/src/pip/_vendor/distro/distro.py index 89e18680472..78ccdfa402a 100644 --- a/src/pip/_vendor/distro/distro.py +++ b/src/pip/_vendor/distro/distro.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright 2015,2016,2017 Nir Cohen +# Copyright 2015-2021 Nir Cohen # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -55,7 +55,7 @@ # Python 3.7 TypedDict = dict -__version__ = "1.8.0" +__version__ = "1.9.0" class VersionDict(TypedDict): @@ -125,6 +125,7 @@ class InfoDict(TypedDict): # Base file names to be looked up for if _UNIXCONFDIR is not readable. _DISTRO_RELEASE_BASENAMES = [ "SuSE-release", + "altlinux-release", "arch-release", "base-release", "centos-release", @@ -151,6 +152,8 @@ class InfoDict(TypedDict): "system-release", "plesk-release", "iredmail-release", + "board-release", + "ec2_version", ) @@ -243,6 +246,7 @@ def id() -> str: "rocky" Rocky Linux "aix" AIX "guix" Guix System + "altlinux" ALT Linux ============== ========================================= If you have a need to get distros for reliable IDs added into this set, @@ -991,10 +995,10 @@ def info(self, pretty: bool = False, best: bool = False) -> InfoDict: For details, see :func:`distro.info`. """ - return dict( + return InfoDict( id=self.id(), version=self.version(pretty, best), - version_parts=dict( + version_parts=VersionDict( major=self.major_version(best), minor=self.minor_version(best), build_number=self.build_number(best), diff --git a/src/pip/_vendor/idna/LICENSE.md b/src/pip/_vendor/idna/LICENSE.md index b6f87326ffb..ce3670186c6 100644 --- a/src/pip/_vendor/idna/LICENSE.md +++ b/src/pip/_vendor/idna/LICENSE.md @@ -1,29 +1,31 @@ BSD 3-Clause License -Copyright (c) 2013-2021, Kim Davies +Copyright (c) 2013-2023, Kim Davies and contributors. All rights reserved. Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: +modification, are permitted provided that the following conditions are +met: -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. +1. 
Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/src/pip/_vendor/idna/codec.py b/src/pip/_vendor/idna/codec.py index 1ca9ba62c20..c855a4de6d7 100644 --- a/src/pip/_vendor/idna/codec.py +++ b/src/pip/_vendor/idna/codec.py @@ -1,7 +1,7 @@ from .core import encode, decode, alabel, ulabel, IDNAError import codecs import re -from typing import Tuple, Optional +from typing import Any, Tuple, Optional _unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') @@ -26,24 +26,24 @@ def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]: return decode(data), len(data) class IncrementalEncoder(codecs.BufferedIncrementalEncoder): - def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: if errors != 'strict': raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) if not data: - return "", 0 + return b'', 0 labels = _unicode_dots_re.split(data) - trailing_dot = '' + trailing_dot = b'' if labels: if not labels[-1]: - trailing_dot = '.' + trailing_dot = b'.' del labels[-1] elif not final: # Keep potentially unfinished label until the next call del labels[-1] if labels: - trailing_dot = '.' + trailing_dot = b'.' 
result = [] size = 0 @@ -54,18 +54,21 @@ def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[str, int] size += len(label) # Join with U+002E - result_str = '.'.join(result) + trailing_dot # type: ignore + result_bytes = b'.'.join(result) + trailing_dot size += len(trailing_dot) - return result_str, size + return result_bytes, size class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore + def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: if errors != 'strict': raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) if not data: return ('', 0) + if not isinstance(data, str): + data = str(data, 'ascii') + labels = _unicode_dots_re.split(data) trailing_dot = '' if labels: @@ -99,14 +102,17 @@ class StreamReader(Codec, codecs.StreamReader): pass -def getregentry() -> codecs.CodecInfo: - # Compatibility as a search_function for codecs.register() +def search_function(name: str) -> Optional[codecs.CodecInfo]: + if name != 'idna2008': + return None return codecs.CodecInfo( - name='idna', - encode=Codec().encode, # type: ignore - decode=Codec().decode, # type: ignore + name=name, + encode=Codec().encode, + decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamwriter=StreamWriter, streamreader=StreamReader, ) + +codecs.register(search_function) diff --git a/src/pip/_vendor/idna/core.py b/src/pip/_vendor/idna/core.py index 4f300371102..aaf7d658ba0 100644 --- a/src/pip/_vendor/idna/core.py +++ b/src/pip/_vendor/idna/core.py @@ -318,7 +318,7 @@ def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False status = uts46row[1] replacement = None # type: Optional[str] if len(uts46row) == 3: - replacement = uts46row[2] # type: ignore + replacement = uts46row[2] if (status == 'V' or (status == 'D' and not transitional) or (status == '3' and not std3_rules and replacement is None)): @@ -338,9 +338,9 @@ def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes: - if isinstance(s, (bytes, bytearray)): + if not isinstance(s, str): try: - s = s.decode('ascii') + s = str(s, 'ascii') except UnicodeDecodeError: raise IDNAError('should pass a unicode string to the function rather than a byte string.') if uts46: @@ -372,8 +372,8 @@ def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str: try: - if isinstance(s, (bytes, bytearray)): - s = s.decode('ascii') + if not isinstance(s, str): + s = str(s, 'ascii') except UnicodeDecodeError: raise IDNAError('Invalid ASCII in A-label') if uts46: diff --git a/src/pip/_vendor/idna/idnadata.py b/src/pip/_vendor/idna/idnadata.py index 67db4625829..5cd05d9056e 100644 --- a/src/pip/_vendor/idna/idnadata.py +++ b/src/pip/_vendor/idna/idnadata.py @@ -1,6 +1,6 @@ # This file is automatically generated by tools/idna-data -__version__ = '15.0.0' +__version__ = '15.1.0' scripts = { 'Greek': ( 0x37000000374, @@ -59,6 +59,7 @@ 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, + 0x2ebf00002ee5e, 0x2f8000002fa1e, 0x300000003134b, 0x31350000323b0, @@ -1834,7 +1835,6 @@ 0xa7d50000a7d6, 0xa7d70000a7d8, 0xa7d90000a7da, - 0xa7f20000a7f5, 
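The codec.py rewrite above drops the old getregentry() compatibility hook: search_function() answers only to the name 'idna2008' and is registered at import time, so the stdlib 'idna' codec is no longer shadowed. A minimal sketch, assuming the vendored module is importable:

    # Importing the module runs codecs.register(search_function) as a side effect.
    from pip._vendor.idna import codec  # noqa: F401

    print('königsgäßchen.example'.encode('idna2008'))
    # b'xn--knigsgchen-b4a3dun.example' (ß is valid under IDNA 2008,
    # consistent with the 0x1E9E remapping in the uts46data hunk above)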
0xa7f60000a7f8, 0xa7fa0000a828, 0xa82c0000a82d, @@ -1907,9 +1907,7 @@ 0x1060000010737, 0x1074000010756, 0x1076000010768, - 0x1078000010786, - 0x10787000107b1, - 0x107b2000107bb, + 0x1078000010781, 0x1080000010806, 0x1080800010809, 0x1080a00010836, @@ -2134,6 +2132,7 @@ 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, + 0x2ebf00002ee5e, 0x300000003134b, 0x31350000323b0, ), diff --git a/src/pip/_vendor/idna/package_data.py b/src/pip/_vendor/idna/package_data.py index 8501893bd15..c5b7220c970 100644 --- a/src/pip/_vendor/idna/package_data.py +++ b/src/pip/_vendor/idna/package_data.py @@ -1,2 +1,2 @@ -__version__ = '3.4' +__version__ = '3.6' diff --git a/src/pip/_vendor/idna/uts46data.py b/src/pip/_vendor/idna/uts46data.py index 186796c17b2..6a1eddbfd75 100644 --- a/src/pip/_vendor/idna/uts46data.py +++ b/src/pip/_vendor/idna/uts46data.py @@ -7,7 +7,7 @@ """IDNA Mapping Table from UTS46.""" -__version__ = '15.0.0' +__version__ = '15.1.0' def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x0, '3'), @@ -1899,7 +1899,7 @@ def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1E9A, 'M', 'aʾ'), (0x1E9B, 'M', 'ṡ'), (0x1E9C, 'V'), - (0x1E9E, 'M', 'ss'), + (0x1E9E, 'M', 'ß'), (0x1E9F, 'V'), (0x1EA0, 'M', 'ạ'), (0x1EA1, 'V'), @@ -2418,10 +2418,6 @@ def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x222F, 'M', '∮∮'), (0x2230, 'M', '∮∮∮'), (0x2231, 'V'), - (0x2260, '3'), - (0x2261, 'V'), - (0x226E, '3'), - (0x2270, 'V'), (0x2329, 'M', '〈'), (0x232A, 'M', '〉'), (0x232B, 'V'), @@ -2502,14 +2498,14 @@ def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x24BA, 'M', 'e'), (0x24BB, 'M', 'f'), (0x24BC, 'M', 'g'), - ] - -def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x24BD, 'M', 'h'), (0x24BE, 'M', 'i'), (0x24BF, 'M', 'j'), (0x24C0, 'M', 'k'), + ] + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x24C1, 'M', 'l'), (0x24C2, 'M', 'm'), (0x24C3, 'M', 'n'), @@ -2606,14 +2602,14 @@ def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2C26, 'M', 'ⱖ'), (0x2C27, 'M', 'ⱗ'), (0x2C28, 'M', 'ⱘ'), - ] - -def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x2C29, 'M', 'ⱙ'), (0x2C2A, 'M', 'ⱚ'), (0x2C2B, 'M', 'ⱛ'), (0x2C2C, 'M', 'ⱜ'), + ] + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2C2D, 'M', 'ⱝ'), (0x2C2E, 'M', 'ⱞ'), (0x2C2F, 'M', 'ⱟ'), @@ -2710,14 +2706,14 @@ def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2CC0, 'M', 'ⳁ'), (0x2CC1, 'V'), (0x2CC2, 'M', 'ⳃ'), - ] - -def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x2CC3, 'V'), (0x2CC4, 'M', 'ⳅ'), (0x2CC5, 'V'), (0x2CC6, 'M', 'ⳇ'), + ] + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2CC7, 'V'), (0x2CC8, 'M', 'ⳉ'), (0x2CC9, 'V'), @@ -2814,14 +2810,14 @@ def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F13, 'M', '勹'), (0x2F14, 'M', '匕'), (0x2F15, 'M', '匚'), - ] - -def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x2F16, 'M', '匸'), (0x2F17, 'M', '十'), (0x2F18, 'M', '卜'), (0x2F19, 'M', '卩'), + ] + +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F1A, 'M', '厂'), (0x2F1B, 'M', '厶'), (0x2F1C, 'M', '又'), @@ -2918,14 +2914,14 @@ def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F77, 'M', '糸'), (0x2F78, 'M', '缶'), (0x2F79, 'M', '网'), - ] 
- -def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x2F7A, 'M', '羊'), (0x2F7B, 'M', '羽'), (0x2F7C, 'M', '老'), (0x2F7D, 'M', '而'), + ] + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F7E, 'M', '耒'), (0x2F7F, 'M', '耳'), (0x2F80, 'M', '聿'), @@ -3022,14 +3018,14 @@ def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x3036, 'M', '〒'), (0x3037, 'V'), (0x3038, 'M', '十'), - ] - -def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x3039, 'M', '卄'), (0x303A, 'M', '卅'), (0x303B, 'V'), (0x3040, 'X'), + ] + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x3041, 'V'), (0x3097, 'X'), (0x3099, 'V'), @@ -3126,14 +3122,14 @@ def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x3182, 'M', 'ᇱ'), (0x3183, 'M', 'ᇲ'), (0x3184, 'M', 'ᅗ'), - ] - -def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x3185, 'M', 'ᅘ'), (0x3186, 'M', 'ᅙ'), (0x3187, 'M', 'ᆄ'), (0x3188, 'M', 'ᆅ'), + ] + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x3189, 'M', 'ᆈ'), (0x318A, 'M', 'ᆑ'), (0x318B, 'M', 'ᆒ'), @@ -3230,14 +3226,14 @@ def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x3244, 'M', '問'), (0x3245, 'M', '幼'), (0x3246, 'M', '文'), - ] - -def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x3247, 'M', '箏'), (0x3248, 'V'), (0x3250, 'M', 'pte'), (0x3251, 'M', '21'), + ] + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x3252, 'M', '22'), (0x3253, 'M', '23'), (0x3254, 'M', '24'), @@ -3334,14 +3330,14 @@ def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x32AF, 'M', '協'), (0x32B0, 'M', '夜'), (0x32B1, 'M', '36'), - ] - -def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x32B2, 'M', '37'), (0x32B3, 'M', '38'), (0x32B4, 'M', '39'), (0x32B5, 'M', '40'), + ] + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x32B6, 'M', '41'), (0x32B7, 'M', '42'), (0x32B8, 'M', '43'), @@ -3438,14 +3434,14 @@ def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x3313, 'M', 'ギルダー'), (0x3314, 'M', 'キロ'), (0x3315, 'M', 'キログラム'), - ] - -def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x3316, 'M', 'キロメートル'), (0x3317, 'M', 'キロワット'), (0x3318, 'M', 'グラム'), (0x3319, 'M', 'グラムトン'), + ] + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x331A, 'M', 'クルゼイロ'), (0x331B, 'M', 'クローネ'), (0x331C, 'M', 'ケース'), @@ -3542,14 +3538,14 @@ def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x3377, 'M', 'dm'), (0x3378, 'M', 'dm2'), (0x3379, 'M', 'dm3'), - ] - -def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x337A, 'M', 'iu'), (0x337B, 'M', '平成'), (0x337C, 'M', '昭和'), (0x337D, 'M', '大正'), + ] + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x337E, 'M', '明治'), (0x337F, 'M', '株式会社'), (0x3380, 'M', 'pa'), @@ -3646,14 +3642,14 @@ def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x33DB, 'M', 'sr'), (0x33DC, 'M', 'sv'), (0x33DD, 'M', 'wb'), - ] - -def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x33DE, 'M', 'v∕m'), (0x33DF, 'M', 'a∕m'), (0x33E0, 'M', '1日'), (0x33E1, 'M', '2日'), + ] + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ 
(0x33E2, 'M', '3日'), (0x33E3, 'M', '4日'), (0x33E4, 'M', '5日'), @@ -3750,14 +3746,14 @@ def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xA68B, 'V'), (0xA68C, 'M', 'ꚍ'), (0xA68D, 'V'), - ] - -def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xA68E, 'M', 'ꚏ'), (0xA68F, 'V'), (0xA690, 'M', 'ꚑ'), (0xA691, 'V'), + ] + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xA692, 'M', 'ꚓ'), (0xA693, 'V'), (0xA694, 'M', 'ꚕ'), @@ -3854,14 +3850,14 @@ def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xA779, 'M', 'ꝺ'), (0xA77A, 'V'), (0xA77B, 'M', 'ꝼ'), - ] - -def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xA77C, 'V'), (0xA77D, 'M', 'ᵹ'), (0xA77E, 'M', 'ꝿ'), (0xA77F, 'V'), + ] + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xA780, 'M', 'ꞁ'), (0xA781, 'V'), (0xA782, 'M', 'ꞃ'), @@ -3958,14 +3954,14 @@ def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xA878, 'X'), (0xA880, 'V'), (0xA8C6, 'X'), - ] - -def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xA8CE, 'V'), (0xA8DA, 'X'), (0xA8E0, 'V'), (0xA954, 'X'), + ] + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xA95F, 'V'), (0xA97D, 'X'), (0xA980, 'V'), @@ -4062,14 +4058,14 @@ def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xABA8, 'M', 'Ꮨ'), (0xABA9, 'M', 'Ꮩ'), (0xABAA, 'M', 'Ꮪ'), - ] - -def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xABAB, 'M', 'Ꮫ'), (0xABAC, 'M', 'Ꮬ'), (0xABAD, 'M', 'Ꮭ'), (0xABAE, 'M', 'Ꮮ'), + ] + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xABAF, 'M', 'Ꮯ'), (0xABB0, 'M', 'Ꮰ'), (0xABB1, 'M', 'Ꮱ'), @@ -4166,14 +4162,14 @@ def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xF943, 'M', '弄'), (0xF944, 'M', '籠'), (0xF945, 'M', '聾'), - ] - -def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xF946, 'M', '牢'), (0xF947, 'M', '磊'), (0xF948, 'M', '賂'), (0xF949, 'M', '雷'), + ] + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xF94A, 'M', '壘'), (0xF94B, 'M', '屢'), (0xF94C, 'M', '樓'), @@ -4270,14 +4266,14 @@ def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xF9A7, 'M', '獵'), (0xF9A8, 'M', '令'), (0xF9A9, 'M', '囹'), - ] - -def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xF9AA, 'M', '寧'), (0xF9AB, 'M', '嶺'), (0xF9AC, 'M', '怜'), (0xF9AD, 'M', '玲'), + ] + +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xF9AE, 'M', '瑩'), (0xF9AF, 'M', '羚'), (0xF9B0, 'M', '聆'), @@ -4374,14 +4370,14 @@ def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xFA0B, 'M', '廓'), (0xFA0C, 'M', '兀'), (0xFA0D, 'M', '嗀'), - ] - -def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xFA0E, 'V'), (0xFA10, 'M', '塚'), (0xFA11, 'V'), (0xFA12, 'M', '晴'), + ] + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFA13, 'V'), (0xFA15, 'M', '凞'), (0xFA16, 'M', '猪'), @@ -4478,14 +4474,14 @@ def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xFA76, 'M', '勇'), (0xFA77, 'M', '勺'), (0xFA78, 'M', '喝'), - ] - -def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xFA79, 'M', '啕'), (0xFA7A, 'M', '喙'), (0xFA7B, 'M', '嗢'), (0xFA7C, 'M', '塚'), + ] + +def 
_seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFA7D, 'M', '墳'), (0xFA7E, 'M', '奄'), (0xFA7F, 'M', '奔'), @@ -4582,14 +4578,14 @@ def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xFADA, 'X'), (0xFB00, 'M', 'ff'), (0xFB01, 'M', 'fi'), - ] - -def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xFB02, 'M', 'fl'), (0xFB03, 'M', 'ffi'), (0xFB04, 'M', 'ffl'), (0xFB05, 'M', 'st'), + ] + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFB07, 'X'), (0xFB13, 'M', 'մն'), (0xFB14, 'M', 'մե'), @@ -4686,14 +4682,14 @@ def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xFBDB, 'M', 'ۈ'), (0xFBDD, 'M', 'ۇٴ'), (0xFBDE, 'M', 'ۋ'), - ] - -def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xFBE0, 'M', 'ۅ'), (0xFBE2, 'M', 'ۉ'), (0xFBE4, 'M', 'ې'), (0xFBE8, 'M', 'ى'), + ] + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFBEA, 'M', 'ئا'), (0xFBEC, 'M', 'ئە'), (0xFBEE, 'M', 'ئو'), @@ -4790,14 +4786,14 @@ def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xFC54, 'M', 'هي'), (0xFC55, 'M', 'يج'), (0xFC56, 'M', 'يح'), - ] - -def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xFC57, 'M', 'يخ'), (0xFC58, 'M', 'يم'), (0xFC59, 'M', 'يى'), (0xFC5A, 'M', 'يي'), + ] + +def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFC5B, 'M', 'ذٰ'), (0xFC5C, 'M', 'رٰ'), (0xFC5D, 'M', 'ىٰ'), @@ -4894,14 +4890,14 @@ def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xFCB8, 'M', 'طح'), (0xFCB9, 'M', 'ظم'), (0xFCBA, 'M', 'عج'), - ] - -def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xFCBB, 'M', 'عم'), (0xFCBC, 'M', 'غج'), (0xFCBD, 'M', 'غم'), (0xFCBE, 'M', 'فج'), + ] + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFCBF, 'M', 'فح'), (0xFCC0, 'M', 'فخ'), (0xFCC1, 'M', 'فم'), @@ -4998,14 +4994,14 @@ def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xFD1C, 'M', 'حي'), (0xFD1D, 'M', 'جى'), (0xFD1E, 'M', 'جي'), - ] - -def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xFD1F, 'M', 'خى'), (0xFD20, 'M', 'خي'), (0xFD21, 'M', 'صى'), (0xFD22, 'M', 'صي'), + ] + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFD23, 'M', 'ضى'), (0xFD24, 'M', 'ضي'), (0xFD25, 'M', 'شج'), @@ -5102,14 +5098,14 @@ def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xFDA4, 'M', 'تمى'), (0xFDA5, 'M', 'جمي'), (0xFDA6, 'M', 'جحى'), - ] - -def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xFDA7, 'M', 'جمى'), (0xFDA8, 'M', 'سخى'), (0xFDA9, 'M', 'صحي'), (0xFDAA, 'M', 'شحي'), + ] + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFDAB, 'M', 'ضحي'), (0xFDAC, 'M', 'لجي'), (0xFDAD, 'M', 'لمي'), @@ -5206,14 +5202,14 @@ def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xFE5B, '3', '{'), (0xFE5C, '3', '}'), (0xFE5D, 'M', '〔'), - ] - -def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xFE5E, 'M', '〕'), (0xFE5F, '3', '#'), (0xFE60, '3', '&'), (0xFE61, '3', '*'), + ] + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFE62, '3', '+'), (0xFE63, 'M', '-'), (0xFE64, '3', '<'), @@ -5310,14 +5306,14 @@ def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: 
(0xFF18, 'M', '8'), (0xFF19, 'M', '9'), (0xFF1A, '3', ':'), - ] - -def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xFF1B, '3', ';'), (0xFF1C, '3', '<'), (0xFF1D, '3', '='), (0xFF1E, '3', '>'), + ] + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFF1F, '3', '?'), (0xFF20, '3', '@'), (0xFF21, 'M', 'a'), @@ -5414,14 +5410,14 @@ def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xFF7C, 'M', 'シ'), (0xFF7D, 'M', 'ス'), (0xFF7E, 'M', 'セ'), - ] - -def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xFF7F, 'M', 'ソ'), (0xFF80, 'M', 'タ'), (0xFF81, 'M', 'チ'), (0xFF82, 'M', 'ツ'), + ] + +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFF83, 'M', 'テ'), (0xFF84, 'M', 'ト'), (0xFF85, 'M', 'ナ'), @@ -5518,14 +5514,14 @@ def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xFFE7, 'X'), (0xFFE8, 'M', '│'), (0xFFE9, 'M', '←'), - ] - -def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0xFFEA, 'M', '↑'), (0xFFEB, 'M', '→'), (0xFFEC, 'M', '↓'), (0xFFED, 'M', '■'), + ] + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xFFEE, 'M', '○'), (0xFFEF, 'X'), (0x10000, 'V'), @@ -5622,14 +5618,14 @@ def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x104B3, 'M', '𐓛'), (0x104B4, 'M', '𐓜'), (0x104B5, 'M', '𐓝'), - ] - -def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x104B6, 'M', '𐓞'), (0x104B7, 'M', '𐓟'), (0x104B8, 'M', '𐓠'), (0x104B9, 'M', '𐓡'), + ] + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x104BA, 'M', '𐓢'), (0x104BB, 'M', '𐓣'), (0x104BC, 'M', '𐓤'), @@ -5726,14 +5722,14 @@ def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x10786, 'X'), (0x10787, 'M', 'ʣ'), (0x10788, 'M', 'ꭦ'), - ] - -def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x10789, 'M', 'ʥ'), (0x1078A, 'M', 'ʤ'), (0x1078B, 'M', 'ɖ'), (0x1078C, 'M', 'ɗ'), + ] + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1078D, 'M', 'ᶑ'), (0x1078E, 'M', 'ɘ'), (0x1078F, 'M', 'ɞ'), @@ -5830,14 +5826,14 @@ def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x10A60, 'V'), (0x10AA0, 'X'), (0x10AC0, 'V'), - ] - -def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x10AE7, 'X'), (0x10AEB, 'V'), (0x10AF7, 'X'), (0x10B00, 'V'), + ] + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x10B36, 'X'), (0x10B39, 'V'), (0x10B56, 'X'), @@ -5934,14 +5930,14 @@ def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1107F, 'V'), (0x110BD, 'X'), (0x110BE, 'V'), - ] - -def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x110C3, 'X'), (0x110D0, 'V'), (0x110E9, 'X'), (0x110F0, 'V'), + ] + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x110FA, 'X'), (0x11100, 'V'), (0x11135, 'X'), @@ -6038,14 +6034,14 @@ def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x118A4, 'M', '𑣄'), (0x118A5, 'M', '𑣅'), (0x118A6, 'M', '𑣆'), - ] - -def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x118A7, 'M', '𑣇'), (0x118A8, 'M', '𑣈'), (0x118A9, 'M', '𑣉'), (0x118AA, 'M', '𑣊'), + ] + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x118AB, 'M', '𑣋'), (0x118AC, 'M', '𑣌'), 
(0x118AD, 'M', '𑣍'), @@ -6142,14 +6138,14 @@ def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x11EE0, 'V'), (0x11EF9, 'X'), (0x11F00, 'V'), - ] - -def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x11F11, 'X'), (0x11F12, 'V'), (0x11F3B, 'X'), (0x11F3E, 'V'), + ] + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x11F5A, 'X'), (0x11FB0, 'V'), (0x11FB1, 'X'), @@ -6246,14 +6242,14 @@ def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x18D00, 'V'), (0x18D09, 'X'), (0x1AFF0, 'V'), - ] - -def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1AFF4, 'X'), (0x1AFF5, 'V'), (0x1AFFC, 'X'), (0x1AFFD, 'V'), + ] + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1AFFF, 'X'), (0x1B000, 'V'), (0x1B123, 'X'), @@ -6350,14 +6346,14 @@ def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D41E, 'M', 'e'), (0x1D41F, 'M', 'f'), (0x1D420, 'M', 'g'), - ] - -def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D421, 'M', 'h'), (0x1D422, 'M', 'i'), (0x1D423, 'M', 'j'), (0x1D424, 'M', 'k'), + ] + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D425, 'M', 'l'), (0x1D426, 'M', 'm'), (0x1D427, 'M', 'n'), @@ -6454,14 +6450,14 @@ def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D482, 'M', 'a'), (0x1D483, 'M', 'b'), (0x1D484, 'M', 'c'), - ] - -def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D485, 'M', 'd'), (0x1D486, 'M', 'e'), (0x1D487, 'M', 'f'), (0x1D488, 'M', 'g'), + ] + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D489, 'M', 'h'), (0x1D48A, 'M', 'i'), (0x1D48B, 'M', 'j'), @@ -6558,14 +6554,14 @@ def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D4E9, 'M', 'z'), (0x1D4EA, 'M', 'a'), (0x1D4EB, 'M', 'b'), - ] - -def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D4EC, 'M', 'c'), (0x1D4ED, 'M', 'd'), (0x1D4EE, 'M', 'e'), (0x1D4EF, 'M', 'f'), + ] + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D4F0, 'M', 'g'), (0x1D4F1, 'M', 'h'), (0x1D4F2, 'M', 'i'), @@ -6662,14 +6658,14 @@ def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D550, 'M', 'y'), (0x1D551, 'X'), (0x1D552, 'M', 'a'), - ] - -def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D553, 'M', 'b'), (0x1D554, 'M', 'c'), (0x1D555, 'M', 'd'), (0x1D556, 'M', 'e'), + ] + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D557, 'M', 'f'), (0x1D558, 'M', 'g'), (0x1D559, 'M', 'h'), @@ -6766,14 +6762,14 @@ def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D5B4, 'M', 'u'), (0x1D5B5, 'M', 'v'), (0x1D5B6, 'M', 'w'), - ] - -def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D5B7, 'M', 'x'), (0x1D5B8, 'M', 'y'), (0x1D5B9, 'M', 'z'), (0x1D5BA, 'M', 'a'), + ] + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D5BB, 'M', 'b'), (0x1D5BC, 'M', 'c'), (0x1D5BD, 'M', 'd'), @@ -6870,14 +6866,14 @@ def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D618, 'M', 'q'), (0x1D619, 'M', 'r'), (0x1D61A, 'M', 's'), - ] - -def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D61B, 'M', 't'), (0x1D61C, 'M', 'u'), (0x1D61D, 'M', 
'v'), (0x1D61E, 'M', 'w'), + ] + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D61F, 'M', 'x'), (0x1D620, 'M', 'y'), (0x1D621, 'M', 'z'), @@ -6974,14 +6970,14 @@ def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D67C, 'M', 'm'), (0x1D67D, 'M', 'n'), (0x1D67E, 'M', 'o'), - ] - -def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D67F, 'M', 'p'), (0x1D680, 'M', 'q'), (0x1D681, 'M', 'r'), (0x1D682, 'M', 's'), + ] + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D683, 'M', 't'), (0x1D684, 'M', 'u'), (0x1D685, 'M', 'v'), @@ -7078,14 +7074,14 @@ def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D6E2, 'M', 'α'), (0x1D6E3, 'M', 'β'), (0x1D6E4, 'M', 'γ'), - ] - -def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D6E5, 'M', 'δ'), (0x1D6E6, 'M', 'ε'), (0x1D6E7, 'M', 'ζ'), (0x1D6E8, 'M', 'η'), + ] + +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D6E9, 'M', 'θ'), (0x1D6EA, 'M', 'ι'), (0x1D6EB, 'M', 'κ'), @@ -7182,14 +7178,14 @@ def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D747, 'M', 'σ'), (0x1D749, 'M', 'τ'), (0x1D74A, 'M', 'υ'), - ] - -def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D74B, 'M', 'φ'), (0x1D74C, 'M', 'χ'), (0x1D74D, 'M', 'ψ'), (0x1D74E, 'M', 'ω'), + ] + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D74F, 'M', '∂'), (0x1D750, 'M', 'ε'), (0x1D751, 'M', 'θ'), @@ -7286,14 +7282,14 @@ def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D7AD, 'M', 'δ'), (0x1D7AE, 'M', 'ε'), (0x1D7AF, 'M', 'ζ'), - ] - -def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D7B0, 'M', 'η'), (0x1D7B1, 'M', 'θ'), (0x1D7B2, 'M', 'ι'), (0x1D7B3, 'M', 'κ'), + ] + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D7B4, 'M', 'λ'), (0x1D7B5, 'M', 'μ'), (0x1D7B6, 'M', 'ν'), @@ -7390,14 +7386,14 @@ def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1E030, 'M', 'а'), (0x1E031, 'M', 'б'), (0x1E032, 'M', 'в'), - ] - -def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1E033, 'M', 'г'), (0x1E034, 'M', 'д'), (0x1E035, 'M', 'е'), (0x1E036, 'M', 'ж'), + ] + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1E037, 'M', 'з'), (0x1E038, 'M', 'и'), (0x1E039, 'M', 'к'), @@ -7494,14 +7490,14 @@ def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1E907, 'M', '𞤩'), (0x1E908, 'M', '𞤪'), (0x1E909, 'M', '𞤫'), - ] - -def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1E90A, 'M', '𞤬'), (0x1E90B, 'M', '𞤭'), (0x1E90C, 'M', '𞤮'), (0x1E90D, 'M', '𞤯'), + ] + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1E90E, 'M', '𞤰'), (0x1E90F, 'M', '𞤱'), (0x1E910, 'M', '𞤲'), @@ -7598,14 +7594,14 @@ def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1EE48, 'X'), (0x1EE49, 'M', 'ي'), (0x1EE4A, 'X'), - ] - -def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1EE4B, 'M', 'ل'), (0x1EE4C, 'X'), (0x1EE4D, 'M', 'ن'), (0x1EE4E, 'M', 'س'), + ] + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1EE4F, 'M', 'ع'), (0x1EE50, 'X'), (0x1EE51, 'M', 'ص'), @@ -7702,14 +7698,14 @@ def _seg_73() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1EEB2, 'M', 'ق'), (0x1EEB3, 'M', 'ر'), (0x1EEB4, 'M', 'ش'), - ] - -def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1EEB5, 'M', 'ت'), (0x1EEB6, 'M', 'ث'), (0x1EEB7, 'M', 'خ'), (0x1EEB8, 'M', 'ذ'), + ] + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1EEB9, 'M', 'ض'), (0x1EEBA, 'M', 'ظ'), (0x1EEBB, 'M', 'غ'), @@ -7806,14 +7802,14 @@ def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1F150, 'V'), (0x1F16A, 'M', 'mc'), (0x1F16B, 'M', 'md'), - ] - -def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1F16C, 'M', 'mr'), (0x1F16D, 'V'), (0x1F190, 'M', 'dj'), (0x1F191, 'V'), + ] + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1F1AE, 'X'), (0x1F1E6, 'V'), (0x1F200, 'M', 'ほか'), @@ -7910,14 +7906,14 @@ def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1FA54, 'X'), (0x1FA60, 'V'), (0x1FA6E, 'X'), - ] - -def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1FA70, 'V'), (0x1FA7D, 'X'), (0x1FA80, 'V'), (0x1FA89, 'X'), + ] + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1FA90, 'V'), (0x1FABE, 'X'), (0x1FABF, 'V'), @@ -7953,6 +7949,8 @@ def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2CEA2, 'X'), (0x2CEB0, 'V'), (0x2EBE1, 'X'), + (0x2EBF0, 'V'), + (0x2EE5E, 'X'), (0x2F800, 'M', '丽'), (0x2F801, 'M', '丸'), (0x2F802, 'M', '乁'), @@ -8014,12 +8012,12 @@ def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F83C, 'M', '咞'), (0x2F83D, 'M', '吸'), (0x2F83E, 'M', '呈'), + (0x2F83F, 'M', '周'), + (0x2F840, 'M', '咢'), ] def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x2F83F, 'M', '周'), - (0x2F840, 'M', '咢'), (0x2F841, 'M', '哶'), (0x2F842, 'M', '唐'), (0x2F843, 'M', '啓'), @@ -8118,12 +8116,12 @@ def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F8A4, 'M', '𢛔'), (0x2F8A5, 'M', '惇'), (0x2F8A6, 'M', '慈'), + (0x2F8A7, 'M', '慌'), + (0x2F8A8, 'M', '慎'), ] def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x2F8A7, 'M', '慌'), - (0x2F8A8, 'M', '慎'), (0x2F8A9, 'M', '慌'), (0x2F8AA, 'M', '慺'), (0x2F8AB, 'M', '憎'), @@ -8222,12 +8220,12 @@ def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F908, 'M', '港'), (0x2F909, 'M', '湮'), (0x2F90A, 'M', '㴳'), + (0x2F90B, 'M', '滋'), + (0x2F90C, 'M', '滇'), ] def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x2F90B, 'M', '滋'), - (0x2F90C, 'M', '滇'), (0x2F90D, 'M', '𣻑'), (0x2F90E, 'M', '淹'), (0x2F90F, 'M', '潮'), @@ -8326,12 +8324,12 @@ def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F96F, 'M', '縂'), (0x2F970, 'M', '繅'), (0x2F971, 'M', '䌴'), + (0x2F972, 'M', '𦈨'), + (0x2F973, 'M', '𦉇'), ] def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x2F972, 'M', '𦈨'), - (0x2F973, 'M', '𦉇'), (0x2F974, 'M', '䍙'), (0x2F975, 'M', '𦋙'), (0x2F976, 'M', '罺'), @@ -8430,12 +8428,12 @@ def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F9D3, 'M', '𧲨'), (0x2F9D4, 'M', '貫'), (0x2F9D5, 'M', '賁'), + (0x2F9D6, 'M', '贛'), + (0x2F9D7, 'M', '起'), ] def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x2F9D6, 'M', '贛'), - (0x2F9D7, 'M', '起'), (0x2F9D8, 'M', '𧼯'), (0x2F9D9, 'M', '𠠄'), (0x2F9DA, 'M', '跋'), diff --git 
a/src/pip/_vendor/msgpack/__init__.py b/src/pip/_vendor/msgpack/__init__.py index 1300b866043..919b86f175f 100644 --- a/src/pip/_vendor/msgpack/__init__.py +++ b/src/pip/_vendor/msgpack/__init__.py @@ -1,16 +1,14 @@ -# coding: utf-8 from .exceptions import * from .ext import ExtType, Timestamp import os -import sys -version = (1, 0, 5) -__version__ = "1.0.5" +version = (1, 0, 8) +__version__ = "1.0.8" -if os.environ.get("MSGPACK_PUREPYTHON") or sys.version_info[0] == 2: +if os.environ.get("MSGPACK_PUREPYTHON"): from .fallback import Packer, unpackb, Unpacker else: try: diff --git a/src/pip/_vendor/msgpack/ext.py b/src/pip/_vendor/msgpack/ext.py index 23e0d6b41ce..02c2c43008a 100644 --- a/src/pip/_vendor/msgpack/ext.py +++ b/src/pip/_vendor/msgpack/ext.py @@ -1,23 +1,8 @@ -# coding: utf-8 from collections import namedtuple import datetime -import sys import struct -PY2 = sys.version_info[0] == 2 - -if PY2: - int_types = (int, long) - _utc = None -else: - int_types = int - try: - _utc = datetime.timezone.utc - except AttributeError: - _utc = datetime.timezone(datetime.timedelta(0)) - - class ExtType(namedtuple("ExtType", "code data")): """ExtType represents ext type in msgpack.""" @@ -28,14 +13,15 @@ def __new__(cls, code, data): raise TypeError("data must be bytes") if not 0 <= code <= 127: raise ValueError("code must be 0~127") - return super(ExtType, cls).__new__(cls, code, data) + return super().__new__(cls, code, data) -class Timestamp(object): +class Timestamp: """Timestamp represents the Timestamp extension type in msgpack. - When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`. When using pure-Python - msgpack, :func:`to_bytes` and :func:`from_bytes` are used to pack and unpack `Timestamp`. + When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`. + When using pure-Python msgpack, :func:`to_bytes` and :func:`from_bytes` are used to pack and + unpack `Timestamp`. This class is immutable: Do not override seconds and nanoseconds. """ @@ -53,31 +39,25 @@ def __init__(self, seconds, nanoseconds=0): Number of nanoseconds to add to `seconds` to get fractional time. Maximum is 999_999_999. Default is 0. - Note: Negative times (before the UNIX epoch) are represented as negative seconds + positive ns. + Note: Negative times (before the UNIX epoch) are represented as neg. seconds + pos. ns. """ - if not isinstance(seconds, int_types): + if not isinstance(seconds, int): raise TypeError("seconds must be an integer") - if not isinstance(nanoseconds, int_types): + if not isinstance(nanoseconds, int): raise TypeError("nanoseconds must be an integer") if not (0 <= nanoseconds < 10**9): - raise ValueError( - "nanoseconds must be a non-negative integer less than 999999999." 
- ) + raise ValueError("nanoseconds must be a non-negative integer less than 999999999.") self.seconds = seconds self.nanoseconds = nanoseconds def __repr__(self): """String representation of Timestamp.""" - return "Timestamp(seconds={0}, nanoseconds={1})".format( - self.seconds, self.nanoseconds - ) + return f"Timestamp(seconds={self.seconds}, nanoseconds={self.nanoseconds})" def __eq__(self, other): """Check for equality with another Timestamp object""" if type(other) is self.__class__: - return ( - self.seconds == other.seconds and self.nanoseconds == other.nanoseconds - ) + return self.seconds == other.seconds and self.nanoseconds == other.nanoseconds return False def __ne__(self, other): @@ -140,7 +120,7 @@ def from_unix(unix_sec): """Create a Timestamp from posix timestamp in seconds. :param unix_float: Posix timestamp in seconds. - :type unix_float: int or float. + :type unix_float: int or float """ seconds = int(unix_sec // 1) nanoseconds = int((unix_sec % 1) * 10**9) @@ -174,20 +154,15 @@ def to_unix_nano(self): def to_datetime(self): """Get the timestamp as a UTC datetime. - Python 2 is not supported. - - :rtype: datetime. + :rtype: `datetime.datetime` """ - return datetime.datetime.fromtimestamp(0, _utc) + datetime.timedelta( - seconds=self.to_unix() - ) + utc = datetime.timezone.utc + return datetime.datetime.fromtimestamp(0, utc) + datetime.timedelta(seconds=self.to_unix()) @staticmethod def from_datetime(dt): """Create a Timestamp from datetime with tzinfo. - Python 2 is not supported. - :rtype: Timestamp """ return Timestamp.from_unix(dt.timestamp()) diff --git a/src/pip/_vendor/msgpack/fallback.py b/src/pip/_vendor/msgpack/fallback.py index e8cebc1bef7..a174162af8a 100644 --- a/src/pip/_vendor/msgpack/fallback.py +++ b/src/pip/_vendor/msgpack/fallback.py @@ -4,39 +4,6 @@ import struct -PY2 = sys.version_info[0] == 2 -if PY2: - int_types = (int, long) - - def dict_iteritems(d): - return d.iteritems() - -else: - int_types = int - unicode = str - xrange = range - - def dict_iteritems(d): - return d.items() - - -if sys.version_info < (3, 5): - # Ugly hack... - RecursionError = RuntimeError - - def _is_recursionerror(e): - return ( - len(e.args) == 1 - and isinstance(e.args[0], str) - and e.args[0].startswith("maximum recursion depth exceeded") - ) - -else: - - def _is_recursionerror(e): - return True - - if hasattr(sys, "pypy_version_info"): # StringIO is slow on PyPy, StringIO is faster. However: PyPy's own # StringBuilder is fastest. @@ -48,7 +15,7 @@ def _is_recursionerror(e): from __pypy__.builders import StringBuilder USING_STRINGBUILDER = True - class StringIO(object): + class StringIO: def __init__(self, s=b""): if s: self.builder = StringBuilder(len(s)) @@ -125,24 +92,13 @@ def unpackb(packed, **kwargs): ret = unpacker._unpack() except OutOfData: raise ValueError("Unpack failed: incomplete input") - except RecursionError as e: - if _is_recursionerror(e): - raise StackError - raise + except RecursionError: + raise StackError if unpacker._got_extradata(): raise ExtraData(ret, unpacker._get_extradata()) return ret -if sys.version_info < (2, 7, 6): - - def _unpack_from(f, b, o=0): - """Explicit type cast for legacy struct.unpack_from""" - return struct.unpack_from(f, bytes(b), o) - -else: - _unpack_from = struct.unpack_from - _NO_FORMAT_USED = "" _MSGPACK_HEADERS = { 0xC4: (1, _NO_FORMAT_USED, TYPE_BIN), @@ -176,14 +132,14 @@ def _unpack_from(f, b, o=0): } -class Unpacker(object): +class Unpacker: """Streaming unpacker. 
Arguments: :param file_like: File-like object having `.read(n)` method. - If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable. + If specified, unpacker reads serialized data from it and `.feed()` is not usable. :param int read_size: Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`) @@ -202,17 +158,17 @@ class Unpacker(object): 0 - Timestamp 1 - float (Seconds from the EPOCH) 2 - int (Nanoseconds from the EPOCH) - 3 - datetime.datetime (UTC). Python 2 is not supported. + 3 - datetime.datetime (UTC). :param bool strict_map_key: If true (default), only str or bytes are accepted for map (dict) keys. - :param callable object_hook: + :param object_hook: When specified, it should be callable. Unpacker calls it with a dict argument after unpacking msgpack map. (See also simplejson) - :param callable object_pairs_hook: + :param object_pairs_hook: When specified, it should be callable. Unpacker calls it with a list of key-value pairs after unpacking msgpack map. (See also simplejson) @@ -359,9 +315,7 @@ def __init__( if object_pairs_hook is not None and not callable(object_pairs_hook): raise TypeError("`object_pairs_hook` is not callable") if object_hook is not None and object_pairs_hook is not None: - raise TypeError( - "object_pairs_hook and object_hook are mutually " "exclusive" - ) + raise TypeError("object_pairs_hook and object_hook are mutually exclusive") if not callable(ext_hook): raise TypeError("`ext_hook` is not callable") @@ -453,20 +407,18 @@ def _read_header(self): n = b & 0b00011111 typ = TYPE_RAW if n > self._max_str_len: - raise ValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len)) + raise ValueError(f"{n} exceeds max_str_len({self._max_str_len})") obj = self._read(n) elif b & 0b11110000 == 0b10010000: n = b & 0b00001111 typ = TYPE_ARRAY if n > self._max_array_len: - raise ValueError( - "%s exceeds max_array_len(%s)" % (n, self._max_array_len) - ) + raise ValueError(f"{n} exceeds max_array_len({self._max_array_len})") elif b & 0b11110000 == 0b10000000: n = b & 0b00001111 typ = TYPE_MAP if n > self._max_map_len: - raise ValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len)) + raise ValueError(f"{n} exceeds max_map_len({self._max_map_len})") elif b == 0xC0: obj = None elif b == 0xC2: @@ -477,65 +429,61 @@ def _read_header(self): size, fmt, typ = _MSGPACK_HEADERS[b] self._reserve(size) if len(fmt) > 0: - n = _unpack_from(fmt, self._buffer, self._buff_i)[0] + n = struct.unpack_from(fmt, self._buffer, self._buff_i)[0] else: n = self._buffer[self._buff_i] self._buff_i += size if n > self._max_bin_len: - raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) + raise ValueError(f"{n} exceeds max_bin_len({self._max_bin_len})") obj = self._read(n) elif 0xC7 <= b <= 0xC9: size, fmt, typ = _MSGPACK_HEADERS[b] self._reserve(size) - L, n = _unpack_from(fmt, self._buffer, self._buff_i) + L, n = struct.unpack_from(fmt, self._buffer, self._buff_i) self._buff_i += size if L > self._max_ext_len: - raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) + raise ValueError(f"{L} exceeds max_ext_len({self._max_ext_len})") obj = self._read(L) elif 0xCA <= b <= 0xD3: size, fmt = _MSGPACK_HEADERS[b] self._reserve(size) if len(fmt) > 0: - obj = _unpack_from(fmt, self._buffer, self._buff_i)[0] + obj = struct.unpack_from(fmt, self._buffer, self._buff_i)[0] else: obj = self._buffer[self._buff_i] self._buff_i += size elif 0xD4 <= b <= 0xD8: size, fmt, typ = _MSGPACK_HEADERS[b] if 
self._max_ext_len < size: - raise ValueError( - "%s exceeds max_ext_len(%s)" % (size, self._max_ext_len) - ) + raise ValueError(f"{size} exceeds max_ext_len({self._max_ext_len})") self._reserve(size + 1) - n, obj = _unpack_from(fmt, self._buffer, self._buff_i) + n, obj = struct.unpack_from(fmt, self._buffer, self._buff_i) self._buff_i += size + 1 elif 0xD9 <= b <= 0xDB: size, fmt, typ = _MSGPACK_HEADERS[b] self._reserve(size) if len(fmt) > 0: - (n,) = _unpack_from(fmt, self._buffer, self._buff_i) + (n,) = struct.unpack_from(fmt, self._buffer, self._buff_i) else: n = self._buffer[self._buff_i] self._buff_i += size if n > self._max_str_len: - raise ValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len)) + raise ValueError(f"{n} exceeds max_str_len({self._max_str_len})") obj = self._read(n) elif 0xDC <= b <= 0xDD: size, fmt, typ = _MSGPACK_HEADERS[b] self._reserve(size) - (n,) = _unpack_from(fmt, self._buffer, self._buff_i) + (n,) = struct.unpack_from(fmt, self._buffer, self._buff_i) self._buff_i += size if n > self._max_array_len: - raise ValueError( - "%s exceeds max_array_len(%s)" % (n, self._max_array_len) - ) + raise ValueError(f"{n} exceeds max_array_len({self._max_array_len})") elif 0xDE <= b <= 0xDF: size, fmt, typ = _MSGPACK_HEADERS[b] self._reserve(size) - (n,) = _unpack_from(fmt, self._buffer, self._buff_i) + (n,) = struct.unpack_from(fmt, self._buffer, self._buff_i) self._buff_i += size if n > self._max_map_len: - raise ValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len)) + raise ValueError(f"{n} exceeds max_map_len({self._max_map_len})") else: raise FormatError("Unknown header: 0x%x" % b) return typ, n, obj @@ -554,12 +502,12 @@ def _unpack(self, execute=EX_CONSTRUCT): # TODO should we eliminate the recursion? if typ == TYPE_ARRAY: if execute == EX_SKIP: - for i in xrange(n): + for i in range(n): # TODO check whether we need to call `list_hook` self._unpack(EX_SKIP) return ret = newlist_hint(n) - for i in xrange(n): + for i in range(n): ret.append(self._unpack(EX_CONSTRUCT)) if self._list_hook is not None: ret = self._list_hook(ret) @@ -567,25 +515,22 @@ def _unpack(self, execute=EX_CONSTRUCT): return ret if self._use_list else tuple(ret) if typ == TYPE_MAP: if execute == EX_SKIP: - for i in xrange(n): + for i in range(n): # TODO check whether we need to call hooks self._unpack(EX_SKIP) self._unpack(EX_SKIP) return if self._object_pairs_hook is not None: ret = self._object_pairs_hook( - (self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT)) - for _ in xrange(n) + (self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT)) for _ in range(n) ) else: ret = {} - for _ in xrange(n): + for _ in range(n): key = self._unpack(EX_CONSTRUCT) - if self._strict_map_key and type(key) not in (unicode, bytes): - raise ValueError( - "%s is not allowed for map key" % str(type(key)) - ) - if not PY2 and type(key) is str: + if self._strict_map_key and type(key) not in (str, bytes): + raise ValueError("%s is not allowed for map key" % str(type(key))) + if isinstance(key, str): key = sys.intern(key) ret[key] = self._unpack(EX_CONSTRUCT) if self._object_hook is not None: @@ -659,7 +604,7 @@ def tell(self): return self._stream_offset -class Packer(object): +class Packer: """ MessagePack Packer @@ -671,7 +616,8 @@ class Packer(object): Packer's constructor has some keyword arguments: - :param callable default: + :param default: + When specified, it should be callable. Convert user type to builtin type that Packer supports. See also simplejson's document. 
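# --- Editor's aside (not part of the patch): a minimal sketch of the Packer
# --- options documented in the hunk above. It assumes the standalone
# --- `msgpack` distribution, which this vendored copy tracks.
import datetime

import msgpack

# `datetime=True` packs tz-aware datetimes as the Timestamp extension type
# (the tzinfo itself is stripped); `timestamp=3` rebuilds a UTC datetime.
moment = datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc)
packed = msgpack.packb({"when": moment}, datetime=True)
assert msgpack.unpackb(packed, timestamp=3) == {"when": moment}

# `default` is the hook for user types the Packer cannot serialize itself.
def to_builtin(obj):
    if isinstance(obj, set):
        return sorted(obj)  # e.g. degrade sets to sorted lists
    raise TypeError(f"Cannot serialize {obj!r}")

assert msgpack.unpackb(msgpack.packb({"b", "a"}, default=to_builtin)) == ["a", "b"]
# --- End of editor's aside.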
@@ -698,7 +644,6 @@ class Packer(object): If set to true, datetime with tzinfo is packed into Timestamp type. Note that the tzinfo is stripped in the timestamp. You can get UTC datetime with `timestamp=3` option of the Unpacker. - (Python 2 is not supported). :param str unicode_errors: The error handler for encoding unicode. (default: 'strict') @@ -743,8 +688,6 @@ def __init__( self._autoreset = autoreset self._use_bin_type = use_bin_type self._buffer = StringIO() - if PY2 and datetime: - raise ValueError("datetime is not supported in Python 2") self._datetime = bool(datetime) self._unicode_errors = unicode_errors or "strict" if default is not None: @@ -774,7 +717,7 @@ def _pack( if obj: return self._buffer.write(b"\xc3") return self._buffer.write(b"\xc2") - if check(obj, int_types): + if check(obj, int): if 0 <= obj < 0x80: return self._buffer.write(struct.pack("B", obj)) if -0x20 <= obj < 0: @@ -806,7 +749,7 @@ def _pack( raise ValueError("%s is too large" % type(obj).__name__) self._pack_bin_header(n) return self._buffer.write(obj) - if check(obj, unicode): + if check(obj, str): obj = obj.encode("utf-8", self._unicode_errors) n = len(obj) if n >= 2**32: @@ -855,13 +798,11 @@ def _pack( if check(obj, list_types): n = len(obj) self._pack_array_header(n) - for i in xrange(n): + for i in range(n): self._pack(obj[i], nest_limit - 1) return if check(obj, dict): - return self._pack_map_pairs( - len(obj), dict_iteritems(obj), nest_limit - 1 - ) + return self._pack_map_pairs(len(obj), obj.items(), nest_limit - 1) if self._datetime and check(obj, _DateTime) and obj.tzinfo is not None: obj = Timestamp.from_datetime(obj) @@ -874,9 +815,9 @@ def _pack( continue if self._datetime and check(obj, _DateTime): - raise ValueError("Cannot serialize %r where tzinfo=None" % (obj,)) + raise ValueError(f"Cannot serialize {obj!r} where tzinfo=None") - raise TypeError("Cannot serialize %r" % (obj,)) + raise TypeError(f"Cannot serialize {obj!r}") def pack(self, obj): try: @@ -963,7 +904,7 @@ def _pack_map_header(self, n): def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): self._pack_map_header(n) - for (k, v) in pairs: + for k, v in pairs: self._pack(k, nest_limit - 1) self._pack(v, nest_limit - 1) @@ -1004,7 +945,7 @@ def reset(self): def getbuffer(self): """Return view of internal buffer.""" - if USING_STRINGBUILDER or PY2: + if USING_STRINGBUILDER: return memoryview(self.bytes()) else: return self._buffer.getbuffer() diff --git a/src/pip/_vendor/pkg_resources/LICENSE b/src/pip/_vendor/pkg_resources/LICENSE index 353924be0e5..1bb5a44356f 100644 --- a/src/pip/_vendor/pkg_resources/LICENSE +++ b/src/pip/_vendor/pkg_resources/LICENSE @@ -1,5 +1,3 @@ -Copyright Jason R. Coombs - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py index 1bf26a94226..89f49570f4a 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py +++ b/src/pip/_vendor/pkg_resources/__init__.py @@ -13,19 +13,21 @@ .zip files and with custom PEP 302 loaders that support the ``get_data()`` method. -This module is deprecated. Users are directed to -`importlib.resources `_ -and -`importlib.metadata `_ -instead. +This module is deprecated. Users are directed to :mod:`importlib.resources`, +:mod:`importlib.metadata` and :pypi:`packaging` instead. 
""" import sys + +if sys.version_info < (3, 8): + raise RuntimeError("Python 3.8 or later is required") + import os import io import time import re import types +from typing import Protocol import zipfile import zipimport import warnings @@ -44,18 +46,10 @@ import ntpath import posixpath import importlib +import importlib.machinery from pkgutil import get_importer -try: - import _imp -except ImportError: - # Python 3.2 compatibility - import imp as _imp - -try: - FileExistsError -except NameError: - FileExistsError = OSError +import _imp # capture these to bypass sandboxing from os import utime @@ -71,14 +65,6 @@ from os import open as os_open from os.path import isdir, split -try: - import importlib.machinery as importlib_machinery - - # access attribute to force import under delayed import mechanisms. - importlib_machinery.__name__ -except ImportError: - importlib_machinery = None - from pip._internal.utils._jaraco_text import ( yield_lines, drop_comment, @@ -94,9 +80,6 @@ __import__('pip._vendor.packaging.markers') __import__('pip._vendor.packaging.utils') -if sys.version_info < (3, 5): - raise RuntimeError("Python 3.5 or later is required") - # declare some globals that will be defined later to # satisfy the linters. require = None @@ -118,7 +101,12 @@ _namespace_packages = None -warnings.warn("pkg_resources is deprecated as an API", DeprecationWarning) +warnings.warn( + "pkg_resources is deprecated as an API. " + "See https://setuptools.pypa.io/en/latest/pkg_resources.html", + DeprecationWarning, + stacklevel=2, +) _PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I) @@ -405,20 +393,18 @@ def get_provider(moduleOrReq): return _find_adapter(_provider_factories, loader)(module) -def _macos_vers(_cache=[]): - if not _cache: - version = platform.mac_ver()[0] - # fallback for MacPorts - if version == '': - plist = '/System/Library/CoreServices/SystemVersion.plist' - if os.path.exists(plist): - if hasattr(plistlib, 'readPlist'): - plist_content = plistlib.readPlist(plist) - if 'ProductVersion' in plist_content: - version = plist_content['ProductVersion'] - - _cache.append(version.split('.')) - return _cache[0] +@functools.lru_cache(maxsize=None) +def _macos_vers(): + version = platform.mac_ver()[0] + # fallback for MacPorts + if version == '': + plist = '/System/Library/CoreServices/SystemVersion.plist' + if os.path.exists(plist): + with open(plist, 'rb') as fh: + plist_content = plistlib.load(fh) + if 'ProductVersion' in plist_content: + version = plist_content['ProductVersion'] + return version.split('.') def _macos_arch(machine): @@ -544,54 +530,54 @@ def get_entry_info(dist, group, name): return get_distribution(dist).get_entry_info(group, name) -class IMetadataProvider: - def has_metadata(name): +class IMetadataProvider(Protocol): + def has_metadata(self, name): """Does the package's distribution contain the named metadata?""" - def get_metadata(name): + def get_metadata(self, name): """The named metadata resource as a string""" - def get_metadata_lines(name): + def get_metadata_lines(self, name): """Yield named metadata resource as list of non-blank non-comment lines Leading and trailing whitespace is stripped from each line, and lines with ``#`` as the first non-blank character are omitted.""" - def metadata_isdir(name): + def metadata_isdir(self, name): """Is the named metadata a directory? 
(like ``os.path.isdir()``)""" - def metadata_listdir(name): + def metadata_listdir(self, name): """List of metadata names in the directory (like ``os.listdir()``)""" - def run_script(script_name, namespace): + def run_script(self, script_name, namespace): """Execute the named script in the supplied namespace dictionary""" -class IResourceProvider(IMetadataProvider): +class IResourceProvider(IMetadataProvider, Protocol): """An object that provides access to package resources""" - def get_resource_filename(manager, resource_name): + def get_resource_filename(self, manager, resource_name): """Return a true filesystem path for `resource_name` `manager` must be an ``IResourceManager``""" - def get_resource_stream(manager, resource_name): + def get_resource_stream(self, manager, resource_name): """Return a readable file-like object for `resource_name` `manager` must be an ``IResourceManager``""" - def get_resource_string(manager, resource_name): + def get_resource_string(self, manager, resource_name): """Return a string containing the contents of `resource_name` `manager` must be an ``IResourceManager``""" - def has_resource(resource_name): + def has_resource(self, resource_name): """Does the package contain the named resource?""" - def resource_isdir(resource_name): + def resource_isdir(self, resource_name): """Is the named resource a directory? (like ``os.path.isdir()``)""" - def resource_listdir(resource_name): + def resource_listdir(self, resource_name): """List of resource names in the directory (like ``os.listdir()``)""" @@ -1141,8 +1127,7 @@ def obtain(self, requirement, installer=None): None is returned instead. This method is a hook that allows subclasses to attempt other ways of obtaining a distribution before falling back to the `installer` argument.""" - if installer is not None: - return installer(requirement) + return installer(requirement) if installer else None def __iter__(self): """Yield the unique project names of the available distributions""" @@ -1416,7 +1401,7 @@ def _forgiving_version(version): match = _PEP440_FALLBACK.search(version) if match: safe = match["safe"] - rest = version[len(safe):] + rest = version[len(safe) :] else: safe = "0" rest = version @@ -1659,10 +1644,9 @@ def _validate_resource_path(path): # for compatibility, warn; in future # raise ValueError(msg) - warnings.warn( + issue_warning( msg[:-1] + " and will raise exceptions in a future release.", DeprecationWarning, - stacklevel=4, ) def _get(self, path): @@ -1733,7 +1717,7 @@ def _register(cls): 'SourcelessFileLoader', ) for name in loader_names: - loader_cls = getattr(importlib_machinery, name, type(None)) + loader_cls = getattr(importlib.machinery, name, type(None)) register_loader_type(loader_cls, cls) @@ -1873,7 +1857,7 @@ def _extract_resource(self, manager, zip_path): # noqa: C901 timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) if not WRITE_SUPPORT: - raise IOError( + raise OSError( '"os.rename" and "os.unlink" are not supported ' 'on this platform' ) try: @@ -1894,7 +1878,7 @@ def _extract_resource(self, manager, zip_path): # noqa: C901 try: rename(tmpnam, real_path) - except os.error: + except OSError: if os.path.isfile(real_path): if self._is_current(real_path, zip_path): # the file became current since it was checked above, @@ -1907,7 +1891,7 @@ def _extract_resource(self, manager, zip_path): # noqa: C901 return real_path raise - except os.error: + except OSError: # report a user-friendly error manager.extraction_error() @@ -2000,7 +1984,7 @@ def get_metadata(self, name): if 
name != 'PKG-INFO': raise KeyError("No metadata except PKG-INFO is available") - with io.open(self.path, encoding='utf-8', errors="replace") as f: + with open(self.path, encoding='utf-8', errors="replace") as f: metadata = f.read() self._warn_on_replacement(metadata) return metadata @@ -2094,8 +2078,7 @@ def find_eggs_in_zip(importer, path_item, only=False): if _is_egg_path(subitem): subpath = os.path.join(path_item, subitem) dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) - for dist in dists: - yield dist + yield from dists elif subitem.lower().endswith(('.dist-info', '.egg-info')): subpath = os.path.join(path_item, subitem) submeta = EggMetadata(zipimport.zipimporter(subpath)) @@ -2130,8 +2113,7 @@ def find_on_path(importer, path_item, only=False): for entry in sorted(entries): fullpath = os.path.join(path_item, entry) factory = dist_factory(path_item, entry, only) - for dist in factory(fullpath): - yield dist + yield from factory(fullpath) def dist_factory(path_item, entry, only): @@ -2230,7 +2212,7 @@ def resolve_egg_link(path): if hasattr(pkgutil, 'ImpImporter'): register_finder(pkgutil.ImpImporter, find_on_path) -register_finder(importlib_machinery.FileFinder, find_on_path) +register_finder(importlib.machinery.FileFinder, find_on_path) _declare_state('dict', _namespace_handlers={}) _declare_state('dict', _namespace_packages={}) @@ -2397,7 +2379,7 @@ def file_ns_handler(importer, path_item, packageName, module): register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) register_namespace_handler(zipimport.zipimporter, file_ns_handler) -register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) +register_namespace_handler(importlib.machinery.FileFinder, file_ns_handler) def null_ns_handler(importer, path_item, packageName, module): @@ -2423,12 +2405,9 @@ def _cygwin_patch(filename): # pragma: nocover return os.path.abspath(filename) if sys.platform == 'cygwin' else filename -def _normalize_cached(filename, _cache={}): - try: - return _cache[filename] - except KeyError: - _cache[filename] = result = normalize_path(filename) - return result +@functools.lru_cache(maxsize=None) +def _normalize_cached(filename): + return normalize_path(filename) def _is_egg_path(path): @@ -2849,14 +2828,11 @@ def _get_metadata_path_for_display(self, name): def _get_metadata(self, name): if self.has_metadata(name): - for line in self.get_metadata_lines(name): - yield line + yield from self.get_metadata_lines(name) def _get_version(self): lines = self._get_metadata(self.PKG_INFO) - version = _version_from_file(lines) - - return version + return _version_from_file(lines) def activate(self, path=None, replace=False): """Ensure distribution is importable on `path` (default=sys.path)""" @@ -2903,7 +2879,7 @@ def __getattr__(self, attr): def __dir__(self): return list( - set(super(Distribution, self).__dir__()) + set(super().__dir__()) | set(attr for attr in self._provider.__dir__() if not attr.startswith('_')) ) @@ -3170,7 +3146,7 @@ class RequirementParseError(packaging.requirements.InvalidRequirement): class Requirement(packaging.requirements.Requirement): def __init__(self, requirement_string): """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - super(Requirement, self).__init__(requirement_string) + super().__init__(requirement_string) self.unsafe_name = self.name project_name = safe_name(self.name) self.project_name, self.key = project_name, project_name.lower() @@ -3231,6 +3207,7 @@ def _find_adapter(registry, ob): for t in types: if t in 
registry: return registry[t] + return None def ensure_directory(path): @@ -3242,7 +3219,7 @@ def ensure_directory(path): def _bypass_ensure_directory(path): """Sandbox-bypassing version of ensure_directory()""" if not WRITE_SUPPORT: - raise IOError('"os.mkdir" not supported on this platform.') + raise OSError('"os.mkdir" not supported on this platform.') dirname, filename = split(path) if dirname and filename and not isdir(dirname): _bypass_ensure_directory(dirname) diff --git a/src/pip/_vendor/platformdirs/__init__.py b/src/pip/_vendor/platformdirs/__init__.py index c46a145cdc1..3da5ac1474f 100644 --- a/src/pip/_vendor/platformdirs/__init__.py +++ b/src/pip/_vendor/platformdirs/__init__.py @@ -2,21 +2,21 @@ Utilities for determining application-specific dirs. See for details and usage. """ + from __future__ import annotations import os import sys -from pathlib import Path - -if sys.version_info >= (3, 8): # pragma: no cover (py38+) - from typing import Literal -else: # pragma: no cover (py38+) - from pip._vendor.typing_extensions import Literal +from typing import TYPE_CHECKING from .api import PlatformDirsABC from .version import __version__ from .version import __version_tuple__ as __version_info__ +if TYPE_CHECKING: + from pathlib import Path + from typing import Literal + def _set_platform_dir_class() -> type[PlatformDirsABC]: if sys.platform == "win32": @@ -48,8 +48,8 @@ def user_data_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - ensure_exists: bool = False, + roaming: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -72,8 +72,8 @@ def site_data_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - multipath: bool = False, - ensure_exists: bool = False, + multipath: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -96,8 +96,8 @@ def user_config_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - ensure_exists: bool = False, + roaming: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -120,8 +120,8 @@ def site_config_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - multipath: bool = False, - ensure_exists: bool = False, + multipath: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -144,8 +144,8 @@ def user_cache_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -168,8 +168,8 @@ def site_cache_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. 
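# --- Editor's aside (not part of the patch): how the appname / appauthor /
# --- version arguments above compose into concrete directories. Assumes the
# --- standalone `platformdirs` package that this vendored copy tracks;
# --- "MyApp"/"MyCompany" are the sample names its own __main__ demo uses.
from platformdirs import site_cache_dir, user_cache_dir

# e.g. ~/.cache/MyApp/1.0 on Linux (appauthor is not used in XDG paths),
# ~/Library/Caches/MyApp/1.0 on macOS.
print(user_cache_dir("MyApp", "MyCompany", version="1.0"))
# e.g. /var/cache/MyApp/1.0 on Linux -- this release moves it off /var/tmp.
print(site_cache_dir("MyApp", "MyCompany", version="1.0"))
# --- End of editor's aside.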
@@ -192,8 +192,8 @@ def user_state_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - ensure_exists: bool = False, + roaming: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -216,8 +216,8 @@ def user_log_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -237,18 +237,41 @@ def user_log_dir( def user_documents_dir() -> str: - """ - :returns: documents directory tied to the user - """ + """:returns: documents directory tied to the user""" return PlatformDirs().user_documents_dir +def user_downloads_dir() -> str: + """:returns: downloads directory tied to the user""" + return PlatformDirs().user_downloads_dir + + +def user_pictures_dir() -> str: + """:returns: pictures directory tied to the user""" + return PlatformDirs().user_pictures_dir + + +def user_videos_dir() -> str: + """:returns: videos directory tied to the user""" + return PlatformDirs().user_videos_dir + + +def user_music_dir() -> str: + """:returns: music directory tied to the user""" + return PlatformDirs().user_music_dir + + +def user_desktop_dir() -> str: + """:returns: desktop directory tied to the user""" + return PlatformDirs().user_desktop_dir + + def user_runtime_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -267,12 +290,36 @@ def user_runtime_dir( ).user_runtime_dir +def site_runtime_dir( + appname: str | None = None, + appauthor: str | None | Literal[False] = None, + version: str | None = None, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 +) -> str: + """ + :param appname: See `appname `. + :param appauthor: See `appauthor `. + :param version: See `version `. + :param opinion: See `opinion `. + :param ensure_exists: See `ensure_exists `. + :returns: runtime directory shared by users + """ + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + opinion=opinion, + ensure_exists=ensure_exists, + ).site_runtime_dir + + def user_data_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - ensure_exists: bool = False, + roaming: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -295,8 +342,8 @@ def site_data_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - multipath: bool = False, - ensure_exists: bool = False, + multipath: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. 
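# --- Editor's aside (not part of the patch): the new module-level helpers
# --- introduced in the hunks above. Assumes the standalone `platformdirs`
# --- package that this vendored copy tracks.
import platformdirs

# Well-known user folders; on Unix they honour the XDG user-dirs settings
# (see the `_get_user_media_dir` helper later in this diff).
print(platformdirs.user_downloads_dir())
print(platformdirs.user_pictures_dir())
print(platformdirs.user_videos_dir())
print(platformdirs.user_music_dir())
print(platformdirs.user_desktop_dir())

# And the users-shared counterpart of user_runtime_dir, also new here:
print(platformdirs.site_runtime_dir("MyApp", "MyCompany", version="1.0"))
# --- End of editor's aside.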
@@ -319,8 +366,8 @@ def user_config_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - ensure_exists: bool = False, + roaming: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -343,8 +390,8 @@ def site_config_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - multipath: bool = False, - ensure_exists: bool = False, + multipath: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -367,8 +414,8 @@ def site_cache_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -391,8 +438,8 @@ def user_cache_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -415,8 +462,8 @@ def user_state_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - ensure_exists: bool = False, + roaming: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -439,8 +486,8 @@ def user_log_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -460,18 +507,41 @@ def user_log_path( def user_documents_path() -> Path: - """ - :returns: documents path tied to the user - """ + """:returns: documents path tied to the user""" return PlatformDirs().user_documents_path +def user_downloads_path() -> Path: + """:returns: downloads path tied to the user""" + return PlatformDirs().user_downloads_path + + +def user_pictures_path() -> Path: + """:returns: pictures path tied to the user""" + return PlatformDirs().user_pictures_path + + +def user_videos_path() -> Path: + """:returns: videos path tied to the user""" + return PlatformDirs().user_videos_path + + +def user_music_path() -> Path: + """:returns: music path tied to the user""" + return PlatformDirs().user_music_path + + +def user_desktop_path() -> Path: + """:returns: desktop path tied to the user""" + return PlatformDirs().user_desktop_path + + def user_runtime_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. 
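# --- Editor's aside (not part of the patch): every *_dir helper above has a
# --- *_path twin returning pathlib.Path, and ensure_exists=True asks the
# --- library to create the directory on first use. Assumes the standalone
# --- `platformdirs` package.
from platformdirs import user_log_path

log_dir = user_log_path("MyApp", "MyCompany", ensure_exists=True)
(log_dir / "debug.log").write_text("started\n", encoding="utf-8")
# --- End of editor's aside.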
@@ -490,6 +560,30 @@ def user_runtime_path( ).user_runtime_path +def site_runtime_path( + appname: str | None = None, + appauthor: str | None | Literal[False] = None, + version: str | None = None, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 +) -> Path: + """ + :param appname: See `appname `. + :param appauthor: See `appauthor `. + :param version: See `version `. + :param opinion: See `opinion `. + :param ensure_exists: See `ensure_exists `. + :returns: runtime path shared by users + """ + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + opinion=opinion, + ensure_exists=ensure_exists, + ).site_runtime_path + + __all__ = [ "__version__", "__version_info__", @@ -502,18 +596,30 @@ def user_runtime_path( "user_state_dir", "user_log_dir", "user_documents_dir", + "user_downloads_dir", + "user_pictures_dir", + "user_videos_dir", + "user_music_dir", + "user_desktop_dir", "user_runtime_dir", "site_data_dir", "site_config_dir", "site_cache_dir", + "site_runtime_dir", "user_data_path", "user_config_path", "user_cache_path", "user_state_path", "user_log_path", "user_documents_path", + "user_downloads_path", + "user_pictures_path", + "user_videos_path", + "user_music_path", + "user_desktop_path", "user_runtime_path", "site_data_path", "site_config_path", "site_cache_path", + "site_runtime_path", ] diff --git a/src/pip/_vendor/platformdirs/__main__.py b/src/pip/_vendor/platformdirs/__main__.py index 7171f13114e..61342265b60 100644 --- a/src/pip/_vendor/platformdirs/__main__.py +++ b/src/pip/_vendor/platformdirs/__main__.py @@ -1,3 +1,5 @@ +"""Main entry point.""" + from __future__ import annotations from pip._vendor.platformdirs import PlatformDirs, __version__ @@ -9,38 +11,44 @@ "user_state_dir", "user_log_dir", "user_documents_dir", + "user_downloads_dir", + "user_pictures_dir", + "user_videos_dir", + "user_music_dir", "user_runtime_dir", "site_data_dir", "site_config_dir", "site_cache_dir", + "site_runtime_dir", ) def main() -> None: + """Run main entry point.""" app_name = "MyApp" app_author = "MyCompany" - print(f"-- platformdirs {__version__} --") + print(f"-- platformdirs {__version__} --") # noqa: T201 - print("-- app dirs (with optional 'version')") + print("-- app dirs (with optional 'version')") # noqa: T201 dirs = PlatformDirs(app_name, app_author, version="1.0") for prop in PROPS: - print(f"{prop}: {getattr(dirs, prop)}") + print(f"{prop}: {getattr(dirs, prop)}") # noqa: T201 - print("\n-- app dirs (without optional 'version')") + print("\n-- app dirs (without optional 'version')") # noqa: T201 dirs = PlatformDirs(app_name, app_author) for prop in PROPS: - print(f"{prop}: {getattr(dirs, prop)}") + print(f"{prop}: {getattr(dirs, prop)}") # noqa: T201 - print("\n-- app dirs (without optional 'appauthor')") + print("\n-- app dirs (without optional 'appauthor')") # noqa: T201 dirs = PlatformDirs(app_name) for prop in PROPS: - print(f"{prop}: {getattr(dirs, prop)}") + print(f"{prop}: {getattr(dirs, prop)}") # noqa: T201 - print("\n-- app dirs (with disabled 'appauthor')") + print("\n-- app dirs (with disabled 'appauthor')") # noqa: T201 dirs = PlatformDirs(app_name, appauthor=False) for prop in PROPS: - print(f"{prop}: {getattr(dirs, prop)}") + print(f"{prop}: {getattr(dirs, prop)}") # noqa: T201 if __name__ == "__main__": diff --git a/src/pip/_vendor/platformdirs/android.py b/src/pip/_vendor/platformdirs/android.py index f6de7451b25..4acdb63833f 100644 --- a/src/pip/_vendor/platformdirs/android.py +++ 
b/src/pip/_vendor/platformdirs/android.py @@ -1,3 +1,5 @@ +"""Android.""" + from __future__ import annotations import os @@ -30,7 +32,8 @@ def site_data_dir(self) -> str: @property def user_config_dir(self) -> str: """ - :return: config directory tied to the user, e.g. ``/data/user///shared_prefs/`` + :return: config directory tied to the user, e.g. \ + ``/data/user///shared_prefs/`` """ return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs") @@ -62,16 +65,39 @@ def user_log_dir(self) -> str: """ path = self.user_cache_dir if self.opinion: - path = os.path.join(path, "log") + path = os.path.join(path, "log") # noqa: PTH118 return path @property def user_documents_dir(self) -> str: - """ - :return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents`` - """ + """:return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``""" return _android_documents_folder() + @property + def user_downloads_dir(self) -> str: + """:return: downloads directory tied to the user e.g. ``/storage/emulated/0/Downloads``""" + return _android_downloads_folder() + + @property + def user_pictures_dir(self) -> str: + """:return: pictures directory tied to the user e.g. ``/storage/emulated/0/Pictures``""" + return _android_pictures_folder() + + @property + def user_videos_dir(self) -> str: + """:return: videos directory tied to the user e.g. ``/storage/emulated/0/DCIM/Camera``""" + return _android_videos_folder() + + @property + def user_music_dir(self) -> str: + """:return: music directory tied to the user e.g. ``/storage/emulated/0/Music``""" + return _android_music_folder() + + @property + def user_desktop_dir(self) -> str: + """:return: desktop directory tied to the user e.g. ``/storage/emulated/0/Desktop``""" + return "/storage/emulated/0/Desktop" + @property def user_runtime_dir(self) -> str: """ @@ -80,20 +106,25 @@ def user_runtime_dir(self) -> str: """ path = self.user_cache_dir if self.opinion: - path = os.path.join(path, "tmp") + path = os.path.join(path, "tmp") # noqa: PTH118 return path + @property + def site_runtime_dir(self) -> str: + """:return: runtime directory shared by users, same as `user_runtime_dir`""" + return self.user_runtime_dir + @lru_cache(maxsize=1) def _android_folder() -> str | None: - """:return: base folder for the Android OS or None if cannot be found""" + """:return: base folder for the Android OS or None if it cannot be found""" try: # First try to get path to android app via pyjnius from jnius import autoclass - Context = autoclass("android.content.Context") # noqa: N806 - result: str | None = Context.getFilesDir().getParentFile().getAbsolutePath() - except Exception: + context = autoclass("android.content.Context") + result: str | None = context.getFilesDir().getParentFile().getAbsolutePath() + except Exception: # noqa: BLE001 # if fails find an android folder looking path on the sys.path pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files") for path in sys.path: @@ -112,15 +143,79 @@ def _android_documents_folder() -> str: try: from jnius import autoclass - Context = autoclass("android.content.Context") # noqa: N806 - Environment = autoclass("android.os.Environment") # noqa: N806 - documents_dir: str = Context.getExternalFilesDir(Environment.DIRECTORY_DOCUMENTS).getAbsolutePath() - except Exception: + context = autoclass("android.content.Context") + environment = autoclass("android.os.Environment") + documents_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOCUMENTS).getAbsolutePath() + 
except Exception: # noqa: BLE001 documents_dir = "/storage/emulated/0/Documents" return documents_dir +@lru_cache(maxsize=1) +def _android_downloads_folder() -> str: + """:return: downloads folder for the Android OS""" + # Get directories with pyjnius + try: + from jnius import autoclass + + context = autoclass("android.content.Context") + environment = autoclass("android.os.Environment") + downloads_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOWNLOADS).getAbsolutePath() + except Exception: # noqa: BLE001 + downloads_dir = "/storage/emulated/0/Downloads" + + return downloads_dir + + +@lru_cache(maxsize=1) +def _android_pictures_folder() -> str: + """:return: pictures folder for the Android OS""" + # Get directories with pyjnius + try: + from jnius import autoclass + + context = autoclass("android.content.Context") + environment = autoclass("android.os.Environment") + pictures_dir: str = context.getExternalFilesDir(environment.DIRECTORY_PICTURES).getAbsolutePath() + except Exception: # noqa: BLE001 + pictures_dir = "/storage/emulated/0/Pictures" + + return pictures_dir + + +@lru_cache(maxsize=1) +def _android_videos_folder() -> str: + """:return: videos folder for the Android OS""" + # Get directories with pyjnius + try: + from jnius import autoclass + + context = autoclass("android.content.Context") + environment = autoclass("android.os.Environment") + videos_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DCIM).getAbsolutePath() + except Exception: # noqa: BLE001 + videos_dir = "/storage/emulated/0/DCIM/Camera" + + return videos_dir + + +@lru_cache(maxsize=1) +def _android_music_folder() -> str: + """:return: music folder for the Android OS""" + # Get directories with pyjnius + try: + from jnius import autoclass + + context = autoclass("android.content.Context") + environment = autoclass("android.os.Environment") + music_dir: str = context.getExternalFilesDir(environment.DIRECTORY_MUSIC).getAbsolutePath() + except Exception: # noqa: BLE001 + music_dir = "/storage/emulated/0/Music" + + return music_dir + + __all__ = [ "Android", ] diff --git a/src/pip/_vendor/platformdirs/api.py b/src/pip/_vendor/platformdirs/api.py index f140e8b6db8..031a38a3d36 100644 --- a/src/pip/_vendor/platformdirs/api.py +++ b/src/pip/_vendor/platformdirs/api.py @@ -1,29 +1,29 @@ +"""Base API.""" + from __future__ import annotations import os -import sys from abc import ABC, abstractmethod from pathlib import Path +from typing import TYPE_CHECKING -if sys.version_info >= (3, 8): # pragma: no branch - from typing import Literal # pragma: no cover +if TYPE_CHECKING: + from typing import Iterator, Literal class PlatformDirsABC(ABC): - """ - Abstract base class for platform directories. - """ + """Abstract base class for platform directories.""" - def __init__( + def __init__( # noqa: PLR0913 self, appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - multipath: bool = False, - opinion: bool = True, - ensure_exists: bool = False, - ): + roaming: bool = False, # noqa: FBT001, FBT002 + multipath: bool = False, # noqa: FBT001, FBT002 + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 + ) -> None: """ Create a new platform directory. @@ -54,8 +54,8 @@ def __init__( """ self.multipath = multipath """ - An optional parameter only applicable to Unix/Linux which indicates that the entire list of data dirs should be - returned. 
By default, the first item would only be returned. + An optional parameter which indicates that the entire list of data dirs should be returned. + By default, the first item would only be returned. """ self.opinion = opinion #: A flag to indicating to use opinionated values. self.ensure_exists = ensure_exists @@ -70,7 +70,7 @@ def _append_app_name_and_version(self, *base: str) -> str: params.append(self.appname) if self.version: params.append(self.version) - path = os.path.join(base[0], *params) + path = os.path.join(base[0], *params) # noqa: PTH118 self._optionally_create_directory(path) return path @@ -123,11 +123,41 @@ def user_log_dir(self) -> str: def user_documents_dir(self) -> str: """:return: documents directory tied to the user""" + @property + @abstractmethod + def user_downloads_dir(self) -> str: + """:return: downloads directory tied to the user""" + + @property + @abstractmethod + def user_pictures_dir(self) -> str: + """:return: pictures directory tied to the user""" + + @property + @abstractmethod + def user_videos_dir(self) -> str: + """:return: videos directory tied to the user""" + + @property + @abstractmethod + def user_music_dir(self) -> str: + """:return: music directory tied to the user""" + + @property + @abstractmethod + def user_desktop_dir(self) -> str: + """:return: desktop directory tied to the user""" + @property @abstractmethod def user_runtime_dir(self) -> str: """:return: runtime directory tied to the user""" + @property + @abstractmethod + def site_runtime_dir(self) -> str: + """:return: runtime directory shared by users""" + @property def user_data_path(self) -> Path: """:return: data path tied to the user""" @@ -173,7 +203,77 @@ def user_documents_path(self) -> Path: """:return: documents path tied to the user""" return Path(self.user_documents_dir) + @property + def user_downloads_path(self) -> Path: + """:return: downloads path tied to the user""" + return Path(self.user_downloads_dir) + + @property + def user_pictures_path(self) -> Path: + """:return: pictures path tied to the user""" + return Path(self.user_pictures_dir) + + @property + def user_videos_path(self) -> Path: + """:return: videos path tied to the user""" + return Path(self.user_videos_dir) + + @property + def user_music_path(self) -> Path: + """:return: music path tied to the user""" + return Path(self.user_music_dir) + + @property + def user_desktop_path(self) -> Path: + """:return: desktop path tied to the user""" + return Path(self.user_desktop_dir) + @property def user_runtime_path(self) -> Path: """:return: runtime path tied to the user""" return Path(self.user_runtime_dir) + + @property + def site_runtime_path(self) -> Path: + """:return: runtime path shared by users""" + return Path(self.site_runtime_dir) + + def iter_config_dirs(self) -> Iterator[str]: + """:yield: all user and site configuration directories.""" + yield self.user_config_dir + yield self.site_config_dir + + def iter_data_dirs(self) -> Iterator[str]: + """:yield: all user and site data directories.""" + yield self.user_data_dir + yield self.site_data_dir + + def iter_cache_dirs(self) -> Iterator[str]: + """:yield: all user and site cache directories.""" + yield self.user_cache_dir + yield self.site_cache_dir + + def iter_runtime_dirs(self) -> Iterator[str]: + """:yield: all user and site runtime directories.""" + yield self.user_runtime_dir + yield self.site_runtime_dir + + def iter_config_paths(self) -> Iterator[Path]: + """:yield: all user and site configuration paths.""" + for path in self.iter_config_dirs(): + 
yield Path(path) + + def iter_data_paths(self) -> Iterator[Path]: + """:yield: all user and site data paths.""" + for path in self.iter_data_dirs(): + yield Path(path) + + def iter_cache_paths(self) -> Iterator[Path]: + """:yield: all user and site cache paths.""" + for path in self.iter_cache_dirs(): + yield Path(path) + + def iter_runtime_paths(self) -> Iterator[Path]: + """:yield: all user and site runtime paths.""" + for path in self.iter_runtime_dirs(): + yield Path(path) diff --git a/src/pip/_vendor/platformdirs/macos.py b/src/pip/_vendor/platformdirs/macos.py index ec9751129c1..b7b48808ca7 100644 --- a/src/pip/_vendor/platformdirs/macos.py +++ b/src/pip/_vendor/platformdirs/macos.py @@ -1,6 +1,9 @@ +"""macOS.""" + from __future__ import annotations -import os +import os.path +import sys from .api import PlatformDirsABC @@ -17,12 +20,24 @@ class MacOS(PlatformDirsABC): @property def user_data_dir(self) -> str: """:return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support")) + return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support")) # noqa: PTH111 @property def site_data_dir(self) -> str: - """:return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``""" - return self._append_app_name_and_version("/Library/Application Support") + """ + :return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``. + If we're using a Python binary managed by `Homebrew `_, the directory + will be under the Homebrew prefix, e.g. ``/opt/homebrew/share/$appname/$version``. + If `multipath ` is enabled and we're in Homebrew, + the response is a multi-path string separated by ":", e.g. + ``/opt/homebrew/share/$appname/$version:/Library/Application Support/$appname/$version`` + """ + is_homebrew = sys.prefix.startswith("/opt/homebrew") + path_list = [self._append_app_name_and_version("/opt/homebrew/share")] if is_homebrew else [] + path_list.append(self._append_app_name_and_version("/Library/Application Support")) + if self.multipath: + return os.pathsep.join(path_list) + return path_list[0] @property def user_config_dir(self) -> str: @@ -37,12 +52,24 @@ def site_config_dir(self) -> str: @property def user_cache_dir(self) -> str: """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches")) + return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches")) # noqa: PTH111 @property def site_cache_dir(self) -> str: - """:return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``""" - return self._append_app_name_and_version("/Library/Caches") + """ + :return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``. + If we're using a Python binary managed by `Homebrew `_, the directory + will be under the Homebrew prefix, e.g. ``/opt/homebrew/var/cache/$appname/$version``. + If `multipath ` is enabled and we're in Homebrew, + the response is a multi-path string separated by ":", e.g. 
+ ``/opt/homebrew/var/cache/$appname/$version:/Library/Caches/$appname/$version`` + """ + is_homebrew = sys.prefix.startswith("/opt/homebrew") + path_list = [self._append_app_name_and_version("/opt/homebrew/var/cache")] if is_homebrew else [] + path_list.append(self._append_app_name_and_version("/Library/Caches")) + if self.multipath: + return os.pathsep.join(path_list) + return path_list[0] @property def user_state_dir(self) -> str: @@ -52,17 +79,47 @@ def user_state_dir(self) -> str: @property def user_log_dir(self) -> str: """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs")) + return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs")) # noqa: PTH111 @property def user_documents_dir(self) -> str: """:return: documents directory tied to the user, e.g. ``~/Documents``""" - return os.path.expanduser("~/Documents") + return os.path.expanduser("~/Documents") # noqa: PTH111 + + @property + def user_downloads_dir(self) -> str: + """:return: downloads directory tied to the user, e.g. ``~/Downloads``""" + return os.path.expanduser("~/Downloads") # noqa: PTH111 + + @property + def user_pictures_dir(self) -> str: + """:return: pictures directory tied to the user, e.g. ``~/Pictures``""" + return os.path.expanduser("~/Pictures") # noqa: PTH111 + + @property + def user_videos_dir(self) -> str: + """:return: videos directory tied to the user, e.g. ``~/Movies``""" + return os.path.expanduser("~/Movies") # noqa: PTH111 + + @property + def user_music_dir(self) -> str: + """:return: music directory tied to the user, e.g. ``~/Music``""" + return os.path.expanduser("~/Music") # noqa: PTH111 + + @property + def user_desktop_dir(self) -> str: + """:return: desktop directory tied to the user, e.g. ``~/Desktop``""" + return os.path.expanduser("~/Desktop") # noqa: PTH111 @property def user_runtime_dir(self) -> str: """:return: runtime directory tied to the user, e.g. 
``~/Library/Caches/TemporaryItems/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems")) + return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems")) # noqa: PTH111 + + @property + def site_runtime_dir(self) -> str: + """:return: runtime directory shared by users, same as `user_runtime_dir`""" + return self.user_runtime_dir __all__ = [ diff --git a/src/pip/_vendor/platformdirs/unix.py b/src/pip/_vendor/platformdirs/unix.py index 17d355da9f4..ca4728e6079 100644 --- a/src/pip/_vendor/platformdirs/unix.py +++ b/src/pip/_vendor/platformdirs/unix.py @@ -1,18 +1,23 @@ +"""Unix.""" + from __future__ import annotations import os import sys from configparser import ConfigParser from pathlib import Path +from typing import Iterator from .api import PlatformDirsABC -if sys.platform.startswith("linux"): # pragma: no branch # no op check, only to please the type checker - from os import getuid -else: +if sys.platform == "win32": def getuid() -> int: - raise RuntimeError("should only be used on Linux") + msg = "should only be used on Unix" + raise RuntimeError(msg) + +else: + from os import getuid class Unix(PlatformDirsABC): @@ -36,28 +41,28 @@ def user_data_dir(self) -> str: """ path = os.environ.get("XDG_DATA_HOME", "") if not path.strip(): - path = os.path.expanduser("~/.local/share") + path = os.path.expanduser("~/.local/share") # noqa: PTH111 return self._append_app_name_and_version(path) + @property + def _site_data_dirs(self) -> list[str]: + path = os.environ.get("XDG_DATA_DIRS", "") + if not path.strip(): + path = f"/usr/local/share{os.pathsep}/usr/share" + return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)] + @property def site_data_dir(self) -> str: """ :return: data directories shared by users (if `multipath ` is - enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS - path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version`` + enabled and ``XDG_DATA_DIRS`` is set and a multi path the response is also a multi path separated by the + OS path separator), e.g. 
``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version`` """ # XDG default for $XDG_DATA_DIRS; only first, if multipath is False - path = os.environ.get("XDG_DATA_DIRS", "") - if not path.strip(): - path = f"/usr/local/share{os.pathsep}/usr/share" - return self._with_multi_path(path) - - def _with_multi_path(self, path: str) -> str: - path_list = path.split(os.pathsep) + dirs = self._site_data_dirs if not self.multipath: - path_list = path_list[0:1] - path_list = [self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list] - return os.pathsep.join(path_list) + return dirs[0] + return os.pathsep.join(dirs) @property def user_config_dir(self) -> str: @@ -67,21 +72,28 @@ def user_config_dir(self) -> str: """ path = os.environ.get("XDG_CONFIG_HOME", "") if not path.strip(): - path = os.path.expanduser("~/.config") + path = os.path.expanduser("~/.config") # noqa: PTH111 return self._append_app_name_and_version(path) + @property + def _site_config_dirs(self) -> list[str]: + path = os.environ.get("XDG_CONFIG_DIRS", "") + if not path.strip(): + path = "/etc/xdg" + return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)] + @property def site_config_dir(self) -> str: """ :return: config directories shared by users (if `multipath ` - is enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS - path separator), e.g. ``/etc/xdg/$appname/$version`` + is enabled and ``XDG_CONFIG_DIRS`` is set and a multi path the response is also a multi path separated by + the OS path separator), e.g. ``/etc/xdg/$appname/$version`` """ # XDG default for $XDG_CONFIG_DIRS only first, if multipath is False - path = os.environ.get("XDG_CONFIG_DIRS", "") - if not path.strip(): - path = "/etc/xdg" - return self._with_multi_path(path) + dirs = self._site_config_dirs + if not self.multipath: + return dirs[0] + return os.pathsep.join(dirs) @property def user_cache_dir(self) -> str: @@ -91,15 +103,13 @@ def user_cache_dir(self) -> str: """ path = os.environ.get("XDG_CACHE_HOME", "") if not path.strip(): - path = os.path.expanduser("~/.cache") + path = os.path.expanduser("~/.cache") # noqa: PTH111 return self._append_app_name_and_version(path) @property def site_cache_dir(self) -> str: - """ - :return: cache directory shared by users, e.g. ``/var/tmp/$appname/$version`` - """ - return self._append_app_name_and_version("/var/tmp") + """:return: cache directory shared by users, e.g. ``/var/cache/$appname/$version``""" + return self._append_app_name_and_version("/var/cache") @property def user_state_dir(self) -> str: @@ -109,41 +119,88 @@ def user_state_dir(self) -> str: """ path = os.environ.get("XDG_STATE_HOME", "") if not path.strip(): - path = os.path.expanduser("~/.local/state") + path = os.path.expanduser("~/.local/state") # noqa: PTH111 return self._append_app_name_and_version(path) @property def user_log_dir(self) -> str: - """ - :return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it - """ + """:return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it""" path = self.user_state_dir if self.opinion: - path = os.path.join(path, "log") + path = os.path.join(path, "log") # noqa: PTH118 + self._optionally_create_directory(path) return path @property def user_documents_dir(self) -> str: - """ - :return: documents directory tied to the user, e.g. 
``~/Documents`` - """ - documents_dir = _get_user_dirs_folder("XDG_DOCUMENTS_DIR") - if documents_dir is None: - documents_dir = os.environ.get("XDG_DOCUMENTS_DIR", "").strip() - if not documents_dir: - documents_dir = os.path.expanduser("~/Documents") + """:return: documents directory tied to the user, e.g. ``~/Documents``""" + return _get_user_media_dir("XDG_DOCUMENTS_DIR", "~/Documents") + + @property + def user_downloads_dir(self) -> str: + """:return: downloads directory tied to the user, e.g. ``~/Downloads``""" + return _get_user_media_dir("XDG_DOWNLOAD_DIR", "~/Downloads") + + @property + def user_pictures_dir(self) -> str: + """:return: pictures directory tied to the user, e.g. ``~/Pictures``""" + return _get_user_media_dir("XDG_PICTURES_DIR", "~/Pictures") + + @property + def user_videos_dir(self) -> str: + """:return: videos directory tied to the user, e.g. ``~/Videos``""" + return _get_user_media_dir("XDG_VIDEOS_DIR", "~/Videos") + + @property + def user_music_dir(self) -> str: + """:return: music directory tied to the user, e.g. ``~/Music``""" + return _get_user_media_dir("XDG_MUSIC_DIR", "~/Music") - return documents_dir + @property + def user_desktop_dir(self) -> str: + """:return: desktop directory tied to the user, e.g. ``~/Desktop``""" + return _get_user_media_dir("XDG_DESKTOP_DIR", "~/Desktop") @property def user_runtime_dir(self) -> str: """ :return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or - ``$XDG_RUNTIME_DIR/$appname/$version`` + ``$XDG_RUNTIME_DIR/$appname/$version``. + + For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/user/$(id -u)/$appname/$version`` if it + exists, otherwise ``/tmp/runtime-$(id -u)/$appname/$version``, if ``$XDG_RUNTIME_DIR`` + is not set. """ path = os.environ.get("XDG_RUNTIME_DIR", "") if not path.strip(): + if sys.platform.startswith(("freebsd", "openbsd", "netbsd")): + path = f"/var/run/user/{getuid()}" + if not Path(path).exists(): + path = f"/tmp/runtime-{getuid()}" # noqa: S108 + else: + path = f"/run/user/{getuid()}" return self._append_app_name_and_version(path) + + @property + def site_runtime_dir(self) -> str: + """ + :return: runtime directory shared by users, e.g. ``/run/$appname/$version`` or \ + ``$XDG_RUNTIME_DIR/$appname/$version``. + + Note that this behaves almost exactly like `user_runtime_dir` if ``$XDG_RUNTIME_DIR`` is set, but will + fall back to paths associated with the root user instead of a regular logged-in user if it's not set. + + If you wish to ensure that a logged-in root user path is returned e.g. ``/run/user/0``, use `user_runtime_dir` + instead. + + For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/$appname/$version`` if ``$XDG_RUNTIME_DIR`` is not set. 
""" path = os.environ.get("XDG_RUNTIME_DIR", "") if not path.strip(): - path = f"/run/user/{getuid()}" + if sys.platform.startswith(("freebsd", "openbsd", "netbsd")): + path = "/var/run" + else: + path = "/run" return self._append_app_name_and_version(path) @property @@ -167,14 +224,34 @@ def _first_item_as_path_if_multipath(self, directory: str) -> Path: directory = directory.split(os.pathsep)[0] return Path(directory) + def iter_config_dirs(self) -> Iterator[str]: + """:yield: all user and site configuration directories.""" + yield self.user_config_dir + yield from self._site_config_dirs + + def iter_data_dirs(self) -> Iterator[str]: + """:yield: all user and site data directories.""" + yield self.user_data_dir + yield from self._site_data_dirs + + +def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str: + media_dir = _get_user_dirs_folder(env_var) + if media_dir is None: + media_dir = os.environ.get(env_var, "").strip() + if not media_dir: + media_dir = os.path.expanduser(fallback_tilde_path) # noqa: PTH111 + + return media_dir + def _get_user_dirs_folder(key: str) -> str | None: - """Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/""" - user_dirs_config_path = os.path.join(Unix().user_config_dir, "user-dirs.dirs") - if os.path.exists(user_dirs_config_path): + """Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/.""" + user_dirs_config_path = Path(Unix().user_config_dir) / "user-dirs.dirs" + if user_dirs_config_path.exists(): parser = ConfigParser() - with open(user_dirs_config_path) as stream: + with user_dirs_config_path.open() as stream: # Add fake section header, so ConfigParser doesn't complain parser.read_string(f"[top]\n{stream.read()}") @@ -183,8 +260,7 @@ def _get_user_dirs_folder(key: str) -> str | None: path = parser["top"][key].strip('"') # Handle relative home paths - path = path.replace("$HOME", os.path.expanduser("~")) - return path + return path.replace("$HOME", os.path.expanduser("~")) # noqa: PTH111 return None diff --git a/src/pip/_vendor/platformdirs/version.py b/src/pip/_vendor/platformdirs/version.py index d906a2c99e6..cc1e34568ad 100644 --- a/src/pip/_vendor/platformdirs/version.py +++ b/src/pip/_vendor/platformdirs/version.py @@ -1,4 +1,16 @@ # file generated by setuptools_scm # don't change, don't track in version control -__version__ = version = '3.2.0' -__version_tuple__ = version_tuple = (3, 2, 0) +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple, Union + VERSION_TUPLE = Tuple[Union[int, str], ...] +else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '4.2.0' +__version_tuple__ = version_tuple = (4, 2, 0) diff --git a/src/pip/_vendor/platformdirs/windows.py b/src/pip/_vendor/platformdirs/windows.py index e7573c3d6ae..c62d0c8d2ba 100644 --- a/src/pip/_vendor/platformdirs/windows.py +++ b/src/pip/_vendor/platformdirs/windows.py @@ -1,16 +1,22 @@ +"""Windows.""" + from __future__ import annotations import ctypes import os import sys from functools import lru_cache -from typing import Callable +from typing import TYPE_CHECKING from .api import PlatformDirsABC +if TYPE_CHECKING: + from collections.abc import Callable + class Windows(PlatformDirsABC): - """`MSDN on where to store app data files + """ + `MSDN on where to store app data files `_. 
Makes use of the `appname `, @@ -43,7 +49,7 @@ def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str: params.append(opinion_value) if self.version: params.append(self.version) - path = os.path.join(path, *params) + path = os.path.join(path, *params) # noqa: PTH118 self._optionally_create_directory(path) return path @@ -85,36 +91,63 @@ def user_state_dir(self) -> str: @property def user_log_dir(self) -> str: - """ - :return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it - """ + """:return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it""" path = self.user_data_dir if self.opinion: - path = os.path.join(path, "Logs") + path = os.path.join(path, "Logs") # noqa: PTH118 self._optionally_create_directory(path) return path @property def user_documents_dir(self) -> str: - """ - :return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents`` - """ + """:return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``""" return os.path.normpath(get_win_folder("CSIDL_PERSONAL")) + @property + def user_downloads_dir(self) -> str: + """:return: downloads directory tied to the user e.g. ``%USERPROFILE%\\Downloads``""" + return os.path.normpath(get_win_folder("CSIDL_DOWNLOADS")) + + @property + def user_pictures_dir(self) -> str: + """:return: pictures directory tied to the user e.g. ``%USERPROFILE%\\Pictures``""" + return os.path.normpath(get_win_folder("CSIDL_MYPICTURES")) + + @property + def user_videos_dir(self) -> str: + """:return: videos directory tied to the user e.g. ``%USERPROFILE%\\Videos``""" + return os.path.normpath(get_win_folder("CSIDL_MYVIDEO")) + + @property + def user_music_dir(self) -> str: + """:return: music directory tied to the user e.g. ``%USERPROFILE%\\Music``""" + return os.path.normpath(get_win_folder("CSIDL_MYMUSIC")) + + @property + def user_desktop_dir(self) -> str: + """:return: desktop directory tied to the user, e.g. ``%USERPROFILE%\\Desktop``""" + return os.path.normpath(get_win_folder("CSIDL_DESKTOPDIRECTORY")) + @property def user_runtime_dir(self) -> str: """ :return: runtime directory tied to the user, e.g. 
``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname`` """ - path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp")) + path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp")) # noqa: PTH118 return self._append_parts(path) + @property + def site_runtime_dir(self) -> str: + """:return: runtime directory shared by users, same as `user_runtime_dir`""" + return self.user_runtime_dir + def get_win_folder_from_env_vars(csidl_name: str) -> str: """Get folder from environment variables.""" - if csidl_name == "CSIDL_PERSONAL": # does not have an environment name - return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents") + result = get_win_folder_if_csidl_name_not_env_var(csidl_name) + if result is not None: + return result env_var_name = { "CSIDL_APPDATA": "APPDATA", @@ -122,28 +155,54 @@ def get_win_folder_from_env_vars(csidl_name: str) -> str: "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA", }.get(csidl_name) if env_var_name is None: - raise ValueError(f"Unknown CSIDL name: {csidl_name}") + msg = f"Unknown CSIDL name: {csidl_name}" + raise ValueError(msg) result = os.environ.get(env_var_name) if result is None: - raise ValueError(f"Unset environment variable: {env_var_name}") + msg = f"Unset environment variable: {env_var_name}" + raise ValueError(msg) return result +def get_win_folder_if_csidl_name_not_env_var(csidl_name: str) -> str | None: + """Get folder for a CSIDL name that does not exist as an environment variable.""" + if csidl_name == "CSIDL_PERSONAL": + return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents") # noqa: PTH118 + + if csidl_name == "CSIDL_DOWNLOADS": + return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Downloads") # noqa: PTH118 + + if csidl_name == "CSIDL_MYPICTURES": + return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Pictures") # noqa: PTH118 + + if csidl_name == "CSIDL_MYVIDEO": + return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Videos") # noqa: PTH118 + + if csidl_name == "CSIDL_MYMUSIC": + return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Music") # noqa: PTH118 + return None + + def get_win_folder_from_registry(csidl_name: str) -> str: - """Get folder from the registry. + """ + Get folder from the registry. - This is a fallback technique at best. I'm not sure if using the - registry for this guarantees us the correct answer for all CSIDL_* - names. + This is a fallback technique at best. I'm not sure if using the registry for these guarantees us the correct answer + for all CSIDL_* names. """ shell_folder_name = { "CSIDL_APPDATA": "AppData", "CSIDL_COMMON_APPDATA": "Common AppData", "CSIDL_LOCAL_APPDATA": "Local AppData", "CSIDL_PERSONAL": "Personal", + "CSIDL_DOWNLOADS": "{374DE290-123F-4565-9164-39C4925E467B}", + "CSIDL_MYPICTURES": "My Pictures", + "CSIDL_MYVIDEO": "My Video", + "CSIDL_MYMUSIC": "My Music", }.get(csidl_name) if shell_folder_name is None: - raise ValueError(f"Unknown CSIDL name: {csidl_name}") + msg = f"Unknown CSIDL name: {csidl_name}" + raise ValueError(msg) if sys.platform != "win32": # only needed for mypy type checker to know that this code runs only on Windows raise NotImplementedError import winreg @@ -155,25 +214,38 @@ def get_win_folder_from_registry(csidl_name: str) -> str: def get_win_folder_via_ctypes(csidl_name: str) -> str: """Get folder with ctypes.""" + # There is no 'CSIDL_DOWNLOADS'. + # Use 'CSIDL_PROFILE' (40) and append the default folder 'Downloads' instead. 
+ # https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid + csidl_const = { "CSIDL_APPDATA": 26, "CSIDL_COMMON_APPDATA": 35, "CSIDL_LOCAL_APPDATA": 28, "CSIDL_PERSONAL": 5, + "CSIDL_MYPICTURES": 39, + "CSIDL_MYVIDEO": 14, + "CSIDL_MYMUSIC": 13, + "CSIDL_DOWNLOADS": 40, + "CSIDL_DESKTOPDIRECTORY": 16, }.get(csidl_name) if csidl_const is None: - raise ValueError(f"Unknown CSIDL name: {csidl_name}") + msg = f"Unknown CSIDL name: {csidl_name}" + raise ValueError(msg) buf = ctypes.create_unicode_buffer(1024) windll = getattr(ctypes, "windll") # noqa: B009 # using getattr to avoid false positive with mypy type checker windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) - # Downgrade to short path name if it has highbit chars. - if any(ord(c) > 255 for c in buf): + # Downgrade to short path name if it has high-bit chars. + if any(ord(c) > 255 for c in buf): # noqa: PLR2004 buf2 = ctypes.create_unicode_buffer(1024) if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): buf = buf2 + if csidl_name == "CSIDL_DOWNLOADS": + return os.path.join(buf.value, "Downloads") # noqa: PTH118 + return buf.value diff --git a/src/pip/_vendor/pygments/__init__.py b/src/pip/_vendor/pygments/__init__.py index d9b0a8dea2e..5b8a3f95483 100644 --- a/src/pip/_vendor/pygments/__init__.py +++ b/src/pip/_vendor/pygments/__init__.py @@ -21,12 +21,12 @@ .. _Pygments master branch: https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from io import StringIO, BytesIO -__version__ = '2.14.0' +__version__ = '2.17.2' __docformat__ = 'restructuredtext' __all__ = ['lex', 'format', 'highlight'] @@ -34,7 +34,9 @@ def lex(code, lexer): """ - Lex ``code`` with ``lexer`` and return an iterable of tokens. + Lex `code` with the `lexer` (must be a `Lexer` instance) + and return an iterable of tokens. Currently, this only calls + `lexer.get_tokens()`. """ try: return lexer.get_tokens(code) @@ -49,11 +51,12 @@ def lex(code, lexer): def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin """ - Format a tokenlist ``tokens`` with the formatter ``formatter``. + Format ``tokens`` (an iterable of tokens) with the formatter ``formatter`` + (a `Formatter` instance). - If ``outfile`` is given and a valid file object (an object - with a ``write`` method), the result will be written to it, otherwise - it is returned as a string. + If ``outfile`` is given and a valid file object (an object with a + ``write`` method), the result will be written to it, otherwise it + is returned as a string. """ try: if not outfile: @@ -73,10 +76,7 @@ def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builti def highlight(code, lexer, formatter, outfile=None): """ - Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``. - - If ``outfile`` is given and a valid file object (an object - with a ``write`` method), the result will be written to it, otherwise - it is returned as a string. + This is the most high-level highlighting function. It combines `lex` and + `format` in one function. 
""" return format(lex(code, lexer), formatter, outfile) diff --git a/src/pip/_vendor/pygments/__main__.py b/src/pip/_vendor/pygments/__main__.py index 90cafd93426..2f7f8cbad05 100644 --- a/src/pip/_vendor/pygments/__main__.py +++ b/src/pip/_vendor/pygments/__main__.py @@ -4,7 +4,7 @@ Main entry point for ``python -m pygments``. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/cmdline.py b/src/pip/_vendor/pygments/cmdline.py index de73b06b4cf..eec1775ba5f 100644 --- a/src/pip/_vendor/pygments/cmdline.py +++ b/src/pip/_vendor/pygments/cmdline.py @@ -4,7 +4,7 @@ Command line interface. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -185,7 +185,7 @@ def main_inner(parser, argns): return 0 if argns.V: - print('Pygments version %s, (c) 2006-2022 by Georg Brandl, Matthäus ' + print('Pygments version %s, (c) 2006-2023 by Georg Brandl, Matthäus ' 'Chajdas and contributors.' % __version__) return 0 diff --git a/src/pip/_vendor/pygments/console.py b/src/pip/_vendor/pygments/console.py index 2ada68e03b3..deb4937f74f 100644 --- a/src/pip/_vendor/pygments/console.py +++ b/src/pip/_vendor/pygments/console.py @@ -4,7 +4,7 @@ Format colored console output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/filter.py b/src/pip/_vendor/pygments/filter.py index e5c96649382..dafa08d1569 100644 --- a/src/pip/_vendor/pygments/filter.py +++ b/src/pip/_vendor/pygments/filter.py @@ -4,7 +4,7 @@ Module that implements the default filter. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/filters/__init__.py b/src/pip/_vendor/pygments/filters/__init__.py index c302a6c0c53..5aa9ecbb80c 100644 --- a/src/pip/_vendor/pygments/filters/__init__.py +++ b/src/pip/_vendor/pygments/filters/__init__.py @@ -5,7 +5,7 @@ Module containing filter lookup functions and default filters. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatter.py b/src/pip/_vendor/pygments/formatter.py index a2349ef8652..3ca4892fa31 100644 --- a/src/pip/_vendor/pygments/formatter.py +++ b/src/pip/_vendor/pygments/formatter.py @@ -4,7 +4,7 @@ Base formatter class. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -26,7 +26,21 @@ class Formatter: """ Converts a token stream to text. - Options accepted: + Formatters should have attributes to help selecting them. These + are similar to the corresponding :class:`~pygments.lexer.Lexer` + attributes. + + .. autoattribute:: name + :no-value: + + .. autoattribute:: aliases + :no-value: + + .. autoattribute:: filenames + :no-value: + + You can pass options as keyword arguments to the constructor. 
+ All formatters accept these basic options: ``style`` The style to use, can be a string or a Style subclass @@ -47,15 +61,19 @@ class Formatter: support (default: None). ``outencoding`` Overrides ``encoding`` if given. + """ - #: Name of the formatter + #: Full name for the formatter, in human-readable form. name = None - #: Shortcuts for the formatter + #: A list of short, unique identifiers that can be used to lookup + #: the formatter from a list, e.g. using :func:`.get_formatter_by_name()`. aliases = [] - #: fn match rules + #: A list of fnmatch patterns that match filenames for which this + #: formatter can produce output. The patterns in this list should be unique + #: among all formatters. filenames = [] #: If True, this formatter outputs Unicode strings when no encoding @@ -63,6 +81,11 @@ class Formatter: unicodeoutput = True def __init__(self, **options): + """ + As with lexers, this constructor takes arbitrary optional arguments, + and if you override it, you should first process your own options, then + call the base class implementation. + """ self.style = _lookup_style(options.get('style', 'default')) self.full = get_bool_opt(options, 'full', False) self.title = options.get('title', '') @@ -75,18 +98,25 @@ def __init__(self, **options): def get_style_defs(self, arg=''): """ - Return the style definitions for the current style as a string. + This method must return statements or declarations suitable to define + the current style for subsequent highlighted text (e.g. CSS classes + in the `HTMLFormatter`). - ``arg`` is an additional argument whose meaning depends on the - formatter used. Note that ``arg`` can also be a list or tuple - for some formatters like the html formatter. + The optional argument `arg` can be used to modify the generation and + is formatter dependent (it is standardized because it can be given on + the command line). + + This method is called by the ``-S`` :doc:`command-line option `, + the `arg` is then given by the ``-a`` option. """ return '' def format(self, tokensource, outfile): """ - Format ``tokensource``, an iterable of ``(tokentype, tokenstring)`` - tuples and write it into ``outfile``. + This method must format the tokens from the `tokensource` iterable and + write the formatted version to the file object `outfile`. + + Formatter options can control how exactly the tokens are converted. """ if self.encoding: # wrap the outfile in a StreamWriter diff --git a/src/pip/_vendor/pygments/formatters/__init__.py b/src/pip/_vendor/pygments/formatters/__init__.py index 7ecf7eee35f..6abb45ac71f 100644 --- a/src/pip/_vendor/pygments/formatters/__init__.py +++ b/src/pip/_vendor/pygments/formatters/__init__.py @@ -4,13 +4,14 @@ Pygments formatters. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" +import re import sys import types -from fnmatch import fnmatch +import fnmatch from os.path import basename from pip._vendor.pygments.formatters._mapping import FORMATTERS @@ -21,6 +22,16 @@ 'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS) _formatter_cache = {} # classes by name +_pattern_cache = {} + + +def _fn_matches(fn, glob): + """Return whether the supplied file name fn matches pattern filename.""" + if glob not in _pattern_cache: + pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob)) + return pattern.match(fn) + return _pattern_cache[glob].match(fn) + def _load_formatters(module_name): """Load a formatter (and all others in the module too).""" @@ -57,9 +68,12 @@ def find_formatter_class(alias): def get_formatter_by_name(_alias, **options): - """Lookup and instantiate a formatter by alias. + """ + Return an instance of a :class:`.Formatter` subclass that has `alias` in its + aliases list. The formatter is given the `options` at its instantiation. - Raises ClassNotFound if not found. + Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that + alias is found. """ cls = find_formatter_class(_alias) if cls is None: @@ -67,19 +81,18 @@ def get_formatter_by_name(_alias, **options): return cls(**options) -def load_formatter_from_file(filename, formattername="CustomFormatter", - **options): - """Load a formatter from a file. - - This method expects a file located relative to the current working - directory, which contains a class named CustomFormatter. By default, - it expects the Formatter to be named CustomFormatter; you can specify - your own class name as the second argument to this function. +def load_formatter_from_file(filename, formattername="CustomFormatter", **options): + """ + Return a `Formatter` subclass instance loaded from the provided file, relative + to the current directory. - Users should be very careful with the input, because this method - is equivalent to running eval on the input file. + The file is expected to contain a Formatter class named ``formattername`` + (by default, CustomFormatter). Users should be very careful with the input, because + this method is equivalent to running ``eval()`` on the input file. The formatter is + given the `options` at its instantiation. - Raises ClassNotFound if there are any problems importing the Formatter. + :exc:`pygments.util.ClassNotFound` is raised if there are any errors loading + the formatter. .. versionadded:: 2.2 """ @@ -104,20 +117,23 @@ def load_formatter_from_file(filename, formattername="CustomFormatter", def get_formatter_for_filename(fn, **options): - """Lookup and instantiate a formatter by filename pattern. + """ + Return a :class:`.Formatter` subclass instance that has a filename pattern + matching `fn`. The formatter is given the `options` at its instantiation. - Raises ClassNotFound if not found. + Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename + is found. 
""" fn = basename(fn) for modname, name, _, filenames, _ in FORMATTERS.values(): for filename in filenames: - if fnmatch(fn, filename): + if _fn_matches(fn, filename): if name not in _formatter_cache: _load_formatters(modname) return _formatter_cache[name](**options) - for cls in find_plugin_formatters(): + for _name, cls in find_plugin_formatters(): for filename in cls.filenames: - if fnmatch(fn, filename): + if _fn_matches(fn, filename): return cls(**options) raise ClassNotFound("no formatter found for file name %r" % fn) diff --git a/src/pip/_vendor/pygments/formatters/_mapping.py b/src/pip/_vendor/pygments/formatters/_mapping.py old mode 100644 new mode 100755 index 6e34f960784..72ca84040b6 --- a/src/pip/_vendor/pygments/formatters/_mapping.py +++ b/src/pip/_vendor/pygments/formatters/_mapping.py @@ -1,12 +1,12 @@ # Automatically generated by scripts/gen_mapfiles.py. -# DO NOT EDIT BY HAND; run `make mapfiles` instead. +# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead. FORMATTERS = { 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'), 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), 'GroffFormatter': ('pygments.formatters.groff', 'groff', ('groff', 'troff', 'roff'), (), 'Format tokens with groff escapes to change their color and font style.'), - 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ```` tags within a ``
`` tag, wrapped in a ``
`` tag. The ``
``'s CSS class can be set by the `cssclass` option."), + 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ```` tags. By default, the content is enclosed in a ``
`` tag, itself wrapped in a ``
`` tag (but see the `nowrap` option). The ``
``'s CSS class can be set by the `cssclass` option."), 'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'), 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), diff --git a/src/pip/_vendor/pygments/formatters/bbcode.py b/src/pip/_vendor/pygments/formatters/bbcode.py index 2be2b4e3129..c4db8f4ef21 100644 --- a/src/pip/_vendor/pygments/formatters/bbcode.py +++ b/src/pip/_vendor/pygments/formatters/bbcode.py @@ -4,7 +4,7 @@ BBcode formatter. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/groff.py b/src/pip/_vendor/pygments/formatters/groff.py index f3dcbce9b9f..30a528e668f 100644 --- a/src/pip/_vendor/pygments/formatters/groff.py +++ b/src/pip/_vendor/pygments/formatters/groff.py @@ -4,7 +4,7 @@ Formatter for groff output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -84,7 +84,7 @@ def _define_colors(self, outfile): if ndef['color'] is not None: colors.add(ndef['color']) - for color in colors: + for color in sorted(colors): outfile.write('.defcolor ' + color + ' rgb #' + color + '\n') diff --git a/src/pip/_vendor/pygments/formatters/html.py b/src/pip/_vendor/pygments/formatters/html.py index f22b200c0e6..0cadcb228e7 100644 --- a/src/pip/_vendor/pygments/formatters/html.py +++ b/src/pip/_vendor/pygments/formatters/html.py @@ -4,7 +4,7 @@ Formatter for HTML output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -62,7 +62,7 @@ def _get_ttype_class(ttype): CSSFILE_TEMPLATE = '''\ /* generated by Pygments -Copyright 2006-2022 by the Pygments team. +Copyright 2006-2023 by the Pygments team. Licensed under the BSD license, see LICENSE for details. */ %(styledefs)s @@ -73,7 +73,7 @@ def _get_ttype_class(ttype): "http://www.w3.org/TR/html4/strict.dtd"> @@ -112,9 +112,9 @@ def _get_ttype_class(ttype): class HtmlFormatter(Formatter): r""" - Format tokens as HTML 4 ```` tags within a ``
`` tag, wrapped
-    in a ``
`` tag. The ``
``'s CSS class can be set by the `cssclass` - option. + Format tokens as HTML 4 ```` tags. By default, the content is enclosed + in a ``
`` tag, itself wrapped in a ``
`` tag (but see the `nowrap` option). + The ``
``'s CSS class can be set by the `cssclass` option. If the `linenos` option is set to ``"table"``, the ``
`` is
     additionally wrapped inside a ```` which has one row and two
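For reference, the reworded default-wrapping behaviour above is directly observable through the public Pygments API. A minimal sketch (import paths assume pip's vendored copy; a regular ``pygments`` install behaves identically)::

    # Sketch: HtmlFormatter encloses output in <div class="highlight"><pre>
    # by default; nowrap=True suppresses both wrapper tags.
    from pip._vendor.pygments import highlight
    from pip._vendor.pygments.formatters import HtmlFormatter
    from pip._vendor.pygments.lexers import PythonLexer

    wrapped = highlight("print('hi')", PythonLexer(), HtmlFormatter())
    bare = highlight("print('hi')", PythonLexer(), HtmlFormatter(nowrap=True))
    assert wrapped.startswith('<div class="highlight"><pre>')
    assert "<div" not in bare and "<pre" not in bare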
@@ -140,8 +140,6 @@ class HtmlFormatter(Formatter):
 
     (whitespace added to improve clarity).
 
-    Wrapping can be disabled using the `nowrap` option.
-
     A list of lines can be specified using the `hl_lines` option to make these
     lines highlighted (as of Pygments 0.11).
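The `hl_lines` option kept in the context above takes 1-based line numbers and, in the default CSS-class mode, marks each selected line with a ``hll`` span. A small usage sketch under the same assumptions as the previous snippet::

    # Sketch: source lines 1 and 3 are wrapped in <span class="hll"> markers.
    from pip._vendor.pygments import highlight
    from pip._vendor.pygments.formatters import HtmlFormatter
    from pip._vendor.pygments.lexers import PythonLexer

    html = highlight("a = 1\nb = 2\nc = 3\n", PythonLexer(),
                     HtmlFormatter(hl_lines=[1, 3]))
    assert html.count('<span class="hll">') == 2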
 
@@ -187,8 +185,8 @@ class HtmlFormatter(Formatter):
     Additional options accepted:
 
     `nowrap`
-        If set to ``True``, don't wrap the tokens at all, not even inside a ``<pre>``
-        tag. This disables most other options (default: ``False``).
+        If set to ``True``, don't add a ``<pre>`` and a ``<div>`` tag + around the tokens. This disables most other options (default: ``False``). `full` Tells the formatter to output a "full" document, i.e. a complete @@ -325,6 +323,7 @@ class ``"special"`` (default: ``0``). If set to the path of a ctags file, wrap names in anchor tags that link to their definitions. `lineanchors` should be used, and the tags file should specify line numbers (see the `-n` option to ctags). + The tags file is assumed to be encoded in UTF-8. .. versionadded:: 1.6 @@ -635,7 +634,7 @@ def _wrap_full(self, inner, outfile): # write CSS file only if noclobber_cssfile isn't given as an option. try: if not os.path.exists(cssfilename) or not self.noclobber_cssfile: - with open(cssfilename, "w") as cf: + with open(cssfilename, "w", encoding="utf-8") as cf: cf.write(CSSFILE_TEMPLATE % {'styledefs': self.get_style_defs('body')}) except OSError as err: @@ -721,7 +720,7 @@ def _wrap_tablelinenos(self, inner): yield 0, dummyoutfile.getvalue() yield 0, '</td></tr></table>
' - + def _wrap_inlinelinenos(self, inner): # need a list of lines since we need the width of a single number :( @@ -910,7 +909,7 @@ def _format_lines(self, tokensource): def _lookup_ctag(self, token): entry = ctags.TagEntry() if self._ctags.find(entry, token.encode(), 0): - return entry['file'], entry['lineNumber'] + return entry['file'].decode(), entry['lineNumber'] else: return None, None @@ -946,9 +945,9 @@ def wrap(self, source): output = source if self.wrapcode: output = self._wrap_code(output) - + output = self._wrap_pre(output) - + return output def format_unencoded(self, tokensource, outfile): diff --git a/src/pip/_vendor/pygments/formatters/img.py b/src/pip/_vendor/pygments/formatters/img.py index 0f36a32ba33..9e66b669162 100644 --- a/src/pip/_vendor/pygments/formatters/img.py +++ b/src/pip/_vendor/pygments/formatters/img.py @@ -4,10 +4,9 @@ Formatter for Pixmap output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ - import os import sys @@ -68,6 +67,15 @@ def __init__(self, font_name, font_size=14): self.font_size = font_size self.fonts = {} self.encoding = None + self.variable = False + if hasattr(font_name, 'read') or os.path.isfile(font_name): + font = ImageFont.truetype(font_name, self.font_size) + self.variable = True + for style in STYLES: + self.fonts[style] = font + + return + if sys.platform.startswith('win'): if not font_name: self.font_name = DEFAULT_FONT_NAME_WIN @@ -223,14 +231,43 @@ def get_font(self, bold, oblique): Get the font based on bold and italic flags. """ if bold and oblique: + if self.variable: + return self.get_style('BOLDITALIC') + return self.fonts['BOLDITALIC'] elif bold: + if self.variable: + return self.get_style('BOLD') + return self.fonts['BOLD'] elif oblique: + if self.variable: + return self.get_style('ITALIC') + return self.fonts['ITALIC'] else: + if self.variable: + return self.get_style('NORMAL') + return self.fonts['NORMAL'] + def get_style(self, style): + """ + Get the specified style of the font if it is a variable font. + If not found, return the normal font. + """ + font = self.fonts[style] + for style_name in STYLES[style]: + try: + font.set_variation_by_name(style_name) + return font + except ValueError: + pass + except OSError: + return font + + return font + class ImageFormatter(Formatter): """ @@ -258,6 +295,8 @@ class ImageFormatter(Formatter): The font name to be used as the base font from which others, such as bold and italic fonts will be generated. This really should be a monospace font to look sane. + If a filename or a file-like object is specified, the user must + provide different styles of the font. Default: "Courier New" on Windows, "Menlo" on Mac OS, and "DejaVu Sans Mono" on \\*nix diff --git a/src/pip/_vendor/pygments/formatters/irc.py b/src/pip/_vendor/pygments/formatters/irc.py index 53e19b83d1e..2144d439e0f 100644 --- a/src/pip/_vendor/pygments/formatters/irc.py +++ b/src/pip/_vendor/pygments/formatters/irc.py @@ -4,7 +4,7 @@ Formatter for IRC output - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/pip/_vendor/pygments/formatters/latex.py b/src/pip/_vendor/pygments/formatters/latex.py index 4a7375a5ceb..ca539b40f6a 100644 --- a/src/pip/_vendor/pygments/formatters/latex.py +++ b/src/pip/_vendor/pygments/formatters/latex.py @@ -4,7 +4,7 @@ Formatter for LaTeX fancyvrb output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/other.py b/src/pip/_vendor/pygments/formatters/other.py index 1e39cd42a8c..990ead48021 100644 --- a/src/pip/_vendor/pygments/formatters/other.py +++ b/src/pip/_vendor/pygments/formatters/other.py @@ -4,7 +4,7 @@ Other formatters: NullFormatter, RawTokenFormatter. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/pangomarkup.py b/src/pip/_vendor/pygments/formatters/pangomarkup.py index bd00866b8b9..6bb325d0788 100644 --- a/src/pip/_vendor/pygments/formatters/pangomarkup.py +++ b/src/pip/_vendor/pygments/formatters/pangomarkup.py @@ -4,7 +4,7 @@ Formatter for Pango markup output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/rtf.py b/src/pip/_vendor/pygments/formatters/rtf.py index 4114d1688c3..125189c6fa5 100644 --- a/src/pip/_vendor/pygments/formatters/rtf.py +++ b/src/pip/_vendor/pygments/formatters/rtf.py @@ -4,7 +4,7 @@ A formatter that generates RTF files. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/svg.py b/src/pip/_vendor/pygments/formatters/svg.py index 075150a4b58..a8727ed8592 100644 --- a/src/pip/_vendor/pygments/formatters/svg.py +++ b/src/pip/_vendor/pygments/formatters/svg.py @@ -4,7 +4,7 @@ Formatter for SVG output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/terminal.py b/src/pip/_vendor/pygments/formatters/terminal.py index e0bda16a236..abb8770811f 100644 --- a/src/pip/_vendor/pygments/formatters/terminal.py +++ b/src/pip/_vendor/pygments/formatters/terminal.py @@ -4,7 +4,7 @@ Formatter for terminal output with ANSI sequences. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/terminal256.py b/src/pip/_vendor/pygments/formatters/terminal256.py index 201b3c32832..0cfe5d1612e 100644 --- a/src/pip/_vendor/pygments/formatters/terminal256.py +++ b/src/pip/_vendor/pygments/formatters/terminal256.py @@ -10,7 +10,7 @@ Formatter version 1. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/pip/_vendor/pygments/lexer.py b/src/pip/_vendor/pygments/lexer.py index 74ab9b9088f..c6634c91daa 100644 --- a/src/pip/_vendor/pygments/lexer.py +++ b/src/pip/_vendor/pygments/lexer.py @@ -4,7 +4,7 @@ Base lexer classes. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -50,7 +50,36 @@ class Lexer(metaclass=LexerMeta): """ Lexer for a specific language. - Basic options recognized: + See also :doc:`lexerdevelopment`, a high-level guide to writing + lexers. + + Lexer classes have attributes used for choosing the most appropriate + lexer based on various criteria. + + .. autoattribute:: name + :no-value: + .. autoattribute:: aliases + :no-value: + .. autoattribute:: filenames + :no-value: + .. autoattribute:: alias_filenames + .. autoattribute:: mimetypes + :no-value: + .. autoattribute:: priority + + Lexers included in Pygments should have an additional attribute: + + .. autoattribute:: url + :no-value: + + Lexers included in Pygments may have additional attributes: + + .. autoattribute:: _example + :no-value: + + You can pass options to the constructor. The basic options recognized + by all lexers and processed by the base `Lexer` class are: + ``stripnl`` Strip leading and trailing newlines from the input (default: True). ``stripall`` @@ -74,28 +103,59 @@ class Lexer(metaclass=LexerMeta): Overrides the ``encoding`` if given. """ - #: Name of the lexer + #: Full name of the lexer, in human-readable form name = None - #: URL of the language specification/definition - url = None - - #: Shortcuts for the lexer + #: A list of short, unique identifiers that can be used to look + #: up the lexer from a list, e.g., using `get_lexer_by_name()`. aliases = [] - #: File name globs + #: A list of `fnmatch` patterns that match filenames which contain + #: content for this lexer. The patterns in this list should be unique among + #: all lexers. filenames = [] - #: Secondary file name globs + #: A list of `fnmatch` patterns that match filenames which may or may not + #: contain content for this lexer. This list is used by the + #: :func:`.guess_lexer_for_filename()` function, to determine which lexers + #: are then included in guessing the correct one. That means that + #: e.g. every lexer for HTML and a template language should include + #: ``\*.html`` in this list. alias_filenames = [] - #: MIME types + #: A list of MIME types for content that can be lexed with this lexer. mimetypes = [] #: Priority, should multiple lexers match and no content is provided priority = 0 + #: URL of the language specification/definition. Used in the Pygments + #: documentation. + url = None + + #: Example file name. Relative to the ``tests/examplefiles`` directory. + #: This is used by the documentation generator to show an example. + _example = None + def __init__(self, **options): + """ + This constructor takes arbitrary options as keyword arguments. + Every subclass must first process its own options and then call + the `Lexer` constructor, since it processes the basic + options like `stripnl`. + + An example looks like this: + + .. sourcecode:: python + + def __init__(self, **options): + self.compress = options.get('compress', '') + Lexer.__init__(self, **options) + + As these options must all be specifiable as strings (due to the + command line usage), there are various utility functions + available to help with that, see `Utilities`_. 
+ """ self.options = options self.stripnl = get_bool_opt(options, 'stripnl', True) self.stripall = get_bool_opt(options, 'stripall', False) @@ -124,10 +184,13 @@ def add_filter(self, filter_, **options): def analyse_text(text): """ - Has to return a float between ``0`` and ``1`` that indicates - if a lexer wants to highlight this text. Used by ``guess_lexer``. - If this method returns ``0`` it won't highlight it in any case, if - it returns ``1`` highlighting with this lexer is guaranteed. + A static method which is called for lexer guessing. + + It should analyse the text and return a float in the range + from ``0.0`` to ``1.0``. If it returns ``0.0``, the lexer + will not be selected as the most probable one, if it returns + ``1.0``, it will be selected immediately. This is used by + `guess_lexer`. The `LexerMeta` metaclass automatically wraps this function so that it works like a static method (no ``self`` or ``cls`` @@ -136,15 +199,9 @@ def analyse_text(text): it's the same as if the return values was ``0.0``. """ - def get_tokens(self, text, unfiltered=False): - """ - Return an iterable of (tokentype, value) pairs generated from - `text`. If `unfiltered` is set to `True`, the filtering mechanism - is bypassed even if filters are defined. + def _preprocess_lexer_input(self, text): + """Apply preprocessing such as decoding the input, removing BOM and normalizing newlines.""" - Also preprocess the text, i.e. expand tabs and strip it if - wanted and applies registered filters. - """ if not isinstance(text, str): if self.encoding == 'guess': text, _ = guess_decode(text) @@ -187,6 +244,24 @@ def get_tokens(self, text, unfiltered=False): if self.ensurenl and not text.endswith('\n'): text += '\n' + return text + + def get_tokens(self, text, unfiltered=False): + """ + This method is the basic interface of a lexer. It is called by + the `highlight()` function. It must process the text and return an + iterable of ``(tokentype, value)`` pairs from `text`. + + Normally, you don't need to override this method. The default + implementation processes the options recognized by all lexers + (`stripnl`, `stripall` and so on), and then yields all tokens + from `get_tokens_unprocessed()`, with the ``index`` dropped. + + If `unfiltered` is set to `True`, the filtering mechanism is + bypassed even if filters are defined. + """ + text = self._preprocess_lexer_input(text) + def streamer(): for _, t, v in self.get_tokens_unprocessed(text): yield t, v @@ -197,11 +272,12 @@ def streamer(): def get_tokens_unprocessed(self, text): """ - Return an iterable of (index, tokentype, value) pairs where "index" - is the starting position of the token within the input text. + This method should process the text and return an iterable of + ``(index, tokentype, value)`` tuples where ``index`` is the starting + position of the token within the input text. - In subclasses, implement this method as a generator to - maximize effectiveness. + It must be overridden by subclasses. It is recommended to + implement it as a generator to maximize effectiveness. """ raise NotImplementedError diff --git a/src/pip/_vendor/pygments/lexers/__init__.py b/src/pip/_vendor/pygments/lexers/__init__.py index e75a05791e2..0c176dfbfd6 100644 --- a/src/pip/_vendor/pygments/lexers/__init__.py +++ b/src/pip/_vendor/pygments/lexers/__init__.py @@ -4,13 +4,14 @@ Pygments lexers. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" +import re import sys import types -from fnmatch import fnmatch +import fnmatch from os.path import basename from pip._vendor.pygments.lexers._mapping import LEXERS @@ -21,12 +22,23 @@ COMPAT = { 'Python3Lexer': 'PythonLexer', 'Python3TracebackLexer': 'PythonTracebackLexer', + 'LeanLexer': 'Lean3Lexer', } __all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class', 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT) _lexer_cache = {} +_pattern_cache = {} + + +def _fn_matches(fn, glob): + """Return whether the supplied file name fn matches pattern filename.""" + if glob not in _pattern_cache: + pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob)) + return pattern.match(fn) + return _pattern_cache[glob].match(fn) + def _load_lexers(module_name): """Load a lexer (and all others in the module too).""" @@ -51,9 +63,9 @@ def get_all_lexers(plugins=True): def find_lexer_class(name): - """Lookup a lexer class by name. - - Return None if not found. + """ + Return the `Lexer` subclass that with the *name* attribute as given by + the *name* argument. """ if name in _lexer_cache: return _lexer_cache[name] @@ -69,10 +81,15 @@ def find_lexer_class(name): def find_lexer_class_by_name(_alias): - """Lookup a lexer class by alias. + """ + Return the `Lexer` subclass that has `alias` in its aliases list, without + instantiating it. Like `get_lexer_by_name`, but does not instantiate the class. + Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is + found. + .. versionadded:: 2.2 """ if not _alias: @@ -91,9 +108,13 @@ def find_lexer_class_by_name(_alias): def get_lexer_by_name(_alias, **options): - """Get a lexer by an alias. + """ + Return an instance of a `Lexer` subclass that has `alias` in its + aliases list. The lexer is given the `options` at its + instantiation. - Raises ClassNotFound if not found. + Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is + found. """ if not _alias: raise ClassNotFound('no lexer for alias %r found' % _alias) @@ -158,13 +179,13 @@ def find_lexer_class_for_filename(_fn, code=None): fn = basename(_fn) for modname, name, _, filenames, _ in LEXERS.values(): for filename in filenames: - if fnmatch(fn, filename): + if _fn_matches(fn, filename): if name not in _lexer_cache: _load_lexers(modname) matches.append((_lexer_cache[name], filename)) for cls in find_plugin_lexers(): for filename in cls.filenames: - if fnmatch(fn, filename): + if _fn_matches(fn, filename): matches.append((cls, filename)) if isinstance(code, bytes): @@ -192,10 +213,15 @@ def get_rating(info): def get_lexer_for_filename(_fn, code=None, **options): """Get a lexer for a filename. - If multiple lexers match the filename pattern, use ``analyse_text()`` to - figure out which one is more appropriate. + Return a `Lexer` subclass instance that has a filename pattern + matching `fn`. The lexer is given the `options` at its + instantiation. - Raises ClassNotFound if not found. + Raise :exc:`pygments.util.ClassNotFound` if no lexer for that filename + is found. + + If multiple lexers match the filename pattern, use their ``analyse_text()`` + methods to figure out which one is more appropriate. """ res = find_lexer_class_for_filename(_fn, code) if not res: @@ -204,9 +230,12 @@ def get_lexer_for_filename(_fn, code=None, **options): def get_lexer_for_mimetype(_mime, **options): - """Get a lexer for a mimetype. + """ + Return a `Lexer` subclass instance that has `mime` in its mimetype + list. 
The lexer is given the `options` at its instantiation. - Raises ClassNotFound if not found. + Will raise :exc:`pygments.util.ClassNotFound` if no lexer for that mimetype + is found. """ for modname, name, _, _, mimetypes in LEXERS.values(): if _mime in mimetypes: @@ -232,30 +261,22 @@ def _iter_lexerclasses(plugins=True): def guess_lexer_for_filename(_fn, _text, **options): """ - Lookup all lexers that handle those filenames primary (``filenames``) - or secondary (``alias_filenames``). Then run a text analysis for those - lexers and choose the best result. - - usage:: - - >>> from pygments.lexers import guess_lexer_for_filename - >>> guess_lexer_for_filename('hello.html', '<%= @foo %>') - - >>> guess_lexer_for_filename('hello.html', '<h1>{{ title|e }}</h1>
') - - >>> guess_lexer_for_filename('style.css', 'a { color: }') - + As :func:`guess_lexer()`, but only lexers which have a pattern in `filenames` + or `alias_filenames` that matches `filename` are taken into consideration. + + :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can + handle the content. """ fn = basename(_fn) primary = {} matching_lexers = set() for lexer in _iter_lexerclasses(): for filename in lexer.filenames: - if fnmatch(fn, filename): + if _fn_matches(fn, filename): matching_lexers.add(lexer) primary[lexer] = True for filename in lexer.alias_filenames: - if fnmatch(fn, filename): + if _fn_matches(fn, filename): matching_lexers.add(lexer) primary[lexer] = False if not matching_lexers: @@ -282,7 +303,15 @@ def type_sort(t): def guess_lexer(_text, **options): - """Guess a lexer by strong distinctions in the text (eg, shebang).""" + """ + Return a `Lexer` subclass instance that's guessed from the text in + `text`. For that, the :meth:`.analyse_text()` method of every known lexer + class is called with the text as argument, and the lexer which returned the + highest value will be instantiated and returned. + + :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can + handle the content. + """ if not isinstance(_text, str): inencoding = options.get('inencoding', options.get('encoding')) diff --git a/src/pip/_vendor/pygments/lexers/_mapping.py b/src/pip/_vendor/pygments/lexers/_mapping.py index 1eaaf56e9c2..1ff2b282a12 100644 --- a/src/pip/_vendor/pygments/lexers/_mapping.py +++ b/src/pip/_vendor/pygments/lexers/_mapping.py @@ -1,5 +1,5 @@ # Automatically generated by scripts/gen_mapfiles.py. -# DO NOT EDIT BY HAND; run `make mapfiles` instead. +# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead. LEXERS = { 'ABAPLexer': ('pip._vendor.pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)), @@ -31,7 +31,8 @@ 'ArduinoLexer': ('pip._vendor.pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)), 'ArrowLexer': ('pip._vendor.pygments.lexers.arrow', 'Arrow', ('arrow',), ('*.arw',), ()), 'ArturoLexer': ('pip._vendor.pygments.lexers.arturo', 'Arturo', ('arturo', 'art'), ('*.art',), ()), - 'AscLexer': ('pip._vendor.pygments.lexers.asc', 'ASCII armored', ('asc', 'pem'), ('*.asc', '*.pem', 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa'), ('application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature')), + 'AscLexer': ('pip._vendor.pygments.lexers.asc', 'ASCII armored', ('asc', 'pem'), ('*.asc', '*.pem', 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa'), ('application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature', 'application/pem-certificate-chain')), + 'Asn1Lexer': ('pip._vendor.pygments.lexers.asn1', 'ASN.1', ('asn1',), ('*.asn1',), ()), 'AspectJLexer': ('pip._vendor.pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)), 'AsymptoteLexer': ('pip._vendor.pygments.lexers.graphics', 'Asymptote', ('asymptote', 'asy'), ('*.asy',), ('text/x-asymptote',)), 'AugeasLexer': ('pip._vendor.pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()), @@ -41,6 +42,7 @@ 'BBCBasicLexer': ('pip._vendor.pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()), 'BBCodeLexer': ('pip._vendor.pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)), 'BCLexer': ('pip._vendor.pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()), + 'BQNLexer': 
('pip._vendor.pygments.lexers.bqn', 'BQN', ('bqn',), ('*.bqn',), ()), 'BSTLexer': ('pip._vendor.pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()), 'BareLexer': ('pip._vendor.pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()), 'BaseMakefileLexer': ('pip._vendor.pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()), @@ -53,6 +55,7 @@ 'BibTeXLexer': ('pip._vendor.pygments.lexers.bibtex', 'BibTeX', ('bibtex', 'bib'), ('*.bib',), ('text/x-bibtex',)), 'BlitzBasicLexer': ('pip._vendor.pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), 'BlitzMaxLexer': ('pip._vendor.pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)), + 'BlueprintLexer': ('pip._vendor.pygments.lexers.blueprint', 'Blueprint', ('blueprint',), ('*.blp',), ('text/x-blueprint',)), 'BnfLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)), 'BoaLexer': ('pip._vendor.pygments.lexers.boa', 'Boa', ('boa',), ('*.boa',), ()), 'BooLexer': ('pip._vendor.pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)), @@ -71,6 +74,7 @@ 'CadlLexer': ('pip._vendor.pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()), 'CapDLLexer': ('pip._vendor.pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()), 'CapnProtoLexer': ('pip._vendor.pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()), + 'CarbonLexer': ('pip._vendor.pygments.lexers.carbon', 'Carbon', ('carbon',), ('*.carbon',), ('text/x-carbon',)), 'CbmBasicV2Lexer': ('pip._vendor.pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()), 'CddlLexer': ('pip._vendor.pygments.lexers.cddl', 'CDDL', ('cddl',), ('*.cddl',), ('text/x-cddl',)), 'CeylonLexer': ('pip._vendor.pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)), @@ -121,12 +125,15 @@ 'DarcsPatchLexer': ('pip._vendor.pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), 'DartLexer': ('pip._vendor.pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), 'Dasm16Lexer': ('pip._vendor.pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)), + 'DaxLexer': ('pip._vendor.pygments.lexers.dax', 'Dax', ('dax',), ('*.dax',), ()), 'DebianControlLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()), 'DelphiLexer': ('pip._vendor.pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)), + 'DesktopLexer': ('pip._vendor.pygments.lexers.configs', 'Desktop file', ('desktop',), ('*.desktop',), ()), 'DevicetreeLexer': ('pip._vendor.pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)), 'DgLexer': ('pip._vendor.pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), 'DiffLexer': ('pip._vendor.pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')), 'DjangoLexer': ('pip._vendor.pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')), + 'DnsZoneLexer': ('pip._vendor.pygments.lexers.dns', 'Zone', ('zone',), ('*.zone',), ('text/dns',)), 'DockerLexer': ('pip._vendor.pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)), 'DtdLexer': 
('pip._vendor.pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)), 'DuelLexer': ('pip._vendor.pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')), @@ -188,6 +195,7 @@ 'GoodDataCLLexer': ('pip._vendor.pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)), 'GosuLexer': ('pip._vendor.pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)), 'GosuTemplateLexer': ('pip._vendor.pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)), + 'GraphQLLexer': ('pip._vendor.pygments.lexers.graphql', 'GraphQL', ('graphql',), ('*.graphql',), ()), 'GraphvizLexer': ('pip._vendor.pygments.lexers.graphviz', 'Graphviz', ('graphviz', 'dot'), ('*.gv', '*.dot'), ('text/x-graphviz', 'text/vnd.graphviz')), 'GroffLexer': ('pip._vendor.pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1-9]', '*.man', '*.1p', '*.3pm'), ('application/x-troff', 'text/troff')), 'GroovyLexer': ('pip._vendor.pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)), @@ -217,7 +225,7 @@ 'Inform6Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()), 'Inform6TemplateLexer': ('pip._vendor.pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()), 'Inform7Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()), - 'IniLexer': ('pip._vendor.pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf', '.editorconfig', '*.service', '*.socket', '*.device', '*.mount', '*.automount', '*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope'), ('text/x-ini', 'text/inf')), + 'IniLexer': ('pip._vendor.pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf', '.editorconfig'), ('text/x-ini', 'text/inf')), 'IoLexer': ('pip._vendor.pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)), 'IokeLexer': ('pip._vendor.pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)), 'IrcLogsLexer': ('pip._vendor.pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), @@ -239,9 +247,10 @@ 'JsgfLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')), 'JsonBareObjectLexer': ('pip._vendor.pygments.lexers.data', 'JSONBareObject', (), (), ()), 'JsonLdLexer': ('pip._vendor.pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)), - 'JsonLexer': ('pip._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')), + 'JsonLexer': ('pip._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', '*.jsonl', '*.ndjson', 'Pipfile.lock'), ('application/json', 'application/json-object', 'application/x-ndjson', 'application/jsonl', 'application/json-seq')), 'JsonnetLexer': ('pip._vendor.pygments.lexers.jsonnet', 'Jsonnet', ('jsonnet',), ('*.jsonnet', '*.libsonnet'), ()), 'JspLexer': ('pip._vendor.pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)), + 'JsxLexer': ('pip._vendor.pygments.lexers.jsx', 'JSX', ('jsx', 'react'), ('*.jsx', '*.react'), ('text/jsx', 'text/typescript-jsx')), 'JuliaConsoleLexer': 
('pip._vendor.pygments.lexers.julia', 'Julia console', ('jlcon', 'julia-repl'), (), ()), 'JuliaLexer': ('pip._vendor.pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), 'JuttleLexer': ('pip._vendor.pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')), @@ -252,13 +261,16 @@ 'KokaLexer': ('pip._vendor.pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), 'KotlinLexer': ('pip._vendor.pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)), 'KuinLexer': ('pip._vendor.pygments.lexers.kuin', 'Kuin', ('kuin',), ('*.kn',), ()), + 'KustoLexer': ('pip._vendor.pygments.lexers.kusto', 'Kusto', ('kql', 'kusto'), ('*.kql', '*.kusto', '.csl'), ()), 'LSLLexer': ('pip._vendor.pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)), 'LassoCssLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)), 'LassoHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')), 'LassoJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Lasso', ('javascript+lasso', 'js+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')), 'LassoLexer': ('pip._vendor.pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)), 'LassoXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)), - 'LeanLexer': ('pip._vendor.pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)), + 'LdaprcLexer': ('pip._vendor.pygments.lexers.ldap', 'LDAP configuration file', ('ldapconf', 'ldaprc'), ('.ldaprc', 'ldaprc', 'ldap.conf'), ('text/x-ldapconf',)), + 'LdifLexer': ('pip._vendor.pygments.lexers.ldap', 'LDIF', ('ldif',), ('*.ldif',), ('text/x-ldif',)), + 'Lean3Lexer': ('pip._vendor.pygments.lexers.lean', 'Lean', ('lean', 'lean3'), ('*.lean',), ('text/x-lean', 'text/x-lean3')), 'LessCssLexer': ('pip._vendor.pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)), 'LighttpdConfLexer': ('pip._vendor.pygments.lexers.configs', 'Lighttpd configuration file', ('lighttpd', 'lighty'), ('lighttpd.conf',), ('text/x-lighttpd-conf',)), 'LilyPondLexer': ('pip._vendor.pygments.lexers.lilypond', 'LilyPond', ('lilypond',), ('*.ly',), ()), @@ -349,6 +361,7 @@ 'OocLexer': ('pip._vendor.pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), 'OpaLexer': ('pip._vendor.pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), 'OpenEdgeLexer': ('pip._vendor.pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), + 'OpenScadLexer': ('pip._vendor.pygments.lexers.openscad', 'OpenSCAD', ('openscad',), ('*.scad',), ('application/x-openscad',)), 'OutputLexer': ('pip._vendor.pygments.lexers.special', 'Text output', ('output',), (), ()), 'PacmanConfLexer': ('pip._vendor.pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()), 'PanLexer': ('pip._vendor.pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()), @@ -368,6 +381,7 @@ 'PortugolLexer': ('pip._vendor.pygments.lexers.pascal', 'Portugol', ('portugol',), ('*.alg', '*.portugol'), ()), 'PostScriptLexer': 
('pip._vendor.pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), 'PostgresConsoleLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), + 'PostgresExplainLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL EXPLAIN dialect', ('postgres-explain',), ('*.explain',), ('text/x-postgresql-explain',)), 'PostgresLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), 'PovrayLexer': ('pip._vendor.pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)), 'PowerShellLexer': ('pip._vendor.pygments.lexers.shell', 'PowerShell', ('powershell', 'pwsh', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)), @@ -378,14 +392,16 @@ 'PromQLLexer': ('pip._vendor.pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()), 'PropertiesLexer': ('pip._vendor.pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), 'ProtoBufLexer': ('pip._vendor.pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), + 'PrqlLexer': ('pip._vendor.pygments.lexers.prql', 'PRQL', ('prql',), ('*.prql',), ('application/prql', 'application/x-prql')), 'PsyshConsoleLexer': ('pip._vendor.pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()), + 'PtxLexer': ('pip._vendor.pygments.lexers.ptx', 'PTX', ('ptx',), ('*.ptx',), ('text/x-ptx',)), 'PugLexer': ('pip._vendor.pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')), 'PuppetLexer': ('pip._vendor.pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()), 'PyPyLogLexer': ('pip._vendor.pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)), 'Python2Lexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')), 'Python2TracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)), 'PythonConsoleLexer': ('pip._vendor.pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), - 'PythonLexer': ('pip._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.pyi', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')), + 'PythonLexer': ('pip._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark'), ('*.py', '*.pyw', '*.pyi', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')), 'PythonTracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')), 'PythonUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'Python+UL4', ('py+ul4',), ('*.pyul4',), ()), 'QBasicLexer': ('pip._vendor.pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), @@ -474,9 +490,10 @@ 'SwiftLexer': 
('pip._vendor.pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), 'SwigLexer': ('pip._vendor.pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), 'SystemVerilogLexer': ('pip._vendor.pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), + 'SystemdLexer': ('pip._vendor.pygments.lexers.configs', 'Systemd', ('systemd',), ('*.service', '*.socket', '*.device', '*.mount', '*.automount', '*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope'), ()), 'TAPLexer': ('pip._vendor.pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()), 'TNTLexer': ('pip._vendor.pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()), - 'TOMLLexer': ('pip._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ()), + 'TOMLLexer': ('pip._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ('application/toml',)), 'Tads3Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()), 'TalLexer': ('pip._vendor.pygments.lexers.tal', 'Tal', ('tal', 'uxntal'), ('*.tal',), ('text/x-uxntal',)), 'TasmLexer': ('pip._vendor.pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)), @@ -488,13 +505,14 @@ 'TeraTermLexer': ('pip._vendor.pygments.lexers.teraterm', 'Tera Term macro', ('teratermmacro', 'teraterm', 'ttl'), ('*.ttl',), ('text/x-teratermmacro',)), 'TermcapLexer': ('pip._vendor.pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()), 'TerminfoLexer': ('pip._vendor.pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()), - 'TerraformLexer': ('pip._vendor.pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')), + 'TerraformLexer': ('pip._vendor.pygments.lexers.configs', 'Terraform', ('terraform', 'tf', 'hcl'), ('*.tf', '*.hcl'), ('application/x-tf', 'application/x-terraform')), 'TexLexer': ('pip._vendor.pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), 'TextLexer': ('pip._vendor.pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), 'ThingsDBLexer': ('pip._vendor.pygments.lexers.thingsdb', 'ThingsDB', ('ti', 'thingsdb'), ('*.ti',), ()), 'ThriftLexer': ('pip._vendor.pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)), 'TiddlyWiki5Lexer': ('pip._vendor.pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)), 'TlbLexer': ('pip._vendor.pygments.lexers.tlb', 'Tl-b', ('tlb',), ('*.tlb',), ()), + 'TlsLexer': ('pip._vendor.pygments.lexers.tls', 'TLS Presentation Language', ('tls',), (), ()), 'TodotxtLexer': ('pip._vendor.pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), 'TransactSqlLexer': ('pip._vendor.pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)), 'TreetopLexer': ('pip._vendor.pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), @@ -510,6 +528,7 @@ 'UniconLexer': ('pip._vendor.pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)), 'UnixConfigLexer': ('pip._vendor.pygments.lexers.configs', 'Unix/Linux config files', ('unixconfig', 'linuxconfig'), (), ()), 'UrbiscriptLexer': ('pip._vendor.pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), 
('application/x-urbiscript',)), + 'UrlEncodedLexer': ('pip._vendor.pygments.lexers.html', 'urlencoded', ('urlencoded',), (), ('application/x-www-form-urlencoded',)), 'UsdLexer': ('pip._vendor.pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()), 'VBScriptLexer': ('pip._vendor.pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()), 'VCLLexer': ('pip._vendor.pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)), @@ -522,13 +541,19 @@ 'VelocityHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)), 'VelocityLexer': ('pip._vendor.pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()), 'VelocityXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)), + 'VerifpalLexer': ('pip._vendor.pygments.lexers.verifpal', 'Verifpal', ('verifpal',), ('*.vp',), ('text/x-verifpal',)), 'VerilogLexer': ('pip._vendor.pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)), 'VhdlLexer': ('pip._vendor.pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)), 'VimLexer': ('pip._vendor.pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), + 'VisualPrologGrammarLexer': ('pip._vendor.pygments.lexers.vip', 'Visual Prolog Grammar', ('visualprologgrammar',), ('*.vipgrm',), ()), + 'VisualPrologLexer': ('pip._vendor.pygments.lexers.vip', 'Visual Prolog', ('visualprolog',), ('*.pro', '*.cl', '*.i', '*.pack', '*.ph'), ()), + 'VyperLexer': ('pip._vendor.pygments.lexers.vyper', 'Vyper', ('vyper',), ('*.vy',), ()), 'WDiffLexer': ('pip._vendor.pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()), 'WatLexer': ('pip._vendor.pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()), 'WebIDLLexer': ('pip._vendor.pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()), + 'WgslLexer': ('pip._vendor.pygments.lexers.wgsl', 'WebGPU Shading Language', ('wgsl',), ('*.wgsl',), ('text/wgsl',)), 'WhileyLexer': ('pip._vendor.pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)), + 'WikitextLexer': ('pip._vendor.pygments.lexers.markup', 'Wikitext', ('wikitext', 'mediawiki'), (), ('text/x-wiki',)), 'WoWTocLexer': ('pip._vendor.pygments.lexers.wowtoc', 'World of Warcraft TOC', ('wowtoc',), ('*.toc',), ()), 'WrenLexer': ('pip._vendor.pygments.lexers.wren', 'Wren', ('wren',), ('*.wren',), ()), 'X10Lexer': ('pip._vendor.pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)), @@ -540,12 +565,14 @@ 'XmlPhpLexer': ('pip._vendor.pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)), 'XmlSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)), 'XorgLexer': ('pip._vendor.pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()), + 'XppLexer': ('pip._vendor.pygments.lexers.dotnet', 'X++', ('xpp', 'x++'), ('*.xpp',), ()), 'XsltLexer': ('pip._vendor.pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')), 'XtendLexer': ('pip._vendor.pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)), 'XtlangLexer': ('pip._vendor.pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()), 'YamlJinjaLexer': 
('pip._vendor.pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls', '*.yaml.j2', '*.yml.j2', '*.yaml.jinja2', '*.yml.jinja2'), ('text/x-yaml+jinja', 'text/x-sls')), 'YamlLexer': ('pip._vendor.pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)), 'YangLexer': ('pip._vendor.pygments.lexers.yang', 'YANG', ('yang',), ('*.yang',), ('application/yang',)), + 'YaraLexer': ('pip._vendor.pygments.lexers.yara', 'YARA', ('yara', 'yar'), ('*.yar',), ('text/x-yara',)), 'ZeekLexer': ('pip._vendor.pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()), 'ZephirLexer': ('pip._vendor.pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()), 'ZigLexer': ('pip._vendor.pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)), diff --git a/src/pip/_vendor/pygments/lexers/python.py b/src/pip/_vendor/pygments/lexers/python.py index 3341a382685..e2ce58f5a19 100644 --- a/src/pip/_vendor/pygments/lexers/python.py +++ b/src/pip/_vendor/pygments/lexers/python.py @@ -4,15 +4,15 @@ Lexers for Python and related languages. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re import keyword -from pip._vendor.pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \ - default, words, combined, do_insertions, this, line_re +from pip._vendor.pygments.lexer import DelegatingLexer, Lexer, RegexLexer, include, \ + bygroups, using, default, words, combined, do_insertions, this, line_re from pip._vendor.pygments.util import get_bool_opt, shebang_matches from pip._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Generic, Other, Error, Whitespace @@ -35,8 +35,8 @@ class PythonLexer(RegexLexer): """ name = 'Python' - url = 'http://www.python.org' - aliases = ['python', 'py', 'sage', 'python3', 'py3'] + url = 'https://www.python.org' + aliases = ['python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark'] filenames = [ '*.py', '*.pyw', @@ -234,16 +234,16 @@ def fstring_rules(ttype): ], 'builtins': [ (words(( - '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray', - 'breakpoint', 'bytes', 'chr', 'classmethod', 'compile', 'complex', - 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'filter', - 'float', 'format', 'frozenset', 'getattr', 'globals', 'hasattr', - 'hash', 'hex', 'id', 'input', 'int', 'isinstance', 'issubclass', - 'iter', 'len', 'list', 'locals', 'map', 'max', 'memoryview', - 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print', - 'property', 'range', 'repr', 'reversed', 'round', 'set', 'setattr', - 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', - 'type', 'vars', 'zip'), prefix=r'(?>> )(.*\n)', bygroups(Generic.Prompt, Other.Code), 'continuations'), + # This happens, e.g., when tracebacks are embedded in documentation; + # trailing whitespaces are often stripped in such contexts. + (r'(>>>)(\n)', bygroups(Generic.Prompt, Whitespace)), + (r'(\^C)?Traceback \(most recent call last\):\n', Other.Traceback, 'traceback'), + # SyntaxError starts with this + (r' File "[^"]+", line \d+', Other.Traceback, 'traceback'), + (r'.*\n', Generic.Output), + ], + 'continuations': [ + (r'(\.\.\. )(.*\n)', bygroups(Generic.Prompt, Other.Code)), + # See above. 
+ (r'(\.\.\.)(\n)', bygroups(Generic.Prompt, Whitespace)), + default('#pop'), + ], + 'traceback': [ + # As soon as we see a traceback, consume everything until the next + # >>> prompt. + (r'(?=>>>( |$))', Text, '#pop'), + (r'(KeyboardInterrupt)(\n)', bygroups(Name.Class, Whitespace)), + (r'.*\n', Other.Traceback), + ], + } -class PythonConsoleLexer(Lexer): +class PythonConsoleLexer(DelegatingLexer): """ For Python console output or doctests, such as: .. sourcecode:: pycon >>> a = 'foo' - >>> print a + >>> print(a) foo >>> 1 / 0 Traceback (most recent call last): @@ -659,70 +694,28 @@ class PythonConsoleLexer(Lexer): .. versionchanged:: 2.5 Now defaults to ``True``. """ + name = 'Python console session' aliases = ['pycon'] mimetypes = ['text/x-python-doctest'] def __init__(self, **options): - self.python3 = get_bool_opt(options, 'python3', True) - Lexer.__init__(self, **options) - - def get_tokens_unprocessed(self, text): - if self.python3: - pylexer = PythonLexer(**self.options) - tblexer = PythonTracebackLexer(**self.options) + python3 = get_bool_opt(options, 'python3', True) + if python3: + pylexer = PythonLexer + tblexer = PythonTracebackLexer else: - pylexer = Python2Lexer(**self.options) - tblexer = Python2TracebackLexer(**self.options) - - curcode = '' - insertions = [] - curtb = '' - tbindex = 0 - tb = 0 - for match in line_re.finditer(text): - line = match.group() - if line.startswith('>>> ') or line.startswith('... '): - tb = 0 - insertions.append((len(curcode), - [(0, Generic.Prompt, line[:4])])) - curcode += line[4:] - elif line.rstrip() == '...' and not tb: - # only a new >>> prompt can end an exception block - # otherwise an ellipsis in place of the traceback frames - # will be mishandled - insertions.append((len(curcode), - [(0, Generic.Prompt, '...')])) - curcode += line[3:] - else: - if curcode: - yield from do_insertions( - insertions, pylexer.get_tokens_unprocessed(curcode)) - curcode = '' - insertions = [] - if (line.startswith('Traceback (most recent call last):') or - re.match(' File "[^"]+", line \\d+\\n$', line)): - tb = 1 - curtb = line - tbindex = match.start() - elif line == 'KeyboardInterrupt\n': - yield match.start(), Name.Class, line - elif tb: - curtb += line - if not (line.startswith(' ') or line.strip() == '...'): - tb = 0 - for i, t, v in tblexer.get_tokens_unprocessed(curtb): - yield tbindex+i, t, v - curtb = '' - else: - yield match.start(), Generic.Output, line - if curcode: - yield from do_insertions(insertions, - pylexer.get_tokens_unprocessed(curcode)) - if curtb: - for i, t, v in tblexer.get_tokens_unprocessed(curtb): - yield tbindex+i, t, v - + pylexer = Python2Lexer + tblexer = Python2TracebackLexer + # We have two auxiliary lexers. Use DelegatingLexer twice with + # different tokens. TODO: DelegatingLexer should support this + # directly, by accepting a tuplet of auxiliary lexers and a tuple of + # distinguishing tokens. Then we wouldn't need this intermediary + # class. 
+ class _ReplaceInnerCode(DelegatingLexer): + def __init__(self, **options): + super().__init__(pylexer, _PythonConsoleLexerBase, Other.Code, **options) + super().__init__(tblexer, _ReplaceInnerCode, Other.Traceback, **options) class PythonTracebackLexer(RegexLexer): """ @@ -743,7 +736,7 @@ class PythonTracebackLexer(RegexLexer): tokens = { 'root': [ (r'\n', Whitespace), - (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), + (r'^(\^C)?Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), (r'^During handling of the above exception, another ' r'exception occurred:\n\n', Generic.Traceback), (r'^The above exception was the direct cause of the ' @@ -763,7 +756,8 @@ (r'^([^:]+)(: )(.+)(\n)', bygroups(Generic.Error, Text, Name, Whitespace), '#pop'), (r'^([a-zA-Z_][\w.]*)(:?\n)', - bygroups(Generic.Error, Whitespace), '#pop') + bygroups(Generic.Error, Whitespace), '#pop'), + default('#pop'), ], 'markers': [ # Either `PEP 657 <https://peps.python.org/pep-0657/>` @@ -836,7 +830,7 @@ class CythonLexer(RegexLexer): """ name = 'Cython' - url = 'http://cython.org' + url = 'https://cython.org' aliases = ['cython', 'pyx', 'pyrex'] filenames = ['*.pyx', '*.pxd', '*.pxi'] mimetypes = ['text/x-cython', 'application/x-cython'] diff --git a/src/pip/_vendor/pygments/modeline.py b/src/pip/_vendor/pygments/modeline.py index 43630835ca6..7b6f6a324ba 100644 --- a/src/pip/_vendor/pygments/modeline.py +++ b/src/pip/_vendor/pygments/modeline.py @@ -4,7 +4,7 @@ A simple modeline parser (based on pymodeline). - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/plugin.py b/src/pip/_vendor/pygments/plugin.py index 3590bee8d29..7b722d58db0 100644 --- a/src/pip/_vendor/pygments/plugin.py +++ b/src/pip/_vendor/pygments/plugin.py @@ -34,7 +34,7 @@ yourfilter = yourfilter:YourFilter - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/regexopt.py b/src/pip/_vendor/pygments/regexopt.py index ae0079199b9..45223eccc10 100644 --- a/src/pip/_vendor/pygments/regexopt.py +++ b/src/pip/_vendor/pygments/regexopt.py @@ -5,7 +5,7 @@ An algorithm that generates optimized regexes for matching long lists of literal strings. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/scanner.py b/src/pip/_vendor/pygments/scanner.py index d47ed4828a0..32a2f303296 100644 --- a/src/pip/_vendor/pygments/scanner.py +++ b/src/pip/_vendor/pygments/scanner.py @@ -11,7 +11,7 @@ Have a look at the `DelphiLexer` to get an idea of how to use this scanner. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re diff --git a/src/pip/_vendor/pygments/sphinxext.py b/src/pip/_vendor/pygments/sphinxext.py index 3537ecdb26f..fc0b0270bfd 100644 --- a/src/pip/_vendor/pygments/sphinxext.py +++ b/src/pip/_vendor/pygments/sphinxext.py @@ -5,7 +5,7 @@ Sphinx extension to generate automatic documentation of lexers, formatters and filters. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -147,6 +147,10 @@ def write_seperator(): def document_lexers(self): from pip._vendor.pygments.lexers._mapping import LEXERS + from pip._vendor import pygments + import inspect + import pathlib + out = [] modules = {} moduledocstrings = {} @@ -160,6 +164,24 @@ def document_lexers(self): docstring = cls.__doc__ if isinstance(docstring, bytes): docstring = docstring.decode('utf8') + + example_file = getattr(cls, '_example', None) + if example_file: + p = pathlib.Path(inspect.getabsfile(pygments)).parent.parent /\ + 'tests' / 'examplefiles' / example_file + content = p.read_text(encoding='utf-8') + if not content: + raise Exception( + f"Empty example file '{example_file}' for lexer " + f"{classname}") + + if data[2]: + lexer_name = data[2][0] + docstring += '\n\n .. admonition:: Example\n' + docstring += f'\n .. code-block:: {lexer_name}\n\n' + for line in content.splitlines(): + docstring += f' {line}\n' + modules.setdefault(module, []).append(( classname, ', '.join(data[2]) or 'None', diff --git a/src/pip/_vendor/pygments/style.py b/src/pip/_vendor/pygments/style.py index 84abbc20599..f2f72d3bc56 100644 --- a/src/pip/_vendor/pygments/style.py +++ b/src/pip/_vendor/pygments/style.py @@ -4,7 +4,7 @@ Basic style object. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -190,6 +190,12 @@ class Style(metaclass=StyleMeta): #: Style definitions for individual token types. styles = {} + #: user-friendly style name (used when selecting the style, so this + # should be all-lowercase, no spaces, hyphens) + name = 'unnamed' + + aliases = [] + # Attribute for lexers defined within Pygments. If set # to True, the style is not shown in the style gallery # on the website. This is intended for language-specific diff --git a/src/pip/_vendor/pygments/styles/__init__.py b/src/pip/_vendor/pygments/styles/__init__.py index 44cc0efb086..23b55468e2c 100644 --- a/src/pip/_vendor/pygments/styles/__init__.py +++ b/src/pip/_vendor/pygments/styles/__init__.py @@ -4,70 +4,33 @@ Contains built-in styles. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from pip._vendor.pygments.plugin import find_plugin_styles from pip._vendor.pygments.util import ClassNotFound +from pip._vendor.pygments.styles._mapping import STYLES +#: A dictionary of built-in styles, mapping style names to +#: ``'submodule::classname'`` strings. +#: This list is deprecated. Use `pygments.styles.STYLES` instead +STYLE_MAP = {v[1]: v[0].split('.')[-1] + '::' + k for k, v in STYLES.items()} -#: Maps style names to 'submodule::classname'. 
-STYLE_MAP = { - 'default': 'default::DefaultStyle', - 'emacs': 'emacs::EmacsStyle', - 'friendly': 'friendly::FriendlyStyle', - 'friendly_grayscale': 'friendly_grayscale::FriendlyGrayscaleStyle', - 'colorful': 'colorful::ColorfulStyle', - 'autumn': 'autumn::AutumnStyle', - 'murphy': 'murphy::MurphyStyle', - 'manni': 'manni::ManniStyle', - 'material': 'material::MaterialStyle', - 'monokai': 'monokai::MonokaiStyle', - 'perldoc': 'perldoc::PerldocStyle', - 'pastie': 'pastie::PastieStyle', - 'borland': 'borland::BorlandStyle', - 'trac': 'trac::TracStyle', - 'native': 'native::NativeStyle', - 'fruity': 'fruity::FruityStyle', - 'bw': 'bw::BlackWhiteStyle', - 'vim': 'vim::VimStyle', - 'vs': 'vs::VisualStudioStyle', - 'tango': 'tango::TangoStyle', - 'rrt': 'rrt::RrtStyle', - 'xcode': 'xcode::XcodeStyle', - 'igor': 'igor::IgorStyle', - 'paraiso-light': 'paraiso_light::ParaisoLightStyle', - 'paraiso-dark': 'paraiso_dark::ParaisoDarkStyle', - 'lovelace': 'lovelace::LovelaceStyle', - 'algol': 'algol::AlgolStyle', - 'algol_nu': 'algol_nu::Algol_NuStyle', - 'arduino': 'arduino::ArduinoStyle', - 'rainbow_dash': 'rainbow_dash::RainbowDashStyle', - 'abap': 'abap::AbapStyle', - 'solarized-dark': 'solarized::SolarizedDarkStyle', - 'solarized-light': 'solarized::SolarizedLightStyle', - 'sas': 'sas::SasStyle', - 'staroffice' : 'staroffice::StarofficeStyle', - 'stata': 'stata_light::StataLightStyle', - 'stata-light': 'stata_light::StataLightStyle', - 'stata-dark': 'stata_dark::StataDarkStyle', - 'inkpot': 'inkpot::InkPotStyle', - 'zenburn': 'zenburn::ZenburnStyle', - 'gruvbox-dark': 'gruvbox::GruvboxDarkStyle', - 'gruvbox-light': 'gruvbox::GruvboxLightStyle', - 'dracula': 'dracula::DraculaStyle', - 'one-dark': 'onedark::OneDarkStyle', - 'lilypond' : 'lilypond::LilyPondStyle', - 'nord': 'nord::NordStyle', - 'nord-darker': 'nord::NordDarkerStyle', - 'github-dark': 'gh_dark::GhDarkStyle' -} +#: Internal reverse mapping to make `get_style_by_name` more efficient +_STYLE_NAME_TO_MODULE_MAP = {v[1]: (v[0], k) for k, v in STYLES.items()} def get_style_by_name(name): - if name in STYLE_MAP: - mod, cls = STYLE_MAP[name].split('::') + """ + Return a style class by its short name. The names of the builtin styles + are listed in :data:`pygments.styles.STYLE_MAP`. + + Will raise :exc:`pygments.util.ClassNotFound` if no style of that name is + found. + """ + if name in _STYLE_NAME_TO_MODULE_MAP: + mod, cls = _STYLE_NAME_TO_MODULE_MAP[name] builtin = "yes" else: for found_name, style in find_plugin_styles(): @@ -75,14 +38,15 @@ def get_style_by_name(name): return style # perhaps it got dropped into our styles package builtin = "" - mod = name + mod = 'pygments.styles.' + name cls = name.title() + "Style" try: - mod = __import__('pygments.styles.' 
+ mod, None, None, [cls]) + mod = __import__(mod, None, None, [cls]) except ImportError: raise ClassNotFound("Could not find style module %r" % mod + - (builtin and ", though it should be builtin") + ".") + (builtin and ", though it should be builtin") + + ".") try: return getattr(mod, cls) except AttributeError: @@ -90,8 +54,8 @@ def get_style_by_name(name): def get_all_styles(): - """Return a generator for all styles by name, - both builtin and plugin.""" - yield from STYLE_MAP + """Return a generator for all styles by name, both builtin and plugin.""" + for v in STYLES.values(): + yield v[1] for name, _ in find_plugin_styles(): yield name diff --git a/src/pip/_vendor/pygments/styles/_mapping.py b/src/pip/_vendor/pygments/styles/_mapping.py new file mode 100644 index 00000000000..04c7ddfbb04 --- /dev/null +++ b/src/pip/_vendor/pygments/styles/_mapping.py @@ -0,0 +1,53 @@ +# Automatically generated by scripts/gen_mapfiles.py. +# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead. + +STYLES = { + 'AbapStyle': ('pygments.styles.abap', 'abap', ()), + 'AlgolStyle': ('pygments.styles.algol', 'algol', ()), + 'Algol_NuStyle': ('pygments.styles.algol_nu', 'algol_nu', ()), + 'ArduinoStyle': ('pygments.styles.arduino', 'arduino', ()), + 'AutumnStyle': ('pygments.styles.autumn', 'autumn', ()), + 'BlackWhiteStyle': ('pygments.styles.bw', 'bw', ()), + 'BorlandStyle': ('pygments.styles.borland', 'borland', ()), + 'ColorfulStyle': ('pygments.styles.colorful', 'colorful', ()), + 'DefaultStyle': ('pygments.styles.default', 'default', ()), + 'DraculaStyle': ('pygments.styles.dracula', 'dracula', ()), + 'EmacsStyle': ('pygments.styles.emacs', 'emacs', ()), + 'FriendlyGrayscaleStyle': ('pygments.styles.friendly_grayscale', 'friendly_grayscale', ()), + 'FriendlyStyle': ('pygments.styles.friendly', 'friendly', ()), + 'FruityStyle': ('pygments.styles.fruity', 'fruity', ()), + 'GhDarkStyle': ('pygments.styles.gh_dark', 'github-dark', ()), + 'GruvboxDarkStyle': ('pygments.styles.gruvbox', 'gruvbox-dark', ()), + 'GruvboxLightStyle': ('pygments.styles.gruvbox', 'gruvbox-light', ()), + 'IgorStyle': ('pygments.styles.igor', 'igor', ()), + 'InkPotStyle': ('pygments.styles.inkpot', 'inkpot', ()), + 'LightbulbStyle': ('pygments.styles.lightbulb', 'lightbulb', ()), + 'LilyPondStyle': ('pygments.styles.lilypond', 'lilypond', ()), + 'LovelaceStyle': ('pygments.styles.lovelace', 'lovelace', ()), + 'ManniStyle': ('pygments.styles.manni', 'manni', ()), + 'MaterialStyle': ('pygments.styles.material', 'material', ()), + 'MonokaiStyle': ('pygments.styles.monokai', 'monokai', ()), + 'MurphyStyle': ('pygments.styles.murphy', 'murphy', ()), + 'NativeStyle': ('pygments.styles.native', 'native', ()), + 'NordDarkerStyle': ('pygments.styles.nord', 'nord-darker', ()), + 'NordStyle': ('pygments.styles.nord', 'nord', ()), + 'OneDarkStyle': ('pygments.styles.onedark', 'one-dark', ()), + 'ParaisoDarkStyle': ('pygments.styles.paraiso_dark', 'paraiso-dark', ()), + 'ParaisoLightStyle': ('pygments.styles.paraiso_light', 'paraiso-light', ()), + 'PastieStyle': ('pygments.styles.pastie', 'pastie', ()), + 'PerldocStyle': ('pygments.styles.perldoc', 'perldoc', ()), + 'RainbowDashStyle': ('pygments.styles.rainbow_dash', 'rainbow_dash', ()), + 'RrtStyle': ('pygments.styles.rrt', 'rrt', ()), + 'SasStyle': ('pygments.styles.sas', 'sas', ()), + 'SolarizedDarkStyle': ('pygments.styles.solarized', 'solarized-dark', ()), + 'SolarizedLightStyle': ('pygments.styles.solarized', 'solarized-light', ()), + 'StarofficeStyle': ('pygments.styles.staroffice', 
'staroffice', ()), + 'StataDarkStyle': ('pygments.styles.stata_dark', 'stata-dark', ()), + 'StataLightStyle': ('pygments.styles.stata_light', 'stata-light', ()), + 'TangoStyle': ('pygments.styles.tango', 'tango', ()), + 'TracStyle': ('pygments.styles.trac', 'trac', ()), + 'VimStyle': ('pygments.styles.vim', 'vim', ()), + 'VisualStudioStyle': ('pygments.styles.vs', 'vs', ()), + 'XcodeStyle': ('pygments.styles.xcode', 'xcode', ()), + 'ZenburnStyle': ('pygments.styles.zenburn', 'zenburn', ()), +} diff --git a/src/pip/_vendor/pygments/token.py b/src/pip/_vendor/pygments/token.py index e3e565ad591..bdf2e8e2e12 100644 --- a/src/pip/_vendor/pygments/token.py +++ b/src/pip/_vendor/pygments/token.py @@ -4,7 +4,7 @@ Basic token types and the standard tokens. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -209,5 +209,6 @@ def string_to_tokentype(s): Generic.Prompt: 'gp', Generic.Strong: 'gs', Generic.Subheading: 'gu', + Generic.EmphStrong: 'ges', Generic.Traceback: 'gt', } diff --git a/src/pip/_vendor/pygments/unistring.py b/src/pip/_vendor/pygments/unistring.py index 2e3c80869d9..39f6baeedfb 100644 --- a/src/pip/_vendor/pygments/unistring.py +++ b/src/pip/_vendor/pygments/unistring.py @@ -7,7 +7,7 @@ Inspired by chartypes_create.py from the MoinMoin project. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -112,7 +112,7 @@ def _handle_runs(char_list): # pragma: no cover categories = {'xid_start': [], 'xid_continue': []} - with open(__file__) as fp: + with open(__file__, encoding='utf-8') as fp: content = fp.read() header = content[:content.find('Cc =')] @@ -136,7 +136,7 @@ def _handle_runs(char_list): # pragma: no cover if ('a' + c).isidentifier(): categories['xid_continue'].append(c) - with open(__file__, 'w') as fp: + with open(__file__, 'w', encoding='utf-8') as fp: fp.write(header) for cat in sorted(categories): diff --git a/src/pip/_vendor/pygments/util.py b/src/pip/_vendor/pygments/util.py index 8032962dc99..941fdb9ec7a 100644 --- a/src/pip/_vendor/pygments/util.py +++ b/src/pip/_vendor/pygments/util.py @@ -4,7 +4,7 @@ Utility functions. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -32,10 +32,16 @@ class ClassNotFound(ValueError): class OptionError(Exception): - pass - + """ + This exception will be raised by all option processing functions if + the type or value of the argument is not correct. + """ def get_choice_opt(options, optname, allowed, default=None, normcase=False): + """ + If the key `optname` from the dictionary is not in the sequence + `allowed`, raise an error, otherwise return it. + """ string = options.get(optname, default) if normcase: string = string.lower() @@ -46,6 +52,17 @@ def get_choice_opt(options, optname, allowed, default=None, normcase=False): def get_bool_opt(options, optname, default=None): + """ + Intuitively, this is `options.get(optname, default)`, but restricted to + Boolean value. The Booleans can be represented as string, in order to accept + Boolean value from the command line arguments. If the key `optname` is + present in the dictionary `options` and is not associated with a Boolean, + raise an `OptionError`. 
If it is absent, `default` is returned instead. + + The valid string values for ``True`` are ``1``, ``yes``, ``true`` and + ``on``, the ones for ``False`` are ``0``, ``no``, ``false`` and ``off`` + (matched case-insensitively). + """ string = options.get(optname, default) if isinstance(string, bool): return string @@ -66,6 +83,7 @@ def get_int_opt(options, optname, default=None): + """As :func:`get_bool_opt`, but interpret the value as an integer.""" string = options.get(optname, default) try: return int(string) @@ -78,8 +96,12 @@ 'must give an integer value' % ( string, optname)) - def get_list_opt(options, optname, default=None): + """ + If the key `optname` from the dictionary `options` is a string, + split it at whitespace and return it. If it is already a list + or a tuple, it is returned as a list. + """ val = options.get(optname, default) if isinstance(val, str): return val.split() diff --git a/src/pip/_vendor/pyparsing/__init__.py b/src/pip/_vendor/pyparsing/__init__.py index 75372500ed9..88bc10ac18a 100644 --- a/src/pip/_vendor/pyparsing/__init__.py +++ b/src/pip/_vendor/pyparsing/__init__.py @@ -56,7 +56,7 @@ :class:`'|'`, :class:`'^'` and :class:`'&'` operators. The :class:`ParseResults` object returned from -:class:`ParserElement.parseString` can be +:class:`ParserElement.parse_string` can be accessed as a nested list, a dictionary, or an object with named attributes. @@ -85,11 +85,11 @@ and :class:`'&'` operators to combine simple expressions into more complex ones - associate names with your parsed results using - :class:`ParserElement.setResultsName` + :class:`ParserElement.set_results_name` - access the parsed data, which is returned as a :class:`ParseResults` object - - find some helpful expression short-cuts like :class:`delimitedList` - and :class:`oneOf` + - find some helpful expression short-cuts like :class:`DelimitedList` + and :class:`one_of` - find more useful common expressions in the :class:`pyparsing_common` namespace class """ @@ -106,30 +106,22 @@ class version_info(NamedTuple): @property def __version__(self): return ( - "{}.{}.{}".format(self.major, self.minor, self.micro) + f"{self.major}.{self.minor}.{self.micro}" + ( - "{}{}{}".format( - "r" if self.releaselevel[0] == "c" else "", - self.releaselevel[0], - self.serial, - ), + f"{'r' if self.releaselevel[0] == 'c' else ''}{self.releaselevel[0]}{self.serial}", "", )[self.releaselevel == "final"] ) def __str__(self): - return "{} {} / {}".format(__name__, self.__version__, __version_time__) + return f"{__name__} {self.__version__} / {__version_time__}" def __repr__(self): - return "{}.{}({})".format( - __name__, - type(self).__name__, - ", ".join("{}={!r}".format(*nv) for nv in zip(self._fields, self)), - ) + return f"{__name__}.{type(self).__name__}({', '.join('{}={!r}'.format(*nv) for nv in zip(self._fields, self))})" -__version_info__ = version_info(3, 0, 9, "final", 0) -__version_time__ = "05 May 2022 07:02 UTC" +__version_info__ = version_info(3, 1, 0, "final", 1) +__version_time__ = "18 Jun 2023 14:05 UTC" __version__ = __version_info__.__version__ __versionTime__ = __version_time__ __author__ = "Paul McGuire <ptmcg.gm@gmail.com>" @@ -139,9 +131,9 @@ from .actions import * from .core import __diag__, __compat__ from .results import * -from .core import * +from .core import * # type: ignore[misc, assignment] from .core import _builtin_exprs as core_builtin_exprs -from .helpers import * +from .helpers
import * # type: ignore[misc, assignment] from .helpers import _builtin_exprs as helper_builtin_exprs from .unicode import unicode_set, UnicodeRangeList, pyparsing_unicode as unicode @@ -153,11 +145,11 @@ def __repr__(self): # define backward compat synonyms if "pyparsing_unicode" not in globals(): - pyparsing_unicode = unicode + pyparsing_unicode = unicode # type: ignore[misc] if "pyparsing_common" not in globals(): - pyparsing_common = common + pyparsing_common = common # type: ignore[misc] if "pyparsing_test" not in globals(): - pyparsing_test = testing + pyparsing_test = testing # type: ignore[misc] core_builtin_exprs += common_builtin_exprs + helper_builtin_exprs @@ -174,7 +166,9 @@ def __repr__(self): "CaselessKeyword", "CaselessLiteral", "CharsNotIn", + "CloseMatch", "Combine", + "DelimitedList", "Dict", "Each", "Empty", @@ -227,9 +221,11 @@ def __repr__(self): "alphas8bit", "any_close_tag", "any_open_tag", + "autoname_elements", "c_style_comment", "col", "common_html_entity", + "condition_as_parse_action", "counted_array", "cpp_style_comment", "dbl_quoted_string", @@ -241,6 +237,7 @@ def __repr__(self): "html_comment", "identchars", "identbodychars", + "infix_notation", "java_style_comment", "line", "line_end", @@ -255,8 +252,12 @@ def __repr__(self): "null_debug_action", "nums", "one_of", + "original_text_for", "printables", "punc8bit", + "pyparsing_common", + "pyparsing_test", + "pyparsing_unicode", "python_style_comment", "quoted_string", "remove_quotes", @@ -267,28 +268,20 @@ def __repr__(self): "srange", "string_end", "string_start", + "token_map", "trace_parse_action", + "ungroup", + "unicode_set", "unicode_string", "with_attribute", - "indentedBlock", - "original_text_for", - "ungroup", - "infix_notation", - "locatedExpr", "with_class", - "CloseMatch", - "token_map", - "pyparsing_common", - "pyparsing_unicode", - "unicode_set", - "condition_as_parse_action", - "pyparsing_test", # pre-PEP8 compatibility names "__versionTime__", "anyCloseTag", "anyOpenTag", "cStyleComment", "commonHTMLEntity", + "conditionAsParseAction", "countedArray", "cppStyleComment", "dblQuotedString", @@ -296,9 +289,12 @@ def __repr__(self): "delimitedList", "dictOf", "htmlComment", + "indentedBlock", + "infixNotation", "javaStyleComment", "lineEnd", "lineStart", + "locatedExpr", "makeHTMLTags", "makeXMLTags", "matchOnlyAtCol", @@ -308,6 +304,7 @@ def __repr__(self): "nullDebugAction", "oneOf", "opAssoc", + "originalTextFor", "pythonStyleComment", "quotedString", "removeQuotes", @@ -317,15 +314,9 @@ def __repr__(self): "sglQuotedString", "stringEnd", "stringStart", + "tokenMap", "traceParseAction", "unicodeString", "withAttribute", - "indentedBlock", - "originalTextFor", - "infixNotation", - "locatedExpr", "withClass", - "tokenMap", - "conditionAsParseAction", - "autoname_elements", ] diff --git a/src/pip/_vendor/pyparsing/actions.py b/src/pip/_vendor/pyparsing/actions.py index f72c66e7431..ca6e4c6afb4 100644 --- a/src/pip/_vendor/pyparsing/actions.py +++ b/src/pip/_vendor/pyparsing/actions.py @@ -1,7 +1,7 @@ # actions.py from .exceptions import ParseException -from .util import col +from .util import col, replaced_by_pep8 class OnlyOnce: @@ -38,7 +38,7 @@ def match_only_at_col(n): def verify_col(strg, locn, toks): if col(locn, strg) != n: - raise ParseException(strg, locn, "matched token not at column {}".format(n)) + raise ParseException(strg, locn, f"matched token not at column {n}") return verify_col @@ -148,15 +148,13 @@ def pa(s, l, tokens): raise ParseException( s, l, - "attribute {!r} has value 
{!r}, must be {!r}".format( - attrName, tokens[attrName], attrValue - ), + f"attribute {attrName!r} has value {tokens[attrName]!r}, must be {attrValue!r}", ) return pa -with_attribute.ANY_VALUE = object() +with_attribute.ANY_VALUE = object() # type: ignore [attr-defined] def with_class(classname, namespace=""): @@ -195,13 +193,25 @@ def with_class(classname, namespace=""): 1 4 0 1 0 1,3 2,3 1,1 """ - classattr = "{}:class".format(namespace) if namespace else "class" + classattr = f"{namespace}:class" if namespace else "class" return with_attribute(**{classattr: classname}) # pre-PEP8 compatibility symbols -replaceWith = replace_with -removeQuotes = remove_quotes -withAttribute = with_attribute -withClass = with_class -matchOnlyAtCol = match_only_at_col +# fmt: off +@replaced_by_pep8(replace_with) +def replaceWith(): ... + +@replaced_by_pep8(remove_quotes) +def removeQuotes(): ... + +@replaced_by_pep8(with_attribute) +def withAttribute(): ... + +@replaced_by_pep8(with_class) +def withClass(): ... + +@replaced_by_pep8(match_only_at_col) +def matchOnlyAtCol(): ... + +# fmt: on diff --git a/src/pip/_vendor/pyparsing/common.py b/src/pip/_vendor/pyparsing/common.py index 1859fb79cc4..7a666b276df 100644 --- a/src/pip/_vendor/pyparsing/common.py +++ b/src/pip/_vendor/pyparsing/common.py @@ -1,6 +1,6 @@ # common.py from .core import * -from .helpers import delimited_list, any_open_tag, any_close_tag +from .helpers import DelimitedList, any_open_tag, any_close_tag from datetime import datetime @@ -22,17 +22,17 @@ class pyparsing_common: Parse actions: - - :class:`convertToInteger` - - :class:`convertToFloat` - - :class:`convertToDate` - - :class:`convertToDatetime` - - :class:`stripHTMLTags` - - :class:`upcaseTokens` - - :class:`downcaseTokens` + - :class:`convert_to_integer` + - :class:`convert_to_float` + - :class:`convert_to_date` + - :class:`convert_to_datetime` + - :class:`strip_html_tags` + - :class:`upcase_tokens` + - :class:`downcase_tokens` Example:: - pyparsing_common.number.runTests(''' + pyparsing_common.number.run_tests(''' # any int or real number, returned as the appropriate type 100 -100 @@ -42,7 +42,7 @@ class pyparsing_common: 1e-12 ''') - pyparsing_common.fnumber.runTests(''' + pyparsing_common.fnumber.run_tests(''' # any int or real number, returned as float 100 -100 @@ -52,19 +52,19 @@ class pyparsing_common: 1e-12 ''') - pyparsing_common.hex_integer.runTests(''' + pyparsing_common.hex_integer.run_tests(''' # hex numbers 100 FF ''') - pyparsing_common.fraction.runTests(''' + pyparsing_common.fraction.run_tests(''' # fractions 1/2 -3/4 ''') - pyparsing_common.mixed_integer.runTests(''' + pyparsing_common.mixed_integer.run_tests(''' # mixed fractions 1 1/2 @@ -73,8 +73,8 @@ class pyparsing_common: ''') import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(''' + pyparsing_common.uuid.set_parse_action(token_map(uuid.UUID)) + pyparsing_common.uuid.run_tests(''' # uuid 12345678-1234-5678-1234-567812345678 ''') @@ -260,8 +260,8 @@ def convert_to_date(fmt: str = "%Y-%m-%d"): Example:: date_expr = pyparsing_common.iso8601_date.copy() - date_expr.setParseAction(pyparsing_common.convertToDate()) - print(date_expr.parseString("1999-12-31")) + date_expr.set_parse_action(pyparsing_common.convert_to_date()) + print(date_expr.parse_string("1999-12-31")) prints:: @@ -287,8 +287,8 @@ def convert_to_datetime(fmt: str = "%Y-%m-%dT%H:%M:%S.%f"): Example:: dt_expr = pyparsing_common.iso8601_datetime.copy() - 
dt_expr.setParseAction(pyparsing_common.convertToDatetime()) - print(dt_expr.parseString("1999-12-31T23:59:59.999")) + dt_expr.set_parse_action(pyparsing_common.convert_to_datetime()) + print(dt_expr.parse_string("1999-12-31T23:59:59.999")) prints:: @@ -326,9 +326,9 @@ def strip_html_tags(s: str, l: int, tokens: ParseResults): # strip HTML links from normal text text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>' - td, td_end = makeHTMLTags("TD") - table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end - print(table_text.parseString(text).body) + td, td_end = make_html_tags("TD") + table_text = td + SkipTo(td_end).set_parse_action(pyparsing_common.strip_html_tags)("body") + td_end + print(table_text.parse_string(text).body) Prints:: @@ -348,7 +348,7 @@ def strip_html_tags(s: str, l: int, tokens: ParseResults): .streamline() .set_name("commaItem") ) - comma_separated_list = delimited_list( + comma_separated_list = DelimitedList( Opt(quoted_string.copy() | _commasepitem, default="") ).set_name("comma separated list") """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" @@ -363,7 +363,7 @@ def strip_html_tags(s: str, l: int, tokens: ParseResults): url = Regex( # https://mathiasbynens.be/demo/url-regex # https://gist.github.com/dperini/729294 - r"^" + + r"(?P<url>" + # protocol identifier (optional) # short syntax // still required r"(?:(?:(?P<scheme>https?|ftp):)?\/\/)" + @@ -405,18 +426,26 @@ r"(\?(?P<query>[^#]*))?" + # fragment (optional) r"(#(?P<fragment>\S*))?" + - r"$" + r")" ).set_name("url") + """URL (http/https/ftp scheme)""" # fmt: on # pre-PEP8 compatibility names convertToInteger = convert_to_integer + """Deprecated - use :class:`convert_to_integer`""" convertToFloat = convert_to_float + """Deprecated - use :class:`convert_to_float`""" convertToDate = convert_to_date + """Deprecated - use :class:`convert_to_date`""" convertToDatetime = convert_to_datetime + """Deprecated - use :class:`convert_to_datetime`""" stripHTMLTags = strip_html_tags + """Deprecated - use :class:`strip_html_tags`""" upcaseTokens = upcase_tokens + """Deprecated - use :class:`upcase_tokens`""" downcaseTokens = downcase_tokens + """Deprecated - use :class:`downcase_tokens`""" _builtin_exprs = [ diff --git a/src/pip/_vendor/pyparsing/core.py b/src/pip/_vendor/pyparsing/core.py index 6ff3c766f7d..8d5a856ecd6 100644 --- a/src/pip/_vendor/pyparsing/core.py +++ b/src/pip/_vendor/pyparsing/core.py @@ -1,19 +1,22 @@ # # core.py # + +from collections import deque import os import typing from typing import ( - NamedTuple, - Union, - Callable, Any, + Callable, Generator, - Tuple, List, - TextIO, - Set, + NamedTuple, Sequence, + Set, + TextIO, + Tuple, + Union, + cast, ) from abc import ABC, abstractmethod from enum import Enum @@ -40,6 +43,7 @@ _flatten, LRUMemo as _LRUMemo, UnboundedMemo as _UnboundedMemo, + replaced_by_pep8, ) from .exceptions import * from .actions import * @@ -134,6 +138,7 @@ def enable_all_warnings(cls) -> None: class Diagnostics(Enum): """ Diagnostic configuration (all default to disabled) + - ``warn_multiple_tokens_in_named_alternation`` - flag to enable warnings when a results name is defined on a :class:`MatchFirst` or :class:`Or` expression with one or more :class:`And` subexpressions - ``warn_ungrouped_named_tokens_in_collection`` - flag to enable warnings when a results @@ -228,6 +233,8 @@ def _should_enable_warnings( } _generatorType = types.GeneratorType +ParseImplReturnType =
Tuple[int, Any] +PostParseReturnType = Union[ParseResults, Sequence[ParseResults]] ParseAction = Union[ Callable[[], Any], Callable[[ParseResults], Any], @@ -256,7 +263,7 @@ def _should_enable_warnings( alphanums = alphas + nums printables = "".join([c for c in string.printable if c not in string.whitespace]) -_trim_arity_call_line: traceback.StackSummary = None +_trim_arity_call_line: traceback.StackSummary = None # type: ignore[assignment] def _trim_arity(func, max_limit=3): @@ -269,11 +276,6 @@ def _trim_arity(func, max_limit=3): limit = 0 found_arity = False - def extract_tb(tb, limit=0): - frames = traceback.extract_tb(tb, limit=limit) - frame_summary = frames[-1] - return [frame_summary[:2]] - # synthesize what would be returned by traceback.extract_stack at the call to # user's parse action 'func', so that we don't incur call penalty at parse time @@ -297,8 +299,10 @@ def wrapper(*args): raise else: tb = te.__traceback__ + frames = traceback.extract_tb(tb, limit=2) + frame_summary = frames[-1] trim_arity_type_error = ( - extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth + [frame_summary[:2]][-1][:2] == pa_call_line_synth ) del tb @@ -320,7 +324,7 @@ def wrapper(*args): def condition_as_parse_action( - fn: ParseCondition, message: str = None, fatal: bool = False + fn: ParseCondition, message: typing.Optional[str] = None, fatal: bool = False ) -> ParseAction: """ Function to convert a simple predicate function that returns ``True`` or ``False`` @@ -353,15 +357,9 @@ def _default_start_debug_action( cache_hit_str = "*" if cache_hit else "" print( ( - "{}Match {} at loc {}({},{})\n {}\n {}^".format( - cache_hit_str, - expr, - loc, - lineno(loc, instring), - col(loc, instring), - line(loc, instring), - " " * (col(loc, instring) - 1), - ) + f"{cache_hit_str}Match {expr} at loc {loc}({lineno(loc, instring)},{col(loc, instring)})\n" + f" {line(loc, instring)}\n" + f" {' ' * (col(loc, instring) - 1)}^" ) ) @@ -375,7 +373,7 @@ def _default_success_debug_action( cache_hit: bool = False, ): cache_hit_str = "*" if cache_hit else "" - print("{}Matched {} -> {}".format(cache_hit_str, expr, toks.as_list())) + print(f"{cache_hit_str}Matched {expr} -> {toks.as_list()}") def _default_exception_debug_action( @@ -386,11 +384,7 @@ def _default_exception_debug_action( cache_hit: bool = False, ): cache_hit_str = "*" if cache_hit else "" - print( - "{}Match {} failed, {} raised: {}".format( - cache_hit_str, expr, type(exc).__name__, exc - ) - ) + print(f"{cache_hit_str}Match {expr} failed, {type(exc).__name__} raised: {exc}") def null_debug_action(*args): @@ -402,7 +396,7 @@ class ParserElement(ABC): DEFAULT_WHITE_CHARS: str = " \n\t\r" verbose_stacktrace: bool = False - _literalStringClass: typing.Optional[type] = None + _literalStringClass: type = None # type: ignore[assignment] @staticmethod def set_default_whitespace_chars(chars: str) -> None: @@ -447,6 +441,18 @@ def inline_literals_using(cls: type) -> None: """ ParserElement._literalStringClass = cls + @classmethod + def using_each(cls, seq, **class_kwargs): + """ + Yields a sequence of class(obj, **class_kwargs) for obj in seq. 
+ + Example:: + + LPAR, RPAR, LBRACE, RBRACE, SEMI = Suppress.using_each("(){};") + + """ + yield from (cls(obj, **class_kwargs) for obj in seq) + class DebugActions(NamedTuple): debug_try: typing.Optional[DebugStartAction] debug_match: typing.Optional[DebugSuccessAction] @@ -455,9 +461,9 @@ class DebugActions(NamedTuple): def __init__(self, savelist: bool = False): self.parseAction: List[ParseAction] = list() self.failAction: typing.Optional[ParseFailAction] = None - self.customName = None - self._defaultName = None - self.resultsName = None + self.customName: str = None # type: ignore[assignment] + self._defaultName: typing.Optional[str] = None + self.resultsName: str = None # type: ignore[assignment] self.saveAsList = savelist self.skipWhitespace = True self.whiteChars = set(ParserElement.DEFAULT_WHITE_CHARS) @@ -490,12 +496,29 @@ def suppress_warning(self, warning_type: Diagnostics) -> "ParserElement": base.suppress_warning(Diagnostics.warn_on_parse_using_empty_Forward) # statement would normally raise a warning, but is now suppressed - print(base.parseString("x")) + print(base.parse_string("x")) """ self.suppress_warnings_.append(warning_type) return self + def visit_all(self): + """General-purpose method to yield all expressions and sub-expressions + in a grammar. Typically just for internal use. + """ + to_visit = deque([self]) + seen = set() + while to_visit: + cur = to_visit.popleft() + + # guard against looping forever through recursive grammars + if cur in seen: + continue + seen.add(cur) + + to_visit.extend(cur.recurse()) + yield cur + def copy(self) -> "ParserElement": """ Make a copy of this :class:`ParserElement`. Useful for defining @@ -585,11 +608,11 @@ def breaker(instring, loc, doActions=True, callPreParse=True): pdb.set_trace() return _parseMethod(instring, loc, doActions, callPreParse) - breaker._originalParseMethod = _parseMethod - self._parse = breaker + breaker._originalParseMethod = _parseMethod # type: ignore [attr-defined] + self._parse = breaker # type: ignore [assignment] else: if hasattr(self._parse, "_originalParseMethod"): - self._parse = self._parse._originalParseMethod + self._parse = self._parse._originalParseMethod # type: ignore [attr-defined, assignment] return self def set_parse_action(self, *fns: ParseAction, **kwargs) -> "ParserElement": @@ -601,9 +624,9 @@ def set_parse_action(self, *fns: ParseAction, **kwargs) -> "ParserElement": Each parse action ``fn`` is a callable method with 0-3 arguments, called as ``fn(s, loc, toks)`` , ``fn(loc, toks)`` , ``fn(toks)`` , or just ``fn()`` , where: - - s = the original string being parsed (see note below) - - loc = the location of the matching substring - - toks = a list of the matched tokens, packaged as a :class:`ParseResults` object + - ``s`` = the original string being parsed (see note below) + - ``loc`` = the location of the matching substring + - ``toks`` = a list of the matched tokens, packaged as a :class:`ParseResults` object The parsed tokens are passed to the parse action as ParseResults. They can be modified in place using list-style append, extend, and pop operations to update @@ -621,7 +644,7 @@ def set_parse_action(self, *fns: ParseAction, **kwargs) -> "ParserElement": Optional keyword arguments: - - call_during_try = (default= ``False``) indicate if parse action should be run during + - ``call_during_try`` = (default= ``False``) indicate if parse action should be run during lookaheads and alternate testing. 
For parse actions that have side effects, it is important to only call the parse action once it is determined that it is being called as part of a successful parse. For parse actions that perform additional @@ -697,10 +720,10 @@ def add_condition(self, *fns: ParseCondition, **kwargs) -> "ParserElement": Optional keyword arguments: - - message = define a custom message to be used in the raised exception - - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise + - ``message`` = define a custom message to be used in the raised exception + - ``fatal`` = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException - - call_during_try = boolean to indicate if this method should be called during internal tryParse calls, + - ``call_during_try`` = boolean to indicate if this method should be called during internal tryParse calls, default=False Example:: @@ -716,7 +739,9 @@ def add_condition(self, *fns: ParseCondition, **kwargs) -> "ParserElement": for fn in fns: self.parseAction.append( condition_as_parse_action( - fn, message=kwargs.get("message"), fatal=kwargs.get("fatal", False) + fn, + message=str(kwargs.get("message")), + fatal=bool(kwargs.get("fatal", False)), ) ) @@ -731,30 +756,33 @@ def set_fail_action(self, fn: ParseFailAction) -> "ParserElement": Fail acton fn is a callable function that takes the arguments ``fn(s, loc, expr, err)`` where: - - s = string being parsed - - loc = location where expression match was attempted and failed - - expr = the parse expression that failed - - err = the exception thrown + - ``s`` = string being parsed + - ``loc`` = location where expression match was attempted and failed + - ``expr`` = the parse expression that failed + - ``err`` = the exception thrown The function returns no value. 
It may throw :class:`ParseFatalException` if it is desired to stop parsing immediately.""" self.failAction = fn return self - def _skipIgnorables(self, instring, loc): + def _skipIgnorables(self, instring: str, loc: int) -> int: + if not self.ignoreExprs: + return loc exprsFound = True + ignore_expr_fns = [e._parse for e in self.ignoreExprs] while exprsFound: exprsFound = False - for e in self.ignoreExprs: + for ignore_fn in ignore_expr_fns: try: while 1: - loc, dummy = e._parse(instring, loc) + loc, dummy = ignore_fn(instring, loc) exprsFound = True except ParseException: pass return loc - def preParse(self, instring, loc): + def preParse(self, instring: str, loc: int) -> int: if self.ignoreExprs: loc = self._skipIgnorables(instring, loc) @@ -830,7 +858,7 @@ def _parseNoCache( try: for fn in self.parseAction: try: - tokens = fn(instring, tokens_start, ret_tokens) + tokens = fn(instring, tokens_start, ret_tokens) # type: ignore [call-arg, arg-type] except IndexError as parse_action_exc: exc = ParseException("exception raised in parse action") raise exc from parse_action_exc @@ -853,7 +881,7 @@ def _parseNoCache( else: for fn in self.parseAction: try: - tokens = fn(instring, tokens_start, ret_tokens) + tokens = fn(instring, tokens_start, ret_tokens) # type: ignore [call-arg, arg-type] except IndexError as parse_action_exc: exc = ParseException("exception raised in parse action") raise exc from parse_action_exc @@ -875,17 +903,24 @@ def _parseNoCache( return loc, ret_tokens - def try_parse(self, instring: str, loc: int, raise_fatal: bool = False) -> int: + def try_parse( + self, + instring: str, + loc: int, + *, + raise_fatal: bool = False, + do_actions: bool = False, + ) -> int: try: - return self._parse(instring, loc, doActions=False)[0] + return self._parse(instring, loc, doActions=do_actions)[0] except ParseFatalException: if raise_fatal: raise raise ParseException(instring, loc, self.errmsg, self) - def can_parse_next(self, instring: str, loc: int) -> bool: + def can_parse_next(self, instring: str, loc: int, do_actions: bool = False) -> bool: try: - self.try_parse(instring, loc) + self.try_parse(instring, loc, do_actions=do_actions) except (ParseException, IndexError): return False else: @@ -897,10 +932,23 @@ def can_parse_next(self, instring: str, loc: int) -> bool: Tuple[int, "Forward", bool], Tuple[int, Union[ParseResults, Exception]] ] = {} + class _CacheType(dict): + """ + class to help type checking + """ + + not_in_cache: bool + + def get(self, *args): + ... + + def set(self, *args): + ... 
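A quick sketch of the non-consuming lookahead helpers annotated above: ``try_parse`` and ``can_parse_next`` probe whether an expression matches at a given offset without advancing the parse. The grammar and input strings below are illustrative only, not part of this patch::

    import pyparsing as pp

    integer = pp.Word(pp.nums)

    # probe at offset 4 of each string; leading whitespace at the probe
    # point is skipped as usual, and no input is consumed either way
    print(integer.can_parse_next("abc 123", 4))  # -> True
    print(integer.can_parse_next("abc xyz", 4))  # -> False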
+ # argument cache for optimizing repeated calls when backtracking through recursive expressions packrat_cache = ( - {} - ) # this is set later by enabled_packrat(); this is here so that reset_cache() doesn't fail + _CacheType() + ) # set later by enable_packrat(); this is here so that reset_cache() doesn't fail packrat_cache_lock = RLock() packrat_cache_stats = [0, 0] @@ -930,24 +978,25 @@ def _parseCache( ParserElement.packrat_cache_stats[HIT] += 1 if self.debug and self.debugActions.debug_try: try: - self.debugActions.debug_try(instring, loc, self, cache_hit=True) + self.debugActions.debug_try(instring, loc, self, cache_hit=True) # type: ignore [call-arg] except TypeError: pass if isinstance(value, Exception): if self.debug and self.debugActions.debug_fail: try: self.debugActions.debug_fail( - instring, loc, self, value, cache_hit=True + instring, loc, self, value, cache_hit=True # type: ignore [call-arg] ) except TypeError: pass raise value + value = cast(Tuple[int, ParseResults, int], value) loc_, result, endloc = value[0], value[1].copy(), value[2] if self.debug and self.debugActions.debug_match: try: self.debugActions.debug_match( - instring, loc_, endloc, self, result, cache_hit=True + instring, loc_, endloc, self, result, cache_hit=True # type: ignore [call-arg] ) except TypeError: pass @@ -1009,7 +1058,7 @@ def enable_left_recursion( Parameters: - - cache_size_limit - (default=``None``) - memoize at most this many + - ``cache_size_limit`` - (default=``None``) - memoize at most this many ``Forward`` elements during matching; if ``None`` (the default), memoize all ``Forward`` elements. @@ -1022,9 +1071,9 @@ def enable_left_recursion( elif ParserElement._packratEnabled: raise RuntimeError("Packrat and Bounded Recursion are not compatible") if cache_size_limit is None: - ParserElement.recursion_memos = _UnboundedMemo() + ParserElement.recursion_memos = _UnboundedMemo() # type: ignore[assignment] elif cache_size_limit > 0: - ParserElement.recursion_memos = _LRUMemo(capacity=cache_size_limit) + ParserElement.recursion_memos = _LRUMemo(capacity=cache_size_limit) # type: ignore[assignment] else: raise NotImplementedError("Memo size of %s" % cache_size_limit) ParserElement._left_recursion_enabled = True @@ -1040,7 +1089,7 @@ def enable_packrat(cache_size_limit: int = 128, *, force: bool = False) -> None: Parameters: - - cache_size_limit - (default= ``128``) - if an integer value is provided + - ``cache_size_limit`` - (default= ``128``) - if an integer value is provided will limit the size of the packrat cache; if None is passed, then the cache size will be unbounded; if 0 is passed, the cache will be effectively disabled. @@ -1070,7 +1119,7 @@ def enable_packrat(cache_size_limit: int = 128, *, force: bool = False) -> None: if cache_size_limit is None: ParserElement.packrat_cache = _UnboundedCache() else: - ParserElement.packrat_cache = _FifoCache(cache_size_limit) + ParserElement.packrat_cache = _FifoCache(cache_size_limit) # type: ignore[assignment] ParserElement._parse = ParserElement._parseCache def parse_string( @@ -1088,7 +1137,7 @@ def parse_string( an object with attributes if the given parser includes results names. If the input string is required to match the entire grammar, ``parse_all`` flag must be set to ``True``. This - is also equivalent to ending the grammar with :class:`StringEnd`(). + is also equivalent to ending the grammar with :class:`StringEnd`\\ (). 
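A minimal sketch of enabling the packrat cache wired up above (the grammar is illustrative). ``enable_packrat()`` swaps ``ParserElement._parse`` for the memoizing ``_parseCache``, so it is typically called once, right after import and before building any parsers::

    import pyparsing as pp

    # default cache_size_limit=128; None = unbounded, 0 = effectively disabled
    pp.ParserElement.enable_packrat()

    word = pp.Word(pp.alphas)
    # alternatives that re-parse the same leading `word` are served from the cache
    pair = word + "," + word | word + ";" + word
    print(pair.parse_string("alpha ; beta"))  # -> ['alpha', ';', 'beta']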
To report proper column numbers, ``parse_string`` operates on a copy of the input string where all tabs are converted to spaces (8 spaces per tab, as per the default in ``string.expandtabs``). If the input string @@ -1198,7 +1247,9 @@ def scan_string( try: while loc <= instrlen and matches < maxMatches: try: - preloc = preparseFn(instring, loc) + preloc: int = preparseFn(instring, loc) + nextLoc: int + tokens: ParseResults nextLoc, tokens = parseFn(instring, preloc, callPreParse=False) except ParseException: loc = preloc + 1 @@ -1352,7 +1403,7 @@ def split( def __add__(self, other) -> "ParserElement": """ Implementation of ``+`` operator - returns :class:`And`. Adding strings to a :class:`ParserElement` - converts them to :class:`Literal`s by default. + converts them to :class:`Literal`\\ s by default. Example:: @@ -1364,11 +1415,11 @@ def __add__(self, other) -> "ParserElement": Hello, World! -> ['Hello', ',', 'World', '!'] - ``...`` may be used as a parse expression as a short form of :class:`SkipTo`. + ``...`` may be used as a parse expression as a short form of :class:`SkipTo`:: Literal('start') + ... + Literal('end') - is equivalent to: + is equivalent to:: Literal('start') + SkipTo('end')("_skipped*") + Literal('end') @@ -1382,11 +1433,7 @@ def __add__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return And([self, other]) def __radd__(self, other) -> "ParserElement": @@ -1399,11 +1446,7 @@ def __radd__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return other + self def __sub__(self, other) -> "ParserElement": @@ -1413,11 +1456,7 @@ def __sub__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return self + And._ErrorStop() + other def __rsub__(self, other) -> "ParserElement": @@ -1427,11 +1466,7 @@ def __rsub__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return other - self def __mul__(self, other) -> "ParserElement": @@ -1440,11 +1475,12 @@ def __mul__(self, other) -> "ParserElement": ``expr + expr + expr``. Expressions may also be multiplied by a 2-integer tuple, similar to ``{min, max}`` multipliers in regular expressions. 
Tuples may also include ``None`` as in: + - ``expr*(n, None)`` or ``expr*(n, )`` is equivalent - to ``expr*n + ZeroOrMore(expr)`` - (read as "at least n instances of ``expr``") + to ``expr*n + ZeroOrMore(expr)`` + (read as "at least n instances of ``expr``") - ``expr*(None, n)`` is equivalent to ``expr*(0, n)`` - (read as "0 to n instances of ``expr``") + (read as "0 to n instances of ``expr``") - ``expr*(None, None)`` is equivalent to ``ZeroOrMore(expr)`` - ``expr*(1, None)`` is equivalent to ``OneOrMore(expr)`` @@ -1477,17 +1513,9 @@ def __mul__(self, other) -> "ParserElement": minElements, optElements = other optElements -= minElements else: - raise TypeError( - "cannot multiply ParserElement and ({}) objects".format( - ",".join(type(item).__name__ for item in other) - ) - ) + return NotImplemented else: - raise TypeError( - "cannot multiply ParserElement and {} objects".format( - type(other).__name__ - ) - ) + return NotImplemented if minElements < 0: raise ValueError("cannot multiply ParserElement by negative value") @@ -1531,13 +1559,12 @@ def __or__(self, other) -> "ParserElement": return _PendingSkip(self, must_skip=True) if isinstance(other, str_type): + # `expr | ""` is equivalent to `Opt(expr)` + if other == "": + return Opt(self) other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return MatchFirst([self, other]) def __ror__(self, other) -> "ParserElement": @@ -1547,11 +1574,7 @@ def __ror__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return other | self def __xor__(self, other) -> "ParserElement": @@ -1561,11 +1584,7 @@ def __xor__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return Or([self, other]) def __rxor__(self, other) -> "ParserElement": @@ -1575,11 +1594,7 @@ def __rxor__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return other ^ self def __and__(self, other) -> "ParserElement": @@ -1589,11 +1604,7 @@ def __and__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return Each([self, other]) def __rand__(self, other) -> "ParserElement": @@ -1603,11 +1614,7 @@ def __rand__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return other & self def __invert__(self) -> "ParserElement": @@ -1636,38 +1643,58 @@ def __getitem__(self, key): 
``None`` may be used in place of ``...``. - Note that ``expr[..., n]`` and ``expr[m, n]``do not raise an exception - if more than ``n`` ``expr``s exist in the input stream. If this behavior is + Note that ``expr[..., n]`` and ``expr[m, n]`` do not raise an exception + if more than ``n`` ``expr``\\ s exist in the input stream. If this behavior is desired, then write ``expr[..., n] + ~expr``. + + For repetition with a stop_on expression, use slice notation: + + - ``expr[...: end_expr]`` and ``expr[0, ...: end_expr]`` are equivalent to ``ZeroOrMore(expr, stop_on=end_expr)`` + - ``expr[1, ...: end_expr]`` is equivalent to ``OneOrMore(expr, stop_on=end_expr)`` + """ + stop_on_defined = False + stop_on = NoMatch() + if isinstance(key, slice): + key, stop_on = key.start, key.stop + if key is None: + key = ... + stop_on_defined = True + elif isinstance(key, tuple) and isinstance(key[-1], slice): + key, stop_on = (key[0], key[1].start), key[1].stop + stop_on_defined = True + # convert single arg keys to tuples + if isinstance(key, str_type): + key = (key,) try: - if isinstance(key, str_type): - key = (key,) iter(key) except TypeError: key = (key, key) if len(key) > 2: raise TypeError( - "only 1 or 2 index arguments supported ({}{})".format( - key[:5], "... [{}]".format(len(key)) if len(key) > 5 else "" - ) + f"only 1 or 2 index arguments supported ({key[:5]}{f'... [{len(key)}]' if len(key) > 5 else ''})" ) # clip to 2 elements ret = self * tuple(key[:2]) + ret = typing.cast(_MultipleMatch, ret) + + if stop_on_defined: + ret.stopOn(stop_on) + return ret - def __call__(self, name: str = None) -> "ParserElement": + def __call__(self, name: typing.Optional[str] = None) -> "ParserElement": """ Shortcut for :class:`set_results_name`, with ``list_all_matches=False``. If ``name`` is given with a trailing ``'*'`` character, then ``list_all_matches`` will be passed as ``True``. - If ``name` is omitted, same as calling :class:`copy`. + If ``name`` is omitted, same as calling :class:`copy`. Example:: @@ -1775,17 +1802,18 @@ def set_debug_actions( should have the signature ``fn(input_string: str, location: int, expression: ParserElement, exception: Exception, cache_hit: bool)`` """ self.debugActions = self.DebugActions( - start_action or _default_start_debug_action, - success_action or _default_success_debug_action, - exception_action or _default_exception_debug_action, + start_action or _default_start_debug_action, # type: ignore[truthy-function] + success_action or _default_success_debug_action, # type: ignore[truthy-function] + exception_action or _default_exception_debug_action, # type: ignore[truthy-function] ) self.debug = True return self - def set_debug(self, flag: bool = True) -> "ParserElement": + def set_debug(self, flag: bool = True, recurse: bool = False) -> "ParserElement": """ Enable display of debugging messages while doing pattern matching. Set ``flag`` to ``True`` to enable, ``False`` to disable. + Set ``recurse`` to ``True`` to set the debug flag on this expression and all sub-expressions. Example:: @@ -1819,6 +1847,11 @@ def set_debug(self, flag: bool = True) -> "ParserElement": which makes debugging and exception messages easier to understand - for instance, the default name created for the :class:`Word` expression without calling ``set_name`` is ``"W:(A-Za-z)"``. 
""" + if recurse: + for expr in self.visit_all(): + expr.set_debug(flag, recurse=False) + return self + if flag: self.set_debug_actions( _default_start_debug_action, @@ -1836,7 +1869,7 @@ def default_name(self) -> str: return self._defaultName @abstractmethod - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: """ Child classes must define this method, which defines how the ``default_name`` is set. """ @@ -1844,7 +1877,9 @@ def _generateDefaultName(self): def set_name(self, name: str) -> "ParserElement": """ Define name for this expression, makes debugging and exception messages clearer. + Example:: + Word(nums).parse_string("ABC") # -> Exception: Expected W:(0-9) (at char 0), (line:1, col:1) Word(nums).set_name("integer").parse_string("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) """ @@ -1870,7 +1905,7 @@ def streamline(self) -> "ParserElement": self._defaultName = None return self - def recurse(self) -> Sequence["ParserElement"]: + def recurse(self) -> List["ParserElement"]: return [] def _checkRecursion(self, parseElementList): @@ -1882,6 +1917,11 @@ def validate(self, validateTrace=None) -> None: """ Check defined expressions for valid structure, check for infinite recursive definitions. """ + warnings.warn( + "ParserElement.validate() is deprecated, and should not be used to check for left recursion", + DeprecationWarning, + stacklevel=2, + ) self._checkRecursion([]) def parse_file( @@ -1899,8 +1939,10 @@ def parse_file( """ parseAll = parseAll or parse_all try: + file_or_filename = typing.cast(TextIO, file_or_filename) file_contents = file_or_filename.read() except AttributeError: + file_or_filename = typing.cast(str, file_or_filename) with open(file_or_filename, "r", encoding=encoding) as f: file_contents = f.read() try: @@ -1932,6 +1974,7 @@ def matches( inline microtests of sub expressions while building up larger parser. Parameters: + - ``test_string`` - to test against this expression for a match - ``parse_all`` - (default= ``True``) - flag to pass to :class:`parse_string` when running tests @@ -1955,7 +1998,7 @@ def run_tests( full_dump: bool = True, print_results: bool = True, failure_tests: bool = False, - post_parse: Callable[[str, ParseResults], str] = None, + post_parse: typing.Optional[Callable[[str, ParseResults], str]] = None, file: typing.Optional[TextIO] = None, with_line_numbers: bool = False, *, @@ -1963,7 +2006,7 @@ def run_tests( fullDump: bool = True, printResults: bool = True, failureTests: bool = False, - postParse: Callable[[str, ParseResults], str] = None, + postParse: typing.Optional[Callable[[str, ParseResults], str]] = None, ) -> Tuple[bool, List[Tuple[str, Union[ParseResults, Exception]]]]: """ Execute the parse expression on a series of test strings, showing each @@ -1971,6 +2014,7 @@ def run_tests( run a parse expression against a list of sample strings. 
Parameters: + - ``tests`` - a list of separate test strings, or a multiline string of test strings - ``parse_all`` - (default= ``True``) - flag to pass to :class:`parse_string` when running tests - ``comment`` - (default= ``'#'``) - expression for indicating embedded comments in the test @@ -2067,22 +2111,27 @@ def run_tests( failureTests = failureTests or failure_tests postParse = postParse or post_parse if isinstance(tests, str_type): + tests = typing.cast(str, tests) line_strip = type(tests).strip tests = [line_strip(test_line) for test_line in tests.rstrip().splitlines()] - if isinstance(comment, str_type): - comment = Literal(comment) + comment_specified = comment is not None + if comment_specified: + if isinstance(comment, str_type): + comment = typing.cast(str, comment) + comment = Literal(comment) + comment = typing.cast(ParserElement, comment) if file is None: file = sys.stdout print_ = file.write result: Union[ParseResults, Exception] - allResults = [] - comments = [] + allResults: List[Tuple[str, Union[ParseResults, Exception]]] = [] + comments: List[str] = [] success = True NL = Literal(r"\n").add_parse_action(replace_with("\n")).ignore(quoted_string) BOM = "\ufeff" for t in tests: - if comment is not None and comment.matches(t, False) or comments and not t: + if comment_specified and comment.matches(t, False) or comments and not t: comments.append( pyparsing_test.with_line_numbers(t) if with_line_numbers else t ) @@ -2107,7 +2156,7 @@ def run_tests( success = success and failureTests result = pe except Exception as exc: - out.append("FAIL-EXCEPTION: {}: {}".format(type(exc).__name__, exc)) + out.append(f"FAIL-EXCEPTION: {type(exc).__name__}: {exc}") if ParserElement.verbose_stacktrace: out.extend(traceback.format_tb(exc.__traceback__)) success = success and failureTests @@ -2127,9 +2176,7 @@ def run_tests( except Exception as e: out.append(result.dump(full=fullDump)) out.append( - "{} failed: {}: {}".format( - postParse.__name__, type(e).__name__, e - ) + f"{postParse.__name__} failed: {type(e).__name__}: {e}" ) else: out.append(result.dump(full=fullDump)) @@ -2148,19 +2195,28 @@ def create_diagram( vertical: int = 3, show_results_names: bool = False, show_groups: bool = False, + embed: bool = False, **kwargs, ) -> None: """ Create a railroad diagram for the parser. 
Parameters: - - output_html (str or file-like object) - output target for generated + + - ``output_html`` (str or file-like object) - output target for generated diagram HTML - - vertical (int) - threshold for formatting multiple alternatives vertically + - ``vertical`` (int) - threshold for formatting multiple alternatives vertically instead of horizontally (default=3) - - show_results_names - bool flag whether diagram should show annotations for + - ``show_results_names`` - bool flag whether diagram should show annotations for defined results names - - show_groups - bool flag whether groups should be highlighted with an unlabeled surrounding box + - ``show_groups`` - bool flag whether groups should be highlighted with an unlabeled surrounding box + - ``embed`` - bool flag whether generated HTML should omit <HTML>, <HEAD>, and <BODY> tags to embed + the resulting HTML in an enclosing HTML source + - ``head`` - str containing additional HTML to insert into the <HEAD> section of the generated code; + can be used to insert custom CSS styling + - ``body`` - str containing additional HTML to insert at the beginning of the <BODY> section of the + generated code + Additional diagram-formatting keyword arguments can also be included; see railroad.Diagram class. """ @@ -2183,38 +2239,93 @@ def create_diagram( ) if isinstance(output_html, (str, Path)): with open(output_html, "w", encoding="utf-8") as diag_file: - diag_file.write(railroad_to_html(railroad)) + diag_file.write(railroad_to_html(railroad, embed=embed, **kwargs)) else: # we were passed a file-like object, just write to it - output_html.write(railroad_to_html(railroad)) - - setDefaultWhitespaceChars = set_default_whitespace_chars - inlineLiteralsUsing = inline_literals_using - setResultsName = set_results_name - setBreak = set_break - setParseAction = set_parse_action - addParseAction = add_parse_action - addCondition = add_condition - setFailAction = set_fail_action - tryParse = try_parse + output_html.write(railroad_to_html(railroad, embed=embed, **kwargs)) + + # Compatibility synonyms + # fmt: off + @staticmethod + @replaced_by_pep8(inline_literals_using) + def inlineLiteralsUsing(): ... + + @staticmethod + @replaced_by_pep8(set_default_whitespace_chars) + def setDefaultWhitespaceChars(): ... + + @replaced_by_pep8(set_results_name) + def setResultsName(self): ... + + @replaced_by_pep8(set_break) + def setBreak(self): ... + + @replaced_by_pep8(set_parse_action) + def setParseAction(self): ... + + @replaced_by_pep8(add_parse_action) + def addParseAction(self): ... + + @replaced_by_pep8(add_condition) + def addCondition(self): ... + + @replaced_by_pep8(set_fail_action) + def setFailAction(self): ... + + @replaced_by_pep8(try_parse) + def tryParse(self): ... + + @staticmethod + @replaced_by_pep8(enable_left_recursion) + def enableLeftRecursion(): ... + + @staticmethod + @replaced_by_pep8(enable_packrat) + def enablePackrat(): ... + + @replaced_by_pep8(parse_string) + def parseString(self): ... + + @replaced_by_pep8(scan_string) + def scanString(self): ... + + @replaced_by_pep8(transform_string) + def transformString(self): ... + + @replaced_by_pep8(search_string) + def searchString(self): ... + + @replaced_by_pep8(ignore_whitespace) + def ignoreWhitespace(self): ... + + @replaced_by_pep8(leave_whitespace) + def leaveWhitespace(self): ... + + @replaced_by_pep8(set_whitespace_chars) + def setWhitespaceChars(self): ... + + @replaced_by_pep8(parse_with_tabs) + def parseWithTabs(self): ... + + @replaced_by_pep8(set_debug_actions) + def setDebugActions(self): ...
+ + @replaced_by_pep8(set_debug) + def setDebug(self): ... + + @replaced_by_pep8(set_name) + def setName(self): ... + + @replaced_by_pep8(parse_file) + def parseFile(self): ... + + @replaced_by_pep8(run_tests) + def runTests(self): ... + canParseNext = can_parse_next resetCache = reset_cache - enableLeftRecursion = enable_left_recursion - enablePackrat = enable_packrat - parseString = parse_string - scanString = scan_string - searchString = search_string - transformString = transform_string - setWhitespaceChars = set_whitespace_chars - parseWithTabs = parse_with_tabs - setDebugActions = set_debug_actions - setDebug = set_debug defaultName = default_name - setName = set_name - parseFile = parse_file - runTests = run_tests - ignoreWhitespace = ignore_whitespace - leaveWhitespace = leave_whitespace + # fmt: on class _PendingSkip(ParserElement): @@ -2225,7 +2336,7 @@ def __init__(self, expr: ParserElement, must_skip: bool = False): self.anchor = expr self.must_skip = must_skip - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return str(self.anchor + Empty()).replace("Empty", "...") def __add__(self, other) -> "ParserElement": @@ -2266,21 +2377,10 @@ class Token(ParserElement): def __init__(self): super().__init__(savelist=False) - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return type(self).__name__ -class Empty(Token): - """ - An empty token, will always match. - """ - - def __init__(self): - super().__init__() - self.mayReturnEmpty = True - self.mayIndexError = False - - class NoMatch(Token): """ A token that will never match. @@ -2312,25 +2412,33 @@ class Literal(Token): use :class:`Keyword` or :class:`CaselessKeyword`. """ + def __new__(cls, match_string: str = "", *, matchString: str = ""): + # Performance tuning: select a subclass with optimized parseImpl + if cls is Literal: + match_string = matchString or match_string + if not match_string: + return super().__new__(Empty) + if len(match_string) == 1: + return super().__new__(_SingleCharLiteral) + + # Default behavior + return super().__new__(cls) + + # Needed to make copy.copy() work correctly if we customize __new__ + def __getnewargs__(self): + return (self.match,) + def __init__(self, match_string: str = "", *, matchString: str = ""): super().__init__() match_string = matchString or match_string self.match = match_string self.matchLen = len(match_string) - try: - self.firstMatchChar = match_string[0] - except IndexError: - raise ValueError("null string passed to Literal; use Empty() instead") + self.firstMatchChar = match_string[:1] self.errmsg = "Expected " + self.name self.mayReturnEmpty = False self.mayIndexError = False - # Performance tuning: modify __class__ to select - # a parseImpl optimized for single-character check - if self.matchLen == 1 and type(self) is Literal: - self.__class__ = _SingleCharLiteral - - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return repr(self.match) def parseImpl(self, instring, loc, doActions=True): @@ -2341,6 +2449,23 @@ def parseImpl(self, instring, loc, doActions=True): raise ParseException(instring, loc, self.errmsg, self) +class Empty(Literal): + """ + An empty token, will always match. 
+ """ + + def __init__(self, match_string="", *, matchString=""): + super().__init__("") + self.mayReturnEmpty = True + self.mayIndexError = False + + def _generateDefaultName(self) -> str: + return "Empty" + + def parseImpl(self, instring, loc, doActions=True): + return loc, [] + + class _SingleCharLiteral(Literal): def parseImpl(self, instring, loc, doActions=True): if instring[loc] == self.firstMatchChar: @@ -2354,8 +2479,8 @@ def parseImpl(self, instring, loc, doActions=True): class Keyword(Token): """ Token to exactly match a specified string as a keyword, that is, - it must be immediately followed by a non-keyword character. Compare - with :class:`Literal`: + it must be immediately preceded and followed by whitespace or + non-keyword characters. Compare with :class:`Literal`: - ``Literal("if")`` will match the leading ``'if'`` in ``'ifAndOnlyIf'``. @@ -2365,7 +2490,7 @@ class Keyword(Token): Accepts two optional constructor arguments in addition to the keyword string: - - ``identChars`` is a string of characters that would be valid + - ``ident_chars`` is a string of characters that would be valid identifier characters, defaulting to all alphanumerics + "_" and "$" - ``caseless`` allows case-insensitive matching, default is ``False``. @@ -2400,7 +2525,7 @@ def __init__( self.firstMatchChar = match_string[0] except IndexError: raise ValueError("null string passed to Keyword; use Empty() instead") - self.errmsg = "Expected {} {}".format(type(self).__name__, self.name) + self.errmsg = f"Expected {type(self).__name__} {self.name}" self.mayReturnEmpty = False self.mayIndexError = False self.caseless = caseless @@ -2409,7 +2534,7 @@ def __init__( identChars = identChars.upper() self.identChars = set(identChars) - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return repr(self.match) def parseImpl(self, instring, loc, doActions=True): @@ -2559,7 +2684,7 @@ class CloseMatch(Token): def __init__( self, match_string: str, - max_mismatches: int = None, + max_mismatches: typing.Optional[int] = None, *, maxMismatches: int = 1, caseless=False, @@ -2568,15 +2693,13 @@ def __init__( super().__init__() self.match_string = match_string self.maxMismatches = maxMismatches - self.errmsg = "Expected {!r} (with up to {} mismatches)".format( - self.match_string, self.maxMismatches - ) + self.errmsg = f"Expected {self.match_string!r} (with up to {self.maxMismatches} mismatches)" self.caseless = caseless self.mayIndexError = False self.mayReturnEmpty = False - def _generateDefaultName(self): - return "{}:{!r}".format(type(self).__name__, self.match_string) + def _generateDefaultName(self) -> str: + return f"{type(self).__name__}:{self.match_string!r}" def parseImpl(self, instring, loc, doActions=True): start = loc @@ -2612,7 +2735,9 @@ def parseImpl(self, instring, loc, doActions=True): class Word(Token): """Token for matching words composed of allowed character sets. 
+ Parameters: + - ``init_chars`` - string of all characters that should be used to match as a word; "ABC" will match "AAA", "ABAB", "CBAC", etc.; if ``body_chars`` is also specified, then this is the string of @@ -2697,26 +2822,24 @@ def __init__( super().__init__() if not initChars: raise ValueError( - "invalid {}, initChars cannot be empty string".format( - type(self).__name__ - ) + f"invalid {type(self).__name__}, initChars cannot be empty string" ) - initChars = set(initChars) - self.initChars = initChars + initChars_set = set(initChars) if excludeChars: - excludeChars = set(excludeChars) - initChars -= excludeChars + excludeChars_set = set(excludeChars) + initChars_set -= excludeChars_set if bodyChars: - bodyChars = set(bodyChars) - excludeChars - self.initCharsOrig = "".join(sorted(initChars)) + bodyChars = "".join(set(bodyChars) - excludeChars_set) + self.initChars = initChars_set + self.initCharsOrig = "".join(sorted(initChars_set)) if bodyChars: - self.bodyCharsOrig = "".join(sorted(bodyChars)) self.bodyChars = set(bodyChars) + self.bodyCharsOrig = "".join(sorted(bodyChars)) else: - self.bodyCharsOrig = "".join(sorted(initChars)) - self.bodyChars = set(initChars) + self.bodyChars = initChars_set + self.bodyCharsOrig = self.initCharsOrig self.maxSpecified = max > 0 @@ -2725,6 +2848,11 @@ def __init__( "cannot specify a minimum length < 1; use Opt(Word()) if zero-length word is permitted" ) + if self.maxSpecified and min > max: + raise ValueError( + f"invalid args, if min and max both specified min must be <= max (min={min}, max={max})" + ) + self.minLen = min if max > 0: @@ -2733,62 +2861,66 @@ def __init__( self.maxLen = _MAX_INT if exact > 0: + min = max = exact self.maxLen = exact self.minLen = exact self.errmsg = "Expected " + self.name self.mayIndexError = False self.asKeyword = asKeyword + if self.asKeyword: + self.errmsg += " as a keyword" # see if we can make a regex for this Word - if " " not in self.initChars | self.bodyChars and (min == 1 and exact == 0): + if " " not in (self.initChars | self.bodyChars): + if len(self.initChars) == 1: + re_leading_fragment = re.escape(self.initCharsOrig) + else: + re_leading_fragment = f"[{_collapse_string_to_ranges(self.initChars)}]" + if self.bodyChars == self.initChars: if max == 0: repeat = "+" elif max == 1: repeat = "" else: - repeat = "{{{},{}}}".format( - self.minLen, "" if self.maxLen == _MAX_INT else self.maxLen - ) - self.reString = "[{}]{}".format( - _collapse_string_to_ranges(self.initChars), - repeat, - ) - elif len(self.initChars) == 1: - if max == 0: - repeat = "*" - else: - repeat = "{{0,{}}}".format(max - 1) - self.reString = "{}[{}]{}".format( - re.escape(self.initCharsOrig), - _collapse_string_to_ranges(self.bodyChars), - repeat, - ) + if self.minLen != self.maxLen: + repeat = f"{{{self.minLen},{'' if self.maxLen == _MAX_INT else self.maxLen}}}" + else: + repeat = f"{{{self.minLen}}}" + self.reString = f"{re_leading_fragment}{repeat}" else: - if max == 0: - repeat = "*" - elif max == 2: + if max == 1: + re_body_fragment = "" repeat = "" else: - repeat = "{{0,{}}}".format(max - 1) - self.reString = "[{}][{}]{}".format( - _collapse_string_to_ranges(self.initChars), - _collapse_string_to_ranges(self.bodyChars), - repeat, + re_body_fragment = f"[{_collapse_string_to_ranges(self.bodyChars)}]" + if max == 0: + repeat = "*" + elif max == 2: + repeat = "?" 
if min <= 1 else "" + else: + if min != max: + repeat = f"{{{min - 1 if min > 0 else 0},{max - 1}}}" + else: + repeat = f"{{{min - 1 if min > 0 else 0}}}" + + self.reString = ( + f"{re_leading_fragment}" f"{re_body_fragment}" f"{repeat}" ) + if self.asKeyword: - self.reString = r"\b" + self.reString + r"\b" + self.reString = rf"\b{self.reString}\b" try: self.re = re.compile(self.reString) except re.error: - self.re = None + self.re = None # type: ignore[assignment] else: self.re_match = self.re.match - self.__class__ = _WordRegex + self.parseImpl = self.parseImpl_regex # type: ignore[assignment] - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: def charsAsStr(s): max_repr_len = 16 s = _collapse_string_to_ranges(s, re_escape=False) @@ -2798,11 +2930,9 @@ def charsAsStr(s): return s if self.initChars != self.bodyChars: - base = "W:({}, {})".format( - charsAsStr(self.initChars), charsAsStr(self.bodyChars) - ) + base = f"W:({charsAsStr(self.initChars)}, {charsAsStr(self.bodyChars)})" else: - base = "W:({})".format(charsAsStr(self.initChars)) + base = f"W:({charsAsStr(self.initChars)})" # add length specification if self.minLen > 1 or self.maxLen != _MAX_INT: @@ -2810,11 +2940,11 @@ def charsAsStr(s): if self.minLen == 1: return base[2:] else: - return base + "{{{}}}".format(self.minLen) + return base + f"{{{self.minLen}}}" elif self.maxLen == _MAX_INT: - return base + "{{{},...}}".format(self.minLen) + return base + f"{{{self.minLen},...}}" else: - return base + "{{{},{}}}".format(self.minLen, self.maxLen) + return base + f"{{{self.minLen},{self.maxLen}}}" return base def parseImpl(self, instring, loc, doActions=True): @@ -2849,9 +2979,7 @@ def parseImpl(self, instring, loc, doActions=True): return loc, instring[start:loc] - -class _WordRegex(Word): - def parseImpl(self, instring, loc, doActions=True): + def parseImpl_regex(self, instring, loc, doActions=True): result = self.re_match(instring, loc) if not result: raise ParseException(instring, loc, self.errmsg, self) @@ -2860,7 +2988,7 @@ def parseImpl(self, instring, loc, doActions=True): return loc, result.group() -class Char(_WordRegex): +class Char(Word): """A short-cut class for defining :class:`Word` ``(characters, exact=1)``, when defining a match of any single character in a string of characters. 
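For context on the regex-synthesis rework above, a small sketch of the ``Word`` call patterns it has to cover; the examples are illustrative::

    import pyparsing as pp

    # distinct leading and body character sets
    identifier = pp.Word(pp.alphas + "_", pp.alphanums + "_")
    print(identifier.parse_string("_private1"))  # -> ['_private1']

    # min/max bounds become a bounded repetition in the generated regex
    short_num = pp.Word(pp.nums, min=2, max=4)
    print(short_num.parse_string("12345"))       # -> ['1234'] (stops at max)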
@@ -2878,13 +3006,8 @@ def __init__( asKeyword = asKeyword or as_keyword excludeChars = excludeChars or exclude_chars super().__init__( - charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars + charset, exact=1, as_keyword=asKeyword, exclude_chars=excludeChars ) - self.reString = "[{}]".format(_collapse_string_to_ranges(self.initChars)) - if asKeyword: - self.reString = r"\b{}\b".format(self.reString) - self.re = re.compile(self.reString) - self.re_match = self.re.match class Regex(Token): @@ -2954,9 +3077,9 @@ def __init__( self.asGroupList = asGroupList self.asMatch = asMatch if self.asGroupList: - self.parseImpl = self.parseImplAsGroupList + self.parseImpl = self.parseImplAsGroupList # type: ignore [assignment] if self.asMatch: - self.parseImpl = self.parseImplAsMatch + self.parseImpl = self.parseImplAsMatch # type: ignore [assignment] @cached_property def re(self): @@ -2966,9 +3089,7 @@ def re(self): try: return re.compile(self.pattern, self.flags) except re.error: - raise ValueError( - "invalid pattern ({!r}) passed to Regex".format(self.pattern) - ) + raise ValueError(f"invalid pattern ({self.pattern!r}) passed to Regex") @cached_property def re_match(self): @@ -2978,7 +3099,7 @@ def re_match(self): def mayReturnEmpty(self): return self.re_match("") is not None - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "Re:({})".format(repr(self.pattern).replace("\\\\", "\\")) def parseImpl(self, instring, loc, doActions=True): @@ -3024,10 +3145,12 @@ def sub(self, repl: str) -> ParserElement: # prints "

<h1>main title</h1>
" """ if self.asGroupList: - raise TypeError("cannot use sub() with Regex(asGroupList=True)") + raise TypeError("cannot use sub() with Regex(as_group_list=True)") if self.asMatch and callable(repl): - raise TypeError("cannot use sub() with a callable with Regex(asMatch=True)") + raise TypeError( + "cannot use sub() with a callable with Regex(as_match=True)" + ) if self.asMatch: @@ -3081,7 +3204,7 @@ class QuotedString(Token): [['This is the "quote"']] [['This is the quote with "embedded" quotes']] """ - ws_map = ((r"\t", "\t"), (r"\n", "\n"), (r"\f", "\f"), (r"\r", "\r")) + ws_map = dict(((r"\t", "\t"), (r"\n", "\n"), (r"\f", "\f"), (r"\r", "\r"))) def __init__( self, @@ -3120,57 +3243,54 @@ def __init__( else: endQuoteChar = endQuoteChar.strip() if not endQuoteChar: - raise ValueError("endQuoteChar cannot be the empty string") - - self.quoteChar = quote_char - self.quoteCharLen = len(quote_char) - self.firstQuoteChar = quote_char[0] - self.endQuoteChar = endQuoteChar - self.endQuoteCharLen = len(endQuoteChar) - self.escChar = escChar - self.escQuote = escQuote - self.unquoteResults = unquoteResults - self.convertWhitespaceEscapes = convertWhitespaceEscapes + raise ValueError("end_quote_char cannot be the empty string") + + self.quoteChar: str = quote_char + self.quoteCharLen: int = len(quote_char) + self.firstQuoteChar: str = quote_char[0] + self.endQuoteChar: str = endQuoteChar + self.endQuoteCharLen: int = len(endQuoteChar) + self.escChar: str = escChar or "" + self.escQuote: str = escQuote or "" + self.unquoteResults: bool = unquoteResults + self.convertWhitespaceEscapes: bool = convertWhitespaceEscapes + self.multiline = multiline sep = "" inner_pattern = "" if escQuote: - inner_pattern += r"{}(?:{})".format(sep, re.escape(escQuote)) + inner_pattern += rf"{sep}(?:{re.escape(escQuote)})" sep = "|" if escChar: - inner_pattern += r"{}(?:{}.)".format(sep, re.escape(escChar)) + inner_pattern += rf"{sep}(?:{re.escape(escChar)}.)" sep = "|" - self.escCharReplacePattern = re.escape(self.escChar) + "(.)" + self.escCharReplacePattern = re.escape(escChar) + "(.)" if len(self.endQuoteChar) > 1: inner_pattern += ( - "{}(?:".format(sep) + f"{sep}(?:" + "|".join( - "(?:{}(?!{}))".format( - re.escape(self.endQuoteChar[:i]), - re.escape(self.endQuoteChar[i:]), - ) + f"(?:{re.escape(self.endQuoteChar[:i])}(?!{re.escape(self.endQuoteChar[i:])}))" for i in range(len(self.endQuoteChar) - 1, 0, -1) ) + ")" ) sep = "|" + self.flags = re.RegexFlag(0) + if multiline: self.flags = re.MULTILINE | re.DOTALL - inner_pattern += r"{}(?:[^{}{}])".format( - sep, - _escape_regex_range_chars(self.endQuoteChar[0]), - (_escape_regex_range_chars(escChar) if escChar is not None else ""), + inner_pattern += ( + rf"{sep}(?:[^{_escape_regex_range_chars(self.endQuoteChar[0])}" + rf"{(_escape_regex_range_chars(escChar) if escChar is not None else '')}])" ) else: - self.flags = 0 - inner_pattern += r"{}(?:[^{}\n\r{}])".format( - sep, - _escape_regex_range_chars(self.endQuoteChar[0]), - (_escape_regex_range_chars(escChar) if escChar is not None else ""), + inner_pattern += ( + rf"{sep}(?:[^{_escape_regex_range_chars(self.endQuoteChar[0])}\n\r" + rf"{(_escape_regex_range_chars(escChar) if escChar is not None else '')}])" ) self.pattern = "".join( @@ -3183,26 +3303,33 @@ def __init__( ] ) + if self.unquoteResults: + if self.convertWhitespaceEscapes: + self.unquote_scan_re = re.compile( + rf"({'|'.join(re.escape(k) for k in self.ws_map)})|({re.escape(self.escChar)}.)|(\n|.)", + flags=self.flags, + ) + else: + self.unquote_scan_re = 
re.compile( + rf"({re.escape(self.escChar)}.)|(\n|.)", flags=self.flags + ) + try: self.re = re.compile(self.pattern, self.flags) self.reString = self.pattern self.re_match = self.re.match except re.error: - raise ValueError( - "invalid pattern {!r} passed to Regex".format(self.pattern) - ) + raise ValueError(f"invalid pattern {self.pattern!r} passed to Regex") self.errmsg = "Expected " + self.name self.mayIndexError = False self.mayReturnEmpty = True - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: if self.quoteChar == self.endQuoteChar and isinstance(self.quoteChar, str_type): - return "string enclosed in {!r}".format(self.quoteChar) + return f"string enclosed in {self.quoteChar!r}" - return "quoted string, starting with {} ending with {}".format( - self.quoteChar, self.endQuoteChar - ) + return f"quoted string, starting with {self.quoteChar} ending with {self.endQuoteChar}" def parseImpl(self, instring, loc, doActions=True): result = ( @@ -3217,19 +3344,24 @@ def parseImpl(self, instring, loc, doActions=True): ret = result.group() if self.unquoteResults: - # strip off quotes ret = ret[self.quoteCharLen : -self.endQuoteCharLen] if isinstance(ret, str_type): - # replace escaped whitespace - if "\\" in ret and self.convertWhitespaceEscapes: - for wslit, wschar in self.ws_map: - ret = ret.replace(wslit, wschar) - - # replace escaped characters - if self.escChar: - ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) + if self.convertWhitespaceEscapes: + ret = "".join( + self.ws_map[match.group(1)] + if match.group(1) + else match.group(2)[-1] + if match.group(2) + else match.group(3) + for match in self.unquote_scan_re.finditer(ret) + ) + else: + ret = "".join( + match.group(1)[-1] if match.group(1) else match.group(2) + for match in self.unquote_scan_re.finditer(ret) + ) # replace escaped quotes if self.escQuote: @@ -3252,7 +3384,7 @@ class CharsNotIn(Token): # define a comma-separated-value as anything that is not a ',' csv_value = CharsNotIn(',') - print(delimited_list(csv_value).parse_string("dkls,lsdkjf,s12 34,@!#,213")) + print(DelimitedList(csv_value).parse_string("dkls,lsdkjf,s12 34,@!#,213")) prints:: @@ -3294,12 +3426,12 @@ def __init__( self.mayReturnEmpty = self.minLen == 0 self.mayIndexError = False - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: not_chars_str = _collapse_string_to_ranges(self.notChars) if len(not_chars_str) > 16: - return "!W:({}...)".format(self.notChars[: 16 - 3]) + return f"!W:({self.notChars[: 16 - 3]}...)" else: - return "!W:({})".format(self.notChars) + return f"!W:({self.notChars})" def parseImpl(self, instring, loc, doActions=True): notchars = self.notCharsSet @@ -3376,7 +3508,7 @@ def __init__(self, ws: str = " \t\r\n", min: int = 1, max: int = 0, exact: int = self.maxLen = exact self.minLen = exact - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "".join(White.whiteStrs[c] for c in self.matchWhite) def parseImpl(self, instring, loc, doActions=True): @@ -3411,7 +3543,7 @@ def __init__(self, colno: int): super().__init__() self.col = colno - def preParse(self, instring, loc): + def preParse(self, instring: str, loc: int) -> int: if col(loc, instring) != self.col: instrlen = len(instring) if self.ignoreExprs: @@ -3446,7 +3578,7 @@ class LineStart(PositionToken): B AAA and definitely not this one ''' - for t in (LineStart() + 'AAA' + restOfLine).search_string(test): + for t in (LineStart() + 'AAA' + rest_of_line).search_string(test): print(t) prints:: @@ -3464,7 
+3596,7 @@ def __init__(self): self.skipper = Empty().set_whitespace_chars(self.whiteChars) self.errmsg = "Expected start of line" - def preParse(self, instring, loc): + def preParse(self, instring: str, loc: int) -> int: if loc == 0: return loc else: @@ -3624,7 +3756,7 @@ def __init__(self, exprs: typing.Iterable[ParserElement], savelist: bool = False self.exprs = [exprs] self.callPreparse = False - def recurse(self) -> Sequence[ParserElement]: + def recurse(self) -> List[ParserElement]: return self.exprs[:] def append(self, other) -> ParserElement: @@ -3669,8 +3801,8 @@ def ignore(self, other) -> ParserElement: e.ignore(self.ignoreExprs[-1]) return self - def _generateDefaultName(self): - return "{}:({})".format(self.__class__.__name__, str(self.exprs)) + def _generateDefaultName(self) -> str: + return f"{self.__class__.__name__}:({str(self.exprs)})" def streamline(self) -> ParserElement: if self.streamlined: @@ -3714,6 +3846,11 @@ def streamline(self) -> ParserElement: return self def validate(self, validateTrace=None) -> None: + warnings.warn( + "ParserElement.validate() is deprecated, and should not be used to check for left recursion", + DeprecationWarning, + stacklevel=2, + ) tmp = (validateTrace if validateTrace is not None else [])[:] + [self] for e in self.exprs: e.validate(tmp) @@ -3721,6 +3858,7 @@ def validate(self, validateTrace=None) -> None: def copy(self) -> ParserElement: ret = super().copy() + ret = typing.cast(ParseExpression, ret) ret.exprs = [e.copy() for e in self.exprs] return ret @@ -3750,8 +3888,14 @@ def _setResultsName(self, name, listAllMatches=False): return super()._setResultsName(name, listAllMatches) - ignoreWhitespace = ignore_whitespace - leaveWhitespace = leave_whitespace + # Compatibility synonyms + # fmt: off + @replaced_by_pep8(leave_whitespace) + def leaveWhitespace(self): ... + + @replaced_by_pep8(ignore_whitespace) + def ignoreWhitespace(self): ... 
+ # fmt: on class And(ParseExpression): @@ -3777,7 +3921,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.leave_whitespace() - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "-" def __init__( @@ -3789,7 +3933,9 @@ def __init__( for i, expr in enumerate(exprs): if expr is Ellipsis: if i < len(exprs) - 1: - skipto_arg: ParserElement = (Empty() + exprs[i + 1]).exprs[-1] + skipto_arg: ParserElement = typing.cast( + ParseExpression, (Empty() + exprs[i + 1]) + ).exprs[-1] tmp.append(SkipTo(skipto_arg)("_skipped*")) else: raise Exception( @@ -3822,8 +3968,9 @@ def streamline(self) -> ParserElement: and isinstance(e.exprs[-1], _PendingSkip) for e in self.exprs[:-1] ): + deleted_expr_marker = NoMatch() for i, e in enumerate(self.exprs[:-1]): - if e is None: + if e is deleted_expr_marker: continue if ( isinstance(e, ParseExpression) @@ -3831,17 +3978,19 @@ def streamline(self) -> ParserElement: and isinstance(e.exprs[-1], _PendingSkip) ): e.exprs[-1] = e.exprs[-1] + self.exprs[i + 1] - self.exprs[i + 1] = None - self.exprs = [e for e in self.exprs if e is not None] + self.exprs[i + 1] = deleted_expr_marker + self.exprs = [e for e in self.exprs if e is not deleted_expr_marker] super().streamline() # link any IndentedBlocks to the prior expression + prev: ParserElement + cur: ParserElement for prev, cur in zip(self.exprs, self.exprs[1:]): # traverse cur or any first embedded expr of cur looking for an IndentedBlock # (but watch out for recursive grammar) seen = set() - while cur: + while True: if id(cur) in seen: break seen.add(id(cur)) @@ -3853,7 +4002,10 @@ def streamline(self) -> ParserElement: ) break subs = cur.recurse() - cur = next(iter(subs), None) + next_first = next(iter(subs), None) + if next_first is None: + break + cur = typing.cast(ParserElement, next_first) self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) return self @@ -3884,13 +4036,14 @@ def parseImpl(self, instring, loc, doActions=True): ) else: loc, exprtokens = e._parse(instring, loc, doActions) - if exprtokens or exprtokens.haskeys(): - resultlist += exprtokens + resultlist += exprtokens return loc, resultlist def __iadd__(self, other): if isinstance(other, str_type): other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + return NotImplemented return self.append(other) # And([self, other]) def _checkRecursion(self, parseElementList): @@ -3900,7 +4053,7 @@ def _checkRecursion(self, parseElementList): if not e.mayReturnEmpty: break - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: inner = " ".join(str(e) for e in self.exprs) # strip off redundant inner {}'s while len(inner) > 1 and inner[0 :: len(inner) - 1] == "{}": @@ -3958,7 +4111,7 @@ def parseImpl(self, instring, loc, doActions=True): loc2 = e.try_parse(instring, loc, raise_fatal=True) except ParseFatalException as pfe: pfe.__traceback__ = None - pfe.parserElement = e + pfe.parser_element = e fatals.append(pfe) maxException = None maxExcLoc = -1 @@ -4016,12 +4169,15 @@ def parseImpl(self, instring, loc, doActions=True): if len(fatals) > 1: fatals.sort(key=lambda e: -e.loc) if fatals[0].loc == fatals[1].loc: - fatals.sort(key=lambda e: (-e.loc, -len(str(e.parserElement)))) + fatals.sort(key=lambda e: (-e.loc, -len(str(e.parser_element)))) max_fatal = fatals[0] raise max_fatal if maxException is not None: - maxException.msg = self.errmsg + # infer from this check that all alternatives failed at the current position + # so emit this collective 
error message instead of any single error message + if maxExcLoc == loc: + maxException.msg = self.errmsg raise maxException else: raise ParseException( @@ -4031,9 +4187,11 @@ def parseImpl(self, instring, loc, doActions=True): def __ixor__(self, other): if isinstance(other, str_type): other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + return NotImplemented return self.append(other) # Or([self, other]) - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "{" + " ^ ".join(str(e) for e in self.exprs) + "}" def _setResultsName(self, name, listAllMatches=False): @@ -4118,7 +4276,7 @@ def parseImpl(self, instring, loc, doActions=True): ) except ParseFatalException as pfe: pfe.__traceback__ = None - pfe.parserElement = e + pfe.parser_element = e raise except ParseException as err: if err.loc > maxExcLoc: @@ -4132,7 +4290,10 @@ def parseImpl(self, instring, loc, doActions=True): maxExcLoc = len(instring) if maxException is not None: - maxException.msg = self.errmsg + # infer from this check that all alternatives failed at the current position + # so emit this collective error message instead of any individual error message + if maxExcLoc == loc: + maxException.msg = self.errmsg raise maxException else: raise ParseException( @@ -4142,9 +4303,11 @@ def parseImpl(self, instring, loc, doActions=True): def __ior__(self, other): if isinstance(other, str_type): other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + return NotImplemented return self.append(other) # MatchFirst([self, other]) - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "{" + " | ".join(str(e) for e in self.exprs) + "}" def _setResultsName(self, name, listAllMatches=False): @@ -4242,6 +4405,13 @@ def __init__(self, exprs: typing.Iterable[ParserElement], savelist: bool = True) self.initExprGroups = True self.saveAsList = True + def __iand__(self, other): + if isinstance(other, str_type): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + return NotImplemented + return self.append(other) # Each([self, other]) + def streamline(self) -> ParserElement: super().streamline() if self.exprs: @@ -4296,7 +4466,7 @@ def parseImpl(self, instring, loc, doActions=True): tmpLoc = e.try_parse(instring, tmpLoc, raise_fatal=True) except ParseFatalException as pfe: pfe.__traceback__ = None - pfe.parserElement = e + pfe.parser_element = e fatals.append(pfe) failed.append(e) except ParseException: @@ -4315,7 +4485,7 @@ def parseImpl(self, instring, loc, doActions=True): if len(fatals) > 1: fatals.sort(key=lambda e: -e.loc) if fatals[0].loc == fatals[1].loc: - fatals.sort(key=lambda e: (-e.loc, -len(str(e.parserElement)))) + fatals.sort(key=lambda e: (-e.loc, -len(str(e.parser_element)))) max_fatal = fatals[0] raise max_fatal @@ -4324,7 +4494,7 @@ def parseImpl(self, instring, loc, doActions=True): raise ParseException( instring, loc, - "Missing one or more required elements ({})".format(missing), + f"Missing one or more required elements ({missing})", ) # add any unmatched Opts, in case they have default values defined @@ -4337,7 +4507,7 @@ def parseImpl(self, instring, loc, doActions=True): return loc, total_results - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "{" + " & ".join(str(e) for e in self.exprs) + "}" @@ -4349,12 +4519,14 @@ class ParseElementEnhance(ParserElement): def __init__(self, expr: Union[ParserElement, str], savelist: bool = False): 
super().__init__(savelist) if isinstance(expr, str_type): + expr_str = typing.cast(str, expr) if issubclass(self._literalStringClass, Token): - expr = self._literalStringClass(expr) + expr = self._literalStringClass(expr_str) # type: ignore[call-arg] elif issubclass(type(self), self._literalStringClass): - expr = Literal(expr) + expr = Literal(expr_str) else: - expr = self._literalStringClass(Literal(expr)) + expr = self._literalStringClass(Literal(expr_str)) # type: ignore[assignment, call-arg] + expr = typing.cast(ParserElement, expr) self.expr = expr if expr is not None: self.mayIndexError = expr.mayIndexError @@ -4367,12 +4539,16 @@ def __init__(self, expr: Union[ParserElement, str], savelist: bool = False): self.callPreparse = expr.callPreparse self.ignoreExprs.extend(expr.ignoreExprs) - def recurse(self) -> Sequence[ParserElement]: + def recurse(self) -> List[ParserElement]: return [self.expr] if self.expr is not None else [] def parseImpl(self, instring, loc, doActions=True): if self.expr is not None: - return self.expr._parse(instring, loc, doActions, callPreParse=False) + try: + return self.expr._parse(instring, loc, doActions, callPreParse=False) + except ParseBaseException as pbe: + pbe.msg = self.errmsg + raise else: raise ParseException(instring, loc, "No expression defined", self) @@ -4380,8 +4556,8 @@ def leave_whitespace(self, recursive: bool = True) -> ParserElement: super().leave_whitespace(recursive) if recursive: - self.expr = self.expr.copy() if self.expr is not None: + self.expr = self.expr.copy() self.expr.leave_whitespace(recursive) return self @@ -4389,8 +4565,8 @@ def ignore_whitespace(self, recursive: bool = True) -> ParserElement: super().ignore_whitespace(recursive) if recursive: - self.expr = self.expr.copy() if self.expr is not None: + self.expr = self.expr.copy() self.expr.ignore_whitespace(recursive) return self @@ -4420,6 +4596,11 @@ def _checkRecursion(self, parseElementList): self.expr._checkRecursion(subRecCheckList) def validate(self, validateTrace=None) -> None: + warnings.warn( + "ParserElement.validate() is deprecated, and should not be used to check for left recursion", + DeprecationWarning, + stacklevel=2, + ) if validateTrace is None: validateTrace = [] tmp = validateTrace[:] + [self] @@ -4427,11 +4608,17 @@ def validate(self, validateTrace=None) -> None: self.expr.validate(tmp) self._checkRecursion([]) - def _generateDefaultName(self): - return "{}:({})".format(self.__class__.__name__, str(self.expr)) + def _generateDefaultName(self) -> str: + return f"{self.__class__.__name__}:({str(self.expr)})" + + # Compatibility synonyms + # fmt: off + @replaced_by_pep8(leave_whitespace) + def leaveWhitespace(self): ... - ignoreWhitespace = ignore_whitespace - leaveWhitespace = leave_whitespace + @replaced_by_pep8(ignore_whitespace) + def ignoreWhitespace(self): ... 
+ # fmt: on class IndentedBlock(ParseElementEnhance): @@ -4443,13 +4630,13 @@ class IndentedBlock(ParseElementEnhance): class _Indent(Empty): def __init__(self, ref_col: int): super().__init__() - self.errmsg = "expected indent at column {}".format(ref_col) + self.errmsg = f"expected indent at column {ref_col}" self.add_condition(lambda s, l, t: col(l, s) == ref_col) class _IndentGreater(Empty): def __init__(self, ref_col: int): super().__init__() - self.errmsg = "expected indent at column greater than {}".format(ref_col) + self.errmsg = f"expected indent at column greater than {ref_col}" self.add_condition(lambda s, l, t: col(l, s) > ref_col) def __init__( @@ -4469,7 +4656,7 @@ def parseImpl(self, instring, loc, doActions=True): # see if self.expr matches at the current location - if not it will raise an exception # and no further work is necessary - self.expr.try_parse(instring, anchor_loc, doActions) + self.expr.try_parse(instring, anchor_loc, do_actions=doActions) indent_col = col(anchor_loc, instring) peer_detect_expr = self._Indent(indent_col) @@ -4532,7 +4719,7 @@ class AtLineStart(ParseElementEnhance): B AAA and definitely not this one ''' - for t in (AtLineStart('AAA') + restOfLine).search_string(test): + for t in (AtLineStart('AAA') + rest_of_line).search_string(test): print(t) prints:: @@ -4598,9 +4785,9 @@ class PrecededBy(ParseElementEnhance): Parameters: - - expr - expression that must match prior to the current parse + - ``expr`` - expression that must match prior to the current parse location - - retreat - (default= ``None``) - (int) maximum number of characters + - ``retreat`` - (default= ``None``) - (int) maximum number of characters to lookbehind prior to the current parse location If the lookbehind expression is a string, :class:`Literal`, @@ -4627,6 +4814,7 @@ def __init__( self.mayIndexError = False self.exact = False if isinstance(expr, str_type): + expr = typing.cast(str, expr) retreat = len(expr) self.exact = True elif isinstance(expr, (Literal, Keyword)): @@ -4746,18 +4934,18 @@ def __init__(self, expr: Union[ParserElement, str]): self.errmsg = "Found unwanted token, " + str(self.expr) def parseImpl(self, instring, loc, doActions=True): - if self.expr.can_parse_next(instring, loc): + if self.expr.can_parse_next(instring, loc, do_actions=doActions): raise ParseException(instring, loc, self.errmsg, self) return loc, [] - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "~{" + str(self.expr) + "}" class _MultipleMatch(ParseElementEnhance): def __init__( self, - expr: ParserElement, + expr: Union[str, ParserElement], stop_on: typing.Optional[Union[ParserElement, str]] = None, *, stopOn: typing.Optional[Union[ParserElement, str]] = None, @@ -4781,7 +4969,7 @@ def parseImpl(self, instring, loc, doActions=True): self_skip_ignorables = self._skipIgnorables check_ender = self.not_ender is not None if check_ender: - try_not_ender = self.not_ender.tryParse + try_not_ender = self.not_ender.try_parse # must be at least one (but first see if we are the stopOn sentinel; # if so, fail) @@ -4798,8 +4986,7 @@ def parseImpl(self, instring, loc, doActions=True): else: preloc = loc loc, tmptokens = self_expr_parse(instring, preloc, doActions) - if tmptokens or tmptokens.haskeys(): - tokens += tmptokens + tokens += tmptokens except (ParseException, IndexError): pass @@ -4837,10 +5024,11 @@ class OneOrMore(_MultipleMatch): Repetition of one or more of the given expression. 
Parameters: - - expr - expression that must match one or more times - - stop_on - (default= ``None``) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) + + - ``expr`` - expression that must match one or more times + - ``stop_on`` - (default= ``None``) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) Example:: @@ -4859,7 +5047,7 @@ class OneOrMore(_MultipleMatch): (attr_expr * (1,)).parse_string(text).pprint() """ - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "{" + str(self.expr) + "}..." @@ -4868,6 +5056,7 @@ class ZeroOrMore(_MultipleMatch): Optional repetition of zero or more of the given expression. Parameters: + - ``expr`` - expression that must match zero or more times - ``stop_on`` - expression for a terminating sentinel (only required if the sentinel would ordinarily match the repetition @@ -4878,7 +5067,7 @@ class ZeroOrMore(_MultipleMatch): def __init__( self, - expr: ParserElement, + expr: Union[str, ParserElement], stop_on: typing.Optional[Union[ParserElement, str]] = None, *, stopOn: typing.Optional[Union[ParserElement, str]] = None, @@ -4892,10 +5081,75 @@ def parseImpl(self, instring, loc, doActions=True): except (ParseException, IndexError): return loc, ParseResults([], name=self.resultsName) - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "[" + str(self.expr) + "]..." +class DelimitedList(ParseElementEnhance): + def __init__( + self, + expr: Union[str, ParserElement], + delim: Union[str, ParserElement] = ",", + combine: bool = False, + min: typing.Optional[int] = None, + max: typing.Optional[int] = None, + *, + allow_trailing_delim: bool = False, + ): + """Helper to define a delimited list of expressions - the delimiter + defaults to ','. By default, the list elements and delimiters can + have intervening whitespace, and comments, but this can be + overridden by passing ``combine=True`` in the constructor. If + ``combine`` is set to ``True``, the matching tokens are + returned as a single token string, with the delimiters included; + otherwise, the matching tokens are returned as a list of tokens, + with the delimiters suppressed. + + If ``allow_trailing_delim`` is set to True, then the list may end with + a delimiter. 
+ + Example:: + + DelimitedList(Word(alphas)).parse_string("aa,bb,cc") # -> ['aa', 'bb', 'cc'] + DelimitedList(Word(hexnums), delim=':', combine=True).parse_string("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] + """ + if isinstance(expr, str_type): + expr = ParserElement._literalStringClass(expr) + expr = typing.cast(ParserElement, expr) + + if min is not None: + if min < 1: + raise ValueError("min must be greater than 0") + if max is not None: + if min is not None and max < min: + raise ValueError("max must be greater than, or equal to min") + + self.content = expr + self.raw_delim = str(delim) + self.delim = delim + self.combine = combine + if not combine: + self.delim = Suppress(delim) + self.min = min or 1 + self.max = max + self.allow_trailing_delim = allow_trailing_delim + + delim_list_expr = self.content + (self.delim + self.content) * ( + self.min - 1, + None if self.max is None else self.max - 1, + ) + if self.allow_trailing_delim: + delim_list_expr += Opt(self.delim) + + if self.combine: + delim_list_expr = Combine(delim_list_expr) + + super().__init__(delim_list_expr, savelist=True) + + def _generateDefaultName(self) -> str: + return "{0} [{1} {0}]...".format(self.content.streamline(), self.raw_delim) + + class _NullToken: def __bool__(self): return False @@ -4909,6 +5163,7 @@ class Opt(ParseElementEnhance): Optional matching of the given expression. Parameters: + - ``expr`` - expression that must match zero or more times - ``default`` (optional) - value to be returned if the optional expression is not found. @@ -4969,7 +5224,7 @@ def parseImpl(self, instring, loc, doActions=True): tokens = [] return loc, tokens - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: inner = str(self.expr) # strip off redundant inner {}'s while len(inner) > 1 and inner[0 :: len(inner) - 1] == "{}": @@ -4986,6 +5241,7 @@ class SkipTo(ParseElementEnhance): expression is found. 
Parameters: + - ``expr`` - target expression marking the end of the data to be skipped - ``include`` - if ``True``, the target expression is also parsed (the skipped text and target expression are returned as a 2-element @@ -5045,14 +5301,15 @@ def __init__( self, other: Union[ParserElement, str], include: bool = False, - ignore: bool = None, + ignore: typing.Optional[Union[ParserElement, str]] = None, fail_on: typing.Optional[Union[ParserElement, str]] = None, *, - failOn: Union[ParserElement, str] = None, + failOn: typing.Optional[Union[ParserElement, str]] = None, ): super().__init__(other) failOn = failOn or fail_on - self.ignoreExpr = ignore + if ignore is not None: + self.ignore(ignore) self.mayReturnEmpty = True self.mayIndexError = False self.includeMatch = include @@ -5070,9 +5327,7 @@ def parseImpl(self, instring, loc, doActions=True): self_failOn_canParseNext = ( self.failOn.canParseNext if self.failOn is not None else None ) - self_ignoreExpr_tryParse = ( - self.ignoreExpr.tryParse if self.ignoreExpr is not None else None - ) + self_preParse = self.preParse if self.callPreparse else None tmploc = loc while tmploc <= instrlen: @@ -5081,13 +5336,9 @@ def parseImpl(self, instring, loc, doActions=True): if self_failOn_canParseNext(instring, tmploc): break - if self_ignoreExpr_tryParse is not None: - # advance past ignore expressions - while 1: - try: - tmploc = self_ignoreExpr_tryParse(instring, tmploc) - except ParseBaseException: - break + if self_preParse is not None: + # skip grammar-ignored expressions + tmploc = self_preParse(instring, tmploc) try: self_expr_parse(instring, tmploc, doActions=False, callPreParse=False) @@ -5145,15 +5396,20 @@ class Forward(ParseElementEnhance): def __init__(self, other: typing.Optional[Union[ParserElement, str]] = None): self.caller_frame = traceback.extract_stack(limit=2)[0] - super().__init__(other, savelist=False) + super().__init__(other, savelist=False) # type: ignore[arg-type] self.lshift_line = None - def __lshift__(self, other): + def __lshift__(self, other) -> "Forward": if hasattr(self, "caller_frame"): del self.caller_frame if isinstance(other, str_type): other = self._literalStringClass(other) + + if not isinstance(other, ParserElement): + return NotImplemented + self.expr = other + self.streamlined = other.streamlined self.mayIndexError = self.expr.mayIndexError self.mayReturnEmpty = self.expr.mayReturnEmpty self.set_whitespace_chars( @@ -5162,13 +5418,16 @@ def __lshift__(self, other): self.skipWhitespace = self.expr.skipWhitespace self.saveAsList = self.expr.saveAsList self.ignoreExprs.extend(self.expr.ignoreExprs) - self.lshift_line = traceback.extract_stack(limit=2)[-2] + self.lshift_line = traceback.extract_stack(limit=2)[-2] # type: ignore[assignment] return self - def __ilshift__(self, other): + def __ilshift__(self, other) -> "Forward": + if not isinstance(other, ParserElement): + return NotImplemented + return self << other - def __or__(self, other): + def __or__(self, other) -> "ParserElement": caller_line = traceback.extract_stack(limit=2)[-2] if ( __diag__.warn_on_match_first_with_lshift_operator @@ -5205,12 +5464,12 @@ def parseImpl(self, instring, loc, doActions=True): not in self.suppress_warnings_ ): # walk stack until parse_string, scan_string, search_string, or transform_string is found - parse_fns = [ + parse_fns = ( "parse_string", "scan_string", "search_string", "transform_string", - ] + ) tb = traceback.extract_stack(limit=200) for i, frm in enumerate(reversed(tb), start=1): if frm.name in parse_fns: @@ 
-5308,6 +5567,11 @@ def streamline(self) -> ParserElement: return self def validate(self, validateTrace=None) -> None: + warnings.warn( + "ParserElement.validate() is deprecated, and should not be used to check for left recursion", + DeprecationWarning, + stacklevel=2, + ) if validateTrace is None: validateTrace = [] @@ -5317,7 +5581,7 @@ def validate(self, validateTrace=None) -> None: self.expr.validate(tmp) self._checkRecursion([]) - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: # Avoid infinite recursion by setting a temporary _defaultName self._defaultName = ": ..." @@ -5356,8 +5620,14 @@ def _setResultsName(self, name, list_all_matches=False): return super()._setResultsName(name, list_all_matches) - ignoreWhitespace = ignore_whitespace - leaveWhitespace = leave_whitespace + # Compatibility synonyms + # fmt: off + @replaced_by_pep8(leave_whitespace) + def leaveWhitespace(self): ... + + @replaced_by_pep8(ignore_whitespace) + def ignoreWhitespace(self): ... + # fmt: on class TokenConverter(ParseElementEnhance): @@ -5439,11 +5709,11 @@ class Group(TokenConverter): ident = Word(alphas) num = Word(nums) term = ident | num - func = ident + Opt(delimited_list(term)) + func = ident + Opt(DelimitedList(term)) print(func.parse_string("fn a, b, 100")) # -> ['fn', 'a', 'b', '100'] - func = ident + Group(Opt(delimited_list(term))) + func = ident + Group(Opt(DelimitedList(term))) print(func.parse_string("fn a, b, 100")) # -> ['fn', ['a', 'b', '100']] """ @@ -5579,7 +5849,7 @@ class Suppress(TokenConverter): ['a', 'b', 'c', 'd'] ['START', 'relevant text ', 'END'] - (See also :class:`delimited_list`.) + (See also :class:`DelimitedList`.) """ def __init__(self, expr: Union[ParserElement, str], savelist: bool = False): @@ -5638,15 +5908,13 @@ def z(*paArgs): s, l, t = paArgs[-3:] if len(paArgs) > 3: thisFunc = paArgs[0].__class__.__name__ + "." + thisFunc - sys.stderr.write( - ">>entering {}(line: {!r}, {}, {!r})\n".format(thisFunc, line(l, s), l, t) - ) + sys.stderr.write(f">>entering {thisFunc}(line: {line(l, s)!r}, {l}, {t!r})\n") try: ret = f(*paArgs) except Exception as exc: - sys.stderr.write("<<leaving {} (exception: {})\n".format(thisFunc, exc)) + sys.stderr.write(f"<<leaving {thisFunc} (exception: {exc})\n") raise - sys.stderr.write("<<leaving {}(ret: {!r})\n".format(thisFunc, ret)) + sys.stderr.write(f"<<leaving {thisFunc}(ret: {ret!r})\n") return ret @@ ... @@ def srange(s: str) -> str: ) try: return "".join(_expanded(part) for part in _reBracketExpr.parse_string(s).body) - except Exception: + except Exception as e: return "" @@ -5769,7 +6037,11 @@ def autoname_elements() -> None: Utility to simplify mass-naming of parser elements, for generating railroad diagram with named subdiagrams.
""" - for name, var in sys._getframe().f_back.f_locals.items(): + calling_frame = sys._getframe().f_back + if calling_frame is None: + return + calling_frame = typing.cast(types.FrameType, calling_frame) + for name, var in calling_frame.f_locals.items(): if isinstance(var, ParserElement) and not var.customName: var.set_name(name) @@ -5783,9 +6055,28 @@ def autoname_elements() -> None: ).set_name("string enclosed in single quotes") quoted_string = Combine( - Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"' - | Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'" -).set_name("quotedString using single or double quotes") + (Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"').set_name( + "double quoted string" + ) + | (Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").set_name( + "single quoted string" + ) +).set_name("quoted string using single or double quotes") + +python_quoted_string = Combine( + (Regex(r'"""(?:[^"\\]|""(?!")|"(?!"")|\\.)*', flags=re.MULTILINE) + '"""').set_name( + "multiline double quoted string" + ) + ^ ( + Regex(r"'''(?:[^'\\]|''(?!')|'(?!'')|\\.)*", flags=re.MULTILINE) + "'''" + ).set_name("multiline single quoted string") + ^ (Regex(r'"(?:[^"\n\r\\]|(?:\\")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"').set_name( + "double quoted string" + ) + ^ (Regex(r"'(?:[^'\n\r\\]|(?:\\')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").set_name( + "single quoted string" + ) +).set_name("Python quoted string") unicode_string = Combine("u" + quoted_string.copy()).set_name("unicode string literal") @@ -5800,9 +6091,7 @@ def autoname_elements() -> None: ] # backward compatibility names -tokenMap = token_map -conditionAsParseAction = condition_as_parse_action -nullDebugAction = null_debug_action +# fmt: off sglQuotedString = sgl_quoted_string dblQuotedString = dbl_quoted_string quotedString = quoted_string @@ -5811,4 +6100,16 @@ def autoname_elements() -> None: lineEnd = line_end stringStart = string_start stringEnd = string_end -traceParseAction = trace_parse_action + +@replaced_by_pep8(null_debug_action) +def nullDebugAction(): ... + +@replaced_by_pep8(trace_parse_action) +def traceParseAction(): ... + +@replaced_by_pep8(condition_as_parse_action) +def conditionAsParseAction(): ... + +@replaced_by_pep8(token_map) +def tokenMap(): ... +# fmt: on diff --git a/src/pip/_vendor/pyparsing/diagram/__init__.py b/src/pip/_vendor/pyparsing/diagram/__init__.py index 1506d66bf4e..83f9018ee93 100644 --- a/src/pip/_vendor/pyparsing/diagram/__init__.py +++ b/src/pip/_vendor/pyparsing/diagram/__init__.py @@ -1,3 +1,4 @@ +# mypy: ignore-errors import railroad from pip._vendor import pyparsing import typing @@ -17,11 +18,13 @@ jinja2_template_source = """\ +{% if not embed %} +{% endif %} {% if not head %} - - -
{code}
+
{code}
""" diff --git a/src/pip/_vendor/rich/_ratio.py b/src/pip/_vendor/rich/_ratio.py index e8a3a674e00..95267b0cb6c 100644 --- a/src/pip/_vendor/rich/_ratio.py +++ b/src/pip/_vendor/rich/_ratio.py @@ -151,7 +151,6 @@ def ratio_distribute( @dataclass class E: - size: Optional[int] = None ratio: int = 1 minimum_size: int = 1 diff --git a/src/pip/_vendor/rich/_windows.py b/src/pip/_vendor/rich/_windows.py index 10fc0d7e9f3..7520a9f90a5 100644 --- a/src/pip/_vendor/rich/_windows.py +++ b/src/pip/_vendor/rich/_windows.py @@ -30,7 +30,6 @@ class WindowsConsoleFeatures: ) except (AttributeError, ImportError, ValueError): - # Fallback if we can't load the Windows DLL def get_windows_console_features() -> WindowsConsoleFeatures: features = WindowsConsoleFeatures() diff --git a/src/pip/_vendor/rich/_wrap.py b/src/pip/_vendor/rich/_wrap.py index c45f193f74a..2e94ff6f43a 100644 --- a/src/pip/_vendor/rich/_wrap.py +++ b/src/pip/_vendor/rich/_wrap.py @@ -1,5 +1,7 @@ +from __future__ import annotations + import re -from typing import Iterable, List, Tuple +from typing import Iterable from ._loop import loop_last from .cells import cell_len, chop_cells @@ -7,7 +9,11 @@ re_word = re.compile(r"\s*\S+\s*") -def words(text: str) -> Iterable[Tuple[int, int, str]]: +def words(text: str) -> Iterable[tuple[int, int, str]]: + """Yields each word from the text as a tuple + containing (start_index, end_index, word). A "word" in this context may + include the actual word and any whitespace to the right. + """ position = 0 word_match = re_word.match(text, position) while word_match is not None: @@ -17,35 +23,59 @@ def words(text: str) -> Iterable[Tuple[int, int, str]]: word_match = re_word.match(text, end) -def divide_line(text: str, width: int, fold: bool = True) -> List[int]: - divides: List[int] = [] - append = divides.append - line_position = 0 +def divide_line(text: str, width: int, fold: bool = True) -> list[int]: + """Given a string of text, and a width (measured in cells), return a list + of cell offsets which the string should be split at in order for it to fit + within the given width. + + Args: + text: The text to examine. + width: The available cell width. + fold: If True, words longer than `width` will be folded onto a new line. + + Returns: + A list of indices to break the line at. + """ + break_positions: list[int] = [] # offsets to insert the breaks at + append = break_positions.append + cell_offset = 0 _cell_len = cell_len + for start, _end, word in words(text): word_length = _cell_len(word.rstrip()) - if line_position + word_length > width: + remaining_space = width - cell_offset + word_fits_remaining_space = remaining_space >= word_length + + if word_fits_remaining_space: + # Simplest case - the word fits within the remaining width for this line. + cell_offset += _cell_len(word) + else: + # Not enough space remaining for this word on the current line. if word_length > width: + # The word doesn't fit on any line, so we can't simply + # place it on the next line... if fold: - chopped_words = chop_cells(word, max_size=width, position=0) - for last, line in loop_last(chopped_words): + # Fold the word across multiple lines. + folded_word = chop_cells(word, width=width) + for last, line in loop_last(folded_word): if start: append(start) - if last: - line_position = _cell_len(line) + cell_offset = _cell_len(line) else: start += len(line) else: + # Folding isn't allowed, so crop the word. 
if start: append(start) - line_position = _cell_len(word) - elif line_position and start: + cell_offset = _cell_len(word) + elif cell_offset and start: + # The word doesn't fit within the remaining space on the current + # line, but it *can* fit on to the next (empty) line. append(start) - line_position = _cell_len(word) - else: - line_position += _cell_len(word) - return divides + cell_offset = _cell_len(word) + + return break_positions if __name__ == "__main__": # pragma: no cover @@ -53,4 +83,11 @@ def divide_line(text: str, width: int, fold: bool = True) -> List[int]: console = Console(width=10) console.print("12345 abcdefghijklmnopqrstuvwyxzABCDEFGHIJKLMNOPQRSTUVWXYZ 12345") - print(chop_cells("abcdefghijklmnopqrstuvwxyz", 10, position=2)) + print(chop_cells("abcdefghijklmnopqrstuvwxyz", 10)) + + console = Console(width=20) + console.rule() + console.print("TextualはPythonの高速アプリケーション開発フレームワークです") + + console.rule() + console.print("アプリケーションは1670万色を使用でき") diff --git a/src/pip/_vendor/rich/align.py b/src/pip/_vendor/rich/align.py index c310b66e783..f7b734fd728 100644 --- a/src/pip/_vendor/rich/align.py +++ b/src/pip/_vendor/rich/align.py @@ -27,7 +27,7 @@ class Align(JupyterMixin): renderable (RenderableType): A console renderable. align (AlignMethod): One of "left", "center", or "right"" style (StyleType, optional): An optional style to apply to the background. - vertical (Optional[VerticalAlginMethod], optional): Optional vertical align, one of "top", "middle", or "bottom". Defaults to None. + vertical (Optional[VerticalAlignMethod], optional): Optional vertical align, one of "top", "middle", or "bottom". Defaults to None. pad (bool, optional): Pad the right with spaces. Defaults to True. width (int, optional): Restrict contents to given width, or None to use default width. Defaults to None. height (int, optional): Set height of align renderable, or None to fit to contents. Defaults to None. 
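
The reworked `divide_line` above now returns a list of break offsets computed against cell widths, while `chop_cells` folds a single overlong word into width-limited pieces. A minimal sketch of how the two primitives compose, assuming the standalone `rich` package (pip's vendored copy under `pip._vendor.rich` is not a public API):

```python
# Sketch only: exercises the offset-based wrapping contract described above.
from rich._wrap import divide_line  # internal module; subject to change
from rich.cells import chop_cells

text = "12345 abcdefghij 12345"

# divide_line() returns the character offsets at which to break `text` so
# that each resulting chunk fits within the given cell width.
offsets = divide_line(text, width=10, fold=True)

lines, start = [], 0
for offset in offsets:
    lines.append(text[start:offset])
    start = offset
lines.append(text[start:])
print(lines)  # ['12345 ', 'abcdefghij ', '12345']

# chop_cells() folds one overlong word into width-limited pieces.
print(chop_cells("abcdefghijklmnopqrstuvwxyz", width=10))
```
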
diff --git a/src/pip/_vendor/rich/bar.py b/src/pip/_vendor/rich/bar.py index ed86a552d1c..022284b5788 100644 --- a/src/pip/_vendor/rich/bar.py +++ b/src/pip/_vendor/rich/bar.py @@ -48,7 +48,6 @@ def __repr__(self) -> str: def __rich_console__( self, console: Console, options: ConsoleOptions ) -> RenderResult: - width = min( self.width if self.width is not None else options.max_width, options.max_width, diff --git a/src/pip/_vendor/rich/box.py b/src/pip/_vendor/rich/box.py index 97d2a944457..0511a9e48ba 100644 --- a/src/pip/_vendor/rich/box.py +++ b/src/pip/_vendor/rich/box.py @@ -188,260 +188,224 @@ def get_bottom(self, widths: Iterable[int]) -> str: return "".join(parts) +# fmt: off ASCII: Box = Box( - """\ -+--+ -| || -|-+| -| || -|-+| -|-+| -| || -+--+ -""", + "+--+\n" + "| ||\n" + "|-+|\n" + "| ||\n" + "|-+|\n" + "|-+|\n" + "| ||\n" + "+--+\n", ascii=True, ) ASCII2: Box = Box( - """\ -+-++ -| || -+-++ -| || -+-++ -+-++ -| || -+-++ -""", + "+-++\n" + "| ||\n" + "+-++\n" + "| ||\n" + "+-++\n" + "+-++\n" + "| ||\n" + "+-++\n", ascii=True, ) ASCII_DOUBLE_HEAD: Box = Box( - """\ -+-++ -| || -+=++ -| || -+-++ -+-++ -| || -+-++ -""", + "+-++\n" + "| ||\n" + "+=++\n" + "| ||\n" + "+-++\n" + "+-++\n" + "| ||\n" + "+-++\n", ascii=True, ) SQUARE: Box = Box( - """\ -┌─┬┐ -│ ││ -├─┼┤ -│ ││ -├─┼┤ -├─┼┤ -│ ││ -└─┴┘ -""" + "┌─┬┐\n" + "│ ││\n" + "├─┼┤\n" + "│ ││\n" + "├─┼┤\n" + "├─┼┤\n" + "│ ││\n" + "└─┴┘\n" ) SQUARE_DOUBLE_HEAD: Box = Box( - """\ -┌─┬┐ -│ ││ -╞═╪╡ -│ ││ -├─┼┤ -├─┼┤ -│ ││ -└─┴┘ -""" + "┌─┬┐\n" + "│ ││\n" + "╞═╪╡\n" + "│ ││\n" + "├─┼┤\n" + "├─┼┤\n" + "│ ││\n" + "└─┴┘\n" ) MINIMAL: Box = Box( - """\ - ╷ - │ -╶─┼╴ - │ -╶─┼╴ -╶─┼╴ - │ - ╵ -""" + " ╷ \n" + " │ \n" + "╶─┼╴\n" + " │ \n" + "╶─┼╴\n" + "╶─┼╴\n" + " │ \n" + " ╵ \n" ) MINIMAL_HEAVY_HEAD: Box = Box( - """\ - ╷ - │ -╺━┿╸ - │ -╶─┼╴ -╶─┼╴ - │ - ╵ -""" + " ╷ \n" + " │ \n" + "╺━┿╸\n" + " │ \n" + "╶─┼╴\n" + "╶─┼╴\n" + " │ \n" + " ╵ \n" ) MINIMAL_DOUBLE_HEAD: Box = Box( - """\ - ╷ - │ - ═╪ - │ - ─┼ - ─┼ - │ - ╵ -""" + " ╷ \n" + " │ \n" + " ═╪ \n" + " │ \n" + " ─┼ \n" + " ─┼ \n" + " │ \n" + " ╵ \n" ) SIMPLE: Box = Box( - """\ - - - ── - - - ── - - -""" + " \n" + " \n" + " ── \n" + " \n" + " \n" + " ── \n" + " \n" + " \n" ) SIMPLE_HEAD: Box = Box( - """\ - - - ── - - - - - -""" + " \n" + " \n" + " ── \n" + " \n" + " \n" + " \n" + " \n" + " \n" ) SIMPLE_HEAVY: Box = Box( - """\ - - - ━━ - - - ━━ - - -""" + " \n" + " \n" + " ━━ \n" + " \n" + " \n" + " ━━ \n" + " \n" + " \n" ) HORIZONTALS: Box = Box( - """\ - ── - - ── - - ── - ── - - ── -""" + " ── \n" + " \n" + " ── \n" + " \n" + " ── \n" + " ── \n" + " \n" + " ── \n" ) ROUNDED: Box = Box( - """\ -╭─┬╮ -│ ││ -├─┼┤ -│ ││ -├─┼┤ -├─┼┤ -│ ││ -╰─┴╯ -""" + "╭─┬╮\n" + "│ ││\n" + "├─┼┤\n" + "│ ││\n" + "├─┼┤\n" + "├─┼┤\n" + "│ ││\n" + "╰─┴╯\n" ) HEAVY: Box = Box( - """\ -┏━┳┓ -┃ ┃┃ -┣━╋┫ -┃ ┃┃ -┣━╋┫ -┣━╋┫ -┃ ┃┃ -┗━┻┛ -""" + "┏━┳┓\n" + "┃ ┃┃\n" + "┣━╋┫\n" + "┃ ┃┃\n" + "┣━╋┫\n" + "┣━╋┫\n" + "┃ ┃┃\n" + "┗━┻┛\n" ) HEAVY_EDGE: Box = Box( - """\ -┏━┯┓ -┃ │┃ -┠─┼┨ -┃ │┃ -┠─┼┨ -┠─┼┨ -┃ │┃ -┗━┷┛ -""" + "┏━┯┓\n" + "┃ │┃\n" + "┠─┼┨\n" + "┃ │┃\n" + "┠─┼┨\n" + "┠─┼┨\n" + "┃ │┃\n" + "┗━┷┛\n" ) HEAVY_HEAD: Box = Box( - """\ -┏━┳┓ -┃ ┃┃ -┡━╇┩ -│ ││ -├─┼┤ -├─┼┤ -│ ││ -└─┴┘ -""" + "┏━┳┓\n" + "┃ ┃┃\n" + "┡━╇┩\n" + "│ ││\n" + "├─┼┤\n" + "├─┼┤\n" + "│ ││\n" + "└─┴┘\n" ) DOUBLE: Box = Box( - """\ -╔═╦╗ -║ ║║ -╠═╬╣ -║ ║║ -╠═╬╣ -╠═╬╣ -║ ║║ -╚═╩╝ -""" + "╔═╦╗\n" + "║ ║║\n" + "╠═╬╣\n" + "║ ║║\n" + "╠═╬╣\n" + "╠═╬╣\n" + "║ ║║\n" + "╚═╩╝\n" ) DOUBLE_EDGE: Box = Box( - """\ -╔═╤╗ -║ │║ -╟─┼╢ -║ │║ -╟─┼╢ -╟─┼╢ -║ │║ -╚═╧╝ -""" + 
"╔═╤╗\n" + "║ │║\n" + "╟─┼╢\n" + "║ │║\n" + "╟─┼╢\n" + "╟─┼╢\n" + "║ │║\n" + "╚═╧╝\n" ) MARKDOWN: Box = Box( - """\ - -| || -|-|| -| || -|-|| -|-|| -| || - -""", + " \n" + "| ||\n" + "|-||\n" + "| ||\n" + "|-||\n" + "|-||\n" + "| ||\n" + " \n", ascii=True, ) +# fmt: on # Map Boxes that don't render with raster fonts on to equivalent that do LEGACY_WINDOWS_SUBSTITUTIONS = { @@ -464,7 +428,6 @@ def get_bottom(self, widths: Iterable[int]) -> str: if __name__ == "__main__": # pragma: no cover - from pip._vendor.rich.columns import Columns from pip._vendor.rich.panel import Panel diff --git a/src/pip/_vendor/rich/cells.py b/src/pip/_vendor/rich/cells.py index 9354f9e3140..f85f928f75e 100644 --- a/src/pip/_vendor/rich/cells.py +++ b/src/pip/_vendor/rich/cells.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import re from functools import lru_cache -from typing import Callable, List +from typing import Callable from ._cell_widths import CELL_WIDTHS @@ -119,33 +121,44 @@ def set_cell_size(text: str, total: int) -> str: start = pos -# TODO: This is inefficient -# TODO: This might not work with CWJ type characters -def chop_cells(text: str, max_size: int, position: int = 0) -> List[str]: - """Break text in to equal (cell) length strings, returning the characters in reverse - order""" +def chop_cells( + text: str, + width: int, +) -> list[str]: + """Split text into lines such that each line fits within the available (cell) width. + + Args: + text: The text to fold such that it fits in the given width. + width: The width available (number of cells). + + Returns: + A list of strings such that each string in the list has cell width + less than or equal to the available width. + """ _get_character_cell_size = get_character_cell_size - characters = [ - (character, _get_character_cell_size(character)) for character in text - ] - total_size = position - lines: List[List[str]] = [[]] - append = lines[-1].append - - for character, size in reversed(characters): - if total_size + size > max_size: - lines.append([character]) - append = lines[-1].append - total_size = size + lines: list[list[str]] = [[]] + + append_new_line = lines.append + append_to_last_line = lines[-1].append + + total_width = 0 + + for character in text: + cell_width = _get_character_cell_size(character) + char_doesnt_fit = total_width + cell_width > width + + if char_doesnt_fit: + append_new_line([character]) + append_to_last_line = lines[-1].append + total_width = cell_width else: - total_size += size - append(character) + append_to_last_line(character) + total_width += cell_width return ["".join(line) for line in lines] if __name__ == "__main__": # pragma: no cover - print(get_character_cell_size("😽")) for line in chop_cells("""这是对亚洲语言支持的测试。面对模棱两可的想法,拒绝猜测的诱惑。""", 8): print(line) diff --git a/src/pip/_vendor/rich/color.py b/src/pip/_vendor/rich/color.py index dfe455937c8..4270a278d59 100644 --- a/src/pip/_vendor/rich/color.py +++ b/src/pip/_vendor/rich/color.py @@ -592,7 +592,6 @@ def blend_rgb( if __name__ == "__main__": # pragma: no cover - from .console import Console from .table import Table from .text import Text diff --git a/src/pip/_vendor/rich/console.py b/src/pip/_vendor/rich/console.py index 7c363dfdc5e..a11c7c137f0 100644 --- a/src/pip/_vendor/rich/console.py +++ b/src/pip/_vendor/rich/console.py @@ -278,6 +278,7 @@ def __rich_console__( # A type that may be rendered by Console. 
RenderableType = Union[ConsoleRenderable, RichCast, str] +"""A string or any object that may be rendered by Rich.""" # The result of calling a __rich_console__ method. RenderResult = Iterable[Union[RenderableType, Segment]] @@ -952,6 +953,7 @@ def is_terminal(self) -> bool: force_color = self._environ.get("FORCE_COLOR") if force_color is not None: self._force_terminal = True + return True isatty: Optional[Callable[[], bool]] = getattr(self.file, "isatty", None) try: @@ -1924,7 +1926,6 @@ def log( end (str, optional): String to write at end of print data. Defaults to "\\\\n". style (Union[str, Style], optional): A style to apply to output. Defaults to None. justify (str, optional): One of "left", "right", "center", or "full". Defaults to ``None``. - overflow (str, optional): Overflow method: "crop", "fold", or "ellipsis". Defaults to None. emoji (Optional[bool], optional): Enable emoji code, or ``None`` to use console default. Defaults to None. markup (Optional[bool], optional): Enable markup, or ``None`` to use console default. Defaults to None. highlight (Optional[bool], optional): Enable automatic highlighting, or ``None`` to use console default. Defaults to None. @@ -2000,7 +2001,6 @@ def _check_buffer(self) -> None: self._record_buffer.extend(self._buffer[:]) if self._buffer_index == 0: - if self.is_jupyter: # pragma: no cover from .jupyter import display diff --git a/src/pip/_vendor/rich/containers.py b/src/pip/_vendor/rich/containers.py index e29cf368991..901ff8ba6ea 100644 --- a/src/pip/_vendor/rich/containers.py +++ b/src/pip/_vendor/rich/containers.py @@ -1,13 +1,13 @@ from itertools import zip_longest from typing import ( - Iterator, + TYPE_CHECKING, Iterable, + Iterator, List, Optional, + TypeVar, Union, overload, - TypeVar, - TYPE_CHECKING, ) if TYPE_CHECKING: @@ -119,7 +119,7 @@ def justify( Args: console (Console): Console instance. - width (int): Number of characters per line. + width (int): Number of cells available per line. justify (str, optional): Default justify method for text: "left", "center", "full" or "right". Defaults to "left". overflow (str, optional): Default overflow for text: "crop", "fold", or "ellipsis". Defaults to "fold". 
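
The `is_terminal` change above makes `FORCE_COLOR` short-circuit detection entirely: it now returns `True` on the spot instead of only setting `_force_terminal` and falling through to the `isatty` probe. A rough illustration of the observable effect, again assuming the standalone `rich` package:

```python
# Illustrative sketch: FORCE_COLOR forces terminal detection even when the
# console writes to a plain in-memory buffer with no tty.
import io
import os

os.environ["FORCE_COLOR"] = "1"  # must be set before the Console is created

from rich.console import Console

console = Console(file=io.StringIO())
print(console.is_terminal)  # True, despite StringIO having no real isatty()
```
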
diff --git a/src/pip/_vendor/rich/highlighter.py b/src/pip/_vendor/rich/highlighter.py index c2646794a98..27714b25b40 100644 --- a/src/pip/_vendor/rich/highlighter.py +++ b/src/pip/_vendor/rich/highlighter.py @@ -98,7 +98,7 @@ class ReprHighlighter(RegexHighlighter): r"(?P<number>(?<!\w)\-?[0-9]+\.?[0-9]*(e[-+]?\d+?)?\b|0x[0-9a-fA-F]*)", r"(?P<path>\B(/[-\w._+]+)*\/)(?P<filename>[-\w._+]*)?", r"(?<![\\\w])(?P<str>b?'''.*?(?<!\\)'''|b?'.*?(?<!\\)'|b?\"\"\".*?(?<!\\)\"\"\"|b?\".*?(?<!\\)\")", - r"(?P<url>(file|https|http|ws|wss)://[-0-9a-zA-Z$_+!`(),.?/;:&=%#]*)", + r"(?P<url>(file|https|http|ws|wss)://[-0-9a-zA-Z$_+!`(),.?/;:&=%#~]*)", ), ] diff --git a/src/pip/_vendor/rich/json.py b/src/pip/_vendor/rich/json.py index ea94493f21e..4087c79bb32 100644 --- a/src/pip/_vendor/rich/json.py +++ b/src/pip/_vendor/rich/json.py @@ -103,7 +103,6 @@ def __rich__(self) -> Text: if __name__ == "__main__": - import argparse import sys diff --git a/src/pip/_vendor/rich/layout.py b/src/pip/_vendor/rich/layout.py index 849356ea9a0..a6f1a31b294 100644 --- a/src/pip/_vendor/rich/layout.py +++ b/src/pip/_vendor/rich/layout.py @@ -227,7 +227,6 @@ def tree(self) -> "Tree": from pip._vendor.rich.tree import Tree def summary(layout: "Layout") -> Table: - icon = layout.splitter.get_tree_icon() table = Table.grid(padding=(0, 1, 0, 0)) @@ -403,7 +402,7 @@ def __rich_console__( self._render_map = render_map layout_lines: List[List[Segment]] = [[] for _ in range(height)] _islice = islice - for (region, lines) in render_map.values(): + for region, lines in render_map.values(): _x, y, _layout_width, layout_height = region for row, line in zip( _islice(layout_lines, y, y + layout_height), lines diff --git a/src/pip/_vendor/rich/live.py b/src/pip/_vendor/rich/live.py index 3ebbbc4ccbe..f0529a781cf 100644 --- a/src/pip/_vendor/rich/live.py +++ b/src/pip/_vendor/rich/live.py @@ -362,7 +362,7 @@ def process_renderables( table.add_column("Destination Currency") table.add_column("Exchange Rate") - for ((source, dest), exchange_rate) in exchange_rate_dict.items(): + for (source, dest), exchange_rate in exchange_rate_dict.items(): table.add_row( source, dest, diff --git a/src/pip/_vendor/rich/live_render.py b/src/pip/_vendor/rich/live_render.py index b90fbf7f350..e20745df6bf 100644 --- a/src/pip/_vendor/rich/live_render.py +++ b/src/pip/_vendor/rich/live_render.py @@ -82,7 +82,6 @@ def restore_cursor(self) -> Control: def __rich_console__( self, console: Console, options: ConsoleOptions ) -> RenderResult: - renderable = self.renderable style = console.get_style(self.style) lines = console.render_lines(renderable, options, style=style, pad=False) diff --git a/src/pip/_vendor/rich/markup.py b/src/pip/_vendor/rich/markup.py index fd80d8c1129..f6171878f82 100644 --- a/src/pip/_vendor/rich/markup.py +++ b/src/pip/_vendor/rich/markup.py @@ -64,6 +64,9 @@ def escape_backslashes(match: Match[str]) -> str: return f"{backslashes}{backslashes}\\{text}" markup = _escape(escape_backslashes, markup) + if markup.endswith("\\") and not markup.endswith("\\\\"): + return markup + "\\" + return markup @@ -110,7 +113,10 @@ def render( Args: markup (str): A string containing console markup. + style: (Union[str, Style]): The style to use. emoji (bool, optional): Also render emoji code. Defaults to True. + emoji_variant (str, optional): Optional emoji variant, either "text" or "emoji". Defaults to None. + Raises: MarkupError: If there is a syntax error in the markup.
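
The `escape()` guard added in the markup.py hunk above doubles a lone trailing backslash, so an escaped value ending in `\` can no longer swallow the `[` of whatever closing tag follows it. Roughly (standalone `rich` assumed):

```python
# Sketch of the trailing-backslash fix: previously the final "\" survived
# escape() unpaired and would escape the "[" of the closing tag.
from rich.markup import escape, render

value = "C:\\path\\to\\dir\\"  # ends with a single backslash
markup = f"[bold]{escape(value)}[/bold]"
text = render(markup)   # parses cleanly; the backslash stays literal
print(text.plain)       # C:\path\to\dir\
```
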
@@ -226,7 +232,6 @@ def pop_style(style_name: str) -> Tuple[int, Tag]: if __name__ == "__main__": # pragma: no cover - MARKUP = [ "[red]Hello World[/red]", "[magenta]Hello [b]World[/b]", diff --git a/src/pip/_vendor/rich/panel.py b/src/pip/_vendor/rich/panel.py index d522d80b518..95f4c84cf0b 100644 --- a/src/pip/_vendor/rich/panel.py +++ b/src/pip/_vendor/rich/panel.py @@ -82,7 +82,9 @@ def fit( style: StyleType = "none", border_style: StyleType = "none", width: Optional[int] = None, + height: Optional[int] = None, padding: PaddingDimensions = (0, 1), + highlight: bool = False, ) -> "Panel": """An alternative constructor that sets expand=False.""" return cls( @@ -96,7 +98,9 @@ def fit( style=style, border_style=border_style, width=width, + height=height, padding=padding, + highlight=highlight, expand=False, ) diff --git a/src/pip/_vendor/rich/pretty.py b/src/pip/_vendor/rich/pretty.py index 2bd9eb0073d..9b9e3ba9086 100644 --- a/src/pip/_vendor/rich/pretty.py +++ b/src/pip/_vendor/rich/pretty.py @@ -211,8 +211,11 @@ def display_hook(value: Any) -> None: ) builtins._ = value # type: ignore[attr-defined] - if "get_ipython" in globals(): + try: ip = get_ipython() # type: ignore[name-defined] + except NameError: + sys.displayhook = display_hook + else: from IPython.core.formatters import BaseFormatter class RichFormatter(BaseFormatter): # type: ignore[misc] @@ -236,8 +239,6 @@ def __call__(self, value: Any) -> Any: # replace plain text formatter with rich formatter rich_formatter = RichFormatter() ip.display_formatter.formatters["text/plain"] = rich_formatter - else: - sys.displayhook = display_hook class Pretty(JupyterMixin): @@ -708,9 +709,9 @@ def iter_rich_args(rich_args: Any) -> Iterable[Union[Any, Tuple[str, Any]]]: last=root, ) - def iter_attrs() -> Iterable[ - Tuple[str, Any, Optional[Callable[[Any], str]]] - ]: + def iter_attrs() -> ( + Iterable[Tuple[str, Any, Optional[Callable[[Any], str]]]] + ): """Iterate over attr fields and values.""" for attr in attr_fields: if attr.repr: @@ -985,7 +986,7 @@ class StockKeepingUnit(NamedTuple): from pip._vendor.rich import print - # print(Pretty(data, indent_guides=True, max_string=20)) + print(Pretty(data, indent_guides=True, max_string=20)) class Thing: def __repr__(self) -> str: diff --git a/src/pip/_vendor/rich/progress.py b/src/pip/_vendor/rich/progress.py index 8b0a315f324..2420c24e646 100644 --- a/src/pip/_vendor/rich/progress.py +++ b/src/pip/_vendor/rich/progress.py @@ -681,7 +681,7 @@ def render(self, task: "Task") -> Text: elapsed = task.finished_time if task.finished else task.elapsed if elapsed is None: return Text("-:--:--", style="progress.elapsed") - delta = timedelta(seconds=int(elapsed)) + delta = timedelta(seconds=max(0, int(elapsed))) return Text(str(delta), style="progress.elapsed") @@ -710,7 +710,6 @@ def __init__( table_column: Optional[Column] = None, show_speed: bool = False, ) -> None: - self.text_format_no_percentage = text_format_no_percentage self.show_speed = show_speed super().__init__( @@ -1114,7 +1113,7 @@ def get_default_columns(cls) -> Tuple[ProgressColumn, ...]: progress = Progress( SpinnerColumn(), - *Progress.default_columns(), + *Progress.get_default_columns(), "Elapsed:", TimeElapsedColumn(), ) @@ -1636,7 +1635,6 @@ def remove_task(self, task_id: TaskID) -> None: if __name__ == "__main__": # pragma: no coverage - import random import time @@ -1689,7 +1687,6 @@ def remove_task(self, task_id: TaskID) -> None: console=console, transient=False, ) as progress: - task1 = progress.add_task("[red]Downloading", 
total=1000) task2 = progress.add_task("[green]Processing", total=1000) task3 = progress.add_task("[yellow]Thinking", total=None) diff --git a/src/pip/_vendor/rich/progress_bar.py b/src/pip/_vendor/rich/progress_bar.py index 67361df2e49..a2bf326144b 100644 --- a/src/pip/_vendor/rich/progress_bar.py +++ b/src/pip/_vendor/rich/progress_bar.py @@ -156,7 +156,6 @@ def _render_pulse( def __rich_console__( self, console: Console, options: ConsoleOptions ) -> RenderResult: - width = min(self.width or options.max_width, options.max_width) ascii = options.legacy_windows or options.ascii_only should_pulse = self.pulse or self.total is None diff --git a/src/pip/_vendor/rich/prompt.py b/src/pip/_vendor/rich/prompt.py index 2bd0a7724f4..75ff0481684 100644 --- a/src/pip/_vendor/rich/prompt.py +++ b/src/pip/_vendor/rich/prompt.py @@ -307,7 +307,7 @@ class IntPrompt(PromptBase[int]): validate_error_message = "[prompt.invalid]Please enter a valid integer number" -class FloatPrompt(PromptBase[int]): +class FloatPrompt(PromptBase[float]): """A prompt that returns a float. Example: @@ -346,7 +346,6 @@ def process_response(self, value: str) -> bool: if __name__ == "__main__": # pragma: no cover - from pip._vendor.rich import print if Confirm.ask("Run [i]prompt[/i] tests?", default=True): diff --git a/src/pip/_vendor/rich/repr.py b/src/pip/_vendor/rich/repr.py index f284bcafa6a..10efc427c35 100644 --- a/src/pip/_vendor/rich/repr.py +++ b/src/pip/_vendor/rich/repr.py @@ -76,7 +76,7 @@ def auto_rich_repr(self: Type[T]) -> Result: param.POSITIONAL_OR_KEYWORD, param.KEYWORD_ONLY, ): - if param.default == param.empty: + if param.default is param.empty: yield getattr(self, param.name) else: yield param.name, getattr(self, param.name), param.default diff --git a/src/pip/_vendor/rich/segment.py b/src/pip/_vendor/rich/segment.py index e1257984635..93edbbdeb72 100644 --- a/src/pip/_vendor/rich/segment.py +++ b/src/pip/_vendor/rich/segment.py @@ -109,7 +109,6 @@ def is_control(self) -> bool: @classmethod @lru_cache(1024 * 16) def _split_cells(cls, segment: "Segment", cut: int) -> Tuple["Segment", "Segment"]: - text, style, control = segment _Segment = Segment diff --git a/src/pip/_vendor/rich/status.py b/src/pip/_vendor/rich/status.py index 09eff405ec1..65744838e3f 100644 --- a/src/pip/_vendor/rich/status.py +++ b/src/pip/_vendor/rich/status.py @@ -107,7 +107,6 @@ def __exit__( if __name__ == "__main__": # pragma: no cover - from time import sleep from .console import Console diff --git a/src/pip/_vendor/rich/syntax.py b/src/pip/_vendor/rich/syntax.py index 25b226a3a98..c26fd8784e8 100644 --- a/src/pip/_vendor/rich/syntax.py +++ b/src/pip/_vendor/rich/syntax.py @@ -439,6 +439,16 @@ def lexer(self) -> Optional[Lexer]: except ClassNotFound: return None + @property + def default_lexer(self) -> Lexer: + """A Pygments Lexer to use if one is not specified or invalid.""" + return get_lexer_by_name( + "text", + stripnl=False, + ensurenl=True, + tabsize=self.tab_size, + ) + def highlight( self, code: str, @@ -467,7 +477,7 @@ def highlight( ) _get_theme_style = self._theme.get_style_for_token - lexer = self.lexer + lexer = self.lexer or self.default_lexer if lexer is None: text.append(code) @@ -590,7 +600,6 @@ def _get_number_styles(self, console: Console) -> Tuple[Style, Style, Style]: def __rich_measure__( self, console: "Console", options: "ConsoleOptions" ) -> "Measurement": - _, right, _, left = Padding.unpack(self.padding) padding = left + right if self.code_width is not None: @@ -688,7 +697,7 @@ def _get_syntax( lines = ( 
Text("\n") .join(lines) - .with_indent_guides(self.tab_size, style=style) + .with_indent_guides(self.tab_size, style=style + Style(italic=False)) .split("\n", allow_blank=True) ) @@ -830,7 +839,6 @@ def _get_code_index_for_syntax_position( if __name__ == "__main__": # pragma: no cover - import argparse import sys diff --git a/src/pip/_vendor/rich/table.py b/src/pip/_vendor/rich/table.py index 17409f2ee8d..43c718ebf59 100644 --- a/src/pip/_vendor/rich/table.py +++ b/src/pip/_vendor/rich/table.py @@ -212,7 +212,6 @@ def __init__( caption_justify: "JustifyMethod" = "center", highlight: bool = False, ) -> None: - self.columns: List[Column] = [] self.rows: List[Row] = [] self.title = title @@ -471,7 +470,6 @@ def add_section(self) -> None: def __rich_console__( self, console: "Console", options: "ConsoleOptions" ) -> "RenderResult": - if not self.columns: yield Segment("\n") return @@ -685,7 +683,7 @@ def get_padding(first_row: bool, last_row: bool) -> Tuple[int, int, int, int]: getattr(renderable, "vertical", None) or column.vertical, ) else: - for (style, renderable) in raw_cells: + for style, renderable in raw_cells: yield _Cell( style, renderable, diff --git a/src/pip/_vendor/rich/text.py b/src/pip/_vendor/rich/text.py index 998cb87dab7..09f881e7296 100644 --- a/src/pip/_vendor/rich/text.py +++ b/src/pip/_vendor/rich/text.py @@ -38,6 +38,7 @@ _re_whitespace = re.compile(r"\s+$") TextType = Union[str, "Text"] +"""A plain string or a [Text][rich.text.Text] instance.""" GetStyleCallable = Callable[[str], Optional[StyleType]] @@ -97,6 +98,21 @@ def right_crop(self, offset: int) -> "Span": return self return Span(start, min(offset, end), style) + def extend(self, cells: int) -> "Span": + """Extend the span by the given number of cells. + + Args: + cells (int): Additional space to add to end of span. + + Returns: + Span: A span. + """ + if cells: + start, end, style = self + return Span(start, end + cells, style) + else: + return self + class Text(JupyterMixin): """Text with color / style. @@ -108,7 +124,7 @@ class Text(JupyterMixin): overflow (str, optional): Overflow method: "crop", "fold", "ellipsis". Defaults to None. no_wrap (bool, optional): Disable text wrapping, or None for default. Defaults to None. end (str, optional): Character to end text with. Defaults to "\\\\n". - tab_size (int): Number of spaces per tab, or ``None`` to use ``console.tab_size``. Defaults to 8. + tab_size (int): Number of spaces per tab, or ``None`` to use ``console.tab_size``. Defaults to None. spans (List[Span], optional). A list of predefined style spans. Defaults to None. """ @@ -133,7 +149,7 @@ def __init__( overflow: Optional["OverflowMethod"] = None, no_wrap: Optional[bool] = None, end: str = "\n", - tab_size: Optional[int] = 8, + tab_size: Optional[int] = None, spans: Optional[List[Span]] = None, ) -> None: sanitized_text = strip_control_codes(text) @@ -255,7 +271,9 @@ def from_markup( Args: text (str): A string containing console markup. + style (Union[str, Style], optional): Base style for text. Defaults to "". emoji (bool, optional): Also render emoji code. Defaults to True. + emoji_variant (str, optional): Optional emoji variant, either "text" or "emoji". Defaults to None. justify (str, optional): Justify method: "left", "center", "full", "right". Defaults to None. overflow (str, optional): Overflow method: "crop", "fold", "ellipsis". Defaults to None. end (str, optional): Character to end text with. Defaults to "\\\\n". 
@@ -292,7 +310,7 @@ def from_ansi( overflow (str, optional): Overflow method: "crop", "fold", "ellipsis". Defaults to None. no_wrap (bool, optional): Disable text wrapping, or None for default. Defaults to None. end (str, optional): Character to end text with. Defaults to "\\\\n". - tab_size (int): Number of spaces per tab, or ``None`` to use ``console.tab_size``. Defaults to 8. + tab_size (int): Number of spaces per tab, or ``None`` to use ``console.tab_size``. Defaults to None. """ from .ansi import AnsiDecoder @@ -353,8 +371,9 @@ def assemble( style (Union[str, Style], optional): Base style for text. Defaults to "". justify (str, optional): Justify method: "left", "center", "full", "right". Defaults to None. overflow (str, optional): Overflow method: "crop", "fold", "ellipsis". Defaults to None. + no_wrap (bool, optional): Disable text wrapping, or None for default. Defaults to None. end (str, optional): Character to end text with. Defaults to "\\\\n". - tab_size (int): Number of spaces per tab, or ``None`` to use ``console.tab_size``. Defaults to 8. + tab_size (int): Number of spaces per tab, or ``None`` to use ``console.tab_size``. Defaults to None. meta (Dict[str, Any], optional). Meta data to apply to text, or None for no meta data. Default to None Returns: @@ -408,7 +427,7 @@ def spans(self, spans: List[Span]) -> None: self._spans = spans[:] def blank_copy(self, plain: str = "") -> "Text": - """Return a new Text instance with copied meta data (but not the string or spans).""" + """Return a new Text instance with copied metadata (but not the string or spans).""" copy_self = Text( plain, style=self.style, @@ -489,7 +508,7 @@ def stylize_before( def apply_meta( self, meta: Dict[str, Any], start: int = 0, end: Optional[int] = None ) -> None: - """Apply meta data to the text, or a portion of the text. + """Apply metadata to the text, or a portion of the text. Args: meta (Dict[str, Any]): A dict of meta information. @@ -549,6 +568,27 @@ def get_style_at_offset(self, console: "Console", offset: int) -> Style: style += get_style(span_style, default="") return style + def extend_style(self, spaces: int) -> None: + """Extend the Text given number of spaces where the spaces have the same style as the last character. + + Args: + spaces (int): Number of spaces to add to the Text. + """ + if spaces <= 0: + return + spans = self.spans + new_spaces = " " * spaces + if spans: + end_offset = len(self) + self._spans[:] = [ + span.extend(spaces) if span.end >= end_offset else span + for span in spans + ] + self._text.append(new_spaces) + self._length += spaces + else: + self.plain += new_spaces + def highlight_regex( self, re_highlight: str, @@ -597,9 +637,9 @@ def highlight_words( """Highlight words with a style. Args: - words (Iterable[str]): Worlds to highlight. + words (Iterable[str]): Words to highlight. style (Union[str, Style]): Style to apply. - case_sensitive (bool, optional): Enable case sensitive matchings. Defaults to True. + case_sensitive (bool, optional): Enable case sensitive matching. Defaults to True. Returns: int: Number of words highlighted. 
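
The new `Text.extend_style()` above pads the text with spaces while stretching any span that reaches the end, which the `expand_tabs` rewrite below relies on to keep tab padding styled. A small sketch:

```python
# Sketch: padding added via extend_style() inherits the trailing span's style.
from rich.text import Text

text = Text("error")
text.stylize("bold red", 0, 5)  # span covers the whole string
text.extend_style(3)            # pad with three spaces

print(repr(text.plain))  # 'error   '
print(text.spans)        # [Span(0, 8, 'bold red')], stretched over the pad
```
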
@@ -646,7 +686,7 @@ def set_length(self, new_length: int) -> None: def __rich_console__( self, console: "Console", options: "ConsoleOptions" ) -> Iterable[Segment]: - tab_size: int = console.tab_size or self.tab_size or 8 + tab_size: int = console.tab_size if self.tab_size is None else self.tab_size justify = self.justify or options.justify or DEFAULT_JUSTIFY overflow = self.overflow or options.overflow or DEFAULT_OVERFLOW @@ -781,27 +821,35 @@ def expand_tabs(self, tab_size: Optional[int] = None) -> None: """ if "\t" not in self.plain: return - pos = 0 if tab_size is None: tab_size = self.tab_size - assert tab_size is not None - result = self.blank_copy() - append = result.append + if tab_size is None: + tab_size = 8 + + new_text: List[Text] = [] + append = new_text.append - _style = self.style for line in self.split("\n", include_separator=True): - parts = line.split("\t", include_separator=True) - for part in parts: - if part.plain.endswith("\t"): - part._text = [part.plain[:-1] + " "] - append(part) - pos += len(part) - spaces = tab_size - ((pos - 1) % tab_size) - 1 - if spaces: - append(" " * spaces, _style) - pos += spaces - else: + if "\t" not in line.plain: + append(line) + else: + cell_position = 0 + parts = line.split("\t", include_separator=True) + for part in parts: + if part.plain.endswith("\t"): + part._text[-1] = part._text[-1][:-1] + " " + cell_position += part.cell_len + tab_remainder = cell_position % tab_size + if tab_remainder: + spaces = tab_size - tab_remainder + part.extend_style(spaces) + cell_position += spaces + else: + cell_position += part.cell_len append(part) + + result = Text("").join(new_text) + self._text = [result.plain] self._length = len(self.plain) self._spans[:] = result._spans @@ -852,6 +900,7 @@ def pad(self, count: int, character: str = " ") -> None: Args: count (int): Width of padding. + character (str): The character to pad with. Must be a string of length 1. """ assert len(character) == 1, "Character must be a string of length 1" if count: @@ -932,7 +981,7 @@ def append( self._text.append(sanitized_text) offset = len(self) text_length = len(sanitized_text) - if style is not None: + if style: self._spans.append(Span(offset, offset + text_length, style)) self._length += text_length elif isinstance(text, Text): @@ -942,7 +991,7 @@ def append( "style must not be set when appending Text instance" ) text_length = self._length - if text.style is not None: + if text.style: self._spans.append( _Span(text_length, text_length + len(text), text.style) ) @@ -958,12 +1007,15 @@ def append_text(self, text: "Text") -> "Text": """Append another Text instance. This method is more performant that Text.append, but only works for Text. + Args: + text (Text): The Text instance to append to this instance. + Returns: Text: Returns self for chaining. """ _Span = Span text_length = self._length - if text.style is not None: + if text.style: self._spans.append(_Span(text_length, text_length + len(text), text.style)) self._text.append(text.plain) self._spans.extend( @@ -979,7 +1031,7 @@ def append_tokens( """Append iterable of str and style. Style may be a Style instance or a str style definition. Args: - pairs (Iterable[Tuple[str, Optional[StyleType]]]): An iterable of tuples containing str content and style. + tokens (Iterable[Tuple[str, Optional[StyleType]]]): An iterable of tuples containing str content and style. Returns: Text: Returns self for chaining. 
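
The `append`, `append_text`, and `append_tokens` hunks here swap `style is not None` for a plain truthiness check, so appending with an empty style string no longer records a useless zero-value span. For instance (a sketch):

```python
# Sketch of the truthiness change: style="" used to create Span(0, 5, ''),
# now the span list stays empty.
from rich.text import Text

text = Text()
text.append("plain", style="")
print(text.spans)  # []
```
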
@@ -990,7 +1042,7 @@ def append_tokens( offset = len(self) for content, style in tokens: append_text(content) - if style is not None: + if style: append_span(_Span(offset, offset + len(content), style)) offset += len(content) self._length = offset @@ -1088,7 +1140,6 @@ def divide(self, offsets: Iterable[int]) -> Lines: _Span = Span for span_start, span_end, style in self._spans: - lower_bound = 0 upper_bound = line_count start_line_no = (lower_bound + upper_bound) // 2 @@ -1158,8 +1209,7 @@ def wrap( Args: console (Console): Console instance. - width (int): Number of characters per line. - emoji (bool, optional): Also render emoji code. Defaults to True. + width (int): Number of cells available per line. justify (str, optional): Justify method: "default", "left", "center", "full", "right". Defaults to "default". overflow (str, optional): Overflow method: "crop", "fold", or "ellipsis". Defaults to None. tab_size (int, optional): Default tab size. Defaults to 8. diff --git a/src/pip/_vendor/rich/traceback.py b/src/pip/_vendor/rich/traceback.py index c4ffe1f99e6..f223ad44bfe 100644 --- a/src/pip/_vendor/rich/traceback.py +++ b/src/pip/_vendor/rich/traceback.py @@ -636,7 +636,6 @@ def render_locals(frame: Frame) -> Iterable[ConsoleRenderable]: excluded = False for frame_index, frame in enumerate(stack.frames): - if exclude_frames and frame_index in exclude_frames: excluded = True continue @@ -720,7 +719,6 @@ def render_locals(frame: Frame) -> Iterable[ConsoleRenderable]: if __name__ == "__main__": # pragma: no cover - from .console import Console console = Console() @@ -744,7 +742,6 @@ def foo(a: Any) -> None: bar(a) def error() -> None: - try: try: foo(0) diff --git a/src/pip/_vendor/rich/tree.py b/src/pip/_vendor/rich/tree.py index afe8da1a4a3..64bc75d2286 100644 --- a/src/pip/_vendor/rich/tree.py +++ b/src/pip/_vendor/rich/tree.py @@ -72,7 +72,6 @@ def add( def __rich_console__( self, console: "Console", options: "ConsoleOptions" ) -> "RenderResult": - stack: List[Iterator[Tuple[bool, Tree]]] = [] pop = stack.pop push = stack.append @@ -195,7 +194,6 @@ def __rich_measure__( if __name__ == "__main__": # pragma: no cover - from pip._vendor.rich.console import Group from pip._vendor.rich.markdown import Markdown from pip._vendor.rich.panel import Panel diff --git a/src/pip/_vendor/tenacity/__init__.py b/src/pip/_vendor/tenacity/__init__.py index 4f1603adeb6..c1b0310bdfb 100644 --- a/src/pip/_vendor/tenacity/__init__.py +++ b/src/pip/_vendor/tenacity/__init__.py @@ -501,7 +501,7 @@ def retry(func: WrappedFn) -> WrappedFn: @t.overload def retry( - sleep: t.Callable[[t.Union[int, float]], None] = sleep, + sleep: t.Callable[[t.Union[int, float]], t.Optional[t.Awaitable[None]]] = sleep, stop: "StopBaseT" = stop_never, wait: "WaitBaseT" = wait_none(), retry: "RetryBaseT" = retry_if_exception_type(), diff --git a/src/pip/_vendor/truststore/LICENSE b/src/pip/_vendor/truststore/LICENSE new file mode 100644 index 00000000000..7ec568c1136 --- /dev/null +++ b/src/pip/_vendor/truststore/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2022 Seth Michael Larson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following 
conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/src/pip/_vendor/truststore/__init__.py b/src/pip/_vendor/truststore/__init__.py new file mode 100644 index 00000000000..59930f455b0 --- /dev/null +++ b/src/pip/_vendor/truststore/__init__.py @@ -0,0 +1,13 @@ +"""Verify certificates using native system trust stores""" + +import sys as _sys + +if _sys.version_info < (3, 10): + raise ImportError("truststore requires Python 3.10 or later") + +from ._api import SSLContext, extract_from_ssl, inject_into_ssl # noqa: E402 + +del _api, _sys # type: ignore[name-defined] # noqa: F821 + +__all__ = ["SSLContext", "inject_into_ssl", "extract_from_ssl"] +__version__ = "0.8.0" diff --git a/src/pip/_vendor/truststore/_api.py b/src/pip/_vendor/truststore/_api.py new file mode 100644 index 00000000000..829aff72672 --- /dev/null +++ b/src/pip/_vendor/truststore/_api.py @@ -0,0 +1,302 @@ +import os +import platform +import socket +import ssl +import typing + +import _ssl # type: ignore[import] + +from ._ssl_constants import ( + _original_SSLContext, + _original_super_SSLContext, + _truststore_SSLContext_dunder_class, + _truststore_SSLContext_super_class, +) + +if platform.system() == "Windows": + from ._windows import _configure_context, _verify_peercerts_impl +elif platform.system() == "Darwin": + from ._macos import _configure_context, _verify_peercerts_impl +else: + from ._openssl import _configure_context, _verify_peercerts_impl + +if typing.TYPE_CHECKING: + from pip._vendor.typing_extensions import Buffer + +# From typeshed/stdlib/ssl.pyi +_StrOrBytesPath: typing.TypeAlias = str | bytes | os.PathLike[str] | os.PathLike[bytes] +_PasswordType: typing.TypeAlias = str | bytes | typing.Callable[[], str | bytes] + + +def inject_into_ssl() -> None: + """Injects the :class:`truststore.SSLContext` into the ``ssl`` + module by replacing :class:`ssl.SSLContext`. + """ + setattr(ssl, "SSLContext", SSLContext) + # urllib3 holds on to its own reference of ssl.SSLContext + # so we need to replace that reference too. + try: + import pip._vendor.urllib3.util.ssl_ as urllib3_ssl + + setattr(urllib3_ssl, "SSLContext", SSLContext) + except ImportError: + pass + + +def extract_from_ssl() -> None: + """Restores the :class:`ssl.SSLContext` class to its original state""" + setattr(ssl, "SSLContext", _original_SSLContext) + try: + import pip._vendor.urllib3.util.ssl_ as urllib3_ssl + + urllib3_ssl.SSLContext = _original_SSLContext + except ImportError: + pass + + +class SSLContext(_truststore_SSLContext_super_class): # type: ignore[misc] + """SSLContext API that uses system certificates on all platforms""" + + @property # type: ignore[misc] + def __class__(self) -> type: + # Dirty hack to get around isinstance() checks + # for ssl.SSLContext instances in aiohttp/trustme + # when using non-CPython implementations. 
+ return _truststore_SSLContext_dunder_class or SSLContext + + def __init__(self, protocol: int = None) -> None: # type: ignore[assignment] + self._ctx = _original_SSLContext(protocol) + + class TruststoreSSLObject(ssl.SSLObject): + # This object exists because wrap_bio() doesn't + # immediately do the handshake so we need to do + # certificate verifications after SSLObject.do_handshake() + + def do_handshake(self) -> None: + ret = super().do_handshake() + _verify_peercerts(self, server_hostname=self.server_hostname) + return ret + + self._ctx.sslobject_class = TruststoreSSLObject + + def wrap_socket( + self, + sock: socket.socket, + server_side: bool = False, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + server_hostname: str | None = None, + session: ssl.SSLSession | None = None, + ) -> ssl.SSLSocket: + # Use a context manager here because the + # inner SSLContext holds on to our state + # but also does the actual handshake. + with _configure_context(self._ctx): + ssl_sock = self._ctx.wrap_socket( + sock, + server_side=server_side, + server_hostname=server_hostname, + do_handshake_on_connect=do_handshake_on_connect, + suppress_ragged_eofs=suppress_ragged_eofs, + session=session, + ) + try: + _verify_peercerts(ssl_sock, server_hostname=server_hostname) + except Exception: + ssl_sock.close() + raise + return ssl_sock + + def wrap_bio( + self, + incoming: ssl.MemoryBIO, + outgoing: ssl.MemoryBIO, + server_side: bool = False, + server_hostname: str | None = None, + session: ssl.SSLSession | None = None, + ) -> ssl.SSLObject: + with _configure_context(self._ctx): + ssl_obj = self._ctx.wrap_bio( + incoming, + outgoing, + server_hostname=server_hostname, + server_side=server_side, + session=session, + ) + return ssl_obj + + def load_verify_locations( + self, + cafile: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None, + capath: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None, + cadata: typing.Union[str, "Buffer", None] = None, + ) -> None: + return self._ctx.load_verify_locations( + cafile=cafile, capath=capath, cadata=cadata + ) + + def load_cert_chain( + self, + certfile: _StrOrBytesPath, + keyfile: _StrOrBytesPath | None = None, + password: _PasswordType | None = None, + ) -> None: + return self._ctx.load_cert_chain( + certfile=certfile, keyfile=keyfile, password=password + ) + + def load_default_certs( + self, purpose: ssl.Purpose = ssl.Purpose.SERVER_AUTH + ) -> None: + return self._ctx.load_default_certs(purpose) + + def set_alpn_protocols(self, alpn_protocols: typing.Iterable[str]) -> None: + return self._ctx.set_alpn_protocols(alpn_protocols) + + def set_npn_protocols(self, npn_protocols: typing.Iterable[str]) -> None: + return self._ctx.set_npn_protocols(npn_protocols) + + def set_ciphers(self, __cipherlist: str) -> None: + return self._ctx.set_ciphers(__cipherlist) + + def get_ciphers(self) -> typing.Any: + return self._ctx.get_ciphers() + + def session_stats(self) -> dict[str, int]: + return self._ctx.session_stats() + + def cert_store_stats(self) -> dict[str, int]: + raise NotImplementedError() + + @typing.overload + def get_ca_certs( + self, binary_form: typing.Literal[False] = ... + ) -> list[typing.Any]: + ... + + @typing.overload + def get_ca_certs(self, binary_form: typing.Literal[True] = ...) -> list[bytes]: + ... + + @typing.overload + def get_ca_certs(self, binary_form: bool = ...) -> typing.Any: + ... 
+ + def get_ca_certs(self, binary_form: bool = False) -> list[typing.Any] | list[bytes]: + raise NotImplementedError() + + @property + def check_hostname(self) -> bool: + return self._ctx.check_hostname + + @check_hostname.setter + def check_hostname(self, value: bool) -> None: + self._ctx.check_hostname = value + + @property + def hostname_checks_common_name(self) -> bool: + return self._ctx.hostname_checks_common_name + + @hostname_checks_common_name.setter + def hostname_checks_common_name(self, value: bool) -> None: + self._ctx.hostname_checks_common_name = value + + @property + def keylog_filename(self) -> str: + return self._ctx.keylog_filename + + @keylog_filename.setter + def keylog_filename(self, value: str) -> None: + self._ctx.keylog_filename = value + + @property + def maximum_version(self) -> ssl.TLSVersion: + return self._ctx.maximum_version + + @maximum_version.setter + def maximum_version(self, value: ssl.TLSVersion) -> None: + _original_super_SSLContext.maximum_version.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + @property + def minimum_version(self) -> ssl.TLSVersion: + return self._ctx.minimum_version + + @minimum_version.setter + def minimum_version(self, value: ssl.TLSVersion) -> None: + _original_super_SSLContext.minimum_version.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + @property + def options(self) -> ssl.Options: + return self._ctx.options + + @options.setter + def options(self, value: ssl.Options) -> None: + _original_super_SSLContext.options.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + @property + def post_handshake_auth(self) -> bool: + return self._ctx.post_handshake_auth + + @post_handshake_auth.setter + def post_handshake_auth(self, value: bool) -> None: + self._ctx.post_handshake_auth = value + + @property + def protocol(self) -> ssl._SSLMethod: + return self._ctx.protocol + + @property + def security_level(self) -> int: + return self._ctx.security_level + + @property + def verify_flags(self) -> ssl.VerifyFlags: + return self._ctx.verify_flags + + @verify_flags.setter + def verify_flags(self, value: ssl.VerifyFlags) -> None: + _original_super_SSLContext.verify_flags.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + @property + def verify_mode(self) -> ssl.VerifyMode: + return self._ctx.verify_mode + + @verify_mode.setter + def verify_mode(self, value: ssl.VerifyMode) -> None: + _original_super_SSLContext.verify_mode.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + +def _verify_peercerts( + sock_or_sslobj: ssl.SSLSocket | ssl.SSLObject, server_hostname: str | None +) -> None: + """ + Verifies the peer certificates from an SSLSocket or SSLObject + against the certificates in the OS trust store. + """ + sslobj: ssl.SSLObject = sock_or_sslobj # type: ignore[assignment] + try: + while not hasattr(sslobj, "get_unverified_chain"): + sslobj = sslobj._sslobj # type: ignore[attr-defined] + except AttributeError: + pass + + # SSLObject.get_unverified_chain() returns 'None' + # if the peer sends no certificates. This is common + # for the server-side scenario. 
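A hedged sketch of the client-side flow that ends up in _verify_peercerts(); "example.com" is only a placeholder host:

import socket
import ssl
from pip._vendor import truststore

ctx = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
with socket.create_connection(("example.com", 443)) as sock:
    # wrap_socket() completes the TLS handshake, then verifies the
    # peer chain against the OS trust store; on failure it closes
    # the socket and re-raises the verification error.
    with ctx.wrap_socket(sock, server_hostname="example.com") as tls:
        print(tls.version())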
+ unverified_chain: typing.Sequence[_ssl.Certificate] = ( + sslobj.get_unverified_chain() or () # type: ignore[attr-defined] + ) + cert_bytes = [cert.public_bytes(_ssl.ENCODING_DER) for cert in unverified_chain] + _verify_peercerts_impl( + sock_or_sslobj.context, cert_bytes, server_hostname=server_hostname + ) diff --git a/src/pip/_vendor/truststore/_macos.py b/src/pip/_vendor/truststore/_macos.py new file mode 100644 index 00000000000..7dc440bf362 --- /dev/null +++ b/src/pip/_vendor/truststore/_macos.py @@ -0,0 +1,501 @@ +import contextlib +import ctypes +import platform +import ssl +import typing +from ctypes import ( + CDLL, + POINTER, + c_bool, + c_char_p, + c_int32, + c_long, + c_uint32, + c_ulong, + c_void_p, +) +from ctypes.util import find_library + +from ._ssl_constants import _set_ssl_context_verify_mode + +_mac_version = platform.mac_ver()[0] +_mac_version_info = tuple(map(int, _mac_version.split("."))) +if _mac_version_info < (10, 8): + raise ImportError( + f"Only OS X 10.8 and newer are supported, not {_mac_version_info[0]}.{_mac_version_info[1]}" + ) + + +def _load_cdll(name: str, macos10_16_path: str) -> CDLL: + """Loads a CDLL by name, falling back to known path on 10.16+""" + try: + # Big Sur is technically 11 but we use 10.16 due to the Big Sur + # beta being labeled as 10.16. + path: str | None + if _mac_version_info >= (10, 16): + path = macos10_16_path + else: + path = find_library(name) + if not path: + raise OSError # Caught and reraised as 'ImportError' + return CDLL(path, use_errno=True) + except OSError: + raise ImportError(f"The library {name} failed to load") from None + + +Security = _load_cdll( + "Security", "/System/Library/Frameworks/Security.framework/Security" +) +CoreFoundation = _load_cdll( + "CoreFoundation", + "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation", +) + +Boolean = c_bool +CFIndex = c_long +CFStringEncoding = c_uint32 +CFData = c_void_p +CFString = c_void_p +CFArray = c_void_p +CFMutableArray = c_void_p +CFError = c_void_p +CFType = c_void_p +CFTypeID = c_ulong +CFTypeRef = POINTER(CFType) +CFAllocatorRef = c_void_p + +OSStatus = c_int32 + +CFErrorRef = POINTER(CFError) +CFDataRef = POINTER(CFData) +CFStringRef = POINTER(CFString) +CFArrayRef = POINTER(CFArray) +CFMutableArrayRef = POINTER(CFMutableArray) +CFArrayCallBacks = c_void_p +CFOptionFlags = c_uint32 + +SecCertificateRef = POINTER(c_void_p) +SecPolicyRef = POINTER(c_void_p) +SecTrustRef = POINTER(c_void_p) +SecTrustResultType = c_uint32 +SecTrustOptionFlags = c_uint32 + +try: + Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef] + Security.SecCertificateCreateWithData.restype = SecCertificateRef + + Security.SecCertificateCopyData.argtypes = [SecCertificateRef] + Security.SecCertificateCopyData.restype = CFDataRef + + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] + Security.SecCopyErrorMessageString.restype = CFStringRef + + Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef] + Security.SecTrustSetAnchorCertificates.restype = OSStatus + + Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean] + Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus + + Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)] + Security.SecTrustEvaluate.restype = OSStatus + + Security.SecPolicyCreateRevocation.argtypes = [CFOptionFlags] + Security.SecPolicyCreateRevocation.restype = SecPolicyRef + + Security.SecPolicyCreateSSL.argtypes = [Boolean, CFStringRef] + 
Security.SecPolicyCreateSSL.restype = SecPolicyRef + + Security.SecTrustCreateWithCertificates.argtypes = [ + CFTypeRef, + CFTypeRef, + POINTER(SecTrustRef), + ] + Security.SecTrustCreateWithCertificates.restype = OSStatus + + Security.SecTrustGetTrustResult.argtypes = [ + SecTrustRef, + POINTER(SecTrustResultType), + ] + Security.SecTrustGetTrustResult.restype = OSStatus + + Security.SecTrustRef = SecTrustRef # type: ignore[attr-defined] + Security.SecTrustResultType = SecTrustResultType # type: ignore[attr-defined] + Security.OSStatus = OSStatus # type: ignore[attr-defined] + + kSecRevocationUseAnyAvailableMethod = 3 + kSecRevocationRequirePositiveResponse = 8 + + CoreFoundation.CFRelease.argtypes = [CFTypeRef] + CoreFoundation.CFRelease.restype = None + + CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef] + CoreFoundation.CFGetTypeID.restype = CFTypeID + + CoreFoundation.CFStringCreateWithCString.argtypes = [ + CFAllocatorRef, + c_char_p, + CFStringEncoding, + ] + CoreFoundation.CFStringCreateWithCString.restype = CFStringRef + + CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding] + CoreFoundation.CFStringGetCStringPtr.restype = c_char_p + + CoreFoundation.CFStringGetCString.argtypes = [ + CFStringRef, + c_char_p, + CFIndex, + CFStringEncoding, + ] + CoreFoundation.CFStringGetCString.restype = c_bool + + CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex] + CoreFoundation.CFDataCreate.restype = CFDataRef + + CoreFoundation.CFDataGetLength.argtypes = [CFDataRef] + CoreFoundation.CFDataGetLength.restype = CFIndex + + CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef] + CoreFoundation.CFDataGetBytePtr.restype = c_void_p + + CoreFoundation.CFArrayCreate.argtypes = [ + CFAllocatorRef, + POINTER(CFTypeRef), + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreate.restype = CFArrayRef + + CoreFoundation.CFArrayCreateMutable.argtypes = [ + CFAllocatorRef, + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef + + CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p] + CoreFoundation.CFArrayAppendValue.restype = None + + CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef] + CoreFoundation.CFArrayGetCount.restype = CFIndex + + CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex] + CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p + + CoreFoundation.CFErrorGetCode.argtypes = [CFErrorRef] + CoreFoundation.CFErrorGetCode.restype = CFIndex + + CoreFoundation.CFErrorCopyDescription.argtypes = [CFErrorRef] + CoreFoundation.CFErrorCopyDescription.restype = CFStringRef + + CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( # type: ignore[attr-defined] + CoreFoundation, "kCFAllocatorDefault" + ) + CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll( # type: ignore[attr-defined] + CoreFoundation, "kCFTypeArrayCallBacks" + ) + + CoreFoundation.CFTypeRef = CFTypeRef # type: ignore[attr-defined] + CoreFoundation.CFArrayRef = CFArrayRef # type: ignore[attr-defined] + CoreFoundation.CFStringRef = CFStringRef # type: ignore[attr-defined] + CoreFoundation.CFErrorRef = CFErrorRef # type: ignore[attr-defined] + +except AttributeError: + raise ImportError("Error initializing ctypes") from None + + +def _handle_osstatus(result: OSStatus, _: typing.Any, args: typing.Any) -> typing.Any: + """ + Raises an error if the OSStatus value is non-zero. 
+ """ + if int(result) == 0: + return args + + # Returns a CFString which we need to transform + # into a UTF-8 Python string. + error_message_cfstring = None + try: + error_message_cfstring = Security.SecCopyErrorMessageString(result, None) + + # First step is convert the CFString into a C string pointer. + # We try the fast no-copy way first. + error_message_cfstring_c_void_p = ctypes.cast( + error_message_cfstring, ctypes.POINTER(ctypes.c_void_p) + ) + message = CoreFoundation.CFStringGetCStringPtr( + error_message_cfstring_c_void_p, CFConst.kCFStringEncodingUTF8 + ) + + # Quoting the Apple dev docs: + # + # "A pointer to a C string or NULL if the internal + # storage of theString does not allow this to be + # returned efficiently." + # + # So we need to get our hands dirty. + if message is None: + buffer = ctypes.create_string_buffer(1024) + result = CoreFoundation.CFStringGetCString( + error_message_cfstring_c_void_p, + buffer, + 1024, + CFConst.kCFStringEncodingUTF8, + ) + if not result: + raise OSError("Error copying C string from CFStringRef") + message = buffer.value + + finally: + if error_message_cfstring is not None: + CoreFoundation.CFRelease(error_message_cfstring) + + # If no message can be found for this status we come + # up with a generic one that forwards the status code. + if message is None or message == "": + message = f"SecureTransport operation returned a non-zero OSStatus: {result}" + + raise ssl.SSLError(message) + + +Security.SecTrustCreateWithCertificates.errcheck = _handle_osstatus # type: ignore[assignment] +Security.SecTrustSetAnchorCertificates.errcheck = _handle_osstatus # type: ignore[assignment] +Security.SecTrustGetTrustResult.errcheck = _handle_osstatus # type: ignore[assignment] + + +class CFConst: + """CoreFoundation constants""" + + kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) + + errSecIncompleteCertRevocationCheck = -67635 + errSecHostNameMismatch = -67602 + errSecCertificateExpired = -67818 + errSecNotTrusted = -67843 + + +def _bytes_to_cf_data_ref(value: bytes) -> CFDataRef: # type: ignore[valid-type] + return CoreFoundation.CFDataCreate( # type: ignore[no-any-return] + CoreFoundation.kCFAllocatorDefault, value, len(value) + ) + + +def _bytes_to_cf_string(value: bytes) -> CFString: + """ + Given a Python binary data, create a CFString. + The string must be CFReleased by the caller. + """ + c_str = ctypes.c_char_p(value) + cf_str = CoreFoundation.CFStringCreateWithCString( + CoreFoundation.kCFAllocatorDefault, + c_str, + CFConst.kCFStringEncodingUTF8, + ) + return cf_str # type: ignore[no-any-return] + + +def _cf_string_ref_to_str(cf_string_ref: CFStringRef) -> str | None: # type: ignore[valid-type] + """ + Creates a Unicode string from a CFString object. Used entirely for error + reporting. + Yes, it annoys me quite a lot that this function is this complex. + """ + + string = CoreFoundation.CFStringGetCStringPtr( + cf_string_ref, CFConst.kCFStringEncodingUTF8 + ) + if string is None: + buffer = ctypes.create_string_buffer(1024) + result = CoreFoundation.CFStringGetCString( + cf_string_ref, buffer, 1024, CFConst.kCFStringEncodingUTF8 + ) + if not result: + raise OSError("Error copying C string from CFStringRef") + string = buffer.value + if string is not None: + string = string.decode("utf-8") + return string # type: ignore[no-any-return] + + +def _der_certs_to_cf_cert_array(certs: list[bytes]) -> CFMutableArrayRef: # type: ignore[valid-type] + """Builds a CFArray of SecCertificateRefs from a list of DER-encoded certificates. 
+ Responsibility of the caller to call CoreFoundation.CFRelease on the CFArray. + """ + cf_array = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + if not cf_array: + raise MemoryError("Unable to allocate memory!") + + for cert_data in certs: + cf_data = None + sec_cert_ref = None + try: + cf_data = _bytes_to_cf_data_ref(cert_data) + sec_cert_ref = Security.SecCertificateCreateWithData( + CoreFoundation.kCFAllocatorDefault, cf_data + ) + CoreFoundation.CFArrayAppendValue(cf_array, sec_cert_ref) + finally: + if cf_data: + CoreFoundation.CFRelease(cf_data) + if sec_cert_ref: + CoreFoundation.CFRelease(sec_cert_ref) + + return cf_array # type: ignore[no-any-return] + + +@contextlib.contextmanager +def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]: + check_hostname = ctx.check_hostname + verify_mode = ctx.verify_mode + ctx.check_hostname = False + _set_ssl_context_verify_mode(ctx, ssl.CERT_NONE) + try: + yield + finally: + ctx.check_hostname = check_hostname + _set_ssl_context_verify_mode(ctx, verify_mode) + + +def _verify_peercerts_impl( + ssl_context: ssl.SSLContext, + cert_chain: list[bytes], + server_hostname: str | None = None, +) -> None: + certs = None + policies = None + trust = None + cf_error = None + try: + if server_hostname is not None: + cf_str_hostname = None + try: + cf_str_hostname = _bytes_to_cf_string(server_hostname.encode("ascii")) + ssl_policy = Security.SecPolicyCreateSSL(True, cf_str_hostname) + finally: + if cf_str_hostname: + CoreFoundation.CFRelease(cf_str_hostname) + else: + ssl_policy = Security.SecPolicyCreateSSL(True, None) + + policies = ssl_policy + if ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_CHAIN: + # Add explicit policy requiring positive revocation checks + policies = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + CoreFoundation.CFArrayAppendValue(policies, ssl_policy) + CoreFoundation.CFRelease(ssl_policy) + revocation_policy = Security.SecPolicyCreateRevocation( + kSecRevocationUseAnyAvailableMethod + | kSecRevocationRequirePositiveResponse + ) + CoreFoundation.CFArrayAppendValue(policies, revocation_policy) + CoreFoundation.CFRelease(revocation_policy) + elif ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_LEAF: + raise NotImplementedError("VERIFY_CRL_CHECK_LEAF not implemented for macOS") + + certs = None + try: + certs = _der_certs_to_cf_cert_array(cert_chain) + + # Now that we have certificates loaded and a SecPolicy + # we can finally create a SecTrust object! + trust = Security.SecTrustRef() + Security.SecTrustCreateWithCertificates( + certs, policies, ctypes.byref(trust) + ) + + finally: + # The certs are now being held by SecTrust so we can + # release our handles for the array. + if certs: + CoreFoundation.CFRelease(certs) + + # If there are additional trust anchors to load we need to transform + # the list of DER-encoded certificates into a CFArray. Otherwise + # pass 'None' to signal that we only want system / fetched certificates. 
+ ctx_ca_certs_der: list[bytes] | None = ssl_context.get_ca_certs( + binary_form=True + ) + if ctx_ca_certs_der: + ctx_ca_certs = None + try: + ctx_ca_certs = _der_certs_to_cf_cert_array(ctx_ca_certs_der) + Security.SecTrustSetAnchorCertificates(trust, ctx_ca_certs) + finally: + if ctx_ca_certs: + CoreFoundation.CFRelease(ctx_ca_certs) + else: + Security.SecTrustSetAnchorCertificates(trust, None) + + cf_error = CoreFoundation.CFErrorRef() + sec_trust_eval_result = Security.SecTrustEvaluateWithError( + trust, ctypes.byref(cf_error) + ) + # sec_trust_eval_result is a bool (0 or 1) + # where 1 means that the certs are trusted. + if sec_trust_eval_result == 1: + is_trusted = True + elif sec_trust_eval_result == 0: + is_trusted = False + else: + raise ssl.SSLError( + f"Unknown result from Security.SecTrustEvaluateWithError: {sec_trust_eval_result!r}" + ) + + cf_error_code = 0 + if not is_trusted: + cf_error_code = CoreFoundation.CFErrorGetCode(cf_error) + + # If the error is a known failure that we're + # explicitly okay with from SSLContext configuration + # we can set is_trusted accordingly. + if ssl_context.verify_mode != ssl.CERT_REQUIRED and ( + cf_error_code == CFConst.errSecNotTrusted + or cf_error_code == CFConst.errSecCertificateExpired + ): + is_trusted = True + elif ( + not ssl_context.check_hostname + and cf_error_code == CFConst.errSecHostNameMismatch + ): + is_trusted = True + + # If we're still not trusted then we start to + # construct and raise the SSLCertVerificationError. + if not is_trusted: + cf_error_string_ref = None + try: + cf_error_string_ref = CoreFoundation.CFErrorCopyDescription(cf_error) + + # Can this ever return 'None' if there's a CFError? + cf_error_message = ( + _cf_string_ref_to_str(cf_error_string_ref) + or "Certificate verification failed" + ) + + # TODO: Not sure if we need the SecTrustResultType for anything? + # We only care whether or not it's a success or failure for now. + sec_trust_result_type = Security.SecTrustResultType() + Security.SecTrustGetTrustResult( + trust, ctypes.byref(sec_trust_result_type) + ) + + err = ssl.SSLCertVerificationError(cf_error_message) + err.verify_message = cf_error_message + err.verify_code = cf_error_code + raise err + finally: + if cf_error_string_ref: + CoreFoundation.CFRelease(cf_error_string_ref) + + finally: + if policies: + CoreFoundation.CFRelease(policies) + if trust: + CoreFoundation.CFRelease(trust) diff --git a/src/pip/_vendor/truststore/_openssl.py b/src/pip/_vendor/truststore/_openssl.py new file mode 100644 index 00000000000..9951cf75c40 --- /dev/null +++ b/src/pip/_vendor/truststore/_openssl.py @@ -0,0 +1,66 @@ +import contextlib +import os +import re +import ssl +import typing + +# candidates based on https://github.com/tiran/certifi-system-store by Christian Heimes +_CA_FILE_CANDIDATES = [ + # Alpine, Arch, Fedora 34+, OpenWRT, RHEL 9+, BSD + "/etc/ssl/cert.pem", + # Fedora <= 34, RHEL <= 9, CentOS <= 9 + "/etc/pki/tls/cert.pem", + # Debian, Ubuntu (requires ca-certificates) + "/etc/ssl/certs/ca-certificates.crt", + # SUSE + "/etc/ssl/ca-bundle.pem", +] + +_HASHED_CERT_FILENAME_RE = re.compile(r"^[0-9a-fA-F]{8}\.[0-9]$") + + +@contextlib.contextmanager +def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]: + # First, check whether the default locations from OpenSSL + # seem like they will give us a usable set of CA certs.
+ # ssl.get_default_verify_paths already takes care of: + # - getting cafile from either the SSL_CERT_FILE env var + # or the path configured when OpenSSL was compiled, + # and verifying that that path exists + # - getting capath from either the SSL_CERT_DIR env var + # or the path configured when OpenSSL was compiled, + # and verifying that that path exists + # In addition we'll check whether capath appears to contain certs. + defaults = ssl.get_default_verify_paths() + if defaults.cafile or (defaults.capath and _capath_contains_certs(defaults.capath)): + ctx.set_default_verify_paths() + else: + # cafile from OpenSSL doesn't exist + # and capath from OpenSSL doesn't contain certs. + # Let's search other common locations instead. + for cafile in _CA_FILE_CANDIDATES: + if os.path.isfile(cafile): + ctx.load_verify_locations(cafile=cafile) + break + + yield + + +def _capath_contains_certs(capath: str) -> bool: + """Check whether capath exists and contains certs in the expected format.""" + if not os.path.isdir(capath): + return False + for name in os.listdir(capath): + if _HASHED_CERT_FILENAME_RE.match(name): + return True + return False + + +def _verify_peercerts_impl( + ssl_context: ssl.SSLContext, + cert_chain: list[bytes], + server_hostname: str | None = None, +) -> None: + # This is a no-op because we've enabled SSLContext's built-in + # verification via verify_mode=CERT_REQUIRED, and don't need to repeat it. + pass diff --git a/src/pip/_vendor/truststore/_ssl_constants.py b/src/pip/_vendor/truststore/_ssl_constants.py new file mode 100644 index 00000000000..b1ee7a3cb13 --- /dev/null +++ b/src/pip/_vendor/truststore/_ssl_constants.py @@ -0,0 +1,31 @@ +import ssl +import sys +import typing + +# Hold on to the original class so we can create it consistently +# even if we inject our own SSLContext into the ssl module. +_original_SSLContext = ssl.SSLContext +_original_super_SSLContext = super(_original_SSLContext, _original_SSLContext) + +# CPython is known to be good, but non-CPython implementations +# may implement SSLContext differently so to be safe we don't +# subclass the SSLContext. + +# This is returned by truststore.SSLContext.__class__() +_truststore_SSLContext_dunder_class: typing.Optional[type] + +# This value is the superclass of truststore.SSLContext. 
+_truststore_SSLContext_super_class: type + +if sys.implementation.name == "cpython": + _truststore_SSLContext_super_class = _original_SSLContext + _truststore_SSLContext_dunder_class = None +else: + _truststore_SSLContext_super_class = object + _truststore_SSLContext_dunder_class = _original_SSLContext + + +def _set_ssl_context_verify_mode( + ssl_context: ssl.SSLContext, verify_mode: ssl.VerifyMode +) -> None: + _original_super_SSLContext.verify_mode.__set__(ssl_context, verify_mode) # type: ignore[attr-defined] diff --git a/src/pip/_vendor/truststore/_windows.py b/src/pip/_vendor/truststore/_windows.py new file mode 100644 index 00000000000..3de4960a1b0 --- /dev/null +++ b/src/pip/_vendor/truststore/_windows.py @@ -0,0 +1,554 @@ +import contextlib +import ssl +import typing +from ctypes import WinDLL # type: ignore +from ctypes import WinError # type: ignore +from ctypes import ( + POINTER, + Structure, + c_char_p, + c_ulong, + c_void_p, + c_wchar_p, + cast, + create_unicode_buffer, + pointer, + sizeof, +) +from ctypes.wintypes import ( + BOOL, + DWORD, + HANDLE, + LONG, + LPCSTR, + LPCVOID, + LPCWSTR, + LPFILETIME, + LPSTR, + LPWSTR, +) +from typing import TYPE_CHECKING, Any + +from ._ssl_constants import _set_ssl_context_verify_mode + +HCERTCHAINENGINE = HANDLE +HCERTSTORE = HANDLE +HCRYPTPROV_LEGACY = HANDLE + + +class CERT_CONTEXT(Structure): + _fields_ = ( + ("dwCertEncodingType", DWORD), + ("pbCertEncoded", c_void_p), + ("cbCertEncoded", DWORD), + ("pCertInfo", c_void_p), + ("hCertStore", HCERTSTORE), + ) + + +PCERT_CONTEXT = POINTER(CERT_CONTEXT) +PCCERT_CONTEXT = POINTER(PCERT_CONTEXT) + + +class CERT_ENHKEY_USAGE(Structure): + _fields_ = ( + ("cUsageIdentifier", DWORD), + ("rgpszUsageIdentifier", POINTER(LPSTR)), + ) + + +PCERT_ENHKEY_USAGE = POINTER(CERT_ENHKEY_USAGE) + + +class CERT_USAGE_MATCH(Structure): + _fields_ = ( + ("dwType", DWORD), + ("Usage", CERT_ENHKEY_USAGE), + ) + + +class CERT_CHAIN_PARA(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("RequestedUsage", CERT_USAGE_MATCH), + ("RequestedIssuancePolicy", CERT_USAGE_MATCH), + ("dwUrlRetrievalTimeout", DWORD), + ("fCheckRevocationFreshnessTime", BOOL), + ("dwRevocationFreshnessTime", DWORD), + ("pftCacheResync", LPFILETIME), + ("pStrongSignPara", c_void_p), + ("dwStrongSignFlags", DWORD), + ) + + +if TYPE_CHECKING: + PCERT_CHAIN_PARA = pointer[CERT_CHAIN_PARA] # type: ignore[misc] +else: + PCERT_CHAIN_PARA = POINTER(CERT_CHAIN_PARA) + + +class CERT_TRUST_STATUS(Structure): + _fields_ = ( + ("dwErrorStatus", DWORD), + ("dwInfoStatus", DWORD), + ) + + +class CERT_CHAIN_ELEMENT(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("pCertContext", PCERT_CONTEXT), + ("TrustStatus", CERT_TRUST_STATUS), + ("pRevocationInfo", c_void_p), + ("pIssuanceUsage", PCERT_ENHKEY_USAGE), + ("pApplicationUsage", PCERT_ENHKEY_USAGE), + ("pwszExtendedErrorInfo", LPCWSTR), + ) + + +PCERT_CHAIN_ELEMENT = POINTER(CERT_CHAIN_ELEMENT) + + +class CERT_SIMPLE_CHAIN(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("TrustStatus", CERT_TRUST_STATUS), + ("cElement", DWORD), + ("rgpElement", POINTER(PCERT_CHAIN_ELEMENT)), + ("pTrustListInfo", c_void_p), + ("fHasRevocationFreshnessTime", BOOL), + ("dwRevocationFreshnessTime", DWORD), + ) + + +PCERT_SIMPLE_CHAIN = POINTER(CERT_SIMPLE_CHAIN) + + +class CERT_CHAIN_CONTEXT(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("TrustStatus", CERT_TRUST_STATUS), + ("cChain", DWORD), + ("rgpChain", POINTER(PCERT_SIMPLE_CHAIN)), + ("cLowerQualityChainContext", DWORD), + ("rgpLowerQualityChainContext", 
c_void_p), + ("fHasRevocationFreshnessTime", BOOL), + ("dwRevocationFreshnessTime", DWORD), + ) + + +PCERT_CHAIN_CONTEXT = POINTER(CERT_CHAIN_CONTEXT) +PCCERT_CHAIN_CONTEXT = POINTER(PCERT_CHAIN_CONTEXT) + + +class SSL_EXTRA_CERT_CHAIN_POLICY_PARA(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("dwAuthType", DWORD), + ("fdwChecks", DWORD), + ("pwszServerName", LPCWSTR), + ) + + +class CERT_CHAIN_POLICY_PARA(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("dwFlags", DWORD), + ("pvExtraPolicyPara", c_void_p), + ) + + +PCERT_CHAIN_POLICY_PARA = POINTER(CERT_CHAIN_POLICY_PARA) + + +class CERT_CHAIN_POLICY_STATUS(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("dwError", DWORD), + ("lChainIndex", LONG), + ("lElementIndex", LONG), + ("pvExtraPolicyStatus", c_void_p), + ) + + +PCERT_CHAIN_POLICY_STATUS = POINTER(CERT_CHAIN_POLICY_STATUS) + + +class CERT_CHAIN_ENGINE_CONFIG(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("hRestrictedRoot", HCERTSTORE), + ("hRestrictedTrust", HCERTSTORE), + ("hRestrictedOther", HCERTSTORE), + ("cAdditionalStore", DWORD), + ("rghAdditionalStore", c_void_p), + ("dwFlags", DWORD), + ("dwUrlRetrievalTimeout", DWORD), + ("MaximumCachedCertificates", DWORD), + ("CycleDetectionModulus", DWORD), + ("hExclusiveRoot", HCERTSTORE), + ("hExclusiveTrustedPeople", HCERTSTORE), + ("dwExclusiveFlags", DWORD), + ) + + +PCERT_CHAIN_ENGINE_CONFIG = POINTER(CERT_CHAIN_ENGINE_CONFIG) +PHCERTCHAINENGINE = POINTER(HCERTCHAINENGINE) + +X509_ASN_ENCODING = 0x00000001 +PKCS_7_ASN_ENCODING = 0x00010000 +CERT_STORE_PROV_MEMORY = b"Memory" +CERT_STORE_ADD_USE_EXISTING = 2 +USAGE_MATCH_TYPE_OR = 1 +OID_PKIX_KP_SERVER_AUTH = c_char_p(b"1.3.6.1.5.5.7.3.1") +CERT_CHAIN_REVOCATION_CHECK_END_CERT = 0x10000000 +CERT_CHAIN_REVOCATION_CHECK_CHAIN = 0x20000000 +CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS = 0x00000007 +CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG = 0x00000008 +CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG = 0x00000010 +CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG = 0x00000040 +CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG = 0x00000020 +CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG = 0x00000080 +CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS = 0x00000F00 +CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG = 0x00008000 +CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG = 0x00004000 +AUTHTYPE_SERVER = 2 +CERT_CHAIN_POLICY_SSL = 4 +FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000 +FORMAT_MESSAGE_IGNORE_INSERTS = 0x00000200 + +# Flags to set for SSLContext.verify_mode=CERT_NONE +CERT_CHAIN_POLICY_VERIFY_MODE_NONE_FLAGS = ( + CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS + | CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG + | CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG + | CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG + | CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG + | CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG + | CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS + | CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG + | CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG +) + +wincrypt = WinDLL("crypt32.dll") +kernel32 = WinDLL("kernel32.dll") + + +def _handle_win_error(result: bool, _: Any, args: Any) -> Any: + if not result: + # Note, actually raises OSError after calling GetLastError and FormatMessage + raise WinError() + return args + + +CertCreateCertificateChainEngine = wincrypt.CertCreateCertificateChainEngine +CertCreateCertificateChainEngine.argtypes = ( + PCERT_CHAIN_ENGINE_CONFIG, + PHCERTCHAINENGINE, +) +CertCreateCertificateChainEngine.errcheck = _handle_win_error + +CertOpenStore = wincrypt.CertOpenStore 
+CertOpenStore.argtypes = (LPCSTR, DWORD, HCRYPTPROV_LEGACY, DWORD, c_void_p) +CertOpenStore.restype = HCERTSTORE +CertOpenStore.errcheck = _handle_win_error + +CertAddEncodedCertificateToStore = wincrypt.CertAddEncodedCertificateToStore +CertAddEncodedCertificateToStore.argtypes = ( + HCERTSTORE, + DWORD, + c_char_p, + DWORD, + DWORD, + PCCERT_CONTEXT, +) +CertAddEncodedCertificateToStore.restype = BOOL + +CertCreateCertificateContext = wincrypt.CertCreateCertificateContext +CertCreateCertificateContext.argtypes = (DWORD, c_char_p, DWORD) +CertCreateCertificateContext.restype = PCERT_CONTEXT +CertCreateCertificateContext.errcheck = _handle_win_error + +CertGetCertificateChain = wincrypt.CertGetCertificateChain +CertGetCertificateChain.argtypes = ( + HCERTCHAINENGINE, + PCERT_CONTEXT, + LPFILETIME, + HCERTSTORE, + PCERT_CHAIN_PARA, + DWORD, + c_void_p, + PCCERT_CHAIN_CONTEXT, +) +CertGetCertificateChain.restype = BOOL +CertGetCertificateChain.errcheck = _handle_win_error + +CertVerifyCertificateChainPolicy = wincrypt.CertVerifyCertificateChainPolicy +CertVerifyCertificateChainPolicy.argtypes = ( + c_ulong, + PCERT_CHAIN_CONTEXT, + PCERT_CHAIN_POLICY_PARA, + PCERT_CHAIN_POLICY_STATUS, +) +CertVerifyCertificateChainPolicy.restype = BOOL + +CertCloseStore = wincrypt.CertCloseStore +CertCloseStore.argtypes = (HCERTSTORE, DWORD) +CertCloseStore.restype = BOOL +CertCloseStore.errcheck = _handle_win_error + +CertFreeCertificateChain = wincrypt.CertFreeCertificateChain +CertFreeCertificateChain.argtypes = (PCERT_CHAIN_CONTEXT,) + +CertFreeCertificateContext = wincrypt.CertFreeCertificateContext +CertFreeCertificateContext.argtypes = (PCERT_CONTEXT,) + +CertFreeCertificateChainEngine = wincrypt.CertFreeCertificateChainEngine +CertFreeCertificateChainEngine.argtypes = (HCERTCHAINENGINE,) + +FormatMessageW = kernel32.FormatMessageW +FormatMessageW.argtypes = ( + DWORD, + LPCVOID, + DWORD, + DWORD, + LPWSTR, + DWORD, + c_void_p, +) +FormatMessageW.restype = DWORD + + +def _verify_peercerts_impl( + ssl_context: ssl.SSLContext, + cert_chain: list[bytes], + server_hostname: str | None = None, +) -> None: + """Verify the cert_chain from the server using Windows APIs.""" + pCertContext = None + hIntermediateCertStore = CertOpenStore(CERT_STORE_PROV_MEMORY, 0, None, 0, None) + try: + # Add intermediate certs to an in-memory cert store + for cert_bytes in cert_chain[1:]: + CertAddEncodedCertificateToStore( + hIntermediateCertStore, + X509_ASN_ENCODING | PKCS_7_ASN_ENCODING, + cert_bytes, + len(cert_bytes), + CERT_STORE_ADD_USE_EXISTING, + None, + ) + + # Cert context for leaf cert + leaf_cert = cert_chain[0] + pCertContext = CertCreateCertificateContext( + X509_ASN_ENCODING | PKCS_7_ASN_ENCODING, leaf_cert, len(leaf_cert) + ) + + # Chain params to match certs for serverAuth extended usage + cert_enhkey_usage = CERT_ENHKEY_USAGE() + cert_enhkey_usage.cUsageIdentifier = 1 + cert_enhkey_usage.rgpszUsageIdentifier = (c_char_p * 1)(OID_PKIX_KP_SERVER_AUTH) + cert_usage_match = CERT_USAGE_MATCH() + cert_usage_match.Usage = cert_enhkey_usage + chain_params = CERT_CHAIN_PARA() + chain_params.RequestedUsage = cert_usage_match + chain_params.cbSize = sizeof(chain_params) + pChainPara = pointer(chain_params) + + if ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_CHAIN: + chain_flags = CERT_CHAIN_REVOCATION_CHECK_CHAIN + elif ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_LEAF: + chain_flags = CERT_CHAIN_REVOCATION_CHECK_END_CERT + else: + chain_flags = 0 + + try: + # First attempt to verify using the default Windows 
system trust roots + # (default chain engine). + _get_and_verify_cert_chain( + ssl_context, + None, + hIntermediateCertStore, + pCertContext, + pChainPara, + server_hostname, + chain_flags=chain_flags, + ) + except ssl.SSLCertVerificationError: + # If that fails but custom CA certs have been added + # to the SSLContext using load_verify_locations, + # try verifying using a custom chain engine + # that trusts the custom CA certs. + custom_ca_certs: list[bytes] | None = ssl_context.get_ca_certs( + binary_form=True + ) + if custom_ca_certs: + _verify_using_custom_ca_certs( + ssl_context, + custom_ca_certs, + hIntermediateCertStore, + pCertContext, + pChainPara, + server_hostname, + chain_flags=chain_flags, + ) + else: + raise + finally: + CertCloseStore(hIntermediateCertStore, 0) + if pCertContext: + CertFreeCertificateContext(pCertContext) + + +def _get_and_verify_cert_chain( + ssl_context: ssl.SSLContext, + hChainEngine: HCERTCHAINENGINE | None, + hIntermediateCertStore: HCERTSTORE, + pPeerCertContext: c_void_p, + pChainPara: PCERT_CHAIN_PARA, # type: ignore[valid-type] + server_hostname: str | None, + chain_flags: int, +) -> None: + ppChainContext = None + try: + # Get cert chain + ppChainContext = pointer(PCERT_CHAIN_CONTEXT()) + CertGetCertificateChain( + hChainEngine, # chain engine + pPeerCertContext, # leaf cert context + None, # current system time + hIntermediateCertStore, # additional in-memory cert store + pChainPara, # chain-building parameters + chain_flags, + None, # reserved + ppChainContext, # the resulting chain context + ) + pChainContext = ppChainContext.contents + + # Verify cert chain + ssl_extra_cert_chain_policy_para = SSL_EXTRA_CERT_CHAIN_POLICY_PARA() + ssl_extra_cert_chain_policy_para.cbSize = sizeof( + ssl_extra_cert_chain_policy_para + ) + ssl_extra_cert_chain_policy_para.dwAuthType = AUTHTYPE_SERVER + ssl_extra_cert_chain_policy_para.fdwChecks = 0 + if server_hostname: + ssl_extra_cert_chain_policy_para.pwszServerName = c_wchar_p(server_hostname) + + chain_policy = CERT_CHAIN_POLICY_PARA() + chain_policy.pvExtraPolicyPara = cast( + pointer(ssl_extra_cert_chain_policy_para), c_void_p + ) + if ssl_context.verify_mode == ssl.CERT_NONE: + chain_policy.dwFlags |= CERT_CHAIN_POLICY_VERIFY_MODE_NONE_FLAGS + if not ssl_context.check_hostname: + chain_policy.dwFlags |= CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG + chain_policy.cbSize = sizeof(chain_policy) + + pPolicyPara = pointer(chain_policy) + policy_status = CERT_CHAIN_POLICY_STATUS() + policy_status.cbSize = sizeof(policy_status) + pPolicyStatus = pointer(policy_status) + CertVerifyCertificateChainPolicy( + CERT_CHAIN_POLICY_SSL, + pChainContext, + pPolicyPara, + pPolicyStatus, + ) + + # Check status + error_code = policy_status.dwError + if error_code: + # Try getting a human readable message for an error code. + error_message_buf = create_unicode_buffer(1024) + error_message_chars = FormatMessageW( + FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, + None, + error_code, + 0, + error_message_buf, + sizeof(error_message_buf), + None, + ) + + # See if we received a message for the error, + # otherwise we use a generic error with the + # error code and hope that it's search-able. 
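+            # (Illustration: a hostname mismatch typically surfaces here
+            # as CERT_E_CN_NO_MATCH, i.e. dwError == 0x800B010F.)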
+ if error_message_chars <= 0: + error_message = f"Certificate chain policy error {error_code:#x} [{policy_status.lElementIndex}]" + else: + error_message = error_message_buf.value.strip() + + err = ssl.SSLCertVerificationError(error_message) + err.verify_message = error_message + err.verify_code = error_code + raise err from None + finally: + if ppChainContext: + CertFreeCertificateChain(ppChainContext.contents) + + +def _verify_using_custom_ca_certs( + ssl_context: ssl.SSLContext, + custom_ca_certs: list[bytes], + hIntermediateCertStore: HCERTSTORE, + pPeerCertContext: c_void_p, + pChainPara: PCERT_CHAIN_PARA, # type: ignore[valid-type] + server_hostname: str | None, + chain_flags: int, +) -> None: + hChainEngine = None + hRootCertStore = CertOpenStore(CERT_STORE_PROV_MEMORY, 0, None, 0, None) + try: + # Add custom CA certs to an in-memory cert store + for cert_bytes in custom_ca_certs: + CertAddEncodedCertificateToStore( + hRootCertStore, + X509_ASN_ENCODING | PKCS_7_ASN_ENCODING, + cert_bytes, + len(cert_bytes), + CERT_STORE_ADD_USE_EXISTING, + None, + ) + + # Create a custom cert chain engine which exclusively trusts + # certs from our hRootCertStore + cert_chain_engine_config = CERT_CHAIN_ENGINE_CONFIG() + cert_chain_engine_config.cbSize = sizeof(cert_chain_engine_config) + cert_chain_engine_config.hExclusiveRoot = hRootCertStore + pConfig = pointer(cert_chain_engine_config) + phChainEngine = pointer(HCERTCHAINENGINE()) + CertCreateCertificateChainEngine( + pConfig, + phChainEngine, + ) + hChainEngine = phChainEngine.contents + + # Get and verify a cert chain using the custom chain engine + _get_and_verify_cert_chain( + ssl_context, + hChainEngine, + hIntermediateCertStore, + pPeerCertContext, + pChainPara, + server_hostname, + chain_flags, + ) + finally: + if hChainEngine: + CertFreeCertificateChainEngine(hChainEngine) + CertCloseStore(hRootCertStore, 0) + + +@contextlib.contextmanager +def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]: + check_hostname = ctx.check_hostname + verify_mode = ctx.verify_mode + ctx.check_hostname = False + _set_ssl_context_verify_mode(ctx, ssl.CERT_NONE) + try: + yield + finally: + ctx.check_hostname = check_hostname + _set_ssl_context_verify_mode(ctx, verify_mode) diff --git a/src/pip/_vendor/truststore/py.typed b/src/pip/_vendor/truststore/py.typed new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/pip/_vendor/typing_extensions.LICENSE b/src/pip/_vendor/typing_extensions.LICENSE index 1df6b3b8de0..f26bcf4d2de 100644 --- a/src/pip/_vendor/typing_extensions.LICENSE +++ b/src/pip/_vendor/typing_extensions.LICENSE @@ -2,12 +2,12 @@ A. HISTORY OF THE SOFTWARE ========================== Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands as a successor of a language called ABC. Guido remains Python's principal author, although it includes many contributions from others. In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) in Reston, Virginia where he released several versions of the software. @@ -19,7 +19,7 @@ https://www.python.org/psf/) was formed, a non-profit organization created specifically to own Python-related Intellectual Property. Zope Corporation was a sponsoring member of the PSF. 
-All Python releases are Open Source (see http://www.opensource.org for +All Python releases are Open Source (see https://opensource.org for the Open Source Definition). Historically, most, but not all, Python releases have also been GPL-compatible; the table below summarizes the various releases. @@ -59,6 +59,17 @@ direction to make these releases possible. B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON =============================================================== +Python software and documentation are licensed under the +Python Software Foundation License Version 2. + +Starting with Python 3.8.6, examples, recipes, and other code in +the documentation are dual licensed under the PSF License Version 2 +and the Zero-Clause BSD license. + +Some software incorporated into Python is under different licenses. +The licenses are listed with code falling under that license. + + PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -------------------------------------------- @@ -73,7 +84,7 @@ analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation; +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. @@ -252,3 +263,17 @@ FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +---------------------------------------------------------------------- + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/src/pip/_vendor/typing_extensions.py b/src/pip/_vendor/typing_extensions.py index 9cbf5b87b59..351036faf7f 100644 --- a/src/pip/_vendor/typing_extensions.py +++ b/src/pip/_vendor/typing_extensions.py @@ -9,7 +9,6 @@ import typing import warnings - __all__ = [ # Super-special typing primitives. 'Any', @@ -33,6 +32,7 @@ 'Coroutine', 'AsyncGenerator', 'AsyncContextManager', + 'Buffer', 'ChainMap', # Concrete collection types. @@ -45,7 +45,13 @@ 'TypedDict', # Structural checks, a.k.a. protocols. + 'SupportsAbs', + 'SupportsBytes', + 'SupportsComplex', + 'SupportsFloat', 'SupportsIndex', + 'SupportsInt', + 'SupportsRound', # One-off things. 
'Annotated', @@ -54,12 +60,16 @@ 'clear_overloads', 'dataclass_transform', 'deprecated', + 'Doc', 'get_overloads', 'final', 'get_args', 'get_origin', + 'get_original_bases', + 'get_protocol_members', 'get_type_hints', 'IntVar', + 'is_protocol', 'is_typeddict', 'Literal', 'NewType', @@ -71,12 +81,53 @@ 'runtime_checkable', 'Text', 'TypeAlias', + 'TypeAliasType', 'TypeGuard', 'TYPE_CHECKING', 'Never', 'NoReturn', + 'ReadOnly', 'Required', 'NotRequired', + + # Pure aliases, have always been in typing + 'AbstractSet', + 'AnyStr', + 'BinaryIO', + 'Callable', + 'Collection', + 'Container', + 'Dict', + 'ForwardRef', + 'FrozenSet', + 'Generator', + 'Generic', + 'Hashable', + 'IO', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'List', + 'Mapping', + 'MappingView', + 'Match', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Optional', + 'Pattern', + 'Reversible', + 'Sequence', + 'Set', + 'Sized', + 'TextIO', + 'Tuple', + 'Union', + 'ValuesView', + 'cast', + 'no_type_check', + 'no_type_check_decorator', ] # for backward compatibility @@ -86,7 +137,13 @@ # The functions below are modified copies of typing internal helpers. # They are needed by _ProtocolMeta and they provide support for PEP 646. -_marker = object() + +class _Sentinel: + def __repr__(self): + return "" + + +_marker = _Sentinel() def _check_generic(cls, parameters, elen=_marker): @@ -187,36 +244,13 @@ def __new__(cls, *args, **kwargs): ClassVar = typing.ClassVar -# On older versions of typing there is an internal class named "Final". -# 3.8+ -if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): - Final = typing.Final -# 3.7 -else: - class _FinalForm(typing._SpecialForm, _root=True): - - def __repr__(self): - return 'typing_extensions.' + self._name - - def __getitem__(self, parameters): - item = typing._type_check(parameters, - f'{self._name} accepts only a single type.') - return typing._GenericAlias(self, (item,)) - - Final = _FinalForm('Final', - doc="""A special typing construct to indicate that a name - cannot be re-assigned or overridden in a subclass. - For example: - MAX_SIZE: Final = 9000 - MAX_SIZE += 1 # Error reported by type checker +class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name - class Connection: - TIMEOUT: Final[int] = 10 - class FastConnector(Connection): - TIMEOUT = 1 # Error reported by type checker - There is no runtime checking of these properties.""") +Final = typing.Final if sys.version_info >= (3, 11): final = typing.final @@ -260,21 +294,67 @@ def IntVar(name): return typing.TypeVar(name) -# 3.8+: -if hasattr(typing, 'Literal'): +# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 +if sys.version_info >= (3, 10, 1): Literal = typing.Literal -# 3.7: else: - class _LiteralForm(typing._SpecialForm, _root=True): + def _flatten_literal_params(parameters): + """An internal helper for Literal creation: flatten Literals among parameters""" + params = [] + for p in parameters: + if isinstance(p, _LiteralGenericAlias): + params.extend(p.__args__) + else: + params.append(p) + return tuple(params) - def __repr__(self): - return 'typing_extensions.' 
+ self._name + def _value_and_type_iter(params): + for p in params: + yield p, type(p) + + class _LiteralGenericAlias(typing._GenericAlias, _root=True): + def __eq__(self, other): + if not isinstance(other, _LiteralGenericAlias): + return NotImplemented + these_args_deduped = set(_value_and_type_iter(self.__args__)) + other_args_deduped = set(_value_and_type_iter(other.__args__)) + return these_args_deduped == other_args_deduped + + def __hash__(self): + return hash(frozenset(_value_and_type_iter(self.__args__))) + + class _LiteralForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, doc: str): + self._name = 'Literal' + self._doc = self.__doc__ = doc def __getitem__(self, parameters): - return typing._GenericAlias(self, parameters) + if not isinstance(parameters, tuple): + parameters = (parameters,) - Literal = _LiteralForm('Literal', - doc="""A type that can be used to indicate to type checkers + parameters = _flatten_literal_params(parameters) + + val_type_pairs = list(_value_and_type_iter(parameters)) + try: + deduped_pairs = set(val_type_pairs) + except TypeError: + # unhashable parameters + pass + else: + # similar logic to typing._deduplicate on Python 3.9+ + if len(deduped_pairs) < len(val_type_pairs): + new_parameters = [] + for pair in val_type_pairs: + if pair in deduped_pairs: + new_parameters.append(pair[0]) + deduped_pairs.remove(pair) + assert not deduped_pairs, deduped_pairs + parameters = tuple(new_parameters) + + return _LiteralGenericAlias(self, parameters) + + Literal = _LiteralForm(doc="""\ + A type that can be used to indicate to type checkers that the corresponding value has a value literally equivalent to the provided parameter. For example: @@ -288,7 +368,7 @@ def __getitem__(self, parameters): instead of a type.""") -_overload_dummy = typing._overload_dummy # noqa +_overload_dummy = typing._overload_dummy if hasattr(typing, "get_overloads"): # 3.11+ @@ -362,8 +442,6 @@ def clear_overloads(): # Various ABCs mimicking those in collections.abc. # A few are simply re-exported for completeness. 
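A quick sketch of the observable behaviour that the flattening and (value, type) deduplication above provide (illustrative assertions only):

from pip._vendor.typing_extensions import Literal

# Nested Literals are flattened into a single form.
assert Literal[1, Literal[2, 3]] == Literal[1, 2, 3]

# Deduplication compares (value, type) pairs: equal values of the same
# type collapse, but 0/False and 1/True stay distinct even though they
# compare equal.
assert Literal[1, 1] == Literal[1]
assert Literal[0, False] != Literal[0]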
- - Awaitable = typing.Awaitable Coroutine = typing.Coroutine AsyncIterable = typing.AsyncIterable @@ -372,278 +450,284 @@ def clear_overloads(): ContextManager = typing.ContextManager AsyncContextManager = typing.AsyncContextManager DefaultDict = typing.DefaultDict - -# 3.7.2+ -if hasattr(typing, 'OrderedDict'): - OrderedDict = typing.OrderedDict -# 3.7.0-3.7.2 -else: - OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) - +OrderedDict = typing.OrderedDict Counter = typing.Counter ChainMap = typing.ChainMap AsyncGenerator = typing.AsyncGenerator -NewType = typing.NewType Text = typing.Text TYPE_CHECKING = typing.TYPE_CHECKING -_PROTO_WHITELIST = ['Callable', 'Awaitable', - 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', - 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', - 'ContextManager', 'AsyncContextManager'] +_PROTO_ALLOWLIST = { + 'collections.abc': [ + 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', + 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', + ], + 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], + 'typing_extensions': ['Buffer'], +} + + +_EXCLUDED_ATTRS = { + "__abstractmethods__", "__annotations__", "__weakref__", "_is_protocol", + "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__", + "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__", + "__subclasshook__", "__orig_class__", "__init__", "__new__", + "__protocol_attrs__", "__callable_proto_members_only__", + "__match_args__", +} + +if sys.version_info >= (3, 9): + _EXCLUDED_ATTRS.add("__class_getitem__") + +if sys.version_info >= (3, 12): + _EXCLUDED_ATTRS.add("__type_params__") + +_EXCLUDED_ATTRS = frozenset(_EXCLUDED_ATTRS) def _get_protocol_attrs(cls): attrs = set() for base in cls.__mro__[:-1]: # without object - if base.__name__ in ('Protocol', 'Generic'): + if base.__name__ in {'Protocol', 'Generic'}: continue annotations = getattr(base, '__annotations__', {}) - for attr in list(base.__dict__.keys()) + list(annotations.keys()): - if (not attr.startswith('_abc_') and attr not in ( - '__abstractmethods__', '__annotations__', '__weakref__', - '_is_protocol', '_is_runtime_protocol', '__dict__', - '__args__', '__slots__', - '__next_in_mro__', '__parameters__', '__origin__', - '__orig_bases__', '__extra__', '__tree_hash__', - '__doc__', '__subclasshook__', '__init__', '__new__', - '__module__', '_MutableMapping__marker', '_gorg')): + for attr in (*base.__dict__, *annotations): + if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS): attrs.add(attr) return attrs -def _is_callable_members_only(cls): - return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) - +def _caller(depth=2): + try: + return sys._getframe(depth).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): # For platforms without _getframe() + return None -def _maybe_adjust_parameters(cls): - """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__. - The contents of this function are very similar - to logic found in typing.Generic.__init_subclass__ - on the CPython main branch. - """ - tvars = [] - if '__orig_bases__' in cls.__dict__: - tvars = typing._collect_type_vars(cls.__orig_bases__) - # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. - # If found, tvars must be a subset of it. - # If not found, tvars is it. - # Also check for and reject plain Generic, - # and reject multiple Generic[...] and/or Protocol[...]. 
- gvars = None - for base in cls.__orig_bases__: - if (isinstance(base, typing._GenericAlias) and - base.__origin__ in (typing.Generic, Protocol)): - # for error messages - the_base = base.__origin__.__name__ - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...]" - " and/or Protocol[...] multiple types.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - s_vars = ', '.join(str(t) for t in tvars if t not in gvarset) - s_args = ', '.join(str(g) for g in gvars) - raise TypeError(f"Some type variables ({s_vars}) are" - f" not listed in {the_base}[{s_args}]") - tvars = gvars - cls.__parameters__ = tuple(tvars) - - -# 3.8+ -if hasattr(typing, 'Protocol'): +# `__match_args__` attribute was removed from protocol members in 3.13, +# we want to backport this change to older Python versions. +if sys.version_info >= (3, 13): Protocol = typing.Protocol -# 3.7 else: + def _allow_reckless_class_checks(depth=3): + """Allow instance and class checks for special stdlib modules. + The abc and functools modules indiscriminately call isinstance() and + issubclass() on the whole MRO of a user class, which may contain protocols. + """ + return _caller(depth) in {'abc', 'functools', None} def _no_init(self, *args, **kwargs): if type(self)._is_protocol: raise TypeError('Protocols cannot be instantiated') - class _ProtocolMeta(abc.ABCMeta): # noqa: B024 - # This metaclass is a bit unfortunate and exists only because of the lack - # of __instancehook__. + # Inheriting from typing._ProtocolMeta isn't actually desirable, + # but is necessary to allow typing.Protocol and typing_extensions.Protocol + # to mix without getting TypeErrors about "metaclass conflict" + class _ProtocolMeta(type(typing.Protocol)): + # This metaclass is somewhat unfortunate, + # but is necessary for several reasons... + # + # NOTE: DO NOT call super() in any methods in this class + # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11 + # and those are slow + def __new__(mcls, name, bases, namespace, **kwargs): + if name == "Protocol" and len(bases) < 2: + pass + elif {Protocol, typing.Protocol} & set(bases): + for base in bases: + if not ( + base in {object, typing.Generic, Protocol, typing.Protocol} + or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) + or is_protocol(base) + ): + raise TypeError( + f"Protocols can only inherit from other protocols, " + f"got {base!r}" + ) + return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) + + def __init__(cls, *args, **kwargs): + abc.ABCMeta.__init__(cls, *args, **kwargs) + if getattr(cls, "_is_protocol", False): + cls.__protocol_attrs__ = _get_protocol_attrs(cls) + # PEP 544 prohibits using issubclass() + # with protocols that have non-method members. + cls.__callable_proto_members_only__ = all( + callable(getattr(cls, attr, None)) for attr in cls.__protocol_attrs__ + ) + + def __subclasscheck__(cls, other): + if cls is Protocol: + return type.__subclasscheck__(cls, other) + if ( + getattr(cls, '_is_protocol', False) + and not _allow_reckless_class_checks() + ): + if not isinstance(other, type): + # Same error message as for issubclass(1, int). 
+ raise TypeError('issubclass() arg 1 must be a class') + if ( + not cls.__callable_proto_members_only__ + and cls.__dict__.get("__subclasshook__") is _proto_hook + ): + non_method_attrs = sorted( + attr for attr in cls.__protocol_attrs__ + if not callable(getattr(cls, attr, None)) + ) + raise TypeError( + "Protocols with non-method members don't support issubclass()." + f" Non-method members: {str(non_method_attrs)[1:-1]}." + ) + if not getattr(cls, '_is_runtime_protocol', False): + raise TypeError( + "Instance and class checks can only be used with " + "@runtime_checkable protocols" + ) + return abc.ABCMeta.__subclasscheck__(cls, other) + def __instancecheck__(cls, instance): # We need this method for situations where attributes are # assigned in __init__. - if ((not getattr(cls, '_is_protocol', False) or - _is_callable_members_only(cls)) and - issubclass(instance.__class__, cls)): + if cls is Protocol: + return type.__instancecheck__(cls, instance) + if not getattr(cls, "_is_protocol", False): + # i.e., it's a concrete subclass of a protocol + return abc.ABCMeta.__instancecheck__(cls, instance) + + if ( + not getattr(cls, '_is_runtime_protocol', False) and + not _allow_reckless_class_checks() + ): + raise TypeError("Instance and class checks can only be used with" + " @runtime_checkable protocols") + + if abc.ABCMeta.__instancecheck__(cls, instance): return True - if cls._is_protocol: - if all(hasattr(instance, attr) and - (not callable(getattr(cls, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(cls)): - return True - return super().__instancecheck__(instance) - - class Protocol(metaclass=_ProtocolMeta): - # There is quite a lot of overlapping code with typing.Generic. - # Unfortunately it is hard to avoid this while these live in two different - # modules. The duplicated code will be removed when Protocol is moved to typing. - """Base class for protocol classes. Protocol classes are defined as:: - - class Proto(Protocol): - def meth(self) -> int: - ... - - Such classes are primarily used with static type checkers that recognize - structural subtyping (static duck-typing), for example:: - - class C: - def meth(self) -> int: - return 0 - - def func(x: Proto) -> int: - return x.meth() - func(C()) # Passes static type check + for attr in cls.__protocol_attrs__: + try: + val = inspect.getattr_static(instance, attr) + except AttributeError: + break + if val is None and callable(getattr(cls, attr, None)): + break + else: + return True - See PEP 544 for details. Protocol classes decorated with - @typing_extensions.runtime act as simple-minded runtime protocol that checks - only the presence of given attributes, ignoring their type signatures. + return False - Protocol classes can be generic, they are defined as:: + def __eq__(cls, other): + # Hack so that typing.Generic.__class_getitem__ + # treats typing_extensions.Protocol + # as equivalent to typing.Protocol + if abc.ABCMeta.__eq__(cls, other) is True: + return True + return cls is Protocol and other is typing.Protocol + + # This has to be defined, or the abc-module cache + # complains about classes with this metaclass being unhashable, + # if we define only __eq__! + def __hash__(cls) -> int: + return type.__hash__(cls) + + @classmethod + def _proto_hook(cls, other): + if not cls.__dict__.get('_is_protocol', False): + return NotImplemented + + for attr in cls.__protocol_attrs__: + for base in other.__mro__: + # Check if the member appears in the class dictionary...
+ if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + + # ...or in annotations, if it is a sub-protocol. + annotations = getattr(base, '__annotations__', {}) + if ( + isinstance(annotations, collections.abc.Mapping) + and attr in annotations + and is_protocol(other) + ): + break + else: + return NotImplemented + return True - class GenProto(Protocol[T]): - def meth(self) -> T: - ... - """ + class Protocol(typing.Generic, metaclass=_ProtocolMeta): + __doc__ = typing.Protocol.__doc__ __slots__ = () _is_protocol = True - - def __new__(cls, *args, **kwds): - if cls is Protocol: - raise TypeError("Type Protocol cannot be instantiated; " - "it can only be used as a base class") - return super().__new__(cls) - - @typing._tp_cache - def __class_getitem__(cls, params): - if not isinstance(params, tuple): - params = (params,) - if not params and cls is not typing.Tuple: - raise TypeError( - f"Parameter list to {cls.__qualname__}[...] cannot be empty") - msg = "Parameters to generic types must be types." - params = tuple(typing._type_check(p, msg) for p in params) # noqa - if cls is Protocol: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, typing.TypeVar) for p in params): - i = 0 - while isinstance(params[i], typing.TypeVar): - i += 1 - raise TypeError( - "Parameters to Protocol[...] must all be type variables." - f" Parameter {i + 1} is {params[i]}") - if len(set(params)) != len(params): - raise TypeError( - "Parameters to Protocol[...] must all be unique") - else: - # Subscripting a regular Generic subclass. - _check_generic(cls, params, len(cls.__parameters__)) - return typing._GenericAlias(cls, params) + _is_runtime_protocol = False def __init_subclass__(cls, *args, **kwargs): - if '__orig_bases__' in cls.__dict__: - error = typing.Generic in cls.__orig_bases__ - else: - error = typing.Generic in cls.__bases__ - if error: - raise TypeError("Cannot inherit from plain Generic") - _maybe_adjust_parameters(cls) + super().__init_subclass__(*args, **kwargs) # Determine if this is a protocol or a concrete subclass. - if not cls.__dict__.get('_is_protocol', None): + if not cls.__dict__.get('_is_protocol', False): cls._is_protocol = any(b is Protocol for b in cls.__bases__) # Set (or override) the protocol subclass hook. 
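A minimal sketch of the runtime semantics the backported protocol machinery above implements (illustrative only; it assumes the module is importable as typing_extensions, which inside pip lives at pip._vendor.typing_extensions)::

    from typing_extensions import Protocol, runtime_checkable

    @runtime_checkable
    class Closeable(Protocol):
        def close(self) -> None: ...

    @runtime_checkable
    class Named(Protocol):
        name: str  # a non-method member

    class File:
        def close(self) -> None:
            pass

    # __instancecheck__ performs the structural check:
    assert isinstance(File(), Closeable)
    assert not isinstance(File(), Named)

    # __subclasscheck__ rejects issubclass() for protocols
    # with non-method members, per PEP 544:
    try:
        issubclass(File, Named)
    except TypeError:
        pass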
- def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not getattr(cls, '_is_runtime_protocol', False): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") - if not _is_callable_members_only(cls): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Protocols with non-method members" - " don't support issubclass()") - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): - break - else: - return NotImplemented - return True if '__subclasshook__' not in cls.__dict__: cls.__subclasshook__ = _proto_hook - # We have nothing more to do for non-protocols. - if not cls._is_protocol: - return + # Prohibit instantiation for protocol classes + if cls._is_protocol and cls.__init__ is Protocol.__init__: + cls.__init__ = _no_init + + +# The "runtime" alias exists for backwards compatibility. +runtime = runtime_checkable = typing.runtime_checkable + - # Check consistency of bases. - for base in cls.__bases__: - if not (base in (object, typing.Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or - isinstance(base, _ProtocolMeta) and base._is_protocol): - raise TypeError('Protocols can only inherit from other' - f' protocols, got {repr(base)}') - cls.__init__ = _no_init - - -# 3.8+ -if hasattr(typing, 'runtime_checkable'): - runtime_checkable = typing.runtime_checkable -# 3.7 +# Our version of runtime-checkable protocols is faster on Python 3.8-3.11 +if sys.version_info >= (3, 12): + SupportsInt = typing.SupportsInt + SupportsFloat = typing.SupportsFloat + SupportsComplex = typing.SupportsComplex + SupportsBytes = typing.SupportsBytes + SupportsIndex = typing.SupportsIndex + SupportsAbs = typing.SupportsAbs + SupportsRound = typing.SupportsRound else: - def runtime_checkable(cls): - """Mark a protocol class as a runtime protocol, so that it - can be used with isinstance() and issubclass(). Raise TypeError - if applied to a non-protocol class. + @runtime_checkable + class SupportsInt(Protocol): + """An ABC with one abstract method __int__.""" + __slots__ = () - This allows a simple-minded structural check very similar to the - one-offs in collections.abc such as Hashable. - """ - if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: - raise TypeError('@runtime_checkable can be only applied to protocol classes,' - f' got {cls!r}') - cls._is_runtime_protocol = True - return cls + @abc.abstractmethod + def __int__(self) -> int: + pass + @runtime_checkable + class SupportsFloat(Protocol): + """An ABC with one abstract method __float__.""" + __slots__ = () -# Exists for backwards compatibility. 
-runtime = runtime_checkable + @abc.abstractmethod + def __float__(self) -> float: + pass + @runtime_checkable + class SupportsComplex(Protocol): + """An ABC with one abstract method __complex__.""" + __slots__ = () + + @abc.abstractmethod + def __complex__(self) -> complex: + pass + + @runtime_checkable + class SupportsBytes(Protocol): + """An ABC with one abstract method __bytes__.""" + __slots__ = () + + @abc.abstractmethod + def __bytes__(self) -> bytes: + pass -# 3.8+ -if hasattr(typing, 'SupportsIndex'): - SupportsIndex = typing.SupportsIndex -# 3.7 -else: @runtime_checkable class SupportsIndex(Protocol): __slots__ = () @@ -652,8 +736,45 @@ class SupportsIndex(Protocol): def __index__(self) -> int: pass + @runtime_checkable + class SupportsAbs(Protocol[T_co]): + """ + An ABC with one abstract method __abs__ that is covariant in its return type. + """ + __slots__ = () + + @abc.abstractmethod + def __abs__(self) -> T_co: + pass + + @runtime_checkable + class SupportsRound(Protocol[T_co]): + """ + An ABC with one abstract method __round__ that is covariant in its return type. + """ + __slots__ = () -if hasattr(typing, "Required"): + @abc.abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +def _ensure_subclassable(mro_entries): + def inner(func): + if sys.implementation.name == "pypy" and sys.version_info < (3, 9): + cls_dict = { + "__call__": staticmethod(func), + "__mro_entries__": staticmethod(mro_entries) + } + t = type(func.__name__, (), cls_dict) + return functools.update_wrapper(t(), func) + else: + func.__mro_entries__ = mro_entries + return func + return inner + + +if hasattr(typing, "ReadOnly"): # The standard library TypedDict in Python 3.8 does not store runtime information # about which (if any) keys are optional. See https://bugs.python.org/issue38834 # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" @@ -661,152 +782,145 @@ def __index__(self) -> int: # The standard library TypedDict below Python 3.11 does not store runtime # information about optional and required keys when using Required or NotRequired. # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. + # Aaaand on 3.12 we add __orig_bases__ to TypedDict + # to enable better runtime introspection. + # On 3.13 we deprecate some odd ways of creating TypedDicts. + # PEP 705 proposes adding the ReadOnly[] qualifier. TypedDict = typing.TypedDict _TypedDictMeta = typing._TypedDictMeta is_typeddict = typing.is_typeddict else: - def _check_fails(cls, other): - try: - if sys._getframe(1).f_globals['__name__'] not in ['abc', - 'functools', - 'typing']: - # Typed dicts are only for static structural subtyping. 
- raise TypeError('TypedDict does not support instance and class checks') - except (AttributeError, ValueError): - pass - return False - - def _dict_new(*args, **kwargs): - if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') - _, args = args[0], args[1:] # allow the "cls" keyword be passed - return dict(*args, **kwargs) - - _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' - - def _typeddict_new(*args, total=True, **kwargs): - if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') - _, args = args[0], args[1:] # allow the "cls" keyword be passed - if args: - typename, args = args[0], args[1:] # allow the "_typename" keyword be passed - elif '_typename' in kwargs: - typename = kwargs.pop('_typename') - import warnings - warnings.warn("Passing '_typename' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) - else: - raise TypeError("TypedDict.__new__() missing 1 required positional " - "argument: '_typename'") - if args: - try: - fields, = args # allow the "_fields" keyword be passed - except ValueError: - raise TypeError('TypedDict.__new__() takes from 2 to 3 ' - f'positional arguments but {len(args) + 2} ' - 'were given') - elif '_fields' in kwargs and len(kwargs) == 1: - fields = kwargs.pop('_fields') - import warnings - warnings.warn("Passing '_fields' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) - else: - fields = None - - if fields is None: - fields = kwargs - elif kwargs: - raise TypeError("TypedDict takes either a dict or keyword arguments," - " but not both") + # 3.10.0 and later + _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters - ns = {'__annotations__': dict(fields)} - try: - # Setting correct module is necessary to make typed dict classes pickleable. - ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass + def _get_typeddict_qualifiers(annotation_type): + while True: + annotation_origin = get_origin(annotation_type) + if annotation_origin is Annotated: + annotation_args = get_args(annotation_type) + if annotation_args: + annotation_type = annotation_args[0] + else: + break + elif annotation_origin is Required: + yield Required + annotation_type, = get_args(annotation_type) + elif annotation_origin is NotRequired: + yield NotRequired + annotation_type, = get_args(annotation_type) + elif annotation_origin is ReadOnly: + yield ReadOnly + annotation_type, = get_args(annotation_type) + else: + break - return _TypedDictMeta(typename, (), ns, total=total) + class _TypedDictMeta(type): + def __new__(cls, name, bases, ns, *, total=True): + """Create new typed dict class object. + + This method is called when TypedDict is subclassed, + or when TypedDict is instantiated. This way + TypedDict supports all three syntax forms described in its docstring. + Subclasses and instances of TypedDict return actual dictionaries. 
+ """ + for base in bases: + if type(base) is not _TypedDictMeta and base is not typing.Generic: + raise TypeError('cannot inherit from both a TypedDict type ' + 'and a non-TypedDict base class') - _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,' - ' /, *, total=True, **kwargs)') + if any(issubclass(b, typing.Generic) for b in bases): + generic_base = (typing.Generic,) + else: + generic_base = () - _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters + # typing.py generally doesn't let you inherit from plain Generic, unless + # the name of the class happens to be "Protocol" + tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns) + tp_dict.__name__ = name + if tp_dict.__qualname__ == "Protocol": + tp_dict.__qualname__ = name - class _TypedDictMeta(type): - def __init__(cls, name, bases, ns, total=True): - super().__init__(name, bases, ns) - - def __new__(cls, name, bases, ns, total=True): - # Create new typed dict class object. - # This method is called directly when TypedDict is subclassed, - # or via _typeddict_new when TypedDict is instantiated. This way - # TypedDict supports all three syntaxes described in its docstring. - # Subclasses and instances of TypedDict return actual dictionaries - # via _dict_new. - ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new - # Don't insert typing.Generic into __bases__ here, - # or Generic.__init_subclass__ will raise TypeError - # in the super().__new__() call. - # Instead, monkey-patch __bases__ onto the class after it's been created. - tp_dict = super().__new__(cls, name, (dict,), ns) - - if any(issubclass(base, typing.Generic) for base in bases): - tp_dict.__bases__ = (typing.Generic, dict) - _maybe_adjust_parameters(tp_dict) + if not hasattr(tp_dict, '__orig_bases__'): + tp_dict.__orig_bases__ = bases annotations = {} own_annotations = ns.get('__annotations__', {}) msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" - kwds = {"module": tp_dict.__module__} if _TAKES_MODULE else {} - own_annotations = { - n: typing._type_check(tp, msg, **kwds) - for n, tp in own_annotations.items() - } + if _TAKES_MODULE: + own_annotations = { + n: typing._type_check(tp, msg, module=tp_dict.__module__) + for n, tp in own_annotations.items() + } + else: + own_annotations = { + n: typing._type_check(tp, msg) + for n, tp in own_annotations.items() + } required_keys = set() optional_keys = set() + readonly_keys = set() + mutable_keys = set() for base in bases: - annotations.update(base.__dict__.get('__annotations__', {})) - required_keys.update(base.__dict__.get('__required_keys__', ())) - optional_keys.update(base.__dict__.get('__optional_keys__', ())) + base_dict = base.__dict__ + + annotations.update(base_dict.get('__annotations__', {})) + required_keys.update(base_dict.get('__required_keys__', ())) + optional_keys.update(base_dict.get('__optional_keys__', ())) + readonly_keys.update(base_dict.get('__readonly_keys__', ())) + mutable_keys.update(base_dict.get('__mutable_keys__', ())) annotations.update(own_annotations) for annotation_key, annotation_type in own_annotations.items(): - annotation_origin = get_origin(annotation_type) - if annotation_origin is Annotated: - annotation_args = get_args(annotation_type) - if annotation_args: - annotation_type = annotation_args[0] - annotation_origin = get_origin(annotation_type) - - if annotation_origin is Required: + qualifiers = set(_get_typeddict_qualifiers(annotation_type)) + + if Required in qualifiers: 
required_keys.add(annotation_key) - elif annotation_origin is NotRequired: + elif NotRequired in qualifiers: optional_keys.add(annotation_key) elif total: required_keys.add(annotation_key) else: optional_keys.add(annotation_key) + if ReadOnly in qualifiers: + if annotation_key in mutable_keys: + raise TypeError( + f"Cannot override mutable key {annotation_key!r}" + " with read-only key" + ) + readonly_keys.add(annotation_key) + else: + mutable_keys.add(annotation_key) + readonly_keys.discard(annotation_key) tp_dict.__annotations__ = annotations tp_dict.__required_keys__ = frozenset(required_keys) tp_dict.__optional_keys__ = frozenset(optional_keys) + tp_dict.__readonly_keys__ = frozenset(readonly_keys) + tp_dict.__mutable_keys__ = frozenset(mutable_keys) if not hasattr(tp_dict, '__total__'): tp_dict.__total__ = total return tp_dict - __instancecheck__ = __subclasscheck__ = _check_fails + __call__ = dict # static method + + def __subclasscheck__(cls, other): + # Typed dicts are only for static structural subtyping. + raise TypeError('TypedDict does not support instance and class checks') - TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) - TypedDict.__module__ = __name__ - TypedDict.__doc__ = \ - """A simple typed name space. At runtime it is equivalent to a plain dict. + __instancecheck__ = __subclasscheck__ - TypedDict creates a dictionary type that expects all of its - instances to have a certain set of keys, with each key + _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) + + @_ensure_subclassable(lambda bases: (_TypedDict,)) + def TypedDict(typename, fields=_marker, /, *, total=True, **kwargs): + """A simple typed namespace. At runtime it is equivalent to a plain dict. + + TypedDict creates a dictionary type such that a type checker will expect all + instances to have a certain set of keys, where each key is associated with a value of a consistent type. This expectation - is not checked at runtime but is only enforced by type checkers. + is not checked at runtime. + Usage:: class Point2D(TypedDict): @@ -821,14 +935,68 @@ class Point2D(TypedDict): The type info can be accessed via the Point2D.__annotations__ dict, and the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. - TypedDict supports two additional equivalent forms:: + TypedDict supports an additional equivalent form:: - Point2D = TypedDict('Point2D', x=int, y=int, label=str) Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) - The class syntax is only supported in Python 3.6+, while two other - syntax forms work for Python 2.7 and 3.2+ + By default, all keys must be present in a TypedDict. It is possible + to override this by specifying totality:: + + class Point2D(TypedDict, total=False): + x: int + y: int + + This means that a Point2D TypedDict can have any of the keys omitted. A type + checker is only expected to support a literal False or True as the value of + the total argument. True is the default, and makes all items defined in the + class body be required. + + The Required and NotRequired special forms can also be used to mark + individual keys as being required or not required:: + + class Point2D(TypedDict): + x: int # the "x" key must always be present (Required is the default) + y: NotRequired[int] # the "y" key can be omitted + + See PEP 655 for more details on Required and NotRequired. 
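The key classification performed in __new__ above is observable at runtime through frozenset attributes on the resulting class; a small illustration (assuming the module is importable as typing_extensions)::

    from typing_extensions import TypedDict, Required, NotRequired

    class Movie(TypedDict, total=False):
        title: Required[str]
        year: NotRequired[int]
        rating: float

    assert Movie.__required_keys__ == frozenset({'title'})
    assert Movie.__optional_keys__ == frozenset({'year', 'rating'})

    # At runtime the class is effectively a dict factory (__call__ = dict):
    m = Movie(title="Blade Runner")
    assert isinstance(m, dict)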
""" + if fields is _marker or fields is None: + if fields is _marker: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + + example = f"`{typename} = TypedDict({typename!r}, {{}})`" + deprecation_msg = ( + f"{deprecated_thing} is deprecated and will be disallowed in " + "Python 3.15. To create a TypedDict class with 0 fields " + "using the functional syntax, pass an empty dictionary, e.g. " + ) + example + "." + warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) + fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + if kwargs: + if sys.version_info >= (3, 13): + raise TypeError("TypedDict takes no keyword arguments") + warnings.warn( + "The kwargs-based syntax for TypedDict definitions is deprecated " + "in Python 3.11, will be removed in Python 3.13, and may not be " + "understood by third-party type checkers.", + DeprecationWarning, + stacklevel=2, + ) + + ns = {'__annotations__': dict(fields)} + module = _caller() + if module is not None: + # Setting correct module is necessary to make typed dict classes pickleable. + ns['__module__'] = module + + td = _TypedDictMeta(typename, (), ns, total=total) + td.__orig_bases__ = (TypedDict,) + return td if hasattr(typing, "_TypedDictMeta"): _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) @@ -846,14 +1014,17 @@ class Film(TypedDict): is_typeddict(Film) # => True is_typeddict(Union[list, str]) # => False """ - return isinstance(tp, tuple(_TYPEDDICT_TYPES)) + # On 3.8, this would otherwise return True + if hasattr(typing, "TypedDict") and tp is typing.TypedDict: + return False + return isinstance(tp, _TYPEDDICT_TYPES) if hasattr(typing, "assert_type"): assert_type = typing.assert_type else: - def assert_type(__val, __typ): + def assert_type(val, typ, /): """Assert (to the type checker) that the value is of the given type. When the type checker encounters a call to assert_type(), it @@ -866,15 +1037,12 @@ def greet(name: str) -> None: At runtime this returns the first argument unchanged and otherwise does nothing. """ - return __val + return val -if hasattr(typing, "Required"): +if hasattr(typing, "Required"): # 3.11+ get_type_hints = typing.get_type_hints -else: - import functools - import types - +else: # <=3.10 # replaces _strip_annotations() def _strip_extras(t): """Strips Annotated, Required and NotRequired from a given type.""" @@ -887,12 +1055,12 @@ def _strip_extras(t): if stripped_args == t.__args__: return t return t.copy_with(stripped_args) - if hasattr(types, "GenericAlias") and isinstance(t, types.GenericAlias): + if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias): stripped_args = tuple(_strip_extras(a) for a in t.__args__) if stripped_args == t.__args__: return t - return types.GenericAlias(t.__origin__, stripped_args) - if hasattr(types, "UnionType") and isinstance(t, types.UnionType): + return _types.GenericAlias(t.__origin__, stripped_args) + if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType): stripped_args = tuple(_strip_extras(a) for a in t.__args__) if stripped_args == t.__args__: return t @@ -932,11 +1100,11 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False): - If two dict arguments are passed, they specify globals and locals, respectively. 
""" - if hasattr(typing, "Annotated"): + if hasattr(typing, "Annotated"): # 3.9+ hint = typing.get_type_hints( obj, globalns=globalns, localns=localns, include_extras=True ) - else: + else: # 3.8 hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) if include_extras: return hint @@ -949,7 +1117,7 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False): # Not exported and not a public API, but needed for get_origin() and get_args() # to work. _AnnotatedAlias = typing._AnnotatedAlias -# 3.7-3.8 +# 3.8 else: class _AnnotatedAlias(typing._GenericAlias, _root=True): """Runtime representation of an annotated type. @@ -1054,7 +1222,7 @@ def __init_subclass__(cls, *args, **kwargs): if sys.version_info[:2] >= (3, 10): get_origin = typing.get_origin get_args = typing.get_args -# 3.7-3.9 +# 3.8-3.9 else: try: # 3.9+ @@ -1119,11 +1287,7 @@ def get_args(tp): TypeAlias = typing.TypeAlias # 3.9 elif sys.version_info[:2] >= (3, 9): - class _TypeAliasForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - @_TypeAliasForm + @_ExtensionsSpecialForm def TypeAlias(self, parameters): """Special marker indicating that an assignment should be recognized as a proper type alias definition by type @@ -1136,68 +1300,89 @@ def TypeAlias(self, parameters): It's invalid when used anywhere except as in the example above. """ raise TypeError(f"{self} is not subscriptable") -# 3.7-3.8 +# 3.8 else: - class _TypeAliasForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name + TypeAlias = _ExtensionsSpecialForm( + 'TypeAlias', + doc="""Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. - TypeAlias = _TypeAliasForm('TypeAlias', - doc="""Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. + For example:: - For example:: + Predicate: TypeAlias = Callable[..., bool] - Predicate: TypeAlias = Callable[..., bool] + It's invalid when used anywhere except as in the example + above.""" + ) - It's invalid when used anywhere except as in the example - above.""") + +def _set_default(type_param, default): + if isinstance(default, (tuple, list)): + type_param.__default__ = tuple((typing._type_check(d, "Default must be a type") + for d in default)) + elif default != _marker: + if isinstance(type_param, ParamSpec) and default is ...: # ... 
not valid <3.11 + type_param.__default__ = default + else: + type_param.__default__ = typing._type_check(default, "Default must be a type") + else: + type_param.__default__ = None + + +def _set_module(typevarlike): + # for pickling: + def_mod = _caller(depth=3) + if def_mod != 'typing_extensions': + typevarlike.__module__ = def_mod class _DefaultMixin: """Mixin for TypeVarLike defaults.""" __slots__ = () + __init__ = _set_default - def __init__(self, default): - if isinstance(default, (tuple, list)): - self.__default__ = tuple((typing._type_check(d, "Default must be a type") - for d in default)) - elif default != _marker: - self.__default__ = typing._type_check(default, "Default must be a type") - else: - self.__default__ = None + +# Classes using this metaclass must provide a _backported_typevarlike ClassVar +class _TypeVarLikeMeta(type): + def __instancecheck__(cls, __instance: Any) -> bool: + return isinstance(__instance, cls._backported_typevarlike) # Add default and infer_variance parameters from PEP 696 and 695 -class TypeVar(typing.TypeVar, _DefaultMixin, _root=True): +class TypeVar(metaclass=_TypeVarLikeMeta): """Type variable.""" - __module__ = 'typing' + _backported_typevarlike = typing.TypeVar - def __init__(self, name, *constraints, bound=None, - covariant=False, contravariant=False, - default=_marker, infer_variance=False): - super().__init__(name, *constraints, bound=bound, covariant=covariant, - contravariant=contravariant) - _DefaultMixin.__init__(self, default) - self.__infer_variance__ = infer_variance + def __new__(cls, name, *constraints, bound=None, + covariant=False, contravariant=False, + default=_marker, infer_variance=False): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant, + infer_variance=infer_variance) + else: + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant) + if infer_variance and (covariant or contravariant): + raise ValueError("Variance cannot be specified with infer_variance.") + typevar.__infer_variance__ = infer_variance + _set_default(typevar, default) + _set_module(typevar) + return typevar - # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None - if def_mod != 'typing_extensions': - self.__module__ = def_mod + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") # Python 3.10+ has PEP 612 if hasattr(typing, 'ParamSpecArgs'): ParamSpecArgs = typing.ParamSpecArgs ParamSpecKwargs = typing.ParamSpecKwargs -# 3.7-3.9 +# 3.8-3.9 else: class _Immutable: """Mixin to indicate that object should not be copied.""" @@ -1258,27 +1443,35 @@ def __eq__(self, other): # 3.10+ if hasattr(typing, 'ParamSpec'): - # Add default Parameter - PEP 696 - class ParamSpec(typing.ParamSpec, _DefaultMixin, _root=True): - """Parameter specification variable.""" - - __module__ = 'typing' + # Add default parameter - PEP 696 + class ParamSpec(metaclass=_TypeVarLikeMeta): + """Parameter specification.""" + + _backported_typevarlike = typing.ParamSpec + + def __new__(cls, name, *, bound=None, + covariant=False, contravariant=False, + infer_variance=False, default=_marker): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented, can pass infer_variance to typing.TypeVar + paramspec = 
typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant, + infer_variance=infer_variance) + else: + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant) + paramspec.__infer_variance__ = infer_variance - def __init__(self, name, *, bound=None, covariant=False, contravariant=False, - default=_marker): - super().__init__(name, bound=bound, covariant=covariant, - contravariant=contravariant) - _DefaultMixin.__init__(self, default) + _set_default(paramspec, default) + _set_module(paramspec) + return paramspec - # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None - if def_mod != 'typing_extensions': - self.__module__ = def_mod + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type") -# 3.7-3.9 +# 3.8-3.9 else: # Inherits from list as a workaround for Callable checks in Python < 3.9.2. @@ -1341,11 +1534,12 @@ def kwargs(self): return ParamSpecKwargs(self) def __init__(self, name, *, bound=None, covariant=False, contravariant=False, - default=_marker): + infer_variance=False, default=_marker): super().__init__([self]) self.__name__ = name self.__covariant__ = bool(covariant) self.__contravariant__ = bool(contravariant) + self.__infer_variance__ = bool(infer_variance) if bound: self.__bound__ = typing._type_check(bound, 'Bound must be a type.') else: @@ -1353,15 +1547,14 @@ def __init__(self, name, *, bound=None, covariant=False, contravariant=False, _DefaultMixin.__init__(self, default) # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None + def_mod = _caller() if def_mod != 'typing_extensions': self.__module__ = def_mod def __repr__(self): - if self.__covariant__: + if self.__infer_variance__: + prefix = '' + elif self.__covariant__: prefix = '+' elif self.__contravariant__: prefix = '-' @@ -1383,7 +1576,7 @@ def __call__(self, *args, **kwargs): pass -# 3.7-3.9 +# 3.8-3.9 if not hasattr(typing, 'Concatenate'): # Inherits from list as a workaround for Callable checks in Python < 3.9.2. class _ConcatenateGenericAlias(list): @@ -1418,7 +1611,7 @@ def __parameters__(self): ) -# 3.7-3.9 +# 3.8-3.9 @typing._tp_cache def _concatenate_getitem(self, parameters): if parameters == (): @@ -1436,10 +1629,10 @@ def _concatenate_getitem(self, parameters): # 3.10+ if hasattr(typing, 'Concatenate'): Concatenate = typing.Concatenate - _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa: F811 # 3.9 elif sys.version_info[:2] >= (3, 9): - @_TypeAliasForm + @_ExtensionsSpecialForm def Concatenate(self, parameters): """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a higher order function which adds, removes or transforms parameters of a @@ -1452,12 +1645,9 @@ def Concatenate(self, parameters): See PEP 612 for detailed information. """ return _concatenate_getitem(self, parameters) -# 3.7-8 +# 3.8 else: - class _ConcatenateForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' 
+ self._name - + class _ConcatenateForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): return _concatenate_getitem(self, parameters) @@ -1479,11 +1669,7 @@ def __getitem__(self, parameters): TypeGuard = typing.TypeGuard # 3.9 elif sys.version_info[:2] >= (3, 9): - class _TypeGuardForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - @_TypeGuardForm + @_ExtensionsSpecialForm def TypeGuard(self, parameters): """Special typing form used to annotate the return type of a user-defined type guard function. ``TypeGuard`` only accepts a single type argument. @@ -1529,13 +1715,9 @@ def is_str(val: Union[str, float]): """ item = typing._type_check(parameters, f'{self} accepts only a single type.') return typing._GenericAlias(self, (item,)) -# 3.7-3.8 +# 3.8 else: - class _TypeGuardForm(typing._SpecialForm, _root=True): - - def __repr__(self): - return 'typing_extensions.' + self._name - + class _TypeGuardForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type') @@ -1631,7 +1813,7 @@ def __getitem__(self, parameters): return self._getitem(self, parameters) -if hasattr(typing, "LiteralString"): +if hasattr(typing, "LiteralString"): # 3.11+ LiteralString = typing.LiteralString else: @_SpecialForm @@ -1654,7 +1836,7 @@ def query(sql: LiteralString) -> ...: raise TypeError(f"{self} is not subscriptable") -if hasattr(typing, "Self"): +if hasattr(typing, "Self"): # 3.11+ Self = typing.Self else: @_SpecialForm @@ -1675,7 +1857,7 @@ def parse(self, data: bytes) -> Self: raise TypeError(f"{self} is not subscriptable") -if hasattr(typing, "Never"): +if hasattr(typing, "Never"): # 3.11+ Never = typing.Never else: @_SpecialForm @@ -1705,14 +1887,10 @@ def int_or_str(arg: int | str) -> None: raise TypeError(f"{self} is not subscriptable") -if hasattr(typing, 'Required'): +if hasattr(typing, 'Required'): # 3.11+ Required = typing.Required NotRequired = typing.NotRequired -elif sys.version_info[:2] >= (3, 9): - class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - +elif sys.version_info[:2] >= (3, 9): # 3.9-3.10 @_ExtensionsSpecialForm def Required(self, parameters): """A special typing construct to mark a key of a total=False TypedDict @@ -1750,11 +1928,8 @@ class Movie(TypedDict): item = typing._type_check(parameters, f'{self._name} accepts only a single type.') return typing._GenericAlias(self, (item,)) -else: - class _RequiredForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - +else: # 3.8 + class _RequiredForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type.') @@ -1793,59 +1968,129 @@ class Movie(TypedDict): """) -if hasattr(typing, "Unpack"): # 3.11+ +if hasattr(typing, 'ReadOnly'): + ReadOnly = typing.ReadOnly +elif sys.version_info[:2] >= (3, 9): # 3.9-3.12 + @_ExtensionsSpecialForm + def ReadOnly(self, parameters): + """A special typing construct to mark an item of a TypedDict as read-only. + + For example: + + class Movie(TypedDict): + title: ReadOnly[str] + year: int + + def mutate_movie(m: Movie) -> None: + m["year"] = 1992 # allowed + m["title"] = "The Matrix" # typechecker error + + There is no runtime checking for this property. 
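Because the backported _TypedDictMeta earlier in this file also records read-only keys, the ReadOnly qualifier can be introspected at runtime; a brief sketch (assuming the module is importable as typing_extensions)::

    from typing_extensions import TypedDict, ReadOnly

    class Movie(TypedDict):
        title: ReadOnly[str]
        year: int

    assert Movie.__readonly_keys__ == frozenset({'title'})
    assert Movie.__mutable_keys__ == frozenset({'year'})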
+ """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + +else: # 3.8 + class _ReadOnlyForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + ReadOnly = _ReadOnlyForm( + 'ReadOnly', + doc="""A special typing construct to mark a key of a TypedDict as read-only. + + For example: + + class Movie(TypedDict): + title: ReadOnly[str] + year: int + + def mutate_movie(m: Movie) -> None: + m["year"] = 1992 # allowed + m["title"] = "The Matrix" # typechecker error + + There is no runtime checking for this propery. + """) + + +_UNPACK_DOC = """\ +Type unpack operator. + +The type unpack operator takes the child types from some container type, +such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For +example: + + # For some generic class `Foo`: + Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] + + Ts = TypeVarTuple('Ts') + # Specifies that `Bar` is generic in an arbitrary number of types. + # (Think of `Ts` as a tuple of an arbitrary number of individual + # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the + # `Generic[]`.) + class Bar(Generic[Unpack[Ts]]): ... + Bar[int] # Valid + Bar[int, str] # Also valid + +From Python 3.11, this can also be done using the `*` operator: + + Foo[*tuple[int, str]] + class Bar(Generic[*Ts]): ... + +The operator can also be used along with a `TypedDict` to annotate +`**kwargs` in a function signature. For instance: + + class Movie(TypedDict): + name: str + year: int + + # This function expects two keyword arguments - *name* of type `str` and + # *year* of type `int`. + def foo(**kwargs: Unpack[Movie]): ... + +Note that there is only some runtime checking of this operator. Not +everything the runtime allows may be accepted by static type checkers. + +For more information, see PEP 646 and PEP 692. +""" + + +if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[] Unpack = typing.Unpack -elif sys.version_info[:2] >= (3, 9): - class _UnpackSpecialForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name + + def _is_unpack(obj): + return get_origin(obj) is Unpack + +elif sys.version_info[:2] >= (3, 9): # 3.9+ + class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, getitem): + super().__init__(getitem) + self.__doc__ = _UNPACK_DOC class _UnpackAlias(typing._GenericAlias, _root=True): __class__ = typing.TypeVar @_UnpackSpecialForm def Unpack(self, parameters): - """A special typing construct to unpack a variadic type. For example: - - Shape = TypeVarTuple('Shape') - Batch = NewType('Batch', int) - - def add_batch_axis( - x: Array[Unpack[Shape]] - ) -> Array[Batch, Unpack[Shape]]: ... - - """ item = typing._type_check(parameters, f'{self._name} accepts only a single type.') return _UnpackAlias(self, (item,)) def _is_unpack(obj): return isinstance(obj, _UnpackAlias) -else: +else: # 3.8 class _UnpackAlias(typing._GenericAlias, _root=True): __class__ = typing.TypeVar - class _UnpackForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' 
+ self._name - + class _UnpackForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type.') return _UnpackAlias(self, (item,)) - Unpack = _UnpackForm( - 'Unpack', - doc="""A special typing construct to unpack a variadic type. For example: - - Shape = TypeVarTuple('Shape') - Batch = NewType('Batch', int) - - def add_batch_axis( - x: Array[Unpack[Shape]] - ) -> Array[Batch, Unpack[Shape]]: ... - - """) + Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC) def _is_unpack(obj): return isinstance(obj, _UnpackAlias) @@ -1853,23 +2098,22 @@ def _is_unpack(obj): if hasattr(typing, "TypeVarTuple"): # 3.11+ - # Add default Parameter - PEP 696 - class TypeVarTuple(typing.TypeVarTuple, _DefaultMixin, _root=True): + # Add default parameter - PEP 696 + class TypeVarTuple(metaclass=_TypeVarLikeMeta): """Type variable tuple.""" - def __init__(self, name, *, default=_marker): - super().__init__(name) - _DefaultMixin.__init__(self, default) + _backported_typevarlike = typing.TypeVarTuple - # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None - if def_mod != 'typing_extensions': - self.__module__ = def_mod + def __new__(cls, name, *, default=_marker): + tvt = typing.TypeVarTuple(name) + _set_default(tvt, default) + _set_module(tvt) + return tvt -else: + def __init_subclass__(self, *args, **kwds): + raise TypeError("Cannot subclass special typing classes") + +else: # <=3.10 class TypeVarTuple(_DefaultMixin): """Type variable tuple. @@ -1925,10 +2169,7 @@ def __init__(self, name, *, default=_marker): _DefaultMixin.__init__(self, default) # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None + def_mod = _caller() if def_mod != 'typing_extensions': self.__module__ = def_mod @@ -1951,10 +2192,10 @@ def __init_subclass__(self, *args, **kwds): raise TypeError("Cannot subclass special typing classes") -if hasattr(typing, "reveal_type"): +if hasattr(typing, "reveal_type"): # 3.11+ reveal_type = typing.reveal_type -else: - def reveal_type(__obj: T) -> T: +else: # <=3.10 + def reveal_type(obj: T, /) -> T: """Reveal the inferred type of a variable. When a static type checker encounters a call to ``reveal_type()``, @@ -1970,14 +2211,14 @@ def reveal_type(__obj: T) -> T: argument and returns it unchanged. """ - print(f"Runtime type is {type(__obj).__name__!r}", file=sys.stderr) - return __obj + print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr) + return obj -if hasattr(typing, "assert_never"): +if hasattr(typing, "assert_never"): # 3.11+ assert_never = typing.assert_never -else: - def assert_never(__arg: Never) -> Never: +else: # <=3.10 + def assert_never(arg: Never, /) -> Never: """Assert to the type checker that a line of code is unreachable. 
Example:: @@ -2000,10 +2241,10 @@ def int_or_str(arg: int | str) -> None: raise AssertionError("Expected code to be unreachable") -if sys.version_info >= (3, 12): +if sys.version_info >= (3, 12): # 3.12+ # dataclass_transform exists in 3.11 but lacks the frozen_default parameter dataclass_transform = typing.dataclass_transform -else: +else: # <=3.11 def dataclass_transform( *, eq_default: bool = True, @@ -2090,18 +2331,18 @@ def decorator(cls_or_fn): return decorator -if hasattr(typing, "override"): +if hasattr(typing, "override"): # 3.12+ override = typing.override -else: +else: # <=3.11 _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any]) - def override(__arg: _F) -> _F: + def override(arg: _F, /) -> _F: """Indicate that a method is intended to override a method in a base class. Usage: class Base: - def method(self) -> None: ... + def method(self) -> None: pass class Child(Base): @@ -2122,28 +2363,26 @@ def method(self) -> None: """ try: - __arg.__override__ = True + arg.__override__ = True except (AttributeError, TypeError): # Skip the attribute silently if it is not writable. # AttributeError happens if the object has __slots__ or a # read-only property, TypeError if it's a builtin class. pass - return __arg + return arg -if hasattr(typing, "deprecated"): - deprecated = typing.deprecated +if hasattr(warnings, "deprecated"): + deprecated = warnings.deprecated else: _T = typing.TypeVar("_T") - def deprecated( - __msg: str, - *, - category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, - stacklevel: int = 1, - ) -> typing.Callable[[_T], _T]: + class deprecated: """Indicate that a class, function or overload is deprecated. + When this decorator is applied to an object, the type checker + will generate a diagnostic on usage of the deprecated object. + Usage: @deprecated("Use B instead") @@ -2160,56 +2399,113 @@ def g(x: int) -> int: ... @overload def g(x: str) -> int: ... - When this decorator is applied to an object, the type checker - will generate a diagnostic on usage of the deprecated object. - - No runtime warning is issued. The decorator sets the ``__deprecated__`` - attribute on the decorated object to the deprecation message - passed to the decorator. If applied to an overload, the decorator + The warning specified by *category* will be emitted at runtime + on use of deprecated objects. For functions, that happens on calls; + for classes, on instantiation and on creation of subclasses. + If the *category* is ``None``, no warning is emitted at runtime. + The *stacklevel* determines where the + warning is emitted. If it is ``1`` (the default), the warning + is emitted at the direct caller of the deprecated object; if it + is higher, it is emitted further up the stack. + Static type checker behavior is not affected by the *category* + and *stacklevel* arguments. + + The deprecation message passed to the decorator is saved in the + ``__deprecated__`` attribute on the decorated object. + If applied to an overload, the decorator must be after the ``@overload`` decorator for the attribute to exist on the overload as returned by ``get_overloads()``. See PEP 702 for details. 
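A short sketch of the runtime behaviour described above (the class name is made up for illustration)::

    import warnings
    from typing_extensions import deprecated

    @deprecated("Use NewAPI instead")
    class OldAPI:
        pass

    # The deprecation message is recorded on the object:
    assert OldAPI.__deprecated__ == "Use NewAPI instead"

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        OldAPI()  # instantiation goes through the wrapped __new__ below
    assert issubclass(caught[0].category, DeprecationWarning)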
""" - def decorator(__arg: _T) -> _T: + def __init__( + self, + message: str, + /, + *, + category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, + stacklevel: int = 1, + ) -> None: + if not isinstance(message, str): + raise TypeError( + "Expected an object of type str for 'message', not " + f"{type(message).__name__!r}" + ) + self.message = message + self.category = category + self.stacklevel = stacklevel + + def __call__(self, arg: _T, /) -> _T: + # Make sure the inner functions created below don't + # retain a reference to self. + msg = self.message + category = self.category + stacklevel = self.stacklevel if category is None: - __arg.__deprecated__ = __msg - return __arg - elif isinstance(__arg, type): - original_new = __arg.__new__ - has_init = __arg.__init__ is not object.__init__ + arg.__deprecated__ = msg + return arg + elif isinstance(arg, type): + import functools + from types import MethodType + + original_new = arg.__new__ @functools.wraps(original_new) def __new__(cls, *args, **kwargs): - warnings.warn(__msg, category=category, stacklevel=stacklevel + 1) - # Mirrors a similar check in object.__new__. - if not has_init and (args or kwargs): - raise TypeError(f"{cls.__name__}() takes no arguments") + if cls is arg: + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) if original_new is not object.__new__: return original_new(cls, *args, **kwargs) + # Mirrors a similar check in object.__new__. + elif cls.__init__ is object.__init__ and (args or kwargs): + raise TypeError(f"{cls.__name__}() takes no arguments") else: return original_new(cls) - __arg.__new__ = staticmethod(__new__) - __arg.__deprecated__ = __new__.__deprecated__ = __msg - return __arg - elif callable(__arg): - @functools.wraps(__arg) + arg.__new__ = staticmethod(__new__) + + original_init_subclass = arg.__init_subclass__ + # We need slightly different behavior if __init_subclass__ + # is a bound method (likely if it was implemented in Python) + if isinstance(original_init_subclass, MethodType): + original_init_subclass = original_init_subclass.__func__ + + @functools.wraps(original_init_subclass) + def __init_subclass__(*args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return original_init_subclass(*args, **kwargs) + + arg.__init_subclass__ = classmethod(__init_subclass__) + # Or otherwise, which likely means it's a builtin such as + # object's implementation of __init_subclass__. 
+ else: + @functools.wraps(original_init_subclass) + def __init_subclass__(*args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return original_init_subclass(*args, **kwargs) + + arg.__init_subclass__ = __init_subclass__ + + arg.__deprecated__ = __new__.__deprecated__ = msg + __init_subclass__.__deprecated__ = msg + return arg + elif callable(arg): + import functools + + @functools.wraps(arg) def wrapper(*args, **kwargs): - warnings.warn(__msg, category=category, stacklevel=stacklevel + 1) - return __arg(*args, **kwargs) + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return arg(*args, **kwargs) - __arg.__deprecated__ = wrapper.__deprecated__ = __msg + arg.__deprecated__ = wrapper.__deprecated__ = msg return wrapper else: raise TypeError( "@deprecated decorator with non-None category must be applied to " - f"a class or callable, not {__arg!r}" + f"a class or callable, not {arg!r}" ) - return decorator - # We have to do some monkey patching to deal with the dual nature of # Unpack/TypeVarTuple: @@ -2223,18 +2519,14 @@ def wrapper(*args, **kwargs): typing._check_generic = _check_generic -# Backport typing.NamedTuple as it exists in Python 3.11. +# Backport typing.NamedTuple as it exists in Python 3.13. # In 3.11, the ability to define generic `NamedTuple`s was supported. # This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. -if sys.version_info >= (3, 11): +# On 3.12, we added __orig_bases__ to call-based NamedTuples +# On 3.13, we deprecated kwargs-based NamedTuples +if sys.version_info >= (3, 13): NamedTuple = typing.NamedTuple else: - def _caller(): - try: - return sys._getframe(2).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): # For platforms without _getframe() - return None - def _make_nmtuple(name, types, module, defaults=()): fields = [n for n, t in types] annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") @@ -2276,37 +2568,486 @@ def __new__(cls, typename, bases, ns): ) nm_tpl.__bases__ = bases if typing.Generic in bases: - class_getitem = typing.Generic.__class_getitem__.__func__ - nm_tpl.__class_getitem__ = classmethod(class_getitem) + if hasattr(typing, '_generic_class_getitem'): # 3.12+ + nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem) + else: + class_getitem = typing.Generic.__class_getitem__.__func__ + nm_tpl.__class_getitem__ = classmethod(class_getitem) # update from user namespace without overriding special namedtuple attributes - for key in ns: + for key, val in ns.items(): if key in _prohibited_namedtuple_fields: raise AttributeError("Cannot overwrite NamedTuple attribute " + key) - elif key not in _special_namedtuple_fields and key not in nm_tpl._fields: - setattr(nm_tpl, key, ns[key]) + elif key not in _special_namedtuple_fields: + if key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + try: + set_name = type(val).__set_name__ + except AttributeError: + pass + else: + try: + set_name(val, nm_tpl, key) + except BaseException as e: + msg = ( + f"Error calling __set_name__ on {type(val).__name__!r} " + f"instance {key!r} in {typename!r}" + ) + # BaseException.add_note() existed on py311, + # but the __set_name__ machinery didn't start + # using add_note() until py312. + # Making sure exceptions are raised in the same way + # as in "normal" classes seems most important here. 
+ if sys.version_info >= (3, 12): + e.add_note(msg) + raise + else: + raise RuntimeError(msg) from e + if typing.Generic in bases: nm_tpl.__init_subclass__() return nm_tpl - def NamedTuple(__typename, __fields=None, **kwargs): - if __fields is None: - __fields = kwargs.items() + _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) + + def _namedtuple_mro_entries(bases): + assert NamedTuple in bases + return (_NamedTuple,) + + @_ensure_subclassable(_namedtuple_mro_entries) + def NamedTuple(typename, fields=_marker, /, **kwargs): + """Typed version of namedtuple. + + Usage:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has an extra __annotations__ attribute, giving a + dict that maps field names to types. (The field names are also in + the _fields attribute, which is part of the namedtuple API.) + An alternative equivalent functional syntax is also accepted:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + if fields is _marker: + if kwargs: + deprecated_thing = "Creating NamedTuple classes using keyword arguments" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "Use the class-based or functional syntax instead." + ) + else: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + example = f"`{typename} = NamedTuple({typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." + elif fields is None: + if kwargs: + raise TypeError( + "Cannot pass `None` as the 'fields' parameter " + "and also specify fields using keyword arguments" + ) + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + example = f"`{typename} = NamedTuple({typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." elif kwargs: raise TypeError("Either list of fields or keywords" " can be provided to NamedTuple, not both") - return _make_nmtuple(__typename, __fields, module=_caller()) + if fields is _marker or fields is None: + warnings.warn( + deprecation_msg.format(name=deprecated_thing, remove="3.15"), + DeprecationWarning, + stacklevel=2, + ) + fields = kwargs.items() + nt = _make_nmtuple(typename, fields, module=_caller()) + nt.__orig_bases__ = (NamedTuple,) + return nt - NamedTuple.__doc__ = typing.NamedTuple.__doc__ - _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) - # On 3.8+, alter the signature so that it matches typing.NamedTuple. - # The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7, - # so just leave the signature as it is on 3.7. - if sys.version_info >= (3, 8): - NamedTuple.__text_signature__ = '(typename, fields=None, /, **kwargs)' +if hasattr(collections.abc, "Buffer"): + Buffer = collections.abc.Buffer +else: + class Buffer(abc.ABC): + """Base class for classes that implement the buffer protocol. + + The buffer protocol allows Python objects to expose a low-level + memory buffer interface. Before Python 3.12, it is not possible + to implement the buffer protocol in pure Python code, or even + to check whether a class implements the buffer protocol. 
In + Python 3.12 and higher, the ``__buffer__`` method allows access + to the buffer protocol from Python code, and the + ``collections.abc.Buffer`` ABC allows checking whether a class + implements the buffer protocol. + + To indicate support for the buffer protocol in earlier versions, + inherit from this ABC, either in a stub file or at runtime, + or use ABC registration. This ABC provides no methods, because + there are no Python-accessible methods shared by pre-3.12 buffer + classes. It is useful primarily for static checks. + """ + + # As a courtesy, register the most common stdlib buffer classes. + Buffer.register(memoryview) + Buffer.register(bytearray) + Buffer.register(bytes) + + +# Backport of types.get_original_bases, available on 3.12+ in CPython +if hasattr(_types, "get_original_bases"): + get_original_bases = _types.get_original_bases +else: + def get_original_bases(cls, /): + """Return the class's "original" bases prior to modification by `__mro_entries__`. + + Examples:: + + from typing import TypeVar, Generic + from pip._vendor.typing_extensions import NamedTuple, TypedDict + + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ... + Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) + + assert get_original_bases(Bar) == (Foo[int], float) + assert get_original_bases(Baz) == (list[str],) + assert get_original_bases(Eggs) == (NamedTuple,) + assert get_original_bases(Spam) == (TypedDict,) + assert get_original_bases(int) == (object,) + """ + try: + return cls.__dict__.get("__orig_bases__", cls.__bases__) + except AttributeError: + raise TypeError( + f'Expected an instance of type, not {type(cls).__name__!r}' + ) from None + + +# NewType is a class on Python 3.10+, making it pickleable +# The error message for subclassing instances of NewType was improved on 3.11+ +if sys.version_info >= (3, 11): + NewType = typing.NewType +else: + class NewType: + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy callable that simply returns its argument. Usage:: + UserId = NewType('UserId', int) + def name_by_id(user_id: UserId) -> str: + ... + UserId('user') # Fails type check + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + num = UserId(5) + 1 # type: int + """ + + def __call__(self, obj, /): + return obj + + def __init__(self, name, tp): + self.__qualname__ = name + if '.' in name: + name = name.rpartition('.')[-1] + self.__name__ = name + self.__supertype__ = tp + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + def __mro_entries__(self, bases): + # We defined __mro_entries__ to get a better error message + # if a user attempts to subclass a NewType instance. bpo-46170 + supercls_name = self.__name__ + + class Dummy: + def __init_subclass__(cls): + subcls_name = cls.__name__ + raise TypeError( + f"Cannot subclass an instance of NewType.
" + f"Perhaps you were looking for: " + f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" + ) + + return (Dummy,) + + def __repr__(self): + return f'{self.__module__}.{self.__qualname__}' + + def __reduce__(self): + return self.__qualname__ + + if sys.version_info >= (3, 10): + # PEP 604 methods + # It doesn't make sense to have these methods on Python <3.10 + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + +if hasattr(typing, "TypeAliasType"): + TypeAliasType = typing.TypeAliasType +else: + def _is_unionable(obj): + """Corresponds to is_unionable() in unionobject.c in CPython.""" + return obj is None or isinstance(obj, ( + type, + _types.GenericAlias, + _types.UnionType, + TypeAliasType, + )) + + class TypeAliasType: + """Create named, parameterized type aliases. + + This provides a backport of the new `type` statement in Python 3.12: + + type ListOrSet[T] = list[T] | set[T] + + is equivalent to: + + T = TypeVar("T") + ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) + + The name ListOrSet can then be used as an alias for the type it refers to. + + The type_params argument should contain all the type parameters used + in the value of the type alias. If the alias is not generic, this + argument is omitted. + + Static type checkers should only support type aliases declared using + TypeAliasType that follow these rules: - NamedTuple.__mro_entries__ = _namedtuple_mro_entries + - The first argument (the name) must be a string literal. + - The TypeAliasType instance must be immediately assigned to a variable + of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, + as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). + + """ + + def __init__(self, name: str, value, *, type_params=()): + if not isinstance(name, str): + raise TypeError("TypeAliasType name must be a string") + self.__value__ = value + self.__type_params__ = type_params + + parameters = [] + for type_param in type_params: + if isinstance(type_param, TypeVarTuple): + parameters.extend(type_param) + else: + parameters.append(type_param) + self.__parameters__ = tuple(parameters) + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + # Setting this attribute closes the TypeAliasType from further modification + self.__name__ = name + + def __setattr__(self, name: str, value: object, /) -> None: + if hasattr(self, "__name__"): + self._raise_attribute_error(name) + super().__setattr__(name, value) + + def __delattr__(self, name: str, /) -> Never: + self._raise_attribute_error(name) + + def _raise_attribute_error(self, name: str) -> Never: + # Match the Python 3.12 error messages exactly + if name == "__name__": + raise AttributeError("readonly attribute") + elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}: + raise AttributeError( + f"attribute '{name}' of 'typing.TypeAliasType' objects " + "is not writable" + ) + else: + raise AttributeError( + f"'typing.TypeAliasType' object has no attribute '{name}'" + ) + + def __repr__(self) -> str: + return self.__name__ + + def __getitem__(self, parameters): + if not isinstance(parameters, tuple): + parameters = (parameters,) + parameters = [ + typing._type_check( + item, f'Subscripting {self.__name__} requires a type.' 
+ ) + for item in parameters + ] + return typing._GenericAlias(self, tuple(parameters)) + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + "type 'typing_extensions.TypeAliasType' is not an acceptable base type" + ) + + # The presence of this method convinces typing._type_check + # that TypeAliasTypes are types. + def __call__(self): + raise TypeError("Type alias is not callable") + + if sys.version_info >= (3, 10): + def __or__(self, right): + # For forward compatibility with 3.12, reject Unions + # that are not accepted by the built-in Union. + if not _is_unionable(right): + return NotImplemented + return typing.Union[self, right] + + def __ror__(self, left): + if not _is_unionable(left): + return NotImplemented + return typing.Union[left, self] + + +if hasattr(typing, "is_protocol"): + is_protocol = typing.is_protocol + get_protocol_members = typing.get_protocol_members +else: + def is_protocol(tp: type, /) -> bool: + """Return True if the given type is a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, is_protocol + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> is_protocol(P) + True + >>> is_protocol(int) + False + """ + return ( + isinstance(tp, type) + and getattr(tp, '_is_protocol', False) + and tp is not Protocol + and tp is not typing.Protocol + ) + + def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]: + """Return the set of members defined in a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, get_protocol_members + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> get_protocol_members(P) + frozenset({'a', 'b'}) + + Raise a TypeError for arguments that are not Protocols. + """ + if not is_protocol(tp): + raise TypeError(f'{tp!r} is not a Protocol') + if hasattr(tp, '__protocol_attrs__'): + return frozenset(tp.__protocol_attrs__) + return frozenset(_get_protocol_attrs(tp)) + + +if hasattr(typing, "Doc"): + Doc = typing.Doc +else: + class Doc: + """Define the documentation of a type annotation using ``Annotated``, to be + used in class attributes, function and method parameters, return values, + and variables. + + The value should be a positional-only string literal to allow static tools + like editors and documentation generators to use it. + + This complements docstrings. + + The string value passed is available in the attribute ``documentation``. + + Example:: + + >>> from typing_extensions import Annotated, Doc + >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... + """ + def __init__(self, documentation: str, /) -> None: + self.documentation = documentation + + def __repr__(self) -> str: + return f"Doc({self.documentation!r})" + + def __hash__(self) -> int: + return hash(self.documentation) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Doc): + return NotImplemented + return self.documentation == other.documentation + + +# Aliases for items that have always been in typing. 
+# Explicitly assign these (rather than using `from typing import *` at the top), +# so that we get a CI error if one of these is deleted from typing.py +# in a future version of Python +AbstractSet = typing.AbstractSet +AnyStr = typing.AnyStr +BinaryIO = typing.BinaryIO +Callable = typing.Callable +Collection = typing.Collection +Container = typing.Container +Dict = typing.Dict +ForwardRef = typing.ForwardRef +FrozenSet = typing.FrozenSet +Generator = typing.Generator +Generic = typing.Generic +Hashable = typing.Hashable +IO = typing.IO +ItemsView = typing.ItemsView +Iterable = typing.Iterable +Iterator = typing.Iterator +KeysView = typing.KeysView +List = typing.List +Mapping = typing.Mapping +MappingView = typing.MappingView +Match = typing.Match +MutableMapping = typing.MutableMapping +MutableSequence = typing.MutableSequence +MutableSet = typing.MutableSet +Optional = typing.Optional +Pattern = typing.Pattern +Reversible = typing.Reversible +Sequence = typing.Sequence +Set = typing.Set +Sized = typing.Sized +TextIO = typing.TextIO +Tuple = typing.Tuple +Union = typing.Union +ValuesView = typing.ValuesView +cast = typing.cast +no_type_check = typing.no_type_check +no_type_check_decorator = typing.no_type_check_decorator diff --git a/src/pip/_vendor/urllib3/_collections.py b/src/pip/_vendor/urllib3/_collections.py index da9857e986d..bceb8451f0e 100644 --- a/src/pip/_vendor/urllib3/_collections.py +++ b/src/pip/_vendor/urllib3/_collections.py @@ -268,6 +268,24 @@ def getlist(self, key, default=__marker): else: return vals[1:] + def _prepare_for_method_change(self): + """ + Remove content-specific header fields before changing the request + method to GET or HEAD according to RFC 9110, Section 15.4. + """ + content_specific_headers = [ + "Content-Encoding", + "Content-Language", + "Content-Location", + "Content-Type", + "Content-Length", + "Digest", + "Last-Modified", + ] + for header in content_specific_headers: + self.discard(header) + return self + # Backwards compatibility for httplib getheaders = getlist getallmatchingheaders = getlist diff --git a/src/pip/_vendor/urllib3/_version.py b/src/pip/_vendor/urllib3/_version.py index e12dd0e7853..85e725eaf4d 100644 --- a/src/pip/_vendor/urllib3/_version.py +++ b/src/pip/_vendor/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.26.15" +__version__ = "1.26.18" diff --git a/src/pip/_vendor/urllib3/connectionpool.py b/src/pip/_vendor/urllib3/connectionpool.py index c23d736b186..5a6adcbdc75 100644 --- a/src/pip/_vendor/urllib3/connectionpool.py +++ b/src/pip/_vendor/urllib3/connectionpool.py @@ -9,6 +9,7 @@ from socket import error as SocketError from socket import timeout as SocketTimeout +from ._collections import HTTPHeaderDict from .connection import ( BaseSSLError, BrokenPipeError, @@ -50,6 +51,13 @@ from .util.url import _normalize_host as normalize_host from .util.url import get_host, parse_url +try: # Platform-specific: Python 3 + import weakref + + weakref_finalize = weakref.finalize +except AttributeError: # Platform-specific: Python 2 + from .packages.backports.weakref_finalize import weakref_finalize + xrange = six.moves.xrange log = logging.getLogger(__name__) @@ -220,6 +228,16 @@ def __init__( self.conn_kw["proxy"] = self.proxy self.conn_kw["proxy_config"] = self.proxy_config + # Do not pass 'self' as a callback to 'finalize': + # 'finalize' would then keep self alive forever (a reference leak). 
+ # Passing just a reference to the pool allows the garbage collector + # to free self if nobody else has a reference to it. + pool = self.pool + + # Close all the HTTPConnections in the pool before the + # HTTPConnectionPool object is garbage collected. + weakref_finalize(self, _close_pool_connections, pool) + def _new_conn(self): """ Return a fresh :class:`HTTPConnection`. @@ -489,14 +507,8 @@ def close(self): # Disable access to the pool old_pool, self.pool = self.pool, None - try: - while True: - conn = old_pool.get(block=False) - if conn: - conn.close() - - except queue.Empty: - pass # Done. + # Close all the HTTPConnections in the pool. + _close_pool_connections(old_pool) def is_same_host(self, url): """ @@ -832,7 +844,11 @@ def _is_ssl_error_message_from_http_proxy(ssl_error): redirect_location = redirect and response.get_redirect_location() if redirect_location: if response.status == 303: + # Change the method according to RFC 9110, Section 15.4.4. method = "GET" + # And drop the body so that nothing sensitive is transferred. + body = None + headers = HTTPHeaderDict(headers)._prepare_for_method_change() try: retries = retries.increment(method, url, response=response, _pool=self) @@ -1108,3 +1124,14 @@ def _normalize_host(host, scheme): if host.startswith("[") and host.endswith("]"): host = host[1:-1] return host + + +def _close_pool_connections(pool): + """Drains a queue of connections and closes each one.""" + try: + while True: + conn = pool.get(block=False) + if conn: + conn.close() + except queue.Empty: + pass # Done. diff --git a/src/pip/_vendor/urllib3/contrib/securetransport.py b/src/pip/_vendor/urllib3/contrib/securetransport.py index 4a06bc69d5c..722ee4e1242 100644 --- a/src/pip/_vendor/urllib3/contrib/securetransport.py +++ b/src/pip/_vendor/urllib3/contrib/securetransport.py @@ -64,9 +64,8 @@ import threading import weakref -from pip._vendor import six - from .. import util +from ..packages import six from ..util.ssl_ import PROTOCOL_TLS_CLIENT from ._securetransport.bindings import CoreFoundation, Security, SecurityConst from ._securetransport.low_level import ( diff --git a/src/pip/_vendor/urllib3/packages/backports/weakref_finalize.py b/src/pip/_vendor/urllib3/packages/backports/weakref_finalize.py new file mode 100644 index 00000000000..a2f2966e549 --- /dev/null +++ b/src/pip/_vendor/urllib3/packages/backports/weakref_finalize.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +""" +backports.weakref_finalize +~~~~~~~~~~~~~~~~~~ + +Backports the Python 3 ``weakref.finalize`` method. +""" +from __future__ import absolute_import + +import itertools +import sys +from weakref import ref + +__all__ = ["weakref_finalize"] + + +class weakref_finalize(object): + """Class for finalization of weakrefable objects + finalize(obj, func, *args, **kwargs) returns a callable finalizer + object which will be called when obj is garbage collected. The + first time the finalizer is called it evaluates func(*args, **kwargs) + and returns the result. After this the finalizer is dead, and + calling it just returns None. + When the program exits, any remaining finalizers for which the + atexit attribute is true will be run in reverse order of creation. + By default atexit is true. + """ + + # Finalizer objects don't have any state of their own. They are + # just used as keys to look up _Info objects in the registry. This + # ensures that they cannot be part of a ref-cycle. 
+ + __slots__ = () + _registry = {} + _shutdown = False + _index_iter = itertools.count() + _dirty = False + _registered_with_atexit = False + + class _Info(object): + __slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index") + + def __init__(self, obj, func, *args, **kwargs): + if not self._registered_with_atexit: + # We may register the exit function more than once because + # of a thread race, but that is harmless + import atexit + + atexit.register(self._exitfunc) + weakref_finalize._registered_with_atexit = True + info = self._Info() + info.weakref = ref(obj, self) + info.func = func + info.args = args + info.kwargs = kwargs or None + info.atexit = True + info.index = next(self._index_iter) + self._registry[self] = info + weakref_finalize._dirty = True + + def __call__(self, _=None): + """If alive then mark as dead and return func(*args, **kwargs); + otherwise return None""" + info = self._registry.pop(self, None) + if info and not self._shutdown: + return info.func(*info.args, **(info.kwargs or {})) + + def detach(self): + """If alive then mark as dead and return (obj, func, args, kwargs); + otherwise return None""" + info = self._registry.get(self) + obj = info and info.weakref() + if obj is not None and self._registry.pop(self, None): + return (obj, info.func, info.args, info.kwargs or {}) + + def peek(self): + """If alive then return (obj, func, args, kwargs); + otherwise return None""" + info = self._registry.get(self) + obj = info and info.weakref() + if obj is not None: + return (obj, info.func, info.args, info.kwargs or {}) + + @property + def alive(self): + """Whether finalizer is alive""" + return self in self._registry + + @property + def atexit(self): + """Whether finalizer should be called at exit""" + info = self._registry.get(self) + return bool(info) and info.atexit + + @atexit.setter + def atexit(self, value): + info = self._registry.get(self) + if info: + info.atexit = bool(value) + + def __repr__(self): + info = self._registry.get(self) + obj = info and info.weakref() + if obj is None: + return "<%s object at %#x; dead>" % (type(self).__name__, id(self)) + else: + return "<%s object at %#x; for %r at %#x>" % ( + type(self).__name__, + id(self), + type(obj).__name__, + id(obj), + ) + + @classmethod + def _select_for_exit(cls): + # Return live finalizers marked for exit, oldest first + L = [(f, i) for (f, i) in cls._registry.items() if i.atexit] + L.sort(key=lambda item: item[1].index) + return [f for (f, i) in L] + + @classmethod + def _exitfunc(cls): + # At shutdown invoke finalizers for which atexit is true. + # This is called once all other non-daemonic threads have been + # joined. 
+ reenable_gc = False + try: + if cls._registry: + import gc + + if gc.isenabled(): + reenable_gc = True + gc.disable() + pending = None + while True: + if pending is None or weakref_finalize._dirty: + pending = cls._select_for_exit() + weakref_finalize._dirty = False + if not pending: + break + f = pending.pop() + try: + # gc is disabled, so (assuming no daemonic + # threads) the following is the only line in + # this function which might trigger creation + # of a new finalizer + f() + except Exception: + sys.excepthook(*sys.exc_info()) + assert f not in cls._registry + finally: + # prevent any more finalizers from executing during shutdown + weakref_finalize._shutdown = True + if reenable_gc: + gc.enable() diff --git a/src/pip/_vendor/urllib3/poolmanager.py b/src/pip/_vendor/urllib3/poolmanager.py index ca4ec341184..fb51bf7d96b 100644 --- a/src/pip/_vendor/urllib3/poolmanager.py +++ b/src/pip/_vendor/urllib3/poolmanager.py @@ -4,7 +4,7 @@ import functools import logging -from ._collections import RecentlyUsedContainer +from ._collections import HTTPHeaderDict, RecentlyUsedContainer from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme from .exceptions import ( LocationValueError, @@ -171,7 +171,7 @@ class PoolManager(RequestMethods): def __init__(self, num_pools=10, headers=None, **connection_pool_kw): RequestMethods.__init__(self, headers) self.connection_pool_kw = connection_pool_kw - self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close()) + self.pools = RecentlyUsedContainer(num_pools) # Locally set the pool classes and keys so other PoolManagers can # override them. @@ -382,9 +382,12 @@ def urlopen(self, method, url, redirect=True, **kw): # Support relative URLs for redirecting. redirect_location = urljoin(url, redirect_location) - # RFC 7231, Section 6.4.4 if response.status == 303: + # Change the method according to RFC 9110, Section 15.4.4. method = "GET" + # And drop the body so that nothing sensitive is transferred. 
+ kw["body"] = None + kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change() retries = kw.get("retries") if not isinstance(retries, Retry): diff --git a/src/pip/_vendor/urllib3/request.py b/src/pip/_vendor/urllib3/request.py index 398386a5b9f..3b4cf999225 100644 --- a/src/pip/_vendor/urllib3/request.py +++ b/src/pip/_vendor/urllib3/request.py @@ -1,6 +1,9 @@ from __future__ import absolute_import +import sys + from .filepost import encode_multipart_formdata +from .packages import six from .packages.six.moves.urllib.parse import urlencode __all__ = ["RequestMethods"] @@ -168,3 +171,21 @@ def request_encode_body( extra_kw.update(urlopen_kw) return self.urlopen(method, url, **extra_kw) + + +if not six.PY2: + + class RequestModule(sys.modules[__name__].__class__): + def __call__(self, *args, **kwargs): + """ + If user tries to call this module directly urllib3 v2.x style raise an error to the user + suggesting they may need urllib3 v2 + """ + raise TypeError( + "'module' object is not callable\n" + "urllib3.request() method is not supported in this release, " + "upgrade to urllib3 v2 to use it\n" + "see https://urllib3.readthedocs.io/en/stable/v2-migration-guide.html" + ) + + sys.modules[__name__].__class__ = RequestModule diff --git a/src/pip/_vendor/urllib3/util/retry.py b/src/pip/_vendor/urllib3/util/retry.py index 2490d5e5b63..60ef6c4f3f9 100644 --- a/src/pip/_vendor/urllib3/util/retry.py +++ b/src/pip/_vendor/urllib3/util/retry.py @@ -235,7 +235,7 @@ class Retry(object): RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) #: Default headers to be used for ``remove_headers_on_redirect`` - DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"]) + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"]) #: Maximum backoff time. DEFAULT_BACKOFF_MAX = 120 diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 61063459d6d..f00a8c02db4 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,23 +1,24 @@ -CacheControl==0.12.11 # Make sure to update the license in pyproject.toml for this. +CacheControl==0.13.1 # Make sure to update the license in pyproject.toml for this. 
colorama==0.4.6 -distlib==0.3.6 -distro==1.8.0 -msgpack==1.0.5 +distlib==0.3.8 +distro==1.9.0 +msgpack==1.0.8 packaging==21.3 -platformdirs==3.2.0 -pyparsing==3.0.9 +platformdirs==4.2.0 +pyparsing==3.1.0 pyproject-hooks==1.0.0 -requests==2.28.2 - certifi==2022.12.7 - chardet==5.1.0 - idna==3.4 - urllib3==1.26.15 -rich==13.3.3 - pygments==2.14.0 - typing_extensions==4.5.0 +requests==2.31.0 + certifi==2024.2.2 + chardet==5.2.0 + idna==3.6 + urllib3==1.26.18 +rich==13.7.0 + pygments==2.17.2 + typing_extensions==4.9.0 resolvelib==1.0.1 -setuptools==67.7.2 +setuptools==69.1.1 six==1.16.0 -tenacity==8.2.2 +tenacity==8.2.3 tomli==2.0.1 +truststore==0.8.0 webencodings==0.5.1 diff --git a/tests/conftest.py b/tests/conftest.py index f75711a17f2..4296c016deb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,23 +1,32 @@ import compileall +import contextlib import fnmatch -import io +import http.server import os import re import shutil import subprocess import sys -from contextlib import ExitStack, contextmanager +import threading +from dataclasses import dataclass +from enum import Enum +from hashlib import sha256 from pathlib import Path +from textwrap import dedent from typing import ( TYPE_CHECKING, + Any, AnyStr, Callable, + ClassVar, + ContextManager, Dict, Iterable, Iterator, List, Optional, - Union, + Set, + Tuple, ) from unittest.mock import patch from zipfile import ZipFile @@ -36,24 +45,22 @@ from installer.sources import WheelFile from pip import __file__ as pip_location -from pip._internal.cli.main import main as pip_entry_point from pip._internal.locations import _USE_SYSCONFIG from pip._internal.utils.temp_dir import global_tempdir_manager -from tests.lib import DATA_DIR, SRC_DIR, PipTestEnvironment, TestData -from tests.lib.server import MockServer as _MockServer -from tests.lib.server import make_mock_server, server_running +from tests.lib import ( + DATA_DIR, + SRC_DIR, + CertFactory, + InMemoryPip, + PipTestEnvironment, + ScriptFactory, + TestData, +) +from tests.lib.server import MockServer, make_mock_server from tests.lib.venv import VirtualEnvironment, VirtualEnvironmentType -from .lib.compat import nullcontext - if TYPE_CHECKING: - from typing import Protocol - - from wsgi import WSGIApplication -else: - # TODO: Protocol was introduced in Python 3.8. Remove this branch when - # dropping support for Python 3.7. - Protocol = object + from pip._vendor.typing_extensions import Self def pytest_addoption(parser: Parser) -> None: @@ -66,8 +73,8 @@ def pytest_addoption(parser: Parser) -> None: parser.addoption( "--resolver", action="store", - default="2020-resolver", - choices=["2020-resolver", "legacy"], + default="resolvelib", + choices=["resolvelib", "legacy"], help="use given resolver in tests", ) parser.addoption( @@ -144,7 +151,7 @@ def pytest_collection_modifyitems(config: Config, items: List[pytest.Function]) if "script" in item.fixturenames: raise RuntimeError( "Cannot use the ``script`` funcarg in a unit test: " - "(filename = {}, item = {})".format(module_path, item) + f"(filename = {module_path}, item = {item})" ) else: raise RuntimeError(f"Unknown test type (filename = {module_path})") @@ -317,6 +324,10 @@ def isolate(tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> None: # Make sure tests don't share a requirements tracker. monkeypatch.delenv("PIP_BUILD_TRACKER", False) + # Make sure color control variables don't affect internal output. + monkeypatch.delenv("FORCE_COLOR", False) + monkeypatch.delenv("NO_COLOR", False) + # FIXME: Windows... 
os.makedirs(os.path.join(home_dir, ".config", "git")) with open(os.path.join(home_dir, ".config", "git", "config"), "wb") as fp: @@ -331,7 +342,7 @@ def scoped_global_tempdir_manager(request: pytest.FixtureRequest) -> Iterator[No temporary directories in the application. """ if "no_auto_tempdir_manager" in request.keywords: - ctx = nullcontext + ctx: Callable[[], ContextManager[None]] = contextlib.nullcontext else: ctx = global_tempdir_manager @@ -371,6 +382,36 @@ def not_code_files_and_folders(path: str, names: List[str]) -> Iterable[str]: return pip_src +@pytest.fixture(scope="session") +def pip_editable_parts( + pip_src: Path, tmpdir_factory: pytest.TempPathFactory +) -> Tuple[Path, ...]: + pip_editable = tmpdir_factory.mktemp("pip") / "pip" + shutil.copytree(pip_src, pip_editable, symlinks=True) + # noxfile.py is Python 3 only + assert compileall.compile_dir( + pip_editable, + quiet=1, + rx=re.compile("noxfile.py$"), + ) + pip_self_install_path = tmpdir_factory.mktemp("pip_self_install") + subprocess.check_call( + [ + sys.executable, + "-m", + "pip", + "install", + "--target", + pip_self_install_path, + "-e", + pip_editable, + ] + ) + pth = next(pip_self_install_path.glob("*pip*.pth")) + dist_info = next(pip_self_install_path.glob("*.dist-info")) + return (pth, dist_info) + + def _common_wheel_editable_install( tmpdir_factory: pytest.TempPathFactory, common_wheels: Path, package: str ) -> Path: @@ -434,6 +475,7 @@ def virtualenv_template( request: pytest.FixtureRequest, tmpdir_factory: pytest.TempPathFactory, pip_src: Path, + pip_editable_parts: Tuple[Path, ...], setuptools_install: Path, wheel_install: Path, coverage_install: Path, @@ -451,17 +493,17 @@ def virtualenv_template( # Install setuptools, wheel and pip. install_pth_link(venv, "setuptools", setuptools_install) install_pth_link(venv, "wheel", wheel_install) - pip_editable = tmpdir_factory.mktemp("pip") / "pip" - shutil.copytree(pip_src, pip_editable, symlinks=True) - # noxfile.py is Python 3 only - assert compileall.compile_dir( - str(pip_editable), - quiet=1, - rx=re.compile("noxfile.py$"), - ) - subprocess.check_call( - [os.fspath(venv.bin / "python"), "setup.py", "-q", "develop"], cwd=pip_editable + + pth, dist_info = pip_editable_parts + + shutil.copy(pth, venv.site) + shutil.copytree( + dist_info, venv.site / dist_info.name, dirs_exist_ok=True, symlinks=True ) + # Create placeholder ``easy-install.pth``, as several tests depend on its + # existence. TODO: Ensure ``tests.lib.TestPipResult.files_updated`` correctly + # detects changed files. + venv.site.joinpath("easy-install.pth").touch() # Install coverage and pth file for executing it in any spawned processes # in this virtual environment. @@ -508,16 +550,6 @@ def virtualenv( yield virtualenv_factory(tmpdir.joinpath("workspace", "venv")) -class ScriptFactory(Protocol): - def __call__( - self, - tmpdir: Path, - virtualenv: Optional[VirtualEnvironment] = None, - environ: Optional[Dict[AnyStr, AnyStr]] = None, - ) -> PipTestEnvironment: - ... 
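A short aside on the ``pip_editable_parts`` fixture above: copying just the generated ``*.pth`` file and ``*.dist-info`` directory into the venv's site directory is enough because of how Python's ``site`` module processes ``*.pth`` files, not because of anything pip-specific. A minimal sketch, assuming a hypothetical directory name:

    # Sketch: site.addsitedir() adds the directory itself to sys.path, then
    # reads every *.pth file in it; a plain line is appended to sys.path (if
    # that path exists), and a line starting with "import " is executed. The
    # copied *.dist-info directory is what keeps the editable install visible
    # to importlib.metadata.
    import site
    import sys

    before = set(sys.path)
    site.addsitedir("/tmp/pip_self_install")  # hypothetical; holds __editable__*.pth
    print(sorted(set(sys.path) - before))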
- - @pytest.fixture(scope="session") def script_factory( virtualenv_factory: Callable[[Path], VirtualEnvironment], @@ -637,26 +669,6 @@ def data(tmpdir: Path) -> TestData: return TestData.copy(tmpdir.joinpath("data")) -class InMemoryPipResult: - def __init__(self, returncode: int, stdout: str) -> None: - self.returncode = returncode - self.stdout = stdout - - -class InMemoryPip: - def pip(self, *args: Union[str, Path]) -> InMemoryPipResult: - orig_stdout = sys.stdout - stdout = io.StringIO() - sys.stdout = stdout - try: - returncode = pip_entry_point([os.fspath(a) for a in args]) - except SystemExit as e: - returncode = e.code or 0 - finally: - sys.stdout = orig_stdout - return InMemoryPipResult(returncode, stdout.getvalue()) - - @pytest.fixture def in_memory_pip() -> InMemoryPip: return InMemoryPip() @@ -668,9 +680,6 @@ def deprecated_python() -> bool: return sys.version_info[:2] in [] -CertFactory = Callable[[], str] - - @pytest.fixture(scope="session") def cert_factory(tmpdir_factory: pytest.TempPathFactory) -> CertFactory: # Delay the import requiring cryptography in order to make it possible @@ -692,49 +701,6 @@ def factory() -> str: return factory -class MockServer: - def __init__(self, server: _MockServer) -> None: - self._server = server - self._running = False - self.context = ExitStack() - - @property - def port(self) -> int: - return self._server.port - - @property - def host(self) -> str: - return self._server.host - - def set_responses(self, responses: Iterable["WSGIApplication"]) -> None: - assert not self._running, "responses cannot be set on running server" - self._server.mock.side_effect = responses - - def start(self) -> None: - assert not self._running, "running server cannot be started" - self.context.enter_context(server_running(self._server)) - self.context.enter_context(self._set_running()) - - @contextmanager - def _set_running(self) -> Iterator[None]: - self._running = True - try: - yield - finally: - self._running = False - - def stop(self) -> None: - assert self._running, "idle server cannot be stopped" - self.context.close() - - def get_requests(self) -> List[Dict[str, str]]: - """Get environ for each received request.""" - assert not self._running, "cannot get mock from running server" - # Legacy: replace call[0][0] with call.args[0] - # when pip drops support for python3.7 - return [call[0][0] for call in self._server.mock.call_args_list] - - @pytest.fixture def mock_server() -> Iterator[MockServer]: server = make_mock_server() @@ -751,3 +717,292 @@ def proxy(request: pytest.FixtureRequest) -> str: @pytest.fixture def enable_user_site(virtualenv: VirtualEnvironment) -> None: virtualenv.user_site_packages = True + + +class MetadataKind(Enum): + """All the types of values we might be provided for the data-dist-info-metadata + attribute from PEP 658.""" + + # Valid: will read metadata from the dist instead. + No = "none" + # Valid: will read the .metadata file, but won't check its hash. + Unhashed = "unhashed" + # Valid: will read the .metadata file and check its hash matches. + Sha256 = "sha256" + # Invalid: will error out after checking the hash. + WrongHash = "wrong-hash" + # Invalid: will error out after failing to fetch the .metadata file. + NoFile = "no-file" + + +@dataclass(frozen=True) +class FakePackage: + """Mock package structure used to generate a PyPI repository. 
+ + FakePackage name and version should correspond to sdists (.tar.gz files) in our test + data.""" + + name: str + version: str + filename: str + metadata: MetadataKind + # This will override any dependencies specified in the actual dist's METADATA. + requires_dist: Tuple[str, ...] = () + # This will override the Name specified in the actual dist's METADATA. + metadata_name: Optional[str] = None + + def metadata_filename(self) -> str: + """This is specified by PEP 658.""" + return f"{self.filename}.metadata" + + def generate_additional_tag(self) -> str: + """This gets injected into the tag in the generated PyPI index page for this + package.""" + if self.metadata == MetadataKind.No: + return "" + if self.metadata in [MetadataKind.Unhashed, MetadataKind.NoFile]: + return 'data-dist-info-metadata="true"' + if self.metadata == MetadataKind.WrongHash: + return 'data-dist-info-metadata="sha256=WRONG-HASH"' + assert self.metadata == MetadataKind.Sha256 + checksum = sha256(self.generate_metadata()).hexdigest() + return f'data-dist-info-metadata="sha256={checksum}"' + + def requires_str(self) -> str: + if not self.requires_dist: + return "" + joined = " and ".join(self.requires_dist) + return f"Requires-Dist: {joined}" + + def generate_metadata(self) -> bytes: + """This is written to `self.metadata_filename()` and will override the actual + dist's METADATA, unless `self.metadata == MetadataKind.NoFile`.""" + return dedent( + f"""\ + Metadata-Version: 2.1 + Name: {self.metadata_name or self.name} + Version: {self.version} + {self.requires_str()} + """ + ).encode("utf-8") + + +@pytest.fixture(scope="session") +def fake_packages() -> Dict[str, List[FakePackage]]: + """The package database we generate for testing PEP 658 support.""" + return { + "simple": [ + FakePackage("simple", "1.0", "simple-1.0.tar.gz", MetadataKind.Sha256), + FakePackage("simple", "2.0", "simple-2.0.tar.gz", MetadataKind.No), + # This will raise a hashing error. + FakePackage("simple", "3.0", "simple-3.0.tar.gz", MetadataKind.WrongHash), + ], + "simple2": [ + # Override the dependencies here in order to force pip to download + # simple-1.0.tar.gz as well. + FakePackage( + "simple2", + "1.0", + "simple2-1.0.tar.gz", + MetadataKind.Unhashed, + ("simple==1.0",), + ), + # This will raise an error when pip attempts to fetch the metadata file. + FakePackage("simple2", "2.0", "simple2-2.0.tar.gz", MetadataKind.NoFile), + # This has a METADATA file with a mismatched name. + FakePackage( + "simple2", + "3.0", + "simple2-3.0.tar.gz", + MetadataKind.Sha256, + metadata_name="not-simple2", + ), + ], + "colander": [ + # Ensure we can read the dependencies from a metadata file within a wheel + # *without* PEP 658 metadata. + FakePackage( + "colander", + "0.9.9", + "colander-0.9.9-py2.py3-none-any.whl", + MetadataKind.No, + ), + ], + "compilewheel": [ + # Ensure we can override the dependencies of a wheel file by injecting PEP + # 658 metadata. + FakePackage( + "compilewheel", + "1.0", + "compilewheel-1.0-py2.py3-none-any.whl", + MetadataKind.Unhashed, + ("simple==1.0",), + ), + ], + "has-script": [ + # Ensure we check PEP 658 metadata hashing errors for wheel files. + FakePackage( + "has-script", + "1.0", + "has.script-1.0-py2.py3-none-any.whl", + MetadataKind.WrongHash, + ), + ], + "translationstring": [ + FakePackage( + "translationstring", + "1.1", + "translationstring-1.1.tar.gz", + MetadataKind.No, + ), + ], + "priority": [ + # Ensure we check for a missing metadata file for wheels. 
+ FakePackage( + "priority", + "1.0", + "priority-1.0-py2.py3-none-any.whl", + MetadataKind.NoFile, + ), + ], + "requires-simple-extra": [ + # Metadata name is not canonicalized. + FakePackage( + "requires-simple-extra", + "0.1", + "requires_simple_extra-0.1-py2.py3-none-any.whl", + MetadataKind.Sha256, + metadata_name="Requires_Simple.Extra", + ), + ], + } + + +@pytest.fixture(scope="session") +def html_index_for_packages( + shared_data: TestData, + fake_packages: Dict[str, List[FakePackage]], + tmpdir_factory: pytest.TempPathFactory, +) -> Path: + """Generate a PyPI HTML package index within a local directory pointing to + synthetic test data.""" + html_dir = tmpdir_factory.mktemp("fake_index_html_content") + + # (1) Generate the content for a PyPI index.html. + pkg_links = "\n".join( + f' <a href="{pkg}/index.html">{pkg}</a>' for pkg in fake_packages.keys() + ) + # Output won't be nicely indented because dedent() acts after f-string + # arg insertion. + index_html = dedent( + f"""\ + <!DOCTYPE html> + <html> + <head> + <meta name="pypi:repository-version" content="1.0"/> + <title>Simple index</title> + </head> + <body> + {pkg_links} + </body> + </html>""" + ) + # (2) Generate the index.html in a new subdirectory of the temp directory. + (html_dir / "index.html").write_text(index_html) + + # (3) Generate subdirectories for individual packages, each with their own + # index.html. + for pkg, links in fake_packages.items(): + pkg_subdir = html_dir / pkg + pkg_subdir.mkdir() + + download_links: List[str] = [] + for package_link in links: + # (3.1) Generate the <a> tag which pip can crawl pointing to this + # specific package version. + download_links.append( + f' <a href="{package_link.filename}" {package_link.generate_additional_tag()}>{package_link.filename}</a><br/>' # noqa: E501 + ) + # (3.2) Copy over the corresponding file in `shared_data.packages`. + shutil.copy( + shared_data.packages / package_link.filename, + pkg_subdir / package_link.filename, + ) + # (3.3) Write a metadata file, if applicable. + if package_link.metadata != MetadataKind.NoFile: + with open(pkg_subdir / package_link.metadata_filename(), "wb") as f: + f.write(package_link.generate_metadata()) + + # (3.4) After collating all the download links and copying over the files, + # write an index.html with the generated download links for each + # copied file for this specific package name. + download_links_str = "\n".join(download_links) + pkg_index_content = dedent( + f"""\ + <!DOCTYPE html> + <html> + <head> + <meta name="pypi:repository-version" content="1.0"/> + <title>Links for {pkg}</title> + </head> + <body> + <h1>Links for {pkg}</h1> + {download_links_str} + </body> + </html>""" + ) + with open(pkg_subdir / "index.html", "w") as f: + f.write(pkg_index_content) + + return html_dir + + +class OneTimeDownloadHandler(http.server.SimpleHTTPRequestHandler): + """Serve files from the current directory, but error if a file is downloaded more + than once.""" + + _seen_paths: ClassVar[Set[str]] = set() + + def do_GET(self) -> None: + if self.path in self._seen_paths: + self.send_error( + http.HTTPStatus.NOT_FOUND, + f"File {self.path} not available more than once!", + ) + return + super().do_GET() + if not (self.path.endswith("/") or self.path.endswith(".metadata")): + self._seen_paths.add(self.path) + + +@pytest.fixture(scope="function") +def html_index_with_onetime_server( + html_index_for_packages: Path, +) -> Iterator[http.server.ThreadingHTTPServer]: + """Serve files from a generated pypi index, erroring if a file is downloaded more + than once. + + Provide `-i http://localhost:8000` to pip invocations to point them at this server. + """ + + class InDirectoryServer(http.server.ThreadingHTTPServer): + def finish_request(self: "Self", request: Any, client_address: Any) -> None: + self.RequestHandlerClass( + request, + client_address, + self, + directory=str(html_index_for_packages), # type: ignore[call-arg] + ) + + class Handler(OneTimeDownloadHandler): + _seen_paths: ClassVar[Set[str]] = set() + + with InDirectoryServer(("", 8000), Handler) as httpd: + server_thread = threading.Thread(target=httpd.serve_forever) + server_thread.start() + + try: + yield httpd + finally: + httpd.shutdown() + server_thread.join() diff --git a/tests/data/packages/BrokenEmitsUTF8/setup.py b/tests/data/packages/BrokenEmitsUTF8/setup.py index eb4ebf2d380..21266b3fcf6 100644 --- a/tests/data/packages/BrokenEmitsUTF8/setup.py +++ b/tests/data/packages/BrokenEmitsUTF8/setup.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import sys from distutils.core import setup @@ -11,13 +9,11 @@ class FakeError(Exception): if sys.argv[1] in ("install", "bdist_wheel"): if hasattr(sys.stdout, "buffer"): sys.stdout.buffer.write( - "\nThis package prints out UTF-8 stuff like:\n".encode("utf-8") - ) - sys.stdout.buffer.write( - "* return type of ‘main’ is not ‘int’\n".encode("utf-8") + "\nThis package prints out UTF-8 stuff like:\n".encode() ) + sys.stdout.buffer.write("* return type of ‘main’ is not ‘int’\n".encode()) sys.stdout.buffer.write( - "* Björk Guðmundsdóttir [ˈpjœr̥k ˈkvʏðmʏntsˌtoʊhtɪr]".encode("utf-8") + "* Björk Guðmundsdóttir [ˈpjœr̥k ˈkvʏðmʏntsˌtoʊhtɪr]".encode() ) else: pass diff --git a/tests/functional/test_build_env.py b/tests/functional/test_build_env.py index 22a71cd3200..11164be0500 100644 --- a/tests/functional/test_build_env.py +++ b/tests/functional/test_build_env.py @@ -1,4 +1,5 @@ import os +import sys from textwrap import dedent from typing import Optional @@ -23,9 +24,10 @@ def run_with_build_env( test_script_contents: Optional[str] = None, ) -> TestPipResult: build_env_script = script.scratch_path / "build_env.py" + scratch_path = str(script.scratch_path) build_env_script.write_text( dedent( - """ + f""" import subprocess import sys @@ -41,7 +43,7 @@ def run_with_build_env( link_collector = LinkCollector( session=PipSession(), - search_scope=SearchScope.create([{scratch!r}], [], False), + search_scope=SearchScope.create([{scratch_path!r}], [], False), ) selection_prefs = SelectionPreferences( allow_yanked=True, @@ -53,9 +55,7 @@ def run_with_build_env( with global_tempdir_manager(): build_env = BuildEnvironment() - """.format( - 
scratch=str(script.scratch_path) - ) + """ ) + indent(dedent(setup_script_contents), " ") + indent( @@ -203,6 +203,31 @@ def test_build_env_overlay_prefix_has_priority(script: PipTestEnvironment) -> No assert result.stdout.strip() == "2.0", str(result) +if sys.version_info < (3, 12): + BUILD_ENV_ERROR_DEBUG_CODE = r""" + from distutils.sysconfig import get_python_lib + print( + f'imported `pkg` from `{pkg.__file__}`', + file=sys.stderr) + print('system sites:\n ' + '\n '.join(sorted({ + get_python_lib(plat_specific=0), + get_python_lib(plat_specific=1), + })), file=sys.stderr) + """ +else: + BUILD_ENV_ERROR_DEBUG_CODE = r""" + from sysconfig import get_paths + paths = get_paths() + print( + f'imported `pkg` from `{pkg.__file__}`', + file=sys.stderr) + print('system sites:\n ' + '\n '.join(sorted({ + paths['platlib'], + paths['purelib'], + })), file=sys.stderr) + """ + + @pytest.mark.usefixtures("enable_user_site") def test_build_env_isolation(script: PipTestEnvironment) -> None: # Create dummy `pkg` wheel. @@ -231,8 +256,7 @@ def test_build_env_isolation(script: PipTestEnvironment) -> None: run_with_build_env( script, "", - r""" - from distutils.sysconfig import get_python_lib + f""" import sys try: @@ -240,17 +264,9 @@ def test_build_env_isolation(script: PipTestEnvironment) -> None: except ImportError: pass else: - print( - f'imported `pkg` from `{pkg.__file__}`', - file=sys.stderr) - print('system sites:\n ' + '\n '.join(sorted({ - get_python_lib(plat_specific=0), - get_python_lib(plat_specific=1), - })), file=sys.stderr) - print('sys.path:\n ' + '\n '.join(sys.path), file=sys.stderr) + {BUILD_ENV_ERROR_DEBUG_CODE} + print('sys.path:\\n ' + '\\n '.join(sys.path), file=sys.stderr) sys.exit(1) - """ - f""" # second check: direct check of exclusion of system site packages import os diff --git a/tests/functional/test_cache.py b/tests/functional/test_cache.py index 788abdd2be5..5b7e585260d 100644 --- a/tests/functional/test_cache.py +++ b/tests/functional/test_cache.py @@ -20,7 +20,7 @@ def cache_dir(script: PipTestEnvironment) -> str: @pytest.fixture def http_cache_dir(cache_dir: str) -> str: - return os.path.normcase(os.path.join(cache_dir, "http")) + return os.path.normcase(os.path.join(cache_dir, "http-v2")) @pytest.fixture @@ -36,10 +36,7 @@ def http_cache_files(http_cache_dir: str) -> List[str]: return [] filenames = glob(os.path.join(destination, "*")) - files = [] - for filename in filenames: - files.append(os.path.join(destination, filename)) - return files + return [os.path.join(destination, filename) for filename in filenames] @pytest.fixture @@ -50,10 +47,7 @@ def wheel_cache_files(wheel_cache_dir: str) -> List[str]: return [] filenames = glob(os.path.join(destination, "*.whl")) - files = [] - for filename in filenames: - files.append(os.path.join(destination, filename)) - return files + return [os.path.join(destination, filename) for filename in filenames] @pytest.fixture @@ -107,7 +101,7 @@ def list_matches_wheel(wheel_name: str, result: TestPipResult) -> bool: `- foo-1.2.3-py3-none-any.whl `.""" lines = result.stdout.splitlines() expected = f" - {wheel_name}-py3-none-any.whl " - return any(map(lambda line: line.startswith(expected), lines)) + return any(line.startswith(expected) for line in lines) def list_matches_wheel_abspath(wheel_name: str, result: TestPipResult) -> bool: @@ -120,12 +114,8 @@ def list_matches_wheel_abspath(wheel_name: str, result: TestPipResult) -> bool: lines = result.stdout.splitlines() expected = f"{wheel_name}-py3-none-any.whl" return any( - map( - 
lambda line: ( - os.path.basename(line).startswith(expected) and os.path.exists(line) - ), - lines, - ) + (os.path.basename(line).startswith(expected) and os.path.exists(line)) + for line in lines ) @@ -211,7 +201,10 @@ def test_cache_info( ) -> None: result = script.pip("cache", "info") - assert f"Package index page cache location: {http_cache_dir}" in result.stdout + assert ( + f"Package index page cache location (pip v23.3+): {http_cache_dir}" + in result.stdout + ) assert f"Locally built wheels location: {wheel_cache_dir}" in result.stdout num_wheels = len(wheel_cache_files) assert f"Number of locally built wheels: {num_wheels}" in result.stdout diff --git a/tests/functional/test_check.py b/tests/functional/test_check.py index e2b1c60ef3a..79b6df39c19 100644 --- a/tests/functional/test_check.py +++ b/tests/functional/test_check.py @@ -119,7 +119,10 @@ def test_check_complicated_name_missing(script: PipTestEnvironment) -> None: # Without dependency result = script.pip("install", "--no-index", package_a_path, "--no-deps") - assert "Successfully installed package-A-1.0" in result.stdout, str(result) + assert ( + "Successfully installed package_A-1.0" in result.stdout + or "Successfully installed package-A-1.0" in result.stdout + ), str(result) result = script.pip("check", expect_error=True) expected_lines = ("package-a 1.0 requires dependency-b, which is not installed.",) @@ -142,7 +145,10 @@ def test_check_complicated_name_broken(script: PipTestEnvironment) -> None: # With broken dependency result = script.pip("install", "--no-index", package_a_path, "--no-deps") - assert "Successfully installed package-A-1.0" in result.stdout, str(result) + assert ( + "Successfully installed package_A-1.0" in result.stdout + or "Successfully installed package-A-1.0" in result.stdout + ), str(result) result = script.pip( "install", @@ -175,7 +181,10 @@ def test_check_complicated_name_clean(script: PipTestEnvironment) -> None: ) result = script.pip("install", "--no-index", package_a_path, "--no-deps") - assert "Successfully installed package-A-1.0" in result.stdout, str(result) + assert ( + "Successfully installed package_A-1.0" in result.stdout + or "Successfully installed package-A-1.0" in result.stdout + ), str(result) result = script.pip( "install", @@ -203,7 +212,10 @@ def test_check_considers_conditional_reqs(script: PipTestEnvironment) -> None: ) result = script.pip("install", "--no-index", package_a_path, "--no-deps") - assert "Successfully installed package-A-1.0" in result.stdout, str(result) + assert ( + "Successfully installed package_A-1.0" in result.stdout + or "Successfully installed package-A-1.0" in result.stdout + ), str(result) result = script.pip("check", expect_error=True) expected_lines = ("package-a 1.0 requires dependency-b, which is not installed.",) diff --git a/tests/functional/test_cli.py b/tests/functional/test_cli.py index a1b69b72106..3c166152111 100644 --- a/tests/functional/test_cli.py +++ b/tests/functional/test_cli.py @@ -1,9 +1,14 @@ """Basic CLI functionality checks. 
""" + +import subprocess +import sys +from pathlib import Path from textwrap import dedent import pytest +from pip._internal.commands import commands_dict from tests.lib import PipTestEnvironment @@ -23,7 +28,7 @@ def test_entrypoints_work(entrypoint: str, script: PipTestEnvironment) -> None: fake_pkg.mkdir() fake_pkg.joinpath("setup.py").write_text( dedent( - """ + f""" from setuptools import setup setup( @@ -31,13 +36,11 @@ def test_entrypoints_work(entrypoint: str, script: PipTestEnvironment) -> None: version="0.1.0", entry_points={{ "console_scripts": [ - {!r} + {entrypoint!r} ] }} ) - """.format( - entrypoint - ) + """ ) ) @@ -47,3 +50,49 @@ def test_entrypoints_work(entrypoint: str, script: PipTestEnvironment) -> None: result2 = script.run("fake_pip", "-V", allow_stderr_warning=True) assert result.stdout == result2.stdout assert "old script wrapper" in result2.stderr + + +@pytest.mark.parametrize( + "command", + sorted( + set(commands_dict).symmetric_difference( + # Exclude commands that are expected to use the network. + # TODO: uninstall and list should only import network modules as needed + {"install", "uninstall", "download", "search", "index", "wheel", "list"} + ) + ), +) +def test_no_network_imports(command: str, tmp_path: Path) -> None: + """ + Verify that commands that don't access the network do NOT import network code. + + This helps to reduce the startup time of these commands. + + Note: This won't catch lazy network imports, but it'll catch top-level + network imports which were accidentally added (which is the most likely way + to regress anyway). + """ + file = tmp_path / f"imported_modules_for_{command}.txt" + code = f""" +import runpy +import sys + +sys.argv[1:] = [{command!r}, "--help"] + +try: + runpy.run_module("pip", alter_sys=True, run_name="__main__") +finally: + with open({str(file)!r}, "w") as f: + print(*sys.modules.keys(), sep="\\n", file=f) + """ + subprocess.run( + [sys.executable], + input=code, + encoding="utf-8", + check=True, + ) + imported = file.read_text().splitlines() + assert not any("pip._internal.index" in mod for mod in imported) + assert not any("pip._internal.network" in mod for mod in imported) + assert not any("requests" in mod for mod in imported) + assert not any("urllib3" in mod for mod in imported) diff --git a/tests/functional/test_completion.py b/tests/functional/test_completion.py index b02cd4fa317..a52b135c8b0 100644 --- a/tests/functional/test_completion.py +++ b/tests/functional/test_completion.py @@ -1,20 +1,11 @@ import os import sys from pathlib import Path -from typing import TYPE_CHECKING, Tuple, Union +from typing import Protocol, Tuple, Union import pytest -from tests.conftest import ScriptFactory -from tests.lib import PipTestEnvironment, TestData, TestPipResult - -if TYPE_CHECKING: - from typing import Protocol -else: - # TODO: Protocol was introduced in Python 3.8. Remove this branch when - # dropping support for Python 3.7. 
- Protocol = object - +from tests.lib import PipTestEnvironment, ScriptFactory, TestData, TestPipResult COMPLETION_FOR_SUPPORTED_SHELLS_TESTS = ( ( @@ -44,15 +35,19 @@ ( "zsh", """\ -function _pip_completion { - local words cword - read -Ac words - read -cn cword - reply=( $( COMP_WORDS="$words[*]" \\ - COMP_CWORD=$(( cword-1 )) \\ - PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )) +#compdef -P pip[0-9.]# +__pip() { + compadd $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$((CURRENT-1)) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ) } -compctl -K _pip_completion pip""", +if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + __pip "$@" +else + # eval/source/. command, register function for later + compdef __pip -P 'pip[0-9.]#' +fi""", ), ( "powershell", @@ -125,9 +120,13 @@ def autocomplete_script( class DoAutocomplete(Protocol): def __call__( - self, words: str, cword: str, cwd: Union[Path, str, None] = None - ) -> Tuple[TestPipResult, PipTestEnvironment]: - ... + self, + words: str, + cword: str, + cwd: Union[Path, str, None] = None, + include_env: bool = True, + expect_error: bool = True, + ) -> Tuple[TestPipResult, PipTestEnvironment]: ... @pytest.fixture def autocomplete( autocomplete_script.environ["PIP_AUTO_COMPLETE"] = "1" def do_autocomplete( - words: str, cword: str, cwd: Union[Path, str, None] = None + words: str, + cword: str, + cwd: Union[Path, str, None] = None, + include_env: bool = True, + expect_error: bool = True, ) -> Tuple[TestPipResult, PipTestEnvironment]: - autocomplete_script.environ["COMP_WORDS"] = words - autocomplete_script.environ["COMP_CWORD"] = cword + if include_env: + autocomplete_script.environ["COMP_WORDS"] = words + autocomplete_script.environ["COMP_CWORD"] = cword result = autocomplete_script.run( "python", "-c", "from pip._internal.cli.autocompletion import autocomplete;" "autocomplete()", - expect_error=True, + expect_error=expect_error, cwd=cwd, ) @@ -165,6 +169,17 @@ def test_completion_for_unknown_shell(autocomplete_script: PipTestEnvironment) - assert error_msg in result.stderr, "tests for an unknown shell failed" +def test_completion_without_env_vars(autocomplete: DoAutocomplete) -> None: + """ + Test that autocompletion exits cleanly when the COMP_WORDS and + COMP_CWORD environment variables are not set + """ + res, env = autocomplete( + words="pip install ", cword="", include_env=False, expect_error=False + ) + assert res.stdout == "", "autocomplete function did not complete" + + def test_completion_alone(autocomplete_script: PipTestEnvironment) -> None: """ Test getting completion with no shell specified, i.e. plain pip completion """ @@ -392,11 +407,12 @@ def test_completion_path_after_option( ) -@pytest.mark.parametrize("flag", ["--bash", "--zsh", "--fish", "--powershell"]) +# zsh completion script doesn't contain pip3 +@pytest.mark.parametrize("flag", ["--bash", "--fish", "--powershell"]) def test_completion_uses_same_executable_name( autocomplete_script: PipTestEnvironment, flag: str, deprecated_python: bool ) -> None: - executable_name = "pip{}".format(sys.version_info[0]) + executable_name = f"pip{sys.version_info[0]}" # Deprecated python versions produce an extra deprecation warning result = autocomplete_script.run( executable_name, diff --git a/tests/functional/test_config_settings.py b/tests/functional/test_config_settings.py index f3975de2af5..3f88d9c3924 100644 --- a/tests/functional/test_config_settings.py +++ b/tests/functional/test_config_settings.py @@ -107,6 +107,26 @@ def make_project( return name, version, project_dir
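For reference, the ``--config-settings`` values exercised by the test added below reach the build backend through the ``config_settings`` dict defined by PEP 517. A hypothetical backend sketch, not pip's own code; the assertion mirrors the ``FOO=Hello`` value used in the test:

    # mybackend.py (hypothetical): pip forwards each KEY=VALUE pair from
    # `--config-settings` as an entry in the `config_settings` dict below.
    def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
        assert config_settings == {"FOO": "Hello"}
        raise NotImplementedError("sketch only; a real backend builds a wheel here")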
+def test_config_settings_implies_pep517( + script: PipTestEnvironment, tmp_path: Path +) -> None: + """Test that setup.py bdist_wheel is not used when config settings are.""" + pkg_path = tmp_path / "pkga" + pkg_path.mkdir() + pkg_path.joinpath("setup.py").write_text( + "from setuptools import setup; setup(name='pkga')\n" + ) + result = script.pip( + "wheel", + "--config-settings", + "FOO=Hello", + pkg_path, + cwd=tmp_path, + ) + assert "Successfully built pkga" in result.stdout + assert "Preparing metadata (pyproject.toml)" in result.stdout + + def test_backend_sees_config(script: PipTestEnvironment) -> None: name, version, project_dir = make_project(script.scratch_path) script.pip( diff --git a/tests/functional/test_configuration.py b/tests/functional/test_configuration.py index b3de3f697b0..854fb3694b1 100644 --- a/tests/functional/test_configuration.py +++ b/tests/functional/test_configuration.py @@ -1,5 +1,6 @@ """Tests for the config command """ + import re import textwrap diff --git a/tests/functional/test_debug.py b/tests/functional/test_debug.py index 77cd732f9f1..77d4bea335f 100644 --- a/tests/functional/test_debug.py +++ b/tests/functional/test_debug.py @@ -68,7 +68,7 @@ def test_debug__tags(script: PipTestEnvironment, args: List[str]) -> None: stdout = result.stdout tags = compatibility_tags.get_supported() - expected_tag_header = "Compatible tags: {}".format(len(tags)) + expected_tag_header = f"Compatible tags: {len(tags)}" assert expected_tag_header in stdout show_verbose_note = "--verbose" not in args diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py index 31418ca8c2b..555a1163f42 100644 --- a/tests/functional/test_download.py +++ b/tests/functional/test_download.py @@ -1,28 +1,25 @@ +import http.server import os import re import shutil import textwrap -import uuid -from dataclasses import dataclass -from enum import Enum from hashlib import sha256 from pathlib import Path -from textwrap import dedent -from typing import Callable, Dict, List, Tuple +from typing import Callable, List, Tuple import pytest from pip._internal.cli.status_codes import ERROR from pip._internal.utils.urls import path_to_url -from tests.conftest import MockServer, ScriptFactory from tests.lib import ( PipTestEnvironment, + ScriptFactory, TestData, TestPipResult, create_basic_sdist_for_package, create_really_basic_wheel, ) -from tests.lib.server import file_response +from tests.lib.server import MockServer, file_response def fake_wheel(data: TestData, wheel_path: str) -> None: @@ -1237,179 +1234,47 @@ def test_download_use_pep517_propagation( assert len(downloads) == 2 -class MetadataKind(Enum): - """All the types of values we might be provided for the data-dist-info-metadata - attribute from PEP 658.""" - - # Valid: will read metadata from the dist instead. - No = "none" - # Valid: will read the .metadata file, but won't check its hash. - Unhashed = "unhashed" - # Valid: will read the .metadata file and check its hash matches. - Sha256 = "sha256" - # Invalid: will error out after checking the hash. - WrongHash = "wrong-hash" - # Invalid: will error out after failing to fetch the .metadata file. - NoFile = "no-file" - - -@dataclass(frozen=True) -class Package: - """Mock package structure used to generate a PyPI repository. - - Package name and version should correspond to sdists (.tar.gz files) in our test - data.""" - - name: str - version: str - filename: str - metadata: MetadataKind - # This will override any dependencies specified in the actual dist's METADATA. 
-    requires_dist: Tuple[str, ...] = ()
-
-    def metadata_filename(self) -> str:
-        """This is specified by PEP 658."""
-        return f"{self.filename}.metadata"
-
-    def generate_additional_tag(self) -> str:
-        """This gets injected into the <a> tag in the generated PyPI index page for
-        this package."""
-        if self.metadata == MetadataKind.No:
-            return ""
-        if self.metadata in [MetadataKind.Unhashed, MetadataKind.NoFile]:
-            return 'data-dist-info-metadata="true"'
-        if self.metadata == MetadataKind.WrongHash:
-            return 'data-dist-info-metadata="sha256=WRONG-HASH"'
-        assert self.metadata == MetadataKind.Sha256
-        checksum = sha256(self.generate_metadata()).hexdigest()
-        return f'data-dist-info-metadata="sha256={checksum}"'
-
-    def requires_str(self) -> str:
-        if not self.requires_dist:
-            return ""
-        joined = " and ".join(self.requires_dist)
-        return f"Requires-Dist: {joined}"
-
-    def generate_metadata(self) -> bytes:
-        """This is written to `self.metadata_filename()` and will override the actual
-        dist's METADATA, unless `self.metadata == MetadataKind.NoFile`."""
-        return dedent(
-            f"""\
-            Metadata-Version: 2.1
-            Name: {self.name}
-            Version: {self.version}
-            {self.requires_str()}
-            """
-        ).encode("utf-8")
-
-
-@pytest.fixture(scope="function")
-def write_index_html_content(tmpdir: Path) -> Callable[[str], Path]:
-    """Generate a PyPI package index.html within a temporary local directory."""
-    html_dir = tmpdir / "index_html_content"
-    html_dir.mkdir()
-
-    def generate_index_html_subdir(index_html: str) -> Path:
-        """Create a new subdirectory named after a UUID and write an index.html."""
-        new_subdir = html_dir / uuid.uuid4().hex
-        new_subdir.mkdir()
-
-        with open(new_subdir / "index.html", "w") as f:
-            f.write(index_html)
-
-        return new_subdir
-
-    return generate_index_html_subdir
-
-
 @pytest.fixture(scope="function")
-def html_index_for_packages(
-    shared_data: TestData,
-    write_index_html_content: Callable[[str], Path],
-) -> Callable[..., Path]:
-    """Generate a PyPI HTML package index within a local directory pointing to
-    blank data."""
+def download_local_html_index(
+    script: PipTestEnvironment,
+    html_index_for_packages: Path,
+    tmpdir: Path,
+) -> Callable[..., Tuple[TestPipResult, Path]]:
+    """Execute `pip download` against a generated PyPI index."""
+    download_dir = tmpdir / "download_dir"

-    def generate_html_index_for_packages(packages: Dict[str, List[Package]]) -> Path:
+    def run_for_generated_index(
+        args: List[str],
+        allow_error: bool = False,
+    ) -> Tuple[TestPipResult, Path]:
         """
-        Produce a PyPI directory structure pointing to the specified packages.
+        Produce a PyPI directory structure pointing to the specified packages, then
+        execute `pip download -i ...` pointing to our generated index.
         """
-        # (1) Generate the content for a PyPI index.html.
-        pkg_links = "\n".join(
-            f'    <a href="{pkg}/index.html">{pkg}</a>' for pkg in packages.keys()
-        )
-        index_html = f"""\
-<!DOCTYPE html>
-<html>
-  <head>
-    <meta name="pypi:repository-version" content="1.0"/>
-    <title>Simple index</title>
-  </head>
-  <body>
-{pkg_links}
-  </body>
-</html>"""
-        # (2) Generate the index.html in a new subdirectory of the temp directory.
-        index_html_subdir = write_index_html_content(index_html)
-
-        # (3) Generate subdirectories for individual packages, each with their own
-        # index.html.
-        for pkg, links in packages.items():
-            pkg_subdir = index_html_subdir / pkg
-            pkg_subdir.mkdir()
-
-            download_links: List[str] = []
-            for package_link in links:
-                # (3.1) Generate the <a> tag which pip can crawl pointing to this
-                # specific package version.
-                download_links.append(
-                    f'    <a href="{package_link.filename}" {package_link.generate_additional_tag()}>{package_link.filename}</a><br/>'  # noqa: E501
-                )
-                # (3.2) Copy over the corresponding file in `shared_data.packages`.
-                shutil.copy(
-                    shared_data.packages / package_link.filename,
-                    pkg_subdir / package_link.filename,
-                )
-                # (3.3) Write a metadata file, if applicable.
-                if package_link.metadata != MetadataKind.NoFile:
-                    with open(pkg_subdir / package_link.metadata_filename(), "wb") as f:
-                        f.write(package_link.generate_metadata())
-
-            # (3.4) After collating all the download links and copying over the files,
-            # write an index.html with the generated download links for each
-            # copied file for this specific package name.
-            download_links_str = "\n".join(download_links)
-            pkg_index_content = f"""\
-<!DOCTYPE html>
-<html>
-  <head>
-    <meta name="pypi:repository-version" content="1.0"/>
-    <title>Links for {pkg}</title>
-  </head>
-  <body>
-    <h1>Links for {pkg}</h1>
-{download_links_str} - -""" - with open(pkg_subdir / "index.html", "w") as f: - f.write(pkg_index_content) - - return index_html_subdir - - return generate_html_index_for_packages + pip_args = [ + "download", + "-d", + str(download_dir), + "-i", + path_to_url(str(html_index_for_packages)), + *args, + ] + result = script.pip(*pip_args, allow_error=allow_error) + return (result, download_dir) + + return run_for_generated_index @pytest.fixture(scope="function") -def download_generated_html_index( +def download_server_html_index( script: PipTestEnvironment, - html_index_for_packages: Callable[[Dict[str, List[Package]]], Path], tmpdir: Path, + html_index_with_onetime_server: http.server.ThreadingHTTPServer, ) -> Callable[..., Tuple[TestPipResult, Path]]: """Execute `pip download` against a generated PyPI index.""" download_dir = tmpdir / "download_dir" def run_for_generated_index( - packages: Dict[str, List[Package]], args: List[str], allow_error: bool = False, ) -> Tuple[TestPipResult, Path]: @@ -1417,13 +1282,12 @@ def run_for_generated_index( Produce a PyPI directory structure pointing to the specified packages, then execute `pip download -i ...` pointing to our generated index. """ - index_dir = html_index_for_packages(packages) pip_args = [ "download", "-d", str(download_dir), "-i", - path_to_url(str(index_dir)), + "http://localhost:8000", *args, ] result = script.pip(*pip_args, allow_error=allow_error) @@ -1432,68 +1296,6 @@ def run_for_generated_index( return run_for_generated_index -# The package database we generate for testing PEP 658 support. -_simple_packages: Dict[str, List[Package]] = { - "simple": [ - Package("simple", "1.0", "simple-1.0.tar.gz", MetadataKind.Sha256), - Package("simple", "2.0", "simple-2.0.tar.gz", MetadataKind.No), - # This will raise a hashing error. - Package("simple", "3.0", "simple-3.0.tar.gz", MetadataKind.WrongHash), - ], - "simple2": [ - # Override the dependencies here in order to force pip to download - # simple-1.0.tar.gz as well. - Package( - "simple2", - "1.0", - "simple2-1.0.tar.gz", - MetadataKind.Unhashed, - ("simple==1.0",), - ), - # This will raise an error when pip attempts to fetch the metadata file. - Package("simple2", "2.0", "simple2-2.0.tar.gz", MetadataKind.NoFile), - ], - "colander": [ - # Ensure we can read the dependencies from a metadata file within a wheel - # *without* PEP 658 metadata. - Package( - "colander", "0.9.9", "colander-0.9.9-py2.py3-none-any.whl", MetadataKind.No - ), - ], - "compilewheel": [ - # Ensure we can override the dependencies of a wheel file by injecting PEP - # 658 metadata. - Package( - "compilewheel", - "1.0", - "compilewheel-1.0-py2.py3-none-any.whl", - MetadataKind.Unhashed, - ("simple==1.0",), - ), - ], - "has-script": [ - # Ensure we check PEP 658 metadata hashing errors for wheel files. - Package( - "has-script", - "1.0", - "has.script-1.0-py2.py3-none-any.whl", - MetadataKind.WrongHash, - ), - ], - "translationstring": [ - Package( - "translationstring", "1.1", "translationstring-1.1.tar.gz", MetadataKind.No - ), - ], - "priority": [ - # Ensure we check for a missing metadata file for wheels. 
- Package( - "priority", "1.0", "priority-1.0-py2.py3-none-any.whl", MetadataKind.NoFile - ), - ], -} - - @pytest.mark.parametrize( "requirement_to_download, expected_outputs", [ @@ -1510,19 +1312,69 @@ def run_for_generated_index( ], ) def test_download_metadata( - download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], + download_local_html_index: Callable[..., Tuple[TestPipResult, Path]], requirement_to_download: str, expected_outputs: List[str], ) -> None: """Verify that if a data-dist-info-metadata attribute is present, then it is used instead of the actual dist's METADATA.""" - _, download_dir = download_generated_html_index( - _simple_packages, + _, download_dir = download_local_html_index( [requirement_to_download], ) assert sorted(os.listdir(download_dir)) == expected_outputs +@pytest.mark.parametrize( + "requirement_to_download, expected_outputs, doubled_path", + [ + ( + "simple2==1.0", + ["simple-1.0.tar.gz", "simple2-1.0.tar.gz"], + "/simple2/simple2-1.0.tar.gz", + ), + ("simple==2.0", ["simple-2.0.tar.gz"], "/simple/simple-2.0.tar.gz"), + ( + "colander", + ["colander-0.9.9-py2.py3-none-any.whl", "translationstring-1.1.tar.gz"], + "/colander/colander-0.9.9-py2.py3-none-any.whl", + ), + ( + "compilewheel", + [ + "compilewheel-1.0-py2.py3-none-any.whl", + "simple-1.0.tar.gz", + ], + "/compilewheel/compilewheel-1.0-py2.py3-none-any.whl", + ), + ], +) +def test_download_metadata_server( + download_server_html_index: Callable[..., Tuple[TestPipResult, Path]], + requirement_to_download: str, + expected_outputs: List[str], + doubled_path: str, +) -> None: + """Verify that if a data-dist-info-metadata attribute is present, then it is used + instead of the actual dist's METADATA. + + Additionally, verify that each dist is downloaded exactly once using a mock server. + + This is a regression test for issue https://github.com/pypa/pip/issues/11847. + """ + _, download_dir = download_server_html_index( + [requirement_to_download, "--no-cache-dir"], + ) + assert sorted(os.listdir(download_dir)) == expected_outputs + shutil.rmtree(download_dir) + result, _ = download_server_html_index( + [requirement_to_download, "--no-cache-dir"], + allow_error=True, + ) + assert result.returncode != 0 + expected_msg = f"File {doubled_path} not available more than once!" 
+ assert expected_msg in result.stderr + + @pytest.mark.parametrize( "requirement_to_download, real_hash", [ @@ -1537,14 +1389,13 @@ def test_download_metadata( ], ) def test_incorrect_metadata_hash( - download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], + download_local_html_index: Callable[..., Tuple[TestPipResult, Path]], requirement_to_download: str, real_hash: str, ) -> None: """Verify that if a hash for data-dist-info-metadata is provided, it must match the actual hash of the metadata file.""" - result, _ = download_generated_html_index( - _simple_packages, + result, _ = download_local_html_index( [requirement_to_download], allow_error=True, ) @@ -1563,15 +1414,14 @@ def test_incorrect_metadata_hash( ], ) def test_metadata_not_found( - download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], + download_local_html_index: Callable[..., Tuple[TestPipResult, Path]], requirement_to_download: str, expected_url: str, ) -> None: """Verify that if a data-dist-info-metadata attribute is provided, that pip will fetch the .metadata file at the location specified by PEP 658, and error if unavailable.""" - result, _ = download_generated_html_index( - _simple_packages, + result, _ = download_local_html_index( [requirement_to_download], allow_error=True, ) @@ -1581,3 +1431,45 @@ def test_metadata_not_found( f"ERROR: 404 Client Error: FileNotFoundError for url:.*{expected_re}" ) assert pattern.search(result.stderr), (pattern, result.stderr) + + +def test_produces_error_for_mismatched_package_name_in_metadata( + download_local_html_index: Callable[..., Tuple[TestPipResult, Path]], +) -> None: + """Verify that the package name from the metadata matches the requested package.""" + result, _ = download_local_html_index( + ["simple2==3.0"], + allow_error=True, + ) + assert result.returncode != 0 + assert ( + "simple2-3.0.tar.gz has inconsistent Name: expected 'simple2', but metadata " + "has 'not-simple2'" + ) in result.stdout + + +@pytest.mark.parametrize( + "requirement", + ( + "requires-simple-extra==0.1", + "REQUIRES_SIMPLE-EXTRA==0.1", + "REQUIRES....simple-_-EXTRA==0.1", + ), +) +def test_canonicalizes_package_name_before_verifying_metadata( + download_local_html_index: Callable[..., Tuple[TestPipResult, Path]], + requirement: str, +) -> None: + """Verify that the package name from the command line and the package's + METADATA are both canonicalized before comparison. 
+ + Regression test for https://github.com/pypa/pip/issues/12038 + """ + result, download_dir = download_local_html_index( + [requirement], + allow_error=True, + ) + assert result.returncode == 0 + assert os.listdir(download_dir) == [ + "requires_simple_extra-0.1-py2.py3-none-any.whl", + ] diff --git a/tests/functional/test_fast_deps.py b/tests/functional/test_fast_deps.py index 0109db825b7..9e529c0891e 100644 --- a/tests/functional/test_fast_deps.py +++ b/tests/functional/test_fast_deps.py @@ -2,12 +2,14 @@ import json import os import pathlib +import re from os.path import basename from typing import Iterable from pip._vendor.packaging.utils import canonicalize_name from pytest import mark +from pip._internal.utils.misc import hash_file from tests.lib import PipTestEnvironment, TestData, TestPipResult @@ -101,3 +103,36 @@ def test_hash_mismatch(script: PipTestEnvironment, tmp_path: pathlib.Path) -> No expect_error=True, ) assert "DO NOT MATCH THE HASHES" in result.stderr + + +@mark.network +def test_hash_mismatch_existing_download_for_metadata_only_wheel( + script: PipTestEnvironment, tmp_path: pathlib.Path +) -> None: + """Metadata-only wheels from PEP 658 or fast-deps check for hash matching in + a separate code path than when the wheel is downloaded all at once. Make sure we + still check for hash mismatches.""" + reqs = tmp_path / "requirements.txt" + reqs.write_text("idna==2.10") + dl_dir = tmp_path / "downloads" + dl_dir.mkdir() + idna_wheel = dl_dir / "idna-2.10-py2.py3-none-any.whl" + idna_wheel.write_text("asdf") + result = script.pip( + "download", + # Ensure that we have a metadata-only dist for idna. + "--use-feature=fast-deps", + "-r", + str(reqs), + "-d", + str(dl_dir), + allow_stderr_warning=True, + ) + assert re.search( + r"WARNING: Previously-downloaded file.*has bad hash", result.stderr + ) + # This is the correct hash for idna==2.10. 
+ assert ( + hash_file(str(idna_wheel))[0].hexdigest() + == "b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" + ) diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index b24b27edcc6..b7af974ea61 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -88,11 +88,49 @@ def test_basic_freeze(script: PipTestEnvironment) -> None: def test_freeze_with_pip(script: PipTestEnvironment) -> None: - """Test pip shows itself""" + """Test that pip shows itself only when --all is used""" + result = script.pip("freeze") + assert "pip==" not in result.stdout result = script.pip("freeze", "--all") assert "pip==" in result.stdout +def test_freeze_with_setuptools(script: PipTestEnvironment) -> None: + """ + Test that pip shows setuptools only when --all is used + or _should_suppress_build_backends() returns false + """ + + result = script.pip("freeze", "--all") + assert "setuptools==" in result.stdout + + (script.site_packages_path / "mock.pth").write_text("import mock\n") + + (script.site_packages_path / "mock.py").write_text( + textwrap.dedent( + """\ + import pip._internal.commands.freeze as freeze + freeze._should_suppress_build_backends = lambda: False + """ + ) + ) + + result = script.pip("freeze") + assert "setuptools==" in result.stdout + + (script.site_packages_path / "mock.py").write_text( + textwrap.dedent( + """\ + import pip._internal.commands.freeze as freeze + freeze._should_suppress_build_backends = lambda: True + """ + ) + ) + + result = script.pip("freeze") + assert "setuptools==" not in result.stdout + + def test_exclude_and_normalization(script: PipTestEnvironment, tmpdir: Path) -> None: req_path = wheel.make_wheel(name="Normalizable_Name", version="1.0").save_to_dir( tmpdir @@ -128,13 +166,11 @@ def fake_install(pkgname: str, dest: str) -> None: with open(egg_info_path, "w") as egg_info_file: egg_info_file.write( textwrap.dedent( - """\ + f"""\ Metadata-Version: 1.0 - Name: {} + Name: {pkgname} Version: 1.0 - """.format( - pkgname - ) + """ ) ) @@ -183,12 +219,10 @@ def test_freeze_editable_not_vcs(script: PipTestEnvironment) -> None: # We need to apply os.path.normcase() to the path since that is what # the freeze code does. expected = textwrap.dedent( - """\ - ...# Editable install with no version control (version-pkg==0.1) - -e {} - ...""".format( - os.path.normcase(pkg_path) - ) + f"""\ + ...# Editable install with no version control (version...pkg==0.1) + -e {os.path.normcase(pkg_path)} + ...""" ) _check_output(result.stdout, expected) @@ -210,12 +244,10 @@ def test_freeze_editable_git_with_no_remote( # We need to apply os.path.normcase() to the path since that is what # the freeze code does. expected = textwrap.dedent( - """\ - ...# Editable Git install with no remote (version-pkg==0.1) - -e {} - ...""".format( - os.path.normcase(pkg_path) - ) + f"""\ + ...# Editable Git install with no remote (version...pkg==0.1) + -e {os.path.normcase(pkg_path)} + ...""" ) _check_output(result.stdout, expected) @@ -451,7 +483,7 @@ def test_freeze_git_remote(script: PipTestEnvironment) -> None: expected = os.path.normcase( textwrap.dedent( f""" - ...# Editable Git...(version-pkg...)... + ...# Editable Git...(version...pkg...)... # '{other_remote}' -e {repo_dir}... 
""" @@ -591,7 +623,7 @@ def test_freeze_nested_vcs( --extra-index-url http://ignore --find-links http://ignore --index-url http://ignore - --use-feature 2020-resolver + --use-feature resolvelib """ ) @@ -615,9 +647,9 @@ def test_freeze_with_requirement_option_file_url_egg_not_installed( expect_stderr=True, ) expected_err = ( - "WARNING: Requirement file [requirements.txt] contains {}, " + f"WARNING: Requirement file [requirements.txt] contains {url}, " "but package 'Does.Not-Exist' is not installed\n" - ).format(url) + ) if deprecated_python: assert expected_err in result.stderr else: diff --git a/tests/functional/test_hash.py b/tests/functional/test_hash.py index 0422f73ffa5..cf993b6feb7 100644 --- a/tests/functional/test_hash.py +++ b/tests/functional/test_hash.py @@ -1,4 +1,5 @@ """Tests for the ``pip hash`` command""" + from pathlib import Path from tests.lib import PipTestEnvironment diff --git a/tests/functional/test_help.py b/tests/functional/test_help.py index dba41af5f79..75414214a93 100644 --- a/tests/functional/test_help.py +++ b/tests/functional/test_help.py @@ -5,8 +5,7 @@ from pip._internal.cli.status_codes import ERROR, SUCCESS from pip._internal.commands import commands_dict, create_command from pip._internal.exceptions import CommandError -from tests.conftest import InMemoryPip -from tests.lib import PipTestEnvironment +from tests.lib import InMemoryPip, PipTestEnvironment def test_run_method_should_return_success_when_finds_command_name() -> None: @@ -102,8 +101,8 @@ def test_help_commands_equally_functional(in_memory_pip: InMemoryPip) -> None: results = list(map(in_memory_pip.pip, ("help", "--help"))) results.append(in_memory_pip.pip()) - out = map(lambda x: x.stdout, results) - ret = map(lambda x: x.returncode, results) + out = (x.stdout for x in results) + ret = (x.returncode for x in results) msg = '"pip --help" != "pip help" != "pip"' assert len(set(out)) == 1, "output of: " + msg diff --git a/tests/functional/test_inspect.py b/tests/functional/test_inspect.py index c9f43134624..fc3aab5d495 100644 --- a/tests/functional/test_inspect.py +++ b/tests/functional/test_inspect.py @@ -2,11 +2,10 @@ import pytest -from tests.conftest import ScriptFactory -from tests.lib import PipTestEnvironment, TestData +from tests.lib import PipTestEnvironment, ScriptFactory, TestData -@pytest.fixture(scope="session") +@pytest.fixture def simple_script( tmpdir_factory: pytest.TempPathFactory, script_factory: ScriptFactory, diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 63712827479..b65212f929c 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -7,7 +7,7 @@ import textwrap from os.path import curdir, join, pardir from pathlib import Path -from typing import Dict, List, Tuple +from typing import Dict, Iterable, List, Optional, Tuple import pytest @@ -15,11 +15,12 @@ from pip._internal.models.index import PyPI, TestPyPI from pip._internal.utils.misc import rmtree from pip._internal.utils.urls import path_to_url -from tests.conftest import CertFactory from tests.lib import ( + CertFactory, PipTestEnvironment, ResolverVariant, TestData, + TestPipResult, _create_svn_repo, _create_test_package, create_basic_wheel_for_package, @@ -105,10 +106,10 @@ def test_pep518_refuses_conflicting_requires( assert ( result.returncode != 0 and ( - "Some build dependencies for {url} conflict " + f"Some build dependencies for {project_dir.as_uri()} conflict " "with PEP 517/518 supported " "requirements: setuptools==1.0 is incompatible 
with " - "setuptools>=40.8.0.".format(url=project_dir.as_uri()) + "setuptools>=40.8.0." ) in result.stderr ), str(result) @@ -357,7 +358,7 @@ def test_basic_install_editable_from_svn(script: PipTestEnvironment) -> None: checkout_path = _create_test_package(script.scratch_path) repo_url = _create_svn_repo(script.scratch_path, checkout_path) result = script.pip("install", "-e", "svn+" + repo_url + "#egg=version-pkg") - result.assert_installed("version-pkg", with_files=[".svn"]) + result.assert_installed("version_pkg", with_files=[".svn"]) def _test_install_editable_from_git(script: PipTestEnvironment) -> None: @@ -594,8 +595,8 @@ def test_hashed_install_success( with requirements_file( "simple2==1.0 --hash=sha256:9336af72ca661e6336eb87bc7de3e8844d853e" "3848c2b9bbd2e8bf01db88c2c7\n" - "{simple} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6c" - "a016b42d2e6ce53619b653".format(simple=file_url), + f"{file_url} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6c" + "a016b42d2e6ce53619b653", tmpdir, ) as reqs_file: script.pip_install_local("-r", reqs_file.resolve()) @@ -848,14 +849,18 @@ def test_editable_install__local_dir_no_setup_py( ) +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="Setuptools<64 does not support Python 3.12+", +) @pytest.mark.network -def test_editable_install__local_dir_no_setup_py_with_pyproject( +def test_editable_install_legacy__local_dir_no_setup_py_with_pyproject( script: PipTestEnvironment, ) -> None: """ - Test installing in editable mode from a local directory with no setup.py - but that does have pyproject.toml with a build backend that does not support - the build_editable hook. + Test installing in legacy editable mode from a local directory with no + setup.py but that does have pyproject.toml with a build backend that does + not support the build_editable hook. 
""" local_dir = script.scratch_path.joinpath("temp") local_dir.mkdir() @@ -1157,7 +1162,7 @@ def test_install_nonlocal_compatible_wheel( "--find-links", data.find_links, "--only-binary=:all:", - "--python", + "--python-version", "3", "--platform", "fakeplat", @@ -1177,7 +1182,7 @@ def test_install_nonlocal_compatible_wheel( "--find-links", data.find_links, "--only-binary=:all:", - "--python", + "--python-version", "3", "--platform", "fakeplat", @@ -1204,9 +1209,9 @@ def test_install_nonlocal_compatible_wheel_path( "--no-index", "--only-binary=:all:", Path(data.packages) / "simplewheel-2.0-py3-fakeabi-fakeplat.whl", - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert result.returncode == ERROR else: assert result.returncode == SUCCESS @@ -1383,8 +1388,14 @@ def test_install_editable_with_prefix_setup_py(script: PipTestEnvironment) -> No _test_install_editable_with_prefix(script, {"setup.py": setup_py}) +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="Setuptools<64 does not support Python 3.12+", +) @pytest.mark.network -def test_install_editable_with_prefix_setup_cfg(script: PipTestEnvironment) -> None: +def test_install_editable_legacy_with_prefix_setup_cfg( + script: PipTestEnvironment, +) -> None: setup_cfg = """[metadata] name = pkga version = 0.1 @@ -1720,11 +1731,11 @@ def test_install_builds_wheels(script: PipTestEnvironment, data: TestData) -> No assert "Building wheel for wheelb" in str(res), str(res) assert "Failed to build wheelbroken" in str(res), str(res) # Wheels are built for local directories, but not cached. - assert "Building wheel for requir" in str(res), str(res) + assert "Building wheel for require" in str(res), str(res) # into the cache assert wheels != [], str(res) assert wheels == [ - "Upper-2.0-py{}-none-any.whl".format(sys.version_info[0]), + f"Upper-2.0-py{sys.version_info[0]}-none-any.whl", ] @@ -1743,7 +1754,7 @@ def test_install_no_binary_builds_wheels( ) # Wheels are built for all requirements assert "Building wheel for wheelb" in str(res), str(res) - assert "Building wheel for requir" in str(res), str(res) + assert "Building wheel for require" in str(res), str(res) assert "Building wheel for upper" in str(res), str(res) # Wheelbroken failed to build assert "Failed to build wheelbroken" in str(res), str(res) @@ -1814,14 +1825,14 @@ def test_install_editable_with_wrong_egg_name( "install", "--editable", f"file://{pkga_path}#egg=pkgb", - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) assert ( "Generating metadata for package pkgb produced metadata " "for project name pkga. Fix your #egg=pkgb " "fragments." ) in result.stderr - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert "has inconsistent" in result.stdout, str(result) else: assert "Successfully installed pkga" in str(result), str(result) @@ -2231,6 +2242,33 @@ def test_install_yanked_file_and_print_warning( assert "Successfully installed simple-3.0\n" in result.stdout, str(result) +def test_yanked_version_missing_from_availble_versions_error_message( + script: PipTestEnvironment, data: TestData +) -> None: + """ + Test yanked version is missing from available versions error message. + + Yanked files are always ignored, unless they are the only file that + matches a version specifier that "pins" to an exact version (PEP 592). 
+ """ + result = script.pip( + "install", + "simple==", + "--index-url", + data.index_url("yanked"), + expect_error=True, + ) + # the yanked version (3.0) is filtered out from the output: + expected_warning = ( + "Could not find a version that satisfies the requirement simple== " + "(from versions: 1.0, 2.0)" + ) + assert expected_warning in result.stderr, str(result) + # and mentioned in a separate warning: + expected_warning = "Ignored the following yanked versions: 3.0" + assert expected_warning in result.stderr, str(result) + + def test_error_all_yanked_files_and_no_pin( script: PipTestEnvironment, data: TestData ) -> None: @@ -2251,10 +2289,6 @@ def test_error_all_yanked_files_and_no_pin( ), str(result) -@pytest.mark.skipif( - sys.platform == "linux" and sys.version_info < (3, 8), - reason="Custom SSL certification not running well in CI", -) @pytest.mark.parametrize( "install_args", [ @@ -2349,7 +2383,7 @@ def test_install_verify_package_name_normalization( assert "Successfully installed simple-package" in result.stdout result = script.pip("install", package_name) - assert "Requirement already satisfied: {}".format(package_name) in result.stdout + assert f"Requirement already satisfied: {package_name}" in result.stdout def test_install_logs_pip_version_in_debug( @@ -2361,14 +2395,68 @@ def test_install_logs_pip_version_in_debug( assert_re_match(pattern, result.stdout) -def test_install_dry_run(script: PipTestEnvironment, data: TestData) -> None: - """Test that pip install --dry-run logs what it would install.""" - result = script.pip( - "install", "--dry-run", "--find-links", data.find_links, "simple" +def install_find_links( + script: PipTestEnvironment, + data: TestData, + args: Iterable[str], + *, + dry_run: bool, + target_dir: Optional[Path], +) -> TestPipResult: + return script.pip( + "install", + *( + ( + "--target", + str(target_dir), + ) + if target_dir is not None + else () + ), + *(("--dry-run",) if dry_run else ()), + "--no-index", + "--find-links", + data.find_links, + *args, + ) + + +@pytest.mark.parametrize( + "with_target_dir", + (True, False), +) +def test_install_dry_run_nothing_installed( + script: PipTestEnvironment, + data: TestData, + tmpdir: Path, + with_target_dir: bool, +) -> None: + """Test that pip install --dry-run logs what it would install, but doesn't actually + install anything.""" + if with_target_dir: + install_dir = tmpdir / "fake-install" + install_dir.mkdir() + else: + install_dir = None + + result = install_find_links( + script, data, ["simple"], dry_run=True, target_dir=install_dir ) assert "Would install simple-3.0" in result.stdout assert "Successfully installed" not in result.stdout + script.assert_not_installed("simple") + if with_target_dir: + assert not os.listdir(install_dir) + + # Ensure that the same install command would normally have worked if not for + # --dry-run. 
+ install_find_links(script, data, ["simple"], dry_run=False, target_dir=install_dir) + if with_target_dir: + assert os.listdir(install_dir) + else: + script.assert_installed(simple="3.0") + @pytest.mark.skipif( sys.version_info < (3, 11), @@ -2449,6 +2537,40 @@ def test_install_pip_prints_req_chain_local(script: PipTestEnvironment) -> None: ) +def test_install_dist_restriction_without_target(script: PipTestEnvironment) -> None: + result = script.pip( + "install", "--python-version=3.1", "--only-binary=:all:", expect_error=True + ) + assert ( + "Can not use any platform or abi specific options unless installing " + "via '--target'" in result.stderr + ), str(result) + + +def test_install_dist_restriction_dry_run_doesnt_require_target( + script: PipTestEnvironment, +) -> None: + create_basic_wheel_for_package( + script, + "base", + "0.1.0", + ) + + result = script.pip( + "install", + "--python-version=3.1", + "--only-binary=:all:", + "--dry-run", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, + "base", + ) + + assert not result.stderr, str(result) + + @pytest.mark.network def test_install_pip_prints_req_chain_pypi(script: PipTestEnvironment) -> None: """ diff --git a/tests/functional/test_install_compat.py b/tests/functional/test_install_compat.py index ae27ebd536e..7ecacdb8e7a 100644 --- a/tests/functional/test_install_compat.py +++ b/tests/functional/test_install_compat.py @@ -2,13 +2,18 @@ Tests for compatibility workarounds. """ + import os from pathlib import Path import pytest -from tests.lib import pyversion # noqa: F401 -from tests.lib import PipTestEnvironment, TestData, assert_all_changes +from tests.lib import ( + PipTestEnvironment, + TestData, + assert_all_changes, + pyversion, +) @pytest.mark.network diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py index 9f8a8067787..9374fade121 100644 --- a/tests/functional/test_install_config.py +++ b/tests/functional/test_install_config.py @@ -1,6 +1,5 @@ import os import ssl -import sys import tempfile import textwrap from pathlib import Path @@ -8,9 +7,9 @@ import pytest -from tests.conftest import CertFactory, MockServer, ScriptFactory -from tests.lib import PipTestEnvironment, TestData +from tests.lib import CertFactory, PipTestEnvironment, ScriptFactory, TestData from tests.lib.server import ( + MockServer, authorization_response, file_response, make_mock_server, @@ -125,9 +124,6 @@ def test_command_line_append_flags( "Fetching project page and analyzing links: https://test.pypi.org" in result.stdout ) - assert ( - f"Skipping link: not a file: {data.find_links}" in result.stdout - ), f"stdout: {result.stdout}" @pytest.mark.network @@ -151,9 +147,6 @@ def test_command_line_appends_correctly( "Fetching project page and analyzing links: https://test.pypi.org" in result.stdout ), result.stdout - assert ( - f"Skipping link: not a file: {data.find_links}" in result.stdout - ), f"stdout: {result.stdout}" def test_config_file_override_stack( @@ -184,12 +177,10 @@ def test_config_file_override_stack( config_file.write_text( textwrap.dedent( - """\ + f"""\ [global] - index-url = {}/simple1 - """.format( - base_address - ) + index-url = {base_address}/simple1 + """ ) ) script.pip("install", "-vvv", "INITools", expect_error=True) @@ -197,14 +188,12 @@ def test_config_file_override_stack( config_file.write_text( textwrap.dedent( - """\ + f"""\ [global] - index-url = {address}/simple1 + index-url = {base_address}/simple1 [install] - index-url = {address}/simple2 - """.format( 
- address=base_address - ) + index-url = {base_address}/simple2 + """ ) ) script.pip("install", "-vvv", "INITools", expect_error=True) @@ -270,10 +259,6 @@ def test_install_no_binary_via_config_disables_cached_wheels( assert "Building wheel for upper" in str(res), str(res) -@pytest.mark.skipif( - sys.platform == "linux" and sys.version_info < (3, 8), - reason="Custom SSL certification not running well in CI", -) def test_prompt_for_authentication( script: PipTestEnvironment, data: TestData, cert_factory: CertFactory ) -> None: @@ -314,10 +299,6 @@ def test_prompt_for_authentication( assert f"User for {server.host}:{server.port}" in result.stdout, str(result) -@pytest.mark.skipif( - sys.platform == "linux" and sys.version_info < (3, 8), - reason="Custom SSL certification not running well in CI", -) def test_do_not_prompt_for_authentication( script: PipTestEnvironment, data: TestData, cert_factory: CertFactory ) -> None: @@ -408,10 +389,6 @@ def flags( return flags -@pytest.mark.skipif( - sys.platform == "linux" and sys.version_info < (3, 8), - reason="Custom SSL certification not running well in CI", -) def test_prompt_for_keyring_if_needed( data: TestData, cert_factory: CertFactory, diff --git a/tests/functional/test_install_extras.py b/tests/functional/test_install_extras.py index c6cef00fa9c..1dd67be0a0c 100644 --- a/tests/functional/test_install_extras.py +++ b/tests/functional/test_install_extras.py @@ -4,7 +4,12 @@ import pytest -from tests.lib import PipTestEnvironment, ResolverVariant, TestData +from tests.lib import ( + PipTestEnvironment, + ResolverVariant, + TestData, + create_basic_wheel_for_package, +) @pytest.mark.network @@ -150,25 +155,42 @@ def test_install_fails_if_extra_at_end( assert "Extras after version" in result.stderr -def test_install_special_extra(script: PipTestEnvironment) -> None: - # Check that uppercase letters and '-' are dealt with - # make a dummy project - pkga_path = script.scratch_path / "pkga" - pkga_path.mkdir() - pkga_path.joinpath("setup.py").write_text( - textwrap.dedent( - """ - from setuptools import setup - setup(name='pkga', - version='0.1', - extras_require={'Hop_hOp-hoP': ['missing_pkg']}, - ) - """ - ) +@pytest.mark.parametrize( + "specified_extra, requested_extra", + [ + ("Hop_hOp-hoP", "Hop_hOp-hoP"), + pytest.param( + "Hop_hOp-hoP", + "hop-hop-hop", + marks=pytest.mark.xfail( + reason=( + "matching a normalized extra request against an" + "unnormalized extra in metadata requires PEP 685 support " + "in packaging (see pypa/pip#11445)." 
+ ), + ), + ), + ("hop-hop-hop", "Hop_hOp-hoP"), + ], +) +def test_install_special_extra( + script: PipTestEnvironment, + specified_extra: str, + requested_extra: str, +) -> None: + """Check extra normalization is implemented according to specification.""" + pkga_path = create_basic_wheel_for_package( + script, + name="pkga", + version="0.1", + extras={specified_extra: ["missing_pkg"]}, ) result = script.pip( - "install", "--no-index", f"{pkga_path}[Hop_hOp-hoP]", expect_error=True + "install", + "--no-index", + f"pkga[{requested_extra}] @ {pkga_path.as_uri()}", + expect_error=True, ) assert ( "Could not find a version that satisfies the requirement missing_pkg" @@ -220,6 +242,23 @@ def test_install_extra_merging( expect_error=(fails_on_legacy and resolver_variant == "legacy"), ) - if not fails_on_legacy or resolver_variant == "2020-resolver": + if not fails_on_legacy or resolver_variant == "resolvelib": expected = f"Successfully installed pkga-0.1 simple-{simple_version}" assert expected in result.stdout + + +def test_install_extras(script: PipTestEnvironment) -> None: + create_basic_wheel_for_package(script, "a", "1", depends=["b", "dep[x-y]"]) + create_basic_wheel_for_package(script, "b", "1", depends=["dep[x_y]"]) + create_basic_wheel_for_package(script, "dep", "1", extras={"x-y": ["meh"]}) + create_basic_wheel_for_package(script, "meh", "1") + + script.pip( + "install", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, + "a", + ) + script.assert_installed(a="1", b="1", dep="1", meh="1") diff --git a/tests/functional/test_install_index.py b/tests/functional/test_install_index.py index b73e28f4794..72b0b9db7bd 100644 --- a/tests/functional/test_install_index.py +++ b/tests/functional/test_install_index.py @@ -41,13 +41,11 @@ def test_find_links_requirements_file_relative_path( """Test find-links as a relative path to a reqs file.""" script.scratch_path.joinpath("test-req.txt").write_text( textwrap.dedent( - """ + f""" --no-index - --find-links={} + --find-links={data.packages.as_posix()} parent==0.1 - """.format( - data.packages.as_posix() - ) + """ ) ) result = script.pip( diff --git a/tests/functional/test_install_report.py b/tests/functional/test_install_report.py index 003b29d3821..a1e7f8375d9 100644 --- a/tests/functional/test_install_report.py +++ b/tests/functional/test_install_report.py @@ -1,7 +1,7 @@ import json import textwrap from pathlib import Path -from typing import Any, Dict +from typing import Any, Dict, Tuple import pytest from packaging.utils import canonicalize_name @@ -64,14 +64,79 @@ def test_install_report_dep( assert _install_dict(report)["simple"]["requested"] is False +def test_yanked_version( + script: PipTestEnvironment, data: TestData, tmp_path: Path +) -> None: + """ + Test is_yanked is True when explicitly requesting a yanked package. + Yanked files are always ignored, unless they are the only file that + matches a version specifier that "pins" to an exact version (PEP 592). 
+ """ + report_path = tmp_path / "report.json" + script.pip( + "install", + "simple==3.0", + "--index-url", + data.index_url("yanked"), + "--dry-run", + "--report", + str(report_path), + allow_stderr_warning=True, + ) + report = json.loads(report_path.read_text()) + simple_report = _install_dict(report)["simple"] + assert simple_report["requested"] is True + assert simple_report["is_direct"] is False + assert simple_report["is_yanked"] is True + assert simple_report["metadata"]["version"] == "3.0" + + +def test_skipped_yanked_version( + script: PipTestEnvironment, data: TestData, tmp_path: Path +) -> None: + """ + Test is_yanked is False when not explicitly requesting a yanked package. + Yanked files are always ignored, unless they are the only file that + matches a version specifier that "pins" to an exact version (PEP 592). + """ + report_path = tmp_path / "report.json" + script.pip( + "install", + "simple", + "--index-url", + data.index_url("yanked"), + "--dry-run", + "--report", + str(report_path), + ) + report = json.loads(report_path.read_text()) + simple_report = _install_dict(report)["simple"] + assert simple_report["requested"] is True + assert simple_report["is_direct"] is False + assert simple_report["is_yanked"] is False + assert simple_report["metadata"]["version"] == "2.0" + + +@pytest.mark.parametrize( + "specifiers", + [ + # result should be the same regardless of the method and order in which + # extras are specified + ("Paste[openid]==1.7.5.1",), + ("Paste==1.7.5.1", "Paste[openid]==1.7.5.1"), + ("Paste[openid]==1.7.5.1", "Paste==1.7.5.1"), + ], +) @pytest.mark.network -def test_install_report_index(script: PipTestEnvironment, tmp_path: Path) -> None: +def test_install_report_index( + script: PipTestEnvironment, tmp_path: Path, specifiers: Tuple[str, ...] 
+) -> None: """Test report for sdist obtained from index.""" report_path = tmp_path / "report.json" script.pip( "install", "--dry-run", - "Paste[openid]==1.7.5.1", + *specifiers, "--report", str(report_path), ) @@ -93,6 +158,26 @@ def test_install_report_index(script: PipTestEnvironment, tmp_path: Path) -> Non assert "requires_dist" in paste_report["metadata"] +@pytest.mark.network +def test_install_report_index_multiple_extras( + script: PipTestEnvironment, tmp_path: Path +) -> None: + """Test report for sdist obtained from index, with multiple extras requested.""" + report_path = tmp_path / "report.json" + script.pip( + "install", + "--dry-run", + "Paste[openid]", + "Paste[subprocess]", + "--report", + str(report_path), + ) + report = json.loads(report_path.read_text()) + install_dict = _install_dict(report) + assert "paste" in install_dict + assert install_dict["paste"]["requested_extras"] == ["openid", "subprocess"] + + @pytest.mark.network def test_install_report_direct_archive( script: PipTestEnvironment, tmp_path: Path, shared_data: TestData diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index 96cff0dc5da..1db749b145c 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -2,7 +2,7 @@ import os import textwrap from pathlib import Path -from typing import TYPE_CHECKING, Any +from typing import Any, Protocol import pytest @@ -18,11 +18,6 @@ ) from tests.lib.local_repos import local_checkout -if TYPE_CHECKING: - from typing import Protocol -else: - Protocol = object - class ArgRecordingSdist: def __init__(self, sdist_path: Path, args_path: Path) -> None: @@ -34,8 +29,7 @@ def args(self) -> Any: class ArgRecordingSdistMaker(Protocol): - def __call__(self, name: str, **kwargs: Any) -> ArgRecordingSdist: - ... + def __call__(self, name: str, **kwargs: Any) -> ArgRecordingSdist: ... 
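
The hunk above drops the `TYPE_CHECKING` fallback, imports `Protocol` directly from `typing` (available on Python 3.8+), and collapses the stub body onto the `def` line. As a rough standalone sketch of the pattern (hypothetical names, not pip's code): any callable with a matching signature satisfies such a protocol structurally, with no inheritance required.

# Minimal sketch of a callable Protocol, assuming hypothetical names.
from pathlib import Path
from typing import Any, Protocol


class SdistMaker(Protocol):
    def __call__(self, name: str, **kwargs: Any) -> Path: ...


def make_sdist(name: str, **kwargs: Any) -> Path:
    # Stand-in for a fixture-provided sdist factory.
    return Path(f"{name}-1.0.tar.gz")


maker: SdistMaker = make_sdist  # accepted by a type checker: structural match
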
@pytest.fixture() @@ -95,7 +89,7 @@ def test_requirements_file(script: PipTestEnvironment) -> None: result.did_create(script.site_packages / "INITools-0.2.dist-info") result.did_create(script.site_packages / "initools") assert result.files_created[script.site_packages / other_lib_name].dir - fn = "{}-{}.dist-info".format(other_lib_name, other_lib_version) + fn = f"{other_lib_name}-{other_lib_version}.dist-info" assert result.files_created[script.site_packages / fn].dir @@ -260,13 +254,13 @@ def test_respect_order_in_requirements_file( assert ( "parent" in downloaded[0] - ), 'First download should be "parent" but was "{}"'.format(downloaded[0]) + ), f'First download should be "parent" but was "{downloaded[0]}"' assert ( "child" in downloaded[1] - ), 'Second download should be "child" but was "{}"'.format(downloaded[1]) + ), f'Second download should be "child" but was "{downloaded[1]}"' assert ( "simple" in downloaded[2] - ), 'Third download should be "simple" but was "{}"'.format(downloaded[2]) + ), f'Third download should be "simple" but was "{downloaded[2]}"' def test_install_local_editable_with_extras( @@ -300,7 +294,7 @@ def test_install_local_editable_with_subdirectory(script: PipTestEnvironment) -> ), ) - result.assert_installed("version-subpkg", sub_dir="version_subdir") + result.assert_installed("version_subpkg", sub_dir="version_subdir") @pytest.mark.network @@ -392,11 +386,8 @@ def test_constraints_local_editable_install_causes_error( to_install, expect_error=True, ) - if resolver_variant == "legacy-resolver": - assert "Could not satisfy constraints" in result.stderr, str(result) - else: - # Because singlemodule only has 0.0.1 available. - assert "Cannot install singlemodule 0.0.1" in result.stderr, str(result) + # Because singlemodule only has 0.0.1 available. + assert "Cannot install singlemodule 0.0.1" in result.stderr, str(result) @pytest.mark.network @@ -426,11 +417,8 @@ def test_constraints_local_install_causes_error( to_install, expect_error=True, ) - if resolver_variant == "legacy-resolver": - assert "Could not satisfy constraints" in result.stderr, str(result) - else: - # Because singlemodule only has 0.0.1 available. - assert "Cannot install singlemodule 0.0.1" in result.stderr, str(result) + # Because singlemodule only has 0.0.1 available. 
+ assert "Cannot install singlemodule 0.0.1" in result.stderr, str(result) def test_constraints_constrain_to_local_editable( @@ -451,9 +439,9 @@ def test_constraints_constrain_to_local_editable( script.scratch_path / "constraints.txt", "singlemodule", allow_stderr_warning=True, - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert "Editable requirements are not allowed as constraints" in result.stderr else: assert "Running setup.py develop for singlemodule" in result.stdout @@ -551,9 +539,9 @@ def test_install_with_extras_from_constraints( script.scratch_path / "constraints.txt", "LocalExtras", allow_stderr_warning=True, - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert "Constraints cannot have extras" in result.stderr else: result.did_create(script.site_packages / "simple") @@ -589,9 +577,9 @@ def test_install_with_extras_joined( script.scratch_path / "constraints.txt", "LocalExtras[baz]", allow_stderr_warning=True, - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert "Constraints cannot have extras" in result.stderr else: result.did_create(script.site_packages / "simple") @@ -610,9 +598,9 @@ def test_install_with_extras_editable_joined( script.scratch_path / "constraints.txt", "LocalExtras[baz]", allow_stderr_warning=True, - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert "Editable requirements are not allowed as constraints" in result.stderr else: result.did_create(script.site_packages / "simple") @@ -654,9 +642,9 @@ def test_install_distribution_union_with_constraints( script.scratch_path / "constraints.txt", f"{to_install}[baz]", allow_stderr_warning=True, - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": msg = "Unnamed requirements are not allowed as constraints" assert msg in result.stderr else: @@ -674,12 +662,12 @@ def test_install_distribution_union_with_versions( result = script.pip_install_local( f"{to_install_001}[bar]", f"{to_install_002}[baz]", - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": - assert "Cannot install localextras[bar]" in result.stderr - assert ("localextras[bar] 0.0.1 depends on localextras 0.0.1") in result.stdout - assert ("localextras[baz] 0.0.2 depends on localextras 0.0.2") in result.stdout + if resolver_variant == "resolvelib": + assert "Cannot install localextras" in result.stderr + assert ("The user requested localextras 0.0.1") in result.stdout + assert ("The user requested localextras 0.0.2") in result.stdout else: assert ( "Successfully installed LocalExtras-0.0.1 simple-3.0 singlemodule-0.0.1" diff --git a/tests/functional/test_install_upgrade.py b/tests/functional/test_install_upgrade.py index fc61d70bc5e..6556fcdf599 100644 --- a/tests/functional/test_install_upgrade.py +++ b/tests/functional/test_install_upgrade.py @@ -6,8 +6,13 @@ import pytest 
-from tests.lib import pyversion # noqa: F401 -from tests.lib import PipTestEnvironment, ResolverVariant, TestData, assert_all_changes +from tests.lib import ( + PipTestEnvironment, + ResolverVariant, + TestData, + assert_all_changes, + pyversion, # noqa: F401 +) from tests.lib.local_repos import local_checkout from tests.lib.wheel import make_wheel @@ -167,7 +172,7 @@ def test_upgrade_with_newest_already_installed( "install", "--upgrade", "-f", data.find_links, "--no-index", "simple" ) assert not result.files_created, "simple upgraded when it should not have" - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": msg = "Requirement already satisfied" else: msg = "already up-to-date" diff --git a/tests/functional/test_install_user.py b/tests/functional/test_install_user.py index 9bdadb94203..604072d765f 100644 --- a/tests/functional/test_install_user.py +++ b/tests/functional/test_install_user.py @@ -1,6 +1,7 @@ """ tests specific to "pip install --user" """ + import os import textwrap from os.path import curdir, isdir, isfile @@ -8,12 +9,12 @@ import pytest -from tests.lib import pyversion # noqa: F401 from tests.lib import ( PipTestEnvironment, TestData, create_basic_wheel_for_package, need_svn, + pyversion, # noqa: F401 ) from tests.lib.local_repos import local_checkout from tests.lib.venv import VirtualEnvironment diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py index 971526c5181..e59b269a61f 100644 --- a/tests/functional/test_install_vcs_git.py +++ b/tests/functional/test_install_vcs_git.py @@ -3,11 +3,11 @@ import pytest -from tests.lib import pyversion # noqa: F401 from tests.lib import ( PipTestEnvironment, _change_test_package_version, _create_test_package, + pyversion, # noqa: F401 ) from tests.lib.git_submodule_helpers import ( _change_test_package_submodule, @@ -449,7 +449,7 @@ def test_git_with_ambiguous_revs(script: PipTestEnvironment) -> None: assert "Could not find a tag or branch" not in result.stdout # it is 'version-pkg' instead of 'version_pkg' because # egg-link name is version-pkg.egg-link because it is a single .py module - result.assert_installed("version-pkg", with_files=[".git"]) + result.assert_installed("version_pkg", with_files=[".git"]) def test_editable__no_revision(script: PipTestEnvironment) -> None: diff --git a/tests/functional/test_install_wheel.py b/tests/functional/test_install_wheel.py index 4221ae76ae2..7e7aeaf7a81 100644 --- a/tests/functional/test_install_wheel.py +++ b/tests/functional/test_install_wheel.py @@ -169,9 +169,9 @@ def get_header_scheme_path_for_script( ) -> Path: command = ( "from pip._internal.locations import get_scheme;" - "scheme = get_scheme({!r});" + f"scheme = get_scheme({dist_name!r});" "print(scheme.headers);" - ).format(dist_name) + ) result = script.run("python", "-c", command).stdout return Path(result.strip()) diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index bd45f82df7f..5164c1d5c39 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -5,9 +5,9 @@ import pytest from pip._internal.models.direct_url import DirectUrl, DirInfo -from tests.conftest import ScriptFactory from tests.lib import ( PipTestEnvironment, + ScriptFactory, TestData, _create_test_package, create_test_package_with_setup, @@ -273,25 +273,19 @@ def test_outdated_flag(script: PipTestEnvironment, data: TestData) -> None: "latest_version": "3.0", "latest_filetype": "sdist", } in json_output - assert ( - dict( - 
name="simplewheel", - version="1.0", - latest_version="2.0", - latest_filetype="wheel", - ) - in json_output - ) - assert ( - dict( - name="pip-test-package", - version="0.1", - latest_version="0.1.1", - latest_filetype="sdist", - editable_project_location="", - ) - in json_output - ) + assert { + "name": "simplewheel", + "version": "1.0", + "latest_version": "2.0", + "latest_filetype": "wheel", + } in json_output + assert { + "name": "pip-test-package", + "version": "0.1", + "latest_version": "0.1.1", + "latest_filetype": "sdist", + "editable_project_location": "", + } in json_output assert "simple2" not in {p["name"] for p in json_output} @@ -588,7 +582,7 @@ def test_outdated_formats(script: PipTestEnvironment, data: TestData) -> None: expect_error=True, ) assert ( - "List format 'freeze' can not be used with the --outdated option." + "List format 'freeze' cannot be used with the --outdated option." in result.stderr ) @@ -601,8 +595,7 @@ def test_outdated_formats(script: PipTestEnvironment, data: TestData) -> None: "--outdated", "--format=json", ) - data = json.loads(result.stdout) - assert data == [ + assert json.loads(result.stdout) == [ { "name": "simple", "version": "1.0", diff --git a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py index fc52ab9c8d8..774311a38e8 100644 --- a/tests/functional/test_new_resolver.py +++ b/tests/functional/test_new_resolver.py @@ -2,10 +2,12 @@ import pathlib import sys import textwrap -from typing import TYPE_CHECKING, Callable, Dict, List, Tuple +from typing import TYPE_CHECKING, Callable, Dict, List, Protocol, Tuple import pytest +from packaging.utils import canonicalize_name +from tests.conftest import ScriptFactory from tests.lib import ( PipTestEnvironment, create_basic_sdist_for_package, @@ -13,11 +15,9 @@ create_test_package_with_setup, ) from tests.lib.direct_url import get_created_direct_url +from tests.lib.venv import VirtualEnvironment from tests.lib.wheel import make_wheel -if TYPE_CHECKING: - from typing import Protocol - MakeFakeWheel = Callable[[str, str, str], pathlib.Path] @@ -25,9 +25,13 @@ def assert_editable(script: PipTestEnvironment, *args: str) -> None: # This simply checks whether all of the listed packages have a # corresponding .egg-link file installed. # TODO: Implement a more rigorous way to test for editable installations. - egg_links = {f"{arg}.egg-link" for arg in args} - assert egg_links <= set( - os.listdir(script.site_packages_path) + egg_links = {f"{canonicalize_name(arg)}.egg-link" for arg in args} + actual_egg_links = { + f"{canonicalize_name(p.stem)}.egg-link" + for p in script.site_packages_path.glob("*.egg-link") + } + assert ( + egg_links <= actual_egg_links ), f"{args!r} not all found in {script.site_packages_path!r}" @@ -926,8 +930,7 @@ def __call__( version: str, requires: List[str], extras: Dict[str, List[str]], - ) -> str: - ... + ) -> str: ... 
def _local_with_setup( @@ -1183,7 +1186,7 @@ def test_new_resolver_presents_messages_when_backtracking_a_lot( for index in range(1, N + 1): A_version = f"{index}.0.0" B_version = f"{index}.0.0" - C_version = "{index_minus_one}.0.0".format(index_minus_one=index - 1) + C_version = f"{index - 1}.0.0" depends = ["B == " + B_version] if index != 1: @@ -1845,7 +1848,7 @@ def test_new_resolver_succeeds_on_matching_constraint_and_requirement( script.assert_installed(test_pkg="0.1.0") if editable: - assert_editable(script, "test-pkg") + assert_editable(script, "test_pkg") def test_new_resolver_applies_url_constraint_to_dep(script: PipTestEnvironment) -> None: @@ -2272,6 +2275,103 @@ def test_new_resolver_dont_backtrack_on_extra_if_base_constrained( script.assert_installed(pkg="1.0", dep="1.0") +@pytest.mark.parametrize("swap_order", (True, False)) +@pytest.mark.parametrize("two_extras", (True, False)) +def test_new_resolver_dont_backtrack_on_extra_if_base_constrained_in_requirement( + script: PipTestEnvironment, swap_order: bool, two_extras: bool +) -> None: + """ + Verify that a requirement with a constraint on a package (either on the base + on the base with an extra) causes the resolver to infer the same constraint for + any (other) extras with the same base. + + :param swap_order: swap the order the install specifiers appear in + :param two_extras: also add an extra for the constrained specifier + """ + create_basic_wheel_for_package(script, "dep", "1.0") + create_basic_wheel_for_package( + script, "pkg", "1.0", extras={"ext1": ["dep"], "ext2": ["dep"]} + ) + create_basic_wheel_for_package( + script, "pkg", "2.0", extras={"ext1": ["dep"], "ext2": ["dep"]} + ) + + to_install: Tuple[str, str] = ( + "pkg[ext1]", + "pkg[ext2]==1.0" if two_extras else "pkg==1.0", + ) + + result = script.pip( + "install", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, + *(to_install if not swap_order else reversed(to_install)), + ) + assert "pkg-2.0" not in result.stdout, "Should not try 2.0 due to constraint" + script.assert_installed(pkg="1.0", dep="1.0") + + +@pytest.mark.parametrize("swap_order", (True, False)) +@pytest.mark.parametrize("two_extras", (True, False)) +def test_new_resolver_dont_backtrack_on_conflicting_constraints_on_extras( + tmpdir: pathlib.Path, + virtualenv: VirtualEnvironment, + script_factory: ScriptFactory, + swap_order: bool, + two_extras: bool, +) -> None: + """ + Verify that conflicting constraints on the same package with different + extras cause the resolver to trivially reject the request rather than + trying any candidates. 
+ + :param swap_order: swap the order the install specifiers appear in + :param two_extras: also add an extra for the second specifier + """ + script: PipTestEnvironment = script_factory( + tmpdir.joinpath("workspace"), + virtualenv, + {**os.environ, "PIP_RESOLVER_DEBUG": "1"}, + ) + create_basic_wheel_for_package(script, "dep", "1.0") + create_basic_wheel_for_package( + script, "pkg", "1.0", extras={"ext1": ["dep"], "ext2": ["dep"]} + ) + create_basic_wheel_for_package( + script, "pkg", "2.0", extras={"ext1": ["dep"], "ext2": ["dep"]} + ) + + to_install: Tuple[str, str] = ( + "pkg[ext1]>1", + "pkg[ext2]==1.0" if two_extras else "pkg==1.0", + ) + + result = script.pip( + "install", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, + *(to_install if not swap_order else reversed(to_install)), + expect_error=True, + ) + assert ( + "pkg-2.0" not in result.stdout or "pkg-1.0" not in result.stdout + ), "Should only try one of 1.0, 2.0 depending on order" + assert "Reporter.starting()" in result.stdout, ( + "This should never fail unless the debug reporting format has changed," + " in which case the other assertions in this test need to be reviewed." + ) + assert ( + "Reporter.rejecting_candidate" not in result.stdout + ), "Should be able to conclude conflict before even selecting a candidate" + assert ( + "conflict is caused by" in result.stdout + ), "Resolver should be trivially able to find conflict cause" + + def test_new_resolver_respect_user_requested_if_extra_is_installed( script: PipTestEnvironment, ) -> None: @@ -2307,6 +2407,51 @@ def test_new_resolver_respect_user_requested_if_extra_is_installed( script.assert_installed(pkg3="1.0", pkg2="2.0", pkg1="1.0") +def test_new_resolver_constraint_on_link_with_extra( + script: PipTestEnvironment, +) -> None: + """ + Verify that installing works from a link with both an extra and a constraint. + """ + wheel: pathlib.Path = create_basic_wheel_for_package( + script, "pkg", "1.0", extras={"ext": []} + ) + + script.pip( + "install", + "--no-cache-dir", + # no index, no --find-links: only the explicit path + "--no-index", + f"{wheel}[ext]", + "pkg==1", + ) + script.assert_installed(pkg="1.0") + + +def test_new_resolver_constraint_on_link_with_extra_indirect( + script: PipTestEnvironment, +) -> None: + """ + Verify that installing works from a link with an extra if there is an indirect + dependency on that same package with the same extra (#12372). + """ + wheel_one: pathlib.Path = create_basic_wheel_for_package( + script, "pkg1", "1.0", extras={"ext": []} + ) + wheel_two: pathlib.Path = create_basic_wheel_for_package( + script, "pkg2", "1.0", depends=["pkg1[ext]==1.0"] + ) + + script.pip( + "install", + "--no-cache-dir", + # no index, no --find-links: only the explicit path + wheel_two, + f"{wheel_one}[ext]", + ) + script.assert_installed(pkg1="1.0", pkg2="1.0") + + def test_new_resolver_do_not_backtrack_on_build_failure( script: PipTestEnvironment, ) -> None: @@ -2347,3 +2492,31 @@ def test_new_resolver_works_when_failing_package_builds_are_disallowed( ) script.assert_installed(pkg2="1.0", pkg1="1.0") + + +@pytest.mark.parametrize("swap_order", (True, False)) +def test_new_resolver_comes_from_with_extra( + script: PipTestEnvironment, swap_order: bool +) -> None: + """ + Verify that reporting where a dependency comes from is accurate when it comes + from a package with an extra. 
+ + :param swap_order: swap the order the install specifiers appear in + """ + create_basic_wheel_for_package(script, "dep", "1.0") + create_basic_wheel_for_package(script, "pkg", "1.0", extras={"ext": ["dep"]}) + + to_install: Tuple[str, str] = ("pkg", "pkg[ext]") + + result = script.pip( + "install", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, + *(to_install if not swap_order else reversed(to_install)), + ) + assert "(from pkg[ext])" in result.stdout + assert "(from pkg)" not in result.stdout + script.assert_installed(pkg="1.0", dep="1.0") diff --git a/tests/functional/test_new_resolver_errors.py b/tests/functional/test_new_resolver_errors.py index 62304131283..5976de52e39 100644 --- a/tests/functional/test_new_resolver_errors.py +++ b/tests/functional/test_new_resolver_errors.py @@ -71,8 +71,8 @@ def test_new_resolver_conflict_constraints_file( def test_new_resolver_requires_python_error(script: PipTestEnvironment) -> None: - compatible_python = ">={0.major}.{0.minor}".format(sys.version_info) - incompatible_python = "<{0.major}.{0.minor}".format(sys.version_info) + compatible_python = f">={sys.version_info.major}.{sys.version_info.minor}" + incompatible_python = f"<{sys.version_info.major}.{sys.version_info.minor}" pkga = create_test_package_with_setup( script, @@ -99,7 +99,7 @@ def test_new_resolver_requires_python_error(script: PipTestEnvironment) -> None: def test_new_resolver_checks_requires_python_before_dependencies( script: PipTestEnvironment, ) -> None: - incompatible_python = "<{0.major}.{0.minor}".format(sys.version_info) + incompatible_python = f"<{sys.version_info.major}.{sys.version_info.minor}" pkg_dep = create_basic_wheel_for_package( script, diff --git a/tests/functional/test_new_resolver_hashes.py b/tests/functional/test_new_resolver_hashes.py index 6db2efd0e4c..5fb1f2bf799 100644 --- a/tests/functional/test_new_resolver_hashes.py +++ b/tests/functional/test_new_resolver_hashes.py @@ -24,18 +24,11 @@ def _create_find_links(script: PipTestEnvironment) -> _FindLinks: index_html = script.scratch_path / "index.html" index_html.write_text( - """ + f""" - {sdist_path.stem} - {wheel_path.stem} - """.format( - sdist_url=sdist_path.as_uri(), - sdist_hash=sdist_hash, - sdist_path=sdist_path, - wheel_url=wheel_path.as_uri(), - wheel_hash=wheel_hash, - wheel_path=wheel_path, - ).strip() + {sdist_path.stem} + {wheel_path.stem} + """.strip() ) return _FindLinks(index_html, sdist_hash, wheel_hash) @@ -96,21 +89,15 @@ def test_new_resolver_hash_intersect_from_constraint( script: PipTestEnvironment, ) -> None: find_links = _create_find_links(script) + sdist_hash = find_links.sdist_hash constraints_txt = script.scratch_path / "constraints.txt" - constraints_txt.write_text( - "base==0.1.0 --hash=sha256:{sdist_hash}".format( - sdist_hash=find_links.sdist_hash, - ), - ) + constraints_txt.write_text(f"base==0.1.0 --hash=sha256:{sdist_hash}") requirements_txt = script.scratch_path / "requirements.txt" requirements_txt.write_text( - """ - base==0.1.0 --hash=sha256:{sdist_hash} --hash=sha256:{wheel_hash} - """.format( - sdist_hash=find_links.sdist_hash, - wheel_hash=find_links.wheel_hash, - ), + f""" + base==0.1.0 --hash=sha256:{sdist_hash} --hash=sha256:{find_links.wheel_hash} + """, ) result = script.pip( @@ -200,13 +187,10 @@ def test_new_resolver_hash_intersect_empty_from_constraint( constraints_txt = script.scratch_path / "constraints.txt" constraints_txt.write_text( - """ - base==0.1.0 --hash=sha256:{sdist_hash} - base==0.1.0 --hash=sha256:{wheel_hash} - 
""".format( - sdist_hash=find_links.sdist_hash, - wheel_hash=find_links.wheel_hash, - ), + f""" + base==0.1.0 --hash=sha256:{find_links.sdist_hash} + base==0.1.0 --hash=sha256:{find_links.wheel_hash} + """, ) result = script.pip( @@ -240,19 +224,15 @@ def test_new_resolver_hash_requirement_and_url_constraint_can_succeed( requirements_txt = script.scratch_path / "requirements.txt" requirements_txt.write_text( - """ + f""" base==0.1.0 --hash=sha256:{wheel_hash} - """.format( - wheel_hash=wheel_hash, - ), + """, ) constraints_txt = script.scratch_path / "constraints.txt" - constraint_text = "base @ {wheel_url}\n".format(wheel_url=wheel_path.as_uri()) + constraint_text = f"base @ {wheel_path.as_uri()}\n" if constrain_by_hash: - constraint_text += "base==0.1.0 --hash=sha256:{wheel_hash}\n".format( - wheel_hash=wheel_hash, - ) + constraint_text += f"base==0.1.0 --hash=sha256:{wheel_hash}\n" constraints_txt.write_text(constraint_text) script.pip( @@ -280,19 +260,15 @@ def test_new_resolver_hash_requirement_and_url_constraint_can_fail( requirements_txt = script.scratch_path / "requirements.txt" requirements_txt.write_text( - """ + f""" base==0.1.0 --hash=sha256:{other_hash} - """.format( - other_hash=other_hash, - ), + """, ) constraints_txt = script.scratch_path / "constraints.txt" - constraint_text = "base @ {wheel_url}\n".format(wheel_url=wheel_path.as_uri()) + constraint_text = f"base @ {wheel_path.as_uri()}\n" if constrain_by_hash: - constraint_text += "base==0.1.0 --hash=sha256:{other_hash}\n".format( - other_hash=other_hash, - ) + constraint_text += f"base==0.1.0 --hash=sha256:{other_hash}\n" constraints_txt.write_text(constraint_text) result = script.pip( @@ -343,17 +319,12 @@ def test_new_resolver_hash_with_extras(script: PipTestEnvironment) -> None: requirements_txt = script.scratch_path / "requirements.txt" requirements_txt.write_text( - """ + f""" child[extra]==0.1.0 --hash=sha256:{child_hash} parent_with_extra==0.1.0 --hash=sha256:{parent_with_extra_hash} parent_without_extra==0.1.0 --hash=sha256:{parent_without_extra_hash} extra==0.1.0 --hash=sha256:{extra_hash} - """.format( - child_hash=child_hash, - parent_with_extra_hash=parent_with_extra_hash, - parent_without_extra_hash=parent_without_extra_hash, - extra_hash=extra_hash, - ), + """, ) script.pip( diff --git a/tests/functional/test_new_resolver_target.py b/tests/functional/test_new_resolver_target.py index 811ae935aec..a81cfe5e83d 100644 --- a/tests/functional/test_new_resolver_target.py +++ b/tests/functional/test_new_resolver_target.py @@ -58,12 +58,7 @@ def test_new_resolver_target_checks_compatibility_failure( if platform: args += ["--platform", platform] - args_tag = "{}{}-{}-{}".format( - implementation, - python_version, - abi, - platform, - ) + args_tag = f"{implementation}{python_version}-{abi}-{platform}" wheel_tag_matches = args_tag == fake_wheel_tag result = script.pip(*args, expect_error=(not wheel_tag_matches)) diff --git a/tests/functional/test_new_resolver_user.py b/tests/functional/test_new_resolver_user.py index 4cd06311348..1660924f28c 100644 --- a/tests/functional/test_new_resolver_user.py +++ b/tests/functional/test_new_resolver_user.py @@ -27,7 +27,7 @@ def test_new_resolver_install_user_satisfied_by_global_site( script: PipTestEnvironment, ) -> None: """ - An install a matching version to user site should re-use a global site + An install a matching version to user site should reuse a global site installation if it satisfies. 
""" create_basic_wheel_for_package(script, "base", "1.0.0") diff --git a/tests/functional/test_no_color.py b/tests/functional/test_no_color.py index 4094bdd167a..99200c6e4d6 100644 --- a/tests/functional/test_no_color.py +++ b/tests/functional/test_no_color.py @@ -1,6 +1,7 @@ """ Test specific for the --no-color option """ + import os import shutil import subprocess diff --git a/tests/functional/test_pep517.py b/tests/functional/test_pep517.py index a642a3f8bfb..78a6c2bbc6c 100644 --- a/tests/functional/test_pep517.py +++ b/tests/functional/test_pep517.py @@ -159,9 +159,9 @@ def test_conflicting_pep517_backend_requirements( expect_error=True, ) msg = ( - "Some build dependencies for {url} conflict with the backend " + f"Some build dependencies for {project_dir.as_uri()} conflict with the backend " "dependencies: simplewheel==1.0 is incompatible with " - "simplewheel==2.0.".format(url=project_dir.as_uri()) + "simplewheel==2.0." ) assert result.returncode != 0 and msg in result.stderr, str(result) @@ -205,8 +205,8 @@ def test_validate_missing_pep517_backend_requirements( expect_error=True, ) msg = ( - "Some build dependencies for {url} are missing: " - "'simplewheel==1.0', 'test_backend'.".format(url=project_dir.as_uri()) + f"Some build dependencies for {project_dir.as_uri()} are missing: " + "'simplewheel==1.0', 'test_backend'." ) assert result.returncode != 0 and msg in result.stderr, str(result) @@ -231,9 +231,9 @@ def test_validate_conflicting_pep517_backend_requirements( expect_error=True, ) msg = ( - "Some build dependencies for {url} conflict with the backend " + f"Some build dependencies for {project_dir.as_uri()} conflict with the backend " "dependencies: simplewheel==2.0 is incompatible with " - "simplewheel==1.0.".format(url=project_dir.as_uri()) + "simplewheel==1.0." 
) assert result.returncode != 0 and msg in result.stderr, str(result) diff --git a/tests/functional/test_pep660.py b/tests/functional/test_pep660.py index 8418b26894c..d562d0750db 100644 --- a/tests/functional/test_pep660.py +++ b/tests/functional/test_pep660.py @@ -37,7 +37,7 @@ def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None): def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): with open("log.txt", "a") as f: - print(":build_wheel called", file=f) + print(f":build_wheel called with config_settings={config_settings}", file=f) return _build_wheel(wheel_directory, config_settings, metadata_directory) """ @@ -55,7 +55,7 @@ def prepare_metadata_for_build_editable(metadata_directory, config_settings=None def build_editable(wheel_directory, config_settings=None, metadata_directory=None): with open("log.txt", "a") as f: - print(":build_editable called", file=f) + print(f":build_editable called with config_settings={config_settings}", file=f) return _build_wheel(wheel_directory, config_settings, metadata_directory) """ # fmt: on @@ -88,6 +88,16 @@ def _assert_hook_called(project_dir: Path, hook: str) -> None: assert f":{hook} called" in log, f"{hook} has not been called" +def _assert_hook_called_with_config_settings( + project_dir: Path, hook: str, config_settings: Dict[str, str] +) -> None: + log = project_dir.joinpath("log.txt").read_text() + assert f":{hook} called" in log, f"{hook} has not been called" + assert ( + f":{hook} called with config_settings={config_settings}" in log + ), f"{hook} has not been called with the expected config settings:\n{log}" + + def _assert_hook_not_called(project_dir: Path, hook: str) -> None: log = project_dir.joinpath("log.txt").read_text() assert f":{hook} called" not in log, f"{hook} should not have been called" @@ -119,9 +129,35 @@ def test_install_pep660_basic(tmpdir: Path, script: PipTestEnvironment) -> None: "--no-build-isolation", "--editable", project_dir, + "--config-setting", + "x=y", + ) + _assert_hook_called(project_dir, "prepare_metadata_for_build_editable") + _assert_hook_called_with_config_settings(project_dir, "build_editable", {"x": "y"}) + assert ( + result.test_env.site_packages.joinpath("project.egg-link") + not in result.files_created + ), "a .egg-link file should not have been created" + + +def test_install_pep660_from_reqs_file( + tmpdir: Path, script: PipTestEnvironment +) -> None: + """ + Test with backend that supports build_editable. 
+ """ + project_dir = _make_project(tmpdir, BACKEND_WITH_PEP660, with_setup_py=False) + reqs_file = tmpdir / "requirements.txt" + reqs_file.write_text(f"-e {project_dir.as_uri()} --config-setting x=y\n") + result = script.pip( + "install", + "--no-index", + "--no-build-isolation", + "-r", + reqs_file, ) _assert_hook_called(project_dir, "prepare_metadata_for_build_editable") - _assert_hook_called(project_dir, "build_editable") + _assert_hook_called_with_config_settings(project_dir, "build_editable", {"x": "y"}) assert ( result.test_env.site_packages.joinpath("project.egg-link") not in result.files_created diff --git a/tests/functional/test_proxy.py b/tests/functional/test_proxy.py new file mode 100644 index 00000000000..ab53637900f --- /dev/null +++ b/tests/functional/test_proxy.py @@ -0,0 +1,92 @@ +import ssl +from pathlib import Path +from typing import Any, Dict + +import proxy +import pytest +from proxy.http.proxy import HttpProxyBasePlugin + +from tests.conftest import CertFactory +from tests.lib import PipTestEnvironment, TestData +from tests.lib.server import ( + authorization_response, + make_mock_server, + package_page, + server_running, +) + + +class AccessLogPlugin(HttpProxyBasePlugin): + def on_access_log(self, context: Dict[str, Any]) -> None: + print(context) + + +@pytest.mark.network +def test_proxy_overrides_env( + script: PipTestEnvironment, capfd: pytest.CaptureFixture[str] +) -> None: + with proxy.Proxy( + port=8899, + num_acceptors=1, + ), proxy.Proxy(plugins=[AccessLogPlugin], port=8888, num_acceptors=1): + script.environ["http_proxy"] = "127.0.0.1:8888" + script.environ["https_proxy"] = "127.0.0.1:8888" + result = script.pip( + "download", + "--proxy", + "http://127.0.0.1:8899", + "--trusted-host", + "127.0.0.1", + "-d", + "pip_downloads", + "INITools==0.1", + ) + result.did_create(Path("scratch") / "pip_downloads" / "INITools-0.1.tar.gz") + out, _ = capfd.readouterr() + assert "CONNECT" not in out + + +def test_proxy_does_not_override_netrc( + script: PipTestEnvironment, + data: TestData, + cert_factory: CertFactory, +) -> None: + cert_path = cert_factory() + ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + ctx.load_cert_chain(cert_path, cert_path) + ctx.load_verify_locations(cafile=cert_path) + ctx.verify_mode = ssl.CERT_REQUIRED + + server = make_mock_server(ssl_context=ctx) + server.mock.side_effect = [ + package_page( + { + "simple-3.0.tar.gz": "/files/simple-3.0.tar.gz", + } + ), + authorization_response(data.packages / "simple-3.0.tar.gz"), + authorization_response(data.packages / "simple-3.0.tar.gz"), + ] + + url = f"https://{server.host}:{server.port}/simple" + + netrc = script.scratch_path / ".netrc" + netrc.write_text(f"machine {server.host} login USERNAME password PASSWORD") + with proxy.Proxy(port=8888, num_acceptors=1), server_running(server): + script.environ["NETRC"] = netrc + script.pip( + "install", + "--proxy", + "http://127.0.0.1:8888", + "--trusted-host", + "127.0.0.1", + "--no-cache-dir", + "--index-url", + url, + "--cert", + cert_path, + "--client-cert", + cert_path, + "simple", + ) + script.assert_installed(simple="3.0") diff --git a/tests/functional/test_python_option.py b/tests/functional/test_python_option.py index 8bf16d7a56b..ecfd819eb7c 100644 --- a/tests/functional/test_python_option.py +++ b/tests/functional/test_python_option.py @@ -39,3 +39,15 @@ def test_python_interpreter( script.pip("--python", env_path, "uninstall", "simplewheel", "--yes") result = script.pip("--python", env_path, "list", "--format=json") assert 
json.loads(result.stdout) == before + + +def test_error_python_option_wrong_location( + script: PipTestEnvironment, + tmpdir: Path, + shared_data: TestData, +) -> None: + env_path = os.fspath(tmpdir / "venv") + env = EnvBuilder(with_pip=False) + env.create(env_path) + + script.pip("list", "--python", env_path, "--format=json", expect_error=True) diff --git a/tests/functional/test_show.py b/tests/functional/test_show.py index b8ec0510a1e..7797de9e992 100644 --- a/tests/functional/test_show.py +++ b/tests/functional/test_show.py @@ -3,6 +3,8 @@ import re import textwrap +import pytest + from pip import __version__ from pip._internal.commands.show import search_packages_info from pip._internal.utils.unpacking import untar_file @@ -277,7 +279,10 @@ def test_show_required_by_packages_basic( lines = result.stdout.splitlines() assert "Name: simple" in lines - assert "Required-by: requires-simple" in lines + assert ( + "Required-by: requires_simple" in lines + or "Required-by: requires-simple" in lines + ) def test_show_required_by_packages_capitalized( @@ -294,7 +299,10 @@ def test_show_required_by_packages_capitalized( lines = result.stdout.splitlines() assert "Name: simple" in lines - assert "Required-by: Requires-Capitalized" in lines + assert ( + "Required-by: Requires_Capitalized" in lines + or "Required-by: Requires-Capitalized" in lines + ) def test_show_required_by_packages_requiring_capitalized( @@ -314,8 +322,13 @@ def test_show_required_by_packages_requiring_capitalized( lines = result.stdout.splitlines() print(lines) - assert "Name: Requires-Capitalized" in lines - assert "Required-by: requires-requires-capitalized" in lines + assert ( + "Name: Requires_Capitalized" in lines or "Name: Requires-Capitalized" in lines + ) + assert ( + "Required-by: requires_requires_capitalized" in lines + or "Required-by: requires-requires-capitalized" in lines + ) def test_show_skip_work_dir_pkg(script: PipTestEnvironment) -> None: @@ -350,3 +363,49 @@ def test_show_include_work_dir_pkg(script: PipTestEnvironment) -> None: result = script.pip("show", "simple", cwd=pkg_path) lines = result.stdout.splitlines() assert "Name: simple" in lines + + +def test_show_deduplicate_requirements(script: PipTestEnvironment) -> None: + """ + Test that show should deduplicate requirements + for a package + """ + + # Create a test package and create .egg-info dir + pkg_path = create_test_package_with_setup( + script, + name="simple", + version="1.0", + install_requires=[ + "pip >= 19.0.1", + 'pip >= 19.3.1; python_version < "3.8"', + 'pip >= 23.0.1; python_version < "3.9"', + ], + ) + script.run("python", "setup.py", "egg_info", expect_stderr=True, cwd=pkg_path) + + script.environ.update({"PYTHONPATH": pkg_path}) + + result = script.pip("show", "simple", cwd=pkg_path) + lines = result.stdout.splitlines() + assert "Requires: pip" in lines + + +@pytest.mark.parametrize( + "project_url", ["Home-page", "home-page", "Homepage", "homepage"] +) +def test_show_populate_homepage_from_project_urls( + script: PipTestEnvironment, project_url: str +) -> None: + pkg_path = create_test_package_with_setup( + script, + name="simple", + version="1.0", + project_urls={project_url: "https://example.com"}, + ) + script.run("python", "setup.py", "egg_info", expect_stderr=True, cwd=pkg_path) + script.environ.update({"PYTHONPATH": pkg_path}) + + result = script.pip("show", "simple", cwd=pkg_path) + lines = result.stdout.splitlines() + assert "Home-page: https://example.com" in lines diff --git a/tests/functional/test_truststore.py 
b/tests/functional/test_truststore.py index 33153d0fbf9..cc90343b52d 100644 --- a/tests/functional/test_truststore.py +++ b/tests/functional/test_truststore.py @@ -27,20 +27,6 @@ def test_truststore_error_on_old_python(pip: PipRunner) -> None: assert "The truststore feature is only available for Python 3.10+" in result.stderr -@pytest.mark.skipif(sys.version_info < (3, 10), reason="3.10+ required for truststore") -def test_truststore_error_without_preinstalled(pip: PipRunner) -> None: - result = pip( - "install", - "--no-index", - "does-not-matter", - expect_error=True, - ) - assert ( - "To use the truststore feature, 'truststore' must be installed into " - "pip's current environment." - ) in result.stderr - - @pytest.mark.skipif(sys.version_info < (3, 10), reason="3.10+ required for truststore") @pytest.mark.network @pytest.mark.parametrize( @@ -56,6 +42,5 @@ def test_trustore_can_install( pip: PipRunner, package: str, ) -> None: - script.pip("install", "truststore") result = pip("install", package) assert "Successfully installed" in result.stdout diff --git a/tests/functional/test_uninstall.py b/tests/functional/test_uninstall.py index 87e7157497c..af140e07159 100644 --- a/tests/functional/test_uninstall.py +++ b/tests/functional/test_uninstall.py @@ -37,6 +37,10 @@ def test_basic_uninstall(script: PipTestEnvironment) -> None: assert_all_changes(result, result2, [script.venv / "build", "cache"]) +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="distutils is no longer available in Python 3.12+", +) def test_basic_uninstall_distutils(script: PipTestEnvironment) -> None: """ Test basic install and uninstall. @@ -68,6 +72,10 @@ def test_basic_uninstall_distutils(script: PipTestEnvironment) -> None: ) in result.stderr +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="Setuptools<64 does not support Python 3.12+", +) @pytest.mark.network def test_basic_uninstall_with_scripts(script: PipTestEnvironment) -> None: """ @@ -101,6 +109,10 @@ def test_uninstall_invalid_parameter( assert expected_message in result.stderr +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="Setuptools<64 does not support Python 3.12+", +) @pytest.mark.network def test_uninstall_easy_install_after_import(script: PipTestEnvironment) -> None: """ @@ -126,6 +138,10 @@ def test_uninstall_easy_install_after_import(script: PipTestEnvironment) -> None ) +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="Setuptools<64 does not support Python 3.12+", +) @pytest.mark.network def test_uninstall_trailing_newline(script: PipTestEnvironment) -> None: """ @@ -222,12 +238,8 @@ def test_uninstall_overlapping_package( "console_scripts", [ "test_ = distutils_install:test", - pytest.param( - "test_:test_ = distutils_install:test_test", - marks=pytest.mark.xfail( - reason="colon not supported in wheel entry point name?" - ), - ), + ",test_ = distutils_install:test_test", + ", = distutils_install:test_test", ], ) def test_uninstall_entry_point_colon_in_name( @@ -337,6 +349,10 @@ def test_uninstall_console_scripts_uppercase_name(script: PipTestEnvironment) -> assert not script_name.exists() +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="Setuptools<64 does not support Python 3.12+", +) @pytest.mark.network def test_uninstall_easy_installed_console_scripts(script: PipTestEnvironment) -> None: """ @@ -584,9 +600,7 @@ def test_uninstall_without_record_fails( "simple.dist==0.1'." 
) elif installer: - expected_error_message += " Hint: The package was installed by {}.".format( - installer - ) + expected_error_message += f" Hint: The package was installed by {installer}." assert result2.stderr.rstrip() == expected_error_message assert_all_changes(result.files_after, result2, ignore_changes) diff --git a/tests/functional/test_uninstall_user.py b/tests/functional/test_uninstall_user.py index 0bf2e6d4180..0129d2e46a1 100644 --- a/tests/functional/test_uninstall_user.py +++ b/tests/functional/test_uninstall_user.py @@ -1,6 +1,7 @@ """ tests specific to uninstalling --user installs """ + from os.path import isdir, isfile, normcase import pytest diff --git a/tests/functional/test_vcs_git.py b/tests/functional/test_vcs_git.py index da4d9583f0f..1ec09c73e47 100644 --- a/tests/functional/test_vcs_git.py +++ b/tests/functional/test_vcs_git.py @@ -1,6 +1,7 @@ """ Contains functional tests of the Git class. """ + import logging import os import pathlib diff --git a/tests/functional/test_wheel.py b/tests/functional/test_wheel.py index 1e3e90e410f..1bddd40dc41 100644 --- a/tests/functional/test_wheel.py +++ b/tests/functional/test_wheel.py @@ -1,4 +1,5 @@ """'pip wheel' tests""" + import os import re import sys @@ -7,8 +8,11 @@ import pytest from pip._internal.cli.status_codes import ERROR -from tests.lib import pyversion # noqa: F401 -from tests.lib import PipTestEnvironment, TestData +from tests.lib import ( + PipTestEnvironment, + TestData, + pyversion, +) def add_files_to_dist_directory(folder: Path) -> None: @@ -56,9 +60,7 @@ def test_pip_wheel_success(script: PipTestEnvironment, data: TestData) -> None: wheel_file_path = script.scratch / wheel_file_name assert re.search( r"Created wheel for simple: " - r"filename={filename} size=\d+ sha256=[A-Fa-f0-9]{{64}}".format( - filename=re.escape(wheel_file_name) - ), + rf"filename={re.escape(wheel_file_name)} size=\d+ sha256=[A-Fa-f0-9]{{64}}", result.stdout, ) assert re.search(r"^\s+Stored in directory: ", result.stdout, re.M) @@ -340,15 +342,6 @@ def test_pip_wheel_with_user_set_in_config( sys.platform.startswith("win"), reason="The empty extension module does not work on Win", ) -@pytest.mark.xfail( - condition=sys.platform == "darwin" and sys.version_info < (3, 9), - reason=( - "Unexplained 'no module named platform' in " - "https://github.com/pypa/wheel/blob" - "/c87e6ed82b58b41b258a3e8c852af8bc1817bb00" - "/src/wheel/vendored/packaging/tags.py#L396-L411" - ), -) def test_pip_wheel_ext_module_with_tmpdir_inside( script: PipTestEnvironment, data: TestData, common_wheels: Path ) -> None: diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 7410072f50e..bd31b59ff2f 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -10,18 +10,20 @@ from base64 import urlsafe_b64encode from contextlib import contextmanager from hashlib import sha256 -from io import BytesIO +from io import BytesIO, StringIO from textwrap import dedent from typing import ( - TYPE_CHECKING, Any, + AnyStr, Callable, Dict, Iterable, Iterator, List, + Literal, Mapping, Optional, + Protocol, Tuple, Union, cast, @@ -32,6 +34,7 @@ from pip._vendor.packaging.utils import canonicalize_name from scripttest import FoundDir, FoundFile, ProcResult, TestFileEnvironment +from pip._internal.cli.main import main as pip_entry_point from pip._internal.index.collector import LinkCollector from pip._internal.index.package_finder import PackageFinder from pip._internal.locations import get_major_minor_version @@ -39,16 +42,11 @@ from 
pip._internal.models.selection_prefs import SelectionPreferences from pip._internal.models.target_python import TargetPython from pip._internal.network.session import PipSession +from pip._internal.utils.egg_link import _egg_link_names from tests.lib.venv import VirtualEnvironment from tests.lib.wheel import make_wheel -if TYPE_CHECKING: - # Literal was introduced in Python 3.8. - from typing import Literal - - ResolverVariant = Literal["resolvelib", "legacy"] -else: - ResolverVariant = str +ResolverVariant = Literal["resolvelib", "legacy"] DATA_DIR = pathlib.Path(__file__).parent.parent.joinpath("data").resolve() SRC_DIR = pathlib.Path(__file__).resolve().parent.parent.parent @@ -303,6 +301,12 @@ def files_updated(self) -> FoundFiles: def files_deleted(self) -> FoundFiles: return FoundFiles(self._impl.files_deleted) + def _get_egg_link_path_created(self, egg_link_paths: List[str]) -> Optional[str]: + for egg_link_path in egg_link_paths: + if egg_link_path in self.files_created: + return egg_link_path + return None + def assert_installed( self, pkg_name: str, @@ -318,7 +322,7 @@ def assert_installed( e = self.test_env if editable: - pkg_dir = e.venv / "src" / pkg_name.lower() + pkg_dir = e.venv / "src" / canonicalize_name(pkg_name) # If package was installed in a sub directory if sub_dir: pkg_dir = pkg_dir / sub_dir @@ -327,22 +331,30 @@ def assert_installed( pkg_dir = e.site_packages / pkg_name if use_user_site: - egg_link_path = e.user_site / f"{pkg_name}.egg-link" + egg_link_paths = [ + e.user_site / egg_link_name + for egg_link_name in _egg_link_names(pkg_name) + ] else: - egg_link_path = e.site_packages / f"{pkg_name}.egg-link" + egg_link_paths = [ + e.site_packages / egg_link_name + for egg_link_name in _egg_link_names(pkg_name) + ] + egg_link_path_created = self._get_egg_link_path_created(egg_link_paths) if without_egg_link: - if egg_link_path in self.files_created: + if egg_link_path_created: raise TestFailure( - f"unexpected egg link file created: {egg_link_path!r}\n{self}" + f"unexpected egg link file created: {egg_link_path_created!r}\n" + f"{self}" ) else: - if egg_link_path not in self.files_created: + if not egg_link_path_created: raise TestFailure( - f"expected egg link file missing: {egg_link_path!r}\n{self}" + f"expected egg link file missing: {egg_link_paths!r}\n{self}" ) - egg_link_file = self.files_created[egg_link_path] + egg_link_file = self.files_created[egg_link_path_created] egg_link_contents = egg_link_file.bytes.replace(os.linesep, "\n") # FIXME: I don't understand why there's a trailing . here @@ -645,7 +657,7 @@ def run( cwd = cwd or self.cwd if sys.platform == "win32": # Partial fix for ScriptTest.run using `shell=True` on Windows. - args = tuple(str(a).replace("^", "^^").replace("&", "^&") for a in args) + args = tuple(re.sub("([&|<>^])", r"^\1", str(a)) for a in args) if allow_error: kw["expect_error"] = True @@ -684,7 +696,9 @@ def run( # Pass expect_stderr=True to allow any stderr. We do this because # we do our checking of stderr further on in check_stderr(). 
kw["expect_stderr"] = True - result = super().run(cwd=cwd, *args, **kw) + # Ignore linter check + # B026 Star-arg unpacking after a keyword argument is strongly discouraged + result = super().run(cwd=cwd, *args, **kw) # noqa: B026 if expect_error and not allow_error: if result.returncode == 0: @@ -738,24 +752,20 @@ def easy_install(self, *args: str, **kwargs: Any) -> TestPipResult: def assert_installed(self, **kwargs: str) -> None: ret = self.pip("list", "--format=json") - installed = set( + installed = { (canonicalize_name(val["name"]), val["version"]) for val in json.loads(ret.stdout) - ) - expected = set((canonicalize_name(k), v) for k, v in kwargs.items()) - assert expected <= installed, "{!r} not all in {!r}".format(expected, installed) + } + expected = {(canonicalize_name(k), v) for k, v in kwargs.items()} + assert expected <= installed, f"{expected!r} not all in {installed!r}" def assert_not_installed(self, *args: str) -> None: ret = self.pip("list", "--format=json") - installed = set( - canonicalize_name(val["name"]) for val in json.loads(ret.stdout) - ) + installed = {canonicalize_name(val["name"]) for val in json.loads(ret.stdout)} # None of the given names should be listed as installed, i.e. their # intersection should be empty. - expected = set(canonicalize_name(k) for k in args) - assert not (expected & installed), "{!r} contained in {!r}".format( - expected, installed - ) + expected = {canonicalize_name(k) for k in args} + assert not (expected & installed), f"{expected!r} contained in {installed!r}" # FIXME ScriptTest does something similar, but only within a single @@ -795,17 +805,15 @@ def prefix_match(path: str, prefix_path: StrPath) -> bool: prefix = prefix.rstrip(os.path.sep) + os.path.sep return path.startswith(prefix) - start_keys = { - k for k in start.keys() if not any([prefix_match(k, i) for i in ignore]) - } - end_keys = {k for k in end.keys() if not any([prefix_match(k, i) for i in ignore])} + start_keys = {k for k in start if not any(prefix_match(k, i) for i in ignore)} + end_keys = {k for k in end if not any(prefix_match(k, i) for i in ignore)} deleted = {k: start[k] for k in start_keys.difference(end_keys)} created = {k: end[k] for k in end_keys.difference(start_keys)} updated = {} for k in start_keys.intersection(end_keys): if start[k].size != end[k].size: updated[k] = end[k] - return dict(deleted=deleted, created=created, updated=updated) + return {"deleted": deleted, "created": created, "updated": updated} def assert_all_changes( @@ -1028,7 +1036,7 @@ def _create_test_package_with_srcdir( pkg_path.joinpath("__init__.py").write_text("") subdir_path.joinpath("setup.py").write_text( textwrap.dedent( - """ + f""" from setuptools import setup, find_packages setup( name="{name}", @@ -1036,9 +1044,7 @@ def _create_test_package_with_srcdir( packages=find_packages(), package_dir={{"": "src"}}, ) - """.format( - name=name - ) + """ ) ) return _vcs_add(dir_path, version_pkg_path, vcs) @@ -1052,7 +1058,7 @@ def _create_test_package( _create_main_file(version_pkg_path, name=name, output="0.1") version_pkg_path.joinpath("setup.py").write_text( textwrap.dedent( - """ + f""" from setuptools import setup, find_packages setup( name="{name}", @@ -1061,9 +1067,7 @@ def _create_test_package( py_modules=["{name}"], entry_points=dict(console_scripts=["{name}={name}:main"]), ) - """.format( - name=name - ) + """ ) ) return _vcs_add(dir_path, version_pkg_path, vcs) @@ -1137,7 +1141,7 @@ def urlsafe_b64encode_nopad(data: bytes) -> str: def create_really_basic_wheel(name: str, 
version: str) -> bytes: def digest(contents: bytes) -> str: - return "sha256={}".format(urlsafe_b64encode_nopad(sha256(contents).digest())) + return f"sha256={urlsafe_b64encode_nopad(sha256(contents).digest())}" def add_file(path: str, text: str) -> None: contents = text.encode("utf-8") @@ -1153,13 +1157,11 @@ def add_file(path: str, text: str) -> None: add_file( f"{dist_info}/METADATA", dedent( - """\ + f"""\ Metadata-Version: 2.1 - Name: {} - Version: {} - """.format( - name, version - ) + Name: {name} + Version: {version} + """ ), ) z.writestr(record_path, "\n".join(",".join(r) for r in records)) @@ -1185,7 +1187,7 @@ def create_basic_wheel_for_package( # Fix wheel distribution name by replacing runs of non-alphanumeric # characters with an underscore _ as per PEP 491 - name = re.sub(r"[^\w\d.]+", "_", name, re.UNICODE) + name = re.sub(r"[^\w\d.]+", "_", name) archive_name = f"{name}-{version}-py2.py3-none-any.whl" archive_path = script.scratch_path / archive_name @@ -1336,3 +1338,40 @@ def need_svn(fn: _Test) -> _Test: def need_mercurial(fn: _Test) -> _Test: return pytest.mark.mercurial(need_executable("Mercurial", ("hg", "version"))(fn)) + + +class InMemoryPipResult: + def __init__(self, returncode: int, stdout: str) -> None: + self.returncode = returncode + self.stdout = stdout + + +class InMemoryPip: + def pip(self, *args: Union[str, pathlib.Path]) -> InMemoryPipResult: + orig_stdout = sys.stdout + stdout = StringIO() + sys.stdout = stdout + try: + returncode = pip_entry_point([os.fspath(a) for a in args]) + except SystemExit as e: + if isinstance(e.code, int): + returncode = e.code + elif e.code: + returncode = 1 + else: + returncode = 0 + finally: + sys.stdout = orig_stdout + return InMemoryPipResult(returncode, stdout.getvalue()) + + +class ScriptFactory(Protocol): + def __call__( + self, + tmpdir: pathlib.Path, + virtualenv: Optional[VirtualEnvironment] = None, + environ: Optional[Dict[AnyStr, AnyStr]] = None, + ) -> PipTestEnvironment: ... + + +CertFactory = Callable[[], str] diff --git a/tests/lib/certs.py b/tests/lib/certs.py index 54b484ac0e7..9e6542d2d57 100644 --- a/tests/lib/certs.py +++ b/tests/lib/certs.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from typing import Tuple from cryptography import x509 @@ -23,8 +23,8 @@ def make_tls_cert(hostname: str) -> Tuple[x509.Certificate, rsa.RSAPrivateKey]: .issuer_name(issuer) .public_key(key.public_key()) .serial_number(x509.random_serial_number()) - .not_valid_before(datetime.utcnow()) - .not_valid_after(datetime.utcnow() + timedelta(days=10)) + .not_valid_before(datetime.now(timezone.utc)) + .not_valid_after(datetime.now(timezone.utc) + timedelta(days=10)) .add_extension( x509.SubjectAlternativeName([x509.DNSName(hostname)]), critical=False, diff --git a/tests/lib/compat.py b/tests/lib/compat.py index 4d44cbddbbc..866ac7a7734 100644 --- a/tests/lib/compat.py +++ b/tests/lib/compat.py @@ -2,32 +2,13 @@ import contextlib import signal -from typing import Iterable, Iterator - - -@contextlib.contextmanager -def nullcontext() -> Iterator[None]: - """ - Context manager that does no additional processing. - - Used as a stand-in for a normal context manager, when a particular block of - code is only sometimes used with a normal context manager: - - cm = optional_cm if condition else nullcontext() - with cm: - # Perform operation, using optional_cm if condition is True - - TODO: Replace with contextlib.nullcontext after dropping Python 3.6 - support. 
- """ - yield - +from typing import Callable, ContextManager, Iterable, Iterator # Applies on Windows. if not hasattr(signal, "pthread_sigmask"): # We're not relying on this behavior anywhere currently, it's just best # practice. - blocked_signals = nullcontext + blocked_signals: Callable[[], ContextManager[None]] = contextlib.nullcontext else: @contextlib.contextmanager diff --git a/tests/lib/configuration_helpers.py b/tests/lib/configuration_helpers.py index ec824ffd3b8..b6e398c5bf1 100644 --- a/tests/lib/configuration_helpers.py +++ b/tests/lib/configuration_helpers.py @@ -38,7 +38,7 @@ def overridden() -> None: old() # https://github.com/python/mypy/issues/2427 - self.configuration._load_config_files = overridden # type: ignore[assignment] + self.configuration._load_config_files = overridden # type: ignore[method-assign] @contextlib.contextmanager def tmpfile(self, contents: str) -> Iterator[str]: diff --git a/tests/lib/filesystem.py b/tests/lib/filesystem.py index 5f8fe519d5d..4cf003b3c0b 100644 --- a/tests/lib/filesystem.py +++ b/tests/lib/filesystem.py @@ -1,5 +1,6 @@ """Helpers for filesystem-dependent tests. """ + import os from functools import partial from itertools import chain diff --git a/tests/lib/local_repos.py b/tests/lib/local_repos.py index a04d1d0fe58..a8cf4aa6c74 100644 --- a/tests/lib/local_repos.py +++ b/tests/lib/local_repos.py @@ -56,7 +56,7 @@ def local_checkout( assert vcs_backend is not None vcs_backend.obtain(repo_url_path, url=hide_url(remote_repo), verbosity=0) - return "{}+{}".format(vcs_name, Path(repo_url_path).as_uri()) + return f"{vcs_name}+{Path(repo_url_path).as_uri()}" def local_repo(remote_repo: str, temp_path: Path) -> str: diff --git a/tests/lib/server.py b/tests/lib/server.py index 4cc18452cb5..96ac5930dc9 100644 --- a/tests/lib/server.py +++ b/tests/lib/server.py @@ -2,9 +2,9 @@ import ssl import threading from base64 import b64encode -from contextlib import contextmanager +from contextlib import ExitStack, contextmanager from textwrap import dedent -from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, Iterator +from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, Iterator, List from unittest.mock import Mock from werkzeug.serving import BaseWSGIServer, WSGIRequestHandler @@ -18,7 +18,7 @@ Body = Iterable[bytes] -class MockServer(BaseWSGIServer): +class _MockServer(BaseWSGIServer): mock: Mock = Mock() @@ -64,7 +64,7 @@ def adapter(environ: "WSGIEnvironment", start_response: "StartResponse") -> Body return adapter -def make_mock_server(**kwargs: Any) -> MockServer: +def make_mock_server(**kwargs: Any) -> _MockServer: """Creates a mock HTTP(S) server listening on a random port on localhost. 
The `mock` property of the returned server provides and records all WSGI @@ -152,7 +152,7 @@ def html5_page(text: str) -> str: def package_page(spec: Dict[str, str]) -> "WSGIApplication": def link(name: str, value: str) -> str: - return '<a href="{}">{}</a>'.format(value, name) + return f'<a href="{value}">{name}</a>' links = "".join(link(*kv) for kv in spec.items()) return text_html_response(html5_page(links)) @@ -189,3 +189,46 @@ def responder(environ: "WSGIEnvironment", start_response: "StartResponse") -> Bo return [path.read_bytes()] return responder + + +class MockServer: + def __init__(self, server: _MockServer) -> None: + self._server = server + self._running = False + self.context = ExitStack() + + @property + def port(self) -> int: + return self._server.port + + @property + def host(self) -> str: + return self._server.host + + def set_responses(self, responses: Iterable["WSGIApplication"]) -> None: + assert not self._running, "responses cannot be set on running server" + self._server.mock.side_effect = responses + + def start(self) -> None: + assert not self._running, "running server cannot be started" + self.context.enter_context(server_running(self._server)) + self.context.enter_context(self._set_running()) + + @contextmanager + def _set_running(self) -> Iterator[None]: + self._running = True + try: + yield + finally: + self._running = False + + def stop(self) -> None: + assert self._running, "idle server cannot be stopped" + self.context.close() + + def get_requests(self) -> List[Dict[str, str]]: + """Get environ for each received request.""" + assert not self._running, "cannot get mock from running server" + # Legacy: replace call[0][0] with call.args[0] + # when pip drops support for python3.7 + return [call[0][0] for call in self._server.mock.call_args_list] diff --git a/tests/lib/test_lib.py b/tests/lib/test_lib.py index a541a0a204d..09a1cc738f9 100644 --- a/tests/lib/test_lib.py +++ b/tests/lib/test_lib.py @@ -1,4 +1,5 @@ """Test the test support.""" + import filecmp import pathlib import re @@ -107,8 +108,8 @@ def run_with_log_command( """ command = ( "import logging; logging.basicConfig(level='INFO'); " - "logging.getLogger().info('sub: {}', 'foo')" - ).format(sub_string) + f"logging.getLogger().info('sub: {sub_string}', 'foo')" + ) args = [sys.executable, "-c", command] script.run(*args, **kwargs) diff --git a/tests/lib/test_wheel.py b/tests/lib/test_wheel.py index 86994c28e57..a171484a549 100644 --- a/tests/lib/test_wheel.py +++ b/tests/lib/test_wheel.py @@ -1,5 +1,6 @@ """Tests for wheel helper.
""" + import csv from email import message_from_string from email.message import Message @@ -19,12 +20,12 @@ def test_message_from_dict_one_value() -> None: message = message_from_dict({"a": "1"}) - assert set(message.get_all("a")) == {"1"} + assert set(message.get_all("a")) == {"1"} # type: ignore def test_message_from_dict_multiple_values() -> None: message = message_from_dict({"a": ["1", "2"]}) - assert set(message.get_all("a")) == {"1", "2"} + assert set(message.get_all("a")) == {"1", "2"} # type: ignore def message_from_bytes(contents: bytes) -> Message: @@ -67,7 +68,7 @@ def test_make_metadata_file_custom_value_list() -> None: f = default_make_metadata(updates={"a": ["1", "2"]}) assert f is not None message = default_metadata_checks(f) - assert set(message.get_all("a")) == {"1", "2"} + assert set(message.get_all("a")) == {"1", "2"} # type: ignore def test_make_metadata_file_custom_value_overrides() -> None: @@ -101,7 +102,7 @@ def default_wheel_metadata_checks(f: File) -> Message: assert message.get_all("Wheel-Version") == ["1.0"] assert message.get_all("Generator") == ["pip-test-suite"] assert message.get_all("Root-Is-Purelib") == ["true"] - assert set(message.get_all("Tag")) == {"py2-none-any", "py3-none-any"} + assert set(message.get_all("Tag")) == {"py2-none-any", "py3-none-any"} # type: ignore return message @@ -122,7 +123,7 @@ def test_make_wheel_metadata_file_custom_value_list() -> None: f = default_make_wheel_metadata(updates={"a": ["1", "2"]}) assert f is not None message = default_wheel_metadata_checks(f) - assert set(message.get_all("a")) == {"1", "2"} + assert set(message.get_all("a")) == {"1", "2"} # type: ignore def test_make_wheel_metadata_file_custom_value_override() -> None: diff --git a/tests/lib/venv.py b/tests/lib/venv.py index e65a3291230..fac54d3bd2c 100644 --- a/tests/lib/venv.py +++ b/tests/lib/venv.py @@ -7,17 +7,11 @@ import textwrap import venv as _venv from pathlib import Path -from typing import TYPE_CHECKING, Dict, Optional, Union +from typing import Dict, Literal, Optional, Union import virtualenv as _virtualenv -if TYPE_CHECKING: - # Literal was introduced in Python 3.8. - from typing import Literal - - VirtualEnvironmentType = Literal["virtualenv", "venv"] -else: - VirtualEnvironmentType = str +VirtualEnvironmentType = Literal["virtualenv", "venv"] class VirtualEnvironment: @@ -124,7 +118,7 @@ def _create(self, clear: bool = False) -> None: ) elif self._venv_type == "venv": builder = _venv.EnvBuilder() - context = builder.ensure_directories(self.location) + context = builder.ensure_directories(os.fspath(self.location)) builder.create_configuration(context) builder.setup_python(context) self.site.mkdir(parents=True, exist_ok=True) diff --git a/tests/lib/wheel.py b/tests/lib/wheel.py index f2ddfd3b7e1..e63f44e1cad 100644 --- a/tests/lib/wheel.py +++ b/tests/lib/wheel.py @@ -1,5 +1,6 @@ """Helper for building wheels as would be in test cases. 
""" + import csv import itertools from base64 import urlsafe_b64encode @@ -190,7 +191,7 @@ def urlsafe_b64encode_nopad(data: bytes) -> str: def digest(contents: bytes) -> str: - return "sha256={}".format(urlsafe_b64encode_nopad(sha256(contents).digest())) + return f"sha256={urlsafe_b64encode_nopad(sha256(contents).digest())}" def record_file_maker_wrapper( diff --git a/tests/requirements.txt b/tests/requirements.txt index 84b7c14d4b4..5ecb21f6bf6 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -12,3 +12,4 @@ virtualenv >= 20.0 ; python_version >= '3.10' werkzeug wheel tomli-w +proxy.py diff --git a/tests/unit/metadata/test_metadata.py b/tests/unit/metadata/test_metadata.py index f77178fb9c1..ccc8ceb2e75 100644 --- a/tests/unit/metadata/test_metadata.py +++ b/tests/unit/metadata/test_metadata.py @@ -23,7 +23,7 @@ def test_dist_get_direct_url_no_metadata(mock_read_text: mock.Mock) -> None: class FakeDistribution(BaseDistribution): pass - dist = FakeDistribution() + dist = FakeDistribution() # type: ignore assert dist.direct_url is None mock_read_text.assert_called_once_with(DIRECT_URL_METADATA_NAME) @@ -35,7 +35,7 @@ def test_dist_get_direct_url_invalid_json( class FakeDistribution(BaseDistribution): canonical_name = cast(NormalizedName, "whatever") # Needed for error logging. - dist = FakeDistribution() + dist = FakeDistribution() # type: ignore with caplog.at_level(logging.WARNING): assert dist.direct_url is None @@ -84,7 +84,7 @@ def test_dist_get_direct_url_valid_metadata(mock_read_text: mock.Mock) -> None: class FakeDistribution(BaseDistribution): pass - dist = FakeDistribution() + dist = FakeDistribution() # type: ignore direct_url = dist.direct_url assert direct_url is not None mock_read_text.assert_called_once_with(DIRECT_URL_METADATA_NAME) @@ -129,3 +129,17 @@ def test_dist_found_in_zip(tmp_path: Path) -> None: dist = get_environment([location]).get_distribution("pkg") assert dist is not None and dist.location is not None assert Path(dist.location) == Path(location) + + +@pytest.mark.parametrize( + "path", + ( + "/path/to/foo.egg-info".replace("/", os.path.sep), + # Tests issue fixed by https://github.com/pypa/pip/pull/2530 + "/path/to/foo.egg-info/".replace("/", os.path.sep), + ), +) +def test_trailing_slash_directory_metadata(path: str) -> None: + dist = get_directory_distribution(path) + assert dist.raw_name == dist.canonical_name == "foo" + assert dist.location == "/path/to".replace("/", os.path.sep) diff --git a/tests/unit/resolution_resolvelib/test_provider.py b/tests/unit/resolution_resolvelib/test_provider.py index ab1dc74caa3..77d1d299a0e 100644 --- a/tests/unit/resolution_resolvelib/test_provider.py +++ b/tests/unit/resolution_resolvelib/test_provider.py @@ -50,29 +50,29 @@ def test_provider_known_depths(factory: Factory) -> None: ) assert provider._known_depths == {root_requirement_name: 1.0} - # Transative requirement is a dependency of root requirement + # Transitive requirement is a dependency of root requirement # theforefore has an inferred depth of 2 root_package_candidate = InstallationCandidate( root_requirement_name, "1.0", Link("https://{root_requirement_name}.com"), ) - transative_requirement_name = "my-transitive-package" + transitive_requirement_name = "my-transitive-package" - transative_package_information = build_requirement_information( - name=transative_requirement_name, parent=root_package_candidate + transitive_package_information = build_requirement_information( + name=transitive_requirement_name, parent=root_package_candidate ) 
provider.get_preference( - identifier=transative_requirement_name, + identifier=transitive_requirement_name, resolutions={}, candidates={}, information={ root_requirement_name: root_requirement_information, - transative_requirement_name: transative_package_information, + transitive_requirement_name: transitive_package_information, }, backtrack_causes=[], ) assert provider._known_depths == { - transative_requirement_name: 2.0, + transitive_requirement_name: 2.0, root_requirement_name: 1.0, } diff --git a/tests/unit/resolution_resolvelib/test_requirement.py b/tests/unit/resolution_resolvelib/test_requirement.py index 6864e70ea0a..b7b0395b037 100644 --- a/tests/unit/resolution_resolvelib/test_requirement.py +++ b/tests/unit/resolution_resolvelib/test_requirement.py @@ -23,6 +23,14 @@ # Editables +def _is_satisfied_by(requirement: Requirement, candidate: Candidate) -> bool: + """A helper function to check if a requirement is satisfied by a candidate. + + Used for mocking PipProvider.is_satisfied_by. + """ + return requirement.is_satisfied_by(candidate) + + @pytest.fixture def test_cases(data: TestData) -> Iterator[List[Tuple[str, str, int]]]: def _data_file(name: str) -> Path: @@ -61,9 +69,9 @@ def test_new_resolver_requirement_has_name( ) -> None: """All requirements should have a name""" for spec, name, _ in test_cases: - req = factory.make_requirement_from_spec(spec, comes_from=None) - assert req is not None - assert req.name == name + reqs = list(factory.make_requirements_from_spec(spec, comes_from=None)) + assert len(reqs) == 1 + assert reqs[0].name == name def test_new_resolver_correct_number_of_matches( @@ -71,14 +79,16 @@ def test_new_resolver_correct_number_of_matches( ) -> None: """Requirements should return the correct number of candidates""" for spec, _, match_count in test_cases: - req = factory.make_requirement_from_spec(spec, comes_from=None) - assert req is not None + reqs = list(factory.make_requirements_from_spec(spec, comes_from=None)) + assert len(reqs) == 1 + req = reqs[0] matches = factory.find_candidates( req.name, {req.name: [req]}, {}, Constraint.empty(), prefers_installed=False, + is_satisfied_by=_is_satisfied_by, ) assert sum(1 for _ in matches) == match_count @@ -88,14 +98,16 @@ def test_new_resolver_candidates_match_requirement( ) -> None: """Candidates returned from find_candidates should satisfy the requirement""" for spec, _, _ in test_cases: - req = factory.make_requirement_from_spec(spec, comes_from=None) - assert req is not None + reqs = list(factory.make_requirements_from_spec(spec, comes_from=None)) + assert len(reqs) == 1 + req = reqs[0] candidates = factory.find_candidates( req.name, {req.name: [req]}, {}, Constraint.empty(), prefers_installed=False, + is_satisfied_by=_is_satisfied_by, ) for c in candidates: assert isinstance(c, Candidate) @@ -104,8 +116,8 @@ def test_new_resolver_candidates_match_requirement( def test_new_resolver_full_resolve(factory: Factory, provider: PipProvider) -> None: """A very basic full resolve""" - req = factory.make_requirement_from_spec("simplewheel", comes_from=None) - assert req is not None + reqs = list(factory.make_requirements_from_spec("simplewheel", comes_from=None)) + assert len(reqs) == 1 r: Resolver[Requirement, Candidate, str] = Resolver(provider, BaseReporter()) - result = r.resolve([req]) + result = r.resolve(reqs) assert set(result.mapping.keys()) == {"simplewheel"} diff --git a/tests/unit/test_base_command.py b/tests/unit/test_base_command.py index daec5fc6c65..44dae384a75 100644 ---
a/tests/unit/test_base_command.py +++ b/tests/unit/test_base_command.py @@ -151,7 +151,7 @@ def assert_helpers_set(options: Values, args: List[str]) -> int: c = Command("fake", "fake") # https://github.com/python/mypy/issues/2427 - c.run = Mock(side_effect=assert_helpers_set) # type: ignore[assignment] + c.run = Mock(side_effect=assert_helpers_set) # type: ignore[method-assign] assert c.main(["fake"]) == SUCCESS c.run.assert_called_once() @@ -176,7 +176,7 @@ def create_temp_dirs(options: Values, args: List[str]) -> int: c = Command("fake", "fake") # https://github.com/python/mypy/issues/2427 - c.run = Mock(side_effect=create_temp_dirs) # type: ignore[assignment] + c.run = Mock(side_effect=create_temp_dirs) # type: ignore[method-assign] assert c.main(["fake"]) == SUCCESS c.run.assert_called_once() assert os.path.exists(Holder.value) == exists @@ -200,6 +200,6 @@ def create_temp_dirs(options: Values, args: List[str]) -> int: c = Command("fake", "fake") # https://github.com/python/mypy/issues/2427 - c.run = Mock(side_effect=create_temp_dirs) # type: ignore[assignment] + c.run = Mock(side_effect=create_temp_dirs) # type: ignore[method-assign] assert c.main(["fake"]) == SUCCESS c.run.assert_called_once() diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index e855d78e126..2aaedeedfe8 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -30,6 +30,7 @@ from pip._internal.models.link import ( Link, LinkHash, + MetadataFile, _clean_url_path, _ensure_quoted_url, ) @@ -118,8 +119,8 @@ def test_get_index_content_invalid_content_type_archive( assert ( "pip._internal.index.collector", logging.WARNING, - "Skipping page {} because it looks like an archive, and cannot " - "be checked by a HTTP HEAD request.".format(url), + f"Skipping page {url} because it looks like an archive, and cannot " + "be checked by a HTTP HEAD request.", ) in caplog.record_tuples @@ -416,8 +417,8 @@ def _test_parse_links_data_attribute( html = ( "<!DOCTYPE html>" '<html><head><meta charset="utf-8"><head>' - "<body>{}</body></html>" - ).format(anchor_html) + f"<body>{anchor_html}</body></html>" + ) html_bytes = html.encode("utf-8") page = IndexContent( html_bytes, @@ -485,13 +486,30 @@ def test_parse_links_json() -> None: "requires-python": ">=3.7", "dist-info-metadata": False, }, - # Same as above, but parsing dist-info-metadata. + # Same as above, but parsing core-metadata. + { + "filename": "holygrail-1.0-py3-none-any.whl", + "url": "/files/holygrail-1.0-py3-none-any.whl", + "hashes": {"sha256": "sha256 hash", "blake2b": "blake2b hash"}, + "requires-python": ">=3.7", + "core-metadata": {"sha512": "aabdd41"}, + }, + # Ensure fallback to dist-info-metadata works { "filename": "holygrail-1.0-py3-none-any.whl", "url": "/files/holygrail-1.0-py3-none-any.whl", "hashes": {"sha256": "sha256 hash", "blake2b": "blake2b hash"}, "requires-python": ">=3.7", - "dist-info-metadata": "sha512=aabdd41", + "dist-info-metadata": {"sha512": "aabdd41"}, + }, + # Ensure that core-metadata gets priority.
+ { + "filename": "holygrail-1.0-py3-none-any.whl", + "url": "/files/holygrail-1.0-py3-none-any.whl", + "hashes": {"sha256": "sha256 hash", "blake2b": "blake2b hash"}, + "requires-python": ">=3.7", + "core-metadata": {"sha512": "aabdd41"}, + "dist-info-metadata": {"sha512": "this_is_wrong"}, }, ], } @@ -527,7 +545,23 @@ def test_parse_links_json() -> None: requires_python=">=3.7", yanked_reason=None, hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"}, - dist_info_metadata="sha512=aabdd41", + metadata_file_data=MetadataFile({"sha512": "aabdd41"}), + ), + Link( + "https://example.com/files/holygrail-1.0-py3-none-any.whl", + comes_from=page.url, + requires_python=">=3.7", + yanked_reason=None, + hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"}, + metadata_file_data=MetadataFile({"sha512": "aabdd41"}), + ), + Link( + "https://example.com/files/holygrail-1.0-py3-none-any.whl", + comes_from=page.url, + requires_python=">=3.7", + yanked_reason=None, + hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"}, + metadata_file_data=MetadataFile({"sha512": "aabdd41"}), ), ] @@ -585,30 +619,42 @@ def test_parse_links__yanked_reason(anchor_html: str, expected: Optional[str]) - ), # Test with value "true". ( - '<a href="/pkg1-1.0.tar.gz" data-dist-info-metadata="true"></a>', - "true", + '<a href="/pkg1-1.0.tar.gz" data-core-metadata="true"></a>', + MetadataFile(None), {}, ), # Test with a provided hash value. ( - '<a href="/pkg1-1.0.tar.gz" data-dist-info-metadata="sha256=aa113592bbe"></a>', # noqa: E501 - "sha256=aa113592bbe", + '<a href="/pkg1-1.0.tar.gz" data-core-metadata="sha256=aa113592bbe"></a>', + MetadataFile({"sha256": "aa113592bbe"}), {}, ), # Test with a provided hash value for both the requirement as well as metadata. ( - '<a href="/pkg1-1.0.tar.gz#sha512=abc132409cb" data-dist-info-metadata="sha256=aa113592bbe"></a>', # noqa: E501 - "sha256=aa113592bbe", + '<a href="/pkg1-1.0.tar.gz#sha512=abc132409cb" data-core-metadata="sha256=aa113592bbe"></a>', # noqa: E501 + MetadataFile({"sha256": "aa113592bbe"}), {"sha512": "abc132409cb"}, ), + # Ensure the fallback to the old name works. + ( + '<a href="/pkg1-1.0.tar.gz" data-dist-info-metadata="sha256=aa113592bbe"></a>', # noqa: E501 + MetadataFile({"sha256": "aa113592bbe"}), + {}, + ), + # Ensure that the data-core-metadata name gets priority. + ( + '<a href="/pkg1-1.0.tar.gz" data-core-metadata="sha256=aa113592bbe" data-dist-info-metadata="sha256=this_is_wrong"></a>', # noqa: E501 + MetadataFile({"sha256": "aa113592bbe"}), + {}, + ), ], ) -def test_parse_links__dist_info_metadata( +def test_parse_links__metadata_file_data( anchor_html: str, expected: Optional[str], hashes: Dict[str, str], ) -> None: - link = _test_parse_links_data_attribute(anchor_html, "dist_info_metadata", expected) + link = _test_parse_links_data_attribute(anchor_html, "metadata_file_data", expected) assert link._hashes == hashes @@ -718,8 +764,8 @@ def test_get_index_content_invalid_scheme( ( "pip._internal.index.collector", logging.WARNING, - "Cannot look at {} URL {} because it does not support " - "lookup as web pages.".format(vcs_scheme, url), + f"Cannot look at {vcs_scheme} URL {url} because it does not support " + "lookup as web pages.", ), ] @@ -756,9 +802,9 @@ def test_get_index_content_invalid_content_type( assert ( "pip._internal.index.collector", logging.WARNING, - "Skipping page {} because the GET request got Content-Type: {}. " - "The only supported Content-Types are application/vnd.pypi.simple.v1+json, " - "application/vnd.pypi.simple.v1+html, and text/html".format(url, content_type), + f"Skipping page {url} because the GET request got Content-Type: {content_type}." + " The only supported Content-Types are application/vnd.pypi.simple.v1+json, " + "application/vnd.pypi.simple.v1+html, and text/html", ) in caplog.record_tuples @@ -816,7 +862,7 @@ def test_collect_sources__file_expand_dir(data: TestData) -> None: ) sources = collector.collect_sources( # Shouldn't be used.
-            project_name=None,  # type: ignore[arg-type]
+            project_name="",
             candidates_from_page=None,  # type: ignore[arg-type]
         )
         assert (
@@ -865,7 +911,7 @@ def test_collect_sources__non_existing_path() -> None:
             index_url="ignored-by-no-index",
             extra_index_urls=[],
             no_index=True,
-            find_links=[os.path.join("this", "doesnt", "exist")],
+            find_links=[os.path.join("this", "does", "not", "exist")],
         ),
     )
     sources = collector.collect_sources(
@@ -914,7 +960,7 @@ def test_fetch_response(self, mock_get_simple_response: mock.Mock) -> None:
             session=link_collector.session,
         )

-    def test_collect_sources(
+    def test_collect_page_sources(
        self, caplog: pytest.LogCaptureFixture, data: TestData
    ) -> None:
        caplog.set_level(logging.DEBUG)
@@ -947,9 +993,8 @@ def test_collect_sources(
         files = list(files_it)
         pages = list(pages_it)

-        # Spot-check the returned sources.
-        assert len(files) > 20
-        check_links_include(files, names=["simple-1.0.tar.gz"])
+        # Only the "twine" source should be returned from collecting sources
+        assert len(files) == 1
         assert [page.link for page in pages] == [Link("https://pypi.org/simple/twine/")]

         # Check that index URLs are marked as *un*cacheable.
@@ -964,6 +1009,52 @@ def test_collect_sources(
             ("pip._internal.index.collector", logging.DEBUG, expected_message),
         ]

+    def test_collect_file_sources(
+        self, caplog: pytest.LogCaptureFixture, data: TestData
+    ) -> None:
+        caplog.set_level(logging.DEBUG)
+
+        link_collector = make_test_link_collector(
+            find_links=[data.find_links],
+            # Include two copies of the URL to check that the second one
+            # is skipped.
+            index_urls=[PyPI.simple_url, PyPI.simple_url],
+        )
+        collected_sources = link_collector.collect_sources(
+            "singlemodule",
+            candidates_from_page=lambda link: [
+                InstallationCandidate("singlemodule", "0.0.1", link)
+            ],
+        )
+
+        files_it = itertools.chain.from_iterable(
+            source.file_links()
+            for sources in collected_sources
+            for source in sources
+            if source is not None
+        )
+        pages_it = itertools.chain.from_iterable(
+            source.page_candidates()
+            for sources in collected_sources
+            for source in sources
+            if source is not None
+        )
+        files = list(files_it)
+        _ = list(pages_it)
+
+        # "singlemodule" should return file links
+        assert len(files) > 0
+        check_links_include(files, names=["singlemodule-0.0.1.tar.gz"])
+
+        expected_message = dedent(
+            """\
+            1 location(s) to search for versions of singlemodule:
+            * https://pypi.org/simple/singlemodule/"""
+        )
+        assert caplog.record_tuples == [
+            ("pip._internal.index.collector", logging.DEBUG, expected_message),
+        ]
+

 @pytest.mark.parametrize(
     "find_links, no_index, suppress_no_index, expected",
@@ -1080,17 +1171,26 @@ def test_link_hash_parsing(url: str, result: Optional[LinkHash]) -> None:

 @pytest.mark.parametrize(
-    "dist_info_metadata, result",
+    "metadata_attrib, expected",
     [
-        ("sha256=aa113592bbe", LinkHash("sha256", "aa113592bbe")),
-        ("sha256=", LinkHash("sha256", "")),
-        ("sha500=aa113592bbe", None),
-        ("true", None),
-        ("", None),
-        ("aa113592bbe", None),
+        ("sha256=aa113592bbe", MetadataFile({"sha256": "aa113592bbe"})),
+        ("sha256=", MetadataFile({"sha256": ""})),
+        ("sha500=aa113592bbe", MetadataFile(None)),
+        ("true", MetadataFile(None)),
+        (None, None),
+        # Attribute is present but invalid
+        ("", MetadataFile(None)),
+        ("aa113592bbe", MetadataFile(None)),
     ],
 )
-def test_pep658_hash_parsing(
-    dist_info_metadata: str, result: Optional[LinkHash]
+def test_metadata_file_info_parsing_html(
+    metadata_attrib: str, expected: Optional[MetadataFile]
 ) -> None:
-    assert LinkHash.parse_pep658_hash(dist_info_metadata) == result
+    attribs: Dict[str, Optional[str]] = {
+        "href": "something",
+        "data-dist-info-metadata": metadata_attrib,
+    }
+    page_url = "dummy_for_comes_from"
+    base_url = "https://index.url/simple"
+    link = Link.from_element(attribs, page_url, base_url)
+    assert link is not None and link.metadata_file_data == expected
diff --git a/tests/unit/test_configuration.py b/tests/unit/test_configuration.py
index c6b44d45aad..1a0acb7b411 100644
--- a/tests/unit/test_configuration.py
+++ b/tests/unit/test_configuration.py
@@ -215,7 +215,7 @@ def test_site_modification(self) -> None:
         # Mock out the method
         mymock = MagicMock(spec=self.configuration._mark_as_modified)
         # https://github.com/python/mypy/issues/2427
-        self.configuration._mark_as_modified = mymock  # type: ignore[assignment]
+        self.configuration._mark_as_modified = mymock  # type: ignore[method-assign]

         self.configuration.set_value("test.hello", "10")
@@ -231,7 +231,7 @@ def test_user_modification(self) -> None:
         # Mock out the method
         mymock = MagicMock(spec=self.configuration._mark_as_modified)
         # https://github.com/python/mypy/issues/2427
-        self.configuration._mark_as_modified = mymock  # type: ignore[assignment]
+        self.configuration._mark_as_modified = mymock  # type: ignore[method-assign]

         self.configuration.set_value("test.hello", "10")
@@ -250,7 +250,7 @@ def test_global_modification(self) -> None:
         # Mock out the method
         mymock = MagicMock(spec=self.configuration._mark_as_modified)
         # https://github.com/python/mypy/issues/2427
-        self.configuration._mark_as_modified = mymock  # type: ignore[assignment]
+        self.configuration._mark_as_modified = mymock  # type: ignore[method-assign]

         self.configuration.set_value("test.hello", "10")
diff --git a/tests/unit/test_finder.py b/tests/unit/test_finder.py
index 3404d1498e3..35c7e89b765 100644
--- a/tests/unit/test_finder.py
+++ b/tests/unit/test_finder.py
@@ -128,7 +128,10 @@ def test_skip_invalid_wheel_link(
         with pytest.raises(DistributionNotFound):
             finder.find_requirement(req, True)

-        assert "Skipping link: invalid wheel filename:" in caplog.text
+        assert (
+            "Could not find a version that satisfies the requirement invalid"
+            " (from versions:" in caplog.text
+        )

     def test_not_find_wheel_not_supported(self, data: TestData) -> None:
         """
diff --git a/tests/unit/test_link.py b/tests/unit/test_link.py
index 311be588858..a379d877b2c 100644
--- a/tests/unit/test_link.py
+++ b/tests/unit/test_link.py
@@ -143,10 +143,7 @@ def test_is_yanked(self, yanked_reason: Optional[str], expected: bool) -> None:
     def test_is_hash_allowed(
         self, hash_name: str, hex_digest: str, expected: bool
     ) -> None:
-        url = "https://example.com/wheel.whl#{hash_name}={hex_digest}".format(
-            hash_name=hash_name,
-            hex_digest=hex_digest,
-        )
+        url = f"https://example.com/wheel.whl#{hash_name}={hex_digest}"
         link = Link(url)
         hashes_data = {
             "sha512": [128 * "a", 128 * "b"],
diff --git a/tests/unit/test_locations.py b/tests/unit/test_locations.py
index bd233b22aab..884e0dd51e2 100644
--- a/tests/unit/test_locations.py
+++ b/tests/unit/test_locations.py
@@ -2,6 +2,7 @@
 locations.py tests
 """
+
 import getpass
 import os
 import shutil
diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py
index 3ba6ed57ca5..f673ed29def 100644
--- a/tests/unit/test_logging.py
+++ b/tests/unit/test_logging.py
@@ -1,5 +1,7 @@
 import logging
 import time
+from contextlib import redirect_stderr, redirect_stdout
+from io import StringIO
 from threading import Thread
 from unittest.mock import patch

@@ -11,7 +13,6 @@
     RichPipStreamHandler,
     indent_log,
 )
-from pip._internal.utils.misc import captured_stderr, captured_stdout

 logger = logging.getLogger(__name__)

@@ -21,13 +22,13 @@ class TestIndentingFormatter:
     def make_record(self, msg: str, level_name: str) -> logging.LogRecord:
         level_number = getattr(logging, level_name)
-        attrs = dict(
-            msg=msg,
-            created=1547704837.040001 + time.timezone,
-            msecs=40,
-            levelname=level_name,
-            levelno=level_number,
-        )
+        attrs = {
+            "msg": msg,
+            "created": 1547704837.040001 + time.timezone,
+            "msecs": 40,
+            "levelname": level_name,
+            "levelno": level_number,
+        }
         record = logging.makeLogRecord(attrs)
         return record
@@ -140,7 +141,7 @@ def test_broken_pipe_in_stderr_flush(self) -> None:
         """
         record = self._make_log_record()

-        with captured_stderr() as stderr:
+        with redirect_stderr(StringIO()) as stderr:
             handler = RichPipStreamHandler(stream=stderr, no_color=True)
             with patch("sys.stderr.flush") as mock_flush:
                 mock_flush.side_effect = BrokenPipeError()
@@ -163,7 +164,7 @@ def test_broken_pipe_in_stdout_write(self) -> None:
         """
         record = self._make_log_record()

-        with captured_stdout() as stdout:
+        with redirect_stdout(StringIO()) as stdout:
             handler = RichPipStreamHandler(stream=stdout, no_color=True)
             with patch("sys.stdout.write") as mock_write:
                 mock_write.side_effect = BrokenPipeError()
@@ -178,7 +179,7 @@ def test_broken_pipe_in_stdout_flush(self) -> None:
         """
         record = self._make_log_record()

-        with captured_stdout() as stdout:
+        with redirect_stdout(StringIO()) as stdout:
             handler = RichPipStreamHandler(stream=stdout, no_color=True)
             with patch("sys.stdout.flush") as mock_flush:
                 mock_flush.side_effect = BrokenPipeError()
diff --git a/tests/unit/test_models.py b/tests/unit/test_models.py
index c5545e37d01..2550cae412d 100644
--- a/tests/unit/test_models.py
+++ b/tests/unit/test_models.py
@@ -49,11 +49,3 @@ def test_sets_correct_variables(self) -> None:
         assert obj.name == "A"
         assert obj.version == parse_version("1.0.0")
         assert obj.link.url == "https://somewhere.com/path/A-1.0.0.tar.gz"
-
-    # NOTE: This isn't checking the ordering logic; only the data provided to
-    # it is correct.
-    def test_sets_the_right_key(self) -> None:
-        obj = candidate.InstallationCandidate(
-            "A", "1.0.0", Link("https://somewhere.com/path/A-1.0.0.tar.gz")
-        )
-        assert obj._compare_key == (obj.name, obj.version, obj.link)
diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py
index 5dde6da57c5..5c12d870156 100644
--- a/tests/unit/test_network_auth.py
+++ b/tests/unit/test_network_auth.py
@@ -193,7 +193,7 @@ def test_keyring_get_password(
     expect: Tuple[Optional[str], Optional[str]],
 ) -> None:
     keyring = KeyringModuleV1()
-    monkeypatch.setitem(sys.modules, "keyring", keyring)  # type: ignore[misc]
+    monkeypatch.setitem(sys.modules, "keyring", keyring)
     auth = MultiDomainBasicAuth(
         index_urls=["http://example.com/path2", "http://example.com/path3"],
         keyring_provider="import",
@@ -205,7 +205,7 @@ def test_keyring_get_password(

 def test_keyring_get_password_after_prompt(monkeypatch: pytest.MonkeyPatch) -> None:
     keyring = KeyringModuleV1()
-    monkeypatch.setitem(sys.modules, "keyring", keyring)  # type: ignore[misc]
+    monkeypatch.setitem(sys.modules, "keyring", keyring)
     auth = MultiDomainBasicAuth(keyring_provider="import")

     def ask_input(prompt: str) -> str:
@@ -221,7 +221,7 @@ def test_keyring_get_password_after_prompt_when_none(
     monkeypatch: pytest.MonkeyPatch,
 ) -> None:
     keyring = KeyringModuleV1()
-    monkeypatch.setitem(sys.modules, "keyring", keyring)  # type: ignore[misc]
+    monkeypatch.setitem(sys.modules, "keyring", keyring)
     auth = MultiDomainBasicAuth(keyring_provider="import")

     def ask_input(prompt: str) -> str:
@@ -242,7 +242,7 @@ def test_keyring_get_password_username_in_index(
     monkeypatch: pytest.MonkeyPatch,
 ) -> None:
     keyring = KeyringModuleV1()
-    monkeypatch.setitem(sys.modules, "keyring", keyring)  # type: ignore[misc]
+    monkeypatch.setitem(sys.modules, "keyring", keyring)
     auth = MultiDomainBasicAuth(
         index_urls=["http://user@example.com/path2", "http://example.com/path4"],
         keyring_provider="import",
@@ -278,7 +278,7 @@ def test_keyring_set_password(
     expect_save: bool,
 ) -> None:
     keyring = KeyringModuleV1()
-    monkeypatch.setitem(sys.modules, "keyring", keyring)  # type: ignore[misc]
+    monkeypatch.setitem(sys.modules, "keyring", keyring)
     auth = MultiDomainBasicAuth(prompting=True, keyring_provider="import")
     monkeypatch.setattr(auth, "_get_url_and_credentials", lambda u: (u, None, None))
     monkeypatch.setattr(auth, "_prompt_for_password", lambda *a: creds)
@@ -352,9 +352,9 @@ def get_credential(self, system: str, username: str) -> Optional[Credential]:
     ),
 )
 def test_keyring_get_credential(
-    monkeypatch: pytest.MonkeyPatch, url: str, expect: str
+    monkeypatch: pytest.MonkeyPatch, url: str, expect: Tuple[str, str]
 ) -> None:
-    monkeypatch.setitem(sys.modules, "keyring", KeyringModuleV2())  # type: ignore[misc]
+    monkeypatch.setitem(sys.modules, "keyring", KeyringModuleV2())
     auth = MultiDomainBasicAuth(
         index_urls=["http://example.com/path1", "http://example.com/path2"],
         keyring_provider="import",
@@ -378,7 +378,7 @@ def get_credential(self, system: str, username: str) -> None:

 def test_broken_keyring_disables_keyring(monkeypatch: pytest.MonkeyPatch) -> None:
     keyring_broken = KeyringModuleBroken()
-    monkeypatch.setitem(sys.modules, "keyring", keyring_broken)  # type: ignore[misc]
+    monkeypatch.setitem(sys.modules, "keyring", keyring_broken)

     auth = MultiDomainBasicAuth(
         index_urls=["http://example.com/"], keyring_provider="import"
@@ -406,7 +406,7 @@ def __call__(
         stdin: Optional[Any] = None,
         stdout: Optional[Any] = None,
         input: Optional[bytes] = None,
-        check: Optional[bool] = None
+        check: Optional[bool] = None,
     ) -> Any:
         if cmd[1] == "get":
             assert stdin == -3  # subprocess.DEVNULL
diff --git a/tests/unit/test_network_cache.py b/tests/unit/test_network_cache.py
index a5519864f4c..6a816b30090 100644
--- a/tests/unit/test_network_cache.py
+++ b/tests/unit/test_network_cache.py
@@ -27,10 +27,32 @@ def test_cache_roundtrip(self, cache_tmpdir: Path) -> None:
         cache = SafeFileCache(os.fspath(cache_tmpdir))
         assert cache.get("test key") is None
         cache.set("test key", b"a test string")
+        # Body hasn't been stored yet, so the entry isn't valid yet
+        assert cache.get("test key") is None
+
+        # With a body, the cache entry is valid:
+        cache.set_body("test key", b"body")
         assert cache.get("test key") == b"a test string"
         cache.delete("test key")
         assert cache.get("test key") is None

+    def test_cache_roundtrip_body(self, cache_tmpdir: Path) -> None:
+        cache = SafeFileCache(os.fspath(cache_tmpdir))
+        assert cache.get_body("test key") is None
+        cache.set_body("test key", b"a test string")
+        # Metadata isn't available, so the entry isn't valid yet (this
+        # shouldn't happen, but just in case)
+        assert cache.get_body("test key") is None
+
+        # With metadata, the cache entry is valid:
+        cache.set("test key", b"metadata")
+        body = cache.get_body("test key")
+        assert body is not None
+        with body:
+            assert body.read() == b"a test string"
+        cache.delete("test key")
+        assert cache.get_body("test key") is None
+
     @pytest.mark.skipif("sys.platform == 'win32'")
     def test_safe_get_no_perms(
         self, cache_tmpdir: Path, monkeypatch: pytest.MonkeyPatch
diff --git a/tests/unit/test_network_session.py b/tests/unit/test_network_session.py
index 86217468459..e867ff4b308 100644
--- a/tests/unit/test_network_session.py
+++ b/tests/unit/test_network_session.py
@@ -10,10 +10,17 @@

 from pip import __version__
 from pip._internal.models.link import Link
-from pip._internal.network.session import CI_ENVIRONMENT_VARIABLES, PipSession
+from pip._internal.network.session import (
+    CI_ENVIRONMENT_VARIABLES,
+    PipSession,
+    user_agent,
+)


 def get_user_agent() -> str:
+    # These tests are testing the computation of the user agent, so we want to
+    # avoid reusing cached values.
+    user_agent.cache_clear()
     return PipSession().headers["User-Agent"]
@@ -58,7 +65,7 @@ def test_user_agent__ci(

 def test_user_agent_user_data(monkeypatch: pytest.MonkeyPatch) -> None:
     monkeypatch.setenv("PIP_USER_AGENT_USER_DATA", "some_string")
-    assert "some_string" in PipSession().headers["User-Agent"]
+    assert "some_string" in get_user_agent()


 class TestPipSession:
diff --git a/tests/unit/test_network_utils.py b/tests/unit/test_network_utils.py
index cdc10b2ba6e..380d5741ff6 100644
--- a/tests/unit/test_network_utils.py
+++ b/tests/unit/test_network_utils.py
@@ -21,8 +21,8 @@ def test_raise_for_status_raises_exception(status_code: int, error_type: str) ->
     with pytest.raises(NetworkConnectionError) as excinfo:
         raise_for_status(resp)
     assert str(excinfo.value) == (
-        "{} {}: Network Error for url:"
-        " http://www.example.com/whatever.tgz".format(status_code, error_type)
+        f"{status_code} {error_type}: Network Error for url:"
+        " http://www.example.com/whatever.tgz"
     )
diff --git a/tests/unit/test_options.py b/tests/unit/test_options.py
index f2c26935665..8e7e1464a7b 100644
--- a/tests/unit/test_options.py
+++ b/tests/unit/test_options.py
@@ -2,7 +2,7 @@
 from contextlib import contextmanager
 from optparse import Values
 from tempfile import NamedTemporaryFile
-from typing import Any, Dict, Iterator, List, Tuple, Union, cast
+from typing import Any, Dict, Iterator, List, Tuple, Type, Union, cast

 import pytest
@@ -195,7 +195,6 @@ def test_cache_dir__PIP_NO_CACHE_DIR_invalid__with_no_cache_dir(


 class TestUsePEP517Options:
-
     """
     Test options related to using --use-pep517.
     """
@@ -612,7 +611,7 @@ def test_config_file_options(
         self,
         monkeypatch: pytest.MonkeyPatch,
         args: List[str],
-        expect: Union[None, str, PipError],
+        expect: Union[None, str, Type[PipError]],
     ) -> None:
         cmd = cast(ConfigurationCommand, create_command("config"))
         # Replace a handler with a no-op to avoid side effects
diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py
index c9742812be4..5e3c640a55e 100644
--- a/tests/unit/test_req.py
+++ b/tests/unit/test_req.py
@@ -6,7 +6,7 @@
 import tempfile
 from functools import partial
 from pathlib import Path
-from typing import Iterator, Optional, Tuple, cast
+from typing import Iterator, Optional, Set, Tuple, cast
 from unittest import mock

 import pytest
@@ -23,7 +23,6 @@
     PreviousBuildDirError,
 )
 from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata import select_backend
 from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
 from pip._internal.models.link import Link
 from pip._internal.network.session import PipSession
@@ -33,6 +32,8 @@
 from pip._internal.req.constructors import (
     _get_url_from_path,
     _looks_like_path,
+    install_req_drop_extras,
+    install_req_extend_extras,
     install_req_from_editable,
     install_req_from_line,
     install_req_from_parsed_requirement,
@@ -105,6 +106,7 @@ def _basic_resolver(
             use_user_site=False,
             lazy_wheel=False,
             verbosity=0,
+            legacy_resolver=True,
         )
         yield Resolver(
             preparer=preparer,
@@ -233,8 +235,8 @@ def test_unsupported_hashes(self, data: TestData) -> None:
                 r"file \(line 1\)\)\n"
                 r"Can't verify hashes for these file:// requirements because "
                 r"they point to directories:\n"
-                r" file://.*{sep}data{sep}packages{sep}FSPkg "
-                r"\(from -r file \(line 2\)\)".format(sep=sep)
+                rf" file://.*{sep}data{sep}packages{sep}FSPkg "
+                r"\(from -r file \(line 2\)\)"
             ),
         ):
             resolver.resolve(reqset.all_requirements, True)
@@ -445,6 +447,25 @@ def test_download_info_archive_cache_with_origin(
         assert isinstance(req.download_info.info, ArchiveInfo)
         assert req.download_info.info.hash == hash

+    def test_download_info_archive_cache_with_invalid_origin(
+        self, tmp_path: Path, shared_data: TestData, caplog: pytest.LogCaptureFixture
+    ) -> None:
+        """Test an invalid origin.json is ignored."""
+        url = shared_data.packages.joinpath("simple-1.0.tar.gz").as_uri()
+        finder = make_test_finder()
+        wheel_cache = WheelCache(str(tmp_path / "cache"))
+        cache_entry_dir = wheel_cache.get_path_for_link(Link(url))
+        Path(cache_entry_dir).mkdir(parents=True)
+        Path(cache_entry_dir).joinpath("origin.json").write_text("{")  # invalid json
+        wheel.make_wheel(name="simple", version="1.0").save_to_dir(cache_entry_dir)
+        with self._basic_resolver(finder, wheel_cache=wheel_cache) as resolver:
+            ireq = get_processed_req_from_line(f"simple @ {url}")
+            reqset = resolver.resolve([ireq], True)
+            assert len(reqset.all_requirements) == 1
+            req = reqset.all_requirements[0]
+            assert req.is_wheel_from_cache
+            assert "Ignoring invalid cache entry origin file" in caplog.messages[0]
+
     def test_download_info_local_wheel(self, data: TestData) -> None:
         """Test that download_info is set for requirements from a local wheel."""
         finder = make_test_finder()
@@ -580,22 +601,6 @@ def test_url_preserved_editable_req(self) -> None:
         assert req.link is not None
         assert req.link.url == url

-    @pytest.mark.parametrize(
-        "path",
-        (
-            "/path/to/foo.egg-info".replace("/", os.path.sep),
-            # Tests issue fixed by https://github.com/pypa/pip/pull/2530
-            "/path/to/foo.egg-info/".replace("/", os.path.sep),
-        ),
-    )
-    def test_get_dist(self, path: str) -> None:
-        req = install_req_from_line("foo")
-        req.metadata_directory = path
-        dist = req.get_dist()
-        assert isinstance(dist, select_backend().Distribution)
-        assert dist.raw_name == dist.canonical_name == "foo"
-        assert dist.location == "/path/to".replace("/", os.path.sep)
-
     def test_markers(self) -> None:
         for line in (
             # recommended syntax
@@ -743,6 +748,89 @@ def test_requirement_file(self) -> None:
         assert "appears to be a requirements file." in err_msg
         assert "If that is the case, use the '-r' flag to install" in err_msg

+    @pytest.mark.parametrize(
+        "inp, out",
+        [
+            ("pkg", "pkg"),
+            ("pkg==1.0", "pkg==1.0"),
+            ("pkg ; python_version<='3.6'", "pkg"),
+            ("pkg[ext]", "pkg"),
+            ("pkg [ ext1, ext2 ]", "pkg"),
+            ("pkg [ ext1, ext2 ] @ https://example.com/", "pkg@ https://example.com/"),
+            ("pkg [ext] == 1.0; python_version<='3.6'", "pkg==1.0"),
+            ("pkg-all.allowed_chars0 ~= 2.0", "pkg-all.allowed_chars0~=2.0"),
+            ("pkg-all.allowed_chars0 [ext] ~= 2.0", "pkg-all.allowed_chars0~=2.0"),
+        ],
+    )
+    def test_install_req_drop_extras(self, inp: str, out: str) -> None:
+        """
+        Test behavior of install_req_drop_extras
+        """
+        req = install_req_from_line(inp)
+        without_extras = install_req_drop_extras(req)
+        assert not without_extras.extras
+        assert str(without_extras.req) == out
+        # should always be a copy
+        assert req is not without_extras
+        assert req.req is not without_extras.req
+        # comes_from should point to original
+        assert without_extras.comes_from is req
+        # all else should be the same
+        assert without_extras.link == req.link
+        assert without_extras.markers == req.markers
+        assert without_extras.use_pep517 == req.use_pep517
+        assert without_extras.isolated == req.isolated
+        assert without_extras.global_options == req.global_options
+        assert without_extras.hash_options == req.hash_options
+        assert without_extras.constraint == req.constraint
+        assert without_extras.config_settings == req.config_settings
+        assert without_extras.user_supplied == req.user_supplied
+        assert without_extras.permit_editable_wheels == req.permit_editable_wheels
+
+    @pytest.mark.parametrize(
+        "inp, extras, out",
+        [
+            ("pkg", {}, "pkg"),
+            ("pkg==1.0", {}, "pkg==1.0"),
+            ("pkg[ext]", {}, "pkg[ext]"),
+            ("pkg", {"ext"}, "pkg[ext]"),
+            ("pkg==1.0", {"ext"}, "pkg[ext]==1.0"),
+            ("pkg==1.0", {"ext1", "ext2"}, "pkg[ext1,ext2]==1.0"),
+            ("pkg; python_version<='3.6'", {"ext"}, "pkg[ext]"),
+            ("pkg[ext1,ext2]==1.0", {"ext2", "ext3"}, "pkg[ext1,ext2,ext3]==1.0"),
+            (
+                "pkg-all.allowed_chars0 [ ext1 ] @ https://example.com/",
+                {"ext2"},
+                "pkg-all.allowed_chars0[ext1,ext2]@ https://example.com/",
+            ),
+        ],
+    )
+    def test_install_req_extend_extras(
+        self, inp: str, extras: Set[str], out: str
+    ) -> None:
+        """
+        Test behavior of install_req_extend_extras
+        """
+        req = install_req_from_line(inp)
+        extended = install_req_extend_extras(req, extras)
+        assert str(extended.req) == out
+        assert extended.req is not None
+        assert set(extended.extras) == set(extended.req.extras)
+        # should always be a copy
+        assert req is not extended
+        assert req.req is not extended.req
+        # all else should be the same
+        assert extended.link == req.link
+        assert extended.markers == req.markers
+        assert extended.use_pep517 == req.use_pep517
+        assert extended.isolated == req.isolated
+        assert extended.global_options == req.global_options
+        assert extended.hash_options == req.hash_options
+        assert extended.constraint == req.constraint
+        assert extended.config_settings == req.config_settings
+        assert extended.user_supplied == req.user_supplied
+        assert extended.permit_editable_wheels == req.permit_editable_wheels
+

 @mock.patch("pip._internal.req.req_install.os.path.abspath")
 @mock.patch("pip._internal.req.req_install.os.path.exists")
diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py
index 439c41563b7..ce751afe258 100644
--- a/tests/unit/test_req_file.py
+++ b/tests/unit/test_req_file.py
@@ -4,7 +4,7 @@
 import textwrap
 from optparse import Values
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Iterator, List, Optional, Tuple, Union
+from typing import Any, Iterator, List, Optional, Protocol, Tuple, Union
 from unittest import mock

 import pytest
@@ -29,12 +29,6 @@
 from pip._internal.req.req_install import InstallRequirement
 from tests.lib import TestData, make_test_finder, requirements_file

-if TYPE_CHECKING:
-    from typing import Protocol
-else:
-    # Protocol was introduced in Python 3.8.
-    Protocol = object
-

 @pytest.fixture
 def session() -> PipSession:
@@ -76,9 +70,11 @@ def parse_reqfile(
         yield install_req_from_parsed_requirement(
             parsed_req,
             isolated=isolated,
-            config_settings=parsed_req.options.get("config_settings")
-            if parsed_req.options
-            else None,
+            config_settings=(
+                parsed_req.options.get("config_settings")
+                if parsed_req.options
+                else None
+            ),
         )
@@ -202,8 +198,7 @@ def __call__(
         options: Optional[Values] = None,
         session: Optional[PipSession] = None,
         constraint: bool = False,
-    ) -> List[InstallRequirement]:
-        ...
+    ) -> List[InstallRequirement]: ...


 @pytest.fixture
@@ -297,7 +292,7 @@ def test_yield_pep440_line_requirement(self, line_processor: LineProcessor) -> N
     def test_yield_line_constraint(self, line_processor: LineProcessor) -> None:
         line = "SomeProject"
         filename = "filename"
-        comes_from = "-c {} (line {})".format(filename, 1)
+        comes_from = f"-c {filename} (line {1})"
         req = install_req_from_line(line, comes_from=comes_from, constraint=True)
         found_req = line_processor(line, filename, 1, constraint=True)[0]
         assert repr(found_req) == repr(req)
@@ -326,7 +321,7 @@ def test_yield_editable_constraint(self, line_processor: LineProcessor) -> None:
         url = "git+https://url#egg=SomeProject"
         line = f"-e {url}"
         filename = "filename"
-        comes_from = "-c {} (line {})".format(filename, 1)
+        comes_from = f"-c {filename} (line {1})"
         req = install_req_from_editable(url, comes_from=comes_from, constraint=True)
         found_req = line_processor(line, filename, 1, constraint=True)[0]
         assert repr(found_req) == repr(req)
@@ -471,9 +466,7 @@ def test_use_feature_with_error(
     ) -> None:
         """--use-feature triggers error when parsing requirements files."""
         with pytest.raises(RequirementsFileParseError):
-            line_processor(
-                "--use-feature=2020-resolver", "filename", 1, options=options
-            )
+            line_processor("--use-feature=resolvelib", "filename", 1, options=options)

     def test_relative_local_find_links(
         self,
@@ -875,12 +868,10 @@ def test_install_requirements_with_options(
     ) -> None:
         global_option = "--dry-run"

-        content = """
+        content = f"""
         --only-binary :all:
         INITools==2.0 --global-option="{global_option}"
-        """.format(
-            global_option=global_option
-        )
+        """

         with requirements_file(content, tmpdir) as reqs_file:
             req = next(
diff --git a/tests/unit/test_req_uninstall.py b/tests/unit/test_req_uninstall.py
index b4ae97350e0..6a846e20272 100644
--- a/tests/unit/test_req_uninstall.py
+++ b/tests/unit/test_req_uninstall.py
@@ -59,10 +59,9 @@ def iter_declared_entries(self) -> Optional[Iterator[str]]:

 def test_compressed_listing(tmpdir: Path) -> None:
     def in_tmpdir(paths: List[str]) -> List[str]:
-        li = []
-        for path in paths:
-            li.append(str(os.path.join(tmpdir, path.replace("/", os.path.sep))))
-        return li
+        return [
+            str(os.path.join(tmpdir, path.replace("/", os.path.sep))) for path in paths
+        ]

     sample = in_tmpdir(
         [
diff --git a/tests/unit/test_resolution_legacy_resolver.py b/tests/unit/test_resolution_legacy_resolver.py
index 8b9d1a58a33..b2f93b3d4f5 100644
--- a/tests/unit/test_resolution_legacy_resolver.py
+++ b/tests/unit/test_resolution_legacy_resolver.py
@@ -252,7 +252,7 @@ class NotWorkingFakeDist(FakeDist):
         def metadata(self) -> email.message.Message:
             raise FileNotFoundError(metadata_name)

-    dist = make_fake_dist(klass=NotWorkingFakeDist)
+    dist = make_fake_dist(klass=NotWorkingFakeDist)  # type: ignore

     with pytest.raises(NoneMetadataError) as exc:
         _check_dist_requires_python(
@@ -261,8 +261,8 @@ def metadata(self) -> email.message.Message:
             ignore_requires_python=False,
         )
     assert str(exc.value) == (
-        "None {} metadata found for distribution: "
-        "".format(metadata_name)
+        f"None {metadata_name} metadata found for distribution: "
+        ""
     )
diff --git a/tests/unit/test_self_check_outdated.py b/tests/unit/test_self_check_outdated.py
index c025ff30275..6b6fcea55a9 100644
--- a/tests/unit/test_self_check_outdated.py
+++ b/tests/unit/test_self_check_outdated.py
@@ -40,7 +40,7 @@ def test_pip_self_version_check_calls_underlying_implementation(
 ) -> None:
     # GIVEN
     mock_session = Mock()
-    fake_options = Values(dict(cache_dir=str(tmpdir)))
+    fake_options = Values({"cache_dir": str(tmpdir)})

     # WHEN
     self_outdated_check.pip_self_version_check(mock_session, fake_options)
@@ -49,7 +49,9 @@ def test_pip_self_version_check_calls_underlying_implementation(
     mocked_state.assert_called_once_with(cache_dir=str(tmpdir))
     mocked_function.assert_called_once_with(
         state=mocked_state(cache_dir=str(tmpdir)),
-        current_time=datetime.datetime(1970, 1, 2, 11, 0, 0),
+        current_time=datetime.datetime(
+            1970, 1, 2, 11, 0, 0, tzinfo=datetime.timezone.utc
+        ),
         local_version=ANY,
         get_remote_version=ANY,
     )
@@ -167,7 +169,10 @@ def test_writes_expected_statefile(self, tmpdir: Path) -> None:

         # WHEN
         state = self_outdated_check.SelfCheckState(cache_dir=str(cache_dir))
-        state.set("1.0.0", datetime.datetime(2000, 1, 1, 0, 0, 0))
+        state.set(
+            "1.0.0",
+            datetime.datetime(2000, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc),
+        )

         # THEN
         assert state._statefile_path == os.fspath(expected_path)
@@ -175,6 +180,6 @@ def test_writes_expected_statefile(self, tmpdir: Path) -> None:
         contents = expected_path.read_text()
         assert json.loads(contents) == {
             "key": sys.prefix,
-            "last_check": "2000-01-01T00:00:00Z",
+            "last_check": "2000-01-01T00:00:00+00:00",
             "pypi_version": "1.0.0",
         }
diff --git a/tests/unit/test_target_python.py b/tests/unit/test_target_python.py
index d3e27e39ae8..31df5935ee3 100644
--- a/tests/unit/test_target_python.py
+++ b/tests/unit/test_target_python.py
@@ -54,18 +54,18 @@ def test_init__py_version_info_none(self) -> None:
         "kwargs, expected",
         [
             ({}, ""),
-            (dict(py_version_info=(3, 6)), "version_info='3.6'"),
+            ({"py_version_info": (3, 6)}, "version_info='3.6'"),
             (
-                dict(platforms=["darwin"], py_version_info=(3, 6)),
+                {"platforms": ["darwin"], "py_version_info": (3, 6)},
                 "platforms=['darwin'] version_info='3.6'",
             ),
             (
-                dict(
-                    platforms=["darwin"],
-                    py_version_info=(3, 6),
-                    abis=["cp36m"],
-                    implementation="cp",
-                ),
+                {
+                    "platforms": ["darwin"],
+                    "py_version_info": (3, 6),
+                    "abis": ["cp36m"],
+                    "implementation": "cp",
+                },
                 (
                     "platforms=['darwin'] version_info='3.6' abis=['cp36m'] "
                     "implementation='cp'"
@@ -88,37 +88,37 @@ def test_format_given(self, kwargs: Dict[str, Any], expected: str) -> None:
             ((3, 7, 3), "37"),
             # Check a minor version with two digits.
             ((3, 10, 1), "310"),
-            # Check that versions=None is passed to get_tags().
+            # Check that versions=None is passed to get_sorted_tags().
             (None, None),
         ],
     )
     @mock.patch("pip._internal.models.target_python.get_supported")
-    def test_get_tags(
+    def test_get_sorted_tags(
         self,
         mock_get_supported: mock.Mock,
         py_version_info: Optional[Tuple[int, ...]],
         expected_version: Optional[str],
     ) -> None:
-        mock_get_supported.return_value = ["tag-1", "tag-2"]
+        dummy_tags = [Tag("py4", "none", "any"), Tag("py5", "none", "any")]
+        mock_get_supported.return_value = dummy_tags

         target_python = TargetPython(py_version_info=py_version_info)
-        actual = target_python.get_tags()
-        assert actual == ["tag-1", "tag-2"]
+        actual = target_python.get_sorted_tags()
+        assert actual == dummy_tags

-        actual = mock_get_supported.call_args[1]["version"]
-        assert actual == expected_version
+        assert mock_get_supported.call_args[1]["version"] == expected_version

         # Check that the value was cached.
-        assert target_python._valid_tags == ["tag-1", "tag-2"]
+        assert target_python._valid_tags == dummy_tags

-    def test_get_tags__uses_cached_value(self) -> None:
+    def test_get_unsorted_tags__uses_cached_value(self) -> None:
         """
-        Test that get_tags() uses the cached value.
+        Test that get_unsorted_tags() uses the cached value.
         """
         target_python = TargetPython(py_version_info=None)
-        target_python._valid_tags = [
+        target_python._valid_tags_set = {
             Tag("py2", "none", "any"),
             Tag("py3", "none", "any"),
-        ]
-        actual = target_python.get_tags()
-        assert actual == [Tag("py2", "none", "any"), Tag("py3", "none", "any")]
+        }
+        actual = target_python.get_unsorted_tags()
+        assert actual == {Tag("py2", "none", "any"), Tag("py3", "none", "any")}
diff --git a/tests/unit/test_urls.py b/tests/unit/test_urls.py
index 56ee80aa802..746d0222425 100644
--- a/tests/unit/test_urls.py
+++ b/tests/unit/test_urls.py
@@ -1,24 +1,10 @@
 import os
 import sys
 import urllib.request
-from typing import Optional

 import pytest

-from pip._internal.utils.urls import get_url_scheme, path_to_url, url_to_path
-
-
-@pytest.mark.parametrize(
-    "url,expected",
-    [
-        ("http://localhost:8080/", "http"),
-        ("file:c:/path/to/file", "file"),
-        ("file:/dev/null", "file"),
-        ("", None),
-    ],
-)
-def test_get_url_scheme(url: str, expected: Optional[str]) -> None:
-    assert get_url_scheme(url) == expected
+from pip._internal.utils.urls import path_to_url, url_to_path


 @pytest.mark.skipif("sys.platform == 'win32'")
diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py
index 450081cfd03..102b0340a14 100644
--- a/tests/unit/test_utils.py
+++ b/tests/unit/test_utils.py
@@ -2,6 +2,7 @@
 util tests
 """
+
 import codecs
 import os
 import shutil
@@ -14,6 +15,7 @@
 from unittest.mock import Mock, patch

 import pytest
+from pip._vendor.packaging.requirements import Requirement

 from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError
 from pip._internal.utils.deprecation import PipDeprecationWarning, deprecated
@@ -37,6 +39,7 @@
     normalize_path,
     normalize_version_info,
     parse_netloc,
+    redact_auth_from_requirement,
     redact_auth_from_url,
     redact_netloc,
     remove_auth_from_url,
@@ -257,9 +260,13 @@ def test_rmtree_errorhandler_reraises_error(tmpdir: Path) -> None:
     except RuntimeError:
         # Make sure the handler reraises an exception
         with pytest.raises(RuntimeError, match="test message"):
-            # Argument 3 to "rmtree_errorhandler" has incompatible type "None"; expected
-            # "Tuple[Type[BaseException], BaseException, TracebackType]"
-            rmtree_errorhandler(mock_func, path, None)  # type: ignore[arg-type]
+            # Argument 3 to "rmtree_errorhandler" has incompatible type
+            # "Union[Tuple[Type[BaseException], BaseException, TracebackType],
+            # Tuple[None, None, None]]"; expected "Tuple[Type[BaseException],
+            # BaseException, TracebackType]"
+            rmtree_errorhandler(
+                mock_func, path, sys.exc_info()  # type: ignore[arg-type]
+            )

     mock_func.assert_not_called()
@@ -761,6 +768,30 @@ def test_redact_auth_from_url(auth_url: str, expected_url: str) -> None:
     assert url == expected_url


+@pytest.mark.parametrize(
+    "req, expected",
+    [
+        ("pkga", "pkga"),
+        (
+            "resolvelib@ "
+            " git+https://test-user:test-pass@github.com/sarugaku/resolvelib@1.0.1",
+            "resolvelib@"
+            " git+https://test-user:****@github.com/sarugaku/resolvelib@1.0.1",
+        ),
+        (
+            "resolvelib@"
+            " git+https://test-user:test-pass@github.com/sarugaku/resolvelib@1.0.1"
+            " ; python_version>='3.6'",
+            "resolvelib@"
+            " git+https://test-user:****@github.com/sarugaku/resolvelib@1.0.1"
+            ' ; python_version >= "3.6"',
+        ),
+    ],
+)
+def test_redact_auth_from_requirement(req: str, expected: str) -> None:
+    assert redact_auth_from_requirement(Requirement(req)) == expected
+
+
 class TestHiddenText:
     def test_basic(self) -> None:
         """
diff --git a/tests/unit/test_utils_subprocess.py b/tests/unit/test_utils_subprocess.py
index a694b717fcb..5f0c16595a7 100644
--- a/tests/unit/test_utils_subprocess.py
+++ b/tests/unit/test_utils_subprocess.py
@@ -89,7 +89,6 @@ def finish(self, final_status: str) -> None:


 class TestCallSubprocess:
-
     """
     Test call_subprocess().
     """
@@ -260,9 +259,9 @@ def test_info_logging__subprocess_error(
         expected = (
             None,
             [
-                # pytest's caplog overrides th formatter, which means that we
+                # pytest's caplog overrides the formatter, which means that we
                 # won't see the message formatted through our formatters.
-                ("pip.subprocessor", ERROR, "[present-rich]"),
+                ("pip.subprocessor", ERROR, "subprocess error exited with 1"),
             ],
         )
         # The spinner should spin three times in this case since the
diff --git a/tests/unit/test_utils_temp_dir.py b/tests/unit/test_utils_temp_dir.py
index 4a656d23ace..a6cd0d0e5af 100644
--- a/tests/unit/test_utils_temp_dir.py
+++ b/tests/unit/test_utils_temp_dir.py
@@ -4,6 +4,7 @@
 import tempfile
 from pathlib import Path
 from typing import Any, Iterator, Optional, Union
+from unittest import mock

 import pytest
@@ -274,3 +275,25 @@ def test_tempdir_registry_lazy(should_delete: bool) -> None:
         registry.set_delete("test-for-lazy", should_delete)
         assert os.path.exists(path)
     assert os.path.exists(path) == (not should_delete)
+
+
+def test_tempdir_cleanup_ignore_errors() -> None:
+    os_unlink = os.unlink
+
+    # mock os.unlink to fail with EACCES for a specific filename to simulate
+    # how removing a loaded exe/dll behaves.
+    def unlink(name: str, *args: Any, **kwargs: Any) -> None:
+        if "bomb" in name:
+            raise PermissionError(name)
+        else:
+            os_unlink(name)
+
+    with mock.patch("os.unlink", unlink):
+        with TempDirectory(ignore_cleanup_errors=True) as tmp_dir:
+            path = tmp_dir.path
+            with open(os.path.join(path, "bomb"), "a"):
+                pass
+
+    filename = os.path.join(path, "bomb")
+    assert os.path.isfile(filename)
+    os.unlink(filename)
diff --git a/tests/unit/test_vcs.py b/tests/unit/test_vcs.py
index 566c88cf02b..af5f348dc2d 100644
--- a/tests/unit/test_vcs.py
+++ b/tests/unit/test_vcs.py
@@ -66,14 +66,14 @@ def test_rev_options_repr() -> None:
         # First check VCS-specific RevOptions behavior.
         (Bazaar, [], ["-r", "123"], {}),
         (Git, ["HEAD"], ["123"], {}),
-        (Mercurial, [], ["123"], {}),
+        (Mercurial, [], ["--rev=123"], {}),
         (Subversion, [], ["-r", "123"], {}),
         # Test extra_args. For this, test using a single VersionControl class.
         (
             Git,
             ["HEAD", "opt1", "opt2"],
             ["123", "opt1", "opt2"],
-            dict(extra_args=["opt1", "opt2"]),
+            {"extra_args": ["opt1", "opt2"]},
         ),
     ],
 )
@@ -458,8 +458,7 @@ def test_version_control__run_command__fails(
     with mock.patch("pip._internal.vcs.versioncontrol.call_subprocess") as call:
         call.side_effect = exc_cls
         with pytest.raises(BadCommand, match=msg_re.format(name=vcs_cls.name)):
-            # https://github.com/python/mypy/issues/3283
-            vcs_cls.run_command([])  # type: ignore[arg-type]
+            vcs_cls.run_command([])


 @pytest.mark.parametrize(
@@ -598,6 +597,21 @@ def test_get_git_version() -> None:
     assert git_version >= (1, 0, 0)


+@pytest.mark.parametrize(
+    ("version", "expected"),
+    [
+        ("git version 2.17", (2, 17)),
+        ("git version 2.18.1", (2, 18)),
+        ("git version 2.35.GIT", (2, 35)),  # gh:12280
+        ("oh my git version 2.37.GIT", ()),  # invalid version
+        ("git version 2.GIT", ()),  # invalid version
+    ],
+)
+def test_get_git_version_parser(version: str, expected: Tuple[int, int]) -> None:
+    with mock.patch("pip._internal.vcs.git.Git.run_command", return_value=version):
+        assert Git().get_git_version() == expected
+
+
 @pytest.mark.parametrize(
     "use_interactive,is_atty,expected",
     [
@@ -763,6 +777,22 @@ def assert_call_args(self, args: CommandArgs) -> None:
         assert self.call_subprocess_mock.call_args[0][0] == args

     def test_obtain(self) -> None:
+        self.svn.obtain(self.dest, hide_url(self.url), verbosity=1)
+        self.assert_call_args(
+            [
+                "svn",
+                "checkout",
+                "--non-interactive",
+                "--username",
+                "username",
+                "--password",
+                hide_value("password"),
+                hide_url("http://svn.example.com/"),
+                "/tmp/test",
+            ]
+        )
+
+    def test_obtain_quiet(self) -> None:
         self.svn.obtain(self.dest, hide_url(self.url), verbosity=0)
         self.assert_call_args(
             [
@@ -780,6 +810,18 @@ def test_obtain(self) -> None:
         )

     def test_fetch_new(self) -> None:
+        self.svn.fetch_new(self.dest, hide_url(self.url), self.rev_options, verbosity=1)
+        self.assert_call_args(
+            [
+                "svn",
+                "checkout",
+                "--non-interactive",
+                hide_url("svn+http://username:password@svn.example.com/"),
+                "/tmp/test",
+            ]
+        )
+
+    def test_fetch_new_quiet(self) -> None:
         self.svn.fetch_new(self.dest, hide_url(self.url), self.rev_options, verbosity=0)
         self.assert_call_args(
             [
@@ -793,6 +835,21 @@ def test_fetch_new(self) -> None:
         )

     def test_fetch_new_revision(self) -> None:
+        rev_options = RevOptions(Subversion, "123")
+        self.svn.fetch_new(self.dest, hide_url(self.url), rev_options, verbosity=1)
+        self.assert_call_args(
+            [
+                "svn",
+                "checkout",
+                "--non-interactive",
+                "-r",
+                "123",
+                hide_url("svn+http://username:password@svn.example.com/"),
+                "/tmp/test",
+            ]
+        )
+
+    def test_fetch_new_revision_quiet(self) -> None:
         rev_options = RevOptions(Subversion, "123")
         self.svn.fetch_new(self.dest, hide_url(self.url), rev_options, verbosity=0)
         self.assert_call_args(
diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py
index 6d6d1a3dc87..ed6f5821133 100644
--- a/tests/unit/test_wheel.py
+++ b/tests/unit/test_wheel.py
@@ -1,4 +1,5 @@
 """Tests for wheel binary packages and .dist-info."""
+
 import csv
 import logging
 import os
@@ -102,15 +103,13 @@ def test_get_legacy_build_wheel_path__multiple_names(
     ],
 )
 def test_get_entrypoints(tmp_path: pathlib.Path, console_scripts: str) -> None:
-    entry_points_text = """
+    entry_points_text = f"""
         [console_scripts]
-        {}
+        {console_scripts}
         [section]
         common:one = module:func
         common:two = module:other_func
-    """.format(
-        console_scripts
-    )
+    """

     distribution = make_wheel(
         "simple",
diff --git a/tests/unit/test_wheel_builder.py b/tests/unit/test_wheel_builder.py
index 9044f945307..d5f372dd5cd 100644
--- a/tests/unit/test_wheel_builder.py
+++ b/tests/unit/test_wheel_builder.py
@@ -1,5 +1,6 @@
 import logging
 import os
+from dataclasses import dataclass
 from pathlib import Path
 from typing import Optional, cast
@@ -31,29 +32,16 @@ def test_contains_egg_info(s: str, expected: bool) -> None:
     assert result == expected


+@dataclass
 class ReqMock:
-    def __init__(
-        self,
-        name: str = "pendulum",
-        is_wheel: bool = False,
-        editable: bool = False,
-        link: Optional[Link] = None,
-        constraint: bool = False,
-        source_dir: Optional[str] = "/tmp/pip-install-123/pendulum",
-        use_pep517: bool = True,
-        supports_pyproject_editable: bool = False,
-    ) -> None:
-        self.name = name
-        self.is_wheel = is_wheel
-        self.editable = editable
-        self.link = link
-        self.constraint = constraint
-        self.source_dir = source_dir
-        self.use_pep517 = use_pep517
-        self._supports_pyproject_editable = supports_pyproject_editable
-
-    def supports_pyproject_editable(self) -> bool:
-        return self._supports_pyproject_editable
+    name: str = "pendulum"
+    is_wheel: bool = False
+    editable: bool = False
+    link: Optional[Link] = None
+    constraint: bool = False
+    source_dir: Optional[str] = "/tmp/pip-install-123/pendulum"
+    use_pep517: bool = True
+    supports_pyproject_editable: bool = False


 @pytest.mark.parametrize(
diff --git a/tools/__init__.py b/tools/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tools/ci/New-RAMDisk.ps1 b/tools/ci/New-RAMDisk.ps1
deleted file mode 100644
index 21b1a573a49..00000000000
--- a/tools/ci/New-RAMDisk.ps1
+++ /dev/null
@@ -1,74 +0,0 @@
-[CmdletBinding()]
-param(
-    [Parameter(Mandatory=$true,
-               HelpMessage="Drive letter to use for the RAMDisk")]
-    [String]$drive,
-    [Parameter(HelpMessage="Size to allocate to the RAMDisk")]
-    [UInt64]$size=1GB
-)
-
-$ErrorActionPreference = "Stop"
-Set-StrictMode -Version Latest
-
-Write-Output "Installing FS-iSCSITarget-Server"
-Install-WindowsFeature -Name FS-iSCSITarget-Server
-
-Write-Output "Starting MSiSCSI"
-Start-Service MSiSCSI
-$retry = 10
-do {
-    $service = Get-Service MSiSCSI
-    if ($service.Status -eq "Running") {
-        break;
-    }
-    $retry--
-    Start-Sleep -Milliseconds 500
-} until ($retry -eq 0)
-
-$service = Get-Service MSiSCSI
-if ($service.Status -ne "Running") {
-    throw "MSiSCSI is not running"
-}
-
-Write-Output "Configuring Firewall"
-Get-NetFirewallServiceFilter -Service MSiSCSI | Enable-NetFirewallRule
-
-Write-Output "Configuring RAMDisk"
-# Must use external-facing IP address, otherwise New-IscsiTargetPortal is
-# unable to connect.
-$ip = (
-    Get-NetIPAddress -AddressFamily IPv4 |
-    Where-Object {$_.IPAddress -ne "127.0.0.1"}
-)[0].IPAddress
-if (
-    -not (Get-IscsiServerTarget -ComputerName localhost | Where-Object {$_.TargetName -eq "ramdisks"})
-) {
-    New-IscsiServerTarget `
-        -ComputerName localhost `
-        -TargetName ramdisks `
-        -InitiatorId IPAddress:$ip
-}
-
-$newVirtualDisk = New-IscsiVirtualDisk `
-    -ComputerName localhost `
-    -Path ramdisk:local$drive.vhdx `
-    -Size $size
-Add-IscsiVirtualDiskTargetMapping `
-    -ComputerName localhost `
-    -TargetName ramdisks `
-    -Path ramdisk:local$drive.vhdx
-
-Write-Output "Connecting to iSCSI"
-New-IscsiTargetPortal -TargetPortalAddress $ip
-Get-IscsiTarget | Where-Object {!$_.IsConnected} | Connect-IscsiTarget
-
-Write-Output "Configuring disk"
-$newDisk = Get-IscsiConnection |
-    Get-Disk |
-    Where-Object {$_.SerialNumber -eq $newVirtualDisk.SerialNumber}
-
-Set-Disk -InputObject $newDisk -IsOffline $false
-Initialize-Disk -InputObject $newDisk -PartitionStyle MBR
-New-Partition -InputObject $newDisk -UseMaximumSize -DriveLetter $drive
-
-Format-Volume -DriveLetter $drive -NewFileSystemLabel Temp -FileSystem NTFS
diff --git a/tools/codespell-ignore.txt b/tools/codespell-ignore.txt
new file mode 100644
index 00000000000..288f597353b
--- /dev/null
+++ b/tools/codespell-ignore.txt
@@ -0,0 +1,9 @@
+# An actual English word
+lousily
+followings
+# A contributor first name
+wil
+# Codebase variable or class names
+uptodate
+afile
+failer
diff --git a/tools/release/__init__.py b/tools/release/__init__.py
index ebd1b901414..9e0601683d6 100644
--- a/tools/release/__init__.py
+++ b/tools/release/__init__.py
@@ -8,6 +8,7 @@
 import pathlib
 import subprocess
 import tempfile
+import unicodedata
 from typing import Iterator, List, Optional, Set

 from nox.sessions import Session
@@ -45,6 +46,34 @@ def modified_files_in_git(*args: str) -> int:
     ).returncode


+def strip_rtl_ltr_overrides(a: str) -> str:
+    """Strip RIGHT-TO-LEFT OVERRIDE and LEFT-TO-RIGHT OVERRIDE characters
+    from author names.
+    Reorder the characters in between them to preserve the perception.
+    See https://github.com/pypa/pip/issues/12467 for more info."""
+    rtl = "\N{RIGHT-TO-LEFT OVERRIDE}"
+    ltr = "\N{LEFT-TO-RIGHT OVERRIDE}"
+
+    # If there are no overrides to RIGHT-TO-LEFT,
+    # only strip useless LEFT-TO-RIGHT overrides.
+    # This returns the original for most of the authors.
+    # It also serves as a termination condition for recursive calls.
+    if rtl not in a:
+        return a.replace(ltr, "")
+
+    prefix = a[: a.index(rtl)].replace(ltr, "")
+    rest = a[: a.index(rtl) : -1]
+    if ltr not in rest:
+        rest = rest.replace(rtl, "")
+    else:
+        rest = a[a.index(ltr) - 1 : a.index(rtl) : -1].replace(rtl, "")
+        rest += a[a.index(ltr) + 1 :]
+    combined = prefix + strip_rtl_ltr_overrides(rest)
+    assert rtl not in combined, f"RIGHT-TO-LEFT OVERRIDE in {combined!r}"
+    assert ltr not in combined, f"LEFT-TO-RIGHT OVERRIDE in {combined!r}"
+    return combined
+
+
 def get_author_list() -> List[str]:
     """Get the list of authors from Git commits."""
     # subprocess because session.run doesn't give us stdout
@@ -60,6 +89,8 @@ def get_author_list() -> List[str]:
     seen_authors: Set[str] = set()
     for author in result.stdout.splitlines():
         author = author.strip()
+        author = strip_rtl_ltr_overrides(author)
+        author = unicodedata.normalize("NFC", author)
         if author.lower() not in seen_authors:
             seen_authors.add(author.lower())
             authors.append(author)
diff --git a/tools/release/check_version.py b/tools/release/check_version.py
index e89d1b5bad9..de3658faacd 100644
--- a/tools/release/check_version.py
+++ b/tools/release/check_version.py
@@ -27,7 +27,7 @@ def is_this_a_good_version_number(string: str) -> Optional[str]:
     expected_major = datetime.now().year % 100

     if len(release) not in [2, 3]:
-        return "Not of the form: {0}.N or {0}.N.P".format(expected_major)
+        return f"Not of the form: {expected_major}.N or {expected_major}.N.P"

     return None
diff --git a/tools/update-rtd-redirects.py b/tools/update-rtd-redirects.py
index 8515c026cb7..2aa90e467e3 100644
--- a/tools/update-rtd-redirects.py
+++ b/tools/update-rtd-redirects.py
@@ -2,10 +2,12 @@
 Relevant API reference:
 https://docs.readthedocs.io/en/stable/api/v3.html#redirects
 """
+
 import operator
 import os
 import sys
 from pathlib import Path
+from typing import Dict, List

 import httpx
 import rich
@@ -84,8 +86,8 @@ def get_rtd_api() -> httpx.Client:

 next_step("Compare and determine modifications.")

-redirects_to_remove: list[int] = []
-redirects_to_add: dict[str, str] = {}
+redirects_to_remove: List[int] = []
+redirects_to_add: Dict[str, str] = {}

 for redirect in rtd_redirects["results"]:
     if redirect["type"] != "exact":
diff --git a/tools/vendoring/patches/certifi.patch b/tools/vendoring/patches/certifi.patch
index 4f03c62fbde..7326de77724 100644
--- a/tools/vendoring/patches/certifi.patch
+++ b/tools/vendoring/patches/certifi.patch
@@ -1,14 +1,15 @@
 diff --git a/src/pip/_vendor/certifi/core.py b/src/pip/_vendor/certifi/core.py
-index de028981b..c3e546604 100644
+index 91f538bb1..70e0c3bdb 100644
 --- a/src/pip/_vendor/certifi/core.py
 +++ b/src/pip/_vendor/certifi/core.py
-@@ -33,13 +33,13 @@ def where() -> str:
+@@ -37,14 +37,14 @@ if sys.version_info >= (3, 11):
           # We also have to hold onto the actual context manager, because
           # it will do the cleanup whenever it gets garbage collected, so
           # we will also store that at the global level as well.
 -        _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
 +        _CACERT_CTX = as_file(files("pip._vendor.certifi").joinpath("cacert.pem"))
          _CACERT_PATH = str(_CACERT_CTX.__enter__())
+         atexit.register(exit_cacert_ctx)
 
      return _CACERT_PATH
@@ -18,13 +19,14 @@ index de028981b..c3e546604 100644
 
  elif sys.version_info >= (3, 7):
 
-@@ -68,13 +68,13 @@ def where() -> str:
+@@ -73,14 +73,14 @@ elif sys.version_info >= (3, 7):
           # We also have to hold onto the actual context manager, because
           # it will do the cleanup whenever it gets garbage collected, so
           # we will also store that at the global level as well.
 -        _CACERT_CTX = get_path("certifi", "cacert.pem")
 +        _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem")
          _CACERT_PATH = str(_CACERT_CTX.__enter__())
+         atexit.register(exit_cacert_ctx)
 
      return _CACERT_PATH
@@ -34,7 +36,7 @@ index de028981b..c3e546604 100644
 
  else:
      import os
 
-@@ -105,4 +105,4 @@ def where() -> str:
+@@ -111,4 +111,4 @@ else:
          return os.path.join(f, "cacert.pem")
 
      def contents() -> str:
diff --git a/tools/vendoring/patches/pkg_resources.patch b/tools/vendoring/patches/pkg_resources.patch
index 48ae954311b..a99b6c63df8 100644
--- a/tools/vendoring/patches/pkg_resources.patch
+++ b/tools/vendoring/patches/pkg_resources.patch
@@ -2,9 +2,9 @@ diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_res
 index 3f2476a0c..8d5727d35 100644
 --- a/src/pip/_vendor/pkg_resources/__init__.py
 +++ b/src/pip/_vendor/pkg_resources/__init__.py
-@@ -71,7 +71,7 @@
-     except ImportError:
-         importlib_machinery = None
+@@ -65,7 +65,7 @@
+     from os import open as os_open
+     from os.path import isdir, split
 
 -from pkg_resources.extern.jaraco.text import (
 +from pip._internal.utils._jaraco_text import (