diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dfc06e64968..347d6a91f12 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,6 +31,6 @@ repos: - id: validate_manifest - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.278 + rev: v0.0.280 hooks: - id: ruff diff --git a/.pre-commit-hooks.yaml b/.pre-commit-hooks.yaml index 08f733c18e8..be3bca9dbcc 100644 --- a/.pre-commit-hooks.yaml +++ b/.pre-commit-hooks.yaml @@ -4,7 +4,7 @@ entry: poetry check language: python pass_filenames: false - files: ^(.*/)?pyproject.toml$ + files: ^(.*/)?pyproject\.toml$ - id: poetry-lock name: poetry-lock @@ -12,6 +12,7 @@ entry: poetry lock language: python pass_filenames: false + files: ^(.*/)?(poetry\.lock|pyproject\.toml)$ - id: poetry-export name: poetry-export @@ -19,5 +20,5 @@ entry: poetry export language: python pass_filenames: false - files: ^poetry.lock$ + files: ^(.*/)?poetry\.lock$ args: ["-f", "requirements.txt", "-o", "requirements.txt"] diff --git a/docs/faq.md b/docs/faq.md index 0b28434bf24..beac15a9692 100644 --- a/docs/faq.md +++ b/docs/faq.md @@ -146,6 +146,11 @@ commands = `tox` will not do any install. Poetry installs all the dependencies and the current package in editable mode. Thus, tests are running against the local files and not the built and installed package. +### Is Nox supported? + +Use the [`nox-poetry`](https://github.com/cjolowicz/nox-poetry) package to install locked versions of +dependencies specified in `poetry.lock` into [Nox](https://nox.thea.codes/en/stable/) sessions. + ### I don't want Poetry to manage my virtual environments. Can I disable it? While Poetry automatically creates virtual environments to always work isolated diff --git a/docs/pre-commit-hooks.md b/docs/pre-commit-hooks.md index baff210811c..49aae8d9ccd 100644 --- a/docs/pre-commit-hooks.md +++ b/docs/pre-commit-hooks.md @@ -24,6 +24,10 @@ the defaults are overwritten. You must fully specify all arguments for your hook if you make use of `args:`. {{% /note %}} +{{% note %}} +If the `pyproject.toml` file is not in the root directory, you can specify `args: ["-C", "./subdirectory"]`. +{{% /note %}} + ## poetry-check The `poetry-check` hook calls the `poetry check` command @@ -34,10 +38,6 @@ to make sure the poetry configuration does not get committed in a broken state. The hook takes the same arguments as the poetry command. For more information see the [check command]({{< relref "cli#check" >}}). -{{% note %}} -If the `pyproject.toml` file is not in the root directory, you can specify `args: ["-C", "./subdirectory"]`. -{{% /note %}} - ## poetry-lock The `poetry-lock` hook calls the `poetry lock` command @@ -48,7 +48,6 @@ to make sure the lock file is up-to-date when committing changes. The hook takes the same arguments as the poetry command. For more information see the [lock command]({{< relref "cli#lock" >}}). - ## poetry-export The `poetry-export` hook calls the `poetry export` command @@ -64,7 +63,7 @@ The hook takes the same arguments as the poetry command. For more information see the [export command]({{< relref "cli#export" >}}). The default arguments are `args: ["-f", "requirements.txt", "-o", "requirements.txt"]`, -which will create/update the requirements.txt file in the current working directory. +which will create/update the `requirements.txt` file in the current working directory. 
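The `docs/faq.md` hunk above points users to `nox-poetry` for installing locked dependencies into Nox sessions. Below is a minimal `noxfile.py` sketch of that workflow, not part of this patch: it assumes `nox` and `nox-poetry` are installed, and the Python versions, session name, and test command are illustrative placeholders.

```python
# noxfile.py — minimal sketch of the nox-poetry FAQ entry above.
from nox_poetry import Session, session


@session(python=["3.10", "3.11"])
def tests(session: Session) -> None:
    # nox-poetry installs the current package and its dependencies
    # pinned to the versions recorded in poetry.lock.
    session.install(".", "pytest")
    session.run("pytest")
```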
You may add `verbose: true` in your `.pre-commit-config.yaml` in order to output to the console: @@ -84,12 +83,23 @@ hooks: args: ["--dev", "-f", "requirements.txt", "-o", "requirements.txt"] ``` - ## Usage For more information on how to use pre-commit please see the [official documentation](https://pre-commit.com/). -A full `.pre-commit-config.yaml` example: +A minimalistic `.pre-commit-config.yaml` example: + +```yaml +repos: +- repo: https://github.com/python-poetry/poetry + rev: '' # add version here + hooks: + - id: poetry-check + - id: poetry-lock + - id: poetry-export +``` + +A `.pre-commit-config.yaml` example for a monorepo setup or if the `pyproject.toml` file is not in the root directory: ```yaml repos: @@ -97,9 +107,11 @@ repos: rev: '' # add version here hooks: - id: poetry-check + args: ["-C", "./subdirectory"] - id: poetry-lock + args: ["-C", "./subdirectory"] - id: poetry-export - args: ["-f", "requirements.txt", "-o", "requirements.txt"] + args: ["-C", "./subdirectory", "-f", "requirements.txt", "-o", "./subdirectory/requirements.txt"] ``` ## FAQ diff --git a/docs/pyproject.md b/docs/pyproject.md index f123313d883..0d23d89ff02 100644 --- a/docs/pyproject.md +++ b/docs/pyproject.md @@ -351,7 +351,7 @@ To specify a script that [depends on an extra](#extras), you may provide an entr ```toml [tool.poetry.scripts] -devtest = { callable = "mypackage:test.run_tests", extras = ["test"] } +devtest = { reference = "mypackage:test.run_tests", extras = ["test"], type = "console" } ``` {{% note %}} diff --git a/docs/repositories.md b/docs/repositories.md index 0446d3ec1cc..ba71b8369b4 100644 --- a/docs/repositories.md +++ b/docs/repositories.md @@ -174,6 +174,18 @@ poetry source add --priority=primary PyPI This way, the priority of PyPI can be set in a fine-granular way. +The equivalent specification in `pyproject.toml` is: + +```toml +[[tool.poetry.source]] +name = "pypi" +priority = "primary" +``` + +**Omit the `url` when specifying PyPI explicitly.** Because PyPI is internally configured +with Poetry, the PyPI repository cannot be configured with a given URL. Remember, you can always use +`poetry check` to ensure the validity of the `pyproject.toml` file. 
+ {{% /warning %}} {{% warning %}} diff --git a/src/poetry/console/commands/show.py b/src/poetry/console/commands/show.py index 58f12b26160..b5ae58bdb07 100644 --- a/src/poetry/console/commands/show.py +++ b/src/poetry/console/commands/show.py @@ -317,9 +317,7 @@ def _display_packages_information( name = locked.pretty_name install_marker = "" - if show_top_level and not any( - locked.is_same_package_as(r) for r in requires - ): + if show_top_level and not any(locked.satisfies(r) for r in requires): continue if locked not in required_locked_packages: diff --git a/src/poetry/mixology/incompatibility.py b/src/poetry/mixology/incompatibility.py index 8ac85731a17..26bf72e79ba 100644 --- a/src/poetry/mixology/incompatibility.py +++ b/src/poetry/mixology/incompatibility.py @@ -196,7 +196,7 @@ def __str__(self) -> str: if len(positive) != 1: return f"if {' and '.join(positive)} then {' or '.join(negative)}" - positive_term = [term for term in self._terms if term.is_positive()][0] + positive_term = next(term for term in self._terms if term.is_positive()) return ( f"{self._terse(positive_term, allow_every=True)} requires" f" {' or '.join(negative)}" diff --git a/src/poetry/puzzle/provider.py b/src/poetry/puzzle/provider.py index e8204fffcf3..a7ef7a0ad7c 100644 --- a/src/poetry/puzzle/provider.py +++ b/src/poetry/puzzle/provider.py @@ -1,5 +1,6 @@ from __future__ import annotations +import itertools import logging import re import time @@ -13,10 +14,10 @@ from cleo.ui.progress_indicator import ProgressIndicator from poetry.core.constraints.version import EmptyConstraint from poetry.core.constraints.version import Version +from poetry.core.constraints.version import VersionRange from poetry.core.packages.utils.utils import get_python_constraint_from_marker from poetry.core.version.markers import AnyMarker -from poetry.core.version.markers import EmptyMarker -from poetry.core.version.markers import MarkerUnion +from poetry.core.version.markers import union as marker_union from poetry.mixology.incompatibility import Incompatibility from poetry.mixology.incompatibility_cause import DependencyCause @@ -60,10 +61,22 @@ class IncompatibleConstraintsError(Exception): Exception when there are duplicate dependencies with incompatible constraints. """ - def __init__(self, package: Package, *dependencies: Dependency) -> None: - constraints = "\n".join(dep.to_pep_508() for dep in dependencies) + def __init__( + self, package: Package, *dependencies: Dependency, with_sources: bool = False + ) -> None: + constraints = [] + for dep in dependencies: + constraint = dep.to_pep_508() + if dep.is_direct_origin(): + # add version info because issue might be a version conflict + # with a version constraint + constraint += f" ({dep.constraint})" + if with_sources and dep.source_name: + constraint += f" ; source={dep.source_name}" + constraints.append(constraint) super().__init__( - f"Incompatible constraints in requirements of {package}:\n{constraints}" + f"Incompatible constraints in requirements of {package}:\n" + + "\n".join(constraints) ) @@ -591,55 +604,15 @@ def complete_package( self.debug(f"Duplicate dependencies for {dep_name}") - # Group dependencies for merging. - # We must not merge dependencies from different sources! - dep_groups = self._group_by_source(deps) - deps = [] - for group in dep_groups: - # In order to reduce the number of overrides we merge duplicate - # dependencies by constraint. 
For instance, if we have: - # • foo (>=2.0) ; python_version >= "3.6" and python_version < "3.7" - # • foo (>=2.0) ; python_version >= "3.7" - # we can avoid two overrides by merging them to: - # • foo (>=2.0) ; python_version >= "3.6" - # However, if we want to merge dependencies by constraint we have to - # merge dependencies by markers first in order to avoid unnecessary - # solver failures. For instance, if we have: - # • foo (>=2.0) ; python_version >= "3.6" and python_version < "3.7" - # • foo (>=2.0) ; python_version >= "3.7" - # • foo (<2.1) ; python_version >= "3.7" - # we must not merge the first two constraints but the last two: - # • foo (>=2.0) ; python_version >= "3.6" and python_version < "3.7" - # • foo (>=2.0,<2.1) ; python_version >= "3.7" - deps += self._merge_dependencies_by_constraint( - self._merge_dependencies_by_marker(group) - ) + # For dependency resolution, markers of duplicate dependencies must be + # mutually exclusive. + deps = self._resolve_overlapping_markers(package, deps) + if len(deps) == 1: self.debug(f"Merging requirements for {deps[0]!s}") dependencies.append(deps[0]) continue - # We leave dependencies as-is if they have the same - # python/platform constraints. - # That way the resolver will pickup the conflict - # and display a proper error. - seen = set() - for dep in deps: - pep_508_dep = dep.to_pep_508(False) - if ";" not in pep_508_dep: - _requirements = "" - else: - _requirements = pep_508_dep.split(";")[1].strip() - - if _requirements not in seen: - seen.add(_requirements) - - if len(deps) != len(seen): - for dep in deps: - dependencies.append(dep) - - continue - # At this point, we raise an exception that will # tell the solver to make new resolutions with specific overrides. # @@ -665,8 +638,6 @@ def fmt_warning(d: Dependency) -> str: f"Different requirements found for {warnings}." ) - deps = self._handle_any_marker_dependencies(package, deps) - overrides = [] overrides_marker_intersection: BaseMarker = AnyMarker() for dep_overrides in self._overrides.values(): @@ -691,18 +662,18 @@ def fmt_warning(d: Dependency) -> str: clean_dependencies = [] for dep in dependencies: if not dependency.transitive_marker.without_extras().is_any(): - marker_intersection = ( + transitive_marker_intersection = ( dependency.transitive_marker.without_extras().intersect( dep.marker.without_extras() ) ) - if marker_intersection.is_empty(): + if transitive_marker_intersection.is_empty(): # The dependency is not needed, since the markers specified # for the current package selection are not compatible with # the markers for the current dependency, so we skip it continue - dep.transitive_marker = marker_intersection + dep.transitive_marker = transitive_marker_intersection if not dependency.python_constraint.is_any(): python_constraint_intersection = dep.python_constraint.intersect( @@ -846,118 +817,127 @@ def _merge_dependencies_by_constraint( """ Merge dependencies with the same constraint by building a union of their markers. - """ - by_constraint: dict[VersionConstraint, list[Dependency]] = defaultdict(list) - for dep in dependencies: - by_constraint[dep.constraint].append(dep) - for constraint, _deps in by_constraint.items(): - new_markers = [dep.marker for dep in _deps] - dep = _deps[0] - - # Union with EmptyMarker is to make sure we get the benefit of marker - # simplifications. 
- dep.marker = MarkerUnion(*new_markers).union(EmptyMarker()) - by_constraint[constraint] = [dep] - - return [value[0] for value in by_constraint.values()] - - def _merge_dependencies_by_marker( - self, dependencies: Iterable[Dependency] - ) -> list[Dependency]: + For instance, if we have: + - foo (>=2.0) ; python_version >= "3.6" and python_version < "3.7" + - foo (>=2.0) ; python_version >= "3.7" + we can avoid two overrides by merging them to: + - foo (>=2.0) ; python_version >= "3.6" """ - Merge dependencies with the same marker - by building the intersection of their constraints. + dep_groups = self._group_by_source(dependencies) + merged_dependencies = [] + for group in dep_groups: + by_constraint: dict[VersionConstraint, list[Dependency]] = defaultdict(list) + for dep in group: + by_constraint[dep.constraint].append(dep) + for deps in by_constraint.values(): + dep = deps[0] + if len(deps) > 1: + new_markers = (dep.marker for dep in deps) + dep.marker = marker_union(*new_markers) + merged_dependencies.append(dep) + + return merged_dependencies + + def _is_relevant_marker(self, marker: BaseMarker) -> bool: """ - by_marker: dict[BaseMarker, list[Dependency]] = defaultdict(list) - for dep in dependencies: - by_marker[dep.marker].append(dep) - deps = [] - for _deps in by_marker.values(): - if len(_deps) == 1: - deps.extend(_deps) - else: - new_constraint = _deps[0].constraint - for dep in _deps[1:]: - new_constraint = new_constraint.intersect(dep.constraint) - if new_constraint.is_empty(): - # leave dependencies as-is so the resolver will pickup - # the conflict and display a proper error. - deps.extend(_deps) - else: - self.debug( - f"Merging constraints for {_deps[0].name} for" - f" marker {_deps[0].marker}" - ) - deps.append(_deps[0].with_constraint(new_constraint)) - return deps + A marker is relevant if + - it is not empty + - allowed by the project's python constraint + - allowed by the environment (only during installation) + """ + return ( + not marker.is_empty() + and self._python_constraint.allows_any( + get_python_constraint_from_marker(marker) + ) + and (not self._env or marker.validate(self._env.marker_env)) + ) - def _handle_any_marker_dependencies( + def _resolve_overlapping_markers( self, package: Package, dependencies: list[Dependency] ) -> list[Dependency]: """ - We need to check if one of the duplicate dependencies - has no markers. If there is one, we need to change its - environment markers to the inverse of the union of the - other dependencies markers. - For instance, if we have the following dependencies: - • ipython - • ipython (1.2.4) ; implementation_name == "pypy" - - the marker for `ipython` will become `implementation_name != "pypy"`. - - Further, we have to merge the constraints of the requirements - without markers into the constraints of the requirements with markers. - for instance, if we have the following dependencies: - • foo (>= 1.2) - • foo (!= 1.2.1) ; python == 3.10 - - the constraint for the second entry will become (!= 1.2.1, >= 1.2). + Convert duplicate dependencies with potentially overlapping markers + into duplicate dependencies with mutually exclusive markers. + + Therefore, the intersections of all combinations of markers and inverted markers + have to be calculated. If such an intersection is relevant (not empty, etc.), + the intersection of all constraints, whose markers were not inverted is built + and a new dependency with the calculated version constraint and marker is added. 
+ (The marker of such a dependency does not overlap with the marker + of any other new dependency.) """ - any_markers_dependencies = [d for d in dependencies if d.marker.is_any()] - other_markers_dependencies = [d for d in dependencies if not d.marker.is_any()] - - if any_markers_dependencies: - for dep_other in other_markers_dependencies: - new_constraint = dep_other.constraint - for dep_any in any_markers_dependencies: - new_constraint = new_constraint.intersect(dep_any.constraint) - if new_constraint.is_empty(): - raise IncompatibleConstraintsError( - package, dep_other, *any_markers_dependencies - ) - dep_other.constraint = new_constraint - - marker = other_markers_dependencies[0].marker - for other_dep in other_markers_dependencies[1:]: - marker = marker.union(other_dep.marker) - inverted_marker = marker.invert() - - if ( - not inverted_marker.is_empty() - and self._python_constraint.allows_any( - get_python_constraint_from_marker(inverted_marker) + # In order to reduce the number of intersections, + # we merge duplicate dependencies by constraint. + dependencies = self._merge_dependencies_by_constraint(dependencies) + + new_dependencies = [] + for uses in itertools.product([True, False], repeat=len(dependencies)): + # intersection of markers + # For performance optimization, we don't just intersect all markers at once, + # but intersect them one after the other to get empty markers early. + # Further, we intersect the inverted markers at last because + # they are more likely to overlap than the non-inverted ones. + markers = ( + dep.marker if use else dep.marker.invert() + for use, dep in sorted( + zip(uses, dependencies), key=lambda ud: ud[0], reverse=True + ) ) - and (not self._env or inverted_marker.validate(self._env.marker_env)) - ): - if any_markers_dependencies: - for dep_any in any_markers_dependencies: - dep_any.marker = inverted_marker - else: - # If there is no any marker dependency - # and the inverted marker is not empty, - # a dependency with the inverted union of all markers is required - # in order to not miss other dependencies later, for instance: + used_marker_intersection: BaseMarker = AnyMarker() + for m in markers: + used_marker_intersection = used_marker_intersection.intersect(m) + if not self._is_relevant_marker(used_marker_intersection): + continue + + # intersection of constraints + constraint: VersionConstraint = VersionRange() + specific_source_dependency = None + used_dependencies = list(itertools.compress(dependencies, uses)) + for dep in used_dependencies: + if dep.is_direct_origin() or dep.source_name: + # if direct origin or specific source: + # conflict if specific source already set and not the same + if specific_source_dependency and ( + not dep.is_same_source_as(specific_source_dependency) + or dep.source_name != specific_source_dependency.source_name + ): + raise IncompatibleConstraintsError( + package, dep, specific_source_dependency, with_sources=True + ) + specific_source_dependency = dep + constraint = constraint.intersect(dep.constraint) + if constraint.is_empty(): + # conflict in overlapping area + raise IncompatibleConstraintsError(package, *used_dependencies) + + if not any(uses): + # This is an edge case where the dependency is not required + # for the resulting marker. 
However, we have to consider it anyway + # in order to not miss other dependencies later, for instance: # • foo (1.0) ; python == 3.7 # • foo (2.0) ; python == 3.8 # • bar (2.0) ; python == 3.8 # • bar (3.0) ; python == 3.9 - # # the last dependency would be missed without this, # because the intersection with both foo dependencies is empty. - inverted_marker_dep = dependencies[0].with_constraint(EmptyConstraint()) - inverted_marker_dep.marker = inverted_marker - dependencies.append(inverted_marker_dep) - else: - dependencies = other_markers_dependencies - return dependencies + + # Set constraint to empty to mark dependency as "not required". + constraint = EmptyConstraint() + used_dependencies = dependencies + + # build new dependency with intersected constraint and marker + # (and correct source) + new_dep = ( + specific_source_dependency + if specific_source_dependency + else used_dependencies[0] + ).with_constraint(constraint) + new_dep.marker = used_marker_intersection + new_dependencies.append(new_dep) + + # In order to reduce the number of overrides we merge duplicate + # dependencies by constraint again. After overlapping markers were + # resolved, there might be new dependencies with the same constraint. + return self._merge_dependencies_by_constraint(new_dependencies) diff --git a/src/poetry/utils/shell.py b/src/poetry/utils/shell.py index d60998f0b0e..290a62c09c1 100644 --- a/src/poetry/utils/shell.py +++ b/src/poetry/utils/shell.py @@ -108,6 +108,8 @@ def activate(self, env: VirtualEnv) -> int | None: if self._name == "zsh": # Under ZSH the source command should be invoked in zsh's bash emulator c.sendline(f"emulate bash -c '. {shlex.quote(str(activate_path))}'") + elif self._name == "xonsh": + c.sendline(f"vox activate {shlex.quote(str(env.path))}") else: cmd = f"{self._get_source_command()} {shlex.quote(str(activate_path))}" if self._name in ["fish", "nu"]: diff --git a/tests/console/commands/test_show.py b/tests/console/commands/test_show.py index bd7dd37e9b4..162fa73a13d 100644 --- a/tests/console/commands/test_show.py +++ b/tests/console/commands/test_show.py @@ -2222,6 +2222,75 @@ def test_show_top_level_with_explicitly_defined_depenancy( assert tester.io.fetch_output() == expected +def test_show_top_level_with_extras( + tester: CommandTester, poetry: Poetry, installed: Repository +) -> None: + black_dep = Factory.create_dependency( + "black", {"version": "23.3.0", "extras": ["d"]} + ) + poetry.package.add_dependency(black_dep) + + black_package = get_package("black", "23.3.0") + black_package.add_dependency( + Factory.create_dependency( + "aiohttp", + { + "version": ">=3.7.4", + "optional": True, + "markers": 'extra == "d"', + }, + ) + ) + installed.add_package(black_package) + + assert isinstance(poetry.locker, TestLocker) + poetry.locker.mock_lock_data( + { + "package": [ + { + "name": "black", + "version": "23.3.0", + "description": "", + "category": "main", + "optional": False, + "platform": "*", + "python-versions": "*", + "checksum": [], + "dependencies": { + "aiohttp": { + "version": ">=3.7.4", + "optional": True, + "markers": 'extra == "d"', + } + }, + }, + { + "name": "aiohttp", + "version": "3.8.4", + "description": "", + "category": "main", + "optional": False, + "platform": "*", + "python-versions": "*", + "checksum": [], + }, + ], + "metadata": { + "python-versions": "*", + "platform": "*", + "content-hash": "123456789", + "files": {"black": [], "aiohttp": []}, + }, + } + ) + + tester.execute("--top-level") + + expected = """black 23.3.0 \n""" + + assert 
tester.io.fetch_output() == expected + + def test_show_error_top_level_with_tree(tester: CommandTester) -> None: expected = "Error: Cannot use --tree and --top-level at the same time.\n" tester.execute("--top-level --tree") diff --git a/tests/installation/fixtures/with-duplicate-dependencies.test b/tests/installation/fixtures/with-duplicate-dependencies.test index 2c3030a34e5..3c88e7c18f8 100644 --- a/tests/installation/fixtures/with-duplicate-dependencies.test +++ b/tests/installation/fixtures/with-duplicate-dependencies.test @@ -8,8 +8,8 @@ files = [] [package.dependencies] B = [ - {version = "^1.0", markers = "python_version < \"4.0\""}, - {version = "^2.0", markers = "python_version >= \"4.0\""}, + {version = ">=1.0,<2.0", markers = "python_version < \"4.0\""}, + {version = ">=2.0,<3.0", markers = "python_version >= \"4.0\""}, ] [[package]] diff --git a/tests/installation/fixtures/with-multiple-updates.test b/tests/installation/fixtures/with-multiple-updates.test index 76bc65a2a5b..4c6e401ad97 100644 --- a/tests/installation/fixtures/with-multiple-updates.test +++ b/tests/installation/fixtures/with-multiple-updates.test @@ -9,8 +9,8 @@ files = [] [package.dependencies] B = ">=1.0.1" C = [ - {version = "^1.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\""}, - {version = "^2.0", markers = "python_version >= \"3.4\" and python_version < \"4.0\""}, + {version = ">=1.0,<2.0", markers = "python_version >= \"2.7\" and python_version < \"2.8\""}, + {version = ">=2.0,<3.0", markers = "python_version >= \"3.4\" and python_version < \"4.0\""}, ] [[package]] diff --git a/tests/mixology/version_solver/test_unsolvable.py b/tests/mixology/version_solver/test_unsolvable.py index 92cc5a98515..ce5f6456a28 100644 --- a/tests/mixology/version_solver/test_unsolvable.py +++ b/tests/mixology/version_solver/test_unsolvable.py @@ -2,7 +2,10 @@ from typing import TYPE_CHECKING +import pytest + from poetry.factory import Factory +from poetry.puzzle.provider import IncompatibleConstraintsError from tests.mixology.helpers import add_to_repo from tests.mixology.helpers import check_solver_result @@ -88,9 +91,14 @@ def test_disjoint_root_constraints( add_to_repo(repo, "foo", "2.0.0") error = """\ -Because myapp depends on both foo (1.0.0) and foo (2.0.0), version solving failed.""" +Incompatible constraints in requirements of myapp (0.0.0): +foo (==1.0.0) +foo (==2.0.0)""" - check_solver_result(root, provider, error=error) + with pytest.raises(IncompatibleConstraintsError) as e: + check_solver_result(root, provider, error=error) + + assert str(e.value) == error def test_disjoint_root_constraints_path_dependencies( @@ -106,12 +114,15 @@ def test_disjoint_root_constraints_path_dependencies( dependency2 = Factory.create_dependency("demo", {"path": project_dir / "demo_two"}) root.add_dependency(dependency2) - error = ( - f"Because myapp depends on both {str(dependency1).replace('*', '1.2.3')} " - f"and {str(dependency2).replace('*', '1.2.3')}, version solving failed." 
- ) + error = f"""\ +Incompatible constraints in requirements of myapp (0.0.0): +demo @ {project_dir.as_uri()}/demo_two (1.2.3) +demo @ {project_dir.as_uri()}/demo_one (1.2.3)""" - check_solver_result(root, provider, error=error) + with pytest.raises(IncompatibleConstraintsError) as e: + check_solver_result(root, provider, error=error) + + assert str(e.value) == error def test_no_valid_solution( diff --git a/tests/puzzle/test_provider.py b/tests/puzzle/test_provider.py index eec324a2b4a..38504c412cc 100644 --- a/tests/puzzle/test_provider.py +++ b/tests/puzzle/test_provider.py @@ -19,6 +19,7 @@ from poetry.factory import Factory from poetry.inspection.info import PackageInfo from poetry.packages import DependencyPackage +from poetry.puzzle.provider import IncompatibleConstraintsError from poetry.puzzle.provider import Provider from poetry.repositories.repository import Repository from poetry.repositories.repository_pool import RepositoryPool @@ -584,6 +585,36 @@ def test_search_for_file_wheel_with_extras( } +def test_complete_package_merges_same_source_and_no_source( + provider: Provider, root: ProjectPackage +) -> None: + foo_no_source_1 = get_dependency("foo", ">=1") + foo_source_1 = get_dependency("foo", "!=1.1.*") + foo_source_1.source_name = "source" + foo_source_2 = get_dependency("foo", "!=1.2.*") + foo_source_2.source_name = "source" + foo_no_source_2 = get_dependency("foo", "<2") + + root.add_dependency(foo_no_source_1) + root.add_dependency(foo_source_1) + root.add_dependency(foo_source_2) + root.add_dependency(foo_no_source_2) + + complete_package = provider.complete_package( + DependencyPackage(root.to_dependency(), root) + ) + + requires = complete_package.package.all_requires + assert len(requires) == 1 + assert requires[0].source_name == "source" + assert str(requires[0].constraint) in { + ">=1,<1.1 || >=1.3,<2", + ">=1,<1.1.dev0 || >=1.3.dev0,<2", + ">=1,<1.1.0 || >=1.3.0,<2", + ">=1,<1.1.0.dev0 || >=1.3.0.dev0,<2", + } + + def test_complete_package_does_not_merge_different_source_names( provider: Provider, root: ProjectPackage ) -> None: @@ -595,19 +626,39 @@ def test_complete_package_does_not_merge_different_source_names( root.add_dependency(foo_source_1) root.add_dependency(foo_source_2) + with pytest.raises(IncompatibleConstraintsError) as e: + provider.complete_package(DependencyPackage(root.to_dependency(), root)) + + expected = """\ +Incompatible constraints in requirements of root (1.2.3): +foo ; source=source_2 +foo ; source=source_1""" + + assert str(e.value) == expected + + +def test_complete_package_merges_same_source_type_and_no_source( + provider: Provider, root: ProjectPackage, fixture_dir: FixtureDirGetter +) -> None: + project_dir = fixture_dir("with_conditional_path_deps") + path = (project_dir / "demo_one").as_posix() + + root.add_dependency(Factory.create_dependency("demo", ">=1.0")) + root.add_dependency(Factory.create_dependency("demo", {"path": path})) + root.add_dependency(Factory.create_dependency("demo", {"path": path})) # duplicate + root.add_dependency(Factory.create_dependency("demo", "<2.0")) + complete_package = provider.complete_package( DependencyPackage(root.to_dependency(), root) ) requires = complete_package.package.all_requires - assert len(requires) == 2 - assert {requires[0].source_name, requires[1].source_name} == { - "source_1", - "source_2", - } + assert len(requires) == 1 + assert requires[0].source_url == path + assert str(requires[0].constraint) == "1.2.3" -def test_complete_package_preserves_source_type( +def 
test_complete_package_does_not_merge_different_source_types( provider: Provider, root: ProjectPackage, fixture_dir: FixtureDirGetter ) -> None: project_dir = fixture_dir("with_conditional_path_deps") @@ -615,19 +666,40 @@ def test_complete_package_preserves_source_type( path = (project_dir / folder).as_posix() root.add_dependency(Factory.create_dependency("demo", {"path": path})) - complete_package = provider.complete_package( - DependencyPackage(root.to_dependency(), root) - ) + with pytest.raises(IncompatibleConstraintsError) as e: + provider.complete_package(DependencyPackage(root.to_dependency(), root)) - requires = complete_package.package.all_requires - assert len(requires) == 2 - assert {requires[0].source_url, requires[1].source_url} == { - project_dir.joinpath("demo_one").as_posix(), - project_dir.joinpath("demo_two").as_posix(), - } + expected = f"""\ +Incompatible constraints in requirements of root (1.2.3): +demo @ {project_dir.as_uri()}/demo_two (1.2.3) +demo @ {project_dir.as_uri()}/demo_one (1.2.3)""" + + assert str(e.value) == expected + + +def test_complete_package_does_not_merge_different_source_type_and_name( + provider: Provider, root: ProjectPackage, fixture_dir: FixtureDirGetter +) -> None: + project_dir = fixture_dir("with_conditional_path_deps") + path = (project_dir / "demo_one").as_posix() + dep_with_source_name = Factory.create_dependency("demo", ">=1.0") + dep_with_source_name.source_name = "source" + root.add_dependency(dep_with_source_name) + root.add_dependency(Factory.create_dependency("demo", {"path": path})) -def test_complete_package_preserves_source_type_with_subdirectories( + with pytest.raises(IncompatibleConstraintsError) as e: + provider.complete_package(DependencyPackage(root.to_dependency(), root)) + + expected = f"""\ +Incompatible constraints in requirements of root (1.2.3): +demo @ {project_dir.as_uri()}/demo_one (1.2.3) +demo (>=1.0) ; source=source""" + + assert str(e.value) == expected + + +def test_complete_package_does_not_merge_different_subdirectories( provider: Provider, root: ProjectPackage ) -> None: dependency_one = Factory.create_dependency( @@ -644,34 +716,19 @@ def test_complete_package_preserves_source_type_with_subdirectories( "subdirectory": "one-copy", }, ) - dependency_two = Factory.create_dependency( - "two", - {"git": "https://github.com/demo/subdirectories.git", "subdirectory": "two"}, - ) - root.add_dependency( - Factory.create_dependency( - "one", - { - "git": "https://github.com/demo/subdirectories.git", - "subdirectory": "one", - }, - ) - ) + root.add_dependency(dependency_one) root.add_dependency(dependency_one_copy) - root.add_dependency(dependency_two) - complete_package = provider.complete_package( - DependencyPackage(root.to_dependency(), root) - ) + with pytest.raises(IncompatibleConstraintsError) as e: + provider.complete_package(DependencyPackage(root.to_dependency(), root)) - requires = complete_package.package.all_requires - assert len(requires) == 3 - assert {r.to_pep_508() for r in requires} == { - dependency_one.to_pep_508(), - dependency_one_copy.to_pep_508(), - dependency_two.to_pep_508(), - } + expected = """\ +Incompatible constraints in requirements of root (1.2.3): +one @ git+https://github.com/demo/subdirectories.git#subdirectory=one-copy (1.0.0) +one @ git+https://github.com/demo/subdirectories.git#subdirectory=one (1.0.0)""" + + assert str(e.value) == expected @pytest.mark.parametrize("source_name", [None, "repo"]) diff --git a/tests/puzzle/test_solver.py b/tests/puzzle/test_solver.py index 
eb179218e1f..125b29511fb 100644 --- a/tests/puzzle/test_solver.py +++ b/tests/puzzle/test_solver.py @@ -1406,13 +1406,13 @@ def test_solver_duplicate_dependencies_different_constraints_same_requirements( repo.add_package(package_b10) repo.add_package(package_b20) - with pytest.raises(SolverProblemError) as e: + with pytest.raises(IncompatibleConstraintsError) as e: solver.solve() expected = """\ -Because a (1.0) depends on both B (^1.0) and B (^2.0), a is forbidden. -So, because no versions of a match !=1.0 - and root depends on A (*), version solving failed.""" +Incompatible constraints in requirements of a (1.0): +B (>=1.0,<2.0) +B (>=2.0,<3.0)""" assert str(e.value) == expected @@ -1455,7 +1455,7 @@ def test_solver_duplicate_dependencies_different_constraints_merge_by_marker( @pytest.mark.parametrize("git_first", [False, True]) -def test_solver_duplicate_dependencies_different_sources_types_are_preserved( +def test_solver_duplicate_dependencies_different_sources_direct_origin_preserved( solver: Solver, repo: Repository, package: ProjectPackage, git_first: bool ) -> None: pendulum = get_package("pendulum", "2.0.3") @@ -1504,19 +1504,12 @@ def test_solver_duplicate_dependencies_different_sources_types_are_preserved( DependencyPackage(package.to_dependency(), package) ) - assert len(complete_package.package.all_requires) == 2 + assert len(complete_package.package.all_requires) == 1 + dep = complete_package.package.all_requires[0] - if git_first: - git, pypi = complete_package.package.all_requires - else: - pypi, git = complete_package.package.all_requires - - assert isinstance(pypi, Dependency) - assert pypi == dependency_pypi - - assert isinstance(git, VCSDependency) - assert git.constraint != dependency_git.constraint - assert (git.name, git.source_type, git.source_url, git.source_reference) == ( + assert isinstance(dep, VCSDependency) + assert dep.constraint == demo.version + assert (dep.name, dep.source_type, dep.source_url, dep.source_reference) == ( dependency_git.name, dependency_git.source_type, dependency_git.source_url, @@ -1581,8 +1574,8 @@ def test_solver_duplicate_dependencies_different_constraints_conflict( expectation = ( "Incompatible constraints in requirements of root (1.0):\n" - 'A (<1.1) ; python_version == "3.10"\n' - "A (>=1.1)" + "A (>=1.1)\n" + 'A (<1.1) ; python_version == "3.10"' ) with pytest.raises(IncompatibleConstraintsError, match=re.escape(expectation)): solver.solve() @@ -1880,6 +1873,179 @@ def test_solver_duplicate_dependencies_sub_dependencies( ) +def test_solver_duplicate_dependencies_with_overlapping_markers_simple( + solver: Solver, repo: Repository, package: ProjectPackage +) -> None: + package.add_dependency(get_dependency("b", "1.0")) + + package_b = get_package("b", "1.0") + dep_strings = [ + "a (>=1.0)", + "a (>=1.1) ; python_version >= '3.7'", + "a (<2.0) ; python_version < '3.8'", + "a (!=1.2) ; python_version == '3.7'", + ] + deps = [Dependency.create_from_pep_508(dep) for dep in dep_strings] + for dep in deps: + package_b.add_dependency(dep) + + package_a09 = get_package("a", "0.9") + package_a10 = get_package("a", "1.0") + package_a11 = get_package("a", "1.1") + package_a12 = get_package("a", "1.2") + package_a20 = get_package("a", "2.0") + + package_a11.python_versions = ">=3.7" + package_a12.python_versions = ">=3.7" + package_a20.python_versions = ">=3.7" + + repo.add_package(package_a09) + repo.add_package(package_a10) + repo.add_package(package_a11) + repo.add_package(package_a12) + repo.add_package(package_a20) + 
repo.add_package(package_b) + + transaction = solver.solve() + ops = check_solver_result( + transaction, + [ + {"job": "install", "package": package_a10}, + {"job": "install", "package": package_a11}, + {"job": "install", "package": package_a20}, + {"job": "install", "package": package_b}, + ], + ) + package_b_requires = {dep.to_pep_508() for dep in ops[-1].package.requires} + assert package_b_requires == { + 'a (>=1.0,<2.0) ; python_version < "3.7"', + 'a (>=1.1,!=1.2,<2.0) ; python_version == "3.7"', + 'a (>=1.1) ; python_version >= "3.8"', + } + + +def test_solver_duplicate_dependencies_with_overlapping_markers_complex( + solver: Solver, repo: Repository, package: ProjectPackage +) -> None: + """ + Dependencies with overlapping markers from + https://pypi.org/project/opencv-python/4.6.0.66/ + """ + package.add_dependency(get_dependency("opencv", "4.6.0.66")) + + opencv_package = get_package("opencv", "4.6.0.66") + dep_strings = [ + "numpy (>=1.13.3) ; python_version < '3.7'", + "numpy (>=1.21.2) ; python_version >= '3.10'", + ( + "numpy (>=1.21.2) ; python_version >= '3.6' " + "and platform_system == 'Darwin' and platform_machine == 'arm64'" + ), + ( + "numpy (>=1.19.3) ; python_version >= '3.6' " + "and platform_system == 'Linux' and platform_machine == 'aarch64'" + ), + "numpy (>=1.14.5) ; python_version >= '3.7'", + "numpy (>=1.17.3) ; python_version >= '3.8'", + "numpy (>=1.19.3) ; python_version >= '3.9'", + ] + deps = [Dependency.create_from_pep_508(dep) for dep in dep_strings] + for dep in deps: + opencv_package.add_dependency(dep) + + for version in {"1.13.3", "1.21.2", "1.19.3", "1.14.5", "1.17.3"}: + repo.add_package(get_package("numpy", version)) + repo.add_package(opencv_package) + + transaction = solver.solve() + ops = check_solver_result( + transaction, + [ + {"job": "install", "package": get_package("numpy", "1.21.2")}, + {"job": "install", "package": opencv_package}, + ], + ) + opencv_requires = {dep.to_pep_508() for dep in ops[-1].package.requires} + expectation = ( + { # concise solution, but too expensive + ( + "numpy (>=1.21.2) ;" + ' platform_system == "Darwin" and platform_machine == "arm64"' + ' and python_version >= "3.6" or python_version >= "3.10"' + ), + ( + 'numpy (>=1.19.3) ; python_version >= "3.9" and python_version < "3.10"' + ' and platform_system != "Darwin" or platform_system == "Linux"' + ' and platform_machine == "aarch64" and python_version < "3.10"' + ' and python_version >= "3.6" or python_version >= "3.9"' + ' and python_version < "3.10" and platform_machine != "arm64"' + ), + ( + 'numpy (>=1.17.3) ; python_version >= "3.8" and python_version < "3.9"' + ' and (platform_system != "Darwin" or platform_machine != "arm64")' + ' and (platform_system != "Linux" or platform_machine != "aarch64")' + ), + ( + 'numpy (>=1.14.5) ; python_version >= "3.7" and python_version < "3.8"' + ' and (platform_system != "Darwin" or platform_machine != "arm64")' + ' and (platform_system != "Linux" or platform_machine != "aarch64")' + ), + ( + 'numpy (>=1.13.3) ; python_version < "3.7"' + ' and (python_version < "3.6" or platform_system != "Darwin"' + ' or platform_machine != "arm64") and (python_version < "3.6"' + ' or platform_system != "Linux" or platform_machine != "aarch64")' + ), + }, + { # current solution + ( + "numpy (>=1.21.2) ;" + ' python_version >= "3.6" and platform_system == "Darwin"' + ' and platform_machine == "arm64" or python_version >= "3.10"' + ), + ( + 'numpy (>=1.19.3) ; python_version >= "3.9" and python_version < "3.10"' + ' and platform_system 
!= "Darwin" or python_version >= "3.9"' + ' and python_version < "3.10" and platform_machine != "arm64"' + ' or platform_system == "Linux" and python_version < "3.10"' + ' and platform_machine == "aarch64" and python_version >= "3.6"' + ), + ( + 'numpy (>=1.17.3) ; python_version < "3.9"' + ' and (platform_system != "Darwin" and platform_system != "Linux")' + ' and python_version >= "3.8" or python_version < "3.9"' + ' and platform_system != "Darwin" and python_version >= "3.8"' + ' and platform_machine != "aarch64" or python_version < "3.9"' + ' and platform_machine != "arm64" and python_version >= "3.8"' + ' and platform_system != "Linux" or python_version < "3.9"' + ' and (platform_machine != "arm64" and platform_machine != "aarch64")' + ' and python_version >= "3.8"' + ), + ( + 'numpy (>=1.14.5) ; python_version < "3.8"' + ' and (platform_system != "Darwin" and platform_system != "Linux")' + ' and python_version >= "3.7" or python_version < "3.8"' + ' and platform_system != "Darwin" and python_version >= "3.7"' + ' and platform_machine != "aarch64" or python_version < "3.8"' + ' and platform_machine != "arm64" and python_version >= "3.7"' + ' and platform_system != "Linux" or python_version < "3.8"' + ' and (platform_machine != "arm64" and platform_machine != "aarch64")' + ' and python_version >= "3.7"' + ), + ( + 'numpy (>=1.13.3) ; python_version < "3.6" or python_version < "3.7"' + ' and (platform_system != "Darwin" and platform_system != "Linux")' + ' or python_version < "3.7" and platform_system != "Darwin"' + ' and platform_machine != "aarch64" or python_version < "3.7"' + ' and platform_machine != "arm64" and platform_system != "Linux"' + ' or python_version < "3.7" and (platform_machine != "arm64"' + ' and platform_machine != "aarch64")' + ), + }, + ) + assert opencv_requires in expectation + + def test_duplicate_path_dependencies( solver: Solver, package: ProjectPackage, fixture_dir: FixtureDirGetter ) -> None: @@ -2067,7 +2233,7 @@ def test_solver_can_resolve_git_dependencies_with_ref( "0.1.2", source_type="git", source_url="https://github.com/demo/demo.git", - source_reference=ref[list(ref.keys())[0]], + source_reference=ref[next(iter(ref.keys()))], source_resolved_reference=MOCK_DEFAULT_GIT_REVISION, ) @@ -2087,7 +2253,7 @@ def test_solver_can_resolve_git_dependencies_with_ref( op = ops[1] assert op.package.source_type == "git" - assert op.package.source_reference == ref[list(ref.keys())[0]] + assert op.package.source_reference == ref[next(iter(ref.keys()))] assert op.package.source_resolved_reference is not None assert op.package.source_resolved_reference.startswith("9cf87a2") diff --git a/tests/repositories/conftest.py b/tests/repositories/conftest.py index db676c6d90c..2661b46fc20 100644 --- a/tests/repositories/conftest.py +++ b/tests/repositories/conftest.py @@ -13,7 +13,7 @@ def html_page_content() -> HTMLPageGetter: def _fixture(content: str, base_url: str | None = None) -> str: base = f' @@ -26,6 +26,6 @@ def _fixture(content: str, base_url: str | None = None) -> str: {content} - """.format(content=content, base=base) + """ return _fixture diff --git a/tests/repositories/link_sources/test_html.py b/tests/repositories/link_sources/test_html.py index 2be9f6b3a13..e39edcae034 100644 --- a/tests/repositories/link_sources/test_html.py +++ b/tests/repositories/link_sources/test_html.py @@ -53,7 +53,7 @@ def test_link_attributes( page = HTMLPage("https://example.org", content) assert len(list(page.links)) == 1 - link = list(page.links)[0] + link = next(iter(page.links)) 
assert link.url == expected_link.url assert link.requires_python == expected_link.requires_python assert link.yanked == expected_link.yanked @@ -111,5 +111,5 @@ def test_base_url( ) -> None: content = html_page_content(anchor, base_url) page = HTMLPage("https://example.org", content) - link = list(page.links)[0] + link = next(iter(page.links)) assert link.url == expected
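The central change in `poetry/puzzle/provider.py` above replaces the old marker/constraint merging with `_resolve_overlapping_markers`, which enumerates every combination of used and inverted markers via `itertools.product` and intersects the constraints of the "used" dependencies via `itertools.compress`. The following toy sketch illustrates that enumeration pattern only; it does not use Poetry's real marker or constraint objects. Sets of Python minor versions stand in for markers and plain strings stand in for version constraints.

```python
# Toy illustration of resolving overlapping markers into mutually
# exclusive groups, mirroring the itertools-based loop in provider.py.
import itertools

ALL_PYTHONS = {"3.7", "3.8", "3.9"}

# (marker as a set of supported pythons, constraint as a label)
duplicate_deps = [
    ({"3.7", "3.8", "3.9"}, ">=2.0"),  # foo (>=2.0)
    ({"3.8", "3.9"}, "<2.1"),          # foo (<2.1) ; python_version >= "3.8"
]

resolved = []
# Enumerate every combination of "marker used" / "marker inverted".
for uses in itertools.product([True, False], repeat=len(duplicate_deps)):
    marker = set(ALL_PYTHONS)
    for use, (dep_marker, _) in zip(uses, duplicate_deps):
        marker &= dep_marker if use else ALL_PYTHONS - dep_marker
    if not marker:
        # Empty intersection: this combination is irrelevant.
        continue
    # Intersect (here: collect) the constraints of the used dependencies.
    constraints = [c for _, c in itertools.compress(duplicate_deps, uses)]
    resolved.append((sorted(marker), constraints))

for pythons, constraints in resolved:
    print(pythons, "->", " and ".join(constraints))
# Two mutually exclusive groups are produced:
#   ['3.8', '3.9'] -> >=2.0 and <2.1
#   ['3.7'] -> >=2.0
```

The real implementation additionally checks marker relevance against the project's Python constraint, handles conflicting sources by raising `IncompatibleConstraintsError`, and keeps an empty-constraint dependency for the all-inverted case so later dependencies are not lost.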