diff --git a/.lock b/.lock new file mode 100644 index 00000000..e69de29b diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/crates.js b/crates.js new file mode 100644 index 00000000..7571f0f2 --- /dev/null +++ b/crates.js @@ -0,0 +1 @@ +window.ALL_CRATES = ["pubgrub"]; \ No newline at end of file diff --git a/help.html b/help.html new file mode 100644 index 00000000..584eb36f --- /dev/null +++ b/help.html @@ -0,0 +1,2 @@ +
pub enum PubGrubError<P: Package, VS: VersionSet, E: Error> {
+ NoSolution(DerivationTree<P, VS>),
+ ErrorRetrievingDependencies {
+ package: P,
+ version: VS::V,
+ source: E,
+ },
+ SelfDependency {
+ package: P,
+ version: VS::V,
+ },
+ ErrorChoosingPackageVersion(E),
+ ErrorInShouldCancel(E),
+ Failure(String),
+}
Errors that may occur while solving dependencies.
+There is no solution for this set of dependencies.
+package: P
Package whose dependencies we want.
+source: E
Error raised by the implementer of +DependencyProvider.
+Error arising when the implementer of +DependencyProvider +returned an error in the method +get_dependencies.
+package: P
Package whose dependencies we want.
+Error arising when the implementer of +DependencyProvider +returned a dependency on the requested package. +This technically means that the package directly depends on itself, +and is clearly some kind of mistake.
+Error arising when the implementer of +DependencyProvider +returned an error in the method +choose_version.
+Error arising when the implementer of DependencyProvider +returned an error in the method should_cancel.
+Something unexpected happened.
+Handling pubgrub errors.
+PubGrub version solving algorithm.
+Version solving consists in efficiently finding a set of packages and versions +that satisfy all the constraints of a given project dependencies. +In addition, when that is not possible, +we should try to provide a very human-readable and clear +explanation as to why that failed.
+All the code in this crate is manipulating packages and versions, and for this to work +we defined a Package and Version traits +that are used as bounds on most of the exposed types and functions.
+Package identifiers needs to implement our Package trait, +which is automatic if the type already implements +Clone + Eq + Hash + Debug + Display. +So things like String will work out of the box.
+Our Version trait requires +Clone + Ord + Debug + Display +and also the definition of two methods, +lowest() -> Self which returns the lowest version existing, +and bump(&self) -> Self which returns the next smallest version +strictly higher than the current one. +For convenience, this library already provides +two implementations of Version. +The first one is NumberVersion, basically a newtype for u32. +The second one is SemanticVersion +that implements semantic versioning rules.
+Let’s imagine that we are building a user interface
+with a menu containing dropdowns with some icons,
+icons that we are also directly using in other parts of the interface.
+For this scenario our direct dependencies are menu
and icons
,
+but the complete set of dependencies looks like follows:
root
depends on menu
and icons
menu
depends on dropdown
dropdown
depends on icons
icons
has no dependencyWe can model that scenario with this library as follows
+ +
+type NumVS = Range<NumberVersion>;
+
+let mut dependency_provider = OfflineDependencyProvider::<&str, NumVS>::new();
+
+dependency_provider.add_dependencies(
+ "root", 1, [("menu", Range::full()), ("icons", Range::full())],
+);
+dependency_provider.add_dependencies("menu", 1, [("dropdown", Range::full())]);
+dependency_provider.add_dependencies("dropdown", 1, [("icons", Range::full())]);
+dependency_provider.add_dependencies("icons", 1, []);
+
+// Run the algorithm.
+let solution = resolve(&dependency_provider, "root", 1).unwrap();
In our previous example we used the +OfflineDependencyProvider, +which is a basic implementation of the DependencyProvider trait.
+But we might want to implement the DependencyProvider +trait for our own type. +Let’s say that we will use String for packages, +and SemanticVersion for versions. +This may be done quite easily by implementing the three following functions.
+ +type SemVS = Range<SemanticVersion>;
+
+impl DependencyProvider<String, SemVS> for MyDependencyProvider {
+ fn choose_version(&self, package: &String, range: &SemVS) -> Result<Option<SemanticVersion>, Infallible> {
+ unimplemented!()
+ }
+
+ type Priority = usize;
+ fn prioritize(&self, package: &String, range: &SemVS) -> Self::Priority {
+ unimplemented!()
+ }
+
+ fn get_dependencies(
+ &self,
+ package: &String,
+ version: &SemanticVersion,
+ ) -> Result<Dependencies<String, SemVS>, Infallible> {
+ unimplemented!()
+ }
+
+ type Err = Infallible;
+}
The first method +choose_version +chooses a version compatible with the provided range for a package. +The second method +prioritize +in which order different packages should be chosen. +Usually prioritizing packages +with the fewest number of compatible versions speeds up resolution. +But in general you are free to employ whatever strategy suits you best +to pick a package and a version.
+The third method get_dependencies +aims at retrieving the dependencies of a given package at a given version. +Returns None if dependencies are unknown.
+In a real scenario, these two methods may involve reading the file system
+or doing network request, so you may want to hold a cache in your
+DependencyProvider implementation.
+How exactly this could be achieved is shown in CachingDependencyProvider
+(see examples/caching_dependency_provider.rs
).
+You could also use the OfflineDependencyProvider
+type defined by the crate as guidance,
+but you are free to use whatever approach makes sense in your situation.
When everything goes well, the algorithm finds and returns the complete +set of direct and indirect dependencies satisfying all the constraints. +The packages and versions selected are returned as +SelectedDepedencies<P, V>. +But sometimes there is no solution because dependencies are incompatible. +In such cases, resolve(…) returns a +PubGrubError::NoSolution(derivation_tree), +where the provided derivation tree is a custom binary tree +containing the full chain of reasons why there is no solution.
+All the items in the tree are called incompatibilities +and may be of two types, either “external” or “derived”. +Leaves of the tree are external incompatibilities, +and nodes are derived. +External incompatibilities have reasons that are independent +of the way this algorithm is implemented such as
+Derived incompatibilities are obtained during the algorithm execution by deduction, +such as if “a” depends on “b” and “b” depends on “c”, “a” depends on “c”.
+This crate defines a Reporter trait, with an associated +Output type and a single method.
+ +pub trait Reporter<P: Package, VS: VersionSet> {
+ type Output;
+
+ fn report(derivation_tree: &DerivationTree<P, VS>) -> Self::Output;
+}
Implementing a Reporter may involve a lot of heuristics +to make the output human-readable and natural. +For convenience, we provide a default implementation +DefaultStringReporter +that outputs the report as a String. +You may use it as follows:
+ +match resolve(&dependency_provider, root_package, root_version) {
+ Ok(solution) => println!("{:?}", solution),
+ Err(PubGrubError::NoSolution(mut derivation_tree)) => {
+ derivation_tree.collapse_no_versions();
+ eprintln!("{}", DefaultStringReporter::report(&derivation_tree));
+ }
+ Err(err) => panic!("{:?}", err),
+};
Notice that we also used +collapse_no_versions() above. +This method simplifies the derivation tree to get rid of the +NoVersions +external incompatibilities in the derivation tree. +So instead of seeing things like this in the report:
+Because there is no version of foo in 1.0.1 <= v < 2.0.0
+and foo 1.0.0 depends on bar 2.0.0 <= v < 3.0.0,
+foo 1.0.0 <= v < 2.0.0 depends on bar 2.0.0 <= v < 3.0.0.
+
you may have directly:
+foo 1.0.0 <= v < 2.0.0 depends on bar 2.0.0 <= v < 3.0.0.
+
Beware though that if you are using some kind of offline mode +with a cache, you may want to know that some versions +do not exist in your cache.
+Ranges are constraints defining sets of versions.
+Concretely, those constraints correspond to any set of versions +representable as the concatenation, union, and complement +of the ranges building blocks.
+Those building blocks are:
+v <= versions
v < versions
versions <= v
versions < v
v1 <= versions < v2
Ranges can be created from any type that implements Ord
+ Clone
.
In order to advance the solver front, comparisons of versions sets are necessary in the algorithm. +To do those comparisons between two sets S1 and S2 we use the mathematical property that S1 ⊂ S2 if and only if S1 ∩ S2 == S1. +We can thus compute an intersection and evaluate an equality to answer if S1 is a subset of S2. +But this means that the implementation of equality must be correct semantically. +In practice, if equality is derived automatically, this means sets must have unique representations.
+By migrating from a custom representation for discrete sets in v0.2 +to a generic bounded representation for continuous sets in v0.3 +we are potentially breaking that assumption in two ways:
+Unbounded
values can be replaced by their equivalent if it exists.An example for each can be given when T
is u32
.
+First, we can have both segments S1 = (Unbounded, Included(42u32))
and S2 = (Included(0), Included(42u32))
+that represent the same segment but are structurally different.
+Thus, a derived equality check would answer false
to S1 == S2
while it’s true.
Second both segments S1 = (Included(1), Included(5))
and S2 = (Included(1), Included(3)) + (Included(4), Included(5))
are equal.
+But without asking the user to provide a bump
function for discrete sets,
+the algorithm is not able tell that the space between the right Included(3)
bound and the left Included(4)
bound is empty.
+Thus the algorithm is not able to reduce S2 to its canonical S1 form while computing sets operations like intersections in the generic code.
This is likely to lead to user facing theoretically correct but practically nonsensical ranges, +like (Unbounded, Excluded(0)) or (Excluded(6), Excluded(7)). +In general nonsensical inputs often lead to hard to track bugs. +But as far as we can tell this should work in practice. +So for now this crate only provides an implementation for continuous ranges. +With the v0.3 api the user could choose to bring back the discrete implementation from v0.2, as documented in the guide. +If doing so regularly fixes bugs seen by users, we will bring it back into the core library. +If we do not see practical bugs, or we get a formal proof that the code cannot lead to error states, then we may remove this warning.
+pub struct Range<V> { /* private fields */ }
A Range represents multiple intervals of a continuous range of monotone increasing +values.
+Set of all versions higher or equal to some version
+Set of all versions higher to some version
+Set of all versions lower to some version
+Set of all versions lower or equal to some version
+Returns the complement of this Range.
+Convert to something that can be used with +BTreeMap::range. +All versions contained in self, will be in the output, +but there may be versions in the output that are not contained in self. +Returns None if the range is empty.
+Returns true if the this Range contains the specified value.
+Returns true if the this Range contains the specified values.
+The versions
iterator must be sorted.
+Functionally equivalent to versions.map(|v| self.contains(v))
.
+Except it runs in O(size_of_range + len_of_versions)
not O(size_of_range * len_of_versions)
Construct a simple range from anything that impls RangeBounds like v1..v2
.
Computes the intersection of two sets of versions.
+Returns a simpler Range that contains the same versions
+For every one of the Versions provided in versions the existing range and +the simplified range will agree on whether it is contained. +The simplified version may include or exclude versions that are not in versions as the implementation wishes. +For example:
+full
.empty
.If versions are not sorted the correctness of this function is not guaranteed.
+key
and return true
if they are equal.key
and return true
if they are equal.pub enum DerivationTree<P: Package, VS: VersionSet> {
+ External(External<P, VS>),
+ Derived(Derived<P, VS>),
+}
Derivation tree resulting in the impossibility +to solve the dependencies of our root package.
+External incompatibility.
+Incompatibility derived from two others.
+Merge the NoVersions external incompatibilities +with the other one they are matched with +in a derived incompatibility. +This cleans up quite nicely the generated report. +You might want to do this if you know that the +DependencyProvider +was not run in some kind of offline mode that may not +have access to all versions existing.
+source
. Read morepub enum External<P: Package, VS: VersionSet> {
+ NotRoot(P, VS::V),
+ NoVersions(P, VS),
+ UnavailableDependencies(P, VS),
+ FromDependencyOf(P, VS, P, VS),
+}
Incompatibilities that are not derived from others, +they have their own reason.
+Initial incompatibility aiming at picking the root package for the first decision.
+There are no versions in the given set for this package.
+Dependencies of the package are unavailable for versions in that set.
+Incompatibility coming from the dependencies of a given package.
+Build a report as clear as possible as to why +dependency solving failed.
+pub struct DefaultStringReportFormatter;
Default formatter for the default reporter.
+pub struct DefaultStringReporter { /* private fields */ }
Default reporter able to generate an explanation as a String.
+pub struct Derived<P: Package, VS: VersionSet> {
+ pub terms: Map<P, Term<VS>>,
+ pub shared_id: Option<usize>,
+ pub cause1: Arc<DerivationTree<P, VS>>,
+ pub cause2: Arc<DerivationTree<P, VS>>,
+}
Incompatibility derived from two others.
+terms: Map<P, Term<VS>>
Terms of the incompatibility.
+Indicate if that incompatibility is present multiple times +in the derivation tree. +If that is the case, it has a unique id, provided in that option. +Then, we may want to only explain it once, +and refer to the explanation for the other times.
+cause1: Arc<DerivationTree<P, VS>>
First cause.
+cause2: Arc<DerivationTree<P, VS>>
Second cause.
+pub trait ReportFormatter<P: Package, VS: VersionSet> {
+ type Output;
+
+ // Required methods
+ fn format_external(&self, external: &External<P, VS>) -> Self::Output;
+ fn format_terms(&self, terms: &Map<P, Term<VS>>) -> Self::Output;
+}
Trait for formatting outputs in the reporter.
+Format an External incompatibility.
+Format terms of an incompatibility.
+pub trait Reporter<P: Package, VS: VersionSet> {
+ type Output;
+
+ // Required methods
+ fn report(derivation_tree: &DerivationTree<P, VS>) -> Self::Output;
+ fn report_with_formatter(
+ derivation_tree: &DerivationTree<P, VS>,
+ formatter: &impl ReportFormatter<P, VS, Output = Self::Output>
+ ) -> Self::Output;
+}
Reporter trait.
+Generate a report from the derivation tree +describing the resolution failure using the default formatter.
+Generate a report from the derivation tree +describing the resolution failure using a custom formatter.
+pub enum Dependencies<P: Package, VS: VersionSet> {
+ Unknown,
+ Known(DependencyConstraints<P, VS>),
+}
An enum used by DependencyProvider that holds information about package dependencies. +For each Package there is a set of versions allowed as a dependency.
+Package dependencies are unavailable.
+Container for all available package versions.
+source
. Read morepub fn resolve<P: Package, VS: VersionSet, DP: DependencyProvider<P, VS>>(
+ dependency_provider: &DP,
+ package: P,
+ version: impl Into<VS::V>
+) -> Result<SelectedDependencies<P, VS::V>, PubGrubError<P, VS, DP::Err>>
Main function of the library. +Finds a set of packages satisfying dependency bounds for a given package + version pair.
+PubGrub version solving algorithm.
+It consists in efficiently finding a set of packages and versions +that satisfy all the constraints of a given project dependencies. +In addition, when that is not possible, +PubGrub tries to provide a very human-readable and clear +explanation as to why that failed. +Below is an example of explanation present in +the introductory blog post about PubGrub
+Because dropdown >=2.0.0 depends on icons >=2.0.0 and
+ root depends on icons <2.0.0, dropdown >=2.0.0 is forbidden.
+
+And because menu >=1.1.0 depends on dropdown >=2.0.0,
+ menu >=1.1.0 is forbidden.
+
+And because menu <1.1.0 depends on dropdown >=1.0.0 <2.0.0
+ which depends on intl <4.0.0, every version of menu
+ requires intl <4.0.0.
+
+So, because root depends on both menu >=1.0.0 and intl >=5.0.0,
+ version solving failed.
+
The algorithm is generic and works for any type of dependency system +as long as packages (P) and versions (V) implement +the Package and Version traits. +Package is strictly equivalent and automatically generated +for any type that implement Clone + Eq + Hash + Debug + Display. +Version simply states that versions are ordered, +that there should be +a minimal lowest version (like 0.0.0 in semantic versions), +and that for any version, it is possible to compute +what the next version closest to this one is (bump). +For semantic versions, bump corresponds to +an increment of the patch number.
+let solution = resolve(&dependency_provider, package, version)?;
Where dependency_provider
supplies the list of available packages and versions,
+as well as the dependencies of every available package
+by implementing the DependencyProvider trait.
+The call to resolve for a given package at a given version
+will compute the set of packages and versions needed
+to satisfy the dependencies of that package and version pair.
+If there is no solution, the reason will be provided as clear as possible.
pub struct OfflineDependencyProvider<P: Package, VS: VersionSet> { /* private fields */ }
A basic implementation of DependencyProvider.
+Registers the dependencies of a package and version pair. +Dependencies must be added with a single call to +add_dependencies. +All subsequent calls to +add_dependencies for a given +package version pair will replace the dependencies by the new ones.
+The API does not allow to add dependencies one at a time to uphold an assumption that +OfflineDependencyProvider.get_dependencies(p, v) +provides all dependencies of a given package (p) and version (v) pair.
+source
. Read moreAn implementation of DependencyProvider that +contains all dependency information available in memory. +Currently packages are picked with the fewest versions contained in the constraints first. +But, that may change in new versions if better heuristics are found. +Versions are picked with the newest versions first.
+Priority
package from all potential valid
+packages, it needs to know what vertion of that package to use. The most common pattern
+is to select the largest vertion that the range contains.prioritize
. The resolver does not care what type this is
+as long as it can pick a largest one and clone it. Read morepub trait DependencyProvider<P: Package, VS: VersionSet> {
+ type Priority: Ord + Clone;
+ type Err: Error;
+
+ // Required methods
+ fn prioritize(&self, package: &P, range: &VS) -> Self::Priority;
+ fn choose_version(
+ &self,
+ package: &P,
+ range: &VS
+ ) -> Result<Option<VS::V>, Self::Err>;
+ fn get_dependencies(
+ &self,
+ package: &P,
+ version: &VS::V
+ ) -> Result<Dependencies<P, VS>, Self::Err>;
+
+ // Provided method
+ fn should_cancel(&self) -> Result<(), Self::Err> { ... }
+}
Trait that allows the algorithm to retrieve available packages and their dependencies. +An implementor needs to be supplied to the resolve function.
+The type returned from prioritize
. The resolver does not care what type this is
+as long as it can pick a largest one and clone it.
std::cmp::Reverse can be useful if you want to pick the package with +the fewest versions that match the outstanding constraint.
+Decision making +is the process of choosing the next package +and version that will be appended to the partial solution.
+Every time such a decision must be made, the resolver looks at all the potential valid
+packages that have changed, and a asks the dependency provider how important each one is.
+For each one it calls prioritize
with the name of the package and the current set of
+acceptable versions.
+The resolver will then pick the package with the highes priority from all the potential valid
+packages.
The strategy employed to prioritize packages +cannot change the existence of a solution or not, +but can drastically change the performances of the solver, +or the properties of the solution. +The documentation of Pub (PubGrub implementation for the dart programming language) +states the following:
+++Pub chooses the latest matching version of the package +with the fewest versions that match the outstanding constraint. +This tends to find conflicts earlier if any exist, +since these packages will run out of versions to try more quickly. +But there’s likely room for improvement in these heuristics.
+
Note: the resolver may call this even when the range has not change, +if it is more efficient for the resolveres internal data structures.
+Once the resolver has found the highest Priority
package from all potential valid
+packages, it needs to know what vertion of that package to use. The most common pattern
+is to select the largest vertion that the range contains.
Retrieves the package dependencies. +Return Dependencies::Unknown if its dependencies are unknown.
+This is called fairly regularly during the resolution, +if it returns an Err then resolution will be terminated. +This is helpful if you want to add some form of early termination like a timeout, +or you want to add some form of user feedback if things are taking a while. +If not provided the resolver will run as long as needed.
+An implementation of DependencyProvider that +contains all dependency information available in memory. +Currently packages are picked with the fewest versions contained in the constraints first. +But, that may change in new versions if better heuristics are found. +Versions are picked with the newest versions first.
+pub enum Term<VS: VersionSet> {
+ Positive(VS),
+ Negative(VS),
+}
A positive or negative expression regarding a set of versions.
+For example, “1.0.0 <= v < 2.0.0” is a positive expression +that is evaluated true if a version is selected +and comprised between version 1.0.0 and version 2.0.0.
+The term “not v < 3.0.0” is a negative expression +that is evaluated true if a version is selected >= 3.0.0 +or if no version is selected at all.
+key
and return true
if they are equal.key
and return true
if they are equal.Publicly exported type aliases.
+pub type DependencyConstraints<P, VS> = Map<P, VS>;
Holds information about all possible versions a given package can accept. +There is a difference in semantics between an empty map +inside DependencyConstraints and Dependencies::Unknown: +the former means the package has no dependency and it is a known fact, +while the latter means they could not be fetched by the DependencyProvider.
+struct DependencyConstraints<P, VS> { /* private fields */ }
pub type Map<K, V> = FxHashMap<K, V>;
Map implementation used by the library.
+struct Map<K, V> { /* private fields */ }
pub type SelectedDependencies<P, V> = Map<P, V>;
Concrete dependencies picked by the library during resolve +from DependencyConstraints.
+struct SelectedDependencies<P, V> { /* private fields */ }
pub type Set<V> = FxHashSet<V>;
Set implementation used by the library.
+struct Set<V> { /* private fields */ }
pub enum VersionParseError {
+ NotThreeParts {
+ full_version: String,
+ },
+ ParseIntError {
+ full_version: String,
+ version_part: String,
+ parse_error: String,
+ },
+}
Error creating SemanticVersion from String.
+full_version: String
SemanticVersion that was being parsed.
+SemanticVersion must contain major, minor, patch versions.
+full_version: String
SemanticVersion that was being parsed.
+Wrapper around ParseIntError.
+self
and other
values to be equal, and is used
+by ==
.key
and return true
if they are equal.key
and return true
if they are equal.Traits and implementations to create and compare versions.
+pub struct NumberVersion(pub u32);
Simplest versions possible, just a positive number.
+0: u32
source
. Read moreself
and other
values to be equal, and is used
+by ==
.self
and other
) and is used by the <=
+operator. Read morekey
and return true
if they are equal.key
and return true
if they are equal.pub struct SemanticVersion { /* private fields */ }
Type for semantic versions: major.minor.patch.
+Bump the patch number of a version.
+Bump the minor number of a version.
+Bump the major number of a version.
+source
. Read moreself
and other
values to be equal, and is used
+by ==
.self
and other
) and is used by the <=
+operator. Read morekey
and return true
if they are equal.key
and return true
if they are equal.pub trait Version: Clone + Ord + Debug + Display {
+ // Required methods
+ fn lowest() -> Self;
+ fn bump(&self) -> Self;
+}
Versions have a minimal version (a “0” version) +and are ordered such that every version has a next one.
+As its name suggests, the VersionSet trait describes sets of versions.
+One needs to define
+Two functions are automatically derived, thanks to the mathematical properties of sets. +You can overwrite those implementations, but we highly recommend that you don’t, +except if you are confident in a correct implementation that brings much performance gains.
+It is also extremely important that the Eq
trait is correctly implemented.
+In particular, you can only use #[derive(Eq, PartialEq)]
if Eq
is strictly equivalent to the
+structural equality, i.e. if version sets have canonical representations.
+Such problems may arise if your implementations of complement()
and intersection()
do not
+return canonical representations so be careful there.
pub trait VersionSet: Debug + Display + Clone + Eq {
+ type V: Debug + Display + Clone + Ord;
+
+ // Required methods
+ fn empty() -> Self;
+ fn singleton(v: Self::V) -> Self;
+ fn complement(&self) -> Self;
+ fn intersection(&self, other: &Self) -> Self;
+ fn contains(&self, v: &Self::V) -> bool;
+
+ // Provided methods
+ fn full() -> Self { ... }
+ fn union(&self, other: &Self) -> Self { ... }
+}
Trait describing sets of versions.
+Compute the complement of this set.
+Compute the intersection with another set.
+U::from(self)
.","","","","","","Package whose dependencies we want.","Package whose dependencies we want.","Error raised by the implementer of DependencyProvider.","Version of the package for which we want the dependencies.","Version of the package for which we want the dependencies.","Trait for identifying packages. Automatically implemented …","A Range represents multiple intervals of a continuous …","Set of versions greater or equal to v1
but less than v2
.","","","Convert to something that can be used with BTreeMap::range.","","","","Returns the complement of this Range.","Returns true if the this Range contains the specified …","","Returns true if the this Range contains the specified …","Empty set of versions.","","","","","","","","","Returns the argument unchanged.","Construct a simple range from anything that impls …","","Set of all possible versions","","Set of all versions higher or equal to some version","","Computes the intersection of two sets of versions.","Calls U::from(self)
.","Set of all versions lower or equal to some version","Returns a simpler Range that contains the same versions","Set containing exactly one version","","Set of all versions higher to some version","Set of all versions lower to some version","","","","","","Computes the union of this Range
and another.","","Default formatter for the default reporter.","Default reporter able to generate an explanation as a …","Derivation tree resulting in the impossibility to solve …","Incompatibility derived from two others.","Incompatibility derived from two others.","Incompatibilities that are not derived from others, they …","External incompatibility.","Incompatibility coming from the dependencies of a given …","There are no versions in the given set for this package.","Initial incompatibility aiming at picking the root package …","Output type of the report.","Output type of the report.","Trait for formatting outputs in the reporter.","Reporter trait.","Dependencies of the package are unavailable for versions …","","","","","","","","","","","First cause.","Second cause.","","","","","","","Merge the NoVersions external incompatibilities with the …","","","","","","","Format an External incompatibility.","","Format terms of an incompatibility.","","Returns the argument unchanged.","Returns the argument unchanged.","Returns the argument unchanged.","Returns the argument unchanged.","Returns the argument unchanged.","Calls U::from(self)
.","Calls U::from(self)
.","Calls U::from(self)
.","Calls U::from(self)
.","Calls U::from(self)
.","Generate a report from the derivation tree describing the …","","Generate a report from the derivation tree describing the …","","Indicate if that incompatibility is present multiple times …","Terms of the incompatibility.","","","","","","","","","","","","","","","","","","","","An enum used by DependencyProvider that holds information …","Trait that allows the algorithm to retrieve available …","The kind of error returned from these methods.","Container for all available package versions.","A basic implementation of DependencyProvider.","The type returned from prioritize
. The resolver does not …","Package dependencies are unavailable.","Registers the dependencies of a package and version pair. …","","","","","Once the resolver has found the highest Priority
package …","","","","","","","","Returns the argument unchanged.","Returns the argument unchanged.","Retrieves the package dependencies. Return …","","Calls U::from(self)
.","Calls U::from(self)
.","Creates an empty OfflineDependencyProvider with no …","Lists packages that have been saved.","Decision making is the process of choosing the next package","","Main function of the library. Finds a set of packages …","This is called fairly regularly during the resolution, if …","","","","","","","","","Lists versions of saved packages in sorted order. Returns …","The term “not v < 3.0.0” is a negative expression that …","For example, “1.0.0 <= v < 2.0.0” is a positive …","A positive or negative expression regarding a set of …","","","","","","","","","","","","","Returns the argument unchanged.","Calls U::from(self)
.","","","","","","Holds information about all possible versions a given …","Map implementation used by the library.","Concrete dependencies picked by the library during resolve …","Set implementation used by the library.","SemanticVersion must contain major, minor, patch versions.","Simplest versions possible, just a positive number.","Wrapper around ParseIntError.","Type for semantic versions: major.minor.patch.","Versions have a minimal version (a “0” version) and …","Error creating SemanticVersion from String.","","","","","","","Returns the next version, the smallest strictly higher …","","","Bump the major number of a version.","Bump the minor number of a version.","Bump the patch number of a version.","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","Returns the argument unchanged.","","Returns the argument unchanged.","","Returns the argument unchanged.","","","","","","Calls U::from(self)
.","Calls U::from(self)
.","Calls U::from(self)
.","Returns the lowest version.","","","Create a version with “major”, “minor” and “patch…","Version 1.0.0.","","","","","","","","","","","","","","Version 2.0.0.","","","","Version 0.0.0.","SemanticVersion that was being parsed.","SemanticVersion that was being parsed.","A specific error resulted from parsing a part of the …","A version part where parsing failed.","Version type associated with the sets manipulated.","Trait describing sets of versions.","Compute the complement of this set.","Evaluate membership of a version in this set.","Constructor for an empty set containing no version.","Constructor for the set containing all versions. …","Compute the intersection with another set.","Constructor for a set containing exactly one version.","Compute the union with another set. Thanks to set …"],"i":[0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,53,54,53,53,54,0,0,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,0,0,0,0,28,0,28,29,29,29,55,33,0,0,29,56,28,29,30,31,56,28,29,30,31,30,30,28,29,30,28,29,30,28,31,28,29,29,30,31,33,31,33,31,56,28,29,30,31,56,28,29,30,31,55,56,55,56,30,30,28,29,30,29,56,28,29,30,31,56,28,29,30,31,56,28,29,30,31,0,0,40,42,0,40,42,36,42,36,42,36,40,36,42,36,42,36,36,36,42,36,40,36,42,36,36,36,40,36,0,40,42,36,42,36,42,36,42,36,36,34,34,0,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,0,0,0,0,49,0,49,0,0,0,46,49,47,46,49,47,45,46,47,46,46,46,46,47,46,47,46,47,46,47,46,49,47,46,46,46,46,49,49,49,49,47,47,47,47,46,46,49,49,47,47,46,46,46,46,49,47,47,47,47,46,46,47,46,49,47,45,46,47,46,46,46,47,46,47,46,49,47,46,49,47,46,49,47,46,46,49,47,46,57,58,58,58,6,0,6,6,6,6,6,6,6],"f":"````````````````{ce{}{}}0{{{b{ceg}}d}f{hj}ln}{{{b{ceg}}d}f{A`j}{A`l}{A`n}}{cc{}}3{{{b{ceg}}}{{Ab{n}}}jln}{cAd{}}{c{{Af{e}}}{}{}}0{cAh{}}```````{{eg}{{Aj{c}}}{}{{Al{c}}}{{Al{c}}}}88{{{Aj{c}}}{{Ab{{B`{{An{c}}{An{c}}}}}}}Bb}{{{Aj{c}}}{{Aj{c}}}Bd}{{ce}Bf{}{}}{{{Aj{c}}}{{Aj{c}}}{A`hB
dBhBb}}2{{{Aj{c}}c}BjBb}{{{Aj{c}}e}Bj{A`hBdBhBb}{}}{{{Aj{c}}e}{{`{{Bn{}{{Bl{Bj}}}}}}}Bb{{Bn{}{{Bl{c}}}}}}{{}{{Aj{c}}}{}}{{}{{Aj{c}}}{A`hBdBhBb}}{{{Aj{c}}{Aj{c}}}BjC`}{{ce}Bj{}{}}000{{{Aj{c}}d}fA`}{{{Aj{c}}d}f{hBh}}{cc{}}{g{{Aj{c}}}Bb{Bd{Al{c}}}{{Cb{e}}}}67{{{Aj{c}}e}BfCdCf}{e{{Aj{c}}}{}{{Al{c}}}}{{{Aj{c}}{Aj{c}}}{{Aj{c}}}{A`hBdBhBb}}{{{Aj{c}}{Aj{c}}}{{Aj{c}}}{BbBd}}{ce{}{}}3{{{Aj{c}}e}{{Aj{c}}}{BbBd}{{Bn{}{{Bl{c}}}}}}{e{{Aj{c}}}Bd{{Al{c}}}}{c{{Aj{e}}}{}{A`hBdBhBb}}663{cAd{}}{c{{Af{e}}}{}{}}0{cAh{}}78```````````````6666666666``{{{Ch{ce}}}{{Ch{ce}}}{Bdj}{Bdl}}{{{Cj{ce}}}{{Cj{ce}}}{Bdj}{Bdl}}{{{Cl{ce}}}{{Cl{ce}}}{Bdj}{Bdl}}{{ce}Bf{}{}}00{{{Ch{ce}}}Bfjl}{{}Cn}{{{Ch{ce}}d}f{A`j}{A`l}}{{{Cj{ce}}d}fjl}{{{Cj{ce}}d}f{A`j}{A`l}}{{{Cl{ce}}d}f{A`j}{A`l}}{{Cnd}f}{{{Db{}{{D`{c}}}}{Cj{eg}}}c{}jl}{{Cn{Cj{ce}}}Adjl}{{{Db{}{{D`{c}}}}{Df{e{Dd{g}}}}}c{}jl}{{Cn{Df{c{Dd{e}}}}}gjl{}}{cc{}}0000{ce{}{}}0000{{{Ch{ce}}}gjl{}}0{{{Ch{ce}}i}gjl{}{{Db{ce}{{D`{g}}}}}}0``222{cAd{}}{c{{Af{e}}}{}{}}000000000{cAh{}}0000```````{{{Dh{ce}}cgi}BfjlAl{{Dj{}{{Bl{{B`{ce}}}}}}}}6666{{{E`{}{{Dl{c}}{Dn{e}}}}gi}{{Af{Abe}}}{BbBd}njl}{{{Dh{ce}}ce}{{Af{AbEb}}}jl}{{{Ed{ce}}}{{Ed{ce}}}{Bdj}{Bdl}}{{{Dh{ce}}}{{Dh{ce}}}{Bdj}{Bdl}}{{ce}Bf{}{}}0{{}{{Dh{ce}}}{Efj}{Efl}}{{{Dh{ce}}d}f{A`j}{A`l}}>>{{{E`{}{{Dl{c}}{Dn{e}}}}g}{{Af{{Ed{gi}}e}}}{BbBd}njl}{{{Dh{ce}}c}{{Af{{Ed{ce}}Eb}}}jl}??{{}{{Dh{ce}}}jl}{{{Dh{ce}}}{{`{{Bn{}{{Bl{c}}}}}}}jl}{{{E`{}{{Dl{c}}{Dn{e}}}}gi}c{BbBd}njl}{{{Dh{ce}}ce}gjl{}}{{gci}{{Af{{Eh{c}}{b{ce}}}}}jl{{E`{ce}}}Al}{{{E`{}{{Dl{c}}{Dn{e}}}}}{{Af{Bfe}}}{BbBd}n}{ce{}{}}0{c{{Af{e}}}{}{}}000{cAh{}}0{{{Dh{ce}}c}{{Ab{{`{{Bn{}{{Bl{}}}}}}}}}jl}```{{{Dd{c}}}{{Dd{c}}}l}44{{{Dd{c}}}{{Dd{c}}}{Bdl}}{{ce}Bf{}{}}{{{Dd{c}}{Dd{c}}}Bj{C`l}}{{ce}Bj{}{}}000{{{Dd{c}}d}f{lh}}{{{Dd{c}}d}f{A`l}}{cc{}};;{cAd{}};;:``````````<<<<<<{EjEj}{ElEl}{EnEn}1111099{{ElEl}F`}{{EnEn}F`}{{ce}F`{}{}}0{{ElEl}Bj}{{FbFb}Bj}{{EnEn}Bj}============{{Eld}f}0{{Fbd}f}0{{End}f}0{{{B`{FdFdFd}}}El}0>;>:>{FdEn}0{Ff{{Af{Elc}}}{}}{{Elc}BfCf}{{Enc}BfCf}{ce{}{}}00{
{}Ej}{{}El}{{}En}{{FdFdFd}El}2{{ElEl}{{Ab{F`}}}}{{EnEn}{{Ab{F`}}}}66{cAd{}}00{c{{Af{e}}}{}{}}000006{cAh{}}007``````{{{l{}{{Fh{c}}}}}{{l{}{{Fh{c}}}}}{A`hBdBb}}{{{l{}{{Fh{c}}}}c}Bj{A`hBdBb}}{{}{{l{}{{Fh{c}}}}}{A`hBdBb}}0{{{l{}{{Fh{c}}}}{l{}{{Fh{c}}}}}{{l{}{{Fh{c}}}}}{A`hBdBb}}{c{{l{}{{Fh{c}}}}}{A`hBdBb}}1","c":[],"p":[[6,"PubGrubError",9],[5,"Formatter",320],[8,"Result",320],[10,"Display",320],[10,"Package",32],[10,"VersionSet",311],[10,"Error",321],[10,"Debug",320],[6,"Option",322],[5,"String",323],[6,"Result",324],[5,"TypeId",325],[5,"Range",33],[10,"Into",326],[6,"Bound",327],[1,"tuple"],[10,"Ord",328],[10,"Clone",329],[1,"unit"],[10,"Eq",328],[1,"bool"],[17,"Item"],[10,"Iterator",330],[10,"PartialEq",328],[10,"RangeBounds",327],[10,"Hash",331],[10,"Hasher",331],[6,"DerivationTree",76],[6,"External",76],[5,"Derived",76],[5,"DefaultStringReportFormatter",76],[17,"Output"],[10,"ReportFormatter",76],[6,"Term",196],[8,"Map",218],[5,"OfflineDependencyProvider",155],[10,"IntoIterator",332],[17,"Priority"],[17,"Err"],[10,"DependencyProvider",155],[6,"Infallible",326],[6,"Dependencies",155],[10,"Default",333],[8,"SelectedDependencies",218],[10,"Version",222],[5,"SemanticVersion",222],[5,"NumberVersion",222],[6,"Ordering",328],[6,"VersionParseError",222],[1,"u32"],[1,"str"],[17,"V"],[15,"ErrorRetrievingDependencies",27],[15,"SelfDependency",27],[10,"Reporter",76],[5,"DefaultStringReporter",76],[15,"NotThreeParts",307],[15,"ParseIntError",307]],"b":[[18,"impl-Display-for-PubGrubError%3CP,+VS,+E%3E"],[19,"impl-Debug-for-PubGrubError%3CP,+VS,+E%3E"],[40,"impl-VersionSet-for-Range%3CT%3E"],[41,"impl-Range%3CV%3E"],[42,"impl-Range%3CV%3E"],[43,"impl-VersionSet-for-Range%3CT%3E"],[45,"impl-Range%3CV%3E"],[46,"impl-VersionSet-for-Range%3CT%3E"],[52,"impl-Debug-for-Range%3CV%3E"],[53,"impl-Display-for-Range%3CV%3E"],[56,"impl-VersionSet-for-Range%3CT%3E"],[57,"impl-Range%3CV%3E"],[60,"impl-VersionSet-for-Range%3CT%3E"],[61,"impl-Range%3CV%3E"],[65,"impl-Range%3CV%3E"],[66,"impl-Ver
sionSet-for-Range%3CT%3E"],[74,"impl-Range%3CV%3E"],[75,"impl-VersionSet-for-Range%3CT%3E"],[112,"impl-Display-for-External%3CP,+VS%3E"],[113,"impl-Debug-for-External%3CP,+VS%3E"],[209,"impl-Display-for-Term%3CVS%3E"],[210,"impl-Debug-for-Term%3CVS%3E"],[263,"impl-Debug-for-SemanticVersion"],[264,"impl-Display-for-SemanticVersion"],[265,"impl-Debug-for-VersionParseError"],[266,"impl-Display-for-VersionParseError"],[267,"impl-Display-for-NumberVersion"],[268,"impl-Debug-for-NumberVersion"],[269,"impl-From%3C%26(u32,+u32,+u32)%3E-for-SemanticVersion"],[270,"impl-From%3C(u32,+u32,+u32)%3E-for-SemanticVersion"],[272,"impl-From%3C%26SemanticVersion%3E-for-SemanticVersion"],[274,"impl-From%3C%26NumberVersion%3E-for-NumberVersion"],[276,"impl-From%3Cu32%3E-for-NumberVersion"],[277,"impl-From%3C%26u32%3E-for-NumberVersion"]]}]\
+]'));
+if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
+else if (window.initSearch) window.initSearch(searchIndex);
diff --git a/settings.html b/settings.html
new file mode 100644
index 00000000..05488c48
--- /dev/null
+++ b/settings.html
@@ -0,0 +1,2 @@
+1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +
// SPDX-License-Identifier: MPL-2.0
+
+//! Handling pubgrub errors.
+
+use thiserror::Error;
+
+use crate::package::Package;
+use crate::report::DerivationTree;
+use crate::version_set::VersionSet;
+
/// Errors that may occur while solving dependencies.
#[derive(Error, Debug)]
pub enum PubGrubError<P: Package, VS: VersionSet, E: std::error::Error> {
    /// There is no solution for this set of dependencies.
    /// The wrapped [DerivationTree] explains, step by step,
    /// why the solver concluded that no solution exists.
    #[error("No solution")]
    NoSolution(DerivationTree<P, VS>),

    /// Error arising when the implementer of
    /// [DependencyProvider](crate::solver::DependencyProvider)
    /// returned an error in the method
    /// [get_dependencies](crate::solver::DependencyProvider::get_dependencies).
    #[error("Retrieving dependencies of {package} {version} failed")]
    ErrorRetrievingDependencies {
        /// Package whose dependencies we want.
        package: P,
        /// Version of the package for which we want the dependencies.
        version: VS::V,
        /// Error raised by the implementer of
        /// [DependencyProvider](crate::solver::DependencyProvider).
        source: E,
    },

    /// Error arising when the implementer of
    /// [DependencyProvider](crate::solver::DependencyProvider)
    /// returned a dependency on the requested package.
    /// This technically means that the package directly depends on itself,
    /// and is clearly some kind of mistake.
    #[error("{package} {version} depends on itself")]
    SelfDependency {
        /// Package whose dependencies we want.
        package: P,
        /// Version of the package for which we want the dependencies.
        version: VS::V,
    },

    /// Error arising when the implementer of
    /// [DependencyProvider](crate::solver::DependencyProvider)
    /// returned an error in the method
    /// [choose_version](crate::solver::DependencyProvider::choose_version).
    #[error("Decision making failed")]
    ErrorChoosingPackageVersion(E),

    /// Error arising when the implementer of [DependencyProvider](crate::solver::DependencyProvider)
    /// returned an error in the method [should_cancel](crate::solver::DependencyProvider::should_cancel).
    #[error("We should cancel")]
    ErrorInShouldCancel(E),

    /// Something unexpected happened.
    #[error("{0}")]
    Failure(String),
}
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +
use std::{
+ fmt,
+ hash::{Hash, Hasher},
+ marker::PhantomData,
+ ops::{Index, Range},
+};
+
/// The index of a value allocated in an arena that holds `T`s.
///
/// Clone, Copy and the comparison/hashing traits are implemented
/// manually instead of derived, because deriving them would add an
/// unnecessary `T: Trait` bound on the phantom type parameter.
///
/// <https://github.com/rust-lang/rust/issues/26925>
pub struct Id<T> {
    raw: u32,
    _ty: PhantomData<fn() -> T>,
}

impl<T> Clone for Id<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T> Copy for Id<T> {}

impl<T> PartialEq for Id<T> {
    fn eq(&self, other: &Id<T>) -> bool {
        self.raw == other.raw
    }
}

impl<T> Eq for Id<T> {}

impl<T> Hash for Id<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.raw.hash(state)
    }
}

impl<T> fmt::Debug for Id<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Print only the last path segment of the type name,
        // e.g. `String` rather than `alloc::string::String`.
        let full_name = std::any::type_name::<T>();
        let short_name = full_name.rsplit(':').next().unwrap_or(full_name);
        write!(f, "Id::<{}>({})", short_name, self.raw)
    }
}

impl<T> Id<T> {
    /// Returns the raw index backing this id.
    pub fn into_raw(self) -> usize {
        self.raw as usize
    }

    /// Builds an id from a raw index (crate-internal only).
    fn from(n: u32) -> Self {
        Self {
            raw: n,
            _ty: PhantomData,
        }
    }

    /// Iterates over every id contained in a half-open range of ids.
    pub fn range_to_iter(range: Range<Self>) -> impl Iterator<Item = Self> {
        (range.start.raw..range.end.raw).map(Self::from)
    }
}
+
+/// Yet another index-based arena.
+///
+/// An arena is a kind of simple grow-only allocator, backed by a `Vec`
+/// where all items have the same lifetime, making it easier
+/// to have references between those items.
+/// They are all dropped at once when the arena is dropped.
+#[derive(Clone, PartialEq, Eq)]
+pub struct Arena<T> {
+ data: Vec<T>,
+}
+
+impl<T: fmt::Debug> fmt::Debug for Arena<T> {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("Arena")
+ .field("len", &self.data.len())
+ .field("data", &self.data)
+ .finish()
+ }
+}
+
+impl<T> Arena<T> {
+ pub fn new() -> Arena<T> {
+ Arena { data: Vec::new() }
+ }
+
+ pub fn alloc(&mut self, value: T) -> Id<T> {
+ let raw = self.data.len();
+ self.data.push(value);
+ Id::from(raw as u32)
+ }
+
+ pub fn alloc_iter<I: Iterator<Item = T>>(&mut self, values: I) -> Range<Id<T>> {
+ let start = Id::from(self.data.len() as u32);
+ values.for_each(|v| {
+ self.alloc(v);
+ });
+ let end = Id::from(self.data.len() as u32);
+ Range { start, end }
+ }
+}
+
+impl<T> Index<Id<T>> for Arena<T> {
+ type Output = T;
+ fn index(&self, id: Id<T>) -> &T {
+ &self.data[id.raw as usize]
+ }
+}
+
+impl<T> Index<Range<Id<T>>> for Arena<T> {
+ type Output = [T];
+ fn index(&self, id: Range<Id<T>>) -> &[T] {
+ &self.data[(id.start.raw as usize)..(id.end.raw as usize)]
+ }
+}
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 +159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +198 +199 +200 +201 +202 +203 +204 +205 +206 +207 +208 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +222 +223 +224 +225 +226 +227 +228 +229 +230 +231 +232 +233 +234 +235 +236 +237 +238 +239 +240 +241 +242 +243 +244 +245 +246 +247 +248 +249 +250 +251 +252 +253 +254 +255 +256 +257 +258 +259 +260 +261 +262 +263 +264 +265 +266 +267 +268 +269 +270 +271 +272 +273 +274 +275 +276 +277 +278 +279 +280 +281 +282 +283 +284 +285 +286 +287 +288 +289 +290 +291 +292 +293 +294 +295 +296 +297 +298 +299 +300 +301 +302 +303 +304 +305 +306 +307 +308 +309 +310 +311 +312 +313 +314 +315 +316 +317 +318 +319 +320 +321 +322 +323 +324 +
// SPDX-License-Identifier: MPL-2.0
+
+//! Core model and functions
+//! to write a functional PubGrub algorithm.
+
+use std::error::Error;
+use std::sync::Arc;
+
+use crate::error::PubGrubError;
+use crate::internal::arena::Arena;
+use crate::internal::incompatibility::{IncompId, Incompatibility, Relation};
+use crate::internal::partial_solution::SatisfierSearch::{
+ DifferentDecisionLevels, SameDecisionLevels,
+};
+use crate::internal::partial_solution::{DecisionLevel, PartialSolution};
+use crate::internal::small_vec::SmallVec;
+use crate::package::Package;
+use crate::report::DerivationTree;
+use crate::type_aliases::{DependencyConstraints, Map, Set};
+use crate::version_set::VersionSet;
+
/// Current state of the PubGrub algorithm.
#[derive(Clone)]
pub struct State<P: Package, VS: VersionSet, Priority: Ord + Clone> {
    /// Root package from which resolution was started.
    root_package: P,
    /// Version of the root package being solved for.
    root_version: VS::V,

    /// Ids of known incompatibilities, indexed by every package
    /// appearing in their terms.
    incompatibilities: Map<P, Vec<IncompId<P, VS>>>,

    /// Store the ids of incompatibilities that are already contradicted.
    /// For each one keep track of the decision level when it was found to be contradicted.
    /// These will stay contradicted until we have backtracked beyond its associated decision level.
    contradicted_incompatibilities: Map<IncompId<P, VS>, DecisionLevel>,

    /// All incompatibilities expressing dependencies,
    /// with common dependents merged.
    merged_dependencies: Map<(P, P), SmallVec<IncompId<P, VS>>>,

    /// Partial solution.
    /// TODO: remove pub.
    pub partial_solution: PartialSolution<P, VS, Priority>,

    /// The store is the reference storage for all incompatibilities.
    pub incompatibility_store: Arena<Incompatibility<P, VS>>,

    /// This is a stack of work to be done in `unit_propagation`.
    /// It can definitely be a local variable to that method, but
    /// this way we can reuse the same allocation for better performance.
    unit_propagation_buffer: SmallVec<P>,
}
+
+impl<P: Package, VS: VersionSet, Priority: Ord + Clone> State<P, VS, Priority> {
+ /// Initialization of PubGrub state.
+ pub fn init(root_package: P, root_version: VS::V) -> Self {
+ let mut incompatibility_store = Arena::new();
+ let not_root_id = incompatibility_store.alloc(Incompatibility::not_root(
+ root_package.clone(),
+ root_version.clone(),
+ ));
+ let mut incompatibilities = Map::default();
+ incompatibilities.insert(root_package.clone(), vec![not_root_id]);
+ Self {
+ root_package,
+ root_version,
+ incompatibilities,
+ contradicted_incompatibilities: Map::default(),
+ partial_solution: PartialSolution::empty(),
+ incompatibility_store,
+ unit_propagation_buffer: SmallVec::Empty,
+ merged_dependencies: Map::default(),
+ }
+ }
+
    /// Add an incompatibility to the state.
    ///
    /// The incompatibility is allocated in the store and then merged with
    /// previously known dependency incompatibilities when possible.
    pub fn add_incompatibility(&mut self, incompat: Incompatibility<P, VS>) {
        let id = self.incompatibility_store.alloc(incompat);
        self.merge_incompatibility(id);
    }
+
    /// Add the incompatibilities implied by the dependencies of a given
    /// package version to the state.
    ///
    /// One incompatibility is created per dependency entry; each one is then
    /// merged with previously registered ones when possible.
    /// Returns the range of ids of the incompatibilities freshly allocated
    /// in the store (before any merging took place).
    pub fn add_incompatibility_from_dependencies(
        &mut self,
        package: P,
        version: VS::V,
        deps: &DependencyConstraints<P, VS>,
    ) -> std::ops::Range<IncompId<P, VS>> {
        // Create incompatibilities and allocate them in the store.
        let new_incompats_id_range =
            self.incompatibility_store
                .alloc_iter(deps.iter().map(|dep| {
                    Incompatibility::from_dependency(
                        package.clone(),
                        VS::singleton(version.clone()),
                        dep,
                    )
                }));
        // Merge the newly created incompatibilities with the older ones.
        for id in IncompId::range_to_iter(new_incompats_id_range.clone()) {
            self.merge_incompatibility(id);
        }
        new_incompats_id_range
    }
+
+ /// Unit propagation is the core mechanism of the solving algorithm.
+ /// CF <https://github.com/dart-lang/pub/blob/master/doc/solver.md#unit-propagation>
+ pub fn unit_propagation<E: Error>(&mut self, package: P) -> Result<(), PubGrubError<P, VS, E>> {
+ self.unit_propagation_buffer.clear();
+ self.unit_propagation_buffer.push(package);
+ while let Some(current_package) = self.unit_propagation_buffer.pop() {
+ // Iterate over incompatibilities in reverse order
+ // to evaluate first the newest incompatibilities.
+ let mut conflict_id = None;
+ // We only care about incompatibilities if it contains the current package.
+ for &incompat_id in self.incompatibilities[¤t_package].iter().rev() {
+ if self
+ .contradicted_incompatibilities
+ .contains_key(&incompat_id)
+ {
+ continue;
+ }
+ let current_incompat = &self.incompatibility_store[incompat_id];
+ match self.partial_solution.relation(current_incompat) {
+ // If the partial solution satisfies the incompatibility
+ // we must perform conflict resolution.
+ Relation::Satisfied => {
+ log::info!(
+ "Start conflict resolution because incompat satisfied:\n {}",
+ current_incompat
+ );
+ conflict_id = Some(incompat_id);
+ break;
+ }
+ Relation::AlmostSatisfied(package_almost) => {
+ self.unit_propagation_buffer.push(package_almost.clone());
+ // Add (not term) to the partial solution with incompat as cause.
+ self.partial_solution.add_derivation(
+ package_almost,
+ incompat_id,
+ &self.incompatibility_store,
+ );
+ // With the partial solution updated, the incompatibility is now contradicted.
+ self.contradicted_incompatibilities
+ .insert(incompat_id, self.partial_solution.current_decision_level());
+ }
+ Relation::Contradicted(_) => {
+ self.contradicted_incompatibilities
+ .insert(incompat_id, self.partial_solution.current_decision_level());
+ }
+ _ => {}
+ }
+ }
+ if let Some(incompat_id) = conflict_id {
+ let (package_almost, root_cause) = self.conflict_resolution(incompat_id)?;
+ self.unit_propagation_buffer.clear();
+ self.unit_propagation_buffer.push(package_almost.clone());
+ // Add to the partial solution with incompat as cause.
+ self.partial_solution.add_derivation(
+ package_almost,
+ root_cause,
+ &self.incompatibility_store,
+ );
+ // After conflict resolution and the partial solution update,
+ // the root cause incompatibility is now contradicted.
+ self.contradicted_incompatibilities
+ .insert(root_cause, self.partial_solution.current_decision_level());
+ }
+ }
+ // If there are no more changed packages, unit propagation is done.
+ Ok(())
+ }
+
    /// Return the root cause and the backtracked model.
    /// CF <https://github.com/dart-lang/pub/blob/master/doc/solver.md#unit-propagation>
    ///
    /// # Errors
    ///
    /// Returns [PubGrubError::NoSolution] when derivation reaches an
    /// incompatibility that is terminal for the root package, meaning the
    /// dependency constraints cannot be satisfied.
    #[allow(clippy::type_complexity)]
    fn conflict_resolution<E: Error>(
        &mut self,
        incompatibility: IncompId<P, VS>,
    ) -> Result<(P, IncompId<P, VS>), PubGrubError<P, VS, E>> {
        let mut current_incompat_id = incompatibility;
        // Tracks whether `current_incompat_id` was rederived during this
        // resolution, so `backtrack` knows to merge it into the state.
        let mut current_incompat_changed = false;
        loop {
            if self.incompatibility_store[current_incompat_id]
                .is_terminal(&self.root_package, &self.root_version)
            {
                // Terminal means even the root package cannot be selected:
                // report the full derivation tree as "no solution".
                return Err(PubGrubError::NoSolution(
                    self.build_derivation_tree(current_incompat_id),
                ));
            } else {
                let (package, satisfier_search_result) = self.partial_solution.satisfier_search(
                    &self.incompatibility_store[current_incompat_id],
                    &self.incompatibility_store,
                );
                match satisfier_search_result {
                    DifferentDecisionLevels {
                        previous_satisfier_level,
                    } => {
                        // Satisfier and previous satisfier live at different
                        // decision levels: backtrack and return the root cause.
                        let package = package.clone();
                        self.backtrack(
                            current_incompat_id,
                            current_incompat_changed,
                            previous_satisfier_level,
                        );
                        log::info!("backtrack to {:?}", previous_satisfier_level);
                        return Ok((package, current_incompat_id));
                    }
                    SameDecisionLevels { satisfier_cause } => {
                        // Same decision level: derive the "prior cause"
                        // incompatibility and continue resolution with it.
                        let prior_cause = Incompatibility::prior_cause(
                            current_incompat_id,
                            satisfier_cause,
                            package,
                            &self.incompatibility_store,
                        );
                        log::info!("prior cause: {}", prior_cause);
                        current_incompat_id = self.incompatibility_store.alloc(prior_cause);
                        current_incompat_changed = true;
                    }
                }
            }
        }
    }
+
    /// Backtracking.
    ///
    /// Rewinds the partial solution to `decision_level`, drops contradiction
    /// markers recorded past that level, and — if the incompatibility was
    /// rederived during conflict resolution — registers it in the state.
    fn backtrack(
        &mut self,
        incompat: IncompId<P, VS>,
        incompat_changed: bool,
        decision_level: DecisionLevel,
    ) {
        self.partial_solution.backtrack(decision_level);
        // Remove contradicted incompatibilities that depend on decisions we just backtracked away.
        self.contradicted_incompatibilities
            .retain(|_, dl| *dl <= decision_level);
        if incompat_changed {
            self.merge_incompatibility(incompat);
        }
    }
+
    /// Add this incompatibility into the set of all incompatibilities.
    ///
    /// Pub collapses identical dependencies from adjacent package versions
    /// into individual incompatibilities.
    /// This substantially reduces the total number of incompatibilities
    /// and makes it much easier for Pub to reason about multiple versions of packages at once.
    ///
    /// For example, rather than representing
    /// foo 1.0.0 depends on bar ^1.0.0 and
    /// foo 1.1.0 depends on bar ^1.0.0
    /// as two separate incompatibilities,
    /// they are collapsed together into the single incompatibility {foo ^1.0.0, not bar ^1.0.0}
    /// (provided that no other version of foo exists between 1.0.0 and 2.0.0).
    /// We could collapse them into { foo (1.0.0 ∪ 1.1.0), not bar ^1.0.0 }
    /// without having to check the existence of other versions though.
    fn merge_incompatibility(&mut self, mut id: IncompId<P, VS>) {
        if let Some((p1, p2)) = self.incompatibility_store[id].as_dependency() {
            // If we are a dependency, there's a good chance we can be merged with a previous dependency
            let deps_lookup = self
                .merged_dependencies
                .entry((p1.clone(), p2.clone()))
                .or_default();
            // Look for a previously stored dependency incompatibility on the
            // same (dependent, dependency) pair that can absorb this one.
            if let Some((past, merged)) = deps_lookup.as_mut_slice().iter_mut().find_map(|past| {
                self.incompatibility_store[id]
                    .merge_dependents(&self.incompatibility_store[*past])
                    .map(|m| (past, m))
            }) {
                // A merge succeeded: allocate the merged incompatibility,
                // then drop the old id from every package's index so only
                // the merged one remains referenced.
                let new = self.incompatibility_store.alloc(merged);
                for (pkg, _) in self.incompatibility_store[new].iter() {
                    self.incompatibilities
                        .entry(pkg.clone())
                        .or_default()
                        .retain(|id| id != past);
                }
                *past = new;
                id = new;
            } else {
                deps_lookup.push(id);
            }
        }
        // Index the (possibly merged) incompatibility under every package
        // appearing in its terms.
        for (pkg, term) in self.incompatibility_store[id].iter() {
            if cfg!(debug_assertions) {
                // A term allowing any version would make the incompatibility
                // vacuous — presumably an upstream bug; check in debug builds.
                assert_ne!(term, &crate::term::Term::any());
            }
            self.incompatibilities
                .entry(pkg.clone())
                .or_default()
                .push(id);
        }
    }
+
+ // Error reporting #########################################################
+
    /// Builds the [DerivationTree] explaining why `incompat` holds,
    /// by expanding its causes recursively (but without recursion).
    fn build_derivation_tree(&self, incompat: IncompId<P, VS>) -> DerivationTree<P, VS> {
        // Collect every incompatibility id reachable from `incompat`
        // through its causes. Ids reached more than once are "shared"
        // so the reporter can reference them instead of repeating them.
        let mut all_ids = Set::default();
        let mut shared_ids = Set::default();
        let mut stack = vec![incompat];
        while let Some(i) = stack.pop() {
            if let Some((id1, id2)) = self.incompatibility_store[i].causes() {
                if all_ids.contains(&i) {
                    shared_ids.insert(i);
                } else {
                    stack.push(id1);
                    stack.push(id2);
                }
            }
            all_ids.insert(i);
        }
        // To avoid recursion we need to generate trees in topological order.
        // That is to say we need to ensure that the causes are processed before the incompatibility they effect.
        // It happens to be that sorting by their ID maintains this property.
        let mut sorted_ids = all_ids.into_iter().collect::<Vec<_>>();
        sorted_ids.sort_unstable_by_key(|id| id.into_raw());
        let mut precomputed = Map::default();
        for id in sorted_ids {
            let tree = Incompatibility::build_derivation_tree(
                id,
                &shared_ids,
                &self.incompatibility_store,
                &precomputed,
            );
            precomputed.insert(id, Arc::new(tree));
        }
        // Now the user can refer to the entire tree from its root.
        // The root is not the cause of any other tree, so its Arc should
        // have a single owner here and unwrapping it succeeds.
        Arc::into_inner(precomputed.remove(&incompat).unwrap()).unwrap()
    }
+}
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 +159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +198 +199 +200 +201 +202 +203 +204 +205 +206 +207 +208 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +222 +223 +224 +225 +226 +227 +228 +229 +230 +231 +232 +233 +234 +235 +236 +237 +238 +239 +240 +241 +242 +243 +244 +245 +246 +247 +248 +249 +250 +251 +252 +253 +254 +255 +256 +257 +258 +259 +260 +261 +262 +263 +264 +265 +266 +267 +268 +269 +270 +271 +272 +273 +274 +275 +276 +277 +278 +279 +280 +281 +282 +283 +284 +285 +286 +287 +288 +289 +290 +291 +292 +293 +294 +295 +296 +297 +298 +299 +300 +301 +302 +303 +304 +305 +306 +307 +308 +309 +310 +311 +312 +313 +314 +315 +316 +317 +318 +319 +320 +321 +322 +323 +324 +325 +326 +327 +328 +329 +330 +331 +332 +333 +334 +335 +336 +337 +338 +339 +340 +341 +342 +343 +344 +
// SPDX-License-Identifier: MPL-2.0
+
+//! An incompatibility is a set of terms for different packages
+//! that should never be satisfied all together.
+
+use std::fmt;
+use std::sync::Arc;
+
+use crate::internal::arena::{Arena, Id};
+use crate::internal::small_map::SmallMap;
+use crate::package::Package;
+use crate::report::{
+ DefaultStringReportFormatter, DerivationTree, Derived, External, ReportFormatter,
+};
+use crate::term::{self, Term};
+use crate::type_aliases::{Map, Set};
+use crate::version_set::VersionSet;
+
/// An incompatibility is a set of terms for different packages
/// that should never be satisfied all together.
/// An incompatibility usually originates from a package dependency.
/// For example, if package A at version 1 depends on package B
/// at version 2, you can never have both terms `A = 1`
/// and `not B = 2` satisfied at the same time in a partial solution.
/// This would mean that we found a solution with package A at version 1
/// but not with package B at version 2.
/// Yet A at version 1 depends on B at version 2 so this is not possible.
/// Therefore, the set `{ A = 1, not B = 2 }` is an incompatibility,
/// defined from dependencies of A at version 1.
///
/// Incompatibilities can also be derived from two other incompatibilities
/// during conflict resolution. More about all this in
/// [PubGrub documentation](https://github.com/dart-lang/pub/blob/master/doc/solver.md#incompatibility).
#[derive(Debug, Clone)]
pub struct Incompatibility<P: Package, VS: VersionSet> {
    /// One term per package involved; these terms must never all be satisfied at once.
    package_terms: SmallMap<P, Term<VS>>,
    /// Why this incompatibility exists (used to rebuild the derivation tree for reporting).
    kind: Kind<P, VS>,
}

/// Type alias of unique identifiers for incompatibilities.
pub type IncompId<P, VS> = Id<Incompatibility<P, VS>>;

/// The origin of an incompatibility, kept around for error reporting.
#[derive(Debug, Clone)]
enum Kind<P: Package, VS: VersionSet> {
    /// Initial incompatibility aiming at picking the root package for the first decision.
    NotRoot(P, VS::V),
    /// There are no versions in the given range for this package.
    NoVersions(P, VS),
    /// Dependencies of the package are unavailable for versions in that range.
    UnavailableDependencies(P, VS),
    /// Incompatibility coming from the dependencies of a given package.
    FromDependencyOf(P, VS, P, VS),
    /// Derived from two causes. Stores cause ids.
    DerivedFrom(IncompId<P, VS>, IncompId<P, VS>),
}

/// A Relation describes how a set of terms can be compared to an incompatibility.
/// Typically, the set of terms comes from the partial solution.
#[derive(Eq, PartialEq, Debug)]
pub enum Relation<P: Package> {
    /// We say that a set of terms S satisfies an incompatibility I
    /// if S satisfies every term in I.
    Satisfied,
    /// We say that S contradicts I
    /// if S contradicts at least one term in I.
    Contradicted(P),
    /// If S satisfies all but one of I's terms and is inconclusive for the remaining term,
    /// we say S "almost satisfies" I and we call the remaining term the "unsatisfied term".
    AlmostSatisfied(P),
    /// Otherwise, we say that their relation is inconclusive.
    Inconclusive,
}
+
+impl<P: Package, VS: VersionSet> Incompatibility<P, VS> {
+ /// Create the initial "not Root" incompatibility.
+ pub fn not_root(package: P, version: VS::V) -> Self {
+ Self {
+ package_terms: SmallMap::One([(
+ package.clone(),
+ Term::Negative(VS::singleton(version.clone())),
+ )]),
+ kind: Kind::NotRoot(package, version),
+ }
+ }
+
+ /// Create an incompatibility to remember
+ /// that a given set does not contain any version.
+ pub fn no_versions(package: P, term: Term<VS>) -> Self {
+ let set = match &term {
+ Term::Positive(r) => r.clone(),
+ Term::Negative(_) => panic!("No version should have a positive term"),
+ };
+ Self {
+ package_terms: SmallMap::One([(package.clone(), term)]),
+ kind: Kind::NoVersions(package, set),
+ }
+ }
+
+ /// Create an incompatibility to remember
+ /// that a package version is not selectable
+ /// because its list of dependencies is unavailable.
+ pub fn unavailable_dependencies(package: P, version: VS::V) -> Self {
+ let set = VS::singleton(version);
+ Self {
+ package_terms: SmallMap::One([(package.clone(), Term::Positive(set.clone()))]),
+ kind: Kind::UnavailableDependencies(package, set),
+ }
+ }
+
+ /// Build an incompatibility from a given dependency.
+ pub fn from_dependency(package: P, versions: VS, dep: (&P, &VS)) -> Self {
+ let (p2, set2) = dep;
+ Self {
+ package_terms: if set2 == &VS::empty() {
+ SmallMap::One([(package.clone(), Term::Positive(versions.clone()))])
+ } else {
+ SmallMap::Two([
+ (package.clone(), Term::Positive(versions.clone())),
+ (p2.clone(), Term::Negative(set2.clone())),
+ ])
+ },
+ kind: Kind::FromDependencyOf(package, versions, p2.clone(), set2.clone()),
+ }
+ }
+
    /// If this incompatibility was built from a dependency edge,
    /// return the pair (depending package, dependency package).
    pub fn as_dependency(&self) -> Option<(&P, &P)> {
        match &self.kind {
            Kind::FromDependencyOf(p1, _, p2, _) => Some((p1, p2)),
            _ => None,
        }
    }
+
+ /// Merge dependant versions with the same dependency.
+ ///
+ /// When multiple versions of a package depend on the same range of another package,
+ /// we can merge the two into a single incompatibility.
+ /// For example, if a@1 depends on b and a@2 depends on b, we can say instead
+ /// a@1 and a@b depend on b.
+ ///
+ /// It is a special case of prior cause computation where the unified package
+ /// is the common dependant in the two incompatibilities expressing dependencies.
+ pub fn merge_dependents(&self, other: &Self) -> Option<Self> {
+ // It is almost certainly a bug to call this method without checking that self is a dependency
+ debug_assert!(self.as_dependency().is_some());
+ // Check that both incompatibilities are of the shape p1 depends on p2,
+ // with the same p1 and p2.
+ let self_pkgs = self.as_dependency()?;
+ if self_pkgs != other.as_dependency()? {
+ return None;
+ }
+ let (p1, p2) = self_pkgs;
+ let dep_term = self.get(p2);
+ // The dependency range for p2 must be the same in both case
+ // to be able to merge multiple p1 ranges.
+ if dep_term != other.get(p2) {
+ return None;
+ }
+ return Some(Self::from_dependency(
+ p1.clone(),
+ self.get(p1)
+ .unwrap()
+ .unwrap_positive()
+ .union(other.get(p1).unwrap().unwrap_positive()), // It is safe to `simplify` here
+ (&p2, dep_term.map_or(&VS::empty(), |v| v.unwrap_negative())),
+ ));
+ }
+
    /// Prior cause of two incompatibilities using the rule of resolution.
    ///
    /// Removes `package` from the terms of `incompat`, merges in the terms of
    /// `satisfier_cause` for every other package (by intersection), and unions
    /// the two terms for `package` itself, dropping that term entirely when the
    /// union is `Term::any()` (a term satisfied by anything carries no information).
    pub fn prior_cause(
        incompat: Id<Self>,
        satisfier_cause: Id<Self>,
        package: &P,
        incompatibility_store: &Arena<Self>,
    ) -> Self {
        // Remember both parents so the derivation tree can be rebuilt for reporting.
        let kind = Kind::DerivedFrom(incompat, satisfier_cause);
        // Start from the terms of `incompat`, with the resolved package taken out.
        let mut package_terms = incompatibility_store[incompat].package_terms.clone();
        let t1 = package_terms.remove(package).unwrap();
        let satisfier_cause_terms = &incompatibility_store[satisfier_cause].package_terms;
        // Merge the other cause's terms; packages present in both keep the
        // intersection of their two terms.
        package_terms.merge(
            satisfier_cause_terms.iter().filter(|(p, _)| p != &package),
            |t1, t2| Some(t1.intersection(t2)),
        );
        // The resolved package keeps the union of its two terms, unless that
        // union is trivially satisfied.
        let term = t1.union(satisfier_cause_terms.get(package).unwrap());
        if term != Term::any() {
            package_terms.insert(package.clone(), term);
        }
        Self {
            package_terms,
            kind,
        }
    }
+
+ /// Check if an incompatibility should mark the end of the algorithm
+ /// because it satisfies the root package.
+ pub fn is_terminal(&self, root_package: &P, root_version: &VS::V) -> bool {
+ if self.package_terms.len() == 0 {
+ true
+ } else if self.package_terms.len() > 1 {
+ false
+ } else {
+ let (package, term) = self.package_terms.iter().next().unwrap();
+ (package == root_package) && term.contains(root_version)
+ }
+ }
+
    /// Get the term related to a given package (if it exists).
    pub fn get(&self, package: &P) -> Option<&Term<VS>> {
        self.package_terms.get(package)
    }

    /// Iterate over the (package, term) pairs of this incompatibility.
    pub fn iter(&self) -> impl Iterator<Item = (&P, &Term<VS>)> {
        self.package_terms.iter()
    }

    // Reporting ###############################################################

    /// Retrieve parent causes if of type DerivedFrom.
    pub fn causes(&self) -> Option<(Id<Self>, Id<Self>)> {
        match self.kind {
            Kind::DerivedFrom(id1, id2) => Some((id1, id2)),
            _ => None,
        }
    }
+
    /// Build a derivation tree for error reporting.
    ///
    /// `precomputed` must already contain the trees of both causes of any
    /// derived incompatibility: callers are expected to invoke this in
    /// topological order (hence the `expect` messages below).
    pub fn build_derivation_tree(
        self_id: Id<Self>,
        shared_ids: &Set<Id<Self>>,
        store: &Arena<Self>,
        precomputed: &Map<Id<Self>, Arc<DerivationTree<P, VS>>>,
    ) -> DerivationTree<P, VS> {
        match store[self_id].kind.clone() {
            Kind::DerivedFrom(id1, id2) => {
                let derived = Derived {
                    terms: store[self_id].package_terms.as_map(),
                    // Incompatibilities appearing several times in the tree get an id
                    // so the report can reference them instead of repeating them.
                    shared_id: shared_ids.get(&self_id).map(|id| id.into_raw()),
                    cause1: precomputed
                        .get(&id1)
                        .expect("Non-topological calls building tree")
                        .clone(),
                    cause2: precomputed
                        .get(&id2)
                        .expect("Non-topological calls building tree")
                        .clone(),
                };
                DerivationTree::Derived(derived)
            }
            // All non-derived kinds become external leaves of the tree.
            Kind::NotRoot(package, version) => {
                DerivationTree::External(External::NotRoot(package, version))
            }
            Kind::NoVersions(package, set) => {
                DerivationTree::External(External::NoVersions(package, set))
            }
            Kind::UnavailableDependencies(package, set) => {
                DerivationTree::External(External::UnavailableDependencies(package, set))
            }
            Kind::FromDependencyOf(package, set, dep_package, dep_set) => DerivationTree::External(
                External::FromDependencyOf(package, set, dep_package, dep_set),
            ),
        }
    }
+}
+
impl<'a, P: Package, VS: VersionSet + 'a> Incompatibility<P, VS> {
    /// CF definition of Relation enum.
    ///
    /// `terms` is a lookup (typically into the partial solution) returning the
    /// currently known term for a package, or `None` if nothing is known yet.
    pub fn relation(&self, terms: impl Fn(&P) -> Option<&'a Term<VS>>) -> Relation<P> {
        // Start optimistic: with no terms inspected yet, everything is "satisfied".
        let mut relation = Relation::Satisfied;
        for (package, incompat_term) in self.package_terms.iter() {
            match terms(package).map(|term| incompat_term.relation_with(term)) {
                Some(term::Relation::Satisfied) => {}
                Some(term::Relation::Contradicted) => {
                    // A single contradicted term contradicts the whole incompatibility.
                    return Relation::Contradicted(package.clone());
                }
                None | Some(term::Relation::Inconclusive) => {
                    // If a package is not present, the intersection is the same as [Term::any].
                    // According to the rules of satisfactions, the relation would be inconclusive.
                    // It could also be satisfied if the incompatibility term was also [Term::any],
                    // but we systematically remove those from incompatibilities
                    // so we're safe on that front.
                    if relation == Relation::Satisfied {
                        // First inconclusive term: still "almost satisfied".
                        relation = Relation::AlmostSatisfied(package.clone());
                    } else {
                        // Second inconclusive term: nothing more can be concluded.
                        return Relation::Inconclusive;
                    }
                }
            }
        }
        relation
    }
}
+
+impl<P: Package, VS: VersionSet> fmt::Display for Incompatibility<P, VS> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(
+ f,
+ "{}",
+ DefaultStringReportFormatter.format_terms(&self.package_terms.as_map())
+ )
+ }
+}
+
+// TESTS #######################################################################
+
#[cfg(test)]
pub mod tests {
    use super::*;
    use crate::range::Range;
    use crate::term::tests::strategy as term_strat;
    use crate::type_aliases::Map;
    use proptest::prelude::*;

    proptest! {

        /// For any three different packages p1, p2 and p3,
        /// for any three terms t1, t2 and t3,
        /// if we have the two following incompatibilities:
        ///    { p1: t1, p2: not t2 }
        ///    { p2: t2, p3: t3 }
        /// the rule of resolution says that we can deduce the following incompatibility:
        ///    { p1: t1, p3: t3 }
        #[test]
        fn rule_of_resolution(t1 in term_strat(), t2 in term_strat(), t3 in term_strat()) {
            let mut store = Arena::new();
            // The `kind` is irrelevant to the resolution computation, so any kind works.
            let i1 = store.alloc(Incompatibility {
                package_terms: SmallMap::Two([("p1", t1.clone()), ("p2", t2.negate())]),
                kind: Kind::UnavailableDependencies("0", Range::full())
            });

            let i2 = store.alloc(Incompatibility {
                package_terms: SmallMap::Two([("p2", t2), ("p3", t3.clone())]),
                kind: Kind::UnavailableDependencies("0", Range::full())
            });

            // Expected result: p2 resolved away, the other two terms kept as-is.
            let mut i3 = Map::default();
            i3.insert("p1", t1);
            i3.insert("p3", t3);

            let i_resolution = Incompatibility::prior_cause(i1, i2, &"p2", &store);
            assert_eq!(i_resolution.package_terms.as_map(), i3);
        }

    }
}
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 +159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +198 +199 +200 +201 +202 +203 +204 +205 +206 +207 +208 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +222 +223 +224 +225 +226 +227 +228 +229 +230 +231 +232 +233 +234 +235 +236 +237 +238 +239 +240 +241 +242 +243 +244 +245 +246 +247 +248 +249 +250 +251 +252 +253 +254 +255 +256 +257 +258 +259 +260 +261 +262 +263 +264 +265 +266 +267 +268 +269 +270 +271 +272 +273 +274 +275 +276 +277 +278 +279 +280 +281 +282 +283 +284 +285 +286 +287 +288 +289 +290 +291 +292 +293 +294 +295 +296 +297 +298 +299 +300 +301 +302 +303 +304 +305 +306 +307 +308 +309 +310 +311 +312 +313 +314 +315 +316 +317 +318 +319 +320 +321 +322 +323 +324 +325 +326 +327 +328 +329 +330 +331 +332 +333 +334 +335 +336 +337 +338 +339 +340 +341 +342 +343 +344 +345 +346 +347 +348 +349 +350 +351 +352 +353 +354 +355 +356 +357 +358 +359 +360 +361 +362 +363 +364 +365 +366 +367 +368 +369 +370 +371 +372 +373 +374 +375 +376 +377 +378 +379 +380 +381 +382 +383 +384 +385 +386 +387 +388 +389 +390 +391 +392 +393 +394 +395 +396 +397 +398 +399 +400 +401 +402 +403 +404 +405 +406 +407 +408 +409 +410 +411 +412 +413 +414 +415 +416 +417 +418 +419 +420 +421 
+422 +423 +424 +425 +426 +427 +428 +429 +430 +431 +432 +433 +434 +435 +436 +437 +438 +439 +440 +441 +442 +443 +444 +445 +446 +447 +448 +449 +450 +451 +452 +453 +454 +455 +456 +457 +458 +459 +460 +461 +462 +463 +464 +465 +466 +467 +468 +469 +470 +471 +472 +473 +474 +475 +476 +477 +478 +479 +480 +481 +482 +483 +484 +485 +486 +487 +488 +489 +490 +491 +492 +493 +494 +495 +496 +497 +498 +499 +500 +501 +502 +503 +504 +505 +506 +507 +508 +509 +510 +511 +512 +513 +514 +515 +516 +517 +518 +519 +520 +521 +522 +523 +524 +525 +526 +527 +528 +529 +530 +531 +532 +533 +534 +535 +536 +537 +538 +539 +540 +541 +542 +543 +544 +545 +546 +547 +548 +549 +550 +551 +552 +553 +554 +555 +556 +557 +558 +559 +
// SPDX-License-Identifier: MPL-2.0
+
+//! A Memory acts like a structured partial solution
+//! where terms are regrouped by package in a [Map](crate::type_aliases::Map).
+
+use std::fmt::Display;
+use std::hash::BuildHasherDefault;
+
+use priority_queue::PriorityQueue;
+use rustc_hash::FxHasher;
+
+use crate::internal::arena::Arena;
+use crate::internal::incompatibility::{IncompId, Incompatibility, Relation};
+use crate::internal::small_map::SmallMap;
+use crate::package::Package;
+use crate::term::Term;
+use crate::type_aliases::SelectedDependencies;
+use crate::version_set::VersionSet;
+
+use super::small_vec::SmallVec;
+
// An order-preserving map with a fast non-cryptographic hasher.
// NOTE(review): despite the `Fnv` name, the hasher is rustc's FxHasher —
// consider renaming; verify nothing external relies on the name first.
type FnvIndexMap<K, V> = indexmap::IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
+
/// Counter of decisions taken, used to date assignments and to backtrack.
#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub struct DecisionLevel(pub u32);

impl DecisionLevel {
    /// Return the next decision level (this one plus one).
    pub fn increment(self) -> Self {
        let DecisionLevel(level) = self;
        DecisionLevel(level + 1)
    }
}
+
/// The partial solution contains all package assignments,
/// organized by package and historically ordered.
#[derive(Clone, Debug)]
pub struct PartialSolution<P: Package, VS: VersionSet, Priority: Ord + Clone> {
    /// Monotonically increasing counter stamped on every assignment.
    next_global_index: u32,
    /// Number of decisions taken so far.
    current_decision_level: DecisionLevel,
    /// `package_assignments` is primarily a HashMap from a package to its
    /// `PackageAssignments`. But it can also keep the items in an order.
    /// We maintain three sections in this order:
    /// 1. `[..current_decision_level]` Are packages that have had a decision made, sorted by the `decision_level`.
    ///    This makes it very efficient to extract the solution, and to backtrack to a particular decision level.
    /// 2. `[current_decision_level..changed_this_decision_level]` Are packages that have **not** had their assignments
    ///    changed since the last time `prioritize` has been called. Within this range there is no sorting.
    /// 3. `[changed_this_decision_level..]` Contains all packages that **have** had their assignments changed since
    ///    the last time `prioritize` has been called. The inverse is not necessarily true, some packages in the range
    ///    did not have a change. Within this range there is no sorting.
    package_assignments: FnvIndexMap<P, PackageAssignments<P, VS>>,
    /// `prioritized_potential_packages` is primarily a HashMap from a package with no decision and a positive assignment
    /// to its `Priority`. But, it also maintains a max heap of packages by `Priority` order.
    prioritized_potential_packages: PriorityQueue<P, Priority, BuildHasherDefault<FxHasher>>,
    /// Start index of section 3 in `package_assignments` (see above).
    changed_this_decision_level: usize,
}

impl<P: Package, VS: VersionSet, Priority: Ord + Clone> Display
    for PartialSolution<P, VS, Priority>
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut assignments: Vec<_> = self
            .package_assignments
            .iter()
            .map(|(p, pa)| format!("{}: {}", p, pa))
            .collect();
        // Sort for a deterministic, diff-friendly output.
        assignments.sort();
        // NOTE(review): "package_assignements" below is misspelled, and the
        // `"\t\n"` join separator puts the tab *before* each newline —
        // presumably `"\n\t"` (indented lines) was intended. Left untouched
        // because this output could be matched by logs or tests; confirm
        // before changing either string.
        write!(
            f,
            "next_global_index: {}\ncurrent_decision_level: {:?}\npackage_assignements:\n{}",
            self.next_global_index,
            self.current_decision_level,
            assignments.join("\t\n")
        )
    }
}
+
/// Package assignments contain the potential decision and derivations
/// that have already been made for a given package,
/// as well as the intersection of terms by all of these.
#[derive(Clone, Debug)]
struct PackageAssignments<P: Package, VS: VersionSet> {
    /// Decision level of the first assignment for this package.
    smallest_decision_level: DecisionLevel,
    /// Decision level of the most recent assignment for this package.
    highest_decision_level: DecisionLevel,
    /// Historically ordered derivations, each carrying its accumulated intersection.
    dated_derivations: SmallVec<DatedDerivation<P, VS>>,
    /// Intersection of all assignment terms (or the decision, once one is taken).
    assignments_intersection: AssignmentsIntersection<VS>,
}

impl<P: Package, VS: VersionSet> Display for PackageAssignments<P, VS> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let derivations: Vec<_> = self
            .dated_derivations
            .iter()
            .map(|dd| dd.to_string())
            .collect();
        // NOTE(review): the "\n," in the format string reads oddly (comma right
        // after a newline) — presumably ",\n" was intended; confirm nothing
        // parses this debug output before changing the string.
        write!(
            f,
            "decision range: {:?}..{:?}\nderivations:\n {}\n,assignments_intersection: {}",
            self.smallest_decision_level,
            self.highest_decision_level,
            derivations.join("\n "),
            self.assignments_intersection
        )
    }
}

/// A derivation (non-decision assignment) together with when it was made.
#[derive(Clone, Debug)]
pub struct DatedDerivation<P: Package, VS: VersionSet> {
    /// Global stamp ordering this assignment among all assignments.
    global_index: u32,
    /// Decision level at the time this derivation was added.
    decision_level: DecisionLevel,
    /// Incompatibility that caused this derivation.
    cause: IncompId<P, VS>,
    /// Intersection of all derivation terms for the package up to this point.
    accumulated_intersection: Term<VS>,
}

impl<P: Package, VS: VersionSet> Display for DatedDerivation<P, VS> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:?}, cause: {:?}", self.decision_level, self.cause)
    }
}

/// Either a decision (selected version) or the running intersection
/// of all derivation terms for a package.
#[derive(Clone, Debug)]
enum AssignmentsIntersection<VS: VersionSet> {
    /// (global index of the decision, selected version, exact term for that version).
    Decision((u32, VS::V, Term<VS>)),
    /// Intersection of all derivation terms so far (no decision yet).
    Derivations(Term<VS>),
}

impl<VS: VersionSet> Display for AssignmentsIntersection<VS> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Decision((lvl, version, _)) => {
                write!(f, "Decision: level {}, v = {}", lvl, version)
            }
            Self::Derivations(term) => write!(f, "Derivations term: {}", term),
        }
    }
}

/// Outcome of the satisfier search during conflict resolution.
#[derive(Clone, Debug)]
pub enum SatisfierSearch<P: Package, VS: VersionSet> {
    /// The satisfier and the previous satisfier are at different decision levels.
    DifferentDecisionLevels {
        previous_satisfier_level: DecisionLevel,
    },
    /// The satisfier and the previous satisfier share the same decision level.
    SameDecisionLevels {
        satisfier_cause: IncompId<P, VS>,
    },
}

/// For each package of an incompatibility: the cause of the satisfying
/// assignment (`None` for a decision), its global index, and its decision level.
type SatisfiedMap<'i, P, VS> = SmallMap<&'i P, (Option<IncompId<P, VS>>, u32, DecisionLevel)>;
+
+impl<P: Package, VS: VersionSet, Priority: Ord + Clone> PartialSolution<P, VS, Priority> {
+ /// Initialize an empty PartialSolution.
+ pub fn empty() -> Self {
+ Self {
+ next_global_index: 0,
+ current_decision_level: DecisionLevel(0),
+ package_assignments: FnvIndexMap::default(),
+ prioritized_potential_packages: PriorityQueue::default(),
+ changed_this_decision_level: 0,
+ }
+ }
+
    /// Add a decision.
    pub fn add_decision(&mut self, package: P, version: VS::V) {
        // Check that add_decision is never used in the wrong context.
        if cfg!(debug_assertions) {
            match self.package_assignments.get_mut(&package) {
                None => panic!("Derivations must already exist"),
                Some(pa) => match &pa.assignments_intersection {
                    // Cannot be called when a decision has already been taken.
                    AssignmentsIntersection::Decision(_) => panic!("Already existing decision"),
                    // Cannot be called if the version is not contained in the terms intersection.
                    AssignmentsIntersection::Derivations(term) => {
                        debug_assert!(
                            term.contains(&version),
                            "{}: {} was expected to be contained in {}",
                            package,
                            version,
                            term,
                        )
                    }
                },
            }
            // A decision should only be taken after everything has been prioritized,
            // i.e. section 3 of `package_assignments` is empty.
            assert_eq!(
                self.changed_this_decision_level,
                self.package_assignments.len()
            );
        }
        // The new decision goes at index `current_decision_level`, so that the
        // first section of `package_assignments` stays sorted by decision level.
        let new_idx = self.current_decision_level.0 as usize;
        self.current_decision_level = self.current_decision_level.increment();
        let (old_idx, _, pa) = self
            .package_assignments
            .get_full_mut(&package)
            .expect("Derivations must already exist");
        pa.highest_decision_level = self.current_decision_level;
        pa.assignments_intersection = AssignmentsIntersection::Decision((
            self.next_global_index,
            version.clone(),
            Term::exact(version),
        ));
        // Maintain that the beginning of the `package_assignments` have all decisions in sorted order.
        if new_idx != old_idx {
            self.package_assignments.swap_indices(new_idx, old_idx);
        }
        self.next_global_index += 1;
    }

    /// Add a derivation.
    pub fn add_derivation(
        &mut self,
        package: P,
        cause: IncompId<P, VS>,
        store: &Arena<Incompatibility<P, VS>>,
    ) {
        use indexmap::map::Entry;
        let mut dated_derivation = DatedDerivation {
            global_index: self.next_global_index,
            decision_level: self.current_decision_level,
            cause,
            // The derived term is the negation of the cause's term for this package.
            accumulated_intersection: store[cause].get(&package).unwrap().negate(),
        };
        self.next_global_index += 1;
        let pa_last_index = self.package_assignments.len().saturating_sub(1);
        match self.package_assignments.entry(package) {
            Entry::Occupied(mut occupied) => {
                let idx = occupied.index();
                let pa = occupied.get_mut();
                pa.highest_decision_level = self.current_decision_level;
                match &mut pa.assignments_intersection {
                    // Check that add_derivation is never called in the wrong context.
                    AssignmentsIntersection::Decision(_) => {
                        panic!("add_derivation should not be called after a decision")
                    }
                    AssignmentsIntersection::Derivations(t) => {
                        // Fold the new derivation into the running intersection
                        // and record the updated intersection on the derivation.
                        *t = t.intersection(&dated_derivation.accumulated_intersection);
                        dated_derivation.accumulated_intersection = t.clone();
                        if t.is_positive() {
                            // we can use `swap_indices` to make `changed_this_decision_level` only go down by 1
                            // but the copying is slower than the larger search
                            self.changed_this_decision_level =
                                std::cmp::min(self.changed_this_decision_level, idx);
                        }
                    }
                }
                pa.dated_derivations.push(dated_derivation);
            }
            Entry::Vacant(v) => {
                let term = dated_derivation.accumulated_intersection.clone();
                if term.is_positive() {
                    // The new entry lands at the end of the map; make sure it is
                    // inside the "changed" section.
                    self.changed_this_decision_level =
                        std::cmp::min(self.changed_this_decision_level, pa_last_index);
                }
                v.insert(PackageAssignments {
                    smallest_decision_level: self.current_decision_level,
                    highest_decision_level: self.current_decision_level,
                    dated_derivations: SmallVec::One([dated_derivation]),
                    assignments_intersection: AssignmentsIntersection::Derivations(term),
                });
            }
        }
    }
+
    /// Pop the package with the highest priority among the packages that have
    /// positive derivations but no decision yet, refreshing the priorities of
    /// packages whose assignments changed since the last call.
    pub fn pick_highest_priority_pkg(
        &mut self,
        prioritizer: impl Fn(&P, &VS) -> Priority,
    ) -> Option<P> {
        // If the "changed" section starts right after the decisions,
        // everything undecided may have changed (e.g. after a backtrack).
        let check_all = self.changed_this_decision_level
            == self.current_decision_level.0.saturating_sub(1) as usize;
        let current_decision_level = self.current_decision_level;
        let prioritized_potential_packages = &mut self.prioritized_potential_packages;
        self.package_assignments
            .get_range(self.changed_this_decision_level..)
            .unwrap()
            .iter()
            .filter(|(_, pa)| {
                // We only actually need to update the package if it has been changed
                // since the last time we called prioritize.
                // Which means its highest decision level is the current decision level,
                // or if we backtracked in the mean time.
                check_all || pa.highest_decision_level == current_decision_level
            })
            .filter_map(|(p, pa)| pa.assignments_intersection.potential_package_filter(p))
            .for_each(|(p, r)| {
                let priority = prioritizer(p, r);
                prioritized_potential_packages.push(p.clone(), priority);
            });
        // Everything has now been prioritized: the "changed" section is empty.
        self.changed_this_decision_level = self.package_assignments.len();
        prioritized_potential_packages.pop().map(|(p, _)| p)
    }
+
    /// If a partial solution has, for every positive derivation,
    /// a corresponding decision that satisfies that assignment,
    /// it's a total solution and version solving has succeeded.
    pub fn extract_solution(&self) -> SelectedDependencies<P, VS::V> {
        // The first `current_decision_level` entries are exactly the decisions,
        // per the ordering invariant on `package_assignments`.
        self.package_assignments
            .iter()
            .take(self.current_decision_level.0 as usize)
            .map(|(p, pa)| match &pa.assignments_intersection {
                AssignmentsIntersection::Decision((_, v, _)) => (p.clone(), v.clone()),
                AssignmentsIntersection::Derivations(_) => {
                    panic!("Derivations in the Decision part")
                }
            })
            .collect()
    }

    /// Backtrack the partial solution to a given decision level.
    pub fn backtrack(&mut self, decision_level: DecisionLevel) {
        self.current_decision_level = decision_level;
        self.package_assignments.retain(|_p, pa| {
            if pa.smallest_decision_level > decision_level {
                // Remove all entries that have a smallest decision level higher than the backtrack target.
                false
            } else if pa.highest_decision_level <= decision_level {
                // Do not change entries older than the backtrack decision level target.
                true
            } else {
                // smallest_decision_level <= decision_level < highest_decision_level
                //
                // Since decision_level < highest_decision_level,
                // We can be certain that there will be no decision in this package assignments
                // after backtracking, because such decision would have been the last
                // assignment and it would have the "highest_decision_level".

                // Truncate the history.
                while pa.dated_derivations.last().map(|dd| dd.decision_level) > Some(decision_level)
                {
                    pa.dated_derivations.pop();
                }
                debug_assert!(!pa.dated_derivations.is_empty());

                let last = pa.dated_derivations.last().unwrap();

                // Update highest_decision_level.
                pa.highest_decision_level = last.decision_level;

                // Reset the assignments intersection.
                pa.assignments_intersection =
                    AssignmentsIntersection::Derivations(last.accumulated_intersection.clone());
                true
            }
        });
        // Throw away all stored priority levels, and mark that they all need to be recomputed.
        self.prioritized_potential_packages.clear();
        self.changed_this_decision_level = self.current_decision_level.0.saturating_sub(1) as usize;
    }

    /// We can add the version to the partial solution as a decision
    /// if it doesn't produce any conflict with the new incompatibilities.
    /// In practice I think it can only produce a conflict if one of the dependencies
    /// (which are used to make the new incompatibilities)
    /// is already in the partial solution with an incompatible version.
    pub fn add_version(
        &mut self,
        package: P,
        version: VS::V,
        new_incompatibilities: std::ops::Range<IncompId<P, VS>>,
        store: &Arena<Incompatibility<P, VS>>,
    ) {
        let exact = Term::exact(version.clone());
        // Evaluate each incompatibility as if the candidate decision were
        // already part of the partial solution.
        let not_satisfied = |incompat: &Incompatibility<P, VS>| {
            incompat.relation(|p| {
                if p == &package {
                    Some(&exact)
                } else {
                    self.term_intersection_for_package(p)
                }
            }) != Relation::Satisfied
        };

        // Check none of the dependencies (new_incompatibilities)
        // would create a conflict (be satisfied).
        if store[new_incompatibilities].iter().all(not_satisfied) {
            log::info!("add_decision: {} @ {}", package, version);
            self.add_decision(package, version);
        } else {
            log::info!(
                "not adding {} @ {} because of its dependencies",
                package,
                version
            );
        }
    }
+
    /// Check if the terms in the partial solution satisfy the incompatibility.
    pub fn relation(&self, incompat: &Incompatibility<P, VS>) -> Relation<P> {
        incompat.relation(|package| self.term_intersection_for_package(package))
    }

    /// Retrieve intersection of terms related to package,
    /// or `None` if the package has no assignment yet.
    pub fn term_intersection_for_package(&self, package: &P) -> Option<&Term<VS>> {
        self.package_assignments
            .get(package)
            .map(|pa| pa.assignments_intersection.term())
    }
+
    /// Figure out if the satisfier and previous satisfier are of different decision levels.
    pub fn satisfier_search<'i>(
        &self,
        incompat: &'i Incompatibility<P, VS>,
        store: &Arena<Incompatibility<P, VS>>,
    ) -> (&'i P, SatisfierSearch<P, VS>) {
        let satisfied_map = Self::find_satisfier(incompat, &self.package_assignments);
        // The satisfier is the most recent of the found assignments,
        // i.e. the one with the highest global index.
        let (&satisfier_package, &(satisfier_cause, _, satisfier_decision_level)) = satisfied_map
            .iter()
            .max_by_key(|(_p, (_, global_index, _))| global_index)
            .unwrap();
        let previous_satisfier_level = Self::find_previous_satisfier(
            incompat,
            satisfier_package,
            satisfied_map,
            &self.package_assignments,
            store,
        );
        let search_result = if previous_satisfier_level >= satisfier_decision_level {
            SatisfierSearch::SameDecisionLevels {
                // NOTE(review): the `unwrap` assumes the satisfier is a derivation
                // (has a cause) in this branch — an invariant of the conflict
                // resolution algorithm; verify against the solver specification.
                satisfier_cause: satisfier_cause.unwrap(),
            }
        } else {
            SatisfierSearch::DifferentDecisionLevels {
                previous_satisfier_level,
            }
        };
        (satisfier_package, search_result)
    }

    /// A satisfier is the earliest assignment in partial solution such that the incompatibility
    /// is satisfied by the partial solution up to and including that assignment.
    ///
    /// Returns a map indicating for each package term, when that was first satisfied in history.
    /// If we effectively found a satisfier, the returned map must be the same size that incompat.
    ///
    /// Question: This is possible since we added a "global_index" to every dated_derivation.
    /// It would be nice if we could get rid of it, but I don't know if then it will be possible
    /// to return a coherent previous_satisfier_level.
    fn find_satisfier<'i>(
        incompat: &'i Incompatibility<P, VS>,
        package_assignments: &FnvIndexMap<P, PackageAssignments<P, VS>>,
    ) -> SatisfiedMap<'i, P, VS> {
        let mut satisfied = SmallMap::Empty;
        for (package, incompat_term) in incompat.iter() {
            let pa = package_assignments.get(package).expect("Must exist");
            // A term is satisfied once its negation no longer intersects
            // the accumulated assignments for the package.
            satisfied.insert(package, pa.satisfier(package, &incompat_term.negate()));
        }
        satisfied
    }

    /// Earliest assignment in the partial solution before satisfier
    /// such that incompatibility is satisfied by the partial solution up to
    /// and including that assignment plus satisfier.
    fn find_previous_satisfier<'i>(
        incompat: &Incompatibility<P, VS>,
        satisfier_package: &'i P,
        mut satisfied_map: SatisfiedMap<'i, P, VS>,
        package_assignments: &FnvIndexMap<P, PackageAssignments<P, VS>>,
        store: &Arena<Incompatibility<P, VS>>,
    ) -> DecisionLevel {
        // First, let's retrieve the previous derivations and the initial accum_term.
        let satisfier_pa = package_assignments.get(satisfier_package).unwrap();
        let (satisfier_cause, _gidx, _dl) = satisfied_map.get(&satisfier_package).unwrap();

        let accum_term = if let &Some(cause) = satisfier_cause {
            // The satisfier was a derivation: start from the negation of its cause's term.
            store[cause].get(satisfier_package).unwrap().negate()
        } else {
            // The satisfier was a decision: start from its exact term.
            match &satisfier_pa.assignments_intersection {
                AssignmentsIntersection::Derivations(_) => panic!("must be a decision"),
                AssignmentsIntersection::Decision((_, _, term)) => term.clone(),
            }
        };

        let incompat_term = incompat
            .get(satisfier_package)
            .expect("satisfier package not in incompat");

        // Re-run the satisfier search for this package with the satisfier's
        // contribution already accounted for.
        satisfied_map.insert(
            satisfier_package,
            satisfier_pa.satisfier(
                satisfier_package,
                &accum_term.intersection(&incompat_term.negate()),
            ),
        );

        // Finally, let's identify the decision level of that previous satisfier.
        let (_, &(_, _, decision_level)) = satisfied_map
            .iter()
            .max_by_key(|(_p, (_, global_index, _))| global_index)
            .unwrap();
        // Level 0 only holds the root; backtracking below level 1 is never useful.
        decision_level.max(DecisionLevel(1))
    }

    /// Current number of decisions in the partial solution.
    pub fn current_decision_level(&self) -> DecisionLevel {
        self.current_decision_level
    }
+}
+
impl<P: Package, VS: VersionSet> PackageAssignments<P, VS> {
    /// Find the first assignment for `package` whose accumulated intersection
    /// with `start_term` is empty, i.e. the assignment from which `start_term`
    /// is fully excluded. Returns its cause (`None` when it is the decision),
    /// global index, and decision level.
    fn satisfier(
        &self,
        package: &P,
        start_term: &Term<VS>,
    ) -> (Option<IncompId<P, VS>>, u32, DecisionLevel) {
        let empty = Term::empty();
        // Indicate if we found a satisfier in the list of derivations, otherwise it will be the decision.
        // Accumulated intersections only shrink over time (see add_derivation),
        // so the derivations are partitioned: non-empty intersections with
        // start_term first, then empty ones — which lets us binary search.
        let idx = self
            .dated_derivations
            .as_slice()
            .partition_point(|dd| dd.accumulated_intersection.intersection(start_term) != empty);
        if let Some(dd) = self.dated_derivations.get(idx) {
            debug_assert_eq!(dd.accumulated_intersection.intersection(start_term), empty);
            return (Some(dd.cause), dd.global_index, dd.decision_level);
        }
        // If it wasn't found in the derivations,
        // it must be the decision which is last (if called in the right context).
        match &self.assignments_intersection {
            AssignmentsIntersection::Decision((global_index, _, _)) => {
                (None, *global_index, self.highest_decision_level)
            }
            AssignmentsIntersection::Derivations(accumulated_intersection) => {
                unreachable!(
                    concat!(
                        "while processing package {}: ",
                        "accum_term = {} has overlap with incompat_term = {}, ",
                        "which means the last assignment should have been a decision, ",
                        "but instead it was a derivation. This shouldn't be possible! ",
                        "(Maybe your Version ordering is broken?)"
                    ),
                    package, accumulated_intersection, start_term
                )
            }
        }
    }
}
+
+impl<VS: VersionSet> AssignmentsIntersection<VS> {
+ /// Returns the term intersection of all assignments (decision included).
+ fn term(&self) -> &Term<VS> {
+ match self {
+ Self::Decision((_, _, term)) => term,
+ Self::Derivations(term) => term,
+ }
+ }
+
+ /// A package is a potential pick if there isn't an already
+ /// selected version (no "decision")
+ /// and if it contains at least one positive derivation term
+ /// in the partial solution.
+ fn potential_package_filter<'a, P: Package>(
+ &'a self,
+ package: &'a P,
+ ) -> Option<(&'a P, &'a VS)> {
+ match self {
+ Self::Decision(_) => None,
+ Self::Derivations(term_intersection) => {
+ if term_intersection.is_positive() {
+ Some((package, term_intersection.unwrap_positive()))
+ } else {
+ None
+ }
+ }
+ }
+ }
+}
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 +159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +
use crate::type_aliases::Map;
+use std::hash::Hash;
+
/// A map specialized for the common case of holding at most two entries,
/// storing them inline and only spilling to a heap-allocated map beyond that.
#[derive(Debug, Clone)]
pub enum SmallMap<K, V> {
    /// No entries.
    Empty,
    /// Exactly one key-value pair, stored inline.
    One([(K, V); 1]),
    /// Exactly two key-value pairs, stored inline.
    Two([(K, V); 2]),
    /// Three or more entries, spilled to a heap-allocated map.
    /// NOTE(review): removals never demote back to an inline variant.
    Flexible(Map<K, V>),
}
+
+impl<K: PartialEq + Eq + Hash, V> SmallMap<K, V> {
+ pub fn get(&self, key: &K) -> Option<&V> {
+ match self {
+ Self::Empty => None,
+ Self::One([(k, v)]) if k == key => Some(v),
+ Self::One(_) => None,
+ Self::Two([(k1, v1), _]) if key == k1 => Some(v1),
+ Self::Two([_, (k2, v2)]) if key == k2 => Some(v2),
+ Self::Two(_) => None,
+ Self::Flexible(data) => data.get(key),
+ }
+ }
+
+ pub fn get_mut(&mut self, key: &K) -> Option<&mut V> {
+ match self {
+ Self::Empty => None,
+ Self::One([(k, v)]) if k == key => Some(v),
+ Self::One(_) => None,
+ Self::Two([(k1, v1), _]) if key == k1 => Some(v1),
+ Self::Two([_, (k2, v2)]) if key == k2 => Some(v2),
+ Self::Two(_) => None,
+ Self::Flexible(data) => data.get_mut(key),
+ }
+ }
+
+ pub fn remove(&mut self, key: &K) -> Option<V> {
+ let out;
+ *self = match std::mem::take(self) {
+ Self::Empty => {
+ out = None;
+ Self::Empty
+ }
+ Self::One([(k, v)]) => {
+ if key == &k {
+ out = Some(v);
+ Self::Empty
+ } else {
+ out = None;
+ Self::One([(k, v)])
+ }
+ }
+ Self::Two([(k1, v1), (k2, v2)]) => {
+ if key == &k1 {
+ out = Some(v1);
+ Self::One([(k2, v2)])
+ } else if key == &k2 {
+ out = Some(v2);
+ Self::One([(k1, v1)])
+ } else {
+ out = None;
+ Self::Two([(k1, v1), (k2, v2)])
+ }
+ }
+ Self::Flexible(mut data) => {
+ out = data.remove(key);
+ Self::Flexible(data)
+ }
+ };
+ out
+ }
+
+ pub fn insert(&mut self, key: K, value: V) {
+ *self = match std::mem::take(self) {
+ Self::Empty => Self::One([(key, value)]),
+ Self::One([(k, v)]) => {
+ if key == k {
+ Self::One([(k, value)])
+ } else {
+ Self::Two([(k, v), (key, value)])
+ }
+ }
+ Self::Two([(k1, v1), (k2, v2)]) => {
+ if key == k1 {
+ Self::Two([(k1, value), (k2, v2)])
+ } else if key == k2 {
+ Self::Two([(k1, v1), (k2, value)])
+ } else {
+ let mut data: Map<K, V> = Map::with_capacity_and_hasher(3, Default::default());
+ data.insert(key, value);
+ data.insert(k1, v1);
+ data.insert(k2, v2);
+ Self::Flexible(data)
+ }
+ }
+ Self::Flexible(mut data) => {
+ data.insert(key, value);
+ Self::Flexible(data)
+ }
+ };
+ }
+}
+
impl<K: Clone + PartialEq + Eq + Hash, V: Clone> SmallMap<K, V> {
    /// Merge two hash maps.
    ///
    /// When a key is common to both,
    /// apply the provided function to both values.
    /// If the result is None, remove that key from the merged map,
    /// otherwise add the content of the Some(_).
    pub fn merge<'a>(
        &'a mut self,
        map_2: impl Iterator<Item = (&'a K, &'a V)>,
        f: impl Fn(&V, &V) -> Option<V>,
    ) {
        for (key, val_2) in map_2 {
            match self.get_mut(key) {
                None => {
                    // Key only present in `map_2`: copy the entry over.
                    self.insert(key.clone(), val_2.clone());
                }
                Some(val_1) => match f(val_1, val_2) {
                    None => {
                        // The merge function cancelled this key entirely.
                        self.remove(key);
                    }
                    // Overwrite through the borrow from get_mut,
                    // avoiding a second lookup.
                    Some(merged_value) => *val_1 = merged_value,
                },
            }
        }
    }
}
+
+impl<K, V> Default for SmallMap<K, V> {
+ fn default() -> Self {
+ Self::Empty
+ }
+}
+
+impl<K, V> SmallMap<K, V> {
+ pub fn len(&self) -> usize {
+ match self {
+ Self::Empty => 0,
+ Self::One(_) => 1,
+ Self::Two(_) => 2,
+ Self::Flexible(data) => data.len(),
+ }
+ }
+}
+
+impl<K: Eq + Hash + Clone, V: Clone> SmallMap<K, V> {
+ pub fn as_map(&self) -> Map<K, V> {
+ match self {
+ Self::Empty => Map::default(),
+ Self::One([(k, v)]) => {
+ let mut map = Map::with_capacity_and_hasher(1, Default::default());
+ map.insert(k.clone(), v.clone());
+ map
+ }
+ Self::Two(data) => {
+ let mut map = Map::with_capacity_and_hasher(2, Default::default());
+ for (k, v) in data {
+ map.insert(k.clone(), v.clone());
+ }
+ map
+ }
+ Self::Flexible(data) => data.clone(),
+ }
+ }
+}
+
/// Borrowing iterator over `SmallMap` entries, dispatching between the
/// two underlying storage representations.
enum IterSmallMap<'a, K, V> {
    /// Iterates over the inline array storage (empty/one/two entries).
    Inline(std::slice::Iter<'a, (K, V)>),
    /// Iterates over the heap-allocated map storage.
    Map(std::collections::hash_map::Iter<'a, K, V>),
}
+
+impl<'a, K: 'a, V: 'a> Iterator for IterSmallMap<'a, K, V> {
+ type Item = (&'a K, &'a V);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self {
+ // False-positive, remove when stable is >=1.76 February 24
+ #[allow(clippy::map_identity)]
+ IterSmallMap::Inline(inner) => inner.next().map(|(k, v)| (k, v)),
+ IterSmallMap::Map(inner) => inner.next(),
+ }
+ }
+}
+
+impl<K, V> SmallMap<K, V> {
+ pub fn iter(&self) -> impl Iterator<Item = (&K, &V)> {
+ match self {
+ Self::Empty => IterSmallMap::Inline([].iter()),
+ Self::One(data) => IterSmallMap::Inline(data.iter()),
+ Self::Two(data) => IterSmallMap::Inline(data.iter()),
+ Self::Flexible(data) => IterSmallMap::Map(data.iter()),
+ }
+ }
+}
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 +159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +198 +199 +200 +201 +202 +203 +204 +205 +206 +207 +208 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +222 +223 +224 +225 +226 +227 +228 +229 +230 +231 +
use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::ops::Deref;
+
/// A vector specialized for holding at most two elements inline,
/// spilling to a heap-allocated `Vec` beyond that.
#[derive(Clone)]
pub enum SmallVec<T> {
    /// No elements.
    Empty,
    /// Exactly one element, stored inline.
    One([T; 1]),
    /// Exactly two elements, stored inline.
    Two([T; 2]),
    /// Three or more elements (or a cleared spilled buffer),
    /// stored on the heap.
    Flexible(Vec<T>),
}

impl<T> SmallVec<T> {
    /// Creates an empty vector.
    pub fn empty() -> Self {
        Self::Empty
    }

    /// Creates a vector holding a single element.
    pub fn one(t: T) -> Self {
        Self::One([t])
    }

    /// Views the contents as a shared slice.
    pub fn as_slice(&self) -> &[T] {
        match self {
            Self::Empty => &[],
            Self::One(inline) => inline,
            Self::Two(inline) => inline,
            Self::Flexible(vec) => vec,
        }
    }

    /// Views the contents as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        match self {
            Self::Empty => &mut [],
            Self::One(inline) => inline,
            Self::Two(inline) => inline,
            Self::Flexible(vec) => vec,
        }
    }

    /// Appends an element, promoting to the next representation when the
    /// inline capacity is exceeded.
    pub fn push(&mut self, new: T) {
        // Take ownership of the current representation so the existing
        // elements can be moved into the successor variant.
        let next = match std::mem::take(self) {
            Self::Empty => Self::One([new]),
            Self::One([a]) => Self::Two([a, new]),
            Self::Two([a, b]) => Self::Flexible(vec![a, b, new]),
            Self::Flexible(mut vec) => {
                vec.push(new);
                Self::Flexible(vec)
            }
        };
        *self = next;
    }

    /// Removes and returns the last element, or `None` if empty.
    ///
    /// Popping never demotes a `Flexible` buffer back to an inline variant.
    pub fn pop(&mut self) -> Option<T> {
        let (next, popped) = match std::mem::take(self) {
            Self::Empty => (Self::Empty, None),
            Self::One([a]) => (Self::Empty, Some(a)),
            Self::Two([a, b]) => (Self::One([a]), Some(b)),
            Self::Flexible(mut vec) => {
                let last = vec.pop();
                (Self::Flexible(vec), last)
            }
        };
        *self = next;
        popped
    }

    /// Removes all elements, keeping any heap allocation for reuse.
    pub fn clear(&mut self) {
        if let Self::Flexible(mut vec) = std::mem::take(self) {
            vec.clear();
            *self = Self::Flexible(vec);
        }
        // Inline variants were already replaced by `Empty` via the take.
    }

    /// Iterates over the elements by reference.
    pub fn iter(&self) -> std::slice::Iter<'_, T> {
        self.as_slice().iter()
    }
}

impl<T> Default for SmallVec<T> {
    /// The default vector is empty; required by `std::mem::take` in the
    /// push/pop/clear state machines above.
    fn default() -> Self {
        SmallVec::Empty
    }
}
+
+impl<T> Deref for SmallVec<T> {
+ type Target = [T];
+
+ fn deref(&self) -> &Self::Target {
+ self.as_slice()
+ }
+}
+
+impl<'a, T> IntoIterator for &'a SmallVec<T> {
+ type Item = &'a T;
+
+ type IntoIter = std::slice::Iter<'a, T>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter()
+ }
+}
+
/// `Eq` holds because `PartialEq` below delegates to slice equality.
impl<T: Eq> Eq for SmallVec<T> {}
+
+impl<T: PartialEq> PartialEq for SmallVec<T> {
+ fn eq(&self, other: &Self) -> bool {
+ self.as_slice() == other.as_slice()
+ }
+}
+
+impl<T: fmt::Debug> fmt::Debug for SmallVec<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.as_slice().fmt(f)
+ }
+}
+
impl<T: Hash> Hash for SmallVec<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash the length then the elements, so the hash depends only on
        // the contents and not on which representation variant is in use
        // (consistent with the PartialEq impl above).
        self.len().hash(state);
        Hash::hash_slice(self.as_slice(), state);
    }
}
+
#[cfg(feature = "serde")]
impl<T: serde::Serialize> serde::Serialize for SmallVec<T> {
    /// Serializes as a plain sequence, identical to serializing `&[T]`.
    fn serialize<S: serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        serde::Serialize::serialize(self.as_slice(), s)
    }
}
+
#[cfg(feature = "serde")]
impl<'de, T: serde::Deserialize<'de>> serde::Deserialize<'de> for SmallVec<T> {
    /// Deserializes from any sequence, pushing element by element so the
    /// smallest fitting representation is built up naturally.
    fn deserialize<D: serde::Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
        // Visitor that folds each sequence element into a fresh SmallVec.
        struct SmallVecVisitor<T> {
            // PhantomData pins the element type without storing a value.
            marker: std::marker::PhantomData<T>,
        }

        impl<'de, T> serde::de::Visitor<'de> for SmallVecVisitor<T>
        where
            T: serde::Deserialize<'de>,
        {
            type Value = SmallVec<T>;

            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a sequence")
            }

            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
            where
                A: serde::de::SeqAccess<'de>,
            {
                let mut values = SmallVec::empty();
                while let Some(value) = seq.next_element()? {
                    values.push(value);
                }
                Ok(values)
            }
        }

        let visitor = SmallVecVisitor {
            marker: Default::default(),
        };
        d.deserialize_seq(visitor)
    }
}
+
+impl<T> IntoIterator for SmallVec<T> {
+ type Item = T;
+ type IntoIter = SmallVecIntoIter<T>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ match self {
+ SmallVec::Empty => SmallVecIntoIter::Empty,
+ SmallVec::One(a) => SmallVecIntoIter::One(a.into_iter()),
+ SmallVec::Two(a) => SmallVecIntoIter::Two(a.into_iter()),
+ SmallVec::Flexible(v) => SmallVecIntoIter::Flexible(v.into_iter()),
+ }
+ }
+}
+
/// By-value iterator for `SmallVec`, with one variant per storage
/// representation of the source vector.
pub enum SmallVecIntoIter<T> {
    Empty,
    One(<[T; 1] as IntoIterator>::IntoIter),
    Two(<[T; 2] as IntoIterator>::IntoIter),
    Flexible(<Vec<T> as IntoIterator>::IntoIter),
}
+
+impl<T> Iterator for SmallVecIntoIter<T> {
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self {
+ SmallVecIntoIter::Empty => None,
+ SmallVecIntoIter::One(it) => it.next(),
+ SmallVecIntoIter::Two(it) => it.next(),
+ SmallVecIntoIter::Flexible(it) => it.next(),
+ }
+ }
+}
+
+// TESTS #######################################################################
+
#[cfg(test)]
pub mod tests {
    use super::*;
    use proptest::prelude::*;

    proptest! {
        // Differential test: apply the same random command sequence to a
        // std Vec and a SmallVec — Some(i) means push, None means pop —
        // and check they stay in lockstep after every step.
        #[test]
        fn push_and_pop(commands: Vec<Option<u8>>) {
            let mut v = vec![];
            let mut sv = SmallVec::Empty;
            for command in commands {
                match command {
                    Some(i) => {
                        v.push(i);
                        sv.push(i);
                    }
                    None => {
                        assert_eq!(v.pop(), sv.pop());
                    }
                }
                assert_eq!(v.as_slice(), sv.as_slice());
            }
        }
    }
}
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 +159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +198 +199 +200 +201 +202 +203 +204 +205 +206 +207 +208 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +222 +223 +224 +225 +226 +227 +228 +229 +230 +231 +232 +233 +234 +235 +
// SPDX-License-Identifier: MPL-2.0
+
+//! PubGrub version solving algorithm.
+//!
+//! Version solving consists in efficiently finding a set of packages and versions
+//! that satisfy all the constraints of a given project dependencies.
+//! In addition, when that is not possible,
+//! we should try to provide a very human-readable and clear
+//! explanation as to why that failed.
+//!
+//! # Package and Version traits
+//!
+//! All the code in this crate is manipulating packages and versions, and for this to work
+//! we defined the [Package](package::Package) and [Version](version::Version) traits
+//! that are used as bounds on most of the exposed types and functions.
+//!
+//! Package identifiers need to implement our [Package](package::Package) trait,
+//! which is automatic if the type already implements
+//! [Clone] + [Eq] + [Hash] + [Debug] + [Display](std::fmt::Display).
+//! So things like [String] will work out of the box.
+//!
+//! Our [Version](version::Version) trait requires
+//! [Clone] + [Ord] + [Debug] + [Display](std::fmt::Display)
+//! and also the definition of two methods,
+//! [lowest() -> Self](version::Version::lowest) which returns the lowest version existing,
+//! and [bump(&self) -> Self](version::Version::bump) which returns the next smallest version
+//! strictly higher than the current one.
+//! For convenience, this library already provides
+//! two implementations of [Version](version::Version).
+//! The first one is [NumberVersion](version::NumberVersion), basically a newtype for [u32].
+//! The second one is [SemanticVersion](version::SemanticVersion)
+//! that implements semantic versioning rules.
+//!
+//! # Basic example
+//!
+//! Let's imagine that we are building a user interface
+//! with a menu containing dropdowns with some icons,
+//! icons that we are also directly using in other parts of the interface.
+//! For this scenario our direct dependencies are `menu` and `icons`,
+//! but the complete set of dependencies looks like follows:
+//!
+//! - `root` depends on `menu` and `icons`
+//! - `menu` depends on `dropdown`
+//! - `dropdown` depends on `icons`
+//! - `icons` has no dependency
+//!
+//! We can model that scenario with this library as follows
+//! ```
+//! # use pubgrub::solver::{OfflineDependencyProvider, resolve};
+//! # use pubgrub::version::NumberVersion;
+//! # use pubgrub::range::Range;
+//!
+//! type NumVS = Range<NumberVersion>;
+//!
+//! let mut dependency_provider = OfflineDependencyProvider::<&str, NumVS>::new();
+//!
+//! dependency_provider.add_dependencies(
+//! "root", 1, [("menu", Range::full()), ("icons", Range::full())],
+//! );
+//! dependency_provider.add_dependencies("menu", 1, [("dropdown", Range::full())]);
+//! dependency_provider.add_dependencies("dropdown", 1, [("icons", Range::full())]);
+//! dependency_provider.add_dependencies("icons", 1, []);
+//!
+//! // Run the algorithm.
+//! let solution = resolve(&dependency_provider, "root", 1).unwrap();
+//! ```
+//!
+//! # DependencyProvider trait
+//!
+//! In our previous example we used the
+//! [OfflineDependencyProvider](solver::OfflineDependencyProvider),
+//! which is a basic implementation of the [DependencyProvider](solver::DependencyProvider) trait.
+//!
+//! But we might want to implement the [DependencyProvider](solver::DependencyProvider)
+//! trait for our own type.
+//! Let's say that we will use [String] for packages,
+//! and [SemanticVersion](version::SemanticVersion) for versions.
+//! This may be done quite easily by implementing the three following functions.
+//! ```
+//! # use pubgrub::solver::{DependencyProvider, Dependencies};
+//! # use pubgrub::version::SemanticVersion;
+//! # use pubgrub::range::Range;
+//! # use pubgrub::type_aliases::Map;
+//! # use std::error::Error;
+//! # use std::borrow::Borrow;
+//! # use std::convert::Infallible;
+//! #
+//! # struct MyDependencyProvider;
+//! #
+//! type SemVS = Range<SemanticVersion>;
+//!
+//! impl DependencyProvider<String, SemVS> for MyDependencyProvider {
+//! fn choose_version(&self, package: &String, range: &SemVS) -> Result<Option<SemanticVersion>, Infallible> {
+//! unimplemented!()
+//! }
+//!
+//! type Priority = usize;
+//! fn prioritize(&self, package: &String, range: &SemVS) -> Self::Priority {
+//! unimplemented!()
+//! }
+//!
+//! fn get_dependencies(
+//! &self,
+//! package: &String,
+//! version: &SemanticVersion,
+//! ) -> Result<Dependencies<String, SemVS>, Infallible> {
+//! unimplemented!()
+//! }
+//!
+//! type Err = Infallible;
+//! }
+//! ```
+//!
+//! The first method
+//! [choose_version](crate::solver::DependencyProvider::choose_version)
+//! chooses a version compatible with the provided range for a package.
+//! The second method
+//! [prioritize](crate::solver::DependencyProvider::prioritize)
+//! decides in which order different packages should be chosen.
+//! Usually prioritizing packages
+//! with the fewest number of compatible versions speeds up resolution.
+//! But in general you are free to employ whatever strategy suits you best
+//! to pick a package and a version.
+//!
+//! The third method [get_dependencies](crate::solver::DependencyProvider::get_dependencies)
+//! aims at retrieving the dependencies of a given package at a given version.
+//! Returns [None] if dependencies are unknown.
+//!
+//! In a real scenario, these methods may involve reading the file system
+//! or doing network request, so you may want to hold a cache in your
+//! [DependencyProvider](solver::DependencyProvider) implementation.
+//! How exactly this could be achieved is shown in `CachingDependencyProvider`
+//! (see `examples/caching_dependency_provider.rs`).
+//! You could also use the [OfflineDependencyProvider](solver::OfflineDependencyProvider)
+//! type defined by the crate as guidance,
+//! but you are free to use whatever approach makes sense in your situation.
+//!
+//! # Solution and error reporting
+//!
+//! When everything goes well, the algorithm finds and returns the complete
+//! set of direct and indirect dependencies satisfying all the constraints.
+//! The packages and versions selected are returned as
+//! [SelectedDependencies<P, V>](type_aliases::SelectedDependencies).
+//! But sometimes there is no solution because dependencies are incompatible.
+//! In such cases, [resolve(...)](solver::resolve) returns a
+//! [PubGrubError::NoSolution(derivation_tree)](error::PubGrubError::NoSolution),
+//! where the provided derivation tree is a custom binary tree
+//! containing the full chain of reasons why there is no solution.
+//!
+//! All the items in the tree are called incompatibilities
+//! and may be of two types, either "external" or "derived".
+//! Leaves of the tree are external incompatibilities,
+//! and nodes are derived.
+//! External incompatibilities have reasons that are independent
+//! of the way this algorithm is implemented such as
+//! - dependencies: "package_a" at version 1 depends on "package_b" at version 4
+//! - missing dependencies: dependencies of "package_a" are unknown
+//! - absence of version: there is no version of "package_a" in the range [3.1.0 4.0.0[
+//!
+//! Derived incompatibilities are obtained during the algorithm execution by deduction,
+//! such as if "a" depends on "b" and "b" depends on "c", "a" depends on "c".
+//!
+//! This crate defines a [Reporter](crate::report::Reporter) trait, with an associated
+//! [Output](crate::report::Reporter::Output) type and a single method.
+//! ```
+//! # use pubgrub::package::Package;
+//! # use pubgrub::version_set::VersionSet;
+//! # use pubgrub::report::DerivationTree;
+//! #
+//! pub trait Reporter<P: Package, VS: VersionSet> {
+//! type Output;
+//!
+//! fn report(derivation_tree: &DerivationTree<P, VS>) -> Self::Output;
+//! }
+//! ```
+//! Implementing a [Reporter](crate::report::Reporter) may involve a lot of heuristics
+//! to make the output human-readable and natural.
+//! For convenience, we provide a default implementation
+//! [DefaultStringReporter](crate::report::DefaultStringReporter)
+//! that outputs the report as a [String].
+//! You may use it as follows:
+//! ```
+//! # use pubgrub::solver::{resolve, OfflineDependencyProvider};
+//! # use pubgrub::report::{DefaultStringReporter, Reporter};
+//! # use pubgrub::error::PubGrubError;
+//! # use pubgrub::version::NumberVersion;
+//! # use pubgrub::range::Range;
+//! #
+//! # type NumVS = Range<NumberVersion>;
+//! #
+//! # let dependency_provider = OfflineDependencyProvider::<&str, NumVS>::new();
+//! # let root_package = "root";
+//! # let root_version = 1;
+//! #
+//! match resolve(&dependency_provider, root_package, root_version) {
+//! Ok(solution) => println!("{:?}", solution),
+//! Err(PubGrubError::NoSolution(mut derivation_tree)) => {
+//! derivation_tree.collapse_no_versions();
+//! eprintln!("{}", DefaultStringReporter::report(&derivation_tree));
+//! }
+//! Err(err) => panic!("{:?}", err),
+//! };
+//! ```
+//! Notice that we also used
+//! [collapse_no_versions()](crate::report::DerivationTree::collapse_no_versions) above.
+//! This method simplifies the derivation tree to get rid of the
+//! [NoVersions](crate::report::External::NoVersions)
+//! external incompatibilities in the derivation tree.
+//! So instead of seeing things like this in the report:
+//! ```txt
+//! Because there is no version of foo in 1.0.1 <= v < 2.0.0
+//! and foo 1.0.0 depends on bar 2.0.0 <= v < 3.0.0,
+//! foo 1.0.0 <= v < 2.0.0 depends on bar 2.0.0 <= v < 3.0.0.
+//! ```
+//! you may have directly:
+//! ```txt
+//! foo 1.0.0 <= v < 2.0.0 depends on bar 2.0.0 <= v < 3.0.0.
+//! ```
+//! Beware though that if you are using some kind of offline mode
+//! with a cache, you may want to know that some versions
+//! do not exist in your cache.
+
+#![warn(missing_docs)]
+
+pub mod error;
+pub mod package;
+pub mod range;
+pub mod report;
+pub mod solver;
+pub mod term;
+pub mod type_aliases;
+pub mod version;
+pub mod version_set;
+
+mod internal;
+
// SPDX-License-Identifier: MPL-2.0
+
+//! Trait for identifying packages.
+//! Automatically implemented for types implementing
+//! [Clone] + [Eq] + [Hash] + [Debug] + [Display].
+
+use std::fmt::{Debug, Display};
+use std::hash::Hash;
+
/// Trait for identifying packages.
/// Automatically implemented for types already implementing
/// [Clone] + [Eq] + [Hash] + [Debug] + [Display].
pub trait Package: Clone + Eq + Hash + Debug + Display {}

/// Blanket implementation: any type satisfying
/// [Clone] + [Eq] + [Hash] + [Debug] + [Display] is a [Package].
impl<T: Clone + Eq + Hash + Debug + Display> Package for T {}
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 +159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +198 +199 +200 +201 +202 +203 +204 +205 +206 +207 +208 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +222 +223 +224 +225 +226 +227 +228 +229 +230 +231 +232 +233 +234 +235 +236 +237 +238 +239 +240 +241 +242 +243 +244 +245 +246 +247 +248 +249 +250 +251 +252 +253 +254 +255 +256 +257 +258 +259 +260 +261 +262 +263 +264 +265 +266 +267 +268 +269 +270 +271 +272 +273 +274 +275 +276 +277 +278 +279 +280 +281 +282 +283 +284 +285 +286 +287 +288 +289 +290 +291 +292 +293 +294 +295 +296 +297 +298 +299 +300 +301 +302 +303 +304 +305 +306 +307 +308 +309 +310 +311 +312 +313 +314 +315 +316 +317 +318 +319 +320 +321 +322 +323 +324 +325 +326 +327 +328 +329 +330 +331 +332 +333 +334 +335 +336 +337 +338 +339 +340 +341 +342 +343 +344 +345 +346 +347 +348 +349 +350 +351 +352 +353 +354 +355 +356 +357 +358 +359 +360 +361 +362 +363 +364 +365 +366 +367 +368 +369 +370 +371 +372 +373 +374 +375 +376 +377 +378 +379 +380 +381 +382 +383 +384 +385 +386 +387 +388 +389 +390 +391 +392 +393 +394 +395 +396 +397 +398 +399 +400 +401 +402 +403 +404 +405 +406 +407 +408 +409 +410 +411 +412 +413 +414 +415 +416 +417 +418 +419 +420 +421 
+422 +423 +424 +425 +426 +427 +428 +429 +430 +431 +432 +433 +434 +435 +436 +437 +438 +439 +440 +441 +442 +443 +444 +445 +446 +447 +448 +449 +450 +451 +452 +453 +454 +455 +456 +457 +458 +459 +460 +461 +462 +463 +464 +465 +466 +467 +468 +469 +470 +471 +472 +473 +474 +475 +476 +477 +478 +479 +480 +481 +482 +483 +484 +485 +486 +487 +488 +489 +490 +491 +492 +493 +494 +495 +496 +497 +498 +499 +500 +501 +502 +503 +504 +505 +506 +507 +508 +509 +510 +511 +512 +513 +514 +515 +516 +517 +518 +519 +520 +521 +522 +523 +524 +525 +526 +527 +528 +529 +530 +531 +532 +533 +534 +535 +536 +537 +538 +539 +540 +541 +542 +543 +544 +545 +546 +547 +548 +549 +550 +551 +552 +553 +554 +555 +556 +557 +558 +559 +560 +561 +562 +563 +564 +565 +566 +567 +568 +569 +570 +571 +572 +573 +574 +575 +576 +577 +578 +579 +580 +581 +582 +583 +584 +585 +586 +587 +588 +589 +590 +591 +592 +593 +594 +595 +596 +597 +598 +599 +600 +601 +602 +603 +604 +605 +606 +607 +608 +609 +610 +611 +612 +613 +614 +615 +616 +617 +618 +619 +620 +621 +622 +623 +624 +625 +626 +627 +628 +629 +630 +631 +632 +633 +634 +635 +636 +637 +638 +639 +640 +641 +642 +643 +644 +645 +646 +647 +648 +649 +650 +651 +652 +653 +654 +655 +656 +657 +658 +659 +660 +661 +662 +663 +664 +665 +666 +667 +668 +669 +670 +671 +672 +673 +674 +675 +676 +677 +678 +679 +680 +681 +682 +683 +684 +685 +686 +687 +688 +689 +690 +691 +692 +693 +694 +695 +696 +697 +698 +699 +700 +701 +702 +703 +704 +705 +706 +707 +708 +709 +710 +711 +712 +713 +714 +715 +716 +717 +718 +719 +720 +721 +722 +723 +724 +725 +726 +727 +728 +729 +730 +731 +732 +733 +734 +735 +736 +737 +738 +739 +740 +741 +742 +743 +744 +745 +746 +747 +748 +749 +750 +751 +752 +753 +754 +755 +756 +757 +758 +759 +760 +761 +762 +763 +764 +765 +766 +767 +768 +769 +770 +771 +772 +
// SPDX-License-Identifier: MPL-2.0
+
+//! Ranges are constraints defining sets of versions.
+//!
+//! Concretely, those constraints correspond to any set of versions
+//! representable as the concatenation, union, and complement
+//! of the ranges building blocks.
+//!
+//! Those building blocks are:
+//! - [empty()](Range::empty): the empty set
+//! - [full()](Range::full): the set of all possible versions
+//! - [singleton(v)](Range::singleton): the set containing only the version v
+//! - [higher_than(v)](Range::higher_than): the set defined by `v <= versions`
+//! - [strictly_higher_than(v)](Range::strictly_higher_than): the set defined by `v < versions`
+//! - [lower_than(v)](Range::lower_than): the set defined by `versions <= v`
+//! - [strictly_lower_than(v)](Range::strictly_lower_than): the set defined by `versions < v`
+//! - [between(v1, v2)](Range::between): the set defined by `v1 <= versions < v2`
+//!
+//! Ranges can be created from any type that implements [`Ord`] + [`Clone`].
+//!
+//! In order to advance the solver front, comparisons of versions sets are necessary in the algorithm.
+//! To do those comparisons between two sets S1 and S2 we use the mathematical property that S1 ⊂ S2 if and only if S1 ∩ S2 == S1.
+//! We can thus compute an intersection and evaluate an equality to answer if S1 is a subset of S2.
+//! But this means that the implementation of equality must be correct semantically.
+//! In practice, if equality is derived automatically, this means sets must have unique representations.
+//!
+//! By migrating from a custom representation for discrete sets in v0.2
+//! to a generic bounded representation for continuous sets in v0.3
+//! we are potentially breaking that assumption in two ways:
+//!
+//! 1. Minimal and maximal `Unbounded` values can be replaced by their equivalent if it exists.
+//! 2. Simplifying adjacent bounds of discrete sets cannot be detected and automated in the generic intersection code.
+//!
+//! An example for each can be given when `T` is `u32`.
+//! First, we can have both segments `S1 = (Unbounded, Included(42u32))` and `S2 = (Included(0), Included(42u32))`
+//! that represent the same segment but are structurally different.
+//! Thus, a derived equality check would answer `false` to `S1 == S2` while it's true.
+//!
//! Second, both segments `S1 = (Included(1), Included(5))` and `S2 = (Included(1), Included(3)) + (Included(4), Included(5))` are equal.
//! But without asking the user to provide a `bump` function for discrete sets,
//! the algorithm is not able to tell that the space between the right `Included(3)` bound and the left `Included(4)` bound is empty.
+//! Thus the algorithm is not able to reduce S2 to its canonical S1 form while computing sets operations like intersections in the generic code.
+//!
+//! This is likely to lead to user facing theoretically correct but practically nonsensical ranges,
+//! like (Unbounded, Excluded(0)) or (Excluded(6), Excluded(7)).
+//! In general nonsensical inputs often lead to hard to track bugs.
+//! But as far as we can tell this should work in practice.
+//! So for now this crate only provides an implementation for continuous ranges.
+//! With the v0.3 api the user could choose to bring back the discrete implementation from v0.2, as documented in the guide.
+//! If doing so regularly fixes bugs seen by users, we will bring it back into the core library.
+//! If we do not see practical bugs, or we get a formal proof that the code cannot lead to error states, then we may remove this warning.
+
+use crate::{internal::small_vec::SmallVec, version_set::VersionSet};
+use std::cmp::Ordering;
+use std::ops::RangeBounds;
+use std::{
+ fmt::{Debug, Display, Formatter},
+ ops::Bound::{self, Excluded, Included, Unbounded},
+};
+
/// A Range represents multiple intervals of a continuous range of monotone increasing
/// values.
///
/// Internally a range is a list of non-overlapping, non-adjacent segments,
/// kept sorted in increasing order (see `check_invariants`).
/// This canonical representation is what makes the derived `PartialEq`
/// agree with set equality for continuous versions.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
#[cfg_attr(feature = "serde", serde(transparent))]
pub struct Range<V> {
    segments: SmallVec<Interval<V>>,
}

/// One contiguous interval: a (lower, upper) pair of bounds, each possibly unbounded.
type Interval<V> = (Bound<V>, Bound<V>);
+
+impl<V> Range<V> {
+ /// Empty set of versions.
+ pub fn empty() -> Self {
+ Self {
+ segments: SmallVec::empty(),
+ }
+ }
+
+ /// Set of all possible versions
+ pub fn full() -> Self {
+ Self {
+ segments: SmallVec::one((Unbounded, Unbounded)),
+ }
+ }
+
+ /// Set of all versions higher or equal to some version
+ pub fn higher_than(v: impl Into<V>) -> Self {
+ Self {
+ segments: SmallVec::one((Included(v.into()), Unbounded)),
+ }
+ }
+
+ /// Set of all versions higher to some version
+ pub fn strictly_higher_than(v: impl Into<V>) -> Self {
+ Self {
+ segments: SmallVec::one((Excluded(v.into()), Unbounded)),
+ }
+ }
+
+ /// Set of all versions lower to some version
+ pub fn strictly_lower_than(v: impl Into<V>) -> Self {
+ Self {
+ segments: SmallVec::one((Unbounded, Excluded(v.into()))),
+ }
+ }
+
+ /// Set of all versions lower or equal to some version
+ pub fn lower_than(v: impl Into<V>) -> Self {
+ Self {
+ segments: SmallVec::one((Unbounded, Included(v.into()))),
+ }
+ }
+
+ /// Set of versions greater or equal to `v1` but less than `v2`.
+ pub fn between(v1: impl Into<V>, v2: impl Into<V>) -> Self {
+ Self {
+ segments: SmallVec::one((Included(v1.into()), Excluded(v2.into()))),
+ }
+ }
+}
+
+impl<V: Clone> Range<V> {
+ /// Set containing exactly one version
+ pub fn singleton(v: impl Into<V>) -> Self {
+ let v = v.into();
+ Self {
+ segments: SmallVec::one((Included(v.clone()), Included(v))),
+ }
+ }
+
+ /// Returns the complement of this Range.
+ pub fn complement(&self) -> Self {
+ match self.segments.first() {
+ // Complement of ∅ is ∞
+ None => Self::full(),
+
+ // Complement of ∞ is ∅
+ Some((Unbounded, Unbounded)) => Self::empty(),
+
+ // First high bound is +∞
+ Some((Included(v), Unbounded)) => Self::strictly_lower_than(v.clone()),
+ Some((Excluded(v), Unbounded)) => Self::lower_than(v.clone()),
+
+ Some((Unbounded, Included(v))) => {
+ Self::negate_segments(Excluded(v.clone()), &self.segments[1..])
+ }
+ Some((Unbounded, Excluded(v))) => {
+ Self::negate_segments(Included(v.clone()), &self.segments[1..])
+ }
+ Some((Included(_), Included(_)))
+ | Some((Included(_), Excluded(_)))
+ | Some((Excluded(_), Included(_)))
+ | Some((Excluded(_), Excluded(_))) => Self::negate_segments(Unbounded, &self.segments),
+ }
+ }
+
+ /// Helper function performing the negation of intervals in segments.
+ fn negate_segments(start: Bound<V>, segments: &[Interval<V>]) -> Self {
+ let mut complement_segments: SmallVec<Interval<V>> = SmallVec::empty();
+ let mut start = start;
+ for (v1, v2) in segments {
+ complement_segments.push((
+ start,
+ match v1 {
+ Included(v) => Excluded(v.clone()),
+ Excluded(v) => Included(v.clone()),
+ Unbounded => unreachable!(),
+ },
+ ));
+ start = match v2 {
+ Included(v) => Excluded(v.clone()),
+ Excluded(v) => Included(v.clone()),
+ Unbounded => Unbounded,
+ }
+ }
+ if !matches!(start, Unbounded) {
+ complement_segments.push((start, Unbounded));
+ }
+
+ Self {
+ segments: complement_segments,
+ }
+ }
+}
+
impl<V: Ord> Range<V> {
    /// Convert to something that can be used with
    /// [BTreeMap::range](std::collections::BTreeMap::range).
    /// All versions contained in self, will be in the output,
    /// but there may be versions in the output that are not contained in self.
    /// Returns None if the range is empty.
    pub fn bounding_range(&self) -> Option<(Bound<&V>, Bound<&V>)> {
        self.segments.first().map(|(start, _)| {
            let end = self
                .segments
                .last()
                .expect("if there is a first element, there must be a last element");
            (start.as_ref(), end.1.as_ref())
        })
    }

    /// Returns true if this Range contains the specified value.
    pub fn contains(&self, v: &V) -> bool {
        // Segments are sorted in increasing order, so once `v` is below a
        // segment's lower bound it cannot be in any later segment either.
        for segment in self.segments.iter() {
            match within_bounds(v, segment) {
                Ordering::Less => return false,
                Ordering::Equal => return true,
                Ordering::Greater => (),
            }
        }
        false
    }

    /// Returns true if this Range contains the specified values.
    ///
    /// The `versions` iterator must be sorted.
    /// Functionally equivalent to `versions.map(|v| self.contains(v))`.
    /// Except it runs in `O(size_of_range + len_of_versions)` not `O(size_of_range * len_of_versions)`
    pub fn contains_many<'s, I>(&'s self, versions: I) -> impl Iterator<Item = bool> + 's
    where
        I: Iterator<Item = &'s V> + 's,
        V: 's,
    {
        // `i` is the index of the current candidate segment; because both the
        // segments and `versions` are sorted, it only ever moves forward.
        versions.scan(0, move |i, v| {
            while let Some(segment) = self.segments.get(*i) {
                match within_bounds(v, segment) {
                    Ordering::Less => return Some(false),
                    Ordering::Equal => return Some(true),
                    Ordering::Greater => *i += 1,
                }
            }
            Some(false)
        })
    }

    /// Construct a simple range from anything that impls [RangeBounds] like `v1..v2`.
    ///
    /// An inverted or empty input (e.g. `3..3`) yields `Range::empty()`.
    pub fn from_range_bounds<R, IV>(bounds: R) -> Self
    where
        R: RangeBounds<IV>,
        IV: Clone + Into<V>,
    {
        let start = match bounds.start_bound() {
            Included(v) => Included(v.clone().into()),
            Excluded(v) => Excluded(v.clone().into()),
            Unbounded => Unbounded,
        };
        let end = match bounds.end_bound() {
            Included(v) => Included(v.clone().into()),
            Excluded(v) => Excluded(v.clone().into()),
            Unbounded => Unbounded,
        };
        if valid_segment(&start, &end) {
            Self {
                segments: SmallVec::one((start, end)),
            }
        } else {
            Self::empty()
        }
    }

    /// Debug-only sanity check of the internal invariants:
    /// segments are sorted, pairwise disjoint and non-adjacent,
    /// and each segment is individually valid (non-empty).
    fn check_invariants(self) -> Self {
        if cfg!(debug_assertions) {
            for p in self.segments.as_slice().windows(2) {
                match (&p[0].1, &p[1].0) {
                    (Included(l_end), Included(r_start)) => assert!(l_end < r_start),
                    (Included(l_end), Excluded(r_start)) => assert!(l_end < r_start),
                    (Excluded(l_end), Included(r_start)) => assert!(l_end < r_start),
                    (Excluded(l_end), Excluded(r_start)) => assert!(l_end <= r_start),
                    // Unbounded sides may only appear at the outer edges of the range.
                    (_, Unbounded) => panic!(),
                    (Unbounded, _) => panic!(),
                }
            }
            for (s, e) in self.segments.iter() {
                assert!(valid_segment(s, e));
            }
        }
        self
    }
}
+
+fn within_bounds<V: PartialOrd>(v: &V, segment: &Interval<V>) -> Ordering {
+ let below_lower_bound = match segment {
+ (Excluded(start), _) => v <= start,
+ (Included(start), _) => v < start,
+ (Unbounded, _) => false,
+ };
+ if below_lower_bound {
+ return Ordering::Less;
+ }
+ let below_upper_bound = match segment {
+ (_, Unbounded) => true,
+ (_, Included(end)) => v <= end,
+ (_, Excluded(end)) => v < end,
+ };
+ if below_upper_bound {
+ return Ordering::Equal;
+ }
+ Ordering::Greater
+}
+
/// A segment is valid (non-empty) when its start is below its end.
/// Equal endpoints are only allowed when both are inclusive.
fn valid_segment<T: PartialOrd>(start: &Bound<T>, end: &Bound<T>) -> bool {
    match (start, end) {
        // Any segment touching infinity on either side is non-empty.
        (Unbounded, _) | (_, Unbounded) => true,
        // Two inclusive bounds may coincide (a singleton).
        (Included(s), Included(e)) => s <= e,
        // With at least one exclusive bound, strict ordering is required.
        (Included(s), Excluded(e)) | (Excluded(s), Included(e)) | (Excluded(s), Excluded(e)) => {
            s < e
        }
    }
}
+
/// Group adjacent versions locations.
///
/// ```text
/// [None, 3, 6, 7, None] -> [(3, 7)]
/// [3, 6, 7, None] -> [(None, 7)]
/// [3, 6, 7] -> [(None, None)]
/// [None, 1, 4, 7, None, None, None, 8, None, 9] -> [(1, 7), (8, 8), (9, None)]
/// ```
fn group_adjacent_locations(
    mut locations: impl Iterator<Item = Option<usize>>,
) -> impl Iterator<Item = (Option<usize>, Option<usize>)> {
    // If the very first version matched, the lower bound of that merged
    // segment is not needed.
    let mut current = locations.next().flatten().map(|idx| (None, Some(idx)));
    std::iter::from_fn(move || loop {
        match locations.next() {
            // Still matching: extend the in-progress segment (or open a new one).
            Some(Some(idx)) => {
                let lower = current.map_or(Some(idx), |(low, _)| low);
                current = Some((lower, Some(idx)));
            }
            // A non-matching version closes the in-progress segment, if any.
            Some(None) => {
                if current.is_some() {
                    return current.take();
                }
            }
            // Input exhausted: flush the last segment; its upper bound is not needed.
            None => return current.take().map(|(low, _)| (low, None)),
        }
    })
}
+
impl<V: Ord + Clone> Range<V> {
    /// Computes the union of this `Range` and another.
    ///
    /// Implemented via De Morgan: A ∪ B = ¬(¬A ∩ ¬B),
    /// reusing the intersection machinery.
    pub fn union(&self, other: &Self) -> Self {
        self.complement()
            .intersection(&other.complement())
            .complement()
            .check_invariants()
    }

    /// Computes the intersection of two sets of versions.
    pub fn intersection(&self, other: &Self) -> Self {
        let mut output: SmallVec<Interval<V>> = SmallVec::empty();
        let mut left_iter = self.segments.iter().peekable();
        let mut right_iter = other.segments.iter().peekable();
        // By the definition of intersection any point that is matched by the output
        // must have a segment in each of the inputs that it matches.
        // Therefore, every segment in the output must be the intersection of a segment from each of the inputs.
        // It would be correct to do the "O(n^2)" thing, by computing the intersection of every segment from one input
        // with every segment of the other input, and sorting the result.
        // We can avoid the sorting by generating our candidate segments with an increasing `end` value.
        while let Some(((left_start, left_end), (right_start, right_end))) =
            left_iter.peek().zip(right_iter.peek())
        {
            // The next smallest `end` value is going to come from one of the inputs.
            let left_end_is_smaller = match (left_end, right_end) {
                (Included(l), Included(r))
                | (Excluded(l), Excluded(r))
                | (Excluded(l), Included(r)) => l <= r,

                (Included(l), Excluded(r)) => l < r,
                (_, Unbounded) => true,
                (Unbounded, _) => false,
            };
            // Now that we are processing `end` we will never have to process any segment smaller than that.
            // We can ensure that the input that `end` came from is larger than `end` by advancing it one step.
            // `end` is the smaller available input, so we know the other input is already larger than `end`.
            // Note: We can call `other_iter.next_if( == end)`, but the ends lining up is rare enough that
            // it does not end up being faster in practice.
            let (other_start, end) = if left_end_is_smaller {
                left_iter.next();
                (right_start, left_end)
            } else {
                right_iter.next();
                (left_start, right_end)
            };
            // `start` will either come from the input `end` came from or the other input, whichever one is larger.
            // The intersection is invalid if `start` > `end`.
            // But, we already know that the segments in our input are valid.
            // So we do not need to check if the `start` from the input `end` came from is smaller then `end`.
            // If the `other_start` is larger than end, then the intersection will be invalid.
            if !valid_segment(other_start, end) {
                // Note: We can call `this_iter.next_if(!valid_segment(other_start, this_end))` in a loop.
                // But the checks make it slower for the benchmarked inputs.
                continue;
            }
            let start = match (left_start, right_start) {
                (Included(l), Included(r)) => Included(std::cmp::max(l, r)),
                (Excluded(l), Excluded(r)) => Excluded(std::cmp::max(l, r)),

                // Mixed bounds: when the inclusive value is <= the exclusive one,
                // the exclusive bound is the tighter (larger) constraint.
                (Included(i), Excluded(e)) | (Excluded(e), Included(i)) => {
                    if i <= e {
                        Excluded(e)
                    } else {
                        Included(i)
                    }
                }
                (s, Unbounded) | (Unbounded, s) => s.as_ref(),
            };
            // Now we clone and push a new segment.
            // By dealing with references until now we ensure that NO cloning happens when we reject the segment.
            output.push((start.cloned(), end.clone()))
        }

        Self { segments: output }.check_invariants()
    }

    /// Returns a simpler Range that contains the same versions
    ///
    /// For every one of the Versions provided in versions the existing range and
    /// the simplified range will agree on whether it is contained.
    /// The simplified version may include or exclude versions that are not in versions as the implementation wishes.
    /// For example:
    /// - If all the versions are contained in the original than the range will be simplified to `full`.
    /// - If none of the versions are contained in the original than the range will be simplified to `empty`.
    ///
    /// If versions are not sorted the correctness of this function is not guaranteed.
    pub fn simplify<'v, I>(&self, versions: I) -> Self
    where
        I: Iterator<Item = &'v V> + 'v,
        V: 'v,
    {
        // Return the segment index in the range for each version in the range, None otherwise
        let version_locations = versions.scan(0, move |i, v| {
            while let Some(segment) = self.segments.get(*i) {
                match within_bounds(v, segment) {
                    Ordering::Less => return Some(None),
                    Ordering::Equal => return Some(Some(*i)),
                    Ordering::Greater => *i += 1,
                }
            }
            Some(None)
        });
        // Merge runs of matching versions, then keep only one segment per run.
        let kept_segments = group_adjacent_locations(version_locations);
        self.keep_segments(kept_segments)
    }

    /// Create a new range with a subset of segments at given location bounds.
    ///
    /// Each new segment is constructed from a pair of segments, taking the
    /// start of the first and the end of the second.
    /// A `None` location means the corresponding side becomes unbounded.
    fn keep_segments(
        &self,
        kept_segments: impl Iterator<Item = (Option<usize>, Option<usize>)>,
    ) -> Range<V> {
        let mut segments = SmallVec::Empty;
        for (s, e) in kept_segments {
            segments.push((
                s.map_or(Unbounded, |s| self.segments[s].0.clone()),
                e.map_or(Unbounded, |e| self.segments[e].1.clone()),
            ));
        }
        Self { segments }.check_invariants()
    }
}
+
// Wire `Range` up as the concrete `VersionSet` used by the solver.
// Every method is a direct delegation to the inherent `Range` methods above.
impl<T: Debug + Display + Clone + Eq + Ord> VersionSet for Range<T> {
    type V = T;

    fn empty() -> Self {
        Range::empty()
    }

    fn singleton(v: Self::V) -> Self {
        Range::singleton(v)
    }

    fn complement(&self) -> Self {
        Range::complement(self)
    }

    fn intersection(&self, other: &Self) -> Self {
        Range::intersection(self, other)
    }

    fn contains(&self, v: &Self::V) -> bool {
        Range::contains(self, v)
    }

    // Override the trait's provided defaults with the direct implementations.
    fn full() -> Self {
        Range::full()
    }

    fn union(&self, other: &Self) -> Self {
        Range::union(self, other)
    }
}
+
+// REPORT ######################################################################
+
+impl<V: Display + Eq> Display for Range<V> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ if self.segments.is_empty() {
+ write!(f, "∅")?;
+ } else {
+ for (idx, segment) in self.segments.iter().enumerate() {
+ if idx > 0 {
+ write!(f, " | ")?;
+ }
+ match segment {
+ (Unbounded, Unbounded) => write!(f, "*")?,
+ (Unbounded, Included(v)) => write!(f, "<={v}")?,
+ (Unbounded, Excluded(v)) => write!(f, "<{v}")?,
+ (Included(v), Unbounded) => write!(f, ">={v}")?,
+ (Included(v), Included(b)) => {
+ if v == b {
+ write!(f, "{v}")?
+ } else {
+ write!(f, ">={v}, <={b}")?
+ }
+ }
+ (Included(v), Excluded(b)) => write!(f, ">={v}, <{b}")?,
+ (Excluded(v), Unbounded) => write!(f, ">{v}")?,
+ (Excluded(v), Included(b)) => write!(f, ">{v}, <={b}")?,
+ (Excluded(v), Excluded(b)) => write!(f, ">{v}, <{b}")?,
+ };
+ }
+ }
+ Ok(())
+ }
+}
+
+// SERIALIZATION ###############################################################
+
#[cfg(feature = "serde")]
impl<'de, V: serde::Deserialize<'de>> serde::Deserialize<'de> for Range<V> {
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        // This enables conversion from the "old" discrete implementation of `Range` to the new
        // bounded one.
        //
        // Serialization is always performed in the new format.
        #[derive(serde::Deserialize)]
        #[serde(untagged)]
        enum EitherInterval<V> {
            // New format: an explicit pair of bounds.
            B(Bound<V>, Bound<V>),
            // Old v0.2 discrete format: a start version and an optional
            // (exclusive) end version.
            D(V, Option<V>),
        }

        let bounds: SmallVec<EitherInterval<V>> = serde::Deserialize::deserialize(deserializer)?;

        let mut segments = SmallVec::Empty;
        for i in bounds {
            match i {
                EitherInterval::B(l, r) => segments.push((l, r)),
                // Map the discrete `[l, r)` form onto bounded intervals.
                EitherInterval::D(l, Some(r)) => segments.push((Included(l), Excluded(r))),
                EitherInterval::D(l, None) => segments.push((Included(l), Unbounded)),
            }
        }

        // NOTE(review): segments are trusted as-is here; invariants are not
        // re-checked after deserialization.
        Ok(Range { segments })
    }
}
+
+// TESTS #######################################################################
+
#[cfg(test)]
pub mod tests {
    use proptest::prelude::*;

    use super::*;

    /// Generate version sets from a random vector of deltas between bounds.
    /// Each bound is randomly inclusive or exclusive.
    pub fn strategy() -> impl Strategy<Value = Range<u32>> {
        (
            any::<bool>(),
            prop::collection::vec(any::<(u32, bool)>(), 1..10),
        )
            .prop_map(|(start_unbounded, deltas)| {
                let mut start = if start_unbounded {
                    Some(Unbounded)
                } else {
                    None
                };
                let mut largest: u32 = 0;
                let mut last_bound_was_inclusive = false;
                let mut segments = SmallVec::Empty;
                for (delta, inclusive) in deltas {
                    // Add the offset to the current bound
                    largest = match largest.checked_add(delta) {
                        Some(s) => s,
                        None => {
                            // Skip this offset, if it would result in a too large bound.
                            continue;
                        }
                    };

                    let current_bound = if inclusive {
                        Included(largest)
                    } else {
                        Excluded(largest)
                    };

                    // If we already have a start bound, the next offset defines the complete range.
                    // If we don't have a start bound, we have to generate one.
                    if let Some(start_bound) = start.take() {
                        // If the delta from the start bound is 0, the only authorized configuration is
                        // Included(x), Included(x)
                        if delta == 0 && !(matches!(start_bound, Included(_)) && inclusive) {
                            start = Some(start_bound);
                            continue;
                        }
                        last_bound_was_inclusive = inclusive;
                        segments.push((start_bound, current_bound));
                    } else {
                        // If the delta from the end bound of the last range is 0 and
                        // any of the last ending or current starting bound is inclusive,
                        // we skip the delta because they basically overlap.
                        if delta == 0 && (last_bound_was_inclusive || inclusive) {
                            continue;
                        }
                        start = Some(current_bound);
                    }
                }

                // If we still have a start bound, but didn't have enough deltas to complete another
                // segment, we add an unbounded upperbound.
                if let Some(start_bound) = start {
                    segments.push((start_bound, Unbounded));
                }

                // `check_invariants` guarantees the generated range is canonical,
                // so derived equality in the tests below is semantic equality.
                Range { segments }.check_invariants()
            })
    }

    /// Arbitrary version used to probe membership in the generated ranges.
    fn version_strat() -> impl Strategy<Value = u32> {
        any::<u32>()
    }

    proptest! {

        // Testing negate ----------------------------------

        #[test]
        fn negate_is_different(range in strategy()) {
            assert_ne!(range.complement(), range);
        }

        #[test]
        fn double_negate_is_identity(range in strategy()) {
            assert_eq!(range.complement().complement(), range);
        }

        #[test]
        fn negate_contains_opposite(range in strategy(), version in version_strat()) {
            assert_ne!(range.contains(&version), range.complement().contains(&version));
        }

        // Testing intersection ----------------------------

        #[test]
        fn intersection_is_symmetric(r1 in strategy(), r2 in strategy()) {
            assert_eq!(r1.intersection(&r2), r2.intersection(&r1));
        }

        #[test]
        fn intersection_with_any_is_identity(range in strategy()) {
            assert_eq!(Range::full().intersection(&range), range);
        }

        #[test]
        fn intersection_with_none_is_none(range in strategy()) {
            assert_eq!(Range::empty().intersection(&range), Range::empty());
        }

        #[test]
        fn intersection_is_idempotent(r1 in strategy(), r2 in strategy()) {
            assert_eq!(r1.intersection(&r2).intersection(&r2), r1.intersection(&r2));
        }

        #[test]
        fn intersection_is_associative(r1 in strategy(), r2 in strategy(), r3 in strategy()) {
            assert_eq!(r1.intersection(&r2).intersection(&r3), r1.intersection(&r2.intersection(&r3)));
        }

        #[test]
        fn intesection_of_complements_is_none(range in strategy()) {
            assert_eq!(range.complement().intersection(&range), Range::empty());
        }

        #[test]
        fn intesection_contains_both(r1 in strategy(), r2 in strategy(), version in version_strat()) {
            assert_eq!(r1.intersection(&r2).contains(&version), r1.contains(&version) && r2.contains(&version));
        }

        // Testing union -----------------------------------

        #[test]
        fn union_of_complements_is_any(range in strategy()) {
            assert_eq!(range.complement().union(&range), Range::full());
        }

        #[test]
        fn union_contains_either(r1 in strategy(), r2 in strategy(), version in version_strat()) {
            assert_eq!(r1.union(&r2).contains(&version), r1.contains(&version) || r2.contains(&version));
        }

        // Testing contains --------------------------------

        #[test]
        fn always_contains_exact(version in version_strat()) {
            assert!(Range::singleton(version).contains(&version));
        }

        #[test]
        fn contains_negation(range in strategy(), version in version_strat()) {
            assert_ne!(range.contains(&version), range.complement().contains(&version));
        }

        #[test]
        fn contains_intersection(range in strategy(), version in version_strat()) {
            assert_eq!(range.contains(&version), range.intersection(&Range::singleton(version)) != Range::empty());
        }

        #[test]
        fn contains_bounding_range(range in strategy(), version in version_strat()) {
            if range.contains(&version) {
                assert!(range.bounding_range().map(|b| b.contains(&version)).unwrap_or(false));
            }
        }

        #[test]
        fn from_range_bounds(range in any::<(Bound<u32>, Bound<u32>)>(), version in version_strat()) {
            let rv: Range<_> = Range::from_range_bounds(range);
            assert_eq!(range.contains(&version), rv.contains(&version));
        }

        #[test]
        fn from_range_bounds_round_trip(range in any::<(Bound<u32>, Bound<u32>)>()) {
            let rv: Range<u32> = Range::from_range_bounds(range);
            let rv2: Range<u32> = rv.bounding_range().map(Range::from_range_bounds::<_, u32>).unwrap_or_else(Range::empty);
            assert_eq!(rv, rv2);
        }

        // `contains` must agree with the std `RangeBounds::contains` of each segment.
        #[test]
        fn contains(range in strategy(), versions in proptest::collection::vec(version_strat(), ..30)) {
            for v in versions {
                assert_eq!(range.contains(&v), range.segments.iter().any(|s| RangeBounds::contains(s, &v)));
            }
        }

        // `contains_many` must agree element-wise with repeated `contains` calls.
        #[test]
        fn contains_many(range in strategy(), mut versions in proptest::collection::vec(version_strat(), ..30)) {
            versions.sort();
            assert_eq!(versions.len(), range.contains_many(versions.iter()).count());
            for (a, b) in versions.iter().zip(range.contains_many(versions.iter())) {
                assert_eq!(range.contains(a), b);
            }
        }

        // `simplify` must agree with the original on all probed versions
        // and never grow the number of segments.
        #[test]
        fn simplify(range in strategy(), mut versions in proptest::collection::vec(version_strat(), ..30)) {
            versions.sort();
            let simp = range.simplify(versions.iter());

            for v in versions {
                assert_eq!(range.contains(&v), simp.contains(&v));
            }
            assert!(simp.segments.len() <= range.segments.len())
        }
    }
}
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 +159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +198 +199 +200 +201 +202 +203 +204 +205 +206 +207 +208 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +222 +223 +224 +225 +226 +227 +228 +229 +230 +231 +232 +233 +234 +235 +236 +237 +238 +239 +240 +241 +242 +243 +244 +245 +246 +247 +248 +249 +250 +251 +252 +253 +254 +255 +256 +257 +258 +259 +260 +261 +262 +263 +264 +265 +266 +267 +268 +269 +270 +271 +272 +273 +274 +275 +276 +277 +278 +279 +280 +281 +282 +283 +284 +285 +286 +287 +288 +289 +290 +291 +292 +293 +294 +295 +296 +297 +298 +299 +300 +301 +302 +303 +304 +305 +306 +307 +308 +309 +310 +311 +312 +313 +314 +315 +316 +317 +318 +319 +320 +321 +322 +323 +324 +325 +326 +327 +328 +329 +330 +331 +332 +333 +334 +335 +336 +337 +338 +339 +340 +341 +342 +343 +344 +345 +346 +347 +348 +349 +350 +351 +352 +353 +354 +355 +356 +357 +358 +359 +360 +361 +362 +363 +364 +365 +366 +367 +368 +369 +370 +371 +372 +373 +374 +375 +376 +377 +378 +379 +380 +381 +382 +383 +384 +385 +386 +387 +388 +389 +390 +391 +392 +393 +394 +395 +396 +397 +398 +399 +400 +401 +402 +403 +404 +405 +406 +407 +408 +409 +410 +411 +412 +413 +414 +415 +416 +417 +418 +419 +420 +421 
+422 +423 +424 +425 +426 +427 +428 +429 +430 +431 +432 +433 +434 +435 +436 +437 +438 +439 +440 +441 +442 +443 +444 +445 +446 +447 +448 +449 +450 +451 +452 +453 +454 +455 +456 +457 +458 +459 +460 +461 +462 +463 +464 +465 +466 +467 +468 +469 +470 +471 +472 +473 +474 +475 +476 +477 +478 +479 +480 +481 +482 +483 +484 +485 +486 +487 +488 +489 +490 +491 +492 +493 +494 +495 +496 +497 +498 +499 +500 +501 +502 +503 +504 +505 +506 +507 +508 +509 +510 +511 +512 +513 +514 +515 +516 +517 +518 +519 +520 +521 +522 +523 +524 +525 +526 +527 +528 +529 +530 +531 +532 +533 +534 +535 +536 +537 +538 +539 +540 +541 +542 +543 +544 +545 +546 +547 +548 +549 +550 +551 +552 +553 +554 +555 +556 +557 +558 +559 +560 +561 +562 +563 +564 +565 +566 +567 +568 +569 +570 +571 +572 +573 +574 +575 +576 +577 +578 +579 +580 +581 +582 +583 +584 +585 +586 +587 +588 +589 +590 +
// SPDX-License-Identifier: MPL-2.0
+
+//! Build a report as clear as possible as to why
+//! dependency solving failed.
+
+use std::fmt;
+use std::ops::Deref;
+use std::sync::Arc;
+
+use crate::package::Package;
+use crate::term::Term;
+use crate::type_aliases::Map;
+use crate::version_set::VersionSet;
+
/// Reporter trait.
///
/// Implementors turn a [DerivationTree] (the proof of why resolution failed)
/// into a human-consumable value of type [Output](Reporter::Output).
pub trait Reporter<P: Package, VS: VersionSet> {
    /// Output type of the report.
    type Output;

    /// Generate a report from the derivation tree
    /// describing the resolution failure using the default formatter.
    fn report(derivation_tree: &DerivationTree<P, VS>) -> Self::Output;

    /// Generate a report from the derivation tree
    /// describing the resolution failure using a custom formatter.
    fn report_with_formatter(
        derivation_tree: &DerivationTree<P, VS>,
        formatter: &impl ReportFormatter<P, VS, Output = Self::Output>,
    ) -> Self::Output;
}
+
/// Derivation tree resulting in the impossibility
/// to solve the dependencies of our root package.
///
/// A node is either a leaf (an [External] incompatibility with its own reason)
/// or an inner node ([Derived]) combining two child incompatibilities.
#[derive(Debug, Clone)]
pub enum DerivationTree<P: Package, VS: VersionSet> {
    /// External incompatibility.
    External(External<P, VS>),
    /// Incompatibility derived from two others.
    Derived(Derived<P, VS>),
}
+
/// Incompatibilities that are not derived from others,
/// they have their own reason.
#[derive(Debug, Clone)]
pub enum External<P: Package, VS: VersionSet> {
    /// Initial incompatibility aiming at picking the root package for the first decision.
    NotRoot(P, VS::V),
    /// There are no versions in the given set for this package.
    NoVersions(P, VS),
    /// Dependencies of the package are unavailable for versions in that set.
    UnavailableDependencies(P, VS),
    /// Incompatibility coming from the dependencies of a given package.
    /// Reads as: package 1 (in set 1) depends on package 2 (in set 2).
    FromDependencyOf(P, VS, P, VS),
}
+
/// Incompatibility derived from two others.
#[derive(Debug, Clone)]
pub struct Derived<P: Package, VS: VersionSet> {
    /// Terms of the incompatibility.
    pub terms: Map<P, Term<VS>>,
    /// Indicate if that incompatibility is present multiple times
    /// in the derivation tree.
    /// If that is the case, it has a unique id, provided in that option.
    /// Then, we may want to only explain it once,
    /// and refer to the explanation for the other times.
    pub shared_id: Option<usize>,
    /// First cause.
    /// Stored behind an [Arc] so shared subtrees are not deep-copied.
    pub cause1: Arc<DerivationTree<P, VS>>,
    /// Second cause.
    pub cause2: Arc<DerivationTree<P, VS>>,
}
+
impl<P: Package, VS: VersionSet> DerivationTree<P, VS> {
    /// Merge the [NoVersions](External::NoVersions) external incompatibilities
    /// with the other one they are matched with
    /// in a derived incompatibility.
    /// This cleans up quite nicely the generated report.
    /// You might want to do this if you know that the
    /// [DependencyProvider](crate::solver::DependencyProvider)
    /// was not run in some kind of offline mode that may not
    /// have access to all versions existing.
    pub fn collapse_no_versions(&mut self) {
        match self {
            // Leaf node: nothing to collapse.
            DerivationTree::External(_) => {}
            DerivationTree::Derived(derived) => {
                // `Arc::make_mut` gives mutable access, cloning the child
                // only if the Arc is shared (copy-on-write).
                match (
                    Arc::make_mut(&mut derived.cause1),
                    Arc::make_mut(&mut derived.cause2),
                ) {
                    (DerivationTree::External(External::NoVersions(p, r)), ref mut cause2) => {
                        cause2.collapse_no_versions();
                        // If the merge is not handled (`None`), keep `self` as-is.
                        *self = cause2
                            .clone()
                            .merge_no_versions(p.to_owned(), r.to_owned())
                            .unwrap_or_else(|| self.to_owned());
                    }
                    (ref mut cause1, DerivationTree::External(External::NoVersions(p, r))) => {
                        cause1.collapse_no_versions();
                        *self = cause1
                            .clone()
                            .merge_no_versions(p.to_owned(), r.to_owned())
                            .unwrap_or_else(|| self.to_owned());
                    }
                    _ => {
                        // Neither direct cause is a NoVersions leaf:
                        // recurse into both subtrees.
                        Arc::make_mut(&mut derived.cause1).collapse_no_versions();
                        Arc::make_mut(&mut derived.cause2).collapse_no_versions();
                    }
                }
            }
        }
    }

    /// Merge a [NoVersions](External::NoVersions) incompatibility about
    /// `package`/`set` into this node, widening the matching version set
    /// with a union. Returns `None` when the merge is not handled.
    fn merge_no_versions(self, package: P, set: VS) -> Option<Self> {
        match self {
            // TODO: take care of the Derived case.
            // Once done, we can remove the Option.
            DerivationTree::Derived(_) => Some(self),
            DerivationTree::External(External::NotRoot(_, _)) => {
                panic!("How did we end up with a NoVersions merged with a NotRoot?")
            }
            DerivationTree::External(External::NoVersions(_, r)) => Some(DerivationTree::External(
                External::NoVersions(package, set.union(&r)),
            )),
            DerivationTree::External(External::UnavailableDependencies(_, r)) => Some(
                DerivationTree::External(External::UnavailableDependencies(package, set.union(&r))),
            ),
            DerivationTree::External(External::FromDependencyOf(p1, r1, p2, r2)) => {
                // Widen whichever side of the dependency mentions `package`.
                if p1 == package {
                    Some(DerivationTree::External(External::FromDependencyOf(
                        p1,
                        r1.union(&set),
                        p2,
                        r2,
                    )))
                } else {
                    Some(DerivationTree::External(External::FromDependencyOf(
                        p1,
                        r1,
                        p2,
                        r2.union(&set),
                    )))
                }
            }
        }
    }
}
+
// User-facing, single-sentence rendering of each external incompatibility.
// The wording below is part of the report output; special-casing the full
// version set avoids noisy "in *" clauses.
impl<P: Package, VS: VersionSet> fmt::Display for External<P, VS> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::NotRoot(package, version) => {
                write!(f, "we are solving dependencies of {} {}", package, version)
            }
            Self::NoVersions(package, set) => {
                if set == &VS::full() {
                    write!(f, "there is no available version for {}", package)
                } else {
                    write!(f, "there is no version of {} in {}", package, set)
                }
            }
            Self::UnavailableDependencies(package, set) => {
                if set == &VS::full() {
                    write!(f, "dependencies of {} are unavailable", package)
                } else {
                    write!(
                        f,
                        "dependencies of {} at version {} are unavailable",
                        package, set
                    )
                }
            }
            Self::FromDependencyOf(p, set_p, dep, set_dep) => {
                // Omit version sets that cover every version on either side.
                if set_p == &VS::full() && set_dep == &VS::full() {
                    write!(f, "{} depends on {}", p, dep)
                } else if set_p == &VS::full() {
                    write!(f, "{} depends on {} {}", p, dep, set_dep)
                } else if set_dep == &VS::full() {
                    write!(f, "{} {} depends on {}", p, set_p, dep)
                } else {
                    write!(f, "{} {} depends on {} {}", p, set_p, dep, set_dep)
                }
            }
        }
    }
}
+
/// Trait for formatting outputs in the reporter.
///
/// Lets callers customize how individual incompatibilities are rendered
/// without reimplementing the whole report traversal.
pub trait ReportFormatter<P: Package, VS: VersionSet> {
    /// Output type of the report.
    type Output;

    /// Format an [External] incompatibility.
    fn format_external(&self, external: &External<P, VS>) -> Self::Output;

    /// Format terms of an incompatibility.
    fn format_terms(&self, terms: &Map<P, Term<VS>>) -> Self::Output;
}
+
/// Default formatter for the default reporter.
/// Stateless; produces plain [String] output.
#[derive(Default, Debug)]
pub struct DefaultStringReportFormatter;
+
impl<P: Package, VS: VersionSet> ReportFormatter<P, VS> for DefaultStringReportFormatter {
    type Output = String;

    fn format_external(&self, external: &External<P, VS>) -> String {
        // Delegate to the `Display` impl of `External`.
        external.to_string()
    }

    fn format_terms(&self, terms: &Map<P, Term<VS>>) -> Self::Output {
        let terms_vec: Vec<_> = terms.iter().collect();
        match terms_vec.as_slice() {
            // No terms left: the incompatibility is the failure itself.
            [] => "version solving failed".into(),
            // TODO: special case when that unique package is root.
            [(package, Term::Positive(range))] => format!("{} {} is forbidden", package, range),
            [(package, Term::Negative(range))] => format!("{} {} is mandatory", package, range),
            // A positive/negative pair reads naturally as a dependency edge.
            [(p1, Term::Positive(r1)), (p2, Term::Negative(r2))] => {
                self.format_external(&External::FromDependencyOf(p1, r1.clone(), p2, r2.clone()))
            }
            [(p1, Term::Negative(r1)), (p2, Term::Positive(r2))] => {
                self.format_external(&External::FromDependencyOf(p2, r2.clone(), p1, r1.clone()))
            }
            // Three or more terms: list them all.
            slice => {
                let str_terms: Vec<_> = slice.iter().map(|(p, t)| format!("{} {}", p, t)).collect();
                str_terms.join(", ") + " are incompatible"
            }
        }
    }
}
+
/// Default reporter able to generate an explanation as a [String].
///
/// Holds the mutable state of one report-building traversal;
/// a fresh instance is created per report.
pub struct DefaultStringReporter {
    /// Number of explanations already with a line reference.
    ref_count: usize,
    /// Shared nodes that have already been marked with a line reference.
    /// The incompatibility ids are the keys, and the line references are the values.
    shared_with_ref: Map<usize, usize>,
    /// Accumulated lines of the report already generated.
    lines: Vec<String>,
}
+
impl DefaultStringReporter {
    /// Initialize the reporter.
    fn new() -> Self {
        Self {
            ref_count: 0,
            shared_with_ref: Map::default(),
            lines: Vec::new(),
        }
    }

    /// Explain `derived`, then — if it is a shared node not yet referenced —
    /// attach a line reference to the explanation just produced.
    fn build_recursive<P: Package, VS: VersionSet, F: ReportFormatter<P, VS, Output = String>>(
        &mut self,
        derived: &Derived<P, VS>,
        formatter: &F,
    ) {
        self.build_recursive_helper(derived, formatter);
        if let Some(id) = derived.shared_id {
            if self.shared_with_ref.get(&id).is_none() {
                self.add_line_ref();
                self.shared_with_ref.insert(id, self.ref_count);
            }
        };
    }

    /// Core recursion: dispatch on the shapes of the two causes of `current`.
    fn build_recursive_helper<
        P: Package,
        VS: VersionSet,
        F: ReportFormatter<P, VS, Output = String>,
    >(
        &mut self,
        current: &Derived<P, VS>,
        formatter: &F,
    ) {
        match (current.cause1.deref(), current.cause2.deref()) {
            (DerivationTree::External(external1), DerivationTree::External(external2)) => {
                // Simplest case, we just combine two external incompatibilities.
                self.lines.push(Self::explain_both_external(
                    external1,
                    external2,
                    &current.terms,
                    formatter,
                ));
            }
            (DerivationTree::Derived(derived), DerivationTree::External(external)) => {
                // One cause is derived, so we explain this first
                // then we add the one-line external part
                // and finally conclude with the current incompatibility.
                self.report_one_each(derived, external, &current.terms, formatter);
            }
            (DerivationTree::External(external), DerivationTree::Derived(derived)) => {
                // Symmetric to the previous arm.
                self.report_one_each(derived, external, &current.terms, formatter);
            }
            (DerivationTree::Derived(derived1), DerivationTree::Derived(derived2)) => {
                // This is the most complex case since both causes are also derived.
                match (
                    self.line_ref_of(derived1.shared_id),
                    self.line_ref_of(derived2.shared_id),
                ) {
                    // If both causes already have been referenced (shared_id),
                    // the explanation simply uses those references.
                    (Some(ref1), Some(ref2)) => self.lines.push(Self::explain_both_ref(
                        ref1,
                        derived1,
                        ref2,
                        derived2,
                        &current.terms,
                        formatter,
                    )),
                    // Otherwise, if one only has a line number reference,
                    // we recursively call the one without reference and then
                    // add the one with reference to conclude.
                    (Some(ref1), None) => {
                        self.build_recursive(derived2, formatter);
                        self.lines.push(Self::and_explain_ref(
                            ref1,
                            derived1,
                            &current.terms,
                            formatter,
                        ));
                    }
                    (None, Some(ref2)) => {
                        self.build_recursive(derived1, formatter);
                        self.lines.push(Self::and_explain_ref(
                            ref2,
                            derived2,
                            &current.terms,
                            formatter,
                        ));
                    }
                    // Finally, if no line reference exists yet,
                    // we call recursively the first one and then,
                    // - if this was a shared node, it will get a line ref
                    //   and we can simply recall this with the current node.
                    // - otherwise, we add a line reference to it,
                    //   recursively call on the second node,
                    //   and finally conclude.
                    (None, None) => {
                        self.build_recursive(derived1, formatter);
                        if derived1.shared_id.is_some() {
                            self.lines.push("".into());
                            self.build_recursive(current, formatter);
                        } else {
                            self.add_line_ref();
                            let ref1 = self.ref_count;
                            self.lines.push("".into());
                            self.build_recursive(derived2, formatter);
                            self.lines.push(Self::and_explain_ref(
                                ref1,
                                derived1,
                                &current.terms,
                                formatter,
                            ));
                        }
                    }
                }
            }
        }
    }

    /// Report a derived and an external incompatibility.
    ///
    /// The result will depend on the fact that the derived incompatibility
    /// has already been explained or not.
    fn report_one_each<P: Package, VS: VersionSet, F: ReportFormatter<P, VS, Output = String>>(
        &mut self,
        derived: &Derived<P, VS>,
        external: &External<P, VS>,
        current_terms: &Map<P, Term<VS>>,
        formatter: &F,
    ) {
        match self.line_ref_of(derived.shared_id) {
            Some(ref_id) => self.lines.push(Self::explain_ref_and_external(
                ref_id,
                derived,
                external,
                current_terms,
                formatter,
            )),
            None => self.report_recurse_one_each(derived, external, current_terms, formatter),
        }
    }

    /// Report one derived (without a line ref yet) and one external.
    fn report_recurse_one_each<
        P: Package,
        VS: VersionSet,
        F: ReportFormatter<P, VS, Output = String>,
    >(
        &mut self,
        derived: &Derived<P, VS>,
        external: &External<P, VS>,
        current_terms: &Map<P, Term<VS>>,
        formatter: &F,
    ) {
        match (derived.cause1.deref(), derived.cause2.deref()) {
            // If the derived cause has itself one external prior cause,
            // we can chain the external explanations.
            (DerivationTree::Derived(prior_derived), DerivationTree::External(prior_external)) => {
                self.build_recursive(prior_derived, formatter);
                self.lines.push(Self::and_explain_prior_and_external(
                    prior_external,
                    external,
                    current_terms,
                    formatter,
                ));
            }
            // If the derived cause has itself one external prior cause,
            // we can chain the external explanations.
            (DerivationTree::External(prior_external), DerivationTree::Derived(prior_derived)) => {
                self.build_recursive(prior_derived, formatter);
                self.lines.push(Self::and_explain_prior_and_external(
                    prior_external,
                    external,
                    current_terms,
                    formatter,
                ));
            }
            _ => {
                // No chaining opportunity: explain the derived cause fully,
                // then append the external part.
                self.build_recursive(derived, formatter);
                self.lines.push(Self::and_explain_external(
                    external,
                    current_terms,
                    formatter,
                ));
            }
        }
    }

    // String explanations #####################################################

    /// Simplest case, we just combine two external incompatibilities.
    fn explain_both_external<
        P: Package,
        VS: VersionSet,
        F: ReportFormatter<P, VS, Output = String>,
    >(
        external1: &External<P, VS>,
        external2: &External<P, VS>,
        current_terms: &Map<P, Term<VS>>,
        formatter: &F,
    ) -> String {
        // TODO: order should be chosen to make it more logical.
        format!(
            "Because {} and {}, {}.",
            formatter.format_external(external1),
            formatter.format_external(external2),
            formatter.format_terms(current_terms)
        )
    }

    /// Both causes have already been explained so we use their refs.
    fn explain_both_ref<P: Package, VS: VersionSet, F: ReportFormatter<P, VS, Output = String>>(
        ref_id1: usize,
        derived1: &Derived<P, VS>,
        ref_id2: usize,
        derived2: &Derived<P, VS>,
        current_terms: &Map<P, Term<VS>>,
        formatter: &F,
    ) -> String {
        // TODO: order should be chosen to make it more logical.
        format!(
            "Because {} ({}) and {} ({}), {}.",
            formatter.format_terms(&derived1.terms),
            ref_id1,
            formatter.format_terms(&derived2.terms),
            ref_id2,
            formatter.format_terms(current_terms)
        )
    }

    /// One cause is derived (already explained so one-line),
    /// the other is a one-line external cause,
    /// and finally we conclude with the current incompatibility.
    fn explain_ref_and_external<
        P: Package,
        VS: VersionSet,
        F: ReportFormatter<P, VS, Output = String>,
    >(
        ref_id: usize,
        derived: &Derived<P, VS>,
        external: &External<P, VS>,
        current_terms: &Map<P, Term<VS>>,
        formatter: &F,
    ) -> String {
        // TODO: order should be chosen to make it more logical.
        format!(
            "Because {} ({}) and {}, {}.",
            formatter.format_terms(&derived.terms),
            ref_id,
            formatter.format_external(external),
            formatter.format_terms(current_terms)
        )
    }

    /// Add an external cause to the chain of explanations.
    fn and_explain_external<
        P: Package,
        VS: VersionSet,
        F: ReportFormatter<P, VS, Output = String>,
    >(
        external: &External<P, VS>,
        current_terms: &Map<P, Term<VS>>,
        formatter: &F,
    ) -> String {
        format!(
            "And because {}, {}.",
            formatter.format_external(external),
            formatter.format_terms(current_terms)
        )
    }

    /// Add an already explained incompat to the chain of explanations.
    fn and_explain_ref<P: Package, VS: VersionSet, F: ReportFormatter<P, VS, Output = String>>(
        ref_id: usize,
        derived: &Derived<P, VS>,
        current_terms: &Map<P, Term<VS>>,
        formatter: &F,
    ) -> String {
        format!(
            "And because {} ({}), {}.",
            formatter.format_terms(&derived.terms),
            ref_id,
            formatter.format_terms(current_terms)
        )
    }

    /// Add an already explained incompat to the chain of explanations.
    fn and_explain_prior_and_external<
        P: Package,
        VS: VersionSet,
        F: ReportFormatter<P, VS, Output = String>,
    >(
        prior_external: &External<P, VS>,
        external: &External<P, VS>,
        current_terms: &Map<P, Term<VS>>,
        formatter: &F,
    ) -> String {
        format!(
            "And because {} and {}, {}.",
            formatter.format_external(prior_external),
            formatter.format_external(external),
            formatter.format_terms(current_terms)
        )
    }

    // Helper functions ########################################################

    /// Bump the reference counter and tag the last emitted line with it,
    /// e.g. `"... (3)"`. No-op on the line if none has been emitted yet.
    fn add_line_ref(&mut self) {
        let new_count = self.ref_count + 1;
        self.ref_count = new_count;
        if let Some(line) = self.lines.last_mut() {
            *line = format!("{} ({})", line, new_count);
        }
    }

    /// Look up the line reference assigned to a shared incompatibility id,
    /// if one has been assigned.
    fn line_ref_of(&self, shared_id: Option<usize>) -> Option<usize> {
        shared_id.and_then(|id| self.shared_with_ref.get(&id).cloned())
    }
}
+
+impl<P: Package, VS: VersionSet> Reporter<P, VS> for DefaultStringReporter {
+ type Output = String;
+
+ fn report(derivation_tree: &DerivationTree<P, VS>) -> Self::Output {
+ let formatter = DefaultStringReportFormatter;
+ match derivation_tree {
+ DerivationTree::External(external) => formatter.format_external(external),
+ DerivationTree::Derived(derived) => {
+ let mut reporter = Self::new();
+ reporter.build_recursive(derived, &formatter);
+ reporter.lines.join("\n")
+ }
+ }
+ }
+
+ fn report_with_formatter(
+ derivation_tree: &DerivationTree<P, VS>,
+ formatter: &impl ReportFormatter<P, VS, Output = Self::Output>,
+ ) -> Self::Output {
+ match derivation_tree {
+ DerivationTree::External(external) => formatter.format_external(external),
+ DerivationTree::Derived(derived) => {
+ let mut reporter = Self::new();
+ reporter.build_recursive(derived, formatter);
+ reporter.lines.join("\n")
+ }
+ }
+ }
+}
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 +159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +198 +199 +200 +201 +202 +203 +204 +205 +206 +207 +208 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +222 +223 +224 +225 +226 +227 +228 +229 +230 +231 +232 +233 +234 +235 +236 +237 +238 +239 +240 +241 +242 +243 +244 +245 +246 +247 +248 +249 +250 +251 +252 +253 +254 +255 +256 +257 +258 +259 +260 +261 +262 +263 +264 +265 +266 +267 +268 +269 +270 +271 +272 +273 +274 +275 +276 +277 +278 +279 +280 +281 +282 +283 +284 +285 +286 +287 +288 +289 +290 +291 +292 +293 +294 +295 +296 +297 +298 +299 +300 +301 +302 +303 +304 +305 +306 +307 +308 +309 +310 +311 +312 +313 +314 +315 +316 +317 +318 +319 +320 +321 +322 +323 +324 +325 +326 +327 +328 +329 +330 +331 +332 +333 +334 +335 +336 +337 +338 +339 +340 +341 +342 +343 +344 +345 +346 +347 +348 +349 +350 +351 +352 +353 +354 +355 +356 +357 +358 +359 +360 +361 +362 +363 +364 +365 +366 +367 +368 +369 +370 +371 +372 +373 +374 +375 +376 +
// SPDX-License-Identifier: MPL-2.0
+
+//! PubGrub version solving algorithm.
+//!
+//! It consists of efficiently finding a set of packages and versions
+//! that satisfy all the constraints of a given project dependencies.
+//! In addition, when that is not possible,
+//! PubGrub tries to provide a very human-readable and clear
+//! explanation as to why that failed.
+//! Below is an example of explanation present in
+//! the introductory blog post about PubGrub
+//!
+//! ```txt
+//! Because dropdown >=2.0.0 depends on icons >=2.0.0 and
+//! root depends on icons <2.0.0, dropdown >=2.0.0 is forbidden.
+//!
+//! And because menu >=1.1.0 depends on dropdown >=2.0.0,
+//! menu >=1.1.0 is forbidden.
+//!
+//! And because menu <1.1.0 depends on dropdown >=1.0.0 <2.0.0
+//! which depends on intl <4.0.0, every version of menu
+//! requires intl <4.0.0.
+//!
+//! So, because root depends on both menu >=1.0.0 and intl >=5.0.0,
+//! version solving failed.
+//! ```
+//!
+//! The algorithm is generic and works for any type of dependency system
+//! as long as packages (P) and versions (V) implement
+//! the [Package] and [Version](crate::version::Version) traits.
+//! [Package] is strictly equivalent and automatically generated
+//! for any type that implements [Clone] + [Eq] + [Hash] + [Debug] + [Display](std::fmt::Display).
+//! [Version](crate::version::Version) simply states that versions are ordered,
+//! that there should be
+//! a minimal [lowest](crate::version::Version::lowest) version (like 0.0.0 in semantic versions),
+//! and that for any version, it is possible to compute
+//! what the next version closest to this one is ([bump](crate::version::Version::bump)).
+//! For semantic versions, [bump](crate::version::Version::bump) corresponds to
+//! an increment of the patch number.
+//!
+//! ## API
+//!
+//! ```
+//! # use std::convert::Infallible;
+//! # use pubgrub::solver::{resolve, OfflineDependencyProvider};
+//! # use pubgrub::version::NumberVersion;
+//! # use pubgrub::error::PubGrubError;
+//! # use pubgrub::range::Range;
+//! #
+//! # type NumVS = Range<NumberVersion>;
+//! #
+//! # fn try_main() -> Result<(), PubGrubError<&'static str, NumVS, Infallible>> {
+//! # let dependency_provider = OfflineDependencyProvider::<&str, NumVS>::new();
+//! # let package = "root";
+//! # let version = 1;
+//! let solution = resolve(&dependency_provider, package, version)?;
+//! # Ok(())
+//! # }
+//! # fn main() {
+//! # assert!(matches!(try_main(), Err(PubGrubError::NoSolution(_))));
+//! # }
+//! ```
+//!
+//! Where `dependency_provider` supplies the list of available packages and versions,
+//! as well as the dependencies of every available package
+//! by implementing the [DependencyProvider] trait.
+//! The call to [resolve] for a given package at a given version
+//! will compute the set of packages and versions needed
+//! to satisfy the dependencies of that package and version pair.
+//! If there is no solution, the reason will be provided as clear as possible.
+
+use std::cmp::Reverse;
+use std::collections::{BTreeMap, BTreeSet as Set};
+use std::convert::Infallible;
+use std::error::Error;
+
+use crate::error::PubGrubError;
+use crate::internal::core::State;
+use crate::internal::incompatibility::Incompatibility;
+use crate::package::Package;
+use crate::type_aliases::{DependencyConstraints, Map, SelectedDependencies};
+use crate::version_set::VersionSet;
+use log::{debug, info};
+
+/// Main function of the library.
+/// Finds a set of packages satisfying dependency bounds for a given package + version pair.
+#[allow(clippy::type_complexity)]
+pub fn resolve<P: Package, VS: VersionSet, DP: DependencyProvider<P, VS>>(
+ dependency_provider: &DP,
+ package: P,
+ version: impl Into<VS::V>,
+) -> Result<SelectedDependencies<P, VS::V>, PubGrubError<P, VS, DP::Err>> {
+ let mut state = State::init(package.clone(), version.into());
+ let mut added_dependencies: Map<P, Set<VS::V>> = Map::default();
+ let mut next = package;
+ loop {
+ dependency_provider
+ .should_cancel()
+ .map_err(|err| PubGrubError::ErrorInShouldCancel(err))?;
+
+ info!("unit_propagation: {}", &next);
+ state.unit_propagation(next)?;
+
+ debug!(
+ "Partial solution after unit propagation: {}",
+ state.partial_solution
+ );
+
+ let Some(highest_priority_pkg) = state
+ .partial_solution
+ .pick_highest_priority_pkg(|p, r| dependency_provider.prioritize(p, r))
+ else {
+ return Ok(state.partial_solution.extract_solution());
+ };
+ next = highest_priority_pkg;
+
+ let term_intersection = state
+ .partial_solution
+ .term_intersection_for_package(&next)
+ .ok_or_else(|| {
+ PubGrubError::Failure("a package was chosen but we don't have a term.".into())
+ })?;
+ let decision = dependency_provider
+ .choose_version(&next, term_intersection.unwrap_positive())
+ .map_err(PubGrubError::ErrorChoosingPackageVersion)?;
+ info!("DP chose: {} @ {:?}", next, decision);
+
+ // Pick the next compatible version.
+ let v = match decision {
+ None => {
+ let inc = Incompatibility::no_versions(next.clone(), term_intersection.clone());
+ state.add_incompatibility(inc);
+ continue;
+ }
+ Some(x) => x,
+ };
+
+ if !term_intersection.contains(&v) {
+ return Err(PubGrubError::Failure(
+ "choose_package_version picked an incompatible version".into(),
+ ));
+ }
+
+ let is_new_dependency = added_dependencies
+ .entry(next.clone())
+ .or_default()
+ .insert(v.clone());
+
+ if is_new_dependency {
+ // Retrieve that package dependencies.
+ let p = &next;
+ let dependencies = dependency_provider.get_dependencies(p, &v).map_err(|err| {
+ PubGrubError::ErrorRetrievingDependencies {
+ package: p.clone(),
+ version: v.clone(),
+ source: err,
+ }
+ })?;
+
+ let known_dependencies = match dependencies {
+ Dependencies::Unknown => {
+ state.add_incompatibility(Incompatibility::unavailable_dependencies(
+ p.clone(),
+ v.clone(),
+ ));
+ continue;
+ }
+ Dependencies::Known(x) if x.contains_key(p) => {
+ return Err(PubGrubError::SelfDependency {
+ package: p.clone(),
+ version: v,
+ });
+ }
+ Dependencies::Known(x) => x,
+ };
+
+ // Add that package and version if the dependencies are not problematic.
+ let dep_incompats = state.add_incompatibility_from_dependencies(
+ p.clone(),
+ v.clone(),
+ &known_dependencies,
+ );
+
+ state.partial_solution.add_version(
+ p.clone(),
+ v,
+ dep_incompats,
+ &state.incompatibility_store,
+ );
+ } else {
+ // `dep_incompats` are already in `incompatibilities` so we know there are not satisfied
+ // terms and can add the decision directly.
+ info!("add_decision (not first time): {} @ {}", &next, v);
+ state.partial_solution.add_decision(next.clone(), v);
+ }
+ }
+}
+
/// An enum used by [DependencyProvider] that holds information about package dependencies.
/// For each [Package] there is a set of versions allowed as a dependency.
#[derive(Clone)]
pub enum Dependencies<P: Package, VS: VersionSet> {
    /// Package dependencies are unavailable.
    /// Note: this is distinct from a provider error — it records that the
    /// dependencies could not be determined, without aborting resolution.
    Unknown,
    /// Container for all available package versions.
    Known(DependencyConstraints<P, VS>),
}
+
/// Trait that allows the algorithm to retrieve available packages and their dependencies.
/// An implementor needs to be supplied to the [resolve] function.
pub trait DependencyProvider<P: Package, VS: VersionSet> {
    /// [Decision making](https://github.com/dart-lang/pub/blob/master/doc/solver.md#decision-making)
    /// is the process of choosing the next package
    /// and version that will be appended to the partial solution.
    ///
    /// Every time such a decision must be made, the resolver looks at all the potential valid
    /// packages that have changed, and asks the dependency provider how important each one is.
    /// For each one it calls `prioritize` with the name of the package and the current set of
    /// acceptable versions.
    /// The resolver will then pick the package with the highest priority from all the potential valid
    /// packages.
    ///
    /// The strategy employed to prioritize packages
    /// cannot change the existence of a solution or not,
    /// but can drastically change the performances of the solver,
    /// or the properties of the solution.
    /// The documentation of Pub (PubGrub implementation for the dart programming language)
    /// states the following:
    ///
    /// > Pub chooses the latest matching version of the package
    /// > with the fewest versions that match the outstanding constraint.
    /// > This tends to find conflicts earlier if any exist,
    /// > since these packages will run out of versions to try more quickly.
    /// > But there's likely room for improvement in these heuristics.
    ///
    /// Note: the resolver may call this even when the range has not changed,
    /// if it is more efficient for the resolver's internal data structures.
    fn prioritize(&self, package: &P, range: &VS) -> Self::Priority;
    /// The type returned from `prioritize`. The resolver does not care what type this is
    /// as long as it can pick a largest one and clone it.
    ///
    /// [std::cmp::Reverse] can be useful if you want to pick the package with
    /// the fewest versions that match the outstanding constraint.
    type Priority: Ord + Clone;

    /// The kind of error returned from these methods.
    ///
    /// Returning this signals that resolution should fail with this error.
    type Err: Error;

    /// Once the resolver has found the highest `Priority` package from all potential valid
    /// packages, it needs to know what version of that package to use. The most common pattern
    /// is to select the largest version that the range contains.
    fn choose_version(&self, package: &P, range: &VS) -> Result<Option<VS::V>, Self::Err>;

    /// Retrieves the package dependencies.
    /// Return [Dependencies::Unknown] if its dependencies are unknown.
    fn get_dependencies(
        &self,
        package: &P,
        version: &VS::V,
    ) -> Result<Dependencies<P, VS>, Self::Err>;

    /// This is called fairly regularly during the resolution,
    /// if it returns an Err then resolution will be terminated.
    /// This is helpful if you want to add some form of early termination like a timeout,
    /// or you want to add some form of user feedback if things are taking a while.
    /// If not provided the resolver will run as long as needed.
    fn should_cancel(&self) -> Result<(), Self::Err> {
        Ok(())
    }
}
+
/// A basic implementation of [DependencyProvider].
///
/// All dependency information is registered up front with
/// [add_dependencies](OfflineDependencyProvider::add_dependencies);
/// no I/O happens during resolution.
#[derive(Debug, Clone, Default)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(
    feature = "serde",
    serde(bound(
        serialize = "VS::V: serde::Serialize, VS: serde::Serialize, P: serde::Serialize",
        deserialize = "VS::V: serde::Deserialize<'de>, VS: serde::Deserialize<'de>, P: serde::Deserialize<'de>"
    ))
)]
#[cfg_attr(feature = "serde", serde(transparent))]
pub struct OfflineDependencyProvider<P: Package, VS: VersionSet> {
    // BTreeMap keeps versions ordered, which `choose_version` relies on
    // to pick the newest matching version via `keys().rev()`.
    dependencies: Map<P, BTreeMap<VS::V, DependencyConstraints<P, VS>>>,
}
+
impl<P: Package, VS: VersionSet> OfflineDependencyProvider<P, VS> {
    /// Creates an empty OfflineDependencyProvider with no dependencies.
    pub fn new() -> Self {
        Self {
            dependencies: Map::default(),
        }
    }

    /// Registers the dependencies of a package and version pair.
    /// Dependencies must be added with a single call to
    /// [add_dependencies](OfflineDependencyProvider::add_dependencies).
    /// All subsequent calls to
    /// [add_dependencies](OfflineDependencyProvider::add_dependencies) for a given
    /// package version pair will replace the dependencies by the new ones.
    ///
    /// The API does not allow to add dependencies one at a time to uphold an assumption that
    /// [OfflineDependencyProvider.get_dependencies(p, v)](OfflineDependencyProvider::get_dependencies)
    /// provides all dependencies of a given package (p) and version (v) pair.
    pub fn add_dependencies<I: IntoIterator<Item = (P, VS)>>(
        &mut self,
        package: P,
        version: impl Into<VS::V>,
        dependencies: I,
    ) {
        let package_deps = dependencies.into_iter().collect();
        let v = version.into();
        // Overwrite (not merge) any previously registered constraints
        // for this (package, version) pair.
        *self
            .dependencies
            .entry(package)
            .or_default()
            .entry(v)
            .or_default() = package_deps;
    }

    /// Lists packages that have been saved.
    pub fn packages(&self) -> impl Iterator<Item = &P> {
        self.dependencies.keys()
    }

    /// Lists versions of saved packages in sorted order.
    /// Returns [None] if no information is available regarding that package.
    pub fn versions(&self, package: &P) -> Option<impl Iterator<Item = &VS::V>> {
        self.dependencies.get(package).map(|k| k.keys())
    }

    /// Lists dependencies of a given package and version.
    /// Returns [None] if no information is available regarding that package and version pair.
    fn dependencies(&self, package: &P, version: &VS::V) -> Option<DependencyConstraints<P, VS>> {
        self.dependencies.get(package)?.get(version).cloned()
    }
}
+
/// An implementation of [DependencyProvider] that
/// contains all dependency information available in memory.
/// Currently packages are picked with the fewest versions contained in the constraints first.
/// But, that may change in new versions if better heuristics are found.
/// Versions are picked with the newest versions first.
impl<P: Package, VS: VersionSet> DependencyProvider<P, VS> for OfflineDependencyProvider<P, VS> {
    // This provider never fails: all data is in memory.
    type Err = Infallible;

    fn choose_version(&self, package: &P, range: &VS) -> Result<Option<VS::V>, Infallible> {
        // Newest-first scan of the sorted version keys.
        Ok(self
            .dependencies
            .get(package)
            .and_then(|versions| versions.keys().rev().find(|v| range.contains(v)).cloned()))
    }

    type Priority = Reverse<usize>;
    fn prioritize(&self, package: &P, range: &VS) -> Self::Priority {
        // Fewest matching versions => highest priority (hence `Reverse`),
        // which tends to surface conflicts earlier.
        Reverse(
            self.dependencies
                .get(package)
                .map(|versions| versions.keys().filter(|v| range.contains(v)).count())
                .unwrap_or(0),
        )
    }

    fn get_dependencies(
        &self,
        package: &P,
        version: &VS::V,
    ) -> Result<Dependencies<P, VS>, Infallible> {
        Ok(match self.dependencies(package, version) {
            None => Dependencies::Unknown,
            Some(dependencies) => Dependencies::Known(dependencies),
        })
    }
}
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 +159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +198 +199 +200 +201 +202 +203 +204 +205 +206 +207 +208 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +
// SPDX-License-Identifier: MPL-2.0
+
+//! A term is the fundamental unit of operation of the PubGrub algorithm.
+//! It is a positive or negative expression regarding a set of versions.
+
+use crate::version_set::VersionSet;
+use std::fmt::{self, Display};
+
/// A positive or negative expression regarding a set of versions.
///
/// Note that a negative term is true when no version is selected at all,
/// so `Negative(set)` is not equivalent to `Positive(set.complement())`,
/// which requires some version to be selected.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Term<VS: VersionSet> {
    /// For example, "1.0.0 <= v < 2.0.0" is a positive expression
    /// that is evaluated true if a version is selected
    /// and comprised between version 1.0.0 and version 2.0.0.
    Positive(VS),
    /// The term "not v < 3.0.0" is a negative expression
    /// that is evaluated true if a version is selected >= 3.0.0
    /// or if no version is selected at all.
    Negative(VS),
}
+
+/// Base methods.
+impl<VS: VersionSet> Term<VS> {
+ /// A term that is always true.
+ pub(crate) fn any() -> Self {
+ Self::Negative(VS::empty())
+ }
+
+ /// A term that is never true.
+ pub(crate) fn empty() -> Self {
+ Self::Positive(VS::empty())
+ }
+
+ /// A positive term containing exactly that version.
+ pub(crate) fn exact(version: VS::V) -> Self {
+ Self::Positive(VS::singleton(version))
+ }
+
+ /// Simply check if a term is positive.
+ pub(crate) fn is_positive(&self) -> bool {
+ match self {
+ Self::Positive(_) => true,
+ Self::Negative(_) => false,
+ }
+ }
+
+ /// Negate a term.
+ /// Evaluation of a negated term always returns
+ /// the opposite of the evaluation of the original one.
+ pub(crate) fn negate(&self) -> Self {
+ match self {
+ Self::Positive(set) => Self::Negative(set.clone()),
+ Self::Negative(set) => Self::Positive(set.clone()),
+ }
+ }
+
+ /// Evaluate a term regarding a given choice of version.
+ pub(crate) fn contains(&self, v: &VS::V) -> bool {
+ match self {
+ Self::Positive(set) => set.contains(v),
+ Self::Negative(set) => !(set.contains(v)),
+ }
+ }
+
+ /// Unwrap the set contained in a positive term.
+ /// Will panic if used on a negative set.
+ pub(crate) fn unwrap_positive(&self) -> &VS {
+ match self {
+ Self::Positive(set) => set,
+ _ => panic!("Negative term cannot unwrap positive set"),
+ }
+ }
+
+ /// Unwrap the set contained in a negative term.
+ /// Will panic if used on a positive set.
+ pub(crate) fn unwrap_negative(&self) -> &VS {
+ match self {
+ Self::Negative(set) => set,
+ _ => panic!("Positive term cannot unwrap negative set"),
+ }
+ }
+}
+
+/// Set operations with terms.
+impl<VS: VersionSet> Term<VS> {
+ /// Compute the intersection of two terms.
+ /// If at least one term is positive, the intersection is also positive.
+ pub(crate) fn intersection(&self, other: &Self) -> Self {
+ match (self, other) {
+ (Self::Positive(r1), Self::Positive(r2)) => Self::Positive(r1.intersection(r2)),
+ (Self::Positive(r1), Self::Negative(r2)) => {
+ Self::Positive(r1.intersection(&r2.complement()))
+ }
+ (Self::Negative(r1), Self::Positive(r2)) => {
+ Self::Positive(r1.complement().intersection(r2))
+ }
+ (Self::Negative(r1), Self::Negative(r2)) => Self::Negative(r1.union(r2)),
+ }
+ }
+
+ /// Compute the union of two terms.
+ /// If at least one term is negative, the union is also negative.
+ pub(crate) fn union(&self, other: &Self) -> Self {
+ (self.negate().intersection(&other.negate())).negate()
+ }
+
+ /// Indicate if this term is a subset of another term.
+ /// Just like for sets, we say that t1 is a subset of t2
+ /// if and only if t1 ∩ t2 = t1.
+ #[cfg(test)]
+ pub(crate) fn subset_of(&self, other: &Self) -> bool {
+ self == &self.intersection(other)
+ }
+}
+
/// Describe a relation between a set of terms S and another term t.
///
/// As a shorthand, we say that a term v
/// satisfies or contradicts a term t if {v} satisfies or contradicts it.
///
/// This is the result type of [Term::relation_with].
pub(crate) enum Relation {
    /// We say that a set of terms S "satisfies" a term t
    /// if t must be true whenever every term in S is true.
    Satisfied,
    /// Conversely, S "contradicts" t if t must be false
    /// whenever every term in S is true.
    Contradicted,
    /// If neither of these is true we say that S is "inconclusive" for t.
    Inconclusive,
}
+
+/// Relation between terms.
+impl<VS: VersionSet> Term<VS> {
+ /// Check if a set of terms satisfies this term.
+ ///
+ /// We say that a set of terms S "satisfies" a term t
+ /// if t must be true whenever every term in S is true.
+ ///
+ /// It turns out that this can also be expressed with set operations:
+ /// S satisfies t if and only if ⋂ S ⊆ t
+ #[cfg(test)]
+ fn satisfied_by(&self, terms_intersection: &Self) -> bool {
+ terms_intersection.subset_of(self)
+ }
+
+ /// Check if a set of terms contradicts this term.
+ ///
+ /// We say that a set of terms S "contradicts" a term t
+ /// if t must be false whenever every term in S is true.
+ ///
+ /// It turns out that this can also be expressed with set operations:
+ /// S contradicts t if and only if ⋂ S is disjoint with t
+ /// S contradicts t if and only if (⋂ S) ⋂ t = ∅
+ #[cfg(test)]
+ fn contradicted_by(&self, terms_intersection: &Self) -> bool {
+ terms_intersection.intersection(self) == Self::empty()
+ }
+
+ /// Check if a set of terms satisfies or contradicts a given term.
+ /// Otherwise the relation is inconclusive.
+ pub(crate) fn relation_with(&self, other_terms_intersection: &Self) -> Relation {
+ let full_intersection = self.intersection(other_terms_intersection);
+ if &full_intersection == other_terms_intersection {
+ Relation::Satisfied
+ } else if full_intersection == Self::empty() {
+ Relation::Contradicted
+ } else {
+ Relation::Inconclusive
+ }
+ }
+}
+
// Identity AsRef impl.
// NOTE(review): presumably lets generic APIs accept `Term<VS>` or `&Term<VS>`
// via `impl AsRef<Term<VS>>` — confirm against callers.
impl<VS: VersionSet> AsRef<Self> for Term<VS> {
    fn as_ref(&self) -> &Self {
        self
    }
}
+
+// REPORT ######################################################################
+
impl<VS: VersionSet + Display> Display for Term<VS> {
    /// A positive term displays as its set; a negative term as `Not ( set )`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Positive(set) => write!(f, "{}", set),
            Self::Negative(set) => write!(f, "Not ( {} )", set),
        }
    }
}
+
+// TESTS #######################################################################
+
#[cfg(test)]
pub mod tests {
    use super::*;
    use crate::range::Range;
    use proptest::prelude::*;

    /// Proptest strategy producing arbitrary positive or negative terms
    /// over `Range<u32>` sets (reuses the range module's strategy).
    pub fn strategy() -> impl Strategy<Value = Term<Range<u32>>> {
        prop_oneof![
            crate::range::tests::strategy().prop_map(Term::Positive),
            crate::range::tests::strategy().prop_map(Term::Negative),
        ]
    }

    proptest! {

        // Testing relation --------------------------------

        // Property: `relation_with` must agree with the definitional
        // predicates `satisfied_by` and `contradicted_by`, and
        // `Inconclusive` means neither holds.
        #[test]
        fn relation_with(term1 in strategy(), term2 in strategy()) {
            match term1.relation_with(&term2) {
                Relation::Satisfied => assert!(term1.satisfied_by(&term2)),
                Relation::Contradicted => assert!(term1.contradicted_by(&term2)),
                Relation::Inconclusive => {
                    assert!(!term1.satisfied_by(&term2));
                    assert!(!term1.contradicted_by(&term2));
                }
            }
        }

    }
}
+
// SPDX-License-Identifier: MPL-2.0
+
+//! Publicly exported type aliases.
+
/// Map implementation used by the library.
/// Backed by `rustc_hash::FxHashMap` (fast, non-cryptographic hashing).
pub type Map<K, V> = rustc_hash::FxHashMap<K, V>;

/// Set implementation used by the library.
/// Backed by `rustc_hash::FxHashSet`.
pub type Set<V> = rustc_hash::FxHashSet<V>;

/// Concrete dependencies picked by the library during [resolve](crate::solver::resolve)
/// from [DependencyConstraints]: one chosen version per selected package.
pub type SelectedDependencies<P, V> = Map<P, V>;

/// Holds information about all possible versions a given package can accept.
/// There is a difference in semantics between an empty map
/// inside [DependencyConstraints] and [Dependencies::Unknown](crate::solver::Dependencies::Unknown):
/// the former means the package has no dependency and it is a known fact,
/// while the latter means they could not be fetched by the [DependencyProvider](crate::solver::DependencyProvider).
pub type DependencyConstraints<P, VS> = Map<P, VS>;
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 +159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +198 +199 +200 +201 +202 +203 +204 +205 +206 +207 +208 +209 +210 +211 +212 +213 +214 +215 +216 +217 +218 +219 +220 +221 +222 +223 +224 +225 +226 +227 +228 +229 +230 +231 +232 +233 +234 +235 +236 +237 +238 +239 +240 +241 +242 +243 +244 +245 +246 +247 +248 +249 +250 +251 +252 +253 +254 +255 +256 +257 +258 +259 +260 +261 +262 +263 +264 +265 +266 +267 +268 +269 +270 +271 +272 +273 +274 +275 +276 +277 +278 +279 +280 +281 +282 +283 +284 +285 +286 +287 +288 +289 +
// SPDX-License-Identifier: MPL-2.0
+
+//! Traits and implementations to create and compare versions.
+
+use std::fmt::{self, Debug, Display};
+use std::str::FromStr;
+use thiserror::Error;
+
/// Versions have a minimal version (a "0" version)
/// and are ordered such that every version has a next one.
pub trait Version: Clone + Ord + Debug + Display {
    /// Returns the lowest version.
    fn lowest() -> Self;
    /// Returns the next version, the smallest strictly higher version.
    fn bump(&self) -> Self;
}

/// Type for semantic versions: major.minor.patch.
///
/// The derived `Ord` compares fields in declaration order,
/// i.e. lexicographically by (major, minor, patch), which matches
/// semantic-version precedence for this three-number form.
#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct SemanticVersion {
    major: u32,
    minor: u32,
    patch: u32,
}
+
#[cfg(feature = "serde")]
impl serde::Serialize for SemanticVersion {
    /// Serialize as the "major.minor.patch" display string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&self.to_string())
    }
}
+
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for SemanticVersion {
    /// Deserialize from a "major.minor.patch" string, delegating to `FromStr`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        s.parse().map_err(serde::de::Error::custom)
    }
}
+
// Constructors
impl SemanticVersion {
    /// Create a version with "major", "minor" and "patch" values.
    /// `version = major.minor.patch`
    pub fn new(major: u32, minor: u32, patch: u32) -> Self {
        Self {
            major,
            minor,
            patch,
        }
    }

    /// Version 0.0.0.
    pub fn zero() -> Self {
        Self::new(0, 0, 0)
    }

    /// Version 1.0.0.
    pub fn one() -> Self {
        Self::new(1, 0, 0)
    }

    /// Version 2.0.0.
    pub fn two() -> Self {
        Self::new(2, 0, 0)
    }
}
+
// Convert a tuple (major, minor, patch) into a version.
impl From<(u32, u32, u32)> for SemanticVersion {
    fn from(tuple: (u32, u32, u32)) -> Self {
        let (major, minor, patch) = tuple;
        Self::new(major, minor, patch)
    }
}

// Convert a &(major, minor, patch) into a version (copies the three u32s).
impl From<&(u32, u32, u32)> for SemanticVersion {
    fn from(tuple: &(u32, u32, u32)) -> Self {
        let (major, minor, patch) = *tuple;
        Self::new(major, minor, patch)
    }
}

// Convert a &version into a version (SemanticVersion is Copy, so this is a plain copy).
impl From<&SemanticVersion> for SemanticVersion {
    fn from(v: &SemanticVersion) -> Self {
        *v
    }
}

// Convert a version into a tuple (major, minor, patch).
impl From<SemanticVersion> for (u32, u32, u32) {
    fn from(v: SemanticVersion) -> Self {
        (v.major, v.minor, v.patch)
    }
}
+
// Bump versions.
impl SemanticVersion {
    /// Bump the patch number of a version.
    /// Note: `patch + 1` panics on u32 overflow in debug builds.
    pub fn bump_patch(self) -> Self {
        Self::new(self.major, self.minor, self.patch + 1)
    }

    /// Bump the minor number of a version, resetting patch to 0.
    pub fn bump_minor(self) -> Self {
        Self::new(self.major, self.minor + 1, 0)
    }

    /// Bump the major number of a version, resetting minor and patch to 0.
    pub fn bump_major(self) -> Self {
        Self::new(self.major + 1, 0, 0)
    }
}
+
/// Error creating [SemanticVersion] from [String].
#[derive(Error, Debug, PartialEq, Eq)]
pub enum VersionParseError {
    /// [SemanticVersion] must contain major, minor, patch versions.
    #[error("version {full_version} must contain 3 numbers separated by dot")]
    NotThreeParts {
        /// [SemanticVersion] that was being parsed.
        full_version: String,
    },
    /// Wrapper around [ParseIntError](core::num::ParseIntError).
    /// The original error is kept as a string so this enum can derive `Eq`.
    #[error("cannot parse '{version_part}' in '{full_version}' as u32: {parse_error}")]
    ParseIntError {
        /// [SemanticVersion] that was being parsed.
        full_version: String,
        /// A version part where parsing failed.
        version_part: String,
        /// A specific error resulted from parsing a part of the version as [u32].
        parse_error: String,
    },
}
+
+impl FromStr for SemanticVersion {
+ type Err = VersionParseError;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let parse_u32 = |part: &str| {
+ part.parse::<u32>().map_err(|e| Self::Err::ParseIntError {
+ full_version: s.to_string(),
+ version_part: part.to_string(),
+ parse_error: e.to_string(),
+ })
+ };
+
+ let mut parts = s.split('.');
+ match (parts.next(), parts.next(), parts.next(), parts.next()) {
+ (Some(major), Some(minor), Some(patch), None) => {
+ let major = parse_u32(major)?;
+ let minor = parse_u32(minor)?;
+ let patch = parse_u32(patch)?;
+ Ok(Self {
+ major,
+ minor,
+ patch,
+ })
+ }
+ _ => Err(Self::Err::NotThreeParts {
+ full_version: s.to_string(),
+ }),
+ }
+ }
+}
+
#[test]
fn from_str_for_semantic_version() {
    let parse = |str: &str| str.parse::<SemanticVersion>();
    // Round-trip: Display output must be parseable.
    assert!(parse(
        &SemanticVersion {
            major: 0,
            minor: 1,
            patch: 0
        }
        .to_string()
    )
    .is_ok());
    assert!(parse("1.2.3").is_ok());
    // Non-numeric part.
    // Note: these assertions pin the exact std::num::ParseIntError messages.
    assert_eq!(
        parse("1.abc.3"),
        Err(VersionParseError::ParseIntError {
            full_version: "1.abc.3".to_owned(),
            version_part: "abc".to_owned(),
            parse_error: "invalid digit found in string".to_owned(),
        })
    );
    // Negative numbers are not valid u32 parts.
    assert_eq!(
        parse("1.2.-3"),
        Err(VersionParseError::ParseIntError {
            full_version: "1.2.-3".to_owned(),
            version_part: "-3".to_owned(),
            parse_error: "invalid digit found in string".to_owned(),
        })
    );
    // Part exceeding u32::MAX.
    assert_eq!(
        parse("1.2.9876543210"),
        Err(VersionParseError::ParseIntError {
            full_version: "1.2.9876543210".to_owned(),
            version_part: "9876543210".to_owned(),
            parse_error: "number too large to fit in target type".to_owned(),
        })
    );
    // Too few parts.
    assert_eq!(
        parse("1.2"),
        Err(VersionParseError::NotThreeParts {
            full_version: "1.2".to_owned(),
        })
    );
    // Trailing dot produces a fourth (empty) part and is rejected.
    assert_eq!(
        parse("1.2.3."),
        Err(VersionParseError::NotThreeParts {
            full_version: "1.2.3.".to_owned(),
        })
    );
}
+
impl Display for SemanticVersion {
    /// Format as "major.minor.patch", the same form accepted by `FromStr`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
    }
}

// Implement Version for SemanticVersion.
impl Version for SemanticVersion {
    /// The lowest semantic version is 0.0.0.
    fn lowest() -> Self {
        Self::zero()
    }
    /// The smallest strictly higher version is the next patch.
    fn bump(&self) -> Self {
        self.bump_patch()
    }
}
+
/// Simplest versions possible: just a nonnegative number (u32).
#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize,))]
#[cfg_attr(feature = "serde", serde(transparent))]
pub struct NumberVersion(pub u32);

// Convert a u32 into a version.
impl From<u32> for NumberVersion {
    fn from(v: u32) -> Self {
        Self(v)
    }
}

// Convert a &u32 into a version.
impl From<&u32> for NumberVersion {
    fn from(v: &u32) -> Self {
        Self(*v)
    }
}

// Convert a &version into a version (NumberVersion is Copy).
impl From<&NumberVersion> for NumberVersion {
    fn from(v: &NumberVersion) -> Self {
        *v
    }
}

// Convert a version into a u32.
impl From<NumberVersion> for u32 {
    fn from(version: NumberVersion) -> Self {
        version.0
    }
}

impl Display for NumberVersion {
    /// Display the inner number directly.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl Version for NumberVersion {
    /// The lowest number version is 0.
    fn lowest() -> Self {
        Self(0)
    }
    /// The next version is the successor number.
    /// Note: `self.0 + 1` panics on u32 overflow in debug builds.
    fn bump(&self) -> Self {
        Self(self.0 + 1)
    }
}
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +
// SPDX-License-Identifier: MPL-2.0
+
+//! As its name suggests, the [VersionSet] trait describes sets of versions.
+//!
+//! One needs to define
//! - the associated type for versions,
+//! - two constructors for the empty set and a singleton set,
+//! - the complement and intersection set operations,
+//! - and a function to evaluate membership of versions.
+//!
+//! Two functions are automatically derived, thanks to the mathematical properties of sets.
+//! You can overwrite those implementations, but we highly recommend that you don't,
+//! except if you are confident in a correct implementation that brings much performance gains.
+//!
+//! It is also extremely important that the `Eq` trait is correctly implemented.
+//! In particular, you can only use `#[derive(Eq, PartialEq)]` if `Eq` is strictly equivalent to the
+//! structural equality, i.e. if version sets have canonical representations.
+//! Such problems may arise if your implementations of `complement()` and `intersection()` do not
+//! return canonical representations so be careful there.
+
+use std::fmt::{Debug, Display};
+
/// Trait describing sets of versions.
///
/// `Eq` must match true set equality: implementors should return canonical
/// representations from `complement()` and `intersection()` (see module docs).
pub trait VersionSet: Debug + Display + Clone + Eq {
    /// Version type associated with the sets manipulated.
    type V: Debug + Display + Clone + Ord;

    // Constructors
    /// Constructor for an empty set containing no version.
    fn empty() -> Self;
    /// Constructor for a set containing exactly one version.
    fn singleton(v: Self::V) -> Self;

    // Operations
    /// Compute the complement of this set.
    fn complement(&self) -> Self;
    /// Compute the intersection with another set.
    fn intersection(&self, other: &Self) -> Self;

    // Membership
    /// Evaluate membership of a version in this set.
    fn contains(&self, v: &Self::V) -> bool;

    // Automatically implemented functions ###########################

    /// Constructor for the set containing all versions.
    /// Automatically implemented as `Self::empty().complement()`.
    fn full() -> Self {
        Self::empty().complement()
    }

    /// Compute the union with another set.
    /// Thanks to set properties (De Morgan), this is automatically implemented as:
    /// `self.complement().intersection(&other.complement()).complement()`
    fn union(&self, other: &Self) -> Self {
        self.complement()
            .intersection(&other.complement())
            .complement()
    }
}
+
fn:
) to \
+ restrict the search to a given item kind.","Accepted kinds are: fn
, mod
, struct
, \
+ enum
, trait
, type
, macro
, \
+ and const
.","Search functions by type signature (e.g., vec -> usize
or \
+ -> vec
or String, enum:Cow -> bool
)","You can look for items with an exact name by putting double quotes around \
+ your request: \"string\"
","Look for functions that accept or return \
+ slices and \
+ arrays by writing \
+ square brackets (e.g., -> [u8]
or [] -> Option
)","Look for items inside another one by searching for a path: vec::Vec
",].map(x=>""+x+"
").join("");const div_infos=document.createElement("div");addClass(div_infos,"infos");div_infos.innerHTML="${value.replaceAll(" ", " ")}
`}else{error[index]=value}});output+=`