Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Cleanup internal imports #243

Merged
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 3 additions & 8 deletions benches/large_case.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,11 @@
// SPDX-License-Identifier: MPL-2.0
use std::time::Duration;

extern crate criterion;
use self::criterion::*;

use pubgrub::package::Package;
use pubgrub::range::Range;
use pubgrub::solver::{resolve, OfflineDependencyProvider};
use pubgrub::version::SemanticVersion;
use pubgrub::version_set::VersionSet;
use criterion::*;
use serde::de::Deserialize;

use pubgrub::{resolve, OfflineDependencyProvider, Package, Range, SemanticVersion, VersionSet};

fn bench<'a, P: Package + Deserialize<'a>, VS: VersionSet + Deserialize<'a>>(
b: &mut Bencher,
case: &'a str,
Expand Down
8 changes: 4 additions & 4 deletions examples/unsat_root_message_no_version.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
// SPDX-License-Identifier: MPL-2.0

use std::fmt::{self, Display};

use pubgrub::{
resolve, Derived, OfflineDependencyProvider, PubGrubError, Range, Reporter, SemanticVersion,
resolve, DefaultStringReporter, Derived, External, Map, OfflineDependencyProvider,
PubGrubError, Range, ReportFormatter, Reporter, SemanticVersion, Term,
};

use pubgrub::{DefaultStringReporter, External, Map, ReportFormatter, Term};
use std::fmt::{self, Display};

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum Package {
Root,
Expand Down
17 changes: 6 additions & 11 deletions src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,7 @@

use thiserror::Error;

use crate::report::DerivationTree;
use crate::solver::DependencyProvider;
use crate::{DependencyProvider, DerivationTree};

/// There is no solution for this set of dependencies.
pub type NoSolutionError<DP> = DerivationTree<
Expand All @@ -21,10 +20,8 @@ pub enum PubGrubError<DP: DependencyProvider> {
#[error("No solution")]
NoSolution(NoSolutionError<DP>),

/// Error arising when the implementer of
/// [DependencyProvider]
/// returned an error in the method
/// [get_dependencies](crate::solver::DependencyProvider::get_dependencies).
/// Error arising when the implementer of [DependencyProvider] returned an error in the method
/// [get_dependencies](DependencyProvider::get_dependencies).
#[error("Retrieving dependencies of {package} {version} failed")]
ErrorRetrievingDependencies {
/// Package whose dependencies we want.
Expand All @@ -49,15 +46,13 @@ pub enum PubGrubError<DP: DependencyProvider> {
version: DP::V,
},

/// Error arising when the implementer of
/// [DependencyProvider]
/// returned an error in the method
/// [choose_version](crate::solver::DependencyProvider::choose_version).
/// Error arising when the implementer of [DependencyProvider] returned an error in the method
/// [choose_version](DependencyProvider::choose_version).
#[error("Decision making failed")]
ErrorChoosingPackageVersion(#[source] DP::Err),

/// Error arising when the implementer of [DependencyProvider]
/// returned an error in the method [should_cancel](crate::solver::DependencyProvider::should_cancel).
/// returned an error in the method [should_cancel](DependencyProvider::should_cancel).
#[error("We should cancel")]
ErrorInShouldCancel(#[source] DP::Err),

Expand Down
14 changes: 7 additions & 7 deletions src/internal/arena.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use std::ops::{Index, Range};
/// that we actually don't need since it is phantom.
///
/// <https://github.com/rust-lang/rust/issues/26925>
pub struct Id<T> {
pub(crate) struct Id<T> {
raw: u32,
_ty: PhantomData<fn() -> T>,
}
Expand Down Expand Up @@ -48,7 +48,7 @@ impl<T> fmt::Debug for Id<T> {
}

impl<T> Id<T> {
pub fn into_raw(self) -> usize {
pub(crate) fn into_raw(self) -> usize {
self.raw as usize
}
fn from(n: u32) -> Self {
Expand All @@ -57,7 +57,7 @@ impl<T> Id<T> {
_ty: PhantomData,
}
}
pub fn range_to_iter(range: Range<Self>) -> impl Iterator<Item = Self> {
pub(crate) fn range_to_iter(range: Range<Self>) -> impl Iterator<Item = Self> {
let start = range.start.raw;
let end = range.end.raw;
(start..end).map(Self::from)
Expand All @@ -71,7 +71,7 @@ impl<T> Id<T> {
/// to have references between those items.
/// They are all dropped at once when the arena is dropped.
#[derive(Clone, PartialEq, Eq)]
pub struct Arena<T> {
pub(crate) struct Arena<T> {
data: Vec<T>,
}

Expand All @@ -91,17 +91,17 @@ impl<T> Default for Arena<T> {
}

impl<T> Arena<T> {
pub fn new() -> Self {
pub(crate) fn new() -> Self {
Self { data: Vec::new() }
}

pub fn alloc(&mut self, value: T) -> Id<T> {
pub(crate) fn alloc(&mut self, value: T) -> Id<T> {
let raw = self.data.len();
self.data.push(value);
Id::from(raw as u32)
}

pub fn alloc_iter<I: Iterator<Item = T>>(&mut self, values: I) -> Range<Id<T>> {
pub(crate) fn alloc_iter<I: Iterator<Item = T>>(&mut self, values: I) -> Range<Id<T>> {
let start = Id::from(self.data.len() as u32);
values.for_each(|v| {
self.alloc(v);
Expand Down
32 changes: 13 additions & 19 deletions src/internal/core.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,22 +6,16 @@
use std::collections::HashSet as Set;
use std::sync::Arc;

use crate::error::NoSolutionError;
use crate::internal::arena::Arena;
use crate::internal::incompatibility::{Incompatibility, Relation};
use crate::internal::partial_solution::SatisfierSearch::{
DifferentDecisionLevels, SameDecisionLevels,
};
use crate::internal::partial_solution::{DecisionLevel, PartialSolution};
use crate::internal::small_vec::SmallVec;
use crate::report::DerivationTree;
use crate::solver::DependencyProvider;
use crate::type_aliases::{IncompDpId, Map};
use crate::version_set::VersionSet;
use crate::internal::{IncompDpId, SatisfierSearch};
use crate::{DependencyProvider, DerivationTree, Map, NoSolutionError, VersionSet};
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Well this is exactly the kind of cyclic imports that I’ve always wanted to avoid. crate imports internal.core which in turn imports crate. I believe this leads to putting definitions and functions in a much less organized and maintainable way in the long term, because we don’t care where we put it and just pub(crate) it.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I do find everything at the root very useful and convenient for outside callers of the library, or test files. Just not for the inside of the library itself.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I've reverted the internal import changes

Copy link
Member

@mpizenberg mpizenberg Jul 29, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Sorry for the long wait time. I didn’t mean specifically internal just inside the library in general. The idea is to avoid cyclic imports, which are signs that something is not compartmentalized properly. Because a module that requires cyclic imports means that any other module that would need a functionality from this one, would also end up with the whole crate (all its modules) as a dependency. Which makes things like dead code elimination more complex, and other bad architecture proofs in general.

So basically, I find it fine to have examples, benchmarks, external tests, ... to import everything from the new top level crate. But I’d like to avoid having the library modules (anything that is in the import path from the root lib.rs) to import crate module.

That being said, I know I’m a bit stubborn on this, and I know this isn’t something very common in rust crates, so I’m willing to bend if you all think its better.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I've never gotten the hang of keeping my imports well structured. So I personally would not mind making it a cyclical ball of spaghetti. :-P On the other hand, I'm terrible at naming things and I never remember to fix documentation. This project is better for your efforts to rein in those bad habits of mine.

So whichever way we decide, I would deeply appreciate automation that insists on it. Something in CI, even just a grep for the "bad" patterns — or are there tools in the ecosystem for identifying cyclical imports?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Cyclical ball of spaghetti sounds delicious! ahah. I wanted to explain my reasoning because every time something is cyclical, it makes everything more complex. Dependency solving is more complex, memory releasing for garbage collector is more complex, etc. I see no reason why import graph solving would not suffer from similar issues, potentially leading to longer compilation times or less optimal dead code elimination for compiled bundles.

But that’s just my hunch and it isn’t backed by any scientific proof or concrete testing using Rust and using our code base in particular. So take that with a grain of salt. And if you think it makes code reading and maintenance simpler I can’t argue against that.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

For concrete data, I guess compiling a rather simple example based on pubgrub could be used. To see if this change makes any impact on compile time and bundle sizes.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Now that I think about it, it probably would not change anything for pubgrub, because its not like people would use only a subset of pubgrub, they basically would use one of the top level modules that do need everything in the lib. There isn’t any part of this lib that really stands on its own.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Resolving symbols to the source location is comparatively simple for a compiler: You build an alias map from exported symbol to origin path or the local definition for each file once, and then when looking up a symbol, you iterate over the aliases until you find the source location. If I want to resolve Foo, I walk through the scopes until I find a use crate::Foo, then I ask crate/src/lib.rs for Foo, it has memorized it as coming from crate::foo/src/foo.rs due to pub use foo::Foo, then I go to crate::foo/src/foo.rs and it tells me it has a local struct Foo, so I get my fully qualified path crate::foo::Foo (and pub/priv checking along the way).

I have found cyclical imports often become unavoidable with more complex crates, e.g. between an error type capturing some struct for context and that struct returning the error in its methods. I haven't found cyclical imports to be a signal of bad architecture.

There are some things that are known to be costly to the compiler, notably large/many dependencies, proc macros (they need to be compiled and executed, and then rustc needs to compile the bulk of the generated code), generics (rustc generates and processes monomorphized code for each instantiation, i.e. n different usages give us n times the code) and LLVM in general.

I say we structure the code in the way that's easiest for us to maintain and move as much work as possible to the compiler. Pubgrub compiles ridiculously fast compared to any other crate I've worked on (0.2s for an incremental cargo build, and ~2s for cargo test on my desktop), so I have zero worries about compile times.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks for the detailed answer! Alright, don’t forget to "unrevert" your changes then.


/// Current state of the PubGrub algorithm.
#[derive(Clone)]
pub struct State<DP: DependencyProvider> {
pub(crate) struct State<DP: DependencyProvider> {
root_package: DP::P,
root_version: DP::V,

Expand All @@ -40,10 +34,10 @@ pub struct State<DP: DependencyProvider> {

/// Partial solution.
/// TODO: remove pub.
pub partial_solution: PartialSolution<DP>,
pub(crate) partial_solution: PartialSolution<DP>,

/// The store is the reference storage for all incompatibilities.
pub incompatibility_store: Arena<Incompatibility<DP::P, DP::VS, DP::M>>,
pub(crate) incompatibility_store: Arena<Incompatibility<DP::P, DP::VS, DP::M>>,

/// This is a stack of work to be done in `unit_propagation`.
/// It can definitely be a local variable to that method, but
Expand All @@ -53,7 +47,7 @@ pub struct State<DP: DependencyProvider> {

impl<DP: DependencyProvider> State<DP> {
/// Initialization of PubGrub state.
pub fn init(root_package: DP::P, root_version: DP::V) -> Self {
pub(crate) fn init(root_package: DP::P, root_version: DP::V) -> Self {
let mut incompatibility_store = Arena::new();
let not_root_id = incompatibility_store.alloc(Incompatibility::not_root(
root_package.clone(),
Expand All @@ -74,13 +68,13 @@ impl<DP: DependencyProvider> State<DP> {
}

/// Add an incompatibility to the state.
pub fn add_incompatibility(&mut self, incompat: Incompatibility<DP::P, DP::VS, DP::M>) {
pub(crate) fn add_incompatibility(&mut self, incompat: Incompatibility<DP::P, DP::VS, DP::M>) {
let id = self.incompatibility_store.alloc(incompat);
self.merge_incompatibility(id);
}

/// Add an incompatibility to the state.
pub fn add_incompatibility_from_dependencies(
pub(crate) fn add_incompatibility_from_dependencies(
&mut self,
package: DP::P,
version: DP::V,
Expand All @@ -105,7 +99,7 @@ impl<DP: DependencyProvider> State<DP> {

/// Unit propagation is the core mechanism of the solving algorithm.
/// CF <https://github.com/dart-lang/pub/blob/master/doc/solver.md#unit-propagation>
pub fn unit_propagation(&mut self, package: DP::P) -> Result<(), NoSolutionError<DP>> {
pub(crate) fn unit_propagation(&mut self, package: DP::P) -> Result<(), NoSolutionError<DP>> {
self.unit_propagation_buffer.clear();
self.unit_propagation_buffer.push(package);
while let Some(current_package) = self.unit_propagation_buffer.pop() {
Expand Down Expand Up @@ -202,7 +196,7 @@ impl<DP: DependencyProvider> State<DP> {
&self.incompatibility_store,
);
match satisfier_search_result {
DifferentDecisionLevels {
SatisfierSearch::DifferentDecisionLevels {
previous_satisfier_level,
} => {
let package = package.clone();
Expand All @@ -214,7 +208,7 @@ impl<DP: DependencyProvider> State<DP> {
log::info!("backtrack to {:?}", previous_satisfier_level);
return Ok((package, current_incompat_id));
}
SameDecisionLevels { satisfier_cause } => {
SatisfierSearch::SameDecisionLevels { satisfier_cause } => {
let prior_cause = Incompatibility::prior_cause(
current_incompat_id,
satisfier_cause,
Expand Down Expand Up @@ -248,10 +242,10 @@ impl<DP: DependencyProvider> State<DP> {

/// Add this incompatibility into the set of all incompatibilities.
///
/// Pub collapses identical dependencies from adjacent package versions
/// PubGrub collapses identical dependencies from adjacent package versions
Eh2406 marked this conversation as resolved.
Show resolved Hide resolved
/// into individual incompatibilities.
/// This substantially reduces the total number of incompatibilities
/// and makes it much easier for Pub to reason about multiple versions of packages at once.
/// and makes it much easier for PubGrub to reason about multiple versions of packages at once.
///
/// For example, rather than representing
/// foo 1.0.0 depends on bar ^1.0.0 and
Expand Down
Loading
Loading