Skip to content

Commit

Permalink
uv-resolver: account for conflicting extras when forking
Browse files Browse the repository at this point in the history
This commit implements the actual logic for creating new
forks when there are dependencies matching declared conflicts.
  • Loading branch information
BurntSushi committed Nov 9, 2024
1 parent 658dc7a commit 2446e9e
Show file tree
Hide file tree
Showing 2 changed files with 214 additions and 19 deletions.
65 changes: 64 additions & 1 deletion crates/uv-resolver/src/resolver/environment.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
use std::sync::Arc;

use rustc_hash::{FxHashMap, FxHashSet};
use uv_normalize::{ExtraName, PackageName};
use uv_pep508::{MarkerEnvironment, MarkerTree};
use uv_pypi_types::{ConflictingGroupList, ResolverMarkerEnvironment};
use uv_pypi_types::{
ConflictingGroup, ConflictingGroupList, ConflictingGroupRef, ResolverMarkerEnvironment,
};

use crate::pubgrub::{PubGrubDependency, PubGrubPackage};
use crate::requires_python::RequiresPythonRange;
Expand Down Expand Up @@ -95,6 +98,8 @@ enum Kind {
initial_forks: Arc<[MarkerTree]>,
/// The markers associated with this resolver fork.
markers: MarkerTree,
/// Conflicting group exclusions.
exclude: Arc<FxHashMap<PackageName, FxHashSet<ExtraName>>>,
},
}

Expand Down Expand Up @@ -132,6 +137,7 @@ impl ResolverEnvironment {
let kind = Kind::Universal {
initial_forks: initial_forks.into(),
markers: MarkerTree::TRUE,
exclude: Arc::new(FxHashMap::default()),
};
ResolverEnvironment { kind }
}
Expand All @@ -157,6 +163,18 @@ impl ResolverEnvironment {
}
}

/// Returns true if a dependency with the given conflicting group may be
/// included in this resolver environment.
///
/// Groups are only ever excluded in universal resolution; a resolution
/// targeting one specific marker environment never excludes any group.
pub(crate) fn included_by_group(&self, group: ConflictingGroupRef<'_>) -> bool {
    match self.kind {
        Kind::Specific { .. } => true,
        Kind::Universal { ref exclude, .. } => exclude
            .get(group.package())
            .map_or(true, |extras| !extras.contains(group.extra())),
    }
}

/// Returns the bounding Python versions that can satisfy this
/// resolver environment's marker, if it's constrained.
pub(crate) fn requires_python(&self) -> Option<RequiresPythonRange> {
Expand All @@ -183,12 +201,56 @@ impl ResolverEnvironment {
Kind::Universal {
ref initial_forks,
markers: ref lhs,
ref exclude,
} => {
let mut markers = lhs.clone();
markers.and(rhs);
let kind = Kind::Universal {
initial_forks: Arc::clone(initial_forks),
markers,
exclude: Arc::clone(exclude),
};
ResolverEnvironment { kind }
}
}
}

/// Returns a new resolver environment with the given groups excluded from
/// it.
///
/// When a group is excluded from a resolver environment,
/// `ResolverEnvironment::included_by_group` will return false. The idea
/// is that a dependency with a corresponding group should be excluded by
/// forks in the resolver with this environment.
///
/// # Panics
///
/// This panics if the resolver environment corresponds to one and only one
/// specific marker environment. i.e., "pip"-style resolution.
pub(crate) fn exclude_by_group(
&self,
groups: impl IntoIterator<Item = ConflictingGroup>,
) -> ResolverEnvironment {
match self.kind {
Kind::Specific { .. } => {
unreachable!("environment narrowing only happens in universal resolution")
}
Kind::Universal {
ref initial_forks,
ref markers,
ref exclude,
} => {
let mut exclude: FxHashMap<_, _> = (**exclude).clone();
for group in groups {
exclude
.entry(group.package().clone())
.or_default()
.insert(group.extra().clone());
}
let kind = Kind::Universal {
initial_forks: Arc::clone(initial_forks),
markers: markers.clone(),
exclude: Arc::new(exclude),
};
ResolverEnvironment { kind }
}
Expand All @@ -207,6 +269,7 @@ impl ResolverEnvironment {
let Kind::Universal {
ref initial_forks,
markers: ref _markers,
exclude: ref _exclude,
} = self.kind
else {
return vec![init];
Expand Down
168 changes: 150 additions & 18 deletions crates/uv-resolver/src/resolver/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,10 @@ use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_pep440::{release_specifiers_to_ranges, Version, MIN_VERSION};
use uv_pep508::MarkerTree;
use uv_platform_tags::Tags;
use uv_pypi_types::{ConflictingGroupList, Requirement, ResolutionMetadata, VerbatimParsedUrl};
use uv_pypi_types::{
ConflictingGroup, ConflictingGroupList, ConflictingGroupRef, Requirement, ResolutionMetadata,
VerbatimParsedUrl,
};
use uv_types::{BuildContext, HashStrategy, InstalledPackagesProvider};
use uv_warnings::warn_user_once;

Expand Down Expand Up @@ -1560,6 +1563,11 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
) {
return None;
}
if !env.included_by_group(
ConflictingGroupRef::from((&requirement.name, source_extra)),
) {
return None;
}
}
None => {
if !requirement.evaluate_markers(env.marker_environment(), &[]) {
Expand Down Expand Up @@ -1648,6 +1656,11 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
) {
return None;
}
if !env.included_by_group(
ConflictingGroupRef::from((&requirement.name, source_extra)),
) {
return None;
}
}
None => {
if !constraint.evaluate_markers(env.marker_environment(), &[]) {
Expand Down Expand Up @@ -2740,10 +2753,7 @@ impl Forks {
) -> Forks {
let python_marker = python_requirement.to_marker_tree();

let mut forks = vec![Fork {
dependencies: vec![],
env: env.clone(),
}];
let mut forks = vec![Fork::new(env.clone())];
let mut diverging_packages = BTreeSet::new();
for (name, mut deps) in name_to_deps {
assert!(!deps.is_empty(), "every name has at least one dependency");
Expand Down Expand Up @@ -2775,7 +2785,7 @@ impl Forks {
let markers = dep.package.marker().cloned().unwrap_or(MarkerTree::TRUE);
for fork in &mut forks {
if fork.env.included_by_marker(&markers) {
fork.dependencies.push(dep.clone());
fork.add_dependency(dep.clone());
}
}
continue;
Expand All @@ -2793,7 +2803,7 @@ impl Forks {
// Or, if the markers are always true, then we just
// add the dependency to every fork unconditionally.
for fork in &mut forks {
fork.dependencies.push(dep.clone());
fork.add_dependency(dep.clone());
}
continue;
}
Expand All @@ -2818,7 +2828,7 @@ impl Forks {
// specifically created to exclude this dependency,
// so this isn't always true!
if forker.included(&new_fork.env) {
new_fork.dependencies.push(dep.clone());
new_fork.add_dependency(dep.clone());
}
// Filter out any forks we created that are disjoint with our
// Python requirement.
Expand All @@ -2830,6 +2840,62 @@ impl Forks {
forks = new;
}
}
// When there is a conflicting group configuration, we need
// to potentially add more forks. Each fork added contains an
// exclusion list of conflicting groups where dependencies with
// the corresponding package and extra name are forcefully
// excluded from that group.
//
// We specifically iterate on conflicting groups and
// potentially re-generate all forks for each one. We do it
// this way in case there are multiple sets of conflicting
// groups that impact the forks here.
//
// For example, if we have conflicting groups {x1, x2} and {x3,
// x4}, we need to make sure the forks generated from one set
// also account for the other set.
for groups in conflicting_groups.iter() {
let mut new = vec![];
for fork in std::mem::take(&mut forks) {
let mut has_conflicting_dependency = false;
for group in groups.iter() {
if fork.contains_conflicting_group(group.as_ref()) {
has_conflicting_dependency = true;
break;
}
}
if !has_conflicting_dependency {
new.push(fork);
continue;
}

// Create a fork that excludes ALL extras.
let mut fork_none = fork.clone();
for group in groups.iter() {
fork_none = fork_none.exclude([group.clone()]);
}
new.push(fork_none);

// Now create a fork for each conflicting group, where
// that fork excludes every *other* conflicting group.
//
// So if we have conflicting extras foo, bar and baz,
// then this creates three forks: one that excludes
// {foo, bar}, one that excludes {foo, baz} and one
// that excludes {bar, baz}.
for (i, _) in groups.iter().enumerate() {
let fork_allows_group = fork.clone().exclude(
groups
.iter()
.enumerate()
.filter(|&(j, _)| i != j)
.map(|(_, group)| group.clone()),
);
new.push(fork_allows_group);
}
}
forks = new;
}
Forks {
forks,
diverging_packages,
Expand All @@ -2846,7 +2912,7 @@ impl Forks {
/// have the same name and because the marker expressions are disjoint,
/// a fork occurs. One fork will contain `a<2` but not `a>=2`, while
/// the other fork will contain `a>=2` but not `a<2`.
#[derive(Clone, Debug, Eq, PartialEq)]
#[derive(Clone, Debug)]
struct Fork {
/// The list of dependencies for this fork, guaranteed to be conflict
/// free. (i.e., There are no two packages with the same name with
Expand All @@ -2857,6 +2923,12 @@ struct Fork {
/// it should be impossible for a package with a marker expression that is
/// disjoint from the marker expression on this fork to be added.
dependencies: Vec<PubGrubDependency>,
/// The conflicting groups in this fork.
///
/// This exists to make some access patterns more efficient. Namely,
/// it makes it easy to check whether there's a dependency with a
/// particular conflicting group in this fork.
conflicting_groups: FxHashMap<PackageName, FxHashSet<ExtraName>>,
/// The resolver environment for this fork.
///
/// Principally, this corresponds to the markers in this fork. So in the
Expand All @@ -2872,26 +2944,86 @@ struct Fork {
}

impl Fork {
/*
fn intersect(&mut self, markers: MarkerTree) {
self.markers.and(markers);
/// Creates an empty fork (no dependencies) for the given resolver
/// environment.
fn new(env: ResolverEnvironment) -> Fork {
    Fork {
        dependencies: Vec::new(),
        conflicting_groups: FxHashMap::default(),
        env,
    }
}

/// Adds a dependency to this fork.
///
/// If the dependency carries a conflicting group, it is also recorded in
/// the fork's group index so membership checks need not scan the
/// dependency list.
fn add_dependency(&mut self, dep: PubGrubDependency) {
    if let Some(group) = dep.package.conflicting_group() {
        let (package, extra) = (group.package().clone(), group.extra().clone());
        self.conflicting_groups
            .entry(package)
            .or_default()
            .insert(extra);
    }
    self.dependencies.push(dep);
}

/// Sets the resolver environment to the one given.
///
/// Any dependency in this fork whose markers are not satisfied by the
/// given environment is removed, and the conflicting group index is
/// updated to stay consistent with the remaining dependencies.
///
/// (Fix: a stray leftover line from the old marker-based implementation,
/// `!self.markers.is_disjoint(markers)`, was interleaved in the body and
/// has been removed.)
fn set_env(&mut self, env: ResolverEnvironment) {
    self.env = env;
    self.dependencies.retain(|dep| {
        // Dependencies without markers apply in every environment.
        let Some(markers) = dep.package.marker() else {
            return true;
        };
        if self.env.included_by_marker(markers) {
            return true;
        }
        // The dependency is being dropped: remove its conflicting group
        // (if any) so the index stays consistent with `dependencies`.
        if let Some(conflicting_group) = dep.package.conflicting_group() {
            if let Some(set) = self.conflicting_groups.get_mut(conflicting_group.package()) {
                set.remove(conflicting_group.extra());
            }
        }
        false
    });
}
*/

fn set_env(&mut self, env: ResolverEnvironment) {
self.env = env;
/// Returns true if this fork contains a dependency with the given
/// package and extra values (i.e., the given conflicting group).
fn contains_conflicting_group(&self, group: ConflictingGroupRef<'_>) -> bool {
    match self.conflicting_groups.get(group.package()) {
        Some(extras) => extras.contains(group.extra()),
        None => false,
    }
}

/// Exclude the given groups from this fork.
///
/// This removes all dependencies matching the given conflicting groups.
fn exclude(mut self, groups: impl IntoIterator<Item = ConflictingGroup>) -> Fork {
self.env = self.env.exclude_by_group(groups);
self.dependencies.retain(|dep| {
let Some(markers) = dep.package.marker() else {
let Some(conflicting_group) = dep.package.conflicting_group() else {
return true;
};
self.env.included(markers)
if self.env.included_by_group(conflicting_group) {
return true;
}
if let Some(conflicting_group) = dep.package.conflicting_group() {
if let Some(set) = self.conflicting_groups.get_mut(conflicting_group.package()) {
set.remove(conflicting_group.extra());
}
}
false
});
self
}
}

// Implemented manually rather than derived: `Fork`'s `PartialEq` compares
// only `dependencies` and `env`, omitting the `conflicting_groups` index
// (which is maintained from `dependencies`), so a derive would not match.
impl Eq for Fork {}

impl PartialEq for Fork {
    /// Two forks are equal when their resolver environments and their
    /// dependency lists are equal; the `conflicting_groups` index is
    /// intentionally not compared.
    fn eq(&self, other: &Fork) -> bool {
        self.env == other.env && self.dependencies == other.dependencies
    }
}

Expand Down

0 comments on commit 2446e9e

Please sign in to comment.