From 9542d828db898c106d3101e2d110842b4392df13 Mon Sep 17 00:00:00 2001 From: Douglas Creager Date: Mon, 23 Feb 2026 16:54:54 -0500 Subject: [PATCH 1/9] hand-roll memoization caches --- .../ty_python_semantic/src/types/call/bind.rs | 7 +- .../src/types/constraints.rs | 2767 ++++++++++------- .../ty_python_semantic/src/types/generics.rs | 18 +- 3 files changed, 1720 insertions(+), 1072 deletions(-) diff --git a/crates/ty_python_semantic/src/types/call/bind.rs b/crates/ty_python_semantic/src/types/call/bind.rs index b3a7b4b73b720e..3ecd08bf650c44 100644 --- a/crates/ty_python_semantic/src/types/call/bind.rs +++ b/crates/ty_python_semantic/src/types/call/bind.rs @@ -1873,8 +1873,11 @@ impl<'db> Bindings<'db> { let constraints = ConstraintSetBuilder::new(); let set = constraints.load(tracked.constraints(db)); - let result = - set.satisfied_by_all_typevars(db, InferableTypeVars::One(&inferable)); + let result = set.satisfied_by_all_typevars( + db, + &constraints, + InferableTypeVars::One(&inferable), + ); overload.set_return_type(Type::bool_literal(result)); } diff --git a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs index 95e1d4067585af..2ec243e34a9dbc 100644 --- a/crates/ty_python_semantic/src/types/constraints.rs +++ b/crates/ty_python_semantic/src/types/constraints.rs @@ -66,7 +66,7 @@ //! //! 
[bdd]: https://en.wikipedia.org/wiki/Binary_decision_diagram -use std::cell::RefCell; +use std::cell::{Ref, RefCell}; use std::cmp::Ordering; use std::fmt::{Debug, Display}; use std::marker::PhantomData; @@ -74,8 +74,8 @@ use std::ops::Range; use indexmap::map::Entry; use itertools::Itertools; +use ruff_index::{Idx, IndexVec, newtype_index}; use rustc_hash::{FxHashMap, FxHashSet}; -use salsa::plumbing::AsId; use smallvec::SmallVec; use crate::types::class::GenericAlias; @@ -173,8 +173,9 @@ where builder: &'c ConstraintSetBuilder<'db>, mut f: impl FnMut(T) -> ConstraintSet<'db, 'c>, ) -> ConstraintSet<'db, 'c> { - let node = Node::distributed_or( + let node = NodeId::distributed_or( db, + builder, self.map(|element| { let constraint = f(element); constraint.verify_builder(builder); @@ -190,8 +191,9 @@ where builder: &'c ConstraintSetBuilder<'db>, mut f: impl FnMut(T) -> ConstraintSet<'db, 'c>, ) -> ConstraintSet<'db, 'c> { - let node = Node::distributed_and( + let node = NodeId::distributed_and( db, + builder, self.map(|element| { let constraint = f(element); constraint.verify_builder(builder); @@ -205,8 +207,9 @@ where #[derive(Clone, Debug, Eq, Hash, PartialEq, get_size2::GetSize, salsa::Update)] pub struct OwnedConstraintSet<'db> { /// The BDD representing this constraint set - node: Node<'db>, - storage: ConstraintSetStorage<'db>, + node: NodeId, + constraints: IndexVec>, + nodes: IndexVec, } /// A set of constraints under which a type property holds. 
@@ -222,13 +225,13 @@ pub struct OwnedConstraintSet<'db> { #[derive(Clone, Copy)] pub struct ConstraintSet<'db, 'c> { /// The BDD representing this constraint set - node: Node<'db>, + node: NodeId, builder: &'c ConstraintSetBuilder<'db>, _invariant: PhantomData &'c ()>, } impl<'db, 'c> ConstraintSet<'db, 'c> { - fn from_node(builder: &'c ConstraintSetBuilder<'db>, node: Node<'db>) -> Self { + fn from_node(builder: &'c ConstraintSetBuilder<'db>, node: NodeId) -> Self { Self { node, builder, @@ -237,11 +240,11 @@ impl<'db, 'c> ConstraintSet<'db, 'c> { } fn never(builder: &'c ConstraintSetBuilder<'db>) -> Self { - Self::from_node(builder, Node::AlwaysFalse) + Self::from_node(builder, ALWAYS_FALSE) } fn always(builder: &'c ConstraintSetBuilder<'db>) -> Self { - Self::from_node(builder, Node::AlwaysTrue) + Self::from_node(builder, ALWAYS_TRUE) } pub(crate) fn from_bool(builder: &'c ConstraintSetBuilder<'db>, b: bool) -> Self { @@ -262,7 +265,7 @@ impl<'db, 'c> ConstraintSet<'db, 'c> { ) -> Self { Self::from_node( builder, - ConstrainedTypeVar::new_node(db, typevar, lower, upper), + Constraint::new_node(db, builder, typevar, lower, upper), ) } @@ -273,12 +276,12 @@ impl<'db, 'c> ConstraintSet<'db, 'c> { /// Returns whether this constraint set never holds pub(crate) fn is_never_satisfied(self, db: &'db dyn Db) -> bool { - self.node.is_never_satisfied(db) + self.node.is_never_satisfied(db, self.builder) } /// Returns whether this constraint set always holds pub(crate) fn is_always_satisfied(self, db: &'db dyn Db) -> bool { - self.node.is_always_satisfied(db) + self.node.is_always_satisfied(db, self.builder) } /// Returns whether this constraint set contains any cycles between typevars. 
If it does, then @@ -363,15 +366,17 @@ impl<'db, 'c> ConstraintSet<'db, 'c> { BoundTypeVarIdentity<'db>, FxHashSet>, > = FxHashMap::default(); - self.node.for_each_constraint(db, &mut |constraint, _| { - let visitor = CollectReachability::default(); - visitor.visit_type(db, constraint.lower(db)); - visitor.visit_type(db, constraint.upper(db)); - reachable_typevars - .entry(constraint.typevar(db).identity(db)) - .or_default() - .extend(visitor.reachable_typevars.into_inner()); - }); + self.node + .for_each_constraint(self.builder, &mut |constraint, _| { + let visitor = CollectReachability::default(); + let constraint = self.builder.constraint_data(constraint); + visitor.visit_type(db, constraint.lower); + visitor.visit_type(db, constraint.upper); + reachable_typevars + .entry(constraint.typevar.identity(db)) + .or_default() + .extend(visitor.reachable_typevars.into_inner()); + }); // Then perform a depth-first search to see if there are any cycles. let mut discovered: FxHashSet> = FxHashSet::default(); @@ -399,7 +404,7 @@ impl<'db, 'c> ConstraintSet<'db, 'c> { rhs: Type<'db>, ) -> Self { self.verify_builder(builder); - Self::from_node(builder, self.node.implies_subtype_of(db, lhs, rhs)) + Self::from_node(builder, self.node.implies_subtype_of(db, builder, lhs, rhs)) } /// Returns whether this constraint set is satisfied by all of the typevars that it mentions. @@ -419,9 +424,11 @@ impl<'db, 'c> ConstraintSet<'db, 'c> { pub(crate) fn satisfied_by_all_typevars( &self, db: &'db dyn Db, + builder: &'c ConstraintSetBuilder<'db>, inferable: InferableTypeVars<'_, 'db>, ) -> bool { - self.node.satisfied_by_all_typevars(db, inferable) + self.verify_builder(builder); + self.node.satisfied_by_all_typevars(db, builder, inferable) } /// Updates this constraint set to hold the union of itself and another constraint set. @@ -430,12 +437,12 @@ impl<'db, 'c> ConstraintSet<'db, 'c> { /// nodes. 
pub(crate) fn union( &mut self, - db: &'db dyn Db, + _db: &'db dyn Db, builder: &'c ConstraintSetBuilder<'db>, other: Self, ) -> Self { self.verify_builder(builder); - self.node = self.node.or_with_offset(db, other.node); + self.node = self.node.or_with_offset(builder, other.node); *self } @@ -445,19 +452,19 @@ impl<'db, 'c> ConstraintSet<'db, 'c> { /// nodes. pub(crate) fn intersect( &mut self, - db: &'db dyn Db, + _db: &'db dyn Db, builder: &'c ConstraintSetBuilder<'db>, other: Self, ) -> Self { self.verify_builder(builder); - self.node = self.node.and_with_offset(db, other.node); + self.node = self.node.and_with_offset(builder, other.node); *self } /// Returns the negation of this constraint set. - pub(crate) fn negate(self, db: &'db dyn Db, builder: &'c ConstraintSetBuilder<'db>) -> Self { + pub(crate) fn negate(self, _db: &'db dyn Db, builder: &'c ConstraintSetBuilder<'db>) -> Self { self.verify_builder(builder); - Self::from_node(builder, self.node.negate(db)) + Self::from_node(builder, self.node.negate(builder)) } /// Returns the intersection of this constraint set and another. The other constraint set is @@ -521,12 +528,12 @@ impl<'db, 'c> ConstraintSet<'db, 'c> { /// nodes. pub(crate) fn iff( self, - db: &'db dyn Db, + _db: &'db dyn Db, builder: &'c ConstraintSetBuilder<'db>, other: Self, ) -> Self { self.verify_builder(builder); - Self::from_node(builder, self.node.iff_with_offset(db, other.node)) + Self::from_node(builder, self.node.iff_with_offset(builder, other.node)) } /// Reduces the set of inferable typevars for this constraint set. 
You provide an iterator of @@ -541,27 +548,43 @@ impl<'db, 'c> ConstraintSet<'db, 'c> { to_remove: impl IntoIterator>, ) -> Self { self.verify_builder(builder); - Self::from_node(builder, self.node.exists(db, to_remove)) + Self::from_node(builder, self.node.exists(db, builder, to_remove)) } - pub(crate) fn solutions(self, db: &'db dyn Db) -> Solutions<'db> { + pub(crate) fn solutions( + self, + db: &'db dyn Db, + builder: &'c ConstraintSetBuilder<'db>, + ) -> Solutions<'db, 'c> { + self.verify_builder(builder); + // If the constraint set is cyclic, we'll hit an infinite expansion when trying to add type // mappings for it. if self.is_cyclic(db) { return Solutions::Unsatisfiable; } - self.node.solutions(db) + self.node.solutions(db, builder) } #[expect(dead_code)] // Keep this around for debugging purposes pub(crate) fn display(self, db: &'db dyn Db) -> impl Display { - self.node.simplify_for_display(db).display(db) + self.node + .simplify_for_display(db, self.builder) + .display(db, self.builder) } #[expect(dead_code)] // Keep this around for debugging purposes - pub(crate) fn display_graph(self, db: &'db dyn Db, prefix: &dyn Display) -> impl Display { - self.node.display_graph(db, prefix) + pub(crate) fn display_graph<'a>( + self, + db: &'db dyn Db, + prefix: &'a dyn Display, + ) -> impl Display + 'a + where + 'db: 'a, + 'c: 'a, + { + self.node.display_graph(db, self.builder, prefix) } } @@ -578,9 +601,26 @@ pub(crate) struct ConstraintSetBuilder<'db> { storage: RefCell>, } -#[derive(Clone, Debug, Default, Eq, Hash, PartialEq, get_size2::GetSize, salsa::Update)] +#[derive(Clone, Debug, Default, Eq, PartialEq, get_size2::GetSize)] struct ConstraintSetStorage<'db> { - _dummy: PhantomData<&'db ()>, + constraints: IndexVec>, + nodes: IndexVec, + + constraint_cache: FxHashMap, ConstraintId>, + node_cache: FxHashMap, + + negate_cache: FxHashMap, + or_cache: FxHashMap<(NodeId, NodeId, usize), NodeId>, + and_cache: FxHashMap<(NodeId, NodeId, usize), NodeId>, + iff_cache: 
FxHashMap<(NodeId, NodeId, usize), NodeId>, + exists_one_cache: FxHashMap<(NodeId, BoundTypeVarIdentity<'db>), NodeId>, + retain_one_cache: FxHashMap<(NodeId, BoundTypeVarIdentity<'db>), NodeId>, + restrict_one_cache: FxHashMap<(NodeId, ConstraintAssignment), (NodeId, bool)>, + solutions_cache: FxHashMap>>, + simplify_cache: FxHashMap, + + single_sequent_cache: FxHashMap, + pair_sequent_cache: FxHashMap<(ConstraintId, ConstraintId), SequentMap>, } impl<'db> ConstraintSetBuilder<'db> { @@ -594,18 +634,64 @@ impl<'db> ConstraintSetBuilder<'db> { ) -> OwnedConstraintSet<'db> { let constraint = f(&self); let node = constraint.node; + let storage = self.storage.into_inner(); OwnedConstraintSet { node, - storage: self.storage.into_inner(), + constraints: storage.constraints, + nodes: storage.nodes, } } pub(crate) fn load<'c>(&'c self, other: &OwnedConstraintSet<'db>) -> ConstraintSet<'db, 'c> { - // For now, all constraints are still salsa-interned globally, so we can just coerce the - // constraint set to consider ourselves as where it's stored. Once we migrate to actually - // storing the constraints in ConstraintSetStorage, this will need to copy the relevant BDD - // nodes from other's storage into ourselves. 
- ConstraintSet::from_node(self, other.node) + let mut constraints = IndexVec::with_capacity(other.constraints.len()); + for constraint in other.constraints.iter().copied() { + constraints.push(self.intern_constraint(constraint)); + } + + let mut nodes = IndexVec::with_capacity(other.nodes.len()); + let remap = |old: NodeId, nodes: &IndexVec| { + if old.is_terminal() { old } else { nodes[old] } + }; + for node in other.nodes.iter().copied() { + let remapped = InteriorNodeData { + constraint: constraints[node.constraint], + if_true: remap(node.if_true, &nodes), + if_false: remap(node.if_false, &nodes), + source_order: node.source_order, + max_source_order: node.max_source_order, + }; + nodes.push(self.intern_interior_node(remapped)); + } + + ConstraintSet::from_node(self, remap(other.node, &nodes)) + } + + fn intern_constraint(&self, data: Constraint<'db>) -> ConstraintId { + let mut storage = self.storage.borrow_mut(); + if let Some(id) = storage.constraint_cache.get(&data) { + return *id; + } + let id = storage.constraints.push(data); + storage.constraint_cache.insert(data, id); + id + } + + fn intern_interior_node(&self, data: InteriorNodeData) -> NodeId { + let mut storage = self.storage.borrow_mut(); + if let Some(id) = storage.node_cache.get(&data) { + return *id; + } + let id = storage.nodes.push(data); + storage.node_cache.insert(data, id); + id + } + + fn constraint_data(&self, constraint: ConstraintId) -> Constraint<'db> { + self.storage.borrow().constraints[constraint] + } + + fn interior_node_data(&self, node: NodeId) -> InteriorNodeData { + self.storage.borrow().nodes[node] } } @@ -626,7 +712,7 @@ impl<'db> BoundTypeVarInstance<'db> { #[derive(Clone, Copy, Debug)] enum IntersectionResult<'db> { - Simplified(ConstrainedTypeVar<'db>), + Simplified(Constraint<'db>), CannotSimplify, Disjoint, } @@ -637,29 +723,45 @@ impl IntersectionResult<'_> { } } +/// The index of an individual constraint (i.e. a BDD variable) within a [`ConstraintSetStorage`]. 
+#[newtype_index] +#[derive(Ord, PartialOrd, salsa::Update, get_size2::GetSize)] +pub struct ConstraintId; + /// An individual constraint in a constraint set. This restricts a single typevar to be within a /// lower and upper bound. -#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)] -pub(crate) struct ConstrainedTypeVar<'db> { +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, get_size2::GetSize, salsa::Update)] +pub(crate) struct Constraint<'db> { pub(crate) typevar: BoundTypeVarInstance<'db>, pub(crate) lower: Type<'db>, pub(crate) upper: Type<'db>, } -// The Salsa heap is tracked separately. -impl get_size2::GetSize for ConstrainedTypeVar<'_> {} +impl<'db> Constraint<'db> { + #[expect(clippy::new_ret_no_self)] + fn new( + builder: &ConstraintSetBuilder<'db>, + typevar: BoundTypeVarInstance<'db>, + lower: Type<'db>, + upper: Type<'db>, + ) -> ConstraintId { + builder.intern_constraint(Constraint { + typevar, + lower, + upper, + }) + } -#[salsa::tracked] -impl<'db> ConstrainedTypeVar<'db> { /// Returns a new range constraint. /// /// Panics if `lower` and `upper` are not both fully static. fn new_node( db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, typevar: BoundTypeVarInstance<'db>, mut lower: Type<'db>, mut upper: Type<'db>, - ) -> Node<'db> { + ) -> NodeId { // It's not useful for an upper bound to be an intersection type, or for a lower bound to // be a union type. Because the following equivalences hold, we can break these bounds // apart and create an equivalent BDD with more nodes but simpler constraints. 
(Fewer, @@ -669,11 +771,11 @@ impl<'db> ConstrainedTypeVar<'db> { // T ≤ (¬α & ¬β) ⇔ (T ≤ ¬α) ∧ (T ≤ ¬β) // (α | β) ≤ T ⇔ (α ≤ T) ∧ (β ≤ T) if let Type::Union(lower_union) = lower { - let mut result = Node::AlwaysTrue; + let mut result = ALWAYS_TRUE; for lower_element in lower_union.elements(db) { result = result.and_with_offset( - db, - ConstrainedTypeVar::new_node(db, typevar, *lower_element, upper), + builder, + Constraint::new_node(db, builder, typevar, *lower_element, upper), ); } return result; @@ -684,17 +786,17 @@ impl<'db> ConstrainedTypeVar<'db> { if let Type::Intersection(upper_intersection) = upper && !upper_intersection.is_simple_negation(db) { - let mut result = Node::AlwaysTrue; + let mut result = ALWAYS_TRUE; for upper_element in upper_intersection.iter_positive(db) { result = result.and_with_offset( - db, - ConstrainedTypeVar::new_node(db, typevar, lower, upper_element), + builder, + Constraint::new_node(db, builder, typevar, lower, upper_element), ); } for upper_element in upper_intersection.iter_negative(db) { result = result.and_with_offset( - db, - ConstrainedTypeVar::new_node(db, typevar, lower, upper_element.negate(db)), + builder, + Constraint::new_node(db, builder, typevar, lower, upper_element.negate(db)), ); } return result; @@ -725,11 +827,11 @@ impl<'db> ConstrainedTypeVar<'db> { }) => { return Node::new_constraint( - db, - ConstrainedTypeVar::new(db, typevar, Type::Never, Type::object()), + builder, + Constraint::new(builder, typevar, Type::Never, Type::object()), 1, ) - .negate(db); + .negate(builder); } _ => {} } @@ -754,7 +856,7 @@ impl<'db> ConstrainedTypeVar<'db> { // If `lower ≰ upper`, then the constraint cannot be satisfied, since there is no type that // is both greater than `lower`, and less than `upper`. if !lower.is_constraint_set_assignable_to(db, upper) { - return Node::AlwaysFalse; + return ALWAYS_FALSE; } // We have an (arbitrary) ordering for typevars. 
If the upper and/or lower bounds are @@ -772,13 +874,8 @@ impl<'db> ConstrainedTypeVar<'db> { (typevar, lower) }; Node::new_constraint( - db, - ConstrainedTypeVar::new( - db, - typevar, - Type::TypeVar(bound), - Type::TypeVar(bound), - ), + builder, + Constraint::new(builder, typevar, Type::TypeVar(bound), Type::TypeVar(bound)), 1, ) } @@ -788,57 +885,59 @@ impl<'db> ConstrainedTypeVar<'db> { if typevar.can_be_bound_for(db, lower) && typevar.can_be_bound_for(db, upper) => { let lower = Node::new_constraint( - db, - ConstrainedTypeVar::new(db, lower, Type::Never, Type::TypeVar(typevar)), + builder, + Constraint::new(builder, lower, Type::Never, Type::TypeVar(typevar)), 1, ); let upper = Node::new_constraint( - db, - ConstrainedTypeVar::new(db, upper, Type::TypeVar(typevar), Type::object()), + builder, + Constraint::new(builder, upper, Type::TypeVar(typevar), Type::object()), 1, ); - lower.and(db, upper) + lower.and(builder, upper) } // L ≤ T ≤ U == ([L] ≤ T) && ([T] ≤ U) (Type::TypeVar(lower), _) if typevar.can_be_bound_for(db, lower) => { let lower = Node::new_constraint( - db, - ConstrainedTypeVar::new(db, lower, Type::Never, Type::TypeVar(typevar)), + builder, + Constraint::new(builder, lower, Type::Never, Type::TypeVar(typevar)), 1, ); let upper = if upper.is_object() { - Node::AlwaysTrue + ALWAYS_TRUE } else { - Self::new_node(db, typevar, Type::Never, upper) + Constraint::new_node(db, builder, typevar, Type::Never, upper) }; - lower.and(db, upper) + lower.and(builder, upper) } // L ≤ T ≤ U == (L ≤ [T]) && (T ≤ [U]) (_, Type::TypeVar(upper)) if typevar.can_be_bound_for(db, upper) => { let lower = if lower.is_never() { - Node::AlwaysTrue + ALWAYS_TRUE } else { - Self::new_node(db, typevar, lower, Type::object()) + Constraint::new_node(db, builder, typevar, lower, Type::object()) }; let upper = Node::new_constraint( - db, - ConstrainedTypeVar::new(db, upper, Type::TypeVar(typevar), Type::object()), + builder, + Constraint::new(builder, upper, 
Type::TypeVar(typevar), Type::object()), 1, ); - lower.and(db, upper) + lower.and(builder, upper) } - _ => Node::new_constraint(db, ConstrainedTypeVar::new(db, typevar, lower, upper), 1), + _ => Node::new_constraint(builder, Constraint::new(builder, typevar, lower, upper), 1), } } +} - fn when_true(self) -> ConstraintAssignment<'db> { +impl ConstraintId { + fn when_true(self) -> ConstraintAssignment { ConstraintAssignment::Positive(self) } - fn when_false(self) -> ConstraintAssignment<'db> { + fn when_false(self) -> ConstraintAssignment { ConstraintAssignment::Negative(self) } @@ -865,8 +964,8 @@ impl<'db> ConstrainedTypeVar<'db> { /// adjacent in the BDD structure. However, this proved to be counterproductive; we've found /// empirically that we get smaller BDDs with an ordering that is more aligned with source /// order. - fn ordering(self, _db: &'db dyn Db) -> impl Ord { - std::cmp::Reverse(self.as_id()) + fn ordering(self) -> impl Ord { + std::cmp::Reverse(self) } /// Returns whether this constraint implies another — i.e., whether every type that @@ -874,20 +973,35 @@ impl<'db> ConstrainedTypeVar<'db> { /// /// This is used to simplify how we display constraint sets, by removing redundant constraints /// from a clause. 
- fn implies(self, db: &'db dyn Db, other: Self) -> bool { - if !self.typevar(db).is_same_typevar_as(db, other.typevar(db)) { + fn implies<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + other: Self, + ) -> bool { + let self_constraint = builder.constraint_data(self); + let other_constraint = builder.constraint_data(other); + if !self_constraint + .typevar + .is_same_typevar_as(db, other_constraint.typevar) + { return false; } - other - .lower(db) - .is_constraint_set_assignable_to(db, self.lower(db)) - && self - .upper(db) - .is_constraint_set_assignable_to(db, other.upper(db)) + other_constraint + .lower + .is_constraint_set_assignable_to(db, self_constraint.lower) + && self_constraint + .upper + .is_constraint_set_assignable_to(db, other_constraint.upper) } /// Returns the intersection of two range constraints, or `None` if the intersection is empty. - fn intersect(self, db: &'db dyn Db, other: Self) -> IntersectionResult<'db> { + fn intersect<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + other: Self, + ) -> IntersectionResult<'db> { /// TODO: For now, we treat some upper bounds as unsimplifiable if they become "too big". /// When intersecting constraints, the upper bounds are also intersected together. If the /// lhs and rhs upper bounds are unions of intersections (e.g. `(a & b) | (c & d)`), then @@ -896,23 +1010,26 @@ impl<'db> ConstrainedTypeVar<'db> { /// that would let us model this case more directly, but for now, we punt. 
const MAX_UPPER_BOUND_SIZE: usize = 4; - let self_upper = self.upper(db); - let other_upper = other.upper(db); - let estimated_upper_bound_size = self_upper + let self_constraint = builder.constraint_data(self); + let other_constraint = builder.constraint_data(other); + let estimated_upper_bound_size = self_constraint + .upper .union_size(db) - .saturating_mul(other_upper.union_size(db)) + .saturating_mul(other_constraint.upper.union_size(db)) .saturating_mul( - self_upper + self_constraint + .upper .intersection_size(db) - .saturating_add(other_upper.intersection_size(db)), + .saturating_add(other_constraint.upper.intersection_size(db)), ); if estimated_upper_bound_size >= MAX_UPPER_BOUND_SIZE { return IntersectionResult::CannotSimplify; } // (s₁ ≤ α ≤ t₁) ∧ (s₂ ≤ α ≤ t₂) = (s₁ ∪ s₂) ≤ α ≤ (t₁ ∩ t₂)) - let lower = UnionType::from_two_elements(db, self.lower(db), other.lower(db)); - let upper = IntersectionType::from_two_elements(db, self_upper, other_upper); + let lower = UnionType::from_two_elements(db, self_constraint.lower, other_constraint.lower); + let upper = + IntersectionType::from_two_elements(db, self_constraint.upper, other_constraint.upper); // If `lower ≰ upper`, then the intersection is empty, since there is no type that is both // greater than `lower`, and less than `upper`. 
@@ -926,29 +1043,46 @@ impl<'db> ConstrainedTypeVar<'db> { return IntersectionResult::CannotSimplify; } - IntersectionResult::Simplified(Self::new(db, self.typevar(db), lower, upper)) + IntersectionResult::Simplified(Constraint { + typevar: self_constraint.typevar, + lower, + upper, + }) } - pub(crate) fn display(self, db: &'db dyn Db) -> impl Display { - self.display_inner(db, false) + pub(crate) fn display<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + ) -> impl Display { + self.display_inner(db, builder, false) } - fn display_negated(self, db: &'db dyn Db) -> impl Display { - self.display_inner(db, true) + fn display_negated<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + ) -> impl Display { + self.display_inner(db, builder, true) } - fn display_inner(self, db: &'db dyn Db, negated: bool) -> impl Display { + fn display_inner<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + negated: bool, + ) -> impl Display { struct DisplayConstrainedTypeVar<'db> { - constraint: ConstrainedTypeVar<'db>, + constraint: Constraint<'db>, negated: bool, db: &'db dyn Db, } impl Display for DisplayConstrainedTypeVar<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let lower = self.constraint.lower(self.db); - let upper = self.constraint.upper(self.db); - let typevar = self.constraint.typevar(self.db); + let lower = self.constraint.lower; + let upper = self.constraint.upper; + let typevar = self.constraint.typevar; if lower.is_equivalent_to(self.db, upper) { // If this typevar is equivalent to another, output the constraint in a // consistent alphabetical order, regardless of the salsa ordering that we are @@ -1004,20 +1138,22 @@ impl<'db> ConstrainedTypeVar<'db> { } DisplayConstrainedTypeVar { - constraint: self, + constraint: builder.constraint_data(self), negated, db, } } } -/// A BDD node. +/// The index of a BDD node within a [`ConstraintSetStorage`]. 
/// /// The "variables" of a constraint set BDD are individual constraints, represented by an interned -/// [`ConstrainedTypeVar`]. +/// [`Constraint`]. /// -/// Terminal nodes (`false` and `true`) have their own dedicated enum variants. The -/// [`Interior`][InteriorNode] variant represents interior nodes. +/// Terminal nodes (`false` and `true`) have hard-coded index values. Interior nodes are stored in +/// a [`ConstraintSetStorage`], and are represented by the index into the storage array. By +/// construction, interior nodes can only refer to nodes with smaller indexes (since the nodes that +/// outgoing edges point at must already exist). /// /// BDD nodes are _quasi-reduced_, which means that there are no duplicate nodes (which we handle /// via Salsa interning). Unlike the typical BDD representation, which is (fully) reduced, we do @@ -1025,7 +1161,7 @@ impl<'db> ConstrainedTypeVar<'db> { /// means that our BDDs "remember" all of the individual constraints that they were created with. /// /// BDD nodes are also _ordered_, meaning that every path from the root of a BDD to a terminal node -/// visits variables in the same order. [`ConstrainedTypeVar::ordering`] defines the variable +/// visits variables in the same order. [`ConstraintId::ordering`] defines the variable /// ordering that we use for constraint set BDDs. /// /// In addition to this BDD variable ordering, we also track a `source_order` for each individual @@ -1035,252 +1171,344 @@ impl<'db> ConstrainedTypeVar<'db> { /// cannot use this ordering as our BDD variable ordering, since we calculate it from already /// constructed BDDs, and we need the BDD variable ordering to be fixed and available before /// construction starts.) 
-#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, get_size2::GetSize, salsa::Update)] -enum Node<'db> { - AlwaysFalse, +#[derive(Clone, Copy, Eq, Hash, PartialEq, get_size2::GetSize, salsa::Update)] +struct NodeId(u32); + +/// A special ID that is used for an "always true" / "always visible" constraint. +const ALWAYS_TRUE: NodeId = NodeId(0xffff_ffff); + +/// A special ID that is used for an "always false" / "never visible" constraint. +const ALWAYS_FALSE: NodeId = NodeId(0xffff_fffe); + +const SMALLEST_TERMINAL: NodeId = ALWAYS_FALSE; + +enum Node { AlwaysTrue, - Interior(InteriorNode<'db>), + AlwaysFalse, + Interior(InteriorNode), } -impl<'db> Node<'db> { +impl Node { /// Creates a new BDD node, ensuring that it is quasi-reduced. + #[expect(clippy::new_ret_no_self)] fn new( - db: &'db dyn Db, - constraint: ConstrainedTypeVar<'db>, - if_true: Node<'db>, - if_false: Node<'db>, + builder: &ConstraintSetBuilder<'_>, + constraint: ConstraintId, + if_true: NodeId, + if_false: NodeId, source_order: usize, - ) -> Self { - debug_assert!((if_true.root_constraint(db)).is_none_or(|root_constraint| { - root_constraint.ordering(db) > constraint.ordering(db) - })); + ) -> NodeId { debug_assert!( - (if_false.root_constraint(db)).is_none_or(|root_constraint| { - root_constraint.ordering(db) > constraint.ordering(db) - }) + if_true + .root_constraint(builder) + .is_none_or(|root_constraint| { + root_constraint.ordering() > constraint.ordering() + }) ); - if if_true == Node::AlwaysFalse && if_false == Node::AlwaysFalse { - return Node::AlwaysFalse; + debug_assert!( + if_false + .root_constraint(builder) + .is_none_or(|root_constraint| { + root_constraint.ordering() > constraint.ordering() + }) + ); + if if_true == ALWAYS_FALSE && if_false == ALWAYS_FALSE { + return ALWAYS_FALSE; } let max_source_order = source_order - .max(if_true.max_source_order(db)) - .max(if_false.max_source_order(db)); - Self::Interior(InteriorNode::new( - db, + .max(if_true.max_source_order(builder)) + 
.max(if_false.max_source_order(builder)); + builder.intern_interior_node(InteriorNodeData { constraint, if_true, if_false, source_order, max_source_order, - )) + }) } /// Creates a new BDD node for an individual constraint. (The BDD will evaluate to `true` when /// the constraint holds, and to `false` when it does not.) fn new_constraint( - db: &'db dyn Db, - constraint: ConstrainedTypeVar<'db>, + builder: &ConstraintSetBuilder<'_>, + constraint: ConstraintId, source_order: usize, - ) -> Self { - Self::Interior(InteriorNode::new( - db, + ) -> NodeId { + builder.intern_interior_node(InteriorNodeData { constraint, - Node::AlwaysTrue, - Node::AlwaysFalse, - source_order, + if_true: ALWAYS_TRUE, + if_false: ALWAYS_FALSE, source_order, - )) + max_source_order: source_order, + }) } /// Creates a new BDD node for a positive or negative individual constraint. (For a positive /// constraint, this returns the same BDD node as [`new_constraint`][Self::new_constraint]. For /// a negative constraint, it returns the negation of that BDD node.) 
fn new_satisfied_constraint( - db: &'db dyn Db, - constraint: ConstraintAssignment<'db>, + builder: &ConstraintSetBuilder<'_>, + constraint: ConstraintAssignment, source_order: usize, - ) -> Self { + ) -> NodeId { match constraint { - ConstraintAssignment::Positive(constraint) => Self::Interior(InteriorNode::new( - db, - constraint, - Node::AlwaysTrue, - Node::AlwaysFalse, - source_order, - source_order, - )), - ConstraintAssignment::Negative(constraint) => Self::Interior(InteriorNode::new( - db, - constraint, - Node::AlwaysFalse, - Node::AlwaysTrue, - source_order, - source_order, - )), + ConstraintAssignment::Positive(constraint) => { + builder.intern_interior_node(InteriorNodeData { + constraint, + if_true: ALWAYS_TRUE, + if_false: ALWAYS_FALSE, + source_order, + max_source_order: source_order, + }) + } + ConstraintAssignment::Negative(constraint) => { + builder.intern_interior_node(InteriorNodeData { + constraint, + if_true: ALWAYS_FALSE, + if_false: ALWAYS_TRUE, + source_order, + max_source_order: source_order, + }) + } + } + } +} + +impl NodeId { + fn node(self) -> Node { + match self { + ALWAYS_TRUE => Node::AlwaysTrue, + ALWAYS_FALSE => Node::AlwaysFalse, + _ => Node::Interior(InteriorNode(self)), } } + fn is_terminal(self) -> bool { + self.0 >= SMALLEST_TERMINAL.0 + } + /// Returns the BDD variable of the root node of this BDD, or `None` if this BDD is a terminal /// node. 
- fn root_constraint(self, db: &'db dyn Db) -> Option> { - match self { - Node::Interior(interior) => Some(interior.constraint(db)), - _ => None, + fn root_constraint(self, builder: &ConstraintSetBuilder<'_>) -> Option { + if self.is_terminal() { + return None; } + let interior = builder.interior_node_data(self); + Some(interior.constraint) } - fn max_source_order(self, db: &'db dyn Db) -> usize { - match self { - Node::Interior(interior) => interior.max_source_order(db), - Node::AlwaysTrue | Node::AlwaysFalse => 0, + fn max_source_order(self, builder: &ConstraintSetBuilder<'_>) -> usize { + if self.is_terminal() { + return 0; } + let interior = builder.interior_node_data(self); + interior.max_source_order } /// Returns a copy of this BDD node with all `source_order`s adjusted by the given amount. - fn with_adjusted_source_order(self, db: &'db dyn Db, delta: usize) -> Self { + fn with_adjusted_source_order(self, builder: &ConstraintSetBuilder<'_>, delta: usize) -> Self { if delta == 0 { return self; } - match self { - Node::AlwaysTrue => Node::AlwaysTrue, - Node::AlwaysFalse => Node::AlwaysFalse, - Node::Interior(interior) => Node::new( - db, - interior.constraint(db), - interior.if_true(db).with_adjusted_source_order(db, delta), - interior.if_false(db).with_adjusted_source_order(db, delta), - interior.source_order(db) + delta, - ), + match self.node() { + Node::AlwaysTrue | Node::AlwaysFalse => self, + Node::Interior(_) => { + let interior = builder.interior_node_data(self); + Node::new( + builder, + interior.constraint, + interior.if_true.with_adjusted_source_order(builder, delta), + interior.if_false.with_adjusted_source_order(builder, delta), + interior.source_order + delta, + ) + } } } - fn for_each_path(self, db: &'db dyn Db, mut f: impl FnMut(&PathAssignments<'db>)) { - match self { + fn for_each_path<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + mut f: impl FnMut(&PathAssignments), + ) { + match self.node() { Node::AlwaysTrue => 
{} Node::AlwaysFalse => {} Node::Interior(interior) => { - let mut path = interior.path_assignments(db); - self.for_each_path_inner(db, &mut f, &mut path); + let mut path = interior.path_assignments(builder); + self.for_each_path_inner(db, builder, &mut f, &mut path); } } } - fn for_each_path_inner( + fn for_each_path_inner<'db>( self, db: &'db dyn Db, - f: &mut dyn FnMut(&PathAssignments<'db>), - path: &mut PathAssignments<'db>, + builder: &ConstraintSetBuilder<'db>, + f: &mut dyn FnMut(&PathAssignments), + path: &mut PathAssignments, ) { - match self { + match self.node() { Node::AlwaysTrue => f(path), Node::AlwaysFalse => {} - Node::Interior(interior) => { - let constraint = interior.constraint(db); - let source_order = interior.source_order(db); - path.walk_edge(db, constraint.when_true(), source_order, |path, _| { - interior.if_true(db).for_each_path_inner(db, f, path); - }); - path.walk_edge(db, constraint.when_false(), source_order, |path, _| { - interior.if_false(db).for_each_path_inner(db, f, path); - }); + Node::Interior(_) => { + let interior = builder.interior_node_data(self); + path.walk_edge( + db, + builder, + interior.constraint.when_true(), + interior.source_order, + |path, _| interior.if_true.for_each_path_inner(db, builder, f, path), + ); + path.walk_edge( + db, + builder, + interior.constraint.when_false(), + interior.source_order, + |path, _| interior.if_false.for_each_path_inner(db, builder, f, path), + ); } } } /// Returns whether this BDD represent the constant function `true`. 
- fn is_always_satisfied(self, db: &'db dyn Db) -> bool { - match self { + fn is_always_satisfied<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + ) -> bool { + match self.node() { Node::AlwaysTrue => true, Node::AlwaysFalse => false, Node::Interior(interior) => { - let mut path = interior.path_assignments(db); - self.is_always_satisfied_inner(db, &mut path) + let mut path = interior.path_assignments(builder); + self.is_always_satisfied_inner(db, builder, &mut path) } } } - fn is_always_satisfied_inner(self, db: &'db dyn Db, path: &mut PathAssignments<'db>) -> bool { - match self { + fn is_always_satisfied_inner<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + path: &mut PathAssignments, + ) -> bool { + match self.node() { Node::AlwaysTrue => true, Node::AlwaysFalse => false, - Node::Interior(interior) => { + Node::Interior(_) => { // walk_edge will return None if this node's constraint (or anything we can derive // from it) causes the if_true edge to become impossible. We want to ignore // impossible paths, and so we treat them as passing the "always satisfied" check. 
- let constraint = interior.constraint(db); - let source_order = interior.source_order(db); + let interior = builder.interior_node_data(self); let true_always_satisfied = path - .walk_edge(db, constraint.when_true(), source_order, |path, _| { - interior.if_true(db).is_always_satisfied_inner(db, path) - }) + .walk_edge( + db, + builder, + interior.constraint.when_true(), + interior.source_order, + |path, _| { + interior + .if_true + .is_always_satisfied_inner(db, builder, path) + }, + ) .unwrap_or(true); if !true_always_satisfied { return false; } // Ditto for the if_false branch - path.walk_edge(db, constraint.when_false(), source_order, |path, _| { - interior.if_false(db).is_always_satisfied_inner(db, path) - }) + path.walk_edge( + db, + builder, + interior.constraint.when_false(), + interior.source_order, + |path, _| { + interior + .if_false + .is_always_satisfied_inner(db, builder, path) + }, + ) .unwrap_or(true) } } } /// Returns whether this BDD represent the constant function `false`. 
- fn is_never_satisfied(self, db: &'db dyn Db) -> bool { - match self { + fn is_never_satisfied<'db>(self, db: &'db dyn Db, builder: &ConstraintSetBuilder<'db>) -> bool { + match self.node() { Node::AlwaysTrue => false, Node::AlwaysFalse => true, Node::Interior(interior) => { - let mut path = interior.path_assignments(db); - self.is_never_satisfied_inner(db, &mut path) + let mut path = interior.path_assignments(builder); + self.is_never_satisfied_inner(db, builder, &mut path) } } } - fn is_never_satisfied_inner(self, db: &'db dyn Db, path: &mut PathAssignments<'db>) -> bool { - match self { + fn is_never_satisfied_inner<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + path: &mut PathAssignments, + ) -> bool { + match self.node() { Node::AlwaysTrue => false, Node::AlwaysFalse => true, - Node::Interior(interior) => { + Node::Interior(_) => { // walk_edge will return None if this node's constraint (or anything we can derive // from it) causes the if_true edge to become impossible. We want to ignore // impossible paths, and so we treat them as passing the "never satisfied" check. 
- let constraint = interior.constraint(db); - let source_order = interior.source_order(db); + let interior = builder.interior_node_data(self); let true_never_satisfied = path - .walk_edge(db, constraint.when_true(), source_order, |path, _| { - interior.if_true(db).is_never_satisfied_inner(db, path) - }) + .walk_edge( + db, + builder, + interior.constraint.when_true(), + interior.source_order, + |path, _| interior.if_true.is_never_satisfied_inner(db, builder, path), + ) .unwrap_or(true); if !true_never_satisfied { return false; } // Ditto for the if_false branch - path.walk_edge(db, constraint.when_false(), source_order, |path, _| { - interior.if_false(db).is_never_satisfied_inner(db, path) - }) + path.walk_edge( + db, + builder, + interior.constraint.when_false(), + interior.source_order, + |path, _| { + interior + .if_false + .is_never_satisfied_inner(db, builder, path) + }, + ) .unwrap_or(true) } } } - fn solutions(self, db: &'db dyn Db) -> Solutions<'db> { - match self { + fn solutions<'db, 'c>( + self, + db: &'db dyn Db, + builder: &'c ConstraintSetBuilder<'db>, + ) -> Solutions<'db, 'c> { + match self.node() { Node::AlwaysTrue => Solutions::Unconstrained, Node::AlwaysFalse => Solutions::Unsatisfiable, - Node::Interior(interior) => interior.solutions(db), + Node::Interior(interior) => interior.solutions(db, builder), } } /// Returns the negation of this BDD. - fn negate(self, db: &'db dyn Db) -> Self { - match self { - Node::AlwaysTrue => Node::AlwaysFalse, - Node::AlwaysFalse => Node::AlwaysTrue, - Node::Interior(interior) => interior.negate(db), + fn negate(self, builder: &ConstraintSetBuilder<'_>) -> Self { + match self.node() { + Node::AlwaysTrue => ALWAYS_FALSE, + Node::AlwaysFalse => ALWAYS_TRUE, + Node::Interior(interior) => interior.negate(builder), } } @@ -1288,7 +1516,7 @@ impl<'db> Node<'db> { /// /// In the result, `self` will appear before `other` according to the `source_order` of the BDD /// nodes. 
- fn or_with_offset(self, db: &'db dyn Db, other: Self) -> Self { + fn or_with_offset(self, builder: &ConstraintSetBuilder<'_>, other: Self) -> Self { // To ensure that `self` appears before `other` in `source_order`, we add the maximum // `source_order` of the lhs to all of the `source_order`s in the rhs. // @@ -1296,35 +1524,46 @@ impl<'db> Node<'db> { // avoid all of the extra work in the calls to with_adjusted_source_order, and apply the // adjustment lazily when walking a BDD tree. (ditto below in the other _with_offset // methods) - let other_offset = self.max_source_order(db); - self.or_inner(db, other, other_offset) + let other_offset = self.max_source_order(builder); + self.or_inner(builder, other, other_offset) } - fn or(self, db: &'db dyn Db, other: Self) -> Self { - self.or_inner(db, other, 0) + fn or(self, builder: &ConstraintSetBuilder<'_>, other: Self) -> Self { + self.or_inner(builder, other, 0) } - fn or_inner(self, db: &'db dyn Db, other: Self, other_offset: usize) -> Self { - match (self, other) { - (Node::AlwaysTrue, Node::AlwaysTrue) => Node::AlwaysTrue, - (Node::AlwaysTrue, Node::Interior(other_interior)) => Node::new( - db, - other_interior.constraint(db), - Node::AlwaysTrue, - Node::AlwaysTrue, - other_interior.source_order(db) + other_offset, - ), - (Node::Interior(self_interior), Node::AlwaysTrue) => Node::new( - db, - self_interior.constraint(db), - Node::AlwaysTrue, - Node::AlwaysTrue, - self_interior.source_order(db), - ), - (Node::AlwaysFalse, _) => other.with_adjusted_source_order(db, other_offset), + fn or_inner( + self, + builder: &ConstraintSetBuilder<'_>, + other: Self, + other_offset: usize, + ) -> Self { + match (self.node(), other.node()) { + (Node::AlwaysTrue, Node::AlwaysTrue) => ALWAYS_TRUE, + (Node::AlwaysTrue, Node::Interior(_)) => { + let other_interior = builder.interior_node_data(other); + Node::new( + builder, + other_interior.constraint, + ALWAYS_TRUE, + ALWAYS_TRUE, + other_interior.source_order + other_offset, + ) + 
} + (Node::Interior(_), Node::AlwaysTrue) => { + let self_interior = builder.interior_node_data(self); + Node::new( + builder, + self_interior.constraint, + ALWAYS_TRUE, + ALWAYS_TRUE, + self_interior.source_order, + ) + } + (Node::AlwaysFalse, _) => other.with_adjusted_source_order(builder, other_offset), (_, Node::AlwaysFalse) => self, (Node::Interior(self_interior), Node::Interior(other_interior)) => { - self_interior.or(db, other_interior, other_offset) + self_interior.or(builder, other_interior, other_offset) } } } @@ -1347,12 +1586,13 @@ impl<'db> Node<'db> { /// that has no effect (`0 ∨ a = a`). It is returned if the iterator is empty. The "one" is the /// value that saturates (`1 ∨ a = 1`). We use this to short-circuit; if any element BDD or any /// intermediate result evaluates to "one", we can return early. - fn tree_fold( + fn tree_fold<'db>( db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, nodes: impl Iterator, zero: Self, - is_one: impl Fn(Self, &'db dyn Db) -> bool, - mut combine: impl FnMut(Self, &'db dyn Db, Self) -> Self, + is_one: impl Fn(Self, &'db dyn Db, &ConstraintSetBuilder<'db>) -> bool, + mut combine: impl FnMut(Self, &ConstraintSetBuilder<'db>, Self) -> Self, ) -> Self { // To implement the "linear" shape described above, we could collect the iterator elements // into a vector, and then use the fold at the bottom of this method to combine the @@ -1378,9 +1618,9 @@ impl<'db> Node<'db> { // // We use a SmallVec for the accumulator so that we don't have to spill over to the heap // until the iterator passes 256 elements. 
- let mut accumulator: SmallVec<[(Node<'db>, u8); 8]> = SmallVec::default(); + let mut accumulator: SmallVec<[(NodeId, u8); 8]> = SmallVec::default(); for node in nodes { - if is_one(node, db) { + if is_one(node, db, builder) { return node; } @@ -1390,8 +1630,8 @@ impl<'db> Node<'db> { .is_some_and(|(_, existing)| *existing == depth) { let (existing, _) = accumulator.pop().expect("accumulator should not be empty"); - node = combine(existing, db, node); - if is_one(node, db) { + node = combine(existing, builder, node); + if is_one(node, db, builder) { return node; } depth += 1; @@ -1404,24 +1644,34 @@ impl<'db> Node<'db> { // produce the overall result. accumulator .into_iter() - .fold(zero, |result, (node, _)| combine(result, db, node)) + .fold(zero, |result, (node, _)| combine(result, builder, node)) } - fn distributed_or(db: &'db dyn Db, nodes: impl Iterator>) -> Self { + fn distributed_or<'db>( + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + nodes: impl Iterator, + ) -> Self { Self::tree_fold( db, + builder, nodes, - Node::AlwaysFalse, + ALWAYS_FALSE, Self::is_always_satisfied, Self::or_with_offset, ) } - fn distributed_and(db: &'db dyn Db, nodes: impl Iterator>) -> Self { + fn distributed_and<'db>( + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + nodes: impl Iterator, + ) -> Self { Self::tree_fold( db, + builder, nodes, - Node::AlwaysTrue, + ALWAYS_TRUE, Self::is_never_satisfied, Self::and_with_offset, ) @@ -1431,45 +1681,56 @@ impl<'db> Node<'db> { /// /// In the result, `self` will appear before `other` according to the `source_order` of the BDD /// nodes. - fn and_with_offset(self, db: &'db dyn Db, other: Self) -> Self { + fn and_with_offset(self, builder: &ConstraintSetBuilder<'_>, other: Self) -> Self { // To ensure that `self` appears before `other` in `source_order`, we add the maximum // `source_order` of the lhs to all of the `source_order`s in the rhs. 
- let other_offset = self.max_source_order(db); - self.and_inner(db, other, other_offset) + let other_offset = self.max_source_order(builder); + self.and_inner(builder, other, other_offset) } - fn and(self, db: &'db dyn Db, other: Self) -> Self { - self.and_inner(db, other, 0) + fn and(self, builder: &ConstraintSetBuilder<'_>, other: Self) -> Self { + self.and_inner(builder, other, 0) } - fn and_inner(self, db: &'db dyn Db, other: Self, other_offset: usize) -> Self { - match (self, other) { - (Node::AlwaysFalse, Node::AlwaysFalse) => Node::AlwaysFalse, - (Node::AlwaysFalse, Node::Interior(other_interior)) => Node::new( - db, - other_interior.constraint(db), - Node::AlwaysFalse, - Node::AlwaysFalse, - other_interior.source_order(db) + other_offset, - ), - (Node::Interior(self_interior), Node::AlwaysFalse) => Node::new( - db, - self_interior.constraint(db), - Node::AlwaysFalse, - Node::AlwaysFalse, - self_interior.source_order(db), - ), - (Node::AlwaysTrue, _) => other.with_adjusted_source_order(db, other_offset), + fn and_inner( + self, + builder: &ConstraintSetBuilder<'_>, + other: Self, + other_offset: usize, + ) -> Self { + match (self.node(), other.node()) { + (Node::AlwaysFalse, Node::AlwaysFalse) => ALWAYS_FALSE, + (Node::AlwaysFalse, Node::Interior(_)) => { + let other_interior = builder.interior_node_data(other); + Node::new( + builder, + other_interior.constraint, + ALWAYS_FALSE, + ALWAYS_FALSE, + other_interior.source_order + other_offset, + ) + } + (Node::Interior(_), Node::AlwaysFalse) => { + let self_interior = builder.interior_node_data(self); + Node::new( + builder, + self_interior.constraint, + ALWAYS_FALSE, + ALWAYS_FALSE, + self_interior.source_order, + ) + } + (Node::AlwaysTrue, _) => other.with_adjusted_source_order(builder, other_offset), (_, Node::AlwaysTrue) => self, (Node::Interior(self_interior), Node::Interior(other_interior)) => { - self_interior.and(db, other_interior, other_offset) + self_interior.and(builder, other_interior, 
other_offset) } } } - fn implies(self, db: &'db dyn Db, other: Self) -> Self { + fn implies(self, builder: &ConstraintSetBuilder<'_>, other: Self) -> Self { // p → q == ¬p ∨ q - self.negate(db).or(db, other) + self.negate(builder).or(builder, other) } /// Returns a new BDD that evaluates to `true` when both input BDDs evaluate to the same @@ -1477,51 +1738,68 @@ impl<'db> Node<'db> { /// /// In the result, `self` will appear before `other` according to the `source_order` of the BDD /// nodes. - fn iff_with_offset(self, db: &'db dyn Db, other: Self) -> Self { + fn iff_with_offset(self, builder: &ConstraintSetBuilder<'_>, other: Self) -> Self { // To ensure that `self` appears before `other` in `source_order`, we add the maximum // `source_order` of the lhs to all of the `source_order`s in the rhs. - let other_offset = self.max_source_order(db); - self.iff_inner(db, other, other_offset) + let other_offset = self.max_source_order(builder); + self.iff_inner(builder, other, other_offset) } - fn iff(self, db: &'db dyn Db, other: Self) -> Self { - self.iff_inner(db, other, 0) + fn iff(self, builder: &ConstraintSetBuilder<'_>, other: Self) -> Self { + self.iff_inner(builder, other, 0) } - fn iff_inner(self, db: &'db dyn Db, other: Self, other_offset: usize) -> Self { - match (self, other) { + fn iff_inner( + self, + builder: &ConstraintSetBuilder<'_>, + other: Self, + other_offset: usize, + ) -> Self { + match (self.node(), other.node()) { (Node::AlwaysFalse, Node::AlwaysFalse) | (Node::AlwaysTrue, Node::AlwaysTrue) => { - Node::AlwaysTrue + ALWAYS_TRUE } (Node::AlwaysTrue, Node::AlwaysFalse) | (Node::AlwaysFalse, Node::AlwaysTrue) => { - Node::AlwaysFalse + ALWAYS_FALSE } - (Node::AlwaysTrue | Node::AlwaysFalse, Node::Interior(interior)) => Node::new( - db, - interior.constraint(db), - self.iff_inner(db, interior.if_true(db), other_offset), - self.iff_inner(db, interior.if_false(db), other_offset), - interior.source_order(db) + other_offset, - ), - 
(Node::Interior(interior), Node::AlwaysTrue | Node::AlwaysFalse) => Node::new( - db, - interior.constraint(db), - interior.if_true(db).iff_inner(db, other, other_offset), - interior.if_false(db).iff_inner(db, other, other_offset), - interior.source_order(db), - ), - (Node::Interior(a), Node::Interior(b)) => a.iff(db, b, other_offset), + (Node::AlwaysTrue | Node::AlwaysFalse, Node::Interior(_)) => { + let interior = builder.interior_node_data(other); + Node::new( + builder, + interior.constraint, + self.iff_inner(builder, interior.if_true, other_offset), + self.iff_inner(builder, interior.if_false, other_offset), + interior.source_order + other_offset, + ) + } + (Node::Interior(_), Node::AlwaysTrue | Node::AlwaysFalse) => { + let interior = builder.interior_node_data(self); + Node::new( + builder, + interior.constraint, + interior.if_true.iff_inner(builder, other, other_offset), + interior.if_false.iff_inner(builder, other, other_offset), + interior.source_order, + ) + } + (Node::Interior(a), Node::Interior(b)) => a.iff(builder, b, other_offset), } } /// Returns the `if-then-else` of three BDDs: when `self` evaluates to `true`, it returns what /// `then_node` evaluates to; otherwise it returns what `else_node` evaluates to. 
- fn ite(self, db: &'db dyn Db, then_node: Self, else_node: Self) -> Self { - self.and(db, then_node) - .or(db, self.negate(db).and(db, else_node)) + fn ite(self, builder: &ConstraintSetBuilder<'_>, then_node: Self, else_node: Self) -> Self { + self.and(builder, then_node) + .or(builder, self.negate(builder).and(builder, else_node)) } - fn implies_subtype_of(self, db: &'db dyn Db, lhs: Type<'db>, rhs: Type<'db>) -> Self { + fn implies_subtype_of<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + lhs: Type<'db>, + rhs: Type<'db>, + ) -> Self { // When checking subtyping involving a typevar, we can turn the subtyping check into a // constraint (i.e, "is `T` a subtype of `int` becomes the constraint `T ≤ int`), and then // check when the BDD implies that constraint. @@ -1531,14 +1809,16 @@ impl<'db> Node<'db> { // perform. So we have to take the appropriate materialization when translating the check // into a constraint. let constraint = match (lhs, rhs) { - (Type::TypeVar(bound_typevar), _) => ConstrainedTypeVar::new_node( + (Type::TypeVar(bound_typevar), _) => Constraint::new_node( db, + builder, bound_typevar, Type::Never, rhs.bottom_materialization(db), ), - (_, Type::TypeVar(bound_typevar)) => ConstrainedTypeVar::new_node( + (_, Type::TypeVar(bound_typevar)) => Constraint::new_node( db, + builder, bound_typevar, lhs.top_materialization(db), Type::object(), @@ -1546,44 +1826,50 @@ impl<'db> Node<'db> { _ => panic!("at least one type should be a typevar"), }; - self.implies(db, constraint) + self.implies(builder, constraint) } - fn satisfied_by_all_typevars( + fn satisfied_by_all_typevars<'db>( self, db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, inferable: InferableTypeVars<'_, 'db>, ) -> bool { - match self { + match self.node() { Node::AlwaysTrue => return true, Node::AlwaysFalse => return false, Node::Interior(_) => {} } let mut typevars = FxHashSet::default(); - self.for_each_constraint(db, &mut |constraint, _| { - 
typevars.insert(constraint.typevar(db)); + self.for_each_constraint(builder, &mut |constraint, _| { + let constraint = builder.constraint_data(constraint); + typevars.insert(constraint.typevar); }); // Returns if some specialization satisfies this constraint set. - let some_specialization_satisfies = move |specializations: Node<'db>| { - let when_satisfied = specializations.implies(db, self).and(db, specializations); - !when_satisfied.is_never_satisfied(db) + let some_specialization_satisfies = move |specializations: NodeId| { + let when_satisfied = specializations + .implies(builder, self) + .and(builder, specializations); + !when_satisfied.is_never_satisfied(db, builder) }; // Returns if all specializations satisfy this constraint set. - let all_specializations_satisfy = move |specializations: Node<'db>| { - let when_satisfied = specializations.implies(db, self).and(db, specializations); + let all_specializations_satisfy = move |specializations: NodeId| { + let when_satisfied = specializations + .implies(builder, self) + .and(builder, specializations); when_satisfied - .iff(db, specializations) - .is_always_satisfied(db) + .iff(builder, specializations) + .is_always_satisfied(db, builder) }; for typevar in typevars { if typevar.is_inferable(db, inferable) { // If the typevar is in inferable position, we need to verify that some valid // specialization satisfies the constraint set. - let valid_specializations = typevar.valid_specializations(db); + let valid_specializations = typevar.valid_specializations(db, builder); if !some_specialization_satisfies(valid_specializations) { return false; } @@ -1600,7 +1886,7 @@ impl<'db> Node<'db> { // constraint to refer to the synthetic typevar instead of the original gradual // constraint. 
let (static_specializations, gradual_constraints) = - typevar.required_specializations(db); + typevar.required_specializations(db, builder); if !all_specializations_satisfy(static_specializations) { return false; } @@ -1618,46 +1904,60 @@ impl<'db> Node<'db> { /// Returns a new BDD that is the _existential abstraction_ of `self` for a set of typevars. /// The result will return true whenever `self` returns true for _any_ assignment of those /// typevars. The result will not contain any constraints that mention those typevars. - fn exists( + fn exists<'db>( self, db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, bound_typevars: impl IntoIterator>, ) -> Self { bound_typevars .into_iter() .fold(self, |abstracted, bound_typevar| { - abstracted.exists_one(db, bound_typevar) + abstracted.exists_one(db, builder, bound_typevar) }) } - fn exists_one(self, db: &'db dyn Db, bound_typevar: BoundTypeVarIdentity<'db>) -> Self { - match self { - Node::AlwaysTrue => Node::AlwaysTrue, - Node::AlwaysFalse => Node::AlwaysFalse, - Node::Interior(interior) => interior.exists_one(db, bound_typevar), + fn exists_one<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + bound_typevar: BoundTypeVarIdentity<'db>, + ) -> Self { + match self.node() { + Node::AlwaysTrue => ALWAYS_TRUE, + Node::AlwaysFalse => ALWAYS_FALSE, + Node::Interior(interior) => interior.exists_one(db, builder, bound_typevar), } } /// Returns a new BDD that is the _existential abstraction_ of `self` for a set of typevars. /// All typevars _other_ than the one given will be removed and abstracted away. 
- fn retain_one(self, db: &'db dyn Db, bound_typevar: BoundTypeVarIdentity<'db>) -> Self { - match self { - Node::AlwaysTrue => Node::AlwaysTrue, - Node::AlwaysFalse => Node::AlwaysFalse, - Node::Interior(interior) => interior.retain_one(db, bound_typevar), + fn retain_one<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + bound_typevar: BoundTypeVarIdentity<'db>, + ) -> Self { + match self.node() { + Node::AlwaysTrue => ALWAYS_TRUE, + Node::AlwaysFalse => ALWAYS_FALSE, + Node::Interior(interior) => interior.retain_one(db, builder, bound_typevar), } } - fn abstract_one_inner( + fn abstract_one_inner<'db>( self, db: &'db dyn Db, - should_remove: &mut dyn FnMut(ConstrainedTypeVar<'db>) -> bool, - path: &mut PathAssignments<'db>, + builder: &ConstraintSetBuilder<'db>, + should_remove: &mut dyn FnMut(ConstraintId) -> bool, + path: &mut PathAssignments, ) -> Self { - match self { - Node::AlwaysTrue => Node::AlwaysTrue, - Node::AlwaysFalse => Node::AlwaysFalse, - Node::Interior(interior) => interior.abstract_one_inner(db, should_remove, path), + match self.node() { + Node::AlwaysTrue => ALWAYS_TRUE, + Node::AlwaysFalse => ALWAYS_FALSE, + Node::Interior(interior) => { + interior.abstract_one_inner(db, builder, should_remove, path) + } } } @@ -1673,23 +1973,25 @@ impl<'db> Node<'db> { /// /// If the abstracted BDD does not mention the typevar at all (i.e., it leaves the typevar /// completely unconstrained), we will invoke your callback once with `None`. 
- fn find_representative_types( + fn find_representative_types<'db>( self, db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, bound_typevar: BoundTypeVarIdentity<'db>, mut f: impl FnMut(Option<&[RepresentativeBounds<'db>]>), ) { - self.retain_one(db, bound_typevar) - .find_representative_types_inner(db, &mut Vec::default(), &mut f); + self.retain_one(db, builder, bound_typevar) + .find_representative_types_inner(db, builder, &mut Vec::default(), &mut f); } - fn find_representative_types_inner( + fn find_representative_types_inner<'db>( self, db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, current_bounds: &mut Vec>, f: &mut dyn FnMut(Option<&[RepresentativeBounds<'db>]>), ) { - match self { + match self.node() { Node::AlwaysTrue => { // If we reach the `true` terminal, the path we've been following represents one // representative type. @@ -1725,7 +2027,8 @@ impl<'db> Node<'db> { // invalid specialization, so we skip it. } - Node::Interior(interior) => { + Node::Interior(_) => { + let interior = builder.interior_node_data(self); let reset_point = current_bounds.len(); // For an interior node, there are two outgoing paths: one for the `if_true` @@ -1735,10 +2038,10 @@ impl<'db> Node<'db> { // on the types that satisfy the current path through the BDD. So we intersect the // current glb/lub with the constraint's bounds to get the new glb/lub for the // recursive call. - current_bounds.push(RepresentativeBounds::from_interior_node(db, interior)); + current_bounds.push(RepresentativeBounds::from_interior_node(builder, interior)); interior - .if_true(db) - .find_representative_types_inner(db, current_bounds, f); + .if_true + .find_representative_types_inner(db, builder, current_bounds, f); current_bounds.truncate(reset_point); // For the `if_false` branch, then the types that satisfy the current path through @@ -1752,8 +2055,8 @@ impl<'db> Node<'db> { // the path to incorporate that negative "hole" in the set of valid types for this // path. 
interior - .if_false(db) - .find_representative_types_inner(db, current_bounds, f); + .if_false + .find_representative_types_inner(db, builder, current_bounds, f); } } } @@ -1763,15 +2066,16 @@ impl<'db> Node<'db> { /// will not be present in the result.) /// /// Also returns whether _all_ of the restricted variables appeared in the BDD. - fn restrict( + fn restrict<'db>( self, db: &'db dyn Db, - assignment: impl IntoIterator>, + builder: &ConstraintSetBuilder<'db>, + assignment: impl IntoIterator, ) -> (Self, bool) { assignment .into_iter() .fold((self, true), |(restricted, found), assignment| { - let (restricted, found_this) = restricted.restrict_one(db, assignment); + let (restricted, found_this) = restricted.restrict_one(db, builder, assignment); (restricted, found && found_this) }) } @@ -1781,30 +2085,36 @@ impl<'db> Node<'db> { /// will not be present in the result.) /// /// Also returns whether the restricted variable appeared in the BDD. - fn restrict_one(self, db: &'db dyn Db, assignment: ConstraintAssignment<'db>) -> (Self, bool) { - match self { - Node::AlwaysTrue => (Node::AlwaysTrue, false), - Node::AlwaysFalse => (Node::AlwaysFalse, false), - Node::Interior(interior) => interior.restrict_one(db, assignment), + fn restrict_one<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + assignment: ConstraintAssignment, + ) -> (Self, bool) { + match self.node() { + Node::AlwaysTrue | Node::AlwaysFalse => (self, false), + Node::Interior(interior) => interior.restrict_one(db, builder, assignment), } } /// Returns a new BDD with any occurrence of `left ∧ right` replaced with `replacement`. 
- fn substitute_intersection( + #[expect(clippy::too_many_arguments)] + fn substitute_intersection<'db>( self, db: &'db dyn Db, - left: ConstraintAssignment<'db>, + builder: &ConstraintSetBuilder<'db>, + left: ConstraintAssignment, left_source_order: usize, - right: ConstraintAssignment<'db>, + right: ConstraintAssignment, right_source_order: usize, - replacement: Node<'db>, + replacement: NodeId, ) -> Self { // We perform a Shannon expansion to find out what the input BDD evaluates to when: // - left and right are both true // - left is false // - left is true and right is false // This covers the entire truth table of `left ∧ right`. - let (when_left_and_right, both_found) = self.restrict(db, [left, right]); + let (when_left_and_right, both_found) = self.restrict(db, builder, [left, right]); if !both_found { // If left and right are not both present in the input BDD, we should not even attempt // the substitution, since the Shannon expansion might introduce the missing variables! @@ -1812,8 +2122,8 @@ impl<'db> Node<'db> { // with the input. return self; } - let (when_not_left, _) = self.restrict(db, [left.negated()]); - let (when_left_but_not_right, _) = self.restrict(db, [left, right.negated()]); + let (when_not_left, _) = self.restrict(db, builder, [left.negated()]); + let (when_left_but_not_right, _) = self.restrict(db, builder, [left, right.negated()]); // The result should test `replacement`, and when it's true, it should produce the same // output that input would when `left ∧ right` is true. When replacement is false, it @@ -1830,18 +2140,18 @@ impl<'db> Node<'db> { // false // // (Note that the `else` branch shouldn't be reachable, but we have to provide something!) 
- let left_node = Node::new_satisfied_constraint(db, left, left_source_order); - let right_node = Node::new_satisfied_constraint(db, right, right_source_order); - let right_result = right_node.ite(db, Node::AlwaysFalse, when_left_but_not_right); - let left_result = left_node.ite(db, right_result, when_not_left); - let result = replacement.ite(db, when_left_and_right, left_result); + let left_node = Node::new_satisfied_constraint(builder, left, left_source_order); + let right_node = Node::new_satisfied_constraint(builder, right, right_source_order); + let right_result = right_node.ite(builder, ALWAYS_FALSE, when_left_but_not_right); + let left_result = left_node.ite(builder, right_result, when_not_left); + let result = replacement.ite(builder, when_left_and_right, left_result); // Lastly, verify that the result is consistent with the input. (It must produce the same // results when `left ∧ right`.) If it doesn't, the substitution isn't valid, and we should // return the original BDD unmodified. - let validity = replacement.iff(db, left_node.and(db, right_node)); - let constrained_original = self.and(db, validity); - let constrained_replacement = result.and(db, validity); + let validity = replacement.iff(builder, left_node.and(builder, right_node)); + let constrained_original = self.and(builder, validity); + let constrained_replacement = result.and(builder, validity); if constrained_original == constrained_replacement { result } else { @@ -1850,14 +2160,16 @@ impl<'db> Node<'db> { } /// Returns a new BDD with any occurrence of `left ∨ right` replaced with `replacement`. 
- fn substitute_union( + #[expect(clippy::too_many_arguments)] + fn substitute_union<'db>( self, db: &'db dyn Db, - left: ConstraintAssignment<'db>, + builder: &ConstraintSetBuilder<'db>, + left: ConstraintAssignment, left_source_order: usize, - right: ConstraintAssignment<'db>, + right: ConstraintAssignment, right_source_order: usize, - replacement: Node<'db>, + replacement: NodeId, ) -> Self { // We perform a Shannon expansion to find out what the input BDD evaluates to when: // - left and right are both true @@ -1865,7 +2177,7 @@ impl<'db> Node<'db> { // - left is false and right is true // - left and right are both false // This covers the entire truth table of `left ∨ right`. - let (when_l1_r1, both_found) = self.restrict(db, [left, right]); + let (when_l1_r1, both_found) = self.restrict(db, builder, [left, right]); if !both_found { // If left and right are not both present in the input BDD, we should not even attempt // the substitution, since the Shannon expansion might introduce the missing variables! @@ -1873,9 +2185,9 @@ impl<'db> Node<'db> { // with the input. return self; } - let (when_l0_r0, _) = self.restrict(db, [left.negated(), right.negated()]); - let (when_l1_r0, _) = self.restrict(db, [left, right.negated()]); - let (when_l0_r1, _) = self.restrict(db, [left.negated(), right]); + let (when_l0_r0, _) = self.restrict(db, builder, [left.negated(), right.negated()]); + let (when_l1_r0, _) = self.restrict(db, builder, [left, right.negated()]); + let (when_l0_r1, _) = self.restrict(db, builder, [left.negated(), right]); // The result should test `replacement`, and when it's true, it should produce the same // output that input would when `left ∨ right` is true. 
For OR, this is the union of what @@ -1888,19 +2200,19 @@ impl<'db> Node<'db> { // else // when_l0_r0 let result = replacement.ite( - db, - when_l1_r0.or(db, when_l0_r1.or(db, when_l1_r1)), + builder, + when_l1_r0.or(builder, when_l0_r1.or(builder, when_l1_r1)), when_l0_r0, ); // Lastly, verify that the result is consistent with the input. (It must produce the same // results when `left ∨ right`.) If it doesn't, the substitution isn't valid, and we should // return the original BDD unmodified. - let left_node = Node::new_satisfied_constraint(db, left, left_source_order); - let right_node = Node::new_satisfied_constraint(db, right, right_source_order); - let validity = replacement.iff(db, left_node.or(db, right_node)); - let constrained_original = self.and(db, validity); - let constrained_replacement = result.and(db, validity); + let left_node = Node::new_satisfied_constraint(builder, left, left_source_order); + let right_node = Node::new_satisfied_constraint(builder, right, right_source_order); + let validity = replacement.iff(builder, left_node.or(builder, right_node)); + let constrained_original = self.and(builder, validity); + let constrained_replacement = result.and(builder, validity); if constrained_original == constrained_replacement { result } else { @@ -1914,15 +2226,16 @@ impl<'db> Node<'db> { /// the constraint.) 
fn for_each_constraint( self, - db: &'db dyn Db, - f: &mut dyn FnMut(ConstrainedTypeVar<'db>, usize), + builder: &ConstraintSetBuilder<'_>, + f: &mut dyn FnMut(ConstraintId, usize), ) { - let Node::Interior(interior) = self else { + if self.is_terminal() { return; - }; - f(interior.constraint(db), interior.source_order(db)); - interior.if_true(db).for_each_constraint(db, f); - interior.if_false(db).for_each_constraint(db, f); + } + let interior = builder.interior_node_data(self); + f(interior.constraint, interior.source_order); + interior.if_true.for_each_constraint(builder, f); + interior.if_false.for_each_constraint(builder, f); } /// Simplifies a BDD, replacing constraints with simpler or smaller constraints where possible. @@ -1946,33 +2259,37 @@ impl<'db> Node<'db> { /// purposes). That means we have some tech debt here, since there is a lot of duplicate logic /// between `simplify_for_display` and `SequentMap`. It would be nice to update our display /// logic to use the sequent map as much as possible. But that can happen later. - fn simplify_for_display(self, db: &'db dyn Db) -> Self { - match self { + fn simplify_for_display<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + ) -> Self { + match self.node() { Node::AlwaysTrue | Node::AlwaysFalse => self, - Node::Interior(interior) => interior.simplify(db), + Node::Interior(interior) => interior.simplify(db, builder), } } /// Returns clauses describing all of the variable assignments that cause this BDD to evaluate /// to `true`. (This translates the boolean function that this BDD represents into DNF form.) 
- fn satisfied_clauses(self, db: &'db dyn Db) -> SatisfiedClauses<'db> { - struct Searcher<'db> { - clauses: SatisfiedClauses<'db>, - current_clause: SatisfiedClause<'db>, + fn satisfied_clauses(self, builder: &ConstraintSetBuilder<'_>) -> SatisfiedClauses { + struct Searcher { + clauses: SatisfiedClauses, + current_clause: SatisfiedClause, } - impl<'db> Searcher<'db> { - fn visit_node(&mut self, db: &'db dyn Db, node: Node<'db>) { - match node { + impl Searcher { + fn visit_node(&mut self, builder: &ConstraintSetBuilder<'_>, node: NodeId) { + match node.node() { Node::AlwaysFalse => {} Node::AlwaysTrue => self.clauses.push(self.current_clause.clone()), - Node::Interior(interior) => { - let interior_constraint = interior.constraint(db); - self.current_clause.push(interior_constraint.when_true()); - self.visit_node(db, interior.if_true(db)); + Node::Interior(_) => { + let interior = builder.interior_node_data(node); + self.current_clause.push(interior.constraint.when_true()); + self.visit_node(builder, interior.if_true); self.current_clause.pop(); - self.current_clause.push(interior_constraint.when_false()); - self.visit_node(db, interior.if_false(db)); + self.current_clause.push(interior.constraint.when_false()); + self.visit_node(builder, interior.if_false); self.current_clause.pop(); } } @@ -1983,36 +2300,41 @@ impl<'db> Node<'db> { clauses: SatisfiedClauses::default(), current_clause: SatisfiedClause::default(), }; - searcher.visit_node(db, self); + searcher.visit_node(builder, self); searcher.clauses } - fn display(self, db: &'db dyn Db) -> impl Display { + fn display<'db>(self, db: &'db dyn Db, builder: &ConstraintSetBuilder<'db>) -> impl Display { // To render a BDD in DNF form, you perform a depth-first search of the BDD tree, looking // for any path that leads to the AlwaysTrue terminal. Each such path represents one of the // intersection clauses in the DNF form. 
The path traverses zero or more interior nodes, // and takes either the true or false edge from each one. That gives you the positive or // negative individual constraints in the path's clause. - struct DisplayNode<'db> { - node: Node<'db>, + struct DisplayNode<'db, 'c> { + node: NodeId, db: &'db dyn Db, + builder: &'c ConstraintSetBuilder<'db>, } - impl Display for DisplayNode<'_> { + impl Display for DisplayNode<'_, '_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self.node { + match self.node.node() { Node::AlwaysTrue => f.write_str("always"), Node::AlwaysFalse => f.write_str("never"), Node::Interior(_) => { - let mut clauses = self.node.satisfied_clauses(self.db); - clauses.simplify(self.db); - Display::fmt(&clauses.display(self.db), f) + let mut clauses = self.node.satisfied_clauses(self.builder); + clauses.simplify(self.db, self.builder); + Display::fmt(&clauses.display(self.db, self.builder), f) } } } } - DisplayNode { node: self, db } + DisplayNode { + node: self, + db, + builder, + } } /// Displays the full graph structure of this BDD. 
`prefix` will be output before each line @@ -2033,42 +2355,51 @@ impl<'db> Node<'db> { /// │ └─₀ never /// └─₀ never /// ``` - fn display_graph(self, db: &'db dyn Db, prefix: &dyn Display) -> impl Display { + fn display_graph<'db, 'a>( + self, + db: &'db dyn Db, + builder: &'a ConstraintSetBuilder<'db>, + prefix: &'a dyn Display, + ) -> impl Display + 'a { struct DisplayNode<'a, 'db> { db: &'db dyn Db, - node: Node<'db>, + builder: &'a ConstraintSetBuilder<'db>, + node: NodeId, prefix: &'a dyn Display, - seen: RefCell>>, + seen: RefCell>, } fn format_node<'db>( db: &'db dyn Db, - node: Node<'db>, + builder: &ConstraintSetBuilder<'db>, + node: NodeId, prefix: &dyn Display, - seen: &RefCell>>, + seen: &RefCell>, f: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { - match node { + match node.node() { Node::AlwaysTrue => write!(f, "always"), Node::AlwaysFalse => write!(f, "never"), - Node::Interior(interior) => { - let (index, is_new) = seen.borrow_mut().insert_full(interior); + Node::Interior(_) => { + let (index, is_new) = seen.borrow_mut().insert_full(node); if !is_new { return write!(f, "<{index}> SHARED"); } + let interior = builder.interior_node_data(node); write!( f, "<{index}> {} {}/{}", - interior.constraint(db).display(db), - interior.source_order(db), - interior.max_source_order(db), + interior.constraint.display(db, builder), + interior.source_order, + interior.max_source_order, )?; // Calling display_graph recursively here causes rustc to claim that the // expect(unused) up above is unfulfilled! 
write!(f, "\n{prefix}┡━₁ ",)?;
                     format_node(
                         db,
-                        interior.if_true(db),
+                        builder,
+                        interior.if_true,
                         &format_args!("{prefix}│   ",),
                         seen,
                         f,
@@ -2076,7 +2407,8 @@ impl<'db> Node<'db> {
                     write!(f, "\n{prefix}└─₀ ",)?;
                     format_node(
                         db,
-                        interior.if_false(db),
+                        builder,
+                        interior.if_false,
                         &format_args!("{prefix}    ",),
                         seen,
                         f,
@@ -2088,12 +2420,13 @@ impl<'db> Node<'db> {
 
         impl Display for DisplayNode<'_, '_> {
             fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-                format_node(self.db, self.node, self.prefix, &self.seen, f)
+                format_node(self.db, self.builder, self.node, self.prefix, &self.seen, f)
             }
         }
 
         DisplayNode {
             db,
+            builder,
             node: self,
             prefix,
             seen: RefCell::default(),
@@ -2101,6 +2434,36 @@ impl<'db> Node<'db> {
     }
 }
 
+impl Debug for NodeId {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        let mut f = f.debug_tuple("Node");
+        match self.node() {
+            // We use format_args instead of rendering the strings directly so that we don't get
+            // any quotes in the output: the Debug output is Node(AlwaysTrue) instead of
+            // Node("AlwaysTrue"). 
+ Node::AlwaysTrue => f.field(&format_args!("AlwaysTrue")), + Node::AlwaysFalse => f.field(&format_args!("AlwaysFalse")), + Node::Interior(_) => f.field(&self.0), + }; + f.finish() + } +} + +impl Idx for NodeId { + #[inline] + fn new(value: usize) -> Self { + assert!(value <= (SMALLEST_TERMINAL.0 as usize)); + #[expect(clippy::cast_possible_truncation)] + Self(value as u32) + } + + #[inline] + fn index(self) -> usize { + debug_assert!(!self.is_terminal()); + self.0 as usize + } +} + #[derive(Clone, Copy, Debug)] struct RepresentativeBounds<'db> { lower: Type<'db>, @@ -2109,25 +2472,26 @@ struct RepresentativeBounds<'db> { } impl<'db> RepresentativeBounds<'db> { - fn from_interior_node(db: &'db dyn Db, interior: InteriorNode<'db>) -> Self { - let constraint = interior.constraint(db); - let lower = constraint.lower(db); - let upper = constraint.upper(db); - let source_order = interior.source_order(db); + fn from_interior_node(builder: &ConstraintSetBuilder<'db>, interior: InteriorNodeData) -> Self { + let constraint = builder.constraint_data(interior.constraint); Self { - lower, - upper, - source_order, + lower: constraint.lower, + upper: constraint.upper, + source_order: interior.source_order, } } } +/// The index of an interior node within a [`ConstraintSetStorage`]. +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, get_size2::GetSize)] +struct InteriorNode(NodeId); + /// An interior node of a BDD -#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)] -struct InteriorNode<'db> { - constraint: ConstrainedTypeVar<'db>, - if_true: Node<'db>, - if_false: Node<'db>, +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, get_size2::GetSize, salsa::Update)] +struct InteriorNodeData { + constraint: ConstraintId, + if_true: NodeId, + if_false: NodeId, /// Represents the order in which this node's constraint was added to the containing constraint /// set, relative to all of the other constraints in the set. 
This starts off at 1 for a simple @@ -2140,130 +2504,207 @@ struct InteriorNode<'db> { max_source_order: usize, } -// The Salsa heap is tracked separately. -impl get_size2::GetSize for InteriorNode<'_> {} +impl InteriorNode { + fn node(self) -> NodeId { + self.0 + } -#[salsa::tracked] -impl<'db> InteriorNode<'db> { - #[salsa::tracked(heap_size=ruff_memory_usage::heap_size)] - fn negate(self, db: &'db dyn Db) -> Node<'db> { - Node::new( - db, - self.constraint(db), - self.if_true(db).negate(db), - self.if_false(db).negate(db), - self.source_order(db), - ) + fn negate(self, builder: &ConstraintSetBuilder<'_>) -> NodeId { + let key = self.node(); + let storage = builder.storage.borrow(); + if let Some(result) = storage.negate_cache.get(&key) { + return *result; + } + drop(storage); + + let interior = builder.interior_node_data(self.node()); + let result = Node::new( + builder, + interior.constraint, + interior.if_true.negate(builder), + interior.if_false.negate(builder), + interior.source_order, + ); + + let mut storage = builder.storage.borrow_mut(); + storage.negate_cache.insert(key, result); + result } - #[salsa::tracked(heap_size=ruff_memory_usage::heap_size)] - fn or(self, db: &'db dyn Db, other: Self, other_offset: usize) -> Node<'db> { - let self_constraint = self.constraint(db); - let other_constraint = other.constraint(db); - match (self_constraint.ordering(db)).cmp(&other_constraint.ordering(db)) { + fn or(self, builder: &ConstraintSetBuilder<'_>, other: Self, other_offset: usize) -> NodeId { + let key = (self.node(), other.node(), other_offset); + let storage = builder.storage.borrow(); + if let Some(result) = storage.or_cache.get(&key) { + return *result; + } + drop(storage); + + let self_interior = builder.interior_node_data(self.node()); + let self_ordering = self_interior.constraint.ordering(); + let other_interior = builder.interior_node_data(other.node()); + let other_ordering = other_interior.constraint.ordering(); + let result = match 
self_ordering.cmp(&other_ordering) { Ordering::Equal => Node::new( - db, - self_constraint, - self.if_true(db) - .or_inner(db, other.if_true(db), other_offset), - self.if_false(db) - .or_inner(db, other.if_false(db), other_offset), - self.source_order(db), + builder, + self_interior.constraint, + self_interior + .if_true + .or_inner(builder, other_interior.if_true, other_offset), + self_interior + .if_false + .or_inner(builder, other_interior.if_false, other_offset), + self_interior.source_order, ), Ordering::Less => Node::new( - db, - self_constraint, - self.if_true(db) - .or_inner(db, Node::Interior(other), other_offset), - self.if_false(db) - .or_inner(db, Node::Interior(other), other_offset), - self.source_order(db), + builder, + self_interior.constraint, + self_interior + .if_true + .or_inner(builder, other.node(), other_offset), + self_interior + .if_false + .or_inner(builder, other.node(), other_offset), + self_interior.source_order, ), Ordering::Greater => Node::new( - db, - other_constraint, - Node::Interior(self).or_inner(db, other.if_true(db), other_offset), - Node::Interior(self).or_inner(db, other.if_false(db), other_offset), - other.source_order(db) + other_offset, + builder, + other_interior.constraint, + self.node() + .or_inner(builder, other_interior.if_true, other_offset), + self.node() + .or_inner(builder, other_interior.if_false, other_offset), + other_interior.source_order + other_offset, ), - } + }; + + let mut storage = builder.storage.borrow_mut(); + storage.or_cache.insert(key, result); + result } - #[salsa::tracked(heap_size=ruff_memory_usage::heap_size)] - fn and(self, db: &'db dyn Db, other: Self, other_offset: usize) -> Node<'db> { - let self_constraint = self.constraint(db); - let other_constraint = other.constraint(db); - match (self_constraint.ordering(db)).cmp(&other_constraint.ordering(db)) { + fn and(self, builder: &ConstraintSetBuilder<'_>, other: Self, other_offset: usize) -> NodeId { + let key = (self.node(), other.node(), 
other_offset); + let storage = builder.storage.borrow(); + if let Some(result) = storage.and_cache.get(&key) { + return *result; + } + drop(storage); + + let self_interior = builder.interior_node_data(self.node()); + let self_ordering = self_interior.constraint.ordering(); + let other_interior = builder.interior_node_data(other.node()); + let other_ordering = other_interior.constraint.ordering(); + let result = match self_ordering.cmp(&other_ordering) { Ordering::Equal => Node::new( - db, - self_constraint, - self.if_true(db) - .and_inner(db, other.if_true(db), other_offset), - self.if_false(db) - .and_inner(db, other.if_false(db), other_offset), - self.source_order(db), + builder, + self_interior.constraint, + self_interior + .if_true + .and_inner(builder, other_interior.if_true, other_offset), + self_interior + .if_false + .and_inner(builder, other_interior.if_false, other_offset), + self_interior.source_order, ), Ordering::Less => Node::new( - db, - self_constraint, - self.if_true(db) - .and_inner(db, Node::Interior(other), other_offset), - self.if_false(db) - .and_inner(db, Node::Interior(other), other_offset), - self.source_order(db), + builder, + self_interior.constraint, + self_interior + .if_true + .and_inner(builder, other.node(), other_offset), + self_interior + .if_false + .and_inner(builder, other.node(), other_offset), + self_interior.source_order, ), Ordering::Greater => Node::new( - db, - other_constraint, - Node::Interior(self).and_inner(db, other.if_true(db), other_offset), - Node::Interior(self).and_inner(db, other.if_false(db), other_offset), - other.source_order(db) + other_offset, + builder, + other_interior.constraint, + self.node() + .and_inner(builder, other_interior.if_true, other_offset), + self.node() + .and_inner(builder, other_interior.if_false, other_offset), + other_interior.source_order + other_offset, ), - } + }; + + let mut storage = builder.storage.borrow_mut(); + storage.and_cache.insert(key, result); + result } - 
#[salsa::tracked(heap_size=ruff_memory_usage::heap_size)] - fn iff(self, db: &'db dyn Db, other: Self, other_offset: usize) -> Node<'db> { - let self_constraint = self.constraint(db); - let other_constraint = other.constraint(db); - match (self_constraint.ordering(db)).cmp(&other_constraint.ordering(db)) { + fn iff(self, builder: &ConstraintSetBuilder<'_>, other: Self, other_offset: usize) -> NodeId { + let key = (self.node(), other.node(), other_offset); + let storage = builder.storage.borrow(); + if let Some(result) = storage.iff_cache.get(&key) { + return *result; + } + drop(storage); + + let self_interior = builder.interior_node_data(self.node()); + let self_ordering = self_interior.constraint.ordering(); + let other_interior = builder.interior_node_data(other.node()); + let other_ordering = other_interior.constraint.ordering(); + let result = match self_ordering.cmp(&other_ordering) { Ordering::Equal => Node::new( - db, - self_constraint, - self.if_true(db) - .iff_inner(db, other.if_true(db), other_offset), - self.if_false(db) - .iff_inner(db, other.if_false(db), other_offset), - self.source_order(db), + builder, + self_interior.constraint, + self_interior + .if_true + .iff_inner(builder, other_interior.if_true, other_offset), + self_interior + .if_false + .iff_inner(builder, other_interior.if_false, other_offset), + self_interior.source_order, ), Ordering::Less => Node::new( - db, - self_constraint, - self.if_true(db) - .iff_inner(db, Node::Interior(other), other_offset), - self.if_false(db) - .iff_inner(db, Node::Interior(other), other_offset), - self.source_order(db), + builder, + self_interior.constraint, + self_interior + .if_true + .iff_inner(builder, other.node(), other_offset), + self_interior + .if_false + .iff_inner(builder, other.node(), other_offset), + self_interior.source_order, ), Ordering::Greater => Node::new( - db, - other_constraint, - Node::Interior(self).iff_inner(db, other.if_true(db), other_offset), - Node::Interior(self).iff_inner(db, 
other.if_false(db), other_offset), - other.source_order(db) + other_offset, + builder, + other_interior.constraint, + self.node() + .iff_inner(builder, other_interior.if_true, other_offset), + self.node() + .iff_inner(builder, other_interior.if_false, other_offset), + other_interior.source_order + other_offset, ), - } + }; + + let mut storage = builder.storage.borrow_mut(); + storage.iff_cache.insert(key, result); + result } - #[salsa::tracked(heap_size=ruff_memory_usage::heap_size)] - fn exists_one(self, db: &'db dyn Db, bound_typevar: BoundTypeVarIdentity<'db>) -> Node<'db> { - let mut path = self.path_assignments(db); + fn exists_one<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + bound_typevar: BoundTypeVarIdentity<'db>, + ) -> NodeId { + let key = (self.node(), bound_typevar); + let storage = builder.storage.borrow(); + if let Some(result) = storage.exists_one_cache.get(&key) { + return *result; + } + drop(storage); + + let mut path = self.path_assignments(builder); let mentions_typevar = |ty: Type<'db>| match ty { Type::TypeVar(haystack) => haystack.identity(db) == bound_typevar, _ => false, }; - self.abstract_one_inner( + let result = self.abstract_one_inner( db, + builder, // Remove any node that constrains `bound_typevar`, or that has a lower/upper bound // that mentions `bound_typevar`. // TODO: This will currently remove constraints that mention a typevar, but the sequent @@ -2272,52 +2713,74 @@ impl<'db> InteriorNode<'db> { // But that requires `T ≤ int ∧ U ≤ Sequence[T] → U ≤ Sequence[int]` to exist in the // sequent map. It doesn't, and so we currently produce `U ≤ Unknown` in this case. 
&mut |constraint| { - if constraint.typevar(db).identity(db) == bound_typevar { + let constraint = builder.constraint_data(constraint); + if constraint.typevar.identity(db) == bound_typevar { return true; } - if any_over_type(db, constraint.lower(db), false, mentions_typevar) { + if any_over_type(db, constraint.lower, false, mentions_typevar) { return true; } - if any_over_type(db, constraint.upper(db), false, mentions_typevar) { + if any_over_type(db, constraint.upper, false, mentions_typevar) { return true; } false }, &mut path, - ) + ); + + let mut storage = builder.storage.borrow_mut(); + storage.exists_one_cache.insert(key, result); + result } - #[salsa::tracked(heap_size=ruff_memory_usage::heap_size)] - fn retain_one(self, db: &'db dyn Db, bound_typevar: BoundTypeVarIdentity<'db>) -> Node<'db> { - let mut path = self.path_assignments(db); - self.abstract_one_inner( + fn retain_one<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + bound_typevar: BoundTypeVarIdentity<'db>, + ) -> NodeId { + let key = (self.node(), bound_typevar); + let storage = builder.storage.borrow(); + if let Some(result) = storage.retain_one_cache.get(&key) { + return *result; + } + drop(storage); + + let mut path = self.path_assignments(builder); + let result = self.abstract_one_inner( db, + builder, // Remove any node that constrains some other typevar than `bound_typevar`, and any // node that constrains `bound_typevar` with a lower/upper bound of some other typevar. // (For the latter, if there are any derived facts that we can infer from the typevar // bound, those will be automatically added to the result.) 
&mut |constraint| { - if constraint.typevar(db).identity(db) != bound_typevar { + let constraint = builder.constraint_data(constraint); + if constraint.typevar.identity(db) != bound_typevar { return true; } - if constraint.lower(db).has_typevar(db) || constraint.upper(db).has_typevar(db) { + if constraint.lower.has_typevar(db) || constraint.upper.has_typevar(db) { return true; } false }, &mut path, - ) + ); + + let mut storage = builder.storage.borrow_mut(); + storage.retain_one_cache.insert(key, result); + result } - fn abstract_one_inner( + fn abstract_one_inner<'db>( self, db: &'db dyn Db, - should_remove: &mut dyn FnMut(ConstrainedTypeVar<'db>) -> bool, - path: &mut PathAssignments<'db>, - ) -> Node<'db> { - let self_constraint = self.constraint(db); - let self_source_order = self.source_order(db); - if should_remove(self_constraint) { + builder: &ConstraintSetBuilder<'db>, + should_remove: &mut dyn FnMut(ConstraintId) -> bool, + path: &mut PathAssignments, + ) -> NodeId { + let self_interior = builder.interior_node_data(self.node()); + if should_remove(self_interior.constraint) { // If we should remove constraints involving this typevar, then we replace this node // with the OR of its if_false/if_true edges. That is, the result is true if there's // any assignment of this node's constraint that is true. @@ -2330,14 +2793,19 @@ impl<'db> InteriorNode<'db> { // TODO: This might not be stable enough, if we add more than one derived fact for this // constraint. If we still see inconsistent test output, we might need a more complex // way of tracking source order for derived facts. 
- let self_source_order = self.source_order(db); let if_true = path .walk_edge( db, - self_constraint.when_true(), - self_source_order, + builder, + self_interior.constraint.when_true(), + self_interior.source_order, |path, new_range| { - let branch = self.if_true(db).abstract_one_inner(db, should_remove, path); + let branch = self_interior.if_true.abstract_one_inner( + db, + builder, + should_remove, + path, + ); path.assignments[new_range] .iter() .filter(|(assignment, _)| { @@ -2347,22 +2815,30 @@ impl<'db> InteriorNode<'db> { }) .fold(branch, |branch, (assignment, source_order)| { branch.and( - db, - Node::new_satisfied_constraint(db, *assignment, *source_order), + builder, + Node::new_satisfied_constraint( + builder, + *assignment, + *source_order, + ), ) }) }, ) - .unwrap_or(Node::AlwaysFalse); + .unwrap_or(ALWAYS_FALSE); let if_false = path .walk_edge( db, - self_constraint.when_false(), - self_source_order, + builder, + self_interior.constraint.when_false(), + self_interior.source_order, |path, new_range| { - let branch = self - .if_false(db) - .abstract_one_inner(db, should_remove, path); + let branch = self_interior.if_false.abstract_one_inner( + db, + builder, + should_remove, + path, + ); path.assignments[new_range] .iter() .filter(|(assignment, _)| { @@ -2372,80 +2848,113 @@ impl<'db> InteriorNode<'db> { }) .fold(branch, |branch, (assignment, source_order)| { branch.and( - db, - Node::new_satisfied_constraint(db, *assignment, *source_order), + builder, + Node::new_satisfied_constraint( + builder, + *assignment, + *source_order, + ), ) }) }, ) - .unwrap_or(Node::AlwaysFalse); - if_true.or(db, if_false) + .unwrap_or(ALWAYS_FALSE); + if_true.or(builder, if_false) } else { // Otherwise, we abstract the if_false/if_true edges recursively. 
let if_true = path .walk_edge( db, - self_constraint.when_true(), - self_source_order, - |path, _| self.if_true(db).abstract_one_inner(db, should_remove, path), + builder, + self_interior.constraint.when_true(), + self_interior.source_order, + |path, _| { + self_interior + .if_true + .abstract_one_inner(db, builder, should_remove, path) + }, ) - .unwrap_or(Node::AlwaysFalse); + .unwrap_or(ALWAYS_FALSE); let if_false = path .walk_edge( db, - self_constraint.when_false(), - self_source_order, + builder, + self_interior.constraint.when_false(), + self_interior.source_order, |path, _| { - self.if_false(db) - .abstract_one_inner(db, should_remove, path) + self_interior + .if_false + .abstract_one_inner(db, builder, should_remove, path) }, ) - .unwrap_or(Node::AlwaysFalse); + .unwrap_or(ALWAYS_FALSE); // NB: We cannot use `Node::new` here, because the recursive calls might introduce new // derived constraints into the result, and those constraints might appear before this // one in the BDD ordering. - Node::new_constraint(db, self_constraint, self.source_order(db)) - .ite(db, if_true, if_false) + Node::new_constraint( + builder, + self_interior.constraint, + self_interior.source_order, + ) + .ite(builder, if_true, if_false) } } - #[salsa::tracked(heap_size=ruff_memory_usage::heap_size)] - fn restrict_one( + fn restrict_one<'db>( self, db: &'db dyn Db, - assignment: ConstraintAssignment<'db>, - ) -> (Node<'db>, bool) { - // If this node's variable is larger than the assignment's variable, then we have reached a - // point in the BDD where the assignment can no longer affect the result, - // and we can return early. - let self_constraint = self.constraint(db); - if assignment.constraint().ordering(db) < self_constraint.ordering(db) { - return (Node::Interior(self), false); - } - - // Otherwise, check if this node's variable is in the assignment. If so, substitute the - // variable by replacing this node with its if_false/if_true edge, accordingly. 
- if assignment == self_constraint.when_true() { - (self.if_true(db), true) - } else if assignment == self_constraint.when_false() { - (self.if_false(db), true) - } else { - let (if_true, found_in_true) = self.if_true(db).restrict_one(db, assignment); - let (if_false, found_in_false) = self.if_false(db).restrict_one(db, assignment); - ( - Node::new( - db, - self_constraint, - if_true, - if_false, - self.source_order(db), - ), - found_in_true || found_in_false, - ) + builder: &ConstraintSetBuilder<'db>, + assignment: ConstraintAssignment, + ) -> (NodeId, bool) { + let key = (self.node(), assignment); + let storage = builder.storage.borrow(); + if let Some(result) = storage.restrict_one_cache.get(&key) { + return *result; } + drop(storage); + + let self_interior = builder.interior_node_data(self.node()); + let self_ordering = self_interior.constraint.ordering(); + let result = if assignment.constraint().ordering() < self_ordering { + // If this node's variable is larger than the assignment's variable, then we have reached a + // point in the BDD where the assignment can no longer affect the result, + // and we can return early. + (self.node(), false) + } else { + // Otherwise, check if this node's variable is in the assignment. If so, substitute the + // variable by replacing this node with its if_false/if_true edge, accordingly. 
+ if assignment == self_interior.constraint.when_true() { + (self_interior.if_true, true) + } else if assignment == self_interior.constraint.when_false() { + (self_interior.if_false, true) + } else { + let (if_true, found_in_true) = + self_interior.if_true.restrict_one(db, builder, assignment); + let (if_false, found_in_false) = + self_interior.if_false.restrict_one(db, builder, assignment); + ( + Node::new( + builder, + self_interior.constraint, + if_true, + if_false, + self_interior.source_order, + ), + found_in_true || found_in_false, + ) + } + }; + + let mut storage = builder.storage.borrow_mut(); + storage.restrict_one_cache.insert(key, result); + result } - fn solutions(self, db: &'db dyn Db) -> Solutions<'db> { + fn solutions<'db, 'c>( + self, + db: &'db dyn Db, + builder: &'c ConstraintSetBuilder<'db>, + ) -> Solutions<'db, 'c> { #[derive(Default)] struct Bounds<'db> { lower: FxIndexSet>, @@ -2484,18 +2993,26 @@ impl<'db> InteriorNode<'db> { } } - #[salsa::tracked(returns(ref), heap_size=ruff_memory_usage::heap_size)] - fn solutions_inner<'db>( + fn solutions_inner<'db, 'c>( db: &'db dyn Db, - interior: InteriorNode<'db>, - ) -> Vec> { + builder: &'c ConstraintSetBuilder<'db>, + interior: NodeId, + ) -> Ref<'c, Vec>> { + let key = interior; + let storage = builder.storage.borrow(); + if let Ok(solutions) = + Ref::filter_map(storage, |storage| storage.solutions_cache.get(&key)) + { + return solutions; + } + // Sort the constraints in each path by their `source_order`s, to ensure that we construct // any unions or intersections in our type mappings in a stable order. Constraints might // come out of `PathAssignment`s with identical `source_order`s, but if they do, those // "tied" constraints will still be ordered in a stable way. So we need a stable sort to // retain that stable per-tie ordering. 
let mut sorted_paths = Vec::new(); - Node::Interior(interior).for_each_path(db, |path| { + interior.for_each_path(db, builder, |path| { let mut path: Vec<_> = path.positive_constraints().collect(); path.sort_by_key(|(_, source_order)| *source_order); sorted_paths.push(path); @@ -2512,9 +3029,10 @@ impl<'db> InteriorNode<'db> { 'paths: for path in sorted_paths { mappings.clear(); for (constraint, _) in path { - let typevar = constraint.typevar(db); - let lower = constraint.lower(db); - let upper = constraint.upper(db); + let constraint = builder.constraint_data(constraint); + let typevar = constraint.typevar; + let lower = constraint.lower; + let upper = constraint.upper; let bounds = mappings.entry(typevar).or_default(); bounds.add_lower(db, lower); bounds.add_upper(db, upper); @@ -2605,25 +3123,31 @@ impl<'db> InteriorNode<'db> { solutions.push(solution); } - solutions + let mut storage = builder.storage.borrow_mut(); + storage.solutions_cache.insert(key, solutions); + drop(storage); + + let storage = builder.storage.borrow(); + Ref::map(storage, |storage| &storage.solutions_cache[&key]) } - let solutions = solutions_inner(db, self); + let solutions = solutions_inner(db, builder, self.node()); if solutions.is_empty() { return Solutions::Unsatisfiable; } Solutions::Constrained(solutions) } - fn path_assignments(self, db: &'db dyn Db) -> PathAssignments<'db> { + fn path_assignments(self, builder: &ConstraintSetBuilder<'_>) -> PathAssignments { // Sort the constraints in this BDD by their `source_order`s before adding them to the // sequent map. This ensures that constraints appear in the sequent map in a stable order. // The constraints mentioned in a BDD should all have distinct `source_order`s, so an // unstable sort is fine. 
let mut constraints: SmallVec<[_; 8]> = SmallVec::new(); - Node::Interior(self).for_each_constraint(db, &mut |constraint, source_order| { - constraints.push((constraint, source_order)); - }); + self.node() + .for_each_constraint(builder, &mut |constraint, source_order| { + constraints.push((constraint, source_order)); + }); constraints.sort_unstable_by_key(|(_, source_order)| *source_order); PathAssignments::new(constraints.into_iter().map(|(constraint, _)| constraint)) @@ -2634,8 +3158,14 @@ impl<'db> InteriorNode<'db> { /// This is calculated by looking at the relationships that exist between the constraints that /// are mentioned in the BDD. For instance, if one constraint implies another (`x → y`), then /// `x ∧ ¬y` is not a valid input, and we can rewrite any occurrences of `x ∨ y` into `y`. - #[salsa::tracked(heap_size=ruff_memory_usage::heap_size)] - fn simplify(self, db: &'db dyn Db) -> Node<'db> { + fn simplify<'db>(self, db: &'db dyn Db, builder: &ConstraintSetBuilder<'db>) -> NodeId { + let key = self.node(); + let storage = builder.storage.borrow(); + if let Some(result) = storage.simplify_cache.get(&key) { + return *result; + } + drop(storage); + // To simplify a non-terminal BDD, we find all pairs of constraints that are mentioned in // the BDD. If any of those pairs can be simplified to some other BDD, we perform a // substitution to replace the pair with the simplification. @@ -2653,10 +3183,11 @@ impl<'db> InteriorNode<'db> { // need to compare a constraint against itself, and because ordering doesn't matter.) 
let mut seen_constraints = FxHashSet::default(); let mut source_orders = FxHashMap::default(); - Node::Interior(self).for_each_constraint(db, &mut |constraint, source_order| { - seen_constraints.insert(constraint); - source_orders.insert(constraint, source_order); - }); + self.node() + .for_each_constraint(builder, &mut |constraint, source_order| { + seen_constraints.insert(constraint); + source_orders.insert(constraint, source_order); + }); let mut to_visit: Vec<(_, _)> = (seen_constraints.iter().copied()) .tuple_combinations() .collect(); @@ -2666,92 +3197,89 @@ impl<'db> InteriorNode<'db> { // source order. (We do not have any test cases that depend on constraint sets being // displayed in a consistent ordering, so we don't need to be clever in assigning these // `source_order`s.) - let mut simplified = Node::Interior(self); - let mut next_source_order = self.max_source_order(db) + 1; + let mut simplified = self.node(); + let self_interior = builder.interior_node_data(self.node()); + let mut next_source_order = self_interior.max_source_order + 1; while let Some((left_constraint, right_constraint)) = to_visit.pop() { let left_source_order = source_orders[&left_constraint]; let right_source_order = source_orders[&right_constraint]; // If the constraints refer to different typevars, the only simplifications we can make // are of the form `S ≤ T ∧ T ≤ int → S ≤ int`. - let left_typevar = left_constraint.typevar(db); - let right_typevar = right_constraint.typevar(db); + let left_constraint_data = builder.constraint_data(left_constraint); + let left_typevar = left_constraint_data.typevar; + let right_constraint_data = builder.constraint_data(right_constraint); + let right_typevar = right_constraint_data.typevar; if !left_typevar.is_same_typevar_as(db, right_typevar) { // We've structured our constraints so that a typevar's upper/lower bound can only // be another typevar if the bound is "later" in our arbitrary ordering. 
That means // we only have to check this pair of constraints in one direction — though we do // have to figure out which of the two typevars is constrained, and which one is // the upper/lower bound. - let (bound_typevar, bound_constraint, constrained_typevar, constrained_constraint) = + let (bound_constraint, constrained_constraint) = if left_typevar.can_be_bound_for(db, right_typevar) { - ( - left_typevar, - left_constraint, - right_typevar, - right_constraint, - ) + (left_constraint, right_constraint) } else { - ( - right_typevar, - right_constraint, - left_typevar, - left_constraint, - ) + (right_constraint, left_constraint) }; + let bound_constraint_data = builder.constraint_data(bound_constraint); + let bound_typevar = bound_constraint_data.typevar; + let constrained_constraint_data = builder.constraint_data(constrained_constraint); + let constrained_typevar = constrained_constraint_data.typevar; // We then look for cases where the "constrained" typevar's upper and/or lower // bound matches the "bound" typevar. If so, we're going to add an implication to // the constraint set that replaces the upper/lower bound that matched with the // bound constraint's corresponding bound. 
let (new_lower, new_upper) = match ( - constrained_constraint.lower(db), - constrained_constraint.upper(db), + constrained_constraint_data.lower, + constrained_constraint_data.upper, ) { // (B ≤ C ≤ B) ∧ (BL ≤ B ≤ BU) → (BL ≤ C ≤ BU) (Type::TypeVar(constrained_lower), Type::TypeVar(constrained_upper)) if constrained_lower.is_same_typevar_as(db, bound_typevar) && constrained_upper.is_same_typevar_as(db, bound_typevar) => { - (bound_constraint.lower(db), bound_constraint.upper(db)) + (bound_constraint_data.lower, bound_constraint_data.upper) } // (CL ≤ C ≤ B) ∧ (BL ≤ B ≤ BU) → (CL ≤ C ≤ BU) (constrained_lower, Type::TypeVar(constrained_upper)) if constrained_upper.is_same_typevar_as(db, bound_typevar) => { - (constrained_lower, bound_constraint.upper(db)) + (constrained_lower, bound_constraint_data.upper) } // (B ≤ C ≤ CU) ∧ (BL ≤ B ≤ BU) → (BL ≤ C ≤ CU) (Type::TypeVar(constrained_lower), constrained_upper) if constrained_lower.is_same_typevar_as(db, bound_typevar) => { - (bound_constraint.lower(db), constrained_upper) + (bound_constraint_data.lower, constrained_upper) } _ => continue, }; let new_constraint = - ConstrainedTypeVar::new(db, constrained_typevar, new_lower, new_upper); + Constraint::new(builder, constrained_typevar, new_lower, new_upper); if seen_constraints.contains(&new_constraint) { continue; } - let new_node = Node::new_constraint(db, new_constraint, next_source_order); + let new_node = Node::new_constraint(builder, new_constraint, next_source_order); next_source_order += 1; let positive_left_node = Node::new_satisfied_constraint( - db, + builder, left_constraint.when_true(), left_source_order, ); let positive_right_node = Node::new_satisfied_constraint( - db, + builder, right_constraint.when_true(), right_source_order, ); - let lhs = positive_left_node.and(db, positive_right_node); - let intersection = new_node.ite(db, lhs, Node::AlwaysFalse); - simplified = simplified.and(db, intersection); + let lhs = positive_left_node.and(builder, 
positive_right_node); + let intersection = new_node.ite(builder, lhs, ALWAYS_FALSE); + simplified = simplified.and(builder, intersection); continue; } @@ -2760,24 +3288,24 @@ impl<'db> InteriorNode<'db> { // other typevars, producing constraints on this typevar that have concrete lower/upper // bounds. That means we can skip the simplifications below if any bound is another // typevar. - if left_constraint.lower(db).is_type_var() - || left_constraint.upper(db).is_type_var() - || right_constraint.lower(db).is_type_var() - || right_constraint.upper(db).is_type_var() + if left_constraint_data.lower.is_type_var() + || left_constraint_data.upper.is_type_var() + || right_constraint_data.lower.is_type_var() + || right_constraint_data.upper.is_type_var() { continue; } // Containment: The range of one constraint might completely contain the range of the // other. If so, there are several potential simplifications. - let larger_smaller = if left_constraint.implies(db, right_constraint) { + let larger_smaller = if left_constraint.implies(db, builder, right_constraint) { Some(( right_constraint, right_source_order, left_constraint, left_source_order, )) - } else if right_constraint.implies(db, left_constraint) { + } else if right_constraint.implies(db, builder, left_constraint) { Some(( left_constraint, left_source_order, @@ -2795,12 +3323,12 @@ impl<'db> InteriorNode<'db> { )) = larger_smaller { let positive_larger_node = Node::new_satisfied_constraint( - db, + builder, larger_constraint.when_true(), larger_source_order, ); let negative_larger_node = Node::new_satisfied_constraint( - db, + builder, larger_constraint.when_false(), larger_source_order, ); @@ -2808,6 +3336,7 @@ impl<'db> InteriorNode<'db> { // larger ∨ smaller = larger simplified = simplified.substitute_union( db, + builder, larger_constraint.when_true(), larger_source_order, smaller_constraint.when_true(), @@ -2818,6 +3347,7 @@ impl<'db> InteriorNode<'db> { // ¬larger ∧ ¬smaller = ¬larger simplified = 
simplified.substitute_intersection( db, + builder, larger_constraint.when_false(), larger_source_order, smaller_constraint.when_false(), @@ -2829,30 +3359,35 @@ impl<'db> InteriorNode<'db> { // (¬larger removes everything that's present in smaller) simplified = simplified.substitute_intersection( db, + builder, larger_constraint.when_false(), larger_source_order, smaller_constraint.when_true(), smaller_source_order, - Node::AlwaysFalse, + ALWAYS_FALSE, ); // larger ∨ ¬smaller = true // (larger fills in everything that's missing in ¬smaller) simplified = simplified.substitute_union( db, + builder, larger_constraint.when_true(), larger_source_order, smaller_constraint.when_false(), smaller_source_order, - Node::AlwaysTrue, + ALWAYS_TRUE, ); } // There are some simplifications we can make when the intersection of the two // constraints is empty, and others that we can make when the intersection is // non-empty. - match left_constraint.intersect(db, right_constraint) { - IntersectionResult::Simplified(intersection_constraint) => { + match left_constraint.intersect(db, builder, right_constraint) { + IntersectionResult::Simplified(intersection_constraint_data) => { + let intersection_constraint = + builder.intern_constraint(intersection_constraint_data); + // If the intersection is non-empty, we need to create a new constraint to // represent that intersection. We also need to add the new constraint to our // seen set and (if we haven't already seen it) to the to-visit queue. 
@@ -2865,35 +3400,35 @@ impl<'db> InteriorNode<'db> { ); } let positive_intersection_node = Node::new_satisfied_constraint( - db, + builder, intersection_constraint.when_true(), next_source_order, ); let negative_intersection_node = Node::new_satisfied_constraint( - db, + builder, intersection_constraint.when_false(), next_source_order, ); next_source_order += 1; let positive_left_node = Node::new_satisfied_constraint( - db, + builder, left_constraint.when_true(), left_source_order, ); let negative_left_node = Node::new_satisfied_constraint( - db, + builder, left_constraint.when_false(), left_source_order, ); let positive_right_node = Node::new_satisfied_constraint( - db, + builder, right_constraint.when_true(), right_source_order, ); let negative_right_node = Node::new_satisfied_constraint( - db, + builder, right_constraint.when_false(), right_source_order, ); @@ -2901,6 +3436,7 @@ impl<'db> InteriorNode<'db> { // left ∧ right = intersection simplified = simplified.substitute_intersection( db, + builder, left_constraint.when_true(), left_source_order, right_constraint.when_true(), @@ -2911,6 +3447,7 @@ impl<'db> InteriorNode<'db> { // ¬left ∨ ¬right = ¬intersection simplified = simplified.substitute_union( db, + builder, left_constraint.when_false(), left_source_order, right_constraint.when_false(), @@ -2923,22 +3460,24 @@ impl<'db> InteriorNode<'db> { // something from positive constraint) simplified = simplified.substitute_intersection( db, + builder, left_constraint.when_true(), left_source_order, right_constraint.when_false(), right_source_order, - positive_left_node.and(db, negative_intersection_node), + positive_left_node.and(builder, negative_intersection_node), ); // ¬left ∧ right = ¬intersection ∧ right // (save as above but reversed) simplified = simplified.substitute_intersection( db, + builder, left_constraint.when_false(), left_source_order, right_constraint.when_true(), right_source_order, - positive_right_node.and(db, negative_intersection_node), + 
positive_right_node.and(builder, negative_intersection_node), ); // left ∨ ¬right = intersection ∨ ¬right @@ -2946,22 +3485,24 @@ impl<'db> InteriorNode<'db> { // something to the negative constraint) simplified = simplified.substitute_union( db, + builder, left_constraint.when_true(), left_source_order, right_constraint.when_false(), right_source_order, - negative_right_node.or(db, positive_intersection_node), + negative_right_node.or(builder, positive_intersection_node), ); // ¬left ∨ right = ¬left ∨ intersection // (save as above but reversed) simplified = simplified.substitute_union( db, + builder, left_constraint.when_false(), left_source_order, right_constraint.when_true(), right_source_order, - negative_left_node.or(db, positive_intersection_node), + negative_left_node.or(builder, positive_intersection_node), ); } @@ -2974,12 +3515,12 @@ impl<'db> InteriorNode<'db> { // and right is empty. let positive_left_node = Node::new_satisfied_constraint( - db, + builder, left_constraint.when_true(), left_source_order, ); let positive_right_node = Node::new_satisfied_constraint( - db, + builder, right_constraint.when_true(), right_source_order, ); @@ -2987,27 +3528,30 @@ impl<'db> InteriorNode<'db> { // left ∧ right = false simplified = simplified.substitute_intersection( db, + builder, left_constraint.when_true(), left_source_order, right_constraint.when_true(), right_source_order, - Node::AlwaysFalse, + ALWAYS_FALSE, ); // ¬left ∨ ¬right = true simplified = simplified.substitute_union( db, + builder, left_constraint.when_false(), left_source_order, right_constraint.when_false(), right_source_order, - Node::AlwaysTrue, + ALWAYS_TRUE, ); // left ∧ ¬right = left // (there is nothing in the hole of ¬right that overlaps with left) simplified = simplified.substitute_intersection( db, + builder, left_constraint.when_true(), left_source_order, right_constraint.when_false(), @@ -3019,6 +3563,7 @@ impl<'db> InteriorNode<'db> { // (save as above but reversed) simplified = 
simplified.substitute_intersection( db, + builder, left_constraint.when_false(), left_source_order, right_constraint.when_true(), @@ -3029,20 +3574,22 @@ impl<'db> InteriorNode<'db> { } } + let mut storage = builder.storage.borrow_mut(); + storage.simplify_cache.insert(key, simplified); simplified } } -#[derive(Clone, Debug)] -pub(crate) enum Solutions<'db> { +#[derive(Debug)] +pub(crate) enum Solutions<'db, 'c> { Unsatisfiable, Unconstrained, - Constrained(&'db Vec>), + Constrained(Ref<'c, Vec>>), } pub(crate) type Solution<'db> = Vec>; -#[derive(Clone, Debug, Eq, PartialEq, get_size2::GetSize, salsa::Update)] +#[derive(Clone, Debug, Eq, Hash, PartialEq, get_size2::GetSize)] pub(crate) struct TypeVarSolution<'db> { pub(crate) bound_typevar: BoundTypeVarInstance<'db>, pub(crate) solution: Type<'db>, @@ -3050,14 +3597,14 @@ pub(crate) struct TypeVarSolution<'db> { /// An assignment of one BDD variable to either `true` or `false`. (When evaluating a BDD, we /// must provide an assignment for each variable present in the BDD.) -#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, salsa::Update)] -pub(crate) enum ConstraintAssignment<'db> { - Positive(ConstrainedTypeVar<'db>), - Negative(ConstrainedTypeVar<'db>), +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, get_size2::GetSize)] +pub(crate) enum ConstraintAssignment { + Positive(ConstraintId), + Negative(ConstraintId), } -impl<'db> ConstraintAssignment<'db> { - fn constraint(self) -> ConstrainedTypeVar<'db> { +impl ConstraintAssignment { + fn constraint(self) -> ConstraintId { match self { ConstraintAssignment::Positive(constraint) => constraint, ConstraintAssignment::Negative(constraint) => constraint, @@ -3084,7 +3631,12 @@ impl<'db> ConstraintAssignment<'db> { /// /// This is used to simplify how we display constraint sets, by removing redundant constraints /// from a clause. 
- fn implies(self, db: &'db dyn Db, other: Self) -> bool { + fn implies<'db>( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + other: Self, + ) -> bool { match (self, other) { // For two positive constraints, one range has to fully contain the other; the smaller // constraint implies the larger. @@ -3094,7 +3646,7 @@ impl<'db> ConstraintAssignment<'db> { ( ConstraintAssignment::Positive(self_constraint), ConstraintAssignment::Positive(other_constraint), - ) => self_constraint.implies(db, other_constraint), + ) => self_constraint.implies(db, builder, other_constraint), // For two negative constraints, one range has to fully contain the other; the ranges // represent "holes", though, so the constraint with the larger range implies the one @@ -3105,7 +3657,7 @@ impl<'db> ConstraintAssignment<'db> { ( ConstraintAssignment::Negative(self_constraint), ConstraintAssignment::Negative(other_constraint), - ) => other_constraint.implies(db, self_constraint), + ) => other_constraint.implies(db, builder, self_constraint), // For a positive and negative constraint, the ranges have to be disjoint, and the // positive range implies the negative range. 
@@ -3116,7 +3668,7 @@ impl<'db> ConstraintAssignment<'db> { ConstraintAssignment::Positive(self_constraint), ConstraintAssignment::Negative(other_constraint), ) => self_constraint - .intersect(db, other_constraint) + .intersect(db, builder, other_constraint) .is_disjoint(), // It's theoretically possible for a negative constraint to imply a positive constraint @@ -3130,20 +3682,21 @@ impl<'db> ConstraintAssignment<'db> { } } - fn display(self, db: &'db dyn Db) -> impl Display { - struct DisplayConstraintAssignment<'db> { - constraint: ConstraintAssignment<'db>, + fn display<'db>(self, db: &'db dyn Db, builder: &ConstraintSetBuilder<'db>) -> impl Display { + struct DisplayConstraintAssignment<'db, 'c> { + constraint: ConstraintAssignment, db: &'db dyn Db, + builder: &'c ConstraintSetBuilder<'db>, } - impl Display for DisplayConstraintAssignment<'_> { + impl Display for DisplayConstraintAssignment<'_, '_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self.constraint { ConstraintAssignment::Positive(constraint) => { - constraint.display(self.db).fmt(f) + constraint.display(self.db, self.builder).fmt(f) } ConstraintAssignment::Negative(constraint) => { - constraint.display_negated(self.db).fmt(f) + constraint.display_negated(self.db, self.builder).fmt(f) } } } @@ -3152,6 +3705,7 @@ impl<'db> ConstraintAssignment<'db> { DisplayConstraintAssignment { constraint: self, db, + builder, } } } @@ -3189,42 +3743,48 @@ impl<'db> ConstraintAssignment<'db> { /// new constraint, and then merges those cached sequents into its own sequent map. (That means we /// also share the work of calculating the sequent map across `PathAssignments` for _different_ /// constraint sets.) 
-#[derive(Clone, Debug, Default, Eq, PartialEq, get_size2::GetSize, salsa::Update)] -struct SequentMap<'db> { +#[derive(Clone, Debug, Default, Eq, PartialEq, get_size2::GetSize)] +struct SequentMap { /// Sequents of the form `¬C₁ → false` - single_tautologies: FxHashSet>, + single_tautologies: FxHashSet, /// Sequents of the form `C₁ ∧ C₂ → false` - pair_impossibilities: FxHashSet<(ConstrainedTypeVar<'db>, ConstrainedTypeVar<'db>)>, + pair_impossibilities: FxHashSet<(ConstraintId, ConstraintId)>, /// Sequents of the form `C₁ ∧ C₂ → D` - pair_implications: FxHashMap< - (ConstrainedTypeVar<'db>, ConstrainedTypeVar<'db>), - FxOrderSet>, - >, + pair_implications: FxHashMap<(ConstraintId, ConstraintId), FxOrderSet>, /// Sequents of the form `C → D` - single_implications: FxHashMap, FxOrderSet>>, + single_implications: FxHashMap>, } -impl<'db> SequentMap<'db> { +impl SequentMap { /// Returns a sequent map containing the sequents that we can infer from a single constraint in /// isolation. This method is salsa-tracked so that we only perform this work once per /// constraint. 
- fn for_constraint(db: &'db dyn Db, constraint: ConstrainedTypeVar<'db>) -> &'db Self { - #[salsa::tracked(returns(ref), heap_size=ruff_memory_usage::heap_size)] - fn for_constraint_inner<'db>( - db: &'db dyn Db, - constraint: ConstrainedTypeVar<'db>, - ) -> SequentMap<'db> { - tracing::trace!( - target: "ty_python_semantic::types::constraints::SequentMap", - constraint = %constraint.display(db), - "add sequents for constraint", - ); - let mut map = SequentMap::default(); - map.add_sequents_for_single(db, constraint); - map + fn for_constraint<'db, 'c>( + db: &'db dyn Db, + builder: &'c ConstraintSetBuilder<'db>, + constraint: ConstraintId, + ) -> Ref<'c, Self> { + let key = constraint; + let storage = builder.storage.borrow(); + if let Ok(map) = Ref::filter_map(storage, |storage| storage.single_sequent_cache.get(&key)) + { + return map; } - for_constraint_inner(db, constraint) + tracing::trace!( + target: "ty_python_semantic::types::constraints::SequentMap", + constraint = %constraint.display(db, builder), + "add sequents for constraint", + ); + let mut map = SequentMap::default(); + map.add_sequents_for_single(db, builder, constraint); + + let mut storage = builder.storage.borrow_mut(); + storage.single_sequent_cache.insert(key, map); + drop(storage); + + let storage = builder.storage.borrow(); + Ref::map(storage, |storage| &storage.single_sequent_cache[&key]) } /// Returns a sequent map containing the sequents that we can infer from a pair of constraints. @@ -3233,39 +3793,43 @@ impl<'db> SequentMap<'db> { /// (Note that this method is _not_ commutative; you should provide `left` and `right` in the /// order that they appear in the source code, so that we can construct derived constraints /// that retain that ordering.) 
- fn for_constraint_pair( + fn for_constraint_pair<'db, 'c>( db: &'db dyn Db, - left: ConstrainedTypeVar<'db>, - right: ConstrainedTypeVar<'db>, - ) -> &'db Self { - #[salsa::tracked(returns(ref), heap_size=ruff_memory_usage::heap_size)] - fn for_constraint_pair_inner<'db>( - db: &'db dyn Db, - left: ConstrainedTypeVar<'db>, - right: ConstrainedTypeVar<'db>, - ) -> SequentMap<'db> { - tracing::trace!( - target: "ty_python_semantic::types::constraints::SequentMap", - left = %left.display(db), - right = %right.display(db), - "add sequents for constraint pair", - ); - let mut map = SequentMap::default(); - map.add_sequents_for_pair(db, left, right); - map + builder: &'c ConstraintSetBuilder<'db>, + left: ConstraintId, + right: ConstraintId, + ) -> Ref<'c, Self> { + let key = (left, right); + let storage = builder.storage.borrow(); + if let Ok(map) = Ref::filter_map(storage, |storage| storage.pair_sequent_cache.get(&key)) { + return map; } - for_constraint_pair_inner(db, left, right) + tracing::trace!( + target: "ty_python_semantic::types::constraints::SequentMap", + left = %left.display(db, builder), + right = %right.display(db, builder), + "add sequents for constraint pair", + ); + let mut map = SequentMap::default(); + map.add_sequents_for_pair(db, builder, left, right); + + let mut storage = builder.storage.borrow_mut(); + storage.pair_sequent_cache.insert(key, map); + drop(storage); + + let storage = builder.storage.borrow(); + Ref::map(storage, |storage| &storage.pair_sequent_cache[&key]) } /// Merges the sequents from another sequent map into this one. 
- fn merge(&mut self, db: &'db dyn Db, other: &Self) { + fn merge(&mut self, other: &Self) { self.single_tautologies.extend(&other.single_tautologies); self.pair_impossibilities .extend(&other.pair_impossibilities); for ((ante1, ante2), post) in &other.pair_implications { self.pair_implications - .entry(Self::pair_key(db, *ante1, *ante2)) + .entry(Self::pair_key(*ante1, *ante2)) .or_default() .extend(post); } @@ -3277,60 +3841,67 @@ impl<'db> SequentMap<'db> { } } - fn pair_key( - db: &'db dyn Db, - ante1: ConstrainedTypeVar<'db>, - ante2: ConstrainedTypeVar<'db>, - ) -> (ConstrainedTypeVar<'db>, ConstrainedTypeVar<'db>) { - if ante1.ordering(db) < ante2.ordering(db) { + fn pair_key(ante1: ConstraintId, ante2: ConstraintId) -> (ConstraintId, ConstraintId) { + if ante1.ordering() < ante2.ordering() { (ante1, ante2) } else { (ante2, ante1) } } - fn add_single_tautology(&mut self, db: &'db dyn Db, ante: ConstrainedTypeVar<'db>) { + fn add_single_tautology<'db>( + &mut self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + ante: ConstraintId, + ) { if self.single_tautologies.insert(ante) { tracing::trace!( target: "ty_python_semantic::types::constraints::SequentMap", - sequent = %format_args!("¬{} → false", ante.display(db)), + sequent = %format_args!("¬{} → false", ante.display(db, builder)), "add sequent", ); } } - fn add_pair_impossibility( + fn add_pair_impossibility<'db>( &mut self, db: &'db dyn Db, - ante1: ConstrainedTypeVar<'db>, - ante2: ConstrainedTypeVar<'db>, + builder: &ConstraintSetBuilder<'db>, + ante1: ConstraintId, + ante2: ConstraintId, ) { if self .pair_impossibilities - .insert(Self::pair_key(db, ante1, ante2)) + .insert(Self::pair_key(ante1, ante2)) { tracing::trace!( target: "ty_python_semantic::types::constraints::SequentMap", - sequent = %format_args!("{} ∧ {} → false", ante1.display(db), ante2.display(db)), + sequent = %format_args!( + "{} ∧ {} → false", + ante1.display(db, builder), + ante2.display(db, builder), + ), "add sequent", 
); } } - fn add_pair_implication( + fn add_pair_implication<'db>( &mut self, db: &'db dyn Db, - ante1: ConstrainedTypeVar<'db>, - ante2: ConstrainedTypeVar<'db>, - post: ConstrainedTypeVar<'db>, + builder: &ConstraintSetBuilder<'db>, + ante1: ConstraintId, + ante2: ConstraintId, + post: ConstraintId, ) { // If either antecedent implies the consequent on its own, this new sequent is redundant. - if ante1.implies(db, post) || ante2.implies(db, post) { + if ante1.implies(db, builder, post) || ante2.implies(db, builder, post) { return; } if self .pair_implications - .entry(Self::pair_key(db, ante1, ante2)) + .entry(Self::pair_key(ante1, ante2)) .or_default() .insert(post) { @@ -3338,20 +3909,21 @@ impl<'db> SequentMap<'db> { target: "ty_python_semantic::types::constraints::SequentMap", sequent = %format_args!( "{} ∧ {} → {}", - ante1.display(db), - ante2.display(db), - post.display(db), + ante1.display(db, builder), + ante2.display(db, builder), + post.display(db, builder), ), "add sequent", ); } } - fn add_single_implication( + fn add_single_implication<'db>( &mut self, db: &'db dyn Db, - ante: ConstrainedTypeVar<'db>, - post: ConstrainedTypeVar<'db>, + builder: &ConstraintSetBuilder<'db>, + ante: ConstraintId, + post: ConstraintId, ) { if ante == post { return; @@ -3366,21 +3938,27 @@ impl<'db> SequentMap<'db> { target: "ty_python_semantic::types::constraints::SequentMap", sequent = %format_args!( "{} → {}", - ante.display(db), - post.display(db), + ante.display(db, builder), + post.display(db, builder), ), "add sequent", ); } } - fn add_sequents_for_single(&mut self, db: &'db dyn Db, constraint: ConstrainedTypeVar<'db>) { + fn add_sequents_for_single<'db>( + &mut self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + constraint: ConstraintId, + ) { // If this constraint binds its typevar to `Never ≤ T ≤ object`, then the typevar can take // on any type, and the constraint is always satisfied. 
- let lower = constraint.lower(db); - let upper = constraint.upper(db); + let constraint_data = builder.constraint_data(constraint); + let lower = constraint_data.lower; + let upper = constraint_data.upper; if lower.is_never() && upper.is_object() { - self.add_single_tautology(db, constraint); + self.add_single_tautology(db, builder, constraint); return; } @@ -3398,7 +3976,7 @@ impl<'db> SequentMap<'db> { // Case 1 (Type::TypeVar(lower_typevar), Type::TypeVar(upper_typevar)) => { if !lower_typevar.is_same_typevar_as(db, upper_typevar) { - ConstrainedTypeVar::new(db, lower_typevar, Type::Never, upper) + Constraint::new(builder, lower_typevar, Type::Never, upper) } else { return; } @@ -3406,25 +3984,26 @@ impl<'db> SequentMap<'db> { // Case 2 (Type::TypeVar(lower_typevar), _) => { - ConstrainedTypeVar::new(db, lower_typevar, Type::Never, upper) + Constraint::new(builder, lower_typevar, Type::Never, upper) } // Case 3 (_, Type::TypeVar(upper_typevar)) => { - ConstrainedTypeVar::new(db, upper_typevar, lower, Type::object()) + Constraint::new(builder, upper_typevar, lower, Type::object()) } _ => return, }; - self.add_single_implication(db, constraint, post_constraint); + self.add_single_implication(db, builder, constraint, post_constraint); } - fn add_sequents_for_pair( + fn add_sequents_for_pair<'db>( &mut self, db: &'db dyn Db, - left_constraint: ConstrainedTypeVar<'db>, - right_constraint: ConstrainedTypeVar<'db>, + builder: &ConstraintSetBuilder<'db>, + left_constraint: ConstraintId, + right_constraint: ConstraintId, ) { // If either of the constraints has another typevar as a lower/upper bound, the only // sequents we can add are for the transitive closure. For instance, if we have @@ -3447,79 +4026,88 @@ impl<'db> SequentMap<'db> { // // If all of the lower and upper bounds are concrete (i.e., not typevars), then there // several _other_ sequents that we can add, as handled by `add_concrete_sequents`. 
- let left_typevar = left_constraint.typevar(db); - let right_typevar = right_constraint.typevar(db); + let left_constraint_data = builder.constraint_data(left_constraint); + let left_typevar = left_constraint_data.typevar; + let right_constraint_data = builder.constraint_data(right_constraint); + let right_typevar = right_constraint_data.typevar; if !left_typevar.is_same_typevar_as(db, right_typevar) { - self.add_mutual_sequents_for_different_typevars(db, left_constraint, right_constraint); - } else if left_constraint.lower(db).is_type_var() - || left_constraint.upper(db).is_type_var() - || right_constraint.lower(db).is_type_var() - || right_constraint.upper(db).is_type_var() + self.add_mutual_sequents_for_different_typevars( + db, + builder, + left_constraint, + right_constraint, + ); + } else if left_constraint_data.lower.is_type_var() + || left_constraint_data.upper.is_type_var() + || right_constraint_data.lower.is_type_var() + || right_constraint_data.upper.is_type_var() { - self.add_mutual_sequents_for_same_typevars(db, left_constraint, right_constraint); + self.add_mutual_sequents_for_same_typevars( + db, + builder, + left_constraint, + right_constraint, + ); } else { - self.add_concrete_sequents(db, left_constraint, right_constraint); + self.add_concrete_sequents(db, builder, left_constraint, right_constraint); } } - fn add_mutual_sequents_for_different_typevars( + fn add_mutual_sequents_for_different_typevars<'db>( &mut self, db: &'db dyn Db, - left_constraint: ConstrainedTypeVar<'db>, - right_constraint: ConstrainedTypeVar<'db>, + builder: &ConstraintSetBuilder<'db>, + left_constraint: ConstraintId, + right_constraint: ConstraintId, ) { // We've structured our constraints so that a typevar's upper/lower bound can only // be another typevar if the bound is "later" in our arbitrary ordering. 
That means // we only have to check this pair of constraints in one direction — though we do // have to figure out which of the two typevars is constrained, and which one is // the upper/lower bound. - let left_typevar = left_constraint.typevar(db); - let right_typevar = right_constraint.typevar(db); - let (bound_typevar, bound_constraint, constrained_typevar, constrained_constraint) = + let left_constraint_data = builder.constraint_data(left_constraint); + let left_typevar = left_constraint_data.typevar; + let right_constraint_data = builder.constraint_data(right_constraint); + let right_typevar = right_constraint_data.typevar; + let (bound_constraint, constrained_constraint) = if left_typevar.can_be_bound_for(db, right_typevar) { - ( - left_typevar, - left_constraint, - right_typevar, - right_constraint, - ) + (left_constraint, right_constraint) } else { - ( - right_typevar, - right_constraint, - left_typevar, - left_constraint, - ) + (right_constraint, left_constraint) }; // We then look for cases where the "constrained" typevar's upper and/or lower bound // matches the "bound" typevar. If so, we're going to add an implication sequent that // replaces the upper/lower bound that matched with the bound constraint's corresponding // bound. 
+ let bound_constraint_data = builder.constraint_data(bound_constraint); + let bound_typevar = bound_constraint_data.typevar; + let constrained_constraint_data = builder.constraint_data(constrained_constraint); + let constrained_typevar = constrained_constraint_data.typevar; let (new_lower, new_upper) = match ( - constrained_constraint.lower(db), - constrained_constraint.upper(db), + constrained_constraint_data.lower, + constrained_constraint_data.upper, ) { // (B ≤ C ≤ B) ∧ (BL ≤ B ≤ BU) → (BL ≤ C ≤ BU) (Type::TypeVar(constrained_lower), Type::TypeVar(constrained_upper)) if constrained_lower.is_same_typevar_as(db, bound_typevar) && constrained_upper.is_same_typevar_as(db, bound_typevar) => { - (bound_constraint.lower(db), bound_constraint.upper(db)) + (bound_constraint_data.lower, bound_constraint_data.upper) } // (CL ≤ C ≤ B) ∧ (BL ≤ B ≤ BU) → (CL ≤ C ≤ BU) (constrained_lower, Type::TypeVar(constrained_upper)) if constrained_upper.is_same_typevar_as(db, bound_typevar) => { - (constrained_lower, bound_constraint.upper(db)) + (constrained_lower, bound_constraint_data.upper) } // (B ≤ C ≤ CU) ∧ (BL ≤ B ≤ BU) → (BL ≤ C ≤ CU) (Type::TypeVar(constrained_lower), constrained_upper) if constrained_lower.is_same_typevar_as(db, bound_typevar) => { - (bound_constraint.lower(db), constrained_upper) + (bound_constraint_data.lower, constrained_upper) } // (CL ≤ C ≤ pivot) ∧ (pivot ≤ B ≤ BU) → (CL ≤ C ≤ B) @@ -3530,7 +4118,7 @@ impl<'db> SequentMap<'db> { .top_materialization(db) .is_constraint_set_assignable_to( db, - bound_constraint.lower(db).bottom_materialization(db), + bound_constraint_data.lower.bottom_materialization(db), ) => { (constrained_lower, Type::TypeVar(bound_typevar)) @@ -3540,8 +4128,8 @@ impl<'db> SequentMap<'db> { (constrained_lower, constrained_upper) if !constrained_lower.is_never() && !constrained_lower.is_object() - && bound_constraint - .upper(db) + && bound_constraint_data + .upper .top_materialization(db) .is_constraint_set_assignable_to( db, @@ 
-3554,24 +4142,31 @@ impl<'db> SequentMap<'db> { _ => return, }; - let post_constraint = - ConstrainedTypeVar::new(db, constrained_typevar, new_lower, new_upper); - self.add_pair_implication(db, left_constraint, right_constraint, post_constraint); + let post_constraint = Constraint::new(builder, constrained_typevar, new_lower, new_upper); + self.add_pair_implication( + db, + builder, + left_constraint, + right_constraint, + post_constraint, + ); } - fn add_mutual_sequents_for_same_typevars( + fn add_mutual_sequents_for_same_typevars<'db>( &mut self, db: &'db dyn Db, - left_constraint: ConstrainedTypeVar<'db>, - right_constraint: ConstrainedTypeVar<'db>, + builder: &ConstraintSetBuilder<'db>, + left_constraint: ConstraintId, + right_constraint: ConstraintId, ) { let mut try_one_direction = - |left_constraint: ConstrainedTypeVar<'db>, - right_constraint: ConstrainedTypeVar<'db>| { - let left_lower = left_constraint.lower(db); - let left_upper = left_constraint.upper(db); - let right_lower = right_constraint.lower(db); - let right_upper = right_constraint.upper(db); + |left_constraint: ConstraintId, right_constraint: ConstraintId| { + let left_constraint_data = builder.constraint_data(left_constraint); + let left_lower = left_constraint_data.lower; + let left_upper = left_constraint_data.upper; + let right_constraint_data = builder.constraint_data(right_constraint); + let right_lower = right_constraint_data.lower; + let right_upper = right_constraint_data.upper; let new_constraint = |bound_typevar: BoundTypeVarInstance<'db>, right_lower: Type<'db>, right_upper: Type<'db>| { @@ -3589,7 +4184,7 @@ impl<'db> SequentMap<'db> { } else { right_upper }; - ConstrainedTypeVar::new(db, bound_typevar, right_lower, right_upper) + Constraint::new(builder, bound_typevar, right_lower, right_upper) }; let post_constraint = match (left_lower, left_upper) { (Type::TypeVar(bound_typevar), Type::TypeVar(other_bound_typevar)) @@ -3605,60 +4200,70 @@ impl<'db> SequentMap<'db> { } _ => 
return, }; - self.add_pair_implication(db, left_constraint, right_constraint, post_constraint); + self.add_pair_implication( + db, + builder, + left_constraint, + right_constraint, + post_constraint, + ); }; try_one_direction(left_constraint, right_constraint); try_one_direction(right_constraint, left_constraint); } - fn add_concrete_sequents( + fn add_concrete_sequents<'db>( &mut self, db: &'db dyn Db, - left_constraint: ConstrainedTypeVar<'db>, - right_constraint: ConstrainedTypeVar<'db>, + builder: &ConstraintSetBuilder<'db>, + left_constraint: ConstraintId, + right_constraint: ConstraintId, ) { // These might seem redundant with the intersection check below, since `a → b` means that // `a ∧ b = a`. But we are not normalizing constraint bounds, and these clauses help us // identify constraints that are identical besides e.g. ordering of union/intersection // elements. (For instance, when processing `T ≤ τ₁ & τ₂` and `T ≤ τ₂ & τ₁`, these clauses // would add sequents for `(T ≤ τ₁ & τ₂) → (T ≤ τ₂ & τ₁)` and vice versa.) 
- if left_constraint.implies(db, right_constraint) { + if left_constraint.implies(db, builder, right_constraint) { tracing::trace!( target: "ty_python_semantic::types::constraints::SequentMap", - left = %left_constraint.display(db), - right = %right_constraint.display(db), + left = %left_constraint.display(db, builder), + right = %right_constraint.display(db, builder), "left implies right", ); - self.add_single_implication(db, left_constraint, right_constraint); + self.add_single_implication(db, builder, left_constraint, right_constraint); } - if right_constraint.implies(db, left_constraint) { + if right_constraint.implies(db, builder, left_constraint) { tracing::trace!( target: "ty_python_semantic::types::constraints::SequentMap", - left = %left_constraint.display(db), - right = %right_constraint.display(db), + left = %left_constraint.display(db, builder), + right = %right_constraint.display(db, builder), "right implies left", ); - self.add_single_implication(db, right_constraint, left_constraint); + self.add_single_implication(db, builder, right_constraint, left_constraint); } - match left_constraint.intersect(db, right_constraint) { - IntersectionResult::Simplified(intersection_constraint) => { + match left_constraint.intersect(db, builder, right_constraint) { + IntersectionResult::Simplified(intersection_constraint_data) => { + let intersection_constraint = + builder.intern_constraint(intersection_constraint_data); tracing::trace!( target: "ty_python_semantic::types::constraints::SequentMap", - left = %left_constraint.display(db), - right = %right_constraint.display(db), - intersection = %intersection_constraint.display(db), + left = %left_constraint.display(db, builder), + right = %right_constraint.display(db, builder), + intersection = %intersection_constraint.display(db, builder), "left and right overlap", ); self.add_pair_implication( db, + builder, left_constraint, right_constraint, intersection_constraint, ); - self.add_single_implication(db, 
intersection_constraint, left_constraint); - self.add_single_implication(db, intersection_constraint, right_constraint); + self.add_single_implication(db, builder, intersection_constraint, left_constraint); + self.add_single_implication(db, builder, intersection_constraint, right_constraint); } // The sequent map only needs to include constraints that might appear in a BDD. If the @@ -3669,21 +4274,27 @@ impl<'db> SequentMap<'db> { IntersectionResult::Disjoint => { tracing::trace!( target: "ty_python_semantic::types::constraints::SequentMap", - left = %left_constraint.display(db), - right = %right_constraint.display(db), + left = %left_constraint.display(db, builder), + right = %right_constraint.display(db, builder), "left and right are disjoint", ); - self.add_pair_impossibility(db, left_constraint, right_constraint); + self.add_pair_impossibility(db, builder, left_constraint, right_constraint); } } } #[expect(dead_code)] // Keep this around for debugging purposes - fn display<'a>(&'a self, db: &'db dyn Db, prefix: &'a dyn Display) -> impl Display + 'a { + fn display<'db, 'a>( + &'a self, + db: &'db dyn Db, + builder: &'a ConstraintSetBuilder<'db>, + prefix: &'a dyn Display, + ) -> impl Display + 'a { struct DisplaySequentMap<'a, 'db> { - map: &'a SequentMap<'db>, + map: &'a SequentMap, prefix: &'a dyn Display, db: &'db dyn Db, + builder: &'a ConstraintSetBuilder<'db>, } impl Display for DisplaySequentMap<'_, '_> { @@ -3703,8 +4314,8 @@ impl<'db> SequentMap<'db> { write!( f, "{} ∧ {} → false", - ante1.display(self.db), - ante2.display(self.db), + ante1.display(self.db, self.builder), + ante2.display(self.db, self.builder), )?; } @@ -3714,9 +4325,9 @@ impl<'db> SequentMap<'db> { write!( f, "{} ∧ {} → {}", - ante1.display(self.db), - ante2.display(self.db), - post.display(self.db), + ante1.display(self.db, self.builder), + ante2.display(self.db, self.builder), + post.display(self.db, self.builder), )?; } } @@ -3724,7 +4335,12 @@ impl<'db> SequentMap<'db> { for 
(ante, posts) in &self.map.single_implications { for post in posts { maybe_write_prefix(f)?; - write!(f, "{} → {}", ante.display(self.db), post.display(self.db))?; + write!( + f, + "{} → {}", + ante.display(self.db, self.builder), + post.display(self.db, self.builder) + )?; } } @@ -3739,6 +4355,7 @@ impl<'db> SequentMap<'db> { map: self, prefix, db, + builder, } } } @@ -3746,17 +4363,17 @@ impl<'db> SequentMap<'db> { /// The collection of constraints that we know to be true or false at a certain point when /// traversing a BDD. #[derive(Debug)] -pub(crate) struct PathAssignments<'db> { - map: SequentMap<'db>, - assignments: FxIndexMap, usize>, +pub(crate) struct PathAssignments { + map: SequentMap, + assignments: FxIndexMap, /// Constraints that we have discovered, mapped to whether we have processed them yet. (This /// ensures a stable order for all of the derived constraints that we create, while still /// letting us create them lazily.) - discovered: FxIndexMap, bool>, + discovered: FxIndexMap, } -impl<'db> PathAssignments<'db> { - fn new(constraints: impl IntoIterator>) -> Self { +impl PathAssignments { + fn new(constraints: impl IntoIterator) -> Self { let discovered = constraints .into_iter() .map(|constraint| (constraint, false)) @@ -3790,10 +4407,11 @@ impl<'db> PathAssignments<'db> { /// the BDD. You should make this call from inside of your callback, so that as you get further /// down into the BDD structure, we remember all of the information that we have learned from /// the path we're on. 
- fn walk_edge( + fn walk_edge<'db, R>( &mut self, db: &'db dyn Db, - assignment: ConstraintAssignment<'db>, + builder: &ConstraintSetBuilder<'db>, + assignment: ConstraintAssignment, source_order: usize, f: impl FnOnce(&mut Self, Range) -> R, ) -> Option { @@ -3807,12 +4425,14 @@ impl<'db> PathAssignments<'db> { target: "ty_python_semantic::types::constraints::PathAssignment", before = %format_args!( "[{}]", - self.assignments[..start].iter().map(|(assignment, _)| assignment.display(db)).format(", "), + self.assignments[..start].iter().map(|(assignment, _)| { + assignment.display(db, builder) + }).format(", "), ), - edge = %assignment.display(db), + edge = %assignment.display(db, builder), "walk edge", ); - let found_conflict = self.add_assignment(db, assignment, source_order); + let found_conflict = self.add_assignment(db, builder, assignment, source_order); let result = if found_conflict.is_err() { // If that results in the path now being impossible due to a contradiction, return // without invoking the callback. 
@@ -3827,7 +4447,9 @@ impl<'db> PathAssignments<'db> { target: "ty_python_semantic::types::constraints::PathAssignment", new = %format_args!( "[{}]", - self.assignments[start..].iter().map(|(assignment, _)| assignment.display(db)).format(", "), + self.assignments[start..].iter().map(|(assignment, _)| { + assignment.display(db, builder) + }).format(", "), ), "new assignments", ); @@ -3841,9 +4463,7 @@ impl<'db> PathAssignments<'db> { result } - pub(crate) fn positive_constraints( - &self, - ) -> impl Iterator, usize)> + '_ { + pub(crate) fn positive_constraints(&self) -> impl Iterator + '_ { self.assignments .iter() .filter_map(|(assignment, source_order)| match assignment { @@ -3852,7 +4472,7 @@ impl<'db> PathAssignments<'db> { }) } - fn assignment_holds(&self, assignment: ConstraintAssignment<'db>) -> bool { + fn assignment_holds(&self, assignment: ConstraintAssignment) -> bool { self.assignments.contains_key(&assignment) } @@ -3861,7 +4481,12 @@ impl<'db> PathAssignments<'db> { /// [`SequentMap::for_constraint`] and [`for_constraint_pair`][SequentMap::for_constraint_pair] /// to calculate _and cache_ the constraints, so that if we walk another constraint set /// containing this constraint, we reuse the work to calculate its sequents. - fn discover_constraint(&mut self, db: &'db dyn Db, constraint: ConstrainedTypeVar<'db>) { + fn discover_constraint<'db>( + &mut self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + constraint: ConstraintId, + ) { // If we've already processed this constraint, we can skip it. 
let existing = self.discovered.insert(constraint, true); let already_processed = existing.is_some_and(|existing| existing); @@ -3869,22 +4494,24 @@ impl<'db> PathAssignments<'db> { return; } - let single_map = SequentMap::for_constraint(db, constraint); - self.map.merge(db, single_map); + let single_map = SequentMap::for_constraint(db, builder, constraint); + self.map.merge(&single_map); + drop(single_map); for existing in self.discovered.keys().dropping_back(1) { - let pair_map = SequentMap::for_constraint_pair(db, *existing, constraint); - self.map.merge(db, pair_map); + let pair_map = SequentMap::for_constraint_pair(db, builder, *existing, constraint); + self.map.merge(&pair_map); } } /// Adds a new assignment, along with any derived information that we can infer from the new /// assignment combined with the assignments we've already seen. If any of this causes the path /// to become invalid, due to a contradiction, returns a [`PathAssignmentConflict`] error. - fn add_assignment( + fn add_assignment<'db>( &mut self, db: &'db dyn Db, - assignment: ConstraintAssignment<'db>, + builder: &ConstraintSetBuilder<'db>, + assignment: ConstraintAssignment, source_order: usize, ) -> Result<(), PathAssignmentConflict> { // First add this assignment. If it causes a conflict, return that as an error. If we've @@ -3892,10 +4519,12 @@ impl<'db> PathAssignments<'db> { if self.assignments.contains_key(&assignment.negated()) { tracing::trace!( target: "ty_python_semantic::types::constraints::PathAssignment", - assignment = %assignment.display(db), + assignment = %assignment.display(db, builder), facts = %format_args!( "[{}]", - self.assignments.iter().map(|(assignment, _)| assignment.display(db)).format(", "), + self.assignments.iter().map(|(assignment, _)| { + assignment.display(db, builder) + }).format(", "), ), "found contradiction", ); @@ -3919,7 +4548,7 @@ impl<'db> PathAssignments<'db> { // don't anticipate the sequent maps to be very large. 
We might consider avoiding the // brute-force search. - self.discover_constraint(db, assignment.constraint()); + self.discover_constraint(db, builder, assignment.constraint()); for ante in &self.map.single_tautologies { if self.assignment_holds(ante.when_false()) { @@ -3927,10 +4556,12 @@ impl<'db> PathAssignments<'db> { // it's false. tracing::trace!( target: "ty_python_semantic::types::constraints::PathAssignment", - ante = %ante.display(db), + ante = %ante.display(db, builder), facts = %format_args!( "[{}]", - self.assignments.iter().map(|(assignment, _)| assignment.display(db)).format(", "), + self.assignments.iter().map(|(assignment, _)| { + assignment.display(db, builder) + }).format(", "), ), "found contradiction", ); @@ -3945,11 +4576,13 @@ impl<'db> PathAssignments<'db> { // current path asserts that both are true. tracing::trace!( target: "ty_python_semantic::types::constraints::PathAssignment", - ante1 = %ante1.display(db), - ante2 = %ante2.display(db), + ante1 = %ante1.display(db, builder), + ante2 = %ante2.display(db, builder), facts = %format_args!( "[{}]", - self.assignments.iter().map(|(assignment, _)| assignment.display(db)).format(", "), + self.assignments.iter().map(|(assignment, _)| { + assignment.display(db, builder) + }).format(", "), ), "found contradiction", ); @@ -3977,7 +4610,7 @@ impl<'db> PathAssignments<'db> { } for new_constraint in new_constraints { - self.add_assignment(db, new_constraint.when_true(), source_order)?; + self.add_assignment(db, builder, new_constraint.when_true(), source_order)?; } Ok(()) @@ -3989,12 +4622,12 @@ struct PathAssignmentConflict; /// A single clause in the DNF representation of a BDD #[derive(Clone, Debug, Default, Eq, PartialEq)] -struct SatisfiedClause<'db> { - constraints: Vec>, +struct SatisfiedClause { + constraints: Vec, } -impl<'db> SatisfiedClause<'db> { - fn push(&mut self, constraint: ConstraintAssignment<'db>) { +impl SatisfiedClause { + fn push(&mut self, constraint: ConstraintAssignment) { 
self.constraints.push(constraint); } @@ -4018,7 +4651,7 @@ impl<'db> SatisfiedClause<'db> { /// Removes another clause from this clause, if it appears as a prefix of this clause. Returns /// whether the prefix was removed. - fn remove_prefix(&mut self, prefix: &SatisfiedClause<'db>) -> bool { + fn remove_prefix(&mut self, prefix: &SatisfiedClause) -> bool { if self.constraints.starts_with(&prefix.constraints) { self.constraints.drain(0..prefix.constraints.len()); return true; @@ -4031,7 +4664,7 @@ impl<'db> SatisfiedClause<'db> { /// want to remove the larger one and keep the smaller one.) /// /// Returns a boolean that indicates whether any simplifications were made. - fn simplify(&mut self, db: &'db dyn Db) -> bool { + fn simplify<'db>(&mut self, db: &'db dyn Db, builder: &ConstraintSetBuilder<'db>) -> bool { let mut changes_made = false; let mut i = 0; // Loop through each constraint, comparing it with any constraints that appear later in the @@ -4039,7 +4672,7 @@ impl<'db> SatisfiedClause<'db> { 'outer: while i < self.constraints.len() { let mut j = i + 1; while j < self.constraints.len() { - if self.constraints[j].implies(db, self.constraints[i]) { + if self.constraints[j].implies(db, builder, self.constraints[i]) { // If constraint `i` is removed, then we don't need to compare it with any // later constraints in the list. Note that we continue the outer loop, instead // of breaking from the inner loop, so that we don't bump index `i` below. @@ -4048,7 +4681,7 @@ impl<'db> SatisfiedClause<'db> { self.constraints.swap_remove(i); changes_made = true; continue 'outer; - } else if self.constraints[i].implies(db, self.constraints[j]) { + } else if self.constraints[i].implies(db, builder, self.constraints[j]) { // If constraint `j` is removed, then we can continue the inner loop. We will // swap a new element into place at index `j`, and will continue comparing the // constraint at index `i` with later constraints. 
@@ -4063,7 +4696,7 @@ impl<'db> SatisfiedClause<'db> { changes_made } - fn display(&self, db: &'db dyn Db) -> String { + fn display<'db>(&self, db: &'db dyn Db, builder: &ConstraintSetBuilder<'db>) -> String { if self.constraints.is_empty() { return String::from("always"); } @@ -4075,9 +4708,11 @@ impl<'db> SatisfiedClause<'db> { .constraints .iter() .map(|constraint| match constraint { - ConstraintAssignment::Positive(constraint) => constraint.display(db).to_string(), + ConstraintAssignment::Positive(constraint) => { + constraint.display(db, builder).to_string() + } ConstraintAssignment::Negative(constraint) => { - constraint.display_negated(db).to_string() + constraint.display_negated(db, builder).to_string() } }) .collect(); @@ -4103,23 +4738,23 @@ impl<'db> SatisfiedClause<'db> { /// A list of the clauses that satisfy a BDD. This is a DNF representation of the boolean function /// that the BDD represents. #[derive(Clone, Debug, Default, Eq, PartialEq)] -struct SatisfiedClauses<'db> { - clauses: Vec>, +struct SatisfiedClauses { + clauses: Vec, } -impl<'db> SatisfiedClauses<'db> { - fn push(&mut self, clause: SatisfiedClause<'db>) { +impl SatisfiedClauses { + fn push(&mut self, clause: SatisfiedClause) { self.clauses.push(clause); } /// Simplifies the DNF representation, removing redundancies that do not change the underlying /// function. (This is used when displaying a BDD, to make sure that the representation that we /// show is as simple as possible while still producing the same results.) - fn simplify(&mut self, db: &'db dyn Db) { + fn simplify<'db>(&mut self, db: &'db dyn Db, builder: &ConstraintSetBuilder<'db>) { // First simplify each clause individually, by removing constraints that are implied by // other constraints in the clause. 
for clause in &mut self.clauses { - clause.simplify(db); + clause.simplify(db, builder); } while self.simplify_one_round() { @@ -4191,7 +4826,7 @@ impl<'db> SatisfiedClauses<'db> { false } - fn display(&self, db: &'db dyn Db) -> String { + fn display<'db>(&self, db: &'db dyn Db, builder: &ConstraintSetBuilder<'db>) -> String { // This is a bit heavy-handed, but we need to output the clauses in a consistent order // even though Salsa IDs are assigned non-deterministically. This Display output is only // used in test cases, so we don't need to over-optimize it. @@ -4202,7 +4837,7 @@ impl<'db> SatisfiedClauses<'db> { let mut clauses: Vec<_> = self .clauses .iter() - .map(|clause| clause.display(db)) + .map(|clause| clause.display(db, builder)) .collect(); clauses.sort(); clauses.join(" ∨ ") @@ -4213,10 +4848,10 @@ impl<'db> BoundTypeVarInstance<'db> { /// Returns the valid specializations of a typevar. This is used when checking a constraint set /// when this typevar is in inferable position, where we only need _some_ specialization to /// satisfy the constraint set. - fn valid_specializations(self, db: &'db dyn Db) -> Node<'db> { + fn valid_specializations(self, db: &'db dyn Db, builder: &ConstraintSetBuilder<'db>) -> NodeId { if self.paramspec_attr(db).is_some() { // P.args and P.kwargs are variadic, and do not have an upper bound or constraints. - return Node::AlwaysTrue; + return ALWAYS_TRUE; } // For gradual upper bounds and constraints, we are free to choose any materialization that @@ -4230,19 +4865,19 @@ impl<'db> BoundTypeVarInstance<'db> { // that _some_ valid specialization satisfies the constraint set, it's correct for us to // return the range of valid materializations that we can choose from. 
match self.typevar(db).bound_or_constraints(db) { - None => Node::AlwaysTrue, + None => ALWAYS_TRUE, Some(TypeVarBoundOrConstraints::UpperBound(bound)) => { let bound = bound.top_materialization(db); - ConstrainedTypeVar::new_node(db, self, Type::Never, bound) + Constraint::new_node(db, builder, self, Type::Never, bound) } Some(TypeVarBoundOrConstraints::Constraints(constraints)) => { - let mut specializations = Node::AlwaysFalse; + let mut specializations = ALWAYS_FALSE; for constraint in constraints.elements(db) { let constraint_lower = constraint.bottom_materialization(db); let constraint_upper = constraint.top_materialization(db); specializations = specializations.or_with_offset( - db, - ConstrainedTypeVar::new_node(db, self, constraint_lower, constraint_upper), + builder, + Constraint::new_node(db, builder, self, constraint_lower, constraint_upper), ); } specializations @@ -4263,32 +4898,36 @@ impl<'db> BoundTypeVarInstance<'db> { /// specifies the required specializations, and the iterator will be empty. For a constrained /// typevar, the primary result will include the fully static constraints, and the iterator /// will include an entry for each non-fully-static constraint. - fn required_specializations(self, db: &'db dyn Db) -> (Node<'db>, Vec>) { + fn required_specializations( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + ) -> (NodeId, Vec) { // For upper bounds and constraints, we are free to choose any materialization that makes // the check succeed. In non-inferable positions, it is most helpful to choose a // materialization that is as restrictive as possible, since that minimizes the number of // valid specializations that must satisfy the check. We therefore take the bottom // materialization of the bound or constraints. 
match self.typevar(db).bound_or_constraints(db) { - None => (Node::AlwaysTrue, Vec::new()), + None => (ALWAYS_TRUE, Vec::new()), Some(TypeVarBoundOrConstraints::UpperBound(bound)) => { let bound = bound.bottom_materialization(db); ( - ConstrainedTypeVar::new_node(db, self, Type::Never, bound), + Constraint::new_node(db, builder, self, Type::Never, bound), Vec::new(), ) } Some(TypeVarBoundOrConstraints::Constraints(constraints)) => { - let mut non_gradual_constraints = Node::AlwaysFalse; + let mut non_gradual_constraints = ALWAYS_FALSE; let mut gradual_constraints = Vec::new(); for constraint in constraints.elements(db) { let constraint_lower = constraint.bottom_materialization(db); let constraint_upper = constraint.top_materialization(db); let constraint = - ConstrainedTypeVar::new_node(db, self, constraint_lower, constraint_upper); + Constraint::new_node(db, builder, self, constraint_lower, constraint_upper); if constraint_lower == constraint_upper { non_gradual_constraints = - non_gradual_constraints.or_with_offset(db, constraint); + non_gradual_constraints.or_with_offset(builder, constraint); } else { gradual_constraints.push(constraint); } @@ -4303,13 +4942,13 @@ impl<'db> GenericContext<'db> { pub(crate) fn specialize_constrained<'c>( self, db: &'db dyn Db, - _builder: &'c ConstraintSetBuilder<'db>, + builder: &'c ConstraintSetBuilder<'db>, constraints: ConstraintSet<'db, 'c>, ) -> Result, ()> { tracing::trace!( target: "ty_python_semantic::types::constraints::specialize_constrained", generic_context = %self.display_full(db), - constraints = %constraints.node.display(db), + constraints = %constraints.node.display(db, builder), "create specialization for constraint set", ); @@ -4317,7 +4956,7 @@ impl<'db> GenericContext<'db> { if constraints.is_cyclic(db) { tracing::error!( target: "ty_python_semantic::types::constraints::specialize_constrained", - constraints = %constraints.node.display(db), + constraints = %constraints.node.display(db, builder), "constraint 
set is cyclic", ); // TODO: Better error @@ -4329,110 +4968,110 @@ impl<'db> GenericContext<'db> { // each typevar. let abstracted = self .variables(db) - .fold(Node::AlwaysTrue, |constraints, bound_typevar| { - constraints.and_with_offset(db, bound_typevar.valid_specializations(db)) + .fold(ALWAYS_TRUE, |constraints, bound_typevar| { + constraints + .and_with_offset(builder, bound_typevar.valid_specializations(db, builder)) }) - .and_with_offset(db, constraints.node); + .and_with_offset(builder, constraints.node); tracing::trace!( target: "ty_python_semantic::types::constraints::specialize_constrained", - valid = %abstracted.display(db), + valid = %abstracted.display(db, builder), "limited to valid specializations", ); // Then we find all of the "representative types" for each typevar in the constraint set. let mut error_occurred = false; let mut representatives = Vec::new(); - let types = - self.variables(db).map(|bound_typevar| { - // Each representative type represents one of the ways that the typevar can satisfy the - // constraint, expressed as a lower/upper bound on the types that the typevar can - // specialize to. - // - // If there are multiple paths in the BDD, they technically represent independent - // possible specializations. If there's a type that satisfies all of them, we will - // return that as the specialization. If not, then the constraint set is ambiguous. - // (This happens most often with constrained typevars.) We could in the future turn - // _each_ of the paths into separate specializations, but it's not clear what we would - // do with that, so instead we just report the ambiguity as a specialization failure. - let mut unconstrained = false; - let identity = bound_typevar.identity(db); + let types = self.variables(db).map(|bound_typevar| { + // Each representative type represents one of the ways that the typevar can satisfy the + // constraint, expressed as a lower/upper bound on the types that the typevar can + // specialize to. 
+ // + // If there are multiple paths in the BDD, they technically represent independent + // possible specializations. If there's a type that satisfies all of them, we will + // return that as the specialization. If not, then the constraint set is ambiguous. + // (This happens most often with constrained typevars.) We could in the future turn + // _each_ of the paths into separate specializations, but it's not clear what we would + // do with that, so instead we just report the ambiguity as a specialization failure. + let mut unconstrained = false; + let identity = bound_typevar.identity(db); + tracing::trace!( + target: "ty_python_semantic::types::constraints::specialize_constrained", + bound_typevar = %identity.display(db), + abstracted = %abstracted.retain_one(db, builder, identity).display(db, builder), + "find specialization for typevar", + ); + representatives.clear(); + abstracted.find_representative_types(db, builder, identity, |representative| { + match representative { + Some(representative) => { + representatives.extend_from_slice(representative); + } + None => { + unconstrained = true; + } + } + }); + + // The BDD is satisfiable, but the typevar is unconstrained, then we use `None` to tell + // specialize_recursive to fall back on the typevar's default. + if unconstrained { tracing::trace!( target: "ty_python_semantic::types::constraints::specialize_constrained", bound_typevar = %identity.display(db), - abstracted = %abstracted.retain_one(db, identity).display(db), - "find specialization for typevar", + "typevar is unconstrained", ); - representatives.clear(); - abstracted.find_representative_types(db, identity, |representative| { - match representative { - Some(representative) => { - representatives.extend_from_slice(representative); - } - None => { - unconstrained = true; - } - } - }); - - // The BDD is satisfiable, but the typevar is unconstrained, then we use `None` to tell - // specialize_recursive to fall back on the typevar's default. 
- if unconstrained { - tracing::trace!( - target: "ty_python_semantic::types::constraints::specialize_constrained", - bound_typevar = %identity.display(db), - "typevar is unconstrained", - ); - return None; - } - - // If there are no satisfiable paths in the BDD, then there is no valid specialization - // for this constraint set. - if representatives.is_empty() { - // TODO: Construct a useful error here - tracing::trace!( - target: "ty_python_semantic::types::constraints::specialize_constrained", - bound_typevar = %identity.display(db), - "typevar cannot be satisfied", - ); - error_occurred = true; - return None; - } + return None; + } - // Before constructing the final lower and upper bound, sort the constraints by - // their source order. This should give us a consistently ordered specialization, - // regardless of the variable ordering of the original BDD. - representatives.sort_unstable_by_key(|bounds| bounds.source_order); - let greatest_lower_bound = - UnionType::from_elements(db, representatives.iter().map(|bounds| bounds.lower)); - let least_upper_bound = IntersectionType::from_elements( - db, - representatives.iter().map(|bounds| bounds.upper), + // If there are no satisfiable paths in the BDD, then there is no valid specialization + // for this constraint set. + if representatives.is_empty() { + // TODO: Construct a useful error here + tracing::trace!( + target: "ty_python_semantic::types::constraints::specialize_constrained", + bound_typevar = %identity.display(db), + "typevar cannot be satisfied", ); + error_occurred = true; + return None; + } - // If `lower ≰ upper`, then there is no type that satisfies all of the paths in the - // BDD. That's an ambiguous specialization, as described above. 
- if !greatest_lower_bound.is_constraint_set_assignable_to(db, least_upper_bound) { - tracing::trace!( - target: "ty_python_semantic::types::constraints::specialize_constrained", - bound_typevar = %identity.display(db), - greatest_lower_bound = %greatest_lower_bound.display(db), - least_upper_bound = %least_upper_bound.display(db), - "typevar bounds are incompatible", - ); - error_occurred = true; - return None; - } + // Before constructing the final lower and upper bound, sort the constraints by + // their source order. This should give us a consistently ordered specialization, + // regardless of the variable ordering of the original BDD. + representatives.sort_unstable_by_key(|bounds| bounds.source_order); + let greatest_lower_bound = + UnionType::from_elements(db, representatives.iter().map(|bounds| bounds.lower)); + let least_upper_bound = IntersectionType::from_elements( + db, + representatives.iter().map(|bounds| bounds.upper), + ); - // Of all of the types that satisfy all of the paths in the BDD, we choose the - // "largest" one (i.e., "closest to `object`") as the specialization. + // If `lower ≰ upper`, then there is no type that satisfies all of the paths in the + // BDD. That's an ambiguous specialization, as described above. + if !greatest_lower_bound.is_constraint_set_assignable_to(db, least_upper_bound) { tracing::trace!( target: "ty_python_semantic::types::constraints::specialize_constrained", bound_typevar = %identity.display(db), - specialization = %least_upper_bound.display(db), - "found specialization for typevar", + greatest_lower_bound = %greatest_lower_bound.display(db), + least_upper_bound = %least_upper_bound.display(db), + "typevar bounds are incompatible", ); - Some(least_upper_bound) - }); + error_occurred = true; + return None; + } + + // Of all of the types that satisfy all of the paths in the BDD, we choose the + // "largest" one (i.e., "closest to `object`") as the specialization. 
+ tracing::trace!( + target: "ty_python_semantic::types::constraints::specialize_constrained", + bound_typevar = %identity.display(db), + specialization = %least_upper_bound.display(db), + "found specialization for typevar", + ); + Some(least_upper_bound) + }); let specialization = self.specialize_recursive(db, types); if error_occurred { @@ -4486,9 +5125,9 @@ mod tests { let u_bool = ConstraintSet::constrain_typevar(&db, &constraints, u, bool_type, bool_type); // Construct this in a different order than above to make the source_orders more // interesting. - let constraints = (u_str.or(&db, &constraints, || u_bool)) + let set = (u_str.or(&db, &constraints, || u_bool)) .and(&db, &constraints, || t_str.or(&db, &constraints, || t_bool)); - let actual = constraints.node.display_graph(&db, &"").to_string(); + let actual = set.node.display_graph(&db, &constraints, &"").to_string(); assert_eq!(actual, expected); } } diff --git a/crates/ty_python_semantic/src/types/generics.rs b/crates/ty_python_semantic/src/types/generics.rs index 4bde75f353e4ec..536a04ed3d2480 100644 --- a/crates/ty_python_semantic/src/types/generics.rs +++ b/crates/ty_python_semantic/src/types/generics.rs @@ -1826,15 +1826,16 @@ impl<'db> SpecializationBuilder<'db> { fn add_type_mappings_from_constraint_set<'c>( &mut self, formal: Type<'db>, - constraints: ConstraintSet<'db, 'c>, + set: ConstraintSet<'db, 'c>, + constraints: &'c ConstraintSetBuilder<'db>, mut f: impl FnMut(TypeVarAssignment<'db>) -> Option>, ) -> Result<(), ()> { - let solutions = match constraints.solutions(self.db) { + let solutions = match set.solutions(self.db, constraints) { Solutions::Unsatisfiable => return Err(()), Solutions::Unconstrained => return Ok(()), Solutions::Constrained(solutions) => solutions, }; - for solution in solutions { + for solution in solutions.iter() { for binding in solution { let variance = formal.variance_of(self.db, binding.bound_typevar); self.add_type_mapping(binding.bound_typevar, binding.solution, 
variance, &mut f); @@ -1864,7 +1865,7 @@ impl<'db> SpecializationBuilder<'db> { &constraints, self.inferable, ); - self.add_type_mappings_from_constraint_set(formal, when, &mut *f)?; + self.add_type_mappings_from_constraint_set(formal, when, &constraints, &mut *f)?; } else { // An overloaded actual callable is compatible with the formal signature if at // least one of its overloads is. We collect type mappings from all satisfiable @@ -1878,7 +1879,7 @@ impl<'db> SpecializationBuilder<'db> { self.inferable, ); if self - .add_type_mappings_from_constraint_set(formal, when, &mut *f) + .add_type_mappings_from_constraint_set(formal, when, &constraints, &mut *f) .is_ok() { any_satisfiable = true; @@ -2316,7 +2317,12 @@ impl<'db> SpecializationBuilder<'db> { // unsatisfied comparisons simply produced no type mappings), and avoids // false positives for callable-wrapper patterns while this path is still // a hybrid of old and new solver logic. - let _ = self.add_type_mappings_from_constraint_set(formal, when, &mut f); + let _ = self.add_type_mappings_from_constraint_set( + formal, + when, + constraints, + &mut f, + ); return Ok(()); } From 5742c681425da8197335871e2fdccbfca14219a4 Mon Sep 17 00:00:00 2001 From: Douglas Creager Date: Tue, 24 Feb 2026 17:13:34 -0500 Subject: [PATCH 2/9] intern typevars too!!! 
--- crates/ty_python_semantic/src/types.rs | 2 +- .../ty_python_semantic/src/types/call/bind.rs | 10 +- .../src/types/constraints.rs | 169 ++++++++++++++---- .../src/types/infer/builder.rs | 14 +- 4 files changed, 151 insertions(+), 44 deletions(-) diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index 166e2a7bc2dda0..56c33dc102010b 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -3701,7 +3701,7 @@ impl<'db> Type<'db> { Type::KnownInstance(KnownInstanceType::ConstraintSet(tracked_set)) => { let constraints = ConstraintSetBuilder::new(); - let tracked_set = constraints.load(tracked_set.constraints(db)); + let tracked_set = constraints.load(db, tracked_set.constraints(db)); Truthiness::from(tracked_set.is_always_satisfied(db)) } diff --git a/crates/ty_python_semantic/src/types/call/bind.rs b/crates/ty_python_semantic/src/types/call/bind.rs index 3ecd08bf650c44..f9394347733622 100644 --- a/crates/ty_python_semantic/src/types/call/bind.rs +++ b/crates/ty_python_semantic/src/types/call/bind.rs @@ -1807,7 +1807,7 @@ impl<'db> Bindings<'db> { ty_a.when_subtype_of_assuming( db, *ty_b, - constraints.load(tracked.constraints(db)), + constraints.load(db, tracked.constraints(db)), constraints, InferableTypeVars::None, ) @@ -1831,8 +1831,8 @@ impl<'db> Bindings<'db> { let constraints = ConstraintSetBuilder::new(); let result = constraints.into_owned(|constraints| { - let lhs = constraints.load(tracked.constraints(db)); - let rhs = constraints.load(other.constraints(db)); + let lhs = constraints.load(db, tracked.constraints(db)); + let rhs = constraints.load(db, other.constraints(db)); lhs.implies(db, constraints, || rhs) }); let tracked = InternedConstraintSet::new(db, result); @@ -1872,7 +1872,7 @@ impl<'db> Bindings<'db> { }; let constraints = ConstraintSetBuilder::new(); - let set = constraints.load(tracked.constraints(db)); + let set = constraints.load(db, 
tracked.constraints(db)); let result = set.satisfied_by_all_typevars( db, &constraints, @@ -1891,7 +1891,7 @@ impl<'db> Bindings<'db> { continue; }; let constraints = ConstraintSetBuilder::new(); - let set = constraints.load(set.constraints(db)); + let set = constraints.load(db, set.constraints(db)); let specialization = generic_context.specialize_constrained(db, &constraints, set); let result = match specialization { diff --git a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs index 2ec243e34a9dbc..cef91dfe368bb8 100644 --- a/crates/ty_python_semantic/src/types/constraints.rs +++ b/crates/ty_python_semantic/src/types/constraints.rs @@ -603,9 +603,11 @@ pub(crate) struct ConstraintSetBuilder<'db> { #[derive(Clone, Debug, Default, Eq, PartialEq, get_size2::GetSize)] struct ConstraintSetStorage<'db> { + typevars: IndexVec>, constraints: IndexVec>, nodes: IndexVec, + typevar_cache: FxHashMap, TypeVarId>, constraint_cache: FxHashMap, ConstraintId>, node_cache: FxHashMap, @@ -642,10 +644,14 @@ impl<'db> ConstraintSetBuilder<'db> { } } - pub(crate) fn load<'c>(&'c self, other: &OwnedConstraintSet<'db>) -> ConstraintSet<'db, 'c> { + pub(crate) fn load<'c>( + &'c self, + db: &'db dyn Db, + other: &OwnedConstraintSet<'db>, + ) -> ConstraintSet<'db, 'c> { let mut constraints = IndexVec::with_capacity(other.constraints.len()); for constraint in other.constraints.iter().copied() { - constraints.push(self.intern_constraint(constraint)); + constraints.push(self.intern_constraint(db, constraint)); } let mut nodes = IndexVec::with_capacity(other.nodes.len()); @@ -666,7 +672,70 @@ impl<'db> ConstraintSetBuilder<'db> { ConstraintSet::from_node(self, remap(other.node, &nodes)) } - fn intern_constraint(&self, data: Constraint<'db>) -> ConstraintId { + fn intern_typevar(&self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarId { + let identity = typevar.identity(db); + let mut storage = self.storage.borrow_mut(); + 
if let Some(id) = storage.typevar_cache.get(&identity) { + return *id; + } + let id = storage.typevars.push(identity); + storage.typevar_cache.insert(identity, id); + id + } + + fn intern_mentioned_typevars_in_type(&self, db: &'db dyn Db, ty: Type<'db>) { + struct InternMentionedTypevars<'a, 'db> { + builder: &'a ConstraintSetBuilder<'db>, + recursion_guard: TypeCollector<'db>, + } + + impl<'db> TypeVisitor<'db> for InternMentionedTypevars<'_, 'db> { + fn should_visit_lazy_type_attributes(&self) -> bool { + false + } + + fn visit_bound_type_var_type( + &self, + db: &'db dyn Db, + bound_typevar: BoundTypeVarInstance<'db>, + ) { + self.builder.intern_typevar(db, bound_typevar); + walk_bound_type_var_type(db, bound_typevar, self); + } + + fn visit_generic_alias_type(&self, db: &'db dyn Db, alias: GenericAlias<'db>) { + for ty in alias.specialization(db).types(db) { + self.visit_type(db, *ty); + } + } + + fn visit_type(&self, db: &'db dyn Db, ty: Type<'db>) { + walk_type_with_recursion_guard(db, ty, self, &self.recursion_guard); + } + } + + InternMentionedTypevars { + builder: self, + recursion_guard: TypeCollector::default(), + } + .visit_type(db, ty); + } + + fn intern_constraint_typevars( + &self, + db: &'db dyn Db, + typevar: BoundTypeVarInstance<'db>, + lower: Type<'db>, + upper: Type<'db>, + ) { + self.intern_typevar(db, typevar); + self.intern_mentioned_typevars_in_type(db, lower); + self.intern_mentioned_typevars_in_type(db, upper); + } + + fn intern_constraint(&self, db: &'db dyn Db, data: Constraint<'db>) -> ConstraintId { + self.intern_constraint_typevars(db, data.typevar, data.lower, data.upper); + let mut storage = self.storage.borrow_mut(); if let Some(id) = storage.constraint_cache.get(&data) { return *id; @@ -686,6 +755,16 @@ impl<'db> ConstraintSetBuilder<'db> { id } + fn typevar_id(&self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarId { + let identity = typevar.identity(db); + self.storage + .borrow() + .typevar_cache + 
.get(&identity) + .copied() + .expect("typevar should be interned before ordering") + } + fn constraint_data(&self, constraint: ConstraintId) -> Constraint<'db> { self.storage.borrow().constraints[constraint] } @@ -705,8 +784,13 @@ impl<'db> BoundTypeVarInstance<'db> { /// any cycles. This particular ordering plays nicely with how we are ordering constraints /// within a BDD — it means that if a typevar has another typevar as a bound, all of the /// constraints that apply to the bound will appear lower in the BDD. - fn can_be_bound_for(self, db: &'db dyn Db, typevar: Self) -> bool { - self.identity(db) > typevar.identity(db) + fn can_be_bound_for( + self, + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + typevar: Self, + ) -> bool { + builder.typevar_id(db, self) > builder.typevar_id(db, typevar) } } @@ -723,6 +807,11 @@ impl IntersectionResult<'_> { } } +/// The index of a bound typevar within a [`ConstraintSetStorage`]. +#[newtype_index] +#[derive(Ord, PartialOrd, salsa::Update, get_size2::GetSize)] +pub struct TypeVarId; + /// The index of an individual constraint (i.e. a BDD variable) within a [`ConstraintSetStorage`]. #[newtype_index] #[derive(Ord, PartialOrd, salsa::Update, get_size2::GetSize)] @@ -740,16 +829,20 @@ pub(crate) struct Constraint<'db> { impl<'db> Constraint<'db> { #[expect(clippy::new_ret_no_self)] fn new( + db: &'db dyn Db, builder: &ConstraintSetBuilder<'db>, typevar: BoundTypeVarInstance<'db>, lower: Type<'db>, upper: Type<'db>, ) -> ConstraintId { - builder.intern_constraint(Constraint { - typevar, - lower, - upper, - }) + builder.intern_constraint( + db, + Constraint { + typevar, + lower, + upper, + }, + ) } /// Returns a new range constraint. 
@@ -828,7 +921,7 @@ impl<'db> Constraint<'db> { { return Node::new_constraint( builder, - Constraint::new(builder, typevar, Type::Never, Type::object()), + Constraint::new(db, builder, typevar, Type::Never, Type::object()), 1, ) .negate(builder); @@ -859,6 +952,8 @@ impl<'db> Constraint<'db> { return ALWAYS_FALSE; } + builder.intern_constraint_typevars(db, typevar, lower, upper); + // We have an (arbitrary) ordering for typevars. If the upper and/or lower bounds are // typevars, we have to ensure that the bounds are "later" according to that order than the // typevar being constrained. @@ -868,40 +963,47 @@ impl<'db> Constraint<'db> { match (lower, upper) { // L ≤ T ≤ L == (T ≤ [L] ≤ T) (Type::TypeVar(lower), Type::TypeVar(upper)) if lower.is_same_typevar_as(db, upper) => { - let (bound, typevar) = if lower.can_be_bound_for(db, typevar) { + let (bound, typevar) = if lower.can_be_bound_for(db, builder, typevar) { (lower, typevar) } else { (typevar, lower) }; Node::new_constraint( builder, - Constraint::new(builder, typevar, Type::TypeVar(bound), Type::TypeVar(bound)), + Constraint::new( + db, + builder, + typevar, + Type::TypeVar(bound), + Type::TypeVar(bound), + ), 1, ) } // L ≤ T ≤ U == ([L] ≤ T) && (T ≤ [U]) (Type::TypeVar(lower), Type::TypeVar(upper)) - if typevar.can_be_bound_for(db, lower) && typevar.can_be_bound_for(db, upper) => + if typevar.can_be_bound_for(db, builder, lower) + && typevar.can_be_bound_for(db, builder, upper) => { let lower = Node::new_constraint( builder, - Constraint::new(builder, lower, Type::Never, Type::TypeVar(typevar)), + Constraint::new(db, builder, lower, Type::Never, Type::TypeVar(typevar)), 1, ); let upper = Node::new_constraint( builder, - Constraint::new(builder, upper, Type::TypeVar(typevar), Type::object()), + Constraint::new(db, builder, upper, Type::TypeVar(typevar), Type::object()), 1, ); lower.and(builder, upper) } // L ≤ T ≤ U == ([L] ≤ T) && ([T] ≤ U) - (Type::TypeVar(lower), _) if typevar.can_be_bound_for(db, lower) => 
{ + (Type::TypeVar(lower), _) if typevar.can_be_bound_for(db, builder, lower) => { let lower = Node::new_constraint( builder, - Constraint::new(builder, lower, Type::Never, Type::TypeVar(typevar)), + Constraint::new(db, builder, lower, Type::Never, Type::TypeVar(typevar)), 1, ); let upper = if upper.is_object() { @@ -913,7 +1015,7 @@ impl<'db> Constraint<'db> { } // L ≤ T ≤ U == (L ≤ [T]) && (T ≤ [U]) - (_, Type::TypeVar(upper)) if typevar.can_be_bound_for(db, upper) => { + (_, Type::TypeVar(upper)) if typevar.can_be_bound_for(db, builder, upper) => { let lower = if lower.is_never() { ALWAYS_TRUE } else { @@ -921,13 +1023,17 @@ impl<'db> Constraint<'db> { }; let upper = Node::new_constraint( builder, - Constraint::new(builder, upper, Type::TypeVar(typevar), Type::object()), + Constraint::new(db, builder, upper, Type::TypeVar(typevar), Type::object()), 1, ); lower.and(builder, upper) } - _ => Node::new_constraint(builder, Constraint::new(builder, typevar, lower, upper), 1), + _ => Node::new_constraint( + builder, + Constraint::new(db, builder, typevar, lower, upper), + 1, + ), } } } @@ -3217,7 +3323,7 @@ impl InteriorNode { // have to figure out which of the two typevars is constrained, and which one is // the upper/lower bound. 
let (bound_constraint, constrained_constraint) = - if left_typevar.can_be_bound_for(db, right_typevar) { + if left_typevar.can_be_bound_for(db, builder, right_typevar) { (left_constraint, right_constraint) } else { (right_constraint, left_constraint) @@ -3261,7 +3367,7 @@ impl InteriorNode { }; let new_constraint = - Constraint::new(builder, constrained_typevar, new_lower, new_upper); + Constraint::new(db, builder, constrained_typevar, new_lower, new_upper); if seen_constraints.contains(&new_constraint) { continue; } @@ -3386,7 +3492,7 @@ impl InteriorNode { match left_constraint.intersect(db, builder, right_constraint) { IntersectionResult::Simplified(intersection_constraint_data) => { let intersection_constraint = - builder.intern_constraint(intersection_constraint_data); + builder.intern_constraint(db, intersection_constraint_data); // If the intersection is non-empty, we need to create a new constraint to // represent that intersection. We also need to add the new constraint to our @@ -3976,7 +4082,7 @@ impl SequentMap { // Case 1 (Type::TypeVar(lower_typevar), Type::TypeVar(upper_typevar)) => { if !lower_typevar.is_same_typevar_as(db, upper_typevar) { - Constraint::new(builder, lower_typevar, Type::Never, upper) + Constraint::new(db, builder, lower_typevar, Type::Never, upper) } else { return; } @@ -3984,12 +4090,12 @@ impl SequentMap { // Case 2 (Type::TypeVar(lower_typevar), _) => { - Constraint::new(builder, lower_typevar, Type::Never, upper) + Constraint::new(db, builder, lower_typevar, Type::Never, upper) } // Case 3 (_, Type::TypeVar(upper_typevar)) => { - Constraint::new(builder, upper_typevar, lower, Type::object()) + Constraint::new(db, builder, upper_typevar, lower, Type::object()) } _ => return, @@ -4070,7 +4176,7 @@ impl SequentMap { let right_constraint_data = builder.constraint_data(right_constraint); let right_typevar = right_constraint_data.typevar; let (bound_constraint, constrained_constraint) = - if left_typevar.can_be_bound_for(db, 
right_typevar) { + if left_typevar.can_be_bound_for(db, builder, right_typevar) { (left_constraint, right_constraint) } else { (right_constraint, left_constraint) @@ -4142,7 +4248,8 @@ impl SequentMap { _ => return, }; - let post_constraint = Constraint::new(builder, constrained_typevar, new_lower, new_upper); + let post_constraint = + Constraint::new(db, builder, constrained_typevar, new_lower, new_upper); self.add_pair_implication( db, builder, @@ -4184,7 +4291,7 @@ impl SequentMap { } else { right_upper }; - Constraint::new(builder, bound_typevar, right_lower, right_upper) + Constraint::new(db, builder, bound_typevar, right_lower, right_upper) }; let post_constraint = match (left_lower, left_upper) { (Type::TypeVar(bound_typevar), Type::TypeVar(other_bound_typevar)) @@ -4247,7 +4354,7 @@ impl SequentMap { match left_constraint.intersect(db, builder, right_constraint) { IntersectionResult::Simplified(intersection_constraint_data) => { let intersection_constraint = - builder.intern_constraint(intersection_constraint_data); + builder.intern_constraint(db, intersection_constraint_data); tracing::trace!( target: "ty_python_semantic::types::constraints::SequentMap", left = %left_constraint.display(db, builder), diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index a9ce47c3049cfa..e32d1a9f06de06 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -13984,7 +13984,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { (ast::UnaryOp::Invert, Type::KnownInstance(KnownInstanceType::ConstraintSet(set))) => { let constraints = ConstraintSetBuilder::new(); let result = constraints.into_owned(|constraints| { - let set = constraints.load(set.constraints(self.db())); + let set = constraints.load(self.db(), set.constraints(self.db())); set.negate(self.db(), constraints) }); Type::KnownInstance(KnownInstanceType::ConstraintSet( @@ 
-14743,8 +14743,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ) => { let constraints = ConstraintSetBuilder::new(); let result = constraints.into_owned(|constraints| { - let left = constraints.load(left.constraints(self.db())); - let right = constraints.load(right.constraints(self.db())); + let left = constraints.load(self.db(), left.constraints(self.db())); + let right = constraints.load(self.db(), right.constraints(self.db())); left.and(self.db(), constraints, || right) }); Some(Type::KnownInstance(KnownInstanceType::ConstraintSet( @@ -14759,8 +14759,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ) => { let constraints = ConstraintSetBuilder::new(); let result = constraints.into_owned(|constraints| { - let left = constraints.load(left.constraints(self.db())); - let right = constraints.load(right.constraints(self.db())); + let left = constraints.load(self.db(), left.constraints(self.db())); + let right = constraints.load(self.db(), right.constraints(self.db())); left.or(self.db(), constraints, || right) }); Some(Type::KnownInstance(KnownInstanceType::ConstraintSet( @@ -15614,8 +15614,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { Type::KnownInstance(KnownInstanceType::ConstraintSet(right)), ) => { let constraints = ConstraintSetBuilder::new(); - let left = constraints.load(left.constraints(self.db())); - let right = constraints.load(right.constraints(self.db())); + let left = constraints.load(self.db(), left.constraints(self.db())); + let right = constraints.load(self.db(), right.constraints(self.db())); let result = left.iff(self.db(), &constraints, right); let equivalent = result.is_always_satisfied(self.db()); match op { From 3d4f62ce4f31033b2313009477219157064d298e Mon Sep 17 00:00:00 2001 From: Douglas Creager Date: Tue, 24 Feb 2026 18:58:20 -0500 Subject: [PATCH 3/9] brittle on ordering --- .../src/types/constraints.rs | 52 ++++++++++++------- 1 file changed, 33 insertions(+), 19 deletions(-) diff --git 
a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs index cef91dfe368bb8..4044354a35c4ad 100644 --- a/crates/ty_python_semantic/src/types/constraints.rs +++ b/crates/ty_python_semantic/src/types/constraints.rs @@ -649,27 +649,41 @@ impl<'db> ConstraintSetBuilder<'db> { db: &'db dyn Db, other: &OwnedConstraintSet<'db>, ) -> ConstraintSet<'db, 'c> { - let mut constraints = IndexVec::with_capacity(other.constraints.len()); - for constraint in other.constraints.iter().copied() { - constraints.push(self.intern_constraint(db, constraint)); - } + fn rebuild_node<'db>( + db: &'db dyn Db, + builder: &ConstraintSetBuilder<'db>, + other: &OwnedConstraintSet<'db>, + cache: &mut FxHashMap, + old_node: NodeId, + ) -> NodeId { + if old_node.is_terminal() { + return old_node; + } + if let Some(remapped) = cache.get(&old_node) { + return *remapped; + } - let mut nodes = IndexVec::with_capacity(other.nodes.len()); - let remap = |old: NodeId, nodes: &IndexVec| { - if old.is_terminal() { old } else { nodes[old] } - }; - for node in other.nodes.iter().copied() { - let remapped = InteriorNodeData { - constraint: constraints[node.constraint], - if_true: remap(node.if_true, &nodes), - if_false: remap(node.if_false, &nodes), - source_order: node.source_order, - max_source_order: node.max_source_order, - }; - nodes.push(self.intern_interior_node(remapped)); + let old_interior = other.nodes[old_node]; + let old_constraint = other.constraints[old_interior.constraint]; + let condition = Constraint::new_node( + db, + builder, + old_constraint.typevar, + old_constraint.lower, + old_constraint.upper, + ); + + let if_true = rebuild_node(db, builder, other, cache, old_interior.if_true); + let if_false = rebuild_node(db, builder, other, cache, old_interior.if_false); + let remapped = condition.ite(builder, if_true, if_false); + + cache.insert(old_node, remapped); + remapped } - ConstraintSet::from_node(self, remap(other.node, &nodes)) + let mut 
cache = FxHashMap::default(); + let node = rebuild_node(db, self, other, &mut cache, other.node); + ConstraintSet::from_node(self, node) } fn intern_typevar(&self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarId { @@ -790,7 +804,7 @@ impl<'db> BoundTypeVarInstance<'db> { builder: &ConstraintSetBuilder<'db>, typevar: Self, ) -> bool { - builder.typevar_id(db, self) > builder.typevar_id(db, typevar) + builder.typevar_id(db, self) < builder.typevar_id(db, typevar) } } From dbb1981fd6471c59ac8b25ef69ac3705c0a2ae8b Mon Sep 17 00:00:00 2001 From: Douglas Creager Date: Wed, 25 Feb 2026 10:23:38 -0500 Subject: [PATCH 4/9] fix?!? typevar ordering brittleness --- .../src/types/constraints.rs | 201 ++++++++++++++---- 1 file changed, 161 insertions(+), 40 deletions(-) diff --git a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs index 4044354a35c4ad..4e706e017d9070 100644 --- a/crates/ty_python_semantic/src/types/constraints.rs +++ b/crates/ty_python_semantic/src/types/constraints.rs @@ -4095,10 +4095,28 @@ impl SequentMap { let post_constraint = match (lower, upper) { // Case 1 (Type::TypeVar(lower_typevar), Type::TypeVar(upper_typevar)) => { - if !lower_typevar.is_same_typevar_as(db, upper_typevar) { + if lower_typevar.is_same_typevar_as(db, upper_typevar) { + return; + } + + // We always want to propagate `lower ≤ upper`, but we must do so using a + // canonical top-level typevar ordering. + // + // Example: if we learn `(A ≤ [T] ≤ B)`, this single-constraint propagation step + // should infer `A ≤ B`. Depending on ordering, we might need to encode that as + // either `(Never ≤ [A] ≤ B)` or `(A ≤ [B] ≤ object)`. Both render as `A ≤ B`, + // but they constrain different typevars and must be created in the orientation + // allowed by `can_be_bound_for`. 
+ if upper_typevar.can_be_bound_for(db, builder, lower_typevar) { Constraint::new(db, builder, lower_typevar, Type::Never, upper) } else { - return; + Constraint::new( + db, + builder, + upper_typevar, + Type::TypeVar(lower_typevar), + Type::object(), + ) } } @@ -4262,15 +4280,70 @@ impl SequentMap { _ => return, }; - let post_constraint = - Constraint::new(db, builder, constrained_typevar, new_lower, new_upper); - self.add_pair_implication( - db, - builder, - left_constraint, - right_constraint, - post_constraint, - ); + let mut post_constraints: SmallVec<[ConstraintId; 3]> = SmallVec::new(); + let mut constrained_lower = new_lower; + let mut constrained_upper = new_upper; + + // The transitive rule above gives us an intended post-condition + // `new_lower ≤ [constrained] ≤ new_upper`. + // + // If a top-level bound typevar is "earlier" than `constrained`, we cannot represent that + // directly as a bound on `constrained` without violating our canonical ordering. + // Instead, split it into equivalent canonical constraints by "moving" that bound onto the + // other typevar: + // + // invalid lower `L ≤ [C]` -> `(Never ≤ [L] ≤ C)` and drop `L` from C's lower bound + // invalid upper `[C] ≤ U` -> `(C ≤ [U] ≤ object)` and drop `U` from C's upper bound + // + // Example: if we derive `[A] ≤ T ≤ [B]` but `A`/`B` are not valid top-level bounds for + // `T` in this ordering, we emit two pair implications: + // `(Never ≤ [A] ≤ T)` and `(T ≤ [B] ≤ object)`. + // This preserves the relationship while keeping all derived constraints canonical. 
+ if let Type::TypeVar(lower_bound_typevar) = new_lower + && !lower_bound_typevar.can_be_bound_for(db, builder, constrained_typevar) + { + post_constraints.push(Constraint::new( + db, + builder, + lower_bound_typevar, + Type::Never, + Type::TypeVar(constrained_typevar), + )); + constrained_lower = Type::Never; + } + + if let Type::TypeVar(upper_bound_typevar) = new_upper + && !upper_bound_typevar.can_be_bound_for(db, builder, constrained_typevar) + { + post_constraints.push(Constraint::new( + db, + builder, + upper_bound_typevar, + Type::TypeVar(constrained_typevar), + Type::object(), + )); + constrained_upper = Type::object(); + } + + if !(constrained_lower.is_never() && constrained_upper.is_object()) { + post_constraints.push(Constraint::new( + db, + builder, + constrained_typevar, + constrained_lower, + constrained_upper, + )); + } + + for post_constraint in post_constraints { + self.add_pair_implication( + db, + builder, + left_constraint, + right_constraint, + post_constraint, + ); + } } fn add_mutual_sequents_for_same_typevars<'db>( @@ -4288,46 +4361,94 @@ impl SequentMap { let right_constraint_data = builder.constraint_data(right_constraint); let right_lower = right_constraint_data.lower; let right_upper = right_constraint_data.upper; - let new_constraint = |bound_typevar: BoundTypeVarInstance<'db>, - right_lower: Type<'db>, - right_upper: Type<'db>| { - let right_lower = if let Type::TypeVar(other_bound_typevar) = right_lower - && bound_typevar.is_same_typevar_as(db, other_bound_typevar) - { - Type::Never - } else { - right_lower - }; - let right_upper = if let Type::TypeVar(other_bound_typevar) = right_upper - && bound_typevar.is_same_typevar_as(db, other_bound_typevar) - { - Type::object() - } else { - right_upper + let new_constraints = + |bound_typevar: BoundTypeVarInstance<'db>, + right_lower: Type<'db>, + right_upper: Type<'db>| { + let right_lower = if let Type::TypeVar(other_bound_typevar) = right_lower + && bound_typevar.is_same_typevar_as(db, 
other_bound_typevar) + { + Type::Never + } else { + right_lower + }; + let right_upper = if let Type::TypeVar(other_bound_typevar) = right_upper + && bound_typevar.is_same_typevar_as(db, other_bound_typevar) + { + Type::object() + } else { + right_upper + }; + + // Same idea as `add_mutual_sequents_for_different_typevars`: if a derived + // post-condition for `[bound]` has top-level typevar bounds in the wrong + // orientation, split it into equivalent canonical constraints instead of + // dropping it. + let mut post_constraints: SmallVec<[ConstraintId; 3]> = SmallVec::new(); + let mut constrained_lower = right_lower; + let mut constrained_upper = right_upper; + + if let Type::TypeVar(lower_bound_typevar) = right_lower + && !lower_bound_typevar.can_be_bound_for(db, builder, bound_typevar) + { + post_constraints.push(Constraint::new( + db, + builder, + lower_bound_typevar, + Type::Never, + Type::TypeVar(bound_typevar), + )); + constrained_lower = Type::Never; + } + + if let Type::TypeVar(upper_bound_typevar) = right_upper + && !upper_bound_typevar.can_be_bound_for(db, builder, bound_typevar) + { + post_constraints.push(Constraint::new( + db, + builder, + upper_bound_typevar, + Type::TypeVar(bound_typevar), + Type::object(), + )); + constrained_upper = Type::object(); + } + + if !(constrained_lower.is_never() && constrained_upper.is_object()) { + post_constraints.push(Constraint::new( + db, + builder, + bound_typevar, + constrained_lower, + constrained_upper, + )); + } + + post_constraints }; - Constraint::new(db, builder, bound_typevar, right_lower, right_upper) - }; - let post_constraint = match (left_lower, left_upper) { + let post_constraints = match (left_lower, left_upper) { (Type::TypeVar(bound_typevar), Type::TypeVar(other_bound_typevar)) if bound_typevar.is_same_typevar_as(db, other_bound_typevar) => { - new_constraint(bound_typevar, right_lower, right_upper) + new_constraints(bound_typevar, right_lower, right_upper) } (Type::TypeVar(bound_typevar), _) => { 
- new_constraint(bound_typevar, Type::Never, right_upper) + new_constraints(bound_typevar, Type::Never, right_upper) } (_, Type::TypeVar(bound_typevar)) => { - new_constraint(bound_typevar, right_lower, Type::object()) + new_constraints(bound_typevar, right_lower, Type::object()) } _ => return, }; - self.add_pair_implication( - db, - builder, - left_constraint, - right_constraint, - post_constraint, - ); + for post_constraint in post_constraints { + self.add_pair_implication( + db, + builder, + left_constraint, + right_constraint, + post_constraint, + ); + } }; try_one_direction(left_constraint, right_constraint); From f6b9a683cdf34ecb15e02dbb6068fe13eac428e0 Mon Sep 17 00:00:00 2001 From: Douglas Creager Date: Fri, 27 Feb 2026 13:27:39 -0500 Subject: [PATCH 5/9] add some docs --- .../src/types/constraints.rs | 94 ++++++++++++++++--- 1 file changed, 82 insertions(+), 12 deletions(-) diff --git a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs index 4e706e017d9070..426d1d44c1e97b 100644 --- a/crates/ty_python_semantic/src/types/constraints.rs +++ b/crates/ty_python_semantic/src/types/constraints.rs @@ -87,7 +87,7 @@ use crate::types::{ BoundTypeVarIdentity, BoundTypeVarInstance, IntersectionType, Type, TypeVarBoundOrConstraints, UnionType, walk_bound_type_var_type, }; -use crate::{Db, FxIndexMap, FxIndexSet, FxOrderSet}; +use crate::{Db, FxIndexMap, FxIndexSet}; /// An extension trait for building constraint sets from [`Option`] values. pub(crate) trait OptionConstraintsExtension { @@ -204,11 +204,27 @@ where } } +/// An owned copy of a [`ConstraintSet`]. Unlike [`ConstraintSet`], this type owns the storage +/// arenas that hold its BDD. +/// +/// This type is never created as part of the core type inference algorithms; it is only used by +/// the [`InternedConstraintSet`][crate::types::InternedConstraintSet] type, which is the wrapper +/// type that lets us create and operate on constraint sets in our mdtests. 
That means we don't +/// have to be overly worried about the efficiency of this type. +/// +/// Note that you cannot interrogate an owned constraint set in any useful way. Instead, you must +/// [`load`][ConstraintSetBuilder::load] it into a new builder, and query the result. #[derive(Clone, Debug, Eq, Hash, PartialEq, get_size2::GetSize, salsa::Update)] pub struct OwnedConstraintSet<'db> { /// The BDD representing this constraint set node: NodeId, + + /// The constraints arena for this BDD. This is extracted from the [`ConstraintSetBuilder`] + /// when an owned constraint set is constructed. constraints: IndexVec>, + + /// The nodes arena for this BDD. This is extracted from the [`ConstraintSetBuilder`] when an + /// owned constraint set is constructed. nodes: IndexVec, } @@ -226,7 +242,11 @@ pub struct OwnedConstraintSet<'db> { pub struct ConstraintSet<'db, 'c> { /// The BDD representing this constraint set node: NodeId, + + /// A reference to the builder that holds the storage for this constraint set's BDD builder: &'c ConstraintSetBuilder<'db>, + + /// Ensures that the `'c` lifetime is invariant _invariant: PhantomData &'c ()>, } @@ -269,6 +289,7 @@ impl<'db, 'c> ConstraintSet<'db, 'c> { ) } + /// Verifies that this constraint set was created by `builder` #[track_caller] fn verify_builder(self, builder: &'c ConstraintSetBuilder<'db>) { debug_assert!(std::ptr::eq(self.builder, builder)); @@ -596,6 +617,19 @@ impl Debug for ConstraintSet<'_, '_> { } } +/// Holds the storage for the BDD structure of a related collection of constraint sets. +/// +/// This is usually passed around by shared reference to avoid convoluted APIs that thread mutable +/// references to the builder back and forth. +/// +/// Most core type inference algorithms create one of these, create one or more constraint sets in +/// the builder, interrogate those constraint sets, and then throw the builder away. 
+/// +/// All of our BDD algorithms rely heavily on interning and memoization, for both correctness and +/// efficiency. These caches are only unique within the context of a particular builder. We do not +/// cache globally across the entire ty process. (The main reason is to avoid any dependencies on +/// the particular order in which files or expressions are visited during type checking. A minor +/// additional benefit is that the builder does not need to be thread-safe or impl [`Sync`].) #[derive(Default)] pub(crate) struct ConstraintSetBuilder<'db> { storage: RefCell>, @@ -603,12 +637,27 @@ pub(crate) struct ConstraintSetBuilder<'db> { #[derive(Clone, Debug, Default, Eq, PartialEq, get_size2::GetSize)] struct ConstraintSetStorage<'db> { - typevars: IndexVec>, + /// Constraints are the variables of our BDD. They are interned to give them a space-efficient + /// identity. Constraints are added to this arena as they are encountered when constructing + /// constraint sets. The ordering within the arena defines the BDD variable ordering in our BDD + /// structures. constraints: IndexVec>, + + /// Typevars are interned so that they have a stable ordering within this builder, which does + /// not depend on their salsa IDs. (The salsa IDs are not stable, since each typevar can be + /// used (possibly indirectly) in expressions in different files, and there are no guarantees + /// about the order or the speed that we process each file.) + /// + /// The ordering of typevars within this arena defines which typevars can be the lower/upper + /// bounds of another (e.g., whether we encode `T ≤ U` as `Never ≤ T ≤ U` or `T ≤ U ≤ object`). + typevars: IndexVec>, + + /// The BDD nodes that appear in any of the constraint sets constructed in this builder. nodes: IndexVec, - typevar_cache: FxHashMap, TypeVarId>, + // Everything below are the memoization tables for the arenas and for our BDD operations. 
constraint_cache: FxHashMap, ConstraintId>, + typevar_cache: FxHashMap, TypeVarId>, node_cache: FxHashMap, negate_cache: FxHashMap, @@ -630,10 +679,18 @@ impl<'db> ConstraintSetBuilder<'db> { Self::default() } + /// Creates an [`OwnedConstraintSet`], consuming this builder in the process. You provide a + /// callback that constructs a [`ConstraintSet`]. We then package that constraint set up with + /// the storage arenas from this builder. pub(crate) fn into_owned( self, f: impl for<'c> FnOnce(&'c Self) -> ConstraintSet<'db, 'c>, ) -> OwnedConstraintSet<'db> { + // NOTE: We do not store any of the builder's memoization caches in the result. Owned + // constraint sets can only be used by adding them to a new builder. Doing so adds copies + // of the constraints and nodes to the new builder, since they might overlap with + // constraints and nodes that already exist there. That means the memoization caches from + // the original builder aren't relevant to the new builder, and don't need to be retained. let constraint = f(&self); let node = constraint.node; let storage = self.storage.into_inner(); @@ -644,11 +701,20 @@ impl<'db> ConstraintSetBuilder<'db> { } } + /// Loads an [`OwnedConstraintSet`] into this builder. pub(crate) fn load<'c>( &'c self, db: &'db dyn Db, other: &OwnedConstraintSet<'db>, ) -> ConstraintSet<'db, 'c> { + // The BDD structure inside a builder depends on the ordering of constraints and typevars + // in the builder's arenas. (The constraint ordering defines the BDD variable ordering, + // while the typevar ordering defines which typevars can be lower/upper bounds of other + // typevars.) There is no guarantee that the `OwnedConstraintSet` and this builder have + // consistent orderings, so we have to just reload everything, standardizing on _this_ + // builder's orderings. That's not the quickest thing in the world, but that's fine, since + // `OwnedConstraintSet` is only used in mdtests, and not in type inference of user code. 
+ fn rebuild_node<'db>( db: &'db dyn Db, builder: &ConstraintSetBuilder<'db>, @@ -681,11 +747,13 @@ impl<'db> ConstraintSetBuilder<'db> { remapped } + // Maps NodeIds in the OwnedConstraintSet to the corresponding NodeIds in this builder. let mut cache = FxHashMap::default(); let node = rebuild_node(db, self, other, &mut cache, other.node); ConstraintSet::from_node(self, node) } + /// Interns a single typevar, giving it a stable order in this builder fn intern_typevar(&self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarId { let identity = typevar.identity(db); let mut storage = self.storage.borrow_mut(); @@ -697,6 +765,7 @@ impl<'db> ConstraintSetBuilder<'db> { id } + /// Interns all of the typevars mentioned in a type in a stable order. fn intern_mentioned_typevars_in_type(&self, db: &'db dyn Db, ty: Type<'db>) { struct InternMentionedTypevars<'a, 'db> { builder: &'a ConstraintSetBuilder<'db>, @@ -735,6 +804,7 @@ impl<'db> ConstraintSetBuilder<'db> { .visit_type(db, ty); } + /// Interns all of the typevars mentioned in a constraint in a stable order. fn intern_constraint_typevars( &self, db: &'db dyn Db, @@ -1069,12 +1139,12 @@ impl ConstraintId { /// and working with BDDs. We don't do that, but we have tried to make some simple choices that /// have clear wins. /// - /// In particular, we use the IDs that salsa assigns to each constraint as it is created. This - /// tends to ensure that constraints that are close to each other in the source are also close - /// to each other in the BDD structure. + /// In particular, we use the order that constraints are added to this builder. This gives us + /// an ordering that is stable across runs, and which is not influenced by when and how quickly + /// we analyze the other files in the project. /// /// As an optimization, we also _reverse_ this ordering, so that constraints that appear - /// earlier in the source appear "lower" (closer to the terminal nodes) in the BDD. 
Since we + /// earlier in the arena appear "lower" (closer to the terminal nodes) in the BDD. Since we /// build up BDDs by combining smaller BDDs (which will have been constructed from expressions /// earlier in the source), this tends to minimize the amount of "node shuffling" that we have /// to do when combining BDDs. @@ -1265,13 +1335,13 @@ impl ConstraintId { } } -/// The index of a BDD node within a [`ConstraintSetStorage`]. +/// The index of a BDD node within a [`ConstraintSetBuilder`]. /// /// The "variables" of a constraint set BDD are individual constraints, represented by an interned /// [`Constraint`]. /// -/// Terminal nodes (`false` and `true`) have hard-coded index values. Interior nodes are stored in -/// a [`ConstraintSetStorage`], and are represented by the index into the storage array. By +/// Terminal nodes (`false` and `true`) have hard-coded IDs. Interior nodes are stored in a +/// [`ConstraintSetBuilder`], and are represented by the index into the storage array. By /// construction, interior nodes can only refer to nodes with smaller indexes (since the nodes that /// outgoing edges point at must already exist). 
/// @@ -3870,9 +3940,9 @@ struct SequentMap { /// Sequents of the form `C₁ ∧ C₂ → false` pair_impossibilities: FxHashSet<(ConstraintId, ConstraintId)>, /// Sequents of the form `C₁ ∧ C₂ → D` - pair_implications: FxHashMap<(ConstraintId, ConstraintId), FxOrderSet>, + pair_implications: FxIndexMap<(ConstraintId, ConstraintId), FxIndexSet>, /// Sequents of the form `C → D` - single_implications: FxHashMap>, + single_implications: FxIndexMap>, } impl SequentMap { From dddfeba4c5ed09499d47cc5ad5f6e8fe075b246b Mon Sep 17 00:00:00 2001 From: Douglas Creager Date: Fri, 27 Feb 2026 14:27:00 -0500 Subject: [PATCH 6/9] no no_self --- .../src/types/constraints.rs | 84 ++++++++++--------- 1 file changed, 43 insertions(+), 41 deletions(-) diff --git a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs index 426d1d44c1e97b..73efa4ee0981ff 100644 --- a/crates/ty_python_semantic/src/types/constraints.rs +++ b/crates/ty_python_semantic/src/types/constraints.rs @@ -910,9 +910,8 @@ pub(crate) struct Constraint<'db> { pub(crate) upper: Type<'db>, } -impl<'db> Constraint<'db> { - #[expect(clippy::new_ret_no_self)] - fn new( +impl ConstraintId { + fn new<'db>( db: &'db dyn Db, builder: &ConstraintSetBuilder<'db>, typevar: BoundTypeVarInstance<'db>, @@ -928,7 +927,9 @@ impl<'db> Constraint<'db> { }, ) } +} +impl<'db> Constraint<'db> { /// Returns a new range constraint. /// /// Panics if `lower` and `upper` are not both fully static. 
@@ -1005,7 +1006,7 @@ impl<'db> Constraint<'db> { { return Node::new_constraint( builder, - Constraint::new(db, builder, typevar, Type::Never, Type::object()), + ConstraintId::new(db, builder, typevar, Type::Never, Type::object()), 1, ) .negate(builder); @@ -1054,7 +1055,7 @@ impl<'db> Constraint<'db> { }; Node::new_constraint( builder, - Constraint::new( + ConstraintId::new( db, builder, typevar, @@ -1072,12 +1073,12 @@ impl<'db> Constraint<'db> { { let lower = Node::new_constraint( builder, - Constraint::new(db, builder, lower, Type::Never, Type::TypeVar(typevar)), + ConstraintId::new(db, builder, lower, Type::Never, Type::TypeVar(typevar)), 1, ); let upper = Node::new_constraint( builder, - Constraint::new(db, builder, upper, Type::TypeVar(typevar), Type::object()), + ConstraintId::new(db, builder, upper, Type::TypeVar(typevar), Type::object()), 1, ); lower.and(builder, upper) @@ -1087,7 +1088,7 @@ impl<'db> Constraint<'db> { (Type::TypeVar(lower), _) if typevar.can_be_bound_for(db, builder, lower) => { let lower = Node::new_constraint( builder, - Constraint::new(db, builder, lower, Type::Never, Type::TypeVar(typevar)), + ConstraintId::new(db, builder, lower, Type::Never, Type::TypeVar(typevar)), 1, ); let upper = if upper.is_object() { @@ -1107,7 +1108,7 @@ impl<'db> Constraint<'db> { }; let upper = Node::new_constraint( builder, - Constraint::new(db, builder, upper, Type::TypeVar(typevar), Type::object()), + ConstraintId::new(db, builder, upper, Type::TypeVar(typevar), Type::object()), 1, ); lower.and(builder, upper) @@ -1115,7 +1116,7 @@ impl<'db> Constraint<'db> { _ => Node::new_constraint( builder, - Constraint::new(db, builder, typevar, lower, upper), + ConstraintId::new(db, builder, typevar, lower, upper), 1, ), } @@ -1378,9 +1379,8 @@ enum Node { Interior(InteriorNode), } -impl Node { +impl NodeId { /// Creates a new BDD node, ensuring that it is quasi-reduced. 
- #[expect(clippy::new_ret_no_self)] fn new( builder: &ConstraintSetBuilder<'_>, constraint: ConstraintId, @@ -1416,7 +1416,9 @@ impl Node { max_source_order, }) } +} +impl Node { /// Creates a new BDD node for an individual constraint. (The BDD will evaluate to `true` when /// the constraint holds, and to `false` when it does not.) fn new_constraint( @@ -1504,7 +1506,7 @@ impl NodeId { Node::AlwaysTrue | Node::AlwaysFalse => self, Node::Interior(_) => { let interior = builder.interior_node_data(self); - Node::new( + NodeId::new( builder, interior.constraint, interior.if_true.with_adjusted_source_order(builder, delta), @@ -1732,7 +1734,7 @@ impl NodeId { (Node::AlwaysTrue, Node::AlwaysTrue) => ALWAYS_TRUE, (Node::AlwaysTrue, Node::Interior(_)) => { let other_interior = builder.interior_node_data(other); - Node::new( + NodeId::new( builder, other_interior.constraint, ALWAYS_TRUE, @@ -1742,7 +1744,7 @@ impl NodeId { } (Node::Interior(_), Node::AlwaysTrue) => { let self_interior = builder.interior_node_data(self); - Node::new( + NodeId::new( builder, self_interior.constraint, ALWAYS_TRUE, @@ -1892,7 +1894,7 @@ impl NodeId { (Node::AlwaysFalse, Node::AlwaysFalse) => ALWAYS_FALSE, (Node::AlwaysFalse, Node::Interior(_)) => { let other_interior = builder.interior_node_data(other); - Node::new( + NodeId::new( builder, other_interior.constraint, ALWAYS_FALSE, @@ -1902,7 +1904,7 @@ impl NodeId { } (Node::Interior(_), Node::AlwaysFalse) => { let self_interior = builder.interior_node_data(self); - Node::new( + NodeId::new( builder, self_interior.constraint, ALWAYS_FALSE, @@ -1954,7 +1956,7 @@ impl NodeId { } (Node::AlwaysTrue | Node::AlwaysFalse, Node::Interior(_)) => { let interior = builder.interior_node_data(other); - Node::new( + NodeId::new( builder, interior.constraint, self.iff_inner(builder, interior.if_true, other_offset), @@ -1964,7 +1966,7 @@ impl NodeId { } (Node::Interior(_), Node::AlwaysTrue | Node::AlwaysFalse) => { let interior = 
builder.interior_node_data(self); - Node::new( + NodeId::new( builder, interior.constraint, interior.if_true.iff_inner(builder, other, other_offset), @@ -2708,7 +2710,7 @@ impl InteriorNode { drop(storage); let interior = builder.interior_node_data(self.node()); - let result = Node::new( + let result = NodeId::new( builder, interior.constraint, interior.if_true.negate(builder), @@ -2734,7 +2736,7 @@ impl InteriorNode { let other_interior = builder.interior_node_data(other.node()); let other_ordering = other_interior.constraint.ordering(); let result = match self_ordering.cmp(&other_ordering) { - Ordering::Equal => Node::new( + Ordering::Equal => NodeId::new( builder, self_interior.constraint, self_interior @@ -2745,7 +2747,7 @@ impl InteriorNode { .or_inner(builder, other_interior.if_false, other_offset), self_interior.source_order, ), - Ordering::Less => Node::new( + Ordering::Less => NodeId::new( builder, self_interior.constraint, self_interior @@ -2756,7 +2758,7 @@ impl InteriorNode { .or_inner(builder, other.node(), other_offset), self_interior.source_order, ), - Ordering::Greater => Node::new( + Ordering::Greater => NodeId::new( builder, other_interior.constraint, self.node() @@ -2785,7 +2787,7 @@ impl InteriorNode { let other_interior = builder.interior_node_data(other.node()); let other_ordering = other_interior.constraint.ordering(); let result = match self_ordering.cmp(&other_ordering) { - Ordering::Equal => Node::new( + Ordering::Equal => NodeId::new( builder, self_interior.constraint, self_interior @@ -2796,7 +2798,7 @@ impl InteriorNode { .and_inner(builder, other_interior.if_false, other_offset), self_interior.source_order, ), - Ordering::Less => Node::new( + Ordering::Less => NodeId::new( builder, self_interior.constraint, self_interior @@ -2807,7 +2809,7 @@ impl InteriorNode { .and_inner(builder, other.node(), other_offset), self_interior.source_order, ), - Ordering::Greater => Node::new( + Ordering::Greater => NodeId::new( builder, 
other_interior.constraint, self.node() @@ -2836,7 +2838,7 @@ impl InteriorNode { let other_interior = builder.interior_node_data(other.node()); let other_ordering = other_interior.constraint.ordering(); let result = match self_ordering.cmp(&other_ordering) { - Ordering::Equal => Node::new( + Ordering::Equal => NodeId::new( builder, self_interior.constraint, self_interior @@ -2847,7 +2849,7 @@ impl InteriorNode { .iff_inner(builder, other_interior.if_false, other_offset), self_interior.source_order, ), - Ordering::Less => Node::new( + Ordering::Less => NodeId::new( builder, self_interior.constraint, self_interior @@ -2858,7 +2860,7 @@ impl InteriorNode { .iff_inner(builder, other.node(), other_offset), self_interior.source_order, ), - Ordering::Greater => Node::new( + Ordering::Greater => NodeId::new( builder, other_interior.constraint, self.node() @@ -3123,7 +3125,7 @@ impl InteriorNode { let (if_false, found_in_false) = self_interior.if_false.restrict_one(db, builder, assignment); ( - Node::new( + NodeId::new( builder, self_interior.constraint, if_true, @@ -3451,7 +3453,7 @@ impl InteriorNode { }; let new_constraint = - Constraint::new(db, builder, constrained_typevar, new_lower, new_upper); + ConstraintId::new(db, builder, constrained_typevar, new_lower, new_upper); if seen_constraints.contains(&new_constraint) { continue; } @@ -4178,9 +4180,9 @@ impl SequentMap { // but they constrain different typevars and must be created in the orientation // allowed by `can_be_bound_for`. 
if upper_typevar.can_be_bound_for(db, builder, lower_typevar) { - Constraint::new(db, builder, lower_typevar, Type::Never, upper) + ConstraintId::new(db, builder, lower_typevar, Type::Never, upper) } else { - Constraint::new( + ConstraintId::new( db, builder, upper_typevar, @@ -4192,12 +4194,12 @@ impl SequentMap { // Case 2 (Type::TypeVar(lower_typevar), _) => { - Constraint::new(db, builder, lower_typevar, Type::Never, upper) + ConstraintId::new(db, builder, lower_typevar, Type::Never, upper) } // Case 3 (_, Type::TypeVar(upper_typevar)) => { - Constraint::new(db, builder, upper_typevar, lower, Type::object()) + ConstraintId::new(db, builder, upper_typevar, lower, Type::object()) } _ => return, @@ -4372,7 +4374,7 @@ impl SequentMap { if let Type::TypeVar(lower_bound_typevar) = new_lower && !lower_bound_typevar.can_be_bound_for(db, builder, constrained_typevar) { - post_constraints.push(Constraint::new( + post_constraints.push(ConstraintId::new( db, builder, lower_bound_typevar, @@ -4385,7 +4387,7 @@ impl SequentMap { if let Type::TypeVar(upper_bound_typevar) = new_upper && !upper_bound_typevar.can_be_bound_for(db, builder, constrained_typevar) { - post_constraints.push(Constraint::new( + post_constraints.push(ConstraintId::new( db, builder, upper_bound_typevar, @@ -4396,7 +4398,7 @@ impl SequentMap { } if !(constrained_lower.is_never() && constrained_upper.is_object()) { - post_constraints.push(Constraint::new( + post_constraints.push(ConstraintId::new( db, builder, constrained_typevar, @@ -4461,7 +4463,7 @@ impl SequentMap { if let Type::TypeVar(lower_bound_typevar) = right_lower && !lower_bound_typevar.can_be_bound_for(db, builder, bound_typevar) { - post_constraints.push(Constraint::new( + post_constraints.push(ConstraintId::new( db, builder, lower_bound_typevar, @@ -4474,7 +4476,7 @@ impl SequentMap { if let Type::TypeVar(upper_bound_typevar) = right_upper && !upper_bound_typevar.can_be_bound_for(db, builder, bound_typevar) { - 
post_constraints.push(Constraint::new( + post_constraints.push(ConstraintId::new( db, builder, upper_bound_typevar, @@ -4485,7 +4487,7 @@ impl SequentMap { } if !(constrained_lower.is_never() && constrained_upper.is_object()) { - post_constraints.push(Constraint::new( + post_constraints.push(ConstraintId::new( db, builder, bound_typevar, From 369ed36d9a4c24bdaa65ff90cc5e3feb80c671c2 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Sat, 28 Feb 2026 18:19:21 +0000 Subject: [PATCH 7/9] remove more footgunny `PartialOrd` impls --- .../src/semantic_index/definition.rs | 5 ----- crates/ty_python_semantic/src/types.rs | 15 +++------------ 2 files changed, 3 insertions(+), 17 deletions(-) diff --git a/crates/ty_python_semantic/src/semantic_index/definition.rs b/crates/ty_python_semantic/src/semantic_index/definition.rs index 99bacd9e4c1bac..26c570e6458c86 100644 --- a/crates/ty_python_semantic/src/semantic_index/definition.rs +++ b/crates/ty_python_semantic/src/semantic_index/definition.rs @@ -25,12 +25,7 @@ use crate::unpack::{Unpack, UnpackPosition}; /// because a new scope gets inserted before the `Definition` or a new place is inserted /// before this `Definition`. However, the ID can be considered stable and it is okay to use /// `Definition` in cross-module` salsa queries or as a field on other salsa tracked structs. -/// -/// # Ordering -/// Ordering is based on the definition's salsa-assigned id and not on its values. -/// The id may change between runs, or when the definition was garbage collected and recreated. #[salsa::tracked(debug, heap_size=ruff_memory_usage::heap_size)] -#[derive(Ord, PartialOrd)] pub struct Definition<'db> { /// The file in which the definition occurs. 
pub file: File, diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index b7261c3853b55b..cbacf5545d3fca 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -8047,12 +8047,7 @@ impl TypeVarKind { /// This represents the core identity of a typevar, independent of its bounds or constraints. Two /// typevars have the same identity if they represent the same logical typevar, even if their /// bounds have been materialized differently. -/// -/// # Ordering -/// Ordering is based on the identity's salsa-assigned id and not on its values. -/// The id may change between runs, or when the identity was garbage collected and recreated. #[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)] -#[derive(PartialOrd, Ord)] pub struct TypeVarIdentity<'db> { /// The name of this TypeVar (e.g. `T`) #[returns(ref)] @@ -8589,9 +8584,7 @@ fn lazy_default_cycle_recover<'db>( } /// Where a type variable is bound and usable. -#[derive( - Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, salsa::Update, get_size2::GetSize, -)] +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, salsa::Update, get_size2::GetSize)] pub enum BindingContext<'db> { /// The definition of the generic class, function, or type alias that binds this typevar. Definition(Definition<'db>), @@ -8619,7 +8612,7 @@ impl<'db> BindingContext<'db> { } } -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, get_size2::GetSize)] +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, get_size2::GetSize)] pub enum ParamSpecAttrKind { Args, Kwargs, @@ -8640,9 +8633,7 @@ impl std::fmt::Display for ParamSpecAttrKind { /// independent of the typevar's bounds or constraints. Two bound typevars have the same identity /// if they represent the same logical typevar bound in the same context, even if their bounds /// have been materialized differently. 
-#[derive( - Debug, Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd, get_size2::GetSize, salsa::Update, -)] +#[derive(Debug, Clone, Copy, Eq, Hash, PartialEq, get_size2::GetSize, salsa::Update)] pub struct BoundTypeVarIdentity<'db> { pub(crate) identity: TypeVarIdentity<'db>, pub(crate) binding_context: BindingContext<'db>, From 40459bd6491d83a146eb09b70c5c5ff9af5a35c6 Mon Sep 17 00:00:00 2001 From: Douglas Creager Date: Mon, 2 Mar 2026 10:40:45 -0500 Subject: [PATCH 8/9] remove more Ords --- crates/ty_python_semantic/src/types/constraints.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs index c593de4dba66a6..ae16466ace4e2f 100644 --- a/crates/ty_python_semantic/src/types/constraints.rs +++ b/crates/ty_python_semantic/src/types/constraints.rs @@ -874,7 +874,7 @@ impl<'db> BoundTypeVarInstance<'db> { builder: &ConstraintSetBuilder<'db>, typevar: Self, ) -> bool { - builder.typevar_id(db, self) < builder.typevar_id(db, typevar) + builder.typevar_id(db, self).index() < builder.typevar_id(db, typevar).index() } } @@ -893,12 +893,12 @@ impl IntersectionResult<'_> { /// The index of a bound typevar within a [`ConstraintSetStorage`]. #[newtype_index] -#[derive(Ord, PartialOrd, salsa::Update, get_size2::GetSize)] +#[derive(salsa::Update, get_size2::GetSize)] pub struct TypeVarId; /// The index of an individual constraint (i.e. a BDD variable) within a [`ConstraintSetStorage`]. #[newtype_index] -#[derive(Ord, PartialOrd, salsa::Update, get_size2::GetSize)] +#[derive(salsa::Update, get_size2::GetSize)] pub struct ConstraintId; /// An individual constraint in a constraint set. This restricts a single typevar to be within a @@ -1156,7 +1156,7 @@ impl ConstraintId { /// empirically that we get smaller BDDs with an ordering that is more aligned with source /// order. 
fn ordering(self) -> impl Ord { - std::cmp::Reverse(self) + std::cmp::Reverse(self.index()) } /// Returns whether this constraint implies another — i.e., whether every type that From ac60f1006ad6d9f6ccf7660a57ba5a90b3588f9e Mon Sep 17 00:00:00 2001 From: Douglas Creager Date: Mon, 2 Mar 2026 13:25:25 -0500 Subject: [PATCH 9/9] add todo about creating builder in TypeInferenceBuilder --- crates/ty_python_semantic/src/types/constraints.rs | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs index ae16466ace4e2f..b75d19c18b4d4a 100644 --- a/crates/ty_python_semantic/src/types/constraints.rs +++ b/crates/ty_python_semantic/src/types/constraints.rs @@ -622,14 +622,21 @@ impl Debug for ConstraintSet<'_, '_> { /// This is usually passed around by shared reference to avoid convoluted APIs that thread mutable /// references to the builder back and forth. /// -/// Most core type inference algorithms create one of these, create one or more constraint sets in -/// the builder, interrogate those constraint sets, and then throw the builder away. -/// /// All of our BDD algorithms rely heavily on interning and memoization, for both correctness and /// efficiency. These caches are only unique within the context of a particular builder. We do not /// cache globally across the entire ty process. (The main reason is to avoid any dependencies on /// the particular order in which files or expressions are visited during type checking. A minor /// additional benefit is that the builder does not need to be thread-safe or impl [`Sync`].) +/// +/// Most core type inference algorithms create a builder, create one or more constraint sets in the +/// builder, interrogate those constraint sets, and then throw the builder away. 
+///
+/// TODO: We are considering creating a single builder in `TypeInferenceBuilder` that would be
+/// shared across an entire inference region. That would give us even more sharing opportunities,
+/// which could be highly impactful, since it's likely that there will be types and constraints
+/// that are repeated within a region. It should still give us the stability that we need, because
+/// once we determine that we need _something_ from an inference region, we always infer _all_ of
+/// the definitions and expressions in that region, in a stable order.
 #[derive(Default)]
 pub(crate) struct ConstraintSetBuilder<'db> {
     storage: RefCell>,