@@ -21,10 +21,9 @@ use std::marker::PhantomData;
 use derive_where::derive_where;
 #[cfg(feature = "nightly")]
 use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext};
+use rustc_type_ir::data_structures::HashMap;
 use tracing::{debug, instrument};
 
-use crate::data_structures::HashMap;
-
 mod stack;
 use stack::{Stack, StackDepth, StackEntry};
 mod global_cache;
@@ -137,6 +136,12 @@ pub enum PathKind {
     Unknown,
     /// A path with at least one coinductive step. Such cycles hold.
     Coinductive,
+    /// A path which is treated as ambiguous. Once a path has this path kind,
+    /// any additional segment does not change its kind.
+    ///
+    /// This is currently only used when fuzzing to support negative reasoning.
+    /// For more details, see #143054.
+    ForcedAmbiguity,
 }
 
 impl PathKind {
@@ -149,6 +154,9 @@ impl PathKind {
     /// to `max(self, rest)`.
     fn extend(self, rest: PathKind) -> PathKind {
         match (self, rest) {
+            (PathKind::ForcedAmbiguity, _) | (_, PathKind::ForcedAmbiguity) => {
+                PathKind::ForcedAmbiguity
+            }
             (PathKind::Coinductive, _) | (_, PathKind::Coinductive) => PathKind::Coinductive,
             (PathKind::Unknown, _) | (_, PathKind::Unknown) => PathKind::Unknown,
             (PathKind::Inductive, PathKind::Inductive) => PathKind::Inductive,
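`extend` is documented above as returning `max(self, rest)`, with `ForcedAmbiguity` as the new top of the ordering: a single forced-ambiguous segment makes the whole path forced-ambiguous. A minimal standalone sketch of that lattice behavior (a simplified mirror of `PathKind`, not the compiler's type):

```rust
// Simplified mirror of `PathKind`: declaration order gives the derived
// `Ord`, so `extend` is just the maximum of the two segment kinds.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum PathKind {
    Inductive,
    Unknown,
    Coinductive,
    ForcedAmbiguity,
}

impl PathKind {
    fn extend(self, rest: PathKind) -> PathKind {
        self.max(rest)
    }
}

fn main() {
    assert_eq!(PathKind::Inductive.extend(PathKind::Unknown), PathKind::Unknown);
    // `ForcedAmbiguity` absorbs every other segment kind, matching the
    // first match arm added above.
    assert_eq!(PathKind::Coinductive.extend(PathKind::ForcedAmbiguity), PathKind::ForcedAmbiguity);
}
```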
@@ -187,41 +195,6 @@ impl UsageKind {
     }
 }
 
-/// For each goal we track whether the paths from this goal
-/// to its cycle heads are coinductive.
-///
-/// This is a necessary condition to rebase provisional cache
-/// entries.
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum AllPathsToHeadCoinductive {
-    Yes,
-    No,
-}
-impl From<PathKind> for AllPathsToHeadCoinductive {
-    fn from(path: PathKind) -> AllPathsToHeadCoinductive {
-        match path {
-            PathKind::Coinductive => AllPathsToHeadCoinductive::Yes,
-            _ => AllPathsToHeadCoinductive::No,
-        }
-    }
-}
-impl AllPathsToHeadCoinductive {
-    #[must_use]
-    fn merge(self, other: impl Into<Self>) -> Self {
-        match (self, other.into()) {
-            (AllPathsToHeadCoinductive::Yes, AllPathsToHeadCoinductive::Yes) => {
-                AllPathsToHeadCoinductive::Yes
-            }
-            (AllPathsToHeadCoinductive::No, _) | (_, AllPathsToHeadCoinductive::No) => {
-                AllPathsToHeadCoinductive::No
-            }
-        }
-    }
-    fn and_merge(&mut self, other: impl Into<Self>) {
-        *self = self.merge(other);
-    }
-}
-
 #[derive(Debug, Clone, Copy)]
 struct AvailableDepth(usize);
 impl AvailableDepth {
@@ -261,9 +234,9 @@ impl AvailableDepth {
 ///
 /// We also track all paths from this goal to that head. This is necessary
 /// when rebasing provisional cache results.
-#[derive(Clone, Debug, PartialEq, Eq, Default)]
+#[derive(Clone, Debug, Default)]
 struct CycleHeads {
-    heads: BTreeMap<StackDepth, AllPathsToHeadCoinductive>,
+    heads: BTreeMap<StackDepth, PathsToNested>,
 }
 
 impl CycleHeads {
@@ -283,27 +256,16 @@ impl CycleHeads {
         self.heads.first_key_value().map(|(k, _)| *k)
     }
 
-    fn remove_highest_cycle_head(&mut self) {
+    fn remove_highest_cycle_head(&mut self) -> PathsToNested {
         let last = self.heads.pop_last();
-        debug_assert_ne!(last, None);
-    }
-
-    fn insert(
-        &mut self,
-        head: StackDepth,
-        path_from_entry: impl Into<AllPathsToHeadCoinductive> + Copy,
-    ) {
-        self.heads.entry(head).or_insert(path_from_entry.into()).and_merge(path_from_entry);
+        last.unwrap().1
     }
 
-    fn merge(&mut self, heads: &CycleHeads) {
-        for (&head, &path_from_entry) in heads.heads.iter() {
-            self.insert(head, path_from_entry);
-            debug_assert!(matches!(self.heads[&head], AllPathsToHeadCoinductive::Yes));
-        }
+    fn insert(&mut self, head: StackDepth, path_from_entry: impl Into<PathsToNested> + Copy) {
+        *self.heads.entry(head).or_insert(path_from_entry.into()) |= path_from_entry.into();
     }
 
-    fn iter(&self) -> impl Iterator<Item = (StackDepth, AllPathsToHeadCoinductive)> + '_ {
+    fn iter(&self) -> impl Iterator<Item = (StackDepth, PathsToNested)> + '_ {
         self.heads.iter().map(|(k, v)| (*k, *v))
     }
 
@@ -317,13 +279,7 @@ impl CycleHeads {
                 Ordering::Equal => continue,
                 Ordering::Greater => unreachable!(),
             }
-
-            let path_from_entry = match step_kind {
-                PathKind::Coinductive => AllPathsToHeadCoinductive::Yes,
-                PathKind::Unknown | PathKind::Inductive => path_from_entry,
-            };
-
-            self.insert(head, path_from_entry);
+            self.insert(head, path_from_entry.extend_with(step_kind));
         }
     }
 }
@@ -332,13 +288,14 @@ bitflags::bitflags! {
     /// Tracks how nested goals have been accessed. This is necessary to disable
     /// global cache entries if computing them would otherwise result in a cycle or
     /// access a provisional cache entry.
-    #[derive(Debug, Clone, Copy)]
+    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
     pub struct PathsToNested: u8 {
         /// The initial value when adding a goal to its own nested goals.
         const EMPTY                      = 1 << 0;
         const INDUCTIVE                  = 1 << 1;
         const UNKNOWN                    = 1 << 2;
         const COINDUCTIVE                = 1 << 3;
+        const FORCED_AMBIGUITY           = 1 << 4;
     }
 }
 impl From<PathKind> for PathsToNested {
@@ -347,6 +304,7 @@ impl From<PathKind> for PathsToNested {
             PathKind::Inductive => PathsToNested::INDUCTIVE,
             PathKind::Unknown => PathsToNested::UNKNOWN,
             PathKind::Coinductive => PathsToNested::COINDUCTIVE,
+            PathKind::ForcedAmbiguity => PathsToNested::FORCED_AMBIGUITY,
         }
     }
 }
@@ -379,10 +337,45 @@ impl PathsToNested {
                     self.insert(PathsToNested::COINDUCTIVE);
                 }
             }
+            PathKind::ForcedAmbiguity => {
+                if self.intersects(
+                    PathsToNested::EMPTY
+                        | PathsToNested::INDUCTIVE
+                        | PathsToNested::UNKNOWN
+                        | PathsToNested::COINDUCTIVE,
+                ) {
+                    self.remove(
+                        PathsToNested::EMPTY
+                            | PathsToNested::INDUCTIVE
+                            | PathsToNested::UNKNOWN
+                            | PathsToNested::COINDUCTIVE,
+                    );
+                    self.insert(PathsToNested::FORCED_AMBIGUITY);
+                }
+            }
         }
 
         self
     }
+
+    #[must_use]
+    fn extend_with_paths(self, path: PathsToNested) -> Self {
+        let mut new = PathsToNested::empty();
+        for p in path.iter_paths() {
+            new |= self.extend_with(p);
+        }
+        new
+    }
+
+    fn iter_paths(self) -> impl Iterator<Item = PathKind> {
+        let (PathKind::Inductive
+        | PathKind::Unknown
+        | PathKind::Coinductive
+        | PathKind::ForcedAmbiguity);
+        [PathKind::Inductive, PathKind::Unknown, PathKind::Coinductive, PathKind::ForcedAmbiguity]
+            .into_iter()
+            .filter(move |&p| self.contains(p.into()))
+    }
 }
 
 /// The nested goals of each stack entry and the path from the
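The binding-less `let (PathKind::Inductive | ...)` statement in `iter_paths` is an exhaustiveness guard: a `let` without an initializer only compiles when its pattern is irrefutable, so the or-pattern has to keep listing every `PathKind` variant. Adding a variant breaks the build right next to the array that must then be updated. The same trick in miniature (hypothetical `Kind` enum):

```rust
enum Kind {
    A,
    B,
}

fn all_kinds() -> [Kind; 2] {
    // Irrefutable only while `A | B` covers every variant of `Kind`;
    // a new variant turns this `let` into a compile error.
    let (Kind::A | Kind::B);
    [Kind::A, Kind::B]
}

fn main() {
    assert_eq!(all_kinds().len(), 2);
}
```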
@@ -693,7 +686,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
             if let Some((_scope, expected)) = validate_cache {
                 // Do not try to move a goal into the cache again if we're testing
                 // the global cache.
-                assert_eq!(evaluation_result.result, expected, "input={input:?}");
+                assert_eq!(expected, evaluation_result.result, "input={input:?}");
             } else if D::inspect_is_noop(inspect) {
                 self.insert_global_cache(cx, input, evaluation_result, dep_node)
             }
@@ -763,14 +756,11 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
     /// provisional cache entry is still applicable. We need to keep the cache entries to
     /// prevent hangs.
     ///
-    /// What we therefore do is check whether the cycle kind of all cycles the goal of a
-    /// provisional cache entry is involved in would stay the same when computing the
-    /// goal without its cycle head on the stack. For more details, see the relevant
+    /// This can be thought of as pretending to reevaluate the popped head as nested goals
+    /// of this provisional result. For this to be correct, all cycles encountered while
+    /// we'd reevaluate the cycle head as a nested goal must keep the same cycle kind. See the
     /// [rustc-dev-guide chapter](https://rustc-dev-guide.rust-lang.org/solve/caching.html).
     ///
-    /// This can be thought of rotating the sub-tree of this provisional result and changing
-    /// its entry point while making sure that all paths through this sub-tree stay the same.
-    ///
     /// In case the popped cycle head failed to reach a fixpoint, anything which depends on
     /// its provisional result is invalid. Actually discarding provisional cache entries in
     /// this case would cause hangs, so we instead change the result of dependent provisional
@@ -782,7 +772,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         stack_entry: &StackEntry<X>,
         mut mutate_result: impl FnMut(X::Input, X::Result) -> X::Result,
     ) {
-        let head = self.stack.next_index();
+        let popped_head = self.stack.next_index();
         #[allow(rustc::potential_query_instability)]
         self.provisional_cache.retain(|&input, entries| {
             entries.retain_mut(|entry| {
@@ -792,30 +782,44 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
                     path_from_head,
                     result,
                 } = entry;
-                if heads.highest_cycle_head() == head {
+                let ep = if heads.highest_cycle_head() == popped_head {
                     heads.remove_highest_cycle_head()
                 } else {
                     return true;
-                }
-
-                // We only try to rebase if all paths from the cache entry
-                // to its heads are coinductive. In this case these cycle
-                // kinds won't change, no matter the goals between these
-                // heads and the provisional cache entry.
-                if heads.iter().any(|(_, p)| matches!(p, AllPathsToHeadCoinductive::No)) {
-                    return false;
-                }
+                };
 
-                // The same for nested goals of the cycle head.
-                if stack_entry.heads.iter().any(|(_, p)| matches!(p, AllPathsToHeadCoinductive::No))
-                {
-                    return false;
+                // We're rebasing an entry `e` over a head `p`. This head
+                // has a number of own heads `h` it depends on. We need to
+                // make sure that the path kind of all paths `hph` remains the
+                // same after rebasing.
+                //
+                // After rebasing, the cycles `hph` will go through `e`. We need to make
+                // sure that for all possible paths `hep`, `heph` is equal to `hph`.
+                for (h, ph) in stack_entry.heads.iter() {
+                    let hp =
+                        Self::cycle_path_kind(&self.stack, stack_entry.step_kind_from_parent, h);
+
+                    // We first validate that all cycles while computing `p` would stay
+                    // the same if we were to recompute it as a nested goal of `e`.
+                    let he = hp.extend(*path_from_head);
+                    for ph in ph.iter_paths() {
+                        let hph = hp.extend(ph);
+                        for ep in ep.iter_paths() {
+                            let hep = ep.extend(he);
+                            let heph = hep.extend(ph);
+                            if hph != heph {
+                                return false;
+                            }
+                        }
+                    }
+
+                    // If so, all paths reached while computing `p` have to get added
+                    // to the heads of `e` to make sure that rebasing `e` again also
+                    // considers them.
+                    let eph = ep.extend_with_paths(ph);
+                    heads.insert(h, eph);
                 }
 
-                // Merge the cycle heads of the provisional cache entry and the
-                // popped head. If the popped cycle head was a root, discard all
-                // provisional cache entries which depend on it.
-                heads.merge(&stack_entry.heads);
                 let Some(head) = heads.opt_highest_cycle_head() else {
                     return false;
                 };
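To see what the `hph != heph` check accepts and rejects, here is a worked miniature using the `max` model of `PathKind::extend`; the variable names follow the comments above (`hp` is the path from a head `h` to the popped head `p`, `pe` from `p` to the entry `e`, and so on). This illustrates the invariant and is not the compiler's code:

```rust
// A worked miniature of the rebase check: rerouting the cycle
// h -> p -> h through the cache entry `e` must not change its kind.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum PathKind {
    Inductive,
    Coinductive,
}

fn extend(a: PathKind, b: PathKind) -> PathKind {
    a.max(b)
}

fn main() {
    use PathKind::*;

    // The cycle h -> p -> h is coinductive thanks to the h -> p segment.
    let (hp, ph) = (Coinductive, Inductive);
    let hph = extend(hp, ph);

    // Rerouting it through `e` (h -> p -> e -> p -> h) with inductive
    // detour segments keeps the kind, so rebasing the entry is allowed.
    let (pe, ep) = (Inductive, Inductive);
    let heph = extend(extend(extend(hp, pe), ep), ph);
    assert_eq!(hph, heph);

    // With an inductive h -> p segment instead, a coinductive e -> p edge
    // changes the cycle's kind, so the entry must be dropped instead
    // (the `return false` in the loop above).
    let heph = extend(extend(extend(Inductive, pe), Coinductive), ph);
    assert_ne!(extend(Inductive, ph), heph);
}
```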