@@ -21,10 +21,9 @@ use std::marker::PhantomData;
 use derive_where::derive_where;
 #[cfg(feature = "nightly")]
 use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext};
+use rustc_type_ir::data_structures::HashMap;
 use tracing::{debug, instrument};
 
-use crate::data_structures::HashMap;
-
 mod stack;
 use stack::{Stack, StackDepth, StackEntry};
 mod global_cache;
@@ -137,6 +136,12 @@ pub enum PathKind {
     Unknown,
     /// A path with at least one coinductive step. Such cycles hold.
     Coinductive,
+    /// A path which is treated as ambiguous. Once a path has this kind,
+    /// any additional segment does not change it.
+    ///
+    /// This is currently only used when fuzzing to support negative reasoning.
+    /// For more details, see #143054.
+    ForcedAmbiguity,
 }
 
 impl PathKind {
@@ -149,6 +154,9 @@ impl PathKind {
     /// to `max(self, rest)`.
     fn extend(self, rest: PathKind) -> PathKind {
         match (self, rest) {
+            (PathKind::ForcedAmbiguity, _) | (_, PathKind::ForcedAmbiguity) => {
+                PathKind::ForcedAmbiguity
+            }
             (PathKind::Coinductive, _) | (_, PathKind::Coinductive) => PathKind::Coinductive,
             (PathKind::Unknown, _) | (_, PathKind::Unknown) => PathKind::Unknown,
             (PathKind::Inductive, PathKind::Inductive) => PathKind::Inductive,
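For intuition: with the new variant, `extend` still behaves like `max` over the ordering `Inductive < Unknown < Coinductive < ForcedAmbiguity`. A minimal standalone sketch (a `derive(Ord)` mirror of the enum, not the compiler's actual types):

```rust
// Standalone sketch: `extend` is `max` over the variant ordering, so a
// `derive(Ord)` mirror of the enum reproduces its behavior.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum PathKind {
    Inductive,
    Unknown,
    Coinductive,
    ForcedAmbiguity,
}

fn main() {
    // A single coinductive step upgrades the whole path.
    assert_eq!(PathKind::Inductive.max(PathKind::Coinductive), PathKind::Coinductive);
    // `ForcedAmbiguity` absorbs everything: once a path has this kind,
    // further segments never change it.
    assert_eq!(PathKind::ForcedAmbiguity.max(PathKind::Unknown), PathKind::ForcedAmbiguity);
}
```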
@@ -187,41 +195,6 @@ impl UsageKind {
     }
 }
 
-/// For each goal we track whether the paths from this goal
-/// to its cycle heads are coinductive.
-///
-/// This is a necessary condition to rebase provisional cache
-/// entries.
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum AllPathsToHeadCoinductive {
-    Yes,
-    No,
-}
-impl From<PathKind> for AllPathsToHeadCoinductive {
-    fn from(path: PathKind) -> AllPathsToHeadCoinductive {
-        match path {
-            PathKind::Coinductive => AllPathsToHeadCoinductive::Yes,
-            _ => AllPathsToHeadCoinductive::No,
-        }
-    }
-}
-impl AllPathsToHeadCoinductive {
-    #[must_use]
-    fn merge(self, other: impl Into<Self>) -> Self {
-        match (self, other.into()) {
-            (AllPathsToHeadCoinductive::Yes, AllPathsToHeadCoinductive::Yes) => {
-                AllPathsToHeadCoinductive::Yes
-            }
-            (AllPathsToHeadCoinductive::No, _) | (_, AllPathsToHeadCoinductive::No) => {
-                AllPathsToHeadCoinductive::No
-            }
-        }
-    }
-    fn and_merge(&mut self, other: impl Into<Self>) {
-        *self = self.merge(other);
-    }
-}
-
 #[derive(Debug, Clone, Copy)]
 struct AvailableDepth(usize);
 impl AvailableDepth {
@@ -261,9 +234,9 @@ impl AvailableDepth {
 ///
 /// We also track all paths from this goal to that head. This is necessary
 /// when rebasing provisional cache results.
-#[derive(Clone, Debug, PartialEq, Eq, Default)]
+#[derive(Clone, Debug, Default)]
 struct CycleHeads {
-    heads: BTreeMap<StackDepth, AllPathsToHeadCoinductive>,
+    heads: BTreeMap<StackDepth, PathsToNested>,
 }
 
 impl CycleHeads {
@@ -283,27 +256,16 @@ impl CycleHeads {
         self.heads.first_key_value().map(|(k, _)| *k)
     }
 
-    fn remove_highest_cycle_head(&mut self) {
+    fn remove_highest_cycle_head(&mut self) -> PathsToNested {
         let last = self.heads.pop_last();
-        debug_assert_ne!(last, None);
-    }
-
-    fn insert(
-        &mut self,
-        head: StackDepth,
-        path_from_entry: impl Into<AllPathsToHeadCoinductive> + Copy,
-    ) {
-        self.heads.entry(head).or_insert(path_from_entry.into()).and_merge(path_from_entry);
+        last.unwrap().1
     }
 
-    fn merge(&mut self, heads: &CycleHeads) {
-        for (&head, &path_from_entry) in heads.heads.iter() {
-            self.insert(head, path_from_entry);
-            debug_assert!(matches!(self.heads[&head], AllPathsToHeadCoinductive::Yes));
-        }
+    fn insert(&mut self, head: StackDepth, path_from_entry: impl Into<PathsToNested> + Copy) {
+        *self.heads.entry(head).or_insert(path_from_entry.into()) |= path_from_entry.into();
     }
 
-    fn iter(&self) -> impl Iterator<Item = (StackDepth, AllPathsToHeadCoinductive)> + '_ {
+    fn iter(&self) -> impl Iterator<Item = (StackDepth, PathsToNested)> + '_ {
         self.heads.iter().map(|(k, v)| (*k, *v))
     }
 
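Replacing the two-valued `AllPathsToHeadCoinductive` with the `PathsToNested` bitset turns the old `and_merge` into a plain bitwise OR, and `remove_highest_cycle_head` can now hand the popped path set back to the caller. A rough standalone sketch of the same `BTreeMap` + bitflags pattern, using the `bitflags` 2.x crate (`Paths` and the plain `u32` depth are stand-ins, not the real types):

```rust
use std::collections::BTreeMap;

bitflags::bitflags! {
    // Stand-in for `PathsToNested`; names mirror the real flags.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct Paths: u8 {
        const INDUCTIVE = 1 << 1;
        const UNKNOWN = 1 << 2;
        const COINDUCTIVE = 1 << 3;
    }
}

fn main() {
    // `heads` maps each cycle head (here just a `u32` depth) to all path
    // kinds from the entry to that head.
    let mut heads: BTreeMap<u32, Paths> = BTreeMap::new();
    // The first insert stores the initial path set; later inserts OR into it.
    *heads.entry(3).or_insert(Paths::INDUCTIVE) |= Paths::INDUCTIVE;
    *heads.entry(3).or_insert(Paths::COINDUCTIVE) |= Paths::COINDUCTIVE;
    assert_eq!(heads[&3], Paths::INDUCTIVE | Paths::COINDUCTIVE);
    // `pop_last` removes and returns the highest head together with its
    // path set, as in the new `remove_highest_cycle_head`.
    assert_eq!(heads.pop_last(), Some((3, Paths::INDUCTIVE | Paths::COINDUCTIVE)));
}
```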
@@ -317,13 +279,7 @@ impl CycleHeads {
                 Ordering::Equal => continue,
                 Ordering::Greater => unreachable!(),
             }
-
-            let path_from_entry = match step_kind {
-                PathKind::Coinductive => AllPathsToHeadCoinductive::Yes,
-                PathKind::Unknown | PathKind::Inductive => path_from_entry,
-            };
-
-            self.insert(head, path_from_entry);
+            self.insert(head, path_from_entry.extend_with(step_kind));
         }
     }
 }
@@ -332,13 +288,14 @@ bitflags::bitflags! {
     /// Tracks how nested goals have been accessed. This is necessary to disable
     /// global cache entries if computing them would otherwise result in a cycle or
     /// access a provisional cache entry.
-    #[derive(Debug, Clone, Copy)]
+    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
     pub struct PathsToNested: u8 {
         /// The initial value when adding a goal to its own nested goals.
         const EMPTY = 1 << 0;
         const INDUCTIVE = 1 << 1;
         const UNKNOWN = 1 << 2;
         const COINDUCTIVE = 1 << 3;
+        const FORCED_AMBIGUITY = 1 << 4;
     }
 }
 
 impl From<PathKind> for PathsToNested {
@@ -347,6 +304,7 @@ impl From<PathKind> for PathsToNested {
             PathKind::Inductive => PathsToNested::INDUCTIVE,
             PathKind::Unknown => PathsToNested::UNKNOWN,
             PathKind::Coinductive => PathsToNested::COINDUCTIVE,
+            PathKind::ForcedAmbiguity => PathsToNested::FORCED_AMBIGUITY,
         }
     }
 }
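Since each `PathKind` maps to its own bit, a `PathsToNested` value is effectively a set of path kinds, and membership tests are bitwise. A small stand-in illustration over a plain `u8` (hypothetical mirror, not the actual bitflags type):

```rust
// Stand-in mirroring `From<PathKind> for PathsToNested`: each path kind
// owns one bit, so a `u8` can represent any set of kinds.
#[allow(dead_code)] // not every variant is constructed below
#[derive(Debug, Clone, Copy)]
enum PathKind { Inductive, Unknown, Coinductive, ForcedAmbiguity }

fn to_bits(kind: PathKind) -> u8 {
    match kind {
        PathKind::Inductive => 1 << 1,
        PathKind::Unknown => 1 << 2,
        PathKind::Coinductive => 1 << 3,
        PathKind::ForcedAmbiguity => 1 << 4,
    }
}

fn main() {
    // A set containing both an unknown and a coinductive path.
    let paths = to_bits(PathKind::Unknown) | to_bits(PathKind::Coinductive);
    assert_eq!(paths, 0b1100);
    // Membership is a bitwise test, like `PathsToNested::contains`.
    assert!((paths & to_bits(PathKind::Coinductive)) != 0);
}
```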
@@ -379,10 +337,46 @@ impl PathsToNested {
                     self.insert(PathsToNested::COINDUCTIVE);
                 }
             }
+            PathKind::ForcedAmbiguity => {
+                if self.intersects(
+                    PathsToNested::EMPTY
+                        | PathsToNested::INDUCTIVE
+                        | PathsToNested::UNKNOWN
+                        | PathsToNested::COINDUCTIVE,
+                ) {
+                    self.remove(
+                        PathsToNested::EMPTY
+                            | PathsToNested::INDUCTIVE
+                            | PathsToNested::UNKNOWN
+                            | PathsToNested::COINDUCTIVE,
+                    );
+                    self.insert(PathsToNested::FORCED_AMBIGUITY);
+                }
+            }
         }
 
         self
     }
+
+    #[must_use]
+    fn extend_with_paths(self, path: PathsToNested) -> Self {
+        let mut new = PathsToNested::empty();
+        let (PathKind::Inductive
+            | PathKind::Unknown
+            | PathKind::Coinductive
+            | PathKind::ForcedAmbiguity);
+        for n in [
+            PathKind::Inductive,
+            PathKind::Unknown,
+            PathKind::Coinductive,
+            PathKind::ForcedAmbiguity,
+        ] {
+            if path.contains(n.into()) {
+                new |= self.extend_with(n);
+            }
+        }
+        new
+    }
 }
 
 /// The nested goals of each stack entry and the path from the
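Two notes on the new `extend_with_paths`. The bare `let (PathKind::Inductive | ... | PathKind::ForcedAmbiguity);` is an irrefutable pattern that binds nothing; it appears to serve purely as an exhaustiveness guard, so this function stops compiling if a new `PathKind` variant is added without extending the array below it. The function itself distributes `extend_with` over every kind contained in `path` and unions the results. A stand-in model over plain sets (using the `max` model of `PathKind::extend` from the sketch further up, and ignoring the special `EMPTY` flag):

```rust
// Hypothetical mirror of `extend_with_paths` over plain sets of path kinds:
// extend `self` with each kind contained in `steps`, then union the results.
use std::collections::BTreeSet;

#[allow(dead_code)] // not every variant is constructed below
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Kind { Inductive, Unknown, Coinductive, ForcedAmbiguity }

// `max` models `PathKind::extend` (EMPTY's special casing is omitted).
fn extend_with(paths: &BTreeSet<Kind>, step: Kind) -> BTreeSet<Kind> {
    paths.iter().map(|&p| p.max(step)).collect()
}

fn extend_with_paths(paths: &BTreeSet<Kind>, steps: &BTreeSet<Kind>) -> BTreeSet<Kind> {
    let mut new = BTreeSet::new();
    for &step in steps {
        new.extend(extend_with(paths, step));
    }
    new
}

fn main() {
    let paths: BTreeSet<_> = [Kind::Inductive, Kind::Coinductive].into();
    let steps: BTreeSet<_> = [Kind::Unknown].into();
    // {Inductive, Coinductive} extended with {Unknown}
    // = {max(Inductive, Unknown), max(Coinductive, Unknown)}
    // = {Unknown, Coinductive}.
    let expected: BTreeSet<_> = [Kind::Unknown, Kind::Coinductive].into();
    assert_eq!(extend_with_paths(&paths, &steps), expected);
}
```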
@@ -693,7 +687,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         if let Some((_scope, expected)) = validate_cache {
             // Do not try to move a goal into the cache again if we're testing
             // the global cache.
-            assert_eq!(evaluation_result.result, expected, "input={input:?}");
+            assert_eq!(expected, evaluation_result.result, "input={input:?}");
         } else if D::inspect_is_noop(inspect) {
             self.insert_global_cache(cx, input, evaluation_result, dep_node)
         }
@@ -782,7 +776,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         stack_entry: &StackEntry<X>,
         mut mutate_result: impl FnMut(X::Input, X::Result) -> X::Result,
     ) {
-        let head = self.stack.next_index();
+        let popped_head = self.stack.next_index();
         #[allow(rustc::potential_query_instability)]
         self.provisional_cache.retain(|&input, entries| {
             entries.retain_mut(|entry| {
@@ -792,30 +786,32 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
                     path_from_head,
                     result,
                 } = entry;
-                if heads.highest_cycle_head() == head {
+                let ep = if heads.highest_cycle_head() == popped_head {
                     heads.remove_highest_cycle_head()
                 } else {
                     return true;
-                }
-
-                // We only try to rebase if all paths from the cache entry
-                // to its heads are coinductive. In this case these cycle
-                // kinds won't change, no matter the goals between these
-                // heads and the provisional cache entry.
-                if heads.iter().any(|(_, p)| matches!(p, AllPathsToHeadCoinductive::No)) {
-                    return false;
-                }
+                };
 
-                // The same for nested goals of the cycle head.
-                if stack_entry.heads.iter().any(|(_, p)| matches!(p, AllPathsToHeadCoinductive::No))
-                {
-                    return false;
+                // We're rebasing an entry `e` over a head `p`. This head
+                // has a number of its own heads `h` it depends on. We need to
+                // make sure that the cycle `hph` cannot change after rebasing.
+                //
+                // After rebasing, the cycles `hph` will go through the path `heph` instead.
+                for (h, ph) in stack_entry.heads.iter() {
+                    let hp =
+                        Self::cycle_path_kind(&self.stack, stack_entry.step_kind_from_parent, h);
+                    let hph = ph.extend_with(hp);
+                    let he = hp.extend(*path_from_head);
+                    let hep = ep.extend_with(he);
+                    let heph = hep.extend_with_paths(ph);
+                    if hph == heph {
+                        let eph = ep.extend_with_paths(ph);
+                        heads.insert(h, eph);
+                    } else {
+                        return false;
+                    }
                 }
 
-                // Merge the cycle heads of the provisional cache entry and the
-                // popped head. If the popped cycle head was a root, discard all
-                // provisional cache entries which depend on it.
-                heads.merge(&stack_entry.heads);
                 let Some(head) = heads.opt_highest_cycle_head() else {
                     return false;
                 };
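To summarize the new rebasing rule: when the popped head `p` is the highest head of a provisional cache entry `e`, the entry may only be kept if, for every head `h` that `p` depends on, the kind of the existing cycle `hph` equals the kind of the rerouted cycle `heph`; otherwise the entry is dropped. A worked instance under the single-path `max` model used in the sketches above (corresponding to each `PathsToNested` having exactly one bit set):

```rust
// Worked instance of the rebasing condition, modelling `extend`,
// `extend_with`, and `extend_with_paths` all as `max` on single path kinds.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Kind { Inductive, Coinductive }

fn main() {
    // Suppose the path from `h` to the popped head `p` is coinductive (`hp`),
    // `p` reaches `h` back through an inductive path (`ph`), the entry `e`
    // is reached from `p` inductively (`ep`), and the path from the head to
    // the entry is inductive (`path_from_head`).
    let hp = Kind::Coinductive;
    let ph = Kind::Inductive;
    let ep = Kind::Inductive;
    let path_from_head = Kind::Inductive;

    // Original cycle `hph` and the rerouted cycle `heph` after rebasing.
    let hph = ph.max(hp);
    let he = hp.max(path_from_head);
    let hep = ep.max(he);
    let heph = hep.max(ph);
    // Both cycles are coinductive, so `e` may be rebased onto `h`.
    assert_eq!(hph, heph);
    assert_eq!(hph, Kind::Coinductive);
}
```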