@@ -19,6 +19,9 @@ rustc_index::newtype_index! {
19
19
}
20
20
21
21
bitflags:: bitflags! {
22
+ /// Whether and how this goal has been used as the root of a
23
+ /// cycle. We track the kind of cycle as we're otherwise forced
24
+ /// to always rerun at least once.
22
25
#[ derive( Debug , Clone , Copy , PartialEq , Eq ) ]
23
26
struct HasBeenUsed : u8 {
24
27
const INDUCTIVE_CYCLE = 1 << 0 ;
@@ -29,23 +32,30 @@ bitflags::bitflags! {
29
32
#[ derive( Debug ) ]
30
33
struct StackEntry < ' tcx > {
31
34
input : CanonicalInput < ' tcx > ,
35
+
32
36
available_depth : Limit ,
37
+
33
38
/// The maximum depth reached by this stack entry, only up-to date
34
39
/// for the top of the stack and lazily updated for the rest.
35
40
reached_depth : StackDepth ,
36
- /// Whether this entry is a cycle participant which is not a root.
41
+
42
+ /// Whether this entry is a non-root cycle participant.
37
43
///
38
- /// If so, it must not be moved to the global cache. See
39
- /// [SearchGraph::cycle_participants] for more details.
44
+ /// We must not move the result of non-root cycle participants to the
45
+ /// global cache. See [SearchGraph::cycle_participants] for more details.
46
+ /// We store the highest stack depth of a head of a cycle this goal is involved
47
+ /// in. This is necessary to soundly cache its provisional result.
40
48
non_root_cycle_participant : Option < StackDepth > ,
41
49
42
50
encountered_overflow : bool ,
51
+
43
52
has_been_used : HasBeenUsed ,
44
53
/// Starts out as `None` and gets set when rerunning this
45
54
/// goal in case we encounter a cycle.
46
55
provisional_result : Option < QueryResult < ' tcx > > ,
47
56
}
48
57
58
+ /// The provisional result for a goal which is not on the stack.
49
59
struct DetachedEntry < ' tcx > {
50
60
/// The head of the smallest non-trivial cycle involving this entry.
51
61
///
@@ -59,6 +69,19 @@ struct DetachedEntry<'tcx> {
59
69
result : QueryResult < ' tcx > ,
60
70
}
61
71
72
+ /// Stores the stack depth of a currently evaluated goal *and* already
73
+ /// computed results for goals which depend on other goals still on the stack.
74
+ ///
75
+ /// The provisional result may depend on whether the stack above it is inductive
76
+ /// or coinductive. Because of this, we store separate provisional results for
77
+ /// each case. If a provisional entry is not applicable, it may be the case
78
+ /// that we already have a provisional result while computing a goal. In this case
79
+ /// we prefer the provisional result to potentially avoid fixpoint iterations.
80
+ ///
81
+ /// See tests/ui/traits/next-solver/cycles/mixed-cycles-2.rs for an example.
82
+ ///
83
+ /// The provisional cache can theoretically result in changes to the observable behavior,
84
+ /// see tests/ui/traits/next-solver/cycles/provisional-cache-impacts-behavior.rs.
62
85
#[ derive( Default ) ]
63
86
struct ProvisionalCacheEntry < ' tcx > {
64
87
stack_depth : Option < StackDepth > ,
@@ -200,6 +223,16 @@ impl<'tcx> SearchGraph<'tcx> {
200
223
. all ( |entry| entry. input . value . goal . predicate . is_coinductive ( tcx) )
201
224
}
202
225
226
+ // When encountering a solver cycle, the result of the current goal
227
+ // depends on goals lower on the stack.
228
+ //
229
+ // We have to therefore be careful when caching goals. Only the final result
230
+ // of the cycle root, i.e. the lowest goal on the stack involved in this cycle,
231
+ // is moved to the global cache while all others are stored in a provisional cache.
232
+ //
233
+ // We update both the head of this cycle to rerun its evaluation until
234
+ // we reach a fixpoint and all other cycle participants to make sure that
235
+ // their result does not get moved to the global cache.
203
236
fn tag_cycle_participants (
204
237
stack : & mut IndexVec < StackDepth , StackEntry < ' tcx > > ,
205
238
cycle_participants : & mut FxHashSet < CanonicalInput < ' tcx > > ,
@@ -281,24 +314,20 @@ impl<'tcx> SearchGraph<'tcx> {
281
314
}
282
315
283
316
// Check whether the goal is in the provisional cache.
317
+ // The provisional result may rely on the path to its cycle roots,
318
+ // so we have to check that the path of the current goal matches that of
319
+ // the cache entry.
284
320
let cache_entry = self . provisional_cache . entry ( input) . or_default ( ) ;
285
- if let Some ( with_coinductive_stack) = & cache_entry. with_coinductive_stack
286
- && Self :: stack_coinductive_from ( tcx, & self . stack , with_coinductive_stack. head )
287
- {
288
- // We have a nested goal which is already in the provisional cache, use
289
- // its result. We do not provide any usage kind as that should have been
290
- // already set correctly while computing the cache entry.
291
- inspect
292
- . goal_evaluation_kind ( inspect:: WipCanonicalGoalEvaluationKind :: ProvisionalCacheHit ) ;
293
- Self :: tag_cycle_participants (
294
- & mut self . stack ,
295
- & mut self . cycle_participants ,
296
- HasBeenUsed :: empty ( ) ,
297
- with_coinductive_stack. head ,
298
- ) ;
299
- return with_coinductive_stack. result ;
300
- } else if let Some ( with_inductive_stack) = & cache_entry. with_inductive_stack
301
- && !Self :: stack_coinductive_from ( tcx, & self . stack , with_inductive_stack. head )
321
+ if let Some ( entry) = cache_entry
322
+ . with_coinductive_stack
323
+ . as_ref ( )
324
+ . filter ( |p| Self :: stack_coinductive_from ( tcx, & self . stack , p. head ) )
325
+ . or_else ( || {
326
+ cache_entry
327
+ . with_inductive_stack
328
+ . as_ref ( )
329
+ . filter ( |p| !Self :: stack_coinductive_from ( tcx, & self . stack , p. head ) )
330
+ } )
302
331
{
303
332
// We have a nested goal which is already in the provisional cache, use
304
333
// its result. We do not provide any usage kind as that should have been
@@ -309,20 +338,17 @@ impl<'tcx> SearchGraph<'tcx> {
309
338
& mut self . stack ,
310
339
& mut self . cycle_participants ,
311
340
HasBeenUsed :: empty ( ) ,
312
- with_inductive_stack . head ,
341
+ entry . head ,
313
342
) ;
314
- return with_inductive_stack . result ;
343
+ return entry . result ;
315
344
} else if let Some ( stack_depth) = cache_entry. stack_depth {
316
345
debug ! ( "encountered cycle with depth {stack_depth:?}" ) ;
317
- // We have a nested goal which relies on a goal `root` deeper in the stack.
346
+ // We have a nested goal which directly relies on a goal deeper in the stack.
318
347
//
319
- // We first store that we may have to reprove `root` in case the provisional
320
- // response is not equal to the final response. We also update the depth of all
321
- // goals which recursively depend on our current goal to depend on `root`
322
- // instead.
348
+ // We start by tagging all cycle participants, as that's necessary for caching.
323
349
//
324
- // Finally we can return either the provisional response for that goal if we have a
325
- // coinductive cycle or an ambiguous result if the cycle is inductive .
350
+ // Finally we can return either the provisional response or the initial response
351
+ // in case we're in the first fixpoint iteration for this goal .
326
352
inspect. goal_evaluation_kind ( inspect:: WipCanonicalGoalEvaluationKind :: CycleInStack ) ;
327
353
let is_coinductive_cycle = Self :: stack_coinductive_from ( tcx, & self . stack , stack_depth) ;
328
354
let usage_kind = if is_coinductive_cycle {
@@ -410,10 +436,10 @@ impl<'tcx> SearchGraph<'tcx> {
410
436
false
411
437
} ;
412
438
439
+ // If we did not reach a fixpoint, update the provisional result and reevaluate.
413
440
if reached_fixpoint {
414
441
return ( stack_entry, result) ;
415
442
} else {
416
- // Did not reach a fixpoint, update the provisional result and reevaluate.
417
443
let depth = self . stack . push ( StackEntry {
418
444
has_been_used : HasBeenUsed :: empty ( ) ,
419
445
provisional_result : Some ( result) ,
@@ -435,9 +461,6 @@ impl<'tcx> SearchGraph<'tcx> {
435
461
// We're now done with this goal. In case this goal is involved in a larger cycle
436
462
// do not remove it from the provisional cache and update its provisional result.
437
463
// We only add the root of cycles to the global cache.
438
- //
439
- // It is not possible for any nested goal to depend on something deeper on the
440
- // stack, as this would have also updated the depth of the current goal.
441
464
if let Some ( head) = final_entry. non_root_cycle_participant {
442
465
let coinductive_stack = Self :: stack_coinductive_from ( tcx, & self . stack , head) ;
443
466
@@ -449,6 +472,9 @@ impl<'tcx> SearchGraph<'tcx> {
449
472
entry. with_inductive_stack = Some ( DetachedEntry { head, result } ) ;
450
473
}
451
474
} else {
475
+ self . provisional_cache . remove ( & input) ;
476
+ let reached_depth = final_entry. reached_depth . as_usize ( ) - self . stack . len ( ) ;
477
+ let cycle_participants = mem:: take ( & mut self . cycle_participants ) ;
452
478
// When encountering a cycle, both inductive and coinductive, we only
453
479
// move the root into the global cache. We also store all other cycle
454
480
// participants involved.
@@ -457,9 +483,6 @@ impl<'tcx> SearchGraph<'tcx> {
457
483
// participant is on the stack. This is necessary to prevent unstable
458
484
// results. See the comment of `SearchGraph::cycle_participants` for
459
485
// more details.
460
- self . provisional_cache . remove ( & input) ;
461
- let reached_depth = final_entry. reached_depth . as_usize ( ) - self . stack . len ( ) ;
462
- let cycle_participants = mem:: take ( & mut self . cycle_participants ) ;
463
486
self . global_cache ( tcx) . insert (
464
487
tcx,
465
488
input,
0 commit comments