@@ -193,41 +193,6 @@ impl UsageKind {
     }
 }
 
-/// For each goal we track whether the paths from this goal
-/// to its cycle heads are coinductive.
-///
-/// This is a necessary condition to rebase provisional cache
-/// entries.
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum AllPathsToHeadForcedAmbiguity {
-    Yes,
-    No,
-}
-impl From<PathKind> for AllPathsToHeadForcedAmbiguity {
-    fn from(path: PathKind) -> AllPathsToHeadForcedAmbiguity {
-        match path {
-            PathKind::ForcedAmbiguity => AllPathsToHeadForcedAmbiguity::Yes,
-            _ => AllPathsToHeadForcedAmbiguity::No,
-        }
-    }
-}
-impl AllPathsToHeadForcedAmbiguity {
-    #[must_use]
-    fn merge(self, other: impl Into<Self>) -> Self {
-        match (self, other.into()) {
-            (AllPathsToHeadForcedAmbiguity::Yes, AllPathsToHeadForcedAmbiguity::Yes) => {
-                AllPathsToHeadForcedAmbiguity::Yes
-            }
-            (AllPathsToHeadForcedAmbiguity::No, _) | (_, AllPathsToHeadForcedAmbiguity::No) => {
-                AllPathsToHeadForcedAmbiguity::No
-            }
-        }
-    }
-    fn and_merge(&mut self, other: impl Into<Self>) {
-        *self = self.merge(other);
-    }
-}
-
 #[derive(Debug, Clone, Copy)]
 struct AvailableDepth(usize);
 impl AvailableDepth {
@@ -267,9 +232,9 @@ impl AvailableDepth {
 ///
 /// We also track all paths from this goal to that head. This is necessary
 /// when rebasing provisional cache results.
-#[derive(Clone, Debug, PartialEq, Eq, Default)]
+#[derive(Clone, Debug, Default)]
 struct CycleHeads {
-    heads: BTreeMap<StackDepth, AllPathsToHeadForcedAmbiguity>,
+    heads: BTreeMap<StackDepth, PathsToNested>,
 }
 
 impl CycleHeads {
@@ -289,27 +254,22 @@ impl CycleHeads {
         self.heads.first_key_value().map(|(k, _)| *k)
     }
 
-    fn remove_highest_cycle_head(&mut self) {
+    fn remove_highest_cycle_head(&mut self) -> PathsToNested {
         let last = self.heads.pop_last();
-        debug_assert_ne!(last, None);
+        last.unwrap().1
     }
 
-    fn insert(
-        &mut self,
-        head: StackDepth,
-        path_from_entry: impl Into<AllPathsToHeadForcedAmbiguity> + Copy,
-    ) {
-        self.heads.entry(head).or_insert(path_from_entry.into()).and_merge(path_from_entry);
+    fn insert(&mut self, head: StackDepth, path_from_entry: impl Into<PathsToNested> + Copy) {
+        *self.heads.entry(head).or_insert(path_from_entry.into()) |= path_from_entry.into();
     }
 
     fn merge(&mut self, heads: &CycleHeads) {
         for (&head, &path_from_entry) in heads.heads.iter() {
             self.insert(head, path_from_entry);
-            debug_assert!(matches!(self.heads[&head], AllPathsToHeadForcedAmbiguity::Yes));
         }
     }
 
-    fn iter(&self) -> impl Iterator<Item = (StackDepth, AllPathsToHeadForcedAmbiguity)> + '_ {
+    fn iter(&self) -> impl Iterator<Item = (StackDepth, PathsToNested)> + '_ {
         self.heads.iter().map(|(k, v)| (*k, *v))
     }
 
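// Standalone sketch (not from the PR): this models the change to `CycleHeads::insert`
// above. Instead of collapsing the paths to a cycle head into a yes/no flag, every path
// kind seen so far is kept as a bit and new paths are or-ed in. The constants and the
// `usize` head index below are stand-ins for the real `PathsToNested` / `StackDepth`.
use std::collections::BTreeMap;

const EMPTY: u8 = 1 << 0;
const INDUCTIVE: u8 = 1 << 1;
const UNKNOWN: u8 = 1 << 2;
const COINDUCTIVE: u8 = 1 << 3;
const FORCED_AMBIGUITY: u8 = 1 << 4;

fn insert(heads: &mut BTreeMap<usize, u8>, head: usize, path_from_entry: u8) {
    // Same shape as the new `insert`: start from the given paths if the head is new,
    // otherwise or the new paths into the existing entry.
    *heads.entry(head).or_insert(path_from_entry) |= path_from_entry;
}

fn main() {
    let mut heads = BTreeMap::new();
    insert(&mut heads, 0, INDUCTIVE);
    insert(&mut heads, 0, COINDUCTIVE);
    // Both path kinds to head 0 are remembered, not just a single boolean.
    assert_eq!(heads[&0], INDUCTIVE | COINDUCTIVE);
    let _ = (EMPTY, UNKNOWN, FORCED_AMBIGUITY);
}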
@@ -323,15 +283,7 @@ impl CycleHeads {
                 Ordering::Equal => continue,
                 Ordering::Greater => unreachable!(),
             }
-
-            let path_from_entry = match step_kind {
-                PathKind::ForcedAmbiguity => AllPathsToHeadForcedAmbiguity::Yes,
-                PathKind::Unknown | PathKind::Inductive | PathKind::Coinductive => {
-                    path_from_entry
-                }
-            };
-
-            self.insert(head, path_from_entry);
+            self.insert(head, path_from_entry.extend_with(step_kind));
         }
     }
 
@@ -348,7 +300,7 @@ bitflags::bitflags! {
     /// Tracks how nested goals have been accessed. This is necessary to disable
     /// global cache entries if computing them would otherwise result in a cycle or
     /// access a provisional cache entry.
-    #[derive(Debug, Clone, Copy)]
+    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
     pub struct PathsToNested: u8 {
         /// The initial value when adding a goal to its own nested goals.
         const EMPTY = 1 << 0;
@@ -417,6 +369,26 @@ impl PathsToNested {
 
         self
     }
+
+    #[must_use]
+    fn extend_with_paths(mut self, path: PathsToNested) -> Self {
+        let mut new = PathsToNested::empty();
+        let (PathKind::Inductive
+            | PathKind::Unknown
+            | PathKind::Coinductive
+            | PathKind::ForcedAmbiguity);
+        for n in [
+            PathKind::Inductive,
+            PathKind::Unknown,
+            PathKind::Coinductive,
+            PathKind::ForcedAmbiguity,
+        ] {
+            if path.contains(n.into()) {
+                new |= self.extend_with(n);
+            }
+        }
+        new
+    }
 }
 
 /// The nested goals of each stack entry and the path from the
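// Standalone sketch (not from the PR): it isolates the idiom used in `extend_with_paths`
// above, a binding-less `let` over an or-pattern of *all* enum variants. The pattern is
// only irrefutable while it really lists every variant, so adding a variant to the enum
// turns the `let` into a compile error and forces the hard-coded list below it to be
// revisited. `StepKind` is a stand-in for the real `PathKind`.
enum StepKind {
    Inductive,
    Unknown,
    Coinductive,
    ForcedAmbiguity,
}

fn all_step_kinds() -> [StepKind; 4] {
    // Exhaustiveness reminder: fails to compile if `StepKind` gains a variant.
    let (StepKind::Inductive
        | StepKind::Unknown
        | StepKind::Coinductive
        | StepKind::ForcedAmbiguity);
    [
        StepKind::Inductive,
        StepKind::Unknown,
        StepKind::Coinductive,
        StepKind::ForcedAmbiguity,
    ]
}

fn main() {
    assert_eq!(all_step_kinds().len(), 4);
}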
@@ -862,30 +834,30 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
                 path_from_head,
                 result,
             } = entry;
-            if heads.highest_cycle_head() == head {
+            let ep = if heads.highest_cycle_head() == head {
                 heads.remove_highest_cycle_head()
             } else {
                 return true;
-            }
-
-            // We only try to rebase if all paths from the cache entry
-            // to its heads are coinductive. In this case these cycle
-            // kinds won't change, no matter the goals between these
-            // heads and the provisional cache entry.
-            if heads.iter().any(|(_, p)| matches!(p, AllPathsToHeadForcedAmbiguity::No)) {
-                return false;
-            }
+            };
 
-            // The same for nested goals of the cycle head.
-            if stack_entry.heads.iter().any(|(_, p)| matches!(p, AllPathsToHeadForcedAmbiguity::No))
-            {
-                return false;
+            // We're rebasing an entry `e` over a head `p`. This head
+            // has a number of its own heads `h` it depends on. We need to
+            // make sure that the cycle `hph` cannot change after rebasing.
+            //
+            // After rebasing, the cycle `hph` will instead go through the path `heph`.
+            // We therefore need to check that `hep = hp`.
+            for (h, ph) in stack_entry.heads.iter() {
+                let hp = Self::cycle_path_kind(&stack, stack_entry.step_kind_from_parent, h);
+                let he = hp.extend(*path_from_head);
+                let hep = ep.extend_with(he);
+                if PathsToNested::from(hp) == hep {
+                    let eph = ep.extend_with_paths(ph);
+                    heads.insert(h, eph);
+                } else {
+                    return false;
+                }
             }
 
-            // Merge the cycle heads of the provisional cache entry and the
-            // popped head. If the popped cycle head was a root, discard all
-            // provisional cache entries which depend on it.
-            heads.merge(&stack_entry.heads);
             let Some(head) = heads.opt_highest_cycle_head() else {
                 return false;
             };
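// Standalone sketch (not from the PR): a toy model of the rebasing check above, where a
// "path kind" is reduced to a single question -- does the path pass through a
// forced-ambiguity step? The real `PathKind` / `PathsToNested` carry more information,
// so this only shows the shape of the argument, not the real composition rules.
// `e` is the provisional cache entry being rebased, `p` the popped cycle head, and `h`
// one of `p`'s own heads; the entry may only keep depending on `h` if the rerouted path
// `h -> e -> p` looks the same as the original path `h -> p`.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct ToyPath {
    forced_ambiguity: bool,
}

impl ToyPath {
    // Composing two path segments: the result is forced-ambiguous iff either segment is.
    fn extend(self, other: ToyPath) -> ToyPath {
        ToyPath { forced_ambiguity: self.forced_ambiguity || other.forced_ambiguity }
    }
}

fn can_rebase(hp: ToyPath, pe: ToyPath, ep: ToyPath) -> bool {
    // Path from `h` to `e` via `p`, then from `e` back to `p`.
    let he = hp.extend(pe);
    let hep = he.extend(ep);
    // Rebasing is only sound if the rerouted cycle agrees with the original one.
    hep == hp
}

fn main() {
    let plain = ToyPath { forced_ambiguity: false };
    let ambig = ToyPath { forced_ambiguity: true };
    // Rerouting through unremarkable edges leaves the cycle unchanged.
    assert!(can_rebase(plain, plain, plain));
    // Rerouting through a forced-ambiguity edge would change the cycle,
    // so the provisional cache entry has to be dropped instead.
    assert!(!can_rebase(plain, ambig, plain));
}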