Commit 540e0a6

gamer, fuzzing works
1 parent 9b9aed3 commit 540e0a6

File tree

  • compiler/rustc_type_ir/src/search_graph

1 file changed: +47 −75 lines

compiler/rustc_type_ir/src/search_graph/mod.rs

Lines changed: 47 additions & 75 deletions
@@ -193,41 +193,6 @@ impl UsageKind {
     }
 }

-/// For each goal we track whether the paths from this goal
-/// to its cycle heads are coinductive.
-///
-/// This is a necessary condition to rebase provisional cache
-/// entries.
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum AllPathsToHeadForcedAmbiguity {
-    Yes,
-    No,
-}
-impl From<PathKind> for AllPathsToHeadForcedAmbiguity {
-    fn from(path: PathKind) -> AllPathsToHeadForcedAmbiguity {
-        match path {
-            PathKind::ForcedAmbiguity => AllPathsToHeadForcedAmbiguity::Yes,
-            _ => AllPathsToHeadForcedAmbiguity::No,
-        }
-    }
-}
-impl AllPathsToHeadForcedAmbiguity {
-    #[must_use]
-    fn merge(self, other: impl Into<Self>) -> Self {
-        match (self, other.into()) {
-            (AllPathsToHeadForcedAmbiguity::Yes, AllPathsToHeadForcedAmbiguity::Yes) => {
-                AllPathsToHeadForcedAmbiguity::Yes
-            }
-            (AllPathsToHeadForcedAmbiguity::No, _) | (_, AllPathsToHeadForcedAmbiguity::No) => {
-                AllPathsToHeadForcedAmbiguity::No
-            }
-        }
-    }
-    fn and_merge(&mut self, other: impl Into<Self>) {
-        *self = self.merge(other);
-    }
-}
-
 #[derive(Debug, Clone, Copy)]
 struct AvailableDepth(usize);
 impl AvailableDepth {
@@ -267,9 +232,9 @@ impl AvailableDepth {
 ///
 /// We also track all paths from this goal to that head. This is necessary
 /// when rebasing provisional cache results.
-#[derive(Clone, Debug, PartialEq, Eq, Default)]
+#[derive(Clone, Debug, Default)]
 struct CycleHeads {
-    heads: BTreeMap<StackDepth, AllPathsToHeadForcedAmbiguity>,
+    heads: BTreeMap<StackDepth, PathsToNested>,
 }

 impl CycleHeads {
@@ -289,27 +254,22 @@ impl CycleHeads {
         self.heads.first_key_value().map(|(k, _)| *k)
     }

-    fn remove_highest_cycle_head(&mut self) {
+    fn remove_highest_cycle_head(&mut self) -> PathsToNested {
         let last = self.heads.pop_last();
-        debug_assert_ne!(last, None);
+        last.unwrap().1
     }

-    fn insert(
-        &mut self,
-        head: StackDepth,
-        path_from_entry: impl Into<AllPathsToHeadForcedAmbiguity> + Copy,
-    ) {
-        self.heads.entry(head).or_insert(path_from_entry.into()).and_merge(path_from_entry);
+    fn insert(&mut self, head: StackDepth, path_from_entry: impl Into<PathsToNested> + Copy) {
+        *self.heads.entry(head).or_insert(path_from_entry.into()) |= path_from_entry.into();
     }

     fn merge(&mut self, heads: &CycleHeads) {
         for (&head, &path_from_entry) in heads.heads.iter() {
             self.insert(head, path_from_entry);
-            debug_assert!(matches!(self.heads[&head], AllPathsToHeadForcedAmbiguity::Yes));
         }
     }

-    fn iter(&self) -> impl Iterator<Item = (StackDepth, AllPathsToHeadForcedAmbiguity)> + '_ {
+    fn iter(&self) -> impl Iterator<Item = (StackDepth, PathsToNested)> + '_ {
         self.heads.iter().map(|(k, v)| (*k, *v))
     }

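This hunk is the core representational change of the commit: instead of collapsing the paths to each cycle head into a yes/no `AllPathsToHeadForcedAmbiguity`, each head now carries the full `PathsToNested` bitset, so merging path information is a plain bitwise or, and popping a head hands its accumulated paths back to the caller. A reduced standalone model of that shape, using hypothetical stand-in types (`Paths`, `Heads`) rather than the actual rustc ones, built on the `bitflags` crate this file already uses:

    use std::collections::BTreeMap;

    bitflags::bitflags! {
        // Hypothetical stand-in for `PathsToNested`.
        #[derive(Debug, Clone, Copy, PartialEq, Eq)]
        struct Paths: u8 {
            const INDUCTIVE = 1 << 0;
            const COINDUCTIVE = 1 << 1;
            const FORCED_AMBIGUITY = 1 << 2;
        }
    }

    // Hypothetical stand-in for `CycleHeads`.
    #[derive(Default)]
    struct Heads {
        heads: BTreeMap<u32, Paths>,
    }

    impl Heads {
        // Mirrors the new `insert`: merge by bitor instead of a two-valued lattice.
        fn insert(&mut self, head: u32, paths: Paths) {
            *self.heads.entry(head).or_insert(paths) |= paths;
        }

        // Mirrors the new `remove_highest_cycle_head`, which now returns the
        // accumulated paths so the caller can rebase with them.
        fn remove_highest_cycle_head(&mut self) -> Paths {
            self.heads.pop_last().unwrap().1
        }
    }

    fn main() {
        let mut heads = Heads::default();
        heads.insert(3, Paths::INDUCTIVE);
        heads.insert(3, Paths::FORCED_AMBIGUITY);
        assert_eq!(
            heads.remove_highest_cycle_head(),
            Paths::INDUCTIVE | Paths::FORCED_AMBIGUITY,
        );
    }

The old enum only remembered whether every path forced ambiguity; the bitset keeps enough information to recompute cycle kinds during rebasing instead of bailing out, which is what the final hunk below exploits.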
@@ -323,15 +283,7 @@ impl CycleHeads {
                 Ordering::Equal => continue,
                 Ordering::Greater => unreachable!(),
             }
-
-            let path_from_entry = match step_kind {
-                PathKind::ForcedAmbiguity => AllPathsToHeadForcedAmbiguity::Yes,
-                PathKind::Unknown | PathKind::Inductive | PathKind::Coinductive => {
-                    path_from_entry
-                }
-            };
-
-            self.insert(head, path_from_entry);
+            self.insert(head, path_from_entry.extend_with(step_kind));
         }
     }

@@ -348,7 +300,7 @@ bitflags::bitflags! {
     /// Tracks how nested goals have been accessed. This is necessary to disable
     /// global cache entries if computing them would otherwise result in a cycle or
     /// access a provisional cache entry.
-    #[derive(Debug, Clone, Copy)]
+    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
     pub struct PathsToNested: u8 {
         /// The initial value when adding a goal to its own nested goals.
         const EMPTY = 1 << 0;
@@ -417,6 +369,26 @@ impl PathsToNested {

         self
     }
+
+    #[must_use]
+    fn extend_with_paths(mut self, path: PathsToNested) -> Self {
+        let mut new = PathsToNested::empty();
+        let (PathKind::Inductive
+        | PathKind::Unknown
+        | PathKind::Coinductive
+        | PathKind::ForcedAmbiguity);
+        for n in [
+            PathKind::Inductive,
+            PathKind::Unknown,
+            PathKind::Coinductive,
+            PathKind::ForcedAmbiguity,
+        ] {
+            if path.contains(n.into()) {
+                new |= self.extend_with(n);
+            }
+        }
+        new
+    }
 }

 /// The nested goals of each stack entry and the path from the
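Two things are easy to miss in this new helper. It lifts the per-`PathKind` `extend_with` to a whole bitset by decomposing `path` into individual kinds and unioning the results. And the bare `let (PathKind::Inductive | ...);` statement is an exhaustiveness guard: a `let` with no bindings and no initializer must have an irrefutable pattern, so this function stops compiling if `PathKind` ever gains a variant the array below doesn't list. A self-contained demonstration of the idiom, with a toy `Color` enum that is not part of the commit:

    enum Color {
        Red,
        Green,
        Blue,
    }

    fn all_colors() -> [Color; 3] {
        // Irrefutable only while the or-pattern names every variant; adding
        // a `Color` variant without updating this function is a compile error.
        let (Color::Red | Color::Green | Color::Blue);
        [Color::Red, Color::Green, Color::Blue]
    }

    fn main() {
        assert_eq!(all_colors().len(), 3);
    }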
@@ -862,30 +834,30 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
                 path_from_head,
                 result,
             } = entry;
-            if heads.highest_cycle_head() == head {
+            let ep = if heads.highest_cycle_head() == head {
                 heads.remove_highest_cycle_head()
             } else {
                 return true;
-            }
-
-            // We only try to rebase if all paths from the cache entry
-            // to its heads are coinductive. In this case these cycle
-            // kinds won't change, no matter the goals between these
-            // heads and the provisional cache entry.
-            if heads.iter().any(|(_, p)| matches!(p, AllPathsToHeadForcedAmbiguity::No)) {
-                return false;
-            }
+            };

-            // The same for nested goals of the cycle head.
-            if stack_entry.heads.iter().any(|(_, p)| matches!(p, AllPathsToHeadForcedAmbiguity::No))
-            {
-                return false;
+            // We're rebasing an entry `e` over a head `p`. This head
+            // has a number of own heads `h` it depends on. We need to
+            // make sure that the cycle `hph` cannot change after rebasing.
+            //
+            // After rebasing, the cycle `hph` instead goes through the
+            // path `heph`. We need to check that `hep == hp`.
+            for (h, ph) in stack_entry.heads.iter() {
+                let hp = Self::cycle_path_kind(&stack, stack_entry.step_kind_from_parent, h);
+                let he = hp.extend(*path_from_head);
+                let hep = ep.extend_with(he);
+                if PathsToNested::from(hp) == hep {
+                    let eph = ep.extend_with_paths(ph);
+                    heads.insert(h, eph);
+                } else {
+                    return false;
+                }
             }

-            // Merge the cycle heads of the provisional cache entry and the
-            // popped head. If the popped cycle head was a root, discard all
-            // provisional cache entries which depend on it.
-            heads.merge(&stack_entry.heads);
             let Some(head) = heads.opt_highest_cycle_head() else {
                 return false;
             };
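The loop in this final hunk reads as a path-composition check: entry `e` may be rebased over the popped head `p` only if, for every remaining head `h`, rerouting the old cycle `h -> p` as `h -> e -> p` cannot change its kind, i.e. `hep == hp`. A toy model of that invariant, with only two path kinds and an assumed composition rule that is much simpler than rustc's real `extend`/`extend_with` tables:

    // Assumed simplification: a composed path is coinductive only if
    // every segment is coinductive. Rustc tracks more kinds than this.
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    enum PathKind {
        Inductive,
        Coinductive,
    }

    impl PathKind {
        fn extend(self, other: PathKind) -> PathKind {
            match (self, other) {
                (PathKind::Coinductive, PathKind::Coinductive) => PathKind::Coinductive,
                _ => PathKind::Inductive,
            }
        }
    }

    /// `hp`: kind of the original path from head `h` to the popped head `p`.
    /// `he`: kind of the path from `h` to the rebased entry `e`.
    /// `ep`: kind of the path from `e` back to `p`.
    fn can_rebase(hp: PathKind, he: PathKind, ep: PathKind) -> bool {
        // The rerouted cycle `h -> e -> p` must look exactly like the
        // old direct cycle `h -> p`; otherwise the entry is dropped.
        he.extend(ep) == hp
    }

    fn main() {
        use PathKind::*;
        // A fully coinductive cycle stays coinductive when rerouted.
        assert!(can_rebase(Coinductive, Coinductive, Coinductive));
        // An inductive step from `e` back to `p` would change the cycle's
        // kind, so rebasing must fail.
        assert!(!can_rebase(Coinductive, Coinductive, Inductive));
    }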
