
Commit 9c5a110

significantly improve provisional cache rebasing
1 parent df32e15 commit 9c5a110

3 files changed: +86 -85 lines changed


compiler/rustc_next_trait_solver/src/solve/search_graph.rs

Lines changed: 3 additions & 1 deletion
@@ -48,7 +48,9 @@ where
     ) -> QueryResult<I> {
         match kind {
             PathKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes),
-            PathKind::Unknown => response_no_constraints(cx, input, Certainty::overflow(false)),
+            PathKind::Unknown | PathKind::ForcedAmbiguity => {
+                response_no_constraints(cx, input, Certainty::overflow(false))
+            }
             // Even though we know these cycles to be unproductive, we still return
             // overflow during coherence. This is both as we are not 100% confident in
             // the implementation yet and any incorrect errors would be unsound there.

compiler/rustc_type_ir/src/search_graph/mod.rs

Lines changed: 79 additions & 83 deletions
@@ -21,10 +21,9 @@ use std::marker::PhantomData;
 use derive_where::derive_where;
 #[cfg(feature = "nightly")]
 use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext};
+use rustc_type_ir::data_structures::HashMap;
 use tracing::{debug, instrument};
 
-use crate::data_structures::HashMap;
-
 mod stack;
 use stack::{Stack, StackDepth, StackEntry};
 mod global_cache;
@@ -137,6 +136,12 @@ pub enum PathKind {
     Unknown,
     /// A path with at least one coinductive step. Such cycles hold.
     Coinductive,
+    /// A path which is treated as ambiguous. Once a path has this path kind
+    /// any other segment does not change its kind.
+    ///
+    /// This is currently only used when fuzzing to support negative reasoning.
+    /// For more details, see #143054.
+    ForcedAmbiguity,
 }
 
 impl PathKind {
@@ -149,6 +154,9 @@ impl PathKind {
     /// to `max(self, rest)`.
     fn extend(self, rest: PathKind) -> PathKind {
         match (self, rest) {
+            (PathKind::ForcedAmbiguity, _) | (_, PathKind::ForcedAmbiguity) => {
+                PathKind::ForcedAmbiguity
+            }
            (PathKind::Coinductive, _) | (_, PathKind::Coinductive) => PathKind::Coinductive,
            (PathKind::Unknown, _) | (_, PathKind::Unknown) => PathKind::Unknown,
            (PathKind::Inductive, PathKind::Inductive) => PathKind::Inductive,
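
The new arm makes ForcedAmbiguity absorbing: once any segment of a path is forced-ambiguous, extending it further never changes its kind, so `extend` keeps behaving like `max` over Inductive < Unknown < Coinductive < ForcedAmbiguity. A hypothetical test in the same module, sketching that behaviour (not part of this commit):

#[cfg(test)]
mod path_kind_extend_tests {
    use super::PathKind;

    #[test]
    fn forced_ambiguity_is_absorbing() {
        // `extend` keeps the "largest" step kind of the combined path.
        assert!(matches!(PathKind::Inductive.extend(PathKind::Unknown), PathKind::Unknown));
        assert!(matches!(PathKind::Unknown.extend(PathKind::Coinductive), PathKind::Coinductive));
        // Once a segment is forced-ambiguous, further segments no longer matter.
        assert!(matches!(PathKind::ForcedAmbiguity.extend(PathKind::Coinductive), PathKind::ForcedAmbiguity));
        assert!(matches!(PathKind::Inductive.extend(PathKind::ForcedAmbiguity), PathKind::ForcedAmbiguity));
    }
}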
@@ -187,41 +195,6 @@ impl UsageKind {
     }
 }
 
-/// For each goal we track whether the paths from this goal
-/// to its cycle heads are coinductive.
-///
-/// This is a necessary condition to rebase provisional cache
-/// entries.
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum AllPathsToHeadCoinductive {
-    Yes,
-    No,
-}
-impl From<PathKind> for AllPathsToHeadCoinductive {
-    fn from(path: PathKind) -> AllPathsToHeadCoinductive {
-        match path {
-            PathKind::Coinductive => AllPathsToHeadCoinductive::Yes,
-            _ => AllPathsToHeadCoinductive::No,
-        }
-    }
-}
-impl AllPathsToHeadCoinductive {
-    #[must_use]
-    fn merge(self, other: impl Into<Self>) -> Self {
-        match (self, other.into()) {
-            (AllPathsToHeadCoinductive::Yes, AllPathsToHeadCoinductive::Yes) => {
-                AllPathsToHeadCoinductive::Yes
-            }
-            (AllPathsToHeadCoinductive::No, _) | (_, AllPathsToHeadCoinductive::No) => {
-                AllPathsToHeadCoinductive::No
-            }
-        }
-    }
-    fn and_merge(&mut self, other: impl Into<Self>) {
-        *self = self.merge(other);
-    }
-}
-
 #[derive(Debug, Clone, Copy)]
 struct AvailableDepth(usize);
 impl AvailableDepth {
@@ -261,9 +234,9 @@ impl AvailableDepth {
 ///
 /// We also track all paths from this goal to that head. This is necessary
 /// when rebasing provisional cache results.
-#[derive(Clone, Debug, PartialEq, Eq, Default)]
+#[derive(Clone, Debug, Default)]
 struct CycleHeads {
-    heads: BTreeMap<StackDepth, AllPathsToHeadCoinductive>,
+    heads: BTreeMap<StackDepth, PathsToNested>,
 }
 
 impl CycleHeads {
@@ -283,27 +256,16 @@ impl CycleHeads {
         self.heads.first_key_value().map(|(k, _)| *k)
     }
 
-    fn remove_highest_cycle_head(&mut self) {
+    fn remove_highest_cycle_head(&mut self) -> PathsToNested {
         let last = self.heads.pop_last();
-        debug_assert_ne!(last, None);
-    }
-
-    fn insert(
-        &mut self,
-        head: StackDepth,
-        path_from_entry: impl Into<AllPathsToHeadCoinductive> + Copy,
-    ) {
-        self.heads.entry(head).or_insert(path_from_entry.into()).and_merge(path_from_entry);
+        last.unwrap().1
     }
 
-    fn merge(&mut self, heads: &CycleHeads) {
-        for (&head, &path_from_entry) in heads.heads.iter() {
-            self.insert(head, path_from_entry);
-            debug_assert!(matches!(self.heads[&head], AllPathsToHeadCoinductive::Yes));
-        }
+    fn insert(&mut self, head: StackDepth, path_from_entry: impl Into<PathsToNested> + Copy) {
+        *self.heads.entry(head).or_insert(path_from_entry.into()) |= path_from_entry.into();
     }
 
-    fn iter(&self) -> impl Iterator<Item = (StackDepth, AllPathsToHeadCoinductive)> + '_ {
+    fn iter(&self) -> impl Iterator<Item = (StackDepth, PathsToNested)> + '_ {
         self.heads.iter().map(|(k, v)| (*k, *v))
     }
 
@@ -317,13 +279,7 @@ impl CycleHeads {
                 Ordering::Equal => continue,
                 Ordering::Greater => unreachable!(),
             }
-
-            let path_from_entry = match step_kind {
-                PathKind::Coinductive => AllPathsToHeadCoinductive::Yes,
-                PathKind::Unknown | PathKind::Inductive => path_from_entry,
-            };
-
-            self.insert(head, path_from_entry);
+            self.insert(head, path_from_entry.extend_with(step_kind));
         }
     }
 }
@@ -332,13 +288,14 @@ bitflags::bitflags! {
     /// Tracks how nested goals have been accessed. This is necessary to disable
     /// global cache entries if computing them would otherwise result in a cycle or
     /// access a provisional cache entry.
-    #[derive(Debug, Clone, Copy)]
+    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
     pub struct PathsToNested: u8 {
         /// The initial value when adding a goal to its own nested goals.
         const EMPTY = 1 << 0;
         const INDUCTIVE = 1 << 1;
         const UNKNOWN = 1 << 2;
         const COINDUCTIVE = 1 << 3;
+        const FORCED_AMBIGUITY = 1 << 4;
     }
 }
 impl From<PathKind> for PathsToNested {
@@ -347,6 +304,7 @@ impl From<PathKind> for PathsToNested {
             PathKind::Inductive => PathsToNested::INDUCTIVE,
             PathKind::Unknown => PathsToNested::UNKNOWN,
             PathKind::Coinductive => PathsToNested::COINDUCTIVE,
+            PathKind::ForcedAmbiguity => PathsToNested::FORCED_AMBIGUITY,
         }
     }
 }
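
With `CycleHeads` now mapping each head to a `PathsToNested` bitset (see the `CycleHeads` hunks above), `CycleHeads::insert` ORs the kinds of every additional path into the existing set instead of collapsing them into a coinductive-or-not flag. A small sketch of that accumulation, using only the conversions visible in this diff (hypothetical values):

// Hypothetical illustration: two different paths to the same cycle head are
// recorded, one inductive and one coinductive; the bitset keeps both kinds.
let mut paths_to_head = PathsToNested::from(PathKind::Inductive);
paths_to_head |= PathsToNested::from(PathKind::Coinductive);
assert!(paths_to_head.contains(PathsToNested::INDUCTIVE | PathsToNested::COINDUCTIVE));
assert!(!paths_to_head.contains(PathsToNested::FORCED_AMBIGUITY));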
@@ -379,10 +337,46 @@ impl PathsToNested {
                     self.insert(PathsToNested::COINDUCTIVE);
                 }
             }
+            PathKind::ForcedAmbiguity => {
+                if self.intersects(
+                    PathsToNested::EMPTY
+                        | PathsToNested::INDUCTIVE
+                        | PathsToNested::UNKNOWN
+                        | PathsToNested::COINDUCTIVE,
+                ) {
+                    self.remove(
+                        PathsToNested::EMPTY
+                            | PathsToNested::INDUCTIVE
+                            | PathsToNested::UNKNOWN
+                            | PathsToNested::COINDUCTIVE,
+                    );
+                    self.insert(PathsToNested::FORCED_AMBIGUITY);
+                }
+            }
         }
 
         self
     }
+
+    #[must_use]
+    fn extend_with_paths(self, path: PathsToNested) -> Self {
+        let mut new = PathsToNested::empty();
+        let (PathKind::Inductive
+        | PathKind::Unknown
+        | PathKind::Coinductive
+        | PathKind::ForcedAmbiguity);
+        for n in [
+            PathKind::Inductive,
+            PathKind::Unknown,
+            PathKind::Coinductive,
+            PathKind::ForcedAmbiguity,
+        ] {
+            if path.contains(n.into()) {
+                new |= self.extend_with(n);
+            }
+        }
+        new
+    }
 }
 
 /// The nested goals of each stack entry and the path from the
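
The new `extend_with_paths` helper extends `self` with each `PathKind` contained in `path` and unions the results. The binding-less `let (PathKind::Inductive | ... | PathKind::ForcedAmbiguity);` is, on my reading (the commit does not say so), an exhaustiveness guard: the `let` only compiles while the or-pattern is irrefutable, so adding a `PathKind` variant forces the array below it to be revisited. A standalone sketch of that idiom, using a hypothetical `Demo` enum:

// Minimal, self-contained sketch of the guard idiom; `Demo` is illustrative only.
#[allow(dead_code)]
enum Demo {
    A,
    B,
}

fn main() {
    // The parenthesised or-pattern has no bindings and no initializer; it only
    // type-checks while it names every variant. Adding a `Demo::C` variant makes
    // it refutable, so this `let` stops compiling and points at code to update.
    let (Demo::A | Demo::B);
}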
@@ -693,7 +687,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         if let Some((_scope, expected)) = validate_cache {
             // Do not try to move a goal into the cache again if we're testing
             // the global cache.
-            assert_eq!(evaluation_result.result, expected, "input={input:?}");
+            assert_eq!(expected, evaluation_result.result, "input={input:?}");
         } else if D::inspect_is_noop(inspect) {
             self.insert_global_cache(cx, input, evaluation_result, dep_node)
         }
@@ -782,7 +776,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         stack_entry: &StackEntry<X>,
         mut mutate_result: impl FnMut(X::Input, X::Result) -> X::Result,
     ) {
-        let head = self.stack.next_index();
+        let popped_head = self.stack.next_index();
         #[allow(rustc::potential_query_instability)]
         self.provisional_cache.retain(|&input, entries| {
             entries.retain_mut(|entry| {
@@ -792,30 +786,32 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
                     path_from_head,
                     result,
                 } = entry;
-                if heads.highest_cycle_head() == head {
+                let ep = if heads.highest_cycle_head() == popped_head {
                     heads.remove_highest_cycle_head()
                 } else {
                     return true;
-                }
-
-                // We only try to rebase if all paths from the cache entry
-                // to its heads are coinductive. In this case these cycle
-                // kinds won't change, no matter the goals between these
-                // heads and the provisional cache entry.
-                if heads.iter().any(|(_, p)| matches!(p, AllPathsToHeadCoinductive::No)) {
-                    return false;
-                }
+                };
 
-                // The same for nested goals of the cycle head.
-                if stack_entry.heads.iter().any(|(_, p)| matches!(p, AllPathsToHeadCoinductive::No))
-                {
-                    return false;
+                // We're rebasing an entry `e` over a head `p`. This head
+                // has a number of own heads `h` it depends on. We need to
+                // make sure that the cycle `hph` cannot change after rebasing.
+                //
+                // After rebasing the cycles `hph` will go through the path `heph` instead.
+                for (h, ph) in stack_entry.heads.iter() {
+                    let hp =
+                        Self::cycle_path_kind(&self.stack, stack_entry.step_kind_from_parent, h);
+                    let hph = ph.extend_with(hp);
+                    let he = hp.extend(*path_from_head);
+                    let hep = ep.extend_with(he);
+                    let heph = hep.extend_with_paths(ph);
+                    if hph == heph {
+                        let eph = ep.extend_with_paths(ph);
+                        heads.insert(h, eph);
+                    } else {
+                        return false;
+                    }
                 }
 
-                // Merge the cycle heads of the provisional cache entry and the
-                // popped head. If the popped cycle head was a root, discard all
-                // provisional cache entries which depend on it.
-                heads.merge(&stack_entry.heads);
                 let Some(head) = heads.opt_highest_cycle_head() else {
                     return false;
                 };
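
This rewritten check replaces the old requirement that all paths from the cache entry to its heads be coinductive: an entry `e` that depended on the popped head `p` is rebased onto each of `p`'s own heads `h` only if routing the cycle `h -> p -> h` through `e` (as `h -> e -> p -> h`) leaves its path kinds unchanged; otherwise the entry is discarded. A minimal, self-contained sketch of the condition, deliberately collapsing each `PathsToNested` bitset to a single `PathKind` (names here are illustrative, not the compiler's):

// Simplified model: one `PathKind` per path instead of a `PathsToNested` bitset.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum PathKind {
    Inductive,
    Unknown,
    Coinductive,
    ForcedAmbiguity,
}

impl PathKind {
    // Mirrors the `max`-like `PathKind::extend` from this commit.
    fn extend(self, rest: PathKind) -> PathKind {
        self.max(rest)
    }
}

// `hp`: head `h` down to the popped goal `p`, `ph`: `p` back up to `h`,
// `pe`: `p` to the cached entry `e`, `ep`: `e` back to `p`.
fn can_rebase(hp: PathKind, ph: PathKind, pe: PathKind, ep: PathKind) -> bool {
    let hph = hp.extend(ph); // the cycle before rebasing
    let heph = hp.extend(pe).extend(ep).extend(ph); // the cycle after rebasing
    hph == heph
}

fn main() {
    use PathKind::*;
    // The detour through `e` does not change the cycle's kind: keep the entry.
    assert!(can_rebase(Coinductive, Inductive, Inductive, Unknown));
    // The detour upgrades an inductive cycle to a coinductive one: drop it.
    assert!(!can_rebase(Inductive, Inductive, Inductive, Coinductive));
}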

compiler/rustc_type_ir/src/search_graph/stack.rs

Lines changed: 4 additions & 1 deletion
@@ -3,7 +3,7 @@ use std::ops::{Index, IndexMut};
 use derive_where::derive_where;
 use rustc_index::IndexVec;
 
-use super::{AvailableDepth, Cx, CycleHeads, NestedGoals, PathKind, UsageKind};
+use crate::search_graph::{AvailableDepth, Cx, CycleHeads, NestedGoals, PathKind, UsageKind};
 
 rustc_index::newtype_index! {
     #[orderable]
@@ -79,6 +79,9 @@ impl<X: Cx> Stack<X> {
     }
 
     pub(super) fn push(&mut self, entry: StackEntry<X>) -> StackDepth {
+        if cfg!(debug_assertions) && self.entries.iter().any(|e| e.input == entry.input) {
+            panic!("pushing duplicate entry on stack: {entry:?} {:?}", self.entries);
+        }
         self.entries.push(entry)
     }
 