
implement a performant and fuzzed solver cache

lcnr 2024-07-24 17:07:22 +02:00
parent f860873983
commit 0aa17a4c4d
4 changed files with 740 additions and 358 deletions

View file

@@ -2,12 +2,12 @@ use std::convert::Infallible;
 use std::marker::PhantomData;
 
 use rustc_type_ir::inherent::*;
-use rustc_type_ir::search_graph::{self, CycleKind, UsageKind};
+use rustc_type_ir::search_graph::{self, PathKind};
 use rustc_type_ir::solve::{CanonicalInput, Certainty, QueryResult};
 use rustc_type_ir::Interner;
 
 use super::inspect::ProofTreeBuilder;
-use super::FIXPOINT_STEP_LIMIT;
+use super::{has_no_inference_or_external_constraints, FIXPOINT_STEP_LIMIT};
 use crate::delegate::SolverDelegate;
 
 /// This type is never constructed. We only use it to implement `search_graph::Delegate`
@@ -23,10 +23,11 @@ where
 {
     type Cx = D::Interner;
 
+    const ENABLE_PROVISIONAL_CACHE: bool = true;
     type ValidationScope = Infallible;
     fn enter_validation_scope(
         _cx: Self::Cx,
-        _input: <Self::Cx as search_graph::Cx>::Input,
+        _input: CanonicalInput<I>,
     ) -> Option<Self::ValidationScope> {
         None
     }
@@ -38,39 +39,32 @@ where
         inspect.is_noop()
     }
 
+    const DIVIDE_AVAILABLE_DEPTH_ON_OVERFLOW: usize = 4;
     fn recursion_limit(cx: I) -> usize {
         cx.recursion_limit()
     }
 
     fn initial_provisional_result(
         cx: I,
-        kind: CycleKind,
+        kind: PathKind,
         input: CanonicalInput<I>,
     ) -> QueryResult<I> {
         match kind {
-            CycleKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes),
-            CycleKind::Inductive => response_no_constraints(cx, input, Certainty::overflow(false)),
+            PathKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes),
+            PathKind::Inductive => response_no_constraints(cx, input, Certainty::overflow(false)),
         }
     }
 
-    fn reached_fixpoint(
-        cx: I,
-        kind: UsageKind,
+    fn is_initial_provisional_result(
+        cx: Self::Cx,
+        kind: PathKind,
         input: CanonicalInput<I>,
-        provisional_result: Option<QueryResult<I>>,
         result: QueryResult<I>,
     ) -> bool {
-        if let Some(r) = provisional_result {
-            r == result
-        } else {
-            match kind {
-                UsageKind::Single(CycleKind::Coinductive) => {
-                    response_no_constraints(cx, input, Certainty::Yes) == result
-                }
-                UsageKind::Single(CycleKind::Inductive) => {
-                    response_no_constraints(cx, input, Certainty::overflow(false)) == result
-                }
-                UsageKind::Mixed => false,
-            }
-        }
+        match kind {
+            PathKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes) == result,
+            PathKind::Inductive => {
+                response_no_constraints(cx, input, Certainty::overflow(false)) == result
+            }
+        }
     }
@@ -88,6 +82,22 @@ where
         response_no_constraints(cx, input, Certainty::overflow(false))
     }
 
+    fn is_ambiguous_result(result: QueryResult<I>) -> bool {
+        result.is_ok_and(|response| {
+            has_no_inference_or_external_constraints(response)
+                && matches!(response.value.certainty, Certainty::Maybe(_))
+        })
+    }
+
+    fn propagate_ambiguity(
+        cx: I,
+        for_input: CanonicalInput<I>,
+        from_result: QueryResult<I>,
+    ) -> QueryResult<I> {
+        let certainty = from_result.unwrap().value.certainty;
+        response_no_constraints(cx, for_input, certainty)
+    }
+
     fn step_is_coinductive(cx: I, input: CanonicalInput<I>) -> bool {
         input.value.goal.predicate.is_coinductive(cx)
     }
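The `initial_provisional_result` / `is_initial_provisional_result` pair above drives the solver's cycle handling: a cycle head starts from a `PathKind`-dependent provisional result and is re-evaluated until its result stops changing. Below is a minimal, self-contained model of that fixpoint loop; apart from `PathKind`, the names (`QueryResult`, `evaluate`, `fixpoint`) are simplified stand-ins for illustration, not the actual solver API, and the real loop is additionally bounded by `FIXPOINT_STEP_LIMIT`.

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum PathKind {
    Coinductive,
    Inductive,
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum QueryResult {
    Yes,
    Ambiguous,
    Overflow,
}

// Coinductive cycle heads start out as trivial success, inductive ones
// as overflow, mirroring `initial_provisional_result` above.
fn initial_provisional_result(kind: PathKind) -> QueryResult {
    match kind {
        PathKind::Coinductive => QueryResult::Yes,
        PathKind::Inductive => QueryResult::Overflow,
    }
}

// Re-evaluate a cycle head, feeding the previous provisional result
// back in, until the result no longer changes.
fn fixpoint(
    kind: PathKind,
    mut evaluate: impl FnMut(QueryResult) -> QueryResult,
) -> QueryResult {
    let mut provisional = initial_provisional_result(kind);
    loop {
        let result = evaluate(provisional);
        if result == provisional {
            return result;
        }
        provisional = result;
    }
}

fn main() {
    // A goal that is ambiguous regardless of its provisional result
    // stabilizes after one re-evaluation.
    let result = fixpoint(PathKind::Coinductive, |_prev| QueryResult::Ambiguous);
    assert_eq!(result, QueryResult::Ambiguous);
}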

View file

@@ -1,12 +1,17 @@
 use derive_where::derive_where;
-use rustc_index::IndexVec;
 
-use super::{AvailableDepth, Cx, StackDepth, StackEntry};
-use crate::data_structures::{HashMap, HashSet};
+use super::{AvailableDepth, Cx, NestedGoals};
+use crate::data_structures::HashMap;
 
 struct Success<X: Cx> {
-    result: X::Tracked<X::Result>,
     additional_depth: usize,
+    nested_goals: NestedGoals<X>,
+    result: X::Tracked<X::Result>,
+}
+
+struct WithOverflow<X: Cx> {
+    nested_goals: NestedGoals<X>,
+    result: X::Tracked<X::Result>,
 }
 
 /// The cache entry for a given input.
@@ -17,12 +22,7 @@ struct Success<X: Cx> {
 #[derive_where(Default; X: Cx)]
 struct CacheEntry<X: Cx> {
     success: Option<Success<X>>,
-    /// We have to be careful when caching roots of cycles.
-    ///
-    /// See the doc comment of `StackEntry::cycle_participants` for more
-    /// details.
-    nested_goals: HashSet<X::Input>,
-    with_overflow: HashMap<usize, X::Tracked<X::Result>>,
+    with_overflow: HashMap<usize, WithOverflow<X>>,
 }
 
 #[derive_where(Debug; X: Cx)]
@@ -30,10 +30,7 @@ pub(super) struct CacheData<'a, X: Cx> {
     pub(super) result: X::Result,
     pub(super) additional_depth: usize,
     pub(super) encountered_overflow: bool,
-    // FIXME: This is currently unused, but impacts the design
-    // by requiring a closure for `Cx::with_global_cache`.
-    #[allow(dead_code)]
-    pub(super) nested_goals: &'a HashSet<X::Input>,
+    pub(super) nested_goals: &'a NestedGoals<X>,
 }
 
 #[derive_where(Default; X: Cx)]
 pub struct GlobalCache<X: Cx> {
@@ -52,15 +49,17 @@ impl<X: Cx> GlobalCache<X> {
         additional_depth: usize,
         encountered_overflow: bool,
-        nested_goals: &HashSet<X::Input>,
+        nested_goals: NestedGoals<X>,
     ) {
         let result = cx.mk_tracked(result, dep_node);
         let entry = self.map.entry(input).or_default();
-        entry.nested_goals.extend(nested_goals);
         if encountered_overflow {
-            entry.with_overflow.insert(additional_depth, result);
+            let with_overflow = WithOverflow { nested_goals, result };
+            let prev = entry.with_overflow.insert(additional_depth, with_overflow);
+            assert!(prev.is_none());
         } else {
-            entry.success = Some(Success { result, additional_depth });
+            let prev = entry.success.replace(Success { additional_depth, nested_goals, result });
+            assert!(prev.is_none());
         }
     }
@@ -72,30 +71,37 @@ impl<X: Cx> GlobalCache<X> {
         &'a self,
         cx: X,
         input: X::Input,
-        stack: &IndexVec<StackDepth, StackEntry<X>>,
         available_depth: AvailableDepth,
+        mut candidate_is_applicable: impl FnMut(&NestedGoals<X>) -> bool,
     ) -> Option<CacheData<'a, X>> {
         let entry = self.map.get(&input)?;
-        if stack.iter().any(|e| entry.nested_goals.contains(&e.input)) {
-            return None;
-        }
-
-        if let Some(ref success) = entry.success {
-            if available_depth.cache_entry_is_applicable(success.additional_depth) {
+        if let Some(Success { additional_depth, ref nested_goals, ref result }) = entry.success {
+            if available_depth.cache_entry_is_applicable(additional_depth)
+                && candidate_is_applicable(nested_goals)
+            {
                 return Some(CacheData {
-                    result: cx.get_tracked(&success.result),
-                    additional_depth: success.additional_depth,
+                    result: cx.get_tracked(&result),
+                    additional_depth,
                     encountered_overflow: false,
-                    nested_goals: &entry.nested_goals,
+                    nested_goals,
                 });
             }
         }
 
-        entry.with_overflow.get(&available_depth.0).map(|e| CacheData {
-            result: cx.get_tracked(e),
-            additional_depth: available_depth.0,
-            encountered_overflow: true,
-            nested_goals: &entry.nested_goals,
-        })
+        let additional_depth = available_depth.0;
+        if let Some(WithOverflow { nested_goals, result }) =
+            entry.with_overflow.get(&additional_depth)
+        {
+            if candidate_is_applicable(nested_goals) {
+                return Some(CacheData {
+                    result: cx.get_tracked(result),
+                    additional_depth,
+                    encountered_overflow: true,
+                    nested_goals,
+                });
+            }
+        }
+
+        None
     }
 }
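The reworked `get` replaces the old `stack` parameter with a `candidate_is_applicable` closure: each cache entry now records the nested goals its result depends on, and the caller decides whether such an entry may be used, e.g. rejecting it while one of its nested goals is still on the evaluation stack. A minimal sketch of that design, using plain `std` collections and toy types (`&'static str` inputs, `u32` results) in place of the real ones:

use std::collections::{HashMap, HashSet};

type Input = &'static str;
type NestedGoals = HashSet<Input>;

struct GlobalCache {
    // Per input: the nested goals the cached result depends on,
    // and the result itself.
    map: HashMap<Input, (NestedGoals, u32)>,
}

impl GlobalCache {
    fn get(
        &self,
        input: Input,
        mut candidate_is_applicable: impl FnMut(&NestedGoals) -> bool,
    ) -> Option<u32> {
        let (nested_goals, result) = self.map.get(input)?;
        candidate_is_applicable(nested_goals).then_some(*result)
    }
}

fn main() {
    let mut map = HashMap::new();
    map.insert("A", (HashSet::from(["B"]), 1));
    let cache = GlobalCache { map };

    // Reject the entry while a nested goal ("B") is on the stack ...
    let stack = HashSet::from(["B"]);
    assert_eq!(cache.get("A", |nested| nested.is_disjoint(&stack)), None);

    // ... but use it once the stack no longer mentions it.
    let stack = HashSet::from(["C"]);
    assert_eq!(cache.get("A", |nested| nested.is_disjoint(&stack)), Some(1));
}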

File diff suppressed because it is too large

View file

@@ -1,63 +0,0 @@
-use super::*;
-
-impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
-    #[allow(rustc::potential_query_instability)]
-    pub(super) fn check_invariants(&self) {
-        if !cfg!(debug_assertions) {
-            return;
-        }
-
-        let SearchGraph { mode: _, stack, provisional_cache, _marker } = self;
-        if stack.is_empty() {
-            assert!(provisional_cache.is_empty());
-        }
-
-        for (depth, entry) in stack.iter_enumerated() {
-            let StackEntry {
-                input,
-                available_depth: _,
-                reached_depth: _,
-                non_root_cycle_participant,
-                encountered_overflow: _,
-                has_been_used,
-                ref nested_goals,
-                provisional_result,
-            } = *entry;
-
-            if let Some(head) = non_root_cycle_participant {
-                assert!(head < depth);
-                assert!(nested_goals.is_empty());
-                assert_ne!(stack[head].has_been_used, None);
-
-                let mut current_root = head;
-                while let Some(parent) = stack[current_root].non_root_cycle_participant {
-                    current_root = parent;
-                }
-                assert!(stack[current_root].nested_goals.contains(&input));
-            }
-
-            if !nested_goals.is_empty() {
-                assert!(provisional_result.is_some() || has_been_used.is_some());
-                for entry in stack.iter().take(depth.as_usize()) {
-                    assert_eq!(nested_goals.get(&entry.input), None);
-                }
-            }
-        }
-
-        for (&_input, entry) in &self.provisional_cache {
-            let ProvisionalCacheEntry { with_coinductive_stack, with_inductive_stack } = entry;
-            assert!(with_coinductive_stack.is_some() || with_inductive_stack.is_some());
-            let check_detached = |detached_entry: &DetachedEntry<X>| {
-                let DetachedEntry { head, result: _ } = *detached_entry;
-                assert_ne!(stack[head].has_been_used, None);
-            };
-
-            if let Some(with_coinductive_stack) = with_coinductive_stack {
-                check_detached(with_coinductive_stack);
-            }
-            if let Some(with_inductive_stack) = with_inductive_stack {
-                check_detached(with_inductive_stack);
-            }
-        }
-    }
-}
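The deleted `check_invariants` follows a common debug-only validation pattern: the checks compile in every profile but return immediately unless debug assertions are enabled. A tiny sketch of that gate, with a toy stack and cache standing in for the real search-graph state:

// Toy state standing in for the real stack and provisional cache.
struct SearchGraph {
    stack: Vec<&'static str>,
    provisional_cache: Vec<&'static str>,
}

impl SearchGraph {
    fn check_invariants(&self) {
        // No-op unless debug assertions are enabled
        // (e.g. a non-release build).
        if !cfg!(debug_assertions) {
            return;
        }
        // Mirrors the first invariant above: an empty stack implies
        // an empty provisional cache.
        if self.stack.is_empty() {
            assert!(self.provisional_cache.is_empty());
        }
    }
}

fn main() {
    let graph = SearchGraph { stack: Vec::new(), provisional_cache: Vec::new() };
    graph.check_invariants();
}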