Auto merge of #138414 - matthiaskrgr:rollup-9ablqdb, r=matthiaskrgr
Rollup of 7 pull requests

Successful merges:

 - #137314 (change definitely unproductive cycles to error)
 - #137701 (Convert `ShardedHashMap` to use `hashbrown::HashTable`)
 - #138269 (uefi: fs: Implement FileType, FilePermissions and FileAttr)
 - #138331 (Use `RUSTC_LINT_FLAGS` more)
 - #138345 (Some autodiff cleanups)
 - #138387 (intrinsics: remove unnecessary leading underscore from argument names)
 - #138390 (fix incorrect tracing log)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 249cb84316
138 changed files with 837 additions and 816 deletions
@@ -5,7 +5,6 @@
 #![cfg_attr(feature = "nightly", feature(rustc_attrs))]
 #![cfg_attr(feature = "nightly", feature(rustdoc_internals))]
 #![cfg_attr(feature = "nightly", feature(step_trait))]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 /*! ABI handling for rustc
@@ -23,7 +23,6 @@
 #![feature(maybe_uninit_slice)]
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::alloc::Layout;
@@ -20,7 +20,6 @@
 #![feature(never_type)]
 #![feature(rustdoc_internals)]
 #![feature(stmt_expr_attributes)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub mod util {
@@ -9,7 +9,6 @@
 #![cfg_attr(feature = "nightly", allow(internal_features))]
 #![cfg_attr(feature = "nightly", feature(never_type))]
 #![cfg_attr(feature = "nightly", feature(rustc_attrs))]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 #[cfg(feature = "nightly")]
@@ -40,7 +40,6 @@
 #![feature(if_let_guard)]
 #![feature(let_chains)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::sync::Arc;
@@ -10,7 +10,6 @@
 #![feature(iter_is_partitioned)]
 #![feature(let_chains)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub mod ast_validation;
@@ -3,7 +3,6 @@
 #![doc(rust_logo)]
 #![feature(box_patterns)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 mod helpers;
@@ -3,7 +3,6 @@
 #![doc(rust_logo)]
 #![feature(let_chains)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 mod attributes;
@@ -81,7 +81,6 @@
 #![doc(rust_logo)]
 #![feature(let_chains)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 #[macro_use]
@@ -23,6 +23,7 @@
 // tidy-alphabetical-start
 #![allow(elided_lifetimes_in_paths)]
 #![allow(internal_features)]
+#![allow(unreachable_pub)] // because this crate is mostly generated code
 #![doc(rust_logo)]
 #![feature(rustdoc_internals)]
 // #![warn(unreachable_pub)] // don't use because this crate is mostly generated code
@@ -14,7 +14,6 @@
 #![feature(rustdoc_internals)]
 #![feature(stmt_expr_attributes)]
 #![feature(try_blocks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::borrow::Cow;
@@ -19,7 +19,6 @@
 #![feature(rustdoc_internals)]
 #![feature(string_from_utf8_lossy_owned)]
 #![feature(try_blocks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 extern crate proc_macro;
@@ -624,25 +624,25 @@ pub mod intrinsics {
     #[rustc_intrinsic]
     pub fn size_of<T>() -> usize;
     #[rustc_intrinsic]
-    pub unsafe fn size_of_val<T: ?::Sized>(_val: *const T) -> usize;
+    pub unsafe fn size_of_val<T: ?::Sized>(val: *const T) -> usize;
     #[rustc_intrinsic]
     pub fn min_align_of<T>() -> usize;
     #[rustc_intrinsic]
-    pub unsafe fn min_align_of_val<T: ?::Sized>(_val: *const T) -> usize;
+    pub unsafe fn min_align_of_val<T: ?::Sized>(val: *const T) -> usize;
     #[rustc_intrinsic]
-    pub unsafe fn copy<T>(_src: *const T, _dst: *mut T, _count: usize);
+    pub unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize);
     #[rustc_intrinsic]
-    pub unsafe fn transmute<T, U>(_e: T) -> U;
+    pub unsafe fn transmute<T, U>(e: T) -> U;
     #[rustc_intrinsic]
-    pub unsafe fn ctlz_nonzero<T>(_x: T) -> u32;
+    pub unsafe fn ctlz_nonzero<T>(x: T) -> u32;
     #[rustc_intrinsic]
     pub fn needs_drop<T: ?::Sized>() -> bool;
     #[rustc_intrinsic]
-    pub fn bitreverse<T>(_x: T) -> T;
+    pub fn bitreverse<T>(x: T) -> T;
     #[rustc_intrinsic]
-    pub fn bswap<T>(_x: T) -> T;
+    pub fn bswap<T>(x: T) -> T;
     #[rustc_intrinsic]
-    pub unsafe fn write_bytes<T>(_dst: *mut T, _val: u8, _count: usize);
+    pub unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
     #[rustc_intrinsic]
     pub unsafe fn unreachable() -> !;
 }
@@ -595,25 +595,25 @@ pub mod intrinsics {
     #[rustc_intrinsic]
     pub fn size_of<T>() -> usize;
     #[rustc_intrinsic]
-    pub unsafe fn size_of_val<T: ?::Sized>(_val: *const T) -> usize;
+    pub unsafe fn size_of_val<T: ?::Sized>(val: *const T) -> usize;
     #[rustc_intrinsic]
     pub fn min_align_of<T>() -> usize;
     #[rustc_intrinsic]
-    pub unsafe fn min_align_of_val<T: ?::Sized>(_val: *const T) -> usize;
+    pub unsafe fn min_align_of_val<T: ?::Sized>(val: *const T) -> usize;
     #[rustc_intrinsic]
-    pub unsafe fn copy<T>(_src: *const T, _dst: *mut T, _count: usize);
+    pub unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize);
     #[rustc_intrinsic]
-    pub unsafe fn transmute<T, U>(_e: T) -> U;
+    pub unsafe fn transmute<T, U>(e: T) -> U;
     #[rustc_intrinsic]
-    pub unsafe fn ctlz_nonzero<T>(_x: T) -> u32;
+    pub unsafe fn ctlz_nonzero<T>(x: T) -> u32;
     #[rustc_intrinsic]
     pub fn needs_drop<T: ?::Sized>() -> bool;
     #[rustc_intrinsic]
-    pub fn bitreverse<T>(_x: T) -> T;
+    pub fn bitreverse<T>(x: T) -> T;
     #[rustc_intrinsic]
-    pub fn bswap<T>(_x: T) -> T;
+    pub fn bswap<T>(x: T) -> T;
     #[rustc_intrinsic]
-    pub unsafe fn write_bytes<T>(_dst: *mut T, _val: u8, _count: usize);
+    pub unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
     #[rustc_intrinsic]
     pub unsafe fn unreachable() -> !;
 }
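The renames above work because `unused_variables` only fires for bindings inside function bodies; bodyless declarations never warn, so the leading underscores were pure noise. A minimal illustration, using a trait method as a stand-in for the bodyless `#[rustc_intrinsic]` declarations (which need a nightly compiler):

```rust
// Parameters of bodyless declarations compile without `unused_variables`
// warnings, so no leading underscore is needed.
trait RawCopy {
    /// Declaration only: there is no body in which `src`, `dst`, or
    /// `count` could go unused.
    unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize);
}

fn main() {}
```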
@@ -19,7 +19,6 @@
 #![feature(rustdoc_internals)]
 #![feature(slice_as_array)]
 #![feature(try_blocks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::any::Any;
@@ -15,7 +15,6 @@
 #![feature(rustdoc_internals)]
 #![feature(trait_alias)]
 #![feature(try_blocks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 //! This crate contains codegen code that is used by all codegen backends (LLVM and others).
@@ -17,7 +17,6 @@
 #![feature(unqualified_local_imports)]
 #![feature(yeet_expr)]
 #![warn(unqualified_local_imports)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub mod check_consts;
@@ -29,6 +29,11 @@ thin-vec = "0.2.12"
 tracing = "0.1"
 # tidy-alphabetical-end
 
+[dependencies.hashbrown]
+version = "0.15.2"
+default-features = false
+features = ["nightly"] # for may_dangle
+
 [dependencies.parking_lot]
 version = "0.12"
 
@@ -3,7 +3,7 @@ use std::cmp::max;
 use super::*;
 use crate::fx::FxHashMap;
 
-pub struct TestGraph {
+pub(super) struct TestGraph {
     num_nodes: usize,
     start_node: usize,
     successors: FxHashMap<usize, Vec<usize>>,
@@ -11,7 +11,7 @@ pub struct TestGraph {
 }
 
 impl TestGraph {
-    pub fn new(start_node: usize, edges: &[(usize, usize)]) -> Self {
+    pub(super) fn new(start_node: usize, edges: &[(usize, usize)]) -> Self {
         let mut graph = TestGraph {
             num_nodes: start_node + 1,
             start_node,
@@ -24,7 +24,6 @@
 #![feature(dropck_eyepatch)]
 #![feature(extend_one)]
 #![feature(file_buffered)]
-#![feature(hash_raw_entry)]
 #![feature(macro_metavar_expr)]
 #![feature(map_try_insert)]
 #![feature(min_specialization)]
@@ -76,6 +76,7 @@ impl_dyn_send!(
     [crate::sync::RwLock<T> where T: DynSend]
     [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Send + crate::tagged_ptr::Tag]
     [rustc_arena::TypedArena<T> where T: DynSend]
+    [hashbrown::HashTable<T> where T: DynSend]
     [indexmap::IndexSet<V, S> where V: DynSend, S: DynSend]
     [indexmap::IndexMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
     [thin_vec::ThinVec<T> where T: DynSend]
@@ -153,6 +154,7 @@ impl_dyn_sync!(
     [crate::tagged_ptr::TaggedRef<'a, P, T> where 'a, P: Sync, T: Sync + crate::tagged_ptr::Tag]
     [parking_lot::lock_api::Mutex<R, T> where R: DynSync, T: ?Sized + DynSend]
     [parking_lot::lock_api::RwLock<R, T> where R: DynSync, T: ?Sized + DynSend + DynSync]
+    [hashbrown::HashTable<T> where T: DynSync]
     [indexmap::IndexSet<V, S> where V: DynSync, S: DynSync]
     [indexmap::IndexMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
     [smallvec::SmallVec<A> where A: smallvec::Array + DynSync]
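Roughly, each bracketed entry in these macro invocations expands to an unsafe marker impl. A sketch under simplified assumptions — the trait definitions below are stand-ins, not the real `rustc_data_structures::marker` traits, and the expansion shape is inferred, not copied from the macro:

```rust
// Simplified stand-ins for rustc's DynSend/DynSync marker traits.
unsafe trait DynSend {}
unsafe trait DynSync {}

// What `[hashbrown::HashTable<T> where T: DynSend]` plausibly expands to:
// the table is safe to send/share across threads whenever its elements are.
unsafe impl<T: DynSend> DynSend for hashbrown::HashTable<T> {}
unsafe impl<T: DynSync> DynSync for hashbrown::HashTable<T> {}

fn main() {}
```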
@@ -313,7 +313,7 @@ pub struct Error<O, E> {
 
 mod helper {
     use super::*;
-    pub type ObligationTreeIdGenerator = impl Iterator<Item = ObligationTreeId>;
+    pub(super) type ObligationTreeIdGenerator = impl Iterator<Item = ObligationTreeId>;
     impl<O: ForestObligation> ObligationForest<O> {
         #[cfg_attr(not(bootstrap), define_opaque(ObligationTreeIdGenerator))]
         pub fn new() -> ObligationForest<O> {
@@ -1,11 +1,11 @@
 use std::borrow::Borrow;
-use std::collections::hash_map::RawEntryMut;
 use std::hash::{Hash, Hasher};
-use std::iter;
+use std::{iter, mem};
 
 use either::Either;
+use hashbrown::hash_table::{Entry, HashTable};
 
-use crate::fx::{FxHashMap, FxHasher};
+use crate::fx::FxHasher;
 use crate::sync::{CacheAligned, Lock, LockGuard, Mode, is_dyn_thread_safe};
 
 // 32 shards is sufficient to reduce contention on an 8-core Ryzen 7 1700,
@@ -140,17 +140,67 @@ pub fn shards() -> usize {
     1
 }
 
-pub type ShardedHashMap<K, V> = Sharded<FxHashMap<K, V>>;
+pub type ShardedHashMap<K, V> = Sharded<HashTable<(K, V)>>;
 
 impl<K: Eq, V> ShardedHashMap<K, V> {
     pub fn with_capacity(cap: usize) -> Self {
-        Self::new(|| FxHashMap::with_capacity_and_hasher(cap, rustc_hash::FxBuildHasher::default()))
+        Self::new(|| HashTable::with_capacity(cap))
     }
     pub fn len(&self) -> usize {
         self.lock_shards().map(|shard| shard.len()).sum()
     }
 }
 
+impl<K: Eq + Hash, V> ShardedHashMap<K, V> {
+    #[inline]
+    pub fn get<Q>(&self, key: &Q) -> Option<V>
+    where
+        K: Borrow<Q>,
+        Q: Hash + Eq,
+        V: Clone,
+    {
+        let hash = make_hash(key);
+        let shard = self.lock_shard_by_hash(hash);
+        let (_, value) = shard.find(hash, |(k, _)| k.borrow() == key)?;
+        Some(value.clone())
+    }
+
+    #[inline]
+    pub fn get_or_insert_with(&self, key: K, default: impl FnOnce() -> V) -> V
+    where
+        V: Copy,
+    {
+        let hash = make_hash(&key);
+        let mut shard = self.lock_shard_by_hash(hash);
+
+        match table_entry(&mut shard, hash, &key) {
+            Entry::Occupied(e) => e.get().1,
+            Entry::Vacant(e) => {
+                let value = default();
+                e.insert((key, value));
+                value
+            }
+        }
+    }
+
+    #[inline]
+    pub fn insert(&self, key: K, value: V) -> Option<V> {
+        let hash = make_hash(&key);
+        let mut shard = self.lock_shard_by_hash(hash);
+
+        match table_entry(&mut shard, hash, &key) {
+            Entry::Occupied(e) => {
+                let previous = mem::replace(&mut e.into_mut().1, value);
+                Some(previous)
+            }
+            Entry::Vacant(e) => {
+                e.insert((key, value));
+                None
+            }
+        }
+    }
+}
+
 impl<K: Eq + Hash + Copy> ShardedHashMap<K, ()> {
     #[inline]
     pub fn intern_ref<Q: ?Sized>(&self, value: &Q, make: impl FnOnce() -> K) -> K
@@ -160,13 +210,12 @@ impl<K: Eq + Hash + Copy> ShardedHashMap<K, ()> {
     {
         let hash = make_hash(value);
         let mut shard = self.lock_shard_by_hash(hash);
-        let entry = shard.raw_entry_mut().from_key_hashed_nocheck(hash, value);
 
-        match entry {
-            RawEntryMut::Occupied(e) => *e.key(),
-            RawEntryMut::Vacant(e) => {
+        match table_entry(&mut shard, hash, value) {
+            Entry::Occupied(e) => e.get().0,
+            Entry::Vacant(e) => {
                 let v = make();
-                e.insert_hashed_nocheck(hash, v, ());
+                e.insert((v, ()));
                 v
             }
         }
@@ -180,13 +229,12 @@ impl<K: Eq + Hash + Copy> ShardedHashMap<K, ()> {
     {
         let hash = make_hash(&value);
         let mut shard = self.lock_shard_by_hash(hash);
-        let entry = shard.raw_entry_mut().from_key_hashed_nocheck(hash, &value);
 
-        match entry {
-            RawEntryMut::Occupied(e) => *e.key(),
-            RawEntryMut::Vacant(e) => {
+        match table_entry(&mut shard, hash, &value) {
+            Entry::Occupied(e) => e.get().0,
+            Entry::Vacant(e) => {
                 let v = make(value);
-                e.insert_hashed_nocheck(hash, v, ());
+                e.insert((v, ()));
                 v
             }
         }
@@ -203,17 +251,30 @@ impl<K: Eq + Hash + Copy + IntoPointer> ShardedHashMap<K, ()> {
         let hash = make_hash(&value);
         let shard = self.lock_shard_by_hash(hash);
         let value = value.into_pointer();
-        shard.raw_entry().from_hash(hash, |entry| entry.into_pointer() == value).is_some()
+        shard.find(hash, |(k, ())| k.into_pointer() == value).is_some()
     }
 }
 
 #[inline]
-pub fn make_hash<K: Hash + ?Sized>(val: &K) -> u64 {
+fn make_hash<K: Hash + ?Sized>(val: &K) -> u64 {
     let mut state = FxHasher::default();
     val.hash(&mut state);
     state.finish()
 }
 
+#[inline]
+fn table_entry<'a, K, V, Q>(
+    table: &'a mut HashTable<(K, V)>,
+    hash: u64,
+    key: &Q,
+) -> Entry<'a, (K, V)>
+where
+    K: Hash + Borrow<Q>,
+    Q: ?Sized + Eq,
+{
+    table.entry(hash, move |(k, _)| k.borrow() == key, |(k, _)| make_hash(k))
+}
+
 /// Get a shard with a pre-computed hash value. If `get_shard_by_value` is
 /// ever used in combination with `get_shard_by_hash` on a single `Sharded`
 /// instance, then `hash` must be computed with `FxHasher`. Otherwise,
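For readers unfamiliar with `hashbrown::hash_table::HashTable`: unlike `HashMap`, it stores bare elements and leaves hashing to the caller, which is what lets `ShardedHashMap` hash a key once and reuse that hash for both shard selection and table probing. A small self-contained sketch of the API, using `std`'s `RandomState` in place of rustc's `FxHasher`:

```rust
use std::hash::{BuildHasher, RandomState};

use hashbrown::hash_table::{Entry, HashTable};

fn main() {
    let hasher = RandomState::new();
    let mut table: HashTable<(&str, u32)> = HashTable::new();

    // HashTable does not hash for you: every operation takes an explicit
    // hash plus an equality closure over the stored element.
    let key = "answer";
    let hash = hasher.hash_one(key);
    match table.entry(hash, |(k, _)| *k == key, |(k, _)| hasher.hash_one(k)) {
        Entry::Occupied(e) => println!("already present: {:?}", e.get()),
        Entry::Vacant(e) => {
            let _occupied = e.insert((key, 42));
        }
    }

    // Lookup follows the same hash-plus-equality pattern via `find`.
    let found = table.find(hash, |(k, _)| *k == key).map(|&(_, v)| v);
    assert_eq!(found, Some(42));
}
```

The third argument to `entry` re-hashes existing elements when the table grows, which is exactly the role of the `table_entry` helper's final closure above.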
@@ -88,7 +88,7 @@ mod mode {
 
     // Whether thread safety might be enabled.
     #[inline]
-    pub fn might_be_dyn_thread_safe() -> bool {
+    pub(super) fn might_be_dyn_thread_safe() -> bool {
         DYN_THREAD_SAFE_MODE.load(Ordering::Relaxed) != DYN_NOT_THREAD_SAFE
     }
 
@@ -46,7 +46,7 @@ pub fn parallel_guard<R>(f: impl FnOnce(&ParallelGuard) -> R) -> R {
     ret
 }
 
-pub fn serial_join<A, B, RA, RB>(oper_a: A, oper_b: B) -> (RA, RB)
+fn serial_join<A, B, RA, RB>(oper_a: A, oper_b: B) -> (RA, RB)
 where
     A: FnOnce() -> RA,
     B: FnOnce() -> RB,
@@ -7,7 +7,7 @@ use crate::stable_hasher::{HashStable, StableHasher};
 
 /// A tag type used in [`TaggedRef`] tests.
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub enum Tag2 {
+enum Tag2 {
     B00 = 0b00,
     B01 = 0b01,
     B10 = 0b10,
@@ -17,7 +17,6 @@
 #![feature(result_flattening)]
 #![feature(rustdoc_internals)]
 #![feature(try_blocks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::cmp::max;
@@ -6,7 +6,6 @@
 #![deny(rustdoc::invalid_codeblock_attributes)]
 #![doc(rust_logo)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 // This higher-order macro defines the error codes that are in use. It is used
@@ -4,7 +4,6 @@
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
 #![feature(type_alias_impl_trait)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::borrow::Cow;
@@ -25,7 +25,6 @@
 #![feature(trait_alias)]
 #![feature(try_blocks)]
 #![feature(yeet_expr)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 extern crate self as rustc_errors;
@@ -13,7 +13,6 @@
 #![feature(rustdoc_internals)]
 #![feature(try_blocks)]
 #![feature(yeet_expr)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 extern crate proc_macro as pm;
@@ -15,7 +15,6 @@
 #![allow(internal_features)]
 #![doc(rust_logo)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 mod accepted;
@@ -7,7 +7,6 @@
 #![feature(proc_macro_span)]
 #![feature(rustdoc_internals)]
 #![feature(track_path)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use proc_macro::TokenStream;
@@ -277,7 +277,6 @@
 )]
 #![doc(rust_logo)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::borrow::Cow;
@@ -14,7 +14,6 @@
 #![feature(never_type)]
 #![feature(rustc_attrs)]
 #![feature(variant_count)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 extern crate self as rustc_hir;
@@ -74,7 +74,6 @@ This API is completely unstable and subject to change.
 #![feature(slice_partition_dedup)]
 #![feature(try_blocks)]
 #![feature(unwrap_infallible)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 // These are used by Clippy.
@@ -3,7 +3,6 @@
 
 // tidy-alphabetical-start
 #![recursion_limit = "256"]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::cell::Cell;
@@ -9,7 +9,6 @@
 #![feature(let_chains)]
 #![feature(never_type)]
 #![feature(try_blocks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 mod _match;
@@ -8,7 +8,6 @@
 #![doc(rust_logo)]
 #![feature(file_buffered)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 mod assert_dep_graph;
@@ -4,7 +4,6 @@
 #![cfg_attr(feature = "nightly", feature(extend_one, step_trait, test))]
 #![cfg_attr(feature = "nightly", feature(new_range_api))]
 #![cfg_attr(feature = "nightly", feature(new_zeroed_alloc))]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub mod bit_set;
@@ -305,7 +305,7 @@ impl Parse for Newtype {
     }
 }
 
-pub fn newtype(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+pub(crate) fn newtype(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
     let input = parse_macro_input!(input as Newtype);
     input.0.into()
 }
@@ -24,7 +24,6 @@
 #![feature(let_chains)]
 #![feature(rustdoc_internals)]
 #![recursion_limit = "512"] // For rustdoc
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 mod errors;
@@ -4,7 +4,6 @@
 #![feature(iter_intersperse)]
 #![feature(let_chains)]
 #![feature(try_blocks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 mod callbacks;
@@ -23,7 +23,6 @@
 // We want to be able to build this crate with a stable compiler,
 // so no `#![feature]` attributes should be added.
 #![deny(unstable_features)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 mod cursor;
@@ -33,7 +33,6 @@
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
 #![feature(try_blocks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 mod async_closures;
@@ -1,7 +1,3 @@
-// tidy-alphabetical-start
-#![warn(unreachable_pub)]
-// tidy-alphabetical-end
-
 use rustc_abi::ExternAbi;
 use rustc_ast::AttrId;
 use rustc_ast::attr::AttributeExt;
@@ -4,7 +4,6 @@
 #![doc(rust_logo)]
 #![feature(extern_types)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::cell::RefCell;
@@ -6,7 +6,6 @@
 #![feature(proc_macro_diagnostic)]
 #![feature(proc_macro_span)]
 #![feature(proc_macro_tracked_env)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use proc_macro::TokenStream;
@@ -16,7 +16,6 @@
 #![feature(proc_macro_internals)]
 #![feature(rustdoc_internals)]
 #![feature(trusted_len)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 extern crate proc_macro;
@@ -62,7 +62,6 @@
 #![feature(try_trait_v2_yeet)]
 #![feature(type_alias_impl_trait)]
 #![feature(yeet_expr)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 #[cfg(test)]
@@ -452,12 +452,7 @@ impl<'tcx> TyCtxt<'tcx> {
         }
         let id = self.alloc_map.reserve();
         debug!("creating alloc {:?} with id {id:?}", alloc_salt.0);
-        let had_previous = self
-            .alloc_map
-            .to_alloc
-            .lock_shard_by_value(&id)
-            .insert(id, alloc_salt.0.clone())
-            .is_some();
+        let had_previous = self.alloc_map.to_alloc.insert(id, alloc_salt.0.clone()).is_some();
         // We just reserved, so should always be unique.
         assert!(!had_previous);
         dedup.insert(alloc_salt, id);
@@ -510,7 +505,7 @@ impl<'tcx> TyCtxt<'tcx> {
     /// local dangling pointers and allocations in constants/statics.
     #[inline]
     pub fn try_get_global_alloc(self, id: AllocId) -> Option<GlobalAlloc<'tcx>> {
-        self.alloc_map.to_alloc.lock_shard_by_value(&id).get(&id).cloned()
+        self.alloc_map.to_alloc.get(&id)
     }
 
     #[inline]
@@ -529,9 +524,7 @@ impl<'tcx> TyCtxt<'tcx> {
     /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
     /// call this function twice, even with the same `Allocation` will ICE the compiler.
     pub fn set_alloc_id_memory(self, id: AllocId, mem: ConstAllocation<'tcx>) {
-        if let Some(old) =
-            self.alloc_map.to_alloc.lock_shard_by_value(&id).insert(id, GlobalAlloc::Memory(mem))
-        {
+        if let Some(old) = self.alloc_map.to_alloc.insert(id, GlobalAlloc::Memory(mem)) {
             bug!("tried to set allocation ID {id:?}, but it was already existing as {old:#?}");
         }
     }
@@ -539,11 +532,8 @@ impl<'tcx> TyCtxt<'tcx> {
     /// Freezes an `AllocId` created with `reserve` by pointing it at a static item. Trying to
     /// call this function twice, even with the same `DefId` will ICE the compiler.
     pub fn set_nested_alloc_id_static(self, id: AllocId, def_id: LocalDefId) {
-        if let Some(old) = self
-            .alloc_map
-            .to_alloc
-            .lock_shard_by_value(&id)
-            .insert(id, GlobalAlloc::Static(def_id.to_def_id()))
+        if let Some(old) =
+            self.alloc_map.to_alloc.insert(id, GlobalAlloc::Static(def_id.to_def_id()))
         {
             bug!("tried to set allocation ID {id:?}, but it was already existing as {old:#?}");
         }
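These call sites work because `ShardedHashMap::get` clones the value out while the shard lock is held, so no lock guard ever escapes the call. The same shape with a plain `Mutex`, as a sketch only — `AllocMap` here is a hypothetical illustration, not rustc's type:

```rust
use std::collections::HashMap;
use std::sync::Mutex;

struct AllocMap<V> {
    inner: Mutex<HashMap<u64, V>>,
}

impl<V: Clone> AllocMap<V> {
    // Lock, clone the value out, and release the lock before returning:
    // the shape of `ShardedHashMap::get`, minus the sharding.
    fn get(&self, id: u64) -> Option<V> {
        self.inner.lock().unwrap().get(&id).cloned()
    }
}
```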
@@ -2336,8 +2336,8 @@ macro_rules! sty_debug_print {
                 $(let mut $variant = total;)*
 
                 for shard in tcx.interners.type_.lock_shards() {
-                    let types = shard.keys();
-                    for &InternedInSet(t) in types {
+                    let types = shard.iter();
+                    for &(InternedInSet(t), ()) in types {
                         let variant = match t.internee {
                             ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
                             ty::Float(..) | ty::Str | ty::Never => continue,
@@ -9,7 +9,6 @@
 #![feature(if_let_guard)]
 #![feature(let_chains)]
 #![feature(try_blocks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 // The `builder` module used to be named `build`, but that was causing GitHub's
@@ -8,7 +8,6 @@
 #![feature(let_chains)]
 #![feature(never_type)]
 #![feature(try_blocks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use rustc_middle::ty;
@@ -13,7 +13,6 @@
 #![feature(never_type)]
 #![feature(try_blocks)]
 #![feature(yeet_expr)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use hir::ConstContext;
@@ -5,7 +5,6 @@
 #![feature(if_let_guard)]
 #![feature(impl_trait_in_assoc_type)]
 #![feature(let_chains)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use rustc_hir::lang_items::LangItem;
@@ -12,21 +12,15 @@ fn adjust_activity_to_abi<'tcx>(tcx: TyCtxt<'tcx>, fn_ty: Ty<'tcx>, da: &mut Vec
     if !matches!(fn_ty.kind(), ty::FnDef(..)) {
         bug!("expected fn def for autodiff, got {:?}", fn_ty);
     }
-    let fnc_binder: ty::Binder<'_, ty::FnSig<'_>> = fn_ty.fn_sig(tcx);
-
-    // If rustc compiles the unmodified primal, we know that this copy of the function
-    // also has correct lifetimes. We know that Enzyme won't free the shadow too early
-    // (or actually at all), so let's strip lifetimes when computing the layout.
-    let x = tcx.instantiate_bound_regions_with_erased(fnc_binder);
+
+    // We don't actually pass the types back into the type system.
+    // All we do is decide how to handle the arguments.
+    let sig = fn_ty.fn_sig(tcx).skip_binder();
 
     let mut new_activities = vec![];
     let mut new_positions = vec![];
-    for (i, ty) in x.inputs().iter().enumerate() {
+    for (i, ty) in sig.inputs().iter().enumerate() {
         if let Some(inner_ty) = ty.builtin_deref(true) {
             if ty.is_fn_ptr() {
                 // FIXME(ZuseZ4): add a nicer error, or just figure out how to support them,
                 // since Enzyme itself can handle them.
                 tcx.dcx().err("function pointers are currently not supported in autodiff");
             }
             if inner_ty.is_slice() {
                 // We know that the length will be passed as extra arg.
                 if !da.is_empty() {
@@ -6,7 +6,6 @@
 
 // tidy-alphabetical-start
 #![allow(rustc::usage_of_type_ir_inherent)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub mod canonicalizer;
@@ -271,12 +271,39 @@ where
     /// and will need to clearly document it in the rustc-dev-guide before
     /// stabilization.
     pub(super) fn step_kind_for_source(&self, source: GoalSource) -> PathKind {
-        match (self.current_goal_kind, source) {
-            (_, GoalSource::NormalizeGoal(step_kind)) => step_kind,
-            (CurrentGoalKind::CoinductiveTrait, GoalSource::ImplWhereBound) => {
-                PathKind::Coinductive
-            }
-            _ => PathKind::Inductive,
+        match source {
+            // We treat these goals as unknown for now. It is likely that most miscellaneous
+            // nested goals will be converted to an inductive variant in the future.
+            //
+            // Having unknown cycles is always the safer option, as changing that to either
+            // succeed or hard error is backwards compatible. If we incorrectly treat a cycle
+            // as inductive even though it should not be, it may be unsound during coherence and
+            // fixing it may cause inference breakage or introduce ambiguity.
+            GoalSource::Misc => PathKind::Unknown,
+            GoalSource::NormalizeGoal(path_kind) => path_kind,
+            GoalSource::ImplWhereBound => {
+                // We currently only consider a cycle coinductive if it steps
+                // into a where-clause of a coinductive trait.
+                //
+                // We probably want to make all traits coinductive in the future,
+                // so we treat cycles involving their where-clauses as ambiguous.
+                if let CurrentGoalKind::CoinductiveTrait = self.current_goal_kind {
+                    PathKind::Coinductive
+                } else {
+                    PathKind::Unknown
+                }
+            }
+            // Relating types is always unproductive. If we were to map proof trees to
+            // corecursive functions as explained in #136824, relating types never
+            // introduces a constructor which could cause the recursion to be guarded.
+            GoalSource::TypeRelating => PathKind::Inductive,
+            // Instantiating a higher ranked goal can never cause the recursion to be
+            // guarded and is therefore unproductive.
+            GoalSource::InstantiateHigherRanked => PathKind::Inductive,
+            // These goal sources are likely unproductive and can be changed to
+            // `PathKind::Inductive`. Keeping them as unknown until we're confident
+            // about this and have an example where it is necessary.
+            GoalSource::AliasBoundConstCondition | GoalSource::AliasWellFormed => PathKind::Unknown,
         }
     }
 
@@ -606,7 +633,7 @@ where
 
         let (NestedNormalizationGoals(nested_goals), _, certainty) = self.evaluate_goal_raw(
             GoalEvaluationKind::Nested,
-            GoalSource::Misc,
+            GoalSource::TypeRelating,
             unconstrained_goal,
         )?;
         // Add the nested goals from normalization to our own nested goals.
@@ -683,7 +710,7 @@ where
     pub(super) fn add_normalizes_to_goal(&mut self, mut goal: Goal<I, ty::NormalizesTo<I>>) {
         goal.predicate = goal.predicate.fold_with(&mut ReplaceAliasWithInfer::new(
             self,
-            GoalSource::Misc,
+            GoalSource::TypeRelating,
             goal.param_env,
         ));
         self.inspect.add_normalizes_to_goal(self.delegate, self.max_input_universe, goal);
@@ -939,7 +966,15 @@ where
         rhs: T,
     ) -> Result<(), NoSolution> {
         let goals = self.delegate.relate(param_env, lhs, variance, rhs, self.origin_span)?;
-        self.add_goals(GoalSource::Misc, goals);
+        if cfg!(debug_assertions) {
+            for g in goals.iter() {
+                match g.predicate.kind().skip_binder() {
+                    ty::PredicateKind::Subtype { .. } | ty::PredicateKind::AliasRelate(..) => {}
+                    p => unreachable!("unexpected nested goal in `relate`: {p:?}"),
+                }
+            }
+        }
+        self.add_goals(GoalSource::TypeRelating, goals);
         Ok(())
     }
 
@@ -421,7 +421,7 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> {
         self.add_goal(
             delegate,
             max_input_universe,
-            GoalSource::Misc,
+            GoalSource::TypeRelating,
             goal.with(delegate.cx(), goal.predicate),
         );
     }
@@ -313,7 +313,9 @@ where
                     ty::AliasRelationDirection::Equate,
                 ),
             );
-            self.add_goal(GoalSource::Misc, alias_relate_goal);
+            // We normalize the self type to be able to relate it with
+            // types from candidates.
+            self.add_goal(GoalSource::TypeRelating, alias_relate_goal);
             self.try_evaluate_added_goals()?;
             Ok(self.resolve_vars_if_possible(normalized_term))
         } else {
@@ -24,7 +24,8 @@ where
                 ty::AliasRelationDirection::Equate,
             ),
         );
-        self.add_goal(GoalSource::Misc, goal);
+        // A projection goal holds if the alias is equal to the expected term.
+        self.add_goal(GoalSource::TypeRelating, goal);
         self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
     }
 }
@@ -1,9 +1,9 @@
 use std::convert::Infallible;
 use std::marker::PhantomData;
 
-use rustc_type_ir::Interner;
 use rustc_type_ir::search_graph::{self, PathKind};
-use rustc_type_ir::solve::{CanonicalInput, Certainty, QueryResult};
+use rustc_type_ir::solve::{CanonicalInput, Certainty, NoSolution, QueryResult};
+use rustc_type_ir::{Interner, TypingMode};
 
 use super::inspect::ProofTreeBuilder;
 use super::{FIXPOINT_STEP_LIMIT, has_no_inference_or_external_constraints};
@@ -47,7 +47,24 @@ where
     ) -> QueryResult<I> {
         match kind {
             PathKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes),
-            PathKind::Inductive => response_no_constraints(cx, input, Certainty::overflow(false)),
+            PathKind::Unknown => response_no_constraints(cx, input, Certainty::overflow(false)),
+            // Even though we know these cycles to be unproductive, we still return
+            // overflow during coherence. This is both as we are not 100% confident in
+            // the implementation yet and any incorrect errors would be unsound there.
+            // The affected cases are also fairly artificial and not necessarily desirable
+            // so keeping this as ambiguity is fine for now.
+            //
+            // See `tests/ui/traits/next-solver/cycles/unproductive-in-coherence.rs` for an
+            // example where this would matter. We likely should change these cycles to `NoSolution`
+            // even in coherence once this is a bit more settled.
+            PathKind::Inductive => match input.typing_mode {
+                TypingMode::Coherence => {
+                    response_no_constraints(cx, input, Certainty::overflow(false))
+                }
+                TypingMode::Analysis { .. }
+                | TypingMode::PostBorrowckAnalysis { .. }
+                | TypingMode::PostAnalysis => Err(NoSolution),
+            },
         }
     }
 
@@ -57,12 +74,7 @@ where
         input: CanonicalInput<I>,
         result: QueryResult<I>,
     ) -> bool {
-        match kind {
-            PathKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes) == result,
-            PathKind::Inductive => {
-                response_no_constraints(cx, input, Certainty::overflow(false)) == result
-            }
-        }
+        Self::initial_provisional_result(cx, kind, input) == result
     }
 
     fn on_stack_overflow(
@@ -13,7 +13,6 @@
 #![feature(iter_intersperse)]
 #![feature(let_chains)]
 #![feature(string_from_utf8_lossy_owned)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::path::{Path, PathBuf};
@@ -13,7 +13,6 @@
     html_playground_url = "https://play.rust-lang.org/",
     test(attr(deny(warnings)))
 )]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub use Alignment::*;
@@ -13,7 +13,6 @@
 #![feature(map_try_insert)]
 #![feature(rustdoc_internals)]
 #![feature(try_blocks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use rustc_middle::query::Providers;
@@ -6,7 +6,6 @@
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
 #![cfg_attr(feature = "rustc", feature(let_chains))]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub mod constructor;
@@ -5,7 +5,7 @@ use rustc_pattern_analysis::usefulness::{PlaceValidity, UsefulnessReport};
 use rustc_pattern_analysis::{MatchArm, PatCx, PrivateUninhabitedField};
 
 /// Sets up `tracing` for easier debugging. Tries to look like the `rustc` setup.
-pub fn init_tracing() {
+fn init_tracing() {
     use tracing_subscriber::Layer;
     use tracing_subscriber::layer::SubscriberExt;
     use tracing_subscriber::util::SubscriberInitExt;
@@ -24,7 +24,7 @@ pub fn init_tracing() {
 /// A simple set of types.
 #[allow(dead_code)]
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
-pub enum Ty {
+pub(super) enum Ty {
     /// Booleans
     Bool,
     /// 8-bit unsigned integers
@@ -41,7 +41,7 @@ pub enum Ty {
 
 /// The important logic.
 impl Ty {
-    pub fn sub_tys(&self, ctor: &Constructor<Cx>) -> Vec<Self> {
+    pub(super) fn sub_tys(&self, ctor: &Constructor<Cx>) -> Vec<Self> {
         use Constructor::*;
         match (ctor, *self) {
             (Struct, Ty::Tuple(tys)) => tys.iter().copied().collect(),
@@ -63,7 +63,7 @@ impl Ty {
         }
     }
 
-    pub fn ctor_set(&self) -> ConstructorSet<Cx> {
+    fn ctor_set(&self) -> ConstructorSet<Cx> {
         match *self {
             Ty::Bool => ConstructorSet::Bool,
             Ty::U8 => ConstructorSet::Integers {
@@ -104,7 +104,7 @@ impl Ty {
         }
     }
 
-    pub fn write_variant_name(
+    fn write_variant_name(
         &self,
         f: &mut std::fmt::Formatter<'_>,
         ctor: &Constructor<Cx>,
@@ -120,7 +120,7 @@ impl Ty {
 }
 
 /// Compute usefulness in our simple context (and set up tracing for easier debugging).
-pub fn compute_match_usefulness<'p>(
+pub(super) fn compute_match_usefulness<'p>(
     arms: &[MatchArm<'p, Cx>],
     ty: Ty,
     scrut_validity: PlaceValidity,
@@ -137,7 +137,7 @@ pub fn compute_match_usefulness<'p>(
 }
 
 #[derive(Debug)]
-pub struct Cx;
+pub(super) struct Cx;
 
 /// The context for pattern analysis. Forwards anything interesting to `Ty` methods.
 impl PatCx for Cx {
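A minimal sketch of the kind of setup `init_tracing` performs, assuming the `tracing-subscriber` crate with its `env-filter` feature enabled (the real function additionally mimics rustc's tree formatter):

```rust
use tracing_subscriber::Layer;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;

fn init_tracing() {
    // Filter spans/events via RUST_LOG-style env configuration,
    // then install the subscriber globally.
    let filter = tracing_subscriber::EnvFilter::from_default_env();
    tracing_subscriber::registry()
        .with(tracing_subscriber::fmt::layer().with_filter(filter))
        .init();
}
```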
@@ -7,7 +7,6 @@
 #![feature(let_chains)]
 #![feature(rustdoc_internals)]
 #![feature(try_blocks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 mod errors;
@@ -9,7 +9,6 @@
 #![feature(min_specialization)]
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use rustc_data_structures::stable_hasher::HashStable;
@@ -1,5 +1,4 @@
 use std::assert_matches::assert_matches;
-use std::collections::hash_map::Entry;
 use std::fmt::Debug;
 use std::hash::Hash;
 use std::marker::PhantomData;
@@ -9,7 +8,7 @@ use std::sync::atomic::{AtomicU32, Ordering};
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::profiling::{QueryInvocationId, SelfProfilerRef};
-use rustc_data_structures::sharded::{self, Sharded};
+use rustc_data_structures::sharded::{self, ShardedHashMap};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::sync::{AtomicU64, Lock};
 use rustc_data_structures::unord::UnordMap;
@@ -619,7 +618,7 @@ impl<D: Deps> DepGraphData<D> {
         if let Some(prev_index) = self.previous.node_to_index_opt(dep_node) {
             self.current.prev_index_to_index.lock()[prev_index]
         } else {
-            self.current.new_node_to_index.lock_shard_by_value(dep_node).get(dep_node).copied()
+            self.current.new_node_to_index.get(dep_node)
         }
     }
 
@@ -1048,7 +1047,7 @@ rustc_index::newtype_index! {
 /// first, and `data` second.
 pub(super) struct CurrentDepGraph<D: Deps> {
     encoder: GraphEncoder<D>,
-    new_node_to_index: Sharded<FxHashMap<DepNode, DepNodeIndex>>,
+    new_node_to_index: ShardedHashMap<DepNode, DepNodeIndex>,
     prev_index_to_index: Lock<IndexVec<SerializedDepNodeIndex, Option<DepNodeIndex>>>,
 
     /// This is used to verify that fingerprints do not change between the creation of a node
@@ -1117,12 +1116,9 @@ impl<D: Deps> CurrentDepGraph<D> {
                 profiler,
                 previous,
             ),
-            new_node_to_index: Sharded::new(|| {
-                FxHashMap::with_capacity_and_hasher(
-                    new_node_count_estimate / sharded::shards(),
-                    Default::default(),
-                )
-            }),
+            new_node_to_index: ShardedHashMap::with_capacity(
+                new_node_count_estimate / sharded::shards(),
+            ),
             prev_index_to_index: Lock::new(IndexVec::from_elem_n(None, prev_graph_node_count)),
             anon_id_seed,
             #[cfg(debug_assertions)]
@@ -1152,14 +1148,9 @@ impl<D: Deps> CurrentDepGraph<D> {
         edges: EdgesVec,
         current_fingerprint: Fingerprint,
     ) -> DepNodeIndex {
-        let dep_node_index = match self.new_node_to_index.lock_shard_by_value(&key).entry(key) {
-            Entry::Occupied(entry) => *entry.get(),
-            Entry::Vacant(entry) => {
-                let dep_node_index = self.encoder.send(key, current_fingerprint, edges);
-                entry.insert(dep_node_index);
-                dep_node_index
-            }
-        };
+        let dep_node_index = self
+            .new_node_to_index
+            .get_or_insert_with(key, || self.encoder.send(key, current_fingerprint, edges));
 
         #[cfg(debug_assertions)]
         self.record_edge(dep_node_index, key, current_fingerprint);
@@ -1257,7 +1248,7 @@ impl<D: Deps> CurrentDepGraph<D> {
     ) {
         let node = &prev_graph.index_to_node(prev_index);
         debug_assert!(
-            !self.new_node_to_index.lock_shard_by_value(node).contains_key(node),
+            !self.new_node_to_index.get(node).is_some(),
            "node from previous graph present in new node collection"
        );
    }
@@ -1382,7 +1373,7 @@ fn panic_on_forbidden_read<D: Deps>(data: &DepGraphData<D>, dep_node_index: DepN
     if dep_node.is_none() {
         // Try to find it among the new nodes
         for shard in data.current.new_node_to_index.lock_shards() {
-            if let Some((node, _)) = shard.iter().find(|(_, index)| **index == dep_node_index) {
+            if let Some((node, _)) = shard.iter().find(|(_, index)| *index == dep_node_index) {
                 dep_node = Some(*node);
                 break;
             }
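The rewritten `intern_node` collapses the old lock-then-`entry` dance into a single `get_or_insert_with` call. The same idiom, sketched with a plain `Mutex<HashMap>` standing in for the sharded map (hypothetical `Interner` type, for illustration only):

```rust
use std::collections::HashMap;
use std::sync::Mutex;

struct Interner {
    map: Mutex<HashMap<&'static str, u32>>,
}

impl Interner {
    // Compute the value only if the key is absent, all under one lock:
    // the shape of `ShardedHashMap::get_or_insert_with`, minus sharding.
    fn get_or_insert_with(&self, key: &'static str, make: impl FnOnce() -> u32) -> u32 {
        *self.map.lock().unwrap().entry(key).or_insert_with(make)
    }
}

fn main() {
    let interner = Interner { map: Mutex::new(HashMap::new()) };
    let a = interner.get_or_insert_with("k", || 1);
    let b = interner.get_or_insert_with("k", || 2); // closure not called
    assert_eq!((a, b), (1, 1));
}
```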
@@ -3,10 +3,8 @@
 #![feature(assert_matches)]
 #![feature(core_intrinsics)]
 #![feature(dropck_eyepatch)]
-#![feature(hash_raw_entry)]
 #![feature(let_chains)]
 #![feature(min_specialization)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub mod cache;
@@ -2,8 +2,7 @@ use std::fmt::Debug;
 use std::hash::Hash;
 use std::sync::OnceLock;
 
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sharded::{self, Sharded};
+use rustc_data_structures::sharded::ShardedHashMap;
 pub use rustc_data_structures::vec_cache::VecCache;
 use rustc_hir::def_id::LOCAL_CRATE;
 use rustc_index::Idx;
@@ -36,7 +35,7 @@ pub trait QueryCache: Sized {
 /// In-memory cache for queries whose keys aren't suitable for any of the
 /// more specialized kinds of cache. Backed by a sharded hashmap.
 pub struct DefaultCache<K, V> {
-    cache: Sharded<FxHashMap<K, (V, DepNodeIndex)>>,
+    cache: ShardedHashMap<K, (V, DepNodeIndex)>,
 }
 
 impl<K, V> Default for DefaultCache<K, V> {
@@ -55,19 +54,14 @@ where
 
     #[inline(always)]
     fn lookup(&self, key: &K) -> Option<(V, DepNodeIndex)> {
-        let key_hash = sharded::make_hash(key);
-        let lock = self.cache.lock_shard_by_hash(key_hash);
-        let result = lock.raw_entry().from_key_hashed_nocheck(key_hash, key);
-
-        if let Some((_, value)) = result { Some(*value) } else { None }
+        self.cache.get(key)
     }
 
     #[inline]
     fn complete(&self, key: K, value: V, index: DepNodeIndex) {
-        let mut lock = self.cache.lock_shard_by_value(&key);
         // We may be overwriting another value. This is all right, since the dep-graph
         // will check that the fingerprint matches.
-        lock.insert(key, (value, index));
+        self.cache.insert(key, (value, index));
     }
 
     fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
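Both `lookup` and `complete` now defer entirely to the sharded map, which hashes the key once and uses that hash both to pick a shard and to probe it. A simplified sketch of that design using only `std` types — the real implementation uses `FxHasher`, cache-aligned per-shard locks, and the hash-reusing `HashTable` probe shown earlier:

```rust
use std::collections::HashMap;
use std::hash::{BuildHasher, Hash, RandomState};
use std::sync::Mutex;

const SHARDS: usize = 32;

struct ShardedMap<K, V> {
    hasher: RandomState,
    shards: Vec<Mutex<HashMap<K, V>>>,
}

impl<K: Hash + Eq, V: Clone> ShardedMap<K, V> {
    fn new() -> Self {
        ShardedMap {
            hasher: RandomState::new(),
            shards: (0..SHARDS).map(|_| Mutex::new(HashMap::new())).collect(),
        }
    }

    fn get(&self, key: &K) -> Option<V> {
        // Hash once; the hash selects the shard (and, in the real
        // HashTable-backed version, also probes it without re-hashing).
        let hash = self.hasher.hash_one(key);
        let shard = &self.shards[hash as usize % SHARDS];
        // Clone the value out so the shard lock is released on return.
        shard.lock().unwrap().get(key).cloned()
    }

    fn insert(&self, key: K, value: V) -> Option<V> {
        let hash = self.hasher.hash_one(&key);
        let shard = &self.shards[hash as usize % SHARDS];
        shard.lock().unwrap().insert(key, value)
    }
}

fn main() {
    let map = ShardedMap::new();
    map.insert("k", 1);
    assert_eq!(map.get(&"k"), Some(1));
}
```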
@@ -20,7 +20,6 @@
 #![feature(let_chains)]
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::cell::{Cell, RefCell};
@@ -6,7 +6,6 @@
 // tidy-alphabetical-start
 #![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(let_chains)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub mod cfi;
@@ -14,7 +14,6 @@
 #![feature(min_specialization)]
 #![feature(never_type)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub use self::serialize::{Decodable, Decoder, Encodable, Encoder};
@@ -7,7 +7,6 @@
 // To generate CodegenOptionsTargetModifiers and UnstableOptionsTargetModifiers enums
 // with macro_rules, it is necessary to use recursive mechanic ("Incremental TT Munchers").
 #![recursion_limit = "256"]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub mod errors;
@@ -16,7 +16,6 @@
 )]
 #![doc(rust_logo)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub mod rustc_internal;
@@ -32,7 +32,6 @@
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
 #![feature(slice_as_chunks)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 // The code produced by the `Encodable`/`Decodable` derive macros refer to
@@ -94,7 +94,6 @@
 #![doc(rust_logo)]
 #![feature(let_chains)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use rustc_hir::def::DefKind;
@@ -16,7 +16,6 @@
 #![feature(let_chains)]
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::path::{Path, PathBuf};
@@ -31,7 +31,6 @@
 #![feature(unwrap_infallible)]
 #![feature(yeet_expr)]
 #![recursion_limit = "512"] // For rustdoc
-#![warn(unreachable_pub)] // For rustdoc
 // tidy-alphabetical-end
 
 pub mod error_reporting;
@@ -440,7 +440,7 @@ impl<'tcx> ProofTreeVisitor<'tcx> for BestObligation<'tcx> {
             match (child_mode, nested_goal.source()) {
                 (
                     ChildMode::Trait(_) | ChildMode::Host(_),
-                    GoalSource::Misc | GoalSource::NormalizeGoal(_),
+                    GoalSource::Misc | GoalSource::TypeRelating | GoalSource::NormalizeGoal(_),
                 ) => {
                     continue;
                 }
@@ -2,7 +2,6 @@
 
 // tidy-alphabetical-start
 #![recursion_limit = "256"]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 mod codegen;
@@ -1,7 +1,6 @@
 // tidy-alphabetical-start
 #![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(never_type)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 pub(crate) use rustc_data_structures::fx::{FxIndexMap as Map, FxIndexSet as Set};
@@ -17,7 +17,6 @@
 #![feature(let_chains)]
 #![feature(never_type)]
 #![feature(rustdoc_internals)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use rustc_middle::query::Providers;
@@ -6,7 +6,6 @@
     feature(associated_type_defaults, never_type, rustc_attrs, negative_impls)
 )]
 #![cfg_attr(feature = "nightly", allow(internal_features))]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 extern crate self as rustc_type_ir;
@@ -13,6 +13,7 @@
 /// behavior as long as the resulting behavior is still correct.
 use std::cmp::Ordering;
 use std::collections::BTreeMap;
+use std::collections::hash_map::Entry;
 use std::fmt::Debug;
 use std::hash::Hash;
 use std::marker::PhantomData;
@@ -20,7 +21,7 @@ use std::marker::PhantomData;
 use derive_where::derive_where;
 use rustc_index::{Idx, IndexVec};
 #[cfg(feature = "nightly")]
-use rustc_macros::HashStable_NoContext;
+use rustc_macros::{HashStable_NoContext, TyDecodable, TyEncodable};
 use tracing::debug;
 
 use crate::data_structures::HashMap;
@@ -111,21 +112,35 @@ pub trait Delegate {
 /// In the initial iteration of a cycle, we do not yet have a provisional
 /// result. In that case we return an initial provisional result depending
 /// on the kind of cycle.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
-#[cfg_attr(feature = "nightly", derive(HashStable_NoContext))]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[cfg_attr(feature = "nightly", derive(TyDecodable, TyEncodable, HashStable_NoContext))]
 pub enum PathKind {
-    Coinductive,
+    /// A path consisting of only inductive/unproductive steps. Their initial
+    /// provisional result is `Err(NoSolution)`. We currently treat them as
+    /// `PathKind::Unknown` during coherence until we're fully confident in
+    /// our approach.
+    Inductive,
+    /// A path which is not coinductive right now, but which we may want
+    /// to change to be coinductive in the future. We return an ambiguous
+    /// result in this case to prevent people from relying on this.
+    Unknown,
+    /// A path with at least one coinductive step. Such cycles hold.
+    Coinductive,
 }
 
 impl PathKind {
+    /// Returns the path kind when merging `self` with `rest`.
+    ///
+    /// Given an inductive path `self` and a coinductive path `rest`,
+    /// the path `self -> rest` would be coinductive.
+    ///
+    /// This operation represents an ordering and would be equivalent
+    /// to `max(self, rest)`.
     fn extend(self, rest: PathKind) -> PathKind {
-        match self {
-            PathKind::Coinductive => PathKind::Coinductive,
-            PathKind::Inductive => rest,
+        match (self, rest) {
+            (PathKind::Coinductive, _) | (_, PathKind::Coinductive) => PathKind::Coinductive,
+            (PathKind::Unknown, _) | (_, PathKind::Unknown) => PathKind::Unknown,
+            (PathKind::Inductive, PathKind::Inductive) => PathKind::Inductive,
        }
    }
 }
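Since the new `extend` is documented as being equivalent to `max(self, rest)`, the same truth table falls out of a derived `Ord` on a standalone copy of the enum (illustrative only; the real type does not derive `Ord`):

```rust
// Variant order matters: derived Ord gives Inductive < Unknown < Coinductive.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum PathKind {
    Inductive,
    Unknown,
    Coinductive,
}

fn extend(a: PathKind, b: PathKind) -> PathKind {
    a.max(b)
}

fn main() {
    assert_eq!(extend(PathKind::Inductive, PathKind::Unknown), PathKind::Unknown);
    assert_eq!(extend(PathKind::Unknown, PathKind::Coinductive), PathKind::Coinductive);
    assert_eq!(extend(PathKind::Inductive, PathKind::Inductive), PathKind::Inductive);
}
```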
@@ -159,9 +174,6 @@ impl UsageKind {
             }
         }
     }
-    fn and_merge(&mut self, other: impl Into<Self>) {
-        *self = self.merge(other);
-    }
 }
 
 /// For each goal we track whether the paths from this goal
@@ -297,7 +309,7 @@ impl CycleHeads {
 
         let path_from_entry = match step_kind {
             PathKind::Coinductive => AllPathsToHeadCoinductive::Yes,
-            PathKind::Inductive => path_from_entry,
+            PathKind::Unknown | PathKind::Inductive => path_from_entry,
         };
 
         self.insert(head, path_from_entry);
@@ -305,6 +317,63 @@ impl CycleHeads {
     }
 }
 
+bitflags::bitflags! {
+    /// Tracks how nested goals have been accessed. This is necessary to disable
+    /// global cache entries if computing them would otherwise result in a cycle or
+    /// access a provisional cache entry.
+    #[derive(Debug, Clone, Copy)]
+    pub struct PathsToNested: u8 {
+        /// The initial value when adding a goal to its own nested goals.
+        const EMPTY = 1 << 0;
+        const INDUCTIVE = 1 << 1;
+        const UNKNOWN = 1 << 2;
+        const COINDUCTIVE = 1 << 3;
+    }
+}
+impl From<PathKind> for PathsToNested {
+    fn from(path: PathKind) -> PathsToNested {
+        match path {
+            PathKind::Inductive => PathsToNested::INDUCTIVE,
+            PathKind::Unknown => PathsToNested::UNKNOWN,
+            PathKind::Coinductive => PathsToNested::COINDUCTIVE,
+        }
+    }
+}
+impl PathsToNested {
+    /// The implementation of this function is kind of ugly. We check whether
+    /// there currently exist 'weaker' paths in the set, if so we upgrade these
+    /// paths to at least `path`.
+    #[must_use]
+    fn extend_with(mut self, path: PathKind) -> Self {
+        match path {
+            PathKind::Inductive => {
+                if self.intersects(PathsToNested::EMPTY) {
+                    self.remove(PathsToNested::EMPTY);
+                    self.insert(PathsToNested::INDUCTIVE);
+                }
+            }
+            PathKind::Unknown => {
+                if self.intersects(PathsToNested::EMPTY | PathsToNested::INDUCTIVE) {
+                    self.remove(PathsToNested::EMPTY | PathsToNested::INDUCTIVE);
+                    self.insert(PathsToNested::UNKNOWN);
+                }
+            }
+            PathKind::Coinductive => {
+                if self.intersects(
+                    PathsToNested::EMPTY | PathsToNested::INDUCTIVE | PathsToNested::UNKNOWN,
+                ) {
+                    self.remove(
+                        PathsToNested::EMPTY | PathsToNested::INDUCTIVE | PathsToNested::UNKNOWN,
+                    );
+                    self.insert(PathsToNested::COINDUCTIVE);
+                }
+            }
+        }
+
+        self
+    }
+}
+
 /// The nested goals of each stack entry and the path from the
 /// stack entry to that nested goal.
 ///
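A runnable sketch of the `extend_with` upgrade idea on a standalone copy of the type, using the same `bitflags` crate the real code does. `upgrade_weaker` is a hypothetical helper that factors out the repeated pattern in the three match arms above:

```rust
bitflags::bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct PathsToNested: u8 {
        const EMPTY = 1 << 0;
        const INDUCTIVE = 1 << 1;
        const UNKNOWN = 1 << 2;
        const COINDUCTIVE = 1 << 3;
    }
}

impl PathsToNested {
    // Upgrade every strictly weaker path currently in the set to `stronger`.
    fn upgrade_weaker(mut self, weaker: Self, stronger: Self) -> Self {
        if self.intersects(weaker) {
            self.remove(weaker);
            self.insert(stronger);
        }
        self
    }
}

fn main() {
    // Extending an EMPTY path with an unknown step yields UNKNOWN.
    let paths = PathsToNested::EMPTY.upgrade_weaker(
        PathsToNested::EMPTY | PathsToNested::INDUCTIVE,
        PathsToNested::UNKNOWN,
    );
    assert_eq!(paths, PathsToNested::UNKNOWN);
}
```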
@@ -322,15 +391,18 @@ impl CycleHeads {
 /// results from the cycle BAB depending on the cycle root.
 #[derive_where(Debug, Default, Clone; X: Cx)]
 struct NestedGoals<X: Cx> {
-    nested_goals: HashMap<X::Input, UsageKind>,
+    nested_goals: HashMap<X::Input, PathsToNested>,
 }
 impl<X: Cx> NestedGoals<X> {
     fn is_empty(&self) -> bool {
         self.nested_goals.is_empty()
     }
 
-    fn insert(&mut self, input: X::Input, path_from_entry: UsageKind) {
-        self.nested_goals.entry(input).or_insert(path_from_entry).and_merge(path_from_entry);
+    fn insert(&mut self, input: X::Input, paths_to_nested: PathsToNested) {
+        match self.nested_goals.entry(input) {
+            Entry::Occupied(mut entry) => *entry.get_mut() |= paths_to_nested,
+            Entry::Vacant(entry) => drop(entry.insert(paths_to_nested)),
+        }
     }
 
     /// Adds the nested goals of a nested goal, given that the path `step_kind` from this goal
@@ -341,18 +413,15 @@ impl<X: Cx> NestedGoals<X> {
     /// the same as for the child.
     fn extend_from_child(&mut self, step_kind: PathKind, nested_goals: &NestedGoals<X>) {
         #[allow(rustc::potential_query_instability)]
-        for (input, path_from_entry) in nested_goals.iter() {
-            let path_from_entry = match step_kind {
-                PathKind::Coinductive => UsageKind::Single(PathKind::Coinductive),
-                PathKind::Inductive => path_from_entry,
-            };
-            self.insert(input, path_from_entry);
+        for (input, paths_to_nested) in nested_goals.iter() {
+            let paths_to_nested = paths_to_nested.extend_with(step_kind);
+            self.insert(input, paths_to_nested);
         }
     }
 
     #[cfg_attr(feature = "nightly", rustc_lint_query_instability)]
     #[allow(rustc::potential_query_instability)]
-    fn iter(&self) -> impl Iterator<Item = (X::Input, UsageKind)> {
+    fn iter(&self) -> impl Iterator<Item = (X::Input, PathsToNested)> + '_ {
         self.nested_goals.iter().map(|(i, p)| (*i, *p))
     }
 
@@ -490,7 +559,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         // goals as this change may cause them to now depend on additional
         // goals, resulting in new cycles. See the dev-guide for examples.
         if parent_depends_on_cycle {
-            parent.nested_goals.insert(parent.input, UsageKind::Single(PathKind::Inductive))
+            parent.nested_goals.insert(parent.input, PathsToNested::EMPTY);
        }
    }
 }
@@ -666,7 +735,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
             //
             // We must therefore not use the global cache entry for `B` in that case.
             // See tests/ui/traits/next-solver/cycles/hidden-by-overflow.rs
-            last.nested_goals.insert(last.input, UsageKind::Single(PathKind::Inductive));
+            last.nested_goals.insert(last.input, PathsToNested::EMPTY);
         }
 
         debug!("encountered stack overflow");
@@ -749,16 +818,11 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
 
             // We now care about the path from the next highest cycle head to the
             // provisional cache entry.
-            match path_from_head {
-                PathKind::Coinductive => {}
-                PathKind::Inductive => {
-                    *path_from_head = Self::cycle_path_kind(
-                        &self.stack,
-                        stack_entry.step_kind_from_parent,
-                        head,
-                    )
-                }
-            }
+            *path_from_head = path_from_head.extend(Self::cycle_path_kind(
+                &self.stack,
+                stack_entry.step_kind_from_parent,
+                head,
+            ));
             // Mutate the result of the provisional cache entry in case we did
             // not reach a fixpoint.
             *result = mutate_result(input, *result);
@@ -858,7 +922,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         for &ProvisionalCacheEntry {
             encountered_overflow,
             ref heads,
-            path_from_head,
+            path_from_head: head_to_provisional,
             result: _,
         } in entries.iter()
         {
@@ -870,24 +934,19 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
 
             // A provisional cache entry only applies if the path from its highest head
             // matches the path when encountering the goal.
+            //
+            // We check if any of the paths taken while computing the global goal
+            // would end up with an applicable provisional cache entry.
             let head = heads.highest_cycle_head();
-            let full_path = match Self::cycle_path_kind(stack, step_kind_from_parent, head) {
-                PathKind::Coinductive => UsageKind::Single(PathKind::Coinductive),
-                PathKind::Inductive => path_from_global_entry,
-            };
-
-            match (full_path, path_from_head) {
-                (UsageKind::Mixed, _)
-                | (UsageKind::Single(PathKind::Coinductive), PathKind::Coinductive)
-                | (UsageKind::Single(PathKind::Inductive), PathKind::Inductive) => {
-                    debug!(
-                        ?full_path,
-                        ?path_from_head,
-                        "cache entry not applicable due to matching paths"
-                    );
-                    return false;
-                }
-                _ => debug!(?full_path, ?path_from_head, "paths don't match"),
+            let head_to_curr = Self::cycle_path_kind(stack, step_kind_from_parent, head);
+            let full_paths = path_from_global_entry.extend_with(head_to_curr);
+            if full_paths.contains(head_to_provisional.into()) {
+                debug!(
+                    ?full_paths,
+                    ?head_to_provisional,
+                    "cache entry not applicable due to matching paths"
+                );
+                return false;
             }
         }
     }
@@ -986,8 +1045,8 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         let last = &mut self.stack[last_index];
         last.reached_depth = last.reached_depth.max(next_index);
 
-        last.nested_goals.insert(input, UsageKind::Single(step_kind_from_parent));
-        last.nested_goals.insert(last.input, UsageKind::Single(PathKind::Inductive));
+        last.nested_goals.insert(input, step_kind_from_parent.into());
+        last.nested_goals.insert(last.input, PathsToNested::EMPTY);
         if last_index != head {
             last.heads.insert(head, step_kind_from_parent);
         }
@@ -58,20 +58,24 @@ impl<I: Interner, P> Goal<I, P> {
 /// Why a specific goal has to be proven.
 ///
 /// This is necessary as we treat nested goals different depending on
-/// their source. This is currently mostly used by proof tree visitors
-/// but will be used by cycle handling in the future.
+/// their source. This is used to decide whether a cycle is coinductive.
+/// See the documentation of `EvalCtxt::step_kind_for_source` for more details
+/// about this.
+///
+/// It is also used by proof tree visitors, e.g. for diagnostics purposes.
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
 #[cfg_attr(feature = "nightly", derive(HashStable_NoContext))]
 pub enum GoalSource {
     Misc,
-    /// We're proving a where-bound of an impl.
+    /// A nested goal required to prove that types are equal/subtypes.
+    /// This is always an unproductive step.
     ///
-    /// FIXME(-Znext-solver=coinductive): Explain how and why this
-    /// changes whether cycles are coinductive.
+    /// This is also used for all `NormalizesTo` goals, as they are used
+    /// to relate types in `AliasRelate`.
+    TypeRelating,
+    /// We're proving a where-bound of an impl.
     ImplWhereBound,
     /// Const conditions that need to hold for `~const` alias bounds to hold.
     ///
     /// FIXME(-Znext-solver=coinductive): Are these even coinductive?
     AliasBoundConstCondition,
     /// Instantiating a higher-ranked goal and re-proving it.
     InstantiateHigherRanked,
@@ -79,7 +83,6 @@ pub enum GoalSource {
     /// This is used in two places: projecting to an opaque whose hidden type
     /// is already registered in the opaque type storage, and for rigid projections.
     AliasWellFormed,
-
     /// In case normalizing aliases in nested goals cycles, eagerly normalizing these
     /// aliases in the context of the parent may incorrectly change the cycle kind.
     /// Normalizing aliases in goals therefore tracks the original path kind for this