rust/compiler/rustc_middle/src/ty/context.rs

//! Type context book-keeping.
use crate::arena::Arena;
use crate::dep_graph::{DepGraph, DepKind, DepKindStruct};
use crate::hir::place::Place as HirPlace;
use crate::infer::canonical::{Canonical, CanonicalVarInfo, CanonicalVarInfos};
use crate::lint::{struct_lint_level, LintDiagnosticBuilder, LintLevelSource};
use crate::middle::resolve_lifetime::{self, LifetimeScopeForPath};
use crate::middle::stability;
use crate::mir::interpret::{self, Allocation, ConstValue, Scalar};
use crate::mir::{
Body, BorrowCheckResult, Field, Local, Place, PlaceElem, ProjectionKind, Promoted,
};
use crate::thir::Thir;
use crate::traits;
use crate::ty::query::{self, TyCtxtAt};
use crate::ty::subst::{GenericArg, GenericArgKind, InternalSubsts, Subst, SubstsRef, UserSubsts};
use crate::ty::TyKind::*;
use crate::ty::{
self, AdtDef, AdtKind, Binder, BindingMode, BoundVar, CanonicalPolyFnSig,
ClosureSizeProfileData, Const, ConstS, ConstVid, DefIdTree, ExistentialPredicate, FloatTy,
FloatVar, FloatVid, GenericParamDefKind, InferConst, InferTy, IntTy, IntVar, IntVid, List,
ParamConst, ParamTy, PolyFnSig, Predicate, PredicateKind, PredicateS, ProjectionTy, Region,
RegionKind, ReprOptions, TraitObjectVisitor, Ty, TyKind, TyS, TyVar, TyVid, TypeAndMut, UintTy,
};
use rustc_ast as ast;
use rustc_attr as attr;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::intern::Interned;
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::steal::Steal;
use rustc_data_structures::sync::{self, Lock, Lrc, WorkerLocal};
use rustc_errors::ErrorReported;
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LOCAL_CRATE};
use rustc_hir::intravisit::Visitor;
use rustc_hir::lang_items::LangItem;
use rustc_hir::{
Constness, ExprKind, HirId, ImplItemKind, ItemKind, ItemLocalId, ItemLocalMap, ItemLocalSet,
Node, TraitCandidate, TraitItemKind,
};
use rustc_index::vec::{Idx, IndexVec};
use rustc_macros::HashStable;
use rustc_middle::mir::FakeReadCause;
use rustc_query_system::ich::{NodeIdHashingMode, StableHashingContext};
use rustc_serialize::opaque::{FileEncodeResult, FileEncoder};
use rustc_session::config::{BorrowckMode, CrateType, OutputFilenames};
use rustc_session::lint::{Level, Lint};
use rustc_session::Limit;
use rustc_session::Session;
use rustc_span::def_id::{DefPathHash, StableCrateId};
use rustc_span::source_map::{MultiSpan, SourceMap};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi::{Layout, TargetDataLayout, VariantIdx};
use rustc_target::spec::abi;
use smallvec::SmallVec;
use std::any::Any;
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::collections::hash_map::{self, Entry};
use std::fmt;
use std::hash::{Hash, Hasher};
use std::iter;
use std::mem;
use std::ops::{Bound, Deref};
use std::sync::Arc;
pub trait OnDiskCache<'tcx>: rustc_data_structures::sync::Sync {
/// Creates a new `OnDiskCache` instance from the serialized data in `data`.
fn new(sess: &'tcx Session, data: Mmap, start_pos: usize) -> Self
where
Self: Sized;
fn new_empty(source_map: &'tcx SourceMap) -> Self
where
Self: Sized;
fn drop_serialized_data(&self, tcx: TyCtxt<'tcx>);
fn serialize(&self, tcx: TyCtxt<'tcx>, encoder: &mut FileEncoder) -> FileEncodeResult;
}
/// A type that is not publicly constructable. This prevents people from making [`TyKind::Error`]s
/// except through the error-reporting functions on a [`tcx`][TyCtxt].
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
#[derive(TyEncodable, TyDecodable, HashStable)]
pub struct DelaySpanBugEmitted(());
type InternedSet<'tcx, T> = ShardedHashMap<InternedInSet<'tcx, T>, ()>;
pub struct CtxtInterners<'tcx> {
/// The arena that types, regions, etc. are allocated from.
arena: &'tcx WorkerLocal<Arena<'tcx>>,
// Specifically use a speedy hash algorithm for these hash sets, since
// they're accessed quite often.
type_: InternedSet<'tcx, TyS<'tcx>>,
type_list: InternedSet<'tcx, List<Ty<'tcx>>>,
substs: InternedSet<'tcx, InternalSubsts<'tcx>>,
canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo<'tcx>>>,
region: InternedSet<'tcx, RegionKind>,
poly_existential_predicates:
InternedSet<'tcx, List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>>>,
predicate: InternedSet<'tcx, PredicateS<'tcx>>,
predicates: InternedSet<'tcx, List<Predicate<'tcx>>>,
projs: InternedSet<'tcx, List<ProjectionKind>>,
place_elems: InternedSet<'tcx, List<PlaceElem<'tcx>>>,
const_: InternedSet<'tcx, ConstS<'tcx>>,
const_allocation: InternedSet<'tcx, Allocation>,
bound_variable_kinds: InternedSet<'tcx, List<ty::BoundVariableKind>>,
layout: InternedSet<'tcx, Layout>,
adt_def: InternedSet<'tcx, AdtDef>,
/// `#[stable]` and `#[unstable]` attributes
stability: InternedSet<'tcx, attr::Stability>,
/// `#[rustc_const_stable]` and `#[rustc_const_unstable]` attributes
const_stability: InternedSet<'tcx, attr::ConstStability>,
}
impl<'tcx> CtxtInterners<'tcx> {
fn new(arena: &'tcx WorkerLocal<Arena<'tcx>>) -> CtxtInterners<'tcx> {
CtxtInterners {
arena,
type_: Default::default(),
type_list: Default::default(),
substs: Default::default(),
region: Default::default(),
poly_existential_predicates: Default::default(),
canonical_var_infos: Default::default(),
predicate: Default::default(),
predicates: Default::default(),
projs: Default::default(),
place_elems: Default::default(),
const_: Default::default(),
const_allocation: Default::default(),
bound_variable_kinds: Default::default(),
layout: Default::default(),
adt_def: Default::default(),
stability: Default::default(),
const_stability: Default::default(),
}
}
/// Interns a type.
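/// Structurally identical kinds are deduplicated: every caller interning the
/// same `TyKind` gets back a pointer to the same arena allocation, which is
/// what lets types be compared and hashed by address elsewhere in the crate.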
#[allow(rustc::usage_of_ty_tykind)]
#[inline(never)]
fn intern_ty(&self, kind: TyKind<'tcx>) -> Ty<'tcx> {
Ty(Interned::new_unchecked(
self.type_
.intern(kind, |kind| {
let flags = super::flags::FlagComputation::for_kind(&kind);
let ty_struct = TyS {
kind,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
};
InternedInSet(self.arena.alloc(ty_struct))
})
.0,
))
}
#[inline(never)]
fn intern_predicate(&self, kind: Binder<'tcx, PredicateKind<'tcx>>) -> Predicate<'tcx> {
Predicate(Interned::new_unchecked(
self.predicate
.intern(kind, |kind| {
let flags = super::flags::FlagComputation::for_predicate(kind);
let predicate_struct = PredicateS {
kind,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
};
InternedInSet(self.arena.alloc(predicate_struct))
})
.0,
))
}
}
pub struct CommonTypes<'tcx> {
pub unit: Ty<'tcx>,
pub bool: Ty<'tcx>,
pub char: Ty<'tcx>,
pub isize: Ty<'tcx>,
pub i8: Ty<'tcx>,
pub i16: Ty<'tcx>,
pub i32: Ty<'tcx>,
pub i64: Ty<'tcx>,
pub i128: Ty<'tcx>,
pub usize: Ty<'tcx>,
pub u8: Ty<'tcx>,
pub u16: Ty<'tcx>,
pub u32: Ty<'tcx>,
pub u64: Ty<'tcx>,
pub u128: Ty<'tcx>,
pub f32: Ty<'tcx>,
pub f64: Ty<'tcx>,
pub str_: Ty<'tcx>,
pub never: Ty<'tcx>,
pub self_param: Ty<'tcx>,
/// Dummy type used for the `Self` of a `TraitRef` created for converting
/// a trait object, and which gets removed in `ExistentialTraitRef`.
/// This type must not appear anywhere in other converted types.
pub trait_object_dummy_self: Ty<'tcx>,
}
pub struct CommonLifetimes<'tcx> {
/// `ReEmpty` in the root universe.
pub re_root_empty: Region<'tcx>,
/// `ReStatic`
pub re_static: Region<'tcx>,
/// Erased region, used outside of type inference.
pub re_erased: Region<'tcx>,
}
pub struct CommonConsts<'tcx> {
pub unit: Const<'tcx>,
}
pub struct LocalTableInContext<'a, V> {
hir_owner: LocalDefId,
data: &'a ItemLocalMap<V>,
}
/// Validate that the given HirId (respectively its `local_id` part) can be
/// safely used as a key in the maps of a TypeckResults. For that to be
/// the case, the HirId must have the same `owner` as all the other IDs in
/// this table (signified by `hir_owner`). Otherwise the HirId
/// would be in a different frame of reference and using its `local_id`
/// would result in lookup errors, or worse, in silently wrong data being
/// stored/returned.
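///
/// A minimal sketch of the relationship being checked (illustrative only, not
/// a doctest): a `HirId` is an (owner, item-local index) pair, and the tables
/// in this file are keyed by the item-local part alone, so the owner must
/// match the table's `hir_owner`.
///
/// ```ignore (illustrative)
/// // hir_id.owner    -- must equal the table's `hir_owner`
/// // hir_id.local_id -- the actual key used in the `ItemLocalMap`
/// validate_hir_id_for_typeck_results(hir_owner, hir_id);
/// let value = table_data.get(&hir_id.local_id);
/// ```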
#[inline]
fn validate_hir_id_for_typeck_results(hir_owner: LocalDefId, hir_id: hir::HirId) {
if hir_id.owner != hir_owner {
invalid_hir_id_for_typeck_results(hir_owner, hir_id);
}
}
#[cold]
#[inline(never)]
fn invalid_hir_id_for_typeck_results(hir_owner: LocalDefId, hir_id: hir::HirId) {
ty::tls::with(|tcx| {
bug!(
"node {} with HirId::owner {:?} cannot be placed in TypeckResults with hir_owner {:?}",
tcx.hir().node_to_string(hir_id),
hir_id.owner,
hir_owner
)
});
}
impl<'a, V> LocalTableInContext<'a, V> {
pub fn contains_key(&self, id: hir::HirId) -> bool {
validate_hir_id_for_typeck_results(self.hir_owner, id);
self.data.contains_key(&id.local_id)
}
pub fn get(&self, id: hir::HirId) -> Option<&V> {
validate_hir_id_for_typeck_results(self.hir_owner, id);
self.data.get(&id.local_id)
}
pub fn iter(&self) -> hash_map::Iter<'_, hir::ItemLocalId, V> {
self.data.iter()
}
}
impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> {
type Output = V;
fn index(&self, key: hir::HirId) -> &V {
self.get(key).expect("LocalTableInContext: key not found")
}
}
pub struct LocalTableInContextMut<'a, V> {
hir_owner: LocalDefId,
data: &'a mut ItemLocalMap<V>,
}
impl<'a, V> LocalTableInContextMut<'a, V> {
pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
validate_hir_id_for_typeck_results(self.hir_owner, id);
self.data.get_mut(&id.local_id)
}
pub fn entry(&mut self, id: hir::HirId) -> Entry<'_, hir::ItemLocalId, V> {
validate_hir_id_for_typeck_results(self.hir_owner, id);
self.data.entry(id.local_id)
}
pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
validate_hir_id_for_typeck_results(self.hir_owner, id);
self.data.insert(id.local_id, val)
}
pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
validate_hir_id_for_typeck_results(self.hir_owner, id);
self.data.remove(&id.local_id)
}
}
/// Whenever a value may be live across a generator yield, the type of that value winds up in the
/// `GeneratorInteriorTypeCause` struct. This struct adds additional information about such
/// captured types that can be useful for diagnostics. In particular, it stores the span that
/// caused a given type to be recorded, along with the scope that enclosed the value (which can
/// be used to find the await that the value is live across).
///
/// For example:
///
/// ```ignore (pseudo-Rust)
/// async move {
/// let x: T = expr;
/// foo.await
/// ...
/// }
/// ```
///
/// Here, we would store the type `T`, the span of the value `x`, the "scope-span" for
/// the scope that contains `x`, the expression that `T` was evaluated from, and the span of
/// `foo.await`.
#[derive(TyEncodable, TyDecodable, Clone, Debug, Eq, Hash, PartialEq, HashStable)]
#[derive(TypeFoldable)]
pub struct GeneratorInteriorTypeCause<'tcx> {
/// Type of the captured binding.
pub ty: Ty<'tcx>,
/// Span of the binding that was captured.
pub span: Span,
/// Span of the scope of the captured binding.
pub scope_span: Option<Span>,
/// Span of `.await` or `yield` expression.
pub yield_span: Span,
/// The expression that the type was evaluated from.
pub expr: Option<hir::HirId>,
}
#[derive(TyEncodable, TyDecodable, Debug)]
pub struct TypeckResults<'tcx> {
/// The `HirId::owner` all `ItemLocalId`s in this table are relative to.
pub hir_owner: LocalDefId,
/// Resolved definitions for `<T>::X` associated paths and
/// method calls, including those of overloaded operators.
type_dependent_defs: ItemLocalMap<Result<(DefKind, DefId), ErrorReported>>,
/// Resolved field indices for field accesses in expressions (`S { field }`, `obj.field`)
/// or patterns (`S { field }`). The index is often useful by itself, but to learn more
/// about the field you also need the definition of the variant to which the field
/// belongs; no such variant exists when the field is a plain tuple field (`tuple.0`).
field_indices: ItemLocalMap<usize>,
/// Stores the types for various nodes in the AST. Note that this table
/// is not guaranteed to be populated outside inference. See
/// typeck::check::fn_ctxt for details.
node_types: ItemLocalMap<Ty<'tcx>>,
/// Stores the type parameters which were substituted to obtain the type
/// of this node. This only applies to nodes that refer to entities
/// parameterized by type parameters, such as generic fns, types, or
/// other items.
node_substs: ItemLocalMap<SubstsRef<'tcx>>,
/// This will either store the canonicalized types provided by the user
/// or the substitutions that the user explicitly gave (if any) attached
/// to `id`. These will not include any inferred values. The canonical form
/// is used to capture things like `_` or other unspecified values.
///
/// For example, if the user wrote `foo.collect::<Vec<_>>()`, then the
/// canonical substitutions would include only `for<X> { Vec<X> }`.
///
/// See also `AscribeUserType` statement in MIR.
user_provided_types: ItemLocalMap<CanonicalUserType<'tcx>>,
/// Stores the canonicalized types provided by the user. See also
/// `AscribeUserType` statement in MIR.
pub user_provided_sigs: DefIdMap<CanonicalPolyFnSig<'tcx>>,
adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,
/// Stores the actual binding mode for all instances of hir::BindingAnnotation.
pat_binding_modes: ItemLocalMap<BindingMode>,
/// Stores the types which were implicitly dereferenced in pattern binding modes
/// for later usage in THIR lowering. For example,
///
/// ```
/// match &&Some(5i32) {
/// Some(n) => {},
/// _ => {},
/// }
/// ```
/// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored.
///
/// See:
/// <https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions>
pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>,
/// Records the reasons that we picked the kind of each closure;
/// not all closures are present in the map.
closure_kind_origins: ItemLocalMap<(Span, HirPlace<'tcx>)>,
/// For each fn, records the "liberated" types of its arguments
/// and return type. Liberated means that all bound regions
/// (including late-bound regions) are replaced with free
/// equivalents. This table is not used in codegen (since regions
/// are erased there) and hence is not serialized to metadata.
///
/// This table also contains the "revealed" values for any `impl Trait`
/// that appear in the signature and whose values are being inferred
/// by this function.
///
/// # Example
///
/// ```rust
/// fn foo(x: &u32) -> impl Debug { *x }
/// ```
///
/// The function signature here would be:
///
/// ```
/// for<'a> fn(&'a u32) -> Foo
/// ```
///
/// where `Foo` is an opaque type created for this function.
///
///
/// The *liberated* form of this would be
///
/// ```
/// fn(&'a u32) -> u32
/// ```
///
/// Note that `'a` is not bound (it would be an `ReFree`) and
/// that the `Foo` opaque type is replaced by its hidden type.
liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>,
/// For each FRU expression, record the normalized types of the fields
/// of the struct - this is needed because it is non-trivial to
/// normalize while preserving regions. This table is used only in
/// MIR construction and hence is not serialized to metadata.
fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>,
/// For every coercion cast we add the HIR node ID of the cast
/// expression to this set.
coercion_casts: ItemLocalSet,
/// Set of trait imports actually used in the method resolution.
/// This is used for warning unused imports. During type
/// checking, this `Lrc` should not be cloned: it must have a ref-count
/// of 1 so that we can insert things into the set mutably.
pub used_trait_imports: Lrc<FxHashSet<LocalDefId>>,
/// If any errors occurred while type-checking this body,
/// this field will be set to `Some(ErrorReported)`.
pub tainted_by_errors: Option<ErrorReported>,
/// All the opaque types that are restricted to concrete types
/// by this function.
pub concrete_opaque_types: FxHashSet<DefId>,
/// Tracks the minimum captures required for a closure;
/// see `MinCaptureInformationMap` for more details.
pub closure_min_captures: ty::MinCaptureInformationMap<'tcx>,
/// Tracks the fake reads required for a closure and the reason for the fake read.
/// When performing pattern matching for closures, there are times we don't end up
/// reading places that are mentioned in a closure (because of _ patterns). However,
/// to ensure the places are initialized, we introduce fake reads.
/// Consider these two examples:
/// ``` (discriminant matching with only wildcard arm)
/// let x: u8;
/// let c = || match x { _ => () };
/// ```
/// In this example, we don't need to actually read/borrow `x` in `c`, and so we don't
/// want to capture it. However, we do still want an error here, because `x` should have
/// to be initialized at the point where c is created. Therefore, we add a "fake read"
/// instead.
/// ``` (destructured assignments)
/// let c = || {
/// let (t1, t2) = t;
/// }
/// ```
/// In the second example, we capture the disjoint fields of `t` (`t.0` & `t.1`), but
/// we never capture `t`. This becomes an issue when we build MIR as we require
/// information on `t` in order to create place `t.0` and `t.1`. We can solve this
/// issue by fake reading `t`.
pub closure_fake_reads: FxHashMap<DefId, Vec<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>>,
/// Stores the type, expression, span and optional scope span of all types
/// that are live across the yield of this generator (if a generator).
pub generator_interior_types: ty::Binder<'tcx, Vec<GeneratorInteriorTypeCause<'tcx>>>,
/// We sometimes treat byte string literals (which are of type `&[u8; N]`)
/// as `&[u8]`, depending on the pattern in which they are used.
/// This hashset records all instances where we behave
/// like this to allow `const_to_pat` to reliably handle this situation.
pub treat_byte_string_as_slice: ItemLocalSet,
/// Contains the data for evaluating the effect of feature `capture_disjoint_fields`
/// on closure size.
pub closure_size_eval: FxHashMap<DefId, ClosureSizeProfileData<'tcx>>,
}
impl<'tcx> TypeckResults<'tcx> {
pub fn new(hir_owner: LocalDefId) -> TypeckResults<'tcx> {
TypeckResults {
hir_owner,
type_dependent_defs: Default::default(),
field_indices: Default::default(),
user_provided_types: Default::default(),
user_provided_sigs: Default::default(),
node_types: Default::default(),
node_substs: Default::default(),
adjustments: Default::default(),
pat_binding_modes: Default::default(),
pat_adjustments: Default::default(),
closure_kind_origins: Default::default(),
liberated_fn_sigs: Default::default(),
fru_field_types: Default::default(),
coercion_casts: Default::default(),
used_trait_imports: Lrc::new(Default::default()),
tainted_by_errors: None,
concrete_opaque_types: Default::default(),
closure_min_captures: Default::default(),
closure_fake_reads: Default::default(),
generator_interior_types: ty::Binder::dummy(Default::default()),
treat_byte_string_as_slice: Default::default(),
closure_size_eval: Default::default(),
}
}
/// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
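///
/// A hedged sketch of the two cases (illustrative, not a doctest): a plain
/// resolved path carries its `Res` directly, while a type-relative path such
/// as the callee in `Vec::<u8>::new()` is looked up in `type_dependent_defs`:
///
/// ```ignore (illustrative)
/// let res = typeck_results.qpath_res(&qpath, expr.hir_id);
/// // For `Vec::<u8>::new`, this is `Res::Def(DefKind::AssocFn, def_id)` on
/// // success, or `Res::Err` if type-dependent resolution failed.
/// ```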
pub fn qpath_res(&self, qpath: &hir::QPath<'_>, id: hir::HirId) -> Res {
match *qpath {
hir::QPath::Resolved(_, ref path) => path.res,
hir::QPath::TypeRelative(..) | hir::QPath::LangItem(..) => self
.type_dependent_def(id)
.map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id)),
}
}
pub fn type_dependent_defs(
&self,
) -> LocalTableInContext<'_, Result<(DefKind, DefId), ErrorReported>> {
LocalTableInContext { hir_owner: self.hir_owner, data: &self.type_dependent_defs }
}
pub fn type_dependent_def(&self, id: HirId) -> Option<(DefKind, DefId)> {
validate_hir_id_for_typeck_results(self.hir_owner, id);
self.type_dependent_defs.get(&id.local_id).cloned().and_then(|r| r.ok())
}
pub fn type_dependent_def_id(&self, id: HirId) -> Option<DefId> {
self.type_dependent_def(id).map(|(_, def_id)| def_id)
}
pub fn type_dependent_defs_mut(
&mut self,
) -> LocalTableInContextMut<'_, Result<(DefKind, DefId), ErrorReported>> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.type_dependent_defs }
}
pub fn field_indices(&self) -> LocalTableInContext<'_, usize> {
LocalTableInContext { hir_owner: self.hir_owner, data: &self.field_indices }
}
pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<'_, usize> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.field_indices }
}
pub fn user_provided_types(&self) -> LocalTableInContext<'_, CanonicalUserType<'tcx>> {
LocalTableInContext { hir_owner: self.hir_owner, data: &self.user_provided_types }
}
pub fn user_provided_types_mut(
&mut self,
) -> LocalTableInContextMut<'_, CanonicalUserType<'tcx>> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.user_provided_types }
}
pub fn node_types(&self) -> LocalTableInContext<'_, Ty<'tcx>> {
LocalTableInContext { hir_owner: self.hir_owner, data: &self.node_types }
}
pub fn node_types_mut(&mut self) -> LocalTableInContextMut<'_, Ty<'tcx>> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.node_types }
}
pub fn node_type(&self, id: hir::HirId) -> Ty<'tcx> {
self.node_type_opt(id).unwrap_or_else(|| {
bug!("node_type: no type for node `{}`", tls::with(|tcx| tcx.hir().node_to_string(id)))
})
}
pub fn node_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
validate_hir_id_for_typeck_results(self.hir_owner, id);
self.node_types.get(&id.local_id).cloned()
}
pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<'_, SubstsRef<'tcx>> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.node_substs }
}
pub fn node_substs(&self, id: hir::HirId) -> SubstsRef<'tcx> {
validate_hir_id_for_typeck_results(self.hir_owner, id);
self.node_substs.get(&id.local_id).cloned().unwrap_or_else(|| InternalSubsts::empty())
}
pub fn node_substs_opt(&self, id: hir::HirId) -> Option<SubstsRef<'tcx>> {
validate_hir_id_for_typeck_results(self.hir_owner, id);
self.node_substs.get(&id.local_id).cloned()
}
// Returns the type of a pattern as a monotype. Like `expr_ty`, this function
// doesn't provide type parameter substitutions.
pub fn pat_ty(&self, pat: &hir::Pat<'_>) -> Ty<'tcx> {
self.node_type(pat.hir_id)
}
// Returns the type of an expression as a monotype.
//
// NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
// some cases, we insert `Adjustment` annotations such as auto-deref or
// auto-ref. The type returned by this function does not consider such
// adjustments. See `expr_ty_adjusted()` instead.
//
// NB (2): This type doesn't provide type parameter substitutions; e.g., if you
// ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
// instead of "fn(ty) -> T with T = isize".
pub fn expr_ty(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> {
self.node_type(expr.hir_id)
}
pub fn expr_ty_opt(&self, expr: &hir::Expr<'_>) -> Option<Ty<'tcx>> {
self.node_type_opt(expr.hir_id)
}
pub fn adjustments(&self) -> LocalTableInContext<'_, Vec<ty::adjustment::Adjustment<'tcx>>> {
LocalTableInContext { hir_owner: self.hir_owner, data: &self.adjustments }
}
pub fn adjustments_mut(
&mut self,
) -> LocalTableInContextMut<'_, Vec<ty::adjustment::Adjustment<'tcx>>> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.adjustments }
}
pub fn expr_adjustments(&self, expr: &hir::Expr<'_>) -> &[ty::adjustment::Adjustment<'tcx>] {
validate_hir_id_for_typeck_results(self.hir_owner, expr.hir_id);
self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])
}
/// Returns the type of `expr`, considering any `Adjustment`
/// entry recorded for that expression.
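///
/// A hedged illustration of the difference from `expr_ty` (assuming the usual
/// autoderef/autoref adjustments were recorded during type checking):
///
/// ```ignore (illustrative)
/// let s = String::from("hi");
/// s.len();
/// // For the receiver expression `s`:
/// //   expr_ty(s_expr)          == String   (pre-adjustment)
/// //   expr_ty_adjusted(s_expr) == &str     (after the Deref + Ref adjustments)
/// ```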
pub fn expr_ty_adjusted(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> {
self.expr_adjustments(expr).last().map_or_else(|| self.expr_ty(expr), |adj| adj.target)
}
pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr<'_>) -> Option<Ty<'tcx>> {
self.expr_adjustments(expr).last().map(|adj| adj.target).or_else(|| self.expr_ty_opt(expr))
}
pub fn is_method_call(&self, expr: &hir::Expr<'_>) -> bool {
// Only paths and method calls/overloaded operators have
// entries in type_dependent_defs, ignore the former here.
if let hir::ExprKind::Path(_) = expr.kind {
return false;
}
matches!(self.type_dependent_defs().get(expr.hir_id), Some(Ok((DefKind::AssocFn, _))))
}
pub fn extract_binding_mode(&self, s: &Session, id: HirId, sp: Span) -> Option<BindingMode> {
self.pat_binding_modes().get(id).copied().or_else(|| {
s.delay_span_bug(sp, "missing binding mode");
None
})
}
pub fn pat_binding_modes(&self) -> LocalTableInContext<'_, BindingMode> {
LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_binding_modes }
}
pub fn pat_binding_modes_mut(&mut self) -> LocalTableInContextMut<'_, BindingMode> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_binding_modes }
}
pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> {
LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_adjustments }
}
pub fn pat_adjustments_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_adjustments }
}
/// For a given closure, returns the iterator of `ty::CapturedPlace`s that are captured
/// by the closure.
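///
/// For example (illustrative only): a closure that captures the disjoint
/// fields `t.0` and `u.1` has one `closure_min_captures` entry per root
/// variable (`t` and `u`); this method flattens those entries into a single
/// iterator over both captured places.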
pub fn closure_min_captures_flattened(
&self,
closure_def_id: DefId,
) -> impl Iterator<Item = &ty::CapturedPlace<'tcx>> {
self.closure_min_captures
.get(&closure_def_id)
.map(|closure_min_captures| closure_min_captures.values().flat_map(|v| v.iter()))
.into_iter()
.flatten()
}
pub fn closure_kind_origins(&self) -> LocalTableInContext<'_, (Span, HirPlace<'tcx>)> {
LocalTableInContext { hir_owner: self.hir_owner, data: &self.closure_kind_origins }
}
pub fn closure_kind_origins_mut(
&mut self,
) -> LocalTableInContextMut<'_, (Span, HirPlace<'tcx>)> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.closure_kind_origins }
}
pub fn liberated_fn_sigs(&self) -> LocalTableInContext<'_, ty::FnSig<'tcx>> {
LocalTableInContext { hir_owner: self.hir_owner, data: &self.liberated_fn_sigs }
}
pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<'_, ty::FnSig<'tcx>> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.liberated_fn_sigs }
}
pub fn fru_field_types(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> {
LocalTableInContext { hir_owner: self.hir_owner, data: &self.fru_field_types }
}
pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.fru_field_types }
}
pub fn is_coercion_cast(&self, hir_id: hir::HirId) -> bool {
validate_hir_id_for_typeck_results(self.hir_owner, hir_id);
self.coercion_casts.contains(&hir_id.local_id)
}
pub fn set_coercion_cast(&mut self, id: ItemLocalId) {
self.coercion_casts.insert(id);
}
pub fn coercion_casts(&self) -> &ItemLocalSet {
&self.coercion_casts
}
}
impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TypeckResults<'tcx> {
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
let ty::TypeckResults {
hir_owner,
ref type_dependent_defs,
ref field_indices,
ref user_provided_types,
ref user_provided_sigs,
ref node_types,
ref node_substs,
ref adjustments,
ref pat_binding_modes,
ref pat_adjustments,
ref closure_kind_origins,
ref liberated_fn_sigs,
ref fru_field_types,
ref coercion_casts,
ref used_trait_imports,
tainted_by_errors,
ref concrete_opaque_types,
ref closure_min_captures,
ref closure_fake_reads,
ref generator_interior_types,
ref treat_byte_string_as_slice,
ref closure_size_eval,
} = *self;
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
hcx.local_def_path_hash(hir_owner);
type_dependent_defs.hash_stable(hcx, hasher);
field_indices.hash_stable(hcx, hasher);
user_provided_types.hash_stable(hcx, hasher);
user_provided_sigs.hash_stable(hcx, hasher);
node_types.hash_stable(hcx, hasher);
node_substs.hash_stable(hcx, hasher);
adjustments.hash_stable(hcx, hasher);
pat_binding_modes.hash_stable(hcx, hasher);
pat_adjustments.hash_stable(hcx, hasher);
closure_kind_origins.hash_stable(hcx, hasher);
liberated_fn_sigs.hash_stable(hcx, hasher);
fru_field_types.hash_stable(hcx, hasher);
coercion_casts.hash_stable(hcx, hasher);
used_trait_imports.hash_stable(hcx, hasher);
tainted_by_errors.hash_stable(hcx, hasher);
concrete_opaque_types.hash_stable(hcx, hasher);
closure_min_captures.hash_stable(hcx, hasher);
closure_fake_reads.hash_stable(hcx, hasher);
generator_interior_types.hash_stable(hcx, hasher);
treat_byte_string_as_slice.hash_stable(hcx, hasher);
closure_size_eval.hash_stable(hcx, hasher);
})
}
}
rustc_index::newtype_index! {
pub struct UserTypeAnnotationIndex {
derive [HashStable]
DEBUG_FORMAT = "UserType({})",
const START_INDEX = 0,
}
}
/// Mapping of type annotation indices to canonical user type annotations.
pub type CanonicalUserTypeAnnotations<'tcx> =
IndexVec<UserTypeAnnotationIndex, CanonicalUserTypeAnnotation<'tcx>>;
#[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)]
pub struct CanonicalUserTypeAnnotation<'tcx> {
pub user_ty: CanonicalUserType<'tcx>,
pub span: Span,
pub inferred_ty: Ty<'tcx>,
}
/// Canonicalized user type annotation.
pub type CanonicalUserType<'tcx> = Canonical<'tcx, UserType<'tcx>>;
impl<'tcx> CanonicalUserType<'tcx> {
/// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`,
/// i.e., each thing is mapped to a canonical variable with the same index.
pub fn is_identity(&self) -> bool {
match self.value {
UserType::Ty(_) => false,
UserType::TypeOf(_, user_substs) => {
if user_substs.user_self_ty.is_some() {
return false;
}
iter::zip(user_substs.substs, BoundVar::new(0)..).all(|(kind, cvar)| {
match kind.unpack() {
GenericArgKind::Type(ty) => match ty.kind() {
ty::Bound(debruijn, b) => {
// We only allow a `ty::INNERMOST` index in substitutions.
assert_eq!(*debruijn, ty::INNERMOST);
cvar == b.var
}
_ => false,
},
GenericArgKind::Lifetime(r) => match *r {
ty::ReLateBound(debruijn, br) => {
// We only allow a `ty::INNERMOST` index in substitutions.
assert_eq!(debruijn, ty::INNERMOST);
cvar == br.var
}
_ => false,
},
GenericArgKind::Const(ct) => match ct.val() {
ty::ConstKind::Bound(debruijn, b) => {
// We only allow a `ty::INNERMOST` index in substitutions.
assert_eq!(debruijn, ty::INNERMOST);
cvar == b
}
_ => false,
},
}
})
}
}
}
}
/// A user-given type annotation attached to a constant. These arise
/// from constants that are named via paths, like `Foo::<A>::new` and
/// so forth.
#[derive(Copy, Clone, Debug, PartialEq, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, Lift)]
pub enum UserType<'tcx> {
Ty(Ty<'tcx>),
/// The canonical type is the result of `type_of(def_id)` with the
/// given substitutions applied.
TypeOf(DefId, UserSubsts<'tcx>),
}
impl<'tcx> CommonTypes<'tcx> {
fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
let mk = |ty| interners.intern_ty(ty);
CommonTypes {
unit: mk(Tuple(List::empty())),
bool: mk(Bool),
char: mk(Char),
never: mk(Never),
isize: mk(Int(ty::IntTy::Isize)),
i8: mk(Int(ty::IntTy::I8)),
i16: mk(Int(ty::IntTy::I16)),
i32: mk(Int(ty::IntTy::I32)),
i64: mk(Int(ty::IntTy::I64)),
i128: mk(Int(ty::IntTy::I128)),
usize: mk(Uint(ty::UintTy::Usize)),
u8: mk(Uint(ty::UintTy::U8)),
u16: mk(Uint(ty::UintTy::U16)),
u32: mk(Uint(ty::UintTy::U32)),
u64: mk(Uint(ty::UintTy::U64)),
u128: mk(Uint(ty::UintTy::U128)),
f32: mk(Float(ty::FloatTy::F32)),
f64: mk(Float(ty::FloatTy::F64)),
str_: mk(Str),
self_param: mk(ty::Param(ty::ParamTy { index: 0, name: kw::SelfUpper })),
trait_object_dummy_self: mk(Infer(ty::FreshTy(0))),
}
}
}
impl<'tcx> CommonLifetimes<'tcx> {
fn new(interners: &CtxtInterners<'tcx>) -> CommonLifetimes<'tcx> {
let mk = |r| {
Region(Interned::new_unchecked(
interners.region.intern(r, |r| InternedInSet(interners.arena.alloc(r))).0,
))
};
CommonLifetimes {
re_root_empty: mk(ty::ReEmpty(ty::UniverseIndex::ROOT)),
re_static: mk(ty::ReStatic),
re_erased: mk(ty::ReErased),
}
}
}
impl<'tcx> CommonConsts<'tcx> {
fn new(interners: &CtxtInterners<'tcx>, types: &CommonTypes<'tcx>) -> CommonConsts<'tcx> {
let mk_const = |c| {
Const(Interned::new_unchecked(
interners.const_.intern(c, |c| InternedInSet(interners.arena.alloc(c))).0,
))
};
CommonConsts {
unit: mk_const(ty::ConstS {
val: ty::ConstKind::Value(ConstValue::Scalar(Scalar::ZST)),
ty: types.unit,
}),
}
}
}
// This struct contains information regarding the `ReFree(FreeRegion)` corresponding to a lifetime
// conflict.
#[derive(Debug)]
pub struct FreeRegionInfo {
// `LocalDefId` corresponding to FreeRegion
pub def_id: LocalDefId,
// the bound region corresponding to FreeRegion
pub boundregion: ty::BoundRegionKind,
// Whether the bound region is in an impl item.
pub is_impl_item: bool,
}
/// The central data structure of the compiler. It stores references
/// to the various **arenas** and also houses the results of the
/// various **compiler queries** that have been performed. See the
/// [rustc dev guide] for more details.
///
/// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/ty.html
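///
/// An example of the usual access pattern (a sketch: `type_of` and
/// `def_path_str` are real queries/helpers, but the surrounding function is
/// illustrative only):
///
/// ```ignore (illustrative)
/// fn describe(tcx: TyCtxt<'_>, def_id: DefId) {
///     // Queries are methods on `TyCtxt`; their results are memoized and
///     // tracked by the dependency graph.
///     let ty = tcx.type_of(def_id);
///     let path = tcx.def_path_str(def_id);
///     debug!("{} has type {:?}", path, ty);
/// }
/// ```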
#[derive(Copy, Clone)]
#[rustc_diagnostic_item = "TyCtxt"]
#[cfg_attr(not(bootstrap), rustc_pass_by_value)]
pub struct TyCtxt<'tcx> {
gcx: &'tcx GlobalCtxt<'tcx>,
}
impl<'tcx> Deref for TyCtxt<'tcx> {
type Target = &'tcx GlobalCtxt<'tcx>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.gcx
}
}
pub struct GlobalCtxt<'tcx> {
pub arena: &'tcx WorkerLocal<Arena<'tcx>>,
interners: CtxtInterners<'tcx>,
pub sess: &'tcx Session,
/// This only ever stores a `LintStore` but we don't want a dependency on that type here.
///
/// FIXME(Centril): consider `dyn LintStoreMarker` once
/// we can upcast to `Any` for some additional type safety.
pub lint_store: Lrc<dyn Any + sync::Sync + sync::Send>,
pub dep_graph: DepGraph,
pub prof: SelfProfilerRef,
/// Common types, pre-interned for your convenience.
pub types: CommonTypes<'tcx>,
/// Common lifetimes, pre-interned for your convenience.
pub lifetimes: CommonLifetimes<'tcx>,
/// Common consts, pre-interned for your convenience.
pub consts: CommonConsts<'tcx>,
/// Output of the resolver.
pub(crate) untracked_resolutions: ty::ResolverOutputs,
pub(crate) untracked_crate: &'tcx hir::Crate<'tcx>,
/// This provides access to the incremental compilation on-disk cache for query results.
/// Do not access this directly. It is only meant to be used by
/// `DepGraph::try_mark_green()` and the query infrastructure.
/// This is `None` if we are not in incremental compilation mode.
pub on_disk_cache: Option<&'tcx dyn OnDiskCache<'tcx>>,
pub queries: &'tcx dyn query::QueryEngine<'tcx>,
pub query_caches: query::QueryCaches<'tcx>,
query_kinds: &'tcx [DepKindStruct],
// Internal caches for metadata decoding. No need to track deps on this.
pub ty_rcache: Lock<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
pub pred_rcache: Lock<FxHashMap<ty::CReaderCacheKey, Predicate<'tcx>>>,
/// Caches the results of trait selection. This cache is used
/// for things that do not have to do with the parameters in scope.
pub selection_cache: traits::SelectionCache<'tcx>,
/// Caches the results of trait evaluation. This cache is used
/// for things that do not have to do with the parameters in scope.
/// Merge this with `selection_cache`?
pub evaluation_cache: traits::EvaluationCache<'tcx>,
/// The definite name of the current crate after taking into account
/// attributes, command-line parameters, etc.
crate_name: Symbol,
/// Data layout specification for the current target.
pub data_layout: TargetDataLayout,
/// Stores memory for globals (statics/consts).
pub(crate) alloc_map: Lock<interpret::AllocMap<'tcx>>,
output_filenames: Arc<OutputFilenames>,
}
impl<'tcx> TyCtxt<'tcx> {
pub fn typeck_opt_const_arg(
self,
def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx TypeckResults<'tcx> {
if let Some(param_did) = def.const_param_did {
self.typeck_const_arg((def.did, param_did))
} else {
self.typeck(def.did)
}
}
pub fn mir_borrowck_opt_const_arg(
self,
def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx BorrowCheckResult<'tcx> {
if let Some(param_did) = def.const_param_did {
self.mir_borrowck_const_arg((def.did, param_did))
} else {
self.mir_borrowck(def.did)
}
}
pub fn alloc_steal_thir(self, thir: Thir<'tcx>) -> &'tcx Steal<Thir<'tcx>> {
self.arena.alloc(Steal::new(thir))
}
pub fn alloc_steal_mir(self, mir: Body<'tcx>) -> &'tcx Steal<Body<'tcx>> {
self.arena.alloc(Steal::new(mir))
}
pub fn alloc_steal_promoted(
self,
promoted: IndexVec<Promoted, Body<'tcx>>,
) -> &'tcx Steal<IndexVec<Promoted, Body<'tcx>>> {
self.arena.alloc(Steal::new(promoted))
}
pub fn alloc_adt_def(
self,
did: DefId,
kind: AdtKind,
variants: IndexVec<VariantIdx, ty::VariantDef>,
repr: ReprOptions,
) -> &'tcx ty::AdtDef {
self.intern_adt_def(ty::AdtDef::new(self, did, kind, variants, repr))
}
/// Allocates a read-only byte or string literal for `mir::interpret`.
pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId {
// Create an allocation that just contains these bytes.
let alloc = interpret::Allocation::from_bytes_byte_aligned_immutable(bytes);
let alloc = self.intern_const_alloc(alloc);
self.create_memory_alloc(alloc)
}
/// Returns a range of the start/end indices specified with the
/// `rustc_layout_scalar_valid_range` attribute.
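///
/// For example (an illustrative sketch of how the attribute is consumed; the
/// struct shown is hypothetical):
///
/// ```ignore (illustrative)
/// #[rustc_layout_scalar_valid_range_start(1)]
/// #[repr(transparent)]
/// struct NonZero(u8);
/// // layout_scalar_valid_range(non_zero_def_id)
/// //     == (Bound::Included(1), Bound::Unbounded)
/// ```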
// FIXME(eddyb) this is an awkward spot for this method, maybe move it?
pub fn layout_scalar_valid_range(self, def_id: DefId) -> (Bound<u128>, Bound<u128>) {
let attrs = self.get_attrs(def_id);
let get = |name| {
let attr = match attrs.iter().find(|a| a.has_name(name)) {
Some(attr) => attr,
None => return Bound::Unbounded,
};
debug!("layout_scalar_valid_range: attr={:?}", attr);
if let Some(
&[
ast::NestedMetaItem::Literal(ast::Lit {
kind: ast::LitKind::Int(a, _), ..
}),
],
) = attr.meta_item_list().as_deref()
{
Bound::Included(a)
} else {
self.sess
.delay_span_bug(attr.span, "invalid rustc_layout_scalar_valid_range attribute");
Bound::Unbounded
}
};
(
get(sym::rustc_layout_scalar_valid_range_start),
get(sym::rustc_layout_scalar_valid_range_end),
)
}
pub fn lift<T: Lift<'tcx>>(self, value: T) -> Option<T::Lifted> {
value.lift_to_tcx(self)
}
/// Creates a type context and calls the closure with a `TyCtxt` reference
/// to the context. The closure enforces that the type context and any interned
/// value (types, substs, etc.) can only be used while `ty::tls` has a valid
/// reference to the context, to allow formatting values that need it.
pub fn create_global_ctxt(
s: &'tcx Session,
lint_store: Lrc<dyn Any + sync::Send + sync::Sync>,
arena: &'tcx WorkerLocal<Arena<'tcx>>,
resolutions: ty::ResolverOutputs,
krate: &'tcx hir::Crate<'tcx>,
dep_graph: DepGraph,
on_disk_cache: Option<&'tcx dyn OnDiskCache<'tcx>>,
queries: &'tcx dyn query::QueryEngine<'tcx>,
query_kinds: &'tcx [DepKindStruct],
crate_name: &str,
output_filenames: OutputFilenames,
) -> GlobalCtxt<'tcx> {
let data_layout = TargetDataLayout::parse(&s.target).unwrap_or_else(|err| {
s.fatal(&err);
});
let interners = CtxtInterners::new(arena);
let common_types = CommonTypes::new(&interners);
let common_lifetimes = CommonLifetimes::new(&interners);
let common_consts = CommonConsts::new(&interners, &common_types);
GlobalCtxt {
sess: s,
lint_store,
arena,
interners,
dep_graph,
untracked_resolutions: resolutions,
prof: s.prof.clone(),
types: common_types,
lifetimes: common_lifetimes,
consts: common_consts,
untracked_crate: krate,
on_disk_cache,
queries,
query_caches: query::QueryCaches::default(),
query_kinds,
ty_rcache: Default::default(),
pred_rcache: Default::default(),
selection_cache: Default::default(),
evaluation_cache: Default::default(),
crate_name: Symbol::intern(crate_name),
data_layout,
alloc_map: Lock::new(interpret::AllocMap::new()),
output_filenames: Arc::new(output_filenames),
}
}
crate fn query_kind(self, k: DepKind) -> &'tcx DepKindStruct {
&self.query_kinds[k as usize]
}
/// Constructs a `TyKind::Error` type and registers a `delay_span_bug` to ensure it gets used.
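///
/// A sketch of the intended use (illustrative only): once an error has been
/// reported, returning the error type lets later stages keep going without
/// emitting cascading diagnostics, while the delayed bug guarantees the
/// compilation cannot silently succeed with this type still in it.
///
/// ```ignore (illustrative)
/// let ty = match lookup {
///     Some(def_id) => tcx.type_of(def_id),
///     None => tcx.ty_error(), // an error was already reported above
/// };
/// ```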
#[track_caller]
pub fn ty_error(self) -> Ty<'tcx> {
self.ty_error_with_message(DUMMY_SP, "TyKind::Error constructed but no error reported")
}
/// Constructs a `TyKind::Error` type and registers a `delay_span_bug` with the given `msg` to
/// ensure it gets used.
#[track_caller]
pub fn ty_error_with_message<S: Into<MultiSpan>>(self, span: S, msg: &str) -> Ty<'tcx> {
self.sess.delay_span_bug(span, msg);
self.mk_ty(Error(DelaySpanBugEmitted(())))
}
/// Like [TyCtxt::ty_error] but for constants.
#[track_caller]
pub fn const_error(self, ty: Ty<'tcx>) -> Const<'tcx> {
self.const_error_with_message(
ty,
DUMMY_SP,
"ty::ConstKind::Error constructed but no error reported",
)
}
/// Like [TyCtxt::ty_error_with_message] but for constants.
#[track_caller]
pub fn const_error_with_message<S: Into<MultiSpan>>(
self,
ty: Ty<'tcx>,
span: S,
msg: &str,
) -> Const<'tcx> {
self.sess.delay_span_bug(span, msg);
self.mk_const(ty::ConstS { val: ty::ConstKind::Error(DelaySpanBugEmitted(())), ty })
}
pub fn consider_optimizing<T: Fn() -> String>(self, msg: T) -> bool {
let cname = self.crate_name(LOCAL_CRATE);
self.sess.consider_optimizing(cname.as_str(), msg)
}
/// Obtain all lang items of this crate and all dependencies (recursively)
pub fn lang_items(self) -> &'tcx rustc_hir::lang_items::LanguageItems {
self.get_lang_items(())
}
/// Obtain the given diagnostic item's `DefId`. Use `is_diagnostic_item` if you just want to
/// compare against another `DefId`, since `is_diagnostic_item` is cheaper.
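///
/// For example (illustrative), checking whether `did` refers to `Option`:
///
/// ```ignore (illustrative)
/// if tcx.is_diagnostic_item(sym::Option, did) {
///     // handle `Option` specially
/// }
/// ```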
pub fn get_diagnostic_item(self, name: Symbol) -> Option<DefId> {
self.all_diagnostic_items(()).name_to_id.get(&name).copied()
}
/// Obtain the diagnostic item's name
pub fn get_diagnostic_name(self, id: DefId) -> Option<Symbol> {
self.diagnostic_items(id.krate).id_to_name.get(&id).copied()
}
/// Check whether the diagnostic item with the given `name` has the given `DefId`.
pub fn is_diagnostic_item(self, name: Symbol, did: DefId) -> bool {
self.diagnostic_items(did.krate).name_to_id.get(&name) == Some(&did)
}
pub fn stability(self) -> &'tcx stability::Index<'tcx> {
self.stability_index(())
}
pub fn features(self) -> &'tcx rustc_feature::Features {
self.features_query(())
}
pub fn def_key(self, id: DefId) -> rustc_hir::definitions::DefKey {
// Accessing the DefKey is ok, since it is part of DefPathHash.
if let Some(id) = id.as_local() {
self.untracked_resolutions.definitions.def_key(id)
} else {
self.untracked_resolutions.cstore.def_key(id)
}
}
/// Converts a `DefId` into its fully expanded `DefPath` (every
/// `DefId` is really just an interned `DefPath`).
///
/// Note that if `id` is not local to this crate, the result will
/// be a non-local `DefPath`.
pub fn def_path(self, id: DefId) -> rustc_hir::definitions::DefPath {
// Accessing the DefPath is ok, since it is part of DefPathHash.
if let Some(id) = id.as_local() {
self.untracked_resolutions.definitions.def_path(id)
} else {
self.untracked_resolutions.cstore.def_path(id)
}
}
#[inline]
pub fn def_path_hash(self, def_id: DefId) -> rustc_hir::definitions::DefPathHash {
// Accessing the DefPathHash is ok, it is incr. comp. stable.
if let Some(def_id) = def_id.as_local() {
self.untracked_resolutions.definitions.def_path_hash(def_id)
} else {
self.untracked_resolutions.cstore.def_path_hash(def_id)
}
}
#[inline]
pub fn stable_crate_id(self, crate_num: CrateNum) -> StableCrateId {
if crate_num == LOCAL_CRATE {
self.sess.local_stable_crate_id()
} else {
self.untracked_resolutions.cstore.stable_crate_id(crate_num)
}
}
/// Maps a StableCrateId to the corresponding CrateNum. This method assumes
/// that the crate in question has already been loaded by the CrateStore.
#[inline]
pub fn stable_crate_id_to_crate_num(self, stable_crate_id: StableCrateId) -> CrateNum {
if stable_crate_id == self.sess.local_stable_crate_id() {
LOCAL_CRATE
} else {
self.untracked_resolutions.cstore.stable_crate_id_to_crate_num(stable_crate_id)
}
}
/// Converts a `DefPathHash` to its corresponding `DefId` in the current compilation
/// session, if it still exists. This is used during incremental compilation to
/// turn a deserialized `DefPathHash` into its current `DefId`.
pub fn def_path_hash_to_def_id(self, hash: DefPathHash, err: &mut dyn FnMut() -> !) -> DefId {
debug!("def_path_hash_to_def_id({:?})", hash);
let stable_crate_id = hash.stable_crate_id();
// If this is a DefPathHash from the local crate, we can look up the
// DefId in the tcx's `Definitions`.
if stable_crate_id == self.sess.local_stable_crate_id() {
self.untracked_resolutions
.definitions
.local_def_path_hash_to_def_id(hash, err)
.to_def_id()
} else {
// If this is a DefPathHash from an upstream crate, let the CrateStore map
// it to a DefId.
let cstore = &self.untracked_resolutions.cstore;
let cnum = cstore.stable_crate_id_to_crate_num(stable_crate_id);
cstore.def_path_hash_to_def_id(cnum, hash)
}
}
pub fn def_path_debug_str(self, def_id: DefId) -> String {
// We are explicitly not going through queries here in order to get
// crate name and stable crate id since this code is called from debug!()
// statements within the query system and we'd run into endless
// recursion otherwise.
let (crate_name, stable_crate_id) = if def_id.is_local() {
(self.crate_name, self.sess.local_stable_crate_id())
} else {
let cstore = &self.untracked_resolutions.cstore;
(cstore.crate_name(def_id.krate), cstore.stable_crate_id(def_id.krate))
};
format!(
"{}[{}]{}",
crate_name,
// Don't print the whole stable crate id. That's just
// annoying in debug output.
&(format!("{:08x}", stable_crate_id.to_u64()))[..4],
self.def_path(def_id).to_string_no_crate_verbose()
)
}
/// Note that this is *untracked* and should only be used within the query
/// system if the result is otherwise tracked through queries
pub fn cstore_untracked(self) -> &'tcx ty::CrateStoreDyn {
&*self.untracked_resolutions.cstore
}
/// Note that this is *untracked* and should only be used within the query
/// system if the result is otherwise tracked through queries
pub fn definitions_untracked(self) -> &'tcx hir::definitions::Definitions {
&self.untracked_resolutions.definitions
}
#[inline(always)]
pub fn create_stable_hashing_context(self) -> StableHashingContext<'tcx> {
let resolutions = &self.gcx.untracked_resolutions;
StableHashingContext::new(self.sess, &resolutions.definitions, &*resolutions.cstore)
}
#[inline(always)]
pub fn create_no_span_stable_hashing_context(self) -> StableHashingContext<'tcx> {
let resolutions = &self.gcx.untracked_resolutions;
StableHashingContext::ignore_spans(
self.sess,
&resolutions.definitions,
&*resolutions.cstore,
)
}
pub fn serialize_query_result_cache(self, encoder: &mut FileEncoder) -> FileEncodeResult {
self.on_disk_cache.as_ref().map_or(Ok(()), |c| c.serialize(self, encoder))
}
/// If `true`, we should use the MIR-based borrowck, but also
/// fall back on the AST borrowck if the MIR-based one errors.
pub fn migrate_borrowck(self) -> bool {
self.borrowck_mode().migrate()
}
/// What mode(s) of borrowck should we run? AST? MIR? both?
/// (Also considers the `#![feature(nll)]` setting.)
pub fn borrowck_mode(self) -> BorrowckMode {
// Here are the main constraints we need to deal with:
//
// 1. An opts.borrowck_mode of `BorrowckMode::Migrate` is
// synonymous with no `-Z borrowck=...` flag at all.
//
// 2. We want to allow developers on the Nightly channel
// to opt back into the "hard error" mode for NLL,
// (which they can do via specifying `#![feature(nll)]`
// explicitly in their crate).
//
// So, this precedence list is how pnkfelix chose to work with
// the above constraints:
//
// * `#![feature(nll)]` *always* means use NLL with hard
// errors. (To simplify the code here, it now even overrides
// a user's attempt to specify `-Z borrowck=compare`, which
// we arguably do not need anymore and should remove.)
//
// * Otherwise, if no `-Z borrowck=...` then use migrate mode
//
// * Otherwise, use the behavior requested via `-Z borrowck=...`
if self.features().nll {
return BorrowckMode::Mir;
}
self.sess.opts.borrowck_mode
}
/// If `true`, we should use lazy normalization for constants, otherwise
/// we still evaluate them eagerly.
#[inline]
pub fn lazy_normalization(self) -> bool {
let features = self.features();
// Note: We only use lazy normalization for generic const expressions.
features.generic_const_exprs
}
#[inline]
pub fn local_crate_exports_generics(self) -> bool {
debug_assert!(self.sess.opts.share_generics());
self.sess.crate_types().iter().any(|crate_type| {
match crate_type {
CrateType::Executable
| CrateType::Staticlib
| CrateType::ProcMacro
| CrateType::Cdylib => false,
// FIXME rust-lang/rust#64319, rust-lang/rust#64872:
// We want to block export of generics from dylibs,
// but we must fix rust-lang/rust#65890 before we can
// do that robustly.
CrateType::Dylib => true,
CrateType::Rlib => true,
}
})
}
// Returns the `DefId` and the `BoundRegionKind` corresponding to the given region.
pub fn is_suitable_region(self, region: Region<'tcx>) -> Option<FreeRegionInfo> {
let (suitable_region_binding_scope, bound_region) = match *region {
ty::ReFree(ref free_region) => {
(free_region.scope.expect_local(), free_region.bound_region)
}
ty::ReEarlyBound(ref ebr) => (
self.parent(ebr.def_id).unwrap().expect_local(),
ty::BoundRegionKind::BrNamed(ebr.def_id, ebr.name),
),
_ => return None, // not a free region
};
let is_impl_item = match self.hir().find_by_def_id(suitable_region_binding_scope) {
Some(Node::Item(..) | Node::TraitItem(..)) => false,
Some(Node::ImplItem(..)) => {
self.is_bound_region_in_impl_item(suitable_region_binding_scope)
}
_ => return None,
};
Some(FreeRegionInfo {
def_id: suitable_region_binding_scope,
boundregion: bound_region,
is_impl_item,
})
}
/// Given a `DefId` for an `fn`, return all the `dyn` and `impl` traits in its return type.
pub fn return_type_impl_or_dyn_traits(
self,
scope_def_id: LocalDefId,
) -> Vec<&'tcx hir::Ty<'tcx>> {
let hir_id = self.hir().local_def_id_to_hir_id(scope_def_id);
let hir_output = match self.hir().fn_decl_by_hir_id(hir_id) {
Some(hir::FnDecl { output: hir::FnRetTy::Return(ty), .. }) => ty,
_ => return vec![],
};
let mut v = TraitObjectVisitor(vec![], self.hir());
v.visit_ty(hir_output);
v.0
}
pub fn return_type_impl_trait(self, scope_def_id: LocalDefId) -> Option<(Ty<'tcx>, Span)> {
// `type_of()` will fail on these (#55796, #86483), so only allow `fn`s or closures.
match self.hir().get_by_def_id(scope_def_id) {
Node::Item(&hir::Item { kind: ItemKind::Fn(..), .. }) => {}
Node::TraitItem(&hir::TraitItem { kind: TraitItemKind::Fn(..), .. }) => {}
Node::ImplItem(&hir::ImplItem { kind: ImplItemKind::Fn(..), .. }) => {}
Node::Expr(&hir::Expr { kind: ExprKind::Closure(..), .. }) => {}
_ => return None,
}
let ret_ty = self.type_of(scope_def_id);
match ret_ty.kind() {
ty::FnDef(_, _) => {
let sig = ret_ty.fn_sig(self);
let output = self.erase_late_bound_regions(sig.output());
if output.is_impl_trait() {
let hir_id = self.hir().local_def_id_to_hir_id(scope_def_id);
let fn_decl = self.hir().fn_decl_by_hir_id(hir_id).unwrap();
Some((output, fn_decl.output.span()))
} else {
None
}
}
_ => None,
}
}
    // Checks if the bound region is in an impl item.
pub fn is_bound_region_in_impl_item(self, suitable_region_binding_scope: LocalDefId) -> bool {
let container_id =
self.associated_item(suitable_region_binding_scope.to_def_id()).container.id();
if self.impl_trait_ref(container_id).is_some() {
// For now, we do not try to target impls of traits. This is
// because this message is going to suggest that the user
// change the fn signature, but they may not be free to do so,
// since the signature must match the trait.
//
// FIXME(#42706) -- in some cases, we could do better here.
return true;
}
false
}
/// Determines whether identifiers in the assembly have strict naming rules.
/// Currently, only NVPTX* targets need it.
pub fn has_strict_asm_symbol_naming(self) -> bool {
self.sess.target.arch.contains("nvptx")
}
/// Returns `&'static core::panic::Location<'static>`.
pub fn caller_location_ty(self) -> Ty<'tcx> {
self.mk_imm_ref(
self.lifetimes.re_static,
self.type_of(self.require_lang_item(LangItem::PanicLocation, None))
.subst(self, self.mk_substs([self.lifetimes.re_static.into()].iter())),
)
}
/// Returns a displayable description and article for the given `def_id` (e.g. `("a", "struct")`).
pub fn article_and_description(self, def_id: DefId) -> (&'static str, &'static str) {
match self.def_kind(def_id) {
DefKind::Generator => match self.generator_kind(def_id).unwrap() {
rustc_hir::GeneratorKind::Async(..) => ("an", "async closure"),
rustc_hir::GeneratorKind::Gen => ("a", "generator"),
},
def_kind => (def_kind.article(), def_kind.descr(def_id)),
}
}
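    // Illustrative sketch, not from the original source: diagnostic code can splice the
    // result straight into a message (`def_id` here stands in for any valid `DefId`):
    //
    //     let (article, descr) = tcx.article_and_description(def_id);
    //     // e.g. ("a", "struct") or ("an", "async closure")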
pub fn type_length_limit(self) -> Limit {
self.limits(()).type_length_limit
}
pub fn recursion_limit(self) -> Limit {
self.limits(()).recursion_limit
}
pub fn move_size_limit(self) -> Limit {
self.limits(()).move_size_limit
}
pub fn const_eval_limit(self) -> Limit {
self.limits(()).const_eval_limit
}
pub fn all_traits(self) -> impl Iterator<Item = DefId> + 'tcx {
iter::once(LOCAL_CRATE)
.chain(self.crates(()).iter().copied())
.flat_map(move |cnum| self.traits_in_crate(cnum).iter().copied())
}
}
/// A trait implemented for all `X<'a>` types that can be safely and
/// efficiently converted to `X<'tcx>` as long as they are part of the
/// provided `TyCtxt<'tcx>`.
/// This can be done, for example, for `Ty<'tcx>` or `SubstsRef<'tcx>`
/// by looking them up in their respective interners.
///
/// However, this is still not the best implementation as it does
/// need to compare the components, even for interned values.
/// It would be more efficient if `TypedArena` provided a way to
/// determine whether the address is in the allocated range.
///
/// `None` is returned if the value or one of the components is not part
/// of the provided context.
/// For `Ty`, `None` can be returned if either the type interner doesn't
/// contain the `TyKind` key or if the address of the interned
/// pointer differs. The latter case is possible if a primitive type,
/// e.g., `()` or `u8`, was interned in a different context.
pub trait Lift<'tcx>: fmt::Debug {
type Lifted: fmt::Debug + 'tcx;
fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted>;
}
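// Illustrative sketch (assumptions: a `TyCtxt<'tcx>` named `tcx` and a value interned in
// some other context). The usual entry point is the `lift` helper on `TyCtxt`, which goes
// through this trait and yields `None` if the value was not interned in `tcx`:
//
//     if let Some(ty) = tcx.lift(ty_from_other_ctxt) {
//         // `ty: Ty<'tcx>` can now be used with queries on `tcx`.
//     }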
// Deprecated: we are in the process of converting all uses to `nop_lift`.
macro_rules! nop_lift_old {
($set:ident; $ty:ty => $lifted:ty) => {
impl<'a, 'tcx> Lift<'tcx> for $ty {
type Lifted = $lifted;
fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
if tcx.interners.$set.contains_pointer_to(&InternedInSet(self)) {
Some(unsafe { mem::transmute(self) })
} else {
None
}
}
}
};
}
macro_rules! nop_lift {
($set:ident; $ty:ty => $lifted:ty) => {
impl<'a, 'tcx> Lift<'tcx> for $ty {
type Lifted = $lifted;
fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
if tcx.interners.$set.contains_pointer_to(&InternedInSet(self.0.0)) {
Some(unsafe { mem::transmute(self) })
} else {
None
}
}
}
};
}
macro_rules! nop_list_lift {
($set:ident; $ty:ty => $lifted:ty) => {
impl<'a, 'tcx> Lift<'tcx> for &'a List<$ty> {
type Lifted = &'tcx List<$lifted>;
fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
if self.is_empty() {
return Some(List::empty());
}
if tcx.interners.$set.contains_pointer_to(&InternedInSet(self)) {
Some(unsafe { mem::transmute(self) })
} else {
None
}
}
}
};
}
nop_lift! {type_; Ty<'a> => Ty<'tcx>}
nop_lift! {region; Region<'a> => Region<'tcx>}
nop_lift! {const_; Const<'a> => Const<'tcx>}
nop_lift_old! {const_allocation; &'a Allocation => &'tcx Allocation}
nop_lift! {predicate; Predicate<'a> => Predicate<'tcx>}
nop_list_lift! {type_list; Ty<'a> => Ty<'tcx>}
nop_list_lift! {poly_existential_predicates; ty::Binder<'a, ExistentialPredicate<'a>> => ty::Binder<'tcx, ExistentialPredicate<'tcx>>}
nop_list_lift! {predicates; Predicate<'a> => Predicate<'tcx>}
nop_list_lift! {canonical_var_infos; CanonicalVarInfo<'a> => CanonicalVarInfo<'tcx>}
nop_list_lift! {projs; ProjectionKind => ProjectionKind}
nop_list_lift! {bound_variable_kinds; ty::BoundVariableKind => ty::BoundVariableKind}
// This is the impl for `&'a InternalSubsts<'a>`.
nop_list_lift! {substs; GenericArg<'a> => GenericArg<'tcx>}
CloneLiftImpls! { for<'tcx> { Constness, traits::WellFormedLoc, } }
pub mod tls {
use super::{ptr_eq, GlobalCtxt, TyCtxt};
use crate::dep_graph::TaskDepsRef;
use crate::ty::query;
use rustc_data_structures::sync::{self, Lock};
use rustc_data_structures::thin_vec::ThinVec;
use rustc_errors::Diagnostic;
use std::mem;
#[cfg(not(parallel_compiler))]
use std::cell::Cell;
#[cfg(parallel_compiler)]
use rustc_rayon_core as rayon_core;
/// This is the implicit state of rustc. It contains the current
/// `TyCtxt` and query. It is updated when creating a local interner or
/// executing a new query. Whenever there's a `TyCtxt` value available
/// you should also have access to an `ImplicitCtxt` through the functions
/// in this module.
#[derive(Clone)]
pub struct ImplicitCtxt<'a, 'tcx> {
/// The current `TyCtxt`.
pub tcx: TyCtxt<'tcx>,
/// The current query job, if any. This is updated by `JobOwner::start` in
/// `ty::query::plumbing` when executing a query.
pub query: Option<query::QueryJobId>,
/// Where to store diagnostics for the current query job, if any.
/// This is updated by `JobOwner::start` in `ty::query::plumbing` when executing a query.
pub diagnostics: Option<&'a Lock<ThinVec<Diagnostic>>>,
/// Used to prevent layout from recursing too deeply.
pub layout_depth: usize,
/// The current dep graph task. This is used to add dependencies to queries
/// when executing them.
pub task_deps: TaskDepsRef<'a>,
}
impl<'a, 'tcx> ImplicitCtxt<'a, 'tcx> {
pub fn new(gcx: &'tcx GlobalCtxt<'tcx>) -> Self {
let tcx = TyCtxt { gcx };
ImplicitCtxt {
tcx,
query: None,
diagnostics: None,
layout_depth: 0,
task_deps: TaskDepsRef::Ignore,
}
}
}
/// Sets Rayon's thread-local variable, which is preserved for Rayon jobs
/// to `value` during the call to `f`. It is restored to its previous value after.
/// This is used to set the pointer to the new `ImplicitCtxt`.
#[cfg(parallel_compiler)]
#[inline]
fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
rayon_core::tlv::with(value, f)
}
/// Gets Rayon's thread-local variable, which is preserved for Rayon jobs.
/// This is used to get the pointer to the current `ImplicitCtxt`.
#[cfg(parallel_compiler)]
#[inline]
pub fn get_tlv() -> usize {
rayon_core::tlv::get()
}
#[cfg(not(parallel_compiler))]
thread_local! {
/// A thread local variable that stores a pointer to the current `ImplicitCtxt`.
static TLV: Cell<usize> = const { Cell::new(0) };
}
/// Sets TLV to `value` during the call to `f`.
/// It is restored to its previous value after.
/// This is used to set the pointer to the new `ImplicitCtxt`.
#[cfg(not(parallel_compiler))]
#[inline]
fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
let old = get_tlv();
let _reset = rustc_data_structures::OnDrop(move || TLV.with(|tlv| tlv.set(old)));
TLV.with(|tlv| tlv.set(value));
f()
}
/// Gets the pointer to the current `ImplicitCtxt`.
#[cfg(not(parallel_compiler))]
#[inline]
fn get_tlv() -> usize {
TLV.with(|tlv| tlv.get())
}
/// Sets `context` as the new current `ImplicitCtxt` for the duration of the function `f`.
#[inline]
pub fn enter_context<'a, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'tcx>, f: F) -> R
where
F: FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R,
{
set_tlv(context as *const _ as usize, || f(&context))
}
/// Allows access to the current `ImplicitCtxt` in a closure if one is available.
#[inline]
pub fn with_context_opt<F, R>(f: F) -> R
where
F: for<'a, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'tcx>>) -> R,
{
let context = get_tlv();
if context == 0 {
f(None)
} else {
// We could get an `ImplicitCtxt` pointer from another thread.
// Ensure that `ImplicitCtxt` is `Sync`.
sync::assert_sync::<ImplicitCtxt<'_, '_>>();
unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_>))) }
}
}
/// Allows access to the current `ImplicitCtxt`.
/// Panics if there is no `ImplicitCtxt` available.
#[inline]
pub fn with_context<F, R>(f: F) -> R
where
F: for<'a, 'tcx> FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R,
{
with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
}
/// Allows access to the current `ImplicitCtxt` whose tcx field is the same as the tcx argument
/// passed in. This means the closure is given an `ImplicitCtxt` with the same `'tcx` lifetime
/// as the `TyCtxt` passed in.
/// This will panic if you pass it a `TyCtxt` which is different from the current
/// `ImplicitCtxt`'s `tcx` field.
#[inline]
pub fn with_related_context<'tcx, F, R>(tcx: TyCtxt<'tcx>, f: F) -> R
where
F: FnOnce(&ImplicitCtxt<'_, 'tcx>) -> R,
{
with_context(|context| unsafe {
assert!(ptr_eq(context.tcx.gcx, tcx.gcx));
let context: &ImplicitCtxt<'_, '_> = mem::transmute(context);
f(context)
})
}
/// Allows access to the `TyCtxt` in the current `ImplicitCtxt`.
/// Panics if there is no `ImplicitCtxt` available.
#[inline]
pub fn with<F, R>(f: F) -> R
where
F: for<'tcx> FnOnce(TyCtxt<'tcx>) -> R,
{
with_context(|context| f(context.tcx))
}
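    // Illustrative sketch: code that has no `TyCtxt` in scope (e.g. a `Debug` impl) can
    // reach the current context, if one is set, through this module. The query used here
    // is only an assumption for the example:
    //
    //     ty::tls::with(|tcx| println!("{}", tcx.crate_name(LOCAL_CRATE)));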
/// Allows access to the `TyCtxt` in the current `ImplicitCtxt`.
/// The closure is passed None if there is no `ImplicitCtxt` available.
#[inline]
pub fn with_opt<F, R>(f: F) -> R
where
F: for<'tcx> FnOnce(Option<TyCtxt<'tcx>>) -> R,
{
with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx)))
}
}
macro_rules! sty_debug_print {
($fmt: expr, $ctxt: expr, $($variant: ident),*) => {{
// Curious inner module to allow variant names to be used as
// variable names.
#[allow(non_snake_case)]
mod inner {
use crate::ty::{self, TyCtxt};
use crate::ty::context::InternedInSet;
#[derive(Copy, Clone)]
struct DebugStat {
total: usize,
lt_infer: usize,
ty_infer: usize,
ct_infer: usize,
all_infer: usize,
}
pub fn go(fmt: &mut std::fmt::Formatter<'_>, tcx: TyCtxt<'_>) -> std::fmt::Result {
let mut total = DebugStat {
total: 0,
lt_infer: 0,
ty_infer: 0,
ct_infer: 0,
all_infer: 0,
};
$(let mut $variant = total;)*
let shards = tcx.interners.type_.lock_shards();
let types = shards.iter().flat_map(|shard| shard.keys());
for &InternedInSet(t) in types {
let variant = match t.kind {
ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
ty::Float(..) | ty::Str | ty::Never => continue,
ty::Error(_) => /* unimportant */ continue,
$(ty::$variant(..) => &mut $variant,)*
};
let lt = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);
let ct = t.flags.intersects(ty::TypeFlags::HAS_CT_INFER);
variant.total += 1;
total.total += 1;
if lt { total.lt_infer += 1; variant.lt_infer += 1 }
if ty { total.ty_infer += 1; variant.ty_infer += 1 }
if ct { total.ct_infer += 1; variant.ct_infer += 1 }
if lt && ty && ct { total.all_infer += 1; variant.all_infer += 1 }
}
writeln!(fmt, "Ty interner total ty lt ct all")?;
$(writeln!(fmt, " {:18}: {uses:6} {usespc:4.1}%, \
{ty:4.1}% {lt:5.1}% {ct:4.1}% {all:4.1}%",
stringify!($variant),
uses = $variant.total,
usespc = $variant.total as f64 * 100.0 / total.total as f64,
ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
lt = $variant.lt_infer as f64 * 100.0 / total.total as f64,
ct = $variant.ct_infer as f64 * 100.0 / total.total as f64,
all = $variant.all_infer as f64 * 100.0 / total.total as f64)?;
)*
writeln!(fmt, " total {uses:6} \
{ty:4.1}% {lt:5.1}% {ct:4.1}% {all:4.1}%",
uses = total.total,
ty = total.ty_infer as f64 * 100.0 / total.total as f64,
lt = total.lt_infer as f64 * 100.0 / total.total as f64,
ct = total.ct_infer as f64 * 100.0 / total.total as f64,
all = total.all_infer as f64 * 100.0 / total.total as f64)
}
}
inner::go($fmt, $ctxt)
}}
}
impl<'tcx> TyCtxt<'tcx> {
pub fn debug_stats(self) -> impl std::fmt::Debug + 'tcx {
struct DebugStats<'tcx>(TyCtxt<'tcx>);
impl<'tcx> std::fmt::Debug for DebugStats<'tcx> {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
sty_debug_print!(
fmt,
self.0,
Adt,
Array,
Slice,
RawPtr,
Ref,
FnDef,
FnPtr,
Placeholder,
Generator,
GeneratorWitness,
Dynamic,
Closure,
Tuple,
Bound,
Param,
Infer,
Projection,
Opaque,
Foreign
)?;
writeln!(fmt, "InternalSubsts interner: #{}", self.0.interners.substs.len())?;
writeln!(fmt, "Region interner: #{}", self.0.interners.region.len())?;
writeln!(fmt, "Stability interner: #{}", self.0.interners.stability.len())?;
writeln!(
fmt,
"Const Stability interner: #{}",
self.0.interners.const_stability.len()
)?;
writeln!(
fmt,
"Const Allocation interner: #{}",
self.0.interners.const_allocation.len()
)?;
writeln!(fmt, "Layout interner: #{}", self.0.interners.layout.len())?;
Ok(())
}
}
DebugStats(self)
}
}
// This type holds a `T` in the interner. The `T` is stored in the arena and
// this type just holds a pointer to it, but it still effectively owns it. It
// impls `Borrow` so that it can be looked up using the original
// (non-arena-memory-owning) types.
struct InternedInSet<'tcx, T: ?Sized>(&'tcx T);
impl<'tcx, T: 'tcx + ?Sized> Clone for InternedInSet<'tcx, T> {
fn clone(&self) -> Self {
InternedInSet(self.0)
}
}
impl<'tcx, T: 'tcx + ?Sized> Copy for InternedInSet<'tcx, T> {}
impl<'tcx, T: 'tcx + ?Sized> IntoPointer for InternedInSet<'tcx, T> {
fn into_pointer(&self) -> *const () {
self.0 as *const _ as *const ()
}
}
#[allow(rustc::usage_of_ty_tykind)]
impl<'tcx> Borrow<TyKind<'tcx>> for InternedInSet<'tcx, TyS<'tcx>> {
fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> {
&self.0.kind
}
}
impl<'tcx> PartialEq for InternedInSet<'tcx, TyS<'tcx>> {
fn eq(&self, other: &InternedInSet<'tcx, TyS<'tcx>>) -> bool {
// The `Borrow` trait requires that `x.borrow() == y.borrow()` equals
// `x == y`.
self.0.kind == other.0.kind
}
}
impl<'tcx> Eq for InternedInSet<'tcx, TyS<'tcx>> {}
impl<'tcx> Hash for InternedInSet<'tcx, TyS<'tcx>> {
fn hash<H: Hasher>(&self, s: &mut H) {
// The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`.
self.0.kind.hash(s)
}
}
impl<'tcx> Borrow<Binder<'tcx, PredicateKind<'tcx>>> for InternedInSet<'tcx, PredicateS<'tcx>> {
fn borrow<'a>(&'a self) -> &'a Binder<'tcx, PredicateKind<'tcx>> {
&self.0.kind
}
}
impl<'tcx> PartialEq for InternedInSet<'tcx, PredicateS<'tcx>> {
fn eq(&self, other: &InternedInSet<'tcx, PredicateS<'tcx>>) -> bool {
// The `Borrow` trait requires that `x.borrow() == y.borrow()` equals
// `x == y`.
self.0.kind == other.0.kind
}
}
impl<'tcx> Eq for InternedInSet<'tcx, PredicateS<'tcx>> {}
impl<'tcx> Hash for InternedInSet<'tcx, PredicateS<'tcx>> {
fn hash<H: Hasher>(&self, s: &mut H) {
// The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`.
self.0.kind.hash(s)
}
}
impl<'tcx, T> Borrow<[T]> for InternedInSet<'tcx, List<T>> {
fn borrow<'a>(&'a self) -> &'a [T] {
&self.0[..]
}
}
impl<'tcx, T: PartialEq> PartialEq for InternedInSet<'tcx, List<T>> {
fn eq(&self, other: &InternedInSet<'tcx, List<T>>) -> bool {
// The `Borrow` trait requires that `x.borrow() == y.borrow()` equals
// `x == y`.
self.0[..] == other.0[..]
}
}
impl<'tcx, T: Eq> Eq for InternedInSet<'tcx, List<T>> {}
impl<'tcx, T: Hash> Hash for InternedInSet<'tcx, List<T>> {
fn hash<H: Hasher>(&self, s: &mut H) {
// The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`.
self.0[..].hash(s)
}
}
macro_rules! direct_interners {
($($name:ident: $method:ident($ty:ty): $ret_ctor:ident -> $ret_ty:ty,)+) => {
$(impl<'tcx> Borrow<$ty> for InternedInSet<'tcx, $ty> {
fn borrow<'a>(&'a self) -> &'a $ty {
&self.0
}
}
impl<'tcx> PartialEq for InternedInSet<'tcx, $ty> {
fn eq(&self, other: &Self) -> bool {
// The `Borrow` trait requires that `x.borrow() == y.borrow()`
// equals `x == y`.
self.0 == other.0
}
}
impl<'tcx> Eq for InternedInSet<'tcx, $ty> {}
impl<'tcx> Hash for InternedInSet<'tcx, $ty> {
fn hash<H: Hasher>(&self, s: &mut H) {
// The `Borrow` trait requires that `x.borrow().hash(s) ==
// x.hash(s)`.
self.0.hash(s)
}
}
impl<'tcx> TyCtxt<'tcx> {
pub fn $method(self, v: $ty) -> $ret_ty {
$ret_ctor(Interned::new_unchecked(self.interners.$name.intern(v, |v| {
InternedInSet(self.interners.arena.alloc(v))
}).0))
}
})+
}
}
direct_interners! {
region: mk_region(RegionKind): Region -> Region<'tcx>,
const_: mk_const(ConstS<'tcx>): Const -> Const<'tcx>,
}
macro_rules! direct_interners_old {
($($name:ident: $method:ident($ty:ty),)+) => {
$(impl<'tcx> Borrow<$ty> for InternedInSet<'tcx, $ty> {
fn borrow<'a>(&'a self) -> &'a $ty {
&self.0
}
}
impl<'tcx> PartialEq for InternedInSet<'tcx, $ty> {
fn eq(&self, other: &Self) -> bool {
// The `Borrow` trait requires that `x.borrow() == y.borrow()`
// equals `x == y`.
self.0 == other.0
}
}
impl<'tcx> Eq for InternedInSet<'tcx, $ty> {}
impl<'tcx> Hash for InternedInSet<'tcx, $ty> {
fn hash<H: Hasher>(&self, s: &mut H) {
// The `Borrow` trait requires that `x.borrow().hash(s) ==
// x.hash(s)`.
self.0.hash(s)
}
}
impl<'tcx> TyCtxt<'tcx> {
pub fn $method(self, v: $ty) -> &'tcx $ty {
self.interners.$name.intern(v, |v| {
InternedInSet(self.interners.arena.alloc(v))
}).0
}
})+
}
}
// FIXME: eventually these should all be converted to `direct_interners`.
direct_interners_old! {
const_allocation: intern_const_alloc(Allocation),
layout: intern_layout(Layout),
adt_def: intern_adt_def(AdtDef),
stability: intern_stability(attr::Stability),
const_stability: intern_const_stability(attr::ConstStability),
}
macro_rules! slice_interners {
($($field:ident: $method:ident($ty:ty)),+ $(,)?) => (
impl<'tcx> TyCtxt<'tcx> {
$(pub fn $method(self, v: &[$ty]) -> &'tcx List<$ty> {
self.interners.$field.intern_ref(v, || {
InternedInSet(List::from_arena(&*self.arena, v))
}).0
})+
}
);
}
slice_interners!(
type_list: _intern_type_list(Ty<'tcx>),
substs: _intern_substs(GenericArg<'tcx>),
canonical_var_infos: _intern_canonical_var_infos(CanonicalVarInfo<'tcx>),
poly_existential_predicates:
_intern_poly_existential_predicates(ty::Binder<'tcx, ExistentialPredicate<'tcx>>),
predicates: _intern_predicates(Predicate<'tcx>),
projs: _intern_projs(ProjectionKind),
place_elems: _intern_place_elems(PlaceElem<'tcx>),
bound_variable_kinds: _intern_bound_variable_kinds(ty::BoundVariableKind),
);
impl<'tcx> TyCtxt<'tcx> {
/// Given a `fn` type, returns an equivalent `unsafe fn` type;
/// that is, a `fn` type that is equivalent in every way for being
/// unsafe.
pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig { unsafety: hir::Unsafety::Unsafe, ..sig }))
}
    /// Given the `DefId` of a trait `trait_def_id` and the name of an associated item `assoc_name`,
    /// returns `true` if `trait_def_id` defines an associated item of that name.
pub fn trait_may_define_assoc_type(self, trait_def_id: DefId, assoc_name: Ident) -> bool {
self.super_traits_of(trait_def_id).any(|trait_did| {
self.associated_items(trait_did)
.find_by_name_and_kind(self, assoc_name, ty::AssocKind::Type, trait_did)
.is_some()
})
}
/// Computes the def-ids of the transitive supertraits of `trait_def_id`. This (intentionally)
/// does not compute the full elaborated super-predicates but just the set of def-ids. It is used
/// to identify which traits may define a given associated type to help avoid cycle errors.
/// Returns a `DefId` iterator.
fn super_traits_of(self, trait_def_id: DefId) -> impl Iterator<Item = DefId> + 'tcx {
let mut set = FxHashSet::default();
let mut stack = vec![trait_def_id];
set.insert(trait_def_id);
iter::from_fn(move || -> Option<DefId> {
let trait_did = stack.pop()?;
let generic_predicates = self.super_predicates_of(trait_did);
for (predicate, _) in generic_predicates.predicates {
if let ty::PredicateKind::Trait(data) = predicate.kind().skip_binder() {
if set.insert(data.def_id()) {
stack.push(data.def_id());
}
}
}
Some(trait_did)
})
}
/// Given a closure signature, returns an equivalent fn signature. Detuples
/// and so forth -- so e.g., if we have a sig with `Fn<(u32, i32)>` then
/// you would get a `fn(u32, i32)`.
/// `unsafety` determines the unsafety of the fn signature. If you pass
/// `hir::Unsafety::Unsafe` in the previous example, then you would get
/// an `unsafe fn (u32, i32)`.
/// It cannot convert a closure that requires unsafe.
pub fn signature_unclosure(
self,
sig: PolyFnSig<'tcx>,
unsafety: hir::Unsafety,
) -> PolyFnSig<'tcx> {
sig.map_bound(|s| {
let params_iter = match s.inputs()[0].kind() {
ty::Tuple(params) => params.into_iter().map(|k| k.expect_ty()),
_ => bug!(),
};
self.mk_fn_sig(params_iter, s.output(), s.c_variadic, unsafety, abi::Abi::Rust)
})
}
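    // Illustrative sketch (assumes `closure_sig` is the `PolyFnSig` of a `Fn(u32, i32)`
    // closure, i.e. its sole input is the tupled `(u32, i32)`):
    //
    //     let fn_sig = tcx.signature_unclosure(closure_sig, hir::Unsafety::Normal);
    //     // `fn_sig` now describes `fn(u32, i32) -> _`.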
    /// Same as `self.mk_region(kind)`, but avoids accessing the interners if
/// `*r == kind`.
#[inline]
pub fn reuse_or_mk_region(self, r: Region<'tcx>, kind: RegionKind) -> Region<'tcx> {
if *r == kind { r } else { self.mk_region(kind) }
}
#[allow(rustc::usage_of_ty_tykind)]
#[inline]
pub fn mk_ty(self, st: TyKind<'tcx>) -> Ty<'tcx> {
self.interners.intern_ty(st)
}
#[inline]
pub fn mk_predicate(self, binder: Binder<'tcx, PredicateKind<'tcx>>) -> Predicate<'tcx> {
self.interners.intern_predicate(binder)
}
#[inline]
pub fn reuse_or_mk_predicate(
self,
pred: Predicate<'tcx>,
binder: Binder<'tcx, PredicateKind<'tcx>>,
) -> Predicate<'tcx> {
if pred.kind() != binder { self.mk_predicate(binder) } else { pred }
}
pub fn mk_mach_int(self, tm: IntTy) -> Ty<'tcx> {
match tm {
IntTy::Isize => self.types.isize,
IntTy::I8 => self.types.i8,
IntTy::I16 => self.types.i16,
IntTy::I32 => self.types.i32,
IntTy::I64 => self.types.i64,
IntTy::I128 => self.types.i128,
}
}
pub fn mk_mach_uint(self, tm: UintTy) -> Ty<'tcx> {
match tm {
UintTy::Usize => self.types.usize,
UintTy::U8 => self.types.u8,
UintTy::U16 => self.types.u16,
UintTy::U32 => self.types.u32,
UintTy::U64 => self.types.u64,
UintTy::U128 => self.types.u128,
}
}
pub fn mk_mach_float(self, tm: FloatTy) -> Ty<'tcx> {
match tm {
FloatTy::F32 => self.types.f32,
FloatTy::F64 => self.types.f64,
}
}
#[inline]
pub fn mk_static_str(self) -> Ty<'tcx> {
self.mk_imm_ref(self.lifetimes.re_static, self.types.str_)
}
#[inline]
pub fn mk_adt(self, def: &'tcx AdtDef, substs: SubstsRef<'tcx>) -> Ty<'tcx> {
// Take a copy of substs so that we own the vectors inside.
self.mk_ty(Adt(def, substs))
}
#[inline]
pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> {
self.mk_ty(Foreign(def_id))
}
fn mk_generic_adt(self, wrapper_def_id: DefId, ty_param: Ty<'tcx>) -> Ty<'tcx> {
let adt_def = self.adt_def(wrapper_def_id);
let substs =
InternalSubsts::for_item(self, wrapper_def_id, |param, substs| match param.kind {
GenericParamDefKind::Lifetime | GenericParamDefKind::Const { .. } => bug!(),
GenericParamDefKind::Type { has_default, .. } => {
if param.index == 0 {
ty_param.into()
} else {
assert!(has_default);
self.type_of(param.def_id).subst(self, substs).into()
}
}
});
self.mk_ty(Adt(adt_def, substs))
}
#[inline]
pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
let def_id = self.require_lang_item(LangItem::OwnedBox, None);
self.mk_generic_adt(def_id, ty)
}
#[inline]
pub fn mk_lang_item(self, ty: Ty<'tcx>, item: LangItem) -> Option<Ty<'tcx>> {
let def_id = self.lang_items().require(item).ok()?;
Some(self.mk_generic_adt(def_id, ty))
}
#[inline]
pub fn mk_diagnostic_item(self, ty: Ty<'tcx>, name: Symbol) -> Option<Ty<'tcx>> {
let def_id = self.get_diagnostic_item(name)?;
Some(self.mk_generic_adt(def_id, ty))
}
#[inline]
pub fn mk_maybe_uninit(self, ty: Ty<'tcx>) -> Ty<'tcx> {
let def_id = self.require_lang_item(LangItem::MaybeUninit, None);
self.mk_generic_adt(def_id, ty)
}
#[inline]
pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
self.mk_ty(RawPtr(tm))
}
#[inline]
pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
self.mk_ty(Ref(r, tm.ty, tm.mutbl))
}
#[inline]
pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
self.mk_ref(r, TypeAndMut { ty, mutbl: hir::Mutability::Mut })
}
#[inline]
pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
self.mk_ref(r, TypeAndMut { ty, mutbl: hir::Mutability::Not })
}
#[inline]
pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
self.mk_ptr(TypeAndMut { ty, mutbl: hir::Mutability::Mut })
}
#[inline]
pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
self.mk_ptr(TypeAndMut { ty, mutbl: hir::Mutability::Not })
}
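    // Illustrative sketch of the constructors above (assumes `tcx: TyCtxt<'tcx>` and some
    // region `re: Region<'tcx>` are in scope):
    //
    //     let raw_u8 = tcx.mk_imm_ptr(tcx.types.u8);       // *const u8
    //     let mut_u32 = tcx.mk_mut_ref(re, tcx.types.u32); // &'re mut u32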
#[inline]
pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
self.mk_ty(Array(ty, ty::Const::from_usize(self, n)))
}
#[inline]
pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
self.mk_ty(Slice(ty))
}
#[inline]
pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> {
let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect();
self.mk_ty(Tuple(self.intern_substs(&kinds)))
}
pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output {
iter.intern_with(|ts| {
let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect();
self.mk_ty(Tuple(self.intern_substs(&kinds)))
})
}
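    // Illustrative sketch: interning the tuple type `(i32, bool)` with the helper above
    // (assumes `tcx: TyCtxt<'tcx>` is in scope):
    //
    //     let pair = tcx.intern_tup(&[tcx.types.i32, tcx.types.bool]);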
#[inline]
pub fn mk_unit(self) -> Ty<'tcx> {
self.types.unit
}
#[inline]
pub fn mk_diverging_default(self) -> Ty<'tcx> {
if self.features().never_type_fallback { self.types.never } else { self.types.unit }
}
#[inline]
pub fn mk_fn_def(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> {
self.mk_ty(FnDef(def_id, substs))
}
#[inline]
pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
self.mk_ty(FnPtr(fty))
}
#[inline]
pub fn mk_dynamic(
self,
obj: &'tcx List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>>,
reg: ty::Region<'tcx>,
) -> Ty<'tcx> {
self.mk_ty(Dynamic(obj, reg))
}
#[inline]
pub fn mk_projection(self, item_def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> {
self.mk_ty(Projection(ProjectionTy { item_def_id, substs }))
}
#[inline]
pub fn mk_closure(self, closure_id: DefId, closure_substs: SubstsRef<'tcx>) -> Ty<'tcx> {
self.mk_ty(Closure(closure_id, closure_substs))
}
#[inline]
pub fn mk_generator(
self,
id: DefId,
generator_substs: SubstsRef<'tcx>,
movability: hir::Movability,
) -> Ty<'tcx> {
self.mk_ty(Generator(id, generator_substs, movability))
}
#[inline]
pub fn mk_generator_witness(self, types: ty::Binder<'tcx, &'tcx List<Ty<'tcx>>>) -> Ty<'tcx> {
self.mk_ty(GeneratorWitness(types))
}
#[inline]
pub fn mk_ty_var(self, v: TyVid) -> Ty<'tcx> {
self.mk_ty_infer(TyVar(v))
}
#[inline]
pub fn mk_const_var(self, v: ConstVid<'tcx>, ty: Ty<'tcx>) -> Const<'tcx> {
self.mk_const(ty::ConstS { val: ty::ConstKind::Infer(InferConst::Var(v)), ty })
}
#[inline]
pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
self.mk_ty_infer(IntVar(v))
}
#[inline]
pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
self.mk_ty_infer(FloatVar(v))
}
#[inline]
pub fn mk_ty_infer(self, it: InferTy) -> Ty<'tcx> {
self.mk_ty(Infer(it))
}
#[inline]
pub fn mk_const_infer(self, ic: InferConst<'tcx>, ty: Ty<'tcx>) -> ty::Const<'tcx> {
self.mk_const(ty::ConstS { val: ty::ConstKind::Infer(ic), ty })
}
#[inline]
pub fn mk_ty_param(self, index: u32, name: Symbol) -> Ty<'tcx> {
self.mk_ty(Param(ParamTy { index, name }))
}
#[inline]
pub fn mk_const_param(self, index: u32, name: Symbol, ty: Ty<'tcx>) -> Const<'tcx> {
self.mk_const(ty::ConstS { val: ty::ConstKind::Param(ParamConst { index, name }), ty })
}
pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> GenericArg<'tcx> {
match param.kind {
GenericParamDefKind::Lifetime => {
self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into()
}
GenericParamDefKind::Type { .. } => self.mk_ty_param(param.index, param.name).into(),
GenericParamDefKind::Const { .. } => {
self.mk_const_param(param.index, param.name, self.type_of(param.def_id)).into()
}
}
}
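    // Illustrative sketch: the identity substs of an item can be built by mapping every
    // generic parameter through `mk_param_from_def` (essentially the pattern used by
    // `InternalSubsts::identity_for_item`; names here are assumptions for the example):
    //
    //     let identity = InternalSubsts::for_item(tcx, def_id, |param, _| {
    //         tcx.mk_param_from_def(param)
    //     });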
#[inline]
pub fn mk_opaque(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> {
self.mk_ty(Opaque(def_id, substs))
}
pub fn mk_place_field(self, place: Place<'tcx>, f: Field, ty: Ty<'tcx>) -> Place<'tcx> {
self.mk_place_elem(place, PlaceElem::Field(f, ty))
}
pub fn mk_place_deref(self, place: Place<'tcx>) -> Place<'tcx> {
self.mk_place_elem(place, PlaceElem::Deref)
}
pub fn mk_place_downcast(
self,
place: Place<'tcx>,
adt_def: &'tcx AdtDef,
variant_index: VariantIdx,
) -> Place<'tcx> {
self.mk_place_elem(
place,
PlaceElem::Downcast(Some(adt_def.variants[variant_index].name), variant_index),
)
}
pub fn mk_place_downcast_unnamed(
self,
place: Place<'tcx>,
variant_index: VariantIdx,
) -> Place<'tcx> {
self.mk_place_elem(place, PlaceElem::Downcast(None, variant_index))
}
pub fn mk_place_index(self, place: Place<'tcx>, index: Local) -> Place<'tcx> {
self.mk_place_elem(place, PlaceElem::Index(index))
}
    /// This method copies `Place`'s projection, adds an element to it, and reinterns it. It should
    /// not be used to build a full `Place`; it's just a convenient way to grab a projection and
    /// modify it in flight.
pub fn mk_place_elem(self, place: Place<'tcx>, elem: PlaceElem<'tcx>) -> Place<'tcx> {
let mut projection = place.projection.to_vec();
projection.push(elem);
Place { local: place.local, projection: self.intern_place_elems(&projection) }
}
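    // Illustrative sketch (assumes `place: Place<'tcx>` is in scope): deref-projecting an
    // existing MIR place with the helpers above.
    //
    //     let deref = tcx.mk_place_deref(place);
    //     // `deref.projection` is `place.projection` with `PlaceElem::Deref` appended.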
pub fn intern_poly_existential_predicates(
self,
eps: &[ty::Binder<'tcx, ExistentialPredicate<'tcx>>],
) -> &'tcx List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>> {
assert!(!eps.is_empty());
assert!(
eps.array_windows()
.all(|[a, b]| a.skip_binder().stable_cmp(self, &b.skip_binder())
!= Ordering::Greater)
);
self._intern_poly_existential_predicates(eps)
}
pub fn intern_predicates(self, preds: &[Predicate<'tcx>]) -> &'tcx List<Predicate<'tcx>> {
// FIXME consider asking the input slice to be sorted to avoid
// re-interning permutations, in which case that would be asserted
// here.
if preds.is_empty() {
// The macro-generated method below asserts we don't intern an empty slice.
List::empty()
} else {
self._intern_predicates(preds)
}
}
pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List<Ty<'tcx>> {
if ts.is_empty() { List::empty() } else { self._intern_type_list(ts) }
}
pub fn intern_substs(self, ts: &[GenericArg<'tcx>]) -> &'tcx List<GenericArg<'tcx>> {
if ts.is_empty() { List::empty() } else { self._intern_substs(ts) }
}
pub fn intern_projs(self, ps: &[ProjectionKind]) -> &'tcx List<ProjectionKind> {
if ps.is_empty() { List::empty() } else { self._intern_projs(ps) }
}
pub fn intern_place_elems(self, ts: &[PlaceElem<'tcx>]) -> &'tcx List<PlaceElem<'tcx>> {
if ts.is_empty() { List::empty() } else { self._intern_place_elems(ts) }
}
pub fn intern_canonical_var_infos(
self,
ts: &[CanonicalVarInfo<'tcx>],
) -> CanonicalVarInfos<'tcx> {
if ts.is_empty() { List::empty() } else { self._intern_canonical_var_infos(ts) }
}
pub fn intern_bound_variable_kinds(
self,
ts: &[ty::BoundVariableKind],
) -> &'tcx List<ty::BoundVariableKind> {
if ts.is_empty() { List::empty() } else { self._intern_bound_variable_kinds(ts) }
}
pub fn mk_fn_sig<I>(
self,
inputs: I,
output: I::Item,
c_variadic: bool,
unsafety: hir::Unsafety,
abi: abi::Abi,
) -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
where
I: Iterator<Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>,
{
inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
inputs_and_output: self.intern_type_list(xs),
c_variadic,
unsafety,
abi,
})
}
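    // Illustrative sketch: building the signature of `fn(i32) -> bool` (assumes
    // `tcx: TyCtxt<'tcx>`; the result here is an un-bound `FnSig`, not a `PolyFnSig`):
    //
    //     let sig = tcx.mk_fn_sig(
    //         [tcx.types.i32].iter().cloned(),
    //         tcx.types.bool,
    //         false,                 // c_variadic
    //         hir::Unsafety::Normal,
    //         abi::Abi::Rust,
    //     );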
pub fn mk_poly_existential_predicates<
I: InternAs<
[ty::Binder<'tcx, ExistentialPredicate<'tcx>>],
&'tcx List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>>,
>,
>(
self,
iter: I,
) -> I::Output {
iter.intern_with(|xs| self.intern_poly_existential_predicates(xs))
}
pub fn mk_predicates<I: InternAs<[Predicate<'tcx>], &'tcx List<Predicate<'tcx>>>>(
self,
iter: I,
) -> I::Output {
iter.intern_with(|xs| self.intern_predicates(xs))
}
pub fn mk_type_list<I: InternAs<[Ty<'tcx>], &'tcx List<Ty<'tcx>>>>(self, iter: I) -> I::Output {
iter.intern_with(|xs| self.intern_type_list(xs))
}
pub fn mk_substs<I: InternAs<[GenericArg<'tcx>], &'tcx List<GenericArg<'tcx>>>>(
self,
iter: I,
) -> I::Output {
iter.intern_with(|xs| self.intern_substs(xs))
}
pub fn mk_place_elems<I: InternAs<[PlaceElem<'tcx>], &'tcx List<PlaceElem<'tcx>>>>(
self,
iter: I,
) -> I::Output {
iter.intern_with(|xs| self.intern_place_elems(xs))
}
pub fn mk_substs_trait(self, self_ty: Ty<'tcx>, rest: &[GenericArg<'tcx>]) -> SubstsRef<'tcx> {
self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned()))
}
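    // Illustrative sketch: substs for a trait reference whose only generic parameter is
    // `Self` (assumes `self_ty: Ty<'tcx>` is in scope):
    //
    //     let substs = tcx.mk_substs_trait(self_ty, &[]);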
pub fn mk_bound_variable_kinds<
I: InternAs<[ty::BoundVariableKind], &'tcx List<ty::BoundVariableKind>>,
>(
self,
iter: I,
) -> I::Output {
iter.intern_with(|xs| self.intern_bound_variable_kinds(xs))
}
/// Walks upwards from `id` to find a node which might change lint levels with attributes.
/// It stops at `bound` and just returns it if reached.
pub fn maybe_lint_level_root_bounded(self, mut id: HirId, bound: HirId) -> HirId {
let hir = self.hir();
loop {
if id == bound {
return bound;
}
if hir.attrs(id).iter().any(|attr| Level::from_symbol(attr.name_or_empty()).is_some()) {
return id;
}
let next = hir.get_parent_node(id);
if next == id {
bug!("lint traversal reached the root of the crate");
}
id = next;
}
}
pub fn lint_level_at_node(
self,
lint: &'static Lint,
mut id: hir::HirId,
) -> (Level, LintLevelSource) {
let sets = self.lint_levels(());
loop {
if let Some(pair) = sets.level_and_source(lint, id, self.sess) {
return pair;
}
let next = self.hir().get_parent_node(id);
if next == id {
bug!("lint traversal reached the root of the crate");
}
id = next;
}
}
pub fn struct_span_lint_hir(
self,
lint: &'static Lint,
hir_id: HirId,
span: impl Into<MultiSpan>,
decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>),
) {
let (level, src) = self.lint_level_at_node(lint, hir_id);
struct_lint_level(self.sess, lint, level, src, Some(span.into()), decorate);
}
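    // Illustrative sketch of the usual call shape (the lint, `hir_id`, and `span` here are
    // assumptions for the example, not values from this file):
    //
    //     tcx.struct_span_lint_hir(UNUSED_MUT, hir_id, span, |lint| {
    //         lint.build("example message").emit();
    //     });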
pub fn struct_lint_node(
self,
lint: &'static Lint,
id: HirId,
decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>),
) {
let (level, src) = self.lint_level_at_node(lint, id);
struct_lint_level(self.sess, lint, level, src, None, decorate);
}
pub fn in_scope_traits(self, id: HirId) -> Option<&'tcx [TraitCandidate]> {
let map = self.in_scope_traits_map(id.owner)?;
let candidates = map.get(&id.local_id)?;
Some(&*candidates)
}
pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
debug!(?id, "named_region");
self.named_region_map(id.owner).and_then(|map| map.get(&id.local_id).cloned())
}
pub fn is_late_bound(self, id: HirId) -> bool {
self.is_late_bound_map(id.owner)
.map_or(false, |(owner, set)| owner == id.owner && set.contains(&id.local_id))
}
pub fn late_bound_vars(self, id: HirId) -> &'tcx List<ty::BoundVariableKind> {
self.mk_bound_variable_kinds(
self.late_bound_vars_map(id.owner)
.and_then(|map| map.get(&id.local_id).cloned())
.unwrap_or_else(|| {
bug!("No bound vars found for {:?} ({:?})", self.hir().node_to_string(id), id)
})
.iter(),
)
}
pub fn lifetime_scope(self, id: HirId) -> Option<&'tcx LifetimeScopeForPath> {
self.lifetime_scope_map(id.owner).as_ref().and_then(|map| map.get(&id.local_id))
}
    /// Whether `def_id` counts as a const fn in the current crate, considering all active
    /// feature gates.
pub fn is_const_fn(self, def_id: DefId) -> bool {
if self.is_const_fn_raw(def_id) {
match self.lookup_const_stability(def_id) {
Some(stability) if stability.level.is_unstable() => {
// has a `rustc_const_unstable` attribute, check whether the user enabled the
// corresponding feature gate.
self.features()
.declared_lib_features
.iter()
.any(|&(sym, _)| sym == stability.feature)
}
// functions without const stability are either stable user written
// const fn or the user is using feature gates and we thus don't
// care what they do
_ => true,
}
} else {
false
}
}
}
impl<'tcx> TyCtxtAt<'tcx> {
/// Constructs a `TyKind::Error` type and registers a `delay_span_bug` to ensure it gets used.
#[track_caller]
pub fn ty_error(self) -> Ty<'tcx> {
self.tcx.ty_error_with_message(self.span, "TyKind::Error constructed but no error reported")
}
/// Constructs a `TyKind::Error` type and registers a `delay_span_bug` with the given `msg` to
/// ensure it gets used.
#[track_caller]
pub fn ty_error_with_message(self, msg: &str) -> Ty<'tcx> {
self.tcx.ty_error_with_message(self.span, msg)
}
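// Illustrative sketch (hypothetical caller, not part of this crate): producing an
// error type during recovery. The `delay_span_bug` registered here only turns into
// an ICE if no real error is reported by the end of compilation.
//
//     fn recover_operand_ty<'tcx>(tcx: TyCtxt<'tcx>, span: Span) -> Ty<'tcx> {
//         tcx.at(span).ty_error_with_message("operand type could not be resolved")
//     }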
}
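/// Helper trait for interning the contents of a collection: the elements are first
/// gathered into a temporary buffer, which is then handed to the interning closure `f`.
///
/// For example (illustrative only), `iter.intern_with(|xs| tcx.intern_type_list(xs))`
/// collects the types yielded by `iter` and interns the resulting slice in one step.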
pub trait InternAs<T: ?Sized, R> {
type Output;
fn intern_with<F>(self, f: F) -> Self::Output
where
F: FnOnce(&T) -> R;
}
impl<I, T, R, E> InternAs<[T], R> for I
where
E: InternIteratorElement<T, R>,
I: Iterator<Item = E>,
{
type Output = E::Output;
fn intern_with<F>(self, f: F) -> Self::Output
where
F: FnOnce(&[T]) -> R,
{
E::intern_with(self, f)
}
}
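/// Dispatch trait used by `InternAs`: it is implemented for owned elements, for
/// references to clonable elements, and for `Result`s of elements, so each case can
/// fill the temporary buffer in the most appropriate way.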
pub trait InternIteratorElement<T, R>: Sized {
type Output;
fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
}
impl<T, R> InternIteratorElement<T, R> for T {
type Output = R;
fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(
mut iter: I,
f: F,
) -> Self::Output {
// This code is hot enough that it's worth specializing for the most
// common length lists, to avoid the overhead of `SmallVec` creation.
// Lengths 0, 1, and 2 typically account for ~95% of cases. If
// `size_hint` is incorrect, a panic will occur via an `unwrap` or an
// `assert`.
match iter.size_hint() {
(0, Some(0)) => {
assert!(iter.next().is_none());
f(&[])
}
(1, Some(1)) => {
let t0 = iter.next().unwrap();
assert!(iter.next().is_none());
f(&[t0])
}
(2, Some(2)) => {
let t0 = iter.next().unwrap();
let t1 = iter.next().unwrap();
assert!(iter.next().is_none());
f(&[t0, t1])
}
_ => f(&iter.collect::<SmallVec<[_; 8]>>()),
}
}
}
impl<'a, T, R> InternIteratorElement<T, R> for &'a T
where
T: Clone + 'a,
{
type Output = R;
fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
// This code isn't hot.
f(&iter.cloned().collect::<SmallVec<[_; 8]>>())
}
}
impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
type Output = Result<R, E>;
fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(
mut iter: I,
f: F,
) -> Self::Output {
// This code is hot enough that it's worth specializing for the most
// common length lists, to avoid the overhead of `SmallVec` creation.
// Lengths 0, 1, and 2 typically account for ~95% of cases. If
// `size_hint` is incorrect, a panic will occur via an `unwrap` or an
// `assert`, unless a failure happens first, in which case the result
// will be an error anyway.
Ok(match iter.size_hint() {
(0, Some(0)) => {
assert!(iter.next().is_none());
f(&[])
}
(1, Some(1)) => {
let t0 = iter.next().unwrap()?;
assert!(iter.next().is_none());
f(&[t0])
}
(2, Some(2)) => {
let t0 = iter.next().unwrap()?;
let t1 = iter.next().unwrap()?;
assert!(iter.next().is_none());
f(&[t0, t1])
}
_ => f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?),
})
}
}
// We are comparing types with different invariant lifetimes, so `ptr::eq`
// won't work for us.
fn ptr_eq<T, U>(t: *const T, u: *const U) -> bool {
t as *const () == u as *const ()
}
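/// Installs the providers for queries that are answered directly from the `TyCtxt`
/// itself: resolver outputs, session-level data, and crate-level attributes.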
pub fn provide(providers: &mut ty::query::Providers) {
providers.resolutions = |tcx, ()| &tcx.untracked_resolutions;
providers.module_reexports =
|tcx, id| tcx.resolutions(()).reexport_map.get(&id).map(|v| &v[..]);
providers.crate_name = |tcx, id| {
assert_eq!(id, LOCAL_CRATE);
tcx.crate_name
};
providers.maybe_unused_trait_import =
|tcx, id| tcx.resolutions(()).maybe_unused_trait_imports.contains(&id);
providers.maybe_unused_extern_crates =
|tcx, ()| &tcx.resolutions(()).maybe_unused_extern_crates[..];
providers.names_imported_by_glob_use = |tcx, id| {
tcx.arena.alloc(tcx.resolutions(()).glob_map.get(&id).cloned().unwrap_or_default())
};
providers.lookup_stability = |tcx, id| tcx.stability().local_stability(id.expect_local());
providers.lookup_const_stability =
|tcx, id| tcx.stability().local_const_stability(id.expect_local());
providers.lookup_deprecation_entry =
|tcx, id| tcx.stability().local_deprecation_entry(id.expect_local());
providers.extern_mod_stmt_cnum =
|tcx, id| tcx.resolutions(()).extern_crate_map.get(&id).cloned();
providers.output_filenames = |tcx, ()| &tcx.output_filenames;
providers.features_query = |tcx, ()| tcx.sess.features_untracked();
providers.is_panic_runtime = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
tcx.sess.contains_name(tcx.hir().krate_attrs(), sym::panic_runtime)
};
providers.is_compiler_builtins = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
tcx.sess.contains_name(tcx.hir().krate_attrs(), sym::compiler_builtins)
};
providers.has_panic_handler = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
// We want to check if the panic handler was defined in this crate
tcx.lang_items().panic_impl().map_or(false, |did| did.is_local())
};
}