rust/src/librustc_middle/ty/context.rs

2754 lines
100 KiB
Rust
Raw Normal View History

2019-02-08 14:53:55 +01:00
//! Type context book-keeping.
2015-09-06 21:51:58 +03:00
use crate::arena::Arena;
use crate::dep_graph::{self, DepConstructor, DepGraph};
use crate::hir::exports::ExportMap;
2019-12-24 17:38:22 -05:00
use crate::ich::{NodeIdHashingMode, StableHashingContext};
2019-02-05 11:20:45 -06:00
use crate::infer::canonical::{Canonical, CanonicalVarInfo, CanonicalVarInfos};
use crate::lint::{struct_lint_level, LintDiagnosticBuilder, LintSource};
2019-12-24 17:38:22 -05:00
use crate::middle;
use crate::middle::cstore::{CrateStoreDyn, EncodedMetadata};
2019-02-05 11:20:45 -06:00
use crate::middle::resolve_lifetime::{self, ObjectLifetimeDefault};
use crate::middle::stability;
use crate::mir::interpret::{self, Allocation, ConstValue, Scalar};
use crate::mir::{Body, Field, Local, Place, PlaceElem, ProjectionKind, Promoted};
2019-02-05 11:20:45 -06:00
use crate::traits;
use crate::ty::steal::Steal;
use crate::ty::subst::{GenericArg, GenericArgKind, InternalSubsts, Subst, SubstsRef, UserSubsts};
2019-12-24 17:38:22 -05:00
use crate::ty::TyKind::*;
use crate::ty::{
self, query, AdtDef, AdtKind, BindingMode, BoundVar, CanonicalPolyFnSig, Const, ConstVid,
DefIdTree, ExistentialPredicate, FloatVar, FloatVid, GenericParamDefKind, InferConst, InferTy,
IntVar, IntVid, List, ParamConst, ParamTy, PolyFnSig, Predicate, PredicateKind, ProjectionTy,
Region, RegionKind, ReprOptions, TraitObjectVisitor, Ty, TyKind, TyS, TyVar, TyVid, TypeAndMut,
};
use rustc_ast::ast;
use rustc_ast::expand::allocator::AllocatorKind;
2020-02-02 09:47:58 +10:00
use rustc_attr as attr;
2019-12-24 05:02:53 +01:00
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
use rustc_data_structures::stable_hasher::{
2019-12-24 17:38:22 -05:00
hash_stable_hashmap, HashStable, StableHasher, StableVec,
};
2020-02-09 15:32:00 +01:00
use rustc_data_structures::sync::{self, Lock, Lrc, WorkerLocal};
2020-03-31 21:18:30 +02:00
use rustc_errors::ErrorReported;
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIdSet, LocalDefId, LOCAL_CRATE};
use rustc_hir::definitions::{DefPathHash, Definitions};
use rustc_hir::lang_items::{self, PanicLocationLangItem};
use rustc_hir::{HirId, ItemKind, ItemLocalId, ItemLocalMap, ItemLocalSet, Node, TraitCandidate};
use rustc_index::vec::{Idx, IndexVec};
2019-12-24 17:38:22 -05:00
use rustc_macros::HashStable;
use rustc_session::config::{BorrowckMode, CrateType, OutputFilenames};
2020-01-05 10:58:44 +01:00
use rustc_session::lint::{Level, Lint};
use rustc_session::Session;
use rustc_span::source_map::MultiSpan;
2020-01-01 19:30:57 +01:00
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi::{Layout, TargetDataLayout, VariantIdx};
2019-12-24 17:38:22 -05:00
use rustc_target::spec::abi;
2019-12-24 17:38:22 -05:00
use smallvec::SmallVec;
use std::any::Any;
2015-09-06 21:51:58 +03:00
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::collections::hash_map::{self, Entry};
2018-06-27 06:01:19 -04:00
use std::fmt;
2019-12-24 17:38:22 -05:00
use std::hash::{Hash, Hasher};
use std::iter;
2019-12-24 17:38:22 -05:00
use std::mem;
use std::ops::{Bound, Deref};
use std::sync::Arc;
2015-09-06 21:51:58 +03:00
2019-06-12 14:39:12 +02:00
/// Shorthand for the sharded hash sets used to intern values of type `T`.
type InternedSet<'tcx, T> = ShardedHashMap<Interned<'tcx, T>, ()>;

/// The collection of interners used to deduplicate allocations of types,
/// regions, predicates, etc. Interned values are allocated in `arena` and
/// wrapped in `Interned` so equal values share a single allocation.
pub struct CtxtInterners<'tcx> {
    /// The arena that types, regions, etc. are allocated from.
    arena: &'tcx WorkerLocal<Arena<'tcx>>,

    /// Specifically use a speedy hash algorithm for these hash sets, since
    /// they're accessed quite often.
    type_: InternedSet<'tcx, TyS<'tcx>>,
    type_list: InternedSet<'tcx, List<Ty<'tcx>>>,
    substs: InternedSet<'tcx, InternalSubsts<'tcx>>,
    canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo>>,
    region: InternedSet<'tcx, RegionKind>,
    existential_predicates: InternedSet<'tcx, List<ExistentialPredicate<'tcx>>>,
    predicate_kind: InternedSet<'tcx, PredicateKind<'tcx>>,
    predicates: InternedSet<'tcx, List<Predicate<'tcx>>>,
    projs: InternedSet<'tcx, List<ProjectionKind>>,
    place_elems: InternedSet<'tcx, List<PlaceElem<'tcx>>>,
    const_: InternedSet<'tcx, Const<'tcx>>,
    chalk_environment_clause_list: InternedSet<'tcx, List<traits::ChalkEnvironmentClause<'tcx>>>,
}
2019-06-14 00:48:52 +03:00
impl<'tcx> CtxtInterners<'tcx> {
2020-01-02 01:26:18 +01:00
fn new(arena: &'tcx WorkerLocal<Arena<'tcx>>) -> CtxtInterners<'tcx> {
CtxtInterners {
arena,
type_: Default::default(),
type_list: Default::default(),
substs: Default::default(),
region: Default::default(),
existential_predicates: Default::default(),
canonical_var_infos: Default::default(),
2020-05-11 22:04:22 +02:00
predicate_kind: Default::default(),
predicates: Default::default(),
projs: Default::default(),
place_elems: Default::default(),
2019-03-14 10:19:31 +01:00
const_: Default::default(),
2020-03-03 11:25:03 -05:00
chalk_environment_clause_list: Default::default(),
}
}
/// Interns a type.
2019-08-11 12:55:14 -04:00
#[allow(rustc::usage_of_ty_tykind)]
#[inline(never)]
2019-12-24 17:38:22 -05:00
fn intern_ty(&self, kind: TyKind<'tcx>) -> Ty<'tcx> {
self.type_
.intern(kind, |kind| {
let flags = super::flags::FlagComputation::for_kind(&kind);
let ty_struct = TyS {
kind,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
};
2018-04-30 08:59:23 +02:00
2019-12-24 17:38:22 -05:00
Interned(self.arena.alloc(ty_struct))
})
.0
2018-04-30 08:59:23 +02:00
}
}
2015-09-06 21:51:58 +03:00
/// Pre-interned instances of the primitive types (see `CommonTypes::new`),
/// so they can be handed out without going through the interner each time.
pub struct CommonTypes<'tcx> {
    pub unit: Ty<'tcx>,
    pub bool: Ty<'tcx>,
    pub char: Ty<'tcx>,
    pub isize: Ty<'tcx>,
    pub i8: Ty<'tcx>,
    pub i16: Ty<'tcx>,
    pub i32: Ty<'tcx>,
    pub i64: Ty<'tcx>,
    pub i128: Ty<'tcx>,
    pub usize: Ty<'tcx>,
    pub u8: Ty<'tcx>,
    pub u16: Ty<'tcx>,
    pub u32: Ty<'tcx>,
    pub u64: Ty<'tcx>,
    pub u128: Ty<'tcx>,
    pub f32: Ty<'tcx>,
    pub f64: Ty<'tcx>,
    pub never: Ty<'tcx>,
    pub self_param: Ty<'tcx>,

    /// Dummy type used for the `Self` of a `TraitRef` created for converting
    /// a trait object, and which gets removed in `ExistentialTraitRef`.
    /// This type must not appear anywhere in other converted types.
    pub trait_object_dummy_self: Ty<'tcx>,
}
/// Pre-interned instances of commonly used regions (see
/// `CommonLifetimes::new`).
pub struct CommonLifetimes<'tcx> {
    /// `ReEmpty` in the root universe.
    pub re_root_empty: Region<'tcx>,

    /// `ReStatic`
    pub re_static: Region<'tcx>,

    /// Erased region, used after type-checking
    pub re_erased: Region<'tcx>,
}
/// Pre-interned instances of commonly used constants (see
/// `CommonConsts::new`).
pub struct CommonConsts<'tcx> {
    /// The unit constant `()` (a zero-sized scalar of type `()`).
    pub unit: &'tcx Const<'tcx>,
}
/// Read-only view over one of the per-item maps inside `TypeckTables`,
/// paired with the owner so that `HirId` keys can be validated (see
/// `validate_hir_id_for_typeck_tables`) before their `local_id` is used.
pub struct LocalTableInContext<'a, V> {
    hir_owner: Option<LocalDefId>,
    data: &'a ItemLocalMap<V>,
}
/// Validate that the given HirId (respectively its `local_id` part) can be
/// safely used as a key in the tables of a TypeckTable. For that to be
/// the case, the HirId must have the same `owner` as all the other IDs in
/// this table (signified by `hir_owner`). Otherwise the HirId
/// would be in a different frame of reference and using its `local_id`
/// would result in lookup errors, or worse, in silently wrong data being
/// stored/returned.
2019-12-24 17:38:22 -05:00
fn validate_hir_id_for_typeck_tables(
hir_owner: Option<LocalDefId>,
2019-12-24 17:38:22 -05:00
hir_id: hir::HirId,
mut_access: bool,
) {
if let Some(hir_owner) = hir_owner {
if hir_id.owner != hir_owner {
ty::tls::with(|tcx| {
2019-12-24 17:38:22 -05:00
bug!(
"node {} with HirId::owner {:?} cannot be placed in TypeckTables with hir_owner {:?}",
2019-12-24 17:38:22 -05:00
tcx.hir().node_to_string(hir_id),
hir_id.owner,
hir_owner
2019-12-24 17:38:22 -05:00
)
});
}
} else {
// We use "Null Object" TypeckTables in some of the analysis passes.
// These are just expected to be empty and their `hir_owner` is
// `None`. Therefore we cannot verify whether a given `HirId` would
// be a valid key for the given table. Instead we make sure that
// nobody tries to write to such a Null Object table.
if mut_access {
bug!("access to invalid TypeckTables")
}
}
}
impl<'a, V> LocalTableInContext<'a, V> {
    /// Returns `true` if an entry for `id` is present. Panics (via `bug!`)
    /// if `id` belongs to a different owner than this table.
    pub fn contains_key(&self, id: hir::HirId) -> bool {
        validate_hir_id_for_typeck_tables(self.hir_owner, id, false);
        self.data.get(&id.local_id).is_some()
    }

    /// Looks up the value recorded for `id`, validating the key's owner
    /// first.
    pub fn get(&self, id: hir::HirId) -> Option<&V> {
        validate_hir_id_for_typeck_tables(self.hir_owner, id, false);
        self.data.get(&id.local_id)
    }

    /// Iterates over all `(ItemLocalId, V)` entries of the underlying map.
    pub fn iter(&self) -> hash_map::Iter<'_, hir::ItemLocalId, V> {
        self.data.iter()
    }
}
// Allow `table[hir_id]` syntax; panics if the key is missing (use `get` for
// a fallible lookup).
impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> {
    type Output = V;

    fn index(&self, key: hir::HirId) -> &V {
        self.get(key).expect("LocalTableInContext: key not found")
    }
}
/// Mutable counterpart of `LocalTableInContext`: a writable view over one of
/// the per-item maps inside `TypeckTables`, with owner validation on access.
pub struct LocalTableInContextMut<'a, V> {
    hir_owner: Option<LocalDefId>,
    data: &'a mut ItemLocalMap<V>,
}
impl<'a, V> LocalTableInContextMut<'a, V> {
    /// Mutable lookup for `id`; validates the key's owner first
    /// (`mut_access = true`, so "Null Object" tables reject this).
    pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
        validate_hir_id_for_typeck_tables(self.hir_owner, id, true);
        self.data.get_mut(&id.local_id)
    }

    /// Entry API over the underlying map, keyed by the `local_id` part of
    /// `id`.
    pub fn entry(&mut self, id: hir::HirId) -> Entry<'_, hir::ItemLocalId, V> {
        validate_hir_id_for_typeck_tables(self.hir_owner, id, true);
        self.data.entry(id.local_id)
    }

    /// Inserts `val` for `id`, returning the previous value if any.
    pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
        validate_hir_id_for_typeck_tables(self.hir_owner, id, true);
        self.data.insert(id.local_id, val)
    }

    /// Removes and returns the value recorded for `id`, if any.
    pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
        validate_hir_id_for_typeck_tables(self.hir_owner, id, true);
        self.data.remove(&id.local_id)
    }
}
2019-08-01 00:41:54 +01:00
/// All information necessary to validate and reveal an `impl Trait`.
#[derive(RustcEncodable, RustcDecodable, Debug, HashStable)]
pub struct ResolvedOpaqueTy<'tcx> {
    /// The revealed type as seen by this function.
    pub concrete_type: Ty<'tcx>,
    /// Generic parameters on the opaque type as passed by this function.
    /// For `type Foo<A, B> = impl Bar<A, B>; fn foo<T, U>() -> Foo<T, U> { .. }`
    /// this is `[T, U]`, not `[A, B]`.
    pub substs: SubstsRef<'tcx>,
}
/// Whenever a value may be live across a generator yield, the type of that value winds up in the
/// `GeneratorInteriorTypeCause` struct. This struct adds additional information about such
/// captured types that can be useful for diagnostics. In particular, it stores the span that
/// caused a given type to be recorded, along with the scope that enclosed the value (which can
/// be used to find the await that the value is live across).
///
/// For example:
///
/// ```ignore (pseudo-Rust)
/// async move {
///     let x: T = expr;
///     foo.await
///     ...
/// }
/// ```
///
/// Here, we would store the type `T`, the span of the value `x`, the "scope-span" for
/// the scope that contains `x`, the expr `T` evaluated from, and the span of `foo.await`.
#[derive(RustcEncodable, RustcDecodable, Clone, Debug, Eq, Hash, PartialEq, HashStable)]
pub struct GeneratorInteriorTypeCause<'tcx> {
    /// Type of the captured binding.
    pub ty: Ty<'tcx>,
    /// Span of the binding that was captured.
    pub span: Span,
    /// Span of the scope of the captured binding.
    pub scope_span: Option<Span>,
    /// Span of `.await` or `yield` expression.
    pub yield_span: Span,
    /// Expr which the type evaluated from.
    pub expr: Option<hir::HirId>,
}
/// The results of type checking one body, keyed (mostly) by `ItemLocalId`s
/// relative to `hir_owner`. Accessor methods wrap the private maps in
/// `LocalTableInContext` views that validate keys against the owner.
#[derive(RustcEncodable, RustcDecodable, Debug)]
pub struct TypeckTables<'tcx> {
    /// The `HirId::owner` all `ItemLocalId`s in this table are relative to.
    pub hir_owner: Option<LocalDefId>,

    /// Resolved definitions for `<T>::X` associated paths and
    /// method calls, including those of overloaded operators.
    type_dependent_defs: ItemLocalMap<Result<(DefKind, DefId), ErrorReported>>,

    /// Resolved field indices for field accesses in expressions (`S { field }`, `obj.field`)
    /// or patterns (`S { field }`). The index is often useful by itself, but to learn more
    /// about the field you also need definition of the variant to which the field
    /// belongs, but it may not exist if it's a tuple field (`tuple.0`).
    field_indices: ItemLocalMap<usize>,

    /// Stores the types for various nodes in the AST. Note that this table
    /// is not guaranteed to be populated until after typeck. See
    /// typeck::check::fn_ctxt for details.
    node_types: ItemLocalMap<Ty<'tcx>>,

    /// Stores the type parameters which were substituted to obtain the type
    /// of this node. This only applies to nodes that refer to entities
    /// parameterized by type parameters, such as generic fns, types, or
    /// other items.
    node_substs: ItemLocalMap<SubstsRef<'tcx>>,

    /// This will either store the canonicalized types provided by the user
    /// or the substitutions that the user explicitly gave (if any) attached
    /// to `id`. These will not include any inferred values. The canonical form
    /// is used to capture things like `_` or other unspecified values.
    ///
    /// For example, if the user wrote `foo.collect::<Vec<_>>()`, then the
    /// canonical substitutions would include only `for<X> { Vec<X> }`.
    ///
    /// See also `AscribeUserType` statement in MIR.
    user_provided_types: ItemLocalMap<CanonicalUserType<'tcx>>,

    /// Stores the canonicalized types provided by the user. See also
    /// `AscribeUserType` statement in MIR.
    pub user_provided_sigs: DefIdMap<CanonicalPolyFnSig<'tcx>>,

    /// Adjustments (autoref, autoderef, coercions, ...) recorded per
    /// expression; see `expr_adjustments` / `expr_ty_adjusted`.
    adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,

    /// Stores the actual binding mode for all instances of hir::BindingAnnotation.
    pat_binding_modes: ItemLocalMap<BindingMode>,

    /// Stores the types which were implicitly dereferenced in pattern binding modes
    /// for later usage in HAIR lowering. For example,
    ///
    /// ```
    /// match &&Some(5i32) {
    ///     Some(n) => {},
    ///     _ => {},
    /// }
    /// ```
    /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored.
    ///
    /// See:
    /// https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions
    pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>,

    /// Borrows
    pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,

    /// Records the reasons that we picked the kind of each closure;
    /// not all closures are present in the map.
    closure_kind_origins: ItemLocalMap<(Span, Symbol)>,

    /// For each fn, records the "liberated" types of its arguments
    /// and return type. Liberated means that all bound regions
    /// (including late-bound regions) are replaced with free
    /// equivalents. This table is not used in codegen (since regions
    /// are erased there) and hence is not serialized to metadata.
    liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>,

    /// For each FRU expression, record the normalized types of the fields
    /// of the struct - this is needed because it is non-trivial to
    /// normalize while preserving regions. This table is used only in
    /// MIR construction and hence is not serialized to metadata.
    fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>,

    /// For every coercion cast we add the HIR node ID of the cast
    /// expression to this set.
    coercion_casts: ItemLocalSet,

    /// Set of trait imports actually used in the method resolution.
    /// This is used for warning unused imports. During type
    /// checking, this `Lrc` should not be cloned: it must have a ref-count
    /// of 1 so that we can insert things into the set mutably.
    pub used_trait_imports: Lrc<DefIdSet>,

    /// If any errors occurred while type-checking this body,
    /// this field will be set to `Some(ErrorReported)`.
    pub tainted_by_errors: Option<ErrorReported>,

    /// All the opaque types that are restricted to concrete types
    /// by this function.
    pub concrete_opaque_types: FxHashMap<DefId, ResolvedOpaqueTy<'tcx>>,

    /// Given the closure ID this map provides the list of UpvarIDs used by it.
    /// The upvarID contains the HIR node ID and it also contains the full path
    /// leading to the member of the struct or tuple that is used instead of the
    /// entire variable.
    pub closure_captures: ty::UpvarListMap,

    /// Stores the type, expression, span and optional scope span of all types
    /// that are live across the yield of this generator (if a generator).
    pub generator_interior_types: Vec<GeneratorInteriorTypeCause<'tcx>>,
}
2017-01-25 16:24:00 -05:00
impl<'tcx> TypeckTables<'tcx> {
    /// Creates an empty table for the given owner (`None` produces a
    /// read-only "Null Object" table; see `validate_hir_id_for_typeck_tables`).
    pub fn empty(hir_owner: Option<LocalDefId>) -> TypeckTables<'tcx> {
        TypeckTables {
            hir_owner,
            type_dependent_defs: Default::default(),
            field_indices: Default::default(),
            user_provided_types: Default::default(),
            user_provided_sigs: Default::default(),
            node_types: Default::default(),
            node_substs: Default::default(),
            adjustments: Default::default(),
            pat_binding_modes: Default::default(),
            pat_adjustments: Default::default(),
            upvar_capture_map: Default::default(),
            closure_kind_origins: Default::default(),
            liberated_fn_sigs: Default::default(),
            fru_field_types: Default::default(),
            coercion_casts: Default::default(),
            used_trait_imports: Lrc::new(Default::default()),
            tainted_by_errors: None,
            concrete_opaque_types: Default::default(),
            closure_captures: Default::default(),
            generator_interior_types: Default::default(),
        }
    }

    /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
    pub fn qpath_res(&self, qpath: &hir::QPath<'_>, id: hir::HirId) -> Res {
        match *qpath {
            hir::QPath::Resolved(_, ref path) => path.res,
            hir::QPath::TypeRelative(..) => self
                .type_dependent_def(id)
                .map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id)),
        }
    }

    /// Read-only view over `type_dependent_defs`.
    pub fn type_dependent_defs(
        &self,
    ) -> LocalTableInContext<'_, Result<(DefKind, DefId), ErrorReported>> {
        LocalTableInContext { hir_owner: self.hir_owner, data: &self.type_dependent_defs }
    }

    /// The successfully resolved `(DefKind, DefId)` for `id`, if any
    /// (errors recorded as `Err(ErrorReported)` become `None`).
    pub fn type_dependent_def(&self, id: HirId) -> Option<(DefKind, DefId)> {
        validate_hir_id_for_typeck_tables(self.hir_owner, id, false);
        self.type_dependent_defs.get(&id.local_id).cloned().and_then(|r| r.ok())
    }

    /// Like `type_dependent_def`, but returns only the `DefId`.
    pub fn type_dependent_def_id(&self, id: HirId) -> Option<DefId> {
        self.type_dependent_def(id).map(|(_, def_id)| def_id)
    }

    /// Mutable view over `type_dependent_defs`.
    pub fn type_dependent_defs_mut(
        &mut self,
    ) -> LocalTableInContextMut<'_, Result<(DefKind, DefId), ErrorReported>> {
        LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.type_dependent_defs }
    }

    /// Read-only view over `field_indices`.
    pub fn field_indices(&self) -> LocalTableInContext<'_, usize> {
        LocalTableInContext { hir_owner: self.hir_owner, data: &self.field_indices }
    }

    /// Mutable view over `field_indices`.
    pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<'_, usize> {
        LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.field_indices }
    }

    /// Read-only view over `user_provided_types`.
    pub fn user_provided_types(&self) -> LocalTableInContext<'_, CanonicalUserType<'tcx>> {
        LocalTableInContext { hir_owner: self.hir_owner, data: &self.user_provided_types }
    }

    /// Mutable view over `user_provided_types`.
    pub fn user_provided_types_mut(
        &mut self,
    ) -> LocalTableInContextMut<'_, CanonicalUserType<'tcx>> {
        LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.user_provided_types }
    }

    /// Read-only view over `node_types`.
    pub fn node_types(&self) -> LocalTableInContext<'_, Ty<'tcx>> {
        LocalTableInContext { hir_owner: self.hir_owner, data: &self.node_types }
    }

    /// Mutable view over `node_types`.
    pub fn node_types_mut(&mut self) -> LocalTableInContextMut<'_, Ty<'tcx>> {
        LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.node_types }
    }

    /// The recorded type of `id`; ICEs if no type was recorded.
    pub fn node_type(&self, id: hir::HirId) -> Ty<'tcx> {
        self.node_type_opt(id).unwrap_or_else(|| {
            bug!("node_type: no type for node `{}`", tls::with(|tcx| tcx.hir().node_to_string(id)))
        })
    }

    /// The recorded type of `id`, or `None` if none was recorded.
    pub fn node_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
        validate_hir_id_for_typeck_tables(self.hir_owner, id, false);
        self.node_types.get(&id.local_id).cloned()
    }

    /// Mutable view over `node_substs`.
    pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<'_, SubstsRef<'tcx>> {
        LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.node_substs }
    }

    /// The substitutions recorded for `id`, defaulting to the empty list.
    pub fn node_substs(&self, id: hir::HirId) -> SubstsRef<'tcx> {
        validate_hir_id_for_typeck_tables(self.hir_owner, id, false);
        self.node_substs.get(&id.local_id).cloned().unwrap_or_else(|| InternalSubsts::empty())
    }

    /// The substitutions recorded for `id`, or `None` if none were recorded.
    pub fn node_substs_opt(&self, id: hir::HirId) -> Option<SubstsRef<'tcx>> {
        validate_hir_id_for_typeck_tables(self.hir_owner, id, false);
        self.node_substs.get(&id.local_id).cloned()
    }

    // Returns the type of a pattern as a monotype. Like @expr_ty, this function
    // doesn't provide type parameter substitutions.
    pub fn pat_ty(&self, pat: &hir::Pat<'_>) -> Ty<'tcx> {
        self.node_type(pat.hir_id)
    }

    /// Like `pat_ty`, but returns `None` instead of ICEing when no type was
    /// recorded.
    pub fn pat_ty_opt(&self, pat: &hir::Pat<'_>) -> Option<Ty<'tcx>> {
        self.node_type_opt(pat.hir_id)
    }

    // Returns the type of an expression as a monotype.
    //
    // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
    // some cases, we insert `Adjustment` annotations such as auto-deref or
    // auto-ref. The type returned by this function does not consider such
    // adjustments. See `expr_ty_adjusted()` instead.
    //
    // NB (2): This type doesn't provide type parameter substitutions; e.g., if you
    // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
    // instead of "fn(ty) -> T with T = isize".
    pub fn expr_ty(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> {
        self.node_type(expr.hir_id)
    }

    /// Like `expr_ty`, but returns `None` instead of ICEing when no type was
    /// recorded.
    pub fn expr_ty_opt(&self, expr: &hir::Expr<'_>) -> Option<Ty<'tcx>> {
        self.node_type_opt(expr.hir_id)
    }

    /// Read-only view over `adjustments`.
    pub fn adjustments(&self) -> LocalTableInContext<'_, Vec<ty::adjustment::Adjustment<'tcx>>> {
        LocalTableInContext { hir_owner: self.hir_owner, data: &self.adjustments }
    }

    /// Mutable view over `adjustments`.
    pub fn adjustments_mut(
        &mut self,
    ) -> LocalTableInContextMut<'_, Vec<ty::adjustment::Adjustment<'tcx>>> {
        LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.adjustments }
    }

    /// The adjustments recorded for `expr`, or an empty slice.
    pub fn expr_adjustments(&self, expr: &hir::Expr<'_>) -> &[ty::adjustment::Adjustment<'tcx>] {
        validate_hir_id_for_typeck_tables(self.hir_owner, expr.hir_id, false);
        self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])
    }

    /// Returns the type of `expr`, considering any `Adjustment`
    /// entry recorded for that expression.
    pub fn expr_ty_adjusted(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> {
        self.expr_adjustments(expr).last().map_or_else(|| self.expr_ty(expr), |adj| adj.target)
    }

    /// Like `expr_ty_adjusted`, but returns `None` instead of ICEing when no
    /// type was recorded.
    pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr<'_>) -> Option<Ty<'tcx>> {
        self.expr_adjustments(expr).last().map(|adj| adj.target).or_else(|| self.expr_ty_opt(expr))
    }

    /// Whether `expr` resolved to an associated function (i.e. a method
    /// call or overloaded operator).
    pub fn is_method_call(&self, expr: &hir::Expr<'_>) -> bool {
        // Only paths and method calls/overloaded operators have
        // entries in type_dependent_defs, ignore the former here.
        if let hir::ExprKind::Path(_) = expr.kind {
            return false;
        }

        match self.type_dependent_defs().get(expr.hir_id) {
            Some(Ok((DefKind::AssocFn, _))) => true,
            _ => false,
        }
    }

    /// The binding mode recorded for pattern binding `id`; reports a delayed
    /// bug (rather than ICEing immediately) if it is missing.
    pub fn extract_binding_mode(&self, s: &Session, id: HirId, sp: Span) -> Option<BindingMode> {
        self.pat_binding_modes().get(id).copied().or_else(|| {
            s.delay_span_bug(sp, "missing binding mode");
            None
        })
    }

    /// Read-only view over `pat_binding_modes`.
    pub fn pat_binding_modes(&self) -> LocalTableInContext<'_, BindingMode> {
        LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_binding_modes }
    }

    /// Mutable view over `pat_binding_modes`.
    pub fn pat_binding_modes_mut(&mut self) -> LocalTableInContextMut<'_, BindingMode> {
        LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_binding_modes }
    }

    /// Read-only view over `pat_adjustments`.
    pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> {
        LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_adjustments }
    }

    /// Mutable view over `pat_adjustments`.
    pub fn pat_adjustments_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
        LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_adjustments }
    }

    /// The capture kind recorded for `upvar_id`; panics if absent.
    pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
        self.upvar_capture_map[&upvar_id]
    }

    /// Read-only view over `closure_kind_origins`.
    pub fn closure_kind_origins(&self) -> LocalTableInContext<'_, (Span, Symbol)> {
        LocalTableInContext { hir_owner: self.hir_owner, data: &self.closure_kind_origins }
    }

    /// Mutable view over `closure_kind_origins`.
    pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<'_, (Span, Symbol)> {
        LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.closure_kind_origins }
    }

    /// Read-only view over `liberated_fn_sigs`.
    pub fn liberated_fn_sigs(&self) -> LocalTableInContext<'_, ty::FnSig<'tcx>> {
        LocalTableInContext { hir_owner: self.hir_owner, data: &self.liberated_fn_sigs }
    }

    /// Mutable view over `liberated_fn_sigs`.
    pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<'_, ty::FnSig<'tcx>> {
        LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.liberated_fn_sigs }
    }

    /// Read-only view over `fru_field_types`.
    pub fn fru_field_types(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> {
        LocalTableInContext { hir_owner: self.hir_owner, data: &self.fru_field_types }
    }

    /// Mutable view over `fru_field_types`.
    pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
        LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.fru_field_types }
    }

    /// Whether the cast at `hir_id` was recorded as a coercion cast.
    // NOTE(review): this read-only query passes `mut_access = true` to the
    // validator, so it ICEs on "Null Object" tables — confirm whether
    // `false` was intended here.
    pub fn is_coercion_cast(&self, hir_id: hir::HirId) -> bool {
        validate_hir_id_for_typeck_tables(self.hir_owner, hir_id, true);
        self.coercion_casts.contains(&hir_id.local_id)
    }

    /// Marks the cast at `id` as a coercion cast.
    pub fn set_coercion_cast(&mut self, id: ItemLocalId) {
        self.coercion_casts.insert(id);
    }

    /// The full set of coercion-cast node IDs.
    pub fn coercion_casts(&self) -> &ItemLocalSet {
        &self.coercion_casts
    }
}
2019-06-14 00:48:52 +03:00
impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TypeckTables<'tcx> {
    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
        // Destructure exhaustively so that adding a field to `TypeckTables`
        // without updating this impl is a compile error.
        let ty::TypeckTables {
            hir_owner,
            ref type_dependent_defs,
            ref field_indices,
            ref user_provided_types,
            ref user_provided_sigs,
            ref node_types,
            ref node_substs,
            ref adjustments,
            ref pat_binding_modes,
            ref pat_adjustments,
            ref upvar_capture_map,
            ref closure_kind_origins,
            ref liberated_fn_sigs,
            ref fru_field_types,
            ref coercion_casts,
            ref used_trait_imports,
            tainted_by_errors,
            ref concrete_opaque_types,
            ref closure_captures,
            ref generator_interior_types,
        } = *self;

        hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
            type_dependent_defs.hash_stable(hcx, hasher);
            field_indices.hash_stable(hcx, hasher);
            user_provided_types.hash_stable(hcx, hasher);
            user_provided_sigs.hash_stable(hcx, hasher);
            node_types.hash_stable(hcx, hasher);
            node_substs.hash_stable(hcx, hasher);
            adjustments.hash_stable(hcx, hasher);
            pat_binding_modes.hash_stable(hcx, hasher);
            pat_adjustments.hash_stable(hcx, hasher);
            // `UpvarId` keys are hashed via their stable def-path hashes so
            // the result does not depend on numeric ID assignment.
            hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
                let ty::UpvarId { var_path, closure_expr_id } = *up_var_id;

                assert_eq!(Some(var_path.hir_id.owner), hir_owner);

                (
                    hcx.local_def_path_hash(var_path.hir_id.owner),
                    var_path.hir_id.local_id,
                    hcx.local_def_path_hash(closure_expr_id),
                )
            });

            closure_kind_origins.hash_stable(hcx, hasher);
            liberated_fn_sigs.hash_stable(hcx, hasher);
            fru_field_types.hash_stable(hcx, hasher);
            coercion_casts.hash_stable(hcx, hasher);
            used_trait_imports.hash_stable(hcx, hasher);
            tainted_by_errors.hash_stable(hcx, hasher);
            concrete_opaque_types.hash_stable(hcx, hasher);
            closure_captures.hash_stable(hcx, hasher);
            generator_interior_types.hash_stable(hcx, hasher);
        })
    }
}
// Index type identifying one user-written type annotation; used as the key
// of `CanonicalUserTypeAnnotations`.
rustc_index::newtype_index! {
    pub struct UserTypeAnnotationIndex {
        derive [HashStable]
        DEBUG_FORMAT = "UserType({})",
        const START_INDEX = 0,
    }
}
/// Mapping of type annotation indices to canonical user type annotations.
pub type CanonicalUserTypeAnnotations<'tcx> =
    IndexVec<UserTypeAnnotationIndex, CanonicalUserTypeAnnotation<'tcx>>;
2019-11-15 18:19:52 +01:00
/// A user type annotation together with where it was written and the type
/// inference produced for that spot.
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable, TypeFoldable, Lift)]
pub struct CanonicalUserTypeAnnotation<'tcx> {
    /// The canonicalized annotation as the user wrote it.
    pub user_ty: CanonicalUserType<'tcx>,
    /// Where the annotation appeared in the source.
    pub span: Span,
    /// The type inferred for the annotated spot.
    pub inferred_ty: Ty<'tcx>,
}
/// Canonicalized user type annotation.
pub type CanonicalUserType<'tcx> = Canonical<'tcx, UserType<'tcx>>;
2019-06-14 00:48:52 +03:00
impl CanonicalUserType<'tcx> {
    /// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`,
    /// i.e., each thing is mapped to a canonical variable with the same index.
    pub fn is_identity(&self) -> bool {
        match self.value {
            // A bare type annotation is never the identity substitution.
            UserType::Ty(_) => false,
            UserType::TypeOf(_, user_substs) => {
                if user_substs.user_self_ty.is_some() {
                    return false;
                }

                // Identity iff the i-th substitution is exactly the i-th
                // canonical (bound) variable, for every generic arg kind.
                user_substs.substs.iter().zip(BoundVar::new(0)..).all(|(kind, cvar)| {
                    match kind.unpack() {
                        GenericArgKind::Type(ty) => match ty.kind {
                            ty::Bound(debruijn, b) => {
                                // We only allow a `ty::INNERMOST` index in substitutions.
                                assert_eq!(debruijn, ty::INNERMOST);
                                cvar == b.var
                            }
                            _ => false,
                        },

                        GenericArgKind::Lifetime(r) => match r {
                            ty::ReLateBound(debruijn, br) => {
                                // We only allow a `ty::INNERMOST` index in substitutions.
                                assert_eq!(*debruijn, ty::INNERMOST);
                                cvar == br.assert_bound_var()
                            }
                            _ => false,
                        },

                        GenericArgKind::Const(ct) => match ct.val {
                            ty::ConstKind::Bound(debruijn, b) => {
                                // We only allow a `ty::INNERMOST` index in substitutions.
                                assert_eq!(debruijn, ty::INNERMOST);
                                cvar == b
                            }
                            _ => false,
                        },
                    }
                })
            }
        }
    }
}
2019-02-08 14:53:55 +01:00
/// A user-given type annotation attached to a constant. These arise
/// from constants that are named via paths, like `Foo::<A>::new` and
/// so forth.
#[derive(Copy, Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
#[derive(HashStable, TypeFoldable, Lift)]
pub enum UserType<'tcx> {
    /// A plain type written by the user.
    Ty(Ty<'tcx>),

    /// The canonical type is the result of `type_of(def_id)` with the
    /// given substitutions applied.
    TypeOf(DefId, UserSubsts<'tcx>),
}
2015-09-06 21:51:58 +03:00
impl<'tcx> CommonTypes<'tcx> {
    /// Interns each primitive type once, up front, so later users can grab
    /// them without touching the interner.
    fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
        let mk = |ty| interners.intern_ty(ty);

        CommonTypes {
            unit: mk(Tuple(List::empty())),
            bool: mk(Bool),
            char: mk(Char),
            never: mk(Never),
            isize: mk(Int(ast::IntTy::Isize)),
            i8: mk(Int(ast::IntTy::I8)),
            i16: mk(Int(ast::IntTy::I16)),
            i32: mk(Int(ast::IntTy::I32)),
            i64: mk(Int(ast::IntTy::I64)),
            i128: mk(Int(ast::IntTy::I128)),
            usize: mk(Uint(ast::UintTy::Usize)),
            u8: mk(Uint(ast::UintTy::U8)),
            u16: mk(Uint(ast::UintTy::U16)),
            u32: mk(Uint(ast::UintTy::U32)),
            u64: mk(Uint(ast::UintTy::U64)),
            u128: mk(Uint(ast::UintTy::U128)),
            f32: mk(Float(ast::FloatTy::F32)),
            f64: mk(Float(ast::FloatTy::F64)),
            self_param: mk(ty::Param(ty::ParamTy { index: 0, name: kw::SelfUpper })),

            trait_object_dummy_self: mk(Infer(ty::FreshTy(0))),
        }
    }
}
impl<'tcx> CommonLifetimes<'tcx> {
    /// Interns the handful of commonly used regions, up front.
    fn new(interners: &CtxtInterners<'tcx>) -> CommonLifetimes<'tcx> {
        // Intern directly via the region interner (regions are not `TyKind`s,
        // so `intern_ty` does not apply here).
        let mk = |r| interners.region.intern(r, |r| Interned(interners.arena.alloc(r))).0;

        CommonLifetimes {
            re_root_empty: mk(RegionKind::ReEmpty(ty::UniverseIndex::ROOT)),
            re_static: mk(RegionKind::ReStatic),
            re_erased: mk(RegionKind::ReErased),
        }
    }
}
impl<'tcx> CommonConsts<'tcx> {
fn new(interners: &CtxtInterners<'tcx>, types: &CommonTypes<'tcx>) -> CommonConsts<'tcx> {
2019-12-24 17:38:22 -05:00
let mk_const = |c| interners.const_.intern(c, |c| Interned(interners.arena.alloc(c))).0;
CommonConsts {
unit: mk_const(ty::Const {
val: ty::ConstKind::Value(ConstValue::Scalar(Scalar::zst())),
ty: types.unit,
}),
}
}
}
// This struct contains information regarding the `ReFree(FreeRegion)` corresponding to a lifetime
// conflict.
#[derive(Debug)]
pub struct FreeRegionInfo {
    /// `DefId` of the scope in which the free region is bound.
    pub def_id: DefId,
    /// The bound region corresponding to the `FreeRegion`.
    pub boundregion: ty::BoundRegion,
    /// Whether the bound region is inside an impl item
    /// (see `is_bound_region_in_impl_item`).
    pub is_impl_item: bool,
}
2017-09-15 16:19:44 -04:00
/// The central data structure of the compiler. It stores references
/// to the various **arenas** and also houses the results of the
2017-12-31 17:08:04 +01:00
/// various **compiler queries** that have been performed. See the
2020-03-05 18:07:42 -03:00
/// [rustc dev guide] for more details.
2017-12-31 17:08:04 +01:00
///
/// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/ty.html
#[derive(Copy, Clone)]
2019-09-25 08:42:46 -04:00
#[rustc_diagnostic_item = "TyCtxt"]
2019-06-14 00:48:52 +03:00
pub struct TyCtxt<'tcx> {
gcx: &'tcx GlobalCtxt<'tcx>,
}
2019-06-14 00:48:52 +03:00
impl<'tcx> Deref for TyCtxt<'tcx> {
type Target = &'tcx GlobalCtxt<'tcx>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.gcx
}
}
pub struct GlobalCtxt<'tcx> {
2019-11-26 23:16:48 +01:00
pub arena: &'tcx WorkerLocal<Arena<'tcx>>,
2019-05-31 10:23:22 +02:00
interners: CtxtInterners<'tcx>,
2015-09-06 21:51:58 +03:00
2020-02-09 15:32:00 +01:00
pub(crate) cstore: Box<CrateStoreDyn>,
pub sess: &'tcx Session,
2020-01-09 03:45:42 +01:00
/// This only ever stores a `LintStore` but we don't want a dependency on that type here.
///
/// FIXME(Centril): consider `dyn LintStoreMarker` once
/// we can upcast to `Any` for some additional type safety.
2020-01-09 09:40:55 +01:00
pub lint_store: Lrc<dyn Any + sync::Sync + sync::Send>,
2016-01-05 13:07:45 -05:00
pub dep_graph: DepGraph,
pub prof: SelfProfilerRef,
2015-09-06 21:51:58 +03:00
/// Common types, pre-interned for your convenience.
pub types: CommonTypes<'tcx>,
/// Common lifetimes, pre-interned for your convenience.
pub lifetimes: CommonLifetimes<'tcx>,
/// Common consts, pre-interned for your convenience.
pub consts: CommonConsts<'tcx>,
/// Resolutions of `extern crate` items produced by resolver.
extern_crate_map: FxHashMap<LocalDefId, CrateNum>,
/// Map indicating what traits are in scope for places where this
/// is relevant; generated by resolve.
trait_map: FxHashMap<LocalDefId, FxHashMap<ItemLocalId, StableVec<TraitCandidate>>>,
2017-03-23 14:18:25 -04:00
/// Export map produced by name resolution.
export_map: ExportMap<LocalDefId>,
2017-03-23 14:18:25 -04:00
2020-02-07 13:13:35 +01:00
pub(crate) untracked_crate: &'tcx hir::Crate<'tcx>,
pub(crate) definitions: &'tcx Definitions,
/// A map from `DefPathHash` -> `DefId`. Includes `DefId`s from the local crate
/// as well as all upstream crates. Only populated in incremental mode.
pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,
pub queries: query::Queries<'tcx>,
2016-10-28 13:55:49 +03:00
maybe_unused_trait_imports: FxHashSet<LocalDefId>,
maybe_unused_extern_crates: Vec<(LocalDefId, Span)>,
/// A map of glob use to a set of names it actually imports. Currently only
/// used in save-analysis.
2020-04-19 13:00:18 +02:00
glob_map: FxHashMap<LocalDefId, FxHashSet<Symbol>>,
2018-10-22 22:54:18 +03:00
/// Extern prelude entries. The value is `true` if the entry was introduced
/// via `extern crate` item and not `--extern` option or compiler built-in.
2020-04-19 13:00:18 +02:00
pub extern_prelude: FxHashMap<Symbol, bool>,
2017-06-24 17:48:27 +09:00
// Internal cache for metadata decoding. No need to track deps on this.
2018-02-15 10:52:26 +01:00
pub rcache: Lock<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
2015-09-06 21:51:58 +03:00
/// Caches the results of trait selection. This cache is used
/// for things that do not have to do with the parameters in scope.
pub selection_cache: traits::SelectionCache<'tcx>,
2015-10-21 14:50:38 +03:00
/// Caches the results of trait evaluation. This cache is used
/// for things that do not have to do with the parameters in scope.
/// Merge this with `selection_cache`?
pub evaluation_cache: traits::EvaluationCache<'tcx>,
/// The definite name of the current crate after taking into account
/// attributes, commandline parameters, etc.
pub crate_name: Symbol,
2016-04-18 16:03:16 +03:00
/// Data layout specification for the current target.
pub data_layout: TargetDataLayout,
/// `#[stable]` and `#[unstable]` attributes
2019-06-12 14:39:12 +02:00
stability_interner: ShardedHashMap<&'tcx attr::Stability, ()>,
/// `#[rustc_const_stable]` and `#[rustc_const_unstable]` attributes
const_stability_interner: ShardedHashMap<&'tcx attr::ConstStability, ()>,
/// Stores the value of constants (and deduplicates the actual memory)
2019-06-12 14:39:12 +02:00
allocation_interner: ShardedHashMap<&'tcx Allocation, ()>,
/// Stores memory for globals (statics/consts).
2020-04-24 12:53:18 +02:00
pub(crate) alloc_map: Lock<interpret::AllocMap<'tcx>>,
2017-12-06 09:25:29 +01:00
2020-03-04 14:13:00 +00:00
layout_interner: ShardedHashMap<&'tcx Layout, ()>,
output_filenames: Arc<OutputFilenames>,
2015-09-06 21:51:58 +03:00
}
2019-06-14 00:48:52 +03:00
impl<'tcx> TyCtxt<'tcx> {
2020-03-27 20:26:20 +01:00
pub fn alloc_steal_mir(self, mir: Body<'tcx>) -> Steal<Body<'tcx>> {
Steal::new(mir)
2015-09-06 21:51:58 +03:00
}
2019-12-24 17:38:22 -05:00
pub fn alloc_steal_promoted(
self,
2020-04-12 10:31:00 -07:00
promoted: IndexVec<Promoted, Body<'tcx>>,
2020-03-27 20:26:20 +01:00
) -> Steal<IndexVec<Promoted, Body<'tcx>>> {
Steal::new(promoted)
2019-08-04 16:20:21 -04:00
}
pub fn alloc_adt_def(
self,
did: DefId,
kind: AdtKind,
variants: IndexVec<VariantIdx, ty::VariantDef>,
repr: ReprOptions,
2020-04-03 09:49:21 +02:00
) -> &'tcx ty::AdtDef {
self.arena.alloc(ty::AdtDef::new(self, did, kind, variants, repr))
2015-09-06 21:51:58 +03:00
}
2019-06-14 00:48:52 +03:00
pub fn intern_const_alloc(self, alloc: Allocation) -> &'tcx Allocation {
2019-12-24 17:38:22 -05:00
self.allocation_interner.intern(alloc, |alloc| self.arena.alloc(alloc))
2017-12-06 09:25:29 +01:00
}
/// Allocates a read-only byte or string literal for `mir::interpret`.
2018-05-01 12:18:53 +02:00
pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId {
// Create an allocation that just contains these bytes.
let alloc = interpret::Allocation::from_byte_aligned_bytes(bytes);
2017-12-06 09:25:29 +01:00
let alloc = self.intern_const_alloc(alloc);
2020-04-24 12:53:18 +02:00
self.create_memory_alloc(alloc)
2017-12-06 09:25:29 +01:00
}
2019-06-14 00:48:52 +03:00
pub fn intern_stability(self, stab: attr::Stability) -> &'tcx attr::Stability {
2019-12-24 17:38:22 -05:00
self.stability_interner.intern(stab, |stab| self.arena.alloc(stab))
2015-09-06 21:51:58 +03:00
}
pub fn intern_const_stability(self, stab: attr::ConstStability) -> &'tcx attr::ConstStability {
2019-12-24 17:38:22 -05:00
self.const_stability_interner.intern(stab, |stab| self.arena.alloc(stab))
}
2020-03-04 14:13:00 +00:00
pub fn intern_layout(self, layout: Layout) -> &'tcx Layout {
2019-12-24 17:38:22 -05:00
self.layout_interner.intern(layout, |layout| self.arena.alloc(layout))
}
/// Returns a range of the start/end indices specified with the
/// `rustc_layout_scalar_valid_range` attribute.
pub fn layout_scalar_valid_range(self, def_id: DefId) -> (Bound<u128>, Bound<u128>) {
let attrs = self.get_attrs(def_id);
let get = |name| {
let attr = match attrs.iter().find(|a| a.check_name(name)) {
Some(attr) => attr,
None => return Bound::Unbounded,
};
for meta in attr.meta_item_list().expect("rustc_layout_scalar_valid_range takes args") {
2019-09-26 16:56:53 +01:00
match meta.literal().expect("attribute takes lit").kind {
ast::LitKind::Int(a, _) => return Bound::Included(a),
_ => span_bug!(attr.span, "rustc_layout_scalar_valid_range expects int arg"),
}
}
span_bug!(attr.span, "no arguments to `rustc_layout_scalar_valid_range` attribute");
};
2019-12-24 17:38:22 -05:00
(
get(sym::rustc_layout_scalar_valid_range_start),
get(sym::rustc_layout_scalar_valid_range_end),
)
}
pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
2015-09-06 21:51:58 +03:00
value.lift_to_tcx(self)
}
2019-02-08 14:53:55 +01:00
/// Creates a type context and call the closure with a `TyCtxt` reference
2015-09-06 21:51:58 +03:00
/// to the context. The closure enforces that the type context and any interned
/// value (types, substs, etc.) can only be used while `ty::tls` has a valid
/// reference to the context, to allow formatting values that need it.
pub fn create_global_ctxt(
s: &'tcx Session,
2020-01-09 09:40:55 +01:00
lint_store: Lrc<dyn Any + sync::Send + sync::Sync>,
local_providers: ty::query::Providers<'tcx>,
extern_providers: ty::query::Providers<'tcx>,
2019-11-27 13:24:19 +01:00
arena: &'tcx WorkerLocal<Arena<'tcx>>,
resolutions: ty::ResolverOutputs,
2020-02-09 15:32:00 +01:00
krate: &'tcx hir::Crate<'tcx>,
definitions: &'tcx Definitions,
2020-02-08 05:18:34 +01:00
dep_graph: DepGraph,
on_disk_query_result_cache: query::OnDiskCache<'tcx>,
crate_name: &str,
output_filenames: &OutputFilenames,
) -> GlobalCtxt<'tcx> {
2018-04-26 16:07:26 +03:00
let data_layout = TargetDataLayout::parse(&s.target.target).unwrap_or_else(|err| {
s.fatal(&err);
});
2020-01-02 01:26:18 +01:00
let interners = CtxtInterners::new(arena);
let common_types = CommonTypes::new(&interners);
let common_lifetimes = CommonLifetimes::new(&interners);
let common_consts = CommonConsts::new(&interners, &common_types);
let cstore = resolutions.cstore;
let crates = cstore.crates_untracked();
let max_cnum = crates.iter().map(|c| c.as_usize()).max().unwrap_or(0);
let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
providers[LOCAL_CRATE] = local_providers;
let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
let def_path_tables = crates
.iter()
.map(|&cnum| (cnum, cstore.def_path_table(cnum)))
2020-02-07 13:13:35 +01:00
.chain(iter::once((LOCAL_CRATE, definitions.def_path_table())));
// Precompute the capacity of the hashmap so we don't have to
// re-allocate when populating it.
let capacity = def_path_tables.clone().map(|(_, t)| t.size()).sum::<usize>();
2019-12-24 17:38:22 -05:00
let mut map: FxHashMap<_, _> =
FxHashMap::with_capacity_and_hasher(capacity, ::std::default::Default::default());
for (cnum, def_path_table) in def_path_tables {
def_path_table.add_def_path_hashes_to(cnum, &mut map);
}
Some(map)
} else {
None
};
2018-12-01 16:17:59 +01:00
let mut trait_map: FxHashMap<_, FxHashMap<_, _>> = FxHashMap::default();
for (hir_id, v) in resolutions.trait_map.into_iter() {
let map = trait_map.entry(hir_id.owner).or_default();
map.insert(hir_id.local_id, StableVec::new(v));
}
GlobalCtxt {
sess: s,
lint_store,
cstore,
2019-11-27 13:24:19 +01:00
arena,
2019-05-31 10:23:22 +02:00
interners,
2018-10-26 03:11:11 +09:00
dep_graph,
prof: s.prof.clone(),
2015-09-06 21:51:58 +03:00
types: common_types,
lifetimes: common_lifetimes,
consts: common_consts,
extern_crate_map: resolutions.extern_crate_map,
trait_map,
export_map: resolutions.export_map,
maybe_unused_trait_imports: resolutions.maybe_unused_trait_imports,
maybe_unused_extern_crates: resolutions.maybe_unused_extern_crates,
glob_map: resolutions.glob_map,
extern_prelude: resolutions.extern_prelude,
2020-02-09 15:32:00 +01:00
untracked_crate: krate,
2020-02-07 13:13:35 +01:00
definitions,
def_path_hash_to_def_id,
2019-12-24 17:38:22 -05:00
queries: query::Queries::new(providers, extern_providers, on_disk_query_result_cache),
rcache: Default::default(),
selection_cache: Default::default(),
evaluation_cache: Default::default(),
crate_name: Symbol::intern(crate_name),
data_layout,
layout_interner: Default::default(),
stability_interner: Default::default(),
const_stability_interner: Default::default(),
allocation_interner: Default::default(),
alloc_map: Lock::new(interpret::AllocMap::new()),
output_filenames: Arc::new(output_filenames.clone()),
}
2015-09-06 21:51:58 +03:00
}
/// Constructs a `TyKind::Error` type and registers a `delay_span_bug` to ensure it gets used.
#[track_caller]
pub fn ty_error(self) -> Ty<'tcx> {
2020-05-26 12:49:11 -05:00
self.ty_error_with_message(DUMMY_SP, "TyKind::Error constructed but no error reported")
}
/// Constructs a `TyKind::Error` type and registers a `delay_span_bug` with the given `msg to
/// ensure it gets used.
#[track_caller]
pub fn ty_error_with_message<S: Into<MultiSpan>>(self, span: S, msg: &str) -> Ty<'tcx> {
2020-05-26 12:49:11 -05:00
self.sess.delay_span_bug(span, msg);
self.mk_ty(Error(super::sty::DelaySpanBugEmitted(())))
}
/// Like `err` but for constants.
#[track_caller]
pub fn const_error(self, ty: Ty<'tcx>) -> &'tcx Const<'tcx> {
2020-05-26 12:49:11 -05:00
self.sess
.delay_span_bug(DUMMY_SP, "ty::ConstKind::Error constructed but no error reported.");
self.mk_const(ty::Const {
val: ty::ConstKind::Error(super::sty::DelaySpanBugEmitted(())),
ty,
})
}
pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
let cname = self.crate_name(LOCAL_CRATE).as_str();
self.sess.consider_optimizing(&cname, msg)
}
2019-06-14 00:48:52 +03:00
pub fn lib_features(self) -> &'tcx middle::lib_features::LibFeatures {
self.get_lib_features(LOCAL_CRATE)
}
/// Obtain all lang items of this crate and all dependencies (recursively)
2020-03-31 21:38:14 +02:00
pub fn lang_items(self) -> &'tcx rustc_hir::lang_items::LanguageItems {
self.get_lang_items(LOCAL_CRATE)
}
/// Obtain the given diagnostic item's `DefId`. Use `is_diagnostic_item` if you just want to
/// compare against another `DefId`, since `is_diagnostic_item` is cheaper.
pub fn get_diagnostic_item(self, name: Symbol) -> Option<DefId> {
self.all_diagnostic_items(LOCAL_CRATE).get(&name).copied()
}
/// Check whether the diagnostic item with the given `name` has the given `DefId`.
pub fn is_diagnostic_item(self, name: Symbol, did: DefId) -> bool {
self.diagnostic_items(did.krate).get(&name) == Some(&did)
}
2019-06-14 00:48:52 +03:00
pub fn stability(self) -> &'tcx stability::Index<'tcx> {
self.stability_index(LOCAL_CRATE)
}
2019-06-14 00:48:52 +03:00
pub fn crates(self) -> &'tcx [CrateNum] {
self.all_crate_nums(LOCAL_CRATE)
}
pub fn allocator_kind(self) -> Option<AllocatorKind> {
self.cstore.allocator_kind()
}
pub fn features(self) -> &'tcx rustc_feature::Features {
2018-02-14 16:11:02 +01:00
self.features_query(LOCAL_CRATE)
}
pub fn def_key(self, id: DefId) -> rustc_hir::definitions::DefKey {
if let Some(id) = id.as_local() { self.hir().def_key(id) } else { self.cstore.def_key(id) }
}
2019-02-08 14:53:55 +01:00
/// Converts a `DefId` into its fully expanded `DefPath` (every
/// `DefId` is really just an interned `DefPath`).
///
/// Note that if `id` is not local to this crate, the result will
/// be a non-local `DefPath`.
pub fn def_path(self, id: DefId) -> rustc_hir::definitions::DefPath {
if let Some(id) = id.as_local() {
self.hir().def_path(id)
} else {
self.cstore.def_path(id)
}
}
/// Returns whether or not the crate with CrateNum 'cnum'
/// is marked as a private dependency
pub fn is_private_dep(self, cnum: CrateNum) -> bool {
2019-12-24 17:38:22 -05:00
if cnum == LOCAL_CRATE { false } else { self.cstore.crate_is_private_dep_untracked(cnum) }
}
#[inline]
pub fn def_path_hash(self, def_id: DefId) -> rustc_hir::definitions::DefPathHash {
if let Some(def_id) = def_id.as_local() {
self.definitions.def_path_hash(def_id)
} else {
self.cstore.def_path_hash(def_id)
}
}
pub fn def_path_debug_str(self, def_id: DefId) -> String {
// We are explicitly not going through queries here in order to get
// crate name and disambiguator since this code is called from debug!()
// statements within the query system and we'd run into endless
// recursion otherwise.
let (crate_name, crate_disambiguator) = if def_id.is_local() {
(self.crate_name, self.sess.local_crate_disambiguator())
} else {
2019-12-24 17:38:22 -05:00
(
self.cstore.crate_name_untracked(def_id.krate),
self.cstore.crate_disambiguator_untracked(def_id.krate),
)
};
2019-12-24 17:38:22 -05:00
format!(
"{}[{}]{}",
crate_name,
// Don't print the whole crate disambiguator. That's just
// annoying in debug output.
&(crate_disambiguator.to_fingerprint().to_hex())[..4],
self.def_path(def_id).to_string_no_crate()
)
}
pub fn metadata_encoding_version(self) -> Vec<u8> {
self.cstore.metadata_encoding_version().to_vec()
}
2019-12-24 17:38:22 -05:00
pub fn encode_metadata(self) -> EncodedMetadata {
2020-01-11 04:47:20 +01:00
let _prof_timer = self.prof.verbose_generic_activity("generate_crate_metadata");
self.cstore.encode_metadata(self)
}
// Note that this is *untracked* and should only be used within the query
// system if the result is otherwise tracked through queries
pub fn cstore_as_any(self) -> &'tcx dyn Any {
self.cstore.as_any()
}
2018-12-04 16:26:34 +01:00
#[inline(always)]
pub fn create_stable_hashing_context(self) -> StableHashingContext<'tcx> {
2020-02-07 13:13:35 +01:00
let krate = self.gcx.untracked_crate;
2020-02-07 13:13:35 +01:00
StableHashingContext::new(self.sess, krate, self.definitions, &*self.cstore)
}
// This method makes sure that we have a DepNode and a Fingerprint for
// every upstream crate. It needs to be called once right after the tcx is
// created.
// With full-fledged red/green, the method will probably become unnecessary
// as this will be done on-demand.
pub fn allocate_metadata_dep_nodes(self) {
// We cannot use the query versions of crates() and crate_hash(), since
// those would need the DepNodes that we are allocating here.
for cnum in self.cstore.crates_untracked() {
let dep_node = DepConstructor::CrateMetadata(self, cnum);
let crate_hash = self.cstore.crate_hash_untracked(cnum);
2019-12-24 17:38:22 -05:00
self.dep_graph.with_task(
dep_node,
self,
crate_hash,
|_, x| x, // No transformation needed
dep_graph::hash_result,
);
}
}
2019-12-24 17:38:22 -05:00
pub fn serialize_query_result_cache<E>(self, encoder: &mut E) -> Result<(), E::Error>
where
E: ty::codec::TyEncoder,
{
self.queries.on_disk_cache.serialize(self, encoder)
}
/// If `true`, we should use the MIR-based borrowck, but also
/// fall back on the AST borrowck if the MIR-based one errors.
pub fn migrate_borrowck(self) -> bool {
self.borrowck_mode().migrate()
}
2018-02-14 16:11:02 +01:00
/// What mode(s) of borrowck should we run? AST? MIR? both?
/// (Also considers the `#![feature(nll)]` setting.)
pub fn borrowck_mode(self) -> BorrowckMode {
// Here are the main constraints we need to deal with:
//
// 1. An opts.borrowck_mode of `BorrowckMode::Migrate` is
// synonymous with no `-Z borrowck=...` flag at all.
//
// 2. We want to allow developers on the Nightly channel
// to opt back into the "hard error" mode for NLL,
// (which they can do via specifying `#![feature(nll)]`
// explicitly in their crate).
//
// So, this precedence list is how pnkfelix chose to work with
// the above constraints:
//
// * `#![feature(nll)]` *always* means use NLL with hard
// errors. (To simplify the code here, it now even overrides
// a user's attempt to specify `-Z borrowck=compare`, which
// we arguably do not need anymore and should remove.)
//
// * Otherwise, if no `-Z borrowck=...` then use migrate mode
//
// * Otherwise, use the behavior requested via `-Z borrowck=...`
2019-12-24 17:38:22 -05:00
if self.features().nll {
return BorrowckMode::Mir;
}
self.sess.opts.borrowck_mode
2018-02-14 16:11:02 +01:00
}
/// If `true`, we should use lazy normalization for constants, otherwise
/// we still evaluate them eagerly.
#[inline]
pub fn lazy_normalization(self) -> bool {
self.features().const_generics
}
#[inline]
pub fn local_crate_exports_generics(self) -> bool {
debug_assert!(self.sess.opts.share_generics());
2020-05-15 21:44:28 -07:00
self.sess.crate_types().iter().any(|crate_type| {
match crate_type {
2019-12-24 17:38:22 -05:00
CrateType::Executable
| CrateType::Staticlib
| CrateType::ProcMacro
| CrateType::Cdylib => false,
// FIXME rust-lang/rust#64319, rust-lang/rust#64872:
// We want to block export of generics from dylibs,
// but we must fix rust-lang/rust#65890 before we can
// do that robustly.
2019-12-24 17:38:22 -05:00
CrateType::Dylib => true,
2019-12-24 17:38:22 -05:00
CrateType::Rlib => true,
}
})
}
// Returns the `DefId` and the `BoundRegion` corresponding to the given region.
pub fn is_suitable_region(&self, region: Region<'tcx>) -> Option<FreeRegionInfo> {
let (suitable_region_binding_scope, bound_region) = match *region {
ty::ReFree(ref free_region) => (free_region.scope, free_region.bound_region),
2019-12-24 17:38:22 -05:00
ty::ReEarlyBound(ref ebr) => {
(self.parent(ebr.def_id).unwrap(), ty::BoundRegion::BrNamed(ebr.def_id, ebr.name))
}
_ => return None, // not a free region
};
let hir_id = self.hir().as_local_hir_id(suitable_region_binding_scope.expect_local());
2019-06-24 09:58:49 +02:00
let is_impl_item = match self.hir().find(hir_id) {
Some(Node::Item(..) | Node::TraitItem(..)) => false,
Some(Node::ImplItem(..)) => {
self.is_bound_region_in_impl_item(suitable_region_binding_scope)
}
_ => return None,
};
Some(FreeRegionInfo {
def_id: suitable_region_binding_scope,
boundregion: bound_region,
is_impl_item,
})
}
pub fn return_type_impl_or_dyn_trait(
&self,
scope_def_id: DefId,
) -> Option<&'tcx hir::Ty<'tcx>> {
let hir_id = self.hir().as_local_hir_id(scope_def_id.expect_local());
let hir_output = match self.hir().get(hir_id) {
Node::Item(hir::Item {
kind:
ItemKind::Fn(
hir::FnSig {
decl: hir::FnDecl { output: hir::FnRetTy::Return(ty), .. },
..
},
..,
),
..
})
| Node::ImplItem(hir::ImplItem {
kind:
hir::ImplItemKind::Fn(
hir::FnSig {
decl: hir::FnDecl { output: hir::FnRetTy::Return(ty), .. },
..
},
_,
),
..
})
| Node::TraitItem(hir::TraitItem {
kind:
hir::TraitItemKind::Fn(
hir::FnSig {
decl: hir::FnDecl { output: hir::FnRetTy::Return(ty), .. },
..
},
_,
),
..
}) => ty,
_ => return None,
};
let ret_ty = self.type_of(scope_def_id);
match ret_ty.kind {
ty::FnDef(_, _) => {
let sig = ret_ty.fn_sig(*self);
let output = self.erase_late_bound_regions(&sig.output());
if output.is_impl_trait() {
let fn_decl = self.hir().fn_decl_by_hir_id(hir_id).unwrap();
if let hir::FnRetTy::Return(ty) = fn_decl.output {
return Some(ty);
}
} else {
let mut v = TraitObjectVisitor(vec![]);
rustc_hir::intravisit::walk_ty(&mut v, hir_output);
if v.0.len() == 1 {
return Some(v.0[0]);
}
}
None
}
_ => None,
}
}
2019-12-24 17:38:22 -05:00
pub fn return_type_impl_trait(&self, scope_def_id: DefId) -> Option<(Ty<'tcx>, Span)> {
// HACK: `type_of_def_id()` will fail on these (#55796), so return `None`.
let hir_id = self.hir().as_local_hir_id(scope_def_id.expect_local());
2019-06-20 10:39:19 +02:00
match self.hir().get(hir_id) {
Node::Item(item) => {
2019-09-26 17:51:36 +01:00
match item.kind {
ItemKind::Fn(..) => { /* `type_of_def_id()` will work */ }
2018-11-09 10:16:07 -08:00
_ => {
return None;
}
}
}
_ => { /* `type_of_def_id()` will work or panic */ }
}
let ret_ty = self.type_of(scope_def_id);
2019-09-16 19:08:35 +01:00
match ret_ty.kind {
ty::FnDef(_, _) => {
let sig = ret_ty.fn_sig(*self);
let output = self.erase_late_bound_regions(&sig.output());
if output.is_impl_trait() {
2019-10-24 01:28:55 +08:00
let fn_decl = self.hir().fn_decl_by_hir_id(hir_id).unwrap();
Some((output, fn_decl.output.span()))
} else {
None
}
}
2019-12-24 17:38:22 -05:00
_ => None,
}
}
// Checks if the bound region is in Impl Item.
2019-12-24 17:38:22 -05:00
pub fn is_bound_region_in_impl_item(&self, suitable_region_binding_scope: DefId) -> bool {
let container_id = self.associated_item(suitable_region_binding_scope).container.id();
if self.impl_trait_ref(container_id).is_some() {
// For now, we do not try to target impls of traits. This is
// because this message is going to suggest that the user
// change the fn signature, but they may not be free to do so,
// since the signature must match the trait.
//
// FIXME(#42706) -- in some cases, we could do better here.
return true;
}
false
}
/// Determines whether identifiers in the assembly have strict naming rules.
/// Currently, only NVPTX* targets need it.
pub fn has_strict_asm_symbol_naming(&self) -> bool {
self.sess.target.target.arch.contains("nvptx")
}
/// Returns `&'static core::panic::Location<'static>`.
pub fn caller_location_ty(&self) -> Ty<'tcx> {
self.mk_imm_ref(
self.lifetimes.re_static,
self.type_of(self.require_lang_item(PanicLocationLangItem, None))
.subst(*self, self.mk_substs([self.lifetimes.re_static.into()].iter())),
)
}
2019-12-30 19:46:30 -06:00
2020-01-25 19:09:23 -06:00
/// Returns a displayable description and article for the given `def_id` (e.g. `("a", "struct")`).
pub fn article_and_description(&self, def_id: DefId) -> (&'static str, &'static str) {
match self.def_kind(def_id) {
DefKind::Generator => match self.generator_kind(def_id).unwrap() {
rustc_hir::GeneratorKind::Async(..) => ("an", "async closure"),
rustc_hir::GeneratorKind::Gen => ("a", "generator"),
},
def_kind => (def_kind.article(), def_kind.descr(def_id)),
}
2019-12-30 19:46:30 -06:00
}
2015-09-06 21:51:58 +03:00
}
2019-06-14 00:48:52 +03:00
impl<'tcx> GlobalCtxt<'tcx> {
/// Calls the closure with a local `TyCtxt` using the given arena.
2018-12-13 03:32:44 +01:00
/// `interners` is a slot passed so we can create a CtxtInterners
/// with the same lifetime as `arena`.
2019-06-14 00:48:52 +03:00
pub fn enter_local<F, R>(&'tcx self, f: F) -> R
2018-03-24 06:19:20 +01:00
where
2019-06-14 00:48:52 +03:00
F: FnOnce(TyCtxt<'tcx>) -> R,
{
2019-12-24 17:38:22 -05:00
let tcx = TyCtxt { gcx: self };
ty::tls::with_related_context(tcx, |icx| {
let new_icx = ty::tls::ImplicitCtxt {
tcx,
query: icx.query,
diagnostics: icx.diagnostics,
2018-04-06 14:53:11 +02:00
layout_depth: icx.layout_depth,
task_deps: icx.task_deps,
};
2019-12-24 17:38:22 -05:00
ty::tls::enter_context(&new_icx, |_| f(tcx))
})
}
}
/// A trait implemented for all `X<'a>` types that can be safely and
/// efficiently converted to `X<'tcx>` as long as they are part of the
/// provided `TyCtxt<'tcx>`.
/// This can be done, for example, for `Ty<'tcx>` or `SubstsRef<'tcx>`
2015-09-06 21:51:58 +03:00
/// by looking them up in their respective interners.
///
/// However, this is still not the best implementation as it does
/// need to compare the components, even for interned values.
/// It would be more efficient if `TypedArena` provided a way to
/// determine whether the address is in the allocated range.
///
/// `None` is returned if the value or one of the components is not part
2015-09-06 21:51:58 +03:00
/// of the provided context.
/// For `Ty`, `None` can be returned if either the type interner doesn't
/// contain the `TyKind` key or if the address of the interned
2015-09-06 21:51:58 +03:00
/// pointer differs. The latter case is possible if a primitive type,
/// e.g., `()` or `u8`, was interned in a different context.
2018-06-27 06:01:19 -04:00
pub trait Lift<'tcx>: fmt::Debug {
type Lifted: fmt::Debug + 'tcx;
2019-06-14 00:48:52 +03:00
fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted>;
2015-09-06 21:51:58 +03:00
}
/// Implements `Lift` for an interned pointer type: lifting succeeds exactly
/// when the pointer is found in the target context's `$set` interner, in which
/// case the lifetime can be extended without copying.
macro_rules! nop_lift {
    ($set:ident; $ty:ty => $lifted:ty) => {
        impl<'a, 'tcx> Lift<'tcx> for $ty {
            type Lifted = $lifted;
            fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
                if tcx.interners.$set.contains_pointer_to(&Interned(*self)) {
                    // The pointer lives in `tcx`'s interner, so extending its
                    // lifetime to `'tcx` via transmute is valid.
                    Some(unsafe { mem::transmute(*self) })
                } else {
                    None
                }
            }
        }
    };
}
/// Like `nop_lift!`, but for interned `List`s. An empty list is valid in any
/// context and lifts unconditionally.
macro_rules! nop_list_lift {
    ($set:ident; $ty:ty => $lifted:ty) => {
        impl<'a, 'tcx> Lift<'tcx> for &'a List<$ty> {
            type Lifted = &'tcx List<$lifted>;
            fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
                if self.is_empty() {
                    return Some(List::empty());
                }
                if tcx.interners.$set.contains_pointer_to(&Interned(*self)) {
                    // The list lives in `tcx`'s interner, so extending its
                    // lifetime to `'tcx` via transmute is valid.
                    Some(unsafe { mem::transmute(*self) })
                } else {
                    None
                }
            }
        }
    };
}
nop_lift! {type_; Ty<'a> => Ty<'tcx>}
nop_lift! {region; Region<'a> => Region<'tcx>}
nop_lift! {const_; &'a Const<'a> => &'tcx Const<'tcx>}
2020-05-11 22:04:22 +02:00
nop_lift! {predicate_kind; &'a PredicateKind<'a> => &'tcx PredicateKind<'tcx>}
nop_list_lift! {type_list; Ty<'a> => Ty<'tcx>}
nop_list_lift! {existential_predicates; ExistentialPredicate<'a> => ExistentialPredicate<'tcx>}
nop_list_lift! {predicates; Predicate<'a> => Predicate<'tcx>}
nop_list_lift! {canonical_var_infos; CanonicalVarInfo => CanonicalVarInfo}
nop_list_lift! {projs; ProjectionKind => ProjectionKind}
2015-09-06 21:51:58 +03:00
// This is the impl for `&'a InternalSubsts<'a>`.
nop_list_lift! {substs; GenericArg<'a> => GenericArg<'tcx>}
2015-09-06 21:51:58 +03:00
pub mod tls {
2019-12-24 17:38:22 -05:00
use super::{ptr_eq, GlobalCtxt, TyCtxt};
2015-09-06 21:51:58 +03:00
2020-03-18 20:30:02 +01:00
use crate::dep_graph::{DepKind, TaskDeps};
2019-02-05 11:20:45 -06:00
use crate::ty::query;
2020-01-31 04:00:03 +01:00
use rustc_data_structures::sync::{self, Lock};
use rustc_data_structures::thin_vec::ThinVec;
2019-12-24 17:38:22 -05:00
use rustc_data_structures::OnDrop;
use rustc_errors::Diagnostic;
2019-12-24 17:38:22 -05:00
use std::mem;
2015-09-06 21:51:58 +03:00
#[cfg(not(parallel_compiler))]
use std::cell::Cell;
#[cfg(parallel_compiler)]
2019-02-05 11:20:45 -06:00
use rustc_rayon_core as rayon_core;
2018-03-24 06:19:20 +01:00
/// This is the implicit state of rustc. It contains the current
/// `TyCtxt` and query. It is updated when creating a local interner or
/// executing a new query. Whenever there's a `TyCtxt` value available
/// you should also have access to an `ImplicitCtxt` through the functions
2018-03-24 06:19:20 +01:00
/// in this module.
#[derive(Clone)]
2019-06-14 00:48:52 +03:00
pub struct ImplicitCtxt<'a, 'tcx> {
/// The current `TyCtxt`. Initially created by `enter_global` and updated
/// by `enter_local` with a new local interner.
2019-06-14 00:48:52 +03:00
pub tcx: TyCtxt<'tcx>,
2015-09-06 21:51:58 +03:00
/// The current query job, if any. This is updated by `JobOwner::start` in
/// `ty::query::plumbing` when executing a query.
2020-03-18 20:30:02 +01:00
pub query: Option<query::QueryJobId<DepKind>>,
2018-04-06 14:53:11 +02:00
/// Where to store diagnostics for the current query job, if any.
/// This is updated by `JobOwner::start` in `ty::query::plumbing` when executing a query.
pub diagnostics: Option<&'a Lock<ThinVec<Diagnostic>>>,
2018-04-06 14:53:11 +02:00
/// Used to prevent layout from recursing too deeply.
pub layout_depth: usize,
2018-04-06 14:52:36 +02:00
/// The current dep graph task. This is used to add dependencies to queries
/// when executing them.
pub task_deps: Option<&'a Lock<TaskDeps>>,
}
2015-09-06 21:51:58 +03:00
/// Sets Rayon's thread-local variable, which is preserved for Rayon jobs
2018-05-31 23:04:21 +02:00
/// to `value` during the call to `f`. It is restored to its previous value after.
/// This is used to set the pointer to the new `ImplicitCtxt`.
#[cfg(parallel_compiler)]
2018-12-05 18:59:48 +01:00
#[inline]
fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
    // In the parallel compiler, Rayon's TLV travels with jobs, so worker
    // threads observe the same `ImplicitCtxt` pointer as the spawner.
    rayon_core::tlv::with(value, f)
}

/// Gets Rayon's thread-local variable, which is preserved for Rayon jobs.
/// This is used to get the pointer to the current `ImplicitCtxt`.
#[cfg(parallel_compiler)]
2018-12-05 18:59:48 +01:00
#[inline]
fn get_tlv() -> usize {
    rayon_core::tlv::get()
}

#[cfg(not(parallel_compiler))]
thread_local! {
    /// A thread local variable that stores a pointer to the current `ImplicitCtxt`.
    /// `0` encodes "no context set" (see `with_context_opt`).
    static TLV: Cell<usize> = Cell::new(0);
}
2015-09-06 21:51:58 +03:00
2018-05-31 23:04:21 +02:00
/// Sets TLV to `value` during the call to `f`.
/// It is restored to its previous value after.
/// This is used to set the pointer to the new `ImplicitCtxt`.
#[cfg(not(parallel_compiler))]
2018-12-05 18:59:48 +01:00
#[inline]
fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
    let old = get_tlv();
    // Arm the restore guard *before* overwriting TLV, so the previous value
    // is put back even if `f` unwinds.
    let _reset = OnDrop(move || TLV.with(|tlv| tlv.set(old)));
    TLV.with(|tlv| tlv.set(value));
    f()
}
2015-09-06 21:51:58 +03:00

/// Gets the pointer to the current `ImplicitCtxt`.
#[cfg(not(parallel_compiler))]
#[inline]
fn get_tlv() -> usize {
    TLV.with(|tlv| tlv.get())
}
2015-09-06 21:51:58 +03:00
/// Sets `context` as the new current `ImplicitCtxt` for the duration of the function `f`.
2018-12-05 18:59:48 +01:00
#[inline]
pub fn enter_context<'a, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'tcx>, f: F) -> R
where
F: FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R,
{
2019-12-24 17:38:22 -05:00
set_tlv(context as *const _ as usize, || f(&context))
}
2020-02-29 20:16:26 +03:00
/// Enters `GlobalCtxt` by setting up librustc_ast callbacks and
/// creating an initial `TyCtxt` and `ImplicitCtxt`.
/// This happens once per rustc session and `TyCtxt`s only exist
2018-03-24 06:19:20 +01:00
/// inside the `f` function.
2019-06-14 00:48:52 +03:00
pub fn enter_global<'tcx, F, R>(gcx: &'tcx GlobalCtxt<'tcx>, f: F) -> R
where
2019-06-14 00:48:52 +03:00
    F: FnOnce(TyCtxt<'tcx>) -> R,
{
    // Update `GCX_PTR` to indicate there's a `GlobalCtxt` available.
    GCX_PTR.with(|lock| {
        *lock.lock() = gcx as *const _ as usize;
    });
    // Set `GCX_PTR` back to 0 when we exit, so `with_global` can no longer
    // observe a dangling address.
    let _on_drop = OnDrop(move || {
        GCX_PTR.with(|lock| *lock.lock() = 0);
    });
2019-12-24 17:38:22 -05:00
    let tcx = TyCtxt { gcx };
    let icx =
        ImplicitCtxt { tcx, query: None, diagnostics: None, layout_depth: 0, task_deps: None };
    enter_context(&icx, |_| f(tcx))
2015-09-06 21:51:58 +03:00
}

scoped_thread_local! {
    /// Stores a pointer to the `GlobalCtxt` if one is available.
    /// This is used to access the `GlobalCtxt` in the deadlock handler given to Rayon.
    /// A value of 0 means no `GlobalCtxt` is live (see `enter_global`).
    pub static GCX_PTR: Lock<usize>
}
/// Creates a `TyCtxt` and `ImplicitCtxt` based on the `GCX_PTR` thread local.
2018-05-31 23:04:21 +02:00
/// This is used in the deadlock handler.
///
/// # Safety
///
/// NOTE(review): the caller must guarantee that the `GlobalCtxt` whose address
/// is stored in `GCX_PTR` is still alive — this function dereferences that
/// raw pointer after only asserting it is non-zero.
pub unsafe fn with_global<F, R>(f: F) -> R
where
2019-06-14 00:48:52 +03:00
    F: for<'tcx> FnOnce(TyCtxt<'tcx>) -> R,
{
    let gcx = GCX_PTR.with(|lock| *lock.lock());
    assert!(gcx != 0);
    // Reconstitute the reference from the address stashed by `enter_global`.
    let gcx = &*(gcx as *const GlobalCtxt<'_>);
2019-12-24 17:38:22 -05:00
    let tcx = TyCtxt { gcx };
    let icx =
        ImplicitCtxt { query: None, diagnostics: None, tcx, layout_depth: 0, task_deps: None };
    enter_context(&icx, |_| f(tcx))
}
/// Allows access to the current `ImplicitCtxt` in a closure if one is available.
2018-12-05 18:59:48 +01:00
#[inline]
pub fn with_context_opt<F, R>(f: F) -> R
where
    F: for<'a, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'tcx>>) -> R,
{
    let context = get_tlv();
    if context == 0 {
2015-09-06 21:51:58 +03:00
        // 0 is the "no context" sentinel; a real context address is never 0.
        f(None)
    } else {
        // We could get a `ImplicitCtxt` pointer from another thread.
        // Ensure that `ImplicitCtxt` is `Sync`.
2019-06-14 00:48:52 +03:00
        sync::assert_sync::<ImplicitCtxt<'_, '_>>();
2018-04-26 01:03:54 +02:00

2019-06-14 00:48:52 +03:00
        // SAFETY-style note: non-zero TLV values are only written by
        // `enter_context`, which keeps the referenced `ImplicitCtxt` alive
        // for the duration of the call that can observe it here.
        unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_>))) }
2015-09-06 21:51:58 +03:00
    }
}
/// Allows access to the current `ImplicitCtxt`.
/// Panics if there is no `ImplicitCtxt` available.
2018-12-05 18:59:48 +01:00
#[inline]
2018-03-24 06:19:20 +01:00
pub fn with_context<F, R>(f: F) -> R
where
F: for<'a, 'tcx> FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R,
2018-03-24 06:19:20 +01:00
{
with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
}
/// Allows access to the current `ImplicitCtxt` whose tcx field has the same global
/// interner as the tcx argument passed in. This means the closure is given an `ImplicitCtxt`
/// with the same `'tcx` lifetime as the `TyCtxt` passed in.
/// This will panic if you pass it a `TyCtxt` which has a different global interner from
/// the current `ImplicitCtxt`'s `tcx` field.
2018-12-05 18:59:48 +01:00
#[inline]
2019-06-14 00:48:52 +03:00
pub fn with_related_context<'tcx, F, R>(tcx: TyCtxt<'tcx>, f: F) -> R
where
2019-06-14 00:48:52 +03:00
    F: FnOnce(&ImplicitCtxt<'_, 'tcx>) -> R,
{
2019-12-24 17:38:22 -05:00
    with_context(|context| unsafe {
        // The assert is what justifies the `transmute`: both contexts point at
        // the same `GlobalCtxt`, so re-labelling the stored context with the
        // caller's `'tcx` lifetime does not extend any actual lifetime.
        assert!(ptr_eq(context.tcx.gcx, tcx.gcx));
        let context: &ImplicitCtxt<'_, '_> = mem::transmute(context);
        f(context)
    })
}
/// Allows access to the `TyCtxt` in the current `ImplicitCtxt`.
/// Panics if there is no `ImplicitCtxt` available.
2018-12-05 18:59:48 +01:00
#[inline]
pub fn with<F, R>(f: F) -> R
where
2019-06-14 00:48:52 +03:00
F: for<'tcx> FnOnce(TyCtxt<'tcx>) -> R,
{
with_context(|context| f(context.tcx))
2015-09-06 21:51:58 +03:00
}
/// Allows access to the `TyCtxt` in the current `ImplicitCtxt`.
/// The closure is passed None if there is no `ImplicitCtxt` available.
2018-12-05 18:59:48 +01:00
#[inline]
pub fn with_opt<F, R>(f: F) -> R
where
2019-06-14 00:48:52 +03:00
F: for<'tcx> FnOnce(Option<TyCtxt<'tcx>>) -> R,
{
with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx)))
2015-09-06 21:51:58 +03:00
}
}
// Tallies, per requested `TyKind` variant, how many interned types exist and
// what share of them mention region/type/const inference variables. Used only
// by `print_debug_stats` below.
macro_rules! sty_debug_print {
    ($ctxt: expr, $($variant: ident),*) => {{
        // Curious inner module to allow variant names to be used as
2015-09-06 21:51:58 +03:00
        // variable names.
        #[allow(non_snake_case)]
        mod inner {
2019-02-05 11:20:45 -06:00
            use crate::ty::{self, TyCtxt};
            use crate::ty::context::Interned;
2015-09-06 21:51:58 +03:00
            // One row of the printed table.
            #[derive(Copy, Clone)]
            struct DebugStat {
                total: usize,
                lt_infer: usize,
2015-09-06 21:51:58 +03:00
                ty_infer: usize,
                ct_infer: usize,
                all_infer: usize,
2015-09-06 21:51:58 +03:00
            }
2019-06-14 00:48:52 +03:00
            pub fn go(tcx: TyCtxt<'_>) {
2015-09-06 21:51:58 +03:00
                let mut total = DebugStat {
                    total: 0,
                    lt_infer: 0,
                    ty_infer: 0,
                    ct_infer: 0,
                    all_infer: 0,
2015-09-06 21:51:58 +03:00
                };
                // One zeroed counter per requested variant.
                $(let mut $variant = total;)*
2019-06-12 14:39:12 +02:00
                // Walk every entry of the sharded type interner.
                let shards = tcx.interners.type_.lock_shards();
                let types = shards.iter().flat_map(|shard| shard.keys());
                for &Interned(t) in types {
2019-09-16 19:08:35 +01:00
                    let variant = match t.kind {
                        ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
                        ty::Float(..) | ty::Str | ty::Never => continue,
                        ty::Error(_) => /* unimportant */ continue,
2015-09-06 21:51:58 +03:00
                        $(ty::$variant(..) => &mut $variant,)*
                    };
                    // Presence of inference vars is read off the cached flags.
                    let lt = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
                    let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);
                    let ct = t.flags.intersects(ty::TypeFlags::HAS_CT_INFER);
2015-09-06 21:51:58 +03:00
                    variant.total += 1;
                    total.total += 1;
                    if lt { total.lt_infer += 1; variant.lt_infer += 1 }
2015-09-06 21:51:58 +03:00
                    if ty { total.ty_infer += 1; variant.ty_infer += 1 }
                    if ct { total.ct_infer += 1; variant.ct_infer += 1 }
                    if lt && ty && ct { total.all_infer += 1; variant.all_infer += 1 }
2015-09-06 21:51:58 +03:00
                }
                println!("Ty interner total ty lt ct all");
2015-09-06 21:51:58 +03:00
                $(println!(" {:18}: {uses:6} {usespc:4.1}%, \
{ty:4.1}% {lt:5.1}% {ct:4.1}% {all:4.1}%",
                    stringify!($variant),
                    uses = $variant.total,
                    usespc = $variant.total as f64 * 100.0 / total.total as f64,
                    ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
                    lt = $variant.lt_infer as f64 * 100.0 / total.total as f64,
                    ct = $variant.ct_infer as f64 * 100.0 / total.total as f64,
                    all = $variant.all_infer as f64 * 100.0 / total.total as f64);
                )*
2015-09-06 21:51:58 +03:00
                println!(" total {uses:6} \
{ty:4.1}% {lt:5.1}% {ct:4.1}% {all:4.1}%",
                    uses = total.total,
                    ty = total.ty_infer as f64 * 100.0 / total.total as f64,
                    lt = total.lt_infer as f64 * 100.0 / total.total as f64,
                    ct = total.ct_infer as f64 * 100.0 / total.total as f64,
                    all = total.all_infer as f64 * 100.0 / total.total as f64)
2015-09-06 21:51:58 +03:00
            }
        }
        inner::go($ctxt)
    }}
}
2019-06-14 00:48:52 +03:00
impl<'tcx> TyCtxt<'tcx> {
    /// Dumps interner statistics to stdout: per-`TyKind`-variant counts (via
    /// `sty_debug_print!`) followed by the entry counts of the other interners.
    pub fn print_debug_stats(self) {
2015-09-06 21:51:58 +03:00
        sty_debug_print!(
            self,
2019-12-24 17:38:22 -05:00
            Adt,
            Array,
            Slice,
            RawPtr,
            Ref,
            FnDef,
            FnPtr,
            Placeholder,
            Generator,
            GeneratorWitness,
            Dynamic,
            Closure,
            Tuple,
            Bound,
            Param,
            Infer,
            Projection,
            Opaque,
            Foreign
        );
2015-09-06 21:51:58 +03:00

2019-06-12 14:39:12 +02:00
        println!("InternalSubsts interner: #{}", self.interners.substs.len());
        println!("Region interner: #{}", self.interners.region.len());
        println!("Stability interner: #{}", self.stability_interner.len());
        println!("Const Stability interner: #{}", self.const_stability_interner.len());
2019-06-12 14:39:12 +02:00
        println!("Allocation interner: #{}", self.allocation_interner.len());
        println!("Layout interner: #{}", self.layout_interner.len());
2015-09-06 21:51:58 +03:00
    }
}
/// An entry in an interner.
///
/// Wraps the interned reference so that the `PartialEq`/`Hash`/`Borrow` impls
/// below can key hash-map lookups on the *contents* rather than the address.
struct Interned<'tcx, T: ?Sized>(&'tcx T);
2015-09-06 21:51:58 +03:00

2019-12-24 17:38:22 -05:00
// `Clone`/`Copy` are written by hand because a `derive` would require
// `T: Clone`/`T: Copy`; only the reference is copied here.
impl<'tcx, T: 'tcx + ?Sized> Clone for Interned<'tcx, T> {
    fn clone(&self) -> Self {
        Interned(self.0)
    }
}
2019-12-24 17:38:22 -05:00
impl<'tcx, T: 'tcx + ?Sized> Copy for Interned<'tcx, T> {}

// Lets the sharded-map machinery treat an entry as an opaque address.
impl<'tcx, T: 'tcx + ?Sized> IntoPointer for Interned<'tcx, T> {
    fn into_pointer(&self) -> *const () {
        self.0 as *const _ as *const ()
    }
}
2019-09-16 19:11:57 +01:00
// N.B., an `Interned<Ty>` compares and hashes as a `TyKind`.
// These three impls (`PartialEq`, `Hash`, `Borrow`) must all agree — they are
// all keyed on `self.0.kind` so that map lookups by `TyKind` find the entry.
impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
    fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
2019-09-16 19:08:35 +01:00
        self.0.kind == other.0.kind
2015-09-06 21:51:58 +03:00
    }
}

impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}
2015-09-06 21:51:58 +03:00

impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
2015-09-06 21:51:58 +03:00
    fn hash<H: Hasher>(&self, s: &mut H) {
2019-09-16 19:08:35 +01:00
        self.0.kind.hash(s)
2015-09-06 21:51:58 +03:00
    }
}

2019-08-11 12:55:14 -04:00
#[allow(rustc::usage_of_ty_tykind)]
2019-06-14 00:48:52 +03:00
// Allows looking up an interned `TyS` by a bare `TyKind` key.
impl<'tcx> Borrow<TyKind<'tcx>> for Interned<'tcx, TyS<'tcx>> {
    fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> {
2019-09-16 19:08:35 +01:00
        &self.0.kind
2015-09-06 21:51:58 +03:00
    }
}
// N.B., an `Interned<List<T>>` compares and hashes as its elements.
// As above, `PartialEq`/`Hash`/`Borrow` are all keyed on the element slice so
// a `&[T]` can be used as the lookup key for an interned `List<T>`.
2018-08-22 00:35:01 +01:00
impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, List<T>> {
    fn eq(&self, other: &Interned<'tcx, List<T>>) -> bool {
        self.0[..] == other.0[..]
    }
}

2018-08-22 00:35:01 +01:00
impl<'tcx, T: Eq> Eq for Interned<'tcx, List<T>> {}

2018-08-22 00:35:01 +01:00
impl<'tcx, T: Hash> Hash for Interned<'tcx, List<T>> {
    fn hash<H: Hasher>(&self, s: &mut H) {
        self.0[..].hash(s)
    }
}

impl<'tcx, T> Borrow<[T]> for Interned<'tcx, List<T>> {
    fn borrow<'a>(&'a self) -> &'a [T] {
        &self.0[..]
    }
}

// Lookup-by-value keys for the direct interners below.
impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> {
2019-06-21 11:39:28 +02:00
    fn borrow(&self) -> &RegionKind {
        &self.0
    }
}

2019-06-14 00:48:52 +03:00
impl<'tcx> Borrow<Const<'tcx>> for Interned<'tcx, Const<'tcx>> {
    fn borrow<'a>(&'a self) -> &'a Const<'tcx> {
2017-08-04 00:41:44 +03:00
        &self.0
    }
}

2020-05-11 22:04:22 +02:00
impl<'tcx> Borrow<PredicateKind<'tcx>> for Interned<'tcx, PredicateKind<'tcx>> {
    fn borrow<'a>(&'a self) -> &'a PredicateKind<'tcx> {
        &self.0
    }
}
// For each `name: method(ty)` pair, generates content-based
// `PartialEq`/`Eq`/`Hash` impls on `Interned<ty>` plus a `TyCtxt::method`
// that interns a value of `ty` by allocating it into `interners.arena` on
// first sight and returning the canonical reference afterwards.
macro_rules! direct_interners {
2020-05-11 22:04:22 +02:00
    ($($name:ident: $method:ident($ty:ty),)+) => {
        $(impl<'tcx> PartialEq for Interned<'tcx, $ty> {
            fn eq(&self, other: &Self) -> bool {
                self.0 == other.0
            }
        }

        impl<'tcx> Eq for Interned<'tcx, $ty> {}

        impl<'tcx> Hash for Interned<'tcx, $ty> {
            fn hash<H: Hasher>(&self, s: &mut H) {
                self.0.hash(s)
            }
        }

        impl<'tcx> TyCtxt<'tcx> {
            pub fn $method(self, v: $ty) -> &'tcx $ty {
                self.interners.$name.intern_ref(&v, || {
                    Interned(self.interners.arena.alloc(v))
                }).0
            }
        })+
2015-09-06 21:51:58 +03:00
    }
}
2015-09-06 21:51:58 +03:00

2020-05-11 22:04:22 +02:00
direct_interners!(
    region: mk_region(RegionKind),
    const_: mk_const(Const<'tcx>),
    predicate_kind: intern_predicate_kind(PredicateKind<'tcx>),
);
2016-10-16 21:21:25 -06:00
// For each `field: method(ty)` pair, generates a `TyCtxt::method` that interns
// a slice of `ty` as a `List<ty>`, copying it into the arena on first sight.
// The generated methods are `_`-prefixed because the public wrappers below
// (e.g. `intern_type_list`) handle the empty-slice case first.
macro_rules! slice_interners {
    ($($field:ident: $method:ident($ty:ty)),+) => (
        $(impl<'tcx> TyCtxt<'tcx> {
            pub fn $method(self, v: &[$ty]) -> &'tcx List<$ty> {
                self.interners.$field.intern_ref(v, || {
2020-01-02 01:26:18 +01:00
                    Interned(List::from_arena(&*self.arena, v))
                }).0
            }
        })+
    );
2016-10-16 21:21:25 -06:00
}

2016-10-16 21:21:25 -06:00
slice_interners!(
    type_list: _intern_type_list(Ty<'tcx>),
    substs: _intern_substs(GenericArg<'tcx>),
    canonical_var_infos: _intern_canonical_var_infos(CanonicalVarInfo),
    existential_predicates: _intern_existential_predicates(ExistentialPredicate<'tcx>),
    predicates: _intern_predicates(Predicate<'tcx>),
    projs: _intern_projs(ProjectionKind),
2020-03-03 11:25:03 -05:00
    place_elems: _intern_place_elems(PlaceElem<'tcx>),
    chalk_environment_clause_list:
        _intern_chalk_environment_clause_list(traits::ChalkEnvironmentClause<'tcx>)
);
2019-06-14 00:48:52 +03:00
impl<'tcx> TyCtxt<'tcx> {
/// Given a `fn` type, returns an equivalent `unsafe fn` type;
/// that is, a `fn` type that is equivalent in every way for being
/// unsafe.
pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
2019-12-24 17:38:22 -05:00
self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig { unsafety: hir::Unsafety::Unsafe, ..sig }))
2015-09-06 21:51:58 +03:00
}
/// Given a closure signature, returns an equivalent fn signature. Detuples
/// and so forth -- so e.g., if we have a sig with `Fn<(u32, i32)>` then
/// you would get a `fn(u32, i32)`.
/// `unsafety` determines the unsafety of the fn signature. If you pass
2019-04-01 00:00:43 +09:00
/// `hir::Unsafety::Unsafe` in the previous example, then you would get
/// an `unsafe fn (u32, i32)`.
/// It cannot convert a closure that requires unsafe.
pub fn signature_unclosure(
self,
sig: PolyFnSig<'tcx>,
unsafety: hir::Unsafety,
) -> PolyFnSig<'tcx> {
sig.map_bound(|s| {
2019-09-16 19:08:35 +01:00
let params_iter = match s.inputs()[0].kind {
2019-12-24 17:38:22 -05:00
ty::Tuple(params) => params.into_iter().map(|k| k.expect_ty()),
2017-11-30 12:22:11 -03:00
_ => bug!(),
};
2019-12-24 17:38:22 -05:00
self.mk_fn_sig(params_iter, s.output(), s.c_variadic, unsafety, abi::Abi::Rust)
})
2017-11-30 12:22:11 -03:00
}
2019-08-11 12:55:14 -04:00
/// Interns `st` and returns the canonical `Ty` for it.
#[allow(rustc::usage_of_ty_tykind)]
#[inline]
2018-08-22 01:34:12 +01:00
pub fn mk_ty(&self, st: TyKind<'tcx>) -> Ty<'tcx> {
2019-05-31 10:23:22 +02:00
    self.interners.intern_ty(st)
2015-09-06 21:51:58 +03:00
}

2020-05-11 22:06:41 +02:00
/// Interns `kind` and wraps it into a `Predicate`.
#[inline]
pub fn mk_predicate(&self, kind: PredicateKind<'tcx>) -> Predicate<'tcx> {
2020-05-11 22:04:22 +02:00
    let kind = self.intern_predicate_kind(kind);
2020-05-11 22:06:41 +02:00
    Predicate { kind }
}
/// Maps an AST integer type to the corresponding pre-interned primitive `Ty`.
pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
2015-09-06 21:51:58 +03:00
    match tm {
2019-12-24 17:38:22 -05:00
        ast::IntTy::Isize => self.types.isize,
        ast::IntTy::I8 => self.types.i8,
        ast::IntTy::I16 => self.types.i16,
        ast::IntTy::I32 => self.types.i32,
        ast::IntTy::I64 => self.types.i64,
        ast::IntTy::I128 => self.types.i128,
2015-09-06 21:51:58 +03:00
    }
}

/// Maps an AST unsigned integer type to the corresponding primitive `Ty`.
pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
2015-09-06 21:51:58 +03:00
    match tm {
2019-12-24 17:38:22 -05:00
        ast::UintTy::Usize => self.types.usize,
        ast::UintTy::U8 => self.types.u8,
        ast::UintTy::U16 => self.types.u16,
        ast::UintTy::U32 => self.types.u32,
        ast::UintTy::U64 => self.types.u64,
        ast::UintTy::U128 => self.types.u128,
2015-09-06 21:51:58 +03:00
    }
}

/// Maps an AST float type to the corresponding primitive `Ty`.
pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
2015-09-06 21:51:58 +03:00
    match tm {
2019-12-24 17:38:22 -05:00
        ast::FloatTy::F32 => self.types.f32,
        ast::FloatTy::F64 => self.types.f64,
2015-09-06 21:51:58 +03:00
    }
}

/// Returns the `str` type.
#[inline]
pub fn mk_str(self) -> Ty<'tcx> {
    self.mk_ty(Str)
2015-09-06 21:51:58 +03:00
}

/// Returns `&'static str`.
#[inline]
pub fn mk_static_str(self) -> Ty<'tcx> {
    self.mk_imm_ref(self.lifetimes.re_static, self.mk_str())
2015-09-06 21:51:58 +03:00
}

#[inline]
2019-02-09 22:11:53 +08:00
pub fn mk_adt(self, def: &'tcx AdtDef, substs: SubstsRef<'tcx>) -> Ty<'tcx> {
    // Take a copy of substs so that we own the vectors inside.
    self.mk_ty(Adt(def, substs))
2015-09-06 21:51:58 +03:00
}

/// Returns the type for an `extern` foreign item with the given `DefId`.
#[inline]
2017-09-03 19:53:58 +01:00
pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> {
    self.mk_ty(Foreign(def_id))
2017-09-03 19:53:58 +01:00
}
/// Builds `Wrapper<ty_param>` for a generic ADT such as `Box` or
/// `MaybeUninit`: the wrapper's first type parameter is set to `ty_param`,
/// and every later type parameter must have a default, which is substituted
/// in. Lifetime and const parameters are not supported (`bug!`).
fn mk_generic_adt(self, wrapper_def_id: DefId, ty_param: Ty<'tcx>) -> Ty<'tcx> {
    let adt_def = self.adt_def(wrapper_def_id);
2019-12-24 17:38:22 -05:00
    let substs =
        InternalSubsts::for_item(self, wrapper_def_id, |param, substs| match param.kind {
            GenericParamDefKind::Lifetime | GenericParamDefKind::Const => bug!(),
            GenericParamDefKind::Type { has_default, .. } => {
                if param.index == 0 {
                    ty_param.into()
                } else {
                    assert!(has_default);
                    self.type_of(param.def_id).subst(self, substs).into()
2018-05-10 23:02:41 +01:00
                }
            }
2019-12-24 17:38:22 -05:00
        });
    self.mk_ty(Adt(adt_def, substs))
2015-09-06 21:51:58 +03:00
}
/// Returns `Box<ty>`.
/// NOTE(review): presumably panics via `require_lang_item` if the owned-box
/// lang item is missing — `require_lang_item` is defined elsewhere; confirm.
#[inline]
pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
    let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem, None);
    self.mk_generic_adt(def_id, ty)
}

/// Returns `Item<ty>` for the given lang item, or `None` if it is absent.
#[inline]
2019-12-24 17:38:22 -05:00
pub fn mk_lang_item(self, ty: Ty<'tcx>, item: lang_items::LangItem) -> Option<Ty<'tcx>> {
2019-09-14 02:40:20 +08:00
    let def_id = self.lang_items().require(item).ok()?;
    Some(self.mk_generic_adt(def_id, ty))
}

/// Returns `Item<ty>` for the given diagnostic item, or `None` if absent.
#[inline]
pub fn mk_diagnostic_item(self, ty: Ty<'tcx>, name: Symbol) -> Option<Ty<'tcx>> {
    let def_id = self.get_diagnostic_item(name)?;
    Some(self.mk_generic_adt(def_id, ty))
}

/// Returns `MaybeUninit<ty>`.
#[inline]
pub fn mk_maybe_uninit(self, ty: Ty<'tcx>) -> Ty<'tcx> {
    let def_id = self.require_lang_item(lang_items::MaybeUninitLangItem, None);
    self.mk_generic_adt(def_id, ty)
}

/// Returns the raw pointer type `*const ty` / `*mut ty` per `tm.mutbl`.
#[inline]
pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
    self.mk_ty(RawPtr(tm))
2015-09-06 21:51:58 +03:00
}

/// Returns the reference type `&'r ty` / `&'r mut ty` per `tm.mutbl`.
#[inline]
pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
    self.mk_ty(Ref(r, tm.ty, tm.mutbl))
2015-09-06 21:51:58 +03:00
}

#[inline]
pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
    self.mk_ref(r, TypeAndMut { ty, mutbl: hir::Mutability::Mut })
2015-09-06 21:51:58 +03:00
}

#[inline]
pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
    self.mk_ref(r, TypeAndMut { ty, mutbl: hir::Mutability::Not })
2015-09-06 21:51:58 +03:00
}

#[inline]
pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
    self.mk_ptr(TypeAndMut { ty, mutbl: hir::Mutability::Mut })
2015-09-06 21:51:58 +03:00
}

#[inline]
pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
    self.mk_ptr(TypeAndMut { ty, mutbl: hir::Mutability::Not })
2015-09-06 21:51:58 +03:00
}

/// Returns `*const ()`.
#[inline]
pub fn mk_nil_ptr(self) -> Ty<'tcx> {
2018-09-10 11:07:13 +09:00
    self.mk_imm_ptr(self.mk_unit())
2015-09-06 21:51:58 +03:00
}

/// Returns `[ty; n]`; the length is interned as a usize constant.
#[inline]
pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
    self.mk_ty(Array(ty, ty::Const::from_usize(self, n)))
2015-09-06 21:51:58 +03:00
}

/// Returns the slice type `[ty]`.
#[inline]
pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
    self.mk_ty(Slice(ty))
2015-09-06 21:51:58 +03:00
}
#[inline]
pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> {
let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect();
2019-04-26 00:27:33 +01:00
self.mk_ty(Tuple(self.intern_substs(&kinds)))
}
pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output {
2019-04-26 00:27:33 +01:00
iter.intern_with(|ts| {
let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect();
2019-04-26 00:27:33 +01:00
self.mk_ty(Tuple(self.intern_substs(&kinds)))
})
2015-09-06 21:51:58 +03:00
}
/// Returns the unit type `()`.
#[inline]
2018-09-10 11:07:13 +09:00
pub fn mk_unit(self) -> Ty<'tcx> {
    self.types.unit
}

/// Fallback type for diverging expressions: `!` when the
/// `never_type_fallback` feature is enabled, `()` otherwise.
#[inline]
pub fn mk_diverging_default(self) -> Ty<'tcx> {
2019-12-24 17:38:22 -05:00
    if self.features().never_type_fallback { self.types.never } else { self.types.unit }
}

/// Returns the zero-sized type of the function item `def_id` with `substs`.
#[inline]
2019-12-24 17:38:22 -05:00
pub fn mk_fn_def(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> {
    self.mk_ty(FnDef(def_id, substs))
}

/// Returns the function-pointer type with the given signature.
#[inline]
pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
    self.mk_ty(FnPtr(fty))
2015-09-06 21:51:58 +03:00
}

/// Returns the trait-object type `dyn obj + reg`.
#[inline]
pub fn mk_dynamic(
    self,
2018-08-22 00:35:01 +01:00
    obj: ty::Binder<&'tcx List<ExistentialPredicate<'tcx>>>,
2019-12-24 17:38:22 -05:00
    reg: ty::Region<'tcx>,
) -> Ty<'tcx> {
    self.mk_ty(Dynamic(obj, reg))
2015-09-06 21:51:58 +03:00
}

/// Returns the (unnormalized) associated-type projection `<_ as Trait>::Item`.
#[inline]
2019-12-24 17:38:22 -05:00
pub fn mk_projection(self, item_def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> {
    self.mk_ty(Projection(ProjectionTy { item_def_id, substs }))
}
2015-09-06 21:51:58 +03:00

/// Returns the unique type of the closure `closure_id` with its substs.
#[inline]
2019-12-24 17:38:22 -05:00
pub fn mk_closure(self, closure_id: DefId, closure_substs: SubstsRef<'tcx>) -> Ty<'tcx> {
    self.mk_ty(Closure(closure_id, closure_substs))
2015-09-06 21:51:58 +03:00
}

/// Returns the unique type of the generator `id` with its substs and movability.
#[inline]
2019-12-24 17:38:22 -05:00
pub fn mk_generator(
    self,
    id: DefId,
    generator_substs: SubstsRef<'tcx>,
    movability: hir::Movability,
) -> Ty<'tcx> {
    self.mk_ty(Generator(id, generator_substs, movability))
2016-12-26 14:34:03 +01:00
}

#[inline]
2018-08-22 00:35:01 +01:00
pub fn mk_generator_witness(self, types: ty::Binder<&'tcx List<Ty<'tcx>>>) -> Ty<'tcx> {
    self.mk_ty(GeneratorWitness(types))
}

/// Returns a fresh type-inference-variable type for `v`.
#[inline]
pub fn mk_ty_var(self, v: TyVid) -> Ty<'tcx> {
    self.mk_ty_infer(TyVar(v))
2015-09-06 21:51:58 +03:00
}

/// Returns a const-inference variable of type `ty`.
#[inline]
2019-03-14 10:19:31 +01:00
pub fn mk_const_var(self, v: ConstVid<'tcx>, ty: Ty<'tcx>) -> &'tcx Const<'tcx> {
2019-12-24 17:38:22 -05:00
    self.mk_const(ty::Const { val: ty::ConstKind::Infer(InferConst::Var(v)), ty })
}

/// Returns an integer-literal inference-variable type.
#[inline]
pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
    self.mk_ty_infer(IntVar(v))
2015-09-06 21:51:58 +03:00
}

/// Returns a float-literal inference-variable type.
#[inline]
pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
    self.mk_ty_infer(FloatVar(v))
2015-09-06 21:51:58 +03:00
}

#[inline]
pub fn mk_ty_infer(self, it: InferTy) -> Ty<'tcx> {
    self.mk_ty(Infer(it))
2015-09-06 21:51:58 +03:00
}

#[inline]
2019-12-24 17:38:22 -05:00
pub fn mk_const_infer(self, ic: InferConst<'tcx>, ty: Ty<'tcx>) -> &'tcx ty::Const<'tcx> {
    self.mk_const(ty::Const { val: ty::ConstKind::Infer(ic), ty })
}

/// Returns the type for the generic type parameter `index`/`name`.
#[inline]
pub fn mk_ty_param(self, index: u32, name: Symbol) -> Ty<'tcx> {
    self.mk_ty(Param(ParamTy { index, name }))
2015-09-06 21:51:58 +03:00
}

/// Returns the constant for the generic const parameter `index`/`name`.
#[inline]
2019-12-24 17:38:22 -05:00
pub fn mk_const_param(self, index: u32, name: Symbol, ty: Ty<'tcx>) -> &'tcx Const<'tcx> {
    self.mk_const(ty::Const { val: ty::ConstKind::Param(ParamConst { index, name }), ty })
}
/// Turns a generic parameter definition into the `GenericArg` that refers to
/// it: an early-bound region, a type parameter, or a const parameter
/// depending on the parameter's kind.
pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> GenericArg<'tcx> {
2018-05-15 13:35:53 +01:00
    match param.kind {
        GenericParamDefKind::Lifetime => {
            self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into()
        }
        GenericParamDefKind::Type { .. } => self.mk_ty_param(param.index, param.name).into(),
        GenericParamDefKind::Const => {
            // A const parameter's type is looked up from its definition.
            self.mk_const_param(param.index, param.name, self.type_of(param.def_id)).into()
        }
2018-05-15 13:35:53 +01:00
    }
2015-09-06 21:51:58 +03:00
}
/// Returns the opaque type (`impl Trait`) for `def_id` with `substs`.
#[inline]
2019-02-09 22:11:53 +08:00
pub fn mk_opaque(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> {
    self.mk_ty(Opaque(def_id, substs))
}

/// Appends a field projection to `place` (see `mk_place_elem`).
pub fn mk_place_field(self, place: Place<'tcx>, f: Field, ty: Ty<'tcx>) -> Place<'tcx> {
    self.mk_place_elem(place, PlaceElem::Field(f, ty))
}

/// Appends a deref projection to `place`.
pub fn mk_place_deref(self, place: Place<'tcx>) -> Place<'tcx> {
    self.mk_place_elem(place, PlaceElem::Deref)
}

/// Appends a named downcast to the given variant of `adt_def`.
pub fn mk_place_downcast(
    self,
    place: Place<'tcx>,
    adt_def: &'tcx AdtDef,
    variant_index: VariantIdx,
) -> Place<'tcx> {
    self.mk_place_elem(
        place,
        PlaceElem::Downcast(Some(adt_def.variants[variant_index].ident.name), variant_index),
    )
}

/// Appends a downcast by variant index only (no variant name recorded).
pub fn mk_place_downcast_unnamed(
    self,
    place: Place<'tcx>,
    variant_index: VariantIdx,
) -> Place<'tcx> {
    self.mk_place_elem(place, PlaceElem::Downcast(None, variant_index))
}

/// Appends an index projection by the local `index`.
pub fn mk_place_index(self, place: Place<'tcx>, index: Local) -> Place<'tcx> {
    self.mk_place_elem(place, PlaceElem::Index(index))
}

/// This method copies `Place`'s projection, add an element and reintern it. Should not be used
/// to build a full `Place` it's just a convenient way to grab a projection and modify it in
/// flight.
pub fn mk_place_elem(self, place: Place<'tcx>, elem: PlaceElem<'tcx>) -> Place<'tcx> {
    let mut projection = place.projection.to_vec();
    projection.push(elem);
    Place { local: place.local, projection: self.intern_place_elems(&projection) }
}
2019-12-24 17:38:22 -05:00
/// Interns a non-empty list of existential predicates; the caller must
/// already have sorted them by `stable_cmp` (asserted below) so that
/// permutations intern to the same list.
pub fn intern_existential_predicates(
    self,
    eps: &[ExistentialPredicate<'tcx>],
) -> &'tcx List<ExistentialPredicate<'tcx>> {
    assert!(!eps.is_empty());
    assert!(eps.windows(2).all(|w| w[0].stable_cmp(self, &w[1]) != Ordering::Greater));
    self._intern_existential_predicates(eps)
}

2019-12-24 17:38:22 -05:00
pub fn intern_predicates(self, preds: &[Predicate<'tcx>]) -> &'tcx List<Predicate<'tcx>> {
    // FIXME consider asking the input slice to be sorted to avoid
    // re-interning permutations, in which case that would be asserted
    // here.
    if preds.is_empty() {
        // The macro-generated method below asserts we don't intern an empty slice.
2018-08-22 00:35:01 +01:00
        List::empty()
    } else {
        self._intern_predicates(preds)
    }
}

// The remaining `intern_*` wrappers all special-case the empty slice to the
// shared `List::empty()` before calling the macro-generated `_intern_*`.
2018-08-22 00:35:01 +01:00
pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List<Ty<'tcx>> {
    if ts.is_empty() { List::empty() } else { self._intern_type_list(ts) }
}

pub fn intern_substs(self, ts: &[GenericArg<'tcx>]) -> &'tcx List<GenericArg<'tcx>> {
    if ts.is_empty() { List::empty() } else { self._intern_substs(ts) }
}

pub fn intern_projs(self, ps: &[ProjectionKind]) -> &'tcx List<ProjectionKind> {
    if ps.is_empty() { List::empty() } else { self._intern_projs(ps) }
}

pub fn intern_place_elems(self, ts: &[PlaceElem<'tcx>]) -> &'tcx List<PlaceElem<'tcx>> {
    if ts.is_empty() { List::empty() } else { self._intern_place_elems(ts) }
}

2019-06-14 00:48:52 +03:00
pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'tcx> {
    if ts.is_empty() { List::empty() } else { self._intern_canonical_var_infos(ts) }
}

2020-03-03 11:25:03 -05:00
pub fn intern_chalk_environment_clause_list(
    self,
    ts: &[traits::ChalkEnvironmentClause<'tcx>],
) -> &'tcx List<traits::ChalkEnvironmentClause<'tcx>> {
    if ts.is_empty() { List::empty() } else { self._intern_chalk_environment_clause_list(ts) }
}

2019-12-24 17:38:22 -05:00
/// Builds an interned `FnSig` from the input types plus the output type;
/// the output is appended to the inputs as the final element of
/// `inputs_and_output`.
pub fn mk_fn_sig<I>(
    self,
    inputs: I,
    output: I::Item,
    c_variadic: bool,
    unsafety: hir::Unsafety,
    abi: abi::Abi,
) -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
where
    I: Iterator<Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>,
{
    inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
        inputs_and_output: self.intern_type_list(xs),
2019-12-24 17:38:22 -05:00
        c_variadic,
        unsafety,
        abi,
    })
}

2019-12-24 17:38:22 -05:00
// Iterator-based counterparts of the slice-based `intern_*` methods above.
pub fn mk_existential_predicates<
    I: InternAs<[ExistentialPredicate<'tcx>], &'tcx List<ExistentialPredicate<'tcx>>>,
>(
    self,
    iter: I,
) -> I::Output {
    iter.intern_with(|xs| self.intern_existential_predicates(xs))
}

2019-12-24 17:38:22 -05:00
pub fn mk_predicates<I: InternAs<[Predicate<'tcx>], &'tcx List<Predicate<'tcx>>>>(
    self,
    iter: I,
) -> I::Output {
    iter.intern_with(|xs| self.intern_predicates(xs))
}

2019-12-24 17:38:22 -05:00
pub fn mk_type_list<I: InternAs<[Ty<'tcx>], &'tcx List<Ty<'tcx>>>>(self, iter: I) -> I::Output {
    iter.intern_with(|xs| self.intern_type_list(xs))
}

2019-12-24 17:38:22 -05:00
pub fn mk_substs<I: InternAs<[GenericArg<'tcx>], &'tcx List<GenericArg<'tcx>>>>(
    self,
    iter: I,
) -> I::Output {
    iter.intern_with(|xs| self.intern_substs(xs))
}

2019-12-24 17:38:22 -05:00
pub fn mk_place_elems<I: InternAs<[PlaceElem<'tcx>], &'tcx List<PlaceElem<'tcx>>>>(
    self,
    iter: I,
) -> I::Output {
2019-10-20 16:11:04 -04:00
    iter.intern_with(|xs| self.intern_place_elems(xs))
}
2019-12-24 17:38:22 -05:00
/// Builds the substitutions for a trait reference: the `Self` type always
/// occupies the first slot, followed by `rest`.
pub fn mk_substs_trait(self, self_ty: Ty<'tcx>, rest: &[GenericArg<'tcx>]) -> SubstsRef<'tcx> {
    let params = iter::once(self_ty.into()).chain(rest.iter().cloned());
    self.mk_substs(params)
}
rustc: Rearchitect lints to be emitted more eagerly In preparation for incremental compilation this commit refactors the lint handling infrastructure in the compiler to be more "eager" and overall more incremental-friendly. Many passes of the compiler can emit lints at various points but before this commit all lints were buffered in a table to be emitted at the very end of compilation. This commit changes these lints to be emitted immediately during compilation using pre-calculated lint level-related data structures. Linting today is split into two phases, one set of "early" lints run on the `syntax::ast` and a "late" set of lints run on the HIR. This commit moves the "early" lints to running as late as possible in compilation, just before HIR lowering. This notably means that we're catching resolve-related lints just before HIR lowering. The early linting remains a pass very similar to how it was before, maintaining context of the current lint level as it walks the tree. Post-HIR, however, linting is structured as a method on the `TyCtxt` which transitively executes a query to calculate lint levels. Each request to lint on a `TyCtxt` will query the entire crate's 'lint level data structure' and then go from there about whether the lint should be emitted or not. The query depends on the entire HIR crate but should be very quick to calculate (just a quick walk of the HIR) and the red-green system should notice that the lint level data structure rarely changes, and should hopefully preserve incrementality. Overall this resulted in a pretty big change to the test suite now that lints are emitted much earlier in compilation (on-demand vs only at the end). This in turn necessitated the addition of many `#![allow(warnings)]` directives throughout the compile-fail test suite and a number of updates to the UI test suite.
2017-07-26 21:51:09 -07:00
2020-03-03 11:25:03 -05:00
/// Iterator-based counterpart of `intern_chalk_environment_clause_list`.
pub fn mk_chalk_environment_clause_list<
    I: InternAs<
        [traits::ChalkEnvironmentClause<'tcx>],
        &'tcx List<traits::ChalkEnvironmentClause<'tcx>>,
    >,
>(
    self,
    iter: I,
) -> I::Output {
    iter.intern_with(|xs| self.intern_chalk_environment_clause_list(xs))
}
/// Walks upwards from `id` to find a node which might change lint levels with attributes.
/// It stops at `bound` and just returns it if reached.
2020-01-09 06:08:07 +01:00
pub fn maybe_lint_level_root_bounded(self, mut id: HirId, bound: HirId) -> HirId {
let hir = self.hir();
loop {
if id == bound {
return bound;
rustc: Rearchitect lints to be emitted more eagerly In preparation for incremental compilation this commit refactors the lint handling infrastructure in the compiler to be more "eager" and overall more incremental-friendly. Many passes of the compiler can emit lints at various points but before this commit all lints were buffered in a table to be emitted at the very end of compilation. This commit changes these lints to be emitted immediately during compilation using pre-calculated lint level-related data structures. Linting today is split into two phases, one set of "early" lints run on the `syntax::ast` and a "late" set of lints run on the HIR. This commit moves the "early" lints to running as late as possible in compilation, just before HIR lowering. This notably means that we're catching resolve-related lints just before HIR lowering. The early linting remains a pass very similar to how it was before, maintaining context of the current lint level as it walks the tree. Post-HIR, however, linting is structured as a method on the `TyCtxt` which transitively executes a query to calculate lint levels. Each request to lint on a `TyCtxt` will query the entire crate's 'lint level data structure' and then go from there about whether the lint should be emitted or not. The query depends on the entire HIR crate but should be very quick to calculate (just a quick walk of the HIR) and the red-green system should notice that the lint level data structure rarely changes, and should hopefully preserve incrementality. Overall this resulted in a pretty big change to the test suite now that lints are emitted much earlier in compilation (on-demand vs only at the end). This in turn necessitated the addition of many `#![allow(warnings)]` directives throughout the compile-fail test suite and a number of updates to the UI test suite.
2017-07-26 21:51:09 -07:00
}
2020-01-09 06:08:07 +01:00
if hir.attrs(id).iter().any(|attr| Level::from_symbol(attr.name_or_empty()).is_some()) {
return id;
}
2020-01-09 06:08:07 +01:00
let next = hir.get_parent_node(id);
if next == id {
bug!("lint traversal reached the root of the crate");
}
id = next;
}
}
pub fn lint_level_at_node(
self,
lint: &'static Lint,
2019-12-24 17:38:22 -05:00
mut id: hir::HirId,
2020-01-05 10:58:44 +01:00
) -> (Level, LintSource) {
let sets = self.lint_levels(LOCAL_CRATE);
loop {
if let Some(pair) = sets.level_and_source(lint, id, self.sess) {
2019-12-24 17:38:22 -05:00
return pair;
}
let next = self.hir().get_parent_node(id);
if next == id {
bug!("lint traversal reached the root of the crate");
}
id = next;
}
rustc: Rearchitect lints to be emitted more eagerly In preparation for incremental compilation this commit refactors the lint handling infrastructure in the compiler to be more "eager" and overall more incremental-friendly. Many passes of the compiler can emit lints at various points but before this commit all lints were buffered in a table to be emitted at the very end of compilation. This commit changes these lints to be emitted immediately during compilation using pre-calculated lint level-related data structures. Linting today is split into two phases, one set of "early" lints run on the `syntax::ast` and a "late" set of lints run on the HIR. This commit moves the "early" lints to running as late as possible in compilation, just before HIR lowering. This notably means that we're catching resolve-related lints just before HIR lowering. The early linting remains a pass very similar to how it was before, maintaining context of the current lint level as it walks the tree. Post-HIR, however, linting is structured as a method on the `TyCtxt` which transitively executes a query to calculate lint levels. Each request to lint on a `TyCtxt` will query the entire crate's 'lint level data structure' and then go from there about whether the lint should be emitted or not. The query depends on the entire HIR crate but should be very quick to calculate (just a quick walk of the HIR) and the red-green system should notice that the lint level data structure rarely changes, and should hopefully preserve incrementality. Overall this resulted in a pretty big change to the test suite now that lints are emitted much earlier in compilation (on-demand vs only at the end). This in turn necessitated the addition of many `#![allow(warnings)]` directives throughout the compile-fail test suite and a number of updates to the UI test suite.
2017-07-26 21:51:09 -07:00
}
2020-01-05 10:58:44 +01:00
pub fn struct_span_lint_hir(
2019-12-24 17:38:22 -05:00
self,
lint: &'static Lint,
hir_id: HirId,
2020-01-05 10:58:44 +01:00
span: impl Into<MultiSpan>,
2020-02-02 09:47:58 +10:00
decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>),
) {
2019-02-26 11:48:34 +01:00
let (level, src) = self.lint_level_at_node(lint, hir_id);
struct_lint_level(self.sess, lint, level, src, Some(span.into()), decorate);
rustc: Rearchitect lints to be emitted more eagerly In preparation for incremental compilation this commit refactors the lint handling infrastructure in the compiler to be more "eager" and overall more incremental-friendly. Many passes of the compiler can emit lints at various points but before this commit all lints were buffered in a table to be emitted at the very end of compilation. This commit changes these lints to be emitted immediately during compilation using pre-calculated lint level-related data structures. Linting today is split into two phases, one set of "early" lints run on the `syntax::ast` and a "late" set of lints run on the HIR. This commit moves the "early" lints to running as late as possible in compilation, just before HIR lowering. This notably means that we're catching resolve-related lints just before HIR lowering. The early linting remains a pass very similar to how it was before, maintaining context of the current lint level as it walks the tree. Post-HIR, however, linting is structured as a method on the `TyCtxt` which transitively executes a query to calculate lint levels. Each request to lint on a `TyCtxt` will query the entire crate's 'lint level data structure' and then go from there about whether the lint should be emitted or not. The query depends on the entire HIR crate but should be very quick to calculate (just a quick walk of the HIR) and the red-green system should notice that the lint level data structure rarely changes, and should hopefully preserve incrementality. Overall this resulted in a pretty big change to the test suite now that lints are emitted much earlier in compilation (on-demand vs only at the end). This in turn necessitated the addition of many `#![allow(warnings)]` directives throughout the compile-fail test suite and a number of updates to the UI test suite.
2017-07-26 21:51:09 -07:00
}
2019-12-24 17:38:22 -05:00
pub fn struct_lint_node(
self,
lint: &'static Lint,
id: HirId,
decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>),
) {
rustc: Rearchitect lints to be emitted more eagerly In preparation for incremental compilation this commit refactors the lint handling infrastructure in the compiler to be more "eager" and overall more incremental-friendly. Many passes of the compiler can emit lints at various points but before this commit all lints were buffered in a table to be emitted at the very end of compilation. This commit changes these lints to be emitted immediately during compilation using pre-calculated lint level-related data structures. Linting today is split into two phases, one set of "early" lints run on the `syntax::ast` and a "late" set of lints run on the HIR. This commit moves the "early" lints to running as late as possible in compilation, just before HIR lowering. This notably means that we're catching resolve-related lints just before HIR lowering. The early linting remains a pass very similar to how it was before, maintaining context of the current lint level as it walks the tree. Post-HIR, however, linting is structured as a method on the `TyCtxt` which transitively executes a query to calculate lint levels. Each request to lint on a `TyCtxt` will query the entire crate's 'lint level data structure' and then go from there about whether the lint should be emitted or not. The query depends on the entire HIR crate but should be very quick to calculate (just a quick walk of the HIR) and the red-green system should notice that the lint level data structure rarely changes, and should hopefully preserve incrementality. Overall this resulted in a pretty big change to the test suite now that lints are emitted much earlier in compilation (on-demand vs only at the end). This in turn necessitated the addition of many `#![allow(warnings)]` directives throughout the compile-fail test suite and a number of updates to the UI test suite.
2017-07-26 21:51:09 -07:00
let (level, src) = self.lint_level_at_node(lint, id);
struct_lint_level(self.sess, lint, level, src, None, decorate);
rustc: Rearchitect lints to be emitted more eagerly In preparation for incremental compilation this commit refactors the lint handling infrastructure in the compiler to be more "eager" and overall more incremental-friendly. Many passes of the compiler can emit lints at various points but before this commit all lints were buffered in a table to be emitted at the very end of compilation. This commit changes these lints to be emitted immediately during compilation using pre-calculated lint level-related data structures. Linting today is split into two phases, one set of "early" lints run on the `syntax::ast` and a "late" set of lints run on the HIR. This commit moves the "early" lints to running as late as possible in compilation, just before HIR lowering. This notably means that we're catching resolve-related lints just before HIR lowering. The early linting remains a pass very similar to how it was before, maintaining context of the current lint level as it walks the tree. Post-HIR, however, linting is structured as a method on the `TyCtxt` which transitively executes a query to calculate lint levels. Each request to lint on a `TyCtxt` will query the entire crate's 'lint level data structure' and then go from there about whether the lint should be emitted or not. The query depends on the entire HIR crate but should be very quick to calculate (just a quick walk of the HIR) and the red-green system should notice that the lint level data structure rarely changes, and should hopefully preserve incrementality. Overall this resulted in a pretty big change to the test suite now that lints are emitted much earlier in compilation (on-demand vs only at the end). This in turn necessitated the addition of many `#![allow(warnings)]` directives throughout the compile-fail test suite and a number of updates to the UI test suite.
2017-07-26 21:51:09 -07:00
}
2019-06-14 00:48:52 +03:00
pub fn in_scope_traits(self, id: HirId) -> Option<&'tcx StableVec<TraitCandidate>> {
self.in_scope_traits_map(id.owner).and_then(|map| map.get(&id.local_id))
}
/// Returns the lifetime-resolution `Region` recorded for `id`, if any.
pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
    let region_map = self.named_region_map(id.owner)?;
    region_map.get(&id.local_id).cloned()
}
/// Returns `true` iff `id` appears in the late-bound set for its owner.
pub fn is_late_bound(self, id: HirId) -> bool {
    match self.is_late_bound_map(id.owner) {
        Some(set) => set.contains(&id.local_id),
        None => false,
    }
}
/// Returns the object lifetime defaults recorded for `id`, if any.
pub fn object_lifetime_defaults(self, id: HirId) -> Option<&'tcx [ObjectLifetimeDefault]> {
    let defaults_map = self.object_lifetime_defaults_map(id.owner)?;
    defaults_map.get(&id.local_id).map(|v| &**v)
}
2015-09-06 21:51:58 +03:00
}
/// Converts a value (in practice, an iterator — see the blanket impl below)
/// into an interned result by handing a borrowed `&T` to the callback `f`.
pub trait InternAs<T: ?Sized, R> {
    type Output;
    fn intern_with<F: FnOnce(&T) -> R>(self, f: F) -> Self::Output;
}
impl<I, T, R, E> InternAs<[T], R> for I
2019-12-24 17:38:22 -05:00
where
E: InternIteratorElement<T, R>,
I: Iterator<Item = E>,
{
type Output = E::Output;
fn intern_with<F>(self, f: F) -> Self::Output
2019-12-24 17:38:22 -05:00
where
F: FnOnce(&[T]) -> R,
{
E::intern_with(self, f)
}
}
/// Implemented by iterator item types (`T`, `&T`, `Result<T, E>`) so that the
/// `InternAs` blanket impl can collect them into a slice for the callback `f`.
pub trait InternIteratorElement<T, R>: Sized {
    type Output;
    fn intern_with<I, F>(iter: I, f: F) -> Self::Output
    where
        I: Iterator<Item = Self>,
        F: FnOnce(&[T]) -> R;
}
impl<T, R> InternIteratorElement<T, R> for T {
type Output = R;
2019-12-24 17:38:22 -05:00
fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
f(&iter.collect::<SmallVec<[_; 8]>>())
}
}
impl<'a, T, R> InternIteratorElement<T, R> for &'a T
2019-12-24 17:38:22 -05:00
where
T: Clone + 'a,
{
type Output = R;
2019-12-24 17:38:22 -05:00
fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
f(&iter.cloned().collect::<SmallVec<[_; 8]>>())
}
}
impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
type Output = Result<R, E>;
2019-12-24 17:38:22 -05:00
fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(
mut iter: I,
f: F,
) -> Self::Output {
// This code is hot enough that it's worth specializing for the most
// common length lists, to avoid the overhead of `SmallVec` creation.
// The match arms are in order of frequency. The 1, 2, and 0 cases are
// typically hit in ~95% of cases. We assume that if the upper and
// lower bounds from `size_hint` agree they are correct.
Ok(match iter.size_hint() {
(1, Some(1)) => {
let t0 = iter.next().unwrap()?;
assert!(iter.next().is_none());
f(&[t0])
}
(2, Some(2)) => {
let t0 = iter.next().unwrap()?;
let t1 = iter.next().unwrap()?;
assert!(iter.next().is_none());
f(&[t0, t1])
}
(0, Some(0)) => {
assert!(iter.next().is_none());
f(&[])
}
2019-12-24 17:38:22 -05:00
_ => f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?),
})
}
}
2019-02-23 14:25:03 +00:00
// Address comparison between pointers whose pointees have different
// (invariant) lifetimes, which rules out calling `ptr::eq` on the original
// types; erasing both sides to `*const ()` first makes the types agree.
fn ptr_eq<T, U>(t: *const T, u: *const U) -> bool {
    std::ptr::eq(t as *const (), u as *const ())
}
pub fn provide(providers: &mut ty::query::Providers<'_>) {
2018-12-01 16:17:59 +01:00
providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id);
2018-12-01 16:23:32 +01:00
providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).map(|v| &v[..]);
providers.crate_name = |tcx, id| {
assert_eq!(id, LOCAL_CRATE);
tcx.crate_name
};
2019-12-24 17:38:22 -05:00
providers.maybe_unused_trait_import = |tcx, id| tcx.maybe_unused_trait_imports.contains(&id);
providers.maybe_unused_extern_crates = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
&tcx.maybe_unused_extern_crates[..]
};
providers.names_imported_by_glob_use =
|tcx, id| tcx.arena.alloc(tcx.glob_map.get(&id).cloned().unwrap_or_default());
providers.lookup_stability = |tcx, id| {
let id = tcx.hir().local_def_id_to_hir_id(id.expect_local());
tcx.stability().local_stability(id)
};
providers.lookup_const_stability = |tcx, id| {
let id = tcx.hir().local_def_id_to_hir_id(id.expect_local());
tcx.stability().local_const_stability(id)
};
providers.lookup_deprecation_entry = |tcx, id| {
let id = tcx.hir().local_def_id_to_hir_id(id.expect_local());
tcx.stability().local_deprecation_entry(id)
};
providers.extern_mod_stmt_cnum = |tcx, id| tcx.extern_crate_map.get(&id).cloned();
providers.all_crate_nums = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
tcx.arena.alloc_slice(&tcx.cstore.crates_untracked())
};
providers.output_filenames = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
tcx.output_filenames.clone()
};
2018-02-14 16:11:02 +01:00
providers.features_query = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
2020-03-27 20:26:20 +01:00
tcx.sess.features_untracked()
2018-02-14 16:11:02 +01:00
};
providers.is_panic_runtime = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
attr::contains_name(tcx.hir().krate_attrs(), sym::panic_runtime)
};
providers.is_compiler_builtins = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
attr::contains_name(tcx.hir().krate_attrs(), sym::compiler_builtins)
};
2019-10-28 17:07:15 -04:00
providers.has_panic_handler = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
// We want to check if the panic handler was defined in this crate
tcx.lang_items().panic_impl().map_or(false, |did| did.is_local())
};
}