Aggregation of drive-by cosmetic changes.
parent 35ef33a89d
commit 51cb60cd3f

40 changed files with 615 additions and 608 deletions
@@ -1919,8 +1919,9 @@ pub enum ImplItemKind {
 /// Bindings like `A: Debug` are represented as a special type `A =
 /// $::Debug` that is understood by the astconv code.
 ///
-/// FIXME(alexreg) -- why have a separate type for the binding case,
-/// wouldn't it be better to make the `ty` field an enum like:
+/// FIXME(alexreg): why have a separate type for the binding case,
+/// wouldn't it be better to make the `ty` field an enum like the
+/// following?
 ///
 /// ```
 /// enum TypeBindingKind {
@@ -306,7 +306,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for Canonicalizer<'cx, 'tcx> {
 match *r {
 ty::ReLateBound(index, ..) => {
 if index >= self.binder_index {
-bug!("escaping late bound region during canonicalization")
+bug!("escaping late-bound region during canonicalization");
 } else {
 r
 }
@@ -336,7 +336,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for Canonicalizer<'cx, 'tcx> {
 .canonicalize_free_region(self, r),

 ty::ReClosureBound(..) => {
-bug!("closure bound region encountered during canonicalization")
+bug!("closure bound region encountered during canonicalization");
 }
 }
 }
@@ -346,14 +346,14 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for Canonicalizer<'cx, 'tcx> {
 ty::Infer(ty::TyVar(vid)) => {
 debug!("canonical: type var found with vid {:?}", vid);
 match self.infcx.unwrap().probe_ty_var(vid) {
-// `t` could be a float / int variable: canonicalize that instead
+// `t` could be a float / int variable; canonicalize that instead.
 Ok(t) => {
 debug!("(resolved to {:?})", t);
 self.fold_ty(t)
 }

 // `TyVar(vid)` is unresolved, track its universe index in the canonicalized
-// result
+// result.
 Err(mut ui) => {
 if !self.infcx.unwrap().tcx.sess.opts.debugging_opts.chalk {
 // FIXME: perf problem described in #55921.
@@ -48,22 +48,24 @@
 use super::lexical_region_resolve::RegionResolutionError;
 use super::region_constraints::GenericKind;
 use super::{InferCtxt, RegionVariableOrigin, SubregionOrigin, TypeTrace, ValuePairs};
-use crate::infer::{self, SuppressRegionErrors};

 use crate::hir;
 use crate::hir::def_id::DefId;
 use crate::hir::Node;
+use crate::infer::{self, SuppressRegionErrors};
 use crate::infer::opaque_types;
 use crate::middle::region;
-use crate::traits::{IfExpressionCause, MatchExpressionArmCause, ObligationCause};
-use crate::traits::{ObligationCauseCode};
+use crate::traits::{
+    IfExpressionCause, MatchExpressionArmCause, ObligationCause, ObligationCauseCode,
+};
 use crate::ty::error::TypeError;
 use crate::ty::{self, subst::{Subst, SubstsRef}, Region, Ty, TyCtxt, TypeFoldable};

 use errors::{Applicability, DiagnosticBuilder, DiagnosticStyledString};
-use std::{cmp, fmt};
+use rustc_error_codes::*;
 use syntax_pos::{Pos, Span};

-use rustc_error_codes::*;
+use std::{cmp, fmt};

 mod note;
@@ -1249,7 +1251,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
 }

 /// When encountering a case where `.as_ref()` on a `Result` or `Option` would be appropriate,
-/// suggest it.
+/// suggests it.
 fn suggest_as_ref_where_appropriate(
 &self,
 span: Span,
@@ -221,7 +221,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
 }

 /// The `TypeOutlives` struct has the job of "lowering" a `T: 'a`
-/// obligation into a series of `'a: 'b` constraints and "verifys", as
+/// obligation into a series of `'a: 'b` constraints and "verify"s, as
 /// described on the module comment. The final constraints are emitted
 /// via a "delegate" of type `D` -- this is usually the `infcx`, which
 /// accrues them into the `region_obligations` code, but for NLL we
@@ -1,11 +1,12 @@
-use std::fmt::{self, Display};
-use std::convert::TryFrom;
+use super::{AllocId, InterpResult};

 use crate::mir;
 use crate::ty::layout::{self, HasDataLayout, Size};

 use rustc_macros::HashStable;

-use super::{AllocId, InterpResult};
+use std::convert::TryFrom;
+use std::fmt::{self, Display};

 /// Used by `check_in_alloc` to indicate context of check
 #[derive(Debug, Copy, Clone, RustcEncodable, RustcDecodable, HashStable)]
@@ -74,8 +75,8 @@ pub trait PointerArithmetic: layout::HasDataLayout {
 fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
 // FIXME: is it possible to over/underflow here?
 if i < 0 {
-// Trickery to ensure that i64::min_value() works fine: compute n = -i.
-// This formula only works for true negative values, it overflows for zero!
+// Trickery to ensure that `i64::min_value()` works fine: compute `n = -i`.
+// This formula only works for true negative values; it overflows for zero!
 let n = u64::max_value() - (i as u64) + 1;
 let res = val.overflowing_sub(n);
 self.truncate_to_ptr(res)
@@ -105,7 +106,7 @@ impl<T: layout::HasDataLayout> PointerArithmetic for T {}
 ///
 /// Defaults to the index based and loosely coupled `AllocId`.
 ///
-/// Pointer is also generic over the `Tag` associated with each pointer,
+/// `Pointer` is also generic over the `Tag` associated with each pointer,
 /// which is used to do provenance tracking during execution.
 #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd,
          RustcEncodable, RustcDecodable, Hash, HashStable)]
@@ -129,7 +130,7 @@ impl<Id: fmt::Debug> fmt::Debug for Pointer<(), Id> {
 }
 }

-/// Produces a `Pointer` which points to the beginning of the `Allocation`.
+/// Produces a `Pointer` that points to the beginning of the `Allocation`.
 impl From<AllocId> for Pointer {
 #[inline(always)]
 fn from(alloc_id: AllocId) -> Self {
@@ -1203,7 +1203,7 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options,
 force_frame_pointers: Option<bool> = (None, parse_opt_bool, [TRACKED],
 "force use of the frame pointers"),
 debug_assertions: Option<bool> = (None, parse_opt_bool, [TRACKED],
-"explicitly enable the cfg(debug_assertions) directive"),
+"explicitly enable the `cfg(debug_assertions)` directive"),
 inline_threshold: Option<usize> = (None, parse_opt_uint, [TRACKED],
 "set the threshold for inlining a function (default: 225)"),
 panic: Option<PanicStrategy> = (None, parse_panic_strategy,
@@ -1,18 +1,18 @@
-//! Support code for rustdoc and external tools . You really don't
-//! want to be using this unless you need to.
+//! Support code for rustdoc and external tools.
+//! You really don't want to be using this unless you need to.

 use super::*;

-use std::collections::hash_map::Entry;
-use std::collections::VecDeque;

 use crate::infer::region_constraints::{Constraint, RegionConstraintData};
 use crate::infer::InferCtxt;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};

 use crate::ty::fold::TypeFolder;
 use crate::ty::{Region, RegionVid};

+use rustc_data_structures::fx::{FxHashMap, FxHashSet};

+use std::collections::hash_map::Entry;
+use std::collections::VecDeque;

 // FIXME(twk): this is obviously not nice to duplicate like that
 #[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)]
 pub enum RegionTarget<'tcx> {
@@ -235,41 +235,42 @@ impl<'tcx> AutoTraitFinder<'tcx> {
 impl AutoTraitFinder<'tcx> {
 // The core logic responsible for computing the bounds for our synthesized impl.
 //
-// To calculate the bounds, we call SelectionContext.select in a loop. Like FulfillmentContext,
-// we recursively select the nested obligations of predicates we encounter. However, whenever we
-// encounter an UnimplementedError involving a type parameter, we add it to our ParamEnv. Since
-// our goal is to determine when a particular type implements an auto trait, Unimplemented
-// errors tell us what conditions need to be met.
+// To calculate the bounds, we call `SelectionContext.select` in a loop. Like
+// `FulfillmentContext`, we recursively select the nested obligations of predicates we
+// encounter. However, whenever we encounter an `UnimplementedError` involving a type parameter,
+// we add it to our `ParamEnv`. Since our goal is to determine when a particular type implements
+// an auto trait, Unimplemented errors tell us what conditions need to be met.
 //
-// This method ends up working somewhat similarly to FulfillmentContext, but with a few key
-// differences. FulfillmentContext works under the assumption that it's dealing with concrete
-// user code. According, it considers all possible ways that a Predicate could be met - which
-// isn't always what we want for a synthesized impl. For example, given the predicate 'T:
-// Iterator', FulfillmentContext can end up reporting an Unimplemented error for T:
-// IntoIterator - since there's an implementation of Iteratpr where T: IntoIterator,
-// FulfillmentContext will drive SelectionContext to consider that impl before giving up. If we
-// were to rely on FulfillmentContext's decision, we might end up synthesizing an impl like
+// This method ends up working somewhat similarly to `FulfillmentContext`, but with a few key
+// differences. `FulfillmentContext` works under the assumption that it's dealing with concrete
+// user code. According, it considers all possible ways that a `Predicate` could be met, which
+// isn't always what we want for a synthesized impl. For example, given the predicate `T:
+// Iterator`, `FulfillmentContext` can end up reporting an Unimplemented error for `T:
+// IntoIterator` -- since there's an implementation of `Iterator` where `T: IntoIterator`,
+// `FulfillmentContext` will drive `SelectionContext` to consider that impl before giving up. If
+// we were to rely on `FulfillmentContext`'s decision, we might end up synthesizing an impl like
 // this:
-// 'impl<T> Send for Foo<T> where T: IntoIterator'
 //
-// While it might be technically true that Foo implements Send where T: IntoIterator,
-// the bound is overly restrictive - it's really only necessary that T: Iterator.
+// impl<T> Send for Foo<T> where T: IntoIterator
 //
-// For this reason, evaluate_predicates handles predicates with type variables specially. When
-// we encounter an Unimplemented error for a bound such as 'T: Iterator', we immediately add it
-// to our ParamEnv, and add it to our stack for recursive evaluation. When we later select it,
-// we'll pick up any nested bounds, without ever inferring that 'T: IntoIterator' needs to
+// While it might be technically true that Foo implements Send where `T: IntoIterator`,
+// the bound is overly restrictive - it's really only necessary that `T: Iterator`.
+//
+// For this reason, `evaluate_predicates` handles predicates with type variables specially. When
+// we encounter an `Unimplemented` error for a bound such as `T: Iterator`, we immediately add
+// it to our `ParamEnv`, and add it to our stack for recursive evaluation. When we later select
+// it, we'll pick up any nested bounds, without ever inferring that `T: IntoIterator` needs to
 // hold.
 //
-// One additional consideration is supertrait bounds. Normally, a ParamEnv is only ever
-// constructed once for a given type. As part of the construction process, the ParamEnv will
-// have any supertrait bounds normalized - e.g., if we have a type 'struct Foo<T: Copy>', the
-// ParamEnv will contain 'T: Copy' and 'T: Clone', since 'Copy: Clone'. When we construct our
-// own ParamEnv, we need to do this ourselves, through traits::elaborate_predicates, or else
-// SelectionContext will choke on the missing predicates. However, this should never show up in
-// the final synthesized generics: we don't want our generated docs page to contain something
-// like 'T: Copy + Clone', as that's redundant. Therefore, we keep track of a separate
-// 'user_env', which only holds the predicates that will actually be displayed to the user.
+// One additional consideration is supertrait bounds. Normally, a `ParamEnv` is only ever
+// constructed once for a given type. As part of the construction process, the `ParamEnv` will
+// have any supertrait bounds normalized -- e.g., if we have a type `struct Foo<T: Copy>`, the
+// `ParamEnv` will contain `T: Copy` and `T: Clone`, since `Copy: Clone`. When we construct our
+// own `ParamEnv`, we need to do this ourselves, through `traits::elaborate_predicates`, or else
+// `SelectionContext` will choke on the missing predicates. However, this should never show up
+// in the final synthesized generics: we don't want our generated docs page to contain something
+// like `T: Copy + Clone`, as that's redundant. Therefore, we keep track of a separate
+// `user_env`, which only holds the predicates that will actually be displayed to the user.
 fn evaluate_predicates(
 &self,
 infcx: &InferCtxt<'_, 'tcx>,
@@ -307,7 +308,7 @@ impl AutoTraitFinder<'tcx> {
 continue;
 }

-// Call infcx.resolve_vars_if_possible to see if we can
+// Call `infcx.resolve_vars_if_possible` to see if we can
 // get rid of any inference variables.
 let obligation = infcx.resolve_vars_if_possible(
 &Obligation::new(dummy_cause.clone(), new_env, pred)
@@ -316,14 +317,14 @@ impl AutoTraitFinder<'tcx> {

 match &result {
 &Ok(Some(ref vtable)) => {
-// If we see an explicit negative impl (e.g., 'impl !Send for MyStruct'),
+// If we see an explicit negative impl (e.g., `impl !Send for MyStruct`),
 // we immediately bail out, since it's impossible for us to continue.
 match vtable {
 Vtable::VtableImpl(VtableImplData { impl_def_id, .. }) => {
-// Blame tidy for the weird bracket placement
+// Blame 'tidy' for the weird bracket placement.
 if infcx.tcx.impl_polarity(*impl_def_id) == ty::ImplPolarity::Negative
 {
-debug!("evaluate_nested_obligations: Found explicit negative impl\
+debug!("evaluate_nested_obligations: found explicit negative impl\
 {:?}, bailing out", impl_def_id);
 return None;
 }
@@ -356,7 +357,7 @@ impl AutoTraitFinder<'tcx> {
 predicates.push_back(pred);
 } else {
 debug!(
-"evaluate_nested_obligations: Unimplemented found, bailing: \
+"evaluate_nested_obligations: `Unimplemented` found, bailing: \
 {:?} {:?} {:?}",
 ty,
 pred,
@@ -393,28 +394,28 @@ impl AutoTraitFinder<'tcx> {
 }

 // This method is designed to work around the following issue:
-// When we compute auto trait bounds, we repeatedly call SelectionContext.select,
-// progressively building a ParamEnv based on the results we get.
-// However, our usage of SelectionContext differs from its normal use within the compiler,
-// in that we capture and re-reprocess predicates from Unimplemented errors.
+// When we compute auto trait bounds, we repeatedly call `SelectionContext.select`,
+// progressively building a `ParamEnv` based on the results we get.
+// However, our usage of `SelectionContext` differs from its normal use within the compiler,
+// in that we capture and re-reprocess predicates from `Unimplemented` errors.
 //
 // This can lead to a corner case when dealing with region parameters.
-// During our selection loop in evaluate_predicates, we might end up with
+// During our selection loop in `evaluate_predicates`, we might end up with
 // two trait predicates that differ only in their region parameters:
 // one containing a HRTB lifetime parameter, and one containing a 'normal'
 // lifetime parameter. For example:
 //
 // T as MyTrait<'a>
 // T as MyTrait<'static>
 //
-// If we put both of these predicates in our computed ParamEnv, we'll
-// confuse SelectionContext, since it will (correctly) view both as being applicable.
+// If we put both of these predicates in our computed `ParamEnv`, we'll
+// confuse `SelectionContext`, since it will (correctly) view both as being applicable.
 //
-// To solve this, we pick the 'more strict' lifetime bound - i.e., the HRTB
+// To solve this, we pick the 'more strict' lifetime bound -- i.e., the HRTB
 // Our end goal is to generate a user-visible description of the conditions
 // under which a type implements an auto trait. A trait predicate involving
 // a HRTB means that the type needs to work with any choice of lifetime,
-// not just one specific lifetime (e.g., 'static).
+// not just one specific lifetime (e.g., `'static`).
 fn add_user_pred<'c>(
 &self,
 user_computed_preds: &mut FxHashSet<ty::Predicate<'c>>,
@@ -430,7 +431,7 @@ impl AutoTraitFinder<'tcx> {

 if !new_substs.types().eq(old_substs.types()) {
 // We can't compare lifetimes if the types are different,
-// so skip checking old_pred
+// so skip checking `old_pred`.
 return true;
 }

@@ -438,8 +439,8 @@ impl AutoTraitFinder<'tcx> {
 new_substs.regions().zip(old_substs.regions())
 {
 match (new_region, old_region) {
-// If both predicates have an 'ReLateBound' (a HRTB) in the
-// same spot, we do nothing
+// If both predicates have an `ReLateBound` (a HRTB) in the
+// same spot, we do nothing.
 (
 ty::RegionKind::ReLateBound(_, _),
 ty::RegionKind::ReLateBound(_, _),
@@ -463,13 +464,13 @@ impl AutoTraitFinder<'tcx> {
 // varaible).
 //
 // In both cases, we want to remove the old predicate,
-// from user_computed_preds, and replace it with the new
+// from `user_computed_preds`, and replace it with the new
 // one. Having both the old and the new
-// predicate in a ParamEnv would confuse SelectionContext
+// predicate in a `ParamEnv` would confuse `SelectionContext`.
 //
 // We're currently in the predicate passed to 'retain',
-// so we return 'false' to remove the old predicate from
-// user_computed_preds
+// so we return `false` to remove the old predicate from
+// `user_computed_preds`.
 return false;
 }
 (_, ty::RegionKind::ReLateBound(_, _)) |
@@ -486,8 +487,8 @@ impl AutoTraitFinder<'tcx> {
 // predicate has some other type of region.
 //
 // We want to leave the old
-// predicate in user_computed_preds, and skip adding
-// new_pred to user_computed_params.
+// predicate in `user_computed_preds`, and skip adding
+// new_pred to `user_computed_params`.
 should_add_new = false
 },
 _ => {}
@@ -505,8 +506,8 @@ impl AutoTraitFinder<'tcx> {
 }
 }

-// This is very similar to handle_lifetimes. However, instead of matching ty::Region's
-// to each other, we match ty::RegionVid's to ty::Region's
+// This is very similar to `handle_lifetimes`. However, instead of matching `ty::Region`'s
+// to each other, we match `ty::RegionVid`'s to `ty::Region`'s.
 fn map_vid_to_region<'cx>(
 &self,
 regions: &RegionConstraintData<'cx>,
@@ -573,7 +574,7 @@ impl AutoTraitFinder<'tcx> {
 finished_map.insert(v1, r1);
 }
 (&RegionTarget::Region(_), &RegionTarget::RegionVid(_)) => {
-// Do nothing - we don't care about regions that are smaller than vids
+// Do nothing; we don't care about regions that are smaller than vids.
 }
 (&RegionTarget::RegionVid(_), &RegionTarget::RegionVid(_)) => {
 if let Entry::Occupied(v) = vid_map.entry(*smaller) {
@@ -191,23 +191,23 @@ pub enum ObligationCauseCode<'tcx> {
 /// Obligation incurred due to a coercion.
 Coercion { source: Ty<'tcx>, target: Ty<'tcx> },

-// Various cases where expressions must be sized/copy/etc:
-/// L = X implies that L is Sized
+// Various cases where expressions must be `Sized` / `Copy` / etc.
+/// `L = X` implies that `L` is `Sized`.
 AssignmentLhsSized,
-/// (x1, .., xn) must be Sized
+/// `(x1, .., xn)` must be `Sized`.
 TupleInitializerSized,
-/// S { ... } must be Sized
+/// `S { ... }` must be `Sized`.
 StructInitializerSized,
-/// Type of each variable must be Sized
+/// Type of each variable must be `Sized`.
 VariableType(hir::HirId),
-/// Argument type must be Sized
+/// Argument type must be `Sized`.
 SizedArgumentType,
-/// Return type must be Sized
+/// Return type must be `Sized`.
 SizedReturnType,
-/// Yield type must be Sized
+/// Yield type must be `Sized`.
 SizedYieldType,
-/// [T,..n] --> T must be Copy. If `true`, suggest `const_in_array_repeat_expressions` feature
-/// flag.
+/// `[T, ..n]` implies that `T` must be `Copy`.
+/// If `true`, suggest `const_in_array_repeat_expressions` feature flag.
 RepeatVec(bool),

 /// Types of fields (other than the last, except for packed structs) in a struct must be sized.
@@ -216,7 +216,7 @@ pub enum ObligationCauseCode<'tcx> {
 /// Constant expressions must be sized.
 ConstSized,

-/// Static items must have `Sync` type
+/// `static` items must have `Sync` type.
 SharedStatic,

 BuiltinDerivedObligation(DerivedObligationCause<'tcx>),
@@ -602,7 +602,7 @@ pub enum Vtable<'tcx, N> {
 /// the impl's type parameters.
 ///
 /// The type parameter `N` indicates the type used for "nested
-/// obligations" that are required by the impl. During type check, this
+/// obligations" that are required by the impl. During type-check, this
 /// is `Obligation`, as one might expect. During codegen, however, this
 /// is `()`, because codegen only requires a shallow resolution of an
 /// impl, and nested obligations are satisfied later.
@@ -1046,8 +1046,7 @@ fn vtable_methods<'tcx>(
 return None;
 }

-// the method may have some early-bound lifetimes, add
-// regions for those
+// The method may have some early-bound lifetimes; add regions for those.
 let substs = trait_ref.map_bound(|trait_ref|
 InternalSubsts::for_item(tcx, def_id, |param, _|
 match param.kind {
@@ -1060,15 +1059,15 @@ fn vtable_methods<'tcx>(
 )
 );

-// the trait type may have higher-ranked lifetimes in it;
-// so erase them if they appear, so that we get the type
-// at some particular call site
+// The trait type may have higher-ranked lifetimes in it;
+// erase them if they appear, so that we get the type
+// at some particular call site.
 let substs = tcx.normalize_erasing_late_bound_regions(
 ty::ParamEnv::reveal_all(),
 &substs
 );

-// It's possible that the method relies on where clauses that
+// It's possible that the method relies on where-clauses that
 // do not hold for this particular set of type parameters.
 // Note that this method could then never be called, so we
 // do not want to try and codegen it, in that case (see #23435).
@@ -157,7 +157,7 @@ impl IntercrateAmbiguityCause {
 struct TraitObligationStack<'prev, 'tcx> {
 obligation: &'prev TraitObligation<'tcx>,

-/// Trait ref from `obligation` but "freshened" with the
+/// The trait ref from `obligation` but "freshened" with the
 /// selection-context's freshener. Used to check for recursion.
 fresh_trait_ref: ty::PolyTraitRef<'tcx>,

@@ -193,11 +193,11 @@ struct TraitObligationStack<'prev, 'tcx> {

 previous: TraitObligationStackList<'prev, 'tcx>,

-/// Number of parent frames plus one -- so the topmost frame has depth 1.
+/// The number of parent frames plus one (thus, the topmost frame has depth 1).
 depth: usize,

-/// Depth-first number of this node in the search graph -- a
-/// pre-order index. Basically a freshly incremented counter.
+/// The depth-first number of this node in the search graph -- a
+/// pre-order index. Basically, a freshly incremented counter.
 dfn: usize,
 }

@@ -239,9 +239,9 @@ pub struct SelectionCache<'tcx> {
 /// }
 /// fn foo<T: AsDebug>(t: T) { println!("{:?}", <T as AsDebug>::debug(t)); }
 ///
-/// we can't just use the impl to resolve the <T as AsDebug> obligation
-/// - a type from another crate (that doesn't implement fmt::Debug) could
-/// implement AsDebug.
+/// we can't just use the impl to resolve the `<T as AsDebug>` obligation
+/// -- a type from another crate (that doesn't implement `fmt::Debug`) could
+/// implement `AsDebug`.
 ///
 /// Because where-clauses match the type exactly, multiple clauses can
 /// only match if there are unresolved variables, and we can mostly just
@@ -266,10 +266,10 @@ pub struct SelectionCache<'tcx> {
 /// }
 /// fn main() { foo(false); }
 ///
-/// Here the obligation <T as Foo<$0>> can be matched by both the blanket
-/// impl and the where-clause. We select the where-clause and unify $0=bool,
+/// Here the obligation `<T as Foo<$0>>` can be matched by both the blanket
+/// impl and the where-clause. We select the where-clause and unify `$0=bool`,
 /// so the program prints "false". However, if the where-clause is omitted,
-/// the blanket impl is selected, we unify $0=(), and the program prints
+/// the blanket impl is selected, we unify `$0=()`, and the program prints
 /// "()".
 ///
 /// Exactly the same issues apply to projection and object candidates, except
@@ -282,8 +282,8 @@ pub struct SelectionCache<'tcx> {
 /// parameter environment.
 #[derive(PartialEq, Eq, Debug, Clone, TypeFoldable)]
 enum SelectionCandidate<'tcx> {
-/// If has_nested is false, there are no *further* obligations
 BuiltinCandidate {
+/// `false` if there are no *further* obligations.
 has_nested: bool,
 },
 ParamCandidate(ty::PolyTraitRef<'tcx>),
@@ -303,7 +303,7 @@ enum SelectionCandidate<'tcx> {
 GeneratorCandidate,

 /// Implementation of a `Fn`-family trait by one of the anonymous
-/// types generated for a fn pointer type (e.g., `fn(int)->int`)
+/// types generated for a fn pointer type (e.g., `fn(int) -> int`)
 FnPointerCandidate,

 TraitAliasCandidate(DefId),
@@ -339,11 +339,11 @@ impl<'a, 'tcx> ty::Lift<'tcx> for SelectionCandidate<'a> {
 }

 struct SelectionCandidateSet<'tcx> {
-// a list of candidates that definitely apply to the current
+// A list of candidates that definitely apply to the current
 // obligation (meaning: types unify).
 vec: Vec<SelectionCandidate<'tcx>>,

-// if this is true, then there were candidates that might or might
+// If `true`, then there were candidates that might or might
 // not have applied, but we couldn't tell. This occurs when some
 // of the input types are type variables, in which case there are
 // various "builtin" rules that might or might not trigger.
@@ -358,7 +358,7 @@ struct EvaluatedCandidate<'tcx> {

 /// When does the builtin impl for `T: Trait` apply?
 enum BuiltinImplConditions<'tcx> {
-/// The impl is conditional on T1,T2,.. : Trait
+/// The impl is conditional on `T1, T2, ...: Trait`.
 Where(ty::Binder<Vec<Ty<'tcx>>>),
 /// There is no built-in impl. There may be some other
 /// candidate (a where-clause or user-defined impl).
@@ -381,15 +381,15 @@ enum BuiltinImplConditions<'tcx> {
 /// the categories it's easy to see that the unions are correct.
 #[derive(Copy, Clone, Debug, PartialOrd, Ord, PartialEq, Eq, HashStable)]
 pub enum EvaluationResult {
-/// Evaluation successful
+/// Evaluation successful.
 EvaluatedToOk,
-/// Evaluation successful, but there were unevaluated region obligations
+/// Evaluation successful, but there were unevaluated region obligations.
 EvaluatedToOkModuloRegions,
-/// Evaluation is known to be ambiguous - it *might* hold for some
+/// Evaluation is known to be ambiguous -- it *might* hold for some
 /// assignment of inference variables, but it might not.
 ///
-/// While this has the same meaning as `EvaluatedToUnknown` - we can't
-/// know whether this obligation holds or not - it is the result we
+/// While this has the same meaning as `EvaluatedToUnknown` -- we can't
+/// know whether this obligation holds or not -- it is the result we
 /// would get with an empty stack, and therefore is cacheable.
 EvaluatedToAmbig,
 /// Evaluation failed because of recursion involving inference
@@ -404,29 +404,29 @@ pub enum EvaluationResult {
 /// We know this branch can't be a part of a minimal proof-tree for
 /// the "root" of our cycle, because then we could cut out the recursion
 /// and maintain a valid proof tree. However, this does not mean
-/// that all the obligations on this branch do not hold - it's possible
+/// that all the obligations on this branch do not hold -- it's possible
 /// that we entered this branch "speculatively", and that there
 /// might be some other way to prove this obligation that does not
-/// go through this cycle - so we can't cache this as a failure.
+/// go through this cycle -- so we can't cache this as a failure.
 ///
 /// For example, suppose we have this:
 ///
 /// ```rust,ignore (pseudo-Rust)
 /// pub trait Trait { fn xyz(); }
 /// // This impl is "useless", but we can still have
 /// // an `impl Trait for SomeUnsizedType` somewhere.
 /// impl<T: Trait + Sized> Trait for T { fn xyz() {} }
 ///
 /// pub fn foo<T: Trait + ?Sized>() {
 /// <T as Trait>::xyz();
 /// }
 /// ```
 ///
 /// When checking `foo`, we have to prove `T: Trait`. This basically
 /// translates into this:
 ///
 /// ```plain,ignore
 /// (T: Trait + Sized →_\impl T: Trait), T: Trait ⊢ T: Trait
 /// ```
 ///
 /// When we try to prove it, we first go the first option, which
@@ -594,7 +594,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 // 1. If no applicable impl or parameter bound can be found.
 // 2. If the output type parameters in the obligation do not match
 // those specified by the impl/bound. For example, if the obligation
-// is `Vec<Foo>:Iterable<Bar>`, but the impl specifies
+// is `Vec<Foo>: Iterable<Bar>`, but the impl specifies
 // `impl<T> Iterable<T> for Vec<T>`, than an error would result.

 /// Attempts to satisfy the obligation. If successful, this will affect the surrounding
@@ -723,10 +723,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 debug!("evaluate_predicate_recursively(previous_stack={:?}, obligation={:?})",
 previous_stack.head(), obligation);

-// Previous_stack stores a TraitObligatiom, while 'obligation' is
-// a PredicateObligation. These are distinct types, so we can't
-// use any Option combinator method that would force them to be
-// the same
+// `previous_stack` stores a `TraitObligatiom`, while `obligation` is
+// a `PredicateObligation`. These are distinct types, so we can't
+// use any `Option` combinator method that would force them to be
+// the same.
 match previous_stack.head() {
 Some(h) => self.check_recursion_limit(&obligation, h.obligation)?,
 None => self.check_recursion_limit(&obligation, &obligation)?
@@ -740,7 +740,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 }

 ty::Predicate::Subtype(ref p) => {
-// does this code ever run?
+// Does this code ever run?
 match self.infcx
 .subtype_predicate(&obligation.cause, obligation.param_env, p)
 {
@@ -768,8 +768,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 },

 ty::Predicate::TypeOutlives(..) | ty::Predicate::RegionOutlives(..) => {
-// we do not consider region relationships when
-// evaluating trait matches
+// We do not consider region relationships when evaluating trait matches.
 Ok(EvaluatedToOkModuloRegions)
 }

@@ -953,7 +952,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 stack: &TraitObligationStack<'_, 'tcx>,
 ) -> Option<EvaluationResult> {
 if let Some(cycle_depth) = stack.iter()
-.skip(1) // skip top-most frame
+.skip(1) // Skip top-most frame.
 .find(|prev| stack.obligation.param_env == prev.obligation.param_env &&
 stack.fresh_trait_ref == prev.fresh_trait_ref)
 .map(|stack| stack.depth)
@@ -1030,8 +1029,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 .skip_binder()
 .input_types()
 .any(|ty| ty.is_fresh());
-// this check was an imperfect workaround for a bug n the old
-// intercrate mode, it should be removed when that goes away.
+// This check was an imperfect workaround for a bug in the old
+// intercrate mode; it should be removed when that goes away.
 if unbound_input_types && self.intercrate == Some(IntercrateMode::Issue43355) {
 debug!(
 "evaluate_stack({:?}) --> unbound argument, intercrate --> ambiguous",
@@ -1083,7 +1082,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 }

 /// For defaulted traits, we use a co-inductive strategy to solve, so
-/// that recursion is ok. This routine returns true if the top of the
+/// that recursion is ok. This routine returns `true` if the top of the
 /// stack (`cycle[0]`):
 ///
 /// - is a defaulted trait,
@@ -1107,7 +1106,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 result
 }

-/// Further evaluate `candidate` to decide whether all type parameters match and whether nested
+/// Further evaluates `candidate` to decide whether all type parameters match and whether nested
 /// obligations are met. Returns whether `candidate` remains viable after this further
 /// scrutiny.
 fn evaluate_candidate<'o>(
@@ -1202,11 +1201,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 // For various reasons, it's possible for a subobligation
 // to have a *lower* recursion_depth than the obligation used to create it.
 // Projection sub-obligations may be returned from the projection cache,
-// which results in obligations with an 'old' recursion_depth.
-// Additionally, methods like ty::wf::obligations and
-// InferCtxt.subtype_predicate produce subobligations without
+// which results in obligations with an 'old' `recursion_depth`.
+// Additionally, methods like `ty::wf::obligations` and
+// `InferCtxt.subtype_predicate` produce subobligations without
 // taking in a 'parent' depth, causing the generated subobligations
-// to have a recursion_depth of 0
+// to have a `recursion_depth` of `0`.
 //
 // To ensure that obligation_depth never decreasees, we force all subobligations
 // to have at least the depth of the original obligation.
@@ -1215,10 +1214,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 it.for_each(|o| o.recursion_depth = cmp::max(min_depth, o.recursion_depth) + 1);
 }

-// Check that the recursion limit has not been exceeded.
+// Checks that the recursion limit has not been exceeded.
 //
-// The weird return type of this function allows it to be used with the 'try' (?)
-// operator within certain functions
+// The weird return type of this function allows it to be used with the `try` (`?`)
+// operator within certain functions.
 fn check_recursion_limit<T: Display + TypeFoldable<'tcx>, V: Display + TypeFoldable<'tcx>>(
 &self,
 obligation: &Obligation<'tcx, T>,
@@ -1256,7 +1255,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 // not update) the cache.
 self.check_recursion_limit(&stack.obligation, &stack.obligation)?;

-
 // Check the cache. Note that we freshen the trait-ref
 // separately rather than using `stack.fresh_trait_ref` --
 // this is because we want the unbound variables to be
@@ -1436,10 +1434,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 // candidate set is *individually* applicable. Now we have to
 // figure out if they contain mutual incompatibilities. This
 // frequently arises if we have an unconstrained input type --
-// for example, we are looking for $0:Eq where $0 is some
+// for example, we are looking for `$0: Eq` where `$0` is some
 // unconstrained type variable. In that case, we'll get a
-// candidate which assumes $0 == int, one that assumes $0 ==
-// usize, etc. This spells an ambiguity.
+// candidate which assumes $0 == int, one that assumes `$0 ==
+// usize`, etc. This spells an ambiguity.

 // If there is more than one candidate, first winnow them down
 // by considering extra conditions (nested obligations and so
@@ -1453,8 +1451,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 // and we were to see some code `foo.push_clone()` where `boo`
 // is a `Vec<Bar>` and `Bar` does not implement `Clone`. If
 // we were to winnow, we'd wind up with zero candidates.
-// Instead, we select the right impl now but report `Bar does
-// not implement Clone`.
+// Instead, we select the right impl now but report "`Bar` does
+// not implement `Clone`".
 if candidates.len() == 1 {
 return self.filter_negative_and_reservation_impls(candidates.pop().unwrap());
 }
@@ -1586,7 +1584,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 // avoid us having to fear that coherence results "pollute"
 // the master cache. Since coherence executes pretty quickly,
 // it's not worth going to more trouble to increase the
-// hit-rate I don't think.
+// hit-rate, I don't think.
 if self.intercrate.is_some() {
 return false;
 }
@@ -1617,13 +1615,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 }

 /// Determines whether can we safely cache the result
-/// of selecting an obligation. This is almost always 'true',
-/// except when dealing with certain ParamCandidates.
+/// of selecting an obligation. This is almost always `true`,
+/// except when dealing with certain `ParamCandidate`s.
 ///
-/// Ordinarily, a ParamCandidate will contain no inference variables,
-/// since it was usually produced directly from a DefId. However,
+/// Ordinarily, a `ParamCandidate` will contain no inference variables,
+/// since it was usually produced directly from a `DefId`. However,
 /// certain cases (currently only librustdoc's blanket impl finder),
-/// a ParamEnv may be explicitly constructed with inference types.
+/// a `ParamEnv` may be explicitly constructed with inference types.
 /// When this is the case, we do *not* want to cache the resulting selection
 /// candidate. This is due to the fact that it might not always be possible
 /// to equate the obligation's trait ref and the candidate's trait ref,
@@ -1631,7 +1629,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 ///
 /// Because of this, we always want to re-run the full selection
 /// process for our obligation the next time we see it, since
-/// we might end up picking a different SelectionCandidate (or none at all)
+/// we might end up picking a different `SelectionCandidate` (or none at all).
 fn can_cache_candidate(&self,
 result: &SelectionResult<'tcx, SelectionCandidate<'tcx>>
 ) -> bool {
@@ -1662,15 +1660,14 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {

 if self.can_use_global_caches(param_env) {
 if let Err(Overflow) = candidate {
-// Don't cache overflow globally; we only produce this
-// in certain modes.
+// Don't cache overflow globally; we only produce this in certain modes.
 } else if !trait_ref.has_local_value() {
 if !candidate.has_local_value() {
 debug!(
 "insert_candidate_cache(trait_ref={:?}, candidate={:?}) global",
 trait_ref, candidate,
 );
-// This may overwrite the cache with the same value
+// This may overwrite the cache with the same value.
 tcx.selection_cache
 .hashmap
 .borrow_mut()
@ -1755,7 +1752,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
} else {
|
} else {
|
||||||
if lang_items.clone_trait() == Some(def_id) {
|
if lang_items.clone_trait() == Some(def_id) {
|
||||||
// Same builtin conditions as `Copy`, i.e., every type which has builtin support
|
// Same builtin conditions as `Copy`, i.e., every type which has builtin support
|
||||||
// for `Copy` also has builtin support for `Clone`, + tuples and arrays of `Clone`
|
// for `Copy` also has builtin support for `Clone`, and tuples/arrays of `Clone`
|
||||||
// types have builtin support for `Clone`.
|
// types have builtin support for `Clone`.
|
||||||
let clone_conditions = self.copy_clone_conditions(obligation);
|
let clone_conditions = self.copy_clone_conditions(obligation);
|
||||||
self.assemble_builtin_bound_candidates(clone_conditions, &mut candidates)?;
|
self.assemble_builtin_bound_candidates(clone_conditions, &mut candidates)?;
|
||||||
|
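A user-level sketch of what these builtin conditions amount to (the `assert_clone` helper is invented for the illustration, not compiler code): tuples and arrays of `Clone` types, like `Copy` types, are `Clone` without any user-written impl.

```rust
fn assert_clone<T: Clone>() {}

fn main() {
    assert_clone::<(String, Vec<u8>)>(); // tuple of `Clone` types
    assert_clone::<[String; 4]>();       // array of a `Clone` type
    assert_clone::<fn()>();              // fn pointers are `Copy`, hence `Clone`
}
```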
@ -1786,7 +1783,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
) {
|
) {
|
||||||
debug!("assemble_candidates_for_projected_tys({:?})", obligation);
|
debug!("assemble_candidates_for_projected_tys({:?})", obligation);
|
||||||
|
|
||||||
// before we go into the whole placeholder thing, just
|
// Before we go into the whole placeholder thing, just
|
||||||
// quickly check if the self-type is a projection at all.
|
// quickly check if the self-type is a projection at all.
|
||||||
match obligation.predicate.skip_binder().trait_ref.self_ty().kind {
|
match obligation.predicate.skip_binder().trait_ref.self_ty().kind {
|
||||||
ty::Projection(_) | ty::Opaque(..) => {}
|
ty::Projection(_) | ty::Opaque(..) => {}
|
||||||
|
@ -1907,10 +1904,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
self.infcx.leak_check(false, placeholder_map, snapshot).is_ok()
|
self.infcx.leak_check(false, placeholder_map, snapshot).is_ok()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Given an obligation like `<SomeTrait for T>`, search the obligations that the caller
|
/// Given an obligation like `<SomeTrait for T>`, searches the obligations that the caller
|
||||||
/// supplied to find out whether it is listed among them.
|
/// supplied to find out whether it is listed among them.
|
||||||
///
|
///
|
||||||
/// Never affects inference environment.
|
/// Never affects the inference environment.
|
||||||
fn assemble_candidates_from_caller_bounds<'o>(
|
fn assemble_candidates_from_caller_bounds<'o>(
|
||||||
&mut self,
|
&mut self,
|
||||||
stack: &TraitObligationStack<'o, 'tcx>,
|
stack: &TraitObligationStack<'o, 'tcx>,
|
||||||
|
@ -2052,7 +2049,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Implement one of the `Fn()` family for a fn pointer.
|
/// Implements one of the `Fn()` family for a fn pointer.
|
||||||
fn assemble_fn_pointer_candidates(
|
fn assemble_fn_pointer_candidates(
|
||||||
&mut self,
|
&mut self,
|
||||||
obligation: &TraitObligation<'tcx>,
|
obligation: &TraitObligation<'tcx>,
|
||||||
|
@ -2067,14 +2064,14 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
// Okay to skip binder because what we are inspecting doesn't involve bound regions
|
// Okay to skip binder because what we are inspecting doesn't involve bound regions.
|
||||||
let self_ty = *obligation.self_ty().skip_binder();
|
let self_ty = *obligation.self_ty().skip_binder();
|
||||||
match self_ty.kind {
|
match self_ty.kind {
|
||||||
ty::Infer(ty::TyVar(_)) => {
|
ty::Infer(ty::TyVar(_)) => {
|
||||||
debug!("assemble_fn_pointer_candidates: ambiguous self-type");
|
debug!("assemble_fn_pointer_candidates: ambiguous self-type");
|
||||||
candidates.ambiguous = true; // could wind up being a fn() type
|
candidates.ambiguous = true; // Could wind up being a fn() type.
|
||||||
}
|
}
|
||||||
// provide an impl, but only for suitable `fn` pointers
|
// Provide an impl, but only for suitable `fn` pointers.
|
||||||
ty::FnDef(..) | ty::FnPtr(_) => {
|
ty::FnDef(..) | ty::FnPtr(_) => {
|
||||||
if let ty::FnSig {
|
if let ty::FnSig {
|
||||||
unsafety: hir::Unsafety::Normal,
|
unsafety: hir::Unsafety::Normal,
|
||||||
|
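As a source-level illustration of the candidate assembled here (the `call_twice` and `double` names are made up for the example): a safe, non-variadic `fn` pointer satisfies an `Fn` trait obligation with no user-written impl.

```rust
fn call_twice<F: Fn(i32) -> i32>(f: F, x: i32) -> i32 {
    f(f(x))
}

fn double(x: i32) -> i32 {
    x * 2
}

fn main() {
    // A plain `fn` pointer satisfies the `Fn(i32) -> i32` obligation.
    let f: fn(i32) -> i32 = double;
    assert_eq!(call_twice(f, 3), 12);
}
```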
@ -2092,7 +2089,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Search for impls that might apply to `obligation`.
|
/// Searches for impls that might apply to `obligation`.
|
||||||
fn assemble_candidates_from_impls(
|
fn assemble_candidates_from_impls(
|
||||||
&mut self,
|
&mut self,
|
||||||
obligation: &TraitObligation<'tcx>,
|
obligation: &TraitObligation<'tcx>,
|
||||||
|
@ -2160,7 +2157,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
// this path.
|
// this path.
|
||||||
}
|
}
|
||||||
ty::Infer(ty::TyVar(_)) => {
|
ty::Infer(ty::TyVar(_)) => {
|
||||||
// the auto impl might apply, we don't know
|
// The auto impl might apply; we don't know.
|
||||||
candidates.ambiguous = true;
|
candidates.ambiguous = true;
|
||||||
}
|
}
|
||||||
ty::Generator(_, _, movability)
|
ty::Generator(_, _, movability)
|
||||||
|
@ -2188,7 +2185,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Search for impls that might apply to `obligation`.
|
/// Searches for impls that might apply to `obligation`.
|
||||||
fn assemble_candidates_from_object_ty(
|
fn assemble_candidates_from_object_ty(
|
||||||
&mut self,
|
&mut self,
|
||||||
obligation: &TraitObligation<'tcx>,
|
obligation: &TraitObligation<'tcx>,
|
||||||
|
@ -2226,7 +2223,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// Only auto-trait bounds exist.
|
// Only auto trait bounds exist.
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -2247,7 +2244,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
// we are looking for. Specifically, do not only check for the
|
// we are looking for. Specifically, do not only check for the
|
||||||
// correct trait, but also the correct type parameters.
|
// correct trait, but also the correct type parameters.
|
||||||
// For example, we may be trying to upcast `Foo` to `Bar<i32>`,
|
// For example, we may be trying to upcast `Foo` to `Bar<i32>`,
|
||||||
// but `Foo` is declared as `trait Foo : Bar<u32>`.
|
// but `Foo` is declared as `trait Foo: Bar<u32>`.
|
||||||
let upcast_trait_refs = util::supertraits(self.tcx(), poly_trait_ref)
|
let upcast_trait_refs = util::supertraits(self.tcx(), poly_trait_ref)
|
||||||
.filter(|upcast_trait_ref| {
|
.filter(|upcast_trait_ref| {
|
||||||
self.infcx.probe(|_| {
|
self.infcx.probe(|_| {
|
||||||
|
@ -2267,7 +2264,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Search for unsizing that might apply to `obligation`.
|
/// Searches for unsizing that might apply to `obligation`.
|
||||||
fn assemble_candidates_for_unsizing(
|
fn assemble_candidates_for_unsizing(
|
||||||
&mut self,
|
&mut self,
|
||||||
obligation: &TraitObligation<'tcx>,
|
obligation: &TraitObligation<'tcx>,
|
||||||
|
@ -2311,11 +2308,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
(&ty::Dynamic(ref data_a, ..), &ty::Dynamic(ref data_b, ..)) => {
|
(&ty::Dynamic(ref data_a, ..), &ty::Dynamic(ref data_b, ..)) => {
|
||||||
// Upcasts permit two things:
|
// Upcasts permit two things:
|
||||||
//
|
//
|
||||||
// 1. Dropping builtin bounds, e.g., `Foo+Send` to `Foo`
|
// 1. Dropping auto traits, e.g., `Foo + Send` to `Foo`
|
||||||
// 2. Tightening the region bound, e.g., `Foo+'a` to `Foo+'b` if `'a : 'b`
|
// 2. Tightening the region bound, e.g., `Foo + 'a` to `Foo + 'b` if `'a: 'b`
|
||||||
//
|
//
|
||||||
// Note that neither of these changes requires any
|
// Note that neither of these changes requires any
|
||||||
// change at runtime. Eventually this will be
|
// change at runtime. Eventually this will be
|
||||||
// generalized.
|
// generalized.
|
||||||
//
|
//
|
||||||
// We always upcast when we can because of reason
|
// We always upcast when we can because of reason
|
||||||
|
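The two permitted upcasts described in the comment above, shown as source-level coercions (`std::fmt::Debug` is used purely as an example trait):

```rust
use std::fmt::Debug;

// 1. Dropping an auto trait: `dyn Debug + Send` coerces to `dyn Debug`.
fn drop_auto_trait(x: &(dyn Debug + Send)) -> &dyn Debug {
    x
}

// 2. Tightening the region bound: `dyn Debug + 'static` coerces to
//    `dyn Debug + 'a`, since `'static: 'a`.
fn tighten_region<'a>(x: &'a (dyn Debug + 'static)) -> &'a (dyn Debug + 'a) {
    x
}

fn main() {
    let value = 42_u8;
    let _ = drop_auto_trait(&value);
    let _ = tighten_region(&value);
}
```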
@ -2326,11 +2323,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
.all(|b| data_a.auto_traits().any(|a| a == b))
|
.all(|b| data_a.auto_traits().any(|a| a == b))
|
||||||
}
|
}
|
||||||
|
|
||||||
// T -> Trait.
|
// `T` -> `Trait`
|
||||||
(_, &ty::Dynamic(..)) => true,
|
(_, &ty::Dynamic(..)) => true,
|
||||||
|
|
||||||
// Ambiguous handling is below T -> Trait, because inference
|
// Ambiguous handling is below `T` -> `Trait`, because inference
|
||||||
// variables can still implement Unsize<Trait> and nested
|
// variables can still implement `Unsize<Trait>` and nested
|
||||||
// obligations will have the final say (likely deferred).
|
// obligations will have the final say (likely deferred).
|
||||||
(&ty::Infer(ty::TyVar(_)), _) | (_, &ty::Infer(ty::TyVar(_))) => {
|
(&ty::Infer(ty::TyVar(_)), _) | (_, &ty::Infer(ty::TyVar(_))) => {
|
||||||
debug!("assemble_candidates_for_unsizing: ambiguous");
|
debug!("assemble_candidates_for_unsizing: ambiguous");
|
||||||
|
@ -2338,15 +2335,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
false
|
false
|
||||||
}
|
}
|
||||||
|
|
||||||
// [T; n] -> [T].
|
// `[T; n]` -> `[T]`
|
||||||
(&ty::Array(..), &ty::Slice(_)) => true,
|
(&ty::Array(..), &ty::Slice(_)) => true,
|
||||||
|
|
||||||
// Struct<T> -> Struct<U>.
|
// `Struct<T>` -> `Struct<U>`
|
||||||
(&ty::Adt(def_id_a, _), &ty::Adt(def_id_b, _)) if def_id_a.is_struct() => {
|
(&ty::Adt(def_id_a, _), &ty::Adt(def_id_b, _)) if def_id_a.is_struct() => {
|
||||||
def_id_a == def_id_b
|
def_id_a == def_id_b
|
||||||
}
|
}
|
||||||
|
|
||||||
// (.., T) -> (.., U).
|
// `(.., T)` -> `(.., U)`
|
||||||
(&ty::Tuple(tys_a), &ty::Tuple(tys_b)) => tys_a.len() == tys_b.len(),
|
(&ty::Tuple(tys_a), &ty::Tuple(tys_b)) => tys_a.len() == tys_b.len(),
|
||||||
|
|
||||||
_ => false,
|
_ => false,
|
||||||
|
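The coercion shapes listed in this match, as they appear in user code (the `Wrapper` struct is an invented example of the `Struct<T>` -> `Struct<U>` case; tuple unsizing is omitted because it is not stable):

```rust
use std::fmt::Debug;

struct Wrapper<T: ?Sized> {
    meta: u32,
    value: T, // unsizing goes through the last field
}

fn main() {
    // `T` -> `Trait`
    let obj: &dyn Debug = &42_u8;

    // `[T; n]` -> `[T]`
    let slice: &[u8] = &[1, 2, 3];

    // `Struct<T>` -> `Struct<U>`: here `Wrapper<[u8; 3]>` -> `Wrapper<[u8]>`
    let w: &Wrapper<[u8]> = &Wrapper { meta: 0, value: [1u8, 2, 3] };

    println!("{:?} {:?} {} {}", obj, slice, w.meta, w.value.len());
}
```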
@ -2404,7 +2401,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
|cand: &ty::PolyTraitRef<'_>| cand.is_global() && !cand.has_late_bound_regions();
|
|cand: &ty::PolyTraitRef<'_>| cand.is_global() && !cand.has_late_bound_regions();
|
||||||
|
|
||||||
match other.candidate {
|
match other.candidate {
|
||||||
// Prefer BuiltinCandidate { has_nested: false } to anything else.
|
// Prefer `BuiltinCandidate { has_nested: false }` to anything else.
|
||||||
// This is a fix for #53123 and prevents winnowing from accidentally extending the
|
// This is a fix for #53123 and prevents winnowing from accidentally extending the
|
||||||
// lifetime of a variable.
|
// lifetime of a variable.
|
||||||
BuiltinCandidate { has_nested: false } => true,
|
BuiltinCandidate { has_nested: false } => true,
|
||||||
|
@ -2415,7 +2412,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
when there are other valid candidates"
|
when there are other valid candidates"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
// Prefer BuiltinCandidate { has_nested: false } to anything else.
|
// Prefer `BuiltinCandidate { has_nested: false }` to anything else.
|
||||||
// This is a fix for #53123 and prevents winnowing from accidentally extending the
|
// This is a fix for #53123 and prevents winnowing from accidentally extending the
|
||||||
// lifetime of a variable.
|
// lifetime of a variable.
|
||||||
BuiltinCandidate { has_nested: false } => false,
|
BuiltinCandidate { has_nested: false } => false,
|
||||||
|
@ -2446,7 +2443,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
when there are other valid candidates"
|
when there are other valid candidates"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
// Prefer BuiltinCandidate { has_nested: false } to anything else.
|
// Prefer `BuiltinCandidate { has_nested: false }` to anything else.
|
||||||
// This is a fix for #53123 and prevents winnowing from accidentally extending the
|
// This is a fix for #53123 and prevents winnowing from accidentally extending the
|
||||||
// lifetime of a variable.
|
// lifetime of a variable.
|
||||||
BuiltinCandidate { has_nested: false } => false,
|
BuiltinCandidate { has_nested: false } => false,
|
||||||
|
@ -2468,7 +2465,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
ImplCandidate(other_def) => {
|
ImplCandidate(other_def) => {
|
||||||
// See if we can toss out `victim` based on specialization.
|
// See if we can toss out `victim` based on specialization.
|
||||||
// This requires us to know *for sure* that the `other` impl applies
|
// This requires us to know *for sure* that the `other` impl applies
|
||||||
// i.e., EvaluatedToOk:
|
// i.e., `EvaluatedToOk`.
|
||||||
if other.evaluation.must_apply_modulo_regions() {
|
if other.evaluation.must_apply_modulo_regions() {
|
||||||
match victim.candidate {
|
match victim.candidate {
|
||||||
ImplCandidate(victim_def) => {
|
ImplCandidate(victim_def) => {
|
||||||
|
@ -2496,7 +2493,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
match victim.candidate {
|
match victim.candidate {
|
||||||
ParamCandidate(ref cand) => {
|
ParamCandidate(ref cand) => {
|
||||||
// Prefer these to a global where-clause bound
|
// Prefer these to a global where-clause bound
|
||||||
// (see issue #50825)
|
// (see issue #50825).
|
||||||
is_global(cand) && other.evaluation.must_apply_modulo_regions()
|
is_global(cand) && other.evaluation.must_apply_modulo_regions()
|
||||||
}
|
}
|
||||||
_ => false,
|
_ => false,
|
||||||
|
@ -2754,7 +2751,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
types.skip_binder().to_vec()
|
types.skip_binder().to_vec()
|
||||||
}
|
}
|
||||||
|
|
||||||
// for `PhantomData<T>`, we pass `T`
|
// For `PhantomData<T>`, we pass `T`.
|
||||||
ty::Adt(def, substs) if def.is_phantom_data() => substs.types().collect(),
|
ty::Adt(def, substs) if def.is_phantom_data() => substs.types().collect(),
|
||||||
|
|
||||||
ty::Adt(def, substs) => def.all_fields().map(|f| f.ty(self.tcx(), substs)).collect(),
|
ty::Adt(def, substs) => def.all_fields().map(|f| f.ty(self.tcx(), substs)).collect(),
|
||||||
|
@ -2894,11 +2891,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
BuiltinObjectCandidate => {
|
BuiltinObjectCandidate => {
|
||||||
// This indicates something like `(Trait+Send) :
|
// This indicates something like `Trait + Send: Send`. In this case, we know that
|
||||||
// Send`. In this case, we know that this holds
|
// this holds because that's what the object type is telling us, and there are really
|
||||||
// because that's what the object type is telling us,
|
// no additional obligations to prove and no types in particular to unify, etc.
|
||||||
// and there's really no additional obligations to
|
|
||||||
// prove and no types in particular to unify etc.
|
|
||||||
Ok(VtableParam(Vec::new()))
|
Ok(VtableParam(Vec::new()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3152,7 +3147,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
// We want to find the first supertrait in the list of
|
// We want to find the first supertrait in the list of
|
||||||
// supertraits that we can unify with, and do that
|
// supertraits that we can unify with, and do that
|
||||||
// unification. We know that there is exactly one in the list
|
// unification. We know that there is exactly one in the list
|
||||||
// where we can unify because otherwise select would have
|
// where we can unify, because otherwise select would have
|
||||||
// reported an ambiguity. (When we do find a match, also
|
// reported an ambiguity. (When we do find a match, also
|
||||||
// record it for later.)
|
// record it for later.)
|
||||||
let nonmatching = util::supertraits(tcx, poly_trait_ref).take_while(
|
let nonmatching = util::supertraits(tcx, poly_trait_ref).take_while(
|
||||||
|
@ -3166,7 +3161,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
// Additionally, for each of the nonmatching predicates that
|
// Additionally, for each of the non-matching predicates that
|
||||||
// we pass over, we sum up the set of number of vtable
|
// we pass over, we sum up the number of vtable
|
||||||
// entries, so that we can compute the offset for the selected
|
// entries, so that we can compute the offset for the selected
|
||||||
// trait.
|
// trait.
|
||||||
|
@ -3354,7 +3349,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
trait_ref,
|
trait_ref,
|
||||||
)?);
|
)?);
|
||||||
|
|
||||||
// FIXME: chalk
|
// FIXME: Chalk
|
||||||
|
|
||||||
if !self.tcx().sess.opts.debugging_opts.chalk {
|
if !self.tcx().sess.opts.debugging_opts.chalk {
|
||||||
obligations.push(Obligation::new(
|
obligations.push(Obligation::new(
|
||||||
|
@ -3421,7 +3416,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
) -> Result<VtableBuiltinData<PredicateObligation<'tcx>>, SelectionError<'tcx>> {
|
) -> Result<VtableBuiltinData<PredicateObligation<'tcx>>, SelectionError<'tcx>> {
|
||||||
let tcx = self.tcx();
|
let tcx = self.tcx();
|
||||||
|
|
||||||
// assemble_candidates_for_unsizing should ensure there are no late bound
|
// `assemble_candidates_for_unsizing` should ensure there are no late-bound
|
||||||
// regions here. See the comment there for more details.
|
// regions here. See the comment there for more details.
|
||||||
let source = self.infcx
|
let source = self.infcx
|
||||||
.shallow_resolve(obligation.self_ty().no_bound_vars().unwrap());
|
.shallow_resolve(obligation.self_ty().no_bound_vars().unwrap());
|
||||||
|
@ -3442,20 +3437,20 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
match (&source.kind, &target.kind) {
|
match (&source.kind, &target.kind) {
|
||||||
// Trait+Kx+'a -> Trait+Ky+'b (upcasts).
|
// Trait+Kx+'a -> Trait+Ky+'b (upcasts).
|
||||||
(&ty::Dynamic(ref data_a, r_a), &ty::Dynamic(ref data_b, r_b)) => {
|
(&ty::Dynamic(ref data_a, r_a), &ty::Dynamic(ref data_b, r_b)) => {
|
||||||
// See assemble_candidates_for_unsizing for more info.
|
// See `assemble_candidates_for_unsizing` for more info.
|
||||||
let existential_predicates = data_a.map_bound(|data_a| {
|
let existential_predicates = data_a.map_bound(|data_a| {
|
||||||
let iter =
|
let iter =
|
||||||
data_a.principal().map(|x| ty::ExistentialPredicate::Trait(x))
|
data_a.principal().map(|x| ty::ExistentialPredicate::Trait(x))
|
||||||
.into_iter().chain(
|
.into_iter().chain(
|
||||||
data_a
|
data_a
|
||||||
.projection_bounds()
|
.projection_bounds()
|
||||||
.map(|x| ty::ExistentialPredicate::Projection(x)),
|
.map(|x| ty::ExistentialPredicate::Projection(x)),
|
||||||
)
|
)
|
||||||
.chain(
|
.chain(
|
||||||
data_b
|
data_b
|
||||||
.auto_traits()
|
.auto_traits()
|
||||||
.map(ty::ExistentialPredicate::AutoTrait),
|
.map(ty::ExistentialPredicate::AutoTrait),
|
||||||
);
|
);
|
||||||
tcx.mk_existential_predicates(iter)
|
tcx.mk_existential_predicates(iter)
|
||||||
});
|
});
|
||||||
let source_trait = tcx.mk_dynamic(existential_predicates, r_b);
|
let source_trait = tcx.mk_dynamic(existential_predicates, r_b);
|
||||||
|
@ -3463,20 +3458,19 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
// Require that the traits involved in this upcast are **equal**;
|
// Require that the traits involved in this upcast are **equal**;
|
||||||
// only the **lifetime bound** is changed.
|
// only the **lifetime bound** is changed.
|
||||||
//
|
//
|
||||||
// FIXME: This condition is arguably too strong -- it
|
// FIXME: This condition is arguably too strong -- it would
|
||||||
// would suffice for the source trait to be a
|
// suffice for the source trait to be a *subtype* of the target
|
||||||
// *subtype* of the target trait. In particular
|
// trait. In particular, changing from something like
|
||||||
// changing from something like `for<'a, 'b> Foo<'a,
|
// `for<'a, 'b> Foo<'a, 'b>` to `for<'a> Foo<'a, 'a>` should be
|
||||||
// 'b>` to `for<'a> Foo<'a, 'a>` should be
|
|
||||||
// permitted. And, indeed, in the in commit
|
// permitted. And, indeed, in commit
|
||||||
// 904a0bde93f0348f69914ee90b1f8b6e4e0d7cbc, this
|
// 904a0bde93f0348f69914ee90b1f8b6e4e0d7cbc, this
|
||||||
// condition was loosened. However, when the leak check was added
|
// condition was loosened. However, when the leak check was
|
||||||
// back, using subtype here actually guies the coercion code in
|
// added back, using subtype here actually guides the coercion
|
||||||
// such a way that it accepts `old-lub-glb-object.rs`. This is probably
|
// code in such a way that it accepts `old-lub-glb-object.rs`.
|
||||||
// a good thing, but I've modified this to `.eq` because I want
|
// This is probably a good thing, but I've modified this to `.eq`
|
||||||
// to continue rejecting that test (as we have done for quite some time)
|
// because I want to continue rejecting that test (as we have
|
||||||
// before we are firmly comfortable with what our behavior
|
// done for quite some time) before we are firmly comfortable
|
||||||
// should be there. -nikomatsakis
|
// with what our behavior should be there. -nikomatsakis
|
||||||
let InferOk { obligations, .. } = self.infcx
|
let InferOk { obligations, .. } = self.infcx
|
||||||
.at(&obligation.cause, obligation.param_env)
|
.at(&obligation.cause, obligation.param_env)
|
||||||
.eq(target, source_trait) // FIXME -- see below
|
.eq(target, source_trait) // FIXME -- see below
|
||||||
|
@ -3498,7 +3492,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
// T -> Trait.
|
// `T` -> `Trait`
|
||||||
(_, &ty::Dynamic(ref data, r)) => {
|
(_, &ty::Dynamic(ref data, r)) => {
|
||||||
let mut object_dids = data.auto_traits()
|
let mut object_dids = data.auto_traits()
|
||||||
.chain(data.principal_def_id());
|
.chain(data.principal_def_id());
|
||||||
|
@ -3522,32 +3516,34 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
};
|
};
|
||||||
|
|
||||||
// Create obligations:
|
// Create obligations:
|
||||||
// - Casting T to Trait
|
// - Casting `T` to `Trait`
|
||||||
// - For all the various builtin bounds attached to the object cast. (In other
|
// - For all the various builtin bounds attached to the object cast. (In other
|
||||||
// words, if the object type is Foo+Send, this would create an obligation for the
|
// words, if the object type is `Foo + Send`, this would create an obligation for
|
||||||
// Send check.)
|
// the `Send` check.)
|
||||||
// - Projection predicates
|
// - Projection predicates
|
||||||
nested.extend(
|
nested.extend(
|
||||||
data.iter()
|
data.iter()
|
||||||
.map(|d| predicate_to_obligation(d.with_self_ty(tcx, source))),
|
.map(|predicate|
|
||||||
|
predicate_to_obligation(predicate.with_self_ty(tcx, source))
|
||||||
|
),
|
||||||
);
|
);
|
||||||
|
|
||||||
// We can only make objects from sized types.
|
// We can only make objects from sized types.
|
||||||
let tr = ty::TraitRef {
|
let tr = ty::TraitRef::new(
|
||||||
def_id: tcx.require_lang_item(lang_items::SizedTraitLangItem, None),
|
tcx.require_lang_item(lang_items::SizedTraitLangItem, None),
|
||||||
substs: tcx.mk_substs_trait(source, &[]),
|
tcx.mk_substs_trait(source, &[]),
|
||||||
};
|
);
|
||||||
nested.push(predicate_to_obligation(tr.to_predicate()));
|
nested.push(predicate_to_obligation(tr.to_predicate()));
|
||||||
|
|
||||||
// If the type is `Foo+'a`, ensures that the type
|
// If the type is `Foo + 'a`, ensure that the type
|
||||||
// being cast to `Foo+'a` outlives `'a`:
|
// being cast to `Foo + 'a` outlives `'a`:
|
||||||
let outlives = ty::OutlivesPredicate(source, r);
|
let outlives = ty::OutlivesPredicate(source, r);
|
||||||
nested.push(predicate_to_obligation(
|
nested.push(predicate_to_obligation(
|
||||||
ty::Binder::dummy(outlives).to_predicate(),
|
ty::Binder::dummy(outlives).to_predicate(),
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
// [T; n] -> [T].
|
// `[T; n]` -> `[T]`
|
||||||
(&ty::Array(a, _), &ty::Slice(b)) => {
|
(&ty::Array(a, _), &ty::Slice(b)) => {
|
||||||
let InferOk { obligations, .. } = self.infcx
|
let InferOk { obligations, .. } = self.infcx
|
||||||
.at(&obligation.cause, obligation.param_env)
|
.at(&obligation.cause, obligation.param_env)
|
||||||
|
@ -3556,10 +3552,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
nested.extend(obligations);
|
nested.extend(obligations);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Struct<T> -> Struct<U>.
|
// `Struct<T>` -> `Struct<U>`
|
||||||
(&ty::Adt(def, substs_a), &ty::Adt(_, substs_b)) => {
|
(&ty::Adt(def, substs_a), &ty::Adt(_, substs_b)) => {
|
||||||
let fields = def.all_fields()
|
let fields = def.all_fields()
|
||||||
.map(|f| tcx.type_of(f.did))
|
.map(|field| tcx.type_of(field.did))
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
// The last field of the structure has to exist and contain type parameters.
|
// The last field of the structure has to exist and contain type parameters.
|
||||||
|
@ -3598,7 +3594,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Extract Field<T> and Field<U> from Struct<T> and Struct<U>.
|
// Extract `Field<T>` and `Field<U>` from `Struct<T>` and `Struct<U>`.
|
||||||
let inner_source = field.subst(tcx, substs_a);
|
let inner_source = field.subst(tcx, substs_a);
|
||||||
let inner_target = field.subst(tcx, substs_b);
|
let inner_target = field.subst(tcx, substs_b);
|
||||||
|
|
||||||
|
@ -3618,7 +3614,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
.map_err(|_| Unimplemented)?;
|
.map_err(|_| Unimplemented)?;
|
||||||
nested.extend(obligations);
|
nested.extend(obligations);
|
||||||
|
|
||||||
// Construct the nested Field<T>: Unsize<Field<U>> predicate.
|
// Construct the nested `Field<T>: Unsize<Field<U>>` predicate.
|
||||||
nested.push(tcx.predicate_for_trait_def(
|
nested.push(tcx.predicate_for_trait_def(
|
||||||
obligation.param_env,
|
obligation.param_env,
|
||||||
obligation.cause.clone(),
|
obligation.cause.clone(),
|
||||||
|
@ -3629,7 +3625,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
// (.., T) -> (.., U).
|
// `(.., T)` -> `(.., U)`
|
||||||
(&ty::Tuple(tys_a), &ty::Tuple(tys_b)) => {
|
(&ty::Tuple(tys_a), &ty::Tuple(tys_b)) => {
|
||||||
assert_eq!(tys_a.len(), tys_b.len());
|
assert_eq!(tys_a.len(), tys_b.len());
|
||||||
|
|
||||||
|
@ -3652,7 +3648,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
.map_err(|_| Unimplemented)?;
|
.map_err(|_| Unimplemented)?;
|
||||||
nested.extend(obligations);
|
nested.extend(obligations);
|
||||||
|
|
||||||
// Construct the nested T: Unsize<U> predicate.
|
// Construct the nested `T: Unsize<U>` predicate.
|
||||||
nested.push(tcx.predicate_for_trait_def(
|
nested.push(tcx.predicate_for_trait_def(
|
||||||
obligation.param_env,
|
obligation.param_env,
|
||||||
obligation.cause.clone(),
|
obligation.cause.clone(),
|
||||||
|
@ -3969,7 +3965,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||||
//
|
//
|
||||||
// This code is hot enough that it's worth avoiding the allocation
|
// This code is hot enough that it's worth avoiding the allocation
|
||||||
// required for the FxHashSet when possible. Special-casing lengths 0,
|
// required for the FxHashSet when possible. Special-casing lengths 0,
|
||||||
// 1 and 2 covers roughly 75--80% of the cases.
|
// 1 and 2 covers roughly 75-80% of the cases.
|
||||||
if predicates.len() <= 1 {
|
if predicates.len() <= 1 {
|
||||||
// No possibility of duplicates.
|
// No possibility of duplicates.
|
||||||
} else if predicates.len() == 2 {
|
} else if predicates.len() == 2 {
|
||||||
|
|
|
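The allocation-avoidance pattern this comment describes, sketched generically (not the compiler's actual helper; the function name and signature are illustrative):

```rust
use std::collections::HashSet;
use std::hash::Hash;

fn dedup_preserving_order<T: Eq + Hash + Clone>(items: &mut Vec<T>) {
    if items.len() <= 1 {
        // No possibility of duplicates.
    } else if items.len() == 2 {
        // A single comparison; no allocation needed.
        if items[0] == items[1] {
            items.pop();
        }
    } else {
        // Only longer inputs pay for the hash-set allocation.
        let mut seen = HashSet::new();
        items.retain(|item| seen.insert(item.clone()));
    }
}

fn main() {
    let mut v = vec![3, 1, 3, 2, 1];
    dedup_preserving_order(&mut v);
    assert_eq!(v, [3, 1, 2]);
}
```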
@ -80,7 +80,7 @@ impl<T: AsRef<ty::Predicate<'tcx>>> Extend<T> for PredicateSet<'tcx> {
|
||||||
///////////////////////////////////////////////////////////////////////////
|
///////////////////////////////////////////////////////////////////////////
|
||||||
|
|
||||||
/// "Elaboration" is the process of identifying all the predicates that
|
/// "Elaboration" is the process of identifying all the predicates that
|
||||||
/// are implied by a source predicate. Currently this basically means
|
/// are implied by a source predicate. Currently, this basically means
|
||||||
/// walking the "supertraits" and other similar assumptions. For example,
|
/// walking the "supertraits" and other similar assumptions. For example,
|
||||||
/// if we know that `T: Ord`, the elaborator would deduce that `T: PartialOrd`
|
/// if we know that `T: Ord`, the elaborator would deduce that `T: PartialOrd`
|
||||||
/// holds as well. Similarly, if we have `trait Foo: 'static`, and we know that
|
/// holds as well. Similarly, if we have `trait Foo: 'static`, and we know that
|
||||||
|
|
|
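A user-level view of what elaboration buys (the `max_of` function is a made-up example): a single `T: Ord` bound lets the supertrait `PartialOrd` operators be used without being written out.

```rust
fn max_of<T: Ord>(a: T, b: T) -> T {
    // `<` needs `PartialOrd`, which is elaborated from `T: Ord`
    // via the supertrait bound `Ord: PartialOrd`.
    if a < b { b } else { a }
}

fn main() {
    assert_eq!(max_of(3, 7), 7);
}
```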
@ -1,14 +1,15 @@
|
||||||
|
use crate::hir;
|
||||||
use crate::hir::def_id::DefId;
|
use crate::hir::def_id::DefId;
|
||||||
use crate::ty::{self, BoundRegion, Region, Ty, TyCtxt};
|
use crate::ty::{self, BoundRegion, Region, Ty, TyCtxt};
|
||||||
use std::borrow::Cow;
|
|
||||||
use std::fmt;
|
use errors::{Applicability, DiagnosticBuilder};
|
||||||
use rustc_target::spec::abi;
|
use rustc_target::spec::abi;
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
use syntax::errors::pluralize;
|
use syntax::errors::pluralize;
|
||||||
use errors::{Applicability, DiagnosticBuilder};
|
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
|
|
||||||
use crate::hir;
|
use std::borrow::Cow;
|
||||||
|
use std::fmt;
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, TypeFoldable)]
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, TypeFoldable)]
|
||||||
pub struct ExpectedFound<T> {
|
pub struct ExpectedFound<T> {
|
||||||
|
|
|
@ -36,10 +36,10 @@ pub enum InstanceDef<'tcx> {
|
||||||
ReifyShim(DefId),
|
ReifyShim(DefId),
|
||||||
|
|
||||||
/// `<fn() as FnTrait>::call_*`
|
/// `<fn() as FnTrait>::call_*`
|
||||||
/// `DefId` is `FnTrait::call_*`
|
/// `DefId` is `FnTrait::call_*`.
|
||||||
FnPtrShim(DefId, Ty<'tcx>),
|
FnPtrShim(DefId, Ty<'tcx>),
|
||||||
|
|
||||||
/// `<Trait as Trait>::fn`
|
/// `<dyn Trait as Trait>::fn`
|
||||||
Virtual(DefId, usize),
|
Virtual(DefId, usize),
|
||||||
|
|
||||||
/// `<[mut closure] as FnOnce>::call_once`
|
/// `<[mut closure] as FnOnce>::call_once`
|
||||||
|
@ -115,7 +115,7 @@ impl<'tcx> Instance<'tcx> {
|
||||||
pub fn fn_sig(&self, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> {
|
pub fn fn_sig(&self, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> {
|
||||||
let mut fn_sig = self.fn_sig_noadjust(tcx);
|
let mut fn_sig = self.fn_sig_noadjust(tcx);
|
||||||
if let InstanceDef::VtableShim(..) = self.def {
|
if let InstanceDef::VtableShim(..) = self.def {
|
||||||
// Modify fn(self, ...) to fn(self: *mut Self, ...)
|
// Modify `fn(self, ...)` to `fn(self: *mut Self, ...)`.
|
||||||
fn_sig = fn_sig.map_bound(|mut fn_sig| {
|
fn_sig = fn_sig.map_bound(|mut fn_sig| {
|
||||||
let mut inputs_and_output = fn_sig.inputs_and_output.to_vec();
|
let mut inputs_and_output = fn_sig.inputs_and_output.to_vec();
|
||||||
inputs_and_output[0] = tcx.mk_mut_ptr(inputs_and_output[0]);
|
inputs_and_output[0] = tcx.mk_mut_ptr(inputs_and_output[0]);
|
||||||
|
|
|
@ -2103,8 +2103,8 @@ where
|
||||||
ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
|
ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
|
||||||
assert!(i < this.fields.count());
|
assert!(i < this.fields.count());
|
||||||
|
|
||||||
// Reuse the fat *T type as its own thin pointer data field.
|
// Reuse the fat `*T` type as its own thin pointer data field.
|
||||||
// This provides information about e.g., DST struct pointees
|
// This provides information about, e.g., DST struct pointees
|
||||||
// (which may have no non-DST form), and will work as long
|
// (which may have no non-DST form), and will work as long
|
||||||
// as the `Abi` or `FieldPlacement` is checked by users.
|
// as the `Abi` or `FieldPlacement` is checked by users.
|
||||||
if i == 0 {
|
if i == 0 {
|
||||||
|
|
|
@ -1952,17 +1952,17 @@ pub struct FieldDef {
|
||||||
///
|
///
|
||||||
/// These are all interned (by `intern_adt_def`) into the `adt_defs` table.
|
/// These are all interned (by `intern_adt_def`) into the `adt_defs` table.
|
||||||
///
|
///
|
||||||
/// The initialism *"Adt"* stands for an [*algebraic data type (ADT)*][adt].
|
/// The initialism *ADT* stands for an [*algebraic data type (ADT)*][adt].
|
||||||
/// This is slightly wrong because `union`s are not ADTs.
|
/// This is slightly wrong because `union`s are not ADTs.
|
||||||
/// Moreover, Rust only allows recursive data types through indirection.
|
/// Moreover, Rust only allows recursive data types through indirection.
|
||||||
///
|
///
|
||||||
/// [adt]: https://en.wikipedia.org/wiki/Algebraic_data_type
|
/// [adt]: https://en.wikipedia.org/wiki/Algebraic_data_type
|
||||||
pub struct AdtDef {
|
pub struct AdtDef {
|
||||||
/// `DefId` of the struct, enum or union item.
|
/// The `DefId` of the struct, enum or union item.
|
||||||
pub did: DefId,
|
pub did: DefId,
|
||||||
/// Variants of the ADT. If this is a struct or union, then there will be a single variant.
|
/// Variants of the ADT. If this is a struct or union, then there will be a single variant.
|
||||||
pub variants: IndexVec<self::layout::VariantIdx, VariantDef>,
|
pub variants: IndexVec<self::layout::VariantIdx, VariantDef>,
|
||||||
/// Flags of the ADT (e.g. is this a struct? is this non-exhaustive?)
|
/// Flags of the ADT (e.g., is this a struct? is this non-exhaustive?).
|
||||||
flags: AdtFlags,
|
flags: AdtFlags,
|
||||||
/// Repr options provided by the user.
|
/// Repr options provided by the user.
|
||||||
pub repr: ReprOptions,
|
pub repr: ReprOptions,
|
||||||
|
@ -1983,7 +1983,7 @@ impl Ord for AdtDef {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PartialEq for AdtDef {
|
impl PartialEq for AdtDef {
|
||||||
// AdtDef are always interned and this is part of TyS equality
|
// `AdtDef`s are always interned, and this is part of `TyS` equality.
|
||||||
#[inline]
|
#[inline]
|
||||||
fn eq(&self, other: &Self) -> bool { ptr::eq(self, other) }
|
fn eq(&self, other: &Self) -> bool { ptr::eq(self, other) }
|
||||||
}
|
}
|
||||||
|
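The interning idea behind this pointer-equality `PartialEq`, sketched with plain strings rather than the compiler's arena-allocated types (illustrative only):

```rust
use std::collections::HashSet;

struct Interner {
    storage: HashSet<&'static str>,
}

impl Interner {
    fn intern(&mut self, s: &str) -> &'static str {
        if let Some(&existing) = self.storage.get(s) {
            return existing;
        }
        // Leak on purpose: an interner keeps its values alive for the
        // whole session, which is what makes the `'static` borrow sound.
        let leaked: &'static str = Box::leak(s.to_owned().into_boxed_str());
        self.storage.insert(leaked);
        leaked
    }
}

fn main() {
    let mut interner = Interner { storage: HashSet::new() };
    let a = interner.intern("foo");
    let b = interner.intern("foo");
    // Same contents intern to the same allocation, so equality can be a
    // pointer comparison -- the same trick the interned `AdtDef` relies on.
    assert!(std::ptr::eq(a, b));
}
```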
@ -2005,7 +2005,6 @@ impl<'tcx> rustc_serialize::UseSpecializedEncodable for &'tcx AdtDef {
|
||||||
|
|
||||||
impl<'tcx> rustc_serialize::UseSpecializedDecodable for &'tcx AdtDef {}
|
impl<'tcx> rustc_serialize::UseSpecializedDecodable for &'tcx AdtDef {}
|
||||||
|
|
||||||
|
|
||||||
impl<'a> HashStable<StableHashingContext<'a>> for AdtDef {
|
impl<'a> HashStable<StableHashingContext<'a>> for AdtDef {
|
||||||
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
|
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
|
||||||
thread_local! {
|
thread_local! {
|
||||||
|
|
|
@ -281,7 +281,7 @@ impl<'tcx> Relate<'tcx> for ty::TraitRef<'tcx> {
|
||||||
a: &ty::TraitRef<'tcx>,
|
a: &ty::TraitRef<'tcx>,
|
||||||
b: &ty::TraitRef<'tcx>,
|
b: &ty::TraitRef<'tcx>,
|
||||||
) -> RelateResult<'tcx, ty::TraitRef<'tcx>> {
|
) -> RelateResult<'tcx, ty::TraitRef<'tcx>> {
|
||||||
// Different traits cannot be related
|
// Different traits cannot be related.
|
||||||
if a.def_id != b.def_id {
|
if a.def_id != b.def_id {
|
||||||
Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id)))
|
Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id)))
|
||||||
} else {
|
} else {
|
||||||
|
@ -297,7 +297,7 @@ impl<'tcx> Relate<'tcx> for ty::ExistentialTraitRef<'tcx> {
|
||||||
a: &ty::ExistentialTraitRef<'tcx>,
|
a: &ty::ExistentialTraitRef<'tcx>,
|
||||||
b: &ty::ExistentialTraitRef<'tcx>,
|
b: &ty::ExistentialTraitRef<'tcx>,
|
||||||
) -> RelateResult<'tcx, ty::ExistentialTraitRef<'tcx>> {
|
) -> RelateResult<'tcx, ty::ExistentialTraitRef<'tcx>> {
|
||||||
// Different traits cannot be related
|
// Different traits cannot be related.
|
||||||
if a.def_id != b.def_id {
|
if a.def_id != b.def_id {
|
||||||
Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id)))
|
Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id)))
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -4,12 +4,13 @@
|
||||||
|
|
||||||
use crate::hir::def::Namespace;
|
use crate::hir::def::Namespace;
|
||||||
use crate::mir::ProjectionKind;
|
use crate::mir::ProjectionKind;
|
||||||
|
use crate::mir::interpret;
|
||||||
use crate::ty::{self, Lift, Ty, TyCtxt, InferConst};
|
use crate::ty::{self, Lift, Ty, TyCtxt, InferConst};
|
||||||
use crate::ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
|
use crate::ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
|
||||||
use crate::ty::print::{FmtPrinter, Printer};
|
use crate::ty::print::{FmtPrinter, Printer};
|
||||||
|
|
||||||
use rustc_index::vec::{IndexVec, Idx};
|
use rustc_index::vec::{IndexVec, Idx};
|
||||||
use smallvec::SmallVec;
|
use smallvec::SmallVec;
|
||||||
use crate::mir::interpret;
|
|
||||||
|
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
|
@ -2,14 +2,14 @@
|
||||||
|
|
||||||
#![allow(rustc::usage_of_ty_tykind)]
|
#![allow(rustc::usage_of_ty_tykind)]
|
||||||
|
|
||||||
|
use self::InferTy::*;
|
||||||
|
use self::TyKind::*;
|
||||||
|
|
||||||
use crate::hir;
|
use crate::hir;
|
||||||
use crate::hir::def_id::DefId;
|
use crate::hir::def_id::DefId;
|
||||||
use crate::infer::canonical::Canonical;
|
use crate::infer::canonical::Canonical;
|
||||||
use crate::mir::interpret::ConstValue;
|
use crate::mir::interpret::ConstValue;
|
||||||
use crate::middle::region;
|
use crate::middle::region;
|
||||||
use polonius_engine::Atom;
|
|
||||||
use rustc_index::vec::Idx;
|
|
||||||
use rustc_macros::HashStable;
|
|
||||||
use crate::ty::subst::{InternalSubsts, Subst, SubstsRef, GenericArg, GenericArgKind};
|
use crate::ty::subst::{InternalSubsts, Subst, SubstsRef, GenericArg, GenericArgKind};
|
||||||
use crate::ty::{self, AdtDef, Discr, DefIdTree, TypeFlags, Ty, TyCtxt, TypeFoldable};
|
use crate::ty::{self, AdtDef, Discr, DefIdTree, TypeFlags, Ty, TyCtxt, TypeFoldable};
|
||||||
use crate::ty::{List, TyS, ParamEnvAnd, ParamEnv};
|
use crate::ty::{List, TyS, ParamEnvAnd, ParamEnv};
|
||||||
|
@ -17,27 +17,30 @@ use crate::ty::layout::VariantIdx;
|
||||||
use crate::util::captures::Captures;
|
use crate::util::captures::Captures;
|
||||||
use crate::mir::interpret::{Scalar, GlobalId};
|
use crate::mir::interpret::{Scalar, GlobalId};
|
||||||
|
|
||||||
|
use polonius_engine::Atom;
|
||||||
|
use rustc_index::vec::Idx;
|
||||||
|
use rustc_macros::HashStable;
|
||||||
|
use rustc_target::spec::abi;
|
||||||
use smallvec::SmallVec;
|
use smallvec::SmallVec;
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::cmp::Ordering;
|
use std::cmp::Ordering;
|
||||||
use std::marker::PhantomData;
|
use std::marker::PhantomData;
|
||||||
use std::ops::Range;
|
use std::ops::Range;
|
||||||
use rustc_target::spec::abi;
|
|
||||||
use syntax::ast::{self, Ident};
|
use syntax::ast::{self, Ident};
|
||||||
use syntax::symbol::{kw, Symbol};
|
use syntax::symbol::{kw, Symbol};
|
||||||
|
|
||||||
use self::InferTy::*;
|
#[derive(
|
||||||
use self::TyKind::*;
|
Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable,
|
||||||
|
HashStable, TypeFoldable, Lift,
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
|
)]
|
||||||
#[derive(HashStable, TypeFoldable, Lift)]
|
|
||||||
pub struct TypeAndMut<'tcx> {
|
pub struct TypeAndMut<'tcx> {
|
||||||
pub ty: Ty<'tcx>,
|
pub ty: Ty<'tcx>,
|
||||||
pub mutbl: hir::Mutability,
|
pub mutbl: hir::Mutability,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash,
|
#[derive(
|
||||||
RustcEncodable, RustcDecodable, Copy, HashStable)]
|
Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable, RustcDecodable, Copy, HashStable,
|
||||||
|
)]
|
||||||
/// A "free" region `fr` can be interpreted as "some region
|
/// A "free" region `fr` can be interpreted as "some region
|
||||||
/// at least as big as the scope `fr.scope`".
|
/// at least as big as the scope `fr.scope`".
|
||||||
pub struct FreeRegion {
|
pub struct FreeRegion {
|
||||||
|
@ -45,8 +48,9 @@ pub struct FreeRegion {
|
||||||
pub bound_region: BoundRegion,
|
pub bound_region: BoundRegion,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash,
|
#[derive(
|
||||||
RustcEncodable, RustcDecodable, Copy, HashStable)]
|
Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable, RustcDecodable, Copy, HashStable,
|
||||||
|
)]
|
||||||
pub enum BoundRegion {
|
pub enum BoundRegion {
|
||||||
/// An anonymous region parameter for a given fn (&T)
|
/// An anonymous region parameter for a given fn (&T)
|
||||||
BrAnon(u32),
|
BrAnon(u32),
|
||||||
|
@ -471,18 +475,18 @@ impl<'tcx> GeneratorSubsts<'tcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'tcx> GeneratorSubsts<'tcx> {
|
impl<'tcx> GeneratorSubsts<'tcx> {
|
||||||
/// Generator have not been resumed yet
|
/// Generator has not been resumed yet.
|
||||||
pub const UNRESUMED: usize = 0;
|
pub const UNRESUMED: usize = 0;
|
||||||
/// Generator has returned / is completed
|
/// Generator has returned or is completed.
|
||||||
pub const RETURNED: usize = 1;
|
pub const RETURNED: usize = 1;
|
||||||
/// Generator has been poisoned
|
/// Generator has been poisoned.
|
||||||
pub const POISONED: usize = 2;
|
pub const POISONED: usize = 2;
|
||||||
|
|
||||||
const UNRESUMED_NAME: &'static str = "Unresumed";
|
const UNRESUMED_NAME: &'static str = "Unresumed";
|
||||||
const RETURNED_NAME: &'static str = "Returned";
|
const RETURNED_NAME: &'static str = "Returned";
|
||||||
const POISONED_NAME: &'static str = "Panicked";
|
const POISONED_NAME: &'static str = "Panicked";
|
||||||
|
|
||||||
/// The valid variant indices of this Generator.
|
/// The valid variant indices of this generator.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn variant_range(&self, def_id: DefId, tcx: TyCtxt<'tcx>) -> Range<VariantIdx> {
|
pub fn variant_range(&self, def_id: DefId, tcx: TyCtxt<'tcx>) -> Range<VariantIdx> {
|
||||||
// FIXME requires optimized MIR
|
// FIXME requires optimized MIR
|
||||||
|
@ -490,7 +494,7 @@ impl<'tcx> GeneratorSubsts<'tcx> {
|
||||||
(VariantIdx::new(0)..VariantIdx::new(num_variants))
|
(VariantIdx::new(0)..VariantIdx::new(num_variants))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The discriminant for the given variant. Panics if the variant_index is
|
/// The discriminant for the given variant. Panics if the `variant_index` is
|
||||||
/// out of range.
|
/// out of range.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn discriminant_for_variant(
|
pub fn discriminant_for_variant(
|
||||||
|
@ -505,7 +509,7 @@ impl<'tcx> GeneratorSubsts<'tcx> {
|
||||||
Discr { val: variant_index.as_usize() as u128, ty: self.discr_ty(tcx) }
|
Discr { val: variant_index.as_usize() as u128, ty: self.discr_ty(tcx) }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The set of all discriminants for the Generator, enumerated with their
|
/// The set of all discriminants for the generator, enumerated with their
|
||||||
/// variant indices.
|
/// variant indices.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn discriminants(
|
pub fn discriminants(
|
||||||
|
@ -670,12 +674,12 @@ impl<'tcx> List<ExistentialPredicate<'tcx>> {
|
||||||
pub fn principal(&self) -> Option<ExistentialTraitRef<'tcx>> {
|
pub fn principal(&self) -> Option<ExistentialTraitRef<'tcx>> {
|
||||||
match self[0] {
|
match self[0] {
|
||||||
ExistentialPredicate::Trait(tr) => Some(tr),
|
ExistentialPredicate::Trait(tr) => Some(tr),
|
||||||
_ => None
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn principal_def_id(&self) -> Option<DefId> {
|
pub fn principal_def_id(&self) -> Option<DefId> {
|
||||||
self.principal().map(|d| d.def_id)
|
self.principal().map(|trait_ref| trait_ref.def_id)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
|
@ -684,7 +688,7 @@ impl<'tcx> List<ExistentialPredicate<'tcx>> {
|
||||||
{
|
{
|
||||||
self.iter().filter_map(|predicate| {
|
self.iter().filter_map(|predicate| {
|
||||||
match *predicate {
|
match *predicate {
|
||||||
ExistentialPredicate::Projection(p) => Some(p),
|
ExistentialPredicate::Projection(projection) => Some(projection),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
@ -694,8 +698,8 @@ impl<'tcx> List<ExistentialPredicate<'tcx>> {
|
||||||
pub fn auto_traits<'a>(&'a self) -> impl Iterator<Item = DefId> + 'a {
|
pub fn auto_traits<'a>(&'a self) -> impl Iterator<Item = DefId> + 'a {
|
||||||
self.iter().filter_map(|predicate| {
|
self.iter().filter_map(|predicate| {
|
||||||
match *predicate {
|
match *predicate {
|
||||||
ExistentialPredicate::AutoTrait(d) => Some(d),
|
ExistentialPredicate::AutoTrait(did) => Some(did),
|
||||||
_ => None
|
_ => None,
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -722,7 +726,8 @@ impl<'tcx> Binder<&'tcx List<ExistentialPredicate<'tcx>>> {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn iter<'a>(&'a self)
|
pub fn iter<'a>(&'a self)
|
||||||
-> impl DoubleEndedIterator<Item = Binder<ExistentialPredicate<'tcx>>> + 'tcx {
|
-> impl DoubleEndedIterator<Item = Binder<ExistentialPredicate<'tcx>>> + 'tcx
|
||||||
|
{
|
||||||
self.skip_binder().iter().cloned().map(Binder::bind)
|
self.skip_binder().iter().cloned().map(Binder::bind)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -751,7 +756,7 @@ pub struct TraitRef<'tcx> {
|
||||||
|
|
||||||
impl<'tcx> TraitRef<'tcx> {
|
impl<'tcx> TraitRef<'tcx> {
|
||||||
pub fn new(def_id: DefId, substs: SubstsRef<'tcx>) -> TraitRef<'tcx> {
|
pub fn new(def_id: DefId, substs: SubstsRef<'tcx>) -> TraitRef<'tcx> {
|
||||||
TraitRef { def_id: def_id, substs: substs }
|
TraitRef { def_id, substs }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns a `TraitRef` of the form `P0: Foo<P1..Pn>` where `Pi`
|
/// Returns a `TraitRef` of the form `P0: Foo<P1..Pn>` where `Pi`
|
||||||
|
@ -822,7 +827,7 @@ pub struct ExistentialTraitRef<'tcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'tcx> ExistentialTraitRef<'tcx> {
|
impl<'tcx> ExistentialTraitRef<'tcx> {
|
||||||
pub fn input_types<'b>(&'b self) -> impl DoubleEndedIterator<Item=Ty<'tcx>> + 'b {
|
pub fn input_types<'b>(&'b self) -> impl DoubleEndedIterator<Item = Ty<'tcx>> + 'b {
|
||||||
// Select only the "input types" from a trait-reference. For
|
// Select only the "input types" from a trait-reference. For
|
||||||
// now this is all the types that appear in the
|
// now this is all the types that appear in the
|
||||||
// trait-reference, but it should eventually exclude
|
// trait-reference, but it should eventually exclude
|
||||||
|
@ -1296,7 +1301,7 @@ pub enum RegionKind {
|
||||||
/// A region variable. Should not exist after typeck.
|
/// A region variable. Should not exist after typeck.
|
||||||
ReVar(RegionVid),
|
ReVar(RegionVid),
|
||||||
|
|
||||||
/// A placeholder region - basically the higher-ranked version of ReFree.
|
/// A placeholder region -- basically, the higher-ranked version of `ReFree`.
|
||||||
/// Should not exist after typeck.
|
/// Should not exist after typeck.
|
||||||
RePlaceholder(ty::PlaceholderRegion),
|
RePlaceholder(ty::PlaceholderRegion),
|
||||||
|
|
||||||
|
@ -1807,14 +1812,14 @@ impl<'tcx> TyS<'tcx> {
|
||||||
match self.kind {
|
match self.kind {
|
||||||
Array(ty, _) | Slice(ty) => ty,
|
Array(ty, _) | Slice(ty) => ty,
|
||||||
Str => tcx.mk_mach_uint(ast::UintTy::U8),
|
Str => tcx.mk_mach_uint(ast::UintTy::U8),
|
||||||
_ => bug!("sequence_element_type called on non-sequence value: {}", self),
|
_ => bug!("`sequence_element_type` called on non-sequence value: {}", self),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn simd_type(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
|
pub fn simd_type(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
|
||||||
match self.kind {
|
match self.kind {
|
||||||
Adt(def, substs) => def.non_enum_variant().fields[0].ty(tcx, substs),
|
Adt(def, substs) => def.non_enum_variant().fields[0].ty(tcx, substs),
|
||||||
_ => bug!("simd_type called on invalid type")
|
_ => bug!("`simd_type` called on invalid type"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1823,7 +1828,7 @@ impl<'tcx> TyS<'tcx> {
|
||||||
// allow `#[repr(simd)] struct Simd<T, const N: usize>([T; N]);`.
|
// allow `#[repr(simd)] struct Simd<T, const N: usize>([T; N]);`.
|
||||||
match self.kind {
|
match self.kind {
|
||||||
Adt(def, _) => def.non_enum_variant().fields.len() as u64,
|
Adt(def, _) => def.non_enum_variant().fields.len() as u64,
|
||||||
_ => bug!("simd_size called on invalid type")
|
_ => bug!("`simd_size` called on invalid type"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1833,7 +1838,7 @@ impl<'tcx> TyS<'tcx> {
|
||||||
let variant = def.non_enum_variant();
|
let variant = def.non_enum_variant();
|
||||||
(variant.fields.len() as u64, variant.fields[0].ty(tcx, substs))
|
(variant.fields.len() as u64, variant.fields[0].ty(tcx, substs))
|
||||||
}
|
}
|
||||||
_ => bug!("simd_size_and_type called on invalid type")
|
_ => bug!("`simd_size_and_type` called on invalid type"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1894,7 +1899,7 @@ impl<'tcx> TyS<'tcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// panics if called on any type other than `Box<T>`
|
/// Panics if called on any type other than `Box<T>`.
|
||||||
pub fn boxed_ty(&self) -> Ty<'tcx> {
|
pub fn boxed_ty(&self) -> Ty<'tcx> {
|
||||||
match self.kind {
|
match self.kind {
|
||||||
Adt(def, substs) if def.is_box() => substs.type_at(0),
|
Adt(def, substs) if def.is_box() => substs.type_at(0),
|
||||||
|
@ -2114,7 +2119,8 @@ impl<'tcx> TyS<'tcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// If the type contains variants, returns the valid range of variant indices.
|
/// If the type contains variants, returns the valid range of variant indices.
|
||||||
/// FIXME This requires the optimized MIR in the case of generators.
|
//
|
||||||
|
// FIXME: This requires the optimized MIR in the case of generators.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn variant_range(&self, tcx: TyCtxt<'tcx>) -> Option<Range<VariantIdx>> {
|
pub fn variant_range(&self, tcx: TyCtxt<'tcx>) -> Option<Range<VariantIdx>> {
|
||||||
match self.kind {
|
match self.kind {
|
||||||
|
@ -2127,7 +2133,8 @@ impl<'tcx> TyS<'tcx> {
|
||||||
|
|
||||||
/// If the type contains variants, returns the variant for `variant_index`.
|
/// If the type contains variants, returns the variant for `variant_index`.
|
||||||
/// Panics if `variant_index` is out of range.
|
/// Panics if `variant_index` is out of range.
|
||||||
/// FIXME This requires the optimized MIR in the case of generators.
|
//
|
||||||
|
// FIXME: This requires the optimized MIR in the case of generators.
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn discriminant_for_variant(
|
pub fn discriminant_for_variant(
|
||||||
&self,
|
&self,
|
||||||
|
@ -2142,7 +2149,7 @@ impl<'tcx> TyS<'tcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Push onto `out` the regions directly referenced from this type (but not
|
/// Pushes onto `out` the regions directly referenced from this type (but not
|
||||||
/// types reachable from this type via `walk_tys`). This ignores late-bound
|
/// types reachable from this type via `walk_tys`). This ignores late-bound
|
||||||
/// regions binders.
|
/// region binders.
|
||||||
pub fn push_regions(&self, out: &mut SmallVec<[ty::Region<'tcx>; 4]>) {
|
pub fn push_regions(&self, out: &mut SmallVec<[ty::Region<'tcx>; 4]>) {
|
||||||
|
@ -2255,7 +2262,7 @@ impl<'tcx> TyS<'tcx> {
|
||||||
ty::Infer(ty::FreshTy(_)) |
|
ty::Infer(ty::FreshTy(_)) |
|
||||||
ty::Infer(ty::FreshIntTy(_)) |
|
ty::Infer(ty::FreshIntTy(_)) |
|
||||||
ty::Infer(ty::FreshFloatTy(_)) =>
|
ty::Infer(ty::FreshFloatTy(_)) =>
|
||||||
bug!("is_trivially_sized applied to unexpected type: {:?}", self),
|
bug!("`is_trivially_sized` applied to unexpected type: {:?}", self),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -333,14 +333,14 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||||
ty
|
ty
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Same as applying struct_tail on `source` and `target`, but only
|
/// Same as applying `struct_tail` on `source` and `target`, but only
|
||||||
/// keeps going as long as the two types are instances of the same
|
/// keeps going as long as the two types are instances of the same
|
||||||
/// structure definitions.
|
/// structure definitions.
|
||||||
/// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
|
/// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
|
||||||
/// whereas struct_tail produces `T`, and `Trait`, respectively.
|
/// whereas `struct_tail` produces `T` and `Trait`, respectively.
|
||||||
///
|
///
|
||||||
/// Should only be called if the types have no inference variables and do
|
/// Should only be called if the types have no inference variables and do
|
||||||
/// not need their lifetimes preserved (e.g. as part of codegen); otherwise
|
/// not need their lifetimes preserved (e.g., as part of codegen); otherwise,
|
||||||
/// normalization attempt may cause compiler bugs.
|
/// normalization attempt may cause compiler bugs.
|
||||||
pub fn struct_lockstep_tails_erasing_lifetimes(self,
|
pub fn struct_lockstep_tails_erasing_lifetimes(self,
|
||||||
source: Ty<'tcx>,
|
source: Ty<'tcx>,
|
||||||
|
@ -353,7 +353,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||||
source, target, |ty| tcx.normalize_erasing_regions(param_env, ty))
|
source, target, |ty| tcx.normalize_erasing_regions(param_env, ty))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Same as applying struct_tail on `source` and `target`, but only
|
/// Same as applying `struct_tail` on `source` and `target`, but only
|
||||||
/// keeps going as long as the two types are instances of the same
|
/// keeps going as long as the two types are instances of the same
|
||||||
/// structure definitions.
|
/// structure definitions.
|
||||||
/// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
|
/// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
|
||||||
|
|
|
@ -1,16 +1,15 @@
|
||||||
use crate::llvm::{self, AttributePlace};
|
|
||||||
use crate::builder::Builder;
|
use crate::builder::Builder;
|
||||||
use crate::context::CodegenCx;
|
use crate::context::CodegenCx;
|
||||||
|
use crate::llvm::{self, AttributePlace};
|
||||||
use crate::type_::Type;
|
use crate::type_::Type;
|
||||||
|
use crate::type_of::LayoutLlvmExt;
|
||||||
use crate::value::Value;
|
use crate::value::Value;
|
||||||
use crate::type_of::{LayoutLlvmExt};
|
|
||||||
use rustc_codegen_ssa::MemFlags;
|
use rustc_codegen_ssa::MemFlags;
|
||||||
use rustc_codegen_ssa::mir::place::PlaceRef;
|
use rustc_codegen_ssa::mir::place::PlaceRef;
|
||||||
use rustc_codegen_ssa::mir::operand::OperandValue;
|
use rustc_codegen_ssa::mir::operand::OperandValue;
|
||||||
use rustc_target::abi::call::ArgAbi;
|
|
||||||
|
|
||||||
use rustc_codegen_ssa::traits::*;
|
use rustc_codegen_ssa::traits::*;
|
||||||
|
use rustc_target::abi::call::ArgAbi;
|
||||||
use rustc_target::abi::{HasDataLayout, LayoutOf};
|
use rustc_target::abi::{HasDataLayout, LayoutOf};
|
||||||
use rustc::ty::{Ty};
|
use rustc::ty::{Ty};
|
||||||
use rustc::ty::layout::{self};
|
use rustc::ty::layout::{self};
|
||||||
|
@ -202,7 +201,7 @@ impl ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||||
if self.is_sized_indirect() {
|
if self.is_sized_indirect() {
|
||||||
OperandValue::Ref(val, None, self.layout.align.abi).store(bx, dst)
|
OperandValue::Ref(val, None, self.layout.align.abi).store(bx, dst)
|
||||||
} else if self.is_unsized_indirect() {
|
} else if self.is_unsized_indirect() {
|
||||||
bug!("unsized ArgAbi must be handled through store_fn_arg");
|
bug!("unsized `ArgAbi` must be handled through `store_fn_arg`");
|
||||||
} else if let PassMode::Cast(cast) = self.mode {
|
} else if let PassMode::Cast(cast) = self.mode {
|
||||||
// FIXME(eddyb): Figure out when the simpler Store is safe, clang
|
// FIXME(eddyb): Figure out when the simpler Store is safe, clang
|
||||||
// uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}.
|
// uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}.
|
||||||
|
@ -232,10 +231,10 @@ impl ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||||
let llscratch = bx.alloca(cast.llvm_type(bx), scratch_align);
|
let llscratch = bx.alloca(cast.llvm_type(bx), scratch_align);
|
||||||
bx.lifetime_start(llscratch, scratch_size);
|
bx.lifetime_start(llscratch, scratch_size);
|
||||||
|
|
||||||
// ...where we first store the value...
|
// ... where we first store the value...
|
||||||
bx.store(val, llscratch, scratch_align);
|
bx.store(val, llscratch, scratch_align);
|
||||||
|
|
||||||
// ...and then memcpy it to the intended destination.
|
// ... and then memcpy it to the intended destination.
|
||||||
bx.memcpy(
|
bx.memcpy(
|
||||||
dst.llval,
|
dst.llval,
|
||||||
self.layout.align.abi,
|
self.layout.align.abi,
|
||||||
|
|
|
@ -140,7 +140,7 @@
|
||||||
//! In order for link-time optimization to work properly, LLVM needs a unique
|
//! In order for link-time optimization to work properly, LLVM needs a unique
|
||||||
//! type identifier that tells it across compilation units which types are the
|
//! type identifier that tells it across compilation units which types are the
|
||||||
//! same as others. This type identifier is created by
|
//! same as others. This type identifier is created by
|
||||||
//! TypeMap::get_unique_type_id_of_type() using the following algorithm:
|
//! `TypeMap::get_unique_type_id_of_type()` using the following algorithm:
|
||||||
//!
|
//!
|
||||||
//! (1) Primitive types have their name as ID
|
//! (1) Primitive types have their name as ID
|
||||||
//! (2) Structs, enums and traits have a multipart identifier
|
//! (2) Structs, enums and traits have a multipart identifier
|
||||||
|
|
|
@ -7,16 +7,16 @@ use super::utils::{debug_context, DIB, span_start,
|
||||||
use super::namespace::mangled_name_of_instance;
|
use super::namespace::mangled_name_of_instance;
|
||||||
use super::type_names::compute_debuginfo_type_name;
|
use super::type_names::compute_debuginfo_type_name;
|
||||||
use super::CrateDebugContext;
|
use super::CrateDebugContext;
|
||||||
use crate::abi;
|
|
||||||
use crate::value::Value;
|
|
||||||
use rustc_codegen_ssa::traits::*;
|
|
||||||
|
|
||||||
|
use crate::abi;
|
||||||
|
use crate::common::CodegenCx;
|
||||||
use crate::llvm;
|
use crate::llvm;
|
||||||
use crate::llvm::debuginfo::{DIArray, DIType, DIFile, DIScope, DIDescriptor,
|
use crate::llvm::debuginfo::{DIArray, DIType, DIFile, DIScope, DIDescriptor,
|
||||||
DICompositeType, DILexicalBlock, DIFlags, DebugEmissionKind};
|
DICompositeType, DILexicalBlock, DIFlags, DebugEmissionKind};
|
||||||
use crate::llvm_util;
|
use crate::llvm_util;
|
||||||
|
use crate::value::Value;
|
||||||
|
|
||||||
use crate::common::CodegenCx;
|
use rustc_codegen_ssa::traits::*;
|
||||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
||||||
use rustc::hir::CodegenFnAttrFlags;
|
use rustc::hir::CodegenFnAttrFlags;
|
||||||
use rustc::hir::def::CtorKind;
|
use rustc::hir::def::CtorKind;
|
||||||
|
@ -36,6 +36,9 @@ use rustc::util::nodemap::FxHashMap;
|
||||||
use rustc_fs_util::path_to_c_string;
|
use rustc_fs_util::path_to_c_string;
|
||||||
use rustc_data_structures::small_c_str::SmallCStr;
|
use rustc_data_structures::small_c_str::SmallCStr;
|
||||||
use rustc_target::abi::HasDataLayout;
|
use rustc_target::abi::HasDataLayout;
|
||||||
|
use syntax::ast;
|
||||||
|
use syntax::symbol::{Interner, Symbol};
|
||||||
|
use syntax_pos::{self, Span, FileName};
|
||||||
|
|
||||||
use libc::{c_uint, c_longlong};
|
use libc::{c_uint, c_longlong};
|
||||||
use std::collections::hash_map::Entry;
|
use std::collections::hash_map::Entry;
|
||||||
|
@ -45,9 +48,6 @@ use std::hash::{Hash, Hasher};
|
||||||
use std::iter;
|
use std::iter;
|
||||||
use std::ptr;
|
use std::ptr;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use syntax::ast;
|
|
||||||
use syntax::symbol::{Interner, Symbol};
|
|
||||||
use syntax_pos::{self, Span, FileName};
|
|
||||||
|
|
||||||
impl PartialEq for llvm::Metadata {
|
impl PartialEq for llvm::Metadata {
|
||||||
fn eq(&self, other: &Self) -> bool {
|
fn eq(&self, other: &Self) -> bool {
|
||||||
|
@ -70,7 +70,7 @@ impl fmt::Debug for llvm::Metadata {
|
||||||
}
|
}
|
||||||
|
|
||||||
// From DWARF 5.
|
// From DWARF 5.
|
||||||
// See http://www.dwarfstd.org/ShowIssue.php?issue=140129.1
|
// See http://www.dwarfstd.org/ShowIssue.php?issue=140129.1.
|
||||||
const DW_LANG_RUST: c_uint = 0x1c;
|
const DW_LANG_RUST: c_uint = 0x1c;
|
||||||
#[allow(non_upper_case_globals)]
|
#[allow(non_upper_case_globals)]
|
||||||
const DW_ATE_boolean: c_uint = 0x02;
|
const DW_ATE_boolean: c_uint = 0x02;
|
||||||
|
@ -91,70 +91,70 @@ pub const NO_SCOPE_METADATA: Option<&DIScope> = None;
|
||||||
#[derive(Copy, Debug, Hash, Eq, PartialEq, Clone)]
|
#[derive(Copy, Debug, Hash, Eq, PartialEq, Clone)]
|
||||||
pub struct UniqueTypeId(ast::Name);
|
pub struct UniqueTypeId(ast::Name);
|
||||||
|
|
||||||
// The TypeMap is where the CrateDebugContext holds the type metadata nodes
|
// The `TypeMap` is where the `CrateDebugContext` holds the type metadata nodes
|
||||||
// created so far. The metadata nodes are indexed by UniqueTypeId, and, for
|
// created so far. The metadata nodes are indexed by `UniqueTypeId`, and, for
|
||||||
// faster lookup, also by Ty. The TypeMap is responsible for creating
|
// faster lookup, also by `Ty`. The `TypeMap` is responsible for creating
|
||||||
// UniqueTypeIds.
|
// `UniqueTypeId`s.
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub struct TypeMap<'ll, 'tcx> {
|
pub struct TypeMap<'ll, 'tcx> {
|
||||||
// The UniqueTypeIds created so far
|
// The `UniqueTypeId`s created so far.
|
||||||
unique_id_interner: Interner,
|
unique_id_interner: Interner,
|
||||||
// A map from UniqueTypeId to debuginfo metadata for that type. This is a 1:1 mapping.
|
// A map from `UniqueTypeId` to debuginfo metadata for that type. This is a 1:1 mapping.
|
||||||
unique_id_to_metadata: FxHashMap<UniqueTypeId, &'ll DIType>,
|
unique_id_to_metadata: FxHashMap<UniqueTypeId, &'ll DIType>,
|
||||||
// A map from types to debuginfo metadata. This is a N:1 mapping.
|
// A map from types to debuginfo metadata. This is an N:1 mapping.
|
||||||
type_to_metadata: FxHashMap<Ty<'tcx>, &'ll DIType>,
|
type_to_metadata: FxHashMap<Ty<'tcx>, &'ll DIType>,
|
||||||
// A map from types to UniqueTypeId. This is a N:1 mapping.
|
// A map from types to `UniqueTypeId`. This is an N:1 mapping.
|
||||||
type_to_unique_id: FxHashMap<Ty<'tcx>, UniqueTypeId>
|
type_to_unique_id: FxHashMap<Ty<'tcx>, UniqueTypeId>
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TypeMap<'ll, 'tcx> {
|
impl TypeMap<'ll, 'tcx> {
|
||||||
// Adds a Ty to metadata mapping to the TypeMap. The method will fail if
|
/// Adds a Ty to metadata mapping to the TypeMap. The method will fail if
|
||||||
// the mapping already exists.
|
/// the mapping already exists.
|
||||||
fn register_type_with_metadata(
|
fn register_type_with_metadata(
|
||||||
&mut self,
|
&mut self,
|
||||||
type_: Ty<'tcx>,
|
type_: Ty<'tcx>,
|
||||||
metadata: &'ll DIType,
|
metadata: &'ll DIType,
|
||||||
) {
|
) {
|
||||||
if self.type_to_metadata.insert(type_, metadata).is_some() {
|
if self.type_to_metadata.insert(type_, metadata).is_some() {
|
||||||
bug!("Type metadata for Ty '{}' is already in the TypeMap!", type_);
|
bug!("type metadata for `Ty` '{}' is already in the `TypeMap`!", type_);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Removes a Ty to metadata mapping
|
/// Removes a `Ty`-to-metadata mapping.
|
||||||
// This is useful when computing the metadata for a potentially
|
/// This is useful when computing the metadata for a potentially
|
||||||
// recursive type (e.g. a function ptr of the form:
|
/// recursive type (e.g., a function pointer of the form:
|
||||||
//
|
///
|
||||||
// fn foo() -> impl Copy { foo }
|
/// fn foo() -> impl Copy { foo }
|
||||||
//
|
///
|
||||||
// This kind of type cannot be properly represented
|
/// This kind of type cannot be properly represented
|
||||||
// via LLVM debuginfo. As a workaround,
|
/// via LLVM debuginfo. As a workaround,
|
||||||
// we register a temporary Ty to metadata mapping
|
/// we register a temporary Ty to metadata mapping
|
||||||
// for the function before we compute its actual metadata.
|
/// for the function before we compute its actual metadata.
|
||||||
// If the metadata computation ends up recursing back to the
|
/// If the metadata computation ends up recursing back to the
|
||||||
// original function, it will use the temporary mapping
|
/// original function, it will use the temporary mapping
|
||||||
// for the inner self-reference, preventing us from
|
/// for the inner self-reference, preventing us from
|
||||||
// recursing forever.
|
/// recursing forever.
|
||||||
//
|
///
|
||||||
// This function is used to remove the temporary metadata
|
/// This function is used to remove the temporary metadata
|
||||||
// mapping after we've computed the actual metadata
|
/// mapping after we've computed the actual metadata.
|
||||||
fn remove_type(
|
fn remove_type(
|
||||||
&mut self,
|
&mut self,
|
||||||
type_: Ty<'tcx>,
|
type_: Ty<'tcx>,
|
||||||
) {
|
) {
|
||||||
if self.type_to_metadata.remove(type_).is_none() {
|
if self.type_to_metadata.remove(type_).is_none() {
|
||||||
bug!("Type metadata Ty '{}' is not in the TypeMap!", type_);
|
bug!("type metadata `Ty` '{}' is not in the `TypeMap`!", type_);
|
||||||
}
|
}
|
||||||
}
|
}
|
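The register-temporarily / compute / remove pattern spelled out in the comment above can be sketched with a self-contained stand-in; `SketchTypeMap` and `Metadata` are illustrative names that only model the `type_to_metadata` map, not rustc's actual types:

```rust
use std::collections::HashMap;

struct Metadata(String);

#[derive(Default)]
struct SketchTypeMap {
    type_to_metadata: HashMap<String, Metadata>,
}

impl SketchTypeMap {
    fn metadata_for_recursive_fn(&mut self, ty: &str) -> Metadata {
        // Register a temporary mapping so that a self-reference (as in
        // `fn foo() -> impl Copy { foo }`) finds *something* instead of
        // recursing forever.
        self.type_to_metadata
            .insert(ty.to_string(), Metadata("<temporary stub>".into()));

        // Computing the real metadata may look `ty` up again; if it does, it
        // sees the stub registered above.
        let finished = self.describe_signature(ty);

        // Drop the temporary mapping again; the finished metadata is
        // registered by the caller afterwards.
        self.type_to_metadata.remove(ty);
        finished
    }

    fn describe_signature(&self, ty: &str) -> Metadata {
        match self.type_to_metadata.get(ty) {
            Some(Metadata(inner)) => Metadata(format!("fn() -> {}", inner)),
            None => Metadata(format!("fn() -> {}", ty)),
        }
    }
}

fn main() {
    let mut map = SketchTypeMap::default();
    println!("{}", map.metadata_for_recursive_fn("Foo").0); // fn() -> <temporary stub>
}
```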
||||||
|
|
||||||
// Adds a UniqueTypeId to metadata mapping to the TypeMap. The method will
|
/// Adds a `UniqueTypeId` to metadata mapping to the `TypeMap`. The method will
|
||||||
// fail if the mapping already exists.
|
/// fail if the mapping already exists.
|
||||||
fn register_unique_id_with_metadata(
|
fn register_unique_id_with_metadata(
|
||||||
&mut self,
|
&mut self,
|
||||||
unique_type_id: UniqueTypeId,
|
unique_type_id: UniqueTypeId,
|
||||||
metadata: &'ll DIType,
|
metadata: &'ll DIType,
|
||||||
) {
|
) {
|
||||||
if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() {
|
if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() {
|
||||||
bug!("Type metadata for unique id '{}' is already in the TypeMap!",
|
bug!("type metadata for unique ID '{}' is already in the `TypeMap`!",
|
||||||
self.get_unique_type_id_as_string(unique_type_id));
|
self.get_unique_type_id_as_string(unique_type_id));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -167,23 +167,23 @@ impl TypeMap<'ll, 'tcx> {
|
||||||
self.unique_id_to_metadata.get(&unique_type_id).cloned()
|
self.unique_id_to_metadata.get(&unique_type_id).cloned()
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get the string representation of a UniqueTypeId. This method will fail if
|
/// Gets the string representation of a `UniqueTypeId`. This method will fail if
|
||||||
// the id is unknown.
|
/// the ID is unknown.
|
||||||
fn get_unique_type_id_as_string(&self, unique_type_id: UniqueTypeId) -> &str {
|
fn get_unique_type_id_as_string(&self, unique_type_id: UniqueTypeId) -> &str {
|
||||||
let UniqueTypeId(interner_key) = unique_type_id;
|
let UniqueTypeId(interner_key) = unique_type_id;
|
||||||
self.unique_id_interner.get(interner_key)
|
self.unique_id_interner.get(interner_key)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get the UniqueTypeId for the given type. If the UniqueTypeId for the given
|
/// Gets the `UniqueTypeId` for the given type. If the `UniqueTypeId` for the given
|
||||||
// type has been requested before, this is just a table lookup. Otherwise an
|
/// type has been requested before, this is just a table lookup. Otherwise, an
|
||||||
// ID will be generated and stored for later lookup.
|
/// ID will be generated and stored for later lookup.
|
||||||
fn get_unique_type_id_of_type<'a>(&mut self, cx: &CodegenCx<'a, 'tcx>,
|
fn get_unique_type_id_of_type<'a>(&mut self, cx: &CodegenCx<'a, 'tcx>,
|
||||||
type_: Ty<'tcx>) -> UniqueTypeId {
|
type_: Ty<'tcx>) -> UniqueTypeId {
|
||||||
// Let's see if we already have something in the cache
|
// Let's see if we already have something in the cache.
|
||||||
if let Some(unique_type_id) = self.type_to_unique_id.get(&type_).cloned() {
|
if let Some(unique_type_id) = self.type_to_unique_id.get(&type_).cloned() {
|
||||||
return unique_type_id;
|
return unique_type_id;
|
||||||
}
|
}
|
||||||
// if not, generate one
|
// If not, generate one.
|
||||||
|
|
||||||
// The hasher we are using to generate the UniqueTypeId. We want
|
// The hasher we are using to generate the UniqueTypeId. We want
|
||||||
// something that provides more than the 64 bits of the DefaultHasher.
|
// something that provides more than the 64 bits of the DefaultHasher.
|
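As a rough stand-alone sketch of the look-up-or-generate scheme described above (a single `DefaultHasher` is used here only to keep the example short; as the comment notes, the real code wants more than 64 bits):

```rust
use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct UniqueTypeId(u32);

#[derive(Default)]
struct IdTable {
    unique_id_interner: Vec<String>,
    type_to_unique_id: HashMap<String, UniqueTypeId>,
}

impl IdTable {
    fn get_unique_type_id_of_type(&mut self, ty: &str) -> UniqueTypeId {
        // Fast path: this type already has an ID.
        if let Some(&id) = self.type_to_unique_id.get(ty) {
            return id;
        }
        // Slow path: hash a description of the type, intern the resulting
        // string, and remember the mapping for next time.
        let mut hasher = DefaultHasher::new();
        ty.hash(&mut hasher);
        let key = format!("{:016x}", hasher.finish());

        let id = UniqueTypeId(self.unique_id_interner.len() as u32);
        self.unique_id_interner.push(key);
        self.type_to_unique_id.insert(ty.to_string(), id);
        id
    }
}
```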
||||||
|
@ -203,7 +203,7 @@ impl TypeMap<'ll, 'tcx> {
|
||||||
return UniqueTypeId(key);
|
return UniqueTypeId(key);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get the UniqueTypeId for an enum variant. Enum variants are not really
|
// Get the `UniqueTypeId` for an enum variant. Enum variants are not really
|
||||||
// types of their own, so they need special handling. We still need a
|
// types of their own, so they need special handling. We still need a
|
||||||
// UniqueTypeId for them, since to debuginfo they *are* real types.
|
// UniqueTypeId for them, since to debuginfo they *are* real types.
|
||||||
fn get_unique_type_id_of_enum_variant<'a>(&mut self,
|
fn get_unique_type_id_of_enum_variant<'a>(&mut self,
|
||||||
|
@ -219,9 +219,9 @@ impl TypeMap<'ll, 'tcx> {
|
||||||
UniqueTypeId(interner_key)
|
UniqueTypeId(interner_key)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get the unique type id string for an enum variant part.
|
// Get the unique type ID string for an enum variant part.
|
||||||
// Variant parts are not types and shouldn't really have their own id,
|
// Variant parts are not types and shouldn't really have their own ID,
|
||||||
// but it makes set_members_of_composite_type() simpler.
|
// but it makes `set_members_of_composite_type()` simpler.
|
||||||
fn get_unique_type_id_str_of_enum_variant_part(&mut self, enum_type_id: UniqueTypeId) -> &str {
|
fn get_unique_type_id_str_of_enum_variant_part(&mut self, enum_type_id: UniqueTypeId) -> &str {
|
||||||
let variant_part_type_id = format!("{}_variant_part",
|
let variant_part_type_id = format!("{}_variant_part",
|
||||||
self.get_unique_type_id_as_string(enum_type_id));
|
self.get_unique_type_id_as_string(enum_type_id));
|
||||||
|
@ -230,11 +230,11 @@ impl TypeMap<'ll, 'tcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// A description of some recursive type. It can either be already finished (as
|
/// A description of some recursive type. It can either be already finished (as
|
||||||
// with FinalMetadata) or it is not yet finished, but contains all information
|
/// with `FinalMetadata`) or it is not yet finished, but contains all information
|
||||||
// needed to generate the missing parts of the description. See the
|
/// needed to generate the missing parts of the description. See the
|
||||||
// documentation section on Recursive Types at the top of this file for more
|
/// documentation section on Recursive Types at the top of this file for more
|
||||||
// information.
|
/// information.
|
||||||
enum RecursiveTypeDescription<'ll, 'tcx> {
|
enum RecursiveTypeDescription<'ll, 'tcx> {
|
||||||
UnfinishedMetadata {
|
UnfinishedMetadata {
|
||||||
unfinished_type: Ty<'tcx>,
|
unfinished_type: Ty<'tcx>,
|
||||||
|
@ -255,7 +255,7 @@ fn create_and_register_recursive_type_forward_declaration(
|
||||||
member_description_factory: MemberDescriptionFactory<'ll, 'tcx>,
|
member_description_factory: MemberDescriptionFactory<'ll, 'tcx>,
|
||||||
) -> RecursiveTypeDescription<'ll, 'tcx> {
|
) -> RecursiveTypeDescription<'ll, 'tcx> {
|
||||||
|
|
||||||
// Insert the stub into the TypeMap in order to allow for recursive references
|
// Insert the stub into the `TypeMap` in order to allow for recursive references.
|
||||||
let mut type_map = debug_context(cx).type_map.borrow_mut();
|
let mut type_map = debug_context(cx).type_map.borrow_mut();
|
||||||
type_map.register_unique_id_with_metadata(unique_type_id, metadata_stub);
|
type_map.register_unique_id_with_metadata(unique_type_id, metadata_stub);
|
||||||
type_map.register_type_with_metadata(unfinished_type, metadata_stub);
|
type_map.register_type_with_metadata(unfinished_type, metadata_stub);
|
||||||
|
@ -270,9 +270,9 @@ fn create_and_register_recursive_type_forward_declaration(
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RecursiveTypeDescription<'ll, 'tcx> {
|
impl RecursiveTypeDescription<'ll, 'tcx> {
|
||||||
// Finishes up the description of the type in question (mostly by providing
|
/// Finishes up the description of the type in question (mostly by providing
|
||||||
// descriptions of the fields of the given type) and returns the final type
|
/// descriptions of the fields of the given type) and returns the final type
|
||||||
// metadata.
|
/// metadata.
|
||||||
fn finalize(&self, cx: &CodegenCx<'ll, 'tcx>) -> MetadataCreationResult<'ll> {
|
fn finalize(&self, cx: &CodegenCx<'ll, 'tcx>) -> MetadataCreationResult<'ll> {
|
||||||
match *self {
|
match *self {
|
||||||
FinalMetadata(metadata) => MetadataCreationResult::new(metadata, false),
|
FinalMetadata(metadata) => MetadataCreationResult::new(metadata, false),
|
||||||
|
@ -287,7 +287,7 @@ impl RecursiveTypeDescription<'ll, 'tcx> {
|
||||||
// the TypeMap so that recursive references are possible. This
|
// the TypeMap so that recursive references are possible. This
|
||||||
// will always be the case if the RecursiveTypeDescription has
|
// will always be the case if the RecursiveTypeDescription has
|
||||||
// been properly created through the
|
// been properly created through the
|
||||||
// create_and_register_recursive_type_forward_declaration()
|
// `create_and_register_recursive_type_forward_declaration()`
|
||||||
// function.
|
// function.
|
||||||
{
|
{
|
||||||
let type_map = debug_context(cx).type_map.borrow();
|
let type_map = debug_context(cx).type_map.borrow();
|
||||||
|
@ -314,8 +314,8 @@ impl RecursiveTypeDescription<'ll, 'tcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Returns from the enclosing function if the type metadata with the given
|
/// Returns from the enclosing function if the type metadata with the given
|
||||||
// unique id can be found in the type map
|
/// unique ID can be found in the type map.
|
||||||
macro_rules! return_if_metadata_created_in_meantime {
|
macro_rules! return_if_metadata_created_in_meantime {
|
||||||
($cx: expr, $unique_type_id: expr) => (
|
($cx: expr, $unique_type_id: expr) => (
|
||||||
if let Some(metadata) = debug_context($cx).type_map
|
if let Some(metadata) = debug_context($cx).type_map
|
||||||
|
@ -527,19 +527,19 @@ pub fn type_metadata(
|
||||||
t: Ty<'tcx>,
|
t: Ty<'tcx>,
|
||||||
usage_site_span: Span,
|
usage_site_span: Span,
|
||||||
) -> &'ll DIType {
|
) -> &'ll DIType {
|
||||||
// Get the unique type id of this type.
|
// Get the unique type ID of this type.
|
||||||
let unique_type_id = {
|
let unique_type_id = {
|
||||||
let mut type_map = debug_context(cx).type_map.borrow_mut();
|
let mut type_map = debug_context(cx).type_map.borrow_mut();
|
||||||
// First, try to find the type in TypeMap. If we have seen it before, we
|
// First, try to find the type in `TypeMap`. If we have seen it before, we
|
||||||
// can exit early here.
|
// can exit early here.
|
||||||
match type_map.find_metadata_for_type(t) {
|
match type_map.find_metadata_for_type(t) {
|
||||||
Some(metadata) => {
|
Some(metadata) => {
|
||||||
return metadata;
|
return metadata;
|
||||||
},
|
},
|
||||||
None => {
|
None => {
|
||||||
// The Ty is not in the TypeMap but maybe we have already seen
|
// The Ty is not in the `TypeMap` but maybe we have already seen
|
||||||
// an equivalent type (e.g., only differing in region arguments).
|
// an equivalent type (e.g., only differing in region arguments).
|
||||||
// In order to find out, generate the unique type id and look
|
// In order to find out, generate the unique type ID and look
|
||||||
// that up.
|
// that up.
|
||||||
let unique_type_id = type_map.get_unique_type_id_of_type(cx, t);
|
let unique_type_id = type_map.get_unique_type_id_of_type(cx, t);
|
||||||
match type_map.find_metadata_for_unique_id(unique_type_id) {
|
match type_map.find_metadata_for_unique_id(unique_type_id) {
|
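The two-step cache check being performed here (an exact `Ty` hit first, then a hit via the unique type ID, which deliberately collapses types that differ only in their regions) can be summarised with a simplified stand-in; `LookupSketch` and the string keys are illustrative only:

```rust
use std::collections::HashMap;

type UniqueTypeId = u64;
type DITypeRef = &'static str; // stand-in for `&'ll DIType`

#[derive(Default)]
struct LookupSketch {
    type_to_metadata: HashMap<String, DITypeRef>,
    type_to_unique_id: HashMap<String, UniqueTypeId>,
    unique_id_to_metadata: HashMap<UniqueTypeId, DITypeRef>,
}

impl LookupSketch {
    fn find_cached(&self, ty: &str) -> Option<DITypeRef> {
        // 1. Exact hit for this `Ty`.
        if let Some(&metadata) = self.type_to_metadata.get(ty) {
            return Some(metadata);
        }
        // 2. Otherwise go through the unique ID; in the real code the ID is
        //    derived from the type's structure, so an equivalent type seen
        //    earlier yields the same ID and therefore the same metadata.
        let unique_type_id = self.type_to_unique_id.get(ty)?;
        self.unique_id_to_metadata.get(unique_type_id).copied()
    }
}
```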
||||||
|
@ -647,15 +647,15 @@ pub fn type_metadata(
|
||||||
//
|
//
|
||||||
// fn foo() -> impl Copy { foo }
|
// fn foo() -> impl Copy { foo }
|
||||||
//
|
//
|
||||||
// See TypeMap::remove_type for more detals
|
// See `TypeMap::remove_type` for more details
|
||||||
// about the workaround
|
// about the workaround.
|
||||||
|
|
||||||
let temp_type = {
|
let temp_type = {
|
||||||
unsafe {
|
unsafe {
|
||||||
// The choice of type here is pretty arbitrary -
|
// The choice of type here is pretty arbitrary -
|
||||||
// anything reading the debuginfo for a recursive
|
// anything reading the debuginfo for a recursive
|
||||||
// type is going to see *somthing* weird - the only
|
// type is going to see *something* weird - the only
|
||||||
// question is what exactly it will see
|
// question is what exactly it will see.
|
||||||
let (size, align) = cx.size_and_align_of(t);
|
let (size, align) = cx.size_and_align_of(t);
|
||||||
llvm::LLVMRustDIBuilderCreateBasicType(
|
llvm::LLVMRustDIBuilderCreateBasicType(
|
||||||
DIB(cx),
|
DIB(cx),
|
||||||
|
@ -677,7 +677,7 @@ pub fn type_metadata(
|
||||||
type_map.borrow_mut().remove_type(t);
|
type_map.borrow_mut().remove_type(t);
|
||||||
|
|
||||||
|
|
||||||
// This is actually a function pointer, so wrap it in pointer DI
|
// This is actually a function pointer, so wrap it in pointer DI.
|
||||||
MetadataCreationResult::new(pointer_type_metadata(cx, t, fn_metadata), false)
|
MetadataCreationResult::new(pointer_type_metadata(cx, t, fn_metadata), false)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -743,14 +743,14 @@ pub fn type_metadata(
|
||||||
let mut type_map = debug_context(cx).type_map.borrow_mut();
|
let mut type_map = debug_context(cx).type_map.borrow_mut();
|
||||||
|
|
||||||
if already_stored_in_typemap {
|
if already_stored_in_typemap {
|
||||||
// Also make sure that we already have a TypeMap entry for the unique type id.
|
// Also make sure that we already have a `TypeMap` entry for the unique type ID.
|
||||||
let metadata_for_uid = match type_map.find_metadata_for_unique_id(unique_type_id) {
|
let metadata_for_uid = match type_map.find_metadata_for_unique_id(unique_type_id) {
|
||||||
Some(metadata) => metadata,
|
Some(metadata) => metadata,
|
||||||
None => {
|
None => {
|
||||||
span_bug!(usage_site_span,
|
span_bug!(usage_site_span,
|
||||||
"Expected type metadata for unique \
|
"expected type metadata for unique \
|
||||||
type id '{}' to already be in \
|
type ID '{}' to already be in \
|
||||||
the debuginfo::TypeMap but it \
|
the `debuginfo::TypeMap` but it \
|
||||||
was not. (Ty = {})",
|
was not. (Ty = {})",
|
||||||
type_map.get_unique_type_id_as_string(unique_type_id),
|
type_map.get_unique_type_id_as_string(unique_type_id),
|
||||||
t);
|
t);
|
||||||
|
@ -761,9 +761,9 @@ pub fn type_metadata(
|
||||||
Some(metadata) => {
|
Some(metadata) => {
|
||||||
if metadata != metadata_for_uid {
|
if metadata != metadata_for_uid {
|
||||||
span_bug!(usage_site_span,
|
span_bug!(usage_site_span,
|
||||||
"Mismatch between Ty and \
|
"mismatch between `Ty` and \
|
||||||
UniqueTypeId maps in \
|
`UniqueTypeId` maps in \
|
||||||
debuginfo::TypeMap. \
|
`debuginfo::TypeMap`. \
|
||||||
UniqueTypeId={}, Ty={}",
|
UniqueTypeId={}, Ty={}",
|
||||||
type_map.get_unique_type_id_as_string(unique_type_id),
|
type_map.get_unique_type_id_as_string(unique_type_id),
|
||||||
t);
|
t);
|
||||||
|
@ -851,7 +851,7 @@ fn basic_type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll DIType {
|
||||||
ty::Float(float_ty) => {
|
ty::Float(float_ty) => {
|
||||||
(float_ty.name_str(), DW_ATE_float)
|
(float_ty.name_str(), DW_ATE_float)
|
||||||
},
|
},
|
||||||
_ => bug!("debuginfo::basic_type_metadata - t is invalid type")
|
_ => bug!("debuginfo::basic_type_metadata - `t` is invalid type")
|
||||||
};
|
};
|
||||||
|
|
||||||
let (size, align) = cx.size_and_align_of(t);
|
let (size, align) = cx.size_and_align_of(t);
|
||||||
|
@ -908,7 +908,7 @@ pub fn compile_unit_metadata(
|
||||||
};
|
};
|
||||||
|
|
||||||
// The OSX linker has an idiosyncrasy where it will ignore some debuginfo
|
// The OSX linker has an idiosyncrasy where it will ignore some debuginfo
|
||||||
// if multiple object files with the same DW_AT_name are linked together.
|
// if multiple object files with the same `DW_AT_name` are linked together.
|
||||||
// As a workaround we generate unique names for each object file. Those do
|
// As a workaround we generate unique names for each object file. Those do
|
||||||
// not correspond to an actual source file but that should be harmless.
|
// not correspond to an actual source file but that should be harmless.
|
||||||
if tcx.sess.target.target.options.is_like_osx {
|
if tcx.sess.target.target.options.is_like_osx {
|
||||||
|
@ -935,11 +935,9 @@ pub fn compile_unit_metadata(
|
||||||
//
|
//
|
||||||
// This should actually be
|
// This should actually be
|
||||||
//
|
//
|
||||||
// ```
|
// let kind = DebugEmissionKind::from_generic(tcx.sess.opts.debuginfo);
|
||||||
// let kind = DebugEmissionKind::from_generic(tcx.sess.opts.debuginfo);
|
|
||||||
// ```
|
|
||||||
//
|
//
|
||||||
// that is, we should set LLVM's emission kind to `LineTablesOnly` if
|
// That is, we should set LLVM's emission kind to `LineTablesOnly` if
|
||||||
// we are compiling with "limited" debuginfo. However, some of the
|
// we are compiling with "limited" debuginfo. However, some of the
|
||||||
// existing tools relied on slightly more debuginfo being generated than
|
// existing tools relied on slightly more debuginfo being generated than
|
||||||
// would be the case with `LineTablesOnly`, and we did not want to break
|
// would be the case with `LineTablesOnly`, and we did not want to break
|
||||||
|
@ -1030,7 +1028,7 @@ impl MetadataCreationResult<'ll> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Description of a type member, which can either be a regular field (as in
|
// Description of a type member, which can either be a regular field (as in
|
||||||
// structs or tuples) or an enum variant.
|
/// structs or tuples) or an enum variant.
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
struct MemberDescription<'ll> {
|
struct MemberDescription<'ll> {
|
||||||
name: String,
|
name: String,
|
||||||
|
@ -1067,10 +1065,10 @@ impl<'ll> MemberDescription<'ll> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// A factory for MemberDescriptions. It produces a list of member descriptions
|
/// A factory for `MemberDescription`s. It produces a list of member descriptions
|
||||||
// for some record-like type. MemberDescriptionFactories are used to defer the
|
/// for some record-like type. `MemberDescriptionFactory`s are used to defer the
|
||||||
// creation of type member descriptions in order to break cycles arising from
|
/// creation of type member descriptions in order to break cycles arising from
|
||||||
// recursive type definitions.
|
/// recursive type definitions.
|
||||||
enum MemberDescriptionFactory<'ll, 'tcx> {
|
enum MemberDescriptionFactory<'ll, 'tcx> {
|
||||||
StructMDF(StructMemberDescriptionFactory<'tcx>),
|
StructMDF(StructMemberDescriptionFactory<'tcx>),
|
||||||
TupleMDF(TupleMemberDescriptionFactory<'tcx>),
|
TupleMDF(TupleMemberDescriptionFactory<'tcx>),
|
||||||
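The deferral described above amounts to storing "how to build the member list" rather than the list itself, so the members can be produced after the stub metadata for a (possibly self-referential) type already exists. A toy version of that shape, with illustrative names rather than the real factory types:

```rust
struct Member {
    name: String,
}

enum MemberFactory {
    Struct { field_names: Vec<String> },
    Tuple { arity: usize },
}

impl MemberFactory {
    // Only called once the type's stub metadata is registered, which is what
    // breaks the cycle for recursive type definitions.
    fn create_members(&self) -> Vec<Member> {
        match self {
            MemberFactory::Struct { field_names } => field_names
                .iter()
                .map(|name| Member { name: name.clone() })
                .collect(),
            MemberFactory::Tuple { arity } => (0..*arity)
                .map(|i| Member { name: format!("__{}", i) })
                .collect(),
        }
    }
}
```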
|
@ -1106,7 +1104,7 @@ impl MemberDescriptionFactory<'ll, 'tcx> {
|
||||||
// Structs
|
// Structs
|
||||||
//=-----------------------------------------------------------------------------
|
//=-----------------------------------------------------------------------------
|
||||||
|
|
||||||
// Creates MemberDescriptions for the fields of a struct
|
/// Creates `MemberDescription`s for the fields of a struct.
|
||||||
struct StructMemberDescriptionFactory<'tcx> {
|
struct StructMemberDescriptionFactory<'tcx> {
|
||||||
ty: Ty<'tcx>,
|
ty: Ty<'tcx>,
|
||||||
variant: &'tcx ty::VariantDef,
|
variant: &'tcx ty::VariantDef,
|
||||||
|
@ -1177,7 +1175,7 @@ fn prepare_struct_metadata(
|
||||||
// Tuples
|
// Tuples
|
||||||
//=-----------------------------------------------------------------------------
|
//=-----------------------------------------------------------------------------
|
||||||
|
|
||||||
// Creates MemberDescriptions for the fields of a tuple
|
/// Creates `MemberDescription`s for the fields of a tuple.
|
||||||
struct TupleMemberDescriptionFactory<'tcx> {
|
struct TupleMemberDescriptionFactory<'tcx> {
|
||||||
ty: Ty<'tcx>,
|
ty: Ty<'tcx>,
|
||||||
component_types: Vec<Ty<'tcx>>,
|
component_types: Vec<Ty<'tcx>>,
|
||||||
|
@ -1300,14 +1298,14 @@ fn prepare_union_metadata(
|
||||||
// Enums
|
// Enums
|
||||||
//=-----------------------------------------------------------------------------
|
//=-----------------------------------------------------------------------------
|
||||||
|
|
||||||
// DWARF variant support is only available starting in LLVM 8.
|
/// DWARF variant support is only available starting in LLVM 8.
|
||||||
// Although the earlier enum debug info output did not work properly
|
/// Although the earlier enum debug info output did not work properly
|
||||||
// in all situations, it is better for the time being to continue to
|
/// in all situations, it is better for the time being to continue to
|
||||||
// sometimes emit the old style rather than emit something completely
|
/// sometimes emit the old style rather than emit something completely
|
||||||
// useless when rust is compiled against LLVM 6 or older. LLVM 7
|
/// useless when rust is compiled against LLVM 6 or older. LLVM 7
|
||||||
// contains an early version of the DWARF variant support, and will
|
/// contains an early version of the DWARF variant support, and will
|
||||||
// crash when handling the new debug info format. This function
|
/// crash when handling the new debug info format. This function
|
||||||
// decides which representation will be emitted.
|
/// decides which representation will be emitted.
|
||||||
fn use_enum_fallback(cx: &CodegenCx<'_, '_>) -> bool {
|
fn use_enum_fallback(cx: &CodegenCx<'_, '_>) -> bool {
|
||||||
// On MSVC we have to use the fallback mode, because LLVM doesn't
|
// On MSVC we have to use the fallback mode, because LLVM doesn't
|
||||||
// lower variant parts to PDB.
|
// lower variant parts to PDB.
|
||||||
|
@ -1318,11 +1316,11 @@ fn use_enum_fallback(cx: &CodegenCx<'_, '_>) -> bool {
|
||||||
|| llvm_util::get_major_version() < 8;
|
|| llvm_util::get_major_version() < 8;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Describes the members of an enum value: An enum is described as a union of
|
/// Describes the members of an enum value; an enum is described as a union of
|
||||||
// structs in DWARF. This MemberDescriptionFactory provides the description for
|
/// structs in DWARF. This `MemberDescriptionFactory` provides the description for
|
||||||
// the members of this union; so for every variant of the given enum, this
|
/// the members of this union; so for every variant of the given enum, this
|
||||||
// factory will produce one MemberDescription (all with no name and a fixed
|
/// factory will produce one `MemberDescription` (all with no name and a fixed
|
||||||
// offset of zero bytes).
|
/// offset of zero bytes).
|
||||||
struct EnumMemberDescriptionFactory<'ll, 'tcx> {
|
struct EnumMemberDescriptionFactory<'ll, 'tcx> {
|
||||||
enum_type: Ty<'tcx>,
|
enum_type: Ty<'tcx>,
|
||||||
layout: TyLayout<'tcx>,
|
layout: TyLayout<'tcx>,
|
||||||
|
@ -1456,7 +1454,7 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> {
|
||||||
} => {
|
} => {
|
||||||
if fallback {
|
if fallback {
|
||||||
let variant = self.layout.for_variant(cx, dataful_variant);
|
let variant = self.layout.for_variant(cx, dataful_variant);
|
||||||
// Create a description of the non-null variant
|
// Create a description of the non-null variant.
|
||||||
let (variant_type_metadata, member_description_factory) =
|
let (variant_type_metadata, member_description_factory) =
|
||||||
describe_enum_variant(cx,
|
describe_enum_variant(cx,
|
||||||
variant,
|
variant,
|
||||||
|
@ -1566,9 +1564,9 @@ impl EnumMemberDescriptionFactory<'ll, 'tcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Creates MemberDescriptions for the fields of a single enum variant.
|
// Creates `MemberDescription`s for the fields of a single enum variant.
|
||||||
struct VariantMemberDescriptionFactory<'ll, 'tcx> {
|
struct VariantMemberDescriptionFactory<'ll, 'tcx> {
|
||||||
// Cloned from the layout::Struct describing the variant.
|
/// Cloned from the `layout::Struct` describing the variant.
|
||||||
offsets: Vec<layout::Size>,
|
offsets: Vec<layout::Size>,
|
||||||
args: Vec<(String, Ty<'tcx>)>,
|
args: Vec<(String, Ty<'tcx>)>,
|
||||||
discriminant_type_metadata: Option<&'ll DIType>,
|
discriminant_type_metadata: Option<&'ll DIType>,
|
||||||
|
@ -1652,10 +1650,10 @@ impl<'tcx> VariantInfo<'tcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Returns a tuple of (1) type_metadata_stub of the variant, (2) a
|
/// Returns a tuple of (1) `type_metadata_stub` of the variant, (2) a
|
||||||
// MemberDescriptionFactory for producing the descriptions of the
|
/// `MemberDescriptionFactory` for producing the descriptions of the
|
||||||
// fields of the variant. This is a rudimentary version of a full
|
/// fields of the variant. This is a rudimentary version of a full
|
||||||
// RecursiveTypeDescription.
|
/// `RecursiveTypeDescription`.
|
||||||
fn describe_enum_variant(
|
fn describe_enum_variant(
|
||||||
cx: &CodegenCx<'ll, 'tcx>,
|
cx: &CodegenCx<'ll, 'tcx>,
|
||||||
layout: layout::TyLayout<'tcx>,
|
layout: layout::TyLayout<'tcx>,
|
||||||
|
@ -2088,8 +2086,7 @@ fn set_members_of_composite_type(cx: &CodegenCx<'ll, 'tcx>,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Compute the type parameters for a type, if any, for the given
|
/// Computes the type parameters for a type, if any, for the given metadata.
|
||||||
// metadata.
|
|
||||||
fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> Option<&'ll DIArray> {
|
fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> Option<&'ll DIArray> {
|
||||||
if let ty::Adt(def, substs) = ty.kind {
|
if let ty::Adt(def, substs) = ty.kind {
|
||||||
if !substs.types().next().is_none() {
|
if !substs.types().next().is_none() {
|
||||||
|
@ -2134,9 +2131,9 @@ fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> Option<&'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// A convenience wrapper around LLVMRustDIBuilderCreateStructType(). Does not do
|
/// A convenience wrapper around `LLVMRustDIBuilderCreateStructType()`. Does not do
|
||||||
// any caching, does not add any fields to the struct. This can be done later
|
/// any caching, does not add any fields to the struct. This can be done later
|
||||||
// with set_members_of_composite_type().
|
/// with `set_members_of_composite_type()`.
|
||||||
fn create_struct_stub(
|
fn create_struct_stub(
|
||||||
cx: &CodegenCx<'ll, 'tcx>,
|
cx: &CodegenCx<'ll, 'tcx>,
|
||||||
struct_type: Ty<'tcx>,
|
struct_type: Ty<'tcx>,
|
||||||
|
@ -2151,9 +2148,9 @@ fn create_struct_stub(
|
||||||
debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id)
|
debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id)
|
||||||
);
|
);
|
||||||
let metadata_stub = unsafe {
|
let metadata_stub = unsafe {
|
||||||
// LLVMRustDIBuilderCreateStructType() wants an empty array. A null
|
// `LLVMRustDIBuilderCreateStructType()` wants an empty array. A null
|
||||||
// pointer will lead to hard to trace and debug LLVM assertions
|
// pointer will lead to hard to trace and debug LLVM assertions
|
||||||
// later on in llvm/lib/IR/Value.cpp.
|
// later on in `llvm/lib/IR/Value.cpp`.
|
||||||
let empty_array = create_DIArray(DIB(cx), &[]);
|
let empty_array = create_DIArray(DIB(cx), &[]);
|
||||||
|
|
||||||
llvm::LLVMRustDIBuilderCreateStructType(
|
llvm::LLVMRustDIBuilderCreateStructType(
|
||||||
|
@ -2189,9 +2186,9 @@ fn create_union_stub(
|
||||||
debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id)
|
debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id)
|
||||||
);
|
);
|
||||||
let metadata_stub = unsafe {
|
let metadata_stub = unsafe {
|
||||||
// LLVMRustDIBuilderCreateUnionType() wants an empty array. A null
|
// `LLVMRustDIBuilderCreateUnionType()` wants an empty array. A null
|
||||||
// pointer will lead to hard to trace and debug LLVM assertions
|
// pointer will lead to hard to trace and debug LLVM assertions
|
||||||
// later on in llvm/lib/IR/Value.cpp.
|
// later on in `llvm/lib/IR/Value.cpp`.
|
||||||
let empty_array = create_DIArray(DIB(cx), &[]);
|
let empty_array = create_DIArray(DIB(cx), &[]);
|
||||||
|
|
||||||
llvm::LLVMRustDIBuilderCreateUnionType(
|
llvm::LLVMRustDIBuilderCreateUnionType(
|
||||||
|
@ -2231,8 +2228,8 @@ pub fn create_global_var_metadata(
|
||||||
}
|
}
|
||||||
|
|
||||||
let no_mangle = attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE);
|
let no_mangle = attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE);
|
||||||
// We may want to remove the namespace scope if we're in an extern block, see:
|
// We may want to remove the namespace scope if we're in an extern block (see
|
||||||
// https://github.com/rust-lang/rust/pull/46457#issuecomment-351750952
|
// https://github.com/rust-lang/rust/pull/46457#issuecomment-351750952).
|
||||||
let var_scope = get_namespace_for_item(cx, def_id);
|
let var_scope = get_namespace_for_item(cx, def_id);
|
||||||
let span = tcx.def_span(def_id);
|
let span = tcx.def_span(def_id);
|
||||||
|
|
||||||
|
@ -2287,14 +2284,14 @@ pub fn create_vtable_metadata(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>, vtable: &
|
||||||
let type_metadata = type_metadata(cx, ty, syntax_pos::DUMMY_SP);
|
let type_metadata = type_metadata(cx, ty, syntax_pos::DUMMY_SP);
|
||||||
|
|
||||||
unsafe {
|
unsafe {
|
||||||
// LLVMRustDIBuilderCreateStructType() wants an empty array. A null
|
// `LLVMRustDIBuilderCreateStructType()` wants an empty array. A null
|
||||||
// pointer will lead to hard to trace and debug LLVM assertions
|
// pointer will lead to hard to trace and debug LLVM assertions
|
||||||
// later on in llvm/lib/IR/Value.cpp.
|
// later on in `llvm/lib/IR/Value.cpp`.
|
||||||
let empty_array = create_DIArray(DIB(cx), &[]);
|
let empty_array = create_DIArray(DIB(cx), &[]);
|
||||||
|
|
||||||
let name = const_cstr!("vtable");
|
let name = const_cstr!("vtable");
|
||||||
|
|
||||||
// Create a new one each time. We don't want metadata caching
|
// Create a new one each time. We don't want metadata caching
|
||||||
// here, because each vtable will refer to a unique containing
|
// here, because each vtable will refer to a unique containing
|
||||||
// type.
|
// type.
|
||||||
let vtable_type = llvm::LLVMRustDIBuilderCreateStructType(
|
let vtable_type = llvm::LLVMRustDIBuilderCreateStructType(
|
||||||
|
@ -2327,7 +2324,7 @@ pub fn create_vtable_metadata(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>, vtable: &
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Creates an "extension" of an existing DIScope into another file.
|
/// Creates an "extension" of an existing `DIScope` into another file.
|
||||||
pub fn extend_scope_to_file(
|
pub fn extend_scope_to_file(
|
||||||
cx: &CodegenCx<'ll, '_>,
|
cx: &CodegenCx<'ll, '_>,
|
||||||
scope_metadata: &'ll DIScope,
|
scope_metadata: &'ll DIScope,
|
||||||
|
|
|
@ -152,7 +152,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
|
||||||
match scalar.value {
|
match scalar.value {
|
||||||
Primitive::Int(..) => {
|
Primitive::Int(..) => {
|
||||||
if self.cx().size_of(ret_ty).bytes() < 4 {
|
if self.cx().size_of(ret_ty).bytes() < 4 {
|
||||||
// va_arg should not be called on a integer type
|
// `va_arg` should not be called on an integer type
|
||||||
// less than 4 bytes in length. If it is, promote
|
// less than 4 bytes in length. If it is, promote
|
||||||
// the integer to a `i32` and truncate the result
|
// the integer to a `i32` and truncate the result
|
||||||
// back to the smaller type.
|
// back to the smaller type.
|
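The real lowering emits an i32-wide `va_arg` followed by a truncation; the toy below only models that promote-then-truncate shape with plain byte slots and is not the actual builder API:

```rust
// Stand-in for reading a variadic slot: sub-i32 arguments are passed in an
// i32-sized slot, so the full 4 bytes are always read...
fn read_va_slot_i32(slot: &[u8; 4]) -> i32 {
    i32::from_ne_bytes(*slot)
}

// ...and the result is then truncated back to the declared, smaller type.
fn read_small_va_arg_u8(slot: &[u8; 4]) -> u8 {
    read_va_slot_i32(slot) as u8
}

fn main() {
    let slot = 0x41i32.to_ne_bytes();
    assert_eq!(read_small_va_arg_u8(&slot), 0x41);
}
```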
||||||
|
|
|
@ -1,21 +1,32 @@
|
||||||
//! Codegen the completed AST to the LLVM IR.
|
//! Codegen the completed AST to the LLVM IR.
|
||||||
//!
|
//!
|
||||||
//! Some functions here, such as codegen_block and codegen_expr, return a value --
|
//! Some functions here, such as `codegen_block` and `codegen_expr`, return a value --
|
||||||
//! the result of the codegen to LLVM -- while others, such as codegen_fn
|
//! the result of the codegen to LLVM -- while others, such as `codegen_fn`
|
||||||
//! and mono_item, are called only for the side effect of adding a
|
//! and `mono_item`, are called only for the side effect of adding a
|
||||||
//! particular definition to the LLVM IR output we're producing.
|
//! particular definition to the LLVM IR output we're producing.
|
||||||
//!
|
//!
|
||||||
//! Hopefully useful general knowledge about codegen:
|
//! Hopefully useful general knowledge about codegen:
|
||||||
//!
|
//!
|
||||||
//! * There's no way to find out the `Ty` type of a Value. Doing so
|
//! * There's no way to find out the `Ty` type of a `Value`. Doing so
|
||||||
//! would be "trying to get the eggs out of an omelette" (credit:
|
//! would be "trying to get the eggs out of an omelette" (credit:
|
||||||
//! pcwalton). You can, instead, find out its `llvm::Type` by calling `val_ty`,
|
//! pcwalton). You can, instead, find out its `llvm::Type` by calling `val_ty`,
|
||||||
//! but one `llvm::Type` corresponds to many `Ty`s; for instance, `tup(int, int,
|
//! but one `llvm::Type` corresponds to many `Ty`s; for instance, `tup(int, int,
|
||||||
//! int)` and `rec(x=int, y=int, z=int)` will have the same `llvm::Type`.
|
//! int)` and `rec(x=int, y=int, z=int)` will have the same `llvm::Type`.
|
||||||
|
|
||||||
use crate::{ModuleCodegen, ModuleKind, CachedModuleCodegen};
|
use crate::{CachedModuleCodegen, CrateInfo, MemFlags, ModuleCodegen, ModuleKind};
|
||||||
|
use crate::back::write::{
|
||||||
|
OngoingCodegen, start_async_codegen, submit_pre_lto_module_to_llvm,
|
||||||
|
submit_post_lto_module_to_llvm,
|
||||||
|
};
|
||||||
|
use crate::common::{RealPredicate, TypeKind, IntPredicate};
|
||||||
|
use crate::meth;
|
||||||
|
use crate::mir;
|
||||||
|
use crate::mir::operand::OperandValue;
|
||||||
|
use crate::mir::place::PlaceRef;
|
||||||
|
use crate::traits::*;
|
||||||
|
|
||||||
use rustc::dep_graph::cgu_reuse_tracker::CguReuse;
|
use rustc::dep_graph::cgu_reuse_tracker::CguReuse;
|
||||||
|
use rustc::hir;
|
||||||
use rustc::hir::def_id::{DefId, LOCAL_CRATE};
|
use rustc::hir::def_id::{DefId, LOCAL_CRATE};
|
||||||
use rustc::middle::cstore::EncodedMetadata;
|
use rustc::middle::cstore::EncodedMetadata;
|
||||||
use rustc::middle::lang_items::StartFnLangItem;
|
use rustc::middle::lang_items::StartFnLangItem;
|
||||||
|
@ -23,6 +34,7 @@ use rustc::middle::weak_lang_items;
|
||||||
use rustc::mir::mono::{CodegenUnitNameBuilder, CodegenUnit, MonoItem};
|
use rustc::mir::mono::{CodegenUnitNameBuilder, CodegenUnit, MonoItem};
|
||||||
use rustc::ty::{self, Ty, TyCtxt, Instance};
|
use rustc::ty::{self, Ty, TyCtxt, Instance};
|
||||||
use rustc::ty::layout::{self, Align, TyLayout, LayoutOf, VariantIdx, HasTyCtxt};
|
use rustc::ty::layout::{self, Align, TyLayout, LayoutOf, VariantIdx, HasTyCtxt};
|
||||||
|
use rustc::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA};
|
||||||
use rustc::ty::query::Providers;
|
use rustc::ty::query::Providers;
|
||||||
use rustc::middle::cstore::{self, LinkagePreference};
|
use rustc::middle::cstore::{self, LinkagePreference};
|
||||||
use rustc::util::common::{time, print_time_passes_entry, set_time_depth, time_depth};
|
use rustc::util::common::{time, print_time_passes_entry, set_time_depth, time_depth};
|
||||||
|
@ -31,25 +43,12 @@ use rustc::session::Session;
|
||||||
use rustc::util::nodemap::FxHashMap;
|
use rustc::util::nodemap::FxHashMap;
|
||||||
use rustc_index::vec::Idx;
|
use rustc_index::vec::Idx;
|
||||||
use rustc_codegen_utils::{symbol_names_test, check_for_rustc_errors_attr};
|
use rustc_codegen_utils::{symbol_names_test, check_for_rustc_errors_attr};
|
||||||
use rustc::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA};
|
use syntax::attr;
|
||||||
use crate::mir::place::PlaceRef;
|
use syntax_pos::Span;
|
||||||
use crate::back::write::{OngoingCodegen, start_async_codegen, submit_pre_lto_module_to_llvm,
|
|
||||||
submit_post_lto_module_to_llvm};
|
|
||||||
use crate::{MemFlags, CrateInfo};
|
|
||||||
use crate::common::{RealPredicate, TypeKind, IntPredicate};
|
|
||||||
use crate::meth;
|
|
||||||
use crate::mir;
|
|
||||||
|
|
||||||
use crate::traits::*;
|
|
||||||
|
|
||||||
use std::cmp;
|
use std::cmp;
|
||||||
use std::ops::{Deref, DerefMut};
|
use std::ops::{Deref, DerefMut};
|
||||||
use std::time::{Instant, Duration};
|
use std::time::{Instant, Duration};
|
||||||
use syntax_pos::Span;
|
|
||||||
use syntax::attr;
|
|
||||||
use rustc::hir;
|
|
||||||
|
|
||||||
use crate::mir::operand::OperandValue;
|
|
||||||
|
|
||||||
pub fn bin_op_to_icmp_predicate(op: hir::BinOpKind,
|
pub fn bin_op_to_icmp_predicate(op: hir::BinOpKind,
|
||||||
signed: bool)
|
signed: bool)
|
||||||
|
@ -116,9 +115,8 @@ pub fn compare_simd_types<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
|
||||||
/// Retrieves the information we are losing (making dynamic) in an unsizing
|
/// Retrieves the information we are losing (making dynamic) in an unsizing
|
||||||
/// adjustment.
|
/// adjustment.
|
||||||
///
|
///
|
||||||
/// The `old_info` argument is a bit funny. It is intended for use
|
/// The `old_info` argument is a bit odd. It is intended for use in an upcast,
|
||||||
/// in an upcast, where the new vtable for an object will be derived
|
/// where the new vtable for an object will be derived from the old one.
|
||||||
/// from the old one.
|
|
||||||
pub fn unsized_info<'tcx, Cx: CodegenMethods<'tcx>>(
|
pub fn unsized_info<'tcx, Cx: CodegenMethods<'tcx>>(
|
||||||
cx: &Cx,
|
cx: &Cx,
|
||||||
source: Ty<'tcx>,
|
source: Ty<'tcx>,
|
||||||
|
@ -140,16 +138,19 @@ pub fn unsized_info<'tcx, Cx: CodegenMethods<'tcx>>(
|
||||||
(_, &ty::Dynamic(ref data, ..)) => {
|
(_, &ty::Dynamic(ref data, ..)) => {
|
||||||
let vtable_ptr = cx.layout_of(cx.tcx().mk_mut_ptr(target))
|
let vtable_ptr = cx.layout_of(cx.tcx().mk_mut_ptr(target))
|
||||||
.field(cx, FAT_PTR_EXTRA);
|
.field(cx, FAT_PTR_EXTRA);
|
||||||
cx.const_ptrcast(meth::get_vtable(cx, source, data.principal()),
|
cx.const_ptrcast(
|
||||||
cx.backend_type(vtable_ptr))
|
meth::get_vtable(cx, source, data.principal()),
|
||||||
|
cx.backend_type(vtable_ptr),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
_ => bug!("unsized_info: invalid unsizing {:?} -> {:?}",
|
_ => bug!(
|
||||||
source,
|
"unsized_info: invalid unsizing {:?} -> {:?}",
|
||||||
target),
|
source, target
|
||||||
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Coerce `src` to `dst_ty`. `src_ty` must be a thin pointer.
|
/// Coerces `src` to `dst_ty`. `src_ty` must be a thin pointer.
|
||||||
pub fn unsize_thin_ptr<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
|
pub fn unsize_thin_ptr<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
|
||||||
bx: &mut Bx,
|
bx: &mut Bx,
|
||||||
src: Bx::Value,
|
src: Bx::Value,
|
||||||
|
@ -199,8 +200,8 @@ pub fn unsize_thin_ptr<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Coerce `src`, which is a reference to a value of type `src_ty`,
|
/// Coerces `src`, which is a reference to a value of type `src_ty`,
|
||||||
/// to a value of type `dst_ty` and store the result in `dst`
|
/// to a value of type `dst_ty`, and stores the result in `dst`.
|
||||||
pub fn coerce_unsized_into<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
|
pub fn coerce_unsized_into<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
|
||||||
bx: &mut Bx,
|
bx: &mut Bx,
|
||||||
src: PlaceRef<'tcx, Bx::Value>,
|
src: PlaceRef<'tcx, Bx::Value>,
|
||||||
|
@ -244,15 +245,17 @@ pub fn coerce_unsized_into<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
|
||||||
|
|
||||||
if src_f.layout.ty == dst_f.layout.ty {
|
if src_f.layout.ty == dst_f.layout.ty {
|
||||||
memcpy_ty(bx, dst_f.llval, dst_f.align, src_f.llval, src_f.align,
|
memcpy_ty(bx, dst_f.llval, dst_f.align, src_f.llval, src_f.align,
|
||||||
src_f.layout, MemFlags::empty());
|
src_f.layout, MemFlags::empty());
|
||||||
} else {
|
} else {
|
||||||
coerce_unsized_into(bx, src_f, dst_f);
|
coerce_unsized_into(bx, src_f, dst_f);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => bug!("coerce_unsized_into: invalid coercion {:?} -> {:?}",
|
_ => bug!(
|
||||||
src_ty,
|
"coerce_unsized_into: invalid coercion {:?} -> {:?}",
|
||||||
dst_ty),
|
src_ty,
|
||||||
|
dst_ty,
|
||||||
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
use rustc_target::abi::call::FnAbi;
|
|
||||||
|
|
||||||
use crate::traits::*;
|
use crate::traits::*;
|
||||||
|
|
||||||
use rustc::ty::{self, Ty, Instance};
|
use rustc::ty::{self, Ty, Instance};
|
||||||
|
use rustc_target::abi::call::FnAbi;
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug)]
|
#[derive(Copy, Clone, Debug)]
|
||||||
pub struct VirtualIndex(u64);
|
pub struct VirtualIndex(u64);
|
||||||
|
@ -20,7 +19,7 @@ impl<'a, 'tcx> VirtualIndex {
|
||||||
self,
|
self,
|
||||||
bx: &mut Bx,
|
bx: &mut Bx,
|
||||||
llvtable: Bx::Value,
|
llvtable: Bx::Value,
|
||||||
fn_abi: &FnAbi<'tcx, Ty<'tcx>>
|
fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
|
||||||
) -> Bx::Value {
|
) -> Bx::Value {
|
||||||
// Load the data pointer from the object.
|
// Load the data pointer from the object.
|
||||||
debug!("get_fn({:?}, {:?})", llvtable, self);
|
debug!("get_fn({:?}, {:?})", llvtable, self);
|
||||||
|
@ -33,7 +32,7 @@ impl<'a, 'tcx> VirtualIndex {
|
||||||
let gep = bx.inbounds_gep(llvtable, &[bx.const_usize(self.0)]);
|
let gep = bx.inbounds_gep(llvtable, &[bx.const_usize(self.0)]);
|
||||||
let ptr = bx.load(gep, ptr_align);
|
let ptr = bx.load(gep, ptr_align);
|
||||||
bx.nonnull_metadata(ptr);
|
bx.nonnull_metadata(ptr);
|
||||||
// Vtable loads are invariant
|
// Vtable loads are invariant.
|
||||||
bx.set_invariant_load(ptr);
|
bx.set_invariant_load(ptr);
|
||||||
ptr
|
ptr
|
||||||
}
|
}
|
||||||
|
@ -41,7 +40,7 @@ impl<'a, 'tcx> VirtualIndex {
|
||||||
pub fn get_usize<Bx: BuilderMethods<'a, 'tcx>>(
|
pub fn get_usize<Bx: BuilderMethods<'a, 'tcx>>(
|
||||||
self,
|
self,
|
||||||
bx: &mut Bx,
|
bx: &mut Bx,
|
||||||
llvtable: Bx::Value
|
llvtable: Bx::Value,
|
||||||
) -> Bx::Value {
|
) -> Bx::Value {
|
||||||
// Load the data pointer from the object.
|
// Load the data pointer from the object.
|
||||||
debug!("get_int({:?}, {:?})", llvtable, self);
|
debug!("get_int({:?}, {:?})", llvtable, self);
|
||||||
|
@ -50,7 +49,7 @@ impl<'a, 'tcx> VirtualIndex {
|
||||||
let usize_align = bx.tcx().data_layout.pointer_align.abi;
|
let usize_align = bx.tcx().data_layout.pointer_align.abi;
|
||||||
let gep = bx.inbounds_gep(llvtable, &[bx.const_usize(self.0)]);
|
let gep = bx.inbounds_gep(llvtable, &[bx.const_usize(self.0)]);
|
||||||
let ptr = bx.load(gep, usize_align);
|
let ptr = bx.load(gep, usize_align);
|
||||||
// Vtable loads are invariant
|
// Vtable loads are invariant.
|
||||||
bx.set_invariant_load(ptr);
|
bx.set_invariant_load(ptr);
|
||||||
ptr
|
ptr
|
||||||
}
|
}
|
||||||
|
@ -63,7 +62,7 @@ impl<'a, 'tcx> VirtualIndex {
|
||||||
///
|
///
|
||||||
/// The `trait_ref` encodes the erased self type. Hence if we are
|
/// The `trait_ref` encodes the erased self type. Hence if we are
|
||||||
/// making an object `Foo<dyn Trait>` from a value of type `Foo<T>`, then
|
/// making an object `Foo<dyn Trait>` from a value of type `Foo<T>`, then
|
||||||
/// `trait_ref` would map `T:Trait`.
|
/// `trait_ref` would map `T: Trait`.
|
||||||
pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
|
pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
|
||||||
cx: &Cx,
|
cx: &Cx,
|
||||||
ty: Ty<'tcx>,
|
ty: Ty<'tcx>,
|
||||||
|
@ -78,7 +77,7 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
|
||||||
return val;
|
return val;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Not in the cache. Build it.
|
// Not in the cache; build it.
|
||||||
let nullptr = cx.const_null(cx.type_i8p());
|
let nullptr = cx.const_null(cx.type_i8p());
|
||||||
|
|
||||||
let methods_root;
|
let methods_root;
|
||||||
|
@ -105,7 +104,7 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
|
||||||
let layout = cx.layout_of(ty);
|
let layout = cx.layout_of(ty);
|
||||||
// /////////////////////////////////////////////////////////////////////////////////////////////
|
// /////////////////////////////////////////////////////////////////////////////////////////////
|
||||||
// If you touch this code, be sure to also make the corresponding changes to
|
// If you touch this code, be sure to also make the corresponding changes to
|
||||||
// `get_vtable` in rust_mir/interpret/traits.rs
|
// `get_vtable` in `rust_mir/interpret/traits.rs`.
|
||||||
// /////////////////////////////////////////////////////////////////////////////////////////////
|
// /////////////////////////////////////////////////////////////////////////////////////////////
|
||||||
let components: Vec<_> = [
|
let components: Vec<_> = [
|
||||||
cx.get_fn_addr(Instance::resolve_drop_in_place(cx.tcx(), ty)),
|
cx.get_fn_addr(Instance::resolve_drop_in_place(cx.tcx(), ty)),
|
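For orientation, the `components` array assembled here is a fixed prefix followed by one slot per trait method, which is why the interpreter's `get_vtable` has to agree on the exact order. A minimal sketch of that shape, assuming the drop-glue/size/align prefix of this era of the code and using plain `usize`s as stand-ins for the actual pointers and constants:

```rust
fn build_vtable_slots(drop_in_place: usize, size: usize, align: usize, methods: &[usize]) -> Vec<usize> {
    let mut slots = vec![drop_in_place, size, align]; // fixed prefix
    slots.extend_from_slice(methods);                 // one slot per trait method
    slots
}
```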
||||||
|
|
|
@ -1,19 +1,18 @@
|
||||||
|
use super::{FunctionCx, LocalRef};
|
||||||
|
use super::place::PlaceRef;
|
||||||
|
|
||||||
|
use crate::MemFlags;
|
||||||
|
use crate::base;
|
||||||
|
use crate::glue;
|
||||||
|
use crate::traits::*;
|
||||||
|
|
||||||
use rustc::mir::interpret::{ConstValue, ErrorHandled, Pointer, Scalar};
|
use rustc::mir::interpret::{ConstValue, ErrorHandled, Pointer, Scalar};
|
||||||
use rustc::mir;
|
use rustc::mir;
|
||||||
use rustc::ty;
|
use rustc::ty;
|
||||||
use rustc::ty::layout::{self, Align, LayoutOf, TyLayout, Size};
|
use rustc::ty::layout::{self, Align, LayoutOf, TyLayout, Size};
|
||||||
|
|
||||||
use crate::base;
|
|
||||||
use crate::MemFlags;
|
|
||||||
use crate::glue;
|
|
||||||
|
|
||||||
use crate::traits::*;
|
|
||||||
|
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
|
|
||||||
use super::{FunctionCx, LocalRef};
|
|
||||||
use super::place::PlaceRef;
|
|
||||||
|
|
||||||
/// The representation of a Rust value. The enum variant is in fact
|
/// The representation of a Rust value. The enum variant is in fact
|
||||||
/// uniquely determined by the value's type, but is kept as a
|
/// uniquely determined by the value's type, but is kept as a
|
||||||
/// safety check.
|
/// safety check.
|
||||||
|
@ -343,6 +342,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn store_unsized<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
|
pub fn store_unsized<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
|
||||||
self,
|
self,
|
||||||
bx: &mut Bx,
|
bx: &mut Bx,
|
||||||
|
|
|
@@ -1,28 +1,28 @@
+use super::{FunctionCx, LocalRef};
+use super::operand::OperandValue;

+use crate::MemFlags;
+use crate::common::IntPredicate;
+use crate::glue;
+use crate::traits::*;

use rustc::ty::{self, Instance, Ty};
use rustc::ty::layout::{self, Align, TyLayout, LayoutOf, VariantIdx, HasTyCtxt};
use rustc::mir;
use rustc::mir::tcx::PlaceTy;
-use crate::MemFlags;
-use crate::common::IntPredicate;
-use crate::glue;

-use crate::traits::*;

-use super::{FunctionCx, LocalRef};
-use super::operand::OperandValue;

#[derive(Copy, Clone, Debug)]
pub struct PlaceRef<'tcx, V> {
-    /// Pointer to the contents of the place.
+    /// A pointer to the contents of the place.
    pub llval: V,

-    /// This place's extra data if it is unsized, or null.
+    /// This place's extra data if it is unsized, or `None` if null.
    pub llextra: Option<V>,

-    /// Monomorphized type of this place, including variant information.
+    /// The monomorphized type of this place, including variant information.
    pub layout: TyLayout<'tcx>,

-    /// What alignment we know for this place.
+    /// The alignment we know for this place.
    pub align: Align,
}

@@ -107,7 +107,6 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
            bug!("unexpected layout `{:#?}` in PlaceRef::len", self.layout)
        }
    }

}

impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {

@@ -1,22 +1,22 @@
+use super::{FunctionCx, LocalRef};
+use super::operand::{OperandRef, OperandValue};
+use super::place::PlaceRef;

+use crate::base;
+use crate::MemFlags;
+use crate::common::{self, RealPredicate, IntPredicate};
+use crate::traits::*;

use rustc::ty::{self, Ty, adjustment::{PointerCast}, Instance};
use rustc::ty::cast::{CastTy, IntTy};
use rustc::ty::layout::{self, LayoutOf, HasTyCtxt};
use rustc::mir;
use rustc::middle::lang_items::ExchangeMallocFnLangItem;
use rustc_apfloat::{ieee, Float, Status, Round};
-use std::{u128, i128};
use syntax::symbol::sym;
use syntax::source_map::{DUMMY_SP, Span};

-use crate::base;
+use std::{u128, i128};
-use crate::MemFlags;
-use crate::common::{self, RealPredicate, IntPredicate};

-use crate::traits::*;

-use super::{FunctionCx, LocalRef};
-use super::operand::{OperandRef, OperandValue};
-use super::place::PlaceRef;

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    pub fn codegen_rvalue(

@@ -31,8 +31,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
        match *rvalue {
            mir::Rvalue::Use(ref operand) => {
                let cg_operand = self.codegen_operand(&mut bx, operand);
-               // FIXME: consider not copying constants through stack. (fixable by codegenning
-               // constants into OperandValue::Ref, why don’t we do that yet if we don’t?)
+               // FIXME: consider not copying constants through stack. (Fixable by codegen'ing
+               // constants into `OperandValue::Ref`; why don’t we do that yet if we don’t?)
                cg_operand.val.store(&mut bx, dest);
                bx
            }

@@ -41,7 +41,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                // The destination necessarily contains a fat pointer, so if
                // it's a scalar pair, it's a fat pointer or newtype thereof.
                if bx.cx().is_backend_scalar_pair(dest.layout) {
-                   // into-coerce of a thin pointer to a fat pointer - just
+                   // Into-coerce of a thin pointer to a fat pointer -- just
                    // use the operand path.
                    let (mut bx, temp) = self.codegen_rvalue_operand(bx, rvalue);
                    temp.val.store(&mut bx, dest);

@@ -56,10 +56,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                match operand.val {
                    OperandValue::Pair(..) |
                    OperandValue::Immediate(_) => {
-                       // unsize from an immediate structure. We don't
+                       // Unsize from an immediate structure. We don't
                        // really need a temporary alloca here, but
                        // avoiding it would require us to have
-                       // `coerce_unsized_into` use extractvalue to
+                       // `coerce_unsized_into` use `extractvalue` to
                        // index into the struct, and this case isn't
                        // important enough for it.
                        debug!("codegen_rvalue: creating ugly alloca");

@@ -74,7 +74,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                        base::coerce_unsized_into(&mut bx, source, dest);
                    }
                    OperandValue::Ref(_, Some(_), _) => {
-                       bug!("unsized coercion on an unsized rvalue")
+                       bug!("unsized coercion on an unsized rvalue");
                    }
                }
                bx

@@ -160,7 +160,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                bx
            }

-           _ => bug!("unsized assignment other than Rvalue::Use"),
+           _ => bug!("unsized assignment other than `Rvalue::Use`"),
        }
    }

@@ -220,17 +220,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                    }
                }
                mir::CastKind::Pointer(PointerCast::UnsafeFnPointer) => {
-                   // this is a no-op at the LLVM level
+                   // This is a no-op at the LLVM level.
                    operand.val
                }
                mir::CastKind::Pointer(PointerCast::Unsize) => {
                    assert!(bx.cx().is_backend_scalar_pair(cast));
                    match operand.val {
                        OperandValue::Pair(lldata, llextra) => {
-                           // unsize from a fat pointer - this is a
+                           // unsize from a fat pointer -- this is a
                            // "trait-object-to-supertrait" coercion, for
-                           // example,
-                           // &'a fmt::Debug+Send => &'a fmt::Debug,
+                           // example, `&'a fmt::Debug + Send => &'a fmt::Debug`.

                            // HACK(eddyb) have to bitcast pointers
                            // until LLVM removes pointee types.

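Editor's note (not part of the commit): the "trait-object-to-supertrait" coercion named in the comment above can be seen at the source level with a small illustrative example. The function name `upcast` and the `main` driver are hypothetical, added only as a sketch of the coercion; at runtime the fat pointer's data and vtable parts are carried over unchanged, only the static type loses the `Send` bound.

use std::fmt::Debug;

// Coercing `&(dyn Debug + Send)` to `&dyn Debug` is an unsizing coercion
// between fat pointers; the return expression is a coercion site.
fn upcast<'a>(x: &'a (dyn Debug + Send)) -> &'a dyn Debug {
    x
}

fn main() {
    let v = 42u32;
    let wide: &(dyn Debug + Send) = &v;
    println!("{:?}", upcast(wide));
}
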
@@ -245,13 +244,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                            OperandValue::Pair(lldata, llextra)
                        }
                        OperandValue::Ref(..) => {
-                           bug!("by-ref operand {:?} in codegen_rvalue_operand",
+                           bug!("by-ref operand {:?} in `codegen_rvalue_operand`",
                                 operand);
                        }
                    }
                }
-               mir::CastKind::Pointer(PointerCast::MutToConstPointer)
-               | mir::CastKind::Misc if bx.cx().is_backend_scalar_pair(operand.layout) => {
+               mir::CastKind::Pointer(PointerCast::MutToConstPointer) |
+               mir::CastKind::Misc if bx.cx().is_backend_scalar_pair(operand.layout) => {
                    if let OperandValue::Pair(data_ptr, meta) = operand.val {
                        if bx.cx().is_backend_scalar_pair(cast) {
                            let data_cast = bx.pointercast(data_ptr,

@@ -265,12 +264,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                            OperandValue::Immediate(llval)
                        }
                    } else {
-                       bug!("Unexpected non-Pair operand")
+                       bug!("unexpected non-pair operand");
                    }
                }
-               mir::CastKind::Pointer(PointerCast::MutToConstPointer)
-               | mir::CastKind::Pointer(PointerCast::ArrayToPointer)
-               | mir::CastKind::Misc => {
+               mir::CastKind::Pointer(PointerCast::MutToConstPointer) |
+               mir::CastKind::Pointer(PointerCast::ArrayToPointer) |
+               mir::CastKind::Misc => {
                    assert!(bx.cx().is_backend_immediate(cast));
                    let ll_t_out = bx.cx().immediate_backend_type(cast);
                    if operand.layout.abi.is_uninhabited() {

@@ -1,17 +1,18 @@
-use rustc::ty::layout::{HasTyCtxt, LayoutOf, TyLayout};
-use rustc::ty::Ty;

use super::write::WriteBackendMethods;
use super::CodegenObject;

+use rustc::ty::layout::{HasTyCtxt, LayoutOf, TyLayout};
+use rustc::ty::Ty;
use rustc::middle::cstore::EncodedMetadata;
use rustc::session::{Session, config};
use rustc::ty::TyCtxt;
use rustc_codegen_utils::codegen_backend::CodegenBackend;
-use std::sync::Arc;
-use std::sync::mpsc;
use syntax::expand::allocator::AllocatorKind;
use syntax_pos::symbol::Symbol;

+use std::sync::Arc;
+use std::sync::mpsc;

pub trait BackendTypes {
    type Value: CodegenObject;
    type Function: CodegenObject;

@@ -4,14 +4,17 @@ use super::debuginfo::DebugInfoBuilderMethods;
use super::intrinsic::IntrinsicCallMethods;
use super::type_::ArgAbiMethods;
use super::{HasCodegen, StaticBuilderMethods};

use crate::common::{AtomicOrdering, AtomicRmwBinOp, IntPredicate, RealPredicate,
                    SynchronizationScope};
use crate::mir::operand::OperandRef;
use crate::mir::place::PlaceRef;
use crate::MemFlags;

use rustc::ty::Ty;
use rustc::ty::layout::{Align, Size, HasParamEnv};
-use rustc_target::spec::{HasTargetSpec};
+use rustc_target::spec::HasTargetSpec;

use std::ops::Range;
use std::iter::TrustedLen;

@@ -41,9 +41,9 @@ pub use self::type_::{
    ArgAbiMethods, BaseTypeMethods, DerivedTypeMethods, LayoutTypeMethods, TypeMethods,
};
pub use self::write::{ModuleBufferMethods, ThinBufferMethods, WriteBackendMethods};
-use rustc::ty::layout::{HasParamEnv, HasTyCtxt};
-use rustc_target::spec::{HasTargetSpec};

+use rustc::ty::layout::{HasParamEnv, HasTyCtxt};
+use rustc_target::spec::HasTargetSpec;

use std::fmt;

@@ -212,7 +212,7 @@ impl Diagnostic {
        }));
        msg.push((format!("`{}", found_extra), Style::NoStyle));

-       // For now, just attach these as notes
+       // For now, just attach these as notes.
        self.highlighted_note(msg);
        self
    }

@@ -1,6 +1,7 @@
use crate::hair::{self, *};
use crate::hair::cx::Cx;
use crate::hair::cx::to_ref::ToRef;

use rustc::middle::region;
use rustc::hir;
use rustc::ty;

@@ -136,7 +136,7 @@ impl<Tag> Operand<Tag> {

#[derive(Copy, Clone, Debug, PartialEq)]
pub struct OpTy<'tcx, Tag=()> {
-   op: Operand<Tag>, // Keep this private, it helps enforce invariants
+   op: Operand<Tag>, // Keep this private; it helps enforce invariants.
    pub layout: TyLayout<'tcx>,
}

@@ -203,7 +203,7 @@ pub(super) fn from_known_layout<'tcx>(
    if cfg!(debug_assertions) {
        let layout2 = compute()?;
        assert_eq!(layout.details, layout2.details,
-           "Mismatch in layout of supposedly equal-layout types {:?} and {:?}",
+           "mismatch in layout of supposedly equal-layout types {:?} and {:?}",
            layout.ty, layout2.ty);
    }
    Ok(layout)

@@ -48,7 +48,7 @@ pub enum Place<Tag=(), Id=AllocId> {

#[derive(Copy, Clone, Debug)]
pub struct PlaceTy<'tcx, Tag=()> {
-   place: Place<Tag>, // Keep this private, it helps enforce invariants
+   place: Place<Tag>, // Keep this private; it helps enforce invariants.
    pub layout: TyLayout<'tcx>,
}

@@ -1,16 +1,16 @@
+use super::{InterpCx, Machine, MemoryKind, FnVal};

use rustc::ty::{self, Ty, Instance, TypeFoldable};
use rustc::ty::layout::{Size, Align, LayoutOf, HasDataLayout};
use rustc::mir::interpret::{Scalar, Pointer, InterpResult, PointerArithmetic,};

-use super::{InterpCx, Machine, MemoryKind, FnVal};

impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    /// Creates a dynamic vtable for the given type and vtable origin. This is used only for
    /// objects.
    ///
-   /// The `trait_ref` encodes the erased self type. Hence if we are
+   /// The `trait_ref` encodes the erased self type. Hence, if we are
    /// making an object `Foo<Trait>` from a value of type `Foo<T>`, then
-   /// `trait_ref` would map `T:Trait`.
+   /// `trait_ref` would map `T: Trait`.
    pub fn get_vtable(
        &mut self,
        ty: Ty<'tcx>,

@@ -51,7 +51,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
        let ptr_align = self.tcx.data_layout.pointer_align.abi;
        // /////////////////////////////////////////////////////////////////////////////////////////
        // If you touch this code, be sure to also make the corresponding changes to
-       // `get_vtable` in rust_codegen_llvm/meth.rs
+       // `get_vtable` in `rust_codegen_llvm/meth.rs`.
        // /////////////////////////////////////////////////////////////////////////////////////////
        let vtable = self.memory.allocate(
            ptr_size * (3 + methods.len() as u64),

@@ -97,16 +97,16 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
        Ok(vtable)
    }

-   /// Resolve the function at the specified slot in the provided
+   /// Resolves the function at the specified slot in the provided
    /// vtable. An index of '0' corresponds to the first method
-   /// declared in the trait of the provided vtable
+   /// declared in the trait of the provided vtable.
    pub fn get_vtable_slot(
        &self,
        vtable: Scalar<M::PointerTag>,
        idx: usize
    ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
        let ptr_size = self.pointer_size();
-       // Skip over the 'drop_ptr', 'size', and 'align' fields
+       // Skip over the 'drop_ptr', 'size', and 'align' fields.
        let vtable_slot = vtable.ptr_offset(ptr_size * (idx as u64 + 3), self)?;
        let vtable_slot = self.memory.check_ptr_access(
            vtable_slot,

@@ -118,12 +118,12 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
        Ok(self.memory.get_fn(fn_ptr)?)
    }

-   /// Returns the drop fn instance as well as the actual dynamic type
+   /// Returns the drop fn instance as well as the actual dynamic type.
    pub fn read_drop_type_from_vtable(
        &self,
        vtable: Scalar<M::PointerTag>,
    ) -> InterpResult<'tcx, (ty::Instance<'tcx>, Ty<'tcx>)> {
-       // we don't care about the pointee type, we just want a pointer
+       // We don't care about the pointee type; we just want a pointer.
        let vtable = self.memory.check_ptr_access(
            vtable,
            self.tcx.data_layout.pointer_size,

@@ -149,7 +149,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
        vtable: Scalar<M::PointerTag>,
    ) -> InterpResult<'tcx, (Size, Align)> {
        let pointer_size = self.pointer_size();
-       // We check for size = 3*ptr_size, that covers the drop fn (unused here),
+       // We check for `size = 3 * ptr_size`, which covers the drop fn (unused here),
        // the size, and the align (which we read below).
        let vtable = self.memory.check_ptr_access(
            vtable,

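Editor's note (not part of the commit): both `get_vtable` implementations touched above rely on the same layout, where the first three pointer-sized slots hold the drop-in-place fn, the size, and the align, and method `idx` lives at slot `3 + idx`. A minimal sketch of that arithmetic follows; the helper name `vtable_slot_offset` is hypothetical.

// Hypothetical helper mirroring the offset used by `get_vtable_slot` above.
// Vtable layout: [drop_in_place, size, align, method 0, method 1, ...].
fn vtable_slot_offset(ptr_size: u64, method_idx: u64) -> u64 {
    ptr_size * (3 + method_idx)
}

fn main() {
    // On a 64-bit target the first trait method sits 24 bytes into the vtable.
    assert_eq!(vtable_slot_offset(8, 0), 24);
}
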
@@ -1,5 +1,5 @@
//! Mono Item Collection
-//! ===========================
+//! ====================
//!
//! This module is responsible for discovering all items that will contribute to
//! to code generation of the crate. The important part here is that it not only

@@ -174,9 +174,10 @@
//! this is not implemented however: a mono item will be produced
//! regardless of whether it is actually needed or not.

+use crate::monomorphize;

use rustc::hir::{self, CodegenFnAttrFlags};
use rustc::hir::itemlikevisit::ItemLikeVisitor;

use rustc::hir::def_id::{DefId, LOCAL_CRATE};
use rustc::mir::interpret::{AllocId, ConstValue};
use rustc::middle::lang_items::{ExchangeMallocFnLangItem, StartFnLangItem};

@@ -189,8 +190,6 @@ use rustc::mir::{self, Location, PlaceBase, Static, StaticKind};
use rustc::mir::visit::Visitor as MirVisitor;
use rustc::mir::mono::{MonoItem, InstantiationMode};
use rustc::mir::interpret::{Scalar, GlobalId, GlobalAlloc, ErrorHandled};

-use crate::monomorphize;
use rustc::util::nodemap::{FxHashSet, FxHashMap, DefIdMap};
use rustc::util::common::time;

@@ -530,7 +529,6 @@ struct MirNeighborCollector<'a, 'tcx> {
}

impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {

    fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
        debug!("visiting rvalue {:?}", *rvalue);

@@ -698,7 +696,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
            }
        }
        PlaceBase::Local(_) => {
-           // Locals have no relevance for collector
+           // Locals have no relevance for collector.
        }
    }
}

@@ -752,7 +750,7 @@ fn visit_instance_use<'tcx>(
        ty::InstanceDef::ReifyShim(..) |
        ty::InstanceDef::Virtual(..) |
        ty::InstanceDef::DropGlue(_, None) => {
-           // don't need to emit shim if we are calling directly.
+           // Don't need to emit shim if we are calling directly.
            if !is_direct_call {
                output.push(create_fn_mono_item(instance));
            }

@@ -769,8 +767,8 @@
        }
    }

-// Returns true if we should codegen an instance in the local crate.
-// Returns false if we can just link to the upstream crate and therefore don't
+// Returns `true` if we should codegen an instance in the local crate.
+// Returns `false` if we can just link to the upstream crate and therefore don't
// need a mono item.
fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx>) -> bool {
    let def_id = match instance.def {

@@ -786,24 +784,24 @@ fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx
    };

    if tcx.is_foreign_item(def_id) {
-       // We can always link to foreign items
+       // We can always link to foreign items.
        return false;
    }

    if def_id.is_local() {
-       // local items cannot be referred to locally without monomorphizing them locally
+       // Local items cannot be referred to locally without monomorphizing them locally.
        return true;
    }

    if tcx.is_reachable_non_generic(def_id) ||
        is_available_upstream_generic(tcx, def_id, instance.substs) {
        // We can link to the item in question, no instance needed
-       // in this crate
+       // in this crate.
        return false;
    }

    if !tcx.is_mir_available(def_id) {
-       bug!("Cannot create local mono-item for {:?}", def_id)
+       bug!("cannot create local mono-item for {:?}", def_id)
    }
    return true;

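Editor's note (not part of the commit): the comments in `should_monomorphize_locally` above describe a short decision chain. The following is a hedged sketch of that order only; the `ItemInfo` struct and its flags are illustrative stand-ins for the real compiler queries (`is_foreign_item`, `DefId::is_local`, `is_reachable_non_generic` / upstream-generic availability, `is_mir_available`).

struct ItemInfo {
    is_foreign: bool,
    is_local: bool,
    linkable_upstream: bool,
    mir_available: bool,
}

// Returns `true` if the instance should be codegenned in the local crate,
// mirroring the order of the checks above.
fn should_monomorphize_locally(item: &ItemInfo) -> bool {
    if item.is_foreign {
        return false; // we can always link to foreign items
    }
    if item.is_local {
        return true; // local items must be monomorphized locally
    }
    if item.linkable_upstream {
        return false; // link to the upstream crate instead
    }
    if !item.mir_available {
        panic!("cannot create local mono-item"); // the real code uses `bug!`
    }
    true
}

fn main() {
    let local = ItemInfo { is_foreign: false, is_local: true, linkable_upstream: false, mir_available: true };
    assert!(should_monomorphize_locally(&local));
}
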
@@ -823,7 +821,7 @@ fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx

    // If this instance has non-erasable parameters, it cannot be a shared
    // monomorphization. Non-generic instances are already handled above
-   // by `is_reachable_non_generic()`
+   // by `is_reachable_non_generic()`.
    if substs.non_erasable_generics().next().is_none() {
        return false
    }

@@ -836,7 +834,7 @@ fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx
    }
}

-/// For given pair of source and target type that occur in an unsizing coercion,
+/// For a given pair of source and target type that occur in an unsizing coercion,
/// this function finds the pair of types that determines the vtable linking
/// them.
///

@@ -930,10 +928,9 @@ fn find_vtable_types_for_unsizing<'tcx>(
                    source_fields.len() == target_fields.len());

            find_vtable_types_for_unsizing(tcx,
-               source_fields[coerce_index].ty(tcx,
-                                              source_substs),
-               target_fields[coerce_index].ty(tcx,
-                                              target_substs))
+               source_fields[coerce_index].ty(tcx, source_substs),
+               target_fields[coerce_index].ty(tcx, target_substs)
+           )
        }
        _ => bug!("find_vtable_types_for_unsizing: invalid coercion {:?} -> {:?}",
                  source_ty,

@@ -975,7 +972,7 @@ fn create_mono_items_for_vtable_methods<'tcx>(
            output.extend(methods);
        }

        // Also add the destructor
-       // Also add the destructor
+       // Also add the destructor.
        visit_drop_use(tcx, impl_ty, false, output);
    }
}

|
||||||
fn visit_item(&mut self, item: &'v hir::Item) {
|
fn visit_item(&mut self, item: &'v hir::Item) {
|
||||||
match item.kind {
|
match item.kind {
|
||||||
hir::ItemKind::ExternCrate(..) |
|
hir::ItemKind::ExternCrate(..) |
|
||||||
hir::ItemKind::Use(..) |
|
hir::ItemKind::Use(..) |
|
||||||
hir::ItemKind::ForeignMod(..) |
|
hir::ItemKind::ForeignMod(..) |
|
||||||
hir::ItemKind::TyAlias(..) |
|
hir::ItemKind::TyAlias(..) |
|
||||||
hir::ItemKind::Trait(..) |
|
hir::ItemKind::Trait(..) |
|
||||||
hir::ItemKind::TraitAlias(..) |
|
hir::ItemKind::TraitAlias(..) |
|
||||||
hir::ItemKind::OpaqueTy(..) |
|
hir::ItemKind::OpaqueTy(..) |
|
||||||
hir::ItemKind::Mod(..) => {
|
hir::ItemKind::Mod(..) => {
|
||||||
// Nothing to do, just keep recursing...
|
// Nothing to do, just keep recursing.
|
||||||
}
|
}
|
||||||
|
|
||||||
hir::ItemKind::Impl(..) => {
|
hir::ItemKind::Impl(..) => {
|
||||||
|
@@ -1075,7 +1072,7 @@ impl ItemLikeVisitor<'v> for RootCollector<'_, 'v> {
                let def_id = self.tcx.hir().local_def_id(ii.hir_id);
                self.push_if_root(def_id);
            }
-           _ => { /* Nothing to do here */ }
+           _ => { /* nothing to do here */ }
        }
    }
}

@@ -1095,7 +1092,7 @@ impl RootCollector<'_, 'v> {
        }
    }

-   /// If `def_id` represents a root, then push it onto the list of
+   /// If `def_id` represents a root, pushes it onto the list of
    /// outputs. (Note that all roots must be monomorphic.)
    fn push_if_root(&mut self, def_id: DefId) {
        if self.is_root(def_id) {

@@ -1217,7 +1214,7 @@ fn create_mono_items_for_default_impls<'tcx>(
    }
}

-/// Scan the miri alloc in order to find function calls, closures, and drop-glue
+/// Scans the miri alloc in order to find function calls, closures, and drop-glue.
fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut Vec<MonoItem<'tcx>>) {
    let alloc_kind = tcx.alloc_map.lock().get(alloc_id);
    match alloc_kind {

@@ -1244,7 +1241,7 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut Vec<Mon
    }
}

-/// Scan the MIR in order to find function calls, closures, and drop-glue
+/// Scans the MIR in order to find function calls, closures, and drop-glue.
fn collect_neighbours<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: Instance<'tcx>,

@@ -22,7 +22,7 @@ pub fn custom_coerce_unsize_info<'tcx>(
            tcx.coerce_unsized_info(impl_def_id).custom_kind.unwrap()
        }
        vtable => {
-           bug!("invalid CoerceUnsized vtable: {:?}", vtable);
+           bug!("invalid `CoerceUnsized` vtable: {:?}", vtable);
        }
    }
}

@@ -3,6 +3,7 @@
// substitutions.

use crate::check::FnCtxt;

use rustc::hir;
use rustc::hir::def_id::{DefId, DefIndex};
use rustc::hir::intravisit::{self, NestedVisitorMap, Visitor};

|
||||||
use rustc::ty::{self, Ty, TyCtxt};
|
use rustc::ty::{self, Ty, TyCtxt};
|
||||||
use rustc::util::nodemap::DefIdSet;
|
use rustc::util::nodemap::DefIdSet;
|
||||||
use rustc_data_structures::sync::Lrc;
|
use rustc_data_structures::sync::Lrc;
|
||||||
use std::mem;
|
|
||||||
use syntax::symbol::sym;
|
use syntax::symbol::sym;
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
|
|
||||||
|
use std::mem;
|
||||||
|
|
||||||
///////////////////////////////////////////////////////////////////////////
|
///////////////////////////////////////////////////////////////////////////
|
||||||
// Entry point
|
// Entry point
|
||||||
|
|
||||||
|
@@ -481,8 +483,10 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
        if let ty::Opaque(defin_ty_def_id, _substs) = definition_ty.kind {
            if let hir::OpaqueTyOrigin::TypeAlias = opaque_defn.origin {
                if def_id == defin_ty_def_id {
-                   debug!("Skipping adding concrete definition for opaque type {:?} {:?}",
-                          opaque_defn, defin_ty_def_id);
+                   debug!(
+                       "skipping adding concrete definition for opaque type {:?} {:?}",
+                       opaque_defn, defin_ty_def_id
+                   );
                    skip_add = true;
                }
            }
        }

|
||||||
if old.concrete_type != definition_ty || old.substs != opaque_defn.substs {
|
if old.concrete_type != definition_ty || old.substs != opaque_defn.substs {
|
||||||
span_bug!(
|
span_bug!(
|
||||||
span,
|
span,
|
||||||
"visit_opaque_types tried to write different types for the same \
|
"`visit_opaque_types` tried to write different types for the same \
|
||||||
opaque type: {:?}, {:?}, {:?}, {:?}",
|
opaque type: {:?}, {:?}, {:?}, {:?}",
|
||||||
def_id,
|
def_id,
|
||||||
definition_ty,
|
definition_ty,
|
||||||
opaque_defn,
|
opaque_defn,
|
||||||
|
|
|
@@ -1,6 +1,5 @@
-// Check that we can manually implement an object
-// unsafe trait for its trait object
-//
+// Check that we can manually implement an object-unsafe trait for its trait object.
// run-pass

#![feature(object_safe_for_dispatch)]

|
||||||
|
|
||||||
let mut res = String::new();
|
let mut res = String::new();
|
||||||
|
|
||||||
// Directly call static
|
// Directly call static.
|
||||||
res.push(Struct::stat()); // "A"
|
res.push(Struct::stat()); // "A"
|
||||||
res.push(<dyn Bad>::stat()); // "AC"
|
res.push(<dyn Bad>::stat()); // "AC"
|
||||||
|
|
||||||
|
@ -55,15 +54,13 @@ fn main() {
|
||||||
// These look similar enough...
|
// These look similar enough...
|
||||||
let bad = unsafe { std::mem::transmute::<&dyn Good, &dyn Bad>(good) };
|
let bad = unsafe { std::mem::transmute::<&dyn Good, &dyn Bad>(good) };
|
||||||
|
|
||||||
// Call virtual
|
// Call virtual.
|
||||||
res.push(s.virt()); // "ACB"
|
res.push(s.virt()); // "ACB"
|
||||||
res.push(bad.virt()); // "ACBD"
|
res.push(bad.virt()); // "ACBD"
|
||||||
|
|
||||||
// Indirectly call static
|
// Indirectly call static.
|
||||||
res.push(s.indirect()); // "ACBDA"
|
res.push(s.indirect()); // "ACBDA"
|
||||||
res.push(bad.indirect()); // "ACBDAC"
|
res.push(bad.indirect()); // "ACBDAC"
|
||||||
|
|
||||||
if &res != "ACBDAC" {
|
assert_eq!(&res, "ACBDAC");
|
||||||
panic!();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@@ -1,5 +1,5 @@
// run-pass
-// Check that trait-objects without a principal codegen properly.
+// Check that trait objects without a principal codegen properly.

use std::sync::atomic::{AtomicUsize, Ordering};
use std::mem;

|
||||||
struct SetOnDrop<'a>(&'a AtomicUsize, [u8; 64]);
|
struct SetOnDrop<'a>(&'a AtomicUsize, [u8; 64]);
|
||||||
impl<'a> Drop for SetOnDrop<'a> {
|
impl<'a> Drop for SetOnDrop<'a> {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
self.0.store(self.0.load(Ordering::Relaxed)+1, Ordering::Relaxed);
|
self.0.store(self.0.load(Ordering::Relaxed) + 1, Ordering::Relaxed);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|