Auto merge of #33632 - Manishearth:rollup, r=Manishearth

Rollup of 9 pull requests - Successful merges: #33544, #33552, #33554, #33555, #33560, #33566, #33572, #33574, #33576 - Failed merges:

commit 6ba8a1a657
38 changed files with 1185 additions and 946 deletions

@@ -239,7 +239,7 @@ pub enum Vtable<'tcx, N> {
     VtableParam(Vec<N>),

     /// Virtual calls through an object
-    VtableObject(VtableObjectData<'tcx>),
+    VtableObject(VtableObjectData<'tcx, N>),

     /// Successful resolution for a builtin trait.
     VtableBuiltin(VtableBuiltinData<N>),
@@ -250,7 +250,7 @@ pub enum Vtable<'tcx, N> {
     VtableClosure(VtableClosureData<'tcx, N>),

     /// Same as above, but for a fn pointer type with the given signature.
-    VtableFnPointer(ty::Ty<'tcx>),
+    VtableFnPointer(VtableFnPointerData<'tcx, N>),
 }

 /// Identifies a particular impl in the source, along with a set of
@@ -293,14 +293,22 @@ pub struct VtableBuiltinData<N> {
 /// A vtable for some object-safe trait `Foo` automatically derived
 /// for the object type `Foo`.
 #[derive(PartialEq,Eq,Clone)]
-pub struct VtableObjectData<'tcx> {
+pub struct VtableObjectData<'tcx, N> {
     /// `Foo` upcast to the obligation trait. This will be some supertrait of `Foo`.
     pub upcast_trait_ref: ty::PolyTraitRef<'tcx>,

     /// The vtable is formed by concatenating together the method lists of
     /// the base object trait and all supertraits; this is the start of
     /// `upcast_trait_ref`'s methods in that vtable.
-    pub vtable_base: usize
+    pub vtable_base: usize,
+
+    pub nested: Vec<N>,
+}
+
+#[derive(Clone, PartialEq, Eq)]
+pub struct VtableFnPointerData<'tcx, N> {
+    pub fn_ty: ty::Ty<'tcx>,
+    pub nested: Vec<N>
 }

 /// Creates predicate obligations from the generic bounds.
@@ -569,7 +577,20 @@ impl<'tcx, N> Vtable<'tcx, N> {
             VtableBuiltin(i) => i.nested,
             VtableDefaultImpl(d) => d.nested,
             VtableClosure(c) => c.nested,
-            VtableObject(_) | VtableFnPointer(..) => vec![]
+            VtableObject(d) => d.nested,
+            VtableFnPointer(d) => d.nested,
+        }
+    }
+
+    fn nested_obligations_mut(&mut self) -> &mut Vec<N> {
+        match self {
+            &mut VtableImpl(ref mut i) => &mut i.nested,
+            &mut VtableParam(ref mut n) => n,
+            &mut VtableBuiltin(ref mut i) => &mut i.nested,
+            &mut VtableDefaultImpl(ref mut d) => &mut d.nested,
+            &mut VtableClosure(ref mut c) => &mut c.nested,
+            &mut VtableObject(ref mut d) => &mut d.nested,
+            &mut VtableFnPointer(ref mut d) => &mut d.nested,
         }
     }

@@ -578,18 +599,25 @@ impl<'tcx, N> Vtable<'tcx, N> {
             VtableImpl(i) => VtableImpl(VtableImplData {
                 impl_def_id: i.impl_def_id,
                 substs: i.substs,
-                nested: i.nested.into_iter().map(f).collect()
+                nested: i.nested.into_iter().map(f).collect(),
             }),
             VtableParam(n) => VtableParam(n.into_iter().map(f).collect()),
             VtableBuiltin(i) => VtableBuiltin(VtableBuiltinData {
-                nested: i.nested.into_iter().map(f).collect()
+                nested: i.nested.into_iter().map(f).collect(),
+            }),
+            VtableObject(o) => VtableObject(VtableObjectData {
+                upcast_trait_ref: o.upcast_trait_ref,
+                vtable_base: o.vtable_base,
+                nested: o.nested.into_iter().map(f).collect(),
             }),
-            VtableObject(o) => VtableObject(o),
             VtableDefaultImpl(d) => VtableDefaultImpl(VtableDefaultImplData {
                 trait_def_id: d.trait_def_id,
-                nested: d.nested.into_iter().map(f).collect()
+                nested: d.nested.into_iter().map(f).collect(),
+            }),
+            VtableFnPointer(p) => VtableFnPointer(VtableFnPointerData {
+                fn_ty: p.fn_ty,
+                nested: p.nested.into_iter().map(f).collect(),
             }),
-            VtableFnPointer(f) => VtableFnPointer(f),
             VtableClosure(c) => VtableClosure(VtableClosureData {
                 closure_def_id: c.closure_def_id,
                 substs: c.substs,
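To make the shape of this refactor concrete: after these hunks every `Vtable` variant owns its nested obligations, instead of the object and fn-pointer variants returning an empty `Vec`. Below is a minimal standalone sketch of that pattern, using hypothetical simplified types rather than rustc's actual definitions; it compiles on its own.

// Hypothetical, simplified stand-ins for the rustc types touched above. The
// point is only that every variant owns a `nested: Vec<N>` that callers can
// drain or extend, instead of some variants having nothing to propagate.
struct ObjectData<N> {
    vtable_base: usize,
    nested: Vec<N>,
}

struct FnPointerData<N> {
    fn_name: &'static str, // stands in for the fn type
    nested: Vec<N>,
}

enum Vtable<N> {
    Object(ObjectData<N>),
    FnPointer(FnPointerData<N>),
    Param(Vec<N>),
}

impl<N> Vtable<N> {
    // Mirrors `nested_obligations`: consume the vtable, hand back its obligations.
    fn nested_obligations(self) -> Vec<N> {
        match self {
            Vtable::Object(d) => d.nested,
            Vtable::FnPointer(d) => d.nested,
            Vtable::Param(n) => n,
        }
    }

    // Mirrors the new `nested_obligations_mut`: lets a caller append
    // obligations discovered later (e.g. during inference) in place.
    fn nested_obligations_mut(&mut self) -> &mut Vec<N> {
        match self {
            Vtable::Object(d) => &mut d.nested,
            Vtable::FnPointer(d) => &mut d.nested,
            Vtable::Param(n) => n,
        }
    }
}

fn main() {
    let mut vtable = Vtable::Object(ObjectData { vtable_base: 3, nested: vec!["Sized(T)"] });
    vtable.nested_obligations_mut().push("T: 'static");
    assert_eq!(vtable.nested_obligations(), vec!["Sized(T)", "T: 'static"]);
}

The `nested_obligations_mut` accessor added above is what later lets selection append inferred obligations to whichever vtable was chosen.
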
@@ -19,6 +19,7 @@ use super::PredicateObligation;
 use super::SelectionContext;
 use super::SelectionError;
 use super::VtableClosureData;
+use super::VtableFnPointerData;
 use super::VtableImplData;
 use super::util;

@@ -158,7 +159,7 @@ enum ProjectionTyCandidate<'tcx> {
     Closure(VtableClosureData<'tcx, PredicateObligation<'tcx>>),

     // fn pointer return type
-    FnPointer(Ty<'tcx>),
+    FnPointer(VtableFnPointerData<'tcx, PredicateObligation<'tcx>>),
 }

 struct ProjectionTyCandidateSet<'tcx> {
@@ -218,10 +219,7 @@ fn project_and_unify_type<'cx, 'gcx, 'tcx>(
                                            obligation.cause.clone(),
                                            obligation.recursion_depth) {
         Some(n) => n,
-        None => {
-            consider_unification_despite_ambiguity(selcx, obligation);
-            return Ok(None);
-        }
+        None => return Ok(None),
     };

     debug!("project_and_unify_type: normalized_ty={:?} obligations={:?}",
@@ -240,59 +238,6 @@ fn project_and_unify_type<'cx, 'gcx, 'tcx>(
     }
 }

-fn consider_unification_despite_ambiguity<'cx, 'gcx, 'tcx>(
-    selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
-    obligation: &ProjectionObligation<'tcx>)
-{
-    debug!("consider_unification_despite_ambiguity(obligation={:?})",
-           obligation);
-
-    let def_id = obligation.predicate.projection_ty.trait_ref.def_id;
-    match selcx.tcx().lang_items.fn_trait_kind(def_id) {
-        Some(_) => { }
-        None => { return; }
-    }
-
-    let infcx = selcx.infcx();
-    let self_ty = obligation.predicate.projection_ty.trait_ref.self_ty();
-    let self_ty = infcx.shallow_resolve(self_ty);
-    debug!("consider_unification_despite_ambiguity: self_ty.sty={:?}",
-           self_ty.sty);
-    match self_ty.sty {
-        ty::TyClosure(closure_def_id, substs) => {
-            let closure_typer = selcx.closure_typer();
-            let closure_type = closure_typer.closure_type(closure_def_id, substs);
-            let ty::Binder((_, ret_type)) =
-                infcx.tcx.closure_trait_ref_and_return_type(def_id,
-                                                            self_ty,
-                                                            &closure_type.sig,
-                                                            util::TupleArgumentsFlag::No);
-            // We don't have to normalize the return type here - this is only
-            // reached for TyClosure: Fn inputs where the closure kind is
-            // still unknown, which should only occur in typeck where the
-            // closure type is already normalized.
-            let (ret_type, _) =
-                infcx.replace_late_bound_regions_with_fresh_var(
-                    obligation.cause.span,
-                    infer::AssocTypeProjection(obligation.predicate.projection_ty.item_name),
-                    &ty::Binder(ret_type));
-
-            debug!("consider_unification_despite_ambiguity: ret_type={:?}",
-                   ret_type);
-            let origin = TypeOrigin::RelateOutputImplTypes(obligation.cause.span);
-            let obligation_ty = obligation.predicate.ty;
-            match infcx.eq_types(true, origin, obligation_ty, ret_type) {
-                Ok(InferOk { obligations, .. }) => {
-                    // FIXME(#32730) propagate obligations
-                    assert!(obligations.is_empty());
-                }
-                Err(_) => { /* ignore errors */ }
-            }
-        }
-        _ => { }
-    }
-}
-
 /// Normalizes any associated type projections in `value`, replacing
 /// them with a fully resolved type where possible. The return value
 /// combines the normalized result and any additional obligations that
@@ -929,9 +874,9 @@ fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>(
             candidate_set.vec.push(
                 ProjectionTyCandidate::Closure(data));
         }
-        super::VtableFnPointer(fn_type) => {
+        super::VtableFnPointer(data) => {
             candidate_set.vec.push(
-                ProjectionTyCandidate::FnPointer(fn_type));
+                ProjectionTyCandidate::FnPointer(data));
         }
         super::VtableParam(..) => {
             // This case tell us nothing about the value of an
@@ -997,8 +942,8 @@ fn confirm_candidate<'cx, 'gcx, 'tcx>(
             confirm_closure_candidate(selcx, obligation, closure_vtable)
         }

-        ProjectionTyCandidate::FnPointer(fn_type) => {
-            confirm_fn_pointer_candidate(selcx, obligation, fn_type)
+        ProjectionTyCandidate::FnPointer(fn_pointer_vtable) => {
+            confirm_fn_pointer_candidate(selcx, obligation, fn_pointer_vtable)
         }
     }
 }
@@ -1006,10 +951,13 @@ fn confirm_candidate<'cx, 'gcx, 'tcx>(
 fn confirm_fn_pointer_candidate<'cx, 'gcx, 'tcx>(
     selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
     obligation: &ProjectionTyObligation<'tcx>,
-    fn_type: Ty<'tcx>)
+    fn_pointer_vtable: VtableFnPointerData<'tcx, PredicateObligation<'tcx>>)
     -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>)
 {
-    let fn_type = selcx.infcx().shallow_resolve(fn_type);
+    // FIXME(#32730) propagate obligations (fn pointer vtable nested obligations ONLY come from
+    // unification in inference)
+    assert!(fn_pointer_vtable.nested.is_empty());
+    let fn_type = selcx.infcx().shallow_resolve(fn_pointer_vtable.fn_ty);
     let sig = fn_type.fn_sig();
     confirm_callable_candidate(selcx, obligation, sig, util::TupleArgumentsFlag::Yes)
 }
@@ -30,7 +30,7 @@ use super::SelectionResult;
 use super::{VtableBuiltin, VtableImpl, VtableParam, VtableClosure,
             VtableFnPointer, VtableObject, VtableDefaultImpl};
 use super::{VtableImplData, VtableObjectData, VtableBuiltinData,
-            VtableClosureData, VtableDefaultImplData};
+            VtableClosureData, VtableDefaultImplData, VtableFnPointerData};
 use super::util;

 use hir::def_id::DefId;
@@ -42,13 +42,24 @@ use traits;
 use ty::fast_reject;
 use ty::relate::TypeRelation;

+use rustc_data_structures::snapshot_vec::{SnapshotVecDelegate, SnapshotVec};
 use std::cell::RefCell;
 use std::fmt;
+use std::marker::PhantomData;
 use std::rc::Rc;
 use syntax::abi::Abi;
 use hir;
 use util::nodemap::FnvHashMap;

+struct InferredObligationsSnapshotVecDelegate<'tcx> {
+    phantom: PhantomData<&'tcx i32>,
+}
+impl<'tcx> SnapshotVecDelegate for InferredObligationsSnapshotVecDelegate<'tcx> {
+    type Value = PredicateObligation<'tcx>;
+    type Undo = ();
+    fn reverse(_: &mut Vec<Self::Value>, _: Self::Undo) {}
+}
+
 pub struct SelectionContext<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> {
     infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,

@@ -74,6 +85,8 @@ pub struct SelectionContext<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> {
     /// there is no type that the user could *actually name* that
     /// would satisfy it. This avoids crippling inference, basically.
     intercrate: bool,
+
+    inferred_obligations: SnapshotVec<InferredObligationsSnapshotVecDelegate<'tcx>>,
 }

 // A stack that walks back up the stack frame.
@@ -300,6 +313,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
             infcx: infcx,
             freshener: infcx.freshener(),
             intercrate: false,
+            inferred_obligations: SnapshotVec::new(),
         }
     }

@@ -308,6 +322,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
             infcx: infcx,
             freshener: infcx.freshener(),
             intercrate: true,
+            inferred_obligations: SnapshotVec::new(),
         }
     }

@@ -331,6 +346,46 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
         self.infcx.projection_mode()
     }

+    /// Wraps the inference context's in_snapshot s.t. snapshot handling is only from the selection
+    /// context's self.
+    fn in_snapshot<R, F>(&mut self, f: F) -> R
+        where F: FnOnce(&mut Self, &infer::CombinedSnapshot) -> R
+    {
+        // The irrefutable nature of the operation means we don't need to snapshot the
+        // inferred_obligations vector.
+        self.infcx.in_snapshot(|snapshot| f(self, snapshot))
+    }
+
+    /// Wraps a probe s.t. obligations collected during it are ignored and old obligations are
+    /// retained.
+    fn probe<R, F>(&mut self, f: F) -> R
+        where F: FnOnce(&mut Self, &infer::CombinedSnapshot) -> R
+    {
+        let inferred_obligations_snapshot = self.inferred_obligations.start_snapshot();
+        let result = self.infcx.probe(|snapshot| f(self, snapshot));
+        self.inferred_obligations.rollback_to(inferred_obligations_snapshot);
+        result
+    }
+
+    /// Wraps a commit_if_ok s.t. obligations collected during it are not returned in selection if
+    /// the transaction fails and s.t. old obligations are retained.
+    fn commit_if_ok<T, E, F>(&mut self, f: F) -> Result<T, E> where
+        F: FnOnce(&mut Self, &infer::CombinedSnapshot) -> Result<T, E>
+    {
+        let inferred_obligations_snapshot = self.inferred_obligations.start_snapshot();
+        match self.infcx.commit_if_ok(|snapshot| f(self, snapshot)) {
+            Ok(ok) => {
+                self.inferred_obligations.commit(inferred_obligations_snapshot);
+                Ok(ok)
+            },
+            Err(err) => {
+                self.inferred_obligations.rollback_to(inferred_obligations_snapshot);
+                Err(err)
+            }
+        }
+    }
+
 ///////////////////////////////////////////////////////////////////////////
 // Selection
 //
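The three wrappers above follow one transactional pattern: obligations recorded into `inferred_obligations` during speculative work are rolled back by `probe`, kept only on success by `commit_if_ok`, and passed straight through by `in_snapshot`. Here is a minimal standalone sketch of that pattern, assuming a hypothetical `Ctx` type with a plain `Vec` in place of rustc's `SnapshotVec`; it is an illustration, not the actual implementation.

// Hypothetical context holding a side vector of "obligations" gathered during
// speculative work. probe() discards whatever the closure added; commit_if_ok()
// keeps additions only when the closure succeeds.
struct Ctx {
    inferred_obligations: Vec<String>,
}

impl Ctx {
    fn probe<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> R {
        let snapshot_len = self.inferred_obligations.len();
        let result = f(self);
        // Roll back: anything recorded during the probe is thrown away.
        self.inferred_obligations.truncate(snapshot_len);
        result
    }

    fn commit_if_ok<T, E>(&mut self, f: impl FnOnce(&mut Self) -> Result<T, E>) -> Result<T, E> {
        let snapshot_len = self.inferred_obligations.len();
        let result = f(self);
        if result.is_err() {
            // Failed transaction: drop obligations recorded inside it.
            self.inferred_obligations.truncate(snapshot_len);
        }
        result
    }
}

fn main() {
    let mut cx = Ctx { inferred_obligations: vec![] };

    cx.probe(|this| this.inferred_obligations.push("speculative".into()));
    assert!(cx.inferred_obligations.is_empty());

    let r: Result<(), ()> = cx.commit_if_ok(|this| {
        this.inferred_obligations.push("kept".into());
        Ok(())
    });
    assert!(r.is_ok());
    assert_eq!(cx.inferred_obligations, vec!["kept".to_string()]);
}

In the diff itself the same rollback/commit decision is also applied to the inference context's own snapshot, so the two stores stay in sync; that is why the wrappers hand the closure a `&mut Self` rather than borrowing `self.infcx` separately.
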
@@ -359,7 +414,15 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
         let stack = self.push_stack(TraitObligationStackList::empty(), obligation);
         match self.candidate_from_obligation(&stack)? {
             None => Ok(None),
-            Some(candidate) => Ok(Some(self.confirm_candidate(obligation, candidate)?)),
+            Some(candidate) => {
+                let mut candidate = self.confirm_candidate(obligation, candidate)?;
+                // FIXME(#32730) remove this assertion once inferred obligations are propagated
+                // from inference
+                assert!(self.inferred_obligations.len() == 0);
+                let inferred_obligations = (*self.inferred_obligations).into_iter().cloned();
+                candidate.nested_obligations_mut().extend(inferred_obligations);
+                Ok(Some(candidate))
+            },
         }
     }

@@ -381,8 +444,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
         debug!("evaluate_obligation({:?})",
                obligation);

-        self.infcx.probe(|_| {
-            self.evaluate_predicate_recursively(TraitObligationStackList::empty(), obligation)
+        self.probe(|this, _| {
+            this.evaluate_predicate_recursively(TraitObligationStackList::empty(), obligation)
                 .may_apply()
         })
     }
@@ -397,8 +460,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
         debug!("evaluate_obligation_conservatively({:?})",
                obligation);

-        self.infcx.probe(|_| {
-            self.evaluate_predicate_recursively(TraitObligationStackList::empty(), obligation)
+        self.probe(|this, _| {
+            this.evaluate_predicate_recursively(TraitObligationStackList::empty(), obligation)
                 == EvaluatedToOk
         })
     }
@@ -460,8 +523,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                 // does this code ever run?
                 match self.infcx.equality_predicate(obligation.cause.span, p) {
                     Ok(InferOk { obligations, .. }) => {
-                        // FIXME(#32730) propagate obligations
-                        assert!(obligations.is_empty());
+                        self.inferred_obligations.extend(obligations);
                         EvaluatedToOk
                     },
                     Err(_) => EvaluatedToErr
@@ -643,11 +705,11 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
     {
         debug!("evaluate_candidate: depth={} candidate={:?}",
                stack.obligation.recursion_depth, candidate);
-        let result = self.infcx.probe(|_| {
+        let result = self.probe(|this, _| {
             let candidate = (*candidate).clone();
-            match self.confirm_candidate(stack.obligation, candidate) {
+            match this.confirm_candidate(stack.obligation, candidate) {
                 Ok(selection) => {
-                    self.evaluate_predicates_recursively(
+                    this.evaluate_predicates_recursively(
                         stack.list(),
                         selection.nested_obligations().iter())
                 }
@@ -1107,8 +1169,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
         debug!("assemble_candidates_for_projected_tys: trait_def_id={:?}",
                trait_def_id);

-        let result = self.infcx.probe(|snapshot| {
-            self.match_projection_obligation_against_bounds_from_trait(obligation,
+        let result = self.probe(|this, snapshot| {
+            this.match_projection_obligation_against_bounds_from_trait(obligation,
                                                                        snapshot)
         });

@@ -1156,12 +1218,12 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
             util::elaborate_predicates(self.tcx(), bounds.predicates.into_vec())
             .filter_to_traits()
             .find(
-                |bound| self.infcx.probe(
-                    |_| self.match_projection(obligation,
+                |bound| self.probe(
+                    |this, _| this.match_projection(obligation,
                                               bound.clone(),
                                               skol_trait_predicate.trait_ref.clone(),
                                               &skol_map,
                                               snapshot)));

         debug!("match_projection_obligation_against_bounds_from_trait: \
                 matching_bound={:?}",
@@ -1196,8 +1258,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                                       trait_bound.clone(),
                                       ty::Binder(skol_trait_ref.clone())) {
             Ok(InferOk { obligations, .. }) => {
-                // FIXME(#32730) propagate obligations
-                assert!(obligations.is_empty());
+                self.inferred_obligations.extend(obligations);
             }
             Err(_) => { return false; }
         }
|
@ -1239,10 +1300,10 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
|
||||||
where_clause_trait_ref: ty::PolyTraitRef<'tcx>)
|
where_clause_trait_ref: ty::PolyTraitRef<'tcx>)
|
||||||
-> EvaluationResult
|
-> EvaluationResult
|
||||||
{
|
{
|
||||||
self.infcx().probe(move |_| {
|
self.probe(move |this, _| {
|
||||||
match self.match_where_clause_trait_ref(stack.obligation, where_clause_trait_ref) {
|
match this.match_where_clause_trait_ref(stack.obligation, where_clause_trait_ref) {
|
||||||
Ok(obligations) => {
|
Ok(obligations) => {
|
||||||
self.evaluate_predicates_recursively(stack.list(), obligations.iter())
|
this.evaluate_predicates_recursively(stack.list(), obligations.iter())
|
||||||
}
|
}
|
||||||
Err(()) => EvaluatedToErr
|
Err(()) => EvaluatedToErr
|
||||||
}
|
}
|
||||||
|
@@ -1361,8 +1422,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
             self.tcx(),
             obligation.predicate.0.trait_ref.self_ty(),
             |impl_def_id| {
-                self.infcx.probe(|snapshot| {
-                    if let Ok(_) = self.match_impl(impl_def_id, obligation, snapshot) {
+                self.probe(|this, snapshot| {
+                    if let Ok(_) = this.match_impl(impl_def_id, obligation, snapshot) {
                         candidates.vec.push(ImplCandidate(impl_def_id));
                     }
                 });
@@ -1448,12 +1509,12 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
             return;
         }

-        self.infcx.in_snapshot(|snapshot| {
+        self.probe(|this, snapshot| {
             let (self_ty, _) =
-                self.infcx().skolemize_late_bound_regions(&obligation.self_ty(), snapshot);
+                this.infcx().skolemize_late_bound_regions(&obligation.self_ty(), snapshot);
             let poly_trait_ref = match self_ty.sty {
                 ty::TyTrait(ref data) => {
-                    match self.tcx().lang_items.to_builtin_kind(obligation.predicate.def_id()) {
+                    match this.tcx().lang_items.to_builtin_kind(obligation.predicate.def_id()) {
                         Some(bound @ ty::BoundSend) | Some(bound @ ty::BoundSync) => {
                             if data.bounds.builtin_bounds.contains(&bound) {
                                 debug!("assemble_candidates_from_object_ty: matched builtin bound, \
@@ -1465,7 +1526,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                         _ => {}
                     }

-                    data.principal_trait_ref_with_self_ty(self.tcx(), self_ty)
+                    data.principal_trait_ref_with_self_ty(this.tcx(), self_ty)
                 }
                 ty::TyInfer(ty::TyVar(_)) => {
                     debug!("assemble_candidates_from_object_ty: ambiguous");
@@ -1486,11 +1547,11 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
             // For example, we may be trying to upcast `Foo` to `Bar<i32>`,
             // but `Foo` is declared as `trait Foo : Bar<u32>`.
             let upcast_trait_refs =
-                util::supertraits(self.tcx(), poly_trait_ref)
+                util::supertraits(this.tcx(), poly_trait_ref)
                 .filter(|upcast_trait_ref| {
-                    self.infcx.probe(|_| {
+                    this.probe(|this, _| {
                         let upcast_trait_ref = upcast_trait_ref.clone();
-                        self.match_poly_trait_ref(obligation, upcast_trait_ref).is_ok()
+                        this.match_poly_trait_ref(obligation, upcast_trait_ref).is_ok()
                     })
                 })
                 .count();
@@ -1894,23 +1955,23 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
         types.skip_binder().into_iter().flat_map(|ty| { // binder moved -\
             let ty: ty::Binder<Ty<'tcx>> = ty::Binder(ty); // <----------/

-            self.infcx.in_snapshot(|snapshot| {
+            self.in_snapshot(|this, snapshot| {
                 let (skol_ty, skol_map) =
-                    self.infcx().skolemize_late_bound_regions(&ty, snapshot);
+                    this.infcx().skolemize_late_bound_regions(&ty, snapshot);
                 let Normalized { value: normalized_ty, mut obligations } =
-                    project::normalize_with_depth(self,
+                    project::normalize_with_depth(this,
                                                   cause.clone(),
                                                   recursion_depth,
                                                   &skol_ty);
                 let skol_obligation =
-                    self.tcx().predicate_for_trait_def(
+                    this.tcx().predicate_for_trait_def(
                         cause.clone(),
                         trait_def_id,
                         recursion_depth,
                         normalized_ty,
                         vec![]);
                 obligations.push(skol_obligation);
-                self.infcx().plug_leaks(skol_map, snapshot, &obligations)
+                this.infcx().plug_leaks(skol_map, snapshot, &obligations)
             })
         }).collect()
     }
@@ -1977,9 +2038,9 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
             }

             FnPointerCandidate => {
-                let fn_type =
+                let data =
                     self.confirm_fn_pointer_candidate(obligation)?;
-                Ok(VtableFnPointer(fn_type))
+                Ok(VtableFnPointer(data))
             }

             ProjectionCandidate => {
|
||||||
fn confirm_projection_candidate(&mut self,
|
fn confirm_projection_candidate(&mut self,
|
||||||
obligation: &TraitObligation<'tcx>)
|
obligation: &TraitObligation<'tcx>)
|
||||||
{
|
{
|
||||||
self.infcx.in_snapshot(|snapshot| {
|
self.in_snapshot(|this, snapshot| {
|
||||||
let result =
|
let result =
|
||||||
self.match_projection_obligation_against_bounds_from_trait(obligation,
|
this.match_projection_obligation_against_bounds_from_trait(obligation,
|
||||||
snapshot);
|
snapshot);
|
||||||
assert!(result);
|
assert!(result);
|
||||||
})
|
})
|
||||||
|
@ -2140,12 +2201,12 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
|
||||||
trait_def_id,
|
trait_def_id,
|
||||||
nested);
|
nested);
|
||||||
|
|
||||||
let trait_obligations = self.infcx.in_snapshot(|snapshot| {
|
let trait_obligations = self.in_snapshot(|this, snapshot| {
|
||||||
let poly_trait_ref = obligation.predicate.to_poly_trait_ref();
|
let poly_trait_ref = obligation.predicate.to_poly_trait_ref();
|
||||||
let (trait_ref, skol_map) =
|
let (trait_ref, skol_map) =
|
||||||
self.infcx().skolemize_late_bound_regions(&poly_trait_ref, snapshot);
|
this.infcx().skolemize_late_bound_regions(&poly_trait_ref, snapshot);
|
||||||
let cause = self.derived_cause(obligation, ImplDerivedObligation);
|
let cause = this.derived_cause(obligation, ImplDerivedObligation);
|
||||||
self.impl_or_trait_obligations(cause,
|
this.impl_or_trait_obligations(cause,
|
||||||
obligation.recursion_depth + 1,
|
obligation.recursion_depth + 1,
|
||||||
trait_def_id,
|
trait_def_id,
|
||||||
&trait_ref.substs,
|
&trait_ref.substs,
|
||||||
|
@@ -2174,13 +2235,13 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {

         // First, create the substitutions by matching the impl again,
         // this time not in a probe.
-        self.infcx.in_snapshot(|snapshot| {
+        self.in_snapshot(|this, snapshot| {
             let (substs, skol_map) =
-                self.rematch_impl(impl_def_id, obligation,
+                this.rematch_impl(impl_def_id, obligation,
                                   snapshot);
             debug!("confirm_impl_candidate substs={:?}", substs);
-            let cause = self.derived_cause(obligation, ImplDerivedObligation);
-            self.vtable_impl(impl_def_id, substs, cause,
+            let cause = this.derived_cause(obligation, ImplDerivedObligation);
+            this.vtable_impl(impl_def_id, substs, cause,
                              obligation.recursion_depth + 1,
                              skol_map, snapshot)
         })
@@ -2227,7 +2288,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {

     fn confirm_object_candidate(&mut self,
                                 obligation: &TraitObligation<'tcx>)
-                                -> VtableObjectData<'tcx>
+                                -> VtableObjectData<'tcx, PredicateObligation<'tcx>>
     {
         debug!("confirm_object_candidate({:?})",
                obligation);
@@ -2251,6 +2312,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
         let vtable_base;

         {
+            let tcx = self.tcx();
+
             // We want to find the first supertrait in the list of
             // supertraits that we can unify with, and do that
             // unification. We know that there is exactly one in the list
@@ -2258,11 +2321,11 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
             // reported an ambiguity. (When we do find a match, also
             // record it for later.)
             let nonmatching =
-                util::supertraits(self.tcx(), poly_trait_ref)
+                util::supertraits(tcx, poly_trait_ref)
                 .take_while(|&t| {
                     match
-                        self.infcx.commit_if_ok(
-                            |_| self.match_poly_trait_ref(obligation, t))
+                        self.commit_if_ok(
+                            |this, _| this.match_poly_trait_ref(obligation, t))
                     {
                         Ok(_) => { upcast_trait_ref = Some(t); false }
                         Err(_) => { true }
@@ -2274,7 +2337,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
             // entries, so that we can compute the offset for the selected
             // trait.
             vtable_base =
-                nonmatching.map(|t| self.tcx().count_own_vtable_entries(t))
+                nonmatching.map(|t| tcx.count_own_vtable_entries(t))
                            .sum();

         }
@@ -2282,12 +2345,12 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
         VtableObjectData {
             upcast_trait_ref: upcast_trait_ref.unwrap(),
             vtable_base: vtable_base,
+            nested: vec![]
         }
     }

-    fn confirm_fn_pointer_candidate(&mut self,
-                                    obligation: &TraitObligation<'tcx>)
-                                    -> Result<ty::Ty<'tcx>,SelectionError<'tcx>>
+    fn confirm_fn_pointer_candidate(&mut self, obligation: &TraitObligation<'tcx>)
+        -> Result<VtableFnPointerData<'tcx, PredicateObligation<'tcx>>, SelectionError<'tcx>>
     {
         debug!("confirm_fn_pointer_candidate({:?})",
                obligation);
@@ -2305,7 +2368,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
         self.confirm_poly_trait_refs(obligation.cause.clone(),
                                      obligation.predicate.to_poly_trait_ref(),
                                      trait_ref)?;
-        Ok(self_ty)
+        Ok(VtableFnPointerData { fn_ty: self_ty, nested: vec![] })
     }

     fn confirm_closure_candidate(&mut self,
@@ -2384,8 +2447,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                              origin,
                              expected_trait_ref.clone(),
                              obligation_trait_ref.clone())
-            // FIXME(#32730) propagate obligations
-            .map(|InferOk { obligations, .. }| assert!(obligations.is_empty()))
+            .map(|InferOk { obligations, .. }| self.inferred_obligations.extend(obligations))
             .map_err(|e| OutputTypeParameterMismatch(expected_trait_ref, obligation_trait_ref, e))
     }

@@ -2420,8 +2482,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                 let InferOk { obligations, .. } =
                     self.infcx.sub_types(false, origin, new_trait, target)
                     .map_err(|_| Unimplemented)?;
-                // FIXME(#32730) propagate obligations
-                assert!(obligations.is_empty());
+                self.inferred_obligations.extend(obligations);

                 // Register one obligation for 'a: 'b.
                 let cause = ObligationCause::new(obligation.cause.span,
@@ -2494,8 +2555,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                 let InferOk { obligations, .. } =
                     self.infcx.sub_types(false, origin, a, b)
                     .map_err(|_| Unimplemented)?;
-                // FIXME(#32730) propagate obligations
-                assert!(obligations.is_empty());
+                self.inferred_obligations.extend(obligations);
             }

             // Struct<T> -> Struct<U>.
@@ -2554,8 +2614,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                 let InferOk { obligations, .. } =
                     self.infcx.sub_types(false, origin, new_struct, target)
                     .map_err(|_| Unimplemented)?;
-                // FIXME(#32730) propagate obligations
-                assert!(obligations.is_empty());
+                self.inferred_obligations.extend(obligations);

                 // Construct the nested Field<T>: Unsize<Field<U>> predicate.
                 nested.push(tcx.predicate_for_trait_def(
@@ -2649,8 +2708,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                 debug!("match_impl: failed eq_trait_refs due to `{}`", e);
                 ()
             })?;
-        // FIXME(#32730) propagate obligations
-        assert!(obligations.is_empty());
+        self.inferred_obligations.extend(obligations);

         if let Err(e) = self.infcx.leak_check(false, &skol_map, snapshot) {
             debug!("match_impl: failed leak check due to `{}`", e);
@@ -2703,7 +2761,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {

     /// Returns `Ok` if `poly_trait_ref` being true implies that the
     /// obligation is satisfied.
-    fn match_poly_trait_ref(&self,
+    fn match_poly_trait_ref(&mut self,
                             obligation: &TraitObligation<'tcx>,
                             poly_trait_ref: ty::PolyTraitRef<'tcx>)
                             -> Result<(),()>
@@ -2717,8 +2775,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
                             origin,
                             poly_trait_ref,
                             obligation.predicate.to_poly_trait_ref())
-            // FIXME(#32730) propagate obligations
-            .map(|InferOk { obligations, .. }| assert!(obligations.is_empty()))
+            .map(|InferOk { obligations, .. }| self.inferred_obligations.extend(obligations))
             .map_err(|_| ())
     }

@@ -99,11 +99,20 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableDefaultImplData<N> {
     }
 }

-impl<'tcx> fmt::Debug for traits::VtableObjectData<'tcx> {
+impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableObjectData<'tcx, N> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "VtableObject(upcast={:?}, vtable_base={})",
+        write!(f, "VtableObject(upcast={:?}, vtable_base={}, nested={:?})",
                self.upcast_trait_ref,
-               self.vtable_base)
+               self.vtable_base,
+               self.nested)
+    }
+}
+
+impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableFnPointerData<'tcx, N> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "VtableFnPointer(fn_ty={:?}, nested={:?})",
+               self.fn_ty,
+               self.nested)
     }
 }

@@ -185,19 +194,26 @@ impl<'a, 'tcx> Lift<'tcx> for traits::Vtable<'a, ()> {
                     })
                 })
             }
-            traits::VtableFnPointer(ty) => {
-                tcx.lift(&ty).map(traits::VtableFnPointer)
+            traits::VtableFnPointer(traits::VtableFnPointerData { fn_ty, nested }) => {
+                tcx.lift(&fn_ty).map(|fn_ty| {
+                    traits::VtableFnPointer(traits::VtableFnPointerData {
+                        fn_ty: fn_ty,
+                        nested: nested,
+                    })
+                })
             }
             traits::VtableParam(n) => Some(traits::VtableParam(n)),
             traits::VtableBuiltin(d) => Some(traits::VtableBuiltin(d)),
             traits::VtableObject(traits::VtableObjectData {
                 upcast_trait_ref,
-                vtable_base
+                vtable_base,
+                nested
             }) => {
                 tcx.lift(&upcast_trait_ref).map(|trait_ref| {
                     traits::VtableObject(traits::VtableObjectData {
                         upcast_trait_ref: trait_ref,
-                        vtable_base: vtable_base
+                        vtable_base: vtable_base,
+                        nested: nested
                     })
                 })
             }
@@ -276,16 +292,30 @@ impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableBuiltinDa
     }
 }

-impl<'tcx> TypeFoldable<'tcx> for traits::VtableObjectData<'tcx> {
+impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableObjectData<'tcx, N> {
     fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
         traits::VtableObjectData {
             upcast_trait_ref: self.upcast_trait_ref.fold_with(folder),
-            vtable_base: self.vtable_base
+            vtable_base: self.vtable_base,
+            nested: self.nested.fold_with(folder),
         }
     }

     fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
-        self.upcast_trait_ref.visit_with(visitor)
+        self.upcast_trait_ref.visit_with(visitor) || self.nested.visit_with(visitor)
+    }
+}
+
+impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableFnPointerData<'tcx, N> {
+    fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+        traits::VtableFnPointerData {
+            fn_ty: self.fn_ty.fold_with(folder),
+            nested: self.nested.fold_with(folder),
+        }
+    }
+
+    fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+        self.fn_ty.visit_with(visitor) || self.nested.visit_with(visitor)
     }
 }

@@ -473,9 +473,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     /// Given an upcast trait object described by `object`, returns the
     /// index of the method `method_def_id` (which should be part of
     /// `object.upcast_trait_ref`) within the vtable for `object`.
-    pub fn get_vtable_index_of_object_method(self,
-                                             object: &super::VtableObjectData<'tcx>,
+    pub fn get_vtable_index_of_object_method<N>(self,
+                                                object: &super::VtableObjectData<'tcx, N>,
                                              method_def_id: DefId) -> usize {
         // Count number of methods preceding the one we are selecting and
         // add them to the total offset.
         // Skip over associated types and constants.
@@ -1260,7 +1260,7 @@ impl<'a, 'tcx> ParameterEnvironment<'tcx> {
         match tcx.map.find(id) {
             Some(ast_map::NodeImplItem(ref impl_item)) => {
                 match impl_item.node {
-                    hir::ImplItemKind::Type(_) => {
+                    hir::ImplItemKind::Type(_) | hir::ImplItemKind::Const(_, _) => {
                         // associated types don't have their own entry (for some reason),
                         // so for now just grab environment for the impl
                         let impl_id = tcx.map.get_parent(id);
@@ -1272,15 +1272,6 @@ impl<'a, 'tcx> ParameterEnvironment<'tcx> {
                                                             &predicates,
                                                             tcx.region_maps.item_extent(id))
                     }
-                    hir::ImplItemKind::Const(_, _) => {
-                        let def_id = tcx.map.local_def_id(id);
-                        let scheme = tcx.lookup_item_type(def_id);
-                        let predicates = tcx.lookup_predicates(def_id);
-                        tcx.construct_parameter_environment(impl_item.span,
-                                                            &scheme.generics,
-                                                            &predicates,
-                                                            tcx.region_maps.item_extent(id))
-                    }
                     hir::ImplItemKind::Method(_, ref body) => {
                         let method_def_id = tcx.map.local_def_id(id);
                         match tcx.impl_or_trait_item(method_def_id) {
@@ -1303,7 +1294,7 @@ impl<'a, 'tcx> ParameterEnvironment<'tcx> {
             }
             Some(ast_map::NodeTraitItem(trait_item)) => {
                 match trait_item.node {
-                    hir::TypeTraitItem(..) => {
+                    hir::TypeTraitItem(..) | hir::ConstTraitItem(..) => {
                         // associated types don't have their own entry (for some reason),
                         // so for now just grab environment for the trait
                         let trait_id = tcx.map.get_parent(id);
@@ -1315,15 +1306,6 @@ impl<'a, 'tcx> ParameterEnvironment<'tcx> {
                                                             &predicates,
                                                             tcx.region_maps.item_extent(id))
                     }
-                    hir::ConstTraitItem(..) => {
-                        let def_id = tcx.map.local_def_id(id);
-                        let scheme = tcx.lookup_item_type(def_id);
-                        let predicates = tcx.lookup_predicates(def_id);
-                        tcx.construct_parameter_environment(trait_item.span,
-                                                            &scheme.generics,
-                                                            &predicates,
-                                                            tcx.region_maps.item_extent(id))
-                    }
                     hir::MethodTraitItem(_, ref body) => {
                         // Use call-site for extent (unless this is a
                         // trait method with no default; then fallback
@@ -213,3 +213,11 @@ impl<D: SnapshotVecDelegate> ops::IndexMut<usize> for SnapshotVec<D> {
         self.get_mut(index)
     }
 }
+
+impl<D: SnapshotVecDelegate> Extend<D::Value> for SnapshotVec<D> {
+    fn extend<T>(&mut self, iterable: T) where T: IntoIterator<Item=D::Value> {
+        for item in iterable {
+            self.push(item);
+        }
+    }
+}
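This `Extend` impl is what allows the `self.inferred_obligations.extend(obligations)` calls earlier in the diff to target the snapshot vector directly. For reference, a small standalone sketch of the same idiom on a hypothetical wrapper type whose only mutator is `push`:

// Hypothetical push-only wrapper; the point is just that implementing
// std::iter::Extend in terms of push() is all a caller needs in order to
// write wrapper.extend(some_iterator).
struct PushLog<T> {
    items: Vec<T>,
}

impl<T> PushLog<T> {
    fn new() -> Self {
        PushLog { items: Vec::new() }
    }

    fn push(&mut self, item: T) {
        self.items.push(item);
    }
}

impl<T> Extend<T> for PushLog<T> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iterable: I) {
        for item in iterable {
            self.push(item);
        }
    }
}

fn main() {
    let mut log = PushLog::new();
    log.extend(vec![1, 2, 3]);
    log.extend(4..=6);
    assert_eq!(log.items, vec![1, 2, 3, 4, 5, 6]);
}
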
@@ -1036,7 +1036,7 @@ pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         passes.push_pass(box mir::transform::no_landing_pads::NoLandingPads);
         passes.push_pass(box mir::transform::remove_dead_blocks::RemoveDeadBlocks);
         passes.push_pass(box mir::transform::erase_regions::EraseRegions);
-        passes.push_pass(box mir::transform::break_critical_edges::BreakCriticalEdges);
+        passes.push_pass(box mir::transform::break_cleanup_edges::BreakCleanupEdges);
         passes.run_passes(tcx, &mut mir_map);
     });

src/librustc_mir/transform/break_cleanup_edges.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::ty::TyCtxt;
+use rustc::mir::repr::*;
+use rustc::mir::transform::{MirPass, MirSource, Pass};
+
+use rustc_data_structures::bitvec::BitVector;
+
+use pretty;
+
+use traversal;
+
+pub struct BreakCleanupEdges;
+
+/**
+ * Breaks outgoing critical edges for call terminators in the MIR.
+ *
+ * Critical edges are edges that are neither the only edge leaving a
+ * block, nor the only edge entering one.
+ *
+ * When you want something to happen "along" an edge, you can either
+ * do at the end of the predecessor block, or at the start of the
+ * successor block. Critical edges have to be broken in order to prevent
+ * "edge actions" from affecting other edges. We need this for calls that are
+ * translated to LLVM invoke instructions, because invoke is a block terminator
+ * in LLVM so we can't insert any code to handle the call's result into the
+ * block that performs the call.
+ *
+ * This function will break those edges by inserting new blocks along them.
+ *
+ * NOTE: Simplify CFG will happily undo most of the work this pass does.
+ *
+ */
+
+impl<'tcx> MirPass<'tcx> for BreakCleanupEdges {
+    fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &mut Mir<'tcx>) {
+        let mut pred_count = vec![0u32; mir.basic_blocks.len()];
+
+        // Build the precedecessor map for the MIR
+        for (_, data) in traversal::preorder(mir) {
+            if let Some(ref term) = data.terminator {
+                for &tgt in term.successors().iter() {
+                    pred_count[tgt.index()] += 1;
+                }
+            }
+        }
+
+        let cleanup_map : BitVector = mir.basic_blocks
+            .iter().map(|bb| bb.is_cleanup).collect();
+
+        // We need a place to store the new blocks generated
+        let mut new_blocks = Vec::new();
+
+        let bbs = mir.all_basic_blocks();
+        let cur_len = mir.basic_blocks.len();
+
+        for &bb in &bbs {
+            let data = mir.basic_block_data_mut(bb);
+
+            if let Some(ref mut term) = data.terminator {
+                if term_is_invoke(term) {
+                    let term_span = term.span;
+                    let term_scope = term.scope;
+                    let succs = term.successors_mut();
+                    for tgt in succs {
+                        let num_preds = pred_count[tgt.index()];
+                        if num_preds > 1 {
+                            // It's a critical edge, break it
+                            let goto = Terminator {
+                                span: term_span,
+                                scope: term_scope,
+                                kind: TerminatorKind::Goto { target: *tgt }
+                            };
+                            let mut data = BasicBlockData::new(Some(goto));
+                            data.is_cleanup = cleanup_map.contains(tgt.index());
+
+                            // Get the index it will be when inserted into the MIR
+                            let idx = cur_len + new_blocks.len();
+                            new_blocks.push(data);
+                            *tgt = BasicBlock::new(idx);
+                        }
+                    }
+                }
+            }
+        }
+
+        pretty::dump_mir(tcx, "break_cleanup_edges", &0, src, mir, None);
+        debug!("Broke {} N edges", new_blocks.len());
+
+        mir.basic_blocks.extend_from_slice(&new_blocks);
+    }
+}
+
+impl Pass for BreakCleanupEdges {}
+
+// Returns true if the terminator is a call that would use an invoke in LLVM.
+fn term_is_invoke(term: &Terminator) -> bool {
+    match term.kind {
+        TerminatorKind::Call { cleanup: Some(_), .. } |
+        TerminatorKind::Drop { unwind: Some(_), .. } => true,
+        _ => false
+    }
+}
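The doc comment of the new pass describes the graph transformation in prose. As a rough standalone illustration (a toy CFG of plain successor lists, not rustc's MIR types), splitting the outgoing edges of call-like blocks whose targets have several predecessors looks like this:

// Toy control-flow graph: block i jumps to the blocks listed in succs[i].
// For every edge leaving a "call-like" block whose target has multiple
// predecessors, insert a fresh forwarding block, mirroring what the pass
// above does for MIR call/drop terminators. Illustration only.
fn break_edges(succs: &mut Vec<Vec<usize>>, is_call: &[bool]) {
    // Count predecessors of every block.
    let mut pred_count = vec![0usize; succs.len()];
    for targets in succs.iter() {
        for &t in targets {
            pred_count[t] += 1;
        }
    }

    let mut new_blocks: Vec<Vec<usize>> = Vec::new();
    let base = succs.len();

    for (block, targets) in succs.iter_mut().enumerate() {
        if !is_call[block] {
            continue;
        }
        for t in targets.iter_mut() {
            if pred_count[*t] > 1 {
                // Critical edge: route it through a new block that only
                // forwards to the old target.
                let fresh = base + new_blocks.len();
                new_blocks.push(vec![*t]);
                *t = fresh;
            }
        }
    }
    succs.extend(new_blocks);
}

fn main() {
    // Block 0 (a call) and block 1 both jump to block 2.
    let mut succs = vec![vec![2], vec![2], vec![]];
    let is_call = [true, false, false];
    break_edges(&mut succs, &is_call);
    // The call edge 0 -> 2 now goes through a new block 3.
    assert_eq!(succs, vec![vec![3], vec![2], vec![], vec![2]]);
}

The real pass additionally copies the `is_cleanup` flag onto the inserted blocks and rewrites only call/drop terminators, as shown in the file above.
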
@@ -1,117 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::ty::TyCtxt;
-use rustc::mir::repr::*;
-use rustc::mir::transform::{MirPass, MirSource, Pass};
-
-use rustc_data_structures::bitvec::BitVector;
-
-use traversal;
-
-pub struct BreakCriticalEdges;
-
-/**
- * Breaks critical edges in the MIR.
- *
- * Critical edges are edges that are neither the only edge leaving a
- * block, nor the only edge entering one.
- *
- * When you want something to happen "along" an edge, you can either
- * do at the end of the predecessor block, or at the start of the
- * successor block. Critical edges have to be broken in order to prevent
- * "edge actions" from affecting other edges.
- *
- * This function will break those edges by inserting new blocks along them.
- *
- * A special case is Drop and Call terminators with unwind/cleanup successors,
- * They use `invoke` in LLVM, which terminates a block, meaning that code cannot
- * be inserted after them, so even if an edge is the only edge leaving a block
- * like that, we still insert blocks if the edge is one of many entering the
- * target.
- *
- * NOTE: Simplify CFG will happily undo most of the work this pass does.
- *
- */
-
-impl<'tcx> MirPass<'tcx> for BreakCriticalEdges {
-    fn run_pass<'a>(&mut self, _: TyCtxt<'a, 'tcx, 'tcx>,
-                    _: MirSource, mir: &mut Mir<'tcx>) {
-        break_critical_edges(mir);
-    }
-}
-
-impl Pass for BreakCriticalEdges {}
-
-fn break_critical_edges(mir: &mut Mir) {
-    let mut pred_count = vec![0u32; mir.basic_blocks.len()];
-
-    // Build the precedecessor map for the MIR
-    for (_, data) in traversal::preorder(mir) {
-        if let Some(ref term) = data.terminator {
-            for &tgt in term.successors().iter() {
-                pred_count[tgt.index()] += 1;
-            }
-        }
-    }
-
-    let cleanup_map : BitVector = mir.basic_blocks
-        .iter().map(|bb| bb.is_cleanup).collect();
-
-    // We need a place to store the new blocks generated
-    let mut new_blocks = Vec::new();
-
-    let bbs = mir.all_basic_blocks();
-    let cur_len = mir.basic_blocks.len();
-
-    for &bb in &bbs {
-        let data = mir.basic_block_data_mut(bb);
-
-        if let Some(ref mut term) = data.terminator {
-            let is_invoke = term_is_invoke(term);
-            let term_span = term.span;
-            let term_scope = term.scope;
-            let succs = term.successors_mut();
-            if succs.len() > 1 || (succs.len() > 0 && is_invoke) {
-                for tgt in succs {
-                    let num_preds = pred_count[tgt.index()];
-                    if num_preds > 1 {
-                        // It's a critical edge, break it
-                        let goto = Terminator {
-                            span: term_span,
-                            scope: term_scope,
-                            kind: TerminatorKind::Goto { target: *tgt }
-                        };
-                        let mut data = BasicBlockData::new(Some(goto));
-                        data.is_cleanup = cleanup_map.contains(tgt.index());
-
-                        // Get the index it will be when inserted into the MIR
-                        let idx = cur_len + new_blocks.len();
-                        new_blocks.push(data);
-                        *tgt = BasicBlock::new(idx);
-                    }
-                }
-            }
-        }
-    }
-
-    debug!("Broke {} N edges", new_blocks.len());
-
-    mir.basic_blocks.extend_from_slice(&new_blocks);
-}
-
-// Returns true if the terminator would use an invoke in LLVM.
-fn term_is_invoke(term: &Terminator) -> bool {
-    match term.kind {
-        TerminatorKind::Call { cleanup: Some(_), .. } |
-        TerminatorKind::Drop { unwind: Some(_), .. } => true,
-        _ => false
-    }
-}
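Aside, not part of the diff: the doc comment in the deleted pass above defines a critical edge as one that is neither the only edge leaving its source block nor the only edge entering its target. A minimal, self-contained sketch of that test on a toy CFG (toy types only, not rustc's MIR):

// Illustrative only: a toy CFG with numeric block ids, not rustc's Mir.
struct Cfg {
    // successors[b] lists the blocks that block `b` can jump to.
    successors: Vec<Vec<usize>>,
}

impl Cfg {
    // An edge (src, tgt) is critical when `src` has more than one
    // successor and `tgt` has more than one predecessor. Breaking it
    // means inserting a fresh block that just jumps to `tgt`.
    fn critical_edges(&self) -> Vec<(usize, usize)> {
        let mut pred_count = vec![0usize; self.successors.len()];
        for succs in &self.successors {
            for &tgt in succs {
                pred_count[tgt] += 1;
            }
        }

        let mut edges = Vec::new();
        for (src, succs) in self.successors.iter().enumerate() {
            if succs.len() > 1 {
                for &tgt in succs {
                    if pred_count[tgt] > 1 {
                        edges.push((src, tgt));
                    }
                }
            }
        }
        edges
    }
}

fn main() {
    // 0 -> {1, 2}, 1 -> {3, 2}, 2 -> {3}, 3 -> {}:
    // 0->2, 1->2 and 1->3 are critical (multi-exit source, multi-entry target).
    let cfg = Cfg { successors: vec![vec![1, 2], vec![3, 2], vec![3], vec![]] };
    println!("{:?}", cfg.critical_edges());
}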
@@ -13,6 +13,6 @@ pub mod simplify_cfg;
 pub mod erase_regions;
 pub mod no_landing_pads;
 pub mod type_check;
-pub mod break_critical_edges;
+pub mod break_cleanup_edges;
 pub mod promote_consts;
 pub mod qualify_consts;
@@ -8,73 +8,155 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+use rustc_data_structures::bitvec::BitVector;
 use rustc::middle::const_val::ConstVal;
 use rustc::ty::TyCtxt;
 use rustc::mir::repr::*;
 use rustc::mir::transform::{MirPass, MirSource, Pass};
 use pretty;
+use std::mem;

 use super::remove_dead_blocks::RemoveDeadBlocks;

+use traversal;
+
 pub struct SimplifyCfg;

 impl SimplifyCfg {
     pub fn new() -> SimplifyCfg {
         SimplifyCfg
     }
+}

-    fn remove_goto_chains(&self, mir: &mut Mir) -> bool {
-        // Find the target at the end of the jump chain, return None if there is a loop
-        fn final_target(mir: &Mir, mut target: BasicBlock) -> Option<BasicBlock> {
-            // Keep track of already seen blocks to detect loops
-            let mut seen: Vec<BasicBlock> = Vec::with_capacity(8);
-
-            while mir.basic_block_data(target).statements.is_empty() {
-                // NB -- terminator may have been swapped with `None`
-                // below, in which case we have a cycle and just want
-                // to stop
-                if let Some(ref terminator) = mir.basic_block_data(target).terminator {
-                    match terminator.kind {
-                        TerminatorKind::Goto { target: next } => {
-                            if seen.contains(&next) {
-                                return None;
-                            }
-                            seen.push(next);
-                            target = next;
-                        }
-                        _ => break
-                    }
-                } else {
-                    break
-                }
-            }
-
-            Some(target)
-        }
-
-        let mut changed = false;
-        for bb in mir.all_basic_blocks() {
-            // Temporarily take ownership of the terminator we're modifying to keep borrowck happy
-            let mut terminator = mir.basic_block_data_mut(bb).terminator.take()
-                .expect("invalid terminator state");
-
-            debug!("remove_goto_chains: bb={:?} terminator={:?}", bb, terminator);
-
-            for target in terminator.successors_mut() {
-                let new_target = match final_target(mir, *target) {
-                    Some(new_target) => new_target,
-                    None if mir.basic_block_data(bb).statements.is_empty() => bb,
-                    None => continue
-                };
-                changed |= *target != new_target;
-                *target = new_target;
-            }
-            mir.basic_block_data_mut(bb).terminator = Some(terminator);
-        }
-        changed
-    }
-
-    fn simplify_branches(&self, mir: &mut Mir) -> bool {
+impl<'tcx> MirPass<'tcx> for SimplifyCfg {
+    fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &mut Mir<'tcx>) {
+        simplify_branches(mir);
+        RemoveDeadBlocks.run_pass(tcx, src, mir);
+        merge_consecutive_blocks(mir);
+        RemoveDeadBlocks.run_pass(tcx, src, mir);
+        pretty::dump_mir(tcx, "simplify_cfg", &0, src, mir, None);
+
+        // FIXME: Should probably be moved into some kind of pass manager
+        mir.basic_blocks.shrink_to_fit();
+    }
+}
+
+impl Pass for SimplifyCfg {}
+
+fn merge_consecutive_blocks(mir: &mut Mir) {
+    // Build the precedecessor map for the MIR
+    let mut pred_count = vec![0u32; mir.basic_blocks.len()];
+    for (_, data) in traversal::preorder(mir) {
+        if let Some(ref term) = data.terminator {
+            for &tgt in term.successors().iter() {
+                pred_count[tgt.index()] += 1;
+            }
+        }
+    }
+
+    loop {
+        let mut changed = false;
+        let mut seen = BitVector::new(mir.basic_blocks.len());
+        let mut worklist = vec![START_BLOCK];
+        while let Some(bb) = worklist.pop() {
+            // Temporarily take ownership of the terminator we're modifying to keep borrowck happy
+            let mut terminator = mir.basic_block_data_mut(bb).terminator.take()
+                .expect("invalid terminator state");
+
+            // See if we can merge the target block into this one
+            loop {
+                let mut inner_change = false;
+
+                if let TerminatorKind::Goto { target } = terminator.kind {
+                    // Don't bother trying to merge a block into itself
+                    if target == bb {
+                        break;
+                    }
+
+                    let num_insts = mir.basic_block_data(target).statements.len();
+                    match mir.basic_block_data(target).terminator().kind {
+                        TerminatorKind::Goto { target: new_target } if num_insts == 0 => {
+                            inner_change = true;
+                            terminator.kind = TerminatorKind::Goto { target: new_target };
+                            pred_count[target.index()] -= 1;
+                            pred_count[new_target.index()] += 1;
+                        }
+                        _ if pred_count[target.index()] == 1 => {
+                            inner_change = true;
+                            let mut stmts = Vec::new();
+                            {
+                                let target_data = mir.basic_block_data_mut(target);
+                                mem::swap(&mut stmts, &mut target_data.statements);
+                                mem::swap(&mut terminator, target_data.terminator_mut());
+                            }
+
+                            mir.basic_block_data_mut(bb).statements.append(&mut stmts);
+                        }
+                        _ => {}
+                    };
+                }
+
+                for target in terminator.successors_mut() {
+                    let new_target = match final_target(mir, *target) {
+                        Some(new_target) => new_target,
+                        None if mir.basic_block_data(bb).statements.is_empty() => bb,
+                        None => continue
+                    };
+                    if *target != new_target {
+                        inner_change = true;
+                        pred_count[target.index()] -= 1;
+                        pred_count[new_target.index()] += 1;
+                        *target = new_target;
+                    }
+                }
+
+                changed |= inner_change;
+                if !inner_change {
+                    break;
+                }
+            }
+
+            mir.basic_block_data_mut(bb).terminator = Some(terminator);
+
+            for succ in mir.basic_block_data(bb).terminator().successors().iter() {
+                if seen.insert(succ.index()) {
+                    worklist.push(*succ);
+                }
+            }
+        }
+
+        if !changed {
+            break;
+        }
+    }
+}
+
+// Find the target at the end of the jump chain, return None if there is a loop
+fn final_target(mir: &Mir, mut target: BasicBlock) -> Option<BasicBlock> {
+    // Keep track of already seen blocks to detect loops
+    let mut seen: Vec<BasicBlock> = Vec::with_capacity(8);
+
+    while mir.basic_block_data(target).statements.is_empty() {
+        // NB -- terminator may have been swapped with `None` in
+        // merge_consecutive_blocks, in which case we have a cycle and just want
+        // to stop
+        match mir.basic_block_data(target).terminator {
+            Some(Terminator { kind: TerminatorKind::Goto { target: next }, .. }) => {
+                if seen.contains(&next) {
+                    return None;
+                }
+                seen.push(next);
+                target = next;
+            }
+            _ => break
+        }
+    }
+
+    Some(target)
+}
+
+fn simplify_branches(mir: &mut Mir) {
+    loop {
         let mut changed = false;

         for bb in mir.all_basic_blocks() {
@@ -106,25 +188,8 @@ impl SimplifyCfg {
             }
         }

-        changed
-    }
-}
-
-impl<'tcx> MirPass<'tcx> for SimplifyCfg {
-    fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                    src: MirSource, mir: &mut Mir<'tcx>) {
-        let mut counter = 0;
-        let mut changed = true;
-        while changed {
-            pretty::dump_mir(tcx, "simplify_cfg", &counter, src, mir, None);
-            counter += 1;
-            changed = self.simplify_branches(mir);
-            changed |= self.remove_goto_chains(mir);
-            RemoveDeadBlocks.run_pass(tcx, src, mir);
-        }
-        // FIXME: Should probably be moved into some kind of pass manager
-        mir.basic_blocks.shrink_to_fit();
-    }
-}
-
-impl Pass for SimplifyCfg {}
+        if !changed {
+            break;
+        }
+    }
+}
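Aside, not part of the diff: the rewritten pass follows chains of Goto terminators with `final_target`, giving up when the chain loops back on itself. A minimal standalone sketch of that chain-following step, assuming a toy `Vec<Option<usize>>` jump table instead of rustc's Mir:

// Illustrative only: block i forwards straight to goto[i]; None means
// the block does real work. Mirrors the cycle check in `final_target`.
fn final_target(goto: &[Option<usize>], mut target: usize) -> Option<usize> {
    let mut seen = Vec::new();
    while let Some(next) = goto[target] {
        if seen.contains(&next) {
            return None; // cycle: the chain never reaches a real block
        }
        seen.push(next);
        target = next;
    }
    Some(target)
}

fn main() {
    let goto = vec![Some(1), Some(2), None, Some(3)];
    assert_eq!(final_target(&goto, 0), Some(2)); // 0 -> 1 -> 2
    assert_eq!(final_target(&goto, 3), None);    // 3 jumps to itself
}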
@@ -1949,9 +1949,7 @@ impl<'a> Resolver<'a> {
                            this.check_trait_item(impl_item.ident.name,
                                                  impl_item.span,
                                |n, s| ResolutionError::ConstNotMemberOfTrait(n, s));
-                            this.with_constant_rib(|this| {
-                                visit::walk_impl_item(this, impl_item);
-                            });
+                            visit::walk_impl_item(this, impl_item);
                        }
                        ImplItemKind::Method(ref sig, _) => {
                            // If this is a trait impl, ensure the method
@@ -59,7 +59,7 @@ use callee::{Callee, CallArgs, ArgExprs, ArgVals};
 use cleanup::{self, CleanupMethods, DropHint};
 use closure;
 use common::{Block, C_bool, C_bytes_in_context, C_i32, C_int, C_uint, C_integral};
-use collector::{self, TransItem, TransItemState, TransItemCollectionMode};
+use collector::{self, TransItemState, TransItemCollectionMode};
 use common::{C_null, C_struct_in_context, C_u64, C_u8, C_undef};
 use common::{CrateContext, DropFlagHintsMap, Field, FunctionContext};
 use common::{Result, NodeIdAndSpan, VariantInfo};
@@ -80,8 +80,9 @@ use machine::{llalign_of_min, llsize_of, llsize_of_real};
 use meth;
 use mir;
 use monomorphize::{self, Instance};
-use partitioning::{self, PartitioningStrategy, InstantiationMode, CodegenUnit};
+use partitioning::{self, PartitioningStrategy, CodegenUnit};
 use symbol_names_test;
+use trans_item::TransItem;
 use tvec;
 use type_::Type;
 use type_of;
@@ -2941,8 +2942,8 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
         None => TransItemCollectionMode::Lazy
     };

-    let (items, reference_map) = time(time_passes, "translation item collection", || {
-        collector::collect_crate_translation_items(scx, collection_mode)
+    let (items, inlining_map) = time(time_passes, "translation item collection", || {
+        collector::collect_crate_translation_items(&scx, collection_mode)
     });

     let strategy = if scx.sess().opts.debugging_opts.incremental.is_some() {
@@ -2955,7 +2956,7 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
         partitioning::partition(scx.tcx(),
                                 items.iter().cloned(),
                                 strategy,
-                                &reference_map)
+                                &inlining_map)
     });

     if scx.sess().opts.debugging_opts.print_trans_items.is_some() {
@@ -2983,18 +2984,17 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
             output.push_str(&cgu_name[..]);

             let linkage_abbrev = match linkage {
-                InstantiationMode::Def(llvm::ExternalLinkage) => "External",
-                InstantiationMode::Def(llvm::AvailableExternallyLinkage) => "Available",
-                InstantiationMode::Def(llvm::LinkOnceAnyLinkage) => "OnceAny",
-                InstantiationMode::Def(llvm::LinkOnceODRLinkage) => "OnceODR",
-                InstantiationMode::Def(llvm::WeakAnyLinkage) => "WeakAny",
-                InstantiationMode::Def(llvm::WeakODRLinkage) => "WeakODR",
-                InstantiationMode::Def(llvm::AppendingLinkage) => "Appending",
-                InstantiationMode::Def(llvm::InternalLinkage) => "Internal",
-                InstantiationMode::Def(llvm::PrivateLinkage) => "Private",
-                InstantiationMode::Def(llvm::ExternalWeakLinkage) => "ExternalWeak",
-                InstantiationMode::Def(llvm::CommonLinkage) => "Common",
-                InstantiationMode::Decl => "Declaration",
+                llvm::ExternalLinkage => "External",
+                llvm::AvailableExternallyLinkage => "Available",
+                llvm::LinkOnceAnyLinkage => "OnceAny",
+                llvm::LinkOnceODRLinkage => "OnceODR",
+                llvm::WeakAnyLinkage => "WeakAny",
+                llvm::WeakODRLinkage => "WeakODR",
+                llvm::AppendingLinkage => "Appending",
+                llvm::InternalLinkage => "Internal",
+                llvm::PrivateLinkage => "Private",
+                llvm::ExternalWeakLinkage => "ExternalWeak",
+                llvm::CommonLinkage => "Common",
             };

             output.push_str("[");
@@ -188,9 +188,9 @@ impl<'tcx> Callee<'tcx> {
                 };
                 Callee::ptr(immediate_rvalue(llfn, fn_ptr_ty))
             }
-            traits::VtableFnPointer(fn_ty) => {
+            traits::VtableFnPointer(vtable_fn_pointer) => {
                 let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap();
-                let llfn = trans_fn_pointer_shim(ccx, trait_closure_kind, fn_ty);
+                let llfn = trans_fn_pointer_shim(ccx, trait_closure_kind, vtable_fn_pointer.fn_ty);

                 let method_ty = def_ty(tcx, def_id, substs);
                 let fn_ptr_ty = match method_ty.sty {
@@ -494,7 +494,7 @@ fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         _ => bug!("expected fn item type, found {}", ty)
     };

-    let instance = Instance::mono(ccx.tcx(), def_id);
+    let instance = Instance::mono(ccx.shared(), def_id);
     if let Some(&llfn) = ccx.instances().borrow().get(&instance) {
         return immediate_rvalue(llfn, fn_ptr_ty);
     }
@@ -188,8 +188,6 @@
 //! this is not implemented however: a translation item will be produced
 //! regardless of whether it is actually needed or not.

-use rustc_data_structures::bitvec::BitVector;
-
 use rustc::hir;
 use rustc::hir::intravisit as hir_visit;

@@ -198,27 +196,24 @@ use rustc::hir::def_id::DefId;
 use rustc::middle::lang_items::{ExchangeFreeFnLangItem, ExchangeMallocFnLangItem};
 use rustc::traits;
 use rustc::ty::subst::{self, Substs, Subst};
-use rustc::ty::{self, Ty, TypeFoldable, TyCtxt};
+use rustc::ty::{self, TypeFoldable, TyCtxt};
 use rustc::ty::adjustment::CustomCoerceUnsized;
 use rustc::mir::repr as mir;
 use rustc::mir::visit as mir_visit;
 use rustc::mir::visit::Visitor as MirVisitor;

-use syntax::ast::{self, NodeId};
+use syntax::abi::Abi;
 use syntax::codemap::DUMMY_SP;
-use syntax::{attr, errors};
-use syntax::parse::token;
-
-use base::{custom_coerce_unsize_info, llvm_linkage_by_name};
+use syntax::errors;
+use base::custom_coerce_unsize_info;
 use context::SharedCrateContext;
 use common::{fulfill_obligation, normalize_and_test_predicates, type_is_sized};
 use glue::{self, DropGlueKind};
-use llvm;
 use meth;
 use monomorphize::{self, Instance};
 use util::nodemap::{FnvHashSet, FnvHashMap, DefIdMap};

-use std::hash::{Hash, Hasher};
+use trans_item::{TransItem, type_to_string, def_id_to_string};

 #[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
 pub enum TransItemCollectionMode {
@@ -226,71 +221,35 @@ pub enum TransItemCollectionMode {
     Lazy
 }

-#[derive(PartialEq, Eq, Clone, Copy, Debug)]
-pub enum TransItem<'tcx> {
-    DropGlue(DropGlueKind<'tcx>),
-    Fn(Instance<'tcx>),
-    Static(NodeId)
-}
-
-impl<'tcx> Hash for TransItem<'tcx> {
-    fn hash<H: Hasher>(&self, s: &mut H) {
-        match *self {
-            TransItem::DropGlue(t) => {
-                0u8.hash(s);
-                t.hash(s);
-            },
-            TransItem::Fn(instance) => {
-                1u8.hash(s);
-                instance.def.hash(s);
-                (instance.substs as *const _ as usize).hash(s);
-            }
-            TransItem::Static(node_id) => {
-                2u8.hash(s);
-                node_id.hash(s);
-            }
-        };
-    }
-}
-
 /// Maps every translation item to all translation items it references in its
 /// body.
-pub struct ReferenceMap<'tcx> {
-    // Maps a source translation item to a range of target translation items.
+pub struct InliningMap<'tcx> {
+    // Maps a source translation item to a range of target translation items
+    // that are potentially inlined by LLVM into the source.
     // The two numbers in the tuple are the start (inclusive) and
-    // end index (exclusive) within the `targets` and the `inlined` vecs.
+    // end index (exclusive) within the `targets` vecs.
     index: FnvHashMap<TransItem<'tcx>, (usize, usize)>,
     targets: Vec<TransItem<'tcx>>,
-    inlined: BitVector
 }

-impl<'tcx> ReferenceMap<'tcx> {
+impl<'tcx> InliningMap<'tcx> {

-    fn new() -> ReferenceMap<'tcx> {
-        ReferenceMap {
+    fn new() -> InliningMap<'tcx> {
+        InliningMap {
             index: FnvHashMap(),
             targets: Vec::new(),
-            inlined: BitVector::new(64 * 256),
         }
     }

-    fn record_references<I>(&mut self, source: TransItem<'tcx>, targets: I)
-        where I: Iterator<Item=(TransItem<'tcx>, bool)>
+    fn record_inlining_canditates<I>(&mut self,
+                                     source: TransItem<'tcx>,
+                                     targets: I)
+        where I: Iterator<Item=TransItem<'tcx>>
     {
         assert!(!self.index.contains_key(&source));

         let start_index = self.targets.len();
-        for (target, inlined) in targets {
-            let index = self.targets.len();
-            self.targets.push(target);
-            self.inlined.grow(index + 1);
-
-            if inlined {
-                self.inlined.insert(index);
-            }
-        }
-
+        self.targets.extend(targets);
         let end_index = self.targets.len();
         self.index.insert(source, (start_index, end_index));
     }
@@ -301,28 +260,17 @@ impl<'tcx> ReferenceMap<'tcx> {
         where F: FnMut(TransItem<'tcx>) {
         if let Some(&(start_index, end_index)) = self.index.get(&source)
         {
-            for index in start_index .. end_index {
-                if self.inlined.contains(index) {
-                    f(self.targets[index])
-                }
+            for candidate in &self.targets[start_index .. end_index] {
+                f(*candidate)
             }
         }
     }
-
-    pub fn get_direct_references_from(&self, source: TransItem<'tcx>) -> &[TransItem<'tcx>]
-    {
-        if let Some(&(start_index, end_index)) = self.index.get(&source) {
-            &self.targets[start_index .. end_index]
-        } else {
-            &self.targets[0 .. 0]
-        }
-    }
 }

 pub fn collect_crate_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
                                                  mode: TransItemCollectionMode)
                                                  -> (FnvHashSet<TransItem<'tcx>>,
-                                                     ReferenceMap<'tcx>) {
+                                                     InliningMap<'tcx>) {
     // We are not tracking dependencies of this pass as it has to be re-executed
     // every time no matter what.
     scx.tcx().dep_graph.with_ignore(|| {
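Aside, not part of the diff: `InliningMap` stores, for each source item, one (start, end) range into a single shared `targets` vector rather than a Vec per key, so recording is a bulk `extend` and lookup is a slice walk. A minimal standalone sketch of that layout, using std's HashMap in place of FnvHashMap and plain values in place of TransItem:

use std::collections::HashMap;

// Illustrative only: a flattened one-to-many map in the style of InliningMap.
struct RangeMap<K, V> {
    index: HashMap<K, (usize, usize)>,
    targets: Vec<V>,
}

impl<K: std::hash::Hash + Eq, V: Copy> RangeMap<K, V> {
    fn new() -> Self {
        RangeMap { index: HashMap::new(), targets: Vec::new() }
    }

    // Record all targets of `source` as one contiguous range.
    fn record<I: Iterator<Item = V>>(&mut self, source: K, targets: I) {
        assert!(!self.index.contains_key(&source));
        let start = self.targets.len();
        self.targets.extend(targets);
        let end = self.targets.len();
        self.index.insert(source, (start, end));
    }

    // Visit each recorded target of `source`, if any.
    fn with_targets_of<F: FnMut(V)>(&self, source: &K, mut f: F) {
        if let Some(&(start, end)) = self.index.get(source) {
            for &t in &self.targets[start..end] {
                f(t);
            }
        }
    }
}

fn main() {
    let mut map = RangeMap::new();
    map.record("caller", [1, 2, 3].iter().cloned());
    map.with_targets_of(&"caller", |t| println!("{}", t));
}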
@@ -331,17 +279,17 @@ pub fn collect_crate_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 't
         debug!("Building translation item graph, beginning at roots");
         let mut visited = FnvHashSet();
         let mut recursion_depths = DefIdMap();
-        let mut reference_map = ReferenceMap::new();
+        let mut inlining_map = InliningMap::new();

         for root in roots {
             collect_items_rec(scx,
                               root,
                               &mut visited,
                               &mut recursion_depths,
-                              &mut reference_map);
+                              &mut inlining_map);
         }

-        (visited, reference_map)
+        (visited, inlining_map)
     })
 }
@@ -372,7 +320,7 @@ fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>,
                                    starting_point: TransItem<'tcx>,
                                    visited: &mut FnvHashSet<TransItem<'tcx>>,
                                    recursion_depths: &mut DefIdMap<usize>,
-                                   reference_map: &mut ReferenceMap<'tcx>) {
+                                   inlining_map: &mut InliningMap<'tcx>) {
     if !visited.insert(starting_point.clone()) {
         // We've been here already, no need to search again.
         return;
@@ -392,7 +340,26 @@ fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>,
             let ty = scx.tcx().lookup_item_type(def_id).ty;
             let ty = glue::get_drop_glue_type(scx.tcx(), ty);
             neighbors.push(TransItem::DropGlue(DropGlueKind::Ty(ty)));

             recursion_depth_reset = None;
+
+            // Scan the MIR in order to find function calls, closures, and
+            // drop-glue
+            let mir = errors::expect(scx.sess().diagnostic(), scx.get_mir(def_id),
+                || format!("Could not find MIR for static: {:?}", def_id));
+
+            let empty_substs = scx.empty_substs_for_def_id(def_id);
+            let mut visitor = MirNeighborCollector {
+                scx: scx,
+                mir: &mir,
+                output: &mut neighbors,
+                param_substs: empty_substs
+            };
+
+            visitor.visit_mir(&mir);
+            for promoted in &mir.promoted {
+                visitor.visit_mir(promoted);
+            }
         }
         TransItem::Fn(instance) => {
             // Keep track of the monomorphization recursion depth
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
record_references(scx.tcx(), starting_point, &neighbors[..], reference_map);
|
record_inlining_canditates(scx.tcx(), starting_point, &neighbors[..], inlining_map);
|
||||||
|
|
||||||
for neighbour in neighbors {
|
for neighbour in neighbors {
|
||||||
collect_items_rec(scx, neighbour, visited, recursion_depths, reference_map);
|
collect_items_rec(scx, neighbour, visited, recursion_depths, inlining_map);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some((def_id, depth)) = recursion_depth_reset {
|
if let Some((def_id, depth)) = recursion_depth_reset {
|
||||||
|
@@ -432,17 +399,19 @@ fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>,
     debug!("END collect_items_rec({})", starting_point.to_string(scx.tcx()));
 }

-fn record_references<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                               caller: TransItem<'tcx>,
-                               callees: &[TransItem<'tcx>],
-                               reference_map: &mut ReferenceMap<'tcx>) {
-    let iter = callees.into_iter()
-                      .map(|callee| {
-                          let is_inlining_candidate = callee.is_from_extern_crate() ||
-                                                      callee.requests_inline(tcx);
-                          (*callee, is_inlining_candidate)
-                      });
-    reference_map.record_references(caller, iter);
+fn record_inlining_canditates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                        caller: TransItem<'tcx>,
+                                        callees: &[TransItem<'tcx>],
+                                        inlining_map: &mut InliningMap<'tcx>) {
+    let is_inlining_candidate = |trans_item: &TransItem<'tcx>| {
+        trans_item.is_from_extern_crate() || trans_item.requests_inline(tcx)
+    };
+
+    let inlining_candidates = callees.into_iter()
+                                     .map(|x| *x)
+                                     .filter(is_inlining_candidate);
+
+    inlining_map.record_inlining_canditates(caller, inlining_candidates);
 }

 fn check_recursion_limit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
@@ -527,10 +496,11 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
                     .unwrap_or_else(|e| self.scx.sess().fatal(&e));

                 assert!(can_have_local_instance(self.scx.tcx(), exchange_malloc_fn_def_id));
+                let empty_substs = self.scx.empty_substs_for_def_id(exchange_malloc_fn_def_id);
                 let exchange_malloc_fn_trans_item =
                     create_fn_trans_item(self.scx.tcx(),
                                          exchange_malloc_fn_def_id,
-                                         self.scx.tcx().mk_substs(Substs::empty()),
+                                         empty_substs,
                                          self.param_substs);

                 self.output.push(exchange_malloc_fn_trans_item);
@@ -633,6 +603,49 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
             can_have_local_instance(tcx, def_id)
         }
     }
+
+    // This takes care of the "drop_in_place" intrinsic for which we otherwise
+    // we would not register drop-glues.
+    fn visit_terminator_kind(&mut self,
+                             block: mir::BasicBlock,
+                             kind: &mir::TerminatorKind<'tcx>) {
+        let tcx = self.scx.tcx();
+        match *kind {
+            mir::TerminatorKind::Call {
+                func: mir::Operand::Constant(ref constant),
+                ref args,
+                ..
+            } => {
+                match constant.ty.sty {
+                    ty::TyFnDef(def_id, _, bare_fn_ty)
+                        if is_drop_in_place_intrinsic(tcx, def_id, bare_fn_ty) => {
+                        let operand_ty = self.mir.operand_ty(tcx, &args[0]);
+                        if let ty::TyRawPtr(mt) = operand_ty.sty {
+                            let operand_ty = monomorphize::apply_param_substs(tcx,
+                                                                              self.param_substs,
+                                                                              &mt.ty);
+                            self.output.push(TransItem::DropGlue(DropGlueKind::Ty(operand_ty)));
+                        } else {
+                            bug!("Has the drop_in_place() intrinsic's signature changed?")
+                        }
+                    }
+                    _ => { /* Nothing to do. */ }
+                }
+            }
+            _ => { /* Nothing to do. */ }
+        }
+
+        self.super_terminator_kind(block, kind);
+
+        fn is_drop_in_place_intrinsic<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                                def_id: DefId,
+                                                bare_fn_ty: &ty::BareFnTy<'tcx>)
+                                                -> bool {
+            (bare_fn_ty.abi == Abi::RustIntrinsic ||
+             bare_fn_ty.abi == Abi::PlatformIntrinsic) &&
+            tcx.item_name(def_id).as_str() == "drop_in_place"
+        }
+    }
 }

 fn can_have_local_instance<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
@@ -667,10 +680,11 @@ fn find_drop_glue_neighbors<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
                 .unwrap_or_else(|e| scx.sess().fatal(&e));

             assert!(can_have_local_instance(scx.tcx(), exchange_free_fn_def_id));
+            let fn_substs = scx.empty_substs_for_def_id(exchange_free_fn_def_id);
             let exchange_free_fn_trans_item =
                 create_fn_trans_item(scx.tcx(),
                                      exchange_free_fn_def_id,
-                                     scx.tcx().mk_substs(Substs::empty()),
+                                     fn_substs,
                                      scx.tcx().mk_substs(Substs::empty()));

             output.push(exchange_free_fn_trans_item);
@@ -730,7 +744,6 @@ fn find_drop_glue_neighbors<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
         ty::TyRef(..) |
         ty::TyFnDef(..) |
         ty::TyFnPtr(_) |
-        ty::TySlice(_) |
         ty::TyTrait(_) => {
             /* nothing to do */
         }
@@ -756,6 +769,7 @@ fn find_drop_glue_neighbors<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
             }
         }
         ty::TyBox(inner_type) |
+        ty::TySlice(inner_type) |
         ty::TyArray(inner_type, _) => {
             let inner_type = glue::get_drop_glue_type(scx.tcx(), inner_type);
             if glue::type_needs_drop(scx.tcx(), inner_type) {
@@ -777,6 +791,8 @@ fn find_drop_glue_neighbors<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
             bug!("encountered unexpected type");
         }
     }
+
+
 }

 fn do_static_dispatch<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
@@ -1097,7 +1113,7 @@ impl<'b, 'a, 'v> hir_visit::Visitor<'v> for RootCollector<'b, 'a, 'v> {
                 debug!("RootCollector: ItemFn({})",
                        def_id_to_string(self.scx.tcx(), def_id));

-                let instance = Instance::mono(self.scx.tcx(), def_id);
+                let instance = Instance::mono(self.scx, def_id);
                 self.output.push(TransItem::Fn(instance));
             }
         }
@@ -1134,7 +1150,7 @@ impl<'b, 'a, 'v> hir_visit::Visitor<'v> for RootCollector<'b, 'a, 'v> {
                     debug!("RootCollector: MethodImplItem({})",
                            def_id_to_string(self.scx.tcx(), def_id));

-                    let instance = Instance::mono(self.scx.tcx(), def_id);
+                    let instance = Instance::mono(self.scx, def_id);
                     self.output.push(TransItem::Fn(instance));
                 }
             }
@@ -1210,334 +1226,6 @@ fn create_trans_items_for_default_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         }
     }
 }
-
-//=-----------------------------------------------------------------------------
-// TransItem String Keys
-//=-----------------------------------------------------------------------------
-
-// The code below allows for producing a unique string key for a trans item.
-// These keys are used by the handwritten auto-tests, so they need to be
-// predictable and human-readable.
-//
-// Note: A lot of this could looks very similar to what's already in the
-// ppaux module. It would be good to refactor things so we only have one
-// parameterizable implementation for printing types.
-
-/// Same as `unique_type_name()` but with the result pushed onto the given
-/// `output` parameter.
-pub fn push_unique_type_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                       t: ty::Ty<'tcx>,
-                                       output: &mut String) {
-    match t.sty {
-        ty::TyBool => output.push_str("bool"),
-        ty::TyChar => output.push_str("char"),
-        ty::TyStr => output.push_str("str"),
-        ty::TyInt(ast::IntTy::Is) => output.push_str("isize"),
-        ty::TyInt(ast::IntTy::I8) => output.push_str("i8"),
-        ty::TyInt(ast::IntTy::I16) => output.push_str("i16"),
-        ty::TyInt(ast::IntTy::I32) => output.push_str("i32"),
-        ty::TyInt(ast::IntTy::I64) => output.push_str("i64"),
-        ty::TyUint(ast::UintTy::Us) => output.push_str("usize"),
-        ty::TyUint(ast::UintTy::U8) => output.push_str("u8"),
-        ty::TyUint(ast::UintTy::U16) => output.push_str("u16"),
-        ty::TyUint(ast::UintTy::U32) => output.push_str("u32"),
-        ty::TyUint(ast::UintTy::U64) => output.push_str("u64"),
-        ty::TyFloat(ast::FloatTy::F32) => output.push_str("f32"),
-        ty::TyFloat(ast::FloatTy::F64) => output.push_str("f64"),
-        ty::TyStruct(adt_def, substs) |
-        ty::TyEnum(adt_def, substs) => {
-            push_item_name(tcx, adt_def.did, output);
-            push_type_params(tcx, &substs.types, &[], output);
-        },
-        ty::TyTuple(component_types) => {
-            output.push('(');
-            for &component_type in component_types {
-                push_unique_type_name(tcx, component_type, output);
-                output.push_str(", ");
-            }
-            if !component_types.is_empty() {
-                output.pop();
-                output.pop();
-            }
-            output.push(')');
-        },
-        ty::TyBox(inner_type) => {
-            output.push_str("Box<");
-            push_unique_type_name(tcx, inner_type, output);
-            output.push('>');
-        },
-        ty::TyRawPtr(ty::TypeAndMut { ty: inner_type, mutbl } ) => {
-            output.push('*');
-            match mutbl {
-                hir::MutImmutable => output.push_str("const "),
-                hir::MutMutable => output.push_str("mut "),
-            }
-
-            push_unique_type_name(tcx, inner_type, output);
-        },
-        ty::TyRef(_, ty::TypeAndMut { ty: inner_type, mutbl }) => {
-            output.push('&');
-            if mutbl == hir::MutMutable {
-                output.push_str("mut ");
-            }
-
-            push_unique_type_name(tcx, inner_type, output);
-        },
-        ty::TyArray(inner_type, len) => {
-            output.push('[');
-            push_unique_type_name(tcx, inner_type, output);
-            output.push_str(&format!("; {}", len));
-            output.push(']');
-        },
-        ty::TySlice(inner_type) => {
-            output.push('[');
-            push_unique_type_name(tcx, inner_type, output);
-            output.push(']');
-        },
-        ty::TyTrait(ref trait_data) => {
-            push_item_name(tcx, trait_data.principal.skip_binder().def_id, output);
-            push_type_params(tcx,
-                             &trait_data.principal.skip_binder().substs.types,
-                             &trait_data.bounds.projection_bounds,
-                             output);
-        },
-        ty::TyFnDef(_, _, &ty::BareFnTy{ unsafety, abi, ref sig } ) |
-        ty::TyFnPtr(&ty::BareFnTy{ unsafety, abi, ref sig } ) => {
-            if unsafety == hir::Unsafety::Unsafe {
-                output.push_str("unsafe ");
-            }
-
-            if abi != ::abi::Abi::Rust {
-                output.push_str("extern \"");
-                output.push_str(abi.name());
-                output.push_str("\" ");
-            }
-
-            output.push_str("fn(");
-
-            let sig = tcx.erase_late_bound_regions(sig);
-            if !sig.inputs.is_empty() {
-                for &parameter_type in &sig.inputs {
-                    push_unique_type_name(tcx, parameter_type, output);
-                    output.push_str(", ");
-                }
-                output.pop();
-                output.pop();
-            }
-
-            if sig.variadic {
-                if !sig.inputs.is_empty() {
-                    output.push_str(", ...");
-                } else {
-                    output.push_str("...");
-                }
-            }
-
-            output.push(')');
-
-            match sig.output {
-                ty::FnConverging(result_type) if result_type.is_nil() => {}
-                ty::FnConverging(result_type) => {
-                    output.push_str(" -> ");
-                    push_unique_type_name(tcx, result_type, output);
-                }
-                ty::FnDiverging => {
-                    output.push_str(" -> !");
-                }
-            }
-        },
-        ty::TyClosure(def_id, ref closure_substs) => {
-            push_item_name(tcx, def_id, output);
-            output.push_str("{");
-            output.push_str(&format!("{}:{}", def_id.krate, def_id.index.as_usize()));
-            output.push_str("}");
-            push_type_params(tcx, &closure_substs.func_substs.types, &[], output);
-        }
-        ty::TyError |
-        ty::TyInfer(_) |
-        ty::TyProjection(..) |
-        ty::TyParam(_) => {
-            bug!("debuginfo: Trying to create type name for \
-                unexpected type: {:?}", t);
-        }
-    }
-}
-
-fn push_item_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                            def_id: DefId,
-                            output: &mut String) {
-    let def_path = tcx.def_path(def_id);
-
-    // some_crate::
-    output.push_str(&tcx.crate_name(def_path.krate));
-    output.push_str("::");
-
-    // foo::bar::ItemName::
-    for part in tcx.def_path(def_id).data {
-        output.push_str(&format!("{}[{}]::",
-                        part.data.as_interned_str(),
-                        part.disambiguator));
-    }
-
-    // remove final "::"
-    output.pop();
-    output.pop();
-}
-
-fn push_type_params<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                              types: &'tcx subst::VecPerParamSpace<Ty<'tcx>>,
-                              projections: &[ty::PolyProjectionPredicate<'tcx>],
-                              output: &mut String) {
-    if types.is_empty() && projections.is_empty() {
-        return;
-    }
-
-    output.push('<');
-
-    for &type_parameter in types {
-        push_unique_type_name(tcx, type_parameter, output);
-        output.push_str(", ");
-    }
-
-    for projection in projections {
-        let projection = projection.skip_binder();
-        let name = token::get_ident_interner().get(projection.projection_ty.item_name);
-        output.push_str(&name[..]);
-        output.push_str("=");
-        push_unique_type_name(tcx, projection.ty, output);
-        output.push_str(", ");
-    }
-
-    output.pop();
-    output.pop();
-
-    output.push('>');
-}
-
-fn push_instance_as_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                     instance: Instance<'tcx>,
-                                     output: &mut String) {
-    push_item_name(tcx, instance.def, output);
-    push_type_params(tcx, &instance.substs.types, &[], output);
-}
-
-pub fn def_id_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                  def_id: DefId) -> String {
-    let mut output = String::new();
-    push_item_name(tcx, def_id, &mut output);
-    output
-}
-
-fn type_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                            ty: ty::Ty<'tcx>)
-                            -> String {
-    let mut output = String::new();
-    push_unique_type_name(tcx, ty, &mut output);
-    output
-}
-
-impl<'a, 'tcx> TransItem<'tcx> {
-
-    pub fn requests_inline(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
-        match *self {
-            TransItem::Fn(ref instance) => {
-                let attributes = tcx.get_attrs(instance.def);
-                attr::requests_inline(&attributes[..])
-            }
-            TransItem::DropGlue(..) => true,
-            TransItem::Static(..) => false,
-        }
-    }
-
-    pub fn is_from_extern_crate(&self) -> bool {
-        match *self {
-            TransItem::Fn(ref instance) => !instance.def.is_local(),
-            TransItem::DropGlue(..) |
-            TransItem::Static(..) => false,
-        }
-    }
-
-    pub fn is_lazily_instantiated(&self) -> bool {
-        match *self {
-            TransItem::Fn(ref instance) => !instance.substs.types.is_empty(),
-            TransItem::DropGlue(..) => true,
-            TransItem::Static(..) => false,
-        }
-    }
-
-    pub fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<llvm::Linkage> {
-        let def_id = match *self {
-            TransItem::Fn(ref instance) => instance.def,
-            TransItem::Static(node_id) => tcx.map.local_def_id(node_id),
-            TransItem::DropGlue(..) => return None,
-        };
-
-        let attributes = tcx.get_attrs(def_id);
-        if let Some(name) = attr::first_attr_value_str_by_name(&attributes, "linkage") {
-            if let Some(linkage) = llvm_linkage_by_name(&name) {
-                Some(linkage)
-            } else {
-                let span = tcx.map.span_if_local(def_id);
-                if let Some(span) = span {
-                    tcx.sess.span_fatal(span, "invalid linkage specified")
-                } else {
-                    tcx.sess.fatal(&format!("invalid linkage specified: {}", name))
-                }
-            }
-        } else {
-            None
-        }
-    }
-
-    pub fn to_string(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
-        let hir_map = &tcx.map;
-
-        return match *self {
-            TransItem::DropGlue(dg) => {
-                let mut s = String::with_capacity(32);
-                match dg {
-                    DropGlueKind::Ty(_) => s.push_str("drop-glue "),
-                    DropGlueKind::TyContents(_) => s.push_str("drop-glue-contents "),
-                };
-                push_unique_type_name(tcx, dg.ty(), &mut s);
-                s
-            }
-            TransItem::Fn(instance) => {
-                to_string_internal(tcx, "fn ", instance)
-            },
-            TransItem::Static(node_id) => {
-                let def_id = hir_map.local_def_id(node_id);
-                let instance = Instance::mono(tcx, def_id);
-                to_string_internal(tcx, "static ", instance)
-            },
-        };
-
-        fn to_string_internal<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                        prefix: &str,
-                                        instance: Instance<'tcx>)
-                                        -> String {
-            let mut result = String::with_capacity(32);
-            result.push_str(prefix);
-            push_instance_as_string(tcx, instance, &mut result);
-            result
-        }
-    }
-
-    fn to_raw_string(&self) -> String {
-        match *self {
-            TransItem::DropGlue(dg) => {
-                format!("DropGlue({})", dg.ty() as *const _ as usize)
-            }
-            TransItem::Fn(instance) => {
-                format!("Fn({:?}, {})",
-                         instance.def,
-                         instance.substs as *const _ as usize)
-            }
-            TransItem::Static(id) => {
-                format!("Static({:?})", id)
-            }
-        }
-    }
-}
-
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum TransItemState {
     PredictedAndGenerated,
@@ -1546,7 +1234,7 @@ pub enum TransItemState {
 }

 pub fn collecting_debug_information(scx: &SharedCrateContext) -> bool {
-    return cfg!(debug_assertions) &&
+    return scx.sess().opts.cg.debug_assertions == Some(true) &&
            scx.sess().opts.debugging_opts.print_trans_items.is_some();
 }

@@ -21,7 +21,8 @@ use rustc::hir::map as hir_map;
 use {abi, adt, closure, debuginfo, expr, machine};
 use base::{self, exported_name, imported_name, push_ctxt};
 use callee::Callee;
-use collector::{self, TransItem};
+use collector;
+use trans_item::TransItem;
 use common::{type_is_sized, C_nil, const_get_elt};
 use common::{CrateContext, C_integral, C_floating, C_bool, C_str_slice, C_bytes, val_ty};
 use common::{C_struct, C_undef, const_to_opt_int, const_to_opt_uint, VariantInfo, C_uint};
@@ -1011,7 +1012,7 @@ pub fn get_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId)
                             -> Datum<'tcx, Lvalue> {
     let ty = ccx.tcx().lookup_item_type(def_id).ty;

-    let instance = Instance::mono(ccx.tcx(), def_id);
+    let instance = Instance::mono(ccx.shared(), def_id);
     if let Some(&g) = ccx.instances().borrow().get(&instance) {
         return Datum::new(g, ty, Lvalue::new("static"));
     }
@@ -27,8 +27,9 @@ use glue::DropGlueKind;
 use mir::CachedMir;
 use monomorphize::Instance;

-use collector::{TransItem, TransItemState};
 use partitioning::CodegenUnit;
+use collector::TransItemState;
+use trans_item::TransItem;
 use type_::{Type, TypeNames};
 use rustc::ty::subst::{Substs, VecPerParamSpace};
 use rustc::ty::{self, Ty, TyCtxt};
@@ -487,6 +488,21 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
     pub fn translation_items(&self) -> &RefCell<FnvHashMap<TransItem<'tcx>, TransItemState>> {
         &self.translation_items
     }
+
+    /// Given the def-id of some item that has no type parameters, make
+    /// a suitable "empty substs" for it.
+    pub fn empty_substs_for_def_id(&self, item_def_id: DefId) -> &'tcx Substs<'tcx> {
+        let scheme = self.tcx().lookup_item_type(item_def_id);
+        self.empty_substs_for_scheme(&scheme)
+    }
+
+    pub fn empty_substs_for_scheme(&self, scheme: &ty::TypeScheme<'tcx>)
+                                   -> &'tcx Substs<'tcx> {
+        assert!(scheme.generics.types.is_empty());
+        self.tcx().mk_substs(
+            Substs::new(VecPerParamSpace::empty(),
+                        scheme.generics.regions.map(|_| ty::ReStatic)))
+    }
 }

 impl<'tcx> LocalCrateContext<'tcx> {
@@ -901,16 +917,12 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
     /// Given the def-id of some item that has no type parameters, make
     /// a suitable "empty substs" for it.
     pub fn empty_substs_for_def_id(&self, item_def_id: DefId) -> &'tcx Substs<'tcx> {
-        let scheme = self.tcx().lookup_item_type(item_def_id);
-        self.empty_substs_for_scheme(&scheme)
+        self.shared().empty_substs_for_def_id(item_def_id)
     }

     pub fn empty_substs_for_scheme(&self, scheme: &ty::TypeScheme<'tcx>)
                                    -> &'tcx Substs<'tcx> {
-        assert!(scheme.generics.types.is_empty());
-        self.tcx().mk_substs(
-            Substs::new(VecPerParamSpace::empty(),
-                        scheme.generics.regions.map(|_| ty::ReStatic)))
+        self.shared().empty_substs_for_scheme(scheme)
     }
 }

@@ -29,13 +29,14 @@ use build::*;
 use callee::{Callee, ArgVals};
 use cleanup;
 use cleanup::CleanupMethods;
-use collector::{self, TransItem};
+use collector;
 use common::*;
 use debuginfo::DebugLoc;
 use declare;
 use expr;
 use machine::*;
 use monomorphize;
+use trans_item::TransItem;
 use type_of::{type_of, sizing_type_of, align_of};
 use type_::Type;
 use value::Value;
@@ -123,6 +123,7 @@ mod mir;
 mod monomorphize;
 mod partitioning;
 mod symbol_names_test;
+mod trans_item;
 mod tvec;
 mod type_;
 mod type_of;
@@ -176,7 +176,10 @@ pub fn get_vtable<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                                      trait_closure_kind);
            vec![llfn].into_iter()
        }
-       traits::VtableFnPointer(bare_fn_ty) => {
+       traits::VtableFnPointer(
+           traits::VtableFnPointerData {
+               fn_ty: bare_fn_ty,
+               nested: _ }) => {
            let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_ref.def_id()).unwrap();
            vec![trans_fn_pointer_shim(ccx, trait_closure_kind, bare_fn_ty)].into_iter()
        }

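The rewritten arm destructures the new VtableFnPointerData payload directly in the match pattern, binding the function type and ignoring the nested obligations. A self-contained sketch of that pattern style over hypothetical types (not the real trait-selection enums):

// Hypothetical mirror of the shape change: a bare payload replaced by a struct payload.
struct FnPointerData {
    fn_ty: &'static str,
    nested: Vec<u32>,
}

enum Vtable {
    Impl(&'static str),
    FnPointer(FnPointerData),
}

fn describe(v: &Vtable) -> String {
    match v {
        &Vtable::Impl(name) => format!("impl {}", name),
        // Bind the field that is needed and discard the rest, like `nested: _` in the patch.
        &Vtable::FnPointer(FnPointerData { fn_ty, nested: _ }) => format!("fn pointer {}", fn_ty),
    }
}

fn main() {
    let calls = vec![
        Vtable::Impl("MyTrait for MyType"),
        Vtable::FnPointer(FnPointerData { fn_ty: "fn(i32) -> i32", nested: vec![] }),
    ];
    for call in &calls {
        println!("{}", describe(call));
    }
}
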
@@ -24,6 +24,7 @@ use meth;
 use type_of;
 use glue;
 use type_::Type;
+use rustc_data_structures::fnv::FnvHashMap;

 use super::{MirContext, TempRef, drop};
 use super::constant::Const;

@@ -95,17 +96,32 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
                    adt::trans_get_discr(bcx, &repr, discr_lvalue.llval, None, true)
                );

-               // The else branch of the Switch can't be hit, so branch to an unreachable
-               // instruction so LLVM knows that
-               let unreachable_blk = self.unreachable_block();
-               let switch = bcx.switch(discr, unreachable_blk.llbb, targets.len());
+               let mut bb_hist = FnvHashMap();
+               for target in targets {
+                   *bb_hist.entry(target).or_insert(0) += 1;
+               }
+               let (default_bb, default_blk) = match bb_hist.iter().max_by_key(|&(_, c)| c) {
+                   // If a single target basic blocks is predominant, promote that to be the
+                   // default case for the switch instruction to reduce the size of the generated
+                   // code. This is especially helpful in cases like an if-let on a huge enum.
+                   // Note: This optimization is only valid for exhaustive matches.
+                   Some((&&bb, &c)) if c > targets.len() / 2 => {
+                       (Some(bb), self.blocks[bb.index()])
+                   }
+                   // We're generating an exhaustive switch, so the else branch
+                   // can't be hit. Branching to an unreachable instruction
+                   // lets LLVM know this
+                   _ => (None, self.unreachable_block())
+               };
+               let switch = bcx.switch(discr, default_blk.llbb, targets.len());
                assert_eq!(adt_def.variants.len(), targets.len());
-               for (adt_variant, target) in adt_def.variants.iter().zip(targets) {
-                   let llval = bcx.with_block(|bcx|
-                       adt::trans_case(bcx, &repr, Disr::from(adt_variant.disr_val))
-                   );
-                   let llbb = self.llblock(*target);
+               for (adt_variant, &target) in adt_def.variants.iter().zip(targets) {
+                   if default_bb != Some(target) {
+                       let llbb = self.llblock(target);
+                       let llval = bcx.with_block(|bcx| adt::trans_case(
+                           bcx, &repr, Disr::from(adt_variant.disr_val)));
                    build::AddCase(switch, llval, llbb)
+                   }
                }
            }
        }

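The new block builds a histogram of switch targets and, when a single block absorbs more than half of the arms, promotes it to the switch default in place of the old unreachable default. The selection logic in isolation, over plain standard-library types and made-up block indices:

use std::collections::HashMap;

// Given the target block of every switch arm, pick a default: the predominant
// target if it covers more than half of the arms (valid to fold into a default
// only because the switch is exhaustive), otherwise none.
fn pick_default(targets: &[usize]) -> Option<usize> {
    let mut bb_hist: HashMap<usize, usize> = HashMap::new();
    for &target in targets {
        *bb_hist.entry(target).or_insert(0) += 1;
    }
    match bb_hist.iter().max_by_key(|&(_, c)| *c) {
        Some((&bb, &c)) if c > targets.len() / 2 => Some(bb),
        _ => None,
    }
}

fn main() {
    // An if-let-like match on a large enum: most variants branch to the same block.
    let targets = vec![7, 7, 7, 7, 3, 7, 7];
    assert_eq!(pick_default(&targets), Some(7));
    // No predominant target: keep the unreachable default.
    assert_eq!(pick_default(&[1, 2, 3]), None);
}
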
@@ -854,6 +854,6 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {

 pub fn trans_static_initializer(ccx: &CrateContext, def_id: DefId)
                                 -> Result<ValueRef, ConstEvalFailure> {
-    let instance = Instance::mono(ccx.tcx(), def_id);
+    let instance = Instance::mono(ccx.shared(), def_id);
     MirConstContext::trans_def(ccx, instance, vec![]).map(|c| c.llval)
 }

@@ -183,8 +183,8 @@ impl<'tcx> Instance<'tcx> {
         assert!(substs.regions.iter().all(|&r| r == ty::ReStatic));
         Instance { def: def_id, substs: substs }
     }
-    pub fn mono<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Instance<'tcx> {
-        Instance::new(def_id, tcx.mk_substs(Substs::empty()))
+    pub fn mono<'a>(scx: &SharedCrateContext<'a, 'tcx>, def_id: DefId) -> Instance<'tcx> {
+        Instance::new(def_id, scx.empty_substs_for_def_id(def_id))
     }
 }

@@ -116,30 +116,20 @@
 //! source-level module, functions from the same module will be available for
 //! inlining, even when they are not marked #[inline].

-use collector::{TransItem, ReferenceMap};
+use collector::InliningMap;
+use llvm;
 use monomorphize;
 use rustc::hir::def_id::DefId;
 use rustc::hir::map::DefPathData;
 use rustc::ty::TyCtxt;
 use rustc::ty::item_path::characteristic_def_id_of_type;
-use llvm;
 use syntax::parse::token::{self, InternedString};
+use trans_item::TransItem;
 use util::nodemap::{FnvHashMap, FnvHashSet};

-#[derive(Clone, Copy, Eq, PartialEq, Debug)]
-pub enum InstantiationMode {
-    /// This variant indicates that a translation item should be placed in some
-    /// codegen unit as a definition and with the given linkage.
-    Def(llvm::Linkage),
-
-    /// This variant indicates that only a declaration of some translation item
-    /// should be placed in a given codegen unit.
-    Decl
-}
-
 pub struct CodegenUnit<'tcx> {
     pub name: InternedString,
-    pub items: FnvHashMap<TransItem<'tcx>, InstantiationMode>,
+    pub items: FnvHashMap<TransItem<'tcx>, llvm::Linkage>,
 }

 pub enum PartitioningStrategy {

@@ -156,7 +146,7 @@ const FALLBACK_CODEGEN_UNIT: &'static str = "__rustc_fallback_codegen_unit";
 pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                               trans_items: I,
                               strategy: PartitioningStrategy,
-                              reference_map: &ReferenceMap<'tcx>)
+                              inlining_map: &InliningMap<'tcx>)
                               -> Vec<CodegenUnit<'tcx>>
     where I: Iterator<Item = TransItem<'tcx>>
 {

@@ -176,13 +166,8 @@ pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // translation items can be drop-glue, functions from external crates, and
     // local functions the definition of which is marked with #[inline].
     let post_inlining = place_inlined_translation_items(initial_partitioning,
-                                                        reference_map);
+                                                        inlining_map);

-    // Now we know all *definitions* within all codegen units, thus we can
-    // easily determine which declarations need to be placed within each one.
-    let post_declarations = place_declarations(post_inlining, reference_map);
-
-    post_declarations.0
+    post_inlining.0
 }

 struct PreInliningPartitioning<'tcx> {

@@ -191,7 +176,6 @@ struct PreInliningPartitioning<'tcx> {
 }

 struct PostInliningPartitioning<'tcx>(Vec<CodegenUnit<'tcx>>);
-struct PostDeclarationsPartitioning<'tcx>(Vec<CodegenUnit<'tcx>>);

 fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                              trans_items: I)

@@ -239,8 +223,7 @@ fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                 }
             };

-            codegen_unit.items.insert(trans_item,
-                                      InstantiationMode::Def(linkage));
+            codegen_unit.items.insert(trans_item, linkage);
             roots.insert(trans_item);
         }
     }

@@ -294,7 +277,7 @@ fn merge_codegen_units<'tcx>(initial_partitioning: &mut PreInliningPartitioning<
 }

 fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartitioning<'tcx>,
-                                         reference_map: &ReferenceMap<'tcx>)
+                                         inlining_map: &InliningMap<'tcx>)
                                          -> PostInliningPartitioning<'tcx> {
     let mut new_partitioning = Vec::new();

@@ -302,7 +285,7 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
         // Collect all items that need to be available in this codegen unit
         let mut reachable = FnvHashSet();
         for root in codegen_unit.items.keys() {
-            follow_inlining(*root, reference_map, &mut reachable);
+            follow_inlining(*root, inlining_map, &mut reachable);
         }

         let mut new_codegen_unit = CodegenUnit {

@@ -312,22 +295,22 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit

         // Add all translation items that are not already there
         for trans_item in reachable {
-            if let Some(instantiation_mode) = codegen_unit.items.get(&trans_item) {
+            if let Some(linkage) = codegen_unit.items.get(&trans_item) {
                 // This is a root, just copy it over
-                new_codegen_unit.items.insert(trans_item, *instantiation_mode);
+                new_codegen_unit.items.insert(trans_item, *linkage);
             } else {
                 if initial_partitioning.roots.contains(&trans_item) {
                     // This item will be instantiated in some other codegen unit,
                     // so we just add it here with AvailableExternallyLinkage
                     new_codegen_unit.items.insert(trans_item,
-                                                  InstantiationMode::Def(llvm::AvailableExternallyLinkage));
+                                                  llvm::AvailableExternallyLinkage);
                 } else {
                     // We can't be sure if this will also be instantiated
                     // somewhere else, so we add an instance here with
                     // LinkOnceODRLinkage. That way the item can be discarded if
                     // it's not needed (inlined) after all.
                     new_codegen_unit.items.insert(trans_item,
-                                                  InstantiationMode::Def(llvm::LinkOnceODRLinkage));
+                                                  llvm::LinkOnceODRLinkage);
                 }
             }
         }

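With InstantiationMode gone, an item's role in a codegen unit is carried entirely by its linkage. A standalone sketch of the placement rule above, with std collections and a hypothetical Linkage enum standing in for the llvm bindings:

use std::collections::{HashMap, HashSet};

// Hypothetical stand-ins for llvm::Linkage and TransItem.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Linkage {
    External,
    AvailableExternally,
    LinkOnceODR,
}

type Item = &'static str;

// Decide the linkage of every item that must be available in one codegen unit,
// given the unit's own roots and the set of roots owned by any unit.
fn place_in_unit(
    unit_roots: &HashMap<Item, Linkage>,
    all_roots: &HashSet<Item>,
    reachable: &HashSet<Item>,
) -> HashMap<Item, Linkage> {
    let mut items = HashMap::new();
    for &item in reachable {
        if let Some(&linkage) = unit_roots.get(item) {
            // A root of this unit: keep the linkage it was assigned.
            items.insert(item, linkage);
        } else if all_roots.contains(item) {
            // Defined in some other unit: reference that definition.
            items.insert(item, Linkage::AvailableExternally);
        } else {
            // An inline-only copy; discardable if it ends up unused.
            items.insert(item, Linkage::LinkOnceODR);
        }
    }
    items
}

fn main() {
    let unit_roots: HashMap<Item, Linkage> =
        [("main", Linkage::External)].iter().cloned().collect();
    let all_roots: HashSet<Item> =
        ["main", "helper_in_other_unit"].iter().cloned().collect();
    let reachable: HashSet<Item> =
        ["main", "helper_in_other_unit", "generic_inline_fn"].iter().cloned().collect();

    let placed = place_in_unit(&unit_roots, &all_roots, &reachable);
    assert_eq!(placed["generic_inline_fn"], Linkage::LinkOnceODR);
    assert_eq!(placed["helper_in_other_unit"], Linkage::AvailableExternally);
    println!("{:?}", placed);
}
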
@@ -338,43 +321,18 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
     return PostInliningPartitioning(new_partitioning);

     fn follow_inlining<'tcx>(trans_item: TransItem<'tcx>,
-                             reference_map: &ReferenceMap<'tcx>,
+                             inlining_map: &InliningMap<'tcx>,
                              visited: &mut FnvHashSet<TransItem<'tcx>>) {
         if !visited.insert(trans_item) {
             return;
         }

-        reference_map.with_inlining_candidates(trans_item, |target| {
-            follow_inlining(target, reference_map, visited);
+        inlining_map.with_inlining_candidates(trans_item, |target| {
+            follow_inlining(target, inlining_map, visited);
         });
     }
 }

-fn place_declarations<'tcx>(codegen_units: PostInliningPartitioning<'tcx>,
-                            reference_map: &ReferenceMap<'tcx>)
-                            -> PostDeclarationsPartitioning<'tcx> {
-    let PostInliningPartitioning(mut codegen_units) = codegen_units;
-
-    for codegen_unit in codegen_units.iter_mut() {
-        let mut declarations = FnvHashSet();
-
-        for (trans_item, _) in &codegen_unit.items {
-            for referenced_item in reference_map.get_direct_references_from(*trans_item) {
-                if !codegen_unit.items.contains_key(referenced_item) {
-                    declarations.insert(*referenced_item);
-                }
-            }
-        }
-
-        codegen_unit.items
-                    .extend(declarations.iter()
-                                        .map(|trans_item| (*trans_item,
-                                                           InstantiationMode::Decl)));
-    }
-
-    PostDeclarationsPartitioning(codegen_units)
-}
-
 fn characteristic_def_id_of_trans_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                                  trans_item: TransItem<'tcx>)
                                                  -> Option<DefId> {

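follow_inlining collects the transitive inlining candidates of each root by recursing through the renamed InliningMap, with a visited set that both deduplicates and breaks cycles. The same traversal over a plain adjacency map, with made-up item names:

use std::collections::{HashMap, HashSet};

type Item = &'static str;

// Hypothetical adjacency representation of the inlining map:
// item -> items that must be instantiated alongside it for inlining.
fn follow_inlining(
    item: Item,
    inlining_map: &HashMap<Item, Vec<Item>>,
    visited: &mut HashSet<Item>,
) {
    // insert returns false if the item was already visited; that both
    // deduplicates work and terminates on cycles.
    if !visited.insert(item) {
        return;
    }
    if let Some(candidates) = inlining_map.get(item) {
        for &target in candidates {
            follow_inlining(target, inlining_map, visited);
        }
    }
}

fn main() {
    let mut map: HashMap<Item, Vec<Item>> = HashMap::new();
    map.insert("root", vec!["a", "b"]);
    map.insert("a", vec!["c"]);
    map.insert("c", vec!["root"]); // a cycle, handled by the visited set

    let mut reachable = HashSet::new();
    follow_inlining("root", &map, &mut reachable);
    assert_eq!(reachable.len(), 4); // root, a, b, c
    println!("{:?}", reachable);
}
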
@@ -52,7 +52,7 @@ impl<'a, 'tcx> SymbolNamesTest<'a, 'tcx> {
         for attr in tcx.get_attrs(def_id).iter() {
             if attr.check_name(SYMBOL_NAME) {
                 // for now, can only use on monomorphic names
-                let instance = Instance::mono(tcx, def_id);
+                let instance = Instance::mono(self.ccx.shared(), def_id);
                 let name = symbol_names::exported_name(self.ccx, &instance);
                 tcx.sess.span_err(attr.span, &format!("symbol-name({})", name));
             } else if attr.check_name(ITEM_PATH) {

 384  src/librustc_trans/trans_item.rs  (new file)
@@ -0,0 +1,384 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Walks the crate looking for items/impl-items/trait-items that have
+//! either a `rustc_symbol_name` or `rustc_item_path` attribute and
+//! generates an error giving, respectively, the symbol name or
+//! item-path. This is used for unit testing the code that generates
+//! paths etc in all kinds of annoying scenarios.
+
+use base::llvm_linkage_by_name;
+use glue::DropGlueKind;
+use llvm;
+use monomorphize::Instance;
+use rustc::hir;
+use rustc::hir::def_id::DefId;
+use rustc::ty::{self, Ty, TyCtxt};
+use rustc::ty::subst;
+use std::hash::{Hash, Hasher};
+use syntax::ast::{self, NodeId};
+use syntax::attr;
+use syntax::parse::token;
+
+#[derive(PartialEq, Eq, Clone, Copy, Debug)]
+pub enum TransItem<'tcx> {
+    DropGlue(DropGlueKind<'tcx>),
+    Fn(Instance<'tcx>),
+    Static(NodeId)
+}
+
+impl<'tcx> Hash for TransItem<'tcx> {
+    fn hash<H: Hasher>(&self, s: &mut H) {
+        match *self {
+            TransItem::DropGlue(t) => {
+                0u8.hash(s);
+                t.hash(s);
+            },
+            TransItem::Fn(instance) => {
+                1u8.hash(s);
+                instance.def.hash(s);
+                (instance.substs as *const _ as usize).hash(s);
+            }
+            TransItem::Static(node_id) => {
+                2u8.hash(s);
+                node_id.hash(s);
+            }
+        };
+    }
+}
+
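The manual Hash impl above tags each variant with a distinct byte before hashing its payload, and hashes the interned substs by address. The tag-byte pattern on a self-contained enum, assuming nothing beyond the standard DefaultHasher:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// A hypothetical enum mirroring the shape of TransItem: each variant is hashed
// with a distinct leading tag byte, then its payload.
enum Work {
    DropGlue(u32),
    Fn(u32),
    Static(u32),
}

impl Hash for Work {
    fn hash<H: Hasher>(&self, s: &mut H) {
        match *self {
            Work::DropGlue(t) => {
                0u8.hash(s);
                t.hash(s);
            }
            Work::Fn(id) => {
                1u8.hash(s);
                id.hash(s);
            }
            Work::Static(id) => {
                2u8.hash(s);
                id.hash(s);
            }
        }
    }
}

fn hash_of<T: Hash>(value: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    // Same payload, different variants: the tag byte keeps the hashes apart.
    assert_ne!(hash_of(&Work::DropGlue(7)), hash_of(&Work::Fn(7)));
    assert_ne!(hash_of(&Work::Fn(7)), hash_of(&Work::Static(7)));
    println!("ok");
}
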
+//=-----------------------------------------------------------------------------
+// TransItem String Keys
+//=-----------------------------------------------------------------------------
+
+// The code below allows for producing a unique string key for a trans item.
+// These keys are used by the handwritten auto-tests, so they need to be
+// predictable and human-readable.
+//
+// Note: A lot of this could looks very similar to what's already in the
+// ppaux module. It would be good to refactor things so we only have one
+// parameterizable implementation for printing types.
+
+/// Same as `unique_type_name()` but with the result pushed onto the given
+/// `output` parameter.
+pub fn push_unique_type_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                       t: ty::Ty<'tcx>,
+                                       output: &mut String) {
+    match t.sty {
+        ty::TyBool => output.push_str("bool"),
+        ty::TyChar => output.push_str("char"),
+        ty::TyStr => output.push_str("str"),
+        ty::TyInt(ast::IntTy::Is) => output.push_str("isize"),
+        ty::TyInt(ast::IntTy::I8) => output.push_str("i8"),
+        ty::TyInt(ast::IntTy::I16) => output.push_str("i16"),
+        ty::TyInt(ast::IntTy::I32) => output.push_str("i32"),
+        ty::TyInt(ast::IntTy::I64) => output.push_str("i64"),
+        ty::TyUint(ast::UintTy::Us) => output.push_str("usize"),
+        ty::TyUint(ast::UintTy::U8) => output.push_str("u8"),
+        ty::TyUint(ast::UintTy::U16) => output.push_str("u16"),
+        ty::TyUint(ast::UintTy::U32) => output.push_str("u32"),
+        ty::TyUint(ast::UintTy::U64) => output.push_str("u64"),
+        ty::TyFloat(ast::FloatTy::F32) => output.push_str("f32"),
+        ty::TyFloat(ast::FloatTy::F64) => output.push_str("f64"),
+        ty::TyStruct(adt_def, substs) |
+        ty::TyEnum(adt_def, substs) => {
+            push_item_name(tcx, adt_def.did, output);
+            push_type_params(tcx, &substs.types, &[], output);
+        },
+        ty::TyTuple(component_types) => {
+            output.push('(');
+            for &component_type in component_types {
+                push_unique_type_name(tcx, component_type, output);
+                output.push_str(", ");
+            }
+            if !component_types.is_empty() {
+                output.pop();
+                output.pop();
+            }
+            output.push(')');
+        },
+        ty::TyBox(inner_type) => {
+            output.push_str("Box<");
+            push_unique_type_name(tcx, inner_type, output);
+            output.push('>');
+        },
+        ty::TyRawPtr(ty::TypeAndMut { ty: inner_type, mutbl } ) => {
+            output.push('*');
+            match mutbl {
+                hir::MutImmutable => output.push_str("const "),
+                hir::MutMutable => output.push_str("mut "),
+            }
+
+            push_unique_type_name(tcx, inner_type, output);
+        },
+        ty::TyRef(_, ty::TypeAndMut { ty: inner_type, mutbl }) => {
+            output.push('&');
+            if mutbl == hir::MutMutable {
+                output.push_str("mut ");
+            }
+
+            push_unique_type_name(tcx, inner_type, output);
+        },
+        ty::TyArray(inner_type, len) => {
+            output.push('[');
+            push_unique_type_name(tcx, inner_type, output);
+            output.push_str(&format!("; {}", len));
+            output.push(']');
+        },
+        ty::TySlice(inner_type) => {
+            output.push('[');
+            push_unique_type_name(tcx, inner_type, output);
+            output.push(']');
+        },
+        ty::TyTrait(ref trait_data) => {
+            push_item_name(tcx, trait_data.principal.skip_binder().def_id, output);
+            push_type_params(tcx,
+                             &trait_data.principal.skip_binder().substs.types,
+                             &trait_data.bounds.projection_bounds,
+                             output);
+        },
+        ty::TyFnDef(_, _, &ty::BareFnTy{ unsafety, abi, ref sig } ) |
+        ty::TyFnPtr(&ty::BareFnTy{ unsafety, abi, ref sig } ) => {
+            if unsafety == hir::Unsafety::Unsafe {
+                output.push_str("unsafe ");
+            }
+
+            if abi != ::abi::Abi::Rust {
+                output.push_str("extern \"");
+                output.push_str(abi.name());
+                output.push_str("\" ");
+            }
+
+            output.push_str("fn(");
+
+            let sig = tcx.erase_late_bound_regions(sig);
+            if !sig.inputs.is_empty() {
+                for &parameter_type in &sig.inputs {
+                    push_unique_type_name(tcx, parameter_type, output);
+                    output.push_str(", ");
+                }
+                output.pop();
+                output.pop();
+            }
+
+            if sig.variadic {
+                if !sig.inputs.is_empty() {
+                    output.push_str(", ...");
+                } else {
+                    output.push_str("...");
+                }
+            }
+
+            output.push(')');
+
+            match sig.output {
+                ty::FnConverging(result_type) if result_type.is_nil() => {}
+                ty::FnConverging(result_type) => {
+                    output.push_str(" -> ");
+                    push_unique_type_name(tcx, result_type, output);
+                }
+                ty::FnDiverging => {
+                    output.push_str(" -> !");
+                }
+            }
+        },
+        ty::TyClosure(def_id, ref closure_substs) => {
+            push_item_name(tcx, def_id, output);
+            output.push_str("{");
+            output.push_str(&format!("{}:{}", def_id.krate, def_id.index.as_usize()));
+            output.push_str("}");
+            push_type_params(tcx, &closure_substs.func_substs.types, &[], output);
+        }
+        ty::TyError |
+        ty::TyInfer(_) |
+        ty::TyProjection(..) |
+        ty::TyParam(_) => {
+            bug!("debuginfo: Trying to create type name for \
+                unexpected type: {:?}", t);
+        }
+    }
+}
+
+fn push_item_name(tcx: TyCtxt,
+                  def_id: DefId,
+                  output: &mut String) {
+    let def_path = tcx.def_path(def_id);
+
+    // some_crate::
+    output.push_str(&tcx.crate_name(def_path.krate));
+    output.push_str("::");
+
+    // foo::bar::ItemName::
+    for part in tcx.def_path(def_id).data {
+        output.push_str(&format!("{}[{}]::",
+                        part.data.as_interned_str(),
+                        part.disambiguator));
+    }
+
+    // remove final "::"
+    output.pop();
+    output.pop();
+}
+
+fn push_type_params<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                              types: &'tcx subst::VecPerParamSpace<Ty<'tcx>>,
+                              projections: &[ty::PolyProjectionPredicate<'tcx>],
+                              output: &mut String) {
+    if types.is_empty() && projections.is_empty() {
+        return;
+    }
+
+    output.push('<');
+
+    for &type_parameter in types {
+        push_unique_type_name(tcx, type_parameter, output);
+        output.push_str(", ");
+    }
+
+    for projection in projections {
+        let projection = projection.skip_binder();
+        let name = token::get_ident_interner().get(projection.projection_ty.item_name);
+        output.push_str(&name[..]);
+        output.push_str("=");
+        push_unique_type_name(tcx, projection.ty, output);
+        output.push_str(", ");
+    }
+
+    output.pop();
+    output.pop();
+
+    output.push('>');
+}
+
+fn push_instance_as_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                     instance: Instance<'tcx>,
+                                     output: &mut String) {
+    push_item_name(tcx, instance.def, output);
+    push_type_params(tcx, &instance.substs.types, &[], output);
+}
+
+pub fn def_id_to_string(tcx: TyCtxt, def_id: DefId) -> String {
+    let mut output = String::new();
+    push_item_name(tcx, def_id, &mut output);
+    output
+}
+
+pub fn type_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                ty: ty::Ty<'tcx>)
+                                -> String {
+    let mut output = String::new();
+    push_unique_type_name(tcx, ty, &mut output);
+    output
+}
+
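push_unique_type_name and push_type_params build the key by recursing into the type structure, appending to a single output buffer and trimming the trailing separator after each list. The same buffer-building style on a toy type grammar invented for the example:

// A hypothetical toy type grammar, just to show the recursive push/trim-separator style.
enum Ty {
    Bool,
    U32,
    Ref(Box<Ty>),
    Tuple(Vec<Ty>),
}

fn push_unique_type_name(t: &Ty, output: &mut String) {
    match *t {
        Ty::Bool => output.push_str("bool"),
        Ty::U32 => output.push_str("u32"),
        Ty::Ref(ref inner) => {
            output.push('&');
            push_unique_type_name(inner, output);
        }
        Ty::Tuple(ref components) => {
            output.push('(');
            for component in components {
                push_unique_type_name(component, output);
                output.push_str(", ");
            }
            if !components.is_empty() {
                // Drop the trailing ", " the loop just appended.
                let new_len = output.len() - 2;
                output.truncate(new_len);
            }
            output.push(')');
        }
    }
}

fn main() {
    let t = Ty::Tuple(vec![Ty::U32, Ty::Ref(Box::new(Ty::Bool))]);
    let mut name = String::new();
    push_unique_type_name(&t, &mut name);
    assert_eq!(name, "(u32, &bool)");
    println!("{}", name);
}
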
+impl<'tcx> TransItem<'tcx> {
+
+    pub fn requests_inline<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
+        match *self {
+            TransItem::Fn(ref instance) => {
+                let attributes = tcx.get_attrs(instance.def);
+                attr::requests_inline(&attributes[..])
+            }
+            TransItem::DropGlue(..) => true,
+            TransItem::Static(..) => false,
+        }
+    }
+
+    pub fn is_from_extern_crate(&self) -> bool {
+        match *self {
+            TransItem::Fn(ref instance) => !instance.def.is_local(),
+            TransItem::DropGlue(..) |
+            TransItem::Static(..) => false,
+        }
+    }
+
+    pub fn is_lazily_instantiated(&self) -> bool {
+        match *self {
+            TransItem::Fn(ref instance) => !instance.substs.types.is_empty(),
+            TransItem::DropGlue(..) => true,
+            TransItem::Static(..) => false,
+        }
+    }
+
+    pub fn explicit_linkage<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<llvm::Linkage> {
+        let def_id = match *self {
+            TransItem::Fn(ref instance) => instance.def,
+            TransItem::Static(node_id) => tcx.map.local_def_id(node_id),
+            TransItem::DropGlue(..) => return None,
+        };
+
+        let attributes = tcx.get_attrs(def_id);
+        if let Some(name) = attr::first_attr_value_str_by_name(&attributes, "linkage") {
+            if let Some(linkage) = llvm_linkage_by_name(&name) {
+                Some(linkage)
+            } else {
+                let span = tcx.map.span_if_local(def_id);
+                if let Some(span) = span {
+                    tcx.sess.span_fatal(span, "invalid linkage specified")
+                } else {
+                    tcx.sess.fatal(&format!("invalid linkage specified: {}", name))
+                }
+            }
+        } else {
+            None
+        }
+    }
+
+    pub fn to_string<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
+        let hir_map = &tcx.map;
+
+        return match *self {
+            TransItem::DropGlue(dg) => {
+                let mut s = String::with_capacity(32);
+                match dg {
+                    DropGlueKind::Ty(_) => s.push_str("drop-glue "),
+                    DropGlueKind::TyContents(_) => s.push_str("drop-glue-contents "),
+                };
+                push_unique_type_name(tcx, dg.ty(), &mut s);
+                s
+            }
+            TransItem::Fn(instance) => {
+                to_string_internal(tcx, "fn ", instance)
+            },
+            TransItem::Static(node_id) => {
+                let def_id = hir_map.local_def_id(node_id);
+                let empty_substs = tcx.mk_substs(subst::Substs::empty());
+                let instance = Instance::new(def_id, empty_substs);
+                to_string_internal(tcx, "static ", instance)
+            },
+        };
+
+        fn to_string_internal<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                        prefix: &str,
+                                        instance: Instance<'tcx>)
+                                        -> String {
+            let mut result = String::with_capacity(32);
+            result.push_str(prefix);
+            push_instance_as_string(tcx, instance, &mut result);
+            result
+        }
+    }
+
+    pub fn to_raw_string(&self) -> String {
+        match *self {
+            TransItem::DropGlue(dg) => {
+                format!("DropGlue({})", dg.ty() as *const _ as usize)
+            }
+            TransItem::Fn(instance) => {
+                format!("Fn({:?}, {})",
+                         instance.def,
+                         instance.substs as *const _ as usize)
+            }
+            TransItem::Static(id) => {
+                format!("Static({:?})", id)
+            }
+        }
+    }
+}
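explicit_linkage reads an optional linkage attribute, maps the name through llvm_linkage_by_name, and turns an unknown name into a hard error. A rough standalone mirror of that flow; the accepted names and the error representation below are illustrative, not the real LLVM set:

// Hypothetical mirror of explicit_linkage: map an attribute string to a linkage,
// reporting a hard error for unknown names.
#[derive(Debug, PartialEq)]
enum Linkage {
    External,
    WeakOdr,
    LinkOnceOdr,
}

fn linkage_by_name(name: &str) -> Option<Linkage> {
    // Illustrative names only; the real mapping lives in llvm_linkage_by_name.
    match name {
        "external" => Some(Linkage::External),
        "weak_odr" => Some(Linkage::WeakOdr),
        "linkonce_odr" => Some(Linkage::LinkOnceOdr),
        _ => None,
    }
}

fn explicit_linkage(attr_value: Option<&str>) -> Result<Option<Linkage>, String> {
    match attr_value {
        None => Ok(None), // no linkage attribute at all
        Some(name) => match linkage_by_name(name) {
            Some(linkage) => Ok(Some(linkage)),
            None => Err(format!("invalid linkage specified: {}", name)),
        },
    }
}

fn main() {
    assert_eq!(explicit_linkage(None), Ok(None));
    assert_eq!(explicit_linkage(Some("weak_odr")), Ok(Some(Linkage::WeakOdr)));
    assert!(explicit_linkage(Some("bogus")).is_err());
}
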
@@ -1153,7 +1153,8 @@ fn check_const<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
                         sp: Span,
                         e: &'tcx hir::Expr,
                         id: ast::NodeId) {
-    ccx.inherited(None).enter(|inh| {
+    let param_env = ParameterEnvironment::for_item(ccx.tcx, id);
+    ccx.inherited(Some(param_env)).enter(|inh| {
         let rty = ccx.tcx.node_id_to_type(id);
         let fcx = FnCtxt::new(&inh, ty::FnConverging(rty), e.id);
         let declty = fcx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(id)).ty;

@@ -15,7 +15,6 @@ use sys_common::backtrace::{output, output_fileline};

 pub fn print(w: &mut Write, idx: isize, addr: *mut libc::c_void,
              symaddr: *mut libc::c_void) -> io::Result<()> {
-    use env;
     use ffi::CStr;
     use ptr;

@@ -110,46 +109,22 @@ pub fn print(w: &mut Write, idx: isize, addr: *mut libc::c_void,
     // that is calculated the first time this is requested. Remember that
     // backtracing all happens serially (one global lock).
     //
-    // An additionally oddity in this function is that we initialize the
-    // filename via self_exe_name() to pass to libbacktrace. It turns out
-    // that on Linux libbacktrace seamlessly gets the filename of the
-    // current executable, but this fails on freebsd. by always providing
-    // it, we make sure that libbacktrace never has a reason to not look up
-    // the symbols. The libbacktrace API also states that the filename must
-    // be in "permanent memory", so we copy it to a static and then use the
-    // static as the pointer.
+    // Things don't work so well on not-Linux since libbacktrace can't track
+    // down that executable this is. We at one point used env::current_exe but
+    // it turns out that there are some serious security issues with that
+    // approach.
     //
-    // FIXME: We also call self_exe_name() on DragonFly BSD. I haven't
-    // tested if this is required or not.
+    // Specifically, on certain platforms like BSDs, a malicious actor can cause
+    // an arbitrary file to be placed at the path returned by current_exe.
+    // libbacktrace does not behave defensively in the presence of ill-formed
+    // DWARF information, and has been demonstrated to segfault in at least one
+    // case. There is no evidence at the moment to suggest that a more carefully
+    // constructed file can't cause arbitrary code execution. As a result of all
+    // of this, we don't hint libbacktrace with the path to the current process.
     unsafe fn init_state() -> *mut backtrace_state {
         static mut STATE: *mut backtrace_state = ptr::null_mut();
-        static mut LAST_FILENAME: [libc::c_char; 256] = [0; 256];
         if !STATE.is_null() { return STATE }
-        let selfname = if cfg!(target_os = "freebsd") ||
-                          cfg!(target_os = "dragonfly") ||
-                          cfg!(target_os = "bitrig") ||
-                          cfg!(target_os = "openbsd") ||
-                          cfg!(target_os = "windows") {
-            env::current_exe().ok()
-        } else {
-            None
-        };
-        let filename = match selfname.as_ref().and_then(|s| s.to_str()) {
-            Some(path) => {
-                let bytes = path.as_bytes();
-                if bytes.len() < LAST_FILENAME.len() {
-                    let i = bytes.iter();
-                    for (slot, val) in LAST_FILENAME.iter_mut().zip(i) {
-                        *slot = *val as libc::c_char;
-                    }
-                    LAST_FILENAME.as_ptr()
-                } else {
-                    ptr::null()
-                }
-            }
-            None => ptr::null(),
-        };
-        STATE = backtrace_create_state(filename, 0, error_cb,
+        STATE = backtrace_create_state(ptr::null(), 0, error_cb,
                                        ptr::null_mut());
         STATE
     }

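The backtrace change stops hinting libbacktrace with the path from env::current_exe (a hostile file at that path could make it crash or worse) while keeping the once-created, cached state. A safe-Rust analogue of that one-time initialization using std::sync::OnceLock; the BacktraceState struct is a stand-in, not the real FFI handle:

use std::sync::OnceLock;

// Hypothetical stand-in for the lazily created libbacktrace state handle.
#[derive(Debug)]
struct BacktraceState {
    // After the patch, no executable path is passed down as a hint.
    filename_hint: Option<String>,
}

fn state() -> &'static BacktraceState {
    static STATE: OnceLock<BacktraceState> = OnceLock::new();
    // Initialized at most once, on first use, no matter how many callers race here.
    STATE.get_or_init(|| BacktraceState { filename_hint: None })
}

fn main() {
    let a = state() as *const _;
    let b = state() as *const _;
    assert_eq!(a, b); // every call sees the same cached instance
    println!("{:?}", state());
}
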
@@ -0,0 +1,41 @@
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// compile-flags:-Zprint-trans-items=eager
+
+//~ TRANS_ITEM drop-glue drop_in_place_intrinsic::StructWithDtor[0]
+//~ TRANS_ITEM drop-glue-contents drop_in_place_intrinsic::StructWithDtor[0]
+struct StructWithDtor(u32);
+
+impl Drop for StructWithDtor {
+    //~ TRANS_ITEM fn drop_in_place_intrinsic::{{impl}}[0]::drop[0]
+    fn drop(&mut self) {}
+}
+
+//~ TRANS_ITEM fn drop_in_place_intrinsic::main[0]
+fn main() {
+
+    //~ TRANS_ITEM drop-glue [drop_in_place_intrinsic::StructWithDtor[0]; 2]
+    let x = [StructWithDtor(0), StructWithDtor(1)];
+
+    drop_slice_in_place(&x);
+}
+
+//~ TRANS_ITEM fn drop_in_place_intrinsic::drop_slice_in_place[0]
+fn drop_slice_in_place(x: &[StructWithDtor]) {
+    unsafe {
+        // This is the interesting thing in this test case: Normally we would
+        // not have drop-glue for the unsized [StructWithDtor]. This has to be
+        // generated though when the drop_in_place() intrinsic is used.
+        //~ TRANS_ITEM drop-glue [drop_in_place_intrinsic::StructWithDtor[0]]
+        ::std::ptr::drop_in_place(x as *const _ as *mut [StructWithDtor]);
+    }
+}
 23  src/test/codegen-units/item-collection/static-init.rs  (new file)
@@ -0,0 +1,23 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags:-Zprint-trans-items=eager
+
+pub static FN : fn() = foo::<i32>;
+
+pub fn foo<T>() { }
+
+//~ TRANS_ITEM fn static_init::foo[0]<i32>
+//~ TRANS_ITEM static static_init::FN[0]
+
+fn main() { }
+
+//~ TRANS_ITEM fn static_init::main[0]
+//~ TRANS_ITEM drop-glue i8
@@ -23,7 +23,7 @@ struct Struct {
 }

 impl Drop for Struct {
-    //~ TRANS_ITEM fn local_drop_glue::{{impl}}[0]::drop[0] @@ local_drop_glue[WeakODR] local_drop_glue-mod1[Declaration]
+    //~ TRANS_ITEM fn local_drop_glue::{{impl}}[0]::drop[0] @@ local_drop_glue[WeakODR]
     fn drop(&mut self) {}
 }

@@ -19,10 +19,10 @@
 // Used in different modules/codegen units but always instantiated in the same
 // codegen unit.

-//~ TRANS_ITEM fn local_generic::generic[0]<u32> @@ local_generic.volatile[WeakODR] local_generic[Declaration]
-//~ TRANS_ITEM fn local_generic::generic[0]<u64> @@ local_generic.volatile[WeakODR] local_generic-mod1[Declaration]
-//~ TRANS_ITEM fn local_generic::generic[0]<char> @@ local_generic.volatile[WeakODR] local_generic-mod1-mod1[Declaration]
-//~ TRANS_ITEM fn local_generic::generic[0]<&str> @@ local_generic.volatile[WeakODR] local_generic-mod2[Declaration]
+//~ TRANS_ITEM fn local_generic::generic[0]<u32> @@ local_generic.volatile[WeakODR]
+//~ TRANS_ITEM fn local_generic::generic[0]<u64> @@ local_generic.volatile[WeakODR]
+//~ TRANS_ITEM fn local_generic::generic[0]<char> @@ local_generic.volatile[WeakODR]
+//~ TRANS_ITEM fn local_generic::generic[0]<&str> @@ local_generic.volatile[WeakODR]
 pub fn generic<T>(x: T) -> T { x }

 //~ TRANS_ITEM fn local_generic::user[0] @@ local_generic[WeakODR]

@@ -61,19 +61,19 @@ mod type2 {
 //~ TRANS_ITEM fn methods_are_with_self_type::main[0]
 fn main()
 {
-    //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[1]::method[0]<u32, u64> @@ methods_are_with_self_type.volatile[WeakODR] methods_are_with_self_type[Declaration]
+    //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[1]::method[0]<u32, u64> @@ methods_are_with_self_type.volatile[WeakODR]
     SomeGenericType(0u32, 0u64).method();
-    //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[1]::associated_fn[0]<char, &str> @@ methods_are_with_self_type.volatile[WeakODR] methods_are_with_self_type[Declaration]
+    //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[1]::associated_fn[0]<char, &str> @@ methods_are_with_self_type.volatile[WeakODR]
     SomeGenericType::associated_fn('c', "&str");

-    //~ TRANS_ITEM fn methods_are_with_self_type::{{impl}}[0]::foo[0]<methods_are_with_self_type::type1[0]::Struct[0]> @@ methods_are_with_self_type-type1.volatile[WeakODR] methods_are_with_self_type[Declaration]
+    //~ TRANS_ITEM fn methods_are_with_self_type::{{impl}}[0]::foo[0]<methods_are_with_self_type::type1[0]::Struct[0]> @@ methods_are_with_self_type-type1.volatile[WeakODR]
     type1::Struct.foo();
-    //~ TRANS_ITEM fn methods_are_with_self_type::{{impl}}[0]::foo[0]<methods_are_with_self_type::type2[0]::Struct[0]> @@ methods_are_with_self_type-type2.volatile[WeakODR] methods_are_with_self_type[Declaration]
+    //~ TRANS_ITEM fn methods_are_with_self_type::{{impl}}[0]::foo[0]<methods_are_with_self_type::type2[0]::Struct[0]> @@ methods_are_with_self_type-type2.volatile[WeakODR]
     type2::Struct.foo();

-    //~ TRANS_ITEM fn methods_are_with_self_type::Trait[0]::default[0]<methods_are_with_self_type::type1[0]::Struct[0]> @@ methods_are_with_self_type-type1.volatile[WeakODR] methods_are_with_self_type[Declaration]
+    //~ TRANS_ITEM fn methods_are_with_self_type::Trait[0]::default[0]<methods_are_with_self_type::type1[0]::Struct[0]> @@ methods_are_with_self_type-type1.volatile[WeakODR]
     type1::Struct.default();
-    //~ TRANS_ITEM fn methods_are_with_self_type::Trait[0]::default[0]<methods_are_with_self_type::type2[0]::Struct[0]> @@ methods_are_with_self_type-type2.volatile[WeakODR] methods_are_with_self_type[Declaration]
+    //~ TRANS_ITEM fn methods_are_with_self_type::Trait[0]::default[0]<methods_are_with_self_type::type2[0]::Struct[0]> @@ methods_are_with_self_type-type2.volatile[WeakODR]
     type2::Struct.default();
 }

 21  src/test/run-pass/associated-const-outer-ty-refs.rs  (new file)
@@ -0,0 +1,21 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(associated_consts)]
+
+trait Lattice {
+    const BOTTOM: Self;
+}
+
+// FIXME(#33573): this should work without the 'static lifetime bound.
+impl<T: 'static> Lattice for Option<T> {
+    const BOTTOM: Option<T> = None;
+}
+
+fn main(){}
@@ -32,11 +32,15 @@ macro_rules! dump_and_die {
     ($($pos:expr),*) => ({
         // FIXME(#18285): we cannot include the current position because
        // the macro span takes over the last frame's file/line.
-        if cfg!(target_os = "macos") ||
-           cfg!(target_os = "ios") ||
-           cfg!(target_os = "android") ||
-           cfg!(all(target_os = "linux", target_arch = "arm")) ||
-           cfg!(all(windows, target_env = "gnu")) {
+        if cfg!(any(target_os = "macos",
+                    target_os = "ios",
+                    target_os = "android",
+                    all(target_os = "linux", target_arch = "arm"),
+                    target_os = "windows",
+                    target_os = "freebsd",
+                    target_os = "dragonfly",
+                    target_os = "bitrig",
+                    target_os = "openbsd")) {
             // skip these platforms as this support isn't implemented yet.
         } else {
             dump_filelines(&[$($pos),*]);

@@ -115,7 +115,7 @@ fn runtest(me: &str) {
 }

 fn main() {
-    if cfg!(windows) && cfg!(target_arch = "x86") && cfg!(target_env = "gnu") {
+    if cfg!(windows) && cfg!(target_env = "gnu") {
         return
     }

@@ -35,7 +35,7 @@ pub fn check(path: &Path, bad: &mut bool) {
             return
         }

-        let metadata = t!(fs::metadata(&file), &file);
+        let metadata = t!(fs::symlink_metadata(&file), &file);
         if metadata.mode() & 0o111 != 0 {
             println!("binary checked into source: {}", file.display());
             *bad = true;
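The tidy fix switches to fs::symlink_metadata so a symlink checked into the tree is inspected itself rather than followed to its target. A small Unix-only sketch in the same spirit; the helper name is made up:

use std::fs;
use std::io;
use std::os::unix::fs::MetadataExt; // Unix-only: mode bits
use std::path::Path;

fn is_checked_in_executable(path: &Path) -> io::Result<bool> {
    // symlink_metadata does not follow symlinks, so a link pointing at
    // /bin/sh is not itself reported as an executable blob.
    let metadata = fs::symlink_metadata(path)?;
    Ok(metadata.file_type().is_file() && metadata.mode() & 0o111 != 0)
}

fn main() -> io::Result<()> {
    for entry in fs::read_dir(".")? {
        let path = entry?.path();
        if is_checked_in_executable(&path)? {
            println!("binary checked into source: {}", path.display());
        }
    }
    Ok(())
}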