2017-05-31 17:41:33 -07:00
|
|
|
use std::collections::{HashMap, HashSet};
|
2017-02-07 00:39:40 -08:00
|
|
|
use std::fmt::Write;
|
2016-12-07 20:30:37 -08:00
|
|
|
|
2016-04-14 00:01:00 +02:00
|
|
|
use rustc::hir::def_id::DefId;
|
2016-09-28 18:22:53 +02:00
|
|
|
use rustc::hir::map::definitions::DefPathData;
|
2016-12-07 20:30:37 -08:00
|
|
|
use rustc::middle::const_val::ConstVal;
|
2017-09-03 22:39:03 -04:00
|
|
|
use rustc::middle::region;
|
2016-11-03 10:38:08 +01:00
|
|
|
use rustc::mir;
|
2016-08-27 01:44:46 -06:00
|
|
|
use rustc::traits::Reveal;
|
2017-08-08 15:53:07 +02:00
|
|
|
use rustc::ty::layout::{self, Layout, Size, Align, HasDataLayout};
|
2017-03-21 13:53:55 +01:00
|
|
|
use rustc::ty::subst::{Subst, Substs, Kind};
|
2017-08-29 11:32:10 +02:00
|
|
|
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
|
2016-06-11 12:38:28 -06:00
|
|
|
use rustc_data_structures::indexed_vec::Idx;
|
2017-08-26 13:48:59 -04:00
|
|
|
use syntax::codemap::{self, DUMMY_SP};
|
|
|
|
use syntax::ast::Mutability;
|
2017-03-22 13:13:52 +01:00
|
|
|
use syntax::abi::Abi;
|
2015-11-12 15:50:58 -06:00
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
use super::{EvalError, EvalResult, EvalErrorKind, GlobalId, Lvalue, LvalueExtra, Memory,
|
|
|
|
MemoryPointer, HasMemory, MemoryKind, operator, PrimVal, PrimValKind, Value, Pointer,
|
|
|
|
ValidationQuery, Machine};
|
2016-06-01 17:05:20 +02:00
|
|
|
|
2017-07-21 17:25:30 +02:00
|
|
|
/// The core interpreter state: type context, virtual memory, globals cache,
/// the virtual call stack, and the resource counters that bound evaluation.
pub struct EvalContext<'a, 'tcx: 'a, M: Machine<'tcx>> {
    /// Stores data required by the `Machine`
    pub machine_data: M::Data,

    /// The results of the type checker, from rustc.
    pub tcx: TyCtxt<'a, 'tcx, 'tcx>,

    /// The virtual memory system.
    pub memory: Memory<'a, 'tcx, M>,

    /// Lvalues that were suspended by the validation subsystem, and will be recovered later
    pub(crate) suspended: HashMap<DynamicLifetime, Vec<ValidationQuery<'tcx>>>,

    /// Precomputed statics, constants and promoteds.
    pub globals: HashMap<GlobalId<'tcx>, PtrAndAlign>,

    /// The virtual call stack.
    pub(crate) stack: Vec<Frame<'tcx>>,

    /// The maximum number of stack frames allowed
    pub(crate) stack_limit: usize,

    /// The maximum number of operations that may be executed.
    /// This prevents infinite loops and huge computations from freezing up const eval.
    /// Remove once halting problem is solved.
    pub(crate) steps_remaining: u64,
}
|
|
|
|
|
2016-03-06 04:23:24 -06:00
|
|
|
/// A stack frame.
|
2016-11-03 10:38:08 +01:00
|
|
|
/// A stack frame.
pub struct Frame<'tcx> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub mir: &'tcx mir::Mir<'tcx>,

    /// The def_id and substs of the current function
    pub instance: ty::Instance<'tcx>,

    /// The span of the call site.
    pub span: codemap::Span,

    ////////////////////////////////////////////////////////////////////////////////
    // Return lvalue and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// The block to return to when returning from the current stack frame
    pub return_to_block: StackPopCleanup,

    /// The location where the result of the current stack frame should be written to.
    pub return_lvalue: Lvalue,

    /// The list of locals for this stack frame, stored in order as
    /// `[arguments..., variables..., temporaries...]`. The locals are stored as `Option<Value>`s.
    /// `None` represents a local that is currently dead, while a live local
    /// can either directly contain `PrimVal` or refer to some part of an `Allocation`.
    ///
    /// Before being initialized, arguments are `Value::ByVal(PrimVal::Undef)` and other locals are `None`.
    pub locals: Vec<Option<Value>>,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// The block that is currently executed (or will be executed after the above call stacks
    /// return).
    pub block: mir::BasicBlock,

    /// The index of the currently evaluated statement.
    pub stmt: usize,
}
|
2016-02-27 19:20:25 -06:00
|
|
|
|
2016-09-09 17:44:04 +02:00
|
|
|
/// What should happen to the memory/control flow when a stack frame is popped.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum StackPopCleanup {
    /// The stackframe existed to compute the initial value of a static/constant, make sure it
    /// isn't modifiable afterwards in case of constants.
    /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
    /// references or deallocated
    MarkStatic(Mutability),
    /// A regular stackframe added due to a function call will need to get forwarded to the next
    /// block
    Goto(mir::BasicBlock),
    /// The main function and diverging functions have nowhere to return to
    None,
}
|
|
|
|
|
2017-07-19 19:45:25 -07:00
|
|
|
/// Identifies how long a suspended validation query stays in effect:
/// a stack frame, plus an optional region scope inside that frame.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct DynamicLifetime {
    /// Index of the stack frame this lifetime is tied to.
    pub frame: usize,
    pub region: Option<region::Scope>, // "None" indicates "until the function ends"
}
|
|
|
|
|
2016-11-26 17:54:19 -08:00
|
|
|
/// Limits imposed on an evaluation, preventing it from exhausting host memory
/// or running forever.
#[derive(Copy, Clone, Debug)]
pub struct ResourceLimits {
    /// Maximum number of bytes the interpreter's virtual memory may hold.
    pub memory_size: u64,
    /// Maximum number of interpreter steps (feeds `EvalContext::steps_remaining`).
    pub step_limit: u64,
    /// Maximum call-stack depth (number of frames).
    pub stack_limit: usize,
}
|
|
|
|
|
|
|
|
impl Default for ResourceLimits {
|
|
|
|
fn default() -> Self {
|
|
|
|
ResourceLimits {
|
|
|
|
memory_size: 100 * 1024 * 1024, // 100 MB
|
|
|
|
step_limit: 1_000_000,
|
|
|
|
stack_limit: 100,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-07-28 19:43:05 -07:00
|
|
|
/// A type paired with a packed-ness flag.
// NOTE(review): `packed` presumably records that the type was found inside a
// `#[repr(packed)]` layout (cf. the `variant.packed` use in this file) — confirm at use sites.
#[derive(Copy, Clone, Debug)]
pub struct TyAndPacked<'tcx> {
    pub ty: Ty<'tcx>,
    pub packed: bool,
}
|
|
|
|
|
2017-08-24 14:41:49 +02:00
|
|
|
/// A `Value` paired with its Rust type.
#[derive(Copy, Clone, Debug)]
pub struct ValTy<'tcx> {
    pub value: Value,
    pub ty: Ty<'tcx>,
}
|
|
|
|
|
|
|
|
/// Let a `ValTy` be used wherever only the `Value` part is needed.
impl<'tcx> ::std::ops::Deref for ValTy<'tcx> {
    type Target = Value;
    fn deref(&self) -> &Value {
        &self.value
    }
}
|
|
|
|
|
2017-08-08 15:53:07 +02:00
|
|
|
/// A pointer together with its expected alignment status.
#[derive(Copy, Clone, Debug)]
pub struct PtrAndAlign {
    pub ptr: Pointer,
    /// Remember whether this lvalue is *supposed* to be aligned.
    pub aligned: bool,
}
|
|
|
|
|
|
|
|
impl PtrAndAlign {
|
|
|
|
pub fn to_ptr<'tcx>(self) -> EvalResult<'tcx, MemoryPointer> {
|
|
|
|
self.ptr.to_ptr()
|
|
|
|
}
|
|
|
|
pub fn offset<'tcx, C: HasDataLayout>(self, i: u64, cx: C) -> EvalResult<'tcx, Self> {
|
|
|
|
Ok(PtrAndAlign {
|
|
|
|
ptr: self.ptr.offset(i, cx)?,
|
|
|
|
aligned: self.aligned,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-07-21 17:25:30 +02:00
|
|
|
impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
|
|
|
|
/// Create a fresh interpreter with an empty stack and empty caches,
/// sized according to `limits`.
pub fn new(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    limits: ResourceLimits,
    machine_data: M::Data,
    memory_data: M::MemoryData,
) -> Self {
    EvalContext {
        machine_data,
        tcx,
        memory: Memory::new(&tcx.data_layout, limits.memory_size, memory_data),
        suspended: HashMap::new(),
        globals: HashMap::new(),
        stack: Vec::new(),
        stack_limit: limits.stack_limit,
        steps_remaining: limits.step_limit,
    }
}
|
2016-06-01 14:33:37 +02:00
|
|
|
|
2017-07-04 13:16:29 +02:00
|
|
|
/// Allocate memory for a value of type `ty`, using the current frame's substitutions.
pub fn alloc_ptr(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, MemoryPointer> {
    // Bind `substs` first: `substs()` borrows `self` immutably, while the
    // allocation call below needs `&mut self`.
    let substs = self.substs();
    self.alloc_ptr_with_substs(ty, substs)
}
|
|
|
|
|
|
|
|
/// Allocate stack memory for a value of type `ty` monomorphized with `substs`.
///
/// # Panics
/// Panics if `ty` is unsized (its size cannot be determined statically).
pub fn alloc_ptr_with_substs(
    &mut self,
    ty: Ty<'tcx>,
    substs: &'tcx Substs<'tcx>,
) -> EvalResult<'tcx, MemoryPointer> {
    let size = self.type_size_with_substs(ty, substs)?.expect(
        "cannot alloc memory for unsized type",
    );
    let align = self.type_align_with_substs(ty, substs)?;
    self.memory.allocate(size, align, MemoryKind::Stack)
}
|
2016-06-08 11:11:08 +02:00
|
|
|
|
2017-07-21 17:25:30 +02:00
|
|
|
/// Shared access to the virtual memory system.
pub fn memory(&self) -> &Memory<'a, 'tcx, M> {
    &self.memory
}
|
|
|
|
|
2017-07-21 17:25:30 +02:00
|
|
|
/// Mutable access to the virtual memory system.
pub fn memory_mut(&mut self) -> &mut Memory<'a, 'tcx, M> {
    &mut self.memory
}
|
|
|
|
|
2016-11-03 10:38:08 +01:00
|
|
|
/// The current virtual call stack, bottom frame first.
pub fn stack(&self) -> &[Frame<'tcx>] {
    &self.stack
}
|
|
|
|
|
2017-06-16 17:58:18 -07:00
|
|
|
#[inline]
|
|
|
|
pub fn cur_frame(&self) -> usize {
|
|
|
|
assert!(self.stack.len() > 0);
|
|
|
|
self.stack.len() - 1
|
|
|
|
}
|
|
|
|
|
2017-07-28 13:08:27 +02:00
|
|
|
/// Turn a host string into a fat-pointer `Value` (pointer + length),
/// backed by a cached allocation in interpreter memory.
pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
    let ptr = self.memory.allocate_cached(s.as_bytes())?;
    Ok(Value::ByValPair(
        PrimVal::Ptr(ptr),
        PrimVal::from_u128(s.len() as u128),
    ))
}
|
|
|
|
|
2017-03-27 10:13:21 +02:00
|
|
|
/// Convert a rustc `ConstVal` into an interpreter `Value`.
///
/// `Unevaluated` constants must already be present in the `globals` cache;
/// aggregate and variant constants should never occur in MIR and are a bug here.
pub(super) fn const_to_value(&mut self, const_val: &ConstVal<'tcx>) -> EvalResult<'tcx, Value> {
    use rustc::middle::const_val::ConstVal::*;

    let primval = match *const_val {
        Integral(const_int) => PrimVal::Bytes(const_int.to_u128_unchecked()),

        // Floats are stored as their raw bit pattern.
        Float(val) => PrimVal::Bytes(val.bits),

        Bool(b) => PrimVal::from_bool(b),
        Char(c) => PrimVal::from_char(c),

        // Strings and byte strings need backing allocations, so they
        // short-circuit out of the `primval` path.
        Str(ref s) => return self.str_to_value(s),

        ByteStr(ref bs) => {
            let ptr = self.memory.allocate_cached(bs.data)?;
            PrimVal::Ptr(ptr)
        }

        Unevaluated(def_id, substs) => {
            let instance = self.resolve_associated_const(def_id, substs);
            let cid = GlobalId {
                instance,
                promoted: None,
            };
            return Ok(Value::ByRef(*self.globals.get(&cid).expect("static/const not cached")));
        }

        Aggregate(..) |
        Variant(_) => bug!("should not have aggregate or variant constants in MIR"),
        // function items are zero sized and thus have no readable value
        Function(..) => PrimVal::Undef,
    };

    Ok(Value::ByVal(primval))
}
|
|
|
|
|
2016-12-07 20:30:37 -08:00
|
|
|
/// Whether `ty` is a `Sized` type. Must only be called on fully monomorphic types.
pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
    // generics are weird, don't run this function on a generic
    assert!(!ty.needs_subst());
    ty.is_sized(self.tcx, ty::ParamEnv::empty(Reveal::All), DUMMY_SP)
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
/// Load the MIR body for `instance`.
///
/// Plain items may lack MIR (e.g. from upstream crates without it), in which
/// case `NoMirFor` is returned; all other instance kinds (shims, drop glue, …)
/// are synthesized by rustc on demand.
pub fn load_mir(
    &self,
    instance: ty::InstanceDef<'tcx>,
) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
    trace!("load mir {:?}", instance);
    match instance {
        ty::InstanceDef::Item(def_id) => {
            self.tcx.maybe_optimized_mir(def_id).ok_or_else(|| {
                EvalErrorKind::NoMirFor(self.tcx.item_path_str(def_id)).into()
            })
        }
        _ => Ok(self.tcx.instance_mir(instance)),
    }
}
|
2016-06-08 11:11:08 +02:00
|
|
|
|
2016-06-30 11:29:25 +02:00
|
|
|
/// Substitute `substs` into `ty` and normalize any associated types in the result.
///
/// Miri doesn't care about lifetimes, and will choke on some crazy ones, so all
/// regions are erased up front.
pub fn monomorphize(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
    let erased = self.tcx.erase_regions(&ty);
    self.tcx.normalize_associated_type(&erased.subst(self.tcx, substs))
}
|
|
|
|
|
2017-08-03 11:06:25 -07:00
|
|
|
/// Return the size and alignment of the value at the given type.
/// Note that the value does not matter if the type is sized. For unsized types,
/// the value has to be a fat pointer, and we only care about the "extra" data in it.
pub fn size_and_align_of_dst(
    &mut self,
    ty: ty::Ty<'tcx>,
    value: Value,
) -> EvalResult<'tcx, (u64, u64)> {
    // Sized types: size/align are statically known, `value` is ignored.
    if let Some(size) = self.type_size(ty)? {
        Ok((size as u64, self.type_align(ty)? as u64))
    } else {
        match ty.sty {
            ty::TyAdt(..) | ty::TyTuple(..) => {
                // First get the size of all statically known fields.
                // Don't use type_of::sizing_type_of because that expects t to be sized,
                // and it also rounds up to alignment, which we want to avoid,
                // as the unsized field's alignment could be smaller.
                assert!(!ty.is_simd());
                let layout = self.type_layout(ty)?;
                debug!("DST {} layout: {:?}", ty, layout);

                let (sized_size, sized_align) = match *layout {
                    ty::layout::Layout::Univariant { ref variant, .. } => {
                        (
                            variant.offsets.last().map_or(0, |o| o.bytes()),
                            variant.align,
                        )
                    }
                    _ => {
                        bug!(
                            "size_and_align_of_dst: expcted Univariant for `{}`, found {:#?}",
                            ty,
                            layout
                        );
                    }
                };
                debug!(
                    "DST {} statically sized prefix size: {} align: {:?}",
                    ty,
                    sized_size,
                    sized_align
                );

                // Recurse to get the size of the dynamically sized field (must be
                // the last field).
                let (unsized_size, unsized_align) = match ty.sty {
                    ty::TyAdt(def, substs) => {
                        let last_field = def.struct_variant().fields.last().unwrap();
                        let field_ty = self.field_ty(substs, last_field);
                        self.size_and_align_of_dst(field_ty, value)?
                    }
                    ty::TyTuple(ref types, _) => {
                        let field_ty = types.last().unwrap();
                        let field_ty = self.tcx.normalize_associated_type(field_ty);
                        self.size_and_align_of_dst(field_ty, value)?
                    }
                    _ => bug!("We already checked that we know this type"),
                };

                // FIXME (#26403, #27023): We should be adding padding
                // to `sized_size` (to accommodate the `unsized_align`
                // required of the unsized field that follows) before
                // summing it with `sized_size`. (Note that since #26403
                // is unfixed, we do not yet add the necessary padding
                // here. But this is where the add would go.)

                // Return the sum of sizes and max of aligns.
                let size = sized_size + unsized_size;

                // Choose max of two known alignments (combined value must
                // be aligned according to more restrictive of the two).
                let align =
                    sized_align.max(Align::from_bytes(unsized_align, unsized_align).unwrap());

                // Issue #27023: must add any necessary padding to `size`
                // (to make it a multiple of `align`) before returning it.
                //
                // Namely, the returned size should be, in C notation:
                //
                // `size + ((size & (align-1)) ? align : 0)`
                //
                // emulated via the semi-standard fast bit trick:
                //
                // `(size + (align-1)) & -align`
                let size = Size::from_bytes(size).abi_align(align).bytes();
                Ok((size, align.abi()))
            }
            ty::TyDynamic(..) => {
                let (_, vtable) = value.into_ptr_vtable_pair(&mut self.memory)?;
                // the second entry in the vtable is the dynamic size of the object.
                self.read_size_and_align_from_vtable(vtable)
            }

            ty::TySlice(_) | ty::TyStr => {
                let elem_ty = ty.sequence_element_type(self.tcx);
                let elem_size = self.type_size(elem_ty)?.expect(
                    "slice element must be sized",
                ) as u64;
                let (_, len) = value.into_slice(&mut self.memory)?;
                let align = self.type_align(elem_ty)?;
                Ok((len * elem_size, align as u64))
            }

            _ => bug!("size_of_val::<{:?}>", ty),
        }
    }
}
|
|
|
|
|
|
|
|
/// Returns the normalized type of a struct field
|
2017-08-10 08:48:38 -07:00
|
|
|
fn field_ty(&self, param_substs: &Substs<'tcx>, f: &ty::FieldDef) -> ty::Ty<'tcx> {
|
|
|
|
self.tcx.normalize_associated_type(
|
|
|
|
&f.ty(self.tcx, param_substs),
|
|
|
|
)
|
2017-07-28 10:16:36 +02:00
|
|
|
}
|
|
|
|
|
2017-07-28 13:08:27 +02:00
|
|
|
/// Byte size of `ty` under the current frame's substitutions, `None` if unsized.
pub fn type_size(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<u64>> {
    self.type_size_with_substs(ty, self.substs())
}
|
|
|
|
|
2017-07-28 13:08:27 +02:00
|
|
|
/// ABI alignment (in bytes) of `ty` under the current frame's substitutions.
pub fn type_align(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, u64> {
    self.type_align_with_substs(ty, self.substs())
}
|
|
|
|
|
2017-08-08 14:22:11 +02:00
|
|
|
pub fn type_size_with_substs(
|
2016-12-07 20:30:37 -08:00
|
|
|
&self,
|
|
|
|
ty: Ty<'tcx>,
|
|
|
|
substs: &'tcx Substs<'tcx>,
|
|
|
|
) -> EvalResult<'tcx, Option<u64>> {
|
2016-11-17 17:23:40 +01:00
|
|
|
let layout = self.type_layout_with_substs(ty, substs)?;
|
2016-11-11 13:07:41 +01:00
|
|
|
if layout.is_unsized() {
|
2016-11-17 17:23:40 +01:00
|
|
|
Ok(None)
|
2016-11-11 13:07:41 +01:00
|
|
|
} else {
|
2016-11-18 12:55:14 +01:00
|
|
|
Ok(Some(layout.size(&self.tcx.data_layout).bytes()))
|
2016-11-11 13:07:41 +01:00
|
|
|
}
|
2016-06-13 11:24:01 +02:00
|
|
|
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
pub fn type_align_with_substs(
|
|
|
|
&self,
|
|
|
|
ty: Ty<'tcx>,
|
|
|
|
substs: &'tcx Substs<'tcx>,
|
|
|
|
) -> EvalResult<'tcx, u64> {
|
|
|
|
self.type_layout_with_substs(ty, substs).map(|layout| {
|
|
|
|
layout.align(&self.tcx.data_layout).abi()
|
|
|
|
})
|
2016-07-05 14:27:27 +02:00
|
|
|
}
|
|
|
|
|
2017-07-21 17:25:30 +02:00
|
|
|
/// Layout of `ty` under the current frame's substitutions.
pub fn type_layout(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, &'tcx Layout> {
    self.type_layout_with_substs(ty, self.substs())
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
/// Monomorphize `ty` with `substs` and compute its layout,
/// wrapping any layout failure into an `EvalError`.
fn type_layout_with_substs(
    &self,
    ty: Ty<'tcx>,
    substs: &'tcx Substs<'tcx>,
) -> EvalResult<'tcx, &'tcx Layout> {
    // TODO(solson): Is this inefficient? Needs investigation.
    let ty = self.monomorphize(ty, substs);

    ty.layout(self.tcx, ty::ParamEnv::empty(Reveal::All))
        .map_err(|layout| EvalErrorKind::Layout(layout).into())
}
|
2016-03-06 04:23:24 -06:00
|
|
|
|
2016-07-05 10:47:10 +02:00
|
|
|
/// Push a new stack frame for a call to `instance` and make it current.
///
/// Locals mentioned by `StorageLive`/`StorageDead` annotations start out dead
/// (`None`); all others start live as `PrimVal::Undef`.
/// Returns `StackFrameLimitReached` once the depth exceeds `stack_limit`.
pub fn push_stack_frame(
    &mut self,
    instance: ty::Instance<'tcx>,
    span: codemap::Span,
    mir: &'tcx mir::Mir<'tcx>,
    return_lvalue: Lvalue,
    return_to_block: StackPopCleanup,
) -> EvalResult<'tcx> {
    ::log_settings::settings().indentation += 1;

    /// Return the set of locals that have a storage annotation anywhere
    fn collect_storage_annotations<'tcx>(mir: &'tcx mir::Mir<'tcx>) -> HashSet<mir::Local> {
        use rustc::mir::StatementKind::*;

        let mut set = HashSet::new();
        for block in mir.basic_blocks() {
            for stmt in block.statements.iter() {
                match stmt.kind {
                    StorageLive(local) |
                    StorageDead(local) => {
                        set.insert(local);
                    }
                    _ => {}
                }
            }
        }
        set
    }

    // Subtract 1 because `local_decls` includes the ReturnMemoryPointer, but we don't store a local
    // `Value` for that.
    let num_locals = mir.local_decls.len() - 1;

    let locals = {
        let annotated_locals = collect_storage_annotations(mir);
        let mut locals = vec![None; num_locals];
        for i in 0..num_locals {
            // Local indices are shifted by 1 (index 0 is the return pointer).
            let local = mir::Local::new(i + 1);
            if !annotated_locals.contains(&local) {
                locals[i] = Some(Value::ByVal(PrimVal::Undef));
            }
        }
        locals
    };

    self.stack.push(Frame {
        mir,
        block: mir::START_BLOCK,
        return_to_block,
        return_lvalue,
        locals,
        span,
        instance,
        stmt: 0,
    });

    // Keep the memory subsystem's notion of the active frame in sync.
    self.memory.cur_frame = self.cur_frame();

    if self.stack.len() > self.stack_limit {
        err!(StackFrameLimitReached)
    } else {
        Ok(())
    }
}
|
|
|
|
|
2017-02-04 13:09:10 -08:00
|
|
|
/// Pop the current stack frame: end its regions, run its `return_to_block`
/// cleanup, and deallocate all of its stack-backed locals.
pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
    ::log_settings::settings().indentation -= 1;
    self.end_region(None)?;
    let frame = self.stack.pop().expect(
        "tried to pop a stack frame, but there were none",
    );
    if !self.stack.is_empty() {
        // TODO: Is this the correct time to start considering these accesses as originating from the returned-to stack frame?
        self.memory.cur_frame = self.cur_frame();
    }
    match frame.return_to_block {
        StackPopCleanup::MarkStatic(mutable) => {
            if let Lvalue::Ptr { ptr, .. } = frame.return_lvalue {
                // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
                self.memory.mark_static_initalized(
                    ptr.to_ptr()?.alloc_id,
                    mutable,
                )?
            } else {
                bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_lvalue);
            }
        }
        StackPopCleanup::Goto(target) => self.goto_block(target),
        StackPopCleanup::None => {}
    }
    // deallocate all locals that are backed by an allocation
    for local in frame.locals {
        self.deallocate_local(local)?;
    }

    Ok(())
}
|
|
|
|
|
2017-05-31 17:41:33 -07:00
|
|
|
/// Release the allocation backing a local, if any.
///
/// `ByVal` locals (and dead `None` locals) own no allocation and are a no-op;
/// allocations already promoted to statics are intentionally kept alive.
pub fn deallocate_local(&mut self, local: Option<Value>) -> EvalResult<'tcx> {
    if let Some(Value::ByRef(ptr)) = local {
        trace!("deallocating local");
        let ptr = ptr.to_ptr()?;
        self.memory.dump_alloc(ptr.alloc_id);
        match self.memory.get(ptr.alloc_id)?.kind {
            // for a constant like `const FOO: &i32 = &1;` the local containing
            // the `1` is referred to by the global. We transitively marked everything
            // the global refers to as static itself, so we don't free it here
            MemoryKind::Static => {}
            MemoryKind::Stack => self.memory.deallocate(ptr, None, MemoryKind::Stack)?,
            other => bug!("local contained non-stack memory: {:?}", other),
        }
    };
    Ok(())
}
|
|
|
|
|
2017-07-21 14:59:58 +02:00
|
|
|
/// Write an enum value: first the discriminant at `discr_offset`, then the
/// variant's fields through a downcast view of `dest`.
pub fn assign_discr_and_fields(
    &mut self,
    dest: Lvalue,
    dest_ty: Ty<'tcx>,
    discr_offset: u64,
    operands: &[mir::Operand<'tcx>],
    discr_val: u128,
    variant_idx: usize,
    discr_size: u64,
    discr_signed: bool,
) -> EvalResult<'tcx> {
    // FIXME(solson)
    let dest_ptr = self.force_allocation(dest)?.to_ptr()?;

    let discr_dest = dest_ptr.offset(discr_offset, &self)?;
    self.memory.write_primval(discr_dest, PrimVal::Bytes(discr_val), discr_size, discr_signed)?;

    // Rebuild `dest` as a downcast to `variant_idx` so that the field writes
    // below use the variant's field offsets.
    let dest = Lvalue::Ptr {
        ptr: PtrAndAlign {
            ptr: dest_ptr.into(),
            aligned: true,
        },
        extra: LvalueExtra::DowncastVariant(variant_idx),
    };

    self.assign_fields(dest, dest_ty, operands)
}
|
|
|
|
|
2017-07-21 14:59:58 +02:00
|
|
|
/// Evaluate `operands` and write each into the corresponding field of `dest`.
///
/// Zero-sized destinations are a no-op; destinations with a primitive
/// representation take exactly one operand, written whole.
pub fn assign_fields(
    &mut self,
    dest: Lvalue,
    dest_ty: Ty<'tcx>,
    operands: &[mir::Operand<'tcx>],
) -> EvalResult<'tcx> {
    if self.type_size(dest_ty)? == Some(0) {
        // zst assigning is a nop
        return Ok(());
    }
    if self.ty_to_primval_kind(dest_ty).is_ok() {
        assert_eq!(operands.len(), 1);
        let value = self.eval_operand(&operands[0])?;
        return self.write_value(value, dest);
    }
    for (field_index, operand) in operands.iter().enumerate() {
        let value = self.eval_operand(operand)?;
        let field_dest = self.lvalue_field(dest, mir::Field::new(field_index), dest_ty, value.ty)?;
        self.write_value(value, field_dest)?;
    }
    Ok(())
}
|
|
|
|
|
2016-09-19 02:19:31 -06:00
|
|
|
/// Evaluate an assignment statement.
|
|
|
|
///
|
|
|
|
/// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
|
|
|
|
/// type writes its results directly into the memory specified by the lvalue.
|
2016-12-07 20:30:37 -08:00
|
|
|
pub(super) fn eval_rvalue_into_lvalue(
|
2016-09-19 02:19:31 -06:00
|
|
|
&mut self,
|
|
|
|
rvalue: &mir::Rvalue<'tcx>,
|
|
|
|
lvalue: &mir::Lvalue<'tcx>,
|
2017-02-04 13:09:10 -08:00
|
|
|
) -> EvalResult<'tcx> {
|
2016-10-14 03:31:45 -06:00
|
|
|
let dest = self.eval_lvalue(lvalue)?;
|
2016-04-07 05:56:07 -06:00
|
|
|
let dest_ty = self.lvalue_ty(lvalue);
|
2016-11-17 17:23:40 +01:00
|
|
|
let dest_layout = self.type_layout(dest_ty)?;
|
2015-11-20 15:54:02 -06:00
|
|
|
|
2016-11-03 10:38:08 +01:00
|
|
|
use rustc::mir::Rvalue::*;
|
2015-11-12 16:13:35 -06:00
|
|
|
match *rvalue {
|
2016-03-07 03:32:02 -06:00
|
|
|
Use(ref operand) => {
|
2017-08-24 14:41:49 +02:00
|
|
|
let value = self.eval_operand(operand)?.value;
|
|
|
|
let valty = ValTy {
|
|
|
|
value,
|
|
|
|
ty: dest_ty,
|
|
|
|
};
|
|
|
|
self.write_value(valty, dest)?;
|
2016-02-27 19:20:25 -06:00
|
|
|
}
|
2015-11-20 15:54:02 -06:00
|
|
|
|
2016-03-13 01:43:28 -06:00
|
|
|
BinaryOp(bin_op, ref left, ref right) => {
|
2017-08-24 14:41:49 +02:00
|
|
|
let left = self.eval_operand(left)?;
|
|
|
|
let right = self.eval_operand(right)?;
|
2017-08-10 08:48:38 -07:00
|
|
|
if self.intrinsic_overflowing(
|
|
|
|
bin_op,
|
|
|
|
left,
|
|
|
|
right,
|
|
|
|
dest,
|
|
|
|
dest_ty,
|
|
|
|
)?
|
|
|
|
{
|
2017-06-07 15:39:44 -07:00
|
|
|
// There was an overflow in an unchecked binop. Right now, we consider this an error and bail out.
|
|
|
|
// The rationale is that the reason rustc emits unchecked binops in release mode (vs. the checked binops
|
2017-06-08 10:56:49 -07:00
|
|
|
// it emits in debug mode) is performance, but it doesn't cost us any performance in miri.
|
2017-06-07 15:39:44 -07:00
|
|
|
// If, however, the compiler ever starts transforming unchecked intrinsics into unchecked binops,
|
|
|
|
// we have to go back to just ignoring the overflow here.
|
2017-08-02 16:59:01 +02:00
|
|
|
return err!(OverflowingMath);
|
2017-06-07 15:39:44 -07:00
|
|
|
}
|
2016-03-13 01:43:28 -06:00
|
|
|
}
|
2016-03-07 07:10:52 -06:00
|
|
|
|
2016-06-11 13:10:42 -06:00
|
|
|
CheckedBinaryOp(bin_op, ref left, ref right) => {
|
2017-08-24 14:41:49 +02:00
|
|
|
let left = self.eval_operand(left)?;
|
|
|
|
let right = self.eval_operand(right)?;
|
2017-08-10 08:48:38 -07:00
|
|
|
self.intrinsic_with_overflow(
|
|
|
|
bin_op,
|
|
|
|
left,
|
|
|
|
right,
|
|
|
|
dest,
|
|
|
|
dest_ty,
|
|
|
|
)?;
|
2016-06-11 13:10:42 -06:00
|
|
|
}
|
2016-06-11 12:38:28 -06:00
|
|
|
|
2016-03-07 07:57:08 -06:00
|
|
|
UnaryOp(un_op, ref operand) => {
|
2016-09-19 02:19:31 -06:00
|
|
|
let val = self.eval_operand_to_primval(operand)?;
|
2016-11-26 22:58:01 -08:00
|
|
|
let kind = self.ty_to_primval_kind(dest_ty)?;
|
2017-08-10 08:48:38 -07:00
|
|
|
self.write_primval(
|
|
|
|
dest,
|
|
|
|
operator::unary_op(un_op, val, kind)?,
|
|
|
|
dest_ty,
|
|
|
|
)?;
|
2016-03-07 07:57:08 -06:00
|
|
|
}
|
2016-03-04 23:17:31 -06:00
|
|
|
|
2017-02-13 13:46:39 +01:00
|
|
|
// Skip everything for zsts
|
|
|
|
Aggregate(..) if self.type_size(dest_ty)? == Some(0) => {}
|
|
|
|
|
2016-03-12 22:15:59 -06:00
|
|
|
Aggregate(ref kind, ref operands) => {
|
2016-11-17 17:22:34 +01:00
|
|
|
self.inc_step_counter_and_check_limit(operands.len() as u64)?;
|
2016-05-08 19:29:00 -06:00
|
|
|
use rustc::ty::layout::Layout::*;
|
2016-04-23 00:03:59 -06:00
|
|
|
match *dest_layout {
|
2017-08-08 15:53:07 +02:00
|
|
|
Univariant { ref variant, .. } => {
|
|
|
|
self.write_maybe_aligned_mut(!variant.packed, |ecx| {
|
|
|
|
ecx.assign_fields(dest, dest_ty, operands)
|
|
|
|
})?;
|
|
|
|
}
|
|
|
|
|
|
|
|
Array { .. } => {
|
2017-02-13 11:58:42 +01:00
|
|
|
self.assign_fields(dest, dest_ty, operands)?;
|
2016-03-28 21:08:08 -06:00
|
|
|
}
|
2016-03-20 23:09:27 -06:00
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
General {
|
|
|
|
discr,
|
|
|
|
ref variants,
|
|
|
|
..
|
|
|
|
} => {
|
2017-05-13 07:08:30 -04:00
|
|
|
if let mir::AggregateKind::Adt(adt_def, variant, _, _) = **kind {
|
2017-08-10 08:48:38 -07:00
|
|
|
let discr_val = adt_def
|
|
|
|
.discriminants(self.tcx)
|
2017-03-02 13:11:33 +01:00
|
|
|
.nth(variant)
|
|
|
|
.expect("broken mir: Adt variant id invalid")
|
|
|
|
.to_u128_unchecked();
|
2016-11-18 12:55:14 +01:00
|
|
|
let discr_size = discr.size().bytes();
|
2016-10-14 03:31:45 -06:00
|
|
|
|
2016-12-19 17:26:47 +01:00
|
|
|
self.assign_discr_and_fields(
|
|
|
|
dest,
|
2017-02-13 11:58:42 +01:00
|
|
|
dest_ty,
|
|
|
|
variants[variant].offsets[0].bytes(),
|
2016-12-19 17:26:47 +01:00
|
|
|
operands,
|
|
|
|
discr_val,
|
2017-02-13 11:58:42 +01:00
|
|
|
variant,
|
2016-12-19 17:26:47 +01:00
|
|
|
discr_size,
|
2017-08-25 18:25:05 +02:00
|
|
|
false,
|
2016-12-19 17:26:47 +01:00
|
|
|
)?;
|
2016-04-23 00:03:59 -06:00
|
|
|
} else {
|
2016-09-06 16:16:49 +02:00
|
|
|
bug!("tried to assign {:?} to Layout::General", kind);
|
2016-03-15 05:50:53 -06:00
|
|
|
}
|
2016-04-30 01:04:17 -06:00
|
|
|
}
|
|
|
|
|
2016-05-08 19:29:00 -06:00
|
|
|
RawNullablePointer { nndiscr, .. } => {
|
2017-05-13 07:08:30 -04:00
|
|
|
if let mir::AggregateKind::Adt(_, variant, _, _) = **kind {
|
2016-04-30 01:04:17 -06:00
|
|
|
if nndiscr == variant as u64 {
|
|
|
|
assert_eq!(operands.len(), 1);
|
|
|
|
let operand = &operands[0];
|
2016-09-19 02:19:31 -06:00
|
|
|
let value = self.eval_operand(operand)?;
|
2017-08-24 14:41:49 +02:00
|
|
|
self.write_value(value, dest)?;
|
2016-04-30 01:04:17 -06:00
|
|
|
} else {
|
2016-11-15 16:15:17 +01:00
|
|
|
if let Some(operand) = operands.get(0) {
|
|
|
|
assert_eq!(operands.len(), 1);
|
|
|
|
let operand_ty = self.operand_ty(operand);
|
2016-11-17 17:23:40 +01:00
|
|
|
assert_eq!(self.type_size(operand_ty)?, Some(0));
|
2016-11-15 16:15:17 +01:00
|
|
|
}
|
2017-07-04 14:33:15 +02:00
|
|
|
self.write_null(dest, dest_ty)?;
|
2016-04-30 01:04:17 -06:00
|
|
|
}
|
|
|
|
} else {
|
2016-09-06 16:16:49 +02:00
|
|
|
bug!("tried to assign {:?} to Layout::RawNullablePointer", kind);
|
2016-03-15 05:50:53 -06:00
|
|
|
}
|
2016-04-23 00:03:59 -06:00
|
|
|
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
StructWrappedNullablePointer {
|
|
|
|
nndiscr,
|
|
|
|
ref discrfield_source,
|
|
|
|
ref nonnull,
|
|
|
|
..
|
|
|
|
} => {
|
2017-05-13 07:08:30 -04:00
|
|
|
if let mir::AggregateKind::Adt(_, variant, _, _) = **kind {
|
2016-05-25 00:37:52 -06:00
|
|
|
if nndiscr == variant as u64 {
|
2017-08-08 15:53:07 +02:00
|
|
|
self.write_maybe_aligned_mut(!nonnull.packed, |ecx| {
|
|
|
|
ecx.assign_fields(dest, dest_ty, operands)
|
|
|
|
})?;
|
2016-05-25 00:37:52 -06:00
|
|
|
} else {
|
2016-09-27 17:02:04 +02:00
|
|
|
for operand in operands {
|
|
|
|
let operand_ty = self.operand_ty(operand);
|
2016-11-17 17:23:40 +01:00
|
|
|
assert_eq!(self.type_size(operand_ty)?, Some(0));
|
2016-09-27 17:02:04 +02:00
|
|
|
}
|
2017-08-28 15:27:50 +02:00
|
|
|
self.write_struct_wrapped_null_pointer(
|
|
|
|
dest_ty,
|
|
|
|
nndiscr,
|
|
|
|
discrfield_source,
|
|
|
|
dest,
|
2017-08-10 08:48:38 -07:00
|
|
|
)?;
|
2016-05-25 00:37:52 -06:00
|
|
|
}
|
|
|
|
} else {
|
2016-09-06 16:16:49 +02:00
|
|
|
bug!("tried to assign {:?} to Layout::RawNullablePointer", kind);
|
2016-05-25 00:37:52 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-11-26 22:58:01 -08:00
|
|
|
CEnum { .. } => {
|
2016-05-08 19:29:00 -06:00
|
|
|
assert_eq!(operands.len(), 0);
|
2017-05-13 07:08:30 -04:00
|
|
|
if let mir::AggregateKind::Adt(adt_def, variant, _, _) = **kind {
|
2017-08-10 08:48:38 -07:00
|
|
|
let n = adt_def
|
|
|
|
.discriminants(self.tcx)
|
2017-03-02 13:11:33 +01:00
|
|
|
.nth(variant)
|
|
|
|
.expect("broken mir: Adt variant index invalid")
|
|
|
|
.to_u128_unchecked();
|
2016-12-15 23:40:45 -08:00
|
|
|
self.write_primval(dest, PrimVal::Bytes(n), dest_ty)?;
|
2016-05-08 19:29:00 -06:00
|
|
|
} else {
|
2016-09-06 16:16:49 +02:00
|
|
|
bug!("tried to assign {:?} to Layout::CEnum", kind);
|
2016-05-08 19:29:00 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-02-13 11:58:42 +01:00
|
|
|
Vector { count, .. } => {
|
2016-11-15 15:23:19 +01:00
|
|
|
debug_assert_eq!(count, operands.len() as u64);
|
2017-02-13 11:58:42 +01:00
|
|
|
self.assign_fields(dest, dest_ty, operands)?;
|
2016-11-15 15:23:19 +01:00
|
|
|
}
|
|
|
|
|
2017-08-08 15:53:07 +02:00
|
|
|
UntaggedUnion { ref variants } => {
|
2016-12-07 23:25:47 -08:00
|
|
|
assert_eq!(operands.len(), 1);
|
|
|
|
let operand = &operands[0];
|
|
|
|
let value = self.eval_operand(operand)?;
|
2017-08-08 15:53:07 +02:00
|
|
|
self.write_maybe_aligned_mut(!variants.packed, |ecx| {
|
2017-08-24 14:41:49 +02:00
|
|
|
ecx.write_value(value, dest)
|
2017-08-08 15:53:07 +02:00
|
|
|
})?;
|
2016-12-07 23:25:47 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
_ => {
|
2017-08-02 16:59:01 +02:00
|
|
|
return err!(Unimplemented(format!(
|
2016-12-07 23:25:47 -08:00
|
|
|
"can't handle destination layout {:?} when assigning {:?}",
|
|
|
|
dest_layout,
|
|
|
|
kind
|
|
|
|
)));
|
|
|
|
}
|
2016-03-12 22:15:59 -06:00
|
|
|
}
|
2015-11-12 15:50:58 -06:00
|
|
|
}
|
2015-11-12 17:24:43 -06:00
|
|
|
|
2016-03-21 03:34:24 -06:00
|
|
|
Repeat(ref operand, _) => {
|
2016-09-19 02:19:31 -06:00
|
|
|
let (elem_ty, length) = match dest_ty.sty {
|
2017-09-13 12:58:25 +02:00
|
|
|
ty::TyArray(elem_ty, n) => (elem_ty, n.val.to_const_int().unwrap().to_u64().unwrap()),
|
2017-08-10 08:48:38 -07:00
|
|
|
_ => {
|
|
|
|
bug!(
|
|
|
|
"tried to assign array-repeat to non-array type {:?}",
|
|
|
|
dest_ty
|
|
|
|
)
|
|
|
|
}
|
2016-04-23 00:03:59 -06:00
|
|
|
};
|
2016-11-18 12:55:14 +01:00
|
|
|
self.inc_step_counter_and_check_limit(length)?;
|
2017-08-10 08:48:38 -07:00
|
|
|
let elem_size = self.type_size(elem_ty)?.expect(
|
|
|
|
"repeat element type must be sized",
|
|
|
|
);
|
2017-08-24 14:41:49 +02:00
|
|
|
let value = self.eval_operand(operand)?.value;
|
2016-10-14 03:31:45 -06:00
|
|
|
|
|
|
|
// FIXME(solson)
|
2017-07-04 14:33:15 +02:00
|
|
|
let dest = Pointer::from(self.force_allocation(dest)?.to_ptr()?);
|
2016-10-14 03:31:45 -06:00
|
|
|
|
2016-04-23 00:03:59 -06:00
|
|
|
for i in 0..length {
|
2017-07-24 09:56:02 +02:00
|
|
|
let elem_dest = dest.offset(i * elem_size, &self)?;
|
2016-10-14 03:31:45 -06:00
|
|
|
self.write_value_to_ptr(value, elem_dest, elem_ty)?;
|
2016-03-21 03:34:24 -06:00
|
|
|
}
|
|
|
|
}
|
2016-03-21 03:19:48 -06:00
|
|
|
|
2016-03-20 21:30:31 -06:00
|
|
|
Len(ref lvalue) => {
|
2017-07-25 11:32:48 +02:00
|
|
|
// FIXME(CTFE): don't allow computing the length of arrays in const eval
|
2016-05-09 18:52:44 -06:00
|
|
|
let src = self.eval_lvalue(lvalue)?;
|
2016-03-20 21:30:31 -06:00
|
|
|
let ty = self.lvalue_ty(lvalue);
|
2016-09-28 18:22:09 +02:00
|
|
|
let (_, len) = src.elem_ty_and_len(ty);
|
2017-08-10 08:48:38 -07:00
|
|
|
self.write_primval(
|
|
|
|
dest,
|
|
|
|
PrimVal::from_u128(len as u128),
|
|
|
|
dest_ty,
|
|
|
|
)?;
|
2016-03-20 21:30:31 -06:00
|
|
|
}
|
|
|
|
|
2016-03-13 14:36:25 -06:00
|
|
|
Ref(_, _, ref lvalue) => {
|
2016-10-16 02:12:46 -06:00
|
|
|
let src = self.eval_lvalue(lvalue)?;
|
2017-07-19 13:31:21 -07:00
|
|
|
// We ignore the alignment of the lvalue here -- special handling for packed structs ends
|
|
|
|
// at the `&` operator.
|
2017-08-08 15:53:07 +02:00
|
|
|
let (ptr, extra) = self.force_allocation(src)?.to_ptr_extra_aligned();
|
2016-10-14 22:10:06 -06:00
|
|
|
|
2016-10-16 02:12:46 -06:00
|
|
|
let val = match extra {
|
2017-08-08 15:53:07 +02:00
|
|
|
LvalueExtra::None => ptr.ptr.to_value(),
|
|
|
|
LvalueExtra::Length(len) => ptr.ptr.to_value_with_len(len),
|
|
|
|
LvalueExtra::Vtable(vtable) => ptr.ptr.to_value_with_vtable(vtable),
|
2017-08-10 08:48:38 -07:00
|
|
|
LvalueExtra::DowncastVariant(..) => {
|
|
|
|
bug!("attempted to take a reference to an enum downcast lvalue")
|
|
|
|
}
|
2016-10-16 02:12:46 -06:00
|
|
|
};
|
2017-08-24 14:41:49 +02:00
|
|
|
let valty = ValTy {
|
|
|
|
value: val,
|
|
|
|
ty: dest_ty,
|
|
|
|
};
|
|
|
|
self.write_value(valty, dest)?;
|
2016-03-13 14:36:25 -06:00
|
|
|
}
|
2015-12-28 22:24:05 -06:00
|
|
|
|
2017-05-30 09:27:08 -04:00
|
|
|
NullaryOp(mir::NullOp::Box, ty) => {
|
2017-07-28 16:48:43 +02:00
|
|
|
let ptr = M::box_alloc(self, ty)?;
|
|
|
|
self.write_primval(dest, ptr, dest_ty)?;
|
2016-03-14 22:05:50 -06:00
|
|
|
}
|
|
|
|
|
2017-06-01 17:24:21 -07:00
|
|
|
NullaryOp(mir::NullOp::SizeOf, ty) => {
|
2017-08-10 08:48:38 -07:00
|
|
|
let size = self.type_size(ty)?.expect(
|
|
|
|
"SizeOf nullary MIR operator called for unsized type",
|
|
|
|
);
|
|
|
|
self.write_primval(
|
|
|
|
dest,
|
|
|
|
PrimVal::from_u128(size as u128),
|
|
|
|
dest_ty,
|
|
|
|
)?;
|
2017-05-30 09:27:08 -04:00
|
|
|
}
|
|
|
|
|
2016-09-27 18:01:33 +02:00
|
|
|
Cast(kind, ref operand, cast_ty) => {
|
|
|
|
debug_assert_eq!(self.monomorphize(cast_ty, self.substs()), dest_ty);
|
2016-11-03 10:38:08 +01:00
|
|
|
use rustc::mir::CastKind::*;
|
2016-03-16 23:28:49 -06:00
|
|
|
match kind {
|
|
|
|
Unsize => {
|
2016-09-23 10:27:14 +02:00
|
|
|
let src = self.eval_operand(operand)?;
|
2017-08-24 14:41:49 +02:00
|
|
|
self.unsize_into(src.value, src.ty, dest, dest_ty)?;
|
2016-03-16 23:28:49 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
Misc => {
|
2016-09-23 10:27:14 +02:00
|
|
|
let src = self.eval_operand(operand)?;
|
2017-08-24 14:41:49 +02:00
|
|
|
if self.type_is_fat_ptr(src.ty) {
|
|
|
|
match (src.value, self.type_is_fat_ptr(dest_ty)) {
|
2017-08-10 08:48:38 -07:00
|
|
|
(Value::ByRef { .. }, _) |
|
2016-10-21 13:56:38 +02:00
|
|
|
(Value::ByValPair(..), true) => {
|
2017-08-24 14:41:49 +02:00
|
|
|
let valty = ValTy {
|
|
|
|
value: src.value,
|
|
|
|
ty: dest_ty,
|
|
|
|
};
|
|
|
|
self.write_value(valty, dest)?;
|
2017-08-10 08:48:38 -07:00
|
|
|
}
|
2016-09-26 17:49:30 +02:00
|
|
|
(Value::ByValPair(data, _), false) => {
|
2017-08-24 14:41:49 +02:00
|
|
|
let valty = ValTy {
|
|
|
|
value: Value::ByVal(data),
|
|
|
|
ty: dest_ty,
|
|
|
|
};
|
|
|
|
self.write_value(valty, dest)?;
|
2017-08-10 08:48:38 -07:00
|
|
|
}
|
2016-09-23 10:27:14 +02:00
|
|
|
(Value::ByVal(_), _) => bug!("expected fat ptr"),
|
2016-09-07 18:34:59 +02:00
|
|
|
}
|
2016-09-08 10:26:33 +02:00
|
|
|
} else {
|
2017-08-24 14:41:49 +02:00
|
|
|
let src_val = self.value_to_primval(src)?;
|
|
|
|
let dest_val = self.cast_primval(src_val, src.ty, dest_ty)?;
|
|
|
|
let valty = ValTy {
|
|
|
|
value: Value::ByVal(dest_val),
|
|
|
|
ty: dest_ty,
|
|
|
|
};
|
|
|
|
self.write_value(valty, dest)?;
|
2016-06-13 11:24:01 +02:00
|
|
|
}
|
2016-03-16 23:28:49 -06:00
|
|
|
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
ReifyFnPointer => {
|
|
|
|
match self.operand_ty(operand).sty {
|
|
|
|
ty::TyFnDef(def_id, substs) => {
|
|
|
|
let instance = resolve(self.tcx, def_id, substs);
|
|
|
|
let fn_ptr = self.memory.create_fn_alloc(instance);
|
2017-08-24 14:41:49 +02:00
|
|
|
let valty = ValTy {
|
|
|
|
value: Value::ByVal(PrimVal::Ptr(fn_ptr)),
|
|
|
|
ty: dest_ty,
|
|
|
|
};
|
|
|
|
self.write_value(valty, dest)?;
|
2017-08-10 08:48:38 -07:00
|
|
|
}
|
|
|
|
ref other => bug!("reify fn pointer on {:?}", other),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
UnsafeFnPointer => {
|
|
|
|
match dest_ty.sty {
|
|
|
|
ty::TyFnPtr(_) => {
|
2017-08-24 14:41:49 +02:00
|
|
|
let mut src = self.eval_operand(operand)?;
|
|
|
|
src.ty = dest_ty;
|
|
|
|
self.write_value(src, dest)?;
|
2017-08-10 08:48:38 -07:00
|
|
|
}
|
|
|
|
ref other => bug!("fn to unsafe fn cast on {:?}", other),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
ClosureFnPointer => {
|
|
|
|
match self.operand_ty(operand).sty {
|
|
|
|
ty::TyClosure(def_id, substs) => {
|
|
|
|
let instance = resolve_closure(
|
|
|
|
self.tcx,
|
|
|
|
def_id,
|
|
|
|
substs,
|
|
|
|
ty::ClosureKind::FnOnce,
|
|
|
|
);
|
|
|
|
let fn_ptr = self.memory.create_fn_alloc(instance);
|
2017-08-24 14:41:49 +02:00
|
|
|
let valty = ValTy {
|
|
|
|
value: Value::ByVal(PrimVal::Ptr(fn_ptr)),
|
|
|
|
ty: dest_ty,
|
|
|
|
};
|
|
|
|
self.write_value(valty, dest)?;
|
2017-08-10 08:48:38 -07:00
|
|
|
}
|
|
|
|
ref other => bug!("closure fn pointer on {:?}", other),
|
|
|
|
}
|
|
|
|
}
|
2016-03-16 23:28:49 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-02-24 10:39:55 +01:00
|
|
|
Discriminant(ref lvalue) => {
|
|
|
|
let lval = self.eval_lvalue(lvalue)?;
|
|
|
|
let ty = self.lvalue_ty(lvalue);
|
2017-06-19 10:58:59 +02:00
|
|
|
let ptr = self.force_allocation(lval)?.to_ptr()?;
|
2017-02-24 10:39:55 +01:00
|
|
|
let discr_val = self.read_discriminant_value(ptr, ty)?;
|
|
|
|
if let ty::TyAdt(adt_def, _) = ty.sty {
|
2017-08-25 16:20:13 +02:00
|
|
|
trace!("Read discriminant {}, valid discriminants {:?}", discr_val, adt_def.discriminants(self.tcx).collect::<Vec<_>>());
|
2017-08-10 08:48:38 -07:00
|
|
|
if adt_def.discriminants(self.tcx).all(|v| {
|
|
|
|
discr_val != v.to_u128_unchecked()
|
|
|
|
})
|
|
|
|
{
|
2017-08-02 16:59:01 +02:00
|
|
|
return err!(InvalidDiscriminant);
|
2017-02-24 10:39:55 +01:00
|
|
|
}
|
2017-08-25 16:20:13 +02:00
|
|
|
self.write_primval(dest, PrimVal::Bytes(discr_val), dest_ty)?;
|
2017-02-24 10:39:55 +01:00
|
|
|
} else {
|
|
|
|
bug!("rustc only generates Rvalue::Discriminant for enums");
|
|
|
|
}
|
2017-08-10 08:48:38 -07:00
|
|
|
}
|
2015-11-12 15:50:58 -06:00
|
|
|
}
|
2016-03-20 23:09:27 -06:00
|
|
|
|
2016-10-20 04:42:19 -06:00
|
|
|
if log_enabled!(::log::LogLevel::Trace) {
|
|
|
|
self.dump_local(dest);
|
|
|
|
}
|
|
|
|
|
2016-03-20 23:09:27 -06:00
|
|
|
Ok(())
|
2015-11-12 15:50:58 -06:00
|
|
|
}
|
|
|
|
|
2017-08-28 15:27:50 +02:00
|
|
|
pub(crate) fn write_struct_wrapped_null_pointer(
|
|
|
|
&mut self,
|
|
|
|
dest_ty: ty::Ty<'tcx>,
|
|
|
|
nndiscr: u64,
|
|
|
|
discrfield_source: &layout::FieldPath,
|
|
|
|
dest: Lvalue,
|
|
|
|
) -> EvalResult<'tcx> {
|
|
|
|
let (offset, TyAndPacked { ty, packed }) = self.nonnull_offset_and_ty(
|
|
|
|
dest_ty,
|
|
|
|
nndiscr,
|
|
|
|
discrfield_source,
|
|
|
|
)?;
|
|
|
|
let nonnull = self.force_allocation(dest)?.to_ptr()?.offset(
|
|
|
|
offset.bytes(),
|
|
|
|
&self,
|
|
|
|
)?;
|
|
|
|
trace!("struct wrapped nullable pointer type: {}", ty);
|
|
|
|
// only the pointer part of a fat pointer is used for this space optimization
|
|
|
|
let discr_size = self.type_size(ty)?.expect(
|
|
|
|
"bad StructWrappedNullablePointer discrfield",
|
|
|
|
);
|
|
|
|
self.memory.write_maybe_aligned_mut(!packed, |mem| {
|
|
|
|
// We're writing 0, signedness does not matter
|
|
|
|
mem.write_primval(nonnull, PrimVal::Bytes(0), discr_size, false)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2017-06-28 13:37:23 +02:00
|
|
|
pub(super) fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
|
2016-09-07 18:34:59 +02:00
|
|
|
match ty.sty {
|
2017-02-03 15:47:23 +01:00
|
|
|
ty::TyRawPtr(ref tam) |
|
|
|
|
ty::TyRef(_, ref tam) => !self.type_is_sized(tam.ty),
|
|
|
|
ty::TyAdt(def, _) if def.is_box() => !self.type_is_sized(ty.boxed_ty()),
|
2016-09-07 18:34:59 +02:00
|
|
|
_ => false,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-12-07 20:30:37 -08:00
|
|
|
pub(super) fn nonnull_offset_and_ty(
|
|
|
|
&self,
|
|
|
|
ty: Ty<'tcx>,
|
|
|
|
nndiscr: u64,
|
|
|
|
discrfield: &[u32],
|
2017-07-28 19:43:05 -07:00
|
|
|
) -> EvalResult<'tcx, (Size, TyAndPacked<'tcx>)> {
|
2016-11-18 12:55:14 +01:00
|
|
|
// Skip the constant 0 at the start meant for LLVM GEP and the outer non-null variant
|
|
|
|
let path = discrfield.iter().skip(2).map(|&i| i as usize);
|
2016-05-25 00:37:52 -06:00
|
|
|
|
|
|
|
// Handle the field index for the outer non-null variant.
|
2017-01-28 15:46:46 +01:00
|
|
|
let (inner_offset, inner_ty) = match ty.sty {
|
2016-09-10 20:59:23 -06:00
|
|
|
ty::TyAdt(adt_def, substs) => {
|
2016-05-25 00:37:52 -06:00
|
|
|
let variant = &adt_def.variants[nndiscr as usize];
|
2016-11-18 12:55:14 +01:00
|
|
|
let index = discrfield[1];
|
|
|
|
let field = &variant.fields[index as usize];
|
2017-08-10 08:48:38 -07:00
|
|
|
(
|
|
|
|
self.get_field_offset(ty, index as usize)?,
|
|
|
|
field.ty(self.tcx, substs),
|
|
|
|
)
|
2016-05-25 00:37:52 -06:00
|
|
|
}
|
2016-09-06 16:16:49 +02:00
|
|
|
_ => bug!("non-enum for StructWrappedNullablePointer: {}", ty),
|
2016-05-25 00:37:52 -06:00
|
|
|
};
|
|
|
|
|
2017-01-28 15:46:46 +01:00
|
|
|
self.field_path_offset_and_ty(inner_offset, inner_ty, path)
|
2016-05-25 00:37:52 -06:00
|
|
|
}
|
|
|
|
|
2017-01-28 15:46:46 +01:00
|
|
|
fn field_path_offset_and_ty<I: Iterator<Item = usize>>(
|
|
|
|
&self,
|
|
|
|
mut offset: Size,
|
|
|
|
mut ty: Ty<'tcx>,
|
|
|
|
path: I,
|
2017-07-28 19:43:05 -07:00
|
|
|
) -> EvalResult<'tcx, (Size, TyAndPacked<'tcx>)> {
|
2016-05-25 00:37:52 -06:00
|
|
|
// Skip the initial 0 intended for LLVM GEP.
|
2017-07-26 23:43:13 -07:00
|
|
|
let mut packed = false;
|
2016-05-25 00:37:52 -06:00
|
|
|
for field_index in path {
|
2016-05-30 15:27:52 +02:00
|
|
|
let field_offset = self.get_field_offset(ty, field_index)?;
|
2017-08-10 08:48:38 -07:00
|
|
|
trace!(
|
|
|
|
"field_path_offset_and_ty: {}, {}, {:?}, {:?}",
|
|
|
|
field_index,
|
|
|
|
ty,
|
|
|
|
field_offset,
|
|
|
|
offset
|
|
|
|
);
|
2017-07-26 23:43:13 -07:00
|
|
|
let field_ty = self.get_field_ty(ty, field_index)?;
|
2017-07-28 19:43:05 -07:00
|
|
|
ty = field_ty.ty;
|
|
|
|
packed = packed || field_ty.packed;
|
2017-08-10 08:48:38 -07:00
|
|
|
offset = offset
|
|
|
|
.checked_add(field_offset, &self.tcx.data_layout)
|
|
|
|
.unwrap();
|
2016-05-25 00:37:52 -06:00
|
|
|
}
|
|
|
|
|
2017-07-28 19:43:05 -07:00
|
|
|
Ok((offset, TyAndPacked { ty, packed }))
|
2016-05-25 00:37:52 -06:00
|
|
|
}
|
2017-08-10 08:48:38 -07:00
|
|
|
fn get_fat_field(
|
|
|
|
&self,
|
|
|
|
pointee_ty: Ty<'tcx>,
|
|
|
|
field_index: usize,
|
|
|
|
) -> EvalResult<'tcx, Ty<'tcx>> {
|
2017-02-03 15:47:23 +01:00
|
|
|
match (field_index, &self.tcx.struct_tail(pointee_ty).sty) {
|
|
|
|
(1, &ty::TyStr) |
|
|
|
|
(1, &ty::TySlice(_)) => Ok(self.tcx.types.usize),
|
|
|
|
(1, &ty::TyDynamic(..)) |
|
|
|
|
(0, _) => Ok(self.tcx.mk_imm_ptr(self.tcx.types.u8)),
|
|
|
|
_ => bug!("invalid fat pointee type: {}", pointee_ty),
|
|
|
|
}
|
|
|
|
}
|
2016-05-25 00:37:52 -06:00
|
|
|
|
2017-07-26 23:43:13 -07:00
|
|
|
    /// Returns the field type and whether the field is packed
    pub fn get_field_ty(
        &self,
        ty: Ty<'tcx>,
        field_index: usize,
    ) -> EvalResult<'tcx, TyAndPacked<'tcx>> {
        match ty.sty {
            // `Box<T>` is treated like a (possibly fat) pointer to its pointee.
            ty::TyAdt(adt_def, _) if adt_def.is_box() => Ok(TyAndPacked {
                ty: self.get_fat_field(ty.boxed_ty(), field_index)?,
                packed: false,
            }),
            // Enums: only the nullable-pointer-optimized and single-variant layouts
            // have well-defined fields to project into.
            ty::TyAdt(adt_def, substs) if adt_def.is_enum() => {
                use rustc::ty::layout::Layout::*;
                match *self.type_layout(ty)? {
                    // Fields come from the non-null variant (`nndiscr`).
                    RawNullablePointer { nndiscr, .. } => Ok(TyAndPacked {
                        ty: adt_def.variants[nndiscr as usize].fields[field_index].ty(
                            self.tcx,
                            substs,
                        ),
                        packed: false,
                    }),
                    StructWrappedNullablePointer {
                        nndiscr,
                        ref nonnull,
                        ..
                    } => {
                        let ty = adt_def.variants[nndiscr as usize].fields[field_index].ty(
                            self.tcx,
                            substs,
                        );
                        // Packedness is taken from the non-null variant's struct layout.
                        Ok(TyAndPacked {
                            ty,
                            packed: nonnull.packed,
                        })
                    }
                    // mir optimizations treat single variant enums as structs
                    General { .. } if adt_def.variants.len() == 1 => Ok(TyAndPacked {
                        ty: adt_def.variants[0].fields[field_index].ty(self.tcx, substs),
                        packed: false,
                    }),
                    _ => {
                        err!(Unimplemented(format!(
                            "get_field_ty can't handle enum type: {:?}, {:?}",
                            ty,
                            ty.sty
                        )))
                    }
                }
            }
            // Structs and unions: field types come from the single variant definition;
            // packedness comes from the layout.
            ty::TyAdt(adt_def, substs) => {
                let variant_def = adt_def.struct_variant();
                use rustc::ty::layout::Layout::*;
                match *self.type_layout(ty)? {
                    UntaggedUnion { ref variants } => Ok(TyAndPacked {
                        ty: variant_def.fields[field_index].ty(self.tcx, substs),
                        packed: variants.packed,
                    }),
                    Univariant { ref variant, .. } => Ok(TyAndPacked {
                        ty: variant_def.fields[field_index].ty(self.tcx, substs),
                        packed: variant.packed,
                    }),
                    _ => {
                        err!(Unimplemented(format!(
                            "get_field_ty can't handle struct type: {:?}, {:?}",
                            ty,
                            ty.sty
                        )))
                    }
                }
            }

            ty::TyTuple(fields, _) => Ok(TyAndPacked {
                ty: fields[field_index],
                packed: false,
            }),

            // References and raw pointers: only meaningful for fat pointers, whose
            // components are resolved by `get_fat_field`.
            ty::TyRef(_, ref tam) |
            ty::TyRawPtr(ref tam) => Ok(TyAndPacked {
                ty: self.get_fat_field(tam.ty, field_index)?,
                packed: false,
            }),

            // Every array element has the same type, regardless of index.
            ty::TyArray(ref inner, _) => Ok(TyAndPacked {
                ty: inner,
                packed: false,
            }),

            // Closure "fields" are its captured upvars.
            ty::TyClosure(def_id, ref closure_substs) => Ok(TyAndPacked {
                ty: closure_substs.upvar_tys(def_id, self.tcx).nth(field_index).unwrap(),
                packed: false,
            }),

            _ => {
                err!(Unimplemented(
                    format!("can't handle type: {:?}, {:?}", ty, ty.sty),
                ))
            }
        }
    }
|
|
|
|
|
2016-06-14 10:34:54 +02:00
|
|
|
fn get_field_offset(&self, ty: Ty<'tcx>, field_index: usize) -> EvalResult<'tcx, Size> {
|
2017-07-26 23:43:13 -07:00
|
|
|
// Also see lvalue_field in lvalue.rs, which handles more cases but needs an actual value at the given type
|
2016-11-17 17:23:40 +01:00
|
|
|
let layout = self.type_layout(ty)?;
|
2016-05-25 00:37:52 -06:00
|
|
|
|
|
|
|
use rustc::ty::layout::Layout::*;
|
|
|
|
match *layout {
|
2017-08-10 08:48:38 -07:00
|
|
|
Univariant { ref variant, .. } => Ok(variant.offsets[field_index]),
|
2016-05-25 00:37:52 -06:00
|
|
|
FatPointer { .. } => {
|
2016-11-18 12:55:14 +01:00
|
|
|
let bytes = field_index as u64 * self.memory.pointer_size();
|
|
|
|
Ok(Size::from_bytes(bytes))
|
2016-05-25 00:37:52 -06:00
|
|
|
}
|
2017-08-10 08:48:38 -07:00
|
|
|
StructWrappedNullablePointer { ref nonnull, .. } => Ok(nonnull.offsets[field_index]),
|
2017-07-31 17:14:49 +02:00
|
|
|
UntaggedUnion { .. } => Ok(Size::from_bytes(0)),
|
2017-08-29 12:24:23 +02:00
|
|
|
// mir optimizations treat single variant enums as structs
|
|
|
|
General { ref variants, .. } if variants.len() == 1 => Ok(variants[0].offsets[field_index]),
|
2016-10-16 02:12:46 -06:00
|
|
|
_ => {
|
2017-08-10 08:48:38 -07:00
|
|
|
let msg = format!(
|
|
|
|
"get_field_offset: can't handle type: {:?}, with layout: {:?}",
|
|
|
|
ty,
|
|
|
|
layout
|
|
|
|
);
|
2017-08-02 16:59:01 +02:00
|
|
|
err!(Unimplemented(msg))
|
2016-10-16 02:12:46 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-06-28 13:37:23 +02:00
|
|
|
pub fn get_field_count(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, u64> {
|
2016-11-17 17:23:40 +01:00
|
|
|
let layout = self.type_layout(ty)?;
|
2016-10-16 02:12:46 -06:00
|
|
|
|
|
|
|
use rustc::ty::layout::Layout::*;
|
|
|
|
match *layout {
|
2017-06-28 13:37:23 +02:00
|
|
|
Univariant { ref variant, .. } => Ok(variant.offsets.len() as u64),
|
2016-10-16 02:12:46 -06:00
|
|
|
FatPointer { .. } => Ok(2),
|
2017-06-28 13:37:23 +02:00
|
|
|
StructWrappedNullablePointer { ref nonnull, .. } => Ok(nonnull.offsets.len() as u64),
|
2017-08-10 08:48:38 -07:00
|
|
|
Vector { count, .. } |
|
2017-06-28 13:37:23 +02:00
|
|
|
Array { count, .. } => Ok(count),
|
|
|
|
Scalar { .. } => Ok(0),
|
2017-07-31 17:14:49 +02:00
|
|
|
UntaggedUnion { .. } => Ok(1),
|
2016-10-16 02:12:46 -06:00
|
|
|
_ => {
|
2017-08-10 08:48:38 -07:00
|
|
|
let msg = format!(
|
|
|
|
"get_field_count: can't handle type: {:?}, with layout: {:?}",
|
|
|
|
ty,
|
|
|
|
layout
|
|
|
|
);
|
2017-08-02 16:59:01 +02:00
|
|
|
err!(Unimplemented(msg))
|
2016-10-16 02:12:46 -06:00
|
|
|
}
|
2016-05-25 00:37:52 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
pub(super) fn eval_operand_to_primval(
|
|
|
|
&mut self,
|
|
|
|
op: &mir::Operand<'tcx>,
|
|
|
|
) -> EvalResult<'tcx, PrimVal> {
|
2017-08-24 14:41:49 +02:00
|
|
|
let valty = self.eval_operand(op)?;
|
|
|
|
self.value_to_primval(valty)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub(crate) fn operands_to_args(
|
|
|
|
&mut self,
|
|
|
|
ops: &[mir::Operand<'tcx>],
|
|
|
|
) -> EvalResult<'tcx, Vec<ValTy<'tcx>>> {
|
|
|
|
ops.into_iter()
|
|
|
|
.map(|op| self.eval_operand(op))
|
|
|
|
.collect()
|
2016-09-19 02:19:31 -06:00
|
|
|
}
|
|
|
|
|
2017-08-24 14:41:49 +02:00
|
|
|
    /// Evaluate a MIR operand to a value together with its (monomorphized) type.
    pub fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, ValTy<'tcx>> {
        use rustc::mir::Operand::*;
        match *op {
            // A copy/move of an lvalue: read the lvalue's current value.
            Consume(ref lvalue) => {
                Ok(ValTy {
                    value: self.eval_and_read_lvalue(lvalue)?,
                    ty: self.operand_ty(op),
                })
            },

            Constant(ref constant) => {
                use rustc::mir::Literal;
                let mir::Constant { ref literal, .. } = **constant;
                let value = match *literal {
                    // Inline constant: convert the const value directly.
                    Literal::Value { ref value } => self.const_to_value(&value.val)?,

                    // Promoted constant: must already have been evaluated and cached
                    // in `self.globals` under its (instance, promoted-index) id.
                    Literal::Promoted { index } => {
                        let cid = GlobalId {
                            instance: self.frame().instance,
                            promoted: Some(index),
                        };
                        Value::ByRef(*self.globals.get(&cid).expect("promoted not cached"))
                    }
                };

                Ok(ValTy {
                    value,
                    ty: self.operand_ty(op),
                })
            }
        }
    }
|
2015-11-12 17:44:29 -06:00
|
|
|
|
2017-08-25 16:20:13 +02:00
|
|
|
    /// Read the discriminant of the enum value stored at `adt_ptr` (of type `adt_ty`),
    /// decoding it according to the type's layout. Non-sum layouts yield 0.
    pub fn read_discriminant_value(
        &self,
        adt_ptr: MemoryPointer,
        adt_ty: Ty<'tcx>,
    ) -> EvalResult<'tcx, u128> {
        use rustc::ty::layout::Layout::*;
        let adt_layout = self.type_layout(adt_ty)?;
        //trace!("read_discriminant_value {:#?}", adt_layout);

        let discr_val = match *adt_layout {
            // Tagged enum: the discriminant is stored explicitly at the start.
            // Read unsigned; only the raw bytes matter here.
            General { discr, .. } => {
                let discr_size = discr.size().bytes();
                self.memory.read_primval(adt_ptr, discr_size, false)?.to_bytes()?
            }

            // C-like enum: the whole value *is* the discriminant; honor signedness.
            CEnum {
                discr,
                signed,
                ..
            } => {
                let discr_size = discr.size().bytes();
                self.memory.read_primval(adt_ptr, discr_size, signed)?.to_bytes()?
            }

            // Nullable-pointer optimization: null encodes the other variant.
            RawNullablePointer { nndiscr, value } => {
                let discr_size = value.size(&self.tcx.data_layout).bytes();
                trace!("rawnullablepointer with size {}", discr_size);
                self.read_nonnull_discriminant_value(
                    adt_ptr,
                    nndiscr as u128,
                    discr_size,
                )?
            }

            // As above, but the discriminating pointer is nested inside a struct and
            // located by following `discrfield_source`.
            StructWrappedNullablePointer {
                nndiscr,
                ref discrfield_source,
                ..
            } => {
                let (offset, TyAndPacked { ty, packed }) = self.nonnull_offset_and_ty(
                    adt_ty,
                    nndiscr,
                    discrfield_source,
                )?;
                let nonnull = adt_ptr.offset(offset.bytes(), &*self)?;
                trace!("struct wrapped nullable pointer type: {}", ty);
                // only the pointer part of a fat pointer is used for this space optimization
                let discr_size = self.type_size(ty)?.expect(
                    "bad StructWrappedNullablePointer discrfield",
                );
                // A packed container requires an unaligned read.
                self.read_maybe_aligned(!packed, |ectx| {
                    ectx.read_nonnull_discriminant_value(nonnull, nndiscr as u128, discr_size)
                })?
            }

            // The discriminant_value intrinsic returns 0 for non-sum types.
            Array { .. } |
            FatPointer { .. } |
            Scalar { .. } |
            Univariant { .. } |
            Vector { .. } |
            UntaggedUnion { .. } => 0,
        };

        Ok(discr_val)
    }
|
|
|
|
|
|
|
|
fn read_nonnull_discriminant_value(
|
|
|
|
&self,
|
|
|
|
ptr: MemoryPointer,
|
|
|
|
nndiscr: u128,
|
|
|
|
discr_size: u64,
|
|
|
|
) -> EvalResult<'tcx, u128> {
|
|
|
|
trace!(
|
|
|
|
"read_nonnull_discriminant_value: {:?}, {}, {}",
|
|
|
|
ptr,
|
|
|
|
nndiscr,
|
|
|
|
discr_size
|
|
|
|
);
|
|
|
|
// We are only interested in 0 vs. non-0, the sign does not matter for this
|
|
|
|
let null = match self.memory.read_primval(ptr, discr_size, false)? {
|
|
|
|
PrimVal::Bytes(0) => true,
|
|
|
|
PrimVal::Bytes(_) |
|
|
|
|
PrimVal::Ptr(..) => false,
|
|
|
|
PrimVal::Undef => return err!(ReadUndefBytes),
|
|
|
|
};
|
|
|
|
assert!(nndiscr == 0 || nndiscr == 1);
|
|
|
|
Ok(if !null { nndiscr } else { 1 - nndiscr })
|
|
|
|
}
|
|
|
|
|
2017-08-08 15:53:07 +02:00
|
|
|
pub fn read_global_as_value(&self, gid: GlobalId) -> Value {
|
|
|
|
Value::ByRef(*self.globals.get(&gid).expect("global not cached"))
|
|
|
|
}
|
|
|
|
|
2017-07-21 17:25:30 +02:00
|
|
|
pub fn operand_ty(&self, operand: &mir::Operand<'tcx>) -> Ty<'tcx> {
|
2017-05-05 10:34:38 +02:00
|
|
|
self.monomorphize(operand.ty(self.mir(), self.tcx), self.substs())
|
2016-04-07 03:02:02 -06:00
|
|
|
}
|
|
|
|
|
2017-07-04 14:33:15 +02:00
|
|
|
/// Byte-wise copy a value of type `ty` from `src` to `dest`.
///
/// The type must be sized; its size and alignment drive the memory copy.
fn copy(&mut self, src: Pointer, dest: Pointer, ty: Ty<'tcx>) -> EvalResult<'tcx> {
    let size = self.type_size(ty)?.expect(
        "cannot copy from an unsized type",
    );
    let align = self.type_align(ty)?;
    // Non-overlapping copy (`false` = not allowed to overlap).
    self.memory.copy(src, dest, size, align, false)
}
|
|
|
|
|
2017-08-08 15:53:07 +02:00
|
|
|
pub fn is_packed(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, bool> {
|
|
|
|
let layout = self.type_layout(ty)?;
|
|
|
|
use rustc::ty::layout::Layout::*;
|
|
|
|
Ok(match *layout {
|
|
|
|
Univariant { ref variant, .. } => variant.packed,
|
|
|
|
|
|
|
|
StructWrappedNullablePointer { ref nonnull, .. } => nonnull.packed,
|
|
|
|
|
|
|
|
UntaggedUnion { ref variants } => variants.packed,
|
|
|
|
|
|
|
|
// can only apply #[repr(packed)] to struct and union
|
|
|
|
_ => false,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
/// Ensure the given lvalue is backed by actual memory, returning a `Ptr` lvalue.
///
/// Locals are normally stored as plain `Value`s in the frame; when a real
/// pointer to a local is needed (e.g. someone takes a reference), a stored
/// `ByVal`/`ByValPair` local is spilled into a fresh allocation and the local
/// is switched to `ByRef` so future writes hit the same memory.
/// `Ptr` lvalues are returned unchanged; dead locals are an error.
pub fn force_allocation(&mut self, lvalue: Lvalue) -> EvalResult<'tcx, Lvalue> {
    let new_lvalue = match lvalue {
        Lvalue::Local { frame, local } => {
            // -1 since we don't store the return value
            match self.stack[frame].locals[local.index() - 1] {
                None => return err!(DeadLocal),
                // Already backed by memory: just re-wrap the pointer.
                Some(Value::ByRef(ptr)) => {
                    Lvalue::Ptr {
                        ptr,
                        extra: LvalueExtra::None,
                    }
                }
                // Stored by value: allocate, flip the local to ByRef first,
                // then write the old value into the new allocation.
                Some(val) => {
                    let ty = self.stack[frame].mir.local_decls[local].ty;
                    let ty = self.monomorphize(ty, self.stack[frame].instance.substs);
                    let substs = self.stack[frame].instance.substs;
                    let ptr = self.alloc_ptr_with_substs(ty, substs)?;
                    self.stack[frame].locals[local.index() - 1] =
                        Some(Value::by_ref(ptr.into())); // it stays live
                    self.write_value_to_ptr(val, ptr.into(), ty)?;
                    Lvalue::from_ptr(ptr)
                }
            }
        }
        // Already memory-backed; nothing to do.
        Lvalue::Ptr { .. } => lvalue,
    };
    Ok(new_lvalue)
}
|
|
|
|
|
2016-11-04 15:49:51 +01:00
|
|
|
/// ensures this Value is not a ByRef
|
2017-08-10 08:48:38 -07:00
|
|
|
pub(super) fn follow_by_ref_value(
|
2017-09-13 12:27:13 +02:00
|
|
|
&self,
|
2017-08-10 08:48:38 -07:00
|
|
|
value: Value,
|
|
|
|
ty: Ty<'tcx>,
|
|
|
|
) -> EvalResult<'tcx, Value> {
|
2016-09-19 02:19:31 -06:00
|
|
|
match value {
|
2017-08-08 15:53:07 +02:00
|
|
|
Value::ByRef(PtrAndAlign { ptr, aligned }) => {
|
2017-07-13 10:29:11 -07:00
|
|
|
self.read_maybe_aligned(aligned, |ectx| ectx.read_value(ptr, ty))
|
2017-07-12 21:06:57 -07:00
|
|
|
}
|
2016-11-04 15:49:51 +01:00
|
|
|
other => Ok(other),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-08-24 14:41:49 +02:00
|
|
|
/// Reduce a `Value` of primitive type to a single `PrimVal`.
///
/// `ByRef` values are first loaded from memory via `follow_by_ref_value`;
/// the resulting scalar is validity-checked (e.g. bool ∈ {0,1}, char is a
/// valid scalar value). Fat pointers (`ByValPair`) are a caller bug here.
pub fn value_to_primval(
    &self,
    ValTy { value, ty } : ValTy<'tcx>,
) -> EvalResult<'tcx, PrimVal> {
    match self.follow_by_ref_value(value, ty)? {
        // follow_by_ref_value already dereferenced any ByRef.
        Value::ByRef { .. } => bug!("follow_by_ref_value can't result in `ByRef`"),

        Value::ByVal(primval) => {
            self.ensure_valid_value(primval, ty)?;
            Ok(primval)
        }

        Value::ByValPair(..) => bug!("value_to_primval can't work with fat pointers"),
    }
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
pub fn write_null(&mut self, dest: Lvalue, dest_ty: Ty<'tcx>) -> EvalResult<'tcx> {
|
2017-07-04 14:26:27 +02:00
|
|
|
self.write_primval(dest, PrimVal::Bytes(0), dest_ty)
|
|
|
|
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
pub fn write_ptr(&mut self, dest: Lvalue, val: Pointer, dest_ty: Ty<'tcx>) -> EvalResult<'tcx> {
|
2017-08-24 14:41:49 +02:00
|
|
|
let valty = ValTy {
|
|
|
|
value: val.to_value(),
|
|
|
|
ty: dest_ty,
|
|
|
|
};
|
|
|
|
self.write_value(valty, dest)
|
2017-07-04 14:33:15 +02:00
|
|
|
}
|
|
|
|
|
2017-07-21 17:25:30 +02:00
|
|
|
pub fn write_primval(
|
2016-10-14 03:31:45 -06:00
|
|
|
&mut self,
|
2017-08-08 14:22:11 +02:00
|
|
|
dest: Lvalue,
|
2016-10-14 03:31:45 -06:00
|
|
|
val: PrimVal,
|
2016-11-26 22:58:01 -08:00
|
|
|
dest_ty: Ty<'tcx>,
|
2017-02-04 13:09:10 -08:00
|
|
|
) -> EvalResult<'tcx> {
|
2017-08-24 14:41:49 +02:00
|
|
|
let valty = ValTy {
|
|
|
|
value: Value::ByVal(val),
|
|
|
|
ty: dest_ty,
|
|
|
|
};
|
|
|
|
self.write_value(valty, dest)
|
2016-10-14 03:31:45 -06:00
|
|
|
}
|
|
|
|
|
2017-07-21 17:25:30 +02:00
|
|
|
/// Write `src_val` into the destination lvalue at type `dest_ty`.
///
/// Note that it is really important that the type here is the right one, and
/// matches the type things are read at. In case `src_val` is a `ByValPair`, we
/// don't do any magic here to handle padding properly, which is only correct
/// if we never look at this data with the wrong type.
pub fn write_value(
    &mut self,
    ValTy { value: src_val, ty: dest_ty } : ValTy<'tcx>,
    dest: Lvalue,
) -> EvalResult<'tcx> {
    //trace!("Writing {:?} to {:?} at type {:?}", src_val, dest, dest_ty);
    match dest {
        // Destination is in memory: write through the pointer, honoring
        // its recorded alignment.
        Lvalue::Ptr {
            ptr: PtrAndAlign { ptr, aligned },
            extra,
        } => {
            assert_eq!(extra, LvalueExtra::None);
            self.write_maybe_aligned_mut(
                aligned,
                |ectx| ectx.write_value_to_ptr(src_val, ptr, dest_ty),
            )
        }

        // Destination is a local. The local may currently be stored ByRef
        // (aliased by pointers) or by value; the aliasing-aware logic lives
        // in `write_value_possibly_by_val`.
        Lvalue::Local { frame, local } => {
            let dest = self.stack[frame].get_local(local)?;
            self.write_value_possibly_by_val(
                src_val,
                |this, val| this.stack[frame].set_local(local, val),
                dest,
                dest_ty,
            )
        }
    }
}
|
|
|
|
|
|
|
|
// The cases here can be a bit subtle. Read carefully!
|
2017-05-31 17:41:33 -07:00
|
|
|
/// Write `src_val` over a local whose current value is `old_dest_val`,
/// using `write_dest` to store a new `Value` into the local.
///
/// The destination must stay `ByRef` if it already is (other pointers may
/// alias it), and a `ByRef` source must be copied, not aliased.
fn write_value_possibly_by_val<F: FnOnce(&mut Self, Value) -> EvalResult<'tcx>>(
    &mut self,
    src_val: Value,
    write_dest: F,
    old_dest_val: Value,
    dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
    if let Value::ByRef(PtrAndAlign {
        ptr: dest_ptr,
        aligned,
    }) = old_dest_val
    {
        // If the value is already `ByRef` (that is, backed by an `Allocation`),
        // then we must write the new value into this allocation, because there may be
        // other pointers into the allocation. These other pointers are logically
        // pointers into the local variable, and must be able to observe the change.
        //
        // Thus, it would be an error to replace the `ByRef` with a `ByVal`, unless we
        // knew for certain that there were no outstanding pointers to this allocation.
        self.write_maybe_aligned_mut(aligned, |ectx| {
            ectx.write_value_to_ptr(src_val, dest_ptr, dest_ty)
        })?;

    } else if let Value::ByRef(PtrAndAlign {
        ptr: src_ptr,
        aligned,
    }) = src_val
    {
        // If the value is not `ByRef`, then we know there are no pointers to it
        // and we can simply overwrite the `Value` in the locals array directly.
        //
        // In this specific case, where the source value is `ByRef`, we must duplicate
        // the allocation, because this is a by-value operation. It would be incorrect
        // if they referred to the same allocation, since then a change to one would
        // implicitly change the other.
        //
        // It is a valid optimization to attempt reading a primitive value out of the
        // source and write that into the destination without making an allocation, so
        // we do so here.
        self.read_maybe_aligned_mut(aligned, |ectx| {
            if let Ok(Some(src_val)) = ectx.try_read_value(src_ptr, dest_ty) {
                // Primitive read succeeded: store the scalar directly.
                write_dest(ectx, src_val)?;
            } else {
                // Not a primitive: clone the allocation and point the local at the copy.
                let dest_ptr = ectx.alloc_ptr(dest_ty)?.into();
                ectx.copy(src_ptr, dest_ptr, dest_ty)?;
                write_dest(ectx, Value::by_ref(dest_ptr))?;
            }
            Ok(())
        })?;

    } else {
        // Finally, we have the simple case where neither source nor destination are
        // `ByRef`. We may simply copy the source value over the destination.
        write_dest(self, src_val)?;
    }
    Ok(())
}
|
|
|
|
|
2017-07-28 13:08:27 +02:00
|
|
|
/// Write any `Value` into raw memory at `dest`, interpreted at type `dest_ty`.
///
/// `ByRef` sources are memcpy'd, scalars are written with `write_primval`
/// (zero-sized types must be undef and write nothing), and pairs are split
/// into their two fields by `write_pair_to_ptr`.
pub fn write_value_to_ptr(
    &mut self,
    value: Value,
    dest: Pointer,
    dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
    match value {
        // Source lives in memory: copy it over, honoring the source's alignment flag.
        Value::ByRef(PtrAndAlign { ptr, aligned }) => {
            self.read_maybe_aligned_mut(aligned, |ectx| ectx.copy(ptr, dest, dest_ty))
        }
        Value::ByVal(primval) => {
            let size = self.type_size(dest_ty)?.expect("dest type must be sized");
            if size == 0 {
                // Zero-sized values carry no data; only undef is consistent.
                assert!(primval.is_undef());
                Ok(())
            } else {
                // TODO: Do we need signedness?
                self.memory.write_primval(dest.to_ptr()?, primval, size, false)
            }
        }
        Value::ByValPair(a, b) => self.write_pair_to_ptr(a, b, dest.to_ptr()?, dest_ty),
    }
}
|
|
|
|
|
2017-07-28 13:08:27 +02:00
|
|
|
/// Write a `ByValPair` (`a`, `b`) into memory at `ptr`, interpreted at `ty`.
///
/// Single-field wrappers are peeled off first (accumulating their packed
/// flags) until a genuine two-field type is reached; then each half is
/// written at its own field offset, using unaligned writes if any layer
/// was `#[repr(packed)]`.
pub fn write_pair_to_ptr(
    &mut self,
    a: PrimVal,
    b: PrimVal,
    ptr: MemoryPointer,
    mut ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
    let mut packed = false;
    // Unwrap newtype-like layers; a packed wrapper taints the whole write.
    while self.get_field_count(ty)? == 1 {
        let field = self.get_field_ty(ty, 0)?;
        ty = field.ty;
        packed = packed || field.packed;
    }
    assert_eq!(self.get_field_count(ty)?, 2);
    let field_0 = self.get_field_offset(ty, 0)?;
    let field_1 = self.get_field_offset(ty, 1)?;
    let field_0_ty = self.get_field_ty(ty, 0)?;
    let field_1_ty = self.get_field_ty(ty, 1)?;
    assert_eq!(
        field_0_ty.packed,
        field_1_ty.packed,
        "the two fields must agree on being packed"
    );
    packed = packed || field_0_ty.packed;
    let field_0_size = self.type_size(field_0_ty.ty)?.expect(
        "pair element type must be sized",
    );
    let field_1_size = self.type_size(field_1_ty.ty)?.expect(
        "pair element type must be sized",
    );
    let field_0_ptr = ptr.offset(field_0.bytes(), &self)?.into();
    let field_1_ptr = ptr.offset(field_1.bytes(), &self)?.into();
    // TODO: What about signedess?
    // `!packed` == "aligned": packed layouts need unaligned writes.
    self.write_maybe_aligned_mut(!packed, |ectx| {
        ectx.memory.write_primval(field_0_ptr, a, field_0_size, false)
    })?;
    self.write_maybe_aligned_mut(!packed, |ectx| {
        ectx.memory.write_primval(field_1_ptr, b, field_1_size, false)
    })?;
    Ok(())
}
|
|
|
|
|
2017-01-11 10:04:17 +01:00
|
|
|
/// Map a Rust type to the `PrimValKind` it is represented by, or
/// `TypeNotPrimitive` if it has no single-scalar representation.
///
/// Handles built-in scalars directly; for ADTs it consults the layout:
/// C-like enums become (un)signed ints of the discriminant size, nullable
/// pointers become their underlying primitive, and single-field univariant
/// structs recurse into their field.
pub fn ty_to_primval_kind(&self, ty: Ty<'tcx>) -> EvalResult<'tcx, PrimValKind> {
    use syntax::ast::FloatTy;

    let kind = match ty.sty {
        ty::TyBool => PrimValKind::Bool,
        ty::TyChar => PrimValKind::Char,

        ty::TyInt(int_ty) => {
            use syntax::ast::IntTy::*;
            let size = match int_ty {
                I8 => 1,
                I16 => 2,
                I32 => 4,
                I64 => 8,
                I128 => 16,
                // `isize` is target-dependent.
                Is => self.memory.pointer_size(),
            };
            PrimValKind::from_int_size(size)
        }

        ty::TyUint(uint_ty) => {
            use syntax::ast::UintTy::*;
            let size = match uint_ty {
                U8 => 1,
                U16 => 2,
                U32 => 4,
                U64 => 8,
                U128 => 16,
                // `usize` is target-dependent.
                Us => self.memory.pointer_size(),
            };
            PrimValKind::from_uint_size(size)
        }

        ty::TyFloat(FloatTy::F32) => PrimValKind::F32,
        ty::TyFloat(FloatTy::F64) => PrimValKind::F64,

        ty::TyFnPtr(_) => PrimValKind::FnPtr,

        // Only thin (sized-pointee) references/raw pointers are a single scalar.
        ty::TyRef(_, ref tam) |
        ty::TyRawPtr(ref tam) if self.type_is_sized(tam.ty) => PrimValKind::Ptr,

        ty::TyAdt(def, _) if def.is_box() => PrimValKind::Ptr,

        ty::TyAdt(def, substs) => {
            use rustc::ty::layout::Layout::*;
            match *self.type_layout(ty)? {
                CEnum { discr, signed, .. } => {
                    let size = discr.size().bytes();
                    if signed {
                        PrimValKind::from_int_size(size)
                    } else {
                        PrimValKind::from_uint_size(size)
                    }
                }

                RawNullablePointer { value, .. } => {
                    use rustc::ty::layout::Primitive::*;
                    match value {
                        // TODO(solson): Does signedness matter here? What should the sign be?
                        Int(int) => PrimValKind::from_uint_size(int.size().bytes()),
                        F32 => PrimValKind::F32,
                        F64 => PrimValKind::F64,
                        Pointer => PrimValKind::Ptr,
                    }
                }

                // represent single field structs as their single field
                Univariant { .. } => {
                    // enums with just one variant are no different, but `.struct_variant()` doesn't work for enums
                    let variant = &def.variants[0];
                    // FIXME: also allow structs with only a single non zst field
                    if variant.fields.len() == 1 {
                        return self.ty_to_primval_kind(variant.fields[0].ty(self.tcx, substs));
                    } else {
                        return err!(TypeNotPrimitive(ty));
                    }
                }

                _ => return err!(TypeNotPrimitive(ty)),
            }
        }

        _ => return err!(TypeNotPrimitive(ty)),
    };

    Ok(kind)
}
|
|
|
|
|
2017-02-04 13:09:10 -08:00
|
|
|
fn ensure_valid_value(&self, val: PrimVal, ty: Ty<'tcx>) -> EvalResult<'tcx> {
|
2016-10-21 03:17:53 -06:00
|
|
|
match ty.sty {
|
2017-08-02 16:59:01 +02:00
|
|
|
ty::TyBool if val.to_bytes()? > 1 => err!(InvalidBool),
|
2016-10-21 03:17:53 -06:00
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
ty::TyChar if ::std::char::from_u32(val.to_bytes()? as u32).is_none() => {
|
|
|
|
err!(InvalidChar(val.to_bytes()? as u32 as u128))
|
|
|
|
}
|
2016-10-21 03:17:53 -06:00
|
|
|
|
|
|
|
_ => Ok(()),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-07-28 13:08:27 +02:00
|
|
|
pub fn read_value(&self, ptr: Pointer, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
|
2016-11-28 20:22:21 -08:00
|
|
|
if let Some(val) = self.try_read_value(ptr, ty)? {
|
|
|
|
Ok(val)
|
|
|
|
} else {
|
|
|
|
bug!("primitive read failed for type: {:?}", ty);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
/// Read a pointer to `pointee_ty` out of memory at `ptr`.
///
/// For sized pointees this is a single pointer-sized load; for unsized
/// pointees a fat pointer is read: the extra word (stored immediately
/// after the data pointer) is a vtable for trait objects or a length for
/// slices/str.
pub(crate) fn read_ptr(
    &self,
    ptr: MemoryPointer,
    pointee_ty: Ty<'tcx>,
) -> EvalResult<'tcx, Value> {
    let ptr_size = self.memory.pointer_size();
    let p : Pointer = self.memory.read_ptr_sized_unsigned(ptr)?.into();
    if self.type_is_sized(pointee_ty) {
        Ok(p.to_value())
    } else {
        trace!("reading fat pointer extra of type {}", pointee_ty);
        // The metadata word sits one pointer-size after the data pointer.
        let extra = ptr.offset(ptr_size, self)?;
        match self.tcx.struct_tail(pointee_ty).sty {
            // Trait object: metadata is the vtable pointer.
            ty::TyDynamic(..) => Ok(p.to_value_with_vtable(
                self.memory.read_ptr_sized_unsigned(extra)?.to_ptr()?,
            )),
            // Slice / str: metadata is the element/byte count.
            ty::TySlice(..) | ty::TyStr => Ok(
                p.to_value_with_len(self.memory.read_ptr_sized_unsigned(extra)?.to_bytes()? as u64),
            ),
            _ => bug!("unsized primval ptr read from {:?}", pointee_ty),
        }
    }
}
|
|
|
|
|
2017-07-27 09:14:04 -07:00
|
|
|
/// Attempt to read a value of type `ty` from memory as a primitive.
///
/// Returns `Ok(None)` for types with no single-scalar representation
/// (the caller then falls back to a by-ref copy). Validates `bool` and
/// `char` on the way in; signed ints are sign-extended on read.
fn try_read_value(&self, ptr: Pointer, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<Value>> {
    use syntax::ast::FloatTy;

    let ptr = ptr.to_ptr()?;
    let val = match ty.sty {
        ty::TyBool => {
            let val = self.memory.read_primval(ptr, 1, false)?;
            // Only 0 and 1 are valid bool bit patterns.
            let val = match val {
                PrimVal::Bytes(0) => false,
                PrimVal::Bytes(1) => true,
                _ => return err!(InvalidBool),
            };
            PrimVal::from_bool(val)
        }
        ty::TyChar => {
            let c = self.memory.read_primval(ptr, 4, false)?.to_bytes()? as u32;
            // Reject surrogates / out-of-range code points.
            match ::std::char::from_u32(c) {
                Some(ch) => PrimVal::from_char(ch),
                None => return err!(InvalidChar(c as u128)),
            }
        }

        ty::TyInt(int_ty) => {
            use syntax::ast::IntTy::*;
            let size = match int_ty {
                I8 => 1,
                I16 => 2,
                I32 => 4,
                I64 => 8,
                I128 => 16,
                Is => self.memory.pointer_size(),
            };
            // `true` = signed read.
            self.memory.read_primval(ptr, size, true)?
        }

        ty::TyUint(uint_ty) => {
            use syntax::ast::UintTy::*;
            let size = match uint_ty {
                U8 => 1,
                U16 => 2,
                U32 => 4,
                U64 => 8,
                U128 => 16,
                Us => self.memory.pointer_size(),
            };
            self.memory.read_primval(ptr, size, false)?
        }

        // Floats are carried as their raw bit patterns.
        ty::TyFloat(FloatTy::F32) => PrimVal::Bytes(self.memory.read_primval(ptr, 4, false)?.to_bytes()?),
        ty::TyFloat(FloatTy::F64) => PrimVal::Bytes(self.memory.read_primval(ptr, 8, false)?.to_bytes()?),

        ty::TyFnPtr(_) => self.memory.read_ptr_sized_unsigned(ptr)?,
        // References/raw pointers may be fat; delegate to read_ptr.
        ty::TyRef(_, ref tam) |
        ty::TyRawPtr(ref tam) => return self.read_ptr(ptr, tam.ty).map(Some),

        ty::TyAdt(def, _) => {
            if def.is_box() {
                return self.read_ptr(ptr, ty.boxed_ty()).map(Some);
            }
            use rustc::ty::layout::Layout::*;
            // Only C-like enums are readable as a single scalar.
            if let CEnum { discr, signed, .. } = *self.type_layout(ty)? {
                let size = discr.size().bytes();
                self.memory.read_primval(ptr, size, signed)?
            } else {
                return Ok(None);
            }
        }

        _ => return Ok(None),
    };

    Ok(Some(Value::ByVal(val)))
}
|
|
|
|
|
2017-07-21 17:25:30 +02:00
|
|
|
/// The topmost (currently executing) stack frame.
pub fn frame(&self) -> &Frame<'tcx> {
    match self.stack.last() {
        Some(frame) => frame,
        None => panic!("no call frames exist"),
    }
}
|
2016-03-14 20:39:51 -06:00
|
|
|
|
2016-12-07 20:30:37 -08:00
|
|
|
/// Mutable access to the topmost (currently executing) stack frame.
pub(super) fn frame_mut(&mut self) -> &mut Frame<'tcx> {
    match self.stack.last_mut() {
        Some(frame) => frame,
        None => panic!("no call frames exist"),
    }
}
|
2015-11-12 15:50:58 -06:00
|
|
|
|
2017-05-04 17:42:43 +02:00
|
|
|
/// The MIR body of the function in the current frame.
pub(super) fn mir(&self) -> &'tcx mir::Mir<'tcx> {
    let frame = self.frame();
    frame.mir
}
|
2016-06-08 11:11:08 +02:00
|
|
|
|
2016-12-07 20:30:37 -08:00
|
|
|
/// The generic substitutions of the instance in the current frame.
pub(super) fn substs(&self) -> &'tcx Substs<'tcx> {
    let frame = self.frame();
    frame.instance.substs
}
|
2016-03-14 21:18:39 -06:00
|
|
|
|
2017-02-03 15:47:23 +01:00
|
|
|
/// Perform the pointer part of an unsizing coercion and write the result.
///
/// `sty`/`dty` are the source/destination pointee types; the lockstep tails
/// determine the coercion: array→slice attaches a length, dyn→dyn is a
/// no-op (marker-trait upcast), concrete→dyn attaches a freshly built vtable.
fn unsize_into_ptr(
    &mut self,
    src: Value,
    src_ty: Ty<'tcx>,
    dest: Lvalue,
    dest_ty: Ty<'tcx>,
    sty: Ty<'tcx>,
    dty: Ty<'tcx>,
) -> EvalResult<'tcx> {
    // A<Struct> -> A<Trait> conversion
    let (src_pointee_ty, dest_pointee_ty) = self.tcx.struct_lockstep_tails(sty, dty);

    match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
        (&ty::TyArray(_, length), &ty::TySlice(_)) => {
            let ptr = src.into_ptr(&self.memory)?;
            // u64 cast is from usize to u64, which is always good
            let valty = ValTy {
                value: ptr.to_value_with_len(length.val.to_const_int().unwrap().to_u64().unwrap() ),
                ty: dest_ty,
            };
            self.write_value(valty, dest)
        }
        (&ty::TyDynamic(..), &ty::TyDynamic(..)) => {
            // For now, upcasts are limited to changes in marker
            // traits, and hence never actually require an actual
            // change to the vtable.
            let valty = ValTy {
                value: src,
                ty: dest_ty,
            };
            self.write_value(valty, dest)
        }
        (_, &ty::TyDynamic(ref data, _)) => {
            // Build (or fetch) the vtable for the concrete source type.
            let trait_ref = data.principal().unwrap().with_self_ty(
                self.tcx,
                src_pointee_ty,
            );
            let trait_ref = self.tcx.erase_regions(&trait_ref);
            let vtable = self.get_vtable(src_pointee_ty, trait_ref)?;
            let ptr = src.into_ptr(&self.memory)?;
            let valty = ValTy {
                value: ptr.to_value_with_vtable(vtable),
                ty: dest_ty,
            };
            self.write_value(valty, dest)
        }

        _ => bug!("invalid unsizing {:?} -> {:?}", src_ty, dest_ty),
    }
}
|
|
|
|
|
2016-09-27 18:01:33 +02:00
|
|
|
/// Perform an unsizing coercion of `src` (at `src_ty`) into `dest` (at
/// `dest_ty`).
///
/// Pointer-to-pointer (and Box-to-Box) cases delegate to `unsize_into_ptr`;
/// for generic structs (e.g. `Arc<T>` -> `Arc<Trait>`) every field is copied
/// across, recursing into the one field whose type actually changes.
fn unsize_into(
    &mut self,
    src: Value,
    src_ty: Ty<'tcx>,
    dest: Lvalue,
    dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
    match (&src_ty.sty, &dest_ty.sty) {
        (&ty::TyRef(_, ref s), &ty::TyRef(_, ref d)) |
        (&ty::TyRef(_, ref s), &ty::TyRawPtr(ref d)) |
        (&ty::TyRawPtr(ref s), &ty::TyRawPtr(ref d)) => {
            self.unsize_into_ptr(src, src_ty, dest, dest_ty, s.ty, d.ty)
        }
        (&ty::TyAdt(def_a, substs_a), &ty::TyAdt(def_b, substs_b)) => {
            if def_a.is_box() || def_b.is_box() {
                // Box can only unsize into Box.
                if !def_a.is_box() || !def_b.is_box() {
                    panic!("invalid unsizing between {:?} -> {:?}", src_ty, dest_ty);
                }
                return self.unsize_into_ptr(
                    src,
                    src_ty,
                    dest,
                    dest_ty,
                    src_ty.boxed_ty(),
                    dest_ty.boxed_ty(),
                );
            }
            // Newtype wrapper around a pointer: unwrap one level and retry.
            if self.ty_to_primval_kind(src_ty).is_ok() {
                // TODO: We ignore the packed flag here
                let sty = self.get_field_ty(src_ty, 0)?.ty;
                let dty = self.get_field_ty(dest_ty, 0)?.ty;
                return self.unsize_into(src, sty, dest, dty);
            }
            // unsizing of generic struct with pointer fields
            // Example: `Arc<T>` -> `Arc<Trait>`
            // here we need to increase the size of every &T thin ptr field to a fat ptr

            assert_eq!(def_a, def_b);

            let src_fields = def_a.variants[0].fields.iter();
            let dst_fields = def_b.variants[0].fields.iter();

            //let src = adt::MaybeSizedValue::sized(src);
            //let dst = adt::MaybeSizedValue::sized(dst);
            let src_ptr = match src {
                Value::ByRef(PtrAndAlign { ptr, aligned: true }) => ptr,
                // TODO: Is it possible for unaligned pointers to occur here?
                _ => bug!("expected aligned pointer, got {:?}", src),
            };

            // FIXME(solson)
            let dest = self.force_allocation(dest)?.to_ptr()?;
            let iter = src_fields.zip(dst_fields).enumerate();
            for (i, (src_f, dst_f)) in iter {
                let src_fty = self.field_ty(substs_a, src_f);
                let dst_fty = self.field_ty(substs_b, dst_f);
                // Zero-sized fields carry no data; skip.
                if self.type_size(dst_fty)? == Some(0) {
                    continue;
                }
                let src_field_offset = self.get_field_offset(src_ty, i)?.bytes();
                let dst_field_offset = self.get_field_offset(dest_ty, i)?.bytes();
                let src_f_ptr = src_ptr.offset(src_field_offset, &self)?;
                let dst_f_ptr = dest.offset(dst_field_offset, &self)?;
                if src_fty == dst_fty {
                    // Unchanged field: plain copy.
                    self.copy(src_f_ptr, dst_f_ptr.into(), src_fty)?;
                } else {
                    // The field being unsized: recurse.
                    self.unsize_into(
                        Value::by_ref(src_f_ptr),
                        src_fty,
                        Lvalue::from_ptr(dst_f_ptr),
                        dst_fty,
                    )?;
                }
            }
            Ok(())
        }
        _ => {
            bug!(
                "unsize_into: invalid conversion: {:?} -> {:?}",
                src_ty,
                dest_ty
            )
        }
    }
}
|
2016-10-16 17:18:56 -06:00
|
|
|
|
2017-08-08 14:22:11 +02:00
|
|
|
/// Dump an lvalue (a local or a raw pointer) to the trace log, along with
/// the contents of any allocations it refers to. Debug aid only.
pub fn dump_local(&self, lvalue: Lvalue) {
    // Debug output
    match lvalue {
        Lvalue::Local { frame, local } => {
            // Allocation ids encountered while printing; dumped at the end.
            let mut allocs = Vec::new();
            let mut msg = format!("{:?}", local);
            if frame != self.cur_frame() {
                write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
            }
            write!(msg, ":").unwrap();

            match self.stack[frame].get_local(local) {
                Err(EvalError { kind: EvalErrorKind::DeadLocal, .. }) => {
                    write!(msg, " is dead").unwrap();
                }
                Err(err) => {
                    panic!("Failed to access local: {:?}", err);
                }
                Ok(Value::ByRef(PtrAndAlign { ptr, aligned })) => {
                    match ptr.into_inner_primval() {
                        PrimVal::Ptr(ptr) => {
                            write!(msg, " by {}ref:", if aligned { "" } else { "unaligned " })
                                .unwrap();
                            allocs.push(ptr.alloc_id);
                        }
                        // Integer-valued "pointer": nothing to dump.
                        ptr => write!(msg, " integral by ref: {:?}", ptr).unwrap(),
                    }
                }
                Ok(Value::ByVal(val)) => {
                    write!(msg, " {:?}", val).unwrap();
                    if let PrimVal::Ptr(ptr) = val {
                        allocs.push(ptr.alloc_id);
                    }
                }
                Ok(Value::ByValPair(val1, val2)) => {
                    write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
                    if let PrimVal::Ptr(ptr) = val1 {
                        allocs.push(ptr.alloc_id);
                    }
                    if let PrimVal::Ptr(ptr) = val2 {
                        allocs.push(ptr.alloc_id);
                    }
                }
            }

            trace!("{}", msg);
            self.memory.dump_allocs(allocs);
        }
        Lvalue::Ptr { ptr: PtrAndAlign { ptr, aligned }, .. } => {
            match ptr.into_inner_primval() {
                PrimVal::Ptr(ptr) => {
                    trace!("by {}ref:", if aligned { "" } else { "unaligned " });
                    self.memory.dump_alloc(ptr.alloc_id);
                }
                ptr => trace!(" integral by ref: {:?}", ptr),
            }
        }
    }
}
|
2016-11-03 12:52:13 +01:00
|
|
|
|
2017-08-08 14:22:11 +02:00
|
|
|
/// Convenience function to ensure correct usage of locals
|
2017-08-10 08:48:38 -07:00
|
|
|
pub fn modify_local<F>(&mut self, frame: usize, local: mir::Local, f: F) -> EvalResult<'tcx>
|
|
|
|
where
|
|
|
|
F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>,
|
2016-12-18 20:59:01 -08:00
|
|
|
{
|
2017-06-27 13:36:41 +02:00
|
|
|
let val = self.stack[frame].get_local(local)?;
|
2016-12-18 20:59:01 -08:00
|
|
|
let new_val = f(self, val)?;
|
2017-06-27 13:36:41 +02:00
|
|
|
self.stack[frame].set_local(local, new_val)?;
|
2016-12-18 20:59:01 -08:00
|
|
|
// FIXME(solson): Run this when setting to Undef? (See previous version of this code.)
|
|
|
|
// if let Value::ByRef(ptr) = self.stack[frame].get_local(local) {
|
|
|
|
// self.memory.deallocate(ptr)?;
|
|
|
|
// }
|
2016-11-03 12:52:13 +01:00
|
|
|
Ok(())
|
|
|
|
}
|
2017-07-21 17:25:30 +02:00
|
|
|
|
2017-08-04 10:55:35 -07:00
|
|
|
/// Report an interpreter error to the user.
///
/// If the error carries a captured host backtrace, first resolves and logs it
/// (via `error!`), trimming away both miri's initialization frames and the
/// backtrace machinery's own frames. Then emits a rustc diagnostic at the
/// span of the currently-executing statement/terminator, with one note per
/// interpreter stack frame; if no frame is active, falls back to a plain
/// session error.
pub fn report(&self, e: &mut EvalError) {
    if let Some(ref mut backtrace) = e.backtrace {
        let mut trace_text = "\n\nAn error occurred in miri:\n".to_string();
        // While `skip_init` is true we are still below the frame that captured
        // the backtrace, so nothing has been printed yet.
        let mut skip_init = true;
        backtrace.resolve();
        'frames: for (i, frame) in backtrace.frames().iter().enumerate() {
            for symbol in frame.symbols() {
                if let Some(name) = symbol.name() {
                    // unmangle the symbol via `to_string`
                    let name = name.to_string();
                    if name.starts_with("miri::after_analysis") {
                        // don't report initialization gibberish
                        break 'frames;
                    } else if name.starts_with("backtrace::capture::Backtrace::new")
                        // debug mode produces funky symbol names
                        || name.starts_with("backtrace::capture::{{impl}}::new")
                    {
                        // don't report backtrace internals; frames after this
                        // one are the interesting ones, so stop skipping.
                        skip_init = false;
                        continue 'frames;
                    }
                }
            }
            if skip_init {
                continue;
            }
            // One "index: name\n\tat file:line" entry per resolved symbol.
            for symbol in frame.symbols() {
                write!(trace_text, "{}: ", i).unwrap();
                if let Some(name) = symbol.name() {
                    write!(trace_text, "{}\n", name).unwrap();
                } else {
                    write!(trace_text, "<unknown>\n").unwrap();
                }
                write!(trace_text, "\tat ").unwrap();
                if let Some(file_path) = symbol.filename() {
                    write!(trace_text, "{}", file_path.display()).unwrap();
                } else {
                    write!(trace_text, "<unknown_file>").unwrap();
                }
                if let Some(line) = symbol.lineno() {
                    write!(trace_text, ":{}\n", line).unwrap();
                } else {
                    write!(trace_text, "\n").unwrap();
                }
            }
        }
        error!("{}", trace_text);
    }
    if let Some(frame) = self.stack().last() {
        let block = &frame.mir.basic_blocks()[frame.block];
        // Point at the statement being executed, or the terminator if the
        // statement index is already past the end of the block.
        let span = if frame.stmt < block.statements.len() {
            block.statements[frame.stmt].source_info.span
        } else {
            block.terminator().source_info.span
        };
        let mut err = self.tcx.sess.struct_span_err(span, &e.to_string());
        // Walk the interpreter stack outwards, noting each call site.
        for &Frame { instance, span, .. } in self.stack().iter().rev() {
            if self.tcx.def_key(instance.def_id()).disambiguated_data.data ==
                DefPathData::ClosureExpr
            {
                // Closures have unprintable instance names; use a fixed label.
                err.span_note(span, "inside call to closure");
                continue;
            }
            err.span_note(span, &format!("inside call to {}", instance));
        }
        err.emit();
    } else {
        // No frame active (e.g. error during setup): no span to attach to.
        self.tcx.sess.err(&e.to_string());
    }
}
|
2016-03-20 22:59:13 -06:00
|
|
|
}
|
|
|
|
|
2016-11-03 10:38:08 +01:00
|
|
|
impl<'tcx> Frame<'tcx> {
|
2017-06-27 13:36:41 +02:00
|
|
|
pub fn get_local(&self, local: mir::Local) -> EvalResult<'tcx, Value> {
|
2016-10-15 19:48:30 -06:00
|
|
|
// Subtract 1 because we don't store a value for the ReturnPointer, the local with index 0.
|
2017-08-02 16:59:01 +02:00
|
|
|
self.locals[local.index() - 1].ok_or(EvalErrorKind::DeadLocal.into())
|
2016-10-15 19:48:30 -06:00
|
|
|
}
|
|
|
|
|
2017-06-27 13:36:41 +02:00
|
|
|
fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> {
|
2016-10-15 19:48:30 -06:00
|
|
|
// Subtract 1 because we don't store a value for the ReturnPointer, the local with index 0.
|
2017-06-27 13:36:41 +02:00
|
|
|
match self.locals[local.index() - 1] {
|
2017-08-02 16:59:01 +02:00
|
|
|
None => err!(DeadLocal),
|
2017-06-27 13:36:41 +02:00
|
|
|
Some(ref mut local) => {
|
|
|
|
*local = value;
|
|
|
|
Ok(())
|
2017-05-31 17:41:33 -07:00
|
|
|
}
|
2017-02-10 16:14:59 +01:00
|
|
|
}
|
2017-05-31 17:41:33 -07:00
|
|
|
}
|
|
|
|
|
2017-06-01 17:59:00 -07:00
|
|
|
pub fn storage_live(&mut self, local: mir::Local) -> EvalResult<'tcx, Option<Value>> {
|
2017-05-31 17:41:33 -07:00
|
|
|
trace!("{:?} is now live", local);
|
2017-06-01 17:59:00 -07:00
|
|
|
|
|
|
|
let old = self.locals[local.index() - 1];
|
|
|
|
self.locals[local.index() - 1] = Some(Value::ByVal(PrimVal::Undef)); // StorageLive *always* kills the value that's currently stored
|
|
|
|
return Ok(old);
|
2017-05-31 17:41:33 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns the old value of the local
|
|
|
|
pub fn storage_dead(&mut self, local: mir::Local) -> EvalResult<'tcx, Option<Value>> {
|
|
|
|
trace!("{:?} is now dead", local);
|
|
|
|
|
|
|
|
let old = self.locals[local.index() - 1];
|
|
|
|
self.locals[local.index() - 1] = None;
|
|
|
|
return Ok(old);
|
2016-10-15 19:48:30 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-06-23 01:03:58 -06:00
|
|
|
// TODO(solson): Upstream these methods into rustc::ty::layout.

/// Extension trait adding a `size` accessor to `layout::Integer`.
pub(super) trait IntegerExt {
    /// The in-memory size of this integer layout (see the impl below; `I1`
    /// occupies a full byte, like `I8`).
    fn size(self) -> Size;
}
|
|
|
|
|
|
|
|
impl IntegerExt for layout::Integer {
|
|
|
|
fn size(self) -> Size {
|
|
|
|
use rustc::ty::layout::Integer::*;
|
|
|
|
match self {
|
|
|
|
I1 | I8 => Size::from_bits(8),
|
|
|
|
I16 => Size::from_bits(16),
|
|
|
|
I32 => Size::from_bits(32),
|
|
|
|
I64 => Size::from_bits(64),
|
2017-01-12 08:28:42 +01:00
|
|
|
I128 => Size::from_bits(128),
|
2016-06-23 01:03:58 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2016-11-04 15:48:45 +01:00
|
|
|
|
2017-01-12 10:37:14 +01:00
|
|
|
pub fn is_inhabited<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
|
2017-02-14 10:17:00 +01:00
|
|
|
ty.uninhabited_from(&mut HashMap::default(), tcx).is_empty()
|
2017-01-12 10:37:14 +01:00
|
|
|
}
|
2016-12-19 17:26:47 +01:00
|
|
|
|
2017-03-21 13:53:55 +01:00
|
|
|
/// FIXME: expose trans::monomorphize::resolve_closure
|
2017-08-10 08:48:38 -07:00
|
|
|
pub fn resolve_closure<'a, 'tcx>(
|
2017-03-21 13:53:55 +01:00
|
|
|
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
|
|
|
def_id: DefId,
|
|
|
|
substs: ty::ClosureSubsts<'tcx>,
|
|
|
|
requested_kind: ty::ClosureKind,
|
|
|
|
) -> ty::Instance<'tcx> {
|
|
|
|
let actual_kind = tcx.closure_kind(def_id);
|
|
|
|
match needs_fn_once_adapter_shim(actual_kind, requested_kind) {
|
|
|
|
Ok(true) => fn_once_adapter_instance(tcx, def_id, substs),
|
2017-08-10 08:48:38 -07:00
|
|
|
_ => ty::Instance::new(def_id, substs.substs),
|
2017-03-21 13:53:55 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Builds the `ClosureOnceShim` instance that adapts a `Fn`/`FnMut` closure
/// so it can be called through `FnOnce::call_once`.
fn fn_once_adapter_instance<'a, 'tcx>(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    closure_did: DefId,
    substs: ty::ClosureSubsts<'tcx>,
) -> ty::Instance<'tcx> {
    debug!("fn_once_adapter_shim({:?}, {:?})", closure_did, substs);
    // The shim's `def` is keyed by the `call_once` method of the `FnOnce` trait.
    let fn_once = tcx.lang_items().fn_once_trait().unwrap();
    let call_once = tcx.associated_items(fn_once)
        .find(|it| it.kind == ty::AssociatedKind::Method)
        .unwrap()
        .def_id;
    let def = ty::InstanceDef::ClosureOnceShim { call_once };

    let self_ty = tcx.mk_closure_from_closure_substs(closure_did, substs);

    // The closure signature has exactly one input: the tupled arguments.
    let sig = tcx.fn_sig(closure_did).subst(tcx, substs.substs);
    let sig = tcx.erase_late_bound_regions_and_normalize(&sig);
    assert_eq!(sig.inputs().len(), 1);
    // The shim is generic over [closure type, tupled argument type].
    let substs = tcx.mk_substs(
        [Kind::from(self_ty), Kind::from(sig.inputs()[0])]
            .iter()
            .cloned(),
    );

    debug!("fn_once_adapter_shim: self_ty={:?} sig={:?}", self_ty, sig);
    ty::Instance { def, substs }
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
/// Decides whether a closure of kind `actual_closure_kind` that is being
/// called through a trait of kind `trait_closure_kind` needs the `call_once`
/// adapter shim.
///
/// Returns `Ok(false)` when the closure item can be called directly,
/// `Ok(true)` when a `FnOnce` adapter must be inserted, and `Err(())` for
/// kind combinations that cannot be satisfied (e.g. an `FnOnce` closure used
/// where `Fn` is required).
fn needs_fn_once_adapter_shim(
    actual_closure_kind: ty::ClosureKind,
    trait_closure_kind: ty::ClosureKind,
) -> Result<bool, ()> {
    match (actual_closure_kind, trait_closure_kind) {
        (ty::ClosureKind::Fn, ty::ClosureKind::Fn) |
        (ty::ClosureKind::FnMut, ty::ClosureKind::FnMut) |
        (ty::ClosureKind::FnOnce, ty::ClosureKind::FnOnce) => {
            // No adapter needed.
            Ok(false)
        }
        (ty::ClosureKind::Fn, ty::ClosureKind::FnMut) => {
            // The closure fn `llfn` is a `fn(&self, ...)`. We want a
            // `fn(&mut self, ...)`. In fact, at trans time, these are
            // basically the same thing, so we can just return llfn.
            Ok(false)
        }
        (ty::ClosureKind::Fn, ty::ClosureKind::FnOnce) |
        (ty::ClosureKind::FnMut, ty::ClosureKind::FnOnce) => {
            // The closure fn `llfn` is a `fn(&self, ...)` or `fn(&mut
            // self, ...)`. We want a `fn(self, ...)`. We can produce
            // this by doing something like:
            //
            // fn call_once(self, ...) { call_mut(&self, ...) }
            // fn call_once(mut self, ...) { call_mut(&mut self, ...) }
            //
            // These are both the same at trans time.
            Ok(true)
        }
        // Remaining pairs (e.g. FnOnce closure where Fn/FnMut is required)
        // are unsatisfiable.
        _ => Err(()),
    }
}
|
2017-03-22 13:13:52 +01:00
|
|
|
|
|
|
|
/// The point where linking happens. Resolve a (def_id, substs)
/// pair to an instance.
///
/// Dispatches on what kind of item `def_id` is: trait-associated items go
/// through trait selection, intrinsics and `drop_in_place` get their special
/// `InstanceDef` variants, and everything else is a plain `Item`.
pub fn resolve<'a, 'tcx>(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    def_id: DefId,
    substs: &'tcx Substs<'tcx>,
) -> ty::Instance<'tcx> {
    debug!("resolve(def_id={:?}, substs={:?})", def_id, substs);
    let result = if let Some(trait_def_id) = tcx.trait_of_item(def_id) {
        // Trait item: find the impl (or shim) that actually provides it.
        debug!(" => associated item, attempting to find impl");
        let item = tcx.associated_item(def_id);
        resolve_associated_item(tcx, &item, trait_def_id, substs)
    } else {
        let item_type = def_ty(tcx, def_id, substs);
        let def = match item_type.sty {
            // Intrinsics are identified by their ABI, not by a trait.
            ty::TyFnDef(..)
                if {
                    let f = item_type.fn_sig(tcx);
                    f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic
                } => {
                debug!(" => intrinsic");
                ty::InstanceDef::Intrinsic(def_id)
            }
            _ => {
                if Some(def_id) == tcx.lang_items().drop_in_place_fn() {
                    // `drop_in_place::<T>`: choose trivial vs. nontrivial
                    // glue depending on whether T actually needs dropping.
                    let ty = substs.type_at(0);
                    if needs_drop_glue(tcx, ty) {
                        debug!(" => nontrivial drop glue");
                        ty::InstanceDef::DropGlue(def_id, Some(ty))
                    } else {
                        debug!(" => trivial drop glue");
                        ty::InstanceDef::DropGlue(def_id, None)
                    }
                } else {
                    debug!(" => free item");
                    ty::InstanceDef::Item(def_id)
                }
            }
        };
        ty::Instance { def, substs }
    };
    debug!(
        "resolve(def_id={:?}, substs={:?}) = {}",
        def_id,
        substs,
        result
    );
    result
}
|
|
|
|
|
|
|
|
pub fn needs_drop_glue<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, t: Ty<'tcx>) -> bool {
|
|
|
|
assert!(t.is_normalized_for_trans());
|
|
|
|
|
|
|
|
let t = tcx.erase_regions(&t);
|
|
|
|
|
|
|
|
// FIXME (#22815): note that type_needs_drop conservatively
|
|
|
|
// approximates in some cases and may say a type expression
|
|
|
|
// requires drop glue when it actually does not.
|
|
|
|
//
|
|
|
|
// (In this case it is not clear whether any harm is done, i.e.
|
|
|
|
// erroneously returning `true` in some cases where we could have
|
|
|
|
// returned `false` does not appear unsound. The impact on
|
|
|
|
// code quality is unknown at this time.)
|
|
|
|
|
2017-06-02 21:00:35 -04:00
|
|
|
let env = ty::ParamEnv::empty(Reveal::All);
|
2017-05-23 23:40:39 -04:00
|
|
|
if !t.needs_drop(tcx, env) {
|
2017-03-22 13:13:52 +01:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
match t.sty {
|
|
|
|
ty::TyAdt(def, _) if def.is_box() => {
|
|
|
|
let typ = t.boxed_ty();
|
2017-05-23 23:40:39 -04:00
|
|
|
if !typ.needs_drop(tcx, env) && type_is_sized(tcx, typ) {
|
2017-06-02 21:00:35 -04:00
|
|
|
let layout = t.layout(tcx, ty::ParamEnv::empty(Reveal::All)).unwrap();
|
|
|
|
// `Box<ZeroSizeType>` does not allocate.
|
|
|
|
layout.size(&tcx.data_layout).bytes() != 0
|
2017-03-22 13:13:52 +01:00
|
|
|
} else {
|
|
|
|
true
|
|
|
|
}
|
|
|
|
}
|
2017-08-10 08:48:38 -07:00
|
|
|
_ => true,
|
2017-03-22 13:13:52 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn resolve_associated_item<'a, 'tcx>(
|
|
|
|
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
|
|
|
trait_item: &ty::AssociatedItem,
|
|
|
|
trait_id: DefId,
|
2017-08-10 08:48:38 -07:00
|
|
|
rcvr_substs: &'tcx Substs<'tcx>,
|
2017-03-22 13:13:52 +01:00
|
|
|
) -> ty::Instance<'tcx> {
|
|
|
|
let def_id = trait_item.def_id;
|
2017-08-10 08:48:38 -07:00
|
|
|
debug!(
|
|
|
|
"resolve_associated_item(trait_item={:?}, \
|
2017-03-22 13:13:52 +01:00
|
|
|
trait_id={:?}, \
|
|
|
|
rcvr_substs={:?})",
|
2017-08-10 08:48:38 -07:00
|
|
|
def_id,
|
|
|
|
trait_id,
|
|
|
|
rcvr_substs
|
|
|
|
);
|
2017-03-22 13:13:52 +01:00
|
|
|
|
|
|
|
let trait_ref = ty::TraitRef::from_method(tcx, trait_id, rcvr_substs);
|
2017-08-26 13:48:59 -04:00
|
|
|
let vtbl = tcx.trans_fulfill_obligation(DUMMY_SP, ty::Binder(trait_ref));
|
2017-03-22 13:13:52 +01:00
|
|
|
|
|
|
|
// Now that we know which impl is being used, we can dispatch to
|
|
|
|
// the actual function:
|
|
|
|
match vtbl {
|
|
|
|
::rustc::traits::VtableImpl(impl_data) => {
|
2017-08-10 08:48:38 -07:00
|
|
|
let (def_id, substs) =
|
|
|
|
::rustc::traits::find_associated_item(tcx, trait_item, rcvr_substs, &impl_data);
|
2017-03-22 13:13:52 +01:00
|
|
|
let substs = tcx.erase_regions(&substs);
|
|
|
|
ty::Instance::new(def_id, substs)
|
|
|
|
}
|
2017-08-30 11:13:01 +02:00
|
|
|
::rustc::traits::VtableGenerator(closure_data) => {
|
|
|
|
ty::Instance {
|
|
|
|
def: ty::InstanceDef::Item(closure_data.closure_def_id),
|
|
|
|
substs: closure_data.substs.substs
|
|
|
|
}
|
|
|
|
}
|
2017-03-22 13:13:52 +01:00
|
|
|
::rustc::traits::VtableClosure(closure_data) => {
|
2017-09-08 19:10:21 +02:00
|
|
|
let trait_closure_kind = tcx.lang_items().fn_trait_kind(trait_id).unwrap();
|
2017-08-10 08:48:38 -07:00
|
|
|
resolve_closure(
|
|
|
|
tcx,
|
|
|
|
closure_data.closure_def_id,
|
|
|
|
closure_data.substs,
|
|
|
|
trait_closure_kind,
|
|
|
|
)
|
2017-03-22 13:13:52 +01:00
|
|
|
}
|
|
|
|
::rustc::traits::VtableFnPointer(ref data) => {
|
|
|
|
ty::Instance {
|
|
|
|
def: ty::InstanceDef::FnPtrShim(trait_item.def_id, data.fn_ty),
|
2017-08-10 08:48:38 -07:00
|
|
|
substs: rcvr_substs,
|
2017-03-22 13:13:52 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
::rustc::traits::VtableObject(ref data) => {
|
|
|
|
let index = tcx.get_vtable_index_of_object_method(data, def_id);
|
|
|
|
ty::Instance {
|
|
|
|
def: ty::InstanceDef::Virtual(def_id, index),
|
2017-08-10 08:48:38 -07:00
|
|
|
substs: rcvr_substs,
|
2017-03-22 13:13:52 +01:00
|
|
|
}
|
|
|
|
}
|
2017-09-08 19:10:21 +02:00
|
|
|
::rustc::traits::VtableBuiltin(..) if Some(trait_id) == tcx.lang_items().clone_trait() => {
|
2017-08-23 17:24:38 +02:00
|
|
|
ty::Instance {
|
|
|
|
def: ty::InstanceDef::CloneShim(def_id, trait_ref.self_ty()),
|
|
|
|
substs: rcvr_substs
|
|
|
|
}
|
|
|
|
}
|
2017-08-10 08:48:38 -07:00
|
|
|
_ => bug!("static call to invalid vtable: {:?}", vtbl),
|
2017-03-22 13:13:52 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
pub fn def_ty<'a, 'tcx>(
|
|
|
|
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
|
|
|
def_id: DefId,
|
|
|
|
substs: &'tcx Substs<'tcx>,
|
|
|
|
) -> Ty<'tcx> {
|
2017-04-27 13:48:19 +02:00
|
|
|
let ty = tcx.type_of(def_id);
|
2017-03-22 13:13:52 +01:00
|
|
|
apply_param_substs(tcx, substs, &ty)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Monomorphizes a type from the AST by first applying the in-scope
|
|
|
|
/// substitutions and then normalizing any associated types.
|
2017-08-10 08:48:38 -07:00
|
|
|
pub fn apply_param_substs<'a, 'tcx, T>(
|
|
|
|
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
|
|
|
param_substs: &Substs<'tcx>,
|
|
|
|
value: &T,
|
|
|
|
) -> T
|
|
|
|
where
|
|
|
|
T: ::rustc::infer::TransNormalize<'tcx>,
|
2017-03-22 13:13:52 +01:00
|
|
|
{
|
2017-08-10 08:48:38 -07:00
|
|
|
debug!(
|
|
|
|
"apply_param_substs(param_substs={:?}, value={:?})",
|
|
|
|
param_substs,
|
|
|
|
value
|
|
|
|
);
|
2017-03-22 13:13:52 +01:00
|
|
|
let substituted = value.subst(tcx, param_substs);
|
|
|
|
let substituted = tcx.erase_regions(&substituted);
|
2017-08-10 08:48:38 -07:00
|
|
|
AssociatedTypeNormalizer { tcx }.fold(&substituted)
|
2017-03-22 13:13:52 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/// A type folder that replaces associated-type projections with their
/// normalized form (see the `TypeFolder` impl below).
struct AssociatedTypeNormalizer<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
|
|
|
|
|
|
|
|
impl<'a, 'tcx> AssociatedTypeNormalizer<'a, 'tcx> {
|
|
|
|
fn fold<T: TypeFoldable<'tcx>>(&mut self, value: &T) -> T {
|
2017-09-13 13:46:54 +02:00
|
|
|
if !value.has_projections() {
|
2017-03-22 13:13:52 +01:00
|
|
|
value.clone()
|
|
|
|
} else {
|
|
|
|
value.fold_with(self)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<'a, 'tcx> ::rustc::ty::fold::TypeFolder<'tcx, 'tcx> for AssociatedTypeNormalizer<'a, 'tcx> {
|
|
|
|
fn tcx<'c>(&'c self) -> TyCtxt<'c, 'tcx, 'tcx> {
|
|
|
|
self.tcx
|
|
|
|
}
|
|
|
|
|
|
|
|
fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
|
2017-09-13 13:46:54 +02:00
|
|
|
if !ty.has_projections() {
|
2017-03-22 13:13:52 +01:00
|
|
|
ty
|
|
|
|
} else {
|
|
|
|
self.tcx.normalize_associated_type(&ty)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn type_is_sized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
|
|
|
|
// generics are weird, don't run this function on a generic
|
|
|
|
assert!(!ty.needs_subst());
|
2017-06-02 21:00:35 -04:00
|
|
|
ty.is_sized(tcx, ty::ParamEnv::empty(Reveal::All), DUMMY_SP)
|
2017-03-22 13:13:52 +01:00
|
|
|
}
|
|
|
|
|
2017-03-22 16:16:23 +01:00
|
|
|
pub fn resolve_drop_in_place<'a, 'tcx>(
|
|
|
|
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
|
|
|
ty: Ty<'tcx>,
|
2017-08-10 08:48:38 -07:00
|
|
|
) -> ty::Instance<'tcx> {
|
2017-03-22 16:16:23 +01:00
|
|
|
let def_id = tcx.require_lang_item(::rustc::middle::lang_items::DropInPlaceFnLangItem);
|
|
|
|
let substs = tcx.intern_substs(&[Kind::from(ty)]);
|
|
|
|
resolve(tcx, def_id, substs)
|
|
|
|
}
|