2017-02-07 00:39:40 -08:00
|
|
|
use std::fmt::Write;
|
2018-06-21 21:40:14 -07:00
|
|
|
use std::hash::{Hash, Hasher};
|
2018-06-22 13:31:25 -07:00
|
|
|
use std::mem;
|
2016-12-07 20:30:37 -08:00
|
|
|
|
2017-12-12 17:14:49 +01:00
|
|
|
use rustc::hir::def_id::DefId;
|
2018-03-22 16:59:02 +01:00
|
|
|
use rustc::hir::def::Def;
|
2017-12-12 17:14:49 +01:00
|
|
|
use rustc::hir::map::definitions::DefPathData;
|
|
|
|
use rustc::mir;
|
2018-06-04 18:32:06 +02:00
|
|
|
use rustc::ty::layout::{self, Size, Align, HasDataLayout, IntegerExt, LayoutOf, TyLayout, Primitive};
|
2018-02-23 01:13:54 +00:00
|
|
|
use rustc::ty::subst::{Subst, Substs};
|
2018-05-02 15:21:05 +02:00
|
|
|
use rustc::ty::{self, Ty, TyCtxt, TypeAndMut};
|
2018-06-13 16:44:43 +03:00
|
|
|
use rustc::ty::query::TyCtxtAt;
|
2018-06-22 00:46:29 -07:00
|
|
|
use rustc_data_structures::fx::{FxHashSet, FxHasher};
|
2018-03-23 08:31:13 +01:00
|
|
|
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
|
2017-12-12 17:14:49 +01:00
|
|
|
use rustc::mir::interpret::{
|
2018-06-04 18:32:06 +02:00
|
|
|
GlobalId, Value, Scalar, FrameInfo, AllocType,
|
2018-06-02 23:38:57 +02:00
|
|
|
EvalResult, EvalErrorKind, Pointer, ConstValue,
|
2018-07-24 18:28:53 +02:00
|
|
|
ScalarMaybeUndef,
|
2017-12-12 17:14:49 +01:00
|
|
|
};
|
2018-06-08 03:47:26 +01:00
|
|
|
|
|
|
|
use syntax::codemap::{self, Span};
|
|
|
|
use syntax::ast::Mutability;
|
2015-11-12 15:50:58 -06:00
|
|
|
|
2017-12-12 17:14:49 +01:00
|
|
|
use super::{Place, PlaceExtra, Memory,
|
2018-02-21 22:02:52 +01:00
|
|
|
HasMemory, MemoryKind,
|
2017-12-14 11:36:28 +01:00
|
|
|
Machine};
|
2016-06-01 17:05:20 +02:00
|
|
|
|
2018-06-04 18:32:06 +02:00
|
|
|
// Constructs a `ValidationFailure` error describing what was encountered,
// where it was encountered, and (in the three-argument form) what was
// expected instead. The `$where` path fragment is omitted from the message
// when it is empty, so top-level failures read naturally.
macro_rules! validation_failure{
    // Form with an expectation: "encountered {what} at {where}, but expected {details}"
    ($what:expr, $where:expr, $details:expr) => {{
        // Only mention a location when a path into the value is known.
        let where_ = if $where.is_empty() {
            String::new()
        } else {
            format!(" at {}", $where)
        };
        err!(ValidationFailure(format!(
            "encountered {}{}, but expected {}",
            $what, where_, $details,
        )))
    }};
    // Form without an expectation: "encountered {what} at {where}"
    ($what:expr, $where:expr) => {{
        let where_ = if $where.is_empty() {
            String::new()
        } else {
            format!(" at {}", $where)
        };
        err!(ValidationFailure(format!(
            "encountered {}{}",
            $what, where_,
        )))
    }};
}
|
|
|
|
|
2018-01-16 09:31:48 +01:00
|
|
|
/// The interpreter state for a single constant evaluation, generic over a
/// `Machine` that customizes behavior (e.g. for miri vs. compile-time eval).
pub struct EvalContext<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
    /// Stores the `Machine` instance.
    pub machine: M,

    /// The results of the type checker, from rustc.
    pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,

    /// Bounds in scope for polymorphic evaluations.
    pub param_env: ty::ParamEnv<'tcx>,

    /// The virtual memory system.
    pub memory: Memory<'a, 'mir, 'tcx, M>,

    /// The virtual call stack.
    pub(crate) stack: Vec<Frame<'mir, 'tcx>>,

    /// The maximum number of stack frames allowed
    pub(crate) stack_limit: usize,

    /// When this value is negative, it indicates the number of interpreter
    /// steps *until* the loop detector is enabled. When it is positive, it is
    /// the number of steps after the detector has been enabled modulo the loop
    /// detector period.
    pub(crate) steps_since_detector_enabled: isize,

    /// Detects non-terminating evaluation by snapshotting and hashing the
    /// interpreter state; see `InfiniteLoopDetector::observe_and_analyze`.
    pub(crate) loop_detector: InfiniteLoopDetector<'a, 'mir, 'tcx, M>,
}
|
|
|
|
|
2016-03-06 04:23:24 -06:00
|
|
|
/// A stack frame.
|
2018-06-21 21:40:14 -07:00
|
|
|
/// A stack frame.
#[derive(Clone)]
pub struct Frame<'mir, 'tcx: 'mir> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub mir: &'mir mir::Mir<'tcx>,

    /// The def_id and substs of the current function
    pub instance: ty::Instance<'tcx>,

    /// The span of the call site.
    pub span: codemap::Span,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// The block to return to when returning from the current stack frame
    pub return_to_block: StackPopCleanup,

    /// The location where the result of the current stack frame should be written to.
    pub return_place: Place,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`.
    /// `LocalValue::Dead` represents a local that is currently dead, while a live local
    /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
    pub locals: IndexVec<mir::Local, LocalValue>,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// The block that is currently executed (or will be executed after the above call stacks
    /// return).
    pub block: mir::BasicBlock,

    /// The index of the currently evaluated statement.
    pub stmt: usize,
}
|
2016-02-27 19:20:25 -06:00
|
|
|
|
2018-07-24 18:28:53 +02:00
|
|
|
/// The state of a single local variable in a `Frame`.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub enum LocalValue {
    /// The local's storage is currently dead; accessing it is an error.
    Dead,
    /// The local is live and holds the given value.
    Live(Value),
}
|
|
|
|
|
|
|
|
impl LocalValue {
|
|
|
|
pub fn access(self) -> EvalResult<'static, Value> {
|
|
|
|
match self {
|
|
|
|
LocalValue::Dead => err!(DeadLocal),
|
|
|
|
LocalValue::Live(val) => Ok(val),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-06-21 21:40:14 -07:00
|
|
|
// The manual `PartialEq` for `Frame` compares fields with total equality, so
// the relation is a full equivalence and `Eq` can be asserted.
impl<'mir, 'tcx: 'mir> Eq for Frame<'mir, 'tcx> {}
|
|
|
|
|
|
|
|
impl<'mir, 'tcx: 'mir> PartialEq for Frame<'mir, 'tcx> {
    fn eq(&self, other: &Self) -> bool {
        // Exhaustively destructure `self` so that adding or removing a field
        // forces this impl to be revisited; `mir` and `span` are deliberately
        // ignored.
        let Frame {
            mir: _,
            instance,
            span: _,
            return_to_block,
            return_place,
            locals,
            block,
            stmt,
        } = self;

        // Some of these are constant during evaluation, but are included
        // anyways for correctness.
        *instance == other.instance
            && *return_to_block == other.return_to_block
            && *return_place == other.return_place
            && *locals == other.locals
            && *block == other.block
            && *stmt == other.stmt
    }
}
|
|
|
|
|
|
|
|
impl<'mir, 'tcx: 'mir> Hash for Frame<'mir, 'tcx> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Same exhaustive destructuring as in `PartialEq`: the hash must feed
        // exactly the fields that equality compares (skipping `mir`/`span`),
        // otherwise the `Hash`/`Eq` contract would be violated.
        let Frame {
            mir: _,
            instance,
            span: _,
            return_to_block,
            return_place,
            locals,
            block,
            stmt,
        } = self;

        instance.hash(state);
        return_to_block.hash(state);
        return_place.hash(state);
        locals.hash(state);
        block.hash(state);
        stmt.hash(state);
    }
}
|
|
|
|
|
2018-06-22 12:36:54 -07:00
|
|
|
/// The virtual machine state during const-evaluation at a given point in time.
/// A full (owned) copy of machine, call stack, and memory.
type EvalSnapshot<'a, 'mir, 'tcx, M>
    = (M, Vec<Frame<'mir, 'tcx>>, Memory<'a, 'mir, 'tcx, M>);
|
|
|
|
|
2018-06-22 00:46:29 -07:00
|
|
|
/// Detects evaluation that can never terminate, by checking whether the exact
/// same interpreter state is ever reached twice.
pub(crate) struct InfiniteLoopDetector<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
    /// The set of all `EvalSnapshot` *hashes* observed by this detector.
    ///
    /// When a collision occurs in this table, we store the full snapshot in
    /// `snapshots`.
    hashes: FxHashSet<u64>,

    /// The set of all `EvalSnapshot`s observed by this detector.
    ///
    /// An `EvalSnapshot` will only be fully cloned once it has caused a
    /// collision in `hashes`. As a result, the detector must observe at least
    /// *two* full cycles of an infinite loop before it triggers.
    snapshots: FxHashSet<EvalSnapshot<'a, 'mir, 'tcx, M>>,
}
|
|
|
|
|
|
|
|
impl<'a, 'mir, 'tcx, M> Default for InfiniteLoopDetector<'a, 'mir, 'tcx, M>
|
2018-07-04 13:05:43 -07:00
|
|
|
where M: Machine<'mir, 'tcx>,
|
2018-06-22 00:46:29 -07:00
|
|
|
'tcx: 'a + 'mir,
|
|
|
|
{
|
|
|
|
fn default() -> Self {
|
|
|
|
InfiniteLoopDetector {
|
2018-06-24 12:51:49 -07:00
|
|
|
hashes: FxHashSet::default(),
|
2018-06-22 00:46:29 -07:00
|
|
|
snapshots: FxHashSet::default(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<'a, 'mir, 'tcx, M> InfiniteLoopDetector<'a, 'mir, 'tcx, M>
    where M: Machine<'mir, 'tcx>,
          'tcx: 'a + 'mir,
{
    /// Returns `true` if the loop detector has not yet observed a snapshot.
    pub fn is_empty(&self) -> bool {
        self.hashes.is_empty()
    }

    /// Records the current interpreter state and returns an `InfiniteLoop`
    /// error once the identical full state has been observed a second time
    /// (i.e. after at least two complete cycles of a loop).
    pub fn observe_and_analyze(
        &mut self,
        machine: &M,
        stack: &Vec<Frame<'mir, 'tcx>>,
        memory: &Memory<'a, 'mir, 'tcx, M>,
    ) -> EvalResult<'tcx, ()> {
        // Hash through references first — cheap; the expensive full clone below
        // only happens once this hash collides.
        let snapshot = (machine, stack, memory);

        let mut fx = FxHasher::default();
        snapshot.hash(&mut fx);
        let hash = fx.finish();

        if self.hashes.insert(hash) {
            // No collision
            return Ok(())
        }

        if self.snapshots.insert((machine.clone(), stack.clone(), memory.clone())) {
            // Spurious collision or first cycle
            return Ok(())
        }

        // Second cycle
        Err(EvalErrorKind::InfiniteLoop.into())
    }
}
|
|
|
|
|
2016-09-09 17:44:04 +02:00
|
|
|
/// What to do when a stack frame is popped.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum StackPopCleanup {
    /// The stackframe existed to compute the initial value of a static/constant, make sure it
    /// isn't modifiable afterwards in case of constants.
    /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
    /// references or deallocated
    MarkStatic(Mutability),
    /// A regular stackframe added due to a function call will need to get forwarded to the next
    /// block
    Goto(mir::BasicBlock),
    /// The main function and diverging functions have nowhere to return to
    None,
}
|
|
|
|
|
2017-07-28 19:43:05 -07:00
|
|
|
/// A type together with a `packed` flag.
// NOTE(review): `packed` presumably records whether the type was reached
// through a packed (under-aligned) struct — confirm at the use sites.
#[derive(Copy, Clone, Debug)]
pub struct TyAndPacked<'tcx> {
    pub ty: Ty<'tcx>,
    pub packed: bool,
}
|
|
|
|
|
2017-08-24 14:41:49 +02:00
|
|
|
/// A `Value` paired with its Rust type.
#[derive(Copy, Clone, Debug)]
pub struct ValTy<'tcx> {
    pub value: Value,
    pub ty: Ty<'tcx>,
}
|
|
|
|
|
|
|
|
// Let a `ValTy` be used wherever a `Value` is expected by auto-derefing to
// its `value` field.
impl<'tcx> ::std::ops::Deref for ValTy<'tcx> {
    type Target = Value;
    fn deref(&self) -> &Value {
        &self.value
    }
}
|
|
|
|
|
2018-01-16 09:31:48 +01:00
|
|
|
// Expose the target's data layout (pointer sizes, endianness, …) straight
// from the type context.
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for &'a EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    fn data_layout(&self) -> &layout::TargetDataLayout {
        &self.tcx.data_layout
    }
}
|
|
|
|
|
2018-01-16 09:31:48 +01:00
|
|
|
// Same as the `&EvalContext` impl, but for the `&&mut` receiver shape that
// layout queries use.
impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout
    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    fn data_layout(&self) -> &layout::TargetDataLayout {
        &self.tcx.data_layout
    }
}
|
|
|
|
|
2018-01-16 09:31:48 +01:00
|
|
|
// Hand out the plain `TyCtxt` (deref of the `TyCtxtAt` stored in the context).
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx> for &'a EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
        *self.tcx
    }
}
|
|
|
|
|
2018-01-16 09:31:48 +01:00
|
|
|
// Same as the `&EvalContext` impl, for the `&&mut` receiver shape.
impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx>
    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
        *self.tcx
    }
}
|
|
|
|
|
2018-02-05 21:07:20 +02:00
|
|
|
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf for &'a EvalContext<'a, 'mir, 'tcx, M> {
|
|
|
|
type Ty = Ty<'tcx>;
|
2017-12-06 09:25:29 +01:00
|
|
|
type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
|
|
|
|
|
|
|
|
fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
|
2018-02-01 00:00:38 +02:00
|
|
|
self.tcx.layout_of(self.param_env.and(ty))
|
2017-12-06 09:25:29 +01:00
|
|
|
.map_err(|layout| EvalErrorKind::Layout(layout).into())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-02-05 21:07:20 +02:00
|
|
|
// Delegates to the `&EvalContext` `LayoutOf` impl via a double deref.
impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf
    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
    type Ty = Ty<'tcx>;
    type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;

    #[inline]
    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
        (&**self).layout_of(ty)
    }
}
|
|
|
|
|
2018-06-26 21:59:43 -07:00
|
|
|
/// Number of interpreter steps to execute before the infinite-loop detector
/// kicks in; `steps_since_detector_enabled` starts at the negation of this.
const STEPS_UNTIL_DETECTOR_ENABLED: isize = 1_000_000;
|
2018-04-26 09:18:19 +02:00
|
|
|
|
2018-01-16 09:31:48 +01:00
|
|
|
impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
|
2017-07-21 17:25:30 +02:00
|
|
|
/// Creates a fresh `EvalContext` with an empty call stack; the stack-frame
/// limit is taken from the session and the loop detector starts disabled.
pub fn new(
    tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    machine: M,
    memory_data: M::MemoryData,
) -> Self {
    EvalContext {
        machine,
        tcx,
        param_env,
        memory: Memory::new(tcx, memory_data),
        stack: Vec::new(),
        stack_limit: tcx.sess.const_eval_stack_frame_limit,
        loop_detector: Default::default(),
        // Negative: counts *up to* zero before the detector is enabled.
        steps_since_detector_enabled: -STEPS_UNTIL_DETECTOR_ENABLED,
    }
}
|
2016-06-01 14:33:37 +02:00
|
|
|
|
2018-04-26 09:18:19 +02:00
|
|
|
/// Runs `f` with an empty call stack and a reset loop-detector step counter,
/// restoring both afterwards — so `f` evaluates in a nested, independent body.
pub(crate) fn with_fresh_body<F: FnOnce(&mut Self) -> R, R>(&mut self, f: F) -> R {
    let stack = mem::replace(&mut self.stack, Vec::new());
    let steps = mem::replace(&mut self.steps_since_detector_enabled, -STEPS_UNTIL_DETECTOR_ENABLED);
    let r = f(self);
    // Restore the outer evaluation's state.
    self.stack = stack;
    self.steps_since_detector_enabled = steps;
    r
}
|
|
|
|
|
2018-06-04 18:32:06 +02:00
|
|
|
/// Allocates stack memory with the size and alignment of `layout`.
///
/// Panics if `layout` describes an unsized type, since its size is unknown.
pub fn alloc_ptr(&mut self, layout: TyLayout<'tcx>) -> EvalResult<'tcx, Pointer> {
    assert!(!layout.is_unsized(), "cannot alloc memory for unsized type");

    self.memory.allocate(layout.size, layout.align, MemoryKind::Stack)
}
|
2016-06-08 11:11:08 +02:00
|
|
|
|
2018-01-16 09:31:48 +01:00
|
|
|
/// Shared access to the interpreter's virtual memory.
pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
    &self.memory
}
|
|
|
|
|
2018-01-16 09:31:48 +01:00
|
|
|
/// Mutable access to the interpreter's virtual memory.
pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
    &mut self.memory
}
|
|
|
|
|
2018-01-16 09:31:48 +01:00
|
|
|
/// The current call stack, bottom frame first.
pub fn stack(&self) -> &[Frame<'mir, 'tcx>] {
    &self.stack
}
|
|
|
|
|
2017-06-16 17:58:18 -07:00
|
|
|
#[inline]
|
|
|
|
pub fn cur_frame(&self) -> usize {
|
2018-06-22 12:36:54 -07:00
|
|
|
assert!(self.stack.len() > 0);
|
|
|
|
self.stack.len() - 1
|
2017-06-16 17:58:18 -07:00
|
|
|
}
|
|
|
|
|
2017-07-28 13:08:27 +02:00
|
|
|
/// Allocates the bytes of `s` in interpreter memory and returns the string as
/// a pointer + length value.
pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
    let ptr = self.memory.allocate_bytes(s.as_bytes());
    Ok(Scalar::Ptr(ptr).to_value_with_len(s.len() as u64, self.tcx.tcx))
}
|
|
|
|
|
2018-06-25 20:53:02 +02:00
|
|
|
/// Turns a type-system-level `ConstValue` into an interpreter `Value`,
/// evaluating unevaluated constants and copying by-ref data into memory.
pub fn const_to_value(
    &mut self,
    val: ConstValue<'tcx>,
) -> EvalResult<'tcx, Value> {
    match val {
        // Not computed yet: resolve the instance and evaluate it as a global.
        ConstValue::Unevaluated(def_id, substs) => {
            let instance = self.resolve(def_id, substs)?;
            self.read_global_as_value(GlobalId {
                instance,
                promoted: None,
            })
        }
        ConstValue::ByRef(alloc, offset) => {
            // FIXME: Allocate new AllocId for all constants inside
            let id = self.memory.allocate_value(alloc.clone(), MemoryKind::Stack)?;
            Ok(Value::ByRef(Pointer::new(id, offset).into(), alloc.align))
        },
        // Immediate scalars translate directly.
        ConstValue::ScalarPair(a, b) => Ok(Value::ScalarPair(a.into(), b.into())),
        ConstValue::Scalar(val) => Ok(Value::Scalar(val.into())),
    }
}
|
|
|
|
|
2017-12-06 09:25:29 +01:00
|
|
|
/// Resolves `def_id`+`substs` to a concrete `Instance` after substituting the
/// current frame's substs and normalizing. Errs with `TooGeneric` when the
/// substitutions still contain unresolved parameters.
pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
    trace!("resolve: {:?}, {:#?}", def_id, substs);
    trace!("substs: {:#?}", self.substs());
    trace!("param_env: {:#?}", self.param_env);
    let substs = self.tcx.subst_and_normalize_erasing_regions(
        self.substs(),
        self.param_env,
        &substs,
    );
    ty::Instance::resolve(
        *self.tcx,
        self.param_env,
        def_id,
        substs,
    ).ok_or_else(|| EvalErrorKind::TooGeneric.into())
}
|
|
|
|
|
2016-12-07 20:30:37 -08:00
|
|
|
/// Whether `ty` is `Sized` under the context's parameter environment.
pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
    ty.is_sized(self.tcx, self.param_env)
}
|
|
|
|
|
2017-08-10 08:48:38 -07:00
|
|
|
/// Loads the MIR body for `instance`, refusing to run code whose type-check
/// was tainted by errors and reporting `NoMirFor` when no MIR is available.
pub fn load_mir(
    &self,
    instance: ty::InstanceDef<'tcx>,
) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
    // do not continue if typeck errors occurred (can only occur in local crate)
    let did = instance.def_id();
    if did.is_local() && self.tcx.has_typeck_tables(did) && self.tcx.typeck_tables_of(did).tainted_by_errors {
        return err!(TypeckError);
    }
    trace!("load mir {:?}", instance);
    match instance {
        ty::InstanceDef::Item(def_id) => {
            self.tcx.maybe_optimized_mir(def_id).ok_or_else(||
                EvalErrorKind::NoMirFor(self.tcx.item_path_str(def_id)).into()
            )
        }
        // Shims (drop glue, closures, …) always have synthesized MIR.
        _ => Ok(self.tcx.instance_mir(instance)),
    }
}
|
2016-06-08 11:11:08 +02:00
|
|
|
|
2016-06-30 11:29:25 +02:00
|
|
|
/// Substitutes `substs` into `ty` and normalizes the result, erasing regions.
pub fn monomorphize(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
    // miri doesn't care about lifetimes, and will choke on some crazy ones
    // let's simply get rid of them
    let substituted = ty.subst(*self.tcx, substs);
    self.tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), substituted)
}
|
|
|
|
|
2017-08-03 11:06:25 -07:00
|
|
|
/// Return the size and alignment of the value at the given type.
/// Note that the value does not matter if the type is sized. For unsized types,
/// the value has to be a fat pointer, and we only care about the "extra" data in it.
pub fn size_and_align_of_dst(
    &mut self,
    ty: Ty<'tcx>,
    value: Value,
) -> EvalResult<'tcx, (Size, Align)> {
    let layout = self.layout_of(ty)?;
    if !layout.is_unsized() {
        // Sized types have a statically known size/align; `value` is unused.
        Ok(layout.size_and_align())
    } else {
        match ty.sty {
            ty::TyAdt(..) | ty::TyTuple(..) => {
                // First get the size of all statically known fields.
                // Don't use type_of::sizing_type_of because that expects t to be sized,
                // and it also rounds up to alignment, which we want to avoid,
                // as the unsized field's alignment could be smaller.
                assert!(!ty.is_simd());
                debug!("DST {} layout: {:?}", ty, layout);

                let sized_size = layout.fields.offset(layout.fields.count() - 1);
                let sized_align = layout.align;
                debug!(
                    "DST {} statically sized prefix size: {:?} align: {:?}",
                    ty,
                    sized_size,
                    sized_align
                );

                // Recurse to get the size of the dynamically sized field (must be
                // the last field).
                let field_ty = layout.field(&self, layout.fields.count() - 1)?.ty;
                let (unsized_size, unsized_align) =
                    self.size_and_align_of_dst(field_ty, value)?;

                // FIXME (#26403, #27023): We should be adding padding
                // to `sized_size` (to accommodate the `unsized_align`
                // required of the unsized field that follows) before
                // summing it with `sized_size`. (Note that since #26403
                // is unfixed, we do not yet add the necessary padding
                // here. But this is where the add would go.)

                // Return the sum of sizes and max of aligns.
                let size = sized_size + unsized_size;

                // Choose max of two known alignments (combined value must
                // be aligned according to more restrictive of the two).
                let align = sized_align.max(unsized_align);

                // Issue #27023: must add any necessary padding to `size`
                // (to make it a multiple of `align`) before returning it.
                //
                // Namely, the returned size should be, in C notation:
                //
                // `size + ((size & (align-1)) ? align : 0)`
                //
                // emulated via the semi-standard fast bit trick:
                //
                // `(size + (align-1)) & -align`

                Ok((size.abi_align(align), align))
            }
            ty::TyDynamic(..) => {
                let (_, vtable) = self.into_ptr_vtable_pair(value)?;
                // the second entry in the vtable is the dynamic size of the object.
                self.read_size_and_align_from_vtable(vtable)
            }

            ty::TySlice(_) | ty::TyStr => {
                // Slice/str size is element size times the fat pointer's length.
                let (elem_size, align) = layout.field(&self, 0)?.size_and_align();
                let (_, len) = self.into_slice(value)?;
                Ok((elem_size * len, align))
            }

            _ => bug!("size_of_val::<{:?}>", ty),
        }
    }
}
|
|
|
|
|
2016-07-05 10:47:10 +02:00
|
|
|
/// Pushes a new frame for `instance`/`mir` onto the call stack, initializes
/// its locals, and errs with `StackFrameLimitReached` past the stack limit.
pub fn push_stack_frame(
    &mut self,
    instance: ty::Instance<'tcx>,
    span: codemap::Span,
    mir: &'mir mir::Mir<'tcx>,
    return_place: Place,
    return_to_block: StackPopCleanup,
) -> EvalResult<'tcx> {
    ::log_settings::settings().indentation += 1;

    // first push a stack frame so we have access to the local substs
    self.stack.push(Frame {
        mir,
        block: mir::START_BLOCK,
        return_to_block,
        return_place,
        // empty local array, we fill it in below, after we are inside the stack frame and
        // all methods actually know about the frame
        locals: IndexVec::new(),
        span,
        instance,
        stmt: 0,
    });

    // don't allocate at all for trivial constants
    if mir.local_decls.len() > 1 {
        // Start with every local live, then mark `Storage*`-managed ones dead.
        let mut locals = IndexVec::from_elem(LocalValue::Dead, &mir.local_decls);
        for (local, decl) in locals.iter_mut().zip(mir.local_decls.iter()) {
            *local = LocalValue::Live(self.init_value(decl.ty)?);
        }
        match self.tcx.describe_def(instance.def_id()) {
            // statics and constants don't have `Storage*` statements, no need to look for them
            Some(Def::Static(..)) | Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {},
            _ => {
                trace!("push_stack_frame: {:?}: num_bbs: {}", span, mir.basic_blocks().len());
                for block in mir.basic_blocks() {
                    for stmt in block.statements.iter() {
                        use rustc::mir::StatementKind::{StorageDead, StorageLive};
                        match stmt.kind {
                            // A local mentioned in any Storage* statement only
                            // becomes live once its StorageLive executes.
                            StorageLive(local) |
                            StorageDead(local) => locals[local] = LocalValue::Dead,
                            _ => {}
                        }
                    }
                }
            },
        }
        self.frame_mut().locals = locals;
    }

    self.memory.cur_frame = self.cur_frame();

    if self.stack.len() > self.stack_limit {
        err!(StackFrameLimitReached)
    } else {
        Ok(())
    }
}
|
|
|
|
|
2017-02-04 13:09:10 -08:00
|
|
|
/// Pops the current frame, performs its `StackPopCleanup`, and deallocates
/// all of the frame's allocation-backed locals.
pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
    ::log_settings::settings().indentation -= 1;
    M::end_region(self, None)?;
    let frame = self.stack.pop().expect(
        "tried to pop a stack frame, but there were none",
    );
    if !self.stack.is_empty() {
        // TODO: Is this the correct time to start considering these accesses as originating from the returned-to stack frame?
        self.memory.cur_frame = self.cur_frame();
    }
    match frame.return_to_block {
        StackPopCleanup::MarkStatic(mutable) => {
            if let Place::Ptr { ptr, .. } = frame.return_place {
                // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
                self.memory.mark_static_initialized(
                    ptr.unwrap_or_err()?.to_ptr()?.alloc_id,
                    mutable,
                )?
            } else {
                bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_place);
            }
        }
        StackPopCleanup::Goto(target) => self.goto_block(target),
        StackPopCleanup::None => {}
    }
    // deallocate all locals that are backed by an allocation
    for local in frame.locals {
        self.deallocate_local(local)?;
    }

    Ok(())
}
|
|
|
|
|
2018-07-24 18:28:53 +02:00
|
|
|
/// Frees the memory backing `local` if it is a live by-ref value; non-ref or
/// dead locals need no deallocation and are ignored.
pub fn deallocate_local(&mut self, local: LocalValue) -> EvalResult<'tcx> {
    // FIXME: should we tell the user that there was a local which was never written to?
    if let LocalValue::Live(Value::ByRef(ptr, _align)) = local {
        trace!("deallocating local");
        let ptr = ptr.to_ptr()?;
        self.memory.dump_alloc(ptr.alloc_id);
        self.memory.deallocate_local(ptr)?;
    };
    Ok(())
}
|
|
|
|
|
2016-09-19 02:19:31 -06:00
|
|
|
/// Evaluate an assignment statement.
///
/// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
/// type writes its results directly into the memory specified by the place.
pub(super) fn eval_rvalue_into_place(
    &mut self,
    rvalue: &mir::Rvalue<'tcx>,
    place: &mir::Place<'tcx>,
) -> EvalResult<'tcx> {
    // Destination place, its (monomorphized) type, and its layout are needed
    // by almost every arm below.
    let dest = self.eval_place(place)?;
    let dest_ty = self.place_ty(place);
    let dest_layout = self.layout_of(dest_ty)?;

    use rustc::mir::Rvalue::*;
    match *rvalue {
        // Plain copy/move of an operand into the destination.
        Use(ref operand) => {
            let value = self.eval_operand(operand)?.value;
            let valty = ValTy {
                value,
                ty: dest_ty,
            };
            self.write_value(valty, dest)?;
        }

        // Wrapping binary op; `intrinsic_overflowing` writes the result itself.
        BinaryOp(bin_op, ref left, ref right) => {
            let left = self.eval_operand(left)?;
            let right = self.eval_operand(right)?;
            self.intrinsic_overflowing(
                bin_op,
                left,
                right,
                dest,
                dest_ty,
            )?;
        }

        // Binary op producing a `(result, overflowed)` pair.
        CheckedBinaryOp(bin_op, ref left, ref right) => {
            let left = self.eval_operand(left)?;
            let right = self.eval_operand(right)?;
            self.intrinsic_with_overflow(
                bin_op,
                left,
                right,
                dest,
                dest_ty,
            )?;
        }

        // Unary op on a single scalar operand.
        UnaryOp(un_op, ref operand) => {
            let val = self.eval_operand_to_scalar(operand)?;
            let val = self.unary_op(un_op, val, dest_layout)?;
            self.write_scalar(
                dest,
                val,
                dest_ty,
            )?;
        }

        // Struct/enum/tuple/array construction: write the discriminant (for
        // ADTs), then each non-ZST field in turn.
        Aggregate(ref kind, ref operands) => {
            let (dest, active_field_index) = match **kind {
                mir::AggregateKind::Adt(adt_def, variant_index, _, active_field_index) => {
                    self.write_discriminant_value(dest_ty, dest, variant_index)?;
                    if adt_def.is_enum() {
                        // Project into the active variant so field indices
                        // below refer to that variant's fields.
                        (self.place_downcast(dest, variant_index)?, active_field_index)
                    } else {
                        (dest, active_field_index)
                    }
                }
                _ => (dest, None)
            };

            let layout = self.layout_of(dest_ty)?;
            for (i, operand) in operands.iter().enumerate() {
                let value = self.eval_operand(operand)?;
                // Ignore zero-sized fields.
                if !self.layout_of(value.ty)?.is_zst() {
                    // `active_field_index` is Some for union-like writes where
                    // only a single field is initialized.
                    let field_index = active_field_index.unwrap_or(i);
                    let (field_dest, _) = self.place_field(dest, mir::Field::new(field_index), layout)?;
                    self.write_value(value, field_dest)?;
                }
            }
        }

        // `[elem; N]`: write the element once, then bulk-copy it N-1 times.
        Repeat(ref operand, _) => {
            let (elem_ty, length) = match dest_ty.sty {
                ty::TyArray(elem_ty, n) => (elem_ty, n.unwrap_usize(self.tcx.tcx)),
                _ => {
                    bug!(
                        "tried to assign array-repeat to non-array type {:?}",
                        dest_ty
                    )
                }
            };
            let elem_size = self.layout_of(elem_ty)?.size;
            let value = self.eval_operand(operand)?.value;

            // The repeat needs a real memory destination to copy within.
            let (dest, dest_align) = self.force_allocation(dest)?.to_ptr_align();

            if length > 0 {
                let dest = dest.unwrap_or_err()?;
                //write the first value
                self.write_value_to_ptr(value, dest, dest_align, elem_ty)?;

                if length > 1 {
                    // NOTE(review): `elem_size * 1 as u64` is just `elem_size`
                    // (offset of one element past the start) — the `* 1` is a no-op.
                    let rest = dest.ptr_offset(elem_size * 1 as u64, &self)?;
                    // Repeatedly duplicate the first element into the remainder.
                    self.memory.copy_repeatedly(dest, dest_align, rest, dest_align, elem_size, length - 1, false)?;
                }
            }
        }

        // Length of an array/slice place.
        Len(ref place) => {
            // FIXME(CTFE): don't allow computing the length of arrays in const eval
            let src = self.eval_place(place)?;
            let ty = self.place_ty(place);
            let (_, len) = src.elem_ty_and_len(ty, self.tcx.tcx);
            // Lengths are usize, so the scalar is pointer-sized.
            let size = self.memory.pointer_size().bytes() as u8;
            self.write_scalar(
                dest,
                Scalar::Bits {
                    bits: len as u128,
                    size,
                },
                dest_ty,
            )?;
        }

        // Taking a reference: materialize the place in memory and store its
        // address (plus len/vtable metadata for fat pointers).
        Ref(_, _, ref place) => {
            let src = self.eval_place(place)?;
            // We ignore the alignment of the place here -- special handling for packed structs ends
            // at the `&` operator.
            let (ptr, _align, extra) = self.force_allocation(src)?.to_ptr_align_extra();

            let val = match extra {
                PlaceExtra::None => Value::Scalar(ptr),
                PlaceExtra::Length(len) => ptr.to_value_with_len(len, self.tcx.tcx),
                PlaceExtra::Vtable(vtable) => ptr.to_value_with_vtable(vtable),
                PlaceExtra::DowncastVariant(..) => {
                    bug!("attempted to take a reference to an enum downcast place")
                }
            };
            let valty = ValTy {
                value: val,
                ty: dest_ty,
            };
            self.write_value(valty, dest)?;
        }

        // `box expr` allocation is machine-specific (CTFE vs. miri).
        NullaryOp(mir::NullOp::Box, ty) => {
            let ty = self.monomorphize(ty, self.substs());
            M::box_alloc(self, ty, dest)?;
        }

        // `size_of::<T>()` computed from the layout.
        NullaryOp(mir::NullOp::SizeOf, ty) => {
            let ty = self.monomorphize(ty, self.substs());
            let layout = self.layout_of(ty)?;
            assert!(!layout.is_unsized(),
                    "SizeOf nullary MIR operator called for unsized type");
            let size = self.memory.pointer_size().bytes() as u8;
            self.write_scalar(
                dest,
                Scalar::Bits {
                    bits: layout.size.bytes() as u128,
                    size,
                },
                dest_ty,
            )?;
        }

        Cast(kind, ref operand, cast_ty) => {
            // The MIR cast target must agree with the destination's type.
            debug_assert_eq!(self.monomorphize(cast_ty, self.substs()), dest_ty);
            let src = self.eval_operand(operand)?;
            self.cast(src, kind, dest_ty, dest)?;
        }

        // Read an enum's discriminant into an integer destination.
        Discriminant(ref place) => {
            let ty = self.place_ty(place);
            let layout = self.layout_of(ty)?;
            let place = self.eval_place(place)?;
            let discr_val = self.read_discriminant_value(place, layout)?;
            // `dest_ty` was already laid out successfully above, so unwrap is safe here.
            let size = self.layout_of(dest_ty).unwrap().size.bytes() as u8;
            self.write_scalar(dest, Scalar::Bits {
                bits: discr_val,
                size,
            }, dest_ty)?;
        }
    }

    // Trace-level dump of the destination local after the write.
    self.dump_local(dest);

    Ok(())
}
|
|
|
|
|
2017-06-28 13:37:23 +02:00
|
|
|
pub(super) fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
|
2016-09-07 18:34:59 +02:00
|
|
|
match ty.sty {
|
2018-05-02 15:21:05 +02:00
|
|
|
ty::TyRawPtr(ty::TypeAndMut { ty, .. }) |
|
|
|
|
ty::TyRef(_, ty, _) => !self.type_is_sized(ty),
|
2017-02-03 15:47:23 +01:00
|
|
|
ty::TyAdt(def, _) if def.is_box() => !self.type_is_sized(ty.boxed_ty()),
|
2016-09-07 18:34:59 +02:00
|
|
|
_ => false,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-05-22 19:30:16 +02:00
|
|
|
pub(super) fn eval_operand_to_scalar(
|
2017-08-10 08:48:38 -07:00
|
|
|
&mut self,
|
|
|
|
op: &mir::Operand<'tcx>,
|
2018-05-20 23:43:16 +02:00
|
|
|
) -> EvalResult<'tcx, Scalar> {
|
2017-08-24 14:41:49 +02:00
|
|
|
let valty = self.eval_operand(op)?;
|
2018-05-22 19:30:16 +02:00
|
|
|
self.value_to_scalar(valty)
|
2017-08-24 14:41:49 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
pub(crate) fn operands_to_args(
|
|
|
|
&mut self,
|
|
|
|
ops: &[mir::Operand<'tcx>],
|
|
|
|
) -> EvalResult<'tcx, Vec<ValTy<'tcx>>> {
|
|
|
|
ops.into_iter()
|
|
|
|
.map(|op| self.eval_operand(op))
|
|
|
|
.collect()
|
2016-09-19 02:19:31 -06:00
|
|
|
}
|
|
|
|
|
2017-08-24 14:41:49 +02:00
|
|
|
pub fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, ValTy<'tcx>> {
|
2017-12-12 17:14:49 +01:00
|
|
|
use rustc::mir::Operand::*;
|
2018-02-06 18:33:59 +01:00
|
|
|
let ty = self.monomorphize(op.ty(self.mir(), *self.tcx), self.substs());
|
2015-11-12 15:50:58 -06:00
|
|
|
match *op {
|
2017-12-06 09:25:29 +01:00
|
|
|
// FIXME: do some more logic on `move` to invalidate the old location
|
|
|
|
Copy(ref place) |
|
|
|
|
Move(ref place) => {
|
2017-08-24 14:41:49 +02:00
|
|
|
Ok(ValTy {
|
2017-12-06 09:25:29 +01:00
|
|
|
value: self.eval_and_read_place(place)?,
|
2017-12-06 13:50:31 +02:00
|
|
|
ty
|
2017-08-24 14:41:49 +02:00
|
|
|
})
|
|
|
|
},
|
2016-09-19 02:19:31 -06:00
|
|
|
|
2017-05-13 07:08:30 -04:00
|
|
|
Constant(ref constant) => {
|
2018-07-22 01:01:07 +02:00
|
|
|
let value = self.const_to_value(constant.literal.val)?;
|
2016-09-19 02:19:31 -06:00
|
|
|
|
2017-08-24 14:41:49 +02:00
|
|
|
Ok(ValTy {
|
|
|
|
value,
|
2017-12-06 13:50:31 +02:00
|
|
|
ty,
|
2017-08-24 14:41:49 +02:00
|
|
|
})
|
2015-11-12 15:50:58 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2015-11-12 17:44:29 -06:00
|
|
|
|
2018-04-13 16:05:54 +02:00
|
|
|
/// reads a tag and produces the corresponding variant index
pub fn read_discriminant_as_variant_index(
    &self,
    place: Place,
    layout: TyLayout<'tcx>,
) -> EvalResult<'tcx, usize> {
    match layout.variants {
        // Univariant layout: the single possible index is statically known.
        ty::layout::Variants::Single { index } => Ok(index),
        ty::layout::Variants::Tagged { .. } => {
            // Tagged enums store a user-level discriminant value; map it back
            // to a variant index by searching the ADT's declared discriminants.
            // A tag matching no declared variant is a hard error.
            let discr_val = self.read_discriminant_value(place, layout)?;
            layout
                .ty
                .ty_adt_def()
                .expect("tagged layout for non adt")
                .discriminants(self.tcx.tcx)
                .position(|var| var.val == discr_val)
                .ok_or_else(|| EvalErrorKind::InvalidDiscriminant.into())
        }
        ty::layout::Variants::NicheFilling { .. } => {
            // For niche-filled enums, `read_discriminant_value` already
            // yields the variant index itself (see that function's
            // NicheFilling arm), so only a range check is needed.
            let discr_val = self.read_discriminant_value(place, layout)?;
            assert_eq!(discr_val as usize as u128, discr_val);
            Ok(discr_val as usize)
        },
    }
}
|
|
|
|
|
2017-08-25 16:20:13 +02:00
|
|
|
/// Read the raw discriminant value stored in `place`.
///
/// For `Tagged` layouts this returns the user-level discriminant (sign/zero
/// extended per the `repr` tag type); for `NicheFilling` layouts it returns
/// the *variant index* encoded by the niche; for `Single` layouts the
/// statically known discriminant.
pub fn read_discriminant_value(
    &self,
    place: Place,
    layout: TyLayout<'tcx>,
) -> EvalResult<'tcx, u128> {
    trace!("read_discriminant_value {:#?}", layout);
    // Uninhabited values cannot exist, so any answer is fine; use 0.
    if layout.abi == layout::Abi::Uninhabited {
        return Ok(0);
    }

    match layout.variants {
        layout::Variants::Single { index } => {
            // Non-ADTs (e.g. generators — TODO confirm which types hit this)
            // fall back to the index itself as the discriminant.
            let discr_val = layout.ty.ty_adt_def().map_or(
                index as u128,
                |def| def.discriminant_for_variant(*self.tcx, index).val);
            return Ok(discr_val);
        }
        layout::Variants::Tagged { .. } |
        layout::Variants::NicheFilling { .. } => {},
    }
    // Both multi-variant layouts store their tag/niche as field 0.
    let discr_place_val = self.read_place(place)?;
    let (discr_val, discr) = self.read_field(discr_place_val, None, mir::Field::new(0), layout)?;
    trace!("discr value: {:?}, {:?}", discr_val, discr);
    let raw_discr = self.value_to_scalar(ValTy {
        value: discr_val,
        ty: discr.ty
    })?;
    let discr_val = match layout.variants {
        layout::Variants::Single { .. } => bug!(),
        // FIXME: should we catch invalid discriminants here?
        layout::Variants::Tagged { .. } => {
            if discr.ty.is_signed() {
                let i = raw_discr.to_bits(discr.size)? as i128;
                // going from layout tag type to typeck discriminant type
                // requires first sign extending with the layout discriminant
                let shift = 128 - discr.size.bits();
                let sexted = (i << shift) >> shift;
                // and then zeroing with the typeck discriminant type
                let discr_ty = layout
                    .ty
                    .ty_adt_def().expect("tagged layout corresponds to adt")
                    .repr
                    .discr_type();
                let discr_ty = layout::Integer::from_attr(self.tcx.tcx, discr_ty);
                let shift = 128 - discr_ty.size().bits();
                let truncatee = sexted as u128;
                // Truncate (zero out the high bits) to the repr's tag width.
                (truncatee << shift) >> shift
            } else {
                // Unsigned tags need no sign fiddling.
                raw_discr.to_bits(discr.size)?
            }
        },
        layout::Variants::NicheFilling {
            dataful_variant,
            ref niche_variants,
            niche_start,
            ..
        } => {
            let variants_start = *niche_variants.start() as u128;
            let variants_end = *niche_variants.end() as u128;
            match raw_discr {
                // A pointer in the niche field can only mean the dataful
                // variant; that is only sound when the niche is the null
                // value of a single variant (asserted below).
                Scalar::Ptr(_) => {
                    assert!(niche_start == 0);
                    assert!(variants_start == variants_end);
                    dataful_variant as u128
                },
                Scalar::Bits { bits: raw_discr, size } => {
                    assert_eq!(size as u64, discr.size.bytes());
                    // Map the stored niche value back into variant-index
                    // space; values outside the niche range denote the
                    // dataful variant.
                    let discr = raw_discr.wrapping_sub(niche_start)
                        .wrapping_add(variants_start);
                    if variants_start <= discr && discr <= variants_end {
                        discr
                    } else {
                        dataful_variant as u128
                    }
                },
            }
        }
    };

    Ok(discr_val)
}
|
|
|
|
|
|
|
|
|
2018-04-07 14:39:24 +02:00
|
|
|
/// Write the tag/niche that encodes `variant_index` into the enum at `dest`.
///
/// Inverse of `read_discriminant_value`: for `Single` layouts there is
/// nothing to store, for `Tagged` layouts the truncated discriminant is
/// written to field 0, and for `NicheFilling` layouts the niche value is
/// written only for non-dataful variants.
pub fn write_discriminant_value(
    &mut self,
    dest_ty: Ty<'tcx>,
    dest: Place,
    variant_index: usize,
) -> EvalResult<'tcx> {
    let layout = self.layout_of(dest_ty)?;

    match layout.variants {
        layout::Variants::Single { index } => {
            if index != variant_index {
                // If the layout of an enum is `Single`, all
                // other variants are necessarily uninhabited.
                assert_eq!(layout.for_variant(&self, variant_index).abi,
                           layout::Abi::Uninhabited);
            }
        }
        layout::Variants::Tagged { ref tag, .. } => {
            let discr_val = dest_ty.ty_adt_def().unwrap()
                .discriminant_for_variant(*self.tcx, variant_index)
                .val;

            // raw discriminants for enums are isize or bigger during
            // their computation, but the in-memory tag is the smallest possible
            // representation
            let size = tag.value.size(self.tcx.tcx);
            // Shift left then right to truncate the value to the tag's width.
            let shift = 128 - size.bits();
            let discr_val = (discr_val << shift) >> shift;

            // The tag is stored as field 0 of the enum layout.
            let (discr_dest, tag) = self.place_field(dest, mir::Field::new(0), layout)?;
            self.write_scalar(discr_dest, Scalar::Bits {
                bits: discr_val,
                size: size.bytes() as u8,
            }, tag.ty)?;
        }
        layout::Variants::NicheFilling {
            dataful_variant,
            ref niche_variants,
            niche_start,
            ..
        } => {
            // The dataful variant is implied by its payload; only the
            // niche-encoded variants need an explicit write.
            if variant_index != dataful_variant {
                let (niche_dest, niche) =
                    self.place_field(dest, mir::Field::new(0), layout)?;
                // Encode the variant's offset within the niche range,
                // starting at `niche_start`.
                let niche_value = ((variant_index - niche_variants.start()) as u128)
                    .wrapping_add(niche_start);
                self.write_scalar(niche_dest, Scalar::Bits {
                    bits: niche_value,
                    size: niche.size.bytes() as u8,
                }, niche.ty)?;
            }
        }
    }

    Ok(())
}
|
|
|
|
|
2018-06-25 20:53:02 +02:00
|
|
|
/// Const-evaluate `gid` and convert the resulting constant into a `Value`.
pub fn read_global_as_value(&mut self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, Value> {
    let constant = self.const_eval(gid)?;
    let value = self.const_to_value(constant.val)?;
    Ok(value)
}
|
|
|
|
|
2018-01-31 15:06:45 +01:00
|
|
|
pub fn const_eval(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
|
|
|
|
let param_env = if self.tcx.is_static(gid.instance.def_id()).is_some() {
|
2018-02-10 13:18:02 -05:00
|
|
|
ty::ParamEnv::reveal_all()
|
2018-01-31 15:06:45 +01:00
|
|
|
} else {
|
|
|
|
self.param_env
|
|
|
|
};
|
2018-06-03 03:01:06 +02:00
|
|
|
self.tcx.const_eval(param_env.and(gid)).map_err(|err| EvalErrorKind::ReferencedConstant(err).into())
|
2018-01-31 15:06:45 +01:00
|
|
|
}
|
|
|
|
|
2018-06-04 18:32:06 +02:00
|
|
|
/// Turn an arbitrary `Value` into a memory-backed `Place`.
///
/// `ByRef` values already have backing memory; immediate values are spilled
/// into a fresh allocation of `layout`'s size. If `variant` is given, the
/// resulting place carries a `DowncastVariant` projection.
pub fn allocate_place_for_value(
    &mut self,
    value: Value,
    layout: TyLayout<'tcx>,
    variant: Option<usize>,
) -> EvalResult<'tcx, Place> {
    let (ptr, align) = match value {
        // Already in memory: reuse the existing pointer and alignment.
        Value::ByRef(ptr, align) => (ptr, align),
        // Immediate: allocate and write the value out first.
        Value::ScalarPair(..) | Value::Scalar(_) => {
            let ptr = self.alloc_ptr(layout)?.into();
            self.write_value_to_ptr(value, ptr, layout.align, layout.ty)?;
            (ptr, layout.align)
        },
    };
    Ok(Place::Ptr {
        ptr: ptr.into(),
        align,
        extra: variant.map_or(PlaceExtra::None, PlaceExtra::DowncastVariant),
    })
}
|
|
|
|
|
2017-12-06 09:25:29 +01:00
|
|
|
/// Ensure `place` refers to actual memory, promoting an immediate local to a
/// fresh allocation if necessary.
///
/// After this call the returned place is always `Place::Ptr`. Promoted
/// locals are rewritten in the frame to `ByRef` so later reads and writes
/// observe the allocation.
pub fn force_allocation(&mut self, place: Place) -> EvalResult<'tcx, Place> {
    let new_place = match place {
        Place::Local { frame, local } => {
            match self.stack[frame].locals[local].access()? {
                // Local already lives in memory: just repackage as a pointer place.
                Value::ByRef(ptr, align) => {
                    Place::Ptr {
                        ptr: ptr.into(),
                        align,
                        extra: PlaceExtra::None,
                    }
                }
                // Immediate local: allocate memory of the local's layout,
                // point the local at it, then write the old value into it.
                val => {
                    let ty = self.stack[frame].mir.local_decls[local].ty;
                    let ty = self.monomorphize(ty, self.stack[frame].instance.substs);
                    let layout = self.layout_of(ty)?;
                    let ptr = self.alloc_ptr(layout)?;
                    self.stack[frame].locals[local] =
                        LocalValue::Live(Value::ByRef(ptr.into(), layout.align)); // it stays live

                    let place = Place::from_ptr(ptr, layout.align);
                    self.write_value(ValTy { value: val, ty }, place)?;
                    place
                }
            }
        }
        // Pointer places are already memory-backed.
        Place::Ptr { .. } => place,
    };
    Ok(new_place)
}
|
|
|
|
|
2016-11-04 15:49:51 +01:00
|
|
|
/// ensures this Value is not a ByRef
|
2017-12-14 11:36:28 +01:00
|
|
|
pub fn follow_by_ref_value(
|
2018-07-30 15:59:00 +02:00
|
|
|
&self,
|
2017-08-10 08:48:38 -07:00
|
|
|
value: Value,
|
|
|
|
ty: Ty<'tcx>,
|
|
|
|
) -> EvalResult<'tcx, Value> {
|
2016-09-19 02:19:31 -06:00
|
|
|
match value {
|
2017-12-16 23:34:43 +02:00
|
|
|
Value::ByRef(ptr, align) => {
|
2017-12-17 08:47:22 +02:00
|
|
|
self.read_value(ptr, align, ty)
|
2017-07-12 21:06:57 -07:00
|
|
|
}
|
2016-11-04 15:49:51 +01:00
|
|
|
other => Ok(other),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-05-22 19:30:16 +02:00
|
|
|
pub fn value_to_scalar(
|
2018-07-30 15:59:00 +02:00
|
|
|
&self,
|
2017-08-24 14:41:49 +02:00
|
|
|
ValTy { value, ty } : ValTy<'tcx>,
|
2018-05-20 23:43:16 +02:00
|
|
|
) -> EvalResult<'tcx, Scalar> {
|
2016-11-04 15:49:51 +01:00
|
|
|
match self.follow_by_ref_value(value, ty)? {
|
2017-08-10 08:48:38 -07:00
|
|
|
Value::ByRef { .. } => bug!("follow_by_ref_value can't result in `ByRef`"),
|
2016-09-19 02:19:31 -06:00
|
|
|
|
2018-07-30 15:59:00 +02:00
|
|
|
Value::Scalar(scalar) => scalar.unwrap_or_err(),
|
2016-10-15 19:48:30 -06:00
|
|
|
|
2018-05-22 19:30:16 +02:00
|
|
|
Value::ScalarPair(..) => bug!("value_to_scalar can't work with fat pointers"),
|
2016-09-19 02:19:31 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-05-21 00:30:00 +02:00
|
|
|
pub fn write_ptr(&mut self, dest: Place, val: Scalar, dest_ty: Ty<'tcx>) -> EvalResult<'tcx> {
|
2017-08-24 14:41:49 +02:00
|
|
|
let valty = ValTy {
|
|
|
|
value: val.to_value(),
|
|
|
|
ty: dest_ty,
|
|
|
|
};
|
|
|
|
self.write_value(valty, dest)
|
2017-07-04 14:33:15 +02:00
|
|
|
}
|
|
|
|
|
2018-05-22 19:30:16 +02:00
|
|
|
pub fn write_scalar(
|
2016-10-14 03:31:45 -06:00
|
|
|
&mut self,
|
2017-12-06 09:25:29 +01:00
|
|
|
dest: Place,
|
2018-07-24 18:28:53 +02:00
|
|
|
val: impl Into<ScalarMaybeUndef>,
|
2016-11-26 22:58:01 -08:00
|
|
|
dest_ty: Ty<'tcx>,
|
2017-02-04 13:09:10 -08:00
|
|
|
) -> EvalResult<'tcx> {
|
2017-08-24 14:41:49 +02:00
|
|
|
let valty = ValTy {
|
2018-07-24 18:28:53 +02:00
|
|
|
value: Value::Scalar(val.into()),
|
2017-08-24 14:41:49 +02:00
|
|
|
ty: dest_ty,
|
|
|
|
};
|
|
|
|
self.write_value(valty, dest)
|
2016-10-14 03:31:45 -06:00
|
|
|
}
|
|
|
|
|
2017-07-21 17:25:30 +02:00
|
|
|
/// Write `src_val` (of type `dest_ty`) into the place `dest`.
///
/// Pointer destinations are written through memory; local destinations go
/// through `write_value_possibly_by_val`, which decides whether the local
/// can stay an immediate or must be written into its backing allocation.
pub fn write_value(
    &mut self,
    ValTy { value: src_val, ty: dest_ty } : ValTy<'tcx>,
    dest: Place,
) -> EvalResult<'tcx> {
    //trace!("Writing {:?} to {:?} at type {:?}", src_val, dest, dest_ty);
    // Note that it is really important that the type here is the right one, and matches the type things are read at.
    // In case `src_val` is a `ScalarPair`, we don't do any magic here to handle padding properly, which is only
    // correct if we never look at this data with the wrong type.

    match dest {
        Place::Ptr { ptr, align, extra } => {
            // Writes through a pointer must target a plain place (no
            // len/vtable/downcast projections).
            assert_eq!(extra, PlaceExtra::None);
            self.write_value_to_ptr(src_val, ptr.unwrap_or_err()?, align, dest_ty)
        }

        Place::Local { frame, local } => {
            // The old value decides whether the local is memory-backed; see
            // write_value_possibly_by_val for the aliasing rationale.
            let old_val = self.stack[frame].locals[local].access()?;
            self.write_value_possibly_by_val(
                src_val,
                |this, val| this.stack[frame].set_local(local, val),
                old_val,
                dest_ty,
            )
        }
    }
}
|
|
|
|
|
|
|
|
// The cases here can be a bit subtle. Read carefully!
/// Core write primitive: store `src_val` into a destination described by the
/// `write_dest` callback, respecting whether the old destination value
/// (`old_dest_val`) was memory-backed.
fn write_value_possibly_by_val<F: FnOnce(&mut Self, Value) -> EvalResult<'tcx>>(
    &mut self,
    src_val: Value,
    write_dest: F,
    old_dest_val: Value,
    dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
    // FIXME: this should be a layout check, not underlying value
    if let Value::ByRef(dest_ptr, align) = old_dest_val {
        // If the value is already `ByRef` (that is, backed by an `Allocation`),
        // then we must write the new value into this allocation, because there may be
        // other pointers into the allocation. These other pointers are logically
        // pointers into the local variable, and must be able to observe the change.
        //
        // Thus, it would be an error to replace the `ByRef` with a `ByVal`, unless we
        // knew for certain that there were no outstanding pointers to this allocation.
        self.write_value_to_ptr(src_val, dest_ptr, align, dest_ty)?;
    } else if let Value::ByRef(src_ptr, align) = src_val {
        // If the value is not `ByRef`, then we know there are no pointers to it
        // and we can simply overwrite the `Value` in the locals array directly.
        //
        // In this specific case, where the source value is `ByRef`, we must duplicate
        // the allocation, because this is a by-value operation. It would be incorrect
        // if they referred to the same allocation, since then a change to one would
        // implicitly change the other.
        //
        // It is a valid optimization to attempt reading a primitive value out of the
        // source and write that into the destination without making an allocation, so
        // we do so here.
        if let Ok(Some(src_val)) = self.try_read_value(src_ptr, align, dest_ty) {
            // Fast path: the source fits in an immediate — no copy needed.
            write_dest(self, src_val)?;
        } else {
            // Slow path: clone the source allocation byte-for-byte.
            let layout = self.layout_of(dest_ty)?;
            let dest_ptr = self.alloc_ptr(layout)?.into();
            self.memory.copy(src_ptr, align.min(layout.align), dest_ptr, layout.align, layout.size, false)?;
            write_dest(self, Value::ByRef(dest_ptr, layout.align))?;
        }
    } else {
        // Finally, we have the simple case where neither source nor destination are
        // `ByRef`. We may simply copy the source value over the destination.
        write_dest(self, src_val)?;
    }
    Ok(())
}
|
|
|
|
|
2017-07-28 13:08:27 +02:00
|
|
|
/// Write `value` of type `dest_ty` into raw memory at `dest`.
///
/// Handles all three value shapes: memcpy for `ByRef`, a single scalar store
/// for `Scalar`, and two stores at layout-computed offsets for `ScalarPair`.
pub fn write_value_to_ptr(
    &mut self,
    value: Value,
    dest: Scalar,
    dest_align: Align,
    dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
    let layout = self.layout_of(dest_ty)?;
    trace!("write_value_to_ptr: {:#?}, {}, {:#?}", value, dest_ty, layout);
    match value {
        Value::ByRef(ptr, align) => {
            // Plain byte copy; alignments are clamped to the type's own
            // alignment on both sides.
            self.memory.copy(ptr, align.min(layout.align), dest, dest_align.min(layout.align), layout.size, false)
        }
        Value::Scalar(scalar) => {
            // Whether the store must sign-extend is determined by the
            // scalar ABI of the destination layout.
            let signed = match layout.abi {
                layout::Abi::Scalar(ref scal) => match scal.value {
                    layout::Primitive::Int(_, signed) => signed,
                    _ => false,
                },
                _ => false,
            };
            self.memory.write_scalar(dest, dest_align, scalar, layout.size, layout.align, signed)
        }
        Value::ScalarPair(a_val, b_val) => {
            trace!("write_value_to_ptr valpair: {:#?}", layout);
            let (a, b) = match layout.abi {
                layout::Abi::ScalarPair(ref a, ref b) => (&a.value, &b.value),
                _ => bug!("write_value_to_ptr: invalid ScalarPair layout: {:#?}", layout)
            };
            let (a_size, b_size) = (a.size(&self), b.size(&self));
            let (a_align, b_align) = (a.align(&self), b.align(&self));
            // First component sits at offset 0; the second is placed after
            // it, aligned up to its own alignment.
            let a_ptr = dest;
            let b_offset = a_size.abi_align(b_align);
            let b_ptr = dest.ptr_offset(b_offset, &self)?.into();
            // TODO: What about signedess?
            self.memory.write_scalar(a_ptr, dest_align, a_val, a_size, a_align, false)?;
            self.memory.write_scalar(b_ptr, dest_align, b_val, b_size, b_align, false)
        }
    }
}
|
|
|
|
|
2018-05-21 00:30:00 +02:00
|
|
|
pub fn read_value(&self, ptr: Scalar, align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
|
2017-12-17 08:47:22 +02:00
|
|
|
if let Some(val) = self.try_read_value(ptr, align, ty)? {
|
2016-11-28 20:22:21 -08:00
|
|
|
Ok(val)
|
|
|
|
} else {
|
|
|
|
bug!("primitive read failed for type: {:?}", ty);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-06-04 18:32:06 +02:00
|
|
|
/// Checks that `value` is a valid inhabitant of the scalar ABI `scalar`
/// (in particular its `valid_range`) for the type `ty`.
///
/// `path` is a human-readable description of where in the value tree we are,
/// used to build the error message on failure.  `size` is the scalar's size,
/// which the stored value must match exactly.
fn validate_scalar(
    &self,
    value: ScalarMaybeUndef,
    size: Size,
    scalar: &layout::Scalar,
    path: &str,
    ty: Ty,
) -> EvalResult<'tcx> {
    trace!("validate scalar: {:#?}, {:#?}, {:#?}, {}", value, size, scalar, ty);
    // NOTE: when `lo > hi` the valid range wraps around the end of the integer
    // domain (e.g. `NonZero*` types have range `1..=0`).
    let (lo, hi) = scalar.valid_range.clone().into_inner();

    // Undefined bytes are never a valid scalar.
    let value = match value {
        ScalarMaybeUndef::Scalar(scalar) => scalar,
        ScalarMaybeUndef::Undef => return validation_failure!("undefined bytes", path),
    };

    let bits = match value {
        Scalar::Bits { bits, size: value_size } => {
            assert_eq!(value_size as u64, size.bytes());
            bits
        },
        Scalar::Ptr(_) => {
            // A relocation (abstract pointer): we cannot know its runtime address,
            // so we can only accept it if the valid range covers every possible
            // pointer value; otherwise we must reject it conservatively.
            let ptr_size = self.memory.pointer_size();
            let ptr_max = u128::max_value() >> (128 - ptr_size.bits());
            return if lo > hi {
                // wrapping range
                if lo - hi == 1 {
                    // no gap, all values are ok
                    Ok(())
                } else if hi < ptr_max || lo > 1 {
                    let max = u128::max_value() >> (128 - size.bits());
                    validation_failure!(
                        "pointer",
                        path,
                        format!("something in the range {:?} or {:?}", 0..=lo, hi..=max)
                    )
                } else {
                    Ok(())
                }
            } else if hi < ptr_max || lo > 1 {
                validation_failure!(
                    "pointer",
                    path,
                    format!("something in the range {:?}", scalar.valid_range)
                )
            } else {
                Ok(())
            };
        },
    };

    // char gets a special treatment, because its number space is not contiguous so `TyLayout`
    // has no special checks for chars
    match ty.sty {
        ty::TyChar => {
            debug_assert_eq!(size.bytes(), 4);
            if ::std::char::from_u32(bits as u32).is_none() {
                return err!(InvalidChar(bits));
            }
        }
        _ => {},
    }

    use std::ops::RangeInclusive;
    let in_range = |bound: RangeInclusive<u128>| bound.contains(&bits);
    if lo > hi {
        // Wrapping range: the value is valid if it is in either of the two
        // non-wrapping halves `0..=hi` or `lo..=MAX`.
        if in_range(0..=hi) || in_range(lo..=u128::max_value()) {
            Ok(())
        } else {
            validation_failure!(
                bits,
                path,
                format!("something in the range {:?} or {:?}", ..=hi, lo..)
            )
        }
    } else {
        // Ordinary contiguous range check.
        if in_range(scalar.valid_range.clone()) {
            Ok(())
        } else {
            validation_failure!(
                bits,
                path,
                format!("something in the range {:?}", scalar.valid_range)
            )
        }
    }
}
|
2016-03-18 23:03:46 -06:00
|
|
|
|
2018-06-04 18:32:06 +02:00
|
|
|
/// This function checks the memory where `ptr` points to.
/// It will error if the bits at the destination do not match the ones described by the layout.
///
/// Validation is iterative rather than fully recursive across pointers: any
/// pointee that still needs checking is appended to `todo`, and `seen` records
/// (pointer, type) pairs so cycles through references terminate.  `path` is the
/// human-readable access path used in error messages.
pub fn validate_ptr_target(
    &self,
    ptr: Pointer,
    ptr_align: Align,
    mut layout: TyLayout<'tcx>,
    path: String,
    seen: &mut FxHashSet<(Pointer, Ty<'tcx>)>,
    todo: &mut Vec<(Pointer, Ty<'tcx>, String)>,
) -> EvalResult<'tcx> {
    self.memory.dump_alloc(ptr.alloc_id);
    trace!("validate_ptr_target: {:?}, {:#?}", ptr, layout);

    // For enums, first validate the discriminant/niche tag, then narrow `layout`
    // down to the active variant so the field checks below see the right fields.
    let variant;
    match layout.variants {
        layout::Variants::NicheFilling { niche: ref tag, .. } |
        layout::Variants::Tagged { ref tag, .. } => {
            let size = tag.value.size(self);
            // Field 0 of a tagged/niche layout is the tag itself.
            let (tag_value, tag_layout) = self.read_field(
                Value::ByRef(ptr.into(), ptr_align),
                None,
                mir::Field::new(0),
                layout,
            )?;
            let tag_value = match self.follow_by_ref_value(tag_value, tag_layout.ty)? {
                Value::Scalar(val) => val,
                _ => bug!("tag must be scalar"),
            };
            let path = format!("{}.TAG", path);
            self.validate_scalar(tag_value, size, tag, &path, tag_layout.ty)?;
            let variant_index = self.read_discriminant_as_variant_index(
                Place::from_ptr(ptr, ptr_align),
                layout,
            )?;
            variant = variant_index;
            layout = layout.for_variant(self, variant_index);
            trace!("variant layout: {:#?}", layout);
        },
        layout::Variants::Single { index } => variant = index,
    }
    match layout.fields {
        // primitives are unions with zero fields
        layout::FieldPlacement::Union(0) => {
            match layout.abi {
                // nothing to do, whatever the pointer points to, it is never going to be read
                layout::Abi::Uninhabited => validation_failure!("a value of an uninhabited type", path),
                // check that the scalar is a valid pointer or that its bit range matches the
                // expectation.
                layout::Abi::Scalar(ref scalar) => {
                    let size = scalar.value.size(self);
                    let value = self.memory.read_scalar(ptr, ptr_align, size)?;
                    self.validate_scalar(value, size, scalar, &path, layout.ty)?;
                    if scalar.value == Primitive::Pointer {
                        // ignore integer pointers, we can't reason about the final hardware
                        if let Scalar::Ptr(ptr) = value.unwrap_or_err()? {
                            let alloc_kind = self.tcx.alloc_map.lock().get(ptr.alloc_id);
                            if let Some(AllocType::Static(did)) = alloc_kind {
                                // statics from other crates are already checked
                                // extern statics should not be validated as they have no body
                                if !did.is_local() || self.tcx.is_foreign_item(did) {
                                    return Ok(());
                                }
                            }
                            if let Some(tam) = layout.ty.builtin_deref(false) {
                                // we have not encountered this pointer+layout combination before
                                if seen.insert((ptr, tam.ty)) {
                                    todo.push((ptr, tam.ty, format!("(*{})", path)))
                                }
                            }
                        }
                    }
                    Ok(())
                },
                _ => bug!("bad abi for FieldPlacement::Union(0): {:#?}", layout.abi),
            }
        }
        layout::FieldPlacement::Union(_) => {
            // We can't check unions, their bits are allowed to be anything.
            // The fields don't need to correspond to any bit pattern of the union's fields.
            // See https://github.com/rust-lang/rust/issues/32836#issuecomment-406875389
            Ok(())
        },
        layout::FieldPlacement::Array { stride, count } => {
            // All elements share one layout; validate each at its strided offset.
            let elem_layout = layout.field(self, 0)?;
            for i in 0..count {
                let mut path = path.clone();
                self.write_field_name(&mut path, layout.ty, i as usize, variant).unwrap();
                self.validate_ptr_target(ptr.offset(stride * i, self)?, ptr_align, elem_layout, path, seen, todo)?;
            }
            Ok(())
        },
        layout::FieldPlacement::Arbitrary { ref offsets, .. } => {

            // check length field and vtable field
            match layout.ty.builtin_deref(false).map(|tam| &tam.ty.sty) {
                | Some(ty::TyStr)
                | Some(ty::TySlice(_)) => {
                    // Fat pointer to a slice/str: field 1 is the length.
                    let (len, len_layout) = self.read_field(
                        Value::ByRef(ptr.into(), ptr_align),
                        None,
                        mir::Field::new(1),
                        layout,
                    )?;
                    let len = self.value_to_scalar(ValTy { value: len, ty: len_layout.ty })?;
                    if len.to_bits(len_layout.size).is_err() {
                        return validation_failure!("length is not a valid integer", path);
                    }
                },
                Some(ty::TyDynamic(..)) => {
                    // Fat pointer to a trait object: field 1 is the vtable pointer.
                    let (vtable, vtable_layout) = self.read_field(
                        Value::ByRef(ptr.into(), ptr_align),
                        None,
                        mir::Field::new(1),
                        layout,
                    )?;
                    let vtable = self.value_to_scalar(ValTy { value: vtable, ty: vtable_layout.ty })?;
                    if vtable.to_ptr().is_err() {
                        return validation_failure!("vtable address is not a pointer", path);
                    }
                }
                _ => {},
            }
            // Validate every field at its recorded offset.
            for (i, &offset) in offsets.iter().enumerate() {
                let field_layout = layout.field(self, i)?;
                let mut path = path.clone();
                self.write_field_name(&mut path, layout.ty, i, variant).unwrap();
                self.validate_ptr_target(ptr.offset(offset, self)?, ptr_align, field_layout, path, seen, todo)?;
            }
            Ok(())
        }
    }
}
|
|
|
|
|
2018-07-30 15:59:00 +02:00
|
|
|
pub fn try_read_by_ref(&self, mut val: Value, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
|
2018-05-20 23:46:30 +02:00
|
|
|
// Convert to ByVal or ScalarPair if possible
|
2018-05-17 16:14:30 +02:00
|
|
|
if let Value::ByRef(ptr, align) = val {
|
|
|
|
if let Some(read_val) = self.try_read_value(ptr, align, ty)? {
|
|
|
|
val = read_val;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Ok(val)
|
|
|
|
}
|
|
|
|
|
2018-07-30 15:59:00 +02:00
|
|
|
/// Attempts to read a value of type `ty` from memory as an immediate.
///
/// Returns `Ok(Some(..))` for layouts with a `Scalar` or `ScalarPair` ABI
/// (and for zero-sized types), `Ok(None)` for anything that must stay in
/// memory, and an error on misalignment or invalid memory access.
pub fn try_read_value(&self, ptr: Scalar, ptr_align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<Value>> {
    let layout = self.layout_of(ty)?;
    self.memory.check_align(ptr, ptr_align)?;

    // ZSTs carry no data; synthesize a zero-sized scalar without touching memory.
    if layout.size.bytes() == 0 {
        return Ok(Some(Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits: 0, size: 0 }))));
    }

    let ptr = ptr.to_ptr()?;

    match layout.abi {
        layout::Abi::Scalar(..) => {
            let scalar = self.memory.read_scalar(ptr, ptr_align, layout.size)?;
            Ok(Some(Value::Scalar(scalar)))
        }
        layout::Abi::ScalarPair(ref a, ref b) => {
            let (a, b) = (&a.value, &b.value);
            let (a_size, b_size) = (a.size(self), b.size(self));
            let a_ptr = ptr;
            // Second component sits at the first offset past `a` that is
            // aligned for `b`.
            let b_offset = a_size.abi_align(b.align(self));
            let b_ptr = ptr.offset(b_offset, self)?.into();
            let a_val = self.memory.read_scalar(a_ptr, ptr_align, a_size)?;
            let b_val = self.memory.read_scalar(b_ptr, ptr_align, b_size)?;
            Ok(Some(Value::ScalarPair(a_val, b_val)))
        }
        // Aggregates and other ABIs cannot be represented as an immediate.
        _ => Ok(None),
    }
}
|
|
|
|
|
2018-01-16 09:31:48 +01:00
|
|
|
/// Returns a shared reference to the innermost (currently executing) stack frame.
///
/// Panics if the call stack is empty.
pub fn frame(&self) -> &Frame<'mir, 'tcx> {
    match self.stack.last() {
        Some(frame) => frame,
        None => panic!("no call frames exist"),
    }
}
|
2016-03-14 20:39:51 -06:00
|
|
|
|
2018-01-16 09:31:48 +01:00
|
|
|
/// Returns a mutable reference to the innermost (currently executing) stack frame.
///
/// Panics if the call stack is empty.
pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx> {
    match self.stack.last_mut() {
        Some(frame) => frame,
        None => panic!("no call frames exist"),
    }
}
|
2015-11-12 15:50:58 -06:00
|
|
|
|
2018-01-16 09:31:48 +01:00
|
|
|
/// Returns the MIR body of the function executing in the innermost frame.
///
/// Panics (via `frame()`) if the call stack is empty.
pub(super) fn mir(&self) -> &'mir mir::Mir<'tcx> {
    self.frame().mir
}
|
2016-06-08 11:11:08 +02:00
|
|
|
|
2017-12-14 11:36:28 +01:00
|
|
|
pub fn substs(&self) -> &'tcx Substs<'tcx> {
|
2018-06-22 12:36:54 -07:00
|
|
|
if let Some(frame) = self.stack.last() {
|
2017-09-19 09:08:48 +02:00
|
|
|
frame.instance.substs
|
|
|
|
} else {
|
|
|
|
Substs::empty()
|
|
|
|
}
|
2016-06-08 11:11:08 +02:00
|
|
|
}
|
2016-03-14 21:18:39 -06:00
|
|
|
|
2017-02-03 15:47:23 +01:00
|
|
|
/// Performs an unsizing coercion of a pointer value: writes `src` (of pointer
/// type `src_ty`, pointee `sty`) into `dest` as a value of pointer type
/// `dest_ty` (pointee `dty`), attaching length or vtable metadata as needed.
fn unsize_into_ptr(
    &mut self,
    src: Value,
    src_ty: Ty<'tcx>,
    dest: Place,
    dest_ty: Ty<'tcx>,
    sty: Ty<'tcx>,
    dty: Ty<'tcx>,
) -> EvalResult<'tcx> {
    // A<Struct> -> A<Trait> conversion
    let (src_pointee_ty, dest_pointee_ty) = self.tcx.struct_lockstep_tails(sty, dty);

    match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
        (&ty::TyArray(_, length), &ty::TySlice(_)) => {
            // `&[T; N]` -> `&[T]`: the pointer gains the array length as metadata.
            let ptr = self.into_ptr(src)?;
            // u64 cast is from usize to u64, which is always good
            let valty = ValTy {
                value: ptr.to_value_with_len(length.unwrap_usize(self.tcx.tcx), self.tcx.tcx),
                ty: dest_ty,
            };
            self.write_value(valty, dest)
        }
        (&ty::TyDynamic(..), &ty::TyDynamic(..)) => {
            // For now, upcasts are limited to changes in marker
            // traits, and hence never actually require an actual
            // change to the vtable.
            let valty = ValTy {
                value: src,
                ty: dest_ty,
            };
            self.write_value(valty, dest)
        }
        (_, &ty::TyDynamic(ref data, _)) => {
            // Concrete pointee -> trait object: build the vtable for the
            // principal trait and attach it as pointer metadata.
            let trait_ref = data.principal().unwrap().with_self_ty(
                *self.tcx,
                src_pointee_ty,
            );
            let trait_ref = self.tcx.erase_regions(&trait_ref);
            let vtable = self.get_vtable(src_pointee_ty, trait_ref)?;
            let ptr = self.into_ptr(src)?;
            let valty = ValTy {
                value: ptr.to_value_with_vtable(vtable),
                ty: dest_ty,
            };
            self.write_value(valty, dest)
        }

        _ => bug!("invalid unsizing {:?} -> {:?}", src_ty, dest_ty),
    }
}
|
|
|
|
|
2018-07-18 11:39:17 +02:00
|
|
|
/// Performs an unsizing coercion of `src` into the place `dst`, e.g.
/// `&[T; N]` -> `&[T]`, `Box<T>` -> `Box<Trait>`, or the generic-struct case
/// `Arc<T>` -> `Arc<Trait>` where a pointer field is widened in place.
crate fn unsize_into(
    &mut self,
    src: Value,
    src_layout: TyLayout<'tcx>,
    dst: Place,
    dst_layout: TyLayout<'tcx>,
) -> EvalResult<'tcx> {
    match (&src_layout.ty.sty, &dst_layout.ty.sty) {
        // Plain pointer-to-pointer coercions are handled by `unsize_into_ptr`.
        (&ty::TyRef(_, s, _), &ty::TyRef(_, d, _)) |
        (&ty::TyRef(_, s, _), &ty::TyRawPtr(TypeAndMut { ty: d, .. })) |
        (&ty::TyRawPtr(TypeAndMut { ty: s, .. }),
        &ty::TyRawPtr(TypeAndMut { ty: d, .. })) => {
            self.unsize_into_ptr(src, src_layout.ty, dst, dst_layout.ty, s, d)
        }
        (&ty::TyAdt(def_a, _), &ty::TyAdt(def_b, _)) => {
            assert_eq!(def_a, def_b);
            // `Box` is an ADT but behaves like a pointer for unsizing.
            if def_a.is_box() || def_b.is_box() {
                if !def_a.is_box() || !def_b.is_box() {
                    bug!("invalid unsizing between {:?} -> {:?}", src_layout, dst_layout);
                }
                return self.unsize_into_ptr(
                    src,
                    src_layout.ty,
                    dst,
                    dst_layout.ty,
                    src_layout.ty.boxed_ty(),
                    dst_layout.ty.boxed_ty(),
                );
            }

            // unsizing of generic struct with pointer fields
            // Example: `Arc<T>` -> `Arc<Trait>`
            // here we need to increase the size of every &T thin ptr field to a fat ptr
            for i in 0..src_layout.fields.count() {
                let (dst_f_place, dst_field) =
                    self.place_field(dst, mir::Field::new(i), dst_layout)?;
                if dst_field.is_zst() {
                    continue;
                }
                // Obtain the source field either by projecting into memory
                // (`ByRef`) or, for immediates, by reusing the whole value
                // (which then must coincide with its single non-ZST field).
                let (src_f_value, src_field) = match src {
                    Value::ByRef(ptr, align) => {
                        let src_place = Place::from_scalar_ptr(ptr.into(), align);
                        let (src_f_place, src_field) =
                            self.place_field(src_place, mir::Field::new(i), src_layout)?;
                        (self.read_place(src_f_place)?, src_field)
                    }
                    Value::Scalar(_) | Value::ScalarPair(..) => {
                        let src_field = src_layout.field(&self, i)?;
                        assert_eq!(src_layout.fields.offset(i).bytes(), 0);
                        assert_eq!(src_field.size, src_layout.size);
                        (src, src_field)
                    }
                };
                if src_field.ty == dst_field.ty {
                    // Same type on both sides: plain copy.
                    self.write_value(ValTy {
                        value: src_f_value,
                        ty: src_field.ty,
                    }, dst_f_place)?;
                } else {
                    // Differing types: this is the field being unsized; recurse.
                    self.unsize_into(src_f_value, src_field, dst_f_place, dst_field)?;
                }
            }
            Ok(())
        }
        _ => {
            bug!(
                "unsize_into: invalid conversion: {:?} -> {:?}",
                src_layout,
                dst_layout
            )
        }
    }
}
|
2016-10-16 17:18:56 -06:00
|
|
|
|
2017-12-06 09:25:29 +01:00
|
|
|
/// Dumps the current state of `place` (a local or a raw pointer place) to the
/// trace log, together with the allocations backing any pointers it contains.
/// Pure debugging aid: does nothing unless trace logging is enabled.
pub fn dump_local(&self, place: Place) {
    // Debug output
    if !log_enabled!(::log::Level::Trace) {
        return;
    }
    match place {
        Place::Local { frame, local } => {
            // Collect the alloc ids we encounter so they can all be dumped at the end.
            let mut allocs = Vec::new();
            let mut msg = format!("{:?}", local);
            if frame != self.cur_frame() {
                write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
            }
            write!(msg, ":").unwrap();

            match self.stack[frame].locals[local].access() {
                Err(err) => {
                    // A dead local is expected and reported; any other access
                    // failure here is a bug in the interpreter.
                    if let EvalErrorKind::DeadLocal = err.kind {
                        write!(msg, " is dead").unwrap();
                    } else {
                        panic!("Failed to access local: {:?}", err);
                    }
                }
                Ok(Value::ByRef(ptr, align)) => {
                    match ptr {
                        Scalar::Ptr(ptr) => {
                            write!(msg, " by align({}) ref:", align.abi()).unwrap();
                            allocs.push(ptr.alloc_id);
                        }
                        ptr => write!(msg, " integral by ref: {:?}", ptr).unwrap(),
                    }
                }
                Ok(Value::Scalar(val)) => {
                    write!(msg, " {:?}", val).unwrap();
                    if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val {
                        allocs.push(ptr.alloc_id);
                    }
                }
                Ok(Value::ScalarPair(val1, val2)) => {
                    write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
                    if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val1 {
                        allocs.push(ptr.alloc_id);
                    }
                    if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val2 {
                        allocs.push(ptr.alloc_id);
                    }
                }
            }

            trace!("{}", msg);
            self.memory.dump_allocs(allocs);
        }
        Place::Ptr { ptr, align, .. } => {
            match ptr {
                ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) => {
                    trace!("by align({}) ref:", align.abi());
                    self.memory.dump_alloc(ptr.alloc_id);
                }
                ptr => trace!(" integral by ref: {:?}", ptr),
            }
        }
    }
}
|
2016-11-03 12:52:13 +01:00
|
|
|
|
2018-02-06 14:04:35 +01:00
|
|
|
/// Builds a stacktrace (innermost frame first) from the current call stack,
/// deduplicating consecutive frames that share a span and skipping frames
/// whose span equals `explicit_span`.  Returns the frames plus the overall
/// span of the evaluation (`self.tcx.span`).
pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> (Vec<FrameInfo>, Span) {
    let mut last_span = None;
    let mut frames = Vec::new();
    // skip 1 because the last frame is just the environment of the constant
    for &Frame { instance, span, mir, block, stmt, .. } in self.stack().iter().skip(1).rev() {
        // make sure we don't emit frames that are duplicates of the previous
        if explicit_span == Some(span) {
            last_span = Some(span);
            continue;
        }
        if let Some(last) = last_span {
            if last == span {
                continue;
            }
        } else {
            last_span = Some(span);
        }
        // Closures have no useful path; label them generically.
        let location = if self.tcx.def_key(instance.def_id()).disambiguated_data.data == DefPathData::ClosureExpr {
            "closure".to_owned()
        } else {
            instance.to_string()
        };
        // `stmt` past the end of the statement list means we are at the terminator.
        let block = &mir.basic_blocks()[block];
        let source_info = if stmt < block.statements.len() {
            block.statements[stmt].source_info
        } else {
            block.terminator().source_info
        };
        // Lint root is only available for local (non-cross-crate) MIR.
        let lint_root = match mir.source_scope_local_data {
            mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
            mir::ClearCrossCrate::Clear => None,
        };
        frames.push(FrameInfo { span, location, lint_root });
    }
    trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
    (frames, self.tcx.span)
}
|
2018-02-21 22:02:52 +01:00
|
|
|
|
2018-07-24 18:28:53 +02:00
|
|
|
/// Sign-extends `value` from the size of `ty` to the full `u128` width.
/// Thin convenience wrapper around the free function `super::sign_extend`.
pub fn sign_extend(&self, value: u128, ty: TyLayout<'_>) -> u128 {
    super::sign_extend(value, ty)
}
|
|
|
|
|
2018-07-24 18:28:53 +02:00
|
|
|
/// Truncates `value` to the size of `ty`, zeroing the high bits.
/// Thin convenience wrapper around the free function `super::truncate`.
pub fn truncate(&self, value: u128, ty: TyLayout<'_>) -> u128 {
    super::truncate(value, ty)
}
|
2018-06-04 18:32:06 +02:00
|
|
|
|
|
|
|
/// Appends a human-readable name for field `i` of `ty` (within `variant`, for
/// enums) to `s`; used to build the access paths shown in validation errors.
///
/// Bugs out for types that have no addressable fields (primitives, fn pointers,
/// trait objects themselves, etc.).
fn write_field_name(&self, s: &mut String, ty: Ty<'tcx>, i: usize, variant: usize) -> ::std::fmt::Result {
    match ty.sty {
        ty::TyBool |
        ty::TyChar |
        ty::TyInt(_) |
        ty::TyUint(_) |
        ty::TyFloat(_) |
        ty::TyFnPtr(_) |
        ty::TyNever |
        ty::TyFnDef(..) |
        ty::TyGeneratorWitness(..) |
        ty::TyForeign(..) |
        ty::TyDynamic(..) => {
            bug!("field_name({:?}): not applicable", ty)
        }

        // Potentially-fat pointers.
        ty::TyRef(_, pointee, _) |
        ty::TyRawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
            assert!(i < 2);

            // Reuse the fat *T type as its own thin pointer data field.
            // This provides information about e.g. DST struct pointees
            // (which may have no non-DST form), and will work as long
            // as the `Abi` or `FieldPlacement` is checked by users.
            if i == 0 {
                return write!(s, ".data_ptr");
            }

            // Field 1 is the metadata; its name depends on the unsized tail.
            match self.tcx.struct_tail(pointee).sty {
                ty::TySlice(_) |
                ty::TyStr => write!(s, ".len"),
                ty::TyDynamic(..) => write!(s, ".vtable_ptr"),
                _ => bug!("field_name({:?}): not applicable", ty)
            }
        }

        // Arrays and slices.
        ty::TyArray(_, _) |
        ty::TySlice(_) |
        ty::TyStr => write!(s, "[{}]", i),

        // generators and closures.
        ty::TyClosure(def_id, _) | ty::TyGenerator(def_id, _, _) => {
            let node_id = self.tcx.hir.as_local_node_id(def_id).unwrap();
            let freevar = self.tcx.with_freevars(node_id, |fv| fv[i]);
            write!(s, ".upvar({})", self.tcx.hir.name(freevar.var_id()))
        }

        ty::TyTuple(_) => write!(s, ".{}", i),

        // enums
        ty::TyAdt(def, ..) if def.is_enum() => {
            let variant = &def.variants[variant];
            write!(s, ".{}::{}", variant.name, variant.fields[i].ident)
        }

        // other ADTs.
        ty::TyAdt(def, _) => write!(s, ".{}", def.non_enum_variant().fields[i].ident),

        ty::TyProjection(_) | ty::TyAnon(..) | ty::TyParam(_) |
        ty::TyInfer(_) | ty::TyError => {
            bug!("write_field_name: unexpected type `{}`", ty)
        }
    }
}
|
2016-03-20 22:59:13 -06:00
|
|
|
|
2018-07-24 18:28:53 +02:00
|
|
|
/// Marks `local` as live, initializing it with an uninitialized value of its
/// declared type, and returns whatever was stored in the slot before.
pub fn storage_live(&mut self, local: mir::Local) -> EvalResult<'tcx, LocalValue> {
    trace!("{:?} is now live", local);

    let ty = self.frame().mir.local_decls[local].ty;
    let init = self.init_value(ty)?;
    // StorageLive *always* kills the value that's currently stored
    Ok(mem::replace(&mut self.frame_mut().locals[local], LocalValue::Live(init)))
}
|
|
|
|
|
2018-07-24 18:28:53 +02:00
|
|
|
fn init_value(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
|
|
|
|
let ty = self.monomorphize(ty, self.substs());
|
|
|
|
let layout = self.layout_of(ty)?;
|
|
|
|
Ok(match layout.abi {
|
|
|
|
layout::Abi::Scalar(..) => Value::Scalar(ScalarMaybeUndef::Undef),
|
|
|
|
layout::Abi::ScalarPair(..) => Value::ScalarPair(
|
|
|
|
ScalarMaybeUndef::Undef,
|
|
|
|
ScalarMaybeUndef::Undef,
|
|
|
|
),
|
2018-07-30 15:59:00 +02:00
|
|
|
_ => Value::ByRef(self.alloc_ptr(layout)?.into(), layout.align),
|
2018-07-24 18:28:53 +02:00
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<'mir, 'tcx> Frame<'mir, 'tcx> {
|
2017-06-27 13:36:41 +02:00
|
|
|
fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> {
|
2018-03-23 08:31:13 +01:00
|
|
|
match self.locals[local] {
|
2018-07-24 18:28:53 +02:00
|
|
|
LocalValue::Dead => err!(DeadLocal),
|
|
|
|
LocalValue::Live(ref mut local) => {
|
2017-06-27 13:36:41 +02:00
|
|
|
*local = value;
|
|
|
|
Ok(())
|
2017-05-31 17:41:33 -07:00
|
|
|
}
|
2017-02-10 16:14:59 +01:00
|
|
|
}
|
2017-05-31 17:41:33 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns the old value of the local
|
2018-07-24 18:28:53 +02:00
|
|
|
pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue {
|
2017-05-31 17:41:33 -07:00
|
|
|
trace!("{:?} is now dead", local);
|
|
|
|
|
2018-07-24 18:28:53 +02:00
|
|
|
mem::replace(&mut self.locals[local], LocalValue::Dead)
|
2016-10-15 19:48:30 -06:00
|
|
|
}
|
|
|
|
}
|