//! This module contains the `InterpCx` methods for executing a single step of the interpreter.
//!
//! The main entry point is the `step` method.

use rustc_middle::mir::interpret::{InterpResult, Scalar};
use rustc_middle::mir::{self, NonDivergingIntrinsic};
use rustc_middle::ty::layout::LayoutOf;

use super::{InterpCx, Machine};

/// Classify whether an operator is "left-homogeneous", i.e., the LHS has the
/// same type as the result.
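/// For example, `Shl` is left-homogeneous: in `u64 << u8`, the result has the type of the
/// LHS (`u64`), while the RHS may be a different integer type.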
#[inline]
fn binop_left_homogeneous(op: mir::BinOp) -> bool {
    use rustc_middle::mir::BinOp::*;
    match op {
        Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Offset | Shl | Shr => true,
        Eq | Ne | Lt | Le | Gt | Ge => false,
    }
}
/// Classify whether an operator is "right-homogeneous", i.e., the RHS has the
/// same type as the LHS.
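/// For example, `Eq` is right-homogeneous but not left-homogeneous: both operands of
/// `a == b` must have the same type, while the result is `bool`.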
#[inline]
fn binop_right_homogeneous(op: mir::BinOp) -> bool {
    use rustc_middle::mir::BinOp::*;
    match op {
        Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Eq | Ne | Lt | Le | Gt | Ge => true,
        Offset | Shl | Shr => false,
    }
}

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    pub fn run(&mut self) -> InterpResult<'tcx> {
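        // Step until `step` reports that there is nothing left to do (i.e., the
        // stack is empty) or an error propagates out of the loop.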
        while self.step()? {}
        Ok(())
    }

    /// Returns `true` as long as there are more things to do.
    ///
    /// This is used by [priroda](https://github.com/oli-obk/priroda).
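    ///
    /// Callers that just want to run to completion can use [`run`](Self::run) instead, which
    /// simply loops over `step` until the stack is empty.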
    ///
    /// This is marked `#[inline(always)]` to work around adversarial codegen when `opt-level = 3`.
    #[inline(always)]
    pub fn step(&mut self) -> InterpResult<'tcx, bool> {
        if self.stack().is_empty() {
            return Ok(false);
        }

        let Ok(loc) = self.frame().loc else {
            // We are unwinding and this fn has no cleanup code.
            // Just go on unwinding.
            trace!("unwinding: skipping frame");
            self.pop_stack_frame(/* unwinding */ true)?;
            return Ok(true);
        };
        let basic_block = &self.body().basic_blocks[loc.block];

        if let Some(stmt) = basic_block.statements.get(loc.statement_index) {
            let old_frames = self.frame_idx();
            self.statement(stmt)?;
            // Make sure we are not updating `statement_index` of the wrong frame.
            assert_eq!(old_frames, self.frame_idx());
            // Advance the program counter.
            self.frame_mut().loc.as_mut().unwrap().statement_index += 1;
            return Ok(true);
        }

        M::before_terminator(self)?;

        let terminator = basic_block.terminator();
        self.terminator(terminator)?;
        Ok(true)
    }

    /// Runs the interpretation logic for the given `mir::Statement` at the current frame and
    /// statement counter.
    ///
    /// This does NOT move the statement counter forward; the caller has to do that!
    pub fn statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> {
        info!("{:?}", stmt);

        use rustc_middle::mir::StatementKind::*;

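        // Dispatch on the statement kind. Statements never transfer control flow;
        // anything that can (calls, asserts, gotos) is a terminator, handled separately.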
        match &stmt.kind {
            Assign(box (place, rvalue)) => self.eval_rvalue_into_place(rvalue, *place)?,

            SetDiscriminant { place, variant_index } => {
                let dest = self.eval_place(**place)?;
                self.write_discriminant(*variant_index, &dest)?;
            }

            Deinit(place) => {
                let dest = self.eval_place(**place)?;
                self.write_uninit(&dest)?;
            }

            // Mark locals as alive
            StorageLive(local) => {
                self.storage_live(*local)?;
            }

            // Mark locals as dead
            StorageDead(local) => {
                self.storage_dead(*local)?;
            }

            // No dynamic semantics attached to `FakeRead`; the MIR
            // interpreter is solely intended for borrowck'ed code.
            FakeRead(..) => {}

            // Stacked Borrows.
            Retag(kind, place) => {
                let dest = self.eval_place(**place)?;
                M::retag(self, *kind, &dest)?;
            }

            Intrinsic(box NonDivergingIntrinsic::Assume(op)) => {
                let op = self.eval_operand(op, None)?;
                let cond = self.read_scalar(&op)?.to_bool()?;
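                // `assume` is a promise to the optimizer that the condition holds;
                // assuming a condition that is actually `false` is immediate UB.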
                if !cond {
                    throw_ub_format!("`assume` called with `false`");
                }
            }
            Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(mir::CopyNonOverlapping {
                ref count,
                ref src,
                ref dst,
            })) => {
                let src = self.eval_operand(src, None)?;
                let dst = self.eval_operand(dst, None)?;
                let count = self.eval_operand(count, None)?;
                self.copy_intrinsic(&src, &dst, &count, /* nonoverlapping */ true)?;
            }

            // Statements we do not track.
            AscribeUserType(..) => {}

            // Currently, Miri discards Coverage statements. Coverage statements are only injected
            // via an optional compile-time MIR pass and have no side effects. Since Coverage
            // statements don't exist at the source level, it is safe for Miri to ignore them, even
            // for undefined behavior (UB) checks.
            //
            // A coverage counter inside a const expression (for example, a counter injected in a
            // const function) is discarded when the const is evaluated at compile time. Whether
            // this should change, and/or how to implement a const-eval counter, is the subject of
            // the following issue:
            //
            // FIXME(#73156): Handle source code coverage in const eval
            Coverage(..) => {}

            // Defined to do nothing. These are added by optimization passes, to avoid changing the
            // size of MIR constantly.
            Nop => {}
        }

        Ok(())
    }

    /// Evaluate an assignment statement.
    ///
    /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
    /// type writes its results directly into the memory specified by the place.
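    /// For example, a MIR assignment like `_2 = Add(move _1, const 5_i32)` is handled here by
    /// computing the sum and writing it directly into the memory backing `_2`.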
    pub fn eval_rvalue_into_place(
        &mut self,
        rvalue: &mir::Rvalue<'tcx>,
        place: mir::Place<'tcx>,
    ) -> InterpResult<'tcx> {
        let dest = self.eval_place(place)?;
        // FIXME: ensure some kind of non-aliasing between LHS and RHS?
        // Also see https://github.com/rust-lang/rust/issues/68364.

        use rustc_middle::mir::Rvalue::*;
        match *rvalue {
            ThreadLocalRef(did) => {
                let ptr = M::thread_local_static_base_pointer(self, did)?;
                self.write_pointer(ptr, &dest)?;
            }

            Use(ref operand) => {
                // Avoid recomputing the layout
                let op = self.eval_operand(operand, Some(dest.layout))?;
                self.copy_op(&op, &dest, /*allow_transmute*/ false)?;
            }

            CopyForDeref(ref place) => {
                let op = self.eval_place_to_op(*place, Some(dest.layout))?;
                self.copy_op(&op, &dest, /*allow_transmute*/ false)?;
            }

            BinaryOp(bin_op, box (ref left, ref right)) => {
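                // The homogeneity classifications above serve as layout hints: when an
                // operand's type must match a layout we already know, `eval_operand`
                // can reuse that layout instead of recomputing it.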
                let layout = binop_left_homogeneous(bin_op).then_some(dest.layout);
                let left = self.read_immediate(&self.eval_operand(left, layout)?)?;
                let layout = binop_right_homogeneous(bin_op).then_some(left.layout);
                let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
                self.binop_ignore_overflow(bin_op, &left, &right, &dest)?;
            }

            CheckedBinaryOp(bin_op, box (ref left, ref right)) => {
                // Due to the extra boolean in the result, we can never reuse the `dest.layout`.
                let left = self.read_immediate(&self.eval_operand(left, None)?)?;
                let layout = binop_right_homogeneous(bin_op).then_some(left.layout);
                let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
                self.binop_with_overflow(
                    bin_op, /*force_overflow_checks*/ false, &left, &right, &dest,
                )?;
            }

            UnaryOp(un_op, ref operand) => {
                // The operand always has the same type as the result.
                let val = self.read_immediate(&self.eval_operand(operand, Some(dest.layout))?)?;
                let val = self.unary_op(un_op, &val)?;
                assert_eq!(val.layout, dest.layout, "layout mismatch for result of {:?}", un_op);
                self.write_immediate(*val, &dest)?;
            }

            Aggregate(box ref kind, ref operands) => {
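                // Only array aggregates are expected here; all other aggregate kinds
                // have been lowered away by earlier MIR passes.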
                assert!(matches!(kind, mir::AggregateKind::Array(..)));

                for (field_index, operand) in operands.iter().enumerate() {
                    let op = self.eval_operand(operand, None)?;
                    let field_dest = self.place_field(&dest, field_index)?;
                    self.copy_op(&op, &field_dest, /*allow_transmute*/ false)?;
                }
            }

            Repeat(ref operand, _) => {
                let src = self.eval_operand(operand, None)?;
                assert!(!src.layout.is_unsized());
                let dest = self.force_allocation(&dest)?;
                let length = dest.len(self)?;

                if length == 0 {
                    // Nothing to copy... but let's still make sure that `dest` as a place is valid.
                    self.get_place_alloc_mut(&dest)?;
                } else {
                    // Write the src to the first element.
                    let first = self.mplace_field(&dest, 0)?;
                    self.copy_op(&src, &first.into(), /*allow_transmute*/ false)?;

                    // This is performance-sensitive code for big static/const arrays! So we
                    // avoid writing each operand individually and instead just make many copies
                    // of the first element.
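                    // E.g., for `[0u8; 4096]` we write the `0` once and then issue a
                    // single bulk operation that duplicates that first element 4095 times.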
                    let elem_size = first.layout.size;
                    let first_ptr = first.ptr;
                    let rest_ptr = first_ptr.offset(elem_size, self)?;
                    // For the alignment of `rest_ptr`, we crucially do *not* use `first.align` as
                    // that place might be more aligned than its type mandates (a `u8` array could
                    // be 4-aligned if it sits at the right spot in a struct). Instead we use
                    // `first.layout.align`, i.e., the alignment given by the type.
                    self.mem_copy_repeatedly(
                        first_ptr,
                        first.align,
                        rest_ptr,
                        first.layout.align.abi,
                        elem_size,
                        length - 1,
                        /*nonoverlapping:*/ true,
                    )?;
                }
            }

            Len(place) => {
                let src = self.eval_place(place)?;
                let op = self.place_to_op(&src)?;
                let len = op.len(self)?;
                self.write_scalar(Scalar::from_machine_usize(len, self), &dest)?;
            }

            AddressOf(_, place) | Ref(_, _, place) => {
                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                self.write_immediate(place.to_ref(self), &dest)?;
            }

            NullaryOp(null_op, ty) => {
                let ty = self.subst_from_current_frame_and_normalize_erasing_regions(ty)?;
                let layout = self.layout_of(ty)?;
                if layout.is_unsized() {
                    // FIXME: This should be a span_bug (#80742)
                    self.tcx.sess.delay_span_bug(
                        self.frame().current_span(),
                        &format!("Nullary MIR operator called for unsized type {}", ty),
                    );
                    throw_inval!(SizeOfUnsizedType(ty));
                }
                let val = match null_op {
                    mir::NullOp::SizeOf => layout.size.bytes(),
                    mir::NullOp::AlignOf => layout.align.abi.bytes(),
                };
                self.write_scalar(Scalar::from_machine_usize(val, self), &dest)?;
            }

            ShallowInitBox(ref operand, _) => {
                let src = self.eval_operand(operand, None)?;
                let v = self.read_immediate(&src)?;
                self.write_immediate(*v, &dest)?;
            }

            Cast(cast_kind, ref operand, cast_ty) => {
                let src = self.eval_operand(operand, None)?;
                let cast_ty =
                    self.subst_from_current_frame_and_normalize_erasing_regions(cast_ty)?;
                self.cast(&src, cast_kind, cast_ty, &dest)?;
            }

            Discriminant(place) => {
                let op = self.eval_place_to_op(place, None)?;
                let discr_val = self.read_discriminant(&op)?.0;
                self.write_scalar(discr_val, &dest)?;
            }
        }

        trace!("{:?}", self.dump_place(*dest));

        Ok(())
    }

    /// Evaluate the given terminator. Will also adjust the stack frame and statement position
    /// accordingly.
    fn terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> {
        info!("{:?}", terminator.kind);

        self.eval_terminator(terminator)?;
        if !self.stack().is_empty() {
            if let Ok(loc) = self.frame().loc {
                info!("// executing {:?}", loc.block);
            }
        }
        Ok(())
    }
}