2017-07-21 17:25:30 +02:00
|
|
|
//! This module contains everything needed to instantiate an interpreter.
|
|
|
|
//! This separation exists to ensure that no fancy miri features like
|
|
|
|
//! interpreting common C functions leak into CTFE.
|
|
|
|
|
2018-10-05 15:13:59 +02:00
|
|
|
use std::borrow::{Borrow, Cow};
|
2020-10-13 10:17:05 +02:00
|
|
|
use std::fmt::Debug;
|
2018-09-21 23:32:59 +02:00
|
|
|
use std::hash::Hash;
|
|
|
|
|
2020-03-29 16:41:09 +02:00
|
|
|
use rustc_middle::mir;
|
2020-04-15 14:05:14 -07:00
|
|
|
use rustc_middle::ty::{self, Ty};
|
2020-03-30 22:54:15 +02:00
|
|
|
use rustc_span::def_id::DefId;
|
2020-12-11 18:42:36 +00:00
|
|
|
use rustc_target::abi::Size;
|
2021-01-10 14:31:02 +00:00
|
|
|
use rustc_target::spec::abi::Abi;
|
2018-08-23 21:22:27 +02:00
|
|
|
|
2018-10-16 14:50:07 +02:00
|
|
|
use super::{
|
2021-05-16 18:53:20 +02:00
|
|
|
AllocId, Allocation, CheckInAllocMsg, Frame, ImmTy, InterpCx, InterpResult, LocalValue,
|
2021-05-23 04:37:17 +08:00
|
|
|
MemPlace, Memory, MemoryKind, OpTy, Operand, PlaceTy, Pointer, Scalar, StackPopUnwind,
|
2018-10-16 14:50:07 +02:00
|
|
|
};
|
|
|
|
|
2019-04-16 21:04:54 -04:00
|
|
|
/// Data returned by `Machine::stack_pop` (via `after_stack_pop`),
/// to provide further control over the popping of the stack frame.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum StackPopJump {
    /// Indicates that no special handling should be
    /// done - we'll either return normally or unwind
    /// based on the terminator for the function
    /// we're leaving.
    Normal,

    /// Indicates that we should *not* jump to the return/unwind address, as the callback already
    /// took care of everything.
    NoJump,
}
|
|
|
|
|
2018-10-16 12:45:44 +02:00
|
|
|
/// Whether this kind of memory is allowed to leak
/// (i.e., to still be around when interpretation ends without being deallocated).
pub trait MayLeak: Copy {
    /// Returns `true` if leaking an allocation of this kind is acceptable
    /// and should not be reported as an error.
    fn may_leak(self) -> bool;
}
|
|
|
|
|
2018-10-05 15:13:59 +02:00
|
|
|
/// The functionality needed by memory to manage its allocations
pub trait AllocMap<K: Hash + Eq, V> {
    /// Tests if the map contains the given key.
    /// Deliberately takes `&mut` because that is sufficient, and some implementations
    /// can be more efficient that way (using `RefCell::get_mut`).
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Inserts a new entry into the map.
    /// Returns the previous value for this key, if any.
    fn insert(&mut self, k: K, v: V) -> Option<V>;

    /// Removes an entry from the map.
    /// Returns the removed value, or `None` if the key was not present.
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>;

    /// Returns data based on the keys and values in the map:
    /// `f` is applied to every entry, and the `Some` results are collected.
    fn filter_map_collect<T>(&self, f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T>;

    /// Returns a reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    /// (Note this takes `&self` — implementations need interior mutability to insert.)
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E>;

    /// Returns a mutable reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E>;

    /// Read-only lookup.
    /// Default implementation: `get_or` with a `vacant` that always fails,
    /// mapping the failure to `None`.
    fn get(&self, k: K) -> Option<&V> {
        self.get_or(k, || Err(())).ok()
    }

    /// Mutable lookup.
    /// Default implementation mirrors `get`, built on `get_mut_or`.
    fn get_mut(&mut self, k: K) -> Option<&mut V> {
        self.get_mut_or(k, || Err(())).ok()
    }
}
|
2018-08-23 19:04:33 +02:00
|
|
|
|
2017-07-21 17:25:30 +02:00
|
|
|
/// Methods of this trait signify a point where CTFE evaluation would fail
/// and some use case dependent behaviour can instead be applied.
pub trait Machine<'mir, 'tcx>: Sized {
    /// Additional memory kinds a machine wishes to distinguish from the builtin ones
    type MemoryKind: Debug + std::fmt::Display + MayLeak + Eq + 'static;

    /// Tag tracked alongside every pointer. This is used to implement "Stacked Borrows"
    /// <https://www.ralfj.de/blog/2018/08/07/stacked-borrows.html>.
    /// The `default()` is used for pointers to consts, statics, vtables and functions.
    /// The `Debug` formatting is used for displaying pointers; we cannot use `Display`
    /// as `()` does not implement that, but it should be "nice" output.
    type PointerTag: Debug + Copy + Eq + Hash + 'static;

    /// Machines can define extra (non-instance) things that represent values of function pointers.
    /// For example, Miri uses this to return a function pointer from `dlsym`
    /// that can later be called to execute the right thing.
    type ExtraFnVal: Debug + Copy;

    /// Extra data stored in every call frame.
    type FrameExtra;

    /// Extra data stored in memory. A reference to this is available when `AllocExtra`
    /// gets initialized, so you can e.g., have an `Rc` here if there is global state you
    /// need access to in the `AllocExtra` hooks.
    type MemoryExtra;

    /// Extra data stored in every allocation.
    type AllocExtra: Debug + Clone + 'static;

    /// Memory's allocation map: maps each `AllocId` to its kind and backing allocation.
    type MemoryMap: AllocMap<
        AllocId,
        (MemoryKind<Self::MemoryKind>, Allocation<Self::PointerTag, Self::AllocExtra>),
    > + Default
        + Clone;

    /// The memory kind to use for copied global memory (held in `tcx`) --
    /// or None if such memory should not be mutated and thus any such attempt will cause
    /// a `ModifiedStatic` error to be raised.
    /// Statics are copied under two circumstances: When they are mutated, and when
    /// `tag_allocation` (see below) returns an owned allocation
    /// that is added to the memory so that the work is not done twice.
    const GLOBAL_KIND: Option<Self::MemoryKind>;

    /// Whether memory accesses should be alignment-checked.
    fn enforce_alignment(memory_extra: &Self::MemoryExtra) -> bool;

    /// Whether, when checking alignment, we should `force_int` and thus support
    /// custom alignment logic based on whatever the integer address happens to be.
    fn force_int_for_alignment_check(memory_extra: &Self::MemoryExtra) -> bool;

    /// Whether to enforce the validity invariant
    fn enforce_validity(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool;

    /// Whether function calls should be [ABI](Abi)-checked.
    /// Defaults to `true`; machines can opt out of the check.
    fn enforce_abi(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
        true
    }

    /// Entry point for obtaining the MIR of anything that should get evaluated.
    /// So not just functions and shims, but also const/static initializers, anonymous
    /// constants, ...
    /// The default just asks `tcx` for the instance's MIR.
    fn load_mir(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        instance: ty::InstanceDef<'tcx>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        Ok(ecx.tcx.instance_mir(instance))
    }

    /// Entry point to all function calls.
    ///
    /// Returns either the mir to use for the call, or `None` if execution should
    /// just proceed (which usually means this hook did all the work that the
    /// called function should usually have done). In the latter case, it is
    /// this hook's responsibility to advance the instruction pointer!
    /// (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR
    /// nor just jump to `ret`, but instead push their own stack frame.)
    /// Passing `dest` and `ret` in the same `Option` proved very annoying when only one of them
    /// was used.
    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        abi: Abi,
        args: &[OpTy<'tcx, Self::PointerTag>],
        ret: Option<(&PlaceTy<'tcx, Self::PointerTag>, mir::BasicBlock)>,
        unwind: StackPopUnwind,
    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>>;

    /// Execute `fn_val`. It is the hook's responsibility to advance the instruction
    /// pointer as appropriate.
    fn call_extra_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        fn_val: Self::ExtraFnVal,
        abi: Abi,
        args: &[OpTy<'tcx, Self::PointerTag>],
        ret: Option<(&PlaceTy<'tcx, Self::PointerTag>, mir::BasicBlock)>,
        unwind: StackPopUnwind,
    ) -> InterpResult<'tcx>;

    /// Directly process an intrinsic without pushing a stack frame. It is the hook's
    /// responsibility to advance the instruction pointer as appropriate.
    fn call_intrinsic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx, Self::PointerTag>],
        ret: Option<(&PlaceTy<'tcx, Self::PointerTag>, mir::BasicBlock)>,
        unwind: StackPopUnwind,
    ) -> InterpResult<'tcx>;

    /// Called to evaluate `Assert` MIR terminators that trigger a panic.
    fn assert_panic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx>;

    /// Called to evaluate `Abort` MIR terminator.
    /// The default rejects aborting; returns `!` since a successful abort never continues.
    fn abort(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _msg: String) -> InterpResult<'tcx, !> {
        throw_unsup_format!("aborting execution is not supported")
    }

    /// Called for all binary operations where the LHS has pointer type.
    ///
    /// Returns a `(value, overflowed, value_ty)` triple if the operation succeeded.
    fn binary_ptr_op(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        bin_op: mir::BinOp,
        left: &ImmTy<'tcx, Self::PointerTag>,
        right: &ImmTy<'tcx, Self::PointerTag>,
    ) -> InterpResult<'tcx, (Scalar<Self::PointerTag>, bool, Ty<'tcx>)>;

    /// Heap allocations via the `box` keyword.
    fn box_alloc(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        dest: &PlaceTy<'tcx, Self::PointerTag>,
    ) -> InterpResult<'tcx>;

    /// Called to read the specified `local` from the `frame`.
    /// Since reading a ZST is not actually accessing memory or locals, this is never invoked
    /// for ZST reads.
    #[inline]
    fn access_local(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        frame: &Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
        local: mir::Local,
    ) -> InterpResult<'tcx, Operand<Self::PointerTag>> {
        frame.locals[local].access()
    }

    /// Called to write the specified `local` from the `frame`.
    /// Since writing a ZST is not actually accessing memory or locals, this is never invoked
    /// for ZST writes.
    #[inline]
    fn access_local_mut<'a>(
        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
        frame: usize,
        local: mir::Local,
    ) -> InterpResult<'tcx, Result<&'a mut LocalValue<Self::PointerTag>, MemPlace<Self::PointerTag>>>
    where
        'tcx: 'mir,
    {
        ecx.stack_mut()[frame].locals[local].access_mut()
    }

    /// Called before a basic block terminator is executed.
    /// You can use this to detect endlessly running programs.
    #[inline]
    fn before_terminator(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Called before a global allocation is accessed.
    /// `def_id` is `Some` if this is the "lazy" allocation of a static.
    /// Default: allow every access.
    #[inline]
    fn before_access_global(
        _memory_extra: &Self::MemoryExtra,
        _alloc_id: AllocId,
        _allocation: &Allocation,
        _static_def_id: Option<DefId>,
        _is_write: bool,
    ) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Return the `AllocId` for the given thread-local static in the current thread.
    /// Default: thread-local statics are unsupported (as in CTFE).
    fn thread_local_static_alloc_id(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<'tcx, AllocId> {
        throw_unsup!(ThreadLocalStatic(def_id))
    }

    /// Return the `AllocId` backing the given `extern static`.
    fn extern_static_alloc_id(
        mem: &Memory<'mir, 'tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<'tcx, AllocId> {
        // Use the `AllocId` associated with the `DefId`. Any actual *access* will fail.
        Ok(mem.tcx.create_static_alloc(def_id))
    }

    /// Return the "base" tag for the given *global* allocation: the one that is used for direct
    /// accesses to this static/const/fn allocation. If `id` is not a global allocation,
    /// this will return an unusable tag (i.e., accesses will be UB)!
    ///
    /// Called on the id returned by `thread_local_static_alloc_id` and `extern_static_alloc_id`, if needed.
    fn tag_global_base_pointer(memory_extra: &Self::MemoryExtra, id: AllocId) -> Self::PointerTag;

    /// Called to initialize the "extra" state of an allocation and make the pointers
    /// it contains (in relocations) tagged. The way we construct allocations is
    /// to always first construct it without extra and then add the extra.
    /// This keeps uniform code paths for handling both allocations created by CTFE
    /// for globals, and allocations created by Miri during evaluation.
    ///
    /// `kind` is the kind of the allocation being tagged; it can be `None` when
    /// it's a global and `GLOBAL_KIND` is `None`.
    ///
    /// This should avoid copying if no work has to be done! If this returns an owned
    /// allocation (because a copy had to be done to add tags or metadata), machine memory will
    /// cache the result. (This relies on `AllocMap::get_or` being able to add the
    /// owned allocation to the map even when the map is shared.)
    ///
    /// Also return the "base" tag to use for this allocation: the one that is used for direct
    /// accesses to this allocation. If `kind == STATIC_KIND`, this tag must be consistent
    /// with `tag_global_base_pointer`.
    fn init_allocation_extra<'b>(
        memory_extra: &Self::MemoryExtra,
        id: AllocId,
        alloc: Cow<'b, Allocation>,
        kind: Option<MemoryKind<Self::MemoryKind>>,
    ) -> (Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>, Self::PointerTag);

    /// Hook for performing extra checks on a memory read access.
    ///
    /// Takes read-only access to the allocation so we can keep all the memory read
    /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
    /// need to mutate.
    #[inline(always)]
    fn memory_read(
        _memory_extra: &Self::MemoryExtra,
        _alloc_extra: &Self::AllocExtra,
        _ptr: Pointer<Self::PointerTag>,
        _size: Size,
    ) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Hook for performing extra checks on a memory write access.
    #[inline(always)]
    fn memory_written(
        _memory_extra: &mut Self::MemoryExtra,
        _alloc_extra: &mut Self::AllocExtra,
        _ptr: Pointer<Self::PointerTag>,
        _size: Size,
    ) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Hook for performing extra operations on a memory deallocation.
    #[inline(always)]
    fn memory_deallocated(
        _memory_extra: &mut Self::MemoryExtra,
        _alloc_extra: &mut Self::AllocExtra,
        _ptr: Pointer<Self::PointerTag>,
        _size: Size,
    ) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Called after initializing static memory using the interpreter.
    fn after_static_mem_initialized(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _ptr: Pointer<Self::PointerTag>,
        _size: Size,
    ) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Executes a retagging operation.
    /// Default: a no-op (no retagging).
    #[inline]
    fn retag(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _kind: mir::RetagKind,
        _place: &PlaceTy<'tcx, Self::PointerTag>,
    ) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Called immediately before a new stack frame gets pushed.
    /// The machine wraps the given tagless frame, attaching its `FrameExtra`.
    fn init_frame_extra(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        frame: Frame<'mir, 'tcx, Self::PointerTag>,
    ) -> InterpResult<'tcx, Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>>;

    /// Borrow the current thread's stack.
    // NOTE(review): `'a` is not declared on this method or the trait; this relies on the
    // `in_band_lifetimes` crate feature being enabled — confirm at the crate root.
    fn stack(
        ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>];

    /// Mutably borrow the current thread's stack.
    fn stack_mut(
        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>>;

    /// Called immediately after a stack frame got pushed and its locals got initialized.
    fn after_stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        Ok(())
    }

    /// Called immediately after a stack frame got popped, but before jumping back to the caller.
    fn after_stack_pop(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _frame: Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
        _unwinding: bool,
    ) -> InterpResult<'tcx, StackPopJump> {
        // By default, we do not support unwinding from panics
        Ok(StackPopJump::Normal)
    }

    /// Convert an integer to a pointer.
    /// The default rejects this: `0` is UB (with dedicated error printing), and any other
    /// integer is simply unsupported during const-eval.
    fn int_to_ptr(
        _mem: &Memory<'mir, 'tcx, Self>,
        int: u64,
    ) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
        Err((if int == 0 {
            // This is UB, seriously.
            // (`DanglingIntPointer` with these exact arguments has special printing code.)
            err_ub!(DanglingIntPointer(0, CheckInAllocMsg::InboundsTest))
        } else {
            // This is just something we cannot support during const-eval.
            err_unsup!(ReadBytesAsPointer)
        })
        .into())
    }

    /// Convert a pointer to its integer address, if the machine supports that.
    fn ptr_to_int(
        _mem: &Memory<'mir, 'tcx, Self>,
        _ptr: Pointer<Self::PointerTag>,
    ) -> InterpResult<'tcx, u64>;
}
|
2020-04-27 19:01:30 +02:00
|
|
|
|
|
|
|
// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
// (CTFE and ConstProp) use the same instance. Here, we share that code.
// This macro expands to `Machine` trait items, so it must be invoked inside an
// `impl Machine<..> for ..` block; the remaining items are supplied by the impl.
pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
    // Compile-time machines need no pointer tagging and no extra function values.
    type PointerTag = ();
    type ExtraFnVal = !;

    type MemoryMap =
        rustc_data_structures::fx::FxHashMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation)>;
    const GLOBAL_KIND: Option<Self::MemoryKind> = None; // no copying of globals from `tcx` to machine memory

    // No per-allocation or per-frame extra state either.
    type AllocExtra = ();
    type FrameExtra = ();

    #[inline(always)]
    fn enforce_alignment(_memory_extra: &Self::MemoryExtra) -> bool {
        // We do not check for alignment to avoid having to carry an `Align`
        // in `ConstValue::ByRef`.
        false
    }

    #[inline(always)]
    fn force_int_for_alignment_check(_memory_extra: &Self::MemoryExtra) -> bool {
        // We do not support `force_int`.
        false
    }

    #[inline(always)]
    fn enforce_validity(_ecx: &InterpCx<$mir, $tcx, Self>) -> bool {
        false // for now, we don't enforce validity
    }

    #[inline(always)]
    fn call_extra_fn(
        _ecx: &mut InterpCx<$mir, $tcx, Self>,
        fn_val: !,
        _abi: Abi,
        _args: &[OpTy<$tcx>],
        _ret: Option<(&PlaceTy<$tcx>, mir::BasicBlock)>,
        _unwind: StackPopUnwind,
    ) -> InterpResult<$tcx> {
        // `ExtraFnVal = !` means this can never actually be called.
        match fn_val {}
    }

    #[inline(always)]
    fn init_allocation_extra<'b>(
        _memory_extra: &Self::MemoryExtra,
        _id: AllocId,
        alloc: Cow<'b, Allocation>,
        _kind: Option<MemoryKind<Self::MemoryKind>>,
    ) -> (Cow<'b, Allocation<Self::PointerTag>>, Self::PointerTag) {
        // We do not use a tag so we can just cheaply forward the allocation
        (alloc, ())
    }

    #[inline(always)]
    fn tag_global_base_pointer(
        _memory_extra: &Self::MemoryExtra,
        _id: AllocId,
    ) -> Self::PointerTag {
        // The unit tag is the only possible tag here.
        ()
    }
}
|