use std::cell::Cell;
use std::{fmt, mem};

use either::{Either, Left, Right};
use hir::CRATE_HIR_ID;
use rustc_errors::DiagCtxt;
use rustc_hir::{self as hir, def_id::DefId, definitions::DefPathData};
use rustc_index::IndexVec;
use rustc_middle::mir;
use rustc_middle::mir::interpret::{
    CtfeProvenance, ErrorHandled, InvalidMetaKind, ReportedErrorInfo,
};
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::layout::{
    self, FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOf, LayoutOfHelpers,
    TyAndLayout,
};
use rustc_middle::ty::{self, GenericArgsRef, ParamEnv, Ty, TyCtxt, TypeFoldable, Variance};
use rustc_mir_dataflow::storage::always_storage_live_locals;
use rustc_session::Limit;
use rustc_span::Span;
use rustc_target::abi::{call::FnAbi, Align, HasDataLayout, Size, TargetDataLayout};

use super::{
    GlobalId, Immediate, InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta,
    Memory, MemoryKind, OpTy, Operand, Place, PlaceTy, Pointer, PointerArithmetic, Projectable,
    Provenance, Scalar, StackPopJump,
};
use crate::errors;
use crate::util;
use crate::{fluent_generated as fluent, ReportErrorExt};

pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    /// Stores the `Machine` instance.
    ///
    /// Note: the stack is provided by the machine.
    pub machine: M,

    /// The results of the type checker, from rustc.
    /// The span in this is the "root" of the evaluation, i.e., the const
    /// we are evaluating (if this is CTFE).
    pub tcx: TyCtxtAt<'tcx>,

    /// Bounds in scope for polymorphic evaluations.
    pub(crate) param_env: ty::ParamEnv<'tcx>,

    /// The virtual memory system.
    pub memory: Memory<'mir, 'tcx, M>,

    /// The recursion limit (cached from `tcx.recursion_limit(())`)
    pub recursion_limit: Limit,
}

// The `PhantomData` exists to prevent this type from being `Send`. If it were sent across a thread
// boundary and dropped in the other thread, it would exit the span in the other thread.
struct SpanGuard(tracing::Span, std::marker::PhantomData<*const u8>);

impl SpanGuard {
    /// By default a `SpanGuard` does nothing.
    fn new() -> Self {
        Self(tracing::Span::none(), std::marker::PhantomData)
    }

    /// If a span is entered, we exit the previous span (if any, normally none) and enter the
    /// new span. This is mainly so we don't have to use `Option` for the `tracing_span` field of
    /// `Frame` by creating a dummy span to begin with and then entering it once the frame has
    /// been pushed.
    fn enter(&mut self, span: tracing::Span) {
        // This executes the destructor on the previous instance of `SpanGuard`, ensuring that
        // we never enter or exit more spans than vice versa. Unless you `mem::leak`, then we
        // can't protect the tracing stack, but that'll just lead to weird logging, no actual
        // problems.
        *self = Self(span, std::marker::PhantomData);
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.enter(id);
        });
    }
}

impl Drop for SpanGuard {
    fn drop(&mut self) {
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.exit(id);
        });
    }
}
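
// Taken together, `enter` and the `Drop` impl keep the `tracing` span stack balanced:
// every span entered through a `SpanGuard` is exited exactly once (short of leaking
// the guard, which only garbles the logs).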

/// A stack frame.
pub struct Frame<'mir, 'tcx, Prov: Provenance = CtfeProvenance, Extra = ()> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub body: &'mir mir::Body<'tcx>,

    /// The def_id and args of the current function.
    pub instance: ty::Instance<'tcx>,

    /// Extra data for the machine.
    pub extra: Extra,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// Work to perform when returning from this function.
    pub return_to_block: StackPopCleanup,

    /// The location where the result of the current stack frame should be written to,
    /// and its layout in the caller.
    pub return_place: PlaceTy<'tcx, Prov>,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`.
    /// The locals are stored as `Option<Value>`s.
    /// `None` represents a local that is currently dead, while a live local
    /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
    ///
    /// Do *not* access this directly; always go through the machine hook!
    pub locals: IndexVec<mir::Local, LocalState<'tcx, Prov>>,

    /// The span of the `tracing` crate is stored here.
    /// When the guard is dropped, the span is exited. This gives us
    /// a full stack trace on all tracing statements.
    tracing_span: SpanGuard,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// If this is `Right`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code).
    ///
    /// Needs to be public because ConstProp does unspeakable things to it.
    pub loc: Either<mir::Location, Span>,
}

/// What we store about a frame in an interpreter backtrace.
#[derive(Clone, Debug)]
pub struct FrameInfo<'tcx> {
    pub instance: ty::Instance<'tcx>,
    pub span: Span,
}

#[derive(Clone, Copy, Eq, PartialEq, Debug)] // Miri debug-prints these
pub enum StackPopCleanup {
    /// Jump to the next block in the caller, or cause UB if None (that's a function
    /// that may never return). Also store layout of return place so
    /// we can validate it at that layout.
    /// `ret` stores the block we jump to on a normal return, while `unwind`
    /// stores the block used for cleanup during unwinding.
    Goto { ret: Option<mir::BasicBlock>, unwind: mir::UnwindAction },
    /// The root frame of the stack: nowhere else to jump to.
    /// `cleanup` says whether locals are deallocated. Static computation
    /// wants them leaked to intern what they need (and just throw away
    /// the entire `ecx` when it is done).
    Root { cleanup: bool },
}
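
// Illustrative usage (not exercised in this file): an ordinary call pushes a frame
// with `StackPopCleanup::Goto { ret: Some(next_block), unwind }`, while the
// const-eval entry point pushes its root frame with
// `StackPopCleanup::Root { cleanup: false }` so the result allocation survives the
// pop and can be interned.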

/// State of a local variable including a memoized layout
#[derive(Clone)]
pub struct LocalState<'tcx, Prov: Provenance = CtfeProvenance> {
    value: LocalValue<Prov>,
    /// Don't modify if `Some`, this is only used to prevent computing the layout twice.
    /// Avoids computing the layout of locals that are never actually initialized.
    layout: Cell<Option<TyAndLayout<'tcx>>>,
}

impl<Prov: Provenance> std::fmt::Debug for LocalState<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("LocalState")
            .field("value", &self.value)
            .field("ty", &self.layout.get().map(|l| l.ty))
            .finish()
    }
}

/// Current value of a local variable
///
/// This does not store the type of the local; the type is given by `body.local_decls` and can never
/// change, so by not storing it here we avoid having to maintain that as an invariant.
#[derive(Copy, Clone, Debug)] // Miri debug-prints these
pub(super) enum LocalValue<Prov: Provenance = CtfeProvenance> {
    /// This local is not currently alive, and cannot be used at all.
    Dead,
    /// A normal, live local.
    /// Mostly for convenience, we re-use the `Operand` type here.
    /// This is an optimization over just always having a pointer here;
    /// we can thus avoid doing an allocation when the local just stores
    /// immediate values *and* never has its address taken.
    Live(Operand<Prov>),
}

impl<'tcx, Prov: Provenance> LocalState<'tcx, Prov> {
    pub fn make_live_uninit(&mut self) {
        self.value = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
    }

    /// This is a hack because Miri needs a way to visit all the provenance in a `LocalState`
    /// without having a layout or `TyCtxt` available, and we want to keep the `Operand` type
    /// private.
    pub fn as_mplace_or_imm(
        &self,
    ) -> Option<Either<(Pointer<Option<Prov>>, MemPlaceMeta<Prov>), Immediate<Prov>>> {
        match self.value {
            LocalValue::Dead => None,
            LocalValue::Live(Operand::Indirect(mplace)) => Some(Left((mplace.ptr, mplace.meta))),
            LocalValue::Live(Operand::Immediate(imm)) => Some(Right(imm)),
        }
    }

    /// Read the local's value or error if the local is not yet live or not live anymore.
    #[inline(always)]
    pub(super) fn access(&self) -> InterpResult<'tcx, &Operand<Prov>> {
        match &self.value {
            LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
            LocalValue::Live(val) => Ok(val),
        }
    }

    /// Overwrite the local. If the local can be overwritten in place, return a reference
    /// to do so; otherwise return the `MemPlace` to consult instead.
    ///
    /// Note: Before calling this, call the `before_access_local_mut` machine hook! You may be
    /// invalidating machine invariants otherwise!
    #[inline(always)]
    pub(super) fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand<Prov>> {
        match &mut self.value {
            LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
            LocalValue::Live(val) => Ok(val),
        }
    }
}

impl<'mir, 'tcx, Prov: Provenance> Frame<'mir, 'tcx, Prov> {
    pub fn with_extra<Extra>(self, extra: Extra) -> Frame<'mir, 'tcx, Prov, Extra> {
        Frame {
            body: self.body,
            instance: self.instance,
            return_to_block: self.return_to_block,
            return_place: self.return_place,
            locals: self.locals,
            loc: self.loc,
            extra,
            tracing_span: self.tracing_span,
        }
    }
}

impl<'mir, 'tcx, Prov: Provenance, Extra> Frame<'mir, 'tcx, Prov, Extra> {
    /// Get the current location within the Frame.
    ///
    /// If this is `Right`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code).
    ///
    /// Used by priroda.
    pub fn current_loc(&self) -> Either<mir::Location, Span> {
        self.loc
    }

    /// Return the `SourceInfo` of the current instruction.
    pub fn current_source_info(&self) -> Option<&mir::SourceInfo> {
        self.loc.left().map(|loc| self.body.source_info(loc))
    }

    pub fn current_span(&self) -> Span {
        match self.loc {
            Left(loc) => self.body.source_info(loc).span,
            Right(span) => span,
        }
    }

    pub fn lint_root(&self) -> Option<hir::HirId> {
        self.current_source_info().and_then(|source_info| {
            match &self.body.source_scopes[source_info.scope].local_data {
                mir::ClearCrossCrate::Set(data) => Some(data.lint_root),
                mir::ClearCrossCrate::Clear => None,
            }
        })
    }
}

// FIXME: only used by miri, should be removed once translatable.
impl<'tcx> fmt::Display for FrameInfo<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        ty::tls::with(|tcx| {
            if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure {
                write!(f, "inside closure")
            } else {
                // Note: this triggers a `must_produce_diag` state, which means that if we ever
                // get here we must emit a diagnostic. We should never display a `FrameInfo` unless
                // we actually want to emit a warning or error to the user.
                write!(f, "inside `{}`", self.instance)
            }
        })
    }
}

impl<'tcx> FrameInfo<'tcx> {
    pub fn as_note(&self, tcx: TyCtxt<'tcx>) -> errors::FrameNote {
        let span = self.span;
        if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure {
            errors::FrameNote { where_: "closure", span, instance: String::new(), times: 0 }
        } else {
            let instance = format!("{}", self.instance);
            // Note: this triggers a `must_produce_diag` state, which means that if we ever get
            // here we must emit a diagnostic. We should never display a `FrameInfo` unless we
            // actually want to emit a warning or error to the user.
            errors::FrameNote { where_: "instance", span, instance, times: 0 }
        }
    }
}

impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for InterpCx<'mir, 'tcx, M> {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        &self.tcx.data_layout
    }
}

impl<'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for InterpCx<'mir, 'tcx, M>
where
    M: Machine<'mir, 'tcx>,
{
    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        *self.tcx
    }
}

impl<'mir, 'tcx, M> layout::HasParamEnv<'tcx> for InterpCx<'mir, 'tcx, M>
where
    M: Machine<'mir, 'tcx>,
{
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.param_env
    }
}

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> LayoutOfHelpers<'tcx> for InterpCx<'mir, 'tcx, M> {
    type LayoutOfResult = InterpResult<'tcx, TyAndLayout<'tcx>>;

    #[inline]
    fn layout_tcx_at_span(&self) -> Span {
        // Using the cheap root span for performance.
        self.tcx.span
    }

    #[inline]
    fn handle_layout_err(
        &self,
        err: LayoutError<'tcx>,
        _: Span,
        _: Ty<'tcx>,
    ) -> InterpErrorInfo<'tcx> {
        err_inval!(Layout(err)).into()
    }
}

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> FnAbiOfHelpers<'tcx> for InterpCx<'mir, 'tcx, M> {
    type FnAbiOfResult = InterpResult<'tcx, &'tcx FnAbi<'tcx, Ty<'tcx>>>;

    fn handle_fn_abi_err(
        &self,
        err: FnAbiError<'tcx>,
        _span: Span,
        _fn_abi_request: FnAbiRequest<'tcx>,
    ) -> InterpErrorInfo<'tcx> {
        match err {
            FnAbiError::Layout(err) => err_inval!(Layout(err)).into(),
            FnAbiError::AdjustForForeignAbi(err) => {
                err_inval!(FnAbiAdjustForForeignAbi(err)).into()
            }
        }
    }
}

/// Test if it is valid for a MIR assignment to assign `src`-typed place to `dest`-typed value.
/// This test should be symmetric, as it is primarily about layout compatibility.
pub(super) fn mir_assign_valid_types<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    src: TyAndLayout<'tcx>,
    dest: TyAndLayout<'tcx>,
) -> bool {
    // Type-changing assignments can happen when subtyping is used. While
    // all normal lifetimes are erased, higher-ranked types with their
    // late-bound lifetimes are still around and can lead to type
    // differences.
    if util::relate_types(tcx, param_env, Variance::Covariant, src.ty, dest.ty) {
        // Make sure the layout is equal, too -- just to be safe. Miri really
        // needs layout equality. For performance reasons we skip this check when
        // the types are equal. Equal types *can* have different layouts when
        // enum downcast is involved (as enum variants carry the type of the
        // enum), but those should never occur in assignments.
        if cfg!(debug_assertions) || src.ty != dest.ty {
            assert_eq!(src.layout, dest.layout);
        }
        true
    } else {
        false
    }
}
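
// Example of a type-changing assignment accepted above (illustrative): a place of type
// `for<'a> fn(&'a u8)` assigned to one of type `fn(&'static u8)`. The types differ even
// after lifetime erasure (the binder is still there), but they relate covariantly and
// share a layout.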

/// Use the already known layout if given (but sanity check in debug mode),
/// or compute the layout.
#[cfg_attr(not(debug_assertions), inline(always))]
pub(super) fn from_known_layout<'tcx>(
    tcx: TyCtxtAt<'tcx>,
    param_env: ParamEnv<'tcx>,
    known_layout: Option<TyAndLayout<'tcx>>,
    compute: impl FnOnce() -> InterpResult<'tcx, TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
    match known_layout {
        None => compute(),
        Some(known_layout) => {
            if cfg!(debug_assertions) {
                let check_layout = compute()?;
                if !mir_assign_valid_types(tcx.tcx, param_env, check_layout, known_layout) {
                    span_bug!(
                        tcx.span,
                        "expected type differs from actual type.\nexpected: {}\nactual: {}",
                        known_layout.ty,
                        check_layout.ty,
                    );
                }
            }
            Ok(known_layout)
        }
    }
}
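
// A typical caller is `layout_of_local` below: it passes the layout the caller may
// already know for a MIR local, plus a closure that computes it from the local's
// declared type if needed.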

/// Turn the given error into a human-readable string. Expects the string to be printed, so if
/// `RUSTC_CTFE_BACKTRACE` is set this will show a backtrace of the rustc internals that
/// triggered the error.
///
/// This is NOT the preferred way to render an error; use `report` from `const_eval` instead.
/// However, this is useful when error messages appear in ICEs.
pub fn format_interp_error<'tcx>(dcx: &DiagCtxt, e: InterpErrorInfo<'tcx>) -> String {
    let (e, backtrace) = e.into_parts();
    backtrace.print_backtrace();
    // FIXME(fee1-dead), HACK: we want to use the error as title therefore we can just extract the
    // label and arguments from the InterpError.
    #[allow(rustc::untranslatable_diagnostic)]
    let mut diag = dcx.struct_allow("");
    let msg = e.diagnostic_message();
    e.add_args(&mut diag);
    let s = dcx.eagerly_translate_to_string(msg, diag.args());
    diag.cancel();
    s
}

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    pub fn new(
        tcx: TyCtxt<'tcx>,
        root_span: Span,
        param_env: ty::ParamEnv<'tcx>,
        machine: M,
    ) -> Self {
        InterpCx {
            machine,
            tcx: tcx.at(root_span),
            param_env,
            memory: Memory::new(),
            recursion_limit: tcx.recursion_limit(),
        }
    }

    #[inline(always)]
    pub fn cur_span(&self) -> Span {
        // This deliberately does *not* honor `requires_caller_location` since it is used for much
        // more than just panics.
        self.stack().last().map_or(self.tcx.span, |f| f.current_span())
    }

    #[inline(always)]
    /// Find the first stack frame that is within the current crate, if any;
    /// otherwise return the crate's `HirId`.
    pub fn best_lint_scope(&self) -> hir::HirId {
        self.stack()
            .iter()
            .find_map(|frame| frame.body.source.def_id().as_local())
            .map_or(CRATE_HIR_ID, |def_id| self.tcx.local_def_id_to_hir_id(def_id))
    }

    #[inline(always)]
    pub(crate) fn stack(&self) -> &[Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>] {
        M::stack(self)
    }

    #[inline(always)]
    pub(crate) fn stack_mut(
        &mut self,
    ) -> &mut Vec<Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>> {
        M::stack_mut(self)
    }

    #[inline(always)]
    pub fn frame_idx(&self) -> usize {
        let stack = self.stack();
        assert!(!stack.is_empty());
        stack.len() - 1
    }

    #[inline(always)]
    pub fn frame(&self) -> &Frame<'mir, 'tcx, M::Provenance, M::FrameExtra> {
        self.stack().last().expect("no call frames exist")
    }

    #[inline(always)]
    pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx, M::Provenance, M::FrameExtra> {
        self.stack_mut().last_mut().expect("no call frames exist")
    }

    #[inline(always)]
    pub fn body(&self) -> &'mir mir::Body<'tcx> {
        self.frame().body
    }

    #[inline(always)]
    pub fn sign_extend(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
        assert!(ty.abi.is_signed());
        ty.size.sign_extend(value)
    }

    #[inline(always)]
    pub fn truncate(&self, value: u128, ty: TyAndLayout<'_>) -> u128 {
        ty.size.truncate(value)
    }
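
    // Illustrative values for the two helpers above: with an 8-bit signed layout,
    // `sign_extend(0xFF, ty)` returns `u128::MAX` (the bit pattern of -1), while
    // `truncate(0x1FF, ty)` returns `0xFF` -- it simply drops all bits beyond `ty.size`.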

    #[inline]
    pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
        ty.is_freeze(*self.tcx, self.param_env)
    }

    pub fn load_mir(
        &self,
        instance: ty::InstanceDef<'tcx>,
        promoted: Option<mir::Promoted>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        trace!("load mir(instance={:?}, promoted={:?})", instance, promoted);
        let body = if let Some(promoted) = promoted {
            let def = instance.def_id();
            &self.tcx.promoted_mir(def)[promoted]
        } else {
            M::load_mir(self, instance)?
        };
        // do not continue if typeck errors occurred (can only occur in local crate)
        if let Some(err) = body.tainted_by_errors {
            throw_inval!(AlreadyReported(ReportedErrorInfo::tainted_by_errors(err)));
        }
        Ok(body)
    }
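
    // Note: which MIR body is returned for a plain function is a machine decision
    // (`M::load_mir`); const-eval, for instance, runs on a different body than runtime
    // code does. Passing `Some(promoted)` selects one of the function's promoted
    // constants instead.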

    /// Call this on things you got out of the MIR (so it is as generic as the current
    /// stack frame), to bring it into the proper environment for this interpreter.
    pub(super) fn instantiate_from_current_frame_and_normalize_erasing_regions<
        T: TypeFoldable<TyCtxt<'tcx>>,
    >(
        &self,
        value: T,
    ) -> Result<T, ErrorHandled> {
        self.instantiate_from_frame_and_normalize_erasing_regions(self.frame(), value)
    }

    /// Call this on things you got out of the MIR (so it is as generic as the provided
    /// stack frame), to bring it into the proper environment for this interpreter.
    pub(super) fn instantiate_from_frame_and_normalize_erasing_regions<
        T: TypeFoldable<TyCtxt<'tcx>>,
    >(
        &self,
        frame: &Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>,
        value: T,
    ) -> Result<T, ErrorHandled> {
        frame
            .instance
            .try_instantiate_mir_and_normalize_erasing_regions(
                *self.tcx,
                self.param_env,
                ty::EarlyBinder::bind(value),
            )
            .map_err(|_| ErrorHandled::TooGeneric(self.cur_span()))
    }

    /// The `args` are assumed to already be in our interpreter "universe" (param_env).
    pub(super) fn resolve(
        &self,
        def: DefId,
        args: GenericArgsRef<'tcx>,
    ) -> InterpResult<'tcx, ty::Instance<'tcx>> {
        trace!("resolve: {:?}, {:#?}", def, args);
        trace!("param_env: {:#?}", self.param_env);
        trace!("args: {:#?}", args);
        match ty::Instance::resolve(*self.tcx, self.param_env, def, args) {
            Ok(Some(instance)) => Ok(instance),
            Ok(None) => throw_inval!(TooGeneric),

            // FIXME(eddyb) this could be a bit more specific than `AlreadyReported`.
            Err(error_reported) => throw_inval!(AlreadyReported(error_reported.into())),
        }
    }
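
    // Illustrative: resolving the trait-method `DefId` of `Clone::clone` with
    // `args == [i32]` yields the `Instance` of the concrete `impl Clone for i32`
    // method; if the args still contain generic parameters, we bail with `TooGeneric`.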

    /// Walks up the callstack from the intrinsic's callsite, searching for the first callsite in a
    /// frame which is not `#[track_caller]`. This is the fancy version of `cur_span`.
    pub(crate) fn find_closest_untracked_caller_location(&self) -> Span {
        for frame in self.stack().iter().rev() {
            debug!("find_closest_untracked_caller_location: checking frame {:?}", frame.instance);

            // Assert that the frame we look at is actually executing code currently
            // (`loc` is `Right` when we are unwinding and the frame does not require cleanup).
            let loc = frame.loc.left().unwrap();

            // This could be a non-`Call` terminator (such as `Drop`), or not a terminator at all
            // (such as `box`). Use the normal span by default.
            let mut source_info = *frame.body.source_info(loc);

            // If this is a `Call` terminator, use the `fn_span` instead.
            let block = &frame.body.basic_blocks[loc.block];
            if loc.statement_index == block.statements.len() {
                debug!(
                    "find_closest_untracked_caller_location: got terminator {:?} ({:?})",
                    block.terminator(),
                    block.terminator().kind,
                );
                if let mir::TerminatorKind::Call { fn_span, .. } = block.terminator().kind {
                    source_info.span = fn_span;
                }
            }

            let caller_location = if frame.instance.def.requires_caller_location(*self.tcx) {
                // We use `Err(())` as indication that we should continue up the call stack since
                // this is a `#[track_caller]` function.
                Some(Err(()))
            } else {
                None
            };
            if let Ok(span) =
                frame.body.caller_location_span(source_info, caller_location, *self.tcx, Ok)
            {
                return span;
            }
        }

        span_bug!(self.cur_span(), "no non-`#[track_caller]` frame found")
    }
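
    // Example (illustrative): if a panic is raised inside `#[track_caller] fn helper()`
    // called from `fn user()`, the loop above skips `helper`'s frame and reports the
    // call site inside `user`.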

    #[inline(always)]
    pub fn layout_of_local(
        &self,
        frame: &Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        let state = &frame.locals[local];
        if let Some(layout) = state.layout.get() {
            return Ok(layout);
        }

        let layout = from_known_layout(self.tcx, self.param_env, layout, || {
            let local_ty = frame.body.local_decls[local].ty;
            let local_ty =
                self.instantiate_from_frame_and_normalize_erasing_regions(frame, local_ty)?;
            self.layout_of(local_ty)
        })?;

        // Layouts of locals are requested a lot, so we cache them.
        state.layout.set(Some(layout));
        Ok(layout)
    }

    /// Returns the actual dynamic size and alignment of the place at the given type.
    /// Only the "meta" (metadata) part of the place matters.
    /// This can fail to provide an answer for extern types.
    pub(super) fn size_and_align_of(
        &self,
        metadata: &MemPlaceMeta<M::Provenance>,
        layout: &TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
        if layout.is_sized() {
            return Ok(Some((layout.size, layout.align.abi)));
        }
        match layout.ty.kind() {
            ty::Adt(..) | ty::Tuple(..) => {
                // First get the size of all statically known fields.
                // Don't use type_of::sizing_type_of because that expects t to be sized,
                // and it also rounds up to alignment, which we want to avoid,
                // as the unsized field's alignment could be smaller.
                assert!(!layout.ty.is_simd());
                assert!(layout.fields.count() > 0);
                trace!("DST layout: {:?}", layout);

                let unsized_offset_unadjusted = layout.fields.offset(layout.fields.count() - 1);
                let sized_align = layout.align.abi;

                // Recurse to get the size of the dynamically sized field (must be
                // the last field). Can't have foreign types here, how would we
                // adjust alignment and size for them?
                let field = layout.field(self, layout.fields.count() - 1);
                let Some((unsized_size, mut unsized_align)) =
                    self.size_and_align_of(metadata, &field)?
                else {
                    // A field with an extern type. We don't know the actual dynamic size
                    // or the alignment.
                    return Ok(None);
                };

                // # First compute the dynamic alignment

                // Packed type alignment needs to be capped.
                if let ty::Adt(def, _) = layout.ty.kind() {
                    if let Some(packed) = def.repr().pack {
                        unsized_align = unsized_align.min(packed);
                    }
                }

                // Choose max of two known alignments (combined value must
                // be aligned according to more restrictive of the two).
                let full_align = sized_align.max(unsized_align);

                // # Then compute the dynamic size

                let unsized_offset_adjusted = unsized_offset_unadjusted.align_to(unsized_align);
                let full_size = (unsized_offset_adjusted + unsized_size).align_to(full_align);

                // Just for our sanity's sake, assert that this is equal to what codegen would compute.
                assert_eq!(
                    full_size,
                    (unsized_offset_unadjusted + unsized_size).align_to(full_align)
                );

                // Check if this brought us over the size limit.
                if full_size > self.max_size_of_val() {
                    throw_ub!(InvalidMeta(InvalidMetaKind::TooBig));
                }
                Ok(Some((full_size, full_align)))
            }
            ty::Dynamic(_, _, ty::Dyn) => {
                let vtable = metadata.unwrap_meta().to_pointer(self)?;
                // Read size and align from vtable (already checks size).
                Ok(Some(self.get_vtable_size_and_align(vtable)?))
            }

            ty::Slice(_) | ty::Str => {
                let len = metadata.unwrap_meta().to_target_usize(self)?;
                let elem = layout.field(self, 0);

                // Make sure the slice is not too big.
                let size = elem.size.bytes().saturating_mul(len); // we rely on `max_size_of_val` being smaller than `u64::MAX`.
                let size = Size::from_bytes(size);
                if size > self.max_size_of_val() {
                    throw_ub!(InvalidMeta(InvalidMetaKind::SliceTooBig));
                }
                Ok(Some((size, elem.align.abi)))
            }

            ty::Foreign(_) => Ok(None),

            _ => span_bug!(self.cur_span(), "size_and_align_of::<{}> not supported", layout.ty),
        }
    }
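
    // Worked example for the `Adt`/`Tuple` arm above (illustrative): for
    // `struct S { x: u8, tail: [u16] }` with a slice length of 3, the tail contributes
    // size 6 and align 2, and the computation yields `full_size == 8` with
    // `full_align == 2` -- the same values `size_of_val`/`align_of_val` would report.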

    #[inline]
    pub fn size_and_align_of_mplace(
        &self,
        mplace: &MPlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Option<(Size, Align)>> {
        self.size_and_align_of(&mplace.meta(), &mplace.layout)
    }

    #[instrument(skip(self, body, return_place, return_to_block), level = "debug")]
    pub fn push_stack_frame(
        &mut self,
        instance: ty::Instance<'tcx>,
        body: &'mir mir::Body<'tcx>,
        return_place: &PlaceTy<'tcx, M::Provenance>,
        return_to_block: StackPopCleanup,
    ) -> InterpResult<'tcx> {
        trace!("body: {:#?}", body);
        let dead_local = LocalState { value: LocalValue::Dead, layout: Cell::new(None) };
        let locals = IndexVec::from_elem(dead_local, &body.local_decls);
        // First push a stack frame so we have access to the local args
        let pre_frame = Frame {
            body,
            loc: Right(body.span), // Span used for errors caused during preamble.
            return_to_block,
            return_place: return_place.clone(),
            locals,
            instance,
            tracing_span: SpanGuard::new(),
            extra: (),
        };
        let frame = M::init_frame_extra(self, pre_frame)?;
        self.stack_mut().push(frame);

        // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check).
        if M::POST_MONO_CHECKS {
            for &const_ in &body.required_consts {
                let c = self
                    .instantiate_from_current_frame_and_normalize_erasing_regions(const_.const_)?;
                c.eval(*self.tcx, self.param_env, Some(const_.span)).map_err(|err| {
                    err.emit_note(*self.tcx);
                    err
                })?;
            }
        }

        // done
        M::after_stack_push(self)?;
        self.frame_mut().loc = Left(mir::Location::START);

        let span = info_span!("frame", "{}", instance);
        self.frame_mut().tracing_span.enter(span);

        Ok(())
    }

    /// Jump to the given block.
    #[inline]
    pub fn go_to_block(&mut self, target: mir::BasicBlock) {
        self.frame_mut().loc = Left(mir::Location { block: target, statement_index: 0 });
    }

    /// *Return* to the given `target` basic block.
    /// Do *not* use for unwinding! Use `unwind_to_block` instead.
    ///
    /// If `target` is `None`, that indicates the function cannot return, so we raise UB.
    pub fn return_to_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
        if let Some(target) = target {
            self.go_to_block(target);
            Ok(())
        } else {
            throw_ub!(Unreachable)
        }
    }

    /// *Unwind* to the given `target` basic block.
    /// Do *not* use for returning! Use `return_to_block` instead.
    ///
    /// If `target` is `UnwindAction::Continue`, that indicates the function does not need cleanup
    /// during unwinding, and we will just keep propagating that upwards.
    ///
    /// If `target` is `UnwindAction::Unreachable`, that indicates the function does not allow
    /// unwinding, and doing so is UB.
    #[cold] // usually we have normal returns, not unwinding
    pub fn unwind_to_block(&mut self, target: mir::UnwindAction) -> InterpResult<'tcx> {
        self.frame_mut().loc = match target {
            mir::UnwindAction::Cleanup(block) => Left(mir::Location { block, statement_index: 0 }),
            mir::UnwindAction::Continue => Right(self.frame_mut().body.span),
            mir::UnwindAction::Unreachable => {
                throw_ub_custom!(fluent::const_eval_unreachable_unwind);
            }
            mir::UnwindAction::Terminate(reason) => {
                self.frame_mut().loc = Right(self.frame_mut().body.span);
                M::unwind_terminate(self, reason)?;
                // This might have pushed a new stack frame, or it terminated execution.
                // Either way, `loc` will not be updated.
                return Ok(());
            }
        };
        Ok(())
    }

    /// Pops the current frame from the stack, deallocating the
    /// memory for allocated locals.
    ///
    /// If `unwinding` is `false`, then we are performing a normal return
    /// from a function. In this case, we jump back into the frame of the caller,
    /// and continue execution as normal.
    ///
    /// If `unwinding` is `true`, then we are in the middle of a panic,
    /// and need to unwind this frame. In this case, we jump to the
    /// `cleanup` block for the function, which is responsible for running
    /// `Drop` impls for any locals that have been initialized at this point.
    /// The cleanup block ends with a special `Resume` terminator, which will
    /// cause us to continue unwinding.
    #[instrument(skip(self), level = "debug")]
    pub(super) fn pop_stack_frame(&mut self, unwinding: bool) -> InterpResult<'tcx> {
        info!(
            "popping stack frame ({})",
            if unwinding { "during unwinding" } else { "returning from function" }
        );

        // Check `unwinding`.
        assert_eq!(
            unwinding,
            match self.frame().loc {
                Left(loc) => self.body().basic_blocks[loc.block].is_cleanup,
                Right(_) => true,
            }
        );
        if unwinding && self.frame_idx() == 0 {
            throw_ub_custom!(fluent::const_eval_unwind_past_top);
        }

        M::before_stack_pop(self, self.frame())?;

        // Copy return value. Must of course happen *before* we deallocate the locals.
        let copy_ret_result = if !unwinding {
            let op = self
                .local_to_op(self.frame(), mir::RETURN_PLACE, None)
                .expect("return place should always be live");
            let dest = self.frame().return_place.clone();
            let err = if self.stack().len() == 1 {
                // The initializer of constants and statics will get validated separately
                // after the constant has been fully evaluated. While we could fall back to the default
                // code path, that will cause -Zenforce-validity to cycle on static initializers.
                // Reading from a static's memory is not allowed during its evaluation, and will always
                // trigger a cycle error. Validation must read from the memory of the current item.
                // For Miri this means we do not validate the root frame return value,
                // but Miri anyway calls `read_target_isize` on that so separate validation
                // is not needed.
                self.copy_op_no_dest_validation(&op, &dest)
            } else {
                self.copy_op_allow_transmute(&op, &dest)
            };
            trace!("return value: {:?}", self.dump_place(&dest));
            // We delay actually short-circuiting on this error until *after* the stack frame is
            // popped, since we want this error to be attributed to the caller, whose type defines
            // this transmute.
            err
        } else {
            Ok(())
        };

        // Cleanup: deallocate locals.
        // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
        // We do this while the frame is still on the stack, so errors point to the callee.
        let return_to_block = self.frame().return_to_block;
        let cleanup = match return_to_block {
            StackPopCleanup::Goto { .. } => true,
            StackPopCleanup::Root { cleanup, .. } => cleanup,
        };
        if cleanup {
            // We need to take the locals out, since we need to mutate while iterating.
            let locals = mem::take(&mut self.frame_mut().locals);
            for local in &locals {
                self.deallocate_local(local.value)?;
            }
        }

        // All right, now it is time to actually pop the frame.
        // Note that its locals are gone already, but that's fine.
        let frame =
            self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");
        // Report error from return value copy, if any.
        copy_ret_result?;

        // If we are not doing cleanup, also skip everything else.
        if !cleanup {
            assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
            assert!(!unwinding, "tried to skip cleanup during unwinding");
            // Skip machine hook.
            return Ok(());
        }
        if M::after_stack_pop(self, frame, unwinding)? == StackPopJump::NoJump {
            // The hook already did everything.
            return Ok(());
        }

        // Normal return, figure out where to jump.
        if unwinding {
            // Follow the unwind edge.
            let unwind = match return_to_block {
                StackPopCleanup::Goto { unwind, .. } => unwind,
                StackPopCleanup::Root { .. } => {
                    panic!("encountered StackPopCleanup::Root when unwinding!")
                }
            };
            // This must be the very last thing that happens, since it can in fact push a new stack frame.
            self.unwind_to_block(unwind)
        } else {
            // Follow the normal return edge.
            match return_to_block {
                StackPopCleanup::Goto { ret, .. } => self.return_to_block(ret),
                StackPopCleanup::Root { .. } => {
                    assert!(
                        self.stack().is_empty(),
                        "only the topmost frame can have StackPopCleanup::Root"
                    );
                    Ok(())
                }
            }
        }
    }
|
|
|
|
|
2023-08-06 18:40:37 +02:00
|
|
|
/// In the current stack frame, mark all locals as live that are not arguments and don't have
|
|
|
|
/// `Storage*` annotations (this includes the return place).
|
|
|
|
pub fn storage_live_for_always_live_locals(&mut self) -> InterpResult<'tcx> {
|
|
|
|
self.storage_live(mir::RETURN_PLACE)?;
|
|
|
|
|
|
|
|
let body = self.body();
|
|
|
|
let always_live = always_storage_live_locals(body);
|
|
|
|
for local in body.vars_and_temps_iter() {
|
|
|
|
if always_live.contains(local) {
|
|
|
|
self.storage_live(local)?;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-08-17 18:59:39 +02:00
|
|
|
    /// Mark the given local as live, allocating backing memory if its type is unsized
    /// (in which case `meta` must provide the pointer metadata).
    pub fn storage_live_dyn(
        &mut self,
        local: mir::Local,
        meta: MemPlaceMeta<M::Provenance>,
    ) -> InterpResult<'tcx> {
        trace!("{:?} is now live", local);

        // We avoid `ty.is_trivially_sized` since that (a) cannot assume WF, so it recurses through
        // all fields of a tuple, and (b) does something expensive for ADTs.
        fn is_very_trivially_sized(ty: Ty<'_>) -> bool {
            match ty.kind() {
                ty::Infer(ty::IntVar(_) | ty::FloatVar(_))
                | ty::Uint(_)
                | ty::Int(_)
                | ty::Bool
                | ty::Float(_)
                | ty::FnDef(..)
                | ty::FnPtr(_)
                | ty::RawPtr(..)
                | ty::Char
                | ty::Ref(..)
                | ty::Coroutine(..)
                | ty::CoroutineWitness(..)
                | ty::Array(..)
                | ty::Closure(..)
                | ty::CoroutineClosure(..)
                | ty::Never
                | ty::Error(_) => true,

                ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => false,

                // Only the last field of a tuple can possibly be unsized.
                ty::Tuple(tys) => tys.last().iter().all(|ty| is_very_trivially_sized(**ty)),

                // We don't want to do any queries, so there is not much we can do with ADTs.
                ty::Adt(..) => false,

                ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) => false,

                ty::Infer(ty::TyVar(_)) => false,

                ty::Bound(..)
                | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
                    bug!("`is_very_trivially_sized` applied to unexpected type: {}", ty)
                }
            }
        }

        // This is a hot function, so we avoid computing the layout when possible.
        // `unsized_` will be `None` for sized types and `Some(layout)` for unsized types.
        let unsized_ = if is_very_trivially_sized(self.body().local_decls[local].ty) {
            None
        } else {
            // We need the layout.
            let layout = self.layout_of_local(self.frame(), local, None)?;
            if layout.is_sized() { None } else { Some(layout) }
        };

        let local_val = LocalValue::Live(if let Some(layout) = unsized_ {
            if !meta.has_meta() {
                throw_unsup!(UnsizedLocal);
            }
            // Need to allocate some memory, since `Immediate::Uninit` cannot be unsized.
            let dest_place = self.allocate_dyn(layout, MemoryKind::Stack, meta)?;
            Operand::Indirect(*dest_place.mplace())
        } else {
            assert!(!meta.has_meta()); // we're dropping the metadata
            // Just make this an efficient immediate.
            // Note that not calling `layout_of` here does have one real consequence:
            // if the type is too big, we'll only notice this when the local is actually initialized,
            // which is a bit too late -- we should ideally notice this already here, when the memory
            // is conceptually allocated. But given how rare that error is and that this is a hot function,
            // we accept this downside for now.
            Operand::Immediate(Immediate::Uninit)
        });

        // StorageLive expects the local to be dead, and marks it live.
        let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val);
        if !matches!(old, LocalValue::Dead) {
            throw_ub_custom!(fluent::const_eval_double_storage_live);
        }
        Ok(())
    }

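    // Sketch of the unsized path (hypothetical `local` of type `str`; the metadata is
    // the length in bytes, here an arbitrary 11 -- assuming the current names of the
    // `Scalar` constructors):
    //
    //     let len = Scalar::from_target_usize(11, &*ecx.tcx);
    //     ecx.storage_live_dyn(local, MemPlaceMeta::Meta(len))?;
    //
    // Sized locals instead take the cheap path through `storage_live` below, which
    // passes `MemPlaceMeta::None` and yields an `Immediate::Uninit` operand.
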
    /// Mark a local's storage as live, killing any previous content.
    #[inline(always)]
    pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> {
        self.storage_live_dyn(local, MemPlaceMeta::None)
    }

    pub fn storage_dead(&mut self, local: mir::Local) -> InterpResult<'tcx> {
        assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
        trace!("{:?} is now dead", local);

        // It is entirely okay for this local to be already dead (at least that's how we currently
        // generate MIR).
        let old = mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead);
        self.deallocate_local(old)?;
        Ok(())
    }

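    // Sketch: the step loop drives both markers directly from MIR statements, roughly
    // (simplified from the interpreter's statement handling):
    //
    //     match &stmt.kind {
    //         mir::StatementKind::StorageLive(local) => self.storage_live(*local)?,
    //         mir::StatementKind::StorageDead(local) => self.storage_dead(*local)?,
    //         _ => { /* ... */ }
    //     }
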
    #[instrument(skip(self), level = "debug")]
    fn deallocate_local(&mut self, local: LocalValue<M::Provenance>) -> InterpResult<'tcx> {
        if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
            // All locals have a backing allocation, even if the allocation is empty
            // due to the local having ZST type. Hence we can `unwrap`.
            trace!(
                "deallocating local {:?}: {:?}",
                local,
                // Locals always have an `alloc_id` (they are never the result of an int2ptr).
                self.dump_alloc(ptr.provenance.unwrap().get_alloc_id().unwrap())
            );
            self.deallocate_ptr(ptr, None, MemoryKind::Stack)?;
        };
        Ok(())
    }

    /// Call a query that can return `ErrorHandled`. Should be used for statics and other globals.
    /// (`mir::Const`/`ty::Const` have `eval` methods that can be used directly instead.)
    pub fn ctfe_query<T>(
        &self,
        query: impl FnOnce(TyCtxtAt<'tcx>) -> Result<T, ErrorHandled>,
    ) -> Result<T, ErrorHandled> {
        // Use a precise span for better cycle errors.
        query(self.tcx.at(self.cur_span())).map_err(|err| {
            err.emit_note(*self.tcx);
            err
        })
    }

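    // Sketch: any `ErrorHandled`-returning query can be wrapped this way so failures
    // get a note pointing at the current span, e.g. (with `gid` a `GlobalId` as in
    // `eval_global` below):
    //
    //     let raw = self.ctfe_query(|tcx| tcx.eval_to_allocation_raw(self.param_env.and(gid)))?;
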
    /// Evaluate the given static or other global, returning a place pointing at its
    /// backing allocation.
    pub fn eval_global(
        &self,
        instance: ty::Instance<'tcx>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        let gid = GlobalId { instance, promoted: None };
        let val = if self.tcx.is_static(gid.instance.def_id()) {
            // Statics are lazily resolved to their own allocation; no need to run the
            // initializer here.
            let alloc_id = self.tcx.reserve_and_set_static_alloc(gid.instance.def_id());

            let ty = instance.ty(self.tcx.tcx, self.param_env);
            mir::ConstAlloc { alloc_id, ty }
        } else {
            self.ctfe_query(|tcx| tcx.eval_to_allocation_raw(self.param_env.and(gid)))?
        };
        self.raw_const_to_mplace(val)
    }

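    // Sketch (hypothetical `def_id` of a non-generic `static` with a scalar type):
    // resolve it to an instance, get its backing place, and read it like any other
    // place:
    //
    //     let instance = ty::Instance::mono(*ecx.tcx, def_id);
    //     let mplace = ecx.eval_global(instance)?;
    //     let val = ecx.read_immediate(&mplace)?;
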
    pub fn eval_mir_constant(
        &self,
        val: &mir::Const<'tcx>,
        span: Option<Span>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        M::eval_mir_constant(self, *val, span, layout, |ecx, val, span, layout| {
            let const_val = val.eval(*ecx.tcx, ecx.param_env, span).map_err(|err| {
                // FIXME: somehow this is reachable even when POST_MONO_CHECKS is on.
                // Are we not always populating `required_consts`?
                err.emit_note(*ecx.tcx);
                err
            })?;
            ecx.const_val_to_op(const_val, val.ty(), layout)
        })
    }

    #[must_use]
    pub fn dump_place(
        &self,
        place: &PlaceTy<'tcx, M::Provenance>,
    ) -> PlacePrinter<'_, 'mir, 'tcx, M> {
        PlacePrinter { ecx: self, place: *place.place() }
    }

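    // Sketch: `dump_place` is meant for tracing; the returned `PlacePrinter` does all
    // the work in its `Debug` impl, so a typical call site is just:
    //
    //     trace!("{:?}", self.dump_place(&dest));
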
    #[must_use]
    pub fn generate_stacktrace_from_stack(
        stack: &[Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>],
    ) -> Vec<FrameInfo<'tcx>> {
        let mut frames = Vec::new();
        // This deliberately does *not* honor `requires_caller_location` since it is used for much
        // more than just panics.
        for frame in stack.iter().rev() {
            let span = match frame.loc {
                Left(loc) => {
                    // If the stacktrace passes through MIR-inlined source scopes, add them.
                    let mir::SourceInfo { mut span, scope } = *frame.body.source_info(loc);
                    let mut scope_data = &frame.body.source_scopes[scope];
                    while let Some((instance, call_span)) = scope_data.inlined {
                        frames.push(FrameInfo { span, instance });
                        span = call_span;
                        scope_data = &frame.body.source_scopes[scope_data.parent_scope.unwrap()];
                    }
                    span
                }
                Right(span) => span,
            };
            frames.push(FrameInfo { span, instance: frame.instance });
        }
        trace!("generate stacktrace: {:#?}", frames);
        frames
    }

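    // Sketch (hypothetical error-reporting hook): the frames come innermost-first and
    // carry a span plus an instance, so a machine can render the result of
    // `generate_stacktrace` (below) directly:
    //
    //     for frame_info in ecx.generate_stacktrace() {
    //         eprintln!("at {:?}: {}", frame_info.span, frame_info.instance);
    //     }
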
    #[must_use]
    pub fn generate_stacktrace(&self) -> Vec<FrameInfo<'tcx>> {
        Self::generate_stacktrace_from_stack(self.stack())
    }
}

#[doc(hidden)]
/// Helper struct for the `dump_place` function.
pub struct PlacePrinter<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    ecx: &'a InterpCx<'mir, 'tcx, M>,
    place: Place<M::Provenance>,
}

impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> std::fmt::Debug
    for PlacePrinter<'a, 'mir, 'tcx, M>
{
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.place {
            Place::Local { frame, local, offset } => {
                let mut allocs = Vec::new();
                write!(fmt, "{local:?}")?;
                if let Some(offset) = offset {
                    write!(fmt, "+{:#x}", offset.bytes())?;
                }
                if frame != self.ecx.frame_idx() {
                    write!(fmt, " ({} frames up)", self.ecx.frame_idx() - frame)?;
                }
                write!(fmt, ":")?;

                match self.ecx.stack()[frame].locals[local].value {
                    LocalValue::Dead => write!(fmt, " is dead")?,
                    LocalValue::Live(Operand::Immediate(Immediate::Uninit)) => {
                        write!(fmt, " is uninitialized")?
                    }
                    LocalValue::Live(Operand::Indirect(mplace)) => {
                        write!(
                            fmt,
                            " by {} ref {:?}:",
                            match mplace.meta {
                                MemPlaceMeta::Meta(meta) => format!(" meta({meta:?})"),
                                MemPlaceMeta::None => String::new(),
                            },
                            mplace.ptr,
                        )?;
                        allocs.extend(mplace.ptr.provenance.map(Provenance::get_alloc_id));
                    }
                    LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
                        write!(fmt, " {val:?}")?;
                        if let Scalar::Ptr(ptr, _size) = val {
                            allocs.push(ptr.provenance.get_alloc_id());
                        }
                    }
                    LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
                        write!(fmt, " ({val1:?}, {val2:?})")?;
                        if let Scalar::Ptr(ptr, _size) = val1 {
                            allocs.push(ptr.provenance.get_alloc_id());
                        }
                        if let Scalar::Ptr(ptr, _size) = val2 {
                            allocs.push(ptr.provenance.get_alloc_id());
                        }
                    }
                }

                write!(fmt, ": {:?}", self.ecx.dump_allocs(allocs.into_iter().flatten().collect()))
            }
            Place::Ptr(mplace) => match mplace.ptr.provenance.and_then(Provenance::get_alloc_id) {
                Some(alloc_id) => {
                    write!(fmt, "by ref {:?}: {:?}", mplace.ptr, self.ecx.dump_alloc(alloc_id))
                }
                ptr => write!(fmt, " integral by ref: {ptr:?}"),
            },
        }
    }
}