1
Fork 0

Move marking-locals-live out of `push_stack_frame`, so it happens together with argument passing

This entirely avoids even creating unsized locals in the `Immediate::Uninit` state.
This commit is contained in:
Ralf Jung 2023-08-06 18:40:37 +02:00
parent bdd5855b8e
commit a09df43d9f
15 changed files with 188 additions and 105 deletions

View file

@ -384,7 +384,7 @@ const_eval_unreachable_unwind =
const_eval_unsigned_offset_from_overflow = const_eval_unsigned_offset_from_overflow =
`ptr_offset_from_unsigned` called when first pointer has smaller offset than second: {$a_offset} < {$b_offset} `ptr_offset_from_unsigned` called when first pointer has smaller offset than second: {$a_offset} < {$b_offset}
const_eval_unsized_local = unsized locals are not supported
const_eval_unstable_const_fn = `{$def_path}` is not yet stable as a const fn const_eval_unstable_const_fn = `{$def_path}` is not yet stable as a const fn
const_eval_unstable_in_stable = const_eval_unstable_in_stable =

View file

@ -61,6 +61,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
&ret.clone().into(), &ret.clone().into(),
StackPopCleanup::Root { cleanup: false }, StackPopCleanup::Root { cleanup: false },
)?; )?;
ecx.storage_live_for_always_live_locals()?;
// The main interpreter loop. // The main interpreter loop.
while ecx.step()? {} while ecx.step()? {}

View file

@ -795,6 +795,7 @@ impl ReportErrorExt for UnsupportedOpInfo {
use crate::fluent_generated::*; use crate::fluent_generated::*;
match self { match self {
UnsupportedOpInfo::Unsupported(s) => s.clone().into(), UnsupportedOpInfo::Unsupported(s) => s.clone().into(),
UnsupportedOpInfo::UnsizedLocal => const_eval_unsized_local,
UnsupportedOpInfo::OverwritePartialPointer(_) => const_eval_partial_pointer_overwrite, UnsupportedOpInfo::OverwritePartialPointer(_) => const_eval_partial_pointer_overwrite,
UnsupportedOpInfo::ReadPartialPointer(_) => const_eval_partial_pointer_copy, UnsupportedOpInfo::ReadPartialPointer(_) => const_eval_partial_pointer_copy,
UnsupportedOpInfo::ReadPointerAsInt(_) => const_eval_read_pointer_as_int, UnsupportedOpInfo::ReadPointerAsInt(_) => const_eval_read_pointer_as_int,
@ -814,7 +815,7 @@ impl ReportErrorExt for UnsupportedOpInfo {
// `ReadPointerAsInt(Some(info))` is never printed anyway, it only serves as an error to // `ReadPointerAsInt(Some(info))` is never printed anyway, it only serves as an error to
// be further processed by validity checking which then turns it into something nice to // be further processed by validity checking which then turns it into something nice to
// print. So it's not worth the effort of having diagnostics that can print the `info`. // print. So it's not worth the effort of having diagnostics that can print the `info`.
Unsupported(_) | ReadPointerAsInt(_) => {} UnsizedLocal | Unsupported(_) | ReadPointerAsInt(_) => {}
OverwritePartialPointer(ptr) | ReadPartialPointer(ptr) => { OverwritePartialPointer(ptr) | ReadPartialPointer(ptr) => {
builder.set_arg("ptr", ptr); builder.set_arg("ptr", ptr);
} }

View file

@ -158,7 +158,9 @@ pub enum StackPopCleanup {
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct LocalState<'tcx, Prov: Provenance = AllocId> { pub struct LocalState<'tcx, Prov: Provenance = AllocId> {
pub value: LocalValue<Prov>, pub value: LocalValue<Prov>,
/// Don't modify if `Some`, this is only used to prevent computing the layout twice /// Don't modify if `Some`, this is only used to prevent computing the layout twice.
/// Layout needs to be computed lazily because ConstProp wants to run on frames where we can't
/// compute the layout of all locals.
pub layout: Cell<Option<TyAndLayout<'tcx>>>, pub layout: Cell<Option<TyAndLayout<'tcx>>>,
} }
@ -483,7 +485,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
} }
#[inline(always)] #[inline(always)]
pub(super) fn body(&self) -> &'mir mir::Body<'tcx> { pub fn body(&self) -> &'mir mir::Body<'tcx> {
self.frame().body self.frame().body
} }
@ -705,15 +707,15 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
return_to_block: StackPopCleanup, return_to_block: StackPopCleanup,
) -> InterpResult<'tcx> { ) -> InterpResult<'tcx> {
trace!("body: {:#?}", body); trace!("body: {:#?}", body);
let dead_local = LocalState { value: LocalValue::Dead, layout: Cell::new(None) };
let locals = IndexVec::from_elem(dead_local, &body.local_decls);
// First push a stack frame so we have access to the local args // First push a stack frame so we have access to the local args
let pre_frame = Frame { let pre_frame = Frame {
body, body,
loc: Right(body.span), // Span used for errors caused during preamble. loc: Right(body.span), // Span used for errors caused during preamble.
return_to_block, return_to_block,
return_place: return_place.clone(), return_place: return_place.clone(),
// empty local array, we fill it in below, after we are inside the stack frame and locals,
// all methods actually know about the frame
locals: IndexVec::new(),
instance, instance,
tracing_span: SpanGuard::new(), tracing_span: SpanGuard::new(),
extra: (), extra: (),
@ -728,19 +730,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.eval_mir_constant(&ct, Some(span), None)?; self.eval_mir_constant(&ct, Some(span), None)?;
} }
// Most locals are initially dead.
let dummy = LocalState { value: LocalValue::Dead, layout: Cell::new(None) };
let mut locals = IndexVec::from_elem(dummy, &body.local_decls);
// Now mark those locals as live that have no `Storage*` annotations.
let always_live = always_storage_live_locals(self.body());
for local in locals.indices() {
if always_live.contains(local) {
locals[local].value = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
}
}
// done // done
self.frame_mut().locals = locals;
M::after_stack_push(self)?; M::after_stack_push(self)?;
self.frame_mut().loc = Left(mir::Location::START); self.frame_mut().loc = Left(mir::Location::START);
@ -907,11 +897,29 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
} }
} }
/// In the current stack frame, mark all locals as live that are not arguments and don't have
/// `Storage*` annotations (this includes the return place).
pub fn storage_live_for_always_live_locals(&mut self) -> InterpResult<'tcx> {
self.storage_live(mir::RETURN_PLACE)?;
let body = self.body();
let always_live = always_storage_live_locals(body);
for local in body.vars_and_temps_iter() {
if always_live.contains(local) {
self.storage_live(local)?;
}
}
Ok(())
}
/// Mark a storage as live, killing the previous content. /// Mark a storage as live, killing the previous content.
pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> { pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> {
assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
trace!("{:?} is now live", local); trace!("{:?} is now live", local);
if self.layout_of_local(self.frame(), local, None)?.is_unsized() {
throw_unsup!(UnsizedLocal);
}
let local_val = LocalValue::Live(Operand::Immediate(Immediate::Uninit)); let local_val = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
// StorageLive expects the local to be dead, and marks it live. // StorageLive expects the local to be dead, and marks it live.
let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val); let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val);

View file

@ -33,7 +33,7 @@ pub enum Immediate<Prov: Provenance = AllocId> {
/// A pair of two scalar value (must have `ScalarPair` ABI where both fields are /// A pair of two scalar value (must have `ScalarPair` ABI where both fields are
/// `Scalar::Initialized`). /// `Scalar::Initialized`).
ScalarPair(Scalar<Prov>, Scalar<Prov>), ScalarPair(Scalar<Prov>, Scalar<Prov>),
/// A value of fully uninitialized memory. Can have arbitrary size and layout. /// A value of fully uninitialized memory. Can have arbitrary size and layout, but must be sized.
Uninit, Uninit,
} }
@ -190,16 +190,19 @@ impl<'tcx, Prov: Provenance> From<ImmTy<'tcx, Prov>> for OpTy<'tcx, Prov> {
impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> { impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> {
#[inline] #[inline]
pub fn from_scalar(val: Scalar<Prov>, layout: TyAndLayout<'tcx>) -> Self { pub fn from_scalar(val: Scalar<Prov>, layout: TyAndLayout<'tcx>) -> Self {
debug_assert!(layout.abi.is_scalar(), "`ImmTy::from_scalar` on non-scalar layout");
ImmTy { imm: val.into(), layout } ImmTy { imm: val.into(), layout }
} }
#[inline] #[inline(always)]
pub fn from_immediate(imm: Immediate<Prov>, layout: TyAndLayout<'tcx>) -> Self { pub fn from_immediate(imm: Immediate<Prov>, layout: TyAndLayout<'tcx>) -> Self {
debug_assert!(layout.is_sized(), "immediates must be sized");
ImmTy { imm, layout } ImmTy { imm, layout }
} }
#[inline] #[inline]
pub fn uninit(layout: TyAndLayout<'tcx>) -> Self { pub fn uninit(layout: TyAndLayout<'tcx>) -> Self {
debug_assert!(layout.is_sized(), "immediates must be sized");
ImmTy { imm: Immediate::Uninit, layout } ImmTy { imm: Immediate::Uninit, layout }
} }
@ -322,15 +325,12 @@ impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for OpTy<'tcx, Pr
self.layout self.layout
} }
#[inline]
fn meta(&self) -> InterpResult<'tcx, MemPlaceMeta<Prov>> { fn meta(&self) -> InterpResult<'tcx, MemPlaceMeta<Prov>> {
Ok(match self.as_mplace_or_imm() { Ok(match self.as_mplace_or_imm() {
Left(mplace) => mplace.meta, Left(mplace) => mplace.meta,
Right(_) => { Right(_) => {
if self.layout.is_unsized() { debug_assert!(self.layout.is_sized(), "unsized immediates are not a thing");
// Unsized immediate OpTy cannot occur. We create a MemPlace for all unsized locals during argument passing.
// However, ConstProp doesn't do that, so we can run into this nonsense situation.
throw_inval!(ConstPropNonsense);
}
MemPlaceMeta::None MemPlaceMeta::None
} }
}) })
@ -346,9 +346,10 @@ impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for OpTy<'tcx, Pr
match self.as_mplace_or_imm() { match self.as_mplace_or_imm() {
Left(mplace) => Ok(mplace.offset_with_meta(offset, meta, layout, ecx)?.into()), Left(mplace) => Ok(mplace.offset_with_meta(offset, meta, layout, ecx)?.into()),
Right(imm) => { Right(imm) => {
assert!(!meta.has_meta()); // no place to store metadata here debug_assert!(layout.is_sized(), "unsized immediates are not a thing");
assert_matches!(meta, MemPlaceMeta::None); // no place to store metadata here
// Every part of an uninit is uninit. // Every part of an uninit is uninit.
Ok(imm.offset(offset, layout, ecx)?.into()) Ok(imm.offset_(offset, layout, ecx).into())
} }
} }
} }
@ -576,6 +577,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> { ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
let layout = self.layout_of_local(frame, local, layout)?; let layout = self.layout_of_local(frame, local, layout)?;
let op = *frame.locals[local].access()?; let op = *frame.locals[local].access()?;
if matches!(op, Operand::Immediate(_)) {
if layout.is_unsized() {
// ConstProp marks *all* locals as `Immediate::Uninit` since it cannot
// efficiently check whether they are sized. We have to catch that case here.
throw_inval!(ConstPropNonsense);
}
}
Ok(OpTy { op, layout, align: Some(layout.align.abi) }) Ok(OpTy { op, layout, align: Some(layout.align.abi) })
} }
@ -589,16 +597,15 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
match place.as_mplace_or_local() { match place.as_mplace_or_local() {
Left(mplace) => Ok(mplace.into()), Left(mplace) => Ok(mplace.into()),
Right((frame, local, offset)) => { Right((frame, local, offset)) => {
debug_assert!(place.layout.is_sized()); // only sized locals can ever be `Place::Local`.
let base = self.local_to_op(&self.stack()[frame], local, None)?; let base = self.local_to_op(&self.stack()[frame], local, None)?;
let mut field = if let Some(offset) = offset { let mut field = match offset {
// This got offset. We can be sure that the field is sized. Some(offset) => base.offset(offset, place.layout, self)?,
base.offset(offset, place.layout, self)? None => {
} else { // In the common case this hasn't been projected.
assert_eq!(place.layout, base.layout); debug_assert_eq!(place.layout, base.layout);
// Unsized cases are possible here since an unsized local will be a
// `Place::Local` until the first projection calls `place_to_op` to extract the
// underlying mplace.
base base
}
}; };
field.align = Some(place.align); field.align = Some(place.align);
Ok(field) Ok(field)

View file

@ -41,6 +41,7 @@ impl<Prov: Provenance> MemPlaceMeta<Prov> {
} }
} }
#[inline(always)]
pub fn has_meta(self) -> bool { pub fn has_meta(self) -> bool {
match self { match self {
Self::Meta(_) => true, Self::Meta(_) => true,
@ -255,15 +256,12 @@ impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for PlaceTy<'tcx,
self.layout self.layout
} }
#[inline]
fn meta(&self) -> InterpResult<'tcx, MemPlaceMeta<Prov>> { fn meta(&self) -> InterpResult<'tcx, MemPlaceMeta<Prov>> {
Ok(match self.as_mplace_or_local() { Ok(match self.as_mplace_or_local() {
Left(mplace) => mplace.meta, Left(mplace) => mplace.meta,
Right(_) => { Right(_) => {
if self.layout.is_unsized() { debug_assert!(self.layout.is_sized(), "unsized locals should live in memory");
// Unsized `Place::Local` cannot occur. We create a MemPlace for all unsized locals during argument passing.
// However, ConstProp doesn't do that, so we can run into this nonsense situation.
throw_inval!(ConstPropNonsense);
}
MemPlaceMeta::None MemPlaceMeta::None
} }
}) })
@ -331,7 +329,7 @@ impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
impl<'tcx, Prov: Provenance + 'static> PlaceTy<'tcx, Prov> { impl<'tcx, Prov: Provenance + 'static> PlaceTy<'tcx, Prov> {
/// A place is either an mplace or some local. /// A place is either an mplace or some local.
#[inline] #[inline(always)]
pub fn as_mplace_or_local( pub fn as_mplace_or_local(
&self, &self,
) -> Either<MPlaceTy<'tcx, Prov>, (usize, mir::Local, Option<Size>)> { ) -> Either<MPlaceTy<'tcx, Prov>, (usize, mir::Local, Option<Size>)> {
@ -535,9 +533,19 @@ where
// So we eagerly check here if this local has an MPlace, and if yes we use it. // So we eagerly check here if this local has an MPlace, and if yes we use it.
let frame_ref = &self.stack()[frame]; let frame_ref = &self.stack()[frame];
let layout = self.layout_of_local(frame_ref, local, None)?; let layout = self.layout_of_local(frame_ref, local, None)?;
let place = match frame_ref.locals[local].access()? { let place = if layout.is_sized() {
Operand::Immediate(_) => Place::Local { frame, local, offset: None }, // We can just always use the `Local` for sized values.
Place::Local { frame, local, offset: None }
} else {
// Unsized `Local` isn't okay (we cannot store the metadata).
match frame_ref.locals[local].access()? {
Operand::Immediate(_) => {
// ConstProp marks *all* locals as `Immediate::Uninit` since it cannot
// efficiently check whether they are sized. We have to catch that case here.
throw_inval!(ConstPropNonsense);
}
Operand::Indirect(mplace) => Place::Ptr(*mplace), Operand::Indirect(mplace) => Place::Ptr(*mplace),
}
}; };
Ok(PlaceTy { place, layout, align: layout.align.abi }) Ok(PlaceTy { place, layout, align: layout.align.abi })
} }
@ -896,9 +904,7 @@ where
// that has different alignment than the outer field. // that has different alignment than the outer field.
let local_layout = let local_layout =
self.layout_of_local(&self.stack()[frame], local, None)?; self.layout_of_local(&self.stack()[frame], local, None)?;
if local_layout.is_unsized() { assert!(local_layout.is_sized(), "unsized locals cannot be immediate");
throw_unsup_format!("unsized locals are not supported");
}
let mplace = self.allocate(local_layout, MemoryKind::Stack)?; let mplace = self.allocate(local_layout, MemoryKind::Stack)?;
// Preserve old value. (As an optimization, we can skip this if it was uninit.) // Preserve old value. (As an optimization, we can skip this if it was uninit.)
if !matches!(local_val, Immediate::Uninit) { if !matches!(local_val, Immediate::Uninit) {

View file

@ -1,19 +1,21 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::mem;
use either::Either; use either::Either;
use rustc_ast::ast::InlineAsmOptions; use rustc_ast::ast::InlineAsmOptions;
use rustc_middle::mir::ProjectionElem;
use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout}; use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout};
use rustc_middle::ty::Instance; use rustc_middle::ty::Instance;
use rustc_middle::{ use rustc_middle::{
mir, mir,
ty::{self, Ty}, ty::{self, Ty},
}; };
use rustc_target::abi;
use rustc_target::abi::call::{ArgAbi, ArgAttribute, ArgAttributes, FnAbi, PassMode}; use rustc_target::abi::call::{ArgAbi, ArgAttribute, ArgAttributes, FnAbi, PassMode};
use rustc_target::abi::{self, FieldIdx};
use rustc_target::spec::abi::Abi; use rustc_target::spec::abi::Abi;
use super::{ use super::{
AllocId, FnVal, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy, Machine, MemoryKind, OpTy, AllocId, FnVal, ImmTy, InterpCx, InterpResult, LocalValue, MPlaceTy, Machine, MemoryKind, OpTy,
Operand, PlaceTy, Provenance, Scalar, StackPopCleanup, Operand, PlaceTy, Provenance, Scalar, StackPopCleanup,
}; };
use crate::fluent_generated as fluent; use crate::fluent_generated as fluent;
@ -358,23 +360,28 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Item = (&'x FnArg<'tcx, M::Provenance>, &'y ArgAbi<'tcx, Ty<'tcx>>), Item = (&'x FnArg<'tcx, M::Provenance>, &'y ArgAbi<'tcx, Ty<'tcx>>),
>, >,
callee_abi: &ArgAbi<'tcx, Ty<'tcx>>, callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
callee_arg: &PlaceTy<'tcx, M::Provenance>, callee_arg: &mir::Place<'tcx>,
callee_ty: Ty<'tcx>,
already_live: bool,
) -> InterpResult<'tcx> ) -> InterpResult<'tcx>
where where
'tcx: 'x, 'tcx: 'x,
'tcx: 'y, 'tcx: 'y,
{ {
if matches!(callee_abi.mode, PassMode::Ignore) { if matches!(callee_abi.mode, PassMode::Ignore) {
// This one is skipped. // This one is skipped. Still must be made live though!
if !already_live {
self.storage_live(callee_arg.as_local().unwrap())?;
}
return Ok(()); return Ok(());
} }
// Find next caller arg. // Find next caller arg.
let Some((caller_arg, caller_abi)) = caller_args.next() else { let Some((caller_arg, caller_abi)) = caller_args.next() else {
throw_ub_custom!(fluent::const_eval_not_enough_caller_args); throw_ub_custom!(fluent::const_eval_not_enough_caller_args);
}; };
// Now, check // Check compatibility
if !Self::check_argument_compat(caller_abi, callee_abi) { if !Self::check_argument_compat(caller_abi, callee_abi) {
let callee_ty = format!("{}", callee_arg.layout.ty); let callee_ty = format!("{}", callee_ty);
let caller_ty = format!("{}", caller_arg.layout().ty); let caller_ty = format!("{}", caller_arg.layout().ty);
throw_ub_custom!( throw_ub_custom!(
fluent::const_eval_incompatible_types, fluent::const_eval_incompatible_types,
@ -386,35 +393,37 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// will later protect the source it comes from. This means the callee cannot observe if we // will later protect the source it comes from. This means the callee cannot observe if we
// did in-place of by-copy argument passing, except for pointer equality tests. // did in-place of by-copy argument passing, except for pointer equality tests.
let caller_arg_copy = self.copy_fn_arg(&caller_arg)?; let caller_arg_copy = self.copy_fn_arg(&caller_arg)?;
// Special handling for unsized parameters. if !already_live {
// Special handling for unsized parameters: they are harder to make live.
if caller_arg_copy.layout.is_unsized() { if caller_arg_copy.layout.is_unsized() {
// `check_argument_compat` ensures that both have the same type, so we know they will use the metadata the same way. // `check_argument_compat` ensures that both have the same type, so we know they will use the metadata the same way.
assert_eq!(caller_arg_copy.layout.ty, callee_arg.layout.ty); assert_eq!(caller_arg_copy.layout.ty, callee_ty);
// We have to properly pre-allocate the memory for the callee. // We have to properly pre-allocate the memory for the callee.
// So let's tear down some abstractions. // So let's tear down some abstractions.
// This all has to be in memory, there are no immediate unsized values. // This all has to be in memory, there are no immediate unsized values.
let src = caller_arg_copy.assert_mem_place(); let src = caller_arg_copy.assert_mem_place();
// The destination cannot be one of these "spread args". // The destination cannot be one of these "spread args".
let (dest_frame, dest_local, dest_offset) = callee_arg let dest_local = callee_arg.as_local().expect("unsized arguments cannot be spread");
.as_mplace_or_local()
.right()
.expect("callee fn arguments must be locals");
// We are just initializing things, so there can't be anything here yet.
assert!(matches!(
*self.local_to_op(&self.stack()[dest_frame], dest_local, None)?,
Operand::Immediate(Immediate::Uninit)
));
assert_eq!(dest_offset, None);
// Allocate enough memory to hold `src`. // Allocate enough memory to hold `src`.
let dest_place = self.allocate_dyn(src.layout, MemoryKind::Stack, src.meta)?; let dest_place = self.allocate_dyn(src.layout, MemoryKind::Stack, src.meta)?;
// Update the local to be that new place. // Update the local to be that new place. This is essentially a "dyn-sized StorageLive".
*M::access_local_mut(self, dest_frame, dest_local)? = Operand::Indirect(*dest_place); let old = mem::replace(
&mut self.frame_mut().locals[dest_local].value,
LocalValue::Live(Operand::Indirect(*dest_place)),
);
assert!(matches!(old, LocalValue::Dead));
} else {
// Just make the local live.
self.storage_live(callee_arg.as_local().unwrap())?;
} }
}
// Now we can finally actually evaluate the callee place.
let callee_arg = self.eval_place(*callee_arg)?;
// We allow some transmutes here. // We allow some transmutes here.
// FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This // FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This
// is true for all `copy_op`, but there are a lot of special cases for argument passing // is true for all `copy_op`, but there are a lot of special cases for argument passing
// specifically.) // specifically.)
self.copy_op(&caller_arg_copy, callee_arg, /*allow_transmute*/ true)?; self.copy_op(&caller_arg_copy, &callee_arg, /*allow_transmute*/ true)?;
// If this was an in-place pass, protect the place it comes from for the duration of the call. // If this was an in-place pass, protect the place it comes from for the duration of the call.
if let FnArg::InPlace(place) = caller_arg { if let FnArg::InPlace(place) = caller_arg {
M::protect_in_place_function_argument(self, place)?; M::protect_in_place_function_argument(self, place)?;
@ -600,18 +609,47 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// not advance `caller_iter` for ZSTs. // not advance `caller_iter` for ZSTs.
let mut callee_args_abis = callee_fn_abi.args.iter(); let mut callee_args_abis = callee_fn_abi.args.iter();
for local in body.args_iter() { for local in body.args_iter() {
let dest = self.eval_place(mir::Place::from(local))?; // Construct the destination place for this argument. At this point all
// locals are still dead, so we cannot construct a `PlaceTy`.
let dest = mir::Place::from(local);
// `layout_of_local` does more than just the substitution we need to get the
// type, but the result gets cached so this avoids calling the substitution
// query *again* the next time this local is accessed.
let ty = self.layout_of_local(self.frame(), local, None)?.ty;
if Some(local) == body.spread_arg { if Some(local) == body.spread_arg {
// Make the local live once, then fill in the value field by field.
self.storage_live(local)?;
// Must be a tuple // Must be a tuple
for i in 0..dest.layout.fields.count() { let ty::Tuple(fields) = ty.kind() else {
let dest = self.project_field(&dest, i)?; span_bug!(
self.cur_span(),
"non-tuple type for `spread_arg`: {ty:?}"
)
};
for (i, field_ty) in fields.iter().enumerate() {
let dest = dest.project_deeper(
&[ProjectionElem::Field(FieldIdx::from_usize(i), field_ty)],
*self.tcx,
);
let callee_abi = callee_args_abis.next().unwrap(); let callee_abi = callee_args_abis.next().unwrap();
self.pass_argument(&mut caller_args, callee_abi, &dest)?; self.pass_argument(
&mut caller_args,
callee_abi,
&dest,
field_ty,
/* already_live */ true,
)?;
} }
} else { } else {
// Normal argument // Normal argument. Cannot mark it as live yet, it might be unsized!
let callee_abi = callee_args_abis.next().unwrap(); let callee_abi = callee_args_abis.next().unwrap();
self.pass_argument(&mut caller_args, callee_abi, &dest)?; self.pass_argument(
&mut caller_args,
callee_abi,
&dest,
ty,
/* already_live */ false,
)?;
} }
} }
// If the callee needs a caller location, pretend we consume one more argument from the ABI. // If the callee needs a caller location, pretend we consume one more argument from the ABI.
@ -644,6 +682,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Nothing to do for locals, they are always properly allocated and aligned. // Nothing to do for locals, they are always properly allocated and aligned.
} }
M::protect_in_place_function_argument(self, destination)?; M::protect_in_place_function_argument(self, destination)?;
// Don't forget to mark "initially live" locals as live.
self.storage_live_for_always_live_locals()?;
}; };
match res { match res {
Err(err) => { Err(err) => {

View file

@ -415,6 +415,8 @@ pub enum UnsupportedOpInfo {
/// Free-form case. Only for errors that are never caught! /// Free-form case. Only for errors that are never caught!
// FIXME still use translatable diagnostics // FIXME still use translatable diagnostics
Unsupported(String), Unsupported(String),
/// Unsized local variables.
UnsizedLocal,
// //
// The variants below are only reachable from CTFE/const prop, miri will never emit them. // The variants below are only reachable from CTFE/const prop, miri will never emit them.
// //

View file

@ -376,6 +376,16 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
) )
.expect("failed to push initial stack frame"); .expect("failed to push initial stack frame");
for local in body.local_decls.indices() {
// Mark everything initially live.
// This is somewhat dicey since some of them might be unsized and it is incoherent to
// mark those as live... We rely on `local_to_place`/`local_to_op` in the interpreter
// stopping us before those unsized immediates can cause issues deeper in the
// interpreter.
ecx.frame_mut().locals[local].value =
LocalValue::Live(interpret::Operand::Immediate(Immediate::Uninit));
}
ConstPropagator { ecx, tcx, param_env, local_decls: &dummy_body.local_decls } ConstPropagator { ecx, tcx, param_env, local_decls: &dummy_body.local_decls }
} }

View file

@ -206,6 +206,16 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
) )
.expect("failed to push initial stack frame"); .expect("failed to push initial stack frame");
for local in body.local_decls.indices() {
// Mark everything initially live.
// This is somewhat dicey since some of them might be unsized and it is incoherent to
// mark those as live... We rely on `local_to_place`/`local_to_op` in the interpreter
// stopping us before those unsized immediates can cause issues deeper in the
// interpreter.
ecx.frame_mut().locals[local].value =
LocalValue::Live(interpret::Operand::Immediate(Immediate::Uninit));
}
ConstPropagator { ConstPropagator {
ecx, ecx,
tcx, tcx,

View file

@ -283,7 +283,7 @@ pub fn report_error<'tcx, 'mir>(
"resource exhaustion", "resource exhaustion",
Unsupported( Unsupported(
// We list only the ones that can actually happen. // We list only the ones that can actually happen.
UnsupportedOpInfo::Unsupported(_) UnsupportedOpInfo::Unsupported(_) | UnsupportedOpInfo::UnsizedLocal
) => ) =>
"unsupported operation", "unsupported operation",
InvalidProgram( InvalidProgram(

View file

@ -14,7 +14,7 @@ use rustc_middle::mir;
use rustc_middle::ty::{ use rustc_middle::ty::{
self, self,
layout::{IntegerExt as _, LayoutOf, TyAndLayout}, layout::{IntegerExt as _, LayoutOf, TyAndLayout},
List, Ty, TyCtxt, Ty, TyCtxt,
}; };
use rustc_span::{def_id::CrateNum, sym, Span, Symbol}; use rustc_span::{def_id::CrateNum, sym, Span, Symbol};
use rustc_target::abi::{Align, FieldIdx, FieldsShape, Integer, Size, Variants}; use rustc_target::abi::{Align, FieldIdx, FieldsShape, Integer, Size, Variants};
@ -282,13 +282,6 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
Ok(ptr.addr().bytes() == 0) Ok(ptr.addr().bytes() == 0)
} }
/// Get the `Place` for a local
fn local_place(&self, local: mir::Local) -> InterpResult<'tcx, PlaceTy<'tcx, Provenance>> {
let this = self.eval_context_ref();
let place = mir::Place { local, projection: List::empty() };
this.eval_place(place)
}
/// Generate some random bytes, and write them to `dest`. /// Generate some random bytes, and write them to `dest`.
fn gen_random(&mut self, ptr: Pointer<Option<Provenance>>, len: u64) -> InterpResult<'tcx> { fn gen_random(&mut self, ptr: Pointer<Option<Provenance>>, len: u64) -> InterpResult<'tcx> {
// Some programs pass in a null pointer and a length of 0 // Some programs pass in a null pointer and a length of 0
@ -350,17 +343,21 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
// Initialize arguments. // Initialize arguments.
let mut callee_args = this.frame().body.args_iter(); let mut callee_args = this.frame().body.args_iter();
for arg in args { for arg in args {
let callee_arg = this.local_place( let local = callee_args
callee_args
.next() .next()
.ok_or_else(|| err_ub_format!("callee has fewer arguments than expected"))?, .ok_or_else(|| err_ub_format!("callee has fewer arguments than expected"))?;
)?; // Make the local live, and insert the initial value.
this.storage_live(local)?;
let callee_arg = this.local_to_place(this.frame_idx(), local)?;
this.write_immediate(*arg, &callee_arg)?; this.write_immediate(*arg, &callee_arg)?;
} }
if callee_args.next().is_some() { if callee_args.next().is_some() {
throw_ub_format!("callee has more arguments than expected"); throw_ub_format!("callee has more arguments than expected");
} }
// Initialize remaining locals.
this.storage_live_for_always_live_locals()?;
Ok(()) Ok(())
} }

View file

@ -1,6 +1,6 @@
//@ignore-32bit //@ignore-32bit
fn main() { fn main() {
let _fat: [u8; (1 << 61) + (1 << 31)]; let _fat: [u8; (1 << 61) + (1 << 31)]; //~ ERROR: post-monomorphization error
_fat = [0; (1u64 << 61) as usize + (1u64 << 31) as usize]; //~ ERROR: post-monomorphization error _fat = [0; (1u64 << 61) as usize + (1u64 << 31) as usize];
} }

View file

@ -1,8 +1,8 @@
error: post-monomorphization error: values of the type `[u8; 2305843011361177600]` are too big for the current architecture error: post-monomorphization error: values of the type `[u8; 2305843011361177600]` are too big for the current architecture
--> $DIR/type-too-large.rs:LL:CC --> $DIR/type-too-large.rs:LL:CC
| |
LL | _fat = [0; (1u64 << 61) as usize + (1u64 << 31) as usize]; LL | let _fat: [u8; (1 << 61) + (1 << 31)];
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ values of the type `[u8; 2305843011361177600]` are too big for the current architecture | ^^^^ values of the type `[u8; 2305843011361177600]` are too big for the current architecture
| |
= note: inside `main` at $DIR/type-too-large.rs:LL:CC = note: inside `main` at $DIR/type-too-large.rs:LL:CC

View file

@ -2,7 +2,7 @@ error: unsupported operation: unsized locals are not supported
--> $DIR/unsized-local.rs:LL:CC --> $DIR/unsized-local.rs:LL:CC
| |
LL | let x = *(Box::new(A) as Box<dyn Foo>); LL | let x = *(Box::new(A) as Box<dyn Foo>);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsized locals are not supported | ^ unsized locals are not supported
| |
= help: this is likely not a bug in the program; it indicates that the program performed an operation that the interpreter does not support = help: this is likely not a bug in the program; it indicates that the program performed an operation that the interpreter does not support
= note: BACKTRACE: = note: BACKTRACE: