1
Fork 0

interpret: remove LocalValue::Unallocated, add Operand::Uninit

Operand::Uninit is an *allocated* operand that is fully uninitialized.
This lets us lazily allocate the actual backing store of *all* locals (no matter their ABI).

I also reordered things in pop_stack_frame at the same time.
I should probably have made that a separate commit...
This commit is contained in:
Ralf Jung 2022-07-02 16:24:42 -04:00
parent 049308cf8b
commit 8ef0caa23c
8 changed files with 179 additions and 175 deletions

View file

@ -189,6 +189,7 @@ pub(super) fn op_to_const<'tcx>(
let len: usize = len.try_into().unwrap(); let len: usize = len.try_into().unwrap();
ConstValue::Slice { data, start, end: start + len } ConstValue::Slice { data, start, end: start + len }
} }
Immediate::Uninit => to_const_value(&op.assert_mem_place()),
}, },
} }
} }

View file

@ -153,7 +153,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
assert_eq!(dest_layout.size, self.pointer_size()); assert_eq!(dest_layout.size, self.pointer_size());
assert!(src.layout.ty.is_unsafe_ptr()); assert!(src.layout.ty.is_unsafe_ptr());
return match **src { return match **src {
Immediate::ScalarPair(data, _) => Ok(data.into()), Immediate::ScalarPair(data, _) => Ok(data.check_init()?.into()),
Immediate::Scalar(..) => span_bug!( Immediate::Scalar(..) => span_bug!(
self.cur_span(), self.cur_span(),
"{:?} input to a fat-to-thin cast ({:?} -> {:?})", "{:?} input to a fat-to-thin cast ({:?} -> {:?})",
@ -161,6 +161,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
src.layout.ty, src.layout.ty,
cast_ty cast_ty
), ),
Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
}; };
} }
} }

View file

@ -112,6 +112,8 @@ pub struct Frame<'mir, 'tcx, Tag: Provenance = AllocId, Extra = ()> {
/// The locals are stored as `Option<Value>`s. /// The locals are stored as `Option<Value>`s.
/// `None` represents a local that is currently dead, while a live local /// `None` represents a local that is currently dead, while a live local
/// can either directly contain `Scalar` or refer to some part of an `Allocation`. /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
///
/// Do *not* access this directly; always go through the machine hook!
pub locals: IndexVec<mir::Local, LocalState<'tcx, Tag>>, pub locals: IndexVec<mir::Local, LocalState<'tcx, Tag>>,
/// The span of the `tracing` crate is stored here. /// The span of the `tracing` crate is stored here.
@ -179,10 +181,6 @@ pub struct LocalState<'tcx, Tag: Provenance = AllocId> {
pub enum LocalValue<Tag: Provenance = AllocId> { pub enum LocalValue<Tag: Provenance = AllocId> {
/// This local is not currently alive, and cannot be used at all. /// This local is not currently alive, and cannot be used at all.
Dead, Dead,
/// This local is alive but not yet allocated. It cannot be read from or have its address taken,
/// and will be allocated on the first write. This is to support unsized locals, where we cannot
/// know their size in advance.
Unallocated,
/// A normal, live local. /// A normal, live local.
/// Mostly for convenience, we re-use the `Operand` type here. /// Mostly for convenience, we re-use the `Operand` type here.
/// This is an optimization over just always having a pointer here; /// This is an optimization over just always having a pointer here;
@ -196,12 +194,10 @@ impl<'tcx, Tag: Provenance + 'static> LocalState<'tcx, Tag> {
/// ///
/// Note: This may only be invoked from the `Machine::access_local` hook and not from /// Note: This may only be invoked from the `Machine::access_local` hook and not from
/// anywhere else. You may be invalidating machine invariants if you do! /// anywhere else. You may be invalidating machine invariants if you do!
pub fn access(&self) -> InterpResult<'tcx, Operand<Tag>> { #[inline]
match self.value { pub fn access(&self) -> InterpResult<'tcx, &Operand<Tag>> {
LocalValue::Dead => throw_ub!(DeadLocal), match &self.value {
LocalValue::Unallocated => { LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
bug!("The type checker should prevent reading from a never-written local")
}
LocalValue::Live(val) => Ok(val), LocalValue::Live(val) => Ok(val),
} }
} }
@ -211,15 +207,11 @@ impl<'tcx, Tag: Provenance + 'static> LocalState<'tcx, Tag> {
/// ///
/// Note: This may only be invoked from the `Machine::access_local_mut` hook and not from /// Note: This may only be invoked from the `Machine::access_local_mut` hook and not from
/// anywhere else. You may be invalidating machine invariants if you do! /// anywhere else. You may be invalidating machine invariants if you do!
pub fn access_mut( #[inline]
&mut self, pub fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand<Tag>> {
) -> InterpResult<'tcx, Result<&mut LocalValue<Tag>, MemPlace<Tag>>> { match &mut self.value {
match self.value { LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
LocalValue::Dead => throw_ub!(DeadLocal), LocalValue::Live(val) => Ok(val),
LocalValue::Live(Operand::Indirect(mplace)) => Ok(Err(mplace)),
ref mut local @ (LocalValue::Live(Operand::Immediate(_)) | LocalValue::Unallocated) => {
Ok(Ok(local))
}
} }
} }
} }
@ -710,16 +702,15 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
})?; })?;
} }
// Locals are initially unallocated. // Most locals are initially dead.
let dummy = LocalState { value: LocalValue::Unallocated, layout: Cell::new(None) }; let dummy = LocalState { value: LocalValue::Dead, layout: Cell::new(None) };
let mut locals = IndexVec::from_elem(dummy, &body.local_decls); let mut locals = IndexVec::from_elem(dummy, &body.local_decls);
// Now mark those locals as dead that we do not want to initialize // Now mark those locals as live that have no `Storage*` annotations.
// Mark locals that use `Storage*` annotations as dead on function entry.
let always_live = always_live_locals(self.body()); let always_live = always_live_locals(self.body());
for local in locals.indices() { for local in locals.indices() {
if !always_live.contains(local) { if always_live.contains(local) {
locals[local].value = LocalValue::Dead; locals[local].value = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
} }
} }
// done // done
@ -791,7 +782,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
if unwinding { "during unwinding" } else { "returning from function" } if unwinding { "during unwinding" } else { "returning from function" }
); );
// Sanity check `unwinding`. // Check `unwinding`.
assert_eq!( assert_eq!(
unwinding, unwinding,
match self.frame().loc { match self.frame().loc {
@ -799,51 +790,61 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Err(_) => true, Err(_) => true,
} }
); );
if unwinding && self.frame_idx() == 0 { if unwinding && self.frame_idx() == 0 {
throw_ub_format!("unwinding past the topmost frame of the stack"); throw_ub_format!("unwinding past the topmost frame of the stack");
} }
let frame = // Copy return value. Must of course happen *before* we deallocate the locals.
self.stack_mut().pop().expect("tried to pop a stack frame, but there were none"); let copy_ret_result = if !unwinding {
let op = self
if !unwinding { .local_to_op(self.frame(), mir::RETURN_PLACE, None)
let op = self.local_to_op(&frame, mir::RETURN_PLACE, None)?; .expect("return place should always be live");
self.copy_op_transmute(&op, &frame.return_place)?; let dest = self.frame().return_place;
trace!("{:?}", self.dump_place(*frame.return_place)); let err = self.copy_op_transmute(&op, &dest);
} trace!("return value: {:?}", self.dump_place(*dest));
// We delay actually short-circuiting on this error until *after* the stack frame is
let return_to_block = frame.return_to_block; // popped, since we want this error to be attributed to the caller, whose type defines
// this transmute.
// Now where do we jump next? err
} else {
Ok(())
};
// Cleanup: deallocate locals.
// Usually we want to clean up (deallocate locals), but in a few rare cases we don't. // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
// In that case, we return early. We also avoid validation in that case, // We do this while the frame is still on the stack, so errors point to the callee.
// because this is CTFE and the final value will be thoroughly validated anyway. let return_to_block = self.frame().return_to_block;
let cleanup = match return_to_block { let cleanup = match return_to_block {
StackPopCleanup::Goto { .. } => true, StackPopCleanup::Goto { .. } => true,
StackPopCleanup::Root { cleanup, .. } => cleanup, StackPopCleanup::Root { cleanup, .. } => cleanup,
}; };
if cleanup {
// We need to take the locals out, since we need to mutate while iterating.
let locals = mem::take(&mut self.frame_mut().locals);
for local in &locals {
self.deallocate_local(local.value)?;
}
}
// All right, now it is time to actually pop the frame.
// Note that its locals are gone already, but that's fine.
let frame =
self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");
// Report error from return value copy, if any.
copy_ret_result?;
// If we are not doing cleanup, also skip everything else.
if !cleanup { if !cleanup {
assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked"); assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
assert!(!unwinding, "tried to skip cleanup during unwinding"); assert!(!unwinding, "tried to skip cleanup during unwinding");
// Leak the locals, skip validation, skip machine hook. // Skip machine hook.
return Ok(()); return Ok(());
} }
trace!("locals: {:#?}", frame.locals);
// Cleanup: deallocate all locals that are backed by an allocation.
for local in &frame.locals {
self.deallocate_local(local.value)?;
}
if M::after_stack_pop(self, frame, unwinding)? == StackPopJump::NoJump { if M::after_stack_pop(self, frame, unwinding)? == StackPopJump::NoJump {
// The hook already did everything. // The hook already did everything.
// We want to skip the `info!` below, hence early return.
return Ok(()); return Ok(());
} }
// Normal return, figure out where to jump. // Normal return, figure out where to jump.
if unwinding { if unwinding {
// Follow the unwind edge. // Follow the unwind edge.
@ -874,7 +875,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
assert!(local != mir::RETURN_PLACE, "Cannot make return place live"); assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
trace!("{:?} is now live", local); trace!("{:?} is now live", local);
let local_val = LocalValue::Unallocated; let local_val = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
// StorageLive expects the local to be dead, and marks it live. // StorageLive expects the local to be dead, and marks it live.
let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val); let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val);
if !matches!(old, LocalValue::Dead) { if !matches!(old, LocalValue::Dead) {
@ -977,7 +978,9 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> std::fmt::Debug
match self.ecx.stack()[frame].locals[local].value { match self.ecx.stack()[frame].locals[local].value {
LocalValue::Dead => write!(fmt, " is dead")?, LocalValue::Dead => write!(fmt, " is dead")?,
LocalValue::Unallocated => write!(fmt, " is unallocated")?, LocalValue::Live(Operand::Immediate(Immediate::Uninit)) => {
write!(fmt, " is uninitialized")?
}
LocalValue::Live(Operand::Indirect(mplace)) => { LocalValue::Live(Operand::Indirect(mplace)) => {
write!( write!(
fmt, fmt,

View file

@ -14,8 +14,7 @@ use rustc_target::spec::abi::Abi;
use super::{ use super::{
AllocId, AllocRange, Allocation, ConstAllocation, Frame, ImmTy, InterpCx, InterpResult, AllocId, AllocRange, Allocation, ConstAllocation, Frame, ImmTy, InterpCx, InterpResult,
LocalValue, MemPlace, MemoryKind, OpTy, Operand, PlaceTy, Pointer, Provenance, Scalar, MemoryKind, OpTy, Operand, PlaceTy, Pointer, Provenance, Scalar, StackPopUnwind,
StackPopUnwind,
}; };
/// Data returned by Machine::stack_pop, /// Data returned by Machine::stack_pop,
@ -226,11 +225,13 @@ pub trait Machine<'mir, 'tcx>: Sized {
/// Since reading a ZST is not actually accessing memory or locals, this is never invoked /// Since reading a ZST is not actually accessing memory or locals, this is never invoked
/// for ZST reads. /// for ZST reads.
#[inline] #[inline]
fn access_local( fn access_local<'a>(
_ecx: &InterpCx<'mir, 'tcx, Self>, frame: &'a Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
frame: &Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
local: mir::Local, local: mir::Local,
) -> InterpResult<'tcx, Operand<Self::PointerTag>> { ) -> InterpResult<'tcx, &'a Operand<Self::PointerTag>>
where
'tcx: 'mir,
{
frame.locals[local].access() frame.locals[local].access()
} }
@ -242,7 +243,7 @@ pub trait Machine<'mir, 'tcx>: Sized {
ecx: &'a mut InterpCx<'mir, 'tcx, Self>, ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
frame: usize, frame: usize,
local: mir::Local, local: mir::Local,
) -> InterpResult<'tcx, Result<&'a mut LocalValue<Self::PointerTag>, MemPlace<Self::PointerTag>>> ) -> InterpResult<'tcx, &'a mut Operand<Self::PointerTag>>
where where
'tcx: 'mir, 'tcx: 'mir,
{ {
@ -418,12 +419,14 @@ pub trait Machine<'mir, 'tcx>: Sized {
} }
/// Called immediately after a stack frame got popped, but before jumping back to the caller. /// Called immediately after a stack frame got popped, but before jumping back to the caller.
/// The `locals` have already been destroyed!
fn after_stack_pop( fn after_stack_pop(
_ecx: &mut InterpCx<'mir, 'tcx, Self>, _ecx: &mut InterpCx<'mir, 'tcx, Self>,
_frame: Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>, _frame: Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
_unwinding: bool, unwinding: bool,
) -> InterpResult<'tcx, StackPopJump> { ) -> InterpResult<'tcx, StackPopJump> {
// By default, we do not support unwinding from panics // By default, we do not support unwinding from panics
assert!(!unwinding);
Ok(StackPopJump::Normal) Ok(StackPopJump::Normal)
} }
} }

View file

@ -14,7 +14,7 @@ use rustc_target::abi::{self, Abi, Align, HasDataLayout, Size, TagEncoding};
use rustc_target::abi::{VariantIdx, Variants}; use rustc_target::abi::{VariantIdx, Variants};
use super::{ use super::{
alloc_range, from_known_layout, mir_assign_valid_types, AllocId, ConstValue, GlobalId, alloc_range, from_known_layout, mir_assign_valid_types, AllocId, ConstValue, Frame, GlobalId,
InterpCx, InterpResult, MPlaceTy, Machine, MemPlace, Place, PlaceTy, Pointer, InterpCx, InterpResult, MPlaceTy, Machine, MemPlace, Place, PlaceTy, Pointer,
PointerArithmetic, Provenance, Scalar, ScalarMaybeUninit, PointerArithmetic, Provenance, Scalar, ScalarMaybeUninit,
}; };
@ -28,8 +28,15 @@ use super::{
/// defined on `Immediate`, and do not have to work with a `Place`. /// defined on `Immediate`, and do not have to work with a `Place`.
#[derive(Copy, Clone, PartialEq, Eq, HashStable, Hash, Debug)] #[derive(Copy, Clone, PartialEq, Eq, HashStable, Hash, Debug)]
pub enum Immediate<Tag: Provenance = AllocId> { pub enum Immediate<Tag: Provenance = AllocId> {
/// A single scalar value (must have *initialized* `Scalar` ABI).
/// FIXME: we also currently often use this for ZST.
/// `ScalarMaybeUninit` should reject ZST, and we should use `Uninit` for them instead.
Scalar(ScalarMaybeUninit<Tag>), Scalar(ScalarMaybeUninit<Tag>),
/// A pair of two scalar values (must have `ScalarPair` ABI where both fields are
/// `Scalar::Initialized`).
ScalarPair(ScalarMaybeUninit<Tag>, ScalarMaybeUninit<Tag>), ScalarPair(ScalarMaybeUninit<Tag>, ScalarMaybeUninit<Tag>),
/// A value of fully uninitialized memory. Can have arbitrary size and layout.
Uninit,
} }
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
@ -75,6 +82,7 @@ impl<'tcx, Tag: Provenance> Immediate<Tag> {
match self { match self {
Immediate::Scalar(val) => val, Immediate::Scalar(val) => val,
Immediate::ScalarPair(..) => bug!("Got a scalar pair where a scalar was expected"), Immediate::ScalarPair(..) => bug!("Got a scalar pair where a scalar was expected"),
Immediate::Uninit => ScalarMaybeUninit::Uninit,
} }
} }
@ -88,6 +96,7 @@ impl<'tcx, Tag: Provenance> Immediate<Tag> {
match self { match self {
Immediate::ScalarPair(val1, val2) => (val1, val2), Immediate::ScalarPair(val1, val2) => (val1, val2),
Immediate::Scalar(..) => bug!("Got a scalar where a scalar pair was expected"), Immediate::Scalar(..) => bug!("Got a scalar where a scalar pair was expected"),
Immediate::Uninit => (ScalarMaybeUninit::Uninit, ScalarMaybeUninit::Uninit),
} }
} }
@ -149,7 +158,10 @@ impl<Tag: Provenance> std::fmt::Display for ImmTy<'_, Tag> {
} }
Immediate::ScalarPair(a, b) => { Immediate::ScalarPair(a, b) => {
// FIXME(oli-obk): at least print tuples and slices nicely // FIXME(oli-obk): at least print tuples and slices nicely
write!(f, "({:x}, {:x}): {}", a, b, self.layout.ty,) write!(f, "({:x}, {:x}): {}", a, b, self.layout.ty)
}
Immediate::Uninit => {
write!(f, "uninit: {}", self.layout.ty)
} }
} }
}) })
@ -397,7 +409,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.scalar_to_ptr(self.read_scalar(op)?.check_init()?) self.scalar_to_ptr(self.read_scalar(op)?.check_init()?)
} }
// Turn the wide MPlace into a string (must already be dereferenced!) /// Turn the wide MPlace into a string (must already be dereferenced!)
pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::PointerTag>) -> InterpResult<'tcx, &str> { pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::PointerTag>) -> InterpResult<'tcx, &str> {
let len = mplace.len(self)?; let len = mplace.len(self)?;
let bytes = self.read_bytes_ptr(mplace.ptr, Size::from_bytes(len))?; let bytes = self.read_bytes_ptr(mplace.ptr, Size::from_bytes(len))?;
@ -528,10 +540,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Will not access memory, instead an indirect `Operand` is returned. /// Will not access memory, instead an indirect `Operand` is returned.
/// ///
/// This is public because it is used by [priroda](https://github.com/oli-obk/priroda) to get an /// This is public because it is used by [priroda](https://github.com/oli-obk/priroda) to get an
/// OpTy from a local /// OpTy from a local.
pub fn local_to_op( pub fn local_to_op(
&self, &self,
frame: &super::Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>, frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
local: mir::Local, local: mir::Local,
layout: Option<TyAndLayout<'tcx>>, layout: Option<TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
@ -540,7 +552,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Do not read from ZST, they might not be initialized // Do not read from ZST, they might not be initialized
Operand::Immediate(Scalar::ZST.into()) Operand::Immediate(Scalar::ZST.into())
} else { } else {
M::access_local(&self, frame, local)? *M::access_local(frame, local)?
}; };
Ok(OpTy { op, layout, align: Some(layout.align.abi) }) Ok(OpTy { op, layout, align: Some(layout.align.abi) })
} }

View file

@ -15,8 +15,8 @@ use rustc_target::abi::{HasDataLayout, Size, VariantIdx, Variants};
use super::{ use super::{
alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckInAllocMsg, alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckInAllocMsg,
ConstAlloc, ImmTy, Immediate, InterpCx, InterpResult, LocalValue, Machine, MemoryKind, OpTy, ConstAlloc, ImmTy, Immediate, InterpCx, InterpResult, Machine, MemoryKind, OpTy, Operand,
Operand, Pointer, Provenance, Scalar, ScalarMaybeUninit, Pointer, Provenance, Scalar, ScalarMaybeUninit,
}; };
#[derive(Copy, Clone, Hash, PartialEq, Eq, HashStable, Debug)] #[derive(Copy, Clone, Hash, PartialEq, Eq, HashStable, Debug)]
@ -314,6 +314,7 @@ where
let (ptr, meta) = match **val { let (ptr, meta) = match **val {
Immediate::Scalar(ptr) => (ptr, MemPlaceMeta::None), Immediate::Scalar(ptr) => (ptr, MemPlaceMeta::None),
Immediate::ScalarPair(ptr, meta) => (ptr, MemPlaceMeta::Meta(meta.check_init()?)), Immediate::ScalarPair(ptr, meta) => (ptr, MemPlaceMeta::Meta(meta.check_init()?)),
Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
}; };
let mplace = MemPlace { ptr: self.scalar_to_ptr(ptr.check_init()?)?, meta }; let mplace = MemPlace { ptr: self.scalar_to_ptr(ptr.check_init()?)?, meta };
@ -746,14 +747,14 @@ where
let mplace = match dest.place { let mplace = match dest.place {
Place::Local { frame, local } => { Place::Local { frame, local } => {
match M::access_local_mut(self, frame, local)? { match M::access_local_mut(self, frame, local)? {
Ok(local) => { Operand::Immediate(local) => {
// Local can be updated in-place. // Local can be updated in-place.
*local = LocalValue::Live(Operand::Immediate(src)); *local = src;
return Ok(()); return Ok(());
} }
Err(mplace) => { Operand::Indirect(mplace) => {
// The local is in memory, go on below. // The local is in memory, go on below.
mplace *mplace
} }
} }
} }
@ -817,6 +818,7 @@ where
alloc.write_scalar(alloc_range(Size::ZERO, a_size), a_val)?; alloc.write_scalar(alloc_range(Size::ZERO, a_size), a_val)?;
alloc.write_scalar(alloc_range(b_offset, b_size), b_val) alloc.write_scalar(alloc_range(b_offset, b_size), b_val)
} }
Immediate::Uninit => alloc.write_uninit(),
} }
} }
@ -825,25 +827,13 @@ where
Ok(mplace) => mplace, Ok(mplace) => mplace,
Err((frame, local)) => { Err((frame, local)) => {
match M::access_local_mut(self, frame, local)? { match M::access_local_mut(self, frame, local)? {
Ok(local) => match dest.layout.abi { Operand::Immediate(local) => {
Abi::Scalar(_) => { *local = Immediate::Uninit;
*local = LocalValue::Live(Operand::Immediate(Immediate::Scalar(
ScalarMaybeUninit::Uninit,
)));
return Ok(()); return Ok(());
} }
Abi::ScalarPair(..) => { Operand::Indirect(mplace) => {
*local = LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(
ScalarMaybeUninit::Uninit,
ScalarMaybeUninit::Uninit,
)));
return Ok(());
}
_ => self.force_allocation(dest)?,
},
Err(mplace) => {
// The local is in memory, go on below. // The local is in memory, go on below.
MPlaceTy { mplace, layout: dest.layout, align: dest.align } MPlaceTy { mplace: *mplace, layout: dest.layout, align: dest.align }
} }
} }
} }
@ -908,19 +898,18 @@ where
// Slow path, this does not fit into an immediate. Just memcpy. // Slow path, this does not fit into an immediate. Just memcpy.
trace!("copy_op: {:?} <- {:?}: {}", *dest, src, dest.layout.ty); trace!("copy_op: {:?} <- {:?}: {}", *dest, src, dest.layout.ty);
// This interprets `src.meta` with the `dest` local's layout, if an unsized local let dest = self.force_allocation(dest)?;
// is being initialized! assert!(!(src.layout.is_unsized() || dest.layout.is_unsized()), "cannot copy unsized data");
let (dest, size) = self.force_allocation_maybe_sized(dest, src.meta)?; assert_eq!(src.layout.size, dest.layout.size, "Cannot copy differently-sized data");
let size = size.unwrap_or_else(|| {
assert!(
!dest.layout.is_unsized(),
"Cannot copy into already initialized unsized place"
);
dest.layout.size
});
assert_eq!(src.meta, dest.meta, "Can only copy between equally-sized instances");
self.mem_copy(src.ptr, src.align, dest.ptr, dest.align, size, /*nonoverlapping*/ false) self.mem_copy(
src.ptr,
src.align,
dest.ptr,
dest.align,
dest.layout.size,
/*nonoverlapping*/ false,
)
} }
/// Copies the data from an operand to a place. The layouts may disagree, but they must /// Copies the data from an operand to a place. The layouts may disagree, but they must
@ -931,9 +920,16 @@ where
dest: &PlaceTy<'tcx, M::PointerTag>, dest: &PlaceTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx> { ) -> InterpResult<'tcx> {
if mir_assign_valid_types(*self.tcx, self.param_env, src.layout, dest.layout) { if mir_assign_valid_types(*self.tcx, self.param_env, src.layout, dest.layout) {
// Fast path: Just use normal `copy_op` // Fast path: Just use normal `copy_op`. This is faster because it tries
// `read_immediate_raw` first before doing `force_allocation`.
return self.copy_op(src, dest); return self.copy_op(src, dest);
} }
// Unsized copies rely on interpreting `src.meta` with `dest.layout`, we want
// to avoid that here.
assert!(
!src.layout.is_unsized() && !dest.layout.is_unsized(),
"Cannot transmute unsized data"
);
// We still require the sizes to match. // We still require the sizes to match.
if src.layout.size != dest.layout.size { if src.layout.size != dest.layout.size {
span_bug!( span_bug!(
@ -943,12 +939,6 @@ where
dest dest
); );
} }
// Unsized copies rely on interpreting `src.meta` with `dest.layout`, we want
// to avoid that here.
assert!(
!src.layout.is_unsized() && !dest.layout.is_unsized(),
"Cannot transmute unsized data"
);
// The hard case is `ScalarPair`. `src` is already read from memory in this case, // The hard case is `ScalarPair`. `src` is already read from memory in this case,
// using `src.layout` to figure out which bytes to use for the 1st and 2nd field. // using `src.layout` to figure out which bytes to use for the 1st and 2nd field.
@ -976,20 +966,15 @@ where
/// If the place currently refers to a local that doesn't yet have a matching allocation, /// If the place currently refers to a local that doesn't yet have a matching allocation,
/// create such an allocation. /// create such an allocation.
/// This is essentially `force_to_memplace`. /// This is essentially `force_to_memplace`.
///
/// This supports unsized types and returns the computed size to avoid some
/// redundant computation when copying; use `force_allocation` for a simpler, sized-only
/// version.
#[instrument(skip(self), level = "debug")] #[instrument(skip(self), level = "debug")]
pub fn force_allocation_maybe_sized( pub fn force_allocation(
&mut self, &mut self,
place: &PlaceTy<'tcx, M::PointerTag>, place: &PlaceTy<'tcx, M::PointerTag>,
meta: MemPlaceMeta<M::PointerTag>, ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
) -> InterpResult<'tcx, (MPlaceTy<'tcx, M::PointerTag>, Option<Size>)> { let mplace = match place.place {
let (mplace, size) = match place.place {
Place::Local { frame, local } => { Place::Local { frame, local } => {
match M::access_local_mut(self, frame, local)? { match M::access_local_mut(self, frame, local)? {
Ok(&mut local_val) => { &mut Operand::Immediate(local_val) => {
// We need to make an allocation. // We need to make an allocation.
// We need the layout of the local. We can NOT use the layout we got, // We need the layout of the local. We can NOT use the layout we got,
@ -997,44 +982,29 @@ where
// that has different alignment than the outer field. // that has different alignment than the outer field.
let local_layout = let local_layout =
self.layout_of_local(&self.stack()[frame], local, None)?; self.layout_of_local(&self.stack()[frame], local, None)?;
// We also need to support unsized types, and hence cannot use `allocate`. if local_layout.is_unsized() {
let (size, align) = self throw_unsup_format!("unsized locals are not supported");
.size_and_align_of(&meta, &local_layout)? }
.expect("Cannot allocate for non-dyn-sized type"); let mplace = self.allocate(local_layout, MemoryKind::Stack)?;
let ptr = self.allocate_ptr(size, align, MemoryKind::Stack)?; if !matches!(local_val, Immediate::Uninit) {
let mplace = MemPlace { ptr: ptr.into(), meta }; // Preserve old value. (As an optimization, we can skip this if it was uninit.)
if let LocalValue::Live(Operand::Immediate(value)) = local_val {
// Preserve old value.
// We don't have to validate as we can assume the local // We don't have to validate as we can assume the local
// was already valid for its type. // was already valid for its type.
let mplace = MPlaceTy { self.write_immediate_to_mplace_no_validate(local_val, &mplace)?;
mplace,
layout: local_layout,
align: local_layout.align.abi,
};
self.write_immediate_to_mplace_no_validate(value, &mplace)?;
} }
// Now we can call `access_mut` again, asserting it goes well, // Now we can call `access_mut` again, asserting it goes well,
// and actually overwrite things. // and actually overwrite things.
*M::access_local_mut(self, frame, local).unwrap().unwrap() = *M::access_local_mut(self, frame, local).unwrap() =
LocalValue::Live(Operand::Indirect(mplace)); Operand::Indirect(*mplace);
(mplace, Some(size)) *mplace
} }
Err(mplace) => (mplace, None), // this already was an indirect local &mut Operand::Indirect(mplace) => mplace, // this already was an indirect local
} }
} }
Place::Ptr(mplace) => (mplace, None), Place::Ptr(mplace) => mplace,
}; };
// Return with the original layout, so that the caller can go on // Return with the original layout, so that the caller can go on
Ok((MPlaceTy { mplace, layout: place.layout, align: place.align }, size)) Ok(MPlaceTy { mplace, layout: place.layout, align: place.align })
}
#[inline(always)]
pub fn force_allocation(
&mut self,
place: &PlaceTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
Ok(self.force_allocation_maybe_sized(place, MemPlaceMeta::None)?.0)
} }
pub fn allocate( pub fn allocate(
@ -1042,6 +1012,7 @@ where
layout: TyAndLayout<'tcx>, layout: TyAndLayout<'tcx>,
kind: MemoryKind<M::MemoryKind>, kind: MemoryKind<M::MemoryKind>,
) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> { ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
assert!(!layout.is_unsized());
let ptr = self.allocate_ptr(layout.size, layout.align.abi, kind)?; let ptr = self.allocate_ptr(layout.size, layout.align.abi, kind)?;
Ok(MPlaceTy::from_aligned_ptr(ptr.into(), layout)) Ok(MPlaceTy::from_aligned_ptr(ptr.into(), layout))
} }

View file

@ -29,9 +29,8 @@ use rustc_trait_selection::traits;
use crate::MirPass; use crate::MirPass;
use rustc_const_eval::interpret::{ use rustc_const_eval::interpret::{
self, compile_time_machine, AllocId, ConstAllocation, ConstValue, CtfeValidationMode, Frame, self, compile_time_machine, AllocId, ConstAllocation, ConstValue, CtfeValidationMode, Frame,
ImmTy, Immediate, InterpCx, InterpResult, LocalState, LocalValue, MemPlace, MemoryKind, OpTy, ImmTy, Immediate, InterpCx, InterpResult, LocalState, LocalValue, MemoryKind, OpTy, PlaceTy,
Operand as InterpOperand, PlaceTy, Pointer, Scalar, ScalarMaybeUninit, StackPopCleanup, Pointer, Scalar, ScalarMaybeUninit, StackPopCleanup, StackPopUnwind,
StackPopUnwind,
}; };
/// The maximum number of bytes that we'll allocate space for a local or the return value. /// The maximum number of bytes that we'll allocate space for a local or the return value.
@ -237,15 +236,19 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
throw_machine_stop_str!("pointer arithmetic or comparisons aren't supported in ConstProp") throw_machine_stop_str!("pointer arithmetic or comparisons aren't supported in ConstProp")
} }
fn access_local( fn access_local<'a>(
_ecx: &InterpCx<'mir, 'tcx, Self>, frame: &'a Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
frame: &Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
local: Local, local: Local,
) -> InterpResult<'tcx, InterpOperand<Self::PointerTag>> { ) -> InterpResult<'tcx, &'a interpret::Operand<Self::PointerTag>> {
let l = &frame.locals[local]; let l = &frame.locals[local];
if l.value == LocalValue::Unallocated { if matches!(
throw_machine_stop_str!("tried to access an unallocated local") l.value,
LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit))
) {
// For us "uninit" means "we don't know its value, might be initialized or not".
// So stop here.
throw_machine_stop_str!("tried to access a local with unknown value")
} }
l.access() l.access()
@ -255,8 +258,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
ecx: &'a mut InterpCx<'mir, 'tcx, Self>, ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
frame: usize, frame: usize,
local: Local, local: Local,
) -> InterpResult<'tcx, Result<&'a mut LocalValue<Self::PointerTag>, MemPlace<Self::PointerTag>>> ) -> InterpResult<'tcx, &'a mut interpret::Operand<Self::PointerTag>> {
{
if ecx.machine.can_const_prop[local] == ConstPropMode::NoPropagation { if ecx.machine.can_const_prop[local] == ConstPropMode::NoPropagation {
throw_machine_stop_str!("tried to write to a local that is marked as not propagatable") throw_machine_stop_str!("tried to write to a local that is marked as not propagatable")
} }
@ -436,8 +438,10 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
/// Remove `local` from the pool of `Locals`. Allows writing to them, /// Remove `local` from the pool of `Locals`. Allows writing to them,
/// but not reading from them anymore. /// but not reading from them anymore.
fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) { fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) {
ecx.frame_mut().locals[local] = ecx.frame_mut().locals[local] = LocalState {
LocalState { value: LocalValue::Unallocated, layout: Cell::new(None) }; value: LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)),
layout: Cell::new(None),
};
} }
fn use_ecx<F, T>(&mut self, f: F) -> Option<T> fn use_ecx<F, T>(&mut self, f: F) -> Option<T>
@ -1042,7 +1046,9 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> {
let frame = self.ecx.frame_mut(); let frame = self.ecx.frame_mut();
frame.locals[local].value = frame.locals[local].value =
if let StatementKind::StorageLive(_) = statement.kind { if let StatementKind::StorageLive(_) = statement.kind {
LocalValue::Unallocated LocalValue::Live(interpret::Operand::Immediate(
interpret::Immediate::Uninit,
))
} else { } else {
LocalValue::Dead LocalValue::Dead
}; };

View file

@ -31,8 +31,8 @@ use crate::MirLint;
use rustc_const_eval::const_eval::ConstEvalErr; use rustc_const_eval::const_eval::ConstEvalErr;
use rustc_const_eval::interpret::{ use rustc_const_eval::interpret::{
self, compile_time_machine, AllocId, ConstAllocation, Frame, ImmTy, InterpCx, InterpResult, self, compile_time_machine, AllocId, ConstAllocation, Frame, ImmTy, InterpCx, InterpResult,
LocalState, LocalValue, MemPlace, MemoryKind, OpTy, Operand as InterpOperand, PlaceTy, Pointer, LocalState, LocalValue, MemoryKind, OpTy, PlaceTy, Pointer, Scalar, ScalarMaybeUninit,
Scalar, ScalarMaybeUninit, StackPopCleanup, StackPopUnwind, StackPopCleanup, StackPopUnwind,
}; };
/// The maximum number of bytes that we'll allocate space for a local or the return value. /// The maximum number of bytes that we'll allocate space for a local or the return value.
@ -229,15 +229,19 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
throw_machine_stop_str!("pointer arithmetic or comparisons aren't supported in ConstProp") throw_machine_stop_str!("pointer arithmetic or comparisons aren't supported in ConstProp")
} }
fn access_local( fn access_local<'a>(
_ecx: &InterpCx<'mir, 'tcx, Self>, frame: &'a Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
frame: &Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
local: Local, local: Local,
) -> InterpResult<'tcx, InterpOperand<Self::PointerTag>> { ) -> InterpResult<'tcx, &'a interpret::Operand<Self::PointerTag>> {
let l = &frame.locals[local]; let l = &frame.locals[local];
if l.value == LocalValue::Unallocated { if matches!(
throw_machine_stop_str!("tried to access an uninitialized local") l.value,
LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit))
) {
// For us "uninit" means "we don't know its value, might be initialized or not".
// So stop here.
throw_machine_stop_str!("tried to access a local with unknown value")
} }
l.access() l.access()
@ -247,8 +251,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
ecx: &'a mut InterpCx<'mir, 'tcx, Self>, ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
frame: usize, frame: usize,
local: Local, local: Local,
) -> InterpResult<'tcx, Result<&'a mut LocalValue<Self::PointerTag>, MemPlace<Self::PointerTag>>> ) -> InterpResult<'tcx, &'a mut interpret::Operand<Self::PointerTag>> {
{
if ecx.machine.can_const_prop[local] == ConstPropMode::NoPropagation { if ecx.machine.can_const_prop[local] == ConstPropMode::NoPropagation {
throw_machine_stop_str!("tried to write to a local that is marked as not propagatable") throw_machine_stop_str!("tried to write to a local that is marked as not propagatable")
} }
@ -430,8 +433,10 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
/// Remove `local` from the pool of `Locals`. Allows writing to them, /// Remove `local` from the pool of `Locals`. Allows writing to them,
/// but not reading from them anymore. /// but not reading from them anymore.
fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) { fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) {
ecx.frame_mut().locals[local] = ecx.frame_mut().locals[local] = LocalState {
LocalState { value: LocalValue::Unallocated, layout: Cell::new(None) }; value: LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)),
layout: Cell::new(None),
};
} }
fn lint_root(&self, source_info: SourceInfo) -> Option<HirId> { fn lint_root(&self, source_info: SourceInfo) -> Option<HirId> {
@ -915,7 +920,9 @@ impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
let frame = self.ecx.frame_mut(); let frame = self.ecx.frame_mut();
frame.locals[local].value = frame.locals[local].value =
if let StatementKind::StorageLive(_) = statement.kind { if let StatementKind::StorageLive(_) = statement.kind {
LocalValue::Unallocated LocalValue::Live(interpret::Operand::Immediate(
interpret::Immediate::Uninit,
))
} else { } else {
LocalValue::Dead LocalValue::Dead
}; };