
Auto merge of #113569 - RalfJung:miri, r=oli-obk

miri: protect Move() function arguments during the call

This gives `Move` operands a meaning specific to function calls:
- for the duration of the call, the place the operand comes from is protected, making all read and write accesses insta-UB.
- the contents of that place are reset to `Uninit`, so even if the place is looked at again after the function returns, its old contents cannot be observed (see the sketch below)
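
A hypothetical sketch (not taken from the PR or the linked issues; `S`, `take`, and `snoop` are invented names) of the kind of program these rules newly flag under Miri:

```rust
// `S` is not `Copy`, so `take(s, ...)` passes `s` as a `Move` operand in MIR,
// and the place `s` lives in may be reused in-place by the callee.
struct S(i32);

fn take(_s: S, snoop: *const S) -> i32 {
    // `snoop` points at the caller's local that `_s` was moved out of. With
    // this change, that place is protected and reset to `Uninit` for the
    // duration of the call, so Miri would report this read as UB.
    unsafe { (*snoop).0 }
}

fn main() {
    let s = S(42);
    let snoop = &s as *const S; // raw pointer to the soon-to-be-moved-from place
    let _ = take(s, snoop);
}
```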

Turns out we can replace the existing "retag return place" hack with the exact same sort of protection on the return place, which is nicely symmetric.
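
A second hypothetical sketch for the return-place side (again invented for illustration, not taken from the linked issues): a callee that touches the caller-provided destination through a stray pointer during the call would now be flagged the same way:

```rust
fn f(dest_alias: *mut i32) -> i32 {
    // `dest_alias` points at the place the return value will be written to.
    // With the return place protected like a `Move` argument, this write (or
    // a read) during the call would be reported as UB by Miri.
    unsafe { *dest_alias = 1 };
    13
}

fn main() {
    let mut x = 0;
    let dest_alias = &mut x as *mut i32;
    // For a simple assignment like this, `x` itself can serve as the call's
    // return place in MIR, so the protection covers exactly the memory that
    // `dest_alias` points to.
    x = f(dest_alias);
    println!("{x}");
}
```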

Fixes https://github.com/rust-lang/rust/issues/112564
Fixes https://github.com/rust-lang/miri/issues/2927

This starts with a Miri rustc-push, since we'd otherwise conflict with a PR that recently landed in Miri.
(The "miri tree borrows" commit is an unrelated cleanup I noticed while doing the PR. I can remove it if you prefer.)
r? `@oli-obk`
bors 2023-07-12 10:19:42 +00:00
commit 136dab6614
37 changed files with 747 additions and 161 deletions

View file

@ -22,7 +22,7 @@ use rustc_target::spec::abi::Abi as CallAbi;
use crate::errors::{LongRunning, LongRunningWarn};
use crate::interpret::{
self, compile_time_machine, AllocId, ConstAllocation, FnVal, Frame, ImmTy, InterpCx,
self, compile_time_machine, AllocId, ConstAllocation, FnArg, FnVal, Frame, ImmTy, InterpCx,
InterpResult, OpTy, PlaceTy, Pointer, Scalar,
};
use crate::{errors, fluent_generated as fluent};
@ -201,7 +201,7 @@ impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
fn hook_special_const_fn(
&mut self,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx>],
args: &[FnArg<'tcx>],
dest: &PlaceTy<'tcx>,
ret: Option<mir::BasicBlock>,
) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
@ -210,6 +210,7 @@ impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
if Some(def_id) == self.tcx.lang_items().panic_display()
|| Some(def_id) == self.tcx.lang_items().begin_panic_fn()
{
let args = self.copy_fn_args(args)?;
// &str or &&str
assert!(args.len() == 1);
@ -236,8 +237,9 @@ impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
return Ok(Some(new_instance));
} else if Some(def_id) == self.tcx.lang_items().align_offset_fn() {
let args = self.copy_fn_args(args)?;
// For align_offset, we replace the function call if the pointer has no address.
match self.align_offset(instance, args, dest, ret)? {
match self.align_offset(instance, &args, dest, ret)? {
ControlFlow::Continue(()) => return Ok(Some(instance)),
ControlFlow::Break(()) => return Ok(None),
}
@ -293,7 +295,7 @@ impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
self.eval_fn_call(
FnVal::Instance(instance),
(CallAbi::Rust, fn_abi),
&[addr, align],
&[FnArg::Copy(addr), FnArg::Copy(align)],
/* with_caller_location = */ false,
dest,
ret,
@ -427,7 +429,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
ecx: &mut InterpCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
_abi: CallAbi,
args: &[OpTy<'tcx>],
args: &[FnArg<'tcx>],
dest: &PlaceTy<'tcx>,
ret: Option<mir::BasicBlock>,
_unwind: mir::UnwindAction, // unwinding is not supported in consts

View file

@ -682,11 +682,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
return_to_block: StackPopCleanup,
) -> InterpResult<'tcx> {
trace!("body: {:#?}", body);
// Clobber previous return place contents, nobody is supposed to be able to see them any more
// This also checks dereferenceable, but not align. We rely on all constructed places being
// sufficiently aligned (in particular we rely on `deref_operand` checking alignment).
self.write_uninit(return_place)?;
// first push a stack frame so we have access to the local substs
// First push a stack frame so we have access to the local substs
let pre_frame = Frame {
body,
loc: Right(body.span), // Span used for errors caused during preamble.
@ -805,6 +801,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
throw_ub_custom!(fluent::const_eval_unwind_past_top);
}
M::before_stack_pop(self, self.frame())?;
// Copy return value. Must of course happen *before* we deallocate the locals.
let copy_ret_result = if !unwinding {
let op = self

View file

@ -30,7 +30,7 @@ use super::{
use crate::const_eval;
use crate::errors::{DanglingPtrInFinal, UnsupportedUntypedPointer};
pub trait CompileTimeMachine<'mir, 'tcx, T> = Machine<
pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
'mir,
'tcx,
MemoryKind = T,

View file

@ -17,7 +17,7 @@ use rustc_target::spec::abi::Abi as CallAbi;
use crate::const_eval::CheckAlignment;
use super::{
AllocBytes, AllocId, AllocRange, Allocation, ConstAllocation, Frame, ImmTy, InterpCx,
AllocBytes, AllocId, AllocRange, Allocation, ConstAllocation, FnArg, Frame, ImmTy, InterpCx,
InterpResult, MemoryKind, OpTy, Operand, PlaceTy, Pointer, Provenance, Scalar,
};
@ -84,7 +84,7 @@ pub trait AllocMap<K: Hash + Eq, V> {
/// Methods of this trait signify a point where CTFE evaluation would fail
/// and some use case dependent behaviour can instead be applied.
pub trait Machine<'mir, 'tcx>: Sized {
pub trait Machine<'mir, 'tcx: 'mir>: Sized {
/// Additional memory kinds a machine wishes to distinguish from the builtin ones
type MemoryKind: Debug + std::fmt::Display + MayLeak + Eq + 'static;
@ -182,7 +182,7 @@ pub trait Machine<'mir, 'tcx>: Sized {
ecx: &mut InterpCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
abi: CallAbi,
args: &[OpTy<'tcx, Self::Provenance>],
args: &[FnArg<'tcx, Self::Provenance>],
destination: &PlaceTy<'tcx, Self::Provenance>,
target: Option<mir::BasicBlock>,
unwind: mir::UnwindAction,
@ -194,7 +194,7 @@ pub trait Machine<'mir, 'tcx>: Sized {
ecx: &mut InterpCx<'mir, 'tcx, Self>,
fn_val: Self::ExtraFnVal,
abi: CallAbi,
args: &[OpTy<'tcx, Self::Provenance>],
args: &[FnArg<'tcx, Self::Provenance>],
destination: &PlaceTy<'tcx, Self::Provenance>,
target: Option<mir::BasicBlock>,
unwind: mir::UnwindAction,
@ -418,6 +418,18 @@ pub trait Machine<'mir, 'tcx>: Sized {
Ok(())
}
/// Called on places used for in-place function argument and return value handling.
///
/// These places need to be protected to make sure the program cannot tell whether the
/// argument/return value was actually copied or passed in-place.
fn protect_in_place_function_argument(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
place: &PlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx> {
// Without an aliasing model, all we can do is put `Uninit` into the place.
ecx.write_uninit(place)
}
/// Called immediately before a new stack frame gets pushed.
fn init_frame_extra(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
@ -439,6 +451,14 @@ pub trait Machine<'mir, 'tcx>: Sized {
Ok(())
}
/// Called just before the return value is copied to the caller-provided return place.
fn before_stack_pop(
_ecx: &InterpCx<'mir, 'tcx, Self>,
_frame: &Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,
) -> InterpResult<'tcx> {
Ok(())
}
/// Called immediately after a stack frame got popped, but before jumping back to the caller.
/// The `locals` have already been destroyed!
fn after_stack_pop(
@ -484,7 +504,7 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
_ecx: &mut InterpCx<$mir, $tcx, Self>,
fn_val: !,
_abi: CallAbi,
_args: &[OpTy<$tcx>],
_args: &[FnArg<$tcx>],
_destination: &PlaceTy<$tcx, Self::Provenance>,
_target: Option<mir::BasicBlock>,
_unwind: mir::UnwindAction,

View file

@ -26,6 +26,7 @@ pub use self::machine::{compile_time_machine, AllocMap, Machine, MayLeak, StackP
pub use self::memory::{AllocKind, AllocRef, AllocRefMut, FnVal, Memory, MemoryKind};
pub use self::operand::{ImmTy, Immediate, OpTy, Operand};
pub use self::place::{MPlaceTy, MemPlace, MemPlaceMeta, Place, PlaceTy};
pub use self::terminator::FnArg;
pub use self::validity::{CtfeValidationMode, RefTracking};
pub use self::visitor::{MutValueVisitor, Value, ValueVisitor};

View file

@ -575,14 +575,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Ok(op)
}
/// Evaluate a bunch of operands at once
pub(super) fn eval_operands(
&self,
ops: &[mir::Operand<'tcx>],
) -> InterpResult<'tcx, Vec<OpTy<'tcx, M::Provenance>>> {
ops.iter().map(|op| self.eval_operand(op, None)).collect()
}
fn eval_ty_constant(
&self,
val: ty::Const<'tcx>,

View file

@ -328,7 +328,8 @@ where
};
let mplace = MemPlace { ptr: ptr.to_pointer(self)?, meta };
// When deref'ing a pointer, the *static* alignment given by the type is what matters.
// `ref_to_mplace` is called on raw pointers even if they don't actually get dereferenced;
// we hence can't call `size_and_align_of` since that asserts more validity than we want.
let align = layout.align.abi;
Ok(MPlaceTy { mplace, layout, align })
}
@ -354,34 +355,37 @@ where
#[inline]
pub(super) fn get_place_alloc(
&self,
place: &MPlaceTy<'tcx, M::Provenance>,
mplace: &MPlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx, Option<AllocRef<'_, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
{
assert!(place.layout.is_sized());
assert!(!place.meta.has_meta());
let size = place.layout.size;
self.get_ptr_alloc(place.ptr, size, place.align)
let (size, _align) = self
.size_and_align_of_mplace(&mplace)?
.unwrap_or((mplace.layout.size, mplace.layout.align.abi));
// Due to packed places, only `mplace.align` matters.
self.get_ptr_alloc(mplace.ptr, size, mplace.align)
}
#[inline]
pub(super) fn get_place_alloc_mut(
&mut self,
place: &MPlaceTy<'tcx, M::Provenance>,
mplace: &MPlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx, Option<AllocRefMut<'_, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
{
assert!(place.layout.is_sized());
assert!(!place.meta.has_meta());
let size = place.layout.size;
self.get_ptr_alloc_mut(place.ptr, size, place.align)
let (size, _align) = self
.size_and_align_of_mplace(&mplace)?
.unwrap_or((mplace.layout.size, mplace.layout.align.abi));
// Due to packed places, only `mplace.align` matters.
self.get_ptr_alloc_mut(mplace.ptr, size, mplace.align)
}
/// Check if this mplace is dereferenceable and sufficiently aligned.
pub fn check_mplace(&self, mplace: MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
let (size, align) = self
let (size, _align) = self
.size_and_align_of_mplace(&mplace)?
.unwrap_or((mplace.layout.size, mplace.layout.align.abi));
assert!(mplace.align <= align, "dynamic alignment less strict than static one?");
let align = if M::enforce_alignment(self).should_check() { align } else { Align::ONE };
// Due to packed places, only `mplace.align` matters.
let align =
if M::enforce_alignment(self).should_check() { mplace.align } else { Align::ONE };
self.check_ptr_access_align(mplace.ptr, size, align, CheckInAllocMsg::DerefTest)?;
Ok(())
}

View file

@ -1,7 +1,8 @@
use std::borrow::Cow;
use either::Either;
use rustc_ast::ast::InlineAsmOptions;
use rustc_middle::ty::layout::{FnAbiOf, LayoutOf};
use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout};
use rustc_middle::ty::Instance;
use rustc_middle::{
mir,
@ -12,12 +13,63 @@ use rustc_target::abi::call::{ArgAbi, ArgAttribute, ArgAttributes, FnAbi, PassMo
use rustc_target::spec::abi::Abi;
use super::{
FnVal, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy, Machine, MemoryKind, OpTy, Operand,
PlaceTy, Scalar, StackPopCleanup,
AllocId, FnVal, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy, Machine, MemoryKind, OpTy,
Operand, PlaceTy, Provenance, Scalar, StackPopCleanup,
};
use crate::fluent_generated as fluent;
/// An argument passed to a function.
#[derive(Clone, Debug)]
pub enum FnArg<'tcx, Prov: Provenance = AllocId> {
/// Pass a copy of the given operand.
Copy(OpTy<'tcx, Prov>),
/// Allow for the argument to be passed in-place: destroy the value originally stored at that place and
/// make the place inaccessible for the duration of the function call.
InPlace(PlaceTy<'tcx, Prov>),
}
impl<'tcx, Prov: Provenance> FnArg<'tcx, Prov> {
pub fn layout(&self) -> &TyAndLayout<'tcx> {
match self {
FnArg::Copy(op) => &op.layout,
FnArg::InPlace(place) => &place.layout,
}
}
}
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Make a copy of the given fn_arg. Any `InPlace` is degenerated to a copy; no protection of the
/// original memory occurs.
pub fn copy_fn_arg(
&self,
arg: &FnArg<'tcx, M::Provenance>,
) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
match arg {
FnArg::Copy(op) => Ok(op.clone()),
FnArg::InPlace(place) => self.place_to_op(&place),
}
}
/// Make a copy of the given fn_args. Any `InPlace` are degenerated to copies; no protection of the
/// original memory occurs.
pub fn copy_fn_args(
&self,
args: &[FnArg<'tcx, M::Provenance>],
) -> InterpResult<'tcx, Vec<OpTy<'tcx, M::Provenance>>> {
args.iter().map(|fn_arg| self.copy_fn_arg(fn_arg)).collect()
}
pub fn fn_arg_field(
&mut self,
arg: &FnArg<'tcx, M::Provenance>,
field: usize,
) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
Ok(match arg {
FnArg::Copy(op) => FnArg::Copy(self.operand_field(op, field)?),
FnArg::InPlace(place) => FnArg::InPlace(self.place_field(place, field)?),
})
}
pub(super) fn eval_terminator(
&mut self,
terminator: &mir::Terminator<'tcx>,
@ -68,14 +120,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let old_stack = self.frame_idx();
let old_loc = self.frame().loc;
let func = self.eval_operand(func, None)?;
let args = self.eval_operands(args)?;
let args = self.eval_fn_call_arguments(args)?;
let fn_sig_binder = func.layout.ty.fn_sig(*self.tcx);
let fn_sig =
self.tcx.normalize_erasing_late_bound_regions(self.param_env, fn_sig_binder);
let extra_args = &args[fn_sig.inputs().len()..];
let extra_args =
self.tcx.mk_type_list_from_iter(extra_args.iter().map(|arg| arg.layout.ty));
self.tcx.mk_type_list_from_iter(extra_args.iter().map(|arg| arg.layout().ty));
let (fn_val, fn_abi, with_caller_location) = match *func.layout.ty.kind() {
ty::FnPtr(_sig) => {
@ -185,6 +237,21 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Ok(())
}
/// Evaluate the arguments of a function call
pub(super) fn eval_fn_call_arguments(
&mut self,
ops: &[mir::Operand<'tcx>],
) -> InterpResult<'tcx, Vec<FnArg<'tcx, M::Provenance>>> {
ops.iter()
.map(|op| {
Ok(match op {
mir::Operand::Move(place) => FnArg::InPlace(self.eval_place(*place)?),
_ => FnArg::Copy(self.eval_operand(op, None)?),
})
})
.collect()
}
fn check_argument_compat(
caller_abi: &ArgAbi<'tcx, Ty<'tcx>>,
callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
@ -275,7 +342,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
fn pass_argument<'x, 'y>(
&mut self,
caller_args: &mut impl Iterator<
Item = (&'x OpTy<'tcx, M::Provenance>, &'y ArgAbi<'tcx, Ty<'tcx>>),
Item = (&'x FnArg<'tcx, M::Provenance>, &'y ArgAbi<'tcx, Ty<'tcx>>),
>,
callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
callee_arg: &PlaceTy<'tcx, M::Provenance>,
@ -295,21 +362,25 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Now, check
if !Self::check_argument_compat(caller_abi, callee_abi) {
let callee_ty = format!("{}", callee_arg.layout.ty);
let caller_ty = format!("{}", caller_arg.layout.ty);
let caller_ty = format!("{}", caller_arg.layout().ty);
throw_ub_custom!(
fluent::const_eval_incompatible_types,
callee_ty = callee_ty,
caller_ty = caller_ty,
)
}
// We work with a copy of the argument for now; if this is in-place argument passing, we
// will later protect the source it comes from. This means the callee cannot observe if we
// did in-place or by-copy argument passing, except for pointer equality tests.
let caller_arg_copy = self.copy_fn_arg(&caller_arg)?;
// Special handling for unsized parameters.
if caller_arg.layout.is_unsized() {
if caller_arg_copy.layout.is_unsized() {
// `check_argument_compat` ensures that both have the same type, so we know they will use the metadata the same way.
assert_eq!(caller_arg.layout.ty, callee_arg.layout.ty);
assert_eq!(caller_arg_copy.layout.ty, callee_arg.layout.ty);
// We have to properly pre-allocate the memory for the callee.
// So let's tear down some wrappers.
// So let's tear down some abstractions.
// This all has to be in memory, there are no immediate unsized values.
let src = caller_arg.assert_mem_place();
let src = caller_arg_copy.assert_mem_place();
// The destination cannot be one of these "spread args".
let (dest_frame, dest_local) = callee_arg.assert_local();
// We are just initializing things, so there can't be anything here yet.
@ -331,7 +402,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This
// is true for all `copy_op`, but there are a lot of special cases for argument passing
// specifically.)
self.copy_op(&caller_arg, callee_arg, /*allow_transmute*/ true)
self.copy_op(&caller_arg_copy, callee_arg, /*allow_transmute*/ true)?;
// If this was an in-place pass, protect the place it comes from for the duration of the call.
if let FnArg::InPlace(place) = caller_arg {
M::protect_in_place_function_argument(self, place)?;
}
Ok(())
}
/// Call this function -- pushing the stack frame and initializing the arguments.
@ -346,7 +422,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
&mut self,
fn_val: FnVal<'tcx, M::ExtraFnVal>,
(caller_abi, caller_fn_abi): (Abi, &FnAbi<'tcx, Ty<'tcx>>),
args: &[OpTy<'tcx, M::Provenance>],
args: &[FnArg<'tcx, M::Provenance>],
with_caller_location: bool,
destination: &PlaceTy<'tcx, M::Provenance>,
target: Option<mir::BasicBlock>,
@ -372,8 +448,15 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
match instance.def {
ty::InstanceDef::Intrinsic(def_id) => {
assert!(self.tcx.is_intrinsic(def_id));
// caller_fn_abi is not relevant here, we interpret the arguments directly for each intrinsic.
M::call_intrinsic(self, instance, args, destination, target, unwind)
// FIXME: Should `InPlace` arguments be reset to uninit?
M::call_intrinsic(
self,
instance,
&self.copy_fn_args(args)?,
destination,
target,
unwind,
)
}
ty::InstanceDef::VTableShim(..)
| ty::InstanceDef::ReifyShim(..)
@ -428,7 +511,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
"caller ABI: {:?}, args: {:#?}",
caller_abi,
args.iter()
.map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
.map(|arg| (
arg.layout().ty,
match arg {
FnArg::Copy(op) => format!("copy({:?})", *op),
FnArg::InPlace(place) => format!("in-place({:?})", *place),
}
))
.collect::<Vec<_>>()
);
trace!(
@ -449,7 +538,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// last incoming argument. These two iterators do not have the same type,
// so to keep the code paths uniform we accept an allocation
// (for RustCall ABI only).
let caller_args: Cow<'_, [OpTy<'tcx, M::Provenance>]> =
let caller_args: Cow<'_, [FnArg<'tcx, M::Provenance>]> =
if caller_abi == Abi::RustCall && !args.is_empty() {
// Untuple
let (untuple_arg, args) = args.split_last().unwrap();
@ -458,11 +547,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
args.iter()
.map(|a| Ok(a.clone()))
.chain(
(0..untuple_arg.layout.fields.count())
.map(|i| self.operand_field(untuple_arg, i)),
(0..untuple_arg.layout().fields.count())
.map(|i| self.fn_arg_field(untuple_arg, i)),
)
.collect::<InterpResult<'_, Vec<OpTy<'tcx, M::Provenance>>>>(
)?,
.collect::<InterpResult<'_, Vec<_>>>()?,
)
} else {
// Plain arg passing
@ -523,6 +611,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
caller_ty = caller_ty,
)
}
// Ensure the return place is aligned and dereferenceable, and protect it for
// in-place return value passing.
if let Either::Left(mplace) = destination.as_mplace_or_local() {
self.check_mplace(mplace)?;
} else {
// Nothing to do for locals, they are always properly allocated and aligned.
}
M::protect_in_place_function_argument(self, destination)?;
};
match res {
Err(err) => {
@ -538,7 +634,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// We have to implement all "object safe receivers". So we have to go search for a
// pointer or `dyn Trait` type, but it could be wrapped in newtypes. So recursively
// unwrap those newtypes until we are there.
let mut receiver = args[0].clone();
// An `InPlace` does nothing here, we keep the original receiver intact. We can't
// really pass the argument in-place anyway, and we are constructing a new
// `Immediate` receiver.
let mut receiver = self.copy_fn_arg(&args[0])?;
let receiver_place = loop {
match receiver.layout.ty.kind() {
ty::Ref(..) | ty::RawPtr(..) => {
@ -648,11 +747,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
// Adjust receiver argument. Layout can be any (thin) ptr.
args[0] = ImmTy::from_immediate(
Scalar::from_maybe_pointer(adjusted_receiver, self).into(),
self.layout_of(Ty::new_mut_ptr(self.tcx.tcx, dyn_ty))?,
)
.into();
args[0] = FnArg::Copy(
ImmTy::from_immediate(
Scalar::from_maybe_pointer(adjusted_receiver, self).into(),
self.layout_of(Ty::new_mut_ptr(self.tcx.tcx, dyn_ty))?,
)
.into(),
);
trace!("Patched receiver operand to {:#?}", args[0]);
// recurse with concrete function
self.eval_fn_call(
@ -710,7 +811,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.eval_fn_call(
FnVal::Instance(instance),
(Abi::Rust, fn_abi),
&[arg.into()],
&[FnArg::Copy(arg.into())],
false,
&ret.into(),
Some(target),

View file

@ -13,7 +13,7 @@ use super::{InterpCx, MPlaceTy, Machine, OpTy, PlaceTy};
/// A thing that we can project into, and that has a layout.
/// This wouldn't have to depend on `Machine` but with the current type inference,
/// that's just more convenient to work with (avoids repeating all the `Machine` bounds).
pub trait Value<'mir, 'tcx, M: Machine<'mir, 'tcx>>: Sized {
pub trait Value<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>>: Sized {
/// Gets this value's layout.
fn layout(&self) -> TyAndLayout<'tcx>;
@ -54,7 +54,7 @@ pub trait Value<'mir, 'tcx, M: Machine<'mir, 'tcx>>: Sized {
/// A thing that we can project into given *mutable* access to `ecx`, and that has a layout.
/// This wouldn't have to depend on `Machine` but with the current type inference,
/// that's just more convenient to work with (avoids repeating all the `Machine` bounds).
pub trait ValueMut<'mir, 'tcx, M: Machine<'mir, 'tcx>>: Sized {
pub trait ValueMut<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>>: Sized {
/// Gets this value's layout.
fn layout(&self) -> TyAndLayout<'tcx>;

View file

@ -1050,10 +1050,6 @@ pub type PlaceElem<'tcx> = ProjectionElem<Local, Ty<'tcx>>;
/// there may be other effects: if the type has a validity constraint loading the place might be UB
/// if the validity constraint is not met.
///
/// **Needs clarification:** Ralf proposes that loading a place not have side-effects.
/// This is what is implemented in miri today. Are these the semantics we want for MIR? Is this
/// something we can even decide without knowing more about Rust's memory model?
///
/// **Needs clarification:** Is loading a place that has its variant index set well-formed? Miri
/// currently implements it, but it seems like this may be something to check against in the
/// validator.
@ -1071,6 +1067,16 @@ pub enum Operand<'tcx> {
/// in [UCG#188]. You should not emit MIR that may attempt a subsequent second load of this
/// place without first re-initializing it.
///
/// **Needs clarification:** The operational impact of `Move` is unclear. Currently (both in
/// Miri and codegen) it has no effect at all unless it appears in an argument to `Call`; for
/// `Call` it allows the argument to be passed to the callee "in-place", i.e. the callee might
/// just get a reference to this place instead of a full copy. Miri implements this with a
/// combination of aliasing model "protectors" and putting `uninit` into the place. Ralf
/// proposes that we don't want these semantics for `Move` in regular assignments, because
/// loading a place should not have side-effects, and the aliasing model "protectors" are
/// inherently tied to a function call. Are these the semantics we want for MIR? Is this
/// something we can even decide without knowing more about Rust's memory model?
///
/// [UCG#188]: https://github.com/rust-lang/unsafe-code-guidelines/issues/188
Move(Place<'tcx>),

View file

@ -22,8 +22,8 @@ use rustc_target::spec::abi::Abi as CallAbi;
use crate::MirPass;
use rustc_const_eval::interpret::{
self, compile_time_machine, AllocId, ConstAllocation, ConstValue, Frame, ImmTy, Immediate,
InterpCx, InterpResult, LocalValue, MemoryKind, OpTy, PlaceTy, Pointer, Scalar,
self, compile_time_machine, AllocId, ConstAllocation, ConstValue, FnArg, Frame, ImmTy,
Immediate, InterpCx, InterpResult, LocalValue, MemoryKind, OpTy, PlaceTy, Pointer, Scalar,
StackPopCleanup,
};
@ -185,7 +185,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_instance: ty::Instance<'tcx>,
_abi: CallAbi,
_args: &[OpTy<'tcx>],
_args: &[FnArg<'tcx>],
_destination: &PlaceTy<'tcx>,
_target: Option<BasicBlock>,
_unwind: UnwindAction,

View file

@ -532,7 +532,7 @@ impl<'tcx, 'map, 'a> Visitor<'tcx> for OperandCollector<'tcx, 'map, 'a> {
struct DummyMachine;
impl<'mir, 'tcx> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachine {
impl<'mir, 'tcx: 'mir> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachine {
rustc_const_eval::interpret::compile_time_machine!(<'mir, 'tcx>);
type MemoryKind = !;
const PANIC_ON_ALLOC_FAIL: bool = true;
@ -557,7 +557,7 @@ impl<'mir, 'tcx> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachi
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_instance: ty::Instance<'tcx>,
_abi: rustc_target::spec::abi::Abi,
_args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>],
_args: &[rustc_const_eval::interpret::FnArg<'tcx, Self::Provenance>],
_destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>,
_target: Option<BasicBlock>,
_unwind: UnwindAction,