Auto merge of #116564 - oli-obk:evaluated_static_in_metadata, r=RalfJung,cjgillot

Store static initializers in metadata instead of the MIR of statics.

This means that adding generic statics would be even more difficult, as we can no longer evaluate statics from other crates, but the subtle issues I have encountered make me think that having this be an explicit problem is better.

The issue is that

```rust
static mut FOO: &mut u32 = &mut 42;
static mut BAR: &mut u32 = unsafe { FOO };
```

gets two different allocations for the `42`, instead of both statics referring to the same one. The same is true for non-`mut` statics, but promotion makes `static FOO: &u32 = &42;` annoying to use as a demonstration.
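
Below is a hedged runtime sketch of how the duplication can be observed; the `main` function and the pointer comparison are mine, not from the PR, and the snippet may need nightly features (e.g. `const_mut_refs`) to compile:

```rust
static mut FOO: &mut u32 = &mut 42;
static mut BAR: &mut u32 = unsafe { FOO };

fn main() {
    unsafe {
        // Reborrow through each static to get the address it points to,
        // without moving the `&mut` out of the static.
        let a: *const u32 = &*FOO;
        let b: *const u32 = &*BAR;
        // Intended behavior: both statics point at the same allocation, so this
        // prints `true`; with the duplication described above it prints `false`.
        println!("FOO and BAR alias: {}", core::ptr::eq(a, b));
    }
}
```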

Fixes https://github.com/rust-lang/rust/issues/61345

## Why is this being done?

In order to ensure all crates see the same nested allocations (which is the last issue that needs fixing before we can stabilize [`const_mut_refs`](https://github.com/rust-lang/rust/issues/57349)), I am working on creating anonymous static items (anonymous from the Rust side; to LLVM they look like regular static items) for the nested allocations in a static. If we evaluate the static item again in a downstream crate, we will end up duplicating its nested allocations (and in some cases, like the `match` case, even duplicating the main allocation).
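
For concreteness, here is a small, hedged illustration (my own example, not from the PR) of what a nested allocation is; it assumes only that `Vec::new` is a `const fn` and that the tail expression of a static initializer is lifetime-extended:

```rust
// The root allocation of `S` stores only the reference; the empty `Vec` header
// it points to lives in a second, nested allocation kept alive by the static's
// tail-expression ("outer scope") rule rather than by promotion. The work
// described above exports such nested allocations as anonymous static items so
// downstream crates reuse them instead of re-evaluating `S` and minting duplicates.
static S: &Vec<u32> = &Vec::new();

fn main() {
    // Observe where the nested allocation ended up at runtime.
    println!("nested allocation of S lives at {:p}", S);
}
```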

Committed by bors on 2024-02-15 10:28:31 +00:00 in commit 6a4222b511.
44 changed files with 391 additions and 193 deletions.

@ -19,6 +19,7 @@ use crate::interpret::{ErrorHandled, InterpError, InterpErrorInfo, MachineStopTy
pub enum ConstEvalErrKind {
ConstAccessesMutGlobal,
ModifiedGlobal,
RecursiveStatic,
AssertFailure(AssertKind<ConstInt>),
Panic { msg: Symbol, line: u32, col: u32, file: Symbol },
}
@ -31,13 +32,14 @@ impl MachineStopType for ConstEvalErrKind {
ConstAccessesMutGlobal => const_eval_const_accesses_mut_global,
ModifiedGlobal => const_eval_modified_global,
Panic { .. } => const_eval_panic,
RecursiveStatic => const_eval_recursive_static,
AssertFailure(x) => x.diagnostic_message(),
}
}
fn add_args(self: Box<Self>, adder: &mut dyn FnMut(DiagnosticArgName, DiagnosticArgValue)) {
use ConstEvalErrKind::*;
match *self {
ConstAccessesMutGlobal | ModifiedGlobal => {}
RecursiveStatic | ConstAccessesMutGlobal | ModifiedGlobal => {}
AssertFailure(kind) => kind.add_args(adder),
Panic { msg, line, col, file } => {
adder("msg".into(), msg.into_diagnostic_arg());

@ -2,12 +2,12 @@ use either::{Left, Right};
use rustc_hir::def::DefKind;
use rustc_middle::mir::interpret::{AllocId, ErrorHandled, InterpErrorInfo};
use rustc_middle::mir::pretty::write_allocation_bytes;
use rustc_middle::mir::{self, ConstAlloc, ConstValue};
use rustc_middle::traits::Reveal;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::def_id::LocalDefId;
use rustc_span::Span;
use rustc_target::abi::{self, Abi};
@ -17,8 +17,9 @@ use crate::errors;
use crate::errors::ConstEvalError;
use crate::interpret::eval_nullary_intrinsic;
use crate::interpret::{
intern_const_alloc_recursive, CtfeValidationMode, GlobalId, Immediate, InternKind, InterpCx,
InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, StackPopCleanup,
create_static_alloc, intern_const_alloc_recursive, take_static_root_alloc, CtfeValidationMode,
GlobalId, Immediate, InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind,
OpTy, RefTracking, StackPopCleanup,
};
// Returns a pointer to where the result lives
@ -46,7 +47,21 @@ fn eval_body_using_ecx<'mir, 'tcx>(
);
let layout = ecx.layout_of(body.bound_return_ty().instantiate(tcx, cid.instance.args))?;
assert!(layout.is_sized());
let ret = ecx.allocate(layout, MemoryKind::Stack)?;
let intern_kind = if cid.promoted.is_some() {
InternKind::Promoted
} else {
match tcx.static_mutability(cid.instance.def_id()) {
Some(m) => InternKind::Static(m),
None => InternKind::Constant,
}
};
let ret = if let InternKind::Static(_) = intern_kind {
create_static_alloc(ecx, cid.instance.def_id(), layout)?
} else {
ecx.allocate(layout, MemoryKind::Stack)?
};
trace!(
"eval_body_using_ecx: pushing stack frame for global: {}{}",
@ -66,14 +81,6 @@ fn eval_body_using_ecx<'mir, 'tcx>(
while ecx.step()? {}
// Intern the result
let intern_kind = if cid.promoted.is_some() {
InternKind::Promoted
} else {
match tcx.static_mutability(cid.instance.def_id()) {
Some(m) => InternKind::Static(m),
None => InternKind::Constant,
}
};
intern_const_alloc_recursive(ecx, intern_kind, &ret)?;
Ok(ret)
@ -249,11 +256,37 @@ pub fn eval_to_const_value_raw_provider<'tcx>(
tcx.eval_to_allocation_raw(key).map(|val| turn_into_const_value(tcx, val, key))
}
#[instrument(skip(tcx), level = "debug")]
pub fn eval_static_initializer_provider<'tcx>(
tcx: TyCtxt<'tcx>,
def_id: LocalDefId,
) -> ::rustc_middle::mir::interpret::EvalStaticInitializerRawResult<'tcx> {
assert!(tcx.is_static(def_id.to_def_id()));
let instance = ty::Instance::mono(tcx, def_id.to_def_id());
let cid = rustc_middle::mir::interpret::GlobalId { instance, promoted: None };
let mut ecx = InterpCx::new(
tcx,
tcx.def_span(def_id),
ty::ParamEnv::reveal_all(),
// Statics (and promoteds inside statics) may access other statics, because unlike consts
// they do not have to behave "as if" they were evaluated at runtime.
CompileTimeInterpreter::new(CanAccessMutGlobal::Yes, CheckAlignment::Error),
);
let alloc_id = eval_in_interpreter(&mut ecx, cid, true)?.alloc_id;
let alloc = take_static_root_alloc(&mut ecx, alloc_id);
let alloc = tcx.mk_const_alloc(alloc);
Ok(alloc)
}
#[instrument(skip(tcx), level = "debug")]
pub fn eval_to_allocation_raw_provider<'tcx>(
tcx: TyCtxt<'tcx>,
key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
) -> ::rustc_middle::mir::interpret::EvalToAllocationRawResult<'tcx> {
// This shouldn't be used for statics, since statics are conceptually places,
// not values -- so what we do here could break pointer identity.
assert!(key.value.promoted.is_some() || !tcx.is_static(key.value.instance.def_id()));
// Const eval always happens in Reveal::All mode in order to be able to use the hidden types of
// opaque types. This is needed for trivial things like `size_of`, but also for using associated
// types that are not specified in the opaque type.
@ -273,7 +306,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
let def = cid.instance.def.def_id();
let is_static = tcx.is_static(def);
let ecx = InterpCx::new(
let mut ecx = InterpCx::new(
tcx,
tcx.def_span(def),
key.param_env,
@ -283,11 +316,11 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
// so we have to reject reading mutable global memory.
CompileTimeInterpreter::new(CanAccessMutGlobal::from(is_static), CheckAlignment::Error),
);
eval_in_interpreter(ecx, cid, is_static)
eval_in_interpreter(&mut ecx, cid, is_static)
}
pub fn eval_in_interpreter<'mir, 'tcx>(
mut ecx: InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
ecx: &mut InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
cid: GlobalId<'tcx>,
is_static: bool,
) -> ::rustc_middle::mir::interpret::EvalToAllocationRawResult<'tcx> {
@ -295,7 +328,7 @@ pub fn eval_in_interpreter<'mir, 'tcx>(
debug_assert_eq!(is_static, ecx.tcx.static_mutability(cid.instance.def_id()).is_some());
let res = ecx.load_mir(cid.instance.def, cid.promoted);
match res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, body)) {
match res.and_then(|body| eval_body_using_ecx(ecx, cid, body)) {
Err(error) => {
let (error, backtrace) = error.into_parts();
backtrace.print_backtrace();
@ -330,8 +363,11 @@ pub fn eval_in_interpreter<'mir, 'tcx>(
}
Ok(mplace) => {
// Since evaluation had no errors, validate the resulting constant.
// This is a separate `try` block to provide more targeted error reporting.
// Temporarily allow access to the static_root_alloc_id for the purpose of validation.
let static_root_alloc_id = ecx.machine.static_root_alloc_id.take();
let validation = const_validate_mplace(&ecx, &mplace, cid);
ecx.machine.static_root_alloc_id = static_root_alloc_id;
let alloc_id = mplace.ptr().provenance.unwrap().alloc_id();
@ -383,15 +419,9 @@ pub fn const_report_error<'mir, 'tcx>(
let ub_note = matches!(error, InterpError::UndefinedBehavior(_)).then(|| {});
let alloc = ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner();
let mut bytes = String::new();
if alloc.size() != abi::Size::ZERO {
bytes = "\n".into();
// FIXME(translation) there might be pieces that are translatable.
write_allocation_bytes(*ecx.tcx, alloc, &mut bytes, " ").unwrap();
}
let raw_bytes =
errors::RawBytesNote { size: alloc.size().bytes(), align: alloc.align.bytes(), bytes };
let bytes = ecx.print_alloc_bytes_for_diagnostics(alloc_id);
let (size, align, _) = ecx.get_alloc_info(alloc_id);
let raw_bytes = errors::RawBytesNote { size: size.bytes(), align: align.bytes(), bytes };
crate::const_eval::report(
*ecx.tcx,

@ -58,6 +58,9 @@ pub struct CompileTimeInterpreter<'mir, 'tcx> {
/// Whether to check alignment during evaluation.
pub(super) check_alignment: CheckAlignment,
/// Used to prevent reads from a static's base allocation, as that may allow for self-initialization.
pub(crate) static_root_alloc_id: Option<AllocId>,
}
#[derive(Copy, Clone)]
@ -91,6 +94,7 @@ impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
stack: Vec::new(),
can_access_mut_global,
check_alignment,
static_root_alloc_id: None,
}
}
}
@ -746,6 +750,17 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
// Everything else is fine.
Ok(())
}
fn before_alloc_read(
ecx: &InterpCx<'mir, 'tcx, Self>,
alloc_id: AllocId,
) -> InterpResult<'tcx> {
if Some(alloc_id) == ecx.machine.static_root_alloc_id {
Err(ConstEvalErrKind::RecursiveStatic.into())
} else {
Ok(())
}
}
}
// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups

@ -153,7 +153,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
);
}
self.copy_op(src, dest, /*allow_transmute*/ true)?;
self.copy_op_allow_transmute(src, dest)?;
}
}
Ok(())
@ -441,7 +441,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
if src_field.layout.is_1zst() && cast_ty_field.is_1zst() {
// Skip 1-ZST fields.
} else if src_field.layout.ty == cast_ty_field.ty {
self.copy_op(&src_field, &dst_field, /*allow_transmute*/ false)?;
self.copy_op(&src_field, &dst_field)?;
} else {
if found_cast_field {
span_bug!(self.cur_span(), "unsize_into: more than one field to cast");

@ -899,7 +899,19 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
.local_to_op(self.frame(), mir::RETURN_PLACE, None)
.expect("return place should always be live");
let dest = self.frame().return_place.clone();
let err = self.copy_op(&op, &dest, /*allow_transmute*/ true);
let err = if self.stack().len() == 1 {
// The initializer of constants and statics will get validated separately
// after the constant has been fully evaluated. While we could fall back to the default
// code path, that will cause -Zenforce-validity to cycle on static initializers.
// Reading from a static's memory is not allowed during its evaluation, and will always
// trigger a cycle error. Validation must read from the memory of the current item.
// For Miri this means we do not validate the root frame return value,
// but Miri anyway calls `read_target_isize` on that so separate validation
// is not needed.
self.copy_op_no_dest_validation(&op, &dest)
} else {
self.copy_op_allow_transmute(&op, &dest)
};
trace!("return value: {:?}", self.dump_place(&dest));
// We delay actually short-circuiting on this error until *after* the stack frame is
// popped, since we want this error to be attributed to the caller, whose type defines

@ -85,6 +85,8 @@ pub enum InternKind {
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
///
/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
#[instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<
'mir,
@ -97,12 +99,12 @@ pub fn intern_const_alloc_recursive<
) -> Result<(), ErrorGuaranteed> {
// We are interning recursively, and for mutability we are distinguishing the "root" allocation
// that we are starting in, and all other allocations that we are encountering recursively.
let (base_mutability, inner_mutability) = match intern_kind {
let (base_mutability, inner_mutability, is_static) = match intern_kind {
InternKind::Constant | InternKind::Promoted => {
// Completely immutable. Interning anything mutably here can only lead to unsoundness,
// since all consts are conceptually independent values but share the same underlying
// memory.
(Mutability::Not, Mutability::Not)
(Mutability::Not, Mutability::Not, false)
}
InternKind::Static(Mutability::Not) => {
(
@ -115,22 +117,31 @@ pub fn intern_const_alloc_recursive<
// Inner allocations are never mutable. They can only arise via the "tail
// expression" / "outer scope" rule, and we treat them consistently with `const`.
Mutability::Not,
true,
)
}
InternKind::Static(Mutability::Mut) => {
// Just make everything mutable. We accept code like
// `static mut X = &mut [42]`, so even inner allocations need to be mutable.
(Mutability::Mut, Mutability::Mut)
(Mutability::Mut, Mutability::Mut, true)
}
};
// Intern the base allocation, and initialize todo list for recursive interning.
let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
trace!(?base_alloc_id, ?base_mutability);
// First we intern the base allocation, as it requires a different mutability.
// This gives us the initial set of nested allocations, which will then all be processed
// recursively in the loop below.
let mut todo: Vec<_> =
intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect();
let mut todo: Vec<_> = if is_static {
// Do not steal the root allocation, we need it later for `take_static_root_alloc`
// But still change its mutability to match the requested one.
let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
alloc.1.mutability = base_mutability;
alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
} else {
intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect()
};
// We need to distinguish "has just been interned" from "was already in `tcx`",
// so we track this in a separate set.
let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
@ -148,7 +159,17 @@ pub fn intern_const_alloc_recursive<
// before validation, and interning doesn't know the type of anything, this means we can't show
// better errors. Maybe we should consider doing validation before interning in the future.
while let Some(prov) = todo.pop() {
trace!(?prov);
let alloc_id = prov.alloc_id();
if base_alloc_id == alloc_id && is_static {
// This is a pointer to the static itself. It's ok for a static to refer to itself,
// even mutably. Whether that mutable pointer is legal at all is checked in validation.
// See tests/ui/statics/recursive_interior_mut.rs for how such a situation can occur.
// We also already collected all the nested allocations, so there's no need to do that again.
continue;
}
// Crucially, we check this *before* checking whether the `alloc_id`
// has already been interned. The point of this check is to ensure that when
// there are multiple pointers to the same allocation, they are *all* immutable.
@ -176,6 +197,7 @@ pub fn intern_const_alloc_recursive<
// `&None::<Cell<i32>>` lead to promotion that can produce mutable pointers. We rely
// on the promotion analysis not screwing up to ensure that it is sound to intern
// promoteds as immutable.
trace!("found bad mutable pointer");
found_bad_mutable_pointer = true;
}
if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {

@ -120,7 +120,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let val = self.tcx.span_as_caller_location(span);
let val =
self.const_val_to_op(val, self.tcx.caller_location_ty(), Some(dest.layout))?;
self.copy_op(&val, dest, /* allow_transmute */ false)?;
self.copy_op(&val, dest)?;
}
sym::min_align_of_val | sym::size_of_val => {
@ -157,7 +157,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
tcx.const_eval_global_id(self.param_env, gid, Some(tcx.span))
})?;
let val = self.const_val_to_op(val, ty, Some(dest.layout))?;
self.copy_op(&val, dest, /*allow_transmute*/ false)?;
self.copy_op(&val, dest)?;
}
sym::ctpop
@ -391,7 +391,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
} else {
self.project_index(&input, i)?.into()
};
self.copy_op(&value, &place, /*allow_transmute*/ false)?;
self.copy_op(&value, &place)?;
}
}
sym::simd_extract => {
@ -401,15 +401,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
index < input_len,
"index `{index}` must be in bounds of vector with length {input_len}"
);
self.copy_op(
&self.project_index(&input, index)?,
dest,
/*allow_transmute*/ false,
)?;
self.copy_op(&self.project_index(&input, index)?, dest)?;
}
sym::likely | sym::unlikely | sym::black_box => {
// These just return their argument
self.copy_op(&args[0], dest, /*allow_transmute*/ false)?;
self.copy_op(&args[0], dest)?;
}
sym::raw_eq => {
let result = self.raw_eq_intrinsic(&args[0], &args[1])?;

@ -388,6 +388,8 @@ pub trait Machine<'mir, 'tcx: 'mir>: Sized {
/// Takes read-only access to the allocation so we can keep all the memory read
/// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
/// need to mutate.
///
/// This is not invoked for ZST accesses, as no read actually happens.
#[inline(always)]
fn before_memory_read(
_tcx: TyCtxtAt<'tcx>,
@ -399,7 +401,20 @@ pub trait Machine<'mir, 'tcx: 'mir>: Sized {
Ok(())
}
/// Hook for performing extra checks on any memory read access,
/// that involves an allocation, even ZST reads.
///
/// Used to prevent statics from self-initializing by reading from their own memory
/// as it is being initialized.
fn before_alloc_read(
_ecx: &InterpCx<'mir, 'tcx, Self>,
_alloc_id: AllocId,
) -> InterpResult<'tcx> {
Ok(())
}
/// Hook for performing extra checks on a memory write access.
/// This is not invoked for ZST accesses, as no write actually happens.
#[inline(always)]
fn before_memory_write(
_tcx: TyCtxtAt<'tcx>,

@ -624,19 +624,20 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
size,
CheckInAllocMsg::MemoryAccessTest,
|alloc_id, offset, prov| {
// We want to call the hook on *all* accesses that involve an AllocId,
// including zero-sized accesses. That means we have to do it here
// rather than below in the `Some` branch.
M::before_alloc_read(self, alloc_id)?;
let alloc = self.get_alloc_raw(alloc_id)?;
Ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc)))
},
)?;
if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
let range = alloc_range(offset, size);
M::before_memory_read(self.tcx, &self.machine, &alloc.extra, (alloc_id, prov), range)?;
Ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id }))
} else {
// Even in this branch we have to be sure that we actually access the allocation, in
// order to ensure that `static FOO: Type = FOO;` causes a cycle error instead of
// magically pulling *any* ZST value from the ether. However, the `get_raw` above is
// always called when `ptr` has an `AllocId`.
Ok(None)
}
}
@ -855,6 +856,21 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
DumpAllocs { ecx: self, allocs }
}
/// Print the allocation's bytes, without any nested allocations.
pub fn print_alloc_bytes_for_diagnostics(&self, id: AllocId) -> String {
// Using the "raw" access to avoid the `before_alloc_read` hook, we specifically
// want to be able to read all memory for diagnostics, even if that is cyclic.
let alloc = self.get_alloc_raw(id).unwrap();
let mut bytes = String::new();
if alloc.size() != Size::ZERO {
bytes = "\n".into();
// FIXME(translation) there might be pieces that are translatable.
rustc_middle::mir::pretty::write_allocation_bytes(*self.tcx, alloc, &mut bytes, " ")
.unwrap();
}
bytes
}
/// Find leaked allocations. Allocations reachable from `static_roots` or a `Global` allocation
/// are not considered leaked, as well as leaks whose kind's `may_leak()` returns true.
pub fn find_leaked_allocations(

@ -39,4 +39,5 @@ use self::{
};
pub(crate) use self::intrinsics::eval_nullary_intrinsic;
pub(crate) use self::util::{create_static_alloc, take_static_root_alloc};
use eval_context::{from_known_layout, mir_assign_valid_types};

@ -759,14 +759,57 @@ where
}
/// Copies the data from an operand to a place.
/// `allow_transmute` indicates whether the layouts may disagree.
/// The layouts of the `src` and `dest` may disagree.
/// Does not perform validation of the destination.
/// The only known use case for this function is checking the return
/// value of a static during stack frame popping.
#[inline(always)]
pub(super) fn copy_op_no_dest_validation(
&mut self,
src: &impl Readable<'tcx, M::Provenance>,
dest: &impl Writeable<'tcx, M::Provenance>,
) -> InterpResult<'tcx> {
self.copy_op_inner(
src, dest, /* allow_transmute */ true, /* validate_dest */ false,
)
}
/// Copies the data from an operand to a place.
/// The layouts of the `src` and `dest` may disagree.
#[inline(always)]
pub fn copy_op_allow_transmute(
&mut self,
src: &impl Readable<'tcx, M::Provenance>,
dest: &impl Writeable<'tcx, M::Provenance>,
) -> InterpResult<'tcx> {
self.copy_op_inner(
src, dest, /* allow_transmute */ true, /* validate_dest */ true,
)
}
/// Copies the data from an operand to a place.
/// `src` and `dest` must have the same layout and the copied value will be validated.
#[inline(always)]
#[instrument(skip(self), level = "debug")]
pub fn copy_op(
&mut self,
src: &impl Readable<'tcx, M::Provenance>,
dest: &impl Writeable<'tcx, M::Provenance>,
) -> InterpResult<'tcx> {
self.copy_op_inner(
src, dest, /* allow_transmute */ false, /* validate_dest */ true,
)
}
/// Copies the data from an operand to a place.
/// `allow_transmute` indicates whether the layouts may disagree.
#[inline(always)]
#[instrument(skip(self), level = "debug")]
fn copy_op_inner(
&mut self,
src: &impl Readable<'tcx, M::Provenance>,
dest: &impl Writeable<'tcx, M::Provenance>,
allow_transmute: bool,
validate_dest: bool,
) -> InterpResult<'tcx> {
// Generally for transmutation, data must be valid both at the old and new type.
// But if the types are the same, the 2nd validation below suffices.
@ -777,7 +820,7 @@ where
// Do the actual copy.
self.copy_op_no_validate(src, dest, allow_transmute)?;
if M::enforce_validity(self, dest.layout()) {
if validate_dest && M::enforce_validity(self, dest.layout()) {
// Data got changed, better make sure it matches the type!
self.validate_operand(&dest.to_op(self)?)?;
}

@ -151,12 +151,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Use(ref operand) => {
// Avoid recomputing the layout
let op = self.eval_operand(operand, Some(dest.layout))?;
self.copy_op(&op, &dest, /*allow_transmute*/ false)?;
self.copy_op(&op, &dest)?;
}
CopyForDeref(place) => {
let op = self.eval_place_to_op(place, Some(dest.layout))?;
self.copy_op(&op, &dest, /* allow_transmute*/ false)?;
self.copy_op(&op, &dest)?;
}
BinaryOp(bin_op, box (ref left, ref right)) => {
@ -316,7 +316,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let field_index = active_field_index.unwrap_or(field_index);
let field_dest = self.project_field(&variant_dest, field_index.as_usize())?;
let op = self.eval_operand(operand, Some(field_dest.layout))?;
self.copy_op(&op, &field_dest, /*allow_transmute*/ false)?;
self.copy_op(&op, &field_dest)?;
}
self.write_discriminant(variant_index, dest)
}
@ -339,7 +339,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
} else {
// Write the src to the first element.
let first = self.project_index(&dest, 0)?;
self.copy_op(&src, &first, /*allow_transmute*/ false)?;
self.copy_op(&src, &first)?;
// This is performance-sensitive code for big static/const arrays! So we
// avoid writing each operand individually and instead just make many copies

@ -481,7 +481,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This
// is true for all `copy_op`, but there are a lot of special cases for argument passing
// specifically.)
self.copy_op(&caller_arg_copy, &callee_arg, /*allow_transmute*/ true)?;
self.copy_op_allow_transmute(&caller_arg_copy, &callee_arg)?;
// If this was an in-place pass, protect the place it comes from for the duration of the call.
if let FnArg::InPlace(place) = caller_arg {
M::protect_in_place_function_argument(self, place)?;

@ -1,9 +1,15 @@
use rustc_middle::mir::interpret::InterpResult;
use crate::const_eval::CompileTimeEvalContext;
use crate::interpret::{MemPlaceMeta, MemoryKind};
use rustc_middle::mir::interpret::{AllocId, Allocation, InterpResult, Pointer};
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::{
self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor,
};
use rustc_span::def_id::DefId;
use std::ops::ControlFlow;
use super::MPlaceTy;
/// Checks whether a type contains generic parameters which must be instantiated.
///
/// In case it does, returns a `TooGeneric` const eval error. Note that due to polymorphization
@ -73,3 +79,23 @@ where
Ok(())
}
}
pub(crate) fn take_static_root_alloc<'mir, 'tcx: 'mir>(
ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
alloc_id: AllocId,
) -> Allocation {
ecx.memory.alloc_map.swap_remove(&alloc_id).unwrap().1
}
pub(crate) fn create_static_alloc<'mir, 'tcx: 'mir>(
ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
static_def_id: DefId,
layout: TyAndLayout<'tcx>,
) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
let alloc = Allocation::try_uninit(layout.size, layout.align.abi)?;
let alloc_id = ecx.tcx.reserve_and_set_static_alloc(static_def_id);
assert_eq!(ecx.machine.static_root_alloc_id, None);
ecx.machine.static_root_alloc_id = Some(alloc_id);
assert!(ecx.memory.alloc_map.insert(alloc_id, (MemoryKind::Stack, alloc)).is_none());
Ok(ecx.ptr_with_meta_to_mplace(Pointer::from(alloc_id).into(), MemPlaceMeta::None, layout))
}

@ -27,9 +27,9 @@ use rustc_target::abi::{
use std::hash::Hash;
use super::{
format_interp_error, AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx,
InterpResult, MPlaceTy, Machine, MemPlaceMeta, OpTy, Pointer, Projectable, Scalar,
ValueVisitor,
format_interp_error, machine::AllocMap, AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy,
Immediate, InterpCx, InterpResult, MPlaceTy, Machine, MemPlaceMeta, OpTy, Pointer, Projectable,
Scalar, ValueVisitor,
};
// for the validation errors
@ -712,11 +712,14 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
fn in_mutable_memory(&self, op: &OpTy<'tcx, M::Provenance>) -> bool {
if let Some(mplace) = op.as_mplace_or_imm().left() {
if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
if self.ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().mutability
== Mutability::Mut
{
return true;
}
let mutability = match self.ecx.tcx.global_alloc(alloc_id) {
GlobalAlloc::Static(_) => {
self.ecx.memory.alloc_map.get(alloc_id).unwrap().1.mutability
}
GlobalAlloc::Memory(alloc) => alloc.inner().mutability,
_ => span_bug!(self.ecx.tcx.span, "not a memory allocation"),
};
return mutability == Mutability::Mut;
}
}
false

@ -40,6 +40,7 @@ pub fn provide(providers: &mut Providers) {
const_eval::provide(providers);
providers.eval_to_const_value_raw = const_eval::eval_to_const_value_raw_provider;
providers.eval_to_allocation_raw = const_eval::eval_to_allocation_raw_provider;
providers.eval_static_initializer = const_eval::eval_static_initializer_provider;
providers.hooks.const_caller_location = util::caller_location::const_caller_location_provider;
providers.eval_to_valtree = |tcx, param_env_and_value| {
let (param_env, raw) = param_env_and_value.into_parts();