Rollup merge of #138417 - RalfJung:interpret-cleanup, r=oli-obk

minor interpreter cleanups

- remove the `eval_inline_asm` hook that `@saethlin` added; the planned use never materialized and he agreed it can be removed
- I tried merging `init_alloc_extra` and `adjust_global_allocation`, but it didn't work; leave a comment explaining why. Also, make the allocation code path a bit clearer by renaming `init_alloc_extra` to `init_local_allocation` (see the sketch below for the resulting split).

r? `@oli-obk`
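
For readers who don't want to dig through the diff, the sketch below models the naming split this lands on. It is a minimal stand-alone illustration, not the rustc/Miri API: the names `init_local_allocation`, `adjust_global_allocation`, and the shared `init_allocation` helper mirror the diff, while every type and the `tracked` heuristic are simplified placeholders.

```rust
use std::borrow::Cow;

#[derive(Clone, Debug)]
struct AllocExtra {
    /// Placeholder for the machine's per-allocation state (borrow tracker,
    /// data-race state, backtrace, ...).
    tracked: bool,
}

#[derive(Clone, Debug)]
struct Allocation {
    bytes: Vec<u8>,
    extra: AllocExtra,
}

/// Shared initialization logic, analogous to the private `MiriMachine::init_allocation`.
fn init_allocation(id: u64, size: usize) -> AllocExtra {
    AllocExtra { tracked: size != 0 && id % 2 == 0 }
}

/// Path 1: an allocation created by the interpreter itself (heap, stack, ...).
/// It already uses the machine's byte representation, so only the extra state is needed.
fn init_local_allocation(id: u64, size: usize) -> Allocation {
    Allocation { bytes: vec![0; size], extra: init_allocation(id, size) }
}

/// Path 2: a global (tcx) allocation. Its bytes have to be adjusted into the machine's
/// representation, and the extra state is *not* initialized automatically; this function
/// has to call the shared init logic itself.
fn adjust_global_allocation(id: u64, tcx_bytes: &[u8]) -> Cow<'static, Allocation> {
    let adjusted = Allocation {
        bytes: tcx_bytes.to_vec(),
        extra: init_allocation(id, tcx_bytes.len()),
    };
    Cow::Owned(adjusted)
}

fn main() {
    let local = init_local_allocation(2, 16);
    let global = adjust_global_allocation(3, b"static data");
    println!("local allocation:  {local:?}");
    println!("global allocation: {global:?}");
}
```

The point of the rename is that only the first path runs automatically for machine-local allocations; the global path has to opt into the shared initialization itself, which is why the two hooks could not simply be merged.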
commit 9339bc61c9
Matthias Krüger, 2025-03-13 17:44:09 +01:00, committed by GitHub
5 changed files with 85 additions and 91 deletions

Changed file 1 of 5 (the interpreter's `Machine` trait):

@@ -8,7 +8,6 @@ use std::hash::Hash;
 use rustc_abi::{Align, Size};
 use rustc_apfloat::{Float, FloatConvert};
-use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
 use rustc_middle::query::TyCtxtAt;
 use rustc_middle::ty::Ty;
 use rustc_middle::ty::layout::TyAndLayout;
@@ -21,7 +20,6 @@ use super::{
     AllocBytes, AllocId, AllocKind, AllocRange, Allocation, CTFE_ALLOC_SALT, ConstAllocation,
     CtfeProvenance, FnArg, Frame, ImmTy, InterpCx, InterpResult, MPlaceTy, MemoryKind,
     Misalignment, OpTy, PlaceTy, Pointer, Provenance, RangeSet, interp_ok, throw_unsup,
-    throw_unsup_format,
 };
 
 /// Data returned by [`Machine::after_stack_pop`], and consumed by
@@ -361,6 +359,19 @@ pub trait Machine<'tcx>: Sized {
         size: i64,
     ) -> Option<(AllocId, Size, Self::ProvenanceExtra)>;
 
+    /// Return a "root" pointer for the given allocation: the one that is used for direct
+    /// accesses to this static/const/fn allocation, or the one returned from the heap allocator.
+    ///
+    /// Not called on `extern` or thread-local statics (those use the methods above).
+    ///
+    /// `kind` is the kind of the allocation the pointer points to; it can be `None` when
+    /// it's a global and `GLOBAL_KIND` is `None`.
+    fn adjust_alloc_root_pointer(
+        ecx: &InterpCx<'tcx, Self>,
+        ptr: Pointer,
+        kind: Option<MemoryKind<Self::MemoryKind>>,
+    ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;
+
     /// Called to adjust global allocations to the Provenance and AllocExtra of this machine.
     ///
     /// If `alloc` contains pointers, then they are all pointing to globals.
@@ -375,11 +386,12 @@ pub trait Machine<'tcx>: Sized {
         alloc: &'b Allocation,
     ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>;
 
-    /// Initialize the extra state of an allocation.
+    /// Initialize the extra state of an allocation local to this machine.
     ///
-    /// This is guaranteed to be called exactly once on all allocations that are accessed by the
-    /// program.
-    fn init_alloc_extra(
+    /// This is guaranteed to be called exactly once on all allocations local to this machine.
+    /// It will not be called automatically for global allocations; `adjust_global_allocation`
+    /// has to do that itself if that is desired.
+    fn init_local_allocation(
         ecx: &InterpCx<'tcx, Self>,
         id: AllocId,
         kind: MemoryKind<Self::MemoryKind>,
@@ -387,34 +399,6 @@ pub trait Machine<'tcx>: Sized {
         align: Align,
     ) -> InterpResult<'tcx, Self::AllocExtra>;
 
-    /// Return a "root" pointer for the given allocation: the one that is used for direct
-    /// accesses to this static/const/fn allocation, or the one returned from the heap allocator.
-    ///
-    /// Not called on `extern` or thread-local statics (those use the methods above).
-    ///
-    /// `kind` is the kind of the allocation the pointer points to; it can be `None` when
-    /// it's a global and `GLOBAL_KIND` is `None`.
-    fn adjust_alloc_root_pointer(
-        ecx: &InterpCx<'tcx, Self>,
-        ptr: Pointer,
-        kind: Option<MemoryKind<Self::MemoryKind>>,
-    ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;
-
-    /// Evaluate the inline assembly.
-    ///
-    /// This should take care of jumping to the next block (one of `targets`) when asm goto
-    /// is triggered, `targets[0]` when the assembly falls through, or diverge in case of
-    /// naked_asm! or `InlineAsmOptions::NORETURN` being set.
-    fn eval_inline_asm(
-        _ecx: &mut InterpCx<'tcx, Self>,
-        _template: &'tcx [InlineAsmTemplatePiece],
-        _operands: &[mir::InlineAsmOperand<'tcx>],
-        _options: InlineAsmOptions,
-        _targets: &[mir::BasicBlock],
-    ) -> InterpResult<'tcx> {
-        throw_unsup_format!("inline assembly is not supported")
-    }
-
     /// Hook for performing extra checks on a memory read access.
     ///
     /// This will *not* be called during validation!
@@ -699,7 +683,7 @@ pub macro compile_time_machine(<$tcx: lifetime>) {
         interp_ok(Cow::Borrowed(alloc))
     }
 
-    fn init_alloc_extra(
+    fn init_local_allocation(
        _ecx: &InterpCx<$tcx, Self>,
        _id: AllocId,
        _kind: MemoryKind<Self::MemoryKind>,

Changed file 2 of 5 (the interpreter's allocation path):

@@ -263,9 +263,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
             M::GLOBAL_KIND.map(MemoryKind::Machine),
             "dynamically allocating global memory"
         );
-        // We have set things up so we don't need to call `adjust_from_tcx` here,
-        // so we avoid copying the entire allocation contents.
-        let extra = M::init_alloc_extra(self, id, kind, alloc.size(), alloc.align)?;
+        // This cannot be merged with the `adjust_global_allocation` code path
+        // since here we have an allocation that already uses `M::Bytes`.
+        let extra = M::init_local_allocation(self, id, kind, alloc.size(), alloc.align)?;
         let alloc = alloc.with_extra(extra);
         self.memory.alloc_map.insert(id, (kind, alloc));
         M::adjust_alloc_root_pointer(self, Pointer::from(id), Some(kind))
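
The new comment in this hunk is the crux of why the earlier attempt to merge the two paths failed. Below is a small stand-alone sketch (made-up simplified types, not the real interpreter API) of the situation it describes: the global path starts from plain tcx bytes and has to convert them into the machine's byte type, while the local path already holds the machine representation and only needs its extra state filled in.

```rust
/// Stand-in for the machine's byte representation (`M::Bytes` in the real code).
trait AllocBytes: Sized {
    fn from_plain(bytes: &[u8]) -> Self;
}

struct Allocation<B> {
    bytes: B,
    extra: Option<String>, // placeholder for the machine's `AllocExtra`
}

/// Global path: convert the byte representation, then initialize the extra state.
fn adjust_global_allocation<B: AllocBytes>(tcx_bytes: &[u8]) -> Allocation<B> {
    Allocation { bytes: B::from_plain(tcx_bytes), extra: Some("global".to_string()) }
}

/// Local path: the bytes are *already* `B`, so there is nothing to convert;
/// only the extra state has to be initialized.
fn init_local_allocation<B>(alloc: Allocation<B>) -> Allocation<B> {
    Allocation { extra: Some("local".to_string()), ..alloc }
}

impl AllocBytes for Vec<u8> {
    fn from_plain(bytes: &[u8]) -> Self {
        bytes.to_vec()
    }
}

fn main() {
    let global: Allocation<Vec<u8>> = adjust_global_allocation(b"from tcx");
    let local = init_local_allocation(Allocation { bytes: vec![0u8; 8], extra: None });
    assert_eq!(global.bytes.len(), local.bytes.len());
    assert_eq!(global.extra.as_deref(), Some("global"));
    assert_eq!(local.extra.as_deref(), Some("local"));
}
```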

Changed file 3 of 5 (the interpreter's terminator handling):

@@ -14,7 +14,7 @@ use tracing::{info, instrument, trace};
 use super::{
     FnArg, FnVal, ImmTy, Immediate, InterpCx, InterpResult, Machine, MemPlaceMeta, PlaceTy,
-    Projectable, Scalar, interp_ok, throw_ub,
+    Projectable, Scalar, interp_ok, throw_ub, throw_unsup_format,
 };
 use crate::util;
@@ -590,8 +590,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
                 terminator.kind
             ),
 
-            InlineAsm { template, ref operands, options, ref targets, .. } => {
-                M::eval_inline_asm(self, template, operands, options, targets)?;
+            InlineAsm { .. } => {
+                throw_unsup_format!("inline assembly is not supported");
             }
         }

Changed file 4 of 5 (Miri's `get_global_alloc_bytes`):

@@ -377,7 +377,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
     fn get_global_alloc_bytes(
         &self,
         id: AllocId,
-        kind: MemoryKind,
         bytes: &[u8],
         align: Align,
     ) -> InterpResult<'tcx, MiriAllocBytes> {
@@ -386,7 +385,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
             // In native lib mode, MiriAllocBytes for global allocations are handled via `prepared_alloc_bytes`.
             // This additional call ensures that some `MiriAllocBytes` are always prepared, just in case
             // this function gets called before the first time `addr_from_alloc_id` gets called.
-            this.addr_from_alloc_id(id, kind)?;
+            this.addr_from_alloc_id(id, MiriMemoryKind::Global.into())?;
             // The memory we need here will have already been allocated during an earlier call to
             // `addr_from_alloc_id` for this allocation. So don't create a new `MiriAllocBytes` here, instead
             // fetch the previously prepared bytes from `prepared_alloc_bytes`.

Changed file 5 of 5 (Miri's `MiriMachine`):

@@ -814,6 +814,59 @@ impl<'tcx> MiriMachine<'tcx> {
             .and_then(|(_allocated, deallocated)| *deallocated)
             .map(Span::data)
     }
+
+    fn init_allocation(
+        ecx: &MiriInterpCx<'tcx>,
+        id: AllocId,
+        kind: MemoryKind,
+        size: Size,
+        align: Align,
+    ) -> InterpResult<'tcx, AllocExtra<'tcx>> {
+        if ecx.machine.tracked_alloc_ids.contains(&id) {
+            ecx.emit_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id, size, align, kind));
+        }
+
+        let borrow_tracker = ecx
+            .machine
+            .borrow_tracker
+            .as_ref()
+            .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine));
+
+        let data_race = ecx.machine.data_race.as_ref().map(|data_race| {
+            data_race::AllocState::new_allocation(
+                data_race,
+                &ecx.machine.threads,
+                size,
+                kind,
+                ecx.machine.current_span(),
+            )
+        });
+        let weak_memory = ecx.machine.weak_memory.then(weak_memory::AllocState::new_allocation);
+
+        // If an allocation is leaked, we want to report a backtrace to indicate where it was
+        // allocated. We don't need to record a backtrace for allocations which are allowed to
+        // leak.
+        let backtrace = if kind.may_leak() || !ecx.machine.collect_leak_backtraces {
+            None
+        } else {
+            Some(ecx.generate_stacktrace())
+        };
+
+        if matches!(kind, MemoryKind::Machine(kind) if kind.should_save_allocation_span()) {
+            ecx.machine
+                .allocation_spans
+                .borrow_mut()
+                .insert(id, (ecx.machine.current_span(), None));
+        }
+
+        interp_ok(AllocExtra {
+            borrow_tracker,
+            data_race,
+            weak_memory,
+            backtrace,
+            sync: FxHashMap::default(),
+        })
+    }
 }
 
 impl VisitProvenance for MiriMachine<'_> {
@@ -1200,57 +1253,15 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
         }
     }
 
-    fn init_alloc_extra(
+    fn init_local_allocation(
         ecx: &MiriInterpCx<'tcx>,
         id: AllocId,
         kind: MemoryKind,
         size: Size,
         align: Align,
     ) -> InterpResult<'tcx, Self::AllocExtra> {
-        if ecx.machine.tracked_alloc_ids.contains(&id) {
-            ecx.emit_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id, size, align, kind));
-        }
-
-        let borrow_tracker = ecx
-            .machine
-            .borrow_tracker
-            .as_ref()
-            .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine));
-
-        let data_race = ecx.machine.data_race.as_ref().map(|data_race| {
-            data_race::AllocState::new_allocation(
-                data_race,
-                &ecx.machine.threads,
-                size,
-                kind,
-                ecx.machine.current_span(),
-            )
-        });
-        let weak_memory = ecx.machine.weak_memory.then(weak_memory::AllocState::new_allocation);
-
-        // If an allocation is leaked, we want to report a backtrace to indicate where it was
-        // allocated. We don't need to record a backtrace for allocations which are allowed to
-        // leak.
-        let backtrace = if kind.may_leak() || !ecx.machine.collect_leak_backtraces {
-            None
-        } else {
-            Some(ecx.generate_stacktrace())
-        };
-
-        if matches!(kind, MemoryKind::Machine(kind) if kind.should_save_allocation_span()) {
-            ecx.machine
-                .allocation_spans
-                .borrow_mut()
-                .insert(id, (ecx.machine.current_span(), None));
-        }
-
-        interp_ok(AllocExtra {
-            borrow_tracker,
-            data_race,
-            weak_memory,
-            backtrace,
-            sync: FxHashMap::default(),
-        })
+        assert!(kind != MiriMemoryKind::Global.into());
+        MiriMachine::init_allocation(ecx, id, kind, size, align)
     }
 
     fn adjust_alloc_root_pointer(
@@ -1340,13 +1351,13 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
         alloc: &'b Allocation,
     ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
     {
-        let kind = Self::GLOBAL_KIND.unwrap().into();
         let alloc = alloc.adjust_from_tcx(
             &ecx.tcx,
-            |bytes, align| ecx.get_global_alloc_bytes(id, kind, bytes, align),
+            |bytes, align| ecx.get_global_alloc_bytes(id, bytes, align),
             |ptr| ecx.global_root_pointer(ptr),
         )?;
-        let extra = Self::init_alloc_extra(ecx, id, kind, alloc.size(), alloc.align)?;
+        let kind = MiriMemoryKind::Global.into();
+        let extra = MiriMachine::init_allocation(ecx, id, kind, alloc.size(), alloc.align)?;
         interp_ok(Cow::Owned(alloc.with_extra(extra)))
     }