
miri native-call support: all previously exposed provenance is accessible to the callee

Ralf Jung 2025-02-28 15:54:26 +01:00
parent 2f581937e1
commit 50a37ca56c
6 changed files with 92 additions and 33 deletions
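In short: Miri used to prepare only the allocations reachable from the native call's pointer arguments; the FIXME removed below already noted that C may also access any allocation whose provenance was exposed earlier. This commit implements that: every previously exposed allocation is now prepared before the call. A minimal sketch of the pattern this accepts, condensed from the new `test_pass_ptr_via_previously_shared_mem` test added below (the extern helpers are this commit's C test library):

use std::mem::MaybeUninit;
use std::ptr;

extern "C" {
    // C helpers from the test library added in this commit.
    fn set_shared_mem(ptr: *mut *mut i32);
    fn init_ptr_stored_in_shared_mem(val: i32);
}

fn main() {
    let mut m: *mut i32 = ptr::null_mut();
    let ptr_to_m = &raw mut m;
    // Call 1: C stashes `&m` in a global, so `m`'s provenance is exposed.
    unsafe { set_shared_mem(&raw mut m) };

    // Publish a pointer to fresh memory through the shared location.
    let mut m2: MaybeUninit<i32> = MaybeUninit::uninit();
    unsafe { ptr_to_m.write(m2.as_mut_ptr()) };

    // Call 2 passes no pointer argument at all, yet C reaches `m2` via `m`.
    // This is what the commit makes work: all previously exposed provenance
    // is accessible to the callee.
    unsafe { init_ptr_stored_in_shared_mem(42) };
    assert_eq!(unsafe { m2.assume_init() }, 42);
}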


@@ -955,18 +955,13 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
     /// Handle the effect an FFI call might have on the state of allocations.
     /// This overapproximates the modifications which external code might make to memory:
-    /// We set all reachable allocations as initialized, mark all provenances as exposed
+    /// We set all reachable allocations as initialized, mark all reachable provenances as exposed
     /// and overwrite them with `Provenance::WILDCARD`.
-    pub fn prepare_for_native_call(
-        &mut self,
-        id: AllocId,
-        initial_prov: M::Provenance,
-    ) -> InterpResult<'tcx> {
-        // Expose provenance of the root allocation.
-        M::expose_provenance(self, initial_prov)?;
+    ///
+    /// The allocations in `ids` are assumed to be already exposed.
+    pub fn prepare_for_native_call(&mut self, ids: Vec<AllocId>) -> InterpResult<'tcx> {
         let mut done = FxHashSet::default();
-        let mut todo = vec![id];
+        let mut todo = ids;
         while let Some(id) = todo.pop() {
             if !done.insert(id) {
                 // We already saw this allocation before, don't process it again.
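The loop continuing past this hunk is a plain worklist traversal over reachable allocations. A self-contained sketch of the pattern, with `u64` standing in for `AllocId`, `HashSet` for rustc's `FxHashSet`, and a hypothetical `pointees` callback for enumerating the allocations an allocation references:

use std::collections::HashSet;

fn visit_reachable(roots: Vec<u64>, pointees: impl Fn(u64) -> Vec<u64>) {
    let mut done = HashSet::new();
    // The roots double as the initial worklist; `prepare_for_native_call`
    // reuses its `ids` argument the same way.
    let mut todo = roots;
    while let Some(id) = todo.pop() {
        if !done.insert(id) {
            // Already saw this allocation, don't process it again.
            continue;
        }
        // ...here: mark `id` initialized, expose its provenance, and
        // overwrite it with the wildcard...
        todo.extend(pointees(id));
    }
}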


@@ -285,9 +285,19 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
 impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {}
 pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
-    fn expose_ptr(&self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx> {
+    fn expose_provenance(&self, provenance: Provenance) -> InterpResult<'tcx> {
         let this = self.eval_context_ref();
         let mut global_state = this.machine.alloc_addresses.borrow_mut();
+        let (alloc_id, tag) = match provenance {
+            Provenance::Concrete { alloc_id, tag } => (alloc_id, tag),
+            Provenance::Wildcard => {
+                // No need to do anything for wildcard pointers as
+                // their provenances have already been previously exposed.
+                return interp_ok(());
+            }
+        };
         // In strict mode, we don't need this, so we can save some cycles by not tracking it.
         if global_state.provenance_mode == ProvenanceMode::Strict {
             return interp_ok(());
@@ -422,6 +432,19 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
         let rel_offset = this.truncate_to_target_usize(addr.bytes().wrapping_sub(base_addr));
         Some((alloc_id, Size::from_bytes(rel_offset)))
     }
+
+    /// Prepare all exposed memory for a native call.
+    /// This overapproximates the modifications which external code might make to memory:
+    /// We set all reachable allocations as initialized, mark all reachable provenances as exposed
+    /// and overwrite them with `Provenance::WILDCARD`.
+    fn prepare_exposed_for_native_call(&mut self) -> InterpResult<'tcx> {
+        let this = self.eval_context_mut();
+        // We need to make a deep copy of this list, but it's fine; it also serves as scratch space
+        // for the search within `prepare_for_native_call`.
+        let exposed: Vec<AllocId> =
+            this.machine.alloc_addresses.get_mut().exposed.iter().copied().collect();
+        this.prepare_for_native_call(exposed)
+    }
 }

 impl<'tcx> MiriMachine<'tcx> {
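The "deep copy" comment is as much about the borrow checker as about semantics: `exposed` lives inside the same machine state that `prepare_for_native_call` mutates via `&mut self`, so the set is copied out first, and the copy then doubles as the traversal's worklist. A minimal sketch of that shape, with stand-in types:

struct Machine {
    exposed: Vec<u64>, // stand-in for the set of exposed allocation ids
}

impl Machine {
    fn prepare_for_native_call(&mut self, ids: Vec<u64>) {
        // Mutates state reachable from `ids`; taking the list by value
        // means no borrow of `self.exposed` is still live here.
        let _ = ids;
    }

    fn prepare_exposed_for_native_call(&mut self) {
        // Copy first to end the borrow of `self.exposed`, then hand the
        // copy over as the root set of the search.
        let exposed: Vec<u64> = self.exposed.clone();
        self.prepare_for_native_call(exposed);
    }
}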


@@ -1291,18 +1291,12 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
     /// Called on `ptr as usize` casts.
     /// (Actually computing the resulting `usize` doesn't need machine help,
     /// that's just `Scalar::try_to_int`.)
+    #[inline(always)]
     fn expose_provenance(
         ecx: &InterpCx<'tcx, Self>,
         provenance: Self::Provenance,
     ) -> InterpResult<'tcx> {
-        match provenance {
-            Provenance::Concrete { alloc_id, tag } => ecx.expose_ptr(alloc_id, tag),
-            Provenance::Wildcard => {
-                // No need to do anything for wildcard pointers as
-                // their provenances have already been previously exposed.
-                interp_ok(())
-            }
-        }
+        ecx.expose_provenance(provenance)
     }

     /// Convert a pointer with provenance into an allocation-offset pair and extra provenance info.


@@ -160,16 +160,12 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
             }
            let imm = this.read_immediate(arg)?;
            libffi_args.push(imm_to_carg(&imm, this)?);
-            // If we are passing a pointer, prepare the memory it points to.
+            // If we are passing a pointer, expose its provenance. Below, all exposed memory
+            // (previously exposed and new exposed) will then be properly prepared.
            if matches!(arg.layout.ty.kind(), ty::RawPtr(..)) {
                let ptr = imm.to_scalar().to_pointer(this)?;
                let Some(prov) = ptr.provenance else {
-                    // Pointer without provenance may not access any memory.
-                    continue;
-                };
-                // We use `get_alloc_id` for its best-effort behaviour with Wildcard provenance.
-                let Some(alloc_id) = prov.get_alloc_id() else {
-                    // Wildcard pointer, whatever it points to must be already exposed.
+                    // Pointer without provenance may not access any memory anyway, skip.
                    continue;
                };
                // The first time this happens, print a warning.
@@ -178,12 +174,12 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
                    this.emit_diagnostic(NonHaltingDiagnostic::NativeCallSharedMem);
                }
-                this.prepare_for_native_call(alloc_id, prov)?;
+                this.expose_provenance(prov)?;
            }
        }

-        // FIXME: In the future, we should also call `prepare_for_native_call` on all previously
-        // exposed allocations, since C may access any of them.
+        // Prepare all exposed memory.
+        this.prepare_exposed_for_native_call()?;

        // Convert them to `libffi::high::Arg` type.
        let libffi_args = libffi_args
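Taken together, argument handling is now two-phase: each raw-pointer argument only has its provenance exposed, and a single pass afterwards prepares all exposed memory, previously and newly exposed alike. A reduced model of that flow, with stand-in types rather than Miri's actual API:

use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct AllocId(u64); // stand-in for the interpreter's allocation ids

#[derive(Default)]
struct State {
    exposed: HashSet<AllocId>, // every allocation whose provenance was ever exposed
}

impl State {
    fn expose_provenance(&mut self, id: AllocId) {
        self.exposed.insert(id);
    }

    // Returns the root set for the preparation pass over exposed memory.
    fn prepare_exposed_for_native_call(&mut self) -> Vec<AllocId> {
        self.exposed.iter().copied().collect()
    }
}

fn prepare_call(state: &mut State, ptr_args: &[AllocId]) -> Vec<AllocId> {
    // Phase 1: expose the provenance of each pointer argument.
    for &id in ptr_args {
        state.expose_provenance(id);
    }
    // Phase 2: prepare all exposed memory, not just this call's arguments.
    state.prepare_exposed_for_native_call()
}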


@@ -6,7 +6,7 @@
 #![feature(box_as_ptr)]

 use std::mem::MaybeUninit;
-use std::ptr::null;
+use std::ptr;

 fn main() {
     test_increment_int();
@@ -20,6 +20,8 @@ fn main() {
     test_pass_dangling();
     test_swap_ptr_triple_dangling();
     test_return_ptr();
+    test_pass_ptr_as_int();
+    test_pass_ptr_via_previously_shared_mem();
 }

 /// Test function that modifies an int.
@@ -112,7 +114,7 @@ fn test_swap_ptr() {
     }

     let x = 61;
-    let (mut ptr0, mut ptr1) = (&raw const x, null());
+    let (mut ptr0, mut ptr1) = (&raw const x, ptr::null());

     unsafe { swap_ptr(&mut ptr0, &mut ptr1) };
     assert_eq!(unsafe { *ptr1 }, x);
@@ -131,7 +133,7 @@ fn test_swap_ptr_tuple() {
     }

     let x = 71;
-    let mut tuple = Tuple { ptr0: &raw const x, ptr1: null() };
+    let mut tuple = Tuple { ptr0: &raw const x, ptr1: ptr::null() };

     unsafe { swap_ptr_tuple(&mut tuple) }
     assert_eq!(unsafe { *tuple.ptr1 }, x);
@@ -148,7 +150,7 @@ fn test_overwrite_dangling() {
     drop(b);

     unsafe { overwrite_ptr(&mut ptr) };
-    assert_eq!(ptr, null());
+    assert_eq!(ptr, ptr::null());
 }

 /// Test function that passes a dangling pointer.
@@ -200,3 +202,33 @@ fn test_return_ptr() {
     let ptr = unsafe { return_ptr(ptr) };
     assert_eq!(unsafe { *ptr }, x);
 }
+
+/// Test casting a pointer to an integer and passing that to C.
+fn test_pass_ptr_as_int() {
+    extern "C" {
+        fn pass_ptr_as_int(ptr: usize, set_to_val: i32);
+    }
+
+    let mut m: MaybeUninit<i32> = MaybeUninit::uninit();
+    unsafe { pass_ptr_as_int(m.as_mut_ptr() as usize, 42) };
+    assert_eq!(unsafe { m.assume_init() }, 42);
+}
+
+fn test_pass_ptr_via_previously_shared_mem() {
+    extern "C" {
+        fn set_shared_mem(ptr: *mut *mut i32);
+        fn init_ptr_stored_in_shared_mem(val: i32);
+    }
+
+    let mut m: *mut i32 = ptr::null_mut();
+    let ptr_to_m = &raw mut m;
+    unsafe { set_shared_mem(&raw mut m) };
+
+    let mut m2: MaybeUninit<i32> = MaybeUninit::uninit();
+    // Store a pointer to m2 somewhere that C code can access it.
+    unsafe { ptr_to_m.write(m2.as_mut_ptr()) };
+    // Have C code write there.
+    unsafe { init_ptr_stored_in_shared_mem(42) };
+    // Ensure this memory is now considered initialized.
+    assert_eq!(unsafe { m2.assume_init() }, 42);
+}
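`test_pass_ptr_as_int` works because the `ptr as usize` cast goes through the `expose_provenance` hook shown above. The same rule is visible in pure Rust via the exposed-provenance APIs (a sketch; `std::ptr::with_exposed_provenance_mut` is the standard-library function, stable since Rust 1.84):

use std::ptr;

fn expose_roundtrip() {
    let mut x = 0i32;
    // The cast exposes the pointer's provenance, just like handing the
    // integer to C in `test_pass_ptr_as_int`.
    let addr = &raw mut x as usize;
    // A pointer reconstructed from an exposed address may access the memory.
    let p = ptr::with_exposed_provenance_mut::<i32>(addr);
    unsafe { *p = 42 };
    assert_eq!(x, 42);
}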


@@ -1,4 +1,5 @@
 #include <stddef.h>
+#include <stdint.h>

 // See comments in build_native_lib()
 #define EXPORT __attribute__((visibility("default")))
@@ -88,3 +89,21 @@ EXPORT void swap_ptr_triple_dangling(Triple *t_ptr) {
 EXPORT const int *return_ptr(const int *ptr) {
     return ptr;
 }
+
+/* Test: test_pass_ptr_as_int */
+EXPORT void pass_ptr_as_int(uintptr_t ptr, int set_to_val) {
+    *(int*)ptr = set_to_val;
+}
+
+/* Test: test_pass_ptr_via_previously_shared_mem */
+int** shared_place;
+
+EXPORT void set_shared_mem(int** ptr) {
+    shared_place = ptr;
+}
+
+EXPORT void init_ptr_stored_in_shared_mem(int val) {
+    **shared_place = val;
+}