
Rollup merge of #94343 - RalfJung:fn-ptr, r=oli-obk

Miri fn ptr check: don't use conservative null check

In https://github.com/rust-lang/rust/pull/94270 I used the wrong NULL check for function pointers: `memory.ptr_may_be_null` is conservative even on machines that support ptr-to-int casts, leading to false errors in Miri.

This fixes that problem, and also replaces that foot-gun of a method with `scalar_may_be_null`, which is never unnecessarily conservative.

r? `@oli-obk`
Authored by Matthias Krüger on 2022-02-25 14:14:39 +01:00; committed by GitHub
commit cf3bb09888
7 changed files with 43 additions and 32 deletions
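To make the false positive concrete, here is a minimal sketch of the kind of program affected. It is an assumed reproducer written for illustration, not a test from this PR: a function pointer is never null, so Miri must not reject the validation or the null check below.

```rust
// Assumed reproducer, for illustration only (not taken from the PR's tests).
fn callback() {}

fn main() {
    // Validating this function-pointer value must not be reported as
    // "potentially null": function pointers are always non-null.
    let f: fn() = callback;
    // Casting to a raw pointer and checking against null must also succeed.
    let raw = f as *const ();
    assert!(!raw.is_null());
}
```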

View file

@@ -217,8 +217,9 @@ impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
// Comparisons of abstract pointers with null pointers are known if the pointer
// is in bounds, because if they are in bounds, the pointer can't be null.
// Inequality with integers other than null can never be known for sure.
(Scalar::Int(int), Scalar::Ptr(ptr, _)) | (Scalar::Ptr(ptr, _), Scalar::Int(int)) => {
int.is_null() && !self.memory.ptr_may_be_null(ptr.into())
(Scalar::Int(int), ptr @ Scalar::Ptr(..))
| (ptr @ Scalar::Ptr(..), Scalar::Int(int)) => {
int.is_null() && !self.scalar_may_be_null(ptr)
}
// FIXME: return `true` for at least some comparisons where we can reliably
// determine the result of runtime inequality tests at compile-time.
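
For readers unfamiliar with the rule the hunk above rewires, here is a standalone sketch of the decision logic with the interpreter queries replaced by plain booleans; the function name and parameters are illustrative, not interpreter API.

```rust
/// Illustrative sketch only, not interpreter code: when can CTFE decide an
/// integer-vs-pointer comparison? `true` means "guaranteed unequal";
/// `false` means "cannot be decided at compile time".
fn int_vs_ptr_guaranteed_ne(int_is_null: bool, ptr_may_be_null: bool) -> bool {
    // An in-bounds pointer can never be null, so comparing it with the null
    // integer is known to be unequal. Comparisons with any other integer, or
    // with a pointer that might be null, stay undecided.
    int_is_null && !ptr_may_be_null
}
```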

View file

@@ -22,9 +22,9 @@ use rustc_span::{Pos, Span};
use rustc_target::abi::{call::FnAbi, Align, HasDataLayout, Size, TargetDataLayout};
use super::{
AllocId, GlobalId, Immediate, InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemPlace,
MemPlaceMeta, Memory, MemoryKind, Operand, Place, PlaceTy, Pointer, Provenance, Scalar,
ScalarMaybeUninit, StackPopJump,
AllocCheck, AllocId, GlobalId, Immediate, InterpErrorInfo, InterpResult, MPlaceTy, Machine,
MemPlace, MemPlaceMeta, Memory, MemoryKind, Operand, Place, PlaceTy, Pointer, Provenance,
Scalar, ScalarMaybeUninit, StackPopJump,
};
use crate::transform::validate::equal_up_to_regions;
@@ -440,6 +440,29 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.memory.scalar_to_ptr(scalar)
}
/// Test if this value might be null.
/// If the machine does not support ptr-to-int casts, this is conservative.
pub fn scalar_may_be_null(&self, scalar: Scalar<M::PointerTag>) -> bool {
match scalar.try_to_int() {
Ok(int) => int.is_null(),
Err(_) => {
let ptr = self.scalar_to_ptr(scalar);
match self.memory.ptr_try_get_alloc(ptr) {
Ok((alloc_id, offset, _)) => {
let (size, _align) = self
.memory
.get_size_and_align(alloc_id, AllocCheck::MaybeDead)
.expect("alloc info with MaybeDead cannot fail");
// If the pointer is out-of-bounds, it may be null.
// Note that one-past-the-end (offset == size) is still inbounds, and never null.
offset > size
}
Err(offset) => offset == 0,
}
}
}
}
/// Call this to turn untagged "global" pointers (obtained via `tcx`) into
/// the machine pointer to the allocation. Must never be used
/// for any other pointers, nor for TLS statics.
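
As a reading aid, the following self-contained model restates the new `scalar_may_be_null` logic with simplified, assumed types (a plain `u64` address or an offset/size pair); it is not the interpreter's real `Scalar`/`Pointer` machinery.

```rust
/// Simplified stand-in for the interpreter's value (illustrative types only).
enum AbstractValue {
    /// A concrete integer address, e.g. from an int-to-ptr cast.
    Addr(u64),
    /// An abstract pointer: an offset into an allocation of `size` bytes.
    InAlloc { offset: u64, size: u64 },
}

/// Mirrors the decision above: a concrete address may be null only if it is 0;
/// an abstract pointer may be null only once it leaves its allocation, where
/// one-past-the-end (offset == size) still counts as in bounds.
fn may_be_null(v: &AbstractValue) -> bool {
    match v {
        AbstractValue::Addr(addr) => *addr == 0,
        AbstractValue::InAlloc { offset, size } => offset > size,
    }
}

fn main() {
    assert!(may_be_null(&AbstractValue::Addr(0)));
    // One past the end of a 4-byte allocation: still in bounds, never null.
    assert!(!may_be_null(&AbstractValue::InAlloc { offset: 4, size: 4 }));
}
```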

View file

@@ -483,21 +483,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
}
})
}
/// Test if the pointer might be null.
pub fn ptr_may_be_null(&self, ptr: Pointer<Option<M::PointerTag>>) -> bool {
match self.ptr_try_get_alloc(ptr) {
Ok((alloc_id, offset, _)) => {
let (size, _align) = self
.get_size_and_align(alloc_id, AllocCheck::MaybeDead)
.expect("alloc info with MaybeDead cannot fail");
// If the pointer is out-of-bounds, it may be null.
// Note that one-past-the-end (offset == size) is still inbounds, and never null.
offset > size
}
Err(offset) => offset == 0,
}
}
}
/// Allocation accessors

View file

@@ -720,12 +720,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Err(dbg_val) => {
// So this is a pointer then, and casting to an int failed.
// Can only happen during CTFE.
let ptr = self.scalar_to_ptr(tag_val);
// The niche must be just 0, and the ptr not null, then we know this is
// okay. Everything else, we conservatively reject.
let ptr_valid = niche_start == 0
&& variants_start == variants_end
&& !self.memory.ptr_may_be_null(ptr);
&& !self.scalar_may_be_null(tag_val);
if !ptr_valid {
throw_ub!(InvalidTag(dbg_val))
}
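
The niche decoding above is exercised, for example, when the discriminant of a null-niche-optimized enum is read while a constant is evaluated and validated. The following is a hedged illustration, not a test from this PR.

```rust
// Hedged illustration (not from the PR's tests): `Option<fn()>` uses the null
// pointer as the niche for `None`, so decoding its discriminant during const
// evaluation comes down to "is this pointer definitely non-null?" -- exactly
// what `scalar_may_be_null` answers.
fn hook() {}

const CALLBACK: Option<fn()> = Some(hook as fn());

fn main() {
    assert!(CALLBACK.is_some());
}
```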

View file

@@ -572,21 +572,25 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
err_unsup!(ReadPointerAsBytes) => { "part of a pointer" } expected { "a proper pointer or integer value" },
err_ub!(InvalidUninitBytes(None)) => { "uninitialized bytes" } expected { "a proper pointer or integer value" },
);
let ptr = self.ecx.scalar_to_ptr(value);
// Ensure the pointer is non-null.
if self.ecx.memory.ptr_may_be_null(ptr) {
throw_validation_failure!(self.path, { "a potentially null function pointer" });
}
// If we check references recursively, also check that this points to a function.
if let Some(_) = self.ref_tracking {
let ptr = self.ecx.scalar_to_ptr(value);
let _fn = try_validation!(
self.ecx.memory.get_fn(ptr),
self.path,
err_ub!(DanglingIntPointer(0, _)) =>
{ "a null function pointer" },
err_ub!(DanglingIntPointer(..)) |
err_ub!(InvalidFunctionPointer(..)) =>
{ "{:x}", value } expected { "a function pointer" },
);
// FIXME: Check if the signature matches
} else {
// Otherwise (for standalone Miri), we have to still check it to be non-null.
if self.ecx.scalar_may_be_null(value) {
throw_validation_failure!(self.path, { "a null function pointer" });
}
}
Ok(true)
}
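
A hedged illustration of the branch split above (again not from the PR's tests): when a constant is validated, the check is recursive, so the function pointer is resolved via `get_fn` and must refer to an actual function; under standalone Miri, only the non-null check remains.

```rust
// Hedged illustration (not from the PR's tests): validating this const walks
// the value recursively, so the function pointer must resolve to a real
// function rather than merely being non-null.
fn answer() -> u32 {
    42
}

const F: fn() -> u32 = answer;

fn main() {
    assert_eq!(F(), 42);
}
```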
@@ -644,10 +648,9 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
Err(_) => {
// So this is a pointer then, and casting to an int failed.
// Can only happen during CTFE.
let ptr = self.ecx.scalar_to_ptr(value);
if start == 1 && end == max_value {
// Only null is the niche. So make sure the ptr is NOT null.
if self.ecx.memory.ptr_may_be_null(ptr) {
if self.ecx.scalar_may_be_null(value) {
throw_validation_failure!(self.path,
{ "a potentially null pointer" }
expected {
@@ -758,7 +761,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
fn visit_value(&mut self, op: &OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx> {
trace!("visit_value: {:?}, {:?}", *op, op.layout);
// Check primitive types -- the leafs of our recursive descend.
// Check primitive types -- the leaves of our recursive descent.
if self.try_visit_primitive(op)? {
return Ok(());
}
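
Finally, the "only null is the niche" range check a few hunks above (start == 1, end == max) applies to types such as `NonNull<T>`, whose valid scalar range is 1..=usize::MAX. The sketch below is a hedged, simplified illustration: it uses a concrete dangling address, whereas the hunk handles the harder case where the value is an abstract pointer whose address is unknown at compile time.

```rust
use std::ptr::NonNull;

// Hedged illustration (not from the PR's tests): `NonNull<i32>` has the valid
// range 1..=usize::MAX, i.e. only null is excluded, so validating this const
// only has to prove the stored value is non-null.
const P: NonNull<i32> = NonNull::dangling();

fn main() {
    // The const was already validated at compile time; check it at runtime too.
    assert!(!P.as_ptr().is_null());
}
```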