Auto merge of #87123 - RalfJung:miri-provenance-overhaul, r=oli-obk
CTFE/Miri engine Pointer type overhaul

This fixes the long-standing problem that we are using `Scalar` as a type to represent pointers that might be integer values (since they point to a ZST). The main problem is that with int-to-ptr casts, there are multiple ways to represent the same pointer as a `Scalar`, and it is unclear whether "normalization" (i.e., the cast) has already happened or not. This leads to ugly methods like `force_mplace_ptr` and `force_op_ptr`.

Another problem this solves: in Miri, it would make a lot more sense to have the `Pointer::offset` field represent the full absolute address (instead of being relative to the `AllocId`). This means we can do ptr-to-int casts without access to any machine state, and it means that the overflow checks on pointer arithmetic are (finally!) accurate.

To solve this, the `Pointer` type is made entirely parametric over the provenance, so that we can use `Pointer<AllocId>` inside `Scalar` but use `Pointer<Option<AllocId>>` when accessing memory (where `None` represents the case that we could not figure out an `AllocId`; in that case the `offset` is an absolute address). Moreover, the `Provenance` trait determines whether a pointer with a given provenance can be cast to an integer by simply dropping the provenance.

I hope this can be read commit-by-commit, but the first commit does the bulk of the work. It introduces some FIXMEs that are resolved later.

Fixes https://github.com/rust-lang/miri/issues/841
Miri PR: https://github.com/rust-lang/miri/pull/1851

r? `@oli-obk`
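As a reading aid for the diff below, here is a minimal, hedged sketch of the shape this gives `Pointer`: standalone mock types with plain `u64` offsets, not the actual `rustc_middle::mir::interpret` definitions.

```rust
// Minimal mock of the scheme described above: `Pointer` is generic over its
// provenance, and `Option<AllocId>` is used once an address may have lost it.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct AllocId(u64);

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct Pointer<Prov> {
    provenance: Prov,
    // Meaning depends on `Prov`: relative to the allocation for `AllocId`,
    // an absolute address when the provenance is `None`.
    offset: u64,
}

impl<Prov> Pointer<Prov> {
    fn new(provenance: Prov, offset: u64) -> Self {
        Pointer { provenance, offset }
    }
}

impl<Prov> Pointer<Option<Prov>> {
    // Mirrors `into_pointer_or_addr` from this diff: either a pointer with
    // provenance, or just a raw address.
    fn into_pointer_or_addr(self) -> Result<Pointer<Prov>, u64> {
        match self.provenance {
            Some(prov) => Ok(Pointer::new(prov, self.offset)),
            None => Err(self.offset),
        }
    }
}

fn main() {
    let tagged: Pointer<AllocId> = Pointer::new(AllocId(1), 16);
    let maybe: Pointer<Option<AllocId>> = Pointer::new(Some(tagged.provenance), tagged.offset);
    assert!(maybe.into_pointer_or_addr().is_ok());

    let plain: Pointer<Option<AllocId>> = Pointer::new(None, 0x1000);
    assert_eq!(plain.into_pointer_or_addr(), Err(0x1000));
}
```

The `Option<Prov>` impl is the interesting half: memory access either recovers a pointer with provenance or falls back to treating the value as an absolute address.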
This commit is contained in:
commit c78ebb7bdc
106 changed files with 1317 additions and 1407 deletions
|
@ -49,6 +49,7 @@
|
|||
#![feature(iter_zip)]
|
||||
#![feature(thread_local_const_init)]
|
||||
#![feature(try_reserve)]
|
||||
#![feature(nonzero_ops)]
|
||||
#![recursion_limit = "512"]
|
||||
|
||||
#[macro_use]
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
use std::borrow::Cow;
|
||||
use std::convert::TryFrom;
|
||||
use std::iter;
|
||||
use std::ops::{Deref, DerefMut, Range};
|
||||
use std::ops::{Deref, Range};
|
||||
use std::ptr;
|
||||
|
||||
use rustc_ast::Mutability;
|
||||
|
@ -25,7 +25,7 @@ use crate::ty;
|
|||
/// module provides higher-level access.
|
||||
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
|
||||
#[derive(HashStable)]
|
||||
pub struct Allocation<Tag = (), Extra = ()> {
|
||||
pub struct Allocation<Tag = AllocId, Extra = ()> {
|
||||
/// The actual bytes of the allocation.
|
||||
/// Note that the bytes of a pointer represent the offset of the pointer.
|
||||
bytes: Vec<u8>,
|
||||
|
@ -154,26 +154,32 @@ impl<Tag> Allocation<Tag> {
|
|||
}
|
||||
}
|
||||
|
||||
impl Allocation<()> {
|
||||
/// Add Tag and Extra fields
|
||||
pub fn with_tags_and_extra<T, E>(
|
||||
impl Allocation {
|
||||
/// Convert Tag and add Extra fields
|
||||
pub fn convert_tag_add_extra<Tag, Extra>(
|
||||
self,
|
||||
mut tagger: impl FnMut(AllocId) -> T,
|
||||
extra: E,
|
||||
) -> Allocation<T, E> {
|
||||
cx: &impl HasDataLayout,
|
||||
extra: Extra,
|
||||
mut tagger: impl FnMut(Pointer<AllocId>) -> Pointer<Tag>,
|
||||
) -> Allocation<Tag, Extra> {
|
||||
// Compute new pointer tags, which also adjusts the bytes.
|
||||
let mut bytes = self.bytes;
|
||||
let mut new_relocations = Vec::with_capacity(self.relocations.0.len());
|
||||
let ptr_size = cx.data_layout().pointer_size.bytes_usize();
|
||||
let endian = cx.data_layout().endian;
|
||||
for &(offset, alloc_id) in self.relocations.iter() {
|
||||
let idx = offset.bytes_usize();
|
||||
let ptr_bytes = &mut bytes[idx..idx + ptr_size];
|
||||
let bits = read_target_uint(endian, ptr_bytes).unwrap();
|
||||
let (ptr_tag, ptr_offset) =
|
||||
tagger(Pointer::new(alloc_id, Size::from_bytes(bits))).into_parts();
|
||||
write_target_uint(endian, ptr_bytes, ptr_offset.bytes().into()).unwrap();
|
||||
new_relocations.push((offset, ptr_tag));
|
||||
}
|
||||
// Create allocation.
|
||||
Allocation {
|
||||
bytes: self.bytes,
|
||||
relocations: Relocations::from_presorted(
|
||||
self.relocations
|
||||
.iter()
|
||||
// The allocations in the relocations (pointers stored *inside* this allocation)
|
||||
// all get the base pointer tag.
|
||||
.map(|&(offset, ((), alloc))| {
|
||||
let tag = tagger(alloc);
|
||||
(offset, (tag, alloc))
|
||||
})
|
||||
.collect(),
|
||||
),
|
||||
bytes,
|
||||
relocations: Relocations::from_presorted(new_relocations),
|
||||
init_mask: self.init_mask,
|
||||
align: self.align,
|
||||
mutability: self.mutability,
|
||||
|
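The key change in `convert_tag_add_extra` above is that the tagger now receives a full `Pointer<AllocId>` and returns a `Pointer<Tag>` whose offset may differ (e.g., an absolute address), so the pointer values stored inside the allocation's bytes must be rewritten rather than merely re-tagged. A rough, hedged sketch of that byte-patching step, assuming 8-byte little-endian pointers and plain `u64` offsets in place of the real `Size`/`read_target_uint`/`write_target_uint` helpers:

```rust
use std::convert::TryInto;

// Hypothetical standalone helper: read the pointer value stored at `idx`,
// overwrite it with the offset the tagger chose, and return the old offset.
fn retag_pointer_bytes(bytes: &mut [u8], idx: usize, new_offset: u64) -> u64 {
    let old_offset = u64::from_le_bytes(bytes[idx..idx + 8].try_into().unwrap());
    bytes[idx..idx + 8].copy_from_slice(&new_offset.to_le_bytes());
    old_offset
}

fn main() {
    let mut alloc = vec![0u8; 16];
    // A pointer stored at byte 8 with alloc-relative offset 4.
    alloc[8..16].copy_from_slice(&4u64.to_le_bytes());
    // The tagger decided the pointer's offset should now be an absolute address.
    let old = retag_pointer_bytes(&mut alloc, 8, 0x7000_0004);
    assert_eq!(old, 4);
    assert_eq!(&alloc[8..16], &0x7000_0004u64.to_le_bytes());
}
```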
@ -279,6 +285,9 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
/// A raw pointer variant of `get_bytes_mut` that avoids invalidating existing aliases into this memory.
|
||||
pub fn get_bytes_mut_ptr(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> *mut [u8] {
|
||||
self.mark_init(range, true);
|
||||
// This also clears relocations that just overlap with the written range. So writing to some
|
||||
// byte can de-initialize its neighbors! See
|
||||
// <https://github.com/rust-lang/rust/issues/87184> for details.
|
||||
self.clear_relocations(cx, range);
|
||||
|
||||
assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check
|
||||
|
@ -321,7 +330,11 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
cx: &impl HasDataLayout,
|
||||
range: AllocRange,
|
||||
) -> AllocResult<ScalarMaybeUninit<Tag>> {
|
||||
// `get_bytes_unchecked` tests relocation edges.
|
||||
// `get_bytes_with_uninit_and_ptr` tests relocation edges.
|
||||
// We deliberately error when loading data that partially has provenance, or partially
|
||||
// initialized data (that's the check below), into a scalar. The LLVM semantics of this are
|
||||
// unclear so we are conservative. See <https://github.com/rust-lang/rust/issues/69488> for
|
||||
// further discussion.
|
||||
let bytes = self.get_bytes_with_uninit_and_ptr(cx, range)?;
|
||||
// Uninit check happens *after* we established that the alignment is correct.
|
||||
// We must not return `Ok()` for unaligned pointers!
|
||||
|
@ -339,9 +352,9 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
self.check_relocations(cx, range)?;
|
||||
} else {
|
||||
// Maybe a pointer.
|
||||
if let Some(&(tag, alloc_id)) = self.relocations.get(&range.start) {
|
||||
let ptr = Pointer::new_with_tag(alloc_id, Size::from_bytes(bits), tag);
|
||||
return Ok(ScalarMaybeUninit::Scalar(ptr.into()));
|
||||
if let Some(&prov) = self.relocations.get(&range.start) {
|
||||
let ptr = Pointer::new(prov, Size::from_bytes(bits));
|
||||
return Ok(ScalarMaybeUninit::from_pointer(ptr, cx));
|
||||
}
|
||||
}
|
||||
// We don't. Just return the bits.
|
||||
|
@ -371,9 +384,14 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
}
|
||||
};
|
||||
|
||||
let bytes = match val.to_bits_or_ptr(range.size, cx) {
|
||||
Err(val) => u128::from(val.offset.bytes()),
|
||||
Ok(data) => data,
|
||||
// `to_bits_or_ptr_internal` is the right method because we just want to store this data
|
||||
// as-is into memory.
|
||||
let (bytes, provenance) = match val.to_bits_or_ptr_internal(range.size) {
|
||||
Err(val) => {
|
||||
let (provenance, offset) = val.into_parts();
|
||||
(u128::from(offset.bytes()), Some(provenance))
|
||||
}
|
||||
Ok(data) => (data, None),
|
||||
};
|
||||
|
||||
let endian = cx.data_layout().endian;
|
||||
|
@ -381,8 +399,8 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
write_target_uint(endian, dst, bytes).unwrap();
|
||||
|
||||
// See if we have to also write a relocation.
|
||||
if let Scalar::Ptr(val) = val {
|
||||
self.relocations.insert(range.start, (val.tag, val.alloc_id));
|
||||
if let Some(provenance) = provenance {
|
||||
self.relocations.0.insert(range.start, provenance);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
@ -392,11 +410,7 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
/// Relocations.
|
||||
impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
|
||||
/// Returns all relocations overlapping with the given pointer-offset pair.
|
||||
pub fn get_relocations(
|
||||
&self,
|
||||
cx: &impl HasDataLayout,
|
||||
range: AllocRange,
|
||||
) -> &[(Size, (Tag, AllocId))] {
|
||||
pub fn get_relocations(&self, cx: &impl HasDataLayout, range: AllocRange) -> &[(Size, Tag)] {
|
||||
// We have to go back `pointer_size - 1` bytes, as that one would still overlap with
|
||||
// the beginning of this range.
|
||||
let start = range.start.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1);
|
||||
|
@ -446,7 +460,7 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
}
|
||||
|
||||
// Forget all the relocations.
|
||||
self.relocations.remove_range(first..last);
|
||||
self.relocations.0.remove_range(first..last);
|
||||
}
|
||||
|
||||
/// Errors if there are relocations overlapping with the edges of the
|
||||
|
@ -582,39 +596,33 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Relocations.
|
||||
/// "Relocations" stores the provenance information of pointers stored in memory.
|
||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
|
||||
pub struct Relocations<Tag = (), Id = AllocId>(SortedMap<Size, (Tag, Id)>);
|
||||
pub struct Relocations<Tag = AllocId>(SortedMap<Size, Tag>);
|
||||
|
||||
impl<Tag, Id> Relocations<Tag, Id> {
|
||||
impl<Tag> Relocations<Tag> {
|
||||
pub fn new() -> Self {
|
||||
Relocations(SortedMap::new())
|
||||
}
|
||||
|
||||
// The caller must guarantee that the given relocations are already sorted
|
||||
// by address and contain no duplicates.
|
||||
pub fn from_presorted(r: Vec<(Size, (Tag, Id))>) -> Self {
|
||||
pub fn from_presorted(r: Vec<(Size, Tag)>) -> Self {
|
||||
Relocations(SortedMap::from_presorted_elements(r))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Tag> Deref for Relocations<Tag> {
|
||||
type Target = SortedMap<Size, (Tag, AllocId)>;
|
||||
type Target = SortedMap<Size, Tag>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<Tag> DerefMut for Relocations<Tag> {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
|
||||
/// A partial, owned list of relocations to transfer into another allocation.
|
||||
pub struct AllocationRelocations<Tag> {
|
||||
relative_relocations: Vec<(Size, (Tag, AllocId))>,
|
||||
relative_relocations: Vec<(Size, Tag)>,
|
||||
}
|
||||
|
||||
impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
|
||||
|
@ -652,7 +660,7 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
|
|||
/// The affected range, as defined in the parameters to `prepare_relocation_copy` is expected
|
||||
/// to be clear of relocations.
|
||||
pub fn mark_relocation_range(&mut self, relocations: AllocationRelocations<Tag>) {
|
||||
self.relocations.insert_presorted(relocations.relative_relocations);
|
||||
self.relocations.0.insert_presorted(relocations.relative_relocations);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -170,6 +170,8 @@ impl fmt::Display for InvalidProgramInfo<'_> {
|
|||
/// Details of why a pointer had to be in-bounds.
|
||||
#[derive(Debug, Copy, Clone, TyEncodable, TyDecodable, HashStable)]
|
||||
pub enum CheckInAllocMsg {
|
||||
/// We are dereferencing a pointer (i.e., creating a place).
|
||||
DerefTest,
|
||||
/// We are accessing memory.
|
||||
MemoryAccessTest,
|
||||
/// We are doing pointer arithmetic.
|
||||
|
@ -179,13 +181,14 @@ pub enum CheckInAllocMsg {
|
|||
}
|
||||
|
||||
impl fmt::Display for CheckInAllocMsg {
|
||||
/// When this is printed as an error the context looks like this
|
||||
/// "{msg}pointer must be in-bounds at offset..."
|
||||
/// When this is printed as an error the context looks like this:
|
||||
/// "{msg}0x01 is not a valid pointer".
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
match *self {
|
||||
CheckInAllocMsg::DerefTest => "dereferencing pointer failed: ",
|
||||
CheckInAllocMsg::MemoryAccessTest => "memory access failed: ",
|
||||
CheckInAllocMsg::PointerArithmeticTest => "pointer arithmetic failed: ",
|
||||
CheckInAllocMsg::InboundsTest => "",
|
||||
|
@ -238,7 +241,9 @@ pub enum UndefinedBehaviorInfo<'tcx> {
|
|||
PointerUseAfterFree(AllocId),
|
||||
/// Used a pointer outside the bounds it is valid for.
|
||||
PointerOutOfBounds {
|
||||
ptr: Pointer,
|
||||
alloc_id: AllocId,
|
||||
offset: Size,
|
||||
size: Size,
|
||||
msg: CheckInAllocMsg,
|
||||
allocation_size: Size,
|
||||
},
|
||||
|
@ -307,20 +312,30 @@ impl fmt::Display for UndefinedBehaviorInfo<'_> {
|
|||
InvalidVtableAlignment(msg) => write!(f, "invalid vtable: alignment {}", msg),
|
||||
UnterminatedCString(p) => write!(
|
||||
f,
|
||||
"reading a null-terminated string starting at {} with no null found before end of allocation",
|
||||
"reading a null-terminated string starting at {:?} with no null found before end of allocation",
|
||||
p,
|
||||
),
|
||||
PointerUseAfterFree(a) => {
|
||||
write!(f, "pointer to {} was dereferenced after this allocation got freed", a)
|
||||
}
|
||||
PointerOutOfBounds { ptr, msg, allocation_size } => write!(
|
||||
PointerOutOfBounds { alloc_id, offset, size: Size::ZERO, msg, allocation_size } => {
|
||||
write!(
|
||||
f,
|
||||
"{}{} has size {}, so pointer at offset {} is out-of-bounds",
|
||||
msg,
|
||||
alloc_id,
|
||||
allocation_size.bytes(),
|
||||
offset.bytes(),
|
||||
)
|
||||
}
|
||||
PointerOutOfBounds { alloc_id, offset, size, msg, allocation_size } => write!(
|
||||
f,
|
||||
"{}pointer must be in-bounds at offset {}, \
|
||||
but is outside bounds of {} which has size {}",
|
||||
"{}{} has size {}, so pointer to {} bytes starting at offset {} is out-of-bounds",
|
||||
msg,
|
||||
ptr.offset.bytes(),
|
||||
ptr.alloc_id,
|
||||
allocation_size.bytes()
|
||||
alloc_id,
|
||||
allocation_size.bytes(),
|
||||
size.bytes(),
|
||||
offset.bytes(),
|
||||
),
|
||||
DanglingIntPointer(0, CheckInAllocMsg::InboundsTest) => {
|
||||
write!(f, "null pointer is not a valid pointer for this operation")
|
||||
|
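For illustration, the two new `PointerOutOfBounds` arms above format messages of the following shape; the values here are made up, not taken from an actual Miri run:

```rust
// Illustrative only: hypothetical values plugged into the new format strings
// (a zero-size check and a 4-byte access into a 16-byte "alloc1").
fn main() {
    let (msg, alloc_id, allocation_size) = ("memory access failed: ", "alloc1", 16u64);
    println!(
        "{}{} has size {}, so pointer at offset {} is out-of-bounds",
        msg, alloc_id, allocation_size, 20
    );
    println!(
        "{}{} has size {}, so pointer to {} bytes starting at offset {} is out-of-bounds",
        msg, alloc_id, allocation_size, 4, 14
    );
}
```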
@ -348,13 +363,13 @@ impl fmt::Display for UndefinedBehaviorInfo<'_> {
|
|||
}
|
||||
InvalidTag(val) => write!(f, "enum value has invalid tag: {}", val),
|
||||
InvalidFunctionPointer(p) => {
|
||||
write!(f, "using {} as function pointer but it does not point to a function", p)
|
||||
write!(f, "using {:?} as function pointer but it does not point to a function", p)
|
||||
}
|
||||
InvalidStr(err) => write!(f, "this string is not valid UTF-8: {}", err),
|
||||
InvalidUninitBytes(Some((alloc, access))) => write!(
|
||||
f,
|
||||
"reading {} byte{} of memory starting at {}, \
|
||||
but {} byte{} {} uninitialized starting at {}, \
|
||||
"reading {} byte{} of memory starting at {:?}, \
|
||||
but {} byte{} {} uninitialized starting at {:?}, \
|
||||
and this operation requires initialized memory",
|
||||
access.access_size.bytes(),
|
||||
pluralize!(access.access_size.bytes()),
|
||||
|
@ -392,8 +407,6 @@ pub enum UnsupportedOpInfo {
|
|||
//
|
||||
// The variants below are only reachable from CTFE/const prop; Miri will never emit them.
|
||||
//
|
||||
/// Encountered raw bytes where we needed a pointer.
|
||||
ReadBytesAsPointer,
|
||||
/// Accessing thread local statics
|
||||
ThreadLocalStatic(DefId),
|
||||
/// Accessing an unsupported extern static.
|
||||
|
@ -408,7 +421,6 @@ impl fmt::Display for UnsupportedOpInfo {
|
|||
ReadExternStatic(did) => write!(f, "cannot read from extern static ({:?})", did),
|
||||
NoMirFor(did) => write!(f, "no MIR body is available for {:?}", did),
|
||||
ReadPointerAsBytes => write!(f, "unable to turn pointer into raw bytes",),
|
||||
ReadBytesAsPointer => write!(f, "unable to turn bytes into a pointer"),
|
||||
ThreadLocalStatic(did) => write!(f, "cannot access thread local static ({:?})", did),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -99,7 +99,7 @@ use std::convert::TryFrom;
|
|||
use std::fmt;
|
||||
use std::io;
|
||||
use std::io::{Read, Write};
|
||||
use std::num::NonZeroU32;
|
||||
use std::num::{NonZeroU32, NonZeroU64};
|
||||
use std::sync::atomic::{AtomicU32, Ordering};
|
||||
|
||||
use rustc_ast::LitKind;
|
||||
|
@ -127,7 +127,7 @@ pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar, ScalarMay
|
|||
|
||||
pub use self::allocation::{alloc_range, AllocRange, Allocation, InitMask, Relocations};
|
||||
|
||||
pub use self::pointer::{Pointer, PointerArithmetic};
|
||||
pub use self::pointer::{Pointer, PointerArithmetic, Provenance};
|
||||
|
||||
/// Uniquely identifies one of the following:
|
||||
/// - A constant
|
||||
|
@ -176,7 +176,7 @@ pub enum LitToConstError {
|
|||
}
|
||||
|
||||
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
|
||||
pub struct AllocId(pub u64);
|
||||
pub struct AllocId(pub NonZeroU64);
|
||||
|
||||
// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
|
||||
// all the Miri types.
|
||||
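The switch to `NonZeroU64` is presumably about the niche: `Option<AllocId>` then costs no extra space, which matters once pointers carry `Option<AllocId>` provenance (cf. `static_assert_size!(Pointer, 16)` in pointer.rs later in this diff). A quick standalone check of that layout claim, using a mock `AllocId`:

```rust
use std::mem::size_of;
use std::num::NonZeroU64;

// Mock of the new `AllocId` shape; the niche lets `Option<AllocId>` stay 8 bytes.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct AllocId(NonZeroU64);

fn main() {
    assert_eq!(size_of::<Option<NonZeroU64>>(), 8); // guaranteed niche
    assert_eq!(size_of::<Option<AllocId>>(), 8);    // the newtype gets it too in practice
    // Matching the diff: ids now have to start at 1 instead of 0.
    let first = AllocId(NonZeroU64::new(1).unwrap());
    assert_eq!(first.0.get(), 1);
}
```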
|
@ -427,7 +427,11 @@ crate struct AllocMap<'tcx> {
|
|||
|
||||
impl<'tcx> AllocMap<'tcx> {
|
||||
crate fn new() -> Self {
|
||||
AllocMap { alloc_map: Default::default(), dedup: Default::default(), next_id: AllocId(0) }
|
||||
AllocMap {
|
||||
alloc_map: Default::default(),
|
||||
dedup: Default::default(),
|
||||
next_id: AllocId(NonZeroU64::new(1).unwrap()),
|
||||
}
|
||||
}
|
||||
fn reserve(&mut self) -> AllocId {
|
||||
let next = self.next_id;
|
||||
|
|
|
@ -83,55 +83,77 @@ pub trait PointerArithmetic: HasDataLayout {
|
|||
|
||||
impl<T: HasDataLayout> PointerArithmetic for T {}
|
||||
|
||||
/// This trait abstracts over the kind of provenance that is associated with a `Pointer`. It is
|
||||
/// mostly opaque; the `Machine` trait extends it with some more operations that also have access to
|
||||
/// some global state.
|
||||
/// We don't actually care about this `Debug` bound (we use `Provenance::fmt` to format the entire
|
||||
/// pointer), but `derive` adds some unnecessary bounds.
|
||||
pub trait Provenance: Copy + fmt::Debug {
|
||||
/// Says whether the `offset` field of `Pointer`s with this provenance is the actual physical address.
|
||||
/// If `true`, ptr-to-int casts work by simply discarding the provenance.
|
||||
/// If `false`, ptr-to-int casts are not supported. The offset *must* be relative in that case.
|
||||
const OFFSET_IS_ADDR: bool;
|
||||
|
||||
/// Determines how a pointer should be printed.
|
||||
fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result
|
||||
where
|
||||
Self: Sized;
|
||||
|
||||
/// Provenance must always be able to identify the allocation this ptr points to.
|
||||
/// (Identifying the offset in that allocation, however, is harder -- use `Memory::ptr_get_alloc` for that.)
|
||||
fn get_alloc_id(self) -> AllocId;
|
||||
}
|
||||
|
||||
impl Provenance for AllocId {
|
||||
// With the `AllocId` as provenance, the `offset` is interpreted *relative to the allocation*,
|
||||
// so ptr-to-int casts are not possible (since we do not know the global physical offset).
|
||||
const OFFSET_IS_ADDR: bool = false;
|
||||
|
||||
fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
// Forward `alternate` flag to `alloc_id` printing.
|
||||
if f.alternate() {
|
||||
write!(f, "{:#?}", ptr.provenance)?;
|
||||
} else {
|
||||
write!(f, "{:?}", ptr.provenance)?;
|
||||
}
|
||||
// Print offset only if it is non-zero.
|
||||
if ptr.offset.bytes() > 0 {
|
||||
write!(f, "+0x{:x}", ptr.offset.bytes())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_alloc_id(self) -> AllocId {
|
||||
self
|
||||
}
|
||||
}
|
||||
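For contrast with the `AllocId` impl above, here is a self-contained mock of a machine-side tag whose `OFFSET_IS_ADDR` is `true`; the real Miri tag lives in the linked Miri PR, so the trait is simplified (no `Pointer`/`Size`) and the names and fields are purely illustrative:

```rust
use std::fmt;

// Simplified mock of the `Provenance` trait as introduced in this diff.
trait Provenance: Copy + fmt::Debug {
    const OFFSET_IS_ADDR: bool;
    fn get_alloc_id(self) -> AllocId;
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct AllocId(u64);

// Hypothetical machine tag: pointers with this tag store the absolute address
// in `offset`, so ptr-to-int casts can simply drop the provenance.
#[derive(Copy, Clone, Debug)]
struct MachineTag {
    alloc_id: AllocId,
}

impl Provenance for MachineTag {
    const OFFSET_IS_ADDR: bool = true;

    fn get_alloc_id(self) -> AllocId {
        self.alloc_id
    }
}

// Ptr-to-int cast without any machine state: only possible when the offset
// already is the address.
fn ptr_to_int<P: Provenance>(_provenance: P, offset: u64) -> Option<u64> {
    if P::OFFSET_IS_ADDR { Some(offset) } else { None }
}

fn main() {
    let tag = MachineTag { alloc_id: AllocId(7) };
    assert_eq!(tag.get_alloc_id(), AllocId(7));
    assert_eq!(ptr_to_int(tag, 0x2000), Some(0x2000));
}
```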
|
||||
/// Represents a pointer in the Miri engine.
|
||||
///
|
||||
/// `Pointer` is generic over the `Tag` associated with each pointer,
|
||||
/// which is used to do provenance tracking during execution.
|
||||
/// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, TyEncodable, TyDecodable, Hash)]
|
||||
#[derive(HashStable)]
|
||||
pub struct Pointer<Tag = ()> {
|
||||
pub alloc_id: AllocId,
|
||||
pub offset: Size,
|
||||
pub tag: Tag,
|
||||
pub struct Pointer<Tag = AllocId> {
|
||||
pub(super) offset: Size, // kept private to avoid accidental misinterpretation (meaning depends on `Tag` type)
|
||||
pub provenance: Tag,
|
||||
}
|
||||
|
||||
static_assert_size!(Pointer, 16);
|
||||
|
||||
/// Print the address of a pointer (without the tag)
|
||||
fn print_ptr_addr<Tag>(ptr: &Pointer<Tag>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
// Forward `alternate` flag to `alloc_id` printing.
|
||||
if f.alternate() {
|
||||
write!(f, "{:#?}", ptr.alloc_id)?;
|
||||
} else {
|
||||
write!(f, "{:?}", ptr.alloc_id)?;
|
||||
}
|
||||
// Print offset only if it is non-zero.
|
||||
if ptr.offset.bytes() > 0 {
|
||||
write!(f, "+0x{:x}", ptr.offset.bytes())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
|
||||
// all the Miri types.
|
||||
// We have to use `Debug` output for the tag, because `()` does not implement
|
||||
// `Display` so we cannot specialize that.
|
||||
impl<Tag: fmt::Debug> fmt::Debug for Pointer<Tag> {
|
||||
default fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
print_ptr_addr(self, f)?;
|
||||
write!(f, "[{:?}]", self.tag)
|
||||
}
|
||||
}
|
||||
// Specialization for no tag
|
||||
impl fmt::Debug for Pointer<()> {
|
||||
impl<Tag: Provenance> fmt::Debug for Pointer<Tag> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
print_ptr_addr(self, f)
|
||||
Provenance::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Tag: fmt::Debug> fmt::Display for Pointer<Tag> {
|
||||
impl<Tag: Provenance> fmt::Debug for Pointer<Option<Tag>> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Debug::fmt(self, f)
|
||||
match self.provenance {
|
||||
Some(tag) => Provenance::fmt(&Pointer::new(tag, self.offset), f),
|
||||
None => write!(f, "0x{:x}", self.offset.bytes()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -143,37 +165,61 @@ impl From<AllocId> for Pointer {
|
|||
}
|
||||
}
|
||||
|
||||
impl Pointer<()> {
|
||||
impl<Tag> From<Pointer<Tag>> for Pointer<Option<Tag>> {
|
||||
#[inline(always)]
|
||||
pub fn new(alloc_id: AllocId, offset: Size) -> Self {
|
||||
Pointer { alloc_id, offset, tag: () }
|
||||
fn from(ptr: Pointer<Tag>) -> Self {
|
||||
let (tag, offset) = ptr.into_parts();
|
||||
Pointer::new(Some(tag), offset)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Tag> Pointer<Option<Tag>> {
|
||||
pub fn into_pointer_or_addr(self) -> Result<Pointer<Tag>, Size> {
|
||||
match self.provenance {
|
||||
Some(tag) => Ok(Pointer::new(tag, self.offset)),
|
||||
None => Err(self.offset),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Tag> Pointer<Option<Tag>> {
|
||||
#[inline(always)]
|
||||
pub fn with_tag<Tag>(self, tag: Tag) -> Pointer<Tag> {
|
||||
Pointer::new_with_tag(self.alloc_id, self.offset, tag)
|
||||
pub fn null() -> Self {
|
||||
Pointer { provenance: None, offset: Size::ZERO }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx, Tag> Pointer<Tag> {
|
||||
#[inline(always)]
|
||||
pub fn new_with_tag(alloc_id: AllocId, offset: Size, tag: Tag) -> Self {
|
||||
Pointer { alloc_id, offset, tag }
|
||||
pub fn new(provenance: Tag, offset: Size) -> Self {
|
||||
Pointer { provenance, offset }
|
||||
}
|
||||
|
||||
/// Obtain the constituents of this pointer. Note that the meaning of the offset depends on the type `Tag`!
|
||||
/// This function must only be used in the implementation of `Machine::ptr_get_alloc`,
|
||||
/// and when a `Pointer` is taken apart to be stored efficiently in an `Allocation`.
|
||||
#[inline(always)]
|
||||
pub fn into_parts(self) -> (Tag, Size) {
|
||||
(self.provenance, self.offset)
|
||||
}
|
||||
|
||||
pub fn map_provenance(self, f: impl FnOnce(Tag) -> Tag) -> Self {
|
||||
Pointer { provenance: f(self.provenance), ..self }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
|
||||
Ok(Pointer::new_with_tag(
|
||||
self.alloc_id,
|
||||
Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
|
||||
self.tag,
|
||||
))
|
||||
Ok(Pointer {
|
||||
offset: Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
|
||||
..self
|
||||
})
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn overflowing_offset(self, i: Size, cx: &impl HasDataLayout) -> (Self, bool) {
|
||||
let (res, over) = cx.data_layout().overflowing_offset(self.offset.bytes(), i.bytes());
|
||||
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
|
||||
let ptr = Pointer { offset: Size::from_bytes(res), ..self };
|
||||
(ptr, over)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
|
@ -183,26 +229,21 @@ impl<'tcx, Tag> Pointer<Tag> {
|
|||
|
||||
#[inline]
|
||||
pub fn signed_offset(self, i: i64, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
|
||||
Ok(Pointer::new_with_tag(
|
||||
self.alloc_id,
|
||||
Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
|
||||
self.tag,
|
||||
))
|
||||
Ok(Pointer {
|
||||
offset: Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
|
||||
..self
|
||||
})
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn overflowing_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> (Self, bool) {
|
||||
let (res, over) = cx.data_layout().overflowing_signed_offset(self.offset.bytes(), i);
|
||||
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
|
||||
let ptr = Pointer { offset: Size::from_bytes(res), ..self };
|
||||
(ptr, over)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
|
||||
self.overflowing_signed_offset(i, cx).0
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn erase_tag(self) -> Pointer {
|
||||
Pointer { alloc_id: self.alloc_id, offset: self.offset, tag: () }
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,11 +6,13 @@ use rustc_apfloat::{
|
|||
Float,
|
||||
};
|
||||
use rustc_macros::HashStable;
|
||||
use rustc_target::abi::{HasDataLayout, Size, TargetDataLayout};
|
||||
use rustc_target::abi::{HasDataLayout, Size};
|
||||
|
||||
use crate::ty::{Lift, ParamEnv, ScalarInt, Ty, TyCtxt};
|
||||
|
||||
use super::{AllocId, AllocRange, Allocation, InterpResult, Pointer, PointerArithmetic};
|
||||
use super::{
|
||||
AllocId, AllocRange, Allocation, InterpResult, Pointer, PointerArithmetic, Provenance,
|
||||
};
|
||||
|
||||
/// Represents the result of const evaluation via the `eval_to_allocation` query.
|
||||
#[derive(Copy, Clone, HashStable, TyEncodable, TyDecodable, Debug, Hash, Eq, PartialEq)]
|
||||
|
@ -47,12 +49,6 @@ pub enum ConstValue<'tcx> {
|
|||
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
|
||||
static_assert_size!(ConstValue<'_>, 32);
|
||||
|
||||
impl From<Scalar> for ConstValue<'tcx> {
|
||||
fn from(s: Scalar) -> Self {
|
||||
Self::Scalar(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> Lift<'tcx> for ConstValue<'a> {
|
||||
type Lifted = ConstValue<'tcx>;
|
||||
fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<ConstValue<'tcx>> {
|
||||
|
@ -70,7 +66,7 @@ impl<'a, 'tcx> Lift<'tcx> for ConstValue<'a> {
|
|||
|
||||
impl<'tcx> ConstValue<'tcx> {
|
||||
#[inline]
|
||||
pub fn try_to_scalar(&self) -> Option<Scalar> {
|
||||
pub fn try_to_scalar(&self) -> Option<Scalar<AllocId>> {
|
||||
match *self {
|
||||
ConstValue::ByRef { .. } | ConstValue::Slice { .. } => None,
|
||||
ConstValue::Scalar(val) => Some(val),
|
||||
|
@ -120,16 +116,23 @@ impl<'tcx> ConstValue<'tcx> {
|
|||
/// `memory::Allocation`. It is in many ways like a small chunk of a `Allocation`, up to 16 bytes in
|
||||
/// size. Like a range of bytes in an `Allocation`, a `Scalar` can either represent the raw bytes
|
||||
/// of a simple value or a pointer into another `Allocation`
|
||||
///
|
||||
/// These variants would be private if there was a convenient way to achieve that in Rust.
|
||||
/// Do *not* match on a `Scalar`! Use the various `to_*` methods instead.
|
||||
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, TyEncodable, TyDecodable, Hash)]
|
||||
#[derive(HashStable)]
|
||||
pub enum Scalar<Tag = ()> {
|
||||
pub enum Scalar<Tag = AllocId> {
|
||||
/// The raw bytes of a simple value.
|
||||
Int(ScalarInt),
|
||||
|
||||
/// A pointer into an `Allocation`. An `Allocation` in the `memory` module has a list of
|
||||
/// relocations, but a `Scalar` is only large enough to contain one, so we just represent the
|
||||
/// relocation and its associated offset together as a `Pointer` here.
|
||||
Ptr(Pointer<Tag>),
|
||||
///
|
||||
/// We also store the size of the pointer, such that a `Scalar` always knows how big it is.
|
||||
/// The size is always the pointer size of the current target, but this is not information
|
||||
/// that we always have readily available.
|
||||
Ptr(Pointer<Tag>, u8),
|
||||
}
|
||||
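The new `u8` field records the pointer size, so a `Scalar` always knows how big it is, as the comment above says. A hedged standalone sketch (plain integers instead of `ScalarInt`/`Size`/`Provenance`) of how that lets a check like `to_bits_or_ptr_internal` work without a `HasDataLayout` context:

```rust
// Mock of the new `Scalar` shape: the pointer variant carries its size in bytes.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct Pointer<Tag> {
    provenance: Tag,
    offset: u64,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Scalar<Tag> {
    Int { bits: u128, size: u8 },
    Ptr(Pointer<Tag>, u8),
}

impl<Tag> Scalar<Tag> {
    fn to_bits_or_ptr_internal(self, target_size: u8) -> Result<u128, Pointer<Tag>> {
        match self {
            Scalar::Int { bits, size } => {
                assert_eq!(size, target_size, "scalar has the wrong size");
                Ok(bits)
            }
            Scalar::Ptr(ptr, size) => {
                assert_eq!(size, target_size, "pointer scalar has the wrong size");
                Err(ptr)
            }
        }
    }
}

fn main() {
    let ptr = Pointer { provenance: 1u32, offset: 8 };
    assert_eq!((ptr.provenance, ptr.offset), (1, 8));
    let s = Scalar::Ptr(ptr, 8);
    assert_eq!(s.to_bits_or_ptr_internal(8), Err(ptr));
    let i: Scalar<u32> = Scalar::Int { bits: 42, size: 4 };
    assert_eq!(i.to_bits_or_ptr_internal(4), Ok(42));
}
```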
|
||||
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
|
||||
|
@ -137,20 +140,20 @@ static_assert_size!(Scalar, 24);
|
|||
|
||||
// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
|
||||
// all the Miri types.
|
||||
impl<Tag: fmt::Debug> fmt::Debug for Scalar<Tag> {
|
||||
impl<Tag: Provenance> fmt::Debug for Scalar<Tag> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Scalar::Ptr(ptr) => write!(f, "{:?}", ptr),
|
||||
Scalar::Ptr(ptr, _size) => write!(f, "{:?}", ptr),
|
||||
Scalar::Int(int) => write!(f, "{:?}", int),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Tag: fmt::Debug> fmt::Display for Scalar<Tag> {
|
||||
impl<Tag: Provenance> fmt::Display for Scalar<Tag> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Scalar::Ptr(ptr) => write!(f, "pointer to {}", ptr),
|
||||
Scalar::Int { .. } => fmt::Debug::fmt(self, f),
|
||||
Scalar::Ptr(ptr, _size) => write!(f, "pointer to {:?}", ptr),
|
||||
Scalar::Int(int) => write!(f, "{:?}", int),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -169,83 +172,34 @@ impl<Tag> From<Double> for Scalar<Tag> {
|
|||
}
|
||||
}
|
||||
|
||||
impl Scalar<()> {
|
||||
/// Tag this scalar with `new_tag` if it is a pointer, leave it unchanged otherwise.
|
||||
///
|
||||
/// Used by `MemPlace::replace_tag`.
|
||||
#[inline]
|
||||
pub fn with_tag<Tag>(self, new_tag: Tag) -> Scalar<Tag> {
|
||||
match self {
|
||||
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_tag(new_tag)),
|
||||
Scalar::Int(int) => Scalar::Int(int),
|
||||
}
|
||||
impl<Tag> From<ScalarInt> for Scalar<Tag> {
|
||||
#[inline(always)]
|
||||
fn from(ptr: ScalarInt) -> Self {
|
||||
Scalar::Int(ptr)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx, Tag> Scalar<Tag> {
|
||||
impl<Tag> Scalar<Tag> {
|
||||
pub const ZST: Self = Scalar::Int(ScalarInt::ZST);
|
||||
|
||||
/// Erase the tag from the scalar, if any.
|
||||
///
|
||||
/// Used by error reporting code to avoid having the error type depend on `Tag`.
|
||||
#[inline]
|
||||
pub fn erase_tag(self) -> Scalar {
|
||||
match self {
|
||||
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.erase_tag()),
|
||||
Scalar::Int(int) => Scalar::Int(int),
|
||||
#[inline(always)]
|
||||
pub fn from_pointer(ptr: Pointer<Tag>, cx: &impl HasDataLayout) -> Self {
|
||||
Scalar::Ptr(ptr, u8::try_from(cx.pointer_size().bytes()).unwrap())
|
||||
}
|
||||
|
||||
/// Create a Scalar from a pointer with an `Option<_>` tag (where `None` represents a plain integer).
|
||||
pub fn from_maybe_pointer(ptr: Pointer<Option<Tag>>, cx: &impl HasDataLayout) -> Self {
|
||||
match ptr.into_parts() {
|
||||
(Some(tag), offset) => Scalar::from_pointer(Pointer::new(tag, offset), cx),
|
||||
(None, offset) => {
|
||||
Scalar::Int(ScalarInt::try_from_uint(offset.bytes(), cx.pointer_size()).unwrap())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn null_ptr(cx: &impl HasDataLayout) -> Self {
|
||||
Scalar::Int(ScalarInt::null(cx.data_layout().pointer_size))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn ptr_op(
|
||||
self,
|
||||
dl: &TargetDataLayout,
|
||||
f_int: impl FnOnce(u64) -> InterpResult<'tcx, u64>,
|
||||
f_ptr: impl FnOnce(Pointer<Tag>) -> InterpResult<'tcx, Pointer<Tag>>,
|
||||
) -> InterpResult<'tcx, Self> {
|
||||
match self {
|
||||
Scalar::Int(int) => Ok(Scalar::Int(int.ptr_sized_op(dl, f_int)?)),
|
||||
Scalar::Ptr(ptr) => Ok(Scalar::Ptr(f_ptr(ptr)?)),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
|
||||
let dl = cx.data_layout();
|
||||
self.ptr_op(dl, |int| dl.offset(int, i.bytes()), |ptr| ptr.offset(i, dl))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn ptr_wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
|
||||
let dl = cx.data_layout();
|
||||
self.ptr_op(
|
||||
dl,
|
||||
|int| Ok(dl.overflowing_offset(int, i.bytes()).0),
|
||||
|ptr| Ok(ptr.wrapping_offset(i, dl)),
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
|
||||
let dl = cx.data_layout();
|
||||
self.ptr_op(dl, |int| dl.signed_offset(int, i), |ptr| ptr.signed_offset(i, dl))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn ptr_wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
|
||||
let dl = cx.data_layout();
|
||||
self.ptr_op(
|
||||
dl,
|
||||
|int| Ok(dl.overflowing_signed_offset(int, i).0),
|
||||
|ptr| Ok(ptr.wrapping_signed_offset(i, dl)),
|
||||
)
|
||||
.unwrap()
|
||||
Scalar::Int(ScalarInt::null(cx.pointer_size()))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
|
@ -332,74 +286,71 @@ impl<'tcx, Tag> Scalar<Tag> {
|
|||
Scalar::Int(f.into())
|
||||
}
|
||||
|
||||
/// This is very rarely the method you want! You should dispatch on the type
|
||||
/// and use `force_bits`/`assert_bits`/`force_ptr`/`assert_ptr`.
|
||||
/// This method only exists for the benefit of low-level memory operations
|
||||
/// as well as the implementation of the `force_*` methods.
|
||||
/// This is almost certainly not the method you want! You should dispatch on the type
|
||||
/// and use `to_{u8,u16,...}`/`scalar_to_ptr` to perform ptr-to-int / int-to-ptr casts as needed.
|
||||
///
|
||||
/// This method only exists for the benefit of low-level operations that truly need to treat the
|
||||
/// scalar in whatever form it is.
|
||||
#[inline]
|
||||
pub fn to_bits_or_ptr(
|
||||
self,
|
||||
target_size: Size,
|
||||
cx: &impl HasDataLayout,
|
||||
) -> Result<u128, Pointer<Tag>> {
|
||||
pub fn to_bits_or_ptr_internal(self, target_size: Size) -> Result<u128, Pointer<Tag>> {
|
||||
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
|
||||
match self {
|
||||
Scalar::Int(int) => Ok(int.assert_bits(target_size)),
|
||||
Scalar::Ptr(ptr) => {
|
||||
assert_eq!(target_size, cx.data_layout().pointer_size);
|
||||
Scalar::Ptr(ptr, sz) => {
|
||||
assert_eq!(target_size.bytes(), u64::from(sz));
|
||||
Err(ptr)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// This method is intentionally private!
|
||||
/// It is just a helper for other methods in this file.
|
||||
impl<'tcx, Tag: Provenance> Scalar<Tag> {
|
||||
/// Fundamental scalar-to-int (cast) operation. Many convenience wrappers exist below, that you
|
||||
/// likely want to use instead.
|
||||
///
|
||||
/// Will perform ptr-to-int casts if needed and possible.
|
||||
/// If that fails, we know the offset is relative, so we return an "erased" Scalar
|
||||
/// (which is useful for error messages but not much else).
|
||||
#[inline]
|
||||
fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
|
||||
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
|
||||
pub fn try_to_int(self) -> Result<ScalarInt, Scalar<AllocId>> {
|
||||
match self {
|
||||
Scalar::Int(int) => int.to_bits(target_size).map_err(|size| {
|
||||
Scalar::Int(int) => Ok(int),
|
||||
Scalar::Ptr(ptr, sz) => {
|
||||
if Tag::OFFSET_IS_ADDR {
|
||||
Ok(ScalarInt::try_from_uint(ptr.offset.bytes(), Size::from_bytes(sz)).unwrap())
|
||||
} else {
|
||||
// We know `offset` is relative, since `OFFSET_IS_ADDR == false`.
|
||||
let (tag, offset) = ptr.into_parts();
|
||||
Err(Scalar::Ptr(Pointer::new(tag.get_alloc_id(), offset), sz))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn assert_int(self) -> ScalarInt {
|
||||
self.try_to_int().unwrap()
|
||||
}
|
||||
|
||||
/// This throws UB (instead of ICEing) on a size mismatch since size mismatches can arise in
|
||||
/// Miri when someone declares a function that we shim (such as `malloc`) with a wrong type.
|
||||
#[inline]
|
||||
pub fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
|
||||
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
|
||||
self.try_to_int().map_err(|_| err_unsup!(ReadPointerAsBytes))?.to_bits(target_size).map_err(
|
||||
|size| {
|
||||
err_ub!(ScalarSizeMismatch {
|
||||
target_size: target_size.bytes(),
|
||||
data_size: size.bytes(),
|
||||
})
|
||||
.into()
|
||||
}),
|
||||
Scalar::Ptr(_) => throw_unsup!(ReadPointerAsBytes),
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn assert_bits(self, target_size: Size) -> u128 {
|
||||
self.to_bits(target_size).expect("expected Raw bits but got a Pointer")
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn assert_int(self) -> ScalarInt {
|
||||
match self {
|
||||
Scalar::Ptr(_) => bug!("expected an int but got an abstract pointer"),
|
||||
Scalar::Int(int) => int,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn assert_ptr(self) -> Pointer<Tag> {
|
||||
match self {
|
||||
Scalar::Ptr(p) => p,
|
||||
Scalar::Int { .. } => bug!("expected a Pointer but got Raw bits"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Do not call this method! Dispatch based on the type instead.
|
||||
#[inline]
|
||||
pub fn is_bits(self) -> bool {
|
||||
matches!(self, Scalar::Int { .. })
|
||||
}
|
||||
|
||||
/// Do not call this method! Dispatch based on the type instead.
|
||||
#[inline]
|
||||
pub fn is_ptr(self) -> bool {
|
||||
matches!(self, Scalar::Ptr(_))
|
||||
self.to_bits(target_size).unwrap()
|
||||
}
|
||||
|
||||
pub fn to_bool(self) -> InterpResult<'tcx, bool> {
|
||||
|
@ -507,22 +458,8 @@ impl<'tcx, Tag> Scalar<Tag> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<Tag> From<Pointer<Tag>> for Scalar<Tag> {
|
||||
#[inline(always)]
|
||||
fn from(ptr: Pointer<Tag>) -> Self {
|
||||
Scalar::Ptr(ptr)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Tag> From<ScalarInt> for Scalar<Tag> {
|
||||
#[inline(always)]
|
||||
fn from(ptr: ScalarInt) -> Self {
|
||||
Scalar::Int(ptr)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Eq, PartialEq, TyEncodable, TyDecodable, HashStable, Hash)]
|
||||
pub enum ScalarMaybeUninit<Tag = ()> {
|
||||
pub enum ScalarMaybeUninit<Tag = AllocId> {
|
||||
Scalar(Scalar<Tag>),
|
||||
Uninit,
|
||||
}
|
||||
|
@ -537,16 +474,9 @@ impl<Tag> From<Scalar<Tag>> for ScalarMaybeUninit<Tag> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<Tag> From<Pointer<Tag>> for ScalarMaybeUninit<Tag> {
|
||||
#[inline(always)]
|
||||
fn from(s: Pointer<Tag>) -> Self {
|
||||
ScalarMaybeUninit::Scalar(s.into())
|
||||
}
|
||||
}
|
||||
|
||||
// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
|
||||
// all the Miri types.
|
||||
impl<Tag: fmt::Debug> fmt::Debug for ScalarMaybeUninit<Tag> {
|
||||
impl<Tag: Provenance> fmt::Debug for ScalarMaybeUninit<Tag> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
ScalarMaybeUninit::Uninit => write!(f, "<uninitialized>"),
|
||||
|
@ -555,7 +485,7 @@ impl<Tag: fmt::Debug> fmt::Debug for ScalarMaybeUninit<Tag> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<Tag: fmt::Debug> fmt::Display for ScalarMaybeUninit<Tag> {
|
||||
impl<Tag: Provenance> fmt::Display for ScalarMaybeUninit<Tag> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
ScalarMaybeUninit::Uninit => write!(f, "uninitialized bytes"),
|
||||
|
@ -564,16 +494,15 @@ impl<Tag: fmt::Debug> fmt::Display for ScalarMaybeUninit<Tag> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'tcx, Tag> ScalarMaybeUninit<Tag> {
|
||||
/// Erase the tag from the scalar, if any.
|
||||
///
|
||||
/// Used by error reporting code to avoid having the error type depend on `Tag`.
|
||||
impl<Tag> ScalarMaybeUninit<Tag> {
|
||||
#[inline]
|
||||
pub fn erase_tag(self) -> ScalarMaybeUninit {
|
||||
match self {
|
||||
ScalarMaybeUninit::Scalar(s) => ScalarMaybeUninit::Scalar(s.erase_tag()),
|
||||
ScalarMaybeUninit::Uninit => ScalarMaybeUninit::Uninit,
|
||||
}
|
||||
pub fn from_pointer(ptr: Pointer<Tag>, cx: &impl HasDataLayout) -> Self {
|
||||
ScalarMaybeUninit::Scalar(Scalar::from_pointer(ptr, cx))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn from_maybe_pointer(ptr: Pointer<Option<Tag>>, cx: &impl HasDataLayout) -> Self {
|
||||
ScalarMaybeUninit::Scalar(Scalar::from_maybe_pointer(ptr, cx))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
|
@ -583,7 +512,9 @@ impl<'tcx, Tag> ScalarMaybeUninit<Tag> {
|
|||
ScalarMaybeUninit::Uninit => throw_ub!(InvalidUninitBytes(None)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx, Tag: Provenance> ScalarMaybeUninit<Tag> {
|
||||
#[inline(always)]
|
||||
pub fn to_bool(self) -> InterpResult<'tcx, bool> {
|
||||
self.check_init()?.to_bool()
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/mir/index.html
|
||||
|
||||
use crate::mir::coverage::{CodeRegion, CoverageKind};
|
||||
use crate::mir::interpret::{Allocation, GlobalAlloc, Scalar};
|
||||
use crate::mir::interpret::{Allocation, ConstValue, GlobalAlloc, Scalar};
|
||||
use crate::mir::visit::MirVisitable;
|
||||
use crate::ty::adjustment::PointerCast;
|
||||
use crate::ty::codec::{TyDecoder, TyEncoder};
|
||||
|
@ -2095,7 +2095,7 @@ impl<'tcx> Operand<'tcx> {
|
|||
Operand::Constant(box Constant {
|
||||
span,
|
||||
user_ty: None,
|
||||
literal: ConstantKind::Val(val.into(), ty),
|
||||
literal: ConstantKind::Val(ConstValue::Scalar(val), ty),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -2458,7 +2458,7 @@ pub enum ConstantKind<'tcx> {
|
|||
impl Constant<'tcx> {
|
||||
pub fn check_static_ptr(&self, tcx: TyCtxt<'_>) -> Option<DefId> {
|
||||
match self.literal.const_for_ty()?.val.try_to_scalar() {
|
||||
Some(Scalar::Ptr(ptr)) => match tcx.global_alloc(ptr.alloc_id) {
|
||||
Some(Scalar::Ptr(ptr, _size)) => match tcx.global_alloc(ptr.provenance) {
|
||||
GlobalAlloc::Static(def_id) => {
|
||||
assert!(!tcx.is_thread_local_static(def_id));
|
||||
Some(def_id)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use rustc_apfloat::ieee::{Double, Single};
|
||||
use rustc_apfloat::Float;
|
||||
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
|
||||
use rustc_target::abi::{Size, TargetDataLayout};
|
||||
use rustc_target::abi::Size;
|
||||
use std::convert::{TryFrom, TryInto};
|
||||
use std::fmt;
|
||||
|
||||
|
@ -193,15 +193,6 @@ impl ScalarInt {
|
|||
self.data == 0
|
||||
}
|
||||
|
||||
pub(crate) fn ptr_sized_op<E>(
|
||||
self,
|
||||
dl: &TargetDataLayout,
|
||||
f_int: impl FnOnce(u64) -> Result<u64, E>,
|
||||
) -> Result<Self, E> {
|
||||
assert_eq!(u64::from(self.size), dl.pointer_size.bytes());
|
||||
Ok(Self::try_from_uint(f_int(u64::try_from(self.data).unwrap())?, self.size()).unwrap())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> {
|
||||
let data = i.into();
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
use std::convert::TryInto;
|
||||
|
||||
use crate::mir::interpret::ConstValue;
|
||||
use crate::mir::interpret::Scalar;
|
||||
use crate::mir::interpret::{AllocId, ConstValue, Scalar};
|
||||
use crate::mir::Promoted;
|
||||
use crate::ty::subst::{InternalSubsts, SubstsRef};
|
||||
use crate::ty::ParamEnv;
|
||||
|
@ -59,7 +58,7 @@ impl<'tcx> ConstKind<'tcx> {
|
|||
}
|
||||
|
||||
#[inline]
|
||||
pub fn try_to_scalar(self) -> Option<Scalar> {
|
||||
pub fn try_to_scalar(self) -> Option<Scalar<AllocId>> {
|
||||
self.try_to_value()?.try_to_scalar()
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::middle::cstore::{ExternCrate, ExternCrateSource};
|
||||
use crate::mir::interpret::{AllocRange, ConstValue, GlobalAlloc, Pointer, Scalar};
|
||||
use crate::mir::interpret::{AllocRange, ConstValue, GlobalAlloc, Pointer, Provenance, Scalar};
|
||||
use crate::ty::subst::{GenericArg, GenericArgKind, Subst};
|
||||
use crate::ty::{self, ConstInt, DefIdTree, ParamConst, ScalarInt, Ty, TyCtxt, TypeFoldable};
|
||||
use rustc_apfloat::ieee::{Double, Single};
|
||||
|
@ -974,7 +974,7 @@ pub trait PrettyPrinter<'tcx>:
|
|||
print_ty: bool,
|
||||
) -> Result<Self::Const, Self::Error> {
|
||||
match scalar {
|
||||
Scalar::Ptr(ptr) => self.pretty_print_const_scalar_ptr(ptr, ty, print_ty),
|
||||
Scalar::Ptr(ptr, _size) => self.pretty_print_const_scalar_ptr(ptr, ty, print_ty),
|
||||
Scalar::Int(int) => self.pretty_print_const_scalar_int(int, ty, print_ty),
|
||||
}
|
||||
}
|
||||
|
@ -987,6 +987,7 @@ pub trait PrettyPrinter<'tcx>:
|
|||
) -> Result<Self::Const, Self::Error> {
|
||||
define_scoped_cx!(self);
|
||||
|
||||
let (alloc_id, offset) = ptr.into_parts();
|
||||
match ty.kind() {
|
||||
// Byte strings (&[u8; N])
|
||||
ty::Ref(
|
||||
|
@ -1002,10 +1003,10 @@ pub trait PrettyPrinter<'tcx>:
|
|||
..
|
||||
},
|
||||
_,
|
||||
) => match self.tcx().get_global_alloc(ptr.alloc_id) {
|
||||
) => match self.tcx().get_global_alloc(alloc_id) {
|
||||
Some(GlobalAlloc::Memory(alloc)) => {
|
||||
let len = int.assert_bits(self.tcx().data_layout.pointer_size);
|
||||
let range = AllocRange { start: ptr.offset, size: Size::from_bytes(len) };
|
||||
let range = AllocRange { start: offset, size: Size::from_bytes(len) };
|
||||
if let Ok(byte_str) = alloc.get_bytes(&self.tcx(), range) {
|
||||
p!(pretty_print_byte_str(byte_str))
|
||||
} else {
|
||||
|
@ -1020,7 +1021,7 @@ pub trait PrettyPrinter<'tcx>:
|
|||
ty::FnPtr(_) => {
|
||||
// FIXME: We should probably have a helper method to share code with the "Byte strings"
|
||||
// printing above (which also has to handle pointers to all sorts of things).
|
||||
match self.tcx().get_global_alloc(ptr.alloc_id) {
|
||||
match self.tcx().get_global_alloc(alloc_id) {
|
||||
Some(GlobalAlloc::Function(instance)) => {
|
||||
self = self.typed_value(
|
||||
|this| this.print_value_path(instance.def_id(), instance.substs),
|
||||
|
@ -1068,8 +1069,8 @@ pub trait PrettyPrinter<'tcx>:
|
|||
ty::Char if char::try_from(int).is_ok() => {
|
||||
p!(write("{:?}", char::try_from(int).unwrap()))
|
||||
}
|
||||
// Raw pointers
|
||||
ty::RawPtr(_) | ty::FnPtr(_) => {
|
||||
// Pointer types
|
||||
ty::Ref(..) | ty::RawPtr(_) | ty::FnPtr(_) => {
|
||||
let data = int.assert_bits(self.tcx().data_layout.pointer_size);
|
||||
self = self.typed_value(
|
||||
|mut this| {
|
||||
|
@ -1106,9 +1107,9 @@ pub trait PrettyPrinter<'tcx>:
|
|||
|
||||
/// This is overridden for MIR printing because we only want to hide alloc ids from users, not
|
||||
/// from MIR where it is actually useful.
|
||||
fn pretty_print_const_pointer(
|
||||
fn pretty_print_const_pointer<Tag: Provenance>(
|
||||
mut self,
|
||||
_: Pointer,
|
||||
_: Pointer<Tag>,
|
||||
ty: Ty<'tcx>,
|
||||
print_ty: bool,
|
||||
) -> Result<Self::Const, Self::Error> {
|
||||
|
@ -1679,9 +1680,9 @@ impl<F: fmt::Write> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx, F> {
|
|||
}
|
||||
}
|
||||
|
||||
fn pretty_print_const_pointer(
|
||||
fn pretty_print_const_pointer<Tag: Provenance>(
|
||||
self,
|
||||
p: Pointer,
|
||||
p: Pointer<Tag>,
|
||||
ty: Ty<'tcx>,
|
||||
print_ty: bool,
|
||||
) -> Result<Self::Const, Self::Error> {
|
||||
|
|
|
@ -595,9 +595,12 @@ fn check_const_value_eq<R: TypeRelation<'tcx>>(
|
|||
(ConstValue::Scalar(Scalar::Int(a_val)), ConstValue::Scalar(Scalar::Int(b_val))) => {
|
||||
a_val == b_val
|
||||
}
|
||||
(ConstValue::Scalar(Scalar::Ptr(a_val)), ConstValue::Scalar(Scalar::Ptr(b_val))) => {
|
||||
(
|
||||
ConstValue::Scalar(Scalar::Ptr(a_val, _a_size)),
|
||||
ConstValue::Scalar(Scalar::Ptr(b_val, _b_size)),
|
||||
) => {
|
||||
a_val == b_val
|
||||
|| match (tcx.global_alloc(a_val.alloc_id), tcx.global_alloc(b_val.alloc_id)) {
|
||||
|| match (tcx.global_alloc(a_val.provenance), tcx.global_alloc(b_val.provenance)) {
|
||||
(GlobalAlloc::Function(a_instance), GlobalAlloc::Function(b_instance)) => {
|
||||
a_instance == b_instance
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::convert::TryFrom;
|
||||
|
||||
use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar};
|
||||
use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar, ScalarMaybeUninit};
|
||||
use crate::ty::fold::TypeFoldable;
|
||||
use crate::ty::{self, DefId, SubstsRef, Ty, TyCtxt};
|
||||
use rustc_ast::Mutability;
|
||||
|
@ -74,7 +74,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
let instance = ty::Instance::resolve_drop_in_place(tcx, ty);
|
||||
let fn_alloc_id = tcx.create_fn_alloc(instance);
|
||||
let fn_ptr = Pointer::from(fn_alloc_id);
|
||||
fn_ptr.into()
|
||||
ScalarMaybeUninit::from_pointer(fn_ptr, &tcx)
|
||||
}
|
||||
VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_size).into(),
|
||||
VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_size).into(),
|
||||
|
@ -90,7 +90,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
.polymorphize(tcx);
|
||||
let fn_alloc_id = tcx.create_fn_alloc(instance);
|
||||
let fn_ptr = Pointer::from(fn_alloc_id);
|
||||
fn_ptr.into()
|
||||
ScalarMaybeUninit::from_pointer(fn_ptr, &tcx)
|
||||
}
|
||||
};
|
||||
vtable
|
||||
|
|