ctfe interpreter: extend provenance so that it can track whether a pointer is immutable
parent 85a4bd8f58
commit cb86303342
38 changed files with 261 additions and 145 deletions
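The change replaces the CTFE machines' provenance type, previously a bare AllocId, with a new CtfeProvenance that also records whether the pointer is immutable. The hunks below only show its call sites (From<AllocId>, alloc_id(), as_immutable()), not the type itself, so here is a minimal self-contained sketch of the interface those call sites assume; the names mirror the diff, but the layout (a plain struct with a bool) is an illustrative assumption, not the actual rustc_middle definition.

    // Illustrative stand-in for the new provenance type; NOT the rustc definition.
    #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
    struct AllocId(u64);

    #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
    struct CtfeProvenance {
        alloc_id: AllocId,
        immutable: bool,
    }

    impl From<AllocId> for CtfeProvenance {
        // A bare AllocId becomes provenance with the immutable bit unset.
        fn from(alloc_id: AllocId) -> Self {
            CtfeProvenance { alloc_id, immutable: false }
        }
    }

    impl CtfeProvenance {
        /// The allocation this pointer points into.
        fn alloc_id(self) -> AllocId {
            self.alloc_id
        }

        /// Same provenance, but marked immutable (used for const data in the diff).
        fn as_immutable(self) -> Self {
            CtfeProvenance { immutable: true, ..self }
        }
    }

    fn main() {
        let prov = CtfeProvenance::from(AllocId(1)).as_immutable();
        assert_eq!(prov.alloc_id(), AllocId(1));
        assert!(prov.immutable);
    }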
@@ -155,8 +155,8 @@ pub(super) fn op_to_const<'tcx>(
     match immediate {
         Left(ref mplace) => {
             // We know `offset` is relative to the allocation, so we can use `into_parts`.
-            let (alloc_id, offset) = mplace.ptr().into_parts();
-            let alloc_id = alloc_id.expect("cannot have `fake` place fot non-ZST type");
+            let (prov, offset) = mplace.ptr().into_parts();
+            let alloc_id = prov.expect("cannot have `fake` place for non-ZST type").alloc_id();
             ConstValue::Indirect { alloc_id, offset }
         }
         // see comment on `let force_as_immediate` above
@@ -178,8 +178,8 @@ pub(super) fn op_to_const<'tcx>(
             );
             let msg = "`op_to_const` on an immediate scalar pair must only be used on slice references to the beginning of an actual allocation";
             // We know `offset` is relative to the allocation, so we can use `into_parts`.
-            let (alloc_id, offset) = a.to_pointer(ecx).expect(msg).into_parts();
-            let alloc_id = alloc_id.expect(msg);
+            let (prov, offset) = a.to_pointer(ecx).expect(msg).into_parts();
+            let alloc_id = prov.expect(msg).alloc_id();
             let data = ecx.tcx.global_alloc(alloc_id).unwrap_memory();
             assert!(offset == abi::Size::ZERO, "{}", msg);
             let meta = b.to_target_usize(ecx).expect(msg);
@@ -353,7 +353,7 @@ pub fn eval_in_interpreter<'mir, 'tcx>(
             let validation =
                 const_validate_mplace(&ecx, &mplace, is_static, cid.promoted.is_some());
 
-            let alloc_id = mplace.ptr().provenance.unwrap();
+            let alloc_id = mplace.ptr().provenance.unwrap().alloc_id();
 
             // Validation failed, report an error.
             if let Err(error) = validation {
@@ -49,7 +49,7 @@ pub struct CompileTimeInterpreter<'mir, 'tcx> {
     pub(super) num_evaluated_steps: usize,
 
     /// The virtual call stack.
-    pub(super) stack: Vec<Frame<'mir, 'tcx, AllocId, ()>>,
+    pub(super) stack: Vec<Frame<'mir, 'tcx>>,
 
     /// We need to make sure consts never point to anything mutable, even recursively. That is
     /// relied on for pattern matching on consts with references.
@@ -638,10 +638,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
     }
 
     #[inline(always)]
-    fn expose_ptr(
-        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
-        _ptr: Pointer<AllocId>,
-    ) -> InterpResult<'tcx> {
+    fn expose_ptr(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx> {
         // This is only reachable with -Zunleash-the-miri-inside-of-you.
         throw_unsup_format!("exposing pointers is not possible at compile-time")
     }
@@ -7,7 +7,9 @@ use hir::CRATE_HIR_ID;
 use rustc_hir::{self as hir, def_id::DefId, definitions::DefPathData};
 use rustc_index::IndexVec;
 use rustc_middle::mir;
-use rustc_middle::mir::interpret::{ErrorHandled, InvalidMetaKind, ReportedErrorInfo};
+use rustc_middle::mir::interpret::{
+    CtfeProvenance, ErrorHandled, InvalidMetaKind, ReportedErrorInfo,
+};
 use rustc_middle::query::TyCtxtAt;
 use rustc_middle::ty::layout::{
     self, FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOf, LayoutOfHelpers,
@@ -20,9 +22,9 @@ use rustc_span::Span;
 use rustc_target::abi::{call::FnAbi, Align, HasDataLayout, Size, TargetDataLayout};
 
 use super::{
-    AllocId, GlobalId, Immediate, InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemPlace,
-    MemPlaceMeta, Memory, MemoryKind, OpTy, Operand, Place, PlaceTy, Pointer, PointerArithmetic,
-    Projectable, Provenance, Scalar, StackPopJump,
+    GlobalId, Immediate, InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta,
+    Memory, MemoryKind, OpTy, Operand, Place, PlaceTy, Pointer, PointerArithmetic, Projectable,
+    Provenance, Scalar, StackPopJump,
 };
 use crate::errors;
 use crate::util;
@@ -84,7 +86,7 @@ impl Drop for SpanGuard {
 }
 
 /// A stack frame.
-pub struct Frame<'mir, 'tcx, Prov: Provenance = AllocId, Extra = ()> {
+pub struct Frame<'mir, 'tcx, Prov: Provenance = CtfeProvenance, Extra = ()> {
     ////////////////////////////////////////////////////////////////////////////////
     // Function and callsite information
     ////////////////////////////////////////////////////////////////////////////////
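Because the default of the `Prov` parameter changes from `AllocId` to `CtfeProvenance` here, call sites that spell out no provenance at all (such as the `Vec<Frame<'mir, 'tcx>>` field earlier in the diff) silently switch to the new type. A small illustration of how default type parameters make that work, using simplified stand-in types (no lifetimes or `Provenance` bound, unlike the real `Frame`):

    // Simplified stand-ins; the real types carry lifetimes and a Provenance bound.
    #[derive(Copy, Clone, Debug)]
    struct CtfeProvenance(u64);

    // `Prov` defaults to `CtfeProvenance`, so `Frame` means `Frame<CtfeProvenance, ()>`.
    struct Frame<Prov = CtfeProvenance, Extra = ()> {
        return_prov: Option<Prov>,
        extra: Extra,
    }

    fn main() {
        // Before this commit the same shorthand would have defaulted to `AllocId`.
        let frame: Frame = Frame { return_prov: Some(CtfeProvenance(0)), extra: () };
        let _explicit: Frame<CtfeProvenance, ()> = frame;
    }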
@@ -156,7 +158,7 @@ pub enum StackPopCleanup {
 
 /// State of a local variable including a memoized layout
 #[derive(Clone)]
-pub struct LocalState<'tcx, Prov: Provenance = AllocId> {
+pub struct LocalState<'tcx, Prov: Provenance = CtfeProvenance> {
     value: LocalValue<Prov>,
     /// Don't modify if `Some`, this is only used to prevent computing the layout twice.
     /// Avoids computing the layout of locals that are never actually initialized.
@@ -177,7 +179,7 @@ impl<Prov: Provenance> std::fmt::Debug for LocalState<'_, Prov> {
 /// This does not store the type of the local; the type is given by `body.local_decls` and can never
 /// change, so by not storing here we avoid having to maintain that as an invariant.
 #[derive(Copy, Clone, Debug)] // Miri debug-prints these
-pub(super) enum LocalValue<Prov: Provenance = AllocId> {
+pub(super) enum LocalValue<Prov: Provenance = CtfeProvenance> {
     /// This local is not currently alive, and cannot be used at all.
     Dead,
     /// A normal, live local.
@@ -18,7 +18,7 @@ use super::validity::RefTracking;
 use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
 use rustc_errors::ErrorGuaranteed;
 use rustc_hir as hir;
-use rustc_middle::mir::interpret::InterpResult;
+use rustc_middle::mir::interpret::{CtfeProvenance, InterpResult};
 use rustc_middle::ty::{self, layout::TyAndLayout, Ty};
 
 use rustc_ast::Mutability;
@@ -34,7 +34,7 @@ pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
     'mir,
     'tcx,
     MemoryKind = T,
-    Provenance = AllocId,
+    Provenance = CtfeProvenance,
     ExtraFnVal = !,
     FrameExtra = (),
     AllocExtra = (),
@@ -135,7 +135,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
         alloc.mutability = Mutability::Not;
     };
     // link the alloc id to the actual allocation
-    leftover_allocations.extend(alloc.provenance().ptrs().iter().map(|&(_, alloc_id)| alloc_id));
+    leftover_allocations.extend(alloc.provenance().ptrs().iter().map(|&(_, prov)| prov.alloc_id()));
     let alloc = tcx.mk_const_alloc(alloc);
     tcx.set_alloc_id_memory(alloc_id, alloc);
     None
@@ -178,10 +178,10 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
             tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
         {
             let ptr = mplace.meta().unwrap_meta().to_pointer(&tcx)?;
-            if let Some(alloc_id) = ptr.provenance {
+            if let Some(prov) = ptr.provenance {
                 // Explicitly choose const mode here, since vtables are immutable, even
                 // if the reference of the fat pointer is mutable.
-                self.intern_shallow(alloc_id, InternMode::Const, None);
+                self.intern_shallow(prov.alloc_id(), InternMode::Const, None);
             } else {
                 // Validation will error (with a better message) on an invalid vtable pointer.
                 // Let validation show the error message, but make sure it *does* error.
@@ -191,7 +191,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
         }
         // Check if we have encountered this pointer+layout combination before.
         // Only recurse for allocation-backed pointers.
-        if let Some(alloc_id) = mplace.ptr().provenance {
+        if let Some(prov) = mplace.ptr().provenance {
             // Compute the mode with which we intern this. Our goal here is to make as many
             // statics as we can immutable so they can be placed in read-only memory by LLVM.
             let ref_mode = match self.mode {
@@ -234,7 +234,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
                     InternMode::Const
                 }
             };
-            match self.intern_shallow(alloc_id, ref_mode, Some(referenced_ty)) {
+            match self.intern_shallow(prov.alloc_id(), ref_mode, Some(referenced_ty)) {
                 // No need to recurse, these are interned already and statics may have
                 // cycles, so we don't want to recurse there
                 Some(IsStaticOrFn) => {}
@@ -353,7 +353,7 @@ pub fn intern_const_alloc_recursive<
         leftover_allocations,
         // The outermost allocation must exist, because we allocated it with
         // `Memory::allocate`.
-        ret.ptr().provenance.unwrap(),
+        ret.ptr().provenance.unwrap().alloc_id(),
        base_intern_mode,
        Some(ret.layout.ty),
    );
@@ -431,7 +431,8 @@ pub fn intern_const_alloc_recursive<
         }
         let alloc = tcx.mk_const_alloc(alloc);
         tcx.set_alloc_id_memory(alloc_id, alloc);
-        for &(_, alloc_id) in alloc.inner().provenance().ptrs().iter() {
+        for &(_, prov) in alloc.inner().provenance().ptrs().iter() {
+            let alloc_id = prov.alloc_id();
             if leftover_allocations.insert(alloc_id) {
                 todo.push(alloc_id);
             }
@@ -503,10 +504,11 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
         // `allocate` picks a fresh AllocId that we will associate with its data below.
         let dest = self.allocate(layout, MemoryKind::Stack)?;
         f(self, &dest.clone().into())?;
-        let mut alloc = self.memory.alloc_map.remove(&dest.ptr().provenance.unwrap()).unwrap().1;
+        let mut alloc =
+            self.memory.alloc_map.remove(&dest.ptr().provenance.unwrap().alloc_id()).unwrap().1;
         alloc.mutability = Mutability::Not;
         let alloc = self.tcx.mk_const_alloc(alloc);
-        let alloc_id = dest.ptr().provenance.unwrap(); // this was just allocated, it must have provenance
+        let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
         self.tcx.set_alloc_id_memory(alloc_id, alloc);
         Ok(alloc_id)
     }
@@ -16,8 +16,9 @@ use rustc_target::abi::{Align, Size};
 use rustc_target::spec::abi::Abi as CallAbi;
 
 use super::{
-    AllocBytes, AllocId, AllocKind, AllocRange, Allocation, ConstAllocation, FnArg, Frame, ImmTy,
-    InterpCx, InterpResult, MPlaceTy, MemoryKind, Misalignment, OpTy, PlaceTy, Pointer, Provenance,
+    AllocBytes, AllocId, AllocKind, AllocRange, Allocation, ConstAllocation, CtfeProvenance, FnArg,
+    Frame, ImmTy, InterpCx, InterpResult, MPlaceTy, MemoryKind, Misalignment, OpTy, PlaceTy,
+    Pointer, Provenance,
 };
 
 /// Data returned by Machine::stack_pop,
@@ -513,8 +514,8 @@ pub trait Machine<'mir, 'tcx: 'mir>: Sized {
 /// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
 /// (CTFE and ConstProp) use the same instance. Here, we share that code.
 pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
-    type Provenance = AllocId;
-    type ProvenanceExtra = ();
+    type Provenance = CtfeProvenance;
+    type ProvenanceExtra = (); // FIXME extract the "immutable" bool?
 
     type ExtraFnVal = !;
 
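The macro body above pins the `Machine` trait's associated `Provenance` type to `CtfeProvenance` for every compile-time machine, while the FIXME asks whether the immutable flag should instead be split out into `ProvenanceExtra`. A hedged sketch of that associated-type arrangement, with illustrative names rather than the real `Machine` trait:

    // Illustrative trait; the real `Machine` trait has many more items.
    trait Machine {
        type Provenance: Copy;
        type ProvenanceExtra: Copy;
    }

    #[derive(Copy, Clone)]
    struct CtfeProvenance {
        alloc_id: u64,
        immutable: bool,
    }

    struct CompileTimeMachine;

    impl Machine for CompileTimeMachine {
        type Provenance = CtfeProvenance;
        // Kept as `()` in the diff; the FIXME asks whether the immutable bool
        // should live in this slot instead.
        type ProvenanceExtra = ();
    }

    fn main() {
        let p: <CompileTimeMachine as Machine>::Provenance =
            CtfeProvenance { alloc_id: 1, immutable: true };
        assert!(p.immutable);
    }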
@@ -567,14 +568,14 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
         def_id: DefId,
     ) -> InterpResult<$tcx, Pointer> {
         // Use the `AllocId` associated with the `DefId`. Any actual *access* will fail.
-        Ok(Pointer::new(ecx.tcx.reserve_and_set_static_alloc(def_id), Size::ZERO))
+        Ok(Pointer::new(ecx.tcx.reserve_and_set_static_alloc(def_id).into(), Size::ZERO))
     }
 
     #[inline(always)]
     fn adjust_alloc_base_pointer(
         _ecx: &InterpCx<$mir, $tcx, Self>,
-        ptr: Pointer<AllocId>,
-    ) -> InterpResult<$tcx, Pointer<AllocId>> {
+        ptr: Pointer<CtfeProvenance>,
+    ) -> InterpResult<$tcx, Pointer<CtfeProvenance>> {
         Ok(ptr)
     }
 
@@ -582,7 +583,7 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
     fn ptr_from_addr_cast(
         _ecx: &InterpCx<$mir, $tcx, Self>,
         addr: u64,
-    ) -> InterpResult<$tcx, Pointer<Option<AllocId>>> {
+    ) -> InterpResult<$tcx, Pointer<Option<CtfeProvenance>>> {
         // Allow these casts, but make the pointer not dereferenceable.
         // (I.e., they behave like transmutation.)
         // This is correct because no pointers can ever be exposed in compile-time evaluation.
@@ -592,10 +593,10 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
     #[inline(always)]
     fn ptr_get_alloc(
         _ecx: &InterpCx<$mir, $tcx, Self>,
-        ptr: Pointer<AllocId>,
+        ptr: Pointer<CtfeProvenance>,
     ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
         // We know `offset` is relative to the allocation, so we can use `into_parts`.
-        let (alloc_id, offset) = ptr.into_parts();
-        Some((alloc_id, offset, ()))
+        let (prov, offset) = ptr.into_parts();
+        Some((prov.alloc_id(), offset, ()))
     }
 }
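`ptr_get_alloc` still hands the memory layer an `AllocId` plus offset; the only difference after this hunk is that the allocation id has to be projected out of the richer provenance value. A self-contained sketch of that split, using stand-in types (the real `Pointer` and `Size` live in rustc_middle/rustc_target):

    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    struct AllocId(u64);

    #[derive(Copy, Clone, Debug)]
    struct CtfeProvenance {
        alloc_id: AllocId,
        immutable: bool,
    }

    impl CtfeProvenance {
        fn alloc_id(self) -> AllocId {
            self.alloc_id
        }
    }

    /// Stand-in pointer: provenance plus an offset relative to the allocation.
    struct Pointer {
        prov: CtfeProvenance,
        offset: u64,
    }

    impl Pointer {
        fn into_parts(self) -> (CtfeProvenance, u64) {
            (self.prov, self.offset)
        }
    }

    fn ptr_get_alloc(ptr: Pointer) -> Option<(AllocId, u64, ())> {
        // `offset` is relative to the allocation, so `into_parts` is enough here.
        let (prov, offset) = ptr.into_parts();
        Some((prov.alloc_id(), offset, ()))
    }

    fn main() {
        let ptr = Pointer {
            prov: CtfeProvenance { alloc_id: AllocId(7), immutable: false },
            offset: 16,
        };
        assert_eq!(ptr_get_alloc(ptr), Some((AllocId(7), 16, ())));
    }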
@@ -22,8 +22,8 @@ use crate::fluent_generated as fluent;
 
 use super::{
     alloc_range, AllocBytes, AllocId, AllocMap, AllocRange, Allocation, CheckAlignMsg,
-    CheckInAllocMsg, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak, Misalignment, Pointer,
-    PointerArithmetic, Provenance, Scalar,
+    CheckInAllocMsg, CtfeProvenance, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak,
+    Misalignment, Pointer, PointerArithmetic, Provenance, Scalar,
 };
 
 #[derive(Debug, PartialEq, Copy, Clone)]
@@ -159,9 +159,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     #[inline]
     pub fn global_base_pointer(
         &self,
-        ptr: Pointer<AllocId>,
+        ptr: Pointer<CtfeProvenance>,
     ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
-        let alloc_id = ptr.provenance;
+        let alloc_id = ptr.provenance.alloc_id();
         // We need to handle `extern static`.
         match self.tcx.try_get_global_alloc(alloc_id) {
             Some(GlobalAlloc::Static(def_id)) if self.tcx.is_thread_local_static(def_id) => {
@@ -13,9 +13,9 @@ use rustc_middle::{mir, ty};
 use rustc_target::abi::{self, Abi, HasDataLayout, Size};
 
 use super::{
-    alloc_range, from_known_layout, mir_assign_valid_types, AllocId, Frame, InterpCx, InterpResult,
-    MPlaceTy, Machine, MemPlace, MemPlaceMeta, OffsetMode, PlaceTy, Pointer, Projectable,
-    Provenance, Scalar,
+    alloc_range, from_known_layout, mir_assign_valid_types, CtfeProvenance, Frame, InterpCx,
+    InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta, OffsetMode, PlaceTy, Pointer,
+    Projectable, Provenance, Scalar,
 };
 
 /// An `Immediate` represents a single immediate self-contained Rust value.
@@ -26,7 +26,7 @@ use super::{
 /// In particular, thanks to `ScalarPair`, arithmetic operations and casts can be entirely
 /// defined on `Immediate`, and do not have to work with a `Place`.
 #[derive(Copy, Clone, Debug)]
-pub enum Immediate<Prov: Provenance = AllocId> {
+pub enum Immediate<Prov: Provenance = CtfeProvenance> {
     /// A single scalar value (must have *initialized* `Scalar` ABI).
     Scalar(Scalar<Prov>),
     /// A pair of two scalar value (must have `ScalarPair` ABI where both fields are
@@ -98,7 +98,7 @@ impl<Prov: Provenance> Immediate<Prov> {
 // ScalarPair needs a type to interpret, so we often have an immediate and a type together
 // as input for binary and cast operations.
 #[derive(Clone)]
-pub struct ImmTy<'tcx, Prov: Provenance = AllocId> {
+pub struct ImmTy<'tcx, Prov: Provenance = CtfeProvenance> {
     imm: Immediate<Prov>,
     pub layout: TyAndLayout<'tcx>,
 }
@@ -334,13 +334,13 @@ impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for ImmTy<'tcx, Prov> {
 /// or still in memory. The latter is an optimization, to delay reading that chunk of
 /// memory and to avoid having to store arbitrary-sized data here.
 #[derive(Copy, Clone, Debug)]
-pub(super) enum Operand<Prov: Provenance = AllocId> {
+pub(super) enum Operand<Prov: Provenance = CtfeProvenance> {
     Immediate(Immediate<Prov>),
     Indirect(MemPlace<Prov>),
 }
 
 #[derive(Clone)]
-pub struct OpTy<'tcx, Prov: Provenance = AllocId> {
+pub struct OpTy<'tcx, Prov: Provenance = CtfeProvenance> {
     op: Operand<Prov>, // Keep this private; it helps enforce invariants.
     pub layout: TyAndLayout<'tcx>,
 }
@@ -750,17 +750,19 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         let layout = from_known_layout(self.tcx, self.param_env, layout, || self.layout_of(ty))?;
         let imm = match val_val {
             mir::ConstValue::Indirect { alloc_id, offset } => {
-                // We rely on mutability being set correctly in that allocation to prevent writes
-                // where none should happen.
-                let ptr = self.global_base_pointer(Pointer::new(alloc_id, offset))?;
+                // This is const data, no mutation allowed.
+                let ptr = self.global_base_pointer(Pointer::new(
+                    CtfeProvenance::from(alloc_id).as_immutable(),
+                    offset,
+                ))?;
                 return Ok(self.ptr_to_mplace(ptr.into(), layout).into());
             }
             mir::ConstValue::Scalar(x) => adjust_scalar(x)?.into(),
             mir::ConstValue::ZeroSized => Immediate::Uninit,
             mir::ConstValue::Slice { data, meta } => {
-                // We rely on mutability being set correctly in `data` to prevent writes
-                // where none should happen.
-                let ptr = Pointer::new(self.tcx.reserve_and_set_memory_alloc(data), Size::ZERO);
+                // This is const data, no mutation allowed.
+                let alloc_id = self.tcx.reserve_and_set_memory_alloc(data);
+                let ptr = Pointer::new(CtfeProvenance::from(alloc_id).as_immutable(), Size::ZERO);
                 Immediate::new_slice(self.global_base_pointer(ptr)?.into(), meta, self)
             }
         };
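This is where the new immutability bit actually gets set: every pointer the interpreter manufactures for const data is built as `CtfeProvenance::from(alloc_id).as_immutable()`, so the interpreter can later refuse writes through it. A hedged, self-contained sketch of that pattern with stand-in types (the real code goes through `global_base_pointer` and the interpreter's own `Pointer` type):

    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    struct AllocId(u64);

    #[derive(Copy, Clone, Debug)]
    struct CtfeProvenance {
        alloc_id: AllocId,
        immutable: bool,
    }

    impl From<AllocId> for CtfeProvenance {
        fn from(alloc_id: AllocId) -> Self {
            CtfeProvenance { alloc_id, immutable: false }
        }
    }

    impl CtfeProvenance {
        fn as_immutable(self) -> Self {
            CtfeProvenance { immutable: true, ..self }
        }
    }

    struct Pointer {
        prov: CtfeProvenance,
        offset: u64,
    }

    /// Build a pointer to const data: the provenance is marked immutable up front.
    fn pointer_to_const_data(alloc_id: AllocId, offset: u64) -> Pointer {
        // This is const data, no mutation allowed.
        Pointer { prov: CtfeProvenance::from(alloc_id).as_immutable(), offset }
    }

    fn main() {
        let ptr = pointer_to_const_data(AllocId(42), 0);
        assert!(ptr.prov.immutable);
        assert_eq!(ptr.prov.alloc_id, AllocId(42));
        assert_eq!(ptr.offset, 0);
    }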
@@ -14,14 +14,14 @@ use rustc_middle::ty::Ty;
 use rustc_target::abi::{Abi, Align, HasDataLayout, Size};
 
 use super::{
-    alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckAlignMsg, ImmTy,
-    Immediate, InterpCx, InterpResult, Machine, MemoryKind, Misalignment, OffsetMode, OpTy,
+    alloc_range, mir_assign_valid_types, AllocRef, AllocRefMut, CheckAlignMsg, CtfeProvenance,
+    ImmTy, Immediate, InterpCx, InterpResult, Machine, MemoryKind, Misalignment, OffsetMode, OpTy,
     Operand, Pointer, PointerArithmetic, Projectable, Provenance, Readable, Scalar,
 };
 
 #[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
 /// Information required for the sound usage of a `MemPlace`.
-pub enum MemPlaceMeta<Prov: Provenance = AllocId> {
+pub enum MemPlaceMeta<Prov: Provenance = CtfeProvenance> {
     /// The unsized payload (e.g. length for slices or vtable pointer for trait objects).
     Meta(Scalar<Prov>),
     /// `Sized` types or unsized `extern type`
@@ -49,7 +49,7 @@ impl<Prov: Provenance> MemPlaceMeta<Prov> {
 }
 
 #[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
-pub(super) struct MemPlace<Prov: Provenance = AllocId> {
+pub(super) struct MemPlace<Prov: Provenance = CtfeProvenance> {
     /// The pointer can be a pure integer, with the `None` provenance.
     pub ptr: Pointer<Option<Prov>>,
     /// Metadata for unsized places. Interpretation is up to the type.
@@ -100,7 +100,7 @@ impl<Prov: Provenance> MemPlace<Prov> {
 
 /// A MemPlace with its layout. Constructing it is only possible in this module.
 #[derive(Clone, Hash, Eq, PartialEq)]
-pub struct MPlaceTy<'tcx, Prov: Provenance = AllocId> {
+pub struct MPlaceTy<'tcx, Prov: Provenance = CtfeProvenance> {
     mplace: MemPlace<Prov>,
     pub layout: TyAndLayout<'tcx>,
 }
@@ -179,7 +179,7 @@ impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
 }
 
 #[derive(Copy, Clone, Debug)]
-pub(super) enum Place<Prov: Provenance = AllocId> {
+pub(super) enum Place<Prov: Provenance = CtfeProvenance> {
     /// A place referring to a value allocated in the `Memory` system.
     Ptr(MemPlace<Prov>),
 
@@ -195,7 +195,7 @@ pub(super) enum Place<Prov: Provenance = AllocId> {
 }
 
 #[derive(Clone)]
-pub struct PlaceTy<'tcx, Prov: Provenance = AllocId> {
+pub struct PlaceTy<'tcx, Prov: Provenance = CtfeProvenance> {
     place: Place<Prov>, // Keep this private; it helps enforce invariants.
     pub layout: TyAndLayout<'tcx>,
 }
@@ -18,14 +18,14 @@ use rustc_target::abi::{
 use rustc_target::spec::abi::Abi;
 
 use super::{
-    AllocId, FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, Projectable,
-    Provenance, Scalar, StackPopCleanup,
+    CtfeProvenance, FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy,
+    Projectable, Provenance, Scalar, StackPopCleanup,
 };
 use crate::fluent_generated as fluent;
 
 /// An argment passed to a function.
 #[derive(Clone, Debug)]
-pub enum FnArg<'tcx, Prov: Provenance = AllocId> {
+pub enum FnArg<'tcx, Prov: Provenance = CtfeProvenance> {
     /// Pass a copy of the given operand.
     Copy(OpTy<'tcx, Prov>),
     /// Allow for the argument to be passed in-place: destroy the value originally stored at that place and