
Make the necessary changes to support concurrency in Miri.

Vytautas Astrauskas 2020-03-16 15:12:42 -07:00
parent df768c5c8f
commit c53210c550
18 changed files with 135 additions and 57 deletions
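
The core change threaded through the files below is a single refactoring: the virtual call stack is removed from `InterpCx` and stored in each `Machine` implementation instead (`CompileTimeInterpreter`, `ConstPropMachine`), and the interpreter reaches it through two new trait hooks, `Machine::stack` and `Machine::stack_mut`. That seam is what later lets a concurrent machine such as Miri keep a separate stack per thread. A minimal, self-contained sketch of the pattern, with illustrative names rather than rustc's real types:

trait Machine: Sized {
    type Frame;
    // The context no longer owns the stack; it asks the machine for it.
    fn stack<'a>(ctx: &'a InterpCtx<Self>) -> &'a [Self::Frame];
    fn stack_mut<'a>(ctx: &'a mut InterpCtx<Self>) -> &'a mut Vec<Self::Frame>;
}

struct InterpCtx<M: Machine> {
    machine: M,
    // ...memory, vtable cache, etc. would live here, as in the real InterpCx.
}

impl<M: Machine> InterpCtx<M> {
    fn stack(&self) -> &[M::Frame] {
        M::stack(self)
    }
    fn stack_mut(&mut self) -> &mut Vec<M::Frame> {
        M::stack_mut(self)
    }
}

// A single-threaded machine simply keeps one stack inline, the way
// CompileTimeInterpreter and ConstPropMachine do in this commit.
struct SingleThreaded {
    stack: Vec<&'static str>,
}

impl Machine for SingleThreaded {
    type Frame = &'static str;
    fn stack<'a>(ctx: &'a InterpCtx<Self>) -> &'a [&'static str] {
        &ctx.machine.stack
    }
    fn stack_mut<'a>(ctx: &'a mut InterpCtx<Self>) -> &'a mut Vec<&'static str> {
        &mut ctx.machine.stack
    }
}

fn main() {
    let mut ctx = InterpCtx { machine: SingleThreaded { stack: Vec::new() } };
    ctx.stack_mut().push("main");
    assert_eq!(ctx.stack().len(), 1);
    assert_eq!(ctx.stack()[0], "main");
}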

View file

@ -50,7 +50,7 @@ impl Error for ConstEvalErrKind {}
/// Turn an interpreter error into something to report to the user.
/// As a side-effect, if RUSTC_CTFE_BACKTRACE is set, this prints the backtrace.
/// Should be called only if the error is actually going to be reported!
pub fn error_to_const_error<'mir, 'tcx, M: Machine<'mir, 'tcx>>(
pub fn error_to_const_error<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>>(
ecx: &InterpCx<'mir, 'tcx, M>,
mut error: InterpErrorInfo<'tcx>,
) -> ConstEvalErr<'tcx> {

View file

@ -19,7 +19,7 @@ use crate::interpret::{
use super::error::*;
impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter> {
impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
/// Evaluate a const function where all arguments (if any) are zero-sized types.
/// The evaluation is memoized thanks to the query system.
///
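
For orientation, this is the kind of user-level code that path evaluates: a call to a `const fn` whose arguments (here, none) are zero-sized, with the result cached by the query system. A plain-Rust illustration, not interpreter code:

const fn answer() -> u32 {
    2 * 21
}

// The call below is evaluated at compile time through const eval.
const ANSWER: u32 = answer();

fn main() {
    assert_eq!(ANSWER, 42);
}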
@ -86,12 +86,15 @@ impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter> {
}
/// Extra machine state for CTFE, and the Machine instance
pub struct CompileTimeInterpreter {
pub struct CompileTimeInterpreter<'mir, 'tcx> {
/// For now, the number of terminators that can be evaluated before we throw a resource
/// exhaustion error.
///
/// Setting this to `0` disables the limit and allows the interpreter to run forever.
pub steps_remaining: usize,
/// The virtual call stack.
pub(crate) stack: Vec<Frame<'mir, 'tcx, (), ()>>,
}
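
A small sketch of the step-limit rule those sentences describe (a hypothetical free function, not the machine hook itself): the counter is decremented once per terminator, a starting value of `0` skips the check entirely, and an exhaustion error is reported once the budget runs out.

fn observe_terminator(steps_remaining: &mut usize) -> Result<(), &'static str> {
    if *steps_remaining == 0 {
        // A limit of zero disables resource-exhaustion checking.
        return Ok(());
    }
    *steps_remaining -= 1;
    if *steps_remaining == 0 {
        return Err("const-eval step limit reached");
    }
    Ok(())
}

fn main() {
    let mut unlimited = 0;
    assert!(observe_terminator(&mut unlimited).is_ok());

    let mut limited = 2;
    assert!(observe_terminator(&mut limited).is_ok());
    assert!(observe_terminator(&mut limited).is_err()); // budget exhausted
}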
#[derive(Copy, Clone, Debug)]
@ -100,9 +103,9 @@ pub struct MemoryExtra {
pub(super) can_access_statics: bool,
}
impl CompileTimeInterpreter {
impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
pub(super) fn new(const_eval_limit: usize) -> Self {
CompileTimeInterpreter { steps_remaining: const_eval_limit }
CompileTimeInterpreter { steps_remaining: const_eval_limit, stack: Vec::new() }
}
}
@ -156,7 +159,8 @@ impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
}
}
crate type CompileTimeEvalContext<'mir, 'tcx> = InterpCx<'mir, 'tcx, CompileTimeInterpreter>;
crate type CompileTimeEvalContext<'mir, 'tcx> =
InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;
impl interpret::MayLeak for ! {
#[inline(always)]
@ -166,7 +170,7 @@ impl interpret::MayLeak for ! {
}
}
impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter {
impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
type MemoryKind = !;
type PointerTag = ();
type ExtraFnVal = !;
@ -186,6 +190,20 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter {
false
}
#[inline(always)]
fn stack<'a>(
ecx: &'a InterpCx<'mir, 'tcx, Self>,
) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
&ecx.machine.stack
}
#[inline(always)]
fn stack_mut<'a>(
ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
&mut ecx.machine.stack
}
#[inline(always)]
fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
false // for now, we don't enforce validity

View file

@ -12,7 +12,7 @@ use rustc_middle::ty::{self, Ty, TypeAndMut, TypeFoldable};
use rustc_span::symbol::sym;
use rustc_target::abi::{LayoutOf, Size, Variants};
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
pub fn cast(
&mut self,
src: OpTy<'tcx, M::PointerTag>,

View file

@ -39,9 +39,6 @@ pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
/// The virtual memory system.
pub memory: Memory<'mir, 'tcx, M>,
/// The virtual call stack.
pub(crate) stack: Vec<Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>>,
/// A cache for deduplicating vtables
pub(super) vtables:
FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), Pointer<M::PointerTag>>,
@ -295,7 +292,7 @@ pub(super) fn from_known_layout<'tcx>(
}
}
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
pub fn new(
tcx: TyCtxtAt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
@ -307,7 +304,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
tcx,
param_env,
memory: Memory::new(tcx, memory_extra),
stack: Vec::new(),
vtables: FxHashMap::default(),
}
}
@ -348,23 +344,29 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
#[inline(always)]
pub fn stack(&self) -> &[Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>] {
&self.stack
M::stack(self)
}
#[inline(always)]
pub fn stack_mut(&mut self) -> &mut Vec<Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>> {
M::stack_mut(self)
}
#[inline(always)]
pub fn cur_frame(&self) -> usize {
assert!(!self.stack.is_empty());
self.stack.len() - 1
let stack = self.stack();
assert!(!stack.is_empty());
stack.len() - 1
}
#[inline(always)]
pub fn frame(&self) -> &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
self.stack.last().expect("no call frames exist")
self.stack().last().expect("no call frames exist")
}
#[inline(always)]
pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
self.stack.last_mut().expect("no call frames exist")
self.stack_mut().last_mut().expect("no call frames exist")
}
#[inline(always)]
@ -595,7 +597,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
return_place: Option<PlaceTy<'tcx, M::PointerTag>>,
return_to_block: StackPopCleanup,
) -> InterpResult<'tcx> {
if !self.stack.is_empty() {
if !self.stack().is_empty() {
info!("PAUSING({}) {}", self.cur_frame(), self.frame().instance);
}
::log_settings::settings().indentation += 1;
@ -614,7 +616,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
extra: (),
};
let frame = M::init_frame_extra(self, pre_frame)?;
self.stack.push(frame);
self.stack_mut().push(frame);
// don't allocate at all for trivial constants
if body.local_decls.len() > 1 {
@ -649,7 +651,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
M::after_stack_push(self)?;
info!("ENTERING({}) {}", self.cur_frame(), self.frame().instance);
if self.stack.len() > *self.tcx.sess.recursion_limit.get() {
if self.stack().len() > *self.tcx.sess.recursion_limit.get() {
throw_exhaust!(StackFrameLimitReached)
} else {
Ok(())
@ -719,7 +721,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
);
::log_settings::settings().indentation -= 1;
let frame = self.stack.pop().expect("tried to pop a stack frame, but there were none");
let frame =
self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");
// Now where do we jump next?
@ -734,7 +737,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
};
if !cleanup {
assert!(self.stack.is_empty(), "only the topmost frame should ever be leaked");
assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
assert!(next_block.is_none(), "tried to skip cleanup when we have a next block!");
assert!(!unwinding, "tried to skip cleanup during unwinding");
// Leak the locals, skip validation, skip machine hook.
@ -783,7 +786,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
}
if !self.stack.is_empty() {
if !self.stack().is_empty() {
info!(
"CONTINUING({}) {} (unwinding = {})",
self.cur_frame(),
@ -899,7 +902,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
write!(msg, ":").unwrap();
match self.stack[frame].locals[local].value {
match self.stack()[frame].locals[local].value {
LocalValue::Dead => write!(msg, " is dead").unwrap(),
LocalValue::Uninitialized => write!(msg, " is uninitialized").unwrap(),
LocalValue::Live(Operand::Indirect(mplace)) => match mplace.ptr {

View file

@ -148,7 +148,7 @@ impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> InternVisitor<'rt, 'mir
}
}
impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
for InternVisitor<'rt, 'mir, 'tcx, M>
{
type V = MPlaceTy<'tcx>;
@ -284,7 +284,10 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
intern_kind: InternKind,
ret: MPlaceTy<'tcx>,
ignore_interior_mut_in_const_validation: bool,
) -> InterpResult<'tcx> {
) -> InterpResult<'tcx>
where
'tcx: 'mir,
{
let tcx = ecx.tcx;
let (base_mutability, base_intern_mode) = match intern_kind {
// `static mut` doesn't care about interior mutability, it's mutable anyway

View file

@ -73,7 +73,7 @@ crate fn eval_nullary_intrinsic<'tcx>(
})
}
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Returns `true` if emulation happened.
pub fn emulate_intrinsic(
&mut self,

View file

@ -10,11 +10,11 @@ use crate::interpret::{
MPlaceTy, MemoryKind, Scalar,
};
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Walks up the callstack from the intrinsic's callsite, searching for the first callsite in a
/// frame which is not `#[track_caller]`.
crate fn find_closest_untracked_caller_location(&self) -> Span {
self.stack
self.stack()
.iter()
.rev()
// Find first non-`#[track_caller]` frame.
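
A self-contained illustration of that walk, using a stand-in `Frame` type rather than the interpreter's: scan the stack from the top and return the location recorded in the first frame that is not `#[track_caller]`.

struct Frame {
    track_caller: bool,
    location: &'static str,
}

fn closest_untracked_caller_location(stack: &[Frame]) -> Option<&'static str> {
    stack
        .iter()
        .rev() // walk from the innermost (topmost) frame outwards
        .find(|frame| !frame.track_caller)
        .map(|frame| frame.location)
}

fn main() {
    let stack = vec![
        Frame { track_caller: false, location: "main.rs:3:5" },
        Frame { track_caller: true, location: "track.rs:10:1" },
    ];
    assert_eq!(closest_untracked_caller_location(&stack), Some("main.rs:3:5"));
}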

View file

@ -120,6 +120,16 @@ pub trait Machine<'mir, 'tcx>: Sized {
/// Whether memory accesses should be alignment-checked.
fn enforce_alignment(memory_extra: &Self::MemoryExtra) -> bool;
/// Borrow the current thread's stack.
fn stack<'a>(
ecx: &'a InterpCx<'mir, 'tcx, Self>,
) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>];
/// Mutably borrow the current thread's stack.
fn stack_mut<'a>(
ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>>;
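
These two hooks are the concurrency seam: a single-threaded machine returns its one inline stack (as the implementations below do), while a concurrent machine such as Miri can keep one stack per thread and hand out whichever belongs to the currently active thread. A hedged, self-contained sketch of the multi-threaded side, with made-up names:

struct ThreadedStacks {
    active_thread: usize,
    // One virtual call stack per virtual thread.
    stacks: Vec<Vec<&'static str>>,
}

impl ThreadedStacks {
    fn stack(&self) -> &[&'static str] {
        &self.stacks[self.active_thread]
    }
    fn stack_mut(&mut self) -> &mut Vec<&'static str> {
        &mut self.stacks[self.active_thread]
    }
}

fn main() {
    let mut machine = ThreadedStacks { active_thread: 0, stacks: vec![Vec::new(), Vec::new()] };
    machine.stack_mut().push("thread 0: main");
    machine.active_thread = 1; // a scheduler switch makes a different stack current
    assert!(machine.stack().is_empty());
}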
/// Whether to enforce the validity invariant
fn enforce_validity(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool;
@ -230,6 +240,20 @@ pub trait Machine<'mir, 'tcx>: Sized {
id
}
/// In Rust, thread locals are just special statics. Therefore, the compiler
/// uses the same code for allocating both. However, in Miri we want to maintain
/// the property that each allocation has a unique id and, therefore, we
/// generate a fresh allocation id for each thread. This function takes a
/// potentially thread-local allocation id and resolves the original static
/// allocation id that can be used to compute the value of the static.
#[inline]
fn resolve_thread_local_allocation_id(
_memory_extra: &Self::MemoryExtra,
id: AllocId,
) -> AllocId {
id
}
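
An illustrative sketch of the mapping described above (field and type names are hypothetical, not Miri's): each thread gets a fresh id for its copy of a thread-local static, and resolution maps that id back to the original static; every other id resolves to itself, matching the identity default of the trait method.

use std::collections::HashMap;

type AllocId = u64;

#[derive(Default)]
struct MemoryExtra {
    // per-thread allocation id -> allocation id of the original static
    thread_local_origin: HashMap<AllocId, AllocId>,
}

fn resolve_thread_local_allocation_id(extra: &MemoryExtra, id: AllocId) -> AllocId {
    *extra.thread_local_origin.get(&id).unwrap_or(&id)
}

fn main() {
    let mut extra = MemoryExtra::default();
    extra.thread_local_origin.insert(17, 3); // thread-local copy 17 came from static 3
    assert_eq!(resolve_thread_local_allocation_id(&extra, 17), 3);
    assert_eq!(resolve_thread_local_allocation_id(&extra, 5), 5);
}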
/// Called to initialize the "extra" state of an allocation and make the pointers
/// it contains (in relocations) tagged. The way we construct allocations is
/// to always first construct it without extra and then add the extra.
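
A simplified sketch of that two-phase construction (types reduced to the bare minimum; the real hook also tags the pointers in relocations): first build the allocation with no machine state, then let the machine attach its `Extra`.

struct Allocation<Extra> {
    bytes: Vec<u8>,
    extra: Extra,
}

struct MachineExtra {
    tagged: bool,
}

// Stand-in for the machine hook: consume the plain allocation, return one
// carrying machine-specific extra state.
fn init_allocation_extra(alloc: Allocation<()>) -> Allocation<MachineExtra> {
    Allocation { bytes: alloc.bytes, extra: MachineExtra { tagged: true } }
}

fn main() {
    let plain = Allocation { bytes: vec![0u8; 8], extra: () };
    let initialized = init_allocation_extra(plain);
    assert!(initialized.extra.tagged);
    assert_eq!(initialized.bytes.len(), 8);
}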

View file

@ -429,7 +429,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
id: AllocId,
is_write: bool,
) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::PointerTag, M::AllocExtra>>> {
let alloc = tcx.alloc_map.lock().get(id);
let alloc =
tcx.alloc_map.lock().get(M::resolve_thread_local_allocation_id(memory_extra, id));
let (alloc, def_id) = match alloc {
Some(GlobalAlloc::Memory(mem)) => {
// Memory of a constant or promoted or anonymous memory referenced by a static.
@ -592,7 +593,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// # Statics
// Can't do this in the match argument, we may get cycle errors since the lock would
// be held throughout the match.
let alloc = self.tcx.alloc_map.lock().get(id);
let alloc =
self.tcx.alloc_map.lock().get(M::resolve_thread_local_allocation_id(&self.extra, id));
match alloc {
Some(GlobalAlloc::Static(did)) => {
// Use size and align of the type.

View file

@ -208,7 +208,7 @@ impl<'tcx, Tag: Copy> ImmTy<'tcx, Tag> {
}
}
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Normalize `place.ptr` to a `Pointer` if this is a place and not a ZST.
/// Can be helpful to avoid lots of `force_ptr` calls later, if this place is used a lot.
#[inline]
@ -439,7 +439,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
let op = match *place {
Place::Ptr(mplace) => Operand::Indirect(mplace),
Place::Local { frame, local } => *self.access_local(&self.stack[frame], local, None)?,
Place::Local { frame, local } => {
*self.access_local(&self.stack()[frame], local, None)?
}
};
Ok(OpTy { op, layout: place.layout })
}

View file

@ -9,7 +9,7 @@ use rustc_target::abi::LayoutOf;
use super::{ImmTy, Immediate, InterpCx, Machine, PlaceTy};
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Applies the binary operation `op` to the two operands and writes a tuple of the result
/// and a boolean signifying the potential overflow to the destination.
pub fn binop_with_overflow(
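
The `(result, overflowed)` pair that `binop_with_overflow` writes to its destination has the same shape as Rust's own overflowing operations; a plain-Rust illustration:

fn main() {
    // 200 + 100 does not fit in a u8: the value wraps and the flag is set.
    let (value, overflowed) = 200u8.overflowing_add(100);
    assert_eq!((value, overflowed), (44, true));

    // 200 + 10 fits: no overflow reported.
    let (value, overflowed) = 200u8.overflowing_add(10);
    assert_eq!((value, overflowed), (210, false));
}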
@ -45,7 +45,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
}
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
fn binary_char_op(
&self,
bin_op: mir::BinOp,

View file

@ -283,7 +283,7 @@ impl<'tcx, Tag: ::std::fmt::Debug> PlaceTy<'tcx, Tag> {
}
// separating the pointer tag for `impl Trait`, see https://github.com/rust-lang/rust/issues/54385
impl<'mir, 'tcx, Tag, M> InterpCx<'mir, 'tcx, M>
impl<'mir, 'tcx: 'mir, Tag, M> InterpCx<'mir, 'tcx, M>
where
// FIXME: Working around https://github.com/rust-lang/rust/issues/54385
Tag: ::std::fmt::Debug + Copy + Eq + Hash + 'static,
@ -755,7 +755,7 @@ where
// but not factored as a separate function.
let mplace = match dest.place {
Place::Local { frame, local } => {
match self.stack[frame].locals[local].access_mut()? {
match self.stack_mut()[frame].locals[local].access_mut()? {
Ok(local) => {
// Local can be updated in-place.
*local = LocalValue::Live(Operand::Immediate(src));
@ -985,14 +985,15 @@ where
) -> InterpResult<'tcx, (MPlaceTy<'tcx, M::PointerTag>, Option<Size>)> {
let (mplace, size) = match place.place {
Place::Local { frame, local } => {
match self.stack[frame].locals[local].access_mut()? {
match self.stack_mut()[frame].locals[local].access_mut()? {
Ok(&mut local_val) => {
// We need to make an allocation.
// We need the layout of the local. We can NOT use the layout we got,
// that might e.g., be an inner field of a struct with `Scalar` layout,
// that has different alignment than the outer field.
let local_layout = self.layout_of_local(&self.stack[frame], local, None)?;
let local_layout =
self.layout_of_local(&self.stack()[frame], local, None)?;
// We also need to support unsized types, and hence cannot use `allocate`.
let (size, align) = self
.size_and_align_of(meta, local_layout)?
@ -1008,7 +1009,7 @@ where
}
// Now we can call `access_mut` again, asserting it goes well,
// and actually overwrite things.
*self.stack[frame].locals[local].access_mut().unwrap().unwrap() =
*self.stack_mut()[frame].locals[local].access_mut().unwrap().unwrap() =
LocalValue::Live(Operand::Indirect(mplace));
(mplace, Some(size))
}

View file

@ -29,7 +29,7 @@ fn binop_right_homogeneous(op: mir::BinOp) -> bool {
}
}
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
pub fn run(&mut self) -> InterpResult<'tcx> {
while self.step()? {}
Ok(())
@ -42,7 +42,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// This is marked `#[inline(always)]` to work around adversarial codegen when `opt-level = 3`
#[inline(always)]
pub fn step(&mut self) -> InterpResult<'tcx, bool> {
if self.stack.is_empty() {
if self.stack().is_empty() {
return Ok(false);
}
@ -126,7 +126,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
LlvmInlineAsm { .. } => throw_unsup_format!("inline assembly is not supported"),
}
self.stack[frame_idx].stmt += 1;
self.stack_mut()[frame_idx].stmt += 1;
Ok(())
}
@ -278,7 +278,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.set_span(terminator.source_info.span);
self.eval_terminator(terminator)?;
if !self.stack.is_empty() {
if !self.stack().is_empty() {
if let Some(block) = self.frame().block {
info!("// executing {:?}", block);
}

View file

@ -11,7 +11,7 @@ use super::{
FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, StackPopCleanup,
};
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
pub(super) fn eval_terminator(
&mut self,
terminator: &mir::Terminator<'tcx>,
@ -372,7 +372,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
};
match res {
Err(err) => {
self.stack.pop();
self.stack_mut().pop();
Err(err)
}
Ok(()) => Ok(()),

View file

@ -6,7 +6,7 @@ use rustc_target::abi::{Align, HasDataLayout, LayoutOf, Size};
use super::{FnVal, InterpCx, Machine, MemoryKind};
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Creates a dynamic vtable for the given type and vtable origin. This is used only for
/// trait objects.
///

View file

@ -177,7 +177,7 @@ struct ValidityVisitor<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
ecx: &'rt InterpCx<'mir, 'tcx, M>,
}
impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M> {
impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M> {
fn aggregate_field_path_elem(&mut self, layout: TyAndLayout<'tcx>, field: usize) -> PathElem {
// First, check if we are projecting to a variant.
match layout.variants {
@ -604,7 +604,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M
}
}
impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
for ValidityVisitor<'rt, 'mir, 'tcx, M>
{
type V = OpTy<'tcx, M::PointerTag>;
@ -806,7 +806,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
}
}
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
fn validate_operand_internal(
&self,
op: OpTy<'tcx, M::PointerTag>,

View file

@ -35,7 +35,7 @@ pub trait Value<'mir, 'tcx, M: Machine<'mir, 'tcx>>: Copy {
// Operands and memory-places are both values.
// Places in general are not due to `place_field` having to do `force_allocation`.
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Value<'mir, 'tcx, M> for OpTy<'tcx, M::PointerTag> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> Value<'mir, 'tcx, M> for OpTy<'tcx, M::PointerTag> {
#[inline(always)]
fn layout(&self) -> TyAndLayout<'tcx> {
self.layout
@ -73,7 +73,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Value<'mir, 'tcx, M> for OpTy<'tcx, M::
}
}
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Value<'mir, 'tcx, M> for MPlaceTy<'tcx, M::PointerTag> {
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> Value<'mir, 'tcx, M>
for MPlaceTy<'tcx, M::PointerTag>
{
#[inline(always)]
fn layout(&self) -> TyAndLayout<'tcx> {
self.layout

View file

@ -158,9 +158,18 @@ impl<'tcx> MirPass<'tcx> for ConstProp {
}
}
struct ConstPropMachine;
struct ConstPropMachine<'mir, 'tcx> {
/// The virtual call stack.
stack: Vec<Frame<'mir, 'tcx, (), ()>>,
}
impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine {
impl<'mir, 'tcx> ConstPropMachine<'mir, 'tcx> {
fn new() -> Self {
Self { stack: Vec::new() }
}
}
impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx> {
type MemoryKind = !;
type PointerTag = ();
type ExtraFnVal = !;
@ -178,6 +187,20 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine {
false
}
#[inline(always)]
fn stack<'a>(
ecx: &'a InterpCx<'mir, 'tcx, Self>,
) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
&ecx.machine.stack
}
#[inline(always)]
fn stack_mut<'a>(
ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
&mut ecx.machine.stack
}
#[inline(always)]
fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
false
@ -300,7 +323,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine {
/// Finds optimization opportunities on the MIR.
struct ConstPropagator<'mir, 'tcx> {
ecx: InterpCx<'mir, 'tcx, ConstPropMachine>,
ecx: InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>,
tcx: TyCtxt<'tcx>,
can_const_prop: IndexVec<Local, ConstPropMode>,
param_env: ParamEnv<'tcx>,
@ -349,7 +372,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
let param_env = tcx.param_env(def_id).with_reveal_all();
let span = tcx.def_span(def_id);
let mut ecx = InterpCx::new(tcx.at(span), param_env, ConstPropMachine, ());
let mut ecx = InterpCx::new(tcx.at(span), param_env, ConstPropMachine::new(), ());
let can_const_prop = CanConstProp::check(body);
let ret = ecx