get rid of FinishStatic hack from stack cleanup; const_eval can do that itself
parent ef96a60a4d
commit 9cfc9f0765
4 changed files with 28 additions and 42 deletions
@@ -157,14 +157,6 @@ fn eval_body_using_ecx<'a, 'mir, 'tcx>(
     let layout = ecx.layout_of(mir.return_ty().subst(tcx, cid.instance.substs))?;
     assert!(!layout.is_unsized());
     let ret = ecx.allocate(layout, MemoryKind::Stack)?;
-    let internally_mutable = !layout.ty.is_freeze(tcx, param_env, mir.span);
-    let is_static = tcx.is_static(cid.instance.def_id());
-    let mutability = if is_static == Some(hir::Mutability::MutMutable) || internally_mutable {
-        Mutability::Mutable
-    } else {
-        Mutability::Immutable
-    };
-    let cleanup = StackPopCleanup::FinishStatic(mutability);

     let name = ty::tls::with(|tcx| tcx.item_path_str(cid.instance.def_id()));
     let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
@@ -175,12 +167,22 @@ fn eval_body_using_ecx<'a, 'mir, 'tcx>(
         mir.span,
         mir,
         Place::Ptr(*ret),
-        cleanup,
+        StackPopCleanup::None { cleanup: false },
     )?;

     // The main interpreter loop.
     ecx.run()?;

+    // Intern the result
+    let internally_mutable = !layout.ty.is_freeze(tcx, param_env, mir.span);
+    let is_static = tcx.is_static(cid.instance.def_id());
+    let mutability = if is_static == Some(hir::Mutability::MutMutable) || internally_mutable {
+        Mutability::Mutable
+    } else {
+        Mutability::Immutable
+    };
+    ecx.memory.intern_static(ret.ptr.to_ptr()?.alloc_id, mutability)?;
+
     debug!("eval_body_using_ecx done: {:?}", *ret);
     Ok(ret.into())
 }
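To make the moved logic concrete: the frame for a static/const is now pushed with `StackPopCleanup::None { cleanup: false }`, and only after `ecx.run()` does const_eval decide how to intern the result. The mutability decision itself is unchanged; here is a minimal, self-contained sketch of it (hypothetical standalone types, not rustc's actual `Mutability` or layout machinery):

```rust
/// Stand-in for `syntax::ast::Mutability` (illustration only).
#[derive(Clone, Copy, Debug, PartialEq)]
enum Mutability {
    Mutable,
    Immutable,
}

/// Mirrors `is_static == Some(MutMutable) || internally_mutable` from the diff.
fn result_mutability(is_mutable_static: bool, internally_mutable: bool) -> Mutability {
    if is_mutable_static || internally_mutable {
        Mutability::Mutable
    } else {
        Mutability::Immutable
    }
}

fn main() {
    // A static holding e.g. a `Cell<i32>` is internally mutable (not `Freeze`),
    // so its backing allocation must be interned as mutable.
    assert_eq!(result_mutability(false, true), Mutability::Mutable);
    // An ordinary const/static of a freeze type is interned immutably.
    assert_eq!(result_mutability(false, false), Mutability::Immutable);
}
```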
@@ -32,7 +32,6 @@ use rustc::mir::interpret::{
 };

 use syntax::source_map::{self, Span};
-use syntax::ast::Mutability;

 use super::{
     Value, Operand, MemPlace, MPlaceTy, Place, PlaceExtra,
@@ -159,15 +158,14 @@ impl<'mir, 'tcx: 'mir> Hash for Frame<'mir, 'tcx> {

 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
 pub enum StackPopCleanup {
-    /// The stackframe existed to compute the initial value of a static/constant.
-    /// Call `M::intern_static` on the return value and all allocations it references
-    /// when this is done. Must have a valid pointer as return place.
-    FinishStatic(Mutability),
     /// Jump to the next block in the caller, or cause UB if None (that's a function
     /// that may never return).
     Goto(Option<mir::BasicBlock>),
-    /// Just do nohing: Used by Main and for the box_alloc hook in miri
-    None,
+    /// Just do nohing: Used by Main and for the box_alloc hook in miri.
+    /// `cleanup` says whether locals are deallocated. Static computation
+    /// wants them leaked to intern what they need (and just throw away
+    /// the entire `ecx` when it is done).
+    None { cleanup: bool },
 }

 // State of a local variable
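A self-contained model of the reshaped enum may help (a hypothetical simplification: `usize` stands in for `mir::BasicBlock`). The point of the new `cleanup` flag is to record whether popping the frame may deallocate its locals:

```rust
/// Simplified model of `StackPopCleanup` after this commit.
#[derive(Clone, Debug, PartialEq)]
enum StackPopCleanup {
    /// Jump to the given block in the caller (UB if `None`).
    Goto(Option<usize>),
    /// Do nothing on pop; `cleanup` says whether locals get deallocated.
    None { cleanup: bool },
}

/// Does popping a frame with this cleanup mode free its locals?
fn deallocates_locals(c: &StackPopCleanup) -> bool {
    match c {
        StackPopCleanup::Goto(_) => true,
        StackPopCleanup::None { cleanup } => *cleanup,
    }
}

fn main() {
    // The frame pushed by `eval_body_using_ecx` leaks its locals on purpose,
    // so const_eval can intern the result before throwing the `ecx` away.
    assert!(!deallocates_locals(&StackPopCleanup::None { cleanup: false }));
    // An ordinary call frame still cleans up when popped.
    assert!(deallocates_locals(&StackPopCleanup::Goto(Some(0))));
}
```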
@@ -631,18 +629,15 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
             "tried to pop a stack frame, but there were none",
         );
         match frame.return_to_block {
-            StackPopCleanup::FinishStatic(mutability) => {
-                let mplace = frame.return_place.to_mem_place();
-                // to_ptr should be okay here; it is the responsibility of whoever pushed
-                // this frame to make sure that this works.
-                let ptr = mplace.ptr.to_ptr()?;
-                assert_eq!(ptr.offset.bytes(), 0);
-                self.memory.mark_static_initialized(ptr.alloc_id, mutability)?;
-            }
             StackPopCleanup::Goto(block) => {
                 self.goto_block(block)?;
             }
-            StackPopCleanup::None => { }
+            StackPopCleanup::None { cleanup } => {
+                if !cleanup {
+                    // Leak the locals
+                    return Ok(());
+                }
+            }
         }
         // deallocate all locals that are backed by an allocation
         for local in frame.locals {
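The same early return, as a runnable toy model of the pop path (hypothetical `Frame` and `Evaluator` types, not the real interpreter): with `cleanup: false` the function bails out before the deallocation loop, so the frame's locals stay alive for the interning pass.

```rust
#[derive(Clone, Debug)]
enum StackPopCleanup {
    Goto(Option<usize>),
    None { cleanup: bool },
}

/// Toy frame: each local is an optionally live heap allocation.
struct Frame {
    return_to_block: StackPopCleanup,
    locals: Vec<Option<Box<[u8]>>>,
}

struct Evaluator {
    stack: Vec<Frame>,
}

impl Evaluator {
    fn pop_stack_frame(&mut self) -> Result<(), String> {
        let frame = self
            .stack
            .pop()
            .ok_or_else(|| "tried to pop a stack frame, but there were none".to_string())?;
        match frame.return_to_block {
            StackPopCleanup::Goto(_block) => {
                // A real interpreter would jump to `_block` in the caller here.
            }
            StackPopCleanup::None { cleanup } => {
                if !cleanup {
                    // Leak the locals: const_eval interns what it needs and
                    // then discards the whole evaluator anyway.
                    std::mem::forget(frame.locals);
                    return Ok(());
                }
            }
        }
        // Deallocate all locals that are backed by an allocation.
        for local in frame.locals {
            drop(local);
        }
        Ok(())
    }
}

fn main() {
    let mut ecx = Evaluator { stack: Vec::new() };

    // The const-eval frame leaks its locals on pop.
    ecx.stack.push(Frame {
        return_to_block: StackPopCleanup::None { cleanup: false },
        locals: vec![Some(vec![0u8; 4].into_boxed_slice()), None],
    });
    ecx.pop_stack_frame().unwrap();

    // An ordinary frame with `Goto` still runs the deallocation loop.
    ecx.stack.push(Frame {
        return_to_block: StackPopCleanup::Goto(Some(0)),
        locals: vec![Some(vec![1u8; 2].into_boxed_slice())],
    });
    ecx.pop_stack_frame().unwrap();
}
```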
@@ -570,22 +570,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {

 /// Reading and writing
 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
-    /// mark an allocation pointed to by a static as static and initialized
-    fn mark_inner_allocation_initialized(
-        &mut self,
-        alloc: AllocId,
-        mutability: Mutability,
-    ) -> EvalResult<'tcx> {
-        match self.alloc_map.contains_key(&alloc) {
-            // already interned
-            false => Ok(()),
-            // this still needs work
-            true => self.mark_static_initialized(alloc, mutability),
-        }
-    }
-
     /// mark an allocation as static and initialized, either mutable or not
-    pub fn mark_static_initialized(
+    pub fn intern_static(
         &mut self,
         alloc_id: AllocId,
         mutability: Mutability,
@@ -613,7 +599,10 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
             // at references. So whenever we follow a reference, we should likely
             // assume immutability -- and we should make sure that the compiler
             // does not permit code that would break this!
-            self.mark_inner_allocation_initialized(alloc, mutability)?;
+            if self.alloc_map.contains_key(&alloc) {
+                // Not yet interned, so proceed recursively
+                self.intern_static(alloc, mutability)?;
+            }
         }
         Ok(())
     }
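For intuition, here is a standalone sketch of that recursion over a toy allocation graph (a `HashMap` from allocation id to referenced ids; names are hypothetical and the mutability argument is omitted for brevity): interning removes an allocation from the working map and then recurses only into referenced allocations that are still pending.

```rust
use std::collections::{HashMap, HashSet};

type AllocId = u64;

/// Toy memory: `alloc_map` holds not-yet-interned allocations and the
/// allocation ids they reference; `interned` records finished ones.
struct Memory {
    alloc_map: HashMap<AllocId, Vec<AllocId>>,
    interned: HashSet<AllocId>,
}

impl Memory {
    /// Simplified counterpart of `intern_static`: move one allocation out of
    /// the working map and recurse into the allocations it points to.
    fn intern_static(&mut self, alloc_id: AllocId) -> Result<(), String> {
        let refs = self
            .alloc_map
            .remove(&alloc_id)
            .ok_or_else(|| format!("allocation {} is not pending interning", alloc_id))?;
        self.interned.insert(alloc_id);
        for alloc in refs {
            if self.alloc_map.contains_key(&alloc) {
                // Not yet interned, so proceed recursively.
                self.intern_static(alloc)?;
            }
        }
        Ok(())
    }
}

fn main() {
    let mut memory = Memory {
        // 1 references 2 and 3; 2 references 3 again (a shared allocation).
        alloc_map: HashMap::from([(1, vec![2, 3]), (2, vec![3]), (3, vec![])]),
        interned: HashSet::new(),
    };
    memory.intern_static(1).unwrap();
    assert!(memory.alloc_map.is_empty());
    assert_eq!(memory.interned.len(), 3);
}
```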
@@ -68,7 +68,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
             }
         }

-        self.memory.mark_static_initialized(
+        self.memory.intern_static(
             vtable.alloc_id,
             Mutability::Immutable,
         )?;
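At the vtable call site only the name changes: it still passes `vtable.alloc_id` with `Mutability::Immutable`, so vtable allocations keep being interned read-only through the renamed `intern_static` entry point.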