
normalization change and rebase

b-naber 2022-03-08 15:05:50 +01:00
parent 8a811a1213
commit 26fe550670
49 changed files with 291 additions and 345 deletions


@@ -164,7 +164,7 @@ pub enum StackPopCleanup {
 }
 
 /// State of a local variable including a memoized layout
-#[derive(Clone, PartialEq, Eq, HashStable)]
+#[derive(Clone, Debug, PartialEq, Eq, HashStable)]
 pub struct LocalState<'tcx, Tag: Provenance = AllocId> {
     pub value: LocalValue<Tag>,
     /// Don't modify if `Some`, this is only used to prevent computing the layout twice
@@ -714,6 +714,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         self.size_and_align_of(&mplace.meta, &mplace.layout)
     }
 
+    #[instrument(skip(self, body, return_place, return_to_block), level = "debug")]
     pub fn push_stack_frame(
         &mut self,
         instance: ty::Instance<'tcx>,
@@ -721,6 +722,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         return_place: Option<&PlaceTy<'tcx, M::PointerTag>>,
         return_to_block: StackPopCleanup,
     ) -> InterpResult<'tcx> {
+        debug!("body: {:#?}", body);
         // first push a stack frame so we have access to the local substs
         let pre_frame = Frame {
             body,
@@ -824,6 +826,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     /// `Drop` impls for any locals that have been initialized at this point.
     /// The cleanup block ends with a special `Resume` terminator, which will
     /// cause us to continue unwinding.
+    #[instrument(skip(self), level = "debug")]
     pub(super) fn pop_stack_frame(&mut self, unwinding: bool) -> InterpResult<'tcx> {
         info!(
             "popping stack frame ({})",
@@ -876,6 +879,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             return Ok(());
         }
 
+        debug!("locals: {:#?}", frame.locals);
+
         // Cleanup: deallocate all locals that are backed by an allocation.
         for local in &frame.locals {
             self.deallocate_local(local.value)?;
@@ -935,6 +940,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         Ok(())
     }
 
+    #[instrument(skip(self), level = "debug")]
     fn deallocate_local(&mut self, local: LocalValue<M::PointerTag>) -> InterpResult<'tcx> {
         if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
             // All locals have a backing allocation, even if the allocation is empty
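
The attributes added in the hunks above are tracing's `#[instrument]` macro, which opens a span for each call and records the non-skipped arguments as fields at the given level. A minimal standalone sketch of the pattern, using the public `tracing` and `tracing-subscriber` crates and a made-up `Frame` type rather than the interpreter's real `Frame`/`InterpCx`:

use tracing::{debug, instrument};

#[derive(Debug)]
struct Frame {
    locals: Vec<u32>,
}

// `skip(stack)` keeps the potentially large stack out of the span's fields,
// mirroring the `skip(self, body, ...)` arguments above; `level = "debug"`
// means the span is only emitted at debug verbosity.
#[instrument(skip(stack), level = "debug")]
fn pop_stack_frame(stack: &mut Vec<Frame>, unwinding: bool) -> Option<Frame> {
    let frame = stack.pop()?;
    // `{:#?}` pretty-prints the value via its `Debug` impl, as in the
    // `debug!("locals: {:#?}", frame.locals)` line added above.
    debug!("locals: {:#?}", frame.locals);
    Some(frame)
}

fn main() {
    // Install a subscriber so debug-level spans and events are printed.
    tracing_subscriber::fmt().with_max_level(tracing::Level::DEBUG).init();
    let mut stack = vec![Frame { locals: vec![0, 1, 2] }];
    pop_stack_frame(&mut stack, false);
}

With a subscriber at DEBUG, each call prints a `pop_stack_frame{unwinding=false}` span around the `locals: ...` event. This is also why the first hunk adds `Debug` to the derives on `LocalState`: values logged with `{:#?}` must implement `Debug`.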


@@ -359,6 +359,8 @@ pub fn intern_const_alloc_recursive<
     // pointers, ... So we can't intern them according to their type rules
     let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect();
+    debug!(?todo);
+    debug!("dead_alloc_map: {:#?}", ecx.memory.dead_alloc_map);
     while let Some(alloc_id) = todo.pop() {
         if let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) {
             // We can't call the `intern_shallow` method here, as its logic is tailored to safe
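
Both `debug!` styles in this hunk are supported by the same macro: `?value` records a structured field using the value's `Debug` impl, while a format string with `{:#?}` embeds the pretty-printed value in the message text. A small, hypothetical illustration with stand-in data (not the interpreter's real `todo` list or `dead_alloc_map`):

use std::collections::HashMap;
use tracing::debug;

fn main() {
    tracing_subscriber::fmt().with_max_level(tracing::Level::DEBUG).init();

    let todo: Vec<u64> = vec![3, 1, 4];
    let dead_alloc_map: HashMap<u64, (u64, &str)> = HashMap::from([(7, (16, "stack"))]);

    // `?todo` captures a field named `todo`, formatted with `Debug`.
    debug!(?todo);
    // The format-string form pretty-prints the map into the event's message.
    debug!("dead_alloc_map: {:#?}", dead_alloc_map);
}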


@@ -275,6 +275,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
         Ok(new_ptr)
     }
 
+    #[instrument(skip(self), level = "debug")]
     pub fn deallocate(
         &mut self,
         ptr: Pointer<Option<M::PointerTag>>,
@@ -305,6 +306,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
             .into());
         };
 
+        debug!(?alloc);
+
         if alloc.mutability == Mutability::Not {
             throw_ub_format!("deallocating immutable allocation {}", alloc_id);
         }