
Auto merge of #61430 - matthewjasper:drop-on-into-panic, r=oli-obk

Make `into` schedule drop for the destination

closes #47949
bors 2019-10-07 17:47:10 +00:00
commit f3c9cece7b
17 changed files with 607 additions and 272 deletions
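
For context, the bug class this closes (#47949, also the subject of the FIXME comments removed from the drop-order tests below): once `into` had initialized a destination, its drop was not yet scheduled, so a panic in a destructor that ran in that window leaked the already-initialized value. A minimal sketch of the affected shape of program; the names here are illustrative, not taken from the test suite:

```rust
struct PanicsOnDrop;

impl Drop for PanicsOnDrop {
    fn drop(&mut self) {
        // Stand-in for a destructor that can panic.
        panic!("panic while dropping");
    }
}

fn panic_after_init() {
    let q = {
        let _guard = PanicsOnDrop;
        // The String is written into `q`'s destination first; `_guard` is
        // dropped as the block exits. Before this change the drop for `q`
        // was only scheduled after the whole block expression had finished,
        // so the panic in `PanicsOnDrop::drop` leaked the String. Now `into`
        // schedules the drop as soon as the destination is initialized, and
        // the String is dropped during unwinding instead.
        String::from("payload")
    };
    drop(q);
}

fn main() {
    let _ = std::panic::catch_unwind(panic_after_init);
}
```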


@ -1,18 +1,22 @@
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
use crate::build::ForGuard::OutsideGuard; use crate::build::ForGuard::OutsideGuard;
use crate::build::matches::ArmHasGuard; use crate::build::matches::ArmHasGuard;
use crate::build::scope::DropKind;
use crate::hair::*; use crate::hair::*;
use rustc::middle::region;
use rustc::mir::*; use rustc::mir::*;
use rustc::hir; use rustc::hir;
use syntax_pos::Span; use syntax_pos::Span;
impl<'a, 'tcx> Builder<'a, 'tcx> { impl<'a, 'tcx> Builder<'a, 'tcx> {
-    pub fn ast_block(&mut self,
-                     destination: &Place<'tcx>,
-                     block: BasicBlock,
-                     ast_block: &'tcx hir::Block,
-                     source_info: SourceInfo)
-                     -> BlockAnd<()> {
+    pub fn ast_block(
+        &mut self,
+        destination: &Place<'tcx>,
+        scope: Option<region::Scope>,
+        block: BasicBlock,
+        ast_block: &'tcx hir::Block,
+        source_info: SourceInfo,
+    ) -> BlockAnd<()> {
let Block { let Block {
region_scope, region_scope,
opt_destruction_scope, opt_destruction_scope,
@ -21,37 +25,61 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
expr, expr,
targeted_by_break, targeted_by_break,
safety_mode safety_mode
-        } =
-            self.hir.mirror(ast_block);
+        } = self.hir.mirror(ast_block);
self.in_opt_scope(opt_destruction_scope.map(|de|(de, source_info)), move |this| { self.in_opt_scope(opt_destruction_scope.map(|de|(de, source_info)), move |this| {
this.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| { this.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| {
if targeted_by_break { if targeted_by_break {
// This is a `break`-able block // This is a `break`-able block
let exit_block = this.cfg.start_new_block(); let exit_block = this.cfg.start_new_block();
if let Some(scope) = scope {
// Breakable blocks assign to their destination on each
// `break`, as well as when they exit normally. So we
// can't schedule the drop in the last expression like
// normal blocks do.
let local = destination.as_local()
.expect("cannot schedule drop of non-Local place");
this.schedule_drop(span, scope, local, DropKind::Value);
}
let block_exit = this.in_breakable_scope( let block_exit = this.in_breakable_scope(
None, exit_block, destination.clone(), |this| { None, exit_block, destination.clone(), |this| {
-                        this.ast_block_stmts(destination, block, span, stmts, expr,
-                                             safety_mode)
+                        this.ast_block_stmts(
+                            destination,
+                            None,
+                            block,
+                            span,
+                            stmts,
+                            expr,
+                            safety_mode,
+                        )
}); });
this.cfg.terminate(unpack!(block_exit), source_info, this.cfg.terminate(unpack!(block_exit), source_info,
TerminatorKind::Goto { target: exit_block }); TerminatorKind::Goto { target: exit_block });
exit_block.unit() exit_block.unit()
} else { } else {
-                    this.ast_block_stmts(destination, block, span, stmts, expr,
-                                         safety_mode)
+                    this.ast_block_stmts(
+                        destination,
+                        scope,
+                        block,
+                        span,
+                        stmts,
+                        expr,
+                        safety_mode,
+                    )
} }
}) })
}) })
} }
-    fn ast_block_stmts(&mut self,
-                       destination: &Place<'tcx>,
-                       mut block: BasicBlock,
-                       span: Span,
-                       stmts: Vec<StmtRef<'tcx>>,
-                       expr: Option<ExprRef<'tcx>>,
-                       safety_mode: BlockSafety)
-                       -> BlockAnd<()> {
+    fn ast_block_stmts(
+        &mut self,
+        destination: &Place<'tcx>,
+        scope: Option<region::Scope>,
+        mut block: BasicBlock,
+        span: Span,
+        stmts: Vec<StmtRef<'tcx>>,
+        expr: Option<ExprRef<'tcx>>,
+        safety_mode: BlockSafety,
+    ) -> BlockAnd<()> {
let this = self; let this = self;
// This convoluted structure is to avoid using recursion as we walk down a list // This convoluted structure is to avoid using recursion as we walk down a list
@ -177,7 +205,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.block_context.currently_ignores_tail_results(); this.block_context.currently_ignores_tail_results();
this.block_context.push(BlockFrame::TailExpr { tail_result_is_ignored }); this.block_context.push(BlockFrame::TailExpr { tail_result_is_ignored });
unpack!(block = this.into(destination, block, expr)); unpack!(block = this.into(destination, scope, block, expr));
let popped = this.block_context.pop(); let popped = this.block_context.pop();
assert!(popped.map_or(false, |bf|bf.is_tail_expr())); assert!(popped.map_or(false, |bf|bf.is_tail_expr()));
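
The hunks above thread an optional drop scope through `ast_block`: a block that is targeted by `break` can initialize its destination from more than one place, so the destination's drop has to be scheduled up front rather than by the tail expression. A minimal illustration of such a block, written with the then-unstable `label_break_value` feature (names are hypothetical):

```rust
#![feature(label_break_value)] // unstable at the time of this commit

fn breakable_block(early: bool) -> String {
    // This block can initialize its destination either through the `break`
    // or through the normal tail expression, so MIR building schedules the
    // destination's drop before lowering the block instead of attaching it
    // to the last expression as for ordinary blocks.
    'out: {
        if early {
            break 'out String::from("early");
        }
        String::from("normal")
    }
}

fn main() {
    assert_eq!(breakable_block(true), "early");
    assert_eq!(breakable_block(false), "normal");
}
```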


@ -128,7 +128,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
expr_span, expr_span,
scope, scope,
result, result,
expr.ty,
); );
} }
@ -137,11 +136,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.cfg this.cfg
.push_assign(block, source_info, &Place::from(result), box_); .push_assign(block, source_info, &Place::from(result), box_);
-                // initialize the box contents:
+                // Initialize the box contents. No scope is needed since the
+                // `Box` is already scheduled to be dropped.
                 unpack!(
                     block = this.into(
                         &Place::from(result).deref(),
-                        block, value
+                        None,
+                        block,
+                        value
                     )
                 );
block.and(Rvalue::Use(Operand::Move(Place::from(result)))) block.and(Rvalue::Use(Operand::Move(Place::from(result))))
@ -569,7 +571,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
upvar_span, upvar_span,
temp_lifetime, temp_lifetime,
temp, temp,
upvar_ty,
); );
} }
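
This hunk is the `box` expression lowering: the freshly allocated `Box` already has its drop scheduled, so the contents are evaluated into the dereferenced place with no extra drop scope. Roughly the surface form that reaches this path, using the then-unstable `box_syntax` feature (illustrative only):

```rust
#![feature(box_syntax)] // unstable `box` expression syntax

fn boxed_string(s: &str) -> Box<String> {
    // The allocation is created and scheduled for drop first; the String is
    // then built directly into the box, which is why `into` is passed `None`
    // for the drop scope in the hunk above.
    box String::from(s)
}

fn main() {
    assert_eq!(*boxed_string("hi"), "hi");
}
```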


@ -103,24 +103,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                         expr_span,
                         temp_lifetime,
                         temp,
-                        expr_ty,
                         DropKind::Storage,
                     );
                 }
             }
         }
-        unpack!(block = this.into(temp_place, block, expr));
-        if let Some(temp_lifetime) = temp_lifetime {
-            this.schedule_drop(
-                expr_span,
-                temp_lifetime,
-                temp,
-                expr_ty,
-                DropKind::Value,
-            );
-        }
+        unpack!(block = this.into(temp_place, temp_lifetime, block, expr));
         block.and(temp)
     }


@ -2,7 +2,9 @@
use crate::build::expr::category::{Category, RvalueFunc}; use crate::build::expr::category::{Category, RvalueFunc};
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
use crate::build::scope::DropKind;
use crate::hair::*; use crate::hair::*;
use rustc::middle::region;
use rustc::mir::*; use rustc::mir::*;
use rustc::ty; use rustc::ty;
@ -11,15 +13,18 @@ use rustc_target::spec::abi::Abi;
impl<'a, 'tcx> Builder<'a, 'tcx> { impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Compile `expr`, storing the result into `destination`, which /// Compile `expr`, storing the result into `destination`, which
/// is assumed to be uninitialized. /// is assumed to be uninitialized.
/// If a `drop_scope` is provided, `destination` is scheduled to be dropped
/// in `scope` once it has been initialized.
pub fn into_expr( pub fn into_expr(
&mut self, &mut self,
destination: &Place<'tcx>, destination: &Place<'tcx>,
scope: Option<region::Scope>,
mut block: BasicBlock, mut block: BasicBlock,
expr: Expr<'tcx>, expr: Expr<'tcx>,
) -> BlockAnd<()> { ) -> BlockAnd<()> {
         debug!(
-            "into_expr(destination={:?}, block={:?}, expr={:?})",
-            destination, block, expr
+            "into_expr(destination={:?}, scope={:?}, block={:?}, expr={:?})",
+            destination, scope, block, expr
         );
// since we frequently have to reference `self` from within a // since we frequently have to reference `self` from within a
@ -35,6 +40,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
_ => false, _ => false,
}; };
let schedule_drop = move |this: &mut Self| {
if let Some(drop_scope) = scope {
let local = destination.as_local()
.expect("cannot schedule drop of non-Local place");
this.schedule_drop(expr_span, drop_scope, local, DropKind::Value);
}
};
if !expr_is_block_or_scope { if !expr_is_block_or_scope {
this.block_context.push(BlockFrame::SubExpr); this.block_context.push(BlockFrame::SubExpr);
} }
@ -47,14 +60,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
} => { } => {
let region_scope = (region_scope, source_info); let region_scope = (region_scope, source_info);
this.in_scope(region_scope, lint_level, |this| { this.in_scope(region_scope, lint_level, |this| {
this.into(destination, block, value) this.into(destination, scope, block, value)
}) })
} }
ExprKind::Block { body: ast_block } => { ExprKind::Block { body: ast_block } => {
this.ast_block(destination, block, ast_block, source_info) this.ast_block(destination, scope, block, ast_block, source_info)
} }
ExprKind::Match { scrutinee, arms } => { ExprKind::Match { scrutinee, arms } => {
this.match_expr(destination, expr_span, block, scrutinee, arms) this.match_expr(destination, scope, expr_span, block, scrutinee, arms)
} }
ExprKind::NeverToAny { source } => { ExprKind::NeverToAny { source } => {
let source = this.hir.mirror(source); let source = this.hir.mirror(source);
@ -67,6 +80,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// This is an optimization. If the expression was a call then we already have an // This is an optimization. If the expression was a call then we already have an
// unreachable block. Don't bother to terminate it and create a new one. // unreachable block. Don't bother to terminate it and create a new one.
schedule_drop(this);
if is_call { if is_call {
block.unit() block.unit()
} else { } else {
@ -164,6 +178,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
TerminatorKind::Goto { target: loop_block }, TerminatorKind::Goto { target: loop_block },
); );
// Loops assign to their destination on each `break`. Since we
// can't easily unschedule drops, we schedule the drop now.
schedule_drop(this);
this.in_breakable_scope( this.in_breakable_scope(
Some(loop_block), Some(loop_block),
exit_block, exit_block,
@ -185,7 +202,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// introduce a unit temporary as the destination for the loop body. // introduce a unit temporary as the destination for the loop body.
let tmp = this.get_unit_temp(); let tmp = this.get_unit_temp();
// Execute the body, branching back to the test. // Execute the body, branching back to the test.
-                let body_block_end = unpack!(this.into(&tmp, body_block, body));
+                // No scope is provided, since we've scheduled the drop above.
+                let body_block_end = unpack!(this.into(&tmp, None, body_block, body));
this.cfg.terminate( this.cfg.terminate(
body_block_end, body_block_end,
source_info, source_info,
@ -234,8 +252,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
is_block_tail: None, is_block_tail: None,
}); });
let ptr_temp = Place::from(ptr_temp); let ptr_temp = Place::from(ptr_temp);
-                    let block = unpack!(this.into(&ptr_temp, block, ptr));
-                    this.into(&ptr_temp.deref(), block, val)
+                    // No need for a scope, ptr_temp doesn't need drop
+                    let block = unpack!(this.into(&ptr_temp, None, block, ptr));
+                    // Maybe we should provide a scope here so that
+                    // `move_val_init` wouldn't leak on panic even with an
+                    // arbitrary `val` expression, but `schedule_drop`,
+                    // borrowck and drop elaboration all prevent us from
+                    // dropping `ptr_temp.deref()`.
+                    this.into(&ptr_temp.deref(), None, block, val)
} else { } else {
let args: Vec<_> = args let args: Vec<_> = args
.into_iter() .into_iter()
@ -265,11 +289,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
from_hir_call, from_hir_call,
}, },
); );
schedule_drop(this);
success.unit() success.unit()
} }
} }
ExprKind::Use { source } => { ExprKind::Use { source } => {
this.into(destination, block, source) this.into(destination, scope, block, source)
} }
// These cases don't actually need a destination // These cases don't actually need a destination
@ -296,6 +321,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place)); let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place));
this.cfg this.cfg
.push_assign(block, source_info, destination, rvalue); .push_assign(block, source_info, destination, rvalue);
schedule_drop(this);
block.unit() block.unit()
} }
ExprKind::Index { .. } | ExprKind::Deref { .. } | ExprKind::Field { .. } => { ExprKind::Index { .. } | ExprKind::Deref { .. } | ExprKind::Field { .. } => {
@ -315,6 +341,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place)); let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place));
this.cfg this.cfg
.push_assign(block, source_info, destination, rvalue); .push_assign(block, source_info, destination, rvalue);
schedule_drop(this);
block.unit() block.unit()
} }
@ -346,6 +373,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let rvalue = unpack!(block = this.as_local_rvalue(block, expr)); let rvalue = unpack!(block = this.as_local_rvalue(block, expr));
this.cfg.push_assign(block, source_info, destination, rvalue); this.cfg.push_assign(block, source_info, destination, rvalue);
schedule_drop(this);
block.unit() block.unit()
} }
}; };
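
Two of the new comments above note that loops assign to their destination on every `break` and that drops cannot easily be unscheduled, so the destination's drop is scheduled before the body is lowered. This mirrors the `panic_after_init_by_loop` tests added further down; the sketch below is a standalone, illustrative version of the same situation:

```rust
struct NoisyDrop;

impl Drop for NoisyDrop {
    fn drop(&mut self) {
        // Stand-in for a destructor that panics while the loop exits.
        panic!("panic while dropping");
    }
}

fn init_by_loop() -> usize {
    let q = loop {
        let _guard = NoisyDrop;
        // `break` moves the String into `q`'s destination, then `_guard` is
        // dropped. Because the drop for `q` is scheduled before the loop
        // body is lowered, the String is dropped during the resulting
        // unwind rather than leaked.
        break String::from("payload");
    };
    q.len()
}

fn main() {
    let _ = std::panic::catch_unwind(init_by_loop);
}
```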


@ -6,6 +6,7 @@
use crate::build::{BlockAnd, Builder}; use crate::build::{BlockAnd, Builder};
use crate::hair::*; use crate::hair::*;
use rustc::middle::region;
use rustc::mir::*; use rustc::mir::*;
pub(in crate::build) trait EvalInto<'tcx> { pub(in crate::build) trait EvalInto<'tcx> {
@ -13,19 +14,23 @@ pub(in crate::build) trait EvalInto<'tcx> {
self, self,
builder: &mut Builder<'_, 'tcx>, builder: &mut Builder<'_, 'tcx>,
destination: &Place<'tcx>, destination: &Place<'tcx>,
scope: Option<region::Scope>,
block: BasicBlock, block: BasicBlock,
) -> BlockAnd<()>; ) -> BlockAnd<()>;
} }
impl<'a, 'tcx> Builder<'a, 'tcx> { impl<'a, 'tcx> Builder<'a, 'tcx> {
-    pub fn into<E>(&mut self,
-                   destination: &Place<'tcx>,
-                   block: BasicBlock,
-                   expr: E)
-                   -> BlockAnd<()>
-        where E: EvalInto<'tcx>
+    pub fn into<E>(
+        &mut self,
+        destination: &Place<'tcx>,
+        scope: Option<region::Scope>,
+        block: BasicBlock,
+        expr: E,
+    ) -> BlockAnd<()>
+    where
+        E: EvalInto<'tcx>,
     {
-        expr.eval_into(self, destination, block)
+        expr.eval_into(self, destination, scope, block)
} }
} }
@ -34,10 +39,11 @@ impl<'tcx> EvalInto<'tcx> for ExprRef<'tcx> {
self, self,
builder: &mut Builder<'_, 'tcx>, builder: &mut Builder<'_, 'tcx>,
destination: &Place<'tcx>, destination: &Place<'tcx>,
scope: Option<region::Scope>,
block: BasicBlock, block: BasicBlock,
) -> BlockAnd<()> { ) -> BlockAnd<()> {
let expr = builder.hir.mirror(self); let expr = builder.hir.mirror(self);
builder.into_expr(destination, block, expr) builder.into_expr(destination, scope, block, expr)
} }
} }
@ -46,8 +52,9 @@ impl<'tcx> EvalInto<'tcx> for Expr<'tcx> {
self, self,
builder: &mut Builder<'_, 'tcx>, builder: &mut Builder<'_, 'tcx>,
destination: &Place<'tcx>, destination: &Place<'tcx>,
scope: Option<region::Scope>,
block: BasicBlock, block: BasicBlock,
) -> BlockAnd<()> { ) -> BlockAnd<()> {
builder.into_expr(destination, block, self) builder.into_expr(destination, scope, block, self)
} }
} }


@ -102,6 +102,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
pub fn match_expr( pub fn match_expr(
&mut self, &mut self,
destination: &Place<'tcx>, destination: &Place<'tcx>,
destination_scope: Option<region::Scope>,
span: Span, span: Span,
mut block: BasicBlock, mut block: BasicBlock,
scrutinee: ExprRef<'tcx>, scrutinee: ExprRef<'tcx>,
@ -228,6 +229,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}; };
// Step 5. Create everything else: the guards and the arms. // Step 5. Create everything else: the guards and the arms.
if let Some(scope) = destination_scope {
// `match` assigns to its destination in each arm. Since we can't
// easily unschedule drops, we schedule the drop now.
let local = destination.as_local()
.expect("cannot schedule drop of non-Local place");
self.schedule_drop(span, scope, local, DropKind::Value);
}
let match_scope = self.scopes.topmost(); let match_scope = self.scopes.topmost();
let arm_end_blocks: Vec<_> = arm_candidates.into_iter().map(|(arm, mut candidates)| { let arm_end_blocks: Vec<_> = arm_candidates.into_iter().map(|(arm, mut candidates)| {
@ -275,7 +284,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.source_scope = source_scope; this.source_scope = source_scope;
} }
-                this.into(destination, arm_block, body)
+                // No scope is provided, since we've scheduled the drop above.
+                this.into(destination, None, arm_block, body)
}) })
}).collect(); }).collect();
@ -311,8 +321,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
} => { } => {
let place = let place =
self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard); self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
-                unpack!(block = self.into(&place, block, initializer));
+                let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
+                unpack!(block = self.into(&place, Some(region_scope), block, initializer));
// Inject a fake read, see comments on `FakeReadCause::ForLet`. // Inject a fake read, see comments on `FakeReadCause::ForLet`.
let source_info = self.source_info(irrefutable_pat.span); let source_info = self.source_info(irrefutable_pat.span);
@ -324,7 +335,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}, },
); );
self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
block.unit() block.unit()
} }
@ -352,9 +362,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
user_ty_span, user_ty_span,
}, },
} => { } => {
let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
let place = let place =
self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard); self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
-                unpack!(block = self.into(&place, block, initializer));
+                unpack!(block = self.into(&place, Some(region_scope), block, initializer));
// Inject a fake read, see comments on `FakeReadCause::ForLet`. // Inject a fake read, see comments on `FakeReadCause::ForLet`.
let pattern_source_info = self.source_info(irrefutable_pat.span); let pattern_source_info = self.source_info(irrefutable_pat.span);
@ -400,7 +411,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}, },
); );
self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
block.unit() block.unit()
} }
@ -535,21 +545,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
kind: StatementKind::StorageLive(local_id), kind: StatementKind::StorageLive(local_id),
}, },
); );
let var_ty = self.local_decls[local_id].ty;
let region_scope = self.hir.region_scope_tree.var_scope(var.local_id); let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
self.schedule_drop(span, region_scope, local_id, var_ty, DropKind::Storage); self.schedule_drop(span, region_scope, local_id, DropKind::Storage);
Place::from(local_id) Place::from(local_id)
} }
pub fn schedule_drop_for_binding(&mut self, var: HirId, span: Span, for_guard: ForGuard) { pub fn schedule_drop_for_binding(&mut self, var: HirId, span: Span, for_guard: ForGuard) {
let local_id = self.var_local_id(var, for_guard); let local_id = self.var_local_id(var, for_guard);
let var_ty = self.local_decls[local_id].ty;
let region_scope = self.hir.region_scope_tree.var_scope(var.local_id); let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
self.schedule_drop( self.schedule_drop(
span, span,
region_scope, region_scope,
local_id, local_id,
var_ty,
DropKind::Value, DropKind::Value,
); );
} }
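
`match` has the same property: every arm writes to the destination, and per-arm drops cannot easily be unscheduled, so the destination's drop is scheduled once before the arms are lowered. The new `panic_after_init_by_match_*` tests below exercise this; a small standalone illustration (names are made up):

```rust
fn init_by_match(flag: bool) -> Vec<u8> {
    let q = match flag {
        true => {
            // Arm-local values such as `tmp` are dropped only after the
            // arm's result has been written into `q`'s destination; that is
            // the window the eagerly scheduled drop now covers on unwind.
            let tmp = vec![0u8; 16];
            vec![tmp.len() as u8]
        }
        false => vec![0u8],
    };
    q
}

fn main() {
    assert_eq!(init_by_match(true), vec![16]);
    assert_eq!(init_by_match(false), vec![0]);
}
```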


@ -616,6 +616,7 @@ where
let source_info = builder.source_info(span); let source_info = builder.source_info(span);
let call_site_s = (call_site_scope, source_info); let call_site_s = (call_site_scope, source_info);
unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| { unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| {
builder.schedule_drop(span, call_site_scope, RETURN_PLACE, DropKind::Value);
if should_abort_on_panic(tcx, fn_def_id, abi) { if should_abort_on_panic(tcx, fn_def_id, abi) {
builder.schedule_abort(); builder.schedule_abort();
} }
@ -646,6 +647,7 @@ where
builder.cfg.terminate(unreachable_block, source_info, builder.cfg.terminate(unreachable_block, source_info,
TerminatorKind::Unreachable); TerminatorKind::Unreachable);
} }
builder.unschedule_return_place_drop();
return_block.unit() return_block.unit()
})); }));
assert_eq!(block, builder.return_block()); assert_eq!(block, builder.return_block());
@ -687,7 +689,9 @@ fn construct_const<'a, 'tcx>(
let mut block = START_BLOCK; let mut block = START_BLOCK;
let ast_expr = &tcx.hir().body(body_id).value; let ast_expr = &tcx.hir().body(body_id).value;
let expr = builder.hir.mirror(ast_expr); let expr = builder.hir.mirror(ast_expr);
-    unpack!(block = builder.into_expr(&Place::return_place(), block, expr));
+    // We don't provide a scope because we can't unwind in constants, so won't
+    // need to drop the return place.
+    unpack!(block = builder.into_expr(&Place::return_place(), None, block, expr));
let source_info = builder.source_info(span); let source_info = builder.source_info(span);
builder.cfg.terminate(block, source_info, TerminatorKind::Return); builder.cfg.terminate(block, source_info, TerminatorKind::Return);
@ -829,12 +833,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// Function arguments always get the first Local indices after the return place // Function arguments always get the first Local indices after the return place
let local = Local::new(index + 1); let local = Local::new(index + 1);
let place = Place::from(local); let place = Place::from(local);
let &ArgInfo(ty, opt_ty_info, arg_opt, ref self_binding) = arg_info; let &ArgInfo(_, opt_ty_info, arg_opt, ref self_binding) = arg_info;
// Make sure we drop (parts of) the argument even when not matched on. // Make sure we drop (parts of) the argument even when not matched on.
self.schedule_drop( self.schedule_drop(
arg_opt.as_ref().map_or(ast_body.span, |arg| arg.pat.span), arg_opt.as_ref().map_or(ast_body.span, |arg| arg.pat.span),
argument_scope, local, ty, DropKind::Value, argument_scope, local, DropKind::Value,
); );
if let Some(arg) = arg_opt { if let Some(arg) = arg_opt {
@ -888,7 +892,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
} }
let body = self.hir.mirror(ast_body); let body = self.hir.mirror(ast_body);
-        self.into(&Place::return_place(), block, body)
+        // No scope is provided, since we've scheduled the drop of the return
+        // place.
+        self.into(&Place::return_place(), None, block, body)
} }
fn set_correct_source_scope_for_arg( fn set_correct_source_scope_for_arg(


@ -85,7 +85,6 @@ should go to.
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG}; use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG};
use crate::hair::{Expr, ExprRef, LintLevel}; use crate::hair::{Expr, ExprRef, LintLevel};
use rustc::middle::region; use rustc::middle::region;
use rustc::ty::Ty;
use rustc::hir; use rustc::hir;
use rustc::mir::*; use rustc::mir::*;
use syntax_pos::{DUMMY_SP, Span}; use syntax_pos::{DUMMY_SP, Span};
@ -173,11 +172,11 @@ struct BreakableScope<'tcx> {
region_scope: region::Scope, region_scope: region::Scope,
/// Where the body of the loop begins. `None` if block /// Where the body of the loop begins. `None` if block
continue_block: Option<BasicBlock>, continue_block: Option<BasicBlock>,
-    /// Block to branch into when the loop or block terminates (either by being `break`-en out
-    /// from, or by having its condition to become false)
+    /// Block to branch into when the loop or block terminates (either by being
+    /// `break`-en out from, or by having its condition to become false)
     break_block: BasicBlock,
-    /// The destination of the loop/block expression itself (i.e., where to put the result of a
-    /// `break` expression)
+    /// The destination of the loop/block expression itself (i.e., where to put
+    /// the result of a `break` expression)
break_destination: Place<'tcx>, break_destination: Place<'tcx>,
} }
@ -514,7 +513,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
if let Some(value) = value { if let Some(value) = value {
debug!("stmt_expr Break val block_context.push(SubExpr)"); debug!("stmt_expr Break val block_context.push(SubExpr)");
self.block_context.push(BlockFrame::SubExpr); self.block_context.push(BlockFrame::SubExpr);
unpack!(block = self.into(&destination, block, value)); unpack!(block = self.into(&destination, None, block, value));
self.block_context.pop(); self.block_context.pop();
} else { } else {
self.cfg.push_assign_unit(block, source_info, &destination) self.cfg.push_assign_unit(block, source_info, &destination)
@ -728,10 +727,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
span: Span, span: Span,
region_scope: region::Scope, region_scope: region::Scope,
local: Local, local: Local,
place_ty: Ty<'tcx>,
) { ) {
-        self.schedule_drop(span, region_scope, local, place_ty, DropKind::Storage);
-        self.schedule_drop(span, region_scope, local, place_ty, DropKind::Value);
+        self.schedule_drop(span, region_scope, local, DropKind::Storage);
+        self.schedule_drop(span, region_scope, local, DropKind::Value);
} }
/// Indicates that `place` should be dropped on exit from /// Indicates that `place` should be dropped on exit from
@ -744,12 +742,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
span: Span, span: Span,
region_scope: region::Scope, region_scope: region::Scope,
local: Local, local: Local,
place_ty: Ty<'tcx>,
drop_kind: DropKind, drop_kind: DropKind,
) { ) {
-        let needs_drop = self.hir.needs_drop(place_ty);
-        match drop_kind {
-            DropKind::Value => if !needs_drop { return },
+        let needs_drop = match drop_kind {
+            DropKind::Value => {
+                if !self.hir.needs_drop(self.local_decls[local].ty) { return }
+                true
+            },
DropKind::Storage => { DropKind::Storage => {
if local.index() <= self.arg_count { if local.index() <= self.arg_count {
span_bug!( span_bug!(
@ -758,8 +757,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
self.arg_count, self.arg_count,
) )
                 }
+                false
             }
-        }
+        };
for scope in self.scopes.iter_mut() { for scope in self.scopes.iter_mut() {
let this_scope = scope.region_scope == region_scope; let this_scope = scope.region_scope == region_scope;
@ -1070,6 +1070,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
success_block success_block
} }
/// Unschedules the drop of the return place.
///
/// If the return type of a function requires drop, then we schedule it
/// in the outermost scope so that it's dropped if there's a panic while
/// we drop any local variables. But we don't want to drop it if we
/// return normally.
crate fn unschedule_return_place_drop(&mut self) {
assert_eq!(self.scopes.len(), 1);
assert!(self.scopes.scopes[0].drops.len() <= 1);
self.scopes.scopes[0].drops.clear();
}
// `match` arm scopes // `match` arm scopes
// ================== // ==================
/// Unschedules any drops in the top scope. /// Unschedules any drops in the top scope.
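
The drop scheduled for `RETURN_PLACE` in the outermost scope (added in the function-construction hunk above), together with `unschedule_return_place_drop` on the normal exit path, handles exactly the case the doc comment describes: the return value is already initialized while the function's locals are being dropped, so a panicking destructor must not leak it. An illustrative sketch with hypothetical names:

```rust
struct NoisyDrop;

impl Drop for NoisyDrop {
    fn drop(&mut self) {
        // Suppose this panics on some executions.
    }
}

fn build() -> String {
    let _local = NoisyDrop;
    // The String is moved into the return place before `_local` is dropped.
    // If `NoisyDrop::drop` panics at that point, the scheduled drop for the
    // return place runs during unwinding; on a normal return it is
    // unscheduled again so the caller receives the value as usual.
    String::from("result")
}

fn main() {
    assert_eq!(build(), "result");
}
```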


@ -109,15 +109,13 @@ impl<'mir, 'tcx> BitDenotation<'tcx> for RequiresStorage<'mir, 'tcx> {
assert_eq!(1, self.body.arg_count); assert_eq!(1, self.body.arg_count);
} }
-    fn statement_effect(&self,
-                        sets: &mut GenKillSet<Local>,
-                        loc: Location) {
-        self.check_for_move(sets, loc);
+    fn before_statement_effect(&self, sets: &mut GenKillSet<Self::Idx>, loc: Location) {
+        // If we borrow or assign to a place then it needs storage for that
+        // statement.
         self.check_for_borrow(sets, loc);
         let stmt = &self.body[loc.block].statements[loc.statement_index];
         match stmt.kind {
-            StatementKind::StorageLive(l) => sets.gen(l),
StatementKind::StorageDead(l) => sets.kill(l), StatementKind::StorageDead(l) => sets.kill(l),
StatementKind::Assign(box(ref place, _)) StatementKind::Assign(box(ref place, _))
| StatementKind::SetDiscriminant { box ref place, .. } => { | StatementKind::SetDiscriminant { box ref place, .. } => {
@ -136,11 +134,35 @@ impl<'mir, 'tcx> BitDenotation<'tcx> for RequiresStorage<'mir, 'tcx> {
} }
} }
-    fn terminator_effect(&self,
-                         sets: &mut GenKillSet<Local>,
-                         loc: Location) {
+    fn statement_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) {
+        // If we move from a place then only stops needing storage *after*
+        // that statement.
         self.check_for_move(sets, loc);
+    }
+
+    fn before_terminator_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) {
         self.check_for_borrow(sets, loc);
if let TerminatorKind::Call {
destination: Some((Place { base: PlaceBase::Local(local), .. }, _)),
..
} = self.body[loc.block].terminator().kind {
sets.gen(local);
}
}
fn terminator_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) {
// For call terminators the destination requires storage for the call
// and after the call returns successfully, but not after a panic.
// Since `propagate_call_unwind` doesn't exist, we have to kill the
// destination here, and then gen it again in `propagate_call_return`.
if let TerminatorKind::Call {
destination: Some((Place { base: PlaceBase::Local(local), projection: box [] }, _)),
..
} = self.body[loc.block].terminator().kind {
sets.kill(local);
}
self.check_for_move(sets, loc);
} }
fn propagate_call_return( fn propagate_call_return(


@ -508,10 +508,7 @@ fn locals_live_across_suspend_points(
storage_liveness_map.insert(block, storage_liveness.clone()); storage_liveness_map.insert(block, storage_liveness.clone());
requires_storage_cursor.seek(loc); requires_storage_cursor.seek(loc);
-        let mut storage_required = requires_storage_cursor.get().clone();
-        // Mark locals without storage statements as always requiring storage
-        storage_required.union(&ignored.0);
+        let storage_required = requires_storage_cursor.get().clone();
// Locals live are live at this point only if they are used across // Locals live are live at this point only if they are used across
// suspension points (the `liveness` variable) // suspension points (the `liveness` variable)
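
The refined `RequiresStorage` analysis above (a local needs storage only once something is stored in it, and a call destination does not need storage on the unwind edge) is what lets generator layout avoid reserving space for locals that are only initialized after the last suspension point, which the new "uninitialized locals in futures" test below checks. A small standalone illustration, assuming edition 2018:

```rust
// edition:2018
async fn fut() {}

async fn delayed_init() {
    // `big` is declared before the await but only assigned afterwards, so it
    // never has to be stored in the suspended future; the storage analysis
    // lets the generator layout take advantage of that.
    let big;
    fut().await;
    big = [0u8; 1024];
    let _ = &big;
}

fn main() {
    // The future stays small because the 1024-byte buffer is not part of the
    // state saved across the await.
    println!("{}", std::mem::size_of_val(&delayed_init()));
}
```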


@ -41,33 +41,36 @@ impl Drop for S {
 //
 // bb2: {
 //     _1 = move _2;
-//     drop(_2) -> bb4;
+//     drop(_2) -> [return: bb5, unwind: bb4];
 // }
 //
 // bb3 (cleanup): {
 //     drop(_2) -> bb1;
 // }
 //
-// bb4: {
+// bb4 (cleanup): {
+//     drop(_1) -> bb1;
+// }
+//
+// bb5: {
 //     StorageDead(_2);
 //     StorageLive(_3);
 //     StorageLive(_4);
 //     _4 = move _1;
-//     _3 = const std::mem::drop::<std::boxed::Box<S>>(move _4) -> [return: bb5, unwind: bb7];
+//     _3 = const std::mem::drop::<std::boxed::Box<S>>(move _4) -> [return: bb6, unwind: bb7];
 // }
 //
-// bb5: {
+// bb6: {
 //     StorageDead(_4);
 //     StorageDead(_3);
 //     _0 = ();
 //     drop(_1) -> bb8;
 // }
-// bb6 (cleanup): {
-//     drop(_1) -> bb1;
-// }
+//
 // bb7 (cleanup): {
-//     drop(_4) -> bb6;
+//     drop(_4) -> bb4;
 // }
+//
 // bb8: {
 //     StorageDead(_1);
 //     return;


@ -24,7 +24,7 @@ fn main() {
// StorageLive(_3); // StorageLive(_3);
// StorageLive(_4); // StorageLive(_4);
// _4 = std::option::Option::<u32>::None; // _4 = std::option::Option::<u32>::None;
// _3 = const <std::option::Option<u32> as std::ops::Try>::into_result(move _4) -> [return: bb2, unwind: bb3]; // _3 = const <std::option::Option<u32> as std::ops::Try>::into_result(move _4) -> [return: bb2, unwind: bb4];
// } // }
// bb1 (cleanup): { // bb1 (cleanup): {
// resume; // resume;
@ -32,60 +32,63 @@ fn main() {
// bb2: { // bb2: {
// StorageDead(_4); // StorageDead(_4);
// _5 = discriminant(_3); // _5 = discriminant(_3);
// switchInt(move _5) -> [0isize: bb10, 1isize: bb5, otherwise: bb4]; // switchInt(move _5) -> [0isize: bb11, 1isize: bb6, otherwise: bb5];
// } // }
// bb3 (cleanup): { // bb3 (cleanup): {
// drop(_2) -> bb1; // drop(_0) -> bb1;
// } // }
// bb4: { // bb4 (cleanup): {
// unreachable; // drop(_2) -> bb3;
// } // }
// bb5: { // bb5: {
// unreachable;
// }
// bb6: {
// StorageLive(_6); // StorageLive(_6);
// _6 = ((_3 as Err).0: std::option::NoneError); // _6 = ((_3 as Err).0: std::option::NoneError);
// StorageLive(_8); // StorageLive(_8);
// StorageLive(_9); // StorageLive(_9);
// _9 = _6; // _9 = _6;
// _8 = const <std::option::NoneError as std::convert::From<std::option::NoneError>>::from(move _9) -> [return: bb7, unwind: bb3]; // _8 = const <std::option::NoneError as std::convert::From<std::option::NoneError>>::from(move _9) -> [return: bb8, unwind: bb4];
// }
// bb6: {
// return;
// } // }
// bb7: { // bb7: {
// StorageDead(_9); // return;
// _0 = const <std::option::Option<std::boxed::Box<u32>> as std::ops::Try>::from_error(move _8) -> [return: bb8, unwind: bb3];
// } // }
// bb8: { // bb8: {
// StorageDead(_8); // StorageDead(_9);
// StorageDead(_6); // _0 = const <std::option::Option<std::boxed::Box<u32>> as std::ops::Try>::from_error(move _8) -> [return: bb9, unwind: bb4];
// drop(_2) -> bb9;
// } // }
// bb9: { // bb9: {
// StorageDead(_8);
// StorageDead(_6);
// drop(_2) -> [return: bb10, unwind: bb3];
// }
// bb10: {
// StorageDead(_2); // StorageDead(_2);
// StorageDead(_1); // StorageDead(_1);
// StorageDead(_3); // StorageDead(_3);
// goto -> bb6; // goto -> bb7;
// } // }
// bb10: { // bb11: {
// StorageLive(_10); // StorageLive(_10);
// _10 = ((_3 as Ok).0: u32); // _10 = ((_3 as Ok).0: u32);
// (*_2) = _10; // (*_2) = _10;
// StorageDead(_10); // StorageDead(_10);
// _1 = move _2; // _1 = move _2;
// drop(_2) -> [return: bb12, unwind: bb11]; // drop(_2) -> [return: bb13, unwind: bb12];
// } // }
// bb11 (cleanup): { // bb12 (cleanup): {
// drop(_1) -> bb1; // drop(_1) -> bb3;
// }
// bb12: {
// StorageDead(_2);
// _0 = std::option::Option::<std::boxed::Box<u32>>::Some(move _1,);
// drop(_1) -> bb13;
// } // }
// bb13: { // bb13: {
// StorageDead(_2);
// _0 = std::option::Option::<std::boxed::Box<u32>>::Some(move _1,);
// drop(_1) -> [return: bb14, unwind: bb3];
// }
// bb14: {
// StorageDead(_1); // StorageDead(_1);
// StorageDead(_3); // StorageDead(_3);
// goto -> bb6; // goto -> bb7;
// } // }
// } // }
// END rustc.test.ElaborateDrops.before.mir // END rustc.test.ElaborateDrops.before.mir


@ -22,7 +22,8 @@ struct BigFut([u8; BIG_FUT_SIZE]);
impl BigFut { impl BigFut {
fn new() -> Self { fn new() -> Self {
BigFut([0; BIG_FUT_SIZE]) BigFut([0; BIG_FUT_SIZE])
-    } }
+    }
+}
impl Drop for BigFut { impl Drop for BigFut {
fn drop(&mut self) {} fn drop(&mut self) {}


@ -0,0 +1,103 @@
// Test that we don't store uninitialized locals in futures from `async fn`.
//
// The exact sizes can change by a few bytes (we'd like to know when they do).
// What we don't want to see is the wrong multiple of 1024 (the size of `Big`)
// being reflected in the size.
// ignore-wasm32-bare (sizes don't match)
// run-pass
// edition:2018
#![allow(unused_variables, unused_assignments)]
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
const BIG_FUT_SIZE: usize = 1024;
struct Big([u8; BIG_FUT_SIZE]);
impl Big {
fn new() -> Self {
Big([0; BIG_FUT_SIZE])
}
}
impl Drop for Big {
fn drop(&mut self) {}
}
#[allow(dead_code)]
struct Joiner {
a: Option<Big>,
b: Option<Big>,
c: Option<Big>,
}
impl Future for Joiner {
type Output = ();
fn poll(self: Pin<&mut Self>, _ctx: &mut Context<'_>) -> Poll<Self::Output> {
Poll::Ready(())
}
}
fn noop() {}
async fn fut() {}
async fn single() {
let x;
fut().await;
x = Big::new();
}
async fn single_with_noop() {
let x;
fut().await;
noop();
x = Big::new();
noop();
}
async fn joined() {
let joiner;
let a = Big::new();
let b = Big::new();
let c = Big::new();
fut().await;
noop();
joiner = Joiner { a: Some(a), b: Some(b), c: Some(c) };
noop();
}
async fn joined_with_noop() {
let joiner;
let a = Big::new();
let b = Big::new();
let c = Big::new();
fut().await;
noop();
joiner = Joiner { a: Some(a), b: Some(b), c: Some(c) };
noop();
}
async fn join_retval() -> Joiner {
let a = Big::new();
let b = Big::new();
let c = Big::new();
fut().await;
noop();
Joiner { a: Some(a), b: Some(b), c: Some(c) }
}
fn main() {
assert_eq!(8, std::mem::size_of_val(&single()));
assert_eq!(12, std::mem::size_of_val(&single_with_noop()));
assert_eq!(3084, std::mem::size_of_val(&joined()));
assert_eq!(3084, std::mem::size_of_val(&joined_with_noop()));
assert_eq!(3084, std::mem::size_of_val(&join_retval()));
}


@ -89,10 +89,10 @@ fn main() {
assert_eq!(8, std::mem::size_of_val(&await1_level1())); assert_eq!(8, std::mem::size_of_val(&await1_level1()));
assert_eq!(12, std::mem::size_of_val(&await2_level1())); assert_eq!(12, std::mem::size_of_val(&await2_level1()));
assert_eq!(12, std::mem::size_of_val(&await3_level1())); assert_eq!(12, std::mem::size_of_val(&await3_level1()));
assert_eq!(20, std::mem::size_of_val(&await3_level2())); assert_eq!(24, std::mem::size_of_val(&await3_level2()));
assert_eq!(28, std::mem::size_of_val(&await3_level3())); assert_eq!(36, std::mem::size_of_val(&await3_level3()));
assert_eq!(36, std::mem::size_of_val(&await3_level4())); assert_eq!(48, std::mem::size_of_val(&await3_level4()));
assert_eq!(44, std::mem::size_of_val(&await3_level5())); assert_eq!(60, std::mem::size_of_val(&await3_level5()));
assert_eq!(1, wait(base())); assert_eq!(1, wait(base()));
assert_eq!(1, wait(await1_level1())); assert_eq!(1, wait(await1_level1()));


@ -7,7 +7,7 @@
// edition:2018 // edition:2018
// ignore-wasm32-bare compiled with panic=abort by default // ignore-wasm32-bare compiled with panic=abort by default
#![feature(slice_patterns)] #![feature(slice_patterns, arbitrary_self_types)]
#![allow(unused)] #![allow(unused)]
use std::{ use std::{
@ -45,6 +45,7 @@ impl<T: Unpin> Future for Defer<T> {
/// The `failing_op`-th operation will panic. /// The `failing_op`-th operation will panic.
struct Allocator { struct Allocator {
data: RefCell<Vec<bool>>, data: RefCell<Vec<bool>>,
name: &'static str,
failing_op: usize, failing_op: usize,
cur_ops: Cell<usize>, cur_ops: Cell<usize>,
} }
@ -56,23 +57,28 @@ impl Drop for Allocator {
fn drop(&mut self) { fn drop(&mut self) {
let data = self.data.borrow(); let data = self.data.borrow();
if data.iter().any(|d| *d) { if data.iter().any(|d| *d) {
panic!("missing free: {:?}", data); panic!("missing free in {:?}: {:?}", self.name, data);
} }
} }
} }
impl Allocator { impl Allocator {
-    fn new(failing_op: usize) -> Self {
-        Allocator { failing_op, cur_ops: Cell::new(0), data: RefCell::new(vec![]) }
+    fn new(failing_op: usize, name: &'static str) -> Self {
+        Allocator {
+            failing_op,
+            name,
+            cur_ops: Cell::new(0),
+            data: RefCell::new(vec![]),
+        }
     }
-    fn alloc(&self) -> impl Future<Output = Ptr<'_>> + '_ {
+    fn alloc(self: &Rc<Allocator>) -> impl Future<Output = Ptr> + 'static {
self.fallible_operation(); self.fallible_operation();
let mut data = self.data.borrow_mut(); let mut data = self.data.borrow_mut();
let addr = data.len(); let addr = data.len();
data.push(true); data.push(true);
Defer { ready: false, value: Some(Ptr(addr, self)) } Defer { ready: false, value: Some(Ptr(addr, self.clone())) }
} }
fn fallible_operation(&self) { fn fallible_operation(&self) {
self.cur_ops.set(self.cur_ops.get() + 1); self.cur_ops.set(self.cur_ops.get() + 1);
@ -85,11 +91,11 @@ impl Allocator {
// Type that tracks whether it was dropped and can panic when it's created or // Type that tracks whether it was dropped and can panic when it's created or
// destroyed. // destroyed.
struct Ptr<'a>(usize, &'a Allocator); struct Ptr(usize, Rc<Allocator>);
impl<'a> Drop for Ptr<'a> { impl Drop for Ptr {
fn drop(&mut self) { fn drop(&mut self) {
match self.1.data.borrow_mut()[self.0] { match self.1.data.borrow_mut()[self.0] {
false => panic!("double free at index {:?}", self.0), false => panic!("double free in {:?} at index {:?}", self.1.name, self.0),
ref mut d => *d = false, ref mut d => *d = false,
} }
@ -113,7 +119,7 @@ async fn dynamic_drop(a: Rc<Allocator>, c: bool) {
}; };
} }
struct TwoPtrs<'a>(Ptr<'a>, Ptr<'a>); struct TwoPtrs(Ptr, Ptr);
async fn struct_dynamic_drop(a: Rc<Allocator>, c0: bool, c1: bool, c: bool) { async fn struct_dynamic_drop(a: Rc<Allocator>, c0: bool, c1: bool, c: bool) {
for i in 0..2 { for i in 0..2 {
let x; let x;
@ -228,21 +234,62 @@ async fn subslice_pattern_reassign(a: Rc<Allocator>) {
a.alloc().await; a.alloc().await;
} }
-fn run_test<F, G>(cx: &mut Context<'_>, ref f: F)
+async fn panic_after_return(a: Rc<Allocator>, c: bool) -> (Ptr,) {
a.alloc().await;
let p = a.alloc().await;
if c {
a.alloc().await;
let q = a.alloc().await;
// We use a return type that isn't used anywhere else to make sure that
// the return place doesn't incorrectly end up in the generator state.
return (a.alloc().await,);
}
(a.alloc().await,)
}
async fn panic_after_init_by_loop(a: Rc<Allocator>) {
a.alloc().await;
let p = a.alloc().await;
let q = loop {
a.alloc().await;
let r = a.alloc().await;
break a.alloc().await;
};
}
async fn panic_after_init_by_match_with_bindings_and_guard(a: Rc<Allocator>, b: bool) {
a.alloc().await;
let p = a.alloc().await;
let q = match a.alloc().await {
ref _x if b => {
a.alloc().await;
let r = a.alloc().await;
a.alloc().await
}
_x => {
a.alloc().await;
let r = a.alloc().await;
a.alloc().await
},
};
}
fn run_test<F, G, O>(cx: &mut Context<'_>, ref f: F, name: &'static str)
where where
F: Fn(Rc<Allocator>) -> G, F: Fn(Rc<Allocator>) -> G,
G: Future<Output = ()>, G: Future<Output = O>,
{ {
for polls in 0.. { for polls in 0.. {
// Run without any panics to find which operations happen after the // Run without any panics to find which operations happen after the
// penultimate `poll`. // penultimate `poll`.
let first_alloc = Rc::new(Allocator::new(usize::MAX)); let first_alloc = Rc::new(Allocator::new(usize::MAX, name));
let mut fut = Box::pin(f(first_alloc.clone())); let mut fut = Box::pin(f(first_alloc.clone()));
let mut ops_before_last_poll = 0; let mut ops_before_last_poll = 0;
let mut completed = false; let mut completed = false;
for _ in 0..polls { for _ in 0..polls {
ops_before_last_poll = first_alloc.cur_ops.get(); ops_before_last_poll = first_alloc.cur_ops.get();
if let Poll::Ready(()) = fut.as_mut().poll(cx) { if let Poll::Ready(_) = fut.as_mut().poll(cx) {
completed = true; completed = true;
} }
} }
@ -251,7 +298,7 @@ where
// Start at `ops_before_last_poll` so that we will always be able to // Start at `ops_before_last_poll` so that we will always be able to
// `poll` the expected number of times. // `poll` the expected number of times.
for failing_op in ops_before_last_poll..first_alloc.cur_ops.get() { for failing_op in ops_before_last_poll..first_alloc.cur_ops.get() {
let alloc = Rc::new(Allocator::new(failing_op + 1)); let alloc = Rc::new(Allocator::new(failing_op + 1, name));
let f = &f; let f = &f;
let cx = &mut *cx; let cx = &mut *cx;
let result = panic::catch_unwind(panic::AssertUnwindSafe(move || { let result = panic::catch_unwind(panic::AssertUnwindSafe(move || {
@ -281,46 +328,56 @@ fn clone_waker(data: *const ()) -> RawWaker {
RawWaker::new(data, &RawWakerVTable::new(clone_waker, drop, drop, drop)) RawWaker::new(data, &RawWakerVTable::new(clone_waker, drop, drop, drop))
} }
macro_rules! run_test {
($ctxt:expr, $e:expr) => { run_test($ctxt, $e, stringify!($e)); };
}
fn main() { fn main() {
let waker = unsafe { Waker::from_raw(clone_waker(ptr::null())) }; let waker = unsafe { Waker::from_raw(clone_waker(ptr::null())) };
let context = &mut Context::from_waker(&waker); let context = &mut Context::from_waker(&waker);
run_test(context, |a| dynamic_init(a, false)); run_test!(context, |a| dynamic_init(a, false));
run_test(context, |a| dynamic_init(a, true)); run_test!(context, |a| dynamic_init(a, true));
run_test(context, |a| dynamic_drop(a, false)); run_test!(context, |a| dynamic_drop(a, false));
run_test(context, |a| dynamic_drop(a, true)); run_test!(context, |a| dynamic_drop(a, true));
run_test(context, |a| assignment(a, false, false)); run_test!(context, |a| assignment(a, false, false));
run_test(context, |a| assignment(a, false, true)); run_test!(context, |a| assignment(a, false, true));
run_test(context, |a| assignment(a, true, false)); run_test!(context, |a| assignment(a, true, false));
run_test(context, |a| assignment(a, true, true)); run_test!(context, |a| assignment(a, true, true));
run_test(context, |a| array_simple(a)); run_test!(context, |a| array_simple(a));
run_test(context, |a| vec_simple(a)); run_test!(context, |a| vec_simple(a));
run_test(context, |a| vec_unreachable(a)); run_test!(context, |a| vec_unreachable(a));
run_test(context, |a| struct_dynamic_drop(a, false, false, false)); run_test!(context, |a| struct_dynamic_drop(a, false, false, false));
run_test(context, |a| struct_dynamic_drop(a, false, false, true)); run_test!(context, |a| struct_dynamic_drop(a, false, false, true));
run_test(context, |a| struct_dynamic_drop(a, false, true, false)); run_test!(context, |a| struct_dynamic_drop(a, false, true, false));
run_test(context, |a| struct_dynamic_drop(a, false, true, true)); run_test!(context, |a| struct_dynamic_drop(a, false, true, true));
run_test(context, |a| struct_dynamic_drop(a, true, false, false)); run_test!(context, |a| struct_dynamic_drop(a, true, false, false));
run_test(context, |a| struct_dynamic_drop(a, true, false, true)); run_test!(context, |a| struct_dynamic_drop(a, true, false, true));
run_test(context, |a| struct_dynamic_drop(a, true, true, false)); run_test!(context, |a| struct_dynamic_drop(a, true, true, false));
run_test(context, |a| struct_dynamic_drop(a, true, true, true)); run_test!(context, |a| struct_dynamic_drop(a, true, true, true));
run_test(context, |a| field_assignment(a, false)); run_test!(context, |a| field_assignment(a, false));
run_test(context, |a| field_assignment(a, true)); run_test!(context, |a| field_assignment(a, true));
run_test(context, |a| mixed_drop_and_nondrop(a)); run_test!(context, |a| mixed_drop_and_nondrop(a));
run_test(context, |a| slice_pattern_one_of(a, 0)); run_test!(context, |a| slice_pattern_one_of(a, 0));
run_test(context, |a| slice_pattern_one_of(a, 1)); run_test!(context, |a| slice_pattern_one_of(a, 1));
run_test(context, |a| slice_pattern_one_of(a, 2)); run_test!(context, |a| slice_pattern_one_of(a, 2));
run_test(context, |a| slice_pattern_one_of(a, 3)); run_test!(context, |a| slice_pattern_one_of(a, 3));
run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, true)); run_test!(context, |a| subslice_pattern_from_end_with_drop(a, true, true));
run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, false)); run_test!(context, |a| subslice_pattern_from_end_with_drop(a, true, false));
run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, true)); run_test!(context, |a| subslice_pattern_from_end_with_drop(a, false, true));
run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, false)); run_test!(context, |a| subslice_pattern_from_end_with_drop(a, false, false));
run_test(context, |a| subslice_pattern_reassign(a)); run_test!(context, |a| subslice_pattern_reassign(a));
run_test!(context, |a| panic_after_return(a, false));
run_test!(context, |a| panic_after_return(a, true));
run_test!(context, |a| panic_after_init_by_loop(a));
run_test!(context, |a| panic_after_init_by_match_with_bindings_and_guard(a, false));
run_test!(context, |a| panic_after_init_by_match_with_bindings_and_guard(a, true));
} }


@ -17,6 +17,7 @@ struct InjectedFailure;
struct Allocator { struct Allocator {
data: RefCell<Vec<bool>>, data: RefCell<Vec<bool>>,
name: &'static str,
failing_op: usize, failing_op: usize,
cur_ops: Cell<usize>, cur_ops: Cell<usize>,
} }
@ -28,17 +29,18 @@ impl Drop for Allocator {
fn drop(&mut self) { fn drop(&mut self) {
let data = self.data.borrow(); let data = self.data.borrow();
if data.iter().any(|d| *d) { if data.iter().any(|d| *d) {
panic!("missing free: {:?}", data); panic!("missing free in {:?}: {:?}", self.name, data);
} }
} }
} }
impl Allocator { impl Allocator {
-    fn new(failing_op: usize) -> Self {
+    fn new(failing_op: usize, name: &'static str) -> Self {
         Allocator {
             failing_op: failing_op,
             cur_ops: Cell::new(0),
-            data: RefCell::new(vec![])
+            data: RefCell::new(vec![]),
+            name,
} }
} }
fn alloc(&self) -> Ptr<'_> { fn alloc(&self) -> Ptr<'_> {
@ -53,20 +55,6 @@ impl Allocator {
data.push(true); data.push(true);
Ptr(addr, self) Ptr(addr, self)
} }
// FIXME(#47949) Any use of this indicates a bug in rustc: we should never
// be leaking values in the cases here.
//
// Creates a `Ptr<'_>` and checks that the allocated value is leaked if the
// `failing_op` is in the list of exception.
fn alloc_leaked(&self, exceptions: Vec<usize>) -> Ptr<'_> {
let ptr = self.alloc();
if exceptions.iter().any(|operation| *operation == self.failing_op) {
let mut data = self.data.borrow_mut();
data[ptr.0] = false;
}
ptr
}
} }
struct Ptr<'a>(usize, &'a Allocator); struct Ptr<'a>(usize, &'a Allocator);
@ -74,7 +62,7 @@ impl<'a> Drop for Ptr<'a> {
fn drop(&mut self) { fn drop(&mut self) {
match self.1.data.borrow_mut()[self.0] { match self.1.data.borrow_mut()[self.0] {
false => { false => {
panic!("double free at index {:?}", self.0) panic!("double free in {:?} at index {:?}", self.1.name, self.0)
} }
ref mut d => *d = false ref mut d => *d = false
} }
@ -270,79 +258,148 @@ fn subslice_pattern_reassign(a: &Allocator) {
} }
fn panic_after_return(a: &Allocator) -> Ptr<'_> { fn panic_after_return(a: &Allocator) -> Ptr<'_> {
// Panic in the drop of `p` or `q` can leak
let exceptions = vec![8, 9];
a.alloc(); a.alloc();
let p = a.alloc(); let p = a.alloc();
{ {
a.alloc(); a.alloc();
let p = a.alloc(); let p = a.alloc();
// FIXME (#47949) We leak values when we panic in a destructor after a.alloc()
// evaluating an expression with `rustc_mir::build::Builder::into`.
a.alloc_leaked(exceptions)
} }
} }
fn panic_after_return_expr(a: &Allocator) -> Ptr<'_> { fn panic_after_return_expr(a: &Allocator) -> Ptr<'_> {
// Panic in the drop of `p` or `q` can leak
let exceptions = vec![8, 9];
a.alloc(); a.alloc();
let p = a.alloc(); let p = a.alloc();
{ {
a.alloc(); a.alloc();
let q = a.alloc(); let q = a.alloc();
// FIXME (#47949) return a.alloc();
return a.alloc_leaked(exceptions);
} }
} }
fn panic_after_init(a: &Allocator) { fn panic_after_init(a: &Allocator) {
// Panic in the drop of `r` can leak
let exceptions = vec![8];
a.alloc(); a.alloc();
let p = a.alloc(); let p = a.alloc();
let q = { let q = {
a.alloc(); a.alloc();
let r = a.alloc(); let r = a.alloc();
// FIXME (#47949) a.alloc()
a.alloc_leaked(exceptions)
}; };
} }
fn panic_after_init_temp(a: &Allocator) { fn panic_after_init_temp(a: &Allocator) {
// Panic in the drop of `r` can leak
let exceptions = vec![8];
a.alloc(); a.alloc();
let p = a.alloc(); let p = a.alloc();
{ {
a.alloc(); a.alloc();
let r = a.alloc(); let r = a.alloc();
// FIXME (#47949) a.alloc()
a.alloc_leaked(exceptions)
}; };
} }
fn panic_after_init_by_loop(a: &Allocator) { fn panic_after_init_by_loop(a: &Allocator) {
// Panic in the drop of `r` can leak
let exceptions = vec![8];
a.alloc(); a.alloc();
let p = a.alloc(); let p = a.alloc();
let q = loop { let q = loop {
a.alloc(); a.alloc();
let r = a.alloc(); let r = a.alloc();
// FIXME (#47949) break a.alloc();
break a.alloc_leaked(exceptions);
}; };
} }
-fn run_test<F>(mut f: F)
+fn panic_after_init_by_match(a: &Allocator, b: bool) {
a.alloc();
let p = a.alloc();
loop {
let q = match b {
true => {
a.alloc();
let r = a.alloc();
a.alloc()
}
false => {
a.alloc();
let r = a.alloc();
break a.alloc();
}
};
return;
};
}
fn panic_after_init_by_match_with_guard(a: &Allocator, b: bool) {
a.alloc();
let p = a.alloc();
let q = match a.alloc() {
_ if b => {
a.alloc();
let r = a.alloc();
a.alloc()
}
_ => {
a.alloc();
let r = a.alloc();
a.alloc()
},
};
}
fn panic_after_init_by_match_with_bindings_and_guard(a: &Allocator, b: bool) {
a.alloc();
let p = a.alloc();
let q = match a.alloc() {
_x if b => {
a.alloc();
let r = a.alloc();
a.alloc()
}
_x => {
a.alloc();
let r = a.alloc();
a.alloc()
},
};
}
fn panic_after_init_by_match_with_ref_bindings_and_guard(a: &Allocator, b: bool) {
a.alloc();
let p = a.alloc();
let q = match a.alloc() {
ref _x if b => {
a.alloc();
let r = a.alloc();
a.alloc()
}
ref _x => {
a.alloc();
let r = a.alloc();
a.alloc()
},
};
}
fn panic_after_init_by_break_if(a: &Allocator, b: bool) {
a.alloc();
let p = a.alloc();
let q = loop {
let r = a.alloc();
break if b {
let s = a.alloc();
a.alloc()
} else {
a.alloc()
};
};
}
fn run_test<F>(mut f: F, name: &'static str)
where F: FnMut(&Allocator) where F: FnMut(&Allocator)
{ {
let first_alloc = Allocator::new(usize::MAX); let first_alloc = Allocator::new(usize::MAX, name);
f(&first_alloc); f(&first_alloc);
for failing_op in 1..first_alloc.cur_ops.get()+1 { for failing_op in 1..first_alloc.cur_ops.get()+1 {
let alloc = Allocator::new(failing_op); let alloc = Allocator::new(failing_op, name);
let alloc = &alloc; let alloc = &alloc;
let f = panic::AssertUnwindSafe(&mut f); let f = panic::AssertUnwindSafe(&mut f);
let result = panic::catch_unwind(move || { let result = panic::catch_unwind(move || {
@ -360,77 +417,91 @@ fn run_test<F>(mut f: F)
} }
} }
fn run_test_nopanic<F>(mut f: F) fn run_test_nopanic<F>(mut f: F, name: &'static str)
where F: FnMut(&Allocator) where F: FnMut(&Allocator)
{ {
let first_alloc = Allocator::new(usize::MAX); let first_alloc = Allocator::new(usize::MAX, name);
f(&first_alloc); f(&first_alloc);
} }
macro_rules! run_test {
($e:expr) => { run_test($e, stringify!($e)); }
}
fn main() { fn main() {
run_test(|a| dynamic_init(a, false)); run_test!(|a| dynamic_init(a, false));
run_test(|a| dynamic_init(a, true)); run_test!(|a| dynamic_init(a, true));
run_test(|a| dynamic_drop(a, false)); run_test!(|a| dynamic_drop(a, false));
run_test(|a| dynamic_drop(a, true)); run_test!(|a| dynamic_drop(a, true));
run_test(|a| assignment2(a, false, false)); run_test!(|a| assignment2(a, false, false));
run_test(|a| assignment2(a, false, true)); run_test!(|a| assignment2(a, false, true));
run_test(|a| assignment2(a, true, false)); run_test!(|a| assignment2(a, true, false));
run_test(|a| assignment2(a, true, true)); run_test!(|a| assignment2(a, true, true));
run_test(|a| assignment1(a, false)); run_test!(|a| assignment1(a, false));
run_test(|a| assignment1(a, true)); run_test!(|a| assignment1(a, true));
run_test(|a| array_simple(a)); run_test!(|a| array_simple(a));
run_test(|a| vec_simple(a)); run_test!(|a| vec_simple(a));
run_test(|a| vec_unreachable(a)); run_test!(|a| vec_unreachable(a));
run_test(|a| struct_dynamic_drop(a, false, false, false)); run_test!(|a| struct_dynamic_drop(a, false, false, false));
run_test(|a| struct_dynamic_drop(a, false, false, true)); run_test!(|a| struct_dynamic_drop(a, false, false, true));
run_test(|a| struct_dynamic_drop(a, false, true, false)); run_test!(|a| struct_dynamic_drop(a, false, true, false));
run_test(|a| struct_dynamic_drop(a, false, true, true)); run_test!(|a| struct_dynamic_drop(a, false, true, true));
run_test(|a| struct_dynamic_drop(a, true, false, false)); run_test!(|a| struct_dynamic_drop(a, true, false, false));
run_test(|a| struct_dynamic_drop(a, true, false, true)); run_test!(|a| struct_dynamic_drop(a, true, false, true));
run_test(|a| struct_dynamic_drop(a, true, true, false)); run_test!(|a| struct_dynamic_drop(a, true, true, false));
run_test(|a| struct_dynamic_drop(a, true, true, true)); run_test!(|a| struct_dynamic_drop(a, true, true, true));
run_test(|a| field_assignment(a, false)); run_test!(|a| field_assignment(a, false));
run_test(|a| field_assignment(a, true)); run_test!(|a| field_assignment(a, true));
run_test(|a| generator(a, 0)); run_test!(|a| generator(a, 0));
run_test(|a| generator(a, 1)); run_test!(|a| generator(a, 1));
run_test(|a| generator(a, 2)); run_test!(|a| generator(a, 2));
run_test(|a| generator(a, 3)); run_test!(|a| generator(a, 3));
run_test(|a| mixed_drop_and_nondrop(a)); run_test!(|a| mixed_drop_and_nondrop(a));
run_test(|a| slice_pattern_first(a)); run_test!(|a| slice_pattern_first(a));
run_test(|a| slice_pattern_middle(a)); run_test!(|a| slice_pattern_middle(a));
run_test(|a| slice_pattern_two(a)); run_test!(|a| slice_pattern_two(a));
run_test(|a| slice_pattern_last(a)); run_test!(|a| slice_pattern_last(a));
run_test(|a| slice_pattern_one_of(a, 0)); run_test!(|a| slice_pattern_one_of(a, 0));
run_test(|a| slice_pattern_one_of(a, 1)); run_test!(|a| slice_pattern_one_of(a, 1));
run_test(|a| slice_pattern_one_of(a, 2)); run_test!(|a| slice_pattern_one_of(a, 2));
run_test(|a| slice_pattern_one_of(a, 3)); run_test!(|a| slice_pattern_one_of(a, 3));
run_test(|a| subslice_pattern_from_end(a, true)); run_test!(|a| subslice_pattern_from_end(a, true));
run_test(|a| subslice_pattern_from_end(a, false)); run_test!(|a| subslice_pattern_from_end(a, false));
run_test(|a| subslice_pattern_from_end_with_drop(a, true, true)); run_test!(|a| subslice_pattern_from_end_with_drop(a, true, true));
run_test(|a| subslice_pattern_from_end_with_drop(a, true, false)); run_test!(|a| subslice_pattern_from_end_with_drop(a, true, false));
run_test(|a| subslice_pattern_from_end_with_drop(a, false, true)); run_test!(|a| subslice_pattern_from_end_with_drop(a, false, true));
run_test(|a| subslice_pattern_from_end_with_drop(a, false, false)); run_test!(|a| subslice_pattern_from_end_with_drop(a, false, false));
run_test(|a| slice_pattern_reassign(a)); run_test!(|a| slice_pattern_reassign(a));
run_test(|a| subslice_pattern_reassign(a)); run_test!(|a| subslice_pattern_reassign(a));
run_test(|a| { run_test!(|a| {
panic_after_return(a); panic_after_return(a);
}); });
run_test(|a| { run_test!(|a| {
panic_after_return_expr(a); panic_after_return_expr(a);
}); });
run_test(|a| panic_after_init(a)); run_test!(|a| panic_after_init(a));
run_test(|a| panic_after_init_temp(a)); run_test!(|a| panic_after_init_temp(a));
run_test(|a| panic_after_init_by_loop(a)); run_test!(|a| panic_after_init_by_loop(a));
run_test!(|a| panic_after_init_by_match(a, false));
run_test!(|a| panic_after_init_by_match(a, true));
run_test!(|a| panic_after_init_by_match_with_guard(a, false));
run_test!(|a| panic_after_init_by_match_with_guard(a, true));
run_test!(|a| panic_after_init_by_match_with_bindings_and_guard(a, false));
run_test!(|a| panic_after_init_by_match_with_bindings_and_guard(a, true));
run_test!(|a| panic_after_init_by_match_with_ref_bindings_and_guard(a, false));
run_test!(|a| panic_after_init_by_match_with_ref_bindings_and_guard(a, true));
run_test!(|a| panic_after_init_by_break_if(a, false));
run_test!(|a| panic_after_init_by_break_if(a, true));
run_test_nopanic(|a| union1(a)); run_test_nopanic(|a| union1(a), "|a| union1(a)");
} }