[MIR] Add Storage{Live,Dead} statements to emit llvm.lifetime.{start,end}.

This commit is contained in:
Eduard Burtescu 2016-08-14 06:34:14 +03:00
parent 2b7ea14cc4
commit 02aec40dc9
21 changed files with 359 additions and 59 deletions

View file

@ -688,8 +688,17 @@ pub struct Statement<'tcx> {
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub enum StatementKind<'tcx> {
/// Write the RHS Rvalue to the LHS Lvalue.
Assign(Lvalue<'tcx>, Rvalue<'tcx>),
SetDiscriminant{ lvalue: Lvalue<'tcx>, variant_index: usize },
/// Write the discriminant for a variant to the enum Lvalue.
SetDiscriminant { lvalue: Lvalue<'tcx>, variant_index: usize },
/// Start a live range for the storage of the local.
StorageLive(Lvalue<'tcx>),
/// End the current live range for the storage of the local.
StorageDead(Lvalue<'tcx>),
}
impl<'tcx> Debug for Statement<'tcx> {
@ -697,6 +706,8 @@ impl<'tcx> Debug for Statement<'tcx> {
use self::StatementKind::*;
match self.kind {
Assign(ref lv, ref rv) => write!(fmt, "{:?} = {:?}", lv, rv),
StorageLive(ref lv) => write!(fmt, "StorageLive({:?})", lv),
StorageDead(ref lv) => write!(fmt, "StorageDead({:?})", lv),
SetDiscriminant{lvalue: ref lv, variant_index: index} => {
write!(fmt, "discriminant({:?}) = {:?}", lv, index)
}

View file

@ -326,6 +326,12 @@ macro_rules! make_mir_visitor {
StatementKind::SetDiscriminant{ ref $($mutability)* lvalue, .. } => {
self.visit_lvalue(lvalue, LvalueContext::Store);
}
StatementKind::StorageLive(ref $($mutability)* lvalue) => {
self.visit_lvalue(lvalue, LvalueContext::StorageLive);
}
StatementKind::StorageDead(ref $($mutability)* lvalue) => {
self.visit_lvalue(lvalue, LvalueContext::StorageDead);
}
}
}
@ -759,4 +765,8 @@ pub enum LvalueContext {
// Consumed as part of an operand
Consume,
// Starting and ending a storage live range
StorageLive,
StorageDead,
}

View file

@ -459,6 +459,8 @@ impl<'a, 'tcx> BitDenotation for MovingOutStatements<'a, 'tcx> {
sets.kill_set.add(&moi);
});
}
repr::StatementKind::StorageLive(_) |
repr::StatementKind::StorageDead(_) => {}
}
}

View file

@ -104,6 +104,8 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
repr::StatementKind::Assign(ref lvalue, ref rvalue) => {
(lvalue, rvalue)
}
repr::StatementKind::StorageLive(_) |
repr::StatementKind::StorageDead(_) => continue,
repr::StatementKind::SetDiscriminant{ .. } =>
span_bug!(stmt.source_info.span,
"sanity_check should run before Deaggregator inserts SetDiscriminant"),

View file

@ -616,6 +616,8 @@ fn gather_moves<'a, 'tcx>(mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MoveD
Rvalue::InlineAsm { .. } => {}
}
}
StatementKind::StorageLive(_) |
StatementKind::StorageDead(_) => {}
StatementKind::SetDiscriminant{ .. } => {
span_bug!(stmt.source_info.span,
"SetDiscriminant should not exist during borrowck");

View file

@ -378,6 +378,8 @@ fn drop_flag_effects_for_location<'a, 'tcx, F>(
move_data.rev_lookup.find(lvalue),
|moi| callback(moi, DropFlagState::Present))
}
repr::StatementKind::StorageLive(_) |
repr::StatementKind::StorageDead(_) => {}
},
None => {
debug!("drop_flag_effects: replace {:?}", block.terminator());

View file

@ -68,6 +68,8 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
// FIXME #30046 ^~~~
this.expr_into_pattern(block, pattern, init)
}));
} else {
this.storage_live_for_bindings(block, &pattern);
}
// Enter the visibility scope, after evaluating the initializer.

View file

@ -37,6 +37,14 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
let temp = this.temp(expr_ty.clone());
let temp_lifetime = expr.temp_lifetime;
let expr_span = expr.span;
let source_info = this.source_info(expr_span);
if temp_lifetime.is_some() {
this.cfg.push(block, Statement {
source_info: source_info,
kind: StatementKind::StorageLive(temp.clone())
});
}
// Careful here not to cause an infinite cycle. If we always
// called `into`, then for lvalues like `x.f`, it would
@ -49,7 +57,6 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
Category::Lvalue => {
let lvalue = unpack!(block = this.as_lvalue(block, expr));
let rvalue = Rvalue::Use(Operand::Consume(lvalue));
let source_info = this.source_info(expr_span);
this.cfg.push_assign(block, source_info, &temp, rvalue);
}
_ => {

View file

@ -122,6 +122,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
PatternKind::Binding { mode: BindingMode::ByValue,
var,
subpattern: None, .. } => {
self.storage_live_for_bindings(block, &irrefutable_pat);
let lvalue = Lvalue::Var(self.var_indices[&var]);
return self.into(&lvalue, block, initializer);
}
@ -206,6 +207,43 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
}
var_scope
}
/// Emit `StorageLive` for every binding in the pattern.
pub fn storage_live_for_bindings(&mut self,
block: BasicBlock,
pattern: &Pattern<'tcx>) {
match *pattern.kind {
PatternKind::Binding { var, ref subpattern, .. } => {
let lvalue = Lvalue::Var(self.var_indices[&var]);
let source_info = self.source_info(pattern.span);
self.cfg.push(block, Statement {
source_info: source_info,
kind: StatementKind::StorageLive(lvalue)
});
if let Some(subpattern) = subpattern.as_ref() {
self.storage_live_for_bindings(block, subpattern);
}
}
PatternKind::Array { ref prefix, ref slice, ref suffix } |
PatternKind::Slice { ref prefix, ref slice, ref suffix } => {
for subpattern in prefix.iter().chain(slice).chain(suffix) {
self.storage_live_for_bindings(block, subpattern);
}
}
PatternKind::Constant { .. } | PatternKind::Range { .. } | PatternKind::Wild => {
}
PatternKind::Deref { ref subpattern } => {
self.storage_live_for_bindings(block, subpattern);
}
PatternKind::Leaf { ref subpatterns } |
PatternKind::Variant { ref subpatterns, .. } => {
for subpattern in subpatterns {
self.storage_live_for_bindings(block, &subpattern.pattern);
}
}
}
}
}
/// List of blocks for each arm (and potentially other metadata in the
@ -665,6 +703,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
};
let source_info = self.source_info(binding.span);
self.cfg.push(block, Statement {
source_info: source_info,
kind: StatementKind::StorageLive(Lvalue::Var(var_index))
});
self.cfg.push_assign(block, source_info,
&Lvalue::Var(var_index), rvalue);
}

View file

@ -238,7 +238,8 @@ pub fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
let span = tcx.map.span(item_id);
let mut builder = Builder::new(hir, span);
let extent = ROOT_CODE_EXTENT;
let extent = tcx.region_maps.temporary_scope(ast_expr.id)
.unwrap_or(ROOT_CODE_EXTENT);
let mut block = START_BLOCK;
let _ = builder.in_scope(extent, block, |builder| {
let expr = builder.hir.mirror(ast_expr);

View file

@ -107,6 +107,17 @@ pub struct Scope<'tcx> {
/// `ScopeAuxiliary`, but kept here for convenience
extent: CodeExtent,
/// Whether there's anything to do for the cleanup path, that is,
/// when unwinding through this scope. This includes destructors,
/// but not StorageDead statements, which don't get emitted at all
/// for unwinding, for several reasons:
/// * clang doesn't emit llvm.lifetime.end for C++ unwinding
/// * LLVM's memory dependency analysis can't handle it atm
/// * polluting the cleanup MIR with StorageDead creates
/// landing pads even though there are no actual destructors
/// * freeing up stack space has no effect during unwinding
needs_cleanup: bool,
/// set of lvalues to drop when exiting this scope. This starts
/// out empty but grows as variables are declared during the
/// building process. This is a stack, so we always drop from the
@ -139,11 +150,19 @@ struct DropData<'tcx> {
/// lvalue to drop
location: Lvalue<'tcx>,
/// The cached block for the cleanups-on-diverge path. This block
/// contains code to run the current drop and all the preceding
/// drops (i.e. those having lower index in Drops Scope drop
/// array)
cached_block: Option<BasicBlock>
/// Whether this is a full value Drop, or just a StorageDead.
kind: DropKind
}
enum DropKind {
Value {
/// The cached block for the cleanups-on-diverge path. This block
/// contains code to run the current drop and all the preceding
/// drops (i.e. those having lower index in Drops Scope drop
/// array)
cached_block: Option<BasicBlock>
},
Storage
}
struct FreeData<'tcx> {
@ -182,7 +201,9 @@ impl<'tcx> Scope<'tcx> {
fn invalidate_cache(&mut self) {
self.cached_exits = FnvHashMap();
for dropdata in &mut self.drops {
dropdata.cached_block = None;
if let DropKind::Value { ref mut cached_block } = dropdata.kind {
*cached_block = None;
}
}
if let Some(ref mut freedata) = self.free {
freedata.cached_block = None;
@ -194,8 +215,14 @@ impl<'tcx> Scope<'tcx> {
/// Precondition: the caches must be fully filled (i.e. diverge_cleanup is called) in order for
/// this method to work correctly.
fn cached_block(&self) -> Option<BasicBlock> {
if let Some(data) = self.drops.last() {
Some(data.cached_block.expect("drop cache is not filled"))
let mut drops = self.drops.iter().rev().filter_map(|data| {
match data.kind {
DropKind::Value { cached_block } => Some(cached_block),
DropKind::Storage => None
}
});
if let Some(cached_block) = drops.next() {
Some(cached_block.expect("drop cache is not filled"))
} else if let Some(ref data) = self.free {
Some(data.cached_block.expect("free cache is not filled"))
} else {
@ -265,6 +292,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
id: id,
visibility_scope: vis_scope,
extent: extent,
needs_cleanup: false,
drops: vec![],
free: None,
cached_exits: FnvHashMap()
@ -415,23 +443,37 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
extent: CodeExtent,
lvalue: &Lvalue<'tcx>,
lvalue_ty: Ty<'tcx>) {
if !self.hir.needs_drop(lvalue_ty) {
return
}
let needs_drop = self.hir.needs_drop(lvalue_ty);
let drop_kind = if needs_drop {
DropKind::Value { cached_block: None }
} else {
// Only temps and vars need their storage dead.
match *lvalue {
Lvalue::Temp(_) | Lvalue::Var(_) => DropKind::Storage,
_ => return
}
};
for scope in self.scopes.iter_mut().rev() {
if scope.extent == extent {
if let DropKind::Value { .. } = drop_kind {
scope.needs_cleanup = true;
}
// No need to invalidate any caches here. The just-scheduled drop will branch into
// the drop that comes before it in the vector.
scope.drops.push(DropData {
span: span,
location: lvalue.clone(),
cached_block: None
kind: drop_kind
});
return;
} else {
// We must invalidate all the cached_blocks leading up to the scope we're
// looking for, because all of the blocks in the chain will become incorrect.
scope.invalidate_cache()
if let DropKind::Value { .. } = drop_kind {
scope.invalidate_cache()
}
}
}
span_bug!(span, "extent {:?} not in scope to drop {:?}", extent, lvalue);
@ -453,6 +495,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
// We also must invalidate the caches in the scope for which the free is scheduled
// because the drops must branch into the free we schedule here.
scope.invalidate_cache();
scope.needs_cleanup = true;
scope.free = Some(FreeData {
span: span,
value: value.clone(),
@ -478,10 +521,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
/// See module comment for more details. None indicates there's no
/// cleanup to do at this point.
pub fn diverge_cleanup(&mut self) -> Option<BasicBlock> {
if self.scopes.iter().all(|scope| scope.drops.is_empty() && scope.free.is_none()) {
if !self.scopes.iter().any(|scope| scope.needs_cleanup) {
return None;
}
assert!(!self.scopes.is_empty()); // or `all` above would be true
assert!(!self.scopes.is_empty()); // or `any` above would be false
let unit_temp = self.get_unit_temp();
let Builder { ref mut hir, ref mut cfg, ref mut scopes,
@ -510,7 +553,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
resumeblk
};
for scope in scopes.iter_mut().filter(|s| !s.drops.is_empty() || s.free.is_some()) {
for scope in scopes.iter_mut().filter(|s| s.needs_cleanup) {
target = build_diverge_scope(hir.tcx(), cfg, &unit_temp, scope, target);
}
Some(target)
@ -591,21 +634,44 @@ fn build_scope_drops<'tcx>(cfg: &mut CFG<'tcx>,
-> BlockAnd<()> {
let mut iter = scope.drops.iter().rev().peekable();
while let Some(drop_data) = iter.next() {
// Try to find the next block with its cached block for us to diverge into in case the
// drop panics.
let on_diverge = iter.peek().iter().flat_map(|dd| dd.cached_block.into_iter()).next();
// If there's no `cached_block`s within current scope, we must look for one in the
// enclosing scope.
let on_diverge = on_diverge.or_else(||{
earlier_scopes.iter().rev().flat_map(|s| s.cached_block()).next()
});
let next = cfg.start_new_block();
cfg.terminate(block, scope.source_info(drop_data.span), TerminatorKind::Drop {
location: drop_data.location.clone(),
target: next,
unwind: on_diverge
});
block = next;
let source_info = scope.source_info(drop_data.span);
if let DropKind::Value { .. } = drop_data.kind {
// Try to find the next block with its cached block
// for us to diverge into in case the drop panics.
let on_diverge = iter.peek().iter().filter_map(|dd| {
match dd.kind {
DropKind::Value { cached_block } => cached_block,
DropKind::Storage => None
}
}).next();
// If there's no `cached_block`s within current scope,
// we must look for one in the enclosing scope.
let on_diverge = on_diverge.or_else(||{
earlier_scopes.iter().rev().flat_map(|s| s.cached_block()).next()
});
let next = cfg.start_new_block();
cfg.terminate(block, source_info, TerminatorKind::Drop {
location: drop_data.location.clone(),
target: next,
unwind: on_diverge
});
block = next;
}
match drop_data.kind {
DropKind::Value { .. } |
DropKind::Storage => {
// Only temps and vars need their storage dead.
match drop_data.location {
Lvalue::Temp(_) | Lvalue::Var(_) => {}
_ => continue
}
cfg.push(block, Statement {
source_info: source_info,
kind: StatementKind::StorageDead(drop_data.location.clone())
});
}
}
}
block.unit()
}
@ -653,7 +719,13 @@ fn build_diverge_scope<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
// *forward* order, so that we generate drops[0] first (right to
// left in diagram above).
for drop_data in &mut scope.drops {
target = if let Some(cached_block) = drop_data.cached_block {
// Only full value drops are emitted in the diverging path,
// not StorageDead.
let cached_block = match drop_data.kind {
DropKind::Value { ref mut cached_block } => cached_block,
DropKind::Storage => continue
};
target = if let Some(cached_block) = *cached_block {
cached_block
} else {
let block = cfg.start_new_cleanup_block();
@ -663,7 +735,7 @@ fn build_diverge_scope<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
target: target,
unwind: None
});
drop_data.cached_block = Some(block);
*cached_block = Some(block);
block
};
}

View file

@ -50,8 +50,7 @@ impl<'tcx> MirPass<'tcx> for Deaggregator {
let orig_stmt = bb.statements.pop().unwrap();
let (lhs, rhs) = match orig_stmt.kind {
StatementKind::Assign(ref lhs, ref rhs) => (lhs, rhs),
StatementKind::SetDiscriminant{ .. } =>
span_bug!(src_info.span, "expected aggregate, not {:?}", orig_stmt.kind),
_ => span_bug!(src_info.span, "expected assign, not {:?}", orig_stmt),
};
let (agg_kind, operands) = match rhs {
&Rvalue::Aggregate(ref agg_kind, ref operands) => (agg_kind, operands),
@ -114,7 +113,7 @@ fn get_aggregate_statement_index<'a, 'tcx, 'b>(start: usize,
let ref statement = statements[i];
let rhs = match statement.kind {
StatementKind::Assign(_, ref rhs) => rhs,
StatementKind::SetDiscriminant{ .. } => continue,
_ => continue,
};
let (kind, operands) = match rhs {
&Rvalue::Aggregate(ref kind, ref operands) => (kind, operands),

View file

@ -87,8 +87,12 @@ impl<'tcx> Visitor<'tcx> for TempCollector {
if let Lvalue::Temp(index) = *lvalue {
// Ignore drops, if the temp gets promoted,
// then it's constant and thus drop is noop.
if let LvalueContext::Drop = context {
return;
// Storage live ranges are also irrelevant.
match context {
LvalueContext::Drop |
LvalueContext::StorageLive |
LvalueContext::StorageDead => return,
_ => {}
}
let temp = &mut self.temps[index];
@ -219,12 +223,12 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
let (mut rvalue, mut call) = (None, None);
let source_info = if stmt_idx < no_stmts {
let statement = &mut self.source[bb].statements[stmt_idx];
let mut rhs = match statement.kind {
let rhs = match statement.kind {
StatementKind::Assign(_, ref mut rhs) => rhs,
StatementKind::SetDiscriminant{ .. } =>
span_bug!(statement.source_info.span,
"cannot promote SetDiscriminant {:?}",
statement),
_ => {
span_bug!(statement.source_info.span, "{:?} is not an assignment",
statement);
}
};
if self.keep_original {
rvalue = Some(rhs.clone());
@ -311,11 +315,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
StatementKind::Assign(_, ref mut rvalue) => {
mem::replace(rvalue, Rvalue::Use(new_operand))
}
StatementKind::SetDiscriminant{ .. } => {
span_bug!(statement.source_info.span,
"cannot promote SetDiscriminant {:?}",
statement);
}
_ => bug!()
}
}
Candidate::ShuffleIndices(bb) => {
@ -354,8 +354,10 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>,
let statement = &mir[bb].statements[stmt_idx];
let dest = match statement.kind {
StatementKind::Assign(ref dest, _) => dest,
StatementKind::SetDiscriminant{ .. } =>
panic!("cannot promote SetDiscriminant"),
_ => {
span_bug!(statement.source_info.span,
"expected assignment to promote");
}
};
if let Lvalue::Temp(index) = *dest {
if temps[index] == TempState::PromotedOut {
@ -408,7 +410,9 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>,
for block in mir.basic_blocks_mut() {
block.statements.retain(|statement| {
match statement.kind {
StatementKind::Assign(Lvalue::Temp(index), _) => {
StatementKind::Assign(Lvalue::Temp(index), _) |
StatementKind::StorageLive(Lvalue::Temp(index)) |
StatementKind::StorageDead(Lvalue::Temp(index)) => {
!promoted(index)
}
_ => true

View file

@ -854,7 +854,17 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> {
fn visit_statement(&mut self, bb: BasicBlock, statement: &Statement<'tcx>) {
assert_eq!(self.location.block, bb);
self.nest(|this| this.super_statement(bb, statement));
self.nest(|this| {
this.visit_source_info(&statement.source_info);
match statement.kind {
StatementKind::Assign(ref lvalue, ref rvalue) => {
this.visit_assign(bb, lvalue, rvalue);
}
StatementKind::SetDiscriminant { .. } |
StatementKind::StorageLive(_) |
StatementKind::StorageDead(_) => {}
}
});
self.location.statement_index += 1;
}

View file

@ -382,6 +382,15 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
variant_index);
};
}
StatementKind::StorageLive(ref lv) |
StatementKind::StorageDead(ref lv) => {
match *lv {
Lvalue::Temp(_) | Lvalue::Var(_) => {}
_ => {
span_mirbug!(self, stmt, "bad lvalue: expected temp or var");
}
}
}
}
}

View file

@ -161,8 +161,11 @@ impl<'mir, 'bcx, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'bcx, 'tcx> {
LvalueContext::Call => {
self.mark_assigned(index);
}
LvalueContext::Consume => {
}
LvalueContext::StorageLive |
LvalueContext::StorageDead |
LvalueContext::Consume => {}
LvalueContext::Store |
LvalueContext::Inspect |
LvalueContext::Borrow { .. } |
@ -170,6 +173,7 @@ impl<'mir, 'bcx, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'bcx, 'tcx> {
LvalueContext::Projection => {
self.mark_as_lvalue(index);
}
LvalueContext::Drop => {
let ty = lvalue.ty(self.mir, self.bcx.tcx());
let ty = self.bcx.monomorphize(&ty.to_ty(self.bcx.tcx()));

View file

@ -285,6 +285,8 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> {
Err(err) => if failure.is_ok() { failure = Err(err); }
}
}
mir::StatementKind::StorageLive(_) |
mir::StatementKind::StorageDead(_) => {}
mir::StatementKind::SetDiscriminant{ .. } => {
span_bug!(span, "SetDiscriminant should not appear in constants?");
}

View file

@ -10,6 +10,7 @@
use rustc::mir::repr as mir;
use base;
use common::{self, BlockAndBuilder};
use super::MirContext;
@ -71,6 +72,25 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
);
bcx
}
mir::StatementKind::StorageLive(ref lvalue) => {
self.trans_storage_liveness(bcx, lvalue, base::Lifetime::Start)
}
mir::StatementKind::StorageDead(ref lvalue) => {
self.trans_storage_liveness(bcx, lvalue, base::Lifetime::End)
}
}
}
fn trans_storage_liveness(&self,
bcx: BlockAndBuilder<'bcx, 'tcx>,
lvalue: &mir::Lvalue<'tcx>,
intrinsic: base::Lifetime)
-> BlockAndBuilder<'bcx, 'tcx> {
if let Some(index) = self.mir.local_index(lvalue) {
if let LocalRef::Lvalue(tr_lval) = self.locals[index] {
intrinsic.call(&bcx, tr_lval.llval);
}
}
bcx
}
}

View file

@ -0,0 +1,54 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -O -C no-prepopulate-passes
#![crate_type = "lib"]
#![feature(rustc_attrs)]
// CHECK-LABEL: @test
#[no_mangle]
#[rustc_mir] // FIXME #27840 MIR has different codegen.
pub fn test() {
let a = 0;
&a; // keep variable in an alloca
// CHECK: [[S_a:%[0-9]+]] = bitcast i32* %a to i8*
// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_a]])
{
let b = &Some(a);
&b; // keep variable in an alloca
// CHECK: [[S_b:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"** %b to i8*
// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_b]])
// CHECK: [[S_tmp2:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"* %tmp2 to i8*
// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_tmp2]])
// CHECK: [[E_tmp2:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"* %tmp2 to i8*
// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_tmp2]])
// CHECK: [[E_b:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"** %b to i8*
// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_b]])
}
let c = 1;
&c; // keep variable in an alloca
// CHECK: [[S_c:%[0-9]+]] = bitcast i32* %c to i8*
// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_c]])
// CHECK: [[E_c:%[0-9]+]] = bitcast i32* %c to i8*
// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_c]])
// CHECK: [[E_a:%[0-9]+]] = bitcast i32* %a to i8*
// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_a]])
}

View file

@ -288,8 +288,8 @@ fn while_expr(mut x: u64, y: u64, z: u64) -> u64 {
}
fn loop_expr(mut x: u64, y: u64, z: u64) -> u64 {
loop { // #break
x += z;
loop {
x += z; // #break
if x + y > 1000 {
return x;

View file

@ -0,0 +1,45 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
let a = 0;
{
let b = &Some(a);
}
let c = 1;
}
// END RUST SOURCE
// START rustc.node4.PreTrans.after.mir
// bb0: {
// StorageLive(var0); // scope 0 at storage_ranges.rs:12:9: 12:10
// var0 = const 0i32; // scope 0 at storage_ranges.rs:12:13: 12:14
// StorageLive(var1); // scope 1 at storage_ranges.rs:14:13: 14:14
// StorageLive(tmp1); // scope 1 at storage_ranges.rs:14:18: 14:25
// StorageLive(tmp2); // scope 1 at storage_ranges.rs:14:23: 14:24
// tmp2 = var0; // scope 1 at storage_ranges.rs:14:23: 14:24
// tmp1 = std::prelude::v1::Some<i32>(tmp2,); // scope 1 at storage_ranges.rs:14:18: 14:25
// var1 = &tmp1; // scope 1 at storage_ranges.rs:14:17: 14:25
// StorageDead(tmp2); // scope 1 at storage_ranges.rs:14:23: 14:24
// tmp0 = (); // scope 2 at storage_ranges.rs:13:5: 15:6
// StorageDead(tmp1); // scope 1 at storage_ranges.rs:14:18: 14:25
// StorageDead(var1); // scope 1 at storage_ranges.rs:14:13: 14:14
// StorageLive(var2); // scope 1 at storage_ranges.rs:16:9: 16:10
// var2 = const 1i32; // scope 1 at storage_ranges.rs:16:13: 16:14
// return = (); // scope 3 at storage_ranges.rs:11:11: 17:2
// StorageDead(var2); // scope 1 at storage_ranges.rs:16:9: 16:10
// StorageDead(var0); // scope 0 at storage_ranges.rs:12:9: 12:10
// goto -> bb1; // scope 0 at storage_ranges.rs:11:1: 17:2
// }
//
// bb1: {
// return; // scope 0 at storage_ranges.rs:11:1: 17:2
// }
// END rustc.node4.PreTrans.after.mir