
Issue #3511 - Rationalize temporary lifetimes.

Major changes:

- Define temporary scopes in a syntax-based way that defaults to
  the innermost statement or conditional block, except in a `let`
  initializer, where we default to the innermost block. Rules are
  documented in the code, but not in the manual (yet). See the new
  test run-pass/cleanup-value-scopes.rs for examples, and the
  sketch just after this list.
- Refactor Datum to better define cleanup roles.
- Refactor cleanup scopes to not be tied to basic blocks, permitting
  us to have a very large number of scopes (one per AST node).
- Introduce nascent documentation in trans/doc.rs covering datums and
  cleanup in a more comprehensive way.
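
For illustration, a minimal sketch of the two rules in user code (modern Rust syntax; `make` is a hypothetical helper, not from the commit):

```rust
fn make() -> String { "temp".to_string() }

fn main() {
    // Default rule: a temporary lives to the end of the innermost
    // enclosing statement.
    let n = make().len(); // the String from make() is freed at the `;`

    // `let` initializer exception: a borrowed temporary is extended
    // to the innermost enclosing block.
    let r = &make(); // this String lives until the end of main()
    println!("{} {}", n, r);
}
```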
Niko Matsakis 2014-01-15 14:39:08 -05:00
commit 419ac4a1b8
64 changed files with 4826 additions and 3691 deletions


@ -1698,6 +1698,7 @@ pub mod llvm {
pub fn LLVMDICompositeTypeSetTypeArray(CompositeType: ValueRef, TypeArray: ValueRef);
pub fn LLVMTypeToString(Type: TypeRef) -> *c_char;
pub fn LLVMValueToString(value_ref: ValueRef) -> *c_char;
pub fn LLVMIsAArgument(value_ref: ValueRef) -> ValueRef;
@ -1847,8 +1848,10 @@ impl TypeNames {
pub fn val_to_str(&self, val: ValueRef) -> ~str {
unsafe {
let ty = Type::from_ref(llvm::LLVMTypeOf(val));
self.type_to_str(ty)
let s = llvm::LLVMValueToString(val);
let ret = from_c_str(s);
free(s as *c_void);
ret
}
}
}


@ -12,13 +12,14 @@
use c = metadata::common;
use cstore = metadata::cstore;
use driver::session::Session;
use e = metadata::encoder;
use metadata::decoder;
use e = metadata::encoder;
use middle::freevars::freevar_entry;
use middle::region;
use metadata::tydecode;
use metadata::tydecode::{DefIdSource, NominalType, TypeWithId, TypeParameter,
RegionParameter};
use metadata::tyencode;
use middle::freevars::freevar_entry;
use middle::typeck::{method_origin, method_map_entry};
use middle::{ty, typeck, moves};
use middle;
@ -146,6 +147,7 @@ pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
debug!("< Decoded inlined fn: {}::{}",
ast_map::path_to_str(path, token::get_ident_interner()),
tcx.sess.str_of(ident));
region::resolve_inlined_item(tcx.sess, &tcx.region_maps, &ii);
decode_side_tables(xcx, ast_doc);
match ii {
ast::IIItem(i) => {


@ -220,7 +220,7 @@ impl<'a> GuaranteeLifetimeContext<'a> {
// If inside of a match arm, expand the rooting to the entire
// match. See the detailed discussion in `check()` above.
let mut root_scope = match discr_scope {
let root_scope = match discr_scope {
None => root_scope,
Some(id) => {
if self.bccx.is_subscope_of(root_scope, id) {
@ -231,17 +231,6 @@ impl<'a> GuaranteeLifetimeContext<'a> {
}
};
// FIXME(#3511) grow to the nearest cleanup scope---this can
// cause observable errors if freezing!
if !self.bccx.tcx.region_maps.is_cleanup_scope(root_scope) {
debug!("{:?} is not a cleanup scope, adjusting", root_scope);
let cleanup_scope =
self.bccx.tcx.region_maps.cleanup_scope(root_scope);
root_scope = cleanup_scope;
}
// Add a record of what is required
let rm_key = root_map_key {id: cmt_deref.id, derefs: derefs};
let root_info = RootInfo {scope: root_scope};
@ -301,8 +290,8 @@ impl<'a> GuaranteeLifetimeContext<'a> {
// See the SCOPE(LV) function in doc.rs
match cmt.cat {
mc::cat_rvalue(cleanup_scope_id) => {
ty::ReScope(cleanup_scope_id)
mc::cat_rvalue(temp_scope) => {
temp_scope
}
mc::cat_copied_upvar(_) => {
ty::ReScope(self.item_scope_id)
@ -313,7 +302,7 @@ impl<'a> GuaranteeLifetimeContext<'a> {
mc::cat_local(local_id) |
mc::cat_arg(local_id) |
mc::cat_self(local_id) => {
self.bccx.tcx.region_maps.encl_region(local_id)
ty::ReScope(self.bccx.tcx.region_maps.var_scope(local_id))
}
mc::cat_deref(_, _, mc::unsafe_ptr(..)) => {
ty::ReStatic


@ -662,8 +662,9 @@ impl<'a> GatherLoanCtxt<'a> {
//! with immutable `&` pointers, because borrows of such pointers
//! do not require restrictions and hence do not cause a loan.
let lexical_scope = self.bccx.tcx.region_maps.encl_scope(lp.node_id());
if self.bccx.tcx.region_maps.is_subscope_of(lexical_scope, loan_scope) {
let rm = &self.bccx.tcx.region_maps;
let lexical_scope = rm.var_scope(lp.node_id());
if rm.is_subscope_of(lexical_scope, loan_scope) {
lexical_scope
} else {
assert!(self.bccx.tcx.region_maps.is_subscope_of(loan_scope, lexical_scope));
@ -688,7 +689,7 @@ impl<'a> GatherLoanCtxt<'a> {
let arg_cmt = mc_ctxt.cat_rvalue(
arg.id,
arg.pat.span,
body.id, // Arguments live only as long as the fn body.
ty::ReScope(body.id), // Args live only as long as the fn body.
arg_ty);
self.gather_pat(arg_cmt, arg.pat, None);


@ -471,7 +471,7 @@ impl MoveData {
for path in paths.get().iter() {
match *path.loan_path {
LpVar(id) => {
let kill_id = tcx.region_maps.encl_scope(id);
let kill_id = tcx.region_maps.var_scope(id);
let path = {
let path_map = self.path_map.borrow();
*path_map.get().get(&path.loan_path)
@ -490,7 +490,7 @@ impl MoveData {
var_assignments.get().iter().enumerate() {
match *self.path_loan_path(assignment.path) {
LpVar(id) => {
let kill_id = tcx.region_maps.encl_scope(id);
let kill_id = tcx.region_maps.var_scope(id);
dfcx_assign.add_kill(kill_id, assignment_index);
}
LpExtend(..) => {


@ -60,7 +60,7 @@ use syntax::parse::token;
#[deriving(Eq)]
pub enum categorization {
cat_rvalue(ast::NodeId), // temporary val, argument is its scope
cat_rvalue(ty::Region), // temporary val, argument is its scope
cat_static_item,
cat_copied_upvar(CopiedUpvar), // upvar copied into @fn or ~fn env
cat_stack_upvar(cmt), // by ref upvar from ||
@ -585,21 +585,26 @@ impl mem_categorization_ctxt {
pub fn cat_rvalue_node<N:ast_node>(&self,
node: &N,
expr_ty: ty::t) -> cmt {
self.cat_rvalue(node.id(),
node.span(),
self.tcx.region_maps.cleanup_scope(node.id()),
expr_ty)
match self.tcx.region_maps.temporary_scope(node.id()) {
Some(scope) => {
self.cat_rvalue(node.id(), node.span(),
ty::ReScope(scope), expr_ty)
}
None => {
self.cat_rvalue(node.id(), node.span(), ty::ReStatic, expr_ty)
}
}
}
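
The `None` arm covers rvalues with no enclosing temporary scope, which get the static region. A loose modern-Rust analogue (hypothetical example; today this case falls under constant promotion):

```rust
fn main() {
    // A constant rvalue needing no cleanup can be borrowed for the
    // static region; no temporary scope is involved.
    let x: &'static i32 = &3;
    println!("{}", x);
}
```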
pub fn cat_rvalue(&self,
cmt_id: ast::NodeId,
span: Span,
cleanup_scope_id: ast::NodeId,
temp_scope: ty::Region,
expr_ty: ty::t) -> cmt {
@cmt_ {
id:cmt_id,
span:span,
cat:cat_rvalue(cleanup_scope_id),
cat:cat_rvalue(temp_scope),
mutbl:McDeclared,
ty:expr_ty
}


@ -108,3 +108,14 @@ pub fn pat_contains_bindings(dm: resolve::DefMap, pat: &Pat) -> bool {
});
contains_bindings
}
pub fn simple_identifier<'a>(pat: &'a Pat) -> Option<&'a Path> {
match pat.node {
PatIdent(BindByValue(_), ref path, None) => {
Some(path)
}
_ => {
None
}
}
}


@ -31,6 +31,7 @@ use syntax::codemap::Span;
use syntax::{ast, visit};
use syntax::visit::{Visitor, FnKind};
use syntax::ast::{Block, Item, FnDecl, NodeId, Arm, Pat, Stmt, Expr, Local};
use syntax::ast_util::{stmt_id};
/**
The region maps encode information about region relationships.
@ -46,30 +47,30 @@ The region maps encode information about region relationships.
- the free region map is populated during type check as we check
each function. See the function `relate_free_regions` for
more information.
- `cleanup_scopes` includes scopes where trans cleanups occur
- this is intended to reflect the current state of trans, not
necessarily how I think things ought to work
- `temporary_scopes` includes scopes where cleanups for temporaries occur.
These are statements and loop/fn bodies.
*/
pub struct RegionMaps {
priv scope_map: RefCell<HashMap<ast::NodeId, ast::NodeId>>,
priv var_map: RefCell<HashMap<ast::NodeId, ast::NodeId>>,
priv free_region_map: RefCell<HashMap<FreeRegion, ~[FreeRegion]>>,
priv cleanup_scopes: RefCell<HashSet<ast::NodeId>>,
priv rvalue_scopes: RefCell<HashMap<ast::NodeId, ast::NodeId>>,
priv terminating_scopes: RefCell<HashSet<ast::NodeId>>,
}
#[deriving(Clone)]
pub struct Context {
// Scope where variables should be parented to
var_parent: Option<ast::NodeId>,
// Innermost enclosing expression
parent: Option<ast::NodeId>,
}
struct RegionResolutionVisitor {
struct RegionResolutionVisitor<'a> {
sess: Session,
// Generated maps:
region_maps: RegionMaps,
region_maps: &'a RegionMaps,
}
@ -91,22 +92,41 @@ impl RegionMaps {
free_region_map.get().insert(sub, ~[sup]);
}
pub fn record_parent(&self, sub: ast::NodeId, sup: ast::NodeId) {
debug!("record_parent(sub={:?}, sup={:?})", sub, sup);
pub fn record_encl_scope(&self, sub: ast::NodeId, sup: ast::NodeId) {
debug!("record_encl_scope(sub={}, sup={})", sub, sup);
assert!(sub != sup);
let mut scope_map = self.scope_map.borrow_mut();
scope_map.get().insert(sub, sup);
}
pub fn record_cleanup_scope(&self, scope_id: ast::NodeId) {
//! Records that a scope is a CLEANUP SCOPE. This is invoked
//! from within regionck. We wait until regionck because we do
//! not know which operators are overloaded until that point,
//! and only overloaded operators result in cleanup scopes.
pub fn record_var_scope(&self, var: ast::NodeId, lifetime: ast::NodeId) {
debug!("record_var_scope(sub={}, sup={})", var, lifetime);
assert!(var != lifetime);
let mut cleanup_scopes = self.cleanup_scopes.borrow_mut();
cleanup_scopes.get().insert(scope_id);
let mut var_map = self.var_map.borrow_mut();
var_map.get().insert(var, lifetime);
}
pub fn record_rvalue_scope(&self, var: ast::NodeId, lifetime: ast::NodeId) {
debug!("record_rvalue_scope(sub={}, sup={})", var, lifetime);
assert!(var != lifetime);
let mut rvalue_scopes = self.rvalue_scopes.borrow_mut();
rvalue_scopes.get().insert(var, lifetime);
}
pub fn mark_as_terminating_scope(&self, scope_id: ast::NodeId) {
/*!
* Records that a scope is a TERMINATING SCOPE. Whenever we
* create automatic temporaries -- e.g. by an
* expression like `a().f` -- they will be freed within
* the innermost terminating scope.
*/
debug!("record_terminating_scope(scope_id={})", scope_id);
let mut terminating_scopes = self.terminating_scopes.borrow_mut();
terminating_scopes.get().insert(scope_id);
}
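
A user-level illustration of the doc comment above (hypothetical example, modern syntax): the temporary produced by `a()` in `a().f` dies at the end of the enclosing statement:

```rust
struct A { f: u32 }
fn a() -> A { A { f: 7 } }

fn main() {
    // The temporary `A` returned by `a()` is freed at the end of this
    // statement -- the innermost terminating scope.
    let f = a().f;
    println!("{}", f);
}
```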
pub fn opt_encl_scope(&self, id: ast::NodeId) -> Option<ast::NodeId> {
@ -122,24 +142,51 @@ impl RegionMaps {
let scope_map = self.scope_map.borrow();
match scope_map.get().find(&id) {
Some(&r) => r,
None => { fail!("No enclosing scope for id {:?}", id); }
None => { fail!("No enclosing scope for id {}", id); }
}
}
pub fn is_cleanup_scope(&self, scope_id: ast::NodeId) -> bool {
let cleanup_scopes = self.cleanup_scopes.borrow();
cleanup_scopes.get().contains(&scope_id)
pub fn var_scope(&self, var_id: ast::NodeId) -> ast::NodeId {
/*!
* Returns the lifetime of the local variable `var_id`
*/
let var_map = self.var_map.borrow();
match var_map.get().find(&var_id) {
Some(&r) => r,
None => { fail!("No enclosing scope for id {}", var_id); }
}
}
pub fn cleanup_scope(&self, expr_id: ast::NodeId) -> ast::NodeId {
//! Returns the scope when temps in expr will be cleaned up
pub fn temporary_scope(&self, expr_id: ast::NodeId) -> Option<ast::NodeId> {
//! Returns the scope when temp created by expr_id will be cleaned up
// check for a designated rvalue scope
let rvalue_scopes = self.rvalue_scopes.borrow();
match rvalue_scopes.get().find(&expr_id) {
Some(&s) => {
debug!("temporary_scope({}) = {} [custom]", expr_id, s);
return Some(s);
}
None => { }
}
// else, locate the innermost terminating scope
let mut id = self.encl_scope(expr_id);
let cleanup_scopes = self.cleanup_scopes.borrow();
while !cleanup_scopes.get().contains(&id) {
id = self.encl_scope(id);
let terminating_scopes = self.terminating_scopes.borrow();
while !terminating_scopes.get().contains(&id) {
match self.opt_encl_scope(id) {
Some(p) => {
id = p;
}
None => {
debug!("temporary_scope({}) = None", expr_id);
return None;
}
}
}
return id;
debug!("temporary_scope({}) = {} [enclosing]", expr_id, id);
return Some(id);
}
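
A simplified standalone model of this two-step lookup, assuming scopes are plain integer ids (the `ScopeModel` type and its fields are hypothetical, not the actual `RegionMaps` API):

```rust
use std::collections::{HashMap, HashSet};

struct ScopeModel {
    parent: HashMap<u32, u32>,        // scope -> enclosing scope
    rvalue_scopes: HashMap<u32, u32>, // expr -> designated temp scope
    terminating: HashSet<u32>,        // statements, loop/fn bodies, ...
}

impl ScopeModel {
    fn temporary_scope(&self, expr_id: u32) -> Option<u32> {
        // 1. A designated rvalue scope (e.g. set by a `let`) wins.
        if let Some(&s) = self.rvalue_scopes.get(&expr_id) {
            return Some(s);
        }
        // 2. Otherwise walk outward to the innermost terminating scope,
        //    giving up if we run off the top of the parent chain.
        let mut id = *self.parent.get(&expr_id)?;
        while !self.terminating.contains(&id) {
            id = *self.parent.get(&id)?;
        }
        Some(id)
    }
}
```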
pub fn encl_region(&self, id: ast::NodeId) -> ty::Region {
@ -148,6 +195,12 @@ impl RegionMaps {
ty::ReScope(self.encl_scope(id))
}
pub fn var_region(&self, id: ast::NodeId) -> ty::Region {
//! Returns the lifetime of the variable `id`.
ty::ReScope(self.var_scope(id))
}
pub fn scopes_intersect(&self, scope1: ast::NodeId, scope2: ast::NodeId)
-> bool {
self.is_subscope_of(scope1, scope2) ||
@ -168,7 +221,7 @@ impl RegionMaps {
let scope_map = self.scope_map.borrow();
match scope_map.get().find(&s) {
None => {
debug!("is_subscope_of({:?}, {:?}, s={:?})=false",
debug!("is_subscope_of({}, {}, s={})=false",
subscope, superscope, s);
return false;
@ -177,7 +230,7 @@ impl RegionMaps {
}
}
debug!("is_subscope_of({:?}, {:?})=true",
debug!("is_subscope_of({}, {})=true",
subscope, superscope);
return true;
@ -323,67 +376,138 @@ impl RegionMaps {
}
/// Records the current parent (if any) as the parent of `child_id`.
fn parent_to_expr(visitor: &mut RegionResolutionVisitor,
cx: Context, child_id: ast::NodeId, sp: Span) {
debug!("region::parent_to_expr(span={:?})",
visitor.sess.codemap.span_to_str(sp));
for parent_id in cx.parent.iter() {
visitor.region_maps.record_parent(child_id, *parent_id);
fn record_superlifetime(visitor: &mut RegionResolutionVisitor,
cx: Context,
child_id: ast::NodeId,
_sp: Span) {
for &parent_id in cx.parent.iter() {
visitor.region_maps.record_encl_scope(child_id, parent_id);
}
}
/// Records the lifetime of a local variable as `cx.var_parent`
fn record_var_lifetime(visitor: &mut RegionResolutionVisitor,
cx: Context,
var_id: ast::NodeId,
_sp: Span) {
match cx.var_parent {
Some(parent_id) => {
visitor.region_maps.record_var_scope(var_id, parent_id);
}
None => {
// this can happen in extern fn declarations like
//
// extern fn isalnum(c: c_int) -> c_int
}
}
}
fn resolve_block(visitor: &mut RegionResolutionVisitor,
blk: &ast::Block,
cx: Context) {
// Record the parent of this block.
parent_to_expr(visitor, cx, blk.id, blk.span);
debug!("resolve_block(blk.id={})", blk.id);
// Descend.
let new_cx = Context {var_parent: Some(blk.id),
parent: Some(blk.id)};
visit::walk_block(visitor, blk, new_cx);
// Record the parent of this block.
record_superlifetime(visitor, cx, blk.id, blk.span);
// We treat the tail expression in the block (if any) somewhat
// differently from the statements. The issue has to do with
// temporary lifetimes. If the user writes:
//
// {
// ... (&foo()) ...
// }
//
let subcx = Context {var_parent: Some(blk.id), parent: Some(blk.id)};
visit::walk_block(visitor, blk, subcx);
}
fn resolve_arm(visitor: &mut RegionResolutionVisitor,
arm: &ast::Arm,
cx: Context) {
visitor.region_maps.mark_as_terminating_scope(arm.body.id);
match arm.guard {
Some(expr) => {
visitor.region_maps.mark_as_terminating_scope(expr.id);
}
None => { }
}
visit::walk_arm(visitor, arm, cx);
}
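
In user terms (hypothetical example, modern syntax): guards and arm bodies are terminating scopes, so a temporary created while evaluating a guard cannot outlive it:

```rust
fn classify(opt: Option<i32>) -> &'static str {
    match opt {
        // The String temporary built in the guard is freed as soon as
        // the guard finishes: the guard is a terminating scope.
        Some(n) if n.to_string().len() > 3 => "big",
        Some(_) => "small",
        None => "none",
    }
}

fn main() { println!("{}", classify(Some(1234))); }
```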
fn resolve_pat(visitor: &mut RegionResolutionVisitor,
pat: &ast::Pat,
cx: Context) {
assert_eq!(cx.var_parent, cx.parent);
parent_to_expr(visitor, cx, pat.id, pat.span);
record_superlifetime(visitor, cx, pat.id, pat.span);
// If this is a binding (or maybe a binding, I'm too lazy to check
// the def map) then record the lifetime of that binding.
match pat.node {
ast::PatIdent(..) => {
record_var_lifetime(visitor, cx, pat.id, pat.span);
}
_ => { }
}
visit::walk_pat(visitor, pat, cx);
}
fn resolve_stmt(visitor: &mut RegionResolutionVisitor,
stmt: &ast::Stmt,
cx: Context) {
match stmt.node {
ast::StmtDecl(..) => {
visit::walk_stmt(visitor, stmt, cx);
}
ast::StmtExpr(_, stmt_id) |
ast::StmtSemi(_, stmt_id) => {
parent_to_expr(visitor, cx, stmt_id, stmt.span);
let expr_cx = Context {parent: Some(stmt_id), ..cx};
visit::walk_stmt(visitor, stmt, expr_cx);
}
ast::StmtMac(..) => visitor.sess.bug("unexpanded macro")
}
let stmt_id = stmt_id(stmt);
debug!("resolve_stmt(stmt.id={})", stmt_id);
visitor.region_maps.mark_as_terminating_scope(stmt_id);
record_superlifetime(visitor, cx, stmt_id, stmt.span);
let subcx = Context {parent: Some(stmt_id), ..cx};
visit::walk_stmt(visitor, stmt, subcx);
}
fn resolve_expr(visitor: &mut RegionResolutionVisitor,
expr: &ast::Expr,
cx: Context) {
parent_to_expr(visitor, cx, expr.id, expr.span);
debug!("resolve_expr(expr.id={})", expr.id);
record_superlifetime(visitor, cx, expr.id, expr.span);
let mut new_cx = cx;
new_cx.parent = Some(expr.id);
match expr.node {
ast::ExprAssignOp(..) | ast::ExprIndex(..) | ast::ExprBinary(..) |
// Conditional or repeating scopes are always terminating
// scopes, meaning that temporaries cannot outlive them.
// This ensures fixed size stacks.
ast::ExprBinary(_, ast::BiAnd, _, r) |
ast::ExprBinary(_, ast::BiOr, _, r) => {
// For shortcircuiting operators, mark the RHS as a terminating
// scope since it only executes conditionally.
visitor.region_maps.mark_as_terminating_scope(r.id);
}
ast::ExprIf(_, then, Some(otherwise)) => {
visitor.region_maps.mark_as_terminating_scope(then.id);
visitor.region_maps.mark_as_terminating_scope(otherwise.id);
}
ast::ExprIf(_, then, None) => {
visitor.region_maps.mark_as_terminating_scope(then.id);
}
ast::ExprLoop(body, _) |
ast::ExprWhile(_, body) => {
visitor.region_maps.mark_as_terminating_scope(body.id);
}
ast::ExprMatch(..) => {
new_cx.var_parent = Some(expr.id);
}
ast::ExprAssignOp(..) | ast::ExprIndex(..) |
ast::ExprUnary(..) | ast::ExprCall(..) | ast::ExprMethodCall(..) => {
// FIXME(#6268) Nested method calls
//
@ -402,11 +526,7 @@ fn resolve_expr(visitor: &mut RegionResolutionVisitor,
// for an extended explanation of why this distinction is
// important.
//
// parent_to_expr(new_cx, expr.callee_id);
}
ast::ExprMatch(..) => {
new_cx.var_parent = Some(expr.id);
// record_superlifetime(new_cx, expr.callee_id);
}
_ => {}
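
A sketch of why repeating scopes must terminate temporaries (hypothetical example, modern syntax):

```rust
fn main() {
    // The loop body is a terminating scope: each iteration's
    // temporary String dies inside it, keeping the stack fixed-size.
    let mut total = 0;
    for i in 0..1000 {
        total += i.to_string().len();
    }
    println!("{}", total);
}
```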
@ -419,9 +539,254 @@ fn resolve_expr(visitor: &mut RegionResolutionVisitor,
fn resolve_local(visitor: &mut RegionResolutionVisitor,
local: &ast::Local,
cx: Context) {
assert_eq!(cx.var_parent, cx.parent);
parent_to_expr(visitor, cx, local.id, local.span);
debug!("resolve_local(local.id={},local.init={})",
local.id,local.init.is_some());
let blk_id = match cx.var_parent {
Some(id) => id,
None => {
visitor.sess.span_bug(
local.span,
"Local without enclosing block");
}
};
// For convenience in trans, associate with the local-id the var
// scope that will be used for any bindings declared in this
// pattern.
visitor.region_maps.record_var_scope(local.id, blk_id);
// As an exception to the normal rules governing temporary
// lifetimes, initializers in a let have a temporary lifetime
// of the enclosing block. This means that e.g. a program
// like the following is legal:
//
// let ref x = HashMap::new();
//
// Because the hash map will be freed in the enclosing block.
//
// We express the rules more formally based on 3 grammars (defined
// fully in the helpers below that implement them):
//
// 1. `E&`, which matches expressions like `&<rvalue>` that
// own a pointer into the stack.
//
// 2. `P&`, which matches patterns like `ref x` or `(ref x, ref
// y)` that produce ref bindings into the value they are
// matched against or something (at least partially) owned by
// the value they are matched against. (By partially owned,
// I mean that creating a binding into a ref-counted or managed value
// would still count.)
//
// 3. `ET`, which matches both rvalues like `foo()` as well as lvalues
// based on rvalues like `foo().x[2].y`.
//
// A subexpression `<rvalue>` that appears in a let initializer
// `let pat [: ty] = expr` has an extended temporary lifetime if
// any of the following conditions are met:
//
// A. `pat` matches `P&` and `expr` matches `ET`
// (covers cases where `pat` creates ref bindings into an rvalue
// produced by `expr`)
// B. `ty` is a borrowed pointer and `expr` matches `ET`
// (covers cases where coercion creates a borrow)
// C. `expr` matches `E&`
// (covers cases `expr` borrows an rvalue that is then assigned
// to memory (at least partially) owned by the binding)
//
// Here are some examples, hopefully giving an intuition for where each
// rule comes into play and why:
//
// Rule A. `let (ref x, ref y) = (foo().x, 44)`. The rvalue
// `(foo().x, 44)` would have an extended lifetime, but not `foo()`.
//
// Rule B. `let x: &[...] = [foo().x]`. The rvalue `[foo().x]`
// would have an extended lifetime, but not `foo()`.
//
// Rule C. `let x = &foo().x`. The rvalue `foo()` would have an
// extended lifetime.
//
// In some cases, multiple rules may apply (though not to the same
// rvalue). For example:
//
// let ref x = [&a(), &b()];
//
// Here, the expression `[...]` has an extended lifetime due to rule
// A, but the inner rvalues `a()` and `b()` have an extended lifetime
// due to rule C.
//
// FIXME -- Note that `[]` patterns work more smoothly post-DST.
match local.init {
Some(expr) => {
record_rvalue_scope_if_borrow_expr(visitor, expr, blk_id);
if is_binding_pat(local.pat) || is_borrowed_ty(local.ty) {
record_rvalue_scope(visitor, expr, blk_id);
}
}
None => { }
}
visit::walk_local(visitor, local, cx);
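
As a rough user-level illustration of rules A-C (hypothetical examples in modern syntax; the comment above uses 2014-era Rust):

```rust
use std::collections::HashMap;

fn foo() -> (i32, i32) { (22, 44) }

fn main() {
    // Rule A: `ref` bindings into the rvalue extend its lifetime.
    let (ref x, ref y) = foo();

    // Rule B: a borrowed type annotation extends the borrowed rvalue.
    let v: &[i32] = &[1, 2, 3];

    // Rule C: borrowing an rvalue stored into the binding.
    let m = &HashMap::<i32, i32>::new();

    println!("{} {} {:?} {:?}", x, y, v, m);
}
```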
fn is_binding_pat(pat: &ast::Pat) -> bool {
/*!
* True if `pat` match the `P&` nonterminal:
*
* P& = ref X
* | StructName { ..., P&, ... }
* | [ ..., P&, ... ]
* | ( ..., P&, ... )
* | ~P&
* | box P&
*/
match pat.node {
ast::PatIdent(ast::BindByRef(_), _, _) => true,
ast::PatStruct(_, ref field_pats, _) => {
field_pats.iter().any(|fp| is_binding_pat(fp.pat))
}
ast::PatVec(ref pats1, ref pats2, ref pats3) => {
pats1.iter().any(|&p| is_binding_pat(p)) ||
pats2.iter().any(|&p| is_binding_pat(p)) ||
pats3.iter().any(|&p| is_binding_pat(p))
}
ast::PatTup(ref subpats) => {
subpats.iter().any(|&p| is_binding_pat(p))
}
ast::PatUniq(subpat) => {
is_binding_pat(subpat)
}
_ => false,
}
}
fn is_borrowed_ty(ty: &ast::Ty) -> bool {
/*!
* True if `ty` is a borrowed pointer type
* like `&int` or `&[...]`.
*/
match ty.node {
ast::TyRptr(..) => true,
_ => false
}
}
fn record_rvalue_scope_if_borrow_expr(visitor: &mut RegionResolutionVisitor,
expr: &ast::Expr,
blk_id: ast::NodeId) {
/*!
* If `expr` matches the `E&` grammar, then records an extended
* rvalue scope as appropriate:
*
* E& = & ET
* | StructName { ..., f: E&, ... }
* | [ ..., E&, ... ]
* | ( ..., E&, ... )
* | {...; E&}
* | ~E&
* | E& as ...
* | ( E& )
*/
match expr.node {
ast::ExprAddrOf(_, subexpr) => {
record_rvalue_scope_if_borrow_expr(visitor, subexpr, blk_id);
record_rvalue_scope(visitor, subexpr, blk_id);
}
ast::ExprStruct(_, ref fields, _) => {
for field in fields.iter() {
record_rvalue_scope_if_borrow_expr(
visitor, field.expr, blk_id);
}
}
ast::ExprVstore(subexpr, _) => {
visitor.region_maps.record_rvalue_scope(subexpr.id, blk_id);
record_rvalue_scope_if_borrow_expr(visitor, subexpr, blk_id);
}
ast::ExprVec(ref subexprs, _) |
ast::ExprTup(ref subexprs) => {
for &subexpr in subexprs.iter() {
record_rvalue_scope_if_borrow_expr(
visitor, subexpr, blk_id);
}
}
ast::ExprUnary(_, ast::UnUniq, subexpr) => {
record_rvalue_scope_if_borrow_expr(visitor, subexpr, blk_id);
}
ast::ExprCast(subexpr, _) |
ast::ExprParen(subexpr) => {
record_rvalue_scope_if_borrow_expr(visitor, subexpr, blk_id)
}
ast::ExprBlock(ref block) => {
match block.expr {
Some(subexpr) => {
record_rvalue_scope_if_borrow_expr(
visitor, subexpr, blk_id);
}
None => { }
}
}
_ => {
}
}
}
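
A hypothetical example (modern syntax) of the `E&` grammar in action, echoing the `[&a(), &b()]` case from the comment above:

```rust
fn a() -> i32 { 1 }
fn b() -> i32 { 2 }

fn main() {
    // `[ ..., E&, ... ]`: the rvalues produced by `a()` and `b()` are
    // both extended so the borrows stored in the array stay valid.
    let ref pair = [&a(), &b()];
    println!("{}", pair[0] + pair[1]);
}
```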
fn record_rvalue_scope<'a>(visitor: &mut RegionResolutionVisitor,
expr: &'a ast::Expr,
blk_id: ast::NodeId) {
/*!
* Applied to an expression `expr` if `expr` -- or something
* owned or partially owned by `expr` -- is going to be
* indirectly referenced by a variable in a let statement. In
* that case, the "temporary lifetime" of `expr` is extended
* to be the block enclosing the `let` statement.
*
* More formally, if `expr` matches the grammar `ET`, record
* the rvalue scope of the matching `<rvalue>` as `blk_id`:
*
* ET = *ET
* | ET[...]
* | ET.f
* | (ET)
* | <rvalue>
*
* Note: ET is intended to match "rvalues or
* lvalues based on rvalues".
*/
let mut expr = expr;
loop {
// Note: give all the expressions matching `ET` with the
// extended temporary lifetime, not just the innermost rvalue,
// because in trans if we must compile e.g. `*rvalue()`
// into a temporary, we request the temporary scope of the
// outer expression.
visitor.region_maps.record_rvalue_scope(expr.id, blk_id);
match expr.node {
ast::ExprAddrOf(_, ref subexpr) |
ast::ExprUnary(_, ast::UnDeref, ref subexpr) |
ast::ExprField(ref subexpr, _, _) |
ast::ExprIndex(_, ref subexpr, _) |
ast::ExprParen(ref subexpr) => {
let subexpr: &'a @Expr = subexpr; // FIXME
expr = &**subexpr;
}
_ => {
return;
}
}
}
}
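
And a hypothetical example (modern syntax) of `ET`, an lvalue based on an rvalue; every expression along the path shares the extended scope:

```rust
struct Point { x: [i32; 3] }
fn mk() -> Point { Point { x: [1, 2, 3] } }

fn main() {
    // `mk()`, `mk().x`, and `mk().x[2]` all get the extended scope,
    // so the borrow of the innermost element stays valid.
    let r = &mk().x[2];
    println!("{}", r);
}
```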
}
fn resolve_item(visitor: &mut RegionResolutionVisitor,
@ -439,22 +804,23 @@ fn resolve_fn(visitor: &mut RegionResolutionVisitor,
sp: Span,
id: ast::NodeId,
cx: Context) {
debug!("region::resolve_fn(id={:?}, \
debug!("region::resolve_fn(id={}, \
span={:?}, \
body.id={:?}, \
cx.parent={:?})",
body.id={}, \
cx.parent={})",
id,
visitor.sess.codemap.span_to_str(sp),
body.id,
cx.parent);
visitor.region_maps.mark_as_terminating_scope(body.id);
// The arguments and `self` are parented to the body of the fn.
let decl_cx = Context {parent: Some(body.id),
var_parent: Some(body.id),
..cx};
var_parent: Some(body.id)};
match *fk {
visit::FkMethod(_, _, method) => {
visitor.region_maps.record_parent(method.self_id, body.id);
visitor.region_maps.record_var_scope(method.self_id, body.id);
}
_ => {}
}
@ -471,7 +837,7 @@ fn resolve_fn(visitor: &mut RegionResolutionVisitor,
visitor.visit_block(body, body_cx);
}
impl Visitor<Context> for RegionResolutionVisitor {
impl<'a> Visitor<Context> for RegionResolutionVisitor<'a> {
fn visit_block(&mut self, b: &Block, cx: Context) {
resolve_block(self, b, cx);
@ -503,16 +869,33 @@ impl Visitor<Context> for RegionResolutionVisitor {
}
pub fn resolve_crate(sess: Session, crate: &ast::Crate) -> RegionMaps {
let mut visitor = RegionResolutionVisitor {
sess: sess,
region_maps: RegionMaps {
scope_map: RefCell::new(HashMap::new()),
free_region_map: RefCell::new(HashMap::new()),
cleanup_scopes: RefCell::new(HashSet::new())
}
let maps = RegionMaps {
scope_map: RefCell::new(HashMap::new()),
var_map: RefCell::new(HashMap::new()),
free_region_map: RefCell::new(HashMap::new()),
rvalue_scopes: RefCell::new(HashMap::new()),
terminating_scopes: RefCell::new(HashSet::new()),
};
let cx = Context { parent: None, var_parent: None };
visit::walk_crate(&mut visitor, crate, cx);
return visitor.region_maps;
{
let mut visitor = RegionResolutionVisitor {
sess: sess,
region_maps: &maps
};
let cx = Context { parent: None, var_parent: None };
visit::walk_crate(&mut visitor, crate, cx);
}
return maps;
}
pub fn resolve_inlined_item(sess: Session,
region_maps: &RegionMaps,
item: &ast::InlinedItem) {
let cx = Context {parent: None,
var_parent: None};
let mut visitor = RegionResolutionVisitor {
sess: sess,
region_maps: region_maps,
};
visit::walk_inlined_item(&mut visitor, item, cx);
}


@ -2442,7 +2442,7 @@ impl Resolver {
match type_result {
BoundResult(target_module, name_bindings) => {
debug!("(resolving single import) found type target: {:?}",
name_bindings.type_def.get().unwrap().type_def);
{name_bindings.type_def.get().unwrap().type_def});
import_resolution.type_target.set(
Some(Target::new(target_module, name_bindings)));
import_resolution.type_id.set(directive.id);


@ -204,6 +204,8 @@ use middle::trans::adt;
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::callee;
use middle::trans::cleanup;
use middle::trans::cleanup::CleanupMethods;
use middle::trans::common::*;
use middle::trans::consts;
use middle::trans::controlflow;
@ -221,7 +223,6 @@ use util::ppaux::{Repr, vec_map_to_str};
use std::cell::Cell;
use std::hashmap::HashMap;
use std::ptr;
use std::vec;
use syntax::ast;
use syntax::ast::Ident;
@ -315,16 +316,18 @@ pub enum opt_result<'a> {
fn trans_opt<'a>(bcx: &'a Block<'a>, o: &Opt) -> opt_result<'a> {
let _icx = push_ctxt("match::trans_opt");
let ccx = bcx.ccx();
let bcx = bcx;
let mut bcx = bcx;
match *o {
lit(ExprLit(lit_expr)) => {
let datumblock = expr::trans_to_datum(bcx, lit_expr);
return single_result(datumblock.to_result());
let lit_datum = unpack_datum!(bcx, expr::trans(bcx, lit_expr));
let lit_datum = lit_datum.assert_rvalue(bcx); // literals are rvalues
let lit_datum = unpack_datum!(bcx, lit_datum.to_appropriate_datum(bcx));
return single_result(rslt(bcx, lit_datum.val));
}
lit(UnitLikeStructLit(pat_id)) => {
let struct_ty = ty::node_id_to_type(bcx.tcx(), pat_id);
let datumblock = datum::scratch_datum(bcx, struct_ty, "", true);
return single_result(datumblock.to_result(bcx));
let datum = datum::rvalue_scratch_datum(bcx, struct_ty, "");
return single_result(rslt(bcx, datum.val));
}
lit(ConstLit(lit_id)) => {
let (llval, _) = consts::get_const_val(bcx.ccx(), lit_id);
@ -1007,14 +1010,18 @@ fn extract_variant_args<'a>(
ExtractedBlock { vals: args, bcx: bcx }
}
fn match_datum<'a>(bcx: &'a Block<'a>, val: ValueRef, pat_id: ast::NodeId)
-> Datum {
//! Helper for converting from the ValueRef that we pass around in
//! the match code, which is always by ref, into a Datum. Eventually
//! we should just pass around a Datum and be done with it.
fn match_datum(bcx: &Block,
val: ValueRef,
pat_id: ast::NodeId)
-> Datum<Lvalue> {
/*!
* Helper for converting from the ValueRef that we pass around in
* the match code, which is always an lvalue, into a Datum. Eventually
* we should just pass around a Datum and be done with it.
*/
let ty = node_id_type(bcx, pat_id);
Datum {val: val, ty: ty, mode: datum::ByRef(RevokeClean)}
Datum(val, ty, Lvalue)
}
@ -1054,13 +1061,11 @@ fn extract_vec_elems<'a>(
ty::mt {ty: vt.unit_ty, mutbl: ast::MutImmutable},
ty::vstore_slice(ty::ReStatic)
);
let scratch = scratch_datum(bcx, slice_ty, "", false);
let scratch = rvalue_scratch_datum(bcx, slice_ty, "");
Store(bcx, slice_begin,
GEPi(bcx, scratch.val, [0u, abi::slice_elt_base])
);
GEPi(bcx, scratch.val, [0u, abi::slice_elt_base]));
Store(bcx, slice_len, GEPi(bcx, scratch.val, [0u, abi::slice_elt_len]));
elems[n] = scratch.val;
scratch.add_clean(bcx);
}
ExtractedBlock { vals: elems, bcx: bcx }
@ -1176,7 +1181,8 @@ impl<'a> DynamicFailureHandler<'a> {
_ => (),
}
let fail_cx = sub_block(self.bcx, "case_fallthrough");
let fcx = self.bcx.fcx;
let fail_cx = fcx.new_block(false, "case_fallthrough", None);
controlflow::trans_fail(fail_cx, Some(self.sp), self.msg);
self.finished.set(Some(fail_cx.llbb));
fail_cx.llbb
@ -1297,30 +1303,31 @@ fn compare_values<'a>(
fn store_non_ref_bindings<'a>(
bcx: &'a Block<'a>,
bindings_map: &BindingsMap,
mut opt_temp_cleanups: Option<&mut ~[ValueRef]>)
-> &'a Block<'a> {
opt_cleanup_scope: Option<cleanup::ScopeId>)
-> &'a Block<'a>
{
/*!
*
* For each copy/move binding, copy the value from the value
* being matched into its final home. This code executes once
* one of the patterns for a given arm has completely matched.
* It adds temporary cleanups to the `temp_cleanups` array,
* if one is provided.
* For each copy/move binding, copy the value from the value being
* matched into its final home. This code executes once one of
* the patterns for a given arm has completely matched. It adds
* cleanups to the `opt_cleanup_scope`, if one is provided.
*/
let fcx = bcx.fcx;
let mut bcx = bcx;
for (_, &binding_info) in bindings_map.iter() {
match binding_info.trmode {
TrByValue(lldest) => {
let llval = Load(bcx, binding_info.llmatch); // get a T*
let datum = Datum {val: llval, ty: binding_info.ty,
mode: ByRef(ZeroMem)};
bcx = datum.store_to(bcx, INIT, lldest);
opt_temp_cleanups.mutate(|temp_cleanups| {
add_clean_temp_mem(bcx, lldest, binding_info.ty);
temp_cleanups.push(lldest);
temp_cleanups
});
let datum = Datum(llval, binding_info.ty, Lvalue);
bcx = datum.store_to(bcx, lldest);
match opt_cleanup_scope {
None => {}
Some(s) => {
fcx.schedule_drop_mem(s, lldest, binding_info.ty);
}
}
}
TrByRef => {}
}
@ -1328,38 +1335,29 @@ fn store_non_ref_bindings<'a>(
return bcx;
}
fn insert_lllocals<'a>(
bcx: &'a Block<'a>,
bindings_map: &BindingsMap,
add_cleans: bool)
-> &'a Block<'a> {
fn insert_lllocals<'a>(bcx: &'a Block<'a>,
bindings_map: &BindingsMap,
cleanup_scope: cleanup::ScopeId)
-> &'a Block<'a> {
/*!
* For each binding in `data.bindings_map`, adds an appropriate entry into
* the `fcx.lllocals` map. If add_cleans is true, then adds cleanups for
* the bindings.
* the `fcx.lllocals` map, scheduling cleanup in `cleanup_scope`.
*/
let fcx = bcx.fcx;
for (&ident, &binding_info) in bindings_map.iter() {
let llval = match binding_info.trmode {
// By value bindings: use the stack slot that we
// copied/moved the value into
TrByValue(lldest) => lldest,
// By ref binding: use the ptr into the matched value
TrByRef => binding_info.llmatch
};
let datum = Datum {
val: llval,
ty: binding_info.ty,
mode: ByRef(ZeroMem)
};
if add_cleans {
match binding_info.trmode {
TrByValue(_) => datum.add_clean(bcx),
_ => {}
}
}
let datum = Datum(llval, binding_info.ty, Lvalue);
fcx.schedule_drop_mem(cleanup_scope, llval, binding_info.ty);
{
debug!("binding {:?} to {}",
@ -1396,24 +1394,23 @@ fn compile_guard<'r,
vec_map_to_str(vals, |v| bcx.val_to_str(*v)));
let _indenter = indenter();
// Lest the guard itself should fail, introduce a temporary cleanup
// scope for any non-ref bindings we create.
let temp_scope = bcx.fcx.push_custom_cleanup_scope();
let mut bcx = bcx;
let mut temp_cleanups = ~[];
bcx = store_non_ref_bindings(bcx,
data.bindings_map,
Some(&mut temp_cleanups));
bcx = insert_lllocals(bcx, data.bindings_map, false);
bcx = store_non_ref_bindings(bcx, data.bindings_map,
Some(cleanup::CustomScope(temp_scope)));
bcx = insert_lllocals(bcx, data.bindings_map,
cleanup::CustomScope(temp_scope));
let val = unpack_result!(bcx, {
with_scope_result(bcx, guard_expr.info(), "guard", |bcx| {
expr::trans_to_datum(bcx, guard_expr).to_result()
})
});
let val = bool_to_i1(bcx, val);
let val = unpack_datum!(bcx, expr::trans(bcx, guard_expr));
let val = val.to_llbool(bcx);
// Revoke the temp cleanups now that the guard successfully executed.
for llval in temp_cleanups.iter() {
revoke_clean(bcx, *llval);
}
// Cancel cleanups now that the guard successfully executed. If
// the guard was false, we will drop the values explicitly
// below. Otherwise, we'll add lvalue cleanups at the end.
bcx.fcx.pop_custom_cleanup_scope(temp_scope);
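
A generic sketch of this schedule-then-cancel pattern (hypothetical standalone code, not rustc's cleanup API):

```rust
struct TempScope(Vec<&'static str>);

impl TempScope {
    fn schedule(&mut self, what: &'static str) { self.0.push(what); }
    fn run_all(&mut self) {
        // Failure path: run scheduled cleanups in reverse order.
        while let Some(w) = self.0.pop() { println!("dropping {}", w); }
    }
    fn pop_scope(self) { /* success: cleanups are simply discarded */ }
}

fn main() {
    let mut scope = TempScope(Vec::new());
    scope.schedule("guard binding");
    let guard_ok = true; // stand-in for evaluating the guard expression
    if guard_ok { scope.pop_scope(); } else { scope.run_all(); }
}
```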
return with_cond(bcx, Not(bcx, val), |bcx| {
// Guard does not match: free the values we copied,
@ -1502,6 +1499,7 @@ fn compile_submatch_continue<'r,
chk: &FailureHandler,
col: uint,
val: ValueRef) {
let fcx = bcx.fcx;
let tcx = bcx.tcx();
let dm = tcx.def_map;
@ -1602,6 +1600,7 @@ fn compile_submatch_continue<'r,
debug!("options={:?}", opts);
let mut kind = no_branch;
let mut test_val = val;
debug!("test_val={}", bcx.val_to_str(test_val));
if opts.len() > 0u {
match opts[0] {
var(_, repr) => {
@ -1621,8 +1620,7 @@ fn compile_submatch_continue<'r,
},
vec_len(..) => {
let vt = tvec::vec_types(bcx, node_id_type(bcx, pat_id));
let unboxed = load_if_immediate(bcx, val, vt.vec_ty);
let (_, len) = tvec::get_base_and_len(bcx, unboxed, vt.vec_ty);
let (_, len) = tvec::get_base_and_len(bcx, val, vt.vec_ty);
test_val = len;
kind = compare_vec_len;
}
@ -1636,7 +1634,7 @@ fn compile_submatch_continue<'r,
}
let else_cx = match kind {
no_branch | single => bcx,
_ => sub_block(bcx, "match_else")
_ => bcx.fcx.new_temp_block("match_else")
};
let sw = if kind == switch {
Switch(bcx, test_val, else_cx.llbb, opts.len())
@ -1657,7 +1655,7 @@ fn compile_submatch_continue<'r,
let mut branch_chk = None;
let mut opt_cx = else_cx;
if !exhaustive || i+1 < len {
opt_cx = sub_block(bcx, "match_case");
opt_cx = bcx.fcx.new_temp_block("match_case");
match kind {
single => Br(bcx, opt_cx.llbb),
switch => {
@ -1678,75 +1676,65 @@ fn compile_submatch_continue<'r,
compare => {
let t = node_id_type(bcx, pat_id);
let Result {bcx: after_cx, val: matches} = {
with_scope_result(bcx, None, "compaReScope", |bcx| {
match trans_opt(bcx, opt) {
single_result(
Result {bcx, val}) => {
compare_values(bcx, test_val, val, t)
}
lower_bound(
Result {bcx, val}) => {
compare_scalar_types(
bcx, test_val, val,
t, ast::BiGe)
}
range_result(
Result {val: vbegin, ..},
Result {bcx, val: vend}) => {
let Result {bcx, val: llge} =
compare_scalar_types(
bcx, test_val,
vbegin, t, ast::BiGe);
let Result {bcx, val: llle} =
compare_scalar_types(
bcx, test_val, vend,
t, ast::BiLe);
rslt(bcx, And(bcx, llge, llle))
}
match trans_opt(bcx, opt) {
single_result(Result {bcx, val}) => {
compare_values(bcx, test_val, val, t)
}
})
lower_bound(Result {bcx, val}) => {
compare_scalar_types(
bcx, test_val, val,
t, ast::BiGe)
}
range_result(Result {val: vbegin, ..},
Result {bcx, val: vend}) => {
let Result {bcx, val: llge} =
compare_scalar_types(
bcx, test_val,
vbegin, t, ast::BiGe);
let Result {bcx, val: llle} =
compare_scalar_types(
bcx, test_val, vend,
t, ast::BiLe);
rslt(bcx, And(bcx, llge, llle))
}
}
};
bcx = sub_block(after_cx, "compare_next");
bcx = fcx.new_temp_block("compare_next");
CondBr(after_cx, matches, opt_cx.llbb, bcx.llbb);
}
compare_vec_len => {
let Result {bcx: after_cx, val: matches} = {
with_scope_result(bcx,
None,
"compare_vec_len_scope",
|bcx| {
match trans_opt(bcx, opt) {
single_result(
Result {bcx, val}) => {
let value = compare_scalar_values(
bcx, test_val, val,
signed_int, ast::BiEq);
rslt(bcx, value)
}
lower_bound(
Result {bcx, val: val}) => {
let value = compare_scalar_values(
bcx, test_val, val,
signed_int, ast::BiGe);
rslt(bcx, value)
}
range_result(
Result {val: vbegin, ..},
Result {bcx, val: vend}) => {
let llge =
compare_scalar_values(
bcx, test_val,
vbegin, signed_int, ast::BiGe);
let llle =
compare_scalar_values(
bcx, test_val, vend,
signed_int, ast::BiLe);
rslt(bcx, And(bcx, llge, llle))
}
match trans_opt(bcx, opt) {
single_result(
Result {bcx, val}) => {
let value = compare_scalar_values(
bcx, test_val, val,
signed_int, ast::BiEq);
rslt(bcx, value)
}
})
lower_bound(
Result {bcx, val: val}) => {
let value = compare_scalar_values(
bcx, test_val, val,
signed_int, ast::BiGe);
rslt(bcx, value)
}
range_result(
Result {val: vbegin, ..},
Result {bcx, val: vend}) => {
let llge =
compare_scalar_values(
bcx, test_val,
vbegin, signed_int, ast::BiGe);
let llle =
compare_scalar_values(
bcx, test_val, vend,
signed_int, ast::BiLe);
rslt(bcx, And(bcx, llge, llle))
}
}
};
bcx = sub_block(after_cx, "compare_vec_len_next");
bcx = fcx.new_temp_block("compare_vec_len_next");
// If none of these subcases match, move on to the
// next condition.
@ -1812,9 +1800,7 @@ pub fn trans_match<'a>(
dest: Dest)
-> &'a Block<'a> {
let _icx = push_ctxt("match::trans_match");
with_scope(bcx, match_expr.info(), "match", |bcx| {
trans_match_inner(bcx, discr_expr, arms, dest)
})
trans_match_inner(bcx, match_expr.id, discr_expr, arms, dest)
}
fn create_bindings_map(bcx: &Block, pat: @ast::Pat) -> BindingsMap {
@ -1857,19 +1843,18 @@ fn create_bindings_map(bcx: &Block, pat: @ast::Pat) -> BindingsMap {
return bindings_map;
}
fn trans_match_inner<'a>(
scope_cx: &'a Block<'a>,
discr_expr: &ast::Expr,
arms: &[ast::Arm],
dest: Dest)
-> &'a Block<'a> {
fn trans_match_inner<'a>(scope_cx: &'a Block<'a>,
match_id: ast::NodeId,
discr_expr: &ast::Expr,
arms: &[ast::Arm],
dest: Dest) -> &'a Block<'a> {
let _icx = push_ctxt("match::trans_match_inner");
let fcx = scope_cx.fcx;
let mut bcx = scope_cx;
let tcx = bcx.tcx();
let discr_datum = unpack_datum!(bcx, {
expr::trans_to_datum(bcx, discr_expr)
});
let discr_datum = unpack_datum!(bcx, expr::trans_to_lvalue(bcx, discr_expr,
"match"));
if bcx.unreachable.get() {
return bcx;
}
@ -1877,7 +1862,7 @@ fn trans_match_inner<'a>(
let mut arm_datas = ~[];
let mut matches = ~[];
for arm in arms.iter() {
let body = scope_block(bcx, arm.body.info(), "case_body");
let body = fcx.new_id_block("case_body", arm.body.id);
let bindings_map = create_bindings_map(bcx, arm.pats[0]);
let arm_data = ArmData {
bodycx: body,
@ -1910,7 +1895,7 @@ fn trans_match_inner<'a>(
Infallible
}
};
let lldiscr = discr_datum.to_ref_llval(bcx);
let lldiscr = discr_datum.val;
compile_submatch(bcx, matches, [lldiscr], &chk);
let mut arm_cxs = ~[];
@ -1926,14 +1911,15 @@ fn trans_match_inner<'a>(
}
// insert bindings into the lllocals map and add cleanups
bcx = insert_lllocals(bcx, arm_data.bindings_map, true);
let cleanup_scope = fcx.push_custom_cleanup_scope();
bcx = insert_lllocals(bcx, arm_data.bindings_map,
cleanup::CustomScope(cleanup_scope));
bcx = controlflow::trans_block(bcx, arm_data.arm.body, dest);
bcx = trans_block_cleanups(bcx, block_cleanups(arm_data.bodycx));
bcx = fcx.pop_and_trans_custom_cleanup_scope(bcx, cleanup_scope);
arm_cxs.push(bcx);
}
bcx = controlflow::join_blocks(scope_cx, arm_cxs);
bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs);
return bcx;
}
@ -1944,17 +1930,18 @@ enum IrrefutablePatternBindingMode {
BindArgument
}
pub fn store_local<'a>(
bcx: &'a Block<'a>,
pat: @ast::Pat,
opt_init_expr: Option<@ast::Expr>)
-> &'a Block<'a> {
pub fn store_local<'a>(bcx: &'a Block<'a>,
local: &ast::Local)
-> &'a Block<'a> {
/*!
* Generates code for a local variable declaration like
* `let <pat>;` or `let <pat> = <opt_init_expr>`.
*/
let _icx = push_ctxt("match::store_local");
let mut bcx = bcx;
let tcx = bcx.tcx();
let pat = local.pat;
let opt_init_expr = local.init;
return match opt_init_expr {
Some(init_expr) => {
@ -1970,9 +1957,11 @@ pub fn store_local<'a>(
// it assumes it is matching against a valid value.
match simple_identifier(pat) {
Some(path) => {
let var_scope = cleanup::var_scope(tcx, local.id);
return mk_binding_alloca(
bcx, pat.id, path, BindLocal,
|bcx, datum| expr::trans_into(bcx, init_expr, expr::SaveIn(datum.val)));
bcx, pat.id, path, BindLocal, var_scope, (),
|(), bcx, v, _| expr::trans_into(bcx, init_expr,
expr::SaveIn(v)));
}
None => {}
@ -1980,17 +1969,15 @@ pub fn store_local<'a>(
// General path.
let init_datum =
unpack_datum!(
bcx,
expr::trans_to_datum(bcx, init_expr));
unpack_datum!(bcx, expr::trans_to_lvalue(bcx, init_expr, "let"));
if ty::type_is_bot(expr_ty(bcx, init_expr)) {
create_dummy_locals(bcx, pat)
} else {
if bcx.sess().asm_comments() {
add_comment(bcx, "creating zeroable ref llval");
}
let llptr = init_datum.to_ref_llval(bcx);
return bind_irrefutable_pat(bcx, pat, llptr, BindLocal);
let var_scope = cleanup::var_scope(tcx, local.id);
bind_irrefutable_pat(bcx, pat, init_datum.val, BindLocal, var_scope)
}
}
None => {
@ -1998,22 +1985,27 @@ pub fn store_local<'a>(
}
};
fn create_dummy_locals<'a>(mut bcx: &'a Block<'a>, pat: @ast::Pat)
-> &'a Block<'a> {
fn create_dummy_locals<'a>(mut bcx: &'a Block<'a>,
pat: @ast::Pat)
-> &'a Block<'a> {
// create dummy memory for the variables if we have no
// value to store into them immediately
let tcx = bcx.tcx();
pat_bindings(tcx.def_map, pat, |_, p_id, _, path| {
bcx = mk_binding_alloca(
bcx, p_id, path, BindLocal,
|bcx, datum| { datum.cancel_clean(bcx); bcx });
});
let scope = cleanup::var_scope(tcx, p_id);
bcx = mk_binding_alloca(
bcx, p_id, path, BindLocal, scope, (),
|(), bcx, llval, ty| { zero_mem(bcx, llval, ty); bcx });
});
bcx
}
}
pub fn store_arg<'a>(mut bcx: &'a Block<'a>, pat: @ast::Pat, arg: Datum)
-> &'a Block<'a> {
pub fn store_arg<'a>(mut bcx: &'a Block<'a>,
pat: @ast::Pat,
arg: Datum<Rvalue>,
arg_scope: cleanup::ScopeId)
-> &'a Block<'a> {
/*!
* Generates code for argument patterns like `fn foo(<pat>: T)`.
* Creates entries in the `llargs` map for each of the bindings
@ -2026,62 +2018,56 @@ pub fn store_arg<'a>(mut bcx: &'a Block<'a>, pat: @ast::Pat, arg: Datum)
* if the argument type is `T`, then `llval` is a `T*`). In some
* cases, this code may zero out the memory `llval` points at.
*/
let _icx = push_ctxt("match::store_arg");
// We always need to cleanup the argument as we exit the fn scope.
// Note that we cannot do it before for fear of a fn like
// fn getaddr(~ref x: ~uint) -> *uint {....}
// (From test `run-pass/func-arg-ref-pattern.rs`)
arg.add_clean(bcx);
match simple_identifier(pat) {
Some(path) => {
// Generate nicer LLVM for the common case of fn a pattern
// like `x: T`
mk_binding_alloca(
bcx, pat.id, path, BindArgument, arg_scope, arg,
|arg, bcx, llval, _| arg.store_to(bcx, llval))
}
// Debug information (the llvm.dbg.declare intrinsic to be precise) always expects to get an
// alloca, which only is the case on the general path, so lets disable the optimized path when
// debug info is enabled.
let arg_is_alloca = unsafe { llvm::LLVMIsAAllocaInst(arg.val) != ptr::null() };
let fast_path = (arg_is_alloca || !bcx.ccx().sess.opts.extra_debuginfo)
&& simple_identifier(pat).is_some();
if fast_path {
// Optimized path for `x: T` case. This just adopts
// `llval` wholesale as the pointer for `x`, avoiding the
// general logic which may copy out of `llval`.
let mut llargs = bcx.fcx.llargs.borrow_mut();
llargs.get().insert(pat.id, arg);
} else {
// General path. Copy out the values that are used in the
// pattern.
let llptr = arg.to_ref_llval(bcx);
bcx = bind_irrefutable_pat(bcx, pat, llptr, BindArgument);
None => {
// General path. Copy out the values that are used in the
// pattern.
let arg = unpack_datum!(
bcx, arg.to_lvalue_datum_in_scope(bcx, "__arg", arg_scope));
bind_irrefutable_pat(bcx, pat, arg.val,
BindArgument, arg_scope)
}
}
return bcx;
}
fn mk_binding_alloca<'a>(
bcx: &'a Block<'a>,
p_id: ast::NodeId,
path: &ast::Path,
binding_mode: IrrefutablePatternBindingMode,
populate: |&'a Block<'a>, Datum| -> &'a Block<'a>)
-> &'a Block<'a> {
fn mk_binding_alloca<'a,A>(bcx: &'a Block<'a>,
p_id: ast::NodeId,
path: &ast::Path,
binding_mode: IrrefutablePatternBindingMode,
cleanup_scope: cleanup::ScopeId,
arg: A,
populate: |A, &'a Block<'a>, ValueRef, ty::t| -> &'a Block<'a>)
-> &'a Block<'a> {
let var_ty = node_id_type(bcx, p_id);
let ident = ast_util::path_to_ident(path);
// Allocate memory on stack for the binding.
let llval = alloc_ty(bcx, var_ty, bcx.ident(ident));
let datum = Datum {
val: llval,
ty: var_ty,
mode: ByRef(ZeroMem)
// Subtle: be sure that we *populate* the memory *before*
// we schedule the cleanup.
let bcx = populate(arg, bcx, llval, var_ty);
bcx.fcx.schedule_drop_mem(cleanup_scope, llval, var_ty);
// Now that memory is initialized and has cleanup scheduled,
// create the datum and insert into the local variable map.
let datum = Datum(llval, var_ty, Lvalue);
let mut llmap = match binding_mode {
BindLocal => bcx.fcx.lllocals.borrow_mut(),
BindArgument => bcx.fcx.llargs.borrow_mut()
};
{
let mut llmap = match binding_mode {
BindLocal => bcx.fcx.lllocals.borrow_mut(),
BindArgument => bcx.fcx.llargs.borrow_mut()
};
llmap.get().insert(p_id, datum);
}
let bcx = populate(bcx, datum);
datum.add_clean(bcx);
llmap.get().insert(p_id, datum);
bcx
}
@ -2089,7 +2075,8 @@ fn bind_irrefutable_pat<'a>(
bcx: &'a Block<'a>,
pat: @ast::Pat,
val: ValueRef,
binding_mode: IrrefutablePatternBindingMode)
binding_mode: IrrefutablePatternBindingMode,
cleanup_scope: cleanup::ScopeId)
-> &'a Block<'a> {
/*!
* A simple version of the pattern matching code that only handles
@ -2103,11 +2090,10 @@ fn bind_irrefutable_pat<'a>(
* # Arguments
* - bcx: starting basic block context
* - pat: the irrefutable pattern being matched.
* - val: a pointer to the value being matched. If pat matches a value
* of type T, then this is a T*. If the value is moved from `pat`,
* then `*pat` will be zeroed; otherwise, it's existing cleanup
* applies.
* - val: the value being matched -- must be an lvalue (by ref, with cleanup)
* - binding_mode: is this for an argument or a local variable?
*
* FIXME: convert `val` to `Datum<Lvalue>` for more type safety
*/
debug!("bind_irrefutable_pat(bcx={}, pat={}, binding_mode={:?})",
@ -2133,24 +2119,20 @@ fn bind_irrefutable_pat<'a>(
// binding will live and place it into the appropriate
// map.
bcx = mk_binding_alloca(
bcx, pat.id, path, binding_mode,
|bcx, var_datum| {
bcx, pat.id, path, binding_mode, cleanup_scope, (),
|(), bcx, llval, ty| {
match pat_binding_mode {
ast::BindByValue(_) => {
// By value binding: move the value that `val`
// points at into the binding's stack slot.
let datum = Datum {
val: val,
ty: var_datum.ty,
mode: ByRef(ZeroMem)
};
datum.store_to(bcx, INIT, var_datum.val)
let d = Datum(val, ty, Lvalue);
d.store_to(bcx, llval)
}
ast::BindByRef(_) => {
// By ref binding: the value of the variable
// is the pointer `val` itself.
Store(bcx, val, var_datum.val);
Store(bcx, val, llval);
bcx
}
}
@ -2158,7 +2140,8 @@ fn bind_irrefutable_pat<'a>(
}
for &inner_pat in inner.iter() {
bcx = bind_irrefutable_pat(bcx, inner_pat, val, binding_mode);
bcx = bind_irrefutable_pat(bcx, inner_pat, val,
binding_mode, cleanup_scope);
}
}
ast::PatEnum(_, ref sub_pats) => {
@ -2176,7 +2159,8 @@ fn bind_irrefutable_pat<'a>(
for sub_pat in sub_pats.iter() {
for (i, argval) in args.vals.iter().enumerate() {
bcx = bind_irrefutable_pat(bcx, sub_pat[i],
*argval, binding_mode);
*argval, binding_mode,
cleanup_scope);
}
}
}
@ -2193,7 +2177,8 @@ fn bind_irrefutable_pat<'a>(
let fldptr = adt::trans_field_ptr(bcx, repr,
val, 0, i);
bcx = bind_irrefutable_pat(bcx, *elem,
fldptr, binding_mode);
fldptr, binding_mode,
cleanup_scope);
}
}
}
@ -2214,7 +2199,8 @@ fn bind_irrefutable_pat<'a>(
let ix = ty::field_idx_strict(tcx, f.ident.name, field_tys);
let fldptr = adt::trans_field_ptr(bcx, pat_repr, val,
discr, ix);
bcx = bind_irrefutable_pat(bcx, f.pat, fldptr, binding_mode);
bcx = bind_irrefutable_pat(bcx, f.pat, fldptr,
binding_mode, cleanup_scope);
}
})
}
@ -2222,16 +2208,17 @@ fn bind_irrefutable_pat<'a>(
let repr = adt::represent_node(bcx, pat.id);
for (i, elem) in elems.iter().enumerate() {
let fldptr = adt::trans_field_ptr(bcx, repr, val, 0, i);
bcx = bind_irrefutable_pat(bcx, *elem, fldptr, binding_mode);
bcx = bind_irrefutable_pat(bcx, *elem, fldptr,
binding_mode, cleanup_scope);
}
}
ast::PatUniq(inner) => {
let llbox = Load(bcx, val);
bcx = bind_irrefutable_pat(bcx, inner, llbox, binding_mode);
bcx = bind_irrefutable_pat(bcx, inner, llbox, binding_mode, cleanup_scope);
}
ast::PatRegion(inner) => {
let loaded_val = Load(bcx, val);
bcx = bind_irrefutable_pat(bcx, inner, loaded_val, binding_mode);
bcx = bind_irrefutable_pat(bcx, inner, loaded_val, binding_mode, cleanup_scope);
}
ast::PatVec(..) => {
bcx.tcx().sess.span_bug(
@ -2243,14 +2230,4 @@ fn bind_irrefutable_pat<'a>(
return bcx;
}
fn simple_identifier<'a>(pat: &'a ast::Pat) -> Option<&'a ast::Path> {
match pat.node {
ast::PatIdent(ast::BindByValue(_), ref path, None) => {
Some(path)
}
_ => {
None
}
}
}


@ -628,6 +628,25 @@ pub fn num_args(r: &Repr, discr: Disr) -> uint {
}
}
/// Access a field, at a point when the value's case is known.
pub fn deref_ty(ccx: &CrateContext, r: &Repr) -> ty::t {
match *r {
CEnum(..) => {
ccx.sess.bug("deref of c-like enum")
}
Univariant(ref st, _) => {
st.fields[0]
}
General(_, ref cases) => {
assert!(cases.len() == 1);
cases[0].fields[0]
}
NullablePointer{ .. } => {
ccx.sess.bug("deref of nullable ptr")
}
}
}
/// Access a field, at a point when the value's case is known.
pub fn trans_field_ptr(bcx: &Block, r: &Repr, val: ValueRef, discr: Disr,
ix: uint) -> ValueRef {


@ -18,8 +18,10 @@ use lib;
use middle::trans::build::*;
use middle::trans::callee;
use middle::trans::common::*;
use middle::trans::expr::*;
use middle::trans::type_of::*;
use middle::trans::cleanup;
use middle::trans::cleanup::CleanupMethods;
use middle::trans::expr;
use middle::trans::type_of;
use middle::trans::type_::Type;
@ -28,26 +30,23 @@ use syntax::ast;
// Take an inline assembly expression and splat it out via LLVM
pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
-> &'a Block<'a> {
let fcx = bcx.fcx;
let mut bcx = bcx;
let mut constraints = ~[];
let mut cleanups = ~[];
let mut output_types = ~[];
let temp_scope = fcx.push_custom_cleanup_scope();
// Prepare the output operands
let outputs = ia.outputs.map(|&(c, out)| {
constraints.push(c);
let out_datum = unpack_datum!(bcx, trans_to_datum(bcx, out));
output_types.push(type_of(bcx.ccx(), out_datum.ty));
let out_datum = unpack_datum!(bcx, expr::trans(bcx, out));
output_types.push(type_of::type_of(bcx.ccx(), out_datum.ty));
out_datum.val
});
for c in cleanups.iter() {
revoke_clean(bcx, *c);
}
cleanups.clear();
// Now the input operands
let inputs = ia.inputs.map(|&(c, input)| {
constraints.push(c);
@ -56,14 +55,13 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
callee::trans_arg_expr(bcx,
expr_ty(bcx, input),
input,
&mut cleanups,
cleanup::CustomScope(temp_scope),
callee::DontAutorefArg)
})
});
for c in cleanups.iter() {
revoke_clean(bcx, *c);
}
// no failure occurred preparing operands, no need to cleanup
fcx.pop_custom_cleanup_scope(temp_scope);
let mut constraints = constraints.connect(",");

File diff suppressed because it is too large.


@ -24,6 +24,7 @@ use std::cast;
use std::libc::{c_uint, c_ulonglong, c_char};
pub fn terminate(cx: &Block, _: &str) {
debug!("terminate({})", cx.to_str());
cx.terminated.set(true);
}
@ -315,12 +316,16 @@ pub fn ArrayMalloc(cx: &Block, Ty: Type, Val: ValueRef) -> ValueRef {
pub fn Alloca(cx: &Block, Ty: Type, name: &str) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(Ty.ptr_to().to_ref()); }
let b = cx.fcx.ccx.builder();
b.position_before(cx.fcx.alloca_insert_pt.get().unwrap());
b.alloca(Ty, name)
AllocaFcx(cx.fcx, Ty, name)
}
}
pub fn AllocaFcx(fcx: &FunctionContext, Ty: Type, name: &str) -> ValueRef {
let b = fcx.ccx.builder();
b.position_before(fcx.alloca_insert_pt.get().unwrap());
b.alloca(Ty, name)
}
pub fn ArrayAlloca(cx: &Block, Ty: Type, Val: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(Ty.ptr_to().to_ref()); }


@ -27,6 +27,8 @@ use middle::trans::base;
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::callee;
use middle::trans::cleanup;
use middle::trans::cleanup::CleanupMethods;
use middle::trans::common;
use middle::trans::common::*;
use middle::trans::datum::*;
@ -60,11 +62,10 @@ pub struct FnData {
pub struct MethodData {
llfn: ValueRef,
llself: ValueRef,
temp_cleanup: Option<ValueRef>
}
pub enum CalleeData {
Closure(Datum),
Closure(Datum<Lvalue>),
Fn(FnData),
Method(MethodData)
}
@ -74,7 +75,7 @@ pub struct Callee<'a> {
data: CalleeData
}
pub fn trans<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) -> Callee<'a> {
fn trans<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) -> Callee<'a> {
let _icx = push_ctxt("trans_callee");
debug!("callee::trans(expr={})", expr.repr(bcx.tcx()));
@ -90,13 +91,15 @@ pub fn trans<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) -> Callee<'a> {
return datum_callee(bcx, expr);
fn datum_callee<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) -> Callee<'a> {
let DatumBlock {bcx, datum} = expr::trans_to_datum(bcx, expr);
let DatumBlock {bcx: mut bcx, datum} = expr::trans(bcx, expr);
match ty::get(datum.ty).sty {
ty::ty_bare_fn(..) => {
let llval = datum.to_appropriate_llval(bcx);
let llval = datum.to_llscalarish(bcx);
return Callee {bcx: bcx, data: Fn(FnData {llfn: llval})};
}
ty::ty_closure(..) => {
let datum = unpack_datum!(
bcx, datum.to_lvalue_datum(bcx, "callee", expr.id));
return Callee {bcx: bcx, data: Closure(datum)};
}
_ => {
@ -458,10 +461,10 @@ pub fn trans_call<'a>(
-> &'a Block<'a> {
let _icx = push_ctxt("trans_call");
trans_call_inner(in_cx,
call_ex.info(),
Some(common::expr_info(call_ex)),
expr_ty(in_cx, f),
node_id_type(in_cx, id),
|cx| trans(cx, f),
|cx, _| trans(cx, f),
args,
Some(dest),
DontAutorefArg).bcx
@ -481,10 +484,10 @@ pub fn trans_method_call<'a>(
rcvr.repr(in_cx.tcx()));
trans_call_inner(
in_cx,
call_ex.info(),
Some(common::expr_info(call_ex)),
node_id_type(in_cx, callee_id),
expr_ty(in_cx, call_ex),
|cx| {
|cx, arg_cleanup_scope| {
let origin_opt = {
let mut method_map = cx.ccx().maps.method_map.borrow_mut();
method_map.get().find_copy(&call_ex.id)
@ -495,7 +498,11 @@ pub fn trans_method_call<'a>(
call_ex.repr(in_cx.tcx()),
origin.repr(in_cx.tcx()));
meth::trans_method_callee(cx, callee_id, rcvr, origin)
meth::trans_method_callee(cx,
callee_id,
rcvr,
origin,
arg_cleanup_scope)
}
None => {
cx.tcx().sess.span_bug(call_ex.span, "method call expr wasn't in method map")
@ -523,7 +530,7 @@ pub fn trans_lang_call<'a>(
None,
fty,
rty,
|bcx| {
|bcx, _| {
trans_fn_ref_with_vtables_to_callee(bcx,
did,
0,
@ -551,8 +558,11 @@ pub fn trans_lang_call_with_type_params<'a>(
let rty = ty::ty_fn_ret(fty);
return callee::trans_call_inner(
bcx, None, fty, rty,
|bcx| {
bcx,
None,
fty,
rty,
|bcx, _| {
let callee =
trans_fn_ref_with_vtables_to_callee(bcx, did, 0,
type_params,
@ -577,11 +587,13 @@ pub fn trans_lang_call_with_type_params<'a>(
}
pub fn trans_call_inner<'a>(
in_cx: &'a Block<'a>,
bcx: &'a Block<'a>,
call_info: Option<NodeInfo>,
callee_ty: ty::t,
ret_ty: ty::t,
get_callee: |&'a Block<'a>| -> Callee<'a>,
get_callee: |bcx: &'a Block<'a>,
arg_cleanup_scope: cleanup::ScopeId|
-> Callee<'a>,
args: CallArgs,
dest: Option<expr::Dest>,
autoref_arg: AutorefArg)
@ -593,171 +605,182 @@ pub fn trans_call_inner<'a>(
* this into two functions seems like a good idea).
*
* In particular, for lang items, it is invoked with a dest of
* None, and
* None, and in that case the return value contains the result of
* the fn. The lang item must not return a structural type or else
* all heck breaks loose.
*
* For non-lang items, `dest` is always Some, and hence the result
* is written into memory somewhere. Nonetheless we return the
* actual return value of the function.
*/
// Introduce a temporary cleanup scope that will contain cleanups
// for the arguments while they are being evaluated. The purpose of
// this cleanup is to ensure that, should a failure occur while
// evaluating argument N, the values for arguments 0...N-1 are all
// cleaned up. If no failure occurs, the values are handed off to
// the callee, and hence none of the cleanups in this temporary
// scope will ever execute.
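// In outline (an editor's sketch of the protocol just described,
// using the CleanupMethods API from cleanup.rs; not code from this
// commit):
//
//     let scope = fcx.push_custom_cleanup_scope();
//     // ...evaluate each argument, scheduling its cleanup in
//     // `scope` as it is produced...
//     fcx.pop_custom_cleanup_scope(scope); // values now owned by callee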
let fcx = bcx.fcx;
let ccx = fcx.ccx;
let tcx = ccx.tcx;
let arg_cleanup_scope = fcx.push_custom_cleanup_scope();
base::with_scope_result(in_cx, call_info, "call", |cx| {
let callee = get_callee(cx);
let mut bcx = callee.bcx;
let ccx = cx.ccx();
let callee = get_callee(bcx, cleanup::CustomScope(arg_cleanup_scope));
let mut bcx = callee.bcx;
let (llfn, llenv) = unsafe {
match callee.data {
Fn(d) => {
(d.llfn, llvm::LLVMGetUndef(Type::opaque_box(ccx).ptr_to().to_ref()))
}
Method(d) => {
// Weird but true: we pass self in the *environment* slot!
(d.llfn, d.llself)
}
Closure(d) => {
// Closures are represented as (llfn, llclosure) pair:
// load the requisite values out.
let pair = d.to_ref_llval(bcx);
let llfn = GEPi(bcx, pair, [0u, abi::fn_field_code]);
let llfn = Load(bcx, llfn);
let llenv = GEPi(bcx, pair, [0u, abi::fn_field_box]);
let llenv = Load(bcx, llenv);
(llfn, llenv)
let (llfn, llenv) = unsafe {
match callee.data {
Fn(d) => {
(d.llfn, llvm::LLVMGetUndef(Type::opaque_box(ccx).ptr_to().to_ref()))
}
Method(d) => {
// Weird but true: we pass self in the *environment* slot!
(d.llfn, d.llself)
}
Closure(d) => {
// Closures are represented as (llfn, llclosure) pair:
// load the requisite values out.
let pair = d.to_llref();
let llfn = GEPi(bcx, pair, [0u, abi::fn_field_code]);
let llfn = Load(bcx, llfn);
let llenv = GEPi(bcx, pair, [0u, abi::fn_field_box]);
let llenv = Load(bcx, llenv);
(llfn, llenv)
}
}
};
let abi = match ty::get(callee_ty).sty {
ty::ty_bare_fn(ref f) => f.abis,
_ => AbiSet::Rust()
};
let is_rust_fn =
abi.is_rust() ||
abi.is_intrinsic();
// Generate a location to store the result. If the user does
// not care about the result, just make a stack slot.
let opt_llretslot = match dest {
None => {
assert!(!type_of::return_uses_outptr(ccx, ret_ty));
None
}
Some(expr::SaveIn(dst)) => Some(dst),
Some(expr::Ignore) => {
if !ty::type_is_voidish(tcx, ret_ty) {
Some(alloc_ty(bcx, ret_ty, "__llret"))
} else {
unsafe {
Some(llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref()))
}
}
};
}
};
let abi = match ty::get(callee_ty).sty {
ty::ty_bare_fn(ref f) => f.abis,
_ => AbiSet::Rust()
};
let is_rust_fn =
abi.is_rust() ||
abi.is_intrinsic();
let mut llresult = unsafe {
llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref())
};
// Generate a location to store the result. If the user does
// not care about the result, just make a stack slot.
let opt_llretslot = match dest {
None => {
assert!(!type_of::return_uses_outptr(in_cx.ccx(), ret_ty));
None
}
Some(expr::SaveIn(dst)) => Some(dst),
Some(expr::Ignore) => {
if !ty::type_is_voidish(in_cx.tcx(), ret_ty) {
Some(alloc_ty(bcx, ret_ty, "__llret"))
} else {
unsafe {
Some(llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref()))
}
}
}
};
// The code below invokes the function, using either the Rust
// conventions (if it is a rust fn) or the native conventions
// (otherwise). The important part is that, when all is said
// and done, either the return value of the function will have been
// written in opt_llretslot (if it is Some) or `llresult` will be
// set appropriately (otherwise).
if is_rust_fn {
let mut llargs = ~[];
let mut llresult = unsafe {
llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref())
};
// Push the out-pointer if we use an out-pointer for this
// return type, otherwise push "undef".
if type_of::return_uses_outptr(ccx, ret_ty) {
llargs.push(opt_llretslot.unwrap());
}
// The code below invokes the function, using either the Rust
// conventions (if it is a rust fn) or the native conventions
// (otherwise). The important part is that, when all is said
// and done, either the return value of the function will have been
// written in opt_llretslot (if it is Some) or `llresult` will be
// set appropriately (otherwise).
if is_rust_fn {
let mut llargs = ~[];
// Push the environment.
llargs.push(llenv);
// Push the out-pointer if we use an out-pointer for this
// return type, otherwise push "undef".
if type_of::return_uses_outptr(in_cx.ccx(), ret_ty) {
llargs.push(opt_llretslot.unwrap());
}
// Push the arguments.
bcx = trans_args(bcx, args, callee_ty,
autoref_arg, &mut llargs,
cleanup::CustomScope(arg_cleanup_scope));
// Push the environment.
llargs.push(llenv);
fcx.pop_custom_cleanup_scope(arg_cleanup_scope);
// Push the arguments.
bcx = trans_args(bcx, args, callee_ty,
autoref_arg, &mut llargs);
// A function pointer is called without the declaration
// available, so we have to apply any attributes with ABI
// implications directly to the call instruction. Right now,
// the only attribute we need to worry about is `sret`.
let mut attrs = ~[];
if type_of::return_uses_outptr(ccx, ret_ty) {
attrs.push((1, StructRetAttribute));
}
// Now that the arguments have finished evaluating, we
// need to revoke the cleanup for the self argument
match callee.data {
Method(d) => {
for &v in d.temp_cleanup.iter() {
revoke_clean(bcx, v);
}
}
_ => {}
}
// A function pointer is called without the declaration available, so we have to apply
// any attributes with ABI implications directly to the call instruction. Right now, the
// only attribute we need to worry about is `sret`.
let mut attrs = ~[];
if type_of::return_uses_outptr(in_cx.ccx(), ret_ty) {
attrs.push((1, StructRetAttribute));
}
// The `noalias` attribute on the return value is useful to a function ptr caller.
match ty::get(ret_ty).sty {
// `~` pointer return values never alias because ownership is transferred
ty::ty_uniq(..) |
// The `noalias` attribute on the return value is useful to a
// function ptr caller.
match ty::get(ret_ty).sty {
// `~` pointer return values never alias because ownership
// is transferred
ty::ty_uniq(..) |
ty::ty_vec(_, ty::vstore_uniq) => {
attrs.push((0, NoAliasAttribute));
attrs.push((0, NoAliasAttribute));
}
_ => ()
}
// Invoke the actual rust fn and update bcx/llresult.
let (llret, b) = base::invoke(bcx, llfn, llargs, attrs, call_info);
bcx = b;
llresult = llret;
// If the Rust convention for this type is return via
// the return value, copy it into llretslot.
match opt_llretslot {
Some(llretslot) => {
if !type_of::return_uses_outptr(bcx.ccx(), ret_ty) &&
!ty::type_is_voidish(bcx.tcx(), ret_ty)
{
Store(bcx, llret, llretslot);
}
_ => ()
}
// Invoke the actual rust fn and update bcx/llresult.
let (llret, b) = base::invoke(bcx, llfn, llargs, attrs, call_info);
bcx = b;
llresult = llret;
// If the Rust convention for this type is return via
// the return value, copy it into llretslot.
match opt_llretslot {
Some(llretslot) => {
if !type_of::return_uses_outptr(bcx.ccx(), ret_ty) &&
!ty::type_is_voidish(bcx.tcx(), ret_ty)
{
Store(bcx, llret, llretslot);
}
}
None => {}
}
} else {
// Lang items are the only case where dest is None, and
// they are always Rust fns.
assert!(dest.is_some());
let mut llargs = ~[];
bcx = trans_args(bcx, args, callee_ty,
autoref_arg, &mut llargs);
let arg_tys = match args {
ArgExprs(a) => a.iter().map(|x| expr_ty(bcx, *x)).collect(),
ArgVals(_) => fail!("expected arg exprs.")
};
bcx = foreign::trans_native_call(bcx, callee_ty,
llfn, opt_llretslot.unwrap(), llargs, arg_tys);
None => {}
}
} else {
// Lang items are the only case where dest is None, and
// they are always Rust fns.
assert!(dest.is_some());
// If the caller doesn't care about the result of this fn call,
// drop the temporary slot we made.
match dest {
None => {
assert!(!type_of::return_uses_outptr(bcx.ccx(), ret_ty));
}
Some(expr::Ignore) => {
// drop the value if it is not being saved.
bcx = glue::drop_ty(bcx, opt_llretslot.unwrap(), ret_ty);
}
Some(expr::SaveIn(_)) => { }
let mut llargs = ~[];
bcx = trans_args(bcx, args, callee_ty,
autoref_arg, &mut llargs,
cleanup::CustomScope(arg_cleanup_scope));
fcx.pop_custom_cleanup_scope(arg_cleanup_scope);
let arg_tys = match args {
ArgExprs(a) => a.iter().map(|x| expr_ty(bcx, *x)).collect(),
ArgVals(_) => fail!("expected arg exprs.")
};
bcx = foreign::trans_native_call(bcx, callee_ty,
llfn, opt_llretslot.unwrap(), llargs, arg_tys);
}
// If the caller doesn't care about the result of this fn call,
// drop the temporary slot we made.
match dest {
None => {
assert!(!type_of::return_uses_outptr(bcx.ccx(), ret_ty));
}
if ty::type_is_bot(ret_ty) {
Unreachable(bcx);
Some(expr::Ignore) => {
// drop the value if it is not being saved.
bcx = glue::drop_ty(bcx, opt_llretslot.unwrap(), ret_ty);
}
Some(expr::SaveIn(_)) => { }
}
rslt(bcx, llresult)
})
if ty::type_is_bot(ret_ty) {
Unreachable(bcx);
}
rslt(bcx, llresult)
}
pub enum CallArgs<'a> {
@ -770,10 +793,11 @@ pub fn trans_args<'a>(
args: CallArgs,
fn_ty: ty::t,
autoref_arg: AutorefArg,
llargs: &mut ~[ValueRef])
-> &'a Block<'a> {
llargs: &mut ~[ValueRef],
arg_cleanup_scope: cleanup::ScopeId)
-> &'a Block<'a>
{
let _icx = push_ctxt("trans_args");
let mut temp_cleanups = ~[];
let arg_tys = ty::ty_fn_args(fn_ty);
let variadic = ty::fn_is_variadic(fn_ty);
@ -796,7 +820,7 @@ pub fn trans_args<'a>(
trans_arg_expr(bcx,
arg_ty,
*arg_expr,
&mut temp_cleanups,
arg_cleanup_scope,
autoref_arg)
});
llargs.push(arg_val);
@ -807,13 +831,6 @@ pub fn trans_args<'a>(
}
}
// now that all arguments have been successfully built, we can revoke any
// temporary cleanups, as they are only needed if argument construction
// should fail (for example, cleanup of copy mode args).
for c in temp_cleanups.iter() {
revoke_clean(bcx, *c)
}
bcx
}
@ -822,16 +839,15 @@ pub enum AutorefArg {
DoAutorefArg
}
// temp_cleanups: cleanups that should run only if failure occurs before the
// call takes place:
pub fn trans_arg_expr<'a>(
bcx: &'a Block<'a>,
formal_arg_ty: ty::t,
arg_expr: &ast::Expr,
temp_cleanups: &mut ~[ValueRef],
arg_cleanup_scope: cleanup::ScopeId,
autoref_arg: AutorefArg)
-> Result<'a> {
let _icx = push_ctxt("trans_arg_expr");
let mut bcx = bcx;
let ccx = bcx.ccx();
debug!("trans_arg_expr(formal_arg_ty=({}), arg_expr={})",
@ -839,14 +855,13 @@ pub fn trans_arg_expr<'a>(
arg_expr.repr(bcx.tcx()));
// translate the arg expr to a datum
let arg_datumblock = expr::trans_to_datum(bcx, arg_expr);
let arg_datum = arg_datumblock.datum;
let bcx = arg_datumblock.bcx;
let arg_datum = unpack_datum!(bcx, expr::trans(bcx, arg_expr));
let arg_datum_ty = arg_datum.ty;
debug!(" arg datum: {}", arg_datum.to_str(bcx.ccx()));
let mut val;
if ty::type_is_bot(arg_datum.ty) {
if ty::type_is_bot(arg_datum_ty) {
// For values of type _|_, we generate an
// "undef" value, as such a value should never
// be inspected. It's important for the value
@ -859,34 +874,31 @@ pub fn trans_arg_expr<'a>(
// FIXME(#3548) use the adjustments table
match autoref_arg {
DoAutorefArg => {
val = arg_datum.to_ref_llval(bcx);
// We will pass argument by reference
// We want an lvalue, so that we can pass it by reference.
let arg_datum = unpack_datum!(
bcx, arg_datum.to_lvalue_datum(bcx, "arg", arg_expr.id));
val = arg_datum.val;
}
DontAutorefArg => {
let need_scratch = ty::type_needs_drop(bcx.tcx(), arg_datum.ty) ||
(bcx.expr_is_lval(arg_expr) &&
arg_datum.appropriate_mode(bcx.ccx()).is_by_ref());
// Make this an rvalue, since we are going to be
// passing ownership.
let arg_datum = unpack_datum!(
bcx, arg_datum.to_rvalue_datum(bcx, "arg"));
let arg_datum = if need_scratch {
let scratch = scratch_datum(bcx, arg_datum.ty, "__self", false);
arg_datum.store_to_datum(bcx, INIT, scratch);
// Now that arg_datum is owned, get it into the appropriate
// mode (ref vs value).
let arg_datum = unpack_datum!(
bcx, arg_datum.to_appropriate_datum(bcx));
// Technically, ownership of val passes to the callee.
// However, we must cleanup should we fail before the
// callee is actually invoked.
scratch.add_clean(bcx);
temp_cleanups.push(scratch.val);
scratch
} else {
arg_datum
};
debug!("by copy arg with type {}", bcx.ty_to_str(arg_datum.ty));
val = arg_datum.to_appropriate_llval(bcx);
// Technically, ownership of val passes to the callee.
// However, we must cleanup should we fail before the
// callee is actually invoked.
val = arg_datum.add_clean(bcx.fcx, arg_cleanup_scope);
}
}
if formal_arg_ty != arg_datum.ty {
if formal_arg_ty != arg_datum_ty {
// this could happen due to e.g. subtyping
let llformal_arg_ty = type_of::type_of_explicit_arg(ccx, formal_arg_ty);
debug!("casting actual type ({}) to match formal ({})",

View file

@ -0,0 +1,948 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* Code pertaining to cleanup of temporaries as well as execution of
* drop glue. See discussion in `doc.rs` for a high-level summary.
*/
use lib::llvm::{BasicBlockRef, ValueRef};
use middle::lang_items::{EhPersonalityLangItem};
use middle::trans::base;
use middle::trans::build;
use middle::trans::callee;
use middle::trans::common;
use middle::trans::common::{Block, FunctionContext};
use middle::trans::glue;
use middle::trans::type_::Type;
use middle::ty;
use syntax::ast;
use syntax::ast_map;
use syntax::parse::token;
use syntax::opt_vec;
use syntax::opt_vec::OptVec;
use util::ppaux::Repr;
pub struct CleanupScope<'a> {
// The kind of this cleanup scope. A *custom scope* (also called
// a *temporary scope*) is one pushed during trans to clean up
// miscellaneous garbage that trans may generate whose lifetime is
// a subset of some expression. See module doc for more details.
kind: CleanupScopeKind<'a>,
// Cleanups to run upon scope exit.
cleanups: OptVec<~Cleanup>,
cached_early_exits: OptVec<CachedEarlyExit>,
cached_landing_pad: Option<BasicBlockRef>,
}
pub struct CustomScopeIndex {
priv index: uint
}
pub static EXIT_BREAK: uint = 0;
pub static EXIT_LOOP: uint = 1;
pub static EXIT_MAX: uint = 2;
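// Indices into a loop scope's `exits` array: EXIT_BREAK is the block
// that `break` branches to, EXIT_LOOP the block that `continue`
// branches to. (trans_while, for example, passes
// [next_bcx_in, cond_bcx_in].)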
enum CleanupScopeKind<'a> {
CustomScopeKind,
AstScopeKind(ast::NodeId),
LoopScopeKind(ast::NodeId, [&'a Block<'a>, ..EXIT_MAX])
}
#[deriving(Eq)]
enum EarlyExitLabel {
UnwindExit,
ReturnExit,
LoopExit(ast::NodeId, uint)
}
struct CachedEarlyExit {
label: EarlyExitLabel,
cleanup_block: BasicBlockRef,
}
pub trait Cleanup {
fn clean_on_unwind(&self) -> bool;
fn trans<'a>(&self, bcx: &'a Block<'a>) -> &'a Block<'a>;
}
pub enum ScopeId {
AstScope(ast::NodeId),
CustomScope(CustomScopeIndex)
}
impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
fn push_ast_cleanup_scope(&self, id: ast::NodeId) {
/*!
* Invoked when we start to trans the code contained
* within a new cleanup scope.
*/
debug!("push_ast_cleanup_scope({})",
ast_map::node_id_to_str(self.ccx.tcx.items, id,
token::get_ident_interner()));
// FIXME(#2202) -- currently closure bodies have a parent
// region, which messes up the assertion below, since there
// are no cleanup scopes on the stack at the start of
// trans'ing a closure body. I think though that this should
// eventually be fixed by closure bodies not having a parent
// region, though that's a touch unclear, and it might also be
// better just to narrow this assertion more (i.e., by
// excluding id's that correspond to closure bodies only). For
// now we just say that if there is already an AST scope on the stack,
// this new AST scope had better be its immediate child.
let top_scope = self.top_ast_scope();
if top_scope.is_some() {
assert_eq!(self.ccx.tcx.region_maps.opt_encl_scope(id), top_scope);
}
self.push_scope(CleanupScope::new(AstScopeKind(id)));
}
fn push_loop_cleanup_scope(&self,
id: ast::NodeId,
exits: [&'a Block<'a>, ..EXIT_MAX]) {
debug!("push_loop_cleanup_scope({})",
ast_map::node_id_to_str(self.ccx.tcx.items, id,
token::get_ident_interner()));
assert_eq!(Some(id), self.top_ast_scope());
self.push_scope(CleanupScope::new(LoopScopeKind(id, exits)));
}
fn push_custom_cleanup_scope(&self) -> CustomScopeIndex {
let index = self.scopes_len();
debug!("push_custom_cleanup_scope(): {}", index);
self.push_scope(CleanupScope::new(CustomScopeKind));
CustomScopeIndex { index: index }
}
fn pop_and_trans_ast_cleanup_scope(&self,
bcx: &'a Block<'a>,
cleanup_scope: ast::NodeId)
-> &'a Block<'a> {
/*!
* Removes the cleanup scope for id `cleanup_scope`, which
* must be at the top of the cleanup stack, and generates the
* code to do its cleanups for normal exit.
*/
debug!("pop_and_trans_ast_cleanup_scope({})",
ast_map::node_id_to_str(self.ccx.tcx.items, cleanup_scope,
token::get_ident_interner()));
assert!(self.top_scope(|s| s.kind.is_ast_with_id(cleanup_scope)));
let scope = self.pop_scope();
self.trans_scope_cleanups(bcx, &scope)
}
fn pop_loop_cleanup_scope(&self,
cleanup_scope: ast::NodeId) {
/*!
* Removes the loop cleanup scope for id `cleanup_scope`, which
* must be at the top of the cleanup stack. Does not generate
* any cleanup code, since loop scopes should exit by
* branching to a block generated by `normal_exit_block`.
*/
debug!("pop_loop_cleanup_scope({})",
ast_map::node_id_to_str(self.ccx.tcx.items, cleanup_scope,
token::get_ident_interner()));
assert!(self.top_scope(|s| s.kind.is_loop_with_id(cleanup_scope)));
let _ = self.pop_scope();
}
fn pop_custom_cleanup_scope(&self,
custom_scope: CustomScopeIndex) {
/*!
* Removes the top cleanup scope from the stack without
* executing its cleanups. The top cleanup scope must
* be the temporary scope `custom_scope`.
*/
debug!("pop_custom_cleanup_scope({})", custom_scope.index);
assert!(self.is_valid_to_pop_custom_scope(custom_scope));
let _ = self.pop_scope();
}
fn pop_and_trans_custom_cleanup_scope(&self,
bcx: &'a Block<'a>,
custom_scope: CustomScopeIndex)
-> &'a Block<'a> {
/*!
* Removes the top cleanup scope from the stack, which must be
* a temporary scope, and generates the code to do its
* cleanups for normal exit.
*/
debug!("pop_and_trans_custom_cleanup_scope({:?})", custom_scope);
assert!(self.is_valid_to_pop_custom_scope(custom_scope));
let scope = self.pop_scope();
self.trans_scope_cleanups(bcx, &scope)
}
fn top_loop_scope(&self) -> ast::NodeId {
/*!
* Returns the id of the top-most loop scope
*/
let scopes = self.scopes.borrow();
for scope in scopes.get().iter().invert() {
match scope.kind {
LoopScopeKind(id, _) => {
return id;
}
_ => {}
}
}
self.ccx.tcx.sess.bug("No loop scope found");
}
fn normal_exit_block(&self,
cleanup_scope: ast::NodeId,
exit: uint) -> BasicBlockRef {
/*!
* Returns a block to branch to which will perform all pending
* cleanups and then break/continue (depending on `exit`) out
* of the loop with id `cleanup_scope`
*/
self.trans_cleanups_to_exit_scope(LoopExit(cleanup_scope, exit))
}
fn return_exit_block(&self) -> BasicBlockRef {
/*!
* Returns a block to branch to which will perform all pending
* cleanups and then return from this function
*/
self.trans_cleanups_to_exit_scope(ReturnExit)
}
fn schedule_drop_mem(&self,
cleanup_scope: ScopeId,
val: ValueRef,
ty: ty::t) {
/*!
* Schedules a (deep) drop of `val`, which is a pointer to an
* instance of `ty`
*/
if !ty::type_needs_drop(self.ccx.tcx, ty) { return; }
let drop = ~DropValue {
is_immediate: false,
on_unwind: ty::type_needs_unwind_cleanup(self.ccx.tcx, ty),
val: val,
ty: ty
};
debug!("schedule_drop_mem({:?}, val={}, ty={})",
cleanup_scope,
self.ccx.tn.val_to_str(val),
ty.repr(self.ccx.tcx));
self.schedule_clean(cleanup_scope, drop as ~Cleanup);
}
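// A typical call site (an editor's sketch, not from this commit):
// when trans allocates memory whose contents must eventually be
// dropped, it pairs the alloca with a scheduled cleanup, e.g.
//
//     fcx.schedule_drop_mem(cleanup::var_scope(tcx, local_id),
//                           llptr, ty);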
fn schedule_drop_immediate(&self,
cleanup_scope: ScopeId,
val: ValueRef,
ty: ty::t) {
/*!
* Schedules a (deep) drop of `val`, which is an instance of `ty`
*/
if !ty::type_needs_drop(self.ccx.tcx, ty) { return; }
let drop = ~DropValue {
is_immediate: true,
on_unwind: ty::type_needs_unwind_cleanup(self.ccx.tcx, ty),
val: val,
ty: ty
};
debug!("schedule_drop_immediate({:?}, val={}, ty={})",
cleanup_scope,
self.ccx.tn.val_to_str(val),
ty.repr(self.ccx.tcx));
self.schedule_clean(cleanup_scope, drop as ~Cleanup);
}
fn schedule_free_value(&self,
cleanup_scope: ScopeId,
val: ValueRef,
heap: common::heap) {
/*!
* Schedules a call to `free(val)`. Note that this is a shallow
* operation.
*/
let drop = ~FreeValue { ptr: val, heap: heap };
debug!("schedule_free_value({:?}, val={}, heap={:?})",
cleanup_scope,
self.ccx.tn.val_to_str(val),
heap);
self.schedule_clean(cleanup_scope, drop as ~Cleanup);
}
fn schedule_clean(&self,
cleanup_scope: ScopeId,
cleanup: ~Cleanup) {
match cleanup_scope {
AstScope(id) => self.schedule_clean_in_ast_scope(id, cleanup),
CustomScope(id) => self.schedule_clean_in_custom_scope(id, cleanup),
}
}
fn schedule_clean_in_ast_scope(&self,
cleanup_scope: ast::NodeId,
cleanup: ~Cleanup) {
/*!
* Schedules a cleanup to occur upon exit from the AST scope
* `cleanup_scope`, which must appear somewhere on the cleanup
* stack (it need not be the topmost scope).
*/
debug!("schedule_clean_in_ast_scope(cleanup_scope={:?})",
cleanup_scope);
let mut scopes = self.scopes.borrow_mut();
for scope in scopes.get().mut_iter().invert() {
if scope.kind.is_ast_with_id(cleanup_scope) {
scope.cleanups.push(cleanup);
scope.clear_cached_exits();
return;
} else {
// will be adding a cleanup to some enclosing scope
scope.clear_cached_exits();
}
}
self.ccx.tcx.sess.bug(
format!("No cleanup scope {} found",
ast_map::node_id_to_str(self.ccx.tcx.items, cleanup_scope,
token::get_ident_interner())));
}
fn schedule_clean_in_custom_scope(&self,
custom_scope: CustomScopeIndex,
cleanup: ~Cleanup) {
/*!
* Schedules a cleanup to occur in the top-most scope,
* which must be a temporary scope.
*/
debug!("schedule_clean_in_custom_scope(custom_scope={})",
custom_scope.index);
assert!(self.is_valid_custom_scope(custom_scope));
let mut scopes = self.scopes.borrow_mut();
let scope = &mut scopes.get()[custom_scope.index];
scope.cleanups.push(cleanup);
scope.clear_cached_exits();
}
fn needs_invoke(&self) -> bool {
/*!
* Returns true if there are pending cleanups that should
* execute on failure.
*/
let scopes = self.scopes.borrow();
scopes.get().iter().invert().any(|s| s.needs_invoke())
}
fn get_landing_pad(&self) -> BasicBlockRef {
/*!
* Returns a basic block to branch to in the event of a failure.
* This block will run the failure cleanups and eventually
* invoke the LLVM `Resume` instruction.
*/
let _icx = base::push_ctxt("get_landing_pad");
debug!("get_landing_pad");
let orig_scopes_len = self.scopes_len();
assert!(orig_scopes_len > 0);
// Remove any scopes that do not have cleanups on failure:
let mut popped_scopes = opt_vec::Empty;
while !self.top_scope(|s| s.needs_invoke()) {
debug!("top scope does not need invoke");
popped_scopes.push(self.pop_scope());
}
// Check for an existing landing pad in the new topmost scope:
let llbb = self.get_or_create_landing_pad();
// Push the scopes we removed back on:
while !popped_scopes.is_empty() {
self.push_scope(popped_scopes.pop());
}
assert_eq!(self.scopes_len(), orig_scopes_len);
return llbb;
}
}
impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {
fn top_ast_scope(&self) -> Option<ast::NodeId> {
/*!
* Returns the id of the current top-most AST scope, if any.
*/
let scopes = self.scopes.borrow();
for scope in scopes.get().iter().invert() {
match scope.kind {
CustomScopeKind | LoopScopeKind(..) => {}
AstScopeKind(i) => {
return Some(i);
}
}
}
None
}
fn top_nonempty_cleanup_scope(&self) -> Option<uint> {
let scopes = self.scopes.borrow();
scopes.get().iter().invert().position(|s| !s.cleanups.is_empty())
}
fn is_valid_to_pop_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool {
let scopes = self.scopes.borrow();
self.is_valid_custom_scope(custom_scope) &&
custom_scope.index == scopes.get().len() - 1
}
fn is_valid_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool {
let scopes = self.scopes.borrow();
custom_scope.index < scopes.get().len() &&
scopes.get()[custom_scope.index].kind.is_temp()
}
fn trans_scope_cleanups(&self, // cannot borrow self, will recurse
bcx: &'a Block<'a>,
scope: &CleanupScope) -> &'a Block<'a> {
/*! Generates the cleanups for `scope` into `bcx` */
let mut bcx = bcx;
if !bcx.unreachable.get() {
for cleanup in scope.cleanups.iter().invert() {
bcx = cleanup.trans(bcx);
}
}
bcx
}
fn scopes_len(&self) -> uint {
let scopes = self.scopes.borrow();
scopes.get().len()
}
fn push_scope(&self, scope: CleanupScope<'a>) {
let mut scopes = self.scopes.borrow_mut();
scopes.get().push(scope);
}
fn pop_scope(&self) -> CleanupScope<'a> {
debug!("popping cleanup scope {}, {} scopes remaining",
self.top_scope(|s| s.block_name("")),
self.scopes_len() - 1);
let mut scopes = self.scopes.borrow_mut();
scopes.get().pop()
}
fn top_scope<R>(&self, f: |&CleanupScope<'a>| -> R) -> R {
let scopes = self.scopes.borrow();
f(scopes.get().last())
}
fn trans_cleanups_to_exit_scope(&self,
label: EarlyExitLabel)
-> BasicBlockRef {
/*!
* Used when the caller wishes to jump to an early exit, such
* as a return, break, continue, or unwind. This function will
* generate all cleanups between the top of the stack and the
* exit `label` and return a basic block that the caller can
* branch to.
*
* For example, if the current stack of cleanups were as follows:
*
* AST 22
* Custom 1
* AST 23
* Loop 23
* Custom 2
* AST 24
*
* and the `label` specifies a break from `Loop 23`, then this
* function would generate a series of basic blocks as follows:
*
* Cleanup(AST 24) -> Cleanup(Custom 2) -> break_blk
*
* where `break_blk` is the block specified in `Loop 23` as
* the target for breaks. The return value would be the first
* basic block in that sequence (`Cleanup(AST 24)`). The
* caller could then branch to `Cleanup(AST 24)` and it will
* perform all cleanups and finally branch to the `break_blk`.
*/
debug!("trans_cleanups_to_exit_scope label={:?} scopes={}",
label, self.scopes_len());
let orig_scopes_len = self.scopes_len();
let mut prev_llbb;
let mut popped_scopes = opt_vec::Empty;
// First we pop off all the cleanup stacks that are
// traversed until the exit is reached, pushing them
// onto the side vector `popped_scopes`. No code is
// generated at this time.
//
// So, continuing the example from above, we would wind up
// with a `popped_scopes` vector of `[AST 24, Custom 2]`.
// (Presuming that there are no cached exits)
loop {
if self.scopes_len() == 0 {
match label {
UnwindExit => {
// Generate a block that will `Resume`.
let prev_bcx = self.new_block(true, "resume", None);
let personality = self.personality.get().expect(
"create_landing_pad() should have set this");
build::Resume(prev_bcx,
build::Load(prev_bcx, personality));
prev_llbb = prev_bcx.llbb;
break;
}
ReturnExit => {
prev_llbb = self.get_llreturn();
break;
}
LoopExit(id, _) => {
self.ccx.tcx.sess.bug(format!(
"Cannot exit from scope {:?}, \
not in scope", id));
}
}
}
// Check if we have already cached the unwinding of this
// scope for this label. If so, we can stop popping scopes
// and branch to the cached label, since it contains the
// cleanups for any subsequent scopes.
match self.top_scope(|s| s.cached_early_exit(label)) {
Some(cleanup_block) => {
prev_llbb = cleanup_block;
break;
}
None => { }
}
// Pop off the scope, since we will be generating
// unwinding code for it. If we are searching for a loop exit,
// and this scope is that loop, then stop popping and set
// `prev_llbb` to the appropriate exit block from the loop.
popped_scopes.push(self.pop_scope());
let scope = popped_scopes.last();
match label {
UnwindExit | ReturnExit => { }
LoopExit(id, exit) => {
match scope.kind.early_exit_block(id, exit) {
Some(exitllbb) => {
prev_llbb = exitllbb;
break;
}
None => { }
}
}
}
}
debug!("trans_cleanups_to_exit_scope: popped {} scopes",
popped_scopes.len());
// Now push the popped scopes back on. As we go,
// we track in `prev_llbb` the exit to which this scope
// should branch when it's done.
//
// So, continuing with our example, we will start out with
// `prev_llbb` being set to `break_blk` (or possibly a cached
// early exit). We will then pop the scopes from `popped_scopes`
// and generate a basic block for each one, prepending it in the
// series and updating `prev_llbb`. So we begin by popping `Custom 2`
// and generating `Cleanup(Custom 2)`. We make `Cleanup(Custom 2)`
// branch to `prev_llbb == break_blk`, giving us a sequence like:
//
// Cleanup(Custom 2) -> prev_llbb
//
// We then pop `AST 24` and repeat the process, giving us the sequence:
//
// Cleanup(AST 24) -> Cleanup(Custom 2) -> prev_llbb
//
// At this point, `popped_scopes` is empty, and so the final block
// that we return to the user is `Cleanup(AST 24)`.
while !popped_scopes.is_empty() {
let mut scope = popped_scopes.pop();
if scope.cleanups.iter().any(|c| cleanup_is_suitable_for(*c, label))
{
let name = scope.block_name("clean");
debug!("generating cleanups for {}", name);
let bcx_in = self.new_block(label.is_unwind(), name, None);
let mut bcx_out = bcx_in;
for cleanup in scope.cleanups.iter().invert() {
if cleanup_is_suitable_for(*cleanup, label) {
bcx_out = cleanup.trans(bcx_out);
}
}
build::Br(bcx_out, prev_llbb);
prev_llbb = bcx_in.llbb;
} else {
debug!("no suitable cleanups in {}",
scope.block_name("clean"));
}
scope.add_cached_early_exit(label, prev_llbb);
self.push_scope(scope);
}
debug!("trans_cleanups_to_exit_scope: prev_llbb={}", prev_llbb);
assert_eq!(self.scopes_len(), orig_scopes_len);
prev_llbb
}
fn get_or_create_landing_pad(&self) -> BasicBlockRef {
/*!
* Creates a landing pad for the top scope, if one does not
* exist. The landing pad will perform all cleanups necessary
* for an unwind and then `resume` to continue error
* propagation:
*
* landing_pad -> ... cleanups ... -> [resume]
*
* (The cleanups and resume instruction are created by
* `trans_cleanups_to_exit_scope()`, not in this function
* itself.)
*/
let pad_bcx;
debug!("get_or_create_landing_pad");
// Check if a landing pad block exists; if not, create one.
{
let mut scopes = self.scopes.borrow_mut();
let last_scope = scopes.get().mut_last();
match last_scope.cached_landing_pad {
Some(llbb) => { return llbb; }
None => {
let name = last_scope.block_name("unwind");
pad_bcx = self.new_block(true, name, None);
last_scope.cached_landing_pad = Some(pad_bcx.llbb);
}
}
}
// The landing pad return type (the type being propagated). Not sure what
// this represents but it's determined by the personality function and
// this is what the EH proposal example uses.
let llretty = Type::struct_([Type::i8p(), Type::i32()], false);
// The exception handling personality function.
let def_id = common::langcall(pad_bcx, None, "", EhPersonalityLangItem);
let llpersonality = callee::trans_fn_ref(pad_bcx, def_id, 0).llfn;
// The only landing pad clause will be 'cleanup'
let llretval = build::LandingPad(pad_bcx, llretty, llpersonality, 1u);
// The landing pad block is a cleanup
build::SetCleanup(pad_bcx, llretval);
// We store the retval in a function-central alloca, so that calls to
// Resume can find it.
match self.personality.get() {
Some(addr) => {
build::Store(pad_bcx, llretval, addr);
}
None => {
let addr = base::alloca(pad_bcx, common::val_ty(llretval), "");
self.personality.set(Some(addr));
build::Store(pad_bcx, llretval, addr);
}
}
// Generate the cleanup block and branch to it.
let cleanup_llbb = self.trans_cleanups_to_exit_scope(UnwindExit);
build::Br(pad_bcx, cleanup_llbb);
return pad_bcx.llbb;
}
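// For reference, the pad built above has roughly this LLVM shape
// (an editor's sketch; the exact IR varies with the LLVM version
// and the personality lang item in use):
//
//   unwind_<scope>:
//     %v = landingpad { i8*, i32 }
//              personality i8* bitcast (...) cleanup
//     store { i8*, i32 } %v, { i8*, i32 }* %personality_slot
//     br label %cleanup_blocks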
}
impl<'a> CleanupScope<'a> {
fn new(kind: CleanupScopeKind<'a>) -> CleanupScope<'a> {
CleanupScope {
kind: kind,
cleanups: opt_vec::Empty,
cached_early_exits: opt_vec::Empty,
cached_landing_pad: None,
}
}
fn clear_cached_exits(&mut self) {
self.cached_early_exits = opt_vec::Empty;
self.cached_landing_pad = None;
}
fn cached_early_exit(&self,
label: EarlyExitLabel)
-> Option<BasicBlockRef> {
self.cached_early_exits.iter().
find(|e| e.label == label).
map(|e| e.cleanup_block)
}
fn add_cached_early_exit(&mut self,
label: EarlyExitLabel,
blk: BasicBlockRef) {
self.cached_early_exits.push(
CachedEarlyExit { label: label,
cleanup_block: blk });
}
fn needs_invoke(&self) -> bool {
/*! True if this scope has cleanups for use during unwinding */
self.cached_landing_pad.is_some() ||
self.cleanups.iter().any(|c| c.clean_on_unwind())
}
fn block_name(&self, prefix: &str) -> ~str {
/*!
* Returns a suitable name to use for the basic block that
* handles this cleanup scope
*/
match self.kind {
CustomScopeKind => format!("{}_custom_", prefix),
AstScopeKind(id) => format!("{}_ast_{}_", prefix, id),
LoopScopeKind(id, _) => format!("{}_loop_{}_", prefix, id),
}
}
}
impl<'a> CleanupScopeKind<'a> {
fn is_temp(&self) -> bool {
match *self {
CustomScopeKind => true,
LoopScopeKind(..) | AstScopeKind(..) => false,
}
}
fn is_ast_with_id(&self, id: ast::NodeId) -> bool {
match *self {
CustomScopeKind | LoopScopeKind(..) => false,
AstScopeKind(i) => i == id
}
}
fn is_loop_with_id(&self, id: ast::NodeId) -> bool {
match *self {
CustomScopeKind | AstScopeKind(..) => false,
LoopScopeKind(i, _) => i == id
}
}
fn early_exit_block(&self,
id: ast::NodeId,
exit: uint) -> Option<BasicBlockRef> {
/*!
* If this is a loop scope with id `id`, return the early
* exit block `exit`, else `None`
*/
match *self {
LoopScopeKind(i, ref exits) if id == i => Some(exits[exit].llbb),
_ => None,
}
}
}
impl EarlyExitLabel {
fn is_unwind(&self) -> bool {
match *self {
UnwindExit => true,
_ => false
}
}
}
///////////////////////////////////////////////////////////////////////////
// Cleanup types
pub struct DropValue {
is_immediate: bool,
on_unwind: bool,
val: ValueRef,
ty: ty::t,
}
impl Cleanup for DropValue {
fn clean_on_unwind(&self) -> bool {
self.on_unwind
}
fn trans<'a>(&self, bcx: &'a Block<'a>) -> &'a Block<'a> {
if self.is_immediate {
glue::drop_ty_immediate(bcx, self.val, self.ty)
} else {
glue::drop_ty(bcx, self.val, self.ty)
}
}
}
pub struct FreeValue {
ptr: ValueRef,
heap: common::heap,
}
impl Cleanup for FreeValue {
fn clean_on_unwind(&self) -> bool {
true
}
fn trans<'a>(&self, bcx: &'a Block<'a>) -> &'a Block<'a> {
match self.heap {
common::heap_managed => {
glue::trans_free(bcx, self.ptr)
}
common::heap_exchange | common::heap_exchange_closure => {
glue::trans_exchange_free(bcx, self.ptr)
}
}
}
}
pub fn temporary_scope(tcx: ty::ctxt,
id: ast::NodeId)
-> ScopeId {
match tcx.region_maps.temporary_scope(id) {
Some(scope) => {
let r = AstScope(scope);
debug!("temporary_scope({}) = {:?}", id, r);
r
}
None => {
tcx.sess.bug(format!("no temporary scope available for expr {}", id))
}
}
}
pub fn var_scope(tcx: ty::ctxt,
id: ast::NodeId)
-> ScopeId {
let r = AstScope(tcx.region_maps.var_scope(id));
debug!("var_scope({}) = {:?}", id, r);
r
}
fn cleanup_is_suitable_for(c: &Cleanup,
label: EarlyExitLabel) -> bool {
!label.is_unwind() || c.clean_on_unwind()
}
///////////////////////////////////////////////////////////////////////////
// These traits just exist to put the methods into this file.
pub trait CleanupMethods<'a> {
fn push_ast_cleanup_scope(&self, id: ast::NodeId);
fn push_loop_cleanup_scope(&self,
id: ast::NodeId,
exits: [&'a Block<'a>, ..EXIT_MAX]);
fn push_custom_cleanup_scope(&self) -> CustomScopeIndex;
fn pop_and_trans_ast_cleanup_scope(&self,
bcx: &'a Block<'a>,
cleanup_scope: ast::NodeId)
-> &'a Block<'a>;
fn pop_loop_cleanup_scope(&self,
cleanup_scope: ast::NodeId);
fn pop_custom_cleanup_scope(&self,
custom_scope: CustomScopeIndex);
fn pop_and_trans_custom_cleanup_scope(&self,
bcx: &'a Block<'a>,
custom_scope: CustomScopeIndex)
-> &'a Block<'a>;
fn top_loop_scope(&self) -> ast::NodeId;
fn normal_exit_block(&self,
cleanup_scope: ast::NodeId,
exit: uint) -> BasicBlockRef;
fn return_exit_block(&self) -> BasicBlockRef;
fn schedule_drop_mem(&self,
cleanup_scope: ScopeId,
val: ValueRef,
ty: ty::t);
fn schedule_drop_immediate(&self,
cleanup_scope: ScopeId,
val: ValueRef,
ty: ty::t);
fn schedule_free_value(&self,
cleanup_scope: ScopeId,
val: ValueRef,
heap: common::heap);
fn schedule_clean(&self,
cleanup_scope: ScopeId,
cleanup: ~Cleanup);
fn schedule_clean_in_ast_scope(&self,
cleanup_scope: ast::NodeId,
cleanup: ~Cleanup);
fn schedule_clean_in_custom_scope(&self,
custom_scope: CustomScopeIndex,
cleanup: ~Cleanup);
fn needs_invoke(&self) -> bool;
fn get_landing_pad(&self) -> BasicBlockRef;
}
trait CleanupHelperMethods<'a> {
fn top_ast_scope(&self) -> Option<ast::NodeId>;
fn top_nonempty_cleanup_scope(&self) -> Option<uint>;
fn is_valid_to_pop_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool;
fn is_valid_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool;
fn trans_scope_cleanups(&self,
bcx: &'a Block<'a>,
scope: &CleanupScope<'a>) -> &'a Block<'a>;
fn trans_cleanups_to_exit_scope(&self,
label: EarlyExitLabel)
-> BasicBlockRef;
fn get_or_create_landing_pad(&self) -> BasicBlockRef;
fn scopes_len(&self) -> uint;
fn push_scope(&self, scope: CleanupScope<'a>);
fn pop_scope(&self) -> CleanupScope<'a>;
fn top_scope<R>(&self, f: |&CleanupScope<'a>| -> R) -> R;
}
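As an aside, the two-phase pop/push traversal documented on trans_cleanups_to_exit_scope can be made concrete with a small simulation. The sketch below is an editor's illustration in present-day Rust syntax, not part of this commit: it models the doc comment's example stack, omits cached early exits, and prints the cleanup-block chain that the real code builds out of basic blocks.
#[derive(Debug)]
enum Scope {
    Ast(u32),
    Custom(u32),
    Loop(u32),
}
fn main() {
    // The example stack from the doc comment, bottom to top:
    // AST 22, Custom 1, AST 23, Loop 23, Custom 2, AST 24.
    let mut stack = vec![
        Scope::Ast(22), Scope::Custom(1), Scope::Ast(23),
        Scope::Loop(23), Scope::Custom(2), Scope::Ast(24),
    ];
    let break_from = 23;
    // Phase 1: pop scopes (generating no code yet) until we reach
    // the loop being exited, remembering them on the side.
    let mut popped = Vec::new();
    while !matches!(stack.last(), Some(Scope::Loop(id)) if *id == break_from) {
        popped.push(stack.pop().expect("not inside that loop"));
    }
    // Phase 2: push the scopes back on; for each one, prepend a
    // cleanup block that branches to the previous exit target.
    let mut prev = String::from("break_blk");
    while let Some(scope) = popped.pop() {
        prev = format!("Cleanup({:?}) -> {}", scope, prev);
        stack.push(scope);
    }
    println!("{}", prev);
    // Prints: Cleanup(Ast(24)) -> Cleanup(Custom(2)) -> break_blk
}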

View file

@ -16,7 +16,7 @@ use middle::moves;
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::common::*;
use middle::trans::datum::{Datum, INIT};
use middle::trans::datum::{Datum, Lvalue};
use middle::trans::debuginfo;
use middle::trans::expr;
use middle::trans::glue;
@ -112,7 +112,7 @@ pub enum EnvAction {
pub struct EnvValue {
action: EnvAction,
datum: Datum
datum: Datum<Lvalue>
}
impl EnvAction {
@ -219,7 +219,7 @@ pub fn store_environment<'a>(
// Copy expr values into boxed bindings.
let mut bcx = bcx;
for (i, bv) in bound_values.iter().enumerate() {
for (i, bv) in bound_values.move_iter().enumerate() {
debug!("Copy {} into closure", bv.to_str(ccx));
if ccx.sess.asm_comments() {
@ -230,17 +230,13 @@ pub fn store_environment<'a>(
let bound_data = GEPi(bcx, llbox, [0u, abi::box_field_body, i]);
match bv.action {
EnvCopy => {
bcx = bv.datum.copy_to(bcx, INIT, bound_data);
}
EnvMove => {
bcx = bv.datum.move_to(bcx, INIT, bound_data);
EnvCopy | EnvMove => {
bcx = bv.datum.store_to(bcx, bound_data);
}
EnvRef => {
Store(bcx, bv.datum.to_ref_llval(bcx), bound_data);
Store(bcx, bv.datum.to_llref(), bound_data);
}
}
}
ClosureResult { llbox: llbox, cdata_ty: cdata_ty, bcx: bcx }
@ -413,7 +409,6 @@ pub fn trans_expr_fn<'a>(
None,
bcx.fcx.param_substs,
user_id,
None,
[],
ty::ty_fn_ret(fty),
|fcx| load_environment(fcx, cdata_ty, cap_vars, sigil));

View file

@ -20,8 +20,9 @@ use lib;
use middle::lang_items::LangItem;
use middle::trans::base;
use middle::trans::build;
use middle::trans::cleanup;
use middle::trans::datum;
use middle::trans::glue;
use middle::trans::datum::{Datum, Lvalue};
use middle::trans::debuginfo;
use middle::trans::type_::Type;
use middle::ty::substs;
@ -37,8 +38,7 @@ use std::cast;
use std::cell::{Cell, RefCell};
use std::hashmap::HashMap;
use std::libc::{c_uint, c_longlong, c_ulonglong, c_char};
use std::vec;
use syntax::ast::{Name, Ident};
use syntax::ast::{Ident};
use syntax::ast_map::{Path, PathElem, PathPrettyName};
use syntax::codemap::Span;
use syntax::parse::token;
@ -122,6 +122,15 @@ pub struct tydesc_info {
*
*/
pub struct NodeInfo {
id: ast::NodeId,
span: Span,
}
pub fn expr_info(expr: &ast::Expr) -> NodeInfo {
NodeInfo { id: expr.id, span: expr.span }
}
pub struct Stats {
n_static_tydescs: Cell<uint>,
n_glues_created: Cell<uint>,
@ -185,6 +194,10 @@ impl Repr for param_substs {
}
}
// work around bizarre resolve errors
type RvalueDatum = datum::Datum<datum::Rvalue>;
type LvalueDatum = datum::Datum<datum::Lvalue>;
// Function context. Every LLVM function we create will have one of
// these.
pub struct FunctionContext<'a> {
@ -213,13 +226,15 @@ pub struct FunctionContext<'a> {
// allocas, so that LLVM will coalesce them into a single alloca call.
alloca_insert_pt: Cell<Option<ValueRef>>,
llreturn: Cell<Option<BasicBlockRef>>,
// The 'self' value currently in use in this function, if there
// is one.
//
// NB: This is the type of the self *variable*, not the self *type*. The
// self type is set only for default methods, while the self variable is
// set for all methods.
llself: Cell<Option<datum::Datum>>,
llself: Cell<Option<LvalueDatum>>,
// The value alloca'd for calls to upcalls.rust_personality. Used when
// outputting the resume instruction.
personality: Cell<Option<ValueRef>>,
@ -230,10 +245,12 @@ pub struct FunctionContext<'a> {
caller_expects_out_pointer: bool,
// Maps arguments to allocas created for them in llallocas.
llargs: RefCell<HashMap<ast::NodeId, datum::Datum>>,
llargs: RefCell<HashMap<ast::NodeId, LvalueDatum>>,
// Maps the def_ids for local variables to the allocas created for
// them in llallocas.
lllocals: RefCell<HashMap<ast::NodeId, datum::Datum>>,
lllocals: RefCell<HashMap<ast::NodeId, LvalueDatum>>,
// Same as above, but for closure upvars
llupvars: RefCell<HashMap<ast::NodeId, ValueRef>>,
@ -253,14 +270,14 @@ pub struct FunctionContext<'a> {
// The arena that blocks are allocated from.
block_arena: TypedArena<Block<'a>>,
// The arena that scope info is allocated from.
scope_info_arena: TypedArena<ScopeInfo<'a>>,
// This function's enclosing crate context.
ccx: @CrateContext,
// Used and maintained by the debuginfo module.
debug_context: debuginfo::FunctionDebugContext,
// Cleanup scopes.
scopes: RefCell<~[cleanup::CleanupScope<'a>]>,
}
impl<'a> FunctionContext<'a> {
@ -302,9 +319,55 @@ impl<'a> FunctionContext<'a> {
self.llreturn.get().unwrap()
}
pub fn new_block(&'a self,
is_lpad: bool,
name: &str,
opt_node_id: Option<ast::NodeId>)
-> &'a Block<'a> {
unsafe {
let llbb = name.with_c_str(|buf| {
llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx,
self.llfn,
buf)
});
Block::new(llbb, is_lpad, opt_node_id, self)
}
}
pub fn new_id_block(&'a self,
name: &str,
node_id: ast::NodeId)
-> &'a Block<'a> {
self.new_block(false, name, Some(node_id))
}
pub fn new_temp_block(&'a self,
name: &str)
-> &'a Block<'a> {
self.new_block(false, name, None)
}
pub fn join_blocks(&'a self,
id: ast::NodeId,
in_cxs: &[&'a Block<'a>])
-> &'a Block<'a> {
let out = self.new_id_block("join", id);
let mut reachable = false;
for bcx in in_cxs.iter() {
if !bcx.unreachable.get() {
build::Br(*bcx, out.llbb);
reachable = true;
}
}
if !reachable {
build::Unreachable(out);
}
return out;
}
}
pub fn warn_not_to_commit(ccx: &CrateContext, msg: &str) {
pub fn warn_not_to_commit(ccx: &mut CrateContext, msg: &str) {
if !ccx.do_not_commit_warning_issued.get() {
ccx.do_not_commit_warning_issued.set(true);
ccx.sess.warn(msg.to_str() + " -- do not commit like this!");
@ -319,300 +382,6 @@ pub enum heap {
heap_exchange_closure
}
#[deriving(Clone, Eq)]
pub enum cleantype {
normal_exit_only,
normal_exit_and_unwind
}
// Cleanup functions
/// A cleanup function: a built-in destructor.
pub trait CleanupFunction {
fn clean<'a>(&self, block: &'a Block<'a>) -> &'a Block<'a>;
}
/// A cleanup function that calls the "drop glue" (destructor function) on
/// a datum.
struct DatumDroppingCleanupFunction {
datum: datum::Datum
}
impl CleanupFunction for DatumDroppingCleanupFunction {
fn clean<'a>(&self, block: &'a Block<'a>) -> &'a Block<'a> {
self.datum.drop_val(block)
}
}
/// A cleanup function that frees some memory in the garbage-collected heap.
pub struct GCHeapFreeingCleanupFunction {
ptr: ValueRef,
}
impl CleanupFunction for GCHeapFreeingCleanupFunction {
fn clean<'a>(&self, bcx: &'a Block<'a>) -> &'a Block<'a> {
glue::trans_free(bcx, self.ptr)
}
}
/// A cleanup function that frees some memory in the exchange heap.
pub struct ExchangeHeapFreeingCleanupFunction {
ptr: ValueRef,
}
impl CleanupFunction for ExchangeHeapFreeingCleanupFunction {
fn clean<'a>(&self, bcx: &'a Block) -> &'a Block<'a> {
glue::trans_exchange_free(bcx, self.ptr)
}
}
pub enum cleanup {
Clean(@CleanupFunction, cleantype),
CleanTemp(ValueRef, @CleanupFunction, cleantype),
}
// Can't use deriving(Clone) because of the managed closure.
impl Clone for cleanup {
fn clone(&self) -> cleanup {
match *self {
Clean(f, ct) => Clean(f, ct),
CleanTemp(v, f, ct) => CleanTemp(v, f, ct),
}
}
}
// Used to remember and reuse existing cleanup paths
// target: none means the path ends in a resume instruction
#[deriving(Clone)]
pub struct cleanup_path {
target: Option<BasicBlockRef>,
size: uint,
dest: BasicBlockRef
}
pub fn shrink_scope_clean(scope_info: &ScopeInfo, size: uint) {
scope_info.landing_pad.set(None);
let new_cleanup_paths = {
let cleanup_paths = scope_info.cleanup_paths.borrow();
cleanup_paths.get()
.iter()
.take_while(|&cu| cu.size <= size)
.map(|&x| x)
.collect()
};
scope_info.cleanup_paths.set(new_cleanup_paths)
}
pub fn grow_scope_clean(scope_info: &ScopeInfo) {
scope_info.landing_pad.set(None);
}
pub fn cleanup_type(cx: ty::ctxt, ty: ty::t) -> cleantype {
if ty::type_needs_unwind_cleanup(cx, ty) {
normal_exit_and_unwind
} else {
normal_exit_only
}
}
pub fn add_clean(bcx: &Block, val: ValueRef, ty: ty::t) {
if !ty::type_needs_drop(bcx.tcx(), ty) { return; }
debug!("add_clean({}, {}, {})", bcx.to_str(), bcx.val_to_str(val), ty.repr(bcx.tcx()));
let cleanup_type = cleanup_type(bcx.tcx(), ty);
in_scope_cx(bcx, None, |scope_info| {
{
let mut cleanups = scope_info.cleanups.borrow_mut();
cleanups.get().push(Clean(@DatumDroppingCleanupFunction {
datum: datum::Datum {
val: val,
ty: ty,
mode: datum::ByRef(datum::ZeroMem)
}
} as @CleanupFunction,
cleanup_type));
}
grow_scope_clean(scope_info);
})
}
pub fn add_clean_temp_immediate(bcx: &Block, val: ValueRef, ty: ty::t) {
if !ty::type_needs_drop(bcx.tcx(), ty) { return; }
debug!("add_clean_temp_immediate({}, {}, {})",
bcx.to_str(), bcx.val_to_str(val),
ty.repr(bcx.tcx()));
let cleanup_type = cleanup_type(bcx.tcx(), ty);
in_scope_cx(bcx, None, |scope_info| {
{
let mut cleanups = scope_info.cleanups.borrow_mut();
cleanups.get().push(CleanTemp(val, @DatumDroppingCleanupFunction {
datum: datum::Datum {
val: val,
ty: ty,
mode: datum::ByValue
}
} as @CleanupFunction,
cleanup_type));
}
grow_scope_clean(scope_info);
})
}
pub fn add_clean_temp_mem(bcx: &Block, val: ValueRef, t: ty::t) {
add_clean_temp_mem_in_scope_(bcx, None, val, t);
}
pub fn add_clean_temp_mem_in_scope(bcx: &Block,
scope_id: ast::NodeId,
val: ValueRef,
t: ty::t) {
add_clean_temp_mem_in_scope_(bcx, Some(scope_id), val, t);
}
pub fn add_clean_temp_mem_in_scope_(bcx: &Block, scope_id: Option<ast::NodeId>,
val: ValueRef, t: ty::t) {
if !ty::type_needs_drop(bcx.tcx(), t) { return; }
debug!("add_clean_temp_mem({}, {}, {})",
bcx.to_str(), bcx.val_to_str(val),
t.repr(bcx.tcx()));
let cleanup_type = cleanup_type(bcx.tcx(), t);
in_scope_cx(bcx, scope_id, |scope_info| {
{
let mut cleanups = scope_info.cleanups.borrow_mut();
cleanups.get().push(CleanTemp(val, @DatumDroppingCleanupFunction {
datum: datum::Datum {
val: val,
ty: t,
mode: datum::ByRef(datum::RevokeClean)
}
} as @CleanupFunction,
cleanup_type));
}
grow_scope_clean(scope_info);
})
}
pub fn add_clean_free(cx: &Block, ptr: ValueRef, heap: heap) {
let free_fn = match heap {
heap_managed => {
@GCHeapFreeingCleanupFunction {
ptr: ptr,
} as @CleanupFunction
}
heap_exchange | heap_exchange_closure => {
@ExchangeHeapFreeingCleanupFunction {
ptr: ptr,
} as @CleanupFunction
}
};
in_scope_cx(cx, None, |scope_info| {
{
let mut cleanups = scope_info.cleanups.borrow_mut();
cleanups.get().push(CleanTemp(ptr,
free_fn,
normal_exit_and_unwind));
}
grow_scope_clean(scope_info);
})
}
// Note that this only works for temporaries. We should, at some point, move
// to a system where we can also cancel the cleanup on local variables, but
// this will be more involved. For now, we simply zero out the local, and the
// drop glue checks whether it is zero.
pub fn revoke_clean(cx: &Block, val: ValueRef) {
in_scope_cx(cx, None, |scope_info| {
let cleanup_pos = {
let mut cleanups = scope_info.cleanups.borrow_mut();
debug!("revoke_clean({}, {}) revoking {:?} from {:?}",
cx.to_str(), cx.val_to_str(val), val, cleanups.get());
cleanups.get().iter().position(|cu| {
match *cu {
CleanTemp(v, _, _) if v == val => true,
_ => false
}
})
};
debug!("revoke_clean({}, {}) revoking {:?}",
cx.to_str(), cx.val_to_str(val), cleanup_pos);
for &i in cleanup_pos.iter() {
let new_cleanups = {
let cleanups = scope_info.cleanups.borrow();
vec::append(cleanups.get().slice(0u, i).to_owned(),
cleanups.get().slice(i + 1u, cleanups.get()
.len()))
};
scope_info.cleanups.set(new_cleanups);
shrink_scope_clean(scope_info, i);
}
})
}
pub fn block_cleanups(bcx: &Block) -> ~[cleanup] {
match bcx.scope.get() {
None => ~[],
Some(inf) => inf.cleanups.get(),
}
}
pub struct ScopeInfo<'a> {
parent: Option<&'a ScopeInfo<'a>>,
loop_break: Option<&'a Block<'a>>,
loop_label: Option<Name>,
// A list of functions that must be run when leaving this
// block, cleaning up any variables that were introduced in the
// block.
cleanups: RefCell<~[cleanup]>,
// Existing cleanup paths that may be reused, indexed by destination and
// cleared when the set of cleanups changes.
cleanup_paths: RefCell<~[cleanup_path]>,
// Unwinding landing pad. Also cleared when cleanups change.
landing_pad: Cell<Option<BasicBlockRef>>,
// info about the AST node this scope originated from, if any
node_info: Option<NodeInfo>,
}
impl<'a> ScopeInfo<'a> {
pub fn empty_cleanups(&self) -> bool {
let cleanups = self.cleanups.borrow();
cleanups.get().is_empty()
}
}
pub trait get_node_info {
fn info(&self) -> Option<NodeInfo>;
}
impl get_node_info for ast::Expr {
fn info(&self) -> Option<NodeInfo> {
Some(NodeInfo {id: self.id,
callee_id: self.get_callee_id(),
span: self.span})
}
}
impl get_node_info for ast::Block {
fn info(&self) -> Option<NodeInfo> {
Some(NodeInfo {id: self.id,
callee_id: None,
span: self.span})
}
}
impl get_node_info for Option<@ast::Expr> {
fn info(&self) -> Option<NodeInfo> {
self.as_ref().and_then(|s| s.info())
}
}
pub struct NodeInfo {
id: ast::NodeId,
callee_id: Option<ast::NodeId>,
span: Span
}
// Basic block context. We create a block context for each basic block
// (single-entry, single-exit sequence of instructions) we generate from Rust
// code. Each basic block we generate is attached to a function, typically
@ -627,13 +396,14 @@ pub struct Block<'a> {
llbb: BasicBlockRef,
terminated: Cell<bool>,
unreachable: Cell<bool>,
parent: Option<&'a Block<'a>>,
// The current scope within this basic block
scope: RefCell<Option<&'a ScopeInfo<'a>>>,
// Is this block part of a landing pad?
is_lpad: bool,
// info about the AST node this block originated from, if any
node_info: Option<NodeInfo>,
// AST node-id associated with this block, if any. Used for
// debugging purposes only.
opt_node_id: Option<ast::NodeId>,
// The function context for the function to which this block is
// attached.
fcx: &'a FunctionContext<'a>,
@ -642,20 +412,17 @@ pub struct Block<'a> {
impl<'a> Block<'a> {
pub fn new<'a>(
llbb: BasicBlockRef,
parent: Option<&'a Block<'a>>,
is_lpad: bool,
node_info: Option<NodeInfo>,
opt_node_id: Option<ast::NodeId>,
fcx: &'a FunctionContext<'a>)
-> &'a Block<'a> {
fcx.block_arena.alloc(Block {
llbb: llbb,
terminated: Cell::new(false),
unreachable: Cell::new(false),
parent: parent,
scope: RefCell::new(None),
is_lpad: is_lpad,
node_info: node_info,
fcx: fcx,
opt_node_id: opt_node_id,
fcx: fcx
})
}
@ -709,12 +476,8 @@ impl<'a> Block<'a> {
}
pub fn to_str(&self) -> ~str {
unsafe {
match self.node_info {
Some(node_info) => format!("[block {}]", node_info.id),
None => format!("[block {}]", transmute::<&Block, *Block>(self)),
}
}
let blk: *Block = self;
format!("[block {}]", blk)
}
}
@ -743,48 +506,6 @@ pub fn val_ty(v: ValueRef) -> Type {
}
}
pub fn in_scope_cx<'a>(
cx: &'a Block<'a>,
scope_id: Option<ast::NodeId>,
f: |si: &'a ScopeInfo<'a>|) {
let mut cur = cx;
let mut cur_scope = cur.scope.get();
loop {
cur_scope = match cur_scope {
Some(inf) => match scope_id {
Some(wanted) => match inf.node_info {
Some(NodeInfo { id: actual, .. }) if wanted == actual => {
debug!("in_scope_cx: selected cur={} (cx={}) info={:?}",
cur.to_str(), cx.to_str(), inf.node_info);
f(inf);
return;
},
_ => inf.parent,
},
None => {
debug!("in_scope_cx: selected cur={} (cx={}) info={:?}",
cur.to_str(), cx.to_str(), inf.node_info);
f(inf);
return;
}
},
None => {
cur = block_parent(cur);
cur.scope.get()
}
}
}
}
pub fn block_parent<'a>(cx: &'a Block<'a>) -> &'a Block<'a> {
match cx.parent {
Some(b) => b,
None => cx.sess().bug(format!("block_parent called on root block {:?}",
cx))
}
}
// Let T be the content of a box @T. tuplify_box_ty(t) returns the
// representation of @T as a tuple (i.e., the ty::t version of what T_box()
// returns).
@ -1012,7 +733,7 @@ pub enum mono_param_id {
mono_repr(uint /* size */,
uint /* align */,
MonoDataClass,
datum::DatumMode),
datum::RvalueMode),
}
#[deriving(Eq,IterBytes)]

View file

@ -15,10 +15,12 @@ use middle::trans::build::*;
use middle::trans::callee;
use middle::trans::common::*;
use middle::trans::debuginfo;
use middle::trans::cleanup;
use middle::trans::cleanup::CleanupMethods;
use middle::trans::expr;
use middle::ty;
use util::common::indenter;
use util::ppaux;
use util::ppaux::Repr;
use middle::trans::type_::Type;
@ -28,10 +30,56 @@ use syntax::ast_util;
use syntax::codemap::Span;
use syntax::visit::Visitor;
pub fn trans_block<'a>(bcx: &'a Block<'a>, b: &ast::Block, dest: expr::Dest)
-> &'a Block<'a> {
pub fn trans_stmt<'a>(cx: &'a Block<'a>,
s: &ast::Stmt)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_stmt");
let fcx = cx.fcx;
debug!("trans_stmt({})", s.repr(cx.tcx()));
if cx.sess().asm_comments() {
add_span_comment(cx, s.span, s.repr(cx.tcx()));
}
let mut bcx = cx;
let id = ast_util::stmt_id(s);
fcx.push_ast_cleanup_scope(id);
match s.node {
ast::StmtExpr(e, _) | ast::StmtSemi(e, _) => {
bcx = expr::trans_into(cx, e, expr::Ignore);
}
ast::StmtDecl(d, _) => {
match d.node {
ast::DeclLocal(ref local) => {
bcx = init_local(bcx, *local);
if cx.sess().opts.extra_debuginfo {
debuginfo::create_local_var_metadata(bcx, *local);
}
}
ast::DeclItem(i) => trans_item(cx.fcx.ccx, i)
}
}
ast::StmtMac(..) => cx.tcx().sess.bug("unexpanded macro")
}
bcx = fcx.pop_and_trans_ast_cleanup_scope(
bcx, ast_util::stmt_id(s));
return bcx;
}
pub fn trans_block<'a>(bcx: &'a Block<'a>,
b: &ast::Block,
dest: expr::Dest)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_block");
let fcx = bcx.fcx;
let mut bcx = bcx;
fcx.push_ast_cleanup_scope(b.id);
for s in b.stmts.iter() {
bcx = trans_stmt(bcx, *s);
}
@ -43,27 +91,26 @@ pub fn trans_block<'a>(bcx: &'a Block<'a>, b: &ast::Block, dest: expr::Dest)
assert!(dest == expr::Ignore || bcx.unreachable.get());
}
}
bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, b.id);
return bcx;
}
pub fn trans_if<'a>(
bcx: &'a Block<'a>,
cond: &ast::Expr,
thn: ast::P<ast::Block>,
els: Option<@ast::Expr>,
dest: expr::Dest)
-> &'a Block<'a> {
debug!("trans_if(bcx={}, cond={}, thn={:?}, dest={})",
bcx.to_str(), bcx.expr_to_str(cond), thn.id,
pub fn trans_if<'a>(bcx: &'a Block<'a>,
if_id: ast::NodeId,
cond: &ast::Expr,
thn: ast::P<ast::Block>,
els: Option<@ast::Expr>,
dest: expr::Dest)
-> &'a Block<'a> {
debug!("trans_if(bcx={}, if_id={}, cond={}, thn={:?}, dest={})",
bcx.to_str(), if_id, bcx.expr_to_str(cond), thn.id,
dest.to_str(bcx.ccx()));
let _indenter = indenter();
let _icx = push_ctxt("trans_if");
let mut bcx = bcx;
let Result {bcx, val: cond_val} =
expr::trans_to_datum(bcx, cond).to_result();
let cond_val = bool_to_i1(bcx, cond_val);
let cond_val = unpack_result!(bcx, expr::trans(bcx, cond).to_llbool());
// Drop branches that are known to be impossible
if is_const(cond_val) && !is_undef(cond_val) {
@ -76,11 +123,8 @@ pub fn trans_if<'a>(
None => {}
}
// if true { .. } [else { .. }]
return with_scope(bcx, thn.info(), "if_true_then", |bcx| {
let bcx_out = trans_block(bcx, thn, dest);
debuginfo::clear_source_location(bcx.fcx);
bcx_out
})
bcx = trans_block(bcx, thn, dest);
debuginfo::clear_source_location(bcx.fcx);
} else {
let mut trans = TransItemVisitor { ccx: bcx.fcx.ccx } ;
trans.visit_block(thn, ());
@ -88,229 +132,174 @@ pub fn trans_if<'a>(
match els {
// if false { .. } else { .. }
Some(elexpr) => {
return with_scope(bcx,
elexpr.info(),
"if_false_then",
|bcx| {
let bcx_out = trans_if_else(bcx, elexpr, dest, false);
debuginfo::clear_source_location(bcx.fcx);
bcx_out
})
bcx = expr::trans_into(bcx, elexpr, dest);
debuginfo::clear_source_location(bcx.fcx);
}
// if false { .. }
None => return bcx,
None => { }
}
}
return bcx;
}
let then_bcx_in = scope_block(bcx, thn.info(), "then");
let name = format!("then-block-{}-", thn.id);
let then_bcx_in = bcx.fcx.new_id_block(name, thn.id);
let then_bcx_out = trans_block(then_bcx_in, thn, dest);
debuginfo::clear_source_location(bcx.fcx);
let then_bcx_out = trans_block_cleanups(then_bcx_out,
block_cleanups(then_bcx_in));
// Calling trans_block directly instead of trans_expr
// because trans_expr will create another scope block
// context for the block, but we've already got the
// 'else' context
let (else_bcx_in, next_bcx) = match els {
Some(elexpr) => {
let else_bcx_in = scope_block(bcx, elexpr.info(), "else");
let else_bcx_out = trans_if_else(else_bcx_in, elexpr, dest, true);
(else_bcx_in, join_blocks(bcx, [then_bcx_out, else_bcx_out]))
}
_ => {
let next_bcx = sub_block(bcx, "next");
Br(then_bcx_out, next_bcx.llbb);
let next_bcx;
match els {
Some(elexpr) => {
let else_bcx_in = bcx.fcx.new_id_block("else-block", elexpr.id);
let else_bcx_out = expr::trans_into(else_bcx_in, elexpr, dest);
next_bcx = bcx.fcx.join_blocks(if_id,
[then_bcx_out, else_bcx_out]);
CondBr(bcx, cond_val, then_bcx_in.llbb, else_bcx_in.llbb);
}
(next_bcx, next_bcx)
}
};
debug!("then_bcx_in={}, else_bcx_in={}",
then_bcx_in.to_str(), else_bcx_in.to_str());
None => {
next_bcx = bcx.fcx.new_id_block("next-block", if_id);
Br(then_bcx_out, next_bcx.llbb);
CondBr(bcx, cond_val, then_bcx_in.llbb, next_bcx.llbb);
}
}
// Clear the source location because it is still set to whatever has been translated
// right before.
debuginfo::clear_source_location(else_bcx_in.fcx);
CondBr(bcx, cond_val, then_bcx_in.llbb, else_bcx_in.llbb);
return next_bcx;
debuginfo::clear_source_location(next_bcx.fcx);
// trans `else [ if { .. } ... | { .. } ]`
fn trans_if_else<'a>(
else_bcx_in: &'a Block<'a>,
elexpr: @ast::Expr,
dest: expr::Dest,
cleanup: bool)
-> &'a Block<'a> {
let else_bcx_out = match elexpr.node {
ast::ExprIf(_, _, _) => {
let elseif_blk = ast_util::block_from_expr(elexpr);
trans_block(else_bcx_in, elseif_blk, dest)
}
ast::ExprBlock(blk) => {
trans_block(else_bcx_in, blk, dest)
}
// would be nice to have a constraint on ifs
_ => else_bcx_in.tcx().sess.bug("strange alternative in if")
};
if cleanup {
debuginfo::clear_source_location(else_bcx_in.fcx);
trans_block_cleanups(else_bcx_out, block_cleanups(else_bcx_in))
} else {
else_bcx_out
}
}
next_bcx
}
pub fn join_blocks<'a>(
parent_bcx: &'a Block<'a>,
in_cxs: &[&'a Block<'a>])
-> &'a Block<'a> {
let out = sub_block(parent_bcx, "join");
let mut reachable = false;
for bcx in in_cxs.iter() {
if !bcx.unreachable.get() {
Br(*bcx, out.llbb);
reachable = true;
}
}
if !reachable {
Unreachable(out);
}
return out;
}
pub fn trans_while<'a>(
bcx: &'a Block<'a>,
cond: &ast::Expr,
body: &ast::Block)
-> &'a Block<'a> {
pub fn trans_while<'a>(bcx: &'a Block<'a>,
loop_id: ast::NodeId,
cond: &ast::Expr,
body: &ast::Block)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_while");
let next_bcx = sub_block(bcx, "while next");
let fcx = bcx.fcx;
//            bcx
//             |
//          loop_bcx
//             |
//        cond_bcx_in  <--------+
//             |                |
//        cond_bcx_out          |
//          |      |            |
//          |    body_bcx_in    |
//     +------+    |            |
//  cleanup_blk    |            |
//          |    body_bcx_out --+
//  next_bcx
//  next_bcx_in
let loop_bcx = loop_scope_block(bcx, next_bcx, None, "`while`",
body.info());
let cond_bcx_in = scope_block(loop_bcx, cond.info(), "while loop cond");
let body_bcx_in = scope_block(loop_bcx, body.info(), "while loop body");
Br(bcx, loop_bcx.llbb);
Br(loop_bcx, cond_bcx_in.llbb);
let next_bcx_in = fcx.new_id_block("while_exit", loop_id);
let cond_bcx_in = fcx.new_id_block("while_cond", cond.id);
let body_bcx_in = fcx.new_id_block("while_body", body.id);
fcx.push_loop_cleanup_scope(loop_id, [next_bcx_in, cond_bcx_in]);
Br(bcx, cond_bcx_in.llbb);
// compile the block where we will handle loop cleanups
let cleanup_llbb = fcx.normal_exit_block(loop_id, cleanup::EXIT_BREAK);
// compile the condition
let Result {bcx: cond_bcx_out, val: cond_val} =
expr::trans_to_datum(cond_bcx_in, cond).to_result();
let cond_val = bool_to_i1(cond_bcx_out, cond_val);
let cond_bcx_out =
trans_block_cleanups(cond_bcx_out, block_cleanups(cond_bcx_in));
CondBr(cond_bcx_out, cond_val, body_bcx_in.llbb, next_bcx.llbb);
expr::trans(cond_bcx_in, cond).to_llbool();
CondBr(cond_bcx_out, cond_val, body_bcx_in.llbb, cleanup_llbb);
// loop body:
let body_bcx_out = trans_block(body_bcx_in, body, expr::Ignore);
cleanup_and_Br(body_bcx_out, body_bcx_in, cond_bcx_in.llbb);
Br(body_bcx_out, cond_bcx_in.llbb);
return next_bcx;
fcx.pop_loop_cleanup_scope(loop_id);
return next_bcx_in;
}
pub fn trans_loop<'a>(
bcx: &'a Block<'a>,
body: &ast::Block,
opt_label: Option<Name>)
-> &'a Block<'a> {
pub fn trans_loop<'a>(bcx:&'a Block<'a>,
loop_id: ast::NodeId,
body: &ast::Block)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_loop");
let next_bcx = sub_block(bcx, "next");
let body_bcx_in = loop_scope_block(bcx, next_bcx, opt_label, "`loop`",
body.info());
let fcx = bcx.fcx;
// bcx
// |
// body_bcx_in
// |
// body_bcx_out
//
// next_bcx
//
// Links between body_bcx_in and next_bcx are created by
// break statements.
let next_bcx_in = bcx.fcx.new_id_block("loop_exit", loop_id);
let body_bcx_in = bcx.fcx.new_id_block("loop_body", body.id);
fcx.push_loop_cleanup_scope(loop_id, [next_bcx_in, body_bcx_in]);
Br(bcx, body_bcx_in.llbb);
let body_bcx_out = trans_block(body_bcx_in, body, expr::Ignore);
cleanup_and_Br(body_bcx_out, body_bcx_in, body_bcx_in.llbb);
return next_bcx;
Br(body_bcx_out, body_bcx_in.llbb);
fcx.pop_loop_cleanup_scope(loop_id);
return next_bcx_in;
}
pub fn trans_break_cont<'a>(
bcx: &'a Block<'a>,
opt_label: Option<Name>,
to_end: bool)
-> &'a Block<'a> {
pub fn trans_break_cont<'a>(bcx: &'a Block<'a>,
expr_id: ast::NodeId,
opt_label: Option<Name>,
exit: uint)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_break_cont");
// Locate closest loop block, outputting cleanup as we go.
let mut unwind = bcx;
let mut cur_scope = unwind.scope.get();
let mut target;
loop {
cur_scope = match cur_scope {
Some(&ScopeInfo {
loop_break: Some(brk),
loop_label: l,
parent,
..
}) => {
// If we're looking for a labeled loop, check the label...
target = if to_end {
brk
} else {
unwind
};
match opt_label {
Some(desired) => match l {
Some(actual) if actual == desired => break,
// If it doesn't match the one we want,
// don't break
_ => parent,
},
None => break,
let fcx = bcx.fcx;
if bcx.unreachable.get() {
return bcx;
}
// Locate loop that we will break to
let loop_id = match opt_label {
None => fcx.top_loop_scope(),
Some(_) => {
let def_map = bcx.tcx().def_map.borrow();
match def_map.get().find(&expr_id) {
Some(&ast::DefLabel(loop_id)) => loop_id,
ref r => {
bcx.tcx().sess.bug(format!("{:?} in def-map for label", r))
}
}
Some(inf) => inf.parent,
None => {
unwind = match unwind.parent {
Some(bcx) => bcx,
// This is a return from a loop body block
None => {
Store(bcx,
C_bool(!to_end),
bcx.fcx.llretptr.get().unwrap());
cleanup_and_leave(bcx, None, Some(bcx.fcx.get_llreturn()));
Unreachable(bcx);
return bcx;
}
};
unwind.scope.get()
}
}
}
cleanup_and_Br(bcx, unwind, target.llbb);
Unreachable(bcx);
};
// Generate appropriate cleanup code and branch
let cleanup_llbb = fcx.normal_exit_block(loop_id, exit);
Br(bcx, cleanup_llbb);
Unreachable(bcx); // anything afterwards should be ignored
return bcx;
}
pub fn trans_break<'a>(bcx: &'a Block<'a>, label_opt: Option<Name>)
-> &'a Block<'a> {
return trans_break_cont(bcx, label_opt, true);
pub fn trans_break<'a>(bcx: &'a Block<'a>,
expr_id: ast::NodeId,
label_opt: Option<Name>)
-> &'a Block<'a> {
return trans_break_cont(bcx, expr_id, label_opt, cleanup::EXIT_BREAK);
}
pub fn trans_cont<'a>(bcx: &'a Block<'a>, label_opt: Option<Name>)
-> &'a Block<'a> {
return trans_break_cont(bcx, label_opt, false);
pub fn trans_cont<'a>(bcx: &'a Block<'a>,
expr_id: ast::NodeId,
label_opt: Option<Name>)
-> &'a Block<'a> {
return trans_break_cont(bcx, expr_id, label_opt, cleanup::EXIT_LOOP);
}
pub fn trans_ret<'a>(bcx: &'a Block<'a>, e: Option<@ast::Expr>)
-> &'a Block<'a> {
pub fn trans_ret<'a>(bcx: &'a Block<'a>,
e: Option<@ast::Expr>)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_ret");
let fcx = bcx.fcx;
let mut bcx = bcx;
let dest = match bcx.fcx.llretptr.get() {
None => expr::Ignore,
@ -322,7 +311,8 @@ pub fn trans_ret<'a>(bcx: &'a Block<'a>, e: Option<@ast::Expr>)
}
_ => ()
}
cleanup_and_leave(bcx, None, Some(bcx.fcx.get_llreturn()));
let cleanup_llbb = fcx.return_exit_block();
Br(bcx, cleanup_llbb);
Unreachable(bcx);
return bcx;
}
@ -338,8 +328,8 @@ pub fn trans_fail_expr<'a>(
Some(arg_expr) => {
let ccx = bcx.ccx();
let tcx = ccx.tcx;
let arg_datum = unpack_datum!(
bcx, expr::trans_to_datum(bcx, arg_expr));
let arg_datum =
unpack_datum!(bcx, expr::trans_to_lvalue(bcx, arg_expr, "fail"));
if ty::type_is_str(arg_datum.ty) {
let (lldata, _) = arg_datum.get_vec_base_and_len_no_root(bcx);

File diff suppressed because it is too large.


@ -133,7 +133,7 @@ use middle::trans::adt;
use middle::trans::base;
use middle::trans::build;
use middle::trans::common::*;
use middle::trans::datum;
use middle::trans::datum::{Datum, Lvalue};
use middle::trans::machine;
use middle::trans::type_of;
use middle::trans::type_::Type;
@ -396,7 +396,7 @@ pub fn create_match_binding_metadata(bcx: &Block,
variable_ident: ast::Ident,
node_id: ast::NodeId,
span: Span,
datum: datum::Datum) {
datum: Datum<Lvalue>) {
if fn_should_be_ignored(bcx.fcx) {
return;
}


@ -0,0 +1,227 @@
/*!
# Documentation for the trans module
This module contains high-level summaries of how the various modules
in trans work. It is a work in progress. For detailed comments,
naturally, you can refer to the individual modules themselves.
## The Expr module
The expr module handles translation of expressions. The most general
translation routine is `trans()`, which will translate an expression
into a datum. `trans_into()` is also available, which will translate
an expression and write the result directly into memory, sometimes
avoiding the need for a temporary stack slot. Finally,
`trans_to_lvalue()` is available if you'd like to ensure that the
result has cleanup scheduled.
Internally, each of these functions dispatches to various other
expression functions depending on the kind of expression. We divide
up expressions into:
- **Datum expressions:** Those that most naturally yield values.
Examples would be `22`, `~x`, or `a + b` (when not overloaded).
- **DPS expressions:** Those that most naturally write into a location
in memory. Examples would be `foo()` or `Point { x: 3, y: 4 }`.
- **Statement expressions:** Those that do not generate a meaningful
result. Examples would be `while { ... }` or `return 44`.
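As a rough sketch (illustration only; `bcx` is the block context and
`e` some expression, mirroring uses of these routines elsewhere in
this commit):

    // Yields a Datum<Expr> describing the result:
    let datum = unpack_datum!(bcx, expr::trans(bcx, e));

    // Writes the result into a destination, or discards it:
    bcx = expr::trans_into(bcx, e, expr::Ignore);

    // Yields a Datum<Lvalue>, with cleanup scheduled:
    let lval = unpack_datum!(bcx, expr::trans_to_lvalue(bcx, e, "example"));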
## The Datum module
A `Datum` encapsulates the result of evaluating a Rust expression. It
contains a `ValueRef` indicating the result, a `ty::t` describing
the Rust type, but also a *kind*. The kind indicates whether the datum
has cleanup scheduled (lvalue) or not (rvalue) and -- in the case of
rvalues -- whether or not the value is "by ref" or "by value".
The datum API is designed to try and help you avoid memory errors like
forgetting to arrange cleanup or duplicating a value. The type of the
datum incorporates the kind, and thus reflects whether it has cleanup
scheduled:
- `Datum<Lvalue>` -- by ref, cleanup scheduled
- `Datum<Rvalue>` -- by value or by ref, no cleanup scheduled
- `Datum<Expr>` -- either `Datum<Lvalue>` or `Datum<Rvalue>`
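A minimal sketch of the shape this implies (the field names `val`,
`ty`, and `kind` appear elsewhere in this commit; the definition here
is otherwise illustrative):

    pub struct Datum<K> {
        val: ValueRef,   // the LLVM value itself
        ty: ty::t,       // the Rust type of the value
        kind: K,         // Lvalue, Rvalue, or Expr
    }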
Rvalue and expr datums are noncopyable, and most of the methods on
datums consume the datum itself (with some notable exceptions). This
reflects the fact that datums may represent affine values which ought
to be consumed exactly once, and if you were to try to (for example)
store an affine value multiple times, you would be duplicating it,
which would certainly be a bug.
Some of the datum methods, however, are designed to work only on
copyable values such as ints or pointers. Those methods may borrow the
datum (`&self`) rather than consume it, but they always include
assertions on the type of the value represented to check that this
makes sense. An example is `shallow_copy_and_take()`, which duplicates
a datum value.
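For example, the vector-writing code in this commit copies a copyable
element datum into each slot without consuming it (context
abbreviated):

    // The copy is written to `lleltptr`; `elem` itself is only borrowed:
    let bcx = elem.shallow_copy_and_take(bcx, lleltptr);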
Translating an expression always yields a `Datum<Expr>` result, but
the methods `to_[lr]value_datum()` can be used to coerce a
`Datum<Expr>` into a `Datum<Lvalue>` or `Datum<Rvalue>` as
needed. Coercing to an lvalue is fairly common, and generally occurs
whenever it is necessary to inspect a value and pull out its
subcomponents (for example, a match, or indexing expression). Coercing
to an rvalue is more unusual; it occurs when moving values from place
to place, such as in an assignment expression or parameter passing.
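A sketch of the lvalue coercion (the exact signature is an assumption;
only the method names are given above):

    let datum = unpack_datum!(bcx, expr::trans(bcx, e));  // Datum<Expr>
    // Schedules cleanup if `e` was an rvalue; `to_rvalue_datum()` is
    // the analogous coercion in the other direction:
    let lval = unpack_datum!(bcx, datum.to_lvalue_datum(bcx, "example", e.id));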
### Lvalues in detail
An lvalue datum is one for which cleanup has been scheduled. Lvalue
datums are always located in memory, and thus their LLVM `ValueRef` is
always a pointer to the actual Rust value. This means
that if the Datum has a Rust type of `int`, then the LLVM type of the
`ValueRef` will be `int*` (pointer to int).
Because lvalues already have cleanups scheduled, the memory must be
zeroed when the value is moved out, to prevent the scheduled cleanup
from running on the old location (presuming that the Rust type needs
drop in the first place; otherwise it doesn't matter). The Datum code
automatically performs this zeroing when the value is stored to a new
location, for example.
Lvalues usually result from evaluating lvalue expressions. For
example, evaluating a local variable `x` yields an lvalue, as does a
reference to a field like `x.f` or an index `x[i]`.
Lvalue datums can also arise by *converting* an rvalue into an lvalue.
This is done with the `to_lvalue_datum` method defined on
`Datum<Expr>`. Basically this method just schedules cleanup if the
datum is an rvalue, possibly storing the value into a stack slot first
if needed. Converting rvalues into lvalues occurs in constructs like
`&foo()` or `match foo() { ref x => ... }`, where the user is
implicitly requesting a temporary.
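In user-level terms (an illustrative example, not compiler code):

    fn foo() -> ~int { ~22 }
    // `foo()` is an rvalue; borrowing it converts the rvalue into an
    // lvalue: the ~int is written to a temporary stack slot and its
    // cleanup (freeing the box) is scheduled on the enclosing scope.
    let x = &foo();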
Somewhat surprisingly, not all lvalue expressions yield lvalue datums
when trans'd. Ultimately the reason for this is to micro-optimize
the resulting LLVM. For example, consider the following code:
fn foo() -> ~int { ... }
let x = *foo();
The expression `*foo()` is an lvalue, but if you invoke `expr::trans`,
it will return an rvalue datum. See `deref_once` in expr.rs for
more details.
### Rvalues in detail
Rvalue datums are values with no cleanup scheduled. One must be
careful with rvalue datums to ensure that cleanup is properly
arranged, usually by converting to an lvalue datum or by invoking the
`add_clean` method.
### Scratch datums
Sometimes you need some temporary scratch space. The functions
`[lr]value_scratch_datum()` can be used to get temporary stack
space. As their names suggest, they yield lvalues and rvalues
respectively. That is, the slot from `lvalue_scratch_datum` will have
cleanup arranged, and the slot from `rvalue_scratch_datum` will not.
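Sketches of both, condensed from uses elsewhere in this commit (`ty`,
`e`, and `scope` stand in for whatever type, expression, and cleanup
scope are at hand):

    // No cleanup scheduled; the caller must arrange for the result to
    // be consumed or freed:
    let scratch = rvalue_scratch_datum(bcx, ty, "");

    // Cleanup scheduled in `scope`; the closure initializes the slot
    // before the cleanup is attached:
    let scratch = unpack_datum!(bcx, lvalue_scratch_datum(
        bcx, ty, "example", false, scope, (),
        |(), bcx, llval| expr::trans_into(bcx, e, expr::SaveIn(llval))));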
## The Cleanup module
The cleanup module tracks what values need to be cleaned up as scopes
are exited, either via failure or just normal control flow. The basic
idea is that the function context maintains a stack of cleanup scopes
that are pushed/popped as we traverse the AST tree. There is typically
at least one cleanup scope per AST node; some AST nodes may introduce
additional temporary scopes.
Cleanup items can be scheduled into any of the scopes on the stack.
Typically, when a scope is popped, we will also generate the code for
each of its cleanups at that time. This corresponds to a normal exit
from a block (for example, an expression completing evaluation
successfully without failure). However, it is also possible to pop a
block *without* executing its cleanups; this is typically used to
guard intermediate values that must be cleaned up on failure, but not
if everything goes right. See the section on custom scopes below for
more details.
Cleanup scopes come in three kinds:
- **AST scopes:** each AST node in a function body has a corresponding
AST scope. We push the AST scope when we start generating code for an AST
node and pop it once the AST node has been fully generated.
- **Loop scopes:** loops have an additional cleanup scope. Cleanups are
never scheduled into loop scopes; instead, they are used to record the
basic blocks that we should branch to when a `continue` or `break` statement
is encountered.
- **Custom scopes:** custom scopes are typically used to ensure cleanup
of intermediate values.
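For example, statement translation in this commit brackets each
statement in its AST scope, generating the scope's cleanups when the
scope is popped:

    fcx.push_ast_cleanup_scope(id);
    // ... translate the statement, possibly scheduling cleanups ...
    bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, id);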
### When to schedule cleanup
Although the cleanup system is intended to *feel* fairly declarative,
it's still important to time calls to `schedule_clean()` correctly.
Basically, you should not schedule cleanup for memory until it has
been initialized, because if an unwind should occur before the memory
is fully initialized, then the cleanup will run and try to free or
drop uninitialized memory. If the initialization itself produces
byproducts that need to be freed, then you should use temporary custom
scopes to ensure that those byproducts will get freed on unwind. For
example, an expression like `~foo()` will first allocate a box in the
heap and then call `foo()` -- if `foo()` should fail, this box needs
to be *shallowly* freed.
### Long-distance jumps
In addition to popping a scope, which corresponds to normal control
flow exiting the scope, we may also *jump out* of a scope into some
earlier scope on the stack. This can occur in response to a `return`,
`break`, or `continue` statement, but also in response to failure. In
any of these cases, we will generate a series of cleanup blocks for
each of the scopes that is exited. So, if the stack contains scopes A
... Z, and we break out of a loop whose corresponding cleanup scope is
X, we would generate cleanup blocks for the cleanups in X, Y, and Z.
After cleanup is done we would branch to the exit point for scope X.
But if failure should occur, we would generate cleanups for all the
scopes from A to Z and then resume the unwind process afterwards.
To avoid generating tons of code, we cache the cleanup blocks that we
create for breaks, returns, unwinds, and other jumps. Whenever a new
cleanup is scheduled, though, we must clear these cached blocks. A
possible improvement would be to keep the cached blocks but simply
generate a new block which performs the additional cleanup and then
branches to the existing cached blocks.
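As a concrete instance, the `break` path out of a `while` loop in this
commit exits through a block that performs the pending cleanups
(condensed from `trans_while`):

    fcx.push_loop_cleanup_scope(loop_id, [next_bcx_in, cond_bcx_in]);
    // `break` branches here; cleanups for the exited scopes run first:
    let cleanup_llbb = fcx.normal_exit_block(loop_id, cleanup::EXIT_BREAK);
    // ... translate the condition and body ...
    fcx.pop_loop_cleanup_scope(loop_id);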
### AST and loop cleanup scopes
AST cleanup scopes are pushed and popped as we begin and end processing
an AST node. They are used to house cleanups related to rvalue temporaries that
get referenced (e.g., due to an expression like `&Foo()`). Whenever an
AST scope is popped, we always trans all the cleanups, adding the cleanup
code after the postdominator of the AST node.
AST nodes that represent breakable loops also push a loop scope; the
loop scope never has any actual cleanups; it's just used to point to
the basic blocks where control should flow after a "continue" or
"break" statement. Popping a loop scope never generates code.
### Custom cleanup scopes
Custom cleanup scopes are used for a variety of purposes. The most
common, though, is to handle temporary byproducts, where cleanup only
needs to occur on failure. The general strategy is to push a custom
cleanup scope, schedule *shallow* cleanups into the custom scope, and
then pop the custom scope (without transing the cleanups) when
execution succeeds normally. This way the cleanups are only trans'd on
unwind, and only up until the point where execution succeeded, at
which time the complete value should be stored in an lvalue or some
other place where normal cleanup applies.
To spell it out, here is an example. Imagine an expression `~expr`.
We would basically:
1. Push a custom cleanup scope C.
2. Allocate the `~` box.
3. Schedule a shallow free in the scope C.
4. Trans `expr` into the box.
5. Pop the scope C.
6. Return the box as an rvalue.
This way, if a failure occurs while transing `expr`, the custom
cleanup scope C is still on the stack and hence the box will be freed. The trans
code for `expr` itself is responsible for freeing any other byproducts
that may be in play.
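The vector-allocation code in this commit follows exactly this pattern
(condensed; a `~expr` translation would look similar):

    // 1. Push a custom cleanup scope C.
    let temp_scope = fcx.push_custom_cleanup_scope();
    // 2.-3. Allocate the box and schedule a shallow free in C.
    fcx.schedule_free_value(cleanup::CustomScope(temp_scope), val, heap);
    // 4. Trans the contents into the box.
    let bcx = write_content(bcx, &vt, vstore_expr, content_expr,
                            SaveIn(dataptr));
    // 5. Pop the scope C without running the free, and
    // 6. return the box as an rvalue.
    fcx.pop_custom_cleanup_scope(temp_scope);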
*/

File diff suppressed because it is too large.


@ -290,12 +290,12 @@ pub fn trans_native_call<'a>(
// A function pointer is called without the declaration available, so we have to apply
// any attributes with ABI implications directly to the call instruction. Right now, the
// only attribute we need to worry about is `sret`.
let attrs;
if fn_type.ret_ty.is_indirect() {
attrs = &[(1, StructRetAttribute)];
let sret_attr = [(1, StructRetAttribute)];
let attrs = if fn_type.ret_ty.is_indirect() {
sret_attr.as_slice()
} else {
attrs = &[];
}
&[]
};
let llforeign_retval = CallWithConv(bcx, llfn, llargs_foreign, cc, attrs);
// If the function we just called does not use an outpointer,
@ -491,7 +491,6 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,
None,
None,
id,
None,
[]);
return llfndecl;
}


@ -21,9 +21,10 @@ use middle::lang_items::{FreeFnLangItem, ExchangeFreeFnLangItem};
use middle::trans::adt;
use middle::trans::base::*;
use middle::trans::callee;
use middle::trans::cleanup;
use middle::trans::cleanup::CleanupMethods;
use middle::trans::closure;
use middle::trans::common::*;
use middle::trans::datum::immediate_rvalue;
use middle::trans::build::*;
use middle::trans::expr;
use middle::trans::machine::*;
@ -269,25 +270,23 @@ fn call_tydesc_glue<'a>(cx: &'a Block<'a>, v: ValueRef, t: ty::t, field: uint)
fn make_visit_glue<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
-> &'a Block<'a> {
let _icx = push_ctxt("make_visit_glue");
with_scope(bcx, None, "visitor cleanup", |bcx| {
let mut bcx = bcx;
let (visitor_trait, object_ty) = match ty::visitor_object_ty(bcx.tcx(),
ty::ReStatic) {
Ok(pair) => pair,
Err(s) => {
bcx.tcx().sess.fatal(s);
}
};
let v = PointerCast(bcx, v, type_of(bcx.ccx(), object_ty).ptr_to());
bcx = reflect::emit_calls_to_trait_visit_ty(bcx, t, v, visitor_trait.def_id);
// The visitor is a boxed object and needs to be dropped
add_clean(bcx, v, object_ty);
bcx
})
let mut bcx = bcx;
let (visitor_trait, object_ty) = match ty::visitor_object_ty(bcx.tcx(),
ty::ReStatic) {
Ok(pair) => pair,
Err(s) => {
bcx.tcx().sess.fatal(s);
}
};
let v = PointerCast(bcx, v, type_of(bcx.ccx(), object_ty).ptr_to());
bcx = reflect::emit_calls_to_trait_visit_ty(bcx, t, v, visitor_trait.def_id);
bcx
}
pub fn make_free_glue<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
-> &'a Block<'a> {
pub fn make_free_glue<'a>(bcx: &'a Block<'a>,
v: ValueRef,
t: ty::t)
-> &'a Block<'a> {
// NB: v0 is an *alias* of type t here, not a direct value.
let _icx = push_ctxt("make_free_glue");
match ty::get(t).sty {
@ -297,14 +296,13 @@ pub fn make_free_glue<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
let bcx = drop_ty(bcx, body, body_ty);
trans_free(bcx, v)
}
ty::ty_uniq(..) => {
let box_datum = immediate_rvalue(Load(bcx, v), t);
let not_null = IsNotNull(bcx, box_datum.val);
ty::ty_uniq(content_ty) => {
let llbox = Load(bcx, v);
let not_null = IsNotNull(bcx, llbox);
with_cond(bcx, not_null, |bcx| {
let body_datum = box_datum.box_body(bcx);
let bcx = drop_ty(bcx, body_datum.to_ref_llval(bcx), body_datum.ty);
trans_exchange_free(bcx, box_datum.val)
})
let bcx = drop_ty(bcx, llbox, content_ty);
trans_exchange_free(bcx, llbox)
})
}
ty::ty_vec(_, ty::vstore_uniq) | ty::ty_str(ty::vstore_uniq) |
ty::ty_vec(_, ty::vstore_box) | ty::ty_str(ty::vstore_box) => {
@ -362,21 +360,24 @@ pub fn trans_struct_drop<'a>(
// Be sure to put all of the fields into a scope so we can use an invoke
// instruction to call the user destructor but still call the field
// destructors if the user destructor fails.
with_scope(bcx, None, "field drops", |bcx| {
let self_arg = PointerCast(bcx, v0, params[0]);
let args = ~[self_arg];
let field_scope = bcx.fcx.push_custom_cleanup_scope();
// Add all the fields as values which need to be cleaned at the end of
// this scope.
let field_tys = ty::struct_fields(bcx.tcx(), class_did, substs);
for (i, fld) in field_tys.iter().enumerate() {
let llfld_a = adt::trans_field_ptr(bcx, repr, v0, 0, i);
add_clean(bcx, llfld_a, fld.mt.ty);
}
let self_arg = PointerCast(bcx, v0, params[0]);
let args = ~[self_arg];
let (_, bcx) = invoke(bcx, dtor_addr, args, [], None);
bcx
})
// Add all the fields as values which need to be cleaned at the end of
// this scope.
let field_tys = ty::struct_fields(bcx.tcx(), class_did, substs);
for (i, fld) in field_tys.iter().enumerate() {
let llfld_a = adt::trans_field_ptr(bcx, repr, v0, 0, i);
bcx.fcx.schedule_drop_mem(cleanup::CustomScope(field_scope),
llfld_a,
fld.mt.ty);
}
let (_, bcx) = invoke(bcx, dtor_addr, args, [], None);
bcx.fcx.pop_and_trans_custom_cleanup_scope(bcx, field_scope)
}
pub fn make_drop_glue<'a>(bcx: &'a Block<'a>, v0: ValueRef, t: ty::t)
@ -451,11 +452,13 @@ pub fn make_drop_glue<'a>(bcx: &'a Block<'a>, v0: ValueRef, t: ty::t)
fn decr_refcnt_maybe_free<'a>(bcx: &'a Block<'a>, box_ptr_ptr: ValueRef,
t: Option<ty::t>) -> &'a Block<'a> {
let _icx = push_ctxt("decr_refcnt_maybe_free");
let fcx = bcx.fcx;
let ccx = bcx.ccx();
let decr_bcx = sub_block(bcx, "decr");
let free_bcx = sub_block(decr_bcx, "free");
let next_bcx = sub_block(bcx, "next");
let decr_bcx = fcx.new_temp_block("decr");
let free_bcx = fcx.new_temp_block("free");
let next_bcx = fcx.new_temp_block("next");
let box_ptr = Load(bcx, box_ptr_ptr);
let llnotnull = IsNotNull(bcx, box_ptr);
CondBr(bcx, llnotnull, decr_bcx.llbb, next_bcx.llbb);
@ -593,7 +596,7 @@ fn make_generic_glue(ccx: @CrateContext, t: ty::t, llfn: ValueRef,
let _s = StatRecorder::new(ccx, glue_name);
let fcx = new_fn_ctxt(ccx, ~[], llfn, ty::mk_nil(), None);
init_function(&fcx, false, ty::mk_nil(), None, None);
init_function(&fcx, false, ty::mk_nil(), None);
lib::llvm::SetLinkage(llfn, lib::llvm::InternalLinkage);
ccx.stats.n_glues_created.set(ccx.stats.n_glues_created.get() + 1u);


@ -178,7 +178,6 @@ pub fn maybe_instantiate_inline(ccx: @CrateContext, fn_id: ast::DefId)
self_kind,
None,
mth.id,
Some(&*mth),
[]);
}
local_def(mth.id)


@ -153,14 +153,14 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx, item.id));
let fcx = new_fn_ctxt_w_id(ccx,
path,
decl,
item.id,
output_type,
Some(substs),
Some(item.span));
init_function(&fcx, true, output_type, Some(substs), None);
let fcx = new_fn_ctxt_detailed(ccx,
path,
decl,
item.id,
output_type,
Some(substs),
Some(item.span));
init_function(&fcx, true, output_type, Some(substs));
set_always_inline(fcx.llfn);
@ -254,27 +254,18 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let lltp_ty = type_of::type_of(ccx, tp_ty);
Ret(bcx, C_uint(ccx, machine::llsize_of_real(ccx, lltp_ty)));
}
"move_val" => {
"move_val_init" => {
// Create a datum reflecting the value being moved.
// Use `appropriate_mode` so that the datum is by ref
// if the value is non-immediate. Note that, with
// intrinsics, there are no argument cleanups to
// concern ourselves with.
// concern ourselves with, so we can use an rvalue datum.
let tp_ty = substs.tys[0];
let mode = appropriate_mode(ccx, tp_ty);
let mode = appropriate_rvalue_mode(ccx, tp_ty);
let src = Datum {val: get_param(decl, first_real_arg + 1u),
ty: tp_ty, mode: mode};
bcx = src.move_to(bcx, DROP_EXISTING,
get_param(decl, first_real_arg));
RetVoid(bcx);
}
"move_val_init" => {
// See comments for `"move_val"`.
let tp_ty = substs.tys[0];
let mode = appropriate_mode(ccx, tp_ty);
let src = Datum {val: get_param(decl, first_real_arg + 1u),
ty: tp_ty, mode: mode};
bcx = src.move_to(bcx, INIT, get_param(decl, first_real_arg));
ty: tp_ty,
kind: Rvalue(mode)};
bcx = src.store_to(bcx, get_param(decl, first_real_arg));
RetVoid(bcx);
}
"min_align_of" => {


@ -18,6 +18,7 @@ use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::callee::*;
use middle::trans::callee;
use middle::trans::cleanup;
use middle::trans::common::*;
use middle::trans::datum::*;
use middle::trans::expr::{SaveIn, Ignore};
@ -132,7 +133,6 @@ pub fn trans_method(ccx: @CrateContext,
self_ty,
param_substs,
method.id,
Some(method),
[]);
llfn
}
@ -141,7 +141,8 @@ pub fn trans_method_callee<'a>(
bcx: &'a Block<'a>,
callee_id: ast::NodeId,
this: &ast::Expr,
mentry: typeck::method_map_entry)
mentry: typeck::method_map_entry,
arg_cleanup_scope: cleanup::ScopeId)
-> Callee<'a> {
let _icx = push_ctxt("impl::trans_method_callee");
@ -153,9 +154,8 @@ pub fn trans_method_callee<'a>(
match mentry.origin {
typeck::method_static(did) => {
let self_ty = monomorphize_type(bcx, mentry.self_ty);
let mut temp_cleanups = ~[];
let Result {bcx, val} = trans_arg_expr(bcx, self_ty, this,
&mut temp_cleanups,
arg_cleanup_scope,
DontAutorefArg);
// HACK should not need the pointer cast, eventually trans_fn_ref
// should return a function type with the right type for self.
@ -168,7 +168,6 @@ pub fn trans_method_callee<'a>(
data: Method(MethodData {
llfn: llfn_val,
llself: val,
temp_cleanup: temp_cleanups.head_opt().map(|v| *v)
})
}
}
@ -186,7 +185,8 @@ pub fn trans_method_callee<'a>(
let vtbl = find_vtable(bcx.tcx(), substs, p, b);
trans_monomorphized_callee(bcx, callee_id, this, mentry,
trait_id, off, vtbl)
trait_id, off, vtbl,
arg_cleanup_scope)
}
// how to get rid of this?
None => fail!("trans_method_callee: missing param_substs")
@ -197,7 +197,8 @@ pub fn trans_method_callee<'a>(
trans_trait_callee(bcx,
callee_id,
mt.real_index,
this)
this,
arg_cleanup_scope)
}
}
}
@ -319,7 +320,8 @@ pub fn trans_monomorphized_callee<'a>(
mentry: typeck::method_map_entry,
trait_id: ast::DefId,
n_method: uint,
vtbl: typeck::vtable_origin)
vtbl: typeck::vtable_origin,
arg_cleanup_scope: cleanup::ScopeId)
-> Callee<'a> {
let _icx = push_ctxt("impl::trans_monomorphized_callee");
return match vtbl {
@ -330,9 +332,8 @@ pub fn trans_monomorphized_callee<'a>(
// obtain the `self` value:
let self_ty = monomorphize_type(bcx, mentry.self_ty);
let mut temp_cleanups = ~[];
let Result {bcx, val} = trans_arg_expr(bcx, self_ty, base,
&mut temp_cleanups,
arg_cleanup_scope,
DontAutorefArg);
// create a concatenated set of substitutions which includes
@ -361,7 +362,6 @@ pub fn trans_monomorphized_callee<'a>(
data: Method(MethodData {
llfn: llfn_val,
llself: val,
temp_cleanup: temp_cleanups.head_opt().map(|v| *v)
})
}
}
@ -425,7 +425,8 @@ pub fn trans_trait_callee<'a>(
bcx: &'a Block<'a>,
callee_id: ast::NodeId,
n_method: uint,
self_expr: &ast::Expr)
self_expr: &ast::Expr,
arg_cleanup_scope: cleanup::ScopeId)
-> Callee<'a> {
/*!
* Create a method callee where the method is coming from a trait
@ -443,34 +444,35 @@ pub fn trans_trait_callee<'a>(
let self_ty = expr_ty_adjusted(bcx, self_expr);
let self_scratch = match ty::get(self_ty).sty {
ty::ty_trait(_, _, ty::RegionTraitStore(..), _, _) => {
unpack_datum!(bcx, expr::trans_to_datum(bcx, self_expr))
unpack_datum!(bcx, expr::trans(bcx, self_expr))
}
_ => {
let d = scratch_datum(bcx, self_ty, "__trait_callee", false);
bcx = expr::trans_into(bcx, self_expr, expr::SaveIn(d.val));
// Arrange a temporary cleanup for the object in case something
// should go wrong before the method is actually *invoked*.
d.add_clean(bcx);
d
let datum = unpack_datum!(
bcx,
lvalue_scratch_datum(
bcx, self_ty, "__trait_callee", false, arg_cleanup_scope, (),
|(), bcx, llval| expr::trans_into(bcx, self_expr,
expr::SaveIn(llval))));
datum.to_expr_datum()
}
};
let callee_ty = node_id_type(bcx, callee_id);
assert!(self_scratch.kind.is_by_ref()); // FIXME why special case above??
trans_trait_callee_from_llval(bcx,
callee_ty,
n_method,
self_scratch.val,
Some(self_scratch.val))
self_scratch.val)
}
pub fn trans_trait_callee_from_llval<'a>(
bcx: &'a Block<'a>,
callee_ty: ty::t,
n_method: uint,
llpair: ValueRef,
temp_cleanup: Option<ValueRef>)
-> Callee<'a> {
llpair: ValueRef)
-> Callee<'a> {
/*!
* Same as `trans_trait_callee()` above, except that it is given
* a by-ref pointer to the object pair.
@ -501,7 +503,6 @@ pub fn trans_trait_callee_from_llval<'a>(
data: Method(MethodData {
llfn: mptr,
llself: llself,
temp_cleanup: temp_cleanup
})
};
}
@ -632,41 +633,38 @@ fn emit_vtable_methods(bcx: &Block,
})
}
pub fn trans_trait_cast<'a>(
bcx: &'a Block<'a>,
val: &ast::Expr,
id: ast::NodeId,
dest: expr::Dest,
obj: Option<Datum>)
-> &'a Block<'a> {
pub fn trans_trait_cast<'a>(bcx: &'a Block<'a>,
datum: Datum<Expr>,
id: ast::NodeId,
dest: expr::Dest)
-> &'a Block<'a> {
/*!
* Generates the code to convert from a pointer (`~T`, `&T`, etc)
* into an object (`~Trait`, `&Trait`, etc). This means creating a
* pair where the first word is the pointer and the second word is
* an appropriate vtable.
*/
let mut bcx = bcx;
let _icx = push_ctxt("impl::trans_cast");
let lldest = match dest {
Ignore => {
return expr::trans_into(bcx, val, Ignore);
return datum.clean(bcx, "trait_cast", id);
}
SaveIn(dest) => dest
};
let ccx = bcx.ccx();
let v_ty = expr_ty(bcx, val);
let v_ty = datum.ty;
let llbox_ty = type_of(bcx.ccx(), datum.ty);
// Store the pointer into the first half of pair.
let mut llboxdest = GEPi(bcx, lldest, [0u, abi::trt_field_box]);
// Just store the pointer into the pair. (Region/borrowed
// and boxed trait objects are represented as pairs, and
// have no type descriptor field.)
llboxdest = PointerCast(bcx,
llboxdest,
type_of(bcx.ccx(), v_ty).ptr_to());
bcx = match obj {
Some(datum) => {
datum.store_to_dest(bcx, SaveIn(llboxdest))
}
None => expr::trans_into(bcx, val, SaveIn(llboxdest))
};
llboxdest = PointerCast(bcx, llboxdest, llbox_ty.ptr_to());
bcx = datum.store_to(bcx, llboxdest);
// Store the vtable into the pair or triple.
// Store the vtable into the second half of pair.
// This is structured a bit funny because of dynamic borrow failures.
let origins = {
let res = {
@ -677,9 +675,9 @@ pub fn trans_trait_cast<'a>(
res[0]
};
let vtable = get_vtable(bcx, v_ty, origins);
Store(bcx, vtable, PointerCast(bcx,
GEPi(bcx, lldest, [0u, abi::trt_field_vtable]),
val_ty(vtable).ptr_to()));
let llvtabledest = GEPi(bcx, lldest, [0u, abi::trt_field_vtable]);
let llvtabledest = PointerCast(bcx, llvtabledest, val_ty(vtable).ptr_to());
Store(bcx, vtable, llvtabledest);
bcx
}


@ -8,6 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub mod doc;
pub mod macros;
pub mod inline;
pub mod monomorphize;
@ -44,3 +45,4 @@ pub mod type_;
pub mod value;
pub mod basic_block;
pub mod llrepr;
pub mod cleanup;


@ -238,7 +238,6 @@ pub fn monomorphic_fn(ccx: @CrateContext,
None,
Some(psubsts),
fn_id.node,
None,
[]);
d
}


@ -61,7 +61,7 @@ impl<'a> Reflector<'a> {
let bcx = self.bcx;
let str_vstore = ty::vstore_slice(ty::ReStatic);
let str_ty = ty::mk_str(bcx.tcx(), str_vstore);
let scratch = scratch_datum(bcx, str_ty, "", false);
let scratch = rvalue_scratch_datum(bcx, str_ty, "");
let len = C_uint(bcx.ccx(), s.len());
let c_str = PointerCast(bcx, C_cstr(bcx.ccx(), s), Type::i8p());
Store(bcx, c_str, GEPi(bcx, scratch.val, [ 0, 0 ]));
@ -90,6 +90,7 @@ impl<'a> Reflector<'a> {
}
pub fn visit(&mut self, ty_name: &str, args: &[ValueRef]) {
let fcx = self.bcx.fcx;
let tcx = self.bcx.tcx();
let mth_idx = ty::method_idx(
tcx.sess.ident_of(~"visit_" + ty_name),
@ -106,14 +107,13 @@ impl<'a> Reflector<'a> {
let bool_ty = ty::mk_bool();
let result = unpack_result!(bcx, callee::trans_call_inner(
self.bcx, None, mth_ty, bool_ty,
|bcx| meth::trans_trait_callee_from_llval(bcx,
mth_ty,
mth_idx,
v,
None),
|bcx, _| meth::trans_trait_callee_from_llval(bcx,
mth_ty,
mth_idx,
v),
ArgVals(args), None, DontAutorefArg));
let result = bool_to_i1(bcx, result);
let next_bcx = sub_block(bcx, "next");
let next_bcx = fcx.new_temp_block("next");
CondBr(bcx, result, next_bcx.llbb, self.final_bcx.llbb);
self.bcx = next_bcx
}
@ -298,7 +298,7 @@ impl<'a> Reflector<'a> {
llfdecl,
ty::mk_u64(),
None);
init_function(&fcx, false, ty::mk_u64(), None, None);
init_function(&fcx, false, ty::mk_u64(), None);
let arg = unsafe {
//
@ -308,13 +308,13 @@ impl<'a> Reflector<'a> {
//
llvm::LLVMGetParam(llfdecl, fcx.arg_pos(0u) as c_uint)
};
let mut bcx = fcx.entry_bcx.get().unwrap();
let bcx = fcx.entry_bcx.get().unwrap();
let arg = BitCast(bcx, arg, llptrty);
let ret = adt::trans_get_discr(bcx, repr, arg, Some(Type::i64()));
Store(bcx, ret, fcx.llretptr.get().unwrap());
match fcx.llreturn.get() {
Some(llreturn) => cleanup_and_Br(bcx, bcx, llreturn),
None => bcx = cleanup_block(bcx, Some(bcx.llbb))
Some(llreturn) => Br(bcx, llreturn),
None => {}
};
finish_fn(&fcx, bcx);
llfdecl
@ -389,7 +389,8 @@ pub fn emit_calls_to_trait_visit_ty<'a>(
visitor_val: ValueRef,
visitor_trait_id: DefId)
-> &'a Block<'a> {
let final = sub_block(bcx, "final");
let fcx = bcx.fcx;
let final = fcx.new_temp_block("final");
let tydesc_ty = ty::get_tydesc_ty(bcx.ccx().tcx).unwrap();
let tydesc_ty = type_of(bcx.ccx(), tydesc_ty);
let mut r = Reflector {


@ -17,6 +17,8 @@ use middle::trans::base::*;
use middle::trans::base;
use middle::trans::build::*;
use middle::trans::callee;
use middle::trans::cleanup;
use middle::trans::cleanup::CleanupMethods;
use middle::trans::common::*;
use middle::trans::datum::*;
use middle::trans::expr::{Dest, Ignore, SaveIn};
@ -26,7 +28,6 @@ use middle::trans::machine::{llsize_of, nonzero_llsize_of, llsize_of_alloc};
use middle::trans::type_::Type;
use middle::trans::type_of;
use middle::ty;
use util::common::indenter;
use util::ppaux::ty_to_str;
use syntax::ast;
@ -193,7 +194,6 @@ pub fn trans_fixed_vstore<'a>(
debug!("trans_fixed_vstore(vstore_expr={}, dest={:?})",
bcx.expr_to_str(vstore_expr), dest.to_str(bcx.ccx()));
let _indenter = indenter();
let vt = vec_types_from_expr(bcx, vstore_expr);
@ -214,17 +214,18 @@ pub fn trans_slice_vstore<'a>(
content_expr: &ast::Expr,
dest: expr::Dest)
-> &'a Block<'a> {
//!
//
// &[...] allocates memory on the stack and writes the values into it,
// returning a slice (pair of ptr, len). &"..." is similar except that
// the memory can be statically allocated.
/*!
* &[...] allocates memory on the stack and writes the values into it,
* returning a slice (pair of ptr, len). &"..." is similar except that
* the memory can be statically allocated.
*/
let ccx = bcx.ccx();
let fcx = bcx.fcx;
let ccx = fcx.ccx;
let mut bcx = bcx;
debug!("trans_slice_vstore(vstore_expr={}, dest={})",
bcx.expr_to_str(vstore_expr), dest.to_str(ccx));
let _indenter = indenter();
// Handle the &"..." case:
match content_expr.node {
@ -244,21 +245,29 @@ pub fn trans_slice_vstore<'a>(
let count = elements_required(bcx, content_expr);
debug!("vt={}, count={:?}", vt.to_str(ccx), count);
// Make a fixed-length backing array and allocate it on the stack.
let llcount = C_uint(ccx, count);
let llfixed = base::arrayalloca(bcx, vt.llunit_ty, llcount);
let llfixed;
if count == 0 {
// Zero-length array: just use NULL as the data pointer
llfixed = C_null(vt.llunit_ty.ptr_to());
} else {
// Make a fixed-length backing array and allocate it on the stack.
llfixed = base::arrayalloca(bcx, vt.llunit_ty, llcount);
// Arrange for the backing array to be cleaned up.
let fixed_ty = ty::mk_vec(bcx.tcx(),
ty::mt {ty: vt.unit_ty, mutbl: ast::MutMutable},
ty::vstore_fixed(count));
let llfixed_ty = type_of::type_of(bcx.ccx(), fixed_ty).ptr_to();
let llfixed_casted = BitCast(bcx, llfixed, llfixed_ty);
add_clean(bcx, llfixed_casted, fixed_ty);
// Arrange for the backing array to be cleaned up.
let fixed_ty = ty::mk_vec(bcx.tcx(),
ty::mt {ty: vt.unit_ty,
mutbl: ast::MutMutable},
ty::vstore_fixed(count));
let llfixed_ty = type_of::type_of(bcx.ccx(), fixed_ty).ptr_to();
let llfixed_casted = BitCast(bcx, llfixed, llfixed_ty);
let cleanup_scope = cleanup::temporary_scope(bcx.tcx(), content_expr.id);
fcx.schedule_drop_mem(cleanup_scope, llfixed_casted, fixed_ty);
// Generate the content into the backing array.
let bcx = write_content(bcx, &vt, vstore_expr,
// Generate the content into the backing array.
bcx = write_content(bcx, &vt, vstore_expr,
content_expr, SaveIn(llfixed));
}
// Finally, create the slice pair itself.
match dest {
@ -278,16 +287,15 @@ pub fn trans_lit_str<'a>(
str_lit: @str,
dest: Dest)
-> &'a Block<'a> {
//!
//
// Literal strings translate to slices into static memory. This is
// different from trans_slice_vstore() above because it does not need to copy
// the content anywhere.
/*!
* Literal strings translate to slices into static memory. This is
* different from trans_slice_vstore() above because it does not need to copy
* the content anywhere.
*/
debug!("trans_lit_str(lit_expr={}, dest={})",
bcx.expr_to_str(lit_expr),
dest.to_str(bcx.ccx()));
let _indenter = indenter();
match dest {
Ignore => bcx,
@ -308,20 +316,19 @@ pub fn trans_lit_str<'a>(
}
pub fn trans_uniq_or_managed_vstore<'a>(
bcx: &'a Block<'a>,
heap: heap,
vstore_expr: &ast::Expr,
content_expr: &ast::Expr)
-> DatumBlock<'a> {
//!
//
// @[...] or ~[...] (also @"..." or ~"...") allocate boxes in the
// appropriate heap and write the array elements into them.
pub fn trans_uniq_or_managed_vstore<'a>(bcx: &'a Block<'a>,
heap: heap,
vstore_expr: &ast::Expr,
content_expr: &ast::Expr)
-> DatumBlock<'a, Expr> {
/*!
* @[...] or ~[...] (also @"..." or ~"...") allocate boxes in the
* appropriate heap and write the array elements into them.
*/
debug!("trans_uniq_or_managed_vstore(vstore_expr={}, heap={:?})",
bcx.expr_to_str(vstore_expr), heap);
let _indenter = indenter();
let fcx = bcx.fcx;
// Handle ~"".
match heap {
@ -334,7 +341,7 @@ pub fn trans_uniq_or_managed_vstore<'a>(
let llptrval = PointerCast(bcx, llptrval, Type::i8p());
let llsizeval = C_uint(bcx.ccx(), s.len());
let typ = ty::mk_str(bcx.tcx(), ty::vstore_uniq);
let lldestval = scratch_datum(bcx, typ, "", false);
let lldestval = rvalue_scratch_datum(bcx, typ, "");
let alloc_fn = langcall(bcx,
Some(lit.span),
"",
@ -343,11 +350,8 @@ pub fn trans_uniq_or_managed_vstore<'a>(
bcx,
alloc_fn,
[ llptrval, llsizeval ],
Some(expr::SaveIn(lldestval.to_ref_llval(bcx)))).bcx;
return DatumBlock {
bcx: bcx,
datum: lldestval
};
Some(expr::SaveIn(lldestval.val))).bcx;
return DatumBlock(bcx, lldestval).to_expr_datumblock();
}
_ => {}
}
@ -364,7 +368,11 @@ pub fn trans_uniq_or_managed_vstore<'a>(
let Result {bcx, val} = alloc_vec(bcx, vt.unit_ty, count, heap);
add_clean_free(bcx, val, heap);
// Create a temporary scope lest execution should fail while
// constructing the vector.
let temp_scope = fcx.push_custom_cleanup_scope();
fcx.schedule_free_value(cleanup::CustomScope(temp_scope), val, heap);
let dataptr = get_dataptr(bcx, get_bodyptr(bcx, val, vt.vec_ty));
debug!("alloc_vec() returned val={}, dataptr={}",
@ -373,9 +381,9 @@ pub fn trans_uniq_or_managed_vstore<'a>(
let bcx = write_content(bcx, &vt, vstore_expr,
content_expr, SaveIn(dataptr));
revoke_clean(bcx, val);
fcx.pop_custom_cleanup_scope(temp_scope);
return immediate_rvalue_bcx(bcx, val, vt.vec_ty);
return immediate_rvalue_bcx(bcx, val, vt.vec_ty).to_expr_datumblock();
}
pub fn write_content<'a>(
@ -386,13 +394,13 @@ pub fn write_content<'a>(
dest: Dest)
-> &'a Block<'a> {
let _icx = push_ctxt("tvec::write_content");
let fcx = bcx.fcx;
let mut bcx = bcx;
debug!("write_content(vt={}, dest={}, vstore_expr={:?})",
vt.to_str(bcx.ccx()),
dest.to_str(bcx.ccx()),
bcx.expr_to_str(vstore_expr));
let _indenter = indenter();
match content_expr.node {
ast::ExprLit(lit) => {
@ -430,19 +438,19 @@ pub fn write_content<'a>(
}
SaveIn(lldest) => {
let mut temp_cleanups = ~[];
let temp_scope = fcx.push_custom_cleanup_scope();
for (i, element) in elements.iter().enumerate() {
let lleltptr = GEPi(bcx, lldest, [i]);
debug!("writing index {:?} with lleltptr={:?}",
i, bcx.val_to_str(lleltptr));
bcx = expr::trans_into(bcx, *element,
SaveIn(lleltptr));
add_clean_temp_mem(bcx, lleltptr, vt.unit_ty);
temp_cleanups.push(lleltptr);
}
for cleanup in temp_cleanups.iter() {
revoke_clean(bcx, *cleanup);
fcx.schedule_drop_mem(
cleanup::CustomScope(temp_scope),
lleltptr,
vt.unit_ty);
}
fcx.pop_custom_cleanup_scope(temp_scope);
}
}
return bcx;
@ -463,14 +471,16 @@ pub fn write_content<'a>(
// this can only happen as a result of OOM. So we just skip out on the
// cleanup since things would *probably* be broken at that point anyways.
let elem = unpack_datum!(bcx, {
expr::trans_to_datum(bcx, element)
let elem = unpack_datum!(bcx, expr::trans(bcx, element));
assert!(!ty::type_moves_by_default(bcx.tcx(), elem.ty));
let bcx = iter_vec_loop(bcx, lldest, vt,
C_uint(bcx.ccx(), count), |set_bcx, lleltptr, _| {
elem.shallow_copy_and_take(set_bcx, lleltptr)
});
iter_vec_loop(bcx, lldest, vt,
C_uint(bcx.ccx(), count), |set_bcx, lleltptr, _| {
elem.copy_to(set_bcx, INIT, lleltptr)
})
elem.add_clean_if_rvalue(bcx, element.id);
bcx
}
}
}
@ -522,15 +532,16 @@ pub fn elements_required(bcx: &Block, content_expr: &ast::Expr) -> uint {
}
}
pub fn get_base_and_byte_len(bcx: &Block, llval: ValueRef, vec_ty: ty::t)
pub fn get_base_and_byte_len(bcx: &Block,
llval: ValueRef,
vec_ty: ty::t)
-> (ValueRef, ValueRef) {
//!
//
// Converts a vector into the slice pair. The vector should be stored in
// `llval` which should be either immediate or by-ref as appropriate for
// the vector type. If you have a datum, you would probably prefer to
// call `Datum::get_base_and_byte_len()` which will handle any conversions for
// you.
/*!
* Converts a vector into the slice pair. The vector should be
* stored in `llval` which should be by ref. If you have a datum,
* you would probably prefer to call
* `Datum::get_base_and_byte_len()`.
*/
let ccx = bcx.ccx();
let vt = vec_types(bcx, vec_ty);
@ -542,32 +553,38 @@ pub fn get_base_and_byte_len(bcx: &Block, llval: ValueRef, vec_ty: ty::t)
match vstore {
ty::vstore_fixed(n) => {
assert!(!type_is_immediate(bcx.ccx(), vt.vec_ty));
let base = GEPi(bcx, llval, [0u, 0u]);
let len = Mul(bcx, C_uint(ccx, n), vt.llunit_size);
(base, len)
}
ty::vstore_slice(_) => {
assert!(!type_is_immediate(bcx.ccx(), vt.vec_ty));
let base = Load(bcx, GEPi(bcx, llval, [0u, abi::slice_elt_base]));
let count = Load(bcx, GEPi(bcx, llval, [0u, abi::slice_elt_len]));
let len = Mul(bcx, count, vt.llunit_size);
(base, len)
}
ty::vstore_uniq | ty::vstore_box => {
assert!(type_is_immediate(bcx.ccx(), vt.vec_ty));
let llval = Load(bcx, llval);
let body = get_bodyptr(bcx, llval, vec_ty);
(get_dataptr(bcx, body), get_fill(bcx, body))
}
}
}
pub fn get_base_and_len(bcx: &Block, llval: ValueRef, vec_ty: ty::t)
pub fn get_base_and_len(bcx: &Block,
llval: ValueRef,
vec_ty: ty::t)
-> (ValueRef, ValueRef) {
//!
//
// Converts a vector into the slice pair. The vector should be stored in
// `llval` which should be either immediate or by-ref as appropriate for
// the vector type. If you have a datum, you would probably prefer to
// call `Datum::get_base_and_len()` which will handle any conversions for
// you.
/*!
* Converts a vector into the slice pair. The vector should be
* stored in `llval` which should be by-reference. If you have a
* datum, you would probably prefer to call
* `Datum::get_base_and_len()` which will handle any conversions
* for you.
*/
let ccx = bcx.ccx();
let vt = vec_types(bcx, vec_ty);
@ -579,15 +596,19 @@ pub fn get_base_and_len(bcx: &Block, llval: ValueRef, vec_ty: ty::t)
match vstore {
ty::vstore_fixed(n) => {
assert!(!type_is_immediate(bcx.ccx(), vt.vec_ty));
let base = GEPi(bcx, llval, [0u, 0u]);
(base, C_uint(ccx, n))
}
ty::vstore_slice(_) => {
assert!(!type_is_immediate(bcx.ccx(), vt.vec_ty));
let base = Load(bcx, GEPi(bcx, llval, [0u, abi::slice_elt_base]));
let count = Load(bcx, GEPi(bcx, llval, [0u, abi::slice_elt_len]));
(base, count)
}
ty::vstore_uniq | ty::vstore_box => {
assert!(type_is_immediate(bcx.ccx(), vt.vec_ty));
let llval = Load(bcx, llval);
let body = get_bodyptr(bcx, llval, vec_ty);
(get_dataptr(bcx, body), UDiv(bcx, get_fill(bcx, body), vt.llunit_size))
}
@ -606,12 +627,13 @@ pub fn iter_vec_loop<'r,
f: iter_vec_block<'r,'b>)
-> &'b Block<'b> {
let _icx = push_ctxt("tvec::iter_vec_loop");
let fcx = bcx.fcx;
let next_bcx = sub_block(bcx, "iter_vec_loop: while next");
let loop_bcx = loop_scope_block(bcx, next_bcx, None, "iter_vec_loop", None);
let cond_bcx = scope_block(loop_bcx, None, "iter_vec_loop: loop cond");
let body_bcx = scope_block(loop_bcx, None, "iter_vec_loop: body: main");
let inc_bcx = scope_block(loop_bcx, None, "iter_vec_loop: loop inc");
let next_bcx = fcx.new_temp_block("expr_repeat: while next");
let loop_bcx = fcx.new_temp_block("expr_repeat");
let cond_bcx = fcx.new_temp_block("expr_repeat: loop cond");
let body_bcx = fcx.new_temp_block("expr_repeat: body: set");
let inc_bcx = fcx.new_temp_block("expr_repeat: body: inc");
Br(bcx, loop_bcx.llbb);
let loop_counter = {
@ -663,6 +685,7 @@ pub fn iter_vec_raw<'r,
f: iter_vec_block<'r,'b>)
-> &'b Block<'b> {
let _icx = push_ctxt("tvec::iter_vec_raw");
let fcx = bcx.fcx;
let vt = vec_types(bcx, vec_ty);
if (vt.llunit_alloc_size == 0) {
@ -676,14 +699,14 @@ pub fn iter_vec_raw<'r,
let data_end_ptr = pointer_add_byte(bcx, data_ptr, fill);
// Now perform the iteration.
let header_bcx = base::sub_block(bcx, "iter_vec_loop_header");
let header_bcx = fcx.new_temp_block("iter_vec_loop_header");
Br(bcx, header_bcx.llbb);
let data_ptr =
Phi(header_bcx, val_ty(data_ptr), [data_ptr], [bcx.llbb]);
let not_yet_at_end =
ICmp(header_bcx, lib::llvm::IntULT, data_ptr, data_end_ptr);
let body_bcx = base::sub_block(header_bcx, "iter_vec_loop_body");
let next_bcx = base::sub_block(header_bcx, "iter_vec_next");
let body_bcx = fcx.new_temp_block("iter_vec_loop_body");
let next_bcx = fcx.new_temp_block("iter_vec_next");
CondBr(header_bcx, not_yet_at_end, body_bcx.llbb, next_bcx.llbb);
let body_bcx = f(body_bcx, data_ptr, vt.unit_ty);
AddIncomingToPhi(data_ptr, InBoundsGEP(body_bcx, data_ptr,
@ -691,7 +714,6 @@ pub fn iter_vec_raw<'r,
body_bcx.llbb);
Br(body_bcx, header_bcx.llbb);
next_bcx
}
}


@ -16,19 +16,17 @@
use middle::borrowck::{RootInfo, root_map_key};
use middle::trans::base::*;
use middle::trans::cleanup;
use middle::trans::common::*;
use middle::trans::datum::*;
use syntax::codemap::Span;
use syntax::ast;
pub fn root_and_write_guard<'a>(
datum: &Datum,
bcx: &'a Block<'a>,
span: Span,
expr_id: ast::NodeId,
derefs: uint)
-> &'a Block<'a> {
pub fn root_and_write_guard<'a, K:KindOps>(datum: &Datum<K>,
bcx: &'a Block<'a>,
span: Span,
expr_id: ast::NodeId,
derefs: uint) -> &'a Block<'a> {
let key = root_map_key { id: expr_id, derefs: derefs };
debug!("write_guard::root_and_write_guard(key={:?})", key);
@ -43,13 +41,11 @@ pub fn root_and_write_guard<'a>(
}
}
fn root<'a>(
datum: &Datum,
bcx: &'a Block<'a>,
_: Span,
root_key: root_map_key,
root_info: RootInfo)
-> &'a Block<'a> {
fn root<'a, K:KindOps>(datum: &Datum<K>,
bcx: &'a Block<'a>,
_span: Span,
root_key: root_map_key,
root_info: RootInfo) -> &'a Block<'a> {
//! In some cases, borrowck will decide that an @T/@[]/@str
//! value must be rooted for the program to be safe. In that
//! case, we will call this function, which will stash a copy
@ -58,17 +54,12 @@ fn root<'a>(
debug!("write_guard::root(root_key={:?}, root_info={:?}, datum={:?})",
root_key, root_info, datum.to_str(bcx.ccx()));
// First, root the datum. Note that we must zero this value,
// Root the datum. Note that we must zero this value,
// because sometimes we root on one path but not another.
// See e.g. #4904.
let scratch = scratch_datum(bcx, datum.ty, "__write_guard", true);
datum.copy_to_datum(bcx, INIT, scratch);
let cleanup_bcx = find_bcx_for_scope(bcx, root_info.scope);
add_clean_temp_mem_in_scope(cleanup_bcx,
root_info.scope,
scratch.val,
scratch.ty);
bcx
lvalue_scratch_datum(
bcx, datum.ty, "__write_guard", true,
cleanup::AstScope(root_info.scope), (),
|(), bcx, llval| datum.shallow_copy_and_take(bcx, llval)).bcx
}


@ -3983,8 +3983,31 @@ pub fn ast_expr_vstore_to_vstore(fcx: @FnCtxt,
ast::ExprVstoreUniq => ty::vstore_uniq,
ast::ExprVstoreBox => ty::vstore_box,
ast::ExprVstoreSlice | ast::ExprVstoreMutSlice => {
let r = fcx.infcx().next_region_var(infer::AddrOfSlice(e.span));
ty::vstore_slice(r)
match e.node {
ast::ExprLit(..) |
ast::ExprVec([], _) => {
// string literals and *empty slices* live in static memory
ty::vstore_slice(ty::ReStatic)
}
ast::ExprRepeat(..) |
ast::ExprVec(..) => {
// vector literals are temporaries on the stack
match fcx.tcx().region_maps.temporary_scope(e.id) {
Some(scope) => {
let r = ty::ReScope(scope);
ty::vstore_slice(r)
}
None => {
// this slice occurs in a static somewhere
ty::vstore_slice(ty::ReStatic)
}
}
}
_ => {
fcx.ccx.tcx.sess.span_bug(
e.span, format!("vstore with unexpected contents"))
}
}
}
}
}
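// For illustration only (example code is an assumption, not part of
// this commit), the arms above give:
//
//     let s = "hello";         // string literal: static memory
//     let e: &[int] = &[];     // empty slice literal: also static
//     let v = &[1, 2, 3];      // non-empty slice literal: a stack
//                              // temporary, limited to its temporary scope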
@ -4103,7 +4126,7 @@ pub fn check_intrinsic_type(ccx: @CrateCtxt, it: &ast::ForeignItem) {
"uninit" => (1u, ~[], param(ccx, 0u)),
"forget" => (1u, ~[ param(ccx, 0) ], ty::mk_nil()),
"transmute" => (2, ~[ param(ccx, 0) ], param(ccx, 1)),
"move_val" | "move_val_init" => {
"move_val_init" => {
(1u,
~[
ty::mk_mut_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)), param(ccx, 0)),


@ -55,21 +55,26 @@ pub struct Rcx {
repeating_scope: ast::NodeId,
}
fn encl_region_of_def(fcx: @FnCtxt, def: ast::Def) -> ty::Region {
fn region_of_def(fcx: @FnCtxt, def: ast::Def) -> ty::Region {
/*!
* Returns the validity region of `def` -- that is, how long
* is `def` valid?
*/
let tcx = fcx.tcx();
match def {
DefLocal(node_id, _) | DefArg(node_id, _) |
DefSelf(node_id, _) | DefBinding(node_id, _) => {
tcx.region_maps.encl_region(node_id)
tcx.region_maps.var_region(node_id)
}
DefUpvar(_, subdef, closure_id, body_id) => {
match ty::ty_closure_sigil(fcx.node_ty(closure_id)) {
BorrowedSigil => encl_region_of_def(fcx, *subdef),
BorrowedSigil => region_of_def(fcx, *subdef),
ManagedSigil | OwnedSigil => ReScope(body_id)
}
}
_ => {
tcx.sess.bug(format!("unexpected def in encl_region_of_def: {:?}",
tcx.sess.bug(format!("unexpected def in region_of_def: {:?}",
def))
}
}
@ -193,7 +198,6 @@ fn visit_item(_rcx: &mut Rcx, _item: &ast::Item) {
}
fn visit_block(rcx: &mut Rcx, b: &ast::Block) {
rcx.fcx.tcx().region_maps.record_cleanup_scope(b.id);
visit::walk_block(rcx, b, ());
}
@ -239,9 +243,9 @@ fn constrain_bindings_in_pat(pat: &ast::Pat, rcx: &mut Rcx) {
// that the lifetime of any regions that appear in a
// variable's type enclose at least the variable's scope.
let encl_region = tcx.region_maps.encl_region(id);
let var_region = tcx.region_maps.var_region(id);
constrain_regions_in_type_of_node(
rcx, id, encl_region,
rcx, id, var_region,
infer::BindingTypeIsNotValidAtDecl(span));
})
}
@ -255,55 +259,6 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) {
method_map.get().contains_key(&expr.id)
};
// Record cleanup scopes, which are used by borrowck to decide the
// maximum lifetime of a temporary rvalue. These were derived by
// examining where trans creates block scopes, not because this
// reflects some principled decision around temporary lifetimes.
// Ordinarily this would seem like something that should be setup
// in region, but we need to know which uses of operators are
// overloaded. See #3511.
let tcx = rcx.fcx.tcx();
match expr.node {
// You'd think that x += y where `+=` is overloaded would be a
// cleanup scope. You'd be... kind of right. In fact the
// handling of `+=` and friends in trans for overloaded
// operators is a hopeless mess and I can't figure out how to
// represent it. - ndm
//
// ast::expr_assign_op(..) |
ast::ExprIndex(..) |
ast::ExprBinary(..) |
ast::ExprUnary(..) if has_method_map => {
tcx.region_maps.record_cleanup_scope(expr.id);
}
ast::ExprBinary(_, ast::BiAnd, lhs, rhs) |
ast::ExprBinary(_, ast::BiOr, lhs, rhs) => {
tcx.region_maps.record_cleanup_scope(lhs.id);
tcx.region_maps.record_cleanup_scope(rhs.id);
}
ast::ExprCall(..) |
ast::ExprMethodCall(..) => {
tcx.region_maps.record_cleanup_scope(expr.id);
}
ast::ExprMatch(_, ref arms) => {
tcx.region_maps.record_cleanup_scope(expr.id);
for arm in arms.iter() {
for guard in arm.guard.iter() {
tcx.region_maps.record_cleanup_scope(guard.id);
}
}
}
ast::ExprLoop(ref body, _) => {
tcx.region_maps.record_cleanup_scope(body.id);
}
ast::ExprWhile(cond, ref body) => {
tcx.region_maps.record_cleanup_scope(cond.id);
tcx.region_maps.record_cleanup_scope(body.id);
}
_ => {}
}
// Check any autoderefs or autorefs that appear.
{
let adjustments = rcx.fcx.inh.adjustments.borrow();
@ -701,10 +656,10 @@ fn constrain_free_variables(rcx: &mut Rcx,
for freevar in get_freevars(tcx, expr.id).iter() {
debug!("freevar def is {:?}", freevar.def);
let def = freevar.def;
let en_region = encl_region_of_def(rcx.fcx, def);
debug!("en_region = {}", en_region.repr(tcx));
let def_region = region_of_def(rcx.fcx, def);
debug!("def_region = {}", def_region.repr(tcx));
rcx.fcx.mk_subr(true, infer::FreeVariable(freevar.span),
region, en_region);
region, def_region);
}
}


@ -673,6 +673,14 @@ impl Repr for ast::Item {
}
}
impl Repr for ast::Stmt {
fn repr(&self, tcx: ctxt) -> ~str {
format!("stmt({}: {})",
ast_util::stmt_id(self),
pprust::stmt_to_str(self, tcx.sess.intr()))
}
}
impl Repr for ast::Pat {
fn repr(&self, tcx: ctxt) -> ~str {
format!("pat({}: {})",


@ -276,7 +276,8 @@ pub fn run(mut crate: clean::Crate, dst: Path) {
write!(w, "var allPaths = \\{");
for (i, (&id, &(ref fqp, short))) in cache.paths.iter().enumerate() {
if i > 0 { write!(w, ","); }
write!(w, "'{}':\\{type:'{}',name:'{}'\\}", id, short, *fqp.last());
write!(w, "'{}':\\{type:'{}',name:'{}'\\}",
id, short, *fqp.last());
}
write!(w, "\\};");
w.flush();


@ -521,7 +521,8 @@ mod tests {
#[test]
fn test_ascii_vec() {
assert_eq!((&[40u8, 32u8, 59u8]).to_ascii(), v2ascii!([40, 32, 59]));
let test = &[40u8, 32u8, 59u8];
assert_eq!(test.to_ascii(), v2ascii!([40, 32, 59]));
assert_eq!("( ;".to_ascii(), v2ascii!([40, 32, 59]));
// FIXME: #5475 borrowchk error, owned vectors do not live long enough
// if chained-from directly
@ -587,14 +588,18 @@ mod tests {
assert_eq!("zoä华".to_ascii_opt(), None);
assert_eq!((&[127u8, 128u8, 255u8]).to_ascii_opt(), None);
let test1 = &[127u8, 128u8, 255u8];
assert_eq!((test1).to_ascii_opt(), None);
let v = [40u8, 32u8, 59u8];
assert_eq!(v.to_ascii_opt(), Some(v2ascii!(&[40, 32, 59])));
let v2 = v2ascii!(&[40, 32, 59]);
assert_eq!(v.to_ascii_opt(), Some(v2));
let v = [127u8, 128u8, 255u8];
assert_eq!(v.to_ascii_opt(), None);
assert_eq!("( ;".to_ascii_opt(), Some(v2ascii!(&[40, 32, 59])));
let v = "( ;";
let v2 = v2ascii!(&[40, 32, 59]);
assert_eq!(v.to_ascii_opt(), Some(v2));
assert_eq!("zoä华".to_ascii_opt(), None);
assert_eq!((~[40u8, 32u8, 59u8]).into_ascii_opt(), Some(v2ascii!(~[40, 32, 59])));

View file

@ -406,7 +406,8 @@ mod test {
#[test]
fn test_read_char() {
let mut r = BufReader::new(bytes!("Việt"));
let b = bytes!("Việt");
let mut r = BufReader::new(b);
assert_eq!(r.read_char(), Some('V'));
assert_eq!(r.read_char(), Some('i'));
assert_eq!(r.read_char(), Some('ệ'));
@ -416,7 +417,8 @@ mod test {
#[test]
fn test_read_bad_char() {
let mut r = BufReader::new(bytes!(0x80));
let b = bytes!(0x80);
let mut r = BufReader::new(b);
assert_eq!(r.read_char(), None);
}

View file

@ -181,7 +181,7 @@ mod tests {
let io = ~[];
let args = ProcessConfig {
program: "/bin/sh",
args: [~"-c", ~"true"],
args: &[~"-c", ~"true"],
env: None,
cwd: None,
io: io,
@ -198,7 +198,7 @@ mod tests {
let io = ~[];
let args = ProcessConfig {
program: "if-this-is-a-binary-then-the-world-has-ended",
args: [],
args: &[],
env: None,
cwd: None,
io: io,
@ -215,7 +215,7 @@ mod tests {
let io = ~[];
let args = ProcessConfig {
program: "/bin/sh",
args: [~"-c", ~"exit 1"],
args: &[~"-c", ~"exit 1"],
env: None,
cwd: None,
io: io,
@ -231,7 +231,7 @@ mod tests {
let io = ~[];
let args = ProcessConfig {
program: "/bin/sh",
args: [~"-c", ~"kill -1 $$"],
args: &[~"-c", ~"kill -1 $$"],
env: None,
cwd: None,
io: io,
@ -274,7 +274,7 @@ mod tests {
let io = ~[Ignored, CreatePipe(false, true)];
let args = ProcessConfig {
program: "/bin/sh",
args: [~"-c", ~"echo foobar"],
args: &[~"-c", ~"echo foobar"],
env: None,
cwd: None,
io: io,
@ -289,7 +289,7 @@ mod tests {
let cwd = Some("/");
let args = ProcessConfig {
program: "/bin/sh",
args: [~"-c", ~"pwd"],
args: &[~"-c", ~"pwd"],
env: None,
cwd: cwd,
io: io,
@ -304,7 +304,7 @@ mod tests {
CreatePipe(false, true)];
let args = ProcessConfig {
program: "/bin/sh",
args: [~"-c", ~"read line; echo $line"],
args: &[~"-c", ~"read line; echo $line"],
env: None,
cwd: None,
io: io,

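The repeated `args: [..]` → `args: &[..]` edits in these tests are fallout from the same rationalization: a bare fixed-length vector literal stored into a `&[T]` field is an rvalue temporary, and writing the borrow explicitly documents the scope being relied upon. A standalone sketch of the pattern (the `Config` struct here is hypothetical, standing in for `ProcessConfig`):

struct Config<'a> {
    program: &'a str,
    args: &'a [~str],
}

pub fn main() {
    // The borrow sits in a `let` initializer, so the vector temporary
    // lives for the enclosing block, matching the life of `config`.
    let config = Config {
        program: "/bin/sh",
        args: &[~"-c", ~"true"],
    };
    assert_eq!(config.args.len(), 2);
}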
View file

@ -48,6 +48,7 @@ use kinds::Send;
use str::OwnedStr;
use to_str::ToStr;
use util;
use vec;
/// The option type
#[deriving(Clone, DeepClone, Eq, Ord, TotalEq, TotalOrd, ToStr)]
@ -98,6 +99,24 @@ impl<T> Option<T> {
match *self { Some(ref mut x) => Some(x), None => None }
}
/// Convert from `Option<T>` to `&[T]` (without copying)
#[inline]
pub fn as_slice<'r>(&'r self) -> &'r [T] {
match *self {
Some(ref x) => vec::ref_slice(x),
None => &[]
}
}
/// Convert from `Option<T>` to `&[T]` (without copying)
#[inline]
pub fn as_mut_slice<'r>(&'r mut self) -> &'r mut [T] {
match *self {
Some(ref mut x) => vec::mut_ref_slice(x),
None => &mut []
}
}
/////////////////////////////////////////////////////////////////////////
// Getting to contained values
/////////////////////////////////////////////////////////////////////////

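A short, illustrative use of the new `as_slice`/`as_mut_slice` adapters: an option behaves as a zero- or one-element slice, so it can be fed to slice-based code without copying:

pub fn main() {
    let some = Some(3);
    let none: Option<int> = None;

    assert_eq!(some.as_slice().len(), 1);
    assert_eq!(none.as_slice().len(), 0);

    // Mutate the contained value, if any, through the slice view.
    let mut opt = Some(10);
    for x in opt.as_mut_slice().mut_iter() {
        *x += 1;
    }
    assert_eq!(opt, Some(11));
}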
View file

@ -237,7 +237,10 @@ impl GenericPath for Path {
let mut ita = self.components();
let mut itb = other.components();
if bytes!(".") == self.repr {
return itb.next() != Some(bytes!(".."));
return match itb.next() {
None => true,
Some(b) => b != bytes!("..")
};
}
loop {
match (ita.next(), itb.next()) {
@ -463,7 +466,10 @@ mod tests {
macro_rules! b(
($($arg:expr),+) => (
bytes!($($arg),+)
{
static the_bytes: &'static [u8] = bytes!($($arg),+);
the_bytes
}
)
)
@ -689,7 +695,8 @@ mod tests {
);
(v: $path:expr, $op:ident, $exp:expr) => (
{
let path = Path::new($path);
let arg = $path;
let path = Path::new(arg);
assert_eq!(path.$op(), $exp);
}
);

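The rewritten `b!` macro above works around the tightened temporary lifetimes: `bytes!(..)` expands to a fixed-size vector rvalue, and hoisting it into a `static` yields a `&'static [u8]` that no longer depends on any statement's scope. The same trick applies wherever a literal-only rvalue must outlive its statement; a standalone sketch (the `static_bytes!` name is illustrative):

#[feature(macro_rules)];

macro_rules! static_bytes(
    ($($arg:expr),+) => (
        {
            // A `static` gives the bytes a 'static lifetime, detaching
            // them from the temporary scope of the current statement.
            static the_bytes: &'static [u8] = bytes!($($arg),+);
            the_bytes
        }
    )
)

pub fn main() {
    let b = static_bytes!("hi", 0);
    assert_eq!(b.len(), 3);
}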
View file

@ -1074,7 +1074,10 @@ mod tests {
macro_rules! b(
($($arg:expr),+) => (
bytes!($($arg),+)
{
static the_bytes: &'static [u8] = bytes!($($arg),+);
the_bytes
}
)
)
@ -1372,20 +1375,23 @@ mod tests {
macro_rules! t(
(s: $path:expr, $op:ident, $exp:expr) => (
{
let path = Path::new($path);
let path = $path;
let path = Path::new(path);
assert_eq!(path.$op(), Some($exp));
}
);
(s: $path:expr, $op:ident, $exp:expr, opt) => (
{
let path = Path::new($path);
let path = $path;
let path = Path::new(path);
let left = path.$op();
assert_eq!(left, $exp);
}
);
(v: $path:expr, $op:ident, $exp:expr) => (
{
let path = Path::new($path);
let path = $path;
let path = Path::new(path);
assert_eq!(path.$op(), $exp);
}
)

View file

@ -130,14 +130,14 @@ mod tests {
let child_crate = CrateMap {
version: 2,
entries: entries,
children: [],
children: &[],
event_loop_factory: None,
};
let root_crate = CrateMap {
version: 2,
entries: [],
children: [&child_crate, &child_crate],
entries: &[],
children: &[&child_crate, &child_crate],
event_loop_factory: None,
};
@ -157,29 +157,29 @@ mod tests {
let mut level3: u32 = 3;
let child_crate2 = CrateMap {
version: 2,
entries: [
entries: &[
ModEntry { name: "c::m1", log_level: &mut level2},
ModEntry { name: "c::m2", log_level: &mut level3},
],
children: [],
children: &[],
event_loop_factory: None,
};
let child_crate1 = CrateMap {
version: 2,
entries: [
entries: &[
ModEntry { name: "t::f1", log_level: &mut 1},
],
children: [&child_crate2],
children: &[&child_crate2],
event_loop_factory: None,
};
let root_crate = CrateMap {
version: 2,
entries: [
entries: &[
ModEntry { name: "t::f2", log_level: &mut 0},
],
children: [&child_crate1],
children: &[&child_crate1],
event_loop_factory: None,
};

View file

@ -329,12 +329,6 @@ extern "rust-intrinsic" {
/// elements.
pub fn size_of<T>() -> uint;
/// Move a value to a memory location containing a value.
///
/// Drop glue is run on the destination, which must contain a
/// valid Rust value.
pub fn move_val<T>(dst: &mut T, src: T);
/// Move a value to an uninitialized memory location.
///
/// Drop glue is not run on the destination.

View file

@ -134,7 +134,7 @@ mod tests {
}
}
/// Completely miscellaneous language-construct benchmarks.
/// Completely miscellaneous language-construct benchmarks.
#[cfg(test)]
mod bench {

View file

@ -168,7 +168,7 @@ pub fn from_elem<T:Clone>(n_elts: uint, t: T) -> ~[T] {
let mut v = with_capacity(n_elts);
let p = v.as_mut_ptr();
let mut i = 0u;
(|| {
(|| { // FIXME what if we fail in the middle of this loop?
while i < n_elts {
intrinsics::move_val_init(&mut(*ptr::mut_offset(p, i as int)), t.clone());
i += 1u;
@ -239,6 +239,25 @@ pub fn build<A>(size: Option<uint>, builder: |push: |v: A||) -> ~[A] {
vec
}
/**
* Converts a pointer to A into a slice of length 1 (without copying).
*/
pub fn ref_slice<'a, A>(s: &'a A) -> &'a [A] {
unsafe {
cast::transmute(Slice { data: s, len: 1 })
}
}
/**
* Converts a pointer to A into a slice of length 1 (without copying).
*/
pub fn mut_ref_slice<'a, A>(s: &'a mut A) -> &'a mut [A] {
unsafe {
let ptr: *A = cast::transmute(s);
cast::transmute(Slice { data: ptr, len: 1 })
}
}
/// An iterator over the slices of a vector separated by elements that
/// match a predicate function.
pub struct SplitIterator<'a, T> {
@ -2175,6 +2194,9 @@ pub trait MutableVector<'a, T> {
/// Returns an iterator that allows modifying each value
fn mut_iter(self) -> VecMutIterator<'a, T>;
/// Returns a mutable pointer to the last item in the vector.
fn mut_last(self) -> &'a mut T;
/// Returns a reversed iterator that allows modifying each value
fn mut_rev_iter(self) -> MutRevIterator<'a, T>;
@ -2437,6 +2459,13 @@ impl<'a,T> MutableVector<'a, T> for &'a mut [T] {
}
}
#[inline]
fn mut_last(self) -> &'a mut T {
let len = self.len();
if len == 0 { fail!("mut_last: empty vector") }
&mut self[len - 1]
}
#[inline]
fn mut_rev_iter(self) -> MutRevIterator<'a, T> {
self.mut_iter().invert()

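Illustrative uses of the new `vec` helpers: `ref_slice`/`mut_ref_slice` view a single value as a one-element slice (they back the `Option::as_slice` adapters earlier in this diff), and `mut_last` gives checked mutable access to the final element:

use std::vec;

pub fn main() {
    let x = 5;
    let s = vec::ref_slice(&x);   // one-element view, no copy
    assert_eq!(s.len(), 1);
    assert_eq!(s[0], 5);

    let mut v = [1, 2, 3];
    // `mut_last` fails on an empty vector, so guard if unsure.
    if v.len() > 0 {
        *v.mut_last() += 10;
    }
    assert_eq!(v[2], 13);
}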
View file

@ -567,11 +567,7 @@ pub fn visit_ids_for_inlined_item<O: IdVisitingOperation>(item: &InlinedItem,
visited_outermost: false,
};
match *item {
IIItem(i) => id_visitor.visit_item(i, ()),
IIForeign(i) => id_visitor.visit_foreign_item(i, ()),
IIMethod(_, _, m) => visit::walk_method_helper(&mut id_visitor, m, ()),
}
visit::walk_inlined_item(&mut id_visitor, item, ());
}
struct IdRangeComputingVisitor {

View file

@ -634,17 +634,24 @@ impl<'a> Context<'a> {
self.ecx.expr_ident(e.span, lname)));
}
// Now create a vector containing all the arguments
let slicename = self.ecx.ident_of("__args_vec");
{
let args = names.move_iter().map(|a| a.unwrap());
let mut args = locals.move_iter().chain(args);
let args = self.ecx.expr_vec_slice(self.fmtsp, args.collect());
lets.push(self.ecx.stmt_let(self.fmtsp, false, slicename, args));
}
// Now create the fmt::Arguments struct with all our locals we created.
let args = names.move_iter().map(|a| a.unwrap());
let mut args = locals.move_iter().chain(args);
let fmt = self.ecx.expr_ident(self.fmtsp, static_name);
let args = self.ecx.expr_vec_slice(self.fmtsp, args.collect());
let args_slice = self.ecx.expr_ident(self.fmtsp, slicename);
let result = self.ecx.expr_call_global(self.fmtsp, ~[
self.ecx.ident_of("std"),
self.ecx.ident_of("fmt"),
self.ecx.ident_of("Arguments"),
self.ecx.ident_of("new"),
], ~[fmt, args]);
], ~[fmt, args_slice]);
// We did all the work of making sure that the arguments
// structure is safe, so we can safely have an unsafe block.

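The hunk above changes the shape of the code `format!` expands to: the argument vector now gets its own `let` (named `__args_vec` in the patch) so the `&[..]` slice is anchored in a binding rather than being a temporary inside the `Arguments::new(..)` call, where its lifetime would otherwise end too early under the new rules. The principle, reduced to a runnable sketch with hypothetical names:

struct Args<'a> { args: &'a [int] }

pub fn main() {
    // Binding the slice first extends the vector temporary to the
    // enclosing block...
    let args_vec = &[1, 2, 3];
    let a = Args { args: args_vec };

    // ...whereas the same borrow created deep inside an expression
    // that is not a `let` initializer (as in the `format!` expansion)
    // lives only until the end of that statement.
    assert_eq!(a.args.len(), 3);
}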
View file

@ -42,12 +42,31 @@ impl<T> OptVec<T> {
v.push(t);
return;
}
Empty => {}
Empty => {
*self = Vec(~[t]);
}
}
}
// FIXME(#5074): flow insensitive means we can't move
// assignment inside `match`
*self = Vec(~[t]);
pub fn pop(&mut self) -> T {
match *self {
Vec(ref mut v) => v.pop(),
Empty => fail!("pop from empty opt_vec")
}
}
pub fn last<'a>(&'a self) -> &'a T {
match *self {
Vec(ref v) => v.last(),
Empty => fail!("last on empty opt_vec")
}
}
pub fn mut_last<'a>(&'a mut self) -> &'a mut T {
match *self {
Vec(ref mut v) => v.mut_last(),
Empty => fail!("mut_last on empty opt_vec")
}
}
pub fn map<U>(&self, op: |&T| -> U) -> OptVec<U> {
@ -82,6 +101,16 @@ impl<T> OptVec<T> {
}
}
pub fn swap_remove(&mut self, index: uint) {
match *self {
Empty => { fail!("Index out of bounds"); }
Vec(ref mut v) => {
assert!(index < v.len());
v.swap_remove(index);
}
}
}
#[inline]
pub fn iter<'r>(&'r self) -> OptVecIterator<'r, T> {
match *self {
@ -166,6 +195,16 @@ impl<'a, T> Iterator<&'a T> for OptVecIterator<'a, T> {
}
}
impl<'a, T> DoubleEndedIterator<&'a T> for OptVecIterator<'a, T> {
#[inline]
fn next_back(&mut self) -> Option<&'a T> {
match self.iter {
Some(ref mut x) => x.next_back(),
None => None
}
}
}
impl<A> FromIterator<A> for OptVec<A> {
fn from_iterator<T: Iterator<A>>(iterator: &mut T) -> OptVec<A> {
let mut r = Empty;

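An illustrative tour of the `OptVec` operations added above (this sketch assumes libsyntax is linked): `push` on `Empty` now promotes to `Vec` in place, and the new accessors mirror their `~[T]` counterparts, failing on `Empty`:

extern mod syntax;

use syntax::opt_vec;
use syntax::opt_vec::OptVec;

pub fn main() {
    let mut v: OptVec<int> = opt_vec::Empty;
    v.push(1);               // Empty is promoted to Vec(~[1])
    v.push(2);
    v.push(3);

    *v.mut_last() += 7;      // v is now Vec(~[1, 2, 10])
    assert_eq!(*v.last(), 10);

    v.swap_remove(0);        // last element swapped in: Vec(~[10, 2])
    assert_eq!(v.pop(), 2);
}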
View file

@ -121,6 +121,17 @@ pub trait Visitor<E: Clone> {
}
}
pub fn walk_inlined_item<E: Clone, V: Visitor<E>>(visitor: &mut V,
item: &ast::InlinedItem,
env: E) {
match *item {
IIItem(i) => visitor.visit_item(i, env),
IIForeign(i) => visitor.visit_foreign_item(i, env),
IIMethod(_, _, m) => walk_method_helper(visitor, m, env),
}
}
pub fn walk_crate<E: Clone, V: Visitor<E>>(visitor: &mut V, crate: &Crate, env: E) {
visitor.visit_mod(&crate.module, crate.span, CRATE_NODE_ID, env)
}

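The new `walk_inlined_item` gives the three `InlinedItem` variants a single dispatch point, which the `visit_ids_for_inlined_item` cleanup earlier in this diff relies on. A minimal caller sketch (assuming libsyntax is linked; the visitor below uses only the trait's default methods):

extern mod syntax;

use syntax::ast;
use syntax::visit;

struct NoopVisitor;

impl visit::Visitor<()> for NoopVisitor {}

fn walk(item: &ast::InlinedItem) {
    let mut v = NoopVisitor;
    // One call covers IIItem, IIForeign, and IIMethod alike.
    visit::walk_inlined_item(&mut v, item, ());
}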
View file

@ -522,6 +522,17 @@ extern "C" char *LLVMTypeToString(LLVMTypeRef Type) {
return strdup(os.str().data());
}
extern "C" char *LLVMValueToString(LLVMValueRef Value) {
std::string s;
llvm::raw_string_ostream os(s);
os << "(";
unwrap<llvm::Value>(Value)->getType()->print(os);
os << ":";
unwrap<llvm::Value>(Value)->print(os);
os << ")";
return strdup(os.str().data());
}
extern "C" bool
LLVMRustLinkInExternalBitcode(LLVMModuleRef dst, char *bc, size_t len) {
Module *Dst = unwrap(dst);

View file

@ -0,0 +1,31 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that assignments to an `&mut` pointer which is found in a
// borrowed (but otherwise non-aliasable) location is illegal.
struct S<'a> {
pointer: &'a mut int
}
fn copy_borrowed_ptr<'a,'b>(p: &'a mut S<'b>) -> S<'b> {
S { pointer: &mut *p.pointer } //~ ERROR lifetime of `p` is too short to guarantee its contents can be safely reborrowed
}
fn main() {
let mut x = 1;
{
let mut y = S { pointer: &mut x };
let z = copy_borrowed_ptr(&mut y);
*y.pointer += 1;
*z.pointer += 1;
}
}

View file

@ -1,38 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests that rvalue lifetimes are limited to the enclosing trans
// cleanup scope. It is unclear that this is the correct lifetime for
// rvalues, but that's what it is right now.
struct Counter {
value: uint
}
impl Counter {
fn new(v: uint) -> Counter {
Counter {value: v}
}
fn inc<'a>(&'a mut self) -> &'a mut Counter {
self.value += 1;
self
}
fn get(&self) -> uint {
self.value
}
}
pub fn main() {
let v = Counter::new(22).inc().inc().get();
//~^ ERROR borrowed value does not live long enough
assert_eq!(v, 24);
}

View file

@ -0,0 +1,45 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that the borrow checker prevents pointers to temporaries
// with statement lifetimes from escaping.
#[feature(macro_rules)];
use std::ops::Drop;
static mut FLAGS: u64 = 0;
struct Box<T> { f: T }
struct AddFlags { bits: u64 }
fn AddFlags(bits: u64) -> AddFlags {
AddFlags { bits: bits }
}
fn arg<'a>(x: &'a AddFlags) -> &'a AddFlags {
x
}
impl AddFlags {
fn get<'a>(&'a self) -> &'a AddFlags {
self
}
}
pub fn main() {
let _x = arg(&AddFlags(1)); //~ ERROR value does not live long enough
let _x = AddFlags(1).get(); //~ ERROR value does not live long enough
let _x = &*arg(&AddFlags(1)); //~ ERROR value does not live long enough
let ref _x = *arg(&AddFlags(1)); //~ ERROR value does not live long enough
let &ref _x = arg(&AddFlags(1)); //~ ERROR value does not live long enough
let _x = AddFlags(1).get(); //~ ERROR value does not live long enough
let Box { f: _x } = Box { f: AddFlags(1).get() }; //~ ERROR value does not live long enough
}

View file

@ -17,6 +17,15 @@ impl Counter {
Counter {value: v}
}
fn inc<'a>(&'a mut self) -> &'a mut Counter {
self.value += 1;
self
}
fn get(&self) -> uint {
self.value
}
fn get_and_inc(&mut self) -> uint {
let v = self.value;
self.value += 1;
@ -27,4 +36,7 @@ impl Counter {
pub fn main() {
let v = Counter::new(22).get_and_inc();
assert_eq!(v, 22);
let v = Counter::new(22).inc().inc().get();
assert_eq!(v, 24);
}

View file

@ -0,0 +1,43 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that cleanup scope for temporaries created in a match
// arm is confined to the match arm itself.
use std::{os, run};
use std::io::process;
struct Test { x: int }
impl Test {
fn get_x(&self) -> Option<~int> {
Some(~self.x)
}
}
fn do_something(t: &Test) -> int {
// The cleanup scope for the result of `t.get_x()` should be the
// arm itself and not the match, otherwise we'll (potentially) get
// a crash trying to free an uninitialized stack slot.
match t {
&Test { x: 2 } if t.get_x().is_some() => {
t.x * 2
}
_ => { 22 }
}
}
pub fn main() {
let t = Test { x: 1 };
do_something(&t);
}

View file

@ -0,0 +1,138 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that destructors for rvalue temporaries run either at end of
// statement or end of block, as appropriate given the temporary
// lifetime rules.
#[feature(macro_rules)];
use std::ops::Drop;
static mut FLAGS: u64 = 0;
struct Box<T> { f: T }
struct AddFlags { bits: u64 }
fn AddFlags(bits: u64) -> AddFlags {
AddFlags { bits: bits }
}
fn arg(exp: u64, _x: &AddFlags) {
check_flags(exp);
}
fn pass<T>(v: T) -> T {
v
}
fn check_flags(exp: u64) {
unsafe {
let x = FLAGS;
FLAGS = 0;
println!("flags {}, expected {}", x, exp);
assert_eq!(x, exp);
}
}
impl AddFlags {
fn check_flags<'a>(&'a self, exp: u64) -> &'a AddFlags {
check_flags(exp);
self
}
fn bits(&self) -> u64 {
self.bits
}
}
impl Drop for AddFlags {
fn drop(&mut self) {
unsafe {
FLAGS = FLAGS + self.bits;
}
}
}
macro_rules! end_of_block(
($pat:pat, $expr:expr) => (
{
println!("end_of_block({})", stringify!({let $pat = $expr;}));
{
// Destructor here does not run until exit from the block,
// because the value is stored into the binding.
let $pat = $expr;
check_flags(0);
}
check_flags(1);
}
)
)
macro_rules! end_of_stmt(
($pat:pat, $expr:expr) => (
{
println!("end_of_stmt({})", stringify!($expr));
{
// Destructor here runs at the end of the `let` statement,
// because no reference to the temporary is retained.
let $pat = $expr;
check_flags(1);
}
check_flags(0);
}
)
)
pub fn main() {
// In all these cases, we trip over the rules designed to cover
// the case where we are taking addr of rvalue and storing that
// addr into a stack slot, either via `let ref` or via a `&` in
// the initializer.
end_of_block!(_x, AddFlags(1));
end_of_block!(_x, &AddFlags(1));
end_of_block!(_x, & &AddFlags(1));
end_of_block!(_x, Box { f: AddFlags(1) });
end_of_block!(_x, Box { f: &AddFlags(1) });
end_of_block!(_x, Box { f: &AddFlags(1) });
end_of_block!(_x, pass(AddFlags(1)));
end_of_block!(ref _x, AddFlags(1));
end_of_block!(AddFlags { bits: ref _x }, AddFlags(1));
end_of_block!(&AddFlags { bits }, &AddFlags(1));
end_of_block!((_, ref _y), (AddFlags(1), 22));
end_of_block!(~ref _x, ~AddFlags(1));
end_of_block!(~_x, ~AddFlags(1));
end_of_block!(_, { { check_flags(0); &AddFlags(1) } });
end_of_block!(_, &((Box { f: AddFlags(1) }).f));
end_of_block!(_, &(([AddFlags(1)])[0]));
end_of_block!(_, &((&~[AddFlags(1)])[0]));
// LHS does not create a ref binding, so the temporary lives as
// long as the statement, and we do not move the AddFlags out:
end_of_stmt!(_, AddFlags(1));
end_of_stmt!((_, _), (AddFlags(1), 22));
// `&` operator appears inside an arg to a function,
// so it is not prolonged:
end_of_stmt!(ref _x, arg(0, &AddFlags(1)));
// autoref occurs inside receiver, so temp lifetime is not
// prolonged:
end_of_stmt!(ref _x, AddFlags(1).check_flags(0).bits());
// No reference is created on the LHS, thus the RHS is moved into
// a temporary that lives just as long as the statement.
end_of_stmt!(AddFlags { bits }, AddFlags(1));
}

View file

@ -0,0 +1,39 @@
// Test cleanup of rvalue temporary that occurs while `~` construction
// is in progress. This scenario revealed a rather terrible bug. The
// ingredients are:
//
// 1. Partial cleanup of `~` is in scope,
// 2. cleanup of return value from `get_bar()` is in scope,
// 3. do_it() fails.
//
// This led to a bug because the top-most frame that was to be
// cleaned (which happens to be the partial cleanup of `~`) required
// multiple basic blocks, which led to us dropping part of the cleanup
// from the top-most frame.
//
// It's unclear how likely such a bug is to recur, but it seems like a
// scenario worth testing.
use std::task;
enum Conzabble {
Bickwick(Foo)
}
struct Foo { field: ~uint }
fn do_it(x: &[uint]) -> Foo {
fail!()
}
fn get_bar(x: uint) -> ~[uint] { ~[x * 2] }
pub fn fails() {
let x = 2;
let mut y = ~[];
y.push(~Bickwick(do_it(get_bar(x))));
}
pub fn main() {
task::try(fails);
}

View file

@ -0,0 +1,30 @@
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that cleanups for the RHS of short-circuiting operators work.
use std::{os, run};
use std::io::process;
pub fn main() {
let args = os::args();
// Here, the rvalue `~"signal"` requires cleanup. Older versions
// of the code had a problem that the cleanup scope for this
// expression was the end of the `if`, and as the `~"signal"`
// expression was never evaluated, we wound up trying to clean
// uninitialized memory.
if args.len() >= 2 && args[1] == ~"signal" {
// Raise a segfault.
unsafe { *(0 as *mut int) = 0; }
}
}

View file

@ -8,20 +8,22 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[feature(managed_boxes)];
use std::cast::transmute;
mod rusti {
extern "rust-intrinsic" {
pub fn init<T>() -> T;
pub fn move_val_init<T>(dst: &mut T, src: T);
pub fn move_val<T>(dst: &mut T, src: T);
}
}
pub fn main() {
unsafe {
let x = @1;
let mut y = @2;
rusti::move_val(&mut y, x);
let x = ~1;
let mut y = rusti::init();
let mut z: *uint = transmute(&x);
rusti::move_val_init(&mut y, x);
assert_eq!(*y, 1);
assert_eq!(*z, 0); // `x` is nulled out, not directly visible
}
}

View file

@ -30,10 +30,10 @@ pub fn main () {
let config = process::ProcessConfig {
program : args[0].as_slice(),
args : [~"child"],
args : &[~"child"],
env : None,
cwd : None,
io : []
io : &[]
};
let mut p = process::Process::new(config).unwrap();

View file

@ -28,10 +28,10 @@ struct Thing2<'a> {
pub fn main() {
let _t1_fixed = Thing1 {
baz: [],
baz: &[],
bar: ~32,
};
let _t1_uniq = Thing1 {
Thing1 {
baz: ~[],
bar: ~32,
};
@ -40,10 +40,10 @@ pub fn main() {
bar: ~32,
};
let _t2_fixed = Thing2 {
baz: [],
baz: &[],
bar: 32,
};
let _t2_uniq = Thing2 {
Thing2 {
baz: ~[],
bar: 32,
};

View file

@ -15,6 +15,7 @@ struct Triple { x: int, y: int, z: int }
fn test(x: bool, foo: @Triple) -> int {
let bar = foo;
let mut y: @Triple;
y = bar;
if x { y = bar; } else { y = @Triple{x: 4, y: 5, z: 6}; }
return y.y;
}