1
Fork 0

librustc: Remove non-procedure uses of do from librustc, librustdoc,

and librustpkg.
This commit is contained in:
Patrick Walton 2013-11-21 15:42:55 -08:00
parent efc512362b
commit 8ceb374ab7
78 changed files with 1311 additions and 1347 deletions

View file

@ -72,13 +72,13 @@ pub fn WriteOutputFile(
Output: &Path,
FileType: lib::llvm::FileType) {
unsafe {
do Output.with_c_str |Output| {
Output.with_c_str(|Output| {
let result = llvm::LLVMRustWriteOutputFile(
Target, PM, M, Output, FileType);
if !result {
llvm_err(sess, ~"Could not write output");
}
}
})
}
}
@ -130,12 +130,12 @@ pub mod jit {
for cratepath in r.iter() {
debug!("linking: {}", cratepath.display());
do cratepath.with_c_str |buf_t| {
cratepath.with_c_str(|buf_t| {
if !llvm::LLVMRustLoadCrate(manager, buf_t) {
llvm_err(sess, ~"Could not link");
}
debug!("linked: {}", cratepath.display());
}
})
}
// We custom-build a JIT execution engine via some rust wrappers
@ -149,9 +149,9 @@ pub mod jit {
// Next, we need to get a handle on the _rust_main function by
// looking up its corresponding ValueRef and then requesting that
// the execution engine compiles the function.
let fun = do "_rust_main".with_c_str |entry| {
let fun = "_rust_main".with_c_str(|entry| {
llvm::LLVMGetNamedFunction(m, entry)
};
});
if fun.is_null() {
llvm::LLVMDisposeExecutionEngine(ee);
llvm::LLVMContextDispose(c);
@ -248,9 +248,9 @@ pub mod write {
llvm::LLVMInitializeMipsAsmParser();
if sess.opts.save_temps {
do output.with_extension("no-opt.bc").with_c_str |buf| {
output.with_extension("no-opt.bc").with_c_str(|buf| {
llvm::LLVMWriteBitcodeToFile(llmod, buf);
}
})
}
configure_llvm(sess);
@ -263,9 +263,9 @@ pub mod write {
};
let use_softfp = sess.opts.debugging_opts & session::use_softfp != 0;
let tm = do sess.targ_cfg.target_strs.target_triple.with_c_str |T| {
do sess.opts.target_cpu.with_c_str |CPU| {
do sess.opts.target_feature.with_c_str |Features| {
let tm = sess.targ_cfg.target_strs.target_triple.with_c_str(|T| {
sess.opts.target_cpu.with_c_str(|CPU| {
sess.opts.target_feature.with_c_str(|Features| {
llvm::LLVMRustCreateTargetMachine(
T, CPU, Features,
lib::llvm::CodeModelDefault,
@ -274,9 +274,9 @@ pub mod write {
true,
use_softfp
)
}
}
};
})
})
});
// Create the two optimizing pass managers. These mirror what clang
// does, and are populated by LLVM's default PassManagerBuilder.
@ -288,7 +288,7 @@ pub mod write {
// If we're verifying or linting, add them to the function pass
// manager.
let addpass = |pass: &str| {
do pass.with_c_str |s| { llvm::LLVMRustAddPass(fpm, s) }
pass.with_c_str(|s| llvm::LLVMRustAddPass(fpm, s))
};
if !sess.no_verify() { assert!(addpass("verify")); }
if sess.lint_llvm() { assert!(addpass("lint")); }
@ -300,11 +300,11 @@ pub mod write {
}
for pass in sess.opts.custom_passes.iter() {
do pass.with_c_str |s| {
pass.with_c_str(|s| {
if !llvm::LLVMRustAddPass(mpm, s) {
sess.warn(format!("Unknown pass {}, ignoring", *pass));
}
}
})
}
// Finally, run the actual optimization passes
@ -316,9 +316,9 @@ pub mod write {
llvm::LLVMDisposePassManager(mpm);
if sess.opts.save_temps {
do output.with_extension("bc").with_c_str |buf| {
output.with_extension("bc").with_c_str(|buf| {
llvm::LLVMWriteBitcodeToFile(llmod, buf);
}
})
}
if sess.opts.jit {
@ -337,14 +337,14 @@ pub mod write {
match output_type {
output_type_none => {}
output_type_bitcode => {
do output.with_c_str |buf| {
output.with_c_str(|buf| {
llvm::LLVMWriteBitcodeToFile(llmod, buf);
}
})
}
output_type_llvm_assembly => {
do output.with_c_str |output| {
output.with_c_str(|output| {
llvm::LLVMRustPrintModule(cpm, llmod, output)
}
})
}
output_type_assembly => {
WriteOutputFile(sess, tm, cpm, llmod, output, lib::llvm::AssemblyFile);
@ -415,9 +415,9 @@ pub mod write {
add(*arg);
}
do llvm_args.as_imm_buf |p, len| {
llvm_args.as_imm_buf(|p, len| {
llvm::LLVMRustSetLLVMOptions(len as c_int, p);
}
})
}
unsafe fn populate_llvm_passes(fpm: lib::llvm::PassManagerRef,
@ -736,7 +736,7 @@ pub fn sanitize(s: &str) -> ~str {
_ => {
let mut tstr = ~"";
do char::escape_unicode(c) |c| { tstr.push_char(c); }
char::escape_unicode(c, |c| tstr.push_char(c));
result.push_char('$');
result.push_str(tstr.slice_from(1));
}

View file

@ -123,10 +123,10 @@ pub fn build_configuration(sess: Session) ->
// Convert strings provided as --cfg [cfgspec] into a crate_cfg
fn parse_cfgspecs(cfgspecs: ~[~str], demitter: @diagnostic::Emitter)
-> ast::CrateConfig {
do cfgspecs.move_iter().map |s| {
cfgspecs.move_iter().map(|s| {
let sess = parse::new_parse_sess(Some(demitter));
parse::parse_meta_from_source_str(@"cfgspec", s.to_managed(), ~[], sess)
}.collect::<ast::CrateConfig>()
}).collect::<ast::CrateConfig>()
}
pub enum input {

View file

@ -20,9 +20,7 @@ struct Context<'self> {
// any items that do not belong in the current configuration
pub fn strip_unconfigured_items(crate: ast::Crate) -> ast::Crate {
let config = crate.config.clone();
do strip_items(crate) |attrs| {
in_cfg(config, attrs)
}
strip_items(crate, |attrs| in_cfg(config, attrs))
}
impl<'self> fold::ast_fold for Context<'self> {
@ -68,14 +66,12 @@ fn filter_view_item<'r>(cx: &Context, view_item: &'r ast::view_item)
}
fn fold_mod(cx: &Context, m: &ast::_mod) -> ast::_mod {
let filtered_items = do m.items.iter().filter_map |a| {
let filtered_items = m.items.iter().filter_map(|a| {
filter_item(cx, *a).and_then(|x| cx.fold_item(x))
}.collect();
let filtered_view_items = do m.view_items.iter().filter_map |a| {
do filter_view_item(cx, a).map |x| {
cx.fold_view_item(x)
}
}.collect();
}).collect();
let filtered_view_items = m.view_items.iter().filter_map(|a| {
filter_view_item(cx, a).map(|x| cx.fold_view_item(x))
}).collect();
ast::_mod {
view_items: filtered_view_items,
items: filtered_items
@ -96,11 +92,9 @@ fn fold_foreign_mod(cx: &Context, nm: &ast::foreign_mod) -> ast::foreign_mod {
.iter()
.filter_map(|a| filter_foreign_item(cx, *a))
.collect();
let filtered_view_items = do nm.view_items.iter().filter_map |a| {
do filter_view_item(cx, a).map |x| {
cx.fold_view_item(x)
}
}.collect();
let filtered_view_items = nm.view_items.iter().filter_map(|a| {
filter_view_item(cx, a).map(|x| cx.fold_view_item(x))
}).collect();
ast::foreign_mod {
abis: nm.abis,
view_items: filtered_view_items,
@ -147,12 +141,12 @@ fn filter_stmt(cx: &Context, stmt: @ast::Stmt) -> Option<@ast::Stmt> {
}
fn fold_block(cx: &Context, b: &ast::Block) -> ast::Block {
let resulting_stmts = do b.stmts.iter().filter_map |a| {
let resulting_stmts = b.stmts.iter().filter_map(|a| {
filter_stmt(cx, *a).and_then(|stmt| cx.fold_stmt(stmt))
}.collect();
let filtered_view_items = do b.view_items.iter().filter_map |a| {
}).collect();
let filtered_view_items = b.view_items.iter().filter_map(|a| {
filter_view_item(cx, a).map(|x| cx.fold_view_item(x))
}.collect();
}).collect();
ast::Block {
view_items: filtered_view_items,
stmts: resulting_stmts,

View file

@ -118,13 +118,13 @@ impl fold::ast_fold for TestHarnessGenerator {
fn nomain(cx: @mut TestCtxt, item: @ast::item) -> @ast::item {
if !*cx.sess.building_library {
@ast::item {
attrs: do item.attrs.iter().filter_map |attr| {
attrs: item.attrs.iter().filter_map(|attr| {
if "main" != attr.name() {
Some(*attr)
} else {
None
}
}.collect(),
}).collect(),
.. (*item).clone()
}
} else {
@ -172,10 +172,10 @@ fn generate_test_harness(sess: session::Session, crate: ast::Crate)
fn strip_test_functions(crate: ast::Crate) -> ast::Crate {
// When not compiling with --test we should not compile the
// #[test] functions
do config::strip_items(crate) |attrs| {
config::strip_items(crate, |attrs| {
!attr::contains_name(attrs, "test") &&
!attr::contains_name(attrs, "bench")
}
})
}
fn is_test_fn(cx: @mut TestCtxt, i: @ast::item) -> bool {
@ -232,13 +232,13 @@ fn is_bench_fn(i: @ast::item) -> bool {
}
fn is_ignored(cx: @mut TestCtxt, i: @ast::item) -> bool {
do i.attrs.iter().any |attr| {
i.attrs.iter().any(|attr| {
// check ignore(cfg(foo, bar))
"ignore" == attr.name() && match attr.meta_item_list() {
Some(ref cfgs) => attr::test_cfg(cx.config, cfgs.iter().map(|x| *x)),
None => true
}
}
})
}
fn should_fail(i: @ast::item) -> bool {

View file

@ -261,9 +261,9 @@ pub fn run_compiler(args: &[~str], demitter: @diagnostic::Emitter) {
let odir = matches.opt_str("out-dir").map(|o| Path::new(o));
let ofile = matches.opt_str("o").map(|o| Path::new(o));
let cfg = build_configuration(sess);
let pretty = do matches.opt_default("pretty", "normal").map |a| {
let pretty = matches.opt_default("pretty", "normal").map(|a| {
parse_pretty(sess, a)
};
});
match pretty {
Some::<PpMode>(ppm) => {
pretty_print_input(sess, cfg, &input, ppm);
@ -345,7 +345,7 @@ pub fn monitor(f: proc(@diagnostic::Emitter)) {
task_builder.opts.stack_size = Some(STACK_SIZE);
}
match do task_builder.try {
match task_builder.try(|| {
let ch = ch_capture.clone();
// The 'diagnostics emitter'. Every error, warning, etc. should
// go through this function.
@ -368,7 +368,7 @@ pub fn monitor(f: proc(@diagnostic::Emitter)) {
// Due to reasons explained in #7732, if there was a jit execution context it
// must be consumed and passed along to our parent task.
back::link::jit::consume_engine()
} {
}) {
result::Ok(_) => { /* fallthrough */ }
result::Err(_) => {
// Task failed without emitting a fatal diagnostic
@ -403,9 +403,6 @@ pub fn main() {
pub fn main_args(args: &[~str]) -> int {
let owned_args = args.to_owned();
do monitor |demitter| {
run_compiler(owned_args, demitter);
}
return 0;
monitor(|demitter| run_compiler(owned_args, demitter));
0
}

View file

@ -1844,9 +1844,9 @@ pub struct TargetData {
}
pub fn mk_target_data(string_rep: &str) -> TargetData {
let lltd = do string_rep.with_c_str |buf| {
let lltd = string_rep.with_c_str(|buf| {
unsafe { llvm::LLVMCreateTargetData(buf) }
};
});
TargetData {
lltd: lltd,

View file

@ -159,9 +159,9 @@ pub fn get_dep_hashes(cstore: &CStore) -> ~[@str] {
});
}
let sorted = do extra::sort::merge_sort(result) |a, b| {
let sorted = extra::sort::merge_sort(result, |a, b| {
(a.name, a.vers, a.hash) <= (b.name, b.vers, b.hash)
};
});
debug!("sorted:");
for x in sorted.iter() {

View file

@ -62,7 +62,7 @@ fn lookup_hash(d: ebml::Doc, eq_fn: |&[u8]| -> bool, hash: u64) ->
let belt = tag_index_buckets_bucket_elt;
let mut ret = None;
do reader::tagged_docs(tagged_doc.doc, belt) |elt| {
reader::tagged_docs(tagged_doc.doc, belt, |elt| {
let pos = u64_from_be_bytes(*elt.data, elt.start, 4) as uint;
if eq_fn(elt.data.slice(elt.start + 4, elt.end)) {
ret = Some(reader::doc_at(d.data, pos).doc);
@ -70,7 +70,7 @@ fn lookup_hash(d: ebml::Doc, eq_fn: |&[u8]| -> bool, hash: u64) ->
} else {
true
}
};
});
ret
}
@ -168,10 +168,10 @@ fn item_visibility(item: ebml::Doc) -> ast::visibility {
fn item_method_sort(item: ebml::Doc) -> char {
let mut ret = 'r';
do reader::tagged_docs(item, tag_item_trait_method_sort) |doc| {
reader::tagged_docs(item, tag_item_trait_method_sort, |doc| {
ret = doc.as_str_slice()[0] as char;
false
};
});
ret
}
@ -181,10 +181,10 @@ fn item_symbol(item: ebml::Doc) -> ~str {
fn item_parent_item(d: ebml::Doc) -> Option<ast::DefId> {
let mut ret = None;
do reader::tagged_docs(d, tag_items_data_parent_item) |did| {
reader::tagged_docs(d, tag_items_data_parent_item, |did| {
ret = Some(reader::with_doc_data(did, parse_def_id));
false
};
});
ret
}
@ -200,9 +200,9 @@ fn item_def_id(d: ebml::Doc, cdata: Cmd) -> ast::DefId {
}
fn get_provided_source(d: ebml::Doc, cdata: Cmd) -> Option<ast::DefId> {
do reader::maybe_get_doc(d, tag_item_method_provided_source).map |doc| {
reader::maybe_get_doc(d, tag_item_method_provided_source).map(|doc| {
translate_def_id(cdata, reader::with_doc_data(doc, parse_def_id))
}
})
}
fn each_reexport(d: ebml::Doc, f: |ebml::Doc| -> bool) -> bool {
@ -210,9 +210,9 @@ fn each_reexport(d: ebml::Doc, f: |ebml::Doc| -> bool) -> bool {
}
fn variant_disr_val(d: ebml::Doc) -> Option<ty::Disr> {
do reader::maybe_get_doc(d, tag_disr_val).and_then |val_doc| {
do reader::with_doc_data(val_doc) |data| { u64::parse_bytes(data, 10u) }
}
reader::maybe_get_doc(d, tag_disr_val).and_then(|val_doc| {
reader::with_doc_data(val_doc, |data| u64::parse_bytes(data, 10u))
})
}
fn doc_type(doc: ebml::Doc, tcx: ty::ctxt, cdata: Cmd) -> ty::t {
@ -231,10 +231,10 @@ fn doc_transformed_self_ty(doc: ebml::Doc,
tcx: ty::ctxt,
cdata: Cmd) -> Option<ty::t>
{
do reader::maybe_get_doc(doc, tag_item_method_transformed_self_ty).map |tp| {
reader::maybe_get_doc(doc, tag_item_method_transformed_self_ty).map(|tp| {
parse_ty_data(*tp.data, cdata.cnum, tp.start, tcx,
|_, did| translate_def_id(cdata, did))
}
})
}
pub fn item_type(_item_id: ast::DefId, item: ebml::Doc,
@ -258,13 +258,13 @@ fn item_ty_param_defs(item: ebml::Doc,
tag: uint)
-> @~[ty::TypeParameterDef] {
let mut bounds = ~[];
do reader::tagged_docs(item, tag) |p| {
reader::tagged_docs(item, tag, |p| {
let bd = parse_type_param_def_data(
*p.data, p.start, cdata.cnum, tcx,
|_, did| translate_def_id(cdata, did));
bounds.push(bd);
true
};
});
@bounds
}
@ -272,8 +272,8 @@ fn item_region_param_defs(item_doc: ebml::Doc,
tcx: ty::ctxt,
cdata: Cmd)
-> @[ty::RegionParameterDef] {
do at_vec::build(None) |push| {
do reader::tagged_docs(item_doc, tag_region_param_def) |rp_doc| {
at_vec::build(None, |push| {
reader::tagged_docs(item_doc, tag_region_param_def, |rp_doc| {
let ident_str_doc = reader::get_doc(rp_doc,
tag_region_param_def_ident);
let ident = item_name(tcx.sess.intr(), ident_str_doc);
@ -284,8 +284,8 @@ fn item_region_param_defs(item_doc: ebml::Doc,
push(ty::RegionParameterDef { ident: ident,
def_id: def_id });
true
};
}
});
})
}
fn item_ty_param_count(item: ebml::Doc) -> uint {
@ -298,11 +298,11 @@ fn item_ty_param_count(item: ebml::Doc) -> uint {
fn enum_variant_ids(item: ebml::Doc, cdata: Cmd) -> ~[ast::DefId] {
let mut ids: ~[ast::DefId] = ~[];
let v = tag_items_data_item_variant;
do reader::tagged_docs(item, v) |p| {
reader::tagged_docs(item, v, |p| {
let ext = reader::with_doc_data(p, parse_def_id);
ids.push(ast::DefId { crate: cdata.cnum, node: ext.node });
true
};
});
return ids;
}
@ -313,7 +313,7 @@ pub fn item_path(item_doc: ebml::Doc) -> ast_map::path {
let len = reader::doc_as_u32(len_doc) as uint;
let mut result = vec::with_capacity(len);
do reader::docs(path_doc) |tag, elt_doc| {
reader::docs(path_doc, |tag, elt_doc| {
if tag == tag_path_elt_mod {
let str = elt_doc.as_str_slice();
result.push(ast_map::path_mod(token::str_to_ident(str)));
@ -333,7 +333,7 @@ pub fn item_path(item_doc: ebml::Doc) -> ast_map::path {
// ignore tag_path_len element
}
true
};
});
return result;
}
@ -412,14 +412,14 @@ pub fn get_trait_def(cdata: Cmd,
let mut bounds = ty::EmptyBuiltinBounds();
// Collect the builtin bounds from the encoded supertraits.
// FIXME(#8559): They should be encoded directly.
do reader::tagged_docs(item_doc, tag_item_super_trait_ref) |trait_doc| {
reader::tagged_docs(item_doc, tag_item_super_trait_ref, |trait_doc| {
// NB. Bypasses real supertraits. See get_supertraits() if you wanted them.
let trait_ref = doc_trait_ref(trait_doc, tcx, cdata);
do tcx.lang_items.to_builtin_kind(trait_ref.def_id).map |bound| {
tcx.lang_items.to_builtin_kind(trait_ref.def_id).map(|bound| {
bounds.add(bound);
};
});
true
};
});
ty::TraitDef {
generics: ty::Generics {type_param_defs: tp_defs,
region_param_defs: rp_defs},
@ -455,9 +455,9 @@ pub fn get_impl_trait(cdata: Cmd,
tcx: ty::ctxt) -> Option<@ty::TraitRef>
{
let item_doc = lookup_item(id, cdata.data);
do reader::maybe_get_doc(item_doc, tag_item_trait_ref).map |tp| {
reader::maybe_get_doc(item_doc, tag_item_trait_ref).map(|tp| {
@doc_trait_ref(tp, tcx, cdata)
}
})
}
pub fn get_impl_vtables(cdata: Cmd,
@ -479,13 +479,13 @@ pub fn get_impl_method(intr: @ident_interner, cdata: Cmd, id: ast::NodeId,
name: ast::Ident) -> Option<ast::DefId> {
let items = reader::get_doc(reader::Doc(cdata.data), tag_items);
let mut found = None;
do reader::tagged_docs(find_item(id, items), tag_item_impl_method) |mid| {
reader::tagged_docs(find_item(id, items), tag_item_impl_method, |mid| {
let m_did = reader::with_doc_data(mid, parse_def_id);
if item_name(intr, find_item(m_did.node, items)) == name {
found = Some(translate_def_id(cdata, m_did));
}
true
};
});
found
}
@ -512,7 +512,7 @@ pub fn def_like_to_def(def_like: DefLike) -> ast::Def {
pub fn each_lang_item(cdata: Cmd, f: |ast::NodeId, uint| -> bool) -> bool {
let root = reader::Doc(cdata.data);
let lang_items = reader::get_doc(root, tag_lang_items);
do reader::tagged_docs(lang_items, tag_lang_items_item) |item_doc| {
reader::tagged_docs(lang_items, tag_lang_items_item, |item_doc| {
let id_doc = reader::get_doc(item_doc, tag_lang_items_item_id);
let id = reader::doc_as_u32(id_doc) as uint;
let node_id_doc = reader::get_doc(item_doc,
@ -520,7 +520,7 @@ pub fn each_lang_item(cdata: Cmd, f: |ast::NodeId, uint| -> bool) -> bool {
let node_id = reader::doc_as_u32(node_id_doc) as ast::NodeId;
f(node_id, id)
}
})
}
struct EachItemContext<'self> {
@ -624,7 +624,7 @@ impl<'self> EachItemContext<'self> {
let mut continue_ = true;
// Iterate over all children.
do reader::tagged_docs(item_doc, tag_mod_child) |child_info_doc| {
reader::tagged_docs(item_doc, tag_mod_child, |child_info_doc| {
let child_def_id = reader::with_doc_data(child_info_doc,
parse_def_id);
let child_def_id = translate_def_id(self.cdata, child_def_id);
@ -670,14 +670,14 @@ impl<'self> EachItemContext<'self> {
}
}
continue_
};
});
if !continue_ {
return false
}
// Iterate over reexports.
do each_reexport(item_doc) |reexport_doc| {
each_reexport(item_doc, |reexport_doc| {
let def_id_doc = reader::get_doc(
reexport_doc,
tag_items_data_item_reexport_def_id);
@ -723,7 +723,7 @@ impl<'self> EachItemContext<'self> {
}
continue_
};
});
continue_
}
@ -737,7 +737,7 @@ fn each_child_of_item_or_crate(intr: @ident_interner,
ast::Ident,
ast::visibility|) {
// Iterate over all children.
let _ = do reader::tagged_docs(item_doc, tag_mod_child) |child_info_doc| {
let _ = reader::tagged_docs(item_doc, tag_mod_child, |child_info_doc| {
let child_def_id = reader::with_doc_data(child_info_doc,
parse_def_id);
let child_def_id = translate_def_id(cdata, child_def_id);
@ -767,13 +767,13 @@ fn each_child_of_item_or_crate(intr: @ident_interner,
}
true
};
});
// As a special case, iterate over all static methods of
// associated implementations too. This is a bit of a botch.
// --pcwalton
let _ = do reader::tagged_docs(item_doc,
tag_items_data_item_inherent_impl)
let _ = reader::tagged_docs(item_doc,
tag_items_data_item_inherent_impl,
|inherent_impl_def_id_doc| {
let inherent_impl_def_id = item_def_id(inherent_impl_def_id_doc,
cdata);
@ -781,8 +781,8 @@ fn each_child_of_item_or_crate(intr: @ident_interner,
match maybe_find_item(inherent_impl_def_id.node, items) {
None => {}
Some(inherent_impl_doc) => {
let _ = do reader::tagged_docs(inherent_impl_doc,
tag_item_impl_method)
let _ = reader::tagged_docs(inherent_impl_doc,
tag_item_impl_method,
|impl_method_def_id_doc| {
let impl_method_def_id =
reader::with_doc_data(impl_method_def_id_doc,
@ -812,15 +812,15 @@ fn each_child_of_item_or_crate(intr: @ident_interner,
}
true
};
});
}
}
true
};
});
// Iterate over all reexports.
let _ = do each_reexport(item_doc) |reexport_doc| {
let _ = each_reexport(item_doc, |reexport_doc| {
let def_id_doc = reader::get_doc(reexport_doc,
tag_items_data_item_reexport_def_id);
let child_def_id = reader::with_doc_data(def_id_doc,
@ -854,7 +854,7 @@ fn each_child_of_item_or_crate(intr: @ident_interner,
}
true
};
});
}
/// Iterates over each child of the given item.
@ -1002,11 +1002,11 @@ fn get_explicit_self(item: ebml::Doc) -> ast::explicit_self_ {
fn item_impl_methods(intr: @ident_interner, cdata: Cmd, item: ebml::Doc,
tcx: ty::ctxt) -> ~[@ty::Method] {
let mut rslt = ~[];
do reader::tagged_docs(item, tag_item_impl_method) |doc| {
reader::tagged_docs(item, tag_item_impl_method, |doc| {
let m_did = reader::with_doc_data(doc, parse_def_id);
rslt.push(@get_method(intr, cdata, m_did.node, tcx));
true
};
});
rslt
}
@ -1083,10 +1083,10 @@ pub fn get_trait_method_def_ids(cdata: Cmd,
let data = cdata.data;
let item = lookup_item(id, data);
let mut result = ~[];
do reader::tagged_docs(item, tag_item_trait_method) |mth| {
reader::tagged_docs(item, tag_item_trait_method, |mth| {
result.push(item_def_id(mth, cdata));
true
};
});
result
}
@ -1105,7 +1105,7 @@ pub fn get_provided_trait_methods(intr: @ident_interner, cdata: Cmd,
let item = lookup_item(id, data);
let mut result = ~[];
do reader::tagged_docs(item, tag_item_trait_method) |mth_id| {
reader::tagged_docs(item, tag_item_trait_method, |mth_id| {
let did = item_def_id(mth_id, cdata);
let mth = lookup_item(did.node, data);
@ -1113,7 +1113,7 @@ pub fn get_provided_trait_methods(intr: @ident_interner, cdata: Cmd,
result.push(@get_method(intr, cdata, did.node, tcx));
}
true
};
});
return result;
}
@ -1123,7 +1123,7 @@ pub fn get_supertraits(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt)
-> ~[@ty::TraitRef] {
let mut results = ~[];
let item_doc = lookup_item(id, cdata.data);
do reader::tagged_docs(item_doc, tag_item_super_trait_ref) |trait_doc| {
reader::tagged_docs(item_doc, tag_item_super_trait_ref, |trait_doc| {
// NB. Only reads the ones that *aren't* builtin-bounds. See also
// get_trait_def() for collecting the builtin bounds.
// FIXME(#8559): The builtin bounds shouldn't be encoded in the first place.
@ -1132,7 +1132,7 @@ pub fn get_supertraits(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt)
results.push(@trait_ref);
}
true
};
});
return results;
}
@ -1144,10 +1144,10 @@ pub fn get_type_name_if_impl(cdata: Cmd,
}
let mut ret = None;
do reader::tagged_docs(item, tag_item_impl_type_basename) |doc| {
reader::tagged_docs(item, tag_item_impl_type_basename, |doc| {
ret = Some(token::str_to_ident(doc.as_str_slice()));
false
};
});
ret
}
@ -1162,17 +1162,17 @@ pub fn get_static_methods_if_impl(intr: @ident_interner,
}
// If this impl implements a trait, don't consider it.
let ret = do reader::tagged_docs(item, tag_item_trait_ref) |_doc| {
let ret = reader::tagged_docs(item, tag_item_trait_ref, |_doc| {
false
};
});
if !ret { return None }
let mut impl_method_ids = ~[];
do reader::tagged_docs(item, tag_item_impl_method) |impl_method_doc| {
reader::tagged_docs(item, tag_item_impl_method, |impl_method_doc| {
impl_method_ids.push(reader::with_doc_data(impl_method_doc, parse_def_id));
true
};
});
let mut static_impl_methods = ~[];
for impl_method_id in impl_method_ids.iter() {
@ -1205,13 +1205,13 @@ pub fn get_item_attrs(cdata: Cmd,
node_id: ast::NodeId,
f: |~[@ast::MetaItem]|) {
let item = lookup_item(node_id, cdata.data);
do reader::tagged_docs(item, tag_attributes) |attributes| {
do reader::tagged_docs(attributes, tag_attribute) |attribute| {
reader::tagged_docs(item, tag_attributes, |attributes| {
reader::tagged_docs(attributes, tag_attribute, |attribute| {
f(get_meta_items(attribute));
true
};
});
true
};
});
}
fn struct_field_family_to_visibility(family: Family) -> ast::visibility {
@ -1228,7 +1228,7 @@ pub fn get_struct_fields(intr: @ident_interner, cdata: Cmd, id: ast::NodeId)
let data = cdata.data;
let item = lookup_item(id, data);
let mut result = ~[];
do reader::tagged_docs(item, tag_item_field) |an_item| {
reader::tagged_docs(item, tag_item_field, |an_item| {
let f = item_family(an_item);
if f == PublicField || f == PrivateField || f == InheritedField {
// FIXME #6993: name should be of type Name, not Ident
@ -1241,8 +1241,8 @@ pub fn get_struct_fields(intr: @ident_interner, cdata: Cmd, id: ast::NodeId)
});
}
true
};
do reader::tagged_docs(item, tag_item_unnamed_field) |an_item| {
});
reader::tagged_docs(item, tag_item_unnamed_field, |an_item| {
let did = item_def_id(an_item, cdata);
result.push(ty::field_ty {
name: special_idents::unnamed_field.name,
@ -1250,7 +1250,7 @@ pub fn get_struct_fields(intr: @ident_interner, cdata: Cmd, id: ast::NodeId)
vis: ast::inherited,
});
true
};
});
result
}
@ -1272,13 +1272,13 @@ fn family_names_type(fam: Family) -> bool {
}
fn read_path(d: ebml::Doc) -> (~str, uint) {
do reader::with_doc_data(d) |desc| {
reader::with_doc_data(d, |desc| {
let pos = u64_from_be_bytes(desc, 0u, 4u) as uint;
let pathbytes = desc.slice(4u, desc.len());
let path = str::from_utf8(pathbytes);
(path, pos)
}
})
}
fn describe_def(items: ebml::Doc, id: ast::DefId) -> ~str {
@ -1317,13 +1317,13 @@ fn item_family_to_str(fam: Family) -> ~str {
fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] {
let mut items: ~[@ast::MetaItem] = ~[];
do reader::tagged_docs(md, tag_meta_item_word) |meta_item_doc| {
reader::tagged_docs(md, tag_meta_item_word, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = nd.as_str_slice().to_managed();
items.push(attr::mk_word_item(n));
true
};
do reader::tagged_docs(md, tag_meta_item_name_value) |meta_item_doc| {
});
reader::tagged_docs(md, tag_meta_item_name_value, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let vd = reader::get_doc(meta_item_doc, tag_meta_item_value);
let n = nd.as_str_slice().to_managed();
@ -1332,14 +1332,14 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] {
// but currently the encoder just drops them
items.push(attr::mk_name_value_item_str(n, v));
true
};
do reader::tagged_docs(md, tag_meta_item_list) |meta_item_doc| {
});
reader::tagged_docs(md, tag_meta_item_list, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = nd.as_str_slice().to_managed();
let subitems = get_meta_items(meta_item_doc);
items.push(attr::mk_list_item(n, subitems));
true
};
});
return items;
}
@ -1347,7 +1347,7 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::Attribute] {
let mut attrs: ~[ast::Attribute] = ~[];
match reader::maybe_get_doc(md, tag_attributes) {
option::Some(attrs_d) => {
do reader::tagged_docs(attrs_d, tag_attribute) |attr_doc| {
reader::tagged_docs(attrs_d, tag_attribute, |attr_doc| {
let meta_items = get_meta_items(attr_doc);
// Currently it's only possible to have a single meta item on
// an attribute
@ -1363,7 +1363,7 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::Attribute] {
span: codemap::dummy_sp()
});
true
};
});
}
option::None => ()
}
@ -1412,14 +1412,14 @@ pub fn get_crate_deps(data: @~[u8]) -> ~[CrateDep] {
let d = reader::get_doc(doc, tag_);
d.as_str_slice().to_managed()
}
do reader::tagged_docs(depsdoc, tag_crate_dep) |depdoc| {
reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| {
deps.push(CrateDep {cnum: crate_num,
name: token::str_to_ident(docstr(depdoc, tag_crate_dep_name)),
vers: docstr(depdoc, tag_crate_dep_vers),
hash: docstr(depdoc, tag_crate_dep_hash)});
crate_num += 1;
true
};
});
return deps;
}
@ -1477,22 +1477,23 @@ pub fn translate_def_id(cdata: Cmd, did: ast::DefId) -> ast::DefId {
pub fn each_impl(cdata: Cmd, callback: |ast::DefId|) {
let impls_doc = reader::get_doc(reader::Doc(cdata.data), tag_impls);
let _ = do reader::tagged_docs(impls_doc, tag_impls_impl) |impl_doc| {
let _ = reader::tagged_docs(impls_doc, tag_impls_impl, |impl_doc| {
callback(item_def_id(impl_doc, cdata));
true
};
});
}
pub fn each_implementation_for_type(cdata: Cmd,
id: ast::NodeId,
callback: |ast::DefId|) {
let item_doc = lookup_item(id, cdata.data);
do reader::tagged_docs(item_doc, tag_items_data_item_inherent_impl)
reader::tagged_docs(item_doc,
tag_items_data_item_inherent_impl,
|impl_doc| {
let implementation_def_id = item_def_id(impl_doc, cdata);
callback(implementation_def_id);
true
};
});
}
pub fn each_implementation_for_trait(cdata: Cmd,
@ -1500,13 +1501,13 @@ pub fn each_implementation_for_trait(cdata: Cmd,
callback: |ast::DefId|) {
let item_doc = lookup_item(id, cdata.data);
let _ = do reader::tagged_docs(item_doc,
tag_items_data_item_extension_impl)
let _ = reader::tagged_docs(item_doc,
tag_items_data_item_extension_impl,
|impl_doc| {
let implementation_def_id = item_def_id(impl_doc, cdata);
callback(implementation_def_id);
true
};
});
}
pub fn get_trait_of_method(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt)
@ -1521,10 +1522,8 @@ pub fn get_trait_of_method(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt)
match item_family(parent_item_doc) {
Trait => Some(item_def_id(parent_item_doc, cdata)),
Impl => {
do reader::maybe_get_doc(parent_item_doc, tag_item_trait_ref).map
|_| {
item_trait_ref(parent_item_doc, tcx, cdata).def_id
}
reader::maybe_get_doc(parent_item_doc, tag_item_trait_ref)
.map(|_| item_trait_ref(parent_item_doc, tcx, cdata).def_id)
}
_ => None
}

View file

@ -615,12 +615,12 @@ fn encode_info_for_mod(ecx: &EncodeContext,
ebml_w.wr_str(def_to_str(local_def(item.id)));
ebml_w.end_tag();
do each_auxiliary_node_id(*item) |auxiliary_node_id| {
each_auxiliary_node_id(*item, |auxiliary_node_id| {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(auxiliary_node_id)));
ebml_w.end_tag();
true
};
});
match item.node {
item_impl(*) => {
@ -1570,13 +1570,13 @@ fn encode_crate_deps(ecx: &EncodeContext,
// Pull the cnums and name,vers,hash out of cstore
let mut deps = ~[];
do cstore::iter_crate_data(cstore) |key, val| {
cstore::iter_crate_data(cstore, |key, val| {
let dep = decoder::CrateDep {cnum: key,
name: ecx.tcx.sess.ident_of(val.name),
vers: decoder::get_crate_vers(val.data),
hash: decoder::get_crate_hash(val.data)};
deps.push(dep);
};
});
// Sort by cnum
extra::sort::quick_sort(deps, |kv1, kv2| kv1.cnum <= kv2.cnum);
@ -1697,12 +1697,12 @@ fn encode_misc_info(ecx: &EncodeContext,
ebml_w.wr_str(def_to_str(local_def(item.id)));
ebml_w.end_tag();
do each_auxiliary_node_id(item) |auxiliary_node_id| {
each_auxiliary_node_id(item, |auxiliary_node_id| {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(auxiliary_node_id)));
ebml_w.end_tag();
true
};
});
}
// Encode reexports for the root module.

View file

@ -118,7 +118,7 @@ pub fn mk_filesearch(maybe_sysroot: &Option<@Path>,
}
pub fn search(filesearch: @FileSearch, pick: pick) {
do filesearch.for_each_lib_search_path() |lib_search_path| {
filesearch.for_each_lib_search_path(|lib_search_path| {
debug!("searching {}", lib_search_path.display());
match io::result(|| fs::readdir(lib_search_path)) {
Ok(files) => {
@ -140,7 +140,7 @@ pub fn search(filesearch: @FileSearch, pick: pick) {
}
Err(*) => FileDoesntMatch,
}
};
});
}
pub fn relative_target_lib_path(target_triple: &str) -> Path {

View file

@ -193,17 +193,15 @@ pub fn metadata_matches(extern_metas: &[@ast::MetaItem],
debug!("matching {} metadata requirements against {} items",
local_metas.len(), extern_metas.len());
do local_metas.iter().all |needed| {
attr::contains(extern_metas, *needed)
}
local_metas.iter().all(|needed| attr::contains(extern_metas, *needed))
}
fn get_metadata_section(os: Os,
filename: &Path) -> Option<@~[u8]> {
unsafe {
let mb = do filename.with_c_str |buf| {
let mb = filename.with_c_str(|buf| {
llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf)
};
});
if mb as int == 0 { return option::None::<@~[u8]>; }
let of = match mk_object_file(mb) {
option::Some(of) => of,
@ -224,19 +222,19 @@ fn get_metadata_section(os: Os,
vlen);
let minsz = num::min(vlen, csz);
let mut version_ok = false;
do vec::raw::buf_as_slice(cvbuf, minsz) |buf0| {
vec::raw::buf_as_slice(cvbuf, minsz, |buf0| {
version_ok = (buf0 ==
encoder::metadata_encoding_version);
}
});
if !version_ok { return None; }
let cvbuf1 = ptr::offset(cvbuf, vlen as int);
debug!("inflating {} bytes of compressed metadata",
csz - vlen);
do vec::raw::buf_as_slice(cvbuf1, csz-vlen) |bytes| {
vec::raw::buf_as_slice(cvbuf1, csz-vlen, |bytes| {
let inflated = flate::inflate_bytes(bytes);
found = Some(@(inflated));
}
});
if found != None {
return found;
}

View file

@ -132,7 +132,7 @@ fn enc_opt<T>(w: @mut MemWriter, t: Option<T>, enc_f: |T|) {
fn enc_substs(w: @mut MemWriter, cx: @ctxt, substs: &ty::substs) {
enc_region_substs(w, cx, &substs.regions);
do enc_opt(w, substs.self_ty) |t| { enc_ty(w, cx, t) }
enc_opt(w, substs.self_ty, |t| enc_ty(w, cx, t));
mywrite!(w, "[");
for t in substs.tps.iter() { enc_ty(w, cx, *t); }
mywrite!(w, "]");
@ -350,10 +350,10 @@ fn enc_purity(w: @mut MemWriter, p: purity) {
fn enc_abi_set(w: @mut MemWriter, abis: AbiSet) {
mywrite!(w, "[");
do abis.each |abi| {
abis.each(|abi| {
mywrite!(w, "{},", abi.name());
true
};
});
mywrite!(w, "]")
}

View file

@ -303,7 +303,7 @@ struct NestedItemsDropper {
impl fold::ast_fold for NestedItemsDropper {
fn fold_block(&self, blk: &ast::Block) -> ast::Block {
let stmts_sans_items = do blk.stmts.iter().filter_map |stmt| {
let stmts_sans_items = blk.stmts.iter().filter_map(|stmt| {
match stmt.node {
ast::StmtExpr(_, _) | ast::StmtSemi(_, _) |
ast::StmtDecl(@codemap::Spanned {
@ -316,7 +316,7 @@ impl fold::ast_fold for NestedItemsDropper {
}, _) => None,
ast::StmtMac(*) => fail!("unexpanded macro in astencode")
}
}.collect();
}).collect();
let blk_sans_items = ast::Block {
view_items: ~[], // I don't know if we need the view_items here,
// but it doesn't break tests!
@ -568,26 +568,26 @@ trait read_method_map_entry_helper {
fn encode_method_map_entry(ecx: &e::EncodeContext,
ebml_w: &mut writer::Encoder,
mme: method_map_entry) {
do ebml_w.emit_struct("method_map_entry", 3) |ebml_w| {
do ebml_w.emit_struct_field("self_ty", 0u) |ebml_w| {
ebml_w.emit_struct("method_map_entry", 3, |ebml_w| {
ebml_w.emit_struct_field("self_ty", 0u, |ebml_w| {
ebml_w.emit_ty(ecx, mme.self_ty);
}
do ebml_w.emit_struct_field("explicit_self", 2u) |ebml_w| {
});
ebml_w.emit_struct_field("explicit_self", 2u, |ebml_w| {
mme.explicit_self.encode(ebml_w);
}
do ebml_w.emit_struct_field("origin", 1u) |ebml_w| {
});
ebml_w.emit_struct_field("origin", 1u, |ebml_w| {
mme.origin.encode(ebml_w);
}
do ebml_w.emit_struct_field("self_mode", 3) |ebml_w| {
});
ebml_w.emit_struct_field("self_mode", 3, |ebml_w| {
mme.self_mode.encode(ebml_w);
}
}
});
})
}
impl read_method_map_entry_helper for reader::Decoder {
fn read_method_map_entry(&mut self, xcx: @ExtendedDecodeContext)
-> method_map_entry {
do self.read_struct("method_map_entry", 3) |this| {
self.read_struct("method_map_entry", 3, |this| {
method_map_entry {
self_ty: this.read_struct_field("self_ty", 0u, |this| {
this.read_ty(xcx)
@ -608,7 +608,7 @@ impl read_method_map_entry_helper for reader::Decoder {
self_mode
}),
}
}
})
}
}
@ -648,50 +648,50 @@ pub fn encode_vtable_res(ecx: &e::EncodeContext,
// ty::t doesn't work, and there is no way (atm) to have
// hand-written encoding routines combine with auto-generated
// ones. perhaps we should fix this.
do ebml_w.emit_from_vec(*dr) |ebml_w, param_tables| {
ebml_w.emit_from_vec(*dr, |ebml_w, param_tables| {
encode_vtable_param_res(ecx, ebml_w, *param_tables);
}
})
}
pub fn encode_vtable_param_res(ecx: &e::EncodeContext,
ebml_w: &mut writer::Encoder,
param_tables: typeck::vtable_param_res) {
do ebml_w.emit_from_vec(*param_tables) |ebml_w, vtable_origin| {
ebml_w.emit_from_vec(*param_tables, |ebml_w, vtable_origin| {
encode_vtable_origin(ecx, ebml_w, vtable_origin)
}
})
}
pub fn encode_vtable_origin(ecx: &e::EncodeContext,
ebml_w: &mut writer::Encoder,
vtable_origin: &typeck::vtable_origin) {
do ebml_w.emit_enum("vtable_origin") |ebml_w| {
ebml_w.emit_enum("vtable_origin", |ebml_w| {
match *vtable_origin {
typeck::vtable_static(def_id, ref tys, vtable_res) => {
do ebml_w.emit_enum_variant("vtable_static", 0u, 3u) |ebml_w| {
do ebml_w.emit_enum_variant_arg(0u) |ebml_w| {
ebml_w.emit_enum_variant("vtable_static", 0u, 3u, |ebml_w| {
ebml_w.emit_enum_variant_arg(0u, |ebml_w| {
ebml_w.emit_def_id(def_id)
}
do ebml_w.emit_enum_variant_arg(1u) |ebml_w| {
});
ebml_w.emit_enum_variant_arg(1u, |ebml_w| {
ebml_w.emit_tys(ecx, *tys);
}
do ebml_w.emit_enum_variant_arg(2u) |ebml_w| {
});
ebml_w.emit_enum_variant_arg(2u, |ebml_w| {
encode_vtable_res(ecx, ebml_w, vtable_res);
}
}
})
})
}
typeck::vtable_param(pn, bn) => {
do ebml_w.emit_enum_variant("vtable_param", 1u, 2u) |ebml_w| {
do ebml_w.emit_enum_variant_arg(0u) |ebml_w| {
ebml_w.emit_enum_variant("vtable_param", 1u, 2u, |ebml_w| {
ebml_w.emit_enum_variant_arg(0u, |ebml_w| {
pn.encode(ebml_w);
}
do ebml_w.emit_enum_variant_arg(1u) |ebml_w| {
});
ebml_w.emit_enum_variant_arg(1u, |ebml_w| {
ebml_w.emit_uint(bn);
})
})
}
}
}
}
}
})
}
pub trait vtable_decoder_helpers {
@ -724,40 +724,40 @@ impl vtable_decoder_helpers for reader::Decoder {
fn read_vtable_origin(&mut self,
tcx: ty::ctxt, cdata: @cstore::crate_metadata)
-> typeck::vtable_origin {
do self.read_enum("vtable_origin") |this| {
do this.read_enum_variant(["vtable_static",
self.read_enum("vtable_origin", |this| {
this.read_enum_variant(["vtable_static",
"vtable_param",
"vtable_self"])
"vtable_self"],
|this, i| {
match i {
0 => {
typeck::vtable_static(
do this.read_enum_variant_arg(0u) |this| {
this.read_enum_variant_arg(0u, |this| {
this.read_def_id_noxcx(cdata)
},
do this.read_enum_variant_arg(1u) |this| {
}),
this.read_enum_variant_arg(1u, |this| {
this.read_tys_noxcx(tcx, cdata)
},
do this.read_enum_variant_arg(2u) |this| {
}),
this.read_enum_variant_arg(2u, |this| {
this.read_vtable_res(tcx, cdata)
}
})
)
}
1 => {
typeck::vtable_param(
do this.read_enum_variant_arg(0u) |this| {
this.read_enum_variant_arg(0u, |this| {
Decodable::decode(this)
},
do this.read_enum_variant_arg(1u) |this| {
}),
this.read_enum_variant_arg(1u, |this| {
this.read_uint()
}
})
)
}
// hard to avoid - user input
_ => fail!("bad enum variant")
}
}
}
})
})
}
}
@ -793,54 +793,48 @@ trait ebml_writer_helpers {
impl ebml_writer_helpers for writer::Encoder {
fn emit_ty(&mut self, ecx: &e::EncodeContext, ty: ty::t) {
do self.emit_opaque |this| {
e::write_type(ecx, this, ty)
}
self.emit_opaque(|this| e::write_type(ecx, this, ty))
}
fn emit_vstore(&mut self, ecx: &e::EncodeContext, vstore: ty::vstore) {
do self.emit_opaque |this| {
e::write_vstore(ecx, this, vstore)
}
self.emit_opaque(|this| e::write_vstore(ecx, this, vstore))
}
fn emit_tys(&mut self, ecx: &e::EncodeContext, tys: &[ty::t]) {
do self.emit_from_vec(tys) |this, ty| {
this.emit_ty(ecx, *ty)
}
self.emit_from_vec(tys, |this, ty| this.emit_ty(ecx, *ty))
}
fn emit_type_param_def(&mut self,
ecx: &e::EncodeContext,
type_param_def: &ty::TypeParameterDef) {
do self.emit_opaque |this| {
self.emit_opaque(|this| {
tyencode::enc_type_param_def(this.writer,
ecx.ty_str_ctxt(),
type_param_def)
}
})
}
fn emit_tpbt(&mut self,
ecx: &e::EncodeContext,
tpbt: ty::ty_param_bounds_and_ty) {
do self.emit_struct("ty_param_bounds_and_ty", 2) |this| {
do this.emit_struct_field("generics", 0) |this| {
do this.emit_struct("Generics", 2) |this| {
do this.emit_struct_field("type_param_defs", 0) |this| {
do this.emit_from_vec(*tpbt.generics.type_param_defs)
self.emit_struct("ty_param_bounds_and_ty", 2, |this| {
this.emit_struct_field("generics", 0, |this| {
this.emit_struct("Generics", 2, |this| {
this.emit_struct_field("type_param_defs", 0, |this| {
this.emit_from_vec(*tpbt.generics.type_param_defs,
|this, type_param_def| {
this.emit_type_param_def(ecx, type_param_def);
}
}
do this.emit_struct_field("region_param_defs", 1) |this| {
})
});
this.emit_struct_field("region_param_defs", 1, |this| {
tpbt.generics.region_param_defs.encode(this);
}
}
}
do this.emit_struct_field("ty", 1) |this| {
})
})
});
this.emit_struct_field("ty", 1, |this| {
this.emit_ty(ecx, tpbt.ty);
}
}
})
})
}
}
@ -912,50 +906,48 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
{
let r = tcx.def_map.find(&id);
for def in r.iter() {
do ebml_w.tag(c::tag_table_def) |ebml_w| {
ebml_w.tag(c::tag_table_def, |ebml_w| {
ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| {
(*def).encode(ebml_w)
}
}
ebml_w.tag(c::tag_table_val, |ebml_w| (*def).encode(ebml_w));
})
}
}
{
let r = tcx.node_types.find(&(id as uint));
for &ty in r.iter() {
do ebml_w.tag(c::tag_table_node_type) |ebml_w| {
ebml_w.tag(c::tag_table_node_type, |ebml_w| {
ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| {
ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_ty(ecx, *ty);
}
}
})
})
}
}
{
let r = tcx.node_type_substs.find(&id);
for tys in r.iter() {
do ebml_w.tag(c::tag_table_node_type_subst) |ebml_w| {
ebml_w.tag(c::tag_table_node_type_subst, |ebml_w| {
ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| {
ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_tys(ecx, **tys)
}
}
})
})
}
}
{
let r = tcx.freevars.find(&id);
for &fv in r.iter() {
do ebml_w.tag(c::tag_table_freevars) |ebml_w| {
ebml_w.tag(c::tag_table_freevars, |ebml_w| {
ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| {
do ebml_w.emit_from_vec(**fv) |ebml_w, fv_entry| {
ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_from_vec(**fv, |ebml_w, fv_entry| {
encode_freevar_entry(ebml_w, *fv_entry)
}
}
}
})
})
})
}
}
@ -963,74 +955,74 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
{
let r = tcx.tcache.find(&lid);
for &tpbt in r.iter() {
do ebml_w.tag(c::tag_table_tcache) |ebml_w| {
ebml_w.tag(c::tag_table_tcache, |ebml_w| {
ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| {
ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_tpbt(ecx, *tpbt);
}
}
})
})
}
}
{
let r = tcx.ty_param_defs.find(&id);
for &type_param_def in r.iter() {
do ebml_w.tag(c::tag_table_param_defs) |ebml_w| {
ebml_w.tag(c::tag_table_param_defs, |ebml_w| {
ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| {
ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_type_param_def(ecx, type_param_def)
}
}
})
})
}
}
{
let r = maps.method_map.find(&id);
for &mme in r.iter() {
do ebml_w.tag(c::tag_table_method_map) |ebml_w| {
ebml_w.tag(c::tag_table_method_map, |ebml_w| {
ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| {
ebml_w.tag(c::tag_table_val, |ebml_w| {
encode_method_map_entry(ecx, ebml_w, *mme)
}
}
})
})
}
}
{
let r = maps.vtable_map.find(&id);
for &dr in r.iter() {
do ebml_w.tag(c::tag_table_vtable_map) |ebml_w| {
ebml_w.tag(c::tag_table_vtable_map, |ebml_w| {
ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| {
ebml_w.tag(c::tag_table_val, |ebml_w| {
encode_vtable_res(ecx, ebml_w, *dr);
}
}
})
})
}
}
{
let r = tcx.adjustments.find(&id);
for adj in r.iter() {
do ebml_w.tag(c::tag_table_adjustments) |ebml_w| {
ebml_w.tag(c::tag_table_adjustments, |ebml_w| {
ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| {
ebml_w.tag(c::tag_table_val, |ebml_w| {
(**adj).encode(ebml_w)
}
}
})
})
}
}
{
let r = maps.capture_map.find(&id);
for &cap_vars in r.iter() {
do ebml_w.tag(c::tag_table_capture_map) |ebml_w| {
ebml_w.tag(c::tag_table_capture_map, |ebml_w| {
ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| {
do ebml_w.emit_from_vec(*cap_vars) |ebml_w, cap_var| {
ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_from_vec(*cap_vars, |ebml_w, cap_var| {
cap_var.encode(ebml_w);
}
}
}
})
})
})
}
}
}
@ -1072,14 +1064,14 @@ trait ebml_decoder_decoder_helpers {
impl ebml_decoder_decoder_helpers for reader::Decoder {
fn read_ty_noxcx(&mut self,
tcx: ty::ctxt, cdata: @cstore::crate_metadata) -> ty::t {
do self.read_opaque |_, doc| {
self.read_opaque(|_, doc| {
tydecode::parse_ty_data(
*doc.data,
cdata.cnum,
doc.start,
tcx,
|_, id| decoder::translate_def_id(cdata, id))
}
})
}
fn read_tys_noxcx(&mut self,
@ -1094,7 +1086,7 @@ impl ebml_decoder_decoder_helpers for reader::Decoder {
// context. However, we do not bother, because region types
// are not used during trans.
return do self.read_opaque |this, doc| {
return self.read_opaque(|this, doc| {
debug!("read_ty({})", type_string(doc));
let ty = tydecode::parse_ty_data(
@ -1105,7 +1097,7 @@ impl ebml_decoder_decoder_helpers for reader::Decoder {
|s, a| this.convert_def_id(xcx, s, a));
ty
};
});
fn type_string(doc: ebml::Doc) -> ~str {
let mut str = ~"";
@ -1122,22 +1114,22 @@ impl ebml_decoder_decoder_helpers for reader::Decoder {
fn read_type_param_def(&mut self, xcx: @ExtendedDecodeContext)
-> ty::TypeParameterDef {
do self.read_opaque |this, doc| {
self.read_opaque(|this, doc| {
tydecode::parse_type_param_def_data(
*doc.data,
doc.start,
xcx.dcx.cdata.cnum,
xcx.dcx.tcx,
|s, a| this.convert_def_id(xcx, s, a))
}
})
}
fn read_ty_param_bounds_and_ty(&mut self, xcx: @ExtendedDecodeContext)
-> ty::ty_param_bounds_and_ty {
do self.read_struct("ty_param_bounds_and_ty", 2) |this| {
self.read_struct("ty_param_bounds_and_ty", 2, |this| {
ty::ty_param_bounds_and_ty {
generics: do this.read_struct_field("generics", 0) |this| {
do this.read_struct("Generics", 2) |this| {
generics: this.read_struct_field("generics", 0, |this| {
this.read_struct("Generics", 2, |this| {
ty::Generics {
type_param_defs:
this.read_struct_field("type_param_defs",
@ -1153,13 +1145,13 @@ impl ebml_decoder_decoder_helpers for reader::Decoder {
Decodable::decode(this)
})
}
}
},
})
}),
ty: this.read_struct_field("ty", 1, |this| {
this.read_ty(xcx)
})
}
}
})
}
fn convert_def_id(&mut self,
@ -1208,7 +1200,7 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
ast_doc: ebml::Doc) {
let dcx = xcx.dcx;
let tbl_doc = ast_doc.get(c::tag_table as uint);
do reader::docs(tbl_doc) |tag, entry_doc| {
reader::docs(tbl_doc, |tag, entry_doc| {
let id0 = entry_doc.get(c::tag_table_id as uint).as_int();
let id = xcx.tr_id(id0);
@ -1288,7 +1280,7 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
debug!(">< Side table doc loaded");
true
};
});
}
// ______________________________________________________________________

View file

@ -132,10 +132,10 @@ impl<'self> CheckLoanCtxt<'self> {
//! are issued for future scopes and thus they may have been
//! *issued* but not yet be in effect.
do self.dfcx_loans.each_bit_on_entry_frozen(scope_id) |loan_index| {
self.dfcx_loans.each_bit_on_entry_frozen(scope_id, |loan_index| {
let loan = &self.all_loans[loan_index];
op(loan)
}
})
}
pub fn each_in_scope_loan(&self,
@ -146,13 +146,13 @@ impl<'self> CheckLoanCtxt<'self> {
//! currently in scope.
let region_maps = self.tcx().region_maps;
do self.each_issued_loan(scope_id) |loan| {
self.each_issued_loan(scope_id, |loan| {
if region_maps.is_subscope_of(scope_id, loan.kill_scope) {
op(loan)
} else {
true
}
}
})
}
pub fn each_in_scope_restriction(&self,
@ -163,7 +163,7 @@ impl<'self> CheckLoanCtxt<'self> {
//! Iterates through all the in-scope restrictions for the
//! given `loan_path`
do self.each_in_scope_loan(scope_id) |loan| {
self.each_in_scope_loan(scope_id, |loan| {
let mut ret = true;
for restr in loan.restrictions.iter() {
if restr.loan_path == loan_path {
@ -174,7 +174,7 @@ impl<'self> CheckLoanCtxt<'self> {
}
}
ret
}
})
}
pub fn loans_generated_by(&self, scope_id: ast::NodeId) -> ~[uint] {
@ -182,10 +182,10 @@ impl<'self> CheckLoanCtxt<'self> {
//! we encounter `scope_id`.
let mut result = ~[];
do self.dfcx_loans.each_gen_bit_frozen(scope_id) |loan_index| {
self.dfcx_loans.each_gen_bit_frozen(scope_id, |loan_index| {
result.push(loan_index);
true
};
});
return result;
}
@ -200,13 +200,13 @@ impl<'self> CheckLoanCtxt<'self> {
let new_loan_indices = self.loans_generated_by(scope_id);
debug!("new_loan_indices = {:?}", new_loan_indices);
do self.each_issued_loan(scope_id) |issued_loan| {
self.each_issued_loan(scope_id, |issued_loan| {
for &new_loan_index in new_loan_indices.iter() {
let new_loan = &self.all_loans[new_loan_index];
self.report_error_if_loans_conflict(issued_loan, new_loan);
}
true
};
});
for (i, &x) in new_loan_indices.iter().enumerate() {
let old_loan = &self.all_loans[x];
@ -317,7 +317,7 @@ impl<'self> CheckLoanCtxt<'self> {
debug!("check_if_path_is_moved(id={:?}, use_kind={:?}, lp={})",
id, use_kind, lp.repr(self.bccx.tcx));
do self.move_data.each_move_of(id, lp) |move, moved_lp| {
self.move_data.each_move_of(id, lp, |move, moved_lp| {
self.bccx.report_use_of_moved_value(
span,
use_kind,
@ -325,7 +325,7 @@ impl<'self> CheckLoanCtxt<'self> {
move,
moved_lp);
false
};
});
}
pub fn check_assignment(&self, expr: @ast::Expr) {
@ -357,13 +357,13 @@ impl<'self> CheckLoanCtxt<'self> {
if self.is_local_variable(cmt) {
assert!(cmt.mutbl.is_immutable()); // no "const" locals
let lp = opt_loan_path(cmt).unwrap();
do self.move_data.each_assignment_of(expr.id, lp) |assign| {
self.move_data.each_assignment_of(expr.id, lp, |assign| {
self.bccx.report_reassigned_immutable_variable(
expr.span,
lp,
assign);
false
};
});
return;
}
@ -546,16 +546,16 @@ impl<'self> CheckLoanCtxt<'self> {
// `RESTR_MUTATE` restriction whenever the contents of an
// owned pointer are borrowed, and hence while `v[*]` is not
// restricted from being written, `v` is.
let cont = do this.each_in_scope_restriction(expr.id, loan_path)
|loan, restr|
{
let cont = this.each_in_scope_restriction(expr.id,
loan_path,
|loan, restr| {
if restr.set.intersects(RESTR_MUTATE) {
this.report_illegal_mutation(expr, loan_path, loan);
false
} else {
true
}
};
});
if !cont { return false }
@ -621,7 +621,7 @@ impl<'self> CheckLoanCtxt<'self> {
}
// Check for a non-const loan of `loan_path`
let cont = do this.each_in_scope_loan(expr.id) |loan| {
let cont = this.each_in_scope_loan(expr.id, |loan| {
if loan.loan_path == loan_path &&
loan.mutbl != ConstMutability {
this.report_illegal_mutation(expr,
@ -631,7 +631,7 @@ impl<'self> CheckLoanCtxt<'self> {
} else {
true
}
};
});
if !cont { return false }
}
@ -666,7 +666,7 @@ impl<'self> CheckLoanCtxt<'self> {
}
fn check_move_out_from_id(&self, id: ast::NodeId, span: Span) {
do self.move_data.each_path_moved_by(id) |_, move_path| {
self.move_data.each_path_moved_by(id, |_, move_path| {
match self.analyze_move_out_from(id, move_path) {
MoveOk => {}
MoveWhileBorrowed(loan_path, loan_span) => {
@ -682,7 +682,7 @@ impl<'self> CheckLoanCtxt<'self> {
}
}
true
};
});
}
pub fn analyze_move_out_from(&self,
@ -696,11 +696,11 @@ impl<'self> CheckLoanCtxt<'self> {
let mut ret = MoveOk;
// check for a conflicting loan:
do self.each_in_scope_restriction(expr_id, move_path) |loan, _| {
self.each_in_scope_restriction(expr_id, move_path, |loan, _| {
// Any restriction prevents moves.
ret = MoveWhileBorrowed(loan.loan_path, loan.span);
false
};
});
ret
}

View file

@ -161,27 +161,25 @@ fn gather_loans_in_local(this: &mut GatherLoanCtxt,
None => {
// Variable declarations without initializers are considered "moves":
let tcx = this.bccx.tcx;
do pat_util::pat_bindings(tcx.def_map, local.pat)
|_, id, span, _| {
pat_util::pat_bindings(tcx.def_map, local.pat, |_, id, span, _| {
gather_moves::gather_decl(this.bccx,
this.move_data,
id,
span,
id);
}
})
}
Some(init) => {
// Variable declarations with initializers are considered "assigns":
let tcx = this.bccx.tcx;
do pat_util::pat_bindings(tcx.def_map, local.pat)
|_, id, span, _| {
pat_util::pat_bindings(tcx.def_map, local.pat, |_, id, span, _| {
gather_moves::gather_assignment(this.bccx,
this.move_data,
id,
span,
@LpVar(id),
id);
}
});
let init_cmt = this.bccx.cat_expr(init);
this.gather_pat(init_cmt, local.pat, None);
}
@ -692,7 +690,7 @@ impl<'self> GatherLoanCtxt<'self> {
* moves (non-`ref` bindings with linear type).
*/
do self.bccx.cat_pattern(discr_cmt, root_pat) |cmt, pat| {
self.bccx.cat_pattern(discr_cmt, root_pat, |cmt, pat| {
match pat.node {
ast::PatIdent(bm, _, _) if self.pat_is_binding(pat) => {
match bm {
@ -781,7 +779,7 @@ impl<'self> GatherLoanCtxt<'self> {
_ => {}
}
}
})
}
pub fn vec_slice_info(&self, pat: @ast::Pat, slice_ty: ty::t)

View file

@ -318,15 +318,15 @@ pub fn opt_loan_path(cmt: mc::cmt) -> Option<@LoanPath> {
}
mc::cat_deref(cmt_base, _, pk) => {
do opt_loan_path(cmt_base).map |lp| {
opt_loan_path(cmt_base).map(|lp| {
@LpExtend(lp, cmt.mutbl, LpDeref(pk))
}
})
}
mc::cat_interior(cmt_base, ik) => {
do opt_loan_path(cmt_base).map |lp| {
opt_loan_path(cmt_base).map(|lp| {
@LpExtend(lp, cmt.mutbl, LpInterior(ik))
}
})
}
mc::cat_downcast(cmt_base) |

View file

@ -277,10 +277,10 @@ impl MoveData {
match self.path_map.find_copy(&lp) {
Some(index) => {
do self.each_base_path(index) |p| {
self.each_base_path(index, |p| {
result.push(p);
true
};
});
}
None => {
match *lp {
@ -448,7 +448,7 @@ impl MoveData {
f: |MoveIndex| -> bool)
-> bool {
let mut ret = true;
do self.each_extending_path(index0) |index| {
self.each_extending_path(index0, |index| {
let mut p = self.path(index).first_move;
while p != InvalidMoveIndex {
if !f(p) {
@ -458,7 +458,7 @@ impl MoveData {
p = self.move(p).next_move;
}
ret
};
});
ret
}
@ -466,10 +466,10 @@ impl MoveData {
path: MovePathIndex,
kill_id: ast::NodeId,
dfcx_moves: &mut MoveDataFlow) {
do self.each_applicable_move(path) |move_index| {
self.each_applicable_move(path, |move_index| {
dfcx_moves.add_kill(kill_id, *move_index);
true
};
});
}
}
@ -511,11 +511,11 @@ impl FlowedMoveData {
* Iterates through each path moved by `id`
*/
do self.dfcx_moves.each_gen_bit_frozen(id) |index| {
self.dfcx_moves.each_gen_bit_frozen(id, |index| {
let move = &self.move_data.moves[index];
let moved_path = move.path;
f(move, self.move_data.path(moved_path).loan_path)
}
})
}
pub fn each_move_of(&self,
@ -549,7 +549,7 @@ impl FlowedMoveData {
let mut ret = true;
do self.dfcx_moves.each_bit_on_entry_frozen(id) |index| {
self.dfcx_moves.each_bit_on_entry_frozen(id, |index| {
let move = &self.move_data.moves[index];
let moved_path = move.path;
if base_indices.iter().any(|x| x == &moved_path) {
@ -560,7 +560,7 @@ impl FlowedMoveData {
}
} else {
for &loan_path_index in opt_loan_path_index.iter() {
let cont = do self.move_data.each_base_path(moved_path) |p| {
let cont = self.move_data.each_base_path(moved_path, |p| {
if p == loan_path_index {
// Scenario 3: some extension of `loan_path`
// was moved
@ -568,12 +568,12 @@ impl FlowedMoveData {
} else {
true
}
};
});
if !cont { ret = false; break }
}
}
ret
}
})
}
pub fn is_assignee(&self,
@ -605,14 +605,14 @@ impl FlowedMoveData {
}
};
do self.dfcx_assign.each_bit_on_entry_frozen(id) |index| {
self.dfcx_assign.each_bit_on_entry_frozen(id, |index| {
let assignment = &self.move_data.var_assignments[index];
if assignment.path == loan_path_index && !f(assignment) {
false
} else {
true
}
}
})
}
}

View file

@ -134,13 +134,13 @@ fn check_arms(cx: &MatchCheckCtxt, arms: &[Arm]) {
}
};
do walk_pat(*pat) |p| {
walk_pat(*pat, |p| {
if pat_matches_nan(p) {
cx.tcx.sess.span_warn(p.span, "unmatchable NaN in pattern, \
use the is_nan method in a guard instead");
}
true
};
});
let v = ~[*pat];
match is_useful(cx, &seen, v) {
@ -275,14 +275,14 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
is_useful_specialized(cx, m, v, vec(n), n, left_ty)
}
ty::ty_unboxed_vec(*) | ty::ty_evec(*) => {
let max_len = do m.rev_iter().fold(0) |max_len, r| {
let max_len = m.rev_iter().fold(0, |max_len, r| {
match r[0].node {
PatVec(ref before, _, ref after) => {
num::max(before.len() + after.len(), max_len)
}
_ => max_len
}
};
});
for n in iter::range(0u, max_len + 1) {
match is_useful_specialized(cx, m, v, vec(n), n, left_ty) {
not_useful => (),
@ -454,14 +454,14 @@ fn missing_ctor(cx: &MatchCheckCtxt,
ty::ty_unboxed_vec(*) | ty::ty_evec(*) => {
// Find the lengths and slices of all vector patterns.
let vec_pat_lens = do m.iter().filter_map |r| {
let vec_pat_lens = m.iter().filter_map(|r| {
match r[0].node {
PatVec(ref before, ref slice, ref after) => {
Some((before.len() + after.len(), slice.is_some()))
}
_ => None
}
}.collect::<~[(uint, bool)]>();
}).collect::<~[(uint, bool)]>();
// Sort them by length such that for patterns of the same length,
// those with a destructured slice come first.
@ -886,7 +886,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
let mut by_ref_span = None;
let mut any_by_move = false;
for pat in pats.iter() {
do pat_bindings(def_map, *pat) |bm, id, span, _path| {
pat_bindings(def_map, *pat, |bm, id, span, _path| {
match bm {
BindByRef(_) => {
by_ref_span = Some(span);
@ -897,7 +897,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
}
}
}
}
})
}
let check_move: |&Pat, Option<@Pat>| = |p, sub| {
@ -925,7 +925,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
if !any_by_move { return; } // pointless micro-optimization
for pat in pats.iter() {
do walk_pat(*pat) |p| {
walk_pat(*pat, |p| {
if pat_is_binding(def_map, p) {
match p.node {
PatIdent(_, _, sub) => {
@ -943,6 +943,6 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
}
}
true
};
});
}
}

View file

@ -219,9 +219,7 @@ impl ConstEvalVisitor {
}
ast::ExprStruct(_, ref fs, None) => {
let cs = do fs.iter().map |f| {
self.classify(f.expr)
};
let cs = fs.iter().map(|f| self.classify(f.expr));
join_all(cs)
}

View file

@ -208,17 +208,17 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
fn compute_id_range(&mut self, id: ast::NodeId) -> (uint, uint) {
let mut expanded = false;
let len = self.nodeid_to_bitset.len();
let n = do self.nodeid_to_bitset.find_or_insert_with(id) |_| {
let n = self.nodeid_to_bitset.find_or_insert_with(id, |_| {
expanded = true;
len
};
});
if expanded {
let entry = if self.oper.initial_value() { uint::max_value } else {0};
do self.words_per_id.times {
self.words_per_id.times(|| {
self.gens.push(0);
self.kills.push(0);
self.on_entry.push(entry);
}
})
}
let start = *n * self.words_per_id;
let end = start + self.words_per_id;
@ -835,12 +835,12 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
debug!("DataFlowContext::walk_pat(pat={}, in_out={})",
pat.repr(self.dfcx.tcx), bits_to_str(reslice(in_out)));
do ast_util::walk_pat(pat) |p| {
ast_util::walk_pat(pat, |p| {
debug!(" p.id={:?} in_out={}", p.id, bits_to_str(reslice(in_out)));
self.merge_with_entry_set(p.id, in_out);
self.dfcx.apply_gen_kill(p.id, in_out);
true
};
});
}
fn walk_pat_alternatives(&mut self,

View file

@ -318,22 +318,22 @@ mod test {
fn each_node() {
let graph = create_graph();
let expected = ["A", "B", "C", "D", "E", "F"];
do graph.each_node |idx, node| {
graph.each_node(|idx, node| {
assert_eq!(&expected[*idx], graph.node_data(idx));
assert_eq!(expected[*idx], node.data);
true
};
});
}
#[test]
fn each_edge() {
let graph = create_graph();
let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
do graph.each_edge |idx, edge| {
graph.each_edge(|idx, edge| {
assert_eq!(&expected[*idx], graph.edge_data(idx));
assert_eq!(expected[*idx], edge.data);
true
};
});
}
fn test_adjacent_edges<N:Eq,E:Eq>(graph: &Graph<N,E>,
@ -344,7 +344,7 @@ mod test {
assert_eq!(graph.node_data(start_index), &start_data);
let mut counter = 0;
do graph.each_incoming_edge(start_index) |edge_index, edge| {
graph.each_incoming_edge(start_index, |edge_index, edge| {
assert_eq!(graph.edge_data(edge_index), &edge.data);
assert!(counter < expected_incoming.len());
debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
@ -358,11 +358,11 @@ mod test {
}
counter += 1;
true
};
});
assert_eq!(counter, expected_incoming.len());
let mut counter = 0;
do graph.each_outgoing_edge(start_index) |edge_index, edge| {
graph.each_outgoing_edge(start_index, |edge_index, edge| {
assert_eq!(graph.edge_data(edge_index), &edge.data);
assert!(counter < expected_outgoing.len());
debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
@ -376,7 +376,7 @@ mod test {
}
counter += 1;
true
};
});
assert_eq!(counter, expected_outgoing.len());
}

View file

@ -125,14 +125,14 @@ fn check_impl_of_trait(cx: &mut Context, it: @item, trait_ref: &trait_ref, self_
// If this trait has builtin-kind supertraits, meet them.
let self_ty: ty::t = ty::node_id_to_type(cx.tcx, it.id);
debug!("checking impl with self type {:?}", ty::get(self_ty).sty);
do check_builtin_bounds(cx, self_ty, trait_def.bounds) |missing| {
check_builtin_bounds(cx, self_ty, trait_def.bounds, |missing| {
cx.tcx.sess.span_err(self_type.span,
format!("the type `{}', which does not fulfill `{}`, cannot implement this \
trait", ty_to_str(cx.tcx, self_ty), missing.user_string(cx.tcx)));
cx.tcx.sess.span_note(self_type.span,
format!("types implementing this trait must fulfill `{}`",
trait_def.bounds.user_string(cx.tcx)));
}
});
// If this is a destructor, check kinds.
if cx.tcx.lang_items.drop_trait() == Some(trait_def_id) {
@ -255,12 +255,12 @@ fn check_fn(
fn_id: NodeId) {
// Check kinds on free variables:
do with_appropriate_checker(cx, fn_id) |chk| {
with_appropriate_checker(cx, fn_id, |chk| {
let r = freevars::get_freevars(cx.tcx, fn_id);
for fv in r.iter() {
chk(cx, *fv);
}
}
});
visit::walk_fn(cx, fk, decl, body, sp, fn_id, ());
}
@ -374,20 +374,23 @@ pub fn check_typaram_bounds(cx: &Context,
ty: ty::t,
type_param_def: &ty::TypeParameterDef)
{
do check_builtin_bounds(cx, ty, type_param_def.bounds.builtin_bounds) |missing| {
check_builtin_bounds(cx,
ty,
type_param_def.bounds.builtin_bounds,
|missing| {
cx.tcx.sess.span_err(
sp,
format!("instantiating a type parameter with an incompatible type \
`{}`, which does not fulfill `{}`",
ty_to_str(cx.tcx, ty),
missing.user_string(cx.tcx)));
}
});
}
pub fn check_freevar_bounds(cx: &Context, sp: Span, ty: ty::t,
bounds: ty::BuiltinBounds, referenced_ty: Option<ty::t>)
{
do check_builtin_bounds(cx, ty, bounds) |missing| {
check_builtin_bounds(cx, ty, bounds, |missing| {
// Will be Some if the freevar is implicitly borrowed (stack closure).
// Emit a less mysterious error message in this case.
match referenced_ty {
@ -404,18 +407,18 @@ pub fn check_freevar_bounds(cx: &Context, sp: Span, ty: ty::t,
sp,
format!("this closure's environment must satisfy `{}`",
bounds.user_string(cx.tcx)));
}
});
}
pub fn check_trait_cast_bounds(cx: &Context, sp: Span, ty: ty::t,
bounds: ty::BuiltinBounds) {
do check_builtin_bounds(cx, ty, bounds) |missing| {
check_builtin_bounds(cx, ty, bounds, |missing| {
cx.tcx.sess.span_err(sp,
format!("cannot pack type `{}`, which does not fulfill \
`{}`, as a trait bounded by {}",
ty_to_str(cx.tcx, ty), missing.user_string(cx.tcx),
bounds.user_string(cx.tcx)));
}
});
}
fn is_nullary_variant(cx: &Context, ex: @Expr) -> bool {

View file

@ -413,14 +413,13 @@ impl LanguageItemCollector {
pub fn collect_external_language_items(&mut self) {
let crate_store = self.session.cstore;
do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
do each_lang_item(crate_store, crate_number)
|node_id, item_index| {
iter_crate_data(crate_store, |crate_number, _crate_metadata| {
each_lang_item(crate_store, crate_number, |node_id, item_index| {
let def_id = ast::DefId { crate: crate_number, node: node_id };
self.collect_item(item_index, def_id);
true
};
}
});
})
}
pub fn collect(&mut self, crate: &ast::Crate) {

View file

@ -441,7 +441,7 @@ impl<'self> Context<'self> {
// of what we changed so we can roll everything back after invoking the
// specified closure
let mut pushed = 0u;
do each_lint(self.tcx.sess, attrs) |meta, level, lintname| {
each_lint(self.tcx.sess, attrs, |meta, level, lintname| {
match self.dict.find_equiv(&lintname) {
None => {
self.span_lint(
@ -467,7 +467,7 @@ impl<'self> Context<'self> {
}
}
true
};
});
let old_is_doc_hidden = self.is_doc_hidden;
self.is_doc_hidden = self.is_doc_hidden ||
@ -479,10 +479,10 @@ impl<'self> Context<'self> {
// rollback
self.is_doc_hidden = old_is_doc_hidden;
do pushed.times {
pushed.times(|| {
let (lint, lvl, src) = self.lint_stack.pop();
self.set_level(lint, lvl, src);
}
})
}
fn visit_ids(&self, f: |&mut ast_util::IdVisitor<Context>|) {
@ -1135,11 +1135,11 @@ fn check_stability(cx: &Context, e: &ast::Expr) {
// this crate
match cx.tcx.items.find(&id.node) {
Some(ast_node) => {
let s = do ast_node.with_attrs |attrs| {
do attrs.map |a| {
let s = ast_node.with_attrs(|attrs| {
attrs.map(|a| {
attr::find_stability(a.iter().map(|a| a.meta()))
}
};
})
});
match s {
Some(s) => s,
@ -1157,11 +1157,11 @@ fn check_stability(cx: &Context, e: &ast::Expr) {
let mut s = None;
// run through all the attributes and take the first
// stability one.
do csearch::get_item_attrs(cx.tcx.cstore, id) |meta_items| {
csearch::get_item_attrs(cx.tcx.cstore, id, |meta_items| {
if s.is_none() {
s = attr::find_stability(meta_items.move_iter())
}
}
});
s
};
@ -1189,7 +1189,7 @@ fn check_stability(cx: &Context, e: &ast::Expr) {
impl<'self> Visitor<()> for Context<'self> {
fn visit_item(&mut self, it: @ast::item, _: ()) {
do self.with_lint_attrs(it.attrs) |cx| {
self.with_lint_attrs(it.attrs, |cx| {
check_item_ctypes(cx, it);
check_item_non_camel_case_types(cx, it);
check_item_non_uppercase_statics(cx, it);
@ -1197,12 +1197,10 @@ impl<'self> Visitor<()> for Context<'self> {
check_missing_doc_item(cx, it);
check_attrs_usage(cx, it.attrs);
do cx.visit_ids |v| {
v.visit_item(it, ());
}
cx.visit_ids(|v| v.visit_item(it, ()));
visit::walk_item(cx, it, ());
}
})
}
fn visit_foreign_item(&mut self, it: @ast::foreign_item, _: ()) {
@ -1266,15 +1264,15 @@ impl<'self> Visitor<()> for Context<'self> {
match *fk {
visit::fk_method(_, _, m) => {
do self.with_lint_attrs(m.attrs) |cx| {
self.with_lint_attrs(m.attrs, |cx| {
check_missing_doc_method(cx, m);
check_attrs_usage(cx, m.attrs);
do cx.visit_ids |v| {
cx.visit_ids(|v| {
v.visit_fn(fk, decl, body, span, id, ());
}
});
recurse(cx);
}
})
}
_ => recurse(self),
}
@ -1282,12 +1280,12 @@ impl<'self> Visitor<()> for Context<'self> {
fn visit_ty_method(&mut self, t: &ast::TypeMethod, _: ()) {
do self.with_lint_attrs(t.attrs) |cx| {
self.with_lint_attrs(t.attrs, |cx| {
check_missing_doc_ty_method(cx, t);
check_attrs_usage(cx, t.attrs);
visit::walk_ty_method(cx, t, ());
}
})
}
fn visit_struct_def(&mut self,
@ -1303,21 +1301,21 @@ impl<'self> Visitor<()> for Context<'self> {
}
fn visit_struct_field(&mut self, s: @ast::struct_field, _: ()) {
do self.with_lint_attrs(s.node.attrs) |cx| {
self.with_lint_attrs(s.node.attrs, |cx| {
check_missing_doc_struct_field(cx, s);
check_attrs_usage(cx, s.node.attrs);
visit::walk_struct_field(cx, s, ());
}
})
}
fn visit_variant(&mut self, v: &ast::variant, g: &ast::Generics, _: ()) {
do self.with_lint_attrs(v.node.attrs) |cx| {
self.with_lint_attrs(v.node.attrs, |cx| {
check_missing_doc_variant(cx, v);
check_attrs_usage(cx, v.node.attrs);
visit::walk_variant(cx, v, g, ());
}
})
}
}
@ -1356,16 +1354,16 @@ pub fn check_crate(tcx: ty::ctxt,
for &(lint, level) in tcx.sess.opts.lint_opts.iter() {
cx.set_level(lint, level, CommandLine);
}
do cx.with_lint_attrs(crate.attrs) |cx| {
do cx.visit_ids |v| {
cx.with_lint_attrs(crate.attrs, |cx| {
cx.visit_ids(|v| {
v.visited_outermost = true;
visit::walk_crate(v, crate, ());
}
});
check_crate_attrs_usage(cx, crate.attrs);
visit::walk_crate(cx, crate, ());
}
});
// If we missed any lints added to the session, then there's a bug somewhere
// in the iteration code.

View file

@ -381,12 +381,13 @@ fn visit_fn(v: &mut LivenessVisitor,
}
for arg in decl.inputs.iter() {
do pat_util::pat_bindings(this.tcx.def_map, arg.pat)
pat_util::pat_bindings(this.tcx.def_map,
arg.pat,
|_bm, arg_id, _x, path| {
debug!("adding argument {}", arg_id);
let ident = ast_util::path_to_ident(path);
fn_maps.add_variable(Arg(arg_id, ident));
}
})
};
// Add `this`, whether explicit or implicit.
@ -429,7 +430,7 @@ fn visit_fn(v: &mut LivenessVisitor,
fn visit_local(v: &mut LivenessVisitor, local: @Local, this: @mut IrMaps) {
let def_map = this.tcx.def_map;
do pat_util::pat_bindings(def_map, local.pat) |bm, p_id, sp, path| {
pat_util::pat_bindings(def_map, local.pat, |bm, p_id, sp, path| {
debug!("adding local variable {}", p_id);
let name = ast_util::path_to_ident(path);
this.add_live_node_for_node(p_id, VarDefNode(sp));
@ -447,14 +448,14 @@ fn visit_local(v: &mut LivenessVisitor, local: @Local, this: @mut IrMaps) {
is_mutbl: mutbl,
kind: kind
}));
}
});
visit::walk_local(v, local, this);
}
fn visit_arm(v: &mut LivenessVisitor, arm: &Arm, this: @mut IrMaps) {
let def_map = this.tcx.def_map;
for pat in arm.pats.iter() {
do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| {
pat_util::pat_bindings(def_map, *pat, |bm, p_id, sp, path| {
debug!("adding local variable {} from match with bm {:?}",
p_id, bm);
let name = ast_util::path_to_ident(path);
@ -469,7 +470,7 @@ fn visit_arm(v: &mut LivenessVisitor, arm: &Arm, this: @mut IrMaps) {
is_mutbl: mutbl,
kind: FromMatch(bm)
}));
}
})
}
visit::walk_arm(v, arm, this);
}
@ -628,9 +629,9 @@ impl Liveness {
match expr.node {
ExprPath(_) => {
let def = self.tcx.def_map.get_copy(&expr.id);
do moves::moved_variable_node_id_from_def(def).map |rdef| {
moves::moved_variable_node_id_from_def(def).map(|rdef| {
self.variable(rdef, expr.span)
}
})
}
_ => None
}
@ -644,9 +645,9 @@ impl Liveness {
-> Option<Variable> {
match self.tcx.def_map.find(&node_id) {
Some(&def) => {
do moves::moved_variable_node_id_from_def(def).map |rdef| {
moves::moved_variable_node_id_from_def(def).map(|rdef| {
self.variable(rdef, span)
}
})
}
None => {
self.tcx.sess.span_bug(
@ -659,11 +660,11 @@ impl Liveness {
pat: @Pat,
f: |LiveNode, Variable, Span, NodeId|) {
let def_map = self.tcx.def_map;
do pat_util::pat_bindings(def_map, pat) |_bm, p_id, sp, _n| {
pat_util::pat_bindings(def_map, pat, |_bm, p_id, sp, _n| {
let ln = self.live_node(p_id, sp);
let var = self.variable(p_id, sp);
f(ln, var, sp, p_id);
}
})
}
pub fn arm_pats_bindings(&self,
@ -685,11 +686,11 @@ impl Liveness {
pub fn define_bindings_in_arm_pats(&self, pats: &[@Pat], succ: LiveNode)
-> LiveNode {
let mut succ = succ;
do self.arm_pats_bindings(pats) |ln, var, _sp, _id| {
self.arm_pats_bindings(pats, |ln, var, _sp, _id| {
self.init_from_succ(ln, succ);
self.define(ln, var);
succ = ln;
}
});
succ
}
@ -792,14 +793,14 @@ impl Liveness {
}
pub fn ln_str(&self, ln: LiveNode) -> ~str {
str::from_utf8_owned(do io::mem::with_mem_writer |wr| {
str::from_utf8_owned(io::mem::with_mem_writer(|wr| {
let wr = wr as &mut io::Writer;
write!(wr, "[ln({}) of kind {:?} reads", *ln, self.ir.lnks[*ln]);
self.write_vars(wr, ln, |idx| self.users[idx].reader );
write!(wr, " writes");
self.write_vars(wr, ln, |idx| self.users[idx].writer );
write!(wr, " precedes {}]", self.successors[*ln].to_str());
})
}))
}
pub fn init_empty(&self, ln: LiveNode, succ_ln: LiveNode) {
@ -833,7 +834,7 @@ impl Liveness {
if ln == succ_ln { return false; }
let mut changed = false;
do self.indices2(ln, succ_ln) |idx, succ_idx| {
self.indices2(ln, succ_ln, |idx, succ_idx| {
let users = &mut *self.users;
changed |= copy_if_invalid(users[succ_idx].reader,
&mut users[idx].reader);
@ -843,7 +844,7 @@ impl Liveness {
users[idx].used = true;
changed = true;
}
}
});
debug!("merge_from_succ(ln={}, succ={}, first_merge={}, changed={})",
ln.to_str(), self.ln_str(succ_ln), first_merge, changed);
@ -939,9 +940,9 @@ impl Liveness {
pub fn propagate_through_block(&self, blk: &Block, succ: LiveNode)
-> LiveNode {
let succ = self.propagate_through_opt_expr(blk.expr, succ);
do blk.stmts.rev_iter().fold(succ) |succ, stmt| {
blk.stmts.rev_iter().fold(succ, |succ, stmt| {
self.propagate_through_stmt(*stmt, succ)
}
})
}
pub fn propagate_through_stmt(&self, stmt: &Stmt, succ: LiveNode)
@ -993,18 +994,18 @@ impl Liveness {
pub fn propagate_through_exprs(&self, exprs: &[@Expr], succ: LiveNode)
-> LiveNode {
do exprs.rev_iter().fold(succ) |succ, expr| {
exprs.rev_iter().fold(succ, |succ, expr| {
self.propagate_through_expr(*expr, succ)
}
})
}
pub fn propagate_through_opt_expr(&self,
opt_expr: Option<@Expr>,
succ: LiveNode)
-> LiveNode {
do opt_expr.iter().fold(succ) |succ, expr| {
opt_expr.iter().fold(succ, |succ, expr| {
self.propagate_through_expr(*expr, succ)
}
})
}
pub fn propagate_through_expr(&self, expr: @Expr, succ: LiveNode)
@ -1037,12 +1038,12 @@ impl Liveness {
// the construction of a closure itself is not important,
// but we have to consider the closed over variables.
let caps = self.ir.captures(expr);
do caps.rev_iter().fold(succ) |succ, cap| {
caps.rev_iter().fold(succ, |succ, cap| {
self.init_from_succ(cap.ln, succ);
let var = self.variable(cap.var_nid, expr.span);
self.acc(cap.ln, var, ACC_READ | ACC_USE);
cap.ln
}
})
})
}
@ -1177,9 +1178,9 @@ impl Liveness {
ExprStruct(_, ref fields, with_expr) => {
let succ = self.propagate_through_opt_expr(with_expr, succ);
do fields.rev_iter().fold(succ) |succ, field| {
fields.rev_iter().fold(succ, |succ, field| {
self.propagate_through_expr(field.expr, succ)
}
})
}
ExprCall(f, ref args, _) => {
@ -1230,15 +1231,15 @@ impl Liveness {
}
ExprInlineAsm(ref ia) => {
let succ = do ia.inputs.rev_iter().fold(succ) |succ, &(_, expr)| {
let succ = ia.inputs.rev_iter().fold(succ, |succ, &(_, expr)| {
self.propagate_through_expr(expr, succ)
};
do ia.outputs.rev_iter().fold(succ) |succ, &(_, expr)| {
});
ia.outputs.rev_iter().fold(succ, |succ, &(_, expr)| {
// see comment on lvalues in
// propagate_through_lvalue_components()
let succ = self.write_lvalue(expr, succ, ACC_WRITE);
self.propagate_through_lvalue_components(expr, succ)
}
})
}
ExprLogLevel |
@ -1437,7 +1438,7 @@ fn check_local(this: &mut Liveness, local: @Local) {
// should not be live at this point.
debug!("check_local() with no initializer");
do this.pat_bindings(local.pat) |ln, var, sp, id| {
this.pat_bindings(local.pat, |ln, var, sp, id| {
if !this.warn_about_unused(sp, id, ln, var) {
match this.live_on_exit(ln, var) {
None => { /* not live: good */ }
@ -1448,7 +1449,7 @@ fn check_local(this: &mut Liveness, local: @Local) {
}
}
}
}
})
}
}
@ -1456,9 +1457,9 @@ fn check_local(this: &mut Liveness, local: @Local) {
}
fn check_arm(this: &mut Liveness, arm: &Arm) {
do this.arm_pats_bindings(arm.pats) |ln, var, sp, id| {
this.arm_pats_bindings(arm.pats, |ln, var, sp, id| {
this.warn_about_unused(sp, id, ln, var);
}
});
visit::walk_arm(this, arm, ());
}
@ -1620,20 +1621,21 @@ impl Liveness {
pub fn warn_about_unused_args(&self, decl: &fn_decl, entry_ln: LiveNode) {
for arg in decl.inputs.iter() {
do pat_util::pat_bindings(self.tcx.def_map, arg.pat)
pat_util::pat_bindings(self.tcx.def_map,
arg.pat,
|_bm, p_id, sp, _n| {
let var = self.variable(p_id, sp);
self.warn_about_unused(sp, p_id, entry_ln, var);
}
})
}
}
pub fn warn_about_unused_or_dead_vars_in_pat(&self, pat: @Pat) {
do self.pat_bindings(pat) |ln, var, sp, id| {
self.pat_bindings(pat, |ln, var, sp, id| {
if !self.warn_about_unused(sp, id, ln, var) {
self.warn_about_dead_assign(sp, id, ln, var);
}
}
})
}
pub fn warn_about_unused(&self,

View file

@ -614,7 +614,7 @@ impl VisitContext {
* into itself or not based on its type and annotation.
*/
do pat_bindings(self.tcx.def_map, pat) |bm, id, _span, path| {
pat_bindings(self.tcx.def_map, pat, |bm, id, _span, path| {
let binding_moves = match bm {
BindByRef(_) => false,
BindByValue(_) => {
@ -633,7 +633,7 @@ impl VisitContext {
if binding_moves {
self.move_maps.moves_map.insert(id);
}
}
})
}
pub fn use_receiver(&mut self,

View file

@ -22,9 +22,9 @@ pub type PatIdMap = HashMap<Ident, NodeId>;
// use the NodeId of their namesake in the first pattern.
pub fn pat_id_map(dm: resolve::DefMap, pat: &Pat) -> PatIdMap {
let mut map = HashMap::new();
do pat_bindings(dm, pat) |_bm, p_id, _s, n| {
pat_bindings(dm, pat, |_bm, p_id, _s, n| {
map.insert(path_to_ident(n), p_id);
};
});
map
}
@ -75,7 +75,7 @@ pub fn pat_is_binding_or_wild(dm: resolve::DefMap, pat: &Pat) -> bool {
pub fn pat_bindings(dm: resolve::DefMap,
pat: &Pat,
it: |BindingMode, NodeId, Span, &Path|) {
do walk_pat(pat) |p| {
walk_pat(pat, |p| {
match p.node {
PatIdent(binding_mode, ref pth, _) if pat_is_binding(dm, p) => {
it(binding_mode, p.id, p.span, pth);
@ -83,7 +83,7 @@ pub fn pat_bindings(dm: resolve::DefMap,
_ => {}
}
true
};
});
}
pub fn pat_binding_ids(dm: resolve::DefMap, pat: &Pat) -> ~[NodeId] {
@ -96,13 +96,13 @@ pub fn pat_binding_ids(dm: resolve::DefMap, pat: &Pat) -> ~[NodeId] {
/// an ident, e.g. `foo`, or `Foo(foo)` or `foo @ Bar(*)`.
pub fn pat_contains_bindings(dm: resolve::DefMap, pat: &Pat) -> bool {
let mut contains_bindings = false;
do walk_pat(pat) |p| {
walk_pat(pat, |p| {
if pat_is_binding(dm, p) {
contains_bindings = true;
false // there's at least one binding, can short circuit now.
} else {
true
}
};
});
contains_bindings
}

View file

@ -909,11 +909,12 @@ impl<'self> Visitor<ReducedGraphParent> for BuildReducedGraphVisitor<'self> {
fn visit_foreign_item(&mut self, foreign_item: @foreign_item,
context:ReducedGraphParent) {
do self.resolver.build_reduced_graph_for_foreign_item(foreign_item,
context) |r, c| {
self.resolver.build_reduced_graph_for_foreign_item(foreign_item,
context,
|r, c| {
let mut v = BuildReducedGraphVisitor{ resolver: r };
visit::walk_foreign_item(&mut v, foreign_item, c);
}
})
}
fn visit_view_item(&mut self, view_item:&view_item, context:ReducedGraphParent) {
@ -1227,11 +1228,11 @@ impl Resolver {
// If this is a newtype or unit-like struct, define a name
// in the value namespace as well
do ctor_id.while_some |cid| {
ctor_id.while_some(|cid| {
name_bindings.define_value(DefStruct(local_def(cid)), sp,
is_public);
None
}
});
// Record the def ID of this struct.
self.structs.insert(local_def(item.id));
@ -1553,12 +1554,12 @@ impl Resolver {
let def = DefFn(local_def(foreign_item.id), unsafe_fn);
name_bindings.define_value(def, foreign_item.span, is_public);
do self.with_type_parameter_rib(
HasTypeParameters(
generics, foreign_item.id, 0, NormalRibKind)) |this|
{
f(this, new_parent)
}
self.with_type_parameter_rib(
HasTypeParameters(generics,
foreign_item.id,
0,
NormalRibKind),
|this| f(this, new_parent));
}
foreign_item_static(_, m) => {
let def = DefStatic(local_def(foreign_item.id), m);
@ -1758,15 +1759,17 @@ impl Resolver {
DefForeignMod(def_id) => {
// Foreign modules have no names. Recur and populate
// eagerly.
do csearch::each_child_of_item(self.session.cstore,
def_id)
|def_like, child_ident, vis| {
csearch::each_child_of_item(self.session.cstore,
def_id,
|def_like,
child_ident,
vis| {
self.build_reduced_graph_for_external_crate_def(
root,
def_like,
child_ident,
vis)
}
});
}
_ => {
let (child_name_bindings, new_parent) =
@ -1893,7 +1896,8 @@ impl Resolver {
Some(def_id) => def_id,
};
do csearch::each_child_of_item(self.session.cstore, def_id)
csearch::each_child_of_item(self.session.cstore,
def_id,
|def_like, child_ident, visibility| {
debug!("(populating external module) ... found ident: {}",
token::ident_to_str(&child_ident));
@ -1901,7 +1905,7 @@ impl Resolver {
def_like,
child_ident,
visibility)
}
});
module.populated = true
}
@ -1918,14 +1922,14 @@ impl Resolver {
/// crate.
fn build_reduced_graph_for_external_crate(&mut self,
root: @mut Module) {
do csearch::each_top_level_item_of_crate(self.session.cstore,
root.def_id.unwrap().crate)
csearch::each_top_level_item_of_crate(self.session.cstore,
root.def_id.unwrap().crate,
|def_like, ident, visibility| {
self.build_reduced_graph_for_external_crate_def(root,
def_like,
ident,
visibility)
}
});
}
/// Creates and adds an import directive to the given module.
@ -3544,20 +3548,23 @@ impl Resolver {
// n.b. the discr expr gets visted twice.
// but maybe it's okay since the first time will signal an
// error if there is one? -- tjc
do self.with_type_parameter_rib(
HasTypeParameters(
generics, item.id, 0, NormalRibKind)) |this| {
self.with_type_parameter_rib(HasTypeParameters(generics,
item.id,
0,
NormalRibKind),
|this| {
visit::walk_item(this, item, ());
}
});
}
item_ty(_, ref generics) => {
do self.with_type_parameter_rib
(HasTypeParameters(generics, item.id, 0,
NormalRibKind))
self.with_type_parameter_rib(HasTypeParameters(generics,
item.id,
0,
NormalRibKind),
|this| {
visit::walk_item(this, item, ());
}
});
}
item_impl(ref generics,
@ -3581,10 +3588,11 @@ impl Resolver {
DlDef(DefSelfTy(item.id)));
// Create a new rib for the trait-wide type parameters.
do self.with_type_parameter_rib
(HasTypeParameters(generics, item.id, 0,
NormalRibKind)) |this| {
self.with_type_parameter_rib(HasTypeParameters(generics,
item.id,
0,
NormalRibKind),
|this| {
this.resolve_type_parameters(&generics.ty_params);
// Resolve derived traits.
@ -3600,11 +3608,12 @@ impl Resolver {
match *method {
required(ref ty_m) => {
do this.with_type_parameter_rib
this.with_type_parameter_rib
(HasTypeParameters(&ty_m.generics,
item.id,
generics.ty_params.len(),
MethodRibKind(item.id, Required))) |this| {
MethodRibKind(item.id, Required)),
|this| {
// Resolve the method-specific type
// parameters.
@ -3616,7 +3625,7 @@ impl Resolver {
}
this.resolve_type(&ty_m.decl.output);
}
});
}
provided(m) => {
this.resolve_method(MethodRibKind(item.id,
@ -3626,7 +3635,7 @@ impl Resolver {
}
}
}
}
});
self.type_ribs.pop();
}
@ -3638,14 +3647,14 @@ impl Resolver {
}
item_mod(ref module_) => {
do self.with_scope(Some(item.ident)) |this| {
self.with_scope(Some(item.ident), |this| {
this.resolve_module(module_, item.span, item.ident,
item.id);
}
});
}
item_foreign_mod(ref foreign_module) => {
do self.with_scope(Some(item.ident)) |this| {
self.with_scope(Some(item.ident), |this| {
for foreign_item in foreign_module.items.iter() {
match foreign_item.node {
foreign_item_fn(_, ref generics) => {
@ -3664,7 +3673,7 @@ impl Resolver {
}
}
}
}
});
}
item_fn(ref fn_decl, _, _, ref generics, ref block) => {
@ -3764,7 +3773,7 @@ impl Resolver {
self.label_ribs.push(function_label_rib);
// If this function has type parameters, add them now.
do self.with_type_parameter_rib(type_parameters) |this| {
self.with_type_parameter_rib(type_parameters, |this| {
// Resolve the type parameters.
match type_parameters {
NoTypeParameters => {
@ -3815,7 +3824,7 @@ impl Resolver {
this.resolve_block(block);
debug!("(resolving function) leaving function");
}
});
self.label_ribs.pop();
self.value_ribs.pop();
@ -3890,10 +3899,11 @@ impl Resolver {
}
// If applicable, create a rib for the type parameters.
do self.with_type_parameter_rib(HasTypeParameters
(generics, id, 0,
OpaqueFunctionRibKind)) |this| {
self.with_type_parameter_rib(HasTypeParameters(generics,
id,
0,
OpaqueFunctionRibKind),
|this| {
// Resolve the type parameters.
this.resolve_type_parameters(&generics.ty_params);
@ -3901,7 +3911,7 @@ impl Resolver {
for field in fields.iter() {
this.resolve_type(&field.node.ty);
}
}
});
}
// Does this really need to take a RibKind or is it always going
@ -3937,9 +3947,11 @@ impl Resolver {
methods: &[@method]) {
// If applicable, create a rib for the type parameters.
let outer_type_parameter_count = generics.ty_params.len();
do self.with_type_parameter_rib(HasTypeParameters
(generics, id, 0,
NormalRibKind)) |this| {
self.with_type_parameter_rib(HasTypeParameters(generics,
id,
0,
NormalRibKind),
|this| {
// Resolve the type parameters.
this.resolve_type_parameters(&generics.ty_params);
@ -4000,7 +4012,7 @@ impl Resolver {
Some(r) => { this.current_trait_refs = r; }
None => ()
}
}
});
}
fn resolve_module(&mut self,
@ -4037,12 +4049,12 @@ impl Resolver {
// user and one 'x' came from the macro.
fn binding_mode_map(&mut self, pat: @Pat) -> BindingMap {
let mut result = HashMap::new();
do pat_bindings(self.def_map, pat) |binding_mode, _id, sp, path| {
pat_bindings(self.def_map, pat, |binding_mode, _id, sp, path| {
let name = mtwt_resolve(path_to_ident(path));
result.insert(name,
binding_info {span: sp,
binding_mode: binding_mode});
}
});
return result;
}
@ -4210,19 +4222,19 @@ impl Resolver {
}
}
do bounds.as_ref().map |bound_vec| {
bounds.as_ref().map(|bound_vec| {
for bound in bound_vec.iter() {
self.resolve_type_parameter_bound(ty.id, bound);
}
};
});
}
ty_closure(c) => {
do c.bounds.as_ref().map |bounds| {
c.bounds.as_ref().map(|bounds| {
for bound in bounds.iter() {
self.resolve_type_parameter_bound(ty.id, bound);
}
};
});
visit::walk_ty(self, ty, ());
}
@ -4240,7 +4252,7 @@ impl Resolver {
// pattern that binds them
bindings_list: Option<@mut HashMap<Name,NodeId>>) {
let pat_id = pattern.id;
do walk_pat(pattern) |pattern| {
walk_pat(pattern, |pattern| {
match pattern.node {
PatIdent(binding_mode, ref path, _)
if !path.global && path.segments.len() == 1 => {
@ -4480,7 +4492,7 @@ impl Resolver {
}
}
true
};
});
}
fn resolve_bare_identifier_pattern(&mut self, name: Ident)
@ -5056,14 +5068,14 @@ impl Resolver {
}
ExprLoop(_, Some(label)) => {
do self.with_label_rib |this| {
self.with_label_rib(|this| {
let def_like = DlDef(DefLabel(expr.id));
let rib = this.label_ribs[this.label_ribs.len() - 1];
// plain insert (no renaming)
rib.bindings.insert(label.name, def_like);
visit::walk_expr(this, expr, ());
}
})
}
ExprForLoop(*) => fail!("non-desugared expr_for_loop"),
@ -5316,7 +5328,7 @@ impl Resolver {
debug!("(recording def) recording {:?} for {:?}, last private {:?}",
def, node_id, lp);
self.last_private.insert(node_id, lp);
do self.def_map.insert_or_update_with(node_id, def) |_, old_value| {
self.def_map.insert_or_update_with(node_id, def, |_, old_value| {
// Resolve appears to "resolve" the same ID multiple
// times, so here is a sanity check it at least comes to
// the same conclusion! - nmatsakis
@ -5324,7 +5336,7 @@ impl Resolver {
self.session.bug(format!("node_id {:?} resolved first to {:?} \
and then {:?}", node_id, *old_value, def));
}
};
});
}
fn enforce_default_binding_mode(&mut self,

View file

@ -446,7 +446,7 @@ fn expand_nested_bindings<'r>(bcx: @mut Block,
bcx.val_to_str(val));
let _indenter = indenter();
do m.map |br| {
m.map(|br| {
match br.pats[col].node {
ast::PatIdent(_, ref path, Some(inner)) => {
let pats = vec::append(
@ -465,7 +465,7 @@ fn expand_nested_bindings<'r>(bcx: @mut Block,
}
_ => (*br).clone(),
}
}
})
}
fn assert_is_binding_or_wild(bcx: @mut Block, p: @ast::Pat) {
@ -543,13 +543,13 @@ fn enter_default<'r>(bcx: @mut Block,
let _indenter = indenter();
// Collect all of the matches that can match against anything.
let matches = do enter_match(bcx, dm, m, col, val) |p| {
let matches = enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatWild | ast::PatWildMulti | ast::PatTup(_) => Some(~[]),
ast::PatIdent(_, _, None) if pat_is_binding(dm, p) => Some(~[]),
_ => None
}
};
});
// Ok, now, this is pretty subtle. A "default" match is a match
// that needs to be considered if none of the actual checks on the
@ -616,7 +616,7 @@ fn enter_opt<'r>(bcx: @mut Block,
let tcx = bcx.tcx();
let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: dummy_sp()};
let mut i = 0;
do enter_match(bcx, tcx.def_map, m, col, val) |p| {
enter_match(bcx, tcx.def_map, m, col, val, |p| {
let answer = match p.node {
ast::PatEnum(*) |
ast::PatIdent(_, _, None) if pat_is_const(tcx.def_map, p) => {
@ -731,7 +731,7 @@ fn enter_opt<'r>(bcx: @mut Block,
};
i += 1;
answer
}
})
}
fn enter_rec_or_struct<'r>(bcx: @mut Block,
@ -749,7 +749,7 @@ fn enter_rec_or_struct<'r>(bcx: @mut Block,
let _indenter = indenter();
let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: dummy_sp()};
do enter_match(bcx, dm, m, col, val) |p| {
enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatStruct(_, ref fpats, _) => {
let mut pats = ~[];
@ -766,7 +766,7 @@ fn enter_rec_or_struct<'r>(bcx: @mut Block,
Some(vec::from_elem(fields.len(), dummy))
}
}
}
})
}
fn enter_tup<'r>(bcx: @mut Block,
@ -784,7 +784,7 @@ fn enter_tup<'r>(bcx: @mut Block,
let _indenter = indenter();
let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: dummy_sp()};
do enter_match(bcx, dm, m, col, val) |p| {
enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatTup(ref elts) => Some((*elts).clone()),
_ => {
@ -792,7 +792,7 @@ fn enter_tup<'r>(bcx: @mut Block,
Some(vec::from_elem(n_elts, dummy))
}
}
}
})
}
fn enter_tuple_struct<'r>(bcx: @mut Block,
@ -810,7 +810,7 @@ fn enter_tuple_struct<'r>(bcx: @mut Block,
let _indenter = indenter();
let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: dummy_sp()};
do enter_match(bcx, dm, m, col, val) |p| {
enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatEnum(_, Some(ref elts)) => Some((*elts).clone()),
_ => {
@ -818,7 +818,7 @@ fn enter_tuple_struct<'r>(bcx: @mut Block,
Some(vec::from_elem(n_elts, dummy))
}
}
}
})
}
fn enter_box<'r>(bcx: @mut Block,
@ -835,7 +835,7 @@ fn enter_box<'r>(bcx: @mut Block,
let _indenter = indenter();
let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: dummy_sp()};
do enter_match(bcx, dm, m, col, val) |p| {
enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatBox(sub) => {
Some(~[sub])
@ -845,7 +845,7 @@ fn enter_box<'r>(bcx: @mut Block,
Some(~[dummy])
}
}
}
})
}
fn enter_uniq<'r>(bcx: @mut Block,
@ -862,7 +862,7 @@ fn enter_uniq<'r>(bcx: @mut Block,
let _indenter = indenter();
let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: dummy_sp()};
do enter_match(bcx, dm, m, col, val) |p| {
enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatUniq(sub) => {
Some(~[sub])
@ -872,7 +872,7 @@ fn enter_uniq<'r>(bcx: @mut Block,
Some(~[dummy])
}
}
}
})
}
fn enter_region<'r>(bcx: @mut Block,
@ -889,7 +889,7 @@ fn enter_region<'r>(bcx: @mut Block,
let _indenter = indenter();
let dummy = @ast::Pat { id: 0, node: ast::PatWild, span: dummy_sp() };
do enter_match(bcx, dm, m, col, val) |p| {
enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatRegion(sub) => {
Some(~[sub])
@ -899,7 +899,7 @@ fn enter_region<'r>(bcx: @mut Block,
Some(~[dummy])
}
}
}
})
}
// Returns the options in one column of matches. An option is something that
@ -998,9 +998,9 @@ fn extract_variant_args(bcx: @mut Block,
val: ValueRef)
-> ExtractedBlock {
let _icx = push_ctxt("match::extract_variant_args");
let args = do vec::from_fn(adt::num_args(repr, disr_val)) |i| {
let args = vec::from_fn(adt::num_args(repr, disr_val), |i| {
adt::trans_field_ptr(bcx, repr, val, disr_val, i)
};
});
ExtractedBlock { vals: args, bcx: bcx }
}
@ -1028,7 +1028,7 @@ fn extract_vec_elems(bcx: @mut Block,
let (bcx, base, len) = vec_datum.get_vec_base_and_len(bcx, pat_span, pat_id, 0);
let vt = tvec::vec_types(bcx, node_id_type(bcx, pat_id));
let mut elems = do vec::from_fn(elem_count) |i| {
let mut elems = vec::from_fn(elem_count, |i| {
match slice {
None => GEPi(bcx, base, [i]),
Some(n) if i < n => GEPi(bcx, base, [i]),
@ -1039,7 +1039,7 @@ fn extract_vec_elems(bcx: @mut Block,
}
_ => unsafe { llvm::LLVMGetUndef(vt.llunit_ty.to_ref()) }
}
};
});
if slice.is_some() {
let n = slice.unwrap();
let slice_byte_offset = Mul(bcx, vt.llunit_size, C_uint(bcx.ccx(), n));
@ -1108,11 +1108,11 @@ fn pats_require_rooting(bcx: @mut Block,
m: &[Match],
col: uint)
-> bool {
do m.iter().any |br| {
m.iter().any(|br| {
let pat_id = br.pats[col].id;
let key = root_map_key {id: pat_id, derefs: 0u };
bcx.ccx().maps.root_map.contains_key(&key)
}
})
}
fn root_pats_as_necessary(mut bcx: @mut Block,
@ -1137,12 +1137,12 @@ fn root_pats_as_necessary(mut bcx: @mut Block,
// matches may be wildcards like _ or identifiers).
macro_rules! any_pat (
($m:expr, $pattern:pat) => (
do ($m).iter().any |br| {
($m).iter().any(|br| {
match br.pats[col].node {
$pattern => true,
_ => false
}
}
})
)
)
@ -1163,7 +1163,7 @@ fn any_tup_pat(m: &[Match], col: uint) -> bool {
}
fn any_tuple_struct_pat(bcx: @mut Block, m: &[Match], col: uint) -> bool {
do m.iter().any |br| {
m.iter().any(|br| {
let pat = br.pats[col];
match pat.node {
ast::PatEnum(_, Some(_)) => {
@ -1175,7 +1175,7 @@ fn any_tuple_struct_pat(bcx: @mut Block, m: &[Match], col: uint) -> bool {
}
_ => false
}
}
})
}
trait CustomFailureHandler {
@ -1335,11 +1335,11 @@ fn store_non_ref_bindings(bcx: @mut Block,
let datum = Datum {val: llval, ty: binding_info.ty,
mode: ByRef(ZeroMem)};
bcx = datum.store_to(bcx, INIT, lldest);
do opt_temp_cleanups.mutate |temp_cleanups| {
opt_temp_cleanups.mutate(|temp_cleanups| {
add_clean_temp_mem(bcx, lldest, binding_info.ty);
temp_cleanups.push(lldest);
temp_cleanups
};
});
}
TrByRef => {}
}
@ -1412,10 +1412,9 @@ fn compile_guard(bcx: @mut Block,
bcx = insert_lllocals(bcx, data.bindings_map, false);
let val = unpack_result!(bcx, {
do with_scope_result(bcx, guard_expr.info(),
"guard") |bcx| {
with_scope_result(bcx, guard_expr.info(), "guard", |bcx| {
expr::trans_to_datum(bcx, guard_expr).to_result()
}
})
});
let val = bool_to_i1(bcx, val);
@ -1424,13 +1423,13 @@ fn compile_guard(bcx: @mut Block,
revoke_clean(bcx, *llval);
}
return do with_cond(bcx, Not(bcx, val)) |bcx| {
return with_cond(bcx, Not(bcx, val), |bcx| {
// Guard does not match: free the values we copied,
// and remove all bindings from the lllocals table
let bcx = drop_bindings(bcx, data);
compile_submatch(bcx, m, vals, chk);
bcx
};
});
fn drop_bindings(bcx: @mut Block, data: &ArmData) -> @mut Block {
let mut bcx = bcx;
@ -1530,7 +1529,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
Some(ref rec_fields) => {
let pat_ty = node_id_type(bcx, pat_id);
let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
do expr::with_field_tys(tcx, pat_ty, None) |discr, field_tys| {
expr::with_field_tys(tcx, pat_ty, None, |discr, field_tys| {
let rec_vals = rec_fields.map(|field_name| {
let ix = ty::field_idx_strict(tcx, field_name.name, field_tys);
adt::trans_field_ptr(bcx, pat_repr, val, discr, ix)
@ -1540,7 +1539,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
enter_rec_or_struct(bcx, dm, m, col, *rec_fields, val),
vec::append(rec_vals, vals_left),
chk);
}
});
return;
}
None => {}
@ -1553,9 +1552,9 @@ fn compile_submatch_continue(mut bcx: @mut Block,
ty::ty_tup(ref elts) => elts.len(),
_ => ccx.sess.bug("non-tuple type in tuple pattern")
};
let tup_vals = do vec::from_fn(n_tup_elts) |i| {
let tup_vals = vec::from_fn(n_tup_elts, |i| {
adt::trans_field_ptr(bcx, tup_repr, val, 0, i)
};
});
compile_submatch(bcx, enter_tup(bcx, dm, m, col, val, n_tup_elts),
vec::append(tup_vals, vals_left), chk);
return;
@ -1575,9 +1574,9 @@ fn compile_submatch_continue(mut bcx: @mut Block,
}
let struct_repr = adt::represent_type(bcx.ccx(), struct_ty);
let llstructvals = do vec::from_fn(struct_element_count) |i| {
let llstructvals = vec::from_fn(struct_element_count, |i| {
adt::trans_field_ptr(bcx, struct_repr, val, 0, i)
};
});
compile_submatch(bcx,
enter_tuple_struct(bcx, dm, m, col, val,
struct_element_count),
@ -1696,8 +1695,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
compare => {
let t = node_id_type(bcx, pat_id);
let Result {bcx: after_cx, val: matches} = {
do with_scope_result(bcx, None,
"compaReScope") |bcx| {
with_scope_result(bcx, None, "compaReScope", |bcx| {
match trans_opt(bcx, opt) {
single_result(
Result {bcx, val}) => {
@ -1723,15 +1721,17 @@ fn compile_submatch_continue(mut bcx: @mut Block,
rslt(bcx, And(bcx, llge, llle))
}
}
}
})
};
bcx = sub_block(after_cx, "compare_next");
CondBr(after_cx, matches, opt_cx.llbb, bcx.llbb);
}
compare_vec_len => {
let Result {bcx: after_cx, val: matches} = {
do with_scope_result(bcx, None,
"compare_vec_len_scope") |bcx| {
with_scope_result(bcx,
None,
"compare_vec_len_scope",
|bcx| {
match trans_opt(bcx, opt) {
single_result(
Result {bcx, val}) => {
@ -1761,7 +1761,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
rslt(bcx, And(bcx, llge, llle))
}
}
}
})
};
bcx = sub_block(after_cx, "compare_vec_len_next");
@ -1821,9 +1821,9 @@ pub fn trans_match(bcx: @mut Block,
arms: &[ast::Arm],
dest: Dest) -> @mut Block {
let _icx = push_ctxt("match::trans_match");
do with_scope(bcx, match_expr.info(), "match") |bcx| {
with_scope(bcx, match_expr.info(), "match", |bcx| {
trans_match_inner(bcx, discr_expr, arms, dest)
}
})
}
fn create_bindings_map(bcx: @mut Block, pat: @ast::Pat) -> BindingsMap {
@ -1834,7 +1834,7 @@ fn create_bindings_map(bcx: @mut Block, pat: @ast::Pat) -> BindingsMap {
let ccx = bcx.ccx();
let tcx = bcx.tcx();
let mut bindings_map = HashMap::new();
do pat_bindings(tcx.def_map, pat) |bm, p_id, span, path| {
pat_bindings(tcx.def_map, pat, |bm, p_id, span, path| {
let ident = path_to_ident(path);
let variable_ty = node_id_type(bcx, p_id);
let llvariable_ty = type_of::type_of(ccx, variable_ty);
@ -1862,7 +1862,7 @@ fn create_bindings_map(bcx: @mut Block, pat: @ast::Pat) -> BindingsMap {
span: span,
ty: variable_ty
});
}
});
return bindings_map;
}
@ -2009,11 +2009,11 @@ pub fn store_local(bcx: @mut Block,
// create dummy memory for the variables if we have no
// value to store into them immediately
let tcx = bcx.tcx();
do pat_bindings(tcx.def_map, pat) |_, p_id, _, path| {
pat_bindings(tcx.def_map, pat, |_, p_id, _, path| {
bcx = mk_binding_alloca(
bcx, p_id, path, BindLocal,
|bcx, var_ty, llval| { zero_mem(bcx, llval, var_ty); bcx });
}
});
bcx
}
}
@ -2201,14 +2201,14 @@ fn bind_irrefutable_pat(bcx: @mut Block,
let tcx = bcx.tcx();
let pat_ty = node_id_type(bcx, pat.id);
let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
do expr::with_field_tys(tcx, pat_ty, None) |discr, field_tys| {
expr::with_field_tys(tcx, pat_ty, None, |discr, field_tys| {
for f in fields.iter() {
let ix = ty::field_idx_strict(tcx, f.ident.name, field_tys);
let fldptr = adt::trans_field_ptr(bcx, pat_repr, val,
discr, ix);
bcx = bind_irrefutable_pat(bcx, f.pat, fldptr, binding_mode);
}
}
})
}
ast::PatTup(ref elems) => {
let repr = adt::represent_node(bcx, pat.id);

View file

@ -135,9 +135,9 @@ fn represent_type_uncached(cx: &mut CrateContext, t: ty::t) -> Repr {
}
ty::ty_struct(def_id, ref substs) => {
let fields = ty::lookup_struct_fields(cx.tcx, def_id);
let mut ftys = do fields.map |field| {
let mut ftys = fields.map(|field| {
ty::lookup_field_type(cx.tcx, def_id, field.id, substs)
};
});
let packed = ty::lookup_packed(cx.tcx, def_id);
let dtor = ty::ty_dtor(cx.tcx, def_id).has_drop_flag();
if dtor { ftys.push(ty::mk_bool()); }
@ -259,12 +259,12 @@ impl Case {
}
fn get_cases(tcx: ty::ctxt, def_id: ast::DefId, substs: &ty::substs) -> ~[Case] {
do ty::enum_variants(tcx, def_id).map |vi| {
let arg_tys = do vi.args.map |&raw_ty| {
ty::enum_variants(tcx, def_id).map(|vi| {
let arg_tys = vi.args.map(|&raw_ty| {
ty::subst(tcx, substs, raw_ty)
};
});
Case { discr: vi.disr_val, tys: arg_tys }
}
})
}
@ -659,9 +659,7 @@ fn struct_field_ptr(bcx: @mut Block, st: &Struct, val: ValueRef, ix: uint,
let ccx = bcx.ccx();
let val = if needs_cast {
let fields = do st.fields.map |&ty| {
type_of::type_of(ccx, ty)
};
let fields = st.fields.map(|&ty| type_of::type_of(ccx, ty));
let real_ty = Type::struct_(fields, st.packed);
PointerCast(bcx, val, real_ty.ptr_to())
} else {
@ -725,10 +723,10 @@ pub fn trans_const(ccx: &mut CrateContext, r: &Repr, discr: Disr,
C_struct(build_const_struct(ccx, nonnull, vals), false)
} else {
assert_eq!(vals.len(), 0);
let vals = do nonnull.fields.iter().enumerate().map |(i, &ty)| {
let vals = nonnull.fields.iter().enumerate().map(|(i, &ty)| {
let llty = type_of::sizing_type_of(ccx, ty);
if i == ptrfield { C_null(llty) } else { C_undef(llty) }
}.collect::<~[ValueRef]>();
}).collect::<~[ValueRef]>();
C_struct(build_const_struct(ccx, nonnull, vals), false)
}
}

View file

@ -35,14 +35,14 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {
let mut output_types = ~[];
// Prepare the output operands
let outputs = do ia.outputs.map |&(c, out)| {
let outputs = ia.outputs.map(|&(c, out)| {
constraints.push(c);
let out_datum = unpack_datum!(bcx, trans_to_datum(bcx, out));
output_types.push(type_of(bcx.ccx(), out_datum.ty));
out_datum.val
};
});
for c in cleanups.iter() {
revoke_clean(bcx, *c);
@ -50,7 +50,7 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {
cleanups.clear();
// Now the input operands
let inputs = do ia.inputs.map |&(c, input)| {
let inputs = ia.inputs.map(|&(c, input)| {
constraints.push(c);
unpack_result!(bcx, {
@ -61,8 +61,7 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {
&mut cleanups,
callee::DontAutorefArg)
})
};
});
for c in cleanups.iter() {
revoke_clean(bcx, *c);
@ -103,11 +102,11 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {
ast::asm_intel => lib::llvm::AD_Intel
};
let r = do ia.asm.with_c_str |a| {
do constraints.with_c_str |c| {
let r = ia.asm.with_c_str(|a| {
constraints.with_c_str(|c| {
InlineAsmCall(bcx, a, c, inputs, output_type, ia.volatile, ia.alignstack, dialect)
}
};
})
});
// Again, based on how many outputs we have
if numOutputs == 1 {

View file

@ -94,12 +94,12 @@ pub use middle::trans::context::task_llcx;
local_data_key!(task_local_insn_key: ~[&'static str])
pub fn with_insn_ctxt(blk: |&[&'static str]|) {
do local_data::get(task_local_insn_key) |c| {
local_data::get(task_local_insn_key, |c| {
match c {
Some(ctx) => blk(*ctx),
None => ()
}
}
})
}
pub fn init_insn_ctxt() {
@ -111,23 +111,23 @@ pub struct _InsnCtxt { _x: () }
#[unsafe_destructor]
impl Drop for _InsnCtxt {
fn drop(&mut self) {
do local_data::modify(task_local_insn_key) |c| {
do c.map |mut ctx| {
local_data::modify(task_local_insn_key, |c| {
c.map(|mut ctx| {
ctx.pop();
ctx
}
}
})
})
}
}
pub fn push_ctxt(s: &'static str) -> _InsnCtxt {
debug!("new InsnCtxt: {}", s);
do local_data::modify(task_local_insn_key) |c| {
do c.map |mut ctx| {
local_data::modify(task_local_insn_key, |c| {
c.map(|mut ctx| {
ctx.push(s);
ctx
}
}
})
});
_InsnCtxt { _x: () }
}
@ -175,11 +175,11 @@ impl<'self> Drop for StatRecorder<'self> {
// only use this for foreign function ABIs and glue, use `decl_rust_fn` for Rust functions
pub fn decl_fn(llmod: ModuleRef, name: &str, cc: lib::llvm::CallConv, ty: Type) -> ValueRef {
let llfn: ValueRef = do name.with_c_str |buf| {
let llfn: ValueRef = name.with_c_str(|buf| {
unsafe {
llvm::LLVMGetOrInsertFunction(llmod, buf, ty.to_ref())
}
};
});
lib::llvm::SetFunctionCallConv(llfn, cc);
// Function addresses in Rust are never significant, allowing functions to be merged.
@ -211,9 +211,9 @@ fn get_extern_rust_fn(ccx: &mut CrateContext, inputs: &[ty::t], output: ty::t,
None => ()
}
let f = decl_rust_fn(ccx, inputs, output, name);
do csearch::get_item_attrs(ccx.tcx.cstore, did) |meta_items| {
csearch::get_item_attrs(ccx.tcx.cstore, did, |meta_items| {
set_llvm_fn_attrs(meta_items.iter().map(|&x| attr::mk_attr(x)).to_owned_vec(), f)
}
});
ccx.externs.insert(name.to_owned(), f);
f
}
@ -285,9 +285,9 @@ pub fn get_extern_const(externs: &mut ExternMap, llmod: ModuleRef,
None => ()
}
unsafe {
let c = do name.with_c_str |buf| {
let c = name.with_c_str(|buf| {
llvm::LLVMAddGlobal(llmod, ty.to_ref(), buf)
};
});
externs.insert(name.to_owned(), c);
return c;
}
@ -496,9 +496,9 @@ pub fn set_always_inline(f: ValueRef) {
}
pub fn set_no_split_stack(f: ValueRef) {
do "no-split-stack".with_c_str |buf| {
"no-split-stack".with_c_str(|buf| {
unsafe { llvm::LLVMAddFunctionAttrString(f, buf); }
}
})
}
// Double-check that we never ask LLVM to declare the same symbol twice. It
@ -563,11 +563,11 @@ pub fn get_res_dtor(ccx: @mut CrateContext,
// Structural comparison: a rather involved form of glue.
pub fn maybe_name_value(cx: &CrateContext, v: ValueRef, s: &str) {
if cx.sess.opts.save_temps {
do s.with_c_str |buf| {
s.with_c_str(|buf| {
unsafe {
llvm::LLVMSetValueName(v, buf)
}
}
})
}
}
@ -704,12 +704,12 @@ pub fn iter_structural_ty(cx: @mut Block, av: ValueRef, t: ty::t,
match ty::get(t).sty {
ty::ty_struct(*) => {
let repr = adt::represent_type(cx.ccx(), t);
do expr::with_field_tys(cx.tcx(), t, None) |discr, field_tys| {
expr::with_field_tys(cx.tcx(), t, None, |discr, field_tys| {
for (i, field_ty) in field_tys.iter().enumerate() {
let llfld_a = adt::trans_field_ptr(cx, repr, av, discr, i);
cx = f(cx, llfld_a, field_ty.mt.ty);
}
}
})
}
ty::ty_estr(ty::vstore_fixed(_)) |
ty::ty_evec(_, ty::vstore_fixed(_)) => {
@ -836,9 +836,9 @@ pub fn fail_if_zero(cx: @mut Block, span: Span, divrem: ast::BinOp,
ty_to_str(cx.ccx().tcx, rhs_t));
}
};
do with_cond(cx, is_zero) |bcx| {
with_cond(cx, is_zero, |bcx| {
controlflow::trans_fail(bcx, Some(span), text)
}
})
}
pub fn null_env_ptr(ccx: &CrateContext) -> ValueRef {
@ -959,12 +959,12 @@ pub fn need_invoke(bcx: @mut Block) -> bool {
pub fn have_cached_lpad(bcx: @mut Block) -> bool {
let mut res = false;
do in_lpad_scope_cx(bcx) |inf| {
in_lpad_scope_cx(bcx, |inf| {
match inf.landing_pad {
Some(_) => res = true,
None => res = false
}
}
});
return res;
}
@ -993,7 +993,7 @@ pub fn get_landing_pad(bcx: @mut Block) -> BasicBlockRef {
let mut cached = None;
let mut pad_bcx = bcx; // Guaranteed to be set below
do in_lpad_scope_cx(bcx) |inf| {
in_lpad_scope_cx(bcx, |inf| {
// If there is a valid landing pad still around, use it
match inf.landing_pad {
Some(target) => cached = Some(target),
@ -1002,7 +1002,7 @@ pub fn get_landing_pad(bcx: @mut Block) -> BasicBlockRef {
inf.landing_pad = Some(pad_bcx.llbb);
}
}
}
});
// Can't return from block above
match cached { Some(b) => return b, None => () }
// The landing pad return type (the type being propagated). Not sure what
@ -1187,9 +1187,9 @@ pub fn new_block(cx: @mut FunctionContext,
opt_node_info: Option<NodeInfo>)
-> @mut Block {
unsafe {
let llbb = do name.with_c_str |buf| {
let llbb = name.with_c_str(|buf| {
llvm::LLVMAppendBasicBlockInContext(cx.ccx.llcx, cx.llfn, buf)
};
});
let bcx = @mut Block::new(llbb,
parent,
is_lpad,
@ -1614,18 +1614,18 @@ pub struct BasicBlocks {
pub fn mk_staticallocas_basic_block(llfn: ValueRef) -> BasicBlockRef {
unsafe {
let cx = task_llcx();
do "static_allocas".with_c_str | buf| {
"static_allocas".with_c_str(|buf| {
llvm::LLVMAppendBasicBlockInContext(cx, llfn, buf)
}
})
}
}
pub fn mk_return_basic_block(llfn: ValueRef) -> BasicBlockRef {
unsafe {
let cx = task_llcx();
do "return".with_c_str |buf| {
"return".with_c_str(|buf| {
llvm::LLVMAppendBasicBlockInContext(cx, llfn, buf)
}
})
}
}
@ -1765,9 +1765,9 @@ pub fn create_llargs_for_fn_args(cx: @mut FunctionContext,
// Return an array containing the ValueRefs that we get from
// llvm::LLVMGetParam for each argument.
do vec::from_fn(args.len()) |i| {
vec::from_fn(args.len(), |i| {
unsafe { llvm::LLVMGetParam(cx.llfn, cx.arg_pos(i) as c_uint) }
}
})
}
pub fn copy_args_to_allocas(fcx: @mut FunctionContext,
@ -2076,7 +2076,7 @@ pub fn trans_enum_variant_or_tuple_like_struct<A:IdAndTy>(
llfndecl: ValueRef)
{
// Translate variant arguments to function arguments.
let fn_args = do args.map |varg| {
let fn_args = args.map(|varg| {
ast::arg {
ty: (*varg.ty()).clone(),
pat: ast_util::ident_to_pat(
@ -2085,7 +2085,7 @@ pub fn trans_enum_variant_or_tuple_like_struct<A:IdAndTy>(
special_idents::arg),
id: varg.id(),
}
};
});
let no_substs: &[ty::t] = [];
let ty_param_substs = match param_substs {
@ -2386,11 +2386,11 @@ pub fn create_entry_wrapper(ccx: @mut CrateContext,
"main"
};
let llfn = decl_cdecl_fn(ccx.llmod, main_name, llfty);
let llbb = do "top".with_c_str |buf| {
let llbb = "top".with_c_str(|buf| {
unsafe {
llvm::LLVMAppendBasicBlockInContext(ccx.llcx, llfn, buf)
}
};
});
let bld = ccx.builder.B;
unsafe {
llvm::LLVMPositionBuilderAtEnd(bld, llbb);
@ -2409,9 +2409,9 @@ pub fn create_entry_wrapper(ccx: @mut CrateContext,
};
let args = {
let opaque_rust_main = do "rust_main".with_c_str |buf| {
let opaque_rust_main = "rust_main".with_c_str(|buf| {
llvm::LLVMBuildPointerCast(bld, rust_main, Type::i8p().to_ref(), buf)
};
});
~[
C_null(Type::opaque_box(ccx).ptr_to()),
@ -2432,9 +2432,9 @@ pub fn create_entry_wrapper(ccx: @mut CrateContext,
(rust_main, args)
};
let result = do args.as_imm_buf |buf, len| {
let result = args.as_imm_buf(|buf, len| {
llvm::LLVMBuildCall(bld, start_fn, buf, len as c_uint, noname())
};
});
llvm::LLVMBuildRet(bld, result);
}
@ -2509,9 +2509,9 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::NodeId) -> ValueRef {
unsafe {
let llty = llvm::LLVMTypeOf(v);
let g = do sym.with_c_str |buf| {
let g = sym.with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, llty, buf)
};
});
if !ccx.reachable.contains(&id) {
lib::llvm::SetLinkage(g, lib::llvm::InternalLinkage);
@ -2571,9 +2571,9 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::NodeId) -> ValueRef {
match (attr::first_attr_value_str_by_name(i.attrs, "link_section")) {
Some(sect) => unsafe {
do sect.with_c_str |buf| {
sect.with_c_str(|buf| {
llvm::LLVMSetSection(v, buf);
}
})
},
None => ()
}
@ -2618,11 +2618,14 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::NodeId) -> ValueRef {
if attr::contains_name(ni.attrs, "crate_map") {
if *ccx.sess.building_library {
let s = "_rust_crate_map_toplevel";
let g = unsafe { do s.with_c_str |buf| {
let g = unsafe {
s.with_c_str(|buf| {
let ty = type_of(ccx, ty);
llvm::LLVMAddGlobal(ccx.llmod,
ty.to_ref(), buf)
} };
ty.to_ref(),
buf)
})
};
lib::llvm::SetLinkage(g,
lib::llvm::ExternalWeakLinkage);
g
@ -2632,11 +2635,11 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::NodeId) -> ValueRef {
} else {
let ident = foreign::link_name(ccx, ni);
unsafe {
do ident.with_c_str |buf| {
ident.with_c_str(|buf| {
let ty = type_of(ccx, ty);
llvm::LLVMAddGlobal(ccx.llmod,
ty.to_ref(), buf)
}
})
}
}
}
@ -2904,11 +2907,11 @@ pub fn decl_gc_metadata(ccx: &mut CrateContext, llmod_id: &str) {
}
let gc_metadata_name = ~"_gc_module_metadata_" + llmod_id;
let gc_metadata = do gc_metadata_name.with_c_str |buf| {
let gc_metadata = gc_metadata_name.with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, Type::i32().to_ref(), buf)
}
};
});
unsafe {
llvm::LLVMSetGlobalConstant(gc_metadata, True);
lib::llvm::SetLinkage(gc_metadata, lib::llvm::ExternalLinkage);
@ -2920,11 +2923,11 @@ pub fn create_module_map(ccx: &mut CrateContext) -> (ValueRef, uint) {
let str_slice_type = Type::struct_([Type::i8p(), ccx.int_type], false);
let elttype = Type::struct_([str_slice_type, ccx.int_type], false);
let maptype = Type::array(&elttype, ccx.module_data.len() as u64);
let map = do "_rust_mod_map".with_c_str |buf| {
let map = "_rust_mod_map".with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, maptype.to_ref(), buf)
}
};
});
lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage);
let mut elts: ~[ValueRef] = ~[];
@ -2973,11 +2976,11 @@ pub fn decl_crate_map(sess: session::Session, mapmeta: LinkMeta,
slicetype, // sub crate-maps
int_type.ptr_to(), // event loop factory
], false);
let map = do sym_name.with_c_str |buf| {
let map = sym_name.with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(llmod, maptype.to_ref(), buf)
}
};
});
// On windows we'd like to export the toplevel cratemap
// such that we can find it from libstd.
if targ_cfg.os == OsWin32 && "toplevel" == mapname {
@ -2999,11 +3002,11 @@ pub fn fill_crate_map(ccx: &mut CrateContext, map: ValueRef) {
cdata.name,
cstore::get_crate_vers(cstore, i),
cstore::get_crate_hash(cstore, i));
let cr = do nm.with_c_str |buf| {
let cr = nm.with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type.to_ref(), buf)
}
};
});
subcrates.push(p2i(ccx, cr));
i += 1;
}
@ -3011,9 +3014,9 @@ pub fn fill_crate_map(ccx: &mut CrateContext, map: ValueRef) {
match ccx.tcx.lang_items.event_loop_factory() {
Some(did) => unsafe {
let name = csearch::get_symbol(ccx.sess.cstore, did);
let global = do name.with_c_str |buf| {
let global = name.with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type.to_ref(), buf)
};
});
global
},
None => C_null(ccx.int_type.ptr_to())
@ -3023,9 +3026,9 @@ pub fn fill_crate_map(ccx: &mut CrateContext, map: ValueRef) {
};
unsafe {
let maptype = Type::array(&ccx.int_type, subcrates.len() as u64);
let vec_elements = do "_crate_map_child_vectors".with_c_str |buf| {
let vec_elements = "_crate_map_child_vectors".with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, maptype.to_ref(), buf)
};
});
lib::llvm::SetLinkage(vec_elements, lib::llvm::InternalLinkage);
llvm::LLVMSetInitializer(vec_elements, C_array(ccx.int_type, subcrates));
@ -3077,23 +3080,23 @@ pub fn write_metadata(cx: &CrateContext, crate: &ast::Crate) {
let encode_parms = crate_ctxt_to_encode_parms(cx, encode_inlined_item);
let llmeta = C_bytes(encoder::encode_metadata(encode_parms, crate));
let llconst = C_struct([llmeta], false);
let mut llglobal = do "rust_metadata".with_c_str |buf| {
let mut llglobal = "rust_metadata".with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(cx.llmod, val_ty(llconst).to_ref(), buf)
}
};
});
unsafe {
llvm::LLVMSetInitializer(llglobal, llconst);
do cx.sess.targ_cfg.target_strs.meta_sect_name.with_c_str |buf| {
cx.sess.targ_cfg.target_strs.meta_sect_name.with_c_str(|buf| {
llvm::LLVMSetSection(llglobal, buf)
};
});
lib::llvm::SetLinkage(llglobal, lib::llvm::InternalLinkage);
let t_ptr_i8 = Type::i8p();
llglobal = llvm::LLVMConstBitCast(llglobal, t_ptr_i8.to_ref());
let llvm_used = do "llvm.used".with_c_str |buf| {
let llvm_used = "llvm.used".with_c_str(|buf| {
llvm::LLVMAddGlobal(cx.llmod, Type::array(&t_ptr_i8, 1).to_ref(), buf)
};
});
lib::llvm::SetLinkage(llvm_used, lib::llvm::AppendingLinkage);
llvm::LLVMSetInitializer(llvm_used, C_array(t_ptr_i8, [llglobal]));
}
@ -3103,9 +3106,9 @@ pub fn write_metadata(cx: &CrateContext, crate: &ast::Crate) {
pub fn write_abi_version(ccx: &mut CrateContext) {
unsafe {
let llval = C_uint(ccx, abi::abi_version);
let llglobal = do "rust_abi_version".with_c_str |buf| {
let llglobal = "rust_abi_version".with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, val_ty(llval).to_ref(), buf)
};
});
llvm::LLVMSetInitializer(llglobal, llval);
llvm::LLVMSetGlobalConstant(llglobal, True);
}
@ -3161,12 +3164,12 @@ pub fn trans_crate(sess: session::Session,
let maptype = val_ty(ccx.crate_map).to_ref();
do "__rust_crate_map_toplevel".with_c_str |buf| {
"__rust_crate_map_toplevel".with_c_str(|buf| {
unsafe {
llvm::LLVMAddAlias(ccx.llmod, maptype,
ccx.crate_map, buf);
}
}
})
}
glue::emit_tydescs(ccx);
@ -3189,9 +3192,10 @@ pub fn trans_crate(sess: session::Session,
println!("n_inlines: {}", ccx.stats.n_inlines);
println!("n_closures: {}", ccx.stats.n_closures);
println("fn stats:");
do sort::quick_sort(ccx.stats.fn_stats) |&(_, _, insns_a), &(_, _, insns_b)| {
sort::quick_sort(ccx.stats.fn_stats,
|&(_, _, insns_a), &(_, _, insns_b)| {
insns_a > insns_b
}
});
for tuple in ccx.stats.fn_stats.iter() {
match *tuple {
(ref name, ms, insns) => {

View file

@ -51,7 +51,7 @@ impl Builder {
self.ccx.stats.n_llvm_insns += 1;
}
if self.ccx.sess.count_llvm_insns() {
do base::with_insn_ctxt |v| {
base::with_insn_ctxt(|v| {
let h = &mut self.ccx.stats.llvm_insns;
// Build version of path with cycles removed.
@ -85,7 +85,7 @@ impl Builder {
_ => 0u
};
h.insert(s, n+1u);
}
})
}
}
@ -419,9 +419,9 @@ impl Builder {
if name.is_empty() {
llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), noname())
} else {
do name.with_c_str |c| {
name.with_c_str(|c| {
llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), c)
}
})
}
}
}
@ -465,11 +465,11 @@ impl Builder {
let min = llvm::LLVMConstInt(t, lo, signed);
let max = llvm::LLVMConstInt(t, hi, signed);
do [min, max].as_imm_buf |ptr, len| {
[min, max].as_imm_buf(|ptr, len| {
llvm::LLVMSetMetadata(value, lib::llvm::MD_range as c_uint,
llvm::LLVMMDNodeInContext(self.ccx.llcx,
ptr, len as c_uint));
}
})
}
value
@ -518,7 +518,7 @@ impl Builder {
}
self.inbounds_gep(base, small_vec.slice(0, ixs.len()))
} else {
let v = do ixs.iter().map |i| { C_i32(*i as i32) }.collect::<~[ValueRef]>();
let v = ixs.iter().map(|i| C_i32(*i as i32)).collect::<~[ValueRef]>();
self.count_insn("gepi");
self.inbounds_gep(base, v)
}
@ -736,12 +736,12 @@ impl Builder {
let sanitized = text.replace("$", "");
let comment_text = format!("\\# {}", sanitized.replace("\n", "\n\t# "));
self.count_insn("inlineasm");
let asm = do comment_text.with_c_str |c| {
let asm = comment_text.with_c_str(|c| {
unsafe {
llvm::LLVMConstInlineAsm(Type::func([], &Type::void()).to_ref(),
c, noname(), False, False)
}
};
});
self.call(asm, [], []);
}
}
@ -757,10 +757,10 @@ impl Builder {
let alignstack = if alignstack { lib::llvm::True }
else { lib::llvm::False };
let argtys = do inputs.map |v| {
let argtys = inputs.map(|v| {
debug!("Asm Input Type: {:?}", self.ccx.tn.val_to_str(*v));
val_ty(*v)
};
});
debug!("Asm Output Type: {:?}", self.ccx.tn.type_to_str(output));
let fty = Type::func(argtys, &output);
@ -878,9 +878,9 @@ impl Builder {
let BB: BasicBlockRef = llvm::LLVMGetInsertBlock(self.llbuilder);
let FN: ValueRef = llvm::LLVMGetBasicBlockParent(BB);
let M: ModuleRef = llvm::LLVMGetGlobalParent(FN);
let T: ValueRef = do "llvm.trap".with_c_str |buf| {
let T: ValueRef = "llvm.trap".with_c_str(|buf| {
llvm::LLVMGetNamedFunction(M, buf)
};
});
assert!((T as int != 0));
let args: &[ValueRef] = [];
self.count_insn("trap");

View file

@ -616,7 +616,7 @@ pub fn trans_call_inner(in_cx: @mut Block,
*/
do base::with_scope_result(in_cx, call_info, "call") |cx| {
base::with_scope_result(in_cx, call_info, "call", |cx| {
let callee = get_callee(cx);
let mut bcx = callee.bcx;
let ccx = cx.ccx();
@ -776,7 +776,7 @@ pub fn trans_call_inner(in_cx: @mut Block,
}
rslt(bcx, llresult)
}
})
}
pub enum CallArgs<'self> {

View file

@ -438,10 +438,10 @@ pub fn make_closure_glue(cx: @mut Block,
ast::OwnedSigil | ast::ManagedSigil => {
let box_cell_v = GEPi(cx, v, [0u, abi::fn_field_box]);
let box_ptr_v = Load(cx, box_cell_v);
do with_cond(cx, IsNotNull(cx, box_ptr_v)) |bcx| {
with_cond(cx, IsNotNull(cx, box_ptr_v), |bcx| {
let closure_ty = ty::mk_opaque_closure_ptr(tcx, sigil);
glue_fn(bcx, box_cell_v, closure_ty)
}
})
}
}
}
@ -481,7 +481,7 @@ pub fn make_opaque_cbox_free_glue(
}
let ccx = bcx.ccx();
do with_cond(bcx, IsNotNull(bcx, cbox)) |bcx| {
with_cond(bcx, IsNotNull(bcx, cbox), |bcx| {
// Load the type descr found in the cbox
let lltydescty = ccx.tydesc_type.ptr_to();
let cbox = Load(bcx, cbox);
@ -498,5 +498,5 @@ pub fn make_opaque_cbox_free_glue(
glue::trans_exchange_free(bcx, cbox);
bcx
}
})
}

View file

@ -453,14 +453,14 @@ pub fn add_clean(bcx: @mut Block, val: ValueRef, t: ty::t) {
debug!("add_clean({}, {}, {})", bcx.to_str(), bcx.val_to_str(val), t.repr(bcx.tcx()));
let cleanup_type = cleanup_type(bcx.tcx(), t);
do in_scope_cx(bcx, None) |scope_info| {
in_scope_cx(bcx, None, |scope_info| {
scope_info.cleanups.push(clean(@TypeDroppingCleanupFunction {
val: val,
t: t,
} as @CleanupFunction,
cleanup_type));
grow_scope_clean(scope_info);
}
})
}
pub fn add_clean_temp_immediate(cx: @mut Block, val: ValueRef, ty: ty::t) {
@ -469,7 +469,7 @@ pub fn add_clean_temp_immediate(cx: @mut Block, val: ValueRef, ty: ty::t) {
cx.to_str(), cx.val_to_str(val),
ty.repr(cx.tcx()));
let cleanup_type = cleanup_type(cx.tcx(), ty);
do in_scope_cx(cx, None) |scope_info| {
in_scope_cx(cx, None, |scope_info| {
scope_info.cleanups.push(clean_temp(val,
@ImmediateTypeDroppingCleanupFunction {
val: val,
@ -477,7 +477,7 @@ pub fn add_clean_temp_immediate(cx: @mut Block, val: ValueRef, ty: ty::t) {
} as @CleanupFunction,
cleanup_type));
grow_scope_clean(scope_info);
}
})
}
pub fn add_clean_temp_mem(bcx: @mut Block, val: ValueRef, t: ty::t) {
@ -498,7 +498,7 @@ pub fn add_clean_temp_mem_in_scope_(bcx: @mut Block, scope_id: Option<ast::NodeI
bcx.to_str(), bcx.val_to_str(val),
t.repr(bcx.tcx()));
let cleanup_type = cleanup_type(bcx.tcx(), t);
do in_scope_cx(bcx, scope_id) |scope_info| {
in_scope_cx(bcx, scope_id, |scope_info| {
scope_info.cleanups.push(clean_temp(val,
@TypeDroppingCleanupFunction {
val: val,
@ -506,7 +506,7 @@ pub fn add_clean_temp_mem_in_scope_(bcx: @mut Block, scope_id: Option<ast::NodeI
} as @CleanupFunction,
cleanup_type));
grow_scope_clean(scope_info);
}
})
}
pub fn add_clean_return_to_mut(bcx: @mut Block,
scope_id: ast::NodeId,
@ -527,7 +527,7 @@ pub fn add_clean_return_to_mut(bcx: @mut Block,
bcx.to_str(),
bcx.val_to_str(frozen_val_ref),
bcx.val_to_str(bits_val_ref));
do in_scope_cx(bcx, Some(scope_id)) |scope_info| {
in_scope_cx(bcx, Some(scope_id), |scope_info| {
scope_info.cleanups.push(clean_temp(
frozen_val_ref,
@WriteGuardReleasingCleanupFunction {
@ -539,7 +539,7 @@ pub fn add_clean_return_to_mut(bcx: @mut Block,
} as @CleanupFunction,
normal_exit_only));
grow_scope_clean(scope_info);
}
})
}
pub fn add_clean_free(cx: @mut Block, ptr: ValueRef, heap: heap) {
let free_fn = match heap {
@ -554,12 +554,12 @@ pub fn add_clean_free(cx: @mut Block, ptr: ValueRef, heap: heap) {
} as @CleanupFunction
}
};
do in_scope_cx(cx, None) |scope_info| {
in_scope_cx(cx, None, |scope_info| {
scope_info.cleanups.push(clean_temp(ptr,
free_fn,
normal_exit_and_unwind));
grow_scope_clean(scope_info);
}
})
}
// Note that this only works for temporaries. We should, at some point, move
@ -567,7 +567,7 @@ pub fn add_clean_free(cx: @mut Block, ptr: ValueRef, heap: heap) {
// this will be more involved. For now, we simply zero out the local, and the
// drop glue checks whether it is zero.
pub fn revoke_clean(cx: @mut Block, val: ValueRef) {
do in_scope_cx(cx, None) |scope_info| {
in_scope_cx(cx, None, |scope_info| {
let cleanup_pos = scope_info.cleanups.iter().position(
|cu| match *cu {
clean_temp(v, _, _) if v == val => true,
@ -580,7 +580,7 @@ pub fn revoke_clean(cx: @mut Block, val: ValueRef) {
scope_info.cleanups.len()));
shrink_scope_clean(scope_info, *i);
}
}
})
}
pub fn block_cleanups(bcx: &mut Block) -> ~[cleanup] {
@ -844,9 +844,7 @@ pub fn C_integral(t: Type, u: u64, sign_extend: bool) -> ValueRef {
pub fn C_floating(s: &str, t: Type) -> ValueRef {
unsafe {
do s.with_c_str |buf| {
llvm::LLVMConstRealOfString(t.to_ref(), buf)
}
s.with_c_str(|buf| llvm::LLVMConstRealOfString(t.to_ref(), buf))
}
}
@ -896,14 +894,14 @@ pub fn C_cstr(cx: &mut CrateContext, s: @str) -> ValueRef {
None => ()
}
let sc = do s.as_imm_buf |buf, buflen| {
let sc = s.as_imm_buf(|buf, buflen| {
llvm::LLVMConstStringInContext(cx.llcx, buf as *c_char, buflen as c_uint, False)
};
});
let gsym = token::gensym("str");
let g = do format!("str{}", gsym).with_c_str |buf| {
let g = format!("str{}", gsym).with_c_str(|buf| {
llvm::LLVMAddGlobal(cx.llmod, val_ty(sc).to_ref(), buf)
};
});
llvm::LLVMSetInitializer(g, sc);
llvm::LLVMSetGlobalConstant(g, True);
lib::llvm::SetLinkage(g, lib::llvm::InternalLinkage);
@ -930,9 +928,9 @@ pub fn C_binary_slice(cx: &mut CrateContext, data: &[u8]) -> ValueRef {
let lldata = C_bytes(data);
let gsym = token::gensym("binary");
let g = do format!("binary{}", gsym).with_c_str |buf| {
let g = format!("binary{}", gsym).with_c_str(|buf| {
llvm::LLVMAddGlobal(cx.llmod, val_ty(lldata).to_ref(), buf)
};
});
llvm::LLVMSetInitializer(g, lldata);
llvm::LLVMSetGlobalConstant(g, True);
lib::llvm::SetLinkage(g, lib::llvm::InternalLinkage);
@ -954,17 +952,17 @@ pub fn C_zero_byte_arr(size: uint) -> ValueRef {
pub fn C_struct(elts: &[ValueRef], packed: bool) -> ValueRef {
unsafe {
do elts.as_imm_buf |ptr, len| {
elts.as_imm_buf(|ptr, len| {
llvm::LLVMConstStructInContext(base::task_llcx(), ptr, len as c_uint, packed as Bool)
}
})
}
}
pub fn C_named_struct(T: Type, elts: &[ValueRef]) -> ValueRef {
unsafe {
do elts.as_imm_buf |ptr, len| {
elts.as_imm_buf(|ptr, len| {
llvm::LLVMConstNamedStruct(T.to_ref(), ptr, len as c_uint)
}
})
}
}
@ -990,9 +988,9 @@ pub fn get_param(fndecl: ValueRef, param: uint) -> ValueRef {
pub fn const_get_elt(cx: &CrateContext, v: ValueRef, us: &[c_uint])
-> ValueRef {
unsafe {
let r = do us.as_imm_buf |p, len| {
let r = us.as_imm_buf(|p, len| {
llvm::LLVMConstExtractValue(v, p, len as c_uint)
};
});
debug!("const_get_elt(v={}, us={:?}, r={})",
cx.tn.val_to_str(v), us, cx.tn.val_to_str(r));
@ -1151,9 +1149,9 @@ pub fn node_id_type_params(bcx: &mut Block, id: ast::NodeId) -> ~[ty::t] {
match bcx.fcx.param_substs {
Some(substs) => {
do params.iter().map |t| {
params.iter().map(|t| {
ty::subst_tps(tcx, substs.tys, substs.self_ty, *t)
}.collect()
}).collect()
}
_ => params
}
@ -1207,9 +1205,9 @@ pub fn resolve_vtable_under_param_substs(tcx: ty::ctxt,
typeck::vtable_static(trait_id, ref tys, sub) => {
let tys = match param_substs {
Some(substs) => {
do tys.iter().map |t| {
tys.iter().map(|t| {
ty::subst_tps(tcx, substs.tys, substs.self_ty, *t)
}.collect()
}).collect()
}
_ => tys.to_owned()
};

View file

@ -100,9 +100,9 @@ fn const_vec(cx: @mut CrateContext, e: &ast::Expr, es: &[@ast::Expr]) -> (ValueR
fn const_addr_of(cx: &mut CrateContext, cv: ValueRef) -> ValueRef {
unsafe {
let gv = do "const".with_c_str |name| {
let gv = "const".with_c_str(|name| {
llvm::LLVMAddGlobal(cx.llmod, val_ty(cv).to_ref(), name)
};
});
llvm::LLVMSetInitializer(gv, cv);
llvm::LLVMSetGlobalConstant(gv, True);
SetLinkage(gv, PrivateLinkage);
@ -191,12 +191,12 @@ pub fn const_expr(cx: @mut CrateContext, e: &ast::Expr) -> (ValueRef, bool) {
Some(@ty::AutoDerefRef(ref adj)) => {
let mut ty = ety;
let mut maybe_ptr = None;
do adj.autoderefs.times {
adj.autoderefs.times(|| {
let (dv, dt) = const_deref(cx, llconst, ty, false);
maybe_ptr = Some(llconst);
llconst = dv;
ty = dt;
}
});
match adj.autoref {
None => { }
@ -385,10 +385,10 @@ fn const_expr_unadjusted(cx: @mut CrateContext,
let bt = ty::expr_ty_adjusted(cx.tcx, base);
let brepr = adt::represent_type(cx, bt);
let (bv, inlineable) = const_expr(cx, base);
do expr::with_field_tys(cx.tcx, bt, None) |discr, field_tys| {
expr::with_field_tys(cx.tcx, bt, None, |discr, field_tys| {
let ix = ty::field_idx_strict(cx.tcx, field.name, field_tys);
(adt::const_get_field(cx, brepr, bv, discr, ix), inlineable)
}
})
}
ast::ExprIndex(_, base, index) => {
@ -504,8 +504,7 @@ fn const_expr_unadjusted(cx: @mut CrateContext,
None => None
};
do expr::with_field_tys(tcx, ety, Some(e.id))
|discr, field_tys| {
expr::with_field_tys(tcx, ety, Some(e.id), |discr, field_tys| {
let cs = field_tys.iter().enumerate()
.map(|(ix, &field_ty)| {
match fs.iter().find(|f| field_ty.ident.name == f.ident.node.name) {
@ -524,7 +523,7 @@ fn const_expr_unadjusted(cx: @mut CrateContext,
let (cs, inlineable) = vec::unzip(cs.move_iter());
(adt::trans_const(cx, repr, discr, cs),
inlineable.iter().fold(true, |a, &b| a && b))
}
})
}
ast::ExprVec(ref es, ast::MutImmutable) => {
let (v, _, inlineable) = const_vec(cx, e, *es);
@ -541,9 +540,9 @@ fn const_expr_unadjusted(cx: @mut CrateContext,
ast::ExprVec(ref es, ast::MutImmutable) => {
let (cv, llunitty, _) = const_vec(cx, e, *es);
let llty = val_ty(cv);
let gv = do "const".with_c_str |name| {
let gv = "const".with_c_str(|name| {
llvm::LLVMAddGlobal(cx.llmod, llty.to_ref(), name)
};
});
llvm::LLVMSetInitializer(gv, cv);
llvm::LLVMSetGlobalConstant(gv, True);
SetLinkage(gv, PrivateLinkage);

View file

@ -131,17 +131,15 @@ impl CrateContext {
unsafe {
let llcx = llvm::LLVMContextCreate();
set_task_llcx(llcx);
let llmod = do name.with_c_str |buf| {
let llmod = name.with_c_str(|buf| {
llvm::LLVMModuleCreateWithNameInContext(buf, llcx)
};
});
let data_layout: &str = sess.targ_cfg.target_strs.data_layout;
let targ_triple: &str = sess.targ_cfg.target_strs.target_triple;
do data_layout.with_c_str |buf| {
llvm::LLVMSetDataLayout(llmod, buf)
};
do targ_triple.with_c_str |buf| {
data_layout.with_c_str(|buf| llvm::LLVMSetDataLayout(llmod, buf));
targ_triple.with_c_str(|buf| {
llvm::LLVMRustSetNormalizedTarget(llmod, buf)
};
});
let targ_cfg = sess.targ_cfg;
let td = mk_target_data(sess.targ_cfg.target_strs.data_layout);

View file

@ -74,11 +74,11 @@ pub fn trans_if(bcx: @mut Block,
None => {}
}
// if true { .. } [else { .. }]
return do with_scope(bcx, thn.info(), "if_true_then") |bcx| {
return with_scope(bcx, thn.info(), "if_true_then", |bcx| {
let bcx_out = trans_block(bcx, thn, dest);
debuginfo::clear_source_location(bcx.fcx);
trans_block_cleanups(bcx_out, block_cleanups(bcx))
}
})
} else {
let mut trans = TransItemVisitor { ccx: bcx.fcx.ccx } ;
trans.visit_block(thn, ());
@ -86,11 +86,14 @@ pub fn trans_if(bcx: @mut Block,
match els {
// if false { .. } else { .. }
Some(elexpr) => {
return do with_scope(bcx, elexpr.info(), "if_false_then") |bcx| {
return with_scope(bcx,
elexpr.info(),
"if_false_then",
|bcx| {
let bcx_out = trans_if_else(bcx, elexpr, dest);
debuginfo::clear_source_location(bcx.fcx);
trans_block_cleanups(bcx_out, block_cleanups(bcx))
}
})
}
// if false { .. }
None => return bcx,

View file

@ -290,9 +290,9 @@ impl Datum {
ByRef(_) => {
let cast = PointerCast(bcx, dst, val_ty(self.val));
let cmp = ICmp(bcx, lib::llvm::IntNE, cast, self.val);
do with_cond(bcx, cmp) |bcx| {
with_cond(bcx, cmp, |bcx| {
self.copy_to_no_check(bcx, action, dst)
}
})
}
ByValue => {
self.copy_to_no_check(bcx, action, dst)

View file

@ -254,8 +254,7 @@ pub fn create_local_var_metadata(bcx: @mut Block,
let cx = bcx.ccx();
let def_map = cx.tcx.def_map;
do pat_util::pat_bindings(def_map, local.pat) |_, node_id, span, path_ref| {
pat_util::pat_bindings(def_map, local.pat, |_, node_id, span, path_ref| {
let var_ident = ast_util::path_to_ident(path_ref);
let var_type = node_id_type(bcx, node_id);
@ -276,7 +275,7 @@ pub fn create_local_var_metadata(bcx: @mut Block,
DirectVariable { alloca: llptr },
LocalVariable,
span);
}
})
}
/// Creates debug information for a variable captured in a closure.
@ -449,8 +448,7 @@ pub fn create_argument_metadata(bcx: @mut Block,
let def_map = cx.tcx.def_map;
let scope_metadata = bcx.fcx.debug_context.get_ref(cx, arg.pat.span).fn_metadata;
do pat_util::pat_bindings(def_map, arg.pat) |_, node_id, span, path_ref| {
pat_util::pat_bindings(def_map, arg.pat, |_, node_id, span, path_ref| {
let llptr = match bcx.fcx.llargs.find_copy(&node_id) {
Some(v) => v,
None => {
@ -481,7 +479,7 @@ pub fn create_argument_metadata(bcx: @mut Block,
DirectVariable { alloca: llptr },
ArgumentVariable(argument_index),
span);
}
})
}
/// Sets the current debug location at the beginning of the span.
@ -655,8 +653,8 @@ pub fn create_function_debug_context(cx: &mut CrateContext,
let scope_line = get_scope_line(cx, top_level_block, loc.line);
let fn_metadata = do function_name.with_c_str |function_name| {
do linkage_name.with_c_str |linkage_name| {
let fn_metadata = function_name.with_c_str(|function_name| {
linkage_name.with_c_str(|linkage_name| {
unsafe {
llvm::LLVMDIBuilderCreateFunction(
DIB(cx),
@ -675,7 +673,8 @@ pub fn create_function_debug_context(cx: &mut CrateContext,
template_parameters,
ptr::null())
}
}};
})
});
// Initialize fn debug context (including scope map and namespace map)
let mut fn_debug_context = ~FunctionDebugContextData {
@ -685,7 +684,7 @@ pub fn create_function_debug_context(cx: &mut CrateContext,
source_locations_enabled: false,
};
let arg_pats = do fn_decl.inputs.map |arg_ref| { arg_ref.pat };
let arg_pats = fn_decl.inputs.map(|arg_ref| arg_ref.pat);
populate_scope_map(cx, arg_pats, top_level_block, fn_metadata, &mut fn_debug_context.scope_map);
return FunctionDebugContext(fn_debug_context);
@ -780,7 +779,7 @@ pub fn create_function_debug_context(cx: &mut CrateContext,
let ident = special_idents::type_self;
let param_metadata = do token::ident_to_str(&ident).with_c_str |name| {
let param_metadata = token::ident_to_str(&ident).with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreateTemplateTypeParameter(
DIB(cx),
@ -791,7 +790,7 @@ pub fn create_function_debug_context(cx: &mut CrateContext,
0,
0)
}
};
});
template_params.push(param_metadata);
}
@ -818,7 +817,7 @@ pub fn create_function_debug_context(cx: &mut CrateContext,
// Again, only create type information if extra_debuginfo is enabled
if cx.sess.opts.extra_debuginfo {
let actual_type_metadata = type_metadata(cx, actual_type, codemap::dummy_sp());
let param_metadata = do token::ident_to_str(&ident).with_c_str |name| {
let param_metadata = token::ident_to_str(&ident).with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreateTemplateTypeParameter(
DIB(cx),
@ -829,7 +828,7 @@ pub fn create_function_debug_context(cx: &mut CrateContext,
0,
0)
}
};
});
template_params.push(param_metadata);
}
}
@ -875,11 +874,11 @@ fn compile_unit_metadata(cx: @mut CrateContext) {
let work_dir = cx.sess.working_dir.as_str().unwrap();
let producer = format!("rustc version {}", env!("CFG_VERSION"));
do crate_name.with_c_str |crate_name| {
do work_dir.with_c_str |work_dir| {
do producer.with_c_str |producer| {
do "".with_c_str |flags| {
do "".with_c_str |split_name| {
crate_name.with_c_str(|crate_name| {
work_dir.with_c_str(|work_dir| {
producer.with_c_str(|producer| {
"".with_c_str(|flags| {
"".with_c_str(|split_name| {
unsafe {
llvm::LLVMDIBuilderCreateCompileUnit(
dcx.builder,
@ -892,7 +891,11 @@ fn compile_unit_metadata(cx: @mut CrateContext) {
0,
split_name);
}
}}}}};
})
})
})
})
});
}
fn declare_local(bcx: @mut Block,
@ -917,7 +920,7 @@ fn declare_local(bcx: @mut Block,
CapturedVariable => 0
} as c_uint;
let (var_alloca, var_metadata) = do name.with_c_str |name| {
let (var_alloca, var_metadata) = name.with_c_str(|name| {
match variable_access {
DirectVariable { alloca } => (
alloca,
@ -952,7 +955,7 @@ fn declare_local(bcx: @mut Block,
}
)
}
};
});
set_debug_location(cx, DebugLocation::new(scope_metadata, loc.line, *loc.col));
unsafe {
@ -992,12 +995,13 @@ fn file_metadata(cx: &mut CrateContext, full_path: &str) -> DIFile {
};
let file_metadata =
do file_name.with_c_str |file_name| {
do work_dir.with_c_str |work_dir| {
file_name.with_c_str(|file_name| {
work_dir.with_c_str(|work_dir| {
unsafe {
llvm::LLVMDIBuilderCreateFile(DIB(cx), file_name, work_dir)
}
}};
})
});
debug_context(cx).created_files.insert(full_path.to_owned(), file_metadata);
return file_metadata;
@ -1053,7 +1057,7 @@ fn basic_type_metadata(cx: &mut CrateContext, t: ty::t) -> DIType {
let llvm_type = type_of::type_of(cx, t);
let (size, align) = size_and_align_of(cx, llvm_type);
let ty_metadata = do name.with_c_str |name| {
let ty_metadata = name.with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreateBasicType(
DIB(cx),
@ -1062,7 +1066,7 @@ fn basic_type_metadata(cx: &mut CrateContext, t: ty::t) -> DIType {
bytes_to_bits(align),
encoding)
}
};
});
return ty_metadata;
}
@ -1074,7 +1078,7 @@ fn pointer_type_metadata(cx: &mut CrateContext,
let pointer_llvm_type = type_of::type_of(cx, pointer_type);
let (pointer_size, pointer_align) = size_and_align_of(cx, pointer_llvm_type);
let name = ppaux::ty_to_str(cx.tcx, pointer_type);
let ptr_metadata = do name.with_c_str |name| {
let ptr_metadata = name.with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreatePointerType(
DIB(cx),
@ -1083,7 +1087,7 @@ fn pointer_type_metadata(cx: &mut CrateContext,
bytes_to_bits(pointer_align),
name)
}
};
});
return ptr_metadata;
}
@ -1100,7 +1104,7 @@ struct StructMemberDescriptionFactory {
impl MemberDescriptionFactory for StructMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &mut CrateContext)
-> ~[MemberDescription] {
do self.fields.map |field| {
self.fields.map(|field| {
let name = if field.ident.name == special_idents::unnamed_field.name {
@""
} else {
@ -1113,7 +1117,7 @@ impl MemberDescriptionFactory for StructMemberDescriptionFactory {
type_metadata: type_metadata(cx, field.mt.ty, self.span),
offset: ComputedMemberOffset,
}
}
})
}
}
@ -1210,14 +1214,14 @@ struct TupleMemberDescriptionFactory {
impl MemberDescriptionFactory for TupleMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &mut CrateContext)
-> ~[MemberDescription] {
do self.component_types.map |&component_type| {
self.component_types.map(|&component_type| {
MemberDescription {
name: @"",
llvm_type: type_of::type_of(cx, component_type),
type_metadata: type_metadata(cx, component_type, self.span),
offset: ComputedMemberOffset,
}
}
})
}
}
@ -1267,10 +1271,10 @@ impl MemberDescriptionFactory for GeneralMemberDescriptionFactory {
_ => cx.sess.bug("unreachable")
};
do struct_defs
struct_defs
.iter()
.enumerate()
.map |(i, struct_def)| {
.map(|(i, struct_def)| {
let (variant_type_metadata, variant_llvm_type, member_desc_factory) =
describe_variant(cx,
struct_def,
@ -1295,7 +1299,7 @@ impl MemberDescriptionFactory for GeneralMemberDescriptionFactory {
type_metadata: variant_type_metadata,
offset: FixedMemberOffset { bytes: 0 },
}
}.collect()
}).collect()
}
}
@ -1308,7 +1312,7 @@ struct EnumVariantMemberDescriptionFactory {
impl MemberDescriptionFactory for EnumVariantMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &mut CrateContext)
-> ~[MemberDescription] {
do self.args.iter().enumerate().map |(i, &(name, ty))| {
self.args.iter().enumerate().map(|(i, &(name, ty))| {
MemberDescription {
name: name,
llvm_type: type_of::type_of(cx, ty),
@ -1318,7 +1322,7 @@ impl MemberDescriptionFactory for EnumVariantMemberDescriptionFactory {
},
offset: ComputedMemberOffset,
}
}.collect()
}).collect()
}
}
@ -1360,8 +1364,8 @@ fn describe_variant(cx: &mut CrateContext,
// Get the argument names from the enum variant info
let mut arg_names = match variant_info.arg_names {
Some(ref names) => do names.map |ident| { token::ident_to_str(ident) },
None => do variant_info.args.map |_| { @"" }
Some(ref names) => names.map(|ident| token::ident_to_str(ident)),
None => variant_info.args.map(|_| @"")
};
// If this is not a univariant enum, there is also the (unnamed) discriminant field
@ -1420,14 +1424,14 @@ fn prepare_enum_metadata(cx: &mut CrateContext,
let name: &str = token::ident_to_str(&v.name);
let discriminant_value = v.disr_val as c_ulonglong;
do name.with_c_str |name| {
name.with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreateEnumerator(
DIB(cx),
name,
discriminant_value)
}
}
})
})
.collect();
@ -1436,7 +1440,7 @@ fn prepare_enum_metadata(cx: &mut CrateContext,
let (discriminant_size, discriminant_align) = size_and_align_of(cx, discriminant_llvm_type);
let discriminant_base_type_metadata = type_metadata(cx, adt::ty_of_inttype(inttype),
codemap::dummy_sp());
do enum_name.with_c_str |enum_name| {
enum_name.with_c_str(|enum_name| {
unsafe {
llvm::LLVMDIBuilderCreateEnumerationType(
DIB(cx),
@ -1449,7 +1453,7 @@ fn prepare_enum_metadata(cx: &mut CrateContext,
create_DIArray(DIB(cx), enumerators_metadata),
discriminant_base_type_metadata)
}
}
})
};
let type_rep = adt::represent_type(cx, enum_type);
@ -1482,7 +1486,7 @@ fn prepare_enum_metadata(cx: &mut CrateContext,
let enum_llvm_type = type_of::type_of(cx, enum_type);
let (enum_type_size, enum_type_align) = size_and_align_of(cx, enum_llvm_type);
let enum_metadata = do enum_name.with_c_str |enum_name| {
let enum_metadata = enum_name.with_c_str(|enum_name| {
unsafe {
llvm::LLVMDIBuilderCreateUnionType(
DIB(cx),
@ -1495,7 +1499,8 @@ fn prepare_enum_metadata(cx: &mut CrateContext,
0, // Flags
ptr::null(),
0) // RuntimeLang
}};
}
});
UnfinishedMetadata {
cache_id: cache_id_for_type(enum_type),
@ -1605,7 +1610,7 @@ fn set_members_of_composite_type(cx: &mut CrateContext,
ComputedMemberOffset => machine::llelement_offset(cx, composite_llvm_type, i)
};
do member_description.name.with_c_str |member_name| {
member_description.name.with_c_str(|member_name| {
unsafe {
llvm::LLVMDIBuilderCreateMemberType(
DIB(cx),
@ -1619,7 +1624,7 @@ fn set_members_of_composite_type(cx: &mut CrateContext,
0,
member_description.type_metadata)
}
}
})
})
.collect();
@ -1649,8 +1654,8 @@ fn create_struct_stub(cx: &mut CrateContext,
};
return unsafe {
do struct_type_name.with_c_str |name| {
do unique_id.with_c_str |unique_id| {
struct_type_name.with_c_str(|name| {
unique_id.with_c_str(|unique_id| {
// LLVMDIBuilderCreateStructType() wants an empty array. A null pointer will lead to
// hard to trace and debug LLVM assertions later on in llvm/lib/IR/Value.cpp
let empty_array = create_DIArray(DIB(cx), []);
@ -1669,7 +1674,9 @@ fn create_struct_stub(cx: &mut CrateContext,
0,
ptr::null(),
unique_id)
}}};
})
})
};
}
fn boxed_type_metadata(cx: &mut CrateContext,
@ -1979,7 +1986,7 @@ fn unimplemented_type_metadata(cx: &mut CrateContext, t: ty::t) -> DIType {
debug!("unimplemented_type_metadata: {:?}", ty::get(t));
let name = ppaux::ty_to_str(cx.tcx, t);
let metadata = do format!("NYI<{}>", name).with_c_str |name| {
let metadata = format!("NYI<{}>", name).with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreateBasicType(
DIB(cx),
@ -1988,7 +1995,7 @@ fn unimplemented_type_metadata(cx: &mut CrateContext, t: ty::t) -> DIType {
8_u64,
DW_ATE_unsigned as c_uint)
}
};
});
return metadata;
}
@ -2261,10 +2268,10 @@ fn populate_scope_map(cx: &mut CrateContext,
// Push argument identifiers onto the stack so arguments integrate nicely with variable
// shadowing.
for &arg_pat in arg_pats.iter() {
do pat_util::pat_bindings(def_map, arg_pat) |_, _, _, path_ref| {
pat_util::pat_bindings(def_map, arg_pat, |_, _, _, path_ref| {
let ident = ast_util::path_to_ident(path_ref);
scope_stack.push(ScopeStackEntry { scope_metadata: fn_metadata, ident: Some(ident) });
}
})
}
walk_block(cx, fn_entry_block, &mut scope_stack, scope_map);
@ -2550,11 +2557,13 @@ fn populate_scope_map(cx: &mut CrateContext,
ast::ExprIf(@ref cond_exp, ref then_block, ref opt_else_exp) => {
walk_expr(cx, cond_exp, scope_stack, scope_map);
do with_new_scope(cx, then_block.span, scope_stack, scope_map) |cx,
with_new_scope(cx,
then_block.span,
scope_stack,
scope_map| {
scope_map,
|cx, scope_stack, scope_map| {
walk_block(cx, then_block, scope_stack, scope_map);
}
});
match *opt_else_exp {
Some(@ref else_exp) => walk_expr(cx, else_exp, scope_stack, scope_map),
@ -2565,11 +2574,13 @@ fn populate_scope_map(cx: &mut CrateContext,
ast::ExprWhile(@ref cond_exp, ref loop_body) => {
walk_expr(cx, cond_exp, scope_stack, scope_map);
do with_new_scope(cx, loop_body.span, scope_stack, scope_map) |cx,
with_new_scope(cx,
loop_body.span,
scope_stack,
scope_map| {
scope_map,
|cx, scope_stack, scope_map| {
walk_block(cx, loop_body, scope_stack, scope_map);
}
})
}
ast::ExprForLoop(_, _, _, _) => {
@ -2584,24 +2595,28 @@ fn populate_scope_map(cx: &mut CrateContext,
ast::ExprLoop(ref block, _) |
ast::ExprBlock(ref block) => {
do with_new_scope(cx, block.span, scope_stack, scope_map) |cx,
with_new_scope(cx,
block.span,
scope_stack,
scope_map| {
scope_map,
|cx, scope_stack, scope_map| {
walk_block(cx, block, scope_stack, scope_map);
}
})
}
ast::ExprFnBlock(ast::fn_decl { inputs: ref inputs, _ }, ref block) |
ast::ExprProc(ast::fn_decl { inputs: ref inputs, _ }, ref block) => {
do with_new_scope(cx, block.span, scope_stack, scope_map) |cx,
with_new_scope(cx,
block.span,
scope_stack,
scope_map| {
scope_map,
|cx, scope_stack, scope_map| {
for &ast::arg { pat: pattern, _ } in inputs.iter() {
walk_pattern(cx, pattern, scope_stack, scope_map);
}
walk_block(cx, block, scope_stack, scope_map);
}
})
}
// ast::expr_loop_body(@ref inner_exp) |
@ -2646,9 +2661,11 @@ fn populate_scope_map(cx: &mut CrateContext,
for arm_ref in arms.iter() {
let arm_span = arm_ref.pats[0].span;
do with_new_scope(cx, arm_span, scope_stack, scope_map) |cx,
with_new_scope(cx,
arm_span,
scope_stack,
scope_map| {
scope_map,
|cx, scope_stack, scope_map| {
for &pat in arm_ref.pats.iter() {
walk_pattern(cx, pat, scope_stack, scope_map);
}
@ -2658,7 +2675,7 @@ fn populate_scope_map(cx: &mut CrateContext,
}
walk_block(cx, &arm_ref.body, scope_stack, scope_map);
}
})
}
}
@ -2769,14 +2786,14 @@ fn namespace_for_item(cx: &mut CrateContext,
let namespace_name = token::ident_to_str(&ident);
let namespace_metadata = unsafe {
do namespace_name.with_c_str |namespace_name| {
namespace_name.with_c_str(|namespace_name| {
llvm::LLVMDIBuilderCreateNameSpace(
DIB(cx),
parent_scope,
namespace_name,
ptr::null(), // cannot reconstruct file ...
0) // ... or line information, but that's not so important.
}
})
};
let node = @NamespaceTreeNode {

View file

@ -693,10 +693,12 @@ fn trans_rvalue_dps_unadjusted(bcx: @mut Block, expr: &ast::Expr,
return _match::trans_match(bcx, expr, discr, *arms, dest);
}
ast::ExprBlock(ref blk) => {
return do base::with_scope(bcx, blk.info(),
"block-expr body") |bcx| {
return base::with_scope(bcx,
blk.info(),
"block-expr body",
|bcx| {
controlflow::trans_block(bcx, blk, dest)
};
});
}
ast::ExprStruct(_, ref fields, base) => {
return trans_rec_or_struct(bcx, (*fields), base, expr.span, expr.id, dest);
@ -930,17 +932,18 @@ fn trans_lvalue_unadjusted(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
let base_datum = unpack_datum!(bcx, trans_to_datum(bcx, base));
let repr = adt::represent_type(bcx.ccx(), base_datum.ty);
do with_field_tys(bcx.tcx(), base_datum.ty, None) |discr, field_tys| {
with_field_tys(bcx.tcx(), base_datum.ty, None, |discr, field_tys| {
let ix = ty::field_idx_strict(bcx.tcx(), field.name, field_tys);
DatumBlock {
datum: do base_datum.get_element(bcx,
datum: base_datum.get_element(bcx,
field_tys[ix].mt.ty,
ZeroMem) |srcval| {
ZeroMem,
|srcval| {
adt::trans_field_ptr(bcx, repr, srcval, discr, ix)
},
}),
bcx: bcx
}
}
})
}
fn trans_index(bcx: @mut Block,
@ -984,9 +987,9 @@ fn trans_lvalue_unadjusted(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
let bounds_check = ICmp(bcx, lib::llvm::IntUGE, ix_val, len);
let expect = ccx.intrinsics.get_copy(&("llvm.expect.i1"));
let expected = Call(bcx, expect, [bounds_check, C_i1(false)], []);
let bcx = do with_cond(bcx, expected) |bcx| {
let bcx = with_cond(bcx, expected, |bcx| {
controlflow::trans_fail_bounds_check(bcx, index_expr.span, ix_val, len)
};
});
let elt = InBoundsGEP(bcx, base, [ix_val]);
let elt = PointerCast(bcx, elt, vt.llunit_ty.ptr_to());
return DatumBlock {
@ -1044,11 +1047,11 @@ fn trans_lvalue_unadjusted(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
let symbol = csearch::get_symbol(
bcx.ccx().sess.cstore,
did);
let llval = do symbol.with_c_str |buf| {
let llval = symbol.with_c_str(|buf| {
llvm::LLVMAddGlobal(bcx.ccx().llmod,
llty.to_ref(),
buf)
};
});
let extern_const_values = &mut bcx.ccx().extern_const_values;
extern_const_values.insert(did, llval);
llval
@ -1208,10 +1211,10 @@ fn trans_rec_or_struct(bcx: @mut Block,
let ty = node_id_type(bcx, id);
let tcx = bcx.tcx();
do with_field_tys(tcx, ty, Some(id)) |discr, field_tys| {
with_field_tys(tcx, ty, Some(id), |discr, field_tys| {
let mut need_base = vec::from_elem(field_tys.len(), true);
let numbered_fields = do fields.map |field| {
let numbered_fields = fields.map(|field| {
let opt_pos =
field_tys.iter().position(|field_ty|
field_ty.ident.name == field.ident.node.name);
@ -1225,7 +1228,7 @@ fn trans_rec_or_struct(bcx: @mut Block,
"Couldn't find field in struct type")
}
}
};
});
let optbase = match base {
Some(base_expr) => {
let mut leftovers = ~[];
@ -1247,7 +1250,7 @@ fn trans_rec_or_struct(bcx: @mut Block,
let repr = adt::represent_type(bcx.ccx(), ty);
trans_adt(bcx, repr, discr, numbered_fields, optbase, dest)
}
})
}
/**
@ -1308,9 +1311,9 @@ fn trans_adt(bcx: @mut Block, repr: &adt::Repr, discr: ty::Disr,
// And, would it ever be reasonable to be here with discr != 0?
let base_datum = unpack_datum!(bcx, trans_to_datum(bcx, base.expr));
for &(i, t) in base.fields.iter() {
let datum = do base_datum.get_element(bcx, t, ZeroMem) |srcval| {
let datum = base_datum.get_element(bcx, t, ZeroMem, |srcval| {
adt::trans_field_ptr(bcx, repr, srcval, discr, i)
};
});
let dest = adt::trans_field_ptr(bcx, repr, addr, discr, i);
bcx = datum.store_to(bcx, INIT, dest);
}
@ -1541,9 +1544,9 @@ fn trans_lazy_binop(bcx: @mut Block,
let bcx = bcx;
let Result {bcx: past_lhs, val: lhs} = {
do base::with_scope_result(bcx, a.info(), "lhs") |bcx| {
base::with_scope_result(bcx, a.info(), "lhs", |bcx| {
trans_to_datum(bcx, a).to_result()
}
})
};
if past_lhs.unreachable {
@ -1560,9 +1563,9 @@ fn trans_lazy_binop(bcx: @mut Block,
}
let Result {bcx: past_rhs, val: rhs} = {
do base::with_scope_result(before_rhs, b.info(), "rhs") |bcx| {
base::with_scope_result(before_rhs, b.info(), "rhs", |bcx| {
trans_to_datum(bcx, b).to_result()
}
})
};
if past_rhs.unreachable {
@ -1830,9 +1833,9 @@ pub fn trans_log_level(bcx: @mut Block) -> DatumBlock {
ccx, modpath, "loglevel");
let global;
unsafe {
global = do s.with_c_str |buf| {
global = s.with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, Type::i32().to_ref(), buf)
};
});
llvm::LLVMSetGlobalConstant(global, False);
llvm::LLVMSetInitializer(global, C_null(Type::i32()));
lib::llvm::SetLinkage(global, lib::llvm::InternalLinkage);

View file

@ -647,11 +647,11 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @mut CrateContext,
}
// Perform the call itself
let llrust_ret_val = do llrust_args.as_imm_buf |ptr, len| {
let llrust_ret_val = llrust_args.as_imm_buf(|ptr, len| {
debug!("calling llrustfn = {}", ccx.tn.val_to_str(llrustfn));
llvm::LLVMBuildCall(builder, llrustfn, ptr,
len as c_uint, noname())
};
});
// Get the return value where the foreign fn expects it.
let llforeign_ret_ty = match tys.fn_ty.ret_ty.cast {

View file

@ -336,7 +336,7 @@ pub fn call_tydesc_glue(cx: @mut Block, v: ValueRef, t: ty::t, field: uint)
pub fn make_visit_glue(bcx: @mut Block, v: ValueRef, t: ty::t) -> @mut Block {
let _icx = push_ctxt("make_visit_glue");
do with_scope(bcx, None, "visitor cleanup") |bcx| {
with_scope(bcx, None, "visitor cleanup", |bcx| {
let mut bcx = bcx;
let (visitor_trait, object_ty) = match ty::visitor_object_ty(bcx.tcx(),
ty::ReStatic) {
@ -350,7 +350,7 @@ pub fn make_visit_glue(bcx: @mut Block, v: ValueRef, t: ty::t) -> @mut Block {
// The visitor is a boxed object and needs to be dropped
add_clean(bcx, v, object_ty);
bcx
}
})
}
pub fn make_free_glue(bcx: @mut Block, v: ValueRef, t: ty::t) -> @mut Block {
@ -394,9 +394,9 @@ pub fn trans_struct_drop_flag(bcx: @mut Block, t: ty::t, v0: ValueRef, dtor_did:
class_did: ast::DefId, substs: &ty::substs) -> @mut Block {
let repr = adt::represent_type(bcx.ccx(), t);
let drop_flag = adt::trans_drop_flag_ptr(bcx, repr, v0);
do with_cond(bcx, IsNotNull(bcx, Load(bcx, drop_flag))) |cx| {
with_cond(bcx, IsNotNull(bcx, Load(bcx, drop_flag)), |cx| {
trans_struct_drop(cx, t, v0, dtor_did, class_did, substs)
}
})
}
pub fn trans_struct_drop(bcx: @mut Block, t: ty::t, v0: ValueRef, dtor_did: ast::DefId,
@ -420,7 +420,7 @@ pub fn trans_struct_drop(bcx: @mut Block, t: ty::t, v0: ValueRef, dtor_did: ast:
// Be sure to put all of the fields into a scope so we can use an invoke
// instruction to call the user destructor but still call the field
// destructors if the user destructor fails.
do with_scope(bcx, None, "field drops") |bcx| {
with_scope(bcx, None, "field drops", |bcx| {
let self_arg = PointerCast(bcx, v0, params[0]);
let args = ~[self_arg];
@ -434,7 +434,7 @@ pub fn trans_struct_drop(bcx: @mut Block, t: ty::t, v0: ValueRef, dtor_did: ast:
let (_, bcx) = invoke(bcx, dtor_addr, args, []);
bcx
}
})
}
pub fn make_drop_glue(bcx: @mut Block, v0: ValueRef, t: ty::t) -> @mut Block {
@ -480,7 +480,7 @@ pub fn make_drop_glue(bcx: @mut Block, v0: ValueRef, t: ty::t) -> @mut Block {
ty::ty_trait(_, _, ty::UniqTraitStore, _, _) => {
let lluniquevalue = GEPi(bcx, v0, [0, abi::trt_field_box]);
// Only drop the value when it is non-null
do with_cond(bcx, IsNotNull(bcx, Load(bcx, lluniquevalue))) |bcx| {
with_cond(bcx, IsNotNull(bcx, Load(bcx, lluniquevalue)), |bcx| {
let llvtable = Load(bcx, GEPi(bcx, v0, [0, abi::trt_field_vtable]));
// Cast the vtable to a pointer to a pointer to a tydesc.
@ -493,7 +493,7 @@ pub fn make_drop_glue(bcx: @mut Block, v0: ValueRef, t: ty::t) -> @mut Block {
abi::tydesc_field_free_glue,
None);
bcx
}
})
}
ty::ty_opaque_closure_ptr(ck) => {
closure::make_opaque_cbox_drop_glue(bcx, ck, v0)
@ -618,11 +618,11 @@ pub fn declare_tydesc(ccx: &mut CrateContext, t: ty::t) -> @mut tydesc_info {
let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc").to_managed();
note_unique_llvm_symbol(ccx, name);
debug!("+++ declare_tydesc {} {}", ppaux::ty_to_str(ccx.tcx, t), name);
let gvar = do name.with_c_str |buf| {
let gvar = name.with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type.to_ref(), buf)
}
};
});
let ty_name = C_estr_slice(ccx, ppaux::ty_to_str(ccx.tcx, t).to_managed());

View file

@ -545,14 +545,14 @@ pub fn get_vtable(bcx: @mut Block,
}
// Not in the cache. Actually build it.
let methods = do origins.flat_map |origin| {
let methods = origins.flat_map(|origin| {
match *origin {
typeck::vtable_static(id, ref substs, sub_vtables) => {
emit_vtable_methods(bcx, id, *substs, sub_vtables)
}
_ => ccx.sess.bug("get_vtable: expected a static origin"),
}
};
});
// Generate a type descriptor for the vtable.
let tydesc = get_tydesc(ccx, self_ty);
@ -578,9 +578,9 @@ pub fn make_vtable(ccx: &mut CrateContext,
let tbl = C_struct(components, false);
let sym = token::gensym("vtable");
let vt_gvar = do format!("vtable{}", sym).with_c_str |buf| {
let vt_gvar = format!("vtable{}", sym).with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, val_ty(tbl).to_ref(), buf)
};
});
llvm::LLVMSetInitializer(vt_gvar, tbl);
llvm::LLVMSetGlobalConstant(vt_gvar, lib::llvm::True);
lib::llvm::SetLinkage(vt_gvar, lib::llvm::InternalLinkage);
@ -605,7 +605,7 @@ fn emit_vtable_methods(bcx: @mut Block,
ty::populate_implementations_for_trait_if_necessary(bcx.tcx(), trt_id);
let trait_method_def_ids = ty::trait_method_def_ids(tcx, trt_id);
do trait_method_def_ids.map |method_def_id| {
trait_method_def_ids.map(|method_def_id| {
let ident = ty::method(tcx, *method_def_id).ident;
// The substitutions we have are on the impl, so we grab
// the method type from the impl to substitute into.
@ -626,7 +626,7 @@ fn emit_vtable_methods(bcx: @mut Block,
trans_fn_ref_with_vtables(bcx, m_id, 0,
substs, Some(vtables)).llfn
}
}
})
}
pub fn trans_trait_cast(bcx: @mut Block,

View file

@ -213,12 +213,12 @@ impl Reflector {
ty::ty_tup(ref tys) => {
let extra = ~[self.c_uint(tys.len())]
+ self.c_size_and_align(t);
do self.bracketed("tup", extra) |this| {
self.bracketed("tup", extra, |this| {
for (i, t) in tys.iter().enumerate() {
let extra = ~[this.c_uint(i), this.c_tydesc(*t)];
this.visit("tup_field", extra);
}
}
})
}
// FIXME (#2594): fetch constants out of intrinsic
@ -262,7 +262,7 @@ impl Reflector {
let extra = ~[self.c_slice(ty_to_str(tcx, t).to_managed()),
self.c_bool(named_fields),
self.c_uint(fields.len())] + self.c_size_and_align(t);
do self.bracketed("class", extra) |this| {
self.bracketed("class", extra, |this| {
for (i, field) in fields.iter().enumerate() {
let extra = ~[this.c_uint(i),
this.c_slice(bcx.ccx().sess.str_of(field.ident)),
@ -270,7 +270,7 @@ impl Reflector {
+ this.c_mt(&field.mt);
this.visit("class_field", extra);
}
}
})
}
// FIXME (#2595): visiting all the variants in turn is probably
@ -320,14 +320,14 @@ impl Reflector {
let enum_args = ~[self.c_uint(variants.len()), make_get_disr()]
+ self.c_size_and_align(t);
do self.bracketed("enum", enum_args) |this| {
self.bracketed("enum", enum_args, |this| {
for (i, v) in variants.iter().enumerate() {
let name = ccx.sess.str_of(v.name);
let variant_args = ~[this.c_uint(i),
C_u64(v.disr_val),
this.c_uint(v.args.len()),
this.c_slice(name)];
do this.bracketed("enum_variant", variant_args) |this| {
this.bracketed("enum_variant", variant_args, |this| {
for (j, a) in v.args.iter().enumerate() {
let bcx = this.bcx;
let null = C_null(llptrty);
@ -338,9 +338,9 @@ impl Reflector {
this.c_tydesc(*a)];
this.visit("enum_variant_field", field_args);
}
})
}
}
}
})
}
ty::ty_trait(_, _, _, _, _) => {

View file

@ -23,7 +23,7 @@ pub fn make_free_glue(bcx: @mut Block, vptrptr: ValueRef, box_ty: ty::t)
let box_datum = immediate_rvalue(Load(bcx, vptrptr), box_ty);
let not_null = IsNotNull(bcx, box_datum.val);
do with_cond(bcx, not_null) |bcx| {
with_cond(bcx, not_null, |bcx| {
let body_datum = box_datum.box_body(bcx);
let bcx = glue::drop_ty(bcx, body_datum.to_ref_llval(bcx),
body_datum.ty);
@ -32,5 +32,5 @@ pub fn make_free_glue(bcx: @mut Block, vptrptr: ValueRef, box_ty: ty::t)
} else {
glue::trans_exchange_free(bcx, box_datum.val)
}
}
})
}

View file

@ -52,7 +52,7 @@ impl Value {
pub fn get_dominating_store(self, bcx: &mut Block) -> Option<Value> {
match self.get_single_user().and_then(|user| user.as_store_inst()) {
Some(store) => {
do store.get_parent().and_then |store_bb| {
store.get_parent().and_then(|store_bb| {
let mut bb = BasicBlock(bcx.llbb);
let mut ret = Some(store);
while *bb != *store_bb {
@ -62,7 +62,7 @@ impl Value {
}
}
ret
}
})
}
_ => None
}
@ -150,8 +150,8 @@ impl Iterator<Value> for UserIterator {
fn next(&mut self) -> Option<Value> {
let current = self.next;
self.next = do current.and_then |u| { u.get_next_use() };
self.next = current.and_then(|u| u.get_next_use());
do current.map |u| { u.get_user() }
current.map(|u| u.get_user())
}
}

View file

@ -1684,7 +1684,7 @@ fn type_needs_unwind_cleanup_(cx: ctxt, ty: t,
let mut encountered_box = encountered_box;
let mut needs_unwind_cleanup = false;
do maybe_walk_ty(ty) |ty| {
maybe_walk_ty(ty, |ty| {
let old_encountered_box = encountered_box;
let result = match get(ty).sty {
ty_box(_) | ty_opaque_box => {
@ -1729,7 +1729,7 @@ fn type_needs_unwind_cleanup_(cx: ctxt, ty: t,
encountered_box = old_encountered_box;
result
}
});
return needs_unwind_cleanup;
}
@ -2207,14 +2207,14 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
-> TypeContents {
let _i = indenter();
let mut tc = TC::All;
do each_inherited_builtin_bound(cx, bounds, traits) |bound| {
each_inherited_builtin_bound(cx, bounds, traits, |bound| {
tc = tc - match bound {
BoundStatic => TC::Nonstatic,
BoundSend => TC::Nonsendable,
BoundFreeze => TC::Nonfreezable,
BoundSized => TC::Nonsized,
};
}
});
return tc;
// Iterates over all builtin bounds on the type parameter def, including
@ -2227,13 +2227,13 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
f(bound);
}
do each_bound_trait_and_supertraits(cx, traits) |trait_ref| {
each_bound_trait_and_supertraits(cx, traits, |trait_ref| {
let trait_def = lookup_trait_def(cx, trait_ref.def_id);
for bound in trait_def.bounds.iter() {
f(bound);
}
true
};
});
}
}
}
@ -2327,12 +2327,12 @@ pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
ty_enum(did, ref substs) => {
seen.push(did);
let vs = enum_variants(cx, did);
let r = !vs.is_empty() && do vs.iter().all |variant| {
do variant.args.iter().any |aty| {
let r = !vs.is_empty() && vs.iter().all(|variant| {
variant.args.iter().any(|aty| {
let sty = subst(cx, substs, *aty);
type_requires(cx, seen, r_ty, sty)
}
};
})
});
seen.pop();
r
}
@ -2490,11 +2490,11 @@ pub fn type_is_pod(cx: ctxt, ty: t) -> bool {
ty_opaque_closure_ptr(_) => result = true,
ty_struct(did, ref substs) => {
let fields = lookup_struct_fields(cx, did);
result = do fields.iter().all |f| {
result = fields.iter().all(|f| {
let fty = ty::lookup_item_type(cx, f.id);
let sty = subst(cx, substs, fty.ty);
type_is_pod(cx, sty)
};
});
}
ty_estr(vstore_slice(*)) | ty_evec(_, vstore_slice(*)) => {
@ -3000,7 +3000,7 @@ pub fn method_call_type_param_defs(tcx: ctxt,
method_map: typeck::method_map,
id: ast::NodeId)
-> Option<@~[TypeParameterDef]> {
do method_map.find(&id).map |method| {
method_map.find(&id).map(|method| {
match method.origin {
typeck::method_static(did) => {
// n.b.: When we encode impl methods, the bounds
@ -3026,7 +3026,7 @@ pub fn method_call_type_param_defs(tcx: ctxt,
n_mth).generics.type_param_defs)
}
}
}
})
}
pub fn resolve_expr(tcx: ctxt, expr: &ast::Expr) -> ast::Def {
@ -3217,14 +3217,14 @@ pub fn method_idx(id: ast::Ident, meths: &[@Method]) -> Option<uint> {
/// to a bitset or some other representation.
pub fn param_tys_in_type(ty: t) -> ~[param_ty] {
let mut rslt = ~[];
do walk_ty(ty) |ty| {
walk_ty(ty, |ty| {
match get(ty).sty {
ty_param(p) => {
rslt.push(p);
}
_ => ()
}
}
});
rslt
}
@ -3233,12 +3233,12 @@ pub fn occurs_check(tcx: ctxt, sp: Span, vid: TyVid, rt: t) {
// contain duplicates. (Integral type vars aren't counted.)
fn vars_in_type(ty: t) -> ~[TyVid] {
let mut rslt = ~[];
do walk_ty(ty) |ty| {
walk_ty(ty, |ty| {
match get(ty).sty {
ty_infer(TyVar(v)) => rslt.push(v),
_ => ()
}
}
});
rslt
}
@ -3644,9 +3644,9 @@ fn struct_ctor_id(cx: ctxt, struct_did: ast::DefId) -> Option<ast::DefId> {
Some(&ast_map::node_item(item, _)) => {
match item.node {
ast::item_struct(struct_def, _) => {
do struct_def.ctor_id.map |ctor_id| {
struct_def.ctor_id.map(|ctor_id| {
ast_util::local_def(ctor_id)
}
})
}
_ => cx.sess.bug("called struct_ctor_id on non-struct")
}
@ -3698,13 +3698,13 @@ impl VariantInfo {
assert!(fields.len() > 0);
let arg_tys = ty_fn_args(ctor_ty).map(|a| *a);
let arg_names = do fields.map |field| {
let arg_names = fields.map(|field| {
match field.node.kind {
named_field(ident, _) => ident,
unnamed_field => cx.sess.bug(
"enum_variants: all fields in struct must have a name")
}
};
});
return VariantInfo {
args: arg_tys,
@ -3724,7 +3724,7 @@ pub fn substd_enum_variants(cx: ctxt,
id: ast::DefId,
substs: &substs)
-> ~[@VariantInfo] {
do enum_variants(cx, id).iter().map |variant_info| {
enum_variants(cx, id).iter().map(|variant_info| {
let substd_args = variant_info.args.iter()
.map(|aty| subst(cx, substs, *aty)).collect();
@ -3735,7 +3735,7 @@ pub fn substd_enum_variants(cx: ctxt,
ctor_ty: substd_ctor_ty,
..(**variant_info).clone()
}
}.collect()
}).collect()
}
pub fn item_path_str(cx: ctxt, id: ast::DefId) -> ~str {
@ -3967,11 +3967,11 @@ pub fn each_attr(tcx: ctxt, did: DefId, f: |@MetaItem| -> bool) -> bool {
}
} else {
let mut cont = true;
do csearch::get_item_attrs(tcx.cstore, did) |meta_items| {
csearch::get_item_attrs(tcx.cstore, did, |meta_items| {
if cont {
cont = meta_items.iter().advance(|ptrptr| f(*ptrptr));
}
}
});
return cont;
}
}
@ -4083,7 +4083,7 @@ pub fn lookup_struct_field(cx: ctxt,
}
fn struct_field_tys(fields: &[@struct_field]) -> ~[field_ty] {
do fields.map |field| {
fields.map(|field| {
match field.node.kind {
named_field(ident, visibility) => {
field_ty {
@ -4101,14 +4101,14 @@ fn struct_field_tys(fields: &[@struct_field]) -> ~[field_ty] {
}
}
}
}
})
}
// Returns a list of fields corresponding to the struct's items. trans uses
// this. Takes a list of substs with which to instantiate field types.
pub fn struct_fields(cx: ctxt, did: ast::DefId, substs: &substs)
-> ~[field] {
do lookup_struct_fields(cx, did).map |f| {
lookup_struct_fields(cx, did).map(|f| {
field {
// FIXME #6993: change type of field to Name and get rid of new()
ident: ast::Ident::new(f.name),
@ -4117,7 +4117,7 @@ pub fn struct_fields(cx: ctxt, did: ast::DefId, substs: &substs)
mutbl: MutImmutable
}
}
}
})
}
pub fn is_binopable(cx: ctxt, ty: t, op: ast::BinOp) -> bool {
@ -4381,27 +4381,27 @@ pub fn count_traits_and_supertraits(tcx: ctxt,
type_param_defs: &[TypeParameterDef]) -> uint {
let mut total = 0;
for type_param_def in type_param_defs.iter() {
do each_bound_trait_and_supertraits(
tcx, type_param_def.bounds.trait_bounds) |_| {
each_bound_trait_and_supertraits(
tcx, type_param_def.bounds.trait_bounds, |_| {
total += 1;
true
};
});
}
return total;
}
pub fn get_tydesc_ty(tcx: ctxt) -> Result<t, ~str> {
do tcx.lang_items.require(TyDescStructLangItem).map |tydesc_lang_item| {
tcx.lang_items.require(TyDescStructLangItem).map(|tydesc_lang_item| {
tcx.intrinsic_defs.find_copy(&tydesc_lang_item)
.expect("Failed to resolve TyDesc")
}
})
}
pub fn get_opaque_ty(tcx: ctxt) -> Result<t, ~str> {
do tcx.lang_items.require(OpaqueStructLangItem).map |opaque_lang_item| {
tcx.lang_items.require(OpaqueStructLangItem).map(|opaque_lang_item| {
tcx.intrinsic_defs.find_copy(&opaque_lang_item)
.expect("Failed to resolve Opaque")
}
})
}
pub fn visitor_object_ty(tcx: ctxt,
@ -4460,7 +4460,7 @@ pub fn populate_implementations_for_type_if_necessary(tcx: ctxt,
return
}
do csearch::each_implementation_for_type(tcx.sess.cstore, type_id)
csearch::each_implementation_for_type(tcx.sess.cstore, type_id,
|implementation_def_id| {
let implementation = @csearch::get_impl(tcx, implementation_def_id);
@ -4498,7 +4498,7 @@ pub fn populate_implementations_for_type_if_necessary(tcx: ctxt,
// Store the implementation info.
tcx.impls.insert(implementation_def_id, implementation);
}
});
tcx.populated_external_types.insert(type_id);
}
@ -4515,7 +4515,7 @@ pub fn populate_implementations_for_trait_if_necessary(
return
}
do csearch::each_implementation_for_trait(tcx.sess.cstore, trait_id)
csearch::each_implementation_for_trait(tcx.sess.cstore, trait_id,
|implementation_def_id| {
let implementation = @csearch::get_impl(tcx, implementation_def_id);
@ -4532,7 +4532,7 @@ pub fn populate_implementations_for_trait_if_necessary(
// Store the implementation info.
tcx.impls.insert(implementation_def_id, implementation);
}
});
tcx.populated_external_traits.insert(trait_id);
}
@ -4607,9 +4607,9 @@ pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: @str) -> u64 {
iter(hash, &mt.mutbl);
};
fn iter<T: IterBytes>(hash: &mut SipState, t: &T) {
do t.iter_bytes(true) |bytes| { hash.input(bytes); true };
t.iter_bytes(true, |bytes| { hash.input(bytes); true });
}
do ty::walk_ty(t) |t| {
ty::walk_ty(t, |t| {
match ty::get(t).sty {
ty_nil => hash.input([0]),
ty_bot => hash.input([1]),
@ -4714,7 +4714,7 @@ pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: @str) -> u64 {
mt(&mut hash, m);
}
}
}
});
hash.result_u64()
}

View file

@ -635,9 +635,9 @@ fn ty_of_method_or_bare_fn<AC:AstConv>(
// that function type
let rb = rscope::BindingRscope::new(id);
let opt_transformed_self_ty = do opt_self_info.map |self_info| {
let opt_transformed_self_ty = opt_self_info.map(|self_info| {
transform_self_ty(this, &rb, self_info)
};
});
let input_tys = decl.inputs.map(|a| ty_of_arg(this, &rb, a, None));
@ -730,14 +730,14 @@ pub fn ty_of_closure<AC:AstConv,RS:RegionScope>(
// that function type
let rb = rscope::BindingRscope::new(id);
let input_tys = do decl.inputs.iter().enumerate().map |(i, a)| {
let expected_arg_ty = do expected_sig.as_ref().and_then |e| {
let input_tys = decl.inputs.iter().enumerate().map(|(i, a)| {
let expected_arg_ty = expected_sig.as_ref().and_then(|e| {
// no guarantee that the correct number of expected args
// were supplied
if i < e.inputs.len() {Some(e.inputs[i])} else {None}
};
});
ty_of_arg(this, &rb, a, expected_arg_ty)
}.collect();
}).collect();
let expected_ret_ty = expected_sig.map(|e| e.output);
let output_ty = match decl.output.node {

View file

@ -378,14 +378,13 @@ impl<'self> LookupContext<'self> {
// we find the trait the method came from, counting up the
// methods from them.
let mut method_count = 0;
do ty::each_bound_trait_and_supertraits(tcx, &[subtrait])
|bound_ref| {
ty::each_bound_trait_and_supertraits(tcx, &[subtrait], |bound_ref| {
if bound_ref.def_id == trait_ref.def_id { false }
else {
method_count += ty::trait_methods(tcx, bound_ref.def_id).len();
true
}
};
});
return method_count + n_method;
}
@ -412,7 +411,7 @@ impl<'self> LookupContext<'self> {
};
let trait_ref = @TraitRef { def_id: did, substs: rcvr_substs.clone() };
do self.push_inherent_candidates_from_bounds_inner(&[trait_ref])
self.push_inherent_candidates_from_bounds_inner(&[trait_ref],
|new_trait_ref, m, method_num, _bound_num| {
let vtable_index =
self.get_method_index(new_trait_ref, trait_ref, method_num);
@ -436,7 +435,7 @@ impl<'self> LookupContext<'self> {
real_index: vtable_index
})
}
};
});
}
fn push_inherent_candidates_from_param(&self,
@ -464,7 +463,7 @@ impl<'self> LookupContext<'self> {
self_ty: ty::t,
bounds: &[@TraitRef],
param: param_index) {
do self.push_inherent_candidates_from_bounds_inner(bounds)
self.push_inherent_candidates_from_bounds_inner(bounds,
|trait_ref, m, method_num, bound_num| {
Candidate {
rcvr_match_condition: RcvrMatchesIfSubtype(self_ty),
@ -478,7 +477,7 @@ impl<'self> LookupContext<'self> {
bound_num: bound_num,
})
}
}
})
}
// Do a search through a list of bounds, using a callback to actually
@ -493,7 +492,7 @@ impl<'self> LookupContext<'self> {
let tcx = self.tcx();
let mut next_bound_idx = 0; // count only trait bounds
do ty::each_bound_trait_and_supertraits(tcx, bounds) |bound_trait_ref| {
ty::each_bound_trait_and_supertraits(tcx, bounds, |bound_trait_ref| {
let this_bound_idx = next_bound_idx;
next_bound_idx += 1;
@ -518,7 +517,7 @@ impl<'self> LookupContext<'self> {
}
}
true
};
});
}

View file

@ -395,9 +395,7 @@ impl Visitor<()> for GatherLocalsVisitor {
// non-obvious: the `blk` variable maps to region lb, so
// we have to keep this up-to-date. This
// is... unfortunate. It'd be nice to not need this.
do self.fcx.with_region_lb(b.id) {
visit::walk_block(self, b, ());
}
self.fcx.with_region_lb(b.id, || visit::walk_block(self, b, ()));
}
// Don't descend into fns and items
@ -517,10 +515,11 @@ pub fn check_fn(ccx: @mut CrateCtxt,
// Add formal parameters.
for (arg_ty, input) in arg_tys.iter().zip(decl.inputs.iter()) {
// Create type variables for each argument.
do pat_util::pat_bindings(tcx.def_map, input.pat)
pat_util::pat_bindings(tcx.def_map,
input.pat,
|_bm, pat_id, _sp, _path| {
visit.assign(pat_id, None);
}
});
// Check the pattern.
let pcx = pat_ctxt {
@ -1392,17 +1391,17 @@ pub fn valid_range_bounds(ccx: @mut CrateCtxt,
pub fn check_expr_has_type(
fcx: @mut FnCtxt, expr: @ast::Expr,
expected: ty::t) {
do check_expr_with_unifier(fcx, expr, Some(expected)) {
check_expr_with_unifier(fcx, expr, Some(expected), || {
demand::suptype(fcx, expr.span, expected, fcx.expr_ty(expr));
}
});
}
pub fn check_expr_coercable_to_type(
fcx: @mut FnCtxt, expr: @ast::Expr,
expected: ty::t) {
do check_expr_with_unifier(fcx, expr, Some(expected)) {
check_expr_with_unifier(fcx, expr, Some(expected), || {
demand::coerce(fcx, expr.span, expected, expr)
}
});
}
pub fn check_expr_with_hint(
@ -1462,9 +1461,7 @@ pub fn lookup_field_ty(tcx: ty::ctxt,
substs: &ty::substs) -> Option<ty::t> {
let o_field = items.iter().find(|f| f.name == fieldname);
do o_field.map() |f| {
ty::lookup_field_type(tcx, class_id, f.id, substs)
}
o_field.map(|f| ty::lookup_field_type(tcx, class_id, f.id, substs))
}
// Controls whether the arguments are automatically referenced. This is useful
@ -2693,7 +2690,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
}
}
ast::ExprUnary(callee_id, unop, oprnd) => {
let exp_inner = do unpack_expected(fcx, expected) |sty| {
let exp_inner = unpack_expected(fcx, expected, |sty| {
match unop {
ast::UnBox(_) | ast::UnUniq => match *sty {
ty::ty_box(ref mt) | ty::ty_uniq(ref mt) => Some(mt.ty),
@ -2702,7 +2699,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
ast::UnNot | ast::UnNeg => expected,
ast::UnDeref => None
}
};
});
check_expr_with_opt_hint(fcx, oprnd, exp_inner);
let mut oprnd_t = fcx.expr_ty(oprnd);
if !ty::type_is_error(oprnd_t) &&
@ -2925,12 +2922,15 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
expected,
|x| Some((*x).clone()));
let inner_ty = match expected_sty {
Some(ty::ty_closure(_)) => expected.unwrap(),
Some(ty::ty_closure(ref closure_ty))
if closure_ty.sigil == ast::OwnedSigil => {
expected.unwrap()
}
_ => match expected {
Some(expected_t) => {
fcx.type_error_message(expr.span, |actual| {
format!("last argument in `do` call \
has non-closure type: {}",
has non-procedure type: {}",
actual)
}, expected_t, None);
let err_ty = ty::mk_err();
@ -3137,7 +3137,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let mut bot_field = false;
let mut err_field = false;
let elt_ts = do elts.iter().enumerate().map |(i, e)| {
let elt_ts = elts.iter().enumerate().map(|(i, e)| {
let opt_hint = match flds {
Some(ref fs) if i < fs.len() => Some(fs[i]),
_ => None
@ -3147,7 +3147,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
err_field = err_field || ty::type_is_error(t);
bot_field = bot_field || ty::type_is_bot(t);
t
}.collect();
}).collect();
if bot_field {
fcx.write_bot(id);
} else if err_field {
@ -3355,7 +3355,7 @@ pub fn check_block_with_expected(fcx: @mut FnCtxt,
let purity_state = fcx.ps.recurse(blk);
let prev = replace(&mut fcx.ps, purity_state);
do fcx.with_region_lb(blk.id) {
fcx.with_region_lb(blk.id, || {
let mut warned = false;
let mut last_was_bot = false;
let mut any_bot = false;
@ -3408,7 +3408,7 @@ pub fn check_block_with_expected(fcx: @mut FnCtxt,
}
}
};
}
});
fcx.ps = prev;
}

View file

@ -212,7 +212,7 @@ fn visit_local(rcx: &mut Rcx, l: @ast::Local) {
fn constrain_bindings_in_pat(pat: @ast::Pat, rcx: &mut Rcx) {
let tcx = rcx.fcx.tcx();
debug!("regionck::visit_pat(pat={})", pat.repr(tcx));
do pat_util::pat_bindings(tcx.def_map, pat) |_, id, span, _| {
pat_util::pat_bindings(tcx.def_map, pat, |_, id, span, _| {
// If we have a variable that contains region'd data, that
// data will be accessible from anywhere that the variable is
// accessed. We must be wary of loops like this:
@ -240,7 +240,7 @@ fn constrain_bindings_in_pat(pat: @ast::Pat, rcx: &mut Rcx) {
constrain_regions_in_type_of_node(
rcx, id, encl_region,
infer::BindingTypeIsNotValidAtDecl(span));
}
})
}
fn visit_expr(rcx: &mut Rcx, expr: @ast::Expr) {
@ -735,7 +735,7 @@ fn constrain_regions_in_type(
region_to_str(tcx, "", false, minimum_lifetime),
ty_to_str(tcx, ty));
do relate_nested_regions(tcx, Some(minimum_lifetime), ty) |r_sub, r_sup| {
relate_nested_regions(tcx, Some(minimum_lifetime), ty, |r_sub, r_sup| {
debug!("relate_nested_regions(r_sub={}, r_sup={})",
r_sub.repr(tcx),
r_sup.repr(tcx));
@ -754,7 +754,7 @@ fn constrain_regions_in_type(
true, infer::ReferenceOutlivesReferent(ty, origin.span()),
r_sub, r_sup);
}
}
});
return (e == rcx.errors_reported);
}

View file

@ -132,7 +132,7 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
// ty is the value supplied for the type parameter A...
let mut param_result = ~[];
do ty::each_bound_trait_and_supertraits(tcx, type_param_bounds.trait_bounds) |trait_ref| {
ty::each_bound_trait_and_supertraits(tcx, type_param_bounds.trait_bounds, |trait_ref| {
// ...and here trait_ref is each bound that was declared on A,
// expressed in terms of the type parameters.
@ -161,7 +161,7 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
}
}
true
};
});
debug!("lookup_vtables_for_param result(\
location_info={:?}, \
@ -291,7 +291,7 @@ fn lookup_vtable_from_bounds(vcx: &VtableContext,
let mut n_bound = 0;
let mut ret = None;
do ty::each_bound_trait_and_supertraits(tcx, bounds) |bound_trait_ref| {
ty::each_bound_trait_and_supertraits(tcx, bounds, |bound_trait_ref| {
debug!("checking bounds trait {}",
bound_trait_ref.repr(vcx.tcx()));
@ -309,7 +309,7 @@ fn lookup_vtable_from_bounds(vcx: &VtableContext,
n_bound += 1;
true
}
};
});
ret
}
@ -483,12 +483,12 @@ fn fixup_substs(vcx: &VtableContext,
ty::RegionTraitStore(ty::ReStatic),
ast::MutImmutable,
ty::EmptyBuiltinBounds());
do fixup_ty(vcx, location_info, t, is_early).map |t_f| {
fixup_ty(vcx, location_info, t, is_early).map(|t_f| {
match ty::get(t_f).sty {
ty::ty_trait(_, ref substs_f, _, _, _) => (*substs_f).clone(),
_ => fail!("t_f should be a trait")
}
}
})
}
fn fixup_ty(vcx: &VtableContext,
@ -560,7 +560,7 @@ pub fn early_resolve_expr(ex: @ast::Expr,
let cx = fcx.ccx;
match ex.node {
ast::ExprPath(*) => {
do fcx.opt_node_ty_substs(ex.id) |substs| {
fcx.opt_node_ty_substs(ex.id, |substs| {
debug!("vtable resolution on parameter bounds for expr {}",
ex.repr(fcx.tcx()));
let def = cx.tcx.def_map.get_copy(&ex.id);
@ -580,7 +580,7 @@ pub fn early_resolve_expr(ex: @ast::Expr,
}
}
true
};
});
}
ast::ExprParen(e) => {

View file

@ -180,7 +180,7 @@ fn resolve_type_vars_for_node(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId)
id, ppaux::ty_to_str(tcx, n_ty), ppaux::ty_to_str(tcx, t));
write_ty_to_tcx(tcx, id, t);
let mut ret = Some(t);
do fcx.opt_node_ty_substs(id) |substs| {
fcx.opt_node_ty_substs(id, |substs| {
let mut new_tps = ~[];
for subst in substs.tps.iter() {
match resolve_type_vars_in_type(fcx, sp, *subst) {
@ -190,7 +190,7 @@ fn resolve_type_vars_for_node(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId)
}
write_substs_to_tcx(tcx, id, new_tps);
ret.is_some()
};
});
ret
}
}

View file

@ -102,7 +102,7 @@ pub fn type_is_defined_in_local_crate(original_type: t) -> bool {
*/
let mut found_nominal = false;
do ty::walk_ty(original_type) |t| {
ty::walk_ty(original_type, |t| {
match get(t).sty {
ty_enum(def_id, _) |
ty_trait(def_id, _, _, _, _) |
@ -114,7 +114,7 @@ pub fn type_is_defined_in_local_crate(original_type: t) -> bool {
_ => { }
}
}
});
return found_nominal;
}
@ -413,22 +413,22 @@ impl CoherenceChecker {
}
pub fn check_implementation_coherence(&self) {
do self.crate_context.tcx.trait_impls.each_key |&trait_id| {
self.crate_context.tcx.trait_impls.each_key(|&trait_id| {
self.check_implementation_coherence_of(trait_id);
true
};
});
}
pub fn check_implementation_coherence_of(&self, trait_def_id: DefId) {
// Unify pairs of polytypes.
do self.iter_impls_of_trait(trait_def_id) |a| {
self.iter_impls_of_trait(trait_def_id, |a| {
let implementation_a = a;
let polytype_a =
self.get_self_type_for_implementation(implementation_a);
// "We have an impl of trait <trait_def_id> for type <polytype_a>,
// and that impl is <implementation_a>"
do self.iter_impls_of_trait(trait_def_id) |b| {
self.iter_impls_of_trait(trait_def_id, |b| {
let implementation_b = b;
// An impl is coherent with itself
@ -447,8 +447,8 @@ impl CoherenceChecker {
"note conflicting implementation here");
}
}
}
}
})
})
}
pub fn iter_impls_of_trait(&self, trait_def_id: DefId, f: |@Impl|) {
@ -665,12 +665,12 @@ impl CoherenceChecker {
let mut impls_seen = HashSet::new();
let crate_store = self.crate_context.tcx.sess.cstore;
do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
do each_impl(crate_store, crate_number) |def_id| {
iter_crate_data(crate_store, |crate_number, _crate_metadata| {
each_impl(crate_store, crate_number, |def_id| {
assert_eq!(crate_number, def_id.crate);
self.add_external_impl(&mut impls_seen, def_id)
}
}
})
})
}
//

View file

@ -264,9 +264,9 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt,
// Represents [A',B',C']
let num_trait_bounds = trait_ty_generics.type_param_defs.len();
let non_shifted_trait_tps = do vec::from_fn(num_trait_bounds) |i| {
let non_shifted_trait_tps = vec::from_fn(num_trait_bounds, |i| {
ty::mk_param(tcx, i, trait_ty_generics.type_param_defs[i].def_id)
};
});
// Represents [D']
let self_param = ty::mk_param(tcx, num_trait_bounds,
@ -274,10 +274,10 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt,
// Represents [E',F',G']
let num_method_bounds = m.generics.type_param_defs.len();
let shifted_method_tps = do vec::from_fn(num_method_bounds) |i| {
let shifted_method_tps = vec::from_fn(num_method_bounds, |i| {
ty::mk_param(tcx, i + num_trait_bounds + 1,
m.generics.type_param_defs[i].def_id)
};
});
// Convert the regions 'a, 'b, 'c defined on the trait into
// bound regions on the fn.

View file

@ -98,45 +98,45 @@ impl Coerce {
// See above for details.
match ty::get(b).sty {
ty::ty_rptr(_, mt_b) => {
return do self.unpack_actual_value(a) |sty_a| {
return self.unpack_actual_value(a, |sty_a| {
self.coerce_borrowed_pointer(a, sty_a, b, mt_b)
};
});
}
ty::ty_estr(vstore_slice(_)) => {
return do self.unpack_actual_value(a) |sty_a| {
return self.unpack_actual_value(a, |sty_a| {
self.coerce_borrowed_string(a, sty_a, b)
};
});
}
ty::ty_evec(mt_b, vstore_slice(_)) => {
return do self.unpack_actual_value(a) |sty_a| {
return self.unpack_actual_value(a, |sty_a| {
self.coerce_borrowed_vector(a, sty_a, b, mt_b)
};
});
}
ty::ty_closure(ty::ClosureTy {sigil: ast::BorrowedSigil, _}) => {
return do self.unpack_actual_value(a) |sty_a| {
return self.unpack_actual_value(a, |sty_a| {
self.coerce_borrowed_fn(a, sty_a, b)
};
});
}
ty::ty_trait(_, _, ty::RegionTraitStore(*), m, _) => {
return do self.unpack_actual_value(a) |sty_a| {
return self.unpack_actual_value(a, |sty_a| {
self.coerce_borrowed_object(a, sty_a, b, m)
};
});
}
ty::ty_ptr(mt_b) => {
return do self.unpack_actual_value(a) |sty_a| {
return self.unpack_actual_value(a, |sty_a| {
self.coerce_unsafe_ptr(a, sty_a, b, mt_b)
};
});
}
_ => {}
}
do self.unpack_actual_value(a) |sty_a| {
self.unpack_actual_value(a, |sty_a| {
match *sty_a {
ty::ty_bare_fn(ref a_f) => {
// Bare functions are coercable to any closure type.
@ -151,7 +151,7 @@ impl Coerce {
self.subtype(a, b)
}
}
}
})
}
pub fn subtype(&self, a: ty::t, b: ty::t) -> CoerceResult {
@ -342,9 +342,9 @@ impl Coerce {
fn_ty_a: &ty::BareFnTy,
b: ty::t)
-> CoerceResult {
do self.unpack_actual_value(b) |sty_b| {
self.unpack_actual_value(b, |sty_b| {
self.coerce_from_bare_fn_post_unpack(a, fn_ty_a, b, sty_b)
}
})
}
pub fn coerce_from_bare_fn_post_unpack(&self,

View file

@ -232,9 +232,7 @@ pub trait Combine {
}
fn args(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
do self.contratys(a, b).and_then |t| {
Ok(t)
}
self.contratys(a, b).and_then(|t| Ok(t))
}
fn sigils(&self, p1: ast::Sigil, p2: ast::Sigil) -> cres<ast::Sigil> {
@ -270,9 +268,9 @@ pub trait Combine {
match (a, b) {
(ty::vstore_slice(a_r), ty::vstore_slice(b_r)) => {
do self.contraregions(a_r, b_r).and_then |r| {
self.contraregions(a_r, b_r).and_then(|r| {
Ok(ty::vstore_slice(r))
}
})
}
_ if a == b => {
@ -294,9 +292,9 @@ pub trait Combine {
match (a, b) {
(ty::RegionTraitStore(a_r), ty::RegionTraitStore(b_r)) => {
do self.contraregions(a_r, b_r).and_then |r| {
self.contraregions(a_r, b_r).and_then(|r| {
Ok(ty::RegionTraitStore(r))
}
})
}
_ if a == b => {
@ -346,11 +344,9 @@ pub fn expected_found<C:Combine,T>(
pub fn eq_tys<C:Combine>(this: &C, a: ty::t, b: ty::t) -> ures {
let suber = this.sub();
do this.infcx().try {
do suber.tys(a, b).and_then |_ok| {
suber.contratys(a, b)
}.to_ures()
}
this.infcx().try(|| {
suber.tys(a, b).and_then(|_ok| suber.contratys(a, b)).to_ures()
})
}
pub fn eq_regions<C:Combine>(this: &C, a: ty::Region, b: ty::Region)
@ -359,11 +355,9 @@ pub fn eq_regions<C:Combine>(this: &C, a: ty::Region, b: ty::Region)
a.repr(this.infcx().tcx),
b.repr(this.infcx().tcx));
let sub = this.sub();
do indent {
indent(|| {
this.infcx().try(|| {
do sub.regions(a, b).and_then |_r| {
sub.contraregions(a, b)
}
sub.regions(a, b).and_then(|_r| sub.contraregions(a, b))
}).or_else(|e| {
// substitute a better error, but use the regions
// found in the original error
@ -373,7 +367,7 @@ pub fn eq_regions<C:Combine>(this: &C, a: ty::Region, b: ty::Region)
_ => Err(e)
}
}).to_ures()
}
})
}
pub fn eq_opt_regions<C:Combine>(
@ -382,14 +376,8 @@ pub fn eq_opt_regions<C:Combine>(
b: Option<ty::Region>) -> cres<Option<ty::Region>> {
match (a, b) {
(None, None) => {
Ok(None)
}
(Some(a), Some(b)) => {
do eq_regions(this, a, b).then {
Ok(Some(a))
}
}
(None, None) => Ok(None),
(Some(a), Some(b)) => eq_regions(this, a, b).then(|| Ok(Some(a))),
(_, _) => {
// If these two substitutions are for the same type (and
// they should be), then the type should either
@ -528,21 +516,15 @@ pub fn super_tys<C:Combine>(this: &C, a: ty::t, b: ty::t) -> cres<ty::t> {
}
(&ty::ty_box(ref a_mt), &ty::ty_box(ref b_mt)) => {
do this.mts(a_mt, b_mt).and_then |mt| {
Ok(ty::mk_box(tcx, mt))
}
this.mts(a_mt, b_mt).and_then(|mt| Ok(ty::mk_box(tcx, mt)))
}
(&ty::ty_uniq(ref a_mt), &ty::ty_uniq(ref b_mt)) => {
do this.mts(a_mt, b_mt).and_then |mt| {
Ok(ty::mk_uniq(tcx, mt))
}
this.mts(a_mt, b_mt).and_then(|mt| Ok(ty::mk_uniq(tcx, mt)))
}
(&ty::ty_ptr(ref a_mt), &ty::ty_ptr(ref b_mt)) => {
do this.mts(a_mt, b_mt).and_then |mt| {
Ok(ty::mk_ptr(tcx, mt))
}
this.mts(a_mt, b_mt).and_then(|mt| Ok(ty::mk_ptr(tcx, mt)))
}
(&ty::ty_rptr(a_r, ref a_mt), &ty::ty_rptr(b_r, ref b_mt)) => {
@ -552,11 +534,11 @@ pub fn super_tys<C:Combine>(this: &C, a: ty::t, b: ty::t) -> cres<ty::t> {
}
(&ty::ty_evec(ref a_mt, vs_a), &ty::ty_evec(ref b_mt, vs_b)) => {
do this.mts(a_mt, b_mt).and_then |mt| {
do this.vstores(ty::terr_vec, vs_a, vs_b).and_then |vs| {
this.mts(a_mt, b_mt).and_then(|mt| {
this.vstores(ty::terr_vec, vs_a, vs_b).and_then(|vs| {
Ok(ty::mk_evec(tcx, mt, vs))
}
}
})
})
}
(&ty::ty_estr(vs_a), &ty::ty_estr(vs_b)) => {
@ -576,15 +558,15 @@ pub fn super_tys<C:Combine>(this: &C, a: ty::t, b: ty::t) -> cres<ty::t> {
}
(&ty::ty_bare_fn(ref a_fty), &ty::ty_bare_fn(ref b_fty)) => {
do this.bare_fn_tys(a_fty, b_fty).and_then |fty| {
this.bare_fn_tys(a_fty, b_fty).and_then(|fty| {
Ok(ty::mk_bare_fn(tcx, fty))
}
})
}
(&ty::ty_closure(ref a_fty), &ty::ty_closure(ref b_fty)) => {
do this.closure_tys(a_fty, b_fty).and_then |fty| {
this.closure_tys(a_fty, b_fty).and_then(|fty| {
Ok(ty::mk_closure(tcx, fty))
}
})
}
_ => Err(ty::terr_sorts(expected_found(this, a, b)))

View file

@ -230,9 +230,7 @@ impl CombineFieldsLatticeMethods for CombineFields {
(&Some(_), &None) => Ok((*a).clone()),
(&None, &Some(_)) => Ok((*b).clone()),
(&Some(ref v_a), &Some(ref v_b)) => {
do lattice_op(self, v_a, v_b).and_then |v| {
Ok(Some(v))
}
lattice_op(self, v_a, v_b).and_then(|v| Ok(Some(v)))
}
}
}
@ -471,9 +469,9 @@ pub fn lattice_vars<L:LatticeDir + Combine,
// Otherwise, we need to merge A and B into one variable. We can
// then use either variable as an upper bound:
let cf = this.combine_fields();
do cf.var_sub_var(a_vid.clone(), b_vid.clone()).then {
cf.var_sub_var(a_vid.clone(), b_vid.clone()).then(|| {
Ok(VarResult(a_vid.clone()))
}
})
}
pub fn lattice_var_and_t<L:LatticeDir + Combine,
@ -508,11 +506,11 @@ pub fn lattice_var_and_t<L:LatticeDir + Combine,
// and then return b.
debug!("bnd=None");
let a_bounds = this.with_bnd(a_bounds, (*b).clone());
do this.combine_fields().bnds(&a_bounds.lb, &a_bounds.ub).then {
this.combine_fields().bnds(&a_bounds.lb, &a_bounds.ub).then(|| {
this.infcx().set(a_id.clone(),
Root(a_bounds.clone(), nde_a.rank));
Ok((*b).clone())
}
})
}
}
}

View file

@ -290,10 +290,7 @@ pub fn common_supertype(cx: @mut InferCtxt,
values: Types(expected_found(a_is_expected, a, b))
};
let result = do cx.commit {
cx.lub(a_is_expected, trace).tys(a, b)
};
let result = cx.commit(|| cx.lub(a_is_expected, trace).tys(a, b));
match result {
Ok(t) => t,
Err(ref err) => {
@ -310,28 +307,28 @@ pub fn mk_subty(cx: @mut InferCtxt,
b: ty::t)
-> ures {
debug!("mk_subty({} <: {})", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.commit {
indent(|| {
cx.commit(|| {
let trace = TypeTrace {
origin: origin,
values: Types(expected_found(a_is_expected, a, b))
};
cx.sub(a_is_expected, trace).tys(a, b)
}
}.to_ures()
})
}).to_ures()
}
pub fn can_mk_subty(cx: @mut InferCtxt, a: ty::t, b: ty::t) -> ures {
debug!("can_mk_subty({} <: {})", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.probe {
indent(|| {
cx.probe(|| {
let trace = TypeTrace {
origin: Misc(codemap::dummy_sp()),
values: Types(expected_found(true, a, b))
};
cx.sub(true, trace).tys(a, b)
}
}.to_ures()
})
}).to_ures()
}
pub fn mk_subr(cx: @mut InferCtxt,
@ -352,16 +349,16 @@ pub fn mk_eqty(cx: @mut InferCtxt,
b: ty::t)
-> ures {
debug!("mk_eqty({} <: {})", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.commit {
indent(|| {
cx.commit(|| {
let trace = TypeTrace {
origin: origin,
values: Types(expected_found(a_is_expected, a, b))
};
let suber = cx.sub(a_is_expected, trace);
eq_tys(&suber, a, b)
}
}.to_ures()
})
}).to_ures()
}
pub fn mk_sub_trait_refs(cx: @mut InferCtxt,
@ -373,16 +370,16 @@ pub fn mk_sub_trait_refs(cx: @mut InferCtxt,
{
debug!("mk_sub_trait_refs({} <: {})",
a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.commit {
indent(|| {
cx.commit(|| {
let trace = TypeTrace {
origin: origin,
values: TraitRefs(expected_found(a_is_expected, a, b))
};
let suber = cx.sub(a_is_expected, trace);
suber.trait_refs(a, b)
}
}.to_ures()
})
}).to_ures()
}
fn expected_found<T>(a_is_expected: bool,
@ -402,28 +399,28 @@ pub fn mk_coercety(cx: @mut InferCtxt,
b: ty::t)
-> CoerceResult {
debug!("mk_coercety({} -> {})", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.commit {
indent(|| {
cx.commit(|| {
let trace = TypeTrace {
origin: origin,
values: Types(expected_found(a_is_expected, a, b))
};
Coerce(cx.combine_fields(a_is_expected, trace)).tys(a, b)
}
}
})
})
}
pub fn can_mk_coercety(cx: @mut InferCtxt, a: ty::t, b: ty::t) -> ures {
debug!("can_mk_coercety({} -> {})", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.probe {
indent(|| {
cx.probe(|| {
let trace = TypeTrace {
origin: Misc(codemap::dummy_sp()),
values: Types(expected_found(true, a, b))
};
Coerce(cx.combine_fields(true, trace)).tys(a, b)
}
}.to_ures()
})
}).to_ures()
}
// See comment on the type `resolve_state` below
@ -472,13 +469,13 @@ trait CresCompare<T> {
impl<T:Clone + Eq> CresCompare<T> for cres<T> {
fn compare(&self, t: T, f: || -> ty::type_err) -> cres<T> {
do (*self).clone().and_then |s| {
(*self).clone().and_then(|s| {
if s == t {
(*self).clone()
} else {
Err(f())
}
}
})
}
}
@ -553,14 +550,14 @@ impl InferCtxt {
assert!(!self.in_snapshot());
debug!("commit()");
do indent {
indent(|| {
let r = self.try(|| f());
self.ty_var_bindings.bindings.truncate(0);
self.int_var_bindings.bindings.truncate(0);
self.region_vars.commit();
r
}
})
}
/// Execute `f`, unroll bindings on failure
@ -581,12 +578,12 @@ impl InferCtxt {
/// Execute `f` then unroll any bindings it creates
pub fn probe<T,E>(@mut self, f: || -> Result<T,E>) -> Result<T,E> {
debug!("probe()");
do indent {
indent(|| {
let snapshot = self.start_snapshot();
let r = f();
self.rollback_to(&snapshot);
r
}
})
}
}
@ -737,12 +734,12 @@ impl InferCtxt {
err: Option<&ty::type_err>) {
debug!("hi! expected_ty = {:?}, actual_ty = {}", expected_ty, actual_ty);
let error_str = do err.map_default(~"") |t_err| {
let error_str = err.map_default(~"", |t_err| {
format!(" ({})", ty::type_err_to_str(self.tcx, t_err))
};
let resolved_expected = do expected_ty.map |e_ty| {
});
let resolved_expected = expected_ty.map(|e_ty| {
self.resolve_type_vars_if_possible(e_ty)
};
});
if !resolved_expected.map_default(false, |e| { ty::type_is_error(e) }) {
match resolved_expected {
None => self.tcx.sess.span_err(sp,

View file

@ -764,7 +764,7 @@ impl RegionVarBindings {
}
fn expansion(&self, var_data: &mut [VarData]) {
do self.iterate_until_fixed_point("Expansion") |constraint| {
self.iterate_until_fixed_point("Expansion", |constraint| {
match *constraint {
ConstrainRegSubVar(a_region, b_vid) => {
let b_data = &mut var_data[b_vid.to_uint()];
@ -788,7 +788,7 @@ impl RegionVarBindings {
false
}
}
}
})
}
fn expand_node(&self,
@ -829,7 +829,7 @@ impl RegionVarBindings {
fn contraction(&self,
var_data: &mut [VarData]) {
do self.iterate_until_fixed_point("Contraction") |constraint| {
self.iterate_until_fixed_point("Contraction", |constraint| {
match *constraint {
ConstrainRegSubVar(*) => {
// This is an expansion constraint. Ignore.
@ -853,7 +853,7 @@ impl RegionVarBindings {
false
}
}
}
})
}
fn contract_node(&self,
@ -1225,7 +1225,7 @@ impl RegionVarBindings {
debug!("process_edges(source_vid={:?}, dir={:?})", source_vid, dir);
let source_node_index = NodeIndex(source_vid.to_uint());
do graph.each_adjacent_edge(source_node_index, dir) |_, edge| {
graph.each_adjacent_edge(source_node_index, dir, |_, edge| {
match edge.data {
ConstrainVarSubVar(from_vid, to_vid) => {
let opp_vid =
@ -1246,7 +1246,7 @@ impl RegionVarBindings {
ConstrainRegSubReg(*) => {}
}
true
};
});
}
}

View file

@ -167,13 +167,13 @@ impl Combine for Sub {
// Second, we instantiate each bound region in the supertype with a
// fresh concrete region.
let (skol_map, _, b_sig) = {
do replace_bound_regions_in_fn_sig(self.infcx.tcx, None, b) |br| {
replace_bound_regions_in_fn_sig(self.infcx.tcx, None, b, |br| {
let skol = self.infcx.region_vars.new_skolemized(br);
debug!("Bound region {} skolemized to {:?}",
bound_region_to_str(self.infcx.tcx, "", false, br),
skol);
skol
}
})
};
debug!("a_sig={}", a_sig.inf_str(self.infcx));

View file

@ -330,10 +330,10 @@ impl get_and_find_region for isr_alist {
fn find(&self, br: ty::BoundRegion) -> Option<ty::Region> {
let mut ret = None;
do list::each(*self) |isr| {
list::each(*self, |isr| {
let (isr_br, isr_r) = *isr;
if isr_br == br { ret = Some(isr_r); false } else { true }
};
});
ret
}
}

View file

@ -79,9 +79,9 @@ impl Clean<Crate> for visit_ast::RustdocVisitor {
let cx = local_data::get(super::ctxtkey, |x| *x.unwrap());
let mut externs = HashMap::new();
do cstore::iter_crate_data(cx.sess.cstore) |n, meta| {
cstore::iter_crate_data(cx.sess.cstore, |n, meta| {
externs.insert(n, meta.clean());
}
});
Crate {
name: match maybe_meta {

View file

@ -170,11 +170,11 @@ fn path(w: &mut io::Writer, path: &clean::Path, print_all: bool,
}
// Did someone say rightward-drift?
do local_data::get(current_location_key) |loc| {
local_data::get(current_location_key, |loc| {
let loc = loc.unwrap();
do local_data::get(cache_key) |cache| {
do cache.unwrap().read |cache| {
local_data::get(cache_key, |cache| {
cache.unwrap().read(|cache| {
let abs_root = root(cache, loc.as_slice());
let rel_root = match path.segments[0].name.as_slice() {
"self" => Some(~"./"),
@ -238,9 +238,9 @@ fn path(w: &mut io::Writer, path: &clean::Path, print_all: bool,
}
}
write!(w, "{}", generics);
}
}
}
})
})
})
}
/// Helper to render type parameters
@ -262,11 +262,11 @@ impl fmt::Default for clean::Type {
fn fmt(g: &clean::Type, f: &mut fmt::Formatter) {
match *g {
clean::TyParamBinder(id) | clean::Generic(id) => {
do local_data::get(cache_key) |cache| {
do cache.unwrap().read |m| {
local_data::get(cache_key, |cache| {
cache.unwrap().read(|m| {
f.buf.write(m.typarams.get(&id).as_bytes());
}
}
})
})
}
clean::ResolvedPath{id, typarams: ref tp, path: ref path} => {
resolved_path(f.buf, id, path, false);

View file

@ -109,14 +109,14 @@ fn render(w: &mut io::Writer, s: &str) {
let markdown = sd_markdown_new(extensions, 16, &callbacks,
&options as *html_renderopt as *libc::c_void);
do s.as_imm_buf |data, len| {
s.as_imm_buf(|data, len| {
sd_markdown_render(ob, data, len as libc::size_t, markdown);
}
});
sd_markdown_free(markdown);
do vec::raw::buf_as_slice((*ob).data, (*ob).size as uint) |buf| {
vec::raw::buf_as_slice((*ob).data, (*ob).size as uint, |buf| {
w.write(buf);
}
});
bufrelease(ob);
}

View file

@ -320,15 +320,15 @@ fn write(dst: Path, contents: &str) {
/// Makes a directory on the filesystem, failing the task if an error occurs and
/// skipping if the directory already exists.
fn mkdir(path: &Path) {
do io::io_error::cond.trap(|err| {
io::io_error::cond.trap(|err| {
error!("Couldn't create directory `{}`: {}",
path.display(), err.desc);
fail!()
}).inside {
}).inside(|| {
if !path.is_dir() {
fs::mkdir(path, io::UserRWX);
}
}
})
}
/// Takes a path to a source file and cleans the path to it. This canonicalizes
@ -439,11 +439,11 @@ impl<'self> SourceCollector<'self> {
// Create the intermediate directories
let mut cur = self.dst.clone();
let mut root_path = ~"../../";
do clean_srcpath(p.dirname()) |component| {
clean_srcpath(p.dirname(), |component| {
cur.push(component);
mkdir(&cur);
root_path.push_str("../");
}
});
cur.push(p.filename().expect("source has no filename") + bytes!(".html"));
let mut w = BufferedWriter::new(File::create(&cur).unwrap());
@ -492,9 +492,9 @@ impl DocFolder for Cache {
clean::ImplItem(ref i) => {
match i.trait_ {
Some(clean::ResolvedPath{ id, _ }) => {
let v = do self.implementors.find_or_insert_with(id) |_|{
let v = self.implementors.find_or_insert_with(id, |_|{
~[]
};
});
match i.for_ {
clean::ResolvedPath{_} => {
v.unshift(PathType(i.for_.clone()));
@ -595,9 +595,9 @@ impl DocFolder for Cache {
clean::Item{ attrs, inner: clean::ImplItem(i), _ } => {
match i.for_ {
clean::ResolvedPath { id, _ } => {
let v = do self.impls.find_or_insert_with(id) |_| {
let v = self.impls.find_or_insert_with(id, |_| {
~[]
};
});
// extract relevant documentation for this impl
match attrs.move_iter().find(|a| {
match *a {
@ -726,16 +726,16 @@ impl Context {
Process(cx, item) => {
let mut cx = cx;
let item = Cell::new(item);
do (|| {
do cx.item(item.take()) |cx, item| {
(|| {
cx.item(item.take(), |cx, item| {
prog_chan.send(JobNew);
chan.send(Process(cx.clone(), item));
}
}).finally {
})
}).finally(|| {
// If we fail, everything else should still get
// completed
prog_chan.send(JobDone);
}
})
}
Die => break,
}
@ -803,7 +803,7 @@ impl Context {
clean::ModuleItem(*) => {
let name = item.name.get_ref().to_owned();
let item = Cell::new(item);
do self.recurse(name) |this| {
self.recurse(name, |this| {
let item = item.take();
let dst = this.dst.join("index.html");
render(File::create(&dst).unwrap(), this, &item, false);
@ -816,7 +816,7 @@ impl Context {
for item in m.items.move_iter() {
f(this, item);
}
}
})
}
// Things which don't have names (like impls) don't get special
@ -875,9 +875,9 @@ impl<'self> fmt::Default for Item<'self> {
if it.cx.include_sources {
let mut path = ~[];
do clean_srcpath(it.item.source.filename.as_bytes()) |component| {
clean_srcpath(it.item.source.filename.as_bytes(), |component| {
path.push(component.to_owned());
}
});
let href = if it.item.source.loline == it.item.source.hiline {
format!("{}", it.item.source.loline)
} else {
@ -1012,9 +1012,7 @@ fn item_module(w: &mut Writer, cx: &Context,
}
debug!("{:?}", indices);
do sort::quick_sort(indices) |&i1, &i2| {
lt(&items[i1], &items[i2], i1, i2)
}
sort::quick_sort(indices, |&i1, &i2| lt(&items[i1], &items[i2], i1, i2));
debug!("{:?}", indices);
let mut curty = "";
@ -1200,8 +1198,8 @@ fn item_trait(w: &mut Writer, it: &clean::Item, t: &clean::Trait) {
write!(w, "</div>");
}
do local_data::get(cache_key) |cache| {
do cache.unwrap().read |cache| {
local_data::get(cache_key, |cache| {
cache.unwrap().read(|cache| {
match cache.implementors.find(&it.id) {
Some(implementors) => {
write!(w, "
@ -1223,8 +1221,8 @@ fn item_trait(w: &mut Writer, it: &clean::Item, t: &clean::Trait) {
}
None => {}
}
}
}
})
})
}
fn render_method(w: &mut Writer, meth: &clean::Item, withlink: bool) {
@ -1412,9 +1410,9 @@ fn render_struct(w: &mut Writer, it: &clean::Item,
}
fn render_methods(w: &mut Writer, it: &clean::Item) {
do local_data::get(cache_key) |cache| {
local_data::get(cache_key, |cache| {
let cache = cache.unwrap();
do cache.read |c| {
cache.read(|c| {
match c.impls.find(&it.id) {
Some(v) => {
let mut non_trait = v.iter().filter(|p| {
@ -1442,8 +1440,8 @@ fn render_methods(w: &mut Writer, it: &clean::Item) {
}
None => {}
}
}
}
})
})
}
fn render_impl(w: &mut Writer, i: &clean::Impl, dox: &Option<~str>) {
@ -1492,8 +1490,8 @@ fn render_impl(w: &mut Writer, i: &clean::Impl, dox: &Option<~str>) {
None => continue,
Some(id) => id,
};
do local_data::get(cache_key) |cache| {
do cache.unwrap().read |cache| {
local_data::get(cache_key, |cache| {
cache.unwrap().read(|cache| {
match cache.traits.find(&trait_id) {
Some(t) => {
let name = meth.name.clone();
@ -1513,8 +1511,8 @@ fn render_impl(w: &mut Writer, i: &clean::Impl, dox: &Option<~str>) {
}
None => {}
}
}
}
})
})
}
// If we've implemented a trait, then also emit documentation for all
@ -1522,8 +1520,8 @@ fn render_impl(w: &mut Writer, i: &clean::Impl, dox: &Option<~str>) {
match trait_id {
None => {}
Some(id) => {
do local_data::get(cache_key) |cache| {
do cache.unwrap().read |cache| {
local_data::get(cache_key, |cache| {
cache.unwrap().read(|cache| {
match cache.traits.find(&id) {
Some(t) => {
for method in t.methods.iter() {
@ -1538,8 +1536,8 @@ fn render_impl(w: &mut Writer, i: &clean::Impl, dox: &Option<~str>) {
}
None => {}
}
}
}
})
})
}
}
write!(w, "</div>");

View file

@ -57,9 +57,9 @@ pub fn strip_private(crate: clean::Crate) -> plugins::PluginResult {
// This stripper collects all *retained* nodes.
let mut retained = HashSet::new();
let crate = Cell::new(crate);
let exported_items = do local_data::get(super::analysiskey) |analysis| {
let exported_items = local_data::get(super::analysiskey, |analysis| {
analysis.unwrap().exported_items.clone()
};
});
let mut crate = crate.take();
// strip all private items
@ -231,7 +231,7 @@ pub fn unindent(s: &str) -> ~str {
let lines = s.lines_any().collect::<~[&str]>();
let mut saw_first_line = false;
let mut saw_second_line = false;
let min_indent = do lines.iter().fold(uint::max_value) |min_indent, line| {
let min_indent = lines.iter().fold(uint::max_value, |min_indent, line| {
// After we see the first non-whitespace line, look at
// the line we have. If it is not whitespace, and therefore
@ -257,7 +257,7 @@ pub fn unindent(s: &str) -> ~str {
} else {
saw_first_line = true;
let mut spaces = 0;
do line.chars().all |char| {
line.chars().all(|char| {
// Only comparing against space because I wouldn't
// know what to do with mixed whitespace chars
if char == ' ' {
@ -266,22 +266,22 @@ pub fn unindent(s: &str) -> ~str {
} else {
false
}
};
});
num::min(min_indent, spaces)
}
};
});
match lines {
[head, .. tail] => {
let mut unindented = ~[ head.trim() ];
unindented.push_all(do tail.map |&line| {
unindented.push_all(tail.map(|&line| {
if line.is_whitespace() {
line
} else {
assert!(line.len() >= min_indent);
line.slice_from(min_indent)
}
});
}));
unindented.connect("\n")
}
[] => s.to_owned()

View file

@ -19,7 +19,7 @@ use std::io::fs;
pub fn list_installed_packages(f: |&PkgId| -> bool) -> bool {
let workspaces = rust_path();
for p in workspaces.iter() {
let binfiles = do io::ignore_io_error { fs::readdir(&p.join("bin")) };
let binfiles = io::ignore_io_error(|| fs::readdir(&p.join("bin")));
for exec in binfiles.iter() {
// FIXME (#9639): This needs to handle non-utf8 paths
match exec.filestem_str() {
@ -31,7 +31,7 @@ pub fn list_installed_packages(f: |&PkgId| -> bool) -> bool {
}
}
}
let libfiles = do io::ignore_io_error { fs::readdir(&p.join("lib")) };
let libfiles = io::ignore_io_error(|| fs::readdir(&p.join("lib")));
for lib in libfiles.iter() {
debug!("Full name: {}", lib.display());
match has_library(lib) {
@ -42,10 +42,10 @@ pub fn list_installed_packages(f: |&PkgId| -> bool) -> bool {
let rel_p = lib.path_relative_from(&parent).unwrap();
debug!("Rel: {}", rel_p.display());
let rel_path = rel_p.join(basename);
do rel_path.display().with_str |s| {
rel_path.display().with_str(|s| {
debug!("Rel name: {}", s);
f(&PkgId::new(s));
}
});
}
None => ()
}
@ -55,7 +55,7 @@ pub fn list_installed_packages(f: |&PkgId| -> bool) -> bool {
}
pub fn has_library(p: &Path) -> Option<~str> {
let files = do io::ignore_io_error { fs::readdir(p) };
let files = io::ignore_io_error(|| fs::readdir(p));
for path in files.iter() {
if path.extension_str() == Some(os::consts::DLL_EXTENSION) {
let stuff : &str = path.filestem_str().expect("has_library: weird path");
@ -71,13 +71,13 @@ pub fn has_library(p: &Path) -> Option<~str> {
pub fn package_is_installed(p: &PkgId) -> bool {
let mut is_installed = false;
do list_installed_packages() |installed| {
list_installed_packages(|installed| {
if installed == p {
is_installed = true;
false
} else {
true
}
};
});
is_installed
}

View file

@ -255,7 +255,7 @@ impl CtxMethods for BuildContext {
// argument
let pkgid = PkgId::new(args[0].clone());
let mut dest_ws = default_workspace();
do each_pkg_parent_workspace(&self.context, &pkgid) |workspace| {
each_pkg_parent_workspace(&self.context, &pkgid, |workspace| {
debug!("found pkg {} in workspace {}, trying to build",
pkgid.to_str(), workspace.display());
dest_ws = determine_destination(os::getcwd(),
@ -265,7 +265,7 @@ impl CtxMethods for BuildContext {
false, pkgid.clone());
self.build(&mut pkg_src, what);
true
};
});
// n.b. If this builds multiple packages, it only returns the workspace for
// the last one. The whole building-multiple-packages-with-the-same-ID is weird
// anyway and there are no tests for it, so maybe take it out
@ -353,12 +353,10 @@ impl CtxMethods for BuildContext {
}
"list" => {
println("Installed packages:");
do installed_packages::list_installed_packages |pkg_id| {
do pkg_id.path.display().with_str |s| {
println(s);
}
installed_packages::list_installed_packages(|pkg_id| {
pkg_id.path.display().with_str(|s| println(s));
true
};
});
}
"prefer" => {
if args.len() < 1 {
@ -402,12 +400,12 @@ impl CtxMethods for BuildContext {
else {
let rp = rust_path();
assert!(!rp.is_empty());
do each_pkg_parent_workspace(&self.context, &pkgid) |workspace| {
each_pkg_parent_workspace(&self.context, &pkgid, |workspace| {
path_util::uninstall_package_from(workspace, &pkgid);
note(format!("Uninstalled package {} (was installed in {})",
pkgid.to_str(), workspace.display()));
true
};
});
}
}
"unprefer" => {
@ -471,19 +469,19 @@ impl CtxMethods for BuildContext {
// Build the package script if needed
let script_build = format!("build_package_script({})",
package_script_path.display());
let pkg_exe = do self.workcache_context.with_prep(script_build) |prep| {
let pkg_exe = self.workcache_context.with_prep(script_build, |prep| {
let subsysroot = sysroot.clone();
let psp = package_script_path.clone();
let ws = workspace.clone();
let pid = pkgid.clone();
do prep.exec |exec| {
prep.exec(|exec| {
let mut pscript = PkgScript::parse(subsysroot.clone(),
psp.clone(),
&ws,
&pid);
pscript.build_custom(exec)
}
};
})
});
// We always *run* the package script
let (cfgs, hook_result) = PkgScript::run_custom(&Path::new(pkg_exe), &sysroot);
debug!("Command return code = {:?}", hook_result);
@ -620,7 +618,7 @@ impl CtxMethods for BuildContext {
target_exec.display(), target_lib,
maybe_executable, maybe_library);
do self.workcache_context.with_prep(id.install_tag()) |prep| {
self.workcache_context.with_prep(id.install_tag(), |prep| {
for ee in maybe_executable.iter() {
// FIXME (#9639): This needs to handle non-utf8 paths
prep.declare_input("binary",
@ -638,7 +636,7 @@ impl CtxMethods for BuildContext {
let sub_target_ex = target_exec.clone();
let sub_target_lib = target_lib.clone();
let sub_build_inputs = build_inputs.to_owned();
do prep.exec |exe_thing| {
prep.exec(|exe_thing| {
let mut outputs = ~[];
// Declare all the *inputs* to the declared input too, as inputs
for executable in subex.iter() {
@ -684,8 +682,8 @@ impl CtxMethods for BuildContext {
outputs.push(target_lib.as_str().unwrap().to_owned());
}
outputs
}
}
})
})
}
fn prefer(&self, _id: &str, _vers: Option<~str>) {

View file

@ -87,10 +87,10 @@ impl PkgId {
pub fn hash(&self) -> ~str {
// FIXME (#9639): hash should take a &[u8] so we can hash the real path
do self.path.display().with_str |s| {
self.path.display().with_str(|s| {
let vers = self.version.to_str();
format!("{}-{}-{}", s, hash(s + vers), vers)
}
})
}
pub fn short_name_with_version(&self) -> ~str {

View file

@ -291,11 +291,9 @@ impl PkgSrc {
let mut failed = false;
do cond.trap(|_| {
cond.trap(|_| {
failed = true;
}).inside {
git_clone_url(url, &clone_target, &pkgid.version);
};
}).inside(|| git_clone_url(url, &clone_target, &pkgid.version));
if failed {
return None;
@ -400,7 +398,7 @@ impl PkgSrc {
debug!("build_crates: compiling {}", path.display());
let cfgs = crate.cfgs + cfgs;
do ctx.workcache_context.with_prep(crate_tag(&path)) |prep| {
ctx.workcache_context.with_prep(crate_tag(&path), |prep| {
debug!("Building crate {}, declaring it as an input", path.display());
// FIXME (#9639): This needs to handle non-utf8 paths
prep.declare_input("file", path.as_str().unwrap(),
@ -414,7 +412,7 @@ impl PkgSrc {
let sub_deps = deps.clone();
let inputs = inputs_to_discover.map(|&(ref k, ref p)|
(k.clone(), p.as_str().unwrap().to_owned()));
do prep.exec |exec| {
prep.exec(|exec| {
for &(ref kind, ref p) in inputs.iter() {
let pth = Path::new(p.clone());
exec.discover_input(*kind, *p, if *kind == ~"file" {
@ -445,8 +443,8 @@ impl PkgSrc {
let result = result.as_ref().map(|p|p.as_str().unwrap());
debug!("Result of compiling {} was {}", subpath.display(), result.to_str());
result.to_str()
}
};
})
});
}
}

View file

@ -76,7 +76,7 @@ pub fn workspace_contains_package_id_(pkgid: &PkgId, workspace: &Path,
if p.is_dir() {
if p == src_dir.join(&pkgid.path) || {
let pf = p.filename_str();
do pf.iter().any |&g| {
pf.iter().any(|&g| {
match split_version_general(g, '-') {
None => false,
Some((ref might_match, ref vers)) => {
@ -84,7 +84,7 @@ pub fn workspace_contains_package_id_(pkgid: &PkgId, workspace: &Path,
&& (pkgid.version == *vers || pkgid.version == NoVersion)
}
}
}
})
} {
found = Some(p.clone());
}
@ -215,7 +215,7 @@ pub fn system_library(sysroot: &Path, lib_name: &str) -> Option<Path> {
fn library_in(short_name: &str, version: &Version, dir_to_search: &Path) -> Option<Path> {
debug!("Listing directory {}", dir_to_search.display());
let dir_contents = do io::ignore_io_error { fs::readdir(dir_to_search) };
let dir_contents = io::ignore_io_error(|| fs::readdir(dir_to_search));
debug!("dir has {:?} entries", dir_contents.len());
let lib_prefix = format!("{}{}", os::consts::DLL_PREFIX, short_name);
@ -225,14 +225,14 @@ fn library_in(short_name: &str, version: &Version, dir_to_search: &Path) -> Opti
// Find a filename that matches the pattern:
// (lib_prefix)-hash-(version)(lib_suffix)
let mut libraries = do dir_contents.iter().filter |p| {
let mut libraries = dir_contents.iter().filter(|p| {
let extension = p.extension_str();
debug!("p = {}, p's extension is {:?}", p.display(), extension);
match extension {
None => false,
Some(ref s) => lib_filetype == *s
}
};
});
let mut result_filename = None;
for p_path in libraries {
@ -277,11 +277,11 @@ fn library_in(short_name: &str, version: &Version, dir_to_search: &Path) -> Opti
// Return the filename that matches, which we now know exists
// (if result_filename != None)
let abs_path = do result_filename.map |result_filename| {
let abs_path = result_filename.map(|result_filename| {
let absolute_path = dir_to_search.join(&result_filename);
debug!("result_filename = {}", absolute_path.display());
absolute_path
};
});
abs_path
}
@ -463,19 +463,14 @@ pub fn versionize(p: &Path, v: &Version) -> Path {
#[cfg(target_os = "win32")]
pub fn chmod_read_only(p: &Path) -> bool {
unsafe {
do p.with_c_str |src_buf| {
libc::chmod(src_buf, S_IRUSR as libc::c_int) == 0 as libc::c_int
}
p.with_c_str(|src_buf| libc::chmod(src_buf, S_IRUSR as libc::c_int) == 0 as libc::c_int)
}
}
#[cfg(not(target_os = "win32"))]
pub fn chmod_read_only(p: &Path) -> bool {
unsafe {
do p.with_c_str |src_buf| {
libc::chmod(src_buf, S_IRUSR as libc::mode_t) == 0
as libc::c_int
}
p.with_c_str(|src_buf| libc::chmod(src_buf, S_IRUSR as libc::mode_t) == 0 as libc::c_int)
}
}

View file

@ -51,11 +51,11 @@ fn read_u32v_be(dst: &mut[u32], input: &[u8]) {
unsafe {
let mut x: *mut i32 = transmute(dst.unsafe_mut_ref(0));
let mut y: *i32 = transmute(input.unsafe_ref(0));
do dst.len().times() {
dst.len().times(|| {
*x = to_be32(*y);
x = x.offset(1);
y = y.offset(1);
}
})
}
}
@ -613,9 +613,7 @@ mod bench {
pub fn sha1_10(bh: & mut BenchHarness) {
let mut sh = Sha1::new();
let bytes = [1u8, ..10];
do bh.iter {
sh.input(bytes);
}
bh.iter(|| sh.input(bytes));
bh.bytes = bytes.len() as u64;
}
@ -623,9 +621,7 @@ mod bench {
pub fn sha1_1k(bh: & mut BenchHarness) {
let mut sh = Sha1::new();
let bytes = [1u8, ..1024];
do bh.iter {
sh.input(bytes);
}
bh.iter(|| sh.input(bytes));
bh.bytes = bytes.len() as u64;
}
@ -633,9 +629,7 @@ mod bench {
pub fn sha1_64k(bh: & mut BenchHarness) {
let mut sh = Sha1::new();
let bytes = [1u8, ..65536];
do bh.iter {
sh.input(bytes);
}
bh.iter(|| sh.input(bytes));
bh.bytes = bytes.len() as u64;
}
}

View file

@ -384,9 +384,7 @@ fn executable_exists(repo: &Path, short_name: &str) -> bool {
fn test_executable_exists(repo: &Path, short_name: &str) -> bool {
debug!("test_executable_exists: repo = {}, short_name = {}", repo.display(), short_name);
let exec = built_test_in_workspace(&PkgId::new(short_name), repo);
do exec.map_default(false) |exec| {
exec.exists() && is_rwx(&exec)
}
exec.map_default(false, |exec| exec.exists() && is_rwx(&exec));
}
fn remove_executable_file(p: &PkgId, workspace: &Path) {
@ -544,12 +542,12 @@ fn frob_source_file(workspace: &Path, pkgid: &PkgId, filename: &str) {
debug!("Frobbed? {:?}", maybe_p);
match maybe_p {
Some(ref p) => {
do io::io_error::cond.trap(|e| {
io::io_error::cond.trap(|e| {
cond.raise((p.clone(), format!("Bad path: {}", e.desc)));
}).inside {
}).inside(|| {
let mut w = File::open_mode(p, io::Append, io::Write);
w.write(bytes!("/* hi */\n"));
}
})
}
None => fail!("frob_source_file failed to find a source file in {}",
pkg_src_dir.display())
@ -744,26 +742,26 @@ fn test_package_ids_must_be_relative_path_like() {
assert!("github.com/catamorphism/test-pkg-0.1" ==
PkgId::new("github.com/catamorphism/test-pkg").to_str());
do cond.trap(|(p, e)| {
cond.trap(|(p, e)| {
assert!(p.filename().is_none())
assert!("0-length pkgid" == e);
whatever.clone()
}).inside {
}).inside(|| {
let x = PkgId::new("");
assert_eq!(~"foo-0.1", x.to_str());
}
});
do cond.trap(|(p, e)| {
cond.trap(|(p, e)| {
let abs = os::make_absolute(&Path::new("foo/bar/quux"));
assert_eq!(p, abs);
assert!("absolute pkgid" == e);
whatever.clone()
}).inside {
}).inside(|| {
let zp = os::make_absolute(&Path::new("foo/bar/quux"));
// FIXME (#9639): This needs to handle non-utf8 paths
let z = PkgId::new(zp.as_str().unwrap());
assert_eq!(~"foo-0.1", z.to_str());
}
})
}

View file

@ -44,13 +44,13 @@ pub fn main() {
let my_workspace = api::my_workspace(&context.context, "cdep");
let foo_c_name = my_workspace.join_many(["src", "cdep-0.1", "foo.c"]);
let out_lib_path = do context.workcache_context.with_prep("foo.c") |prep| {
let out_lib_path = context.workcache_context.with_prep("foo.c", |prep| {
let sub_cx = context.context.clone();
debug!("foo_c_name = {}", foo_c_name.display());
prep.declare_input("file",
foo_c_name.as_str().unwrap().to_owned(),
digest_file_with_date(&foo_c_name));
let out_path = do prep.exec |exec| {
let out_path = prep.exec(|exec| {
let out_path = api::build_library_in_workspace(exec,
&mut sub_cx.clone(),
"cdep",
@ -60,9 +60,9 @@ pub fn main() {
"foo");
let out_p = Path::new(out_path);
out_p.as_str().unwrap().to_owned()
};
});
out_path
};
});
let out_lib_path = Path::new(out_lib_path);
debug!("out_lib_path = {}", out_lib_path.display());
context.add_library_path(out_lib_path.dir_path());

View file

@ -81,21 +81,19 @@ fn fold_mod(_ctx: @mut ReadyCtx, m: &ast::_mod, fold: &CrateSetup)
-> ast::_mod {
fn strip_main(item: @ast::item) -> @ast::item {
@ast::item {
attrs: do item.attrs.iter().filter_map |attr| {
attrs: item.attrs.iter().filter_map(|attr| {
if "main" != attr.name() {
Some(*attr)
} else {
None
}
}.collect(),
}).collect(),
.. (*item).clone()
}
}
fold::noop_fold_mod(&ast::_mod {
items: do m.items.map |item| {
strip_main(*item)
},
items: m.items.map(|item| strip_main(*item)),
.. (*m).clone()
}, fold)
}
@ -493,21 +491,21 @@ impl<'self> Visitor<()> for ViewItemVisitor<'self> {
// and the `PkgSrc` constructor will detect that;
// or else it's already in a workspace and we'll build into that
// workspace
let pkg_src = do cond.trap(|_| {
let pkg_src = cond.trap(|_| {
// Nonexistent package? Then print a better error
error(format!("Package {} depends on {}, but I don't know \
how to find it",
self.parent.path.display(),
pkg_id.path.display()));
fail!()
}).inside {
}).inside(|| {
PkgSrc::new(source_workspace.clone(),
dest_workspace.clone(),
// Use the rust_path_hack to search for dependencies iff
// we were already using it
self.context.context.use_rust_path_hack,
pkg_id.clone())
};
});
let (outputs_disc, inputs_disc) =
self.context.install(
pkg_src,