
Convert old-style for loops to new-style

Most could use the each method, but because of the hack used to
disambiguate old- and new-style loops, some had to use vec::each.

(This hack will go away soon.)

Issue #1619
Marijn Haverbeke 2012-04-06 20:01:43 +02:00
parent 9c88e5ef5a
commit c902eafa14
99 changed files with 623 additions and 625 deletions
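
Note on the syntax involved, for readers unfamiliar with this pre-1.0 dialect: below is a minimal sketch of the loop forms this commit converts between (placeholder names xs, x and total; illustrative only, this syntax will not compile with a modern rustc).

    // Old-style loop: iterate a sequence directly, optionally typing the variable.
    for x: int in xs { total += x; }

    // New-style loop: the each method called with a block; used in most files below.
    for xs.each {|x| total += x; }

    // Where the parser hack that disambiguates the two forms gets in the way,
    // the free function is called explicitly instead (e.g. in for_each_package below).
    for vec::each(xs) {|x| total += x; }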

View file

@@ -93,7 +93,7 @@ fn load_link(mis: [@ast::meta_item]) -> (option<str>,
let mut name = none;
let mut vers = none;
let mut uuid = none;
-for a: @ast::meta_item in mis {
+for mis.each {|a|
alt a.node {
ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) {
alt v {
@@ -128,7 +128,7 @@ fn load_pkg(filename: str) -> option<pkg> {
let mut sigs = none;
let mut crate_type = none;
-for a in c.node.attrs {
+for c.node.attrs.each {|a|
alt a.node.value.node {
ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) {
alt v {
@@ -273,7 +273,7 @@ fn load_one_source_package(&src: source, p: map::hashmap<str, json::json>) {
let mut tags = [];
alt p.find("tags") {
some(json::list(js)) {
-for j in js {
+for js.each {|j|
alt j {
json::string(_j) { vec::grow(tags, 1u, _j); }
_ { }
@@ -313,7 +313,7 @@ fn load_source_packages(&c: cargo, &src: source) {
let pkgstr = io::read_whole_file_str(pkgfile);
alt json::from_str(result::get(pkgstr)) {
ok(json::list(js)) {
-for _j: json::json in js {
+for js.each {|_j|
alt _j {
json::dict(_p) {
load_one_source_package(src, _p);
@@ -423,7 +423,7 @@ fn configure(opts: options) -> cargo {
fn for_each_package(c: cargo, b: fn(source, package)) {
c.sources.values({ |v|
-for p in copy v.packages {
+for vec::each(copy v.packages) {|p|
b(v, p);
}
})
@@ -432,7 +432,7 @@ fn for_each_package(c: cargo, b: fn(source, package)) {
// Runs all programs in directory <buildpath>
fn run_programs(buildpath: str) {
let newv = os::list_dir_path(buildpath);
-for ct: str in newv {
+for newv.each {|ct|
run::run_program(ct, []);
}
}
@@ -470,7 +470,7 @@ fn install_one_crate(c: cargo, path: str, cf: str) {
};
let newv = os::list_dir_path(buildpath);
let exec_suffix = os::exe_suffix();
-for ct: str in newv {
+for newv.each {|ct|
if (exec_suffix != "" && str::ends_with(ct, exec_suffix)) ||
(exec_suffix == "" && !str::starts_with(path::basename(ct),
"lib")) {
@@ -528,7 +528,7 @@ fn install_source(c: cargo, path: str) {
fail "This doesn't look like a rust package (no .rc files).";
}
-for cf: str in cratefiles {
+for cratefiles.each {|cf|
let p = load_pkg(cf);
alt p {
none { cont; }
@@ -618,7 +618,8 @@ fn install_uuid(c: cargo, wd: str, uuid: str) {
ret;
}
error("Found multiple packages:");
-for (s,p) in ps {
+for ps.each {|elt|
+let (s,p) = elt;
info(" " + s.name + "/" + p.uuid + " (" + p.name + ")");
}
}
@@ -639,7 +640,8 @@ fn install_named(c: cargo, wd: str, name: str) {
ret;
}
error("Found multiple packages:");
-for (s,p) in ps {
+for ps.each {|elt|
+let (s,p) = elt;
info(" " + s.name + "/" + p.uuid + " (" + p.name + ")");
}
}

View file

@@ -89,10 +89,9 @@ fn verify(root: str, data: str, sig: str, keyfp: str) -> bool {
let p = gpg(["--homedir", path, "--with-fingerprint", "--verify", sig,
data]);
let res = "Primary key fingerprint: " + keyfp;
-for line in str::split_char(p.err, '\n') {
-if line == res {
-ret true;
-}
-}
-ret false;
+let mut rslt = false;
+for str::split_char(p.err, '\n').each {|line|
+if line == res { rslt = true; }
+}
+ret rslt;
}

View file

@@ -135,7 +135,7 @@ fn test_opts(config: config) -> test::test_opts {
fn make_tests(config: config) -> [test::test_desc] {
#debug("making tests from %s", config.src_base);
let mut tests = [];
-for file: str in os::list_dir_path(config.src_base) {
+for os::list_dir_path(config.src_base).each {|file|
let file = file;
#debug("inspecting file %s", file);
if is_test(config, file) {
@@ -154,11 +154,11 @@ fn is_test(config: config, testfile: str) -> bool {
let mut valid = false;
-for ext in valid_extensions {
+for valid_extensions.each {|ext|
if str::ends_with(name, ext) { valid = true; }
}
-for pre in invalid_prefixes {
+for invalid_prefixes.each {|pre|
if str::starts_with(name, pre) { valid = false; }
}

View file

@@ -196,7 +196,7 @@ fn check_error_patterns(props: test_props,
let mut next_err_idx = 0u;
let mut next_err_pat = props.error_patterns[next_err_idx];
-for line: str in str::split_char(procres.stderr, '\n') {
+for str::split_char(procres.stderr, '\n').each {|line|
if str::contains(line, next_err_pat) {
#debug("found error pattern %s", next_err_pat);
next_err_idx += 1u;
@@ -215,7 +215,7 @@ fn check_error_patterns(props: test_props,
fatal_procres(#fmt["error pattern '%s' not found!",
missing_patterns[0]], procres);
} else {
-for pattern: str in missing_patterns {
+for missing_patterns.each {|pattern|
error(#fmt["error pattern '%s' not found!", pattern]);
}
fatal_procres("multiple error patterns not found", procres);
@@ -244,7 +244,7 @@ fn check_expected_errors(expected_errors: [errors::expected_error],
// filename:line1:col1: line2:col2: *warning:* msg
// where line1:col1: is the starting point, line2:col2:
// is the ending point, and * represents ANSI color codes.
-for line: str in str::split_char(procres.stderr, '\n') {
+for str::split_char(procres.stderr, '\n').each {|line|
let mut was_expected = false;
for vec::eachi(expected_errors) {|i, ee|
if !found_flags[i] {

View file

@@ -25,7 +25,7 @@ fn find_rust_files(&files: [str], path: str) {
} else if os::path_is_dir(path)
&& !contains(path, "compile-fail")
&& !contains(path, "build") {
-for p in os::list_dir_path(path) {
+for os::list_dir_path(path).each {|p|
find_rust_files(files, p);
}
}
@@ -439,7 +439,7 @@ fn content_is_dangerous_to_run(code: str) -> bool {
"unsafe",
"log"]; // python --> rust pipe deadlock?
-for p: str in dangerous_patterns { if contains(code, p) { ret true; } }
+for dangerous_patterns.each {|p| if contains(code, p) { ret true; } }
ret false;
}
@@ -447,7 +447,7 @@ fn content_is_dangerous_to_compile(code: str) -> bool {
let dangerous_patterns =
["xfail-test"];
-for p: str in dangerous_patterns { if contains(code, p) { ret true; } }
+for dangerous_patterns.each {|p| if contains(code, p) { ret true; } }
ret false;
}
@@ -462,7 +462,7 @@ fn content_might_not_converge(code: str) -> bool {
"\n\n\n\n\n" // https://github.com/mozilla/rust/issues/850
];
-for p: str in confusing_patterns { if contains(code, p) { ret true; } }
+for confusing_patterns.each {|p| if contains(code, p) { ret true; } }
ret false;
}
@@ -475,7 +475,7 @@ fn file_might_not_converge(filename: str) -> bool {
];
-for f in confusing_files { if contains(filename, f) { ret true; } }
+for confusing_files.each {|f| if contains(filename, f) { ret true; } }
ret false;
}
@@ -509,7 +509,7 @@ fn check_roundtrip_convergence(code: @str, maxIters: uint) {
fn check_convergence(files: [str]) {
#error("pp convergence tests: %u files", vec::len(files));
-for file in files {
+for files.each {|file|
if !file_might_not_converge(file) {
let s = @result::get(io::read_whole_file_str(file));
if !content_might_not_converge(*s) {
@@ -522,7 +522,7 @@ fn check_convergence(files: [str]) {
}
fn check_variants(files: [str], cx: context) {
-for file in files {
+for files.each {|file|
if cx.mode == tm_converge && file_might_not_converge(file) {
#error("Skipping convergence test based on file_might_not_converge");
cont;

View file

@@ -62,7 +62,7 @@ export zip;
export swap;
export reverse;
export reversed;
-export iter, each, eachi;
+export iter, iter_between, each, eachi;
export iter2;
export iteri;
export riter;

View file

@@ -142,11 +142,11 @@ fn float_ty_to_str(t: float_ty) -> str {
fn is_exported(i: ident, m: _mod) -> bool {
let mut local = false;
let mut parent_enum : option<ident> = none;
-for it: @item in m.items {
+for m.items.each {|it|
if it.ident == i { local = true; }
alt it.node {
item_enum(variants, _) {
-for v: variant in variants {
+for variants.each {|v|
if v.node.name == i {
local = true;
parent_enum = some(it.ident);
@@ -158,11 +158,11 @@ fn is_exported(i: ident, m: _mod) -> bool {
if local { break; }
}
let mut has_explicit_exports = false;
-for vi: @view_item in m.view_items {
+for m.view_items.each {|vi|
alt vi.node {
view_item_export(vps) {
has_explicit_exports = true;
-for vp in vps {
+for vps.each {|vp|
alt vp.node {
ast::view_path_simple(id, _, _) {
if id == i { ret true; }
@@ -177,7 +177,7 @@ fn is_exported(i: ident, m: _mod) -> bool {
ast::view_path_list(path, ids, _) {
if vec::len(*path) == 1u {
if i == path[0] { ret true; }
-for id in ids {
+for ids.each {|id|
if id.node.name == i { ret true; }
}
} else {
@@ -278,14 +278,14 @@ fn public_methods(ms: [@method]) -> [@method] {
fn split_class_items(cs: [@class_member]) -> ([ivar], [@method]) {
let mut vs = [], ms = [];
-for c in cs {
+for cs.each {|c|
alt c.node {
instance_var(i, t, cm, id, privacy) {
vs += [{ident: i, ty: t, cm: cm, id: id, privacy: privacy}];
}
class_method(m) { ms += [m]; }
}
-}
+};
(vs, ms)
}

View file

@@ -46,7 +46,7 @@ fn find_linkage_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
fn find_linkage_attrs(attrs: [ast::attribute]) -> [ast::attribute] {
let mut found = [];
-for attr: ast::attribute in find_attrs_by_name(attrs, "link") {
+for find_attrs_by_name(attrs, "link").each {|attr|
alt attr.node.value.node {
ast::meta_list(_, _) { found += [attr] }
_ { #debug("ignoring link attribute that has incorrect type"); }
@@ -150,7 +150,7 @@ fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value }
// Get the meta_items from inside a vector of attributes
fn attr_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
let mut mitems = [];
-for a: ast::attribute in attrs { mitems += [attr_meta(a)]; }
+for attrs.each {|a| mitems += [attr_meta(a)]; }
ret mitems;
}
@@ -178,7 +178,7 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
fn contains(haystack: [@ast::meta_item], needle: @ast::meta_item) -> bool {
#debug("looking for %s",
print::pprust::meta_item_to_str(*needle));
-for item: @ast::meta_item in haystack {
+for haystack.each {|item|
#debug("looking in %s",
print::pprust::meta_item_to_str(*item));
if eq(item, needle) { #debug("found it!"); ret true; }
@@ -207,12 +207,12 @@ fn sort_meta_items(items: [@ast::meta_item]) -> [@ast::meta_item] {
// This is sort of stupid here, converting to a vec of mutables and back
let mut v: [mut @ast::meta_item] = [mut];
-for mi: @ast::meta_item in items { v += [mut mi]; }
+for items.each {|mi| v += [mut mi]; }
std::sort::quick_sort(lteq, v);
let mut v2: [@ast::meta_item] = [];
-for mi: @ast::meta_item in v { v2 += [mi]; }
+for v.each {|mi| v2 += [mi]; }
ret v2;
}
@@ -231,7 +231,7 @@ fn remove_meta_items_by_name(items: [@ast::meta_item], name: str) ->
fn require_unique_names(diagnostic: span_handler,
metas: [@ast::meta_item]) {
let map = map::str_hash();
-for meta: @ast::meta_item in metas {
+for metas.each {|meta|
let name = get_meta_item_name(meta);
if map.contains_key(name) {
diagnostic.span_fatal(meta.span,

View file

@@ -187,7 +187,7 @@ fn get_snippet(cm: codemap::codemap, fidx: uint, lo: uint, hi: uint) -> str
}
fn get_filemap(cm: codemap, filename: str) -> filemap {
-for fm: filemap in cm.files { if fm.name == filename { ret fm; } }
+for cm.files.each {|fm| if fm.name == filename { ret fm; } }
//XXjdm the following triggers a mismatched type bug
// (or expected function, found _|_)
fail; // ("asking for " + filename + " which we don't know about");

View file

@@ -201,7 +201,7 @@ fn highlight_lines(cm: codemap::codemap, sp: span,
elided = true;
}
// Print the offending lines
-for line: uint in display_lines {
+for display_lines.each {|line|
io::stderr().write_str(#fmt["%s:%u ", fm.name, line + 1u]);
let s = codemap::get_line(fm, line as int) + "\n";
io::stderr().write_str(s);

View file

@@ -68,7 +68,7 @@ fn mk_rec_e(cx: ext_ctxt, sp: span,
fields: [{ident: ast::ident, ex: @ast::expr}]) ->
@ast::expr {
let mut astfields: [ast::field] = [];
-for field: {ident: ast::ident, ex: @ast::expr} in fields {
+for fields.each {|field|
let ident = field.ident;
let val = field.ex;
let astfield =

View file

@@ -11,7 +11,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
}
};
let mut res: ast::ident = "";
-for e: @ast::expr in args {
+for args.each {|e|
res += expr_to_ident(cx, e, "expected an ident");
}

View file

@@ -57,7 +57,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: conv) -> @ast::expr {
fn make_flags(cx: ext_ctxt, sp: span, flags: [flag]) -> @ast::expr {
let mut flagexprs: [@ast::expr] = [];
-for f: flag in flags {
+for flags.each {|f|
let mut fstr;
alt f {
flag_left_justify { fstr = "flag_left_justify"; }
@@ -141,7 +141,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
option::none { }
_ { cx.span_unimpl(sp, unsupported); }
}
-for f: flag in cnv.flags {
+for cnv.flags.each {|f|
alt f {
flag_left_justify { }
flag_sign_always {
@@ -197,7 +197,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
some(p) { log(debug, "param: " + int::to_str(p, 10u)); }
_ { #debug("param: none"); }
}
-for f: flag in c.flags {
+for c.flags.each {|f|
alt f {
flag_left_justify { #debug("flag: left justify"); }
flag_left_zero_pad { #debug("flag: left zero pad"); }
@@ -252,7 +252,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
let mut n = 0u;
let mut tmp_expr = mk_str(cx, sp, "");
let nargs = vec::len::<@ast::expr>(args);
-for pc: piece in pieces {
+for pieces.each {|pc|
alt pc {
piece_string(s) {
let s_expr = mk_str(cx, fmt_sp, s);

View file

@@ -73,7 +73,7 @@ fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
{pre: [@expr], rep: option<@expr>, post: [@expr]} {
let mut idx: uint = 0u;
let mut res = none;
-for elt: @expr in elts {
+for elts.each {|elt|
alt elt.node {
expr_mac(m) {
alt m.node {
@@ -102,7 +102,7 @@ fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: [T]) ->
option<[U]> {
let mut res = [];
-for elem: T in v {
+for v.each {|elem|
alt f(elem) { none { ret none; } some(fv) { res += [fv]; } }
}
ret some(res);
@@ -163,7 +163,7 @@ selectors. */
fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> {
let res = str_hash::<arb_depth<matchable>>();
//need to do this first, to check vec lengths.
-for sel: selector in b.literal_ast_matchers {
+for b.literal_ast_matchers.each {|sel|
alt sel(match_expr(e)) { none { ret none; } _ { } }
}
let mut never_mind: bool = false;
@@ -209,7 +209,7 @@ fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr {
fn follow(m: arb_depth<matchable>, idx_path: @mut [uint]) ->
arb_depth<matchable> {
let mut res: arb_depth<matchable> = m;
-for idx: uint in *idx_path {
+for vec::each(*idx_path) {|idx|
alt res {
leaf(_) { ret res;/* end of the line */ }
seq(new_ms, _) { res = new_ms[idx]; }
@@ -677,7 +677,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
let mut macro_name: option<str> = none;
let mut clauses: [@clause] = [];
-for arg: @expr in args {
+for args.each {|arg|
alt arg.node {
expr_vec(elts, mutbl) {
if vec::len(elts) != 2u {
@@ -753,7 +753,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
fn generic_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body, clauses: [@clause]) -> @expr {
let arg = get_mac_arg(cx,sp,arg);
-for c: @clause in clauses {
+for clauses.each {|c|
alt use_selectors_to_bind(c.params, arg) {
some(bindings) { ret transcribe(cx, bindings, c.body); }
none { cont; }

View file

@@ -338,7 +338,7 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
}
pat_rec(fields, etc) {
let mut fs = [];
-for f: ast::field_pat in fields {
+for fields.each {|f|
fs += [{ident: f.ident, pat: fld.fold_pat(f.pat)}];
}
pat_rec(fs, etc)

View file

@@ -13,7 +13,7 @@ type ctx =
fn eval_crate_directives(cx: ctx, cdirs: [@ast::crate_directive], prefix: str,
&view_items: [@ast::view_item],
&items: [@ast::item]) {
-for sub_cdir: @ast::crate_directive in cdirs {
+for cdirs.each {|sub_cdir|
eval_crate_directive(cx, sub_cdir, prefix, view_items, items);
}
}

View file

@@ -143,12 +143,13 @@ fn new_parser(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader,
// interpreted as a specific kind of statement, which would be confusing.
fn bad_expr_word_table() -> hashmap<str, ()> {
let words = str_hash();
-for word in ["alt", "assert", "be", "break", "check", "claim",
+let keys = ["alt", "assert", "be", "break", "check", "claim",
"class", "const", "cont", "copy", "crust", "do", "else",
"enum", "export", "fail", "fn", "for", "if", "iface",
"impl", "import", "let", "log", "loop", "mod", "mut",
"mut", "native", "pure", "resource", "ret", "trait",
-"type", "unchecked", "unsafe", "while", "new"] {
+"type", "unchecked", "unsafe", "while", "new"];
+for keys.each {|word|
words.insert(word, ());
}
words
@@ -312,7 +313,7 @@ fn parse_ty_field(p: parser) -> ast::ty_field {
// otherwise, fail
fn ident_index(p: parser, args: [ast::arg], i: ast::ident) -> uint {
let mut j = 0u;
-for a: ast::arg in args { if a.ident == i { ret j; } j += 1u; }
+for args.each {|a| if a.ident == i { ret j; } j += 1u; }
p.fatal("unbound variable `" + i + "` in constraint arg");
}
@@ -1230,7 +1231,7 @@ fn parse_more_binops(p: parser, plhs: pexpr, min_prec: int) ->
let peeked = p.token;
if peeked == token::BINOP(token::OR) &&
p.restriction == RESTRICT_NO_BAR_OP { ret lhs; }
-for cur: op_spec in *p.precs {
+for vec::each(*p.precs) {|cur|
if cur.prec > min_prec && cur.tok == peeked {
p.bump();
let expr = parse_prefix_expr(p);
@@ -1414,7 +1415,7 @@ fn parse_for_expr(p: parser) -> @ast::expr {
_ { false }
};
if new_style {
-let call = parse_expr(p);
+let call = parse_expr_res(p, RESTRICT_STMT_EXPR);
alt call.node {
ast::expr_call(f, args, true) {
let b_arg = vec::last(args);
@@ -1428,6 +1429,7 @@ fn parse_for_expr(p: parser) -> @ast::expr {
}
}
} else {
+p.warn("old-style for");
let decl = parse_local(p, false, false);
expect_word(p, "in");
let seq = parse_expr(p);
@@ -2328,7 +2330,7 @@ fn parse_item_enum(p: parser, attrs: [ast::attribute]) -> @ast::item {
let arg_tys = parse_seq(token::LPAREN, token::RPAREN,
seq_sep(token::COMMA),
{|p| parse_ty(p, false)}, p);
-for ty in arg_tys.node {
+for arg_tys.node.each {|ty|
args += [{ty: ty, id: p.get_id()}];
}
} else if eat(p, token::EQ) {

View file

@@ -269,7 +269,7 @@ fn synth_comment(s: ps, text: str) {
fn commasep<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN)) {
box(s, 0u, b);
let mut first = true;
-for elt: IN in elts {
+for elts.each {|elt|
if first { first = false; } else { word_space(s, ","); }
op(s, elt);
}
@@ -282,7 +282,7 @@ fn commasep_cmnt<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN),
box(s, 0u, b);
let len = vec::len::<IN>(elts);
let mut i = 0u;
-for elt: IN in elts {
+for elts.each {|elt|
maybe_print_comment(s, get_span(elt).hi);
op(s, elt);
i += 1u;
@@ -303,18 +303,18 @@ fn commasep_exprs(s: ps, b: breaks, exprs: [@ast::expr]) {
fn print_mod(s: ps, _mod: ast::_mod, attrs: [ast::attribute]) {
print_inner_attributes(s, attrs);
-for vitem: @ast::view_item in _mod.view_items {
+for _mod.view_items.each {|vitem|
print_view_item(s, vitem);
}
-for item: @ast::item in _mod.items { print_item(s, item); }
+for _mod.items.each {|item| print_item(s, item); }
}
fn print_native_mod(s: ps, nmod: ast::native_mod, attrs: [ast::attribute]) {
print_inner_attributes(s, attrs);
-for vitem: @ast::view_item in nmod.view_items {
+for nmod.view_items.each {|vitem|
print_view_item(s, vitem);
}
-for item: @ast::native_item in nmod.items { print_native_item(s, item); }
+for nmod.items.each {|item| print_native_item(s, item); }
}
fn print_region(s: ps, region: ast::region) {
@@ -476,7 +476,7 @@ fn print_item(s: ps, &&item: @ast::item) {
end(s);
} else {
bopen(s);
-for v: ast::variant in variants {
+for variants.each {|v|
space_if_not_bol(s);
maybe_print_comment(s, v.span.lo);
print_outer_attributes(s, v.node.attrs);
@@ -500,7 +500,7 @@ fn print_item(s: ps, &&item: @ast::item) {
print_fn_args_and_ret(s, ctor.node.dec);
space(s.s);
print_block(s, ctor.node.body);
-for ci in items {
+for items.each {|ci|
/*
FIXME: collect all private items and print them
in a single "priv" section
@@ -556,7 +556,7 @@ fn print_item(s: ps, &&item: @ast::item) {
print_type(s, ty);
space(s.s);
bopen(s);
-for meth in methods {
+for methods.each {|meth|
print_method(s, meth);
}
bclose(s, item.span);
@@ -567,7 +567,7 @@ fn print_item(s: ps, &&item: @ast::item) {
print_type_params(s, tps);
word(s.s, " ");
bopen(s);
-for meth in methods { print_ty_method(s, meth); }
+for methods.each {|meth| print_ty_method(s, meth); }
bclose(s, item.span);
}
ast::item_res(decl, tps, body, dt_id, ct_id) {
@@ -629,7 +629,7 @@ fn print_method(s: ps, meth: @ast::method) {
fn print_outer_attributes(s: ps, attrs: [ast::attribute]) {
let mut count = 0;
-for attr: ast::attribute in attrs {
+for attrs.each {|attr|
alt attr.node.style {
ast::attr_outer { print_attribute(s, attr); count += 1; }
_ {/* fallthrough */ }
@@ -640,7 +640,7 @@ fn print_outer_attributes(s: ps, attrs: [ast::attribute]) {
fn print_inner_attributes(s: ps, attrs: [ast::attribute]) {
let mut count = 0;
-for attr: ast::attribute in attrs {
+for attrs.each {|attr|
alt attr.node.style {
ast::attr_inner {
print_attribute(s, attr);
@@ -716,8 +716,8 @@ fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type,
print_inner_attributes(s, attrs);
-for vi in blk.node.view_items { print_view_item(s, vi); }
-for st: @ast::stmt in blk.node.stmts {
+for blk.node.view_items.each {|vi| print_view_item(s, vi); }
+for blk.node.stmts.each {|st|
print_stmt(s, *st);
}
alt blk.node.expr {
@@ -957,12 +957,12 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
print_maybe_parens_discrim(s, expr);
space(s.s);
bopen(s);
-for arm: ast::arm in arms {
+for arms.each {|arm|
space(s.s);
cbox(s, alt_indent_unit);
ibox(s, 0u);
let mut first = true;
-for p: @ast::pat in arm.pats {
+for arm.pats.each {|p|
if first {
first = false;
} else { space(s.s); word_space(s, "|"); }
@@ -1189,7 +1189,7 @@ fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) {
maybe_print_comment(s, path.span.lo);
if path.node.global { word(s.s, "::"); }
let mut first = true;
-for id: ast::ident in path.node.idents {
+for path.node.idents.each {|id|
if first { first = false; } else { word(s.s, "::"); }
word(s.s, id);
}
@@ -1359,7 +1359,7 @@ fn print_arg_mode(s: ps, m: ast::mode) {
fn print_bounds(s: ps, bounds: @[ast::ty_param_bound]) {
if vec::len(*bounds) > 0u {
word(s.s, ":");
-for bound in *bounds {
+for vec::each(*bounds) {|bound|
nbsp(s);
alt bound {
ast::bound_copy { word(s.s, "copy"); }
@@ -1403,7 +1403,7 @@ fn print_meta_item(s: ps, &&item: @ast::meta_item) {
fn print_simple_path(s: ps, path: ast::simple_path) {
let mut first = true;
-for id in path {
+for path.each {|id|
if first { first = false; } else { word(s.s, "::"); }
word(s.s, id);
}
@@ -1472,7 +1472,7 @@ fn print_view_item(s: ps, item: @ast::view_item) {
// FIXME: The fact that this builds up the table anew for every call is
// not good. Eventually, table should be a const.
fn operator_prec(op: ast::binop) -> int {
-for spec: parse::parser::op_spec in *parse::parser::prec_table() {
+for vec::each(*parse::parser::prec_table()) {|spec|
if spec.op == op { ret spec.prec; }
}
core::unreachable();
@@ -1667,7 +1667,7 @@ fn print_comment(s: ps, cmnt: lexer::cmnt) {
}
lexer::isolated {
pprust::hardbreak_if_not_bol(s);
-for line: str in cmnt.lines {
+for cmnt.lines.each {|line|
// Don't print empty lines because they will end up as trailing
// whitespace
if str::is_not_empty(line) { word(s.s, line); }
@@ -1681,7 +1681,7 @@ fn print_comment(s: ps, cmnt: lexer::cmnt) {
hardbreak(s.s);
} else {
ibox(s, 0u);
-for line: str in cmnt.lines {
+for cmnt.lines.each {|line|
if str::is_not_empty(line) { word(s.s, line); }
hardbreak(s.s);
}
@@ -1752,7 +1752,7 @@ fn constr_args_to_str<T>(f: fn@(T) -> str, args: [@ast::sp_constr_arg<T>]) ->
str {
let mut comma = false;
let mut s = "(";
-for a: @ast::sp_constr_arg<T> in args {
+for args.each {|a|
if comma { s += ", "; } else { comma = true; }
s += constr_arg_to_str::<T>(f, a.node);
}
@@ -1795,7 +1795,7 @@ fn ty_constr_to_str(&&c: @ast::ty_constr) -> str {
fn constrs_str<T>(constrs: [T], elt: fn(T) -> str) -> str {
let mut s = "", colon = true;
-for c in constrs {
+for constrs.each {|c|
if colon { s += " : "; colon = false; } else { s += ", "; }
s += elt(c);
}

View file

@@ -84,7 +84,7 @@ fn visit_crate_directive<E>(cd: @crate_directive, e: E, v: vt<E>) {
alt cd.node {
cdir_src_mod(_, _) { }
cdir_dir_mod(_, cdirs, _) {
-for cdir: @crate_directive in cdirs {
+for cdirs.each {|cdir|
visit_crate_directive(cdir, e, v);
}
}
@@ -94,8 +94,8 @@ fn visit_crate_directive<E>(cd: @crate_directive, e: E, v: vt<E>) {
}
fn visit_mod<E>(m: _mod, _sp: span, _id: node_id, e: E, v: vt<E>) {
-for vi: @view_item in m.view_items { v.visit_view_item(vi, e, v); }
-for i: @item in m.items { v.visit_item(i, e, v); }
+for m.view_items.each {|vi| v.visit_view_item(vi, e, v); }
+for m.items.each {|i| v.visit_item(i, e, v); }
}
fn visit_view_item<E>(_vi: @view_item, _e: E, _v: vt<E>) { }
@@ -114,8 +114,8 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
}
item_mod(m) { v.visit_mod(m, i.span, i.id, e, v); }
item_native_mod(nm) {
-for vi: @view_item in nm.view_items { v.visit_view_item(vi, e, v); }
-for ni: @native_item in nm.items { v.visit_native_item(ni, e, v); }
+for nm.view_items.each {|vi| v.visit_view_item(vi, e, v); }
+for nm.items.each {|ni| v.visit_native_item(ni, e, v); }
}
item_ty(t, tps) { v.visit_ty(t, e, v); v.visit_ty_params(tps, e, v); }
item_res(decl, tps, body, dtor_id, _) {
@@ -124,21 +124,21 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
}
item_enum(variants, tps) {
v.visit_ty_params(tps, e, v);
-for vr: variant in variants {
-for va: variant_arg in vr.node.args { v.visit_ty(va.ty, e, v); }
+for variants.each {|vr|
+for vr.node.args.each {|va| v.visit_ty(va.ty, e, v); }
}
}
item_impl(tps, ifce, ty, methods) {
v.visit_ty_params(tps, e, v);
alt ifce { some(ty) { v.visit_ty(ty, e, v); } none {} }
v.visit_ty(ty, e, v);
-for m in methods {
+for methods.each {|m|
visit_method_helper(m, e, v)
}
}
item_class(tps, members, ctor) {
v.visit_ty_params(tps, e, v);
-for m in members {
+for members.each {|m|
v.visit_class_item(m, e, v);
}
// make up a fake fn so as to call visit_fn on the ctor
@@ -147,8 +147,8 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
}
item_iface(tps, methods) {
v.visit_ty_params(tps, e, v);
-for m in methods {
-for a in m.decl.inputs { v.visit_ty(a.ty, e, v); }
+for methods.each {|m|
+for m.decl.inputs.each {|a| v.visit_ty(a.ty, e, v); }
v.visit_ty(m.decl.output, e, v);
}
}
@@ -176,12 +176,12 @@ fn visit_ty<E>(t: @ty, e: E, v: vt<E>) {
ty_ptr(mt) { v.visit_ty(mt.ty, e, v); }
ty_rptr(_, mt) { v.visit_ty(mt.ty, e, v); }
ty_rec(flds) {
-for f: ty_field in flds { v.visit_ty(f.node.mt.ty, e, v); }
+for flds.each {|f| v.visit_ty(f.node.mt.ty, e, v); }
}
-ty_tup(ts) { for tt in ts { v.visit_ty(tt, e, v); } }
+ty_tup(ts) { for ts.each {|tt| v.visit_ty(tt, e, v); } }
ty_fn(_, decl) {
-for a in decl.inputs { v.visit_ty(a.ty, e, v); }
-for c: @constr in decl.constraints {
+for decl.inputs.each {|a| v.visit_ty(a.ty, e, v); }
+for decl.constraints.each {|c|
v.visit_constr(c.node.path, c.span, c.node.id, e, v);
}
v.visit_ty(decl.output, e, v);
@@ -189,7 +189,7 @@ fn visit_ty<E>(t: @ty, e: E, v: vt<E>) {
ty_path(p, _) { visit_path(p, e, v); }
ty_constr(t, cs) {
v.visit_ty(t, e, v);
-for tc: @spanned<constr_general_<@path, node_id>> in cs {
+for cs.each {|tc|
v.visit_constr(tc.node.path, tc.span, tc.node.id, e, v);
}
}
@@ -207,19 +207,19 @@ fn visit_constr<E>(_operator: @path, _sp: span, _id: node_id, _e: E,
}
fn visit_path<E>(p: @path, e: E, v: vt<E>) {
-for tp: @ty in p.node.types { v.visit_ty(tp, e, v); }
+for p.node.types.each {|tp| v.visit_ty(tp, e, v); }
}
fn visit_pat<E>(p: @pat, e: E, v: vt<E>) {
alt p.node {
pat_enum(path, children) {
visit_path(path, e, v);
-for child: @pat in children { v.visit_pat(child, e, v); }
+for children.each {|child| v.visit_pat(child, e, v); }
}
pat_rec(fields, _) {
-for f: field_pat in fields { v.visit_pat(f.pat, e, v); }
+for fields.each {|f| v.visit_pat(f.pat, e, v); }
}
-pat_tup(elts) { for elt in elts { v.visit_pat(elt, e, v); } }
+pat_tup(elts) { for elts.each {|elt| v.visit_pat(elt, e, v); } }
pat_box(inner) | pat_uniq(inner) {
v.visit_pat(inner, e, v);
}
@@ -243,8 +243,8 @@ fn visit_native_item<E>(ni: @native_item, e: E, v: vt<E>) {
}
fn visit_ty_params<E>(tps: [ty_param], e: E, v: vt<E>) {
-for tp in tps {
-for bound in *tp.bounds {
+for tps.each {|tp|
+for vec::each(*tp.bounds) {|bound|
alt bound {
bound_iface(t) { v.visit_ty(t, e, v); }
bound_copy | bound_send { }
@@ -254,8 +254,8 @@ fn visit_ty_params<E>(tps: [ty_param], e: E, v: vt<E>) {
}
fn visit_fn_decl<E>(fd: fn_decl, e: E, v: vt<E>) {
-for a: arg in fd.inputs { v.visit_ty(a.ty, e, v); }
-for c: @constr in fd.constraints {
+for fd.inputs.each {|a| v.visit_ty(a.ty, e, v); }
+for fd.constraints.each {|c|
v.visit_constr(c.node.path, c.span, c.node.id, e, v);
}
v.visit_ty(fd.output, e, v);
@@ -278,8 +278,8 @@ fn visit_fn<E>(fk: fn_kind, decl: fn_decl, body: blk, _sp: span,
}
fn visit_block<E>(b: ast::blk, e: E, v: vt<E>) {
-for vi in b.node.view_items { v.visit_view_item(vi, e, v); }
-for s in b.node.stmts { v.visit_stmt(s, e, v); }
+for b.node.view_items.each {|vi| v.visit_view_item(vi, e, v); }
+for b.node.stmts.each {|s| v.visit_stmt(s, e, v); }
visit_expr_opt(b.node.expr, e, v);
}
@@ -294,7 +294,7 @@ fn visit_stmt<E>(s: @stmt, e: E, v: vt<E>) {
fn visit_decl<E>(d: @decl, e: E, v: vt<E>) {
alt d.node {
decl_local(locs) {
-for loc in locs { v.visit_local(loc, e, v); }
+for locs.each {|loc| v.visit_local(loc, e, v); }
}
decl_item(it) { v.visit_item(it, e, v); }
}
@@ -305,7 +305,7 @@ fn visit_expr_opt<E>(eo: option<@expr>, e: E, v: vt<E>) {
}
fn visit_exprs<E>(exprs: [@expr], e: E, v: vt<E>) {
-for ex: @expr in exprs { v.visit_expr(ex, e, v); }
+for exprs.each {|ex| v.visit_expr(ex, e, v); }
}
fn visit_mac<E>(m: mac, e: E, v: vt<E>) {
@@ -328,17 +328,17 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
}
expr_vec(es, _) { visit_exprs(es, e, v); }
expr_rec(flds, base) {
-for f: field in flds { v.visit_expr(f.node.expr, e, v); }
+for flds.each {|f| v.visit_expr(f.node.expr, e, v); }
visit_expr_opt(base, e, v);
}
-expr_tup(elts) { for el in elts { v.visit_expr(el, e, v); } }
+expr_tup(elts) { for elts.each {|el| v.visit_expr(el, e, v); } }
expr_call(callee, args, _) {
visit_exprs(args, e, v);
v.visit_expr(callee, e, v);
}
expr_bind(callee, args) {
v.visit_expr(callee, e, v);
-for eo: option<@expr> in args { visit_expr_opt(eo, e, v); }
+for args.each {|eo| visit_expr_opt(eo, e, v); }
}
expr_binary(_, a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
expr_addr_of(_, x) | expr_unary(_, x) | expr_loop_body(x) |
@@ -367,7 +367,7 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
expr_do_while(b, x) { v.visit_block(b, e, v); v.visit_expr(x, e, v); }
expr_alt(x, arms, _) {
v.visit_expr(x, e, v);
-for a: arm in arms { v.visit_arm(a, e, v); }
+for arms.each {|a| v.visit_arm(a, e, v); }
}
expr_fn(proto, decl, body, _) {
v.visit_fn(fk_anon(proto), decl, body, ex.span, ex.id, e, v);
@@ -386,7 +386,7 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
}
expr_field(x, _, tys) {
v.visit_expr(x, e, v);
-for tp in tys { v.visit_ty(tp, e, v); }
+for tys.each {|tp| v.visit_ty(tp, e, v); }
}
expr_index(a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
expr_path(p) { visit_path(p, e, v); }
@@ -404,7 +404,7 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
}
fn visit_arm<E>(a: arm, e: E, v: vt<E>) {
-for p: @pat in a.pats { v.visit_pat(p, e, v); }
+for a.pats.each {|p| v.visit_pat(p, e, v); }
visit_expr_opt(a.guard, e, v);
v.visit_block(a.body, e, v);
}

View file

@@ -67,7 +67,7 @@ fn max_key<T: copy>(m: smallintmap<T>) -> uint {
impl <V: copy> of map::map<uint, V> for smallintmap<V> {
fn size() -> uint {
let mut sz = 0u;
-for item in self.v {
+for vec::each(self.v) {|item|
alt item { some(_) { sz += 1u; } _ {} }
}
sz
@@ -90,11 +90,11 @@ impl <V: copy> of map::map<uint, V> for smallintmap<V> {
fn find(&&key: uint) -> option<V> { find(self, key) }
fn rehash() { fail }
fn items(it: fn(&&uint, V)) {
-let mut idx = 0u;
-for item in self.v {
-alt item {
+let mut idx = 0u, l = self.v.len();
+while idx < l {
+alt self.v[idx] {
some(elt) {
-it(idx, elt);
+it(idx, copy elt);
}
none { }
}
@@ -102,16 +102,14 @@ impl <V: copy> of map::map<uint, V> for smallintmap<V> {
}
}
fn keys(it: fn(&&uint)) {
-let mut idx = 0u;
-for item in self.v {
-if item != none { it(idx); }
+let mut idx = 0u, l = self.v.len();
+while idx < l {
+if self.v[idx] != none { it(idx); }
idx += 1u;
}
}
fn values(it: fn(V)) {
-for item in self.v {
-alt item { some(elt) { it(elt); } _ {} }
-}
+self.items({|_i, v| it(v)});
}
}

View file

@@ -303,7 +303,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: str,
let mut cmh_items: [@ast::meta_item] = [];
let linkage_metas = attr::find_linkage_metas(c.node.attrs);
attr::require_unique_names(sess.diagnostic(), linkage_metas);
-for meta: @ast::meta_item in linkage_metas {
+for linkage_metas.each {|meta|
if attr::get_meta_item_name(meta) == "name" {
alt attr::get_meta_item_value_str(meta) {
some(v) { name = some(v); }
@@ -334,7 +334,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: str,
let cmh_items = attr::sort_meta_items(metas.cmh_items);
sha.reset();
-for m_: @ast::meta_item in cmh_items {
+for cmh_items.each {|m_|
let m = m_;
alt m.node {
ast::meta_name_value(key, value) {
@@ -349,7 +349,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: str,
}
}
-for dh in dep_hashes {
+for dep_hashes.each {|dh|
sha.input_str(len_and_str(dh));
}
@@ -475,7 +475,7 @@ fn mangle(ss: path) -> str {
let mut n = "_ZN"; // Begin name-sequence.
-for s in ss {
+for ss.each {|s|
alt s { path_name(s) | path_mod(s) {
let sani = sanitize(s);
n += #fmt["%u%s", str::len(sani), sani];
@@ -583,7 +583,7 @@ fn link_binary(sess: session,
} else { lib_cmd = "-shared"; }
let cstore = sess.cstore;
-for cratepath: str in cstore::get_used_crate_files(cstore) {
+for cstore::get_used_crate_files(cstore).each {|cratepath|
if str::ends_with(cratepath, ".rlib") {
cc_args += [cratepath];
cont;
@@ -596,10 +596,10 @@ fn link_binary(sess: session,
}
let ula = cstore::get_used_link_args(cstore);
-for arg: str in ula { cc_args += [arg]; }
+for ula.each {|arg| cc_args += [arg]; }
let used_libs = cstore::get_used_libraries(cstore);
-for l: str in used_libs { cc_args += ["-l" + l]; }
+for used_libs.each {|l| cc_args += ["-l" + l]; }
if sess.building_library {
cc_args += [lib_cmd];

View file

@@ -55,7 +55,7 @@ fn get_rpaths(os: session::os, cwd: path::path, sysroot: path::path,
#debug("sysroot: %s", sysroot);
#debug("output: %s", output);
#debug("libs:");
-for libpath in libs {
+for libs.each {|libpath|
#debug(" %s", libpath);
}
#debug("target_triple: %s", target_triple);
@@ -74,7 +74,7 @@ fn get_rpaths(os: session::os, cwd: path::path, sysroot: path::path,
fn log_rpaths(desc: str, rpaths: [str]) {
#debug("%s rpaths:", desc);
-for rpath in rpaths {
+for rpaths.each {|rpath|
#debug(" %s", rpath);
}
}
@@ -179,7 +179,7 @@ fn get_install_prefix_rpath(cwd: path::path, target_triple: str) -> str {
fn minimize_rpaths(rpaths: [str]) -> [str] {
let set = map::str_hash::<()>();
let mut minimized = [];
-for rpath in rpaths {
+for rpaths.each {|rpath|
if !set.contains_key(rpath) {
minimized += [rpath];
set.insert(rpath, ());

View file

@@ -35,7 +35,7 @@ fn declare_upcalls(targ_cfg: @session::config,
tys: [TypeRef], rv: TypeRef) ->
ValueRef {
let mut arg_tys: [TypeRef] = [];
-for t: TypeRef in tys { arg_tys += [t]; }
+for tys.each {|t| arg_tys += [t]; }
let fn_ty = T_fn(arg_tys, rv);
ret base::decl_cdecl_fn(llmod, prefix + name, fn_ty);
}

View file

@@ -67,7 +67,7 @@ fn parse_cfgspecs(cfgspecs: [str]) -> ast::crate_cfg {
// FIXME: It would be nice to use the parser to parse all varieties of
// meta_item here. At the moment we just support the meta_word variant.
let mut words = [];
-for s: str in cfgspecs { words += [attr::mk_word_item(s)]; }
+for cfgspecs.each {|s| words += [attr::mk_word_item(s)]; }
ret words;
}

View file

@@ -118,7 +118,7 @@ fn metas_in_cfg(cfg: ast::crate_cfg, metas: [@ast::meta_item]) -> bool {
let has_cfg_metas = vec::len(cfg_metas) > 0u;
if !has_cfg_metas { ret true; }
-for cfg_mi: @ast::meta_item in cfg_metas {
+for cfg_metas.each {|cfg_mi|
if attr::contains(cfg, cfg_mi) { ret true; }
} }

View file

@@ -268,7 +268,7 @@ fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty {
fn mk_test_desc_vec(cx: test_ctxt) -> @ast::expr {
#debug("building test vector from %u tests", vec::len(cx.testfns));
let mut descs = [];
-for test: test in cx.testfns {
+for cx.testfns.each {|test|
let test_ = test; // Satisfy alias analysis
descs += [mk_test_desc_rec(cx, test_)];
}

View file

@@ -967,7 +967,7 @@ fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) ->
fn tys_str(names: type_names, outer: [TypeRef], tys: [TypeRef]) -> str {
let mut s: str = "";
let mut first: bool = true;
-for t: TypeRef in tys {
+for tys.each {|t|
if first { first = false; } else { s += ", "; }
s += type_to_str_inner(names, outer, t);
}
@@ -1019,7 +1019,7 @@ fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) ->
}
12 {
let mut i: uint = 0u;
-for tout: TypeRef in outer0 {
+for outer0.each {|tout|
i += 1u;
if tout as int == ty as int {
let n: uint = vec::len::<TypeRef>(outer0) - i;

View file

@@ -142,7 +142,7 @@ fn visit_ids(item: ast::inlined_item, vfn: fn@(ast::node_id)) {
vfn(i.id);
alt i.node {
ast::item_res(_, _, _, d_id, c_id) { vfn(d_id); vfn(c_id); }
-ast::item_enum(vs, _) { for v in vs { vfn(v.node.id); } }
+ast::item_enum(vs, _) { for vs.each {|v| vfn(v.node.id); } }
_ {}
}
},

View file

@@ -111,7 +111,7 @@ fn hash_node_id(&&node_id: int) -> uint { ret 177573u ^ (node_id as uint); }
fn hash_path(&&s: str) -> uint {
let mut h = 5381u;
-for ch: u8 in str::bytes(s) { h = (h << 5u) + h ^ (ch as uint); }
+for str::each(s) {|ch| h = (h << 5u) + h ^ (ch as uint); }
ret h;
}

View file

@@ -117,7 +117,7 @@ fn visit_item(e: env, i: @ast::item) {
e.sess.span_fatal(i.span, "library '" + native_name +
"' already added: can't specify link_args.");
}
-for a: ast::attribute in link_args {
+for link_args.each {|a|
alt attr::get_meta_item_value_str(attr::attr_meta(a)) {
some(linkarg) {
cstore::add_used_link_args(cstore, linkarg);
@@ -153,11 +153,11 @@ fn metadata_matches(extern_metas: [@ast::meta_item],
vec::len(local_metas), vec::len(extern_metas));
#debug("crate metadata:");
-for have: @ast::meta_item in extern_metas {
+for extern_metas.each {|have|
#debug(" %s", pprust::meta_item_to_str(*have));
}
-for needed: @ast::meta_item in local_metas {
+for local_metas.each {|needed|
#debug("looking for %s", pprust::meta_item_to_str(*needed));
if !attr::contains(extern_metas, needed) {
#debug("missing %s", pprust::meta_item_to_str(*needed));
@@ -375,7 +375,7 @@ fn resolve_crate_deps(e: env, cdata: @[u8]) -> cstore::cnum_map {
// The map from crate numbers in the crate we're resolving to local crate
// numbers
let cnum_map = int_hash::<ast::crate_num>();
-for dep: decoder::crate_dep in decoder::get_crate_deps(cdata) {
+for decoder::get_crate_deps(cdata).each {|dep|
let extrn_cnum = dep.cnum;
let cname = dep.ident;
// FIXME: We really need to know the linkage metas of our transitive

View file

@ -41,7 +41,8 @@ fn lookup_defs(cstore: cstore::cstore, cnum: ast::crate_num,
path: [ast::ident]) -> [ast::def] { path: [ast::ident]) -> [ast::def] {
let mut result = []; let mut result = [];
#debug("lookup_defs: path = %? cnum = %?", path, cnum); #debug("lookup_defs: path = %? cnum = %?", path, cnum);
for (c, data, def) in resolve_path(cstore, cnum, path) { for resolve_path(cstore, cnum, path).each {|elt|
let (c, data, def) = elt;
result += [decoder::lookup_def(c, data, def)]; result += [decoder::lookup_def(c, data, def)];
} }
ret result; ret result;
@ -64,7 +65,7 @@ fn resolve_path(cstore: cstore::cstore, cnum: ast::crate_num,
#debug("resolve_path %s in crates[%d]:%s", #debug("resolve_path %s in crates[%d]:%s",
str::connect(path, "::"), cnum, cm.name); str::connect(path, "::"), cnum, cm.name);
let mut result = []; let mut result = [];
for def in decoder::resolve_path(path, cm.data) { for decoder::resolve_path(path, cm.data).each {|def|
if def.crate == ast::local_crate { if def.crate == ast::local_crate {
result += [(cnum, cm.data, def)]; result += [(cnum, cm.data, def)];
} else { } else {
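
One wrinkle shows up in `lookup_defs` above: the block form takes a single parameter, so a tuple pattern that the old header destructured in place is now bound whole and unpacked with an explicit `let` on the first line of the block. A small illustrative sketch; `print_pairs` and its names are hypothetical, not from the patch:

    fn print_pairs(pairs: [(str, str)]) {
        // old:  for (name, val) in pairs { ... }
        // new:  bind the whole element, then destructure it inside the block
        for pairs.each {|elt|
            let (name, val) = elt;
            #debug("%s: %s", name, val);
        }
    }
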

View file

@ -154,7 +154,7 @@ fn get_dep_hashes(cstore: cstore) -> [str] {
} }
let sorted = std::sort::merge_sort(lteq, result); let sorted = std::sort::merge_sort(lteq, result);
#debug("sorted:"); #debug("sorted:");
for x in sorted { for sorted.each {|x|
#debug(" hash[%s]: %s", x.name, x.hash); #debug(" hash[%s]: %s", x.name, x.hash);
} }
fn mapper(ch: crate_hash) -> str { ret ch.hash; } fn mapper(ch: crate_hash) -> str { ret ch.hash; }

View file

@ -204,7 +204,7 @@ fn resolve_path(path: [ast::ident], data: @[u8]) -> [ast::def_id] {
let eqer = bind eq_item(_, s); let eqer = bind eq_item(_, s);
let mut result: [ast::def_id] = []; let mut result: [ast::def_id] = [];
#debug("resolve_path: looking up %s", s); #debug("resolve_path: looking up %s", s);
for doc: ebml::doc in lookup_hash(paths, eqer, hash_path(s)) { for lookup_hash(paths, eqer, hash_path(s)).each {|doc|
let did_doc = ebml::get_doc(doc, tag_def_id); let did_doc = ebml::get_doc(doc, tag_def_id);
result += [parse_def_id(ebml::doc_data(did_doc))]; result += [parse_def_id(ebml::doc_data(did_doc))];
} }
@ -359,7 +359,7 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
let mut infos: [ty::variant_info] = []; let mut infos: [ty::variant_info] = [];
let variant_ids = enum_variant_ids(item, cdata); let variant_ids = enum_variant_ids(item, cdata);
let mut disr_val = 0; let mut disr_val = 0;
for did: ast::def_id in variant_ids { for variant_ids.each {|did|
let item = find_item(did.node, items); let item = find_item(did.node, items);
let ctor_ty = item_type({crate: cdata.cnum, node: id}, item, let ctor_ty = item_type({crate: cdata.cnum, node: id}, item,
tcx, cdata); tcx, cdata);
@ -367,7 +367,7 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
let mut arg_tys: [ty::t] = []; let mut arg_tys: [ty::t] = [];
alt ty::get(ctor_ty).struct { alt ty::get(ctor_ty).struct {
ty::ty_fn(f) { ty::ty_fn(f) {
for a: ty::arg in f.inputs { arg_tys += [a.ty]; } for f.inputs.each {|a| arg_tys += [a.ty]; }
} }
_ { /* Nullary enum variant. */ } _ { /* Nullary enum variant. */ }
} }
@ -560,7 +560,7 @@ fn get_attributes(md: ebml::doc) -> [ast::attribute] {
} }
fn list_meta_items(meta_items: ebml::doc, out: io::writer) { fn list_meta_items(meta_items: ebml::doc, out: io::writer) {
for mi: @ast::meta_item in get_meta_items(meta_items) { for get_meta_items(meta_items).each {|mi|
out.write_str(#fmt["%s\n", pprust::meta_item_to_str(*mi)]); out.write_str(#fmt["%s\n", pprust::meta_item_to_str(*mi)]);
} }
} }
@ -568,7 +568,7 @@ fn list_meta_items(meta_items: ebml::doc, out: io::writer) {
fn list_crate_attributes(md: ebml::doc, hash: str, out: io::writer) { fn list_crate_attributes(md: ebml::doc, hash: str, out: io::writer) {
out.write_str(#fmt("=Crate Attributes (%s)=\n", hash)); out.write_str(#fmt("=Crate Attributes (%s)=\n", hash));
for attr: ast::attribute in get_attributes(md) { for get_attributes(md).each {|attr|
out.write_str(#fmt["%s\n", pprust::attribute_to_str(attr)]); out.write_str(#fmt["%s\n", pprust::attribute_to_str(attr)]);
} }
@ -597,7 +597,7 @@ fn get_crate_deps(data: @[u8]) -> [crate_dep] {
fn list_crate_deps(data: @[u8], out: io::writer) { fn list_crate_deps(data: @[u8], out: io::writer) {
out.write_str("=External Dependencies=\n"); out.write_str("=External Dependencies=\n");
for dep: crate_dep in get_crate_deps(data) { for get_crate_deps(data).each {|dep|
out.write_str(#fmt["%d %s\n", dep.cnum, dep.ident]); out.write_str(#fmt["%d %s\n", dep.cnum, dep.ident]);
} }

View file

@ -58,7 +58,7 @@ type entry<T> = {val: T, pos: uint};
fn encode_enum_variant_paths(ebml_w: ebml::writer, variants: [variant], fn encode_enum_variant_paths(ebml_w: ebml::writer, variants: [variant],
path: [str], &index: [entry<str>]) { path: [str], &index: [entry<str>]) {
for variant: variant in variants { for variants.each {|variant|
add_to_index(ebml_w, path, index, variant.node.name); add_to_index(ebml_w, path, index, variant.node.name);
ebml_w.wr_tag(tag_paths_data_item) {|| ebml_w.wr_tag(tag_paths_data_item) {||
encode_name(ebml_w, variant.node.name); encode_name(ebml_w, variant.node.name);
@ -76,7 +76,7 @@ fn add_to_index(ebml_w: ebml::writer, path: [str], &index: [entry<str>],
fn encode_native_module_item_paths(ebml_w: ebml::writer, nmod: native_mod, fn encode_native_module_item_paths(ebml_w: ebml::writer, nmod: native_mod,
path: [str], &index: [entry<str>]) { path: [str], &index: [entry<str>]) {
for nitem: @native_item in nmod.items { for nmod.items.each {|nitem|
add_to_index(ebml_w, path, index, nitem.ident); add_to_index(ebml_w, path, index, nitem.ident);
encode_named_def_id(ebml_w, nitem.ident, local_def(nitem.id)); encode_named_def_id(ebml_w, nitem.ident, local_def(nitem.id));
} }
@ -84,7 +84,7 @@ fn encode_native_module_item_paths(ebml_w: ebml::writer, nmod: native_mod,
fn encode_class_item_paths(ebml_w: ebml::writer, fn encode_class_item_paths(ebml_w: ebml::writer,
items: [@class_member], path: [str], &index: [entry<str>]) { items: [@class_member], path: [str], &index: [entry<str>]) {
for it in items { for items.each {|it|
alt ast_util::class_member_privacy(it) { alt ast_util::class_member_privacy(it) {
priv { cont; } priv { cont; }
pub { pub {
@ -102,7 +102,7 @@ fn encode_class_item_paths(ebml_w: ebml::writer,
fn encode_module_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, fn encode_module_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt,
module: _mod, path: [str], &index: [entry<str>]) { module: _mod, path: [str], &index: [entry<str>]) {
// FIXME factor out add_to_index/start/encode_name/encode_def_id/end ops // FIXME factor out add_to_index/start/encode_name/encode_def_id/end ops
for it: @item in module.items { for module.items.each {|it|
if !ecx.ccx.reachable.contains_key(it.id) || if !ecx.ccx.reachable.contains_key(it.id) ||
!ast_util::is_exported(it.ident, module) { cont; } !ast_util::is_exported(it.ident, module) { cont; }
alt it.node { alt it.node {
@ -200,7 +200,7 @@ fn encode_reexport_paths(ebml_w: ebml::writer,
ecx: @encode_ctxt, &index: [entry<str>]) { ecx: @encode_ctxt, &index: [entry<str>]) {
let tcx = ecx.ccx.tcx; let tcx = ecx.ccx.tcx;
ecx.ccx.exp_map.items {|exp_id, defs| ecx.ccx.exp_map.items {|exp_id, defs|
for def in defs { for defs.each {|def|
if !def.reexp { cont; } if !def.reexp { cont; }
let path = alt check tcx.items.get(exp_id) { let path = alt check tcx.items.get(exp_id) {
ast_map::node_export(_, path) { ast_map::path_to_str(*path) } ast_map::node_export(_, path) { ast_map::path_to_str(*path) }
@ -230,7 +230,7 @@ fn encode_type_param_bounds(ebml_w: ebml::writer, ecx: @encode_ctxt,
tcx: ecx.ccx.tcx, tcx: ecx.ccx.tcx,
reachable: ecx.ccx.reachable, reachable: ecx.ccx.reachable,
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)}; abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
for param in params { for params.each {|param|
ebml_w.start_tag(tag_items_data_item_ty_param_bounds); ebml_w.start_tag(tag_items_data_item_ty_param_bounds);
let bs = ecx.ccx.tcx.ty_param_bounds.get(param.id); let bs = ecx.ccx.tcx.ty_param_bounds.get(param.id);
tyencode::enc_bounds(ebml_w.writer, ty_str_ctxt, bs); tyencode::enc_bounds(ebml_w.writer, ty_str_ctxt, bs);
@ -295,7 +295,7 @@ fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::writer,
let mut disr_val = 0; let mut disr_val = 0;
let mut i = 0; let mut i = 0;
let vi = ty::enum_variants(ecx.ccx.tcx, {crate: local_crate, node: id}); let vi = ty::enum_variants(ecx.ccx.tcx, {crate: local_crate, node: id});
for variant: variant in variants { for variants.each {|variant|
*index += [{val: variant.node.id, pos: ebml_w.writer.tell()}]; *index += [{val: variant.node.id, pos: ebml_w.writer.tell()}];
ebml_w.start_tag(tag_items_data_item); ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(variant.node.id)); encode_def_id(ebml_w, local_def(variant.node.id));
@ -347,7 +347,7 @@ fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: ebml::writer, md: _mod,
encode_name(ebml_w, name); encode_name(ebml_w, name);
alt ecx.ccx.maps.impl_map.get(id) { alt ecx.ccx.maps.impl_map.get(id) {
list::cons(impls, @list::nil) { list::cons(impls, @list::nil) {
for i in *impls { for vec::each(*impls) {|i|
if ast_util::is_exported(i.ident, md) { if ast_util::is_exported(i.ident, md) {
ebml_w.wr_tagged_str(tag_mod_impl, def_to_str(i.did)); ebml_w.wr_tagged_str(tag_mod_impl, def_to_str(i.did));
} }
@ -373,7 +373,7 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer,
-> [entry<int>] { -> [entry<int>] {
let index = @mut []; let index = @mut [];
let tcx = ecx.ccx.tcx; let tcx = ecx.ccx.tcx;
for ci in items { for items.each {|ci|
/* We encode both private and public fields -- need to include /* We encode both private and public fields -- need to include
private fields to get the offsets right */ private fields to get the offsets right */
alt ci.node { alt ci.node {
@ -409,7 +409,7 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer,
} }
} }
} }
} };
*index *index
} }
@ -548,7 +548,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
encode_type_param_bounds(ebml_w, ecx, tps); encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ebml_w, item.ident); encode_name(ebml_w, item.ident);
for v: variant in variants { for variants.each {|v|
encode_variant_id(ebml_w, local_def(v.node.id)); encode_variant_id(ebml_w, local_def(v.node.id));
} }
astencode::encode_inlined_item(ecx, ebml_w, path, ii_item(item)); astencode::encode_inlined_item(ecx, ebml_w, path, ii_item(item));
@ -579,14 +579,14 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
for methods, write all the stuff get_iface_method for methods, write all the stuff get_iface_method
needs to know*/ needs to know*/
let (fs,ms) = ast_util::split_class_items(items); let (fs,ms) = ast_util::split_class_items(items);
for f in fs { for fs.each {|f|
ebml_w.start_tag(tag_item_field); ebml_w.start_tag(tag_item_field);
encode_privacy(ebml_w, f.privacy); encode_privacy(ebml_w, f.privacy);
encode_name(ebml_w, f.ident); encode_name(ebml_w, f.ident);
encode_def_id(ebml_w, local_def(f.id)); encode_def_id(ebml_w, local_def(f.id));
ebml_w.end_tag(); ebml_w.end_tag();
} }
for m in ms { for ms.each {|m|
alt m.privacy { alt m.privacy {
priv { /* do nothing */ } priv { /* do nothing */ }
pub { pub {
@ -637,7 +637,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
encode_type_param_bounds(ebml_w, ecx, tps); encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ebml_w, item.ident); encode_name(ebml_w, item.ident);
for m in methods { for methods.each {|m|
ebml_w.start_tag(tag_item_method); ebml_w.start_tag(tag_item_method);
ebml_w.writer.write(str::bytes(def_to_str(local_def(m.id)))); ebml_w.writer.write(str::bytes(def_to_str(local_def(m.id))));
ebml_w.end_tag(); ebml_w.end_tag();
@ -657,7 +657,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
ebml_w.end_tag(); ebml_w.end_tag();
let impl_path = path + [ast_map::path_name(item.ident)]; let impl_path = path + [ast_map::path_name(item.ident)];
for m in methods { for methods.each {|m|
*index += [{val: m.id, pos: ebml_w.writer.tell()}]; *index += [{val: m.id, pos: ebml_w.writer.tell()}];
encode_info_for_method(ecx, ebml_w, impl_path, encode_info_for_method(ecx, ebml_w, impl_path,
should_inline(m.attrs), item.id, m, tps + m.tps); should_inline(m.attrs), item.id, m, tps + m.tps);
@ -672,7 +672,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ebml_w, item.ident); encode_name(ebml_w, item.ident);
let mut i = 0u; let mut i = 0u;
for mty in *ty::iface_methods(tcx, local_def(item.id)) { for vec::each(*ty::iface_methods(tcx, local_def(item.id))) {|mty|
ebml_w.start_tag(tag_item_method); ebml_w.start_tag(tag_item_method);
encode_name(ebml_w, mty.ident); encode_name(ebml_w, mty.ident);
encode_type_param_bounds(ebml_w, ecx, ms[i].tps); encode_type_param_bounds(ebml_w, ecx, ms[i].tps);
@ -765,13 +765,13 @@ fn create_index<T: copy>(index: [entry<T>], hash_fn: fn@(T) -> uint) ->
[@[entry<T>]] { [@[entry<T>]] {
let mut buckets: [@mut [entry<T>]] = []; let mut buckets: [@mut [entry<T>]] = [];
uint::range(0u, 256u) {|_i| buckets += [@mut []]; }; uint::range(0u, 256u) {|_i| buckets += [@mut []]; };
for elt: entry<T> in index { for index.each {|elt|
let h = hash_fn(elt.val); let h = hash_fn(elt.val);
*buckets[h % 256u] += [elt]; *buckets[h % 256u] += [elt];
} }
let mut buckets_frozen = []; let mut buckets_frozen = [];
for bucket: @mut [entry<T>] in buckets { for buckets.each {|bucket|
buckets_frozen += [@*bucket]; buckets_frozen += [@*bucket];
} }
ret buckets_frozen; ret buckets_frozen;
@ -783,10 +783,10 @@ fn encode_index<T>(ebml_w: ebml::writer, buckets: [@[entry<T>]],
ebml_w.start_tag(tag_index); ebml_w.start_tag(tag_index);
let mut bucket_locs: [uint] = []; let mut bucket_locs: [uint] = [];
ebml_w.start_tag(tag_index_buckets); ebml_w.start_tag(tag_index_buckets);
for bucket: @[entry<T>] in buckets { for buckets.each {|bucket|
bucket_locs += [ebml_w.writer.tell()]; bucket_locs += [ebml_w.writer.tell()];
ebml_w.start_tag(tag_index_buckets_bucket); ebml_w.start_tag(tag_index_buckets_bucket);
for elt: entry<T> in *bucket { for vec::each(*bucket) {|elt|
ebml_w.start_tag(tag_index_buckets_bucket_elt); ebml_w.start_tag(tag_index_buckets_bucket_elt);
writer.write_be_uint(elt.pos, 4u); writer.write_be_uint(elt.pos, 4u);
write_fn(writer, elt.val); write_fn(writer, elt.val);
@ -796,7 +796,7 @@ fn encode_index<T>(ebml_w: ebml::writer, buckets: [@[entry<T>]],
} }
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_index_table); ebml_w.start_tag(tag_index_table);
for pos: uint in bucket_locs { writer.write_be_uint(pos, 4u); } for bucket_locs.each {|pos| writer.write_be_uint(pos, 4u); }
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -836,7 +836,7 @@ fn encode_meta_item(ebml_w: ebml::writer, mi: meta_item) {
ebml_w.start_tag(tag_meta_item_name); ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(str::bytes(name)); ebml_w.writer.write(str::bytes(name));
ebml_w.end_tag(); ebml_w.end_tag();
for inner_item: @meta_item in items { for items.each {|inner_item|
encode_meta_item(ebml_w, *inner_item); encode_meta_item(ebml_w, *inner_item);
} }
ebml_w.end_tag(); ebml_w.end_tag();
@ -846,7 +846,7 @@ fn encode_meta_item(ebml_w: ebml::writer, mi: meta_item) {
fn encode_attributes(ebml_w: ebml::writer, attrs: [attribute]) { fn encode_attributes(ebml_w: ebml::writer, attrs: [attribute]) {
ebml_w.start_tag(tag_attributes); ebml_w.start_tag(tag_attributes);
for attr: attribute in attrs { for attrs.each {|attr|
ebml_w.start_tag(tag_attribute); ebml_w.start_tag(tag_attribute);
encode_meta_item(ebml_w, attr.node.value); encode_meta_item(ebml_w, attr.node.value);
ebml_w.end_tag(); ebml_w.end_tag();
@ -885,7 +885,7 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> [attribute] {
let mut attrs: [attribute] = []; let mut attrs: [attribute] = [];
let mut found_link_attr = false; let mut found_link_attr = false;
for attr: attribute in crate.node.attrs { for crate.node.attrs.each {|attr|
attrs += attrs +=
if attr::get_attr_name(attr) != "link" { if attr::get_attr_name(attr) != "link" {
[attr] [attr]
@ -923,7 +923,7 @@ fn encode_crate_deps(ebml_w: ebml::writer, cstore: cstore::cstore) {
// Sanity-check the crate numbers // Sanity-check the crate numbers
let mut expected_cnum = 1; let mut expected_cnum = 1;
for n: numname in pairs { for pairs.each {|n|
assert (n.crate == expected_cnum); assert (n.crate == expected_cnum);
expected_cnum += 1; expected_cnum += 1;
} }
@ -940,7 +940,7 @@ fn encode_crate_deps(ebml_w: ebml::writer, cstore: cstore::cstore) {
// FIXME: This is not nearly enough to support correct versioning // FIXME: This is not nearly enough to support correct versioning
// but is enough to get transitive crate dependencies working. // but is enough to get transitive crate dependencies working.
ebml_w.start_tag(tag_crate_deps); ebml_w.start_tag(tag_crate_deps);
for cname: str in get_ordered_names(cstore) { for get_ordered_names(cstore).each {|cname|
ebml_w.start_tag(tag_crate_dep); ebml_w.start_tag(tag_crate_dep);
ebml_w.writer.write(str::bytes(cname)); ebml_w.writer.write(str::bytes(cname));
ebml_w.end_tag(); ebml_w.end_tag();

View file

@ -392,25 +392,20 @@ fn parse_def_id(buf: [u8]) -> ast::def_id {
#error("didn't find ':' when parsing def id"); #error("didn't find ':' when parsing def id");
fail; fail;
} }
let crate_part = vec::slice::<u8>(buf, 0u, colon_idx); let crate_part = vec::slice(buf, 0u, colon_idx);
let def_part = vec::slice::<u8>(buf, colon_idx + 1u, len); let def_part = vec::slice(buf, colon_idx + 1u, len);
let mut crate_part_vec = []; let crate_num = alt uint::parse_buf(crate_part, 10u) {
let mut def_part_vec = [];
for b: u8 in crate_part { crate_part_vec += [b]; }
for b: u8 in def_part { def_part_vec += [b]; }
let crate_num = alt uint::parse_buf(crate_part_vec, 10u) {
some(cn) { cn as int } some(cn) { cn as int }
none { fail (#fmt("internal error: parse_def_id: error parsing %? \ none { fail (#fmt("internal error: parse_def_id: error parsing %? \
as crate", as crate",
crate_part_vec)); } crate_part)); }
}; };
let def_num = alt uint::parse_buf(def_part_vec, 10u) { let def_num = alt uint::parse_buf(def_part, 10u) {
some(dn) { dn as int } some(dn) { dn as int }
none { fail (#fmt("internal error: parse_def_id: error parsing %? \ none { fail (#fmt("internal error: parse_def_id: error parsing %? \
as id", as id",
def_part_vec)); } def_part)); }
}; };
ret {crate: crate_num, node: def_num}; ret {crate: crate_num, node: def_num};
} }
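
`parse_def_id` is the one place above where the old loops are deleted rather than converted: the two `for b: u8 in ...` loops existed only to copy the slices into fresh vectors, and the rewritten code passes the result of `vec::slice` to `uint::parse_buf` directly, so the copies and their `_vec` temporaries disappear. A hedged sketch of the same idea; the helper and its names are hypothetical:

    fn parse_decimal_slice(buf: [u8], lo: uint, hi: uint) -> option<uint> {
        // the old code copied the bytes into a fresh vector before parsing;
        // the slice returned by vec::slice can be handed to parse_buf as-is
        ret uint::parse_buf(vec::slice(buf, lo, hi), 10u);
    }
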

View file

@ -181,19 +181,19 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) {
w.write_str("t["); w.write_str("t[");
w.write_str(cx.ds(def)); w.write_str(cx.ds(def));
w.write_char('|'); w.write_char('|');
for t: ty::t in tys { enc_ty(w, cx, t); } for tys.each {|t| enc_ty(w, cx, t); }
w.write_char(']'); w.write_char(']');
} }
ty::ty_iface(def, tys) { ty::ty_iface(def, tys) {
w.write_str("x["); w.write_str("x[");
w.write_str(cx.ds(def)); w.write_str(cx.ds(def));
w.write_char('|'); w.write_char('|');
for t: ty::t in tys { enc_ty(w, cx, t); } for tys.each {|t| enc_ty(w, cx, t); }
w.write_char(']'); w.write_char(']');
} }
ty::ty_tup(ts) { ty::ty_tup(ts) {
w.write_str("T["); w.write_str("T[");
for t in ts { enc_ty(w, cx, t); } for ts.each {|t| enc_ty(w, cx, t); }
w.write_char(']'); w.write_char(']');
} }
ty::ty_box(mt) { w.write_char('@'); enc_mt(w, cx, mt); } ty::ty_box(mt) { w.write_char('@'); enc_mt(w, cx, mt); }
@ -207,7 +207,7 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) {
ty::ty_vec(mt) { w.write_char('I'); enc_mt(w, cx, mt); } ty::ty_vec(mt) { w.write_char('I'); enc_mt(w, cx, mt); }
ty::ty_rec(fields) { ty::ty_rec(fields) {
w.write_str("R["); w.write_str("R[");
for field: ty::field in fields { for fields.each {|field|
w.write_str(field.ident); w.write_str(field.ident);
w.write_char('='); w.write_char('=');
enc_mt(w, cx, field.mt); enc_mt(w, cx, field.mt);
@ -223,7 +223,7 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) {
w.write_str(cx.ds(def)); w.write_str(cx.ds(def));
w.write_char('|'); w.write_char('|');
enc_ty(w, cx, ty); enc_ty(w, cx, ty);
for t: ty::t in tps { enc_ty(w, cx, t); } for tps.each {|t| enc_ty(w, cx, t); }
w.write_char(']'); w.write_char(']');
} }
ty::ty_var(id) { ty::ty_var(id) {
@ -238,7 +238,7 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) {
} }
ty::ty_self(tps) { ty::ty_self(tps) {
w.write_str("s["); w.write_str("s[");
for t in tps { enc_ty(w, cx, t); } for tps.each {|t| enc_ty(w, cx, t); }
w.write_char(']'); w.write_char(']');
} }
ty::ty_type { w.write_char('Y'); } ty::ty_type { w.write_char('Y'); }
@ -248,7 +248,7 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) {
ty::ty_constr(ty, cs) { ty::ty_constr(ty, cs) {
w.write_str("A["); w.write_str("A[");
enc_ty(w, cx, ty); enc_ty(w, cx, ty);
for tc: @ty::type_constr in cs { enc_ty_constr(w, cx, tc); } for cs.each {|tc| enc_ty_constr(w, cx, tc); }
w.write_char(']'); w.write_char(']');
} }
ty::ty_opaque_box { w.write_char('B'); } ty::ty_opaque_box { w.write_char('B'); }
@ -260,7 +260,7 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) {
w.write_str(s); w.write_str(s);
#debug("~~~~ %s", "|"); #debug("~~~~ %s", "|");
w.write_str("|"); w.write_str("|");
for t: ty::t in tys { enc_ty(w, cx, t); } for tys.each {|t| enc_ty(w, cx, t); }
#debug("~~~~ %s", "]"); #debug("~~~~ %s", "]");
w.write_char(']'); w.write_char(']');
} }
@ -288,13 +288,13 @@ fn enc_mode(w: io::writer, cx: @ctxt, m: mode) {
fn enc_ty_fn(w: io::writer, cx: @ctxt, ft: ty::fn_ty) { fn enc_ty_fn(w: io::writer, cx: @ctxt, ft: ty::fn_ty) {
w.write_char('['); w.write_char('[');
for arg: ty::arg in ft.inputs { for ft.inputs.each {|arg|
enc_mode(w, cx, arg.mode); enc_mode(w, cx, arg.mode);
enc_ty(w, cx, arg.ty); enc_ty(w, cx, arg.ty);
} }
w.write_char(']'); w.write_char(']');
let mut colon = true; let mut colon = true;
for c: @ty::constr in ft.constraints { for ft.constraints.each {|c|
if colon { if colon {
w.write_char(':'); w.write_char(':');
colon = false; colon = false;
@ -314,7 +314,7 @@ fn enc_constr(w: io::writer, cx: @ctxt, c: @ty::constr) {
w.write_str(cx.ds(c.node.id)); w.write_str(cx.ds(c.node.id));
w.write_char('|'); w.write_char('|');
let mut semi = false; let mut semi = false;
for a: @constr_arg in c.node.args { for c.node.args.each {|a|
if semi { w.write_char(';'); } else { semi = true; } if semi { w.write_char(';'); } else { semi = true; }
alt a.node { alt a.node {
carg_base { w.write_char('*'); } carg_base { w.write_char('*'); }
@ -331,7 +331,7 @@ fn enc_ty_constr(w: io::writer, cx: @ctxt, c: @ty::type_constr) {
w.write_str(cx.ds(c.node.id)); w.write_str(cx.ds(c.node.id));
w.write_char('|'); w.write_char('|');
let mut semi = false; let mut semi = false;
for a: @ty::ty_constr_arg in c.node.args { for c.node.args.each {|a|
if semi { w.write_char(';'); } else { semi = true; } if semi { w.write_char(';'); } else { semi = true; }
alt a.node { alt a.node {
carg_base { w.write_char('*'); } carg_base { w.write_char('*'); }
@ -343,7 +343,7 @@ fn enc_ty_constr(w: io::writer, cx: @ctxt, c: @ty::type_constr) {
} }
fn enc_bounds(w: io::writer, cx: @ctxt, bs: @[ty::param_bound]) { fn enc_bounds(w: io::writer, cx: @ctxt, bs: @[ty::param_bound]) {
for bound in *bs { for vec::each(*bs) {|bound|
alt bound { alt bound {
ty::bound_send { w.write_char('S'); } ty::bound_send { w.write_char('S'); }
ty::bound_copy { w.write_char('C'); } ty::bound_copy { w.write_char('C'); }
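
`enc_bounds` above uses the free function `vec::each(*bs)` rather than the `each` method. Throughout the patch that spelling shows up where the loop runs over a dereferenced `@`-vector or some other compound expression (`vec::each(*bucket)`, `vec::each(arms[j].pats)`, `vec::each(tys + [decl.output])`, and so on), while plain local vectors call the method directly. A sketch of the two spellings side by side; the function and its names are illustrative, not from the patch:

    fn count_send_bounds(all: [@[ty::param_bound]]) -> uint {
        let mut n = 0u;
        // a plain local vector uses the method form
        for all.each {|bs|
            // a dereferenced @-vector goes through the free function
            for vec::each(*bs) {|bound|
                alt bound { ty::bound_send { n += 1u; } _ { } }
            }
        }
        ret n;
    }
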

View file

@ -136,13 +136,13 @@ fn visit_expr(cx: @ctx, ex: @ast::expr, sc: scope, v: vt<scope>) {
fn visit_block(cx: @ctx, b: ast::blk, sc: scope, v: vt<scope>) { fn visit_block(cx: @ctx, b: ast::blk, sc: scope, v: vt<scope>) {
let sc = sc; let sc = sc;
for stmt in b.node.stmts { for b.node.stmts.each {|stmt|
alt stmt.node { alt stmt.node {
ast::stmt_decl(@{node: ast::decl_item(it), _}, _) { ast::stmt_decl(@{node: ast::decl_item(it), _}, _) {
v.visit_item(it, sc, v); v.visit_item(it, sc, v);
} }
ast::stmt_decl(@{node: ast::decl_local(locs), _}, _) { ast::stmt_decl(@{node: ast::decl_local(locs), _}, _) {
for loc in locs { for locs.each {|loc|
alt loc.node.init { alt loc.node.init {
some(init) { some(init) {
if init.op == ast::init_move { if init.op == ast::init_move {
@ -245,11 +245,11 @@ fn check_call(cx: @ctx, sc: scope, f: @ast::expr, args: [@ast::expr],
}; };
if f_may_close { if f_may_close {
let mut i = 0u; let mut i = 0u;
for b in bindings { for bindings.each {|b|
let mut unsfe = vec::len(b.unsafe_tys) > 0u; let mut unsfe = vec::len(b.unsafe_tys) > 0u;
alt b.root_var { alt b.root_var {
some(rid) { some(rid) {
for o in sc.bs { for sc.bs.each {|o|
if o.node_id == rid && vec::len(o.unsafe_tys) > 0u { if o.node_id == rid && vec::len(o.unsafe_tys) > 0u {
unsfe = true; break; unsfe = true; break;
} }
@ -265,8 +265,8 @@ fn check_call(cx: @ctx, sc: scope, f: @ast::expr, args: [@ast::expr],
} }
} }
let mut j = 0u; let mut j = 0u;
for b in bindings { for bindings.each {|b|
for unsafe_ty in b.unsafe_tys { for b.unsafe_tys.each {|unsafe_ty|
vec::iteri(arg_ts) {|i, arg_t| vec::iteri(arg_ts) {|i, arg_t|
let mut_alias = let mut_alias =
(ast::by_mutbl_ref == ty::arg_mode(cx.tcx, arg_t)); (ast::by_mutbl_ref == ty::arg_mode(cx.tcx, arg_t));
@ -288,13 +288,13 @@ fn check_call(cx: @ctx, sc: scope, f: @ast::expr, args: [@ast::expr],
} }
// Ensure we're not passing a root by mut alias. // Ensure we're not passing a root by mut alias.
for {node: node, arg: arg} in mut_roots { for mut_roots.each {|mroot|
for b in bindings { for bindings.each {|b|
if b.node_id != arg.id { if b.node_id != mroot.arg.id {
alt b.root_var { alt b.root_var {
some(root) { some(root) {
if node == root && cant_copy(*cx, b) { if mroot.node == root && cant_copy(*cx, b) {
err(*cx, arg.span, err(*cx, mroot.arg.span,
"passing a mut reference to a \ "passing a mut reference to a \
variable that roots another reference"); variable that roots another reference");
break; break;
@ -308,14 +308,14 @@ fn check_call(cx: @ctx, sc: scope, f: @ast::expr, args: [@ast::expr],
// Check the bodies of block arguments against the current scope // Check the bodies of block arguments against the current scope
if blocks.len() > 0u { if blocks.len() > 0u {
let inner_sc = {bs: bindings + sc.bs, invalid: sc.invalid}; let inner_sc = {bs: bindings + sc.bs, invalid: sc.invalid};
for blk in blocks { for blocks.each {|blk|
alt check blk.node { alt check blk.node {
ast::expr_fn_block(_, body) { ast::expr_fn_block(_, body) {
v.visit_block(body, inner_sc, v); v.visit_block(body, inner_sc, v);
} }
} }
} }
for binding in bindings { for bindings.each {|binding|
test_scope(*cx, sc, binding, none); test_scope(*cx, sc, binding, none);
} }
} }
@ -327,7 +327,7 @@ fn check_alt(cx: ctx, input: @ast::expr, arms: [ast::arm], sc: scope,
let orig_invalid = *sc.invalid; let orig_invalid = *sc.invalid;
let mut all_invalid = orig_invalid; let mut all_invalid = orig_invalid;
let root = expr_root(cx, input, true); let root = expr_root(cx, input, true);
for a: ast::arm in arms { for arms.each {|a|
let mut new_bs = sc.bs; let mut new_bs = sc.bs;
let root_var = path_def_id(cx, root.ex); let root_var = path_def_id(cx, root.ex);
let pat_id_map = pat_util::pat_id_map(cx.tcx.def_map, a.pats[0]); let pat_id_map = pat_util::pat_id_map(cx.tcx.def_map, a.pats[0]);
@ -336,8 +336,8 @@ fn check_alt(cx: ctx, input: @ast::expr, arms: [ast::arm], sc: scope,
mut unsafe_tys: [unsafe_ty], mut unsafe_tys: [unsafe_ty],
span: span}; span: span};
let mut binding_info: [info] = []; let mut binding_info: [info] = [];
for pat in a.pats { for a.pats.each {|pat|
for proot in pattern_roots(cx.tcx, root.mutbl, pat) { for pattern_roots(cx.tcx, root.mutbl, pat).each {|proot|
let canon_id = pat_id_map.get(proot.name); let canon_id = pat_id_map.get(proot.name);
alt vec::find(binding_info, {|x| x.id == canon_id}) { alt vec::find(binding_info, {|x| x.id == canon_id}) {
some(s) { s.unsafe_tys += unsafe_set(proot.mutbl); } some(s) { s.unsafe_tys += unsafe_set(proot.mutbl); }
@ -350,14 +350,14 @@ fn check_alt(cx: ctx, input: @ast::expr, arms: [ast::arm], sc: scope,
} }
} }
} }
for info in binding_info { for binding_info.each {|info|
new_bs += [mk_binding(cx, info.id, info.span, root_var, new_bs += [mk_binding(cx, info.id, info.span, root_var,
copy info.unsafe_tys)]; copy info.unsafe_tys)];
} };
*sc.invalid = orig_invalid; *sc.invalid = orig_invalid;
visit::visit_arm(a, {bs: new_bs with sc}, v); visit::visit_arm(a, {bs: new_bs with sc}, v);
all_invalid = join_invalid(all_invalid, *sc.invalid); all_invalid = join_invalid(all_invalid, *sc.invalid);
} };
*sc.invalid = all_invalid; *sc.invalid = all_invalid;
} }
@ -378,7 +378,7 @@ fn check_for(cx: ctx, local: @ast::local, seq: @ast::expr, blk: ast::blk,
} }
let root_var = path_def_id(cx, root.ex); let root_var = path_def_id(cx, root.ex);
let mut new_bs = sc.bs; let mut new_bs = sc.bs;
for proot in pattern_roots(cx.tcx, cur_mutbl, local.node.pat) { for pattern_roots(cx.tcx, cur_mutbl, local.node.pat).each {|proot|
new_bs += [mk_binding(cx, proot.id, proot.span, root_var, new_bs += [mk_binding(cx, proot.id, proot.span, root_var,
unsafe_set(proot.mutbl))]; unsafe_set(proot.mutbl))];
} }
@ -392,10 +392,10 @@ fn check_var(cx: ctx, ex: @ast::expr, p: @ast::path, id: ast::node_id,
let my_defnum = ast_util::def_id_of_def(def).node; let my_defnum = ast_util::def_id_of_def(def).node;
let my_local_id = local_id_of_node(cx, my_defnum); let my_local_id = local_id_of_node(cx, my_defnum);
let var_t = ty::expr_ty(cx.tcx, ex); let var_t = ty::expr_ty(cx.tcx, ex);
for b in sc.bs { for sc.bs.each {|b|
// excludes variables introduced since the alias was made // excludes variables introduced since the alias was made
if my_local_id < b.local_id { if my_local_id < b.local_id {
for unsafe_ty in b.unsafe_tys { for b.unsafe_tys.each {|unsafe_ty|
if ty_can_unsafely_include(cx, unsafe_ty, var_t, assign) { if ty_can_unsafely_include(cx, unsafe_ty, var_t, assign) {
let inv = @{reason: val_taken, node_id: b.node_id, let inv = @{reason: val_taken, node_id: b.node_id,
sp: ex.span, path: p}; sp: ex.span, path: p};
@ -413,7 +413,7 @@ fn check_lval(cx: @ctx, dest: @ast::expr, sc: scope, v: vt<scope>) {
ast::expr_path(p) { ast::expr_path(p) {
let def = cx.tcx.def_map.get(dest.id); let def = cx.tcx.def_map.get(dest.id);
let dnum = ast_util::def_id_of_def(def).node; let dnum = ast_util::def_id_of_def(def).node;
for b in sc.bs { for sc.bs.each {|b|
if b.root_var == some(dnum) { if b.root_var == some(dnum) {
let inv = @{reason: overwritten, node_id: b.node_id, let inv = @{reason: overwritten, node_id: b.node_id,
sp: dest.span, path: p}; sp: dest.span, path: p};
@ -454,7 +454,7 @@ fn test_scope(cx: ctx, sc: scope, b: binding, p: option<@ast::path>) {
let mut prob = find_invalid(b.node_id, *sc.invalid); let mut prob = find_invalid(b.node_id, *sc.invalid);
alt b.root_var { alt b.root_var {
some(dn) { some(dn) {
for other in sc.bs { for sc.bs.each {|other|
if !is_none(prob) { break; } if !is_none(prob) { break; }
if other.node_id == dn { if other.node_id == dn {
prob = find_invalid(other.node_id, *sc.invalid); prob = find_invalid(other.node_id, *sc.invalid);
@ -507,7 +507,7 @@ fn ty_can_unsafely_include(cx: ctx, needle: unsafe_ty, haystack: ty::t,
} { ret true; } } { ret true; }
alt ty::get(haystack).struct { alt ty::get(haystack).struct {
ty::ty_enum(_, ts) { ty::ty_enum(_, ts) {
for t: ty::t in ts { for ts.each {|t|
if helper(tcx, needle, t, mutbl) { ret true; } if helper(tcx, needle, t, mutbl) { ret true; }
} }
ret false; ret false;
@ -516,7 +516,7 @@ fn ty_can_unsafely_include(cx: ctx, needle: unsafe_ty, haystack: ty::t,
ret helper(tcx, needle, mt.ty, get_mutbl(mutbl, mt)); ret helper(tcx, needle, mt.ty, get_mutbl(mutbl, mt));
} }
ty::ty_rec(fields) { ty::ty_rec(fields) {
for f: ty::field in fields { for fields.each {|f|
if helper(tcx, needle, f.mt.ty, get_mutbl(mutbl, f.mt)) { if helper(tcx, needle, f.mt.ty, get_mutbl(mutbl, f.mt)) {
ret true; ret true;
} }
@ -524,7 +524,7 @@ fn ty_can_unsafely_include(cx: ctx, needle: unsafe_ty, haystack: ty::t,
ret false; ret false;
} }
ty::ty_tup(ts) { ty::ty_tup(ts) {
for t in ts { if helper(tcx, needle, t, mutbl) { ret true; } } for ts.each {|t| if helper(tcx, needle, t, mutbl) { ret true; } }
ret false; ret false;
} }
ty::ty_fn({proto: ast::proto_bare, _}) { ret false; } ty::ty_fn({proto: ast::proto_bare, _}) { ret false; }
@ -571,12 +571,12 @@ fn copy_is_expensive(tcx: ty::ctxt, ty: ty::t) -> bool {
ty::ty_uniq(mt) { 1u + score_ty(tcx, mt.ty) } ty::ty_uniq(mt) { 1u + score_ty(tcx, mt.ty) }
ty::ty_enum(_, ts) | ty::ty_tup(ts) { ty::ty_enum(_, ts) | ty::ty_tup(ts) {
let mut sum = 0u; let mut sum = 0u;
for t in ts { sum += score_ty(tcx, t); } for ts.each {|t| sum += score_ty(tcx, t); }
sum sum
} }
ty::ty_rec(fs) { ty::ty_rec(fs) {
let mut sum = 0u; let mut sum = 0u;
for f in fs { sum += score_ty(tcx, f.mt.ty); } for fs.each {|f| sum += score_ty(tcx, f.mt.ty); }
sum sum
} }
_ { _ {
@ -608,11 +608,11 @@ fn pattern_roots(tcx: ty::ctxt, mutbl: option<unsafe_ty>, pat: @ast::pat)
ast::pat_wild | ast::pat_lit(_) | ast::pat_range(_, _) | ast::pat_wild | ast::pat_lit(_) | ast::pat_range(_, _) |
ast::pat_ident(_, _) {} ast::pat_ident(_, _) {}
ast::pat_enum(_, ps) | ast::pat_tup(ps) { ast::pat_enum(_, ps) | ast::pat_tup(ps) {
for p in ps { walk(tcx, mutbl, p, set); } for ps.each {|p| walk(tcx, mutbl, p, set); }
} }
ast::pat_rec(fs, _) { ast::pat_rec(fs, _) {
let ty = ty::node_id_to_type(tcx, pat.id); let ty = ty::node_id_to_type(tcx, pat.id);
for f in fs { for fs.each {|f|
let m = ty::get_field(ty, f.ident).mt.mutbl != ast::m_imm, let m = ty::get_field(ty, f.ident).mt.mutbl != ast::m_imm,
c = if m { some(contains(ty)) } else { mutbl }; c = if m { some(contains(ty)) } else { mutbl };
walk(tcx, c, f.pat, set); walk(tcx, c, f.pat, set);
@ -649,7 +649,7 @@ fn expr_root(cx: ctx, ex: @ast::expr, autoderef: bool)
-> {ex: @ast::expr, mutbl: option<unsafe_ty>} { -> {ex: @ast::expr, mutbl: option<unsafe_ty>} {
let base_root = mutbl::expr_root_(cx.tcx, none, ex, autoderef); let base_root = mutbl::expr_root_(cx.tcx, none, ex, autoderef);
let mut unsafe_ty = none; let mut unsafe_ty = none;
for d in *base_root.ds { for vec::each(*base_root.ds) {|d|
if d.mutbl { unsafe_ty = some(contains(d.outer_t)); break; } if d.mutbl { unsafe_ty = some(contains(d.outer_t)); break; }
} }
ret {ex: base_root.ex, mutbl: unsafe_ty}; ret {ex: base_root.ex, mutbl: unsafe_ty};

View file

@ -115,7 +115,7 @@ fn map_decoded_item(sess: session, map: map, path: path, ii: inlined_item) {
fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
sp: codemap::span, id: node_id, cx: ctx, v: vt) { sp: codemap::span, id: node_id, cx: ctx, v: vt) {
for a in decl.inputs { for decl.inputs.each {|a|
cx.map.insert(a.id, node_arg(a, cx.local_id)); cx.map.insert(a.id, node_arg(a, cx.local_id));
cx.local_id += 1u; cx.local_id += 1u;
} }
@ -162,7 +162,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
alt i.node { alt i.node {
item_impl(_, _, _, ms) { item_impl(_, _, _, ms) {
let impl_did = ast_util::local_def(i.id); let impl_did = ast_util::local_def(i.id);
for m in ms { for ms.each {|m|
map_method(impl_did, extend(cx, i.ident), m, cx); map_method(impl_did, extend(cx, i.ident), m, cx);
} }
} }
@ -171,7 +171,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
cx.map.insert(dtor_id, node_item(i, item_path)); cx.map.insert(dtor_id, node_item(i, item_path));
} }
item_enum(vs, _) { item_enum(vs, _) {
for v in vs { for vs.each {|v|
cx.map.insert(v.node.id, node_variant( cx.map.insert(v.node.id, node_variant(
v, i, extend(cx, i.ident))); v, i, extend(cx, i.ident)));
} }
@ -181,7 +181,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
either::left(msg) { cx.sess.span_fatal(i.span, msg); } either::left(msg) { cx.sess.span_fatal(i.span, msg); }
either::right(abi) { abi } either::right(abi) { abi }
}; };
for nitem in nm.items { for nm.items.each {|nitem|
cx.map.insert(nitem.id, node_native_item(nitem, abi, @cx.path)); cx.map.insert(nitem.id, node_native_item(nitem, abi, @cx.path));
} }
} }
@ -189,7 +189,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
cx.map.insert(ctor.node.id, node_ctor(i, item_path)); cx.map.insert(ctor.node.id, node_ctor(i, item_path));
let d_id = ast_util::local_def(i.id); let d_id = ast_util::local_def(i.id);
let p = extend(cx, i.ident); let p = extend(cx, i.ident);
for ci in items { for items.each {|ci|
// only need to handle methods // only need to handle methods
alt ci.node { alt ci.node {
class_method(m) { map_method(d_id, p, m, cx); } class_method(m) { map_method(d_id, p, m, cx); }
@ -212,7 +212,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
fn map_view_item(vi: @view_item, cx: ctx, _v: vt) { fn map_view_item(vi: @view_item, cx: ctx, _v: vt) {
alt vi.node { alt vi.node {
view_item_export(vps) { view_item_export(vps) {
for vp in vps { for vps.each {|vp|
let (id, name) = alt vp.node { let (id, name) = alt vp.node {
view_path_simple(nm, _, id) { (id, nm) } view_path_simple(nm, _, id) { (id, nm) }
view_path_glob(pth, id) | view_path_list(pth, _, id) { view_path_glob(pth, id) | view_path_list(pth, _, id) {

View file

@ -27,7 +27,7 @@ fn visit_expr(ex: @expr, cx: ctx, v: visit::vt<ctx>) {
cx.allow_block = true; cx.allow_block = true;
v.visit_expr(f, cx, v); v.visit_expr(f, cx, v);
let mut i = 0u; let mut i = 0u;
for arg_t in ty::ty_fn_args(ty::expr_ty(cx.tcx, f)) { for ty::ty_fn_args(ty::expr_ty(cx.tcx, f)).each {|arg_t|
cx.allow_block = (ty::arg_mode(cx.tcx, arg_t) == by_ref); cx.allow_block = (ty::arg_mode(cx.tcx, arg_t) == by_ref);
v.visit_expr(args[i], cx, v); v.visit_expr(args[i], cx, v);
i += 1u; i += 1u;

View file

@ -37,13 +37,13 @@ fn check_expr(tcx: ty::ctxt, ex: @expr, &&s: (), v: visit::vt<()>) {
fn check_arms(tcx: ty::ctxt, arms: [arm]) { fn check_arms(tcx: ty::ctxt, arms: [arm]) {
let mut i = 0; let mut i = 0;
/* Check for unreachable patterns */ /* Check for unreachable patterns */
for arm: arm in arms { for arms.each {|arm|
for arm_pat: @pat in arm.pats { for arm.pats.each {|arm_pat|
let mut reachable = true; let mut reachable = true;
let mut j = 0; let mut j = 0;
while j < i { while j < i {
if option::is_none(arms[j].guard) { if option::is_none(arms[j].guard) {
for prev_pat: @pat in arms[j].pats { for vec::each(arms[j].pats) {|prev_pat|
if pattern_supersedes(tcx, prev_pat, arm_pat) { if pattern_supersedes(tcx, prev_pat, arm_pat) {
reachable = false; reachable = false;
} }
@ -72,7 +72,7 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: [@pat]) {
ret; ret;
} }
// If there a non-refutable pattern in the set, we're okay. // If there a non-refutable pattern in the set, we're okay.
for pat in pats { if !is_refutable(tcx, pat) { ret; } } for pats.each {|pat| if !is_refutable(tcx, pat) { ret; } }
alt ty::get(ty::node_id_to_type(tcx, pats[0].id)).struct { alt ty::get(ty::node_id_to_type(tcx, pats[0].id)).struct {
ty::ty_enum(id, _) { ty::ty_enum(id, _) {
@ -90,7 +90,7 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: [@pat]) {
} }
ty::ty_tup(ts) { ty::ty_tup(ts) {
let cols = vec::to_mut(vec::from_elem(ts.len(), [])); let cols = vec::to_mut(vec::from_elem(ts.len(), []));
for p in pats { for pats.each {|p|
alt raw_pat(p).node { alt raw_pat(p).node {
pat_tup(sub) { pat_tup(sub) {
vec::iteri(sub) {|i, sp| cols[i] += [sp];} vec::iteri(sub) {|i, sp| cols[i] += [sp];}
@ -103,7 +103,7 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: [@pat]) {
ty::ty_rec(fs) { ty::ty_rec(fs) {
let cols = vec::from_elem(fs.len(), {mut wild: false, let cols = vec::from_elem(fs.len(), {mut wild: false,
mut pats: []}); mut pats: []});
for p in pats { for pats.each {|p|
alt raw_pat(p).node { alt raw_pat(p).node {
pat_rec(sub, _) { pat_rec(sub, _) {
vec::iteri(fs) {|i, field| vec::iteri(fs) {|i, field|
@ -122,7 +122,7 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: [@pat]) {
} }
ty::ty_bool { ty::ty_bool {
let mut saw_true = false, saw_false = false; let mut saw_true = false, saw_false = false;
for p in pats { for pats.each {|p|
alt raw_pat(p).node { alt raw_pat(p).node {
pat_lit(@{node: expr_lit(@{node: lit_bool(b), _}), _}) { pat_lit(@{node: expr_lit(@{node: lit_bool(b), _}), _}) {
if b { saw_true = true; } if b { saw_true = true; }
@ -160,7 +160,7 @@ fn check_exhaustive_enum(tcx: ty::ctxt, enum_id: def_id, sp: span,
cols: vec::to_mut(vec::from_elem(v.args.len(), []))} cols: vec::to_mut(vec::from_elem(v.args.len(), []))}
}); });
for pat in pats { for pats.each {|pat|
let pat = raw_pat(pat); let pat = raw_pat(pat);
alt tcx.def_map.get(pat.id) { alt tcx.def_map.get(pat.id) {
def_variant(_, id) { def_variant(_, id) {
@ -193,7 +193,7 @@ fn check_exhaustive_enum(tcx: ty::ctxt, enum_id: def_id, sp: span,
fn pattern_supersedes(tcx: ty::ctxt, a: @pat, b: @pat) -> bool { fn pattern_supersedes(tcx: ty::ctxt, a: @pat, b: @pat) -> bool {
fn patterns_supersede(tcx: ty::ctxt, as: [@pat], bs: [@pat]) -> bool { fn patterns_supersede(tcx: ty::ctxt, as: [@pat], bs: [@pat]) -> bool {
let mut i = 0; let mut i = 0;
for a: @pat in as { for as.each {|a|
if !pattern_supersedes(tcx, a, bs[i]) { ret false; } if !pattern_supersedes(tcx, a, bs[i]) { ret false; }
i += 1; i += 1;
} }
@ -202,9 +202,9 @@ fn pattern_supersedes(tcx: ty::ctxt, a: @pat, b: @pat) -> bool {
fn field_patterns_supersede(tcx: ty::ctxt, fas: [field_pat], fn field_patterns_supersede(tcx: ty::ctxt, fas: [field_pat],
fbs: [field_pat]) -> bool { fbs: [field_pat]) -> bool {
let wild = @{id: 0, node: pat_wild, span: dummy_sp()}; let wild = @{id: 0, node: pat_wild, span: dummy_sp()};
for fa: field_pat in fas { for fas.each {|fa|
let mut pb = wild; let mut pb = wild;
for fb: field_pat in fbs { for fbs.each {|fb|
if fa.ident == fb.ident { pb = fb.pat; } if fa.ident == fb.ident { pb = fb.pat; }
} }
if !pattern_supersedes(tcx, fa.pat, pb) { ret false; } if !pattern_supersedes(tcx, fa.pat, pb) { ret false; }
@ -301,17 +301,17 @@ fn is_refutable(tcx: ty::ctxt, pat: @pat) -> bool {
pat_wild | pat_ident(_, none) { false } pat_wild | pat_ident(_, none) { false }
pat_lit(_) | pat_range(_, _) { true } pat_lit(_) | pat_range(_, _) { true }
pat_rec(fields, _) { pat_rec(fields, _) {
for it: field_pat in fields { for fields.each {|it|
if is_refutable(tcx, it.pat) { ret true; } if is_refutable(tcx, it.pat) { ret true; }
} }
false false
} }
pat_tup(elts) { pat_tup(elts) {
for elt in elts { if is_refutable(tcx, elt) { ret true; } } for elts.each {|elt| if is_refutable(tcx, elt) { ret true; } }
false false
} }
pat_enum(_, args) { pat_enum(_, args) {
for p: @pat in args { if is_refutable(tcx, p) { ret true; } } for args.each {|p| if is_refutable(tcx, p) { ret true; } }
false false
} }
} }

View file

@ -23,7 +23,7 @@ fn check_item(sess: session, ast_map: ast_map::map, def_map: resolve::def_map,
check_item_recursion(sess, ast_map, def_map, it); check_item_recursion(sess, ast_map, def_map, it);
} }
item_enum(vs, _) { item_enum(vs, _) {
for var in vs { for vs.each {|var|
option::with_option_do(var.node.disr_expr) {|ex| option::with_option_do(var.node.disr_expr) {|ex|
v.visit_expr(ex, true, v); v.visit_expr(ex, true, v);
} }

View file

@ -59,7 +59,7 @@ fn fn_usage_expr(expr: @ast::expr,
let args_ctx = {unsafe_fn_legal: false, let args_ctx = {unsafe_fn_legal: false,
generic_bare_fn_legal: false with ctx}; generic_bare_fn_legal: false with ctx};
for arg in args { for args.each {|arg|
visit::visit_expr_opt(arg, args_ctx, v); visit::visit_expr_opt(arg, args_ctx, v);
} }
} }

View file

@ -622,7 +622,7 @@ impl unify_methods for infer_ctxt {
let actual_arg_len = vec::len(actual_constr.node.args); let actual_arg_len = vec::len(actual_constr.node.args);
if expected_arg_len != actual_arg_len { ret err_res; } if expected_arg_len != actual_arg_len { ret err_res; }
let mut i = 0u; let mut i = 0u;
for a in expected.node.args { for expected.node.args.each {|a|
let actual = actual_constr.node.args[i]; let actual = actual_constr.node.args[i];
alt a.node { alt a.node {
ast::carg_base { ast::carg_base {

View file

@ -85,8 +85,8 @@ fn check_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, sp: span,
// the common flow point for all functions that appear in the AST. // the common flow point for all functions that appear in the AST.
with_appropriate_checker(cx, fn_id) { |checker| with_appropriate_checker(cx, fn_id) { |checker|
for @{def, span} in *freevars::get_freevars(cx.tcx, fn_id) { for vec::each(*freevars::get_freevars(cx.tcx, fn_id)) {|fv|
let id = ast_util::def_id_of_def(def).node; let id = ast_util::def_id_of_def(fv.def).node;
if checker == check_copy { if checker == check_copy {
let last_uses = alt check cx.last_uses.find(fn_id) { let last_uses = alt check cx.last_uses.find(fn_id) {
some(last_use::closes_over(vars)) { vars } some(last_use::closes_over(vars)) { vars }
@ -96,7 +96,7 @@ fn check_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, sp: span,
vec::position_elem(last_uses, id)) { cont; } vec::position_elem(last_uses, id)) { cont; }
} }
let ty = ty::node_id_to_type(cx.tcx, id); let ty = ty::node_id_to_type(cx.tcx, id);
checker(cx, ty, span); checker(cx, ty, fv.span);
} }
} }
@ -146,7 +146,7 @@ fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
// Vector add copies. // Vector add copies.
expr_binary(add, ls, rs) { maybe_copy(cx, ls); maybe_copy(cx, rs); } expr_binary(add, ls, rs) { maybe_copy(cx, ls); maybe_copy(cx, rs); }
expr_rec(fields, def) { expr_rec(fields, def) {
for field in fields { maybe_copy(cx, field.node.expr); } for fields.each {|field| maybe_copy(cx, field.node.expr); }
alt def { alt def {
some(ex) { some(ex) {
// All noncopyable fields must be overridden // All noncopyable fields must be overridden
@ -155,7 +155,7 @@ fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
ty::ty_rec(f) { f } ty::ty_rec(f) { f }
_ { cx.tcx.sess.span_bug(ex.span, "bad expr type in record"); } _ { cx.tcx.sess.span_bug(ex.span, "bad expr type in record"); }
}; };
for tf in ty_fields { for ty_fields.each {|tf|
if !vec::any(fields, {|f| f.node.ident == tf.ident}) && if !vec::any(fields, {|f| f.node.ident == tf.ident}) &&
!ty::kind_can_be_copied(ty::type_kind(cx.tcx, tf.mt.ty)) { !ty::kind_can_be_copied(ty::type_kind(cx.tcx, tf.mt.ty)) {
cx.tcx.sess.span_err(ex.span, cx.tcx.sess.span_err(ex.span,
@ -167,14 +167,14 @@ fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
} }
} }
expr_tup(exprs) | expr_vec(exprs, _) { expr_tup(exprs) | expr_vec(exprs, _) {
for expr in exprs { maybe_copy(cx, expr); } for exprs.each {|expr| maybe_copy(cx, expr); }
} }
expr_bind(_, args) { expr_bind(_, args) {
for a in args { alt a { some(ex) { maybe_copy(cx, ex); } _ {} } } for args.each {|a| alt a { some(ex) { maybe_copy(cx, ex); } _ {} } }
} }
expr_call(f, args, _) { expr_call(f, args, _) {
let mut i = 0u; let mut i = 0u;
for arg_t in ty::ty_fn_args(ty::expr_ty(cx.tcx, f)) { for ty::ty_fn_args(ty::expr_ty(cx.tcx, f)).each {|arg_t|
alt ty::arg_mode(cx.tcx, arg_t) { alt ty::arg_mode(cx.tcx, arg_t) {
by_copy { maybe_copy(cx, args[i]); } by_copy { maybe_copy(cx, args[i]); }
by_ref | by_val | by_mutbl_ref | by_move { } by_ref | by_val | by_mutbl_ref | by_move { }
@ -236,7 +236,7 @@ fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
fn check_stmt(stmt: @stmt, cx: ctx, v: visit::vt<ctx>) { fn check_stmt(stmt: @stmt, cx: ctx, v: visit::vt<ctx>) {
alt stmt.node { alt stmt.node {
stmt_decl(@{node: decl_local(locals), _}, _) { stmt_decl(@{node: decl_local(locals), _}, _) {
for local in locals { for locals.each {|local|
alt local.node.init { alt local.node.init {
some({op: init_assign, expr}) { maybe_copy(cx, expr); } some({op: init_assign, expr}) { maybe_copy(cx, expr); }
_ {} _ {}

View file

@ -114,7 +114,7 @@ fn visit_expr(ex: @expr, cx: ctx, v: visit::vt<ctx>) {
v.visit_expr(input, cx, v); v.visit_expr(input, cx, v);
let before = cx.current; let before = cx.current;
let mut sets = []; let mut sets = [];
for arm in arms { for arms.each {|arm|
cx.current = before; cx.current = before;
v.visit_arm(arm, cx, v); v.visit_arm(arm, cx, v);
sets += [cx.current]; sets += [cx.current];
@ -185,7 +185,7 @@ fn visit_expr(ex: @expr, cx: ctx, v: visit::vt<ctx>) {
} }
} }
} }
for f in fns { v.visit_expr(f, cx, v); } for fns.each {|f| v.visit_expr(f, cx, v); }
vec::iter2(args, arg_ts) {|arg, arg_t| vec::iter2(args, arg_ts) {|arg, arg_t|
alt arg.node { alt arg.node {
expr_path(_) { expr_path(_) {
@ -213,7 +213,7 @@ fn visit_stmt(s: @stmt, cx: ctx, v: visit::vt<ctx>) {
stmt_decl(@{node: decl_local(ls), _}, _) { stmt_decl(@{node: decl_local(ls), _}, _) {
shadow_in_current(cx, {|id| shadow_in_current(cx, {|id|
let mut rslt = false; let mut rslt = false;
for local in ls { for ls.each {|local|
let mut found = false; let mut found = false;
pat_util::pat_bindings(cx.tcx.def_map, local.node.pat, pat_util::pat_bindings(cx.tcx.def_map, local.node.pat,
{|pid, _a, _b| {|pid, _a, _b|
@ -246,7 +246,7 @@ fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
proto_box | proto_uniq | proto_bare { proto_box | proto_uniq | proto_bare {
alt cx.tcx.freevars.find(id) { alt cx.tcx.freevars.find(id) {
some(vars) { some(vars) {
for v in *vars { for vec::each(*vars) {|v|
option::with_option_do(def_is_owned_local(cx, v.def)) {|nid| option::with_option_do(def_is_owned_local(cx, v.def)) {|nid|
clear_in_current(cx, nid, false); clear_in_current(cx, nid, false);
cx.current += [{def: nid, cx.current += [{def: nid,
@ -304,22 +304,22 @@ fn add_block_exit(cx: ctx, tp: block_type) -> bool {
fn join_branches(branches: [set]) -> set { fn join_branches(branches: [set]) -> set {
let mut found: set = [], i = 0u; let mut found: set = [], i = 0u;
let l = vec::len(branches); let l = vec::len(branches);
for set in branches { for branches.each {|set|
i += 1u; i += 1u;
for {def, uses} in set { for set.each {|elt|
if !vec::any(found, {|v| v.def == def}) { if !vec::any(found, {|v| v.def == elt.def}) {
let mut j = i, nne = uses; let mut j = i, nne = elt.uses;
while j < l { while j < l {
for {def: d2, uses} in branches[j] { for vec::each(branches[j]) {|elt2|
if d2 == def { if elt2.def == elt.def {
list::iter(uses) {|e| list::iter(elt2.uses) {|e|
if !list::has(nne, e) { nne = cons(e, @nne); } if !list::has(nne, e) { nne = cons(e, @nne); }
} }
} }
} }
j += 1u; j += 1u;
} }
found += [{def: def, uses: nne}]; found += [{def: elt.def, uses: nne}];
} }
} }
} }
@ -327,11 +327,11 @@ fn join_branches(branches: [set]) -> set {
} }
fn leave_fn(cx: ctx) { fn leave_fn(cx: ctx) {
for {def, uses} in cx.current { for cx.current.each {|elt|
list::iter(uses) {|use| list::iter(elt.uses) {|use|
let key = alt use { let key = alt use {
var_use(pth_id) { path(pth_id) } var_use(pth_id) { path(pth_id) }
close_over(fn_id) { close(fn_id, def) } close_over(fn_id) { close(fn_id, elt.def) }
}; };
if !cx.last_uses.contains_key(key) { if !cx.last_uses.contains_key(key) {
cx.last_uses.insert(key, true); cx.last_uses.insert(key, true);
@ -343,16 +343,16 @@ fn leave_fn(cx: ctx) {
fn shadow_in_current(cx: ctx, p: fn(node_id) -> bool) { fn shadow_in_current(cx: ctx, p: fn(node_id) -> bool) {
let mut out = []; let mut out = [];
cx.current <-> out; cx.current <-> out;
for e in out { if !p(e.def) { cx.current += [e]; } } for out.each {|e| if !p(e.def) { cx.current += [e]; } }
} }
fn clear_in_current(cx: ctx, my_def: node_id, to: bool) { fn clear_in_current(cx: ctx, my_def: node_id, to: bool) {
for {def, uses} in cx.current { for cx.current.each {|elt|
if def == my_def { if elt.def == my_def {
list::iter(uses) {|use| list::iter(elt.uses) {|use|
let key = alt use { let key = alt use {
var_use(pth_id) { path(pth_id) } var_use(pth_id) { path(pth_id) }
close_over(fn_id) { close(fn_id, def) } close_over(fn_id) { close(fn_id, elt.def) }
}; };
if !to || !cx.last_uses.contains_key(key) { if !to || !cx.last_uses.contains_key(key) {
cx.last_uses.insert(key, to); cx.last_uses.insert(key, to);
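
The `last_use` hunks above show the same single-parameter constraint hitting record patterns: `for {def, uses} in cx.current` becomes `for cx.current.each {|elt|`, and the body reads `elt.def` and `elt.uses` instead of the destructured names. A small sketch with a hypothetical record type, not taken from the patch:

    fn count_live_uses(entries: [{def: int, live: bool}]) -> uint {
        let mut n = 0u;
        // old:  for {def, live} in entries { ... }
        // new:  take the record whole and use field access
        for entries.each {|e|
            if e.live { n += 1u; }
        }
        ret n;
    }
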

View file

@ -60,7 +60,8 @@ fn merge_opts(attrs: [ast::attribute], cmd_opts: [(option, bool)]) ->
} }
fn contains(xs: [(option, bool)], x: option) -> bool { fn contains(xs: [(option, bool)], x: option) -> bool {
for (o, _) in xs { for xs.each {|c|
let (o, _) = c;
if o == x { ret true; } if o == x { ret true; }
} }
ret false; ret false;
@ -85,7 +86,8 @@ fn merge_opts(attrs: [ast::attribute], cmd_opts: [(option, bool)]) ->
} }
}; };
for (o, v) in default() { for default().each {|c|
let (o, v) = c;
if !contains(result, o) { if !contains(result, o) {
result += [(o, v)]; result += [(o, v)];
} }
@ -97,7 +99,7 @@ fn merge_opts(attrs: [ast::attribute], cmd_opts: [(option, bool)]) ->
fn check_ctypes(tcx: ty::ctxt, crate: @ast::crate) { fn check_ctypes(tcx: ty::ctxt, crate: @ast::crate) {
fn check_native_fn(tcx: ty::ctxt, decl: ast::fn_decl) { fn check_native_fn(tcx: ty::ctxt, decl: ast::fn_decl) {
let tys = vec::map(decl.inputs) {|a| a.ty }; let tys = vec::map(decl.inputs) {|a| a.ty };
for ty in (tys + [decl.output]) { for vec::each(tys + [decl.output]) {|ty|
alt ty.node { alt ty.node {
ast::ty_path(_, id) { ast::ty_path(_, id) {
alt tcx.def_map.get(id) { alt tcx.def_map.get(id) {
@ -125,7 +127,7 @@ fn check_ctypes(tcx: ty::ctxt, crate: @ast::crate) {
alt it.node { alt it.node {
ast::item_native_mod(nmod) if attr::native_abi(it.attrs) != ast::item_native_mod(nmod) if attr::native_abi(it.attrs) !=
either::right(ast::native_abi_rust_intrinsic) { either::right(ast::native_abi_rust_intrinsic) {
for ni in nmod.items { for nmod.items.each {|ni|
alt ni.node { alt ni.node {
ast::native_item_fn(decl, tps) { ast::native_item_fn(decl, tps) {
check_native_fn(tcx, decl); check_native_fn(tcx, decl);
@ -148,7 +150,8 @@ fn check_ctypes(tcx: ty::ctxt, crate: @ast::crate) {
fn check_crate(tcx: ty::ctxt, crate: @ast::crate, fn check_crate(tcx: ty::ctxt, crate: @ast::crate,
opts: [(option, bool)], time: bool) { opts: [(option, bool)], time: bool) {
let lint_opts = lint::merge_opts(crate.node.attrs, opts); let lint_opts = lint::merge_opts(crate.node.attrs, opts);
for (lopt, switch) in lint_opts { for lint_opts.each {|opt|
let (lopt, switch) = opt;
if switch == true { if switch == true {
lopt.run(tcx, crate, time); lopt.run(tcx, crate, time);
} }

View file

@ -56,7 +56,7 @@ fn expr_root_(tcx: ty::ctxt, ctor_self: option<node_id>,
let mut is_mutbl = false; let mut is_mutbl = false;
alt ty::get(auto_unbox.t).struct { alt ty::get(auto_unbox.t).struct {
ty::ty_rec(fields) { ty::ty_rec(fields) {
for fld: ty::field in fields { for fields.each {|fld|
if str::eq(ident, fld.ident) { if str::eq(ident, fld.ident) {
is_mutbl = fld.mt.mutbl == m_mutbl; is_mutbl = fld.mt.mutbl == m_mutbl;
break; break;
@ -74,7 +74,7 @@ fn expr_root_(tcx: ty::ctxt, ctor_self: option<node_id>,
} }
none { false } none { false }
}; };
for fld: ty::field_ty in ty::lookup_class_fields(tcx, did) { for ty::lookup_class_fields(tcx, did).each {|fld|
if str::eq(ident, fld.ident) { if str::eq(ident, fld.ident) {
is_mutbl = fld.mutability == class_mutable is_mutbl = fld.mutability == class_mutable
|| in_self; // all fields can be mutated || in_self; // all fields can be mutated
@ -169,7 +169,7 @@ fn visit_decl(d: @decl, &&cx: @ctx, v: visit::vt<@ctx>) {
visit::visit_decl(d, cx, v); visit::visit_decl(d, cx, v);
alt d.node { alt d.node {
decl_local(locs) { decl_local(locs) {
for loc in locs { for locs.each {|loc|
alt loc.node.init { alt loc.node.init {
some(init) { some(init) {
if init.op == init_move { check_move_rhs(cx, init.expr); } if init.op == init_move { check_move_rhs(cx, init.expr); }
@ -198,7 +198,7 @@ fn visit_expr(ex: @expr, &&cx: @ctx, v: visit::vt<@ctx>) {
check_lval(cx, dest, msg_assign); check_lval(cx, dest, msg_assign);
} }
expr_fn(_, _, _, cap) { expr_fn(_, _, _, cap) {
for moved in cap.moves { for cap.moves.each {|moved|
let def = cx.tcx.def_map.get(moved.id); let def = cx.tcx.def_map.get(moved.id);
alt is_illegal_to_modify_def(cx, def, msg_move_out) { alt is_illegal_to_modify_def(cx, def, msg_move_out) {
some(name) { mk_err(cx, moved.span, msg_move_out, moved.name); } some(name) { mk_err(cx, moved.span, msg_move_out, moved.name); }
@ -281,7 +281,7 @@ fn check_move_rhs(cx: @ctx, src: @expr) {
fn check_call(cx: @ctx, f: @expr, args: [@expr]) { fn check_call(cx: @ctx, f: @expr, args: [@expr]) {
let arg_ts = ty::ty_fn_args(ty::expr_ty(cx.tcx, f)); let arg_ts = ty::ty_fn_args(ty::expr_ty(cx.tcx, f));
let mut i = 0u; let mut i = 0u;
for arg_t: ty::arg in arg_ts { for arg_ts.each {|arg_t|
alt ty::resolved_mode(cx.tcx, arg_t.mode) { alt ty::resolved_mode(cx.tcx, arg_t.mode) {
by_mutbl_ref { check_lval(cx, args[i], msg_mutbl_ref); } by_mutbl_ref { check_lval(cx, args[i], msg_mutbl_ref); }
by_move { check_lval(cx, args[i], msg_move_out); } by_move { check_lval(cx, args[i], msg_move_out); }
@ -294,7 +294,7 @@ fn check_call(cx: @ctx, f: @expr, args: [@expr]) {
fn check_bind(cx: @ctx, f: @expr, args: [option<@expr>]) { fn check_bind(cx: @ctx, f: @expr, args: [option<@expr>]) {
let arg_ts = ty::ty_fn_args(ty::expr_ty(cx.tcx, f)); let arg_ts = ty::ty_fn_args(ty::expr_ty(cx.tcx, f));
let mut i = 0u; let mut i = 0u;
for arg in args { for args.each {|arg|
alt arg { alt arg {
some(expr) { some(expr) {
let o_msg = alt ty::resolved_mode(cx.tcx, arg_ts[i].mode) { let o_msg = alt ty::resolved_mode(cx.tcx, arg_ts[i].mode) {


@ -55,8 +55,8 @@ fn walk_pat(pat: @pat, it: fn(@pat)) {
it(pat); it(pat);
alt pat.node { alt pat.node {
pat_ident(pth, some(p)) { walk_pat(p, it); } pat_ident(pth, some(p)) { walk_pat(p, it); }
pat_rec(fields, _) { for f in fields { walk_pat(f.pat, it); } } pat_rec(fields, _) { for fields.each {|f| walk_pat(f.pat, it); } }
pat_enum(_, s) | pat_tup(s) { for p in s { walk_pat(p, it); } } pat_enum(_, s) | pat_tup(s) { for s.each {|p| walk_pat(p, it); } }
pat_box(s) | pat_uniq(s) { walk_pat(s, it); } pat_box(s) | pat_uniq(s) { walk_pat(s, it); }
pat_wild | pat_lit(_) | pat_range(_, _) | pat_ident(_, none) {} pat_wild | pat_lit(_) | pat_range(_, _) | pat_ident(_, none) {}
} }


@ -440,7 +440,7 @@ fn resolve_block(blk: ast::blk, cx: ctxt, visitor: visit::vt<ctxt>) {
record_parent(cx, blk.node.id); record_parent(cx, blk.node.id);
// Resolve queued locals to this block. // Resolve queued locals to this block.
for local_id in cx.queued_locals { for cx.queued_locals.each {|local_id|
cx.region_map.local_blocks.insert(local_id, blk.node.id); cx.region_map.local_blocks.insert(local_id, blk.node.id);
} }


@ -187,7 +187,7 @@ fn create_env(sess: session, amap: ast_map::map) -> @env {
fn iter_export_paths(vi: ast::view_item, f: fn(vp: @ast::view_path)) { fn iter_export_paths(vi: ast::view_item, f: fn(vp: @ast::view_path)) {
alt vi.node { alt vi.node {
ast::view_item_export(vps) { ast::view_item_export(vps) {
for vp in vps { for vps.each {|vp|
f(vp); f(vp);
} }
} }
@ -198,9 +198,7 @@ fn iter_export_paths(vi: ast::view_item, f: fn(vp: @ast::view_path)) {
fn iter_import_paths(vi: ast::view_item, f: fn(vp: @ast::view_path)) { fn iter_import_paths(vi: ast::view_item, f: fn(vp: @ast::view_path)) {
alt vi.node { alt vi.node {
ast::view_item_import(vps) { ast::view_item_import(vps) {
for vp in vps { for vps.each {|vp| f(vp);}
f(vp);
}
} }
_ {} _ {}
} }
@ -237,7 +235,7 @@ fn map_crate(e: @env, c: @ast::crate) {
e.imports.insert(id, is_glob(path, sc, vp.span)); e.imports.insert(id, is_glob(path, sc, vp.span));
} }
ast::view_path_list(mod_path, idents, _) { ast::view_path_list(mod_path, idents, _) {
for ident in idents { for idents.each {|ident|
let t = todo(ident.node.name, let t = todo(ident.node.name,
@(*mod_path + [ident.node.name]), @(*mod_path + [ident.node.name]),
ident.span, sc); ident.span, sc);
@ -438,9 +436,9 @@ fn resolve_names(e: @env, c: @ast::crate) {
fn walk_tps(e: @env, tps: [ast::ty_param], sc: scopes, v: vt<scopes>) { fn walk_tps(e: @env, tps: [ast::ty_param], sc: scopes, v: vt<scopes>) {
let outer_current_tp = e.current_tp; let outer_current_tp = e.current_tp;
let mut current = 0u; let mut current = 0u;
for tp in tps { for tps.each {|tp|
e.current_tp = some(current); e.current_tp = some(current);
for bound in *tp.bounds { for vec::each(*tp.bounds) {|bound|
alt bound { alt bound {
bound_iface(t) { v.visit_ty(t, sc, v); } bound_iface(t) { v.visit_ty(t, sc, v); }
_ {} _ {}
@ -504,7 +502,7 @@ fn visit_item_with_scope(e: @env, i: @ast::item, sc: scopes, v: vt<scopes>) {
visit::visit_ty_params(tps, sc, v); visit::visit_ty_params(tps, sc, v);
alt ifce { some(ty) { v.visit_ty(ty, sc, v); } _ {} } alt ifce { some(ty) { v.visit_ty(ty, sc, v); } _ {} }
v.visit_ty(sty, sc, v); v.visit_ty(sty, sc, v);
for m in methods { for methods.each {|m|
v.visit_ty_params(m.tps, sc, v); v.visit_ty_params(m.tps, sc, v);
let msc = cons(scope_method(m.self_id, tps + m.tps), @sc); let msc = cons(scope_method(m.self_id, tps + m.tps), @sc);
v.visit_fn(visit::fk_method(m.ident, [], m), v.visit_fn(visit::fk_method(m.ident, [], m),
@ -513,9 +511,9 @@ fn visit_item_with_scope(e: @env, i: @ast::item, sc: scopes, v: vt<scopes>) {
} }
ast::item_iface(tps, methods) { ast::item_iface(tps, methods) {
visit::visit_ty_params(tps, sc, v); visit::visit_ty_params(tps, sc, v);
for m in methods { for methods.each {|m|
let msc = cons(scope_method(i.id, tps + m.tps), @sc); let msc = cons(scope_method(i.id, tps + m.tps), @sc);
for a in m.decl.inputs { v.visit_ty(a.ty, msc, v); } for m.decl.inputs.each {|a| v.visit_ty(a.ty, msc, v); }
v.visit_ty(m.decl.output, msc, v); v.visit_ty(m.decl.output, msc, v);
} }
} }
@ -530,7 +528,7 @@ fn visit_item_with_scope(e: @env, i: @ast::item, sc: scopes, v: vt<scopes>) {
ctor.node.body, ctor.span, ctor.node.id, ctor.node.body, ctor.span, ctor.node.id,
ctor_scope, v); ctor_scope, v);
/* visit the items */ /* visit the items */
for cm in members { for members.each {|cm|
alt cm.node { alt cm.node {
class_method(m) { class_method(m) {
let msc = cons(scope_method(m.self_id, tps + m.tps), let msc = cons(scope_method(m.self_id, tps + m.tps),
@ -571,7 +569,7 @@ fn visit_fn_with_scope(e: @env, fk: visit::fn_kind, decl: ast::fn_decl,
// here's where we need to set up the mapping // here's where we need to set up the mapping
// for f's constrs in the table. // for f's constrs in the table.
for c: @ast::constr in decl.constraints { resolve_constr(e, c, sc, v); } for decl.constraints.each {|c| resolve_constr(e, c, sc, v); }
let scope = alt fk { let scope = alt fk {
visit::fk_item_fn(_, tps) | visit::fk_res(_, tps) | visit::fk_item_fn(_, tps) | visit::fk_res(_, tps) |
visit::fk_method(_, tps, _) | visit::fk_ctor(_, tps) visit::fk_method(_, tps, _) | visit::fk_ctor(_, tps)
@ -586,8 +584,8 @@ fn visit_fn_with_scope(e: @env, fk: visit::fn_kind, decl: ast::fn_decl,
fn visit_block_with_scope(b: ast::blk, sc: scopes, v: vt<scopes>) { fn visit_block_with_scope(b: ast::blk, sc: scopes, v: vt<scopes>) {
let pos = @mut 0u, loc = @mut 0u; let pos = @mut 0u, loc = @mut 0u;
let block_sc = cons(scope_block(b, pos, loc), @sc); let block_sc = cons(scope_block(b, pos, loc), @sc);
for vi in b.node.view_items { v.visit_view_item(vi, block_sc, v); } for b.node.view_items.each {|vi| v.visit_view_item(vi, block_sc, v); }
for stmt in b.node.stmts { for b.node.stmts.each {|stmt|
v.visit_stmt(stmt, block_sc, v);; v.visit_stmt(stmt, block_sc, v);;
*pos += 1u;; *pos += 1u;;
*loc = 0u; *loc = 0u;
@ -602,14 +600,14 @@ fn visit_decl_with_scope(d: @decl, sc: scopes, v: vt<scopes>) {
}; };
alt d.node { alt d.node {
decl_local(locs) { decl_local(locs) {
for loc in locs { v.visit_local(loc, sc, v);; *loc_pos += 1u; } for locs.each {|loc| v.visit_local(loc, sc, v);; *loc_pos += 1u; }
} }
decl_item(it) { v.visit_item(it, sc, v); } decl_item(it) { v.visit_item(it, sc, v); }
} }
} }
fn visit_arm_with_scope(a: ast::arm, sc: scopes, v: vt<scopes>) { fn visit_arm_with_scope(a: ast::arm, sc: scopes, v: vt<scopes>) {
for p: @pat in a.pats { v.visit_pat(p, sc, v); } for a.pats.each {|p| v.visit_pat(p, sc, v); }
let sc_inner = cons(scope_arm(a), @sc); let sc_inner = cons(scope_arm(a), @sc);
visit::visit_expr_opt(a.guard, sc_inner, v); visit::visit_expr_opt(a.guard, sc_inner, v);
v.visit_block(a.body, sc_inner, v); v.visit_block(a.body, sc_inner, v);
@ -717,7 +715,7 @@ fn resolve_import(e: env, n_id: node_id, name: ast::ident,
fn find_imports_after(e: env, id: node_id, sc: scopes) -> [node_id] { fn find_imports_after(e: env, id: node_id, sc: scopes) -> [node_id] {
fn lst(my_id: node_id, vis: [@view_item]) -> [node_id] { fn lst(my_id: node_id, vis: [@view_item]) -> [node_id] {
let mut imports = [], found = false; let mut imports = [], found = false;
for vi in vis { for vis.each {|vi|
iter_effective_import_paths(*vi) {|vp| iter_effective_import_paths(*vi) {|vp|
alt vp.node { alt vp.node {
view_path_simple(_, _, id) view_path_simple(_, _, id)
@ -726,7 +724,7 @@ fn resolve_import(e: env, n_id: node_id, name: ast::ident,
if found { imports += [id]; } if found { imports += [id]; }
} }
view_path_list(_, ids, _) { view_path_list(_, ids, _) {
for id in ids { for ids.each {|id|
if id.node.id == my_id { found = true; } if id.node.id == my_id { found = true; }
if found { imports += [id.node.id]; } if found { imports += [id.node.id]; }
} }
@ -844,7 +842,7 @@ fn unresolved_err(e: env, cx: ctxt, sp: span, name: ident, kind: str) {
in_scope(sc) { in_scope(sc) {
alt find_fn_or_mod_scope(sc) { alt find_fn_or_mod_scope(sc) {
some(err_scope) { some(err_scope) {
for rs: {ident: str, sc: scope} in e.reported { for e.reported.each {|rs|
if str::eq(rs.ident, name) && err_scope == rs.sc { ret; } if str::eq(rs.ident, name) && err_scope == rs.sc { ret; }
} }
e.reported += [{ident: name, sc: err_scope}]; e.reported += [{ident: name, sc: err_scope}];
@ -1115,7 +1113,7 @@ fn lookup_in_scope(e: env, sc: scopes, sp: span, name: ident, ns: namespace,
fn lookup_in_ty_params(e: env, name: ident, ty_params: [ast::ty_param]) fn lookup_in_ty_params(e: env, name: ident, ty_params: [ast::ty_param])
-> option<def> { -> option<def> {
let mut n = 0u; let mut n = 0u;
for tp: ast::ty_param in ty_params { for ty_params.each {|tp|
if str::eq(tp.ident, name) && alt e.current_tp { if str::eq(tp.ident, name) && alt e.current_tp {
some(cur) { n < cur } none { true } some(cur) { n < cur } none { true }
} { ret some(ast::def_ty_param(local_def(tp.id), n)); } } { ret some(ast::def_ty_param(local_def(tp.id), n)); }
@ -1139,7 +1137,7 @@ fn lookup_in_fn(e: env, name: ident, decl: ast::fn_decl,
ns: namespace) -> option<def> { ns: namespace) -> option<def> {
alt ns { alt ns {
ns_val { ns_val {
for a: ast::arg in decl.inputs { for decl.inputs.each {|a|
if str::eq(a.ident, name) { if str::eq(a.ident, name) {
ret some(ast::def_arg(a.id, a.mode)); ret some(ast::def_arg(a.id, a.mode));
} }
@ -1189,7 +1187,7 @@ fn lookup_in_block(e: env, name: ident, sp: span, b: ast::blk_, pos: uint,
} else { } else {
alt ns { alt ns {
ns_val { ns_val {
for v: ast::variant in variants { for variants.each {|v|
if str::eq(v.node.name, name) { if str::eq(v.node.name, name) {
let i = v.node.id; let i = v.node.id;
ret some(ast::def_variant ret some(ast::def_variant
@ -1216,8 +1214,7 @@ fn lookup_in_block(e: env, name: ident, sp: span, b: ast::blk_, pos: uint,
_ { } _ { }
} }
} }
for vi in b.view_items { for b.view_items.each {|vi|
let mut is_import = false; let mut is_import = false;
alt vi.node { alt vi.node {
ast::view_item_import(_) { is_import = true; } ast::view_item_import(_) { is_import = true; }
@ -1227,7 +1224,7 @@ fn lookup_in_block(e: env, name: ident, sp: span, b: ast::blk_, pos: uint,
alt vi.node { alt vi.node {
ast::view_item_import(vps) | ast::view_item_export(vps) { ast::view_item_import(vps) | ast::view_item_export(vps) {
for vp in vps { for vps.each {|vp|
alt vp.node { alt vp.node {
ast::view_path_simple(ident, _, id) { ast::view_path_simple(ident, _, id) {
if is_import && name == ident { if is_import && name == ident {
@ -1236,7 +1233,7 @@ fn lookup_in_block(e: env, name: ident, sp: span, b: ast::blk_, pos: uint,
} }
ast::view_path_list(path, idents, _) { ast::view_path_list(path, idents, _) {
for ident in idents { for idents.each {|ident|
if name == ident.node.name { if name == ident.node.name {
ret lookup_import(e, ident.node.id, ns); ret lookup_import(e, ident.node.id, ns);
} }
@ -1467,7 +1464,7 @@ fn lookup_in_globs(e: env, globs: [glob_imp_def], sp: span, id: ident,
else if vec::len(matches) == 1u { else if vec::len(matches) == 1u {
ret some(matches[0].def); ret some(matches[0].def);
} else { } else {
for match: glob_imp_def in matches { for matches.each {|match|
let sp = match.path.span; let sp = match.path.span;
e.sess.span_note(sp, #fmt["'%s' is imported here", id]); e.sess.span_note(sp, #fmt["'%s' is imported here", id]);
} }
@ -1546,7 +1543,7 @@ fn add_to_index(index: hashmap<ident, list<mod_index_entry>>, id: ident,
fn index_view_items(view_items: [@ast::view_item], fn index_view_items(view_items: [@ast::view_item],
index: hashmap<ident, list<mod_index_entry>>) { index: hashmap<ident, list<mod_index_entry>>) {
for vi in view_items { for view_items.each {|vi|
alt vi.node { alt vi.node {
ast::view_item_use(ident, _, id) { ast::view_item_use(ident, _, id) {
add_to_index(index, ident, mie_view_item(ident, id, vi.span)); add_to_index(index, ident, mie_view_item(ident, id, vi.span));
@ -1560,7 +1557,7 @@ fn index_view_items(view_items: [@ast::view_item],
add_to_index(index, ident, mie_import_ident(id, vp.span)); add_to_index(index, ident, mie_import_ident(id, vp.span));
} }
ast::view_path_list(_, idents, _) { ast::view_path_list(_, idents, _) {
for ident in idents { for idents.each {|ident|
add_to_index(index, ident.node.name, add_to_index(index, ident.node.name,
mie_import_ident(ident.node.id, mie_import_ident(ident.node.id,
ident.span)); ident.span));
@ -1579,7 +1576,7 @@ fn index_mod(md: ast::_mod) -> mod_index {
index_view_items(md.view_items, index); index_view_items(md.view_items, index);
for it: @ast::item in md.items { for md.items.each {|it|
alt it.node { alt it.node {
ast::item_const(_, _) | ast::item_fn(_, _, _) | ast::item_mod(_) | ast::item_const(_, _) | ast::item_fn(_, _, _) | ast::item_mod(_) |
ast::item_native_mod(_) | ast::item_ty(_, _) | ast::item_native_mod(_) | ast::item_ty(_, _) |
@ -1590,7 +1587,7 @@ fn index_mod(md: ast::_mod) -> mod_index {
ast::item_enum(variants, _) { ast::item_enum(variants, _) {
add_to_index(index, it.ident, mie_item(it)); add_to_index(index, it.ident, mie_item(it));
let mut variant_idx: uint = 0u; let mut variant_idx: uint = 0u;
for v: ast::variant in variants { for variants.each {|v|
add_to_index(index, v.node.name, add_to_index(index, v.node.name,
mie_enum_variant(variant_idx, variants, mie_enum_variant(variant_idx, variants,
it.id, it.span)); it.id, it.span));
@ -1619,7 +1616,7 @@ fn index_nmod(md: ast::native_mod) -> mod_index {
index_view_items(md.view_items, index); index_view_items(md.view_items, index);
for it: @ast::native_item in md.items { for md.items.each {|it|
add_to_index(index, it.ident, mie_native_item(it)); add_to_index(index, it.ident, mie_native_item(it));
} }
ret index; ret index;
@ -1649,11 +1646,12 @@ fn ns_ok(wanted:namespace, actual:namespace) -> bool {
fn lookup_external(e: env, cnum: int, ids: [ident], ns: namespace) -> fn lookup_external(e: env, cnum: int, ids: [ident], ns: namespace) ->
option<def> { option<def> {
for d: def in csearch::lookup_defs(e.sess.cstore, cnum, ids) { let mut result = none;
for csearch::lookup_defs(e.sess.cstore, cnum, ids).each {|d|
e.ext_map.insert(def_id_of_def(d), ids); e.ext_map.insert(def_id_of_def(d), ids);
if ns_ok(ns, ns_for_def(d)) { ret some(d); } if ns_ok(ns, ns_for_def(d)) { result = some(d); }
} }
ret none; ret result;
} }
@ -1720,7 +1718,7 @@ fn mie_span(mie: mod_index_entry) -> span {
fn check_item(e: @env, i: @ast::item, &&x: (), v: vt<()>) { fn check_item(e: @env, i: @ast::item, &&x: (), v: vt<()>) {
fn typaram_names(tps: [ast::ty_param]) -> [ident] { fn typaram_names(tps: [ast::ty_param]) -> [ident] {
let mut x: [ast::ident] = []; let mut x: [ast::ident] = [];
for tp: ast::ty_param in tps { x += [tp.ident]; } for tps.each {|tp| x += [tp.ident]; }
ret x; ret x;
} }
visit::visit_item(i, x, v); visit::visit_item(i, x, v);
@ -1769,7 +1767,7 @@ fn check_arm(e: @env, a: ast::arm, &&x: (), v: vt<()>) {
e.sess.span_err(a.pats[i].span, e.sess.span_err(a.pats[i].span,
"inconsistent number of bindings"); "inconsistent number of bindings");
} else { } else {
for name: ident in ch.seen { for ch.seen.each {|name|
if is_none(vec::find(seen0, bind str::eq(name, _))) { if is_none(vec::find(seen0, bind str::eq(name, _))) {
// Fight the alias checker // Fight the alias checker
let name_ = name; let name_ = name;
@ -1787,13 +1785,13 @@ fn check_block(e: @env, b: ast::blk, &&x: (), v: vt<()>) {
let values = checker(*e, "value"); let values = checker(*e, "value");
let types = checker(*e, "type"); let types = checker(*e, "type");
let mods = checker(*e, "module"); let mods = checker(*e, "module");
for st: @ast::stmt in b.node.stmts { for b.node.stmts.each {|st|
alt st.node { alt st.node {
ast::stmt_decl(d, _) { ast::stmt_decl(d, _) {
alt d.node { alt d.node {
ast::decl_local(locs) { ast::decl_local(locs) {
let local_values = checker(*e, "value"); let local_values = checker(*e, "value");
for loc in locs { for locs.each {|loc|
pat_util::pat_bindings(e.def_map, loc.node.pat) pat_util::pat_bindings(e.def_map, loc.node.pat)
{|_i, p_sp, n| {|_i, p_sp, n|
let ident = path_to_ident(n); let ident = path_to_ident(n);
@ -1806,7 +1804,7 @@ fn check_block(e: @env, b: ast::blk, &&x: (), v: vt<()>) {
alt it.node { alt it.node {
ast::item_enum(variants, _) { ast::item_enum(variants, _) {
add_name(types, it.span, it.ident); add_name(types, it.span, it.ident);
for v: ast::variant in variants { for variants.each {|v|
add_name(values, v.span, v.node.name); add_name(values, v.span, v.node.name);
} }
} }
@ -1868,7 +1866,7 @@ fn checker(e: env, kind: str) -> checker {
} }
fn check_name(ch: checker, sp: span, name: ident) { fn check_name(ch: checker, sp: span, name: ident) {
for s: ident in ch.seen { for ch.seen.each {|s|
if str::eq(s, name) { if str::eq(s, name) {
ch.sess.span_fatal(sp, "duplicate " + ch.kind + " name: " + name); ch.sess.span_fatal(sp, "duplicate " + ch.kind + " name: " + name);
} }
@ -1882,7 +1880,7 @@ fn add_name(ch: checker, sp: span, name: ident) {
fn ensure_unique<T>(e: env, sp: span, elts: [T], id: fn(T) -> ident, fn ensure_unique<T>(e: env, sp: span, elts: [T], id: fn(T) -> ident,
kind: str) { kind: str) {
let ch = checker(e, kind); let ch = checker(e, kind);
for elt: T in elts { add_name(ch, sp, id(elt)); } for elts.each {|elt| add_name(ch, sp, id(elt)); }
} }
fn check_exports(e: @env) { fn check_exports(e: @env) {
@ -1908,7 +1906,7 @@ fn check_exports(e: @env) {
[ found_def_item(item, ns_val), [ found_def_item(item, ns_val),
found_def_item(item, ns_type), found_def_item(item, ns_type),
found_def_item(item, ns_module) ]; found_def_item(item, ns_module) ];
for d in defs { for defs.each {|d|
alt d { alt d {
some(def) { some(def) {
f(ident, def); f(ident, def);
@ -2015,7 +2013,7 @@ fn check_exports(e: @env) {
ids: [ast::path_list_ident]) { ids: [ast::path_list_ident]) {
let parent_id = check_enum_ok(e, span, id, _mod); let parent_id = check_enum_ok(e, span, id, _mod);
add_export(e, export_id, local_def(parent_id), false); add_export(e, export_id, local_def(parent_id), false);
for variant_id in ids { for ids.each {|variant_id|
let mut found = false; let mut found = false;
alt _mod.index.find(variant_id.node.name) { alt _mod.index.find(variant_id.node.name) {
some(ms) { some(ms) {
@ -2048,7 +2046,7 @@ fn check_exports(e: @env) {
some(m) { some(m) {
let glob_is_re_exported = int_hash(); let glob_is_re_exported = int_hash();
for vi in m.view_items { for m.view_items.each {|vi|
iter_export_paths(*vi) { |vp| iter_export_paths(*vi) { |vp|
alt vp.node { alt vp.node {
ast::view_path_simple(ident, _, id) { ast::view_path_simple(ident, _, id) {
@ -2071,7 +2069,7 @@ fn check_exports(e: @env) {
} }
// Now follow the export-glob links and fill in the // Now follow the export-glob links and fill in the
// globbed_exports and exp_map lists. // globbed_exports and exp_map lists.
for glob in _mod.glob_imports { for _mod.glob_imports.each {|glob|
let id = alt check glob.path.node { let id = alt check glob.path.node {
ast::view_path_glob(_, node_id) { node_id } ast::view_path_glob(_, node_id) { node_id }
}; };
@ -2127,7 +2125,7 @@ fn find_impls_in_view_item(e: env, vi: @ast::view_item,
option::with_option_do(sc) {|sc| option::with_option_do(sc) {|sc|
list::iter(sc) {|level| list::iter(sc) {|level|
if vec::len(found) == 0u { if vec::len(found) == 0u {
for imp in *level { for vec::each(*level) {|imp|
if imp.ident == pt[0] { if imp.ident == pt[0] {
found += [@{ident: name with *imp}]; found += [@{ident: name with *imp}];
} }
@ -2138,13 +2136,15 @@ fn find_impls_in_view_item(e: env, vi: @ast::view_item,
} }
} else { } else {
lookup_imported_impls(e, id) {|is| lookup_imported_impls(e, id) {|is|
for i in *is { impls += [@{ident: name with *i}]; } for vec::each(*is) {|i|
impls += [@{ident: name with *i}];
}
} }
} }
} }
ast::view_path_list(base, names, _) { ast::view_path_list(base, names, _) {
for nm in names { for names.each {|nm|
lookup_imported_impls(e, nm.node.id) {|is| impls += *is; } lookup_imported_impls(e, nm.node.id) {|is| impls += *is; }
} }
} }
@ -2198,10 +2198,10 @@ fn find_impls_in_mod_by_id(e: env, defid: def_id, &impls: [@_impl],
let mut tmp = []; let mut tmp = [];
let mi = e.mod_map.get(defid.node); let mi = e.mod_map.get(defid.node);
let md = option::get(mi.m); let md = option::get(mi.m);
for vi in md.view_items { for md.view_items.each {|vi|
find_impls_in_view_item(e, vi, tmp, none); find_impls_in_view_item(e, vi, tmp, none);
} }
for i in md.items { for md.items.each {|i|
find_impls_in_item(e, i, tmp, none, none); find_impls_in_item(e, i, tmp, none, none);
} }
@vec::filter(tmp) {|i| is_exported(e, i.ident, mi)} @vec::filter(tmp) {|i| is_exported(e, i.ident, mi)}
@ -2213,7 +2213,7 @@ fn find_impls_in_mod_by_id(e: env, defid: def_id, &impls: [@_impl],
} }
alt name { alt name {
some(n) { some(n) {
for im in *cached { for vec::each(*cached) {|im|
if n == im.ident { impls += [im]; } if n == im.ident { impls += [im]; }
} }
} }
@ -2234,10 +2234,10 @@ fn find_impls_in_mod(e: env, m: def, &impls: [@_impl],
fn visit_block_with_impl_scope(e: @env, b: ast::blk, sc: iscopes, fn visit_block_with_impl_scope(e: @env, b: ast::blk, sc: iscopes,
v: vt<iscopes>) { v: vt<iscopes>) {
let mut impls = []; let mut impls = [];
for vi in b.node.view_items { for b.node.view_items.each {|vi|
find_impls_in_view_item(*e, vi, impls, some(sc)); find_impls_in_view_item(*e, vi, impls, some(sc));
} }
for st in b.node.stmts { for b.node.stmts.each {|st|
alt st.node { alt st.node {
ast::stmt_decl(@{node: ast::decl_item(i), _}, _) { ast::stmt_decl(@{node: ast::decl_item(i), _}, _) {
find_impls_in_item(*e, i, impls, none, none); find_impls_in_item(*e, i, impls, none, none);
@ -2252,10 +2252,10 @@ fn visit_block_with_impl_scope(e: @env, b: ast::blk, sc: iscopes,
fn visit_mod_with_impl_scope(e: @env, m: ast::_mod, s: span, id: node_id, fn visit_mod_with_impl_scope(e: @env, m: ast::_mod, s: span, id: node_id,
sc: iscopes, v: vt<iscopes>) { sc: iscopes, v: vt<iscopes>) {
let mut impls = []; let mut impls = [];
for vi in m.view_items { for m.view_items.each {|vi|
find_impls_in_view_item(*e, vi, impls, some(sc)); find_impls_in_view_item(*e, vi, impls, some(sc));
} }
for i in m.items { find_impls_in_item(*e, i, impls, none, none); } for m.items.each {|i| find_impls_in_item(*e, i, impls, none, none); }
let impls = @impls; let impls = @impls;
visit::visit_mod(m, s, id, if vec::len(*impls) > 0u { visit::visit_mod(m, s, id, if vec::len(*impls) > 0u {
cons(impls, @sc) cons(impls, @sc)


@ -2788,7 +2788,7 @@ fn need_invoke(bcx: block) -> bool {
loop { loop {
alt cur.kind { alt cur.kind {
block_scope(info) { block_scope(info) {
for cleanup in info.cleanups { for info.cleanups.each {|cleanup|
alt cleanup { alt cleanup {
clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) { clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) {
if cleanup_type == normal_exit_and_unwind { if cleanup_type == normal_exit_and_unwind {
@ -2925,7 +2925,7 @@ fn trans_rec(bcx: block, fields: [ast::field],
let ty_fields = alt check ty::get(t).struct { ty::ty_rec(f) { f } }; let ty_fields = alt check ty::get(t).struct { ty::ty_rec(f) { f } };
let mut temp_cleanups = []; let mut temp_cleanups = [];
for fld in fields { for fields.each {|fld|
let ix = option::get(vec::position(ty_fields, {|ft| let ix = option::get(vec::position(ty_fields, {|ft|
str::eq(fld.node.ident, ft.ident) str::eq(fld.node.ident, ft.ident)
})); }));
@ -2940,7 +2940,7 @@ fn trans_rec(bcx: block, fields: [ast::field],
let mut i = 0; let mut i = 0;
bcx = cx; bcx = cx;
// Copy over inherited fields // Copy over inherited fields
for tf in ty_fields { for ty_fields.each {|tf|
if !vec::any(fields, {|f| str::eq(f.node.ident, tf.ident)}) { if !vec::any(fields, {|f| str::eq(f.node.ident, tf.ident)}) {
let dst = GEPi(bcx, addr, [0, i]); let dst = GEPi(bcx, addr, [0, i]);
let base = GEPi(bcx, base_val, [0, i]); let base = GEPi(bcx, base_val, [0, i]);
@ -2955,7 +2955,7 @@ fn trans_rec(bcx: block, fields: [ast::field],
// Now revoke the cleanups as we pass responsibility for the data // Now revoke the cleanups as we pass responsibility for the data
// structure on to the caller // structure on to the caller
for cleanup in temp_cleanups { revoke_clean(bcx, cleanup); } for temp_cleanups.each {|cleanup| revoke_clean(bcx, cleanup); }
ret bcx; ret bcx;
} }
@ -3707,15 +3707,16 @@ fn cleanup_and_leave(bcx: block, upto: option<BasicBlockRef>,
let _icx = bcx.insn_ctxt("cleanup_and_leave"); let _icx = bcx.insn_ctxt("cleanup_and_leave");
let mut cur = bcx, bcx = bcx; let mut cur = bcx, bcx = bcx;
let is_lpad = leave == none; let is_lpad = leave == none;
let mut done = false;
loop { loop {
alt cur.kind { alt cur.kind {
block_scope(info) if info.cleanups.len() > 0u { block_scope(info) if info.cleanups.len() > 0u {
for cp in info.cleanup_paths { option::with_option_do(vec::find(info.cleanup_paths,
if cp.target == leave { {|cp| cp.target == leave})) {|cp|
Br(bcx, cp.dest); Br(bcx, cp.dest);
ret; done = true;
}
} }
if done { ret; }
let sub_cx = sub_block(bcx, "cleanup"); let sub_cx = sub_block(bcx, "cleanup");
Br(bcx, sub_cx.llbb); Br(bcx, sub_cx.llbb);
info.cleanup_paths += [{target: leave, dest: sub_cx.llbb}]; info.cleanup_paths += [{target: leave, dest: sub_cx.llbb}];
@ -4318,7 +4319,7 @@ fn trans_class_ctor(ccx: @crate_ctxt, path: path, decl: ast::fn_decl,
let mut bcx = bcx_top; let mut bcx = bcx_top;
// Initialize fields to zero so init assignments can validly // Initialize fields to zero so init assignments can validly
// drop their LHS // drop their LHS
for field in fields { for fields.each {|field|
let ix = field_idx_strict(bcx.tcx(), sp, field.ident, fields); let ix = field_idx_strict(bcx.tcx(), sp, field.ident, fields);
bcx = zero_alloca(bcx, GEPi(bcx, selfptr, [0, ix]), bcx = zero_alloca(bcx, GEPi(bcx, selfptr, [0, ix]),
field.mt.ty); field.mt.ty);


@ -280,19 +280,13 @@ fn add_clean_free(cx: block, ptr: ValueRef, shared: bool) {
// drop glue checks whether it is zero. // drop glue checks whether it is zero.
fn revoke_clean(cx: block, val: ValueRef) { fn revoke_clean(cx: block, val: ValueRef) {
in_scope_cx(cx) {|info| in_scope_cx(cx) {|info|
let mut i = 0u; option::with_option_do(vec::position(info.cleanups, {|cu|
for cu in info.cleanups { alt cu { clean_temp(v, _, _) if v == val { true } _ { false } }
alt cu { })) {|i|
clean_temp(v, _, _) if v == val { info.cleanups =
info.cleanups = vec::slice(info.cleanups, 0u, i) +
vec::slice(info.cleanups, 0u, i) + vec::slice(info.cleanups, i + 1u, info.cleanups.len());
vec::slice(info.cleanups, i + 1u, info.cleanups.len()); scope_clean_changed(info);
scope_clean_changed(info);
break;
}
_ {}
}
i += 1u;
} }
} }
} }


@ -152,7 +152,7 @@ fn cached_metadata<T: copy>(cache: metadata_cache, mdtag: int,
eq: fn(md: T) -> bool) -> option<T> unsafe { eq: fn(md: T) -> bool) -> option<T> unsafe {
if cache.contains_key(mdtag) { if cache.contains_key(mdtag) {
let items = cache.get(mdtag); let items = cache.get(mdtag);
for item in items { for items.each {|item|
let md: T = md_from_metadata::<T>(item); let md: T = md_from_metadata::<T>(item);
if eq(md) { if eq(md) {
ret option::some(md); ret option::some(md);
@ -421,7 +421,7 @@ fn create_record(cx: @crate_ctxt, t: ty::t, fields: [ast::ty_field],
option::get(cx.dbg_cx).names("rec"), option::get(cx.dbg_cx).names("rec"),
line_from_span(cx.sess.codemap, line_from_span(cx.sess.codemap,
span) as int); span) as int);
for field in fields { for fields.each {|field|
let field_t = ty::get_field(t, field.node.ident).mt.ty; let field_t = ty::get_field(t, field.node.ident).mt.ty;
let ty_md = create_ty(cx, field_t, field.node.mt.ty); let ty_md = create_ty(cx, field_t, field.node.mt.ty);
let (size, align) = size_and_align_of(cx, field_t); let (size, align) = size_and_align_of(cx, field_t);


@ -372,7 +372,7 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t, ty_param_map: [uint]) -> [u8] {
ty::ty_class(did, ts) { ty::ty_class(did, ts) {
// same as records // same as records
let mut s = [shape_struct], sub = []; let mut s = [shape_struct], sub = [];
for f:field in ty::class_items_as_fields(ccx.tcx, did, ts) { for ty::class_items_as_fields(ccx.tcx, did, ts).each {|f|
sub += shape_of(ccx, f.mt.ty, ty_param_map); sub += shape_of(ccx, f.mt.ty, ty_param_map);
} }
add_substr(s, sub); add_substr(s, sub);
@ -465,7 +465,7 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef {
let data_sz = vec::len(data) as u16; let data_sz = vec::len(data) as u16;
let mut info_sz = 0u16; let mut info_sz = 0u16;
for did_ in ccx.shape_cx.tag_order { for ccx.shape_cx.tag_order.each {|did_|
let did = did_; // Satisfy alias checker. let did = did_; // Satisfy alias checker.
let num_variants = vec::len(*ty::enum_variants(ccx.tcx, did)) as u16; let num_variants = vec::len(*ty::enum_variants(ccx.tcx, did)) as u16;
add_u16(header, header_sz + info_sz); add_u16(header, header_sz + info_sz);
@ -478,7 +478,7 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef {
let mut lv_table = []; let mut lv_table = [];
i = 0u; i = 0u;
for did_ in ccx.shape_cx.tag_order { for ccx.shape_cx.tag_order.each {|did_|
let did = did_; // Satisfy alias checker. let did = did_; // Satisfy alias checker.
let variants = ty::enum_variants(ccx.tcx, did); let variants = ty::enum_variants(ccx.tcx, did);
add_u16(info, vec::len(*variants) as u16); add_u16(info, vec::len(*variants) as u16);


@ -38,7 +38,7 @@ fn node_ids_in_fn(tcx: ty::ctxt, body: blk, rs: @mut [node_id]) {
} }
fn init_vecs(ccx: crate_ctxt, node_ids: [node_id], len: uint) { fn init_vecs(ccx: crate_ctxt, node_ids: [node_id], len: uint) {
for i: node_id in node_ids { for node_ids.each {|i|
log(debug, int::str(i) + " |-> " + uint::str(len)); log(debug, int::str(i) + " |-> " + uint::str(len));
add_node(ccx, i, empty_ann(len)); add_node(ccx, i, empty_ann(len));
} }


@ -37,7 +37,7 @@ fn def_id_to_str(d: def_id) -> str {
fn comma_str(args: [@constr_arg_use]) -> str { fn comma_str(args: [@constr_arg_use]) -> str {
let mut rslt = ""; let mut rslt = "";
let mut comma = false; let mut comma = false;
for a: @constr_arg_use in args { for args.each {|a|
if comma { rslt += ", "; } else { comma = true; } if comma { rslt += ", "; } else { comma = true; }
alt a.node { alt a.node {
carg_base { rslt += "*"; } carg_base { rslt += "*"; }
@ -66,7 +66,7 @@ fn constraint_to_str(tcx: ty::ctxt, c: sp_constr) -> str {
fn tritv_to_str(fcx: fn_ctxt, v: tritv::t) -> str { fn tritv_to_str(fcx: fn_ctxt, v: tritv::t) -> str {
let mut s = ""; let mut s = "";
let mut comma = false; let mut comma = false;
for p: norm_constraint in constraints(fcx) { for constraints(fcx).each {|p|
alt tritv_get(v, p.bit_num) { alt tritv_get(v, p.bit_num) {
dont_care { } dont_care { }
tt { tt {
@ -86,11 +86,12 @@ fn log_tritv(fcx: fn_ctxt, v: tritv::t) {
fn first_difference_string(fcx: fn_ctxt, expected: tritv::t, actual: tritv::t) fn first_difference_string(fcx: fn_ctxt, expected: tritv::t, actual: tritv::t)
-> str { -> str {
let s: str = ""; let mut s = "";
for c: norm_constraint in constraints(fcx) { for constraints(fcx).each {|c|
if tritv_get(expected, c.bit_num) == ttrue && if tritv_get(expected, c.bit_num) == ttrue &&
tritv_get(actual, c.bit_num) != ttrue { tritv_get(actual, c.bit_num) != ttrue {
ret constraint_to_str(fcx.ccx.tcx, c.c); s = constraint_to_str(fcx.ccx.tcx, c.c);
break;
} }
} }
ret s; ret s;
@ -102,7 +103,7 @@ fn log_tritv_err(fcx: fn_ctxt, v: tritv::t) {
fn tos(v: [uint]) -> str { fn tos(v: [uint]) -> str {
let mut rslt = ""; let mut rslt = "";
for i: uint in v { for v.each {|i|
if i == 0u { if i == 0u {
rslt += "0"; rslt += "0";
} else if i == 1u { rslt += "1"; } else { rslt += "?"; } } else if i == 1u { rslt += "1"; } else { rslt += "?"; }
@ -539,7 +540,7 @@ fn norm_a_constraint(id: def_id, c: constraint) -> [norm_constraint] {
} }
cpred(p, descs) { cpred(p, descs) {
let mut rslt: [norm_constraint] = []; let mut rslt: [norm_constraint] = [];
for pd: pred_args in *descs { for vec::each(*descs) {|pd|
rslt += rslt +=
[{bit_num: pd.node.bit_num, [{bit_num: pd.node.bit_num,
c: respan(pd.span, npred(p, id, pd.node.args))}]; c: respan(pd.span, npred(p, id, pd.node.args))}];
@ -567,7 +568,7 @@ fn match_args(fcx: fn_ctxt, occs: @mut [pred_args],
occ: [@constr_arg_use]) -> uint { occ: [@constr_arg_use]) -> uint {
#debug("match_args: looking at %s", #debug("match_args: looking at %s",
constr_args_to_str(fn@(i: inst) -> str { ret i.ident; }, occ)); constr_args_to_str(fn@(i: inst) -> str { ret i.ident; }, occ));
for pd: pred_args in *occs { for vec::each(*occs) {|pd|
log(debug, log(debug,
"match_args: candidate " + pred_args_to_str(pd)); "match_args: candidate " + pred_args_to_str(pd));
fn eq(p: inst, q: inst) -> bool { ret p.node == q.node; } fn eq(p: inst, q: inst) -> bool { ret p.node == q.node; }
@ -619,7 +620,7 @@ fn expr_to_constr_arg(tcx: ty::ctxt, e: @expr) -> @constr_arg_use {
fn exprs_to_constr_args(tcx: ty::ctxt, args: [@expr]) -> [@constr_arg_use] { fn exprs_to_constr_args(tcx: ty::ctxt, args: [@expr]) -> [@constr_arg_use] {
let f = bind expr_to_constr_arg(tcx, _); let f = bind expr_to_constr_arg(tcx, _);
let mut rslt: [@constr_arg_use] = []; let mut rslt: [@constr_arg_use] = [];
for e: @expr in args { rslt += [f(e)]; } for args.each {|e| rslt += [f(e)]; }
rslt rslt
} }
@ -653,7 +654,7 @@ fn pred_args_to_str(p: pred_args) -> str {
fn substitute_constr_args(cx: ty::ctxt, actuals: [@expr], c: @ty::constr) -> fn substitute_constr_args(cx: ty::ctxt, actuals: [@expr], c: @ty::constr) ->
tsconstr { tsconstr {
let mut rslt: [@constr_arg_use] = []; let mut rslt: [@constr_arg_use] = [];
for a: @constr_arg in c.node.args { for c.node.args.each {|a|
rslt += [substitute_arg(cx, actuals, a)]; rslt += [substitute_arg(cx, actuals, a)];
} }
ret npred(c.node.path, c.node.id, rslt); ret npred(c.node.path, c.node.id, rslt);
@ -678,7 +679,7 @@ fn substitute_arg(cx: ty::ctxt, actuals: [@expr], a: @constr_arg) ->
fn pred_args_matches(pattern: [constr_arg_general_<inst>], desc: pred_args) -> fn pred_args_matches(pattern: [constr_arg_general_<inst>], desc: pred_args) ->
bool { bool {
let mut i = 0u; let mut i = 0u;
for c: @constr_arg_use in desc.node.args { for desc.node.args.each {|c|
let n = pattern[i]; let n = pattern[i];
alt c.node { alt c.node {
carg_ident(p) { carg_ident(p) {
@ -702,7 +703,7 @@ fn pred_args_matches(pattern: [constr_arg_general_<inst>], desc: pred_args) ->
fn find_instance_(pattern: [constr_arg_general_<inst>], descs: [pred_args]) -> fn find_instance_(pattern: [constr_arg_general_<inst>], descs: [pred_args]) ->
option<uint> { option<uint> {
for d: pred_args in descs { for descs.each {|d|
if pred_args_matches(pattern, d) { ret some(d.node.bit_num); } if pred_args_matches(pattern, d) { ret some(d.node.bit_num); }
} }
ret none; ret none;
@ -720,7 +721,7 @@ fn find_instances(_fcx: fn_ctxt, subst: subst, c: constraint) ->
alt c { alt c {
cinit(_, _, _) {/* this is dealt with separately */ } cinit(_, _, _) {/* this is dealt with separately */ }
cpred(p, descs) { cpred(p, descs) {
for d: pred_args in *descs { for vec::each(copy *descs) {|d|
if args_mention(d.node.args, find_in_subst_bool, subst) { if args_mention(d.node.args, find_in_subst_bool, subst) {
let old_bit_num = d.node.bit_num; let old_bit_num = d.node.bit_num;
let newv = replace(subst, d); let newv = replace(subst, d);
@ -736,7 +737,7 @@ fn find_instances(_fcx: fn_ctxt, subst: subst, c: constraint) ->
} }
fn find_in_subst(id: node_id, s: subst) -> option<inst> { fn find_in_subst(id: node_id, s: subst) -> option<inst> {
for p: {from: inst, to: inst} in s { for s.each {|p|
if id == p.from.node { ret some(p.to); } if id == p.from.node { ret some(p.to); }
} }
ret none; ret none;
@ -748,7 +749,7 @@ fn find_in_subst_bool(s: subst, id: node_id) -> bool {
fn insts_to_str(stuff: [constr_arg_general_<inst>]) -> str { fn insts_to_str(stuff: [constr_arg_general_<inst>]) -> str {
let mut rslt = "<"; let mut rslt = "<";
for i: constr_arg_general_<inst> in stuff { for stuff.each {|i|
rslt += rslt +=
" " + " " +
alt i { alt i {
@ -763,7 +764,7 @@ fn insts_to_str(stuff: [constr_arg_general_<inst>]) -> str {
fn replace(subst: subst, d: pred_args) -> [constr_arg_general_<inst>] { fn replace(subst: subst, d: pred_args) -> [constr_arg_general_<inst>] {
let mut rslt: [constr_arg_general_<inst>] = []; let mut rslt: [constr_arg_general_<inst>] = [];
for c: @constr_arg_use in d.node.args { for d.node.args.each {|c|
alt c.node { alt c.node {
carg_ident(p) { carg_ident(p) {
alt find_in_subst(p.node, subst) { alt find_in_subst(p.node, subst) {
@ -872,7 +873,7 @@ fn copy_in_poststate_two(fcx: fn_ctxt, src_post: poststate,
// replace any occurrences of the src def_id with the // replace any occurrences of the src def_id with the
// dest def_id // dest def_id
let insts = find_instances(fcx, subst, val); let insts = find_instances(fcx, subst, val);
for p: {from: uint, to: uint} in insts { for insts.each {|p|
if promises_(p.from, src_post) { if promises_(p.from, src_post) {
set_in_poststate_(p.to, target_post); set_in_poststate_(p.to, target_post);
} }
@ -887,7 +888,7 @@ fn forget_in_postcond(fcx: fn_ctxt, parent_exp: node_id, dead_v: node_id) {
let d = local_node_id_to_local_def_id(fcx, dead_v); let d = local_node_id_to_local_def_id(fcx, dead_v);
alt d { alt d {
some(d_id) { some(d_id) {
for c: norm_constraint in constraints(fcx) { for constraints(fcx).each {|c|
if constraint_mentions(fcx, c, d_id) { if constraint_mentions(fcx, c, d_id) {
#debug("clearing constraint %u %s", #debug("clearing constraint %u %s",
c.bit_num, c.bit_num,
@ -909,7 +910,7 @@ fn forget_in_postcond_still_init(fcx: fn_ctxt, parent_exp: node_id,
let d = local_node_id_to_local_def_id(fcx, dead_v); let d = local_node_id_to_local_def_id(fcx, dead_v);
alt d { alt d {
some(d_id) { some(d_id) {
for c: norm_constraint in constraints(fcx) { for constraints(fcx).each {|c|
if non_init_constraint_mentions(fcx, c, d_id) { if non_init_constraint_mentions(fcx, c, d_id) {
clear_in_postcond(c.bit_num, clear_in_postcond(c.bit_num,
node_id_to_ts_ann(fcx.ccx, node_id_to_ts_ann(fcx.ccx,
@ -928,7 +929,7 @@ fn forget_in_poststate(fcx: fn_ctxt, p: poststate, dead_v: node_id) -> bool {
let mut changed = false; let mut changed = false;
alt d { alt d {
some(d_id) { some(d_id) {
for c: norm_constraint in constraints(fcx) { for constraints(fcx).each {|c|
if constraint_mentions(fcx, c, d_id) { if constraint_mentions(fcx, c, d_id) {
changed |= clear_in_poststate_(c.bit_num, p); changed |= clear_in_poststate_(c.bit_num, p);
} }
@ -947,7 +948,7 @@ fn forget_in_poststate_still_init(fcx: fn_ctxt, p: poststate, dead_v: node_id)
let mut changed = false; let mut changed = false;
alt d { alt d {
some(d_id) { some(d_id) {
for c: norm_constraint in constraints(fcx) { for constraints(fcx).each {|c|
if non_init_constraint_mentions(fcx, c, d_id) { if non_init_constraint_mentions(fcx, c, d_id) {
changed |= clear_in_poststate_(c.bit_num, p); changed |= clear_in_poststate_(c.bit_num, p);
} }
@ -959,7 +960,7 @@ fn forget_in_poststate_still_init(fcx: fn_ctxt, p: poststate, dead_v: node_id)
} }
fn any_eq(v: [node_id], d: node_id) -> bool { fn any_eq(v: [node_id], d: node_id) -> bool {
for i: node_id in v { if i == d { ret true; } } for v.each {|i| if i == d { ret true; } }
false false
} }
@ -1000,7 +1001,7 @@ fn args_mention<T>(args: [@constr_arg_use],
ret vec::any(bind mentions(s,q,_), args); ret vec::any(bind mentions(s,q,_), args);
*/ */
for a: @constr_arg_use in args { for args.each {|a|
alt a.node { carg_ident(p1) { if q(s, p1.node) { ret true; } } _ { } } alt a.node { carg_ident(p1) { if q(s, p1.node) { ret true; } } _ { } }
} }
ret false; ret false;
@ -1010,7 +1011,7 @@ fn use_var(fcx: fn_ctxt, v: node_id) { *fcx.enclosing.used_vars += [v]; }
// FIXME: This should be a function in vec::. // FIXME: This should be a function in vec::.
fn vec_contains(v: @mut [node_id], i: node_id) -> bool { fn vec_contains(v: @mut [node_id], i: node_id) -> bool {
for d: node_id in *v { if d == i { ret true; } } for vec::each(*v) {|d| if d == i { ret true; } }
ret false; ret false;
} }
@ -1029,7 +1030,7 @@ fn args_to_constr_args(tcx: ty::ctxt, args: [arg],
indices: [@sp_constr_arg<uint>]) -> [@constr_arg_use] { indices: [@sp_constr_arg<uint>]) -> [@constr_arg_use] {
let mut actuals: [@constr_arg_use] = []; let mut actuals: [@constr_arg_use] = [];
let num_args = vec::len(args); let num_args = vec::len(args);
for a: @sp_constr_arg<uint> in indices { for indices.each {|a|
actuals += actuals +=
[@respan(a.span, [@respan(a.span,
alt a.node { alt a.node {
@ -1075,7 +1076,7 @@ fn local_to_bindings(tcx: ty::ctxt, loc: @local) -> binding {
fn locals_to_bindings(tcx: ty::ctxt, locals: [@local]) -> [binding] { fn locals_to_bindings(tcx: ty::ctxt, locals: [@local]) -> [binding] {
let mut rslt = []; let mut rslt = [];
for loc in locals { rslt += [local_to_bindings(tcx, loc)]; } for locals.each {|loc| rslt += [local_to_bindings(tcx, loc)]; }
ret rslt; ret rslt;
} }
@ -1085,7 +1086,7 @@ fn callee_modes(fcx: fn_ctxt, callee: node_id) -> [mode] {
alt ty::get(ty).struct { alt ty::get(ty).struct {
ty::ty_fn({inputs: args, _}) { ty::ty_fn({inputs: args, _}) {
let mut modes = []; let mut modes = [];
for arg: ty::arg in args { modes += [arg.mode]; } for args.each {|arg| modes += [arg.mode]; }
ret modes; ret modes;
} }
_ { _ {
@ -1108,7 +1109,7 @@ fn callee_arg_init_ops(fcx: fn_ctxt, callee: node_id) -> [init_op] {
fn anon_bindings(ops: [init_op], es: [@expr]) -> [binding] { fn anon_bindings(ops: [init_op], es: [@expr]) -> [binding] {
let mut bindings: [binding] = []; let mut bindings: [binding] = [];
let mut i = 0; let mut i = 0;
for op: init_op in ops { for ops.each {|op|
bindings += [{lhs: [], rhs: some({op: op, expr: es[i]})}]; bindings += [{lhs: [], rhs: some({op: op, expr: es[i]})}];
i += 1; i += 1;
} }


@ -67,7 +67,7 @@ fn seq_postconds(fcx: fn_ctxt, ps: [postcond]) -> postcond {
let sz = vec::len(ps); let sz = vec::len(ps);
if sz >= 1u { if sz >= 1u {
let prev = tritv_clone(ps[0]); let prev = tritv_clone(ps[0]);
for p: postcond in vec::slice(ps, 1u, sz) { seq_tritv(prev, p); } vec::iter_between(ps, 1u, sz) {|p| seq_tritv(prev, p); }
ret prev; ret prev;
} else { ret ann::empty_poststate(num_constraints(fcx.enclosing)); } } else { ret ann::empty_poststate(num_constraints(fcx.enclosing)); }
} }


@ -19,7 +19,7 @@ import std::map::hashmap;
fn check_unused_vars(fcx: fn_ctxt) { fn check_unused_vars(fcx: fn_ctxt) {
// FIXME: could be more efficient // FIXME: could be more efficient
for c: norm_constraint in constraints(fcx) { for constraints(fcx).each {|c|
alt c.c.node { alt c.c.node {
ninit(id, v) { ninit(id, v) {
if !vec_contains(fcx.enclosing.used_vars, id) && v[0] != '_' as u8 if !vec_contains(fcx.enclosing.used_vars, id) && v[0] != '_' as u8


@ -27,7 +27,7 @@ fn collect_pred(e: @expr, cx: ctxt, v: visit::vt<ctxt>) {
// If it's a call, generate appropriate instances of the // If it's a call, generate appropriate instances of the
// call's constraints. // call's constraints.
expr_call(operator, operands, _) { expr_call(operator, operands, _) {
for c: @ty::constr in constraints_expr(cx.tcx, operator) { for constraints_expr(cx.tcx, operator).each {|c|
let ct: sp_constr = let ct: sp_constr =
respan(c.span, respan(c.span,
aux::substitute_constr_args(cx.tcx, operands, c)); aux::substitute_constr_args(cx.tcx, operands, c));
@ -105,20 +105,21 @@ fn mk_fn_info(ccx: crate_ctxt,
/* now we have to add bit nums for both the constraints /* now we have to add bit nums for both the constraints
and the variables... */ and the variables... */
for c: sp_constr in { *cx.cs } { let mut i = 0u, l = vec::len(*cx.cs);
next = add_constraint(cx.tcx, c, next, res_map); while i < l {
next = add_constraint(cx.tcx, cx.cs[i], next, res_map);
i += 1u;
} }
/* if this function has any constraints, instantiate them to the /* if this function has any constraints, instantiate them to the
argument names and add them */ argument names and add them */
let mut sc; for f_decl.constraints.each {|c|
for c: @constr in f_decl.constraints { let sc = ast_constr_to_sp_constr(cx.tcx, f_decl.inputs, c);
sc = ast_constr_to_sp_constr(cx.tcx, f_decl.inputs, c);
next = add_constraint(cx.tcx, sc, next, res_map); next = add_constraint(cx.tcx, sc, next, res_map);
} }
/* Need to add constraints for args too, b/c they /* Need to add constraints for args too, b/c they
can be deinitialized */ can be deinitialized */
for a: arg in f_decl.inputs { for f_decl.inputs.each {|a|
next = add_constraint( next = add_constraint(
cx.tcx, cx.tcx,
respan(f_sp, ninit(a.id, a.ident)), respan(f_sp, ninit(a.id, a.ident)),


@ -60,7 +60,9 @@ fn find_pre_post_item(ccx: crate_ctxt, i: item) {
item_class(_,_,_) { item_class(_,_,_) {
fail "find_pre_post_item: implement item_class"; fail "find_pre_post_item: implement item_class";
} }
item_impl(_, _, _, ms) { for m in ms { find_pre_post_method(ccx, m); } } item_impl(_, _, _, ms) {
for ms.each {|m| find_pre_post_method(ccx, m); }
}
} }
} }
@ -75,7 +77,7 @@ fn find_pre_post_exprs(fcx: fn_ctxt, args: [@expr], id: node_id) {
log_expr(*args[0]); log_expr(*args[0]);
} }
fn do_one(fcx: fn_ctxt, e: @expr) { find_pre_post_expr(fcx, e); } fn do_one(fcx: fn_ctxt, e: @expr) { find_pre_post_expr(fcx, e); }
for e: @expr in args { do_one(fcx, e); } for args.each {|e| do_one(fcx, e); }
fn get_pp(ccx: crate_ctxt, &&e: @expr) -> pre_and_post { fn get_pp(ccx: crate_ctxt, &&e: @expr) -> pre_and_post {
ret expr_pp(ccx, e); ret expr_pp(ccx, e);
@ -282,7 +284,7 @@ fn forget_args_moved_in(fcx: fn_ctxt, parent: @expr, modes: [mode],
fn find_pre_post_expr_fn_upvars(fcx: fn_ctxt, e: @expr) { fn find_pre_post_expr_fn_upvars(fcx: fn_ctxt, e: @expr) {
let rslt = expr_pp(fcx.ccx, e); let rslt = expr_pp(fcx.ccx, e);
clear_pp(rslt); clear_pp(rslt);
for def in *freevars::get_freevars(fcx.ccx.tcx, e.id) { for vec::each(*freevars::get_freevars(fcx.ccx.tcx, e.id)) {|def|
log(debug, ("handle_var_def: def=", def)); log(debug, ("handle_var_def: def=", def));
handle_var_def(fcx, rslt, def.def, "upvar"); handle_var_def(fcx, rslt, def.def, "upvar");
} }
@ -304,7 +306,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
find_pre_post_exprs(fcx, args, e.id); find_pre_post_exprs(fcx, args, e.id);
/* see if the call has any constraints on its type */ /* see if the call has any constraints on its type */
for c: @ty::constr in constraints_expr(fcx.ccx.tcx, operator) { for constraints_expr(fcx.ccx.tcx, operator).each {|c|
let i = let i =
bit_num(fcx, substitute_constr_args(fcx.ccx.tcx, args, c)); bit_num(fcx, substitute_constr_args(fcx.ccx.tcx, args, c));
require(i, expr_pp(fcx.ccx, e)); require(i, expr_pp(fcx.ccx, e));
@ -466,7 +468,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
ret block_pp(fcx.ccx, an_alt.body); ret block_pp(fcx.ccx, an_alt.body);
} }
let mut alt_pps = []; let mut alt_pps = [];
for a: arm in alts { alt_pps += [do_an_alt(fcx, a)]; } for alts.each {|a| alt_pps += [do_an_alt(fcx, a)]; }
fn combine_pp(antec: pre_and_post, fcx: fn_ctxt, &&pp: pre_and_post, fn combine_pp(antec: pre_and_post, fcx: fn_ctxt, &&pp: pre_and_post,
&&next: pre_and_post) -> pre_and_post { &&next: pre_and_post) -> pre_and_post {
union(pp.precondition, seq_preconds(fcx, [antec, next])); union(pp.precondition, seq_preconds(fcx, [antec, next]));
@ -517,7 +519,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
let mut cmodes = callee_modes(fcx, operator.id); let mut cmodes = callee_modes(fcx, operator.id);
let mut modes = []; let mut modes = [];
let mut i = 0; let mut i = 0;
for expr_opt: option<@expr> in maybe_args { for maybe_args.each {|expr_opt|
alt expr_opt { alt expr_opt {
none {/* no-op */ } none {/* no-op */ }
some(expr) { modes += [cmodes[i]]; args += [expr]; } some(expr) { modes += [cmodes[i]]; args += [expr]; }
@ -541,9 +543,8 @@ fn find_pre_post_stmt(fcx: fn_ctxt, s: stmt) {
stmt_decl(adecl, id) { stmt_decl(adecl, id) {
alt adecl.node { alt adecl.node {
decl_local(alocals) { decl_local(alocals) {
let mut e_pp;
let prev_pp = empty_pre_post(num_constraints(fcx.enclosing)); let prev_pp = empty_pre_post(num_constraints(fcx.enclosing));
for alocal in alocals { for alocals.each {|alocal|
alt alocal.node.init { alt alocal.node.init {
some(an_init) { some(an_init) {
/* LHS always becomes initialized, /* LHS always becomes initialized,
@ -586,7 +587,7 @@ fn find_pre_post_stmt(fcx: fn_ctxt, s: stmt) {
/* Clear out anything that the previous initializer /* Clear out anything that the previous initializer
guaranteed */ guaranteed */
e_pp = expr_pp(fcx.ccx, an_init.expr); let e_pp = expr_pp(fcx.ccx, an_init.expr);
tritv_copy(prev_pp.precondition, tritv_copy(prev_pp.precondition,
seq_preconds(fcx, [prev_pp, e_pp])); seq_preconds(fcx, [prev_pp, e_pp]));
/* Include the LHSs too, since those aren't in the /* Include the LHSs too, since those aren't in the
@ -650,13 +651,13 @@ fn find_pre_post_block(fcx: fn_ctxt, b: blk) {
log_pp_err(stmt_pp(fcx.ccx, *s)); log_pp_err(stmt_pp(fcx.ccx, *s));
*/ */
} }
for s: @stmt in b.node.stmts { do_one_(fcx, s); } for b.node.stmts.each {|s| do_one_(fcx, s); }
fn do_inner_(fcx: fn_ctxt, &&e: @expr) { find_pre_post_expr(fcx, e); } fn do_inner_(fcx: fn_ctxt, &&e: @expr) { find_pre_post_expr(fcx, e); }
let do_inner = bind do_inner_(fcx, _); let do_inner = bind do_inner_(fcx, _);
option::map::<@expr, ()>(b.node.expr, do_inner); option::map::<@expr, ()>(b.node.expr, do_inner);
let mut pps: [pre_and_post] = []; let mut pps: [pre_and_post] = [];
for s: @stmt in b.node.stmts { pps += [stmt_pp(fcx.ccx, *s)]; } for b.node.stmts.each {|s| pps += [stmt_pp(fcx.ccx, *s)]; }
alt b.node.expr { alt b.node.expr {
none {/* no-op */ } none {/* no-op */ }
some(e) { pps += [expr_pp(fcx.ccx, e)]; } some(e) { pps += [expr_pp(fcx.ccx, e)]; }
@ -665,7 +666,7 @@ fn find_pre_post_block(fcx: fn_ctxt, b: blk) {
let block_precond = seq_preconds(fcx, pps); let block_precond = seq_preconds(fcx, pps);
let mut postconds = []; let mut postconds = [];
for pp: pre_and_post in pps { postconds += [get_post(pp)]; } for pps.each {|pp| postconds += [get_post(pp)]; }
/* A block may be empty, so this next line ensures that the postconds /* A block may be empty, so this next line ensures that the postconds
vector is non-empty. */ vector is non-empty. */


@ -67,14 +67,14 @@ fn seq_states(fcx: fn_ctxt, pres: prestate, bindings: [binding]) ->
{changed: bool, post: poststate} { {changed: bool, post: poststate} {
let mut changed = false; let mut changed = false;
let mut post = tritv_clone(pres); let mut post = tritv_clone(pres);
for b: binding in bindings { for bindings.each {|b|
alt b.rhs { alt b.rhs {
some(an_init) { some(an_init) {
// an expression, with or without a destination // an expression, with or without a destination
changed |= changed |=
find_pre_post_state_expr(fcx, post, an_init.expr) || changed; find_pre_post_state_expr(fcx, post, an_init.expr) || changed;
post = tritv_clone(expr_poststate(fcx.ccx, an_init.expr)); post = tritv_clone(expr_poststate(fcx.ccx, an_init.expr));
for i: inst in b.lhs { for b.lhs.each {|i|
alt an_init.expr.node { alt an_init.expr.node {
expr_path(p) { expr_path(p) {
handle_move_or_copy(fcx, post, p, an_init.expr.id, i, handle_move_or_copy(fcx, post, p, an_init.expr.id, i,
@ -91,7 +91,7 @@ fn seq_states(fcx: fn_ctxt, pres: prestate, bindings: [binding]) ->
} }
} }
none { none {
for i: inst in b.lhs { for b.lhs.each {|i|
// variables w/o an initializer // variables w/o an initializer
clear_in_poststate_ident_(fcx, i.node, i.ident, post); clear_in_poststate_ident_(fcx, i.node, i.ident, post);
} }
@ -375,7 +375,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
let callee_ops = callee_arg_init_ops(fcx, operator.id); let callee_ops = callee_arg_init_ops(fcx, operator.id);
let mut ops = []; let mut ops = [];
let mut i = 0; let mut i = 0;
for a_opt: option<@expr> in maybe_args { for maybe_args.each {|a_opt|
alt a_opt { alt a_opt {
none {/* no-op */ } none {/* no-op */ }
some(a) { ops += [callee_ops[i]]; args += [a]; } some(a) { ops += [callee_ops[i]]; args += [a]; }
@ -575,7 +575,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
let mut a_post; let mut a_post;
if vec::len(alts) > 0u { if vec::len(alts) > 0u {
a_post = false_postcond(num_constrs); a_post = false_postcond(num_constrs);
for an_alt: arm in alts { for alts.each {|an_alt|
alt an_alt.guard { alt an_alt.guard {
some(e) { some(e) {
changed |= find_pre_post_state_expr(fcx, e_post, e); changed |= find_pre_post_state_expr(fcx, e_post, e);
@ -702,7 +702,7 @@ fn find_pre_post_state_block(fcx: fn_ctxt, pres0: prestate, b: blk) -> bool {
initializes. Then <pres> becomes the new poststate. */ initializes. Then <pres> becomes the new poststate. */
let mut changed = false; let mut changed = false;
for s: @stmt in b.node.stmts { for b.node.stmts.each {|s|
changed |= find_pre_post_state_stmt(fcx, pres, s); changed |= find_pre_post_state_stmt(fcx, pres, s);
pres = stmt_poststate(fcx.ccx, *s); pres = stmt_poststate(fcx.ccx, *s);
} }
@ -745,12 +745,12 @@ fn find_pre_post_state_fn(fcx: fn_ctxt,
// Arguments start out initialized // Arguments start out initialized
let block_pre = block_prestate(fcx.ccx, f_body); let block_pre = block_prestate(fcx.ccx, f_body);
for a: arg in f_decl.inputs { for f_decl.inputs.each {|a|
set_in_prestate_constr(fcx, ninit(a.id, a.ident), block_pre); set_in_prestate_constr(fcx, ninit(a.id, a.ident), block_pre);
} }
// Instantiate any constraints on the arguments so we can use them // Instantiate any constraints on the arguments so we can use them
for c: @constr in f_decl.constraints { for f_decl.constraints.each {|c|
let tsc = ast_constr_to_ts_constr(fcx.ccx.tcx, f_decl.inputs, c); let tsc = ast_constr_to_ts_constr(fcx.ccx.tcx, f_decl.inputs, c);
set_in_prestate_constr(fcx, tsc, block_pre); set_in_prestate_constr(fcx, tsc, block_pre);
} }


@ -372,7 +372,7 @@ impl of vid for region_vid {
fn param_bounds_to_kind(bounds: param_bounds) -> kind { fn param_bounds_to_kind(bounds: param_bounds) -> kind {
let mut kind = kind_noncopyable; let mut kind = kind_noncopyable;
for bound in *bounds { for vec::each(*bounds) {|bound|
alt bound { alt bound {
bound_copy { bound_copy {
if kind != kind_sendable { kind = kind_copyable; } if kind != kind_sendable { kind = kind_copyable; }
@ -464,7 +464,9 @@ fn mk_t_with_id(cx: ctxt, st: sty, o_def_id: option<ast::def_id>) -> t {
ty_param(_, _) { has_params = true; } ty_param(_, _) { has_params = true; }
ty_var(_) | ty_self(_) { has_vars = true; } ty_var(_) | ty_self(_) { has_vars = true; }
ty_enum(_, tys) | ty_iface(_, tys) | ty_class(_, tys) { ty_enum(_, tys) | ty_iface(_, tys) | ty_class(_, tys) {
for tt in tys { derive_flags(has_params, has_vars, has_rptrs, tt); } for tys.each {|tt|
derive_flags(has_params, has_vars, has_rptrs, tt);
}
} }
ty_box(m) | ty_uniq(m) | ty_vec(m) | ty_ptr(m) { ty_box(m) | ty_uniq(m) | ty_vec(m) | ty_ptr(m) {
derive_flags(has_params, has_vars, has_rptrs, m.ty); derive_flags(has_params, has_vars, has_rptrs, m.ty);
@ -478,22 +480,24 @@ fn mk_t_with_id(cx: ctxt, st: sty, o_def_id: option<ast::def_id>) -> t {
derive_flags(has_params, has_vars, has_rptrs, m.ty); derive_flags(has_params, has_vars, has_rptrs, m.ty);
} }
ty_rec(flds) { ty_rec(flds) {
for f in flds { for flds.each {|f|
derive_flags(has_params, has_vars, has_rptrs, f.mt.ty); derive_flags(has_params, has_vars, has_rptrs, f.mt.ty);
} }
} }
ty_tup(ts) { ty_tup(ts) {
for tt in ts { derive_flags(has_params, has_vars, has_rptrs, tt); } for ts.each {|tt| derive_flags(has_params, has_vars, has_rptrs, tt); }
} }
ty_fn(f) { ty_fn(f) {
for a in f.inputs { for f.inputs.each {|a|
derive_flags(has_params, has_vars, has_rptrs, a.ty); derive_flags(has_params, has_vars, has_rptrs, a.ty);
} }
derive_flags(has_params, has_vars, has_rptrs, f.output); derive_flags(has_params, has_vars, has_rptrs, f.output);
} }
ty_res(_, tt, tps) { ty_res(_, tt, tps) {
derive_flags(has_params, has_vars, has_rptrs, tt); derive_flags(has_params, has_vars, has_rptrs, tt);
for tt in tps { derive_flags(has_params, has_vars, has_rptrs, tt); } for tps.each {|tt|
derive_flags(has_params, has_vars, has_rptrs, tt);
}
} }
ty_constr(tt, _) { ty_constr(tt, _) {
derive_flags(has_params, has_vars, has_rptrs, tt); derive_flags(has_params, has_vars, has_rptrs, tt);
@ -633,19 +637,19 @@ fn maybe_walk_ty(ty: t, f: fn(t) -> bool) {
} }
ty_enum(_, subtys) | ty_iface(_, subtys) | ty_class(_, subtys) ty_enum(_, subtys) | ty_iface(_, subtys) | ty_class(_, subtys)
| ty_self(subtys) { | ty_self(subtys) {
for subty: t in subtys { maybe_walk_ty(subty, f); } for subtys.each {|subty| maybe_walk_ty(subty, f); }
} }
ty_rec(fields) { ty_rec(fields) {
for fl: field in fields { maybe_walk_ty(fl.mt.ty, f); } for fields.each {|fl| maybe_walk_ty(fl.mt.ty, f); }
} }
ty_tup(ts) { for tt in ts { maybe_walk_ty(tt, f); } } ty_tup(ts) { for ts.each {|tt| maybe_walk_ty(tt, f); } }
ty_fn(ft) { ty_fn(ft) {
for a: arg in ft.inputs { maybe_walk_ty(a.ty, f); } for ft.inputs.each {|a| maybe_walk_ty(a.ty, f); }
maybe_walk_ty(ft.output, f); maybe_walk_ty(ft.output, f);
} }
ty_res(_, sub, tps) { ty_res(_, sub, tps) {
maybe_walk_ty(sub, f); maybe_walk_ty(sub, f);
for tp: t in tps { maybe_walk_ty(tp, f); } for tps.each {|tp| maybe_walk_ty(tp, f); }
} }
ty_constr(sub, _) { maybe_walk_ty(sub, f); } ty_constr(sub, _) { maybe_walk_ty(sub, f); }
ty_uniq(tm) { maybe_walk_ty(tm.ty, f); } ty_uniq(tm) { maybe_walk_ty(tm.ty, f); }
@ -895,23 +899,24 @@ fn type_needs_drop(cx: ctxt, ty: t) -> bool {
ty_nil | ty_bot | ty_bool | ty_int(_) | ty_float(_) | ty_uint(_) | ty_nil | ty_bot | ty_bool | ty_int(_) | ty_float(_) | ty_uint(_) |
ty_type | ty_ptr(_) | ty_rptr(_, _) { false } ty_type | ty_ptr(_) | ty_rptr(_, _) { false }
ty_rec(flds) { ty_rec(flds) {
for f in flds { if type_needs_drop(cx, f.mt.ty) { accum = true; } } for flds.each {|f| if type_needs_drop(cx, f.mt.ty) { accum = true; } }
accum accum
} }
ty_class(did, ts) { ty_class(did, ts) {
for f in ty::class_items_as_fields(cx, did, ts) for vec::each(ty::class_items_as_fields(cx, did, ts)) {|f|
{ if type_needs_drop(cx, f.mt.ty) { accum = true; } } if type_needs_drop(cx, f.mt.ty) { accum = true; }
}
accum accum
} }
ty_tup(elts) { ty_tup(elts) {
for m in elts { if type_needs_drop(cx, m) { accum = true; } } for elts.each {|m| if type_needs_drop(cx, m) { accum = true; } }
accum accum
} }
ty_enum(did, tps) { ty_enum(did, tps) {
let variants = enum_variants(cx, did); let variants = enum_variants(cx, did);
for variant in *variants { for vec::each(*variants) {|variant|
for aty in variant.args { for variant.args.each {|aty|
// Perform any type parameter substitutions. // Perform any type parameter substitutions.
let arg_ty = substitute_type_params(cx, tps, aty); let arg_ty = substitute_type_params(cx, tps, aty);
if type_needs_drop(cx, arg_ty) { accum = true; } if type_needs_drop(cx, arg_ty) { accum = true; }
@ -968,8 +973,8 @@ fn type_needs_unwind_cleanup_(cx: ctxt, ty: t,
true true
} }
ty_enum(did, tps) { ty_enum(did, tps) {
for v in *enum_variants(cx, did) { for vec::each(*enum_variants(cx, did)) {|v|
for aty in v.args { for v.args.each {|aty|
let t = substitute_type_params(cx, tps, aty); let t = substitute_type_params(cx, tps, aty);
needs_unwind_cleanup |= needs_unwind_cleanup |=
type_needs_unwind_cleanup_(cx, t, tycache, type_needs_unwind_cleanup_(cx, t, tycache,
@ -1068,13 +1073,15 @@ fn type_kind(cx: ctxt, ty: t) -> kind {
// Records lower to the lowest of their members. // Records lower to the lowest of their members.
ty_rec(flds) { ty_rec(flds) {
let mut lowest = kind_sendable; let mut lowest = kind_sendable;
for f in flds { lowest = lower_kind(lowest, type_kind(cx, f.mt.ty)); } for flds.each {|f|
lowest = lower_kind(lowest, type_kind(cx, f.mt.ty));
}
lowest lowest
} }
// Tuples lower to the lowest of their members. // Tuples lower to the lowest of their members.
ty_tup(tys) { ty_tup(tys) {
let mut lowest = kind_sendable; let mut lowest = kind_sendable;
for ty in tys { lowest = lower_kind(lowest, type_kind(cx, ty)); } for tys.each {|ty| lowest = lower_kind(lowest, type_kind(cx, ty)); }
lowest lowest
} }
// Enums lower to the lowest of their variants. // Enums lower to the lowest of their variants.
@ -1084,8 +1091,8 @@ fn type_kind(cx: ctxt, ty: t) -> kind {
if vec::len(*variants) == 0u { if vec::len(*variants) == 0u {
lowest = kind_noncopyable; lowest = kind_noncopyable;
} else { } else {
for variant in *variants { for vec::each(*variants) {|variant|
for aty in variant.args { for variant.args.each {|aty|
// Perform any type parameter substitutions. // Perform any type parameter substitutions.
let arg_ty = substitute_type_params(cx, tps, aty); let arg_ty = substitute_type_params(cx, tps, aty);
lowest = lower_kind(lowest, type_kind(cx, arg_ty)); lowest = lower_kind(lowest, type_kind(cx, arg_ty));
@ -1246,8 +1253,8 @@ fn type_structurally_contains(cx: ctxt, ty: t, test: fn(sty) -> bool) ->
if test(sty) { ret true; } if test(sty) { ret true; }
alt sty { alt sty {
ty_enum(did, tps) { ty_enum(did, tps) {
for variant in *enum_variants(cx, did) { for vec::each(*enum_variants(cx, did)) {|variant|
for aty in variant.args { for variant.args.each {|aty|
let sty = substitute_type_params(cx, tps, aty); let sty = substitute_type_params(cx, tps, aty);
if type_structurally_contains(cx, sty, test) { ret true; } if type_structurally_contains(cx, sty, test) { ret true; }
} }
@ -1255,13 +1262,13 @@ fn type_structurally_contains(cx: ctxt, ty: t, test: fn(sty) -> bool) ->
ret false; ret false;
} }
ty_rec(fields) { ty_rec(fields) {
for field in fields { for fields.each {|field|
if type_structurally_contains(cx, field.mt.ty, test) { ret true; } if type_structurally_contains(cx, field.mt.ty, test) { ret true; }
} }
ret false; ret false;
} }
ty_tup(ts) { ty_tup(ts) {
for tt in ts { for ts.each {|tt|
if type_structurally_contains(cx, tt, test) { ret true; } if type_structurally_contains(cx, tt, test) { ret true; }
} }
ret false; ret false;
@ -1342,7 +1349,7 @@ fn type_is_pod(cx: ctxt, ty: t) -> bool {
// Structural types // Structural types
ty_enum(did, tps) { ty_enum(did, tps) {
let variants = enum_variants(cx, did); let variants = enum_variants(cx, did);
for variant: variant_info in *variants { for vec::each(*variants) {|variant|
let tup_ty = mk_tup(cx, variant.args); let tup_ty = mk_tup(cx, variant.args);
// Perform any type parameter substitutions. // Perform any type parameter substitutions.
@ -1351,12 +1358,12 @@ fn type_is_pod(cx: ctxt, ty: t) -> bool {
} }
} }
ty_rec(flds) { ty_rec(flds) {
for f: field in flds { for flds.each {|f|
if !type_is_pod(cx, f.mt.ty) { result = false; } if !type_is_pod(cx, f.mt.ty) { result = false; }
} }
} }
ty_tup(elts) { ty_tup(elts) {
for elt in elts { if !type_is_pod(cx, elt) { result = false; } } for elts.each {|elt| if !type_is_pod(cx, elt) { result = false; } }
} }
ty_res(_, inner, tps) { ty_res(_, inner, tps) {
result = type_is_pod(cx, substitute_type_params(cx, tps, inner)); result = type_is_pod(cx, substitute_type_params(cx, tps, inner));
@ -1452,14 +1459,14 @@ fn hash_type_structure(st: sty) -> uint {
fn hash_subty(id: uint, subty: t) -> uint { (id << 2u) + type_id(subty) } fn hash_subty(id: uint, subty: t) -> uint { (id << 2u) + type_id(subty) }
fn hash_subtys(id: uint, subtys: [t]) -> uint { fn hash_subtys(id: uint, subtys: [t]) -> uint {
let mut h = id; let mut h = id;
for s in subtys { h = (h << 2u) + type_id(s) } for subtys.each {|s| h = (h << 2u) + type_id(s) }
h h
} }
fn hash_type_constr(id: uint, c: @type_constr) -> uint { fn hash_type_constr(id: uint, c: @type_constr) -> uint {
let mut h = id; let mut h = id;
h = (h << 2u) + hash_def(h, c.node.id); h = (h << 2u) + hash_def(h, c.node.id);
// FIXME this makes little sense // FIXME this makes little sense
for a in c.node.args { for c.node.args.each {|a|
alt a.node { alt a.node {
carg_base { h += h << 2u; } carg_base { h += h << 2u; }
carg_lit(_) { fail "lit args not implemented yet"; } carg_lit(_) { fail "lit args not implemented yet"; }
@ -1499,27 +1506,27 @@ fn hash_type_structure(st: sty) -> uint {
ty_str { 17u } ty_str { 17u }
ty_enum(did, tys) { ty_enum(did, tys) {
let mut h = hash_def(18u, did); let mut h = hash_def(18u, did);
for typ: t in tys { h = hash_subty(h, typ); } for tys.each {|typ| h = hash_subty(h, typ); }
h h
} }
ty_box(mt) { hash_subty(19u, mt.ty) } ty_box(mt) { hash_subty(19u, mt.ty) }
ty_vec(mt) { hash_subty(21u, mt.ty) } ty_vec(mt) { hash_subty(21u, mt.ty) }
ty_rec(fields) { ty_rec(fields) {
let mut h = 26u; let mut h = 26u;
for f in fields { h = hash_subty(h, f.mt.ty); } for fields.each {|f| h = hash_subty(h, f.mt.ty); }
h h
} }
ty_tup(ts) { hash_subtys(25u, ts) } ty_tup(ts) { hash_subtys(25u, ts) }
ty_fn(f) { ty_fn(f) {
let mut h = 27u; let mut h = 27u;
for a in f.inputs { h = hash_subty(h, a.ty); } for f.inputs.each {|a| h = hash_subty(h, a.ty); }
hash_subty(h, f.output) hash_subty(h, f.output)
} }
ty_var(v) { hash_uint(30u, v.to_uint()) } ty_var(v) { hash_uint(30u, v.to_uint()) }
ty_param(pid, did) { hash_def(hash_uint(31u, pid), did) } ty_param(pid, did) { hash_def(hash_uint(31u, pid), did) }
ty_self(ts) { ty_self(ts) {
let mut h = 28u; let mut h = 28u;
for t in ts { h = hash_subty(h, t); } for ts.each {|t| h = hash_subty(h, t); }
h h
} }
ty_type { 32u } ty_type { 32u }
@ -1535,13 +1542,13 @@ fn hash_type_structure(st: sty) -> uint {
} }
ty_constr(t, cs) { ty_constr(t, cs) {
let mut h = hash_subty(36u, t); let mut h = hash_subty(36u, t);
for c in cs { h = (h << 2u) + hash_type_constr(h, c); } for cs.each {|c| h = (h << 2u) + hash_type_constr(h, c); }
h h
} }
ty_uniq(mt) { hash_subty(37u, mt.ty) } ty_uniq(mt) { hash_subty(37u, mt.ty) }
ty_iface(did, tys) { ty_iface(did, tys) {
let mut h = hash_def(40u, did); let mut h = hash_def(40u, did);
for typ: t in tys { h = hash_subty(h, typ); } for tys.each {|typ| h = hash_subty(h, typ); }
h h
} }
ty_opaque_closure_ptr(ck_block) { 41u } ty_opaque_closure_ptr(ck_block) { 41u }
@ -1550,7 +1557,7 @@ fn hash_type_structure(st: sty) -> uint {
ty_opaque_box { 44u } ty_opaque_box { 44u }
ty_class(did, tys) { ty_class(did, tys) {
let mut h = hash_def(45u, did); let mut h = hash_def(45u, did);
for typ: t in tys { h = hash_subty(h, typ); } for tys.each {|typ| h = hash_subty(h, typ); }
h h
} }
} }
@ -1579,7 +1586,7 @@ fn args_eq<T>(eq: fn(T, T) -> bool,
a: [@sp_constr_arg<T>], a: [@sp_constr_arg<T>],
b: [@sp_constr_arg<T>]) -> bool { b: [@sp_constr_arg<T>]) -> bool {
let mut i: uint = 0u; let mut i: uint = 0u;
for arg: @sp_constr_arg<T> in a { for a.each {|arg|
if !arg_eq(eq, arg, b[i]) { ret false; } if !arg_eq(eq, arg, b[i]) { ret false; }
i += 1u; i += 1u;
} }
@ -1596,7 +1603,7 @@ fn constr_eq(c: @constr, d: @constr) -> bool {
fn constrs_eq(cs: [@constr], ds: [@constr]) -> bool { fn constrs_eq(cs: [@constr], ds: [@constr]) -> bool {
if vec::len(cs) != vec::len(ds) { ret false; } if vec::len(cs) != vec::len(ds) { ret false; }
let mut i = 0u; let mut i = 0u;
for c: @constr in cs { if !constr_eq(c, ds[i]) { ret false; } i += 1u; } for cs.each {|c| if !constr_eq(c, ds[i]) { ret false; } i += 1u; }
ret true; ret true;
} }
@ -1721,7 +1728,7 @@ fn stmt_node_id(s: @ast::stmt) -> ast::node_id {
fn field_idx(id: ast::ident, fields: [field]) -> option<uint> { fn field_idx(id: ast::ident, fields: [field]) -> option<uint> {
let mut i = 0u; let mut i = 0u;
for f in fields { if f.ident == id { ret some(i); } i += 1u; } for fields.each {|f| if f.ident == id { ret some(i); } i += 1u; }
ret none; ret none;
} }
@ -1739,7 +1746,7 @@ fn get_fields(rec_ty:t) -> [field] {
fn method_idx(id: ast::ident, meths: [method]) -> option<uint> { fn method_idx(id: ast::ident, meths: [method]) -> option<uint> {
let mut i = 0u; let mut i = 0u;
for m in meths { if m.ident == id { ret some(i); } i += 1u; } for meths.each {|m| if m.ident == id { ret some(i); } i += 1u; }
ret none; ret none;
} }
@ -2214,7 +2221,7 @@ fn lookup_class_method_by_name(cx:ctxt, did: ast::def_id, name: ident,
sp: span) -> def_id { sp: span) -> def_id {
if check is_local(did) { if check is_local(did) {
let ms = lookup_class_method_ids(cx, did); let ms = lookup_class_method_ids(cx, did);
for m in ms { for ms.each {|m|
if m.name == name { if m.name == name {
ret ast_util::local_def(m.id); ret ast_util::local_def(m.id);
} }
@ -2229,7 +2236,7 @@ fn lookup_class_method_by_name(cx:ctxt, did: ast::def_id, name: ident,
fn class_field_tys(items: [@class_member]) -> [field_ty] { fn class_field_tys(items: [@class_member]) -> [field_ty] {
let mut rslt = []; let mut rslt = [];
for it in items { for items.each {|it|
alt it.node { alt it.node {
instance_var(nm, _, cm, id, privacy) { instance_var(nm, _, cm, id, privacy) {
rslt += [{ident: nm, id: ast_util::local_def(id), rslt += [{ident: nm, id: ast_util::local_def(id),
@ -2247,7 +2254,7 @@ fn class_field_tys(items: [@class_member]) -> [field_ty] {
fn class_items_as_fields(cx:ctxt, did: ast::def_id, substs: [ty::t]) fn class_items_as_fields(cx:ctxt, did: ast::def_id, substs: [ty::t])
-> [field] { -> [field] {
-> [field] { -> [field] {
let mut rslt = []; let mut rslt = [];
for f in lookup_class_fields(cx, did) { for lookup_class_fields(cx, did).each {|f|
// consider all instance vars mut, because the // consider all instance vars mut, because the
// constructor may mutate all vars // constructor may mutate all vars
rslt += [{ident: f.ident, mt: rslt += [{ident: f.ident, mt:
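In the hunks above, plain vector values are iterated with method syntax (`flds.each`, `tys.each`), while vectors reached through a dereference, such as `*bounds`, `*variants`, and `*enum_variants(cx, did)`, go through the free function `vec::each(...)` instead of a method call. A hedged sketch of the function-call form, with an illustrative name (`count_elems`) that is not part of the patch:

    // Illustrative only: iterate a boxed vector through vec::each.
    fn count_elems(xs: @[int]) -> uint {
        let mut n = 0u;
        for vec::each(*xs) {|_x| n += 1u; }
        ret n;
    }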

View file

@ -317,7 +317,7 @@ fn ast_ty_to_ty(tcx: ty::ctxt, mode: mode, &&ast_ty: @ast::ty) -> ty::t {
tcx.sess.span_fatal(sp, "wrong number of type arguments for a \ tcx.sess.span_fatal(sp, "wrong number of type arguments for a \
polymorphic type"); polymorphic type");
} }
for ast_ty: @ast::ty in args { for args.each {|ast_ty|
param_bindings += [do_ast_ty_to_ty(tcx, mode, ast_ty)]; param_bindings += [do_ast_ty_to_ty(tcx, mode, ast_ty)];
} }
#debug("substituting(%s into %s)", #debug("substituting(%s into %s)",
@ -379,7 +379,7 @@ fn ast_ty_to_ty(tcx: ty::ctxt, mode: mode, &&ast_ty: @ast::ty) -> ty::t {
} }
ast::ty_rec(fields) { ast::ty_rec(fields) {
let mut flds: [field] = []; let mut flds: [field] = [];
for f: ast::ty_field in fields { for fields.each {|f|
let tm = ast_mt_to_mt(tcx, mode, f.node.mt); let tm = ast_mt_to_mt(tcx, mode, f.node.mt);
flds += [{ident: f.node.ident, mt: tm}]; flds += [{ident: f.node.ident, mt: tm}];
} }
@ -436,7 +436,7 @@ fn ast_ty_to_ty(tcx: ty::ctxt, mode: mode, &&ast_ty: @ast::ty) -> ty::t {
} }
ast::ty_constr(t, cs) { ast::ty_constr(t, cs) {
let mut out_cs = []; let mut out_cs = [];
for constr: @ast::ty_constr in cs { for cs.each {|constr|
out_cs += [ty::ast_constr_to_constr(tcx, constr)]; out_cs += [ty::ast_constr_to_constr(tcx, constr)];
} }
ty::mk_constr(tcx, do_ast_ty_to_ty(tcx, mode, t), out_cs) ty::mk_constr(tcx, do_ast_ty_to_ty(tcx, mode, t), out_cs)
@ -699,12 +699,12 @@ fn ty_of_native_fn_decl(tcx: ty::ctxt, mode: mode, decl: ast::fn_decl,
fn ty_param_bounds(tcx: ty::ctxt, mode: mode, params: [ast::ty_param]) fn ty_param_bounds(tcx: ty::ctxt, mode: mode, params: [ast::ty_param])
-> @[ty::param_bounds] { -> @[ty::param_bounds] {
let mut result = []; let mut result = [];
for param in params { for params.each {|param|
result += [alt tcx.ty_param_bounds.find(param.id) { result += [alt tcx.ty_param_bounds.find(param.id) {
some(bs) { bs } some(bs) { bs }
none { none {
let mut bounds = []; let mut bounds = [];
for b in *param.bounds { for vec::each(*param.bounds) {|b|
bounds += [alt b { bounds += [alt b {
ast::bound_send { ty::bound_send } ast::bound_send { ty::bound_send }
ast::bound_copy { ty::bound_copy } ast::bound_copy { ty::bound_copy }
@ -994,7 +994,7 @@ mod collect {
variants: [ast::variant], variants: [ast::variant],
ty_params: [ast::ty_param]) { ty_params: [ast::ty_param]) {
// Create a set of parameter types shared among all the variants. // Create a set of parameter types shared among all the variants.
for variant in variants { for variants.each {|variant|
// Nullary enum constructors get turned into constants; n-ary enum // Nullary enum constructors get turned into constants; n-ary enum
// constructors get turned into functions. // constructors get turned into functions.
let result_ty = if vec::len(variant.node.args) == 0u { let result_ty = if vec::len(variant.node.args) == 0u {
@ -1003,7 +1003,7 @@ mod collect {
// As above, tell ast_ty_to_ty() that trans_ty_item_to_ty() // As above, tell ast_ty_to_ty() that trans_ty_item_to_ty()
// should be called to resolve named types. // should be called to resolve named types.
let mut args: [arg] = []; let mut args: [arg] = [];
for va: ast::variant_arg in variant.node.args { for variant.node.args.each {|va|
let arg_ty = { let arg_ty = {
// NDM We need BOUNDS here. It should be that this // NDM We need BOUNDS here. It should be that this
// yields a type like "foo &anon". Basically every // yields a type like "foo &anon". Basically every
@ -1061,7 +1061,7 @@ mod collect {
i_bounds: @[ty::param_bounds], maybe_self: option<ty::t>) i_bounds: @[ty::param_bounds], maybe_self: option<ty::t>)
-> [{mty: ty::method, id: ast::node_id, span: span}] { -> [{mty: ty::method, id: ast::node_id, span: span}] {
let mut my_methods = []; let mut my_methods = [];
for m in ms { for ms.each {|m|
alt maybe_self { alt maybe_self {
some(selfty) { some(selfty) {
write_ty_to_tcx(tcx, m.self_id, selfty); write_ty_to_tcx(tcx, m.self_id, selfty);
@ -1086,7 +1086,7 @@ mod collect {
ast::item_native_mod(m) { ast::item_native_mod(m) {
if syntax::attr::native_abi(it.attrs) == if syntax::attr::native_abi(it.attrs) ==
either::right(ast::native_abi_rust_intrinsic) { either::right(ast::native_abi_rust_intrinsic) {
for item in m.items { check_intrinsic_type(tcx, item); } for m.items.each {|item| check_intrinsic_type(tcx, item); }
} }
} }
ast::item_enum(variants, ty_params) { ast::item_enum(variants, ty_params) {
@ -1115,7 +1115,7 @@ mod collect {
if did.crate == ast::local_crate { if did.crate == ast::local_crate {
ensure_iface_methods(tcx, did.node); ensure_iface_methods(tcx, did.node);
} }
for if_m in *ty::iface_methods(tcx, did) { for vec::each(*ty::iface_methods(tcx, did)) {|if_m|
alt vec::find(my_methods, alt vec::find(my_methods,
{|m| if_m.ident == m.mty.ident}) { {|m| if_m.ident == m.mty.ident}) {
some({mty: m, id, span}) { some({mty: m, id, span}) {
@ -1195,7 +1195,7 @@ mod collect {
/* FIXME: check for proper public/privateness */ /* FIXME: check for proper public/privateness */
// Write the type of each of the members // Write the type of each of the members
let (fields, methods) = split_class_items(members); let (fields, methods) = split_class_items(members);
for f in fields { for fields.each {|f|
convert_class_item(tcx, f); convert_class_item(tcx, f);
} }
// The selfty is just the class type // The selfty is just the class type
@ -1346,7 +1346,7 @@ mod demand {
let mut ty_param_substs: [mut ty::t] = [mut]; let mut ty_param_substs: [mut ty::t] = [mut];
let mut ty_param_subst_var_ids: [ty_vid] = []; let mut ty_param_subst_var_ids: [ty_vid] = [];
for ty_param_subst: ty::t in ty_param_substs_0 { for ty_param_substs_0.each {|ty_param_subst|
// Generate a type variable and unify it with the type parameter // Generate a type variable and unify it with the type parameter
// substitution. We will then pull out these type variables. // substitution. We will then pull out these type variables.
let t_0 = next_ty_var(fcx); let t_0 = next_ty_var(fcx);
@ -1359,7 +1359,7 @@ mod demand {
ty_param_subst_var_ids: [ty_vid]) -> ty_param_subst_var_ids: [ty_vid]) ->
ty_param_substs_and_ty { ty_param_substs_and_ty {
let mut result_ty_param_substs: [ty::t] = []; let mut result_ty_param_substs: [ty::t] = [];
for var_id in ty_param_subst_var_ids { for ty_param_subst_var_ids.each {|var_id|
let tp_subst = ty::mk_var(fcx.ccx.tcx, var_id); let tp_subst = ty::mk_var(fcx.ccx.tcx, var_id);
result_ty_param_substs += [tp_subst]; result_ty_param_substs += [tp_subst];
} }
@ -1405,7 +1405,7 @@ fn variant_arg_types(ccx: @crate_ctxt, _sp: span, vid: ast::def_id,
alt ty::get(tpt.ty).struct { alt ty::get(tpt.ty).struct {
ty::ty_fn(f) { ty::ty_fn(f) {
// N-ary variant. // N-ary variant.
for arg: ty::arg in f.inputs { for f.inputs.each {|arg|
let arg_ty = let arg_ty =
ty::substitute_type_params(ccx.tcx, enum_ty_params, arg.ty); ty::substitute_type_params(ccx.tcx, enum_ty_params, arg.ty);
result += [arg_ty]; result += [arg_ty];
@ -1463,7 +1463,7 @@ mod writeback {
alt fcx.opt_node_ty_substs(id) { alt fcx.opt_node_ty_substs(id) {
some(substs) { some(substs) {
let mut new_substs = []; let mut new_substs = [];
for subst: ty::t in substs { for substs.each {|subst|
alt resolve_type_vars_in_type(fcx, sp, subst) { alt resolve_type_vars_in_type(fcx, sp, subst) {
some(t) { new_substs += [t]; } some(t) { new_substs += [t]; }
none { wbcx.success = false; ret none; } none { wbcx.success = false; ret none; }
@ -1602,7 +1602,7 @@ mod writeback {
visit_local: visit_local visit_local: visit_local
with *visit::default_visitor()}); with *visit::default_visitor()});
visit.visit_block(blk, wbcx, visit); visit.visit_block(blk, wbcx, visit);
for arg in decl.inputs { for decl.inputs.each {|arg|
resolve_type_vars_for_node(wbcx, arg.ty.span, arg.id); resolve_type_vars_for_node(wbcx, arg.ty.span, arg.id);
} }
ret wbcx.success; ret wbcx.success;
@ -1986,7 +1986,7 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
fn matches(name: str, f: ty::field) -> bool { fn matches(name: str, f: ty::field) -> bool {
ret str::eq(name, f.ident); ret str::eq(name, f.ident);
} }
for f: ast::field_pat in fields { for fields.each {|f|
alt vec::find(ex_fields, bind matches(f.ident, _)) { alt vec::find(ex_fields, bind matches(f.ident, _)) {
some(field) { some(field) {
check_pat(pcx, f.pat, field.mt.ty); check_pat(pcx, f.pat, field.mt.ty);
@ -2019,7 +2019,7 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
fields", vec::len(ex_elts), e_count]); fields", vec::len(ex_elts), e_count]);
} }
let mut i = 0u; let mut i = 0u;
for elt in elts { for elts.each {|elt|
check_pat(pcx, elt, ex_elts[i]); check_pat(pcx, elt, ex_elts[i]);
i += 1u; i += 1u;
} }
@ -2209,7 +2209,7 @@ fn lookup_method_inner_(tcx: ty::ctxt, ms: [ty::method],
origin: method_origin, self_sub: option<self_subst>}> { origin: method_origin, self_sub: option<self_subst>}> {
#debug("lookup_method_inner_: %? %? %s", ms, parent, name); #debug("lookup_method_inner_: %? %? %s", ms, parent, name);
let mut i = 0u; let mut i = 0u;
for m in ms { for ms.each {|m|
if m.ident == name { if m.ident == name {
let fty = ty::mk_fn(tcx, {proto: ast::proto_box with m.fty}); let fty = ty::mk_fn(tcx, {proto: ast::proto_box with m.fty});
if ty::type_has_vars(fty) { if ty::type_has_vars(fty) {
@ -2261,7 +2261,7 @@ fn lookup_method_inner(fcx: @fn_ctxt, expr: @ast::expr,
alt ty::get(ty).struct { alt ty::get(ty).struct {
ty::ty_param(n, did) { ty::ty_param(n, did) {
let mut bound_n = 0u; let mut bound_n = 0u;
for bound in *tcx.ty_param_bounds.get(did.node) { for vec::each(*tcx.ty_param_bounds.get(did.node)) {|bound|
alt bound { alt bound {
ty::bound_iface(t) { ty::bound_iface(t) {
let (iid, tps) = alt check ty::get(t).struct { let (iid, tps) = alt check ty::get(t).struct {
@ -2326,10 +2326,10 @@ fn lookup_method_inner(fcx: @fn_ctxt, expr: @ast::expr,
let mut result = none, complained = false; let mut result = none, complained = false;
std::list::iter(fcx.ccx.impl_map.get(expr.id)) {|impls| std::list::iter(fcx.ccx.impl_map.get(expr.id)) {|impls|
if option::is_none(result) { if option::is_none(result) {
for @{did, methods, _} in *impls { for vec::each(*impls) {|im|
alt vec::find(methods, {|m| m.ident == name}) { alt vec::find(im.methods, {|m| m.ident == name}) {
some(m) { some(m) {
let mut {n_tps, ty: self_ty} = impl_self_ty(tcx, did); let mut {n_tps, ty: self_ty} = impl_self_ty(tcx, im.did);
let mut {vars, ty: self_ty} = if n_tps > 0u { let mut {vars, ty: self_ty} = if n_tps > 0u {
bind_params(fcx, self_ty, n_tps) bind_params(fcx, self_ty, n_tps)
} else { } else {
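In the hunk above, the old loop destructured each element in the loop head (`for @{did, methods, _} in *impls`); the new form binds a single block parameter (`im`) and reaches the pieces through field access (`im.did`, `im.methods`), and later hunks in this commit do the same with a `let` destructure inside the block (`{|elt| let (path, doc) = elt;`) or direct field access (`elt.x + elt.y`). A hedged sketch of that shape with illustrative names (`sum_points`, `ps`):

    // Illustrative only: field access on the block parameter replaces
    // a record pattern in the loop head.
    fn sum_points(ps: [{x: int, y: int}]) -> int {
        let mut total = 0;
        for ps.each {|p| total += p.x + p.y; }
        ret total;
    }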
@ -2536,7 +2536,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
let check_args = fn@(check_blocks: bool) -> bool { let check_args = fn@(check_blocks: bool) -> bool {
let mut i = 0u; let mut i = 0u;
let mut bot = false; let mut bot = false;
for a_opt in args { for args.each {|a_opt|
alt a_opt { alt a_opt {
some(a) { some(a) {
let is_block = alt a.node { let is_block = alt a.node {
@ -2574,7 +2574,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
fn check_call(fcx: @fn_ctxt, sp: span, f: @ast::expr, args: [@ast::expr]) fn check_call(fcx: @fn_ctxt, sp: span, f: @ast::expr, args: [@ast::expr])
-> {fty: ty::t, bot: bool} { -> {fty: ty::t, bot: bool} {
let mut args_opt_0: [option<@ast::expr>] = []; let mut args_opt_0: [option<@ast::expr>] = [];
for arg: @ast::expr in args { for args.each {|arg|
args_opt_0 += [some::<@ast::expr>(arg)]; args_opt_0 += [some::<@ast::expr>(arg)];
} }
@ -2994,7 +2994,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
// Typecheck the patterns first, so that we get types for all the // Typecheck the patterns first, so that we get types for all the
// bindings. // bindings.
//let pattern_ty = fcx.expr_ty(discrim); //let pattern_ty = fcx.expr_ty(discrim);
for arm: ast::arm in arms { for arms.each {|arm|
let pcx = { let pcx = {
fcx: fcx, fcx: fcx,
map: pat_util::pat_id_map(tcx.def_map, arm.pats[0]), map: pat_util::pat_id_map(tcx.def_map, arm.pats[0]),
@ -3003,14 +3003,12 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
pat_region: ty::re_scope(parent_block) pat_region: ty::re_scope(parent_block)
}; };
for p: @ast::pat in arm.pats { for arm.pats.each {|p| check_pat(pcx, p, pattern_ty);}
check_pat(pcx, p, pattern_ty);
}
} }
// Now typecheck the blocks. // Now typecheck the blocks.
let mut result_ty = next_ty_var(fcx); let mut result_ty = next_ty_var(fcx);
let mut arm_non_bot = false; let mut arm_non_bot = false;
for arm: ast::arm in arms { for arms.each {|arm|
alt arm.guard { alt arm.guard {
some(e) { check_expr_with(fcx, e, ty::mk_bool(tcx)); } some(e) { check_expr_with(fcx, e, ty::mk_bool(tcx)); }
none { } none { }
@ -3169,14 +3167,14 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
} }
ast::expr_vec(args, mutbl) { ast::expr_vec(args, mutbl) {
let t: ty::t = next_ty_var(fcx); let t: ty::t = next_ty_var(fcx);
for e: @ast::expr in args { bot |= check_expr_with(fcx, e, t); } for args.each {|e| bot |= check_expr_with(fcx, e, t); }
let typ = ty::mk_vec(tcx, {ty: t, mutbl: mutbl}); let typ = ty::mk_vec(tcx, {ty: t, mutbl: mutbl});
fcx.write_ty(id, typ); fcx.write_ty(id, typ);
} }
ast::expr_tup(elts) { ast::expr_tup(elts) {
let mut elt_ts = []; let mut elt_ts = [];
vec::reserve(elt_ts, vec::len(elts)); vec::reserve(elt_ts, vec::len(elts));
for e in elts { for elts.each {|e|
check_expr(fcx, e); check_expr(fcx, e);
let ety = fcx.expr_ty(e); let ety = fcx.expr_ty(e);
elt_ts += [ety]; elt_ts += [ety];
@ -3212,9 +3210,9 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
} }
} }
fcx.write_ty(id, bexpr_t); fcx.write_ty(id, bexpr_t);
for f: spanned<ty::field> in fields_t { for fields_t.each {|f|
let mut found = false; let mut found = false;
for bf: ty::field in base_fields { for base_fields.each {|bf|
if str::eq(f.node.ident, bf.ident) { if str::eq(f.node.ident, bf.ident) {
demand::simple(fcx, f.span, bf.mt.ty, f.node.mt.ty); demand::simple(fcx, f.span, bf.mt.ty, f.node.mt.ty);
found = true; found = true;
@ -3465,7 +3463,7 @@ fn check_stmt(fcx: @fn_ctxt, stmt: @ast::stmt) -> bool {
node_id = id; node_id = id;
alt decl.node { alt decl.node {
ast::decl_local(ls) { ast::decl_local(ls) {
for l in ls { bot |= check_decl_local(fcx, l); } for ls.each {|l| bot |= check_decl_local(fcx, l); }
} }
ast::decl_item(_) {/* ignore for now */ } ast::decl_item(_) {/* ignore for now */ }
} }
@ -3501,7 +3499,7 @@ fn check_block(fcx0: @fn_ctxt, blk: ast::blk) -> bool {
}; };
let mut bot = false; let mut bot = false;
let mut warned = false; let mut warned = false;
for s: @ast::stmt in blk.node.stmts { for blk.node.stmts.each {|s|
if bot && !warned && if bot && !warned &&
alt s.node { alt s.node {
ast::stmt_decl(@{node: ast::decl_local(_), _}, _) | ast::stmt_decl(@{node: ast::decl_local(_), _}, _) |
@ -3588,7 +3586,7 @@ fn check_enum_variants(ccx: @crate_ctxt, sp: span, vs: [ast::variant],
ccx: ccx}; ccx: ccx};
let mut disr_vals: [int] = []; let mut disr_vals: [int] = [];
let mut disr_val = 0; let mut disr_val = 0;
for v in vs { for vs.each {|v|
alt v.node.disr_expr { alt v.node.disr_expr {
some(e) { some(e) {
check_expr(fcx, e); check_expr(fcx, e);
@ -3666,7 +3664,7 @@ fn check_pred_expr(fcx: @fn_ctxt, e: @ast::expr) -> bool {
in constraint"); in constraint");
} }
} }
for operand: @ast::expr in operands { for operands.each {|operand|
if !ast_util::is_constraint_arg(operand) { if !ast_util::is_constraint_arg(operand) {
let s = let s =
"constraint args must be slot variables or literals"; "constraint args must be slot variables or literals";
@ -3687,11 +3685,10 @@ fn check_pred_expr(fcx: @fn_ctxt, e: @ast::expr) -> bool {
} }
fn check_constraints(fcx: @fn_ctxt, cs: [@ast::constr], args: [ast::arg]) { fn check_constraints(fcx: @fn_ctxt, cs: [@ast::constr], args: [ast::arg]) {
let mut c_args;
let num_args = vec::len(args); let num_args = vec::len(args);
for c: @ast::constr in cs { for cs.each {|c|
c_args = []; let mut c_args = [];
for a: @spanned<ast::fn_constr_arg> in c.node.args { for c.node.args.each {|a|
c_args += [ c_args += [
// "base" should not occur in a fn type thing, as of // "base" should not occur in a fn type thing, as of
// yet, b/c we don't allow constraints on the return type // yet, b/c we don't allow constraints on the return type
@ -3858,7 +3855,7 @@ fn check_method(ccx: @crate_ctxt, method: @ast::method, self_ty: ty::t) {
fn class_types(ccx: @crate_ctxt, members: [@ast::class_member]) -> class_map { fn class_types(ccx: @crate_ctxt, members: [@ast::class_member]) -> class_map {
let rslt = int_hash::<ty::t>(); let rslt = int_hash::<ty::t>();
for m in members { for members.each {|m|
alt m.node { alt m.node {
ast::instance_var(_,t,_,id,_) { ast::instance_var(_,t,_,id,_) {
rslt.insert(id, ast_ty_to_ty(ccx.tcx, m_collect, t)); rslt.insert(id, ast_ty_to_ty(ccx.tcx, m_collect, t));
@ -3897,9 +3894,7 @@ fn check_item(ccx: @crate_ctxt, it: @ast::item) {
let self_ty = ast_ty_to_ty(ccx.tcx, m_check, ty); let self_ty = ast_ty_to_ty(ccx.tcx, m_check, ty);
let self_region = ty::re_free(it.id, ty::br_self); let self_region = ty::re_free(it.id, ty::br_self);
let self_ty = replace_self_region(ccx.tcx, self_region, self_ty); let self_ty = replace_self_region(ccx.tcx, self_region, self_ty);
for m in ms { for ms.each {|m| check_method(ccx, m, self_ty);}
check_method(ccx, m, self_ty);
}
} }
ast::item_class(tps, members, ctor) { ast::item_class(tps, members, ctor) {
let cid = some(it.id); let cid = some(it.id);
@ -3915,7 +3910,7 @@ fn check_item(ccx: @crate_ctxt, it: @ast::item) {
some(class_t)); some(class_t));
// typecheck the members // typecheck the members
for m in members { check_class_member(class_ccx, class_t, m); } for members.each {|m| check_class_member(class_ccx, class_t, m); }
} }
_ {/* nothing to do */ } _ {/* nothing to do */ }
} }
@ -3995,8 +3990,8 @@ mod vtable {
allow_unsafe: bool) -> vtable_res { allow_unsafe: bool) -> vtable_res {
let tcx = fcx.ccx.tcx; let tcx = fcx.ccx.tcx;
let mut result = [], i = 0u; let mut result = [], i = 0u;
for ty in tys { for tys.each {|ty|
for bound in *bounds[i] { for vec::each(*bounds[i]) {|bound|
alt bound { alt bound {
ty::bound_iface(i_ty) { ty::bound_iface(i_ty) {
let i_ty = ty::substitute_type_params(tcx, tys, i_ty); let i_ty = ty::substitute_type_params(tcx, tys, i_ty);
@ -4022,7 +4017,7 @@ mod vtable {
alt ty::get(ty).struct { alt ty::get(ty).struct {
ty::ty_param(n, did) { ty::ty_param(n, did) {
let mut n_bound = 0u; let mut n_bound = 0u;
for bound in *tcx.ty_param_bounds.get(did.node) { for vec::each(*tcx.ty_param_bounds.get(did.node)) {|bound|
alt bound { alt bound {
ty::bound_iface(ity) { ty::bound_iface(ity) {
alt check ty::get(ity).struct { alt check ty::get(ity).struct {
@ -4038,7 +4033,7 @@ mod vtable {
} }
ty::ty_iface(did, tps) if iface_id == did { ty::ty_iface(did, tps) if iface_id == did {
if !allow_unsafe { if !allow_unsafe {
for m in *ty::iface_methods(tcx, did) { for vec::each(*ty::iface_methods(tcx, did)) {|m|
if ty::type_has_vars(ty::mk_fn(tcx, m.fty)) { if ty::type_has_vars(ty::mk_fn(tcx, m.fty)) {
tcx.sess.span_err( tcx.sess.span_err(
sp, "a boxed iface with self types may not be \ sp, "a boxed iface with self types may not be \
@ -4057,7 +4052,7 @@ mod vtable {
let mut found = none; let mut found = none;
std::list::iter(isc) {|impls| std::list::iter(isc) {|impls|
if option::is_none(found) { if option::is_none(found) {
for im in *impls { for vec::each(*impls) {|im|
let match = alt ty::impl_iface(tcx, im.did) { let match = alt ty::impl_iface(tcx, im.did) {
some(ity) { some(ity) {
alt check ty::get(ity).struct { alt check ty::get(ity).struct {

View file

@ -46,7 +46,7 @@ fn field_expr(f: ast::field) -> @ast::expr { ret f.node.expr; }
fn field_exprs(fields: [ast::field]) -> [@ast::expr] { fn field_exprs(fields: [ast::field]) -> [@ast::expr] {
let mut es = []; let mut es = [];
for f: ast::field in fields { es += [f.node.expr]; } for fields.each {|f| es += [f.node.expr]; }
ret es; ret es;
} }

View file

@ -67,20 +67,23 @@ fn mk_filesearch(maybe_sysroot: option<path>,
// FIXME #1001: This can't be an obj method // FIXME #1001: This can't be an obj method
fn search<T: copy>(filesearch: filesearch, pick: pick<T>) -> option<T> { fn search<T: copy>(filesearch: filesearch, pick: pick<T>) -> option<T> {
for lib_search_path in filesearch.lib_search_paths() { let mut rslt = none;
for filesearch.lib_search_paths().each {|lib_search_path|
#debug("searching %s", lib_search_path); #debug("searching %s", lib_search_path);
for path in os::list_dir_path(lib_search_path) { for os::list_dir_path(lib_search_path).each {|path|
#debug("testing %s", path); #debug("testing %s", path);
let maybe_picked = pick(path); let maybe_picked = pick(path);
if option::is_some(maybe_picked) { if option::is_some(maybe_picked) {
#debug("picked %s", path); #debug("picked %s", path);
ret maybe_picked; rslt = maybe_picked;
break;
} else { } else {
#debug("rejected %s", path); #debug("rejected %s", path);
} }
} }
if option::is_some(rslt) { break; }
} }
ret option::none; ret rslt;
} }
fn relative_target_lib_path(target_triple: str) -> [path] { fn relative_target_lib_path(target_triple: str) -> [path] {
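The `search` change above also shows how an early exit is rewritten: the old code could `ret maybe_picked;` straight out of the nested loops, while the new form records the hit in `rslt` and then `break`s out of each `each` level before returning. A minimal sketch of that flag-and-break shape, with illustrative names (`find_first_even`, `vs`) rather than code from the patch:

    // Illustrative only: exit nested `each` loops via a result flag.
    fn find_first_even(vs: [[int]]) -> option<int> {
        let mut rslt = none;
        for vs.each {|inner|
            for inner.each {|v|
                if v % 2 == 0 { rslt = some(v); break; }
            }
            if option::is_some(rslt) { break; }
        }
        ret rslt;
    }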

View file

@ -73,7 +73,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> str {
alt ident { some(i) { s += " "; s += i; } _ { } } alt ident { some(i) { s += " "; s += i; } _ { } }
s += "("; s += "(";
let mut strs = []; let mut strs = [];
for a: arg in inputs { strs += [fn_input_to_str(cx, a)]; } for inputs.each {|a| strs += [fn_input_to_str(cx, a)]; }
s += str::connect(strs, ", "); s += str::connect(strs, ", ");
s += ")"; s += ")";
if ty::get(output).struct != ty_nil { if ty::get(output).struct != ty_nil {
@ -139,12 +139,12 @@ fn ty_to_str(cx: ctxt, typ: t) -> str {
ty_type { "type" } ty_type { "type" }
ty_rec(elems) { ty_rec(elems) {
let mut strs: [str] = []; let mut strs: [str] = [];
for fld: field in elems { strs += [field_to_str(cx, fld)]; } for elems.each {|fld| strs += [field_to_str(cx, fld)]; }
"{" + str::connect(strs, ",") + "}" "{" + str::connect(strs, ",") + "}"
} }
ty_tup(elems) { ty_tup(elems) {
let mut strs = []; let mut strs = [];
for elem in elems { strs += [ty_to_str(cx, elem)]; } for elems.each {|elem| strs += [ty_to_str(cx, elem)]; }
"(" + str::connect(strs, ",") + ")" "(" + str::connect(strs, ",") + ")"
} }
ty_fn(f) { ty_fn(f) {
@ -179,7 +179,7 @@ fn constr_to_str(c: @constr) -> str {
fn constrs_str(constrs: [@constr]) -> str { fn constrs_str(constrs: [@constr]) -> str {
let mut s = ""; let mut s = "";
let mut colon = true; let mut colon = true;
for c: @constr in constrs { for constrs.each {|c|
if colon { s += " : "; colon = false; } else { s += ", "; } if colon { s += " : "; colon = false; } else { s += ", "; }
s += constr_to_str(c); s += constr_to_str(c);
} }

View file

@ -59,7 +59,7 @@ fn usage() {
println("Usage: rustdoc [options] <cratefile>\n"); println("Usage: rustdoc [options] <cratefile>\n");
println("Options:\n"); println("Options:\n");
for opt in opts() { for opts().each {|opt|
println(#fmt(" %s", tuple::second(opt))); println(#fmt(" %s", tuple::second(opt)));
} }
println(""); println("");

View file

@ -328,7 +328,7 @@ fn write_mod_contents(
write_index(ctxt, option::get(doc.index)); write_index(ctxt, option::get(doc.index));
} }
for itemtag in doc.items { for doc.items.each {|itemtag|
write_item(ctxt, itemtag); write_item(ctxt, itemtag);
} }
} }
@ -381,7 +381,7 @@ fn write_index(ctxt: ctxt, index: doc::index) {
ret; ret;
} }
for entry in index.entries { for index.entries.each {|entry|
let header = header_text_(entry.kind, entry.name); let header = header_text_(entry.kind, entry.name);
let id = entry.link; let id = entry.link;
if option::is_some(entry.brief) { if option::is_some(entry.brief) {
@ -431,7 +431,7 @@ fn write_nmod(ctxt: ctxt, doc: doc::nmoddoc) {
write_index(ctxt, option::get(doc.index)); write_index(ctxt, option::get(doc.index));
} }
for fndoc in doc.fns { for doc.fns.each {|fndoc|
write_item_header(ctxt, doc::fntag(fndoc)); write_item_header(ctxt, doc::fntag(fndoc));
write_fn(ctxt, fndoc); write_fn(ctxt, fndoc);
} }

View file

@ -82,13 +82,13 @@ fn build_reexport_def_set(srv: astsrv::srv) -> def_set {
let assoc_list = astsrv::exec(srv) {|ctxt| let assoc_list = astsrv::exec(srv) {|ctxt|
let def_set = common::new_def_hash(); let def_set = common::new_def_hash();
ctxt.exp_map.items {|_id, defs| ctxt.exp_map.items {|_id, defs|
for def in defs { for defs.each {|def|
if def.reexp { if def.reexp {
def_set.insert(def.id, ()); def_set.insert(def.id, ());
} }
} }
} }
for def in find_reexport_impls(ctxt) { for find_reexport_impls(ctxt).each {|def|
def_set.insert(def, ()); def_set.insert(def, ());
} }
to_assoc_list(def_set) to_assoc_list(def_set)
@ -137,7 +137,7 @@ fn build_reexport_def_map(
fn fold_mod(fold: fold::fold<ctxt>, doc: doc::moddoc) -> doc::moddoc { fn fold_mod(fold: fold::fold<ctxt>, doc: doc::moddoc) -> doc::moddoc {
let doc = fold::default_seq_fold_mod(fold, doc); let doc = fold::default_seq_fold_mod(fold, doc);
for item in doc.items { for doc.items.each {|item|
let def_id = ast_util::local_def(item.id()); let def_id = ast_util::local_def(item.id());
if fold.ctxt.def_set.contains_key(def_id) { if fold.ctxt.def_set.contains_key(def_id) {
fold.ctxt.def_map.insert(def_id, item); fold.ctxt.def_map.insert(def_id, item);
@ -150,7 +150,7 @@ fn build_reexport_def_map(
fn fold_nmod(fold: fold::fold<ctxt>, doc: doc::nmoddoc) -> doc::nmoddoc { fn fold_nmod(fold: fold::fold<ctxt>, doc: doc::nmoddoc) -> doc::nmoddoc {
let doc = fold::default_seq_fold_nmod(fold, doc); let doc = fold::default_seq_fold_nmod(fold, doc);
for fndoc in doc.fns { for doc.fns.each {|fndoc|
let def_id = ast_util::local_def(fndoc.id()); let def_id = ast_util::local_def(fndoc.id());
if fold.ctxt.def_set.contains_key(def_id) { if fold.ctxt.def_set.contains_key(def_id) {
fold.ctxt.def_map.insert(def_id, doc::fntag(fndoc)); fold.ctxt.def_map.insert(def_id, doc::fntag(fndoc));
@ -184,7 +184,7 @@ fn build_reexport_path_map(srv: astsrv::srv, -def_map: def_map) -> path_map {
let modpath = ast_map::path_to_str(vec::init(*path)); let modpath = ast_map::path_to_str(vec::init(*path));
let mut reexportdocs = []; let mut reexportdocs = [];
for def in defs { for defs.each {|def|
if !def.reexp { cont; } if !def.reexp { cont; }
alt def_map.find(def.id) { alt def_map.find(def.id) {
some(itemtag) { some(itemtag) {
@ -206,7 +206,8 @@ fn build_reexport_path_map(srv: astsrv::srv, -def_map: def_map) -> path_map {
} }
} }
for (path, doc) in find_reexport_impl_docs(ctxt, def_map) { for find_reexport_impl_docs(ctxt, def_map).each {|elt|
let (path, doc) = elt;
let docs = alt path_map.find(path) { let docs = alt path_map.find(path) {
some(docs) { docs + [(doc)] } some(docs) { docs + [(doc)] }
none { [doc] } none { [doc] }
@ -272,7 +273,7 @@ fn for_each_reexported_impl(
let all_impls = all_impls(m); let all_impls = all_impls(m);
alt check ctxt.impl_map.get(mod_id) { alt check ctxt.impl_map.get(mod_id) {
list::cons(impls, @list::nil) { list::cons(impls, @list::nil) {
for i in *impls { for vec::each(*impls) {|i|
// This impl is not an item in the current mod // This impl is not an item in the current mod
if !all_impls.contains_key(i.did) { if !all_impls.contains_key(i.did) {
// Ignore external impls because I don't // Ignore external impls because I don't
@ -289,7 +290,7 @@ fn for_each_reexported_impl(
fn all_impls(m: ast::_mod) -> map::set<ast::def_id> { fn all_impls(m: ast::_mod) -> map::set<ast::def_id> {
let all_impls = common::new_def_hash(); let all_impls = common::new_def_hash();
for item in m.items { for m.items.each {|item|
alt item.node { alt item.node {
ast::item_impl(_, _, _, _) { ast::item_impl(_, _, _, _) {
all_impls.insert(ast_util::local_def(item.id), ()); all_impls.insert(ast_util::local_def(item.id), ());

View file

@ -94,7 +94,7 @@ fn sectionalize(desc: option<str>) -> (option<str>, [doc::section]) {
let mut current_section = none; let mut current_section = none;
let mut sections = []; let mut sections = [];
for line in lines { for lines.each {|line|
alt parse_header(line) { alt parse_header(line) {
some(header) { some(header) {
if option::is_some(current_section) { if option::is_some(current_section) {

View file

@ -6,7 +6,7 @@ fn alist_add<A: copy, B: copy>(lst: alist<A,B>, k: A, v: B) {
fn alist_get<A: copy, B: copy>(lst: alist<A,B>, k: A) -> B { fn alist_get<A: copy, B: copy>(lst: alist<A,B>, k: A) -> B {
let eq_fn = lst.eq_fn; let eq_fn = lst.eq_fn;
for pair in lst.data { for lst.data.each {|pair|
let (ki, vi) = pair; // copy req'd for alias analysis let (ki, vi) = pair; // copy req'd for alias analysis
if eq_fn(k, ki) { ret vi; } if eq_fn(k, ki) { ret vi; }
} }

View file

@ -25,7 +25,7 @@ type aminoacids = {ch: char, prob: u32};
fn make_cumulative(aa: [aminoacids]) -> [aminoacids] { fn make_cumulative(aa: [aminoacids]) -> [aminoacids] {
let mut cp: u32 = 0u32; let mut cp: u32 = 0u32;
let mut ans: [aminoacids] = []; let mut ans: [aminoacids] = [];
for a: aminoacids in aa { cp += a.prob; ans += [{ch: a.ch, prob: cp}]; } for aa.each {|a| cp += a.prob; ans += [{ch: a.ch, prob: cp}]; }
ret ans; ret ans;
} }

View file

@ -75,7 +75,7 @@ fn stress(num_tasks: int) {
results += [task::future_result(builder)]; results += [task::future_result(builder)];
task::run(builder) {|| stress_task(i); } task::run(builder) {|| stress_task(i); }
} }
for r in results { future::get(r); } for results.each {|r| future::get(r); }
} }
fn main(argv: [str]) { fn main(argv: [str]) {

View file

@ -77,7 +77,7 @@ mod map_reduce {
ctrl: chan<ctrl_proto<K2, V>>, inputs: [K1]) -> ctrl: chan<ctrl_proto<K2, V>>, inputs: [K1]) ->
[joinable_task] { [joinable_task] {
let tasks = []; let tasks = [];
for i in inputs { for inputs.each {|i|
let m = map, c = ctrl, ii = i; let m = map, c = ctrl, ii = i;
tasks += [task::spawn_joinable {|| map_task(m, c, ii)}]; tasks += [task::spawn_joinable {|| map_task(m, c, ii)}];
} }
@ -201,7 +201,7 @@ mod map_reduce {
} }
treemap::traverse(reducers, finish); treemap::traverse(reducers, finish);
for t in tasks { task::join(t); } for tasks.each {|t| task::join(t); }
} }
} }
@ -218,7 +218,7 @@ fn main(argv: [str]) {
} }
let iargs = []; let iargs = [];
for a in vec::slice(argv, 1u, vec::len(argv)) { vec::iter_between(argv, 1u, vec::len(argv)) {|a|
iargs += [str::bytes(a)]; iargs += [str::bytes(a)];
} }
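Not every loop becomes a `for ... each` form: the hunk above replaces `for a in vec::slice(argv, 1u, vec::len(argv))` with a plain higher-order call, `vec::iter_between(argv, 1u, vec::len(argv)) {|a| ... }`, so the `for` keyword disappears along with the temporary slice. A hedged sketch with an illustrative name (`log_tail`), assuming `vec::iter_between` visits the elements between the two indices as the hunk suggests:

    // Illustrative only: visit a subrange directly instead of slicing.
    fn log_tail(args: [str]) {
        vec::iter_between(args, 1u, vec::len(args)) {|a| log(debug, a); }
    }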

View file

@ -61,7 +61,7 @@ mod map_reduce {
fn start_mappers(ctrl: chan<ctrl_proto>, -inputs: [str]) -> fn start_mappers(ctrl: chan<ctrl_proto>, -inputs: [str]) ->
[future::future<task::task_result>] { [future::future<task::task_result>] {
let mut results = []; let mut results = [];
for i: str in inputs { for inputs.each {|i|
let builder = task::builder(); let builder = task::builder();
results += [task::future_result(builder)]; results += [task::future_result(builder)];
task::run(builder) {|| map_task(ctrl, i)} task::run(builder) {|| map_task(ctrl, i)}
@ -174,7 +174,7 @@ mod map_reduce {
reducers.values {|v| send(v, done); } reducers.values {|v| send(v, done); }
for r in results { future::get(r); } for results.each {|r| future::get(r); }
} }
} }

View file

@ -1,7 +1,7 @@
// error-pattern:mismatched types: expected `()` but found `bool` // error-pattern:mismatched types: expected `()` but found `bool`
fn main() { fn main() {
for i in [0] { for vec::iter([0]) {|_i|
true true
} }
} }

View file

@ -2,5 +2,5 @@
fn main() { fn main() {
let v: [mut {mut x: int}] = [mut {mut x: 1}]; let v: [mut {mut x: int}] = [mut {mut x: 1}];
for x in v { v[0] = {mut x: 2}; log(debug, x); } for v.each {|x| v[0] = {mut x: 2}; log(debug, x); }
} }

View file

@ -2,7 +2,7 @@ fn concat<T: copy>(v: [const [const T]]) -> [T] {
let mut r = []; let mut r = [];
// Earlier versions of our type checker accepted this: // Earlier versions of our type checker accepted this:
for inner: [T] in v { for v.each {|inner|
//!^ ERROR found `[const 'a]` (values differ in mutability) //!^ ERROR found `[const 'a]` (values differ in mutability)
r += inner; r += inner;
} }

View file

@ -1,5 +1,5 @@
fn main(args: [str]) { fn main(args: [str]) {
let vs: [str] = ["hi", "there", "this", "is", "a", "vec"]; let vs: [str] = ["hi", "there", "this", "is", "a", "vec"];
let vvs: [[str]] = [args, vs]; let vvs: [[str]] = [args, vs];
for vs: [str] in vvs { for s: str in vs { log(debug, s); } } for vvs.each {|vs| for vs.each {|s| log(debug, s); } }
} }

View file

@ -1,5 +1,5 @@
fn main() { fn main() {
let mut sum = 0; let mut sum = 0;
for x in [1, 2, 3, 4, 5] { sum += x; } for vec::each([1, 2, 3, 4, 5]) {|x| sum += x; }
assert (sum == 15); assert (sum == 15);
} }

View file

@ -1,4 +1,4 @@
fn iter_vec<T>(v: [T], f: fn(T)) { for x: T in v { f(x); } } fn iter_vec<T>(v: [T], f: fn(T)) { for v.each {|x| f(x); } }
fn main() { fn main() {
let v = [1, 2, 3, 4, 5, 6, 7]; let v = [1, 2, 3, 4, 5, 6, 7];

View file

@ -1,4 +1,4 @@
fn iter_vec<T>(v: [T], f: fn(T)) { for x: T in v { f(x); } } fn iter_vec<T>(v: [T], f: fn(T)) { for v.each {|x| f(x); } }
fn main() { fn main() {
let v = [1, 2, 3, 4, 5]; let v = [1, 2, 3, 4, 5];

View file

@ -6,12 +6,14 @@ fn main() {
assert (i == 10); assert (i == 10);
do { i += 1; if i == 20 { break; } } while i < 30 do { i += 1; if i == 20 { break; } } while i < 30
assert (i == 20); assert (i == 20);
for x: int in [1, 2, 3, 4, 5, 6] { if x == 3 { break; } assert (x <= 3); } for vec::each([1, 2, 3, 4, 5, 6]) {|x|
if x == 3 { break; } assert (x <= 3);
}
i = 0; i = 0;
while i < 10 { i += 1; if i % 2 == 0 { cont; } assert (i % 2 != 0); } while i < 10 { i += 1; if i % 2 == 0 { cont; } assert (i % 2 != 0); }
i = 0; i = 0;
do { i += 1; if i % 2 == 0 { cont; } assert (i % 2 != 0); } while i < 10 do { i += 1; if i % 2 == 0 { cont; } assert (i % 2 != 0); } while i < 10
for x: int in [1, 2, 3, 4, 5, 6] { for vec::each([1, 2, 3, 4, 5, 6]) {|x|
if x % 2 == 0 { cont; } if x % 2 == 0 { cont; }
assert (x % 2 != 0); assert (x % 2 != 0);
} }

View file

@ -1,5 +1,5 @@
fn main() { fn main() {
for {x: x, y: y}: {x: int, y: int} in [{x: 10, y: 20}, {x: 30, y: 0}] { for vec::each([{x: 10, y: 20}, {x: 30, y: 0}]) {|elt|
assert (x + y == 30); assert (elt.x + elt.y == 30);
} }
} }

View file

@ -1,8 +0,0 @@
fn main() {
let x = [@{mut a: @10, b: @20}];
for @{a, b} in x {
assert *a == 10;
(*x[0]).a = @30;
assert *a == 10;
}
}

View file

@ -1 +1 @@
fn main() { let x: [int] = []; for i: int in x { fail "moop"; } } fn main() { let x: [int] = []; for x.each {|_i| fail "moop"; } }

View file

@ -34,7 +34,7 @@ mod map_reduce {
enum ctrl_proto { find_reducer([u8], chan<int>), mapper_done, } enum ctrl_proto { find_reducer([u8], chan<int>), mapper_done, }
fn start_mappers(ctrl: chan<ctrl_proto>, inputs: [str]) { fn start_mappers(ctrl: chan<ctrl_proto>, inputs: [str]) {
for i: str in inputs { for inputs.each {|i|
task::spawn {|| map_task(ctrl, i); }; task::spawn {|| map_task(ctrl, i); };
} }
} }

View file

@ -17,7 +17,7 @@ iface map<T> {
impl <T> of map<T> for [T] { impl <T> of map<T> for [T] {
fn map<U>(f: fn(T) -> U) -> [U] { fn map<U>(f: fn(T) -> U) -> [U] {
let mut r = []; let mut r = [];
for x in self { r += [f(x)]; } for self.each {|x| r += [f(x)]; }
r r
} }
} }

View file

@ -3,12 +3,12 @@
fn main() { fn main() {
let x = [1, 2, 3]; let x = [1, 2, 3];
let mut y = 0; let mut y = 0;
for i: int in x { log(debug, i); y += i; } for x.each {|i| log(debug, i); y += i; }
log(debug, y); log(debug, y);
assert (y == 6); assert (y == 6);
let s = "hello there"; let s = "hello there";
let mut i: int = 0; let mut i: int = 0;
for c: u8 in s { for str::each(s) {|c|
if i == 0 { assert (c == 'h' as u8); } if i == 0 { assert (c == 'h' as u8); }
if i == 1 { assert (c == 'e' as u8); } if i == 1 { assert (c == 'e' as u8); }
if i == 2 { assert (c == 'l' as u8); } if i == 2 { assert (c == 'l' as u8); }
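Loops over the bytes of a string (`for c: u8 in s`) become calls to `str::each` here and in the last hunk of the commit, with the block receiving each byte as a `u8`. A small hedged sketch with an illustrative name (`byte_count`):

    // Illustrative only: count the bytes of a string with str::each.
    fn byte_count(s: str) -> uint {
        let mut n = 0u;
        for str::each(s) {|_b| n += 1u; }
        ret n;
    }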

View file

@ -1,6 +1,6 @@
fn main() { fn main() {
let x = [10, 20, 30]; let x = [10, 20, 30];
let mut sum = 0; let mut sum = 0;
for x in x { sum += x; } for x.each {|x| sum += x; }
assert (sum == 60); assert (sum == 60);
} }

View file

@ -1 +1 @@
fn main(args: [str]) { for s in args { log(debug, s); } } fn main(args: [str]) { for args.each {|s| log(debug, s); } }

View file

@ -4,7 +4,7 @@
// -*- rust -*- // -*- rust -*-
fn len(v: [const int]) -> uint { fn len(v: [const int]) -> uint {
let mut i = 0u; let mut i = 0u;
for x: int in v { i += 1u; } for v.each {|x| i += 1u; }
ret i; ret i;
} }

View file

@ -5,7 +5,7 @@ iface monad<A> {
impl <A> of monad<A> for [A] { impl <A> of monad<A> for [A] {
fn bind<B>(f: fn(A) -> [B]) -> [B] { fn bind<B>(f: fn(A) -> [B]) -> [B] {
let mut r = []; let mut r = [];
for elt in self { r += f(elt); } for self.each {|elt| r += f(elt); }
r r
} }
} }

View file

@ -58,7 +58,7 @@ fn main() {
calllink10 calllink10
]; ];
let rng = rand::rng(); let rng = rand::rng();
for f in fns { for fns.each {|f|
let sz = rng.next() % 256u32 + 256u32; let sz = rng.next() % 256u32 + 256u32;
let frame_backoff = rng.next() % 10u32 + 1u32; let frame_backoff = rng.next() % 10u32 + 1u32;
task::try {|| runtest(f, frame_backoff) }; task::try {|| runtest(f, frame_backoff) };

View file

@ -6,7 +6,7 @@ fn foo(c: [int]) {
alt none::<int> { alt none::<int> {
some::<int>(_) { some::<int>(_) {
for i: int in c { for c.each {|i|
log(debug, a); log(debug, a);
let a = 17; let a = 17;
b += [a]; b += [a];

View file

@ -19,10 +19,10 @@ impl util for uint {
impl util<T> for [T] { impl util<T> for [T] {
fn length_() -> uint { vec::len(self) } fn length_() -> uint { vec::len(self) }
fn iter_(f: fn(T)) { for x in self { f(x); } } fn iter_(f: fn(T)) { for self.each {|x| f(x); } }
fn map_<U>(f: fn(T) -> U) -> [U] { fn map_<U>(f: fn(T) -> U) -> [U] {
let mut r = []; let mut r = [];
for elt in self { r += [f(elt)]; } for self.each {|elt| r += [f(elt)]; }
r r
} }
} }

View file

@ -42,7 +42,7 @@ fn test00() {
// Read from spawned tasks... // Read from spawned tasks...
let mut sum = 0; let mut sum = 0;
for r in results { for results.each {|r|
i = 0; i = 0;
while i < number_of_messages { while i < number_of_messages {
let value = recv(po); let value = recv(po);
@ -52,7 +52,7 @@ fn test00() {
} }
// Join spawned tasks... // Join spawned tasks...
for r in results { future::get(r); } for results.each {|r| future::get(r); }
#debug("Completed: Final number is: "); #debug("Completed: Final number is: ");
log(error, sum); log(error, sum);

View file

@ -46,12 +46,12 @@ fn test00() {
task::run(builder) {|| test00_start(ch, i, number_of_messages);} task::run(builder) {|| test00_start(ch, i, number_of_messages);}
} }
let mut sum: int = 0; let mut sum: int = 0;
for r in results { for results.each {|r|
i = 0; i = 0;
while i < number_of_messages { sum += recv(po); i = i + 1; } while i < number_of_messages { sum += recv(po); i = i + 1; }
} }
for r in results { future::get(r); } for results.each {|r| future::get(r); }
#debug("Completed: Final number is: "); #debug("Completed: Final number is: ");
assert (sum == assert (sum ==
@ -132,7 +132,7 @@ fn test06() {
} }
for r in results { future::get(r); } for results.each {|r| future::get(r); }
} }

View file

@ -31,7 +31,7 @@ fn main() {
fn check_str_eq(a: str, b: str) { fn check_str_eq(a: str, b: str) {
let mut i: int = 0; let mut i: int = 0;
for ab: u8 in a { for str::each(a) {|ab|
log(debug, i); log(debug, i);
log(debug, ab); log(debug, ab);
let bb: u8 = b[i]; let bb: u8 = b[i];