Convert old-style for loops to new-style
Most could use the each method, but because of the hack used to disambiguate old- and new-style loops, some had to use vec::each. (This hack will go away soon.) Issue #1619
This commit is contained in:
parent
9c88e5ef5a
commit
c902eafa14
99 changed files with 623 additions and 625 deletions
|
@ -93,7 +93,7 @@ fn load_link(mis: [@ast::meta_item]) -> (option<str>,
|
|||
let mut name = none;
|
||||
let mut vers = none;
|
||||
let mut uuid = none;
|
||||
for a: @ast::meta_item in mis {
|
||||
for mis.each {|a|
|
||||
alt a.node {
|
||||
ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) {
|
||||
alt v {
|
||||
|
@ -128,7 +128,7 @@ fn load_pkg(filename: str) -> option<pkg> {
|
|||
let mut sigs = none;
|
||||
let mut crate_type = none;
|
||||
|
||||
for a in c.node.attrs {
|
||||
for c.node.attrs.each {|a|
|
||||
alt a.node.value.node {
|
||||
ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) {
|
||||
alt v {
|
||||
|
@ -273,7 +273,7 @@ fn load_one_source_package(&src: source, p: map::hashmap<str, json::json>) {
|
|||
let mut tags = [];
|
||||
alt p.find("tags") {
|
||||
some(json::list(js)) {
|
||||
for j in js {
|
||||
for js.each {|j|
|
||||
alt j {
|
||||
json::string(_j) { vec::grow(tags, 1u, _j); }
|
||||
_ { }
|
||||
|
@ -313,7 +313,7 @@ fn load_source_packages(&c: cargo, &src: source) {
|
|||
let pkgstr = io::read_whole_file_str(pkgfile);
|
||||
alt json::from_str(result::get(pkgstr)) {
|
||||
ok(json::list(js)) {
|
||||
for _j: json::json in js {
|
||||
for js.each {|_j|
|
||||
alt _j {
|
||||
json::dict(_p) {
|
||||
load_one_source_package(src, _p);
|
||||
|
@ -423,7 +423,7 @@ fn configure(opts: options) -> cargo {
|
|||
|
||||
fn for_each_package(c: cargo, b: fn(source, package)) {
|
||||
c.sources.values({ |v|
|
||||
for p in copy v.packages {
|
||||
for vec::each(copy v.packages) {|p|
|
||||
b(v, p);
|
||||
}
|
||||
})
|
||||
|
@ -432,7 +432,7 @@ fn for_each_package(c: cargo, b: fn(source, package)) {
|
|||
// Runs all programs in directory <buildpath>
|
||||
fn run_programs(buildpath: str) {
|
||||
let newv = os::list_dir_path(buildpath);
|
||||
for ct: str in newv {
|
||||
for newv.each {|ct|
|
||||
run::run_program(ct, []);
|
||||
}
|
||||
}
|
||||
|
@ -470,7 +470,7 @@ fn install_one_crate(c: cargo, path: str, cf: str) {
|
|||
};
|
||||
let newv = os::list_dir_path(buildpath);
|
||||
let exec_suffix = os::exe_suffix();
|
||||
for ct: str in newv {
|
||||
for newv.each {|ct|
|
||||
if (exec_suffix != "" && str::ends_with(ct, exec_suffix)) ||
|
||||
(exec_suffix == "" && !str::starts_with(path::basename(ct),
|
||||
"lib")) {
|
||||
|
@ -528,7 +528,7 @@ fn install_source(c: cargo, path: str) {
|
|||
fail "This doesn't look like a rust package (no .rc files).";
|
||||
}
|
||||
|
||||
for cf: str in cratefiles {
|
||||
for cratefiles.each {|cf|
|
||||
let p = load_pkg(cf);
|
||||
alt p {
|
||||
none { cont; }
|
||||
|
@ -618,7 +618,8 @@ fn install_uuid(c: cargo, wd: str, uuid: str) {
|
|||
ret;
|
||||
}
|
||||
error("Found multiple packages:");
|
||||
for (s,p) in ps {
|
||||
for ps.each {|elt|
|
||||
let (s,p) = elt;
|
||||
info(" " + s.name + "/" + p.uuid + " (" + p.name + ")");
|
||||
}
|
||||
}
|
||||
|
@ -639,7 +640,8 @@ fn install_named(c: cargo, wd: str, name: str) {
|
|||
ret;
|
||||
}
|
||||
error("Found multiple packages:");
|
||||
for (s,p) in ps {
|
||||
for ps.each {|elt|
|
||||
let (s,p) = elt;
|
||||
info(" " + s.name + "/" + p.uuid + " (" + p.name + ")");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -89,10 +89,9 @@ fn verify(root: str, data: str, sig: str, keyfp: str) -> bool {
|
|||
let p = gpg(["--homedir", path, "--with-fingerprint", "--verify", sig,
|
||||
data]);
|
||||
let res = "Primary key fingerprint: " + keyfp;
|
||||
for line in str::split_char(p.err, '\n') {
|
||||
if line == res {
|
||||
ret true;
|
||||
let mut rslt = false;
|
||||
for str::split_char(p.err, '\n').each {|line|
|
||||
if line == res { rslt = true; }
|
||||
}
|
||||
}
|
||||
ret false;
|
||||
ret rslt;
|
||||
}
|
||||
|
|
|
@ -135,7 +135,7 @@ fn test_opts(config: config) -> test::test_opts {
|
|||
fn make_tests(config: config) -> [test::test_desc] {
|
||||
#debug("making tests from %s", config.src_base);
|
||||
let mut tests = [];
|
||||
for file: str in os::list_dir_path(config.src_base) {
|
||||
for os::list_dir_path(config.src_base).each {|file|
|
||||
let file = file;
|
||||
#debug("inspecting file %s", file);
|
||||
if is_test(config, file) {
|
||||
|
@ -154,11 +154,11 @@ fn is_test(config: config, testfile: str) -> bool {
|
|||
|
||||
let mut valid = false;
|
||||
|
||||
for ext in valid_extensions {
|
||||
for valid_extensions.each {|ext|
|
||||
if str::ends_with(name, ext) { valid = true; }
|
||||
}
|
||||
|
||||
for pre in invalid_prefixes {
|
||||
for invalid_prefixes.each {|pre|
|
||||
if str::starts_with(name, pre) { valid = false; }
|
||||
}
|
||||
|
||||
|
|
|
@ -196,7 +196,7 @@ fn check_error_patterns(props: test_props,
|
|||
|
||||
let mut next_err_idx = 0u;
|
||||
let mut next_err_pat = props.error_patterns[next_err_idx];
|
||||
for line: str in str::split_char(procres.stderr, '\n') {
|
||||
for str::split_char(procres.stderr, '\n').each {|line|
|
||||
if str::contains(line, next_err_pat) {
|
||||
#debug("found error pattern %s", next_err_pat);
|
||||
next_err_idx += 1u;
|
||||
|
@ -215,7 +215,7 @@ fn check_error_patterns(props: test_props,
|
|||
fatal_procres(#fmt["error pattern '%s' not found!",
|
||||
missing_patterns[0]], procres);
|
||||
} else {
|
||||
for pattern: str in missing_patterns {
|
||||
for missing_patterns.each {|pattern|
|
||||
error(#fmt["error pattern '%s' not found!", pattern]);
|
||||
}
|
||||
fatal_procres("multiple error patterns not found", procres);
|
||||
|
@ -244,7 +244,7 @@ fn check_expected_errors(expected_errors: [errors::expected_error],
|
|||
// filename:line1:col1: line2:col2: *warning:* msg
|
||||
// where line1:col1: is the starting point, line2:col2:
|
||||
// is the ending point, and * represents ANSI color codes.
|
||||
for line: str in str::split_char(procres.stderr, '\n') {
|
||||
for str::split_char(procres.stderr, '\n').each {|line|
|
||||
let mut was_expected = false;
|
||||
for vec::eachi(expected_errors) {|i, ee|
|
||||
if !found_flags[i] {
|
||||
|
|
|
@ -25,7 +25,7 @@ fn find_rust_files(&files: [str], path: str) {
|
|||
} else if os::path_is_dir(path)
|
||||
&& !contains(path, "compile-fail")
|
||||
&& !contains(path, "build") {
|
||||
for p in os::list_dir_path(path) {
|
||||
for os::list_dir_path(path).each {|p|
|
||||
find_rust_files(files, p);
|
||||
}
|
||||
}
|
||||
|
@ -439,7 +439,7 @@ fn content_is_dangerous_to_run(code: str) -> bool {
|
|||
"unsafe",
|
||||
"log"]; // python --> rust pipe deadlock?
|
||||
|
||||
for p: str in dangerous_patterns { if contains(code, p) { ret true; } }
|
||||
for dangerous_patterns.each {|p| if contains(code, p) { ret true; } }
|
||||
ret false;
|
||||
}
|
||||
|
||||
|
@ -447,7 +447,7 @@ fn content_is_dangerous_to_compile(code: str) -> bool {
|
|||
let dangerous_patterns =
|
||||
["xfail-test"];
|
||||
|
||||
for p: str in dangerous_patterns { if contains(code, p) { ret true; } }
|
||||
for dangerous_patterns.each {|p| if contains(code, p) { ret true; } }
|
||||
ret false;
|
||||
}
|
||||
|
||||
|
@ -462,7 +462,7 @@ fn content_might_not_converge(code: str) -> bool {
|
|||
"\n\n\n\n\n" // https://github.com/mozilla/rust/issues/850
|
||||
];
|
||||
|
||||
for p: str in confusing_patterns { if contains(code, p) { ret true; } }
|
||||
for confusing_patterns.each {|p| if contains(code, p) { ret true; } }
|
||||
ret false;
|
||||
}
|
||||
|
||||
|
@ -475,7 +475,7 @@ fn file_might_not_converge(filename: str) -> bool {
|
|||
];
|
||||
|
||||
|
||||
for f in confusing_files { if contains(filename, f) { ret true; } }
|
||||
for confusing_files.each {|f| if contains(filename, f) { ret true; } }
|
||||
|
||||
ret false;
|
||||
}
|
||||
|
@ -509,7 +509,7 @@ fn check_roundtrip_convergence(code: @str, maxIters: uint) {
|
|||
|
||||
fn check_convergence(files: [str]) {
|
||||
#error("pp convergence tests: %u files", vec::len(files));
|
||||
for file in files {
|
||||
for files.each {|file|
|
||||
if !file_might_not_converge(file) {
|
||||
let s = @result::get(io::read_whole_file_str(file));
|
||||
if !content_might_not_converge(*s) {
|
||||
|
@ -522,7 +522,7 @@ fn check_convergence(files: [str]) {
|
|||
}
|
||||
|
||||
fn check_variants(files: [str], cx: context) {
|
||||
for file in files {
|
||||
for files.each {|file|
|
||||
if cx.mode == tm_converge && file_might_not_converge(file) {
|
||||
#error("Skipping convergence test based on file_might_not_converge");
|
||||
cont;
|
||||
|
|
|
@ -62,7 +62,7 @@ export zip;
|
|||
export swap;
|
||||
export reverse;
|
||||
export reversed;
|
||||
export iter, each, eachi;
|
||||
export iter, iter_between, each, eachi;
|
||||
export iter2;
|
||||
export iteri;
|
||||
export riter;
|
||||
|
|
|
@ -142,11 +142,11 @@ fn float_ty_to_str(t: float_ty) -> str {
|
|||
fn is_exported(i: ident, m: _mod) -> bool {
|
||||
let mut local = false;
|
||||
let mut parent_enum : option<ident> = none;
|
||||
for it: @item in m.items {
|
||||
for m.items.each {|it|
|
||||
if it.ident == i { local = true; }
|
||||
alt it.node {
|
||||
item_enum(variants, _) {
|
||||
for v: variant in variants {
|
||||
for variants.each {|v|
|
||||
if v.node.name == i {
|
||||
local = true;
|
||||
parent_enum = some(it.ident);
|
||||
|
@ -158,11 +158,11 @@ fn is_exported(i: ident, m: _mod) -> bool {
|
|||
if local { break; }
|
||||
}
|
||||
let mut has_explicit_exports = false;
|
||||
for vi: @view_item in m.view_items {
|
||||
for m.view_items.each {|vi|
|
||||
alt vi.node {
|
||||
view_item_export(vps) {
|
||||
has_explicit_exports = true;
|
||||
for vp in vps {
|
||||
for vps.each {|vp|
|
||||
alt vp.node {
|
||||
ast::view_path_simple(id, _, _) {
|
||||
if id == i { ret true; }
|
||||
|
@ -177,7 +177,7 @@ fn is_exported(i: ident, m: _mod) -> bool {
|
|||
ast::view_path_list(path, ids, _) {
|
||||
if vec::len(*path) == 1u {
|
||||
if i == path[0] { ret true; }
|
||||
for id in ids {
|
||||
for ids.each {|id|
|
||||
if id.node.name == i { ret true; }
|
||||
}
|
||||
} else {
|
||||
|
@ -278,14 +278,14 @@ fn public_methods(ms: [@method]) -> [@method] {
|
|||
|
||||
fn split_class_items(cs: [@class_member]) -> ([ivar], [@method]) {
|
||||
let mut vs = [], ms = [];
|
||||
for c in cs {
|
||||
for cs.each {|c|
|
||||
alt c.node {
|
||||
instance_var(i, t, cm, id, privacy) {
|
||||
vs += [{ident: i, ty: t, cm: cm, id: id, privacy: privacy}];
|
||||
}
|
||||
class_method(m) { ms += [m]; }
|
||||
}
|
||||
}
|
||||
};
|
||||
(vs, ms)
|
||||
}
|
||||
|
||||
|
|
|
@ -46,7 +46,7 @@ fn find_linkage_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
|
|||
|
||||
fn find_linkage_attrs(attrs: [ast::attribute]) -> [ast::attribute] {
|
||||
let mut found = [];
|
||||
for attr: ast::attribute in find_attrs_by_name(attrs, "link") {
|
||||
for find_attrs_by_name(attrs, "link").each {|attr|
|
||||
alt attr.node.value.node {
|
||||
ast::meta_list(_, _) { found += [attr] }
|
||||
_ { #debug("ignoring link attribute that has incorrect type"); }
|
||||
|
@ -150,7 +150,7 @@ fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value }
|
|||
// Get the meta_items from inside a vector of attributes
|
||||
fn attr_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
|
||||
let mut mitems = [];
|
||||
for a: ast::attribute in attrs { mitems += [attr_meta(a)]; }
|
||||
for attrs.each {|a| mitems += [attr_meta(a)]; }
|
||||
ret mitems;
|
||||
}
|
||||
|
||||
|
@ -178,7 +178,7 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
|
|||
fn contains(haystack: [@ast::meta_item], needle: @ast::meta_item) -> bool {
|
||||
#debug("looking for %s",
|
||||
print::pprust::meta_item_to_str(*needle));
|
||||
for item: @ast::meta_item in haystack {
|
||||
for haystack.each {|item|
|
||||
#debug("looking in %s",
|
||||
print::pprust::meta_item_to_str(*item));
|
||||
if eq(item, needle) { #debug("found it!"); ret true; }
|
||||
|
@ -207,12 +207,12 @@ fn sort_meta_items(items: [@ast::meta_item]) -> [@ast::meta_item] {
|
|||
|
||||
// This is sort of stupid here, converting to a vec of mutables and back
|
||||
let mut v: [mut @ast::meta_item] = [mut];
|
||||
for mi: @ast::meta_item in items { v += [mut mi]; }
|
||||
for items.each {|mi| v += [mut mi]; }
|
||||
|
||||
std::sort::quick_sort(lteq, v);
|
||||
|
||||
let mut v2: [@ast::meta_item] = [];
|
||||
for mi: @ast::meta_item in v { v2 += [mi]; }
|
||||
for v.each {|mi| v2 += [mi]; }
|
||||
ret v2;
|
||||
}
|
||||
|
||||
|
@ -231,7 +231,7 @@ fn remove_meta_items_by_name(items: [@ast::meta_item], name: str) ->
|
|||
fn require_unique_names(diagnostic: span_handler,
|
||||
metas: [@ast::meta_item]) {
|
||||
let map = map::str_hash();
|
||||
for meta: @ast::meta_item in metas {
|
||||
for metas.each {|meta|
|
||||
let name = get_meta_item_name(meta);
|
||||
if map.contains_key(name) {
|
||||
diagnostic.span_fatal(meta.span,
|
||||
|
|
|
@ -187,7 +187,7 @@ fn get_snippet(cm: codemap::codemap, fidx: uint, lo: uint, hi: uint) -> str
|
|||
}
|
||||
|
||||
fn get_filemap(cm: codemap, filename: str) -> filemap {
|
||||
for fm: filemap in cm.files { if fm.name == filename { ret fm; } }
|
||||
for cm.files.each {|fm| if fm.name == filename { ret fm; } }
|
||||
//XXjdm the following triggers a mismatched type bug
|
||||
// (or expected function, found _|_)
|
||||
fail; // ("asking for " + filename + " which we don't know about");
|
||||
|
|
|
@ -201,7 +201,7 @@ fn highlight_lines(cm: codemap::codemap, sp: span,
|
|||
elided = true;
|
||||
}
|
||||
// Print the offending lines
|
||||
for line: uint in display_lines {
|
||||
for display_lines.each {|line|
|
||||
io::stderr().write_str(#fmt["%s:%u ", fm.name, line + 1u]);
|
||||
let s = codemap::get_line(fm, line as int) + "\n";
|
||||
io::stderr().write_str(s);
|
||||
|
|
|
@ -68,7 +68,7 @@ fn mk_rec_e(cx: ext_ctxt, sp: span,
|
|||
fields: [{ident: ast::ident, ex: @ast::expr}]) ->
|
||||
@ast::expr {
|
||||
let mut astfields: [ast::field] = [];
|
||||
for field: {ident: ast::ident, ex: @ast::expr} in fields {
|
||||
for fields.each {|field|
|
||||
let ident = field.ident;
|
||||
let val = field.ex;
|
||||
let astfield =
|
||||
|
|
|
@ -11,7 +11,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
|
|||
}
|
||||
};
|
||||
let mut res: ast::ident = "";
|
||||
for e: @ast::expr in args {
|
||||
for args.each {|e|
|
||||
res += expr_to_ident(cx, e, "expected an ident");
|
||||
}
|
||||
|
||||
|
|
|
@ -57,7 +57,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
|
|||
fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: conv) -> @ast::expr {
|
||||
fn make_flags(cx: ext_ctxt, sp: span, flags: [flag]) -> @ast::expr {
|
||||
let mut flagexprs: [@ast::expr] = [];
|
||||
for f: flag in flags {
|
||||
for flags.each {|f|
|
||||
let mut fstr;
|
||||
alt f {
|
||||
flag_left_justify { fstr = "flag_left_justify"; }
|
||||
|
@ -141,7 +141,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
|
|||
option::none { }
|
||||
_ { cx.span_unimpl(sp, unsupported); }
|
||||
}
|
||||
for f: flag in cnv.flags {
|
||||
for cnv.flags.each {|f|
|
||||
alt f {
|
||||
flag_left_justify { }
|
||||
flag_sign_always {
|
||||
|
@ -197,7 +197,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
|
|||
some(p) { log(debug, "param: " + int::to_str(p, 10u)); }
|
||||
_ { #debug("param: none"); }
|
||||
}
|
||||
for f: flag in c.flags {
|
||||
for c.flags.each {|f|
|
||||
alt f {
|
||||
flag_left_justify { #debug("flag: left justify"); }
|
||||
flag_left_zero_pad { #debug("flag: left zero pad"); }
|
||||
|
@ -252,7 +252,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
|
|||
let mut n = 0u;
|
||||
let mut tmp_expr = mk_str(cx, sp, "");
|
||||
let nargs = vec::len::<@ast::expr>(args);
|
||||
for pc: piece in pieces {
|
||||
for pieces.each {|pc|
|
||||
alt pc {
|
||||
piece_string(s) {
|
||||
let s_expr = mk_str(cx, fmt_sp, s);
|
||||
|
|
|
@ -73,7 +73,7 @@ fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
|
|||
{pre: [@expr], rep: option<@expr>, post: [@expr]} {
|
||||
let mut idx: uint = 0u;
|
||||
let mut res = none;
|
||||
for elt: @expr in elts {
|
||||
for elts.each {|elt|
|
||||
alt elt.node {
|
||||
expr_mac(m) {
|
||||
alt m.node {
|
||||
|
@ -102,7 +102,7 @@ fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
|
|||
fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: [T]) ->
|
||||
option<[U]> {
|
||||
let mut res = [];
|
||||
for elem: T in v {
|
||||
for v.each {|elem|
|
||||
alt f(elem) { none { ret none; } some(fv) { res += [fv]; } }
|
||||
}
|
||||
ret some(res);
|
||||
|
@ -163,7 +163,7 @@ selectors. */
|
|||
fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> {
|
||||
let res = str_hash::<arb_depth<matchable>>();
|
||||
//need to do this first, to check vec lengths.
|
||||
for sel: selector in b.literal_ast_matchers {
|
||||
for b.literal_ast_matchers.each {|sel|
|
||||
alt sel(match_expr(e)) { none { ret none; } _ { } }
|
||||
}
|
||||
let mut never_mind: bool = false;
|
||||
|
@ -209,7 +209,7 @@ fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr {
|
|||
fn follow(m: arb_depth<matchable>, idx_path: @mut [uint]) ->
|
||||
arb_depth<matchable> {
|
||||
let mut res: arb_depth<matchable> = m;
|
||||
for idx: uint in *idx_path {
|
||||
for vec::each(*idx_path) {|idx|
|
||||
alt res {
|
||||
leaf(_) { ret res;/* end of the line */ }
|
||||
seq(new_ms, _) { res = new_ms[idx]; }
|
||||
|
@ -677,7 +677,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
|
|||
|
||||
let mut macro_name: option<str> = none;
|
||||
let mut clauses: [@clause] = [];
|
||||
for arg: @expr in args {
|
||||
for args.each {|arg|
|
||||
alt arg.node {
|
||||
expr_vec(elts, mutbl) {
|
||||
if vec::len(elts) != 2u {
|
||||
|
@ -753,7 +753,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
|
|||
fn generic_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
|
||||
_body: ast::mac_body, clauses: [@clause]) -> @expr {
|
||||
let arg = get_mac_arg(cx,sp,arg);
|
||||
for c: @clause in clauses {
|
||||
for clauses.each {|c|
|
||||
alt use_selectors_to_bind(c.params, arg) {
|
||||
some(bindings) { ret transcribe(cx, bindings, c.body); }
|
||||
none { cont; }
|
||||
|
|
|
@ -338,7 +338,7 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
|
|||
}
|
||||
pat_rec(fields, etc) {
|
||||
let mut fs = [];
|
||||
for f: ast::field_pat in fields {
|
||||
for fields.each {|f|
|
||||
fs += [{ident: f.ident, pat: fld.fold_pat(f.pat)}];
|
||||
}
|
||||
pat_rec(fs, etc)
|
||||
|
|
|
@ -13,7 +13,7 @@ type ctx =
|
|||
fn eval_crate_directives(cx: ctx, cdirs: [@ast::crate_directive], prefix: str,
|
||||
&view_items: [@ast::view_item],
|
||||
&items: [@ast::item]) {
|
||||
for sub_cdir: @ast::crate_directive in cdirs {
|
||||
for cdirs.each {|sub_cdir|
|
||||
eval_crate_directive(cx, sub_cdir, prefix, view_items, items);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -143,12 +143,13 @@ fn new_parser(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader,
|
|||
// interpreted as a specific kind of statement, which would be confusing.
|
||||
fn bad_expr_word_table() -> hashmap<str, ()> {
|
||||
let words = str_hash();
|
||||
for word in ["alt", "assert", "be", "break", "check", "claim",
|
||||
let keys = ["alt", "assert", "be", "break", "check", "claim",
|
||||
"class", "const", "cont", "copy", "crust", "do", "else",
|
||||
"enum", "export", "fail", "fn", "for", "if", "iface",
|
||||
"impl", "import", "let", "log", "loop", "mod", "mut",
|
||||
"mut", "native", "pure", "resource", "ret", "trait",
|
||||
"type", "unchecked", "unsafe", "while", "new"] {
|
||||
"type", "unchecked", "unsafe", "while", "new"];
|
||||
for keys.each {|word|
|
||||
words.insert(word, ());
|
||||
}
|
||||
words
|
||||
|
@ -312,7 +313,7 @@ fn parse_ty_field(p: parser) -> ast::ty_field {
|
|||
// otherwise, fail
|
||||
fn ident_index(p: parser, args: [ast::arg], i: ast::ident) -> uint {
|
||||
let mut j = 0u;
|
||||
for a: ast::arg in args { if a.ident == i { ret j; } j += 1u; }
|
||||
for args.each {|a| if a.ident == i { ret j; } j += 1u; }
|
||||
p.fatal("unbound variable `" + i + "` in constraint arg");
|
||||
}
|
||||
|
||||
|
@ -1230,7 +1231,7 @@ fn parse_more_binops(p: parser, plhs: pexpr, min_prec: int) ->
|
|||
let peeked = p.token;
|
||||
if peeked == token::BINOP(token::OR) &&
|
||||
p.restriction == RESTRICT_NO_BAR_OP { ret lhs; }
|
||||
for cur: op_spec in *p.precs {
|
||||
for vec::each(*p.precs) {|cur|
|
||||
if cur.prec > min_prec && cur.tok == peeked {
|
||||
p.bump();
|
||||
let expr = parse_prefix_expr(p);
|
||||
|
@ -1414,7 +1415,7 @@ fn parse_for_expr(p: parser) -> @ast::expr {
|
|||
_ { false }
|
||||
};
|
||||
if new_style {
|
||||
let call = parse_expr(p);
|
||||
let call = parse_expr_res(p, RESTRICT_STMT_EXPR);
|
||||
alt call.node {
|
||||
ast::expr_call(f, args, true) {
|
||||
let b_arg = vec::last(args);
|
||||
|
@ -1428,6 +1429,7 @@ fn parse_for_expr(p: parser) -> @ast::expr {
|
|||
}
|
||||
}
|
||||
} else {
|
||||
p.warn("old-style for");
|
||||
let decl = parse_local(p, false, false);
|
||||
expect_word(p, "in");
|
||||
let seq = parse_expr(p);
|
||||
|
@ -2328,7 +2330,7 @@ fn parse_item_enum(p: parser, attrs: [ast::attribute]) -> @ast::item {
|
|||
let arg_tys = parse_seq(token::LPAREN, token::RPAREN,
|
||||
seq_sep(token::COMMA),
|
||||
{|p| parse_ty(p, false)}, p);
|
||||
for ty in arg_tys.node {
|
||||
for arg_tys.node.each {|ty|
|
||||
args += [{ty: ty, id: p.get_id()}];
|
||||
}
|
||||
} else if eat(p, token::EQ) {
|
||||
|
|
|
@ -269,7 +269,7 @@ fn synth_comment(s: ps, text: str) {
|
|||
fn commasep<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN)) {
|
||||
box(s, 0u, b);
|
||||
let mut first = true;
|
||||
for elt: IN in elts {
|
||||
for elts.each {|elt|
|
||||
if first { first = false; } else { word_space(s, ","); }
|
||||
op(s, elt);
|
||||
}
|
||||
|
@ -282,7 +282,7 @@ fn commasep_cmnt<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN),
|
|||
box(s, 0u, b);
|
||||
let len = vec::len::<IN>(elts);
|
||||
let mut i = 0u;
|
||||
for elt: IN in elts {
|
||||
for elts.each {|elt|
|
||||
maybe_print_comment(s, get_span(elt).hi);
|
||||
op(s, elt);
|
||||
i += 1u;
|
||||
|
@ -303,18 +303,18 @@ fn commasep_exprs(s: ps, b: breaks, exprs: [@ast::expr]) {
|
|||
|
||||
fn print_mod(s: ps, _mod: ast::_mod, attrs: [ast::attribute]) {
|
||||
print_inner_attributes(s, attrs);
|
||||
for vitem: @ast::view_item in _mod.view_items {
|
||||
for _mod.view_items.each {|vitem|
|
||||
print_view_item(s, vitem);
|
||||
}
|
||||
for item: @ast::item in _mod.items { print_item(s, item); }
|
||||
for _mod.items.each {|item| print_item(s, item); }
|
||||
}
|
||||
|
||||
fn print_native_mod(s: ps, nmod: ast::native_mod, attrs: [ast::attribute]) {
|
||||
print_inner_attributes(s, attrs);
|
||||
for vitem: @ast::view_item in nmod.view_items {
|
||||
for nmod.view_items.each {|vitem|
|
||||
print_view_item(s, vitem);
|
||||
}
|
||||
for item: @ast::native_item in nmod.items { print_native_item(s, item); }
|
||||
for nmod.items.each {|item| print_native_item(s, item); }
|
||||
}
|
||||
|
||||
fn print_region(s: ps, region: ast::region) {
|
||||
|
@ -476,7 +476,7 @@ fn print_item(s: ps, &&item: @ast::item) {
|
|||
end(s);
|
||||
} else {
|
||||
bopen(s);
|
||||
for v: ast::variant in variants {
|
||||
for variants.each {|v|
|
||||
space_if_not_bol(s);
|
||||
maybe_print_comment(s, v.span.lo);
|
||||
print_outer_attributes(s, v.node.attrs);
|
||||
|
@ -500,7 +500,7 @@ fn print_item(s: ps, &&item: @ast::item) {
|
|||
print_fn_args_and_ret(s, ctor.node.dec);
|
||||
space(s.s);
|
||||
print_block(s, ctor.node.body);
|
||||
for ci in items {
|
||||
for items.each {|ci|
|
||||
/*
|
||||
FIXME: collect all private items and print them
|
||||
in a single "priv" section
|
||||
|
@ -556,7 +556,7 @@ fn print_item(s: ps, &&item: @ast::item) {
|
|||
print_type(s, ty);
|
||||
space(s.s);
|
||||
bopen(s);
|
||||
for meth in methods {
|
||||
for methods.each {|meth|
|
||||
print_method(s, meth);
|
||||
}
|
||||
bclose(s, item.span);
|
||||
|
@ -567,7 +567,7 @@ fn print_item(s: ps, &&item: @ast::item) {
|
|||
print_type_params(s, tps);
|
||||
word(s.s, " ");
|
||||
bopen(s);
|
||||
for meth in methods { print_ty_method(s, meth); }
|
||||
for methods.each {|meth| print_ty_method(s, meth); }
|
||||
bclose(s, item.span);
|
||||
}
|
||||
ast::item_res(decl, tps, body, dt_id, ct_id) {
|
||||
|
@ -629,7 +629,7 @@ fn print_method(s: ps, meth: @ast::method) {
|
|||
|
||||
fn print_outer_attributes(s: ps, attrs: [ast::attribute]) {
|
||||
let mut count = 0;
|
||||
for attr: ast::attribute in attrs {
|
||||
for attrs.each {|attr|
|
||||
alt attr.node.style {
|
||||
ast::attr_outer { print_attribute(s, attr); count += 1; }
|
||||
_ {/* fallthrough */ }
|
||||
|
@ -640,7 +640,7 @@ fn print_outer_attributes(s: ps, attrs: [ast::attribute]) {
|
|||
|
||||
fn print_inner_attributes(s: ps, attrs: [ast::attribute]) {
|
||||
let mut count = 0;
|
||||
for attr: ast::attribute in attrs {
|
||||
for attrs.each {|attr|
|
||||
alt attr.node.style {
|
||||
ast::attr_inner {
|
||||
print_attribute(s, attr);
|
||||
|
@ -716,8 +716,8 @@ fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type,
|
|||
|
||||
print_inner_attributes(s, attrs);
|
||||
|
||||
for vi in blk.node.view_items { print_view_item(s, vi); }
|
||||
for st: @ast::stmt in blk.node.stmts {
|
||||
for blk.node.view_items.each {|vi| print_view_item(s, vi); }
|
||||
for blk.node.stmts.each {|st|
|
||||
print_stmt(s, *st);
|
||||
}
|
||||
alt blk.node.expr {
|
||||
|
@ -957,12 +957,12 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
|
|||
print_maybe_parens_discrim(s, expr);
|
||||
space(s.s);
|
||||
bopen(s);
|
||||
for arm: ast::arm in arms {
|
||||
for arms.each {|arm|
|
||||
space(s.s);
|
||||
cbox(s, alt_indent_unit);
|
||||
ibox(s, 0u);
|
||||
let mut first = true;
|
||||
for p: @ast::pat in arm.pats {
|
||||
for arm.pats.each {|p|
|
||||
if first {
|
||||
first = false;
|
||||
} else { space(s.s); word_space(s, "|"); }
|
||||
|
@ -1189,7 +1189,7 @@ fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) {
|
|||
maybe_print_comment(s, path.span.lo);
|
||||
if path.node.global { word(s.s, "::"); }
|
||||
let mut first = true;
|
||||
for id: ast::ident in path.node.idents {
|
||||
for path.node.idents.each {|id|
|
||||
if first { first = false; } else { word(s.s, "::"); }
|
||||
word(s.s, id);
|
||||
}
|
||||
|
@ -1359,7 +1359,7 @@ fn print_arg_mode(s: ps, m: ast::mode) {
|
|||
fn print_bounds(s: ps, bounds: @[ast::ty_param_bound]) {
|
||||
if vec::len(*bounds) > 0u {
|
||||
word(s.s, ":");
|
||||
for bound in *bounds {
|
||||
for vec::each(*bounds) {|bound|
|
||||
nbsp(s);
|
||||
alt bound {
|
||||
ast::bound_copy { word(s.s, "copy"); }
|
||||
|
@ -1403,7 +1403,7 @@ fn print_meta_item(s: ps, &&item: @ast::meta_item) {
|
|||
|
||||
fn print_simple_path(s: ps, path: ast::simple_path) {
|
||||
let mut first = true;
|
||||
for id in path {
|
||||
for path.each {|id|
|
||||
if first { first = false; } else { word(s.s, "::"); }
|
||||
word(s.s, id);
|
||||
}
|
||||
|
@ -1472,7 +1472,7 @@ fn print_view_item(s: ps, item: @ast::view_item) {
|
|||
// FIXME: The fact that this builds up the table anew for every call is
|
||||
// not good. Eventually, table should be a const.
|
||||
fn operator_prec(op: ast::binop) -> int {
|
||||
for spec: parse::parser::op_spec in *parse::parser::prec_table() {
|
||||
for vec::each(*parse::parser::prec_table()) {|spec|
|
||||
if spec.op == op { ret spec.prec; }
|
||||
}
|
||||
core::unreachable();
|
||||
|
@ -1667,7 +1667,7 @@ fn print_comment(s: ps, cmnt: lexer::cmnt) {
|
|||
}
|
||||
lexer::isolated {
|
||||
pprust::hardbreak_if_not_bol(s);
|
||||
for line: str in cmnt.lines {
|
||||
for cmnt.lines.each {|line|
|
||||
// Don't print empty lines because they will end up as trailing
|
||||
// whitespace
|
||||
if str::is_not_empty(line) { word(s.s, line); }
|
||||
|
@ -1681,7 +1681,7 @@ fn print_comment(s: ps, cmnt: lexer::cmnt) {
|
|||
hardbreak(s.s);
|
||||
} else {
|
||||
ibox(s, 0u);
|
||||
for line: str in cmnt.lines {
|
||||
for cmnt.lines.each {|line|
|
||||
if str::is_not_empty(line) { word(s.s, line); }
|
||||
hardbreak(s.s);
|
||||
}
|
||||
|
@ -1752,7 +1752,7 @@ fn constr_args_to_str<T>(f: fn@(T) -> str, args: [@ast::sp_constr_arg<T>]) ->
|
|||
str {
|
||||
let mut comma = false;
|
||||
let mut s = "(";
|
||||
for a: @ast::sp_constr_arg<T> in args {
|
||||
for args.each {|a|
|
||||
if comma { s += ", "; } else { comma = true; }
|
||||
s += constr_arg_to_str::<T>(f, a.node);
|
||||
}
|
||||
|
@ -1795,7 +1795,7 @@ fn ty_constr_to_str(&&c: @ast::ty_constr) -> str {
|
|||
|
||||
fn constrs_str<T>(constrs: [T], elt: fn(T) -> str) -> str {
|
||||
let mut s = "", colon = true;
|
||||
for c in constrs {
|
||||
for constrs.each {|c|
|
||||
if colon { s += " : "; colon = false; } else { s += ", "; }
|
||||
s += elt(c);
|
||||
}
|
||||
|
|
|
@ -84,7 +84,7 @@ fn visit_crate_directive<E>(cd: @crate_directive, e: E, v: vt<E>) {
|
|||
alt cd.node {
|
||||
cdir_src_mod(_, _) { }
|
||||
cdir_dir_mod(_, cdirs, _) {
|
||||
for cdir: @crate_directive in cdirs {
|
||||
for cdirs.each {|cdir|
|
||||
visit_crate_directive(cdir, e, v);
|
||||
}
|
||||
}
|
||||
|
@ -94,8 +94,8 @@ fn visit_crate_directive<E>(cd: @crate_directive, e: E, v: vt<E>) {
|
|||
}
|
||||
|
||||
fn visit_mod<E>(m: _mod, _sp: span, _id: node_id, e: E, v: vt<E>) {
|
||||
for vi: @view_item in m.view_items { v.visit_view_item(vi, e, v); }
|
||||
for i: @item in m.items { v.visit_item(i, e, v); }
|
||||
for m.view_items.each {|vi| v.visit_view_item(vi, e, v); }
|
||||
for m.items.each {|i| v.visit_item(i, e, v); }
|
||||
}
|
||||
|
||||
fn visit_view_item<E>(_vi: @view_item, _e: E, _v: vt<E>) { }
|
||||
|
@ -114,8 +114,8 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
|
|||
}
|
||||
item_mod(m) { v.visit_mod(m, i.span, i.id, e, v); }
|
||||
item_native_mod(nm) {
|
||||
for vi: @view_item in nm.view_items { v.visit_view_item(vi, e, v); }
|
||||
for ni: @native_item in nm.items { v.visit_native_item(ni, e, v); }
|
||||
for nm.view_items.each {|vi| v.visit_view_item(vi, e, v); }
|
||||
for nm.items.each {|ni| v.visit_native_item(ni, e, v); }
|
||||
}
|
||||
item_ty(t, tps) { v.visit_ty(t, e, v); v.visit_ty_params(tps, e, v); }
|
||||
item_res(decl, tps, body, dtor_id, _) {
|
||||
|
@ -124,21 +124,21 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
|
|||
}
|
||||
item_enum(variants, tps) {
|
||||
v.visit_ty_params(tps, e, v);
|
||||
for vr: variant in variants {
|
||||
for va: variant_arg in vr.node.args { v.visit_ty(va.ty, e, v); }
|
||||
for variants.each {|vr|
|
||||
for vr.node.args.each {|va| v.visit_ty(va.ty, e, v); }
|
||||
}
|
||||
}
|
||||
item_impl(tps, ifce, ty, methods) {
|
||||
v.visit_ty_params(tps, e, v);
|
||||
alt ifce { some(ty) { v.visit_ty(ty, e, v); } none {} }
|
||||
v.visit_ty(ty, e, v);
|
||||
for m in methods {
|
||||
for methods.each {|m|
|
||||
visit_method_helper(m, e, v)
|
||||
}
|
||||
}
|
||||
item_class(tps, members, ctor) {
|
||||
v.visit_ty_params(tps, e, v);
|
||||
for m in members {
|
||||
for members.each {|m|
|
||||
v.visit_class_item(m, e, v);
|
||||
}
|
||||
// make up a fake fn so as to call visit_fn on the ctor
|
||||
|
@ -147,8 +147,8 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
|
|||
}
|
||||
item_iface(tps, methods) {
|
||||
v.visit_ty_params(tps, e, v);
|
||||
for m in methods {
|
||||
for a in m.decl.inputs { v.visit_ty(a.ty, e, v); }
|
||||
for methods.each {|m|
|
||||
for m.decl.inputs.each {|a| v.visit_ty(a.ty, e, v); }
|
||||
v.visit_ty(m.decl.output, e, v);
|
||||
}
|
||||
}
|
||||
|
@ -176,12 +176,12 @@ fn visit_ty<E>(t: @ty, e: E, v: vt<E>) {
|
|||
ty_ptr(mt) { v.visit_ty(mt.ty, e, v); }
|
||||
ty_rptr(_, mt) { v.visit_ty(mt.ty, e, v); }
|
||||
ty_rec(flds) {
|
||||
for f: ty_field in flds { v.visit_ty(f.node.mt.ty, e, v); }
|
||||
for flds.each {|f| v.visit_ty(f.node.mt.ty, e, v); }
|
||||
}
|
||||
ty_tup(ts) { for tt in ts { v.visit_ty(tt, e, v); } }
|
||||
ty_tup(ts) { for ts.each {|tt| v.visit_ty(tt, e, v); } }
|
||||
ty_fn(_, decl) {
|
||||
for a in decl.inputs { v.visit_ty(a.ty, e, v); }
|
||||
for c: @constr in decl.constraints {
|
||||
for decl.inputs.each {|a| v.visit_ty(a.ty, e, v); }
|
||||
for decl.constraints.each {|c|
|
||||
v.visit_constr(c.node.path, c.span, c.node.id, e, v);
|
||||
}
|
||||
v.visit_ty(decl.output, e, v);
|
||||
|
@ -189,7 +189,7 @@ fn visit_ty<E>(t: @ty, e: E, v: vt<E>) {
|
|||
ty_path(p, _) { visit_path(p, e, v); }
|
||||
ty_constr(t, cs) {
|
||||
v.visit_ty(t, e, v);
|
||||
for tc: @spanned<constr_general_<@path, node_id>> in cs {
|
||||
for cs.each {|tc|
|
||||
v.visit_constr(tc.node.path, tc.span, tc.node.id, e, v);
|
||||
}
|
||||
}
|
||||
|
@ -207,19 +207,19 @@ fn visit_constr<E>(_operator: @path, _sp: span, _id: node_id, _e: E,
|
|||
}
|
||||
|
||||
fn visit_path<E>(p: @path, e: E, v: vt<E>) {
|
||||
for tp: @ty in p.node.types { v.visit_ty(tp, e, v); }
|
||||
for p.node.types.each {|tp| v.visit_ty(tp, e, v); }
|
||||
}
|
||||
|
||||
fn visit_pat<E>(p: @pat, e: E, v: vt<E>) {
|
||||
alt p.node {
|
||||
pat_enum(path, children) {
|
||||
visit_path(path, e, v);
|
||||
for child: @pat in children { v.visit_pat(child, e, v); }
|
||||
for children.each {|child| v.visit_pat(child, e, v); }
|
||||
}
|
||||
pat_rec(fields, _) {
|
||||
for f: field_pat in fields { v.visit_pat(f.pat, e, v); }
|
||||
for fields.each {|f| v.visit_pat(f.pat, e, v); }
|
||||
}
|
||||
pat_tup(elts) { for elt in elts { v.visit_pat(elt, e, v); } }
|
||||
pat_tup(elts) { for elts.each {|elt| v.visit_pat(elt, e, v); } }
|
||||
pat_box(inner) | pat_uniq(inner) {
|
||||
v.visit_pat(inner, e, v);
|
||||
}
|
||||
|
@ -243,8 +243,8 @@ fn visit_native_item<E>(ni: @native_item, e: E, v: vt<E>) {
|
|||
}
|
||||
|
||||
fn visit_ty_params<E>(tps: [ty_param], e: E, v: vt<E>) {
|
||||
for tp in tps {
|
||||
for bound in *tp.bounds {
|
||||
for tps.each {|tp|
|
||||
for vec::each(*tp.bounds) {|bound|
|
||||
alt bound {
|
||||
bound_iface(t) { v.visit_ty(t, e, v); }
|
||||
bound_copy | bound_send { }
|
||||
|
@ -254,8 +254,8 @@ fn visit_ty_params<E>(tps: [ty_param], e: E, v: vt<E>) {
|
|||
}
|
||||
|
||||
fn visit_fn_decl<E>(fd: fn_decl, e: E, v: vt<E>) {
|
||||
for a: arg in fd.inputs { v.visit_ty(a.ty, e, v); }
|
||||
for c: @constr in fd.constraints {
|
||||
for fd.inputs.each {|a| v.visit_ty(a.ty, e, v); }
|
||||
for fd.constraints.each {|c|
|
||||
v.visit_constr(c.node.path, c.span, c.node.id, e, v);
|
||||
}
|
||||
v.visit_ty(fd.output, e, v);
|
||||
|
@ -278,8 +278,8 @@ fn visit_fn<E>(fk: fn_kind, decl: fn_decl, body: blk, _sp: span,
|
|||
}
|
||||
|
||||
fn visit_block<E>(b: ast::blk, e: E, v: vt<E>) {
|
||||
for vi in b.node.view_items { v.visit_view_item(vi, e, v); }
|
||||
for s in b.node.stmts { v.visit_stmt(s, e, v); }
|
||||
for b.node.view_items.each {|vi| v.visit_view_item(vi, e, v); }
|
||||
for b.node.stmts.each {|s| v.visit_stmt(s, e, v); }
|
||||
visit_expr_opt(b.node.expr, e, v);
|
||||
}
|
||||
|
||||
|
@ -294,7 +294,7 @@ fn visit_stmt<E>(s: @stmt, e: E, v: vt<E>) {
|
|||
fn visit_decl<E>(d: @decl, e: E, v: vt<E>) {
|
||||
alt d.node {
|
||||
decl_local(locs) {
|
||||
for loc in locs { v.visit_local(loc, e, v); }
|
||||
for locs.each {|loc| v.visit_local(loc, e, v); }
|
||||
}
|
||||
decl_item(it) { v.visit_item(it, e, v); }
|
||||
}
|
||||
|
@ -305,7 +305,7 @@ fn visit_expr_opt<E>(eo: option<@expr>, e: E, v: vt<E>) {
|
|||
}
|
||||
|
||||
fn visit_exprs<E>(exprs: [@expr], e: E, v: vt<E>) {
|
||||
for ex: @expr in exprs { v.visit_expr(ex, e, v); }
|
||||
for exprs.each {|ex| v.visit_expr(ex, e, v); }
|
||||
}
|
||||
|
||||
fn visit_mac<E>(m: mac, e: E, v: vt<E>) {
|
||||
|
@ -328,17 +328,17 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
|
|||
}
|
||||
expr_vec(es, _) { visit_exprs(es, e, v); }
|
||||
expr_rec(flds, base) {
|
||||
for f: field in flds { v.visit_expr(f.node.expr, e, v); }
|
||||
for flds.each {|f| v.visit_expr(f.node.expr, e, v); }
|
||||
visit_expr_opt(base, e, v);
|
||||
}
|
||||
expr_tup(elts) { for el in elts { v.visit_expr(el, e, v); } }
|
||||
expr_tup(elts) { for elts.each {|el| v.visit_expr(el, e, v); } }
|
||||
expr_call(callee, args, _) {
|
||||
visit_exprs(args, e, v);
|
||||
v.visit_expr(callee, e, v);
|
||||
}
|
||||
expr_bind(callee, args) {
|
||||
v.visit_expr(callee, e, v);
|
||||
for eo: option<@expr> in args { visit_expr_opt(eo, e, v); }
|
||||
for args.each {|eo| visit_expr_opt(eo, e, v); }
|
||||
}
|
||||
expr_binary(_, a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
|
||||
expr_addr_of(_, x) | expr_unary(_, x) | expr_loop_body(x) |
|
||||
|
@ -367,7 +367,7 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
|
|||
expr_do_while(b, x) { v.visit_block(b, e, v); v.visit_expr(x, e, v); }
|
||||
expr_alt(x, arms, _) {
|
||||
v.visit_expr(x, e, v);
|
||||
for a: arm in arms { v.visit_arm(a, e, v); }
|
||||
for arms.each {|a| v.visit_arm(a, e, v); }
|
||||
}
|
||||
expr_fn(proto, decl, body, _) {
|
||||
v.visit_fn(fk_anon(proto), decl, body, ex.span, ex.id, e, v);
|
||||
|
@ -386,7 +386,7 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
|
|||
}
|
||||
expr_field(x, _, tys) {
|
||||
v.visit_expr(x, e, v);
|
||||
for tp in tys { v.visit_ty(tp, e, v); }
|
||||
for tys.each {|tp| v.visit_ty(tp, e, v); }
|
||||
}
|
||||
expr_index(a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
|
||||
expr_path(p) { visit_path(p, e, v); }
|
||||
|
@ -404,7 +404,7 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
|
|||
}
|
||||
|
||||
fn visit_arm<E>(a: arm, e: E, v: vt<E>) {
|
||||
for p: @pat in a.pats { v.visit_pat(p, e, v); }
|
||||
for a.pats.each {|p| v.visit_pat(p, e, v); }
|
||||
visit_expr_opt(a.guard, e, v);
|
||||
v.visit_block(a.body, e, v);
|
||||
}
|
||||
|
|
|
@ -67,7 +67,7 @@ fn max_key<T: copy>(m: smallintmap<T>) -> uint {
|
|||
impl <V: copy> of map::map<uint, V> for smallintmap<V> {
|
||||
fn size() -> uint {
|
||||
let mut sz = 0u;
|
||||
for item in self.v {
|
||||
for vec::each(self.v) {|item|
|
||||
alt item { some(_) { sz += 1u; } _ {} }
|
||||
}
|
||||
sz
|
||||
|
@ -90,11 +90,11 @@ impl <V: copy> of map::map<uint, V> for smallintmap<V> {
|
|||
fn find(&&key: uint) -> option<V> { find(self, key) }
|
||||
fn rehash() { fail }
|
||||
fn items(it: fn(&&uint, V)) {
|
||||
let mut idx = 0u;
|
||||
for item in self.v {
|
||||
alt item {
|
||||
let mut idx = 0u, l = self.v.len();
|
||||
while idx < l {
|
||||
alt self.v[idx] {
|
||||
some(elt) {
|
||||
it(idx, elt);
|
||||
it(idx, copy elt);
|
||||
}
|
||||
none { }
|
||||
}
|
||||
|
@ -102,16 +102,14 @@ impl <V: copy> of map::map<uint, V> for smallintmap<V> {
|
|||
}
|
||||
}
|
||||
fn keys(it: fn(&&uint)) {
|
||||
let mut idx = 0u;
|
||||
for item in self.v {
|
||||
if item != none { it(idx); }
|
||||
let mut idx = 0u, l = self.v.len();
|
||||
while idx < l {
|
||||
if self.v[idx] != none { it(idx); }
|
||||
idx += 1u;
|
||||
}
|
||||
}
|
||||
fn values(it: fn(V)) {
|
||||
for item in self.v {
|
||||
alt item { some(elt) { it(elt); } _ {} }
|
||||
}
|
||||
self.items({|_i, v| it(v)});
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -303,7 +303,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: str,
|
|||
let mut cmh_items: [@ast::meta_item] = [];
|
||||
let linkage_metas = attr::find_linkage_metas(c.node.attrs);
|
||||
attr::require_unique_names(sess.diagnostic(), linkage_metas);
|
||||
for meta: @ast::meta_item in linkage_metas {
|
||||
for linkage_metas.each {|meta|
|
||||
if attr::get_meta_item_name(meta) == "name" {
|
||||
alt attr::get_meta_item_value_str(meta) {
|
||||
some(v) { name = some(v); }
|
||||
|
@ -334,7 +334,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: str,
|
|||
let cmh_items = attr::sort_meta_items(metas.cmh_items);
|
||||
|
||||
sha.reset();
|
||||
for m_: @ast::meta_item in cmh_items {
|
||||
for cmh_items.each {|m_|
|
||||
let m = m_;
|
||||
alt m.node {
|
||||
ast::meta_name_value(key, value) {
|
||||
|
@ -349,7 +349,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: str,
|
|||
}
|
||||
}
|
||||
|
||||
for dh in dep_hashes {
|
||||
for dep_hashes.each {|dh|
|
||||
sha.input_str(len_and_str(dh));
|
||||
}
|
||||
|
||||
|
@ -475,7 +475,7 @@ fn mangle(ss: path) -> str {
|
|||
|
||||
let mut n = "_ZN"; // Begin name-sequence.
|
||||
|
||||
for s in ss {
|
||||
for ss.each {|s|
|
||||
alt s { path_name(s) | path_mod(s) {
|
||||
let sani = sanitize(s);
|
||||
n += #fmt["%u%s", str::len(sani), sani];
|
||||
|
@ -583,7 +583,7 @@ fn link_binary(sess: session,
|
|||
} else { lib_cmd = "-shared"; }
|
||||
|
||||
let cstore = sess.cstore;
|
||||
for cratepath: str in cstore::get_used_crate_files(cstore) {
|
||||
for cstore::get_used_crate_files(cstore).each {|cratepath|
|
||||
if str::ends_with(cratepath, ".rlib") {
|
||||
cc_args += [cratepath];
|
||||
cont;
|
||||
|
@ -596,10 +596,10 @@ fn link_binary(sess: session,
|
|||
}
|
||||
|
||||
let ula = cstore::get_used_link_args(cstore);
|
||||
for arg: str in ula { cc_args += [arg]; }
|
||||
for ula.each {|arg| cc_args += [arg]; }
|
||||
|
||||
let used_libs = cstore::get_used_libraries(cstore);
|
||||
for l: str in used_libs { cc_args += ["-l" + l]; }
|
||||
for used_libs.each {|l| cc_args += ["-l" + l]; }
|
||||
|
||||
if sess.building_library {
|
||||
cc_args += [lib_cmd];
|
||||
|
|
|
@ -55,7 +55,7 @@ fn get_rpaths(os: session::os, cwd: path::path, sysroot: path::path,
|
|||
#debug("sysroot: %s", sysroot);
|
||||
#debug("output: %s", output);
|
||||
#debug("libs:");
|
||||
for libpath in libs {
|
||||
for libs.each {|libpath|
|
||||
#debug(" %s", libpath);
|
||||
}
|
||||
#debug("target_triple: %s", target_triple);
|
||||
|
@ -74,7 +74,7 @@ fn get_rpaths(os: session::os, cwd: path::path, sysroot: path::path,
|
|||
|
||||
fn log_rpaths(desc: str, rpaths: [str]) {
|
||||
#debug("%s rpaths:", desc);
|
||||
for rpath in rpaths {
|
||||
for rpaths.each {|rpath|
|
||||
#debug(" %s", rpath);
|
||||
}
|
||||
}
|
||||
|
@ -179,7 +179,7 @@ fn get_install_prefix_rpath(cwd: path::path, target_triple: str) -> str {
|
|||
fn minimize_rpaths(rpaths: [str]) -> [str] {
|
||||
let set = map::str_hash::<()>();
|
||||
let mut minimized = [];
|
||||
for rpath in rpaths {
|
||||
for rpaths.each {|rpath|
|
||||
if !set.contains_key(rpath) {
|
||||
minimized += [rpath];
|
||||
set.insert(rpath, ());
|
||||
|
|
|
@ -35,7 +35,7 @@ fn declare_upcalls(targ_cfg: @session::config,
|
|||
tys: [TypeRef], rv: TypeRef) ->
|
||||
ValueRef {
|
||||
let mut arg_tys: [TypeRef] = [];
|
||||
for t: TypeRef in tys { arg_tys += [t]; }
|
||||
for tys.each {|t| arg_tys += [t]; }
|
||||
let fn_ty = T_fn(arg_tys, rv);
|
||||
ret base::decl_cdecl_fn(llmod, prefix + name, fn_ty);
|
||||
}
|
||||
|
|
|
@ -67,7 +67,7 @@ fn parse_cfgspecs(cfgspecs: [str]) -> ast::crate_cfg {
|
|||
// FIXME: It would be nice to use the parser to parse all varieties of
|
||||
// meta_item here. At the moment we just support the meta_word variant.
|
||||
let mut words = [];
|
||||
for s: str in cfgspecs { words += [attr::mk_word_item(s)]; }
|
||||
for cfgspecs.each {|s| words += [attr::mk_word_item(s)]; }
|
||||
ret words;
|
||||
}
|
||||
|
||||
|
|
|
@ -118,7 +118,7 @@ fn metas_in_cfg(cfg: ast::crate_cfg, metas: [@ast::meta_item]) -> bool {
|
|||
let has_cfg_metas = vec::len(cfg_metas) > 0u;
|
||||
if !has_cfg_metas { ret true; }
|
||||
|
||||
for cfg_mi: @ast::meta_item in cfg_metas {
|
||||
for cfg_metas.each {|cfg_mi|
|
||||
if attr::contains(cfg, cfg_mi) { ret true; }
|
||||
}
|
||||
|
||||
|
|
|
@ -268,7 +268,7 @@ fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty {
|
|||
fn mk_test_desc_vec(cx: test_ctxt) -> @ast::expr {
|
||||
#debug("building test vector from %u tests", vec::len(cx.testfns));
|
||||
let mut descs = [];
|
||||
for test: test in cx.testfns {
|
||||
for cx.testfns.each {|test|
|
||||
let test_ = test; // Satisfy alias analysis
|
||||
descs += [mk_test_desc_rec(cx, test_)];
|
||||
}
|
||||
|
|
|
@ -967,7 +967,7 @@ fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) ->
|
|||
fn tys_str(names: type_names, outer: [TypeRef], tys: [TypeRef]) -> str {
|
||||
let mut s: str = "";
|
||||
let mut first: bool = true;
|
||||
for t: TypeRef in tys {
|
||||
for tys.each {|t|
|
||||
if first { first = false; } else { s += ", "; }
|
||||
s += type_to_str_inner(names, outer, t);
|
||||
}
|
||||
|
@ -1019,7 +1019,7 @@ fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) ->
|
|||
}
|
||||
12 {
|
||||
let mut i: uint = 0u;
|
||||
for tout: TypeRef in outer0 {
|
||||
for outer0.each {|tout|
|
||||
i += 1u;
|
||||
if tout as int == ty as int {
|
||||
let n: uint = vec::len::<TypeRef>(outer0) - i;
|
||||
|
|
|
@ -142,7 +142,7 @@ fn visit_ids(item: ast::inlined_item, vfn: fn@(ast::node_id)) {
|
|||
vfn(i.id);
|
||||
alt i.node {
|
||||
ast::item_res(_, _, _, d_id, c_id) { vfn(d_id); vfn(c_id); }
|
||||
ast::item_enum(vs, _) { for v in vs { vfn(v.node.id); } }
|
||||
ast::item_enum(vs, _) { for vs.each {|v| vfn(v.node.id); } }
|
||||
_ {}
|
||||
}
|
||||
},
|
||||
|
|
|
@ -111,7 +111,7 @@ fn hash_node_id(&&node_id: int) -> uint { ret 177573u ^ (node_id as uint); }
|
|||
|
||||
fn hash_path(&&s: str) -> uint {
|
||||
let mut h = 5381u;
|
||||
for ch: u8 in str::bytes(s) { h = (h << 5u) + h ^ (ch as uint); }
|
||||
for str::each(s) {|ch| h = (h << 5u) + h ^ (ch as uint); }
|
||||
ret h;
|
||||
}
|
||||
|
||||
|
|
|
@ -117,7 +117,7 @@ fn visit_item(e: env, i: @ast::item) {
|
|||
e.sess.span_fatal(i.span, "library '" + native_name +
|
||||
"' already added: can't specify link_args.");
|
||||
}
|
||||
for a: ast::attribute in link_args {
|
||||
for link_args.each {|a|
|
||||
alt attr::get_meta_item_value_str(attr::attr_meta(a)) {
|
||||
some(linkarg) {
|
||||
cstore::add_used_link_args(cstore, linkarg);
|
||||
|
@ -153,11 +153,11 @@ fn metadata_matches(extern_metas: [@ast::meta_item],
|
|||
vec::len(local_metas), vec::len(extern_metas));
|
||||
|
||||
#debug("crate metadata:");
|
||||
for have: @ast::meta_item in extern_metas {
|
||||
for extern_metas.each {|have|
|
||||
#debug(" %s", pprust::meta_item_to_str(*have));
|
||||
}
|
||||
|
||||
for needed: @ast::meta_item in local_metas {
|
||||
for local_metas.each {|needed|
|
||||
#debug("looking for %s", pprust::meta_item_to_str(*needed));
|
||||
if !attr::contains(extern_metas, needed) {
|
||||
#debug("missing %s", pprust::meta_item_to_str(*needed));
|
||||
|
@ -375,7 +375,7 @@ fn resolve_crate_deps(e: env, cdata: @[u8]) -> cstore::cnum_map {
|
|||
// The map from crate numbers in the crate we're resolving to local crate
|
||||
// numbers
|
||||
let cnum_map = int_hash::<ast::crate_num>();
|
||||
for dep: decoder::crate_dep in decoder::get_crate_deps(cdata) {
|
||||
for decoder::get_crate_deps(cdata).each {|dep|
|
||||
let extrn_cnum = dep.cnum;
|
||||
let cname = dep.ident;
|
||||
// FIXME: We really need to know the linkage metas of our transitive
|
||||
|
|
|
@ -41,7 +41,8 @@ fn lookup_defs(cstore: cstore::cstore, cnum: ast::crate_num,
|
|||
path: [ast::ident]) -> [ast::def] {
|
||||
let mut result = [];
|
||||
#debug("lookup_defs: path = %? cnum = %?", path, cnum);
|
||||
for (c, data, def) in resolve_path(cstore, cnum, path) {
|
||||
for resolve_path(cstore, cnum, path).each {|elt|
|
||||
let (c, data, def) = elt;
|
||||
result += [decoder::lookup_def(c, data, def)];
|
||||
}
|
||||
ret result;
|
||||
|
@ -64,7 +65,7 @@ fn resolve_path(cstore: cstore::cstore, cnum: ast::crate_num,
|
|||
#debug("resolve_path %s in crates[%d]:%s",
|
||||
str::connect(path, "::"), cnum, cm.name);
|
||||
let mut result = [];
|
||||
for def in decoder::resolve_path(path, cm.data) {
|
||||
for decoder::resolve_path(path, cm.data).each {|def|
|
||||
if def.crate == ast::local_crate {
|
||||
result += [(cnum, cm.data, def)];
|
||||
} else {
|
||||
|
|
|
@ -154,7 +154,7 @@ fn get_dep_hashes(cstore: cstore) -> [str] {
|
|||
}
|
||||
let sorted = std::sort::merge_sort(lteq, result);
|
||||
#debug("sorted:");
|
||||
for x in sorted {
|
||||
for sorted.each {|x|
|
||||
#debug(" hash[%s]: %s", x.name, x.hash);
|
||||
}
|
||||
fn mapper(ch: crate_hash) -> str { ret ch.hash; }
|
||||
|
|
|
@ -204,7 +204,7 @@ fn resolve_path(path: [ast::ident], data: @[u8]) -> [ast::def_id] {
|
|||
let eqer = bind eq_item(_, s);
|
||||
let mut result: [ast::def_id] = [];
|
||||
#debug("resolve_path: looking up %s", s);
|
||||
for doc: ebml::doc in lookup_hash(paths, eqer, hash_path(s)) {
|
||||
for lookup_hash(paths, eqer, hash_path(s)).each {|doc|
|
||||
let did_doc = ebml::get_doc(doc, tag_def_id);
|
||||
result += [parse_def_id(ebml::doc_data(did_doc))];
|
||||
}
|
||||
|
@ -359,7 +359,7 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
|
|||
let mut infos: [ty::variant_info] = [];
|
||||
let variant_ids = enum_variant_ids(item, cdata);
|
||||
let mut disr_val = 0;
|
||||
for did: ast::def_id in variant_ids {
|
||||
for variant_ids.each {|did|
|
||||
let item = find_item(did.node, items);
|
||||
let ctor_ty = item_type({crate: cdata.cnum, node: id}, item,
|
||||
tcx, cdata);
|
||||
|
@ -367,7 +367,7 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
|
|||
let mut arg_tys: [ty::t] = [];
|
||||
alt ty::get(ctor_ty).struct {
|
||||
ty::ty_fn(f) {
|
||||
for a: ty::arg in f.inputs { arg_tys += [a.ty]; }
|
||||
for f.inputs.each {|a| arg_tys += [a.ty]; }
|
||||
}
|
||||
_ { /* Nullary enum variant. */ }
|
||||
}
|
||||
|
@ -560,7 +560,7 @@ fn get_attributes(md: ebml::doc) -> [ast::attribute] {
|
|||
}
|
||||
|
||||
fn list_meta_items(meta_items: ebml::doc, out: io::writer) {
|
||||
for mi: @ast::meta_item in get_meta_items(meta_items) {
|
||||
for get_meta_items(meta_items).each {|mi|
|
||||
out.write_str(#fmt["%s\n", pprust::meta_item_to_str(*mi)]);
|
||||
}
|
||||
}
|
||||
|
@ -568,7 +568,7 @@ fn list_meta_items(meta_items: ebml::doc, out: io::writer) {
|
|||
fn list_crate_attributes(md: ebml::doc, hash: str, out: io::writer) {
|
||||
out.write_str(#fmt("=Crate Attributes (%s)=\n", hash));
|
||||
|
||||
for attr: ast::attribute in get_attributes(md) {
|
||||
for get_attributes(md).each {|attr|
|
||||
out.write_str(#fmt["%s\n", pprust::attribute_to_str(attr)]);
|
||||
}
|
||||
|
||||
|
@ -597,7 +597,7 @@ fn get_crate_deps(data: @[u8]) -> [crate_dep] {
|
|||
fn list_crate_deps(data: @[u8], out: io::writer) {
|
||||
out.write_str("=External Dependencies=\n");
|
||||
|
||||
for dep: crate_dep in get_crate_deps(data) {
|
||||
for get_crate_deps(data).each {|dep|
|
||||
out.write_str(#fmt["%d %s\n", dep.cnum, dep.ident]);
|
||||
}
|
||||
|
||||
|
|
|
@ -58,7 +58,7 @@ type entry<T> = {val: T, pos: uint};
|
|||
|
||||
fn encode_enum_variant_paths(ebml_w: ebml::writer, variants: [variant],
|
||||
path: [str], &index: [entry<str>]) {
|
||||
for variant: variant in variants {
|
||||
for variants.each {|variant|
|
||||
add_to_index(ebml_w, path, index, variant.node.name);
|
||||
ebml_w.wr_tag(tag_paths_data_item) {||
|
||||
encode_name(ebml_w, variant.node.name);
|
||||
|
@ -76,7 +76,7 @@ fn add_to_index(ebml_w: ebml::writer, path: [str], &index: [entry<str>],
|
|||
|
||||
fn encode_native_module_item_paths(ebml_w: ebml::writer, nmod: native_mod,
|
||||
path: [str], &index: [entry<str>]) {
|
||||
for nitem: @native_item in nmod.items {
|
||||
for nmod.items.each {|nitem|
|
||||
add_to_index(ebml_w, path, index, nitem.ident);
|
||||
encode_named_def_id(ebml_w, nitem.ident, local_def(nitem.id));
|
||||
}
|
||||
|
@ -84,7 +84,7 @@ fn encode_native_module_item_paths(ebml_w: ebml::writer, nmod: native_mod,
|
|||
|
||||
fn encode_class_item_paths(ebml_w: ebml::writer,
|
||||
items: [@class_member], path: [str], &index: [entry<str>]) {
|
||||
for it in items {
|
||||
for items.each {|it|
|
||||
alt ast_util::class_member_privacy(it) {
|
||||
priv { cont; }
|
||||
pub {
|
||||
|
@ -102,7 +102,7 @@ fn encode_class_item_paths(ebml_w: ebml::writer,
|
|||
fn encode_module_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt,
|
||||
module: _mod, path: [str], &index: [entry<str>]) {
|
||||
// FIXME factor out add_to_index/start/encode_name/encode_def_id/end ops
|
||||
for it: @item in module.items {
|
||||
for module.items.each {|it|
|
||||
if !ecx.ccx.reachable.contains_key(it.id) ||
|
||||
!ast_util::is_exported(it.ident, module) { cont; }
|
||||
alt it.node {
|
||||
|
@ -200,7 +200,7 @@ fn encode_reexport_paths(ebml_w: ebml::writer,
|
|||
ecx: @encode_ctxt, &index: [entry<str>]) {
|
||||
let tcx = ecx.ccx.tcx;
|
||||
ecx.ccx.exp_map.items {|exp_id, defs|
|
||||
for def in defs {
|
||||
for defs.each {|def|
|
||||
if !def.reexp { cont; }
|
||||
let path = alt check tcx.items.get(exp_id) {
|
||||
ast_map::node_export(_, path) { ast_map::path_to_str(*path) }
|
||||
|
@ -230,7 +230,7 @@ fn encode_type_param_bounds(ebml_w: ebml::writer, ecx: @encode_ctxt,
|
|||
tcx: ecx.ccx.tcx,
|
||||
reachable: ecx.ccx.reachable,
|
||||
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
|
||||
for param in params {
|
||||
for params.each {|param|
|
||||
ebml_w.start_tag(tag_items_data_item_ty_param_bounds);
|
||||
let bs = ecx.ccx.tcx.ty_param_bounds.get(param.id);
|
||||
tyencode::enc_bounds(ebml_w.writer, ty_str_ctxt, bs);

@ -295,7 +295,7 @@ fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::writer,
let mut disr_val = 0;
let mut i = 0;
let vi = ty::enum_variants(ecx.ccx.tcx, {crate: local_crate, node: id});
for variant: variant in variants {
for variants.each {|variant|
*index += [{val: variant.node.id, pos: ebml_w.writer.tell()}];
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(variant.node.id));

@ -347,7 +347,7 @@ fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: ebml::writer, md: _mod,
encode_name(ebml_w, name);
alt ecx.ccx.maps.impl_map.get(id) {
list::cons(impls, @list::nil) {
for i in *impls {
for vec::each(*impls) {|i|
if ast_util::is_exported(i.ident, md) {
ebml_w.wr_tagged_str(tag_mod_impl, def_to_str(i.did));
}

@ -373,7 +373,7 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer,
-> [entry<int>] {
let index = @mut [];
let tcx = ecx.ccx.tcx;
for ci in items {
for items.each {|ci|
/* We encode both private and public fields -- need to include
private fields to get the offsets right */
alt ci.node {

@ -409,7 +409,7 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer,
}
}
}
}
};
*index
}

@ -548,7 +548,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ebml_w, item.ident);
for v: variant in variants {
for variants.each {|v|
encode_variant_id(ebml_w, local_def(v.node.id));
}
astencode::encode_inlined_item(ecx, ebml_w, path, ii_item(item));

@ -579,14 +579,14 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
for methods, write all the stuff get_iface_method
needs to know*/
let (fs,ms) = ast_util::split_class_items(items);
for f in fs {
for fs.each {|f|
ebml_w.start_tag(tag_item_field);
encode_privacy(ebml_w, f.privacy);
encode_name(ebml_w, f.ident);
encode_def_id(ebml_w, local_def(f.id));
ebml_w.end_tag();
}
for m in ms {
for ms.each {|m|
alt m.privacy {
priv { /* do nothing */ }
pub {

@ -637,7 +637,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ebml_w, item.ident);
for m in methods {
for methods.each {|m|
ebml_w.start_tag(tag_item_method);
ebml_w.writer.write(str::bytes(def_to_str(local_def(m.id))));
ebml_w.end_tag();

@ -657,7 +657,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
ebml_w.end_tag();

let impl_path = path + [ast_map::path_name(item.ident)];
for m in methods {
for methods.each {|m|
*index += [{val: m.id, pos: ebml_w.writer.tell()}];
encode_info_for_method(ecx, ebml_w, impl_path,
should_inline(m.attrs), item.id, m, tps + m.tps);

@ -672,7 +672,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ebml_w, item.ident);
let mut i = 0u;
for mty in *ty::iface_methods(tcx, local_def(item.id)) {
for vec::each(*ty::iface_methods(tcx, local_def(item.id))) {|mty|
ebml_w.start_tag(tag_item_method);
encode_name(ebml_w, mty.ident);
encode_type_param_bounds(ebml_w, ecx, ms[i].tps);

@ -765,13 +765,13 @@ fn create_index<T: copy>(index: [entry<T>], hash_fn: fn@(T) -> uint) ->
[@[entry<T>]] {
let mut buckets: [@mut [entry<T>]] = [];
uint::range(0u, 256u) {|_i| buckets += [@mut []]; };
for elt: entry<T> in index {
for index.each {|elt|
let h = hash_fn(elt.val);
*buckets[h % 256u] += [elt];
}

let mut buckets_frozen = [];
for bucket: @mut [entry<T>] in buckets {
for buckets.each {|bucket|
buckets_frozen += [@*bucket];
}
ret buckets_frozen;

@ -783,10 +783,10 @@ fn encode_index<T>(ebml_w: ebml::writer, buckets: [@[entry<T>]],
ebml_w.start_tag(tag_index);
let mut bucket_locs: [uint] = [];
ebml_w.start_tag(tag_index_buckets);
for bucket: @[entry<T>] in buckets {
for buckets.each {|bucket|
bucket_locs += [ebml_w.writer.tell()];
ebml_w.start_tag(tag_index_buckets_bucket);
for elt: entry<T> in *bucket {
for vec::each(*bucket) {|elt|
ebml_w.start_tag(tag_index_buckets_bucket_elt);
writer.write_be_uint(elt.pos, 4u);
write_fn(writer, elt.val);

@ -796,7 +796,7 @@ fn encode_index<T>(ebml_w: ebml::writer, buckets: [@[entry<T>]],
}
ebml_w.end_tag();
ebml_w.start_tag(tag_index_table);
for pos: uint in bucket_locs { writer.write_be_uint(pos, 4u); }
for bucket_locs.each {|pos| writer.write_be_uint(pos, 4u); }
ebml_w.end_tag();
ebml_w.end_tag();
}

@ -836,7 +836,7 @@ fn encode_meta_item(ebml_w: ebml::writer, mi: meta_item) {
ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(str::bytes(name));
ebml_w.end_tag();
for inner_item: @meta_item in items {
for items.each {|inner_item|
encode_meta_item(ebml_w, *inner_item);
}
ebml_w.end_tag();

@ -846,7 +846,7 @@ fn encode_meta_item(ebml_w: ebml::writer, mi: meta_item) {

fn encode_attributes(ebml_w: ebml::writer, attrs: [attribute]) {
ebml_w.start_tag(tag_attributes);
for attr: attribute in attrs {
for attrs.each {|attr|
ebml_w.start_tag(tag_attribute);
encode_meta_item(ebml_w, attr.node.value);
ebml_w.end_tag();

@ -885,7 +885,7 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> [attribute] {

let mut attrs: [attribute] = [];
let mut found_link_attr = false;
for attr: attribute in crate.node.attrs {
for crate.node.attrs.each {|attr|
attrs +=
if attr::get_attr_name(attr) != "link" {
[attr]

@ -923,7 +923,7 @@ fn encode_crate_deps(ebml_w: ebml::writer, cstore: cstore::cstore) {

// Sanity-check the crate numbers
let mut expected_cnum = 1;
for n: numname in pairs {
for pairs.each {|n|
assert (n.crate == expected_cnum);
expected_cnum += 1;
}

@ -940,7 +940,7 @@ fn encode_crate_deps(ebml_w: ebml::writer, cstore: cstore::cstore) {
// FIXME: This is not nearly enough to support correct versioning
// but is enough to get transitive crate dependencies working.
ebml_w.start_tag(tag_crate_deps);
for cname: str in get_ordered_names(cstore) {
for get_ordered_names(cstore).each {|cname|
ebml_w.start_tag(tag_crate_dep);
ebml_w.writer.write(str::bytes(cname));
ebml_w.end_tag();

@ -392,25 +392,20 @@ fn parse_def_id(buf: [u8]) -> ast::def_id {
#error("didn't find ':' when parsing def id");
fail;
}
let crate_part = vec::slice::<u8>(buf, 0u, colon_idx);
let def_part = vec::slice::<u8>(buf, colon_idx + 1u, len);
let crate_part = vec::slice(buf, 0u, colon_idx);
let def_part = vec::slice(buf, colon_idx + 1u, len);

let mut crate_part_vec = [];
let mut def_part_vec = [];
for b: u8 in crate_part { crate_part_vec += [b]; }
for b: u8 in def_part { def_part_vec += [b]; }

let crate_num = alt uint::parse_buf(crate_part_vec, 10u) {
let crate_num = alt uint::parse_buf(crate_part, 10u) {
some(cn) { cn as int }
none { fail (#fmt("internal error: parse_def_id: error parsing %? \
as crate",
crate_part_vec)); }
crate_part)); }
};
let def_num = alt uint::parse_buf(def_part_vec, 10u) {
let def_num = alt uint::parse_buf(def_part, 10u) {
some(dn) { dn as int }
none { fail (#fmt("internal error: parse_def_id: error parsing %? \
as id",
def_part_vec)); }
def_part)); }
};
ret {crate: crate_num, node: def_num};
}

@ -181,19 +181,19 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) {
w.write_str("t[");
w.write_str(cx.ds(def));
w.write_char('|');
for t: ty::t in tys { enc_ty(w, cx, t); }
for tys.each {|t| enc_ty(w, cx, t); }
w.write_char(']');
}
ty::ty_iface(def, tys) {
w.write_str("x[");
w.write_str(cx.ds(def));
w.write_char('|');
for t: ty::t in tys { enc_ty(w, cx, t); }
for tys.each {|t| enc_ty(w, cx, t); }
w.write_char(']');
}
ty::ty_tup(ts) {
w.write_str("T[");
for t in ts { enc_ty(w, cx, t); }
for ts.each {|t| enc_ty(w, cx, t); }
w.write_char(']');
}
ty::ty_box(mt) { w.write_char('@'); enc_mt(w, cx, mt); }

@ -207,7 +207,7 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) {
ty::ty_vec(mt) { w.write_char('I'); enc_mt(w, cx, mt); }
ty::ty_rec(fields) {
w.write_str("R[");
for field: ty::field in fields {
for fields.each {|field|
w.write_str(field.ident);
w.write_char('=');
enc_mt(w, cx, field.mt);

@ -223,7 +223,7 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) {
w.write_str(cx.ds(def));
w.write_char('|');
enc_ty(w, cx, ty);
for t: ty::t in tps { enc_ty(w, cx, t); }
for tps.each {|t| enc_ty(w, cx, t); }
w.write_char(']');
}
ty::ty_var(id) {

@ -238,7 +238,7 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) {
}
ty::ty_self(tps) {
w.write_str("s[");
for t in tps { enc_ty(w, cx, t); }
for tps.each {|t| enc_ty(w, cx, t); }
w.write_char(']');
}
ty::ty_type { w.write_char('Y'); }

@ -248,7 +248,7 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) {
ty::ty_constr(ty, cs) {
w.write_str("A[");
enc_ty(w, cx, ty);
for tc: @ty::type_constr in cs { enc_ty_constr(w, cx, tc); }
for cs.each {|tc| enc_ty_constr(w, cx, tc); }
w.write_char(']');
}
ty::ty_opaque_box { w.write_char('B'); }

@ -260,7 +260,7 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) {
w.write_str(s);
#debug("~~~~ %s", "|");
w.write_str("|");
for t: ty::t in tys { enc_ty(w, cx, t); }
for tys.each {|t| enc_ty(w, cx, t); }
#debug("~~~~ %s", "]");
w.write_char(']');
}

@ -288,13 +288,13 @@ fn enc_mode(w: io::writer, cx: @ctxt, m: mode) {

fn enc_ty_fn(w: io::writer, cx: @ctxt, ft: ty::fn_ty) {
w.write_char('[');
for arg: ty::arg in ft.inputs {
for ft.inputs.each {|arg|
enc_mode(w, cx, arg.mode);
enc_ty(w, cx, arg.ty);
}
w.write_char(']');
let mut colon = true;
for c: @ty::constr in ft.constraints {
for ft.constraints.each {|c|
if colon {
w.write_char(':');
colon = false;

@ -314,7 +314,7 @@ fn enc_constr(w: io::writer, cx: @ctxt, c: @ty::constr) {
w.write_str(cx.ds(c.node.id));
w.write_char('|');
let mut semi = false;
for a: @constr_arg in c.node.args {
for c.node.args.each {|a|
if semi { w.write_char(';'); } else { semi = true; }
alt a.node {
carg_base { w.write_char('*'); }

@ -331,7 +331,7 @@ fn enc_ty_constr(w: io::writer, cx: @ctxt, c: @ty::type_constr) {
w.write_str(cx.ds(c.node.id));
w.write_char('|');
let mut semi = false;
for a: @ty::ty_constr_arg in c.node.args {
for c.node.args.each {|a|
if semi { w.write_char(';'); } else { semi = true; }
alt a.node {
carg_base { w.write_char('*'); }

@ -343,7 +343,7 @@ fn enc_ty_constr(w: io::writer, cx: @ctxt, c: @ty::type_constr) {
}

fn enc_bounds(w: io::writer, cx: @ctxt, bs: @[ty::param_bound]) {
for bound in *bs {
for vec::each(*bs) {|bound|
alt bound {
ty::bound_send { w.write_char('S'); }
ty::bound_copy { w.write_char('C'); }
@ -136,13 +136,13 @@ fn visit_expr(cx: @ctx, ex: @ast::expr, sc: scope, v: vt<scope>) {
|
|||
|
||||
fn visit_block(cx: @ctx, b: ast::blk, sc: scope, v: vt<scope>) {
|
||||
let sc = sc;
|
||||
for stmt in b.node.stmts {
|
||||
for b.node.stmts.each {|stmt|
|
||||
alt stmt.node {
|
||||
ast::stmt_decl(@{node: ast::decl_item(it), _}, _) {
|
||||
v.visit_item(it, sc, v);
|
||||
}
|
||||
ast::stmt_decl(@{node: ast::decl_local(locs), _}, _) {
|
||||
for loc in locs {
|
||||
for locs.each {|loc|
|
||||
alt loc.node.init {
|
||||
some(init) {
|
||||
if init.op == ast::init_move {
|
||||
|
@ -245,11 +245,11 @@ fn check_call(cx: @ctx, sc: scope, f: @ast::expr, args: [@ast::expr],
|
|||
};
|
||||
if f_may_close {
|
||||
let mut i = 0u;
|
||||
for b in bindings {
|
||||
for bindings.each {|b|
|
||||
let mut unsfe = vec::len(b.unsafe_tys) > 0u;
|
||||
alt b.root_var {
|
||||
some(rid) {
|
||||
for o in sc.bs {
|
||||
for sc.bs.each {|o|
|
||||
if o.node_id == rid && vec::len(o.unsafe_tys) > 0u {
|
||||
unsfe = true; break;
|
||||
}
|
||||
|
@ -265,8 +265,8 @@ fn check_call(cx: @ctx, sc: scope, f: @ast::expr, args: [@ast::expr],
|
|||
}
|
||||
}
|
||||
let mut j = 0u;
|
||||
for b in bindings {
|
||||
for unsafe_ty in b.unsafe_tys {
|
||||
for bindings.each {|b|
|
||||
for b.unsafe_tys.each {|unsafe_ty|
|
||||
vec::iteri(arg_ts) {|i, arg_t|
|
||||
let mut_alias =
|
||||
(ast::by_mutbl_ref == ty::arg_mode(cx.tcx, arg_t));
|
||||
|
@ -288,13 +288,13 @@ fn check_call(cx: @ctx, sc: scope, f: @ast::expr, args: [@ast::expr],
|
|||
}
|
||||
|
||||
// Ensure we're not passing a root by mut alias.
|
||||
for {node: node, arg: arg} in mut_roots {
|
||||
for b in bindings {
|
||||
if b.node_id != arg.id {
|
||||
for mut_roots.each {|mroot|
|
||||
for bindings.each {|b|
|
||||
if b.node_id != mroot.arg.id {
|
||||
alt b.root_var {
|
||||
some(root) {
|
||||
if node == root && cant_copy(*cx, b) {
|
||||
err(*cx, arg.span,
|
||||
if mroot.node == root && cant_copy(*cx, b) {
|
||||
err(*cx, mroot.arg.span,
|
||||
"passing a mut reference to a \
|
||||
variable that roots another reference");
|
||||
break;
|
||||
|
@ -308,14 +308,14 @@ fn check_call(cx: @ctx, sc: scope, f: @ast::expr, args: [@ast::expr],
|
|||
// Check the bodies of block arguments against the current scope
|
||||
if blocks.len() > 0u {
|
||||
let inner_sc = {bs: bindings + sc.bs, invalid: sc.invalid};
|
||||
for blk in blocks {
|
||||
for blocks.each {|blk|
|
||||
alt check blk.node {
|
||||
ast::expr_fn_block(_, body) {
|
||||
v.visit_block(body, inner_sc, v);
|
||||
}
|
||||
}
|
||||
}
|
||||
for binding in bindings {
|
||||
for bindings.each {|binding|
|
||||
test_scope(*cx, sc, binding, none);
|
||||
}
|
||||
}
|
||||
|
@ -327,7 +327,7 @@ fn check_alt(cx: ctx, input: @ast::expr, arms: [ast::arm], sc: scope,
|
|||
let orig_invalid = *sc.invalid;
|
||||
let mut all_invalid = orig_invalid;
|
||||
let root = expr_root(cx, input, true);
|
||||
for a: ast::arm in arms {
|
||||
for arms.each {|a|
|
||||
let mut new_bs = sc.bs;
|
||||
let root_var = path_def_id(cx, root.ex);
|
||||
let pat_id_map = pat_util::pat_id_map(cx.tcx.def_map, a.pats[0]);
|
||||
|
@ -336,8 +336,8 @@ fn check_alt(cx: ctx, input: @ast::expr, arms: [ast::arm], sc: scope,
|
|||
mut unsafe_tys: [unsafe_ty],
|
||||
span: span};
|
||||
let mut binding_info: [info] = [];
|
||||
for pat in a.pats {
|
||||
for proot in pattern_roots(cx.tcx, root.mutbl, pat) {
|
||||
for a.pats.each {|pat|
|
||||
for pattern_roots(cx.tcx, root.mutbl, pat).each {|proot|
|
||||
let canon_id = pat_id_map.get(proot.name);
|
||||
alt vec::find(binding_info, {|x| x.id == canon_id}) {
|
||||
some(s) { s.unsafe_tys += unsafe_set(proot.mutbl); }
|
||||
|
@ -350,14 +350,14 @@ fn check_alt(cx: ctx, input: @ast::expr, arms: [ast::arm], sc: scope,
|
|||
}
|
||||
}
|
||||
}
|
||||
for info in binding_info {
|
||||
for binding_info.each {|info|
|
||||
new_bs += [mk_binding(cx, info.id, info.span, root_var,
|
||||
copy info.unsafe_tys)];
|
||||
}
|
||||
};
|
||||
*sc.invalid = orig_invalid;
|
||||
visit::visit_arm(a, {bs: new_bs with sc}, v);
|
||||
all_invalid = join_invalid(all_invalid, *sc.invalid);
|
||||
}
|
||||
};
|
||||
*sc.invalid = all_invalid;
|
||||
}
|
||||
|
||||
|
@ -378,7 +378,7 @@ fn check_for(cx: ctx, local: @ast::local, seq: @ast::expr, blk: ast::blk,
|
|||
}
|
||||
let root_var = path_def_id(cx, root.ex);
|
||||
let mut new_bs = sc.bs;
|
||||
for proot in pattern_roots(cx.tcx, cur_mutbl, local.node.pat) {
|
||||
for pattern_roots(cx.tcx, cur_mutbl, local.node.pat).each {|proot|
|
||||
new_bs += [mk_binding(cx, proot.id, proot.span, root_var,
|
||||
unsafe_set(proot.mutbl))];
|
||||
}
|
||||
|
@ -392,10 +392,10 @@ fn check_var(cx: ctx, ex: @ast::expr, p: @ast::path, id: ast::node_id,
|
|||
let my_defnum = ast_util::def_id_of_def(def).node;
|
||||
let my_local_id = local_id_of_node(cx, my_defnum);
|
||||
let var_t = ty::expr_ty(cx.tcx, ex);
|
||||
for b in sc.bs {
|
||||
for sc.bs.each {|b|
|
||||
// excludes variables introduced since the alias was made
|
||||
if my_local_id < b.local_id {
|
||||
for unsafe_ty in b.unsafe_tys {
|
||||
for b.unsafe_tys.each {|unsafe_ty|
|
||||
if ty_can_unsafely_include(cx, unsafe_ty, var_t, assign) {
|
||||
let inv = @{reason: val_taken, node_id: b.node_id,
|
||||
sp: ex.span, path: p};
|
||||
|
@ -413,7 +413,7 @@ fn check_lval(cx: @ctx, dest: @ast::expr, sc: scope, v: vt<scope>) {
|
|||
ast::expr_path(p) {
|
||||
let def = cx.tcx.def_map.get(dest.id);
|
||||
let dnum = ast_util::def_id_of_def(def).node;
|
||||
for b in sc.bs {
|
||||
for sc.bs.each {|b|
|
||||
if b.root_var == some(dnum) {
|
||||
let inv = @{reason: overwritten, node_id: b.node_id,
|
||||
sp: dest.span, path: p};
|
||||
|
@ -454,7 +454,7 @@ fn test_scope(cx: ctx, sc: scope, b: binding, p: option<@ast::path>) {
|
|||
let mut prob = find_invalid(b.node_id, *sc.invalid);
|
||||
alt b.root_var {
|
||||
some(dn) {
|
||||
for other in sc.bs {
|
||||
for sc.bs.each {|other|
|
||||
if !is_none(prob) { break; }
|
||||
if other.node_id == dn {
|
||||
prob = find_invalid(other.node_id, *sc.invalid);
|
||||
|
@ -507,7 +507,7 @@ fn ty_can_unsafely_include(cx: ctx, needle: unsafe_ty, haystack: ty::t,
|
|||
} { ret true; }
|
||||
alt ty::get(haystack).struct {
|
||||
ty::ty_enum(_, ts) {
|
||||
for t: ty::t in ts {
|
||||
for ts.each {|t|
|
||||
if helper(tcx, needle, t, mutbl) { ret true; }
|
||||
}
|
||||
ret false;
|
||||
|
@ -516,7 +516,7 @@ fn ty_can_unsafely_include(cx: ctx, needle: unsafe_ty, haystack: ty::t,
|
|||
ret helper(tcx, needle, mt.ty, get_mutbl(mutbl, mt));
|
||||
}
|
||||
ty::ty_rec(fields) {
|
||||
for f: ty::field in fields {
|
||||
for fields.each {|f|
|
||||
if helper(tcx, needle, f.mt.ty, get_mutbl(mutbl, f.mt)) {
|
||||
ret true;
|
||||
}
|
||||
|
@ -524,7 +524,7 @@ fn ty_can_unsafely_include(cx: ctx, needle: unsafe_ty, haystack: ty::t,
|
|||
ret false;
|
||||
}
|
||||
ty::ty_tup(ts) {
|
||||
for t in ts { if helper(tcx, needle, t, mutbl) { ret true; } }
|
||||
for ts.each {|t| if helper(tcx, needle, t, mutbl) { ret true; } }
|
||||
ret false;
|
||||
}
|
||||
ty::ty_fn({proto: ast::proto_bare, _}) { ret false; }
|
||||
|
@ -571,12 +571,12 @@ fn copy_is_expensive(tcx: ty::ctxt, ty: ty::t) -> bool {
|
|||
ty::ty_uniq(mt) { 1u + score_ty(tcx, mt.ty) }
|
||||
ty::ty_enum(_, ts) | ty::ty_tup(ts) {
|
||||
let mut sum = 0u;
|
||||
for t in ts { sum += score_ty(tcx, t); }
|
||||
for ts.each {|t| sum += score_ty(tcx, t); }
|
||||
sum
|
||||
}
|
||||
ty::ty_rec(fs) {
|
||||
let mut sum = 0u;
|
||||
for f in fs { sum += score_ty(tcx, f.mt.ty); }
|
||||
for fs.each {|f| sum += score_ty(tcx, f.mt.ty); }
|
||||
sum
|
||||
}
|
||||
_ {
|
||||
|
@ -608,11 +608,11 @@ fn pattern_roots(tcx: ty::ctxt, mutbl: option<unsafe_ty>, pat: @ast::pat)
|
|||
ast::pat_wild | ast::pat_lit(_) | ast::pat_range(_, _) |
|
||||
ast::pat_ident(_, _) {}
|
||||
ast::pat_enum(_, ps) | ast::pat_tup(ps) {
|
||||
for p in ps { walk(tcx, mutbl, p, set); }
|
||||
for ps.each {|p| walk(tcx, mutbl, p, set); }
|
||||
}
|
||||
ast::pat_rec(fs, _) {
|
||||
let ty = ty::node_id_to_type(tcx, pat.id);
|
||||
for f in fs {
|
||||
for fs.each {|f|
|
||||
let m = ty::get_field(ty, f.ident).mt.mutbl != ast::m_imm,
|
||||
c = if m { some(contains(ty)) } else { mutbl };
|
||||
walk(tcx, c, f.pat, set);
|
||||
|
@ -649,7 +649,7 @@ fn expr_root(cx: ctx, ex: @ast::expr, autoderef: bool)
|
|||
-> {ex: @ast::expr, mutbl: option<unsafe_ty>} {
|
||||
let base_root = mutbl::expr_root_(cx.tcx, none, ex, autoderef);
|
||||
let mut unsafe_ty = none;
|
||||
for d in *base_root.ds {
|
||||
for vec::each(*base_root.ds) {|d|
|
||||
if d.mutbl { unsafe_ty = some(contains(d.outer_t)); break; }
|
||||
}
|
||||
ret {ex: base_root.ex, mutbl: unsafe_ty};
|
||||
|
|
|
@ -115,7 +115,7 @@ fn map_decoded_item(sess: session, map: map, path: path, ii: inlined_item) {
|
|||
|
||||
fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
|
||||
sp: codemap::span, id: node_id, cx: ctx, v: vt) {
|
||||
for a in decl.inputs {
|
||||
for decl.inputs.each {|a|
|
||||
cx.map.insert(a.id, node_arg(a, cx.local_id));
|
||||
cx.local_id += 1u;
|
||||
}
|
||||
|
@ -162,7 +162,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
|
|||
alt i.node {
|
||||
item_impl(_, _, _, ms) {
|
||||
let impl_did = ast_util::local_def(i.id);
|
||||
for m in ms {
|
||||
for ms.each {|m|
|
||||
map_method(impl_did, extend(cx, i.ident), m, cx);
|
||||
}
|
||||
}
|
||||
|
@ -171,7 +171,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
|
|||
cx.map.insert(dtor_id, node_item(i, item_path));
|
||||
}
|
||||
item_enum(vs, _) {
|
||||
for v in vs {
|
||||
for vs.each {|v|
|
||||
cx.map.insert(v.node.id, node_variant(
|
||||
v, i, extend(cx, i.ident)));
|
||||
}
|
||||
|
@ -181,7 +181,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
|
|||
either::left(msg) { cx.sess.span_fatal(i.span, msg); }
|
||||
either::right(abi) { abi }
|
||||
};
|
||||
for nitem in nm.items {
|
||||
for nm.items.each {|nitem|
|
||||
cx.map.insert(nitem.id, node_native_item(nitem, abi, @cx.path));
|
||||
}
|
||||
}
|
||||
|
@ -189,7 +189,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
|
|||
cx.map.insert(ctor.node.id, node_ctor(i, item_path));
|
||||
let d_id = ast_util::local_def(i.id);
|
||||
let p = extend(cx, i.ident);
|
||||
for ci in items {
|
||||
for items.each {|ci|
|
||||
// only need to handle methods
|
||||
alt ci.node {
|
||||
class_method(m) { map_method(d_id, p, m, cx); }
|
||||
|
@ -212,7 +212,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
|
|||
fn map_view_item(vi: @view_item, cx: ctx, _v: vt) {
|
||||
alt vi.node {
|
||||
view_item_export(vps) {
|
||||
for vp in vps {
|
||||
for vps.each {|vp|
|
||||
let (id, name) = alt vp.node {
|
||||
view_path_simple(nm, _, id) { (id, nm) }
|
||||
view_path_glob(pth, id) | view_path_list(pth, _, id) {
|
||||
|
|
|
@ -27,7 +27,7 @@ fn visit_expr(ex: @expr, cx: ctx, v: visit::vt<ctx>) {
|
|||
cx.allow_block = true;
|
||||
v.visit_expr(f, cx, v);
|
||||
let mut i = 0u;
|
||||
for arg_t in ty::ty_fn_args(ty::expr_ty(cx.tcx, f)) {
|
||||
for ty::ty_fn_args(ty::expr_ty(cx.tcx, f)).each {|arg_t|
|
||||
cx.allow_block = (ty::arg_mode(cx.tcx, arg_t) == by_ref);
|
||||
v.visit_expr(args[i], cx, v);
|
||||
i += 1u;
|
||||
|
|
|
@ -37,13 +37,13 @@ fn check_expr(tcx: ty::ctxt, ex: @expr, &&s: (), v: visit::vt<()>) {
|
|||
fn check_arms(tcx: ty::ctxt, arms: [arm]) {
|
||||
let mut i = 0;
|
||||
/* Check for unreachable patterns */
|
||||
for arm: arm in arms {
|
||||
for arm_pat: @pat in arm.pats {
|
||||
for arms.each {|arm|
|
||||
for arm.pats.each {|arm_pat|
|
||||
let mut reachable = true;
|
||||
let mut j = 0;
|
||||
while j < i {
|
||||
if option::is_none(arms[j].guard) {
|
||||
for prev_pat: @pat in arms[j].pats {
|
||||
for vec::each(arms[j].pats) {|prev_pat|
|
||||
if pattern_supersedes(tcx, prev_pat, arm_pat) {
|
||||
reachable = false;
|
||||
}
|
||||
|
@ -72,7 +72,7 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: [@pat]) {
|
|||
ret;
|
||||
}
|
||||
// If there a non-refutable pattern in the set, we're okay.
|
||||
for pat in pats { if !is_refutable(tcx, pat) { ret; } }
|
||||
for pats.each {|pat| if !is_refutable(tcx, pat) { ret; } }
|
||||
|
||||
alt ty::get(ty::node_id_to_type(tcx, pats[0].id)).struct {
|
||||
ty::ty_enum(id, _) {
|
||||
|
@ -90,7 +90,7 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: [@pat]) {
|
|||
}
|
||||
ty::ty_tup(ts) {
|
||||
let cols = vec::to_mut(vec::from_elem(ts.len(), []));
|
||||
for p in pats {
|
||||
for pats.each {|p|
|
||||
alt raw_pat(p).node {
|
||||
pat_tup(sub) {
|
||||
vec::iteri(sub) {|i, sp| cols[i] += [sp];}
|
||||
|
@ -103,7 +103,7 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: [@pat]) {
|
|||
ty::ty_rec(fs) {
|
||||
let cols = vec::from_elem(fs.len(), {mut wild: false,
|
||||
mut pats: []});
|
||||
for p in pats {
|
||||
for pats.each {|p|
|
||||
alt raw_pat(p).node {
|
||||
pat_rec(sub, _) {
|
||||
vec::iteri(fs) {|i, field|
|
||||
|
@ -122,7 +122,7 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: [@pat]) {
|
|||
}
|
||||
ty::ty_bool {
|
||||
let mut saw_true = false, saw_false = false;
|
||||
for p in pats {
|
||||
for pats.each {|p|
|
||||
alt raw_pat(p).node {
|
||||
pat_lit(@{node: expr_lit(@{node: lit_bool(b), _}), _}) {
|
||||
if b { saw_true = true; }
|
||||
|
@ -160,7 +160,7 @@ fn check_exhaustive_enum(tcx: ty::ctxt, enum_id: def_id, sp: span,
|
|||
cols: vec::to_mut(vec::from_elem(v.args.len(), []))}
|
||||
});
|
||||
|
||||
for pat in pats {
|
||||
for pats.each {|pat|
|
||||
let pat = raw_pat(pat);
|
||||
alt tcx.def_map.get(pat.id) {
|
||||
def_variant(_, id) {
|
||||
|
@ -193,7 +193,7 @@ fn check_exhaustive_enum(tcx: ty::ctxt, enum_id: def_id, sp: span,
|
|||
fn pattern_supersedes(tcx: ty::ctxt, a: @pat, b: @pat) -> bool {
|
||||
fn patterns_supersede(tcx: ty::ctxt, as: [@pat], bs: [@pat]) -> bool {
|
||||
let mut i = 0;
|
||||
for a: @pat in as {
|
||||
for as.each {|a|
|
||||
if !pattern_supersedes(tcx, a, bs[i]) { ret false; }
|
||||
i += 1;
|
||||
}
|
||||
|
@ -202,9 +202,9 @@ fn pattern_supersedes(tcx: ty::ctxt, a: @pat, b: @pat) -> bool {
|
|||
fn field_patterns_supersede(tcx: ty::ctxt, fas: [field_pat],
|
||||
fbs: [field_pat]) -> bool {
|
||||
let wild = @{id: 0, node: pat_wild, span: dummy_sp()};
|
||||
for fa: field_pat in fas {
|
||||
for fas.each {|fa|
|
||||
let mut pb = wild;
|
||||
for fb: field_pat in fbs {
|
||||
for fbs.each {|fb|
|
||||
if fa.ident == fb.ident { pb = fb.pat; }
|
||||
}
|
||||
if !pattern_supersedes(tcx, fa.pat, pb) { ret false; }
|
||||
|
@ -301,17 +301,17 @@ fn is_refutable(tcx: ty::ctxt, pat: @pat) -> bool {
|
|||
pat_wild | pat_ident(_, none) { false }
|
||||
pat_lit(_) | pat_range(_, _) { true }
|
||||
pat_rec(fields, _) {
|
||||
for it: field_pat in fields {
|
||||
for fields.each {|it|
|
||||
if is_refutable(tcx, it.pat) { ret true; }
|
||||
}
|
||||
false
|
||||
}
|
||||
pat_tup(elts) {
|
||||
for elt in elts { if is_refutable(tcx, elt) { ret true; } }
|
||||
for elts.each {|elt| if is_refutable(tcx, elt) { ret true; } }
|
||||
false
|
||||
}
|
||||
pat_enum(_, args) {
|
||||
for p: @pat in args { if is_refutable(tcx, p) { ret true; } }
|
||||
for args.each {|p| if is_refutable(tcx, p) { ret true; } }
|
||||
false
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,7 +23,7 @@ fn check_item(sess: session, ast_map: ast_map::map, def_map: resolve::def_map,
|
|||
check_item_recursion(sess, ast_map, def_map, it);
|
||||
}
|
||||
item_enum(vs, _) {
|
||||
for var in vs {
|
||||
for vs.each {|var|
|
||||
option::with_option_do(var.node.disr_expr) {|ex|
|
||||
v.visit_expr(ex, true, v);
|
||||
}
|
||||
|
|
|
@ -59,7 +59,7 @@ fn fn_usage_expr(expr: @ast::expr,
|
|||
|
||||
let args_ctx = {unsafe_fn_legal: false,
|
||||
generic_bare_fn_legal: false with ctx};
|
||||
for arg in args {
|
||||
for args.each {|arg|
|
||||
visit::visit_expr_opt(arg, args_ctx, v);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -622,7 +622,7 @@ impl unify_methods for infer_ctxt {
|
|||
let actual_arg_len = vec::len(actual_constr.node.args);
|
||||
if expected_arg_len != actual_arg_len { ret err_res; }
|
||||
let mut i = 0u;
|
||||
for a in expected.node.args {
|
||||
for expected.node.args.each {|a|
|
||||
let actual = actual_constr.node.args[i];
|
||||
alt a.node {
|
||||
ast::carg_base {
|
||||
|
|
|
@ -85,8 +85,8 @@ fn check_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, sp: span,
|
|||
// the common flow point for all functions that appear in the AST.
|
||||
|
||||
with_appropriate_checker(cx, fn_id) { |checker|
|
||||
for @{def, span} in *freevars::get_freevars(cx.tcx, fn_id) {
|
||||
let id = ast_util::def_id_of_def(def).node;
|
||||
for vec::each(*freevars::get_freevars(cx.tcx, fn_id)) {|fv|
|
||||
let id = ast_util::def_id_of_def(fv.def).node;
|
||||
if checker == check_copy {
|
||||
let last_uses = alt check cx.last_uses.find(fn_id) {
|
||||
some(last_use::closes_over(vars)) { vars }
|
||||
|
@ -96,7 +96,7 @@ fn check_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, sp: span,
|
|||
vec::position_elem(last_uses, id)) { cont; }
|
||||
}
|
||||
let ty = ty::node_id_to_type(cx.tcx, id);
|
||||
checker(cx, ty, span);
|
||||
checker(cx, ty, fv.span);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -146,7 +146,7 @@ fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
|
|||
// Vector add copies.
|
||||
expr_binary(add, ls, rs) { maybe_copy(cx, ls); maybe_copy(cx, rs); }
|
||||
expr_rec(fields, def) {
|
||||
for field in fields { maybe_copy(cx, field.node.expr); }
|
||||
for fields.each {|field| maybe_copy(cx, field.node.expr); }
|
||||
alt def {
|
||||
some(ex) {
|
||||
// All noncopyable fields must be overridden
|
||||
|
@ -155,7 +155,7 @@ fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
|
|||
ty::ty_rec(f) { f }
|
||||
_ { cx.tcx.sess.span_bug(ex.span, "bad expr type in record"); }
|
||||
};
|
||||
for tf in ty_fields {
|
||||
for ty_fields.each {|tf|
|
||||
if !vec::any(fields, {|f| f.node.ident == tf.ident}) &&
|
||||
!ty::kind_can_be_copied(ty::type_kind(cx.tcx, tf.mt.ty)) {
|
||||
cx.tcx.sess.span_err(ex.span,
|
||||
|
@ -167,14 +167,14 @@ fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
|
|||
}
|
||||
}
|
||||
expr_tup(exprs) | expr_vec(exprs, _) {
|
||||
for expr in exprs { maybe_copy(cx, expr); }
|
||||
for exprs.each {|expr| maybe_copy(cx, expr); }
|
||||
}
|
||||
expr_bind(_, args) {
|
||||
for a in args { alt a { some(ex) { maybe_copy(cx, ex); } _ {} } }
|
||||
for args.each {|a| alt a { some(ex) { maybe_copy(cx, ex); } _ {} } }
|
||||
}
|
||||
expr_call(f, args, _) {
|
||||
let mut i = 0u;
|
||||
for arg_t in ty::ty_fn_args(ty::expr_ty(cx.tcx, f)) {
|
||||
for ty::ty_fn_args(ty::expr_ty(cx.tcx, f)).each {|arg_t|
|
||||
alt ty::arg_mode(cx.tcx, arg_t) {
|
||||
by_copy { maybe_copy(cx, args[i]); }
|
||||
by_ref | by_val | by_mutbl_ref | by_move { }
|
||||
|
@ -236,7 +236,7 @@ fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
|
|||
fn check_stmt(stmt: @stmt, cx: ctx, v: visit::vt<ctx>) {
|
||||
alt stmt.node {
|
||||
stmt_decl(@{node: decl_local(locals), _}, _) {
|
||||
for local in locals {
|
||||
for locals.each {|local|
|
||||
alt local.node.init {
|
||||
some({op: init_assign, expr}) { maybe_copy(cx, expr); }
|
||||
_ {}
|
||||
|
|
|
@ -114,7 +114,7 @@ fn visit_expr(ex: @expr, cx: ctx, v: visit::vt<ctx>) {
|
|||
v.visit_expr(input, cx, v);
|
||||
let before = cx.current;
|
||||
let mut sets = [];
|
||||
for arm in arms {
|
||||
for arms.each {|arm|
|
||||
cx.current = before;
|
||||
v.visit_arm(arm, cx, v);
|
||||
sets += [cx.current];
|
||||
|
@ -185,7 +185,7 @@ fn visit_expr(ex: @expr, cx: ctx, v: visit::vt<ctx>) {
|
|||
}
|
||||
}
|
||||
}
|
||||
for f in fns { v.visit_expr(f, cx, v); }
|
||||
for fns.each {|f| v.visit_expr(f, cx, v); }
|
||||
vec::iter2(args, arg_ts) {|arg, arg_t|
|
||||
alt arg.node {
|
||||
expr_path(_) {
|
||||
|
@ -213,7 +213,7 @@ fn visit_stmt(s: @stmt, cx: ctx, v: visit::vt<ctx>) {
|
|||
stmt_decl(@{node: decl_local(ls), _}, _) {
|
||||
shadow_in_current(cx, {|id|
|
||||
let mut rslt = false;
|
||||
for local in ls {
|
||||
for ls.each {|local|
|
||||
let mut found = false;
|
||||
pat_util::pat_bindings(cx.tcx.def_map, local.node.pat,
|
||||
{|pid, _a, _b|
|
||||
|
@ -246,7 +246,7 @@ fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
|
|||
proto_box | proto_uniq | proto_bare {
|
||||
alt cx.tcx.freevars.find(id) {
|
||||
some(vars) {
|
||||
for v in *vars {
|
||||
for vec::each(*vars) {|v|
|
||||
option::with_option_do(def_is_owned_local(cx, v.def)) {|nid|
|
||||
clear_in_current(cx, nid, false);
|
||||
cx.current += [{def: nid,
|
||||
|
@ -304,22 +304,22 @@ fn add_block_exit(cx: ctx, tp: block_type) -> bool {
|
|||
fn join_branches(branches: [set]) -> set {
|
||||
let mut found: set = [], i = 0u;
|
||||
let l = vec::len(branches);
|
||||
for set in branches {
|
||||
for branches.each {|set|
|
||||
i += 1u;
|
||||
for {def, uses} in set {
|
||||
if !vec::any(found, {|v| v.def == def}) {
|
||||
let mut j = i, nne = uses;
|
||||
for set.each {|elt|
|
||||
if !vec::any(found, {|v| v.def == elt.def}) {
|
||||
let mut j = i, nne = elt.uses;
|
||||
while j < l {
|
||||
for {def: d2, uses} in branches[j] {
|
||||
if d2 == def {
|
||||
list::iter(uses) {|e|
|
||||
for vec::each(branches[j]) {|elt2|
|
||||
if elt2.def == elt.def {
|
||||
list::iter(elt2.uses) {|e|
|
||||
if !list::has(nne, e) { nne = cons(e, @nne); }
|
||||
}
|
||||
}
|
||||
}
|
||||
j += 1u;
|
||||
}
|
||||
found += [{def: def, uses: nne}];
|
||||
found += [{def: elt.def, uses: nne}];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -327,11 +327,11 @@ fn join_branches(branches: [set]) -> set {
|
|||
}
|
||||
|
||||
fn leave_fn(cx: ctx) {
|
||||
for {def, uses} in cx.current {
|
||||
list::iter(uses) {|use|
|
||||
for cx.current.each {|elt|
|
||||
list::iter(elt.uses) {|use|
|
||||
let key = alt use {
|
||||
var_use(pth_id) { path(pth_id) }
|
||||
close_over(fn_id) { close(fn_id, def) }
|
||||
close_over(fn_id) { close(fn_id, elt.def) }
|
||||
};
|
||||
if !cx.last_uses.contains_key(key) {
|
||||
cx.last_uses.insert(key, true);
|
||||
|
@ -343,16 +343,16 @@ fn leave_fn(cx: ctx) {
|
|||
fn shadow_in_current(cx: ctx, p: fn(node_id) -> bool) {
|
||||
let mut out = [];
|
||||
cx.current <-> out;
|
||||
for e in out { if !p(e.def) { cx.current += [e]; } }
|
||||
for out.each {|e| if !p(e.def) { cx.current += [e]; } }
|
||||
}
|
||||
|
||||
fn clear_in_current(cx: ctx, my_def: node_id, to: bool) {
|
||||
for {def, uses} in cx.current {
|
||||
if def == my_def {
|
||||
list::iter(uses) {|use|
|
||||
for cx.current.each {|elt|
|
||||
if elt.def == my_def {
|
||||
list::iter(elt.uses) {|use|
|
||||
let key = alt use {
|
||||
var_use(pth_id) { path(pth_id) }
|
||||
close_over(fn_id) { close(fn_id, def) }
|
||||
close_over(fn_id) { close(fn_id, elt.def) }
|
||||
};
|
||||
if !to || !cx.last_uses.contains_key(key) {
|
||||
cx.last_uses.insert(key, to);
|
||||
|
|
|
@ -60,7 +60,8 @@ fn merge_opts(attrs: [ast::attribute], cmd_opts: [(option, bool)]) ->
|
|||
}
|
||||
|
||||
fn contains(xs: [(option, bool)], x: option) -> bool {
|
||||
for (o, _) in xs {
|
||||
for xs.each {|c|
|
||||
let (o, _) = c;
|
||||
if o == x { ret true; }
|
||||
}
|
||||
ret false;
|
||||
|
@ -85,7 +86,8 @@ fn merge_opts(attrs: [ast::attribute], cmd_opts: [(option, bool)]) ->
|
|||
}
|
||||
};
|
||||
|
||||
for (o, v) in default() {
|
||||
for default().each {|c|
|
||||
let (o, v) = c;
|
||||
if !contains(result, o) {
|
||||
result += [(o, v)];
|
||||
}
|
||||
|
@ -97,7 +99,7 @@ fn merge_opts(attrs: [ast::attribute], cmd_opts: [(option, bool)]) ->
|
|||
fn check_ctypes(tcx: ty::ctxt, crate: @ast::crate) {
|
||||
fn check_native_fn(tcx: ty::ctxt, decl: ast::fn_decl) {
|
||||
let tys = vec::map(decl.inputs) {|a| a.ty };
|
||||
for ty in (tys + [decl.output]) {
|
||||
for vec::each(tys + [decl.output]) {|ty|
|
||||
alt ty.node {
|
||||
ast::ty_path(_, id) {
|
||||
alt tcx.def_map.get(id) {
|
||||
|
@ -125,7 +127,7 @@ fn check_ctypes(tcx: ty::ctxt, crate: @ast::crate) {
|
|||
alt it.node {
|
||||
ast::item_native_mod(nmod) if attr::native_abi(it.attrs) !=
|
||||
either::right(ast::native_abi_rust_intrinsic) {
|
||||
for ni in nmod.items {
|
||||
for nmod.items.each {|ni|
|
||||
alt ni.node {
|
||||
ast::native_item_fn(decl, tps) {
|
||||
check_native_fn(tcx, decl);
|
||||
|
@ -148,7 +150,8 @@ fn check_ctypes(tcx: ty::ctxt, crate: @ast::crate) {
|
|||
fn check_crate(tcx: ty::ctxt, crate: @ast::crate,
|
||||
opts: [(option, bool)], time: bool) {
|
||||
let lint_opts = lint::merge_opts(crate.node.attrs, opts);
|
||||
for (lopt, switch) in lint_opts {
|
||||
for lint_opts.each {|opt|
|
||||
let (lopt, switch) = opt;
|
||||
if switch == true {
|
||||
lopt.run(tcx, crate, time);
|
||||
}
|
||||
|
|
|
@ -56,7 +56,7 @@ fn expr_root_(tcx: ty::ctxt, ctor_self: option<node_id>,
|
|||
let mut is_mutbl = false;
|
||||
alt ty::get(auto_unbox.t).struct {
|
||||
ty::ty_rec(fields) {
|
||||
for fld: ty::field in fields {
|
||||
for fields.each {|fld|
|
||||
if str::eq(ident, fld.ident) {
|
||||
is_mutbl = fld.mt.mutbl == m_mutbl;
|
||||
break;
|
||||
|
@ -74,7 +74,7 @@ fn expr_root_(tcx: ty::ctxt, ctor_self: option<node_id>,
|
|||
}
|
||||
none { false }
|
||||
};
|
||||
for fld: ty::field_ty in ty::lookup_class_fields(tcx, did) {
|
||||
for ty::lookup_class_fields(tcx, did).each {|fld|
|
||||
if str::eq(ident, fld.ident) {
|
||||
is_mutbl = fld.mutability == class_mutable
|
||||
|| in_self; // all fields can be mutated
|
||||
|
@ -169,7 +169,7 @@ fn visit_decl(d: @decl, &&cx: @ctx, v: visit::vt<@ctx>) {
|
|||
visit::visit_decl(d, cx, v);
|
||||
alt d.node {
|
||||
decl_local(locs) {
|
||||
for loc in locs {
|
||||
for locs.each {|loc|
|
||||
alt loc.node.init {
|
||||
some(init) {
|
||||
if init.op == init_move { check_move_rhs(cx, init.expr); }
|
||||
|
@ -198,7 +198,7 @@ fn visit_expr(ex: @expr, &&cx: @ctx, v: visit::vt<@ctx>) {
|
|||
check_lval(cx, dest, msg_assign);
|
||||
}
|
||||
expr_fn(_, _, _, cap) {
|
||||
for moved in cap.moves {
|
||||
for cap.moves.each {|moved|
|
||||
let def = cx.tcx.def_map.get(moved.id);
|
||||
alt is_illegal_to_modify_def(cx, def, msg_move_out) {
|
||||
some(name) { mk_err(cx, moved.span, msg_move_out, moved.name); }
|
||||
|
@ -281,7 +281,7 @@ fn check_move_rhs(cx: @ctx, src: @expr) {
|
|||
fn check_call(cx: @ctx, f: @expr, args: [@expr]) {
|
||||
let arg_ts = ty::ty_fn_args(ty::expr_ty(cx.tcx, f));
|
||||
let mut i = 0u;
|
||||
for arg_t: ty::arg in arg_ts {
|
||||
for arg_ts.each {|arg_t|
|
||||
alt ty::resolved_mode(cx.tcx, arg_t.mode) {
|
||||
by_mutbl_ref { check_lval(cx, args[i], msg_mutbl_ref); }
|
||||
by_move { check_lval(cx, args[i], msg_move_out); }
|
||||
|
@ -294,7 +294,7 @@ fn check_call(cx: @ctx, f: @expr, args: [@expr]) {
|
|||
fn check_bind(cx: @ctx, f: @expr, args: [option<@expr>]) {
|
||||
let arg_ts = ty::ty_fn_args(ty::expr_ty(cx.tcx, f));
|
||||
let mut i = 0u;
|
||||
for arg in args {
|
||||
for args.each {|arg|
|
||||
alt arg {
|
||||
some(expr) {
|
||||
let o_msg = alt ty::resolved_mode(cx.tcx, arg_ts[i].mode) {
|
||||
|
|
|
@ -55,8 +55,8 @@ fn walk_pat(pat: @pat, it: fn(@pat)) {
|
|||
it(pat);
|
||||
alt pat.node {
|
||||
pat_ident(pth, some(p)) { walk_pat(p, it); }
|
||||
pat_rec(fields, _) { for f in fields { walk_pat(f.pat, it); } }
|
||||
pat_enum(_, s) | pat_tup(s) { for p in s { walk_pat(p, it); } }
|
||||
pat_rec(fields, _) { for fields.each {|f| walk_pat(f.pat, it); } }
|
||||
pat_enum(_, s) | pat_tup(s) { for s.each {|p| walk_pat(p, it); } }
|
||||
pat_box(s) | pat_uniq(s) { walk_pat(s, it); }
|
||||
pat_wild | pat_lit(_) | pat_range(_, _) | pat_ident(_, none) {}
|
||||
}
|
||||
|
|
|
@ -440,7 +440,7 @@ fn resolve_block(blk: ast::blk, cx: ctxt, visitor: visit::vt<ctxt>) {
|
|||
record_parent(cx, blk.node.id);
|
||||
|
||||
// Resolve queued locals to this block.
|
||||
for local_id in cx.queued_locals {
|
||||
for cx.queued_locals.each {|local_id|
|
||||
cx.region_map.local_blocks.insert(local_id, blk.node.id);
|
||||
}
|
||||
|
||||
|
|
|
@ -187,7 +187,7 @@ fn create_env(sess: session, amap: ast_map::map) -> @env {
|
|||
fn iter_export_paths(vi: ast::view_item, f: fn(vp: @ast::view_path)) {
|
||||
alt vi.node {
|
||||
ast::view_item_export(vps) {
|
||||
for vp in vps {
|
||||
for vps.each {|vp|
|
||||
f(vp);
|
||||
}
|
||||
}
|
||||
|
@ -198,9 +198,7 @@ fn iter_export_paths(vi: ast::view_item, f: fn(vp: @ast::view_path)) {
|
|||
fn iter_import_paths(vi: ast::view_item, f: fn(vp: @ast::view_path)) {
|
||||
alt vi.node {
|
||||
ast::view_item_import(vps) {
|
||||
for vp in vps {
|
||||
f(vp);
|
||||
}
|
||||
for vps.each {|vp| f(vp);}
|
||||
}
|
||||
_ {}
|
||||
}
|
||||
|
@ -237,7 +235,7 @@ fn map_crate(e: @env, c: @ast::crate) {
|
|||
e.imports.insert(id, is_glob(path, sc, vp.span));
|
||||
}
|
||||
ast::view_path_list(mod_path, idents, _) {
|
||||
for ident in idents {
|
||||
for idents.each {|ident|
|
||||
let t = todo(ident.node.name,
|
||||
@(*mod_path + [ident.node.name]),
|
||||
ident.span, sc);
|
||||
|
@ -438,9 +436,9 @@ fn resolve_names(e: @env, c: @ast::crate) {
|
|||
fn walk_tps(e: @env, tps: [ast::ty_param], sc: scopes, v: vt<scopes>) {
|
||||
let outer_current_tp = e.current_tp;
|
||||
let mut current = 0u;
|
||||
for tp in tps {
|
||||
for tps.each {|tp|
|
||||
e.current_tp = some(current);
|
||||
for bound in *tp.bounds {
|
||||
for vec::each(*tp.bounds) {|bound|
|
||||
alt bound {
|
||||
bound_iface(t) { v.visit_ty(t, sc, v); }
|
||||
_ {}
|
||||
|
@ -504,7 +502,7 @@ fn visit_item_with_scope(e: @env, i: @ast::item, sc: scopes, v: vt<scopes>) {
|
|||
visit::visit_ty_params(tps, sc, v);
|
||||
alt ifce { some(ty) { v.visit_ty(ty, sc, v); } _ {} }
|
||||
v.visit_ty(sty, sc, v);
|
||||
for m in methods {
|
||||
for methods.each {|m|
|
||||
v.visit_ty_params(m.tps, sc, v);
|
||||
let msc = cons(scope_method(m.self_id, tps + m.tps), @sc);
|
||||
v.visit_fn(visit::fk_method(m.ident, [], m),
|
||||
|
@ -513,9 +511,9 @@ fn visit_item_with_scope(e: @env, i: @ast::item, sc: scopes, v: vt<scopes>) {
|
|||
}
|
||||
ast::item_iface(tps, methods) {
|
||||
visit::visit_ty_params(tps, sc, v);
|
||||
for m in methods {
|
||||
for methods.each {|m|
|
||||
let msc = cons(scope_method(i.id, tps + m.tps), @sc);
|
||||
for a in m.decl.inputs { v.visit_ty(a.ty, msc, v); }
|
||||
for m.decl.inputs.each {|a| v.visit_ty(a.ty, msc, v); }
|
||||
v.visit_ty(m.decl.output, msc, v);
|
||||
}
|
||||
}
|
||||
|
@ -530,7 +528,7 @@ fn visit_item_with_scope(e: @env, i: @ast::item, sc: scopes, v: vt<scopes>) {
|
|||
ctor.node.body, ctor.span, ctor.node.id,
|
||||
ctor_scope, v);
|
||||
/* visit the items */
|
||||
for cm in members {
|
||||
for members.each {|cm|
|
||||
alt cm.node {
|
||||
class_method(m) {
|
||||
let msc = cons(scope_method(m.self_id, tps + m.tps),
|
||||
|
@ -571,7 +569,7 @@ fn visit_fn_with_scope(e: @env, fk: visit::fn_kind, decl: ast::fn_decl,
|
|||
|
||||
// here's where we need to set up the mapping
|
||||
// for f's constrs in the table.
|
||||
for c: @ast::constr in decl.constraints { resolve_constr(e, c, sc, v); }
|
||||
for decl.constraints.each {|c| resolve_constr(e, c, sc, v); }
|
||||
let scope = alt fk {
|
||||
visit::fk_item_fn(_, tps) | visit::fk_res(_, tps) |
|
||||
visit::fk_method(_, tps, _) | visit::fk_ctor(_, tps)
|
||||
|
@ -586,8 +584,8 @@ fn visit_fn_with_scope(e: @env, fk: visit::fn_kind, decl: ast::fn_decl,
|
|||
fn visit_block_with_scope(b: ast::blk, sc: scopes, v: vt<scopes>) {
|
||||
let pos = @mut 0u, loc = @mut 0u;
|
||||
let block_sc = cons(scope_block(b, pos, loc), @sc);
|
||||
for vi in b.node.view_items { v.visit_view_item(vi, block_sc, v); }
|
||||
for stmt in b.node.stmts {
|
||||
for b.node.view_items.each {|vi| v.visit_view_item(vi, block_sc, v); }
|
||||
for b.node.stmts.each {|stmt|
|
||||
v.visit_stmt(stmt, block_sc, v);;
|
||||
*pos += 1u;;
|
||||
*loc = 0u;
|
||||
|
@ -602,14 +600,14 @@ fn visit_decl_with_scope(d: @decl, sc: scopes, v: vt<scopes>) {
|
|||
};
|
||||
alt d.node {
|
||||
decl_local(locs) {
|
||||
for loc in locs { v.visit_local(loc, sc, v);; *loc_pos += 1u; }
|
||||
for locs.each {|loc| v.visit_local(loc, sc, v);; *loc_pos += 1u; }
|
||||
}
|
||||
decl_item(it) { v.visit_item(it, sc, v); }
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_arm_with_scope(a: ast::arm, sc: scopes, v: vt<scopes>) {
|
||||
for p: @pat in a.pats { v.visit_pat(p, sc, v); }
|
||||
for a.pats.each {|p| v.visit_pat(p, sc, v); }
|
||||
let sc_inner = cons(scope_arm(a), @sc);
|
||||
visit::visit_expr_opt(a.guard, sc_inner, v);
|
||||
v.visit_block(a.body, sc_inner, v);
|
||||
|
@ -717,7 +715,7 @@ fn resolve_import(e: env, n_id: node_id, name: ast::ident,
|
|||
fn find_imports_after(e: env, id: node_id, sc: scopes) -> [node_id] {
|
||||
fn lst(my_id: node_id, vis: [@view_item]) -> [node_id] {
|
||||
let mut imports = [], found = false;
|
||||
for vi in vis {
|
||||
for vis.each {|vi|
|
||||
iter_effective_import_paths(*vi) {|vp|
|
||||
alt vp.node {
|
||||
view_path_simple(_, _, id)
|
||||
|
@ -726,7 +724,7 @@ fn resolve_import(e: env, n_id: node_id, name: ast::ident,
|
|||
if found { imports += [id]; }
|
||||
}
|
||||
view_path_list(_, ids, _) {
|
||||
for id in ids {
|
||||
for ids.each {|id|
|
||||
if id.node.id == my_id { found = true; }
|
||||
if found { imports += [id.node.id]; }
|
||||
}
|
||||
|
@ -844,7 +842,7 @@ fn unresolved_err(e: env, cx: ctxt, sp: span, name: ident, kind: str) {
|
|||
in_scope(sc) {
|
||||
alt find_fn_or_mod_scope(sc) {
|
||||
some(err_scope) {
|
||||
for rs: {ident: str, sc: scope} in e.reported {
|
||||
for e.reported.each {|rs|
|
||||
if str::eq(rs.ident, name) && err_scope == rs.sc { ret; }
|
||||
}
|
||||
e.reported += [{ident: name, sc: err_scope}];
|
||||
|
@ -1115,7 +1113,7 @@ fn lookup_in_scope(e: env, sc: scopes, sp: span, name: ident, ns: namespace,
|
|||
fn lookup_in_ty_params(e: env, name: ident, ty_params: [ast::ty_param])
|
||||
-> option<def> {
|
||||
let mut n = 0u;
|
||||
for tp: ast::ty_param in ty_params {
|
||||
for ty_params.each {|tp|
|
||||
if str::eq(tp.ident, name) && alt e.current_tp {
|
||||
some(cur) { n < cur } none { true }
|
||||
} { ret some(ast::def_ty_param(local_def(tp.id), n)); }
|
||||
|
@ -1139,7 +1137,7 @@ fn lookup_in_fn(e: env, name: ident, decl: ast::fn_decl,
|
|||
ns: namespace) -> option<def> {
|
||||
alt ns {
|
||||
ns_val {
|
||||
for a: ast::arg in decl.inputs {
|
||||
for decl.inputs.each {|a|
|
||||
if str::eq(a.ident, name) {
|
||||
ret some(ast::def_arg(a.id, a.mode));
|
||||
}
|
||||
|
@ -1189,7 +1187,7 @@ fn lookup_in_block(e: env, name: ident, sp: span, b: ast::blk_, pos: uint,
|
|||
} else {
|
||||
alt ns {
|
||||
ns_val {
|
||||
for v: ast::variant in variants {
|
||||
for variants.each {|v|
|
||||
if str::eq(v.node.name, name) {
|
||||
let i = v.node.id;
|
||||
ret some(ast::def_variant
|
||||
|
@ -1216,8 +1214,7 @@ fn lookup_in_block(e: env, name: ident, sp: span, b: ast::blk_, pos: uint,
|
|||
_ { }
|
||||
}
|
||||
}
|
||||
for vi in b.view_items {
|
||||
|
||||
for b.view_items.each {|vi|
|
||||
let mut is_import = false;
|
||||
alt vi.node {
|
||||
ast::view_item_import(_) { is_import = true; }
|
||||
|
@ -1227,7 +1224,7 @@ fn lookup_in_block(e: env, name: ident, sp: span, b: ast::blk_, pos: uint,
|
|||
alt vi.node {
|
||||
|
||||
ast::view_item_import(vps) | ast::view_item_export(vps) {
|
||||
for vp in vps {
|
||||
for vps.each {|vp|
|
||||
alt vp.node {
|
||||
ast::view_path_simple(ident, _, id) {
|
||||
if is_import && name == ident {
|
||||
|
@ -1236,7 +1233,7 @@ fn lookup_in_block(e: env, name: ident, sp: span, b: ast::blk_, pos: uint,
|
|||
}
|
||||
|
||||
ast::view_path_list(path, idents, _) {
|
||||
for ident in idents {
|
||||
for idents.each {|ident|
|
||||
if name == ident.node.name {
|
||||
ret lookup_import(e, ident.node.id, ns);
|
||||
}
|
||||
|
@ -1467,7 +1464,7 @@ fn lookup_in_globs(e: env, globs: [glob_imp_def], sp: span, id: ident,
|
|||
else if vec::len(matches) == 1u {
|
||||
ret some(matches[0].def);
|
||||
} else {
|
||||
for match: glob_imp_def in matches {
|
||||
for matches.each {|match|
|
||||
let sp = match.path.span;
|
||||
e.sess.span_note(sp, #fmt["'%s' is imported here", id]);
|
||||
}
|
||||
|
@ -1546,7 +1543,7 @@ fn add_to_index(index: hashmap<ident, list<mod_index_entry>>, id: ident,
|
|||
|
||||
fn index_view_items(view_items: [@ast::view_item],
|
||||
index: hashmap<ident, list<mod_index_entry>>) {
|
||||
for vi in view_items {
|
||||
for view_items.each {|vi|
|
||||
alt vi.node {
|
||||
ast::view_item_use(ident, _, id) {
|
||||
add_to_index(index, ident, mie_view_item(ident, id, vi.span));
|
||||
|
@ -1560,7 +1557,7 @@ fn index_view_items(view_items: [@ast::view_item],
|
|||
add_to_index(index, ident, mie_import_ident(id, vp.span));
|
||||
}
|
||||
ast::view_path_list(_, idents, _) {
|
||||
for ident in idents {
|
||||
for idents.each {|ident|
|
||||
add_to_index(index, ident.node.name,
|
||||
mie_import_ident(ident.node.id,
|
||||
ident.span));
|
||||
|
@ -1579,7 +1576,7 @@ fn index_mod(md: ast::_mod) -> mod_index {
|
|||
|
||||
index_view_items(md.view_items, index);
|
||||
|
||||
for it: @ast::item in md.items {
|
||||
for md.items.each {|it|
|
||||
alt it.node {
|
||||
ast::item_const(_, _) | ast::item_fn(_, _, _) | ast::item_mod(_) |
|
||||
ast::item_native_mod(_) | ast::item_ty(_, _) |
|
||||
|
@ -1590,7 +1587,7 @@ fn index_mod(md: ast::_mod) -> mod_index {
|
|||
ast::item_enum(variants, _) {
|
||||
add_to_index(index, it.ident, mie_item(it));
|
||||
let mut variant_idx: uint = 0u;
|
||||
for v: ast::variant in variants {
|
||||
for variants.each {|v|
|
||||
add_to_index(index, v.node.name,
|
||||
mie_enum_variant(variant_idx, variants,
|
||||
it.id, it.span));
|
||||
|
@ -1619,7 +1616,7 @@ fn index_nmod(md: ast::native_mod) -> mod_index {
|
|||
|
||||
index_view_items(md.view_items, index);
|
||||
|
||||
for it: @ast::native_item in md.items {
|
||||
for md.items.each {|it|
|
||||
add_to_index(index, it.ident, mie_native_item(it));
|
||||
}
|
||||
ret index;
|
||||
|
@ -1649,11 +1646,12 @@ fn ns_ok(wanted:namespace, actual:namespace) -> bool {

fn lookup_external(e: env, cnum: int, ids: [ident], ns: namespace) ->
   option<def> {
for d: def in csearch::lookup_defs(e.sess.cstore, cnum, ids) {
let mut result = none;
for csearch::lookup_defs(e.sess.cstore, cnum, ids).each {|d|
e.ext_map.insert(def_id_of_def(d), ids);
if ns_ok(ns, ns_for_def(d)) { ret some(d); }
if ns_ok(ns, ns_for_def(d)) { result = some(d); }
}
ret none;
ret result;
}

@ -1720,7 +1718,7 @@ fn mie_span(mie: mod_index_entry) -> span {
|
|||
fn check_item(e: @env, i: @ast::item, &&x: (), v: vt<()>) {
|
||||
fn typaram_names(tps: [ast::ty_param]) -> [ident] {
|
||||
let mut x: [ast::ident] = [];
|
||||
for tp: ast::ty_param in tps { x += [tp.ident]; }
|
||||
for tps.each {|tp| x += [tp.ident]; }
|
||||
ret x;
|
||||
}
|
||||
visit::visit_item(i, x, v);
|
||||
|
@ -1769,7 +1767,7 @@ fn check_arm(e: @env, a: ast::arm, &&x: (), v: vt<()>) {
|
|||
e.sess.span_err(a.pats[i].span,
|
||||
"inconsistent number of bindings");
|
||||
} else {
|
||||
for name: ident in ch.seen {
|
||||
for ch.seen.each {|name|
|
||||
if is_none(vec::find(seen0, bind str::eq(name, _))) {
|
||||
// Fight the alias checker
|
||||
let name_ = name;
|
||||
|
@ -1787,13 +1785,13 @@ fn check_block(e: @env, b: ast::blk, &&x: (), v: vt<()>) {
|
|||
let values = checker(*e, "value");
|
||||
let types = checker(*e, "type");
|
||||
let mods = checker(*e, "module");
|
||||
for st: @ast::stmt in b.node.stmts {
|
||||
for b.node.stmts.each {|st|
|
||||
alt st.node {
|
||||
ast::stmt_decl(d, _) {
|
||||
alt d.node {
|
||||
ast::decl_local(locs) {
|
||||
let local_values = checker(*e, "value");
|
||||
for loc in locs {
|
||||
for locs.each {|loc|
|
||||
pat_util::pat_bindings(e.def_map, loc.node.pat)
|
||||
{|_i, p_sp, n|
|
||||
let ident = path_to_ident(n);
|
||||
|
@ -1806,7 +1804,7 @@ fn check_block(e: @env, b: ast::blk, &&x: (), v: vt<()>) {
|
|||
alt it.node {
|
||||
ast::item_enum(variants, _) {
|
||||
add_name(types, it.span, it.ident);
|
||||
for v: ast::variant in variants {
|
||||
for variants.each {|v|
|
||||
add_name(values, v.span, v.node.name);
|
||||
}
|
||||
}
|
||||
|
@ -1868,7 +1866,7 @@ fn checker(e: env, kind: str) -> checker {
|
|||
}
|
||||
|
||||
fn check_name(ch: checker, sp: span, name: ident) {
|
||||
for s: ident in ch.seen {
|
||||
for ch.seen.each {|s|
|
||||
if str::eq(s, name) {
|
||||
ch.sess.span_fatal(sp, "duplicate " + ch.kind + " name: " + name);
|
||||
}
|
||||
|
@ -1882,7 +1880,7 @@ fn add_name(ch: checker, sp: span, name: ident) {
|
|||
fn ensure_unique<T>(e: env, sp: span, elts: [T], id: fn(T) -> ident,
|
||||
kind: str) {
|
||||
let ch = checker(e, kind);
|
||||
for elt: T in elts { add_name(ch, sp, id(elt)); }
|
||||
for elts.each {|elt| add_name(ch, sp, id(elt)); }
|
||||
}
|
||||
|
||||
fn check_exports(e: @env) {
|
||||
|
@ -1908,7 +1906,7 @@ fn check_exports(e: @env) {
|
|||
[ found_def_item(item, ns_val),
|
||||
found_def_item(item, ns_type),
|
||||
found_def_item(item, ns_module) ];
|
||||
for d in defs {
|
||||
for defs.each {|d|
|
||||
alt d {
|
||||
some(def) {
|
||||
f(ident, def);
|
||||
|
@ -2015,7 +2013,7 @@ fn check_exports(e: @env) {
|
|||
ids: [ast::path_list_ident]) {
|
||||
let parent_id = check_enum_ok(e, span, id, _mod);
|
||||
add_export(e, export_id, local_def(parent_id), false);
|
||||
for variant_id in ids {
|
||||
for ids.each {|variant_id|
|
||||
let mut found = false;
|
||||
alt _mod.index.find(variant_id.node.name) {
|
||||
some(ms) {
|
||||
|
@ -2048,7 +2046,7 @@ fn check_exports(e: @env) {
|
|||
some(m) {
|
||||
let glob_is_re_exported = int_hash();
|
||||
|
||||
for vi in m.view_items {
|
||||
for m.view_items.each {|vi|
|
||||
iter_export_paths(*vi) { |vp|
|
||||
alt vp.node {
|
||||
ast::view_path_simple(ident, _, id) {
|
||||
|
@ -2071,7 +2069,7 @@ fn check_exports(e: @env) {
|
|||
}
|
||||
// Now follow the export-glob links and fill in the
|
||||
// globbed_exports and exp_map lists.
|
||||
for glob in _mod.glob_imports {
|
||||
for _mod.glob_imports.each {|glob|
|
||||
let id = alt check glob.path.node {
|
||||
ast::view_path_glob(_, node_id) { node_id }
|
||||
};
|
||||
|
@ -2127,7 +2125,7 @@ fn find_impls_in_view_item(e: env, vi: @ast::view_item,
|
|||
option::with_option_do(sc) {|sc|
|
||||
list::iter(sc) {|level|
|
||||
if vec::len(found) == 0u {
|
||||
for imp in *level {
|
||||
for vec::each(*level) {|imp|
|
||||
if imp.ident == pt[0] {
|
||||
found += [@{ident: name with *imp}];
|
||||
}
|
||||
|
@ -2138,13 +2136,15 @@ fn find_impls_in_view_item(e: env, vi: @ast::view_item,
|
|||
}
|
||||
} else {
|
||||
lookup_imported_impls(e, id) {|is|
|
||||
for i in *is { impls += [@{ident: name with *i}]; }
|
||||
for vec::each(*is) {|i|
|
||||
impls += [@{ident: name with *i}];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ast::view_path_list(base, names, _) {
|
||||
for nm in names {
|
||||
for names.each {|nm|
|
||||
lookup_imported_impls(e, nm.node.id) {|is| impls += *is; }
|
||||
}
|
||||
}
|
||||
|
@ -2198,10 +2198,10 @@ fn find_impls_in_mod_by_id(e: env, defid: def_id, &impls: [@_impl],
|
|||
let mut tmp = [];
|
||||
let mi = e.mod_map.get(defid.node);
|
||||
let md = option::get(mi.m);
|
||||
for vi in md.view_items {
|
||||
for md.view_items.each {|vi|
|
||||
find_impls_in_view_item(e, vi, tmp, none);
|
||||
}
|
||||
for i in md.items {
|
||||
for md.items.each {|i|
|
||||
find_impls_in_item(e, i, tmp, none, none);
|
||||
}
|
||||
@vec::filter(tmp) {|i| is_exported(e, i.ident, mi)}
|
||||
|
@ -2213,7 +2213,7 @@ fn find_impls_in_mod_by_id(e: env, defid: def_id, &impls: [@_impl],
|
|||
}
|
||||
alt name {
|
||||
some(n) {
|
||||
for im in *cached {
|
||||
for vec::each(*cached) {|im|
|
||||
if n == im.ident { impls += [im]; }
|
||||
}
|
||||
}
|
||||
|
@ -2234,10 +2234,10 @@ fn find_impls_in_mod(e: env, m: def, &impls: [@_impl],
|
|||
fn visit_block_with_impl_scope(e: @env, b: ast::blk, sc: iscopes,
|
||||
v: vt<iscopes>) {
|
||||
let mut impls = [];
|
||||
for vi in b.node.view_items {
|
||||
for b.node.view_items.each {|vi|
|
||||
find_impls_in_view_item(*e, vi, impls, some(sc));
|
||||
}
|
||||
for st in b.node.stmts {
|
||||
for b.node.stmts.each {|st|
|
||||
alt st.node {
|
||||
ast::stmt_decl(@{node: ast::decl_item(i), _}, _) {
|
||||
find_impls_in_item(*e, i, impls, none, none);
|
||||
|
@ -2252,10 +2252,10 @@ fn visit_block_with_impl_scope(e: @env, b: ast::blk, sc: iscopes,
|
|||
fn visit_mod_with_impl_scope(e: @env, m: ast::_mod, s: span, id: node_id,
|
||||
sc: iscopes, v: vt<iscopes>) {
|
||||
let mut impls = [];
|
||||
for vi in m.view_items {
|
||||
for m.view_items.each {|vi|
|
||||
find_impls_in_view_item(*e, vi, impls, some(sc));
|
||||
}
|
||||
for i in m.items { find_impls_in_item(*e, i, impls, none, none); }
|
||||
for m.items.each {|i| find_impls_in_item(*e, i, impls, none, none); }
|
||||
let impls = @impls;
|
||||
visit::visit_mod(m, s, id, if vec::len(*impls) > 0u {
|
||||
cons(impls, @sc)
|
||||
|
|
|
@ -2788,7 +2788,7 @@ fn need_invoke(bcx: block) -> bool {
|
|||
loop {
|
||||
alt cur.kind {
|
||||
block_scope(info) {
|
||||
for cleanup in info.cleanups {
|
||||
for info.cleanups.each {|cleanup|
|
||||
alt cleanup {
|
||||
clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) {
|
||||
if cleanup_type == normal_exit_and_unwind {
|
||||
|
@ -2925,7 +2925,7 @@ fn trans_rec(bcx: block, fields: [ast::field],
|
|||
let ty_fields = alt check ty::get(t).struct { ty::ty_rec(f) { f } };
|
||||
|
||||
let mut temp_cleanups = [];
|
||||
for fld in fields {
|
||||
for fields.each {|fld|
|
||||
let ix = option::get(vec::position(ty_fields, {|ft|
|
||||
str::eq(fld.node.ident, ft.ident)
|
||||
}));
|
||||
|
@ -2940,7 +2940,7 @@ fn trans_rec(bcx: block, fields: [ast::field],
|
|||
let mut i = 0;
|
||||
bcx = cx;
|
||||
// Copy over inherited fields
|
||||
for tf in ty_fields {
|
||||
for ty_fields.each {|tf|
|
||||
if !vec::any(fields, {|f| str::eq(f.node.ident, tf.ident)}) {
|
||||
let dst = GEPi(bcx, addr, [0, i]);
|
||||
let base = GEPi(bcx, base_val, [0, i]);
|
||||
|
@ -2955,7 +2955,7 @@ fn trans_rec(bcx: block, fields: [ast::field],
|
|||
|
||||
// Now revoke the cleanups as we pass responsibility for the data
|
||||
// structure on to the caller
|
||||
for cleanup in temp_cleanups { revoke_clean(bcx, cleanup); }
|
||||
for temp_cleanups.each {|cleanup| revoke_clean(bcx, cleanup); }
|
||||
ret bcx;
|
||||
}
|
||||
|
||||
|
@ -3707,15 +3707,16 @@ fn cleanup_and_leave(bcx: block, upto: option<BasicBlockRef>,
let _icx = bcx.insn_ctxt("cleanup_and_leave");
let mut cur = bcx, bcx = bcx;
let is_lpad = leave == none;
let mut done = false;
loop {
alt cur.kind {
block_scope(info) if info.cleanups.len() > 0u {
for cp in info.cleanup_paths {
if cp.target == leave {
option::with_option_do(vec::find(info.cleanup_paths,
{|cp| cp.target == leave})) {|cp|
Br(bcx, cp.dest);
ret;
}
done = true;
}
if done { ret; }
let sub_cx = sub_block(bcx, "cleanup");
Br(bcx, sub_cx.llbb);
info.cleanup_paths += [{target: leave, dest: sub_cx.llbb}];
@ -4318,7 +4319,7 @@ fn trans_class_ctor(ccx: @crate_ctxt, path: path, decl: ast::fn_decl,
|
|||
let mut bcx = bcx_top;
|
||||
// Initialize fields to zero so init assignments can validly
|
||||
// drop their LHS
|
||||
for field in fields {
|
||||
for fields.each {|field|
|
||||
let ix = field_idx_strict(bcx.tcx(), sp, field.ident, fields);
|
||||
bcx = zero_alloca(bcx, GEPi(bcx, selfptr, [0, ix]),
|
||||
field.mt.ty);
|
||||
|
|
|
@ -280,19 +280,13 @@ fn add_clean_free(cx: block, ptr: ValueRef, shared: bool) {
// drop glue checks whether it is zero.
fn revoke_clean(cx: block, val: ValueRef) {
in_scope_cx(cx) {|info|
let mut i = 0u;
for cu in info.cleanups {
alt cu {
clean_temp(v, _, _) if v == val {
option::with_option_do(vec::position(info.cleanups, {|cu|
alt cu { clean_temp(v, _, _) if v == val { true } _ { false } }
})) {|i|
info.cleanups =
vec::slice(info.cleanups, 0u, i) +
vec::slice(info.cleanups, i + 1u, info.cleanups.len());
scope_clean_changed(info);
break;
}
_ {}
}
i += 1u;
}
}
}
@ -152,7 +152,7 @@ fn cached_metadata<T: copy>(cache: metadata_cache, mdtag: int,
eq: fn(md: T) -> bool) -> option<T> unsafe {
if cache.contains_key(mdtag) {
let items = cache.get(mdtag);
for item in items {
for items.each {|item|
let md: T = md_from_metadata::<T>(item);
if eq(md) {
ret option::some(md);

@ -421,7 +421,7 @@ fn create_record(cx: @crate_ctxt, t: ty::t, fields: [ast::ty_field],
option::get(cx.dbg_cx).names("rec"),
line_from_span(cx.sess.codemap,
span) as int);
for field in fields {
for fields.each {|field|
let field_t = ty::get_field(t, field.node.ident).mt.ty;
let ty_md = create_ty(cx, field_t, field.node.mt.ty);
let (size, align) = size_and_align_of(cx, field_t);
@ -372,7 +372,7 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t, ty_param_map: [uint]) -> [u8] {
ty::ty_class(did, ts) {
// same as records
let mut s = [shape_struct], sub = [];
for f:field in ty::class_items_as_fields(ccx.tcx, did, ts) {
for ty::class_items_as_fields(ccx.tcx, did, ts).each {|f|
sub += shape_of(ccx, f.mt.ty, ty_param_map);
}
add_substr(s, sub);

@ -465,7 +465,7 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef {
let data_sz = vec::len(data) as u16;

let mut info_sz = 0u16;
for did_ in ccx.shape_cx.tag_order {
for ccx.shape_cx.tag_order.each {|did_|
let did = did_; // Satisfy alias checker.
let num_variants = vec::len(*ty::enum_variants(ccx.tcx, did)) as u16;
add_u16(header, header_sz + info_sz);

@ -478,7 +478,7 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef {

let mut lv_table = [];
i = 0u;
for did_ in ccx.shape_cx.tag_order {
for ccx.shape_cx.tag_order.each {|did_|
let did = did_; // Satisfy alias checker.
let variants = ty::enum_variants(ccx.tcx, did);
add_u16(info, vec::len(*variants) as u16);
@ -38,7 +38,7 @@ fn node_ids_in_fn(tcx: ty::ctxt, body: blk, rs: @mut [node_id]) {
}

fn init_vecs(ccx: crate_ctxt, node_ids: [node_id], len: uint) {
for i: node_id in node_ids {
for node_ids.each {|i|
log(debug, int::str(i) + " |-> " + uint::str(len));
add_node(ccx, i, empty_ann(len));
}
@ -37,7 +37,7 @@ fn def_id_to_str(d: def_id) -> str {
fn comma_str(args: [@constr_arg_use]) -> str {
let mut rslt = "";
let mut comma = false;
for a: @constr_arg_use in args {
for args.each {|a|
if comma { rslt += ", "; } else { comma = true; }
alt a.node {
carg_base { rslt += "*"; }

@ -66,7 +66,7 @@ fn constraint_to_str(tcx: ty::ctxt, c: sp_constr) -> str {
fn tritv_to_str(fcx: fn_ctxt, v: tritv::t) -> str {
let mut s = "";
let mut comma = false;
for p: norm_constraint in constraints(fcx) {
for constraints(fcx).each {|p|
alt tritv_get(v, p.bit_num) {
dont_care { }
tt {

@ -86,11 +86,12 @@ fn log_tritv(fcx: fn_ctxt, v: tritv::t) {

fn first_difference_string(fcx: fn_ctxt, expected: tritv::t, actual: tritv::t)
-> str {
let s: str = "";
for c: norm_constraint in constraints(fcx) {
let mut s = "";
for constraints(fcx).each {|c|
if tritv_get(expected, c.bit_num) == ttrue &&
tritv_get(actual, c.bit_num) != ttrue {
ret constraint_to_str(fcx.ccx.tcx, c.c);
s = constraint_to_str(fcx.ccx.tcx, c.c);
break;
}
}
ret s;
@ -102,7 +103,7 @@ fn log_tritv_err(fcx: fn_ctxt, v: tritv::t) {

fn tos(v: [uint]) -> str {
let mut rslt = "";
for i: uint in v {
for v.each {|i|
if i == 0u {
rslt += "0";
} else if i == 1u { rslt += "1"; } else { rslt += "?"; }

@ -539,7 +540,7 @@ fn norm_a_constraint(id: def_id, c: constraint) -> [norm_constraint] {
}
cpred(p, descs) {
let mut rslt: [norm_constraint] = [];
for pd: pred_args in *descs {
for vec::each(*descs) {|pd|
rslt +=
[{bit_num: pd.node.bit_num,
c: respan(pd.span, npred(p, id, pd.node.args))}];

@ -567,7 +568,7 @@ fn match_args(fcx: fn_ctxt, occs: @mut [pred_args],
occ: [@constr_arg_use]) -> uint {
#debug("match_args: looking at %s",
constr_args_to_str(fn@(i: inst) -> str { ret i.ident; }, occ));
for pd: pred_args in *occs {
for vec::each(*occs) {|pd|
log(debug,
"match_args: candidate " + pred_args_to_str(pd));
fn eq(p: inst, q: inst) -> bool { ret p.node == q.node; }

@ -619,7 +620,7 @@ fn expr_to_constr_arg(tcx: ty::ctxt, e: @expr) -> @constr_arg_use {
fn exprs_to_constr_args(tcx: ty::ctxt, args: [@expr]) -> [@constr_arg_use] {
let f = bind expr_to_constr_arg(tcx, _);
let mut rslt: [@constr_arg_use] = [];
for e: @expr in args { rslt += [f(e)]; }
for args.each {|e| rslt += [f(e)]; }
rslt
}
@ -653,7 +654,7 @@ fn pred_args_to_str(p: pred_args) -> str {
fn substitute_constr_args(cx: ty::ctxt, actuals: [@expr], c: @ty::constr) ->
tsconstr {
let mut rslt: [@constr_arg_use] = [];
for a: @constr_arg in c.node.args {
for c.node.args.each {|a|
rslt += [substitute_arg(cx, actuals, a)];
}
ret npred(c.node.path, c.node.id, rslt);

@ -678,7 +679,7 @@ fn substitute_arg(cx: ty::ctxt, actuals: [@expr], a: @constr_arg) ->
fn pred_args_matches(pattern: [constr_arg_general_<inst>], desc: pred_args) ->
bool {
let mut i = 0u;
for c: @constr_arg_use in desc.node.args {
for desc.node.args.each {|c|
let n = pattern[i];
alt c.node {
carg_ident(p) {

@ -702,7 +703,7 @@ fn pred_args_matches(pattern: [constr_arg_general_<inst>], desc: pred_args) ->

fn find_instance_(pattern: [constr_arg_general_<inst>], descs: [pred_args]) ->
option<uint> {
for d: pred_args in descs {
for descs.each {|d|
if pred_args_matches(pattern, d) { ret some(d.node.bit_num); }
}
ret none;

@ -720,7 +721,7 @@ fn find_instances(_fcx: fn_ctxt, subst: subst, c: constraint) ->
alt c {
cinit(_, _, _) {/* this is dealt with separately */ }
cpred(p, descs) {
for d: pred_args in *descs {
for vec::each(copy *descs) {|d|
if args_mention(d.node.args, find_in_subst_bool, subst) {
let old_bit_num = d.node.bit_num;
let newv = replace(subst, d);

@ -736,7 +737,7 @@ fn find_instances(_fcx: fn_ctxt, subst: subst, c: constraint) ->
}

fn find_in_subst(id: node_id, s: subst) -> option<inst> {
for p: {from: inst, to: inst} in s {
for s.each {|p|
if id == p.from.node { ret some(p.to); }
}
ret none;
@ -748,7 +749,7 @@ fn find_in_subst_bool(s: subst, id: node_id) -> bool {

fn insts_to_str(stuff: [constr_arg_general_<inst>]) -> str {
let mut rslt = "<";
for i: constr_arg_general_<inst> in stuff {
for stuff.each {|i|
rslt +=
" " +
alt i {

@ -763,7 +764,7 @@ fn insts_to_str(stuff: [constr_arg_general_<inst>]) -> str {

fn replace(subst: subst, d: pred_args) -> [constr_arg_general_<inst>] {
let mut rslt: [constr_arg_general_<inst>] = [];
for c: @constr_arg_use in d.node.args {
for d.node.args.each {|c|
alt c.node {
carg_ident(p) {
alt find_in_subst(p.node, subst) {

@ -872,7 +873,7 @@ fn copy_in_poststate_two(fcx: fn_ctxt, src_post: poststate,
// replace any occurrences of the src def_id with the
// dest def_id
let insts = find_instances(fcx, subst, val);
for p: {from: uint, to: uint} in insts {
for insts.each {|p|
if promises_(p.from, src_post) {
set_in_poststate_(p.to, target_post);
}
@ -887,7 +888,7 @@ fn forget_in_postcond(fcx: fn_ctxt, parent_exp: node_id, dead_v: node_id) {
let d = local_node_id_to_local_def_id(fcx, dead_v);
alt d {
some(d_id) {
for c: norm_constraint in constraints(fcx) {
for constraints(fcx).each {|c|
if constraint_mentions(fcx, c, d_id) {
#debug("clearing constraint %u %s",
c.bit_num,

@ -909,7 +910,7 @@ fn forget_in_postcond_still_init(fcx: fn_ctxt, parent_exp: node_id,
let d = local_node_id_to_local_def_id(fcx, dead_v);
alt d {
some(d_id) {
for c: norm_constraint in constraints(fcx) {
for constraints(fcx).each {|c|
if non_init_constraint_mentions(fcx, c, d_id) {
clear_in_postcond(c.bit_num,
node_id_to_ts_ann(fcx.ccx,

@ -928,7 +929,7 @@ fn forget_in_poststate(fcx: fn_ctxt, p: poststate, dead_v: node_id) -> bool {
let mut changed = false;
alt d {
some(d_id) {
for c: norm_constraint in constraints(fcx) {
for constraints(fcx).each {|c|
if constraint_mentions(fcx, c, d_id) {
changed |= clear_in_poststate_(c.bit_num, p);
}

@ -947,7 +948,7 @@ fn forget_in_poststate_still_init(fcx: fn_ctxt, p: poststate, dead_v: node_id)
let mut changed = false;
alt d {
some(d_id) {
for c: norm_constraint in constraints(fcx) {
for constraints(fcx).each {|c|
if non_init_constraint_mentions(fcx, c, d_id) {
changed |= clear_in_poststate_(c.bit_num, p);
}

@ -959,7 +960,7 @@ fn forget_in_poststate_still_init(fcx: fn_ctxt, p: poststate, dead_v: node_id)
}

fn any_eq(v: [node_id], d: node_id) -> bool {
for i: node_id in v { if i == d { ret true; } }
for v.each {|i| if i == d { ret true; } }
false
}
@ -1000,7 +1001,7 @@ fn args_mention<T>(args: [@constr_arg_use],
|
|||
ret vec::any(bind mentions(s,q,_), args);
|
||||
*/
|
||||
|
||||
for a: @constr_arg_use in args {
|
||||
for args.each {|a|
|
||||
alt a.node { carg_ident(p1) { if q(s, p1.node) { ret true; } } _ { } }
|
||||
}
|
||||
ret false;
|
||||
|
@ -1010,7 +1011,7 @@ fn use_var(fcx: fn_ctxt, v: node_id) { *fcx.enclosing.used_vars += [v]; }
|
|||
|
||||
// FIXME: This should be a function in vec::.
|
||||
fn vec_contains(v: @mut [node_id], i: node_id) -> bool {
|
||||
for d: node_id in *v { if d == i { ret true; } }
|
||||
for vec::each(*v) {|d| if d == i { ret true; } }
|
||||
ret false;
|
||||
}
|
||||
|
||||
|
@ -1029,7 +1030,7 @@ fn args_to_constr_args(tcx: ty::ctxt, args: [arg],
|
|||
indices: [@sp_constr_arg<uint>]) -> [@constr_arg_use] {
|
||||
let mut actuals: [@constr_arg_use] = [];
|
||||
let num_args = vec::len(args);
|
||||
for a: @sp_constr_arg<uint> in indices {
|
||||
for indices.each {|a|
|
||||
actuals +=
|
||||
[@respan(a.span,
|
||||
alt a.node {
|
||||
|
@ -1075,7 +1076,7 @@ fn local_to_bindings(tcx: ty::ctxt, loc: @local) -> binding {
|
|||
|
||||
fn locals_to_bindings(tcx: ty::ctxt, locals: [@local]) -> [binding] {
|
||||
let mut rslt = [];
|
||||
for loc in locals { rslt += [local_to_bindings(tcx, loc)]; }
|
||||
for locals.each {|loc| rslt += [local_to_bindings(tcx, loc)]; }
|
||||
ret rslt;
|
||||
}
|
||||
|
||||
|
@ -1085,7 +1086,7 @@ fn callee_modes(fcx: fn_ctxt, callee: node_id) -> [mode] {
|
|||
alt ty::get(ty).struct {
|
||||
ty::ty_fn({inputs: args, _}) {
|
||||
let mut modes = [];
|
||||
for arg: ty::arg in args { modes += [arg.mode]; }
|
||||
for args.each {|arg| modes += [arg.mode]; }
|
||||
ret modes;
|
||||
}
|
||||
_ {
|
||||
|
@ -1108,7 +1109,7 @@ fn callee_arg_init_ops(fcx: fn_ctxt, callee: node_id) -> [init_op] {
|
|||
fn anon_bindings(ops: [init_op], es: [@expr]) -> [binding] {
|
||||
let mut bindings: [binding] = [];
|
||||
let mut i = 0;
|
||||
for op: init_op in ops {
|
||||
for ops.each {|op|
|
||||
bindings += [{lhs: [], rhs: some({op: op, expr: es[i]})}];
|
||||
i += 1;
|
||||
}
|
||||
|
|
|
@ -67,7 +67,7 @@ fn seq_postconds(fcx: fn_ctxt, ps: [postcond]) -> postcond {
|
|||
let sz = vec::len(ps);
|
||||
if sz >= 1u {
|
||||
let prev = tritv_clone(ps[0]);
|
||||
for p: postcond in vec::slice(ps, 1u, sz) { seq_tritv(prev, p); }
|
||||
vec::iter_between(ps, 1u, sz) {|p| seq_tritv(prev, p); }
|
||||
ret prev;
|
||||
} else { ret ann::empty_poststate(num_constraints(fcx.enclosing)); }
|
||||
}
|
||||
|
|
|
@ -19,7 +19,7 @@ import std::map::hashmap;
|
|||
fn check_unused_vars(fcx: fn_ctxt) {
|
||||
|
||||
// FIXME: could be more efficient
|
||||
for c: norm_constraint in constraints(fcx) {
|
||||
for constraints(fcx).each {|c|
|
||||
alt c.c.node {
|
||||
ninit(id, v) {
|
||||
if !vec_contains(fcx.enclosing.used_vars, id) && v[0] != '_' as u8
|
||||
|
|
|
@ -27,7 +27,7 @@ fn collect_pred(e: @expr, cx: ctxt, v: visit::vt<ctxt>) {
|
|||
// If it's a call, generate appropriate instances of the
|
||||
// call's constraints.
|
||||
expr_call(operator, operands, _) {
|
||||
for c: @ty::constr in constraints_expr(cx.tcx, operator) {
|
||||
for constraints_expr(cx.tcx, operator).each {|c|
|
||||
let ct: sp_constr =
|
||||
respan(c.span,
|
||||
aux::substitute_constr_args(cx.tcx, operands, c));
|
||||
|
@ -105,20 +105,21 @@ fn mk_fn_info(ccx: crate_ctxt,
|
|||
/* now we have to add bit nums for both the constraints
|
||||
and the variables... */
|
||||
|
||||
for c: sp_constr in { *cx.cs } {
|
||||
next = add_constraint(cx.tcx, c, next, res_map);
|
||||
let mut i = 0u, l = vec::len(*cx.cs);
|
||||
while i < l {
|
||||
next = add_constraint(cx.tcx, cx.cs[i], next, res_map);
|
||||
i += 1u;
|
||||
}
|
||||
/* if this function has any constraints, instantiate them to the
|
||||
argument names and add them */
|
||||
let mut sc;
|
||||
for c: @constr in f_decl.constraints {
|
||||
sc = ast_constr_to_sp_constr(cx.tcx, f_decl.inputs, c);
|
||||
for f_decl.constraints.each {|c|
|
||||
let sc = ast_constr_to_sp_constr(cx.tcx, f_decl.inputs, c);
|
||||
next = add_constraint(cx.tcx, sc, next, res_map);
|
||||
}
|
||||
|
||||
/* Need to add constraints for args too, b/c they
|
||||
can be deinitialized */
|
||||
for a: arg in f_decl.inputs {
|
||||
for f_decl.inputs.each {|a|
|
||||
next = add_constraint(
|
||||
cx.tcx,
|
||||
respan(f_sp, ninit(a.id, a.ident)),
|
||||
|
|
|
@ -60,7 +60,9 @@ fn find_pre_post_item(ccx: crate_ctxt, i: item) {
|
|||
item_class(_,_,_) {
|
||||
fail "find_pre_post_item: implement item_class";
|
||||
}
|
||||
item_impl(_, _, _, ms) { for m in ms { find_pre_post_method(ccx, m); } }
|
||||
item_impl(_, _, _, ms) {
|
||||
for ms.each {|m| find_pre_post_method(ccx, m); }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -75,7 +77,7 @@ fn find_pre_post_exprs(fcx: fn_ctxt, args: [@expr], id: node_id) {
|
|||
log_expr(*args[0]);
|
||||
}
|
||||
fn do_one(fcx: fn_ctxt, e: @expr) { find_pre_post_expr(fcx, e); }
|
||||
for e: @expr in args { do_one(fcx, e); }
|
||||
for args.each {|e| do_one(fcx, e); }
|
||||
|
||||
fn get_pp(ccx: crate_ctxt, &&e: @expr) -> pre_and_post {
|
||||
ret expr_pp(ccx, e);
|
||||
|
@ -282,7 +284,7 @@ fn forget_args_moved_in(fcx: fn_ctxt, parent: @expr, modes: [mode],
|
|||
fn find_pre_post_expr_fn_upvars(fcx: fn_ctxt, e: @expr) {
|
||||
let rslt = expr_pp(fcx.ccx, e);
|
||||
clear_pp(rslt);
|
||||
for def in *freevars::get_freevars(fcx.ccx.tcx, e.id) {
|
||||
for vec::each(*freevars::get_freevars(fcx.ccx.tcx, e.id)) {|def|
|
||||
log(debug, ("handle_var_def: def=", def));
|
||||
handle_var_def(fcx, rslt, def.def, "upvar");
|
||||
}
|
||||
|
@ -304,7 +306,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
|
|||
|
||||
find_pre_post_exprs(fcx, args, e.id);
|
||||
/* see if the call has any constraints on its type */
|
||||
for c: @ty::constr in constraints_expr(fcx.ccx.tcx, operator) {
|
||||
for constraints_expr(fcx.ccx.tcx, operator).each {|c|
|
||||
let i =
|
||||
bit_num(fcx, substitute_constr_args(fcx.ccx.tcx, args, c));
|
||||
require(i, expr_pp(fcx.ccx, e));
|
||||
|
@ -466,7 +468,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
|
|||
ret block_pp(fcx.ccx, an_alt.body);
|
||||
}
|
||||
let mut alt_pps = [];
|
||||
for a: arm in alts { alt_pps += [do_an_alt(fcx, a)]; }
|
||||
for alts.each {|a| alt_pps += [do_an_alt(fcx, a)]; }
|
||||
fn combine_pp(antec: pre_and_post, fcx: fn_ctxt, &&pp: pre_and_post,
|
||||
&&next: pre_and_post) -> pre_and_post {
|
||||
union(pp.precondition, seq_preconds(fcx, [antec, next]));
|
||||
|
@ -517,7 +519,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
|
|||
let mut cmodes = callee_modes(fcx, operator.id);
|
||||
let mut modes = [];
|
||||
let mut i = 0;
|
||||
for expr_opt: option<@expr> in maybe_args {
|
||||
for maybe_args.each {|expr_opt|
|
||||
alt expr_opt {
|
||||
none {/* no-op */ }
|
||||
some(expr) { modes += [cmodes[i]]; args += [expr]; }
|
||||
|
@ -541,9 +543,8 @@ fn find_pre_post_stmt(fcx: fn_ctxt, s: stmt) {
|
|||
stmt_decl(adecl, id) {
|
||||
alt adecl.node {
|
||||
decl_local(alocals) {
|
||||
let mut e_pp;
|
||||
let prev_pp = empty_pre_post(num_constraints(fcx.enclosing));
|
||||
for alocal in alocals {
|
||||
for alocals.each {|alocal|
|
||||
alt alocal.node.init {
|
||||
some(an_init) {
|
||||
/* LHS always becomes initialized,
|
||||
|
@ -586,7 +587,7 @@ fn find_pre_post_stmt(fcx: fn_ctxt, s: stmt) {
|
|||
|
||||
/* Clear out anything that the previous initializer
|
||||
guaranteed */
|
||||
e_pp = expr_pp(fcx.ccx, an_init.expr);
|
||||
let e_pp = expr_pp(fcx.ccx, an_init.expr);
|
||||
tritv_copy(prev_pp.precondition,
|
||||
seq_preconds(fcx, [prev_pp, e_pp]));
|
||||
/* Include the LHSs too, since those aren't in the
|
||||
|
@ -650,13 +651,13 @@ fn find_pre_post_block(fcx: fn_ctxt, b: blk) {
|
|||
log_pp_err(stmt_pp(fcx.ccx, *s));
|
||||
*/
|
||||
}
|
||||
for s: @stmt in b.node.stmts { do_one_(fcx, s); }
|
||||
for b.node.stmts.each {|s| do_one_(fcx, s); }
|
||||
fn do_inner_(fcx: fn_ctxt, &&e: @expr) { find_pre_post_expr(fcx, e); }
|
||||
let do_inner = bind do_inner_(fcx, _);
|
||||
option::map::<@expr, ()>(b.node.expr, do_inner);
|
||||
|
||||
let mut pps: [pre_and_post] = [];
|
||||
for s: @stmt in b.node.stmts { pps += [stmt_pp(fcx.ccx, *s)]; }
|
||||
for b.node.stmts.each {|s| pps += [stmt_pp(fcx.ccx, *s)]; }
|
||||
alt b.node.expr {
|
||||
none {/* no-op */ }
|
||||
some(e) { pps += [expr_pp(fcx.ccx, e)]; }
|
||||
|
@ -665,7 +666,7 @@ fn find_pre_post_block(fcx: fn_ctxt, b: blk) {
|
|||
let block_precond = seq_preconds(fcx, pps);
|
||||
|
||||
let mut postconds = [];
|
||||
for pp: pre_and_post in pps { postconds += [get_post(pp)]; }
|
||||
for pps.each {|pp| postconds += [get_post(pp)]; }
|
||||
|
||||
/* A block may be empty, so this next line ensures that the postconds
|
||||
vector is non-empty. */
|
||||
|
|
|
@ -67,14 +67,14 @@ fn seq_states(fcx: fn_ctxt, pres: prestate, bindings: [binding]) ->
|
|||
{changed: bool, post: poststate} {
|
||||
let mut changed = false;
|
||||
let mut post = tritv_clone(pres);
|
||||
for b: binding in bindings {
|
||||
for bindings.each {|b|
|
||||
alt b.rhs {
|
||||
some(an_init) {
|
||||
// an expression, with or without a destination
|
||||
changed |=
|
||||
find_pre_post_state_expr(fcx, post, an_init.expr) || changed;
|
||||
post = tritv_clone(expr_poststate(fcx.ccx, an_init.expr));
|
||||
for i: inst in b.lhs {
|
||||
for b.lhs.each {|i|
|
||||
alt an_init.expr.node {
|
||||
expr_path(p) {
|
||||
handle_move_or_copy(fcx, post, p, an_init.expr.id, i,
|
||||
|
@ -91,7 +91,7 @@ fn seq_states(fcx: fn_ctxt, pres: prestate, bindings: [binding]) ->
|
|||
}
|
||||
}
|
||||
none {
|
||||
for i: inst in b.lhs {
|
||||
for b.lhs.each {|i|
|
||||
// variables w/o an initializer
|
||||
clear_in_poststate_ident_(fcx, i.node, i.ident, post);
|
||||
}
|
||||
|
@ -375,7 +375,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
|
|||
let callee_ops = callee_arg_init_ops(fcx, operator.id);
|
||||
let mut ops = [];
|
||||
let mut i = 0;
|
||||
for a_opt: option<@expr> in maybe_args {
|
||||
for maybe_args.each {|a_opt|
|
||||
alt a_opt {
|
||||
none {/* no-op */ }
|
||||
some(a) { ops += [callee_ops[i]]; args += [a]; }
|
||||
|
@ -575,7 +575,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
|
|||
let mut a_post;
|
||||
if vec::len(alts) > 0u {
|
||||
a_post = false_postcond(num_constrs);
|
||||
for an_alt: arm in alts {
|
||||
for alts.each {|an_alt|
|
||||
alt an_alt.guard {
|
||||
some(e) {
|
||||
changed |= find_pre_post_state_expr(fcx, e_post, e);
|
||||
|
@ -702,7 +702,7 @@ fn find_pre_post_state_block(fcx: fn_ctxt, pres0: prestate, b: blk) -> bool {
|
|||
initializes. Then <pres> becomes the new poststate. */
|
||||
|
||||
let mut changed = false;
|
||||
for s: @stmt in b.node.stmts {
|
||||
for b.node.stmts.each {|s|
|
||||
changed |= find_pre_post_state_stmt(fcx, pres, s);
|
||||
pres = stmt_poststate(fcx.ccx, *s);
|
||||
}
|
||||
|
@ -745,12 +745,12 @@ fn find_pre_post_state_fn(fcx: fn_ctxt,
|
|||
|
||||
// Arguments start out initialized
|
||||
let block_pre = block_prestate(fcx.ccx, f_body);
|
||||
for a: arg in f_decl.inputs {
|
||||
for f_decl.inputs.each {|a|
|
||||
set_in_prestate_constr(fcx, ninit(a.id, a.ident), block_pre);
|
||||
}
|
||||
|
||||
// Instantiate any constraints on the arguments so we can use them
|
||||
for c: @constr in f_decl.constraints {
|
||||
for f_decl.constraints.each {|c|
|
||||
let tsc = ast_constr_to_ts_constr(fcx.ccx.tcx, f_decl.inputs, c);
|
||||
set_in_prestate_constr(fcx, tsc, block_pre);
|
||||
}
|
||||
|
|
|
@ -372,7 +372,7 @@ impl of vid for region_vid {
|
|||
|
||||
fn param_bounds_to_kind(bounds: param_bounds) -> kind {
|
||||
let mut kind = kind_noncopyable;
|
||||
for bound in *bounds {
|
||||
for vec::each(*bounds) {|bound|
|
||||
alt bound {
|
||||
bound_copy {
|
||||
if kind != kind_sendable { kind = kind_copyable; }
|
||||
|
@ -464,7 +464,9 @@ fn mk_t_with_id(cx: ctxt, st: sty, o_def_id: option<ast::def_id>) -> t {
|
|||
ty_param(_, _) { has_params = true; }
|
||||
ty_var(_) | ty_self(_) { has_vars = true; }
|
||||
ty_enum(_, tys) | ty_iface(_, tys) | ty_class(_, tys) {
|
||||
for tt in tys { derive_flags(has_params, has_vars, has_rptrs, tt); }
|
||||
for tys.each {|tt|
|
||||
derive_flags(has_params, has_vars, has_rptrs, tt);
|
||||
}
|
||||
}
|
||||
ty_box(m) | ty_uniq(m) | ty_vec(m) | ty_ptr(m) {
|
||||
derive_flags(has_params, has_vars, has_rptrs, m.ty);
|
||||
|
@ -478,22 +480,24 @@ fn mk_t_with_id(cx: ctxt, st: sty, o_def_id: option<ast::def_id>) -> t {
|
|||
derive_flags(has_params, has_vars, has_rptrs, m.ty);
|
||||
}
|
||||
ty_rec(flds) {
|
||||
for f in flds {
|
||||
for flds.each {|f|
|
||||
derive_flags(has_params, has_vars, has_rptrs, f.mt.ty);
|
||||
}
|
||||
}
|
||||
ty_tup(ts) {
|
||||
for tt in ts { derive_flags(has_params, has_vars, has_rptrs, tt); }
|
||||
for ts.each {|tt| derive_flags(has_params, has_vars, has_rptrs, tt); }
|
||||
}
|
||||
ty_fn(f) {
|
||||
for a in f.inputs {
|
||||
for f.inputs.each {|a|
|
||||
derive_flags(has_params, has_vars, has_rptrs, a.ty);
|
||||
}
|
||||
derive_flags(has_params, has_vars, has_rptrs, f.output);
|
||||
}
|
||||
ty_res(_, tt, tps) {
|
||||
derive_flags(has_params, has_vars, has_rptrs, tt);
|
||||
for tt in tps { derive_flags(has_params, has_vars, has_rptrs, tt); }
|
||||
for tps.each {|tt|
|
||||
derive_flags(has_params, has_vars, has_rptrs, tt);
|
||||
}
|
||||
}
|
||||
ty_constr(tt, _) {
|
||||
derive_flags(has_params, has_vars, has_rptrs, tt);
|
||||
|
@ -633,19 +637,19 @@ fn maybe_walk_ty(ty: t, f: fn(t) -> bool) {
|
|||
}
|
||||
ty_enum(_, subtys) | ty_iface(_, subtys) | ty_class(_, subtys)
|
||||
| ty_self(subtys) {
|
||||
for subty: t in subtys { maybe_walk_ty(subty, f); }
|
||||
for subtys.each {|subty| maybe_walk_ty(subty, f); }
|
||||
}
|
||||
ty_rec(fields) {
|
||||
for fl: field in fields { maybe_walk_ty(fl.mt.ty, f); }
|
||||
for fields.each {|fl| maybe_walk_ty(fl.mt.ty, f); }
|
||||
}
|
||||
ty_tup(ts) { for tt in ts { maybe_walk_ty(tt, f); } }
|
||||
ty_tup(ts) { for ts.each {|tt| maybe_walk_ty(tt, f); } }
|
||||
ty_fn(ft) {
|
||||
for a: arg in ft.inputs { maybe_walk_ty(a.ty, f); }
|
||||
for ft.inputs.each {|a| maybe_walk_ty(a.ty, f); }
|
||||
maybe_walk_ty(ft.output, f);
|
||||
}
|
||||
ty_res(_, sub, tps) {
|
||||
maybe_walk_ty(sub, f);
|
||||
for tp: t in tps { maybe_walk_ty(tp, f); }
|
||||
for tps.each {|tp| maybe_walk_ty(tp, f); }
|
||||
}
|
||||
ty_constr(sub, _) { maybe_walk_ty(sub, f); }
|
||||
ty_uniq(tm) { maybe_walk_ty(tm.ty, f); }
|
||||
|
@ -895,23 +899,24 @@ fn type_needs_drop(cx: ctxt, ty: t) -> bool {
|
|||
ty_nil | ty_bot | ty_bool | ty_int(_) | ty_float(_) | ty_uint(_) |
|
||||
ty_type | ty_ptr(_) | ty_rptr(_, _) { false }
|
||||
ty_rec(flds) {
|
||||
for f in flds { if type_needs_drop(cx, f.mt.ty) { accum = true; } }
|
||||
for flds.each {|f| if type_needs_drop(cx, f.mt.ty) { accum = true; } }
|
||||
accum
|
||||
}
|
||||
ty_class(did, ts) {
|
||||
for f in ty::class_items_as_fields(cx, did, ts)
|
||||
{ if type_needs_drop(cx, f.mt.ty) { accum = true; } }
|
||||
for vec::each(ty::class_items_as_fields(cx, did, ts)) {|f|
|
||||
if type_needs_drop(cx, f.mt.ty) { accum = true; }
|
||||
}
|
||||
accum
|
||||
}
|
||||
|
||||
ty_tup(elts) {
|
||||
for m in elts { if type_needs_drop(cx, m) { accum = true; } }
|
||||
for elts.each {|m| if type_needs_drop(cx, m) { accum = true; } }
|
||||
accum
|
||||
}
|
||||
ty_enum(did, tps) {
|
||||
let variants = enum_variants(cx, did);
|
||||
for variant in *variants {
|
||||
for aty in variant.args {
|
||||
for vec::each(*variants) {|variant|
|
||||
for variant.args.each {|aty|
|
||||
// Perform any type parameter substitutions.
|
||||
let arg_ty = substitute_type_params(cx, tps, aty);
|
||||
if type_needs_drop(cx, arg_ty) { accum = true; }
|
||||
|
@ -968,8 +973,8 @@ fn type_needs_unwind_cleanup_(cx: ctxt, ty: t,
|
|||
true
|
||||
}
|
||||
ty_enum(did, tps) {
|
||||
for v in *enum_variants(cx, did) {
|
||||
for aty in v.args {
|
||||
for vec::each(*enum_variants(cx, did)) {|v|
|
||||
for v.args.each {|aty|
|
||||
let t = substitute_type_params(cx, tps, aty);
|
||||
needs_unwind_cleanup |=
|
||||
type_needs_unwind_cleanup_(cx, t, tycache,
|
||||
|
@ -1068,13 +1073,15 @@ fn type_kind(cx: ctxt, ty: t) -> kind {
|
|||
// Records lower to the lowest of their members.
|
||||
ty_rec(flds) {
|
||||
let mut lowest = kind_sendable;
|
||||
for f in flds { lowest = lower_kind(lowest, type_kind(cx, f.mt.ty)); }
|
||||
for flds.each {|f|
|
||||
lowest = lower_kind(lowest, type_kind(cx, f.mt.ty));
|
||||
}
|
||||
lowest
|
||||
}
|
||||
// Tuples lower to the lowest of their members.
|
||||
ty_tup(tys) {
|
||||
let mut lowest = kind_sendable;
|
||||
for ty in tys { lowest = lower_kind(lowest, type_kind(cx, ty)); }
|
||||
for tys.each {|ty| lowest = lower_kind(lowest, type_kind(cx, ty)); }
|
||||
lowest
|
||||
}
|
||||
// Enums lower to the lowest of their variants.
|
||||
|
@ -1084,8 +1091,8 @@ fn type_kind(cx: ctxt, ty: t) -> kind {
|
|||
if vec::len(*variants) == 0u {
|
||||
lowest = kind_noncopyable;
|
||||
} else {
|
||||
for variant in *variants {
|
||||
for aty in variant.args {
|
||||
for vec::each(*variants) {|variant|
|
||||
for variant.args.each {|aty|
|
||||
// Perform any type parameter substitutions.
|
||||
let arg_ty = substitute_type_params(cx, tps, aty);
|
||||
lowest = lower_kind(lowest, type_kind(cx, arg_ty));
|
||||
|
@ -1246,8 +1253,8 @@ fn type_structurally_contains(cx: ctxt, ty: t, test: fn(sty) -> bool) ->
|
|||
if test(sty) { ret true; }
|
||||
alt sty {
|
||||
ty_enum(did, tps) {
|
||||
for variant in *enum_variants(cx, did) {
|
||||
for aty in variant.args {
|
||||
for vec::each(*enum_variants(cx, did)) {|variant|
|
||||
for variant.args.each {|aty|
|
||||
let sty = substitute_type_params(cx, tps, aty);
|
||||
if type_structurally_contains(cx, sty, test) { ret true; }
|
||||
}
|
||||
|
@ -1255,13 +1262,13 @@ fn type_structurally_contains(cx: ctxt, ty: t, test: fn(sty) -> bool) ->
|
|||
ret false;
|
||||
}
|
||||
ty_rec(fields) {
|
||||
for field in fields {
|
||||
for fields.each {|field|
|
||||
if type_structurally_contains(cx, field.mt.ty, test) { ret true; }
|
||||
}
|
||||
ret false;
|
||||
}
|
||||
ty_tup(ts) {
|
||||
for tt in ts {
|
||||
for ts.each {|tt|
|
||||
if type_structurally_contains(cx, tt, test) { ret true; }
|
||||
}
|
||||
ret false;
|
||||
|
@ -1342,7 +1349,7 @@ fn type_is_pod(cx: ctxt, ty: t) -> bool {
|
|||
// Structural types
|
||||
ty_enum(did, tps) {
|
||||
let variants = enum_variants(cx, did);
|
||||
for variant: variant_info in *variants {
|
||||
for vec::each(*variants) {|variant|
|
||||
let tup_ty = mk_tup(cx, variant.args);
|
||||
|
||||
// Perform any type parameter substitutions.
|
||||
|
@ -1351,12 +1358,12 @@ fn type_is_pod(cx: ctxt, ty: t) -> bool {
|
|||
}
|
||||
}
|
||||
ty_rec(flds) {
|
||||
for f: field in flds {
|
||||
for flds.each {|f|
|
||||
if !type_is_pod(cx, f.mt.ty) { result = false; }
|
||||
}
|
||||
}
|
||||
ty_tup(elts) {
|
||||
for elt in elts { if !type_is_pod(cx, elt) { result = false; } }
|
||||
for elts.each {|elt| if !type_is_pod(cx, elt) { result = false; } }
|
||||
}
|
||||
ty_res(_, inner, tps) {
|
||||
result = type_is_pod(cx, substitute_type_params(cx, tps, inner));
|
||||
|
@ -1452,14 +1459,14 @@ fn hash_type_structure(st: sty) -> uint {
|
|||
fn hash_subty(id: uint, subty: t) -> uint { (id << 2u) + type_id(subty) }
|
||||
fn hash_subtys(id: uint, subtys: [t]) -> uint {
|
||||
let mut h = id;
|
||||
for s in subtys { h = (h << 2u) + type_id(s) }
|
||||
for subtys.each {|s| h = (h << 2u) + type_id(s) }
|
||||
h
|
||||
}
|
||||
fn hash_type_constr(id: uint, c: @type_constr) -> uint {
|
||||
let mut h = id;
|
||||
h = (h << 2u) + hash_def(h, c.node.id);
|
||||
// FIXME this makes little sense
|
||||
for a in c.node.args {
|
||||
for c.node.args.each {|a|
|
||||
alt a.node {
|
||||
carg_base { h += h << 2u; }
|
||||
carg_lit(_) { fail "lit args not implemented yet"; }
|
||||
|
@ -1499,27 +1506,27 @@ fn hash_type_structure(st: sty) -> uint {
|
|||
ty_str { 17u }
|
||||
ty_enum(did, tys) {
|
||||
let mut h = hash_def(18u, did);
|
||||
for typ: t in tys { h = hash_subty(h, typ); }
|
||||
for tys.each {|typ| h = hash_subty(h, typ); }
|
||||
h
|
||||
}
|
||||
ty_box(mt) { hash_subty(19u, mt.ty) }
|
||||
ty_vec(mt) { hash_subty(21u, mt.ty) }
|
||||
ty_rec(fields) {
|
||||
let mut h = 26u;
|
||||
for f in fields { h = hash_subty(h, f.mt.ty); }
|
||||
for fields.each {|f| h = hash_subty(h, f.mt.ty); }
|
||||
h
|
||||
}
|
||||
ty_tup(ts) { hash_subtys(25u, ts) }
|
||||
ty_fn(f) {
|
||||
let mut h = 27u;
|
||||
for a in f.inputs { h = hash_subty(h, a.ty); }
|
||||
for f.inputs.each {|a| h = hash_subty(h, a.ty); }
|
||||
hash_subty(h, f.output)
|
||||
}
|
||||
ty_var(v) { hash_uint(30u, v.to_uint()) }
|
||||
ty_param(pid, did) { hash_def(hash_uint(31u, pid), did) }
|
||||
ty_self(ts) {
|
||||
let mut h = 28u;
|
||||
for t in ts { h = hash_subty(h, t); }
|
||||
for ts.each {|t| h = hash_subty(h, t); }
|
||||
h
|
||||
}
|
||||
ty_type { 32u }
|
||||
|
@ -1535,13 +1542,13 @@ fn hash_type_structure(st: sty) -> uint {
|
|||
}
|
||||
ty_constr(t, cs) {
|
||||
let mut h = hash_subty(36u, t);
|
||||
for c in cs { h = (h << 2u) + hash_type_constr(h, c); }
|
||||
for cs.each {|c| h = (h << 2u) + hash_type_constr(h, c); }
|
||||
h
|
||||
}
|
||||
ty_uniq(mt) { hash_subty(37u, mt.ty) }
|
||||
ty_iface(did, tys) {
|
||||
let mut h = hash_def(40u, did);
|
||||
for typ: t in tys { h = hash_subty(h, typ); }
|
||||
for tys.each {|typ| h = hash_subty(h, typ); }
|
||||
h
|
||||
}
|
||||
ty_opaque_closure_ptr(ck_block) { 41u }
|
||||
|
@ -1550,7 +1557,7 @@ fn hash_type_structure(st: sty) -> uint {
|
|||
ty_opaque_box { 44u }
|
||||
ty_class(did, tys) {
|
||||
let mut h = hash_def(45u, did);
|
||||
for typ: t in tys { h = hash_subty(h, typ); }
|
||||
for tys.each {|typ| h = hash_subty(h, typ); }
|
||||
h
|
||||
}
|
||||
}
|
||||
|
@ -1579,7 +1586,7 @@ fn args_eq<T>(eq: fn(T, T) -> bool,
|
|||
a: [@sp_constr_arg<T>],
|
||||
b: [@sp_constr_arg<T>]) -> bool {
|
||||
let mut i: uint = 0u;
|
||||
for arg: @sp_constr_arg<T> in a {
|
||||
for a.each {|arg|
|
||||
if !arg_eq(eq, arg, b[i]) { ret false; }
|
||||
i += 1u;
|
||||
}
|
||||
|
@ -1596,7 +1603,7 @@ fn constr_eq(c: @constr, d: @constr) -> bool {
|
|||
fn constrs_eq(cs: [@constr], ds: [@constr]) -> bool {
|
||||
if vec::len(cs) != vec::len(ds) { ret false; }
|
||||
let mut i = 0u;
|
||||
for c: @constr in cs { if !constr_eq(c, ds[i]) { ret false; } i += 1u; }
|
||||
for cs.each {|c| if !constr_eq(c, ds[i]) { ret false; } i += 1u; }
|
||||
ret true;
|
||||
}
|
||||
|
||||
|
@ -1721,7 +1728,7 @@ fn stmt_node_id(s: @ast::stmt) -> ast::node_id {
|
|||
|
||||
fn field_idx(id: ast::ident, fields: [field]) -> option<uint> {
|
||||
let mut i = 0u;
|
||||
for f in fields { if f.ident == id { ret some(i); } i += 1u; }
|
||||
for fields.each {|f| if f.ident == id { ret some(i); } i += 1u; }
|
||||
ret none;
|
||||
}
|
||||
|
||||
|
@ -1739,7 +1746,7 @@ fn get_fields(rec_ty:t) -> [field] {
|
|||
|
||||
fn method_idx(id: ast::ident, meths: [method]) -> option<uint> {
|
||||
let mut i = 0u;
|
||||
for m in meths { if m.ident == id { ret some(i); } i += 1u; }
|
||||
for meths.each {|m| if m.ident == id { ret some(i); } i += 1u; }
|
||||
ret none;
|
||||
}
|
||||
|
||||
|
@ -2214,7 +2221,7 @@ fn lookup_class_method_by_name(cx:ctxt, did: ast::def_id, name: ident,
|
|||
sp: span) -> def_id {
|
||||
if check is_local(did) {
|
||||
let ms = lookup_class_method_ids(cx, did);
|
||||
for m in ms {
|
||||
for ms.each {|m|
|
||||
if m.name == name {
|
||||
ret ast_util::local_def(m.id);
|
||||
}
|
||||
|
@ -2229,7 +2236,7 @@ fn lookup_class_method_by_name(cx:ctxt, did: ast::def_id, name: ident,
|
|||
|
||||
fn class_field_tys(items: [@class_member]) -> [field_ty] {
|
||||
let mut rslt = [];
|
||||
for it in items {
|
||||
for items.each {|it|
|
||||
alt it.node {
|
||||
instance_var(nm, _, cm, id, privacy) {
|
||||
rslt += [{ident: nm, id: ast_util::local_def(id),
|
||||
|
@ -2247,7 +2254,7 @@ fn class_field_tys(items: [@class_member]) -> [field_ty] {
|
|||
fn class_items_as_fields(cx:ctxt, did: ast::def_id, substs: [ty::t])
|
||||
-> [field] {
|
||||
let mut rslt = [];
|
||||
for f in lookup_class_fields(cx, did) {
|
||||
for lookup_class_fields(cx, did).each {|f|
|
||||
// consider all instance vars mut, because the
|
||||
// constructor may mutate all vars
|
||||
rslt += [{ident: f.ident, mt:
|
||||
|
|
|
@ -317,7 +317,7 @@ fn ast_ty_to_ty(tcx: ty::ctxt, mode: mode, &&ast_ty: @ast::ty) -> ty::t {
|
|||
tcx.sess.span_fatal(sp, "wrong number of type arguments for a \
|
||||
polymorphic type");
|
||||
}
|
||||
for ast_ty: @ast::ty in args {
|
||||
for args.each {|ast_ty|
|
||||
param_bindings += [do_ast_ty_to_ty(tcx, mode, ast_ty)];
|
||||
}
|
||||
#debug("substituting(%s into %s)",
|
||||
|
@ -379,7 +379,7 @@ fn ast_ty_to_ty(tcx: ty::ctxt, mode: mode, &&ast_ty: @ast::ty) -> ty::t {
|
|||
}
|
||||
ast::ty_rec(fields) {
|
||||
let mut flds: [field] = [];
|
||||
for f: ast::ty_field in fields {
|
||||
for fields.each {|f|
|
||||
let tm = ast_mt_to_mt(tcx, mode, f.node.mt);
|
||||
flds += [{ident: f.node.ident, mt: tm}];
|
||||
}
|
||||
|
@ -436,7 +436,7 @@ fn ast_ty_to_ty(tcx: ty::ctxt, mode: mode, &&ast_ty: @ast::ty) -> ty::t {
|
|||
}
|
||||
ast::ty_constr(t, cs) {
|
||||
let mut out_cs = [];
|
||||
for constr: @ast::ty_constr in cs {
|
||||
for cs.each {|constr|
|
||||
out_cs += [ty::ast_constr_to_constr(tcx, constr)];
|
||||
}
|
||||
ty::mk_constr(tcx, do_ast_ty_to_ty(tcx, mode, t), out_cs)
|
||||
|
@ -699,12 +699,12 @@ fn ty_of_native_fn_decl(tcx: ty::ctxt, mode: mode, decl: ast::fn_decl,
|
|||
fn ty_param_bounds(tcx: ty::ctxt, mode: mode, params: [ast::ty_param])
|
||||
-> @[ty::param_bounds] {
|
||||
let mut result = [];
|
||||
for param in params {
|
||||
for params.each {|param|
|
||||
result += [alt tcx.ty_param_bounds.find(param.id) {
|
||||
some(bs) { bs }
|
||||
none {
|
||||
let mut bounds = [];
|
||||
for b in *param.bounds {
|
||||
for vec::each(*param.bounds) {|b|
|
||||
bounds += [alt b {
|
||||
ast::bound_send { ty::bound_send }
|
||||
ast::bound_copy { ty::bound_copy }
|
||||
|
@ -994,7 +994,7 @@ mod collect {
|
|||
variants: [ast::variant],
|
||||
ty_params: [ast::ty_param]) {
|
||||
// Create a set of parameter types shared among all the variants.
|
||||
for variant in variants {
|
||||
for variants.each {|variant|
|
||||
// Nullary enum constructors get turned into constants; n-ary enum
|
||||
// constructors get turned into functions.
|
||||
let result_ty = if vec::len(variant.node.args) == 0u {
|
||||
|
@ -1003,7 +1003,7 @@ mod collect {
|
|||
// As above, tell ast_ty_to_ty() that trans_ty_item_to_ty()
|
||||
// should be called to resolve named types.
|
||||
let mut args: [arg] = [];
|
||||
for va: ast::variant_arg in variant.node.args {
|
||||
for variant.node.args.each {|va|
|
||||
let arg_ty = {
|
||||
// NDM We need BOUNDS here. It should be that this
|
||||
// yields a type like "foo &anon". Basically every
|
||||
|
@ -1061,7 +1061,7 @@ mod collect {
|
|||
i_bounds: @[ty::param_bounds], maybe_self: option<ty::t>)
|
||||
-> [{mty: ty::method, id: ast::node_id, span: span}] {
|
||||
let mut my_methods = [];
|
||||
for m in ms {
|
||||
for ms.each {|m|
|
||||
alt maybe_self {
|
||||
some(selfty) {
|
||||
write_ty_to_tcx(tcx, m.self_id, selfty);
|
||||
|
@ -1086,7 +1086,7 @@ mod collect {
|
|||
ast::item_native_mod(m) {
|
||||
if syntax::attr::native_abi(it.attrs) ==
|
||||
either::right(ast::native_abi_rust_intrinsic) {
|
||||
for item in m.items { check_intrinsic_type(tcx, item); }
|
||||
for m.items.each {|item| check_intrinsic_type(tcx, item); }
|
||||
}
|
||||
}
|
||||
ast::item_enum(variants, ty_params) {
|
||||
|
@ -1115,7 +1115,7 @@ mod collect {
|
|||
if did.crate == ast::local_crate {
|
||||
ensure_iface_methods(tcx, did.node);
|
||||
}
|
||||
for if_m in *ty::iface_methods(tcx, did) {
|
||||
for vec::each(*ty::iface_methods(tcx, did)) {|if_m|
|
||||
alt vec::find(my_methods,
|
||||
{|m| if_m.ident == m.mty.ident}) {
|
||||
some({mty: m, id, span}) {
|
||||
|
@ -1195,7 +1195,7 @@ mod collect {
|
|||
/* FIXME: check for proper public/privateness */
|
||||
// Write the type of each of the members
|
||||
let (fields, methods) = split_class_items(members);
|
||||
for f in fields {
|
||||
for fields.each {|f|
|
||||
convert_class_item(tcx, f);
|
||||
}
|
||||
// The selfty is just the class type
|
||||
|
@ -1346,7 +1346,7 @@ mod demand {
|
|||
|
||||
let mut ty_param_substs: [mut ty::t] = [mut];
|
||||
let mut ty_param_subst_var_ids: [ty_vid] = [];
|
||||
for ty_param_subst: ty::t in ty_param_substs_0 {
|
||||
for ty_param_substs_0.each {|ty_param_subst|
|
||||
// Generate a type variable and unify it with the type parameter
|
||||
// substitution. We will then pull out these type variables.
|
||||
let t_0 = next_ty_var(fcx);
|
||||
|
@ -1359,7 +1359,7 @@ mod demand {
|
|||
ty_param_subst_var_ids: [ty_vid]) ->
|
||||
ty_param_substs_and_ty {
|
||||
let mut result_ty_param_substs: [ty::t] = [];
|
||||
for var_id in ty_param_subst_var_ids {
|
||||
for ty_param_subst_var_ids.each {|var_id|
|
||||
let tp_subst = ty::mk_var(fcx.ccx.tcx, var_id);
|
||||
result_ty_param_substs += [tp_subst];
|
||||
}
|
||||
|
@ -1405,7 +1405,7 @@ fn variant_arg_types(ccx: @crate_ctxt, _sp: span, vid: ast::def_id,
|
|||
alt ty::get(tpt.ty).struct {
|
||||
ty::ty_fn(f) {
|
||||
// N-ary variant.
|
||||
for arg: ty::arg in f.inputs {
|
||||
for f.inputs.each {|arg|
|
||||
let arg_ty =
|
||||
ty::substitute_type_params(ccx.tcx, enum_ty_params, arg.ty);
|
||||
result += [arg_ty];
|
||||
|
@ -1463,7 +1463,7 @@ mod writeback {
|
|||
alt fcx.opt_node_ty_substs(id) {
|
||||
some(substs) {
|
||||
let mut new_substs = [];
|
||||
for subst: ty::t in substs {
|
||||
for substs.each {|subst|
|
||||
alt resolve_type_vars_in_type(fcx, sp, subst) {
|
||||
some(t) { new_substs += [t]; }
|
||||
none { wbcx.success = false; ret none; }
|
||||
|
@ -1602,7 +1602,7 @@ mod writeback {
|
|||
visit_local: visit_local
|
||||
with *visit::default_visitor()});
|
||||
visit.visit_block(blk, wbcx, visit);
|
||||
for arg in decl.inputs {
|
||||
for decl.inputs.each {|arg|
|
||||
resolve_type_vars_for_node(wbcx, arg.ty.span, arg.id);
|
||||
}
|
||||
ret wbcx.success;
|
||||
|
@ -1986,7 +1986,7 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
|
|||
fn matches(name: str, f: ty::field) -> bool {
|
||||
ret str::eq(name, f.ident);
|
||||
}
|
||||
for f: ast::field_pat in fields {
|
||||
for fields.each {|f|
|
||||
alt vec::find(ex_fields, bind matches(f.ident, _)) {
|
||||
some(field) {
|
||||
check_pat(pcx, f.pat, field.mt.ty);
|
||||
|
@ -2019,7 +2019,7 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
|
|||
fields", vec::len(ex_elts), e_count]);
|
||||
}
|
||||
let mut i = 0u;
|
||||
for elt in elts {
|
||||
for elts.each {|elt|
|
||||
check_pat(pcx, elt, ex_elts[i]);
|
||||
i += 1u;
|
||||
}
|
||||
|
@ -2209,7 +2209,7 @@ fn lookup_method_inner_(tcx: ty::ctxt, ms: [ty::method],
|
|||
origin: method_origin, self_sub: option<self_subst>}> {
|
||||
#debug("lookup_method_inner_: %? %? %s", ms, parent, name);
|
||||
let mut i = 0u;
|
||||
for m in ms {
|
||||
for ms.each {|m|
|
||||
if m.ident == name {
|
||||
let fty = ty::mk_fn(tcx, {proto: ast::proto_box with m.fty});
|
||||
if ty::type_has_vars(fty) {
|
||||
|
@ -2261,7 +2261,7 @@ fn lookup_method_inner(fcx: @fn_ctxt, expr: @ast::expr,
|
|||
alt ty::get(ty).struct {
|
||||
ty::ty_param(n, did) {
|
||||
let mut bound_n = 0u;
|
||||
for bound in *tcx.ty_param_bounds.get(did.node) {
|
||||
for vec::each(*tcx.ty_param_bounds.get(did.node)) {|bound|
|
||||
alt bound {
|
||||
ty::bound_iface(t) {
|
||||
let (iid, tps) = alt check ty::get(t).struct {
|
||||
|
@ -2326,10 +2326,10 @@ fn lookup_method_inner(fcx: @fn_ctxt, expr: @ast::expr,
|
|||
let mut result = none, complained = false;
|
||||
std::list::iter(fcx.ccx.impl_map.get(expr.id)) {|impls|
|
||||
if option::is_none(result) {
|
||||
for @{did, methods, _} in *impls {
|
||||
alt vec::find(methods, {|m| m.ident == name}) {
|
||||
for vec::each(*impls) {|im|
|
||||
alt vec::find(im.methods, {|m| m.ident == name}) {
|
||||
some(m) {
|
||||
let mut {n_tps, ty: self_ty} = impl_self_ty(tcx, did);
|
||||
let mut {n_tps, ty: self_ty} = impl_self_ty(tcx, im.did);
|
||||
let mut {vars, ty: self_ty} = if n_tps > 0u {
|
||||
bind_params(fcx, self_ty, n_tps)
|
||||
} else {
|
||||
|
@ -2536,7 +2536,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
|
|||
let check_args = fn@(check_blocks: bool) -> bool {
|
||||
let mut i = 0u;
|
||||
let mut bot = false;
|
||||
for a_opt in args {
|
||||
for args.each {|a_opt|
|
||||
alt a_opt {
|
||||
some(a) {
|
||||
let is_block = alt a.node {
|
||||
|
@ -2574,7 +2574,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
|
|||
fn check_call(fcx: @fn_ctxt, sp: span, f: @ast::expr, args: [@ast::expr])
|
||||
-> {fty: ty::t, bot: bool} {
|
||||
let mut args_opt_0: [option<@ast::expr>] = [];
|
||||
for arg: @ast::expr in args {
|
||||
for args.each {|arg|
|
||||
args_opt_0 += [some::<@ast::expr>(arg)];
|
||||
}
|
||||
|
||||
|
@ -2994,7 +2994,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
|
|||
// Typecheck the patterns first, so that we get types for all the
|
||||
// bindings.
|
||||
//let pattern_ty = fcx.expr_ty(discrim);
|
||||
for arm: ast::arm in arms {
|
||||
for arms.each {|arm|
|
||||
let pcx = {
|
||||
fcx: fcx,
|
||||
map: pat_util::pat_id_map(tcx.def_map, arm.pats[0]),
|
||||
|
@ -3003,14 +3003,12 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
|
|||
pat_region: ty::re_scope(parent_block)
|
||||
};
|
||||
|
||||
for p: @ast::pat in arm.pats {
|
||||
check_pat(pcx, p, pattern_ty);
|
||||
}
|
||||
for arm.pats.each {|p| check_pat(pcx, p, pattern_ty);}
|
||||
}
|
||||
// Now typecheck the blocks.
|
||||
let mut result_ty = next_ty_var(fcx);
|
||||
let mut arm_non_bot = false;
|
||||
for arm: ast::arm in arms {
|
||||
for arms.each {|arm|
|
||||
alt arm.guard {
|
||||
some(e) { check_expr_with(fcx, e, ty::mk_bool(tcx)); }
|
||||
none { }
|
||||
|
@ -3169,14 +3167,14 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
|
|||
}
|
||||
ast::expr_vec(args, mutbl) {
|
||||
let t: ty::t = next_ty_var(fcx);
|
||||
for e: @ast::expr in args { bot |= check_expr_with(fcx, e, t); }
|
||||
for args.each {|e| bot |= check_expr_with(fcx, e, t); }
|
||||
let typ = ty::mk_vec(tcx, {ty: t, mutbl: mutbl});
|
||||
fcx.write_ty(id, typ);
|
||||
}
|
||||
ast::expr_tup(elts) {
|
||||
let mut elt_ts = [];
|
||||
vec::reserve(elt_ts, vec::len(elts));
|
||||
for e in elts {
|
||||
for elts.each {|e|
|
||||
check_expr(fcx, e);
|
||||
let ety = fcx.expr_ty(e);
|
||||
elt_ts += [ety];
|
||||
|
@ -3212,9 +3210,9 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
|
|||
}
|
||||
}
|
||||
fcx.write_ty(id, bexpr_t);
|
||||
for f: spanned<ty::field> in fields_t {
|
||||
for fields_t.each {|f|
|
||||
let mut found = false;
|
||||
for bf: ty::field in base_fields {
|
||||
for base_fields.each {|bf|
|
||||
if str::eq(f.node.ident, bf.ident) {
|
||||
demand::simple(fcx, f.span, bf.mt.ty, f.node.mt.ty);
|
||||
found = true;
|
||||
|
@ -3465,7 +3463,7 @@ fn check_stmt(fcx: @fn_ctxt, stmt: @ast::stmt) -> bool {
|
|||
node_id = id;
|
||||
alt decl.node {
|
||||
ast::decl_local(ls) {
|
||||
for l in ls { bot |= check_decl_local(fcx, l); }
|
||||
for ls.each {|l| bot |= check_decl_local(fcx, l); }
|
||||
}
|
||||
ast::decl_item(_) {/* ignore for now */ }
|
||||
}
|
||||
|
@ -3501,7 +3499,7 @@ fn check_block(fcx0: @fn_ctxt, blk: ast::blk) -> bool {
|
|||
};
|
||||
let mut bot = false;
|
||||
let mut warned = false;
|
||||
for s: @ast::stmt in blk.node.stmts {
|
||||
for blk.node.stmts.each {|s|
|
||||
if bot && !warned &&
|
||||
alt s.node {
|
||||
ast::stmt_decl(@{node: ast::decl_local(_), _}, _) |
|
||||
|
@ -3588,7 +3586,7 @@ fn check_enum_variants(ccx: @crate_ctxt, sp: span, vs: [ast::variant],
|
|||
ccx: ccx};
|
||||
let mut disr_vals: [int] = [];
|
||||
let mut disr_val = 0;
|
||||
for v in vs {
|
||||
for vs.each {|v|
|
||||
alt v.node.disr_expr {
|
||||
some(e) {
|
||||
check_expr(fcx, e);
|
||||
|
@ -3666,7 +3664,7 @@ fn check_pred_expr(fcx: @fn_ctxt, e: @ast::expr) -> bool {
|
|||
in constraint");
|
||||
}
|
||||
}
|
||||
for operand: @ast::expr in operands {
|
||||
for operands.each {|operand|
|
||||
if !ast_util::is_constraint_arg(operand) {
|
||||
let s =
|
||||
"constraint args must be slot variables or literals";
|
||||
|
@ -3687,11 +3685,10 @@ fn check_pred_expr(fcx: @fn_ctxt, e: @ast::expr) -> bool {
|
|||
}
|
||||
|
||||
fn check_constraints(fcx: @fn_ctxt, cs: [@ast::constr], args: [ast::arg]) {
|
||||
let mut c_args;
|
||||
let num_args = vec::len(args);
|
||||
for c: @ast::constr in cs {
|
||||
c_args = [];
|
||||
for a: @spanned<ast::fn_constr_arg> in c.node.args {
|
||||
for cs.each {|c|
|
||||
let mut c_args = [];
|
||||
for c.node.args.each {|a|
|
||||
c_args += [
|
||||
// "base" should not occur in a fn type thing, as of
|
||||
// yet, b/c we don't allow constraints on the return type
|
||||
|
@ -3858,7 +3855,7 @@ fn check_method(ccx: @crate_ctxt, method: @ast::method, self_ty: ty::t) {
|
|||
|
||||
fn class_types(ccx: @crate_ctxt, members: [@ast::class_member]) -> class_map {
|
||||
let rslt = int_hash::<ty::t>();
|
||||
for m in members {
|
||||
for members.each {|m|
|
||||
alt m.node {
|
||||
ast::instance_var(_,t,_,id,_) {
|
||||
rslt.insert(id, ast_ty_to_ty(ccx.tcx, m_collect, t));
|
||||
|
@ -3897,9 +3894,7 @@ fn check_item(ccx: @crate_ctxt, it: @ast::item) {
|
|||
let self_ty = ast_ty_to_ty(ccx.tcx, m_check, ty);
|
||||
let self_region = ty::re_free(it.id, ty::br_self);
|
||||
let self_ty = replace_self_region(ccx.tcx, self_region, self_ty);
|
||||
for m in ms {
|
||||
check_method(ccx, m, self_ty);
|
||||
}
|
||||
for ms.each {|m| check_method(ccx, m, self_ty);}
|
||||
}
|
||||
ast::item_class(tps, members, ctor) {
|
||||
let cid = some(it.id);
|
||||
|
@ -3915,7 +3910,7 @@ fn check_item(ccx: @crate_ctxt, it: @ast::item) {
|
|||
some(class_t));
|
||||
|
||||
// typecheck the members
|
||||
for m in members { check_class_member(class_ccx, class_t, m); }
|
||||
for members.each {|m| check_class_member(class_ccx, class_t, m); }
|
||||
}
|
||||
_ {/* nothing to do */ }
|
||||
}
|
||||
|
@ -3995,8 +3990,8 @@ mod vtable {
|
|||
allow_unsafe: bool) -> vtable_res {
|
||||
let tcx = fcx.ccx.tcx;
|
||||
let mut result = [], i = 0u;
|
||||
for ty in tys {
|
||||
for bound in *bounds[i] {
|
||||
for tys.each {|ty|
|
||||
for vec::each(*bounds[i]) {|bound|
|
||||
alt bound {
|
||||
ty::bound_iface(i_ty) {
|
||||
let i_ty = ty::substitute_type_params(tcx, tys, i_ty);
|
||||
|
@ -4022,7 +4017,7 @@ mod vtable {
|
|||
alt ty::get(ty).struct {
|
||||
ty::ty_param(n, did) {
|
||||
let mut n_bound = 0u;
|
||||
for bound in *tcx.ty_param_bounds.get(did.node) {
|
||||
for vec::each(*tcx.ty_param_bounds.get(did.node)) {|bound|
|
||||
alt bound {
|
||||
ty::bound_iface(ity) {
|
||||
alt check ty::get(ity).struct {
|
||||
|
@ -4038,7 +4033,7 @@ mod vtable {
|
|||
}
|
||||
ty::ty_iface(did, tps) if iface_id == did {
|
||||
if !allow_unsafe {
|
||||
for m in *ty::iface_methods(tcx, did) {
|
||||
for vec::each(*ty::iface_methods(tcx, did)) {|m|
|
||||
if ty::type_has_vars(ty::mk_fn(tcx, m.fty)) {
|
||||
tcx.sess.span_err(
|
||||
sp, "a boxed iface with self types may not be \
|
||||
|
@ -4057,7 +4052,7 @@ mod vtable {
|
|||
let mut found = none;
|
||||
std::list::iter(isc) {|impls|
|
||||
if option::is_none(found) {
|
||||
for im in *impls {
|
||||
for vec::each(*impls) {|im|
|
||||
let match = alt ty::impl_iface(tcx, im.did) {
|
||||
some(ity) {
|
||||
alt check ty::get(ity).struct {
|
||||
|
|
|
@ -46,7 +46,7 @@ fn field_expr(f: ast::field) -> @ast::expr { ret f.node.expr; }
|
|||
|
||||
fn field_exprs(fields: [ast::field]) -> [@ast::expr] {
|
||||
let mut es = [];
|
||||
for f: ast::field in fields { es += [f.node.expr]; }
|
||||
for fields.each {|f| es += [f.node.expr]; }
|
||||
ret es;
|
||||
}
|
||||
|
||||
|
|
|
@ -67,20 +67,23 @@ fn mk_filesearch(maybe_sysroot: option<path>,
|
|||
|
||||
// FIXME #1001: This can't be an obj method
|
||||
fn search<T: copy>(filesearch: filesearch, pick: pick<T>) -> option<T> {
|
||||
for lib_search_path in filesearch.lib_search_paths() {
|
||||
let mut rslt = none;
|
||||
for filesearch.lib_search_paths().each {|lib_search_path|
|
||||
#debug("searching %s", lib_search_path);
|
||||
for path in os::list_dir_path(lib_search_path) {
|
||||
for os::list_dir_path(lib_search_path).each {|path|
|
||||
#debug("testing %s", path);
|
||||
let maybe_picked = pick(path);
|
||||
if option::is_some(maybe_picked) {
|
||||
#debug("picked %s", path);
|
||||
ret maybe_picked;
|
||||
rslt = maybe_picked;
|
||||
break;
|
||||
} else {
|
||||
#debug("rejected %s", path);
|
||||
}
|
||||
}
|
||||
if option::is_some(rslt) { break; }
|
||||
}
|
||||
ret option::none;
|
||||
ret rslt;
|
||||
}
|
||||
|
||||
fn relative_target_lib_path(target_triple: str) -> [path] {
|
||||
|
|
|
@ -73,7 +73,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> str {
|
|||
alt ident { some(i) { s += " "; s += i; } _ { } }
|
||||
s += "(";
|
||||
let mut strs = [];
|
||||
for a: arg in inputs { strs += [fn_input_to_str(cx, a)]; }
|
||||
for inputs.each {|a| strs += [fn_input_to_str(cx, a)]; }
|
||||
s += str::connect(strs, ", ");
|
||||
s += ")";
|
||||
if ty::get(output).struct != ty_nil {
|
||||
|
@ -139,12 +139,12 @@ fn ty_to_str(cx: ctxt, typ: t) -> str {
|
|||
ty_type { "type" }
|
||||
ty_rec(elems) {
|
||||
let mut strs: [str] = [];
|
||||
for fld: field in elems { strs += [field_to_str(cx, fld)]; }
|
||||
for elems.each {|fld| strs += [field_to_str(cx, fld)]; }
|
||||
"{" + str::connect(strs, ",") + "}"
|
||||
}
|
||||
ty_tup(elems) {
|
||||
let mut strs = [];
|
||||
for elem in elems { strs += [ty_to_str(cx, elem)]; }
|
||||
for elems.each {|elem| strs += [ty_to_str(cx, elem)]; }
|
||||
"(" + str::connect(strs, ",") + ")"
|
||||
}
|
||||
ty_fn(f) {
|
||||
|
@ -179,7 +179,7 @@ fn constr_to_str(c: @constr) -> str {
|
|||
fn constrs_str(constrs: [@constr]) -> str {
|
||||
let mut s = "";
|
||||
let mut colon = true;
|
||||
for c: @constr in constrs {
|
||||
for constrs.each {|c|
|
||||
if colon { s += " : "; colon = false; } else { s += ", "; }
|
||||
s += constr_to_str(c);
|
||||
}
|
||||
|
|
|
@ -59,7 +59,7 @@ fn usage() {
|
|||
|
||||
println("Usage: rustdoc [options] <cratefile>\n");
|
||||
println("Options:\n");
|
||||
for opt in opts() {
|
||||
for opts().each {|opt|
|
||||
println(#fmt(" %s", tuple::second(opt)));
|
||||
}
|
||||
println("");
|
||||
|
|
|
@ -328,7 +328,7 @@ fn write_mod_contents(
|
|||
write_index(ctxt, option::get(doc.index));
|
||||
}
|
||||
|
||||
for itemtag in doc.items {
|
||||
for doc.items.each {|itemtag|
|
||||
write_item(ctxt, itemtag);
|
||||
}
|
||||
}
|
||||
|
@ -381,7 +381,7 @@ fn write_index(ctxt: ctxt, index: doc::index) {
|
|||
ret;
|
||||
}
|
||||
|
||||
for entry in index.entries {
|
||||
for index.entries.each {|entry|
|
||||
let header = header_text_(entry.kind, entry.name);
|
||||
let id = entry.link;
|
||||
if option::is_some(entry.brief) {
|
||||
|
@ -431,7 +431,7 @@ fn write_nmod(ctxt: ctxt, doc: doc::nmoddoc) {
|
|||
write_index(ctxt, option::get(doc.index));
|
||||
}
|
||||
|
||||
for fndoc in doc.fns {
|
||||
for doc.fns.each {|fndoc|
|
||||
write_item_header(ctxt, doc::fntag(fndoc));
|
||||
write_fn(ctxt, fndoc);
|
||||
}
|
||||
|
|
|
@ -82,13 +82,13 @@ fn build_reexport_def_set(srv: astsrv::srv) -> def_set {
|
|||
let assoc_list = astsrv::exec(srv) {|ctxt|
|
||||
let def_set = common::new_def_hash();
|
||||
ctxt.exp_map.items {|_id, defs|
|
||||
for def in defs {
|
||||
for defs.each {|def|
|
||||
if def.reexp {
|
||||
def_set.insert(def.id, ());
|
||||
}
|
||||
}
|
||||
}
|
||||
for def in find_reexport_impls(ctxt) {
|
||||
for find_reexport_impls(ctxt).each {|def|
|
||||
def_set.insert(def, ());
|
||||
}
|
||||
to_assoc_list(def_set)
|
||||
|
@ -137,7 +137,7 @@ fn build_reexport_def_map(
|
|||
fn fold_mod(fold: fold::fold<ctxt>, doc: doc::moddoc) -> doc::moddoc {
|
||||
let doc = fold::default_seq_fold_mod(fold, doc);
|
||||
|
||||
for item in doc.items {
|
||||
for doc.items.each {|item|
|
||||
let def_id = ast_util::local_def(item.id());
|
||||
if fold.ctxt.def_set.contains_key(def_id) {
|
||||
fold.ctxt.def_map.insert(def_id, item);
|
||||
|
@ -150,7 +150,7 @@ fn build_reexport_def_map(
|
|||
fn fold_nmod(fold: fold::fold<ctxt>, doc: doc::nmoddoc) -> doc::nmoddoc {
|
||||
let doc = fold::default_seq_fold_nmod(fold, doc);
|
||||
|
||||
for fndoc in doc.fns {
|
||||
for doc.fns.each {|fndoc|
|
||||
let def_id = ast_util::local_def(fndoc.id());
|
||||
if fold.ctxt.def_set.contains_key(def_id) {
|
||||
fold.ctxt.def_map.insert(def_id, doc::fntag(fndoc));
|
||||
|
@ -184,7 +184,7 @@ fn build_reexport_path_map(srv: astsrv::srv, -def_map: def_map) -> path_map {
|
|||
let modpath = ast_map::path_to_str(vec::init(*path));
|
||||
|
||||
let mut reexportdocs = [];
|
||||
for def in defs {
|
||||
for defs.each {|def|
|
||||
if !def.reexp { cont; }
|
||||
alt def_map.find(def.id) {
|
||||
some(itemtag) {
|
||||
|
@ -206,7 +206,8 @@ fn build_reexport_path_map(srv: astsrv::srv, -def_map: def_map) -> path_map {
|
|||
}
|
||||
}
|
||||
|
||||
for (path, doc) in find_reexport_impl_docs(ctxt, def_map) {
|
||||
for find_reexport_impl_docs(ctxt, def_map).each {|elt|
|
||||
let (path, doc) = elt;
|
||||
let docs = alt path_map.find(path) {
|
||||
some(docs) { docs + [(doc)] }
|
||||
none { [doc] }
|
||||
|
@@ -272,7 +273,7 @@ fn for_each_reexported_impl(
let all_impls = all_impls(m);
alt check ctxt.impl_map.get(mod_id) {
list::cons(impls, @list::nil) {
for i in *impls {
for vec::each(*impls) {|i|
// This impl is not an item in the current mod
if !all_impls.contains_key(i.did) {
// Ignore external impls because I don't

@@ -289,7 +290,7 @@ fn for_each_reexported_impl(

fn all_impls(m: ast::_mod) -> map::set<ast::def_id> {
let all_impls = common::new_def_hash();
for item in m.items {
for m.items.each {|item|
alt item.node {
ast::item_impl(_, _, _, _) {
all_impls.insert(ast_util::local_def(item.id), ());
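Both spellings of the new-style loop that recur through this change appear in the hunks above: the method form (`doc.items.each {|item| ...}`) and the free-function form (`vec::each(*impls) {|i| ...}`). A minimal sketch contrasting the two in the same dialect; `log_all` and `xs` are illustrative names, not from the diff:

fn log_all(xs: [int]) {
    // method form of the new-style loop
    for xs.each {|x| log(debug, x); }
    // free-function form over the same vector
    for vec::each(xs) {|x| log(debug, x); }
}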
@@ -94,7 +94,7 @@ fn sectionalize(desc: option<str>) -> (option<str>, [doc::section]) {
let mut current_section = none;
let mut sections = [];

for line in lines {
for lines.each {|line|
alt parse_header(line) {
some(header) {
if option::is_some(current_section) {
@@ -6,7 +6,7 @@ fn alist_add<A: copy, B: copy>(lst: alist<A,B>, k: A, v: B) {

fn alist_get<A: copy, B: copy>(lst: alist<A,B>, k: A) -> B {
let eq_fn = lst.eq_fn;
for pair in lst.data {
for lst.data.each {|pair|
let (ki, vi) = pair; // copy req'd for alias analysis
if eq_fn(k, ki) { ret vi; }
}
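The `alist_get` hunk above keeps an early `ret` inside the `.each` block, so the rewritten loop can still return a value straight out of the enclosing function. A minimal sketch of the same shape, assuming the dialect used throughout this diff; `find_str` and `pairs` are illustrative names, not from the commit:

fn find_str(pairs: [(int, str)], k: int) -> str {
    for pairs.each {|pair|
        // destructure the pair, as in the alist_get hunk
        let (ki, vi) = pair;
        // ret inside the block returns from find_str itself
        if ki == k { ret vi; }
    }
    fail "key not found";
}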
@@ -25,7 +25,7 @@ type aminoacids = {ch: char, prob: u32};
fn make_cumulative(aa: [aminoacids]) -> [aminoacids] {
let mut cp: u32 = 0u32;
let mut ans: [aminoacids] = [];
for a: aminoacids in aa { cp += a.prob; ans += [{ch: a.ch, prob: cp}]; }
for aa.each {|a| cp += a.prob; ans += [{ch: a.ch, prob: cp}]; }
ret ans;
}

@@ -75,7 +75,7 @@ fn stress(num_tasks: int) {
results += [task::future_result(builder)];
task::run(builder) {|| stress_task(i); }
}
for r in results { future::get(r); }
for results.each {|r| future::get(r); }
}

fn main(argv: [str]) {
@@ -77,7 +77,7 @@ mod map_reduce {
ctrl: chan<ctrl_proto<K2, V>>, inputs: [K1]) ->
[joinable_task] {
let tasks = [];
for i in inputs {
for inputs.each {|i|
let m = map, c = ctrl, ii = i;
tasks += [task::spawn_joinable {|| map_task(m, c, ii)}];
}

@@ -201,7 +201,7 @@ mod map_reduce {
}
treemap::traverse(reducers, finish);

for t in tasks { task::join(t); }
for tasks.each {|t| task::join(t); }
}
}

@@ -218,7 +218,7 @@ fn main(argv: [str]) {
}

let iargs = [];
for a in vec::slice(argv, 1u, vec::len(argv)) {
vec::iter_between(argv, 1u, vec::len(argv)) {|a|
iargs += [str::bytes(a)];
}
@@ -61,7 +61,7 @@ mod map_reduce {
fn start_mappers(ctrl: chan<ctrl_proto>, -inputs: [str]) ->
[future::future<task::task_result>] {
let mut results = [];
for i: str in inputs {
for inputs.each {|i|
let builder = task::builder();
results += [task::future_result(builder)];
task::run(builder) {|| map_task(ctrl, i)}

@@ -174,7 +174,7 @@ mod map_reduce {

reducers.values {|v| send(v, done); }

for r in results { future::get(r); }
for results.each {|r| future::get(r); }
}
}
@@ -1,7 +1,7 @@
// error-pattern:mismatched types: expected `()` but found `bool`

fn main() {
for i in [0] {
for vec::iter([0]) {|_i|
true
}
}

@@ -2,5 +2,5 @@

fn main() {
let v: [mut {mut x: int}] = [mut {mut x: 1}];
for x in v { v[0] = {mut x: 2}; log(debug, x); }
for v.each {|x| v[0] = {mut x: 2}; log(debug, x); }
}

@@ -2,7 +2,7 @@ fn concat<T: copy>(v: [const [const T]]) -> [T] {
let mut r = [];

// Earlier versions of our type checker accepted this:
for inner: [T] in v {
for v.each {|inner|
//!^ ERROR found `[const 'a]` (values differ in mutability)
r += inner;
}
@@ -1,5 +1,5 @@
fn main(args: [str]) {
let vs: [str] = ["hi", "there", "this", "is", "a", "vec"];
let vvs: [[str]] = [args, vs];
for vs: [str] in vvs { for s: str in vs { log(debug, s); } }
for vvs.each {|vs| for vs.each {|s| log(debug, s); } }
}

@@ -1,5 +1,5 @@
fn main() {
let mut sum = 0;
for x in [1, 2, 3, 4, 5] { sum += x; }
for vec::each([1, 2, 3, 4, 5]) {|x| sum += x; }
assert (sum == 15);
}

@@ -1,4 +1,4 @@
fn iter_vec<T>(v: [T], f: fn(T)) { for x: T in v { f(x); } }
fn iter_vec<T>(v: [T], f: fn(T)) { for v.each {|x| f(x); } }

fn main() {
let v = [1, 2, 3, 4, 5, 6, 7];

@@ -1,4 +1,4 @@
fn iter_vec<T>(v: [T], f: fn(T)) { for x: T in v { f(x); } }
fn iter_vec<T>(v: [T], f: fn(T)) { for v.each {|x| f(x); } }

fn main() {
let v = [1, 2, 3, 4, 5];
@@ -6,12 +6,14 @@ fn main() {
assert (i == 10);
do { i += 1; if i == 20 { break; } } while i < 30
assert (i == 20);
for x: int in [1, 2, 3, 4, 5, 6] { if x == 3 { break; } assert (x <= 3); }
for vec::each([1, 2, 3, 4, 5, 6]) {|x|
if x == 3 { break; } assert (x <= 3);
}
i = 0;
while i < 10 { i += 1; if i % 2 == 0 { cont; } assert (i % 2 != 0); }
i = 0;
do { i += 1; if i % 2 == 0 { cont; } assert (i % 2 != 0); } while i < 10
for x: int in [1, 2, 3, 4, 5, 6] {
for vec::each([1, 2, 3, 4, 5, 6]) {|x|
if x % 2 == 0 { cont; }
assert (x % 2 != 0);
}
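In the test above, `break` and `cont` keep their loop-control meaning after the rewrite even though they now sit inside the block passed to `vec::each`. A minimal sketch of the same idea, assuming the dialect used throughout; `first_multiple_of_three` is an illustrative name, not from the commit:

fn first_multiple_of_three(v: [int]) -> option<int> {
    let mut found: option<int> = none;
    for vec::each(v) {|x|
        // break here leaves the loop, just as in the converted test
        if x % 3 == 0 { found = some(x); break; }
    }
    ret found;
}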
@@ -1,5 +1,5 @@
fn main() {
for {x: x, y: y}: {x: int, y: int} in [{x: 10, y: 20}, {x: 30, y: 0}] {
assert (x + y == 30);
for vec::each([{x: 10, y: 20}, {x: 30, y: 0}]) {|elt|
assert (elt.x + elt.y == 30);
}
}

@@ -1,8 +0,0 @@
fn main() {
let x = [@{mut a: @10, b: @20}];
for @{a, b} in x {
assert *a == 10;
(*x[0]).a = @30;
assert *a == 10;
}
}

@@ -1 +1 @@
fn main() { let x: [int] = []; for i: int in x { fail "moop"; } }
fn main() { let x: [int] = []; for x.each {|_i| fail "moop"; } }
@@ -34,7 +34,7 @@ mod map_reduce {
enum ctrl_proto { find_reducer([u8], chan<int>), mapper_done, }

fn start_mappers(ctrl: chan<ctrl_proto>, inputs: [str]) {
for i: str in inputs {
for inputs.each {|i|
task::spawn {|| map_task(ctrl, i); };
}
}

@@ -17,7 +17,7 @@ iface map<T> {
impl <T> of map<T> for [T] {
fn map<U>(f: fn(T) -> U) -> [U] {
let mut r = [];
for x in self { r += [f(x)]; }
for self.each {|x| r += [f(x)]; }
r
}
}
@@ -3,12 +3,12 @@
fn main() {
let x = [1, 2, 3];
let mut y = 0;
for i: int in x { log(debug, i); y += i; }
for x.each {|i| log(debug, i); y += i; }
log(debug, y);
assert (y == 6);
let s = "hello there";
let mut i: int = 0;
for c: u8 in s {
for str::each(s) {|c|
if i == 0 { assert (c == 'h' as u8); }
if i == 1 { assert (c == 'e' as u8); }
if i == 2 { assert (c == 'l' as u8); }

@@ -1,6 +1,6 @@
fn main() {
let x = [10, 20, 30];
let mut sum = 0;
for x in x { sum += x; }
for x.each {|x| sum += x; }
assert (sum == 60);
}

@@ -1 +1 @@
fn main(args: [str]) { for s in args { log(debug, s); } }
fn main(args: [str]) { for args.each {|s| log(debug, s); } }
@@ -4,7 +4,7 @@
// -*- rust -*-
fn len(v: [const int]) -> uint {
let mut i = 0u;
for x: int in v { i += 1u; }
for v.each {|x| i += 1u; }
ret i;
}

@@ -5,7 +5,7 @@ iface monad<A> {
impl <A> of monad<A> for [A] {
fn bind<B>(f: fn(A) -> [B]) -> [B] {
let mut r = [];
for elt in self { r += f(elt); }
for self.each {|elt| r += f(elt); }
r
}
}
@@ -58,7 +58,7 @@ fn main() {
calllink10
];
let rng = rand::rng();
for f in fns {
for fns.each {|f|
let sz = rng.next() % 256u32 + 256u32;
let frame_backoff = rng.next() % 10u32 + 1u32;
task::try {|| runtest(f, frame_backoff) };

@@ -6,7 +6,7 @@ fn foo(c: [int]) {

alt none::<int> {
some::<int>(_) {
for i: int in c {
for c.each {|i|
log(debug, a);
let a = 17;
b += [a];
@@ -19,10 +19,10 @@ impl util for uint {

impl util<T> for [T] {
fn length_() -> uint { vec::len(self) }
fn iter_(f: fn(T)) { for x in self { f(x); } }
fn iter_(f: fn(T)) { for self.each {|x| f(x); } }
fn map_<U>(f: fn(T) -> U) -> [U] {
let mut r = [];
for elt in self { r += [f(elt)]; }
for self.each {|elt| r += [f(elt)]; }
r
}
}
@@ -42,7 +42,7 @@ fn test00() {

// Read from spawned tasks...
let mut sum = 0;
for r in results {
for results.each {|r|
i = 0;
while i < number_of_messages {
let value = recv(po);

@@ -52,7 +52,7 @@ fn test00() {
}

// Join spawned tasks...
for r in results { future::get(r); }
for results.each {|r| future::get(r); }

#debug("Completed: Final number is: ");
log(error, sum);
@@ -46,12 +46,12 @@ fn test00() {
task::run(builder) {|| test00_start(ch, i, number_of_messages);}
}
let mut sum: int = 0;
for r in results {
for results.each {|r|
i = 0;
while i < number_of_messages { sum += recv(po); i = i + 1; }
}

for r in results { future::get(r); }
for results.each {|r| future::get(r); }

#debug("Completed: Final number is: ");
assert (sum ==

@@ -132,7 +132,7 @@ fn test06() {
}


for r in results { future::get(r); }
for results.each {|r| future::get(r); }
}
@@ -31,7 +31,7 @@ fn main() {

fn check_str_eq(a: str, b: str) {
let mut i: int = 0;
for ab: u8 in a {
for str::each(a) {|ab|
log(debug, i);
log(debug, ab);
let bb: u8 = b[i];