The wonky for...in... whitespace was bothering me. Sorry!

Lindsey Kuper 2011-08-15 21:54:52 -07:00
parent cb239cc028
commit f91351aaf6
102 changed files with 564 additions and 564 deletions

@ -296,7 +296,7 @@ fn build_link_meta(sess: &session::session, c: &ast::crate, output: &str,
let cmh_items: [@ast::meta_item] = ~[];
let linkage_metas = attr::find_linkage_metas(c.node.attrs);
attr::require_unique_names(sess, linkage_metas);
for meta: @ast::meta_item in linkage_metas {
if attr::get_meta_item_name(meta) == "name" {
alt attr::get_meta_item_value_str(meta) {
some(v) { name = some(v); }
@ -326,7 +326,7 @@ fn build_link_meta(sess: &session::session, c: &ast::crate, output: &str,
let cmh_items = attr::sort_meta_items(metas.cmh_items);
sha.reset();
for m_: @ast::meta_item in cmh_items {
let m = m_;
alt m.node {
ast::meta_name_value(key, value) {
@ -429,7 +429,7 @@ fn mangle(ss: &[str]) -> str {
let n = "_ZN"; // Begin name-sequence.
for s: str in ss { n += #fmt("%u%s", str::byte_len(s), s); }
n += "E"; // End name-sequence.
ret n;

@ -68,7 +68,7 @@ fn declare_upcalls(tn: type_names, tydesc_type: TypeRef,
fn decl(tn: type_names, llmod: ModuleRef, name: str, tys: [TypeRef],
rv: TypeRef) -> ValueRef {
let arg_tys: [TypeRef] = ~[];
for t: TypeRef in tys { arg_tys += ~[t]; }
let fn_ty = T_fn(arg_tys, rv);
ret trans::decl_cdecl_fn(llmod, "upcall_" + name, fn_ty);
}

@ -82,7 +82,7 @@ fn parse_cfgspecs(cfgspecs: &[str]) -> ast::crate_cfg {
// FIXME: It would be nice to use the parser to parse all varieties of
// meta_item here. At the moment we just support the meta_word variant.
let words = ~[];
for s: str in cfgspecs { words += ~[attr::mk_word_item(s)]; }
ret words;
}
@ -334,7 +334,7 @@ fn build_session_options(binary: str, match: getopts::match, binary_dir: str)
let library_search_paths = ~[binary_dir + "/lib"];
let lsp_vec = getopts::opt_strs(match, "L");
for lsp: str in lsp_vec { library_search_paths += ~[lsp]; }
let parse_only = opt_present(match, "parse-only");
let no_trans = opt_present(match, "no-trans");
@ -591,7 +591,7 @@ fn main(args: vec[str]) {
}
let cstore = sess.get_cstore();
for cratepath: str in cstore::get_used_crate_files(cstore) {
if str::ends_with(cratepath, ".rlib") {
gcc_args += ~[cratepath];
cont;
@ -603,10 +603,10 @@ fn main(args: vec[str]) {
}
let ula = cstore::get_used_link_args(cstore);
for arg: str in ula { gcc_args += ~[arg]; }
let used_libs = cstore::get_used_libraries(cstore);
for l: str in used_libs { gcc_args += ~["-l" + l]; }
if sopts.library {
gcc_args += ~[lib_cmd];

@ -31,7 +31,7 @@ export mk_attr;
// linkage
fn find_linkage_metas(attrs: &[ast::attribute]) -> [@ast::meta_item] {
let metas: [@ast::meta_item] = ~[];
for attr: ast::attribute in find_attrs_by_name(attrs, "link") {
alt attr.node.value.node {
ast::meta_list(_, items) { metas += items; }
_ { log "ignoring link attribute that has incorrect type"; }
@ -96,7 +96,7 @@ fn attr_meta(attr: &ast::attribute) -> @ast::meta_item { @attr.node.value }
// Get the meta_items from inside a vector of attributes
fn attr_metas(attrs: &[ast::attribute]) -> [@ast::meta_item] {
let mitems = ~[];
for a: ast::attribute in attrs { mitems += ~[attr_meta(a)]; }
ret mitems;
}
@ -124,7 +124,7 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
fn contains(haystack: &[@ast::meta_item], needle: @ast::meta_item) -> bool {
log #fmt("looking for %s",
syntax::print::pprust::meta_item_to_str(*needle));
for item: @ast::meta_item in haystack {
log #fmt("looking in %s",
syntax::print::pprust::meta_item_to_str(*item));
if eq(item, needle) { log "found it!"; ret true; }
@ -153,12 +153,12 @@ fn sort_meta_items(items: &[@ast::meta_item]) -> [@ast::meta_item] {
// This is sort of stupid here, converting to a vec of mutables and back
let v: [mutable @ast::meta_item] = ~[mutable];
for mi: @ast::meta_item in items { v += ~[mutable mi]; }
std::sort::quick_sort(lteq, v);
let v2: [@ast::meta_item] = ~[];
for mi: @ast::meta_item in v { v2 += ~[mi]; }
ret v2;
}
@ -179,7 +179,7 @@ fn remove_meta_items_by_name(items: &[@ast::meta_item], name: str) ->
fn require_unique_names(sess: &session::session,
metas: &[@ast::meta_item]) {
let map = map::mk_hashmap[str, ()](str::hash, str::eq);
for meta: @ast::meta_item in metas {
let name = get_meta_item_name(meta);
if map.contains_key(name) {
sess.span_fatal(meta.span,

@ -118,7 +118,7 @@ fn in_cfg(cfg: &ast::crate_cfg, attrs: &[ast::attribute]) -> bool {
ivec::foldl(extract_metas, ~[], cfg_metas)
};
for cfg_mi: @ast::meta_item in item_cfg_metas {
if attr::contains(cfg, cfg_mi) { ret true; }
}

@ -229,7 +229,7 @@ fn mk_test_desc_ivec_ty(cx: &test_ctxt) -> @ast::ty {
fn mk_test_desc_vec(cx: &test_ctxt) -> @ast::expr {
log #fmt("building test vector from %u tests", ivec::len(cx.testfns));
let descs = ~[];
for test: test in cx.testfns {
let test_ = test; // Satisfy alias analysis
descs += ~[mk_test_desc_rec(cx, test_)];
}

@ -1469,7 +1469,7 @@ fn type_to_str_inner(names: type_names, outer0: &[TypeRef], ty: TypeRef) ->
fn tys_str(names: type_names, outer: &[TypeRef], tys: &[TypeRef]) -> str {
let s: str = "";
let first: bool = true;
for t: TypeRef in tys {
if first { first = false; } else { s += ", "; }
s += type_to_str_inner(names, outer, t);
}
@ -1531,7 +1531,7 @@ fn type_to_str_inner(names: type_names, outer0: &[TypeRef], ty: TypeRef) ->
11 {
let i: uint = 0u;
for tout: TypeRef in outer0 {
i += 1u;
if tout as int == ty as int {
let n: uint = ivec::len[TypeRef](outer0) - i;

@ -69,7 +69,7 @@ fn hash_node_id(node_id: &int) -> uint { ret 177573u ^ (node_id as uint); }
fn hash_path(s: &str) -> uint {
let h = 5381u;
for ch: u8 in str::bytes(s) { h = (h << 5u) + h ^ (ch as uint); }
ret h;
}

@ -96,7 +96,7 @@ fn metadata_matches(crate_data: &@[u8], metas: &[@ast::meta_item]) -> bool {
log #fmt("matching %u metadata requirements against %u items",
ivec::len(metas), ivec::len(linkage_metas));
for needed: @ast::meta_item in metas {
if !attr::contains(linkage_metas, needed) {
log #fmt("missing %s", pprust::meta_item_to_str(*needed));
ret false;
@ -156,9 +156,9 @@ fn find_library_crate_aux(nn: &{prefix: str, suffix: str}, crate_name: str,
// stomach from here, and writing a C++ wrapper is more work than just
// manually filtering fs::list_dir here.
for library_search_path: str in library_search_paths {
log #fmt("searching %s", library_search_path);
for path: str in fs::list_dir(library_search_path) {
log #fmt("searching %s", path);
let f: str = fs::basename(path);
if !(str::starts_with(f, prefix) && str::ends_with(f, nn.suffix))
@ -250,7 +250,7 @@ fn resolve_crate_deps(e: env, cdata: &@[u8]) -> cstore::cnum_map {
// The map from crate numbers in the crate we're resolving to local crate
// numbers
let cnum_map = new_int_hash[ast::crate_num]();
for dep: decoder::crate_dep in decoder::get_crate_deps(cdata) {
let extrn_cnum = dep.cnum;
let cname = dep.ident;
log #fmt("resolving dep %s", cname);

@ -46,7 +46,7 @@ fn lookup_hash(d: &ebml::doc, eq_fn: fn(&[u8]) -> bool , hash: uint) ->
let result: [ebml::doc] = ~[];
let belt = tag_index_buckets_bucket_elt;
for each elt: ebml::doc in ebml::tagged_docs(bucket, belt) {
let pos = ebml::be_uint_from_bytes(elt.data, elt.start, 4u);
if eq_fn(ivec::slice[u8](*elt.data, elt.start + 4u, elt.end)) {
result += ~[ebml::doc_at(d.data, pos)];
@ -138,7 +138,7 @@ fn tag_variant_ids(item: &ebml::doc, this_cnum: ast::crate_num) ->
[ast::def_id] {
let ids: [ast::def_id] = ~[];
let v = tag_items_data_item_variant;
for each p: ebml::doc in ebml::tagged_docs(item, v) {
let ext = parse_def_id(ebml::doc_data(p));
ids += ~[{crate: this_cnum, node: ext.node}];
}
@ -156,7 +156,7 @@ fn resolve_path(path: &[ast::ident], data: @[u8]) -> [ast::def_id] {
let paths = ebml::get_doc(md, tag_paths);
let eqer = bind eq_item(_, s);
let result: [ast::def_id] = ~[];
for doc: ebml::doc in lookup_hash(paths, eqer, hash_path(s)) {
let did_doc = ebml::get_doc(doc, tag_def_id);
result += ~[parse_def_id(ebml::doc_data(did_doc))];
}
@ -233,13 +233,13 @@ fn get_tag_variants(data: &@[u8], def: ast::def_id, tcx: &ty::ctxt,
let item = find_item(def.node, items);
let infos: [ty::variant_info] = ~[];
let variant_ids = tag_variant_ids(item, external_crate_id);
for did: ast::def_id in variant_ids {
let item = find_item(did.node, items);
let ctor_ty = item_type(item, external_crate_id, tcx, extres);
let arg_tys: [ty::t] = ~[];
alt ty::struct(tcx, ctor_ty) {
ty::ty_fn(_, args, _, _, _) {
for a: ty::arg in args { arg_tys += ~[a.ty]; }
}
_ {
// Nullary tag variant.
@ -344,7 +344,7 @@ fn get_attributes(md: &ebml::doc) -> [ast::attribute] {
}
fn list_meta_items(meta_items: &ebml::doc, out: io::writer) {
for mi: @ast::meta_item in get_meta_items(meta_items) {
out.write_str(#fmt("%s\n", pprust::meta_item_to_str(*mi)));
}
}
@ -352,7 +352,7 @@ fn list_meta_items(meta_items: &ebml::doc, out: io::writer) {
fn list_crate_attributes(md: &ebml::doc, out: io::writer) {
out.write_str("=Crate Attributes=\n");
for attr: ast::attribute in get_attributes(md) {
out.write_str(#fmt("%s\n", pprust::attribute_to_str(attr)));
}
@ -382,7 +382,7 @@ fn get_crate_deps(data: @[u8]) -> [crate_dep] {
fn list_crate_deps(data: @[u8], out: io::writer) {
out.write_str("=External Dependencies=\n");
for dep: crate_dep in get_crate_deps(data) {
out.write_str(#fmt("%d %s\n", dep.cnum, dep.ident));
}
@ -398,7 +398,7 @@ fn list_crate_items(bytes: &@[u8], md: &ebml::doc, out: io::writer) {
for each bucket: ebml::doc in
ebml::tagged_docs(bs, tag_index_buckets_bucket) {
let et = tag_index_buckets_bucket_elt;
for each elt: ebml::doc in ebml::tagged_docs(bucket, et) {
let data = read_path(elt);
let def = ebml::doc_at(bytes, data.pos);
let did_doc = ebml::get_doc(def, tag_def_id);

@ -40,7 +40,7 @@ type entry[T] = {val: T, pos: uint};
fn encode_tag_variant_paths(ebml_w: &ebml::writer, variants: &[variant],
path: &[str], index: &mutable [entry[str]]) {
for variant: variant in variants {
add_to_index(ebml_w, path, index, variant.node.name);
ebml::start_tag(ebml_w, tag_paths_data_item);
encode_name(ebml_w, variant.node.name);
@ -60,7 +60,7 @@ fn add_to_index(ebml_w: &ebml::writer, path: &[str],
fn encode_native_module_item_paths(ebml_w: &ebml::writer,
nmod: &native_mod, path: &[str],
index: &mutable [entry[str]]) {
for nitem: @native_item in nmod.items {
add_to_index(ebml_w, path, index, nitem.ident);
ebml::start_tag(ebml_w, tag_paths_data_item);
encode_name(ebml_w, nitem.ident);
@ -71,7 +71,7 @@ fn encode_native_module_item_paths(ebml_w: &ebml::writer,
fn encode_module_item_paths(ebml_w: &ebml::writer, module: &_mod,
path: &[str], index: &mutable [entry[str]]) {
for it: @item in module.items {
if !is_exported(it.ident, module) { cont; }
alt it.node {
item_const(_, _) {
@ -228,7 +228,7 @@ fn encode_tag_variant_info(ecx: &@encode_ctxt, ebml_w: &ebml::writer,
id: node_id, variants: &[variant],
index: &mutable [entry[int]],
ty_params: &[ty_param]) {
for variant: variant in variants {
index += ~[{val: variant.node.id, pos: ebml_w.writer.tell()}];
ebml::start_tag(ebml_w, tag_items_data_item);
encode_def_id(ebml_w, local_def(variant.node.id));
@ -298,7 +298,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: &ebml::writer,
encode_family(ebml_w, 't' as u8);
encode_type_param_kinds(ebml_w, tps);
encode_type(ecx, ebml_w, node_id_to_monotype(ecx.ccx.tcx, item.id));
for v: variant in variants {
encode_variant_id(ebml_w, local_def(v.node.id));
}
ebml::end_tag(ebml_w);
@ -395,14 +395,14 @@ fn encode_info_for_items(ecx: &@encode_ctxt, ebml_w: &ebml::writer) ->
fn create_index[T](index: &[entry[T]], hash_fn: fn(&T) -> uint ) ->
[@[entry[T]]] {
let buckets: [@mutable [entry[T]]] = ~[];
for each i: uint in uint::range(0u, 256u) { buckets += ~[@mutable ~[]]; }
for elt: entry[T] in index {
let h = hash_fn(elt.val);
*buckets.(h % 256u) += ~[elt];
}
let buckets_frozen = ~[];
for bucket: @mutable [entry[T]] in buckets {
buckets_frozen += ~[@*bucket];
}
ret buckets_frozen;
@ -414,10 +414,10 @@ fn encode_index[T](ebml_w: &ebml::writer, buckets: &[@[entry[T]]],
ebml::start_tag(ebml_w, tag_index);
let bucket_locs: [uint] = ~[];
ebml::start_tag(ebml_w, tag_index_buckets);
for bucket: @[entry[T]] in buckets {
bucket_locs += ~[ebml_w.writer.tell()];
ebml::start_tag(ebml_w, tag_index_buckets_bucket);
for elt: entry[T] in *bucket {
ebml::start_tag(ebml_w, tag_index_buckets_bucket_elt);
writer.write_be_uint(elt.pos, 4u);
write_fn(writer, elt.val);
@ -427,7 +427,7 @@ fn encode_index[T](ebml_w: &ebml::writer, buckets: &[@[entry[T]]],
}
ebml::end_tag(ebml_w);
ebml::start_tag(ebml_w, tag_index_table);
for pos: uint in bucket_locs { writer.write_be_uint(pos, 4u); }
ebml::end_tag(ebml_w);
ebml::end_tag(ebml_w);
}
@ -467,7 +467,7 @@ fn encode_meta_item(ebml_w: &ebml::writer, mi: &meta_item) {
ebml::start_tag(ebml_w, tag_meta_item_name);
ebml_w.writer.write(str::bytes(name));
ebml::end_tag(ebml_w);
for inner_item: @meta_item in items {
encode_meta_item(ebml_w, *inner_item);
}
ebml::end_tag(ebml_w);
@ -477,7 +477,7 @@ fn encode_meta_item(ebml_w: &ebml::writer, mi: &meta_item) {
fn encode_attributes(ebml_w: &ebml::writer, attrs: &[attribute]) {
ebml::start_tag(ebml_w, tag_attributes);
for attr: attribute in attrs {
ebml::start_tag(ebml_w, tag_attribute);
encode_meta_item(ebml_w, attr.node.value);
ebml::end_tag(ebml_w);
@ -516,7 +516,7 @@ fn synthesize_crate_attrs(ecx: &@encode_ctxt, crate: &@crate) -> [attribute] {
let attrs: [attribute] = ~[];
let found_link_attr = false;
for attr: attribute in crate.node.attrs {
attrs +=
if attr::get_attr_name(attr) != "link" {
~[attr]
@ -544,7 +544,7 @@ fn encode_crate_deps(ebml_w: &ebml::writer, cstore: &cstore::cstore) {
// Pull the cnums and names out of cstore
let pairs: [mutable numname] = ~[mutable];
for each hashkv: hashkv in cstore::iter_crate_data(cstore) {
pairs += ~[mutable {crate: hashkv.key, ident: hashkv.val.name}];
}
@ -556,7 +556,7 @@ fn encode_crate_deps(ebml_w: &ebml::writer, cstore: &cstore::cstore) {
// Sanity-check the crate numbers
let expected_cnum = 1;
for n: numname in pairs {
assert (n.crate == expected_cnum);
expected_cnum += 1;
}
@ -573,7 +573,7 @@ fn encode_crate_deps(ebml_w: &ebml::writer, cstore: &cstore::cstore) {
// FIXME: This is not nearly enough to support correct versioning
// but is enough to get transitive crate dependencies working.
ebml::start_tag(ebml_w, tag_crate_deps);
for cname: str in get_ordered_names(cstore) {
ebml::start_tag(ebml_w, tag_crate_dep);
ebml_w.writer.write(str::bytes(cname));
ebml::end_tag(ebml_w);

@ -420,8 +420,8 @@ fn parse_def_id(buf: &[u8]) -> ast::def_id {
let crate_part_vec = ~[];
let def_part_vec = ~[];
for b: u8 in crate_part { crate_part_vec += ~[b]; }
for b: u8 in def_part { def_part_vec += ~[b]; }
let crate_num = uint::parse_buf(crate_part_vec, 10u) as int;
let def_num = uint::parse_buf(def_part_vec, 10u) as int;

@ -117,7 +117,7 @@ fn enc_sty(w: &io::writer, cx: &@ctxt, st: &ty::sty) {
w.write_str("t[");
w.write_str(cx.ds(def));
w.write_char('|');
for t: ty::t in tys { enc_ty(w, cx, t); }
w.write_char(']');
}
ty::ty_tup(ts) {
@ -134,7 +134,7 @@ fn enc_sty(w: &io::writer, cx: &@ctxt, st: &ty::sty) {
ty::ty_chan(t) { w.write_char('C'); enc_ty(w, cx, t); }
ty::ty_rec(fields) {
w.write_str("R[");
for field: ty::field in fields {
w.write_str(field.ident);
w.write_char('=');
enc_mt(w, cx, field.mt);
@ -158,7 +158,7 @@ fn enc_sty(w: &io::writer, cx: &@ctxt, st: &ty::sty) {
}
ty::ty_obj(methods) {
w.write_str("O[");
for m: ty::method in methods {
enc_proto(w, m.proto);
w.write_str(m.ident);
enc_ty_fn(w, cx, m.inputs, m.output, m.cf, m.constrs);
@ -170,7 +170,7 @@ fn enc_sty(w: &io::writer, cx: &@ctxt, st: &ty::sty) {
w.write_str(cx.ds(def));
w.write_char('|');
enc_ty(w, cx, ty);
for t: ty::t in tps { enc_ty(w, cx, t); }
w.write_char(']');
}
ty::ty_var(id) { w.write_char('X'); w.write_str(int::str(id)); }
@ -192,7 +192,7 @@ fn enc_sty(w: &io::writer, cx: &@ctxt, st: &ty::sty) {
ty::ty_constr(ty, cs) {
w.write_str("A[");
enc_ty(w, cx, ty);
for tc: @ty::type_constr in cs { enc_ty_constr(w, cx, tc); }
w.write_char(']');
}
}
@ -208,7 +208,7 @@ fn enc_proto(w: &io::writer, proto: proto) {
fn enc_ty_fn(w: &io::writer, cx: &@ctxt, args: &[ty::arg], out: &ty::t,
cf: &controlflow, constrs: &[@ty::constr]) {
w.write_char('[');
for arg: ty::arg in args {
alt arg.mode {
ty::mo_alias(mut) {
w.write_char('&');
@ -223,7 +223,7 @@ fn enc_ty_fn(w: &io::writer, cx: &@ctxt, args: &[ty::arg], out: &ty::t,
}
w.write_char(']');
let colon = true;
for c: @ty::constr in constrs {
if colon {
w.write_char(':');
colon = false;
@ -241,7 +241,7 @@ fn enc_constr(w: &io::writer, cx: &@ctxt, c: &@ty::constr) {
w.write_str(cx.ds(c.node.id));
w.write_char('|');
let semi = false;
for a: @constr_arg in c.node.args {
if semi { w.write_char(';'); } else { semi = true; }
alt a.node {
carg_base. { w.write_char('*'); }
@ -258,7 +258,7 @@ fn enc_ty_constr(w: &io::writer, cx: &@ctxt, c: &@ty::type_constr) {
w.write_str(cx.ds(c.node.id));
w.write_char('|');
let semi = false;
for a: @ty::ty_constr_arg in c.node.args {
if semi { w.write_char(';'); } else { semi = true; }
alt a.node {
carg_base. { w.write_char('*'); }

@ -54,7 +54,7 @@ fn check_crate(tcx: ty::ctxt, crate: &@ast::crate) {
fn visit_fn(cx: &@ctx, f: &ast::_fn, tp: &[ast::ty_param], sp: &span,
name: &fn_ident, id: ast::node_id, sc: &scope, v: &vt[scope]) {
visit::visit_fn_decl(f.decl, sc, v);
for arg_: ast::arg in f.decl.inputs {
cx.local_map.insert(arg_.id, arg(arg_.mode));
}
let scope = alt (f.proto) {
@ -85,7 +85,7 @@ fn visit_fn(cx: &@ctx, f: &ast::_fn, tp: &[ast::ty_param], sp: &span,
fn visit_item(cx: &@ctx, i: &@ast::item, sc: &scope, v: &vt[scope]) {
alt i.node {
ast::item_obj(o, _, _) {
for f: ast::obj_field in o.fields {
cx.local_map.insert(f.id, objfield(f.mut));
}
}
@ -173,7 +173,7 @@ fn check_call(cx: &ctx, f: &@ast::expr, args: &[@ast::expr], sc: &scope) ->
let unsafe_ts: [ty::t] = ~[];
let unsafe_t_offsets: [uint] = ~[];
let i = 0u;
for arg_t: ty::arg in arg_ts {
if arg_t.mode != ty::mo_val {
let arg = args.(i);
let root = expr_root(cx, arg, false);
@ -232,11 +232,11 @@ fn check_call(cx: &ctx, f: &@ast::expr, args: &[@ast::expr], sc: &scope) ->
}
}
let j = 0u;
for unsafe: ty::t in unsafe_ts {
let offset = unsafe_t_offsets.(j);
j += 1u;
let i = 0u;
for arg_t: ty::arg in arg_ts {
let mut_alias = arg_t.mode == ty::mo_alias(true);
if i != offset &&
ty_can_unsafely_include(cx, unsafe, arg_t.ty, mut_alias) {
@ -250,10 +250,10 @@ fn check_call(cx: &ctx, f: &@ast::expr, args: &[@ast::expr], sc: &scope) ->
}
// Ensure we're not passing a root by mutable alias.
for root: {arg: uint, node: node_id} in mut_roots {
let mut_alias_to_root = false;
let mut_alias_to_root_count = 0u;
for r: node_id in roots {
if root.node == r {
mut_alias_to_root_count += 1u;
if mut_alias_to_root_count > 1u {
@ -277,7 +277,7 @@ fn check_tail_call(cx: &ctx, call: &@ast::expr) {
let args;
let f = alt call.node { ast::expr_call(f, args_) { args = args_; f } };
let i = 0u;
for arg_t: ty::arg in fty_args(cx, ty::expr_ty(cx.tcx, f)) {
if arg_t.mode != ty::mo_val {
let mut_a = arg_t.mode == ty::mo_alias(true);
let ok = true;
@ -316,7 +316,7 @@ fn check_alt(cx: &ctx, input: &@ast::expr, arms: &[ast::arm], sc: &scope,
alt path_def_id(cx, root.ex) { some(did) { ~[did.node] } _ { ~[] } };
let forbidden_tp: [ty::t] =
alt inner_mut(root.ds) { some(t) { ~[t] } _ { ~[] } };
for a: ast::arm in arms {
let dnums = arm_defnums(a);
let new_sc = sc;
if ivec::len(dnums) > 0u {
@ -390,7 +390,7 @@ fn check_var(cx: &ctx, ex: &@ast::expr, p: &ast::path, id: ast::node_id,
if !def_is_local(def, true) { ret; }
let my_defnum = ast::def_id_of_def(def).node;
let var_t = ty::expr_ty(cx.tcx, ex);
for r: restrict in *sc {
// excludes variables introduced since the alias was made
// FIXME This does not work anymore, now that we have macros.
if my_defnum < r.block_defnum {
@ -475,7 +475,7 @@ fn is_immutable_alias(cx: &ctx, sc: &scope, dnum: node_id) -> bool {
some(arg(ast::alias(false))) { ret true; }
_ { }
}
for r: restrict in *sc {
if ivec::member(dnum, r.bindings) { ret true; }
}
ret false;
@ -487,7 +487,7 @@ fn is_immutable_objfield(cx: &ctx, dnum: node_id) -> bool {
fn test_scope(cx: &ctx, sc: &scope, r: &restrict, p: &ast::path) {
let prob = r.ok;
for dep: uint in r.depends_on {
if prob != valid { break; }
prob = sc.(dep).ok;
}
@ -509,8 +509,8 @@ fn test_scope(cx: &ctx, sc: &scope, r: &restrict, p: &ast::path) {
fn deps(sc: &scope, roots: &[node_id]) -> [uint] {
let i = 0u;
let result = ~[];
for r: restrict in *sc {
for dn: node_id in roots {
if ivec::member(dn, r.bindings) { result += ~[i]; }
}
i += 1u;
@ -568,7 +568,7 @@ fn expr_root(cx: &ctx, ex: @ast::expr, autoderef: bool) ->
let mut = false;
alt ty::struct(cx.tcx, auto_unbox.t) {
ty::ty_rec(fields) {
for fld: ty::field in fields {
if str::eq(ident, fld.ident) {
mut = fld.mt.mut != ast::imm;
break;
@ -626,12 +626,12 @@ fn expr_root(cx: &ctx, ex: @ast::expr, autoderef: bool) ->
}
fn mut_field(ds: &@[deref]) -> bool {
for d: deref in *ds { if d.mut { ret true; } }
ret false;
}
fn inner_mut(ds: &@[deref]) -> option::t[ty::t] {
for d: deref in *ds { if d.mut { ret some(d.outer_t); } }
ret none;
}
@ -661,7 +661,7 @@ fn ty_can_unsafely_include(cx: &ctx, needle: ty::t, haystack: ty::t,
if needle == haystack { ret true; }
alt ty::struct(tcx, haystack) {
ty::ty_tag(_, ts) {
for t: ty::t in ts {
if helper(tcx, needle, t, mut) { ret true; }
}
ret false;
@ -671,7 +671,7 @@ fn ty_can_unsafely_include(cx: &ctx, needle: ty::t, haystack: ty::t,
}
ty::ty_uniq(t) { ret helper(tcx, needle, t, false); }
ty::ty_rec(fields) {
for f: ty::field in fields {
if helper(tcx, needle, f.mt.ty, get_mut(mut, f.mt)) {
ret true;
}

@ -93,7 +93,7 @@ fn new_smallintmap_adapter[@K,
iter items() -> @{key: K, val: V} {
let idx = 0u;
for item: option::t[V] in map.v {
alt item {
option::some(elt) {
let value = elt;
@ -106,7 +106,7 @@ fn new_smallintmap_adapter[@K,
}
}
iter keys() -> K {
for each p: @{key: K, val: V} in self.items() { put p.key; }
}
}

@ -17,12 +17,12 @@ fn check_expr(tcx: &ty::ctxt, ex: &@expr, s: &(), v: &visit::vt[()]) {
fn check_arms(tcx: &ty::ctxt, arms: &[arm]) {
let i = 0;
for arm: arm in arms {
for arm_pat: @pat in arm.pats {
let reachable = true;
let j = 0;
while j < i {
for prev_pat: @pat in arms.(j).pats {
if pattern_supersedes(tcx, prev_pat, arm_pat) {
reachable = false;
}
@ -41,7 +41,7 @@ fn pattern_supersedes(tcx: &ty::ctxt, a: &@pat, b: &@pat) -> bool {
fn patterns_supersede(tcx: &ty::ctxt, as: &[@pat], bs: &[@pat]) ->
bool {
let i = 0;
for a: @pat in as {
if !pattern_supersedes(tcx, a, bs.(i)) { ret false; }
i += 1;
}
@ -50,9 +50,9 @@ fn pattern_supersedes(tcx: &ty::ctxt, a: &@pat, b: &@pat) -> bool {
fn field_patterns_supersede(tcx: &ty::ctxt, fas: &[field_pat],
fbs: &[field_pat]) -> bool {
let wild = @{id: 0, node: pat_wild, span: dummy_sp()};
for fa: field_pat in fas {
let pb = wild;
for fb: field_pat in fbs {
if fa.ident == fb.ident { pb = fb.pat; }
}
if !pattern_supersedes(tcx, fa.pat, pb) { ret false; }

@ -42,12 +42,12 @@ fn collect_freevars(def_map: &resolve::def_map, sess: &session::session,
walker: &fn(&visit::vt[()]) ,
initial_decls: [ast::node_id]) -> freevar_info {
let decls = new_int_hash();
for decl: ast::node_id in initial_decls { set_add(decls, decl); }
let refs = @mutable ~[];
let walk_fn = lambda(f: &ast::_fn, tps: &[ast::ty_param], sp: &span,
i: &ast::fn_ident, nid: ast::node_id) {
for a: ast::arg in f.decl.inputs { set_add(decls, a.id); }
};
let walk_expr = lambda(expr: &@ast::expr) {
alt expr.node {
@ -87,7 +87,7 @@ fn collect_freevars(def_map: &resolve::def_map, sess: &session::session,
// node ids of the definitions.
let canonical_refs = ~[];
let defs = new_int_hash();
for ref_id_: ast::node_id in *refs {
let ref_id = ref_id_;
let def_id = ast::def_id_of_def(def_map.get(ref_id)).node;
if !decls.contains_key(def_id) && !defs.contains_key(def_id) {

@ -239,7 +239,7 @@ fn map_crate(e: &@env, c: &@ast::crate) {
}
fn resolve_imports(e: &env) {
for each it: @{key: ast::node_id, val: import_state} in e.imports.items()
{
alt it.val {
todo(item, sc) { resolve_import(e, item, sc); }
@ -345,7 +345,7 @@ fn visit_fn_with_scope(e: &@env, f: &ast::_fn, tp: &[ast::ty_param],
// here's where we need to set up the mapping
// for f's constrs in the table.
for c: @ast::constr in f.decl.constraints {
resolve_constr(e, id, c, sc, v);
}
visit::visit_fn(f, tp, sp, name, id,
@ -531,7 +531,7 @@ fn unresolved_err(e: &env, sc: &scopes, sp: &span, name: &ident, kind: &str) {
fail;
}
let err_scope = find_fn_or_mod_scope(sc);
for rs: {ident: str, sc: scope} in e.reported {
if str::eq(rs.ident, name) && err_scope == rs.sc { ret; }
}
e.reported += ~[{ident: name, sc: err_scope}];
@ -704,7 +704,7 @@ fn lookup_in_scope(e: &env, sc: scopes, sp: &span, name: &ident,
fn lookup_in_ty_params(name: &ident, ty_params: &[ast::ty_param]) ->
option::t[def] {
let i = 0u;
for tp: ast::ty_param in ty_params {
if str::eq(tp.ident, name) { ret some(ast::def_ty_arg(i,tp.kind)); }
i += 1u;
}
@ -727,7 +727,7 @@ fn lookup_in_fn(name: &ident, decl: &ast::fn_decl,
option::t[def] {
alt ns {
ns_value. {
for a: ast::arg in decl.inputs {
if str::eq(a.ident, name) {
ret some(ast::def_arg(local_def(a.id)));
}
@ -743,7 +743,7 @@ fn lookup_in_obj(name: &ident, ob: &ast::_obj, ty_params: &[ast::ty_param],
ns: namespace) -> option::t[def] {
alt ns {
ns_value. {
for f: ast::obj_field in ob.fields {
if str::eq(f.ident, name) {
ret some(ast::def_obj_field(local_def(f.id)));
}
@ -779,7 +779,7 @@ fn lookup_in_block(name: &ident, b: &ast::blk_, ns: namespace) ->
ret some(ast::def_ty(local_def(it.id)));
}
} else if (ns == ns_value) {
for v: ast::variant in variants {
if str::eq(v.node.name, name) {
let i = v.node.id;
ret some(ast::def_variant(local_def(it.id),
@ -971,7 +971,7 @@ fn lookup_glob_in_mod(e: &env, info: @indexed_mod, sp: &span, id: &ident,
} else if (ivec::len(matches) == 1u) {
ret some(matches.(0).def);
} else {
for match: glob_imp_def in matches {
let sp = match.item.span;
e.sess.span_note(sp, #fmt("'%s' is imported here", id));
}
@ -1053,7 +1053,7 @@ fn add_to_index(index: &hashmap[ident, list[mod_index_entry]], id: &ident,
fn index_mod(md: &ast::_mod) -> mod_index {
let index = new_str_hash[list[mod_index_entry]]();
for it: @ast::view_item in md.view_items {
alt it.node {
ast::view_item_import(ident, _, _) | ast::view_item_use(ident, _, _)
{
@ -1065,7 +1065,7 @@ fn index_mod(md: &ast::_mod) -> mod_index {
}
}
}
for it: @ast::item in md.items {
alt it.node {
ast::item_const(_, _) | ast::item_fn(_, _) | ast::item_mod(_) |
ast::item_native_mod(_) | ast::item_ty(_, _) |
@ -1075,7 +1075,7 @@ fn index_mod(md: &ast::_mod) -> mod_index {
ast::item_tag(variants, _) {
add_to_index(index, it.ident, mie_item(it));
let variant_idx: uint = 0u;
for v: ast::variant in variants {
add_to_index(index, v.node.name,
mie_tag_variant(it, variant_idx));
variant_idx += 1u;
@ -1088,7 +1088,7 @@ fn index_mod(md: &ast::_mod) -> mod_index {
fn index_nmod(md: &ast::native_mod) -> mod_index {
let index = new_str_hash[list[mod_index_entry]]();
for it: @ast::view_item in md.view_items {
alt it.node {
ast::view_item_use(ident, _, _) | ast::view_item_import(ident, _, _)
{
@ -1097,7 +1097,7 @@ fn index_nmod(md: &ast::native_mod) -> mod_index {
ast::view_item_import_glob(_, _) | ast::view_item_export(_, _) { }
}
}
for it: @ast::native_item in md.items {
add_to_index(index, it.ident, mie_native_item(it));
}
ret index;
@ -1125,7 +1125,7 @@ fn ns_for_def(d: def) -> namespace {
fn lookup_external(e: &env, cnum: int, ids: &[ident], ns: namespace) ->
option::t[def] {
for d: def in csearch::lookup_defs(e.sess.get_cstore(), cnum, ids) {
e.ext_map.insert(ast::def_id_of_def(d), ids);
if ns == ns_for_def(d) { ret some(d); }
}
@ -1211,7 +1211,7 @@ fn check_item(e: &@env, i: &@ast::item, x: &(), v: &vt[()]) {
ast::item_obj(ob, ty_params, _) {
fn field_name(field: &ast::obj_field) -> ident { ret field.ident; }
ensure_unique(*e, i.span, ob.fields, field_name, "object field");
for m: @ast::method in ob.methods {
check_fn(*e, m.span, m.node.meth);
}
ensure_unique(*e, i.span, typaram_names(ty_params),
@ -1249,7 +1249,7 @@ fn check_arm(e: &@env, a: &ast::arm, x: &(), v: &vt[()]) {
e.sess.span_err(a.pats.(i).span,
"inconsistent number of bindings");
} else {
for name: ident in ch.seen {
if is_none(ivec::find(bind str::eq(name, _), seen0)) {
// Fight the alias checker
let name_ = name;
@ -1267,7 +1267,7 @@ fn check_block(e: &@env, b: &ast::blk, x: &(), v: &vt[()]) {
let values = checker(*e, "value");
let types = checker(*e, "type");
let mods = checker(*e, "module");
for st: @ast::stmt in b.node.stmts {
alt st.node {
ast::stmt_decl(d, _) {
alt d.node {
@ -1280,7 +1280,7 @@ fn check_block(e: &@env, b: &ast::blk, x: &(), v: &vt[()]) {
alt it.node {
ast::item_tag(variants, _) {
add_name(types, it.span, it.ident);
for v: ast::variant in variants {
add_name(values, v.span, v.node.name);
}
}
@ -1340,7 +1340,7 @@ fn checker(e: &env, kind: str) -> checker {
}
fn add_name(ch: &checker, sp: &span, name: &ident) {
for s: ident in ch.seen {
if str::eq(s, name) {
ch.sess.span_fatal(sp, "duplicate " + ch.kind + " name: " + name);
}
@ -1353,7 +1353,7 @@ fn ident_id(i: &ident) -> ident { ret i; }
fn ensure_unique[T](e: &env, sp: &span, elts: &[T], id: fn(&T) -> ident ,
kind: &str) {
let ch = checker(e, kind);
for elt: T in elts { add_name(ch, sp, id(elt)); }
}
// Local Variables:

@ -95,7 +95,7 @@ fn type_of(cx: &@crate_ctxt, sp: &span, t: &ty::t) -> TypeRef {
fn type_of_explicit_args(cx: &@crate_ctxt, sp: &span, inputs: &[ty::arg]) ->
[TypeRef] {
let atys: [TypeRef] = ~[];
for arg: ty::arg in inputs {
let t: TypeRef = type_of_inner(cx, sp, arg.ty);
t = alt arg.mode {
ty::mo_alias(_) { T_ptr(t) }
@ -231,7 +231,7 @@ fn type_of_inner(cx: &@crate_ctxt, sp: &span, t: &ty::t) -> TypeRef {
ty::ty_task. { llty = T_taskptr(*cx); }
ty::ty_rec(fields) {
let tys: [TypeRef] = ~[];
for f: ty::field in fields {
tys += ~[type_of_inner(cx, sp, f.mt.ty)];
}
llty = T_struct(tys);
@ -305,7 +305,7 @@ fn type_of_or_i8(bcx: &@block_ctxt, typ: ty::t) -> TypeRef {
// gas doesn't!
fn sanitize(s: &str) -> str {
let result = "";
for c: u8 in s {
if c == '@' as u8 {
result += "boxed_";
} else {
@ -401,7 +401,7 @@ fn trans_native_call(b: &builder, glues: @glue_fns, lltaskptr: ValueRef,
let n: int = std::ivec::len[ValueRef](args) as int;
let llnative: ValueRef = get_simple_extern_fn(externs, llmod, name, n);
let call_args: [ValueRef] = ~[];
for a: ValueRef in args { call_args += ~[b.ZExtOrBitCast(a, T_int())]; }
ret b.Call(llnative, call_args);
}
@ -528,7 +528,7 @@ fn static_size_of_tag(cx: &@crate_ctxt, sp: &span, t: &ty::t) -> uint {
let max_size = 0u;
let variants = ty::tag_variants(cx.tcx, tid);
for variant: ty::variant_info in variants {
let tup_ty =
simplify_type(cx, ty::mk_tup(cx.tcx, variant.args));
// Perform any type parameter substitutions.
@ -562,7 +562,7 @@ fn dynamic_size_of(cx: &@block_ctxt, t: ty::t) -> result {
let off = C_int(0);
let max_align = C_int(1);
let bcx = cx;
for e: ty::t in elts {
let elt_align = align_of(bcx, e);
bcx = elt_align.bcx;
let elt_size = size_of(bcx, e);
@ -581,7 +581,7 @@ fn dynamic_size_of(cx: &@block_ctxt, t: ty::t) -> result {
}
ty::ty_rec(flds) {
let tys: [ty::t] = ~[];
for f: ty::field in flds { tys += ~[f.mt.ty]; }
ret align_elements(cx, tys);
}
ty::ty_tup(elts) {
@ -596,12 +596,12 @@ fn dynamic_size_of(cx: &@block_ctxt, t: ty::t) -> result {
let max_size: ValueRef = alloca(bcx, T_int());
bcx.build.Store(C_int(0), max_size);
let variants = ty::tag_variants(bcx_tcx(bcx), tid);
for variant: ty::variant_info in variants {
// Perform type substitution on the raw argument types.
let raw_tys: [ty::t] = variant.args;
let tys: [ty::t] = ~[];
for raw_ty: ty::t in raw_tys {
let t = ty::substitute_type_params(bcx_tcx(cx), tps, raw_ty);
tys += ~[t];
}
@ -640,7 +640,7 @@ fn dynamic_align_of(cx: &@block_ctxt, t: &ty::t) -> result {
ty::ty_rec(flds) {
let a = C_int(1);
let bcx = cx;
for f: ty::field in flds {
let align = align_of(bcx, f.mt.ty);
bcx = align.bcx;
a = umax(bcx, a, align.val);
@ -674,7 +674,7 @@ fn dynamic_align_of(cx: &@block_ctxt, t: &ty::t) -> result {
// in C_int()
fn GEPi(cx: &@block_ctxt, base: ValueRef, ixs: &[int]) -> ValueRef {
let v: [ValueRef] = ~[];
for i: int in ixs { v += ~[C_int(i)]; }
ret cx.build.InBoundsGEP(base, v);
}
@ -765,7 +765,7 @@ fn GEP_tup_like(cx: &@block_ctxt, t: &ty::t, base: ValueRef, ixs: &[int]) ->
let s = split_type(bcx_ccx(cx), t, ixs, 0u);
let args = ~[];
for typ: ty::t in s.prefix { args += ~[typ]; }
let prefix_ty = ty::mk_tup(bcx_tcx(cx), args);
let bcx = cx;
@ -789,7 +789,7 @@ fn GEP_tag(cx: @block_ctxt, llblobptr: ValueRef, tag_id: &ast::def_id,
let i = 0;
let true_arg_tys: [ty::t] = ~[];
for aty: ty::t in arg_tys {
let arg_ty = ty::substitute_type_params(bcx_tcx(cx), ty_substs, aty);
true_arg_tys += ~[arg_ty];
if i == ix { elem_ty = arg_ty; }
@ -909,7 +909,7 @@ fn linearize_ty_params(cx: &@block_ctxt, t: &ty::t) ->
alt ty::struct(bcx_tcx(r.cx), t) {
ty::ty_param(pid,_) {
let seen: bool = false;
for d: uint in r.defs { if d == pid { seen = true; } }
if !seen {
r.vals += ~[r.cx.fcx.lltydescs.(pid)];
r.defs += ~[pid];
@ -983,7 +983,7 @@ fn get_derived_tydesc(cx: &@block_ctxt, t: &ty::t, escapes: bool,
let tdp = bcx.build.GEP(tydescs, ~[C_int(0), C_int(i)]);
bcx.build.Store(root, tdp);
i += 1;
for td: ValueRef in tys.descs {
let tdp = bcx.build.GEP(tydescs, ~[C_int(0), C_int(i)]);
bcx.build.Store(td, tdp);
i += 1;
@ -1002,7 +1002,7 @@ fn get_derived_tydesc(cx: &@block_ctxt, t: &ty::t, escapes: bool,
alloca(bcx, T_array(T_ptr(bcx_ccx(bcx).tydesc_type),
n_params + 1u));
let i = 0;
for td: ValueRef in tys.descs {
let tdp = bcx.build.GEP(llparamtydescs, ~[C_int(0), C_int(i)]);
bcx.build.Store(td, tdp);
i += 1;
@ -1173,7 +1173,7 @@ fn make_generic_glue_inner(cx: &@local_ctxt, sp: &span, t: &ty::t,
// TODO: Implement some kind of freeze operation in the standard library.
let lltydescs_frozen = ~[];
for lltydesc: ValueRef in lltydescs { lltydescs_frozen += ~[lltydesc]; }
fcx.lltydescs = lltydescs_frozen;
let bcx = new_top_block_ctxt(fcx);
@ -1201,7 +1201,7 @@ fn make_generic_glue(cx: &@local_ctxt, sp: &span, t: &ty::t, llfn: ValueRef,
}
fn emit_tydescs(ccx: &@crate_ctxt) {
for each pair: @{key: ty::t, val: @tydesc_info} in ccx.tydescs.items() {
let glue_fn_ty = T_ptr(T_glue_fn(*ccx));
let cmp_fn_ty = T_ptr(T_cmp_glue_fn(*ccx));
let ti = pair.val;
@ -1518,7 +1518,7 @@ fn trans_res_drop(cx: @block_ctxt, rs: ValueRef, did: &ast::def_id,
cx.build.Load(cx.build.GEP(dtor_pair,
~[C_int(0), C_int(abi::fn_field_box)]));
let args = ~[cx.fcx.llretptr, cx.fcx.lltaskptr, dtor_env];
for tp: ty::t in tps {
let ti: option::t[@tydesc_info] = none;
let td = get_tydesc(cx, tp, false, ti);
args += ~[td.val];
@ -1814,7 +1814,7 @@ fn iter_structural_ty_full(cx: &@block_ctxt, av: ValueRef, t: &ty::t,
alt ty::struct(ccx.tcx, fn_ty) {
ty::ty_fn(_, args, _, _, _) {
let j = 0;
for a: ty::arg in args {
let rslt = GEP_tag(cx, a_tup, tid, variant.id, tps, j);
let llfldp_a = rslt.val;
cx = rslt.bcx;
@ -1883,7 +1883,7 @@ fn iter_structural_ty_full(cx: &@block_ctxt, av: ValueRef, t: &ty::t,
let llswitch = bcx.build.Switch(lldiscrim_a, unr_cx.llbb, n_variants);
let next_cx = new_sub_block_ctxt(bcx, "tag-iter-next");
let i = 0u;
for variant: ty::variant_info in variants {
let variant_cx =
new_sub_block_ctxt(bcx,
"tag-iter-variant-" +
@ -2041,7 +2041,7 @@ fn lazily_emit_all_tydesc_glue(cx: &@block_ctxt,
fn lazily_emit_all_generic_info_tydesc_glues(cx: &@block_ctxt,
gi: &generic_info) {
for ti: option::t[@tydesc_info] in gi.static_tis {
lazily_emit_all_tydesc_glue(cx, ti);
}
}
@ -3386,7 +3386,7 @@ fn join_results(parent_cx: &@block_ctxt, t: TypeRef, ins: &[result]) ->
let live: [result] = ~[];
let vals: [ValueRef] = ~[];
let bbs: [BasicBlockRef] = ~[];
for r: result in ins {
if !is_terminated(r.bcx) {
live += ~[r];
vals += ~[r.val];
@ -3407,14 +3407,14 @@ fn join_results(parent_cx: &@block_ctxt, t: TypeRef, ins: &[result]) ->
// We have >1 incoming edges. Make a join block and br+phi them into it.
let join_cx = new_sub_block_ctxt(parent_cx, "join");
for r: result in live { r.bcx.build.Br(join_cx.llbb); }
let phi = join_cx.build.Phi(t, vals, bbs);
ret rslt(join_cx, phi);
}
fn join_branches(parent_cx: &@block_ctxt, ins: &[result]) -> @block_ctxt {
let out = new_sub_block_ctxt(parent_cx, "join");
for r: result in ins {
if !is_terminated(r.bcx) { r.bcx.build.Br(out.llbb); }
}
ret out;
@ -3571,7 +3571,7 @@ fn build_environment(bcx: @block_ctxt, lltydescs: [ValueRef],
GEP_tup_like(bcx, closure_ty, closure,
~[0, abi::closure_elt_bindings]);
bcx = bindings.bcx;
for lv: lval_result in bound_vals {
let bound = GEP_tup_like(bcx, bindings_ty, bindings.val,
~[0, i as int]);
bcx = bound.bcx;
@ -3591,7 +3591,7 @@ fn build_environment(bcx: @block_ctxt, lltydescs: [ValueRef],
~[0, abi::closure_elt_ty_params]);
bcx = ty_params_slot.bcx;
i = 0u;
for td: ValueRef in lltydescs {
let ty_param_slot = GEPi(bcx, ty_params_slot.val, ~[0, i as int]);
bcx.build.Store(td, ty_param_slot);
i += 1u;
@ -3612,7 +3612,7 @@ fn build_closure(cx: &@block_ctxt, upvars: &@[ast::node_id], copying: bool)
closure_tys += ~[option::get(cx.fcx.iterbodyty)];
}
// Package up the upvars
for nid: ast::node_id in *upvars {
closure_vals += ~[trans_var(cx, cx.sp, nid)];
let ty = ty::node_id_to_monotype(bcx_tcx(cx), nid);
if !copying { ty = ty::mk_mut_ptr(bcx_tcx(cx), ty); }
@ -3689,7 +3689,7 @@ fn load_environment(enclosing_cx: &@block_ctxt, fcx: &@fn_ctxt,
i += 1u;
}
// Load the acutal upvars.
for upvar_id: ast::node_id in *upvars {
let upvarptr =
GEP_tup_like(bcx, ty, llclosure, path + ~[i as int]);
bcx = upvarptr.bcx;
@ -3875,7 +3875,7 @@ fn lval_generic_fn(cx: &@block_ctxt, tpt: &ty::ty_param_kinds_and_ty,
let bcx = lv.res.bcx;
let tydescs: [ValueRef] = ~[];
let tis: [option::t[@tydesc_info]] = ~[];
for t: ty::t in tys {
// TODO: Doesn't always escape.
let ti = none[@tydesc_info];
@ -4368,7 +4368,7 @@ fn trans_bind_thunk(cx: &@local_ctxt, sp: &span, incoming_fty: &ty::t,
let outgoing_arg_index: uint = 0u;
let llout_arg_tys: [TypeRef] =
type_of_explicit_args(cx.ccx, sp, outgoing_args);
for arg: option::t[@ast::expr] in args {
let out_arg = outgoing_args.(outgoing_arg_index);
let llout_arg_ty = llout_arg_tys.(outgoing_arg_index);
let is_val = out_arg.mode == ty::mo_val;
@ -4452,7 +4452,7 @@ fn trans_bind_1(cx: &@block_ctxt, f: &@ast::expr, f_res: &lval_result,
args: &[option::t[@ast::expr]], id: ast::node_id) ->
result {
let bound: [@ast::expr] = ~[];
for argopt: option::t[@ast::expr] in args {
alt argopt { none. { } some(e) { bound += ~[e]; } }
}
@ -4487,7 +4487,7 @@ fn trans_bind_1(cx: &@block_ctxt, f: &@ast::expr, f_res: &lval_result,
let bound_tys: [ty::t] = ~[outgoing_fty];
let bound_vals: [lval_result] = ~[bound_f];
// Translate the bound expressions.
for e: @ast::expr in bound {
let lv = trans_lval(bcx, e);
bcx = lv.res.bcx;
bound_vals += ~[lv];
@ -4660,7 +4660,7 @@ fn trans_args(cx: &@block_ctxt, llenv: ValueRef,
// to cast her view of the arguments to the caller's view.
let arg_tys = type_of_explicit_args(bcx_ccx(cx), cx.sp, args);
let i = 0u;
for e: @ast::expr in es {
if bcx.build.is_terminated() {
// This means an earlier arg was divergent.
// So this arg can't be evaluated.
@ -4816,7 +4816,7 @@ fn trans_vec(cx: &@block_ctxt, args: &[@ast::expr], id: ast::node_id) ->
std::ivec::init_elt[ty::t](unit_ty,
std::ivec::len(args)));
let i: int = 0;
for e: @ast::expr in args {
let src = trans_lval(bcx, e);
bcx = src.res.bcx;
let dst_res = GEP_tup_like(bcx, pseudo_tup_ty, body, ~[0, i]);
@ -4924,7 +4924,7 @@ fn trans_ivec(bcx: @block_ctxt, args: &[@ast::expr], id: ast::node_id) ->
// Store the individual elements.
let i = 0u;
for e: @ast::expr in args {
let lv = trans_lval(bcx, e);
bcx = lv.res.bcx;
let lleltptr;
@ -4961,12 +4961,12 @@ fn trans_rec(cx: &@block_ctxt, fields: &[ast::field],
}
let ty_fields: [ty::field] = ~[];
alt ty::struct(bcx_tcx(cx), t) { ty::ty_rec(flds) { ty_fields = flds; } }
for tf: ty::field in ty_fields {
let e_ty = tf.mt.ty;
let dst_res = GEP_tup_like(bcx, t, rec_val, ~[0, i]);
bcx = dst_res.bcx;
let expr_provided = false;
for f: ast::field in fields {
if str::eq(f.node.ident, tf.ident) {
expr_provided = true;
let lv = trans_lval(bcx, f.node.expr);
@ -5525,7 +5525,7 @@ fn trans_stmt(cx: &@block_ctxt, s: &ast::stmt) -> result {
ast::stmt_decl(d, _) {
alt d.node {
ast::decl_local(locals) {
for local: @ast::local in locals {
bcx = init_local(bcx, local).bcx;
}
}
@ -5624,7 +5624,7 @@ fn trans_block_cleanups(cx: &@block_ctxt, cleanup_cx: &@block_ctxt) ->
iter block_locals(b: &ast::blk) -> @ast::local {
// FIXME: putting from inside an iter block doesn't work, so we can't
// use the index here.
for s: @ast::stmt in b.node.stmts {
alt s.node {
ast::stmt_decl(d, _) {
alt d.node {
@ -5725,7 +5725,7 @@ fn trans_block(cx: &@block_ctxt, b: &ast::blk, output: &out_method) ->
bcx.fcx.lllocals.insert(local.node.id, r.val);
}
let r = rslt(bcx, C_nil());
for s: @ast::stmt in b.node.stmts {
r = trans_stmt(bcx, *s);
bcx = r.bcx;
@ -5868,7 +5868,7 @@ fn create_llargs_for_fn_args(cx: &@fn_ctxt, proto: ast::proto,
some(tt) { cx.llself = some[val_self_pair]({v: cx.llenv, t: tt}); }
none. {
let i = 0u;
for tp: ast::ty_param in ty_params {
let llarg = llvm::LLVMGetParam(cx.llfn, arg_n);
assert (llarg as int != 0);
cx.lltydescs += ~[llarg];
@ -5891,7 +5891,7 @@ fn create_llargs_for_fn_args(cx: &@fn_ctxt, proto: ast::proto,
// Populate the llargs field of the function context with the ValueRefs
// that we get from llvm::LLVMGetParam for each argument.
for arg: ast::arg in args {
let llarg = llvm::LLVMGetParam(cx.llfn, arg_n);
assert (llarg as int != 0);
cx.llargs.insert(arg.id, llarg);
@ -5903,7 +5903,7 @@ fn copy_args_to_allocas(fcx: @fn_ctxt, args: &[ast::arg],
arg_tys: &[ty::arg]) {
let bcx = new_raw_block_ctxt(fcx, fcx.llcopyargs);
let arg_n: uint = 0u;
for aarg: ast::arg in args {
if aarg.mode == ast::val {
let argval;
alt bcx.fcx.llargs.find(aarg.id) {
@ -5925,7 +5925,7 @@ fn copy_args_to_allocas(fcx: @fn_ctxt, args: &[ast::arg],
fn add_cleanups_for_args(bcx: &@block_ctxt, args: &[ast::arg],
arg_tys: &[ty::arg]) {
let arg_n: uint = 0u;
for aarg: ast::arg in args {
if aarg.mode == ast::val || aarg.mode == ast::move {
let argval;
alt bcx.fcx.llargs.find(aarg.id) {
@ -5955,7 +5955,7 @@ fn arg_tys_of_fn(ccx: &@crate_ctxt, id: ast::node_id) -> [ty::arg] {
fn populate_fn_ctxt_from_llself(fcx: @fn_ctxt, llself: val_self_pair) {
let bcx = llstaticallocas_block_ctxt(fcx);
let field_tys: [ty::t] = ~[];
for f: ast::obj_field in bcx.fcx.lcx.obj_fields {
field_tys += ~[node_id_type(bcx_ccx(bcx), f.id)];
}
// Synthesize a tuple type for the fields so that GEP_tup_like() can work
@ -5986,7 +5986,7 @@ fn populate_fn_ctxt_from_llself(fcx: @fn_ctxt, llself: val_self_pair) {
obj_fields = vi2p(bcx, obj_fields, T_ptr(llfields_ty));
} else { obj_fields = vi2p(bcx, obj_fields, T_ptr(T_i8())); }
let i: int = 0;
for p: ast::ty_param in fcx.lcx.obj_typarams {
let lltyparam: ValueRef =
bcx.build.GEP(obj_typarams, ~[C_int(0), C_int(i)]);
lltyparam = bcx.build.Load(lltyparam);
@ -5994,7 +5994,7 @@ fn populate_fn_ctxt_from_llself(fcx: @fn_ctxt, llself: val_self_pair) {
i += 1;
}
i = 0;
for f: ast::obj_field in fcx.lcx.obj_fields {
let rslt = GEP_tup_like(bcx, fields_tup_ty, obj_fields, ~[0, i]);
bcx = llstaticallocas_block_ctxt(fcx);
let llfield = rslt.val;
@ -6166,7 +6166,7 @@ fn trans_tag_variant(cx: @local_ctxt, tag_id: ast::node_id,
let fn_args: [ast::arg] = ~[];
let i = 0u;
for varg: ast::variant_arg in variant.node.args {
fn_args +=
~[{mode: ast::alias(false),
ty: varg.ty,
@ -6188,7 +6188,7 @@ fn trans_tag_variant(cx: @local_ctxt, tag_id: ast::node_id,
fn_args, ty_params);
let ty_param_substs: [ty::t] = ~[];
i = 0u;
for tp: ast::ty_param in ty_params {
ty_param_substs += ~[ty::mk_param(cx.ccx.tcx, i, tp.kind)];
i += 1u;
}
@ -6210,7 +6210,7 @@ fn trans_tag_variant(cx: @local_ctxt, tag_id: ast::node_id,
bcx.build.GEP(lltagptr, ~[C_int(0), C_int(1)])
};
i = 0u;
for va: ast::variant_arg in variant.node.args {
let rslt =
GEP_tag(bcx, llblobptr, ast::local_def(tag_id),
ast::local_def(variant.node.id), ty_param_substs,
@ -6313,7 +6313,7 @@ fn trans_item(cx: @local_ctxt, item: &ast::item) {
let sub_cx = extend_path(cx, item.ident);
let degen = std::ivec::len(variants) == 1u;
let i = 0;
for variant: ast::variant in variants {
trans_tag_variant(sub_cx, item.id, variant, i, degen, tps);
i += 1;
}
@ -6330,7 +6330,7 @@ fn trans_item(cx: @local_ctxt, item: &ast::item) {
// only as a convenience for humans working with the code, to organize names
// and control visibility.
fn trans_mod(cx: @local_ctxt, m: &ast::_mod) {
for item: @ast::item in m.items { trans_item(cx, *item); }
}
fn get_pair_fn_ty(llpairty: TypeRef) -> TypeRef {
@ -6525,7 +6525,7 @@ fn decl_native_fn_and_pair(ccx: &@crate_ctxt, sp: &span, path: &[str],
if uses_retptr { call_args += ~[bcx.fcx.llretptr]; }
let arg_n = 3u;
for each i: uint in uint::range(0u, num_ty_param) {
let llarg = llvm::LLVMGetParam(fcx.llfn, arg_n);
fcx.lltydescs += ~[llarg];
assert (llarg as int != 0);
@ -6558,7 +6558,7 @@ fn decl_native_fn_and_pair(ccx: &@crate_ctxt, sp: &span, path: &[str],
first_arg_n: uint, uses_retptr: bool, cc: uint)
-> {val: ValueRef, rptr: ValueRef} {
let call_arg_tys: [TypeRef] = ~[];
for arg: ValueRef in call_args { call_arg_tys += ~[val_ty(arg)]; }
let llnativefnty;
if uses_retptr {
@ -6586,7 +6586,7 @@ fn decl_native_fn_and_pair(ccx: &@crate_ctxt, sp: &span, path: &[str],
let drop_args: [{val: ValueRef, ty: ty::t}] = ~[];
let i = arg_n;
for arg: ty::arg in args {
let llarg = llvm::LLVMGetParam(fcx.llfn, i);
assert (llarg as int != 0);
if cast_to_i32 {
@ -6638,7 +6638,7 @@ fn decl_native_fn_and_pair(ccx: &@crate_ctxt, sp: &span, path: &[str],
if !rty_is_nil && !uses_retptr { bcx.build.Store(r, rptr); }
for d: {val: ValueRef, ty: ty::t} in drop_args {
bcx = drop_ty(bcx, d.val, d.ty).bcx;
}
bcx.build.RetVoid();
@ -6690,7 +6690,7 @@ fn collect_item_2(ccx: &@crate_ctxt, i: &@ast::item, pt: &[str],
}
ast::item_obj(ob, tps, ctor_id) {
decl_fn_and_pair(ccx, i.span, new_pt, "obj_ctor", tps, ctor_id);
for m: @ast::method in ob.methods {
ccx.obj_methods.insert(m.node.id, ());
}
}
@ -6724,7 +6724,7 @@ fn collect_tag_ctor(ccx: @crate_ctxt, i: &@ast::item, pt: &[str],
visit::visit_item(i, new_pt, v);
alt i.node {
ast::item_tag(variants, tps) {
for variant: ast::variant in variants {
if std::ivec::len(variant.node.args) != 0u {
decl_fn_and_pair(ccx, i.span, new_pt + ~[variant.node.name],
"tag", tps, variant.node.id);
@ -6891,7 +6891,7 @@ fn create_module_map(ccx: &@crate_ctxt) -> ValueRef {
llvm::LLVMSetLinkage(map,
lib::llvm::LLVMInternalLinkage as llvm::Linkage);
let elts: [ValueRef] = ~[];
for each item: @{key: str, val: ValueRef} in ccx.module_data.items() {
let elt = C_struct(~[p2i(C_cstr(ccx, item.key)), p2i(item.val)]);
elts += ~[elt];
}
@ -7040,7 +7040,7 @@ fn trans_crate(sess: &session::session, crate: &@ast::crate, tcx: &ty::ctxt,
log_err #fmt("n_real_glues: %u", ccx.stats.n_real_glues);
for timing: {ident: str, time: int} in *ccx.stats.fn_times {
log_err #fmt("time: %s took %d ms", timing.ident, timing.time);
}
}

@ -47,7 +47,7 @@ fn variant_opt(ccx: &@crate_ctxt, pat_id: ast::node_id) -> opt {
let vdef = ast::variant_def_ids(ccx.tcx.def_map.get(pat_id));
let variants = ty::tag_variants(ccx.tcx, vdef.tg);
let i = 0u;
for v: ty::variant_info in variants {
if vdef.var == v.id { ret var(i, vdef); }
i += 1u;
}
@ -81,7 +81,7 @@ type enter_pat = fn(&@ast::pat) -> option::t[[@ast::pat]] ;
fn enter_match(m: &match, col: uint, val: ValueRef, e: &enter_pat) -> match {
let result = ~[];
for br: match_branch in m {
alt e(br.pats.(col)) {
some(sub) {
let pats =
@ -132,9 +132,9 @@ fn enter_rec(m: &match, col: uint, fields: &[ast::ident], val: ValueRef) ->
alt p.node {
ast::pat_rec(fpats, _) {
let pats = ~[];
for fname: ast::ident in fields {
let pat = dummy;
for fpat: ast::field_pat in fpats {
if str::eq(fpat.ident, fname) { pat = fpat.pat; break; }
}
pats += ~[pat];
@ -172,12 +172,12 @@ fn enter_box(m: &match, col: uint, val: ValueRef) -> match {
fn get_options(ccx: &@crate_ctxt, m: &match, col: uint) -> [opt] {
fn add_to_set(set: &mutable [opt], val: &opt) {
for l: opt in set { if opt_eq(l, val) { ret; } }
set += ~[val];
}
let found = ~[];
for br: match_branch in m {
alt br.pats.(col).node {
ast::pat_lit(l) { add_to_set(found, lit(l)); }
ast::pat_tag(_, _) {
@ -219,10 +219,10 @@ fn extract_variant_args(bcx: @block_ctxt, pat_id: ast::node_id,
fn collect_record_fields(m: &match, col: uint) -> [ast::ident] {
let fields = ~[];
for br: match_branch in m {
alt br.pats.(col).node {
ast::pat_rec(fs, _) {
for f: ast::field_pat in fs {
if !ivec::any(bind str::eq(f.ident, _), fields) {
fields += ~[f.ident];
}
@ -235,7 +235,7 @@ fn collect_record_fields(m: &match, col: uint) -> [ast::ident] {
}
fn any_box_pat(m: &match, col: uint) -> bool {
for br: match_branch in m {
alt br.pats.(col).node { ast::pat_box(_) { ret true; } _ { } }
}
ret false;
@ -296,7 +296,7 @@ fn compile_submatch(bcx: @block_ctxt, m: &match, vals: [ValueRef],
ivec::slice(vals, col + 1u, ivec::len(vals));
let ccx = bcx.fcx.lcx.ccx;
let pat_id = 0;
for br: match_branch in m {
// Find a real id (we're adding placeholder wildcard patterns, but
// each column is guaranteed to have at least one real pattern)
if pat_id == 0 { pat_id = br.pats.(col).id; }
@ -385,7 +385,7 @@ fn compile_submatch(bcx: @block_ctxt, m: &match, vals: [ValueRef],
} else { C_int(0) }; // Placeholder for when not using a switch
// Compile subtrees for each option
for opt: opt in opts {
for opt: opt in opts {
let opt_cx = new_sub_block_ctxt(bcx, "match_case");
alt kind {
single. { bcx.build.Br(opt_cx.llbb); }
@ -432,7 +432,7 @@ fn compile_submatch(bcx: @block_ctxt, m: &match, vals: [ValueRef],
fn make_phi_bindings(bcx: &@block_ctxt, map: &[exit_node],
ids: &ast::pat_id_map) -> bool {
fn assoc(key: str, list: &bind_map) -> option::t[ValueRef] {
for elt: {ident: ast::ident, val: ValueRef} in list {
for elt: {ident: ast::ident, val: ValueRef} in list {
if str::eq(elt.ident, key) { ret some(elt.val); }
}
ret none;
@ -440,10 +440,10 @@ fn make_phi_bindings(bcx: &@block_ctxt, map: &[exit_node],
let our_block = bcx.llbb as uint;
let success = true;
for each item: @{key: ast::ident, val: ast::node_id} in ids.items() {
for each item: @{key: ast::ident, val: ast::node_id} in ids.items() {
let llbbs = ~[];
let vals = ~[];
for ex: exit_node in map {
for ex: exit_node in map {
if ex.to as uint == our_block {
alt assoc(item.key, ex.bound) {
some(val) { llbbs += ~[ex.from]; vals += ~[val]; }
@ -475,10 +475,10 @@ fn trans_alt(cx: &@block_ctxt, expr: &@ast::expr, arms: &[ast::arm],
}
}
for a: ast::arm in arms {
for a: ast::arm in arms {
let body = new_scope_block_ctxt(cx, "case_body");
bodies += ~[body];
for p: @ast::pat in a.pats {
for p: @ast::pat in a.pats {
match += ~[@{pats: ~[p], body: body.llbb, mutable bound: ~[]}];
}
}
@ -502,7 +502,7 @@ fn trans_alt(cx: &@block_ctxt, expr: &@ast::expr, arms: &[ast::arm],
let i = 0u;
let arm_results = ~[];
for a: ast::arm in arms {
for a: ast::arm in arms {
let body_cx = bodies.(i);
if make_phi_bindings(body_cx, exit_map, ast::pat_id_map(a.pats.(0))) {
let block_res = trans::trans_block(body_cx, a.body, output);


@ -109,7 +109,7 @@ fn trans_spawn(cx: &@block_ctxt, dom: &ast::spawn_dom, name: &option::t[str],
let arg_tys: [ty::t] = ~[];
let arg_vals: [ValueRef] = ~[];
for e: @ast::expr in args {
for e: @ast::expr in args {
let e_ty = ty::expr_ty(cx.fcx.lcx.ccx.tcx, e);
let arg = trans_expr(bcx, e);
@ -127,7 +127,7 @@ fn trans_spawn(cx: &@block_ctxt, dom: &ast::spawn_dom, name: &option::t[str],
let llargs = alloc_ty(bcx, args_ty);
let i = 0u;
for v: ValueRef in arg_vals {
for v: ValueRef in arg_vals {
let target = bcx.build.GEP(llargs.val, ~[C_int(0), C_int(i as int)]);
bcx.build.Store(v, target);


@ -298,7 +298,7 @@ fn revoke_clean(cx: &@block_ctxt, val: ValueRef) {
let sc_cx = find_scope_cx(cx);
let found = -1;
let i = 0;
for c: cleanup in sc_cx.cleanups {
for c: cleanup in sc_cx.cleanups {
alt c {
clean_temp(v, _) {
if v as uint == val as uint { found = i; break; }


@ -416,11 +416,11 @@ fn trans_recv(bcx: &@block_ctxt, dest: &dest, expr: &@ast::expr) ->
fn trans_block(cx: &@block_ctxt, dest: &dest, blk: &ast::blk) -> @block_ctxt {
let bcx = cx;
for each local: @ast::local in trans::block_locals(blk) {
for each local: @ast::local in trans::block_locals(blk) {
bcx = trans::alloc_local(bcx, local).bcx;
}
for stmt: @ast::stmt in blk.node.stmts {
for stmt: @ast::stmt in blk.node.stmts {
bcx = trans_stmt(bcx, stmt);
@ -456,7 +456,7 @@ fn trans_lit_str_common(ccx: &@crate_ctxt, s: &str, expand: bool) ->
let len = str::byte_len(s);
let array = ~[];
for ch: u8 in s { array += ~[tc::C_u8(ch as uint)]; }
for ch: u8 in s { array += ~[tc::C_u8(ch as uint)]; }
array += ~[tc::C_u8(0u)];
if expand {
@ -567,7 +567,7 @@ fn trans_stmt(cx: &@block_ctxt, stmt: &@ast::stmt) -> @block_ctxt {
ast::stmt_decl(d, _) {
alt d.node {
ast::decl_local(locals) {
for local: @ast::local in locals {
for local: @ast::local in locals {
bcx = trans_init_local(bcx, local);
}
}


@ -44,7 +44,7 @@ fn trans_obj(cx: @local_ctxt, sp: &span, ob: &ast::_obj,
// The fields of our object will become the arguments to the function
// we're creating.
let fn_args: [ast::arg] = ~[];
for f: ast::obj_field in ob.fields {
for f: ast::obj_field in ob.fields {
fn_args +=
~[{mode: ast::alias(false), ty: f.ty, ident: f.ident, id: f.id}];
}
@ -104,11 +104,11 @@ fn trans_obj(cx: @local_ctxt, sp: &span, ob: &ast::_obj,
bcx.build.Store(C_null(llbox_ty), pair_box);
} else {
let obj_fields: [ty::t] = ~[];
for a: ty::arg in arg_tys { obj_fields += ~[a.ty]; }
for a: ty::arg in arg_tys { obj_fields += ~[a.ty]; }
let tps: [ty::t] = ~[];
let tydesc_ty = ty::mk_type(ccx.tcx);
for tp: ast::ty_param in ty_params { tps += ~[tydesc_ty]; }
for tp: ast::ty_param in ty_params { tps += ~[tydesc_ty]; }
// Synthesize an object body type and hand it off to
// trans_malloc_boxed, which allocates a box, including space for a
@ -154,7 +154,7 @@ fn trans_obj(cx: @local_ctxt, sp: &span, ob: &ast::_obj,
// TODO: can we just get typarams_ty out of body_ty instead?
let typarams_ty: ty::t = ty::mk_tup(ccx.tcx, tps);
let i: int = 0;
for tp: ast::ty_param in ty_params {
for tp: ast::ty_param in ty_params {
let typaram = bcx.fcx.lltydescs.(i);
let capture =
GEP_tup_like(bcx, typarams_ty, body_typarams.val, ~[0, i]);
@ -169,7 +169,7 @@ fn trans_obj(cx: @local_ctxt, sp: &span, ob: &ast::_obj,
~[0, abi::obj_body_elt_fields]);
bcx = body_fields.bcx;
i = 0;
for f: ast::obj_field in ob.fields {
for f: ast::obj_field in ob.fields {
alt bcx.fcx.llargs.find(f.id) {
some(arg1) {
let arg = load_if_immediate(bcx, arg1, arg_tys.(i).ty);
@ -217,7 +217,7 @@ fn trans_anon_obj(bcx: @block_ctxt, sp: &span, anon_obj: &ast::anon_obj,
none. { }
some(fields) {
additional_fields = fields;
for f: ast::anon_obj_field in fields {
for f: ast::anon_obj_field in fields {
additional_field_tys += ~[node_id_type(ccx, f.id)];
additional_field_vals += ~[trans_expr(bcx, f.expr)];
}
@ -342,7 +342,7 @@ fn trans_anon_obj(bcx: @block_ctxt, sp: &span, anon_obj: &ast::anon_obj,
~[0, abi::obj_body_elt_fields]);
bcx = body_fields.bcx;
let i: int = 0;
for f: ast::anon_obj_field in additional_fields {
for f: ast::anon_obj_field in additional_fields {
// FIXME (part of issue #538): make this work eventually, when we
// have additional field exprs in the AST.
load_if_immediate(bcx, additional_field_vals.(i).val,
@ -483,7 +483,7 @@ fn create_vtbl(cx: @local_ctxt, sp: &span, outer_obj_ty: ty::t,
// Gather up methods on the inner object.
alt ty::struct(cx.ccx.tcx, inner_obj_ty) {
ty::ty_obj(inner_obj_methods) {
for m: ty::method in inner_obj_methods {
for m: ty::method in inner_obj_methods {
meths += ~[fwding_mthd(@m)];
}
}
@ -699,7 +699,7 @@ fn process_bkwding_mthd(cx: @local_ctxt, sp: &span, m: @ty::method,
let a: uint = 3u; // retptr, task ptr, env come first
let passed_arg: ValueRef = llvm::LLVMGetParam(llbackwarding_fn, a);
for arg: ty::arg in m.inputs {
for arg: ty::arg in m.inputs {
if arg.mode == ty::mo_val {
passed_arg = load_if_immediate(bcx, passed_arg, arg.ty);
}
@ -871,7 +871,7 @@ fn process_fwding_mthd(cx: @local_ctxt, sp: &span, m: @ty::method,
let a: uint = 3u; // retptr, task ptr, env come first
let passed_arg: ValueRef = llvm::LLVMGetParam(llforwarding_fn, a);
for arg: ty::arg in m.inputs {
for arg: ty::arg in m.inputs {
if arg.mode == ty::mo_val {
passed_arg = load_if_immediate(bcx, passed_arg, arg.ty);
}


@ -59,7 +59,7 @@ fn node_ids_in_fn(f: &_fn, tps: &[ty_param], sp: &span, i: &fn_ident,
}
fn init_vecs(ccx: &crate_ctxt, node_ids: &[node_id], len: uint) {
for i: node_id in node_ids {
for i: node_id in node_ids {
log int::str(i) + " |-> " + uint::str(len);
add_node(ccx, i, empty_ann(len));
}


@ -61,7 +61,7 @@ fn def_id_to_str(d: def_id) -> str {
fn comma_str(args: &[@constr_arg_use]) -> str {
let rslt = "";
let comma = false;
for a: @constr_arg_use in args {
for a: @constr_arg_use in args {
if comma { rslt += ", "; } else { comma = true; }
alt a.node {
carg_base. { rslt += "*"; }
@ -87,7 +87,7 @@ fn constraint_to_str(tcx: &ty::ctxt, c: &sp_constr) -> str {
fn tritv_to_str(fcx: fn_ctxt, v: &tritv::t) -> str {
let s = "";
let comma = false;
for p: norm_constraint in constraints(fcx) {
for p: norm_constraint in constraints(fcx) {
alt tritv_get(v, p.bit_num) {
dont_care. { }
t {
@ -106,7 +106,7 @@ fn log_tritv(fcx: &fn_ctxt, v: &tritv::t) { log tritv_to_str(fcx, v); }
fn first_difference_string(fcx: &fn_ctxt, expected: &tritv::t,
actual: &tritv::t) -> str {
let s: str = "";
for c: norm_constraint in constraints(fcx) {
for c: norm_constraint in constraints(fcx) {
if tritv_get(expected, c.bit_num) == ttrue &&
tritv_get(actual, c.bit_num) != ttrue {
ret constraint_to_str(fcx.ccx.tcx, c.c);
@ -119,7 +119,7 @@ fn log_tritv_err(fcx: fn_ctxt, v: tritv::t) { log_err tritv_to_str(fcx, v); }
fn tos(v: &[uint]) -> str {
let rslt = "";
for i: uint in v {
for i: uint in v {
if i == 0u {
rslt += "0";
} else if (i == 1u) { rslt += "1"; } else { rslt += "?"; }
@ -561,7 +561,7 @@ fn norm_a_constraint(id: def_id, c: &constraint) -> [norm_constraint] {
}
cpred(p, descs) {
let rslt: [norm_constraint] = ~[];
for pd: pred_args in *descs {
for pd: pred_args in *descs {
rslt +=
~[{bit_num: pd.node.bit_num,
c: respan(pd.span, npred(p, id, pd.node.args))}];
@ -590,7 +590,7 @@ fn match_args(fcx: &fn_ctxt, occs: &@mutable [pred_args],
occ: &[@constr_arg_use]) -> uint {
log "match_args: looking at " +
constr_args_to_str(fn (i: &inst) -> str { ret i.ident; }, occ);
for pd: pred_args in *occs {
for pd: pred_args in *occs {
log "match_args: candidate " + pred_args_to_str(pd);
fn eq(p: &inst, q: &inst) -> bool { ret p.node == q.node; }
if ty::args_eq(eq, pd.node.args, occ) { ret pd.node.bit_num; }
@ -642,7 +642,7 @@ fn exprs_to_constr_args(tcx: ty::ctxt, args: &[@expr]) ->
[@constr_arg_use] {
let f = bind expr_to_constr_arg(tcx, _);
let rslt: [@constr_arg_use] = ~[];
for e: @expr in args { rslt += ~[f(e)]; }
for e: @expr in args { rslt += ~[f(e)]; }
rslt
}
@ -682,7 +682,7 @@ fn pred_args_to_str(p: &pred_args) -> str {
fn substitute_constr_args(cx: &ty::ctxt, actuals: &[@expr], c: &@ty::constr)
-> tsconstr {
let rslt: [@constr_arg_use] = ~[];
for a: @constr_arg in c.node.args {
for a: @constr_arg in c.node.args {
rslt += ~[substitute_arg(cx, actuals, a)];
}
ret npred(c.node.path, c.node.id, rslt);
@ -707,7 +707,7 @@ fn substitute_arg(cx: &ty::ctxt, actuals: &[@expr], a: @constr_arg) ->
fn pred_args_matches(pattern: &[constr_arg_general_[inst]],
desc: &pred_args) -> bool {
let i = 0u;
for c: @constr_arg_use in desc.node.args {
for c: @constr_arg_use in desc.node.args {
let n = pattern.(i);
alt c.node {
carg_ident(p) {
@ -731,7 +731,7 @@ fn pred_args_matches(pattern: &[constr_arg_general_[inst]],
fn find_instance_(pattern: &[constr_arg_general_[inst]],
descs: &[pred_args]) -> option::t[uint] {
for d: pred_args in descs {
for d: pred_args in descs {
if pred_args_matches(pattern, d) { ret some(d.node.bit_num); }
}
ret none;
@ -749,7 +749,7 @@ fn find_instances(fcx: &fn_ctxt, subst: &subst, c: &constraint) ->
alt c {
cinit(_, _, _) {/* this is dealt with separately */ }
cpred(p, descs) {
for d: pred_args in *descs {
for d: pred_args in *descs {
if args_mention(d.node.args, find_in_subst_bool, subst) {
let old_bit_num = d.node.bit_num;
let new = replace(subst, d);
@ -765,7 +765,7 @@ fn find_instances(fcx: &fn_ctxt, subst: &subst, c: &constraint) ->
}
fn find_in_subst(id: node_id, s: &subst) -> option::t[inst] {
for p: {from: inst, to: inst} in s {
for p: {from: inst, to: inst} in s {
if id == p.from.node { ret some(p.to); }
}
ret none;
@ -777,7 +777,7 @@ fn find_in_subst_bool(s: &subst, id: node_id) -> bool {
fn insts_to_str(stuff: &[constr_arg_general_[inst]]) -> str {
let rslt = "<";
for i: constr_arg_general_[inst] in stuff {
for i: constr_arg_general_[inst] in stuff {
rslt +=
" " +
alt i {
@ -792,7 +792,7 @@ fn insts_to_str(stuff: &[constr_arg_general_[inst]]) -> str {
fn replace(subst: subst, d: pred_args) -> [constr_arg_general_[inst]] {
let rslt: [constr_arg_general_[inst]] = ~[];
for c: @constr_arg_use in d.node.args {
for c: @constr_arg_use in d.node.args {
alt c.node {
carg_ident(p) {
alt find_in_subst(p.node, subst) {
@ -906,7 +906,7 @@ fn copy_in_poststate_two(fcx: &fn_ctxt, src_post: &poststate,
// replace any occurrences of the src def_id with the
// dest def_id
let insts = find_instances(fcx, subst, p.val);
for p: {from: uint, to: uint} in insts {
for p: {from: uint, to: uint} in insts {
if promises_(p.from, src_post) {
set_in_poststate_(p.to, target_post);
}
@ -922,7 +922,7 @@ fn forget_in_postcond(fcx: &fn_ctxt, parent_exp: node_id, dead_v: node_id) {
let d = local_node_id_to_local_def_id(fcx, dead_v);
alt d {
some(d_id) {
for c: norm_constraint in constraints(fcx) {
for c: norm_constraint in constraints(fcx) {
if constraint_mentions(fcx, c, d_id) {
clear_in_postcond(c.bit_num,
node_id_to_ts_ann(fcx.ccx,
@ -941,7 +941,7 @@ fn forget_in_postcond_still_init(fcx: &fn_ctxt, parent_exp: node_id,
let d = local_node_id_to_local_def_id(fcx, dead_v);
alt d {
some(d_id) {
for c: norm_constraint in constraints(fcx) {
for c: norm_constraint in constraints(fcx) {
if non_init_constraint_mentions(fcx, c, d_id) {
clear_in_postcond(c.bit_num,
node_id_to_ts_ann(fcx.ccx,
@ -961,7 +961,7 @@ fn forget_in_poststate(fcx: &fn_ctxt, p: &poststate, dead_v: node_id) ->
let changed = false;
alt d {
some(d_id) {
for c: norm_constraint in constraints(fcx) {
for c: norm_constraint in constraints(fcx) {
if constraint_mentions(fcx, c, d_id) {
changed |= clear_in_poststate_(c.bit_num, p);
}
@ -980,7 +980,7 @@ fn forget_in_poststate_still_init(fcx: &fn_ctxt, p: &poststate,
let changed = false;
alt d {
some(d_id) {
for c: norm_constraint in constraints(fcx) {
for c: norm_constraint in constraints(fcx) {
if non_init_constraint_mentions(fcx, c, d_id) {
changed |= clear_in_poststate_(c.bit_num, p);
}
@ -992,7 +992,7 @@ fn forget_in_poststate_still_init(fcx: &fn_ctxt, p: &poststate,
}
fn any_eq(v: &[node_id], d: node_id) -> bool {
for i: node_id in v { if i == d { ret true; } }
for i: node_id in v { if i == d { ret true; } }
false
}
@ -1032,7 +1032,7 @@ fn args_mention[T](args: &[@constr_arg_use], q: fn(&[T], node_id) -> bool ,
ret ivec::any(bind mentions(s,q,_), args);
*/
for a: @constr_arg_use in args {
for a: @constr_arg_use in args {
alt a.node { carg_ident(p1) { if q(s, p1.node) { ret true; } } _ { } }
}
ret false;
@ -1042,7 +1042,7 @@ fn use_var(fcx: &fn_ctxt, v: &node_id) { *fcx.enclosing.used_vars += ~[v]; }
// FIXME: This should be a function in std::ivec::.
fn vec_contains(v: &@mutable [node_id], i: &node_id) -> bool {
for d: node_id in *v { if d == i { ret true; } }
for d: node_id in *v { if d == i { ret true; } }
ret false;
}
@ -1058,7 +1058,7 @@ fn do_nothing[T](f: &_fn, tp: &[ty_param], sp: &span, i: &fn_ident,
fn args_to_constr_args(sp: &span, args: &[arg]) -> [@constr_arg_use] {
let actuals: [@constr_arg_use] = ~[];
for a: arg in args {
for a: arg in args {
actuals += ~[@respan(sp, carg_ident({ident: a.ident, node: a.id}))];
}
ret actuals;


@ -80,7 +80,7 @@ fn seq_postconds(fcx: &fn_ctxt, ps: &[postcond]) -> postcond {
let sz = ivec::len(ps);
if sz >= 1u {
let prev = tritv_clone(ps.(0));
for p: postcond in ivec::slice(ps, 1u, sz) { seq_tritv(prev, p); }
for p: postcond in ivec::slice(ps, 1u, sz) { seq_tritv(prev, p); }
ret prev;
} else { ret ann::empty_poststate(num_constraints(fcx.enclosing)); }
}


@ -49,7 +49,7 @@ import states::find_pre_post_state_fn;
fn check_unused_vars(fcx: &fn_ctxt) {
// FIXME: could be more efficient
for c: norm_constraint in constraints(fcx) {
for c: norm_constraint in constraints(fcx) {
alt c.c.node {
ninit(id, v) {
if !vec_contains(fcx.enclosing.used_vars, id) {


@ -30,7 +30,7 @@ fn collect_pred(e: &@expr, cx: &ctxt, v: &visit::vt[ctxt]) {
// If it's a call, generate appropriate instances of the
// call's constraints.
expr_call(operator, operands) {
for c: @ty::constr in constraints_expr(cx.tcx, operator) {
for c: @ty::constr in constraints_expr(cx.tcx, operator) {
let ct: sp_constr =
respan(c.span,
aux::substitute_constr_args(cx.tcx, operands, c));
@ -98,13 +98,13 @@ fn mk_fn_info(ccx: &crate_ctxt, f: &_fn, tp: &[ty_param], f_sp: &span,
/* now we have to add bit nums for both the constraints
and the variables... */
for c: sp_constr in { *cx.cs } {
for c: sp_constr in { *cx.cs } {
next = add_constraint(cx.tcx, c, next, res_map);
}
/* if this function has any constraints, instantiate them to the
argument names and add them */
let sc;
for c: @constr in f.decl.constraints {
for c: @constr in f.decl.constraints {
sc = ast_constr_to_sp_constr(cx.tcx, f.decl.inputs, c);
next = add_constraint(cx.tcx, sc, next, res_map);
}


@ -65,7 +65,7 @@ fn find_pre_post_obj(ccx: &crate_ctxt, o: _obj) {
ccx: ccx};
find_pre_post_fn(fcx, m.node.meth);
}
for m: @method in o.methods { do_a_method(ccx, m); }
for m: @method in o.methods { do_a_method(ccx, m); }
}
fn find_pre_post_item(ccx: &crate_ctxt, i: &item) {
@ -120,7 +120,7 @@ fn find_pre_post_exprs(fcx: &fn_ctxt, args: &[@expr], id: node_id) {
log_expr(*args.(0));
}
fn do_one(fcx: fn_ctxt, e: &@expr) { find_pre_post_expr(fcx, e); }
for e: @expr in args { do_one(fcx, e); }
for e: @expr in args { do_one(fcx, e); }
fn get_pp(ccx: crate_ctxt, e: &@expr) -> pre_and_post {
ret expr_pp(ccx, e);
@ -341,7 +341,7 @@ fn find_pre_post_expr(fcx: &fn_ctxt, e: @expr) {
find_pre_post_exprs(fcx, args, e.id);
/* see if the call has any constraints on its type */
for c: @ty::constr in constraints_expr(fcx.ccx.tcx, operator) {
for c: @ty::constr in constraints_expr(fcx.ccx.tcx, operator) {
let i =
bit_num(fcx, substitute_constr_args(fcx.ccx.tcx, args, c));
require(i, expr_pp(fcx.ccx, e));
@ -391,7 +391,7 @@ fn find_pre_post_expr(fcx: &fn_ctxt, e: @expr) {
let rslt = expr_pp(fcx.ccx, e);
clear_pp(rslt);
let upvars = freevars::get_freevars(fcx.ccx.tcx, e.id);
for id: node_id in *upvars { handle_var(fcx, rslt, id, "upvar"); }
for id: node_id in *upvars { handle_var(fcx, rslt, id, "upvar"); }
}
expr_block(b) {
find_pre_post_block(fcx, b);
@ -512,7 +512,7 @@ fn find_pre_post_expr(fcx: &fn_ctxt, e: @expr) {
ret block_pp(fcx.ccx, an_alt.body);
}
let alt_pps = ~[];
for a: arm in alts { alt_pps += ~[do_an_alt(fcx, a)]; }
for a: arm in alts { alt_pps += ~[do_an_alt(fcx, a)]; }
fn combine_pp(antec: pre_and_post, fcx: fn_ctxt, pp: &pre_and_post,
next: &pre_and_post) -> pre_and_post {
union(pp.precondition, seq_preconds(fcx, ~[antec, next]));
@ -607,7 +607,7 @@ fn find_pre_post_stmt(fcx: &fn_ctxt, s: &stmt) {
stmt_decl(adecl, id) {
alt adecl.node {
decl_local(alocals) {
for alocal: @local in alocals {
for alocal: @local in alocals {
alt alocal.node.init {
some(an_init) {
/* LHS always becomes initialized,
@ -694,13 +694,13 @@ fn find_pre_post_block(fcx: &fn_ctxt, b: blk) {
log "is:";
log_pp(stmt_pp(fcx.ccx, *s));
}
for s: @stmt in b.node.stmts { do_one_(fcx, s); }
for s: @stmt in b.node.stmts { do_one_(fcx, s); }
fn do_inner_(fcx: fn_ctxt, e: &@expr) { find_pre_post_expr(fcx, e); }
let do_inner = bind do_inner_(fcx, _);
option::map[@expr, ()](do_inner, b.node.expr);
let pps: [pre_and_post] = ~[];
for s: @stmt in b.node.stmts { pps += ~[stmt_pp(fcx.ccx, *s)]; }
for s: @stmt in b.node.stmts { pps += ~[stmt_pp(fcx.ccx, *s)]; }
alt b.node.expr {
none. {/* no-op */ }
some(e) { pps += ~[expr_pp(fcx.ccx, e)]; }
@ -709,7 +709,7 @@ fn find_pre_post_block(fcx: &fn_ctxt, b: blk) {
let block_precond = seq_preconds(fcx, pps);
let postconds = ~[];
for pp: pre_and_post in pps { postconds += ~[get_post(pp)]; }
for pp: pre_and_post in pps { postconds += ~[get_post(pp)]; }
/* A block may be empty, so this next line ensures that the postconds
vector is non-empty. */


@ -556,7 +556,7 @@ fn find_pre_post_state_expr(fcx: &fn_ctxt, pres: &prestate, e: @expr) ->
let a_post;
if ivec::len(alts) > 0u {
a_post = false_postcond(num_constrs);
for an_alt: arm in alts {
for an_alt: arm in alts {
changed |=
find_pre_post_state_block(fcx, e_post, an_alt.body);
intersect(a_post, block_poststate(fcx.ccx, an_alt.body));
@ -712,7 +712,7 @@ fn find_pre_post_state_block(fcx: &fn_ctxt, pres0: &prestate, b: &blk) ->
initializes. Then <pres> becomes the new poststate. */
let changed = false;
for s: @stmt in b.node.stmts {
for s: @stmt in b.node.stmts {
changed |= find_pre_post_state_stmt(fcx, pres, s);
pres = stmt_poststate(fcx.ccx, *s);
}
@ -755,7 +755,7 @@ fn find_pre_post_state_fn(fcx: &fn_ctxt, f: &_fn) -> bool {
// Instantiate any constraints on the arguments so we can use them
let block_pre = block_prestate(fcx.ccx, f.body);
let tsc;
for c: @constr in f.decl.constraints {
for c: @constr in f.decl.constraints {
tsc = ast_constr_to_ts_constr(fcx.ccx.tcx, f.decl.inputs, c);
set_in_prestate_constr(fcx, tsc, block_pre);
}


@ -455,7 +455,7 @@ fn mk_raw_ty(cx: &ctxt, st: &sty, in_cname: &option::t[str]) -> @raw_t {
}
fn derive_flags_sig(cx: &ctxt, has_params: &mutable bool,
has_vars: &mutable bool, args: &[arg], tt: &t) {
for a: arg in args { derive_flags_arg(cx, has_params, has_vars, a); }
for a: arg in args { derive_flags_arg(cx, has_params, has_vars, a); }
derive_flags_t(cx, has_params, has_vars, tt);
}
alt st {
@ -475,7 +475,7 @@ fn mk_raw_ty(cx: &ctxt, st: &sty, in_cname: &option::t[str]) -> @raw_t {
ty_param(_,_) { has_params = true; }
ty_var(_) { has_vars = true; }
ty_tag(_, tys) {
for tt: t in tys { derive_flags_t(cx, has_params, has_vars, tt); }
for tt: t in tys { derive_flags_t(cx, has_params, has_vars, tt); }
}
ty_box(m) { derive_flags_mt(cx, has_params, has_vars, m); }
ty_uniq(tt) { derive_flags_t(cx, has_params, has_vars, tt); }
@ -485,7 +485,7 @@ fn mk_raw_ty(cx: &ctxt, st: &sty, in_cname: &option::t[str]) -> @raw_t {
ty_port(tt) { derive_flags_t(cx, has_params, has_vars, tt); }
ty_chan(tt) { derive_flags_t(cx, has_params, has_vars, tt); }
ty_rec(flds) {
for f: field in flds {
for f: field in flds {
derive_flags_mt(cx, has_params, has_vars, f.mt);
}
}
@ -501,13 +501,13 @@ fn mk_raw_ty(cx: &ctxt, st: &sty, in_cname: &option::t[str]) -> @raw_t {
derive_flags_sig(cx, has_params, has_vars, args, tt);
}
ty_obj(meths) {
for m: method in meths {
for m: method in meths {
derive_flags_sig(cx, has_params, has_vars, m.inputs, m.output);
}
}
ty_res(_, tt, tps) {
derive_flags_t(cx, has_params, has_vars, tt);
for tt: t in tps { derive_flags_t(cx, has_params, has_vars, tt); }
for tt: t in tps { derive_flags_t(cx, has_params, has_vars, tt); }
}
ty_constr(tt, _) { derive_flags_t(cx, has_params, has_vars, tt); }
}
@ -673,31 +673,31 @@ fn walk_ty(cx: &ctxt, walker: ty_walk, ty: t) {
ty_port(subty) { walk_ty(cx, walker, subty); }
ty_chan(subty) { walk_ty(cx, walker, subty); }
ty_tag(tid, subtys) {
for subty: t in subtys { walk_ty(cx, walker, subty); }
for subty: t in subtys { walk_ty(cx, walker, subty); }
}
ty_rec(fields) {
for fl: field in fields { walk_ty(cx, walker, fl.mt.ty); }
for fl: field in fields { walk_ty(cx, walker, fl.mt.ty); }
}
ty_tup(ts) {
for tt in ts { walk_ty(cx, walker, tt); }
}
ty_fn(proto, args, ret_ty, _, _) {
for a: arg in args { walk_ty(cx, walker, a.ty); }
for a: arg in args { walk_ty(cx, walker, a.ty); }
walk_ty(cx, walker, ret_ty);
}
ty_native_fn(abi, args, ret_ty) {
for a: arg in args { walk_ty(cx, walker, a.ty); }
for a: arg in args { walk_ty(cx, walker, a.ty); }
walk_ty(cx, walker, ret_ty);
}
ty_obj(methods) {
for m: method in methods {
for a: arg in m.inputs { walk_ty(cx, walker, a.ty); }
for m: method in methods {
for a: arg in m.inputs { walk_ty(cx, walker, a.ty); }
walk_ty(cx, walker, m.output);
}
}
ty_res(_, sub, tps) {
walk_ty(cx, walker, sub);
for tp: t in tps { walk_ty(cx, walker, tp); }
for tp: t in tps { walk_ty(cx, walker, tp); }
}
ty_constr(sub, _) {
walk_ty(cx, walker, sub);
@ -755,12 +755,12 @@ fn fold_ty(cx: &ctxt, fld: fold_mode, ty_0: t) -> t {
ty_chan(subty) { ty = mk_chan(cx, fold_ty(cx, fld, subty)); }
ty_tag(tid, subtys) {
let new_subtys: [t] = ~[];
for subty: t in subtys { new_subtys += ~[fold_ty(cx, fld, subty)]; }
for subty: t in subtys { new_subtys += ~[fold_ty(cx, fld, subty)]; }
ty = copy_cname(cx, mk_tag(cx, tid, new_subtys), ty);
}
ty_rec(fields) {
let new_fields: [field] = ~[];
for fl: field in fields {
for fl: field in fields {
let new_ty = fold_ty(cx, fld, fl.mt.ty);
let new_mt = {ty: new_ty, mut: fl.mt.mut};
new_fields += ~[{ident: fl.ident, mt: new_mt}];
@ -776,7 +776,7 @@ fn fold_ty(cx: &ctxt, fld: fold_mode, ty_0: t) -> t {
}
ty_fn(proto, args, ret_ty, cf, constrs) {
let new_args: [arg] = ~[];
for a: arg in args {
for a: arg in args {
let new_ty = fold_ty(cx, fld, a.ty);
new_args += ~[{mode: a.mode, ty: new_ty}];
}
@ -787,7 +787,7 @@ fn fold_ty(cx: &ctxt, fld: fold_mode, ty_0: t) -> t {
}
ty_native_fn(abi, args, ret_ty) {
let new_args: [arg] = ~[];
for a: arg in args {
for a: arg in args {
let new_ty = fold_ty(cx, fld, a.ty);
new_args += ~[{mode: a.mode, ty: new_ty}];
}
@ -798,9 +798,9 @@ fn fold_ty(cx: &ctxt, fld: fold_mode, ty_0: t) -> t {
}
ty_obj(methods) {
let new_methods: [method] = ~[];
for m: method in methods {
for m: method in methods {
let new_args: [arg] = ~[];
for a: arg in m.inputs {
for a: arg in m.inputs {
new_args += ~[{mode: a.mode, ty: fold_ty(cx, fld, a.ty)}];
}
new_methods +=
@ -815,7 +815,7 @@ fn fold_ty(cx: &ctxt, fld: fold_mode, ty_0: t) -> t {
}
ty_res(did, subty, tps) {
let new_tps = ~[];
for tp: t in tps { new_tps += ~[fold_ty(cx, fld, tp)]; }
for tp: t in tps { new_tps += ~[fold_ty(cx, fld, tp)]; }
ty =
copy_cname(cx, mk_res(cx, did, fold_ty(cx, fld, subty), new_tps),
ty);
@ -1014,7 +1014,7 @@ fn type_has_pointers(cx: &ctxt, ty: &t) -> bool {
ty_type. {/* no-op */ }
ty_native(_) {/* no-op */ }
ty_rec(flds) {
for f: field in flds {
for f: field in flds {
if type_has_pointers(cx, f.mt.ty) {
result = true;
break;
@ -1028,8 +1028,8 @@ fn type_has_pointers(cx: &ctxt, ty: &t) -> bool {
}
ty_tag(did, tps) {
let variants = tag_variants(cx, did);
for variant: variant_info in variants {
for aty: t in variant.args {
for variant: variant_info in variants {
for aty: t in variant.args {
// Perform any type parameter substitutions.
let arg_ty = substitute_type_params(cx, tps, aty);
if type_has_pointers(cx, arg_ty) {
@ -1129,7 +1129,7 @@ fn type_kind(cx: &ctxt, ty: &t) -> ast::kind {
// Records lower to the lowest of their members.
ty_rec(flds) {
for f: field in flds {
for f: field in flds {
result = kind::lower_kind(result, type_kind(cx, f.mt.ty));
if result == ast::kind_pinned { break; }
}
@ -1138,8 +1138,8 @@ fn type_kind(cx: &ctxt, ty: &t) -> ast::kind {
// Tags lower to the lowest of their variants.
ty_tag(did, tps) {
let variants = tag_variants(cx, did);
for variant: variant_info in variants {
for aty: t in variant.args {
for variant: variant_info in variants {
for aty: t in variant.args {
// Perform any type parameter substitutions.
let arg_ty = substitute_type_params(cx, tps, aty);
result = kind::lower_kind(result, type_kind(cx, arg_ty));
@ -1226,7 +1226,7 @@ fn type_has_dynamic_size(cx: &ctxt, ty: &t) -> bool {
ty_native_fn(_, _, _) { ret false; }
ty_obj(_) { ret false; }
ty_res(_, sub, tps) {
for tp: t in tps { if type_has_dynamic_size(cx, tp) { ret true; } }
for tp: t in tps { if type_has_dynamic_size(cx, tp) { ret true; } }
ret type_has_dynamic_size(cx, sub);
}
ty_var(_) { fail "ty_var in type_has_dynamic_size()"; }
@ -1331,8 +1331,8 @@ fn type_owns_heap_mem(cx: &ctxt, ty: &t) -> bool {
// structural types
ty_tag(did, tps) {
let variants = tag_variants(cx, did);
for variant: variant_info in variants {
for aty: t in variant.args {
for variant: variant_info in variants {
for aty: t in variant.args {
// Perform any type parameter substitutions.
let arg_ty = substitute_type_params(cx, tps, aty);
if type_owns_heap_mem(cx, arg_ty) { result = true; }
@ -1340,7 +1340,7 @@ fn type_owns_heap_mem(cx: &ctxt, ty: &t) -> bool {
}
}
ty_rec(flds) {
for f: field in flds {
for f: field in flds {
if type_owns_heap_mem(cx, f.mt.ty) { result = true; }
}
}
@ -1491,7 +1491,7 @@ fn hash_type_structure(st: &sty) -> uint {
}
fn hash_type_constr_args(id: uint, args: [@ty_constr_arg]) -> uint {
let h = id;
for a: @ty_constr_arg in args {
for a: @ty_constr_arg in args {
alt a.node {
carg_base. { h += h << 5u; }
carg_lit(_) {
@ -1510,7 +1510,7 @@ fn hash_type_structure(st: &sty) -> uint {
fn hash_fn(id: uint, args: &[arg], rty: &t) -> uint {
let h = id;
for a: arg in args { h += h << 5u + hash_ty(a.ty); }
for a: arg in args { h += h << 5u + hash_ty(a.ty); }
h += h << 5u + hash_ty(rty);
ret h;
}
@ -1539,7 +1539,7 @@ fn hash_type_structure(st: &sty) -> uint {
ty_istr. { ret 17u; }
ty_tag(did, tys) {
let h = hash_def(18u, did);
for typ: t in tys { h += h << 5u + hash_ty(typ); }
for typ: t in tys { h += h << 5u + hash_ty(typ); }
ret h;
}
ty_box(mt) { ret hash_subty(19u, mt.ty); }
@ -1550,7 +1550,7 @@ fn hash_type_structure(st: &sty) -> uint {
ty_task. { ret 24u; }
ty_rec(fields) {
let h = 26u;
for f: field in fields { h += h << 5u + hash_ty(f.mt.ty); }
for f: field in fields { h += h << 5u + hash_ty(f.mt.ty); }
ret h;
}
ty_tup(ts) {
@ -1566,7 +1566,7 @@ fn hash_type_structure(st: &sty) -> uint {
ty_native_fn(_, args, rty) { ret hash_fn(28u, args, rty); }
ty_obj(methods) {
let h = 29u;
for m: method in methods { h += h << 5u + str::hash(m.ident); }
for m: method in methods { h += h << 5u + str::hash(m.ident); }
ret h;
}
ty_var(v) { ret hash_uint(30u, v as uint); }
@ -1577,12 +1577,12 @@ fn hash_type_structure(st: &sty) -> uint {
ty_ptr(mt) { ret hash_subty(35u, mt.ty); }
ty_res(did, sub, tps) {
let h = hash_subty(hash_def(18u, did), sub);
for tp: t in tps { h += h << 5u + hash_ty(tp); }
for tp: t in tps { h += h << 5u + hash_ty(tp); }
ret h;
}
ty_constr(t, cs) {
let h = 36u;
for c: @type_constr in cs { h += h << 5u + hash_type_constr(h, c); }
for c: @type_constr in cs { h += h << 5u + hash_type_constr(h, c); }
ret h;
}
ty_uniq(t) {
@ -1629,7 +1629,7 @@ fn arg_eq[T](eq: &fn(&T, &T) -> bool , a: @sp_constr_arg[T],
fn args_eq[T](eq: fn(&T, &T) -> bool , a: &[@sp_constr_arg[T]],
b: &[@sp_constr_arg[T]]) -> bool {
let i: uint = 0u;
for arg: @sp_constr_arg[T] in a {
for arg: @sp_constr_arg[T] in a {
if !arg_eq(eq, arg, b.(i)) { ret false; }
i += 1u;
}
@ -1645,7 +1645,7 @@ fn constr_eq(c: &@constr, d: &@constr) -> bool {
fn constrs_eq(cs: &[@constr], ds: &[@constr]) -> bool {
if ivec::len(cs) != ivec::len(ds) { ret false; }
let i = 0u;
for c: @constr in cs { if !constr_eq(c, ds.(i)) { ret false; } i += 1u; }
for c: @constr in cs { if !constr_eq(c, ds.(i)) { ret false; } i += 1u; }
ret true;
}
@ -1805,7 +1805,7 @@ fn equal_type_structures(a: &sty, b: &sty) -> bool {
ret false;
}
let i = 0u;
for tp_a: t in tps_a {
for tp_a: t in tps_a {
if !eq_ty(tp_a, tps_b.(i)) { ret false; }
i += 1u;
}
@ -1927,7 +1927,7 @@ fn count_ty_params(cx: &ctxt, ty: t) -> uint {
alt struct(cx, ty) {
ty_param(param_idx,_) {
let seen = false;
for other_param_idx: uint in *param_indices {
for other_param_idx: uint in *param_indices {
if param_idx == other_param_idx { seen = true; }
}
if !seen { *param_indices += ~[param_idx]; }
@ -2044,14 +2044,14 @@ fn stmt_node_id(s: &@ast::stmt) -> ast::node_id {
fn field_idx(sess: &session::session, sp: &span, id: &ast::ident,
fields: &[field]) -> uint {
let i: uint = 0u;
for f: field in fields { if str::eq(f.ident, id) { ret i; } i += 1u; }
for f: field in fields { if str::eq(f.ident, id) { ret i; } i += 1u; }
sess.span_fatal(sp, "unknown field '" + id + "' of record");
}
fn method_idx(sess: &session::session, sp: &span, id: &ast::ident,
meths: &[method]) -> uint {
let i: uint = 0u;
for m: method in meths { if str::eq(m.ident, id) { ret i; } i += 1u; }
for m: method in meths { if str::eq(m.ident, id) { ret i; } i += 1u; }
sess.span_fatal(sp, "unknown method '" + id + "' of obj");
}
@ -2219,7 +2219,7 @@ mod unify {
}
let i = 0u;
let rslt;
for c: @type_constr in expected {
for c: @type_constr in expected {
rslt = unify_constr(base_t, c, actual.(i));
alt rslt { ures_ok(_) { } ures_err(_) { ret rslt; } }
i += 1u;
@ -2236,7 +2236,7 @@ mod unify {
if expected_arg_len != actual_arg_len { ret err_res; }
let i = 0u;
let actual;
for a: @ty_constr_arg in expected.node.args {
for a: @ty_constr_arg in expected.node.args {
actual = actual_constr.node.args.(i);
alt a.node {
carg_base. {
@ -2651,7 +2651,7 @@ mod unify {
ures_ok(res_inner) {
let i = 0u;
let res_tps = ~[];
for ex_tp: t in ex_tps {
for ex_tp: t in ex_tps {
let result = unify_step(cx, ex_tp, act_tps.(i));
alt result {
ures_ok(rty) { res_tps += ~[rty]; }
@ -3012,11 +3012,11 @@ fn tag_variants(cx: &ctxt, id: &ast::def_id) -> [variant_info] {
alt item.node {
ast::item_tag(variants, _) {
let result: [variant_info] = ~[];
for variant: ast::variant in variants {
for variant: ast::variant in variants {
let ctor_ty = node_id_to_monotype(cx, variant.node.id);
let arg_tys: [t] = ~[];
if std::ivec::len(variant.node.args) > 0u {
for a: arg in ty_fn_args(cx, ctor_ty) {
for a: arg in ty_fn_args(cx, ctor_ty) {
arg_tys += ~[a.ty];
}
}


@ -294,7 +294,7 @@ fn ast_ty_to_ty(tcx: &ty::ctxt, getter: &ty_getter, ast_ty: &@ast::ty) ->
//
let param_bindings: [ty::t] = ~[];
for ast_ty: @ast::ty in args {
for ast_ty: @ast::ty in args {
param_bindings += ~[ast_ty_to_ty(tcx, getter, ast_ty)];
}
if ivec::len(param_bindings) !=
@ -346,7 +346,7 @@ fn ast_ty_to_ty(tcx: &ty::ctxt, getter: &ty_getter, ast_ty: &@ast::ty) ->
}
ast::ty_rec(fields) {
let flds: [field] = ~[];
for f: ast::ty_field in fields {
for f: ast::ty_field in fields {
let tm = ast_mt_to_mt(tcx, getter, f.node.mt);
flds += ~[{ident: f.node.ident, mt: tm}];
}
@ -354,13 +354,13 @@ fn ast_ty_to_ty(tcx: &ty::ctxt, getter: &ty_getter, ast_ty: &@ast::ty) ->
}
ast::ty_fn(proto, inputs, output, cf, constrs) {
let i = ~[];
for ta: ast::ty_arg in inputs {
for ta: ast::ty_arg in inputs {
i += ~[ast_arg_to_arg(tcx, getter, ta)];
}
let out_ty = ast_ty_to_ty(tcx, getter, output);
let out_constrs = ~[];
for constr: @ast::constr in constrs {
for constr: @ast::constr in constrs {
out_constrs += ~[ty::ast_constr_to_constr(tcx, constr)];
}
typ = ty::mk_fn(tcx, proto, i, out_ty, cf, out_constrs);
@ -384,15 +384,15 @@ fn ast_ty_to_ty(tcx: &ty::ctxt, getter: &ty_getter, ast_ty: &@ast::ty) ->
}
ast::ty_obj(meths) {
let tmeths: [ty::method] = ~[];
for m: ast::ty_method in meths {
for m: ast::ty_method in meths {
let ins = ~[];
for ta: ast::ty_arg in m.node.inputs {
for ta: ast::ty_arg in m.node.inputs {
ins += ~[ast_arg_to_arg(tcx, getter, ta)];
}
let out = ast_ty_to_ty(tcx, getter, m.node.output);
let out_constrs = ~[];
for constr: @ast::constr in m.node.constrs {
for constr: @ast::constr in m.node.constrs {
out_constrs += ~[ty::ast_constr_to_constr(tcx, constr)];
}
let new_m: ty::method =
@ -408,7 +408,7 @@ fn ast_ty_to_ty(tcx: &ty::ctxt, getter: &ty_getter, ast_ty: &@ast::ty) ->
}
ast::ty_constr(t, cs) {
let out_cs = ~[];
for constr: @ast::ty_constr in cs {
for constr: @ast::ty_constr in cs {
out_cs += ~[ty::ast_constr_to_constr(tcx, constr)];
}
typ = ty::mk_constr(tcx, ast_ty_to_ty(tcx, getter, t), out_cs);
@ -554,11 +554,11 @@ mod collect {
def_id: &option::t[ast::def_id]) ->
ty::ty_param_kinds_and_ty {
let input_tys = ~[];
for a: ast::arg in decl.inputs { input_tys += ~[ty_of_arg(a)]; }
for a: ast::arg in decl.inputs { input_tys += ~[ty_of_arg(a)]; }
let output_ty = convert(decl.output);
let out_constrs = ~[];
for constr: @ast::constr in decl.constraints {
for constr: @ast::constr in decl.constraints {
out_constrs += ~[ty::ast_constr_to_constr(cx.tcx, constr)];
}
let t_fn =
@ -574,7 +574,7 @@ mod collect {
ty_params: &[ast::ty_param], def_id: &ast::def_id)
-> ty::ty_param_kinds_and_ty {
let input_tys = ~[];
for a: ast::arg in decl.inputs { input_tys += ~[ty_of_arg(a)]; }
for a: ast::arg in decl.inputs { input_tys += ~[ty_of_arg(a)]; }
let output_ty = convert(decl.output);
let t_fn = ty::mk_native_fn(cx.tcx, abi, input_tys, output_ty);
@ -619,14 +619,14 @@ mod collect {
let convert = bind ast_ty_to_ty(cx.tcx, get, _);
let inputs = ~[];
for a: ast::arg in m.node.meth.decl.inputs {
for a: ast::arg in m.node.meth.decl.inputs {
inputs += ~[ty_of_arg(cx, a)];
}
let output = convert(m.node.meth.decl.output);
let out_constrs = ~[];
for constr: @ast::constr in m.node.meth.decl.constraints {
for constr: @ast::constr in m.node.meth.decl.constraints {
out_constrs += ~[ty::ast_constr_to_constr(cx.tcx, constr)];
}
ret {proto: proto_to_ty_proto(m.node.meth.proto),
@ -649,7 +649,7 @@ mod collect {
let t_obj = ty_of_obj(cx, id, ob, ty_params);
let t_inputs: [arg] = ~[];
for f: ast::obj_field in ob.fields {
for f: ast::obj_field in ob.fields {
let g = bind getter(cx, _);
let t_field = ast_ty_to_ty(cx.tcx, g, f.ty);
t_inputs += ~[{mode: ty::mo_alias(false), ty: t_field}];
@ -747,7 +747,7 @@ mod collect {
// Create a set of parameter types shared among all the variants.
let ty_param_tys: [ty::t] = mk_ty_params(cx, ty_params);
for variant: ast::variant in variants {
for variant: ast::variant in variants {
// Nullary tag constructors get turned into constants; n-ary tag
// constructors get turned into functions.
@ -760,7 +760,7 @@ mod collect {
let f = bind getter(cx, _);
let args: [arg] = ~[];
for va: ast::variant_arg in variant.node.args {
for va: ast::variant_arg in variant.node.args {
let arg_ty = ast_ty_to_ty(cx.tcx, f, va.ty);
args += ~[{mode: ty::mo_alias(false), ty: arg_ty}];
}
@ -777,7 +777,7 @@ mod collect {
}
fn get_obj_method_types(cx: &@ctxt, object: &ast::_obj) -> [ty::method] {
let meths = ~[];
for m: @ast::method in object.methods {
for m: @ast::method in object.methods {
meths += ~[ty_of_method(cx, m)];
}
ret meths;
@ -995,7 +995,7 @@ mod demand {
let ty_param_substs: [mutable ty::t] = ~[mutable];
let ty_param_subst_var_ids: [int] = ~[];
for ty_param_subst: ty::t in ty_param_substs_0 {
for ty_param_subst: ty::t in ty_param_substs_0 {
// Generate a type variable and unify it with the type parameter
// substitution. We will then pull out these type variables.
let t_0 = next_ty_var(fcx);
@ -1008,7 +1008,7 @@ mod demand {
ty_param_subst_var_ids: &[int]) ->
ty_param_substs_and_ty {
let result_ty_param_substs: [ty::t] = ~[];
for var_id: int in ty_param_subst_var_ids {
for var_id: int in ty_param_subst_var_ids {
let tp_subst = ty::mk_var(fcx.ccx.tcx, var_id);
result_ty_param_substs += ~[tp_subst];
}
@ -1055,7 +1055,7 @@ fn variant_arg_types(ccx: &@crate_ctxt, sp: &span, vid: &ast::def_id,
// N-ary variant.
for arg: ty::arg in ins {
for arg: ty::arg in ins {
let arg_ty =
ty::substitute_type_params(ccx.tcx, tag_ty_params, arg.ty);
result += ~[arg_ty];
@ -1111,7 +1111,7 @@ mod writeback {
none[[ty::t]]. { new_substs_opt = none[[ty::t]]; }
some[[ty::t]](substs) {
let new_substs: [ty::t] = ~[];
for subst: ty::t in substs {
for subst: ty::t in substs {
alt resolve_type_vars_in_type(fcx, sp, subst) {
some(t) { new_substs += ~[t]; }
none. { wbcx.success = false; ret; }
@ -1256,7 +1256,7 @@ fn gather_locals(ccx: &@crate_ctxt, f: &ast::_fn, id: &ast::node_id,
}
none. {/* no fields */ }
}
for f: ast::obj_field in obj_fields {
for f: ast::obj_field in obj_fields {
let field_ty = ty::node_id_to_type(ccx.tcx, f.id);
assign(f.id, f.ident, some(field_ty));
}
@ -1264,7 +1264,7 @@ fn gather_locals(ccx: &@crate_ctxt, f: &ast::_fn, id: &ast::node_id,
// Add formal parameters.
let args = ty::ty_fn_args(ccx.tcx, ty::node_id_to_type(ccx.tcx, id));
let i = 0u;
for arg: ty::arg in args {
for arg: ty::arg in args {
assign(f.decl.inputs.(i).id, f.decl.inputs.(i).ident, some(arg.ty));
i += 1u;
}
@ -1391,7 +1391,7 @@ fn check_pat(fcx: &@fn_ctxt, map: &ast::pat_id_map, pat: &@ast::pat,
// TODO: ivec::iter2
let i = 0u;
for subpat: @ast::pat in subpats {
for subpat: @ast::pat in subpats {
check_pat(fcx, map, subpat, arg_types.(i));
i += 1u;
}
@ -1442,7 +1442,7 @@ fn check_pat(fcx: &@fn_ctxt, map: &ast::pat_id_map, pat: &@ast::pat,
fn matches(name: &str, f: &ty::field) -> bool {
ret str::eq(name, f.ident);
}
for f: ast::field_pat in fields {
for f: ast::field_pat in fields {
alt ivec::find(bind matches(f.ident, _), ex_fields) {
some(field) { check_pat(fcx, map, f.pat, field.mt.ty); }
none. {
@ -1619,7 +1619,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
let check_args = lambda(check_blocks: bool) -> bool {
let i = 0u;
let bot = false;
for a_opt: option::t[@ast::expr] in args {
for a_opt: option::t[@ast::expr] in args {
alt a_opt {
some(a) {
let is_block =
@ -1655,7 +1655,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
fn check_call(fcx: &@fn_ctxt, sp: &span, f: &@ast::expr,
args: &[@ast::expr], call_kind: call_kind) -> bool {
let args_opt_0: [option::t[@ast::expr]] = ~[];
for arg: @ast::expr in args {
for arg: @ast::expr in args {
args_opt_0 += ~[some[@ast::expr](arg)];
}
@ -1727,7 +1727,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
in constraint");
}
}
for operand: @ast::expr in operands {
for operand: @ast::expr in operands {
if !ast::is_constraint_arg(operand) {
let s =
"Constraint args must be \
@ -2048,16 +2048,16 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
// Typecheck the patterns first, so that we get types for all the
// bindings.
let pattern_ty = ty::expr_ty(tcx, expr);
for arm: ast::arm in arms {
for arm: ast::arm in arms {
let id_map = ast::pat_id_map(arm.pats.(0));
for p: @ast::pat in arm.pats {
for p: @ast::pat in arm.pats {
check_pat(fcx, id_map, p, pattern_ty);
}
}
// Now typecheck the blocks.
let result_ty = next_ty_var(fcx);
let arm_non_bot = false;
for arm: ast::arm in arms {
for arm: ast::arm in arms {
if !check_block(fcx, arm.body) { arm_non_bot = true; }
let bty = block_ty(tcx, arm.body);
result_ty = demand::simple(fcx, arm.body.span, result_ty, bty);
@ -2173,7 +2173,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
some(sty) {
alt sty {
ty::ty_obj(methods) {
for method: ty::method in methods {
for method: ty::method in methods {
if method.ident == ident {
t = ty::method_ty_to_fn_ty(tcx, method);
}
@ -2251,7 +2251,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
ast::expr_rec(fields, base) {
alt base { none. {/* no-op */ } some(b_0) { check_expr(fcx, b_0); } }
let fields_t: [spanned[field]] = ~[];
for f: ast::field in fields {
for f: ast::field in fields {
bot |= check_expr(fcx, f.node.expr);
let expr_t = expr_ty(tcx, f.node.expr);
let expr_mt = {ty: expr_t, mut: f.node.mut};
@ -2279,9 +2279,9 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
}
}
write::ty_only_fixup(fcx, id, bexpr_t);
for f: spanned[ty::field] in fields_t {
for f: spanned[ty::field] in fields_t {
let found = false;
for bf: ty::field in base_fields {
for bf: ty::field in base_fields {
if str::eq(f.node.ident, bf.ident) {
demand::simple(fcx, f.span, bf.mt.ty, f.node.mt.ty);
found = true;
@ -2384,14 +2384,14 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
let convert = bind ast_ty_to_ty_crate(ccx, _);
let inputs = ~[];
for aa: ast::arg in m.node.meth.decl.inputs {
for aa: ast::arg in m.node.meth.decl.inputs {
inputs += ~[ty_of_arg(ccx, aa)];
}
let output = convert(m.node.meth.decl.output);
let out_constrs = ~[];
for constr: @ast::constr in m.node.meth.decl.constraints {
for constr: @ast::constr in m.node.meth.decl.constraints {
out_constrs += ~[ty::ast_constr_to_constr(ccx.tcx, constr)];
}
@ -2406,7 +2406,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
let method_types: [ty::method] = ~[];
{
// Outer methods.
for m: @ast::method in ao.methods {
for m: @ast::method in ao.methods {
method_types += ~[ty_of_method(fcx.ccx, m)];
}
@ -2495,7 +2495,7 @@ fn check_expr_with_unifier(fcx: &@fn_ctxt, expr: &@ast::expr,
}
// Typecheck the methods.
for method: @ast::method in ao.methods {
for method: @ast::method in ao.methods {
check_method(fcx.ccx, method);
}
@ -2704,7 +2704,7 @@ fn check_item(ccx: @crate_ctxt, it: &@ast::item) {
ccx.obj_infos += ~[regular_obj(ob.fields, it.id)];
// Typecheck the methods.
for method: @ast::method in ob.methods { check_method(ccx, method); }
for method: @ast::method in ob.methods { check_method(ccx, method); }
// Now remove the info from the stack.
ivec::pop[obj_info](ccx.obj_infos);


@ -618,11 +618,11 @@ tag native_item_ {
fn is_exported(i: ident, m: _mod) -> bool {
let nonlocal = true;
for it: @ast::item in m.items {
for it: @ast::item in m.items {
if it.ident == i { nonlocal = false; }
alt it.node {
item_tag(variants, _) {
for v: variant in variants {
for v: variant in variants {
if v.node.name == i { nonlocal = false; }
}
}
@ -631,7 +631,7 @@ fn is_exported(i: ident, m: _mod) -> bool {
if !nonlocal { break; }
}
let count = 0u;
for vi: @ast::view_item in m.view_items {
for vi: @ast::view_item in m.view_items {
alt vi.node {
ast::view_item_export(id, _) {
if str::eq(i, id) {


@ -142,7 +142,7 @@ fn maybe_highlight_lines(sp: &option::t[span], cm: &codemap,
elided = true;
}
// Print the offending lines
for line: uint in display_lines {
for line: uint in display_lines {
io::stdout().write_str(#fmt("%s:%u ", fm.name, line + 1u));
let s = get_line(fm, line as int, file);
if !str::ends_with(s, "\n") { s += "\n"; }
@ -203,7 +203,7 @@ fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines {
let lo = lookup_char_pos(cm, sp.lo);
let hi = lookup_char_pos(cm, sp.hi);
let lines = ~[];
for each i: uint in uint::range(lo.line - 1u, hi.line as uint) {
for each i: uint in uint::range(lo.line - 1u, hi.line as uint) {
lines += ~[i];
}
ret @{name: lo.filename, lines: lines};
@ -227,7 +227,7 @@ fn get_line(fm: filemap, line: int, file: &str) -> str {
}
fn get_filemap(cm: codemap, filename: str) -> filemap {
for fm: filemap in cm.files { if fm.name == filename { ret fm; } }
for fm: filemap in cm.files { if fm.name == filename { ret fm; } }
//XXjdm the following triggers a mismatched type bug
// (or expected function, found _|_)
fail; // ("asking for " + filename + " which we don't know about");


@ -86,7 +86,7 @@ fn pieces_to_expr(cx: &ext_ctxt, sp: span, pieces: &[piece],
fields: &[{ident: ast::ident, ex: @ast::expr}]) ->
@ast::expr {
let astfields: [ast::field] = ~[];
for field: {ident: ast::ident, ex: @ast::expr} in fields {
for field: {ident: ast::ident, ex: @ast::expr} in fields {
let ident = field.ident;
let val = field.ex;
let astfield =
@ -115,7 +115,7 @@ fn pieces_to_expr(cx: &ext_ctxt, sp: span, pieces: &[piece],
fn make_flags(cx: &ext_ctxt, sp: span, flags: &[flag]) ->
@ast::expr {
let flagexprs: [@ast::expr] = ~[];
for f: flag in flags {
for f: flag in flags {
let fstr;
alt f {
flag_left_justify. { fstr = "flag_left_justify"; }
@ -205,7 +205,7 @@ fn pieces_to_expr(cx: &ext_ctxt, sp: span, pieces: &[piece],
option::none. { }
_ { cx.span_unimpl(sp, unsupported); }
}
for f: flag in cnv.flags {
for f: flag in cnv.flags {
alt f {
flag_left_justify. { }
flag_sign_always. {
@ -259,7 +259,7 @@ fn pieces_to_expr(cx: &ext_ctxt, sp: span, pieces: &[piece],
some(p) { log "param: " + std::int::to_str(p, 10u); }
_ { log "param: none"; }
}
for f: flag in c.flags {
for f: flag in c.flags {
alt f {
flag_left_justify. { log "flag: left justify"; }
flag_left_zero_pad. { log "flag: left zero pad"; }
@ -308,7 +308,7 @@ fn pieces_to_expr(cx: &ext_ctxt, sp: span, pieces: &[piece],
let n = 0u;
let tmp_expr = make_new_str(cx, sp, "");
let nargs = ivec::len[@ast::expr](args);
for pc: piece in pieces {
for pc: piece in pieces {
alt pc {
piece_string(s) {
let s_expr = make_new_str(cx, fmt_sp, s);


@ -95,7 +95,7 @@ fn elts_to_ell(cx: &ext_ctxt, elts: &[@expr])
-> {pre: [@expr], rep: option::t[@expr], post: [@expr]} {
let idx: uint = 0u;
let res = none;
for elt: @expr in elts {
for elt: @expr in elts {
alt elt.node {
expr_mac(m) {
alt m.node {
@ -124,7 +124,7 @@ fn elts_to_ell(cx: &ext_ctxt, elts: &[@expr])
fn option_flatten_map[T, U](f: &fn(&T) -> option::t[U] , v: &[T]) ->
option::t[[U]] {
let res = ~[];
for elem: T in v {
for elem: T in v {
alt f(elem) { none. { ret none; } some(fv) { res += ~[fv]; } }
}
ret some(res);
@ -185,11 +185,11 @@ selectors. */
fn use_selectors_to_bind(b: &binders, e: @expr) -> option::t[bindings] {
let res = new_str_hash[arb_depth[matchable]]();
//need to do this first, to check vec lengths.
for sel: selector in b.literal_ast_matchers {
for sel: selector in b.literal_ast_matchers {
alt sel(match_expr(e)) { none. { ret none; } _ { } }
}
let never_mind: bool = false;
for each pair: @{key: ident, val: selector} in b.real_binders.items() {
for each pair: @{key: ident, val: selector} in b.real_binders.items() {
alt pair.val(match_expr(e)) {
none. { never_mind = true; }
some(mtc) { res.insert(pair.key, mtc); }
@ -233,7 +233,7 @@ fn transcribe(cx: &ext_ctxt, b: &bindings, body: @expr) -> @expr {
fn follow(m: &arb_depth[matchable], idx_path: @mutable [uint]) ->
arb_depth[matchable] {
let res: arb_depth[matchable] = m;
for idx: uint in *idx_path {
for idx: uint in *idx_path {
alt res {
leaf(_) { ret res;/* end of the line */ }
seq(new_ms, _) { res = new_ms.(idx); }
@ -276,7 +276,7 @@ iter free_vars(b: &bindings, e: @expr) -> ident {
let f = make_fold(f_pre);
f.fold_expr(e); // ignore result
dummy_out(f);
for each id: ident in idents.keys() { put id; }
for each id: ident in idents.keys() { put id; }
}
@ -293,7 +293,7 @@ fn transcribe_exprs(cx: &ext_ctxt, b: &bindings, idx_path: @mutable [uint],
let repeat: option::t[{rep_count: uint, name: ident}] = none;
/* we need to walk over all the free vars in lockstep, except for
the leaves, which are just duplicated */
for each fv: ident in free_vars(b, repeat_me) {
for each fv: ident in free_vars(b, repeat_me) {
let cur_pos = follow(b.get(fv), idx_path);
alt cur_pos {
leaf(_) { }
@ -686,7 +686,7 @@ fn add_new_extension(cx: &ext_ctxt, sp: span, arg: @expr,
let macro_name: option::t[str] = none;
let clauses: [@clause] = ~[];
for arg: @expr in args {
for arg: @expr in args {
alt arg.node {
expr_vec(elts, mut, seq_kind) {
if ivec::len(elts) != 2u {


@ -283,7 +283,7 @@ fn noop_fold_pat(p: &pat_, fld: ast_fold) -> pat_ {
}
pat_rec(fields, etc) {
let fs = ~[];
for f: ast::field_pat in fields {
for f: ast::field_pat in fields {
fs += ~[{ident: f.ident, pat: fld.fold_pat(f.pat)}];
}
pat_rec(fs, etc)


@ -29,7 +29,7 @@ fn eval_crate_directives(cx: ctx, cdirs: &[@ast::crate_directive],
prefix: str,
view_items: &mutable [@ast::view_item],
items: &mutable [@ast::item]) {
for sub_cdir: @ast::crate_directive in cdirs {
for sub_cdir: @ast::crate_directive in cdirs {
eval_crate_directive(cx, sub_cdir, prefix, view_items, items);
}
}


@ -166,7 +166,7 @@ fn consume_block_comment(rdr: &reader) {
fn digits_to_string(s: str) -> int {
let accum_int: int = 0;
for c: u8 in s {
for c: u8 in s {
accum_int *= 10;
accum_int += dec_digit_val(c as char);
}


@ -355,7 +355,7 @@ fn parse_ty_field(p: &parser) -> ast::ty_field {
// otherwise, fail
fn ident_index(p: &parser, args: &[ast::arg], i: &ast::ident) -> uint {
let j = 0u;
for a: ast::arg in args { if a.ident == i { ret j; } j += 1u; }
for a: ast::arg in args { if a.ident == i { ret j; } j += 1u; }
p.fatal("Unbound variable " + i + " in constraint arg");
}
@ -1219,7 +1219,7 @@ const ternary_prec: int = 0;
fn parse_more_binops(p: &parser, lhs: @ast::expr, min_prec: int) ->
@ast::expr {
let peeked = p.peek();
for cur: op_spec in *p.get_prec_table() {
for cur: op_spec in *p.get_prec_table() {
if cur.prec > min_prec && cur.tok == peeked {
p.bump();
let rhs = parse_more_binops(p, parse_prefix_expr(p), cur.prec);
@ -2123,7 +2123,7 @@ fn parse_item_tag(p: &parser, attrs: &[ast::attribute]) -> @ast::item {
let arg_tys =
parse_seq(token::LPAREN, token::RPAREN,
some(token::COMMA), bind parse_ty(_, false), p);
for ty: @ast::ty in arg_tys.node {
for ty: @ast::ty in arg_tys.node {
args += ~[{ty: ty, id: p.get_id()}];
}
vhi = arg_tys.span.hi;


@ -218,7 +218,7 @@ fn synth_comment(s: &ps, text: str) {
fn commasep[IN](s: &ps, b: breaks, elts: &[IN], op: fn(&ps, &IN) ) {
box(s, 0u, b);
let first = true;
for elt: IN in elts {
for elt: IN in elts {
if first { first = false; } else { word_space(s, ","); }
op(s, elt);
}
@ -231,7 +231,7 @@ fn commasep_cmnt[IN](s: &ps, b: breaks, elts: &[IN], op: fn(&ps, &IN) ,
box(s, 0u, b);
let len = ivec::len[IN](elts);
let i = 0u;
for elt: IN in elts {
for elt: IN in elts {
maybe_print_comment(s, get_span(elt).hi);
op(s, elt);
i += 1u;
@ -252,19 +252,19 @@ fn commasep_exprs(s: &ps, b: breaks, exprs: &[@ast::expr]) {
fn print_mod(s: &ps, _mod: &ast::_mod, attrs: &[ast::attribute]) {
print_inner_attributes(s, attrs);
for vitem: @ast::view_item in _mod.view_items {
for vitem: @ast::view_item in _mod.view_items {
print_view_item(s, vitem);
}
for item: @ast::item in _mod.items { print_item(s, item); }
for item: @ast::item in _mod.items { print_item(s, item); }
}
fn print_native_mod(s: &ps, nmod: &ast::native_mod,
attrs: &[ast::attribute]) {
print_inner_attributes(s, attrs);
for vitem: @ast::view_item in nmod.view_items {
for vitem: @ast::view_item in nmod.view_items {
print_view_item(s, vitem);
}
for item: @ast::native_item in nmod.items { print_native_item(s, item); }
for item: @ast::native_item in nmod.items { print_native_item(s, item); }
}
fn print_type(s: &ps, ty: &@ast::ty) {
@ -330,7 +330,7 @@ fn print_type(s: &ps, ty: &@ast::ty) {
ast::ty_obj(methods) {
head(s, "obj");
bopen(s);
for m: ast::ty_method in methods {
for m: ast::ty_method in methods {
hardbreak_if_not_bol(s);
cbox(s, indent_unit);
maybe_print_comment(s, m.span.lo);
@ -471,7 +471,7 @@ fn print_item(s: &ps, item: &@ast::item) {
end(s);
} else {
bopen(s);
for v: ast::variant in variants {
for v: ast::variant in variants {
space(s.s);
maybe_print_comment(s, v.span.lo);
word(s.s, v.node.name);
@ -506,7 +506,7 @@ fn print_item(s: &ps, item: &@ast::item) {
pclose(s);
space(s.s);
bopen(s);
for meth: @ast::method in _obj.methods {
for meth: @ast::method in _obj.methods {
let typarams: [ast::ty_param] = ~[];
hardbreak_if_not_bol(s);
maybe_print_comment(s, meth.span.lo);
@ -534,7 +534,7 @@ fn print_item(s: &ps, item: &@ast::item) {
fn print_outer_attributes(s: &ps, attrs: &[ast::attribute]) {
let count = 0;
for attr: ast::attribute in attrs {
for attr: ast::attribute in attrs {
alt attr.node.style {
ast::attr_outer. { print_attribute(s, attr); count += 1; }
_ {/* fallthrough */ }
@ -545,7 +545,7 @@ fn print_outer_attributes(s: &ps, attrs: &[ast::attribute]) {
fn print_inner_attributes(s: &ps, attrs: &[ast::attribute]) {
let count = 0;
for attr: ast::attribute in attrs {
for attr: ast::attribute in attrs {
alt attr.node.style {
ast::attr_inner. {
print_attribute(s, attr);
@ -594,7 +594,7 @@ fn print_possibly_embedded_block(s: &ps, blk: &ast::blk, embedded: embed_type,
}
let last_stmt = option::none;
for st: @ast::stmt in blk.node.stmts {
for st: @ast::stmt in blk.node.stmts {
maybe_protect_unop(s, last_stmt, stmt_(st));
print_stmt(s, *st);
last_stmt = option::some(st);
@ -856,12 +856,12 @@ fn print_expr(s: &ps, expr: &@ast::expr) {
print_expr(s, expr);
space(s.s);
bopen(s);
for arm: ast::arm in arms {
for arm: ast::arm in arms {
space(s.s);
cbox(s, alt_indent_unit);
ibox(s, 0u);
let first = true;
for p: @ast::pat in arm.pats {
for p: @ast::pat in arm.pats {
if first {
first = false;
} else { space(s.s); word_space(s, "|"); }
@ -1126,7 +1126,7 @@ fn print_path(s: &ps, path: &ast::path) {
maybe_print_comment(s, path.span.lo);
if path.node.global { word(s.s, "::"); }
let first = true;
for id: str in path.node.idents {
for id: str in path.node.idents {
if first { first = false; } else { word(s.s, "::"); }
word(s.s, id);
}
@ -1293,7 +1293,7 @@ fn print_view_item(s: &ps, item: &@ast::view_item) {
word_space(s, "=");
}
let first = true;
for elt: str in ids {
for elt: str in ids {
if first { first = false; } else { word(s.s, "::"); }
word(s.s, elt);
}
@ -1301,7 +1301,7 @@ fn print_view_item(s: &ps, item: &@ast::view_item) {
ast::view_item_import_glob(ids, _) {
head(s, "import");
let first = true;
for elt: str in ids {
for elt: str in ids {
if first { first = false; } else { word(s.s, "::"); }
word(s.s, elt);
}
@ -1320,7 +1320,7 @@ fn print_view_item(s: &ps, item: &@ast::view_item) {
// FIXME: The fact that this builds up the table anew for every call is
// not good. Eventually, table should be a const.
fn operator_prec(op: ast::binop) -> int {
for spec: parse::parser::op_spec in *parse::parser::prec_table() {
for spec: parse::parser::op_spec in *parse::parser::prec_table() {
if spec.op == op { ret spec.prec; }
}
fail;
@ -1497,7 +1497,7 @@ fn print_comment(s: &ps, cmnt: lexer::cmnt) {
}
lexer::isolated. {
pprust::hardbreak_if_not_bol(s);
for line: str in cmnt.lines { word(s.s, line); hardbreak(s.s); }
for line: str in cmnt.lines { word(s.s, line); hardbreak(s.s); }
}
lexer::trailing. {
word(s.s, " ");
@ -1506,7 +1506,7 @@ fn print_comment(s: &ps, cmnt: lexer::cmnt) {
hardbreak(s.s);
} else {
ibox(s, 0u);
for line: str in cmnt.lines { word(s.s, line); hardbreak(s.s); }
for line: str in cmnt.lines { word(s.s, line); hardbreak(s.s); }
end(s);
}
}
@ -1571,7 +1571,7 @@ fn constr_args_to_str[T](f: &fn(&T) -> str ,
args: &[@ast::sp_constr_arg[T]]) -> str {
let comma = false;
let s = "(";
for a: @ast::sp_constr_arg[T] in args {
for a: @ast::sp_constr_arg[T] in args {
if comma { s += ", "; } else { comma = true; }
s += constr_arg_to_str[T](f, a.node);
}
@ -1602,7 +1602,7 @@ fn ast_ty_fn_constr_to_str(c: &@ast::constr) -> str {
fn ast_ty_fn_constrs_str(constrs: &[@ast::constr]) -> str {
let s = "";
let colon = true;
for c: @ast::constr in constrs {
for c: @ast::constr in constrs {
if colon { s += " : "; colon = false; } else { s += ", "; }
s += ast_ty_fn_constr_to_str(c);
}
@ -1624,7 +1624,7 @@ fn ast_fn_constrs_str(decl: &ast::fn_decl,
constrs: &[@ast::constr]) -> str {
let s = "";
let colon = true;
for c: @ast::constr in constrs {
for c: @ast::constr in constrs {
if colon { s += " : "; colon = false; } else { s += ", "; }
s += ast_fn_constr_to_str(decl, c);
}
@ -1649,7 +1649,7 @@ fn ty_constr_to_str(c: &@ast::ty_constr) -> str {
fn ast_ty_constrs_str(constrs: &[@ast::ty_constr]) -> str {
let s = "";
let colon = true;
for c: @ast::ty_constr in constrs {
for c: @ast::ty_constr in constrs {
if colon { s += " : "; colon = false; } else { s += ", "; }
s += ty_constr_to_str(c);
}


@ -60,7 +60,7 @@ fn visit_crate_directive[E](cd: &@crate_directive, e: &E, v: &vt[E]) {
alt cd.node {
cdir_src_mod(_, _, _) { }
cdir_dir_mod(_, _, cdirs, _) {
for cdir: @crate_directive in cdirs {
for cdir: @crate_directive in cdirs {
visit_crate_directive(cdir, e, v);
}
}
@ -71,8 +71,8 @@ fn visit_crate_directive[E](cd: &@crate_directive, e: &E, v: &vt[E]) {
}
fn visit_mod[E](m: &_mod, sp: &span, e: &E, v: &vt[E]) {
for vi: @view_item in m.view_items { v.visit_view_item(vi, e, v); }
for i: @item in m.items { v.visit_item(i, e, v); }
for vi: @view_item in m.view_items { v.visit_view_item(vi, e, v); }
for i: @item in m.items { v.visit_item(i, e, v); }
}
fn visit_view_item[E](vi: &@view_item, e: &E, v: &vt[E]) { }
@ -89,21 +89,21 @@ fn visit_item[E](i: &@item, e: &E, v: &vt[E]) {
item_fn(f, tp) { v.visit_fn(f, tp, i.span, some(i.ident), i.id, e, v); }
item_mod(m) { v.visit_mod(m, i.span, e, v); }
item_native_mod(nm) {
for vi: @view_item in nm.view_items { v.visit_view_item(vi, e, v); }
for ni: @native_item in nm.items { v.visit_native_item(ni, e, v); }
for vi: @view_item in nm.view_items { v.visit_view_item(vi, e, v); }
for ni: @native_item in nm.items { v.visit_native_item(ni, e, v); }
}
item_ty(t, _) { v.visit_ty(t, e, v); }
item_res(f, dtor_id, tps, _) {
v.visit_fn(f, tps, i.span, some(i.ident), dtor_id, e, v);
}
item_tag(variants, _) {
for vr: variant in variants {
for va: variant_arg in vr.node.args { v.visit_ty(va.ty, e, v); }
for vr: variant in variants {
for va: variant_arg in vr.node.args { v.visit_ty(va.ty, e, v); }
}
}
item_obj(ob, _, _) {
for f: obj_field in ob.fields { v.visit_ty(f.ty, e, v); }
for m: @method in ob.methods {
for f: obj_field in ob.fields { v.visit_ty(f.ty, e, v); }
for m: @method in ob.methods {
v.visit_fn(m.node.meth, ~[], m.span, some(m.node.ident),
m.node.id, e, v);
}
@ -131,29 +131,29 @@ fn visit_ty[E](t: &@ty, e: &E, v: &vt[E]) {
ty_chan(t) { v.visit_ty(t, e, v); }
ty_task. {/* no-op */ }
ty_rec(flds) {
for f: ty_field in flds { v.visit_ty(f.node.mt.ty, e, v); }
for f: ty_field in flds { v.visit_ty(f.node.mt.ty, e, v); }
}
ty_tup(ts) {
for tt in ts { v.visit_ty(tt, e, v); }
}
ty_fn(_, args, out, _, constrs) {
for a: ty_arg in args { v.visit_ty(a.node.ty, e, v); }
for c: @constr in constrs {
for a: ty_arg in args { v.visit_ty(a.node.ty, e, v); }
for c: @constr in constrs {
v.visit_constr(c.node.path, c.span, c.node.id, e, v);
}
v.visit_ty(out, e, v);
}
ty_obj(tmeths) {
for m: ty_method in tmeths {
for a: ty_arg in m.node.inputs { v.visit_ty(a.node.ty, e, v); }
for m: ty_method in tmeths {
for a: ty_arg in m.node.inputs { v.visit_ty(a.node.ty, e, v); }
v.visit_ty(m.node.output, e, v);
}
}
ty_path(p, _) { for tp: @ty in p.node.types { v.visit_ty(tp, e, v); } }
ty_path(p, _) { for tp: @ty in p.node.types { v.visit_ty(tp, e, v); } }
ty_type. {/* no-op */ }
ty_constr(t, cs) {
v.visit_ty(t, e, v);
for tc: @spanned[constr_general_[path, node_id]] in cs {
for tc: @spanned[constr_general_[path, node_id]] in cs {
v.visit_constr(tc.node.path, tc.span, tc.node.id, e, v);
}
}
@ -169,11 +169,11 @@ fn visit_constr[E](operator: &path, sp: &span, id: node_id, e: &E,
fn visit_pat[E](p: &@pat, e: &E, v: &vt[E]) {
alt p.node {
pat_tag(path, children) {
for tp: @ty in path.node.types { v.visit_ty(tp, e, v); }
for child: @pat in children { v.visit_pat(child, e, v); }
for tp: @ty in path.node.types { v.visit_ty(tp, e, v); }
for child: @pat in children { v.visit_pat(child, e, v); }
}
pat_rec(fields, _) {
for f: field_pat in fields { v.visit_pat(f.pat, e, v); }
for f: field_pat in fields { v.visit_pat(f.pat, e, v); }
}
pat_tup(elts) {
for elt in elts { v.visit_pat(elt, e, v); }
@ -191,8 +191,8 @@ fn visit_native_item[E](ni: &@native_item, e: &E, v: &vt[E]) {
}
fn visit_fn_decl[E](fd: &fn_decl, e: &E, v: &vt[E]) {
for a: arg in fd.inputs { v.visit_ty(a.ty, e, v); }
for c: @constr in fd.constraints {
for a: arg in fd.inputs { v.visit_ty(a.ty, e, v); }
for c: @constr in fd.constraints {
v.visit_constr(c.node.path, c.span, c.node.id, e, v);
}
v.visit_ty(fd.output, e, v);
@ -205,7 +205,7 @@ fn visit_fn[E](f: &_fn, tp: &[ty_param], sp: &span, i: &fn_ident, id: node_id,
}
fn visit_block[E](b: &ast::blk, e: &E, v: &vt[E]) {
for s: @stmt in b.node.stmts { v.visit_stmt(s, e, v); }
for s: @stmt in b.node.stmts { v.visit_stmt(s, e, v); }
visit_expr_opt(b.node.expr, e, v);
}
@ -220,7 +220,7 @@ fn visit_stmt[E](s: &@stmt, e: &E, v: &vt[E]) {
fn visit_decl[E](d: &@decl, e: &E, v: &vt[E]) {
alt d.node {
decl_local(locs) {
for loc: @ast::local in locs { v.visit_local(loc, e, v); }
for loc: @ast::local in locs { v.visit_local(loc, e, v); }
}
decl_item(it) { v.visit_item(it, e, v); }
}
@ -231,7 +231,7 @@ fn visit_expr_opt[E](eo: option::t[@expr], e: &E, v: &vt[E]) {
}
fn visit_exprs[E](exprs: &[@expr], e: &E, v: &vt[E]) {
for ex: @expr in exprs { v.visit_expr(ex, e, v); }
for ex: @expr in exprs { v.visit_expr(ex, e, v); }
}
fn visit_mac[E](m: mac, e: &E, v: &vt[E]) {
@ -247,7 +247,7 @@ fn visit_expr[E](ex: &@expr, e: &E, v: &vt[E]) {
alt ex.node {
expr_vec(es, _, _) { visit_exprs(es, e, v); }
expr_rec(flds, base) {
for f: field in flds { v.visit_expr(f.node.expr, e, v); }
for f: field in flds { v.visit_expr(f.node.expr, e, v); }
visit_expr_opt(base, e, v);
}
expr_tup(elts) {
@ -260,7 +260,7 @@ fn visit_expr[E](ex: &@expr, e: &E, v: &vt[E]) {
expr_self_method(_) { }
expr_bind(callee, args) {
v.visit_expr(callee, e, v);
for eo: option::t[@expr] in args { visit_expr_opt(eo, e, v); }
for eo: option::t[@expr] in args { visit_expr_opt(eo, e, v); }
}
expr_spawn(_, _, callee, args) {
v.visit_expr(callee, e, v);
@ -294,7 +294,7 @@ fn visit_expr[E](ex: &@expr, e: &E, v: &vt[E]) {
expr_do_while(b, x) { v.visit_block(b, e, v); v.visit_expr(x, e, v); }
expr_alt(x, arms) {
v.visit_expr(x, e, v);
for a: arm in arms { v.visit_arm(a, e, v); }
for a: arm in arms { v.visit_arm(a, e, v); }
}
expr_fn(f) { v.visit_fn(f, ~[], ex.span, none, ex.id, e, v); }
expr_block(b) { v.visit_block(b, e, v); }
@ -310,7 +310,7 @@ fn visit_expr[E](ex: &@expr, e: &E, v: &vt[E]) {
expr_recv(a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
expr_field(x, _) { v.visit_expr(x, e, v); }
expr_index(a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
expr_path(p) { for tp: @ty in p.node.types { v.visit_ty(tp, e, v); } }
expr_path(p) { for tp: @ty in p.node.types { v.visit_ty(tp, e, v); } }
expr_fail(eo) { visit_expr_opt(eo, e, v); }
expr_break. { }
expr_cont. { }
@ -326,7 +326,7 @@ fn visit_expr[E](ex: &@expr, e: &E, v: &vt[E]) {
alt anon_obj.fields {
none. { }
some(fields) {
for f: anon_obj_field in fields {
for f: anon_obj_field in fields {
v.visit_ty(f.ty, e, v);
v.visit_expr(f.expr, e, v);
}
@ -336,7 +336,7 @@ fn visit_expr[E](ex: &@expr, e: &E, v: &vt[E]) {
none. { }
some(ex) { v.visit_expr(ex, e, v); }
}
for m: @method in anon_obj.methods {
for m: @method in anon_obj.methods {
v.visit_fn(m.node.meth, ~[], m.span, some(m.node.ident),
m.node.id, e, v);
}
@ -347,7 +347,7 @@ fn visit_expr[E](ex: &@expr, e: &E, v: &vt[E]) {
}
fn visit_arm[E](a: &arm, e: &E, v: &vt[E]) {
for p: @pat in a.pats { v.visit_pat(p, e, v); }
for p: @pat in a.pats { v.visit_pat(p, e, v); }
v.visit_block(a.body, e, v);
}


@ -51,7 +51,7 @@ fn field_expr(f: &ast::field) -> @ast::expr { ret f.node.expr; }
fn field_exprs(fields: &[ast::field]) -> [@ast::expr] {
let es = ~[];
for f: ast::field in fields { es += ~[f.node.expr]; }
for f: ast::field in fields { es += ~[f.node.expr]; }
ret es;
}


@ -48,7 +48,7 @@ fn ty_to_str(cx: &ctxt, typ: &t) -> str {
alt ident { some(i) { s += " "; s += i; } _ { } }
s += "(";
let strs = ~[];
for a: arg in inputs { strs += ~[fn_input_to_str(cx, a)]; }
for a: arg in inputs { strs += ~[fn_input_to_str(cx, a)]; }
s += str::connect(strs, ", ");
s += ")";
if struct(cx, output) != ty_nil {
@ -100,7 +100,7 @@ fn ty_to_str(cx: &ctxt, typ: &t) -> str {
ty_task. { s += "task"; }
ty_rec(elems) {
let strs: [str] = ~[];
for fld: field in elems { strs += ~[field_to_str(cx, fld)]; }
for fld: field in elems { strs += ~[field_to_str(cx, fld)]; }
s += "{" + str::connect(strs, ",") + "}";
}
ty_tup(elems) {
@ -114,7 +114,7 @@ fn ty_to_str(cx: &ctxt, typ: &t) -> str {
s += "<tag#" + int::str(id.crate) + ":" + int::str(id.node) + ">";
if ivec::len[t](tps) > 0u {
let strs: [str] = ~[];
for typ: t in tps { strs += ~[ty_to_str(cx, typ)]; }
for typ: t in tps { strs += ~[ty_to_str(cx, typ)]; }
s += "[" + str::connect(strs, ",") + "]";
}
}
@ -128,7 +128,7 @@ fn ty_to_str(cx: &ctxt, typ: &t) -> str {
}
ty_obj(meths) {
let strs = ~[];
for m: method in meths { strs += ~[method_to_str(cx, m)]; }
for m: method in meths { strs += ~[method_to_str(cx, m)]; }
s += "obj {\n\t" + str::connect(strs, "\n\t") + "\n}";
}
ty_res(id, _, _) {
@ -158,7 +158,7 @@ fn constr_to_str(c: &@constr) -> str {
fn constrs_str(constrs: &[@constr]) -> str {
let s = "";
let colon = true;
for c: @constr in constrs {
for c: @constr in constrs {
if colon { s += " : "; colon = false; } else { s += ", "; }
s += constr_to_str(c);
}


@ -65,17 +65,17 @@ fn vec_edits[T](v: &[T], xs: &[T]) -> [[T]] {
}
for each i: uint in ix(0u, 1u, Lv) { edits += ~[vec_omit(v, i)]; }
for each i: uint in ix(0u, 1u, Lv) { edits += ~[vec_dup(v, i)]; }
for each i: uint in ix(0u, 2u, Lv) { edits += ~[vec_swadj(v, i)]; }
for each i: uint in ix(1u, 2u, Lv) { edits += ~[vec_prefix(v, i)]; }
for each i: uint in ix(2u, 1u, Lv) { edits += ~[vec_suffix(v, i)]; }
for each i: uint in ix(0u, 1u, Lv) { edits += ~[vec_omit(v, i)]; }
for each i: uint in ix(0u, 1u, Lv) { edits += ~[vec_dup(v, i)]; }
for each i: uint in ix(0u, 2u, Lv) { edits += ~[vec_swadj(v, i)]; }
for each i: uint in ix(1u, 2u, Lv) { edits += ~[vec_prefix(v, i)]; }
for each i: uint in ix(2u, 1u, Lv) { edits += ~[vec_suffix(v, i)]; }
for each j: uint in ix(0u, 1u, len(xs)) {
for each i: uint in ix(0u, 1u, Lv) {
for each j: uint in ix(0u, 1u, len(xs)) {
for each i: uint in ix(0u, 1u, Lv) {
edits += ~[vec_poke(v, i, xs.(j))];
}
for each i: uint in ix(0u, 0u, Lv) {
for each i: uint in ix(0u, 0u, Lv) {
edits += ~[vec_insert(v, i, xs.(j))];
}
}
@ -99,7 +99,7 @@ fn vec_to_str(v: &[int]) -> str {
fn show_edits(a: &[int], xs: &[int]) {
log_err "=== Edits of " + vec_to_str(a) + " ===";
let b = vec_edits(a, xs);
for each i: uint in ix(0u, 1u, len(b)) { log_err vec_to_str(b.(i)); }
for each i: uint in ix(0u, 1u, len(b)) { log_err vec_to_str(b.(i)); }
}
fn demo_edits() {


@ -41,7 +41,7 @@ fn process(op: &block(uint, uint) -> uint , v0: &t, v1: &t) -> bool {
assert (ivec::len(v0.storage) == len);
assert (v0.nbits == v1.nbits);
let changed = false;
for each i: uint in uint::range(0u, len) {
for each i: uint in uint::range(0u, len) {
let w0 = v0.storage.(i);
let w1 = v1.storage.(i);
let w = op(w0, w1);
@ -71,7 +71,7 @@ fn assign(v0: &t, v1: t) -> bool {
fn clone(v: t) -> t {
let storage = ivec::init_elt_mut[uint](0u, v.nbits / uint_bits() + 1u);
let len = ivec::len(v.storage);
for each i: uint in uint::range(0u, len) { storage.(i) = v.storage.(i); }
for each i: uint in uint::range(0u, len) { storage.(i) = v.storage.(i); }
ret @{storage: storage, nbits: v.nbits};
}
@ -98,17 +98,17 @@ fn equal(v0: &t, v1: &t) -> bool {
}
fn clear(v: &t) {
for each i: uint in uint::range(0u, ivec::len(v.storage)) {
for each i: uint in uint::range(0u, ivec::len(v.storage)) {
v.storage.(i) = 0u;
}
}
fn set_all(v: &t) {
for each i: uint in uint::range(0u, v.nbits) { set(v, i, true); }
for each i: uint in uint::range(0u, v.nbits) { set(v, i, true); }
}
fn invert(v: &t) {
for each i: uint in uint::range(0u, ivec::len(v.storage)) {
for each i: uint in uint::range(0u, ivec::len(v.storage)) {
v.storage.(i) = !v.storage.(i);
}
}
@ -135,14 +135,14 @@ fn set(v: &t, i: uint, x: bool) {
/* true if all bits are 1 */
fn is_true(v: &t) -> bool {
for i: uint in to_ivec(v) { if i != 1u { ret false; } }
for i: uint in to_ivec(v) { if i != 1u { ret false; } }
ret true;
}
/* true if all bits are non-1 */
fn is_false(v: &t) -> bool {
for i: uint in to_ivec(v) { if i == 1u { ret false; } }
for i: uint in to_ivec(v) { if i == 1u { ret false; } }
ret true;
}
@ -155,7 +155,7 @@ fn to_ivec(v: &t) -> [uint] {
fn to_str(v: &t) -> str {
let rs = "";
for i: uint in to_ivec(v) {
for i: uint in to_ivec(v) {
if i == 1u { rs += "1"; } else { rs += "0"; }
}
ret rs;


@ -12,7 +12,7 @@ fn either[T, U, V](f_left: &block(&T) -> V, f_right: &block(&U) -> V,
fn lefts[T, U](eithers: &[t[T, U]]) -> [T] {
let result: [T] = ~[];
for elt: t[T, U] in eithers {
for elt: t[T, U] in eithers {
alt elt { left(l) { result += ~[l] } _ {/* fallthrough */ } }
}
ret result;
@ -20,7 +20,7 @@ fn lefts[T, U](eithers: &[t[T, U]]) -> [T] {
fn rights[T, U](eithers: &[t[T, U]]) -> [U] {
let result: [U] = ~[];
for elt: t[T, U] in eithers {
for elt: t[T, U] in eithers {
alt elt { right(r) { result += ~[r] } _ {/* fallthrough */ } }
}
ret result;
@ -29,7 +29,7 @@ fn rights[T, U](eithers: &[t[T, U]]) -> [U] {
fn partition[T, U](eithers: &[t[T, U]]) -> {lefts: [T], rights: [U]} {
let lefts: [T] = ~[];
let rights: [U] = ~[];
for elt: t[T, U] in eithers {
for elt: t[T, U] in eithers {
alt elt { left(l) { lefts += ~[l] } right(r) { rights += ~[r] } }
}
ret {lefts: lefts, rights: rights};


@ -440,7 +440,7 @@ mod rt {
ret padstr + s;
}
fn have_flag(flags: &[flag], f: flag) -> bool {
for candidate: flag in flags { if candidate == f { ret true; } }
for candidate: flag in flags { if candidate == f { ret true; } }
ret false;
}
}


@ -48,7 +48,7 @@ fn list_dir(p: path) -> [str] {
let pl = str::byte_len(p);
if pl == 0u || p.(pl - 1u) as char != os_fs::path_sep { p += path_sep(); }
let full_paths: [str] = ~[];
for filename: str in os_fs::list_dir(p) {
for filename: str in os_fs::list_dir(p) {
if !str::eq(filename, ".") {
if !str::eq(filename, "..") { full_paths += ~[p + filename]; }
}


@ -146,7 +146,7 @@ fn getopts(args: &[str], opts: &[opt]) -> result {
}
}
let name_pos = 0u;
for nm: name in names {
for nm: name in names {
name_pos += 1u;
let optid;
alt find_opt(opts, nm) {
@ -218,7 +218,7 @@ fn opt_str(m: &match, nm: str) -> str {
fn opt_strs(m: &match, nm: str) -> [str] {
let acc: [str] = ~[];
for v: optval in opt_vals(m, nm) {
for v: optval in opt_vals(m, nm) {
alt v { val(s) { acc += ~[s]; } _ { } }
}
ret acc;


@ -291,7 +291,7 @@ obj fd_buf_writer(fd: int, res: option::t[@fd_res]) {
fn file_buf_writer(path: str, flags: &[fileflag]) -> buf_writer {
let fflags: int =
os::libc_constants::O_WRONLY() | os::libc_constants::O_BINARY();
for f: fileflag in flags {
for f: fileflag in flags {
alt f {
append. { fflags |= os::libc_constants::O_APPEND(); }
create. { fflags |= os::libc_constants::O_CREAT(); }
@ -402,7 +402,7 @@ obj byte_buf_writer(buf: mutable_byte_buf) {
// Fast path.
if buf.pos == ivec::len(buf.buf) {
for b: u8 in v { buf.buf += ~[mutable b]; }
for b: u8 in v { buf.buf += ~[mutable b]; }
buf.pos += ivec::len[u8](v);
ret;
}


@ -74,13 +74,13 @@ fn init_elt_mut[@T](t: &T, n_elts: uint) -> [mutable T] {
fn to_mut[@T](v: &[T]) -> [mutable T] {
let vres = ~[mutable];
for t: T in v { vres += ~[mutable t]; }
for t: T in v { vres += ~[mutable t]; }
ret vres;
}
fn from_mut[@T](v: &[mutable T]) -> [T] {
let vres = ~[];
for t: T in v { vres += ~[t]; }
for t: T in v { vres += ~[t]; }
ret vres;
}
@ -193,7 +193,7 @@ fn grow_set[@T](v: &mutable [mutable T], index: uint, initval: &T, val: &T) {
fn map[@T, @U](f: &block(&T) -> U , v: &[mutable? T]) -> [U] {
let result = ~[];
reserve(result, len(v));
for elem: T in v {
for elem: T in v {
let elem2 = elem; // satisfies alias checker
result += ~[f(elem2)];
}
@ -213,7 +213,7 @@ fn map2[@T, @U, @V](f: &block(&T, &U) -> V, v0: &[T], v1: &[U])
fn filter_map[@T, @U](f: &block(&T) -> option::t[U],
v: &[mutable? T]) -> [U] {
let result = ~[];
for elem: T in v {
for elem: T in v {
let elem2 = elem; // satisfies alias checker
alt f(elem2) {
none. {/* no-op */ }
@ -232,28 +232,28 @@ fn foldl[@T, @U](p: &block(&U, &T) -> U , z: &U, v: &[mutable? T]) -> U {
}
fn any[T](f: &block(&T) -> bool , v: &[T]) -> bool {
for elem: T in v { if f(elem) { ret true; } }
for elem: T in v { if f(elem) { ret true; } }
ret false;
}
fn all[T](f: &block(&T) -> bool , v: &[T]) -> bool {
for elem: T in v { if !f(elem) { ret false; } }
for elem: T in v { if !f(elem) { ret false; } }
ret true;
}
fn member[T](x: &T, v: &[T]) -> bool {
for elt: T in v { if x == elt { ret true; } }
for elt: T in v { if x == elt { ret true; } }
ret false;
}
fn count[T](x: &T, v: &[mutable? T]) -> uint {
let cnt = 0u;
for elt: T in v { if x == elt { cnt += 1u; } }
for elt: T in v { if x == elt { cnt += 1u; } }
ret cnt;
}
fn find[@T](f: &block(&T) -> bool , v: &[T]) -> option::t[T] {
for elt: T in v { if f(elt) { ret some(elt); } }
for elt: T in v { if f(elt) { ret some(elt); } }
ret none;
}


@ -9,7 +9,7 @@ fn from_vec[@T](v: &[T]) -> list[T] {
// a reverse vector iterator. Unfortunately generic iterators seem not to
// work yet.
for item: T in ivec::reversed(v) { l = cons[T](item, @l); }
for item: T in ivec::reversed(v) { l = cons[T](item, @l); }
ret l;
}


@ -100,7 +100,7 @@ fn mk_hashmap[@K, @V](hasher: &hashfn[K], eqer: &eqfn[K]) -> hashmap[K, V] {
fn rehash[@K, @V](hasher: &hashfn[K], eqer: &eqfn[K],
oldbkts: &[mutable bucket[K, V]], noldbkts: uint,
newbkts: &[mutable bucket[K, V]], nnewbkts: uint) {
for b: bucket[K, V] in oldbkts {
for b: bucket[K, V] in oldbkts {
alt b {
some(k_, v_) {
let k = k_;
@ -177,12 +177,12 @@ fn mk_hashmap[@K, @V](hasher: &hashfn[K], eqer: &eqfn[K]) -> hashmap[K, V] {
bkts = newbkts;
}
iter items() -> @{key: K, val: V} {
for b: bucket[K, V] in bkts {
for b: bucket[K, V] in bkts {
alt b { some(k, v) { put @{key: k, val: v}; } _ { } }
}
}
iter keys() -> K {
for b: bucket[K, V] in bkts {
for b: bucket[K, V] in bkts {
alt b { some(k, _) { put k; } _ { } }
}
}


@ -14,7 +14,7 @@ native "rust" mod rustrt {
fn arg_vec(prog: str, args: &[str]) -> [sbuf] {
let argptrs = ~[str::buf(prog)];
for arg: str in args { argptrs += ~[str::buf(arg)]; }
for arg: str in args { argptrs += ~[str::buf(arg)]; }
argptrs += ~[0 as sbuf];
ret argptrs;
}


@ -64,7 +64,7 @@ fn mk_sha1() -> sha1 {
// FIXME: Should be typestate precondition
assert (!st.computed);
for element: u8 in msg {
for element: u8 in msg {
st.msg_block.(st.msg_block_idx) = element;
st.msg_block_idx += 1u;
st.len_low += 8u32;
@ -165,7 +165,7 @@ fn mk_sha1() -> sha1 {
fn mk_result(st: &sha1state) -> [u8] {
if !st.computed { pad_msg(st); st.computed = true; }
let rs: [u8] = ~[];
for hpart: u32 in st.h {
for hpart: u32 in st.h {
let a = hpart >> 24u32 & 0xFFu32 as u8;
let b = hpart >> 16u32 & 0xFFu32 as u8;
let c = hpart >> 8u32 & 0xFFu32 as u8;
@ -243,7 +243,7 @@ fn mk_sha1() -> sha1 {
fn result_str() -> str {
let r = mk_result(st);
let s = "";
for b: u8 in r { s += uint::to_str(b as uint, 16u); }
for b: u8 in r { s += uint::to_str(b as uint, 16u); }
ret s;
}
}


@ -100,7 +100,7 @@ fn hash(s: &str) -> uint {
// FIXME: replace with murmur.
let u: uint = 5381u;
for c: u8 in s { u *= 33u; u += c as uint; }
for c: u8 in s { u *= 33u; u += c as uint; }
ret u;
}
@ -156,7 +156,7 @@ fn is_ascii(s: str) -> bool {
fn alloc(n_bytes: uint) -> str { ret rustrt::str_alloc(n_bytes); }
/// Returns true if the string has length 0
pred is_empty(s: str) -> bool { for c: u8 in s { ret false; } ret true; }
pred is_empty(s: str) -> bool { for c: u8 in s { ret false; } ret true; }
/// Returns true if the string has length greater than 0
pred is_not_empty(s: str) -> bool { !is_empty(s) }
@ -240,7 +240,7 @@ fn from_char(ch: char) -> str {
fn from_chars(chs: &[char]) -> str {
let buf = "";
for ch: char in chs { push_utf8_bytes(buf, ch); }
for ch: char in chs { push_utf8_bytes(buf, ch); }
ret buf;
}
@ -337,7 +337,7 @@ fn refcount(s: str) -> uint {
// Standard bits from the world of string libraries.
fn index(s: str, c: u8) -> int {
let i: int = 0;
for k: u8 in s { if k == c { ret i; } i += 1; }
for k: u8 in s { if k == c { ret i; } i += 1; }
ret -1;
}
@ -353,7 +353,7 @@ fn find(haystack: str, needle: str) -> int {
if needle_len == 0 { ret 0; }
fn match_at(haystack: &str, needle: &str, i: int) -> bool {
let j: int = i;
for c: u8 in needle { if haystack.(j) != c { ret false; } j += 1; }
for c: u8 in needle { if haystack.(j) != c { ret false; } j += 1; }
ret true;
}
let i: int = 0;
@ -436,7 +436,7 @@ fn split(s: str, sep: u8) -> [str] {
let v: [str] = ~[];
let accum: str = "";
let ends_with_sep: bool = false;
for c: u8 in s {
for c: u8 in s {
if c == sep {
v += ~[accum];
accum = "";
@ -449,14 +449,14 @@ fn split(s: str, sep: u8) -> [str] {
fn concat(v: &[str]) -> str {
let s: str = "";
for ss: str in v { s += ss; }
for ss: str in v { s += ss; }
ret s;
}
fn connect(v: &[str], sep: str) -> str {
let s: str = "";
let first: bool = true;
for ss: str in v {
for ss: str in v {
if first { first = false; } else { s += sep; }
s += ss;
}
@ -470,7 +470,7 @@ fn to_upper(s: str) -> str {
let ascii_a = 'a' as u8;
let ascii_z = 'z' as u8;
let diff = 32u8;
for byte: u8 in s {
for byte: u8 in s {
let next;
if ascii_a <= byte && byte <= ascii_z {
next = byte - diff;


@ -51,7 +51,7 @@ fn color_supported() -> bool {
let supported_terms = ~["xterm-color", "xterm", "screen-bce"];
ret alt generic_os::getenv("TERM") {
option::some(env) {
for term: str in supported_terms {
for term: str in supported_terms {
if str::eq(term, env) { ret true; }
}
false


@ -50,7 +50,7 @@ type test_desc = {name: test_name, fn: test_fn, ignore: bool};
// arguments and a vector of test_descs (generated at compile time).
fn test_main(args: &vec[str], tests: &[test_desc]) {
let ivec_args =
{ let iargs = ~[]; for arg: str in args { iargs += ~[arg] } iargs };
{ let iargs = ~[]; for arg: str in args { iargs += ~[arg] } iargs };
check (ivec::is_not_empty(ivec_args));
let opts =
alt parse_opts(ivec_args) {


@ -43,11 +43,11 @@ fn sub(t: str, n: int) -> str {
iter ninetynine() -> int { let n: int = 100; while n > 1 { n -= 1; put n; } }
fn main() {
for each n: int in ninetynine() {
for each n: int in ninetynine() {
log sub(b1(), n);
log sub(b2(), n - 1);
log "";
}
log b7();
log b8();
}
}


@ -26,7 +26,7 @@ type aminoacids = {ch: char, prob: u32};
fn make_cumulative(aa: &[aminoacids]) -> [aminoacids] {
let cp: u32 = 0u32;
let ans: [aminoacids] = ~[];
for a: aminoacids in aa { cp += a.prob; ans += ~[{ch: a.ch, prob: cp}]; }
for a: aminoacids in aa { cp += a.prob; ans += ~[{ch: a.ch, prob: cp}]; }
ret ans;
}
@ -47,7 +47,7 @@ fn make_random_fasta(id: str, desc: str, genelist: &[aminoacids], n: int) {
log ">" + id + " " + desc;
let rng = myrandom(std::rand::mk_rng().next());
let op: str = "";
for each i: uint in uint::range(0u, n as uint) {
for each i: uint in uint::range(0u, n as uint) {
str::push_byte(op, select_random(rng.next(100u32), genelist) as u8);
if str::byte_len(op) >= LINE_LENGTH() { log op; op = ""; }
}
@ -58,7 +58,7 @@ fn make_repeat_fasta(id: str, desc: str, s: str, n: int) {
log ">" + id + " " + desc;
let op: str = "";
let sl: uint = str::byte_len(s);
for each i: uint in uint::range(0u, n as uint) {
for each i: uint in uint::range(0u, n as uint) {
str::push_byte(op, s.(i % sl));
if str::byte_len(op) >= LINE_LENGTH() { log op; op = ""; }
}
@ -91,4 +91,4 @@ fn main(args: vec[str]) {
make_repeat_fasta("ONE", "Homo sapiens alu", alu, n * 2);
make_random_fasta("TWO", "IUB ambiguity codes", iub, n * 3);
make_random_fasta("THREE", "Homo sapiens frequency", homosapiens, n * 5);
}
}


@ -17,7 +17,7 @@ fn main() {
let bodies: [Body::props] = NBodySystem::MakeNBodySystem();
for n: int in inputs {
for n: int in inputs {
log NBodySystem::energy(bodies);
let i: int = 0;
@ -201,4 +201,4 @@ mod Body {
props.vz = -pz / SOLAR_MASS;
}
}
}


@ -73,7 +73,7 @@ fn stress_task(id: int) {
fn stress(num_tasks: int) {
let tasks = [];
for each i: int in range(0, num_tasks) {
for each i: int in range(0, num_tasks) {
tasks += [task::_spawn(bind stress_task(i))];
}
for t in tasks { task::join_id(t); }
@ -99,8 +99,8 @@ fn main(argv: vec[str]) {
let out = io::stdout();
for each n: int in range(1, max + 1) {
for each i: int in range(0, num_trials) {
for each n: int in range(1, max + 1) {
for each i: int in range(0, num_trials) {
let start = time::precise_time_ns();
let fibn = fib(n);
let stop = time::precise_time_ns();
@ -113,4 +113,4 @@ fn main(argv: vec[str]) {
}
}
}
}
}


@ -76,7 +76,7 @@ mod map_reduce {
fn start_mappers(ctrl: _chan[ctrl_proto], inputs: &[str]) -> [task_id] {
let tasks = ~[];
for i: str in inputs {
for i: str in inputs {
tasks += ~[task::_spawn(bind map_task(ctrl, i))];
}
ret tasks;


@ -10,10 +10,10 @@ fn bitv_to_str(enclosing: fn_info, v: bitv::t) -> str {
let s = "";
// error is that the value type in the hash map is var_info, not a box
for each p: @{key: uint, val: @uint} in enclosing.vars.items() {
for each p: @{key: uint, val: @uint} in enclosing.vars.items() {
if bitv::get(v, *p.val) { s += "foo"; }
}
ret s;
}
fn main() { log "OK"; }
fn main() { log "OK"; }


@ -6,10 +6,10 @@ fn foo(c: [int]) {
alt none[int] {
some[int](_) { for i: int in c { log a; let a = 17; b += ~[a]; } }
some[int](_) { for i: int in c { log a; let a = 17; b += ~[a]; } }
}
}
tag t[T] { none; some(T); }
fn main() { foo(~[]); }
fn main() { foo(~[]); }


@ -2,5 +2,5 @@
fn main() {
let v: [mutable int] = ~[mutable 1, 2, 3];
for x: int in v { v.(0) = 10; log x; }
}
for x: int in v { v.(0) = 10; log x; }
}


@ -129,7 +129,7 @@ fn make_tests(cx: &cx) -> tests_and_conv_fn {
log #fmt("making tests from %s", cx.config.src_base);
let configport = mk_port[[u8]]();
let tests = ~[];
for file: str in fs::list_dir(cx.config.src_base) {
for file: str in fs::list_dir(cx.config.src_base) {
log #fmt("inspecting file %s", file);
if is_test(cx.config, file) {
tests += ~[make_test(cx, file, configport)];


@ -28,7 +28,7 @@ fn load_props(testfile: &str) -> test_props {
let compile_flags = option::none;
let pp_exact = option::none;
let no_valgrind = false;
for each ln: str in iter_header(testfile) {
for each ln: str in iter_header(testfile) {
alt parse_error_pattern(ln) {
option::some(ep) { error_patterns += ~[ep]; }
option::none. { }
@ -56,7 +56,7 @@ fn load_props(testfile: &str) -> test_props {
fn is_test_ignored(config: &config, testfile: &str) -> bool {
let found = false;
for each ln: str in iter_header(testfile) {
for each ln: str in iter_header(testfile) {
// FIXME: Can't return or break from iterator
found = found
|| parse_name_directive(ln, "xfail-" + config.stage_id);


@ -192,7 +192,7 @@ fn check_error_patterns(props: &test_props, testfile: &str,
let next_err_idx = 0u;
let next_err_pat = props.error_patterns.(next_err_idx);
for line: str in str::split(procres.stdout, '\n' as u8) {
for line: str in str::split(procres.stdout, '\n' as u8) {
if str::find(line, next_err_pat) > 0 {
log #fmt("found error pattern %s", next_err_pat);
next_err_idx += 1u;
@ -211,7 +211,7 @@ fn check_error_patterns(props: &test_props, testfile: &str,
fatal_procres(#fmt("error pattern '%s' not found!",
missing_patterns.(0)), procres);
} else {
for pattern: str in missing_patterns {
for pattern: str in missing_patterns {
error(#fmt("error pattern '%s' not found!", pattern));
}
fatal_procres("multiple error patterns not found", procres);


@ -1,4 +1,4 @@
// error-pattern:moop
use std;
import std::uint;
fn main() { for each i: uint in uint::range(0u, 10u) { fail "moop"; } }
fn main() { for each i: uint in uint::range(0u, 10u) { fail "moop"; } }


@ -7,8 +7,8 @@ iter foo() -> int { put 10; }
fn main() {
let x = true;
alt a {
a. { x = true; for each i: int in foo() { } }
a. { x = true; for each i: int in foo() { } }
b. { x = false; }
c. { x = false; }
}
}
}


@ -4,5 +4,5 @@ import std::ivec;
fn main(args: vec[str]) {
let vs: [str] = ~["hi", "there", "this", "is", "a", "vec"];
let vvs: [[str]] = ~[ivec::from_vec(args), vs];
for vs: [str] in vvs { for s: str in vs { log s; } }
}
for vs: [str] in vvs { for s: str in vs { log s; } }
}


@ -1,5 +1,5 @@
fn main() {
let sum = 0;
for x in ~[1, 2, 3, 4, 5] { sum += x; }
for x in ~[1, 2, 3, 4, 5] { sum += x; }
assert (sum == 15);
}
}


@ -1,4 +1,4 @@
fn iter_vec[T](v: &[T], f: &block(&T) ) { for x: T in v { f(x); } }
fn iter_vec[T](v: &[T], f: &block(&T) ) { for x: T in v { f(x); } }
fn main() {
let v = ~[1, 2, 3, 4, 5, 6, 7];


@ -1,4 +1,4 @@
fn iter_vec[T](v: &[T], f: &block(&T) ) { for x: T in v { f(x); } }
fn iter_vec[T](v: &[T], f: &block(&T) ) { for x: T in v { f(x); } }
fn main() {
let v = ~[1, 2, 3, 4, 5];


@ -6,7 +6,7 @@ fn main() {
assert (i == 10);
do { i += 1; if i == 20 { break; } } while i < 30
assert (i == 20);
for x: int in ~[1, 2, 3, 4, 5, 6] {
for x: int in ~[1, 2, 3, 4, 5, 6] {
if x == 3 { break; }
assert (x <= 3);
}
@ -14,8 +14,8 @@ fn main() {
while i < 10 { i += 1; if i % 2 == 0 { cont; } assert (i % 2 != 0); }
i = 0;
do { i += 1; if i % 2 == 0 { cont; } assert (i % 2 != 0); } while i < 10
for x: int in ~[1, 2, 3, 4, 5, 6] {
for x: int in ~[1, 2, 3, 4, 5, 6] {
if x % 2 == 0 { cont; }
assert (x % 2 != 0);
}
}
}


@ -1 +1 @@
fn main() { let x: [int] = ~[]; for i: int in x { fail "moop"; } }
fn main() { let x: [int] = ~[]; for i: int in x { fail "moop"; } }


@ -4,6 +4,6 @@ obj ob[K](k: K) {
iter foo() -> @{a: K} { put @{a: k}; }
}
fn x(o: &ob[str]) { for each i: @{a: str} in o.foo() { } }
fn x(o: &ob[str]) { for each i: @{a: str} in o.foo() { } }
fn main() { let o = ob[str]("hi" + "there"); x(o); }
fn main() { let o = ob[str]("hi" + "there"); x(o); }


@ -12,10 +12,10 @@ iter range(start: int, stop: int) -> int {
fn main() {
let a: [mutable int] = ~[mutable -1, -1, -1, -1, -1, -1, -1, -1];
let p: int = 0;
for each i: int in two() {
for each j: int in range(0, 2) {
for each i: int in two() {
for each j: int in range(0, 2) {
let tmp: int = 10 * i + j;
for each k: int in range(0, 2) { a.(p) = 10 * tmp + k; p += 1; }
for each k: int in range(0, 2) { a.(p) = 10 * tmp + k; p += 1; }
}
}
assert (a.(0) == 0);
@ -26,4 +26,4 @@ fn main() {
assert (a.(5) == 101);
assert (a.(6) == 110);
assert (a.(7) == 111);
}
}


@ -7,11 +7,11 @@ iter two() -> int { put 0; put 1; }
fn main() {
let a: [mutable int] = ~[mutable -1, -1, -1, -1];
let p: int = 0;
for each i: int in two() {
for each j: int in two() { a.(p) = 10 * i + j; p += 1; }
for each i: int in two() {
for each j: int in two() { a.(p) = 10 * i + j; p += 1; }
}
assert (a.(0) == 0);
assert (a.(1) == 1);
assert (a.(2) == 10);
assert (a.(3) == 11);
}
}


@ -9,7 +9,7 @@ iter pairs() -> {_0: int, _1: int} {
fn main() {
let i: int = 10;
let j: int = 0;
for each p: {_0: int, _1: int} in pairs() {
for each p: {_0: int, _1: int} in pairs() {
log p._0;
log p._1;
assert (p._0 + 10 == i);
@ -17,4 +17,4 @@ fn main() {
j = p._1;
}
assert (j == 45);
}
}


@ -4,7 +4,7 @@
// -*- rust -*-
fn main() {
let sum: int = 0;
for each i: int in first_ten() { log "main"; log i; sum = sum + i; }
for each i: int in first_ten() { log "main"; log i; sum = sum + i; }
log "sum";
log sum;
assert (sum == 45);
@ -13,4 +13,4 @@ fn main() {
iter first_ten() -> int {
let i: int = 0;
while i < 10 { log "first_ten"; put i; i = i + 1; }
}
}


@ -2,9 +2,9 @@
// -*- rust -*-
fn main() { for each i: int in first_ten() { log "main"; } }
fn main() { for each i: int in first_ten() { log "main"; } }
iter first_ten() -> int {
let i: int = 90;
while i < 100 { log "first_ten"; log i; put i; i = i + 1; }
}
}


@ -4,6 +4,6 @@
// Contrived example? No. It showed up in rustc's resolve pass.
iter i() { put (); }
fn foo[T](t: &T) { let x: int = 10; for each j: () in i() { log x; } }
fn foo[T](t: &T) { let x: int = 10; for each j: () in i() { log x; } }
fn main() { foo(0xdeadbeef_u); }
fn main() { foo(0xdeadbeef_u); }


@ -33,7 +33,7 @@ mod map_reduce {
tag ctrl_proto { find_reducer([u8], _chan[int]); mapper_done; }
fn start_mappers(ctrl: _chan[ctrl_proto], inputs: &[str]) {
for i: str in inputs { task::_spawn(bind map_task(ctrl, i)); }
for i: str in inputs { task::_spawn(bind map_task(ctrl, i)); }
}
fn map_task(ctrl: _chan[ctrl_proto], input: str) {
@ -93,4 +93,4 @@ mod map_reduce {
fn main() {
map_reduce::map_reduce(~["../src/test/run-pass/hashmap-memory.rs"]);
}
}


@ -8,6 +8,6 @@ iter range(a: int, b: int) -> int {
fn main() {
let sum: int = 0;
for each x: int in range(0, 100) { sum += x; }
for each x: int in range(0, 100) { sum += x; }
log sum;
}
}


@ -2,6 +2,6 @@
iter x() -> int { }
fn f() -> bool { for each i: int in x() { ret true; } ret false; }
fn f() -> bool { for each i: int in x() { ret true; } ret false; }
fn main(args: vec[str]) { f(); }
fn main(args: vec[str]) { f(); }


@ -3,12 +3,12 @@
fn main() {
let x = ~[1, 2, 3];
let y = 0;
for i: int in x { log i; y += i; }
for i: int in x { log i; y += i; }
log y;
assert (y == 6);
let s = "hello there";
let i: int = 0;
for c: u8 in s {
for c: u8 in s {
if i == 0 { assert (c == 'h' as u8); }
if i == 1 { assert (c == 'e' as u8); }
if i == 2 { assert (c == 'l' as u8); }
@ -21,4 +21,4 @@ fn main() {
log c;
}
assert (i == 11);
}
}


@ -1,6 +1,6 @@
fn main() {
let x = ~[10, 20, 30];
let sum = 0;
for x in x { sum += x; }
for x in x { sum += x; }
assert (sum == 60);
}
}


@ -4,7 +4,7 @@
// -*- rust -*-
fn len(v: [mutable? int]) -> uint {
let i = 0u;
for x: int in v { i += 1u; }
for x: int in v { i += 1u; }
ret i;
}
@ -13,4 +13,4 @@ fn main() {
log len(v0);
let v1 = ~[mutable 1, 2, 3, 4, 5];
log len(v1);
}
}


@ -10,8 +10,8 @@ fn test(x: bool, foo: @{x: int, y: int, z: int}) -> int {
fn main() {
let x = @{x: 1, y: 2, z: 3};
for each i: uint in uint::range(0u, 10000u) {
for each i: uint in uint::range(0u, 10000u) {
assert (test(true, x) == 2);
}
assert (test(false, x) == 5);
}
}


@ -10,7 +10,7 @@ fn foo(src: uint) {
alt some(src) {
some(src_id) {
for each i: uint in uint::range(0u, 10u) {
for each i: uint in uint::range(0u, 10u) {
let yyy = src_id;
assert (yyy == 0u);
}
@ -19,4 +19,4 @@ fn foo(src: uint) {
}
}
fn main() { foo(0u); }
fn main() { foo(0u); }


@ -31,7 +31,7 @@ fn test_grow() {
let myport: port[record] = port();
let mychan = chan(myport);
let val: record = {val1: 0u32, val2: 0u32, val3: 0u32};
for each i: uint in uint::range(0u, 100u) { mychan <| val; }
for each i: uint in uint::range(0u, 100u) { mychan <| val; }
}
@ -48,8 +48,8 @@ fn test_shrink2() {
let myport: port[record] = port();
let mychan = chan(myport);
let val: record = {val1: 0u32, val2: 0u32, val3: 0u32};
for each i: uint in uint::range(0u, 100u) { mychan <| val; }
for each i: uint in uint::range(0u, 100u) { let x; myport |> x; }
for each i: uint in uint::range(0u, 100u) { mychan <| val; }
for each i: uint in uint::range(0u, 100u) { let x; myport |> x; }
}
@ -57,7 +57,7 @@ fn test_shrink2() {
fn test_rotate() {
let myport: port[record] = port();
let mychan = chan(myport);
for each i: uint in uint::range(0u, 100u) {
for each i: uint in uint::range(0u, 100u) {
let val = {val1: i as u32, val2: i as u32, val3: i as u32};
mychan <| val;
let x;
@ -74,13 +74,13 @@ fn test_rotate() {
fn test_rotate_grow() {
let myport: port[record] = port();
let mychan = chan(myport);
for each j: uint in uint::range(0u, 10u) {
for each i: uint in uint::range(0u, 10u) {
for each j: uint in uint::range(0u, 10u) {
for each i: uint in uint::range(0u, 10u) {
let val: record =
{val1: i as u32, val2: i as u32, val3: i as u32};
mychan <| val;
}
for each i: uint in uint::range(0u, 10u) {
for each i: uint in uint::range(0u, 10u) {
let x;
myport |> x;
assert (x.val1 == i as u32);


@ -49,7 +49,7 @@ fn test00() {
}
}
for t: task_id in tasks { task::join_id(t); }
for t: task_id in tasks { task::join_id(t); }
log "Completed: Final number is: ";
assert (sum ==
@ -137,7 +137,7 @@ fn test06() {
i = i + 1; tasks += [task::_spawn(bind test06_start(i))]; }
for t: task_id in tasks { task::join_id(t); }
for t: task_id in tasks { task::join_id(t); }
}


@ -6,7 +6,7 @@ iter range(lo: uint, hi: uint) -> uint {
}
fn create_index[T](index: &[{a: T, b: uint}], hash_fn: fn(&T) -> uint ) {
for each i: uint in range(0u, 256u) { let bucket: [T] = ~[]; }
for each i: uint in range(0u, 256u) { let bucket: [T] = ~[]; }
}
fn main() { }
fn main() { }


@ -31,7 +31,7 @@ fn main() {
fn check_str_eq(a: str, b: str) {
let i: int = 0;
for ab: u8 in a {
for ab: u8 in a {
log i;
log ab;
let bb: u8 = b.(i);
@ -45,4 +45,4 @@ fn main() {
check_str_eq(japan, japan_e);
check_str_eq(uzbekistan, uzbekistan_e);
check_str_eq(austria, austria_e);
}
}


@ -50,7 +50,7 @@ fn test_simple() {
sort::quick_sort(lteq, names);
let pairs = ivec::zip(expected, ivec::from_mut(names));
for p: {_0: int, _1: int} in pairs {
for p: {_0: int, _1: int} in pairs {
log #fmt("%d %d", p._0, p._1);
assert (p._0 == p._1);
}


@ -74,7 +74,7 @@ fn test() {
// Test that it works when accepting the message all at once
let sh = sha1::mk_sha1();
for t: test in tests {
for t: test in tests {
sh.input_str(t.input);
let out = sh.result();
check_vec_eq(t.output, out);
@ -83,7 +83,7 @@ fn test() {
// Test that it works when accepting the message in pieces
for t: test in tests {
for t: test in tests {
let len = str::byte_len(t.input);
let left = len;
while left > 0u {

Some files were not shown because too many files have changed in this diff.