migrate many for loops to foreach
parent 5f59c46e0f
commit 1fc4db2d08
255 changed files with 1292 additions and 1294 deletions
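
The change is mechanical throughout the tree: loops written against the old closure-based `advance` adaptor become the transitional `foreach ... in ...` form over the same iterator. A minimal before/after sketch of the pattern, taken from the tutorial examples touched below (illustrative only; `xs` stands in for whatever is being iterated):

~~~
let xs = [2, 3, 5, 7, 11, 13, 17];

// old form: the `advance` adaptor drives a closure for each element
for xs.iter().advance |x| {
    println(x.to_str())
}

// new form: the transitional `foreach` loop over the same iterator
foreach x in xs.iter() {
    println(x.to_str())
}
~~~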
@@ -1792,11 +1792,11 @@ msgstr ""
 msgid ""
 "~~~~ {.xfail-test}\n"
 "fn iter<T>(seq: &[T], f: &fn(T)) {\n"
-" for seq.iter().advance |elt| { f(elt); }\n"
+" foreach elt in seq.iter() { f(elt); }\n"
 "}\n"
 "fn map<T, U>(seq: &[T], f: &fn(T) -> U) -> ~[U] {\n"
 " let mut acc = ~[];\n"
-" for seq.iter().advance |elt| { acc.push(f(elt)); }\n"
+" foreach elt in seq.iter() { acc.push(f(elt)); }\n"
 " acc\n"
 "}\n"
 "~~~~\n"
@@ -4570,7 +4570,7 @@ msgstr ""
 #: doc/rust.md:2405
 #, no-wrap
 msgid ""
-"for v.iter().advance |e| {\n"
+"foreach e in v.iter() {\n"
 " bar(*e);\n"
 "}\n"
 "~~~~\n"
@@ -376,7 +376,7 @@ msgstr ""
 #, no-wrap
 msgid ""
 "// print out all the elements in the vector\n"
-"for xs.iter().advance |x| {\n"
+"foreach x in xs.iter() {\n"
 " println(x.to_str())\n"
 "}\n"
 msgstr ""
@@ -386,7 +386,7 @@ msgstr ""
 #, no-wrap
 msgid ""
 "// print out all but the first 3 elements in the vector\n"
-"for xs.iter().skip(3).advance |x| {\n"
+"foreach x in xs.iter().skip(3) {\n"
 " println(x.to_str())\n"
 "}\n"
 "~~~\n"
@@ -418,7 +418,7 @@ msgstr ""
 #, no-wrap
 msgid ""
 "// print out the pairs of elements up to (&3, &\"baz\")\n"
-"for it.advance |(x, y)| {\n"
+"foreach (x, y) in it {\n"
 " println(fmt!(\"%d %s\", *x, *y));\n"
 msgstr ""
 
@@ -487,7 +487,7 @@ msgid ""
 " pub fn from_iterator(iterator: &mut T) -> ~[A] {\n"
 " let (lower, _) = iterator.size_hint();\n"
 " let mut xs = with_capacity(lower);\n"
-" for iterator.advance |x| {\n"
+" foreach x in iterator {\n"
 " xs.push(x);\n"
 " }\n"
 " xs\n"
@@ -587,7 +587,7 @@ msgstr ""
 #, no-wrap
 msgid ""
 "// prints `5`, `4` and `3`\n"
-"for it.invert().advance |&x| {\n"
+"foreach &x in it.invert() {\n"
 " println(fmt!(\"%?\", x))\n"
 "}\n"
 "~~~\n"
@@ -587,7 +587,7 @@ msgstr ""
 #, no-wrap
 msgid ""
 " let mut final_res = 0f64;\n"
-" for futures.mut_iter().advance |ft| {\n"
+" foreach ft in futures.mut_iter() {\n"
 " final_res += ft.get();\n"
 " }\n"
 " println(fmt!(\"^2/6 is not far from : %?\", final_res));\n"
@@ -2501,7 +2501,7 @@ msgstr ""
 msgid ""
 "// Iterate over a vector, obtaining a pointer to each element\n"
 "// (`for` is explained in the next section)\n"
-"for crayons.iter().advance |crayon| {\n"
+"foreach crayon in crayons.iter() {\n"
 " let delicious_crayon_wax = unwrap_crayon(*crayon);\n"
 " eat_crayon_wax(delicious_crayon_wax);\n"
 "}\n"
@@ -3101,7 +3101,7 @@ msgid ""
 "~~~~\n"
 "fn map<T, U>(vector: &[T], function: &fn(v: &T) -> U) -> ~[U] {\n"
 " let mut accumulator = ~[];\n"
-" for vector.iter().advance |element| {\n"
+" foreach element in vector.iter() {\n"
 " accumulator.push(function(element));\n"
 " }\n"
 " return accumulator;\n"
@@ -3570,7 +3570,7 @@ msgid ""
 "~~~~\n"
 "# trait Printable { fn print(&self); }\n"
 "fn print_all<T: Printable>(printable_things: ~[T]) {\n"
-" for printable_things.iter().advance |thing| {\n"
+" foreach thing in printable_things.iter() {\n"
 " thing.print();\n"
 " }\n"
 "}\n"
@@ -3650,7 +3650,7 @@ msgstr ""
 #, no-wrap
 msgid ""
 "fn draw_all<T: Drawable>(shapes: ~[T]) {\n"
-" for shapes.iter().advance |shape| { shape.draw(); }\n"
+" foreach shape in shapes.iter() { shape.draw(); }\n"
 "}\n"
 "# let c: Circle = new_circle();\n"
 "# draw_all(~[c]);\n"
@@ -3673,7 +3673,7 @@ msgid ""
 "~~~~\n"
 "# trait Drawable { fn draw(&self); }\n"
 "fn draw_all(shapes: &[@Drawable]) {\n"
-" for shapes.iter().advance |shape| { shape.draw(); }\n"
+" foreach shape in shapes.iter() { shape.draw(); }\n"
 "}\n"
 "~~~~\n"
 msgstr ""
@@ -880,11 +880,11 @@ the function name.
 
 ~~~~ {.xfail-test}
 fn iter<T>(seq: &[T], f: &fn(T)) {
-for seq.iter().advance |elt| { f(elt); }
+foreach elt in seq.iter() { f(elt); }
 }
 fn map<T, U>(seq: &[T], f: &fn(T) -> U) -> ~[U] {
 let mut acc = ~[];
-for seq.iter().advance |elt| { acc.push(f(elt)); }
+foreach elt in seq.iter() { acc.push(f(elt)); }
 acc
 }
 ~~~~
@@ -2378,7 +2378,7 @@ An example of a for loop over the contents of a vector:
 
 let v: &[foo] = &[a, b, c];
 
-for v.iter().advance |e| {
+foreach e in v.iter() {
 bar(*e);
 }
 ~~~~
@@ -164,20 +164,19 @@ dropped when they become unnecessary.
 
 ## For loops
 
-The `for` loop syntax is currently in transition, and will switch from the old
-closure-based iteration protocol to iterator objects. For now, the `advance`
-adaptor is required as a compatibility shim to use iterators with for loops.
+The `foreach` keyword is transitional, and is going to replace the current
+obsolete `for` loop.
 
 ~~~
 let xs = [2, 3, 5, 7, 11, 13, 17];
 
 // print out all the elements in the vector
-for xs.iter().advance |x| {
+foreach x in xs.iter() {
 println(x.to_str())
 }
 
 // print out all but the first 3 elements in the vector
-for xs.iter().skip(3).advance |x| {
+foreach x in xs.iter().skip(3) {
 println(x.to_str())
 }
 ~~~
@@ -193,7 +192,7 @@ let ys = ["foo", "bar", "baz", "foobar"];
 let mut it = xs.iter().zip(ys.iter());
 
 // print out the pairs of elements up to (&3, &"baz")
-for it.advance |(x, y)| {
+foreach (x, y) in it {
 printfln!("%d %s", *x, *y);
 
 if *x == 3 {
@@ -230,7 +229,7 @@ impl<A, T: Iterator<A>> FromIterator<A, T> for ~[A] {
 pub fn from_iterator(iterator: &mut T) -> ~[A] {
 let (lower, _) = iterator.size_hint();
 let mut xs = with_capacity(lower);
-for iterator.advance |x| {
+foreach x in iterator {
 xs.push(x);
 }
 xs
@@ -301,7 +300,7 @@ printfln!("%?", it.next()); // prints `Some(&2)`
 printfln!("%?", it.next_back()); // prints `Some(&6)`
 
 // prints `5`, `4` and `3`
-for it.invert().advance |&x| {
+foreach &x in it.invert() {
 printfln!("%?", x)
 }
 ~~~
@@ -320,7 +319,7 @@ let mut it = xs.iter().chain_(ys.iter()).transform(|&x| x * 2);
 printfln!("%?", it.next()); // prints `Some(2)`
 
 // prints `16`, `14`, `12`, `10`, `8`, `6`, `4`
-for it.invert().advance |x| {
+foreach x in it.invert() {
 printfln!("%?", x);
 }
 ~~~
@@ -327,7 +327,7 @@ fn main() {
 let mut futures = vec::from_fn(1000, |ind| do extra::future::spawn { partial_sum(ind) });
 
 let mut final_res = 0f64;
-for futures.mut_iter().advance |ft| {
+foreach ft in futures.mut_iter() {
 final_res += ft.get();
 }
 println(fmt!("π^2/6 is not far from : %?", final_res));
@@ -1398,7 +1398,7 @@ assert!(!crayons.is_empty());
 
 // Iterate over a vector, obtaining a pointer to each element
 // (`for` is explained in the next section)
-for crayons.iter().advance |crayon| {
+foreach crayon in crayons.iter() {
 let delicious_crayon_wax = unwrap_crayon(*crayon);
 eat_crayon_wax(delicious_crayon_wax);
 }
@@ -1749,7 +1749,7 @@ of `vector`:
 ~~~~
 fn map<T, U>(vector: &[T], function: &fn(v: &T) -> U) -> ~[U] {
 let mut accumulator = ~[];
-for vector.iter().advance |element| {
+foreach element in vector.iter() {
 accumulator.push(function(element));
 }
 return accumulator;
@@ -2027,7 +2027,7 @@ generic types.
 ~~~~
 # trait Printable { fn print(&self); }
 fn print_all<T: Printable>(printable_things: ~[T]) {
-for printable_things.iter().advance |thing| {
+foreach thing in printable_things.iter() {
 thing.print();
 }
 }
@@ -2073,7 +2073,7 @@ However, consider this function:
 trait Drawable { fn draw(&self); }
 
 fn draw_all<T: Drawable>(shapes: ~[T]) {
-for shapes.iter().advance |shape| { shape.draw(); }
+foreach shape in shapes.iter() { shape.draw(); }
 }
 # let c: Circle = new_circle();
 # draw_all(~[c]);
@@ -2088,7 +2088,7 @@ an _object_.
 ~~~~
 # trait Drawable { fn draw(&self); }
 fn draw_all(shapes: &[@Drawable]) {
-for shapes.iter().advance |shape| { shape.draw(); }
+foreach shape in shapes.iter() { shape.draw(); }
 }
 ~~~~
 
@@ -243,7 +243,7 @@ pub fn make_tests(config: &config) -> ~[test::TestDescAndFn] {
 config.src_base.to_str());
 let mut tests = ~[];
 let dirs = os::list_dir_path(&config.src_base);
-for dirs.iter().advance |file| {
+foreach file in dirs.iter() {
 let file = file.clone();
 debug!("inspecting file %s", file.to_str());
 if is_test(config, &file) {
@@ -271,11 +271,11 @@ pub fn is_test(config: &config, testfile: &Path) -> bool {
 
 let mut valid = false;
 
-for valid_extensions.iter().advance |ext| {
+foreach ext in valid_extensions.iter() {
 if name.ends_with(*ext) { valid = true; }
 }
 
-for invalid_prefixes.iter().advance |pre| {
+foreach pre in invalid_prefixes.iter() {
 if name.starts_with(*pre) { valid = false; }
 }
 
@@ -56,7 +56,7 @@ pub fn run(lib_path: &str,
 err_fd: None
 });
 
-for input.iter().advance |input| {
+foreach input in input.iter() {
 proc.input().write_str(*input);
 }
 let output = proc.finish_with_output();
@@ -283,7 +283,7 @@ fn run_debuginfo_test(config: &config, props: &TestProps, testfile: &Path) {
 // check if each line in props.check_lines appears in the
 // output (in order)
 let mut i = 0u;
-for ProcRes.stdout.line_iter().advance |line| {
+foreach line in ProcRes.stdout.line_iter() {
 if check_lines[i].trim() == line.trim() {
 i += 1u;
 }
@@ -313,7 +313,7 @@ fn check_error_patterns(props: &TestProps,
 let mut next_err_idx = 0u;
 let mut next_err_pat = &props.error_patterns[next_err_idx];
 let mut done = false;
-for ProcRes.stderr.line_iter().advance |line| {
+foreach line in ProcRes.stderr.line_iter() {
 if line.contains(*next_err_pat) {
 debug!("found error pattern %s", *next_err_pat);
 next_err_idx += 1u;
@@ -333,7 +333,7 @@ fn check_error_patterns(props: &TestProps,
 fatal_ProcRes(fmt!("error pattern '%s' not found!",
 missing_patterns[0]), ProcRes);
 } else {
-for missing_patterns.iter().advance |pattern| {
+foreach pattern in missing_patterns.iter() {
 error(fmt!("error pattern '%s' not found!", *pattern));
 }
 fatal_ProcRes(~"multiple error patterns not found", ProcRes);
@@ -386,9 +386,9 @@ fn check_expected_errors(expected_errors: ~[errors::ExpectedError],
 // filename:line1:col1: line2:col2: *warning:* msg
 // where line1:col1: is the starting point, line2:col2:
 // is the ending point, and * represents ANSI color codes.
-for ProcRes.stderr.line_iter().advance |line| {
+foreach line in ProcRes.stderr.line_iter() {
 let mut was_expected = false;
-for expected_errors.iter().enumerate().advance |(i, ee)| {
+foreach (i, ee) in expected_errors.iter().enumerate() {
 if !found_flags[i] {
 debug!("prefix=%s ee.kind=%s ee.msg=%s line=%s",
 prefixes[i], ee.kind, ee.msg, line);
@@ -559,7 +559,7 @@ fn compose_and_run_compiler(
 let extra_link_args = ~[~"-L",
 aux_output_dir_name(config, testfile).to_str()];
 
-for props.aux_builds.iter().advance |rel_ab| {
+foreach rel_ab in props.aux_builds.iter() {
 let abs_ab = config.aux_base.push_rel(&Path(*rel_ab));
 let aux_args =
 make_compile_args(config, props, ~[~"--lib"] + extra_link_args,
@@ -786,7 +786,7 @@ fn _arm_exec_compiled_test(config: &config, props: &TestProps,
 runargs.push(fmt!("%s", config.adb_test_dir));
 runargs.push(fmt!("%s", prog_short));
 
-for args.args.iter().advance |tv| {
+foreach tv in args.args.iter() {
 runargs.push(tv.to_owned());
 }
 
@@ -803,7 +803,7 @@ fn _arm_exec_compiled_test(config: &config, props: &TestProps,
 Some(~""));
 
 let mut exitcode : int = 0;
-for exitcode_out.iter().advance |c| {
+foreach c in exitcode_out.iter() {
 if !c.is_digit() { break; }
 exitcode = exitcode * 10 + match c {
 '0' .. '9' => c as int - ('0' as int),
@@ -852,7 +852,7 @@ fn _arm_push_aux_shared_library(config: &config, testfile: &Path) {
 let tstr = aux_output_dir_name(config, testfile).to_str();
 
 let dirs = os::list_dir_path(&Path(tstr));
-for dirs.iter().advance |file| {
+foreach file in dirs.iter() {
 
 if (file.filetype() == Some(~".so")) {
 
@@ -789,7 +789,7 @@ mod tests {
 }
 
 // Wait for children to pass their asserts
-for children.iter().advance |r| {
+foreach r in children.iter() {
 r.recv();
 }
 
@@ -206,7 +206,7 @@ impl<'self> FromBase64 for &'self [u8] {
 let mut modulus = 0;
 
 let mut it = self.iter();
-for it.advance |&byte| {
+foreach &byte in it {
 let ch = byte as char;
 let val = byte as u32;
 
@@ -398,7 +398,7 @@ impl Bitv {
 match self.rep {
 Small(ref b) => b.is_true(self.nbits),
 _ => {
-for self.iter().advance |i| { if !i { return false; } }
+foreach i in self.iter() { if !i { return false; } }
 true
 }
 }
@@ -419,7 +419,7 @@ impl Bitv {
 match self.rep {
 Small(ref b) => b.is_false(self.nbits),
 Big(_) => {
-for self.iter().advance |i| { if i { return false; } }
+foreach i in self.iter() { if i { return false; } }
 true
 }
 }
@@ -483,7 +483,7 @@ impl Bitv {
 */
 pub fn to_str(&self) -> ~str {
 let mut rs = ~"";
-for self.iter().advance |i| {
+foreach i in self.iter() {
 if i {
 rs.push_char('1');
 } else {
@@ -684,7 +684,7 @@ impl BitvSet {
 if self.capacity() < other.capacity() {
 self.bitv.storage.grow(other.capacity() / uint::bits, &0);
 }
-for other.bitv.storage.iter().enumerate().advance |(i, &w)| {
+foreach (i, &w) in other.bitv.storage.iter().enumerate() {
 let old = self.bitv.storage[i];
 let new = f(old, w);
 self.bitv.storage[i] = new;
@@ -883,12 +883,12 @@ impl BitvSet {
 let min = num::min(len1, len2);
 
 /* only one of these loops will execute and that's the point */
-for self.bitv.storage.slice(min, len1).iter().enumerate().advance |(i, &w)| {
+foreach (i, &w) in self.bitv.storage.slice(min, len1).iter().enumerate() {
 if !f(true, (i + min) * uint::bits, w) {
 return false;
 }
 }
-for other.bitv.storage.slice(min, len2).iter().enumerate().advance |(i, &w)| {
+foreach (i, &w) in other.bitv.storage.slice(min, len2).iter().enumerate() {
 if !f(false, (i + min) * uint::bits, w) {
 return false;
 }
@@ -1308,7 +1308,7 @@ mod tests {
 let bools = [true, false, true, true];
 let bitv = from_bools(bools);
 
-for bitv.iter().zip(bools.iter()).advance |(act, &ex)| {
+foreach (act, &ex) in bitv.iter().zip(bools.iter()) {
 assert_eq!(ex, act);
 }
 }
@@ -1610,7 +1610,7 @@ mod tests {
 let bitv = Bitv::new(uint::bits, false);
 do b.iter {
 let mut sum = 0;
-for bitv.iter().advance |pres| {
+foreach pres in bitv.iter() {
 sum += pres as uint;
 }
 }
@@ -1621,7 +1621,7 @@ mod tests {
 let bitv = Bitv::new(BENCH_BITS, false);
 do b.iter {
 let mut sum = 0;
-for bitv.iter().advance |pres| {
+foreach pres in bitv.iter() {
 sum += pres as uint;
 }
 }
@@ -1633,7 +1633,7 @@ mod tests {
 |idx| {idx % 3 == 0}));
 do b.iter {
 let mut sum = 0;
-for bitv.iter().advance |idx| {
+foreach idx in bitv.iter() {
 sum += idx;
 }
 }
@@ -91,7 +91,7 @@ mod bench {
 let mut rng = rand::XorShiftRng::new();
 let mut keys = vec::from_fn(n, |_| rng.gen::<uint>() % n);
 
-for keys.iter().advance() |k| {
+foreach k in keys.iter() {
 map.insert(*k, 1);
 }
 
@@ -45,7 +45,7 @@ pub trait Digest {
 
 fn to_hex(rr: &[u8]) -> ~str {
 let mut s = ~"";
-for rr.iter().advance() |b| {
+foreach b in rr.iter() {
 let hex = uint::to_str_radix(*b as uint, 16u);
 if hex.len() == 1 {
 s.push_char('0');
@@ -53,7 +53,7 @@ pub struct Sha1 {
 
 fn add_input(st: &mut Sha1, msg: &[u8]) {
 assert!((!st.computed));
-for msg.iter().advance |element| {
+foreach element in msg.iter() {
 st.msg_block[st.msg_block_idx] = *element;
 st.msg_block_idx += 1;
 st.len_low += 8;
@@ -150,7 +150,7 @@ fn circular_shift(bits: u32, word: u32) -> u32 {
 fn mk_result(st: &mut Sha1, rs: &mut [u8]) {
 if !st.computed { pad_msg(st); st.computed = true; }
 let mut i = 0;
-for st.h.mut_iter().advance |ptr_hpart| {
+foreach ptr_hpart in st.h.mut_iter() {
 let hpart = *ptr_hpart;
 rs[i] = (hpart >> 24u32 & 0xFFu32) as u8;
 rs[i+1] = (hpart >> 16u32 & 0xFFu32) as u8;
@@ -334,7 +334,7 @@ mod tests {
 let mut out = [0u8, ..20];
 
 let mut sh = ~Sha1::new();
-for tests.iter().advance |t| {
+foreach t in tests.iter() {
 (*sh).input_str(t.input);
 sh.result(out);
 assert!(t.output.as_slice() == out);
@@ -348,7 +348,7 @@ mod tests {
 
 
 // Test that it works when accepting the message in pieces
-for tests.iter().advance |t| {
+foreach t in tests.iter() {
 let len = t.input.len();
 let mut left = len;
 while left > 0u {
@@ -939,7 +939,7 @@ mod tests {
 
 fn test_hash<D: Digest>(sh: &mut D, tests: &[Test]) {
 // Test that it works when accepting the message all at once
-for tests.iter().advance() |t| {
+foreach t in tests.iter() {
 sh.input_str(t.input);
 
 let out_str = sh.result_str();
@@ -949,7 +949,7 @@ mod tests {
 }
 
 // Test that it works when accepting the message in pieces
-for tests.iter().advance() |t| {
+foreach t in tests.iter() {
 let len = t.input.len();
 let mut left = len;
 while left > 0u {
@@ -548,7 +548,7 @@ impl<A, T: Iterator<A>> FromIterator<A, T> for DList<A> {
 
 impl<A, T: Iterator<A>> Extendable<A, T> for DList<A> {
 fn extend(&mut self, iterator: &mut T) {
-for iterator.advance |elt| { self.push_back(elt); }
+foreach elt in *iterator { self.push_back(elt); }
 }
 }
 
@@ -688,7 +688,7 @@ mod tests {
 check_links(&m);
 let sum = v + u;
 assert_eq!(sum.len(), m.len());
-for sum.consume_iter().advance |elt| {
+foreach elt in sum.consume_iter() {
 assert_eq!(m.pop_front(), Some(elt))
 }
 }
@@ -712,7 +712,7 @@ mod tests {
 check_links(&m);
 let sum = u + v;
 assert_eq!(sum.len(), m.len());
-for sum.consume_iter().advance |elt| {
+foreach elt in sum.consume_iter() {
 assert_eq!(m.pop_front(), Some(elt))
 }
 }
@@ -743,7 +743,7 @@ mod tests {
 #[test]
 fn test_iterator() {
 let m = generate_test();
-for m.iter().enumerate().advance |(i, elt)| {
+foreach (i, elt) in m.iter().enumerate() {
 assert_eq!(i as int, *elt);
 }
 let mut n = DList::new();
@@ -791,7 +791,7 @@ mod tests {
 #[test]
 fn test_rev_iter() {
 let m = generate_test();
-for m.rev_iter().enumerate().advance |(i, elt)| {
+foreach (i, elt) in m.rev_iter().enumerate() {
 assert_eq!((6 - i) as int, *elt);
 }
 let mut n = DList::new();
@@ -808,7 +808,7 @@ mod tests {
 fn test_mut_iter() {
 let mut m = generate_test();
 let mut len = m.len();
-for m.mut_iter().enumerate().advance |(i, elt)| {
+foreach (i, elt) in m.mut_iter().enumerate() {
 assert_eq!(i as int, *elt);
 len -= 1;
 }
@@ -900,7 +900,7 @@ mod tests {
 #[test]
 fn test_mut_rev_iter() {
 let mut m = generate_test();
-for m.mut_rev_iter().enumerate().advance |(i, elt)| {
+foreach (i, elt) in m.mut_rev_iter().enumerate() {
 assert_eq!((6-i) as int, *elt);
 }
 let mut n = DList::new();
@@ -970,7 +970,7 @@ mod tests {
 check_links(&m);
 
 let mut i = 0u;
-for m.consume_iter().zip(v.iter()).advance |(a, &b)| {
+foreach (a, &b) in m.consume_iter().zip(v.iter()) {
 i += 1;
 assert_eq!(a, b);
 }
@@ -419,7 +419,7 @@ mod test {
 fn make_file(path : &Path, contents: &[~str]) {
 let file = io::file_writer(path, [io::Create, io::Truncate]).unwrap();
 
-for contents.iter().advance |str| {
+foreach str in contents.iter() {
 file.write_str(*str);
 file.write_char('\n');
 }
@@ -446,13 +446,13 @@ mod test {
 |i| fmt!("tmp/lib-fileinput-test-fileinput-read-byte-%u.tmp", i)), true);
 
 // 3 files containing 0\n, 1\n, and 2\n respectively
-for filenames.iter().enumerate().advance |(i, filename)| {
+foreach (i, filename) in filenames.iter().enumerate() {
 make_file(filename.get_ref(), [fmt!("%u", i)]);
 }
 
 let fi = FileInput::from_vec(filenames.clone());
 
-for "012".iter().enumerate().advance |(line, c)| {
+foreach (line, c) in "012".iter().enumerate() {
 assert_eq!(fi.read_byte(), c as int);
 assert_eq!(fi.state().line_num, line);
 assert_eq!(fi.state().line_num_file, 0);
@@ -476,7 +476,7 @@ mod test {
 |i| fmt!("tmp/lib-fileinput-test-fileinput-read-%u.tmp", i)), true);
 
 // 3 files containing 1\n, 2\n, and 3\n respectively
-for filenames.iter().enumerate().advance |(i, filename)| {
+foreach (i, filename) in filenames.iter().enumerate() {
 make_file(filename.get_ref(), [fmt!("%u", i)]);
 }
 
@@ -496,7 +496,7 @@ mod test {
 3,
 |i| fmt!("tmp/lib-fileinput-test-input-vec-%u.tmp", i)), true);
 
-for filenames.iter().enumerate().advance |(i, filename)| {
+foreach (i, filename) in filenames.iter().enumerate() {
 let contents =
 vec::from_fn(3, |j| fmt!("%u %u", i, j));
 make_file(filename.get_ref(), contents);
@@ -517,7 +517,7 @@ mod test {
 3,
 |i| fmt!("tmp/lib-fileinput-test-input-vec-state-%u.tmp", i)),true);
 
-for filenames.iter().enumerate().advance |(i, filename)| {
+foreach (i, filename) in filenames.iter().enumerate() {
 let contents =
 vec::from_fn(3, |j| fmt!("%u %u", i, j + 1));
 make_file(filename.get_ref(), contents);
@@ -583,7 +583,7 @@ mod test {
 3,
 |i| fmt!("tmp/lib-fileinput-test-next-file-%u.tmp", i)),true);
 
-for filenames.iter().enumerate().advance |(i, filename)| {
+foreach (i, filename) in filenames.iter().enumerate() {
 let contents =
 vec::from_fn(3, |j| fmt!("%u %u", i, j + 1));
 make_file(filename.get_ref(), contents);
@@ -307,7 +307,7 @@ pub fn getopts(args: &[~str], opts: &[Opt]) -> Result {
 }
 }
 let mut name_pos = 0;
-for names.iter().advance() |nm| {
+foreach nm in names.iter() {
 name_pos += 1;
 let optid = match find_opt(opts, (*nm).clone()) {
 Some(id) => id,
@@ -392,7 +392,7 @@ pub fn opt_count(mm: &Matches, nm: &str) -> uint {
 
 /// Returns true if any of several options were matched
 pub fn opts_present(mm: &Matches, names: &[~str]) -> bool {
-for names.iter().advance |nm| {
+foreach nm in names.iter() {
 match find_opt(mm.opts, mkname(*nm)) {
 Some(id) if !mm.vals[id].is_empty() => return true,
 _ => (),
@@ -422,7 +422,7 @@ pub fn opt_str(mm: &Matches, nm: &str) -> ~str {
 * option took an argument
 */
 pub fn opts_str(mm: &Matches, names: &[~str]) -> ~str {
-for names.iter().advance |nm| {
+foreach nm in names.iter() {
 match opt_val(mm, *nm) {
 Some(Val(ref s)) => return (*s).clone(),
 _ => ()
@@ -441,7 +441,7 @@ pub fn opts_str(mm: &Matches, names: &[~str]) -> ~str {
 pub fn opt_strs(mm: &Matches, nm: &str) -> ~[~str] {
 let mut acc: ~[~str] = ~[];
 let r = opt_vals(mm, nm);
-for r.iter().advance |v| {
+foreach v in r.iter() {
 match *v { Val(ref s) => acc.push((*s).clone()), _ => () }
 }
 acc
@@ -671,7 +671,7 @@ pub mod groups {
 
 // Normalize desc to contain words separated by one space character
 let mut desc_normalized_whitespace = ~"";
-for desc.word_iter().advance |word| {
+foreach word in desc.word_iter() {
 desc_normalized_whitespace.push_str(word);
 desc_normalized_whitespace.push_char(' ');
 }
@@ -27,7 +27,7 @@ internal iterator by calling the `advance` method. For example:
 let xs = [0u, 1, 2, 3, 4, 5];
 let ys = [30, 40, 50, 60];
 let mut it = xs.iter().chain(ys.iter());
-for it.advance |&x: &uint| {
+foreach &x: &uint in it {
 println(x.to_str());
 }
 ~~~
@@ -57,7 +57,7 @@ pub struct Error {
 
 fn escape_str(s: &str) -> ~str {
 let mut escaped = ~"\"";
-for s.iter().advance |c| {
+foreach c in s.iter() {
 match c {
 '"' => escaped.push_str("\\\""),
 '\\' => escaped.push_str("\\\\"),
@@ -923,7 +923,7 @@ impl serialize::Decoder for Decoder {
 fn read_char(&mut self) -> char {
 let mut v = ~[];
 let s = self.read_str();
-for s.iter().advance |c| { v.push(c) }
+foreach c in s.iter() { v.push(c) }
 if v.len() != 1 { fail!("string must have one character") }
 v[0]
 }
@@ -949,7 +949,7 @@ impl serialize::Decoder for Decoder {
 let name = match self.stack.pop() {
 String(s) => s,
 List(list) => {
-for list.consume_rev_iter().advance |v| {
+foreach v in list.consume_rev_iter() {
 self.stack.push(v);
 }
 match self.stack.pop() {
@@ -1067,7 +1067,7 @@ impl serialize::Decoder for Decoder {
 let len = match self.stack.pop() {
 List(list) => {
 let len = list.len();
-for list.consume_rev_iter().advance |v| {
+foreach v in list.consume_rev_iter() {
 self.stack.push(v);
 }
 len
@@ -1087,7 +1087,7 @@ impl serialize::Decoder for Decoder {
 let len = match self.stack.pop() {
 Object(obj) => {
 let len = obj.len();
-for obj.consume_iter().advance |(key, value)| {
+foreach (key, value) in obj.consume_iter() {
 self.stack.push(value);
 self.stack.push(String(key));
 }
@@ -1157,12 +1157,12 @@ impl Ord for Json {
 let mut d1_flat = ~[];
 
 // FIXME #4430: this is horribly inefficient...
-for d0.iter().advance |(k, v)| {
+foreach (k, v) in d0.iter() {
 d0_flat.push((@(*k).clone(), @(*v).clone()));
 }
 d0_flat.qsort();
 
-for d1.iter().advance |(k, v)| {
+foreach (k, v) in d1.iter() {
 d1_flat.push((@(*k).clone(), @(*v).clone()));
 }
 d1_flat.qsort();
@@ -1297,7 +1297,7 @@ impl<A:ToJson> ToJson for ~[A] {
 impl<A:ToJson> ToJson for TreeMap<~str, A> {
 fn to_json(&self) -> Json {
 let mut d = TreeMap::new();
-for self.iter().advance |(key, value)| {
+foreach (key, value) in self.iter() {
 d.insert((*key).clone(), value.to_json());
 }
 Object(~d)
@@ -1307,7 +1307,7 @@ impl<A:ToJson> ToJson for TreeMap<~str, A> {
 impl<A:ToJson> ToJson for HashMap<~str, A> {
 fn to_json(&self) -> Json {
 let mut d = TreeMap::new();
-for self.iter().advance |(key, value)| {
+foreach (key, value) in self.iter() {
 d.insert((*key).clone(), value.to_json());
 }
 Object(~d)
@@ -1339,7 +1339,6 @@ mod tests {
 use super::*;
 
 use std::io;
-use std::result;
 
 use serialize::Decodable;
 use treemap::TreeMap;
@@ -1365,7 +1364,7 @@ mod tests {
 fn mk_object(items: &[(~str, Json)]) -> Json {
 let mut d = ~TreeMap::new();
 
-for items.iter().advance |item| {
+foreach item in items.iter() {
 match *item {
 (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); },
 }
@@ -131,7 +131,7 @@ impl TotalOrd for BigUint {
 if s_len < o_len { return Less; }
 if s_len > o_len { return Greater; }
 
-for self.data.rev_iter().zip(other.data.rev_iter()).advance |(&self_i, &other_i)| {
+foreach (&self_i, &other_i) in self.data.rev_iter().zip(other.data.rev_iter()) {
 cond!((self_i < other_i) { return Less; }
 (self_i > other_i) { return Greater; })
 }
@@ -420,7 +420,7 @@ impl Integer for BigUint {
 let bn = *b.data.last();
 let mut d = ~[];
 let mut carry = 0;
-for an.rev_iter().advance |elt| {
+foreach elt in an.rev_iter() {
 let ai = BigDigit::to_uint(carry, *elt);
 let di = ai / (bn as uint);
 assert!(di < BigDigit::base);
@@ -524,7 +524,7 @@ impl ToStrRadix for BigUint {
 fn fill_concat(v: &[BigDigit], radix: uint, l: uint) -> ~str {
 if v.is_empty() { return ~"0" }
 let mut s = str::with_capacity(v.len() * l);
-for v.rev_iter().advance |n| {
+foreach n in v.rev_iter() {
 let ss = uint::to_str_radix(*n as uint, radix);
 s.push_str("0".repeat(l - ss.len()));
 s.push_str(ss);
@@ -651,7 +651,7 @@ impl BigUint {
 
 let mut borrow = 0;
 let mut shifted = ~[];
-for self.data.rev_iter().advance |elem| {
+foreach elem in self.data.rev_iter() {
 shifted = ~[(*elem >> n_bits) | borrow] + shifted;
 borrow = *elem << (BigDigit::bits - n_bits);
 }
@@ -1186,8 +1186,8 @@ mod biguint_tests {
 fn test_cmp() {
 let data: ~[BigUint] = [ &[], &[1], &[2], &[-1], &[0, 1], &[2, 1], &[1, 1, 1] ]
 .map(|v| BigUint::from_slice(*v));
-for data.iter().enumerate().advance |(i, ni)| {
-for data.slice(i, data.len()).iter().enumerate().advance |(j0, nj)| {
+foreach (i, ni) in data.iter().enumerate() {
+foreach (j0, nj) in data.slice(i, data.len()).iter().enumerate() {
 let j = j0 + i;
 if i == j {
 assert_eq!(ni.cmp(nj), Equal);
@@ -1360,7 +1360,7 @@ mod biguint_tests {
 
 #[test]
 fn test_add() {
-for sum_triples.iter().advance |elm| {
+foreach elm in sum_triples.iter() {
 let (aVec, bVec, cVec) = *elm;
 let a = BigUint::from_slice(aVec);
 let b = BigUint::from_slice(bVec);
@@ -1373,7 +1373,7 @@ mod biguint_tests {
 
 #[test]
 fn test_sub() {
-for sum_triples.iter().advance |elm| {
+foreach elm in sum_triples.iter() {
 let (aVec, bVec, cVec) = *elm;
 let a = BigUint::from_slice(aVec);
 let b = BigUint::from_slice(bVec);
@@ -1424,7 +1424,7 @@ mod biguint_tests {
 
 #[test]
 fn test_mul() {
-for mul_triples.iter().advance |elm| {
+foreach elm in mul_triples.iter() {
 let (aVec, bVec, cVec) = *elm;
 let a = BigUint::from_slice(aVec);
 let b = BigUint::from_slice(bVec);
@@ -1434,7 +1434,7 @@ mod biguint_tests {
 assert!(b * a == c);
 }
 
-for div_rem_quadruples.iter().advance |elm| {
+foreach elm in div_rem_quadruples.iter() {
 let (aVec, bVec, cVec, dVec) = *elm;
 let a = BigUint::from_slice(aVec);
 let b = BigUint::from_slice(bVec);
@@ -1448,7 +1448,7 @@ mod biguint_tests {
 
 #[test]
 fn test_div_rem() {
-for mul_triples.iter().advance |elm| {
+foreach elm in mul_triples.iter() {
 let (aVec, bVec, cVec) = *elm;
 let a = BigUint::from_slice(aVec);
 let b = BigUint::from_slice(bVec);
@@ -1462,7 +1462,7 @@ mod biguint_tests {
 }
 }
 
-for div_rem_quadruples.iter().advance |elm| {
+foreach elm in div_rem_quadruples.iter() {
 let (aVec, bVec, cVec, dVec) = *elm;
 let a = BigUint::from_slice(aVec);
 let b = BigUint::from_slice(bVec);
@@ -1579,9 +1579,9 @@ mod biguint_tests {
 #[test]
 fn test_to_str_radix() {
 let r = to_str_pairs();
-for r.iter().advance |num_pair| {
+foreach num_pair in r.iter() {
 let &(ref n, ref rs) = num_pair;
-for rs.iter().advance |str_pair| {
+foreach str_pair in rs.iter() {
 let &(ref radix, ref str) = str_pair;
 assert_eq!(&n.to_str_radix(*radix), str);
 }
@@ -1591,9 +1591,9 @@ mod biguint_tests {
 #[test]
 fn test_from_str_radix() {
 let r = to_str_pairs();
-for r.iter().advance |num_pair| {
+foreach num_pair in r.iter() {
 let &(ref n, ref rs) = num_pair;
-for rs.iter().advance |str_pair| {
+foreach str_pair in rs.iter() {
 let &(ref radix, ref str) = str_pair;
 assert_eq!(n, &FromStrRadix::from_str_radix(*str, *radix).get());
 }
@@ -1658,14 +1658,14 @@ mod bigint_tests {
 fn test_cmp() {
 let vs = [ &[2 as BigDigit], &[1, 1], &[2, 1], &[1, 1, 1] ];
 let mut nums = ~[];
-for vs.rev_iter().advance |s| {
+foreach s in vs.rev_iter() {
 nums.push(BigInt::from_slice(Minus, *s));
 }
 nums.push(Zero::zero());
 nums.push_all_move(vs.map(|s| BigInt::from_slice(Plus, *s)));
 
-for nums.iter().enumerate().advance |(i, ni)| {
-for nums.slice(i, nums.len()).iter().enumerate().advance |(j0, nj)| {
+foreach (i, ni) in nums.iter().enumerate() {
+foreach (j0, nj) in nums.slice(i, nums.len()).iter().enumerate() {
 let j = i + j0;
 if i == j {
 assert_eq!(ni.cmp(nj), Equal);
@@ -1769,7 +1769,7 @@ mod bigint_tests {
 
 #[test]
 fn test_add() {
-for sum_triples.iter().advance |elm| {
+foreach elm in sum_triples.iter() {
 let (aVec, bVec, cVec) = *elm;
 let a = BigInt::from_slice(Plus, aVec);
 let b = BigInt::from_slice(Plus, bVec);
@@ -1788,7 +1788,7 @@ mod bigint_tests {
 
 #[test]
 fn test_sub() {
-for sum_triples.iter().advance |elm| {
+foreach elm in sum_triples.iter() {
 let (aVec, bVec, cVec) = *elm;
 let a = BigInt::from_slice(Plus, aVec);
 let b = BigInt::from_slice(Plus, bVec);
@@ -1845,7 +1845,7 @@ mod bigint_tests {
 
 #[test]
 fn test_mul() {
-for mul_triples.iter().advance |elm| {
+foreach elm in mul_triples.iter() {
 let (aVec, bVec, cVec) = *elm;
 let a = BigInt::from_slice(Plus, aVec);
 let b = BigInt::from_slice(Plus, bVec);
@@ -1858,7 +1858,7 @@ mod bigint_tests {
 assert!((-b) * a == -c);
 }
 
-for div_rem_quadruples.iter().advance |elm| {
+foreach elm in div_rem_quadruples.iter() {
 let (aVec, bVec, cVec, dVec) = *elm;
 let a = BigInt::from_slice(Plus, aVec);
 let b = BigInt::from_slice(Plus, bVec);
@@ -1897,7 +1897,7 @@ mod bigint_tests {
 }
 }
 
-for mul_triples.iter().advance |elm| {
+foreach elm in mul_triples.iter() {
 let (aVec, bVec, cVec) = *elm;
 let a = BigInt::from_slice(Plus, aVec);
 let b = BigInt::from_slice(Plus, bVec);
@@ -1907,7 +1907,7 @@ mod bigint_tests {
 if !b.is_zero() { check(&c, &b, &a, &Zero::zero()); }
 }
 
-for div_rem_quadruples.iter().advance |elm| {
+foreach elm in div_rem_quadruples.iter() {
 let (aVec, bVec, cVec, dVec) = *elm;
 let a = BigInt::from_slice(Plus, aVec);
 let b = BigInt::from_slice(Plus, bVec);
@@ -1940,7 +1940,7 @@ mod bigint_tests {
 check_sub(&a.neg(), b, &q.neg(), &r.neg());
 check_sub(&a.neg(), &b.neg(), q, &r.neg());
 }
-for mul_triples.iter().advance |elm| {
+foreach elm in mul_triples.iter() {
 let (aVec, bVec, cVec) = *elm;
 let a = BigInt::from_slice(Plus, aVec);
 let b = BigInt::from_slice(Plus, bVec);
@@ -1950,7 +1950,7 @@ mod bigint_tests {
 if !b.is_zero() { check(&c, &b, &a, &Zero::zero()); }
 }
 
-for div_rem_quadruples.iter().advance |elm| {
+foreach elm in div_rem_quadruples.iter() {
 let (aVec, bVec, cVec, dVec) = *elm;
 let a = BigInt::from_slice(Plus, aVec);
 let b = BigInt::from_slice(Plus, bVec);
@@ -239,14 +239,14 @@ mod test {
 fn test_scale_unscale() {
 assert_eq!(_05_05i.scale(2f), _1_1i);
 assert_eq!(_1_1i.unscale(2f), _05_05i);
-for all_consts.iter().advance |&c| {
+foreach &c in all_consts.iter() {
 assert_eq!(c.scale(2f).unscale(2f), c);
 }
 }
 
 #[test]
 fn test_conj() {
-for all_consts.iter().advance |&c| {
+foreach &c in all_consts.iter() {
 assert_eq!(c.conj(), Cmplx::new(c.re, -c.im));
 assert_eq!(c.conj().conj(), c);
 }
@@ -283,7 +283,7 @@ mod test {
 let (r, theta) = c.to_polar();
 assert!((c - Cmplx::from_polar(&r, &theta)).norm() < 1e-6);
 }
-for all_consts.iter().advance |&c| { test(c); }
+foreach &c in all_consts.iter() { test(c); }
 }
 
 mod arith {
@@ -296,7 +296,7 @@ mod test {
 assert_eq!(_0_1i + _1_0i, _1_1i);
 assert_eq!(_1_0i + _neg1_1i, _0_1i);
 
-for all_consts.iter().advance |&c| {
+foreach &c in all_consts.iter() {
 assert_eq!(_0_0i + c, c);
 assert_eq!(c + _0_0i, c);
 }
@@ -308,7 +308,7 @@ mod test {
 assert_eq!(_0_1i - _1_0i, _neg1_1i);
 assert_eq!(_0_1i - _neg1_1i, _1_0i);
 
-for all_consts.iter().advance |&c| {
+foreach &c in all_consts.iter() {
 assert_eq!(c - _0_0i, c);
 assert_eq!(c - c, _0_0i);
 }
@@ -323,7 +323,7 @@ mod test {
 assert_eq!(_0_1i * _0_1i, -_1_0i);
 assert_eq!(_0_1i * _0_1i * _0_1i * _0_1i, _1_0i);
 
-for all_consts.iter().advance |&c| {
+foreach &c in all_consts.iter() {
 assert_eq!(c * _1_0i, c);
 assert_eq!(_1_0i * c, c);
 }
@@ -331,7 +331,7 @@ mod test {
 #[test]
 fn test_div() {
 assert_eq!(_neg1_1i / _0_1i, _1_1i);
-for all_consts.iter().advance |&c| {
+foreach &c in all_consts.iter() {
 if c != Zero::zero() {
 assert_eq!(c / c, _1_0i);
 }
@@ -341,7 +341,7 @@ mod test {
 fn test_neg() {
 assert_eq!(-_1_0i + _0_1i, _neg1_1i);
 assert_eq!((-_0_1i) * _0_1i, _1_0i);
-for all_consts.iter().advance |&c| {
+foreach &c in all_consts.iter() {
 assert_eq!(-(-c), c);
 }
 }
@@ -500,7 +500,7 @@ mod test {
-for xs.iter().advance |&s| {
+foreach &s in xs.iter() {
@@ -540,7 +540,7 @@ mod test {
-for xs.iter().advance |&s| {
+foreach &s in xs.iter() {
@@ -206,7 +206,7 @@ impl<T: Ord, Iter: Iterator<T>> Extendable<T, Iter> for PriorityQueue<T> {
-for iter.advance |elem| {
+foreach elem in *iter {
@@ -223,7 +223,7 @@ mod tests {
-for pq.iter().advance |el| {
+foreach el in pq.iter() {
@@ -369,7 +369,7 @@ mod tests {
-for xs.iter().advance |&x| {
+foreach &x in xs.iter() {
@@ -39,7 +39,7 @@ impl<T> Container for RingBuf<T> {
-for self.elts.mut_iter().advance |x| { *x = None }
+foreach x in self.elts.mut_iter() { *x = None }
@@ -334,7 +334,7 @@ impl<A, T: Iterator<A>> FromIterator<A, T> for RingBuf<A> {
-for iterator.advance |elt| {
+foreach elt in *iterator {
@@ -653,7 +653,7 @@ mod tests {
-for d.mut_iter().enumerate().advance |(i, elt)| {
+foreach (i, elt) in d.mut_iter().enumerate() {
@@ -676,7 +676,7 @@ mod tests {
-for d.mut_rev_iter().enumerate().advance |(i, elt)| {
+foreach (i, elt) in d.mut_rev_iter().enumerate() {
@@ -700,7 +700,7 @@ mod tests {
-for deq.iter().enumerate().advance |(i, &x)| {
+foreach (i, &x) in deq.iter().enumerate() {
@@ -444,7 +444,7 @@ impl<D:Decoder,T:Decodable<D> + 'static> Decodable<D> for @mut T {
-for self.iter().enumerate().advance |(i, e)| {
+foreach (i, e) in self.iter().enumerate() {
@@ -454,7 +454,7 @@ impl<'self, S:Encoder,T:Encodable<S>> Encodable<S> for &'self [T] {
-for self.iter().enumerate().advance |(i, e)| {
+foreach (i, e) in self.iter().enumerate() {
@@ -474,7 +474,7 @@ impl<D:Decoder,T:Decodable<D>> Decodable<D> for ~[T] {
-for self.iter().enumerate().advance |(i, e)| {
+foreach (i, e) in self.iter().enumerate() {
@@ -668,7 +668,7 @@ impl<
-for self.iter().enumerate().advance |(i, e)| {
+foreach (i, e) in self.iter().enumerate() {
@@ -693,7 +693,7 @@ impl<
-for self.iter().enumerate().advance |(i, e)| {
+foreach (i, e) in self.iter().enumerate() {
@@ -720,7 +720,7 @@ impl<
-for self.iter().advance |(key, val)| {
+foreach (key, val) in self.iter() {
@@ -754,7 +754,7 @@ impl<
-for self.iter().advance |e| {
+foreach e in self.iter() {
@@ -842,7 +842,7 @@ impl<
-for self.iter().advance |(key, val)| {
+foreach (key, val) in self.iter() {
@@ -876,7 +876,7 @@ impl<
-for self.iter().advance |e| {
+foreach e in self.iter() {
@@ -911,7 +911,7 @@ pub trait EncoderHelpers {
-for v.iter().enumerate().advance |(i, e)| {
+foreach (i, e) in v.iter().enumerate() {
@@ -447,7 +447,7 @@ mod test_map {
-for m.mut_iter().advance |(k, v)| {
+foreach (k, v) in m.mut_iter() {
@@ -489,7 +489,7 @@ mod test_map {
-for m.mut_rev_iter().advance |(k, v)| {
+foreach (k, v) in m.mut_rev_iter() {
@@ -507,7 +507,7 @@ mod test_map {
-for m.consume().advance |(k, v)| {
+foreach (k, v) in m.consume() {
@@ -733,7 +733,7 @@ fn copy_vec<T:Clone>(dest: &mut [T],
-for from.iter().enumerate().advance |(i, v)| {
+foreach (i, v) in from.iter().enumerate() {
@@ -843,7 +843,7 @@ mod test_qsort {
-for pairs.iter().advance |p| {
+foreach p in pairs.iter() {
@@ -167,7 +167,7 @@ impl<'self> Stats for &'self [f64] {
-for self.iter().advance |s| {
+foreach s in self.iter() {
@@ -254,7 +254,7 @@ pub fn winsorize(samples: &mut [f64], pct: f64) {
-for samples.mut_iter().advance |samp| {
+foreach samp in samples.mut_iter() {
@@ -989,13 +989,13 @@ mod tests {
-for ports.iter().advance |port| { let _ = port.recv(); }
+foreach port in ports.iter() { let _ = port.recv(); }
-for ports.iter().advance |port| { let _ = port.recv(); }
+foreach port in ports.iter() { let _ = port.recv(); }
@@ -1080,7 +1080,7 @@ mod tests {
-for sibling_convos.iter().advance |p| {
+foreach p in sibling_convos.iter() {
@@ -1090,7 +1090,7 @@ mod tests {
-for r.iter().advance |p| { p.recv(); } // wait on all its siblings
+foreach p in r.iter() { p.recv(); } // wait on all its siblings
@@ -1357,13 +1357,13 @@ mod tests {
-for ports.iter().advance |port| { let _ = port.recv(); }
+foreach port in ports.iter() { let _ = port.recv(); }
-for ports.iter().advance |port| { let _ = port.recv(); }
+foreach port in ports.iter() { let _ = port.recv(); }
@@ -35,7 +35,7 @@ pub struct TaskPool<T> {
-for self.channels.iter().advance |channel| {
+foreach channel in self.channels.iter() {
@@ -102,11 +102,11 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)
-for mparams.mut_iter().zip(params.iter()).advance |(dst, src)| {
+foreach (dst, src) in mparams.mut_iter().zip(params.iter()) {
-for cap.iter().transform(|&x| x).advance |c| {
+foreach c in cap.iter().transform(|&x| x) {
@@ -605,7 +605,7 @@ mod test {
-for caps.iter().advance |cap| {
+foreach cap in caps.iter() {
@@ -615,7 +615,7 @@ mod test {
-for caps.iter().advance |cap| {
+foreach cap in caps.iter() {
@@ -636,7 +636,7 @@ mod test {
-for v.iter().advance |&(op, bs)| {
+foreach &(op, bs) in v.iter() {
@@ -272,7 +272,7 @@ pub fn parse(file: @Reader, longnames: bool) -> Result<~TermInfo, ~str> {
-for string_offsets.iter().enumerate().advance |(i, v)| {
+foreach (i, v) in string_offsets.iter().enumerate() {
@@ -35,7 +35,7 @@ pub fn get_dbpath_for_term(term: &str) -> Option<~path> {
-Some(dirs) => for dirs.split_iter(':').advance |i| {
+Some(dirs) => foreach i in dirs.split_iter(':') {
@@ -54,7 +54,7 @@ pub fn get_dbpath_for_term(term: &str) -> Option<~path> {
-for dirs_to_search.iter().advance |p| {
+foreach p in dirs_to_search.iter() {
@@ -430,11 +430,11 @@ impl ConsoleTestState {
-for self.failures.iter().advance() |f| {
+foreach f in self.failures.iter() {
-for failures.iter().advance |name| {
+foreach name in failures.iter() {
@@ -446,7 +446,7 @@ impl ConsoleTestState {
-for diff.iter().advance() |(k, v)| {
+foreach (k, v) in diff.iter() {
@@ -566,7 +566,7 @@ pub fn run_tests_console(opts: &TestOpts,
-for mm.iter().advance() |(k,v)| {
+foreach (k,v) in mm.iter() {
@@ -700,7 +700,7 @@ fn run_tests(opts: &TestOpts,
-for filtered_benchs_and_metrics.consume_iter().advance |b| {
+foreach b in filtered_benchs_and_metrics.consume_iter() {
@@ -888,7 +888,7 @@ impl MetricMap {
-for old.iter().advance |(k, vold)| {
+foreach (k, vold) in old.iter() {
@@ -925,7 +925,7 @@ impl MetricMap {
-for self.iter().advance |(k, _)| {
+foreach (k, _) in self.iter() {
@@ -1040,7 +1040,7 @@ impl BenchHarness {
-for samples.mut_iter().advance() |p| {
+foreach p in samples.mut_iter() {
@@ -1048,7 +1048,7 @@ impl BenchHarness {
-for samples.mut_iter().advance() |p| {
+foreach p in samples.mut_iter() {
@@ -1288,7 +1288,7 @@ mod tests {
-for names.iter().advance |name| {
+foreach name in names.iter() {
@@ -1314,7 +1314,7 @@ mod tests {
-for pairs.iter().advance |p| {
+foreach p in pairs.iter() {
@@ -260,7 +260,7 @@ impl Tm {
-for needle.byte_iter().advance |ch| {
+foreach ch in needle.byte_iter() {
@@ -863,9 +863,7 @@ mod tests {
-use std::result;
-use std::str;
@@ -1041,7 +1039,7 @@ mod tests {
-for days.iter().advance |day| {
+foreach day in days.iter() {
@@ -1054,7 +1052,7 @@ mod tests {
-for days.iter().advance |day| {
+foreach day in days.iter() {
@@ -1072,7 +1070,7 @@ mod tests {
-for months.iter().advance |day| {
+foreach day in months.iter() {
@@ -1090,7 +1088,7 @@ mod tests {
-for months.iter().advance |day| {
+foreach day in months.iter() {
@@ -674,7 +674,7 @@ fn remove<K: TotalOrd, V>(node: &mut Option<~TreeNode<K, V>>,
-for child.mut_iter().advance |x| {
+foreach x in child.mut_iter() {
@@ -729,18 +729,18 @@ fn remove<K: TotalOrd, V>(node: &mut Option<~TreeNode<K, V>>,
-for save.right.mut_iter().advance |x| { x.level = save.level }
+foreach x in save.right.mut_iter() { x.level = save.level }
-for save.right.mut_iter().advance |right| {
+foreach right in save.right.mut_iter() {
-for right.right.mut_iter().advance |x| { skew(x) }
+foreach x in right.right.mut_iter() { skew(x) }
-for save.right.mut_iter().advance |x| { split(x) }
+foreach x in save.right.mut_iter() { split(x) }
@@ -763,7 +763,7 @@ impl<K: TotalOrd, V, T: Iterator<(K, V)>> FromIterator<(K, V), T> for TreeMap<K,
-for iter.advance |(k, v)| {
+foreach (k, v) in *iter {
@@ -780,7 +780,7 @@ impl<T: TotalOrd, Iter: Iterator<T>> FromIterator<T, Iter> for TreeSet<T> {
-for iter.advance |elem| {
+foreach elem in *iter {
@@ -863,13 +863,13 @@ mod test_treemap {
-for ctrl.iter().advance |x| {
+foreach x in ctrl.iter() {
-for map.iter().advance |(map_k, map_v)| {
+foreach (map_k, map_v) in map.iter() {
-for ctrl.iter().advance |x| {
+foreach x in ctrl.iter() {
@@ -983,7 +983,7 @@ mod test_treemap {
-for m.iter().advance |(k, v)| {
+foreach (k, v) in m.iter() {
@@ -1090,7 +1090,7 @@ mod test_treemap {
-for b.advance |x| {
+foreach x in b {
@@ -1099,7 +1099,7 @@ mod test_treemap {
-for b.advance |x| {
+foreach x in b {
@@ -1111,7 +1111,7 @@ mod test_treemap {
-for xs.iter().advance |&(k, v)| {
+foreach &(k, v) in xs.iter() {
@@ -1259,7 +1259,7 @@ mod test_set {
-for m.iter().advance |x| {
+foreach x in m.iter() {
@@ -1288,8 +1288,8 @@ mod test_set {
-for a.iter().advance |x| { assert!(set_a.insert(*x)) }
+foreach x in a.iter() { assert!(set_a.insert(*x)) }
-for b.iter().advance |y| { assert!(set_b.insert(*y)) }
+foreach y in b.iter() { assert!(set_b.insert(*y)) }
@@ -1410,7 +1410,7 @@ mod test_set {
-for xs.iter().advance |x: &int| {
+foreach x in xs.iter() {
@@ -206,10 +206,10 @@ pub fn encode_form_urlencoded(m: &HashMap<~str, ~[~str]>) -> ~str {
-for m.iter().advance |(key, values)| {
+foreach (key, values) in m.iter() {
-for values.iter().advance |value| {
+foreach value in values.iter() {
@@ -331,7 +331,7 @@ fn userinfo_to_str(userinfo: &UserInfo) -> ~str {
-for rawquery.split_iter('&').advance |p| {
+foreach p in rawquery.split_iter('&') {
@@ -341,7 +341,7 @@ fn query_from_str(rawquery: &str) -> Query {
-for query.iter().advance |kv| {
+foreach kv in query.iter() {
@@ -356,7 +356,7 @@ pub fn query_to_str(query: &Query) -> ~str {
-for rawurl.iter().enumerate().advance |(i,c)| {
+foreach (i,c) in rawurl.iter().enumerate() {
@@ -418,7 +418,7 @@ fn get_authority(rawurl: &str) ->
-for rawurl.iter().enumerate().advance |(i,c)| {
+foreach (i,c) in rawurl.iter().enumerate() {
@@ -563,7 +563,7 @@ fn get_path(rawurl: &str, authority: bool) ->
-for rawurl.iter().enumerate().advance |(i,c)| {
+foreach (i,c) in rawurl.iter().enumerate() {
@@ -287,7 +287,7 @@ impl<'self> Prep<'self> {
-for map.iter().advance |(k, v)| {
+foreach (k, v) in map.iter() {
@@ -215,7 +215,7 @@ fn usage() {
-for COMMANDS.iter().advance |command| {
+foreach command in COMMANDS.iter() {
@@ -240,7 +240,7 @@ pub fn main() {
-for r.iter().advance |command| {
+foreach command in r.iter() {
@@ -141,7 +141,7 @@ pub mod jit {
-for r.iter().advance |cratepath| {
+foreach cratepath in r.iter() {
@@ -507,7 +507,7 @@ pub fn build_link_meta(sess: Session,
-for linkage_metas.iter().advance |meta| {
+foreach meta in linkage_metas.iter() {
@@ -547,7 +547,7 @@ pub fn build_link_meta(sess: Session,
-for mis.iter().advance |m_| {
+foreach m_ in mis.iter() {
@@ -555,11 +555,11 @@ pub fn build_link_meta(sess: Session,
-for cmh_items.iter().advance |m| {
+foreach m in cmh_items.iter() {
-for dep_hashes.iter().advance |dh| {
+foreach dh in dep_hashes.iter() {
@@ -665,7 +665,7 @@ pub fn get_symbol_hash(ccx: &mut CrateContext, t: ty::t) -> @str {
-for s.iter().advance |c| {
+foreach c in s.iter() {
@@ -712,7 +712,7 @@ pub fn mangle(sess: Session, ss: path) -> ~str {
-for ss.iter().advance |s| {
+foreach s in ss.iter() {
@@ -905,7 +905,7 @@ pub fn link_args(sess: Session,
-for r.iter().advance |cratepath| {
+foreach cratepath in r.iter() {
@@ -917,12 +917,12 @@ pub fn link_args(sess: Session,
-for ula.iter().advance |arg| { args.push(arg.to_owned()); }
+foreach arg in ula.iter() { args.push(arg.to_owned()); }
-for link_args.consume_iter().advance |link_arg| {
+foreach link_arg in link_args.consume_iter() {
@@ -935,13 +935,13 @@ pub fn link_args(sess: Session,
-for sess.opts.addl_lib_search_paths.iter().advance |path| {
+foreach path in sess.opts.addl_lib_search_paths.iter() {
-for used_libs.iter().advance |l| { args.push(~"-l" + *l); }
+foreach l in used_libs.iter() { args.push(~"-l" + *l); }
@@ -164,7 +164,7 @@ pub fn create_standard_passes(level: OptLevel) -> ~[~str] {
-for pass_list.iter().advance |nm| {
+foreach nm in pass_list.iter() {
@@ -189,15 +189,15 @@ pub fn list_passes() {
-for analysis_passes.iter().advance |&(name, desc)| {
+foreach &(name, desc) in analysis_passes.iter() {
-for transform_passes.iter().advance |&(name, desc)| {
+foreach &(name, desc) in transform_passes.iter() {
-for utility_passes.iter().advance |&(name, desc)| {
+foreach &(name, desc) in utility_passes.iter() {
@@ -315,7 +315,7 @@ static utility_passes : &'static [(&'static str, &'static str)] = &'static [
-for analysis_passes.iter().advance |&(name,_)| {
+foreach &(name,_) in analysis_passes.iter() {
@@ -323,7 +323,7 @@ fn passes_exist() {
-for transform_passes.iter().advance |&(name,_)| {
+foreach &(name,_) in transform_passes.iter() {
@@ -331,7 +331,7 @@ fn passes_exist() {
-for utility_passes.iter().advance |&(name,_)| {
+foreach &(name,_) in utility_passes.iter() {
@@ -342,7 +342,7 @@ fn passes_exist() {
-for failed.iter().advance |&n| {
+foreach &n in failed.iter() {
@@ -64,7 +64,7 @@ fn get_rpaths(os: session::os,
-for libs.iter().advance |libpath| {
+foreach libpath in libs.iter() {
@@ -83,7 +83,7 @@ fn get_rpaths(os: session::os,
-for rpaths.iter().advance |rpath| {
+foreach rpath in rpaths.iter() {
@@ -183,7 +183,7 @@ pub fn get_install_prefix_rpath(target_triple: &str) -> Path {
-for rpaths.iter().advance |rpath| {
+foreach rpath in rpaths.iter() {
@@ -508,7 +508,7 @@ pub fn pretty_print_input(sess: Session, cfg: ast::CrateConfig, input: &input,
-for os_names.iter().advance |&(name, os)| {
+foreach &(name, os) in os_names.iter() {
@@ -522,7 +522,7 @@ static os_names : &'static [(&'static str, session::os)] = &'static [
-for architecture_abis.iter().advance |&(arch, abi)| {
+foreach &(arch, abi) in architecture_abis.iter() {
@@ -611,7 +611,7 @@ pub fn build_session_options(binary: @str,
-for lint_levels.iter().advance |level| {
+foreach level in lint_levels.iter() {
@@ -620,7 +620,7 @@ pub fn build_session_options(binary: @str,
-for flags.iter().advance |lint_name| {
+foreach lint_name in flags.iter() {
@@ -637,9 +637,9 @@ pub fn build_session_options(binary: @str,
-for debug_flags.iter().advance |debug_flag| {
+foreach debug_flag in debug_flags.iter() {
-for debug_map.iter().advance |tuple| {
+foreach tuple in debug_map.iter() {
@@ -380,7 +380,7 @@ fn is_extra(cx: &TestCtxt) -> bool {
-for cx.testfns.iter().advance |test| {
+foreach test in cx.testfns.iter() {
@@ -65,7 +65,7 @@ struct cache_entry {
-for crate_cache.iter().advance |entry| {
+foreach entry in crate_cache.iter() {
@@ -97,7 +97,7 @@ fn warn_if_multiple_versions(e: @mut Env,
-for matches.iter().advance |match_| {
+foreach match_ in matches.iter() {
@@ -125,7 +125,7 @@ struct Env {
-for c.attrs.iter().filter(|m| "link_args" == m.name()).advance |a| {
+foreach a in c.attrs.iter().filter(|m| "link_args" == m.name()) {
@@ -194,7 +194,7 @@ fn visit_item(e: &Env, i: @ast::item) {
-for link_args.iter().advance |m| {
+foreach m in link_args.iter() {
@@ -223,7 +223,7 @@ fn metas_with_ident(ident: @str, metas: ~[@ast::MetaItem])
-for e.crate_cache.iter().advance |c| {
+foreach c in e.crate_cache.iter() {
@@ -306,7 +306,7 @@ fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map {
-for r.iter().advance |dep| {
+foreach dep in r.iter() {
@@ -84,7 +84,7 @@ pub fn have_crate_data(cstore: &CStore, cnum: ast::CrateNum) -> bool {

 pub fn iter_crate_data(cstore: &CStore,
 i: &fn(ast::CrateNum, @crate_metadata)) {
-for cstore.metas.iter().advance |(&k, &v)| {
+foreach (&k, &v) in cstore.metas.iter() {
 i(k, v);
 }
 }
@@ -114,7 +114,7 @@ pub fn get_used_libraries<'a>(cstore: &'a CStore) -> &'a [@str] {
 }

 pub fn add_used_link_args(cstore: &mut CStore, args: &str) {
-for args.split_iter(' ').advance |s| {
+foreach s in args.split_iter(' ') {
 cstore.used_link_args.push(s.to_managed());
 }
 }
@@ -165,7 +165,7 @@ pub fn get_dep_hashes(cstore: &CStore) -> ~[@str] {
 };

 debug!("sorted:");
-for sorted.iter().advance |x| {
+foreach x in sorted.iter() {
 debug!(" hash[%s]: %s", x.name, x.hash);
 }

@@ -762,7 +762,7 @@ pub fn get_enum_variants(intr: @ident_interner, cdata: cmd, id: ast::NodeId,
 let mut infos: ~[@ty::VariantInfo] = ~[];
 let variant_ids = enum_variant_ids(item, cdata);
 let mut disr_val = 0;
-for variant_ids.iter().advance |did| {
+foreach did in variant_ids.iter() {
 let item = find_item(did.node, items);
 let ctor_ty = item_type(ast::def_id { crate: cdata.cnum, node: id},
 item, tcx, cdata);
@@ -966,7 +966,7 @@ pub fn get_static_methods_if_impl(intr: @ident_interner,
 }

 let mut static_impl_methods = ~[];
-for impl_method_ids.iter().advance |impl_method_id| {
+foreach impl_method_id in impl_method_ids.iter() {
 let impl_method_doc = lookup_item(impl_method_id.node, cdata.data);
 let family = item_family(impl_method_doc);
 match family {
@@ -1155,7 +1155,7 @@ fn list_meta_items(intr: @ident_interner,
 meta_items: ebml::Doc,
 out: @io::Writer) {
 let r = get_meta_items(meta_items);
-for r.iter().advance |mi| {
+foreach mi in r.iter() {
 out.write_str(fmt!("%s\n", pprust::meta_item_to_str(*mi, intr)));
 }
 }
@@ -1165,7 +1165,7 @@ fn list_crate_attributes(intr: @ident_interner, md: ebml::Doc, hash: &str,
 out.write_str(fmt!("=Crate Attributes (%s)=\n", hash));

 let r = get_attributes(md);
-for r.iter().advance |attr| {
+foreach attr in r.iter() {
 out.write_str(fmt!("%s\n", pprust::attribute_to_str(attr, intr)));
 }

@@ -1207,7 +1207,7 @@ fn list_crate_deps(data: @~[u8], out: @io::Writer) {
 out.write_str("=External Dependencies=\n");

 let r = get_crate_deps(data);
-for r.iter().advance |dep| {
+foreach dep in r.iter() {
 out.write_str(
 fmt!("%d %s-%s-%s\n",
 dep.cnum, token::ident_to_str(&dep.name), dep.hash, dep.vers));

@@ -120,7 +120,7 @@ fn encode_region_param(ecx: &EncodeContext,
 ebml_w: &mut writer::Encoder,
 it: @ast::item) {
 let opt_rp = ecx.tcx.region_paramd_items.find(&it.id);
-for opt_rp.iter().advance |rp| {
+foreach rp in opt_rp.iter() {
 ebml_w.start_tag(tag_region_param);
 rp.encode(ebml_w);
 ebml_w.end_tag();
@@ -193,7 +193,7 @@ fn encode_ty_type_param_defs(ebml_w: &mut writer::Encoder,
 tcx: ecx.tcx,
 abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)
 };
-for params.iter().advance |param| {
+foreach param in params.iter() {
 ebml_w.start_tag(tag);
 tyencode::enc_type_param_def(ebml_w.writer, ty_str_ctxt, param);
 ebml_w.end_tag();
@@ -250,7 +250,7 @@ fn encode_type(ecx: &EncodeContext,
 fn encode_transformed_self_ty(ecx: &EncodeContext,
 ebml_w: &mut writer::Encoder,
 opt_typ: Option<ty::t>) {
-for opt_typ.iter().advance |&typ| {
+foreach &typ in opt_typ.iter() {
 ebml_w.start_tag(tag_item_method_transformed_self_ty);
 write_type(ecx, ebml_w, typ);
 ebml_w.end_tag();
@@ -327,7 +327,7 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
 let mut i = 0;
 let vi = ty::enum_variants(ecx.tcx,
 ast::def_id { crate: LOCAL_CRATE, node: id });
-for variants.iter().advance |variant| {
+foreach variant in variants.iter() {
 let def_id = local_def(variant.node.id);
 index.push(entry {val: variant.node.id, pos: ebml_w.writer.tell()});
 ebml_w.start_tag(tag_items_data_item);
@@ -375,7 +375,7 @@ fn encode_path(ecx: &EncodeContext,

 ebml_w.start_tag(tag_path);
 ebml_w.wr_tagged_u32(tag_path_len, (path.len() + 1) as u32);
-for path.iter().advance |pe| {
+foreach pe in path.iter() {
 encode_path_elt(ecx, ebml_w, *pe);
 }
 encode_path_elt(ecx, ebml_w, name);
@@ -405,8 +405,8 @@ fn encode_reexported_static_base_methods(ecx: &EncodeContext,
 -> bool {
 match ecx.tcx.inherent_impls.find(&exp.def_id) {
 Some(implementations) => {
-for implementations.iter().advance |&base_impl| {
+foreach &base_impl in implementations.iter() {
-for base_impl.methods.iter().advance |&m| {
+foreach &m in base_impl.methods.iter() {
 if m.explicit_self == ast::sty_static {
 encode_reexported_static_method(ecx, ebml_w, exp,
 m.def_id, m.ident);
@@ -426,7 +426,7 @@ fn encode_reexported_static_trait_methods(ecx: &EncodeContext,
 -> bool {
 match ecx.tcx.trait_methods_cache.find(&exp.def_id) {
 Some(methods) => {
-for methods.iter().advance |&m| {
+foreach &m in methods.iter() {
 if m.explicit_self == ast::sty_static {
 encode_reexported_static_method(ecx, ebml_w, exp,
 m.def_id, m.ident);
@@ -486,7 +486,7 @@ fn each_auxiliary_node_id(item: @item, callback: &fn(NodeId) -> bool)
 let mut continue = true;
 match item.node {
 item_enum(ref enum_def, _) => {
-for enum_def.variants.iter().advance |variant| {
+foreach variant in enum_def.variants.iter() {
 continue = callback(variant.node.id);
 if !continue {
 break
@@ -518,7 +518,7 @@ fn encode_reexports(ecx: &EncodeContext,
 match ecx.reexports2.find(&id) {
 Some(ref exports) => {
 debug!("(encoding info for module) found reexports for %d", id);
-for exports.iter().advance |exp| {
+foreach exp in exports.iter() {
 debug!("(encoding info for module) reexport '%s' for %d",
 exp.name, id);
 ebml_w.start_tag(tag_items_data_item_reexport);
@@ -553,7 +553,7 @@ fn encode_info_for_mod(ecx: &EncodeContext,
 debug!("(encoding info for module) encoding info for module ID %d", id);

 // Encode info about all the module children.
-for md.items.iter().advance |item| {
+foreach item in md.items.iter() {
 ebml_w.start_tag(tag_mod_child);
 ebml_w.wr_str(def_to_str(local_def(item.id)));
 ebml_w.end_tag();
@@ -663,7 +663,7 @@ fn encode_method_sort(ebml_w: &mut writer::Encoder, sort: char) {

 fn encode_provided_source(ebml_w: &mut writer::Encoder,
 source_opt: Option<def_id>) {
-for source_opt.iter().advance |source| {
+foreach source in source_opt.iter() {
 ebml_w.start_tag(tag_item_method_provided_source);
 let s = def_to_str(*source);
 ebml_w.writer.write(s.as_bytes());
@@ -684,7 +684,7 @@ fn encode_info_for_struct(ecx: &EncodeContext,
 let tcx = ecx.tcx;
 /* We encode both private and public fields -- need to include
 private fields to get the offsets right */
-for fields.iter().advance |field| {
+foreach field in fields.iter() {
 let (nm, vis) = match field.node.kind {
 named_field(nm, vis) => (nm, vis),
 unnamed_field => (special_idents::unnamed_field, inherited)
@@ -771,7 +771,7 @@ fn encode_info_for_method(ecx: &EncodeContext,

 encode_path(ecx, ebml_w, impl_path, ast_map::path_name(m.ident));

-for ast_method_opt.iter().advance |ast_method| {
+foreach ast_method in ast_method_opt.iter() {
 let num_params = tpt.generics.type_param_defs.len();
 if num_params > 0u || is_default_impl
 || should_inline(ast_method.attrs) {
@@ -881,7 +881,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
 encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));

 // Encode all the items in this module.
-for fm.items.iter().advance |foreign_item| {
+foreach foreign_item in fm.items.iter() {
 ebml_w.start_tag(tag_mod_child);
 ebml_w.wr_str(def_to_str(local_def(foreign_item.id)));
 ebml_w.end_tag();
@@ -908,7 +908,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
 encode_family(ebml_w, 't');
 encode_bounds_and_type(ebml_w, ecx, &lookup_item_type(tcx, def_id));
 encode_name(ecx, ebml_w, item.ident);
-for (*enum_definition).variants.iter().advance |v| {
+foreach v in (*enum_definition).variants.iter() {
 encode_variant_id(ebml_w, local_def(v.node.id));
 }
 (ecx.encode_inlined_item)(ecx, ebml_w, path, ii_item(item));
@@ -949,7 +949,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
 /* Encode def_ids for each field and method
 for methods, write all the stuff get_trait_method
 needs to know*/
-for struct_def.fields.iter().advance |f| {
+foreach f in struct_def.fields.iter() {
 match f.node.kind {
 named_field(ident, vis) => {
 ebml_w.start_tag(tag_item_field);
@@ -1009,13 +1009,13 @@ fn encode_info_for_item(ecx: &EncodeContext,
 }
 _ => {}
 }
-for imp.methods.iter().advance |method| {
+foreach method in imp.methods.iter() {
 ebml_w.start_tag(tag_item_impl_method);
 let s = def_to_str(method.def_id);
 ebml_w.writer.write(s.as_bytes());
 ebml_w.end_tag();
 }
-for opt_trait.iter().advance |ast_trait_ref| {
+foreach ast_trait_ref in opt_trait.iter() {
 let trait_ref = ty::node_id_to_trait_ref(
 tcx, ast_trait_ref.ref_id);
 encode_trait_ref(ebml_w, ecx, trait_ref, tag_item_trait_ref);
@@ -1034,7 +1034,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
 // appear first in the impl structure, in the same order they do
 // in the ast. This is a little sketchy.
 let num_implemented_methods = ast_methods.len();
-for imp.methods.iter().enumerate().advance |(i, m)| {
+foreach (i, m) in imp.methods.iter().enumerate() {
 let ast_method = if i < num_implemented_methods {
 Some(ast_methods[i])
 } else { None };
@@ -1062,7 +1062,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
 encode_trait_ref(ebml_w, ecx, trait_def.trait_ref, tag_item_trait_ref);
 encode_name(ecx, ebml_w, item.ident);
 encode_attributes(ebml_w, item.attrs);
-for ty::trait_method_def_ids(tcx, def_id).iter().advance |&method_def_id| {
+foreach &method_def_id in ty::trait_method_def_ids(tcx, def_id).iter() {
 ebml_w.start_tag(tag_item_trait_method);
 encode_def_id(ebml_w, method_def_id);
 ebml_w.end_tag();
@@ -1072,7 +1072,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
 ebml_w.end_tag();
 }
 encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
-for super_traits.iter().advance |ast_trait_ref| {
+foreach ast_trait_ref in super_traits.iter() {
 let trait_ref = ty::node_id_to_trait_ref(ecx.tcx, ast_trait_ref.ref_id);
 encode_trait_ref(ebml_w, ecx, trait_ref, tag_item_super_trait_ref);
 }
@@ -1080,7 +1080,7 @@ fn encode_info_for_item(ecx: &EncodeContext,

 // Now output the method info for each method.
 let r = ty::trait_method_def_ids(tcx, def_id);
-for r.iter().enumerate().advance |(i, &method_def_id)| {
+foreach (i, &method_def_id) in r.iter().enumerate() {
 assert_eq!(method_def_id.crate, ast::LOCAL_CRATE);

 let method_ty = ty::method(tcx, method_def_id);
@@ -1257,13 +1257,13 @@ fn create_index<T:Clone + Hash + IterBytes + 'static>(
 -> ~[@~[entry<T>]] {
 let mut buckets: ~[@mut ~[entry<T>]] = ~[];
 for uint::range(0u, 256u) |_i| { buckets.push(@mut ~[]); };
-for index.iter().advance |elt| {
+foreach elt in index.iter() {
 let h = elt.val.hash() as uint;
 buckets[h % 256].push((*elt).clone());
 }

 let mut buckets_frozen = ~[];
-for buckets.iter().advance |bucket| {
+foreach bucket in buckets.iter() {
 buckets_frozen.push(@/*bad*/(**bucket).clone());
 }
 return buckets_frozen;
@@ -1277,10 +1277,10 @@ fn encode_index<T:'static>(
 ebml_w.start_tag(tag_index);
 let mut bucket_locs: ~[uint] = ~[];
 ebml_w.start_tag(tag_index_buckets);
-for buckets.iter().advance |bucket| {
+foreach bucket in buckets.iter() {
 bucket_locs.push(ebml_w.writer.tell());
 ebml_w.start_tag(tag_index_buckets_bucket);
-for (**bucket).iter().advance |elt| {
+foreach elt in (**bucket).iter() {
 ebml_w.start_tag(tag_index_buckets_bucket_elt);
 assert!(elt.pos < 0xffff_ffff);
 writer.write_be_u32(elt.pos as u32);
@@ -1291,7 +1291,7 @@ fn encode_index<T:'static>(
 }
 ebml_w.end_tag();
 ebml_w.start_tag(tag_index_table);
-for bucket_locs.iter().advance |pos| {
+foreach pos in bucket_locs.iter() {
 assert!(*pos < 0xffff_ffff);
 writer.write_be_u32(*pos as u32);
 }
@@ -1337,7 +1337,7 @@ fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @MetaItem) {
 ebml_w.start_tag(tag_meta_item_name);
 ebml_w.writer.write(name.as_bytes());
 ebml_w.end_tag();
-for items.iter().advance |inner_item| {
+foreach inner_item in items.iter() {
 encode_meta_item(ebml_w, *inner_item);
 }
 ebml_w.end_tag();
@@ -1347,7 +1347,7 @@ fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @MetaItem) {

 fn encode_attributes(ebml_w: &mut writer::Encoder, attrs: &[Attribute]) {
 ebml_w.start_tag(tag_attributes);
-for attrs.iter().advance |attr| {
+foreach attr in attrs.iter() {
 ebml_w.start_tag(tag_attribute);
 encode_meta_item(ebml_w, attr.node.value);
 ebml_w.end_tag();
@@ -1389,7 +1389,7 @@ fn synthesize_crate_attrs(ecx: &EncodeContext,

 let mut attrs = ~[];
 let mut found_link_attr = false;
-for crate.attrs.iter().advance |attr| {
+foreach attr in crate.attrs.iter() {
 attrs.push(
 if "link" != attr.name() {
 *attr
@@ -1431,7 +1431,7 @@ fn encode_crate_deps(ecx: &EncodeContext,

 // Sanity-check the crate numbers
 let mut expected_cnum = 1;
-for deps.iter().advance |n| {
+foreach n in deps.iter() {
 assert_eq!(n.cnum, expected_cnum);
 expected_cnum += 1;
 }
@@ -1445,7 +1445,7 @@ fn encode_crate_deps(ecx: &EncodeContext,
 // but is enough to get transitive crate dependencies working.
 ebml_w.start_tag(tag_crate_deps);
 let r = get_ordered_deps(ecx, cstore);
-for r.iter().advance |dep| {
+foreach dep in r.iter() {
 encode_crate_dep(ecx, ebml_w, *dep);
 }
 ebml_w.end_tag();
@@ -1482,7 +1482,7 @@ fn encode_link_args(ecx: &EncodeContext, ebml_w: &mut writer::Encoder) {
 ebml_w.start_tag(tag_link_args);

 let link_args = cstore::get_used_link_args(ecx.cstore);
-for link_args.iter().advance |link_arg| {
+foreach link_arg in link_args.iter() {
 ebml_w.start_tag(tag_link_args_arg);
 ebml_w.writer.write_str(link_arg.to_str());
 ebml_w.end_tag();
@@ -1496,7 +1496,7 @@ fn encode_misc_info(ecx: &EncodeContext,
 ebml_w: &mut writer::Encoder) {
 ebml_w.start_tag(tag_misc_info);
 ebml_w.start_tag(tag_misc_info_crate_items);
-for crate.module.items.iter().advance |&item| {
+foreach &item in crate.module.items.iter() {
 ebml_w.start_tag(tag_mod_child);
 ebml_w.wr_str(def_to_str(local_def(item.id)));
 ebml_w.end_tag();
@@ -1632,7 +1632,7 @@ pub fn encode_metadata(parms: EncodeParams, crate: &Crate) -> ~[u8] {
 ecx.stats.total_bytes = *wr.pos;

 if (tcx.sess.meta_stats()) {
-for wr.bytes.iter().advance |e| {
+foreach e in wr.bytes.iter() {
 if *e == 0 {
 ecx.stats.zero_bytes += 1;
 }

@@ -91,7 +91,7 @@ pub fn search<T>(filesearch: @FileSearch, pick: pick<T>) -> Option<T> {
 for filesearch.for_each_lib_search_path() |lib_search_path| {
 debug!("searching %s", lib_search_path.to_str());
 let r = os::list_dir_path(lib_search_path);
-for r.iter().advance |path| {
+foreach path in r.iter() {
 debug!("testing %s", path.to_str());
 let maybe_picked = pick(path);
 if maybe_picked.is_some() {

@@ -128,7 +128,7 @@ fn find_library_crate_aux(
 cx.diag.span_err(
 cx.span, fmt!("multiple matching crates for `%s`", crate_name));
 cx.diag.handler().note("candidates:");
-for matches.iter().advance |pair| {
+foreach pair in matches.iter() {
 let ident = pair.first();
 let data = pair.second();
 cx.diag.handler().note(fmt!("path: %s", ident));
@@ -142,7 +142,7 @@ fn find_library_crate_aux(
 }

 pub fn crate_name_from_metas(metas: &[@ast::MetaItem]) -> @str {
-for metas.iter().advance |m| {
+foreach m in metas.iter() {
 match m.name_str_pair() {
 Some((name, s)) if "name" == name => { return s; }
 _ => {}
@@ -155,7 +155,7 @@ pub fn note_linkage_attrs(intr: @ident_interner,
 diag: @span_handler,
 attrs: ~[ast::Attribute]) {
 let r = attr::find_linkage_metas(attrs);
-for r.iter().advance |mi| {
+foreach mi in r.iter() {
 diag.handler().note(fmt!("meta: %s", pprust::meta_item_to_str(*mi,intr)));
 }
 }

@@ -123,7 +123,7 @@ fn enc_substs(w: @io::Writer, cx: @ctxt, substs: &ty::substs) {
 enc_region_substs(w, cx, &substs.regions);
 do enc_opt(w, substs.self_ty) |t| { enc_ty(w, cx, t) }
 w.write_char('[');
-for substs.tps.iter().advance |t| { enc_ty(w, cx, *t); }
+foreach t in substs.tps.iter() { enc_ty(w, cx, *t); }
 w.write_char(']');
 }

@@ -134,7 +134,7 @@ fn enc_region_substs(w: @io::Writer, cx: @ctxt, substs: &ty::RegionSubsts) {
 }
 ty::NonerasedRegions(ref regions) => {
 w.write_char('n');
-for regions.iter().advance |&r| {
+foreach &r in regions.iter() {
 enc_region(w, cx, r);
 }
 w.write_char('.');
@@ -288,7 +288,7 @@ fn enc_sty(w: @io::Writer, cx: @ctxt, st: &ty::sty) {
 }
 ty::ty_tup(ref ts) => {
 w.write_str(&"T[");
-for ts.iter().advance |t| { enc_ty(w, cx, *t); }
+foreach t in ts.iter() { enc_ty(w, cx, *t); }
 w.write_char(']');
 }
 ty::ty_box(mt) => { w.write_char('@'); enc_mt(w, cx, mt); }
@@ -404,7 +404,7 @@ fn enc_closure_ty(w: @io::Writer, cx: @ctxt, ft: &ty::ClosureTy) {

 fn enc_fn_sig(w: @io::Writer, cx: @ctxt, fsig: &ty::FnSig) {
 w.write_char('[');
-for fsig.inputs.iter().advance |ty| {
+foreach ty in fsig.inputs.iter() {
 enc_ty(w, cx, *ty);
 }
 w.write_char(']');
@@ -421,7 +421,7 @@ fn enc_bounds(w: @io::Writer, cx: @ctxt, bs: &ty::ParamBounds) {
 }
 }

-for bs.trait_bounds.iter().advance |&tp| {
+foreach &tp in bs.trait_bounds.iter() {
 w.write_char('I');
 enc_trait_ref(w, cx, tp);
 }

@@ -855,7 +855,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,

 {
 let r = tcx.def_map.find(&id);
-for r.iter().advance |def| {
+foreach def in r.iter() {
 do ebml_w.tag(c::tag_table_def) |ebml_w| {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) |ebml_w| {
@@ -867,7 +867,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,

 {
 let r = tcx.node_types.find(&(id as uint));
-for r.iter().advance |&ty| {
+foreach &ty in r.iter() {
 do ebml_w.tag(c::tag_table_node_type) |ebml_w| {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) |ebml_w| {
@@ -879,7 +879,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,

 {
 let r = tcx.node_type_substs.find(&id);
-for r.iter().advance |tys| {
+foreach tys in r.iter() {
 do ebml_w.tag(c::tag_table_node_type_subst) |ebml_w| {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) |ebml_w| {
@@ -891,7 +891,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,

 {
 let r = tcx.freevars.find(&id);
-for r.iter().advance |&fv| {
+foreach &fv in r.iter() {
 do ebml_w.tag(c::tag_table_freevars) |ebml_w| {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) |ebml_w| {
@@ -906,7 +906,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
 let lid = ast::def_id { crate: ast::LOCAL_CRATE, node: id };
 {
 let r = tcx.tcache.find(&lid);
-for r.iter().advance |&tpbt| {
+foreach &tpbt in r.iter() {
 do ebml_w.tag(c::tag_table_tcache) |ebml_w| {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) |ebml_w| {
@@ -918,7 +918,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,

 {
 let r = tcx.ty_param_defs.find(&id);
-for r.iter().advance |&type_param_def| {
+foreach &type_param_def in r.iter() {
 do ebml_w.tag(c::tag_table_param_defs) |ebml_w| {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) |ebml_w| {
@@ -930,7 +930,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,

 {
 let r = maps.method_map.find(&id);
-for r.iter().advance |&mme| {
+foreach &mme in r.iter() {
 do ebml_w.tag(c::tag_table_method_map) |ebml_w| {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) |ebml_w| {
@@ -942,7 +942,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,

 {
 let r = maps.vtable_map.find(&id);
-for r.iter().advance |&dr| {
+foreach &dr in r.iter() {
 do ebml_w.tag(c::tag_table_vtable_map) |ebml_w| {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) |ebml_w| {
@@ -954,7 +954,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,

 {
 let r = tcx.adjustments.find(&id);
-for r.iter().advance |adj| {
+foreach adj in r.iter() {
 do ebml_w.tag(c::tag_table_adjustments) |ebml_w| {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) |ebml_w| {
@@ -966,7 +966,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,

 {
 let r = maps.capture_map.find(&id);
-for r.iter().advance |&cap_vars| {
+foreach &cap_vars in r.iter() {
 do ebml_w.tag(c::tag_table_capture_map) |ebml_w| {
 ebml_w.id(id);
 do ebml_w.tag(c::tag_table_val) |ebml_w| {

@@ -118,7 +118,7 @@ impl<'self> CheckLoanCtxt<'self> {
 //! given `loan_path`

 for self.each_in_scope_loan(scope_id) |loan| {
-for loan.restrictions.iter().advance |restr| {
+foreach restr in loan.restrictions.iter() {
 if restr.loan_path == loan_path {
 if !op(loan, restr) {
 return false;
@@ -152,7 +152,7 @@ impl<'self> CheckLoanCtxt<'self> {
 debug!("new_loan_indices = %?", new_loan_indices);

 for self.each_issued_loan(scope_id) |issued_loan| {
-for new_loan_indices.iter().advance |&new_loan_index| {
+foreach &new_loan_index in new_loan_indices.iter() {
 let new_loan = &self.all_loans[new_loan_index];
 self.report_error_if_loans_conflict(issued_loan, new_loan);
 }
@@ -210,7 +210,7 @@ impl<'self> CheckLoanCtxt<'self> {
 };
 debug!("illegal_if=%?", illegal_if);

-for loan1.restrictions.iter().advance |restr| {
+foreach restr in loan1.restrictions.iter() {
 if !restr.set.intersects(illegal_if) { loop; }
 if restr.loan_path != loan2.loan_path { loop; }

@@ -639,7 +639,7 @@ fn check_loans_in_fn<'a>(fk: &visit::fn_kind,
 closure_id: ast::NodeId,
 span: span) {
 let cap_vars = this.bccx.capture_map.get(&closure_id);
-for cap_vars.iter().advance |cap_var| {
+foreach cap_var in cap_vars.iter() {
 let var_id = ast_util::def_id_of_def(cap_var.def).node;
 let var_path = @LpVar(var_id);
 this.check_if_path_is_moved(closure_id, span,
@@ -700,7 +700,7 @@ fn check_loans_in_expr<'a>(expr: @ast::expr,
 let cmt = this.bccx.cat_expr_unadjusted(expr);
 debug!("path cmt=%s", cmt.repr(this.tcx()));
 let r = opt_loan_path(cmt);
-for r.iter().advance |&lp| {
+foreach &lp in r.iter() {
 this.check_if_path_is_moved(expr.id, expr.span, MovedInUse, lp);
 }
 }

@@ -70,7 +70,7 @@ pub fn gather_captures(bccx: @BorrowckCtxt,
 move_data: &mut MoveData,
 closure_expr: @ast::expr) {
 let captured_vars = bccx.capture_map.get(&closure_expr.id);
-for captured_vars.iter().advance |captured_var| {
+foreach captured_var in captured_vars.iter() {
 match captured_var.mode {
 moves::CapMove => {
 let fvar_id = ast_util::def_id_of_def(captured_var.def).node;

@@ -187,7 +187,7 @@ fn gather_loans_in_expr(ex: @ast::expr,

 {
 let r = ex.get_callee_id();
-for r.iter().advance |callee_id| {
+foreach callee_id in r.iter() {
 this.id_range.add(*callee_id);
 }
 }
@@ -195,7 +195,7 @@ fn gather_loans_in_expr(ex: @ast::expr,
 // If this expression is borrowed, have to ensure it remains valid:
 {
 let r = tcx.adjustments.find(&ex.id);
-for r.iter().advance |&adjustments| {
+foreach &adjustments in r.iter() {
 this.guarantee_adjustments(ex, *adjustments);
 }
 }
@@ -238,8 +238,8 @@ fn gather_loans_in_expr(ex: @ast::expr,

 ast::expr_match(ex_v, ref arms) => {
 let cmt = this.bccx.cat_expr(ex_v);
-for arms.iter().advance |arm| {
+foreach arm in arms.iter() {
-for arm.pats.iter().advance |pat| {
+foreach pat in arm.pats.iter() {
 this.gather_pat(cmt, *pat, Some((arm.body.id, ex.id)));
 }
 }
@@ -617,7 +617,7 @@ impl GatherLoanCtxt {
 */

 let mc_ctxt = self.bccx.mc_ctxt();
-for decl.inputs.iter().advance |arg| {
+foreach arg in decl.inputs.iter() {
 let arg_ty = ty::node_id_to_type(self.tcx(), arg.pat.id);

 let arg_cmt = mc_ctxt.cat_rvalue(

@@ -139,7 +139,7 @@ impl RestrictionsContext {
 // static errors. For example, if there is code like
 //
 // let v = @mut ~[1, 2, 3];
-// for v.iter().advance |e| {
+// foreach e in v.iter() {
 // v.push(e + 1);
 // }
 //
@@ -151,7 +151,7 @@ impl RestrictionsContext {
 //
 // let v = @mut ~[1, 2, 3];
 // let w = v;
-// for v.iter().advance |e| {
+// foreach e in v.iter() {
 // w.push(e + 1);
 // }
 //
@@ -164,7 +164,7 @@ impl RestrictionsContext {
 // }
 // ...
 // let v: &V = ...;
-// for v.get_list().iter().advance |e| {
+// foreach e in v.get_list().iter() {
 // v.get_list().push(e + 1);
 // }
 match opt_loan_path(cmt_base) {

@@ -139,7 +139,7 @@ fn borrowck_fn(fk: &visit::fn_kind,
 LoanDataFlowOperator,
 id_range,
 all_loans.len());
-for all_loans.iter().enumerate().advance |(loan_idx, loan)| {
+foreach (loan_idx, loan) in all_loans.iter().enumerate() {
 loan_dfcx.add_gen(loan.gen_scope, loan_idx);
 loan_dfcx.add_kill(loan.kill_scope, loan_idx);
 }

@@ -371,22 +371,22 @@ impl MoveData {
 * killed by scoping. See `doc.rs` for more details.
 */

-for self.moves.iter().enumerate().advance |(i, move)| {
+foreach (i, move) in self.moves.iter().enumerate() {
 dfcx_moves.add_gen(move.id, i);
 }

-for self.var_assignments.iter().enumerate().advance |(i, assignment)| {
+foreach (i, assignment) in self.var_assignments.iter().enumerate() {
 dfcx_assign.add_gen(assignment.id, i);
 self.kill_moves(assignment.path, assignment.id, dfcx_moves);
 }

-for self.path_assignments.iter().advance |assignment| {
+foreach assignment in self.path_assignments.iter() {
 self.kill_moves(assignment.path, assignment.id, dfcx_moves);
 }

 // Kill all moves related to a variable `x` when it goes out
 // of scope:
-for self.paths.iter().advance |path| {
+foreach path in self.paths.iter() {
 match *path.loan_path {
 LpVar(id) => {
 let kill_id = tcx.region_maps.encl_scope(id);
@@ -398,7 +398,7 @@ impl MoveData {
 }

 // Kill all assignments when the variable goes out of scope:
-for self.var_assignments.iter().enumerate().advance |(assignment_index, assignment)| {
+foreach (assignment_index, assignment) in self.var_assignments.iter().enumerate() {
 match *self.path(assignment.path).loan_path {
 LpVar(id) => {
 let kill_id = tcx.region_maps.encl_scope(id);
@@ -557,7 +557,7 @@ impl FlowedMoveData {
 loop;
 }

-for opt_loan_path_index.iter().advance |&loan_path_index| {
+foreach &loan_path_index in opt_loan_path_index.iter() {
 for self.move_data.each_base_path(moved_path) |p| {
 if p == loan_path_index {
 // Scenario 3: some extension of `loan_path`

@@ -53,7 +53,7 @@ pub fn construct(tcx: ty::ctxt,
 impl CFGBuilder {
 fn block(&mut self, blk: &ast::Block, pred: CFGIndex) -> CFGIndex {
 let mut stmts_exit = pred;
-for blk.stmts.iter().advance |&stmt| {
+foreach &stmt in blk.stmts.iter() {
 stmts_exit = self.stmt(stmt, stmts_exit);
 }

@@ -151,7 +151,7 @@ impl CFGBuilder {
 self.pat(pats[0], pred)
 } else {
 let collect = self.add_dummy_node([]);
-for pats.iter().advance |&pat| {
+foreach &pat in pats.iter() {
 let pat_exit = self.pat(pat, pred);
 self.add_contained_edge(pat_exit, collect);
 }
@@ -297,7 +297,7 @@ impl CFGBuilder {

 let expr_exit = self.add_node(expr.id, []);
 let mut guard_exit = discr_exit;
-for arms.iter().advance |arm| {
+foreach arm in arms.iter() {
 guard_exit = self.opt_expr(arm.guard, guard_exit); // 2
 let pats_exit = self.pats_any(arm.pats, guard_exit); // 3
 let body_exit = self.block(&arm.body, pats_exit); // 4
@@ -460,7 +460,7 @@ impl CFGBuilder {
 assert!(!self.exit_map.contains_key(&id));
 let node = self.graph.add_node(CFGNodeData {id: id});
 self.exit_map.insert(id, node);
-for preds.iter().advance |&pred| {
+foreach &pred in preds.iter() {
 self.add_contained_edge(pred, node);
 }
 node
@@ -498,7 +498,7 @@ impl CFGBuilder {
 Some(_) => {
 match self.tcx.def_map.find(&expr.id) {
 Some(&ast::def_label(loop_id)) => {
-for self.loop_scopes.iter().advance |l| {
+foreach l in self.loop_scopes.iter() {
 if l.loop_id == loop_id {
 return *l;
 }

@@ -47,8 +47,8 @@ pub fn check_item(sess: Session,
 check_item_recursion(sess, ast_map, def_map, it);
 }
 item_enum(ref enum_definition, _) => {
-for (*enum_definition).variants.iter().advance |var| {
+foreach var in (*enum_definition).variants.iter() {
-for var.node.disr_expr.iter().advance |ex| {
+foreach ex in var.node.disr_expr.iter() {
 (v.visit_expr)(*ex, (true, v));
 }
 }

@@ -55,7 +55,7 @@ pub fn check_expr(cx: @MatchCheckCtxt, ex: @expr, (s, v): ((), visit::vt<()>)) {
 match ex.node {
 expr_match(scrut, ref arms) => {
 // First, check legality of move bindings.
-for arms.iter().advance |arm| {
+foreach arm in arms.iter() {
 check_legality_of_move_bindings(cx,
 arm.guard.is_some(),
 arm.pats);
@@ -100,8 +100,8 @@ pub fn check_expr(cx: @MatchCheckCtxt, ex: @expr, (s, v): ((), visit::vt<()>)) {
 // Check for unreachable patterns
 pub fn check_arms(cx: &MatchCheckCtxt, arms: &[arm]) {
 let mut seen = ~[];
-for arms.iter().advance |arm| {
+foreach arm in arms.iter() {
-for arm.pats.iter().advance |pat| {
+foreach pat in arm.pats.iter() {

 // Check that we do not match against a static NaN (#6804)
 let pat_matches_nan: &fn(@pat) -> bool = |p| {
@@ -243,7 +243,7 @@ pub fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@pat]) -> useful {
 }
 }
 ty::ty_enum(eid, _) => {
-for (*ty::enum_variants(cx.tcx, eid)).iter().advance |va| {
+foreach va in (*ty::enum_variants(cx.tcx, eid)).iter() {
 match is_useful_specialized(cx, m, v, variant(va.id),
 va.args.len(), left_ty) {
 not_useful => (),
@@ -365,16 +365,16 @@ pub fn missing_ctor(cx: &MatchCheckCtxt,
 match ty::get(left_ty).sty {
 ty::ty_box(_) | ty::ty_uniq(_) | ty::ty_rptr(*) | ty::ty_tup(_) |
 ty::ty_struct(*) => {
-for m.iter().advance |r| {
+foreach r in m.iter() {
 if !is_wild(cx, r[0]) { return None; }
 }
 return Some(single);
 }
 ty::ty_enum(eid, _) => {
 let mut found = ~[];
-for m.iter().advance |r| {
+foreach r in m.iter() {
 let r = pat_ctor_id(cx, r[0]);
-for r.iter().advance |id| {
+foreach id in r.iter() {
 if !found.contains(id) {
 found.push(*id);
 }
@@ -382,7 +382,7 @@ pub fn missing_ctor(cx: &MatchCheckCtxt,
 }
 let variants = ty::enum_variants(cx.tcx, eid);
 if found.len() != (*variants).len() {
-for (*variants).iter().advance |v| {
+foreach v in (*variants).iter() {
 if !found.iter().any(|x| x == &(variant(v.id))) {
 return Some(variant(v.id));
 }
@@ -394,7 +394,7 @@ pub fn missing_ctor(cx: &MatchCheckCtxt,
 ty::ty_bool => {
 let mut true_found = false;
 let mut false_found = false;
-for m.iter().advance |r| {
+foreach r in m.iter() {
 match pat_ctor_id(cx, r[0]) {
 None => (),
 Some(val(const_bool(true))) => true_found = true,
@@ -434,7 +434,7 @@ pub fn missing_ctor(cx: &MatchCheckCtxt,
 let mut found_slice = false;
 let mut next = 0;
 let mut missing = None;
-for sorted_vec_lens.iter().advance |&(length, slice)| {
+foreach &(length, slice) in sorted_vec_lens.iter() {
 if length != next {
 missing = Some(next);
 break;
@@ -781,7 +781,7 @@ pub fn check_fn(cx: &MatchCheckCtxt,
 (s, v): ((),
 visit::vt<()>)) {
 visit::visit_fn(kind, decl, body, sp, id, (s, v));
-for decl.inputs.iter().advance |input| {
+foreach input in decl.inputs.iter() {
 if is_refutable(cx, input.pat) {
 cx.tcx.sess.span_err(input.pat.span,
 "refutable pattern in function argument");
@@ -834,7 +834,7 @@ pub fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
 let def_map = tcx.def_map;
 let mut by_ref_span = None;
 let mut any_by_move = false;
-for pats.iter().advance |pat| {
+foreach pat in pats.iter() {
 do pat_bindings(def_map, *pat) |bm, id, span, _path| {
 match bm {
 bind_by_ref(_) => {
@@ -871,7 +871,7 @@ pub fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
 };

 if !any_by_move { return; } // pointless micro-optimization
-for pats.iter().advance |pat| {
+foreach pat in pats.iter() {
 for walk_pat(*pat) |p| {
 if pat_is_binding(def_map, p) {
 match p.node {

@@ -176,7 +176,7 @@ pub fn lookup_variant_by_id(tcx: ty::ctxt,
 variant_def: ast::def_id)
 -> Option<@expr> {
 fn variant_expr(variants: &[ast::variant], id: ast::NodeId) -> Option<@expr> {
-for variants.iter().advance |variant| {
+foreach variant in variants.iter() {
 if variant.node.id == id {
 return variant.node.disr_expr;
 }
@@ -266,7 +266,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
 f: &fn(uint) -> bool) -> bool {
 //! Helper for iterating over the bits in a bit set.

-for words.iter().enumerate().advance |(word_index, &word)| {
+foreach (word_index, &word) in words.iter().enumerate() {
 if word != 0 {
 let base_index = word_index * uint::bits;
 for uint::range(0, uint::bits) |offset| {
@@ -391,7 +391,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {

 self.merge_with_entry_set(blk.id, in_out);

-for blk.stmts.iter().advance |&stmt| {
+foreach &stmt in blk.stmts.iter() {
 self.walk_stmt(stmt, in_out, loop_scopes);
 }

@@ -512,7 +512,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
 loop_kind: ForLoop,
 break_bits: reslice(in_out).to_owned()
 });
-for decl.inputs.iter().advance |input| {
+foreach input in decl.inputs.iter() {
 self.walk_pat(input.pat, func_bits, loop_scopes);
 }
 self.walk_block(body, func_bits, loop_scopes);
@@ -631,7 +631,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
 // together the bits from each arm:
 self.reset(in_out);

-for arms.iter().advance |arm| {
+foreach arm in arms.iter() {
 // in_out reflects the discr and all guards to date
 self.walk_opt_expr(arm.guard, guards, loop_scopes);

@@ -706,7 +706,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
 }

 ast::expr_struct(_, ref fields, with_expr) => {
-for fields.iter().advance |field| {
+foreach field in fields.iter() {
 self.walk_expr(field.expr, in_out, loop_scopes);
 }
 self.walk_opt_expr(with_expr, in_out, loop_scopes);
@@ -767,10 +767,10 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
 }

 ast::expr_inline_asm(ref inline_asm) => {
-for inline_asm.inputs.iter().advance |&(_, expr)| {
+foreach &(_, expr) in inline_asm.inputs.iter() {
 self.walk_expr(expr, in_out, loop_scopes);
 }
-for inline_asm.outputs.iter().advance |&(_, expr)| {
+foreach &(_, expr) in inline_asm.outputs.iter() {
 self.walk_expr(expr, in_out, loop_scopes);
 }
 }
@@ -838,7 +838,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
 exprs: &[@ast::expr],
 in_out: &mut [uint],
 loop_scopes: &mut ~[LoopScope]) {
-for exprs.iter().advance |&expr| {
+foreach &expr in exprs.iter() {
 self.walk_expr(expr, in_out, loop_scopes);
 }
 }
@@ -847,7 +847,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
 opt_expr: Option<@ast::expr>,
 in_out: &mut [uint],
 loop_scopes: &mut ~[LoopScope]) {
-for opt_expr.iter().advance |&expr| {
+foreach &expr in opt_expr.iter() {
 self.walk_expr(expr, in_out, loop_scopes);
 }
 }
@@ -900,7 +900,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
 // alternatives, so we must treat this like an N-way select
 // statement.
 let initial_state = reslice(in_out).to_owned();
-for pats.iter().advance |&pat| {
+foreach &pat in pats.iter() {
 let mut temp = initial_state.clone();
 self.walk_pat(pat, temp, loop_scopes);
 join_bits(&self.dfcx.oper, temp, in_out);
@@ -948,7 +948,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {

 fn reset(&mut self, bits: &mut [uint]) {
 let e = if self.dfcx.oper.initial_value() {uint::max_value} else {0};
-for bits.mut_iter().advance |b| { *b = e; }
+foreach b in bits.mut_iter() { *b = e; }
 }

 fn add_to_entry_set(&mut self, id: ast::NodeId, pred_bits: &[uint]) {
@@ -996,7 +996,7 @@ fn bits_to_str(words: &[uint]) -> ~str {

 // Note: this is a little endian printout of bytes.

-for words.iter().advance |&word| {
+foreach &word in words.iter() {
 let mut v = word;
 for uint::range(0, uint::bytes) |_| {
 result.push_char(sep);
@@ -137,7 +137,7 @@ fn configure_main(ctxt: @mut EntryContext) {
 but you have one or more functions named 'main' that are not \
 defined at the crate level. Either move the definition or \
 attach the `#[main]` attribute to override this behavior.");
-for this.non_main_fns.iter().advance |&(_, span)| {
+foreach &(_, span) in this.non_main_fns.iter() {
 this.session.span_note(span, "here is a function named 'main'");
 }
 }
@@ -251,7 +251,7 @@ impl<N,E> Graph<N,E> {
 while changed {
 changed = false;
 iteration += 1;
-for self.edges.iter().enumerate().advance |(i, edge)| {
+foreach (i, edge) in self.edges.iter().enumerate() {
 changed |= op(iteration, EdgeIndex(i), edge);
 }
 }
@@ -237,7 +237,7 @@ fn check_fn(
 // Check kinds on free variables:
 do with_appropriate_checker(cx, fn_id) |chk| {
 let r = freevars::get_freevars(cx.tcx, fn_id);
-for r.iter().advance |fv| {
+foreach fv in r.iter() {
 chk(cx, *fv);
 }
 }
@@ -255,7 +255,7 @@ pub fn check_expr(e: @expr, (cx, v): (Context, visit::vt<Context>)) {
 };
 {
 let r = cx.tcx.node_type_substs.find(&type_parameter_id);
-for r.iter().advance |ts| {
+foreach ts in r.iter() {
 let type_param_defs = match e.node {
 expr_path(_) => {
 let did = ast_util::def_id_of_def(cx.tcx.def_map.get_copy(&e.id));
@@ -279,7 +279,7 @@ pub fn check_expr(e: @expr, (cx, v): (Context, visit::vt<Context>)) {
 ts.repr(cx.tcx),
 type_param_defs.repr(cx.tcx));
 }
-for ts.iter().zip(type_param_defs.iter()).advance |(&ty, type_param_def)| {
+foreach (&ty, type_param_def) in ts.iter().zip(type_param_defs.iter()) {
 check_typaram_bounds(cx, type_parameter_id, e.span, ty, type_param_def)
 }
 }
@@ -317,11 +317,11 @@ fn check_ty(aty: &Ty, (cx, v): (Context, visit::vt<Context>)) {
 match aty.node {
 ty_path(_, _, id) => {
 let r = cx.tcx.node_type_substs.find(&id);
-for r.iter().advance |ts| {
+foreach ts in r.iter() {
 let did = ast_util::def_id_of_def(cx.tcx.def_map.get_copy(&id));
 let type_param_defs =
 ty::lookup_item_type(cx.tcx, did).generics.type_param_defs;
-for ts.iter().zip(type_param_defs.iter()).advance |(&ty, type_param_def)| {
+foreach (&ty, type_param_def) in ts.iter().zip(type_param_defs.iter()) {
 check_typaram_bounds(cx, aty.id, aty.span, ty, type_param_def)
 }
 }
@@ -412,7 +412,7 @@ impl<'self> LanguageItemCollector<'self> {
 let this: *mut LanguageItemCollector = &mut *self;
 visit_crate(self.crate, ((), mk_simple_visitor(@SimpleVisitor {
 visit_item: |item| {
-for item.attrs.iter().advance |attribute| {
+foreach attribute in item.attrs.iter() {
 unsafe {
 (*this).match_and_collect_meta_item(
 local_def(item.id),
@@ -284,7 +284,7 @@ static lint_table: &'static [(&'static str, LintSpec)] = &[
 */
 pub fn get_lint_dict() -> LintDict {
 let mut map = HashMap::new();
-for lint_table.iter().advance |&(k, v)| {
+foreach &(k, v) in lint_table.iter() {
 map.insert(k, v);
 }
 return map;
@@ -348,7 +348,7 @@ impl Context {
 }

 fn lint_to_str(&self, lint: lint) -> &'static str {
-for self.dict.iter().advance |(k, v)| {
+foreach (k, v) in self.dict.iter() {
 if v.lint == lint {
 return *k;
 }
@@ -384,7 +384,7 @@ impl Context {
 allow => fail!(),
 }

-for note.iter().advance |&span| {
+foreach &span in note.iter() {
 self.tcx.sess.span_note(span, "lint level defined here");
 }
 }
@@ -466,12 +466,12 @@ impl Context {
 // pair instead of just one visitor.
 match n {
 Item(it) => {
-for self.visitors.iter().advance |&(orig, stopping)| {
+foreach &(orig, stopping) in self.visitors.iter() {
 (orig.visit_item)(it, (self, stopping));
 }
 }
 Crate(c) => {
-for self.visitors.iter().advance |&(_, stopping)| {
+foreach &(_, stopping) in self.visitors.iter() {
 visit::visit_crate(c, (self, stopping));
 }
 }
@@ -480,7 +480,7 @@ impl Context {
 // to be a no-op, so manually invoke visit_fn.
 Method(m) => {
 let fk = visit::fk_method(m.ident, &m.generics, m);
-for self.visitors.iter().advance |&(orig, stopping)| {
+foreach &(orig, stopping) in self.visitors.iter() {
 (orig.visit_fn)(&fk, &m.decl, &m.body, m.span, m.id,
 (self, stopping));
 }
@@ -493,9 +493,9 @@ pub fn each_lint(sess: session::Session,
 attrs: &[ast::Attribute],
 f: &fn(@ast::MetaItem, level, @str) -> bool) -> bool {
 let xs = [allow, warn, deny, forbid];
-for xs.iter().advance |&level| {
+foreach &level in xs.iter() {
 let level_name = level_to_str(level);
-for attrs.iter().filter(|m| level_name == m.name()).advance |attr| {
+foreach attr in attrs.iter().filter(|m| level_name == m.name()) {
 let meta = attr.node.value;
 let metas = match meta.node {
 ast::MetaList(_, ref metas) => metas,
@@ -504,7 +504,7 @@ pub fn each_lint(sess: session::Session,
 loop;
 }
 };
-for metas.iter().advance |meta| {
+foreach meta in metas.iter() {
 match meta.node {
 ast::MetaWord(lintname) => {
 if !f(*meta, level, lintname) {
@@ -706,7 +706,7 @@ fn check_item_ctypes(cx: &Context, it: &ast::item) {
 }

 fn check_foreign_fn(cx: &Context, decl: &ast::fn_decl) {
-for decl.inputs.iter().advance |input| {
+foreach input in decl.inputs.iter() {
 check_ty(cx, &input.ty);
 }
 check_ty(cx, &decl.output)
@@ -714,7 +714,7 @@ fn check_item_ctypes(cx: &Context, it: &ast::item) {

 match it.node {
 ast::item_foreign_mod(ref nmod) if !nmod.abis.is_intrinsic() => {
-for nmod.items.iter().advance |ni| {
+foreach ni in nmod.items.iter() {
 match ni.node {
 ast::foreign_item_fn(ref decl, _, _) => {
 check_foreign_fn(cx, decl);
@@ -756,7 +756,7 @@ fn check_type_for_lint(cx: &Context, lint: lint, span: span, ty: ty::t) {

 fn check_type(cx: &Context, span: span, ty: ty::t) {
 let xs = [managed_heap_memory, owned_heap_memory, heap_memory];
-for xs.iter().advance |lint| {
+foreach lint in xs.iter() {
 check_type_for_lint(cx, *lint, span, ty);
 }
 }
@@ -775,7 +775,7 @@ fn check_item_heap(cx: &Context, it: &ast::item) {
 // If it's a struct, we also have to check the fields' types
 match it.node {
 ast::item_struct(struct_def, _) => {
-for struct_def.fields.iter().advance |struct_field| {
+foreach struct_field in struct_def.fields.iter() {
 check_type(cx, struct_field.span,
 ty::node_id_to_type(cx.tcx,
 struct_field.node.id));
@@ -845,7 +845,7 @@ fn check_item_non_camel_case_types(cx: &Context, it: &ast::item) {
 }
 ast::item_enum(ref enum_definition, _) => {
 check_case(cx, "type", it.ident, it.span);
-for enum_definition.variants.iter().advance |variant| {
+foreach variant in enum_definition.variants.iter() {
 check_case(cx, "variant", variant.node.name, variant.span);
 }
 }
@@ -907,7 +907,7 @@ fn lint_unused_mut() -> visit::vt<@mut Context> {
 }

 fn visit_fn_decl(cx: &Context, fd: &ast::fn_decl) {
-for fd.inputs.iter().advance |arg| {
+foreach arg in fd.inputs.iter() {
 if arg.is_mutbl {
 check_pat(cx, arg.pat);
 }
@@ -945,7 +945,7 @@ fn lint_session() -> visit::vt<@mut Context> {
 match cx.tcx.sess.lints.pop(&id) {
 None => {},
 Some(l) => {
-for l.consume_iter().advance |(lint, span, msg)| {
+foreach (lint, span, msg) in l.consume_iter() {
 cx.span_lint(lint, span, msg)
 }
 }
@@ -1042,7 +1042,7 @@ fn lint_missing_doc() -> visit::vt<@mut Context> {
 ast::item_struct(sdef, _) if it.vis == ast::public => {
 check_attrs(cx, it.attrs, it.span,
 "missing documentation for a struct");
-for sdef.fields.iter().advance |field| {
+foreach field in sdef.fields.iter() {
 match field.node.kind {
 ast::named_field(_, vis) if vis != ast::private => {
 check_attrs(cx, field.node.attrs, field.span,
@@ -1090,7 +1090,7 @@ pub fn check_crate(tcx: ty::ctxt, crate: @ast::Crate) {
 }

 // Install command-line options, overriding defaults.
-for tcx.sess.opts.lint_opts.iter().advance |&(lint, level)| {
+foreach &(lint, level) in tcx.sess.opts.lint_opts.iter() {
 cx.set_level(lint, level, CommandLine);
 }

@@ -1147,8 +1147,8 @@ pub fn check_crate(tcx: ty::ctxt, crate: @ast::Crate) {

 // If we missed any lints added to the session, then there's a bug somewhere
 // in the iteration code.
-for tcx.sess.lints.iter().advance |(_, v)| {
+foreach (_, v) in tcx.sess.lints.iter() {
-for v.iter().advance |t| {
+foreach t in v.iter() {
 match *t {
 (lint, span, ref msg) =>
 tcx.sess.span_bug(span, fmt!("unprocessed lint %?: %s",
@@ -360,7 +360,7 @@ fn visit_fn(fk: &visit::fn_kind,
 debug!("creating fn_maps: %x", transmute(&*fn_maps));
 }

-for decl.inputs.iter().advance |arg| {
+foreach arg in decl.inputs.iter() {
 do pat_util::pat_bindings(this.tcx.def_map, arg.pat)
 |_bm, arg_id, _x, path| {
 debug!("adding argument %d", arg_id);
@@ -436,7 +436,7 @@ fn visit_local(local: @Local, (this, vt): (@mut IrMaps, vt<@mut IrMaps>)) {

 fn visit_arm(arm: &arm, (this, vt): (@mut IrMaps, vt<@mut IrMaps>)) {
 let def_map = this.tcx.def_map;
-for arm.pats.iter().advance |pat| {
+foreach pat in arm.pats.iter() {
 do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| {
 debug!("adding local variable %d from match with bm %?",
 p_id, bm);
@@ -475,7 +475,7 @@ fn visit_expr(expr: @expr, (this, vt): (@mut IrMaps, vt<@mut IrMaps>)) {
 // construction site.
 let cvs = this.capture_map.get(&expr.id);
 let mut call_caps = ~[];
-for cvs.iter().advance |cv| {
+foreach cv in cvs.iter() {
 match moves::moved_variable_node_id_from_def(cv.def) {
 Some(rv) => {
 let cv_ln = this.add_live_node(FreeVarNode(cv.span));
@@ -1084,7 +1084,7 @@ impl Liveness {
 let ln = self.live_node(expr.id, expr.span);
 self.init_empty(ln, succ);
 let mut first_merge = true;
-for arms.iter().advance |arm| {
+foreach arm in arms.iter() {
 let body_succ =
 self.propagate_through_block(&arm.body, succ);
 let guard_succ =
@@ -1461,12 +1461,12 @@ fn check_expr(expr: @expr, (this, vt): (@Liveness, vt<@Liveness>)) {
 }

 expr_inline_asm(ref ia) => {
-for ia.inputs.iter().advance |&(_, input)| {
+foreach &(_, input) in ia.inputs.iter() {
 (vt.visit_expr)(input, (this, vt));
 }

 // Output operands must be lvalues
-for ia.outputs.iter().advance |&(_, out)| {
+foreach &(_, out) in ia.outputs.iter() {
 match out.node {
 expr_addr_of(_, inner) => {
 this.check_lvalue(inner, vt);
@@ -1603,7 +1603,7 @@ impl Liveness {
 }

 pub fn warn_about_unused_args(&self, decl: &fn_decl, entry_ln: LiveNode) {
-for decl.inputs.iter().advance |arg| {
+foreach arg in decl.inputs.iter() {
 do pat_util::pat_bindings(self.tcx.def_map, arg.pat)
 |_bm, p_id, sp, _n| {
 let var = self.variable(p_id, sp);
@@ -1628,7 +1628,7 @@ impl Liveness {
 -> bool {
 if !self.used_on_entry(ln, var) {
 let r = self.should_warn(var);
-for r.iter().advance |name| {
+foreach name in r.iter() {

 // annoying: for parameters in funcs like `fn(x: int)
 // {ret}`, there is only one node, so asking about
@@ -1661,7 +1661,7 @@ impl Liveness {
 var: Variable) {
 if self.live_on_exit(ln, var).is_none() {
 let r = self.should_warn(var);
-for r.iter().advance |name| {
+foreach name in r.iter() {
 self.tcx.sess.add_lint(dead_assignment, id, sp,
 fmt!("value assigned to `%s` is never read", *name));
 }
@@ -907,7 +907,7 @@ impl mem_categorization_ctxt {
 }
 };

-for subpats.iter().enumerate().advance |(i, &subpat)| {
+foreach (i, &subpat) in subpats.iter().enumerate() {
 let subpat_ty = self.pat_ty(subpat); // see (*)

 let subcmt =
@@ -920,7 +920,7 @@ impl mem_categorization_ctxt {
 }
 Some(&ast::def_fn(*)) |
 Some(&ast::def_struct(*)) => {
-for subpats.iter().enumerate().advance |(i, &subpat)| {
+foreach (i, &subpat) in subpats.iter().enumerate() {
 let subpat_ty = self.pat_ty(subpat); // see (*)
 let cmt_field =
 self.cat_imm_interior(
@@ -930,7 +930,7 @@ impl mem_categorization_ctxt {
 }
 }
 Some(&ast::def_static(*)) => {
-for subpats.iter().advance |&subpat| {
+foreach &subpat in subpats.iter() {
 self.cat_pattern(cmt, subpat, |x,y| op(x,y));
 }
 }
@@ -952,7 +952,7 @@ impl mem_categorization_ctxt {

 ast::pat_struct(_, ref field_pats, _) => {
 // {f1: p1, ..., fN: pN}
-for field_pats.iter().advance |fp| {
+foreach fp in field_pats.iter() {
 let field_ty = self.pat_ty(fp.pat); // see (*)
 let cmt_field = self.cat_field(pat, cmt, fp.ident, field_ty);
 self.cat_pattern(cmt_field, fp.pat, |x,y| op(x,y));
@@ -961,7 +961,7 @@ impl mem_categorization_ctxt {

 ast::pat_tup(ref subpats) => {
 // (p1, ..., pN)
-for subpats.iter().enumerate().advance |(i, &subpat)| {
+foreach (i, &subpat) in subpats.iter().enumerate() {
 let subpat_ty = self.pat_ty(subpat); // see (*)
 let subcmt =
 self.cat_imm_interior(
@@ -980,15 +980,15 @@ impl mem_categorization_ctxt {

 ast::pat_vec(ref before, slice, ref after) => {
 let elt_cmt = self.cat_index(pat, cmt, 0);
-for before.iter().advance |&before_pat| {
+foreach &before_pat in before.iter() {
 self.cat_pattern(elt_cmt, before_pat, |x,y| op(x,y));
 }
-for slice.iter().advance |&slice_pat| {
+foreach &slice_pat in slice.iter() {
 let slice_ty = self.pat_ty(slice_pat);
 let slice_cmt = self.cat_rvalue_node(pat, slice_ty);
 self.cat_pattern(slice_cmt, slice_pat, |x,y| op(x,y));
 }
-for after.iter().advance |&after_pat| {
+foreach &after_pat in after.iter() {
 self.cat_pattern(elt_cmt, after_pat, |x,y| op(x,y));
 }
 }
@@ -1077,7 +1077,7 @@ pub fn field_mutbl(tcx: ty::ctxt,
 match ty::get(base_ty).sty {
 ty::ty_struct(did, _) => {
 let r = ty::lookup_struct_fields(tcx, did);
-for r.iter().advance |fld| {
+foreach fld in r.iter() {
 if fld.ident == f_name {
 return Some(ast::m_imm);
 }
@@ -1087,7 +1087,7 @@ pub fn field_mutbl(tcx: ty::ctxt,
 match tcx.def_map.get_copy(&node_id) {
 ast::def_variant(_, variant_id) => {
 let r = ty::lookup_struct_fields(tcx, variant_id);
-for r.iter().advance |fld| {
+foreach fld in r.iter() {
 if fld.ident == f_name {
 return Some(ast::m_imm);
 }
@@ -231,7 +231,7 @@ fn compute_modes_for_local<'a>(local: @Local,
 (cx, v): (VisitContext,
 vt<VisitContext>)) {
 cx.use_pat(local.pat);
-for local.init.iter().advance |&init| {
+foreach &init in local.init.iter() {
 cx.use_expr(init, Read, v);
 }
 }
@@ -243,7 +243,7 @@ fn compute_modes_for_fn(fk: &visit::fn_kind,
 id: NodeId,
 (cx, v): (VisitContext,
 vt<VisitContext>)) {
-for decl.inputs.iter().advance |a| {
+foreach a in decl.inputs.iter() {
 cx.use_pat(a.pat);
 }
 visit::visit_fn(fk, decl, body, span, id, (cx, v));
@@ -258,7 +258,7 @@ fn compute_modes_for_expr(expr: @expr,

 impl VisitContext {
 pub fn consume_exprs(&self, exprs: &[@expr], visitor: vt<VisitContext>) {
-for exprs.iter().advance |expr| {
+foreach expr in exprs.iter() {
 self.consume_expr(*expr, visitor);
 }
 }
@@ -289,11 +289,11 @@ impl VisitContext {

 debug!("consume_block(blk.id=%?)", blk.id);

-for blk.stmts.iter().advance |stmt| {
+foreach stmt in blk.stmts.iter() {
 (visitor.visit_stmt)(*stmt, (*self, visitor));
 }

-for blk.expr.iter().advance |tail_expr| {
+foreach tail_expr in blk.expr.iter() {
 self.consume_expr(*tail_expr, visitor);
 }
 }
@@ -329,7 +329,7 @@ impl VisitContext {
 Move => {
 let def = self.tcx.def_map.get_copy(&expr.id);
 let r = moved_variable_node_id_from_def(def);
-for r.iter().advance |&id| {
+foreach &id in r.iter() {
 self.move_maps.moved_variables_set.insert(id);
 }
 }
@@ -393,11 +393,11 @@ impl VisitContext {
 }

 expr_struct(_, ref fields, opt_with) => {
-for fields.iter().advance |field| {
+foreach field in fields.iter() {
 self.consume_expr(field.expr, visitor);
 }

-for opt_with.iter().advance |with_expr| {
+foreach with_expr in opt_with.iter() {
 // If there are any fields whose type is move-by-default,
 // then `with` is consumed, otherwise it is only read
 let with_ty = ty::expr_ty(self.tcx, *with_expr);
@@ -436,7 +436,7 @@ impl VisitContext {
 expr_if(cond_expr, ref then_blk, opt_else_expr) => {
 self.consume_expr(cond_expr, visitor);
 self.consume_block(then_blk, visitor);
-for opt_else_expr.iter().advance |else_expr| {
+foreach else_expr in opt_else_expr.iter() {
 self.consume_expr(*else_expr, visitor);
 }
 }
@@ -444,7 +444,7 @@ impl VisitContext {
 expr_match(discr, ref arms) => {
 // We must do this first so that `arms_have_by_move_bindings`
 // below knows which bindings are moves.
-for arms.iter().advance |arm| {
+foreach arm in arms.iter() {
 self.consume_arm(arm, visitor);
 }

@@ -511,7 +511,7 @@ impl VisitContext {
 }

 expr_ret(ref opt_expr) => {
-for opt_expr.iter().advance |expr| {
+foreach expr in opt_expr.iter() {
 self.consume_expr(*expr, visitor);
 }
 }
@@ -547,7 +547,7 @@ impl VisitContext {
 }

 expr_fn_block(ref decl, ref body) => {
-for decl.inputs.iter().advance |a| {
+foreach a in decl.inputs.iter() {
 self.use_pat(a.pat);
 }
 let cap_vars = self.compute_captures(expr.id);
@@ -581,7 +581,7 @@ impl VisitContext {

 // for overloaded operatrs, we are always passing in a
 // borrowed pointer, so it's always read mode:
-for arg_exprs.iter().advance |arg_expr| {
+foreach arg_expr in arg_exprs.iter() {
 self.use_expr(*arg_expr, Read, visitor);
 }

@@ -589,11 +589,11 @@ impl VisitContext {
 }

 pub fn consume_arm(&self, arm: &arm, visitor: vt<VisitContext>) {
-for arm.pats.iter().advance |pat| {
+foreach pat in arm.pats.iter() {
 self.use_pat(*pat);
 }

-for arm.guard.iter().advance |guard| {
+foreach guard in arm.guard.iter() {
 self.consume_expr(*guard, visitor);
 }

@@ -640,7 +640,7 @@ impl VisitContext {
 arg_exprs: &[@expr],
 visitor: vt<VisitContext>) {
 //! Uses the argument expressions.
-for arg_exprs.iter().advance |arg_expr| {
+foreach arg_expr in arg_exprs.iter() {
 self.use_fn_arg(*arg_expr, visitor);
 }
 }
@@ -654,8 +654,8 @@ impl VisitContext {
 moves_map: MovesMap,
 arms: &[arm])
 -> Option<@pat> {
-for arms.iter().advance |arm| {
+foreach arm in arms.iter() {
-for arm.pats.iter().advance |&pat| {
+foreach &pat in arm.pats.iter() {
 for ast_util::walk_pat(pat) |p| {
 if moves_map.contains(&p.id) {
 return Some(p);
@@ -50,7 +50,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
 *count += 1;
 }
 item_impl(_, _, _, ref methods) => {
-for methods.iter().advance |method| {
+foreach method in methods.iter() {
 privileged_items.push(method.id);
 *count += 1;
 }
@@ -58,7 +58,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
 *count += 1;
 }
 item_foreign_mod(ref foreign_mod) => {
-for foreign_mod.items.iter().advance |foreign_item| {
+foreach foreign_item in foreign_mod.items.iter() {
 privileged_items.push(foreign_item.id);
 *count += 1;
 }
@@ -70,7 +70,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
 // Adds items that are privileged to this scope.
 let add_privileged_items: @fn(&[@ast::item]) -> uint = |items| {
 let mut count = 0;
-for items.iter().advance |&item| {
+foreach &item in items.iter() {
 add_privileged_item(item, &mut count);
 }
 count
@@ -206,7 +206,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
 let check_field: @fn(span: span, id: ast::def_id, ident: ast::ident) =
 |span, id, ident| {
 let fields = ty::lookup_struct_fields(tcx, id);
-for fields.iter().advance |field| {
+foreach field in fields.iter() {
 if field.ident != ident { loop; }
 if field.vis == private {
 tcx.sess.span_err(span, fmt!("field `%s` is private",
@@ -354,7 +354,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
 visit_block: |block, (method_map, visitor)| {
 // Gather up all the privileged items.
 let mut n_added = 0;
-for block.stmts.iter().advance |stmt| {
+foreach stmt in block.stmts.iter() {
 match stmt.node {
 stmt_decl(decl, _) => {
 match decl.node {
@@ -425,7 +425,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
 ty_struct(id, _) => {
 if id.crate != LOCAL_CRATE ||
 !privileged_items.iter().any(|x| x == &(id.node)) {
-for (*fields).iter().advance |field| {
+foreach field in (*fields).iter() {
 debug!("(privacy checking) checking \
 field in struct literal");
 check_field(expr.span, id, field.ident);
@@ -437,7 +437,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
 !privileged_items.iter().any(|x| x == &(id.node)) {
 match tcx.def_map.get_copy(&expr.id) {
 def_variant(_, variant_id) => {
-for (*fields).iter().advance |field| {
+foreach field in (*fields).iter() {
 debug!("(privacy checking) \
 checking field in \
 struct variant \
@@ -489,7 +489,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
 ty_struct(id, _) => {
 if id.crate != LOCAL_CRATE ||
 !privileged_items.iter().any(|x| x == &(id.node)) {
-for fields.iter().advance |field| {
+foreach field in fields.iter() {
 debug!("(privacy checking) checking \
 struct pattern");
 check_field(pattern.span, id, field.ident);
@@ -501,7 +501,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
 !privileged_items.iter().any(|x| x == &enum_id.node) {
 match tcx.def_map.find(&pattern.id) {
 Some(&def_variant(_, variant_id)) => {
-for fields.iter().advance |field| {
+foreach field in fields.iter() {
 debug!("(privacy checking) \
 checking field in \
 struct variant pattern");
@@ -136,7 +136,7 @@ impl ReachableContext {
 }
 item_enum(ref enum_def, _) => {
 if privacy_context == PublicContext {
-for enum_def.variants.iter().advance |variant| {
+foreach variant in enum_def.variants.iter() {
 reachable_symbols.insert(variant.node.id);
 }
 }
@@ -155,7 +155,7 @@ impl ReachableContext {
 };

 // Mark all public methods as reachable.
-for methods.iter().advance |&method| {
+foreach &method in methods.iter() {
 if should_be_considered_public(method) {
 reachable_symbols.insert(method.id);
 }
@@ -164,7 +164,7 @@ impl ReachableContext {
 if generics_require_inlining(generics) {
 // If the impl itself has generics, add all public
 // symbols to the worklist.
-for methods.iter().advance |&method| {
+foreach &method in methods.iter() {
 if should_be_considered_public(method) {
 worklist.push(method.id)
 }
@@ -172,7 +172,7 @@ impl ReachableContext {
 } else {
 // Otherwise, add only public methods that have
 // generics to the worklist.
-for methods.iter().advance |method| {
+foreach method in methods.iter() {
 let generics = &method.generics;
 let attrs = &method.attrs;
 if generics_require_inlining(generics) ||
@@ -186,7 +186,7 @@ impl ReachableContext {
 item_trait(_, _, ref trait_methods) => {
 // Mark all provided methods as reachable.
 if privacy_context == PublicContext {
-for trait_methods.iter().advance |trait_method| {
+foreach trait_method in trait_methods.iter() {
 match *trait_method {
 provided(method) => {
 reachable_symbols.insert(method.id);
@@ -198,7 +198,7 @@ impl RegionMaps {
 while i < queue.len() {
 match self.free_region_map.find(&queue[i]) {
 Some(parents) => {
-for parents.iter().advance |parent| {
+foreach parent in parents.iter() {
 if *parent == sup {
 return true;
 }
@@ -318,7 +318,7 @@ impl RegionMaps {
 fn parent_to_expr(cx: Context, child_id: ast::NodeId, sp: span) {
 debug!("region::parent_to_expr(span=%?)",
 cx.sess.codemap.span_to_str(sp));
-for cx.parent.iter().advance |parent_id| {
+foreach parent_id in cx.parent.iter() {
 cx.region_maps.record_parent(child_id, *parent_id);
 }
 }
@@ -713,7 +713,7 @@ fn determine_rp_in_fn(fk: &visit::fn_kind,
 visit::vt<@mut DetermineRpCtxt>)) {
 do cx.with(cx.item_id, false) {
 do cx.with_ambient_variance(rv_contravariant) {
-for decl.inputs.iter().advance |a| {
+foreach a in decl.inputs.iter() {
 (visitor.visit_ty)(&a.ty, (cx, visitor));
 }
 }
@@ -824,7 +824,7 @@ fn determine_rp_in_ty(ty: &ast::Ty,
 ast::ty_path(ref path, _, _) => {
 // type parameters are---for now, anyway---always invariant
 do cx.with_ambient_variance(rv_invariant) {
-for path.types.iter().advance |tp| {
+foreach tp in path.types.iter() {
 (visitor.visit_ty)(tp, (cx, visitor));
 }
 }
@@ -837,7 +837,7 @@ fn determine_rp_in_ty(ty: &ast::Ty,
 do cx.with(cx.item_id, false) {
 // parameters are contravariant
 do cx.with_ambient_variance(rv_contravariant) {
-for decl.inputs.iter().advance |a| {
+foreach a in decl.inputs.iter() {
 (visitor.visit_ty)(&a.ty, (cx, visitor));
 }
 }
@@ -917,7 +917,7 @@ pub fn determine_rp_in_crate(sess: Session,
 match cx.dep_map.find(&c_id) {
 None => {}
 Some(deps) => {
-for deps.iter().advance |dep| {
+foreach dep in deps.iter() {
 let v = add_variance(dep.ambient_variance, c_variance);
 cx.add_rp(dep.id, v);
 }
@@ -929,7 +929,7 @@ pub fn determine_rp_in_crate(sess: Session,
 debug!("%s", {
 debug!("Region variance results:");
 let region_paramd_items = cx.region_paramd_items;
-for region_paramd_items.iter().advance |(&key, &value)| {
+foreach (&key, &value) in region_paramd_items.iter() {
 debug!("item %? (%s) is parameterized with variance %?",
 key,
 ast_map::node_id_to_str(ast_map, key,
@@ -1037,7 +1037,7 @@ impl Resolver {
 self.session.str_of(name)));
 {
 let r = child.span_for_namespace(ns);
-for r.iter().advance |sp| {
+foreach sp in r.iter() {
 self.session.span_note(*sp,
 fmt!("first definition of %s `%s` here",
 namespace_error_to_str(duplicate_type),
@@ -1057,7 +1057,7 @@ impl Resolver {
 }

 // Check each statement.
-for block.stmts.iter().advance |statement| {
+foreach statement in block.stmts.iter() {
 match statement.node {
 stmt_decl(declaration, _) => {
 match declaration.node {
@@ -1179,7 +1179,7 @@ impl Resolver {
 name_bindings.define_type
 (privacy, def_ty(local_def(item.id)), sp);

-for (*enum_definition).variants.iter().advance |variant| {
+foreach variant in (*enum_definition).variants.iter() {
 self.build_reduced_graph_for_variant(
 variant,
 local_def(item.id),
@@ -1264,7 +1264,7 @@ impl Resolver {
 };

 // For each method...
-for methods.iter().advance |method| {
+foreach method in methods.iter() {
 // Add the method to the module.
 let ident = method.ident;
 let (method_name_bindings, _) =
@@ -1316,7 +1316,7 @@ impl Resolver {

 // Add the names of all the methods to the trait info.
 let mut method_names = HashMap::new();
-for methods.iter().advance |method| {
+foreach method in methods.iter() {
 let ty_m = trait_method_to_ty_method(method);

 let ident = ty_m.ident;
@@ -1353,7 +1353,7 @@ impl Resolver {
 }

 let def_id = local_def(item.id);
-for method_names.iter().advance |(name, _)| {
+foreach (name, _) in method_names.iter() {
 if !self.method_map.contains_key(name) {
 self.method_map.insert(*name, HashSet::new());
 }
@@ -1422,7 +1422,7 @@ impl Resolver {
 let privacy = visibility_to_privacy(view_item.vis);
 match view_item.node {
 view_item_use(ref view_paths) => {
-for view_paths.iter().advance |view_path| {
+foreach view_path in view_paths.iter() {
 // Extract and intern the module part of the path. For
 // globs and lists, the path is found directly in the AST;
 // for simple paths we have to munge the path a little.
@@ -1433,7 +1433,7 @@ impl Resolver {
 let path_len = full_path.idents.len();
 assert!(path_len != 0);

-for full_path.idents.iter().enumerate().advance |(i, ident)| {
+foreach (i, ident) in full_path.idents.iter().enumerate() {
 if i != path_len - 1 {
 module_path.push(*ident);
 }
@@ -1442,7 +1442,7 @@ impl Resolver {

 view_path_glob(ref module_ident_path, _) |
 view_path_list(ref module_ident_path, _, _) => {
-for module_ident_path.idents.iter().advance |ident| {
+foreach ident in module_ident_path.idents.iter() {
 module_path.push(*ident);
 }
 }
@@ -1463,7 +1463,7 @@ impl Resolver {
 id);
 }
 view_path_list(_, ref source_idents, _) => {
-for source_idents.iter().advance |source_ident| {
+foreach source_ident in source_idents.iter() {
 let name = source_ident.node.name;
 let subclass = @SingleImport(name, name);
 self.build_import_directive(
@@ -1657,7 +1657,7 @@ impl Resolver {
 let method_def_ids =
 get_trait_method_def_ids(self.session.cstore, def_id);
 let mut interned_method_names = HashSet::new();
-for method_def_ids.iter().advance |&method_def_id| {
+foreach &method_def_id in method_def_ids.iter() {
 let (method_name, explicit_self) =
 get_method_name_and_explicit_self(self.session.cstore,
 method_def_id);
@@ -1672,7 +1672,7 @@ impl Resolver {
 interned_method_names.insert(method_name);
 }
 }
-for interned_method_names.iter().advance |name| {
+foreach name in interned_method_names.iter() {
 if !self.method_map.contains_key(name) {
 self.method_map.insert(*name, HashSet::new());
 }
@@ -1741,7 +1741,7 @@ impl Resolver {
 // need to.

 let mut current_module = root;
-for pieces.iter().advance |ident_str| {
+foreach ident_str in pieces.iter() {
 let ident = self.session.ident_of(*ident_str);
 // Create or reuse a graph node for the child.
 let (child_name_bindings, new_parent) =
@@ -1861,7 +1861,7 @@ impl Resolver {
 // Add each static method to the module.
 let new_parent = ModuleReducedGraphParent(
 type_module);
-for static_methods.iter().advance |static_method_info| {
+foreach static_method_info in static_methods.iter() {
 let ident = static_method_info.ident;
 debug!("(building reduced graph for \
 external crate) creating \
@@ -2047,7 +2047,7 @@ impl Resolver {
 pub fn idents_to_str(@mut self, idents: &[ident]) -> ~str {
 let mut first = true;
 let mut result = ~"";
-for idents.iter().advance |ident| {
+foreach ident in idents.iter() {
 if first {
 first = false
 } else {
@@ -2531,7 +2531,7 @@ impl Resolver {
 };

 // Add all children from the containing module.
-for containing_module.children.iter().advance |(&ident, name_bindings)| {
+foreach (&ident, name_bindings) in containing_module.children.iter() {
 merge_import_resolution(ident, *name_bindings);
 }

@@ -3237,7 +3237,7 @@ impl Resolver {
 loop;
 }
 let xs = [TypeNS, ValueNS];
-for xs.iter().advance |ns| {
+foreach ns in xs.iter() {
 match importresolution.target_for_namespace(*ns) {
 Some(target) => {
 debug!("(computing exports) maybe reexport '%s'",
@@ -3485,8 +3485,8 @@ impl Resolver {
 // enum item: resolve all the variants' discrs,
 // then resolve the ty params
 item_enum(ref enum_def, ref generics) => {
-for (*enum_def).variants.iter().advance |variant| {
+foreach variant in (*enum_def).variants.iter() {
-for variant.node.disr_expr.iter().advance |dis_expr| {
+foreach dis_expr in variant.node.disr_expr.iter() {
 // resolve the discriminator expr
 // as a constant
 self.with_constant_rib(|| {
@@ -3543,11 +3543,11 @@ impl Resolver {
 visitor);

 // Resolve derived traits.
-for traits.iter().advance |trt| {
+foreach trt in traits.iter() {
 self.resolve_trait_reference(item.id, trt, visitor, TraitDerivation);
 }

-for (*methods).iter().advance |method| {
+foreach method in (*methods).iter() {
 // Create a new rib for the method-specific type
 // parameters.
 //
@@ -3567,7 +3567,7 @@ impl Resolver {
 &ty_m.generics.ty_params,
 visitor);

-for ty_m.decl.inputs.iter().advance |argument| {
+foreach argument in ty_m.decl.inputs.iter() {
 self.resolve_type(&argument.ty, visitor);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3604,7 +3604,7 @@ impl Resolver {
|
||||||
|
|
||||||
item_foreign_mod(ref foreign_module) => {
|
item_foreign_mod(ref foreign_module) => {
|
||||||
do self.with_scope(Some(item.ident)) {
|
do self.with_scope(Some(item.ident)) {
|
||||||
for foreign_module.items.iter().advance |foreign_item| {
|
foreach foreign_item in foreign_module.items.iter() {
|
||||||
match foreign_item.node {
|
match foreign_item.node {
|
||||||
foreign_item_fn(_, _, ref generics) => {
|
foreign_item_fn(_, _, ref generics) => {
|
||||||
self.with_type_parameter_rib(
|
self.with_type_parameter_rib(
|
||||||
|
@ -3660,7 +3660,7 @@ impl Resolver {
|
||||||
let function_type_rib = @Rib(rib_kind);
|
let function_type_rib = @Rib(rib_kind);
|
||||||
self.type_ribs.push(function_type_rib);
|
self.type_ribs.push(function_type_rib);
|
||||||
|
|
||||||
for generics.ty_params.iter().enumerate().advance |(index, type_parameter)| {
|
foreach (index, type_parameter) in generics.ty_params.iter().enumerate() {
|
||||||
let name = type_parameter.ident;
|
let name = type_parameter.ident;
|
||||||
debug!("with_type_parameter_rib: %d %d", node_id,
|
debug!("with_type_parameter_rib: %d %d", node_id,
|
||||||
type_parameter.id);
|
type_parameter.id);
|
||||||
|
@ -3751,7 +3751,7 @@ impl Resolver {
|
||||||
// Nothing to do.
|
// Nothing to do.
|
||||||
}
|
}
|
||||||
Some(declaration) => {
|
Some(declaration) => {
|
||||||
for declaration.inputs.iter().advance |argument| {
|
foreach argument in declaration.inputs.iter() {
|
||||||
let binding_mode = ArgumentIrrefutableMode;
|
let binding_mode = ArgumentIrrefutableMode;
|
||||||
let mutability =
|
let mutability =
|
||||||
if argument.is_mutbl {Mutable} else {Immutable};
|
if argument.is_mutbl {Mutable} else {Immutable};
|
||||||
|
@ -3783,8 +3783,8 @@ impl Resolver {
|
||||||
pub fn resolve_type_parameters(@mut self,
|
pub fn resolve_type_parameters(@mut self,
|
||||||
type_parameters: &OptVec<TyParam>,
|
type_parameters: &OptVec<TyParam>,
|
||||||
visitor: ResolveVisitor) {
|
visitor: ResolveVisitor) {
|
||||||
for type_parameters.iter().advance |type_parameter| {
|
foreach type_parameter in type_parameters.iter() {
|
||||||
for type_parameter.bounds.iter().advance |bound| {
|
foreach bound in type_parameter.bounds.iter() {
|
||||||
self.resolve_type_parameter_bound(type_parameter.id, bound, visitor);
|
self.resolve_type_parameter_bound(type_parameter.id, bound, visitor);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -3833,7 +3833,7 @@ impl Resolver {
|
||||||
fields: &[@struct_field],
|
fields: &[@struct_field],
|
||||||
visitor: ResolveVisitor) {
|
visitor: ResolveVisitor) {
|
||||||
let mut ident_map = HashMap::new::<ast::ident, @struct_field>();
|
let mut ident_map = HashMap::new::<ast::ident, @struct_field>();
|
||||||
for fields.iter().advance |&field| {
|
foreach &field in fields.iter() {
|
||||||
match field.node.kind {
|
match field.node.kind {
|
||||||
named_field(ident, _) => {
|
named_field(ident, _) => {
|
||||||
match ident_map.find(&ident) {
|
match ident_map.find(&ident) {
|
||||||
|
@ -3862,7 +3862,7 @@ impl Resolver {
|
||||||
self.resolve_type_parameters(&generics.ty_params, visitor);
|
self.resolve_type_parameters(&generics.ty_params, visitor);
|
||||||
|
|
||||||
// Resolve fields.
|
// Resolve fields.
|
||||||
for fields.iter().advance |field| {
|
foreach field in fields.iter() {
|
||||||
self.resolve_type(&field.node.ty, visitor);
|
self.resolve_type(&field.node.ty, visitor);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -3922,7 +3922,7 @@ impl Resolver {
|
||||||
let mut new_trait_refs = ~[];
|
let mut new_trait_refs = ~[];
|
||||||
{
|
{
|
||||||
let r = self.def_map.find(&trait_reference.ref_id);
|
let r = self.def_map.find(&trait_reference.ref_id);
|
||||||
for r.iter().advance |&def| {
|
foreach &def in r.iter() {
|
||||||
new_trait_refs.push(def_id_of_def(*def));
|
new_trait_refs.push(def_id_of_def(*def));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -3938,7 +3938,7 @@ impl Resolver {
|
||||||
// Resolve the self type.
|
// Resolve the self type.
|
||||||
self.resolve_type(self_type, visitor);
|
self.resolve_type(self_type, visitor);
|
||||||
|
|
||||||
for methods.iter().advance |method| {
|
foreach method in methods.iter() {
|
||||||
// We also need a new scope for the method-specific
|
// We also need a new scope for the method-specific
|
||||||
// type parameters.
|
// type parameters.
|
||||||
self.resolve_method(MethodRibKind(
|
self.resolve_method(MethodRibKind(
|
||||||
|
@ -4018,10 +4018,10 @@ impl Resolver {
|
||||||
pub fn check_consistent_bindings(@mut self, arm: &arm) {
|
pub fn check_consistent_bindings(@mut self, arm: &arm) {
|
||||||
if arm.pats.len() == 0 { return; }
|
if arm.pats.len() == 0 { return; }
|
||||||
let map_0 = self.binding_mode_map(arm.pats[0]);
|
let map_0 = self.binding_mode_map(arm.pats[0]);
|
||||||
for arm.pats.iter().enumerate().advance |(i, p)| {
|
foreach (i, p) in arm.pats.iter().enumerate() {
|
||||||
let map_i = self.binding_mode_map(*p);
|
let map_i = self.binding_mode_map(*p);
|
||||||
|
|
||||||
for map_0.iter().advance |(&key, &binding_0)| {
|
foreach (&key, &binding_0) in map_0.iter() {
|
||||||
match map_i.find(&key) {
|
match map_i.find(&key) {
|
||||||
None => {
|
None => {
|
||||||
self.session.span_err(
|
self.session.span_err(
|
||||||
|
@ -4042,7 +4042,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for map_i.iter().advance |(&key, &binding)| {
|
foreach (&key, &binding) in map_i.iter() {
|
||||||
if !map_0.contains_key(&key) {
|
if !map_0.contains_key(&key) {
|
||||||
self.session.span_err(
|
self.session.span_err(
|
||||||
binding.span,
|
binding.span,
|
||||||
|
@ -4058,7 +4058,7 @@ impl Resolver {
|
||||||
self.value_ribs.push(@Rib(NormalRibKind));
|
self.value_ribs.push(@Rib(NormalRibKind));
|
||||||
|
|
||||||
let bindings_list = @mut HashMap::new();
|
let bindings_list = @mut HashMap::new();
|
||||||
for arm.pats.iter().advance |pattern| {
|
foreach pattern in arm.pats.iter() {
|
||||||
self.resolve_pattern(*pattern, RefutableMode, Immutable,
|
self.resolve_pattern(*pattern, RefutableMode, Immutable,
|
||||||
Some(bindings_list), visitor);
|
Some(bindings_list), visitor);
|
||||||
}
|
}
|
||||||
|
@ -4164,7 +4164,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
do bounds.map |bound_vec| {
|
do bounds.map |bound_vec| {
|
||||||
for bound_vec.iter().advance |bound| {
|
foreach bound in bound_vec.iter() {
|
||||||
self.resolve_type_parameter_bound(ty.id, bound, visitor);
|
self.resolve_type_parameter_bound(ty.id, bound, visitor);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -4172,7 +4172,7 @@ impl Resolver {
|
||||||
|
|
||||||
ty_closure(c) => {
|
ty_closure(c) => {
|
||||||
do c.bounds.map |bounds| {
|
do c.bounds.map |bounds| {
|
||||||
for bounds.iter().advance |bound| {
|
foreach bound in bounds.iter() {
|
||||||
self.resolve_type_parameter_bound(ty.id, bound, visitor);
|
self.resolve_type_parameter_bound(ty.id, bound, visitor);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -4319,7 +4319,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check the types in the path pattern.
|
// Check the types in the path pattern.
|
||||||
for path.types.iter().advance |ty| {
|
foreach ty in path.types.iter() {
|
||||||
self.resolve_type(ty, visitor);
|
self.resolve_type(ty, visitor);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -4352,7 +4352,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check the types in the path pattern.
|
// Check the types in the path pattern.
|
||||||
for path.types.iter().advance |ty| {
|
foreach ty in path.types.iter() {
|
||||||
self.resolve_type(ty, visitor);
|
self.resolve_type(ty, visitor);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -4381,7 +4381,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check the types in the path pattern.
|
// Check the types in the path pattern.
|
||||||
for path.types.iter().advance |ty| {
|
foreach ty in path.types.iter() {
|
||||||
self.resolve_type(ty, visitor);
|
self.resolve_type(ty, visitor);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -4477,7 +4477,7 @@ impl Resolver {
|
||||||
visitor: ResolveVisitor)
|
visitor: ResolveVisitor)
|
||||||
-> Option<def> {
|
-> Option<def> {
|
||||||
// First, resolve the types.
|
// First, resolve the types.
|
||||||
for path.types.iter().advance |ty| {
|
foreach ty in path.types.iter() {
|
||||||
self.resolve_type(ty, visitor);
|
self.resolve_type(ty, visitor);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -4607,7 +4607,7 @@ impl Resolver {
|
||||||
|
|
||||||
pub fn intern_module_part_of_path(@mut self, path: &Path) -> ~[ident] {
|
pub fn intern_module_part_of_path(@mut self, path: &Path) -> ~[ident] {
|
||||||
let mut module_path_idents = ~[];
|
let mut module_path_idents = ~[];
|
||||||
for path.idents.iter().enumerate().advance |(index, ident)| {
|
foreach (index, ident) in path.idents.iter().enumerate() {
|
||||||
if index == path.idents.len() - 1 {
|
if index == path.idents.len() - 1 {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@ -4845,7 +4845,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut smallest = 0;
|
let mut smallest = 0;
|
||||||
for maybes.iter().enumerate().advance |(i, &other)| {
|
foreach (i, &other) in maybes.iter().enumerate() {
|
||||||
values[i] = name.lev_distance(other);
|
values[i] = name.lev_distance(other);
|
||||||
|
|
||||||
if values[i] <= values[smallest] {
|
if values[i] <= values[smallest] {
|
||||||
|
@ -4874,11 +4874,11 @@ impl Resolver {
|
||||||
i -= 1;
|
i -= 1;
|
||||||
match this.type_ribs[i].kind {
|
match this.type_ribs[i].kind {
|
||||||
MethodRibKind(node_id, _) =>
|
MethodRibKind(node_id, _) =>
|
||||||
for this.crate.module.items.iter().advance |item| {
|
foreach item in this.crate.module.items.iter() {
|
||||||
if item.id == node_id {
|
if item.id == node_id {
|
||||||
match item.node {
|
match item.node {
|
||||||
item_struct(class_def, _) => {
|
item_struct(class_def, _) => {
|
||||||
for class_def.fields.iter().advance |field| {
|
foreach field in class_def.fields.iter() {
|
||||||
match field.node.kind {
|
match field.node.kind {
|
||||||
unnamed_field => {},
|
unnamed_field => {},
|
||||||
named_field(ident, _) => {
|
named_field(ident, _) => {
|
||||||
|
@ -5152,7 +5152,7 @@ impl Resolver {
|
||||||
// Look for the current trait.
|
// Look for the current trait.
|
||||||
match self.current_trait_refs {
|
match self.current_trait_refs {
|
||||||
Some(ref trait_def_ids) => {
|
Some(ref trait_def_ids) => {
|
||||||
for trait_def_ids.iter().advance |trait_def_id| {
|
foreach trait_def_id in trait_def_ids.iter() {
|
||||||
if candidate_traits.contains(trait_def_id) {
|
if candidate_traits.contains(trait_def_id) {
|
||||||
self.add_trait_info(&mut found_traits,
|
self.add_trait_info(&mut found_traits,
|
||||||
*trait_def_id,
|
*trait_def_id,
|
||||||
|
@ -5308,7 +5308,7 @@ impl Resolver {
|
||||||
match vi.node {
|
match vi.node {
|
||||||
view_item_extern_mod(*) => {} // ignore
|
view_item_extern_mod(*) => {} // ignore
|
||||||
view_item_use(ref path) => {
|
view_item_use(ref path) => {
|
||||||
for path.iter().advance |p| {
|
foreach p in path.iter() {
|
||||||
match p.node {
|
match p.node {
|
||||||
view_path_simple(_, _, id) | view_path_glob(_, id) => {
|
view_path_simple(_, _, id) | view_path_glob(_, id) => {
|
||||||
if !self.used_imports.contains(&id) {
|
if !self.used_imports.contains(&id) {
|
||||||
|
@ -5319,7 +5319,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
view_path_list(_, ref list, _) => {
|
view_path_list(_, ref list, _) => {
|
||||||
for list.iter().advance |i| {
|
foreach i in list.iter() {
|
||||||
if !self.used_imports.contains(&i.node.id) {
|
if !self.used_imports.contains(&i.node.id) {
|
||||||
self.session.add_lint(unused_imports,
|
self.session.add_lint(unused_imports,
|
||||||
i.node.id, i.span,
|
i.node.id, i.span,
|
||||||
|
@ -5375,7 +5375,7 @@ impl Resolver {
|
||||||
}
|
}
|
||||||
|
|
||||||
debug!("Import resolutions:");
|
debug!("Import resolutions:");
|
||||||
for module_.import_resolutions.iter().advance |(name, import_resolution)| {
|
foreach (name, import_resolution) in module_.import_resolutions.iter() {
|
||||||
let value_repr;
|
let value_repr;
|
||||||
match import_resolution.target_for_namespace(ValueNS) {
|
match import_resolution.target_for_namespace(ValueNS) {
|
||||||
None => { value_repr = ~""; }
|
None => { value_repr = ~""; }
|
||||||
|
|
|
@@ -298,7 +298,7 @@ pub fn variant_opt(bcx: @mut Block, pat_id: ast::NodeId)
 match ccx.tcx.def_map.get_copy(&pat_id) {
 ast::def_variant(enum_id, var_id) => {
 let variants = ty::enum_variants(ccx.tcx, enum_id);
-for (*variants).iter().advance |v| {
+foreach v in (*variants).iter() {
 if var_id == v.id {
 return var(v.disr_val,
 adt::represent_node(bcx, pat_id))
@@ -367,7 +367,7 @@ pub fn matches_to_str(bcx: @mut Block, m: &[Match]) -> ~str {
 }

 pub fn has_nested_bindings(m: &[Match], col: uint) -> bool {
-for m.iter().advance |br| {
+foreach br in m.iter() {
 match br.pats[col].node {
 ast::pat_ident(_, _, Some(_)) => return true,
 _ => ()
@@ -437,7 +437,7 @@ pub fn enter_match<'r>(bcx: @mut Block,
 let _indenter = indenter();

 let mut result = ~[];
-for m.iter().advance |br| {
+foreach br in m.iter() {
 match e(br.pats[col]) {
 Some(sub) => {
 let pats =
@@ -589,7 +589,7 @@ pub fn enter_opt<'r>(bcx: @mut Block,
 // unspecified fields with dummy.
 let mut reordered_patterns = ~[];
 let r = ty::lookup_struct_fields(tcx, struct_id);
-for r.iter().advance |field| {
+foreach field in r.iter() {
 match field_pats.iter().find_(|p| p.ident == field.ident) {
 None => reordered_patterns.push(dummy),
 Some(fp) => reordered_patterns.push(fp.pat)
@@ -649,7 +649,7 @@ pub fn enter_rec_or_struct<'r>(bcx: @mut Block,
 match p.node {
 ast::pat_struct(_, ref fpats, _) => {
 let mut pats = ~[];
-for fields.iter().advance |fname| {
+foreach fname in fields.iter() {
 match fpats.iter().find_(|p| p.ident == *fname) {
 None => pats.push(dummy),
 Some(pat) => pats.push(pat.pat)
@@ -809,7 +809,7 @@ pub fn get_options(bcx: @mut Block, m: &[Match], col: uint) -> ~[Opt] {
 }

 let mut found = ~[];
-for m.iter().advance |br| {
+foreach br in m.iter() {
 let cur = br.pats[col];
 match cur.node {
 ast::pat_lit(l) => {
@@ -955,7 +955,7 @@ pub fn collect_record_or_struct_fields(bcx: @mut Block,
 col: uint)
 -> ~[ast::ident] {
 let mut fields: ~[ast::ident] = ~[];
-for m.iter().advance |br| {
+foreach br in m.iter() {
 match br.pats[col].node {
 ast::pat_struct(_, ref fs, _) => {
 match ty::get(node_id_type(bcx, br.pats[col].id)).sty {
@@ -969,7 +969,7 @@ pub fn collect_record_or_struct_fields(bcx: @mut Block,
 return fields;

 fn extend(idents: &mut ~[ast::ident], field_pats: &[ast::field_pat]) {
-for field_pats.iter().advance |field_pat| {
+foreach field_pat in field_pats.iter() {
 let field_ident = field_pat.ident;
 if !idents.iter().any(|x| *x == field_ident) {
 idents.push(field_ident);
@@ -994,7 +994,7 @@ pub fn root_pats_as_necessary(mut bcx: @mut Block,
 col: uint,
 val: ValueRef)
 -> @mut Block {
-for m.iter().advance |br| {
+foreach br in m.iter() {
 let pat_id = br.pats[col].id;
 if pat_id != 0 {
 let datum = Datum {val: val, ty: node_id_type(bcx, pat_id),
@@ -1063,14 +1063,14 @@ pub fn pick_col(m: &[Match]) -> uint {
 }
 }
 let mut scores = vec::from_elem(m[0].pats.len(), 0u);
-for m.iter().advance |br| {
+foreach br in m.iter() {
 let mut i = 0u;
-for br.pats.iter().advance |p| { scores[i] += score(*p); i += 1u; }
+foreach p in br.pats.iter() { scores[i] += score(*p); i += 1u; }
 }
 let mut max_score = 0u;
 let mut best_col = 0u;
 let mut i = 0u;
-for scores.iter().advance |score| {
+foreach score in scores.iter() {
 let score = *score;

 // Irrefutable columns always go first, they'd only be duplicated in
@@ -1236,7 +1236,7 @@ pub fn compile_guard(bcx: @mut Block,
 let val = bool_to_i1(bcx, val);

 // Revoke the temp cleanups now that the guard successfully executed.
-for temp_cleanups.iter().advance |llval| {
+foreach llval in temp_cleanups.iter() {
 revoke_clean(bcx, *llval);
 }

@@ -1325,7 +1325,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
 let ccx = bcx.fcx.ccx;
 let mut pat_id = 0;
 let mut pat_span = dummy_sp();
-for m.iter().advance |br| {
+foreach br in m.iter() {
 // Find a real id (we're adding placeholder wildcard patterns, but
 // each column is guaranteed to have at least one real pattern)
 if pat_id == 0 {
@@ -1434,7 +1434,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
 var(_, repr) => {
 let (the_kind, val_opt) = adt::trans_switch(bcx, repr, val);
 kind = the_kind;
-for val_opt.iter().advance |&tval| { test_val = tval; }
+foreach &tval in val_opt.iter() { test_val = tval; }
 }
 lit(_) => {
 let pty = node_id_type(bcx, pat_id);
@@ -1457,7 +1457,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
 }
 }
 }
-for opts.iter().advance |o| {
+foreach o in opts.iter() {
 match *o {
 range(_, _) => { kind = compare; break }
 _ => ()
@@ -1479,7 +1479,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
 let mut i = 0u;

 // Compile subtrees for each option
-for opts.iter().advance |opt| {
+foreach opt in opts.iter() {
 i += 1u;
 let mut opt_cx = else_cx;
 if !exhaustive || i < len {
@@ -1688,7 +1688,7 @@ pub fn trans_match_inner(scope_cx: @mut Block,

 let mut arm_datas = ~[];
 let mut matches = ~[];
-for arms.iter().advance |arm| {
+foreach arm in arms.iter() {
 let body = scope_block(bcx, arm.body.info(), "case_body");
 let bindings_map = create_bindings_map(bcx, arm.pats[0]);
 let arm_data = ArmData {
@@ -1697,7 +1697,7 @@ pub fn trans_match_inner(scope_cx: @mut Block,
 bindings_map: @bindings_map
 };
 arm_datas.push(arm_data.clone());
-for arm.pats.iter().advance |p| {
+foreach p in arm.pats.iter() {
 matches.push(Match {
 pats: ~[*p],
 data: arm_data.clone(),
@@ -1721,7 +1721,7 @@ pub fn trans_match_inner(scope_cx: @mut Block,
 compile_submatch(bcx, matches, [lldiscr], chk);

 let mut arm_cxs = ~[];
-for arm_datas.iter().advance |arm_data| {
+foreach arm_data in arm_datas.iter() {
 let mut bcx = arm_data.bodycx;

 // If this arm has a guard, then the various by-value bindings have
@@ -1957,7 +1957,7 @@ fn bind_irrefutable_pat(bcx: @mut Block,
 });
 }

-for inner.iter().advance |&inner_pat| {
+foreach &inner_pat in inner.iter() {
 bcx = bind_irrefutable_pat(bcx, inner_pat, val, binding_mode);
 }
 }
@@ -1972,8 +1972,8 @@ fn bind_irrefutable_pat(bcx: @mut Block,
 repr,
 vinfo.disr_val,
 val);
-for sub_pats.iter().advance |sub_pat| {
-for args.vals.iter().enumerate().advance |(i, argval)| {
+foreach sub_pat in sub_pats.iter() {
+foreach (i, argval) in args.vals.iter().enumerate() {
 bcx = bind_irrefutable_pat(bcx, sub_pat[i],
 *argval, binding_mode);
 }
@@ -1988,7 +1988,7 @@ fn bind_irrefutable_pat(bcx: @mut Block,
 Some(ref elems) => {
 // This is the tuple struct case.
 let repr = adt::represent_node(bcx, pat.id);
-for elems.iter().enumerate().advance |(i, elem)| {
+foreach (i, elem) in elems.iter().enumerate() {
 let fldptr = adt::trans_field_ptr(bcx, repr,
 val, 0, i);
 bcx = bind_irrefutable_pat(bcx, *elem,
@@ -2009,7 +2009,7 @@ fn bind_irrefutable_pat(bcx: @mut Block,
 let pat_ty = node_id_type(bcx, pat.id);
 let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
 do expr::with_field_tys(tcx, pat_ty, None) |discr, field_tys| {
-for fields.iter().advance |f| {
+foreach f in fields.iter() {
 let ix = ty::field_idx_strict(tcx, f.ident, field_tys);
 let fldptr = adt::trans_field_ptr(bcx, pat_repr, val,
 discr, ix);
@@ -2019,7 +2019,7 @@ fn bind_irrefutable_pat(bcx: @mut Block,
 }
 ast::pat_tup(ref elems) => {
 let repr = adt::represent_node(bcx, pat.id);
-for elems.iter().enumerate().advance |(i, elem)| {
+foreach (i, elem) in elems.iter().enumerate() {
 let fldptr = adt::trans_field_ptr(bcx, repr, val, 0, i);
 bcx = bind_irrefutable_pat(bcx, *elem, fldptr, binding_mode);
 }
@@ -249,7 +249,7 @@ fn generic_fields_of(cx: &mut CrateContext, r: &Repr, sizing: bool) -> ~[Type] {
 let mut most_aligned = None;
 let mut largest_align = 0;
 let mut largest_size = 0;
-for sts.iter().advance |st| {
+foreach st in sts.iter() {
 if largest_size < st.size {
 largest_size = st.size;
 }
@@ -545,7 +545,7 @@ fn build_const_struct(ccx: &mut CrateContext, st: &Struct, vals: &[ValueRef])

 let mut offset = 0;
 let mut cfields = ~[];
-for st.fields.iter().enumerate().advance |(i, &ty)| {
+foreach (i, &ty) in st.fields.iter().enumerate() {
 let llty = type_of::sizing_type_of(ccx, ty);
 let type_align = machine::llalign_of_min(ccx, llty)
 /*bad*/as u64;
@@ -62,7 +62,7 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {

 };

-for cleanups.iter().advance |c| {
+foreach c in cleanups.iter() {
 revoke_clean(bcx, *c);
 }
 cleanups.clear();
@@ -83,7 +83,7 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {

 };

-for cleanups.iter().advance |c| {
+foreach c in cleanups.iter() {
 revoke_clean(bcx, *c);
 }

@@ -133,7 +133,7 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {
 let op = PointerCast(bcx, aoutputs[0], val_ty(outputs[0]).ptr_to());
 Store(bcx, r, op);
 } else {
-for aoutputs.iter().enumerate().advance |(i, o)| {
+foreach (i, o) in aoutputs.iter().enumerate() {
 let v = ExtractValue(bcx, r, i);
 let op = PointerCast(bcx, *o, val_ty(outputs[i]).ptr_to());
 Store(bcx, v, op);
@@ -675,7 +675,7 @@ pub fn iter_structural_ty(cx: @mut Block, av: ValueRef, t: ty::t,
 let tcx = cx.tcx();
 let mut cx = cx;

-for variant.args.iter().enumerate().advance |(i, &arg)| {
+foreach (i, &arg) in variant.args.iter().enumerate() {
 cx = f(cx,
 adt::trans_field_ptr(cx, repr, av, variant.disr_val, i),
 ty::subst_tps(tcx, tps, None, arg));
@@ -688,7 +688,7 @@ pub fn iter_structural_ty(cx: @mut Block, av: ValueRef, t: ty::t,
 ty::ty_struct(*) => {
 let repr = adt::represent_type(cx.ccx(), t);
 do expr::with_field_tys(cx.tcx(), t, None) |discr, field_tys| {
-for field_tys.iter().enumerate().advance |(i, field_ty)| {
+foreach (i, field_ty) in field_tys.iter().enumerate() {
 let llfld_a = adt::trans_field_ptr(cx, repr, av, discr, i);
 cx = f(cx, llfld_a, field_ty.mt.ty);
 }
@@ -701,7 +701,7 @@ pub fn iter_structural_ty(cx: @mut Block, av: ValueRef, t: ty::t,
 }
 ty::ty_tup(ref args) => {
 let repr = adt::represent_type(cx.ccx(), t);
-for args.iter().enumerate().advance |(i, arg)| {
+foreach (i, arg) in args.iter().enumerate() {
 let llfld_a = adt::trans_field_ptr(cx, repr, av, 0, i);
 cx = f(cx, llfld_a, *arg);
 }
@@ -729,7 +729,7 @@ pub fn iter_structural_ty(cx: @mut Block, av: ValueRef, t: ty::t,
 n_variants);
 let next_cx = sub_block(cx, "enum-iter-next");

-for (*variants).iter().advance |variant| {
+foreach variant in (*variants).iter() {
 let variant_cx =
 sub_block(cx, ~"enum-iter-variant-" +
 uint::to_str(variant.disr_val));
@@ -863,7 +863,7 @@ pub fn invoke(bcx: @mut Block, llfn: ValueRef, llargs: ~[ValueRef])
 debug!("invoking %x at %x",
 ::std::cast::transmute(llfn),
 ::std::cast::transmute(bcx.llbb));
-for llargs.iter().advance |&llarg| {
+foreach &llarg in llargs.iter() {
 debug!("arg: %x", ::std::cast::transmute(llarg));
 }
 }
@@ -879,7 +879,7 @@ pub fn invoke(bcx: @mut Block, llfn: ValueRef, llargs: ~[ValueRef])
 debug!("calling %x at %x",
 ::std::cast::transmute(llfn),
 ::std::cast::transmute(bcx.llbb));
-for llargs.iter().advance |&llarg| {
+foreach &llarg in llargs.iter() {
 debug!("arg: %x", ::std::cast::transmute(llarg));
 }
 }
@@ -908,7 +908,7 @@ pub fn need_invoke(bcx: @mut Block) -> bool {
 loop {
 cur_scope = match cur_scope {
 Some(inf) => {
-for inf.cleanups.iter().advance |cleanup| {
+foreach cleanup in inf.cleanups.iter() {
 match *cleanup {
 clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) => {
 if cleanup_type == normal_exit_and_unwind {
@@ -1171,7 +1171,7 @@ pub fn new_block(cx: @mut FunctionContext,
 opt_node_info,
 cx);
 bcx.scope = scope;
-for parent.iter().advance |cx| {
+foreach cx in parent.iter() {
 if cx.unreachable {
 Unreachable(bcx);
 break;
@@ -1261,7 +1261,7 @@ pub fn trans_block_cleanups_(bcx: @mut Block,
 bcx.ccx().sess.opts.debugging_opts & session::no_landing_pads != 0;
 if bcx.unreachable && !no_lpads { return bcx; }
 let mut bcx = bcx;
-for cleanups.rev_iter().advance |cu| {
+foreach cu in cleanups.rev_iter() {
 match *cu {
 clean(cfn, cleanup_type) | clean_temp(_, cfn, cleanup_type) => {
 // Some types don't need to be cleaned up during
@@ -1304,7 +1304,7 @@ pub fn cleanup_and_leave(bcx: @mut Block,
 let mut dest = None;
 {
 let r = (*inf).cleanup_paths.rev_iter().find_(|cp| cp.target == leave);
-for r.iter().advance |cp| {
+foreach cp in r.iter() {
 if cp.size == inf.cleanups.len() {
 Br(bcx, cp.dest);
 return;
@@ -1326,7 +1326,7 @@ pub fn cleanup_and_leave(bcx: @mut Block,
 bcx = trans_block_cleanups_(sub_cx,
 inf_cleanups,
 is_lpad);
-for dest.iter().advance |&dest| {
+foreach &dest in dest.iter() {
 Br(bcx, dest);
 return;
 }
@@ -1449,7 +1449,7 @@ pub fn with_scope_datumblock(bcx: @mut Block, opt_node_info: Option<NodeInfo>,
 }

 pub fn block_locals(b: &ast::Block, it: &fn(@ast::Local)) {
-for b.stmts.iter().advance |s| {
+foreach s in b.stmts.iter() {
 match s.node {
 ast::stmt_decl(d, _) => {
 match d.node {
@@ -1624,7 +1624,7 @@ pub fn new_fn_ctxt_w_id(ccx: @mut CrateContext,
 opt_node_info: Option<NodeInfo>,
 sp: Option<span>)
 -> @mut FunctionContext {
-for param_substs.iter().advance |p| { p.validate(); }
+foreach p in param_substs.iter() { p.validate(); }

 debug!("new_fn_ctxt_w_id(path=%s, id=%?, \
 param_substs=%s)",
@@ -1901,7 +1901,7 @@ pub fn trans_closure(ccx: @mut CrateContext,
 // Put return block after all other blocks.
 // This somewhat improves single-stepping experience in debugger.
 unsafe {
-for fcx.llreturn.iter().advance |&llreturn| {
+foreach &llreturn in fcx.llreturn.iter() {
 llvm::LLVMMoveBasicBlockAfter(llreturn, bcx.llbb);
 }
 }
@@ -2090,7 +2090,7 @@ pub fn trans_enum_variant_or_tuple_like_struct<A:IdAndTy>(

 let repr = adt::represent_type(ccx, result_ty);
 adt::trans_start_init(bcx, repr, fcx.llretptr.get(), disr);
-for fn_args.iter().enumerate().advance |(i, fn_arg)| {
+foreach (i, fn_arg) in fn_args.iter().enumerate() {
 let lldestptr = adt::trans_field_ptr(bcx,
 repr,
 fcx.llretptr.get(),
@@ -2106,7 +2106,7 @@ pub fn trans_enum_variant_or_tuple_like_struct<A:IdAndTy>(
 pub fn trans_enum_def(ccx: @mut CrateContext, enum_definition: &ast::enum_def,
 id: ast::NodeId, vi: @~[@ty::VariantInfo],
 i: &mut uint) {
-for enum_definition.variants.iter().advance |variant| {
+foreach variant in enum_definition.variants.iter() {
 let disr_val = vi[*i].disr_val;
 *i += 1;

@@ -2156,7 +2156,7 @@ pub fn trans_item(ccx: @mut CrateContext, item: &ast::item) {
 item.id,
 item.attrs);
 } else {
-for body.stmts.iter().advance |stmt| {
+foreach stmt in body.stmts.iter() {
 match stmt.node {
 ast::stmt_decl(@codemap::spanned { node: ast::decl_item(i),
 _ }, _) => {
@@ -2189,7 +2189,7 @@ pub fn trans_item(ccx: @mut CrateContext, item: &ast::item) {
 consts::trans_const(ccx, m, item.id);
 // Do static_assert checking. It can't really be done much earlier because we need to get
 // the value of the bool out of LLVM
-for item.attrs.iter().advance |attr| {
+foreach attr in item.attrs.iter() {
 if "static_assert" == attr.name() {
 if m == ast::m_mutbl {
 ccx.sess.span_fatal(expr.span,
@@ -2237,7 +2237,7 @@ pub fn trans_struct_def(ccx: @mut CrateContext, struct_def: @ast::struct_def) {
 // and control visibility.
 pub fn trans_mod(ccx: @mut CrateContext, m: &ast::_mod) {
 let _icx = push_ctxt("trans_mod");
-for m.items.iter().advance |item| {
+foreach item in m.items.iter() {
 trans_item(ccx, *item);
 }
 }
@@ -2627,7 +2627,7 @@ pub fn trans_constant(ccx: &mut CrateContext, it: @ast::item) {
 node: it.id });
 let mut i = 0;
 let path = item_path(ccx, &it.id);
-for (*enum_definition).variants.iter().advance |variant| {
+foreach variant in (*enum_definition).variants.iter() {
 let p = vec::append(path.clone(), [
 path_name(variant.node.name),
 path_name(special_idents::descrim)
@@ -2807,7 +2807,7 @@ pub fn create_module_map(ccx: &mut CrateContext) -> ValueRef {
 keys.push(k.to_managed());
 }

-for keys.iter().advance |key| {
+foreach key in keys.iter() {
 let val = *ccx.module_data.find_equiv(key).get();
 let s_const = C_cstr(ccx, *key);
 let s_ptr = p2i(ccx, s_const);
@@ -3034,7 +3034,7 @@ pub fn trans_crate(sess: session::Session,
 do sort::quick_sort(ccx.stats.fn_stats) |&(_, _, insns_a), &(_, _, insns_b)| {
 insns_a > insns_b
 }
-for ccx.stats.fn_stats.iter().advance |tuple| {
+foreach tuple in ccx.stats.fn_stats.iter() {
 match *tuple {
 (ref name, ms, insns) => {
 printfln!("%u insns, %u ms, %s", insns, ms, *name);
@@ -3043,7 +3043,7 @@ pub fn trans_crate(sess: session::Session,
 }
 }
 if ccx.sess.count_llvm_insns() {
-for ccx.stats.llvm_insns.iter().advance |(k, v)| {
+foreach (k, v) in ccx.stats.llvm_insns.iter() {
 printfln!("%-7u %s", *v, *k);
 }
 }
@@ -516,7 +516,7 @@ impl Builder {
 // we care about.
 if ixs.len() < 16 {
 let mut small_vec = [ C_i32(0), ..16 ];
-for small_vec.mut_iter().zip(ixs.iter()).advance |(small_vec_e, &ix)| {
+foreach (small_vec_e, &ix) in small_vec.mut_iter().zip(ixs.iter()) {
 *small_vec_e = C_i32(ix as i32);
 }
 self.inbounds_gep(base, small_vec.slice(0, ixs.len()))
@@ -42,7 +42,7 @@ impl FnType {
 let fnty = Type::func(atys, &rty);
 let llfn = decl(fnty);

-for self.attrs.iter().enumerate().advance |(i, a)| {
+foreach (i, a) in self.attrs.iter().enumerate() {
 match *a {
 option::Some(attr) => {
 unsafe {
@@ -92,7 +92,7 @@ impl FnType {

 pub fn build_shim_ret(&self, bcx: @mut Block, arg_tys: &[Type], ret_def: bool,
 llargbundle: ValueRef, llretval: ValueRef) {
-for self.attrs.iter().enumerate().advance |(i, a)| {
+foreach (i, a) in self.attrs.iter().enumerate() {
 match *a {
 option::Some(attr) => {
 unsafe {
@@ -133,7 +133,7 @@ impl ABIInfo for ARM_ABIInfo {
 ret_def: bool) -> FnType {
 let mut arg_tys = ~[];
 let mut attrs = ~[];
-for atys.iter().advance |&aty| {
+foreach &aty in atys.iter() {
 let (ty, attr) = classify_arg_ty(aty);
 arg_tys.push(ty);
 attrs.push(attr);
@@ -190,7 +190,7 @@ impl ABIInfo for MIPS_ABIInfo {
 let mut attrs = ~[];
 let mut offset = if sret { 4 } else { 0 };

-for atys.iter().advance |aty| {
+foreach aty in atys.iter() {
 let (ty, attr) = classify_arg_ty(*aty, &mut offset);
 arg_tys.push(ty);
 attrs.push(attr);
@@ -180,7 +180,7 @@ fn classify_ty(ty: Type) -> ~[RegClass] {
 cls: &mut [RegClass], i: uint,
 off: uint) {
 let mut field_off = off;
-for tys.iter().advance |ty| {
+foreach ty in tys.iter() {
 field_off = align(field_off, *ty);
 classify(*ty, cls, i, field_off);
 field_off += ty_size(*ty);
@@ -295,7 +295,7 @@ fn classify_ty(ty: Type) -> ~[RegClass] {
 fn llreg_ty(cls: &[RegClass]) -> Type {
 fn llvec_len(cls: &[RegClass]) -> uint {
 let mut len = 1u;
-for cls.iter().advance |c| {
+foreach c in cls.iter() {
 if *c != SSEUp {
 break;
 }
@@ -356,7 +356,7 @@ fn x86_64_tys(atys: &[Type],

 let mut arg_tys = ~[];
 let mut attrs = ~[];
-for atys.iter().advance |t| {
+foreach t in atys.iter() {
 let (ty, attr) = x86_64_ty(*t, |cls| cls.is_pass_byval(), ByValAttribute);
 arg_tys.push(ty);
 attrs.push(attr);
@@ -618,7 +618,7 @@ pub fn trans_call_inner(in_cx: @mut Block,
 // the cleanup for the self argument
 match callee.data {
 Method(d) => {
-for d.temp_cleanup.iter().advance |&v| {
+foreach &v in d.temp_cleanup.iter() {
 revoke_clean(bcx, v);
 }
 }
@@ -628,7 +628,7 @@ pub fn trans_call_inner(in_cx: @mut Block,
 // Uncomment this to debug calls.
 /*
 printfln!("calling: %s", bcx.val_to_str(llfn));
-for llargs.iter().advance |llarg| {
+foreach llarg in llargs.iter() {
 printfln!("arg: %s", bcx.val_to_str(*llarg));
 }
 io::println("---");
@@ -671,7 +671,7 @@ pub fn trans_call_inner(in_cx: @mut Block,
 bcx = do with_cond(bcx, ret_flag_result) |bcx| {
 {
 let r = bcx.fcx.loop_ret;
-for r.iter().advance |&(flagptr, _)| {
+foreach &(flagptr, _) in r.iter() {
 Store(bcx, C_bool(true), flagptr);
 Store(bcx, C_bool(false), bcx.fcx.llretptr.get());
 }
@@ -728,7 +728,7 @@ pub fn trans_args(cx: @mut Block,
 match args {
 ArgExprs(arg_exprs) => {
 let last = arg_exprs.len() - 1u;
-for arg_exprs.iter().enumerate().advance |(i, arg_expr)| {
+foreach (i, arg_expr) in arg_exprs.iter().enumerate() {
 let arg_val = unpack_result!(bcx, {
 trans_arg_expr(bcx,
 arg_tys[i],
@@ -749,7 +749,7 @@ pub fn trans_args(cx: @mut Block,
 // now that all arguments have been successfully built, we can revoke any
 // temporary cleanups, as they are only needed if argument construction
 // should fail (for example, cleanup of copy mode args).
-for temp_cleanups.iter().advance |c| {
+foreach c in temp_cleanups.iter() {
 revoke_clean(bcx, *c)
 }

@@ -228,7 +228,7 @@ pub fn store_environment(bcx: @mut Block,

 // Copy expr values into boxed bindings.
 let mut bcx = bcx;
-for bound_values.iter().enumerate().advance |(i, bv)| {
+foreach (i, bv) in bound_values.iter().enumerate() {
 debug!("Copy %s into closure", bv.to_str(ccx));

 if ccx.sess.asm_comments() {
@@ -268,7 +268,7 @@ pub fn build_closure(bcx0: @mut Block,

 // Package up the captured upvars
 let mut env_vals = ~[];
-for cap_vars.iter().advance |cap_var| {
+foreach cap_var in cap_vars.iter() {
 debug!("Building closure: captured variable %?", *cap_var);
 let datum = expr::trans_local_var(bcx, cap_var.def);
 match cap_var.mode {
@@ -290,7 +290,7 @@ pub fn build_closure(bcx0: @mut Block,

 // If this is a `for` loop body, add two special environment
 // variables:
-for include_ret_handle.iter().advance |flagptr| {
+foreach flagptr in include_ret_handle.iter() {
 // Flag indicating we have returned (a by-ref bool):
 let flag_datum = Datum {val: *flagptr, ty: ty::mk_bool(),
 mode: ByRef(ZeroMem)};
@@ -337,7 +337,7 @@ pub fn load_environment(fcx: @mut FunctionContext,

 // Populate the upvars from the environment.
 let mut i = 0u;
-for cap_vars.iter().advance |cap_var| {
+foreach cap_var in cap_vars.iter() {
 let mut upvarptr = GEPi(bcx, llcdata, [0u, i]);
 match sigil {
 ast::BorrowedSigil => { upvarptr = Load(bcx, upvarptr); }
@@ -138,8 +138,8 @@ pub struct param_substs {

 impl param_substs {
 pub fn validate(&self) {
-for self.tys.iter().advance |t| { assert!(!ty::type_needs_infer(*t)); }
-for self.self_ty.iter().advance |t| { assert!(!ty::type_needs_infer(*t)); }
+foreach t in self.tys.iter() { assert!(!ty::type_needs_infer(*t)); }
+foreach t in self.self_ty.iter() { assert!(!ty::type_needs_infer(*t)); }
 }
 }

@@ -437,7 +437,7 @@ pub fn revoke_clean(cx: @mut Block, val: ValueRef) {
 clean_temp(v, _, _) if v == val => true,
 _ => false
 });
-for cleanup_pos.iter().advance |i| {
+foreach i in cleanup_pos.iter() {
 scope_info.cleanups =
 vec::append(scope_info.cleanups.slice(0u, *i).to_owned(),
 scope_info.cleanups.slice(*i + 1u,
@@ -943,7 +943,7 @@ pub fn align_to(cx: @mut Block, off: ValueRef, align: ValueRef) -> ValueRef {
 pub fn path_str(sess: session::Session, p: &[path_elt]) -> ~str {
 let mut r = ~"";
 let mut first = true;
-for p.iter().advance |e| {
+foreach e in p.iter() {
 match *e {
 ast_map::path_name(s) | ast_map::path_mod(s) => {
 if first {
@ -36,7 +36,7 @@ use syntax::codemap::span;
|
||||||
pub fn trans_block(bcx: @mut Block, b: &ast::Block, dest: expr::Dest) -> @mut Block {
|
pub fn trans_block(bcx: @mut Block, b: &ast::Block, dest: expr::Dest) -> @mut Block {
|
||||||
let _icx = push_ctxt("trans_block");
|
let _icx = push_ctxt("trans_block");
|
||||||
let mut bcx = bcx;
|
let mut bcx = bcx;
|
||||||
for b.stmts.iter().advance |s| {
|
foreach s in b.stmts.iter() {
|
||||||
debuginfo::update_source_pos(bcx, b.span);
|
debuginfo::update_source_pos(bcx, b.span);
|
||||||
bcx = trans_stmt(bcx, *s);
|
bcx = trans_stmt(bcx, *s);
|
||||||
}
|
}
|
||||||
|
@ -144,7 +144,7 @@ pub fn trans_if(bcx: @mut Block,
|
||||||
pub fn join_blocks(parent_bcx: @mut Block, in_cxs: &[@mut Block]) -> @mut Block {
|
pub fn join_blocks(parent_bcx: @mut Block, in_cxs: &[@mut Block]) -> @mut Block {
|
||||||
let out = sub_block(parent_bcx, "join");
|
let out = sub_block(parent_bcx, "join");
|
||||||
let mut reachable = false;
|
let mut reachable = false;
|
||||||
for in_cxs.iter().advance |bcx| {
|
foreach bcx in in_cxs.iter() {
|
||||||
if !bcx.unreachable {
|
if !bcx.unreachable {
|
||||||
Br(*bcx, out.llbb);
|
Br(*bcx, out.llbb);
|
||||||
reachable = true;
|
reachable = true;
|
||||||
|
@@ -223,7 +223,7 @@ pub fn trans_log(log_ex: &ast::expr,
     let (modpath, modname) = {
         let path = &mut bcx.fcx.path;
         let mut modpath = ~[path_mod(ccx.sess.ident_of(ccx.link_meta.name))];
-        for path.iter().advance |e| {
+        foreach e in path.iter() {
            match *e {
                path_mod(_) => { modpath.push(*e) }
                _ => {}
@@ -1154,7 +1154,7 @@ fn trans_rec_or_struct(bcx: @mut Block,
     let optbase = match base {
         Some(base_expr) => {
             let mut leftovers = ~[];
-            for need_base.iter().enumerate().advance |(i, b)| {
+            foreach (i, b) in need_base.iter().enumerate() {
                if *b {
                    leftovers.push((i, field_tys[i].mt.ty))
                }
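Loops that also need an index, as in the hunk above, go through `.enumerate()` and destructure the `(index, element)` pair directly in the `foreach` binding. A hedged sketch of that variant, with made-up names and data rather than anything from the commit:

let names = ~[~"a", ~"b", ~"c"];
// before: for names.iter().enumerate().advance |(i, name)| { ... }
foreach (i, name) in names.iter().enumerate() {
    // print the position alongside the element
    println(fmt!("%u: %s", i, *name));
}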
@@ -1208,10 +1208,10 @@ fn trans_adt(bcx: @mut Block, repr: &adt::Repr, discr: uint,
     let mut bcx = bcx;
     let addr = match dest {
         Ignore => {
-            for fields.iter().advance |&(_i, e)| {
+            foreach &(_i, e) in fields.iter() {
                bcx = trans_into(bcx, e, Ignore);
            }
-            for optbase.iter().advance |sbi| {
+            foreach sbi in optbase.iter() {
                // FIXME #7261: this moves entire base, not just certain fields
                bcx = trans_into(bcx, sbi.expr, Ignore);
            }
@@ -1221,18 +1221,18 @@ fn trans_adt(bcx: @mut Block, repr: &adt::Repr, discr: uint,
     };
     let mut temp_cleanups = ~[];
     adt::trans_start_init(bcx, repr, addr, discr);
-    for fields.iter().advance |&(i, e)| {
+    foreach &(i, e) in fields.iter() {
         let dest = adt::trans_field_ptr(bcx, repr, addr, discr, i);
         let e_ty = expr_ty(bcx, e);
         bcx = trans_into(bcx, e, SaveIn(dest));
         add_clean_temp_mem(bcx, dest, e_ty);
         temp_cleanups.push(dest);
     }
-    for optbase.iter().advance |base| {
+    foreach base in optbase.iter() {
         // FIXME #6573: is it sound to use the destination's repr on the base?
         // And, would it ever be reasonable to be here with discr != 0?
         let base_datum = unpack_datum!(bcx, trans_to_datum(bcx, base.expr));
-        for base.fields.iter().advance |&(i, t)| {
+        foreach &(i, t) in base.fields.iter() {
            let datum = do base_datum.get_element(bcx, t, ZeroMem) |srcval| {
                adt::trans_field_ptr(bcx, repr, srcval, discr, i)
            };
@@ -1241,7 +1241,7 @@ fn trans_adt(bcx: @mut Block, repr: &adt::Repr, discr: uint,
         }
     }

-    for temp_cleanups.iter().advance |cleanup| {
+    foreach cleanup in temp_cleanups.iter() {
         revoke_clean(bcx, *cleanup);
     }
     return bcx;
@@ -287,7 +287,7 @@ pub fn trans_foreign_mod(ccx: @mut CrateContext,
         Some(abi) => abi,
     };

-    for foreign_mod.items.iter().advance |&foreign_item| {
+    foreach &foreign_item in foreign_mod.items.iter() {
         match foreign_item.node {
             ast::foreign_item_fn(*) => {
                 let id = foreign_item.id;
@@ -513,7 +513,7 @@ pub fn trans_foreign_mod(ccx: @mut CrateContext,
             store_inbounds(bcx, llargval, llargbundle, [0u, i]);
         }

-        for bcx.fcx.llretptr.iter().advance |&retptr| {
+        foreach &retptr in bcx.fcx.llretptr.iter() {
             store_inbounds(bcx, retptr, llargbundle, [0u, n]);
         }
     }
@@ -523,7 +523,7 @@ pub fn trans_foreign_mod(ccx: @mut CrateContext,
                  llargbundle: ValueRef) {
         let _icx = push_ctxt("foreign::wrap::build_ret");
         let arg_count = shim_types.fn_sig.inputs.len();
-        for bcx.fcx.llretptr.iter().advance |&retptr| {
+        foreach &retptr in bcx.fcx.llretptr.iter() {
            let llretptr = load_inbounds(bcx, llargbundle, [0, arg_count]);
            Store(bcx, Load(bcx, llretptr), retptr);
        }
@@ -426,7 +426,7 @@ pub fn trans_struct_drop_flag(bcx: @mut Block, t: ty::t, v0: ValueRef, dtor_did:

     // Drop the fields
     let field_tys = ty::struct_fields(bcx.tcx(), class_did, substs);
-    for field_tys.iter().enumerate().advance |(i, fld)| {
+    foreach (i, fld) in field_tys.iter().enumerate() {
         let llfld_a = adt::trans_field_ptr(bcx, repr, v0, 0, i);
         bcx = drop_ty(bcx, llfld_a, fld.mt.ty);
     }
@@ -461,7 +461,7 @@ pub fn trans_struct_drop(mut bcx: @mut Block, t: ty::t, v0: ValueRef, dtor_did:

     // Drop the fields
     let field_tys = ty::struct_fields(bcx.tcx(), class_did, substs);
-    for field_tys.iter().enumerate().advance |(i, fld)| {
+    foreach (i, fld) in field_tys.iter().enumerate() {
         let llfld_a = adt::trans_field_ptr(bcx, repr, v0, 0, i);
         bcx = drop_ty(bcx, llfld_a, fld.mt.ty);
     }
@@ -68,7 +68,7 @@ pub fn maybe_instantiate_inline(ccx: @mut CrateContext, fn_id: ast::def_id)
         ast::item_enum(_, _) => {
             let vs_here = ty::enum_variants(ccx.tcx, local_def(item.id));
             let vs_there = ty::enum_variants(ccx.tcx, parent_id);
-            for vs_here.iter().zip(vs_there.iter()).advance |(here, there)| {
+            foreach (here, there) in vs_here.iter().zip(vs_there.iter()) {
                if there.id == fn_id { my_id = here.id.node; }
                ccx.external.insert(there.id, Some(here.id.node));
            }
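Parallel iteration, as in the hunk above, keeps the same shape: the two iterators are combined with `.zip()` and the resulting pair is destructured in the loop header. A small sketch under the same caveats (illustrative vectors, pre-1.0 syntax, not taken from the commit):

let here = ~[1, 2, 3];
let there = ~[4, 5, 6];
// before: for here.iter().zip(there.iter()).advance |(a, b)| { ... }
foreach (a, b) in here.iter().zip(there.iter()) {
    // visit corresponding elements of both vectors together
    println(fmt!("%d %d", *a, *b));
}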
@@ -57,7 +57,7 @@ pub fn trans_impl(ccx: @mut CrateContext,

     if !generics.ty_params.is_empty() { return; }
     let sub_path = vec::append_one(path, path_name(name));
-    for methods.iter().advance |method| {
+    foreach method in methods.iter() {
         if method.generics.ty_params.len() == 0u {
             let llfn = get_item_val(ccx, method.id);
             let path = vec::append_one(sub_path.clone(),
@@ -599,7 +599,7 @@ pub fn make_vtable(ccx: &mut CrateContext,
     let _icx = push_ctxt("impl::make_vtable");

     let mut components = ~[ tydesc.tydesc ];
-    for ptrs.iter().advance |&ptr| {
+    foreach &ptr in ptrs.iter() {
         components.push(ptr)
     }

@@ -75,8 +75,8 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
         self_vtables: self_vtables
     };

-    for real_substs.tps.iter().advance |s| { assert!(!ty::type_has_params(*s)); }
-    for psubsts.tys.iter().advance |s| { assert!(!ty::type_has_params(*s)); }
+    foreach s in real_substs.tps.iter() { assert!(!ty::type_has_params(*s)); }
+    foreach s in psubsts.tys.iter() { assert!(!ty::type_has_params(*s)); }
     let param_uses = type_use::type_uses_for(ccx, fn_id, psubsts.tys.len());

@@ -96,7 +96,7 @@ impl Reflector {
         let v = self.visitor_val;
         debug!("passing %u args:", args.len());
         let mut bcx = self.bcx;
-        for args.iter().enumerate().advance |(i, a)| {
+        foreach (i, a) in args.iter().enumerate() {
            debug!("arg %u: %s", i, bcx.val_to_str(*a));
        }
        let bool_ty = ty::mk_bool();
@@ -214,7 +214,7 @@ impl Reflector {
         let extra = ~[self.c_uint(tys.len())]
             + self.c_size_and_align(t);
         do self.bracketed("tup", extra) |this| {
-            for tys.iter().enumerate().advance |(i, t)| {
+            foreach (i, t) in tys.iter().enumerate() {
                let extra = ~[this.c_uint(i), this.c_tydesc(*t)];
                this.visit("tup_field", extra);
            }
@@ -259,7 +259,7 @@ impl Reflector {
         let extra = ~[self.c_uint(fields.len())]
             + self.c_size_and_align(t);
         do self.bracketed("class", extra) |this| {
-            for fields.iter().enumerate().advance |(i, field)| {
+            foreach (i, field) in fields.iter().enumerate() {
                let extra = ~[this.c_uint(i),
                              this.c_slice(
                                  bcx.ccx().sess.str_of(field.ident))]
@@ -318,14 +318,14 @@ impl Reflector {
         let enum_args = ~[self.c_uint(variants.len()), make_get_disr()]
             + self.c_size_and_align(t);
         do self.bracketed("enum", enum_args) |this| {
-            for variants.iter().enumerate().advance |(i, v)| {
+            foreach (i, v) in variants.iter().enumerate() {
                let name = ccx.sess.str_of(v.name);
                let variant_args = ~[this.c_uint(i),
                                     this.c_uint(v.disr_val),
                                     this.c_uint(v.args.len()),
                                     this.c_slice(name)];
                do this.bracketed("enum_variant", variant_args) |this| {
-                    for v.args.iter().enumerate().advance |(j, a)| {
+                    foreach (j, a) in v.args.iter().enumerate() {
                        let bcx = this.bcx;
                        let null = C_null(llptrty);
                        let ptr = adt::trans_field_ptr(bcx, repr, null, v.disr_val, j);
@@ -360,7 +360,7 @@ impl Reflector {
     }

     pub fn visit_sig(&mut self, retval: uint, sig: &ty::FnSig) {
-        for sig.inputs.iter().enumerate().advance |(i, arg)| {
+        foreach (i, arg) in sig.inputs.iter().enumerate() {
            let modeval = 5u; // "by copy"
            let extra = ~[self.c_uint(i),
                          self.c_uint(modeval),
@@ -374,14 +374,14 @@ pub fn write_content(bcx: @mut Block,
         ast::expr_vec(ref elements, _) => {
             match dest {
                 Ignore => {
-                    for elements.iter().advance |element| {
+                    foreach element in elements.iter() {
                        bcx = expr::trans_into(bcx, *element, Ignore);
                    }
                }

                SaveIn(lldest) => {
                    let mut temp_cleanups = ~[];
-                    for elements.iter().enumerate().advance |(i, element)| {
+                    foreach (i, element) in elements.iter().enumerate() {
                        let lleltptr = GEPi(bcx, lldest, [i]);
                        debug!("writing index %? with lleltptr=%?",
                               i, bcx.val_to_str(lleltptr));
|
@ -390,7 +390,7 @@ pub fn write_content(bcx: @mut Block,
|
||||||
add_clean_temp_mem(bcx, lleltptr, vt.unit_ty);
|
add_clean_temp_mem(bcx, lleltptr, vt.unit_ty);
|
||||||
temp_cleanups.push(lleltptr);
|
temp_cleanups.push(lleltptr);
|
||||||
}
|
}
|
||||||
for temp_cleanups.iter().advance |cleanup| {
|
foreach cleanup in temp_cleanups.iter() {
|
||||||
revoke_clean(bcx, *cleanup);
|
revoke_clean(bcx, *cleanup);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
Some files were not shown because too many files have changed in this diff.