remove obsolete foreach keyword

This has been replaced by `for`.
Daniel Micay 2013-08-03 12:45:23 -04:00
parent 9f74217d80
commit 1008945528
298 changed files with 1569 additions and 1574 deletions
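
For context, a minimal before/after sketch of the rename this commit applies everywhere (a hypothetical snippet written in the same 2013-era Rust style as the code in the diff below, not a line taken from it):

~~~~
// Before this commit, the external-iterator loop keyword was spelled `foreach`:
//     foreach x in xs.iter() { println(x.to_str()) }
// After it, the same loop is written with `for`:
fn print_all(xs: &[int]) {
    // iterate over the vector, printing each element
    for x in xs.iter() {
        println(x.to_str())
    }
}
~~~~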

View File

@ -1792,11 +1792,11 @@ msgstr ""
msgid "" msgid ""
"~~~~ {.xfail-test}\n" "~~~~ {.xfail-test}\n"
"fn iter<T>(seq: &[T], f: &fn(T)) {\n" "fn iter<T>(seq: &[T], f: &fn(T)) {\n"
" foreach elt in seq.iter() { f(elt); }\n" " for elt in seq.iter() { f(elt); }\n"
"}\n" "}\n"
"fn map<T, U>(seq: &[T], f: &fn(T) -> U) -> ~[U] {\n" "fn map<T, U>(seq: &[T], f: &fn(T) -> U) -> ~[U] {\n"
" let mut acc = ~[];\n" " let mut acc = ~[];\n"
" foreach elt in seq.iter() { acc.push(f(elt)); }\n" " for elt in seq.iter() { acc.push(f(elt)); }\n"
" acc\n" " acc\n"
"}\n" "}\n"
"~~~~\n" "~~~~\n"
@ -4570,7 +4570,7 @@ msgstr ""
#: doc/rust.md:2405 #: doc/rust.md:2405
#, no-wrap #, no-wrap
msgid "" msgid ""
"foreach e in v.iter() {\n" "for e in v.iter() {\n"
" bar(*e);\n" " bar(*e);\n"
"}\n" "}\n"
"~~~~\n" "~~~~\n"

View File

@ -376,7 +376,7 @@ msgstr ""
#, no-wrap #, no-wrap
msgid "" msgid ""
"// print out all the elements in the vector\n" "// print out all the elements in the vector\n"
"foreach x in xs.iter() {\n" "for x in xs.iter() {\n"
" println(x.to_str())\n" " println(x.to_str())\n"
"}\n" "}\n"
msgstr "" msgstr ""
@ -386,7 +386,7 @@ msgstr ""
#, no-wrap #, no-wrap
msgid "" msgid ""
"// print out all but the first 3 elements in the vector\n" "// print out all but the first 3 elements in the vector\n"
"foreach x in xs.iter().skip(3) {\n" "for x in xs.iter().skip(3) {\n"
" println(x.to_str())\n" " println(x.to_str())\n"
"}\n" "}\n"
"~~~\n" "~~~\n"
@ -418,7 +418,7 @@ msgstr ""
#, no-wrap #, no-wrap
msgid "" msgid ""
"// print out the pairs of elements up to (&3, &\"baz\")\n" "// print out the pairs of elements up to (&3, &\"baz\")\n"
"foreach (x, y) in it {\n" "for (x, y) in it {\n"
" println(fmt!(\"%d %s\", *x, *y));\n" " println(fmt!(\"%d %s\", *x, *y));\n"
msgstr "" msgstr ""
@ -487,7 +487,7 @@ msgid ""
" pub fn from_iterator(iterator: &mut T) -> ~[A] {\n" " pub fn from_iterator(iterator: &mut T) -> ~[A] {\n"
" let (lower, _) = iterator.size_hint();\n" " let (lower, _) = iterator.size_hint();\n"
" let mut xs = with_capacity(lower);\n" " let mut xs = with_capacity(lower);\n"
" foreach x in iterator {\n" " for x in iterator {\n"
" xs.push(x);\n" " xs.push(x);\n"
" }\n" " }\n"
" xs\n" " xs\n"
@ -587,7 +587,7 @@ msgstr ""
#, no-wrap #, no-wrap
msgid "" msgid ""
"// prints `5`, `4` and `3`\n" "// prints `5`, `4` and `3`\n"
"foreach &x in it.invert() {\n" "for &x in it.invert() {\n"
" println(fmt!(\"%?\", x))\n" " println(fmt!(\"%?\", x))\n"
"}\n" "}\n"
"~~~\n" "~~~\n"

View File

@ -587,7 +587,7 @@ msgstr ""
#, no-wrap #, no-wrap
msgid "" msgid ""
" let mut final_res = 0f64;\n" " let mut final_res = 0f64;\n"
" foreach ft in futures.mut_iter() {\n" " for ft in futures.mut_iter() {\n"
" final_res += ft.get();\n" " final_res += ft.get();\n"
" }\n" " }\n"
" println(fmt!(\"^2/6 is not far from : %?\", final_res));\n" " println(fmt!(\"^2/6 is not far from : %?\", final_res));\n"

View File

@ -2501,7 +2501,7 @@ msgstr ""
msgid "" msgid ""
"// Iterate over a vector, obtaining a pointer to each element\n" "// Iterate over a vector, obtaining a pointer to each element\n"
"// (`for` is explained in the next section)\n" "// (`for` is explained in the next section)\n"
"foreach crayon in crayons.iter() {\n" "for crayon in crayons.iter() {\n"
" let delicious_crayon_wax = unwrap_crayon(*crayon);\n" " let delicious_crayon_wax = unwrap_crayon(*crayon);\n"
" eat_crayon_wax(delicious_crayon_wax);\n" " eat_crayon_wax(delicious_crayon_wax);\n"
"}\n" "}\n"
@ -3101,7 +3101,7 @@ msgid ""
"~~~~\n" "~~~~\n"
"fn map<T, U>(vector: &[T], function: &fn(v: &T) -> U) -> ~[U] {\n" "fn map<T, U>(vector: &[T], function: &fn(v: &T) -> U) -> ~[U] {\n"
" let mut accumulator = ~[];\n" " let mut accumulator = ~[];\n"
" foreach element in vector.iter() {\n" " for element in vector.iter() {\n"
" accumulator.push(function(element));\n" " accumulator.push(function(element));\n"
" }\n" " }\n"
" return accumulator;\n" " return accumulator;\n"
@ -3570,7 +3570,7 @@ msgid ""
"~~~~\n" "~~~~\n"
"# trait Printable { fn print(&self); }\n" "# trait Printable { fn print(&self); }\n"
"fn print_all<T: Printable>(printable_things: ~[T]) {\n" "fn print_all<T: Printable>(printable_things: ~[T]) {\n"
" foreach thing in printable_things.iter() {\n" " for thing in printable_things.iter() {\n"
" thing.print();\n" " thing.print();\n"
" }\n" " }\n"
"}\n" "}\n"
@ -3650,7 +3650,7 @@ msgstr ""
#, no-wrap #, no-wrap
msgid "" msgid ""
"fn draw_all<T: Drawable>(shapes: ~[T]) {\n" "fn draw_all<T: Drawable>(shapes: ~[T]) {\n"
" foreach shape in shapes.iter() { shape.draw(); }\n" " for shape in shapes.iter() { shape.draw(); }\n"
"}\n" "}\n"
"# let c: Circle = new_circle();\n" "# let c: Circle = new_circle();\n"
"# draw_all(~[c]);\n" "# draw_all(~[c]);\n"
@ -3673,7 +3673,7 @@ msgid ""
"~~~~\n" "~~~~\n"
"# trait Drawable { fn draw(&self); }\n" "# trait Drawable { fn draw(&self); }\n"
"fn draw_all(shapes: &[@Drawable]) {\n" "fn draw_all(shapes: &[@Drawable]) {\n"
" foreach shape in shapes.iter() { shape.draw(); }\n" " for shape in shapes.iter() { shape.draw(); }\n"
"}\n" "}\n"
"~~~~\n" "~~~~\n"
msgstr "" msgstr ""

View File

@ -880,11 +880,11 @@ the function name.
~~~~ {.xfail-test} ~~~~ {.xfail-test}
fn iter<T>(seq: &[T], f: &fn(T)) { fn iter<T>(seq: &[T], f: &fn(T)) {
foreach elt in seq.iter() { f(elt); } for elt in seq.iter() { f(elt); }
} }
fn map<T, U>(seq: &[T], f: &fn(T) -> U) -> ~[U] { fn map<T, U>(seq: &[T], f: &fn(T) -> U) -> ~[U] {
let mut acc = ~[]; let mut acc = ~[];
foreach elt in seq.iter() { acc.push(f(elt)); } for elt in seq.iter() { acc.push(f(elt)); }
acc acc
} }
~~~~ ~~~~
@ -2378,7 +2378,7 @@ An example of a for loop over the contents of a vector:
let v: &[foo] = &[a, b, c]; let v: &[foo] = &[a, b, c];
foreach e in v.iter() { for e in v.iter() {
bar(*e); bar(*e);
} }
~~~~ ~~~~
@ -2387,7 +2387,7 @@ An example of a for loop over a series of integers:
~~~~ ~~~~
# fn bar(b:uint) { } # fn bar(b:uint) { }
foreach i in range(0u, 256) { for i in range(0u, 256) {
bar(i); bar(i);
} }
~~~~ ~~~~

View File

@ -121,7 +121,7 @@ should interleave the output in vaguely random order.
# use std::io::print; # use std::io::print;
# use std::task::spawn; # use std::task::spawn;
foreach child_task_number in range(0, 20) { for child_task_number in range(0, 20) {
do spawn { do spawn {
print(fmt!("I am child number %d\n", child_task_number)); print(fmt!("I am child number %d\n", child_task_number));
} }
@ -240,7 +240,7 @@ Instead we can use a `SharedChan`, a type that allows a single
let (port, chan) = stream(); let (port, chan) = stream();
let chan = SharedChan::new(chan); let chan = SharedChan::new(chan);
foreach init_val in range(0u, 3) { for init_val in range(0u, 3) {
// Create a new channel handle to distribute to the child task // Create a new channel handle to distribute to the child task
let child_chan = chan.clone(); let child_chan = chan.clone();
do spawn { do spawn {
@ -314,7 +314,7 @@ be distributed on the available cores.
# use std::vec; # use std::vec;
fn partial_sum(start: uint) -> f64 { fn partial_sum(start: uint) -> f64 {
let mut local_sum = 0f64; let mut local_sum = 0f64;
foreach num in range(start*100000, (start+1)*100000) { for num in range(start*100000, (start+1)*100000) {
local_sum += (num as f64 + 1.0).pow(&-2.0); local_sum += (num as f64 + 1.0).pow(&-2.0);
} }
local_sum local_sum
@ -324,7 +324,7 @@ fn main() {
let mut futures = vec::from_fn(1000, |ind| do extra::future::spawn { partial_sum(ind) }); let mut futures = vec::from_fn(1000, |ind| do extra::future::spawn { partial_sum(ind) });
let mut final_res = 0f64; let mut final_res = 0f64;
foreach ft in futures.mut_iter() { for ft in futures.mut_iter() {
final_res += ft.get(); final_res += ft.get();
} }
println(fmt!("π^2/6 is not far from : %?", final_res)); println(fmt!("π^2/6 is not far from : %?", final_res));
@ -359,7 +359,7 @@ fn main() {
let numbers_arc = Arc::new(numbers); let numbers_arc = Arc::new(numbers);
foreach num in range(1u, 10) { for num in range(1u, 10) {
let (port, chan) = stream(); let (port, chan) = stream();
chan.send(numbers_arc.clone()); chan.send(numbers_arc.clone());

View File

@ -243,7 +243,7 @@ pub fn make_tests(config: &config) -> ~[test::TestDescAndFn] {
config.src_base.to_str()); config.src_base.to_str());
let mut tests = ~[]; let mut tests = ~[];
let dirs = os::list_dir_path(&config.src_base); let dirs = os::list_dir_path(&config.src_base);
foreach file in dirs.iter() { for file in dirs.iter() {
let file = file.clone(); let file = file.clone();
debug!("inspecting file %s", file.to_str()); debug!("inspecting file %s", file.to_str());
if is_test(config, &file) { if is_test(config, &file) {
@ -271,11 +271,11 @@ pub fn is_test(config: &config, testfile: &Path) -> bool {
let mut valid = false; let mut valid = false;
foreach ext in valid_extensions.iter() { for ext in valid_extensions.iter() {
if name.ends_with(*ext) { valid = true; } if name.ends_with(*ext) { valid = true; }
} }
foreach pre in invalid_prefixes.iter() { for pre in invalid_prefixes.iter() {
if name.starts_with(*pre) { valid = false; } if name.starts_with(*pre) { valid = false; }
} }

View File

@ -56,7 +56,7 @@ pub fn run(lib_path: &str,
err_fd: None err_fd: None
}); });
foreach input in input.iter() { for input in input.iter() {
proc.input().write_str(*input); proc.input().write_str(*input);
} }
let output = proc.finish_with_output(); let output = proc.finish_with_output();

View File

@ -282,7 +282,7 @@ fn run_debuginfo_test(config: &config, props: &TestProps, testfile: &Path) {
// check if each line in props.check_lines appears in the // check if each line in props.check_lines appears in the
// output (in order) // output (in order)
let mut i = 0u; let mut i = 0u;
foreach line in ProcRes.stdout.line_iter() { for line in ProcRes.stdout.line_iter() {
if check_lines[i].trim() == line.trim() { if check_lines[i].trim() == line.trim() {
i += 1u; i += 1u;
} }
@ -312,7 +312,7 @@ fn check_error_patterns(props: &TestProps,
let mut next_err_idx = 0u; let mut next_err_idx = 0u;
let mut next_err_pat = &props.error_patterns[next_err_idx]; let mut next_err_pat = &props.error_patterns[next_err_idx];
let mut done = false; let mut done = false;
foreach line in ProcRes.stderr.line_iter() { for line in ProcRes.stderr.line_iter() {
if line.contains(*next_err_pat) { if line.contains(*next_err_pat) {
debug!("found error pattern %s", *next_err_pat); debug!("found error pattern %s", *next_err_pat);
next_err_idx += 1u; next_err_idx += 1u;
@ -332,7 +332,7 @@ fn check_error_patterns(props: &TestProps,
fatal_ProcRes(fmt!("error pattern '%s' not found!", fatal_ProcRes(fmt!("error pattern '%s' not found!",
missing_patterns[0]), ProcRes); missing_patterns[0]), ProcRes);
} else { } else {
foreach pattern in missing_patterns.iter() { for pattern in missing_patterns.iter() {
error(fmt!("error pattern '%s' not found!", *pattern)); error(fmt!("error pattern '%s' not found!", *pattern));
} }
fatal_ProcRes(~"multiple error patterns not found", ProcRes); fatal_ProcRes(~"multiple error patterns not found", ProcRes);
@ -385,9 +385,9 @@ fn check_expected_errors(expected_errors: ~[errors::ExpectedError],
// filename:line1:col1: line2:col2: *warning:* msg // filename:line1:col1: line2:col2: *warning:* msg
// where line1:col1: is the starting point, line2:col2: // where line1:col1: is the starting point, line2:col2:
// is the ending point, and * represents ANSI color codes. // is the ending point, and * represents ANSI color codes.
foreach line in ProcRes.stderr.line_iter() { for line in ProcRes.stderr.line_iter() {
let mut was_expected = false; let mut was_expected = false;
foreach (i, ee) in expected_errors.iter().enumerate() { for (i, ee) in expected_errors.iter().enumerate() {
if !found_flags[i] { if !found_flags[i] {
debug!("prefix=%s ee.kind=%s ee.msg=%s line=%s", debug!("prefix=%s ee.kind=%s ee.msg=%s line=%s",
prefixes[i], ee.kind, ee.msg, line); prefixes[i], ee.kind, ee.msg, line);
@ -413,7 +413,7 @@ fn check_expected_errors(expected_errors: ~[errors::ExpectedError],
} }
} }
foreach i in range(0u, found_flags.len()) { for i in range(0u, found_flags.len()) {
if !found_flags[i] { if !found_flags[i] {
let ee = &expected_errors[i]; let ee = &expected_errors[i];
fatal_ProcRes(fmt!("expected %s on line %u not found: %s", fatal_ProcRes(fmt!("expected %s on line %u not found: %s",
@ -558,7 +558,7 @@ fn compose_and_run_compiler(
let extra_link_args = ~[~"-L", let extra_link_args = ~[~"-L",
aux_output_dir_name(config, testfile).to_str()]; aux_output_dir_name(config, testfile).to_str()];
foreach rel_ab in props.aux_builds.iter() { for rel_ab in props.aux_builds.iter() {
let abs_ab = config.aux_base.push_rel(&Path(*rel_ab)); let abs_ab = config.aux_base.push_rel(&Path(*rel_ab));
let aux_args = let aux_args =
make_compile_args(config, props, ~[~"--lib"] + extra_link_args, make_compile_args(config, props, ~[~"--lib"] + extra_link_args,
@ -785,7 +785,7 @@ fn _arm_exec_compiled_test(config: &config, props: &TestProps,
runargs.push(fmt!("%s", config.adb_test_dir)); runargs.push(fmt!("%s", config.adb_test_dir));
runargs.push(fmt!("%s", prog_short)); runargs.push(fmt!("%s", prog_short));
foreach tv in args.args.iter() { for tv in args.args.iter() {
runargs.push(tv.to_owned()); runargs.push(tv.to_owned());
} }
@ -802,7 +802,7 @@ fn _arm_exec_compiled_test(config: &config, props: &TestProps,
Some(~"")); Some(~""));
let mut exitcode : int = 0; let mut exitcode : int = 0;
foreach c in exitcode_out.iter() { for c in exitcode_out.iter() {
if !c.is_digit() { break; } if !c.is_digit() { break; }
exitcode = exitcode * 10 + match c { exitcode = exitcode * 10 + match c {
'0' .. '9' => c as int - ('0' as int), '0' .. '9' => c as int - ('0' as int),
@ -851,7 +851,7 @@ fn _arm_push_aux_shared_library(config: &config, testfile: &Path) {
let tstr = aux_output_dir_name(config, testfile).to_str(); let tstr = aux_output_dir_name(config, testfile).to_str();
let dirs = os::list_dir_path(&Path(tstr)); let dirs = os::list_dir_path(&Path(tstr));
foreach file in dirs.iter() { for file in dirs.iter() {
if (file.filetype() == Some(~".so")) { if (file.filetype() == Some(~".so")) {

View File

@ -7,7 +7,7 @@ my @lines = <>;
my $anchors = {}; my $anchors = {};
my $i = 0; my $i = 0;
foreach $line (@lines) { for $line (@lines) {
$i++; $i++;
if ($line =~ m/id="([^"]+)"/) { if ($line =~ m/id="([^"]+)"/) {
$anchors->{$1} = $i; $anchors->{$1} = $i;
@ -15,7 +15,7 @@ foreach $line (@lines) {
} }
$i = 0; $i = 0;
foreach $line (@lines) { for $line (@lines) {
$i++; $i++;
while ($line =~ m/href="#([^"]+)"/g) { while ($line =~ m/href="#([^"]+)"/g) {
if (! exists($anchors->{$1})) { if (! exists($anchors->{$1})) {

View File

@ -782,7 +782,7 @@ mod tests {
} }
// Wait for children to pass their asserts // Wait for children to pass their asserts
foreach r in children.iter() { for r in children.iter() {
r.recv(); r.recv();
} }
@ -848,7 +848,7 @@ mod tests {
*state = 31337; *state = 31337;
// FIXME: #7372: hits type inference bug with iterators // FIXME: #7372: hits type inference bug with iterators
// send to other readers // send to other readers
foreach i in range(0u, reader_convos.len()) { for i in range(0u, reader_convos.len()) {
match reader_convos[i] { match reader_convos[i] {
(ref rc, _) => rc.send(()), (ref rc, _) => rc.send(()),
} }
@ -858,7 +858,7 @@ mod tests {
do (&read_mode).read |state| { do (&read_mode).read |state| {
// FIXME: #7372: hits type inference bug with iterators // FIXME: #7372: hits type inference bug with iterators
// complete handshake with other readers // complete handshake with other readers
foreach i in range(0u, reader_convos.len()) { for i in range(0u, reader_convos.len()) {
match reader_convos[i] { match reader_convos[i] {
(_, ref rp) => rp.recv(), (_, ref rp) => rp.recv(),
} }

View File

@ -277,7 +277,7 @@ impl Arena {
#[test] #[test]
fn test_arena_destructors() { fn test_arena_destructors() {
let arena = Arena(); let arena = Arena();
foreach i in range(0u, 10) { for i in range(0u, 10) {
// Arena allocate something with drop glue to make sure it // Arena allocate something with drop glue to make sure it
// doesn't leak. // doesn't leak.
do arena.alloc { @i }; do arena.alloc { @i };
@ -293,7 +293,7 @@ fn test_arena_destructors() {
fn test_arena_destructors_fail() { fn test_arena_destructors_fail() {
let arena = Arena(); let arena = Arena();
// Put some stuff in the arena. // Put some stuff in the arena.
foreach i in range(0u, 10) { for i in range(0u, 10) {
// Arena allocate something with drop glue to make sure it // Arena allocate something with drop glue to make sure it
// doesn't leak. // doesn't leak.
do arena.alloc { @i }; do arena.alloc { @i };

View File

@ -206,7 +206,7 @@ impl<'self> FromBase64 for &'self [u8] {
let mut modulus = 0; let mut modulus = 0;
let mut it = self.iter(); let mut it = self.iter();
foreach &byte in it { for &byte in it {
let ch = byte as char; let ch = byte as char;
let val = byte as u32; let val = byte as u32;

View File

@ -145,7 +145,7 @@ impl BigBitv {
let len = b.storage.len(); let len = b.storage.len();
assert_eq!(self.storage.len(), len); assert_eq!(self.storage.len(), len);
let mut changed = false; let mut changed = false;
foreach i in range(0, len) { for i in range(0, len) {
let mask = big_mask(nbits, i); let mask = big_mask(nbits, i);
let w0 = self.storage[i] & mask; let w0 = self.storage[i] & mask;
let w1 = b.storage[i] & mask; let w1 = b.storage[i] & mask;
@ -401,7 +401,7 @@ impl Bitv {
match self.rep { match self.rep {
Small(ref b) => b.is_true(self.nbits), Small(ref b) => b.is_true(self.nbits),
_ => { _ => {
foreach i in self.iter() { if !i { return false; } } for i in self.iter() { if !i { return false; } }
true true
} }
} }
@ -422,7 +422,7 @@ impl Bitv {
match self.rep { match self.rep {
Small(ref b) => b.is_false(self.nbits), Small(ref b) => b.is_false(self.nbits),
Big(_) => { Big(_) => {
foreach i in self.iter() { if i { return false; } } for i in self.iter() { if i { return false; } }
true true
} }
} }
@ -486,7 +486,7 @@ impl Bitv {
*/ */
pub fn to_str(&self) -> ~str { pub fn to_str(&self) -> ~str {
let mut rs = ~""; let mut rs = ~"";
foreach i in self.iter() { for i in self.iter() {
if i { if i {
rs.push_char('1'); rs.push_char('1');
} else { } else {
@ -544,7 +544,7 @@ pub fn from_bools(bools: &[bool]) -> Bitv {
*/ */
pub fn from_fn(len: uint, f: &fn(index: uint) -> bool) -> Bitv { pub fn from_fn(len: uint, f: &fn(index: uint) -> bool) -> Bitv {
let mut bitv = Bitv::new(len, false); let mut bitv = Bitv::new(len, false);
foreach i in range(0u, len) { for i in range(0u, len) {
bitv.set(i, f(i)); bitv.set(i, f(i));
} }
bitv bitv
@ -561,7 +561,7 @@ fn iterate_bits(base: uint, bits: uint, f: &fn(uint) -> bool) -> bool {
if bits == 0 { if bits == 0 {
return true; return true;
} }
foreach i in range(0u, uint::bits) { for i in range(0u, uint::bits) {
if bits & (1 << i) != 0 { if bits & (1 << i) != 0 {
if !f(base + i) { if !f(base + i) {
return false; return false;
@ -676,7 +676,7 @@ impl BitvSet {
fn other_op(&mut self, other: &BitvSet, f: &fn(uint, uint) -> uint) { fn other_op(&mut self, other: &BitvSet, f: &fn(uint, uint) -> uint) {
fn nbits(mut w: uint) -> uint { fn nbits(mut w: uint) -> uint {
let mut bits = 0; let mut bits = 0;
foreach _ in range(0u, uint::bits) { for _ in range(0u, uint::bits) {
if w == 0 { if w == 0 {
break; break;
} }
@ -688,7 +688,7 @@ impl BitvSet {
if self.capacity() < other.capacity() { if self.capacity() < other.capacity() {
self.bitv.storage.grow(other.capacity() / uint::bits, &0); self.bitv.storage.grow(other.capacity() / uint::bits, &0);
} }
foreach (i, &w) in other.bitv.storage.iter().enumerate() { for (i, &w) in other.bitv.storage.iter().enumerate() {
let old = self.bitv.storage[i]; let old = self.bitv.storage[i];
let new = f(old, w); let new = f(old, w);
self.bitv.storage[i] = new; self.bitv.storage[i] = new;
@ -721,7 +721,7 @@ impl BitvSet {
} }
pub fn difference(&self, other: &BitvSet, f: &fn(&uint) -> bool) -> bool { pub fn difference(&self, other: &BitvSet, f: &fn(&uint) -> bool) -> bool {
foreach (i, w1, w2) in self.common_iter(other) { for (i, w1, w2) in self.common_iter(other) {
if !iterate_bits(i, w1 & !w2, |b| f(&b)) { if !iterate_bits(i, w1 & !w2, |b| f(&b)) {
return false return false
} }
@ -734,7 +734,7 @@ impl BitvSet {
pub fn symmetric_difference(&self, other: &BitvSet, pub fn symmetric_difference(&self, other: &BitvSet,
f: &fn(&uint) -> bool) -> bool { f: &fn(&uint) -> bool) -> bool {
foreach (i, w1, w2) in self.common_iter(other) { for (i, w1, w2) in self.common_iter(other) {
if !iterate_bits(i, w1 ^ w2, |b| f(&b)) { if !iterate_bits(i, w1 ^ w2, |b| f(&b)) {
return false return false
} }
@ -747,7 +747,7 @@ impl BitvSet {
} }
pub fn union(&self, other: &BitvSet, f: &fn(&uint) -> bool) -> bool { pub fn union(&self, other: &BitvSet, f: &fn(&uint) -> bool) -> bool {
foreach (i, w1, w2) in self.common_iter(other) { for (i, w1, w2) in self.common_iter(other) {
if !iterate_bits(i, w1 | w2, |b| f(&b)) { if !iterate_bits(i, w1 | w2, |b| f(&b)) {
return false return false
} }
@ -761,12 +761,12 @@ impl cmp::Eq for BitvSet {
if self.size != other.size { if self.size != other.size {
return false; return false;
} }
foreach (_, w1, w2) in self.common_iter(other) { for (_, w1, w2) in self.common_iter(other) {
if w1 != w2 { if w1 != w2 {
return false; return false;
} }
} }
foreach (_, _, w) in self.outlier_iter(other) { for (_, _, w) in self.outlier_iter(other) {
if w != 0 { if w != 0 {
return false; return false;
} }
@ -801,7 +801,7 @@ impl Set<uint> for BitvSet {
} }
fn is_subset(&self, other: &BitvSet) -> bool { fn is_subset(&self, other: &BitvSet) -> bool {
foreach (_, w1, w2) in self.common_iter(other) { for (_, w1, w2) in self.common_iter(other) {
if w1 & w2 != w1 { if w1 & w2 != w1 {
return false; return false;
} }
@ -809,7 +809,7 @@ impl Set<uint> for BitvSet {
/* If anything is not ours, then everything is not ours so we're /* If anything is not ours, then everything is not ours so we're
definitely a subset in that case. Otherwise if there's any stray definitely a subset in that case. Otherwise if there's any stray
ones that 'other' doesn't have, we're not a subset. */ ones that 'other' doesn't have, we're not a subset. */
foreach (mine, _, w) in self.outlier_iter(other) { for (mine, _, w) in self.outlier_iter(other) {
if !mine { if !mine {
return true; return true;
} else if w != 0 { } else if w != 0 {
@ -1285,12 +1285,12 @@ mod tests {
#[test] #[test]
fn test_equal_sneaky_big() { fn test_equal_sneaky_big() {
let mut a = bitv::Bitv::new(100, false); let mut a = bitv::Bitv::new(100, false);
foreach i in range(0u, 100) { for i in range(0u, 100) {
a.set(i, true); a.set(i, true);
} }
let mut b = bitv::Bitv::new(100, true); let mut b = bitv::Bitv::new(100, true);
foreach i in range(0u, 100) { for i in range(0u, 100) {
b.set(i, true); b.set(i, true);
} }
@ -1333,7 +1333,7 @@ mod tests {
let bools = [true, false, true, true]; let bools = [true, false, true, true];
let bitv = from_bools(bools); let bitv = from_bools(bools);
foreach (act, &ex) in bitv.iter().zip(bools.iter()) { for (act, &ex) in bitv.iter().zip(bools.iter()) {
assert_eq!(ex, act); assert_eq!(ex, act);
} }
} }
@ -1639,7 +1639,7 @@ mod tests {
let bitv = Bitv::new(uint::bits, false); let bitv = Bitv::new(uint::bits, false);
do b.iter { do b.iter {
let mut _sum = 0; let mut _sum = 0;
foreach pres in bitv.iter() { for pres in bitv.iter() {
_sum += pres as uint; _sum += pres as uint;
} }
} }
@ -1650,7 +1650,7 @@ mod tests {
let bitv = Bitv::new(BENCH_BITS, false); let bitv = Bitv::new(BENCH_BITS, false);
do b.iter { do b.iter {
let mut _sum = 0; let mut _sum = 0;
foreach pres in bitv.iter() { for pres in bitv.iter() {
_sum += pres as uint; _sum += pres as uint;
} }
} }
@ -1662,7 +1662,7 @@ mod tests {
|idx| {idx % 3 == 0})); |idx| {idx % 3 == 0}));
do b.iter { do b.iter {
let mut _sum = 0; let mut _sum = 0;
foreach idx in bitv.iter() { for idx in bitv.iter() {
_sum += idx; _sum += idx;
} }
} }

View File

@ -53,7 +53,7 @@ mod bench {
let mut rng = rand::XorShiftRng::new(); let mut rng = rand::XorShiftRng::new();
map.clear(); map.clear();
foreach _ in range(0, n) { for _ in range(0, n) {
map.insert(rng.gen::<uint>() % n, 1); map.insert(rng.gen::<uint>() % n, 1);
} }
@ -70,7 +70,7 @@ mod bench {
bh: &mut BenchHarness) { bh: &mut BenchHarness) {
// setup // setup
map.clear(); map.clear();
foreach i in range(0u, n) { for i in range(0u, n) {
map.insert(i*2, 1); map.insert(i*2, 1);
} }
@ -90,7 +90,7 @@ mod bench {
let mut rng = rand::XorShiftRng::new(); let mut rng = rand::XorShiftRng::new();
let mut keys = vec::from_fn(n, |_| rng.gen::<uint>() % n); let mut keys = vec::from_fn(n, |_| rng.gen::<uint>() % n);
foreach k in keys.iter() { for k in keys.iter() {
map.insert(*k, 1); map.insert(*k, 1);
} }
@ -108,7 +108,7 @@ mod bench {
map: &mut M, map: &mut M,
bh: &mut BenchHarness) { bh: &mut BenchHarness) {
// setup // setup
foreach i in range(0u, n) { for i in range(0u, n) {
map.insert(i, 1); map.insert(i, 1);
} }

View File

@ -70,7 +70,7 @@ pub trait Digest {
fn to_hex(rr: &[u8]) -> ~str { fn to_hex(rr: &[u8]) -> ~str {
let mut s = ~""; let mut s = ~"";
foreach b in rr.iter() { for b in rr.iter() {
let hex = uint::to_str_radix(*b as uint, 16u); let hex = uint::to_str_radix(*b as uint, 16u);
if hex.len() == 1 { if hex.len() == 1 {
s.push_char('0'); s.push_char('0');

View File

@ -250,7 +250,7 @@ mod tests {
let mut out = [0u8, ..20]; let mut out = [0u8, ..20];
let mut sh = ~Sha1::new(); let mut sh = ~Sha1::new();
foreach t in tests.iter() { for t in tests.iter() {
(*sh).input_str(t.input); (*sh).input_str(t.input);
sh.result(out); sh.result(out);
assert!(t.output.as_slice() == out); assert!(t.output.as_slice() == out);
@ -264,7 +264,7 @@ mod tests {
// Test that it works when accepting the message in pieces // Test that it works when accepting the message in pieces
foreach t in tests.iter() { for t in tests.iter() {
let len = t.input.len(); let len = t.input.len();
let mut left = len; let mut left = len;
while left > 0u { while left > 0u {

View File

@ -736,7 +736,7 @@ mod tests {
fn test_hash<D: Digest>(sh: &mut D, tests: &[Test]) { fn test_hash<D: Digest>(sh: &mut D, tests: &[Test]) {
// Test that it works when accepting the message all at once // Test that it works when accepting the message all at once
foreach t in tests.iter() { for t in tests.iter() {
sh.input_str(t.input); sh.input_str(t.input);
let out_str = sh.result_str(); let out_str = sh.result_str();
@ -746,7 +746,7 @@ mod tests {
} }
// Test that it works when accepting the message in pieces // Test that it works when accepting the message in pieces
foreach t in tests.iter() { for t in tests.iter() {
let len = t.input.len(); let len = t.input.len();
let mut left = len; let mut left = len;
while left > 0u { while left > 0u {

View File

@ -548,7 +548,7 @@ impl<A, T: Iterator<A>> FromIterator<A, T> for DList<A> {
impl<A, T: Iterator<A>> Extendable<A, T> for DList<A> { impl<A, T: Iterator<A>> Extendable<A, T> for DList<A> {
fn extend(&mut self, iterator: &mut T) { fn extend(&mut self, iterator: &mut T) {
foreach elt in *iterator { self.push_back(elt); } for elt in *iterator { self.push_back(elt); }
} }
} }
@ -687,7 +687,7 @@ mod tests {
check_links(&m); check_links(&m);
let sum = v + u; let sum = v + u;
assert_eq!(sum.len(), m.len()); assert_eq!(sum.len(), m.len());
foreach elt in sum.consume_iter() { for elt in sum.consume_iter() {
assert_eq!(m.pop_front(), Some(elt)) assert_eq!(m.pop_front(), Some(elt))
} }
} }
@ -711,7 +711,7 @@ mod tests {
check_links(&m); check_links(&m);
let sum = u + v; let sum = u + v;
assert_eq!(sum.len(), m.len()); assert_eq!(sum.len(), m.len());
foreach elt in sum.consume_iter() { for elt in sum.consume_iter() {
assert_eq!(m.pop_front(), Some(elt)) assert_eq!(m.pop_front(), Some(elt))
} }
} }
@ -742,7 +742,7 @@ mod tests {
#[test] #[test]
fn test_iterator() { fn test_iterator() {
let m = generate_test(); let m = generate_test();
foreach (i, elt) in m.iter().enumerate() { for (i, elt) in m.iter().enumerate() {
assert_eq!(i as int, *elt); assert_eq!(i as int, *elt);
} }
let mut n = DList::new(); let mut n = DList::new();
@ -790,7 +790,7 @@ mod tests {
#[test] #[test]
fn test_rev_iter() { fn test_rev_iter() {
let m = generate_test(); let m = generate_test();
foreach (i, elt) in m.rev_iter().enumerate() { for (i, elt) in m.rev_iter().enumerate() {
assert_eq!((6 - i) as int, *elt); assert_eq!((6 - i) as int, *elt);
} }
let mut n = DList::new(); let mut n = DList::new();
@ -807,7 +807,7 @@ mod tests {
fn test_mut_iter() { fn test_mut_iter() {
let mut m = generate_test(); let mut m = generate_test();
let mut len = m.len(); let mut len = m.len();
foreach (i, elt) in m.mut_iter().enumerate() { for (i, elt) in m.mut_iter().enumerate() {
assert_eq!(i as int, *elt); assert_eq!(i as int, *elt);
len -= 1; len -= 1;
} }
@ -899,7 +899,7 @@ mod tests {
#[test] #[test]
fn test_mut_rev_iter() { fn test_mut_rev_iter() {
let mut m = generate_test(); let mut m = generate_test();
foreach (i, elt) in m.mut_rev_iter().enumerate() { for (i, elt) in m.mut_rev_iter().enumerate() {
assert_eq!((6-i) as int, *elt); assert_eq!((6-i) as int, *elt);
} }
let mut n = DList::new(); let mut n = DList::new();
@ -943,7 +943,7 @@ mod tests {
fn fuzz_test(sz: int) { fn fuzz_test(sz: int) {
let mut m = DList::new::<int>(); let mut m = DList::new::<int>();
let mut v = ~[]; let mut v = ~[];
foreach i in range(0, sz) { for i in range(0, sz) {
check_links(&m); check_links(&m);
let r: u8 = rand::random(); let r: u8 = rand::random();
match r % 6 { match r % 6 {
@ -969,7 +969,7 @@ mod tests {
check_links(&m); check_links(&m);
let mut i = 0u; let mut i = 0u;
foreach (a, &b) in m.consume_iter().zip(v.iter()) { for (a, &b) in m.consume_iter().zip(v.iter()) {
i += 1; i += 1;
assert_eq!(a, b); assert_eq!(a, b);
} }

View File

@ -419,7 +419,7 @@ mod test {
fn make_file(path : &Path, contents: &[~str]) { fn make_file(path : &Path, contents: &[~str]) {
let file = io::file_writer(path, [io::Create, io::Truncate]).unwrap(); let file = io::file_writer(path, [io::Create, io::Truncate]).unwrap();
foreach str in contents.iter() { for str in contents.iter() {
file.write_str(*str); file.write_str(*str);
file.write_char('\n'); file.write_char('\n');
} }
@ -446,13 +446,13 @@ mod test {
|i| fmt!("tmp/lib-fileinput-test-fileinput-read-byte-%u.tmp", i)), true); |i| fmt!("tmp/lib-fileinput-test-fileinput-read-byte-%u.tmp", i)), true);
// 3 files containing 0\n, 1\n, and 2\n respectively // 3 files containing 0\n, 1\n, and 2\n respectively
foreach (i, filename) in filenames.iter().enumerate() { for (i, filename) in filenames.iter().enumerate() {
make_file(filename.get_ref(), [fmt!("%u", i)]); make_file(filename.get_ref(), [fmt!("%u", i)]);
} }
let fi = FileInput::from_vec(filenames.clone()); let fi = FileInput::from_vec(filenames.clone());
foreach (line, c) in "012".iter().enumerate() { for (line, c) in "012".iter().enumerate() {
assert_eq!(fi.read_byte(), c as int); assert_eq!(fi.read_byte(), c as int);
assert_eq!(fi.state().line_num, line); assert_eq!(fi.state().line_num, line);
assert_eq!(fi.state().line_num_file, 0); assert_eq!(fi.state().line_num_file, 0);
@ -476,7 +476,7 @@ mod test {
|i| fmt!("tmp/lib-fileinput-test-fileinput-read-%u.tmp", i)), true); |i| fmt!("tmp/lib-fileinput-test-fileinput-read-%u.tmp", i)), true);
// 3 files containing 1\n, 2\n, and 3\n respectively // 3 files containing 1\n, 2\n, and 3\n respectively
foreach (i, filename) in filenames.iter().enumerate() { for (i, filename) in filenames.iter().enumerate() {
make_file(filename.get_ref(), [fmt!("%u", i)]); make_file(filename.get_ref(), [fmt!("%u", i)]);
} }
@ -496,7 +496,7 @@ mod test {
3, 3,
|i| fmt!("tmp/lib-fileinput-test-input-vec-%u.tmp", i)), true); |i| fmt!("tmp/lib-fileinput-test-input-vec-%u.tmp", i)), true);
foreach (i, filename) in filenames.iter().enumerate() { for (i, filename) in filenames.iter().enumerate() {
let contents = let contents =
vec::from_fn(3, |j| fmt!("%u %u", i, j)); vec::from_fn(3, |j| fmt!("%u %u", i, j));
make_file(filename.get_ref(), contents); make_file(filename.get_ref(), contents);
@ -518,7 +518,7 @@ mod test {
3, 3,
|i| fmt!("tmp/lib-fileinput-test-input-vec-state-%u.tmp", i)),true); |i| fmt!("tmp/lib-fileinput-test-input-vec-state-%u.tmp", i)),true);
foreach (i, filename) in filenames.iter().enumerate() { for (i, filename) in filenames.iter().enumerate() {
let contents = let contents =
vec::from_fn(3, |j| fmt!("%u %u", i, j + 1)); vec::from_fn(3, |j| fmt!("%u %u", i, j + 1));
make_file(filename.get_ref(), contents); make_file(filename.get_ref(), contents);
@ -587,7 +587,7 @@ mod test {
3, 3,
|i| fmt!("tmp/lib-fileinput-test-next-file-%u.tmp", i)),true); |i| fmt!("tmp/lib-fileinput-test-next-file-%u.tmp", i)),true);
foreach (i, filename) in filenames.iter().enumerate() { for (i, filename) in filenames.iter().enumerate() {
let contents = let contents =
vec::from_fn(3, |j| fmt!("%u %u", i, j + 1)); vec::from_fn(3, |j| fmt!("%u %u", i, j + 1));
make_file(filename.get_ref(), contents); make_file(filename.get_ref(), contents);
@ -600,7 +600,7 @@ mod test {
input.next_file(); // skip the rest of 1 input.next_file(); // skip the rest of 1
// read all lines from 1 (but don't read any from 2), // read all lines from 1 (but don't read any from 2),
foreach i in range(1u, 4) { for i in range(1u, 4) {
assert_eq!(input.read_line(), fmt!("1 %u", i)); assert_eq!(input.read_line(), fmt!("1 %u", i));
} }
// 1 is finished, but 2 hasn't been started yet, so this will // 1 is finished, but 2 hasn't been started yet, so this will

View File

@ -29,12 +29,12 @@ This example sends boxed integers across tasks using serialization.
let (port, chan) = serial::pipe_stream(); let (port, chan) = serial::pipe_stream();
do task::spawn || { do task::spawn || {
foreach i in range(0, 10) { for i in range(0, 10) {
chan.send(@i) chan.send(@i)
} }
} }
foreach i in range(0, 10) { for i in range(0, 10) {
assert @i == port.recv() assert @i == port.recv()
} }
~~~ ~~~
@ -664,12 +664,12 @@ mod test {
let (port, chan) = serial::pipe_stream(); let (port, chan) = serial::pipe_stream();
do task::spawn || { do task::spawn || {
foreach i in range(0, 10) { for i in range(0, 10) {
chan.send(i) chan.send(i)
} }
} }
foreach i in range(0, 10) { for i in range(0, 10) {
assert!(i == port.recv()) assert!(i == port.recv())
} }
} }
@ -680,12 +680,12 @@ mod test {
let (port, chan) = serial::pipe_stream(); let (port, chan) = serial::pipe_stream();
do task::spawn || { do task::spawn || {
foreach i in range(0, 10) { for i in range(0, 10) {
chan.send(@i) chan.send(@i)
} }
} }
foreach i in range(0, 10) { for i in range(0, 10) {
assert!(@i == port.recv()) assert!(@i == port.recv())
} }
} }
@ -711,12 +711,12 @@ mod test {
let (port, chan) = pod::pipe_stream(); let (port, chan) = pod::pipe_stream();
do task::spawn || { do task::spawn || {
foreach i in range(0, 10) { for i in range(0, 10) {
chan.send(i) chan.send(i)
} }
} }
foreach i in range(0, 10) { for i in range(0, 10) {
assert!(i == port.recv()) assert!(i == port.recv())
} }
} }
@ -828,7 +828,7 @@ mod test {
// TcpSocketBuf is a Writer! // TcpSocketBuf is a Writer!
let chan = writer_chan(socket_buf); let chan = writer_chan(socket_buf);
foreach i in range(0, 10) { for i in range(0, 10) {
debug!("sending %?", i); debug!("sending %?", i);
chan.send(i) chan.send(i)
} }
@ -851,7 +851,7 @@ mod test {
// TcpSocketBuf is a Reader! // TcpSocketBuf is a Reader!
let port = reader_port(socket_buf); let port = reader_port(socket_buf);
foreach i in range(0, 10) { for i in range(0, 10) {
let j = port.recv(); let j = port.recv();
debug!("received %?", j); debug!("received %?", j);
assert_eq!(i, j); assert_eq!(i, j);

View File

@ -307,7 +307,7 @@ pub fn getopts(args: &[~str], opts: &[Opt]) -> Result {
} }
} }
let mut name_pos = 0; let mut name_pos = 0;
foreach nm in names.iter() { for nm in names.iter() {
name_pos += 1; name_pos += 1;
let optid = match find_opt(opts, (*nm).clone()) { let optid = match find_opt(opts, (*nm).clone()) {
Some(id) => id, Some(id) => id,
@ -392,7 +392,7 @@ pub fn opt_count(mm: &Matches, nm: &str) -> uint {
/// Returns true if any of several options were matched /// Returns true if any of several options were matched
pub fn opts_present(mm: &Matches, names: &[~str]) -> bool { pub fn opts_present(mm: &Matches, names: &[~str]) -> bool {
foreach nm in names.iter() { for nm in names.iter() {
match find_opt(mm.opts, mkname(*nm)) { match find_opt(mm.opts, mkname(*nm)) {
Some(id) if !mm.vals[id].is_empty() => return true, Some(id) if !mm.vals[id].is_empty() => return true,
_ => (), _ => (),
@ -422,7 +422,7 @@ pub fn opt_str(mm: &Matches, nm: &str) -> ~str {
* option took an argument * option took an argument
*/ */
pub fn opts_str(mm: &Matches, names: &[~str]) -> ~str { pub fn opts_str(mm: &Matches, names: &[~str]) -> ~str {
foreach nm in names.iter() { for nm in names.iter() {
match opt_val(mm, *nm) { match opt_val(mm, *nm) {
Some(Val(ref s)) => return (*s).clone(), Some(Val(ref s)) => return (*s).clone(),
_ => () _ => ()
@ -441,7 +441,7 @@ pub fn opts_str(mm: &Matches, names: &[~str]) -> ~str {
pub fn opt_strs(mm: &Matches, nm: &str) -> ~[~str] { pub fn opt_strs(mm: &Matches, nm: &str) -> ~[~str] {
let mut acc: ~[~str] = ~[]; let mut acc: ~[~str] = ~[];
let r = opt_vals(mm, nm); let r = opt_vals(mm, nm);
foreach v in r.iter() { for v in r.iter() {
match *v { Val(ref s) => acc.push((*s).clone()), _ => () } match *v { Val(ref s) => acc.push((*s).clone()), _ => () }
} }
acc acc
@ -671,7 +671,7 @@ pub mod groups {
// Normalize desc to contain words separated by one space character // Normalize desc to contain words separated by one space character
let mut desc_normalized_whitespace = ~""; let mut desc_normalized_whitespace = ~"";
foreach word in desc.word_iter() { for word in desc.word_iter() {
desc_normalized_whitespace.push_str(word); desc_normalized_whitespace.push_str(word);
desc_normalized_whitespace.push_char(' '); desc_normalized_whitespace.push_char(' ');
} }

View File

@ -27,7 +27,7 @@ internal iterator by calling the `advance` method. For example:
let xs = [0u, 1, 2, 3, 4, 5]; let xs = [0u, 1, 2, 3, 4, 5];
let ys = [30, 40, 50, 60]; let ys = [30, 40, 50, 60];
let mut it = xs.iter().chain(ys.iter()); let mut it = xs.iter().chain(ys.iter());
foreach &x: &uint in it { for &x: &uint in it {
println(x.to_str()); println(x.to_str());
} }
~~~ ~~~

View File

@ -57,7 +57,7 @@ pub struct Error {
fn escape_str(s: &str) -> ~str { fn escape_str(s: &str) -> ~str {
let mut escaped = ~"\""; let mut escaped = ~"\"";
foreach c in s.iter() { for c in s.iter() {
match c { match c {
'"' => escaped.push_str("\\\""), '"' => escaped.push_str("\\\""),
'\\' => escaped.push_str("\\\\"), '\\' => escaped.push_str("\\\\"),
@ -923,7 +923,7 @@ impl serialize::Decoder for Decoder {
fn read_char(&mut self) -> char { fn read_char(&mut self) -> char {
let mut v = ~[]; let mut v = ~[];
let s = self.read_str(); let s = self.read_str();
foreach c in s.iter() { v.push(c) } for c in s.iter() { v.push(c) }
if v.len() != 1 { fail!("string must have one character") } if v.len() != 1 { fail!("string must have one character") }
v[0] v[0]
} }
@ -949,7 +949,7 @@ impl serialize::Decoder for Decoder {
let name = match self.stack.pop() { let name = match self.stack.pop() {
String(s) => s, String(s) => s,
List(list) => { List(list) => {
foreach v in list.consume_rev_iter() { for v in list.consume_rev_iter() {
self.stack.push(v); self.stack.push(v);
} }
match self.stack.pop() { match self.stack.pop() {
@ -1067,7 +1067,7 @@ impl serialize::Decoder for Decoder {
let len = match self.stack.pop() { let len = match self.stack.pop() {
List(list) => { List(list) => {
let len = list.len(); let len = list.len();
foreach v in list.consume_rev_iter() { for v in list.consume_rev_iter() {
self.stack.push(v); self.stack.push(v);
} }
len len
@ -1087,7 +1087,7 @@ impl serialize::Decoder for Decoder {
let len = match self.stack.pop() { let len = match self.stack.pop() {
Object(obj) => { Object(obj) => {
let len = obj.len(); let len = obj.len();
foreach (key, value) in obj.consume_iter() { for (key, value) in obj.consume_iter() {
self.stack.push(value); self.stack.push(value);
self.stack.push(String(key)); self.stack.push(String(key));
} }
@ -1157,12 +1157,12 @@ impl Ord for Json {
let mut d1_flat = ~[]; let mut d1_flat = ~[];
// FIXME #4430: this is horribly inefficient... // FIXME #4430: this is horribly inefficient...
foreach (k, v) in d0.iter() { for (k, v) in d0.iter() {
d0_flat.push((@(*k).clone(), @(*v).clone())); d0_flat.push((@(*k).clone(), @(*v).clone()));
} }
d0_flat.qsort(); d0_flat.qsort();
foreach (k, v) in d1.iter() { for (k, v) in d1.iter() {
d1_flat.push((@(*k).clone(), @(*v).clone())); d1_flat.push((@(*k).clone(), @(*v).clone()));
} }
d1_flat.qsort(); d1_flat.qsort();
@ -1297,7 +1297,7 @@ impl<A:ToJson> ToJson for ~[A] {
impl<A:ToJson> ToJson for TreeMap<~str, A> { impl<A:ToJson> ToJson for TreeMap<~str, A> {
fn to_json(&self) -> Json { fn to_json(&self) -> Json {
let mut d = TreeMap::new(); let mut d = TreeMap::new();
foreach (key, value) in self.iter() { for (key, value) in self.iter() {
d.insert((*key).clone(), value.to_json()); d.insert((*key).clone(), value.to_json());
} }
Object(~d) Object(~d)
@ -1307,7 +1307,7 @@ impl<A:ToJson> ToJson for TreeMap<~str, A> {
impl<A:ToJson> ToJson for HashMap<~str, A> { impl<A:ToJson> ToJson for HashMap<~str, A> {
fn to_json(&self) -> Json { fn to_json(&self) -> Json {
let mut d = TreeMap::new(); let mut d = TreeMap::new();
foreach (key, value) in self.iter() { for (key, value) in self.iter() {
d.insert((*key).clone(), value.to_json()); d.insert((*key).clone(), value.to_json());
} }
Object(~d) Object(~d)
@ -1364,7 +1364,7 @@ mod tests {
fn mk_object(items: &[(~str, Json)]) -> Json { fn mk_object(items: &[(~str, Json)]) -> Json {
let mut d = ~TreeMap::new(); let mut d = ~TreeMap::new();
foreach item in items.iter() { for item in items.iter() {
match *item { match *item {
(ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); }, (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); },
} }

View File

@ -131,7 +131,7 @@ impl TotalOrd for BigUint {
if s_len < o_len { return Less; } if s_len < o_len { return Less; }
if s_len > o_len { return Greater; } if s_len > o_len { return Greater; }
foreach (&self_i, &other_i) in self.data.rev_iter().zip(other.data.rev_iter()) { for (&self_i, &other_i) in self.data.rev_iter().zip(other.data.rev_iter()) {
cond!((self_i < other_i) { return Less; } cond!((self_i < other_i) { return Less; }
(self_i > other_i) { return Greater; }) (self_i > other_i) { return Greater; })
} }
@ -420,7 +420,7 @@ impl Integer for BigUint {
let bn = *b.data.last(); let bn = *b.data.last();
let mut d = ~[]; let mut d = ~[];
let mut carry = 0; let mut carry = 0;
foreach elt in an.rev_iter() { for elt in an.rev_iter() {
let ai = BigDigit::to_uint(carry, *elt); let ai = BigDigit::to_uint(carry, *elt);
let di = ai / (bn as uint); let di = ai / (bn as uint);
assert!(di < BigDigit::base); assert!(di < BigDigit::base);
@ -524,7 +524,7 @@ impl ToStrRadix for BigUint {
fn fill_concat(v: &[BigDigit], radix: uint, l: uint) -> ~str { fn fill_concat(v: &[BigDigit], radix: uint, l: uint) -> ~str {
if v.is_empty() { return ~"0" } if v.is_empty() { return ~"0" }
let mut s = str::with_capacity(v.len() * l); let mut s = str::with_capacity(v.len() * l);
foreach n in v.rev_iter() { for n in v.rev_iter() {
let ss = uint::to_str_radix(*n as uint, radix); let ss = uint::to_str_radix(*n as uint, radix);
s.push_str("0".repeat(l - ss.len())); s.push_str("0".repeat(l - ss.len()));
s.push_str(ss); s.push_str(ss);
@ -651,7 +651,7 @@ impl BigUint {
let mut borrow = 0; let mut borrow = 0;
let mut shifted = ~[]; let mut shifted = ~[];
foreach elem in self.data.rev_iter() { for elem in self.data.rev_iter() {
shifted = ~[(*elem >> n_bits) | borrow] + shifted; shifted = ~[(*elem >> n_bits) | borrow] + shifted;
borrow = *elem << (BigDigit::bits - n_bits); borrow = *elem << (BigDigit::bits - n_bits);
} }
@ -1186,8 +1186,8 @@ mod biguint_tests {
fn test_cmp() { fn test_cmp() {
let data: ~[BigUint] = [ &[], &[1], &[2], &[-1], &[0, 1], &[2, 1], &[1, 1, 1] ] let data: ~[BigUint] = [ &[], &[1], &[2], &[-1], &[0, 1], &[2, 1], &[1, 1, 1] ]
.map(|v| BigUint::from_slice(*v)); .map(|v| BigUint::from_slice(*v));
foreach (i, ni) in data.iter().enumerate() { for (i, ni) in data.iter().enumerate() {
foreach (j0, nj) in data.slice(i, data.len()).iter().enumerate() { for (j0, nj) in data.slice(i, data.len()).iter().enumerate() {
let j = j0 + i; let j = j0 + i;
if i == j { if i == j {
assert_eq!(ni.cmp(nj), Equal); assert_eq!(ni.cmp(nj), Equal);
@ -1360,7 +1360,7 @@ mod biguint_tests {
#[test] #[test]
fn test_add() { fn test_add() {
foreach elm in sum_triples.iter() { for elm in sum_triples.iter() {
let (aVec, bVec, cVec) = *elm; let (aVec, bVec, cVec) = *elm;
let a = BigUint::from_slice(aVec); let a = BigUint::from_slice(aVec);
let b = BigUint::from_slice(bVec); let b = BigUint::from_slice(bVec);
@ -1373,7 +1373,7 @@ mod biguint_tests {
#[test] #[test]
fn test_sub() { fn test_sub() {
foreach elm in sum_triples.iter() { for elm in sum_triples.iter() {
let (aVec, bVec, cVec) = *elm; let (aVec, bVec, cVec) = *elm;
let a = BigUint::from_slice(aVec); let a = BigUint::from_slice(aVec);
let b = BigUint::from_slice(bVec); let b = BigUint::from_slice(bVec);
@ -1424,7 +1424,7 @@ mod biguint_tests {
#[test] #[test]
fn test_mul() { fn test_mul() {
foreach elm in mul_triples.iter() { for elm in mul_triples.iter() {
let (aVec, bVec, cVec) = *elm; let (aVec, bVec, cVec) = *elm;
let a = BigUint::from_slice(aVec); let a = BigUint::from_slice(aVec);
let b = BigUint::from_slice(bVec); let b = BigUint::from_slice(bVec);
@ -1434,7 +1434,7 @@ mod biguint_tests {
assert!(b * a == c); assert!(b * a == c);
} }
foreach elm in div_rem_quadruples.iter() { for elm in div_rem_quadruples.iter() {
let (aVec, bVec, cVec, dVec) = *elm; let (aVec, bVec, cVec, dVec) = *elm;
let a = BigUint::from_slice(aVec); let a = BigUint::from_slice(aVec);
let b = BigUint::from_slice(bVec); let b = BigUint::from_slice(bVec);
@ -1448,7 +1448,7 @@ mod biguint_tests {
#[test] #[test]
fn test_div_rem() { fn test_div_rem() {
foreach elm in mul_triples.iter() { for elm in mul_triples.iter() {
let (aVec, bVec, cVec) = *elm; let (aVec, bVec, cVec) = *elm;
let a = BigUint::from_slice(aVec); let a = BigUint::from_slice(aVec);
let b = BigUint::from_slice(bVec); let b = BigUint::from_slice(bVec);
@ -1462,7 +1462,7 @@ mod biguint_tests {
} }
} }
foreach elm in div_rem_quadruples.iter() { for elm in div_rem_quadruples.iter() {
let (aVec, bVec, cVec, dVec) = *elm; let (aVec, bVec, cVec, dVec) = *elm;
let a = BigUint::from_slice(aVec); let a = BigUint::from_slice(aVec);
let b = BigUint::from_slice(bVec); let b = BigUint::from_slice(bVec);
@ -1579,9 +1579,9 @@ mod biguint_tests {
#[test] #[test]
fn test_to_str_radix() { fn test_to_str_radix() {
let r = to_str_pairs(); let r = to_str_pairs();
foreach num_pair in r.iter() { for num_pair in r.iter() {
let &(ref n, ref rs) = num_pair; let &(ref n, ref rs) = num_pair;
foreach str_pair in rs.iter() { for str_pair in rs.iter() {
let &(ref radix, ref str) = str_pair; let &(ref radix, ref str) = str_pair;
assert_eq!(&n.to_str_radix(*radix), str); assert_eq!(&n.to_str_radix(*radix), str);
} }
@ -1591,9 +1591,9 @@ mod biguint_tests {
#[test] #[test]
fn test_from_str_radix() { fn test_from_str_radix() {
let r = to_str_pairs(); let r = to_str_pairs();
foreach num_pair in r.iter() { for num_pair in r.iter() {
let &(ref n, ref rs) = num_pair; let &(ref n, ref rs) = num_pair;
foreach str_pair in rs.iter() { for str_pair in rs.iter() {
let &(ref radix, ref str) = str_pair; let &(ref radix, ref str) = str_pair;
assert_eq!(n, &FromStrRadix::from_str_radix(*str, *radix).get()); assert_eq!(n, &FromStrRadix::from_str_radix(*str, *radix).get());
} }
@ -1608,7 +1608,7 @@ mod biguint_tests {
fn test_factor() { fn test_factor() {
fn factor(n: uint) -> BigUint { fn factor(n: uint) -> BigUint {
let mut f= One::one::<BigUint>(); let mut f= One::one::<BigUint>();
foreach i in range(2, n + 1) { for i in range(2, n + 1) {
// FIXME(#6102): Assignment operator for BigInt causes ICE // FIXME(#6102): Assignment operator for BigInt causes ICE
// f *= BigUint::from_uint(i); // f *= BigUint::from_uint(i);
f = f * BigUint::from_uint(i); f = f * BigUint::from_uint(i);
@ -1658,14 +1658,14 @@ mod bigint_tests {
fn test_cmp() { fn test_cmp() {
let vs = [ &[2 as BigDigit], &[1, 1], &[2, 1], &[1, 1, 1] ]; let vs = [ &[2 as BigDigit], &[1, 1], &[2, 1], &[1, 1, 1] ];
let mut nums = ~[]; let mut nums = ~[];
foreach s in vs.rev_iter() { for s in vs.rev_iter() {
nums.push(BigInt::from_slice(Minus, *s)); nums.push(BigInt::from_slice(Minus, *s));
} }
nums.push(Zero::zero()); nums.push(Zero::zero());
nums.push_all_move(vs.map(|s| BigInt::from_slice(Plus, *s))); nums.push_all_move(vs.map(|s| BigInt::from_slice(Plus, *s)));
foreach (i, ni) in nums.iter().enumerate() { for (i, ni) in nums.iter().enumerate() {
foreach (j0, nj) in nums.slice(i, nums.len()).iter().enumerate() { for (j0, nj) in nums.slice(i, nums.len()).iter().enumerate() {
let j = i + j0; let j = i + j0;
if i == j { if i == j {
assert_eq!(ni.cmp(nj), Equal); assert_eq!(ni.cmp(nj), Equal);
@ -1769,7 +1769,7 @@ mod bigint_tests {
#[test] #[test]
fn test_add() { fn test_add() {
foreach elm in sum_triples.iter() { for elm in sum_triples.iter() {
let (aVec, bVec, cVec) = *elm; let (aVec, bVec, cVec) = *elm;
let a = BigInt::from_slice(Plus, aVec); let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec); let b = BigInt::from_slice(Plus, bVec);
@ -1788,7 +1788,7 @@ mod bigint_tests {
#[test] #[test]
fn test_sub() { fn test_sub() {
foreach elm in sum_triples.iter() { for elm in sum_triples.iter() {
let (aVec, bVec, cVec) = *elm; let (aVec, bVec, cVec) = *elm;
let a = BigInt::from_slice(Plus, aVec); let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec); let b = BigInt::from_slice(Plus, bVec);
@ -1845,7 +1845,7 @@ mod bigint_tests {
#[test] #[test]
fn test_mul() { fn test_mul() {
foreach elm in mul_triples.iter() { for elm in mul_triples.iter() {
let (aVec, bVec, cVec) = *elm; let (aVec, bVec, cVec) = *elm;
let a = BigInt::from_slice(Plus, aVec); let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec); let b = BigInt::from_slice(Plus, bVec);
@ -1858,7 +1858,7 @@ mod bigint_tests {
assert!((-b) * a == -c); assert!((-b) * a == -c);
} }
foreach elm in div_rem_quadruples.iter() { for elm in div_rem_quadruples.iter() {
let (aVec, bVec, cVec, dVec) = *elm; let (aVec, bVec, cVec, dVec) = *elm;
let a = BigInt::from_slice(Plus, aVec); let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec); let b = BigInt::from_slice(Plus, bVec);
@ -1897,7 +1897,7 @@ mod bigint_tests {
} }
} }
foreach elm in mul_triples.iter() { for elm in mul_triples.iter() {
let (aVec, bVec, cVec) = *elm; let (aVec, bVec, cVec) = *elm;
let a = BigInt::from_slice(Plus, aVec); let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec); let b = BigInt::from_slice(Plus, bVec);
@ -1907,7 +1907,7 @@ mod bigint_tests {
if !b.is_zero() { check(&c, &b, &a, &Zero::zero()); } if !b.is_zero() { check(&c, &b, &a, &Zero::zero()); }
} }
foreach elm in div_rem_quadruples.iter() { for elm in div_rem_quadruples.iter() {
let (aVec, bVec, cVec, dVec) = *elm; let (aVec, bVec, cVec, dVec) = *elm;
let a = BigInt::from_slice(Plus, aVec); let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec); let b = BigInt::from_slice(Plus, bVec);
@ -1940,7 +1940,7 @@ mod bigint_tests {
check_sub(&a.neg(), b, &q.neg(), &r.neg()); check_sub(&a.neg(), b, &q.neg(), &r.neg());
check_sub(&a.neg(), &b.neg(), q, &r.neg()); check_sub(&a.neg(), &b.neg(), q, &r.neg());
} }
foreach elm in mul_triples.iter() { for elm in mul_triples.iter() {
let (aVec, bVec, cVec) = *elm; let (aVec, bVec, cVec) = *elm;
let a = BigInt::from_slice(Plus, aVec); let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec); let b = BigInt::from_slice(Plus, bVec);
@ -1950,7 +1950,7 @@ mod bigint_tests {
if !b.is_zero() { check(&c, &b, &a, &Zero::zero()); } if !b.is_zero() { check(&c, &b, &a, &Zero::zero()); }
} }
foreach elm in div_rem_quadruples.iter() { for elm in div_rem_quadruples.iter() {
let (aVec, bVec, cVec, dVec) = *elm; let (aVec, bVec, cVec, dVec) = *elm;
let a = BigInt::from_slice(Plus, aVec); let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec); let b = BigInt::from_slice(Plus, bVec);

View File

@ -239,14 +239,14 @@ mod test {
fn test_scale_unscale() { fn test_scale_unscale() {
assert_eq!(_05_05i.scale(2f), _1_1i); assert_eq!(_05_05i.scale(2f), _1_1i);
assert_eq!(_1_1i.unscale(2f), _05_05i); assert_eq!(_1_1i.unscale(2f), _05_05i);
foreach &c in all_consts.iter() { for &c in all_consts.iter() {
assert_eq!(c.scale(2f).unscale(2f), c); assert_eq!(c.scale(2f).unscale(2f), c);
} }
} }
#[test] #[test]
fn test_conj() { fn test_conj() {
foreach &c in all_consts.iter() { for &c in all_consts.iter() {
assert_eq!(c.conj(), Cmplx::new(c.re, -c.im)); assert_eq!(c.conj(), Cmplx::new(c.re, -c.im));
assert_eq!(c.conj().conj(), c); assert_eq!(c.conj().conj(), c);
} }
@ -283,7 +283,7 @@ mod test {
let (r, theta) = c.to_polar(); let (r, theta) = c.to_polar();
assert!((c - Cmplx::from_polar(&r, &theta)).norm() < 1e-6); assert!((c - Cmplx::from_polar(&r, &theta)).norm() < 1e-6);
} }
foreach &c in all_consts.iter() { test(c); } for &c in all_consts.iter() { test(c); }
} }
mod arith { mod arith {
@ -296,7 +296,7 @@ mod test {
assert_eq!(_0_1i + _1_0i, _1_1i); assert_eq!(_0_1i + _1_0i, _1_1i);
assert_eq!(_1_0i + _neg1_1i, _0_1i); assert_eq!(_1_0i + _neg1_1i, _0_1i);
foreach &c in all_consts.iter() { for &c in all_consts.iter() {
assert_eq!(_0_0i + c, c); assert_eq!(_0_0i + c, c);
assert_eq!(c + _0_0i, c); assert_eq!(c + _0_0i, c);
} }
@ -308,7 +308,7 @@ mod test {
assert_eq!(_0_1i - _1_0i, _neg1_1i); assert_eq!(_0_1i - _1_0i, _neg1_1i);
assert_eq!(_0_1i - _neg1_1i, _1_0i); assert_eq!(_0_1i - _neg1_1i, _1_0i);
foreach &c in all_consts.iter() { for &c in all_consts.iter() {
assert_eq!(c - _0_0i, c); assert_eq!(c - _0_0i, c);
assert_eq!(c - c, _0_0i); assert_eq!(c - c, _0_0i);
} }
@ -323,7 +323,7 @@ mod test {
assert_eq!(_0_1i * _0_1i, -_1_0i); assert_eq!(_0_1i * _0_1i, -_1_0i);
assert_eq!(_0_1i * _0_1i * _0_1i * _0_1i, _1_0i); assert_eq!(_0_1i * _0_1i * _0_1i * _0_1i, _1_0i);
foreach &c in all_consts.iter() { for &c in all_consts.iter() {
assert_eq!(c * _1_0i, c); assert_eq!(c * _1_0i, c);
assert_eq!(_1_0i * c, c); assert_eq!(_1_0i * c, c);
} }
@ -331,7 +331,7 @@ mod test {
#[test] #[test]
fn test_div() { fn test_div() {
assert_eq!(_neg1_1i / _0_1i, _1_1i); assert_eq!(_neg1_1i / _0_1i, _1_1i);
foreach &c in all_consts.iter() { for &c in all_consts.iter() {
if c != Zero::zero() { if c != Zero::zero() {
assert_eq!(c / c, _1_0i); assert_eq!(c / c, _1_0i);
} }
@ -341,7 +341,7 @@ mod test {
fn test_neg() { fn test_neg() {
assert_eq!(-_1_0i + _0_1i, _neg1_1i); assert_eq!(-_1_0i + _0_1i, _neg1_1i);
assert_eq!((-_0_1i) * _0_1i, _1_0i); assert_eq!((-_0_1i) * _0_1i, _1_0i);
foreach &c in all_consts.iter() { for &c in all_consts.iter() {
assert_eq!(-(-c), c); assert_eq!(-(-c), c);
} }
} }

View File

@ -500,7 +500,7 @@ mod test {
} }
let xs = ["0 /1", "abc", "", "1/", "--1/2","3/2/1"]; let xs = ["0 /1", "abc", "", "1/", "--1/2","3/2/1"];
foreach &s in xs.iter() { for &s in xs.iter() {
test(s); test(s);
} }
} }
@ -540,7 +540,7 @@ mod test {
} }
let xs = ["0 /1", "abc", "", "1/", "--1/2","3/2/1", "3/2"]; let xs = ["0 /1", "abc", "", "1/", "--1/2","3/2/1", "3/2"];
foreach &s in xs.iter() { for &s in xs.iter() {
test(s); test(s);
} }
} }

View File

@ -206,7 +206,7 @@ impl<T: Ord, Iter: Iterator<T>> Extendable<T, Iter> for PriorityQueue<T> {
let len = self.capacity(); let len = self.capacity();
self.reserve_at_least(len + lower); self.reserve_at_least(len + lower);
foreach elem in *iter { for elem in *iter {
self.push(elem); self.push(elem);
} }
} }
@ -223,7 +223,7 @@ mod tests {
let iterout = ~[9, 5, 3]; let iterout = ~[9, 5, 3];
let pq = PriorityQueue::from_vec(data); let pq = PriorityQueue::from_vec(data);
let mut i = 0; let mut i = 0;
foreach el in pq.iter() { for el in pq.iter() {
assert_eq!(*el, iterout[i]); assert_eq!(*el, iterout[i]);
i += 1; i += 1;
} }
@ -369,7 +369,7 @@ mod tests {
let mut q: PriorityQueue<uint> = xs.rev_iter().transform(|&x| x).collect(); let mut q: PriorityQueue<uint> = xs.rev_iter().transform(|&x| x).collect();
foreach &x in xs.iter() { for &x in xs.iter() {
assert_eq!(q.pop(), x); assert_eq!(q.pop(), x);
} }
} }

@ -38,7 +38,7 @@ impl<T> Container for RingBuf<T> {
impl<T> Mutable for RingBuf<T> { impl<T> Mutable for RingBuf<T> {
/// Clear the RingBuf, removing all values. /// Clear the RingBuf, removing all values.
fn clear(&mut self) { fn clear(&mut self) {
foreach x in self.elts.mut_iter() { *x = None } for x in self.elts.mut_iter() { *x = None }
self.nelts = 0; self.nelts = 0;
self.lo = 0; self.lo = 0;
} }
@ -277,7 +277,7 @@ fn grow<T>(nelts: uint, loptr: &mut uint, elts: &mut ~[Option<T>]) {
elts.reserve(newlen); elts.reserve(newlen);
/* fill with None */ /* fill with None */
foreach _ in range(elts.len(), elts.capacity()) { for _ in range(elts.len(), elts.capacity()) {
elts.push(None); elts.push(None);
} }
@ -292,11 +292,11 @@ fn grow<T>(nelts: uint, loptr: &mut uint, elts: &mut ~[Option<T>]) {
assert!(newlen - nelts/2 >= nelts); assert!(newlen - nelts/2 >= nelts);
if lo <= (nelts - lo) { // A if lo <= (nelts - lo) { // A
foreach i in range(0u, lo) { for i in range(0u, lo) {
elts.swap(i, nelts + i); elts.swap(i, nelts + i);
} }
} else { // B } else { // B
foreach i in range(lo, nelts) { for i in range(lo, nelts) {
elts.swap(i, newlen - nelts + i); elts.swap(i, newlen - nelts + i);
} }
*loptr += newlen - nelts; *loptr += newlen - nelts;
@ -333,7 +333,7 @@ impl<A, T: Iterator<A>> FromIterator<A, T> for RingBuf<A> {
impl<A, T: Iterator<A>> Extendable<A, T> for RingBuf<A> { impl<A, T: Iterator<A>> Extendable<A, T> for RingBuf<A> {
fn extend(&mut self, iterator: &mut T) { fn extend(&mut self, iterator: &mut T) {
foreach elt in *iterator { for elt in *iterator {
self.push_back(elt); self.push_back(elt);
} }
} }
@ -461,21 +461,21 @@ mod tests {
#[test] #[test]
fn test_push_front_grow() { fn test_push_front_grow() {
let mut deq = RingBuf::new(); let mut deq = RingBuf::new();
foreach i in range(0u, 66) { for i in range(0u, 66) {
deq.push_front(i); deq.push_front(i);
} }
assert_eq!(deq.len(), 66); assert_eq!(deq.len(), 66);
foreach i in range(0u, 66) { for i in range(0u, 66) {
assert_eq!(*deq.get(i), 65 - i); assert_eq!(*deq.get(i), 65 - i);
} }
let mut deq = RingBuf::new(); let mut deq = RingBuf::new();
foreach i in range(0u, 66) { for i in range(0u, 66) {
deq.push_back(i); deq.push_back(i);
} }
foreach i in range(0u, 66) { for i in range(0u, 66) {
assert_eq!(*deq.get(i), i); assert_eq!(*deq.get(i), i);
} }
} }
@ -606,12 +606,12 @@ mod tests {
assert_eq!(d.iter().next(), None); assert_eq!(d.iter().next(), None);
assert_eq!(d.iter().size_hint(), (0, Some(0))); assert_eq!(d.iter().size_hint(), (0, Some(0)));
foreach i in range(0, 5) { for i in range(0, 5) {
d.push_back(i); d.push_back(i);
} }
assert_eq!(d.iter().collect::<~[&int]>(), ~[&0,&1,&2,&3,&4]); assert_eq!(d.iter().collect::<~[&int]>(), ~[&0,&1,&2,&3,&4]);
foreach i in range(6, 9) { for i in range(6, 9) {
d.push_front(i); d.push_front(i);
} }
assert_eq!(d.iter().collect::<~[&int]>(), ~[&8,&7,&6,&0,&1,&2,&3,&4]); assert_eq!(d.iter().collect::<~[&int]>(), ~[&8,&7,&6,&0,&1,&2,&3,&4]);
@ -631,12 +631,12 @@ mod tests {
let mut d = RingBuf::new(); let mut d = RingBuf::new();
assert_eq!(d.rev_iter().next(), None); assert_eq!(d.rev_iter().next(), None);
foreach i in range(0, 5) { for i in range(0, 5) {
d.push_back(i); d.push_back(i);
} }
assert_eq!(d.rev_iter().collect::<~[&int]>(), ~[&4,&3,&2,&1,&0]); assert_eq!(d.rev_iter().collect::<~[&int]>(), ~[&4,&3,&2,&1,&0]);
foreach i in range(6, 9) { for i in range(6, 9) {
d.push_front(i); d.push_front(i);
} }
assert_eq!(d.rev_iter().collect::<~[&int]>(), ~[&4,&3,&2,&1,&0,&6,&7,&8]); assert_eq!(d.rev_iter().collect::<~[&int]>(), ~[&4,&3,&2,&1,&0,&6,&7,&8]);
@ -647,11 +647,11 @@ mod tests {
let mut d = RingBuf::new(); let mut d = RingBuf::new();
assert!(d.mut_iter().next().is_none()); assert!(d.mut_iter().next().is_none());
foreach i in range(0u, 3) { for i in range(0u, 3) {
d.push_front(i); d.push_front(i);
} }
foreach (i, elt) in d.mut_iter().enumerate() { for (i, elt) in d.mut_iter().enumerate() {
assert_eq!(*elt, 2 - i); assert_eq!(*elt, 2 - i);
*elt = i; *elt = i;
} }
@ -670,11 +670,11 @@ mod tests {
let mut d = RingBuf::new(); let mut d = RingBuf::new();
assert!(d.mut_rev_iter().next().is_none()); assert!(d.mut_rev_iter().next().is_none());
foreach i in range(0u, 3) { for i in range(0u, 3) {
d.push_front(i); d.push_front(i);
} }
foreach (i, elt) in d.mut_rev_iter().enumerate() { for (i, elt) in d.mut_rev_iter().enumerate() {
assert_eq!(*elt, i); assert_eq!(*elt, i);
*elt = i; *elt = i;
} }
@ -698,7 +698,7 @@ mod tests {
let mut seq = iterator::Counter::new(0u, 2).take_(256); let mut seq = iterator::Counter::new(0u, 2).take_(256);
let deq: RingBuf<uint> = seq.collect(); let deq: RingBuf<uint> = seq.collect();
foreach (i, &x) in deq.iter().enumerate() { for (i, &x) in deq.iter().enumerate() {
assert_eq!(2*i, x); assert_eq!(2*i, x);
} }
assert_eq!(deq.len(), 256); assert_eq!(deq.len(), 256);

@ -443,7 +443,7 @@ impl<D:Decoder,T:Decodable<D> + 'static> Decodable<D> for @mut T {
impl<'self, S:Encoder,T:Encodable<S>> Encodable<S> for &'self [T] { impl<'self, S:Encoder,T:Encodable<S>> Encodable<S> for &'self [T] {
fn encode(&self, s: &mut S) { fn encode(&self, s: &mut S) {
do s.emit_seq(self.len()) |s| { do s.emit_seq(self.len()) |s| {
foreach (i, e) in self.iter().enumerate() { for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s)) s.emit_seq_elt(i, |s| e.encode(s))
} }
} }
@ -453,7 +453,7 @@ impl<'self, S:Encoder,T:Encodable<S>> Encodable<S> for &'self [T] {
impl<S:Encoder,T:Encodable<S>> Encodable<S> for ~[T] { impl<S:Encoder,T:Encodable<S>> Encodable<S> for ~[T] {
fn encode(&self, s: &mut S) { fn encode(&self, s: &mut S) {
do s.emit_seq(self.len()) |s| { do s.emit_seq(self.len()) |s| {
foreach (i, e) in self.iter().enumerate() { for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s)) s.emit_seq_elt(i, |s| e.encode(s))
} }
} }
@ -473,7 +473,7 @@ impl<D:Decoder,T:Decodable<D>> Decodable<D> for ~[T] {
impl<S:Encoder,T:Encodable<S>> Encodable<S> for @[T] { impl<S:Encoder,T:Encodable<S>> Encodable<S> for @[T] {
fn encode(&self, s: &mut S) { fn encode(&self, s: &mut S) {
do s.emit_seq(self.len()) |s| { do s.emit_seq(self.len()) |s| {
foreach (i, e) in self.iter().enumerate() { for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s)) s.emit_seq_elt(i, |s| e.encode(s))
} }
} }
@ -667,7 +667,7 @@ impl<
> Encodable<S> for DList<T> { > Encodable<S> for DList<T> {
fn encode(&self, s: &mut S) { fn encode(&self, s: &mut S) {
do s.emit_seq(self.len()) |s| { do s.emit_seq(self.len()) |s| {
foreach (i, e) in self.iter().enumerate() { for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s)); s.emit_seq_elt(i, |s| e.encode(s));
} }
} }
@ -678,7 +678,7 @@ impl<D:Decoder,T:Decodable<D>> Decodable<D> for DList<T> {
fn decode(d: &mut D) -> DList<T> { fn decode(d: &mut D) -> DList<T> {
let mut list = DList::new(); let mut list = DList::new();
do d.read_seq |d, len| { do d.read_seq |d, len| {
foreach i in range(0u, len) { for i in range(0u, len) {
list.push_back(d.read_seq_elt(i, |d| Decodable::decode(d))); list.push_back(d.read_seq_elt(i, |d| Decodable::decode(d)));
} }
} }
@ -692,7 +692,7 @@ impl<
> Encodable<S> for RingBuf<T> { > Encodable<S> for RingBuf<T> {
fn encode(&self, s: &mut S) { fn encode(&self, s: &mut S) {
do s.emit_seq(self.len()) |s| { do s.emit_seq(self.len()) |s| {
foreach (i, e) in self.iter().enumerate() { for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s)); s.emit_seq_elt(i, |s| e.encode(s));
} }
} }
@ -703,7 +703,7 @@ impl<D:Decoder,T:Decodable<D>> Decodable<D> for RingBuf<T> {
fn decode(d: &mut D) -> RingBuf<T> { fn decode(d: &mut D) -> RingBuf<T> {
let mut deque = RingBuf::new(); let mut deque = RingBuf::new();
do d.read_seq |d, len| { do d.read_seq |d, len| {
foreach i in range(0u, len) { for i in range(0u, len) {
deque.push_back(d.read_seq_elt(i, |d| Decodable::decode(d))); deque.push_back(d.read_seq_elt(i, |d| Decodable::decode(d)));
} }
} }
@ -719,7 +719,7 @@ impl<
fn encode(&self, e: &mut E) { fn encode(&self, e: &mut E) {
do e.emit_map(self.len()) |e| { do e.emit_map(self.len()) |e| {
let mut i = 0; let mut i = 0;
foreach (key, val) in self.iter() { for (key, val) in self.iter() {
e.emit_map_elt_key(i, |e| key.encode(e)); e.emit_map_elt_key(i, |e| key.encode(e));
e.emit_map_elt_val(i, |e| val.encode(e)); e.emit_map_elt_val(i, |e| val.encode(e));
i += 1; i += 1;
@ -736,7 +736,7 @@ impl<
fn decode(d: &mut D) -> HashMap<K, V> { fn decode(d: &mut D) -> HashMap<K, V> {
do d.read_map |d, len| { do d.read_map |d, len| {
let mut map = HashMap::with_capacity(len); let mut map = HashMap::with_capacity(len);
foreach i in range(0u, len) { for i in range(0u, len) {
let key = d.read_map_elt_key(i, |d| Decodable::decode(d)); let key = d.read_map_elt_key(i, |d| Decodable::decode(d));
let val = d.read_map_elt_val(i, |d| Decodable::decode(d)); let val = d.read_map_elt_val(i, |d| Decodable::decode(d));
map.insert(key, val); map.insert(key, val);
@ -753,7 +753,7 @@ impl<
fn encode(&self, s: &mut S) { fn encode(&self, s: &mut S) {
do s.emit_seq(self.len()) |s| { do s.emit_seq(self.len()) |s| {
let mut i = 0; let mut i = 0;
foreach e in self.iter() { for e in self.iter() {
s.emit_seq_elt(i, |s| e.encode(s)); s.emit_seq_elt(i, |s| e.encode(s));
i += 1; i += 1;
} }
@ -768,7 +768,7 @@ impl<
fn decode(d: &mut D) -> HashSet<T> { fn decode(d: &mut D) -> HashSet<T> {
do d.read_seq |d, len| { do d.read_seq |d, len| {
let mut set = HashSet::with_capacity(len); let mut set = HashSet::with_capacity(len);
foreach i in range(0u, len) { for i in range(0u, len) {
set.insert(d.read_seq_elt(i, |d| Decodable::decode(d))); set.insert(d.read_seq_elt(i, |d| Decodable::decode(d)));
} }
set set
@ -800,7 +800,7 @@ impl<
fn decode(d: &mut D) -> TrieMap<V> { fn decode(d: &mut D) -> TrieMap<V> {
do d.read_map |d, len| { do d.read_map |d, len| {
let mut map = TrieMap::new(); let mut map = TrieMap::new();
foreach i in range(0u, len) { for i in range(0u, len) {
let key = d.read_map_elt_key(i, |d| Decodable::decode(d)); let key = d.read_map_elt_key(i, |d| Decodable::decode(d));
let val = d.read_map_elt_val(i, |d| Decodable::decode(d)); let val = d.read_map_elt_val(i, |d| Decodable::decode(d));
map.insert(key, val); map.insert(key, val);
@ -827,7 +827,7 @@ impl<D: Decoder> Decodable<D> for TrieSet {
fn decode(d: &mut D) -> TrieSet { fn decode(d: &mut D) -> TrieSet {
do d.read_seq |d, len| { do d.read_seq |d, len| {
let mut set = TrieSet::new(); let mut set = TrieSet::new();
foreach i in range(0u, len) { for i in range(0u, len) {
set.insert(d.read_seq_elt(i, |d| Decodable::decode(d))); set.insert(d.read_seq_elt(i, |d| Decodable::decode(d)));
} }
set set
@ -843,7 +843,7 @@ impl<
fn encode(&self, e: &mut E) { fn encode(&self, e: &mut E) {
do e.emit_map(self.len()) |e| { do e.emit_map(self.len()) |e| {
let mut i = 0; let mut i = 0;
foreach (key, val) in self.iter() { for (key, val) in self.iter() {
e.emit_map_elt_key(i, |e| key.encode(e)); e.emit_map_elt_key(i, |e| key.encode(e));
e.emit_map_elt_val(i, |e| val.encode(e)); e.emit_map_elt_val(i, |e| val.encode(e));
i += 1; i += 1;
@ -860,7 +860,7 @@ impl<
fn decode(d: &mut D) -> TreeMap<K, V> { fn decode(d: &mut D) -> TreeMap<K, V> {
do d.read_map |d, len| { do d.read_map |d, len| {
let mut map = TreeMap::new(); let mut map = TreeMap::new();
foreach i in range(0u, len) { for i in range(0u, len) {
let key = d.read_map_elt_key(i, |d| Decodable::decode(d)); let key = d.read_map_elt_key(i, |d| Decodable::decode(d));
let val = d.read_map_elt_val(i, |d| Decodable::decode(d)); let val = d.read_map_elt_val(i, |d| Decodable::decode(d));
map.insert(key, val); map.insert(key, val);
@ -877,7 +877,7 @@ impl<
fn encode(&self, s: &mut S) { fn encode(&self, s: &mut S) {
do s.emit_seq(self.len()) |s| { do s.emit_seq(self.len()) |s| {
let mut i = 0; let mut i = 0;
foreach e in self.iter() { for e in self.iter() {
s.emit_seq_elt(i, |s| e.encode(s)); s.emit_seq_elt(i, |s| e.encode(s));
i += 1; i += 1;
} }
@ -892,7 +892,7 @@ impl<
fn decode(d: &mut D) -> TreeSet<T> { fn decode(d: &mut D) -> TreeSet<T> {
do d.read_seq |d, len| { do d.read_seq |d, len| {
let mut set = TreeSet::new(); let mut set = TreeSet::new();
foreach i in range(0u, len) { for i in range(0u, len) {
set.insert(d.read_seq_elt(i, |d| Decodable::decode(d))); set.insert(d.read_seq_elt(i, |d| Decodable::decode(d)));
} }
set set
@ -912,7 +912,7 @@ pub trait EncoderHelpers {
impl<S:Encoder> EncoderHelpers for S { impl<S:Encoder> EncoderHelpers for S {
fn emit_from_vec<T>(&mut self, v: &[T], f: &fn(&mut S, &T)) { fn emit_from_vec<T>(&mut self, v: &[T], f: &fn(&mut S, &T)) {
do self.emit_seq(v.len()) |this| { do self.emit_seq(v.len()) |this| {
foreach (i, e) in v.iter().enumerate() { for (i, e) in v.iter().enumerate() {
do this.emit_seq_elt(i) |this| { do this.emit_seq_elt(i) |this| {
f(this, e) f(this, e)
} }

@ -30,7 +30,7 @@ impl<V> Container for SmallIntMap<V> {
/// Return the number of elements in the map /// Return the number of elements in the map
fn len(&self) -> uint { fn len(&self) -> uint {
let mut sz = 0; let mut sz = 0;
foreach i in range(0u, self.v.len()) { for i in range(0u, self.v.len()) {
match self.v[i] { match self.v[i] {
Some(_) => sz += 1, Some(_) => sz += 1,
None => {} None => {}
@ -123,7 +123,7 @@ impl<V> SmallIntMap<V> {
/// Visit all key-value pairs in order /// Visit all key-value pairs in order
pub fn each<'a>(&'a self, it: &fn(&uint, &'a V) -> bool) -> bool { pub fn each<'a>(&'a self, it: &fn(&uint, &'a V) -> bool) -> bool {
foreach i in range(0u, self.v.len()) { for i in range(0u, self.v.len()) {
match self.v[i] { match self.v[i] {
Some(ref elt) => if !it(&i, elt) { return false; }, Some(ref elt) => if !it(&i, elt) { return false; },
None => () None => ()
@ -144,7 +144,7 @@ impl<V> SmallIntMap<V> {
/// Iterate over the map and mutate the contained values /// Iterate over the map and mutate the contained values
pub fn mutate_values(&mut self, it: &fn(&uint, &mut V) -> bool) -> bool { pub fn mutate_values(&mut self, it: &fn(&uint, &mut V) -> bool) -> bool {
foreach i in range(0, self.v.len()) { for i in range(0, self.v.len()) {
match self.v[i] { match self.v[i] {
Some(ref mut elt) => if !it(&i, elt) { return false; }, Some(ref mut elt) => if !it(&i, elt) { return false; },
None => () None => ()
@ -446,7 +446,7 @@ mod test_map {
assert!(m.insert(6, 10)); assert!(m.insert(6, 10));
assert!(m.insert(10, 11)); assert!(m.insert(10, 11));
foreach (k, v) in m.mut_iter() { for (k, v) in m.mut_iter() {
*v += k as int; *v += k as int;
} }
@ -488,7 +488,7 @@ mod test_map {
assert!(m.insert(6, 10)); assert!(m.insert(6, 10));
assert!(m.insert(10, 11)); assert!(m.insert(10, 11));
foreach (k, v) in m.mut_rev_iter() { for (k, v) in m.mut_rev_iter() {
*v += k as int; *v += k as int;
} }
@ -506,7 +506,7 @@ mod test_map {
let mut m = SmallIntMap::new(); let mut m = SmallIntMap::new();
m.insert(1, ~2); m.insert(1, ~2);
let mut called = false; let mut called = false;
foreach (k, v) in m.consume() { for (k, v) in m.consume() {
assert!(!called); assert!(!called);
called = true; called = true;
assert_eq!(k, 1); assert_eq!(k, 1);

@ -470,7 +470,7 @@ impl<T:Clone + Ord> MergeState<T> {
assert!(len1 != 0 && len2 != 0 && base1+len1 == base2); assert!(len1 != 0 && len2 != 0 && base1+len1 == base2);
let mut tmp = ~[]; let mut tmp = ~[];
foreach i in range(base1, base1+len1) { for i in range(base1, base1+len1) {
tmp.push(array[i].clone()); tmp.push(array[i].clone());
} }
@ -580,7 +580,7 @@ impl<T:Clone + Ord> MergeState<T> {
assert!(len1 != 1 && len2 != 0 && base1 + len1 == base2); assert!(len1 != 1 && len2 != 0 && base1 + len1 == base2);
let mut tmp = ~[]; let mut tmp = ~[];
foreach i in range(base2, base2+len2) { for i in range(base2, base2+len2) {
tmp.push(array[i].clone()); tmp.push(array[i].clone());
} }
@ -732,7 +732,7 @@ fn copy_vec<T:Clone>(dest: &mut [T],
from: &[T]) { from: &[T]) {
assert!(s1+from.len() <= dest.len()); assert!(s1+from.len() <= dest.len());
foreach (i, v) in from.iter().enumerate() { for (i, v) in from.iter().enumerate() {
dest[s1+i] = (*v).clone(); dest[s1+i] = (*v).clone();
} }
} }
@ -842,7 +842,7 @@ mod test_qsort {
let immut_names = names; let immut_names = names;
let pairs = vec::zip_slice(expected, immut_names); let pairs = vec::zip_slice(expected, immut_names);
foreach p in pairs.iter() { for p in pairs.iter() {
let (a, b) = *p; let (a, b) = *p;
debug!("%d %d", a, b); debug!("%d %d", a, b);
assert_eq!(a, b); assert_eq!(a, b);
@ -1054,7 +1054,7 @@ mod big_tests {
fn tabulate_unique(lo: uint, hi: uint) { fn tabulate_unique(lo: uint, hi: uint) {
fn isSorted<T:Ord>(arr: &[T]) { fn isSorted<T:Ord>(arr: &[T]) {
foreach i in range(0u, arr.len() - 1) { for i in range(0u, arr.len() - 1) {
if arr[i] > arr[i+1] { if arr[i] > arr[i+1] {
fail!("Array not sorted"); fail!("Array not sorted");
} }
@ -1063,7 +1063,7 @@ mod big_tests {
let mut rng = rand::rng(); let mut rng = rand::rng();
foreach i in range(lo, hi) { for i in range(lo, hi) {
let n = 1 << i; let n = 1 << i;
let mut arr: ~[float] = do vec::from_fn(n) |_i| { let mut arr: ~[float] = do vec::from_fn(n) |_i| {
rng.gen() rng.gen()
@ -1125,7 +1125,7 @@ mod big_tests {
fn tabulate_managed(lo: uint, hi: uint) { fn tabulate_managed(lo: uint, hi: uint) {
fn isSorted<T:Ord>(arr: &[@T]) { fn isSorted<T:Ord>(arr: &[@T]) {
foreach i in range(0u, arr.len() - 1) { for i in range(0u, arr.len() - 1) {
if arr[i] > arr[i+1] { if arr[i] > arr[i+1] {
fail!("Array not sorted"); fail!("Array not sorted");
} }
@ -1134,7 +1134,7 @@ mod big_tests {
let mut rng = rand::rng(); let mut rng = rand::rng();
foreach i in range(lo, hi) { for i in range(lo, hi) {
let n = 1 << i; let n = 1 << i;
let arr: ~[@float] = do vec::from_fn(n) |_i| { let arr: ~[@float] = do vec::from_fn(n) |_i| {
@rng.gen() @rng.gen()

@ -167,7 +167,7 @@ impl<'self> Stats for &'self [f64] {
} else { } else {
let mean = self.mean(); let mean = self.mean();
let mut v = 0.0; let mut v = 0.0;
foreach s in self.iter() { for s in self.iter() {
let x = *s - mean; let x = *s - mean;
v += x*x; v += x*x;
} }
@ -254,7 +254,7 @@ pub fn winsorize(samples: &mut [f64], pct: f64) {
sort::tim_sort(tmp); sort::tim_sort(tmp);
let lo = percentile_of_sorted(tmp, pct); let lo = percentile_of_sorted(tmp, pct);
let hi = percentile_of_sorted(tmp, 100.0-pct); let hi = percentile_of_sorted(tmp, 100.0-pct);
foreach samp in samples.mut_iter() { for samp in samples.mut_iter() {
if *samp > hi { if *samp > hi {
*samp = hi *samp = hi
} else if *samp < lo { } else if *samp < lo {

@ -893,13 +893,13 @@ mod tests {
} }
// wait until all children get in the mutex // wait until all children get in the mutex
foreach port in ports.iter() { let _ = port.recv(); } for port in ports.iter() { let _ = port.recv(); }
do m.lock_cond |cond| { do m.lock_cond |cond| {
let num_woken = cond.broadcast(); let num_woken = cond.broadcast();
assert_eq!(num_woken, num_waiters); assert_eq!(num_woken, num_waiters);
} }
// wait until all children wake up // wait until all children wake up
foreach port in ports.iter() { let _ = port.recv(); } for port in ports.iter() { let _ = port.recv(); }
} }
#[test] #[test]
fn test_mutex_cond_broadcast() { fn test_mutex_cond_broadcast() {
@ -991,7 +991,7 @@ mod tests {
} }
} }
} }
foreach p in sibling_convos.iter() { for p in sibling_convos.iter() {
let _ = p.recv(); // wait for sibling to get in the mutex let _ = p.recv(); // wait for sibling to get in the mutex
} }
do m2.lock { } do m2.lock { }
@ -1001,7 +1001,7 @@ mod tests {
assert!(result.is_err()); assert!(result.is_err());
// child task must have finished by the time try returns // child task must have finished by the time try returns
let r = p.recv(); let r = p.recv();
foreach p in r.iter() { p.recv(); } // wait on all its siblings for p in r.iter() { p.recv(); } // wait on all its siblings
do m.lock_cond |cond| { do m.lock_cond |cond| {
let woken = cond.broadcast(); let woken = cond.broadcast();
assert_eq!(woken, 0); assert_eq!(woken, 0);
@ -1253,13 +1253,13 @@ mod tests {
} }
// wait until all children get in the mutex // wait until all children get in the mutex
foreach port in ports.iter() { let _ = port.recv(); } for port in ports.iter() { let _ = port.recv(); }
do lock_cond(x, dg2) |cond| { do lock_cond(x, dg2) |cond| {
let num_woken = cond.broadcast(); let num_woken = cond.broadcast();
assert_eq!(num_woken, num_waiters); assert_eq!(num_woken, num_waiters);
} }
// wait until all children wake up // wait until all children wake up
foreach port in ports.iter() { let _ = port.recv(); } for port in ports.iter() { let _ = port.recv(); }
} }
#[test] #[test]
fn test_rwlock_cond_broadcast() { fn test_rwlock_cond_broadcast() {

@ -35,7 +35,7 @@ pub struct TaskPool<T> {
#[unsafe_destructor] #[unsafe_destructor]
impl<T> Drop for TaskPool<T> { impl<T> Drop for TaskPool<T> {
fn drop(&self) { fn drop(&self) {
foreach channel in self.channels.iter() { for channel in self.channels.iter() {
channel.send(Quit); channel.send(Quit);
} }
} }

@ -19,7 +19,7 @@ use std::rand;
/// have the suffix `suffix`. If no directory can be created, None is returned. /// have the suffix `suffix`. If no directory can be created, None is returned.
pub fn mkdtemp(tmpdir: &Path, suffix: &str) -> Option<Path> { pub fn mkdtemp(tmpdir: &Path, suffix: &str) -> Option<Path> {
let mut r = rand::rng(); let mut r = rand::rng();
foreach _ in range(0u, 1000) { for _ in range(0u, 1000) {
let p = tmpdir.push(r.gen_str(16) + suffix); let p = tmpdir.push(r.gen_str(16) + suffix);
if os::make_dir(&p, 0x1c0) { // 700 if os::make_dir(&p, 0x1c0) { // 700
return Some(p); return Some(p);

@ -102,11 +102,11 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)
Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
]; ];
foreach (dst, src) in mparams.mut_iter().zip(params.iter()) { for (dst, src) in mparams.mut_iter().zip(params.iter()) {
*dst = (*src).clone(); *dst = (*src).clone();
} }
foreach c in cap.iter().transform(|&x| x) { for c in cap.iter().transform(|&x| x) {
let cur = c as char; let cur = c as char;
let mut old_state = state; let mut old_state = state;
match state { match state {
@ -605,7 +605,7 @@ mod test {
let mut varstruct = Variables::new(); let mut varstruct = Variables::new();
let vars = &mut varstruct; let vars = &mut varstruct;
let caps = ["%d", "%c", "%s", "%Pa", "%l", "%!", "%~"]; let caps = ["%d", "%c", "%s", "%Pa", "%l", "%!", "%~"];
foreach cap in caps.iter() { for cap in caps.iter() {
let res = expand(cap.as_bytes(), [], vars); let res = expand(cap.as_bytes(), [], vars);
assert!(res.is_err(), assert!(res.is_err(),
"Op %s succeeded incorrectly with 0 stack entries", *cap); "Op %s succeeded incorrectly with 0 stack entries", *cap);
@ -615,7 +615,7 @@ mod test {
"Op %s failed with 1 stack entry: %s", *cap, res.unwrap_err()); "Op %s failed with 1 stack entry: %s", *cap, res.unwrap_err());
} }
let caps = ["%+", "%-", "%*", "%/", "%m", "%&", "%|", "%A", "%O"]; let caps = ["%+", "%-", "%*", "%/", "%m", "%&", "%|", "%A", "%O"];
foreach cap in caps.iter() { for cap in caps.iter() {
let res = expand(cap.as_bytes(), [], vars); let res = expand(cap.as_bytes(), [], vars);
assert!(res.is_err(), assert!(res.is_err(),
"Binop %s succeeded incorrectly with 0 stack entries", *cap); "Binop %s succeeded incorrectly with 0 stack entries", *cap);
@ -636,7 +636,7 @@ mod test {
#[test] #[test]
fn test_comparison_ops() { fn test_comparison_ops() {
let v = [('<', [1u8, 0u8, 0u8]), ('=', [0u8, 1u8, 0u8]), ('>', [0u8, 0u8, 1u8])]; let v = [('<', [1u8, 0u8, 0u8]), ('=', [0u8, 1u8, 0u8]), ('>', [0u8, 0u8, 1u8])];
foreach &(op, bs) in v.iter() { for &(op, bs) in v.iter() {
let s = fmt!("%%{1}%%{2}%%%c%%d", op); let s = fmt!("%%{1}%%{2}%%%c%%d", op);
let res = expand(s.as_bytes(), [], &mut Variables::new()); let res = expand(s.as_bytes(), [], &mut Variables::new());
assert!(res.is_ok(), res.unwrap_err()); assert!(res.is_ok(), res.unwrap_err());

@ -222,7 +222,7 @@ pub fn parse(file: @Reader, longnames: bool) -> Result<~TermInfo, ~str> {
let mut bools_map = HashMap::new(); let mut bools_map = HashMap::new();
if bools_bytes != 0 { if bools_bytes != 0 {
foreach i in range(0, bools_bytes) { for i in range(0, bools_bytes) {
let b = file.read_byte(); let b = file.read_byte();
if b < 0 { if b < 0 {
error!("EOF reading bools after %? entries", i); error!("EOF reading bools after %? entries", i);
@ -243,7 +243,7 @@ pub fn parse(file: @Reader, longnames: bool) -> Result<~TermInfo, ~str> {
let mut numbers_map = HashMap::new(); let mut numbers_map = HashMap::new();
if numbers_count != 0 { if numbers_count != 0 {
foreach i in range(0, numbers_count) { for i in range(0, numbers_count) {
let n = file.read_le_u16(); let n = file.read_le_u16();
if n != 0xFFFF { if n != 0xFFFF {
debug!("%s#%?", nnames[i], n); debug!("%s#%?", nnames[i], n);
@ -258,7 +258,7 @@ pub fn parse(file: @Reader, longnames: bool) -> Result<~TermInfo, ~str> {
if string_offsets_count != 0 { if string_offsets_count != 0 {
let mut string_offsets = vec::with_capacity(10); let mut string_offsets = vec::with_capacity(10);
foreach _ in range(0, string_offsets_count) { for _ in range(0, string_offsets_count) {
string_offsets.push(file.read_le_u16()); string_offsets.push(file.read_le_u16());
} }
@ -272,7 +272,7 @@ pub fn parse(file: @Reader, longnames: bool) -> Result<~TermInfo, ~str> {
return Err(~"error: hit EOF before end of string table"); return Err(~"error: hit EOF before end of string table");
} }
foreach (i, v) in string_offsets.iter().enumerate() { for (i, v) in string_offsets.iter().enumerate() {
let offset = *v; let offset = *v;
if offset == 0xFFFF { // non-entry if offset == 0xFFFF { // non-entry
loop; loop;

@ -35,7 +35,7 @@ pub fn get_dbpath_for_term(term: &str) -> Option<~path> {
dirs_to_search.push(homedir.unwrap().push(".terminfo")); // ncurses compatability dirs_to_search.push(homedir.unwrap().push(".terminfo")); // ncurses compatability
} }
match getenv("TERMINFO_DIRS") { match getenv("TERMINFO_DIRS") {
Some(dirs) => foreach i in dirs.split_iter(':') { Some(dirs) => for i in dirs.split_iter(':') {
if i == "" { if i == "" {
dirs_to_search.push(path("/usr/share/terminfo")); dirs_to_search.push(path("/usr/share/terminfo"));
} else { } else {
@ -54,7 +54,7 @@ pub fn get_dbpath_for_term(term: &str) -> Option<~path> {
}; };
// Look for the terminal in all of the search directories // Look for the terminal in all of the search directories
foreach p in dirs_to_search.iter() { for p in dirs_to_search.iter() {
let newp = ~p.push_many(&[str::from_char(first_char), term.to_owned()]); let newp = ~p.push_many(&[str::from_char(first_char), term.to_owned()]);
if os::path_exists(p) && os::path_exists(newp) { if os::path_exists(p) && os::path_exists(newp) {
return Some(newp); return Some(newp);

@ -429,11 +429,11 @@ impl ConsoleTestState {
pub fn write_failures(&self) { pub fn write_failures(&self) {
self.out.write_line("\nfailures:"); self.out.write_line("\nfailures:");
let mut failures = ~[]; let mut failures = ~[];
foreach f in self.failures.iter() { for f in self.failures.iter() {
failures.push(f.name.to_str()); failures.push(f.name.to_str());
} }
sort::tim_sort(failures); sort::tim_sort(failures);
foreach name in failures.iter() { for name in failures.iter() {
self.out.write_line(fmt!(" %s", name.to_str())); self.out.write_line(fmt!(" %s", name.to_str()));
} }
} }
@ -445,7 +445,7 @@ impl ConsoleTestState {
let mut added = 0; let mut added = 0;
let mut removed = 0; let mut removed = 0;
foreach (k, v) in diff.iter() { for (k, v) in diff.iter() {
match *v { match *v {
LikelyNoise => noise += 1, LikelyNoise => noise += 1,
MetricAdded => { MetricAdded => {
@ -565,7 +565,7 @@ pub fn run_tests_console(opts: &TestOpts,
TrIgnored => st.ignored += 1, TrIgnored => st.ignored += 1,
TrMetrics(mm) => { TrMetrics(mm) => {
let tname = test.name.to_str(); let tname = test.name.to_str();
foreach (k,v) in mm.iter() { for (k,v) in mm.iter() {
st.metrics.insert_metric(tname + "." + *k, st.metrics.insert_metric(tname + "." + *k,
v.value, v.noise); v.value, v.noise);
} }
@ -699,7 +699,7 @@ fn run_tests(opts: &TestOpts,
// All benchmarks run at the end, in serial. // All benchmarks run at the end, in serial.
// (this includes metric fns) // (this includes metric fns)
foreach b in filtered_benchs_and_metrics.consume_iter() { for b in filtered_benchs_and_metrics.consume_iter() {
callback(TeWait(b.desc.clone())); callback(TeWait(b.desc.clone()));
run_test(!opts.run_benchmarks, b, ch.clone()); run_test(!opts.run_benchmarks, b, ch.clone());
let (test, result) = p.recv(); let (test, result) = p.recv();
@ -887,7 +887,7 @@ impl MetricMap {
pub fn compare_to_old(&self, old: &MetricMap, pub fn compare_to_old(&self, old: &MetricMap,
noise_pct: Option<f64>) -> MetricDiff { noise_pct: Option<f64>) -> MetricDiff {
let mut diff : MetricDiff = TreeMap::new(); let mut diff : MetricDiff = TreeMap::new();
foreach (k, vold) in old.iter() { for (k, vold) in old.iter() {
let r = match self.find(k) { let r = match self.find(k) {
None => MetricRemoved, None => MetricRemoved,
Some(v) => { Some(v) => {
@ -924,7 +924,7 @@ impl MetricMap {
}; };
diff.insert((*k).clone(), r); diff.insert((*k).clone(), r);
} }
foreach (k, _) in self.iter() { for (k, _) in self.iter() {
if !diff.contains_key(k) { if !diff.contains_key(k) {
diff.insert((*k).clone(), MetricAdded); diff.insert((*k).clone(), MetricAdded);
} }
@ -990,7 +990,7 @@ impl BenchHarness {
pub fn iter(&mut self, inner:&fn()) { pub fn iter(&mut self, inner:&fn()) {
self.ns_start = precise_time_ns(); self.ns_start = precise_time_ns();
let k = self.iterations; let k = self.iterations;
foreach _ in range(0u64, k) { for _ in range(0u64, k) {
inner(); inner();
} }
self.ns_end = precise_time_ns(); self.ns_end = precise_time_ns();
@ -1039,7 +1039,7 @@ impl BenchHarness {
loop { loop {
let loop_start = precise_time_ns(); let loop_start = precise_time_ns();
foreach p in samples.mut_iter() { for p in samples.mut_iter() {
self.bench_n(n as u64, |x| f(x)); self.bench_n(n as u64, |x| f(x));
*p = self.ns_per_iter() as f64; *p = self.ns_per_iter() as f64;
}; };
@ -1047,7 +1047,7 @@ impl BenchHarness {
stats::winsorize(samples, 5.0); stats::winsorize(samples, 5.0);
let summ = stats::Summary::new(samples); let summ = stats::Summary::new(samples);
foreach p in samples.mut_iter() { for p in samples.mut_iter() {
self.bench_n(5 * n as u64, |x| f(x)); self.bench_n(5 * n as u64, |x| f(x));
*p = self.ns_per_iter() as f64; *p = self.ns_per_iter() as f64;
}; };
@ -1287,7 +1287,7 @@ mod tests {
{ {
fn testfn() { } fn testfn() { }
let mut tests = ~[]; let mut tests = ~[];
foreach name in names.iter() { for name in names.iter() {
let test = TestDescAndFn { let test = TestDescAndFn {
desc: TestDesc { desc: TestDesc {
name: DynTestName((*name).clone()), name: DynTestName((*name).clone()),
@ -1313,7 +1313,7 @@ mod tests {
let pairs = vec::zip(expected, filtered); let pairs = vec::zip(expected, filtered);
foreach p in pairs.iter() { for p in pairs.iter() {
match *p { match *p {
(ref a, ref b) => { (ref a, ref b) => {
assert!(*a == b.desc.name.to_str()); assert!(*a == b.desc.name.to_str());

@ -257,7 +257,7 @@ impl Tm {
priv fn do_strptime(s: &str, format: &str) -> Result<Tm, ~str> { priv fn do_strptime(s: &str, format: &str) -> Result<Tm, ~str> {
fn match_str(s: &str, pos: uint, needle: &str) -> bool { fn match_str(s: &str, pos: uint, needle: &str) -> bool {
let mut i = pos; let mut i = pos;
foreach ch in needle.byte_iter() { for ch in needle.byte_iter() {
if s[i] != ch { if s[i] != ch {
return false; return false;
} }
@ -1036,7 +1036,7 @@ mod tests {
~"Friday", ~"Friday",
~"Saturday" ~"Saturday"
]; ];
foreach day in days.iter() { for day in days.iter() {
assert!(test(*day, "%A")); assert!(test(*day, "%A"));
} }
@ -1049,7 +1049,7 @@ mod tests {
~"Fri", ~"Fri",
~"Sat" ~"Sat"
]; ];
foreach day in days.iter() { for day in days.iter() {
assert!(test(*day, "%a")); assert!(test(*day, "%a"));
} }
@ -1067,7 +1067,7 @@ mod tests {
~"November", ~"November",
~"December" ~"December"
]; ];
foreach day in months.iter() { for day in months.iter() {
assert!(test(*day, "%B")); assert!(test(*day, "%B"));
} }
@ -1085,7 +1085,7 @@ mod tests {
~"Nov", ~"Nov",
~"Dec" ~"Dec"
]; ];
foreach day in months.iter() { for day in months.iter() {
assert!(test(*day, "%b")); assert!(test(*day, "%b"));
} }

@ -47,7 +47,7 @@ impl<K: Eq + TotalOrd, V: Eq> Eq for TreeMap<K, V> {
} else { } else {
let mut x = self.iter(); let mut x = self.iter();
let mut y = other.iter(); let mut y = other.iter();
foreach _ in range(0u, self.len()) { for _ in range(0u, self.len()) {
if x.next().unwrap() != y.next().unwrap() { if x.next().unwrap() != y.next().unwrap() {
return false return false
} }
@ -65,7 +65,7 @@ fn lt<K: Ord + TotalOrd, V: Ord>(a: &TreeMap<K, V>,
let mut y = b.iter(); let mut y = b.iter();
let (a_len, b_len) = (a.len(), b.len()); let (a_len, b_len) = (a.len(), b.len());
foreach _ in range(0u, num::min(a_len, b_len)) { for _ in range(0u, num::min(a_len, b_len)) {
let (key_a, value_a) = x.next().unwrap(); let (key_a, value_a) = x.next().unwrap();
let (key_b, value_b) = y.next().unwrap(); let (key_b, value_b) = y.next().unwrap();
if *key_a < *key_b { return true; } if *key_a < *key_b { return true; }
@ -674,7 +674,7 @@ fn remove<K: TotalOrd, V>(node: &mut Option<~TreeNode<K, V>>,
fn heir_swap<K: TotalOrd, V>(node: &mut ~TreeNode<K, V>, fn heir_swap<K: TotalOrd, V>(node: &mut ~TreeNode<K, V>,
child: &mut Option<~TreeNode<K, V>>) { child: &mut Option<~TreeNode<K, V>>) {
// *could* be done without recursion, but it won't borrow check // *could* be done without recursion, but it won't borrow check
foreach x in child.mut_iter() { for x in child.mut_iter() {
if x.right.is_some() { if x.right.is_some() {
heir_swap(node, &mut x.right); heir_swap(node, &mut x.right);
} else { } else {
@ -729,18 +729,18 @@ fn remove<K: TotalOrd, V>(node: &mut Option<~TreeNode<K, V>>,
save.level -= 1; save.level -= 1;
if right_level > save.level { if right_level > save.level {
foreach x in save.right.mut_iter() { x.level = save.level } for x in save.right.mut_iter() { x.level = save.level }
} }
skew(save); skew(save);
foreach right in save.right.mut_iter() { for right in save.right.mut_iter() {
skew(right); skew(right);
foreach x in right.right.mut_iter() { skew(x) } for x in right.right.mut_iter() { skew(x) }
} }
split(save); split(save);
foreach x in save.right.mut_iter() { split(x) } for x in save.right.mut_iter() { split(x) }
} }
return ret; return ret;
@ -763,7 +763,7 @@ impl<K: TotalOrd, V, T: Iterator<(K, V)>> FromIterator<(K, V), T> for TreeMap<K,
impl<K: TotalOrd, V, T: Iterator<(K, V)>> Extendable<(K, V), T> for TreeMap<K, V> { impl<K: TotalOrd, V, T: Iterator<(K, V)>> Extendable<(K, V), T> for TreeMap<K, V> {
#[inline] #[inline]
fn extend(&mut self, iter: &mut T) { fn extend(&mut self, iter: &mut T) {
foreach (k, v) in *iter { for (k, v) in *iter {
self.insert(k, v); self.insert(k, v);
} }
} }
@ -780,7 +780,7 @@ impl<T: TotalOrd, Iter: Iterator<T>> FromIterator<T, Iter> for TreeSet<T> {
impl<T: TotalOrd, Iter: Iterator<T>> Extendable<T, Iter> for TreeSet<T> { impl<T: TotalOrd, Iter: Iterator<T>> Extendable<T, Iter> for TreeSet<T> {
#[inline] #[inline]
fn extend(&mut self, iter: &mut Iter) { fn extend(&mut self, iter: &mut Iter) {
foreach elem in *iter { for elem in *iter {
self.insert(elem); self.insert(elem);
} }
} }
@ -863,13 +863,13 @@ mod test_treemap {
fn check_equal<K: Eq + TotalOrd, V: Eq>(ctrl: &[(K, V)], fn check_equal<K: Eq + TotalOrd, V: Eq>(ctrl: &[(K, V)],
map: &TreeMap<K, V>) { map: &TreeMap<K, V>) {
assert_eq!(ctrl.is_empty(), map.is_empty()); assert_eq!(ctrl.is_empty(), map.is_empty());
foreach x in ctrl.iter() { for x in ctrl.iter() {
let &(ref k, ref v) = x; let &(ref k, ref v) = x;
assert!(map.find(k).unwrap() == v) assert!(map.find(k).unwrap() == v)
} }
foreach (map_k, map_v) in map.iter() { for (map_k, map_v) in map.iter() {
let mut found = false; let mut found = false;
foreach x in ctrl.iter() { for x in ctrl.iter() {
let &(ref ctrl_k, ref ctrl_v) = x; let &(ref ctrl_k, ref ctrl_v) = x;
if *map_k == *ctrl_k { if *map_k == *ctrl_k {
assert!(*map_v == *ctrl_v); assert!(*map_v == *ctrl_v);
@ -983,7 +983,7 @@ mod test_treemap {
assert!(m.insert(1, 2)); assert!(m.insert(1, 2));
let mut n = 0; let mut n = 0;
foreach (k, v) in m.iter() { for (k, v) in m.iter() {
assert_eq!(*k, n); assert_eq!(*k, n);
assert_eq!(*v, n * 2); assert_eq!(*v, n * 2);
n += 1; n += 1;
@ -1091,7 +1091,7 @@ mod test_treemap {
(&x5, &y5)]; (&x5, &y5)];
let mut i = 0; let mut i = 0;
foreach x in b { for x in b {
assert_eq!(expected[i], x); assert_eq!(expected[i], x);
i += 1; i += 1;
@ -1100,7 +1100,7 @@ mod test_treemap {
} }
} }
foreach x in b { for x in b {
assert_eq!(expected[i], x); assert_eq!(expected[i], x);
i += 1; i += 1;
} }
@ -1112,7 +1112,7 @@ mod test_treemap {
let map: TreeMap<int, int> = xs.iter().transform(|&x| x).collect(); let map: TreeMap<int, int> = xs.iter().transform(|&x| x).collect();
foreach &(k, v) in xs.iter() { for &(k, v) in xs.iter() {
assert_eq!(map.find(&k), Some(&v)); assert_eq!(map.find(&k), Some(&v));
} }
} }
@ -1260,7 +1260,7 @@ mod test_set {
assert!(m.insert(1)); assert!(m.insert(1));
let mut n = 0; let mut n = 0;
foreach x in m.iter() { for x in m.iter() {
printfln!(x); printfln!(x);
assert_eq!(*x, n); assert_eq!(*x, n);
n += 1 n += 1
@ -1290,8 +1290,8 @@ mod test_set {
let mut set_a = TreeSet::new(); let mut set_a = TreeSet::new();
let mut set_b = TreeSet::new(); let mut set_b = TreeSet::new();
foreach x in a.iter() { assert!(set_a.insert(*x)) } for x in a.iter() { assert!(set_a.insert(*x)) }
foreach y in b.iter() { assert!(set_b.insert(*y)) } for y in b.iter() { assert!(set_b.insert(*y)) }
let mut i = 0; let mut i = 0;
do f(&set_a, &set_b) |x| { do f(&set_a, &set_b) |x| {
@ -1413,7 +1413,7 @@ mod test_set {
let set: TreeSet<int> = xs.iter().transform(|&x| x).collect(); let set: TreeSet<int> = xs.iter().transform(|&x| x).collect();
foreach x in xs.iter() { for x in xs.iter() {
assert!(set.contains(x)); assert!(set.contains(x));
} }
} }

@ -206,10 +206,10 @@ pub fn encode_form_urlencoded(m: &HashMap<~str, ~[~str]>) -> ~str {
let mut out = ~""; let mut out = ~"";
let mut first = true; let mut first = true;
foreach (key, values) in m.iter() { for (key, values) in m.iter() {
let key = encode_plus(*key); let key = encode_plus(*key);
foreach value in values.iter() { for value in values.iter() {
if first { if first {
first = false; first = false;
} else { } else {
@ -331,7 +331,7 @@ fn userinfo_to_str(userinfo: &UserInfo) -> ~str {
fn query_from_str(rawquery: &str) -> Query { fn query_from_str(rawquery: &str) -> Query {
let mut query: Query = ~[]; let mut query: Query = ~[];
if !rawquery.is_empty() { if !rawquery.is_empty() {
foreach p in rawquery.split_iter('&') { for p in rawquery.split_iter('&') {
let (k, v) = split_char_first(p, '='); let (k, v) = split_char_first(p, '=');
query.push((decode_component(k), decode_component(v))); query.push((decode_component(k), decode_component(v)));
}; };
@ -341,7 +341,7 @@ fn query_from_str(rawquery: &str) -> Query {
pub fn query_to_str(query: &Query) -> ~str { pub fn query_to_str(query: &Query) -> ~str {
let mut strvec = ~[]; let mut strvec = ~[];
foreach kv in query.iter() { for kv in query.iter() {
match kv { match kv {
&(ref k, ref v) => { &(ref k, ref v) => {
strvec.push(fmt!("%s=%s", strvec.push(fmt!("%s=%s",
@ -356,7 +356,7 @@ pub fn query_to_str(query: &Query) -> ~str {
// returns the scheme and the rest of the url, or a parsing error // returns the scheme and the rest of the url, or a parsing error
pub fn get_scheme(rawurl: &str) -> Result<(~str, ~str), ~str> { pub fn get_scheme(rawurl: &str) -> Result<(~str, ~str), ~str> {
foreach (i,c) in rawurl.iter().enumerate() { for (i,c) in rawurl.iter().enumerate() {
match c { match c {
'A' .. 'Z' | 'a' .. 'z' => loop, 'A' .. 'Z' | 'a' .. 'z' => loop,
'0' .. '9' | '+' | '-' | '.' => { '0' .. '9' | '+' | '-' | '.' => {
@ -418,7 +418,7 @@ fn get_authority(rawurl: &str) ->
let mut begin = 2; let mut begin = 2;
let mut end = len; let mut end = len;
foreach (i,c) in rawurl.iter().enumerate() { for (i,c) in rawurl.iter().enumerate() {
if i < 2 { loop; } // ignore the leading // if i < 2 { loop; } // ignore the leading //
// deal with input class first // deal with input class first
@ -563,7 +563,7 @@ fn get_path(rawurl: &str, authority: bool) ->
Result<(~str, ~str), ~str> { Result<(~str, ~str), ~str> {
let len = rawurl.len(); let len = rawurl.len();
let mut end = len; let mut end = len;
foreach (i,c) in rawurl.iter().enumerate() { for (i,c) in rawurl.iter().enumerate() {
match c { match c {
'A' .. 'Z' | 'a' .. 'z' | '0' .. '9' | '&' |'\'' | '(' | ')' | '.' 'A' .. 'Z' | 'a' .. 'z' | '0' .. '9' | '&' |'\'' | '(' | ')' | '.'
| '@' | ':' | '%' | '/' | '+' | '!' | '*' | ',' | ';' | '=' | '@' | ':' | '%' | '/' | '+' | '!' | '*' | ',' | ';' | '='

@ -286,7 +286,7 @@ impl<'self> Prep<'self> {
} }
fn all_fresh(&self, cat: &str, map: &WorkMap) -> bool { fn all_fresh(&self, cat: &str, map: &WorkMap) -> bool {
foreach (k, v) in map.iter() { for (k, v) in map.iter() {
if ! self.is_fresh(cat, k.kind, k.name, *v) { if ! self.is_fresh(cat, k.kind, k.name, *v) {
return false; return false;
} }

@ -215,7 +215,7 @@ fn usage() {
\n" \n"
); );
foreach command in COMMANDS.iter() { for command in COMMANDS.iter() {
let padding = " ".repeat(INDENT - command.cmd.len()); let padding = " ".repeat(INDENT - command.cmd.len());
printfln!(" %s%s%s", command.cmd, padding, command.usage_line); printfln!(" %s%s%s", command.cmd, padding, command.usage_line);
} }
@ -240,7 +240,7 @@ pub fn main() {
if !args.is_empty() { if !args.is_empty() {
let r = find_cmd(*args.head()); let r = find_cmd(*args.head());
foreach command in r.iter() { for command in r.iter() {
let result = do_command(command, args.tail()); let result = do_command(command, args.tail());
match result { match result {
Valid(exit_code) => unsafe { exit(exit_code.to_i32()) }, Valid(exit_code) => unsafe { exit(exit_code.to_i32()) },

@ -141,7 +141,7 @@ pub mod jit {
let cstore = sess.cstore; let cstore = sess.cstore;
let r = cstore::get_used_crate_files(cstore); let r = cstore::get_used_crate_files(cstore);
foreach cratepath in r.iter() { for cratepath in r.iter() {
let path = cratepath.to_str(); let path = cratepath.to_str();
debug!("linking: %s", path); debug!("linking: %s", path);
@ -507,7 +507,7 @@ pub fn build_link_meta(sess: Session,
let mut cmh_items = ~[]; let mut cmh_items = ~[];
let linkage_metas = attr::find_linkage_metas(c.attrs); let linkage_metas = attr::find_linkage_metas(c.attrs);
attr::require_unique_names(sess.diagnostic(), linkage_metas); attr::require_unique_names(sess.diagnostic(), linkage_metas);
foreach meta in linkage_metas.iter() { for meta in linkage_metas.iter() {
match meta.name_str_pair() { match meta.name_str_pair() {
Some((n, value)) if "name" == n => name = Some(value), Some((n, value)) if "name" == n => name = Some(value),
Some((n, value)) if "vers" == n => vers = Some(value), Some((n, value)) if "vers" == n => vers = Some(value),
@ -547,7 +547,7 @@ pub fn build_link_meta(sess: Session,
} }
ast::MetaList(name, ref mis) => { ast::MetaList(name, ref mis) => {
write_string(symbol_hasher, len_and_str(name)); write_string(symbol_hasher, len_and_str(name));
foreach m_ in mis.iter() { for m_ in mis.iter() {
hash(symbol_hasher, m_); hash(symbol_hasher, m_);
} }
} }
@ -555,11 +555,11 @@ pub fn build_link_meta(sess: Session,
} }
symbol_hasher.reset(); symbol_hasher.reset();
foreach m in cmh_items.iter() { for m in cmh_items.iter() {
hash(symbol_hasher, m); hash(symbol_hasher, m);
} }
foreach dh in dep_hashes.iter() { for dh in dep_hashes.iter() {
write_string(symbol_hasher, len_and_str(*dh)); write_string(symbol_hasher, len_and_str(*dh));
} }
@ -665,7 +665,7 @@ pub fn get_symbol_hash(ccx: &mut CrateContext, t: ty::t) -> @str {
// gas accepts the following characters in symbols: a-z, A-Z, 0-9, ., _, $ // gas accepts the following characters in symbols: a-z, A-Z, 0-9, ., _, $
pub fn sanitize(s: &str) -> ~str { pub fn sanitize(s: &str) -> ~str {
let mut result = ~""; let mut result = ~"";
foreach c in s.iter() { for c in s.iter() {
match c { match c {
// Escape these with $ sequences // Escape these with $ sequences
'@' => result.push_str("$SP$"), '@' => result.push_str("$SP$"),
@ -712,7 +712,7 @@ pub fn mangle(sess: Session, ss: path) -> ~str {
let mut n = ~"_ZN"; // Begin name-sequence. let mut n = ~"_ZN"; // Begin name-sequence.
foreach s in ss.iter() { for s in ss.iter() {
match *s { match *s {
path_name(s) | path_mod(s) => { path_name(s) | path_mod(s) => {
let sani = sanitize(sess.str_of(s)); let sani = sanitize(sess.str_of(s));
@ -905,7 +905,7 @@ pub fn link_args(sess: Session,
let cstore = sess.cstore; let cstore = sess.cstore;
let r = cstore::get_used_crate_files(cstore); let r = cstore::get_used_crate_files(cstore);
foreach cratepath in r.iter() { for cratepath in r.iter() {
if cratepath.filetype() == Some(~".rlib") { if cratepath.filetype() == Some(~".rlib") {
args.push(cratepath.to_str()); args.push(cratepath.to_str());
loop; loop;
@ -917,12 +917,12 @@ pub fn link_args(sess: Session,
} }
let ula = cstore::get_used_link_args(cstore); let ula = cstore::get_used_link_args(cstore);
foreach arg in ula.iter() { args.push(arg.to_owned()); } for arg in ula.iter() { args.push(arg.to_owned()); }
// Add all the link args for external crates. // Add all the link args for external crates.
do cstore::iter_crate_data(cstore) |crate_num, _| { do cstore::iter_crate_data(cstore) |crate_num, _| {
let link_args = csearch::get_link_args_for_crate(cstore, crate_num); let link_args = csearch::get_link_args_for_crate(cstore, crate_num);
foreach link_arg in link_args.consume_iter() { for link_arg in link_args.consume_iter() {
args.push(link_arg); args.push(link_arg);
} }
} }
@ -935,13 +935,13 @@ pub fn link_args(sess: Session,
// to be found at compile time so it is still entirely up to outside // to be found at compile time so it is still entirely up to outside
// forces to make sure that library can be found at runtime. // forces to make sure that library can be found at runtime.
foreach path in sess.opts.addl_lib_search_paths.iter() { for path in sess.opts.addl_lib_search_paths.iter() {
args.push(~"-L" + path.to_str()); args.push(~"-L" + path.to_str());
} }
// The names of the extern libraries // The names of the extern libraries
let used_libs = cstore::get_used_libraries(cstore); let used_libs = cstore::get_used_libraries(cstore);
foreach l in used_libs.iter() { args.push(~"-l" + *l); } for l in used_libs.iter() { args.push(~"-l" + *l); }
if *sess.building_library { if *sess.building_library {
args.push(lib_cmd); args.push(lib_cmd);

@ -164,7 +164,7 @@ pub fn create_standard_passes(level: OptLevel) -> ~[~str] {
} }
pub fn populate_pass_manager(sess: Session, pm: &mut PassManager, pass_list:&[~str]) { pub fn populate_pass_manager(sess: Session, pm: &mut PassManager, pass_list:&[~str]) {
foreach nm in pass_list.iter() { for nm in pass_list.iter() {
match create_pass(*nm) { match create_pass(*nm) {
Some(p) => pm.add_pass(p), Some(p) => pm.add_pass(p),
None => sess.warn(fmt!("Unknown pass %s", *nm)) None => sess.warn(fmt!("Unknown pass %s", *nm))
@ -189,15 +189,15 @@ pub fn list_passes() {
io::println("\nAvailable Passes:"); io::println("\nAvailable Passes:");
io::println("\nAnalysis Passes:"); io::println("\nAnalysis Passes:");
foreach &(name, desc) in analysis_passes.iter() { for &(name, desc) in analysis_passes.iter() {
printfln!(" %-30s -- %s", name, desc); printfln!(" %-30s -- %s", name, desc);
} }
io::println("\nTransformation Passes:"); io::println("\nTransformation Passes:");
foreach &(name, desc) in transform_passes.iter() { for &(name, desc) in transform_passes.iter() {
printfln!(" %-30s -- %s", name, desc); printfln!(" %-30s -- %s", name, desc);
} }
io::println("\nUtility Passes:"); io::println("\nUtility Passes:");
foreach &(name, desc) in utility_passes.iter() { for &(name, desc) in utility_passes.iter() {
printfln!(" %-30s -- %s", name, desc); printfln!(" %-30s -- %s", name, desc);
} }
} }
@ -315,7 +315,7 @@ static utility_passes : &'static [(&'static str, &'static str)] = &'static [
fn passes_exist() { fn passes_exist() {
let mut failed = ~[]; let mut failed = ~[];
unsafe { llvm::LLVMInitializePasses(); } unsafe { llvm::LLVMInitializePasses(); }
foreach &(name,_) in analysis_passes.iter() { for &(name,_) in analysis_passes.iter() {
let pass = create_pass(name); let pass = create_pass(name);
if !pass.is_some() { if !pass.is_some() {
failed.push(name); failed.push(name);
@ -323,7 +323,7 @@ fn passes_exist() {
unsafe { llvm::LLVMDestroyPass(pass.get()) } unsafe { llvm::LLVMDestroyPass(pass.get()) }
} }
} }
foreach &(name,_) in transform_passes.iter() { for &(name,_) in transform_passes.iter() {
let pass = create_pass(name); let pass = create_pass(name);
if !pass.is_some() { if !pass.is_some() {
failed.push(name); failed.push(name);
@ -331,7 +331,7 @@ fn passes_exist() {
unsafe { llvm::LLVMDestroyPass(pass.get()) } unsafe { llvm::LLVMDestroyPass(pass.get()) }
} }
} }
foreach &(name,_) in utility_passes.iter() { for &(name,_) in utility_passes.iter() {
let pass = create_pass(name); let pass = create_pass(name);
if !pass.is_some() { if !pass.is_some() {
failed.push(name); failed.push(name);
@ -342,7 +342,7 @@ fn passes_exist() {
if failed.len() > 0 { if failed.len() > 0 {
io::println("Some passes don't exist:"); io::println("Some passes don't exist:");
foreach &n in failed.iter() { for &n in failed.iter() {
printfln!(" %s", n); printfln!(" %s", n);
} }
fail!(); fail!();

@ -63,7 +63,7 @@ fn get_rpaths(os: session::os,
debug!("sysroot: %s", sysroot.to_str()); debug!("sysroot: %s", sysroot.to_str());
debug!("output: %s", output.to_str()); debug!("output: %s", output.to_str());
debug!("libs:"); debug!("libs:");
foreach libpath in libs.iter() { for libpath in libs.iter() {
debug!(" %s", libpath.to_str()); debug!(" %s", libpath.to_str());
} }
debug!("target_triple: %s", target_triple); debug!("target_triple: %s", target_triple);
@ -82,7 +82,7 @@ fn get_rpaths(os: session::os,
fn log_rpaths(desc: &str, rpaths: &[Path]) { fn log_rpaths(desc: &str, rpaths: &[Path]) {
debug!("%s rpaths:", desc); debug!("%s rpaths:", desc);
foreach rpath in rpaths.iter() { for rpath in rpaths.iter() {
debug!(" %s", rpath.to_str()); debug!(" %s", rpath.to_str());
} }
} }
@ -149,7 +149,7 @@ pub fn get_relative_to(abs1: &Path, abs2: &Path) -> Path {
} }
let mut path = ~[]; let mut path = ~[];
foreach _ in range(start_idx, len1 - 1) { path.push(~".."); }; for _ in range(start_idx, len1 - 1) { path.push(~".."); };
path.push_all(split2.slice(start_idx, len2 - 1)); path.push_all(split2.slice(start_idx, len2 - 1));
@ -182,7 +182,7 @@ pub fn get_install_prefix_rpath(target_triple: &str) -> Path {
pub fn minimize_rpaths(rpaths: &[Path]) -> ~[Path] { pub fn minimize_rpaths(rpaths: &[Path]) -> ~[Path] {
let mut set = HashSet::new(); let mut set = HashSet::new();
let mut minimized = ~[]; let mut minimized = ~[];
foreach rpath in rpaths.iter() { for rpath in rpaths.iter() {
if set.insert(rpath.to_str()) { if set.insert(rpath.to_str()) {
minimized.push(rpath.clone()); minimized.push(rpath.clone());
} }

@ -508,7 +508,7 @@ pub fn pretty_print_input(sess: Session, cfg: ast::CrateConfig, input: &input,
} }
pub fn get_os(triple: &str) -> Option<session::os> { pub fn get_os(triple: &str) -> Option<session::os> {
foreach &(name, os) in os_names.iter() { for &(name, os) in os_names.iter() {
if triple.contains(name) { return Some(os) } if triple.contains(name) { return Some(os) }
} }
None None
@ -522,7 +522,7 @@ static os_names : &'static [(&'static str, session::os)] = &'static [
("freebsd", session::os_freebsd)]; ("freebsd", session::os_freebsd)];
pub fn get_arch(triple: &str) -> Option<abi::Architecture> { pub fn get_arch(triple: &str) -> Option<abi::Architecture> {
foreach &(arch, abi) in architecture_abis.iter() { for &(arch, abi) in architecture_abis.iter() {
if triple.contains(arch) { return Some(abi) } if triple.contains(arch) { return Some(abi) }
} }
None None
@ -611,7 +611,7 @@ pub fn build_session_options(binary: @str,
lint::deny, lint::forbid]; lint::deny, lint::forbid];
let mut lint_opts = ~[]; let mut lint_opts = ~[];
let lint_dict = lint::get_lint_dict(); let lint_dict = lint::get_lint_dict();
foreach level in lint_levels.iter() { for level in lint_levels.iter() {
let level_name = lint::level_to_str(*level); let level_name = lint::level_to_str(*level);
// FIXME: #4318 Instead of to_ascii and to_str_ascii, could use // FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
@ -620,7 +620,7 @@ pub fn build_session_options(binary: @str,
let level_short = level_short.to_ascii().to_upper().to_str_ascii(); let level_short = level_short.to_ascii().to_upper().to_str_ascii();
let flags = vec::append(getopts::opt_strs(matches, level_short), let flags = vec::append(getopts::opt_strs(matches, level_short),
getopts::opt_strs(matches, level_name)); getopts::opt_strs(matches, level_name));
foreach lint_name in flags.iter() { for lint_name in flags.iter() {
let lint_name = lint_name.replace("-", "_"); let lint_name = lint_name.replace("-", "_");
match lint_dict.find_equiv(&lint_name) { match lint_dict.find_equiv(&lint_name) {
None => { None => {
@ -637,9 +637,9 @@ pub fn build_session_options(binary: @str,
let mut debugging_opts = 0u; let mut debugging_opts = 0u;
let debug_flags = getopts::opt_strs(matches, "Z"); let debug_flags = getopts::opt_strs(matches, "Z");
let debug_map = session::debugging_opts_map(); let debug_map = session::debugging_opts_map();
foreach debug_flag in debug_flags.iter() { for debug_flag in debug_flags.iter() {
let mut this_bit = 0u; let mut this_bit = 0u;
foreach tuple in debug_map.iter() { for tuple in debug_map.iter() {
let (name, bit) = match *tuple { (ref a, _, b) => (a, b) }; let (name, bit) = match *tuple { (ref a, _, b) => (a, b) };
if name == debug_flag { this_bit = bit; break; } if name == debug_flag { this_bit = bit; break; }
} }

@ -380,7 +380,7 @@ fn is_extra(cx: &TestCtxt) -> bool {
fn mk_test_descs(cx: &TestCtxt) -> @ast::expr { fn mk_test_descs(cx: &TestCtxt) -> @ast::expr {
debug!("building test vector from %u tests", cx.testfns.len()); debug!("building test vector from %u tests", cx.testfns.len());
let mut descs = ~[]; let mut descs = ~[];
foreach test in cx.testfns.iter() { for test in cx.testfns.iter() {
descs.push(mk_test_desc_and_fn_rec(cx, test)); descs.push(mk_test_desc_and_fn_rec(cx, test));
} }

@ -65,7 +65,7 @@ struct cache_entry {
fn dump_crates(crate_cache: &[cache_entry]) { fn dump_crates(crate_cache: &[cache_entry]) {
debug!("resolved crates:"); debug!("resolved crates:");
foreach entry in crate_cache.iter() { for entry in crate_cache.iter() {
debug!("cnum: %?", entry.cnum); debug!("cnum: %?", entry.cnum);
debug!("span: %?", entry.span); debug!("span: %?", entry.span);
debug!("hash: %?", entry.hash); debug!("hash: %?", entry.hash);
@ -97,7 +97,7 @@ fn warn_if_multiple_versions(e: @mut Env,
if matches.len() != 1u { if matches.len() != 1u {
diag.handler().warn( diag.handler().warn(
fmt!("using multiple versions of crate `%s`", name)); fmt!("using multiple versions of crate `%s`", name));
foreach match_ in matches.iter() { for match_ in matches.iter() {
diag.span_note(match_.span, "used here"); diag.span_note(match_.span, "used here");
let attrs = ~[ let attrs = ~[
attr::mk_attr(attr::mk_list_item(@"link", attr::mk_attr(attr::mk_list_item(@"link",
@ -125,7 +125,7 @@ struct Env {
fn visit_crate(e: &Env, c: &ast::Crate) { fn visit_crate(e: &Env, c: &ast::Crate) {
let cstore = e.cstore; let cstore = e.cstore;
foreach a in c.attrs.iter().filter(|m| "link_args" == m.name()) { for a in c.attrs.iter().filter(|m| "link_args" == m.name()) {
match a.value_str() { match a.value_str() {
Some(ref linkarg) => { Some(ref linkarg) => {
cstore::add_used_link_args(cstore, *linkarg); cstore::add_used_link_args(cstore, *linkarg);
@ -194,7 +194,7 @@ fn visit_item(e: &Env, i: @ast::item) {
ast::anonymous => { /* do nothing */ } ast::anonymous => { /* do nothing */ }
} }
foreach m in link_args.iter() { for m in link_args.iter() {
match m.value_str() { match m.value_str() {
Some(linkarg) => { Some(linkarg) => {
cstore::add_used_link_args(cstore, linkarg); cstore::add_used_link_args(cstore, linkarg);
@ -223,7 +223,7 @@ fn metas_with_ident(ident: @str, metas: ~[@ast::MetaItem])
fn existing_match(e: &Env, metas: &[@ast::MetaItem], hash: &str) fn existing_match(e: &Env, metas: &[@ast::MetaItem], hash: &str)
-> Option<int> { -> Option<int> {
foreach c in e.crate_cache.iter() { for c in e.crate_cache.iter() {
if loader::metadata_matches(*c.metas, metas) if loader::metadata_matches(*c.metas, metas)
&& (hash.is_empty() || c.hash.as_slice() == hash) { && (hash.is_empty() || c.hash.as_slice() == hash) {
return Some(c.cnum); return Some(c.cnum);
@ -306,7 +306,7 @@ fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map {
// numbers // numbers
let mut cnum_map = HashMap::new(); let mut cnum_map = HashMap::new();
let r = decoder::get_crate_deps(cdata); let r = decoder::get_crate_deps(cdata);
foreach dep in r.iter() { for dep in r.iter() {
let extrn_cnum = dep.cnum; let extrn_cnum = dep.cnum;
let cname = dep.name; let cname = dep.name;
let cname_str = token::ident_to_str(&dep.name); let cname_str = token::ident_to_str(&dep.name);


@ -84,7 +84,7 @@ pub fn have_crate_data(cstore: &CStore, cnum: ast::CrateNum) -> bool {
pub fn iter_crate_data(cstore: &CStore, pub fn iter_crate_data(cstore: &CStore,
i: &fn(ast::CrateNum, @crate_metadata)) { i: &fn(ast::CrateNum, @crate_metadata)) {
foreach (&k, &v) in cstore.metas.iter() { for (&k, &v) in cstore.metas.iter() {
i(k, v); i(k, v);
} }
} }
@ -114,7 +114,7 @@ pub fn get_used_libraries<'a>(cstore: &'a CStore) -> &'a [@str] {
} }
pub fn add_used_link_args(cstore: &mut CStore, args: &str) { pub fn add_used_link_args(cstore: &mut CStore, args: &str) {
foreach s in args.split_iter(' ') { for s in args.split_iter(' ') {
cstore.used_link_args.push(s.to_managed()); cstore.used_link_args.push(s.to_managed());
} }
} }
@ -148,7 +148,7 @@ struct crate_hash {
pub fn get_dep_hashes(cstore: &CStore) -> ~[@str] { pub fn get_dep_hashes(cstore: &CStore) -> ~[@str] {
let mut result = ~[]; let mut result = ~[];
foreach (_, &cnum) in cstore.extern_mod_crate_map.iter() { for (_, &cnum) in cstore.extern_mod_crate_map.iter() {
let cdata = cstore::get_crate_data(cstore, cnum); let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data); let hash = decoder::get_crate_hash(cdata.data);
let vers = decoder::get_crate_vers(cdata.data); let vers = decoder::get_crate_vers(cdata.data);
@ -165,7 +165,7 @@ pub fn get_dep_hashes(cstore: &CStore) -> ~[@str] {
}; };
debug!("sorted:"); debug!("sorted:");
foreach x in sorted.iter() { for x in sorted.iter() {
debug!(" hash[%s]: %s", x.name, x.hash); debug!(" hash[%s]: %s", x.name, x.hash);
} }


@ -761,7 +761,7 @@ pub fn get_enum_variants(intr: @ident_interner, cdata: cmd, id: ast::NodeId,
let mut infos: ~[@ty::VariantInfo] = ~[]; let mut infos: ~[@ty::VariantInfo] = ~[];
let variant_ids = enum_variant_ids(item, cdata); let variant_ids = enum_variant_ids(item, cdata);
let mut disr_val = 0; let mut disr_val = 0;
foreach did in variant_ids.iter() { for did in variant_ids.iter() {
let item = find_item(did.node, items); let item = find_item(did.node, items);
let ctor_ty = item_type(ast::def_id { crate: cdata.cnum, node: id}, let ctor_ty = item_type(ast::def_id { crate: cdata.cnum, node: id},
item, tcx, cdata); item, tcx, cdata);
@ -974,7 +974,7 @@ pub fn get_static_methods_if_impl(intr: @ident_interner,
}; };
let mut static_impl_methods = ~[]; let mut static_impl_methods = ~[];
foreach impl_method_id in impl_method_ids.iter() { for impl_method_id in impl_method_ids.iter() {
let impl_method_doc = lookup_item(impl_method_id.node, cdata.data); let impl_method_doc = lookup_item(impl_method_id.node, cdata.data);
let family = item_family(impl_method_doc); let family = item_family(impl_method_doc);
match family { match family {
@ -1171,7 +1171,7 @@ fn list_meta_items(intr: @ident_interner,
meta_items: ebml::Doc, meta_items: ebml::Doc,
out: @io::Writer) { out: @io::Writer) {
let r = get_meta_items(meta_items); let r = get_meta_items(meta_items);
foreach mi in r.iter() { for mi in r.iter() {
out.write_str(fmt!("%s\n", pprust::meta_item_to_str(*mi, intr))); out.write_str(fmt!("%s\n", pprust::meta_item_to_str(*mi, intr)));
} }
} }
@ -1181,7 +1181,7 @@ fn list_crate_attributes(intr: @ident_interner, md: ebml::Doc, hash: &str,
out.write_str(fmt!("=Crate Attributes (%s)=\n", hash)); out.write_str(fmt!("=Crate Attributes (%s)=\n", hash));
let r = get_attributes(md); let r = get_attributes(md);
foreach attr in r.iter() { for attr in r.iter() {
out.write_str(fmt!("%s\n", pprust::attribute_to_str(attr, intr))); out.write_str(fmt!("%s\n", pprust::attribute_to_str(attr, intr)));
} }
@ -1224,7 +1224,7 @@ fn list_crate_deps(data: @~[u8], out: @io::Writer) {
out.write_str("=External Dependencies=\n"); out.write_str("=External Dependencies=\n");
let r = get_crate_deps(data); let r = get_crate_deps(data);
foreach dep in r.iter() { for dep in r.iter() {
out.write_str( out.write_str(
fmt!("%d %s-%s-%s\n", fmt!("%d %s-%s-%s\n",
dep.cnum, token::ident_to_str(&dep.name), dep.hash, dep.vers)); dep.cnum, token::ident_to_str(&dep.name), dep.hash, dep.vers));


@ -120,7 +120,7 @@ fn encode_region_param(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
it: @ast::item) { it: @ast::item) {
let opt_rp = ecx.tcx.region_paramd_items.find(&it.id); let opt_rp = ecx.tcx.region_paramd_items.find(&it.id);
foreach rp in opt_rp.iter() { for rp in opt_rp.iter() {
ebml_w.start_tag(tag_region_param); ebml_w.start_tag(tag_region_param);
rp.encode(ebml_w); rp.encode(ebml_w);
ebml_w.end_tag(); ebml_w.end_tag();
@ -193,7 +193,7 @@ fn encode_ty_type_param_defs(ebml_w: &mut writer::Encoder,
tcx: ecx.tcx, tcx: ecx.tcx,
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs) abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)
}; };
foreach param in params.iter() { for param in params.iter() {
ebml_w.start_tag(tag); ebml_w.start_tag(tag);
tyencode::enc_type_param_def(ebml_w.writer, ty_str_ctxt, param); tyencode::enc_type_param_def(ebml_w.writer, ty_str_ctxt, param);
ebml_w.end_tag(); ebml_w.end_tag();
@ -250,7 +250,7 @@ fn encode_type(ecx: &EncodeContext,
fn encode_transformed_self_ty(ecx: &EncodeContext, fn encode_transformed_self_ty(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
opt_typ: Option<ty::t>) { opt_typ: Option<ty::t>) {
foreach &typ in opt_typ.iter() { for &typ in opt_typ.iter() {
ebml_w.start_tag(tag_item_method_transformed_self_ty); ebml_w.start_tag(tag_item_method_transformed_self_ty);
write_type(ecx, ebml_w, typ); write_type(ecx, ebml_w, typ);
ebml_w.end_tag(); ebml_w.end_tag();
@ -327,7 +327,7 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
let mut i = 0; let mut i = 0;
let vi = ty::enum_variants(ecx.tcx, let vi = ty::enum_variants(ecx.tcx,
ast::def_id { crate: LOCAL_CRATE, node: id }); ast::def_id { crate: LOCAL_CRATE, node: id });
foreach variant in variants.iter() { for variant in variants.iter() {
let def_id = local_def(variant.node.id); let def_id = local_def(variant.node.id);
index.push(entry {val: variant.node.id, pos: ebml_w.writer.tell()}); index.push(entry {val: variant.node.id, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item); ebml_w.start_tag(tag_items_data_item);
@ -375,7 +375,7 @@ fn encode_path(ecx: &EncodeContext,
ebml_w.start_tag(tag_path); ebml_w.start_tag(tag_path);
ebml_w.wr_tagged_u32(tag_path_len, (path.len() + 1) as u32); ebml_w.wr_tagged_u32(tag_path_len, (path.len() + 1) as u32);
foreach pe in path.iter() { for pe in path.iter() {
encode_path_elt(ecx, ebml_w, *pe); encode_path_elt(ecx, ebml_w, *pe);
} }
encode_path_elt(ecx, ebml_w, name); encode_path_elt(ecx, ebml_w, name);
@ -405,8 +405,8 @@ fn encode_reexported_static_base_methods(ecx: &EncodeContext,
-> bool { -> bool {
match ecx.tcx.inherent_impls.find(&exp.def_id) { match ecx.tcx.inherent_impls.find(&exp.def_id) {
Some(implementations) => { Some(implementations) => {
foreach &base_impl in implementations.iter() { for &base_impl in implementations.iter() {
foreach &m in base_impl.methods.iter() { for &m in base_impl.methods.iter() {
if m.explicit_self == ast::sty_static { if m.explicit_self == ast::sty_static {
encode_reexported_static_method(ecx, ebml_w, exp, encode_reexported_static_method(ecx, ebml_w, exp,
m.def_id, m.ident); m.def_id, m.ident);
@ -426,7 +426,7 @@ fn encode_reexported_static_trait_methods(ecx: &EncodeContext,
-> bool { -> bool {
match ecx.tcx.trait_methods_cache.find(&exp.def_id) { match ecx.tcx.trait_methods_cache.find(&exp.def_id) {
Some(methods) => { Some(methods) => {
foreach &m in methods.iter() { for &m in methods.iter() {
if m.explicit_self == ast::sty_static { if m.explicit_self == ast::sty_static {
encode_reexported_static_method(ecx, ebml_w, exp, encode_reexported_static_method(ecx, ebml_w, exp,
m.def_id, m.ident); m.def_id, m.ident);
@ -486,7 +486,7 @@ fn each_auxiliary_node_id(item: @item, callback: &fn(NodeId) -> bool)
let mut continue = true; let mut continue = true;
match item.node { match item.node {
item_enum(ref enum_def, _) => { item_enum(ref enum_def, _) => {
foreach variant in enum_def.variants.iter() { for variant in enum_def.variants.iter() {
continue = callback(variant.node.id); continue = callback(variant.node.id);
if !continue { if !continue {
break break
@ -518,7 +518,7 @@ fn encode_reexports(ecx: &EncodeContext,
match ecx.reexports2.find(&id) { match ecx.reexports2.find(&id) {
Some(ref exports) => { Some(ref exports) => {
debug!("(encoding info for module) found reexports for %d", id); debug!("(encoding info for module) found reexports for %d", id);
foreach exp in exports.iter() { for exp in exports.iter() {
debug!("(encoding info for module) reexport '%s' for %d", debug!("(encoding info for module) reexport '%s' for %d",
exp.name, id); exp.name, id);
ebml_w.start_tag(tag_items_data_item_reexport); ebml_w.start_tag(tag_items_data_item_reexport);
@ -553,7 +553,7 @@ fn encode_info_for_mod(ecx: &EncodeContext,
debug!("(encoding info for module) encoding info for module ID %d", id); debug!("(encoding info for module) encoding info for module ID %d", id);
// Encode info about all the module children. // Encode info about all the module children.
foreach item in md.items.iter() { for item in md.items.iter() {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(item.id))); ebml_w.wr_str(def_to_str(local_def(item.id)));
ebml_w.end_tag(); ebml_w.end_tag();
@ -664,7 +664,7 @@ fn encode_method_sort(ebml_w: &mut writer::Encoder, sort: char) {
fn encode_provided_source(ebml_w: &mut writer::Encoder, fn encode_provided_source(ebml_w: &mut writer::Encoder,
source_opt: Option<def_id>) { source_opt: Option<def_id>) {
foreach source in source_opt.iter() { for source in source_opt.iter() {
ebml_w.start_tag(tag_item_method_provided_source); ebml_w.start_tag(tag_item_method_provided_source);
let s = def_to_str(*source); let s = def_to_str(*source);
ebml_w.writer.write(s.as_bytes()); ebml_w.writer.write(s.as_bytes());
@ -685,7 +685,7 @@ fn encode_info_for_struct(ecx: &EncodeContext,
let tcx = ecx.tcx; let tcx = ecx.tcx;
/* We encode both private and public fields -- need to include /* We encode both private and public fields -- need to include
private fields to get the offsets right */ private fields to get the offsets right */
foreach field in fields.iter() { for field in fields.iter() {
let (nm, vis) = match field.node.kind { let (nm, vis) = match field.node.kind {
named_field(nm, vis) => (nm, vis), named_field(nm, vis) => (nm, vis),
unnamed_field => (special_idents::unnamed_field, inherited) unnamed_field => (special_idents::unnamed_field, inherited)
@ -772,7 +772,7 @@ fn encode_info_for_method(ecx: &EncodeContext,
encode_path(ecx, ebml_w, impl_path, ast_map::path_name(m.ident)); encode_path(ecx, ebml_w, impl_path, ast_map::path_name(m.ident));
foreach ast_method in ast_method_opt.iter() { for ast_method in ast_method_opt.iter() {
let num_params = tpt.generics.type_param_defs.len(); let num_params = tpt.generics.type_param_defs.len();
if num_params > 0u || is_default_impl if num_params > 0u || is_default_impl
|| should_inline(ast_method.attrs) { || should_inline(ast_method.attrs) {
@ -882,7 +882,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
// Encode all the items in this module. // Encode all the items in this module.
foreach foreign_item in fm.items.iter() { for foreign_item in fm.items.iter() {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(foreign_item.id))); ebml_w.wr_str(def_to_str(local_def(foreign_item.id)));
ebml_w.end_tag(); ebml_w.end_tag();
@ -909,7 +909,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_family(ebml_w, 't'); encode_family(ebml_w, 't');
encode_bounds_and_type(ebml_w, ecx, &lookup_item_type(tcx, def_id)); encode_bounds_and_type(ebml_w, ecx, &lookup_item_type(tcx, def_id));
encode_name(ecx, ebml_w, item.ident); encode_name(ecx, ebml_w, item.ident);
foreach v in (*enum_definition).variants.iter() { for v in (*enum_definition).variants.iter() {
encode_variant_id(ebml_w, local_def(v.node.id)); encode_variant_id(ebml_w, local_def(v.node.id));
} }
(ecx.encode_inlined_item)(ecx, ebml_w, path, ii_item(item)); (ecx.encode_inlined_item)(ecx, ebml_w, path, ii_item(item));
@ -950,7 +950,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
/* Encode def_ids for each field and method /* Encode def_ids for each field and method
for methods, write all the stuff get_trait_method for methods, write all the stuff get_trait_method
needs to know*/ needs to know*/
foreach f in struct_def.fields.iter() { for f in struct_def.fields.iter() {
match f.node.kind { match f.node.kind {
named_field(ident, vis) => { named_field(ident, vis) => {
ebml_w.start_tag(tag_item_field); ebml_w.start_tag(tag_item_field);
@ -1010,13 +1010,13 @@ fn encode_info_for_item(ecx: &EncodeContext,
} }
_ => {} _ => {}
} }
foreach method in imp.methods.iter() { for method in imp.methods.iter() {
ebml_w.start_tag(tag_item_impl_method); ebml_w.start_tag(tag_item_impl_method);
let s = def_to_str(method.def_id); let s = def_to_str(method.def_id);
ebml_w.writer.write(s.as_bytes()); ebml_w.writer.write(s.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
} }
foreach ast_trait_ref in opt_trait.iter() { for ast_trait_ref in opt_trait.iter() {
let trait_ref = ty::node_id_to_trait_ref( let trait_ref = ty::node_id_to_trait_ref(
tcx, ast_trait_ref.ref_id); tcx, ast_trait_ref.ref_id);
encode_trait_ref(ebml_w, ecx, trait_ref, tag_item_trait_ref); encode_trait_ref(ebml_w, ecx, trait_ref, tag_item_trait_ref);
@ -1035,7 +1035,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
// appear first in the impl structure, in the same order they do // appear first in the impl structure, in the same order they do
// in the ast. This is a little sketchy. // in the ast. This is a little sketchy.
let num_implemented_methods = ast_methods.len(); let num_implemented_methods = ast_methods.len();
foreach (i, m) in imp.methods.iter().enumerate() { for (i, m) in imp.methods.iter().enumerate() {
let ast_method = if i < num_implemented_methods { let ast_method = if i < num_implemented_methods {
Some(ast_methods[i]) Some(ast_methods[i])
} else { None }; } else { None };
@ -1063,7 +1063,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_trait_ref(ebml_w, ecx, trait_def.trait_ref, tag_item_trait_ref); encode_trait_ref(ebml_w, ecx, trait_def.trait_ref, tag_item_trait_ref);
encode_name(ecx, ebml_w, item.ident); encode_name(ecx, ebml_w, item.ident);
encode_attributes(ebml_w, item.attrs); encode_attributes(ebml_w, item.attrs);
foreach &method_def_id in ty::trait_method_def_ids(tcx, def_id).iter() { for &method_def_id in ty::trait_method_def_ids(tcx, def_id).iter() {
ebml_w.start_tag(tag_item_trait_method); ebml_w.start_tag(tag_item_trait_method);
encode_def_id(ebml_w, method_def_id); encode_def_id(ebml_w, method_def_id);
ebml_w.end_tag(); ebml_w.end_tag();
@ -1073,7 +1073,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
ebml_w.end_tag(); ebml_w.end_tag();
} }
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
foreach ast_trait_ref in super_traits.iter() { for ast_trait_ref in super_traits.iter() {
let trait_ref = ty::node_id_to_trait_ref(ecx.tcx, ast_trait_ref.ref_id); let trait_ref = ty::node_id_to_trait_ref(ecx.tcx, ast_trait_ref.ref_id);
encode_trait_ref(ebml_w, ecx, trait_ref, tag_item_super_trait_ref); encode_trait_ref(ebml_w, ecx, trait_ref, tag_item_super_trait_ref);
} }
@ -1081,7 +1081,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
// Now output the method info for each method. // Now output the method info for each method.
let r = ty::trait_method_def_ids(tcx, def_id); let r = ty::trait_method_def_ids(tcx, def_id);
foreach (i, &method_def_id) in r.iter().enumerate() { for (i, &method_def_id) in r.iter().enumerate() {
assert_eq!(method_def_id.crate, ast::LOCAL_CRATE); assert_eq!(method_def_id.crate, ast::LOCAL_CRATE);
let method_ty = ty::method(tcx, method_def_id); let method_ty = ty::method(tcx, method_def_id);
@ -1257,14 +1257,14 @@ fn create_index<T:Clone + Hash + IterBytes + 'static>(
index: ~[entry<T>]) index: ~[entry<T>])
-> ~[@~[entry<T>]] { -> ~[@~[entry<T>]] {
let mut buckets: ~[@mut ~[entry<T>]] = ~[]; let mut buckets: ~[@mut ~[entry<T>]] = ~[];
foreach _ in range(0u, 256u) { buckets.push(@mut ~[]); }; for _ in range(0u, 256u) { buckets.push(@mut ~[]); };
foreach elt in index.iter() { for elt in index.iter() {
let h = elt.val.hash() as uint; let h = elt.val.hash() as uint;
buckets[h % 256].push((*elt).clone()); buckets[h % 256].push((*elt).clone());
} }
let mut buckets_frozen = ~[]; let mut buckets_frozen = ~[];
foreach bucket in buckets.iter() { for bucket in buckets.iter() {
buckets_frozen.push(@/*bad*/(**bucket).clone()); buckets_frozen.push(@/*bad*/(**bucket).clone());
} }
return buckets_frozen; return buckets_frozen;
@ -1278,10 +1278,10 @@ fn encode_index<T:'static>(
ebml_w.start_tag(tag_index); ebml_w.start_tag(tag_index);
let mut bucket_locs: ~[uint] = ~[]; let mut bucket_locs: ~[uint] = ~[];
ebml_w.start_tag(tag_index_buckets); ebml_w.start_tag(tag_index_buckets);
foreach bucket in buckets.iter() { for bucket in buckets.iter() {
bucket_locs.push(ebml_w.writer.tell()); bucket_locs.push(ebml_w.writer.tell());
ebml_w.start_tag(tag_index_buckets_bucket); ebml_w.start_tag(tag_index_buckets_bucket);
foreach elt in (**bucket).iter() { for elt in (**bucket).iter() {
ebml_w.start_tag(tag_index_buckets_bucket_elt); ebml_w.start_tag(tag_index_buckets_bucket_elt);
assert!(elt.pos < 0xffff_ffff); assert!(elt.pos < 0xffff_ffff);
writer.write_be_u32(elt.pos as u32); writer.write_be_u32(elt.pos as u32);
@ -1292,7 +1292,7 @@ fn encode_index<T:'static>(
} }
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_index_table); ebml_w.start_tag(tag_index_table);
foreach pos in bucket_locs.iter() { for pos in bucket_locs.iter() {
assert!(*pos < 0xffff_ffff); assert!(*pos < 0xffff_ffff);
writer.write_be_u32(*pos as u32); writer.write_be_u32(*pos as u32);
} }
@ -1338,7 +1338,7 @@ fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @MetaItem) {
ebml_w.start_tag(tag_meta_item_name); ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(name.as_bytes()); ebml_w.writer.write(name.as_bytes());
ebml_w.end_tag(); ebml_w.end_tag();
foreach inner_item in items.iter() { for inner_item in items.iter() {
encode_meta_item(ebml_w, *inner_item); encode_meta_item(ebml_w, *inner_item);
} }
ebml_w.end_tag(); ebml_w.end_tag();
@ -1348,7 +1348,7 @@ fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @MetaItem) {
fn encode_attributes(ebml_w: &mut writer::Encoder, attrs: &[Attribute]) { fn encode_attributes(ebml_w: &mut writer::Encoder, attrs: &[Attribute]) {
ebml_w.start_tag(tag_attributes); ebml_w.start_tag(tag_attributes);
foreach attr in attrs.iter() { for attr in attrs.iter() {
ebml_w.start_tag(tag_attribute); ebml_w.start_tag(tag_attribute);
encode_meta_item(ebml_w, attr.node.value); encode_meta_item(ebml_w, attr.node.value);
ebml_w.end_tag(); ebml_w.end_tag();
@ -1378,7 +1378,7 @@ fn synthesize_crate_attrs(ecx: &EncodeContext,
let mut meta_items = ~[name_item, vers_item]; let mut meta_items = ~[name_item, vers_item];
foreach &mi in items.iter().filter(|mi| "name" != mi.name() && "vers" != mi.name()) { for &mi in items.iter().filter(|mi| "name" != mi.name() && "vers" != mi.name()) {
meta_items.push(mi); meta_items.push(mi);
} }
let link_item = attr::mk_list_item(@"link", meta_items); let link_item = attr::mk_list_item(@"link", meta_items);
@ -1388,7 +1388,7 @@ fn synthesize_crate_attrs(ecx: &EncodeContext,
let mut attrs = ~[]; let mut attrs = ~[];
let mut found_link_attr = false; let mut found_link_attr = false;
foreach attr in crate.attrs.iter() { for attr in crate.attrs.iter() {
attrs.push( attrs.push(
if "link" != attr.name() { if "link" != attr.name() {
*attr *attr
@ -1430,7 +1430,7 @@ fn encode_crate_deps(ecx: &EncodeContext,
// Sanity-check the crate numbers // Sanity-check the crate numbers
let mut expected_cnum = 1; let mut expected_cnum = 1;
foreach n in deps.iter() { for n in deps.iter() {
assert_eq!(n.cnum, expected_cnum); assert_eq!(n.cnum, expected_cnum);
expected_cnum += 1; expected_cnum += 1;
} }
@ -1444,7 +1444,7 @@ fn encode_crate_deps(ecx: &EncodeContext,
// but is enough to get transitive crate dependencies working. // but is enough to get transitive crate dependencies working.
ebml_w.start_tag(tag_crate_deps); ebml_w.start_tag(tag_crate_deps);
let r = get_ordered_deps(ecx, cstore); let r = get_ordered_deps(ecx, cstore);
foreach dep in r.iter() { for dep in r.iter() {
encode_crate_dep(ecx, ebml_w, *dep); encode_crate_dep(ecx, ebml_w, *dep);
} }
ebml_w.end_tag(); ebml_w.end_tag();
@ -1454,7 +1454,7 @@ fn encode_lang_items(ecx: &EncodeContext, ebml_w: &mut writer::Encoder) {
ebml_w.start_tag(tag_lang_items); ebml_w.start_tag(tag_lang_items);
do ecx.tcx.lang_items.each_item |def_id, i| { do ecx.tcx.lang_items.each_item |def_id, i| {
foreach id in def_id.iter() { for id in def_id.iter() {
if id.crate == LOCAL_CRATE { if id.crate == LOCAL_CRATE {
ebml_w.start_tag(tag_lang_items_item); ebml_w.start_tag(tag_lang_items_item);
@ -1479,7 +1479,7 @@ fn encode_link_args(ecx: &EncodeContext, ebml_w: &mut writer::Encoder) {
ebml_w.start_tag(tag_link_args); ebml_w.start_tag(tag_link_args);
let link_args = cstore::get_used_link_args(ecx.cstore); let link_args = cstore::get_used_link_args(ecx.cstore);
foreach link_arg in link_args.iter() { for link_arg in link_args.iter() {
ebml_w.start_tag(tag_link_args_arg); ebml_w.start_tag(tag_link_args_arg);
ebml_w.writer.write_str(link_arg.to_str()); ebml_w.writer.write_str(link_arg.to_str());
ebml_w.end_tag(); ebml_w.end_tag();
@ -1493,7 +1493,7 @@ fn encode_misc_info(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder) { ebml_w: &mut writer::Encoder) {
ebml_w.start_tag(tag_misc_info); ebml_w.start_tag(tag_misc_info);
ebml_w.start_tag(tag_misc_info_crate_items); ebml_w.start_tag(tag_misc_info_crate_items);
foreach &item in crate.module.items.iter() { for &item in crate.module.items.iter() {
ebml_w.start_tag(tag_mod_child); ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(item.id))); ebml_w.wr_str(def_to_str(local_def(item.id)));
ebml_w.end_tag(); ebml_w.end_tag();
@ -1630,7 +1630,7 @@ pub fn encode_metadata(parms: EncodeParams, crate: &Crate) -> ~[u8] {
ecx.stats.total_bytes = *wr.pos; ecx.stats.total_bytes = *wr.pos;
if (tcx.sess.meta_stats()) { if (tcx.sess.meta_stats()) {
foreach e in wr.bytes.iter() { for e in wr.bytes.iter() {
if *e == 0 { if *e == 0 {
ecx.stats.zero_bytes += 1; ecx.stats.zero_bytes += 1;
} }


@ -91,7 +91,7 @@ pub fn search<T>(filesearch: @FileSearch, pick: pick<T>) -> Option<T> {
do filesearch.for_each_lib_search_path() |lib_search_path| { do filesearch.for_each_lib_search_path() |lib_search_path| {
debug!("searching %s", lib_search_path.to_str()); debug!("searching %s", lib_search_path.to_str());
let r = os::list_dir_path(lib_search_path); let r = os::list_dir_path(lib_search_path);
foreach path in r.iter() { for path in r.iter() {
debug!("testing %s", path.to_str()); debug!("testing %s", path.to_str());
let maybe_picked = pick(path); let maybe_picked = pick(path);
if maybe_picked.is_some() { if maybe_picked.is_some() {


@ -128,7 +128,7 @@ fn find_library_crate_aux(
cx.diag.span_err( cx.diag.span_err(
cx.span, fmt!("multiple matching crates for `%s`", crate_name)); cx.span, fmt!("multiple matching crates for `%s`", crate_name));
cx.diag.handler().note("candidates:"); cx.diag.handler().note("candidates:");
foreach pair in matches.iter() { for pair in matches.iter() {
let ident = pair.first(); let ident = pair.first();
let data = pair.second(); let data = pair.second();
cx.diag.handler().note(fmt!("path: %s", ident)); cx.diag.handler().note(fmt!("path: %s", ident));
@ -142,7 +142,7 @@ fn find_library_crate_aux(
} }
pub fn crate_name_from_metas(metas: &[@ast::MetaItem]) -> @str { pub fn crate_name_from_metas(metas: &[@ast::MetaItem]) -> @str {
foreach m in metas.iter() { for m in metas.iter() {
match m.name_str_pair() { match m.name_str_pair() {
Some((name, s)) if "name" == name => { return s; } Some((name, s)) if "name" == name => { return s; }
_ => {} _ => {}
@ -155,7 +155,7 @@ pub fn note_linkage_attrs(intr: @ident_interner,
diag: @span_handler, diag: @span_handler,
attrs: ~[ast::Attribute]) { attrs: ~[ast::Attribute]) {
let r = attr::find_linkage_metas(attrs); let r = attr::find_linkage_metas(attrs);
foreach mi in r.iter() { for mi in r.iter() {
diag.handler().note(fmt!("meta: %s", pprust::meta_item_to_str(*mi,intr))); diag.handler().note(fmt!("meta: %s", pprust::meta_item_to_str(*mi,intr)));
} }
} }


@ -123,7 +123,7 @@ fn enc_substs(w: @io::Writer, cx: @ctxt, substs: &ty::substs) {
enc_region_substs(w, cx, &substs.regions); enc_region_substs(w, cx, &substs.regions);
do enc_opt(w, substs.self_ty) |t| { enc_ty(w, cx, t) } do enc_opt(w, substs.self_ty) |t| { enc_ty(w, cx, t) }
w.write_char('['); w.write_char('[');
foreach t in substs.tps.iter() { enc_ty(w, cx, *t); } for t in substs.tps.iter() { enc_ty(w, cx, *t); }
w.write_char(']'); w.write_char(']');
} }
@ -134,7 +134,7 @@ fn enc_region_substs(w: @io::Writer, cx: @ctxt, substs: &ty::RegionSubsts) {
} }
ty::NonerasedRegions(ref regions) => { ty::NonerasedRegions(ref regions) => {
w.write_char('n'); w.write_char('n');
foreach &r in regions.iter() { for &r in regions.iter() {
enc_region(w, cx, r); enc_region(w, cx, r);
} }
w.write_char('.'); w.write_char('.');
@ -288,7 +288,7 @@ fn enc_sty(w: @io::Writer, cx: @ctxt, st: &ty::sty) {
} }
ty::ty_tup(ref ts) => { ty::ty_tup(ref ts) => {
w.write_str(&"T["); w.write_str(&"T[");
foreach t in ts.iter() { enc_ty(w, cx, *t); } for t in ts.iter() { enc_ty(w, cx, *t); }
w.write_char(']'); w.write_char(']');
} }
ty::ty_box(mt) => { w.write_char('@'); enc_mt(w, cx, mt); } ty::ty_box(mt) => { w.write_char('@'); enc_mt(w, cx, mt); }
@ -405,7 +405,7 @@ fn enc_closure_ty(w: @io::Writer, cx: @ctxt, ft: &ty::ClosureTy) {
fn enc_fn_sig(w: @io::Writer, cx: @ctxt, fsig: &ty::FnSig) { fn enc_fn_sig(w: @io::Writer, cx: @ctxt, fsig: &ty::FnSig) {
w.write_char('['); w.write_char('[');
foreach ty in fsig.inputs.iter() { for ty in fsig.inputs.iter() {
enc_ty(w, cx, *ty); enc_ty(w, cx, *ty);
} }
w.write_char(']'); w.write_char(']');
@ -423,7 +423,7 @@ fn enc_bounds(w: @io::Writer, cx: @ctxt, bs: &ty::ParamBounds) {
true true
}; };
foreach &tp in bs.trait_bounds.iter() { for &tp in bs.trait_bounds.iter() {
w.write_char('I'); w.write_char('I');
enc_trait_ref(w, cx, tp); enc_trait_ref(w, cx, tp);
} }


@ -854,7 +854,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
{ {
let r = tcx.def_map.find(&id); let r = tcx.def_map.find(&id);
foreach def in r.iter() { for def in r.iter() {
do ebml_w.tag(c::tag_table_def) |ebml_w| { do ebml_w.tag(c::tag_table_def) |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| { do ebml_w.tag(c::tag_table_val) |ebml_w| {
@ -866,7 +866,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
{ {
let r = tcx.node_types.find(&(id as uint)); let r = tcx.node_types.find(&(id as uint));
foreach &ty in r.iter() { for &ty in r.iter() {
do ebml_w.tag(c::tag_table_node_type) |ebml_w| { do ebml_w.tag(c::tag_table_node_type) |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| { do ebml_w.tag(c::tag_table_val) |ebml_w| {
@ -878,7 +878,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
{ {
let r = tcx.node_type_substs.find(&id); let r = tcx.node_type_substs.find(&id);
foreach tys in r.iter() { for tys in r.iter() {
do ebml_w.tag(c::tag_table_node_type_subst) |ebml_w| { do ebml_w.tag(c::tag_table_node_type_subst) |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| { do ebml_w.tag(c::tag_table_val) |ebml_w| {
@ -890,7 +890,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
{ {
let r = tcx.freevars.find(&id); let r = tcx.freevars.find(&id);
foreach &fv in r.iter() { for &fv in r.iter() {
do ebml_w.tag(c::tag_table_freevars) |ebml_w| { do ebml_w.tag(c::tag_table_freevars) |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| { do ebml_w.tag(c::tag_table_val) |ebml_w| {
@ -905,7 +905,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
let lid = ast::def_id { crate: ast::LOCAL_CRATE, node: id }; let lid = ast::def_id { crate: ast::LOCAL_CRATE, node: id };
{ {
let r = tcx.tcache.find(&lid); let r = tcx.tcache.find(&lid);
foreach &tpbt in r.iter() { for &tpbt in r.iter() {
do ebml_w.tag(c::tag_table_tcache) |ebml_w| { do ebml_w.tag(c::tag_table_tcache) |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| { do ebml_w.tag(c::tag_table_val) |ebml_w| {
@ -917,7 +917,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
{ {
let r = tcx.ty_param_defs.find(&id); let r = tcx.ty_param_defs.find(&id);
foreach &type_param_def in r.iter() { for &type_param_def in r.iter() {
do ebml_w.tag(c::tag_table_param_defs) |ebml_w| { do ebml_w.tag(c::tag_table_param_defs) |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| { do ebml_w.tag(c::tag_table_val) |ebml_w| {
@ -929,7 +929,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
{ {
let r = maps.method_map.find(&id); let r = maps.method_map.find(&id);
foreach &mme in r.iter() { for &mme in r.iter() {
do ebml_w.tag(c::tag_table_method_map) |ebml_w| { do ebml_w.tag(c::tag_table_method_map) |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| { do ebml_w.tag(c::tag_table_val) |ebml_w| {
@ -941,7 +941,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
{ {
let r = maps.vtable_map.find(&id); let r = maps.vtable_map.find(&id);
foreach &dr in r.iter() { for &dr in r.iter() {
do ebml_w.tag(c::tag_table_vtable_map) |ebml_w| { do ebml_w.tag(c::tag_table_vtable_map) |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| { do ebml_w.tag(c::tag_table_val) |ebml_w| {
@ -953,7 +953,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
{ {
let r = tcx.adjustments.find(&id); let r = tcx.adjustments.find(&id);
foreach adj in r.iter() { for adj in r.iter() {
do ebml_w.tag(c::tag_table_adjustments) |ebml_w| { do ebml_w.tag(c::tag_table_adjustments) |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| { do ebml_w.tag(c::tag_table_val) |ebml_w| {
@ -965,7 +965,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
{ {
let r = maps.capture_map.find(&id); let r = maps.capture_map.find(&id);
foreach &cap_vars in r.iter() { for &cap_vars in r.iter() {
do ebml_w.tag(c::tag_table_capture_map) |ebml_w| { do ebml_w.tag(c::tag_table_capture_map) |ebml_w| {
ebml_w.id(id); ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| { do ebml_w.tag(c::tag_table_val) |ebml_w| {
@ -1054,7 +1054,7 @@ impl ebml_decoder_decoder_helpers for reader::Decoder {
fn type_string(doc: ebml::Doc) -> ~str { fn type_string(doc: ebml::Doc) -> ~str {
let mut str = ~""; let mut str = ~"";
foreach i in range(doc.start, doc.end) { for i in range(doc.start, doc.end) {
str.push_char(doc.data[i] as char); str.push_char(doc.data[i] as char);
} }
str str


@ -116,7 +116,7 @@ impl<'self> CheckLoanCtxt<'self> {
do self.each_in_scope_loan(scope_id) |loan| { do self.each_in_scope_loan(scope_id) |loan| {
let mut ret = true; let mut ret = true;
foreach restr in loan.restrictions.iter() { for restr in loan.restrictions.iter() {
if restr.loan_path == loan_path { if restr.loan_path == loan_path {
if !op(loan, restr) { if !op(loan, restr) {
ret = false; ret = false;
@ -152,16 +152,16 @@ impl<'self> CheckLoanCtxt<'self> {
debug!("new_loan_indices = %?", new_loan_indices); debug!("new_loan_indices = %?", new_loan_indices);
do self.each_issued_loan(scope_id) |issued_loan| { do self.each_issued_loan(scope_id) |issued_loan| {
foreach &new_loan_index in new_loan_indices.iter() { for &new_loan_index in new_loan_indices.iter() {
let new_loan = &self.all_loans[new_loan_index]; let new_loan = &self.all_loans[new_loan_index];
self.report_error_if_loans_conflict(issued_loan, new_loan); self.report_error_if_loans_conflict(issued_loan, new_loan);
} }
true true
}; };
foreach i in range(0u, new_loan_indices.len()) { for i in range(0u, new_loan_indices.len()) {
let old_loan = &self.all_loans[new_loan_indices[i]]; let old_loan = &self.all_loans[new_loan_indices[i]];
foreach j in range(i+1, new_loan_indices.len()) { for j in range(i+1, new_loan_indices.len()) {
let new_loan = &self.all_loans[new_loan_indices[j]]; let new_loan = &self.all_loans[new_loan_indices[j]];
self.report_error_if_loans_conflict(old_loan, new_loan); self.report_error_if_loans_conflict(old_loan, new_loan);
} }
@ -211,7 +211,7 @@ impl<'self> CheckLoanCtxt<'self> {
}; };
debug!("illegal_if=%?", illegal_if); debug!("illegal_if=%?", illegal_if);
foreach restr in loan1.restrictions.iter() { for restr in loan1.restrictions.iter() {
if !restr.set.intersects(illegal_if) { loop; } if !restr.set.intersects(illegal_if) { loop; }
if restr.loan_path != loan2.loan_path { loop; } if restr.loan_path != loan2.loan_path { loop; }
@ -652,7 +652,7 @@ fn check_loans_in_fn<'a>(fk: &oldvisit::fn_kind,
closure_id: ast::NodeId, closure_id: ast::NodeId,
span: span) { span: span) {
let cap_vars = this.bccx.capture_map.get(&closure_id); let cap_vars = this.bccx.capture_map.get(&closure_id);
foreach cap_var in cap_vars.iter() { for cap_var in cap_vars.iter() {
let var_id = ast_util::def_id_of_def(cap_var.def).node; let var_id = ast_util::def_id_of_def(cap_var.def).node;
let var_path = @LpVar(var_id); let var_path = @LpVar(var_id);
this.check_if_path_is_moved(closure_id, span, this.check_if_path_is_moved(closure_id, span,
@ -713,7 +713,7 @@ fn check_loans_in_expr<'a>(expr: @ast::expr,
let cmt = this.bccx.cat_expr_unadjusted(expr); let cmt = this.bccx.cat_expr_unadjusted(expr);
debug!("path cmt=%s", cmt.repr(this.tcx())); debug!("path cmt=%s", cmt.repr(this.tcx()));
let r = opt_loan_path(cmt); let r = opt_loan_path(cmt);
foreach &lp in r.iter() { for &lp in r.iter() {
this.check_if_path_is_moved(expr.id, expr.span, MovedInUse, lp); this.check_if_path_is_moved(expr.id, expr.span, MovedInUse, lp);
} }
} }


@ -70,7 +70,7 @@ pub fn gather_captures(bccx: @BorrowckCtxt,
move_data: &mut MoveData, move_data: &mut MoveData,
closure_expr: @ast::expr) { closure_expr: @ast::expr) {
let captured_vars = bccx.capture_map.get(&closure_expr.id); let captured_vars = bccx.capture_map.get(&closure_expr.id);
foreach captured_var in captured_vars.iter() { for captured_var in captured_vars.iter() {
match captured_var.mode { match captured_var.mode {
moves::CapMove => { moves::CapMove => {
let fvar_id = ast_util::def_id_of_def(captured_var.def).node; let fvar_id = ast_util::def_id_of_def(captured_var.def).node;


@ -189,7 +189,7 @@ fn gather_loans_in_expr(ex: @ast::expr,
{ {
let r = ex.get_callee_id(); let r = ex.get_callee_id();
foreach callee_id in r.iter() { for callee_id in r.iter() {
this.id_range.add(*callee_id); this.id_range.add(*callee_id);
} }
} }
@ -197,7 +197,7 @@ fn gather_loans_in_expr(ex: @ast::expr,
// If this expression is borrowed, have to ensure it remains valid: // If this expression is borrowed, have to ensure it remains valid:
{ {
let r = tcx.adjustments.find(&ex.id); let r = tcx.adjustments.find(&ex.id);
foreach &adjustments in r.iter() { for &adjustments in r.iter() {
this.guarantee_adjustments(ex, *adjustments); this.guarantee_adjustments(ex, *adjustments);
} }
} }
@ -240,8 +240,8 @@ fn gather_loans_in_expr(ex: @ast::expr,
ast::expr_match(ex_v, ref arms) => { ast::expr_match(ex_v, ref arms) => {
let cmt = this.bccx.cat_expr(ex_v); let cmt = this.bccx.cat_expr(ex_v);
foreach arm in arms.iter() { for arm in arms.iter() {
foreach pat in arm.pats.iter() { for pat in arm.pats.iter() {
this.gather_pat(cmt, *pat, Some((arm.body.id, ex.id))); this.gather_pat(cmt, *pat, Some((arm.body.id, ex.id)));
} }
} }
@ -619,7 +619,7 @@ impl GatherLoanCtxt {
*/ */
let mc_ctxt = self.bccx.mc_ctxt(); let mc_ctxt = self.bccx.mc_ctxt();
foreach arg in decl.inputs.iter() { for arg in decl.inputs.iter() {
let arg_ty = ty::node_id_to_type(self.tcx(), arg.pat.id); let arg_ty = ty::node_id_to_type(self.tcx(), arg.pat.id);
let arg_cmt = mc_ctxt.cat_rvalue( let arg_cmt = mc_ctxt.cat_rvalue(


@ -139,7 +139,7 @@ impl RestrictionsContext {
// static errors. For example, if there is code like // static errors. For example, if there is code like
// //
// let v = @mut ~[1, 2, 3]; // let v = @mut ~[1, 2, 3];
// foreach e in v.iter() { // for e in v.iter() {
// v.push(e + 1); // v.push(e + 1);
// } // }
// //
@ -151,7 +151,7 @@ impl RestrictionsContext {
// //
// let v = @mut ~[1, 2, 3]; // let v = @mut ~[1, 2, 3];
// let w = v; // let w = v;
// foreach e in v.iter() { // for e in v.iter() {
// w.push(e + 1); // w.push(e + 1);
// } // }
// //
@ -164,7 +164,7 @@ impl RestrictionsContext {
// } // }
// ... // ...
// let v: &V = ...; // let v: &V = ...;
// foreach e in v.get_list().iter() { // for e in v.get_list().iter() {
// v.get_list().push(e + 1); // v.get_list().push(e + 1);
// } // }
match opt_loan_path(cmt_base) { match opt_loan_path(cmt_base) {


@ -139,7 +139,7 @@ fn borrowck_fn(fk: &oldvisit::fn_kind,
LoanDataFlowOperator, LoanDataFlowOperator,
id_range, id_range,
all_loans.len()); all_loans.len());
foreach (loan_idx, loan) in all_loans.iter().enumerate() { for (loan_idx, loan) in all_loans.iter().enumerate() {
loan_dfcx.add_gen(loan.gen_scope, loan_idx); loan_dfcx.add_gen(loan.gen_scope, loan_idx);
loan_dfcx.add_kill(loan.kill_scope, loan_idx); loan_dfcx.add_kill(loan.kill_scope, loan_idx);
} }


@ -372,22 +372,22 @@ impl MoveData {
* killed by scoping. See `doc.rs` for more details. * killed by scoping. See `doc.rs` for more details.
*/ */
foreach (i, move) in self.moves.iter().enumerate() { for (i, move) in self.moves.iter().enumerate() {
dfcx_moves.add_gen(move.id, i); dfcx_moves.add_gen(move.id, i);
} }
foreach (i, assignment) in self.var_assignments.iter().enumerate() { for (i, assignment) in self.var_assignments.iter().enumerate() {
dfcx_assign.add_gen(assignment.id, i); dfcx_assign.add_gen(assignment.id, i);
self.kill_moves(assignment.path, assignment.id, dfcx_moves); self.kill_moves(assignment.path, assignment.id, dfcx_moves);
} }
foreach assignment in self.path_assignments.iter() { for assignment in self.path_assignments.iter() {
self.kill_moves(assignment.path, assignment.id, dfcx_moves); self.kill_moves(assignment.path, assignment.id, dfcx_moves);
} }
// Kill all moves related to a variable `x` when it goes out // Kill all moves related to a variable `x` when it goes out
// of scope: // of scope:
foreach path in self.paths.iter() { for path in self.paths.iter() {
match *path.loan_path { match *path.loan_path {
LpVar(id) => { LpVar(id) => {
let kill_id = tcx.region_maps.encl_scope(id); let kill_id = tcx.region_maps.encl_scope(id);
@ -399,7 +399,7 @@ impl MoveData {
} }
// Kill all assignments when the variable goes out of scope: // Kill all assignments when the variable goes out of scope:
foreach (assignment_index, assignment) in self.var_assignments.iter().enumerate() { for (assignment_index, assignment) in self.var_assignments.iter().enumerate() {
match *self.path(assignment.path).loan_path { match *self.path(assignment.path).loan_path {
LpVar(id) => { LpVar(id) => {
let kill_id = tcx.region_maps.encl_scope(id); let kill_id = tcx.region_maps.encl_scope(id);
@ -559,7 +559,7 @@ impl FlowedMoveData {
ret = false; ret = false;
} }
} else { } else {
foreach &loan_path_index in opt_loan_path_index.iter() { for &loan_path_index in opt_loan_path_index.iter() {
let cont = do self.move_data.each_base_path(moved_path) |p| { let cont = do self.move_data.each_base_path(moved_path) |p| {
if p == loan_path_index { if p == loan_path_index {
// Scenario 3: some extension of `loan_path` // Scenario 3: some extension of `loan_path`


@ -53,7 +53,7 @@ pub fn construct(tcx: ty::ctxt,
impl CFGBuilder { impl CFGBuilder {
fn block(&mut self, blk: &ast::Block, pred: CFGIndex) -> CFGIndex { fn block(&mut self, blk: &ast::Block, pred: CFGIndex) -> CFGIndex {
let mut stmts_exit = pred; let mut stmts_exit = pred;
foreach &stmt in blk.stmts.iter() { for &stmt in blk.stmts.iter() {
stmts_exit = self.stmt(stmt, stmts_exit); stmts_exit = self.stmt(stmt, stmts_exit);
} }
@ -151,7 +151,7 @@ impl CFGBuilder {
self.pat(pats[0], pred) self.pat(pats[0], pred)
} else { } else {
let collect = self.add_dummy_node([]); let collect = self.add_dummy_node([]);
foreach &pat in pats.iter() { for &pat in pats.iter() {
let pat_exit = self.pat(pat, pred); let pat_exit = self.pat(pat, pred);
self.add_contained_edge(pat_exit, collect); self.add_contained_edge(pat_exit, collect);
} }
@ -297,7 +297,7 @@ impl CFGBuilder {
let expr_exit = self.add_node(expr.id, []); let expr_exit = self.add_node(expr.id, []);
let mut guard_exit = discr_exit; let mut guard_exit = discr_exit;
foreach arm in arms.iter() { for arm in arms.iter() {
guard_exit = self.opt_expr(arm.guard, guard_exit); // 2 guard_exit = self.opt_expr(arm.guard, guard_exit); // 2
let pats_exit = self.pats_any(arm.pats, guard_exit); // 3 let pats_exit = self.pats_any(arm.pats, guard_exit); // 3
let body_exit = self.block(&arm.body, pats_exit); // 4 let body_exit = self.block(&arm.body, pats_exit); // 4
@ -460,7 +460,7 @@ impl CFGBuilder {
assert!(!self.exit_map.contains_key(&id)); assert!(!self.exit_map.contains_key(&id));
let node = self.graph.add_node(CFGNodeData {id: id}); let node = self.graph.add_node(CFGNodeData {id: id});
self.exit_map.insert(id, node); self.exit_map.insert(id, node);
foreach &pred in preds.iter() { for &pred in preds.iter() {
self.add_contained_edge(pred, node); self.add_contained_edge(pred, node);
} }
node node
@ -498,7 +498,7 @@ impl CFGBuilder {
Some(_) => { Some(_) => {
match self.tcx.def_map.find(&expr.id) { match self.tcx.def_map.find(&expr.id) {
Some(&ast::def_label(loop_id)) => { Some(&ast::def_label(loop_id)) => {
foreach l in self.loop_scopes.iter() { for l in self.loop_scopes.iter() {
if l.loop_id == loop_id { if l.loop_id == loop_id {
return *l; return *l;
} }


@ -47,8 +47,8 @@ pub fn check_item(sess: Session,
check_item_recursion(sess, ast_map, def_map, it); check_item_recursion(sess, ast_map, def_map, it);
} }
item_enum(ref enum_definition, _) => { item_enum(ref enum_definition, _) => {
foreach var in (*enum_definition).variants.iter() { for var in (*enum_definition).variants.iter() {
foreach ex in var.node.disr_expr.iter() { for ex in var.node.disr_expr.iter() {
(v.visit_expr)(*ex, (true, v)); (v.visit_expr)(*ex, (true, v));
} }
} }


@ -57,7 +57,7 @@ pub fn check_expr(cx: @MatchCheckCtxt,
match ex.node { match ex.node {
expr_match(scrut, ref arms) => { expr_match(scrut, ref arms) => {
// First, check legality of move bindings. // First, check legality of move bindings.
foreach arm in arms.iter() { for arm in arms.iter() {
check_legality_of_move_bindings(cx, check_legality_of_move_bindings(cx,
arm.guard.is_some(), arm.guard.is_some(),
arm.pats); arm.pats);
@ -102,8 +102,8 @@ pub fn check_expr(cx: @MatchCheckCtxt,
// Check for unreachable patterns // Check for unreachable patterns
pub fn check_arms(cx: &MatchCheckCtxt, arms: &[arm]) { pub fn check_arms(cx: &MatchCheckCtxt, arms: &[arm]) {
let mut seen = ~[]; let mut seen = ~[];
foreach arm in arms.iter() { for arm in arms.iter() {
foreach pat in arm.pats.iter() { for pat in arm.pats.iter() {
// Check that we do not match against a static NaN (#6804) // Check that we do not match against a static NaN (#6804)
let pat_matches_nan: &fn(@pat) -> bool = |p| { let pat_matches_nan: &fn(@pat) -> bool = |p| {
@ -246,7 +246,7 @@ pub fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@pat]) -> useful {
} }
} }
ty::ty_enum(eid, _) => { ty::ty_enum(eid, _) => {
foreach va in (*ty::enum_variants(cx.tcx, eid)).iter() { for va in (*ty::enum_variants(cx.tcx, eid)).iter() {
match is_useful_specialized(cx, m, v, variant(va.id), match is_useful_specialized(cx, m, v, variant(va.id),
va.args.len(), left_ty) { va.args.len(), left_ty) {
not_useful => (), not_useful => (),
@ -264,7 +264,7 @@ pub fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@pat]) -> useful {
_ => max_len _ => max_len
} }
}; };
foreach n in iterator::range(0u, max_len + 1) { for n in iterator::range(0u, max_len + 1) {
match is_useful_specialized(cx, m, v, vec(n), n, left_ty) { match is_useful_specialized(cx, m, v, vec(n), n, left_ty) {
not_useful => (), not_useful => (),
ref u => return *u, ref u => return *u,
@ -368,16 +368,16 @@ pub fn missing_ctor(cx: &MatchCheckCtxt,
match ty::get(left_ty).sty { match ty::get(left_ty).sty {
ty::ty_box(_) | ty::ty_uniq(_) | ty::ty_rptr(*) | ty::ty_tup(_) | ty::ty_box(_) | ty::ty_uniq(_) | ty::ty_rptr(*) | ty::ty_tup(_) |
ty::ty_struct(*) => { ty::ty_struct(*) => {
foreach r in m.iter() { for r in m.iter() {
if !is_wild(cx, r[0]) { return None; } if !is_wild(cx, r[0]) { return None; }
} }
return Some(single); return Some(single);
} }
ty::ty_enum(eid, _) => { ty::ty_enum(eid, _) => {
let mut found = ~[]; let mut found = ~[];
foreach r in m.iter() { for r in m.iter() {
let r = pat_ctor_id(cx, r[0]); let r = pat_ctor_id(cx, r[0]);
foreach id in r.iter() { for id in r.iter() {
if !found.contains(id) { if !found.contains(id) {
found.push(*id); found.push(*id);
} }
@ -385,7 +385,7 @@ pub fn missing_ctor(cx: &MatchCheckCtxt,
} }
let variants = ty::enum_variants(cx.tcx, eid); let variants = ty::enum_variants(cx.tcx, eid);
if found.len() != (*variants).len() { if found.len() != (*variants).len() {
foreach v in (*variants).iter() { for v in (*variants).iter() {
if !found.iter().any(|x| x == &(variant(v.id))) { if !found.iter().any(|x| x == &(variant(v.id))) {
return Some(variant(v.id)); return Some(variant(v.id));
} }
@ -397,7 +397,7 @@ pub fn missing_ctor(cx: &MatchCheckCtxt,
ty::ty_bool => { ty::ty_bool => {
let mut true_found = false; let mut true_found = false;
let mut false_found = false; let mut false_found = false;
foreach r in m.iter() { for r in m.iter() {
match pat_ctor_id(cx, r[0]) { match pat_ctor_id(cx, r[0]) {
None => (), None => (),
Some(val(const_bool(true))) => true_found = true, Some(val(const_bool(true))) => true_found = true,
@ -437,7 +437,7 @@ pub fn missing_ctor(cx: &MatchCheckCtxt,
let mut found_slice = false; let mut found_slice = false;
let mut next = 0; let mut next = 0;
let mut missing = None; let mut missing = None;
foreach &(length, slice) in sorted_vec_lens.iter() { for &(length, slice) in sorted_vec_lens.iter() {
if length != next { if length != next {
missing = Some(next); missing = Some(next);
break; break;
@ -783,7 +783,7 @@ pub fn check_fn(cx: &MatchCheckCtxt,
(s, v): ((), (s, v): ((),
oldvisit::vt<()>)) { oldvisit::vt<()>)) {
oldvisit::visit_fn(kind, decl, body, sp, id, (s, v)); oldvisit::visit_fn(kind, decl, body, sp, id, (s, v));
foreach input in decl.inputs.iter() { for input in decl.inputs.iter() {
if is_refutable(cx, input.pat) { if is_refutable(cx, input.pat) {
cx.tcx.sess.span_err(input.pat.span, cx.tcx.sess.span_err(input.pat.span,
"refutable pattern in function argument"); "refutable pattern in function argument");
@ -836,7 +836,7 @@ pub fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
let def_map = tcx.def_map; let def_map = tcx.def_map;
let mut by_ref_span = None; let mut by_ref_span = None;
let mut any_by_move = false; let mut any_by_move = false;
foreach pat in pats.iter() { for pat in pats.iter() {
do pat_bindings(def_map, *pat) |bm, id, span, _path| { do pat_bindings(def_map, *pat) |bm, id, span, _path| {
match bm { match bm {
bind_by_ref(_) => { bind_by_ref(_) => {
@ -873,7 +873,7 @@ pub fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
}; };
if !any_by_move { return; } // pointless micro-optimization if !any_by_move { return; } // pointless micro-optimization
foreach pat in pats.iter() { for pat in pats.iter() {
do walk_pat(*pat) |p| { do walk_pat(*pat) |p| {
if pat_is_binding(def_map, p) { if pat_is_binding(def_map, p) {
match p.node { match p.node {


@ -176,7 +176,7 @@ pub fn lookup_variant_by_id(tcx: ty::ctxt,
variant_def: ast::def_id) variant_def: ast::def_id)
-> Option<@expr> { -> Option<@expr> {
fn variant_expr(variants: &[ast::variant], id: ast::NodeId) -> Option<@expr> { fn variant_expr(variants: &[ast::variant], id: ast::NodeId) -> Option<@expr> {
foreach variant in variants.iter() { for variant in variants.iter() {
if variant.node.id == id { if variant.node.id == id {
return variant.node.disr_expr; return variant.node.disr_expr;
} }


@ -266,10 +266,10 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
f: &fn(uint) -> bool) -> bool { f: &fn(uint) -> bool) -> bool {
//! Helper for iterating over the bits in a bit set. //! Helper for iterating over the bits in a bit set.
foreach (word_index, &word) in words.iter().enumerate() { for (word_index, &word) in words.iter().enumerate() {
if word != 0 { if word != 0 {
let base_index = word_index * uint::bits; let base_index = word_index * uint::bits;
foreach offset in range(0u, uint::bits) { for offset in range(0u, uint::bits) {
let bit = 1 << offset; let bit = 1 << offset;
if (word & bit) != 0 { if (word & bit) != 0 {
// NB: we round up the total number of bits // NB: we round up the total number of bits
@ -391,7 +391,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
self.merge_with_entry_set(blk.id, in_out); self.merge_with_entry_set(blk.id, in_out);
foreach &stmt in blk.stmts.iter() { for &stmt in blk.stmts.iter() {
self.walk_stmt(stmt, in_out, loop_scopes); self.walk_stmt(stmt, in_out, loop_scopes);
} }
@ -512,7 +512,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
loop_kind: ForLoop, loop_kind: ForLoop,
break_bits: reslice(in_out).to_owned() break_bits: reslice(in_out).to_owned()
}); });
foreach input in decl.inputs.iter() { for input in decl.inputs.iter() {
self.walk_pat(input.pat, func_bits, loop_scopes); self.walk_pat(input.pat, func_bits, loop_scopes);
} }
self.walk_block(body, func_bits, loop_scopes); self.walk_block(body, func_bits, loop_scopes);
@ -631,7 +631,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
// together the bits from each arm: // together the bits from each arm:
self.reset(in_out); self.reset(in_out);
foreach arm in arms.iter() { for arm in arms.iter() {
// in_out reflects the discr and all guards to date // in_out reflects the discr and all guards to date
self.walk_opt_expr(arm.guard, guards, loop_scopes); self.walk_opt_expr(arm.guard, guards, loop_scopes);
@ -706,7 +706,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
} }
ast::expr_struct(_, ref fields, with_expr) => { ast::expr_struct(_, ref fields, with_expr) => {
foreach field in fields.iter() { for field in fields.iter() {
self.walk_expr(field.expr, in_out, loop_scopes); self.walk_expr(field.expr, in_out, loop_scopes);
} }
self.walk_opt_expr(with_expr, in_out, loop_scopes); self.walk_opt_expr(with_expr, in_out, loop_scopes);
@ -767,10 +767,10 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
} }
ast::expr_inline_asm(ref inline_asm) => { ast::expr_inline_asm(ref inline_asm) => {
foreach &(_, expr) in inline_asm.inputs.iter() { for &(_, expr) in inline_asm.inputs.iter() {
self.walk_expr(expr, in_out, loop_scopes); self.walk_expr(expr, in_out, loop_scopes);
} }
foreach &(_, expr) in inline_asm.outputs.iter() { for &(_, expr) in inline_asm.outputs.iter() {
self.walk_expr(expr, in_out, loop_scopes); self.walk_expr(expr, in_out, loop_scopes);
} }
} }
@ -838,7 +838,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
exprs: &[@ast::expr], exprs: &[@ast::expr],
in_out: &mut [uint], in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) { loop_scopes: &mut ~[LoopScope]) {
foreach &expr in exprs.iter() { for &expr in exprs.iter() {
self.walk_expr(expr, in_out, loop_scopes); self.walk_expr(expr, in_out, loop_scopes);
} }
} }
@ -847,7 +847,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
opt_expr: Option<@ast::expr>, opt_expr: Option<@ast::expr>,
in_out: &mut [uint], in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) { loop_scopes: &mut ~[LoopScope]) {
foreach &expr in opt_expr.iter() { for &expr in opt_expr.iter() {
self.walk_expr(expr, in_out, loop_scopes); self.walk_expr(expr, in_out, loop_scopes);
} }
} }
@ -901,7 +901,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
// alternatives, so we must treat this like an N-way select // alternatives, so we must treat this like an N-way select
// statement. // statement.
let initial_state = reslice(in_out).to_owned(); let initial_state = reslice(in_out).to_owned();
foreach &pat in pats.iter() { for &pat in pats.iter() {
let mut temp = initial_state.clone(); let mut temp = initial_state.clone();
self.walk_pat(pat, temp, loop_scopes); self.walk_pat(pat, temp, loop_scopes);
join_bits(&self.dfcx.oper, temp, in_out); join_bits(&self.dfcx.oper, temp, in_out);
@ -949,7 +949,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
fn reset(&mut self, bits: &mut [uint]) { fn reset(&mut self, bits: &mut [uint]) {
let e = if self.dfcx.oper.initial_value() {uint::max_value} else {0}; let e = if self.dfcx.oper.initial_value() {uint::max_value} else {0};
foreach b in bits.mut_iter() { *b = e; } for b in bits.mut_iter() { *b = e; }
} }
fn add_to_entry_set(&mut self, id: ast::NodeId, pred_bits: &[uint]) { fn add_to_entry_set(&mut self, id: ast::NodeId, pred_bits: &[uint]) {
@ -997,9 +997,9 @@ fn bits_to_str(words: &[uint]) -> ~str {
// Note: this is a little endian printout of bytes. // Note: this is a little endian printout of bytes.
foreach &word in words.iter() { for &word in words.iter() {
let mut v = word; let mut v = word;
foreach _ in range(0u, uint::bytes) { for _ in range(0u, uint::bytes) {
result.push_char(sep); result.push_char(sep);
result.push_str(fmt!("%02x", v & 0xFF)); result.push_str(fmt!("%02x", v & 0xFF));
v >>= 8; v >>= 8;
@ -1026,7 +1026,7 @@ fn bitwise(out_vec: &mut [uint],
op: &fn(uint, uint) -> uint) -> bool { op: &fn(uint, uint) -> uint) -> bool {
assert_eq!(out_vec.len(), in_vec.len()); assert_eq!(out_vec.len(), in_vec.len());
let mut changed = false; let mut changed = false;
foreach i in range(0u, out_vec.len()) { for i in range(0u, out_vec.len()) {
let old_val = out_vec[i]; let old_val = out_vec[i];
let new_val = op(old_val, in_vec[i]); let new_val = op(old_val, in_vec[i]);
out_vec[i] = new_val; out_vec[i] = new_val;


@ -138,7 +138,7 @@ fn configure_main(ctxt: @mut EntryContext) {
but you have one or more functions named 'main' that are not \ but you have one or more functions named 'main' that are not \
defined at the crate level. Either move the definition or \ defined at the crate level. Either move the definition or \
attach the `#[main]` attribute to override this behavior."); attach the `#[main]` attribute to override this behavior.");
foreach &(_, span) in this.non_main_fns.iter() { for &(_, span) in this.non_main_fns.iter() {
this.session.span_note(span, "here is a function named 'main'"); this.session.span_note(span, "here is a function named 'main'");
} }
} }


@ -247,7 +247,7 @@ impl<N,E> Graph<N,E> {
while changed { while changed {
changed = false; changed = false;
iteration += 1; iteration += 1;
foreach (i, edge) in self.edges.iter().enumerate() { for (i, edge) in self.edges.iter().enumerate() {
changed |= op(iteration, EdgeIndex(i), edge); changed |= op(iteration, EdgeIndex(i), edge);
} }
} }


@ -238,7 +238,7 @@ fn check_fn(
// Check kinds on free variables: // Check kinds on free variables:
do with_appropriate_checker(cx, fn_id) |chk| { do with_appropriate_checker(cx, fn_id) |chk| {
let r = freevars::get_freevars(cx.tcx, fn_id); let r = freevars::get_freevars(cx.tcx, fn_id);
foreach fv in r.iter() { for fv in r.iter() {
chk(cx, *fv); chk(cx, *fv);
} }
} }
@ -256,7 +256,7 @@ pub fn check_expr(e: @expr, (cx, v): (Context, oldvisit::vt<Context>)) {
}; };
{ {
let r = cx.tcx.node_type_substs.find(&type_parameter_id); let r = cx.tcx.node_type_substs.find(&type_parameter_id);
foreach ts in r.iter() { for ts in r.iter() {
let type_param_defs = match e.node { let type_param_defs = match e.node {
expr_path(_) => { expr_path(_) => {
let did = ast_util::def_id_of_def(cx.tcx.def_map.get_copy(&e.id)); let did = ast_util::def_id_of_def(cx.tcx.def_map.get_copy(&e.id));
@ -280,7 +280,7 @@ pub fn check_expr(e: @expr, (cx, v): (Context, oldvisit::vt<Context>)) {
ts.repr(cx.tcx), ts.repr(cx.tcx),
type_param_defs.repr(cx.tcx)); type_param_defs.repr(cx.tcx));
} }
foreach (&ty, type_param_def) in ts.iter().zip(type_param_defs.iter()) { for (&ty, type_param_def) in ts.iter().zip(type_param_defs.iter()) {
check_typaram_bounds(cx, type_parameter_id, e.span, ty, type_param_def) check_typaram_bounds(cx, type_parameter_id, e.span, ty, type_param_def)
} }
} }
@ -318,11 +318,11 @@ fn check_ty(aty: &Ty, (cx, v): (Context, oldvisit::vt<Context>)) {
match aty.node { match aty.node {
ty_path(_, _, id) => { ty_path(_, _, id) => {
let r = cx.tcx.node_type_substs.find(&id); let r = cx.tcx.node_type_substs.find(&id);
foreach ts in r.iter() { for ts in r.iter() {
let did = ast_util::def_id_of_def(cx.tcx.def_map.get_copy(&id)); let did = ast_util::def_id_of_def(cx.tcx.def_map.get_copy(&id));
let type_param_defs = let type_param_defs =
ty::lookup_item_type(cx.tcx, did).generics.type_param_defs; ty::lookup_item_type(cx.tcx, did).generics.type_param_defs;
foreach (&ty, type_param_def) in ts.iter().zip(type_param_defs.iter()) { for (&ty, type_param_def) in ts.iter().zip(type_param_defs.iter()) {
check_typaram_bounds(cx, aty.id, aty.span, ty, type_param_def) check_typaram_bounds(cx, aty.id, aty.span, ty, type_param_def)
} }
} }


@ -412,7 +412,7 @@ impl<'self> LanguageItemCollector<'self> {
let this: *mut LanguageItemCollector = &mut *self; let this: *mut LanguageItemCollector = &mut *self;
visit_crate(self.crate, ((), mk_simple_visitor(@SimpleVisitor { visit_crate(self.crate, ((), mk_simple_visitor(@SimpleVisitor {
visit_item: |item| { visit_item: |item| {
foreach attribute in item.attrs.iter() { for attribute in item.attrs.iter() {
unsafe { unsafe {
(*this).match_and_collect_meta_item( (*this).match_and_collect_meta_item(
local_def(item.id), local_def(item.id),


@ -286,7 +286,7 @@ static lint_table: &'static [(&'static str, LintSpec)] = &[
*/ */
pub fn get_lint_dict() -> LintDict { pub fn get_lint_dict() -> LintDict {
let mut map = HashMap::new(); let mut map = HashMap::new();
foreach &(k, v) in lint_table.iter() { for &(k, v) in lint_table.iter() {
map.insert(k, v); map.insert(k, v);
} }
return map; return map;
@ -354,7 +354,7 @@ impl Context {
} }
fn lint_to_str(&self, lint: lint) -> &'static str { fn lint_to_str(&self, lint: lint) -> &'static str {
foreach (k, v) in self.dict.iter() { for (k, v) in self.dict.iter() {
if v.lint == lint { if v.lint == lint {
return *k; return *k;
} }
@ -390,7 +390,7 @@ impl Context {
allow => fail!(), allow => fail!(),
} }
foreach &span in note.iter() { for &span in note.iter() {
self.tcx.sess.span_note(span, "lint level defined here"); self.tcx.sess.span_note(span, "lint level defined here");
} }
} }
@ -474,7 +474,7 @@ impl Context {
// pair instead of just one visitor. // pair instead of just one visitor.
match n { match n {
Item(it) => { Item(it) => {
foreach visitor in self.visitors.iter() { for visitor in self.visitors.iter() {
match *visitor { match *visitor {
OldVisitor(orig, stopping) => { OldVisitor(orig, stopping) => {
(orig.visit_item)(it, (self, stopping)); (orig.visit_item)(it, (self, stopping));
@ -486,7 +486,7 @@ impl Context {
} }
} }
Crate(c) => { Crate(c) => {
foreach visitor in self.visitors.iter() { for visitor in self.visitors.iter() {
match *visitor { match *visitor {
OldVisitor(_, stopping) => { OldVisitor(_, stopping) => {
oldvisit::visit_crate(c, (self, stopping)) oldvisit::visit_crate(c, (self, stopping))
@ -501,7 +501,7 @@ impl Context {
// item_stopping_visitor has overridden visit_fn(&fk_method(... )) // item_stopping_visitor has overridden visit_fn(&fk_method(... ))
// to be a no-op, so manually invoke visit_fn. // to be a no-op, so manually invoke visit_fn.
Method(m) => { Method(m) => {
foreach visitor in self.visitors.iter() { for visitor in self.visitors.iter() {
match *visitor { match *visitor {
OldVisitor(orig, stopping) => { OldVisitor(orig, stopping) => {
let fk = oldvisit::fk_method(m.ident, let fk = oldvisit::fk_method(m.ident,
@ -536,9 +536,9 @@ pub fn each_lint(sess: session::Session,
attrs: &[ast::Attribute], attrs: &[ast::Attribute],
f: &fn(@ast::MetaItem, level, @str) -> bool) -> bool { f: &fn(@ast::MetaItem, level, @str) -> bool) -> bool {
let xs = [allow, warn, deny, forbid]; let xs = [allow, warn, deny, forbid];
foreach &level in xs.iter() { for &level in xs.iter() {
let level_name = level_to_str(level); let level_name = level_to_str(level);
foreach attr in attrs.iter().filter(|m| level_name == m.name()) { for attr in attrs.iter().filter(|m| level_name == m.name()) {
let meta = attr.node.value; let meta = attr.node.value;
let metas = match meta.node { let metas = match meta.node {
ast::MetaList(_, ref metas) => metas, ast::MetaList(_, ref metas) => metas,
@ -547,7 +547,7 @@ pub fn each_lint(sess: session::Session,
loop; loop;
} }
}; };
foreach meta in metas.iter() { for meta in metas.iter() {
match meta.node { match meta.node {
ast::MetaWord(lintname) => { ast::MetaWord(lintname) => {
if !f(*meta, level, lintname) { if !f(*meta, level, lintname) {
@ -751,7 +751,7 @@ fn check_item_ctypes(cx: &Context, it: &ast::item) {
} }
fn check_foreign_fn(cx: &Context, decl: &ast::fn_decl) { fn check_foreign_fn(cx: &Context, decl: &ast::fn_decl) {
foreach input in decl.inputs.iter() { for input in decl.inputs.iter() {
check_ty(cx, &input.ty); check_ty(cx, &input.ty);
} }
check_ty(cx, &decl.output) check_ty(cx, &decl.output)
@ -759,7 +759,7 @@ fn check_item_ctypes(cx: &Context, it: &ast::item) {
match it.node { match it.node {
ast::item_foreign_mod(ref nmod) if !nmod.abis.is_intrinsic() => { ast::item_foreign_mod(ref nmod) if !nmod.abis.is_intrinsic() => {
foreach ni in nmod.items.iter() { for ni in nmod.items.iter() {
match ni.node { match ni.node {
ast::foreign_item_fn(ref decl, _) => { ast::foreign_item_fn(ref decl, _) => {
check_foreign_fn(cx, decl); check_foreign_fn(cx, decl);
@ -801,7 +801,7 @@ fn check_type_for_lint(cx: &Context, lint: lint, span: span, ty: ty::t) {
fn check_type(cx: &Context, span: span, ty: ty::t) { fn check_type(cx: &Context, span: span, ty: ty::t) {
let xs = [managed_heap_memory, owned_heap_memory, heap_memory]; let xs = [managed_heap_memory, owned_heap_memory, heap_memory];
foreach lint in xs.iter() { for lint in xs.iter() {
check_type_for_lint(cx, *lint, span, ty); check_type_for_lint(cx, *lint, span, ty);
} }
} }
@ -820,7 +820,7 @@ fn check_item_heap(cx: &Context, it: &ast::item) {
// If it's a struct, we also have to check the fields' types // If it's a struct, we also have to check the fields' types
match it.node { match it.node {
ast::item_struct(struct_def, _) => { ast::item_struct(struct_def, _) => {
foreach struct_field in struct_def.fields.iter() { for struct_field in struct_def.fields.iter() {
check_type(cx, struct_field.span, check_type(cx, struct_field.span,
ty::node_id_to_type(cx.tcx, ty::node_id_to_type(cx.tcx,
struct_field.node.id)); struct_field.node.id));
@ -892,7 +892,7 @@ fn check_item_non_camel_case_types(cx: &Context, it: &ast::item) {
} }
ast::item_enum(ref enum_definition, _) => { ast::item_enum(ref enum_definition, _) => {
check_case(cx, "type", it.ident, it.span); check_case(cx, "type", it.ident, it.span);
foreach variant in enum_definition.variants.iter() { for variant in enum_definition.variants.iter() {
check_case(cx, "variant", variant.node.name, variant.span); check_case(cx, "variant", variant.node.name, variant.span);
} }
} }
@ -955,7 +955,7 @@ fn lint_unused_mut() -> oldvisit::vt<@mut Context> {
} }
fn visit_fn_decl(cx: &Context, fd: &ast::fn_decl) { fn visit_fn_decl(cx: &Context, fd: &ast::fn_decl) {
foreach arg in fd.inputs.iter() { for arg in fd.inputs.iter() {
if arg.is_mutbl { if arg.is_mutbl {
check_pat(cx, arg.pat); check_pat(cx, arg.pat);
} }
@ -994,7 +994,7 @@ fn lint_session(cx: @mut Context) -> @visit::Visitor<()> {
match cx.tcx.sess.lints.pop(&id) { match cx.tcx.sess.lints.pop(&id) {
None => {}, None => {},
Some(l) => { Some(l) => {
foreach (lint, span, msg) in l.consume_iter() { for (lint, span, msg) in l.consume_iter() {
cx.span_lint(lint, span, msg) cx.span_lint(lint, span, msg)
} }
} }
@ -1094,7 +1094,7 @@ fn lint_missing_doc() -> oldvisit::vt<@mut Context> {
ast::item_struct(sdef, _) if it.vis == ast::public => { ast::item_struct(sdef, _) if it.vis == ast::public => {
check_attrs(cx, it.attrs, it.span, check_attrs(cx, it.attrs, it.span,
"missing documentation for a struct"); "missing documentation for a struct");
foreach field in sdef.fields.iter() { for field in sdef.fields.iter() {
match field.node.kind { match field.node.kind {
ast::named_field(_, vis) if vis != ast::private => { ast::named_field(_, vis) if vis != ast::private => {
check_attrs(cx, field.node.attrs, field.span, check_attrs(cx, field.node.attrs, field.span,
@ -1137,12 +1137,12 @@ pub fn check_crate(tcx: ty::ctxt, crate: @ast::Crate) {
}; };
// Install defaults. // Install defaults.
foreach (_, spec) in cx.dict.iter() { for (_, spec) in cx.dict.iter() {
cx.set_level(spec.lint, spec.default, Default); cx.set_level(spec.lint, spec.default, Default);
} }
// Install command-line options, overriding defaults. // Install command-line options, overriding defaults.
foreach &(lint, level) in tcx.sess.opts.lint_opts.iter() { for &(lint, level) in tcx.sess.opts.lint_opts.iter() {
cx.set_level(lint, level, CommandLine); cx.set_level(lint, level, CommandLine);
} }
@ -1211,8 +1211,8 @@ pub fn check_crate(tcx: ty::ctxt, crate: @ast::Crate) {
// If we missed any lints added to the session, then there's a bug somewhere // If we missed any lints added to the session, then there's a bug somewhere
// in the iteration code. // in the iteration code.
foreach (id, v) in tcx.sess.lints.iter() { for (id, v) in tcx.sess.lints.iter() {
foreach t in v.iter() { for t in v.iter() {
match *t { match *t {
(lint, span, ref msg) => (lint, span, ref msg) =>
tcx.sess.span_bug(span, fmt!("unprocessed lint %? at %s: \ tcx.sess.span_bug(span, fmt!("unprocessed lint %? at %s: \


@ -360,7 +360,7 @@ fn visit_fn(fk: &oldvisit::fn_kind,
debug!("creating fn_maps: %x", transmute(&*fn_maps)); debug!("creating fn_maps: %x", transmute(&*fn_maps));
} }
foreach arg in decl.inputs.iter() { for arg in decl.inputs.iter() {
do pat_util::pat_bindings(this.tcx.def_map, arg.pat) do pat_util::pat_bindings(this.tcx.def_map, arg.pat)
|_bm, arg_id, _x, path| { |_bm, arg_id, _x, path| {
debug!("adding argument %d", arg_id); debug!("adding argument %d", arg_id);
@ -436,7 +436,7 @@ fn visit_local(local: @Local, (this, vt): (@mut IrMaps, vt<@mut IrMaps>)) {
fn visit_arm(arm: &arm, (this, vt): (@mut IrMaps, vt<@mut IrMaps>)) { fn visit_arm(arm: &arm, (this, vt): (@mut IrMaps, vt<@mut IrMaps>)) {
let def_map = this.tcx.def_map; let def_map = this.tcx.def_map;
foreach pat in arm.pats.iter() { for pat in arm.pats.iter() {
do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| { do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| {
debug!("adding local variable %d from match with bm %?", debug!("adding local variable %d from match with bm %?",
p_id, bm); p_id, bm);
@ -475,7 +475,7 @@ fn visit_expr(expr: @expr, (this, vt): (@mut IrMaps, vt<@mut IrMaps>)) {
// construction site. // construction site.
let cvs = this.capture_map.get(&expr.id); let cvs = this.capture_map.get(&expr.id);
let mut call_caps = ~[]; let mut call_caps = ~[];
foreach cv in cvs.iter() { for cv in cvs.iter() {
match moves::moved_variable_node_id_from_def(cv.def) { match moves::moved_variable_node_id_from_def(cv.def) {
Some(rv) => { Some(rv) => {
let cv_ln = this.add_live_node(FreeVarNode(cv.span)); let cv_ln = this.add_live_node(FreeVarNode(cv.span));
@ -710,7 +710,7 @@ impl Liveness {
pub fn indices(&self, ln: LiveNode, op: &fn(uint)) { pub fn indices(&self, ln: LiveNode, op: &fn(uint)) {
let node_base_idx = self.idx(ln, Variable(0)); let node_base_idx = self.idx(ln, Variable(0));
foreach var_idx in range(0u, self.ir.num_vars) { for var_idx in range(0u, self.ir.num_vars) {
op(node_base_idx + var_idx) op(node_base_idx + var_idx)
} }
} }
@ -721,7 +721,7 @@ impl Liveness {
op: &fn(uint, uint)) { op: &fn(uint, uint)) {
let node_base_idx = self.idx(ln, Variable(0u)); let node_base_idx = self.idx(ln, Variable(0u));
let succ_base_idx = self.idx(succ_ln, Variable(0u)); let succ_base_idx = self.idx(succ_ln, Variable(0u));
foreach var_idx in range(0u, self.ir.num_vars) { for var_idx in range(0u, self.ir.num_vars) {
op(node_base_idx + var_idx, succ_base_idx + var_idx); op(node_base_idx + var_idx, succ_base_idx + var_idx);
} }
} }
@ -731,7 +731,7 @@ impl Liveness {
ln: LiveNode, ln: LiveNode,
test: &fn(uint) -> LiveNode) { test: &fn(uint) -> LiveNode) {
let node_base_idx = self.idx(ln, Variable(0)); let node_base_idx = self.idx(ln, Variable(0));
foreach var_idx in range(0u, self.ir.num_vars) { for var_idx in range(0u, self.ir.num_vars) {
let idx = node_base_idx + var_idx; let idx = node_base_idx + var_idx;
if test(idx).is_valid() { if test(idx).is_valid() {
wr.write_str(" "); wr.write_str(" ");
@ -900,7 +900,7 @@ impl Liveness {
// hack to skip the loop unless debug! is enabled: // hack to skip the loop unless debug! is enabled:
debug!("^^ liveness computation results for body %d (entry=%s)", debug!("^^ liveness computation results for body %d (entry=%s)",
{ {
foreach ln_idx in range(0u, self.ir.num_live_nodes) { for ln_idx in range(0u, self.ir.num_live_nodes) {
debug!("%s", self.ln_str(LiveNode(ln_idx))); debug!("%s", self.ln_str(LiveNode(ln_idx)));
} }
body.id body.id
@ -1084,7 +1084,7 @@ impl Liveness {
let ln = self.live_node(expr.id, expr.span); let ln = self.live_node(expr.id, expr.span);
self.init_empty(ln, succ); self.init_empty(ln, succ);
let mut first_merge = true; let mut first_merge = true;
foreach arm in arms.iter() { for arm in arms.iter() {
let body_succ = let body_succ =
self.propagate_through_block(&arm.body, succ); self.propagate_through_block(&arm.body, succ);
let guard_succ = let guard_succ =
@ -1461,12 +1461,12 @@ fn check_expr(expr: @expr, (this, vt): (@Liveness, vt<@Liveness>)) {
} }
expr_inline_asm(ref ia) => { expr_inline_asm(ref ia) => {
foreach &(_, input) in ia.inputs.iter() { for &(_, input) in ia.inputs.iter() {
(vt.visit_expr)(input, (this, vt)); (vt.visit_expr)(input, (this, vt));
} }
// Output operands must be lvalues // Output operands must be lvalues
foreach &(_, out) in ia.outputs.iter() { for &(_, out) in ia.outputs.iter() {
match out.node { match out.node {
expr_addr_of(_, inner) => { expr_addr_of(_, inner) => {
this.check_lvalue(inner, vt); this.check_lvalue(inner, vt);
@ -1606,7 +1606,7 @@ impl Liveness {
} }
pub fn warn_about_unused_args(&self, decl: &fn_decl, entry_ln: LiveNode) { pub fn warn_about_unused_args(&self, decl: &fn_decl, entry_ln: LiveNode) {
foreach arg in decl.inputs.iter() { for arg in decl.inputs.iter() {
do pat_util::pat_bindings(self.tcx.def_map, arg.pat) do pat_util::pat_bindings(self.tcx.def_map, arg.pat)
|_bm, p_id, sp, _n| { |_bm, p_id, sp, _n| {
let var = self.variable(p_id, sp); let var = self.variable(p_id, sp);
@ -1631,7 +1631,7 @@ impl Liveness {
-> bool { -> bool {
if !self.used_on_entry(ln, var) { if !self.used_on_entry(ln, var) {
let r = self.should_warn(var); let r = self.should_warn(var);
foreach name in r.iter() { for name in r.iter() {
// annoying: for parameters in funcs like `fn(x: int) // annoying: for parameters in funcs like `fn(x: int)
// {ret}`, there is only one node, so asking about // {ret}`, there is only one node, so asking about
@ -1664,7 +1664,7 @@ impl Liveness {
var: Variable) { var: Variable) {
if self.live_on_exit(ln, var).is_none() { if self.live_on_exit(ln, var).is_none() {
let r = self.should_warn(var); let r = self.should_warn(var);
foreach name in r.iter() { for name in r.iter() {
self.tcx.sess.add_lint(dead_assignment, id, sp, self.tcx.sess.add_lint(dead_assignment, id, sp,
fmt!("value assigned to `%s` is never read", *name)); fmt!("value assigned to `%s` is never read", *name));
} }


@ -375,7 +375,7 @@ impl mem_categorization_ctxt {
pub fn cat_expr_autoderefd(&self, expr: @ast::expr, autoderefs: uint) pub fn cat_expr_autoderefd(&self, expr: @ast::expr, autoderefs: uint)
-> cmt { -> cmt {
let mut cmt = self.cat_expr_unadjusted(expr); let mut cmt = self.cat_expr_unadjusted(expr);
foreach deref in range(1u, autoderefs + 1) { for deref in range(1u, autoderefs + 1) {
cmt = self.cat_deref(expr, cmt, deref); cmt = self.cat_deref(expr, cmt, deref);
} }
return cmt; return cmt;
@ -906,7 +906,7 @@ impl mem_categorization_ctxt {
} }
}; };
foreach (i, &subpat) in subpats.iter().enumerate() { for (i, &subpat) in subpats.iter().enumerate() {
let subpat_ty = self.pat_ty(subpat); // see (*) let subpat_ty = self.pat_ty(subpat); // see (*)
let subcmt = let subcmt =
@ -919,7 +919,7 @@ impl mem_categorization_ctxt {
} }
Some(&ast::def_fn(*)) | Some(&ast::def_fn(*)) |
Some(&ast::def_struct(*)) => { Some(&ast::def_struct(*)) => {
foreach (i, &subpat) in subpats.iter().enumerate() { for (i, &subpat) in subpats.iter().enumerate() {
let subpat_ty = self.pat_ty(subpat); // see (*) let subpat_ty = self.pat_ty(subpat); // see (*)
let cmt_field = let cmt_field =
self.cat_imm_interior( self.cat_imm_interior(
@ -929,7 +929,7 @@ impl mem_categorization_ctxt {
} }
} }
Some(&ast::def_static(*)) => { Some(&ast::def_static(*)) => {
foreach &subpat in subpats.iter() { for &subpat in subpats.iter() {
self.cat_pattern(cmt, subpat, |x,y| op(x,y)); self.cat_pattern(cmt, subpat, |x,y| op(x,y));
} }
} }
@ -951,7 +951,7 @@ impl mem_categorization_ctxt {
ast::pat_struct(_, ref field_pats, _) => { ast::pat_struct(_, ref field_pats, _) => {
// {f1: p1, ..., fN: pN} // {f1: p1, ..., fN: pN}
foreach fp in field_pats.iter() { for fp in field_pats.iter() {
let field_ty = self.pat_ty(fp.pat); // see (*) let field_ty = self.pat_ty(fp.pat); // see (*)
let cmt_field = self.cat_field(pat, cmt, fp.ident, field_ty); let cmt_field = self.cat_field(pat, cmt, fp.ident, field_ty);
self.cat_pattern(cmt_field, fp.pat, |x,y| op(x,y)); self.cat_pattern(cmt_field, fp.pat, |x,y| op(x,y));
@ -960,7 +960,7 @@ impl mem_categorization_ctxt {
ast::pat_tup(ref subpats) => { ast::pat_tup(ref subpats) => {
// (p1, ..., pN) // (p1, ..., pN)
foreach (i, &subpat) in subpats.iter().enumerate() { for (i, &subpat) in subpats.iter().enumerate() {
let subpat_ty = self.pat_ty(subpat); // see (*) let subpat_ty = self.pat_ty(subpat); // see (*)
let subcmt = let subcmt =
self.cat_imm_interior( self.cat_imm_interior(
@ -979,15 +979,15 @@ impl mem_categorization_ctxt {
ast::pat_vec(ref before, slice, ref after) => { ast::pat_vec(ref before, slice, ref after) => {
let elt_cmt = self.cat_index(pat, cmt, 0); let elt_cmt = self.cat_index(pat, cmt, 0);
foreach &before_pat in before.iter() { for &before_pat in before.iter() {
self.cat_pattern(elt_cmt, before_pat, |x,y| op(x,y)); self.cat_pattern(elt_cmt, before_pat, |x,y| op(x,y));
} }
foreach &slice_pat in slice.iter() { for &slice_pat in slice.iter() {
let slice_ty = self.pat_ty(slice_pat); let slice_ty = self.pat_ty(slice_pat);
let slice_cmt = self.cat_rvalue_node(pat, slice_ty); let slice_cmt = self.cat_rvalue_node(pat, slice_ty);
self.cat_pattern(slice_cmt, slice_pat, |x,y| op(x,y)); self.cat_pattern(slice_cmt, slice_pat, |x,y| op(x,y));
} }
foreach &after_pat in after.iter() { for &after_pat in after.iter() {
self.cat_pattern(elt_cmt, after_pat, |x,y| op(x,y)); self.cat_pattern(elt_cmt, after_pat, |x,y| op(x,y));
} }
} }
@ -1076,7 +1076,7 @@ pub fn field_mutbl(tcx: ty::ctxt,
match ty::get(base_ty).sty { match ty::get(base_ty).sty {
ty::ty_struct(did, _) => { ty::ty_struct(did, _) => {
let r = ty::lookup_struct_fields(tcx, did); let r = ty::lookup_struct_fields(tcx, did);
foreach fld in r.iter() { for fld in r.iter() {
if fld.ident == f_name { if fld.ident == f_name {
return Some(ast::m_imm); return Some(ast::m_imm);
} }
@ -1086,7 +1086,7 @@ pub fn field_mutbl(tcx: ty::ctxt,
match tcx.def_map.get_copy(&node_id) { match tcx.def_map.get_copy(&node_id) {
ast::def_variant(_, variant_id) => { ast::def_variant(_, variant_id) => {
let r = ty::lookup_struct_fields(tcx, variant_id); let r = ty::lookup_struct_fields(tcx, variant_id);
foreach fld in r.iter() { for fld in r.iter() {
if fld.ident == f_name { if fld.ident == f_name {
return Some(ast::m_imm); return Some(ast::m_imm);
} }


@ -231,7 +231,7 @@ fn compute_modes_for_local<'a>(local: @Local,
(cx, v): (VisitContext, (cx, v): (VisitContext,
vt<VisitContext>)) { vt<VisitContext>)) {
cx.use_pat(local.pat); cx.use_pat(local.pat);
foreach &init in local.init.iter() { for &init in local.init.iter() {
cx.use_expr(init, Read, v); cx.use_expr(init, Read, v);
} }
} }
@ -243,7 +243,7 @@ fn compute_modes_for_fn(fk: &oldvisit::fn_kind,
id: NodeId, id: NodeId,
(cx, v): (VisitContext, (cx, v): (VisitContext,
vt<VisitContext>)) { vt<VisitContext>)) {
foreach a in decl.inputs.iter() { for a in decl.inputs.iter() {
cx.use_pat(a.pat); cx.use_pat(a.pat);
} }
oldvisit::visit_fn(fk, decl, body, span, id, (cx, v)); oldvisit::visit_fn(fk, decl, body, span, id, (cx, v));
@ -258,7 +258,7 @@ fn compute_modes_for_expr(expr: @expr,
impl VisitContext { impl VisitContext {
pub fn consume_exprs(&self, exprs: &[@expr], visitor: vt<VisitContext>) { pub fn consume_exprs(&self, exprs: &[@expr], visitor: vt<VisitContext>) {
foreach expr in exprs.iter() { for expr in exprs.iter() {
self.consume_expr(*expr, visitor); self.consume_expr(*expr, visitor);
} }
} }
@ -289,11 +289,11 @@ impl VisitContext {
debug!("consume_block(blk.id=%?)", blk.id); debug!("consume_block(blk.id=%?)", blk.id);
foreach stmt in blk.stmts.iter() { for stmt in blk.stmts.iter() {
(visitor.visit_stmt)(*stmt, (*self, visitor)); (visitor.visit_stmt)(*stmt, (*self, visitor));
} }
foreach tail_expr in blk.expr.iter() { for tail_expr in blk.expr.iter() {
self.consume_expr(*tail_expr, visitor); self.consume_expr(*tail_expr, visitor);
} }
} }
@ -329,7 +329,7 @@ impl VisitContext {
Move => { Move => {
let def = self.tcx.def_map.get_copy(&expr.id); let def = self.tcx.def_map.get_copy(&expr.id);
let r = moved_variable_node_id_from_def(def); let r = moved_variable_node_id_from_def(def);
foreach &id in r.iter() { for &id in r.iter() {
self.move_maps.moved_variables_set.insert(id); self.move_maps.moved_variables_set.insert(id);
} }
} }
@ -393,11 +393,11 @@ impl VisitContext {
} }
expr_struct(_, ref fields, opt_with) => { expr_struct(_, ref fields, opt_with) => {
foreach field in fields.iter() { for field in fields.iter() {
self.consume_expr(field.expr, visitor); self.consume_expr(field.expr, visitor);
} }
foreach with_expr in opt_with.iter() { for with_expr in opt_with.iter() {
// If there are any fields whose type is move-by-default, // If there are any fields whose type is move-by-default,
// then `with` is consumed, otherwise it is only read // then `with` is consumed, otherwise it is only read
let with_ty = ty::expr_ty(self.tcx, *with_expr); let with_ty = ty::expr_ty(self.tcx, *with_expr);
@ -436,7 +436,7 @@ impl VisitContext {
expr_if(cond_expr, ref then_blk, opt_else_expr) => { expr_if(cond_expr, ref then_blk, opt_else_expr) => {
self.consume_expr(cond_expr, visitor); self.consume_expr(cond_expr, visitor);
self.consume_block(then_blk, visitor); self.consume_block(then_blk, visitor);
foreach else_expr in opt_else_expr.iter() { for else_expr in opt_else_expr.iter() {
self.consume_expr(*else_expr, visitor); self.consume_expr(*else_expr, visitor);
} }
} }
@ -444,7 +444,7 @@ impl VisitContext {
expr_match(discr, ref arms) => { expr_match(discr, ref arms) => {
// We must do this first so that `arms_have_by_move_bindings` // We must do this first so that `arms_have_by_move_bindings`
// below knows which bindings are moves. // below knows which bindings are moves.
foreach arm in arms.iter() { for arm in arms.iter() {
self.consume_arm(arm, visitor); self.consume_arm(arm, visitor);
} }
@ -511,7 +511,7 @@ impl VisitContext {
} }
expr_ret(ref opt_expr) => { expr_ret(ref opt_expr) => {
foreach expr in opt_expr.iter() { for expr in opt_expr.iter() {
self.consume_expr(*expr, visitor); self.consume_expr(*expr, visitor);
} }
} }
@ -547,7 +547,7 @@ impl VisitContext {
} }
expr_fn_block(ref decl, ref body) => { expr_fn_block(ref decl, ref body) => {
foreach a in decl.inputs.iter() { for a in decl.inputs.iter() {
self.use_pat(a.pat); self.use_pat(a.pat);
} }
let cap_vars = self.compute_captures(expr.id); let cap_vars = self.compute_captures(expr.id);
@ -581,7 +581,7 @@ impl VisitContext {
// for overloaded operatrs, we are always passing in a // for overloaded operatrs, we are always passing in a
// borrowed pointer, so it's always read mode: // borrowed pointer, so it's always read mode:
foreach arg_expr in arg_exprs.iter() { for arg_expr in arg_exprs.iter() {
self.use_expr(*arg_expr, Read, visitor); self.use_expr(*arg_expr, Read, visitor);
} }
@ -589,11 +589,11 @@ impl VisitContext {
} }
pub fn consume_arm(&self, arm: &arm, visitor: vt<VisitContext>) { pub fn consume_arm(&self, arm: &arm, visitor: vt<VisitContext>) {
foreach pat in arm.pats.iter() { for pat in arm.pats.iter() {
self.use_pat(*pat); self.use_pat(*pat);
} }
foreach guard in arm.guard.iter() { for guard in arm.guard.iter() {
self.consume_expr(*guard, visitor); self.consume_expr(*guard, visitor);
} }
@ -640,7 +640,7 @@ impl VisitContext {
arg_exprs: &[@expr], arg_exprs: &[@expr],
visitor: vt<VisitContext>) { visitor: vt<VisitContext>) {
//! Uses the argument expressions. //! Uses the argument expressions.
foreach arg_expr in arg_exprs.iter() { for arg_expr in arg_exprs.iter() {
self.use_fn_arg(*arg_expr, visitor); self.use_fn_arg(*arg_expr, visitor);
} }
} }
@ -655,8 +655,8 @@ impl VisitContext {
arms: &[arm]) arms: &[arm])
-> Option<@pat> { -> Option<@pat> {
let mut ret = None; let mut ret = None;
foreach arm in arms.iter() { for arm in arms.iter() {
foreach &pat in arm.pats.iter() { for &pat in arm.pats.iter() {
let cont = do ast_util::walk_pat(pat) |p| { let cont = do ast_util::walk_pat(pat) |p| {
if moves_map.contains(&p.id) { if moves_map.contains(&p.id) {
ret = Some(p); ret = Some(p);


@ -50,7 +50,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
*count += 1; *count += 1;
} }
item_impl(_, _, _, ref methods) => { item_impl(_, _, _, ref methods) => {
foreach method in methods.iter() { for method in methods.iter() {
privileged_items.push(method.id); privileged_items.push(method.id);
*count += 1; *count += 1;
} }
@ -58,7 +58,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
*count += 1; *count += 1;
} }
item_foreign_mod(ref foreign_mod) => { item_foreign_mod(ref foreign_mod) => {
foreach foreign_item in foreign_mod.items.iter() { for foreign_item in foreign_mod.items.iter() {
privileged_items.push(foreign_item.id); privileged_items.push(foreign_item.id);
*count += 1; *count += 1;
} }
@ -70,7 +70,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
// Adds items that are privileged to this scope. // Adds items that are privileged to this scope.
let add_privileged_items: @fn(&[@ast::item]) -> uint = |items| { let add_privileged_items: @fn(&[@ast::item]) -> uint = |items| {
let mut count = 0; let mut count = 0;
foreach &item in items.iter() { for &item in items.iter() {
add_privileged_item(item, &mut count); add_privileged_item(item, &mut count);
} }
count count
@ -206,7 +206,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
let check_field: @fn(span: span, id: ast::def_id, ident: ast::ident) = let check_field: @fn(span: span, id: ast::def_id, ident: ast::ident) =
|span, id, ident| { |span, id, ident| {
let fields = ty::lookup_struct_fields(tcx, id); let fields = ty::lookup_struct_fields(tcx, id);
foreach field in fields.iter() { for field in fields.iter() {
if field.ident != ident { loop; } if field.ident != ident { loop; }
if field.vis == private { if field.vis == private {
tcx.sess.span_err(span, fmt!("field `%s` is private", tcx.sess.span_err(span, fmt!("field `%s` is private",
@ -357,7 +357,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
visit_block: |block, (method_map, visitor)| { visit_block: |block, (method_map, visitor)| {
// Gather up all the privileged items. // Gather up all the privileged items.
let mut n_added = 0; let mut n_added = 0;
foreach stmt in block.stmts.iter() { for stmt in block.stmts.iter() {
match stmt.node { match stmt.node {
stmt_decl(decl, _) => { stmt_decl(decl, _) => {
match decl.node { match decl.node {
@ -430,7 +430,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
ty_struct(id, _) => { ty_struct(id, _) => {
if id.crate != LOCAL_CRATE || if id.crate != LOCAL_CRATE ||
!privileged_items.iter().any(|x| x == &(id.node)) { !privileged_items.iter().any(|x| x == &(id.node)) {
foreach field in (*fields).iter() { for field in (*fields).iter() {
debug!("(privacy checking) checking \ debug!("(privacy checking) checking \
field in struct literal"); field in struct literal");
check_field(expr.span, id, field.ident); check_field(expr.span, id, field.ident);
@ -442,7 +442,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
!privileged_items.iter().any(|x| x == &(id.node)) { !privileged_items.iter().any(|x| x == &(id.node)) {
match tcx.def_map.get_copy(&expr.id) { match tcx.def_map.get_copy(&expr.id) {
def_variant(_, variant_id) => { def_variant(_, variant_id) => {
foreach field in (*fields).iter() { for field in (*fields).iter() {
debug!("(privacy checking) \ debug!("(privacy checking) \
checking field in \ checking field in \
struct variant \ struct variant \
@ -494,7 +494,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
ty_struct(id, _) => { ty_struct(id, _) => {
if id.crate != LOCAL_CRATE || if id.crate != LOCAL_CRATE ||
!privileged_items.iter().any(|x| x == &(id.node)) { !privileged_items.iter().any(|x| x == &(id.node)) {
foreach field in fields.iter() { for field in fields.iter() {
debug!("(privacy checking) checking \ debug!("(privacy checking) checking \
struct pattern"); struct pattern");
check_field(pattern.span, id, field.ident); check_field(pattern.span, id, field.ident);
@ -506,7 +506,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
!privileged_items.iter().any(|x| x == &enum_id.node) { !privileged_items.iter().any(|x| x == &enum_id.node) {
match tcx.def_map.find(&pattern.id) { match tcx.def_map.find(&pattern.id) {
Some(&def_variant(_, variant_id)) => { Some(&def_variant(_, variant_id)) => {
foreach field in fields.iter() { for field in fields.iter() {
debug!("(privacy checking) \ debug!("(privacy checking) \
checking field in \ checking field in \
struct variant pattern"); struct variant pattern");


@ -134,7 +134,7 @@ impl ReachableContext {
} }
item_enum(ref enum_def, _) => { item_enum(ref enum_def, _) => {
if privacy_context == PublicContext { if privacy_context == PublicContext {
foreach variant in enum_def.variants.iter() { for variant in enum_def.variants.iter() {
reachable_symbols.insert(variant.node.id); reachable_symbols.insert(variant.node.id);
} }
} }
@ -153,7 +153,7 @@ impl ReachableContext {
}; };
// Mark all public methods as reachable. // Mark all public methods as reachable.
foreach &method in methods.iter() { for &method in methods.iter() {
if should_be_considered_public(method) { if should_be_considered_public(method) {
reachable_symbols.insert(method.id); reachable_symbols.insert(method.id);
} }
@ -162,7 +162,7 @@ impl ReachableContext {
if generics_require_inlining(generics) { if generics_require_inlining(generics) {
// If the impl itself has generics, add all public // If the impl itself has generics, add all public
// symbols to the worklist. // symbols to the worklist.
foreach &method in methods.iter() { for &method in methods.iter() {
if should_be_considered_public(method) { if should_be_considered_public(method) {
worklist.push(method.id) worklist.push(method.id)
} }
@ -170,7 +170,7 @@ impl ReachableContext {
} else { } else {
// Otherwise, add only public methods that have // Otherwise, add only public methods that have
// generics to the worklist. // generics to the worklist.
foreach method in methods.iter() { for method in methods.iter() {
let generics = &method.generics; let generics = &method.generics;
let attrs = &method.attrs; let attrs = &method.attrs;
if generics_require_inlining(generics) || if generics_require_inlining(generics) ||
@ -184,7 +184,7 @@ impl ReachableContext {
item_trait(_, _, ref trait_methods) => { item_trait(_, _, ref trait_methods) => {
// Mark all provided methods as reachable. // Mark all provided methods as reachable.
if privacy_context == PublicContext { if privacy_context == PublicContext {
foreach trait_method in trait_methods.iter() { for trait_method in trait_methods.iter() {
match *trait_method { match *trait_method {
provided(method) => { provided(method) => {
reachable_symbols.insert(method.id); reachable_symbols.insert(method.id);
@ -390,7 +390,7 @@ impl ReachableContext {
// this properly would result in the necessity of computing *type* // this properly would result in the necessity of computing *type*
// reachability, which might result in a compile time loss. // reachability, which might result in a compile time loss.
fn mark_destructors_reachable(&self) { fn mark_destructors_reachable(&self) {
foreach (_, destructor_def_id) in self.tcx.destructor_for_type.iter() { for (_, destructor_def_id) in self.tcx.destructor_for_type.iter() {
if destructor_def_id.crate == LOCAL_CRATE { if destructor_def_id.crate == LOCAL_CRATE {
self.reachable_symbols.insert(destructor_def_id.node); self.reachable_symbols.insert(destructor_def_id.node);
} }


@ -198,7 +198,7 @@ impl RegionMaps {
while i < queue.len() { while i < queue.len() {
match self.free_region_map.find(&queue[i]) { match self.free_region_map.find(&queue[i]) {
Some(parents) => { Some(parents) => {
foreach parent in parents.iter() { for parent in parents.iter() {
if *parent == sup { if *parent == sup {
return true; return true;
} }
@ -318,7 +318,7 @@ impl RegionMaps {
fn parent_to_expr(cx: Context, child_id: ast::NodeId, sp: span) { fn parent_to_expr(cx: Context, child_id: ast::NodeId, sp: span) {
debug!("region::parent_to_expr(span=%?)", debug!("region::parent_to_expr(span=%?)",
cx.sess.codemap.span_to_str(sp)); cx.sess.codemap.span_to_str(sp));
foreach parent_id in cx.parent.iter() { for parent_id in cx.parent.iter() {
cx.region_maps.record_parent(child_id, *parent_id); cx.region_maps.record_parent(child_id, *parent_id);
} }
} }
@ -718,7 +718,7 @@ fn determine_rp_in_fn(fk: &oldvisit::fn_kind,
oldvisit::vt<@mut DetermineRpCtxt>)) { oldvisit::vt<@mut DetermineRpCtxt>)) {
do cx.with(cx.item_id, false) { do cx.with(cx.item_id, false) {
do cx.with_ambient_variance(rv_contravariant) { do cx.with_ambient_variance(rv_contravariant) {
foreach a in decl.inputs.iter() { for a in decl.inputs.iter() {
(visitor.visit_ty)(&a.ty, (cx, visitor)); (visitor.visit_ty)(&a.ty, (cx, visitor));
} }
} }
@ -830,7 +830,7 @@ fn determine_rp_in_ty(ty: &ast::Ty,
ast::ty_path(ref path, _, _) => { ast::ty_path(ref path, _, _) => {
// type parameters are---for now, anyway---always invariant // type parameters are---for now, anyway---always invariant
do cx.with_ambient_variance(rv_invariant) { do cx.with_ambient_variance(rv_invariant) {
foreach tp in path.types.iter() { for tp in path.types.iter() {
(visitor.visit_ty)(tp, (cx, visitor)); (visitor.visit_ty)(tp, (cx, visitor));
} }
} }
@ -843,7 +843,7 @@ fn determine_rp_in_ty(ty: &ast::Ty,
do cx.with(cx.item_id, false) { do cx.with(cx.item_id, false) {
// parameters are contravariant // parameters are contravariant
do cx.with_ambient_variance(rv_contravariant) { do cx.with_ambient_variance(rv_contravariant) {
foreach a in decl.inputs.iter() { for a in decl.inputs.iter() {
(visitor.visit_ty)(&a.ty, (cx, visitor)); (visitor.visit_ty)(&a.ty, (cx, visitor));
} }
} }
@ -923,7 +923,7 @@ pub fn determine_rp_in_crate(sess: Session,
match cx.dep_map.find(&c_id) { match cx.dep_map.find(&c_id) {
None => {} None => {}
Some(deps) => { Some(deps) => {
foreach dep in deps.iter() { for dep in deps.iter() {
let v = add_variance(dep.ambient_variance, c_variance); let v = add_variance(dep.ambient_variance, c_variance);
cx.add_rp(dep.id, v); cx.add_rp(dep.id, v);
} }
@ -935,7 +935,7 @@ pub fn determine_rp_in_crate(sess: Session,
debug!("%s", { debug!("%s", {
debug!("Region variance results:"); debug!("Region variance results:");
let region_paramd_items = cx.region_paramd_items; let region_paramd_items = cx.region_paramd_items;
foreach (&key, &value) in region_paramd_items.iter() { for (&key, &value) in region_paramd_items.iter() {
debug!("item %? (%s) is parameterized with variance %?", debug!("item %? (%s) is parameterized with variance %?",
key, key,
ast_map::node_id_to_str(ast_map, key, ast_map::node_id_to_str(ast_map, key,


@ -1037,7 +1037,7 @@ impl Resolver {
self.session.str_of(name))); self.session.str_of(name)));
{ {
let r = child.span_for_namespace(ns); let r = child.span_for_namespace(ns);
foreach sp in r.iter() { for sp in r.iter() {
self.session.span_note(*sp, self.session.span_note(*sp,
fmt!("first definition of %s `%s` here", fmt!("first definition of %s `%s` here",
namespace_error_to_str(duplicate_type), namespace_error_to_str(duplicate_type),
@ -1057,7 +1057,7 @@ impl Resolver {
} }
// Check each statement. // Check each statement.
foreach statement in block.stmts.iter() { for statement in block.stmts.iter() {
match statement.node { match statement.node {
stmt_decl(declaration, _) => { stmt_decl(declaration, _) => {
match declaration.node { match declaration.node {
@ -1179,7 +1179,7 @@ impl Resolver {
name_bindings.define_type name_bindings.define_type
(privacy, def_ty(local_def(item.id)), sp); (privacy, def_ty(local_def(item.id)), sp);
foreach variant in (*enum_definition).variants.iter() { for variant in (*enum_definition).variants.iter() {
self.build_reduced_graph_for_variant( self.build_reduced_graph_for_variant(
variant, variant,
local_def(item.id), local_def(item.id),
@ -1264,7 +1264,7 @@ impl Resolver {
}; };
// For each method... // For each method...
foreach method in methods.iter() { for method in methods.iter() {
// Add the method to the module. // Add the method to the module.
let ident = method.ident; let ident = method.ident;
let (method_name_bindings, _) = let (method_name_bindings, _) =
@ -1316,7 +1316,7 @@ impl Resolver {
// Add the names of all the methods to the trait info. // Add the names of all the methods to the trait info.
let mut method_names = HashMap::new(); let mut method_names = HashMap::new();
foreach method in methods.iter() { for method in methods.iter() {
let ty_m = trait_method_to_ty_method(method); let ty_m = trait_method_to_ty_method(method);
let ident = ty_m.ident; let ident = ty_m.ident;
@ -1353,7 +1353,7 @@ impl Resolver {
} }
let def_id = local_def(item.id); let def_id = local_def(item.id);
foreach (name, _) in method_names.iter() { for (name, _) in method_names.iter() {
if !self.method_map.contains_key(name) { if !self.method_map.contains_key(name) {
self.method_map.insert(*name, HashSet::new()); self.method_map.insert(*name, HashSet::new());
} }
@ -1422,7 +1422,7 @@ impl Resolver {
let privacy = visibility_to_privacy(view_item.vis); let privacy = visibility_to_privacy(view_item.vis);
match view_item.node { match view_item.node {
view_item_use(ref view_paths) => { view_item_use(ref view_paths) => {
foreach view_path in view_paths.iter() { for view_path in view_paths.iter() {
// Extract and intern the module part of the path. For // Extract and intern the module part of the path. For
// globs and lists, the path is found directly in the AST; // globs and lists, the path is found directly in the AST;
// for simple paths we have to munge the path a little. // for simple paths we have to munge the path a little.
@ -1433,7 +1433,7 @@ impl Resolver {
let path_len = full_path.idents.len(); let path_len = full_path.idents.len();
assert!(path_len != 0); assert!(path_len != 0);
foreach (i, ident) in full_path.idents.iter().enumerate() { for (i, ident) in full_path.idents.iter().enumerate() {
if i != path_len - 1 { if i != path_len - 1 {
module_path.push(*ident); module_path.push(*ident);
} }
@ -1442,7 +1442,7 @@ impl Resolver {
view_path_glob(ref module_ident_path, _) | view_path_glob(ref module_ident_path, _) |
view_path_list(ref module_ident_path, _, _) => { view_path_list(ref module_ident_path, _, _) => {
foreach ident in module_ident_path.idents.iter() { for ident in module_ident_path.idents.iter() {
module_path.push(*ident); module_path.push(*ident);
} }
} }
@ -1463,7 +1463,7 @@ impl Resolver {
id); id);
} }
view_path_list(_, ref source_idents, _) => { view_path_list(_, ref source_idents, _) => {
foreach source_ident in source_idents.iter() { for source_ident in source_idents.iter() {
let name = source_ident.node.name; let name = source_ident.node.name;
let subclass = @SingleImport(name, name); let subclass = @SingleImport(name, name);
self.build_import_directive( self.build_import_directive(
@ -1657,7 +1657,7 @@ impl Resolver {
let method_def_ids = let method_def_ids =
get_trait_method_def_ids(self.session.cstore, def_id); get_trait_method_def_ids(self.session.cstore, def_id);
let mut interned_method_names = HashSet::new(); let mut interned_method_names = HashSet::new();
foreach &method_def_id in method_def_ids.iter() { for &method_def_id in method_def_ids.iter() {
let (method_name, explicit_self) = let (method_name, explicit_self) =
get_method_name_and_explicit_self(self.session.cstore, get_method_name_and_explicit_self(self.session.cstore,
method_def_id); method_def_id);
@ -1672,7 +1672,7 @@ impl Resolver {
interned_method_names.insert(method_name); interned_method_names.insert(method_name);
} }
} }
foreach name in interned_method_names.iter() { for name in interned_method_names.iter() {
if !self.method_map.contains_key(name) { if !self.method_map.contains_key(name) {
self.method_map.insert(*name, HashSet::new()); self.method_map.insert(*name, HashSet::new());
} }
@ -1741,7 +1741,7 @@ impl Resolver {
// need to. // need to.
let mut current_module = root; let mut current_module = root;
foreach ident_str in pieces.iter() { for ident_str in pieces.iter() {
let ident = self.session.ident_of(*ident_str); let ident = self.session.ident_of(*ident_str);
// Create or reuse a graph node for the child. // Create or reuse a graph node for the child.
let (child_name_bindings, new_parent) = let (child_name_bindings, new_parent) =
@ -1861,7 +1861,7 @@ impl Resolver {
// Add each static method to the module. // Add each static method to the module.
let new_parent = ModuleReducedGraphParent( let new_parent = ModuleReducedGraphParent(
type_module); type_module);
foreach static_method_info in static_methods.iter() { for static_method_info in static_methods.iter() {
let ident = static_method_info.ident; let ident = static_method_info.ident;
debug!("(building reduced graph for \ debug!("(building reduced graph for \
external crate) creating \ external crate) creating \
@ -1993,7 +1993,7 @@ impl Resolver {
self.module_to_str(module_)); self.module_to_str(module_));
self.resolve_imports_for_module(module_); self.resolve_imports_for_module(module_);
foreach (_, &child_node) in module_.children.iter() { for (_, &child_node) in module_.children.iter() {
match child_node.get_module_if_available() { match child_node.get_module_if_available() {
None => { None => {
// Nothing to do. // Nothing to do.
@ -2004,7 +2004,7 @@ impl Resolver {
} }
} }
foreach (_, &child_module) in module_.anonymous_children.iter() { for (_, &child_module) in module_.anonymous_children.iter() {
self.resolve_imports_for_module_subtree(child_module); self.resolve_imports_for_module_subtree(child_module);
} }
} }
@ -2048,7 +2048,7 @@ impl Resolver {
pub fn idents_to_str(@mut self, idents: &[ident]) -> ~str { pub fn idents_to_str(@mut self, idents: &[ident]) -> ~str {
let mut first = true; let mut first = true;
let mut result = ~""; let mut result = ~"";
foreach ident in idents.iter() { for ident in idents.iter() {
if first { if first {
first = false first = false
} else { } else {
@ -2447,7 +2447,7 @@ impl Resolver {
assert_eq!(containing_module.glob_count, 0); assert_eq!(containing_module.glob_count, 0);
// Add all resolved imports from the containing module. // Add all resolved imports from the containing module.
foreach (ident, target_import_resolution) in containing_module.import_resolutions.iter() { for (ident, target_import_resolution) in containing_module.import_resolutions.iter() {
debug!("(resolving glob import) writing module resolution \ debug!("(resolving glob import) writing module resolution \
%? into `%s`", %? into `%s`",
@ -2531,12 +2531,12 @@ impl Resolver {
}; };
// Add all children from the containing module. // Add all children from the containing module.
foreach (&ident, name_bindings) in containing_module.children.iter() { for (&ident, name_bindings) in containing_module.children.iter() {
merge_import_resolution(ident, *name_bindings); merge_import_resolution(ident, *name_bindings);
} }
// Add external module children from the containing module. // Add external module children from the containing module.
foreach (&ident, module) in containing_module.external_module_children.iter() { for (&ident, module) in containing_module.external_module_children.iter() {
let name_bindings = let name_bindings =
@mut Resolver::create_name_bindings_from_module(*module); @mut Resolver::create_name_bindings_from_module(*module);
merge_import_resolution(ident, name_bindings); merge_import_resolution(ident, name_bindings);
@ -3108,7 +3108,7 @@ impl Resolver {
} }
// Descend into children and anonymous children. // Descend into children and anonymous children.
foreach (_, &child_node) in module_.children.iter() { for (_, &child_node) in module_.children.iter() {
match child_node.get_module_if_available() { match child_node.get_module_if_available() {
None => { None => {
// Continue. // Continue.
@ -3119,7 +3119,7 @@ impl Resolver {
} }
} }
foreach (_, &module_) in module_.anonymous_children.iter() { for (_, &module_) in module_.anonymous_children.iter() {
self.report_unresolved_imports(module_); self.report_unresolved_imports(module_);
} }
} }
@ -3167,7 +3167,7 @@ impl Resolver {
self.record_exports_for_module(module_); self.record_exports_for_module(module_);
foreach (_, &child_name_bindings) in module_.children.iter() { for (_, &child_name_bindings) in module_.children.iter() {
match child_name_bindings.get_module_if_available() { match child_name_bindings.get_module_if_available() {
None => { None => {
// Nothing to do. // Nothing to do.
@ -3178,7 +3178,7 @@ impl Resolver {
} }
} }
foreach (_, &child_module) in module_.anonymous_children.iter() { for (_, &child_module) in module_.anonymous_children.iter() {
self.record_exports_for_module_subtree(child_module); self.record_exports_for_module_subtree(child_module);
} }
} }
@ -3228,14 +3228,14 @@ impl Resolver {
pub fn add_exports_for_module(@mut self, pub fn add_exports_for_module(@mut self,
exports2: &mut ~[Export2], exports2: &mut ~[Export2],
module_: @mut Module) { module_: @mut Module) {
foreach (ident, importresolution) in module_.import_resolutions.iter() { for (ident, importresolution) in module_.import_resolutions.iter() {
if importresolution.privacy != Public { if importresolution.privacy != Public {
debug!("(computing exports) not reexporting private `%s`", debug!("(computing exports) not reexporting private `%s`",
self.session.str_of(*ident)); self.session.str_of(*ident));
loop; loop;
} }
let xs = [TypeNS, ValueNS]; let xs = [TypeNS, ValueNS];
foreach ns in xs.iter() { for ns in xs.iter() {
match importresolution.target_for_namespace(*ns) { match importresolution.target_for_namespace(*ns) {
Some(target) => { Some(target) => {
debug!("(computing exports) maybe reexport '%s'", debug!("(computing exports) maybe reexport '%s'",
@ -3483,8 +3483,8 @@ impl Resolver {
// enum item: resolve all the variants' discrs, // enum item: resolve all the variants' discrs,
// then resolve the ty params // then resolve the ty params
item_enum(ref enum_def, ref generics) => { item_enum(ref enum_def, ref generics) => {
foreach variant in (*enum_def).variants.iter() { for variant in (*enum_def).variants.iter() {
foreach dis_expr in variant.node.disr_expr.iter() { for dis_expr in variant.node.disr_expr.iter() {
// resolve the discriminator expr // resolve the discriminator expr
// as a constant // as a constant
self.with_constant_rib(|| { self.with_constant_rib(|| {
@ -3541,11 +3541,11 @@ impl Resolver {
visitor); visitor);
// Resolve derived traits. // Resolve derived traits.
foreach trt in traits.iter() { for trt in traits.iter() {
self.resolve_trait_reference(item.id, trt, visitor, TraitDerivation); self.resolve_trait_reference(item.id, trt, visitor, TraitDerivation);
} }
foreach method in (*methods).iter() { for method in (*methods).iter() {
// Create a new rib for the method-specific type // Create a new rib for the method-specific type
// parameters. // parameters.
// //
@ -3565,7 +3565,7 @@ impl Resolver {
&ty_m.generics.ty_params, &ty_m.generics.ty_params,
visitor); visitor);
foreach argument in ty_m.decl.inputs.iter() { for argument in ty_m.decl.inputs.iter() {
self.resolve_type(&argument.ty, visitor); self.resolve_type(&argument.ty, visitor);
} }
@ -3602,7 +3602,7 @@ impl Resolver {
item_foreign_mod(ref foreign_module) => { item_foreign_mod(ref foreign_module) => {
do self.with_scope(Some(item.ident)) { do self.with_scope(Some(item.ident)) {
foreach foreign_item in foreign_module.items.iter() { for foreign_item in foreign_module.items.iter() {
match foreign_item.node { match foreign_item.node {
foreign_item_fn(_, ref generics) => { foreign_item_fn(_, ref generics) => {
self.with_type_parameter_rib( self.with_type_parameter_rib(
@ -3658,7 +3658,7 @@ impl Resolver {
let function_type_rib = @Rib(rib_kind); let function_type_rib = @Rib(rib_kind);
self.type_ribs.push(function_type_rib); self.type_ribs.push(function_type_rib);
foreach (index, type_parameter) in generics.ty_params.iter().enumerate() { for (index, type_parameter) in generics.ty_params.iter().enumerate() {
let name = type_parameter.ident; let name = type_parameter.ident;
debug!("with_type_parameter_rib: %d %d", node_id, debug!("with_type_parameter_rib: %d %d", node_id,
type_parameter.id); type_parameter.id);
@ -3749,7 +3749,7 @@ impl Resolver {
// Nothing to do. // Nothing to do.
} }
Some(declaration) => { Some(declaration) => {
foreach argument in declaration.inputs.iter() { for argument in declaration.inputs.iter() {
let binding_mode = ArgumentIrrefutableMode; let binding_mode = ArgumentIrrefutableMode;
let mutability = let mutability =
if argument.is_mutbl {Mutable} else {Immutable}; if argument.is_mutbl {Mutable} else {Immutable};
@ -3781,8 +3781,8 @@ impl Resolver {
pub fn resolve_type_parameters(@mut self, pub fn resolve_type_parameters(@mut self,
type_parameters: &OptVec<TyParam>, type_parameters: &OptVec<TyParam>,
visitor: ResolveVisitor) { visitor: ResolveVisitor) {
foreach type_parameter in type_parameters.iter() { for type_parameter in type_parameters.iter() {
foreach bound in type_parameter.bounds.iter() { for bound in type_parameter.bounds.iter() {
self.resolve_type_parameter_bound(type_parameter.id, bound, visitor); self.resolve_type_parameter_bound(type_parameter.id, bound, visitor);
} }
} }
@ -3831,7 +3831,7 @@ impl Resolver {
fields: &[@struct_field], fields: &[@struct_field],
visitor: ResolveVisitor) { visitor: ResolveVisitor) {
let mut ident_map = HashMap::new::<ast::ident, @struct_field>(); let mut ident_map = HashMap::new::<ast::ident, @struct_field>();
foreach &field in fields.iter() { for &field in fields.iter() {
match field.node.kind { match field.node.kind {
named_field(ident, _) => { named_field(ident, _) => {
match ident_map.find(&ident) { match ident_map.find(&ident) {
@ -3860,7 +3860,7 @@ impl Resolver {
self.resolve_type_parameters(&generics.ty_params, visitor); self.resolve_type_parameters(&generics.ty_params, visitor);
// Resolve fields. // Resolve fields.
foreach field in fields.iter() { for field in fields.iter() {
self.resolve_type(&field.node.ty, visitor); self.resolve_type(&field.node.ty, visitor);
} }
} }
@ -3920,7 +3920,7 @@ impl Resolver {
let mut new_trait_refs = ~[]; let mut new_trait_refs = ~[];
{ {
let r = self.def_map.find(&trait_reference.ref_id); let r = self.def_map.find(&trait_reference.ref_id);
foreach &def in r.iter() { for &def in r.iter() {
new_trait_refs.push(def_id_of_def(*def)); new_trait_refs.push(def_id_of_def(*def));
} }
} }
@ -3936,7 +3936,7 @@ impl Resolver {
// Resolve the self type. // Resolve the self type.
self.resolve_type(self_type, visitor); self.resolve_type(self_type, visitor);
foreach method in methods.iter() { for method in methods.iter() {
// We also need a new scope for the method-specific // We also need a new scope for the method-specific
// type parameters. // type parameters.
self.resolve_method(MethodRibKind( self.resolve_method(MethodRibKind(
@ -4016,10 +4016,10 @@ impl Resolver {
pub fn check_consistent_bindings(@mut self, arm: &arm) { pub fn check_consistent_bindings(@mut self, arm: &arm) {
if arm.pats.len() == 0 { return; } if arm.pats.len() == 0 { return; }
let map_0 = self.binding_mode_map(arm.pats[0]); let map_0 = self.binding_mode_map(arm.pats[0]);
foreach (i, p) in arm.pats.iter().enumerate() { for (i, p) in arm.pats.iter().enumerate() {
let map_i = self.binding_mode_map(*p); let map_i = self.binding_mode_map(*p);
foreach (&key, &binding_0) in map_0.iter() { for (&key, &binding_0) in map_0.iter() {
match map_i.find(&key) { match map_i.find(&key) {
None => { None => {
self.session.span_err( self.session.span_err(
@ -4040,7 +4040,7 @@ impl Resolver {
} }
} }
foreach (&key, &binding) in map_i.iter() { for (&key, &binding) in map_i.iter() {
if !map_0.contains_key(&key) { if !map_0.contains_key(&key) {
self.session.span_err( self.session.span_err(
binding.span, binding.span,
@ -4056,7 +4056,7 @@ impl Resolver {
self.value_ribs.push(@Rib(NormalRibKind)); self.value_ribs.push(@Rib(NormalRibKind));
let bindings_list = @mut HashMap::new(); let bindings_list = @mut HashMap::new();
foreach pattern in arm.pats.iter() { for pattern in arm.pats.iter() {
self.resolve_pattern(*pattern, RefutableMode, Immutable, self.resolve_pattern(*pattern, RefutableMode, Immutable,
Some(bindings_list), visitor); Some(bindings_list), visitor);
} }
@ -4162,7 +4162,7 @@ impl Resolver {
} }
do bounds.map |bound_vec| { do bounds.map |bound_vec| {
foreach bound in bound_vec.iter() { for bound in bound_vec.iter() {
self.resolve_type_parameter_bound(ty.id, bound, visitor); self.resolve_type_parameter_bound(ty.id, bound, visitor);
} }
}; };
@ -4170,7 +4170,7 @@ impl Resolver {
ty_closure(c) => { ty_closure(c) => {
do c.bounds.map |bounds| { do c.bounds.map |bounds| {
foreach bound in bounds.iter() { for bound in bounds.iter() {
self.resolve_type_parameter_bound(ty.id, bound, visitor); self.resolve_type_parameter_bound(ty.id, bound, visitor);
} }
}; };
@ -4317,7 +4317,7 @@ impl Resolver {
} }
// Check the types in the path pattern. // Check the types in the path pattern.
foreach ty in path.types.iter() { for ty in path.types.iter() {
self.resolve_type(ty, visitor); self.resolve_type(ty, visitor);
} }
} }
@ -4350,7 +4350,7 @@ impl Resolver {
} }
// Check the types in the path pattern. // Check the types in the path pattern.
foreach ty in path.types.iter() { for ty in path.types.iter() {
self.resolve_type(ty, visitor); self.resolve_type(ty, visitor);
} }
} }
@ -4379,7 +4379,7 @@ impl Resolver {
} }
// Check the types in the path pattern. // Check the types in the path pattern.
foreach ty in path.types.iter() { for ty in path.types.iter() {
self.resolve_type(ty, visitor); self.resolve_type(ty, visitor);
} }
} }
@ -4476,7 +4476,7 @@ impl Resolver {
visitor: ResolveVisitor) visitor: ResolveVisitor)
-> Option<def> { -> Option<def> {
// First, resolve the types. // First, resolve the types.
foreach ty in path.types.iter() { for ty in path.types.iter() {
self.resolve_type(ty, visitor); self.resolve_type(ty, visitor);
} }
@ -4606,7 +4606,7 @@ impl Resolver {
pub fn intern_module_part_of_path(@mut self, path: &Path) -> ~[ident] { pub fn intern_module_part_of_path(@mut self, path: &Path) -> ~[ident] {
let mut module_path_idents = ~[]; let mut module_path_idents = ~[];
foreach (index, ident) in path.idents.iter().enumerate() { for (index, ident) in path.idents.iter().enumerate() {
if index == path.idents.len() - 1 { if index == path.idents.len() - 1 {
break; break;
} }
@ -4837,14 +4837,14 @@ impl Resolver {
let mut j = this.value_ribs.len(); let mut j = this.value_ribs.len();
while j != 0 { while j != 0 {
j -= 1; j -= 1;
foreach (&k, _) in this.value_ribs[j].bindings.iter() { for (&k, _) in this.value_ribs[j].bindings.iter() {
maybes.push(this.session.str_of(k)); maybes.push(this.session.str_of(k));
values.push(uint::max_value); values.push(uint::max_value);
} }
} }
let mut smallest = 0; let mut smallest = 0;
foreach (i, &other) in maybes.iter().enumerate() { for (i, &other) in maybes.iter().enumerate() {
values[i] = name.lev_distance(other); values[i] = name.lev_distance(other);
if values[i] <= values[smallest] { if values[i] <= values[smallest] {
@ -4873,11 +4873,11 @@ impl Resolver {
i -= 1; i -= 1;
match this.type_ribs[i].kind { match this.type_ribs[i].kind {
MethodRibKind(node_id, _) => MethodRibKind(node_id, _) =>
foreach item in this.crate.module.items.iter() { for item in this.crate.module.items.iter() {
if item.id == node_id { if item.id == node_id {
match item.node { match item.node {
item_struct(class_def, _) => { item_struct(class_def, _) => {
foreach field in class_def.fields.iter() { for field in class_def.fields.iter() {
match field.node.kind { match field.node.kind {
unnamed_field => {}, unnamed_field => {},
named_field(ident, _) => { named_field(ident, _) => {
@ -5151,7 +5151,7 @@ impl Resolver {
// Look for the current trait. // Look for the current trait.
match self.current_trait_refs { match self.current_trait_refs {
Some(ref trait_def_ids) => { Some(ref trait_def_ids) => {
foreach trait_def_id in trait_def_ids.iter() { for trait_def_id in trait_def_ids.iter() {
if candidate_traits.contains(trait_def_id) { if candidate_traits.contains(trait_def_id) {
self.add_trait_info(&mut found_traits, self.add_trait_info(&mut found_traits,
*trait_def_id, *trait_def_id,
@ -5165,7 +5165,7 @@ impl Resolver {
} }
// Look for trait children. // Look for trait children.
foreach (_, &child_name_bindings) in search_module.children.iter() { for (_, &child_name_bindings) in search_module.children.iter() {
match child_name_bindings.def_for_namespace(TypeNS) { match child_name_bindings.def_for_namespace(TypeNS) {
Some(def) => { Some(def) => {
match def { match def {
@ -5188,7 +5188,7 @@ impl Resolver {
} }
// Look for imports. // Look for imports.
foreach (_, &import_resolution) in search_module.import_resolutions.iter() { for (_, &import_resolution) in search_module.import_resolutions.iter() {
match import_resolution.target_for_namespace(TypeNS) { match import_resolution.target_for_namespace(TypeNS) {
None => { None => {
// Continue. // Continue.
@ -5305,7 +5305,7 @@ impl Resolver {
match vi.node { match vi.node {
view_item_extern_mod(*) => {} // ignore view_item_extern_mod(*) => {} // ignore
view_item_use(ref path) => { view_item_use(ref path) => {
foreach p in path.iter() { for p in path.iter() {
match p.node { match p.node {
view_path_simple(_, _, id) | view_path_glob(_, id) => { view_path_simple(_, _, id) | view_path_glob(_, id) => {
if !self.used_imports.contains(&id) { if !self.used_imports.contains(&id) {
@ -5316,7 +5316,7 @@ impl Resolver {
} }
view_path_list(_, ref list, _) => { view_path_list(_, ref list, _) => {
foreach i in list.iter() { for i in list.iter() {
if !self.used_imports.contains(&i.node.id) { if !self.used_imports.contains(&i.node.id) {
self.session.add_lint(unused_imports, self.session.add_lint(unused_imports,
i.node.id, i.span, i.node.id, i.span,
@ -5367,12 +5367,12 @@ impl Resolver {
debug!("Dump of module `%s`:", self.module_to_str(module_)); debug!("Dump of module `%s`:", self.module_to_str(module_));
debug!("Children:"); debug!("Children:");
foreach (&name, _) in module_.children.iter() { for (&name, _) in module_.children.iter() {
debug!("* %s", self.session.str_of(name)); debug!("* %s", self.session.str_of(name));
} }
debug!("Import resolutions:"); debug!("Import resolutions:");
foreach (name, import_resolution) in module_.import_resolutions.iter() { for (name, import_resolution) in module_.import_resolutions.iter() {
let value_repr; let value_repr;
match import_resolution.target_for_namespace(ValueNS) { match import_resolution.target_for_namespace(ValueNS) {
None => { value_repr = ~""; } None => { value_repr = ~""; }


@ -298,7 +298,7 @@ pub fn variant_opt(bcx: @mut Block, pat_id: ast::NodeId)
match ccx.tcx.def_map.get_copy(&pat_id) { match ccx.tcx.def_map.get_copy(&pat_id) {
ast::def_variant(enum_id, var_id) => { ast::def_variant(enum_id, var_id) => {
let variants = ty::enum_variants(ccx.tcx, enum_id); let variants = ty::enum_variants(ccx.tcx, enum_id);
foreach v in (*variants).iter() { for v in (*variants).iter() {
if var_id == v.id { if var_id == v.id {
return var(v.disr_val, return var(v.disr_val,
adt::represent_node(bcx, pat_id)) adt::represent_node(bcx, pat_id))
@ -367,7 +367,7 @@ pub fn matches_to_str(bcx: @mut Block, m: &[Match]) -> ~str {
} }
pub fn has_nested_bindings(m: &[Match], col: uint) -> bool { pub fn has_nested_bindings(m: &[Match], col: uint) -> bool {
foreach br in m.iter() { for br in m.iter() {
match br.pats[col].node { match br.pats[col].node {
ast::pat_ident(_, _, Some(_)) => return true, ast::pat_ident(_, _, Some(_)) => return true,
_ => () _ => ()
@ -437,7 +437,7 @@ pub fn enter_match<'r>(bcx: @mut Block,
let _indenter = indenter(); let _indenter = indenter();
let mut result = ~[]; let mut result = ~[];
foreach br in m.iter() { for br in m.iter() {
match e(br.pats[col]) { match e(br.pats[col]) {
Some(sub) => { Some(sub) => {
let pats = let pats =
@ -589,7 +589,7 @@ pub fn enter_opt<'r>(bcx: @mut Block,
// unspecified fields with dummy. // unspecified fields with dummy.
let mut reordered_patterns = ~[]; let mut reordered_patterns = ~[];
let r = ty::lookup_struct_fields(tcx, struct_id); let r = ty::lookup_struct_fields(tcx, struct_id);
foreach field in r.iter() { for field in r.iter() {
match field_pats.iter().find_(|p| p.ident == field.ident) { match field_pats.iter().find_(|p| p.ident == field.ident) {
None => reordered_patterns.push(dummy), None => reordered_patterns.push(dummy),
Some(fp) => reordered_patterns.push(fp.pat) Some(fp) => reordered_patterns.push(fp.pat)
@ -649,7 +649,7 @@ pub fn enter_rec_or_struct<'r>(bcx: @mut Block,
match p.node { match p.node {
ast::pat_struct(_, ref fpats, _) => { ast::pat_struct(_, ref fpats, _) => {
let mut pats = ~[]; let mut pats = ~[];
foreach fname in fields.iter() { for fname in fields.iter() {
match fpats.iter().find_(|p| p.ident == *fname) { match fpats.iter().find_(|p| p.ident == *fname) {
None => pats.push(dummy), None => pats.push(dummy),
Some(pat) => pats.push(pat.pat) Some(pat) => pats.push(pat.pat)
@ -809,7 +809,7 @@ pub fn get_options(bcx: @mut Block, m: &[Match], col: uint) -> ~[Opt] {
} }
let mut found = ~[]; let mut found = ~[];
foreach br in m.iter() { for br in m.iter() {
let cur = br.pats[col]; let cur = br.pats[col];
match cur.node { match cur.node {
ast::pat_lit(l) => { ast::pat_lit(l) => {
@ -955,7 +955,7 @@ pub fn collect_record_or_struct_fields(bcx: @mut Block,
col: uint) col: uint)
-> ~[ast::ident] { -> ~[ast::ident] {
let mut fields: ~[ast::ident] = ~[]; let mut fields: ~[ast::ident] = ~[];
foreach br in m.iter() { for br in m.iter() {
match br.pats[col].node { match br.pats[col].node {
ast::pat_struct(_, ref fs, _) => { ast::pat_struct(_, ref fs, _) => {
match ty::get(node_id_type(bcx, br.pats[col].id)).sty { match ty::get(node_id_type(bcx, br.pats[col].id)).sty {
@ -969,7 +969,7 @@ pub fn collect_record_or_struct_fields(bcx: @mut Block,
return fields; return fields;
fn extend(idents: &mut ~[ast::ident], field_pats: &[ast::field_pat]) { fn extend(idents: &mut ~[ast::ident], field_pats: &[ast::field_pat]) {
foreach field_pat in field_pats.iter() { for field_pat in field_pats.iter() {
let field_ident = field_pat.ident; let field_ident = field_pat.ident;
if !idents.iter().any(|x| *x == field_ident) { if !idents.iter().any(|x| *x == field_ident) {
idents.push(field_ident); idents.push(field_ident);
@ -994,7 +994,7 @@ pub fn root_pats_as_necessary(mut bcx: @mut Block,
col: uint, col: uint,
val: ValueRef) val: ValueRef)
-> @mut Block { -> @mut Block {
foreach br in m.iter() { for br in m.iter() {
let pat_id = br.pats[col].id; let pat_id = br.pats[col].id;
if pat_id != 0 { if pat_id != 0 {
let datum = Datum {val: val, ty: node_id_type(bcx, pat_id), let datum = Datum {val: val, ty: node_id_type(bcx, pat_id),
@ -1063,14 +1063,14 @@ pub fn pick_col(m: &[Match]) -> uint {
} }
} }
let mut scores = vec::from_elem(m[0].pats.len(), 0u); let mut scores = vec::from_elem(m[0].pats.len(), 0u);
foreach br in m.iter() { for br in m.iter() {
let mut i = 0u; let mut i = 0u;
foreach p in br.pats.iter() { scores[i] += score(*p); i += 1u; } for p in br.pats.iter() { scores[i] += score(*p); i += 1u; }
} }
let mut max_score = 0u; let mut max_score = 0u;
let mut best_col = 0u; let mut best_col = 0u;
let mut i = 0u; let mut i = 0u;
foreach score in scores.iter() { for score in scores.iter() {
let score = *score; let score = *score;
// Irrefutable columns always go first, they'd only be duplicated in // Irrefutable columns always go first, they'd only be duplicated in
@ -1148,7 +1148,7 @@ fn store_non_ref_bindings(bcx: @mut Block,
*/ */
let mut bcx = bcx; let mut bcx = bcx;
foreach (_, &binding_info) in bindings_map.iter() { for (_, &binding_info) in bindings_map.iter() {
match binding_info.trmode { match binding_info.trmode {
TrByValue(lldest) => { TrByValue(lldest) => {
let llval = Load(bcx, binding_info.llmatch); // get a T* let llval = Load(bcx, binding_info.llmatch); // get a T*
@ -1182,7 +1182,7 @@ fn insert_lllocals(bcx: @mut Block,
BindArgument => bcx.fcx.llargs BindArgument => bcx.fcx.llargs
}; };
foreach (_, &binding_info) in bindings_map.iter() { for (_, &binding_info) in bindings_map.iter() {
let llval = match binding_info.trmode { let llval = match binding_info.trmode {
// By value bindings: use the stack slot that we // By value bindings: use the stack slot that we
// copied/moved the value into // copied/moved the value into
@ -1236,7 +1236,7 @@ pub fn compile_guard(bcx: @mut Block,
let val = bool_to_i1(bcx, val); let val = bool_to_i1(bcx, val);
// Revoke the temp cleanups now that the guard successfully executed. // Revoke the temp cleanups now that the guard successfully executed.
foreach llval in temp_cleanups.iter() { for llval in temp_cleanups.iter() {
revoke_clean(bcx, *llval); revoke_clean(bcx, *llval);
} }
@ -1250,7 +1250,7 @@ pub fn compile_guard(bcx: @mut Block,
fn drop_bindings(bcx: @mut Block, data: &ArmData) -> @mut Block { fn drop_bindings(bcx: @mut Block, data: &ArmData) -> @mut Block {
let mut bcx = bcx; let mut bcx = bcx;
foreach (_, &binding_info) in data.bindings_map.iter() { for (_, &binding_info) in data.bindings_map.iter() {
match binding_info.trmode { match binding_info.trmode {
TrByValue(llval) => { TrByValue(llval) => {
bcx = glue::drop_ty(bcx, llval, binding_info.ty); bcx = glue::drop_ty(bcx, llval, binding_info.ty);
@ -1325,7 +1325,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
let ccx = bcx.fcx.ccx; let ccx = bcx.fcx.ccx;
let mut pat_id = 0; let mut pat_id = 0;
let mut pat_span = dummy_sp(); let mut pat_span = dummy_sp();
foreach br in m.iter() { for br in m.iter() {
// Find a real id (we're adding placeholder wildcard patterns, but // Find a real id (we're adding placeholder wildcard patterns, but
// each column is guaranteed to have at least one real pattern) // each column is guaranteed to have at least one real pattern)
if pat_id == 0 { if pat_id == 0 {
@ -1434,7 +1434,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
var(_, repr) => { var(_, repr) => {
let (the_kind, val_opt) = adt::trans_switch(bcx, repr, val); let (the_kind, val_opt) = adt::trans_switch(bcx, repr, val);
kind = the_kind; kind = the_kind;
foreach &tval in val_opt.iter() { test_val = tval; } for &tval in val_opt.iter() { test_val = tval; }
} }
lit(_) => { lit(_) => {
let pty = node_id_type(bcx, pat_id); let pty = node_id_type(bcx, pat_id);
@ -1457,7 +1457,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
} }
} }
} }
foreach o in opts.iter() { for o in opts.iter() {
match *o { match *o {
range(_, _) => { kind = compare; break } range(_, _) => { kind = compare; break }
_ => () _ => ()
@ -1479,7 +1479,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
let mut i = 0u; let mut i = 0u;
// Compile subtrees for each option // Compile subtrees for each option
foreach opt in opts.iter() { for opt in opts.iter() {
i += 1u; i += 1u;
let mut opt_cx = else_cx; let mut opt_cx = else_cx;
if !exhaustive || i < len { if !exhaustive || i < len {
@ -1688,7 +1688,7 @@ pub fn trans_match_inner(scope_cx: @mut Block,
let mut arm_datas = ~[]; let mut arm_datas = ~[];
let mut matches = ~[]; let mut matches = ~[];
foreach arm in arms.iter() { for arm in arms.iter() {
let body = scope_block(bcx, arm.body.info(), "case_body"); let body = scope_block(bcx, arm.body.info(), "case_body");
let bindings_map = create_bindings_map(bcx, arm.pats[0]); let bindings_map = create_bindings_map(bcx, arm.pats[0]);
let arm_data = ArmData { let arm_data = ArmData {
@ -1697,7 +1697,7 @@ pub fn trans_match_inner(scope_cx: @mut Block,
bindings_map: @bindings_map bindings_map: @bindings_map
}; };
arm_datas.push(arm_data.clone()); arm_datas.push(arm_data.clone());
foreach p in arm.pats.iter() { for p in arm.pats.iter() {
matches.push(Match { matches.push(Match {
pats: ~[*p], pats: ~[*p],
data: arm_data.clone(), data: arm_data.clone(),
@ -1721,7 +1721,7 @@ pub fn trans_match_inner(scope_cx: @mut Block,
compile_submatch(bcx, matches, [lldiscr], chk); compile_submatch(bcx, matches, [lldiscr], chk);
let mut arm_cxs = ~[]; let mut arm_cxs = ~[];
foreach arm_data in arm_datas.iter() { for arm_data in arm_datas.iter() {
let mut bcx = arm_data.bodycx; let mut bcx = arm_data.bodycx;
// If this arm has a guard, then the various by-value bindings have // If this arm has a guard, then the various by-value bindings have
@ -1957,7 +1957,7 @@ fn bind_irrefutable_pat(bcx: @mut Block,
}); });
} }
foreach &inner_pat in inner.iter() { for &inner_pat in inner.iter() {
bcx = bind_irrefutable_pat(bcx, inner_pat, val, binding_mode); bcx = bind_irrefutable_pat(bcx, inner_pat, val, binding_mode);
} }
} }
@ -1972,8 +1972,8 @@ fn bind_irrefutable_pat(bcx: @mut Block,
repr, repr,
vinfo.disr_val, vinfo.disr_val,
val); val);
foreach sub_pat in sub_pats.iter() { for sub_pat in sub_pats.iter() {
foreach (i, argval) in args.vals.iter().enumerate() { for (i, argval) in args.vals.iter().enumerate() {
bcx = bind_irrefutable_pat(bcx, sub_pat[i], bcx = bind_irrefutable_pat(bcx, sub_pat[i],
*argval, binding_mode); *argval, binding_mode);
} }
@ -1988,7 +1988,7 @@ fn bind_irrefutable_pat(bcx: @mut Block,
Some(ref elems) => { Some(ref elems) => {
// This is the tuple struct case. // This is the tuple struct case.
let repr = adt::represent_node(bcx, pat.id); let repr = adt::represent_node(bcx, pat.id);
foreach (i, elem) in elems.iter().enumerate() { for (i, elem) in elems.iter().enumerate() {
let fldptr = adt::trans_field_ptr(bcx, repr, let fldptr = adt::trans_field_ptr(bcx, repr,
val, 0, i); val, 0, i);
bcx = bind_irrefutable_pat(bcx, *elem, bcx = bind_irrefutable_pat(bcx, *elem,
@ -2009,7 +2009,7 @@ fn bind_irrefutable_pat(bcx: @mut Block,
let pat_ty = node_id_type(bcx, pat.id); let pat_ty = node_id_type(bcx, pat.id);
let pat_repr = adt::represent_type(bcx.ccx(), pat_ty); let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
do expr::with_field_tys(tcx, pat_ty, None) |discr, field_tys| { do expr::with_field_tys(tcx, pat_ty, None) |discr, field_tys| {
foreach f in fields.iter() { for f in fields.iter() {
let ix = ty::field_idx_strict(tcx, f.ident, field_tys); let ix = ty::field_idx_strict(tcx, f.ident, field_tys);
let fldptr = adt::trans_field_ptr(bcx, pat_repr, val, let fldptr = adt::trans_field_ptr(bcx, pat_repr, val,
discr, ix); discr, ix);
@ -2019,7 +2019,7 @@ fn bind_irrefutable_pat(bcx: @mut Block,
} }
ast::pat_tup(ref elems) => { ast::pat_tup(ref elems) => {
let repr = adt::represent_node(bcx, pat.id); let repr = adt::represent_node(bcx, pat.id);
foreach (i, elem) in elems.iter().enumerate() { for (i, elem) in elems.iter().enumerate() {
let fldptr = adt::trans_field_ptr(bcx, repr, val, 0, i); let fldptr = adt::trans_field_ptr(bcx, repr, val, 0, i);
bcx = bind_irrefutable_pat(bcx, *elem, fldptr, binding_mode); bcx = bind_irrefutable_pat(bcx, *elem, fldptr, binding_mode);
} }


@ -249,7 +249,7 @@ fn generic_fields_of(cx: &mut CrateContext, r: &Repr, sizing: bool) -> ~[Type] {
let mut most_aligned = None; let mut most_aligned = None;
let mut largest_align = 0; let mut largest_align = 0;
let mut largest_size = 0; let mut largest_size = 0;
foreach st in sts.iter() { for st in sts.iter() {
if largest_size < st.size { if largest_size < st.size {
largest_size = st.size; largest_size = st.size;
} }
@ -545,7 +545,7 @@ fn build_const_struct(ccx: &mut CrateContext, st: &Struct, vals: &[ValueRef])
let mut offset = 0; let mut offset = 0;
let mut cfields = ~[]; let mut cfields = ~[];
foreach (i, &ty) in st.fields.iter().enumerate() { for (i, &ty) in st.fields.iter().enumerate() {
let llty = type_of::sizing_type_of(ccx, ty); let llty = type_of::sizing_type_of(ccx, ty);
let type_align = machine::llalign_of_min(ccx, llty) let type_align = machine::llalign_of_min(ccx, llty)
/*bad*/as u64; /*bad*/as u64;


@ -62,7 +62,7 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {
}; };
foreach c in cleanups.iter() { for c in cleanups.iter() {
revoke_clean(bcx, *c); revoke_clean(bcx, *c);
} }
cleanups.clear(); cleanups.clear();
@ -83,7 +83,7 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {
}; };
foreach c in cleanups.iter() { for c in cleanups.iter() {
revoke_clean(bcx, *c); revoke_clean(bcx, *c);
} }
@ -133,7 +133,7 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {
let op = PointerCast(bcx, aoutputs[0], val_ty(outputs[0]).ptr_to()); let op = PointerCast(bcx, aoutputs[0], val_ty(outputs[0]).ptr_to());
Store(bcx, r, op); Store(bcx, r, op);
} else { } else {
foreach (i, o) in aoutputs.iter().enumerate() { for (i, o) in aoutputs.iter().enumerate() {
let v = ExtractValue(bcx, r, i); let v = ExtractValue(bcx, r, i);
let op = PointerCast(bcx, *o, val_ty(outputs[i]).ptr_to()); let op = PointerCast(bcx, *o, val_ty(outputs[i]).ptr_to());
Store(bcx, v, op); Store(bcx, v, op);


@ -675,7 +675,7 @@ pub fn iter_structural_ty(cx: @mut Block, av: ValueRef, t: ty::t,
let tcx = cx.tcx(); let tcx = cx.tcx();
let mut cx = cx; let mut cx = cx;
foreach (i, &arg) in variant.args.iter().enumerate() { for (i, &arg) in variant.args.iter().enumerate() {
cx = f(cx, cx = f(cx,
adt::trans_field_ptr(cx, repr, av, variant.disr_val, i), adt::trans_field_ptr(cx, repr, av, variant.disr_val, i),
ty::subst_tps(tcx, tps, None, arg)); ty::subst_tps(tcx, tps, None, arg));
@ -688,7 +688,7 @@ pub fn iter_structural_ty(cx: @mut Block, av: ValueRef, t: ty::t,
ty::ty_struct(*) => { ty::ty_struct(*) => {
let repr = adt::represent_type(cx.ccx(), t); let repr = adt::represent_type(cx.ccx(), t);
do expr::with_field_tys(cx.tcx(), t, None) |discr, field_tys| { do expr::with_field_tys(cx.tcx(), t, None) |discr, field_tys| {
foreach (i, field_ty) in field_tys.iter().enumerate() { for (i, field_ty) in field_tys.iter().enumerate() {
let llfld_a = adt::trans_field_ptr(cx, repr, av, discr, i); let llfld_a = adt::trans_field_ptr(cx, repr, av, discr, i);
cx = f(cx, llfld_a, field_ty.mt.ty); cx = f(cx, llfld_a, field_ty.mt.ty);
} }
@ -701,7 +701,7 @@ pub fn iter_structural_ty(cx: @mut Block, av: ValueRef, t: ty::t,
} }
ty::ty_tup(ref args) => { ty::ty_tup(ref args) => {
let repr = adt::represent_type(cx.ccx(), t); let repr = adt::represent_type(cx.ccx(), t);
foreach (i, arg) in args.iter().enumerate() { for (i, arg) in args.iter().enumerate() {
let llfld_a = adt::trans_field_ptr(cx, repr, av, 0, i); let llfld_a = adt::trans_field_ptr(cx, repr, av, 0, i);
cx = f(cx, llfld_a, *arg); cx = f(cx, llfld_a, *arg);
} }
@ -729,7 +729,7 @@ pub fn iter_structural_ty(cx: @mut Block, av: ValueRef, t: ty::t,
n_variants); n_variants);
let next_cx = sub_block(cx, "enum-iter-next"); let next_cx = sub_block(cx, "enum-iter-next");
foreach variant in (*variants).iter() { for variant in (*variants).iter() {
let variant_cx = let variant_cx =
sub_block(cx, ~"enum-iter-variant-" + sub_block(cx, ~"enum-iter-variant-" +
uint::to_str(variant.disr_val)); uint::to_str(variant.disr_val));
@ -863,7 +863,7 @@ pub fn invoke(bcx: @mut Block, llfn: ValueRef, llargs: ~[ValueRef])
debug!("invoking %x at %x", debug!("invoking %x at %x",
::std::cast::transmute(llfn), ::std::cast::transmute(llfn),
::std::cast::transmute(bcx.llbb)); ::std::cast::transmute(bcx.llbb));
foreach &llarg in llargs.iter() { for &llarg in llargs.iter() {
debug!("arg: %x", ::std::cast::transmute(llarg)); debug!("arg: %x", ::std::cast::transmute(llarg));
} }
} }
@ -879,7 +879,7 @@ pub fn invoke(bcx: @mut Block, llfn: ValueRef, llargs: ~[ValueRef])
debug!("calling %x at %x", debug!("calling %x at %x",
::std::cast::transmute(llfn), ::std::cast::transmute(llfn),
::std::cast::transmute(bcx.llbb)); ::std::cast::transmute(bcx.llbb));
foreach &llarg in llargs.iter() { for &llarg in llargs.iter() {
debug!("arg: %x", ::std::cast::transmute(llarg)); debug!("arg: %x", ::std::cast::transmute(llarg));
} }
} }
@ -908,7 +908,7 @@ pub fn need_invoke(bcx: @mut Block) -> bool {
loop { loop {
cur_scope = match cur_scope { cur_scope = match cur_scope {
Some(inf) => { Some(inf) => {
foreach cleanup in inf.cleanups.iter() { for cleanup in inf.cleanups.iter() {
match *cleanup { match *cleanup {
clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) => { clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) => {
if cleanup_type == normal_exit_and_unwind { if cleanup_type == normal_exit_and_unwind {
@ -1171,7 +1171,7 @@ pub fn new_block(cx: @mut FunctionContext,
opt_node_info, opt_node_info,
cx); cx);
bcx.scope = scope; bcx.scope = scope;
foreach cx in parent.iter() { for cx in parent.iter() {
if cx.unreachable { if cx.unreachable {
Unreachable(bcx); Unreachable(bcx);
break; break;
@ -1261,7 +1261,7 @@ pub fn trans_block_cleanups_(bcx: @mut Block,
bcx.ccx().sess.opts.debugging_opts & session::no_landing_pads != 0; bcx.ccx().sess.opts.debugging_opts & session::no_landing_pads != 0;
if bcx.unreachable && !no_lpads { return bcx; } if bcx.unreachable && !no_lpads { return bcx; }
let mut bcx = bcx; let mut bcx = bcx;
foreach cu in cleanups.rev_iter() { for cu in cleanups.rev_iter() {
match *cu { match *cu {
clean(cfn, cleanup_type) | clean_temp(_, cfn, cleanup_type) => { clean(cfn, cleanup_type) | clean_temp(_, cfn, cleanup_type) => {
// Some types don't need to be cleaned up during // Some types don't need to be cleaned up during
@ -1304,7 +1304,7 @@ pub fn cleanup_and_leave(bcx: @mut Block,
let mut dest = None; let mut dest = None;
{ {
let r = (*inf).cleanup_paths.rev_iter().find_(|cp| cp.target == leave); let r = (*inf).cleanup_paths.rev_iter().find_(|cp| cp.target == leave);
foreach cp in r.iter() { for cp in r.iter() {
if cp.size == inf.cleanups.len() { if cp.size == inf.cleanups.len() {
Br(bcx, cp.dest); Br(bcx, cp.dest);
return; return;
@ -1326,7 +1326,7 @@ pub fn cleanup_and_leave(bcx: @mut Block,
bcx = trans_block_cleanups_(sub_cx, bcx = trans_block_cleanups_(sub_cx,
inf_cleanups, inf_cleanups,
is_lpad); is_lpad);
foreach &dest in dest.iter() { for &dest in dest.iter() {
Br(bcx, dest); Br(bcx, dest);
return; return;
} }
@ -1449,7 +1449,7 @@ pub fn with_scope_datumblock(bcx: @mut Block, opt_node_info: Option<NodeInfo>,
} }
pub fn block_locals(b: &ast::Block, it: &fn(@ast::Local)) { pub fn block_locals(b: &ast::Block, it: &fn(@ast::Local)) {
foreach s in b.stmts.iter() { for s in b.stmts.iter() {
match s.node { match s.node {
ast::stmt_decl(d, _) => { ast::stmt_decl(d, _) => {
match d.node { match d.node {
@ -1624,7 +1624,7 @@ pub fn new_fn_ctxt_w_id(ccx: @mut CrateContext,
opt_node_info: Option<NodeInfo>, opt_node_info: Option<NodeInfo>,
sp: Option<span>) sp: Option<span>)
-> @mut FunctionContext { -> @mut FunctionContext {
foreach p in param_substs.iter() { p.validate(); } for p in param_substs.iter() { p.validate(); }
debug!("new_fn_ctxt_w_id(path=%s, id=%?, \ debug!("new_fn_ctxt_w_id(path=%s, id=%?, \
param_substs=%s)", param_substs=%s)",
@ -1769,7 +1769,7 @@ pub fn copy_args_to_allocas(fcx: @mut FunctionContext,
_ => {} _ => {}
} }
foreach arg_n in range(0u, arg_tys.len()) { for arg_n in range(0u, arg_tys.len()) {
let arg_ty = arg_tys[arg_n]; let arg_ty = arg_tys[arg_n];
let raw_llarg = raw_llargs[arg_n]; let raw_llarg = raw_llargs[arg_n];
@ -1901,7 +1901,7 @@ pub fn trans_closure(ccx: @mut CrateContext,
// Put return block after all other blocks. // Put return block after all other blocks.
// This somewhat improves single-stepping experience in debugger. // This somewhat improves single-stepping experience in debugger.
unsafe { unsafe {
foreach &llreturn in fcx.llreturn.iter() { for &llreturn in fcx.llreturn.iter() {
llvm::LLVMMoveBasicBlockAfter(llreturn, bcx.llbb); llvm::LLVMMoveBasicBlockAfter(llreturn, bcx.llbb);
} }
} }
@ -1965,7 +1965,7 @@ fn insert_synthetic_type_entries(bcx: @mut Block,
*/ */
let tcx = bcx.tcx(); let tcx = bcx.tcx();
foreach i in range(0u, fn_args.len()) { for i in range(0u, fn_args.len()) {
debug!("setting type of argument %u (pat node %d) to %s", debug!("setting type of argument %u (pat node %d) to %s",
i, fn_args[i].pat.id, bcx.ty_to_str(arg_tys[i])); i, fn_args[i].pat.id, bcx.ty_to_str(arg_tys[i]));
@ -2090,7 +2090,7 @@ pub fn trans_enum_variant_or_tuple_like_struct<A:IdAndTy>(
let repr = adt::represent_type(ccx, result_ty); let repr = adt::represent_type(ccx, result_ty);
adt::trans_start_init(bcx, repr, fcx.llretptr.get(), disr); adt::trans_start_init(bcx, repr, fcx.llretptr.get(), disr);
foreach (i, fn_arg) in fn_args.iter().enumerate() { for (i, fn_arg) in fn_args.iter().enumerate() {
let lldestptr = adt::trans_field_ptr(bcx, let lldestptr = adt::trans_field_ptr(bcx,
repr, repr,
fcx.llretptr.get(), fcx.llretptr.get(),
@ -2106,7 +2106,7 @@ pub fn trans_enum_variant_or_tuple_like_struct<A:IdAndTy>(
pub fn trans_enum_def(ccx: @mut CrateContext, enum_definition: &ast::enum_def, pub fn trans_enum_def(ccx: @mut CrateContext, enum_definition: &ast::enum_def,
id: ast::NodeId, vi: @~[@ty::VariantInfo], id: ast::NodeId, vi: @~[@ty::VariantInfo],
i: &mut uint) { i: &mut uint) {
foreach variant in enum_definition.variants.iter() { for variant in enum_definition.variants.iter() {
let disr_val = vi[*i].disr_val; let disr_val = vi[*i].disr_val;
*i += 1; *i += 1;
@ -2156,7 +2156,7 @@ pub fn trans_item(ccx: @mut CrateContext, item: &ast::item) {
item.id, item.id,
item.attrs); item.attrs);
} else { } else {
foreach stmt in body.stmts.iter() { for stmt in body.stmts.iter() {
match stmt.node { match stmt.node {
ast::stmt_decl(@codemap::spanned { node: ast::decl_item(i), ast::stmt_decl(@codemap::spanned { node: ast::decl_item(i),
_ }, _) => { _ }, _) => {
@ -2189,7 +2189,7 @@ pub fn trans_item(ccx: @mut CrateContext, item: &ast::item) {
consts::trans_const(ccx, m, item.id); consts::trans_const(ccx, m, item.id);
// Do static_assert checking. It can't really be done much earlier because we need to get // Do static_assert checking. It can't really be done much earlier because we need to get
// the value of the bool out of LLVM // the value of the bool out of LLVM
foreach attr in item.attrs.iter() { for attr in item.attrs.iter() {
if "static_assert" == attr.name() { if "static_assert" == attr.name() {
if m == ast::m_mutbl { if m == ast::m_mutbl {
ccx.sess.span_fatal(expr.span, ccx.sess.span_fatal(expr.span,
@ -2237,7 +2237,7 @@ pub fn trans_struct_def(ccx: @mut CrateContext, struct_def: @ast::struct_def) {
// and control visibility. // and control visibility.
pub fn trans_mod(ccx: @mut CrateContext, m: &ast::_mod) { pub fn trans_mod(ccx: @mut CrateContext, m: &ast::_mod) {
let _icx = push_ctxt("trans_mod"); let _icx = push_ctxt("trans_mod");
foreach item in m.items.iter() { for item in m.items.iter() {
trans_item(ccx, *item); trans_item(ccx, *item);
} }
} }
@ -2625,7 +2625,7 @@ pub fn trans_constant(ccx: &mut CrateContext, it: @ast::item) {
node: it.id }); node: it.id });
let mut i = 0; let mut i = 0;
let path = item_path(ccx, &it.id); let path = item_path(ccx, &it.id);
foreach variant in (*enum_definition).variants.iter() { for variant in (*enum_definition).variants.iter() {
let p = vec::append(path.clone(), [ let p = vec::append(path.clone(), [
path_name(variant.node.name), path_name(variant.node.name),
path_name(special_idents::descrim) path_name(special_idents::descrim)
@ -2801,11 +2801,11 @@ pub fn create_module_map(ccx: &mut CrateContext) -> ValueRef {
// like the multiple borrows. At least, it doesn't // like the multiple borrows. At least, it doesn't
// like them on the current snapshot. (2013-06-14) // like them on the current snapshot. (2013-06-14)
let mut keys = ~[]; let mut keys = ~[];
foreach (k, _) in ccx.module_data.iter() { for (k, _) in ccx.module_data.iter() {
keys.push(k.to_managed()); keys.push(k.to_managed());
} }
foreach key in keys.iter() { for key in keys.iter() {
let val = *ccx.module_data.find_equiv(key).get(); let val = *ccx.module_data.find_equiv(key).get();
let s_const = C_cstr(ccx, *key); let s_const = C_cstr(ccx, *key);
let s_ptr = p2i(ccx, s_const); let s_ptr = p2i(ccx, s_const);
@ -3032,7 +3032,7 @@ pub fn trans_crate(sess: session::Session,
do sort::quick_sort(ccx.stats.fn_stats) |&(_, _, insns_a), &(_, _, insns_b)| { do sort::quick_sort(ccx.stats.fn_stats) |&(_, _, insns_a), &(_, _, insns_b)| {
insns_a > insns_b insns_a > insns_b
} }
foreach tuple in ccx.stats.fn_stats.iter() { for tuple in ccx.stats.fn_stats.iter() {
match *tuple { match *tuple {
(ref name, ms, insns) => { (ref name, ms, insns) => {
printfln!("%u insns, %u ms, %s", insns, ms, *name); printfln!("%u insns, %u ms, %s", insns, ms, *name);
@ -3041,7 +3041,7 @@ pub fn trans_crate(sess: session::Session,
} }
} }
if ccx.sess.count_llvm_insns() { if ccx.sess.count_llvm_insns() {
foreach (k, v) in ccx.stats.llvm_insns.iter() { for (k, v) in ccx.stats.llvm_insns.iter() {
printfln!("%-7u %s", *v, *k); printfln!("%-7u %s", *v, *k);
} }
} }


@ -516,7 +516,7 @@ impl Builder {
// we care about. // we care about.
if ixs.len() < 16 { if ixs.len() < 16 {
let mut small_vec = [ C_i32(0), ..16 ]; let mut small_vec = [ C_i32(0), ..16 ];
foreach (small_vec_e, &ix) in small_vec.mut_iter().zip(ixs.iter()) { for (small_vec_e, &ix) in small_vec.mut_iter().zip(ixs.iter()) {
*small_vec_e = C_i32(ix as i32); *small_vec_e = C_i32(ix as i32);
} }
self.inbounds_gep(base, small_vec.slice(0, ixs.len())) self.inbounds_gep(base, small_vec.slice(0, ixs.len()))


@ -42,7 +42,7 @@ impl FnType {
let fnty = Type::func(atys, &rty); let fnty = Type::func(atys, &rty);
let llfn = decl(fnty); let llfn = decl(fnty);
foreach (i, a) in self.attrs.iter().enumerate() { for (i, a) in self.attrs.iter().enumerate() {
match *a { match *a {
option::Some(attr) => { option::Some(attr) => {
unsafe { unsafe {
@ -92,7 +92,7 @@ impl FnType {
pub fn build_shim_ret(&self, bcx: @mut Block, arg_tys: &[Type], ret_def: bool, pub fn build_shim_ret(&self, bcx: @mut Block, arg_tys: &[Type], ret_def: bool,
llargbundle: ValueRef, llretval: ValueRef) { llargbundle: ValueRef, llretval: ValueRef) {
foreach (i, a) in self.attrs.iter().enumerate() { for (i, a) in self.attrs.iter().enumerate() {
match *a { match *a {
option::Some(attr) => { option::Some(attr) => {
unsafe { unsafe {


@ -133,7 +133,7 @@ impl ABIInfo for ARM_ABIInfo {
ret_def: bool) -> FnType { ret_def: bool) -> FnType {
let mut arg_tys = ~[]; let mut arg_tys = ~[];
let mut attrs = ~[]; let mut attrs = ~[];
foreach &aty in atys.iter() { for &aty in atys.iter() {
let (ty, attr) = classify_arg_ty(aty); let (ty, attr) = classify_arg_ty(aty);
arg_tys.push(ty); arg_tys.push(ty);
attrs.push(attr); attrs.push(attr);


@ -190,7 +190,7 @@ impl ABIInfo for MIPS_ABIInfo {
let mut attrs = ~[]; let mut attrs = ~[];
let mut offset = if sret { 4 } else { 0 }; let mut offset = if sret { 4 } else { 0 };
foreach aty in atys.iter() { for aty in atys.iter() {
let (ty, attr) = classify_arg_ty(*aty, &mut offset); let (ty, attr) = classify_arg_ty(*aty, &mut offset);
arg_tys.push(ty); arg_tys.push(ty);
attrs.push(attr); attrs.push(attr);


@ -145,7 +145,7 @@ fn classify_ty(ty: Type) -> ~[RegClass] {
} }
fn all_mem(cls: &mut [RegClass]) { fn all_mem(cls: &mut [RegClass]) {
foreach i in range(0u, cls.len()) { for i in range(0u, cls.len()) {
cls[i] = Memory; cls[i] = Memory;
} }
} }
@ -179,7 +179,7 @@ fn classify_ty(ty: Type) -> ~[RegClass] {
cls: &mut [RegClass], i: uint, cls: &mut [RegClass], i: uint,
off: uint) { off: uint) {
let mut field_off = off; let mut field_off = off;
foreach ty in tys.iter() { for ty in tys.iter() {
field_off = align(field_off, *ty); field_off = align(field_off, *ty);
classify(*ty, cls, i, field_off); classify(*ty, cls, i, field_off);
field_off += ty_size(*ty); field_off += ty_size(*ty);
@ -294,7 +294,7 @@ fn classify_ty(ty: Type) -> ~[RegClass] {
fn llreg_ty(cls: &[RegClass]) -> Type { fn llreg_ty(cls: &[RegClass]) -> Type {
fn llvec_len(cls: &[RegClass]) -> uint { fn llvec_len(cls: &[RegClass]) -> uint {
let mut len = 1u; let mut len = 1u;
foreach c in cls.iter() { for c in cls.iter() {
if *c != SSEUp { if *c != SSEUp {
break; break;
} }
@ -355,7 +355,7 @@ fn x86_64_tys(atys: &[Type],
let mut arg_tys = ~[]; let mut arg_tys = ~[];
let mut attrs = ~[]; let mut attrs = ~[];
foreach t in atys.iter() { for t in atys.iter() {
let (ty, attr) = x86_64_ty(*t, |cls| cls.is_pass_byval(), ByValAttribute); let (ty, attr) = x86_64_ty(*t, |cls| cls.is_pass_byval(), ByValAttribute);
arg_tys.push(ty); arg_tys.push(ty);
attrs.push(attr); attrs.push(attr);


@ -619,7 +619,7 @@ pub fn trans_call_inner(in_cx: @mut Block,
// the cleanup for the self argument // the cleanup for the self argument
match callee.data { match callee.data {
Method(d) => { Method(d) => {
foreach &v in d.temp_cleanup.iter() { for &v in d.temp_cleanup.iter() {
revoke_clean(bcx, v); revoke_clean(bcx, v);
} }
} }
@ -629,7 +629,7 @@ pub fn trans_call_inner(in_cx: @mut Block,
// Uncomment this to debug calls. // Uncomment this to debug calls.
/* /*
printfln!("calling: %s", bcx.val_to_str(llfn)); printfln!("calling: %s", bcx.val_to_str(llfn));
foreach llarg in llargs.iter() { for llarg in llargs.iter() {
printfln!("arg: %s", bcx.val_to_str(*llarg)); printfln!("arg: %s", bcx.val_to_str(*llarg));
} }
io::println("---"); io::println("---");
@ -672,7 +672,7 @@ pub fn trans_call_inner(in_cx: @mut Block,
bcx = do with_cond(bcx, ret_flag_result) |bcx| { bcx = do with_cond(bcx, ret_flag_result) |bcx| {
{ {
let r = bcx.fcx.loop_ret; let r = bcx.fcx.loop_ret;
foreach &(flagptr, _) in r.iter() { for &(flagptr, _) in r.iter() {
Store(bcx, C_bool(true), flagptr); Store(bcx, C_bool(true), flagptr);
Store(bcx, C_bool(false), bcx.fcx.llretptr.get()); Store(bcx, C_bool(false), bcx.fcx.llretptr.get());
} }
@ -729,7 +729,7 @@ pub fn trans_args(cx: @mut Block,
match args { match args {
ArgExprs(arg_exprs) => { ArgExprs(arg_exprs) => {
let last = arg_exprs.len() - 1u; let last = arg_exprs.len() - 1u;
foreach (i, arg_expr) in arg_exprs.iter().enumerate() { for (i, arg_expr) in arg_exprs.iter().enumerate() {
let arg_val = unpack_result!(bcx, { let arg_val = unpack_result!(bcx, {
trans_arg_expr(bcx, trans_arg_expr(bcx,
arg_tys[i], arg_tys[i],
@ -750,7 +750,7 @@ pub fn trans_args(cx: @mut Block,
// now that all arguments have been successfully built, we can revoke any // now that all arguments have been successfully built, we can revoke any
// temporary cleanups, as they are only needed if argument construction // temporary cleanups, as they are only needed if argument construction
// should fail (for example, cleanup of copy mode args). // should fail (for example, cleanup of copy mode args).
foreach c in temp_cleanups.iter() { for c in temp_cleanups.iter() {
revoke_clean(bcx, *c) revoke_clean(bcx, *c)
} }


@ -228,7 +228,7 @@ pub fn store_environment(bcx: @mut Block,
// Copy expr values into boxed bindings. // Copy expr values into boxed bindings.
let mut bcx = bcx; let mut bcx = bcx;
foreach (i, bv) in bound_values.iter().enumerate() { for (i, bv) in bound_values.iter().enumerate() {
debug!("Copy %s into closure", bv.to_str(ccx)); debug!("Copy %s into closure", bv.to_str(ccx));
if ccx.sess.asm_comments() { if ccx.sess.asm_comments() {
@ -268,7 +268,7 @@ pub fn build_closure(bcx0: @mut Block,
// Package up the captured upvars // Package up the captured upvars
let mut env_vals = ~[]; let mut env_vals = ~[];
foreach cap_var in cap_vars.iter() { for cap_var in cap_vars.iter() {
debug!("Building closure: captured variable %?", *cap_var); debug!("Building closure: captured variable %?", *cap_var);
let datum = expr::trans_local_var(bcx, cap_var.def); let datum = expr::trans_local_var(bcx, cap_var.def);
match cap_var.mode { match cap_var.mode {
@ -290,7 +290,7 @@ pub fn build_closure(bcx0: @mut Block,
// If this is a `for` loop body, add two special environment // If this is a `for` loop body, add two special environment
// variables: // variables:
foreach flagptr in include_ret_handle.iter() { for flagptr in include_ret_handle.iter() {
// Flag indicating we have returned (a by-ref bool): // Flag indicating we have returned (a by-ref bool):
let flag_datum = Datum {val: *flagptr, ty: ty::mk_bool(), let flag_datum = Datum {val: *flagptr, ty: ty::mk_bool(),
mode: ByRef(ZeroMem)}; mode: ByRef(ZeroMem)};
@ -337,7 +337,7 @@ pub fn load_environment(fcx: @mut FunctionContext,
// Populate the upvars from the environment. // Populate the upvars from the environment.
let mut i = 0u; let mut i = 0u;
foreach cap_var in cap_vars.iter() { for cap_var in cap_vars.iter() {
let mut upvarptr = GEPi(bcx, llcdata, [0u, i]); let mut upvarptr = GEPi(bcx, llcdata, [0u, i]);
match sigil { match sigil {
ast::BorrowedSigil => { upvarptr = Load(bcx, upvarptr); } ast::BorrowedSigil => { upvarptr = Load(bcx, upvarptr); }


@ -138,8 +138,8 @@ pub struct param_substs {
impl param_substs { impl param_substs {
pub fn validate(&self) { pub fn validate(&self) {
foreach t in self.tys.iter() { assert!(!ty::type_needs_infer(*t)); } for t in self.tys.iter() { assert!(!ty::type_needs_infer(*t)); }
foreach t in self.self_ty.iter() { assert!(!ty::type_needs_infer(*t)); } for t in self.self_ty.iter() { assert!(!ty::type_needs_infer(*t)); }
} }
} }
@ -437,7 +437,7 @@ pub fn revoke_clean(cx: @mut Block, val: ValueRef) {
clean_temp(v, _, _) if v == val => true, clean_temp(v, _, _) if v == val => true,
_ => false _ => false
}); });
foreach i in cleanup_pos.iter() { for i in cleanup_pos.iter() {
scope_info.cleanups = scope_info.cleanups =
vec::append(scope_info.cleanups.slice(0u, *i).to_owned(), vec::append(scope_info.cleanups.slice(0u, *i).to_owned(),
scope_info.cleanups.slice(*i + 1u, scope_info.cleanups.slice(*i + 1u,
@ -943,7 +943,7 @@ pub fn align_to(cx: @mut Block, off: ValueRef, align: ValueRef) -> ValueRef {
pub fn path_str(sess: session::Session, p: &[path_elt]) -> ~str { pub fn path_str(sess: session::Session, p: &[path_elt]) -> ~str {
let mut r = ~""; let mut r = ~"";
let mut first = true; let mut first = true;
foreach e in p.iter() { for e in p.iter() {
match *e { match *e {
ast_map::path_name(s) | ast_map::path_mod(s) => { ast_map::path_name(s) | ast_map::path_mod(s) => {
if first { if first {


@ -36,7 +36,7 @@ use syntax::codemap::span;
pub fn trans_block(bcx: @mut Block, b: &ast::Block, dest: expr::Dest) -> @mut Block { pub fn trans_block(bcx: @mut Block, b: &ast::Block, dest: expr::Dest) -> @mut Block {
let _icx = push_ctxt("trans_block"); let _icx = push_ctxt("trans_block");
let mut bcx = bcx; let mut bcx = bcx;
foreach s in b.stmts.iter() { for s in b.stmts.iter() {
debuginfo::update_source_pos(bcx, b.span); debuginfo::update_source_pos(bcx, b.span);
bcx = trans_stmt(bcx, *s); bcx = trans_stmt(bcx, *s);
} }
@ -144,7 +144,7 @@ pub fn trans_if(bcx: @mut Block,
pub fn join_blocks(parent_bcx: @mut Block, in_cxs: &[@mut Block]) -> @mut Block { pub fn join_blocks(parent_bcx: @mut Block, in_cxs: &[@mut Block]) -> @mut Block {
let out = sub_block(parent_bcx, "join"); let out = sub_block(parent_bcx, "join");
let mut reachable = false; let mut reachable = false;
foreach bcx in in_cxs.iter() { for bcx in in_cxs.iter() {
if !bcx.unreachable { if !bcx.unreachable {
Br(*bcx, out.llbb); Br(*bcx, out.llbb);
reachable = true; reachable = true;
@ -223,7 +223,7 @@ pub fn trans_log(log_ex: &ast::expr,
let (modpath, modname) = { let (modpath, modname) = {
let path = &mut bcx.fcx.path; let path = &mut bcx.fcx.path;
let mut modpath = ~[path_mod(ccx.sess.ident_of(ccx.link_meta.name))]; let mut modpath = ~[path_mod(ccx.sess.ident_of(ccx.link_meta.name))];
foreach e in path.iter() { for e in path.iter() {
match *e { match *e {
path_mod(_) => { modpath.push(*e) } path_mod(_) => { modpath.push(*e) }
_ => {} _ => {}


@ -1145,7 +1145,7 @@ fn trans_rec_or_struct(bcx: @mut Block,
let optbase = match base { let optbase = match base {
Some(base_expr) => { Some(base_expr) => {
let mut leftovers = ~[]; let mut leftovers = ~[];
foreach (i, b) in need_base.iter().enumerate() { for (i, b) in need_base.iter().enumerate() {
if *b { if *b {
leftovers.push((i, field_tys[i].mt.ty)) leftovers.push((i, field_tys[i].mt.ty))
} }
@ -1199,10 +1199,10 @@ fn trans_adt(bcx: @mut Block, repr: &adt::Repr, discr: uint,
let mut bcx = bcx; let mut bcx = bcx;
let addr = match dest { let addr = match dest {
Ignore => { Ignore => {
foreach &(_i, e) in fields.iter() { for &(_i, e) in fields.iter() {
bcx = trans_into(bcx, e, Ignore); bcx = trans_into(bcx, e, Ignore);
} }
foreach sbi in optbase.iter() { for sbi in optbase.iter() {
// FIXME #7261: this moves entire base, not just certain fields // FIXME #7261: this moves entire base, not just certain fields
bcx = trans_into(bcx, sbi.expr, Ignore); bcx = trans_into(bcx, sbi.expr, Ignore);
} }
@ -1212,18 +1212,18 @@ fn trans_adt(bcx: @mut Block, repr: &adt::Repr, discr: uint,
}; };
let mut temp_cleanups = ~[]; let mut temp_cleanups = ~[];
adt::trans_start_init(bcx, repr, addr, discr); adt::trans_start_init(bcx, repr, addr, discr);
foreach &(i, e) in fields.iter() { for &(i, e) in fields.iter() {
let dest = adt::trans_field_ptr(bcx, repr, addr, discr, i); let dest = adt::trans_field_ptr(bcx, repr, addr, discr, i);
let e_ty = expr_ty(bcx, e); let e_ty = expr_ty(bcx, e);
bcx = trans_into(bcx, e, SaveIn(dest)); bcx = trans_into(bcx, e, SaveIn(dest));
add_clean_temp_mem(bcx, dest, e_ty); add_clean_temp_mem(bcx, dest, e_ty);
temp_cleanups.push(dest); temp_cleanups.push(dest);
} }
foreach base in optbase.iter() { for base in optbase.iter() {
// FIXME #6573: is it sound to use the destination's repr on the base? // FIXME #6573: is it sound to use the destination's repr on the base?
// And, would it ever be reasonable to be here with discr != 0? // And, would it ever be reasonable to be here with discr != 0?
let base_datum = unpack_datum!(bcx, trans_to_datum(bcx, base.expr)); let base_datum = unpack_datum!(bcx, trans_to_datum(bcx, base.expr));
foreach &(i, t) in base.fields.iter() { for &(i, t) in base.fields.iter() {
let datum = do base_datum.get_element(bcx, t, ZeroMem) |srcval| { let datum = do base_datum.get_element(bcx, t, ZeroMem) |srcval| {
adt::trans_field_ptr(bcx, repr, srcval, discr, i) adt::trans_field_ptr(bcx, repr, srcval, discr, i)
}; };
@ -1232,7 +1232,7 @@ fn trans_adt(bcx: @mut Block, repr: &adt::Repr, discr: uint,
} }
} }
foreach cleanup in temp_cleanups.iter() { for cleanup in temp_cleanups.iter() {
revoke_clean(bcx, *cleanup); revoke_clean(bcx, *cleanup);
} }
return bcx; return bcx;


@ -286,7 +286,7 @@ pub fn trans_foreign_mod(ccx: @mut CrateContext,
Some(abi) => abi, Some(abi) => abi,
}; };
foreach &foreign_item in foreign_mod.items.iter() { for &foreign_item in foreign_mod.items.iter() {
match foreign_item.node { match foreign_item.node {
ast::foreign_item_fn(*) => { ast::foreign_item_fn(*) => {
let id = foreign_item.id; let id = foreign_item.id;
@ -498,7 +498,7 @@ pub fn trans_foreign_mod(ccx: @mut CrateContext,
let _icx = push_ctxt("foreign::wrap::build_args"); let _icx = push_ctxt("foreign::wrap::build_args");
let ccx = bcx.ccx(); let ccx = bcx.ccx();
let n = tys.llsig.llarg_tys.len(); let n = tys.llsig.llarg_tys.len();
foreach i in range(0u, n) { for i in range(0u, n) {
let arg_i = bcx.fcx.arg_pos(i); let arg_i = bcx.fcx.arg_pos(i);
let mut llargval = get_param(llwrapfn, arg_i); let mut llargval = get_param(llwrapfn, arg_i);
@ -512,7 +512,7 @@ pub fn trans_foreign_mod(ccx: @mut CrateContext,
store_inbounds(bcx, llargval, llargbundle, [0u, i]); store_inbounds(bcx, llargval, llargbundle, [0u, i]);
} }
foreach &retptr in bcx.fcx.llretptr.iter() { for &retptr in bcx.fcx.llretptr.iter() {
store_inbounds(bcx, retptr, llargbundle, [0u, n]); store_inbounds(bcx, retptr, llargbundle, [0u, n]);
} }
} }
@ -522,7 +522,7 @@ pub fn trans_foreign_mod(ccx: @mut CrateContext,
llargbundle: ValueRef) { llargbundle: ValueRef) {
let _icx = push_ctxt("foreign::wrap::build_ret"); let _icx = push_ctxt("foreign::wrap::build_ret");
let arg_count = shim_types.fn_sig.inputs.len(); let arg_count = shim_types.fn_sig.inputs.len();
foreach &retptr in bcx.fcx.llretptr.iter() { for &retptr in bcx.fcx.llretptr.iter() {
let llretptr = load_inbounds(bcx, llargbundle, [0, arg_count]); let llretptr = load_inbounds(bcx, llargbundle, [0, arg_count]);
Store(bcx, Load(bcx, llretptr), retptr); Store(bcx, Load(bcx, llretptr), retptr);
} }
@ -543,7 +543,7 @@ pub fn trans_intrinsic(ccx: @mut CrateContext,
assert!(num_args <= 4); assert!(num_args <= 4);
let mut args = [0 as ValueRef, ..4]; let mut args = [0 as ValueRef, ..4];
let first_real_arg = bcx.fcx.arg_pos(0u); let first_real_arg = bcx.fcx.arg_pos(0u);
foreach i in range(0u, num_args) { for i in range(0u, num_args) {
args[i] = get_param(bcx.fcx.llfn, first_real_arg + i); args[i] = get_param(bcx.fcx.llfn, first_real_arg + i);
} }
let llfn = bcx.ccx().intrinsics.get_copy(&name); let llfn = bcx.ccx().intrinsics.get_copy(&name);


@ -426,7 +426,7 @@ pub fn trans_struct_drop_flag(bcx: @mut Block, t: ty::t, v0: ValueRef, dtor_did:
// Drop the fields // Drop the fields
let field_tys = ty::struct_fields(bcx.tcx(), class_did, substs); let field_tys = ty::struct_fields(bcx.tcx(), class_did, substs);
foreach (i, fld) in field_tys.iter().enumerate() { for (i, fld) in field_tys.iter().enumerate() {
let llfld_a = adt::trans_field_ptr(bcx, repr, v0, 0, i); let llfld_a = adt::trans_field_ptr(bcx, repr, v0, 0, i);
bcx = drop_ty(bcx, llfld_a, fld.mt.ty); bcx = drop_ty(bcx, llfld_a, fld.mt.ty);
} }
@ -461,7 +461,7 @@ pub fn trans_struct_drop(mut bcx: @mut Block, t: ty::t, v0: ValueRef, dtor_did:
// Drop the fields // Drop the fields
let field_tys = ty::struct_fields(bcx.tcx(), class_did, substs); let field_tys = ty::struct_fields(bcx.tcx(), class_did, substs);
foreach (i, fld) in field_tys.iter().enumerate() { for (i, fld) in field_tys.iter().enumerate() {
let llfld_a = adt::trans_field_ptr(bcx, repr, v0, 0, i); let llfld_a = adt::trans_field_ptr(bcx, repr, v0, 0, i);
bcx = drop_ty(bcx, llfld_a, fld.mt.ty); bcx = drop_ty(bcx, llfld_a, fld.mt.ty);
} }
@ -736,7 +736,7 @@ pub fn emit_tydescs(ccx: &mut CrateContext) {
ccx.finished_tydescs = true; ccx.finished_tydescs = true;
let glue_fn_ty = Type::generic_glue_fn(ccx).ptr_to(); let glue_fn_ty = Type::generic_glue_fn(ccx).ptr_to();
let tyds = &mut ccx.tydescs; let tyds = &mut ccx.tydescs;
foreach (_, &val) in tyds.iter() { for (_, &val) in tyds.iter() {
let ti = val; let ti = val;
// Each of the glue functions needs to be cast to a generic type // Each of the glue functions needs to be cast to a generic type


@ -68,7 +68,7 @@ pub fn maybe_instantiate_inline(ccx: @mut CrateContext, fn_id: ast::def_id)
ast::item_enum(_, _) => { ast::item_enum(_, _) => {
let vs_here = ty::enum_variants(ccx.tcx, local_def(item.id)); let vs_here = ty::enum_variants(ccx.tcx, local_def(item.id));
let vs_there = ty::enum_variants(ccx.tcx, parent_id); let vs_there = ty::enum_variants(ccx.tcx, parent_id);
foreach (here, there) in vs_here.iter().zip(vs_there.iter()) { for (here, there) in vs_here.iter().zip(vs_there.iter()) {
if there.id == fn_id { my_id = here.id.node; } if there.id == fn_id { my_id = here.id.node; }
ccx.external.insert(there.id, Some(here.id.node)); ccx.external.insert(there.id, Some(here.id.node));
} }


@ -57,7 +57,7 @@ pub fn trans_impl(ccx: @mut CrateContext,
if !generics.ty_params.is_empty() { return; } if !generics.ty_params.is_empty() { return; }
let sub_path = vec::append_one(path, path_name(name)); let sub_path = vec::append_one(path, path_name(name));
foreach method in methods.iter() { for method in methods.iter() {
if method.generics.ty_params.len() == 0u { if method.generics.ty_params.len() == 0u {
let llfn = get_item_val(ccx, method.id); let llfn = get_item_val(ccx, method.id);
let path = vec::append_one(sub_path.clone(), let path = vec::append_one(sub_path.clone(),
@ -599,7 +599,7 @@ pub fn make_vtable(ccx: &mut CrateContext,
let _icx = push_ctxt("impl::make_vtable"); let _icx = push_ctxt("impl::make_vtable");
let mut components = ~[ tydesc.tydesc ]; let mut components = ~[ tydesc.tydesc ];
foreach &ptr in ptrs.iter() { for &ptr in ptrs.iter() {
components.push(ptr) components.push(ptr)
} }


@ -75,8 +75,8 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
self_vtables: self_vtables self_vtables: self_vtables
}; };
foreach s in real_substs.tps.iter() { assert!(!ty::type_has_params(*s)); } for s in real_substs.tps.iter() { assert!(!ty::type_has_params(*s)); }
foreach s in psubsts.tys.iter() { assert!(!ty::type_has_params(*s)); } for s in psubsts.tys.iter() { assert!(!ty::type_has_params(*s)); }
let param_uses = type_use::type_uses_for(ccx, fn_id, psubsts.tys.len()); let param_uses = type_use::type_uses_for(ccx, fn_id, psubsts.tys.len());

Some files were not shown because too many files have changed in this diff.