auto merge of #7274 : thestinger/rust/size_hint, r=huonw

I ran into a weird lifetime bug that blocked updating the `collect` method to use `FromIterator`, but everything here works fine.
This commit is contained in:
bors 2013-06-22 23:07:55 -07:00
commit 3b126e4d6d
182 changed files with 886 additions and 884 deletions

View File

@ -882,11 +882,11 @@ the function name.
~~~~ {.xfail-test}
fn iter<T>(seq: &[T], f: &fn(T)) {
for seq.each |elt| { f(elt); }
for seq.iter().advance |elt| { f(elt); }
}
fn map<T, U>(seq: &[T], f: &fn(T) -> U) -> ~[U] {
let mut acc = ~[];
for seq.each |elt| { acc.push(f(elt)); }
for seq.iter().advance |elt| { acc.push(f(elt)); }
acc
}
~~~~
@ -2329,7 +2329,7 @@ An example of a for loop over the contents of a vector:
let v: &[foo] = &[a, b, c];
for v.each |e| {
for v.iter().advance |e| {
bar(*e);
}
~~~~

View File

@ -1396,7 +1396,7 @@ assert!(!crayons.is_empty());
// Iterate over a vector, obtaining a pointer to each element
// (`for` is explained in the next section)
for crayons.each |crayon| {
for crayons.iter().advance |crayon| {
let delicious_crayon_wax = unwrap_crayon(*crayon);
eat_crayon_wax(delicious_crayon_wax);
}
@ -2119,7 +2119,7 @@ generic types.
~~~~
# trait Printable { fn print(&self); }
fn print_all<T: Printable>(printable_things: ~[T]) {
for printable_things.each |thing| {
for printable_things.iter().advance |thing| {
thing.print();
}
}
@ -2165,7 +2165,7 @@ However, consider this function:
trait Drawable { fn draw(&self); }
fn draw_all<T: Drawable>(shapes: ~[T]) {
for shapes.each |shape| { shape.draw(); }
for shapes.iter().advance |shape| { shape.draw(); }
}
# let c: Circle = new_circle();
# draw_all(~[c]);
@ -2180,7 +2180,7 @@ an _object_.
~~~~
# trait Drawable { fn draw(&self); }
fn draw_all(shapes: &[@Drawable]) {
for shapes.each |shape| { shape.draw(); }
for shapes.iter().advance |shape| { shape.draw(); }
}
~~~~

View File

@ -208,7 +208,8 @@ pub fn make_tests(config: &config) -> ~[test::TestDescAndFn] {
debug!("making tests from %s",
config.src_base.to_str());
let mut tests = ~[];
for os::list_dir_path(&config.src_base).each |file| {
let dirs = os::list_dir_path(&config.src_base);
for dirs.iter().advance |file| {
let file = copy *file;
debug!("inspecting file %s", file.to_str());
if is_test(config, file) {
@ -230,11 +231,11 @@ pub fn is_test(config: &config, testfile: &Path) -> bool {
let mut valid = false;
for valid_extensions.each |ext| {
for valid_extensions.iter().advance |ext| {
if name.ends_with(*ext) { valid = true; }
}
for invalid_prefixes.each |pre| {
for invalid_prefixes.iter().advance |pre| {
if name.starts_with(*pre) { valid = false; }
}

View File

@ -327,7 +327,7 @@ fn check_error_patterns(props: &TestProps,
fatal_ProcRes(fmt!("error pattern '%s' not found!",
missing_patterns[0]), ProcRes);
} else {
for missing_patterns.each |pattern| {
for missing_patterns.iter().advance |pattern| {
error(fmt!("error pattern '%s' not found!", *pattern));
}
fatal_ProcRes(~"multiple error patterns not found", ProcRes);
@ -757,7 +757,7 @@ fn _arm_exec_compiled_test(config: &config, props: &TestProps,
runargs.push(fmt!("%s", config.adb_test_dir));
runargs.push(fmt!("%s", prog_short));
for args.args.each |tv| {
for args.args.iter().advance |tv| {
runargs.push(tv.to_owned());
}
@ -822,7 +822,8 @@ fn _dummy_exec_compiled_test(config: &config, props: &TestProps,
fn _arm_push_aux_shared_library(config: &config, testfile: &Path) {
let tstr = aux_output_dir_name(config, testfile).to_str();
for os::list_dir_path(&Path(tstr)).each |file| {
let dirs = os::list_dir_path(&Path(tstr));
for dirs.iter().advance |file| {
if (file.filetype() == Some(~".so")) {

View File

@ -517,6 +517,7 @@ mod tests {
use arc::*;
use core::vec;
use core::cell::Cell;
use core::comm;
use core::task;
@ -725,7 +726,7 @@ mod tests {
}
// Wait for children to pass their asserts
for children.each |r| {
for children.iter().advance |r| {
r.recv();
}
@ -790,7 +791,7 @@ mod tests {
assert_eq!(*state, 42);
*state = 31337;
// send to other readers
for reader_convos.each |x| {
for vec::each(reader_convos) |x| {
match *x {
(ref rc, _) => rc.send(()),
}
@ -799,7 +800,7 @@ mod tests {
let read_mode = arc.downgrade(write_mode);
do (&read_mode).read |state| {
// complete handshake with other readers
for reader_convos.each |x| {
for vec::each(reader_convos) |x| {
match *x {
(_, ref rp) => rp.recv(),
}

View File

@ -421,7 +421,7 @@ mod test {
fn make_file(path : &Path, contents: &[~str]) {
let file = io::file_writer(path, [io::Create, io::Truncate]).get();
for contents.each |&str| {
for contents.iter().advance |&str| {
file.write_str(str);
file.write_char('\n');
}

View File

@ -295,7 +295,7 @@ pub fn getopts(args: &[~str], opts: &[Opt]) -> Result {
}
}
let mut name_pos = 0;
for names.each() |nm| {
for names.iter().advance() |nm| {
name_pos += 1;
let optid = match find_opt(opts, copy *nm) {
Some(id) => id,
@ -373,7 +373,7 @@ pub fn opt_count(mm: &Matches, nm: &str) -> uint {
/// Returns true if any of several options were matched
pub fn opts_present(mm: &Matches, names: &[~str]) -> bool {
for names.each |nm| {
for names.iter().advance |nm| {
match find_opt(mm.opts, mkname(*nm)) {
Some(id) if !mm.vals[id].is_empty() => return true,
_ => (),
@ -400,7 +400,7 @@ pub fn opt_str(mm: &Matches, nm: &str) -> ~str {
* option took an argument
*/
pub fn opts_str(mm: &Matches, names: &[~str]) -> ~str {
for names.each |nm| {
for names.iter().advance |nm| {
match opt_val(mm, *nm) {
Val(ref s) => return copy *s,
_ => ()

View File

@ -1385,7 +1385,7 @@ mod tests {
fn mk_object(items: &[(~str, Json)]) -> Json {
let mut d = ~HashMap::new();
for items.each |item| {
for items.iter().advance |item| {
match *item {
(ref key, ref value) => { d.insert(copy *key, copy *value); },
}

View File

@ -426,7 +426,7 @@ mod test {
let results = result::unwrap(ga_result);
debug!("test_get_addr: Number of results for %s: %?",
localhost_name, results.len());
for results.each |r| {
for results.iter().advance |r| {
let ipv_prefix = match *r {
Ipv4(_) => ~"IPv4",
Ipv6(_) => ~"IPv6"

View File

@ -210,7 +210,7 @@ pub fn encode_form_urlencoded(m: &HashMap<~str, ~[~str]>) -> ~str {
for m.each |key, values| {
let key = encode_plus(*key);
for values.each |value| {
for values.iter().advance |value| {
if first {
first = false;
} else {
@ -342,7 +342,7 @@ fn query_from_str(rawquery: &str) -> Query {
pub fn query_to_str(query: &Query) -> ~str {
let mut strvec = ~[];
for query.each |kv| {
for query.iter().advance |kv| {
match kv {
&(ref k, ref v) => {
strvec.push(fmt!("%s=%s",

View File

@ -1349,7 +1349,7 @@ mod biguint_tests {
#[test]
fn test_add() {
for sum_triples.each |elm| {
for sum_triples.iter().advance |elm| {
let (aVec, bVec, cVec) = *elm;
let a = BigUint::from_slice(aVec);
let b = BigUint::from_slice(bVec);
@ -1362,7 +1362,7 @@ mod biguint_tests {
#[test]
fn test_sub() {
for sum_triples.each |elm| {
for sum_triples.iter().advance |elm| {
let (aVec, bVec, cVec) = *elm;
let a = BigUint::from_slice(aVec);
let b = BigUint::from_slice(bVec);
@ -1413,7 +1413,7 @@ mod biguint_tests {
#[test]
fn test_mul() {
for mul_triples.each |elm| {
for mul_triples.iter().advance |elm| {
let (aVec, bVec, cVec) = *elm;
let a = BigUint::from_slice(aVec);
let b = BigUint::from_slice(bVec);
@ -1423,7 +1423,7 @@ mod biguint_tests {
assert!(b * a == c);
}
for div_rem_quadruples.each |elm| {
for div_rem_quadruples.iter().advance |elm| {
let (aVec, bVec, cVec, dVec) = *elm;
let a = BigUint::from_slice(aVec);
let b = BigUint::from_slice(bVec);
@ -1437,7 +1437,7 @@ mod biguint_tests {
#[test]
fn test_div_rem() {
for mul_triples.each |elm| {
for mul_triples.iter().advance |elm| {
let (aVec, bVec, cVec) = *elm;
let a = BigUint::from_slice(aVec);
let b = BigUint::from_slice(bVec);
@ -1451,7 +1451,7 @@ mod biguint_tests {
}
}
for div_rem_quadruples.each |elm| {
for div_rem_quadruples.iter().advance |elm| {
let (aVec, bVec, cVec, dVec) = *elm;
let a = BigUint::from_slice(aVec);
let b = BigUint::from_slice(bVec);
@ -1567,9 +1567,10 @@ mod biguint_tests {
#[test]
fn test_to_str_radix() {
for to_str_pairs().each |num_pair| {
let r = to_str_pairs();
for r.iter().advance |num_pair| {
let &(n, rs) = num_pair;
for rs.each |str_pair| {
for rs.iter().advance |str_pair| {
let &(radix, str) = str_pair;
assert_eq!(n.to_str_radix(radix), str);
}
@ -1578,9 +1579,10 @@ mod biguint_tests {
#[test]
fn test_from_str_radix() {
for to_str_pairs().each |num_pair| {
let r = to_str_pairs();
for r.iter().advance |num_pair| {
let &(n, rs) = num_pair;
for rs.each |str_pair| {
for rs.iter().advance |str_pair| {
let &(radix, str) = str_pair;
assert_eq!(&n, &FromStrRadix::from_str_radix(str, radix).get());
}
@ -1756,7 +1758,7 @@ mod bigint_tests {
#[test]
fn test_add() {
for sum_triples.each |elm| {
for sum_triples.iter().advance |elm| {
let (aVec, bVec, cVec) = *elm;
let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec);
@ -1775,7 +1777,7 @@ mod bigint_tests {
#[test]
fn test_sub() {
for sum_triples.each |elm| {
for sum_triples.iter().advance |elm| {
let (aVec, bVec, cVec) = *elm;
let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec);
@ -1832,7 +1834,7 @@ mod bigint_tests {
#[test]
fn test_mul() {
for mul_triples.each |elm| {
for mul_triples.iter().advance |elm| {
let (aVec, bVec, cVec) = *elm;
let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec);
@ -1845,7 +1847,7 @@ mod bigint_tests {
assert!((-b) * a == -c);
}
for div_rem_quadruples.each |elm| {
for div_rem_quadruples.iter().advance |elm| {
let (aVec, bVec, cVec, dVec) = *elm;
let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec);
@ -1884,7 +1886,7 @@ mod bigint_tests {
}
}
for mul_triples.each |elm| {
for mul_triples.iter().advance |elm| {
let (aVec, bVec, cVec) = *elm;
let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec);
@ -1894,7 +1896,7 @@ mod bigint_tests {
if !b.is_zero() { check(&c, &b, &a, &Zero::zero()); }
}
for div_rem_quadruples.each |elm| {
for div_rem_quadruples.iter().advance |elm| {
let (aVec, bVec, cVec, dVec) = *elm;
let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec);
@ -1927,7 +1929,7 @@ mod bigint_tests {
check_sub(&a.neg(), b, &q.neg(), &r.neg());
check_sub(&a.neg(), &b.neg(), q, &r.neg());
}
for mul_triples.each |elm| {
for mul_triples.iter().advance |elm| {
let (aVec, bVec, cVec) = *elm;
let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec);
@ -1937,7 +1939,7 @@ mod bigint_tests {
if !b.is_zero() { check(&c, &b, &a, &Zero::zero()); }
}
for div_rem_quadruples.each |elm| {
for div_rem_quadruples.iter().advance |elm| {
let (aVec, bVec, cVec, dVec) = *elm;
let a = BigInt::from_slice(Plus, aVec);
let b = BigInt::from_slice(Plus, bVec);

View File

@ -238,14 +238,14 @@ mod test {
fn test_scale_unscale() {
assert_eq!(_05_05i.scale(2f), _1_1i);
assert_eq!(_1_1i.unscale(2f), _05_05i);
for all_consts.each |&c| {
for all_consts.iter().advance |&c| {
assert_eq!(c.scale(2f).unscale(2f), c);
}
}
#[test]
fn test_conj() {
for all_consts.each |&c| {
for all_consts.iter().advance |&c| {
assert_eq!(c.conj(), Cmplx::new(c.re, -c.im));
assert_eq!(c.conj().conj(), c);
}
@ -282,7 +282,7 @@ mod test {
let (r, theta) = c.to_polar();
assert!((c - Cmplx::from_polar(&r, &theta)).norm() < 1e-6);
}
for all_consts.each |&c| { test(c); }
for all_consts.iter().advance |&c| { test(c); }
}
mod arith {
@ -295,7 +295,7 @@ mod test {
assert_eq!(_0_1i + _1_0i, _1_1i);
assert_eq!(_1_0i + _neg1_1i, _0_1i);
for all_consts.each |&c| {
for all_consts.iter().advance |&c| {
assert_eq!(_0_0i + c, c);
assert_eq!(c + _0_0i, c);
}
@ -307,7 +307,7 @@ mod test {
assert_eq!(_0_1i - _1_0i, _neg1_1i);
assert_eq!(_0_1i - _neg1_1i, _1_0i);
for all_consts.each |&c| {
for all_consts.iter().advance |&c| {
assert_eq!(c - _0_0i, c);
assert_eq!(c - c, _0_0i);
}
@ -322,7 +322,7 @@ mod test {
assert_eq!(_0_1i * _0_1i, -_1_0i);
assert_eq!(_0_1i * _0_1i * _0_1i * _0_1i, _1_0i);
for all_consts.each |&c| {
for all_consts.iter().advance |&c| {
assert_eq!(c * _1_0i, c);
assert_eq!(_1_0i * c, c);
}
@ -330,7 +330,7 @@ mod test {
#[test]
fn test_div() {
assert_eq!(_neg1_1i / _0_1i, _1_1i);
for all_consts.each |&c| {
for all_consts.iter().advance |&c| {
if c != Zero::zero() {
assert_eq!(c / c, _1_0i);
}
@ -340,7 +340,7 @@ mod test {
fn test_neg() {
assert_eq!(-_1_0i + _0_1i, _neg1_1i);
assert_eq!((-_0_1i) * _0_1i, _1_0i);
for all_consts.each |&c| {
for all_consts.iter().advance |&c| {
assert_eq!(-(-c), c);
}
}

View File

@ -482,7 +482,8 @@ mod test {
assert_eq!(FromStr::from_str::<Rational>(s), None);
}
for ["0 /1", "abc", "", "1/", "--1/2","3/2/1"].each |&s| {
let xs = ["0 /1", "abc", "", "1/", "--1/2","3/2/1"];
for xs.iter().advance |&s| {
test(s);
}
}
@ -521,7 +522,8 @@ mod test {
assert_eq!(FromStrRadix::from_str_radix::<Rational>(s, 3), None);
}
for ["0 /1", "abc", "", "1/", "--1/2","3/2/1", "3/2"].each |&s| {
let xs = ["0 /1", "abc", "", "1/", "--1/2","3/2/1", "3/2"];
for xs.iter().advance |&s| {
test(s);
}
}

View File

@ -28,9 +28,9 @@ impl<T:Ord> BaseIter<T> for PriorityQueue<T> {
/// Visit all values in the underlying vector.
///
/// The values are **not** visited in order.
fn each(&self, f: &fn(&T) -> bool) -> bool { self.data.each(f) }
fn each(&self, f: &fn(&T) -> bool) -> bool { self.data.iter().advance(f) }
fn size_hint(&self) -> Option<uint> { self.data.size_hint() }
fn size_hint(&self) -> Option<uint> { Some(self.data.len()) }
}
impl<T:Ord> Container for PriorityQueue<T> {

View File

@ -250,7 +250,7 @@ pub fn sha1() -> @Sha1 {
fn result_str(&mut self) -> ~str {
let rr = mk_result(self);
let mut s = ~"";
for rr.each |b| {
for rr.iter().advance |b| {
let hex = uint::to_str_radix(*b as uint, 16u);
if hex.len() == 1 {
s += "0";
@ -375,7 +375,7 @@ mod tests {
// Test that it works when accepting the message all at once
let mut sh = sha1::sha1();
for tests.each |t| {
for tests.iter().advance |t| {
sh.input_str(t.input);
let out = sh.result();
check_vec_eq(copy t.output, out);
@ -389,7 +389,7 @@ mod tests {
// Test that it works when accepting the message in pieces
for tests.each |t| {
for tests.iter().advance |t| {
let len = t.input.len();
let mut left = len;
while left > 0u {

View File

@ -846,7 +846,7 @@ mod test_qsort {
let immut_names = names;
let pairs = vec::zip_slice(expected, immut_names);
for pairs.each |p| {
for pairs.iter().advance |p| {
let (a, b) = *p;
debug!("%d %d", a, b);
assert_eq!(a, b);

View File

@ -72,7 +72,7 @@ impl<'self> Stats for &'self [f64] {
} else {
let mean = self.mean();
let mut v = 0.0;
for self.each |s| {
for self.iter().advance |s| {
let x = *s - mean;
v += x*x;
}

View File

@ -994,13 +994,13 @@ mod tests {
}
// wait until all children get in the mutex
for ports.each |port| { let _ = port.recv(); }
for ports.iter().advance |port| { let _ = port.recv(); }
do m.lock_cond |cond| {
let num_woken = cond.broadcast();
assert_eq!(num_woken, num_waiters);
}
// wait until all children wake up
for ports.each |port| { let _ = port.recv(); }
for ports.iter().advance |port| { let _ = port.recv(); }
}
#[test]
fn test_mutex_cond_broadcast() {
@ -1085,7 +1085,7 @@ mod tests {
}
}
}
for sibling_convos.each |p| {
for sibling_convos.iter().advance |p| {
let _ = p.recv(); // wait for sibling to get in the mutex
}
do m2.lock { }
@ -1361,13 +1361,13 @@ mod tests {
}
// wait until all children get in the mutex
for ports.each |port| { let _ = port.recv(); }
for ports.iter().advance |port| { let _ = port.recv(); }
do lock_cond(x, dg2) |cond| {
let num_woken = cond.broadcast();
assert_eq!(num_woken, num_waiters);
}
// wait until all children wake up
for ports.each |port| { let _ = port.recv(); }
for ports.iter().advance |port| { let _ = port.recv(); }
}
#[test]
fn test_rwlock_cond_broadcast() {

View File

@ -36,7 +36,7 @@ pub struct TaskPool<T> {
#[unsafe_destructor]
impl<T> Drop for TaskPool<T> {
fn finalize(&self) {
for self.channels.each |channel| {
for self.channels.iter().advance |channel| {
channel.send(Quit);
}
}

View File

@ -55,7 +55,7 @@ pub fn get_dbpath_for_term(term: &str) -> Option<~path> {
};
// Look for the terminal in all of the search directories
for dirs_to_search.each |p| {
for dirs_to_search.iter().advance |p| {
let newp = ~p.push_many(&[str::from_char(first_char), term.to_owned()]);
if os::path_exists(p) && os::path_exists(newp) {
return Some(newp);

View File

@ -369,7 +369,7 @@ fn print_failures(st: &ConsoleTestState) {
failures.push(name.to_str());
}
sort::tim_sort(failures);
for failures.each |name| {
for failures.iter().advance |name| {
st.out.write_line(fmt!(" %s", name.to_str()));
}
}
@ -947,7 +947,7 @@ mod tests {
{
fn testfn() { }
let mut tests = ~[];
for names.each |name| {
for names.iter().advance |name| {
let test = TestDescAndFn {
desc: TestDesc {
name: DynTestName(copy *name),
@ -973,7 +973,7 @@ mod tests {
let pairs = vec::zip(expected, filtered);
for pairs.each |p| {
for pairs.iter().advance |p| {
match *p {
(ref a, ref b) => {
assert!(*a == b.desc.name.to_str());

View File

@ -1033,7 +1033,7 @@ mod tests {
}
}
for [
let days = [
~"Sunday",
~"Monday",
~"Tuesday",
@ -1041,11 +1041,12 @@ mod tests {
~"Thursday",
~"Friday",
~"Saturday"
].each |day| {
];
for days.iter().advance |day| {
assert!(test(*day, "%A"));
}
for [
let days = [
~"Sun",
~"Mon",
~"Tue",
@ -1053,11 +1054,12 @@ mod tests {
~"Thu",
~"Fri",
~"Sat"
].each |day| {
];
for days.iter().advance |day| {
assert!(test(*day, "%a"));
}
for [
let months = [
~"January",
~"February",
~"March",
@ -1070,11 +1072,12 @@ mod tests {
~"October",
~"November",
~"December"
].each |day| {
];
for months.iter().advance |day| {
assert!(test(*day, "%B"));
}
for [
let months = [
~"Jan",
~"Feb",
~"Mar",
@ -1087,7 +1090,8 @@ mod tests {
~"Oct",
~"Nov",
~"Dec"
].each |day| {
];
for months.iter().advance |day| {
assert!(test(*day, "%b"));
}

View File

@ -217,7 +217,7 @@ mod test {
for repeat.times {
let ch = ch.clone();
for spec.each |spec| {
for spec.iter().advance |spec| {
let (times, maxms) = *spec;
let ch = ch.clone();
let hl_loop_clone = hl_loop.clone();

View File

@ -781,13 +781,13 @@ mod test_treemap {
fn check_equal<K: Eq + TotalOrd, V: Eq>(ctrl: &[(K, V)],
map: &TreeMap<K, V>) {
assert_eq!(ctrl.is_empty(), map.is_empty());
for ctrl.each |x| {
for ctrl.iter().advance |x| {
let &(k, v) = x;
assert!(map.find(&k).unwrap() == &v)
}
for map.each |map_k, map_v| {
let mut found = false;
for ctrl.each |x| {
for ctrl.iter().advance |x| {
let &(ctrl_k, ctrl_v) = x;
if *map_k == ctrl_k {
assert!(*map_v == ctrl_v);
@ -1135,8 +1135,8 @@ mod test_set {
let mut set_a = TreeSet::new();
let mut set_b = TreeSet::new();
for a.each |x| { assert!(set_a.insert(*x)) }
for b.each |y| { assert!(set_b.insert(*y)) }
for a.iter().advance |x| { assert!(set_a.insert(*x)) }
for b.iter().advance |y| { assert!(set_b.insert(*y)) }
let mut i = 0;
for f(&set_a, &set_b) |x| {

View File

@ -157,7 +157,7 @@ impl<D:Decoder> Decodable<D> for WorkMap {
fn decode(d: &mut D) -> WorkMap {
let v : ~[(WorkKey,~str)] = Decodable::decode(d);
let mut w = WorkMap::new();
for v.each |&(k, v)| {
for v.iter().advance |&(k, v)| {
w.insert(copy k, copy v);
}
w

View File

@ -222,7 +222,7 @@ fn usage() {
\n"
);
for commands.each |command| {
for commands.iter().advance |command| {
let padding = " ".repeat(indent - command.cmd.len());
io::println(fmt!(" %s%s%s",
command.cmd, padding, command.usage_line));

View File

@ -124,7 +124,8 @@ pub mod jit {
// in case the user wants to use an older extra library.
let cstore = sess.cstore;
for cstore::get_used_crate_files(cstore).each |cratepath| {
let r = cstore::get_used_crate_files(cstore);
for r.iter().advance |cratepath| {
let path = cratepath.to_str();
debug!("linking: %s", path);
@ -470,7 +471,7 @@ pub fn build_link_meta(sess: Session,
let mut cmh_items = ~[];
let linkage_metas = attr::find_linkage_metas(c.node.attrs);
attr::require_unique_names(sess.diagnostic(), linkage_metas);
for linkage_metas.each |meta| {
for linkage_metas.iter().advance |meta| {
match attr::get_meta_item_value_str(*meta) {
Some(value) => {
let item_name : &str = attr::get_meta_item_name(*meta);
@ -518,7 +519,7 @@ pub fn build_link_meta(sess: Session,
}
ast::meta_list(name, ref mis) => {
write_string(symbol_hasher, len_and_str(name));
for mis.each |m_| {
for mis.iter().advance |m_| {
hash(symbol_hasher, m_);
}
}
@ -526,11 +527,11 @@ pub fn build_link_meta(sess: Session,
}
symbol_hasher.reset();
for cmh_items.each |m| {
for cmh_items.iter().advance |m| {
hash(symbol_hasher, m);
}
for dep_hashes.each |dh| {
for dep_hashes.iter().advance |dh| {
write_string(symbol_hasher, len_and_str(*dh));
}
@ -682,7 +683,7 @@ pub fn mangle(sess: Session, ss: path) -> ~str {
let mut n = ~"_ZN"; // Begin name-sequence.
for ss.each |s| {
for ss.iter().advance |s| {
match *s { path_name(s) | path_mod(s) => {
let sani = sanitize(sess.str_of(s));
n += fmt!("%u%s", sani.len(), sani);
@ -872,7 +873,8 @@ pub fn link_args(sess: Session,
// # Crate linking
let cstore = sess.cstore;
for cstore::get_used_crate_files(cstore).each |cratepath| {
let r = cstore::get_used_crate_files(cstore);
for r.iter().advance |cratepath| {
if cratepath.filetype() == Some(~".rlib") {
args.push(cratepath.to_str());
loop;
@ -884,7 +886,7 @@ pub fn link_args(sess: Session,
}
let ula = cstore::get_used_link_args(cstore);
for ula.each |arg| { args.push(arg.to_owned()); }
for ula.iter().advance |arg| { args.push(arg.to_owned()); }
// Add all the link args for external crates.
do cstore::iter_crate_data(cstore) |crate_num, _| {
@ -902,13 +904,13 @@ pub fn link_args(sess: Session,
// to be found at compile time so it is still entirely up to outside
// forces to make sure that library can be found at runtime.
for sess.opts.addl_lib_search_paths.each |path| {
for sess.opts.addl_lib_search_paths.iter().advance |path| {
args.push(~"-L" + path.to_str());
}
// The names of the extern libraries
let used_libs = cstore::get_used_libraries(cstore);
for used_libs.each |l| { args.push(~"-l" + *l); }
for used_libs.iter().advance |l| { args.push(~"-l" + *l); }
if *sess.building_library {
args.push(lib_cmd);

View File

@ -147,7 +147,7 @@ pub fn create_standard_passes(level:OptLevel) -> ~[~str] {
}
pub fn populate_pass_manager(sess: Session, pm: &mut PassManager, pass_list:&[~str]) {
for pass_list.each |&nm| {
for pass_list.iter().advance |&nm| {
match create_pass(nm) {
Some(p) => pm.add_pass(p),
None => sess.warn(fmt!("Unknown pass %s", nm))
@ -172,15 +172,15 @@ pub fn list_passes() {
io::println("\nAvailable Passes:");
io::println("\nAnalysis Passes:");
for analysis_passes.each |&(name, desc)| {
for analysis_passes.iter().advance |&(name, desc)| {
io::println(fmt!(" %-30s -- %s", name, desc));
}
io::println("\nTransformation Passes:");
for transform_passes.each |&(name, desc)| {
for transform_passes.iter().advance |&(name, desc)| {
io::println(fmt!(" %-30s -- %s", name, desc));
}
io::println("\nUtility Passes:");
for utility_passes.each |&(name, desc)| {
for utility_passes.iter().advance |&(name, desc)| {
io::println(fmt!(" %-30s -- %s", name, desc));
}
}
@ -298,7 +298,7 @@ static utility_passes : &'static [(&'static str, &'static str)] = &'static [
fn passes_exist() {
let mut failed = ~[];
unsafe { llvm::LLVMInitializePasses(); }
for analysis_passes.each() |&(name,_)| {
for analysis_passes.iter().advance |&(name,_)| {
let pass = create_pass(name);
if !pass.is_some() {
failed.push(name);
@ -306,7 +306,7 @@ fn passes_exist() {
unsafe { llvm::LLVMDestroyPass(pass.get()) }
}
}
for transform_passes.each() |&(name,_)| {
for transform_passes.iter().advance |&(name,_)| {
let pass = create_pass(name);
if !pass.is_some() {
failed.push(name);
@ -314,7 +314,7 @@ fn passes_exist() {
unsafe { llvm::LLVMDestroyPass(pass.get()) }
}
}
for utility_passes.each() |&(name,_)| {
for utility_passes.iter().advance |&(name,_)| {
let pass = create_pass(name);
if !pass.is_some() {
failed.push(name);
@ -325,7 +325,7 @@ fn passes_exist() {
if failed.len() > 0 {
io::println("Some passes don't exist:");
for failed.each |&n| {
for failed.iter().advance |&n| {
io::println(fmt!(" %s", n));
}
fail!();

View File

@ -64,7 +64,7 @@ fn get_rpaths(os: session::os,
debug!("sysroot: %s", sysroot.to_str());
debug!("output: %s", output.to_str());
debug!("libs:");
for libs.each |libpath| {
for libs.iter().advance |libpath| {
debug!(" %s", libpath.to_str());
}
debug!("target_triple: %s", target_triple);
@ -83,7 +83,7 @@ fn get_rpaths(os: session::os,
fn log_rpaths(desc: &str, rpaths: &[Path]) {
debug!("%s rpaths:", desc);
for rpaths.each |rpath| {
for rpaths.iter().advance |rpath| {
debug!(" %s", rpath.to_str());
}
}
@ -185,7 +185,7 @@ pub fn get_install_prefix_rpath(target_triple: &str) -> Path {
pub fn minimize_rpaths(rpaths: &[Path]) -> ~[Path] {
let mut set = HashSet::new();
let mut minimized = ~[];
for rpaths.each |rpath| {
for rpaths.iter().advance |rpath| {
if set.insert(rpath.to_str()) {
minimized.push(copy *rpath);
}

View File

@ -453,7 +453,7 @@ pub fn pretty_print_input(sess: Session, cfg: ast::crate_cfg, input: &input,
}
pub fn get_os(triple: &str) -> Option<session::os> {
for os_names.each |&(name, os)| {
for os_names.iter().advance |&(name, os)| {
if triple.contains(name) { return Some(os) }
}
None
@ -467,7 +467,7 @@ static os_names : &'static [(&'static str, session::os)] = &'static [
("freebsd", session::os_freebsd)];
pub fn get_arch(triple: &str) -> Option<abi::Architecture> {
for architecture_abis.each |&(arch, abi)| {
for architecture_abis.iter().advance |&(arch, abi)| {
if triple.contains(arch) { return Some(abi) }
}
None
@ -556,7 +556,7 @@ pub fn build_session_options(binary: @str,
lint::deny, lint::forbid];
let mut lint_opts = ~[];
let lint_dict = lint::get_lint_dict();
for lint_levels.each |level| {
for lint_levels.iter().advance |level| {
let level_name = lint::level_to_str(*level);
// FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
@ -565,7 +565,7 @@ pub fn build_session_options(binary: @str,
let level_short = level_short.to_ascii().to_upper().to_str_ascii();
let flags = vec::append(getopts::opt_strs(matches, level_short),
getopts::opt_strs(matches, level_name));
for flags.each |lint_name| {
for flags.iter().advance |lint_name| {
let lint_name = lint_name.replace("-", "_");
match lint_dict.find_equiv(&lint_name) {
None => {
@ -582,9 +582,9 @@ pub fn build_session_options(binary: @str,
let mut debugging_opts = 0u;
let debug_flags = getopts::opt_strs(matches, "Z");
let debug_map = session::debugging_opts_map();
for debug_flags.each |debug_flag| {
for debug_flags.iter().advance |debug_flag| {
let mut this_bit = 0u;
for debug_map.each |tuple| {
for debug_map.iter().advance |tuple| {
let (name, bit) = match *tuple { (ref a, _, b) => (a, b) };
if name == debug_flag { this_bit = bit; break; }
}

View File

@ -143,7 +143,7 @@ fn fold_block(
let filtered_view_items =
filtered_view_items.map(|x| fld.fold_view_item(*x));
let mut resulting_stmts = ~[];
for filtered_stmts.each |stmt| {
for filtered_stmts.iter().advance |stmt| {
match fld.fold_stmt(*stmt) {
None => {}
Some(stmt) => resulting_stmts.push(stmt),

View File

@ -386,7 +386,7 @@ fn is_std(cx: &TestCtxt) -> bool {
fn mk_test_descs(cx: &TestCtxt) -> @ast::expr {
debug!("building test vector from %u tests", cx.testfns.len());
let mut descs = ~[];
for cx.testfns.each |test| {
for cx.testfns.iter().advance |test| {
descs.push(mk_test_desc_and_fn_rec(cx, test));
}

View File

@ -66,7 +66,7 @@ struct cache_entry {
fn dump_crates(crate_cache: @mut ~[cache_entry]) {
debug!("resolved crates:");
for crate_cache.each |entry| {
for crate_cache.iter().advance |entry| {
debug!("cnum: %?", entry.cnum);
debug!("span: %?", entry.span);
debug!("hash: %?", entry.hash);
@ -101,7 +101,7 @@ fn warn_if_multiple_versions(e: @mut Env,
if matches.len() != 1u {
diag.handler().warn(
fmt!("using multiple versions of crate `%s`", name));
for matches.each |match_| {
for matches.iter().advance |match_| {
diag.span_note(match_.span, "used here");
let attrs = ~[
attr::mk_attr(attr::mk_list_item(
@ -130,7 +130,7 @@ fn visit_crate(e: @mut Env, c: &ast::crate) {
let cstore = e.cstore;
let link_args = attr::find_attrs_by_name(c.node.attrs, "link_args");
for link_args.each |a| {
for link_args.iter().advance |a| {
match attr::get_meta_item_value_str(attr::attr_meta(*a)) {
Some(ref linkarg) => {
cstore::add_used_link_args(cstore, *linkarg);
@ -191,7 +191,7 @@ fn visit_item(e: @mut Env, i: @ast::item) {
ast::anonymous => { /* do nothing */ }
}
for link_args.each |a| {
for link_args.iter().advance |a| {
match attr::get_meta_item_value_str(attr::attr_meta(*a)) {
Some(linkarg) => {
cstore::add_used_link_args(cstore, linkarg);
@ -221,7 +221,7 @@ fn metas_with_ident(ident: @str, metas: ~[@ast::meta_item])
fn existing_match(e: @mut Env, metas: &[@ast::meta_item], hash: @str)
-> Option<int> {
for e.crate_cache.each |c| {
for e.crate_cache.iter().advance |c| {
if loader::metadata_matches(*c.metas, metas)
&& (hash.is_empty() || c.hash == hash) {
return Some(c.cnum);
@ -303,7 +303,8 @@ fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map {
// The map from crate numbers in the crate we're resolving to local crate
// numbers
let mut cnum_map = HashMap::new();
for decoder::get_crate_deps(cdata).each |dep| {
let r = decoder::get_crate_deps(cdata);
for r.iter().advance |dep| {
let extrn_cnum = dep.cnum;
let cname = dep.name;
let cname_str = token::ident_to_str(&dep.name);

View File

@ -160,7 +160,7 @@ pub fn get_dep_hashes(cstore: &CStore) -> ~[@str] {
};
debug!("sorted:");
for sorted.each |x| {
for sorted.iter().advance |x| {
debug!(" hash[%s]: %s", x.name, x.hash);
}

View File

@ -598,7 +598,7 @@ pub fn get_enum_variants(intr: @ident_interner, cdata: cmd, id: ast::node_id,
let mut infos: ~[ty::VariantInfo] = ~[];
let variant_ids = enum_variant_ids(item, cdata);
let mut disr_val = 0;
for variant_ids.each |did| {
for variant_ids.iter().advance |did| {
let item = find_item(did.node, items);
let ctor_ty = item_type(ast::def_id { crate: cdata.cnum, node: id},
item, tcx, cdata);
@ -818,7 +818,7 @@ pub fn get_static_methods_if_impl(intr: @ident_interner,
}
let mut static_impl_methods = ~[];
for impl_method_ids.each |impl_method_id| {
for impl_method_ids.iter().advance |impl_method_id| {
let impl_method_doc = lookup_item(impl_method_id.node, cdata.data);
let family = item_family(impl_method_doc);
match family {
@ -1008,7 +1008,8 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::attribute] {
fn list_meta_items(intr: @ident_interner,
meta_items: ebml::Doc,
out: @io::Writer) {
for get_meta_items(meta_items).each |mi| {
let r = get_meta_items(meta_items);
for r.iter().advance |mi| {
out.write_str(fmt!("%s\n", pprust::meta_item_to_str(*mi, intr)));
}
}
@ -1017,7 +1018,8 @@ fn list_crate_attributes(intr: @ident_interner, md: ebml::Doc, hash: &str,
out: @io::Writer) {
out.write_str(fmt!("=Crate Attributes (%s)=\n", hash));
for get_attributes(md).each |attr| {
let r = get_attributes(md);
for r.iter().advance |attr| {
out.write_str(fmt!("%s\n", pprust::attribute_to_str(*attr, intr)));
}
@ -1057,7 +1059,8 @@ pub fn get_crate_deps(data: @~[u8]) -> ~[crate_dep] {
fn list_crate_deps(data: @~[u8], out: @io::Writer) {
out.write_str("=External Dependencies=\n");
for get_crate_deps(data).each |dep| {
let r = get_crate_deps(data);
for r.iter().advance |dep| {
out.write_str(
fmt!("%d %s-%s-%s\n",
dep.cnum, token::ident_to_str(&dep.name), dep.hash, dep.vers));

View File

@ -187,7 +187,7 @@ fn encode_ty_type_param_defs(ebml_w: &mut writer::Encoder,
tcx: ecx.tcx,
reachable: |a| r.contains(&a),
abbrevs: tyencode::ac_use_abbrevs(ecx.type_abbrevs)};
for params.each |param| {
for params.iter().advance |param| {
ebml_w.start_tag(tag);
tyencode::enc_type_param_def(ebml_w.writer, ty_str_ctxt, param);
ebml_w.end_tag();
@ -325,7 +325,7 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
let mut i = 0;
let vi = ty::enum_variants(ecx.tcx,
ast::def_id { crate: local_crate, node: id });
for variants.each |variant| {
for variants.iter().advance |variant| {
index.push(entry {val: variant.node.id, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(variant.node.id));
@ -373,7 +373,7 @@ fn encode_path(ecx: &EncodeContext,
ebml_w.start_tag(tag_path);
ebml_w.wr_tagged_u32(tag_path_len, (path.len() + 1) as u32);
for path.each |pe| {
for path.iter().advance |pe| {
encode_path_elt(ecx, ebml_w, *pe);
}
encode_path_elt(ecx, ebml_w, name);
@ -403,8 +403,8 @@ fn encode_reexported_static_base_methods(ecx: &EncodeContext,
-> bool {
match ecx.tcx.base_impls.find(&exp.def_id) {
Some(implementations) => {
for implementations.each |&base_impl| {
for base_impl.methods.each |&m| {
for implementations.iter().advance |&base_impl| {
for base_impl.methods.iter().advance |&m| {
if m.explicit_self == ast::sty_static {
encode_reexported_static_method(ecx, ebml_w, exp,
m.did, m.ident);
@ -424,7 +424,7 @@ fn encode_reexported_static_trait_methods(ecx: &EncodeContext,
-> bool {
match ecx.tcx.trait_methods_cache.find(&exp.def_id) {
Some(methods) => {
for methods.each |&m| {
for methods.iter().advance |&m| {
if m.explicit_self == ast::sty_static {
encode_reexported_static_method(ecx, ebml_w, exp,
m.def_id, m.ident);
@ -486,7 +486,7 @@ fn encode_info_for_mod(ecx: &EncodeContext,
debug!("(encoding info for module) encoding info for module ID %d", id);
// Encode info about all the module children.
for md.items.each |item| {
for md.items.iter().advance |item| {
match item.node {
item_impl(*) => {
let (ident, did) = (item.ident, item.id);
@ -511,7 +511,7 @@ fn encode_info_for_mod(ecx: &EncodeContext,
match ecx.reexports2.find(&id) {
Some(ref exports) => {
debug!("(encoding info for module) found reexports for %d", id);
for exports.each |exp| {
for exports.iter().advance |exp| {
debug!("(encoding info for module) reexport '%s' for %d",
exp.name, id);
ebml_w.start_tag(tag_items_data_item_reexport);
@ -617,7 +617,7 @@ fn encode_info_for_struct(ecx: &EncodeContext,
let tcx = ecx.tcx;
/* We encode both private and public fields -- need to include
private fields to get the offsets right */
for fields.each |field| {
for fields.iter().advance |field| {
let (nm, vis) = match field.node.kind {
named_field(nm, vis) => (nm, vis),
unnamed_field => (special_idents::unnamed_field, inherited)
@ -862,7 +862,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_type_param_bounds(ebml_w, ecx, &generics.ty_params);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ecx, ebml_w, item.ident);
for (*enum_definition).variants.each |v| {
for (*enum_definition).variants.iter().advance |v| {
encode_variant_id(ebml_w, local_def(v.node.id));
}
(ecx.encode_inlined_item)(ecx, ebml_w, path, ii_item(item));
@ -921,7 +921,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
/* Encode def_ids for each field and method
for methods, write all the stuff get_trait_method
needs to know*/
for struct_def.fields.each |f| {
for struct_def.fields.iter().advance |f| {
match f.node.kind {
named_field(ident, vis) => {
ebml_w.start_tag(tag_item_field);
@ -960,7 +960,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
}
_ => {}
}
for methods.each |m| {
for methods.iter().advance |m| {
ebml_w.start_tag(tag_item_impl_method);
let method_def_id = local_def(m.id);
let s = def_to_str(method_def_id);
@ -978,7 +978,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
let mut impl_path = vec::append(~[], path);
impl_path += [ast_map::path_name(item.ident)];
for methods.each |m| {
for methods.iter().advance |m| {
index.push(entry {val: m.id, pos: ebml_w.writer.tell()});
encode_info_for_method(ecx,
ebml_w,
@ -1001,13 +1001,13 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_trait_ref(ebml_w, ecx, trait_def.trait_ref, tag_item_trait_ref);
encode_name(ecx, ebml_w, item.ident);
encode_attributes(ebml_w, item.attrs);
for ty::trait_method_def_ids(tcx, local_def(item.id)).each |&method_def_id| {
for ty::trait_method_def_ids(tcx, local_def(item.id)).iter().advance |&method_def_id| {
ebml_w.start_tag(tag_item_trait_method);
encode_def_id(ebml_w, method_def_id);
ebml_w.end_tag();
}
encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident));
for super_traits.each |ast_trait_ref| {
for super_traits.iter().advance |ast_trait_ref| {
let trait_ref = ty::node_id_to_trait_ref(ecx.tcx, ast_trait_ref.ref_id);
encode_trait_ref(ebml_w, ecx, trait_ref, tag_item_super_trait_ref);
}
@ -1183,13 +1183,13 @@ fn create_index<T:Copy + Hash + IterBytes>(index: ~[entry<T>]) ->
~[@~[entry<T>]] {
let mut buckets: ~[@mut ~[entry<T>]] = ~[];
for uint::range(0u, 256u) |_i| { buckets.push(@mut ~[]); };
for index.each |elt| {
for index.iter().advance |elt| {
let h = elt.val.hash() as uint;
buckets[h % 256].push(copy *elt);
}
let mut buckets_frozen = ~[];
for buckets.each |bucket| {
for buckets.iter().advance |bucket| {
buckets_frozen.push(@/*bad*/copy **bucket);
}
return buckets_frozen;
@ -1202,10 +1202,10 @@ fn encode_index<T>(ebml_w: &mut writer::Encoder,
ebml_w.start_tag(tag_index);
let mut bucket_locs: ~[uint] = ~[];
ebml_w.start_tag(tag_index_buckets);
for buckets.each |bucket| {
for buckets.iter().advance |bucket| {
bucket_locs.push(ebml_w.writer.tell());
ebml_w.start_tag(tag_index_buckets_bucket);
for (**bucket).each |elt| {
for (**bucket).iter().advance |elt| {
ebml_w.start_tag(tag_index_buckets_bucket_elt);
assert!(elt.pos < 0xffff_ffff);
writer.write_be_u32(elt.pos as u32);
@ -1216,7 +1216,7 @@ fn encode_index<T>(ebml_w: &mut writer::Encoder,
}
ebml_w.end_tag();
ebml_w.start_tag(tag_index_table);
for bucket_locs.each |pos| {
for bucket_locs.iter().advance |pos| {
assert!(*pos < 0xffff_ffff);
writer.write_be_u32(*pos as u32);
}
@ -1262,7 +1262,7 @@ fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @meta_item) {
ebml_w.start_tag(tag_meta_item_name);
ebml_w.writer.write(name.as_bytes());
ebml_w.end_tag();
for items.each |inner_item| {
for items.iter().advance |inner_item| {
encode_meta_item(ebml_w, *inner_item);
}
ebml_w.end_tag();
@ -1272,7 +1272,7 @@ fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @meta_item) {
fn encode_attributes(ebml_w: &mut writer::Encoder, attrs: &[attribute]) {
ebml_w.start_tag(tag_attributes);
for attrs.each |attr| {
for attrs.iter().advance |attr| {
ebml_w.start_tag(tag_attribute);
encode_meta_item(ebml_w, attr.node.value);
ebml_w.end_tag();
@ -1314,7 +1314,7 @@ fn synthesize_crate_attrs(ecx: &EncodeContext,
let mut attrs: ~[attribute] = ~[];
let mut found_link_attr = false;
for crate.node.attrs.each |attr| {
for crate.node.attrs.iter().advance |attr| {
attrs.push(
if "link" != attr::get_attr_name(attr) {
copy *attr
@ -1356,7 +1356,7 @@ fn encode_crate_deps(ecx: &EncodeContext,
// Sanity-check the crate numbers
let mut expected_cnum = 1;
for deps.each |n| {
for deps.iter().advance |n| {
assert_eq!(n.cnum, expected_cnum);
expected_cnum += 1;
}
@ -1370,7 +1370,8 @@ fn encode_crate_deps(ecx: &EncodeContext,
// FIXME (#2166): This is not nearly enough to support correct versioning
// but is enough to get transitive crate dependencies working.
ebml_w.start_tag(tag_crate_deps);
for get_ordered_deps(ecx, cstore).each |dep| {
let r = get_ordered_deps(ecx, cstore);
for r.iter().advance |dep| {
encode_crate_dep(ecx, ebml_w, *dep);
}
ebml_w.end_tag();
@ -1404,7 +1405,7 @@ fn encode_link_args(ecx: &EncodeContext, ebml_w: &mut writer::Encoder) {
ebml_w.start_tag(tag_link_args);
let link_args = cstore::get_used_link_args(ecx.cstore);
for link_args.each |link_arg| {
for link_args.iter().advance |link_arg| {
ebml_w.start_tag(tag_link_args_arg);
ebml_w.writer.write_str(link_arg.to_str());
ebml_w.end_tag();
@ -1515,7 +1516,7 @@ pub fn encode_metadata(parms: EncodeParams, crate: &crate) -> ~[u8] {
ecx.stats.total_bytes = *wr.pos;
if (tcx.sess.meta_stats()) {
for wr.bytes.each |e| {
for wr.bytes.iter().advance |e| {
if *e == 0 {
ecx.stats.zero_bytes += 1;
}

View File

@ -48,7 +48,7 @@ pub fn mk_filesearch(maybe_sysroot: &Option<@Path>,
debug!("filesearch: searching additional lib search paths [%?]",
self.addl_lib_search_paths.len());
// a little weird
self.addl_lib_search_paths.each(f);
self.addl_lib_search_paths.iter().advance(f);
debug!("filesearch: searching target lib path");
if !f(&make_target_lib_path(self.sysroot,
@ -89,7 +89,8 @@ pub fn search<T:Copy>(filesearch: @FileSearch, pick: pick<T>) -> Option<T> {
let mut rslt = None;
for filesearch.for_each_lib_search_path() |lib_search_path| {
debug!("searching %s", lib_search_path.to_str());
for os::list_dir_path(lib_search_path).each |path| {
let r = os::list_dir_path(lib_search_path);
for r.iter().advance |path| {
debug!("testing %s", path.to_str());
let maybe_picked = pick(*path);
if maybe_picked.is_some() {

View File

@ -130,7 +130,7 @@ fn find_library_crate_aux(
cx.diag.span_err(
cx.span, fmt!("multiple matching crates for `%s`", crate_name));
cx.diag.handler().note("candidates:");
for matches.each |&(ident, data)| {
for matches.iter().advance |&(ident, data)| {
cx.diag.handler().note(fmt!("path: %s", ident));
let attrs = decoder::get_crate_attributes(data);
note_linkage_attrs(cx.intr, cx.diag, attrs);
@ -158,9 +158,9 @@ pub fn crate_name_from_metas(metas: &[@ast::meta_item]) -> @str {
pub fn note_linkage_attrs(intr: @ident_interner,
diag: @span_handler,
attrs: ~[ast::attribute]) {
for attr::find_linkage_metas(attrs).each |mi| {
diag.handler().note(fmt!("meta: %s",
pprust::meta_item_to_str(*mi,intr)));
let r = attr::find_linkage_metas(attrs);
for r.iter().advance |mi| {
diag.handler().note(fmt!("meta: %s", pprust::meta_item_to_str(*mi,intr)));
}
}
@ -182,7 +182,7 @@ pub fn metadata_matches(extern_metas: &[@ast::meta_item],
debug!("matching %u metadata requirements against %u items",
local_metas.len(), extern_metas.len());
for local_metas.each |needed| {
for local_metas.iter().advance |needed| {
if !attr::contains(extern_metas, *needed) {
return false;
}

View File

@ -125,7 +125,7 @@ fn enc_substs(w: @io::Writer, cx: @ctxt, substs: &ty::substs) {
do enc_opt(w, substs.self_r) |r| { enc_region(w, cx, r) }
do enc_opt(w, substs.self_ty) |t| { enc_ty(w, cx, t) }
w.write_char('[');
for substs.tps.each |t| { enc_ty(w, cx, *t); }
for substs.tps.iter().advance |t| { enc_ty(w, cx, *t); }
w.write_char(']');
}
@ -272,7 +272,7 @@ fn enc_sty(w: @io::Writer, cx: @ctxt, st: ty::sty) {
}
ty::ty_tup(ts) => {
w.write_str(&"T[");
for ts.each |t| { enc_ty(w, cx, *t); }
for ts.iter().advance |t| { enc_ty(w, cx, *t); }
w.write_char(']');
}
ty::ty_box(mt) => { w.write_char('@'); enc_mt(w, cx, mt); }
@ -389,7 +389,7 @@ fn enc_closure_ty(w: @io::Writer, cx: @ctxt, ft: &ty::ClosureTy) {
fn enc_fn_sig(w: @io::Writer, cx: @ctxt, fsig: &ty::FnSig) {
w.write_char('[');
for fsig.inputs.each |ty| {
for fsig.inputs.iter().advance |ty| {
enc_ty(w, cx, *ty);
}
w.write_char(']');
@ -407,7 +407,7 @@ fn enc_bounds(w: @io::Writer, cx: @ctxt, bs: &ty::ParamBounds) {
}
}
for bs.trait_bounds.each |&tp| {
for bs.trait_bounds.iter().advance |&tp| {
w.write_char('I');
enc_trait_ref(w, cx, tp);
}

View File

@ -118,7 +118,7 @@ impl<'self> CheckLoanCtxt<'self> {
//! given `loan_path`
for self.each_in_scope_loan(scope_id) |loan| {
for loan.restrictions.each |restr| {
for loan.restrictions.iter().advance |restr| {
if restr.loan_path == loan_path {
if !op(loan, restr) {
return false;
@ -152,7 +152,7 @@ impl<'self> CheckLoanCtxt<'self> {
debug!("new_loan_indices = %?", new_loan_indices);
for self.each_issued_loan(scope_id) |issued_loan| {
for new_loan_indices.each |&new_loan_index| {
for new_loan_indices.iter().advance |&new_loan_index| {
let new_loan = &self.all_loans[new_loan_index];
self.report_error_if_loans_conflict(issued_loan, new_loan);
}
@ -210,7 +210,7 @@ impl<'self> CheckLoanCtxt<'self> {
};
debug!("illegal_if=%?", illegal_if);
for loan1.restrictions.each |restr| {
for loan1.restrictions.iter().advance |restr| {
if !restr.set.intersects(illegal_if) { loop; }
if restr.loan_path != loan2.loan_path { loop; }
@ -634,7 +634,7 @@ fn check_loans_in_fn<'a>(fk: &visit::fn_kind,
closure_id: ast::node_id,
span: span) {
let cap_vars = this.bccx.capture_map.get(&closure_id);
for cap_vars.each |cap_var| {
for cap_vars.iter().advance |cap_var| {
match cap_var.mode {
moves::CapRef | moves::CapCopy => {
let var_id = ast_util::def_id_of_def(cap_var.def).node;

View File

@ -71,7 +71,7 @@ pub fn gather_captures(bccx: @BorrowckCtxt,
move_data: &mut MoveData,
closure_expr: @ast::expr) {
let captured_vars = bccx.capture_map.get(&closure_expr.id);
for captured_vars.each |captured_var| {
for captured_vars.iter().advance |captured_var| {
match captured_var.mode {
moves::CapMove => {
let fvar_id = ast_util::def_id_of_def(captured_var.def).node;

View File

@ -229,8 +229,8 @@ fn gather_loans_in_expr(ex: @ast::expr,
ast::expr_match(ex_v, ref arms) => {
let cmt = this.bccx.cat_expr(ex_v);
for arms.each |arm| {
for arm.pats.each |pat| {
for arms.iter().advance |arm| {
for arm.pats.iter().advance |pat| {
this.gather_pat(cmt, *pat, arm.body.node.id, ex.id);
}
}

View File

@ -140,7 +140,7 @@ impl RestrictionsContext {
// static errors. For example, if there is code like
//
// let v = @mut ~[1, 2, 3];
// for v.each |e| {
// for v.iter().advance |e| {
// v.push(e + 1);
// }
//
@ -152,7 +152,7 @@ impl RestrictionsContext {
//
// let v = @mut ~[1, 2, 3];
// let w = v;
// for v.each |e| {
// for v.iter().advance |e| {
// w.push(e + 1);
// }
//
@ -165,7 +165,7 @@ impl RestrictionsContext {
// }
// ...
// let v: &V = ...;
// for v.get_list().each |e| {
// for v.get_list().iter().advance |e| {
// v.get_list().push(e + 1);
// }
match opt_loan_path(cmt_base) {

View File

@ -357,13 +357,13 @@ impl MoveData {
self.kill_moves(assignment.path, assignment.id, dfcx_moves);
}
for self.path_assignments.each |assignment| {
for self.path_assignments.iter().advance |assignment| {
self.kill_moves(assignment.path, assignment.id, dfcx_moves);
}
// Kill all moves related to a variable `x` when it goes out
// of scope:
for self.paths.each |path| {
for self.paths.iter().advance |path| {
match *path.loan_path {
LpVar(id) => {
let kill_id = tcx.region_maps.encl_scope(id);

View File

@ -48,7 +48,7 @@ pub fn check_item(sess: Session,
check_item_recursion(sess, ast_map, def_map, it);
}
item_enum(ref enum_definition, _) => {
for (*enum_definition).variants.each |var| {
for (*enum_definition).variants.iter().advance |var| {
for var.node.disr_expr.iter().advance |ex| {
(v.visit_expr)(*ex, (true, v));
}

View File

@ -64,7 +64,7 @@ pub fn check_expr(cx: @MatchCheckCtxt, ex: @expr, (s, v): ((), visit::vt<()>)) {
expr_match(scrut, ref arms) => {
// First, check legality of move bindings.
let is_non_moving_lvalue = expr_is_non_moving_lvalue(cx, ex);
for arms.each |arm| {
for arms.iter().advance |arm| {
check_legality_of_move_bindings(cx,
is_non_moving_lvalue,
arm.guard.is_some(),
@ -110,8 +110,8 @@ pub fn check_expr(cx: @MatchCheckCtxt, ex: @expr, (s, v): ((), visit::vt<()>)) {
// Check for unreachable patterns
pub fn check_arms(cx: @MatchCheckCtxt, arms: &[arm]) {
let mut seen = ~[];
for arms.each |arm| {
for arm.pats.each |pat| {
for arms.iter().advance |arm| {
for arm.pats.iter().advance |pat| {
let v = ~[*pat];
match is_useful(cx, &seen, v) {
not_useful => {
@ -232,7 +232,7 @@ pub fn is_useful(cx: @MatchCheckCtxt, m: &matrix, v: &[@pat]) -> useful {
}
}
ty::ty_enum(eid, _) => {
for (*ty::enum_variants(cx.tcx, eid)).each |va| {
for (*ty::enum_variants(cx.tcx, eid)).iter().advance |va| {
match is_useful_specialized(cx, m, v, variant(va.id),
va.args.len(), left_ty) {
not_useful => (),
@ -354,14 +354,14 @@ pub fn missing_ctor(cx: @MatchCheckCtxt,
match ty::get(left_ty).sty {
ty::ty_box(_) | ty::ty_uniq(_) | ty::ty_rptr(*) | ty::ty_tup(_) |
ty::ty_struct(*) => {
for m.each |r| {
for m.iter().advance |r| {
if !is_wild(cx, r[0]) { return None; }
}
return Some(single);
}
ty::ty_enum(eid, _) => {
let mut found = ~[];
for m.each |r| {
for m.iter().advance |r| {
let r = pat_ctor_id(cx, r[0]);
for r.iter().advance |id| {
if !vec::contains(found, id) {
@ -371,7 +371,7 @@ pub fn missing_ctor(cx: @MatchCheckCtxt,
}
let variants = ty::enum_variants(cx.tcx, eid);
if found.len() != (*variants).len() {
for (*variants).each |v| {
for (*variants).iter().advance |v| {
if !found.iter().any_(|x| x == &(variant(v.id))) {
return Some(variant(v.id));
}
@ -383,7 +383,7 @@ pub fn missing_ctor(cx: @MatchCheckCtxt,
ty::ty_bool => {
let mut true_found = false;
let mut false_found = false;
for m.each |r| {
for m.iter().advance |r| {
match pat_ctor_id(cx, r[0]) {
None => (),
Some(val(const_bool(true))) => true_found = true,
@ -423,7 +423,7 @@ pub fn missing_ctor(cx: @MatchCheckCtxt,
let mut found_slice = false;
let mut next = 0;
let mut missing = None;
for sorted_vec_lens.each |&(length, slice)| {
for sorted_vec_lens.iter().advance |&(length, slice)| {
if length != next {
missing = Some(next);
break;
@ -775,7 +775,7 @@ pub fn check_fn(cx: @MatchCheckCtxt,
(s, v): ((),
visit::vt<()>)) {
visit::visit_fn(kind, decl, body, sp, id, (s, v));
for decl.inputs.each |input| {
for decl.inputs.iter().advance |input| {
if is_refutable(cx, input.pat) {
cx.tcx.sess.span_err(input.pat.span,
"refutable pattern in function argument");
@ -829,7 +829,7 @@ pub fn check_legality_of_move_bindings(cx: @MatchCheckCtxt,
let def_map = tcx.def_map;
let mut by_ref_span = None;
let mut any_by_move = false;
for pats.each |pat| {
for pats.iter().advance |pat| {
do pat_bindings(def_map, *pat) |bm, id, span, _path| {
match bm {
bind_by_ref(_) => {
@ -871,7 +871,7 @@ pub fn check_legality_of_move_bindings(cx: @MatchCheckCtxt,
};
if !any_by_move { return; } // pointless micro-optimization
for pats.each |pat| {
for pats.iter().advance |pat| {
for walk_pat(*pat) |p| {
if pat_is_binding(def_map, p) {
match p.node {

View File

@ -389,7 +389,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
self.merge_with_entry_set(blk.node.id, in_out);
for blk.node.stmts.each |&stmt| {
for blk.node.stmts.iter().advance |&stmt| {
self.walk_stmt(stmt, in_out, loop_scopes);
}
@ -510,7 +510,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
loop_kind: ForLoop,
break_bits: reslice(in_out).to_owned()
});
for decl.inputs.each |input| {
for decl.inputs.iter().advance |input| {
self.walk_pat(input.pat, func_bits, loop_scopes);
}
self.walk_block(body, func_bits, loop_scopes);
@ -627,7 +627,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
// together the bits from each arm:
self.reset(in_out);
for arms.each |arm| {
for arms.iter().advance |arm| {
// in_out reflects the discr and all guards to date
self.walk_opt_expr(arm.guard, guards, loop_scopes);
@ -702,7 +702,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
}
ast::expr_struct(_, ref fields, with_expr) => {
for fields.each |field| {
for fields.iter().advance |field| {
self.walk_expr(field.node.expr, in_out, loop_scopes);
}
self.walk_opt_expr(with_expr, in_out, loop_scopes);
@ -764,10 +764,10 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
}
ast::expr_inline_asm(ref inline_asm) => {
for inline_asm.inputs.each |&(_, expr)| {
for inline_asm.inputs.iter().advance |&(_, expr)| {
self.walk_expr(expr, in_out, loop_scopes);
}
for inline_asm.outputs.each |&(_, expr)| {
for inline_asm.outputs.iter().advance |&(_, expr)| {
self.walk_expr(expr, in_out, loop_scopes);
}
}
@ -835,7 +835,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
exprs: &[@ast::expr],
in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) {
for exprs.each |&expr| {
for exprs.iter().advance |&expr| {
self.walk_expr(expr, in_out, loop_scopes);
}
}
@ -897,7 +897,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
// alternatives, so we must treat this like an N-way select
// statement.
let initial_state = reslice(in_out).to_owned();
for pats.each |&pat| {
for pats.iter().advance |&pat| {
let mut temp = copy initial_state;
self.walk_pat(pat, temp, loop_scopes);
join_bits(&self.dfcx.oper, temp, in_out);
@ -993,7 +993,7 @@ fn bits_to_str(words: &[uint]) -> ~str {
// Note: this is a little endian printout of bytes.
for words.each |&word| {
for words.iter().advance |&word| {
let mut v = word;
for uint::range(0, uint::bytes) |_| {
result.push_char(sep);

View File

@ -138,7 +138,7 @@ fn configure_main(ctxt: @mut EntryContext) {
but you have one or more functions named 'main' that are not \
defined at the crate level. Either move the definition or \
attach the `#[main]` attribute to override this behavior.");
for this.non_main_fns.each |&(_, span)| {
for this.non_main_fns.iter().advance |&(_, span)| {
this.session.span_note(span, "here is a function named 'main'");
}
}

View File

@ -391,7 +391,7 @@ impl LanguageItemCollector {
let this: *mut LanguageItemCollector = &mut *self;
visit_crate(self.crate, ((), mk_simple_visitor(@SimpleVisitor {
visit_item: |item| {
for item.attrs.each |attribute| {
for item.attrs.iter().advance |attribute| {
unsafe {
(*this).match_and_collect_meta_item(
local_def(item.id),

View File

@ -288,7 +288,7 @@ static lint_table: &'static [(&'static str, LintSpec)] = &[
*/
pub fn get_lint_dict() -> LintDict {
let mut map = HashMap::new();
for lint_table.each|&(k, v)| {
for lint_table.iter().advance |&(k, v)| {
map.insert(k, v);
}
return map;
@ -435,7 +435,8 @@ impl Context {
// detect doc(hidden)
let mut doc_hidden = false;
for attr::find_attrs_by_name(attrs, "doc").each |attr| {
let r = attr::find_attrs_by_name(attrs, "doc");
for r.iter().advance |attr| {
match attr::get_meta_item_list(attr.node.value) {
Some(s) => {
if attr::find_meta_items_by_name(s, "hidden").len() > 0 {
@ -472,12 +473,12 @@ impl Context {
// pair instead of just one visitor.
match n {
Item(it) => {
for self.visitors.each |&(orig, stopping)| {
for self.visitors.iter().advance |&(orig, stopping)| {
(orig.visit_item)(it, (self, stopping));
}
}
Crate(c) => {
for self.visitors.each |&(_, stopping)| {
for self.visitors.iter().advance |&(_, stopping)| {
visit::visit_crate(c, (self, stopping));
}
}
@ -486,7 +487,7 @@ impl Context {
// to be a no-op, so manually invoke visit_fn.
Method(m) => {
let fk = visit::fk_method(copy m.ident, &m.generics, m);
for self.visitors.each |&(orig, stopping)| {
for self.visitors.iter().advance |&(orig, stopping)| {
(orig.visit_fn)(&fk, &m.decl, &m.body, m.span, m.id,
(self, stopping));
}
@ -497,12 +498,12 @@ impl Context {
pub fn each_lint(sess: session::Session,
attrs: &[ast::attribute],
f: &fn(@ast::meta_item, level, @str) -> bool) -> bool
{
for [allow, warn, deny, forbid].each |&level| {
f: &fn(@ast::meta_item, level, @str) -> bool) -> bool {
let xs = [allow, warn, deny, forbid];
for xs.iter().advance |&level| {
let level_name = level_to_str(level);
let attrs = attr::find_attrs_by_name(attrs, level_name);
for attrs.each |attr| {
for attrs.iter().advance |attr| {
let meta = attr.node.value;
let metas = match meta.node {
ast::meta_list(_, ref metas) => metas,
@ -511,7 +512,7 @@ pub fn each_lint(sess: session::Session,
loop;
}
};
for metas.each |meta| {
for metas.iter().advance |meta| {
match meta.node {
ast::meta_word(lintname) => {
if !f(*meta, level, lintname) {
@ -525,7 +526,7 @@ pub fn each_lint(sess: session::Session,
}
}
}
return true;
true
}
// Take a visitor, and modify it so that it will not proceed past subitems.
@ -693,7 +694,7 @@ fn lint_type_limits() -> visit::vt<@mut Context> {
fn check_item_default_methods(cx: &Context, item: @ast::item) {
match item.node {
ast::item_trait(_, _, ref methods) => {
for methods.each |method| {
for methods.iter().advance |method| {
match *method {
ast::required(*) => {}
ast::provided(*) => {
@ -735,7 +736,7 @@ fn check_item_ctypes(cx: &Context, it: @ast::item) {
match it.node {
ast::item_foreign_mod(ref nmod) if !nmod.abis.is_intrinsic() => {
for nmod.items.each |ni| {
for nmod.items.iter().advance |ni| {
match ni.node {
ast::foreign_item_fn(ref decl, _, _) => {
check_foreign_fn(cx, decl);
@ -777,7 +778,8 @@ fn check_type_for_lint(cx: &Context, lint: lint, span: span, ty: ty::t) {
}
fn check_type(cx: &Context, span: span, ty: ty::t) {
for [managed_heap_memory, owned_heap_memory, heap_memory].each |lint| {
let xs = [managed_heap_memory, owned_heap_memory, heap_memory];
for xs.iter().advance |lint| {
check_type_for_lint(cx, *lint, span, ty);
}
}
@ -796,7 +798,7 @@ fn check_item_heap(cx: &Context, it: @ast::item) {
// If it's a struct, we also have to check the fields' types
match it.node {
ast::item_struct(struct_def, _) => {
for struct_def.fields.each |struct_field| {
for struct_def.fields.iter().advance |struct_field| {
check_type(cx, struct_field.span,
ty::node_id_to_type(cx.tcx,
struct_field.node.id));
@ -860,7 +862,7 @@ fn check_item_non_camel_case_types(cx: &Context, it: @ast::item) {
}
ast::item_enum(ref enum_definition, _) => {
check_case(cx, it.ident, it.span);
for enum_definition.variants.each |variant| {
for enum_definition.variants.iter().advance |variant| {
check_case(cx, variant.node.name, variant.span);
}
}
@ -905,7 +907,7 @@ fn lint_unused_mut() -> visit::vt<@mut Context> {
}
fn visit_fn_decl(cx: &Context, fd: &ast::fn_decl) {
for fd.inputs.each |arg| {
for fd.inputs.iter().advance |arg| {
if arg.is_mutbl {
check_pat(cx, arg.pat);
}
@ -1052,7 +1054,7 @@ fn lint_missing_doc() -> visit::vt<@mut Context> {
ast::item_struct(sdef, _) if it.vis == ast::public => {
check_attrs(cx, it.attrs, it.span,
"missing documentation for a struct");
for sdef.fields.each |field| {
for sdef.fields.iter().advance |field| {
match field.node.kind {
ast::named_field(_, vis) if vis != ast::private => {
check_attrs(cx, field.node.attrs, field.span,
@ -1100,7 +1102,7 @@ pub fn check_crate(tcx: ty::ctxt, crate: @ast::crate) {
}
// Install command-line options, overriding defaults.
for tcx.sess.opts.lint_opts.each |&(lint, level)| {
for tcx.sess.opts.lint_opts.iter().advance |&(lint, level)| {
cx.set_level(lint, level, CommandLine);
}
@ -1158,7 +1160,7 @@ pub fn check_crate(tcx: ty::ctxt, crate: @ast::crate) {
// If we missed any lints added to the session, then there's a bug somewhere
// in the iteration code.
for tcx.sess.lints.each |_, v| {
for v.each |t| {
for v.iter().advance |t| {
match *t {
(lint, span, ref msg) =>
tcx.sess.span_bug(span, fmt!("unprocessed lint %?: %s",

View File

@ -355,7 +355,7 @@ fn visit_fn(fk: &visit::fn_kind,
debug!("creating fn_maps: %x", transmute(&*fn_maps));
}
for decl.inputs.each |arg| {
for decl.inputs.iter().advance |arg| {
do pat_util::pat_bindings(this.tcx.def_map, arg.pat)
|_bm, arg_id, _x, path| {
debug!("adding argument %d", arg_id);
@ -431,7 +431,7 @@ fn visit_local(local: @local, (this, vt): (@mut IrMaps, vt<@mut IrMaps>)) {
fn visit_arm(arm: &arm, (this, vt): (@mut IrMaps, vt<@mut IrMaps>)) {
let def_map = this.tcx.def_map;
for arm.pats.each |pat| {
for arm.pats.iter().advance |pat| {
do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| {
debug!("adding local variable %d from match with bm %?",
p_id, bm);
@ -470,7 +470,7 @@ fn visit_expr(expr: @expr, (this, vt): (@mut IrMaps, vt<@mut IrMaps>)) {
// construction site.
let cvs = this.capture_map.get(&expr.id);
let mut call_caps = ~[];
for cvs.each |cv| {
for cvs.iter().advance |cv| {
match moves::moved_variable_node_id_from_def(cv.def) {
Some(rv) => {
let cv_ln = this.add_live_node(FreeVarNode(cv.span));
@ -1075,7 +1075,7 @@ impl Liveness {
let ln = self.live_node(expr.id, expr.span);
self.init_empty(ln, succ);
let mut first_merge = true;
for arms.each |arm| {
for arms.iter().advance |arm| {
let body_succ =
self.propagate_through_block(&arm.body, succ);
let guard_succ =
@ -1453,12 +1453,12 @@ fn check_expr(expr: @expr, (this, vt): (@Liveness, vt<@Liveness>)) {
}
expr_inline_asm(ref ia) => {
for ia.inputs.each |&(_, in)| {
for ia.inputs.iter().advance |&(_, in)| {
(vt.visit_expr)(in, (this, vt));
}
// Output operands must be lvalues
for ia.outputs.each |&(_, out)| {
for ia.outputs.iter().advance |&(_, out)| {
match out.node {
expr_addr_of(_, inner) => {
this.check_lvalue(inner, vt);
@ -1594,7 +1594,7 @@ impl Liveness {
}
pub fn warn_about_unused_args(&self, decl: &fn_decl, entry_ln: LiveNode) {
for decl.inputs.each |arg| {
for decl.inputs.iter().advance |arg| {
do pat_util::pat_bindings(self.tcx.def_map, arg.pat)
|_bm, p_id, sp, _n| {
let var = self.variable(p_id, sp);

View File

@ -895,7 +895,7 @@ impl mem_categorization_ctxt {
}
}
Some(&ast::def_const(*)) => {
for subpats.each |&subpat| {
for subpats.iter().advance |&subpat| {
self.cat_pattern(cmt, subpat, op);
}
}
@ -917,7 +917,7 @@ impl mem_categorization_ctxt {
ast::pat_struct(_, ref field_pats, _) => {
// {f1: p1, ..., fN: pN}
for field_pats.each |fp| {
for field_pats.iter().advance |fp| {
let field_ty = self.pat_ty(fp.pat); // see (*)
let cmt_field = self.cat_field(pat, cmt, fp.ident, field_ty);
self.cat_pattern(cmt_field, fp.pat, op);
@ -945,7 +945,7 @@ impl mem_categorization_ctxt {
ast::pat_vec(ref before, slice, ref after) => {
let elt_cmt = self.cat_index(pat, cmt, 0);
for before.each |&before_pat| {
for before.iter().advance |&before_pat| {
self.cat_pattern(elt_cmt, before_pat, op);
}
for slice.iter().advance |&slice_pat| {
@ -953,7 +953,7 @@ impl mem_categorization_ctxt {
let slice_cmt = self.cat_rvalue(pat, slice_ty);
self.cat_pattern(slice_cmt, slice_pat, op);
}
for after.each |&after_pat| {
for after.iter().advance |&after_pat| {
self.cat_pattern(elt_cmt, after_pat, op);
}
}
@ -1041,7 +1041,8 @@ pub fn field_mutbl(tcx: ty::ctxt,
// Need to refactor so that struct/enum fields can be treated uniformly.
match ty::get(base_ty).sty {
ty::ty_struct(did, _) => {
for ty::lookup_struct_fields(tcx, did).each |fld| {
let r = ty::lookup_struct_fields(tcx, did);
for r.iter().advance |fld| {
if fld.ident == f_name {
return Some(ast::m_imm);
}
@ -1050,7 +1051,8 @@ pub fn field_mutbl(tcx: ty::ctxt,
ty::ty_enum(*) => {
match tcx.def_map.get_copy(&node_id) {
ast::def_variant(_, variant_id) => {
for ty::lookup_struct_fields(tcx, variant_id).each |fld| {
let r = ty::lookup_struct_fields(tcx, variant_id);
for r.iter().advance |fld| {
if fld.ident == f_name {
return Some(ast::m_imm);
}

View File

@ -232,7 +232,7 @@ fn compute_modes_for_expr(expr: @expr,
impl VisitContext {
pub fn consume_exprs(&self, exprs: &[@expr], visitor: vt<VisitContext>) {
for exprs.each |expr| {
for exprs.iter().advance |expr| {
self.consume_expr(*expr, visitor);
}
}
@ -263,7 +263,7 @@ impl VisitContext {
debug!("consume_block(blk.id=%?)", blk.node.id);
for blk.node.stmts.each |stmt| {
for blk.node.stmts.iter().advance |stmt| {
(visitor.visit_stmt)(*stmt, (*self, visitor));
}
@ -347,7 +347,7 @@ impl VisitContext {
}
expr_struct(_, ref fields, opt_with) => {
for fields.each |field| {
for fields.iter().advance |field| {
self.consume_expr(field.node.expr, visitor);
}
@ -398,7 +398,7 @@ impl VisitContext {
expr_match(discr, ref arms) => {
// We must do this first so that `arms_have_by_move_bindings`
// below knows which bindings are moves.
for arms.each |arm| {
for arms.iter().advance |arm| {
self.consume_arm(arm, visitor);
}
@ -534,7 +534,7 @@ impl VisitContext {
// for overloaded operatrs, we are always passing in a
// borrowed pointer, so it's always read mode:
for arg_exprs.each |arg_expr| {
for arg_exprs.iter().advance |arg_expr| {
self.use_expr(*arg_expr, Read, visitor);
}
@ -591,7 +591,7 @@ impl VisitContext {
arg_exprs: &[@expr],
visitor: vt<VisitContext>) {
//! Uses the argument expressions.
for arg_exprs.each |arg_expr| {
for arg_exprs.iter().advance |arg_expr| {
self.use_fn_arg(*arg_expr, visitor);
}
}
@ -605,8 +605,8 @@ impl VisitContext {
moves_map: MovesMap,
arms: &[arm])
-> Option<@pat> {
for arms.each |arm| {
for arm.pats.each |&pat| {
for arms.iter().advance |arm| {
for arm.pats.iter().advance |&pat| {
for ast_util::walk_pat(pat) |p| {
if moves_map.contains(&p.id) {
return Some(p);

View File

@ -52,7 +52,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
*count += 1;
}
item_impl(_, _, _, ref methods) => {
for methods.each |method| {
for methods.iter().advance |method| {
privileged_items.push(method.id);
*count += 1;
}
@ -60,7 +60,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
*count += 1;
}
item_foreign_mod(ref foreign_mod) => {
for foreign_mod.items.each |foreign_item| {
for foreign_mod.items.iter().advance |foreign_item| {
privileged_items.push(foreign_item.id);
*count += 1;
}
@ -72,7 +72,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
// Adds items that are privileged to this scope.
let add_privileged_items: @fn(&[@ast::item]) -> uint = |items| {
let mut count = 0;
for items.each |&item| {
for items.iter().advance |&item| {
add_privileged_item(item, &mut count);
}
count
@ -231,7 +231,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
let check_field: @fn(span: span, id: ast::def_id, ident: ast::ident) =
|span, id, ident| {
let fields = ty::lookup_struct_fields(tcx, id);
for fields.each |field| {
for fields.iter().advance |field| {
if field.ident != ident { loop; }
if field.vis == private {
tcx.sess.span_err(span, fmt!("field `%s` is private",
@ -377,7 +377,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
visit_block: |block, (method_map, visitor)| {
// Gather up all the privileged items.
let mut n_added = 0;
for block.node.stmts.each |stmt| {
for block.node.stmts.iter().advance |stmt| {
match stmt.node {
stmt_decl(decl, _) => {
match decl.node {
@ -450,7 +450,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
ty_struct(id, _) => {
if id.crate != local_crate ||
!privileged_items.iter().any_(|x| x == &(id.node)) {
for (*fields).each |field| {
for (*fields).iter().advance |field| {
debug!("(privacy checking) checking \
field in struct literal");
check_field(expr.span, id,
@ -463,7 +463,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
!privileged_items.iter().any_(|x| x == &(id.node)) {
match tcx.def_map.get_copy(&expr.id) {
def_variant(_, variant_id) => {
for (*fields).each |field| {
for (*fields).iter().advance |field| {
debug!("(privacy checking) \
checking field in \
struct variant \
@ -516,7 +516,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
ty_struct(id, _) => {
if id.crate != local_crate ||
!privileged_items.iter().any_(|x| x == &(id.node)) {
for fields.each |field| {
for fields.iter().advance |field| {
debug!("(privacy checking) checking \
struct pattern");
check_field(pattern.span, id,
@ -529,7 +529,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
!privileged_items.iter().any_(|x| x == &enum_id.node) {
match tcx.def_map.find(&pattern.id) {
Some(&def_variant(_, variant_id)) => {
for fields.each |field| {
for fields.iter().advance |field| {
debug!("(privacy checking) \
checking field in \
struct variant pattern");

View File

@ -197,7 +197,7 @@ impl RegionMaps {
while i < queue.len() {
match self.free_region_map.find(&queue[i]) {
Some(parents) => {
for parents.each |parent| {
for parents.iter().advance |parent| {
if *parent == sup {
return true;
}
@ -732,7 +732,7 @@ pub fn determine_rp_in_fn(fk: &visit::fn_kind,
visit::vt<@mut DetermineRpCtxt>)) {
do cx.with(cx.item_id, false) {
do cx.with_ambient_variance(rv_contravariant) {
for decl.inputs.each |a| {
for decl.inputs.iter().advance |a| {
(visitor.visit_ty)(a.ty, (cx, visitor));
}
}
@ -843,7 +843,7 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
ast::ty_path(path, _) => {
// type parameters are---for now, anyway---always invariant
do cx.with_ambient_variance(rv_invariant) {
for path.types.each |tp| {
for path.types.iter().advance |tp| {
(visitor.visit_ty)(*tp, (cx, visitor));
}
}
@ -856,7 +856,7 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
do cx.with(cx.item_id, false) {
// parameters are contravariant
do cx.with_ambient_variance(rv_contravariant) {
for decl.inputs.each |a| {
for decl.inputs.iter().advance |a| {
(visitor.visit_ty)(a.ty, (cx, visitor));
}
}
@ -936,7 +936,7 @@ pub fn determine_rp_in_crate(sess: Session,
match cx.dep_map.find(&c_id) {
None => {}
Some(deps) => {
for deps.each |dep| {
for deps.iter().advance |dep| {
let v = add_variance(dep.ambient_variance, c_variance);
cx.add_rp(dep.id, v);
}

View File

@ -1057,7 +1057,7 @@ impl Resolver {
}
// Check each statement.
for block.node.stmts.each |statement| {
for block.node.stmts.iter().advance |statement| {
match statement.node {
stmt_decl(declaration, _) => {
match declaration.node {
@ -1178,7 +1178,7 @@ impl Resolver {
name_bindings.define_type
(privacy, def_ty(local_def(item.id)), sp);
for (*enum_definition).variants.each |variant| {
for (*enum_definition).variants.iter().advance |variant| {
self.build_reduced_graph_for_variant(
variant,
local_def(item.id),
@ -1227,7 +1227,7 @@ impl Resolver {
// Bail out early if there are no static methods.
let mut methods_seen = HashMap::new();
let mut has_static_methods = false;
for methods.each |method| {
for methods.iter().advance |method| {
match method.explicit_self.node {
sty_static => has_static_methods = true,
_ => {
@ -1282,7 +1282,7 @@ impl Resolver {
};
// For each static method...
for methods.each |method| {
for methods.iter().advance |method| {
match method.explicit_self.node {
sty_static => {
// Add the static method to the
@ -1319,7 +1319,7 @@ impl Resolver {
// We only need to create the module if the trait has static
// methods, so check that first.
let mut has_static_methods = false;
for (*methods).each |method| {
for (*methods).iter().advance |method| {
let ty_m = trait_method_to_ty_method(method);
match ty_m.explicit_self.node {
sty_static => {
@ -1347,7 +1347,7 @@ impl Resolver {
// Add the names of all the methods to the trait info.
let mut method_names = HashMap::new();
for methods.each |method| {
for methods.iter().advance |method| {
let ty_m = trait_method_to_ty_method(method);
let ident = ty_m.ident;
@ -1452,7 +1452,7 @@ impl Resolver {
let privacy = visibility_to_privacy(view_item.vis);
match view_item.node {
view_item_use(ref view_paths) => {
for view_paths.each |view_path| {
for view_paths.iter().advance |view_path| {
// Extract and intern the module part of the path. For
// globs and lists, the path is found directly in the AST;
// for simple paths we have to munge the path a little.
@ -1472,7 +1472,7 @@ impl Resolver {
view_path_glob(module_ident_path, _) |
view_path_list(module_ident_path, _, _) => {
for module_ident_path.idents.each |ident| {
for module_ident_path.idents.iter().advance |ident| {
module_path.push(*ident);
}
}
@ -1493,7 +1493,7 @@ impl Resolver {
id);
}
view_path_list(_, ref source_idents, _) => {
for source_idents.each |source_ident| {
for source_idents.iter().advance |source_ident| {
let name = source_ident.node.name;
let subclass = @SingleImport(name, name);
self.build_import_directive(privacy,
@ -1686,7 +1686,7 @@ impl Resolver {
let method_def_ids =
get_trait_method_def_ids(self.session.cstore, def_id);
let mut interned_method_names = HashSet::new();
for method_def_ids.each |&method_def_id| {
for method_def_ids.iter().advance |&method_def_id| {
let (method_name, explicit_self) =
get_method_name_and_explicit_self(self.session.cstore,
method_def_id);
@ -1767,7 +1767,7 @@ impl Resolver {
// need to.
let mut current_module = root;
for pieces.each |ident_str| {
for pieces.iter().advance |ident_str| {
let ident = self.session.ident_of(*ident_str);
// Create or reuse a graph node for the child.
let (child_name_bindings, new_parent) =
@ -1887,8 +1887,7 @@ impl Resolver {
// Add each static method to the module.
let new_parent = ModuleReducedGraphParent(
type_module);
for static_methods.each
|static_method_info| {
for static_methods.iter().advance |static_method_info| {
let ident = static_method_info.ident;
debug!("(building reduced graph for \
external crate) creating \
@ -2074,7 +2073,7 @@ impl Resolver {
pub fn idents_to_str(@mut self, idents: &[ident]) -> ~str {
let mut first = true;
let mut result = ~"";
for idents.each |ident| {
for idents.iter().advance |ident| {
if first { first = false; } else { result += "::" };
result += self.session.str_of(*ident);
};
@ -3270,7 +3269,8 @@ impl Resolver {
self.session.str_of(*ident));
loop;
}
for [ TypeNS, ValueNS ].each |ns| {
let xs = [TypeNS, ValueNS];
for xs.iter().advance |ns| {
match importresolution.target_for_namespace(*ns) {
Some(target) => {
debug!("(computing exports) maybe reexport '%s'",
@ -3517,7 +3517,7 @@ impl Resolver {
// enum item: resolve all the variants' discrs,
// then resolve the ty params
item_enum(ref enum_def, ref generics) => {
for (*enum_def).variants.each() |variant| {
for (*enum_def).variants.iter().advance |variant| {
for variant.node.disr_expr.iter().advance |dis_expr| {
// resolve the discriminator expr
// as a constant
@ -3575,7 +3575,7 @@ impl Resolver {
visitor);
// Resolve derived traits.
for traits.each |trt| {
for traits.iter().advance |trt| {
match self.resolve_path(trt.path, TypeNS, true,
visitor) {
None =>
@ -3595,7 +3595,7 @@ impl Resolver {
}
}
for (*methods).each |method| {
for (*methods).iter().advance |method| {
// Create a new rib for the method-specific type
// parameters.
//
@ -3615,7 +3615,7 @@ impl Resolver {
&ty_m.generics.ty_params,
visitor);
for ty_m.decl.inputs.each |argument| {
for ty_m.decl.inputs.iter().advance |argument| {
self.resolve_type(argument.ty, visitor);
}
@ -3652,7 +3652,7 @@ impl Resolver {
item_foreign_mod(ref foreign_module) => {
do self.with_scope(Some(item.ident)) {
for foreign_module.items.each |foreign_item| {
for foreign_module.items.iter().advance |foreign_item| {
match foreign_item.node {
foreign_item_fn(_, _, ref generics) => {
self.with_type_parameter_rib(
@ -3799,7 +3799,7 @@ impl Resolver {
// Nothing to do.
}
Some(declaration) => {
for declaration.inputs.each |argument| {
for declaration.inputs.iter().advance |argument| {
let binding_mode = ArgumentIrrefutableMode;
let mutability =
if argument.is_mutbl {Mutable} else {Immutable};
@ -3878,7 +3878,7 @@ impl Resolver {
self.resolve_type_parameters(&generics.ty_params, visitor);
// Resolve fields.
for fields.each |field| {
for fields.iter().advance |field| {
self.resolve_type(field.node.ty, visitor);
}
}
@ -3953,7 +3953,7 @@ impl Resolver {
// Resolve the self type.
self.resolve_type(self_type, visitor);
for methods.each |method| {
for methods.iter().advance |method| {
// We also need a new scope for the method-specific
// type parameters.
self.resolve_method(MethodRibKind(
@ -4073,7 +4073,7 @@ impl Resolver {
self.value_ribs.push(@Rib(NormalRibKind));
let bindings_list = @mut HashMap::new();
for arm.pats.each |pattern| {
for arm.pats.iter().advance |pattern| {
self.resolve_pattern(*pattern, RefutableMode, Immutable,
Some(bindings_list), visitor);
}
@ -4326,7 +4326,7 @@ impl Resolver {
}
// Check the types in the path pattern.
for path.types.each |ty| {
for path.types.iter().advance |ty| {
self.resolve_type(*ty, visitor);
}
}
@ -4359,7 +4359,7 @@ impl Resolver {
}
// Check the types in the path pattern.
for path.types.each |ty| {
for path.types.iter().advance |ty| {
self.resolve_type(*ty, visitor);
}
}
@ -4388,7 +4388,7 @@ impl Resolver {
}
// Check the types in the path pattern.
for path.types.each |ty| {
for path.types.iter().advance |ty| {
self.resolve_type(*ty, visitor);
}
}
@ -4483,7 +4483,7 @@ impl Resolver {
visitor: ResolveVisitor)
-> Option<def> {
// First, resolve the types.
for path.types.each |ty| {
for path.types.iter().advance |ty| {
self.resolve_type(*ty, visitor);
}
@ -4872,11 +4872,11 @@ impl Resolver {
i -= 1;
match this.type_ribs[i].kind {
MethodRibKind(node_id, _) =>
for this.crate.node.module.items.each |item| {
for this.crate.node.module.items.iter().advance |item| {
if item.id == node_id {
match item.node {
item_struct(class_def, _) => {
for class_def.fields.each |field| {
for class_def.fields.iter().advance |field| {
match field.node.kind {
unnamed_field => {},
named_field(ident, _) => {
@ -5130,7 +5130,7 @@ impl Resolver {
// Look for the current trait.
match /*bad*/copy self.current_trait_refs {
Some(trait_def_ids) => {
for trait_def_ids.each |trait_def_id| {
for trait_def_ids.iter().advance |trait_def_id| {
if candidate_traits.contains(trait_def_id) {
self.add_trait_info(
&mut found_traits,
@ -5281,7 +5281,7 @@ impl Resolver {
match vi.node {
view_item_extern_mod(*) => {} // ignore
view_item_use(ref path) => {
for path.each |p| {
for path.iter().advance |p| {
match p.node {
view_path_simple(_, _, id) | view_path_glob(_, id) => {
if !self.used_imports.contains(&id) {
@ -5292,7 +5292,7 @@ impl Resolver {
}
view_path_list(_, ref list, _) => {
for list.each |i| {
for list.iter().advance |i| {
if !self.used_imports.contains(&i.node.id) {
self.session.add_lint(unused_imports,
i.node.id, i.span,

View File

@ -298,7 +298,7 @@ pub fn variant_opt(bcx: block, pat_id: ast::node_id)
match ccx.tcx.def_map.get_copy(&pat_id) {
ast::def_variant(enum_id, var_id) => {
let variants = ty::enum_variants(ccx.tcx, enum_id);
for (*variants).each |v| {
for (*variants).iter().advance |v| {
if var_id == v.id {
return var(v.disr_val,
adt::represent_node(bcx, pat_id))
@ -363,7 +363,7 @@ pub fn matches_to_str(bcx: block, m: &[@Match]) -> ~str {
}
pub fn has_nested_bindings(m: &[@Match], col: uint) -> bool {
for m.each |br| {
for m.iter().advance |br| {
match br.pats[col].node {
ast::pat_ident(_, _, Some(_)) => return true,
_ => ()
@ -432,7 +432,7 @@ pub fn enter_match<'r>(bcx: block,
let _indenter = indenter();
let mut result = ~[];
for m.each |br| {
for m.iter().advance |br| {
match e(br.pats[col]) {
Some(sub) => {
let pats =
@ -579,8 +579,8 @@ pub fn enter_opt<'r>(bcx: block,
// specified in the struct definition. Also fill in
// unspecified fields with dummy.
let mut reordered_patterns = ~[];
for ty::lookup_struct_fields(tcx, struct_id).each
|field| {
let r = ty::lookup_struct_fields(tcx, struct_id);
for r.iter().advance |field| {
match field_pats.iter().find_(|p| p.ident == field.ident) {
None => reordered_patterns.push(dummy),
Some(fp) => reordered_patterns.push(fp.pat)
@ -640,7 +640,7 @@ pub fn enter_rec_or_struct<'r>(bcx: block,
match p.node {
ast::pat_struct(_, ref fpats, _) => {
let mut pats = ~[];
for fields.each |fname| {
for fields.iter().advance |fname| {
match fpats.iter().find_(|p| p.ident == *fname) {
None => pats.push(dummy),
Some(pat) => pats.push(pat.pat)
@ -802,7 +802,7 @@ pub fn get_options(bcx: block, m: &[@Match], col: uint) -> ~[Opt] {
}
let mut found = ~[];
for m.each |br| {
for m.iter().advance |br| {
let cur = br.pats[col];
match cur.node {
ast::pat_lit(l) => {
@ -948,7 +948,7 @@ pub fn collect_record_or_struct_fields(bcx: block,
col: uint)
-> ~[ast::ident] {
let mut fields: ~[ast::ident] = ~[];
for m.each |br| {
for m.iter().advance |br| {
match br.pats[col].node {
ast::pat_struct(_, ref fs, _) => {
match ty::get(node_id_type(bcx, br.pats[col].id)).sty {
@ -962,7 +962,7 @@ pub fn collect_record_or_struct_fields(bcx: block,
return fields;
fn extend(idents: &mut ~[ast::ident], field_pats: &[ast::field_pat]) {
for field_pats.each |field_pat| {
for field_pats.iter().advance |field_pat| {
let field_ident = field_pat.ident;
if !idents.iter().any_(|x| *x == field_ident) {
idents.push(field_ident);
@ -987,7 +987,7 @@ pub fn root_pats_as_necessary(mut bcx: block,
col: uint,
val: ValueRef)
-> block {
for m.each |br| {
for m.iter().advance |br| {
let pat_id = br.pats[col].id;
if pat_id != 0 {
let datum = Datum {val: val, ty: node_id_type(bcx, pat_id),
@ -1056,14 +1056,14 @@ pub fn pick_col(m: &[@Match]) -> uint {
}
}
let mut scores = vec::from_elem(m[0].pats.len(), 0u);
for m.each |br| {
for m.iter().advance |br| {
let mut i = 0u;
for br.pats.each |p| { scores[i] += score(*p); i += 1u; }
for br.pats.iter().advance |p| { scores[i] += score(*p); i += 1u; }
}
let mut max_score = 0u;
let mut best_col = 0u;
let mut i = 0u;
for scores.each |score| {
for scores.iter().advance |score| {
let score = *score;
// Irrefutable columns always go first, they'd only be duplicated in
@ -1236,7 +1236,7 @@ pub fn compile_guard(bcx: block,
let val = bool_to_i1(bcx, val);
// Revoke the temp cleanups now that the guard successfully executed.
for temp_cleanups.each |llval| {
for temp_cleanups.iter().advance |llval| {
revoke_clean(bcx, *llval);
}
@ -1314,7 +1314,7 @@ pub fn compile_submatch(bcx: block,
let ccx = bcx.fcx.ccx;
let mut pat_id = 0;
let mut pat_span = dummy_sp();
for m.each |br| {
for m.iter().advance |br| {
// Find a real id (we're adding placeholder wildcard patterns, but
// each column is guaranteed to have at least one real pattern)
if pat_id == 0 {
@ -1442,7 +1442,7 @@ pub fn compile_submatch(bcx: block,
}
}
}
for opts.each |o| {
for opts.iter().advance |o| {
match *o {
range(_, _) => { kind = compare; break }
_ => ()
@ -1464,7 +1464,7 @@ pub fn compile_submatch(bcx: block,
let mut i = 0u;
// Compile subtrees for each option
for opts.each |opt| {
for opts.iter().advance |opt| {
i += 1u;
let mut opt_cx = else_cx;
if !exhaustive || i < len {
@ -1680,7 +1680,7 @@ pub fn trans_match_inner(scope_cx: block,
arm: arm,
bindings_map: bindings_map};
arm_datas.push(arm_data);
for arm.pats.each |p| {
for arm.pats.iter().advance |p| {
matches.push(@Match {pats: ~[*p], data: arm_data});
}
}
@ -1701,7 +1701,7 @@ pub fn trans_match_inner(scope_cx: block,
compile_submatch(bcx, matches, [lldiscr], chk);
let mut arm_cxs = ~[];
for arm_datas.each |arm_data| {
for arm_datas.iter().advance |arm_data| {
let mut bcx = arm_data.bodycx;
// If this arm has a guard, then the various by-value bindings have
@ -1844,7 +1844,7 @@ pub fn bind_irrefutable_pat(bcx: block,
let pat_ty = node_id_type(bcx, pat.id);
let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
do expr::with_field_tys(tcx, pat_ty, None) |discr, field_tys| {
for fields.each |f| {
for fields.iter().advance |f| {
let ix = ty::field_idx_strict(tcx, f.ident, field_tys);
let fldptr = adt::trans_field_ptr(bcx, pat_repr, val,
discr, ix);

View File

@ -248,7 +248,7 @@ fn generic_fields_of(cx: &mut CrateContext, r: &Repr, sizing: bool) -> ~[Type] {
let mut most_aligned = None;
let mut largest_align = 0;
let mut largest_size = 0;
for sts.each |st| {
for sts.iter().advance |st| {
if largest_size < st.size {
largest_size = st.size;
}

View File

@ -64,7 +64,7 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
};
for cleanups.each |c| {
for cleanups.iter().advance |c| {
revoke_clean(bcx, *c);
}
cleanups.clear();
@ -85,7 +85,7 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
};
for cleanups.each |c| {
for cleanups.iter().advance |c| {
revoke_clean(bcx, *c);
}

View File

@ -670,7 +670,7 @@ pub fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t,
n_variants);
let next_cx = sub_block(cx, "enum-iter-next");
for (*variants).each |variant| {
for (*variants).iter().advance |variant| {
let variant_cx =
sub_block(cx, ~"enum-iter-variant-" +
int::to_str(variant.disr_val));
@ -804,7 +804,7 @@ pub fn invoke(bcx: block, llfn: ValueRef, llargs: ~[ValueRef])
debug!("invoking %x at %x",
::core::cast::transmute(llfn),
::core::cast::transmute(bcx.llbb));
for llargs.each |&llarg| {
for llargs.iter().advance |&llarg| {
debug!("arg: %x", ::core::cast::transmute(llarg));
}
}
@ -820,7 +820,7 @@ pub fn invoke(bcx: block, llfn: ValueRef, llargs: ~[ValueRef])
debug!("calling %x at %x",
::core::cast::transmute(llfn),
::core::cast::transmute(bcx.llbb));
for llargs.each |&llarg| {
for llargs.iter().advance |&llarg| {
debug!("arg: %x", ::core::cast::transmute(llarg));
}
}
@ -849,7 +849,7 @@ pub fn need_invoke(bcx: block) -> bool {
match cur.kind {
block_scope(inf) => {
let inf = &mut *inf; // FIXME(#5074) workaround old borrowck
for inf.cleanups.each |cleanup| {
for inf.cleanups.iter().advance |cleanup| {
match *cleanup {
clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) => {
if cleanup_type == normal_exit_and_unwind {
@ -1366,7 +1366,7 @@ pub fn with_scope_datumblock(bcx: block, opt_node_info: Option<NodeInfo>,
}
pub fn block_locals(b: &ast::blk, it: &fn(@ast::local)) {
for b.node.stmts.each |s| {
for b.node.stmts.iter().advance |s| {
match s.node {
ast::stmt_decl(d, _) => {
match d.node {
@ -2046,7 +2046,7 @@ pub fn trans_tuple_struct(ccx: @mut CrateContext,
pub fn trans_enum_def(ccx: @mut CrateContext, enum_definition: &ast::enum_def,
id: ast::node_id, vi: @~[ty::VariantInfo],
i: &mut uint) {
for enum_definition.variants.each |variant| {
for enum_definition.variants.iter().advance |variant| {
let disr_val = vi[*i].disr_val;
*i += 1;
@ -2097,7 +2097,7 @@ pub fn trans_item(ccx: @mut CrateContext, item: &ast::item) {
None,
item.attrs);
} else {
for body.node.stmts.each |stmt| {
for body.node.stmts.iter().advance |stmt| {
match stmt.node {
ast::stmt_decl(@codemap::spanned { node: ast::decl_item(i),
_ }, _) => {
@ -2126,7 +2126,7 @@ pub fn trans_item(ccx: @mut CrateContext, item: &ast::item) {
consts::trans_const(ccx, expr, item.id);
// Do static_assert checking. It can't really be done much earlier because we need to get
// the value of the bool out of LLVM
for item.attrs.each |attr| {
for item.attrs.iter().advance |attr| {
match attr.node.value.node {
ast::meta_word(x) => {
if x.slice(0, x.len()) == "static_assert" {
@ -2175,7 +2175,7 @@ pub fn trans_struct_def(ccx: @mut CrateContext, struct_def: @ast::struct_def) {
// and control visibility.
pub fn trans_mod(ccx: @mut CrateContext, m: &ast::_mod) {
let _icx = push_ctxt("trans_mod");
for m.items.each |item| {
for m.items.iter().advance |item| {
trans_item(ccx, *item);
}
}
@ -2549,7 +2549,7 @@ pub fn trans_constant(ccx: @mut CrateContext, it: @ast::item) {
node: it.id });
let mut i = 0;
let path = item_path(ccx, it);
for (*enum_definition).variants.each |variant| {
for (*enum_definition).variants.iter().advance |variant| {
let p = vec::append(/*bad*/copy path, [
path_name(variant.node.name),
path_name(special_idents::descrim)
@ -2729,7 +2729,7 @@ pub fn create_module_map(ccx: &mut CrateContext) -> ValueRef {
keys.push(k.to_managed());
}
for keys.each |key| {
for keys.iter().advance |key| {
let val = *ccx.module_data.find_equiv(key).get();
let s_const = C_cstr(ccx, *key);
let s_ptr = p2i(ccx, s_const);

View File

@ -133,7 +133,7 @@ impl ABIInfo for ARM_ABIInfo {
ret_def: bool) -> FnType {
let mut arg_tys = ~[];
let mut attrs = ~[];
for atys.each |&aty| {
for atys.iter().advance |&aty| {
let (ty, attr) = classify_arg_ty(aty);
arg_tys.push(ty);
attrs.push(attr);

View File

@ -189,7 +189,7 @@ impl ABIInfo for MIPS_ABIInfo {
let mut attrs = ~[];
let mut offset = if sret { 4 } else { 0 };
for atys.each() |aty| {
for atys.iter().advance |aty| {
let (ty, attr) = classify_arg_ty(*aty, &mut offset);
arg_tys.push(ty);
attrs.push(attr);

View File

@ -176,7 +176,7 @@ fn classify_ty(ty: Type) -> ~[RegClass] {
cls: &mut [RegClass], i: uint,
off: uint) {
let mut field_off = off;
for tys.each |ty| {
for tys.iter().advance |ty| {
field_off = align(field_off, *ty);
classify(*ty, cls, i, field_off);
field_off += ty_size(*ty);
@ -294,7 +294,7 @@ fn classify_ty(ty: Type) -> ~[RegClass] {
fn llreg_ty(cls: &[RegClass]) -> Type {
fn llvec_len(cls: &[RegClass]) -> uint {
let mut len = 1u;
for cls.each |c| {
for cls.iter().advance |c| {
if *c != SSEUp {
break;
}
@ -355,7 +355,7 @@ fn x86_64_tys(atys: &[Type],
let mut arg_tys = ~[];
let mut attrs = ~[];
for atys.each |t| {
for atys.iter().advance |t| {
let (ty, attr) = x86_64_ty(*t, |cls| cls.is_pass_byval(), ByValAttribute);
arg_tys.push(ty);
attrs.push(attr);

View File

@ -575,7 +575,7 @@ pub fn trans_call_inner(in_cx: block,
// Uncomment this to debug calls.
/*
io::println(fmt!("calling: %s", bcx.val_to_str(llfn)));
for llargs.each |llarg| {
for llargs.iter().advance |llarg| {
io::println(fmt!("arg: %s", bcx.val_to_str(*llarg)));
}
io::println("---");

View File

@ -266,7 +266,7 @@ pub fn build_closure(bcx0: block,
// Package up the captured upvars
let mut env_vals = ~[];
for cap_vars.each |cap_var| {
for cap_vars.iter().advance |cap_var| {
debug!("Building closure: captured variable %?", *cap_var);
let datum = expr::trans_local_var(bcx, cap_var.def);
match cap_var.mode {
@ -346,7 +346,7 @@ pub fn load_environment(fcx: fn_ctxt,
// Populate the upvars from the environment.
let mut i = 0u;
for cap_vars.each |cap_var| {
for cap_vars.iter().advance |cap_var| {
let mut upvarptr = GEPi(bcx, llcdata, [0u, i]);
match sigil {
ast::BorrowedSigil => { upvarptr = Load(bcx, upvarptr); }

View File

@ -143,7 +143,7 @@ pub struct param_substs {
impl param_substs {
pub fn validate(&self) {
for self.tys.each |t| { assert!(!ty::type_needs_infer(*t)); }
for self.tys.iter().advance |t| { assert!(!ty::type_needs_infer(*t)); }
for self.self_ty.iter().advance |t| { assert!(!ty::type_needs_infer(*t)); }
}
}
@ -982,7 +982,7 @@ pub fn align_to(cx: block, off: ValueRef, align: ValueRef) -> ValueRef {
pub fn path_str(sess: session::Session, p: &[path_elt]) -> ~str {
let mut r = ~"";
let mut first = true;
for p.each |e| {
for p.iter().advance |e| {
match *e {
ast_map::path_name(s) | ast_map::path_mod(s) => {
if first { first = false; }

View File

@ -40,7 +40,7 @@ pub fn trans_block(bcx: block, b: &ast::blk, dest: expr::Dest) -> block {
do block_locals(b) |local| {
bcx = alloc_local(bcx, local);
};
for b.node.stmts.each |s| {
for b.node.stmts.iter().advance |s| {
debuginfo::update_source_pos(bcx, b.span);
bcx = trans_stmt(bcx, *s);
}
@ -113,7 +113,7 @@ pub fn trans_if(bcx: block,
pub fn join_blocks(parent_bcx: block, in_cxs: &[block]) -> block {
let out = sub_block(parent_bcx, "join");
let mut reachable = false;
for in_cxs.each |bcx| {
for in_cxs.iter().advance |bcx| {
if !bcx.unreachable {
Br(*bcx, out.llbb);
reachable = true;

View File

@ -335,7 +335,7 @@ fn create_struct(cx: @mut CrateContext, t: ty::t, fields: ~[ty::field], span: sp
let file_md = create_file(cx, loc.file.name);
let mut scx = StructContext::new(cx, ty_to_str(cx.tcx, t), file_md, loc.line);
for fields.each |field| {
for fields.iter().advance |field| {
let field_t = field.mt.ty;
let ty_md = create_ty(cx, field_t, span);
let (size, align) = size_and_align_of(cx, field_t);
@ -362,7 +362,7 @@ fn create_tuple(cx: @mut CrateContext, _t: ty::t, elements: &[ty::t], span: span
let name = (cx.sess.str_of((dbg_cx(cx).names)("tuple"))).to_owned();
let mut scx = StructContext::new(cx, name, file_md, loc.line);
for elements.each |element| {
for elements.iter().advance |element| {
let ty_md = create_ty(cx, *element, span);
let (size, align) = size_and_align_of(cx, *element);
scx.add_member("", loc.line, size, align, ty_md);

View File

@ -1221,7 +1221,7 @@ fn trans_adt(bcx: block, repr: &adt::Repr, discr: int,
let mut bcx = bcx;
let addr = match dest {
Ignore => {
for fields.each |&(_i, e)| {
for fields.iter().advance |&(_i, e)| {
bcx = trans_into(bcx, e, Ignore);
}
for optbase.iter().advance |sbi| {
@ -1233,7 +1233,7 @@ fn trans_adt(bcx: block, repr: &adt::Repr, discr: int,
};
let mut temp_cleanups = ~[];
adt::trans_start_init(bcx, repr, addr, discr);
for fields.each |&(i, e)| {
for fields.iter().advance |&(i, e)| {
let dest = adt::trans_field_ptr(bcx, repr, addr, discr, i);
let e_ty = expr_ty(bcx, e);
bcx = trans_into(bcx, e, SaveIn(dest));
@ -1253,7 +1253,7 @@ fn trans_adt(bcx: block, repr: &adt::Repr, discr: int,
}
}
for temp_cleanups.each |cleanup| {
for temp_cleanups.iter().advance |cleanup| {
revoke_clean(bcx, *cleanup);
}
return bcx;

View File

@ -289,7 +289,7 @@ pub fn trans_foreign_mod(ccx: @mut CrateContext,
Some(abi) => abi,
};
for foreign_mod.items.each |&foreign_item| {
for foreign_mod.items.iter().advance |&foreign_item| {
match foreign_item.node {
ast::foreign_item_fn(*) => {
let id = foreign_item.id;

View File

@ -130,7 +130,7 @@ pub fn static_size_of_enum(cx: &mut CrateContext, t: ty::t) -> uint {
// Compute max(variant sizes).
let mut max_size = 0;
let variants = ty::enum_variants(cx.tcx, tid);
for variants.each |variant| {
for variants.iter().advance |variant| {
if variant.args.len() == 0 {
loop;
}

View File

@ -59,7 +59,7 @@ pub fn trans_impl(ccx: @mut CrateContext,
if !generics.ty_params.is_empty() { return; }
let sub_path = vec::append_one(path, path_name(name));
for methods.each |method| {
for methods.iter().advance |method| {
if method.generics.ty_params.len() == 0u {
let llfn = get_item_val(ccx, method.id);
let path = vec::append_one(/*bad*/copy sub_path,
@ -175,7 +175,7 @@ pub fn trans_self_arg(bcx: block,
// FIXME(#3446)---this is wrong, actually. The temp_cleanups
// should be revoked only after all arguments have been passed.
for temp_cleanups.each |c| {
for temp_cleanups.iter().advance |c| {
revoke_clean(bcx, *c)
}
@ -406,7 +406,7 @@ pub fn method_with_name_or_default(ccx: @mut CrateContext,
let pmm = ccx.tcx.provided_methods;
match pmm.find(&impl_id) {
Some(pmis) => {
for pmis.each |pmi| {
for pmis.iter().advance |pmi| {
if pmi.method_info.ident == name {
debug!("pmi.method_info.did = %?",
pmi.method_info.did);
@ -773,7 +773,7 @@ pub fn make_vtable(ccx: @mut CrateContext,
let _icx = push_ctxt("impl::make_vtable");
let mut components = ~[ tydesc.tydesc ];
for ptrs.each |&ptr| {
for ptrs.iter().advance |&ptr| {
components.push(ptr)
}

View File

@ -69,8 +69,8 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
}
});
for real_substs.tps.each() |s| { assert!(!ty::type_has_params(*s)); }
for substs.each() |s| { assert!(!ty::type_has_params(*s)); }
for real_substs.tps.iter().advance |s| { assert!(!ty::type_has_params(*s)); }
for substs.iter().advance |s| { assert!(!ty::type_has_params(*s)); }
let param_uses = type_use::type_uses_for(ccx, fn_id, substs.len());
let hash_id = make_mono_id(ccx, fn_id, substs, vtables, impl_did_opt,
Some(param_uses));
@ -340,7 +340,7 @@ pub fn make_mono_id(ccx: @mut CrateContext,
let mut i = 0;
vec::map_zip(*item_ty.generics.type_param_defs, substs, |type_param_def, subst| {
let mut v = ~[];
for type_param_def.bounds.trait_bounds.each |_bound| {
for type_param_def.bounds.trait_bounds.iter().advance |_bound| {
v.push(meth::vtable_id(ccx, &vts[i]));
i += 1;
}

View File

@ -59,7 +59,7 @@ fn traverse_exports(cx: @mut ctx, mod_id: node_id) -> bool {
let mut found_export = false;
match cx.exp_map2.find(&mod_id) {
Some(ref exp2s) => {
for (*exp2s).each |e2| {
for (*exp2s).iter().advance |e2| {
found_export = true;
traverse_def_id(cx, e2.def_id)
};
@ -90,7 +90,7 @@ fn traverse_def_id(cx: @mut ctx, did: def_id) {
fn traverse_public_mod(cx: @mut ctx, mod_id: node_id, m: &_mod) {
if !traverse_exports(cx, mod_id) {
// No exports, so every local item is exported
for m.items.each |item| {
for m.items.iter().advance |item| {
traverse_public_item(cx, *item);
}
}
@ -109,7 +109,7 @@ fn traverse_public_item(cx: @mut ctx, item: @item) {
item_mod(ref m) => traverse_public_mod(cx, item.id, m),
item_foreign_mod(ref nm) => {
if !traverse_exports(cx, item.id) {
for nm.items.each |item| {
for nm.items.iter().advance |item| {
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
cx.rmap.insert(item.id);
}
@ -122,7 +122,7 @@ fn traverse_public_item(cx: @mut ctx, item: @item) {
}
}
item_impl(ref generics, _, _, ref ms) => {
for ms.each |m| {
for ms.iter().advance |m| {
if generics.ty_params.len() > 0u ||
m.generics.ty_params.len() > 0u ||
attr::find_inline_attr(m.attrs) != attr::ia_none
@ -168,7 +168,7 @@ fn traverse_ty<'a>(ty: @Ty, (cx, v): (@mut ctx<'a>, visit::vt<@mut ctx<'a>>)) {
Some(&d) => traverse_def_id(cx, def_id_of_def(d)),
None => { /* do nothing -- but should we fail here? */ }
}
for p.types.each |t| {
for p.types.iter().advance |t| {
(v.visit_ty)(*t, (cx, v));
}
}

View File

@ -376,7 +376,7 @@ pub fn write_content(bcx: block,
ast::expr_vec(ref elements, _) => {
match dest {
Ignore => {
for elements.each |element| {
for elements.iter().advance |element| {
bcx = expr::trans_into(bcx, *element, Ignore);
}
}
@ -392,7 +392,7 @@ pub fn write_content(bcx: block,
add_clean_temp_mem(bcx, lleltptr, vt.unit_ty);
temp_cleanups.push(lleltptr);
}
for temp_cleanups.each |cleanup| {
for temp_cleanups.iter().advance |cleanup| {
revoke_clean(bcx, *cleanup);
}
}

View File

@ -78,7 +78,7 @@ pub fn type_uses_for(ccx: @mut CrateContext, fn_id: def_id, n_tps: uint)
match ty::get(ty::lookup_item_type(cx.ccx.tcx, fn_id).ty).sty {
ty::ty_bare_fn(ty::BareFnTy {sig: ref sig, _}) |
ty::ty_closure(ty::ClosureTy {sig: ref sig, _}) => {
for sig.inputs.each |arg| {
for sig.inputs.iter().advance |arg| {
type_needs(cx, use_repr, *arg);
}
}
@ -214,7 +214,7 @@ pub fn type_needs_inner(cx: Context,
if list::find(enums_seen, |id| *id == did).is_none() {
let seen = @Cons(did, enums_seen);
for vec::each(*ty::enum_variants(cx.ccx.tcx, did)) |v| {
for v.args.each |aty| {
for v.args.iter().advance |aty| {
let t = ty::subst(cx.ccx.tcx, &(*substs), *aty);
type_needs_inner(cx, use_, t, seen);
}
@ -314,7 +314,7 @@ pub fn mark_for_expr(cx: Context, e: @expr) {
match ty::ty_closure_sigil(ty::expr_ty(cx.ccx.tcx, e)) {
ast::OwnedSigil => {}
ast::BorrowedSigil | ast::ManagedSigil => {
for freevars::get_freevars(cx.ccx.tcx, e.id).each |fv| {
for freevars::get_freevars(cx.ccx.tcx, e.id).iter().advance |fv| {
let node_id = ast_util::def_id_of_def(fv.def).node;
node_type_needs(cx, use_repr, node_id);
}
@ -344,7 +344,8 @@ pub fn mark_for_expr(cx: Context, e: @expr) {
node_type_needs(cx, use_tydesc, val.id);
}
expr_call(f, _, _) => {
for ty::ty_fn_args(ty::node_id_to_type(cx.ccx.tcx, f.id)).each |a| {
let r = ty::ty_fn_args(ty::node_id_to_type(cx.ccx.tcx, f.id));
for r.iter().advance |a| {
type_needs(cx, use_repr, *a);
}
}
@ -352,17 +353,18 @@ pub fn mark_for_expr(cx: Context, e: @expr) {
let base_ty = ty::node_id_to_type(cx.ccx.tcx, rcvr.id);
type_needs(cx, use_repr, ty::type_autoderef(cx.ccx.tcx, base_ty));
for ty::ty_fn_args(ty::node_id_to_type(cx.ccx.tcx, callee_id)).each |a| {
let r = ty::ty_fn_args(ty::node_id_to_type(cx.ccx.tcx, callee_id));
for r.iter().advance |a| {
type_needs(cx, use_repr, *a);
}
mark_for_method_call(cx, e.id, callee_id);
}
expr_inline_asm(ref ia) => {
for ia.inputs.each |&(_, in)| {
for ia.inputs.iter().advance |&(_, in)| {
node_type_needs(cx, use_repr, in.id);
}
for ia.outputs.each |&(_, out)| {
for ia.outputs.iter().advance |&(_, out)| {
node_type_needs(cx, use_repr, out.id);
}
}

View File

@ -1018,7 +1018,7 @@ fn mk_t(cx: ctxt, st: sty) -> t {
}
fn sflags(substs: &substs) -> uint {
let mut f = 0u;
for substs.tps.each |tt| { f |= get(*tt).flags; }
for substs.tps.iter().advance |tt| { f |= get(*tt).flags; }
for substs.self_r.iter().advance |r| { f |= rflags(*r) }
return f;
}
@ -1057,16 +1057,16 @@ fn mk_t(cx: ctxt, st: sty) -> t {
flags |= rflags(r);
flags |= get(m.ty).flags;
}
&ty_tup(ref ts) => for ts.each |tt| { flags |= get(*tt).flags; },
&ty_tup(ref ts) => for ts.iter().advance |tt| { flags |= get(*tt).flags; },
&ty_bare_fn(ref f) => {
for f.sig.inputs.each |a| { flags |= get(*a).flags; }
for f.sig.inputs.iter().advance |a| { flags |= get(*a).flags; }
flags |= get(f.sig.output).flags;
// T -> _|_ is *not* _|_ !
flags &= !(has_ty_bot as uint);
}
&ty_closure(ref f) => {
flags |= rflags(f.region);
for f.sig.inputs.each |a| { flags |= get(*a).flags; }
for f.sig.inputs.iter().advance |a| { flags |= get(*a).flags; }
flags |= get(f.sig.output).flags;
// T -> _|_ is *not* _|_ !
flags &= !(has_ty_bot as uint);
@ -1320,15 +1320,15 @@ pub fn maybe_walk_ty(ty: t, f: &fn(t) -> bool) {
}
ty_enum(_, ref substs) | ty_struct(_, ref substs) |
ty_trait(_, ref substs, _, _) => {
for (*substs).tps.each |subty| { maybe_walk_ty(*subty, f); }
for (*substs).tps.iter().advance |subty| { maybe_walk_ty(*subty, f); }
}
ty_tup(ref ts) => { for ts.each |tt| { maybe_walk_ty(*tt, f); } }
ty_tup(ref ts) => { for ts.iter().advance |tt| { maybe_walk_ty(*tt, f); } }
ty_bare_fn(ref ft) => {
for ft.sig.inputs.each |a| { maybe_walk_ty(*a, f); }
for ft.sig.inputs.iter().advance |a| { maybe_walk_ty(*a, f); }
maybe_walk_ty(ft.sig.output, f);
}
ty_closure(ref ft) => {
for ft.sig.inputs.each |a| { maybe_walk_ty(*a, f); }
for ft.sig.inputs.iter().advance |a| { maybe_walk_ty(*a, f); }
maybe_walk_ty(ft.sig.output, f);
}
}
@ -1772,8 +1772,8 @@ fn type_needs_unwind_cleanup_(cx: ctxt, ty: t,
true
}
ty_enum(did, ref substs) => {
for (*enum_variants(cx, did)).each |v| {
for v.args.each |aty| {
for (*enum_variants(cx, did)).iter().advance |v| {
for v.args.iter().advance |aty| {
let t = subst(cx, substs, *aty);
needs_unwind_cleanup |=
type_needs_unwind_cleanup_(cx, t, tycache,
@ -2407,8 +2407,8 @@ pub fn type_structurally_contains(cx: ctxt,
if test(sty) { return true; }
match *sty {
ty_enum(did, ref substs) => {
for (*enum_variants(cx, did)).each |variant| {
for variant.args.each |aty| {
for (*enum_variants(cx, did)).iter().advance |variant| {
for variant.args.iter().advance |aty| {
let sty = subst(cx, substs, *aty);
if type_structurally_contains(cx, sty, test) { return true; }
}
@ -2416,7 +2416,8 @@ pub fn type_structurally_contains(cx: ctxt,
return false;
}
ty_struct(did, ref substs) => {
for lookup_struct_fields(cx, did).each |field| {
let r = lookup_struct_fields(cx, did);
for r.iter().advance |field| {
let ft = lookup_field_type(cx, did, field.id, substs);
if type_structurally_contains(cx, ft, test) { return true; }
}
@ -2424,7 +2425,7 @@ pub fn type_structurally_contains(cx: ctxt,
}
ty_tup(ref ts) => {
for ts.each |tt| {
for ts.iter().advance |tt| {
if type_structurally_contains(cx, *tt, test) { return true; }
}
return false;
@ -2503,7 +2504,7 @@ pub fn type_is_pod(cx: ctxt, ty: t) -> bool {
// Structural types
ty_enum(did, ref substs) => {
let variants = enum_variants(cx, did);
for (*variants).each |variant| {
for (*variants).iter().advance |variant| {
let tup_ty = mk_tup(cx, /*bad*/copy variant.args);
// Perform any type parameter substitutions.
@ -2512,7 +2513,7 @@ pub fn type_is_pod(cx: ctxt, ty: t) -> bool {
}
}
ty_tup(ref elts) => {
for elts.each |elt| { if !type_is_pod(cx, *elt) { result = false; } }
for elts.iter().advance |elt| { if !type_is_pod(cx, *elt) { result = false; } }
}
ty_estr(vstore_fixed(_)) => result = true,
ty_evec(ref mt, vstore_fixed(_)) | ty_unboxed_vec(ref mt) => {
@ -3357,14 +3358,14 @@ pub fn stmt_node_id(s: @ast::stmt) -> ast::node_id {
pub fn field_idx(id: ast::ident, fields: &[field]) -> Option<uint> {
let mut i = 0u;
for fields.each |f| { if f.ident == id { return Some(i); } i += 1u; }
for fields.iter().advance |f| { if f.ident == id { return Some(i); } i += 1u; }
return None;
}
pub fn field_idx_strict(tcx: ty::ctxt, id: ast::ident, fields: &[field])
-> uint {
let mut i = 0u;
for fields.each |f| { if f.ident == id { return i; } i += 1u; }
for fields.iter().advance |f| { if f.ident == id { return i; } i += 1u; }
tcx.sess.bug(fmt!(
"No field named `%s` found in the list of fields `%?`",
tcx.sess.str_of(id),
@ -4383,7 +4384,7 @@ pub fn determine_inherited_purity(parent: (ast::purity, ast::node_id),
pub fn each_bound_trait_and_supertraits(tcx: ctxt,
bounds: &ParamBounds,
f: &fn(@TraitRef) -> bool) -> bool {
for bounds.trait_bounds.each |&bound_trait_ref| {
for bounds.trait_bounds.iter().advance |&bound_trait_ref| {
let mut supertrait_set = HashMap::new();
let mut trait_refs = ~[];
let mut i = 0;
@ -4403,7 +4404,7 @@ pub fn each_bound_trait_and_supertraits(tcx: ctxt,
// Add supertraits to supertrait_set
let supertrait_refs = trait_ref_supertraits(tcx, trait_refs[i]);
for supertrait_refs.each |&supertrait_ref| {
for supertrait_refs.iter().advance |&supertrait_ref| {
debug!("each_bound_trait_and_supertraits(supertrait_ref=%s)",
supertrait_ref.repr(tcx));
@ -4424,7 +4425,7 @@ pub fn each_bound_trait_and_supertraits(tcx: ctxt,
pub fn count_traits_and_supertraits(tcx: ctxt,
type_param_defs: &[TypeParameterDef]) -> uint {
let mut total = 0;
for type_param_defs.each |type_param_def| {
for type_param_defs.iter().advance |type_param_def| {
for each_bound_trait_and_supertraits(tcx, type_param_def.bounds) |_| {
total += 1;
}

View File

@ -35,7 +35,7 @@ pub fn check_match(fcx: @mut FnCtxt,
// Typecheck the patterns first, so that we get types for all the
// bindings.
for arms.each |arm| {
for arms.iter().advance |arm| {
let pcx = pat_ctxt {
fcx: fcx,
map: pat_id_map(tcx.def_map, arm.pats[0]),
@ -43,14 +43,14 @@ pub fn check_match(fcx: @mut FnCtxt,
block_region: ty::re_scope(arm.body.node.id)
};
for arm.pats.each |p| { check_pat(&pcx, *p, pattern_ty);}
for arm.pats.iter().advance |p| { check_pat(&pcx, *p, pattern_ty);}
}
// Now typecheck the blocks.
let mut result_ty = fcx.infcx().next_ty_var();
let mut arm_non_bot = false;
let mut saw_err = false;
for arms.each |arm| {
for arms.iter().advance |arm| {
let mut guard_err = false;
let mut guard_bot = false;
match arm.guard {
@ -248,7 +248,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: @ast::pat, path: @ast::Path,
if error_happened {
for subpats.iter().advance |pats| {
for pats.each |pat| {
for pats.iter().advance |pat| {
check_pat(pcx, *pat, ty::mk_err());
}
}
@ -280,7 +280,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
// Typecheck each field.
let mut found_fields = HashSet::new();
for fields.each |field| {
for fields.iter().advance |field| {
match field_map.find(&field.ident) {
Some(&index) => {
let class_field = class_fields[index];
@ -516,7 +516,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::pat, expected: ty::t) {
fcx.write_ty(pat.id, expected);
}
_ => {
for elts.each |elt| {
for elts.iter().advance |elt| {
check_pat(pcx, *elt, ty::mk_err());
}
// use terr_tuple_size if both types are tuples
@ -565,13 +565,13 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::pat, expected: ty::t) {
(mt, default_region_var)
},
_ => {
for before.each |&elt| {
for before.iter().advance |&elt| {
check_pat(pcx, elt, ty::mk_err());
}
for slice.iter().advance |&elt| {
check_pat(pcx, elt, ty::mk_err());
}
for after.each |&elt| {
for after.iter().advance |&elt| {
check_pat(pcx, elt, ty::mk_err());
}
fcx.infcx().type_error_message_str_with_expected(
@ -587,7 +587,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::pat, expected: ty::t) {
return;
}
};
for before.each |elt| {
for before.iter().advance |elt| {
check_pat(pcx, *elt, elt_type.ty);
}
match slice {
@ -600,7 +600,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::pat, expected: ty::t) {
}
None => ()
}
for after.each |elt| {
for after.iter().advance |elt| {
check_pat(pcx, *elt, elt_type.ty);
}
fcx.write_ty(pat.id, expected);

View File

@ -333,14 +333,14 @@ impl<'self> LookupContext<'self> {
let trait_map: &mut resolve::TraitMap = &mut self.fcx.ccx.trait_map;
let opt_applicable_traits = trait_map.find(&self.expr.id);
for opt_applicable_traits.iter().advance |applicable_traits| {
for applicable_traits.each |trait_did| {
for applicable_traits.iter().advance |trait_did| {
let coherence_info = self.fcx.ccx.coherence_info;
// Look for explicit implementations.
let opt_impl_infos =
coherence_info.extension_methods.find(trait_did);
for opt_impl_infos.iter().advance |impl_infos| {
for impl_infos.each |impl_info| {
for impl_infos.iter().advance |impl_info| {
self.push_candidates_from_impl(
self.extension_candidates, *impl_info);
@ -486,7 +486,7 @@ impl<'self> LookupContext<'self> {
}
// No method found yet? Check each supertrait
if method_info.is_none() {
for ty::trait_supertraits(tcx, did).each() |trait_ref| {
for ty::trait_supertraits(tcx, did).iter().advance |trait_ref| {
let supertrait_methods =
ty::trait_methods(tcx, trait_ref.def_id);
match supertrait_methods.iter().position_(|m| m.ident == self.m_name) {
@ -527,7 +527,7 @@ impl<'self> LookupContext<'self> {
let opt_impl_infos =
self.fcx.ccx.coherence_info.inherent_methods.find(&did);
for opt_impl_infos.iter().advance |impl_infos| {
for impl_infos.each |impl_info| {
for impl_infos.iter().advance |impl_info| {
self.push_candidates_from_impl(
self.inherent_candidates, *impl_info);
}
@ -767,7 +767,7 @@ impl<'self> LookupContext<'self> {
// This is hokey. We should have mutability inference as a
// variable. But for now, try &const, then &, then &mut:
let region = self.infcx().next_region_var_nb(self.expr.span);
for mutbls.each |mutbl| {
for mutbls.iter().advance |mutbl| {
let autoref_ty = mk_autoref_ty(*mutbl, region);
match self.search_for_method(autoref_ty) {
None => {}

View File

@ -550,7 +550,7 @@ pub fn check_no_duplicate_fields(tcx: ty::ctxt,
fields: ~[(ast::ident, span)]) {
let mut field_names = HashMap::new();
for fields.each |p| {
for fields.iter().advance |p| {
let (id, sp) = *p;
let orig_sp = field_names.find(&id).map_consume(|x| *x);
match orig_sp {
@ -599,12 +599,12 @@ pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) {
let rp = ccx.tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
debug!("item_impl %s with id %d rp %?",
ccx.tcx.sess.str_of(it.ident), it.id, rp);
for ms.each |m| {
for ms.iter().advance |m| {
check_method(ccx, *m);
}
}
ast::item_trait(_, _, ref trait_methods) => {
for (*trait_methods).each |trait_method| {
for (*trait_methods).iter().advance |trait_method| {
match *trait_method {
required(*) => {
// Nothing to do, since required methods don't have
@ -625,11 +625,11 @@ pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) {
}
ast::item_foreign_mod(ref m) => {
if m.abis.is_intrinsic() {
for m.items.each |item| {
for m.items.iter().advance |item| {
check_intrinsic_type(ccx, *item);
}
} else {
for m.items.each |item| {
for m.items.iter().advance |item| {
let tpt = ty::lookup_item_type(ccx.tcx, local_def(item.id));
if tpt.generics.has_type_params() {
ccx.tcx.sess.span_err(
@ -1225,7 +1225,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
// functions. This is so that we have more information about the types
// of arguments when we typecheck the functions. This isn't really the
// right way to do this.
for [false, true].each |check_blocks| {
let xs = [false, true];
for xs.iter().advance |check_blocks| {
let check_blocks = *check_blocks;
debug!("check_blocks=%b", check_blocks);
@ -1803,14 +1804,14 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let mut class_field_map = HashMap::new();
let mut fields_found = 0;
for field_types.each |field| {
for field_types.iter().advance |field| {
class_field_map.insert(field.ident, (field.id, false));
}
let mut error_happened = false;
// Typecheck each field.
for ast_fields.each |field| {
for ast_fields.iter().advance |field| {
let mut expected_field_type = ty::mk_err();
let pair = class_field_map.find(&field.node.ident).
@ -1856,7 +1857,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
assert!(fields_found <= field_types.len());
if fields_found < field_types.len() {
let mut missing_fields = ~[];
for field_types.each |class_field| {
for field_types.iter().advance |class_field| {
let name = class_field.ident;
let (_, seen) = *class_field_map.get(&name);
if !seen {
@ -2175,7 +2176,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
_ => mutability = mutbl
}
let t: ty::t = fcx.infcx().next_ty_var();
for args.each |e| {
for args.iter().advance |e| {
check_expr_has_type(fcx, *e, t);
let arg_t = fcx.expr_ty(*e);
if ty::type_is_error(arg_t) {
@ -2377,10 +2378,10 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
fcx.write_ty(id, ty_param_bounds_and_ty.ty);
}
ast::expr_inline_asm(ref ia) => {
for ia.inputs.each |&(_, in)| {
for ia.inputs.iter().advance |&(_, in)| {
check_expr(fcx, in);
}
for ia.outputs.each |&(_, out)| {
for ia.outputs.iter().advance |&(_, out)| {
check_expr(fcx, out);
}
fcx.write_nil(id);
@ -2506,7 +2507,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
// We know there's at least one because we already checked
// for n=0 as well as all arms being _|_ in the previous
// `if`.
for arm_tys.each() |arm_ty| {
for arm_tys.iter().advance |arm_ty| {
if !ty::type_is_bot(*arm_ty) {
fcx.write_ty(id, *arm_ty);
break;
@ -2687,7 +2688,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let t: ty::t = fcx.infcx().next_ty_var();
let mut arg_is_bot = false;
let mut arg_is_err = false;
for args.each |e| {
for args.iter().advance |e| {
check_expr_has_type(fcx, *e, t);
let arg_t = fcx.expr_ty(*e);
arg_is_bot |= ty::type_is_bot(arg_t);
@ -2948,7 +2949,7 @@ pub fn check_block_with_expected(fcx: @mut FnCtxt,
let mut last_was_bot = false;
let mut any_bot = false;
let mut any_err = false;
for blk.node.stmts.each |s| {
for blk.node.stmts.iter().advance |s| {
check_stmt(fcx, *s);
let s_id = ast_util::stmt_id(*s);
let s_ty = fcx.node_ty(s_id);
@ -3085,7 +3086,7 @@ pub fn check_enum_variants(ccx: @mut CrateCtxt,
disr_val: &mut int,
variants: &mut ~[ty::VariantInfo]) {
let rty = ty::node_id_to_type(ccx.tcx, id);
for vs.each |v| {
for vs.iter().advance |v| {
for v.node.disr_expr.iter().advance |e_ref| {
let e = *e_ref;
debug!("disr expr, checking %s",

View File

@ -184,7 +184,7 @@ fn visit_block(b: &ast::blk, (rcx, v): (@mut Rcx, rvt)) {
fn visit_arm(arm: &ast::arm, (rcx, v): (@mut Rcx, rvt)) {
// see above
for arm.pats.each |&p| {
for arm.pats.iter().advance |&p| {
constrain_bindings_in_pat(p, rcx);
}
@ -267,7 +267,7 @@ fn visit_expr(expr: @ast::expr, (rcx, v): (@mut Rcx, rvt)) {
}
ast::expr_match(_, ref arms) => {
tcx.region_maps.record_cleanup_scope(expr.id);
for arms.each |arm| {
for arms.iter().advance |arm| {
for arm.guard.iter().advance |guard| {
tcx.region_maps.record_cleanup_scope(guard.id);
}
@ -476,7 +476,7 @@ fn constrain_call(rcx: @mut Rcx,
let callee_scope = call_expr.id;
let callee_region = ty::re_scope(callee_scope);
for arg_exprs.each |&arg_expr| {
for arg_exprs.iter().advance |&arg_expr| {
// ensure that any regions appearing in the argument type are
// valid for at least the lifetime of the function:
constrain_regions_in_type_of_node(
@ -614,7 +614,7 @@ fn constrain_free_variables(rcx: @mut Rcx,
let tcx = rcx.fcx.ccx.tcx;
debug!("constrain_free_variables(%s, %s)",
region.repr(tcx), expr.repr(tcx));
for get_freevars(tcx, expr.id).each |freevar| {
for get_freevars(tcx, expr.id).iter().advance |freevar| {
debug!("freevar def is %?", freevar.def);
let def = freevar.def;
let en_region = encl_region_of_def(rcx.fcx, def);
@ -824,8 +824,8 @@ pub mod guarantor {
debug!("regionck::for_match()");
let discr_guarantor = guarantor(rcx, discr);
debug!("discr_guarantor=%s", discr_guarantor.repr(rcx.tcx()));
for arms.each |arm| {
for arm.pats.each |pat| {
for arms.iter().advance |arm| {
for arm.pats.iter().advance |pat| {
link_ref_bindings_in_pat(rcx, *pat, discr_guarantor);
}
}
@ -1217,7 +1217,7 @@ pub mod guarantor {
link_ref_bindings_in_pats(rcx, pats, guarantor);
}
ast::pat_struct(_, ref fpats, _) => {
for fpats.each |fpat| {
for fpats.iter().advance |fpat| {
link_ref_bindings_in_pat(rcx, fpat.pat, guarantor);
}
}
@ -1258,7 +1258,7 @@ pub mod guarantor {
fn link_ref_bindings_in_pats(rcx: @mut Rcx,
pats: &~[@ast::pat],
guarantor: Option<ty::Region>) {
for pats.each |pat| {
for pats.iter().advance |pat| {
link_ref_bindings_in_pat(rcx, *pat, guarantor);
}
}

View File

@ -231,7 +231,7 @@ pub fn relate_nested_regions(
r_sub: ty::Region,
relate_op: &fn(ty::Region, ty::Region))
{
for the_stack.each |&r| {
for the_stack.iter().advance |&r| {
if !r.is_bound() && !r_sub.is_bound() {
relate_op(r, r_sub);
}
@ -259,14 +259,14 @@ pub fn relate_free_regions(
debug!("relate_free_regions >>");
let mut all_tys = ~[];
for fn_sig.inputs.each |arg| {
for fn_sig.inputs.iter().advance |arg| {
all_tys.push(*arg);
}
for self_ty.iter().advance |&t| {
all_tys.push(t);
}
for all_tys.each |&t| {
for all_tys.iter().advance |&t| {
debug!("relate_free_regions(t=%s)", ppaux::ty_to_str(tcx, t));
relate_nested_regions(tcx, None, t, |a, b| {
match (&a, &b) {

View File

@ -88,7 +88,7 @@ fn lookup_vtables(vcx: &VtableContext,
let tcx = vcx.tcx();
let mut result = ~[];
let mut i = 0u;
for substs.tps.each |ty| {
for substs.tps.iter().advance |ty| {
// ty is the value supplied for the type parameter A...
for ty::each_bound_trait_and_supertraits(

View File

@ -175,7 +175,7 @@ fn resolve_type_vars_for_node(wbcx: @mut WbCtxt, sp: span, id: ast::node_id)
write_ty_to_tcx(tcx, id, t);
for fcx.opt_node_ty_substs(id) |substs| {
let mut new_tps = ~[];
for substs.tps.each |subst| {
for substs.tps.iter().advance |subst| {
match resolve_type_vars_in_type(fcx, sp, *subst) {
Some(t) => new_tps.push(t),
None => { wbcx.success = false; return None; }
@ -240,7 +240,7 @@ fn visit_expr(e: @ast::expr, (wbcx, v): (@mut WbCtxt, wb_vt)) {
match e.node {
ast::expr_fn_block(ref decl, _) => {
for decl.inputs.each |input| {
for decl.inputs.iter().advance |input| {
let _ = resolve_type_vars_for_node(wbcx, e.span, input.id);
}
}
@ -341,7 +341,7 @@ pub fn resolve_type_vars_in_fn(fcx: @mut FnCtxt,
self_info.span,
self_info.self_id);
}
for decl.inputs.each |arg| {
for decl.inputs.iter().advance |arg| {
do pat_util::pat_bindings(fcx.tcx().def_map, arg.pat)
|_bm, pat_id, span, _path| {
resolve_type_vars_for_node(wbcx, span, pat_id);

View File

@ -270,7 +270,7 @@ impl CoherenceChecker {
// We only want to generate one Impl structure. When we generate one,
// we store it here so that we don't recreate it.
let mut implementation_opt = None;
for associated_traits.each |&associated_trait| {
for associated_traits.iter().advance |&associated_trait| {
let trait_ref =
ty::node_id_to_trait_ref(
self.crate_context.tcx,
@ -536,11 +536,12 @@ impl CoherenceChecker {
// XXX: This is horrible.
let mut provided_method_idents = HashSet::new();
let tcx = self.crate_context.tcx;
for ty::provided_trait_methods(tcx, trait_did).each |ident| {
let r = ty::provided_trait_methods(tcx, trait_did);
for r.iter().advance |ident| {
provided_method_idents.insert(*ident);
}
for ty::trait_methods(tcx, trait_did).each |&method| {
for ty::trait_methods(tcx, trait_did).iter().advance |&method| {
if provided_method_idents.contains(&method.ident) {
if !f(method) {
return false;
@ -611,7 +612,8 @@ impl CoherenceChecker {
if result.is_ok() {
// Check to ensure that each parameter binding respected its
// kind bounds.
for [ a, b ].each |result| {
let xs = [a, b];
for xs.iter().advance |result| {
for result.type_variables.iter()
.zip(result.type_param_defs.iter())
.advance |(ty_var, type_param_def)|
@ -728,12 +730,14 @@ impl CoherenceChecker {
provided_names.insert(all_methods[i].ident);
}
// Default methods
for ty::provided_trait_methods(tcx, trait_did).each |ident| {
let r = ty::provided_trait_methods(tcx, trait_did);
for r.iter().advance |ident| {
debug!("inserting provided method %s", ident.repr(tcx));
provided_names.insert(*ident);
}
for (*ty::trait_methods(tcx, trait_did)).each |method| {
let r = ty::trait_methods(tcx, trait_did);
for r.iter().advance |method| {
debug!("checking for %s", method.ident.repr(tcx));
if provided_names.contains(&method.ident) { loop; }
@ -802,7 +806,7 @@ impl CoherenceChecker {
debug!("(creating impl) trait with node_id `%d` \
has provided methods", trait_did.node);
// Add all provided methods.
for all_provided_methods.each |provided_method| {
for all_provided_methods.iter().advance |provided_method| {
debug!(
"(creating impl) adding provided method \
`%s` to impl",
@ -821,7 +825,7 @@ impl CoherenceChecker {
match item.node {
item_impl(_, ref trait_refs, _, ref ast_methods) => {
let mut methods = ~[];
for ast_methods.each |ast_method| {
for ast_methods.iter().advance |ast_method| {
methods.push(method_to_MethodInfo(*ast_method));
}
@ -882,7 +886,7 @@ impl CoherenceChecker {
let implementations = get_impls_for_mod(crate_store,
module_def_id,
None);
for implementations.each |implementation| {
for implementations.iter().advance |implementation| {
debug!("coherence: adding impl from external crate: %s",
ty::item_path_str(self.crate_context.tcx,
implementation.did));
@ -1009,7 +1013,7 @@ impl CoherenceChecker {
Some(found_impls) => impls = found_impls
}
for impls.each |impl_info| {
for impls.iter().advance |impl_info| {
if impl_info.methods.len() < 1 {
// We'll error out later. For now, just don't ICE.
loop;

View File

@ -66,13 +66,13 @@ pub fn collect_item_types(ccx: @mut CrateCtxt, crate: @ast::crate) {
// FIXME (#2592): hooking into the "intrinsic" root module is crude.
// There ought to be a better approach. Attributes?
for crate.node.module.items.each |crate_item| {
for crate.node.module.items.iter().advance |crate_item| {
if crate_item.ident
== ::syntax::parse::token::special_idents::intrinsic {
match crate_item.node {
ast::item_mod(ref m) => {
for m.items.each |intrinsic_item| {
for m.items.iter().advance |intrinsic_item| {
let def_id = ast::def_id { crate: ast::local_crate,
node: intrinsic_item.id };
let substs = substs {
@ -168,7 +168,7 @@ pub fn get_enum_variant_types(ccx: &CrateCtxt,
let tcx = ccx.tcx;
// Create a set of parameter types shared among all the variants.
for variants.each |variant| {
for variants.iter().advance |variant| {
let region_parameterization =
RegionParameterization::from_variance_and_generics(rp, generics);
@ -233,7 +233,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt,
// For each method, construct a suitable ty::Method and
// store it into the `tcx.methods` table:
for ms.each |m| {
for ms.iter().advance |m| {
let ty_method = @match m {
&ast::required(ref m) => {
ty_method_of_trait_method(
@ -416,7 +416,7 @@ pub fn ensure_supertraits(ccx: &CrateCtxt,
let self_ty = ty::mk_self(ccx.tcx, local_def(id));
let mut ty_trait_refs: ~[@ty::TraitRef] = ~[];
for ast_trait_refs.each |&ast_trait_ref| {
for ast_trait_refs.iter().advance |&ast_trait_ref| {
let trait_ref = instantiate_trait_ref(ccx, ast_trait_ref, rp,
generics, self_ty);
@ -686,7 +686,7 @@ pub fn check_methods_against_trait(ccx: &CrateCtxt,
// Trait methods we don't implement must be default methods, but if not
// we'll catch it in coherence
let trait_ms = ty::trait_methods(tcx, trait_ref.def_id);
for impl_ms.each |impl_m| {
for impl_ms.iter().advance |impl_m| {
match trait_ms.iter().find_(|trait_m| trait_m.ident == impl_m.mty.ident) {
Some(trait_m) => {
let num_impl_tps = generics.ty_params.len();
@ -921,7 +921,7 @@ pub fn convert_struct(ccx: &CrateCtxt,
let tcx = ccx.tcx;
// Write the type of each of the members
for struct_def.fields.each |f| {
for struct_def.fields.iter().advance |f| {
convert_field(ccx, rp, tpt.generics.type_param_defs, *f, generics);
}
let (_, substs) = mk_item_substs(ccx, generics, rp, None);

View File

@ -223,7 +223,7 @@ impl Combine for Glb {
let mut a_r = None;
let mut b_r = None;
let mut only_new_vars = true;
for tainted.each |r| {
for tainted.iter().advance |r| {
if is_var_in_set(a_vars, *r) {
if a_r.is_some() {
return fresh_bound_variable(this);

View File

@ -1572,8 +1572,8 @@ impl RegionVarBindings {
return;
}
for lower_bounds.each |lower_bound| {
for upper_bounds.each |upper_bound| {
for lower_bounds.iter().advance |lower_bound| {
for upper_bounds.iter().advance |upper_bound| {
if !self.is_subregion_of(lower_bound.region,
upper_bound.region) {
@ -1629,8 +1629,8 @@ impl RegionVarBindings {
return;
}
for upper_bounds.each |upper_bound_1| {
for upper_bounds.each |upper_bound_2| {
for upper_bounds.iter().advance |upper_bound_1| {
for upper_bounds.iter().advance |upper_bound_2| {
match self.glb_concrete_regions(upper_bound_1.region,
upper_bound_2.region) {
Ok(_) => {}

View File

@ -198,7 +198,7 @@ impl Combine for Sub {
for list::each(skol_isr) |pair| {
let (skol_br, skol) = *pair;
let tainted = self.infcx.region_vars.tainted(snapshot, skol);
for tainted.each |tainted_region| {
for tainted.iter().advance |tainted_region| {
// Each skolemized should only be relatable to itself
// or new variables:
match *tainted_region {

View File

@ -79,7 +79,7 @@ fn setup_env(test_name: &str, source_string: &str) -> Env {
impl Env {
pub fn create_region_hierarchy(&self, rh: &RH) {
for rh.sub.each |child_rh| {
for rh.sub.iter().advance |child_rh| {
self.create_region_hierarchy(child_rh);
self.tcx.region_map.insert(child_rh.id, rh.id);
}
@ -109,7 +109,7 @@ impl Env {
idx: uint,
names: &[~str]) -> Option<ast::node_id> {
assert!(idx < names.len());
for m.items.each |item| {
for m.items.iter().advance |item| {
if self.tcx.sess.str_of(item.ident) == names[idx] {
return search(self, *item, idx+1, names);
}
@ -227,7 +227,7 @@ impl Env {
self.infcx.resolve_regions();
if self.err_messages.len() != exp_count {
for self.err_messages.each |msg| {
for self.err_messages.iter().advance |msg| {
debug!("Error encountered: %s", *msg);
}
fmt!("Resolving regions encountered %u errors but expected %u!",

View File

@ -75,7 +75,7 @@ impl RegionParamNames {
opt_vec::Vec(new_lifetimes.map(|lt| lt.ident)));
}
opt_vec::Vec(ref mut existing_lifetimes) => {
for new_lifetimes.each |new_lifetime| {
for new_lifetimes.iter().advance |new_lifetime| {
existing_lifetimes.push(new_lifetime.ident);
}
}

View File

@ -184,7 +184,8 @@ Available lint options:
pub fn describe_debug_flags() {
io::println(fmt!("\nAvailable debug options:\n"));
for session::debugging_opts_map().each |pair| {
let r = session::debugging_opts_map();
for r.iter().advance |pair| {
let (name, desc, _) = /*bad*/copy *pair;
io::println(fmt!(" -Z %-20s -- %s", name, desc));
}
@ -344,13 +345,14 @@ pub fn monitor(f: ~fn(diagnostic::Emitter)) {
diagnostic::ice_msg("unexpected failure"),
diagnostic::error);
for [
let xs = [
~"the compiler hit an unexpected failure path. \
this is a bug",
~"try running with RUST_LOG=rustc=1,::rt::backtrace \
to get further details and report the results \
to github.com/mozilla/rust/issues"
].each |note| {
];
for xs.iter().advance |note| {
diagnostic::emit(None, *note, diagnostic::note)
}
}

View File

@ -395,7 +395,7 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
}
// if there is an id, print that instead of the structural type:
/*for ty::type_def_id(typ).each |def_id| {
/*for ty::type_def_id(typ).iter().advance |def_id| {
// note that this typedef cannot have type parameters
return ast_map::path_to_str(ty::item_path(cx, *def_id),
cx.sess.intr());
@ -573,7 +573,7 @@ impl Repr for ty::ParamBounds {
ty::BoundSized => ~"Sized",
});
}
for self.trait_bounds.each |t| {
for self.trait_bounds.iter().advance |t| {
res.push(t.repr(tcx));
}
res.connect("+")

View File

@ -76,7 +76,8 @@ pub fn usage() {
println("Usage: rustdoc [options] <cratefile>\n");
println("Options:\n");
for opts().each |opt| {
let r = opts();
for r.iter().advance |opt| {
println(fmt!(" %s", opt.second()));
}
println("");

View File

@ -144,7 +144,7 @@ fn nmoddoc_from_mod(
module_: ast::foreign_mod
) -> doc::NmodDoc {
let mut fns = ~[];
for module_.items.each |item| {
for module_.items.iter().advance |item| {
let ItemDoc = mk_itemdoc(item.id, to_str(item.ident));
match item.node {
ast::foreign_item_fn(*) => {

View File

@ -280,7 +280,7 @@ fn write_desc(
}
fn write_sections(ctxt: &Ctxt, sections: &[doc::Section]) {
for sections.each |section| {
for sections.iter().advance |section| {
write_section(ctxt, copy *section);
}
}
@ -300,7 +300,7 @@ fn write_mod_contents(
write_index(ctxt, doc.index.get_ref());
}
for doc.items.each |itemTag| {
for doc.items.iter().advance |itemTag| {
write_item(ctxt, copy *itemTag);
}
}
@ -350,7 +350,7 @@ fn write_index(ctxt: &Ctxt, index: &doc::Index) {
ctxt.w.put_line(~"<div class='index'>");
ctxt.w.put_line(~"");
for index.entries.each |entry| {
for index.entries.iter().advance |entry| {
let header = header_text_(entry.kind, entry.name);
let id = copy entry.link;
if entry.brief.is_some() {
@ -371,7 +371,7 @@ fn write_nmod(ctxt: &Ctxt, doc: doc::NmodDoc) {
write_index(ctxt, doc.index.get_ref());
}
for doc.fns.each |FnDoc| {
for doc.fns.iter().advance |FnDoc| {
write_item_header(ctxt, doc::FnTag(copy *FnDoc));
write_fn(ctxt, copy *FnDoc);
}
@ -441,7 +441,7 @@ fn write_variants(
write_header_(ctxt, H4, ~"Variants");
for docs.each |variant| {
for docs.iter().advance |variant| {
write_variant(ctxt, copy *variant);
}
@ -480,7 +480,7 @@ fn write_trait(ctxt: &Ctxt, doc: doc::TraitDoc) {
}
fn write_methods(ctxt: &Ctxt, docs: &[doc::MethodDoc]) {
for docs.each |doc| {
for docs.iter().advance |doc| {
write_method(ctxt, copy *doc);
}
}

View File

@ -112,7 +112,7 @@ impl Program {
None => {}
}
for new_locals.each |p| {
for new_locals.iter().advance |p| {
code.push_str(fmt!("assert_encodable(&%s);\n", *p.first_ref()));
}
code.push_str("};}");
@ -370,7 +370,7 @@ impl Program {
// helper functions to perform ast iteration
fn each_user_local(blk: &ast::blk, f: &fn(@ast::local)) {
do find_user_block(blk) |blk| {
for blk.node.stmts.each |stmt| {
for blk.node.stmts.iter().advance |stmt| {
match stmt.node {
ast::stmt_decl(d, _) => {
match d.node {
@ -385,7 +385,7 @@ impl Program {
}
fn find_user_block(blk: &ast::blk, f: &fn(&ast::blk)) {
for blk.node.stmts.each |stmt| {
for blk.node.stmts.iter().advance |stmt| {
match stmt.node {
ast::stmt_semi(e, _) => {
match e.node {

View File

@ -130,7 +130,7 @@ fn run(mut repl: Repl, input: ~str) -> Repl {
do find_main(crate, sess) |blk| {
// Fish out all the view items, be sure to record 'extern mod' items
// differently beause they must appear before all 'use' statements
for blk.node.view_items.each |vi| {
for blk.node.view_items.iter().advance |vi| {
let s = do with_pp(intr) |pp, _| {
pprust::print_view_item(pp, *vi);
};
@ -144,7 +144,7 @@ fn run(mut repl: Repl, input: ~str) -> Repl {
// Iterate through all of the block's statements, inserting them into
// the correct portions of the program
for blk.node.stmts.each |stmt| {
for blk.node.stmts.iter().advance |stmt| {
let s = do with_pp(intr) |pp, _| { pprust::print_stmt(pp, *stmt); };
match stmt.node {
ast::stmt_decl(d, _) => {
@ -248,7 +248,7 @@ fn run(mut repl: Repl, input: ~str) -> Repl {
fn find_main(crate: @ast::crate, sess: session::Session,
f: &fn(&ast::blk)) {
for crate.node.module.items.each |item| {
for crate.node.module.items.iter().advance |item| {
match item.node {
ast::item_fn(_, _, _, _, ref blk) => {
if item.ident == sess.ident_of("main") {
@ -365,7 +365,7 @@ fn run_cmd(repl: &mut Repl, _in: @io::Reader, _out: @io::Writer,
}
~"load" => {
let mut loaded_crates: ~[~str] = ~[];
for args.each |arg| {
for args.iter().advance |arg| {
let (crate, filename) =
if arg.ends_with(".rs") || arg.ends_with(".rc") {
(arg.slice_to(arg.len() - 3).to_owned(), copy *arg)
@ -377,7 +377,7 @@ fn run_cmd(repl: &mut Repl, _in: @io::Reader, _out: @io::Writer,
None => { }
}
}
for loaded_crates.each |crate| {
for loaded_crates.iter().advance |crate| {
let crate_path = Path(*crate);
let crate_dir = crate_path.dirname();
repl.program.record_extern(fmt!("extern mod %s;", *crate));

View File

@ -147,7 +147,7 @@ impl PkgSrc {
assert!(p.components.len() > prefix);
let mut sub = Path("");
for vec::slice(p.components, prefix,
p.components.len()).each |c| {
p.components.len()).iter().advance |c| {
sub = sub.push(*c);
}
debug!("found crate %s", sub.to_str());
@ -204,7 +204,7 @@ impl PkgSrc {
crates: &[Crate],
cfgs: &[~str],
what: OutputType) {
for crates.each |&crate| {
for crates.iter().advance |&crate| {
let path = &src_dir.push_rel(&crate.file).normalize();
note(fmt!("build_crates: compiling %s", path.to_str()));
note(fmt!("build_crates: destination dir is %s", dst_dir.to_str()));

View File

@ -43,7 +43,8 @@ pub fn make_dir_rwx(p: &Path) -> bool { os::make_dir(p, u_rwx) }
/// pkgid's short name
pub fn workspace_contains_package_id(pkgid: &PkgId, workspace: &Path) -> bool {
let src_dir = workspace.push("src");
for os::list_dir(&src_dir).each |&p| {
let dirs = os::list_dir(&src_dir);
for dirs.iter().advance |&p| {
let p = Path(p);
debug!("=> p = %s", p.to_str());
if !os::path_is_dir(&src_dir.push_rel(&p)) {
@ -93,7 +94,7 @@ pub fn pkgid_src_in_workspace(pkgid: &PkgId, workspace: &Path) -> ~[Path] {
/// Returns a src for pkgid that does exist -- None if none of them do
pub fn first_pkgid_src_in_workspace(pkgid: &PkgId, workspace: &Path) -> Option<Path> {
let rs = pkgid_src_in_workspace(pkgid, workspace);
for rs.each |p| {
for rs.iter().advance |p| {
if os::path_exists(p) {
return Some(copy *p);
}
@ -189,7 +190,7 @@ pub fn library_in_workspace(path: &LocalPath, short_name: &str, where: Target,
debug!("lib_prefix = %s and lib_filetype = %s", lib_prefix, lib_filetype);
let mut result_filename = None;
for dir_contents.each |&p| {
for dir_contents.iter().advance |&p| {
let mut which = 0;
let mut hash = None;
let p_path = Path(p);

Some files were not shown because too many files have changed in this diff Show More