Removed many pointless calls to *iter() and iter_mut()

This commit is contained in:
Joshua Landau 2015-06-10 17:22:20 +01:00
parent d8a9570154
commit ca7418b846
117 changed files with 292 additions and 294 deletions

View File

@ -285,8 +285,8 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
format!("--target={}", config.target),
"-L".to_string(),
aux_dir.to_str().unwrap().to_string());
args.extend(split_maybe_args(&config.target_rustcflags).into_iter());
args.extend(split_maybe_args(&props.compile_flags).into_iter());
args.extend(split_maybe_args(&config.target_rustcflags));
args.extend(split_maybe_args(&props.compile_flags));
return ProcArgs {
prog: config.rustc_path.to_str().unwrap().to_string(),
args: args,
@ -333,8 +333,8 @@ actual:\n\
config.build_base.to_str().unwrap().to_string(),
"-L".to_string(),
aux_dir.to_str().unwrap().to_string());
args.extend(split_maybe_args(&config.target_rustcflags).into_iter());
args.extend(split_maybe_args(&props.compile_flags).into_iter());
args.extend(split_maybe_args(&config.target_rustcflags));
args.extend(split_maybe_args(&props.compile_flags));
// FIXME (#9639): This needs to handle non-utf8 paths
return ProcArgs {
prog: config.rustc_path.to_str().unwrap().to_string(),
@ -380,7 +380,7 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
script_str.push_str(&format!("set solib-search-path \
./{}/stage2/lib/rustlib/{}/lib/\n",
config.host, config.target));
for line in breakpoint_lines.iter() {
for line in &breakpoint_lines {
script_str.push_str(&format!("break {:?}:{}\n",
testfile.file_name().unwrap()
.to_string_lossy(),
@ -1171,7 +1171,7 @@ fn document(config: &Config, props: &TestProps,
out_dir.to_str().unwrap().to_string(),
testfile.to_str().unwrap().to_string()];
args.extend(extra_args.iter().cloned());
args.extend(split_maybe_args(&props.compile_flags).into_iter());
args.extend(split_maybe_args(&props.compile_flags));
let args = ProcArgs {
prog: config.rustdoc_path.to_str().unwrap().to_string(),
args: args,
@ -1236,7 +1236,7 @@ fn compose_and_run_compiler(config: &Config, props: &TestProps,
vec!("--crate-type=dylib".to_string())
}
};
crate_type.extend(extra_link_args.clone().into_iter());
crate_type.extend(extra_link_args.clone());
let aux_args =
make_compile_args(config,
&aux_props,
@ -1334,11 +1334,11 @@ fn make_compile_args<F>(config: &Config,
};
args.push(path.to_str().unwrap().to_string());
if props.force_host {
args.extend(split_maybe_args(&config.host_rustcflags).into_iter());
args.extend(split_maybe_args(&config.host_rustcflags));
} else {
args.extend(split_maybe_args(&config.target_rustcflags).into_iter());
args.extend(split_maybe_args(&config.target_rustcflags));
}
args.extend(split_maybe_args(&props.compile_flags).into_iter());
args.extend(split_maybe_args(&props.compile_flags));
return ProcArgs {
prog: config.rustc_path.to_str().unwrap().to_string(),
args: args,
@ -1373,7 +1373,7 @@ fn make_run_args(config: &Config, props: &TestProps, testfile: &Path)
args.push(exe_file.to_str().unwrap().to_string());
// Add the arguments in the run_flags directive
args.extend(split_maybe_args(&props.run_flags).into_iter());
args.extend(split_maybe_args(&props.run_flags));
let prog = args.remove(0);
return ProcArgs {
@ -1683,7 +1683,7 @@ fn compile_test_and_save_ir(config: &Config, props: &TestProps,
aux_dir.to_str().unwrap().to_string());
let llvm_args = vec!("--emit=llvm-ir".to_string(),
"--crate-type=lib".to_string());
link_args.extend(llvm_args.into_iter());
link_args.extend(llvm_args);
let args = make_compile_args(config,
props,
link_args,

View File

@ -73,7 +73,7 @@ r##"<!DOCTYPE html>
try!(write!(&mut output_file, "<h1>Rust Compiler Error Index</h1>\n"));
for (err_code, info) in err_map.iter() {
for (err_code, info) in err_map {
// Enclose each error in a div so they can be shown/hidden en masse.
let desc_desc = match info.description {
Some(_) => "error-described",

View File

@ -130,7 +130,7 @@
//! gadget_owner.gadgets.borrow_mut().push(gadget2.clone().downgrade());
//!
//! // Iterate over our Gadgets, printing their details out
//! for gadget_opt in gadget_owner.gadgets.borrow().iter() {
//! for gadget_opt in &*gadget_owner.gadgets.borrow() {
//!
//! // gadget_opt is a Weak<Gadget>. Since weak pointers can't guarantee
//! // that their object is still allocated, we need to call upgrade()

View File

@ -85,7 +85,7 @@
//!
//! // For each node we can reach, see if we can find a way with
//! // a lower cost going through this node
//! for edge in adj_list[position].iter() {
//! for edge in &adj_list[position] {
//! let next = State { cost: cost + edge.cost, position: edge.node };
//!
//! // If so, add it to the frontier and continue
@ -450,7 +450,7 @@ impl<T: Ord> BinaryHeap<T> {
/// let vec = heap.into_vec();
///
/// // Will print in some order
/// for x in vec.iter() {
/// for x in vec {
/// println!("{}", x);
/// }
/// ```

View File

@ -266,9 +266,9 @@ impl BitVec {
/// # #![feature(collections)]
/// use std::collections::BitVec;
///
/// let mut bv = BitVec::from_elem(10, false);
/// let bv = BitVec::from_elem(10, false);
/// assert_eq!(bv.len(), 10);
/// for x in bv.iter() {
/// for x in &bv {
/// assert_eq!(x, false);
/// }
/// ```
@ -1245,7 +1245,7 @@ impl<'a> IntoIterator for &'a BitVec {
/// s.union_with(&other);
///
/// // Print 0, 1, 3 in some order
/// for x in s.iter() {
/// for x in &s {
/// println!("{}", x);
/// }
///
@ -1370,7 +1370,7 @@ impl BitSet {
/// let s = BitSet::from_bit_vec(bv);
///
/// // Print 1, 2 in arbitrary order
/// for x in s.iter() {
/// for x in &s {
/// println!("{}", x);
/// }
/// ```

View File

@ -907,7 +907,7 @@ impl<K: Ord, V> Default for BTreeMap<K, V> {
impl<K: PartialEq, V: PartialEq> PartialEq for BTreeMap<K, V> {
fn eq(&self, other: &BTreeMap<K, V>) -> bool {
self.len() == other.len() &&
self.iter().zip(other.iter()).all(|(a, b)| a == b)
self.iter().zip(other).all(|(a, b)| a == b)
}
}
@ -1544,7 +1544,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// for (_, balance) in map.range_mut(Included(&"B"), Excluded(&"Cheryl")) {
/// *balance += 100;
/// }
/// for (name, balance) in map.iter() {
/// for (name, balance) in &map {
/// println!("{} => {}", name, balance);
/// }
/// ```

View File

@ -265,7 +265,7 @@ impl<T> LinkedList<T> {
///
/// a.append(&mut b);
///
/// for e in a.iter() {
/// for e in &a {
/// println!("{}", e); // prints 1, then 2, then 3, then 4
/// }
/// println!("{}", b.len()); // prints 0
@ -1189,7 +1189,7 @@ mod tests {
check_links(&m);
let mut i = 0;
for (a, &b) in m.into_iter().zip(v.iter()) {
for (a, &b) in m.into_iter().zip(&v) {
i += 1;
assert_eq!(a, b);
}

View File

@ -971,7 +971,7 @@ impl<T> [T] {
reason = "uncertain about this API approach")]
#[inline]
pub fn move_from(&mut self, mut src: Vec<T>, start: usize, end: usize) -> usize {
for (a, b) in self.iter_mut().zip(src[start .. end].iter_mut()) {
for (a, b) in self.iter_mut().zip(&mut src[start .. end]) {
mem::swap(a, b);
}
cmp::min(self.len(), end-start)

View File

@ -1890,7 +1890,7 @@ impl str {
#[stable(feature = "unicode_case_mapping", since = "1.2.0")]
pub fn to_uppercase(&self) -> String {
let mut s = String::with_capacity(self.len());
s.extend(self[..].chars().flat_map(|c| c.to_uppercase()));
s.extend(self.chars().flat_map(|c| c.to_uppercase()));
return s;
}

View File

@ -1331,7 +1331,7 @@ impl<T:Clone> Clone for Vec<T> {
}
// reuse the contained values' allocations/resources.
for (place, thing) in self.iter_mut().zip(other.iter()) {
for (place, thing) in self.iter_mut().zip(other) {
place.clone_from(thing)
}

View File

@ -1443,7 +1443,7 @@ impl<T: Clone> VecDeque<T> {
/// buf.push_back(15);
/// buf.resize(2, 0);
/// buf.resize(6, 20);
/// for (a, b) in [5, 10, 20, 20, 20, 20].iter().zip(buf.iter()) {
/// for (a, b) in [5, 10, 20, 20, 20, 20].iter().zip(&buf) {
/// assert_eq!(a, b);
/// }
/// ```
@ -1681,7 +1681,7 @@ impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {}
impl<A: PartialEq> PartialEq for VecDeque<A> {
fn eq(&self, other: &VecDeque<A>) -> bool {
self.len() == other.len() &&
self.iter().zip(other.iter()).all(|(a, b)| a.eq(b))
self.iter().zip(other).all(|(a, b)| a.eq(b))
}
}

View File

@ -54,7 +54,7 @@ use vec::Vec;
/// assert_eq!(months.get(&3), Some(&"Venus"));
///
/// // Print out all months
/// for (key, value) in months.iter() {
/// for (key, value) in &months {
/// println!("month {} is {}", key, value);
/// }
///
@ -287,7 +287,7 @@ impl<V> VecMap<V> {
/// *value = "x";
/// }
///
/// for (key, value) in map.iter() {
/// for (key, value) in &map {
/// assert_eq!(value, &"x");
/// }
/// ```

View File

@ -625,7 +625,7 @@ fn test_bit_vec_grow() {
fn test_bit_vec_extend() {
let mut bit_vec = BitVec::from_bytes(&[0b10110110, 0b00000000, 0b11111111]);
let ext = BitVec::from_bytes(&[0b01001001, 0b10010010, 0b10111101]);
bit_vec.extend(ext.iter());
bit_vec.extend(&ext);
assert_eq!(bit_vec, BitVec::from_bytes(&[0b10110110, 0b00000000, 0b11111111,
0b01001001, 0b10010010, 0b10111101]));
}

View File

@ -147,7 +147,7 @@ fn test_zip() {
let x = x;
let y = y;
let mut z = x.iter().zip(y.iter());
let mut z = x.iter().zip(&y);
// FIXME: #5801: this needs a type hint to compile...
let result: Option<(&usize, & &'static str)> = z.next();

View File

@ -344,7 +344,7 @@ fn test_from_iterator() {
assert_eq!(s, c);
let mut d = t.to_string();
d.extend(vec![u].into_iter());
d.extend(vec![u]);
assert_eq!(s, d);
}

View File

@ -699,7 +699,7 @@ fn do_bench_from_iter(b: &mut Bencher, src_len: usize) {
b.bytes = src_len as u64;
b.iter(|| {
let dst: Vec<_> = FromIterator::from_iter(src.clone().into_iter());
let dst: Vec<_> = FromIterator::from_iter(src.clone());
assert_eq!(dst.len(), src_len);
assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
});
@ -733,7 +733,7 @@ fn do_bench_extend(b: &mut Bencher, dst_len: usize, src_len: usize) {
b.iter(|| {
let mut dst = dst.clone();
dst.extend(src.clone().into_iter());
dst.extend(src.clone());
assert_eq!(dst.len(), dst_len + src_len);
assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
});
@ -831,7 +831,7 @@ fn do_bench_push_all_move(b: &mut Bencher, dst_len: usize, src_len: usize) {
b.iter(|| {
let mut dst = dst.clone();
dst.extend(src.clone().into_iter());
dst.extend(src.clone());
assert_eq!(dst.len(), dst_len + src_len);
assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
});

View File

@ -177,7 +177,7 @@ pub trait Iterator {
/// ```
/// let a = [0];
/// let b = [1];
/// let mut it = a.iter().chain(b.iter());
/// let mut it = a.iter().chain(&b);
/// assert_eq!(it.next(), Some(&0));
/// assert_eq!(it.next(), Some(&1));
/// assert!(it.next().is_none());
@ -200,7 +200,7 @@ pub trait Iterator {
/// ```
/// let a = [0];
/// let b = [1];
/// let mut it = a.iter().zip(b.iter());
/// let mut it = a.iter().zip(&b);
/// assert_eq!(it.next(), Some((&0, &1)));
/// assert!(it.next().is_none());
/// ```
@ -585,9 +585,9 @@ pub trait Iterator {
for x in self {
if f(&x) {
left.extend(Some(x).into_iter())
left.extend(Some(x))
} else {
right.extend(Some(x).into_iter())
right.extend(Some(x))
}
}
@ -994,8 +994,8 @@ pub trait Iterator {
us.extend(SizeHint(lo, hi, marker::PhantomData));
for (t, u) in self {
ts.extend(Some(t).into_iter());
us.extend(Some(u).into_iter());
ts.extend(Some(t));
us.extend(Some(u));
}
(ts, us)

View File

@ -146,7 +146,7 @@ macro_rules! define_bignum {
let mut sz = cmp::max(self.size, other.size);
let mut carry = false;
for (a, b) in self.base[..sz].iter_mut().zip(other.base[..sz].iter()) {
for (a, b) in self.base[..sz].iter_mut().zip(&other.base[..sz]) {
let (c, v) = (*a).full_add(*b, carry);
*a = v;
carry = c;
@ -166,7 +166,7 @@ macro_rules! define_bignum {
let sz = cmp::max(self.size, other.size);
let mut noborrow = true;
for (a, b) in self.base[..sz].iter_mut().zip(other.base[..sz].iter()) {
for (a, b) in self.base[..sz].iter_mut().zip(&other.base[..sz]) {
let (c, v) = (*a).full_add(!*b, noborrow);
*a = v;
noborrow = c;
@ -183,7 +183,7 @@ macro_rules! define_bignum {
let mut sz = self.size;
let mut carry = 0;
for a in self.base[..sz].iter_mut() {
for a in &mut self.base[..sz] {
let (c, v) = (*a).full_mul(other, carry);
*a = v;
carry = c;

View File

@ -124,7 +124,7 @@
//! // but to start with we've just got `None`.
//! let mut name_of_biggest_animal = None;
//! let mut size_of_biggest_animal = 0;
//! for big_thing in all_the_big_things.iter() {
//! for big_thing in &all_the_big_things {
//! match *big_thing {
//! Kingdom::Animal(size, name) if size > size_of_biggest_animal => {
//! // Now we've found the name of some big animal

View File

@ -82,7 +82,7 @@ fn test_iterator_chain() {
let xs = [0, 1, 2, 3, 4, 5];
let ys = [30, 40, 50, 60];
let expected = [0, 1, 2, 3, 4, 5, 30, 40, 50, 60];
let it = xs.iter().chain(ys.iter());
let it = xs.iter().chain(&ys);
let mut i = 0;
for &x in it {
assert_eq!(x, expected[i]);
@ -107,11 +107,11 @@ fn test_iterator_chain_nth() {
let zs = [];
let expected = [0, 1, 2, 3, 4, 5, 30, 40, 50, 60];
for (i, x) in expected.iter().enumerate() {
assert_eq!(Some(x), xs.iter().chain(ys.iter()).nth(i));
assert_eq!(Some(x), xs.iter().chain(&ys).nth(i));
}
assert_eq!(zs.iter().chain(xs.iter()).nth(0), Some(&0));
assert_eq!(zs.iter().chain(&xs).nth(0), Some(&0));
let mut it = xs.iter().chain(zs.iter());
let mut it = xs.iter().chain(&zs);
assert_eq!(it.nth(5), Some(&5));
assert_eq!(it.next(), None);
}
@ -121,10 +121,10 @@ fn test_iterator_chain_last() {
let xs = [0, 1, 2, 3, 4, 5];
let ys = [30, 40, 50, 60];
let zs = [];
assert_eq!(xs.iter().chain(ys.iter()).last(), Some(&60));
assert_eq!(zs.iter().chain(ys.iter()).last(), Some(&60));
assert_eq!(ys.iter().chain(zs.iter()).last(), Some(&60));
assert_eq!(zs.iter().chain(zs.iter()).last(), None);
assert_eq!(xs.iter().chain(&ys).last(), Some(&60));
assert_eq!(zs.iter().chain(&ys).last(), Some(&60));
assert_eq!(ys.iter().chain(&zs).last(), Some(&60));
assert_eq!(zs.iter().chain(&zs).last(), None);
}
#[test]
@ -132,8 +132,8 @@ fn test_iterator_chain_count() {
let xs = [0, 1, 2, 3, 4, 5];
let ys = [30, 40, 50, 60];
let zs = [];
assert_eq!(xs.iter().chain(ys.iter()).count(), 10);
assert_eq!(zs.iter().chain(ys.iter()).count(), 4);
assert_eq!(xs.iter().chain(&ys).count(), 10);
assert_eq!(zs.iter().chain(&ys).count(), 4);
}
#[test]
@ -571,8 +571,8 @@ fn test_iterator_size_hint() {
assert_eq!(vi.clone().take_while(|_| false).size_hint(), (0, Some(10)));
assert_eq!(vi.clone().skip_while(|_| false).size_hint(), (0, Some(10)));
assert_eq!(vi.clone().enumerate().size_hint(), (10, Some(10)));
assert_eq!(vi.clone().chain(v2.iter()).size_hint(), (13, Some(13)));
assert_eq!(vi.clone().zip(v2.iter()).size_hint(), (3, Some(3)));
assert_eq!(vi.clone().chain(v2).size_hint(), (13, Some(13)));
assert_eq!(vi.clone().zip(v2).size_hint(), (3, Some(3)));
assert_eq!(vi.clone().scan(0, |_,_| Some(0)).size_hint(), (0, Some(10)));
assert_eq!(vi.clone().filter(|_| false).size_hint(), (0, Some(10)));
assert_eq!(vi.clone().map(|&i| i+1).size_hint(), (10, Some(10)));
@ -742,7 +742,7 @@ fn test_double_ended_filter_map() {
fn test_double_ended_chain() {
let xs = [1, 2, 3, 4, 5];
let ys = [7, 9, 11];
let mut it = xs.iter().chain(ys.iter()).rev();
let mut it = xs.iter().chain(&ys).rev();
assert_eq!(it.next().unwrap(), &11);
assert_eq!(it.next().unwrap(), &9);
assert_eq!(it.next_back().unwrap(), &1);
@ -807,7 +807,7 @@ fn check_randacc_iter<A, T>(a: T, len: usize) where
fn test_double_ended_flat_map() {
let u = [0,1];
let v = [5,6,7,8];
let mut it = u.iter().flat_map(|x| v[*x..v.len()].iter());
let mut it = u.iter().flat_map(|x| &v[*x..v.len()]);
assert_eq!(it.next_back().unwrap(), &8);
assert_eq!(it.next().unwrap(), &5);
assert_eq!(it.next_back().unwrap(), &7);
@ -824,7 +824,7 @@ fn test_double_ended_flat_map() {
fn test_random_access_chain() {
let xs = [1, 2, 3, 4, 5];
let ys = [7, 9, 11];
let mut it = xs.iter().chain(ys.iter());
let mut it = xs.iter().chain(&ys);
assert_eq!(it.idx(0).unwrap(), &1);
assert_eq!(it.idx(5).unwrap(), &7);
assert_eq!(it.idx(7).unwrap(), &11);
@ -862,7 +862,7 @@ fn test_random_access_rev() {
fn test_random_access_zip() {
let xs = [1, 2, 3, 4, 5];
let ys = [7, 9, 11];
check_randacc_iter(xs.iter().zip(ys.iter()), cmp::min(xs.len(), ys.len()));
check_randacc_iter(xs.iter().zip(&ys), cmp::min(xs.len(), ys.len()));
}
#[test]

View File

@ -72,7 +72,7 @@ fn test_option_dance() {
let x = Some(());
let mut y = Some(5);
let mut y2 = 0;
for _x in x.iter() {
for _x in x {
y2 = y.take().unwrap();
}
assert_eq!(y2, 5);

View File

@ -953,7 +953,7 @@ fn test_split_within() {
fn t(s: &str, i: usize, u: &[String]) {
let mut v = Vec::new();
each_split_within(s, i, |s| { v.push(s.to_string()); true });
assert!(v.iter().zip(u.iter()).all(|(a,b)| a == b));
assert!(v.iter().zip(u).all(|(a,b)| a == b));
}
t("", 0, &[]);
t("", 15, &[]);

View File

@ -74,7 +74,7 @@
//! // (assumes that |N| \approxeq |E|)
//! let &Edges(ref v) = self;
//! let mut nodes = Vec::with_capacity(v.len());
//! for &(s,t) in v.iter() {
//! for &(s,t) in v {
//! nodes.push(s); nodes.push(t);
//! }
//! nodes.sort();

View File

@ -174,7 +174,7 @@ impl<'a> SeedableRng<&'a [u32]> for ChaChaRng {
self.init(&[0; KEY_WORDS]);
// set key in place
let key = &mut self.state[4 .. 4+KEY_WORDS];
for (k, s) in key.iter_mut().zip(seed.iter()) {
for (k, s) in key.iter_mut().zip(seed) {
*k = *s;
}
}

View File

@ -140,7 +140,7 @@ impl IsaacRng {
}
let r = [(0, MIDPOINT), (MIDPOINT, 0)];
for &(mr_offset, m2_offset) in r.iter() {
for &(mr_offset, m2_offset) in &r {
macro_rules! rngstepp {
($j:expr, $shift:expr) => {{
@ -379,7 +379,7 @@ impl Isaac64Rng {
}
}
for &(mr_offset, m2_offset) in MP_VEC.iter() {
for &(mr_offset, m2_offset) in &MP_VEC {
for base in (0..MIDPOINT / 4).map(|i| i * 4) {
macro_rules! rngstepp {

View File

@ -404,7 +404,7 @@ impl<'ast> Map<'ast> {
fn path_to_str_with_ident(&self, id: NodeId, i: Ident) -> String {
self.with_path(id, |path| {
path_to_string(path.chain(Some(PathName(i.name)).into_iter()))
path_to_string(path.chain(Some(PathName(i.name))))
})
}

View File

@ -698,7 +698,7 @@ pub fn import_codemap(local_codemap: &codemap::CodeMap,
return false;
}
for (&line1, &line2) in lines1.iter().zip(lines2.iter()) {
for (&line1, &line2) in lines1.iter().zip(&*lines2) {
if (line1 - fm1.start_pos) != (line2 - fm2.start_pos) {
return false;
}
@ -711,7 +711,7 @@ pub fn import_codemap(local_codemap: &codemap::CodeMap,
return false;
}
for (mb1, mb2) in multibytes1.iter().zip(multibytes2.iter()) {
for (mb1, mb2) in multibytes1.iter().zip(&*multibytes2) {
if (mb1.bytes != mb2.bytes) ||
((mb1.pos - fm1.start_pos) != (mb2.pos - fm2.start_pos)) {
return false;

View File

@ -723,14 +723,14 @@ fn encode_generics<'a, 'tcx>(rbml_w: &mut Encoder,
abbrevs: &ecx.type_abbrevs
};
for param in generics.types.iter() {
for param in &generics.types {
rbml_w.start_tag(tag_type_param_def);
tyencode::enc_type_param_def(rbml_w, ty_str_ctxt, param);
rbml_w.end_tag();
}
// Region parameters
for param in generics.regions.iter() {
for param in &generics.regions {
rbml_w.start_tag(tag_region_param_def);
rbml_w.start_tag(tag_region_param_def_ident);
@ -838,7 +838,7 @@ fn encode_info_for_associated_const(ecx: &EncodeContext,
encode_stability(rbml_w, stab);
let elem = ast_map::PathName(associated_const.name);
encode_path(rbml_w, impl_path.chain(Some(elem).into_iter()));
encode_path(rbml_w, impl_path.chain(Some(elem)));
if let Some(ii) = impl_item_opt {
encode_attributes(rbml_w, &ii.attrs);
@ -871,7 +871,7 @@ fn encode_info_for_method<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
encode_bounds_and_type_for_item(rbml_w, ecx, m.def_id.local_id());
let elem = ast_map::PathName(m.name);
encode_path(rbml_w, impl_path.chain(Some(elem).into_iter()));
encode_path(rbml_w, impl_path.chain(Some(elem)));
if let Some(impl_item) = impl_item_opt {
if let ast::MethodImplItem(ref sig, _) = impl_item.node {
encode_attributes(rbml_w, &impl_item.attrs);
@ -917,7 +917,7 @@ fn encode_info_for_associated_type<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
encode_stability(rbml_w, stab);
let elem = ast_map::PathName(associated_type.name);
encode_path(rbml_w, impl_path.chain(Some(elem).into_iter()));
encode_path(rbml_w, impl_path.chain(Some(elem)));
if let Some(ii) = impl_item_opt {
encode_attributes(rbml_w, &ii.attrs);
@ -955,7 +955,7 @@ fn encode_repr_attrs(rbml_w: &mut Encoder,
let mut repr_attrs = Vec::new();
for attr in attrs {
repr_attrs.extend(attr::find_repr_attrs(ecx.tcx.sess.diagnostic(),
attr).into_iter());
attr));
}
rbml_w.start_tag(tag_items_data_item_repr);
repr_attrs.encode(rbml_w);
@ -1409,7 +1409,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
let elem = ast_map::PathName(associated_const.name);
encode_path(rbml_w,
path.clone().chain(Some(elem).into_iter()));
path.clone().chain(Some(elem)));
encode_item_sort(rbml_w, 'C');
encode_family(rbml_w, 'C');
@ -1426,7 +1426,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
let elem = ast_map::PathName(method_ty.name);
encode_path(rbml_w,
path.clone().chain(Some(elem).into_iter()));
path.clone().chain(Some(elem)));
match method_ty.explicit_self {
ty::StaticExplicitSelfCategory => {
@ -1449,7 +1449,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
let elem = ast_map::PathName(associated_type.name);
encode_path(rbml_w,
path.clone().chain(Some(elem).into_iter()));
path.clone().chain(Some(elem)));
encode_item_sort(rbml_w, 't');
encode_family(rbml_w, 'y');
@ -1822,8 +1822,8 @@ fn encode_lang_items(ecx: &EncodeContext, rbml_w: &mut Encoder) {
fn encode_native_libraries(ecx: &EncodeContext, rbml_w: &mut Encoder) {
rbml_w.start_tag(tag_native_libraries);
for &(ref lib, kind) in ecx.tcx.sess.cstore.get_used_libraries()
.borrow().iter() {
for &(ref lib, kind) in &*ecx.tcx.sess.cstore.get_used_libraries()
.borrow() {
match kind {
cstore::NativeStatic => {} // these libraries are not propagated
cstore::NativeFramework | cstore::NativeUnknown => {

View File

@ -438,7 +438,7 @@ impl<'a> Context<'a> {
FileMatches
}).unwrap_or(FileDoesntMatch)
});
self.rejected_via_kind.extend(staticlibs.into_iter());
self.rejected_via_kind.extend(staticlibs);
// We have now collected all known libraries into a set of candidates
// keyed of the filename hash listed. For each filename, we also have a

View File

@ -164,14 +164,14 @@ impl<'a> MacroLoader<'a> {
}
if let Some(sel) = import.as_ref() {
for (name, span) in sel.iter() {
for (name, span) in sel {
if !seen.contains(name) {
self.sess.span_err(*span, "imported macro not found");
}
}
}
for (name, span) in reexport.iter() {
for (name, span) in &reexport {
if !seen.contains(name) {
self.sess.span_err(*span, "reexported macro not found");
}

View File

@ -1126,7 +1126,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
})
}
for &qualif in tcx.const_qualif_map.borrow().get(&id).iter() {
if let Some(qualif) = tcx.const_qualif_map.borrow().get(&id) {
rbml_w.tag(c::tag_table_const_qualif, |rbml_w| {
rbml_w.id(id);
qualif.encode(rbml_w).unwrap()

View File

@ -338,7 +338,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
ast::ExprRange(ref start, ref end) => {
let fields = start.as_ref().map(|e| &**e).into_iter()
.chain(end.as_ref().map(|e| &**e).into_iter());
.chain(end.as_ref().map(|e| &**e));
self.straightline(expr, pred, fields)
}

View File

@ -409,7 +409,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> {
// Special-case some expressions to avoid certain flags bubbling up.
match ex.node {
ast::ExprCall(ref callee, ref args) => {
for arg in args.iter() {
for arg in args {
self.visit_expr(&**arg)
}
@ -435,7 +435,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> {
// Compute the most demanding borrow from all the arms'
// patterns and set that on the discriminator.
let mut borrow = None;
for pat in arms.iter().flat_map(|arm| arm.pats.iter()) {
for pat in arms.iter().flat_map(|arm| &arm.pats) {
let pat_borrow = self.rvalue_borrows.remove(&pat.id);
match (borrow, pat_borrow) {
(None, _) | (_, Some(ast::MutMutable)) => {

View File

@ -187,7 +187,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) {
for pat in inlined_arms
.iter()
.flat_map(|&(ref pats, _)| pats.iter()) {
.flat_map(|&(ref pats, _)| pats) {
// Third, check legality of move bindings.
check_legality_of_bindings_in_at_patterns(cx, &**pat);
@ -220,7 +220,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) {
let matrix: Matrix = inlined_arms
.iter()
.filter(|&&(_, guard)| guard.is_none())
.flat_map(|arm| arm.0.iter())
.flat_map(|arm| &arm.0)
.map(|pat| vec![&**pat])
.collect();
check_exhaustive(cx, ex.span, &matrix, source);
@ -583,7 +583,7 @@ fn construct_witness(cx: &MatchCheckCtxt, ctor: &Constructor,
fn missing_constructor(cx: &MatchCheckCtxt, &Matrix(ref rows): &Matrix,
left_ty: Ty, max_slice_length: usize) -> Option<Constructor> {
let used_constructors: Vec<Constructor> = rows.iter()
.flat_map(|row| pat_constructors(cx, row[0], left_ty, max_slice_length).into_iter())
.flat_map(|row| pat_constructors(cx, row[0], left_ty, max_slice_length))
.collect();
all_constructors(cx, left_ty, max_slice_length)
.into_iter()
@ -705,7 +705,7 @@ fn is_useful(cx: &MatchCheckCtxt,
let wild_pats: Vec<_> = repeat(DUMMY_WILD_PAT).take(arity).collect();
let enum_pat = construct_witness(cx, &constructor, wild_pats, left_ty);
let mut new_pats = vec![enum_pat];
new_pats.extend(pats.into_iter());
new_pats.extend(pats);
UsefulWithWitness(new_pats)
},
result => result

View File

@ -634,7 +634,7 @@ fn bitwise<Op:BitwiseOperator>(out_vec: &mut [usize],
op: &Op) -> bool {
assert_eq!(out_vec.len(), in_vec.len());
let mut changed = false;
for (out_elt, in_elt) in out_vec.iter_mut().zip(in_vec.iter()) {
for (out_elt, in_elt) in out_vec.iter_mut().zip(in_vec) {
let old_val = *out_elt;
let new_val = op.join(old_val, *in_elt);
*out_elt = new_val;

View File

@ -212,11 +212,11 @@ enum OverloadedCallType {
impl OverloadedCallType {
fn from_trait_id(tcx: &ty::ctxt, trait_id: ast::DefId)
-> OverloadedCallType {
for &(maybe_function_trait, overloaded_call_type) in [
for &(maybe_function_trait, overloaded_call_type) in &[
(tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
(tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
(tcx.lang_items.fn_trait(), FnOverloadedCall)
].iter() {
] {
match maybe_function_trait {
Some(function_trait) if function_trait == trait_id => {
return overloaded_call_type

View File

@ -276,7 +276,7 @@ impl<'a, 'tcx> Implicator<'a, 'tcx> {
let variances = ty::item_variances(self.tcx(), def_id);
for (&region, &variance) in substs.regions().iter().zip(variances.regions.iter()) {
for (&region, &variance) in substs.regions().iter().zip(&variances.regions) {
match variance {
ty::Contravariant | ty::Invariant => {
// If any data with this lifetime is reachable
@ -287,7 +287,7 @@ impl<'a, 'tcx> Implicator<'a, 'tcx> {
}
}
for (&ty, &variance) in substs.types.iter().zip(variances.types.iter()) {
for (&ty, &variance) in substs.types.iter().zip(&variances.types) {
match variance {
ty::Covariant | ty::Invariant => {
// If any data of this type is reachable within,

View File

@ -443,7 +443,7 @@ impl<'a,'tcx> InferCtxtExt for InferCtxt<'a,'tcx> {
let escaping_region_vars: FnvHashSet<_> =
escaping_types
.iter()
.flat_map(|&t| ty_fold::collect_regions(self.tcx, &t).into_iter())
.flat_map(|&t| ty_fold::collect_regions(self.tcx, &t))
.collect();
region_vars.retain(|&region_vid| {

View File

@ -372,22 +372,22 @@ struct RegionResolutionVisitor<'a> {
impl RegionMaps {
pub fn each_encl_scope<E>(&self, mut e:E) where E: FnMut(&CodeExtent, &CodeExtent) {
for (child, parent) in self.scope_map.borrow().iter() {
for (child, parent) in &*self.scope_map.borrow() {
e(child, parent)
}
}
pub fn each_var_scope<E>(&self, mut e:E) where E: FnMut(&ast::NodeId, &CodeExtent) {
for (child, parent) in self.var_map.borrow().iter() {
for (child, parent) in &*self.var_map.borrow() {
e(child, parent)
}
}
pub fn each_rvalue_scope<E>(&self, mut e:E) where E: FnMut(&ast::NodeId, &CodeExtent) {
for (child, parent) in self.rvalue_scopes.borrow().iter() {
for (child, parent) in &*self.rvalue_scopes.borrow() {
e(child, parent)
}
}
pub fn each_terminating_scope<E>(&self, mut e:E) where E: FnMut(&CodeExtent) {
for scope in self.terminating_scopes.borrow().iter() {
for scope in &*self.terminating_scopes.borrow() {
e(scope)
}
}

View File

@ -485,7 +485,7 @@ pub fn check_pat(tcx: &ty::ctxt, pat: &ast::Pat,
match pat.node {
// Foo(a, b, c)
ast::PatEnum(_, Some(ref pat_fields)) => {
for (field, struct_field) in pat_fields.iter().zip(struct_fields.iter()) {
for (field, struct_field) in pat_fields.iter().zip(&struct_fields) {
// a .. pattern is fine, but anything positional is
// not.
if let ast::PatWild(ast::PatWildMulti) = field.node {
@ -595,14 +595,14 @@ pub fn check_unused_or_stable_features(sess: &Session,
let stable_msg = "this feature is stable. attribute no longer needed";
for &span in sess.features.borrow().declared_stable_lang_features.iter() {
for &span in &sess.features.borrow().declared_stable_lang_features {
sess.add_lint(lint::builtin::STABLE_FEATURES,
ast::CRATE_NODE_ID,
span,
stable_msg.to_string());
}
for (used_lib_feature, level) in lib_features_used.iter() {
for (used_lib_feature, level) in lib_features_used {
match remaining_lib_features.remove(used_lib_feature) {
Some(span) => {
if *level == attr::Stable {
@ -616,7 +616,7 @@ pub fn check_unused_or_stable_features(sess: &Session,
}
}
for (_, &span) in remaining_lib_features.iter() {
for &span in remaining_lib_features.values() {
sess.add_lint(lint::builtin::UNUSED_FEATURES,
ast::CRATE_NODE_ID,
span,

View File

@ -279,8 +279,8 @@ impl<T> VecPerParamSpace<T> {
let self_limit = type_limit + s.len();
let mut content = t;
content.extend(s.into_iter());
content.extend(f.into_iter());
content.extend(s);
content.extend(f);
VecPerParamSpace {
type_limit: type_limit,

View File

@ -85,7 +85,7 @@ fn overlap(selcx: &mut SelectionContext,
let infcx = selcx.infcx();
let opt_failing_obligation =
a_obligations.iter()
.chain(b_obligations.iter())
.chain(&b_obligations)
.map(|o| infcx.resolve_type_vars_if_possible(o))
.find(|o| !selcx.evaluate_obligation(o));
@ -159,8 +159,8 @@ fn impl_trait_ref_and_oblig<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
let impl_obligations: Vec<_> =
impl_obligations.into_iter()
.chain(normalization_obligations1.into_iter())
.chain(normalization_obligations2.into_iter())
.chain(normalization_obligations1)
.chain(normalization_obligations2)
.collect();
(impl_trait_ref, impl_obligations)
@ -209,7 +209,7 @@ fn orphan_check_trait_ref<'tcx>(tcx: &ty::ctxt<'tcx>,
// First, create an ordered iterator over all the type parameters to the trait, with the self
// type appearing first.
let input_tys = Some(trait_ref.self_ty());
let input_tys = input_tys.iter().chain(trait_ref.substs.types.get_slice(TypeSpace).iter());
let input_tys = input_tys.iter().chain(trait_ref.substs.types.get_slice(TypeSpace));
// Find the first input type that either references a type parameter OR
// some local type.
@ -255,7 +255,7 @@ fn uncovered_tys<'tcx>(tcx: &ty::ctxt<'tcx>,
vec![]
} else if fundamental_ty(tcx, ty) {
ty.walk_shallow()
.flat_map(|t| uncovered_tys(tcx, t, infer_is_local).into_iter())
.flat_map(|t| uncovered_tys(tcx, t, infer_is_local))
.collect()
} else {
vec![ty]

View File

@ -399,7 +399,7 @@ fn process_predicate<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
result.repr(tcx));
match result {
Ok(Some(obligations)) => {
new_obligations.extend(obligations.into_iter());
new_obligations.extend(obligations);
true
}
Ok(None) => {

View File

@ -80,7 +80,7 @@ pub fn object_safety_violations<'tcx>(tcx: &ty::ctxt<'tcx>,
-> Vec<ObjectSafetyViolation<'tcx>>
{
traits::supertrait_def_ids(tcx, trait_def_id)
.flat_map(|def_id| object_safety_violations_for_trait(tcx, def_id).into_iter())
.flat_map(|def_id| object_safety_violations_for_trait(tcx, def_id))
.collect()
}

View File

@ -280,7 +280,7 @@ impl<'a,'b,'tcx> TypeFolder<'tcx> for AssociatedTypeNormalizer<'a,'b,'tcx> {
data.clone(),
self.cause.clone(),
self.depth);
self.obligations.extend(obligations.into_iter());
self.obligations.extend(obligations);
ty
}
@ -376,7 +376,7 @@ fn opt_normalize_projection_type<'a,'b,'tcx>(
normalized_ty.repr(tcx),
depth);
obligations.extend(normalizer.obligations.into_iter());
obligations.extend(normalizer.obligations);
Some(Normalized {
value: normalized_ty,
obligations: obligations,
@ -872,7 +872,7 @@ fn confirm_impl_candidate<'cx,'tcx>(
// It is not in the impl - get the default from the trait.
let trait_ref = obligation.predicate.trait_ref;
for trait_item in ty::trait_items(selcx.tcx(), trait_ref.def_id).iter() {
for trait_item in &*ty::trait_items(selcx.tcx(), trait_ref.def_id) {
if let &ty::TypeTraitItem(ref assoc_ty) = trait_item {
if assoc_ty.name == obligation.predicate.item_name {
if let Some(ty) = assoc_ty.ty {

View File

@ -1750,7 +1750,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let types: Vec<Ty> =
ty::substd_enum_variants(self.tcx(), def_id, substs)
.iter()
.flat_map(|variant| variant.args.iter())
.flat_map(|variant| &variant.args)
.cloned()
.collect();
nominal(bound, types)
@ -1893,7 +1893,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ty::ty_enum(def_id, substs) => {
Some(ty::substd_enum_variants(self.tcx(), def_id, substs)
.iter()
.flat_map(|variant| variant.args.iter())
.flat_map(|variant| &variant.args)
.map(|&ty| ty)
.collect())
}
@ -1960,7 +1960,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// Flatten those vectors (couldn't do it above due `collect`)
match obligations {
Ok(obligations) => obligations.into_iter().flat_map(|o| o.into_iter()).collect(),
Ok(obligations) => obligations.into_iter().flat_map(|o| o).collect(),
Err(ErrorReported) => Vec::new(),
}
}
@ -2689,7 +2689,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// simplified, do not match.
obligation.predicate.0.input_types().iter()
.zip(impl_trait_ref.input_types().iter())
.zip(impl_trait_ref.input_types())
.any(|(&obligation_ty, &impl_ty)| {
let simplified_obligation_ty =
fast_reject::simplify_type(self.tcx(), obligation_ty, true);

View File

@ -135,7 +135,7 @@ impl<'cx, 'tcx> Elaborator<'cx, 'tcx> {
// Sized { }`.
predicates.retain(|r| self.visited.insert(r));
self.stack.extend(predicates.into_iter());
self.stack.extend(predicates);
}
ty::Predicate::Equate(..) => {
// Currently, we do not "elaborate" predicates like

View File

@ -2154,7 +2154,7 @@ impl<'tcx> Predicate<'tcx> {
let trait_inputs = data.0.projection_ty.trait_ref.substs.types.as_slice();
trait_inputs.iter()
.cloned()
.chain(Some(data.0.ty).into_iter())
.chain(Some(data.0.ty))
.collect()
}
};
@ -2571,7 +2571,7 @@ impl<'tcx> TraitDef<'tcx> {
pub fn for_each_impl<F: FnMut(DefId)>(&self, tcx: &ctxt<'tcx>, mut f: F) {
ty::populate_implementations_for_trait_if_necessary(tcx, self.trait_ref.def_id);
for &impl_def_id in self.blanket_impls.borrow().iter() {
for &impl_def_id in &*self.blanket_impls.borrow() {
f(impl_def_id);
}
@ -2589,7 +2589,7 @@ impl<'tcx> TraitDef<'tcx> {
{
ty::populate_implementations_for_trait_if_necessary(tcx, self.trait_ref.def_id);
for &impl_def_id in self.blanket_impls.borrow().iter() {
for &impl_def_id in &*self.blanket_impls.borrow() {
f(impl_def_id);
}
@ -3068,7 +3068,7 @@ impl FlagComputation {
match substs.regions {
subst::ErasedRegions => {}
subst::NonerasedRegions(ref regions) => {
for &r in regions.iter() {
for &r in regions {
self.add_region(r);
}
}
@ -4199,7 +4199,7 @@ pub fn is_type_representable<'tcx>(cx: &ctxt<'tcx>, sp: Span, ty: Ty<'tcx>)
ty_enum(did, substs) => {
let vs = enum_variants(cx, did);
let iter = vs.iter()
.flat_map(|variant| { variant.args.iter() })
.flat_map(|variant| &variant.args)
.map(|aty| { aty.subst_spanned(cx, substs, Some(sp)) });
find_nonrepresentable(cx, sp, seen, iter)
@ -4233,7 +4233,7 @@ pub fn is_type_representable<'tcx>(cx: &ctxt<'tcx>, sp: Span, ty: Ty<'tcx>)
let types_a = substs_a.types.get_slice(subst::TypeSpace);
let types_b = substs_b.types.get_slice(subst::TypeSpace);
let mut pairs = types_a.iter().zip(types_b.iter());
let mut pairs = types_a.iter().zip(types_b);
pairs.all(|(&a, &b)| same_type(a, b))
}
@ -4509,7 +4509,7 @@ pub fn named_element_ty<'tcx>(cx: &ctxt<'tcx>,
let variant_info = enum_variant_with_id(cx, def_id, variant_def_id);
variant_info.arg_names.as_ref()
.expect("must have struct enum variant if accessing a named fields")
.iter().zip(variant_info.args.iter())
.iter().zip(&variant_info.args)
.find(|&(&name, _)| name == n)
.map(|(_name, arg_t)| arg_t.subst(cx, substs))
}
@ -5401,7 +5401,7 @@ pub fn associated_type_parameter_index(cx: &ctxt,
trait_def: &TraitDef,
associated_type_id: ast::DefId)
-> usize {
for type_parameter_def in trait_def.generics.types.iter() {
for type_parameter_def in &trait_def.generics.types {
if type_parameter_def.def_id == associated_type_id {
return type_parameter_def.index as usize
}
@ -6607,8 +6607,8 @@ pub fn hash_crate_independent<'tcx>(tcx: &ctxt<'tcx>, ty: Ty<'tcx>, svh: &Svh) -
hash!(data.bounds);
let principal = anonymize_late_bound_regions(tcx, &data.principal).0;
for subty in principal.substs.types.iter() {
helper(tcx, *subty, svh, state);
for subty in &principal.substs.types {
helper(tcx, subty, svh, state);
}
return false;
@ -6933,7 +6933,7 @@ pub fn accumulate_lifetimes_in_type(accumulator: &mut Vec<ty::Region>,
match substs.regions {
subst::ErasedRegions => {}
subst::NonerasedRegions(ref regions) => {
for region in regions.iter() {
for region in regions {
accumulator.push(*region)
}
}

View File

@ -274,8 +274,7 @@ fn relate_arg_vecs<'a,'tcx:'a,R>(relation: &mut R,
return Err(ty::terr_arg_count);
}
a_args.iter()
.zip(b_args.iter())
a_args.iter().zip(b_args)
.map(|(a, b)| relation.relate_with_variance(ty::Contravariant, a, b))
.collect()
}
@ -355,8 +354,7 @@ impl<'a,'tcx:'a> Relate<'a,'tcx> for Vec<ty::PolyProjectionPredicate<'tcx>> {
if a.len() != b.len() {
Err(ty::terr_projection_bounds_length(expected_found(relation, &a.len(), &b.len())))
} else {
a.iter()
.zip(b.iter())
a.iter().zip(b)
.map(|(a, b)| relation.relate(a, b))
.collect()
}
@ -539,8 +537,7 @@ pub fn super_relate_tys<'a,'tcx:'a,R>(relation: &mut R,
(&ty::ty_tup(ref as_), &ty::ty_tup(ref bs)) =>
{
if as_.len() == bs.len() {
let ts = try!(as_.iter()
.zip(bs.iter())
let ts = try!(as_.iter().zip(bs)
.map(|(a, b)| relation.relate(a, b))
.collect::<Result<_, _>>());
Ok(ty::mk_tup(tcx, ts))

View File

@ -470,15 +470,15 @@ pub fn parameterized<'tcx,GG>(cx: &ctxt<'tcx>,
strings.push(format!(".."));
}
subst::NonerasedRegions(ref regions) => {
for region in regions.iter() {
for region in regions {
strings.push(region.repr(cx));
}
}
}
for ty in substs.types.iter() {
for ty in &substs.types {
strings.push(ty.repr(cx));
}
for projection in projections.iter() {
for projection in projections {
strings.push(format!("{}={}",
projection.projection_ty.item_name.user_string(cx),
projection.ty.user_string(cx)));
@ -495,7 +495,7 @@ pub fn parameterized<'tcx,GG>(cx: &ctxt<'tcx>,
match substs.regions {
subst::ErasedRegions => { }
subst::NonerasedRegions(ref regions) => {
for &r in regions.iter() {
for &r in regions {
let s = region_to_string(cx, "", false, r);
if s.is_empty() {
// This happens when the value of the region
@ -523,7 +523,7 @@ pub fn parameterized<'tcx,GG>(cx: &ctxt<'tcx>,
let ty_params = generics.types.get_slice(subst::TypeSpace);
let has_defaults = ty_params.last().map_or(false, |def| def.default.is_some());
let num_defaults = if has_defaults {
ty_params.iter().zip(tps.iter()).rev().take_while(|&(def, &actual)| {
ty_params.iter().zip(tps).rev().take_while(|&(def, &actual)| {
match def.default {
Some(default) => {
if !has_self && ty::type_has_self(default) {

View File

@ -99,7 +99,7 @@ fn group_errors_with_same_origin<'tcx>(errors: &Vec<MoveError<'tcx>>)
for ge in &mut *grouped_errors {
if move_from_id == ge.move_from.id && error.move_to.is_some() {
debug!("appending move_to to list");
ge.move_to_places.extend(move_to.into_iter());
ge.move_to_places.extend(move_to);
return
}
}

View File

@ -484,7 +484,7 @@ pub fn phase_2_configure_and_expand(sess: &Session,
let mut new_path = sess.host_filesearch(PathKind::All)
.get_dylib_search_paths();
new_path.extend(env::split_paths(&_old_path));
env::set_var("PATH", &env::join_paths(new_path.iter()).unwrap());
env::set_var("PATH", &env::join_paths(new_path).unwrap());
}
let features = sess.features.borrow();
let cfg = syntax::ext::expand::ExpansionConfig {
@ -765,7 +765,7 @@ pub fn phase_6_link_output(sess: &Session,
let old_path = env::var_os("PATH").unwrap_or(OsString::new());
let mut new_path = sess.host_filesearch(PathKind::All).get_tools_search_paths();
new_path.extend(env::split_paths(&old_path));
env::set_var("PATH", &env::join_paths(new_path.iter()).unwrap());
env::set_var("PATH", &env::join_paths(&new_path).unwrap());
time(sess.time_passes(), "linking", (), |_|
link::link_binary(sess,
@ -895,7 +895,7 @@ pub fn collect_crate_types(session: &Session,
// will be found in crate attributes.
let mut base = session.opts.crate_types.clone();
if base.is_empty() {
base.extend(attr_types.into_iter());
base.extend(attr_types);
if base.is_empty() {
base.push(link::default_output_for_target(session));
}

View File

@ -566,7 +566,7 @@ Available lint options:
let plugin_groups = sort_lint_groups(plugin_groups);
let builtin_groups = sort_lint_groups(builtin_groups);
let max_name_len = plugin.iter().chain(builtin.iter())
let max_name_len = plugin.iter().chain(&builtin)
.map(|&s| s.name.chars().count())
.max().unwrap_or(0);
let padded = |x: &str| {
@ -593,7 +593,7 @@ Available lint options:
let max_name_len = plugin_groups.iter().chain(builtin_groups.iter())
let max_name_len = plugin_groups.iter().chain(&builtin_groups)
.map(|&(s, _)| s.chars().count())
.max().unwrap_or(0);
let padded = |x: &str| {

View File

@ -645,7 +645,7 @@ impl LintPass for UnusedAttributes {
}
let plugin_attributes = cx.sess().plugin_attributes.borrow_mut();
for &(ref name, ty) in plugin_attributes.iter() {
for &(ref name, ty) in &*plugin_attributes {
if ty == AttributeType::Whitelisted && attr.check_name(&*name) {
break;
}
@ -2249,7 +2249,7 @@ impl LintPass for DropWithReprExtern {
lint_array!(DROP_WITH_REPR_EXTERN)
}
fn check_crate(&mut self, ctx: &Context, _: &ast::Crate) {
for dtor_did in ctx.tcx.destructors.borrow().iter() {
for dtor_did in &*ctx.tcx.destructors.borrow() {
let (drop_impl_did, dtor_self_type) =
if dtor_did.krate == ast::LOCAL_CRATE {
let impl_did = ctx.tcx.map.get_parent_did(dtor_did.node);

View File

@ -215,7 +215,7 @@ impl<'a, 'v, 'tcx> Visitor<'v> for Resolver<'a, 'tcx> {
// `visit::walk_variant` without the discriminant expression.
match variant.node.kind {
ast::TupleVariantKind(ref variant_arguments) => {
for variant_argument in variant_arguments.iter() {
for variant_argument in variant_arguments {
self.visit_ty(&*variant_argument.ty);
}
}

View File

@ -355,7 +355,7 @@ pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tc
}
pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String {
mangle(path.chain(Some(gensym_name(flav)).into_iter()), None)
mangle(path.chain(Some(gensym_name(flav))), None)
}
pub fn get_cc_prog(sess: &Session) -> String {
@ -761,7 +761,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
ab.add_rlib(&p, &name[..], sess.lto()).unwrap();
let native_libs = csearch::get_native_libraries(&sess.cstore, cnum);
all_native_libs.extend(native_libs.into_iter());
all_native_libs.extend(native_libs);
}
ab.update_symbols();
@ -918,7 +918,7 @@ fn link_args(cmd: &mut Linker,
let empty_vec = Vec::new();
let empty_str = String::new();
let args = sess.opts.cg.link_args.as_ref().unwrap_or(&empty_vec);
let mut args = args.iter().chain(used_link_args.iter());
let mut args = args.iter().chain(&*used_link_args);
let relocation_model = sess.opts.cg.relocation_model.as_ref()
.unwrap_or(&empty_str);
if (t.options.relocation_model == "pic" || *relocation_model == "pic")

View File

@ -138,7 +138,7 @@ impl <'l, 'tcx> DumpCsvVisitor<'l, 'tcx> {
let mut result: Vec<(Span, String)> = vec!();
let mut segs = vec!();
for (i, (seg, span)) in path.segments.iter().zip(spans.iter()).enumerate() {
for (i, (seg, span)) in path.segments.iter().zip(&spans).enumerate() {
segs.push(seg.clone());
let sub_path = ast::Path{span: *span, // span for the last segment
global: path.global,
@ -476,14 +476,14 @@ impl <'l, 'tcx> DumpCsvVisitor<'l, 'tcx> {
// the first few to match the number of generics we're looking for.
let param_sub_spans = self.span.spans_for_ty_params(full_span,
(generics.ty_params.len() as isize));
for (param, param_ss) in generics.ty_params.iter().zip(param_sub_spans.iter()) {
for (param, param_ss) in generics.ty_params.iter().zip(param_sub_spans) {
// Append $id to name to make sure each one is unique
let name = format!("{}::{}${}",
prefix,
escape(self.span.snippet(*param_ss)),
escape(self.span.snippet(param_ss)),
id);
self.fmt.typedef_str(full_span,
Some(*param_ss),
Some(param_ss),
param.id,
&name,
"");

View File

@ -1501,7 +1501,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>,
};
let mut matches = Vec::new();
for (arm_data, pats) in arm_datas.iter().zip(arm_pats.iter()) {
for (arm_data, pats) in arm_datas.iter().zip(&arm_pats) {
matches.extend(pats.iter().map(|p| Match {
pats: vec![&**p],
data: arm_data,
@ -1833,7 +1833,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
.iter()
.chain(slice.iter())
.chain(after.iter())
.zip(extracted.vals.into_iter())
.zip(extracted.vals)
.fold(bcx, |bcx, (inner, elem)|
bind_irrefutable_pat(bcx, &**inner, elem, cleanup_scope)
);

View File

@ -1199,7 +1199,7 @@ fn build_const_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
// offset of current value
let mut offset = 0;
let mut cfields = Vec::new();
for (&val, &target_offset) in vals.iter().zip(target_offsets.iter()) {
for (&val, target_offset) in vals.iter().zip(target_offsets) {
if !st.packed {
let val_align = machine::llalign_of_min(ccx, val_ty(val));
offset = roundup(offset, val_align);

View File

@ -88,7 +88,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
let all_constraints= constraints.iter()
.map(|s| s.to_string())
.chain(ext_constraints.into_iter())
.chain(ext_constraints)
.chain(clobbers)
.chain(arch_clobbers.iter()
.map(|s| s.to_string()))

View File

@ -561,7 +561,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// we care about.
if ixs.len() < 16 {
let mut small_vec = [ C_i32(self.ccx, 0); 16 ];
for (small_vec_e, &ix) in small_vec.iter_mut().zip(ixs.iter()) {
for (small_vec_e, &ix) in small_vec.iter_mut().zip(ixs) {
*small_vec_e = C_i32(self.ccx, ix as i32);
}
self.inbounds_gep(base, &small_vec[..ixs.len()])

View File

@ -257,7 +257,7 @@ fn walk_pattern(cx: &CrateContext,
for &codemap::Spanned {
node: ast::FieldPat { pat: ref sub_pat, .. },
..
} in field_pats.iter() {
} in field_pats {
walk_pattern(cx, &**sub_pat, scope_stack, scope_map);
}
}

View File

@ -1557,7 +1557,7 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
// Build an array of (field name, field type) pairs to be captured in the factory closure.
let args: Vec<(String, Ty)> = arg_names.iter()
.zip(struct_def.fields.iter())
.zip(&struct_def.fields)
.map(|(s, &t)| (s.to_string(), t))
.collect();

View File

@ -216,7 +216,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
output.push('<');
for &type_parameter in substs.types.iter() {
for &type_parameter in &substs.types {
push_debuginfo_type_name(cx, type_parameter, true, output);
output.push_str(", ");
}

View File

@ -526,7 +526,7 @@ fn coerce_unsized<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
};
assert!(coerce_index < src_fields.len() && src_fields.len() == target_fields.len());
let iter = src_fields.iter().zip(target_fields.iter()).enumerate();
let iter = src_fields.iter().zip(target_fields).enumerate();
for (i, (src_ty, target_ty)) in iter {
let ll_source = adt::trans_field_ptr(bcx, &repr_source, source.val, 0, i);
let ll_target = adt::trans_field_ptr(bcx, &repr_target, target.val, 0, i);
@ -1547,7 +1547,7 @@ pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
}
// Finally, move scratch field values into actual field locations
for (i, datum) in scratch_vals.into_iter() {
for (i, datum) in scratch_vals {
let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
bcx = datum.store_to(bcx, dest);
}

View File

@ -454,7 +454,7 @@ fn gate_simd_ffi(tcx: &ty::ctxt, decl: &ast::FnDecl, ty: &ty::BareFnTy) {
}
};
let sig = &ty.sig.0;
for (input, ty) in decl.inputs.iter().zip(sig.inputs.iter()) {
for (input, ty) in decl.inputs.iter().zip(&sig.inputs) {
check(&*input.ty, *ty)
}
if let ast::Return(ref ty) = decl.output {
@ -600,7 +600,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
let ps = ccx.tcx().map.with_path(id, |path| {
let abi = Some(ast_map::PathName(special_idents::clownshoe_abi.name));
link::mangle(path.chain(abi.into_iter()), hash)
link::mangle(path.chain(abi), hash)
});
// Compute the type that the function would have if it were just a

View File

@ -106,7 +106,7 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId)
ast::ItemEnum(_, _) => {
let vs_here = ty::enum_variants(ccx.tcx(), local_def(item.id));
let vs_there = ty::enum_variants(ccx.tcx(), parent_id);
for (here, there) in vs_here.iter().zip(vs_there.iter()) {
for (here, there) in vs_here.iter().zip(&*vs_there) {
if there.id == fn_id { my_id = here.id.node; }
ccx.external().borrow_mut().insert(there.id, Some(here.id.node));
}

View File

@ -508,7 +508,7 @@ fn find_implied_output_region(input_tys: &[Ty], input_pats: Vec<String>)
let mut lifetimes_for_params: Vec<(String, usize)> = Vec::new();
let mut possible_implied_output_region = None;
for (input_type, input_pat) in input_tys.iter().zip(input_pats.into_iter()) {
for (input_type, input_pat) in input_tys.iter().zip(input_pats) {
let mut accumulator = Vec::new();
ty::accumulate_lifetimes_in_type(&mut accumulator, *input_type);

View File

@ -250,7 +250,7 @@ pub fn check_pat<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>,
let pat_ty = ty::mk_tup(tcx, element_tys.clone());
fcx.write_ty(pat.id, pat_ty);
demand::eqtype(fcx, pat.span, expected, pat_ty);
for (element_pat, element_ty) in elements.iter().zip(element_tys.into_iter()) {
for (element_pat, element_ty) in elements.iter().zip(element_tys) {
check_pat(pcx, &**element_pat, element_ty);
}
}
@ -697,8 +697,8 @@ pub fn check_pat_enum<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>,
if let Some(subpats) = subpats {
if subpats.len() == arg_tys.len() {
for (subpat, arg_ty) in subpats.iter().zip(arg_tys.iter()) {
check_pat(pcx, &**subpat, *arg_ty);
for (subpat, arg_ty) in subpats.iter().zip(arg_tys) {
check_pat(pcx, &**subpat, arg_ty);
}
} else if arg_tys.is_empty() {
span_err!(tcx.sess, pat.span, E0024,

View File

@ -184,11 +184,11 @@ fn try_overloaded_call_traits<'a,'tcx>(fcx: &FnCtxt<'a, 'tcx>,
-> Option<ty::MethodCallee<'tcx>>
{
// Try the options that are least restrictive on the caller first.
for &(opt_trait_def_id, method_name) in [
for &(opt_trait_def_id, method_name) in &[
(fcx.tcx().lang_items.fn_trait(), token::intern("call")),
(fcx.tcx().lang_items.fn_mut_trait(), token::intern("call_mut")),
(fcx.tcx().lang_items.fn_once_trait(), token::intern("call_once")),
].iter() {
] {
let trait_def_id = match opt_trait_def_id {
Some(def_id) => def_id,
None => continue,
@ -379,7 +379,7 @@ impl<'tcx> DeferredCallResolution<'tcx> for CallResolution<'tcx> {
method_callee.repr(fcx.tcx()));
for (&method_arg_ty, &self_arg_ty) in
method_sig.inputs[1..].iter().zip(self.fn_sig.inputs.iter())
method_sig.inputs[1..].iter().zip(&self.fn_sig.inputs)
{
demand::eqtype(fcx, self.call_expr.span, self_arg_ty, method_arg_ty);
}

View File

@ -475,7 +475,7 @@ fn iterate_over_potentially_unsafe_regions_in_type<'a, 'tcx>(
let fields =
ty::lookup_struct_fields(rcx.tcx(), struct_did);
for field in fields.iter() {
for field in &fields {
let field_type =
ty::lookup_field_type(rcx.tcx(),
struct_did,
@ -507,7 +507,7 @@ fn iterate_over_potentially_unsafe_regions_in_type<'a, 'tcx>(
ty::substd_enum_variants(rcx.tcx(),
enum_did,
substs);
for variant_info in all_variant_info.iter() {
for variant_info in &all_variant_info {
for (i, arg_type) in variant_info.args.iter().enumerate() {
try!(iterate_over_potentially_unsafe_regions_in_type(
rcx,

View File

@ -482,7 +482,7 @@ pub fn check_item_types(ccx: &CrateCtxt) {
ccx.tcx.sess.abort_if_errors();
for drop_method_did in ccx.tcx.destructors.borrow().iter() {
for drop_method_did in &*ccx.tcx.destructors.borrow() {
if drop_method_did.krate == ast::LOCAL_CRATE {
let drop_impl_did = ccx.tcx.map.get_parent_did(drop_method_did.node);
match dropck::check_drop_impl(ccx.tcx, drop_impl_did) {
@ -679,7 +679,7 @@ fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
let mut visit = GatherLocalsVisitor { fcx: &fcx, };
// Add formal parameters.
for (arg_ty, input) in arg_tys.iter().zip(decl.inputs.iter()) {
for (arg_ty, input) in arg_tys.iter().zip(&decl.inputs) {
// Create type variables for each argument.
pat_util::pat_bindings(
&tcx.def_map,
@ -706,8 +706,8 @@ fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
ty::FnDiverging => NoExpectation
});
for (input, arg) in decl.inputs.iter().zip(arg_tys.iter()) {
fcx.write_ty(input.id, *arg);
for (input, arg) in decl.inputs.iter().zip(arg_tys) {
fcx.write_ty(input.id, arg);
}
fcx
@ -1753,7 +1753,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
substs: &Substs<'tcx>,
expr: &ast::Expr)
{
for &ty in substs.types.iter() {
for &ty in &substs.types {
let default_bound = ty::ReScope(CodeExtent::from_node_id(expr.id));
let cause = traits::ObligationCause::new(expr.span, self.body_id,
traits::MiscObligation);
@ -4292,7 +4292,7 @@ pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
// we need not check for that.
let variants = ty::enum_variants(ccx.tcx, def_id);
for (v, variant) in vs.iter().zip(variants.iter()) {
for (v, variant) in vs.iter().zip(&*variants) {
let current_disr_val = variant.disr_val;
// Check for duplicate discriminant values
@ -4563,7 +4563,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
// provided (if any) into their appropriate spaces. We'll also report
// errors if type parameters are provided in an inappropriate place.
let mut substs = Substs::empty();
for (opt_space, segment) in segment_spaces.iter().zip(segments.iter()) {
for (opt_space, segment) in segment_spaces.iter().zip(segments) {
match *opt_space {
None => {
check_path_args(fcx.tcx(), slice::ref_slice(segment),

View File

@ -637,7 +637,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) {
// outlive the operation you are performing.
let lhs_ty = rcx.resolve_expr_type_adjusted(&**lhs);
let rhs_ty = rcx.resolve_expr_type_adjusted(&**rhs);
for &ty in [lhs_ty, rhs_ty].iter() {
for &ty in &[lhs_ty, rhs_ty] {
type_must_outlive(rcx,
infer::Operand(expr.span),
ty,

View File

@ -235,7 +235,7 @@ impl<'a,'tcx> AdjustBorrowKind<'a,'tcx> {
if self.closures_with_inferred_kinds.contains(&id) {
let mut deferred_call_resolutions =
self.fcx.remove_deferred_call_resolutions(closure_def_id);
for deferred_call_resolution in deferred_call_resolutions.iter_mut() {
for deferred_call_resolution in &mut deferred_call_resolutions {
deferred_call_resolution.resolve(self.fcx);
}
}

View File

@ -375,7 +375,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
fn reject_non_type_param_bounds<'tcx>(tcx: &ty::ctxt<'tcx>,
span: Span,
predicates: &ty::GenericPredicates<'tcx>) {
for predicate in predicates.predicates.iter() {
for predicate in &predicates.predicates {
match predicate {
&ty::Predicate::Trait(ty::Binder(ref tr)) => {
let found_param = tr.input_types().iter()
@ -514,7 +514,7 @@ impl<'cx,'tcx> BoundsChecker<'cx,'tcx> {
traits::ItemObligation(trait_ref.def_id)),
&bounds);
for &ty in trait_ref.substs.types.iter() {
for &ty in &trait_ref.substs.types {
self.check_traits_in_ty(ty);
}
}

View File

@ -260,7 +260,7 @@ impl<'a,'tcx> CrateCtxt<'a,'tcx> {
}
}
for request in cycle[1..].iter() {
for request in &cycle[1..] {
match *request {
AstConvRequest::GetItemTypeScheme(def_id) |
AstConvRequest::GetTraitDef(def_id) => {
@ -443,7 +443,7 @@ impl<'a,'b,'tcx,A,B> GetTypeParameterBounds<'tcx> for (&'a A,&'b B)
-> Vec<ty::Predicate<'tcx>>
{
let mut v = self.0.get_type_parameter_bounds(astconv, span, node_id);
v.extend(self.1.get_type_parameter_bounds(astconv, span, node_id).into_iter());
v.extend(self.1.get_type_parameter_bounds(astconv, span, node_id));
v
}
}
@ -516,8 +516,8 @@ impl<'tcx> GetTypeParameterBounds<'tcx> for ast::Generics {
self.ty_params
.iter()
.filter(|p| p.id == node_id)
.flat_map(|p| p.bounds.iter())
.flat_map(|b| predicates_from_bound(astconv, ty, b).into_iter());
.flat_map(|p| &*p.bounds)
.flat_map(|b| predicates_from_bound(astconv, ty, b));
let from_where_clauses =
self.where_clause
@ -528,8 +528,8 @@ impl<'tcx> GetTypeParameterBounds<'tcx> for ast::Generics {
_ => None
})
.filter(|bp| is_param(astconv.tcx(), &bp.bounded_ty, node_id))
.flat_map(|bp| bp.bounds.iter())
.flat_map(|b| predicates_from_bound(astconv, ty, b).into_iter());
.flat_map(|bp| &*bp.bounds)
.flat_map(|b| predicates_from_bound(astconv, ty, b));
from_ty_params.chain(from_where_clauses).collect()
}
@ -1188,7 +1188,7 @@ fn ensure_super_predicates_step(ccx: &CrateCtxt,
let superbounds2 = generics.get_type_parameter_bounds(&ccx.icx(scope), item.span, item.id);
// Combine the two lists to form the complete set of superbounds:
let superbounds = superbounds1.into_iter().chain(superbounds2.into_iter()).collect();
let superbounds = superbounds1.into_iter().chain(superbounds2).collect();
let superpredicates = ty::GenericPredicates {
predicates: VecPerParamSpace::new(superbounds, vec![], vec![])
};
@ -1928,7 +1928,7 @@ fn compute_object_lifetime_default<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
let inline_bounds = from_bounds(ccx, param_bounds);
let where_bounds = from_predicates(ccx, param_id, &where_clause.predicates);
let all_bounds: HashSet<_> = inline_bounds.into_iter()
.chain(where_bounds.into_iter())
.chain(where_bounds)
.collect();
return if all_bounds.len() > 1 {
Some(ty::ObjectLifetimeDefault::Ambiguous)
@ -2026,7 +2026,7 @@ fn predicates_from_bound<'tcx>(astconv: &AstConv<'tcx>,
let pred = conv_poly_trait_ref(astconv, param_ty, tr, &mut projections);
projections.into_iter()
.map(|p| p.as_predicate())
.chain(Some(pred.as_predicate()).into_iter())
.chain(Some(pred.as_predicate()))
.collect()
}
ast::RegionTyParamBound(ref lifetime) => {
@ -2096,7 +2096,7 @@ fn compute_type_scheme_of_foreign_fn_decl<'a, 'tcx>(
abi: abi::Abi)
-> ty::TypeScheme<'tcx>
{
for i in decl.inputs.iter() {
for i in &decl.inputs {
match (*i).pat.node {
ast::PatIdent(_, _, _) => (),
ast::PatWild(ast::PatWildSingle) => (),
@ -2300,7 +2300,7 @@ fn enforce_impl_params_are_constrained<'tcx>(tcx: &ty::ctxt<'tcx>,
ty::TypeTraitItem(ref assoc_ty) => assoc_ty.ty,
ty::ConstTraitItem(..) | ty::MethodTraitItem(..) => None
})
.flat_map(|ty| ctp::parameters_for_type(ty).into_iter())
.flat_map(|ty| ctp::parameters_for_type(ty))
.filter_map(|p| match p {
ctp::Parameter::Type(_) => None,
ctp::Parameter::Region(r) => Some(r),

View File

@ -21,7 +21,7 @@ pub enum Parameter {
pub fn parameters_for_type<'tcx>(ty: Ty<'tcx>) -> Vec<Parameter> {
ty.walk()
.flat_map(|ty| parameters_for_type_shallow(ty).into_iter())
.flat_map(|ty| parameters_for_type_shallow(ty))
.collect()
}
@ -31,7 +31,7 @@ pub fn parameters_for_trait_ref<'tcx>(trait_ref: &ty::TraitRef<'tcx>) -> Vec<Par
let type_parameters =
trait_ref.substs.types.iter()
.flat_map(|ty| parameters_for_type(ty).into_iter());
.flat_map(|ty| parameters_for_type(ty));
region_parameters.extend(type_parameters);

View File

@ -83,17 +83,17 @@ fn try_inline_def(cx: &DocContext, tcx: &ty::ctxt,
}
def::DefStruct(did) => {
record_extern_fqn(cx, did, clean::TypeStruct);
ret.extend(build_impls(cx, tcx, did).into_iter());
ret.extend(build_impls(cx, tcx, did));
clean::StructItem(build_struct(cx, tcx, did))
}
def::DefTy(did, false) => {
record_extern_fqn(cx, did, clean::TypeTypedef);
ret.extend(build_impls(cx, tcx, did).into_iter());
ret.extend(build_impls(cx, tcx, did));
build_type(cx, tcx, did)
}
def::DefTy(did, true) => {
record_extern_fqn(cx, did, clean::TypeEnum);
ret.extend(build_impls(cx, tcx, did).into_iter());
ret.extend(build_impls(cx, tcx, did));
build_type(cx, tcx, did)
}
// Assume that the enum type is reexported next to the variant, and
@ -228,7 +228,7 @@ pub fn build_impls(cx: &DocContext, tcx: &ty::ctxt,
match tcx.inherent_impls.borrow().get(&did) {
None => {}
Some(i) => {
for &did in i.iter() {
for &did in &**i {
build_impl(cx, tcx, did, &mut impls);
}
}
@ -451,7 +451,7 @@ fn build_module(cx: &DocContext, tcx: &ty::ctxt,
decoder::DlDef(def) if vis == ast::Public => {
if !visited.insert(def) { return }
match try_inline_def(cx, tcx, def) {
Some(i) => items.extend(i.into_iter()),
Some(i) => items.extend(i),
None => {}
}
}

View File

@ -190,7 +190,7 @@ impl<'a, 'tcx> Clean<Crate> for visit_ast::RustdocVisitor<'a, 'tcx> {
inner: PrimitiveItem(prim),
});
}
m.items.extend(tmp.into_iter());
m.items.extend(tmp);
}
let src = match cx.input {
@ -382,17 +382,17 @@ impl Clean<Item> for doctree::Module {
let mut items: Vec<Item> = vec![];
items.extend(self.extern_crates.iter().map(|x| x.clean(cx)));
items.extend(self.imports.iter().flat_map(|x| x.clean(cx).into_iter()));
items.extend(self.imports.iter().flat_map(|x| x.clean(cx)));
items.extend(self.structs.iter().map(|x| x.clean(cx)));
items.extend(self.enums.iter().map(|x| x.clean(cx)));
items.extend(self.fns.iter().map(|x| x.clean(cx)));
items.extend(self.foreigns.iter().flat_map(|x| x.clean(cx).into_iter()));
items.extend(self.foreigns.iter().flat_map(|x| x.clean(cx)));
items.extend(self.mods.iter().map(|x| x.clean(cx)));
items.extend(self.typedefs.iter().map(|x| x.clean(cx)));
items.extend(self.statics.iter().map(|x| x.clean(cx)));
items.extend(self.constants.iter().map(|x| x.clean(cx)));
items.extend(self.traits.iter().map(|x| x.clean(cx)));
items.extend(self.impls.iter().flat_map(|x| x.clean(cx).into_iter()));
items.extend(self.impls.iter().flat_map(|x| x.clean(cx)));
items.extend(self.macros.iter().map(|x| x.clean(cx)));
items.extend(self.def_traits.iter().map(|x| x.clean(cx)));
@ -1884,7 +1884,7 @@ impl<'tcx> Clean<Item> for ty::VariantInfo<'tcx> {
StructVariant(VariantStruct {
struct_type: doctree::Plain,
fields_stripped: false,
fields: s.iter().zip(self.args.iter()).map(|(name, ty)| {
fields: s.iter().zip(&self.args).map(|(name, ty)| {
Item {
source: Span::empty(),
name: Some(name.clean(cx)),
@ -2375,7 +2375,7 @@ impl Clean<Vec<Item>> for doctree::Import {
for path in list {
match inline::try_inline(cx, path.node.id(), None) {
Some(items) => {
ret.extend(items.into_iter());
ret.extend(items);
}
None => {
remaining.push(path.clean(cx));

View File

@ -139,7 +139,7 @@ pub fn where_clauses(cx: &DocContext, clauses: Vec<WP>) -> Vec<WP> {
}
pub fn ty_params(mut params: Vec<clean::TyParam>) -> Vec<clean::TyParam> {
for param in params.iter_mut() {
for param in &mut params {
param.bounds = ty_bounds(mem::replace(&mut param.bounds, Vec::new()));
}
return params;
@ -165,7 +165,7 @@ fn trait_is_same_or_supertrait(cx: &DocContext, child: ast::DefId,
} if *s == "Self" => Some(bounds),
_ => None,
}
}).flat_map(|bounds| bounds.iter()).any(|bound| {
}).flat_map(|bounds| bounds).any(|bound| {
let poly_trait = match *bound {
clean::TraitBound(ref t, _) => t,
_ => return false,

View File

@ -2414,7 +2414,7 @@ fn render_impl(w: &mut fmt::Formatter, i: &Impl, link: AssocItemLink,
}
try!(write!(w, "<div class='impl-items'>"));
for trait_item in i.impl_.items.iter() {
for trait_item in &i.impl_.items {
try!(doctraititem(w, trait_item, link, render_header));
}

View File

@ -75,7 +75,7 @@ pub fn run(input: &str,
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let mut cfg = config::build_configuration(&sess);
cfg.extend(config::parse_cfgspecs(cfgs).into_iter());
cfg.extend(config::parse_cfgspecs(cfgs));
let krate = driver::phase_1_parse_input(&sess, cfg, &input);
let krate = driver::phase_2_configure_and_expand(&sess, krate,
"rustdoc-test", None)
@ -129,12 +129,13 @@ fn scrape_test_config(krate: &::syntax::ast::Crate) -> TestOptions {
attrs: Vec::new(),
};
let attrs = krate.attrs.iter().filter(|a| a.check_name("doc"))
let attrs = krate.attrs.iter()
.filter(|a| a.check_name("doc"))
.filter_map(|a| a.meta_item_list())
.flat_map(|l| l.iter())
.flat_map(|l| l)
.filter(|a| a.check_name("test"))
.filter_map(|a| a.meta_item_list())
.flat_map(|l| l.iter());
.flat_map(|l| l);
for attr in attrs {
if attr.check_name("no_crate_inject") {
opts.no_crate_inject = true;
@ -239,7 +240,7 @@ fn runtest(test: &str, cratename: &str, libs: SearchPaths,
let path = env::var_os(var).unwrap_or(OsString::new());
let mut path = env::split_paths(&path).collect::<Vec<_>>();
path.insert(0, libdir.clone());
env::join_paths(path.iter()).unwrap()
env::join_paths(path).unwrap()
};
cmd.env(var, &newpath);

View File

@ -226,7 +226,7 @@ impl AsciiExt for [u8] {
#[inline]
fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
self.len() == other.len() &&
self.iter().zip(other.iter()).all(|(a, b)| {
self.iter().zip(other).all(|(a, b)| {
a.eq_ignore_ascii_case(b)
})
}

View File

@ -906,7 +906,7 @@ impl<K, V, S> HashMap<K, V, S>
/// *val *= 2;
/// }
///
/// for (key, val) in map.iter() {
/// for (key, val) in &map {
/// println!("key: {} val: {}", key, val);
/// }
/// ```

View File

@ -647,7 +647,7 @@ impl<'a, 'b, T, S> BitOr<&'b HashSet<T, S>> for &'a HashSet<T, S>
///
/// let mut i = 0;
/// let expected = [1, 2, 3, 4, 5];
/// for x in set.iter() {
/// for x in &set {
/// assert!(expected.contains(x));
/// i += 1;
/// }
@ -679,7 +679,7 @@ impl<'a, 'b, T, S> BitAnd<&'b HashSet<T, S>> for &'a HashSet<T, S>
///
/// let mut i = 0;
/// let expected = [2, 3];
/// for x in set.iter() {
/// for x in &set {
/// assert!(expected.contains(x));
/// i += 1;
/// }
@ -711,7 +711,7 @@ impl<'a, 'b, T, S> BitXor<&'b HashSet<T, S>> for &'a HashSet<T, S>
///
/// let mut i = 0;
/// let expected = [1, 2, 4, 5];
/// for x in set.iter() {
/// for x in &set {
/// assert!(expected.contains(x));
/// i += 1;
/// }
@ -743,7 +743,7 @@ impl<'a, 'b, T, S> Sub<&'b HashSet<T, S>> for &'a HashSet<T, S>
///
/// let mut i = 0;
/// let expected = [1, 2];
/// for x in set.iter() {
/// for x in &set {
/// assert!(expected.contains(x));
/// i += 1;
/// }
@ -838,7 +838,7 @@ impl<T, S> IntoIterator for HashSet<T, S>
/// let v: Vec<String> = set.into_iter().collect();
///
/// // Will print in an arbitrary order.
/// for x in v.iter() {
/// for x in &v {
/// println!("{}", x);
/// }
/// ```

View File

@ -252,6 +252,7 @@
//! contents by-value. This is great when the collection itself is no longer
//! needed, and the values are needed elsewhere. Using `extend` with `into_iter`
//! is the main way that contents of one collection are moved into another.
//! `extend` automatically calls `into_iter`, and takes any `I: IntoIterator<Item = T>`.
//! Calling `collect` on an iterator itself is also a great way to convert one
//! collection into another. Both of these methods should internally use the
//! capacity management tools discussed in the previous section to do this as
@ -260,7 +261,7 @@
//! ```
//! let mut vec1 = vec![1, 2, 3, 4];
//! let vec2 = vec![10, 20, 30, 40];
//! vec1.extend(vec2.into_iter());
//! vec1.extend(vec2);
//! ```
//!
//! ```
@ -339,7 +340,7 @@
//! assert_eq!(count.get(&'s'), Some(&8));
//!
//! println!("Number of occurrences of each character");
//! for (char, count) in count.iter() {
//! for (char, count) in &count {
//! println!("{}: {}", char, count);
//! }
//! ```
@ -362,7 +363,7 @@
//! // Our clients.
//! let mut blood_alcohol = BTreeMap::new();
//!
//! for id in orders.into_iter() {
//! for id in orders {
//! // If this is the first time we've seen this customer, initialize them
//! // with no blood alcohol. Otherwise, just retrieve them.
//! let person = blood_alcohol.entry(id).or_insert(Person{id: id, blood_alcohol: 0.0});

View File

@ -294,7 +294,7 @@ mod dl {
let result = match filename {
Some(filename) => {
let filename_str: Vec<_> =
filename.encode_wide().chain(Some(0).into_iter()).collect();
filename.encode_wide().chain(Some(0)).collect();
let result = unsafe {
LoadLibraryW(filename_str.as_ptr() as *const libc::c_void)
};

View File

@ -365,7 +365,7 @@ pub struct JoinPathsError {
/// if let Some(path) = env::var_os("PATH") {
/// let mut paths = env::split_paths(&path).collect::<Vec<_>>();
/// paths.push(PathBuf::from("/home/xyz/bin"));
/// let new_path = env::join_paths(paths.iter()).unwrap();
/// let new_path = env::join_paths(paths).unwrap();
/// env::set_var("PATH", &new_path);
/// }
/// ```

View File

@ -78,7 +78,7 @@ pub fn repeat(byte: u8) -> Repeat { Repeat { byte: byte } }
#[stable(feature = "rust1", since = "1.0.0")]
impl Read for Repeat {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
for slot in buf.iter_mut() {
for slot in &mut *buf {
*slot = self.byte;
}
Ok(buf.len())

View File

@ -63,7 +63,7 @@ impl<'a> Parser<'a> {
// Return result of first successful parser
fn read_or<T>(&mut self, parsers: &mut [Box<FnMut(&mut Parser) -> Option<T> + 'static>])
-> Option<T> {
for pf in parsers.iter_mut() {
for pf in parsers {
match self.read_atomically(|p: &mut Parser| pf(p)) {
Some(r) => return Some(r),
None => {}

View File

@ -368,7 +368,7 @@ impl fmt::Debug for File {
}
pub fn to_utf16(s: &Path) -> Vec<u16> {
s.as_os_str().encode_wide().chain(Some(0).into_iter()).collect()
s.as_os_str().encode_wide().chain(Some(0)).collect()
}
impl FileAttr {

View File

@ -615,7 +615,7 @@ pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool {
// are two arrays of segments equal when compared unhygienically?
pub fn segments_name_eq(a : &[ast::PathSegment], b : &[ast::PathSegment]) -> bool {
a.len() == b.len() &&
a.iter().zip(b.iter()).all(|(s, t)| {
a.iter().zip(b).all(|(s, t)| {
s.identifier.name == t.identifier.name &&
// FIXME #7743: ident -> name problems in lifetime comparison?
// can types contain idents?

View File

@ -414,7 +414,7 @@ fn find_stability_generic<'a,
let mut feature = None;
let mut since = None;
let mut reason = None;
for meta in metas.iter() {
for meta in metas {
if meta.name() == "feature" {
match meta.value_str() {
Some(v) => feature = Some(v),

View File

@ -595,7 +595,7 @@ fn highlight_lines(err: &mut EmitterWriter,
let display_line_strings = &line_strings[..display_lines];
// Print the offending lines
for (line_info, line) in display_line_infos.iter().zip(display_line_strings.iter()) {
for (line_info, line) in display_line_infos.iter().zip(display_line_strings) {
try!(write!(&mut err.dst, "{}:{} {}\n",
fm.name,
line_info.line_index + 1,

View File

@ -549,10 +549,10 @@ impl<'a> TraitDef<'a> {
.map(|ty_param| ty_param.ident.name)
.collect();
for field_ty in field_tys.into_iter() {
for field_ty in field_tys {
let tys = find_type_parameters(&*field_ty, &ty_param_names);
for ty in tys.into_iter() {
for ty in tys {
let mut bounds: Vec<_> = self.additional_bounds.iter().map(|p| {
cx.typarambound(p.to_path(cx, self.span, type_ident, generics))
}).collect();
@ -672,7 +672,7 @@ impl<'a> TraitDef<'a> {
generics: &Generics) -> P<ast::Item> {
let mut field_tys = Vec::new();
for variant in enum_def.variants.iter() {
for variant in &enum_def.variants {
match variant.node.kind {
ast::VariantKind::TupleVariantKind(ref args) => {
field_tys.extend(args.iter()
@ -967,7 +967,7 @@ impl<'a> MethodDef<'a> {
// make a series of nested matches, to destructure the
// structs. This is actually right-to-left, but it shouldn't
// matter.
for (arg_expr, pat) in self_args.iter().zip(patterns.iter()) {
for (arg_expr, pat) in self_args.iter().zip(patterns) {
body = cx.expr_match(trait_.span, arg_expr.clone(),
vec!( cx.arm(trait_.span, vec!(pat.clone()), body) ))
}
@ -1226,7 +1226,7 @@ impl<'a> MethodDef<'a> {
let target_type_name =
find_repr_type_name(&cx.parse_sess.span_diagnostic, type_attrs);
for (&ident, self_arg) in vi_idents.iter().zip(self_args.iter()) {
for (&ident, self_arg) in vi_idents.iter().zip(&self_args) {
let path = vec![cx.ident_of_std("core"),
cx.ident_of("intrinsics"),
cx.ident_of("discriminant_value")];
@ -1465,7 +1465,7 @@ impl<'a> TraitDef<'a> {
// struct_type is definitely not Unknown, since struct_def.fields
// must be nonempty to reach here
let pattern = if struct_type == Record {
let field_pats = subpats.into_iter().zip(ident_expr.iter())
let field_pats = subpats.into_iter().zip(&ident_expr)
.map(|(pat, &(_, id, _, _))| {
// id is guaranteed to be Some
codemap::Spanned {

View File

@ -183,7 +183,7 @@ pub fn expand_expr(e: P<ast::Expr>, fld: &mut MacroExpander) -> P<ast::Expr> {
let mut arms = Vec::with_capacity(else_if_arms.len() + 2);
arms.push(pat_arm);
arms.extend(else_if_arms.into_iter());
arms.extend(else_if_arms);
arms.push(else_arm);
let match_expr = fld.cx.expr(span,
@ -779,7 +779,7 @@ fn expand_non_macro_stmt(Spanned {node, span: stmt_span}: Stmt, fld: &mut MacroE
};
// add them to the existing pending renames:
fld.cx.syntax_env.info().pending_renames
.extend(new_pending_renames.into_iter());
.extend(new_pending_renames);
Local {
id: id,
ty: expanded_ty,

View File

@ -49,7 +49,7 @@ pub mod rt {
impl<T: ToTokens> ToTokens for Vec<T> {
fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
self.iter().flat_map(|t| t.to_tokens(cx).into_iter()).collect()
self.iter().flat_map(|t| t.to_tokens(cx)).collect()
}
}
@ -387,7 +387,7 @@ pub fn expand_quote_matcher(cx: &mut ExtCtxt,
-> Box<base::MacResult+'static> {
let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
let mut vector = mk_stmts_let(cx, sp);
vector.extend(statements_mk_tts(cx, &tts[..], true).into_iter());
vector.extend(statements_mk_tts(cx, &tts[..], true));
let block = cx.expr_block(
cx.block_all(sp,
vector,
@ -593,7 +593,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<ast::Stmt>> {
match *tt {
ast::TtToken(sp, SubstNt(ident, _)) => {
// tt.extend($ident.to_tokens(ext_cx).into_iter())
// tt.extend($ident.to_tokens(ext_cx))
let e_to_toks =
cx.expr_method_call(sp,
@ -633,8 +633,8 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<a
ast::TtDelimited(_, ref delimed) => {
statements_mk_tt(cx, &delimed.open_tt(), matcher).into_iter()
.chain(delimed.tts.iter()
.flat_map(|tt| statements_mk_tt(cx, tt, matcher).into_iter()))
.chain(statements_mk_tt(cx, &delimed.close_tt(), matcher).into_iter())
.flat_map(|tt| statements_mk_tt(cx, tt, matcher)))
.chain(statements_mk_tt(cx, &delimed.close_tt(), matcher))
.collect()
},
ast::TtSequence(sp, ref seq) => {
@ -646,7 +646,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<a
let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
let mut tts_stmts = vec![stmt_let_tt];
tts_stmts.extend(statements_mk_tts(cx, &seq.tts[..], matcher).into_iter());
tts_stmts.extend(statements_mk_tts(cx, &seq.tts[..], matcher));
let e_tts = cx.expr_block(cx.block(sp, tts_stmts,
Some(cx.expr_ident(sp, id_ext("tt")))));
let e_separator = match seq.separator {
@ -748,7 +748,7 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<P<ast::Stmt>> {
fn statements_mk_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], matcher: bool) -> Vec<P<ast::Stmt>> {
let mut ss = Vec::new();
for tt in tts {
ss.extend(statements_mk_tt(cx, tt, matcher).into_iter());
ss.extend(statements_mk_tt(cx, tt, matcher));
}
ss
}
@ -758,7 +758,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
let mut vector = mk_stmts_let(cx, sp);
vector.extend(statements_mk_tts(cx, &tts[..], false).into_iter());
vector.extend(statements_mk_tts(cx, &tts[..], false));
let block = cx.expr_block(
cx.block_all(sp,
vector,
@ -780,7 +780,7 @@ fn expand_wrapper(cx: &ExtCtxt,
// make item: `use ...;`
let path = path.iter().map(|s| s.to_string()).collect();
cx.stmt_item(sp, cx.item_use_glob(sp, ast::Inherited, ids_ext(path)))
}).chain(Some(stmt_let_ext_cx).into_iter()).collect();
}).chain(Some(stmt_let_ext_cx)).collect();
cx.expr_block(cx.block_all(sp, stmts, Some(expr)))
}

View File

@ -385,7 +385,7 @@ impl<'a> Context<'a> {
return;
}
}
for &(ref n, ref ty) in self.plugin_attributes.iter() {
for &(ref n, ref ty) in self.plugin_attributes {
if &*n == name {
// Plugins can't gate attributes, so we don't check for it
// unlike the code above; we only use this loop to

View File

@ -353,7 +353,7 @@ pub fn noop_fold_view_path<T: Folder>(view_path: P<ViewPath>, fld: &mut T) -> P<
}
pub fn fold_attrs<T: Folder>(attrs: Vec<Attribute>, fld: &mut T) -> Vec<Attribute> {
attrs.into_iter().flat_map(|x| fld.fold_attribute(x).into_iter()).collect()
attrs.into_iter().flat_map(|x| fld.fold_attribute(x)).collect()
}
pub fn noop_fold_arm<T: Folder>(Arm {attrs, pats, guard, body}: Arm, fld: &mut T) -> Arm {

View File

@ -1060,7 +1060,7 @@ impl<'a> Parser<'a> {
};
let all_bounds =
Some(TraitTyParamBound(poly_trait_ref, TraitBoundModifier::None)).into_iter()
.chain(other_bounds.into_vec().into_iter())
.chain(other_bounds.into_vec())
.collect();
Ok(ast::TyPolyTraitRef(all_bounds))
}
@ -2058,7 +2058,7 @@ impl<'a> Parser<'a> {
|p| Ok(try!(p.parse_expr_nopanic()))
));
let mut exprs = vec!(first_expr);
exprs.extend(remaining_exprs.into_iter());
exprs.extend(remaining_exprs);
ex = ExprVec(exprs);
} else {
// Vector with one element.
@ -4423,7 +4423,7 @@ impl<'a> Parser<'a> {
(name, ConstImplItem(typ, expr))
} else {
let (name, inner_attrs, node) = try!(self.parse_impl_method(vis));
attrs.extend(inner_attrs.into_iter());
attrs.extend(inner_attrs);
(name, node)
};
@ -5068,7 +5068,7 @@ impl<'a> Parser<'a> {
let abi = opt_abi.unwrap_or(abi::C);
attrs.extend(self.parse_inner_attributes().into_iter());
attrs.extend(self.parse_inner_attributes());
let mut foreign_items = vec![];
while let Some(item) = try!(self.parse_foreign_item()) {
@ -5244,7 +5244,7 @@ impl<'a> Parser<'a> {
try!(self.bump());
let mut attrs = attrs;
mem::swap(&mut item.attrs, &mut attrs);
item.attrs.extend(attrs.into_iter());
item.attrs.extend(attrs);
return Ok(Some(P(item)));
}
None => {}

Some files were not shown because too many files have changed in this diff Show More