Replace push loops with collect() and extend() where possible

ljedrz 2018-07-26 17:11:10 +02:00
parent a5c2d0fffa
commit 59c8a279da
28 changed files with 101 additions and 150 deletions
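
Every hunk below applies the same mechanical change. As a standalone sketch (not taken from any of the changed files), a push loop and its collect()/extend() equivalents look like this; both collect() and extend() can use the iterator's size hint to reserve capacity up front:

    fn main() {
        let words = ["push", "loops", "begone"];

        // Push loop: create an empty Vec, then grow it element by element.
        let mut upper_loop = Vec::new();
        for w in &words {
            upper_loop.push(w.to_uppercase());
        }

        // Same result, collected in one expression.
        let upper_collect: Vec<String> = words.iter().map(|w| w.to_uppercase()).collect();

        // extend() covers the case where the Vec already has contents.
        let mut all = vec!["FIRST".to_string()];
        all.extend(words.iter().map(|w| w.to_uppercase()));

        assert_eq!(upper_loop, upper_collect);
        assert_eq!(all.len(), 4);
    }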

View File

@@ -92,10 +92,7 @@ pub fn push_exe_path(mut buf: PathBuf, components: &[&str]) -> PathBuf {
file.push_str(".exe");
}
for c in components {
buf.push(c);
}
buf.extend(components);
buf.push(file);
buf

View File

@@ -567,12 +567,12 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
fn add_returning_edge(&mut self,
_from_expr: &hir::Expr,
from_index: CFGIndex) {
let mut data = CFGEdgeData {
exiting_scopes: vec![],
let data = CFGEdgeData {
exiting_scopes: self.loop_scopes.iter()
.rev()
.map(|&LoopScope { loop_id: id, .. }| id)
.collect()
};
for &LoopScope { loop_id: id, .. } in self.loop_scopes.iter().rev() {
data.exiting_scopes.push(id);
}
self.graph.add_edge(from_index, self.fn_exit, data);
}
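
The CFG hunk above also drops a `mut` binding: the struct field is filled at construction time instead of being pushed into afterwards. A minimal sketch of the same shape, using hypothetical Edge/Scope types rather than the rustc ones:

    // Hypothetical Edge/Scope types, not the rustc ones.
    struct Scope { id: u32 }
    struct Edge { exiting_scopes: Vec<u32> }

    fn make_edge(scopes: &[Scope]) -> Edge {
        Edge {
            // innermost scope first, mirroring the .rev() in the hunk above
            exiting_scopes: scopes.iter().rev().map(|s| s.id).collect(),
        }
    }

    fn main() {
        let scopes = vec![Scope { id: 1 }, Scope { id: 2 }, Scope { id: 3 }];
        assert_eq!(make_edge(&scopes).exiting_scopes, vec![3, 2, 1]);
    }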

View File

@@ -151,13 +151,12 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
debug!("process_registered_region_obligations()");
// pull out the region obligations with the given `body_id` (leaving the rest)
let mut my_region_obligations = Vec::with_capacity(self.region_obligations.borrow().len());
{
let my_region_obligations = {
let mut r_o = self.region_obligations.borrow_mut();
for (_, obligation) in r_o.drain_filter(|(ro_body_id, _)| *ro_body_id == body_id) {
my_region_obligations.push(obligation);
}
}
let my_r_o = r_o.drain_filter(|(ro_body_id, _)| *ro_body_id == body_id)
.map(|(_, obligation)| obligation).collect::<Vec<_>>();
my_r_o
};
let outlives = &mut TypeOutlives::new(
self,
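
The hunk above drains the matching obligations and collects them in a single expression. Vec::drain_filter is a nightly-only API, so this standalone sketch approximates the same split on stable Rust with Iterator::partition (hypothetical data, not the rustc types):

    fn main() {
        let body_id: u32 = 7;
        let obligations: Vec<(u32, &str)> =
            vec![(7, "'a: 'b"), (3, "'c: 'd"), (7, "'e: 'f")];

        // Entries whose body id matches are split out; the rest are kept.
        let (mine, rest): (Vec<_>, Vec<_>) =
            obligations.into_iter().partition(|&(id, _)| id == body_id);

        let my_obligations: Vec<&str> = mine.into_iter().map(|(_, o)| o).collect();
        assert_eq!(my_obligations, vec!["'a: 'b", "'e: 'f"]);
        assert_eq!(rest, vec![(3, "'c: 'd")]);
    }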

View File

@@ -367,9 +367,7 @@ impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a,
// We need only trait impls here, not inherent impls, and only non-exported ones
if let hir::ItemKind::Impl(.., Some(ref trait_ref), _, ref impl_item_refs) = item.node {
if !self.access_levels.is_reachable(item.id) {
for impl_item_ref in impl_item_refs {
self.worklist.push(impl_item_ref.id.node_id);
}
self.worklist.extend(impl_item_refs.iter().map(|r| r.id.node_id));
let trait_def_id = match trait_ref.path.def {
Def::Trait(def_id) => def_id,
@@ -426,9 +424,7 @@ fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) ->
// If other crates link to us, they're going to expect to be able to
// use the lang items, so we need to be sure to mark them as
// exported.
for (id, _) in &access_levels.map {
reachable_context.worklist.push(*id);
}
reachable_context.worklist.extend(access_levels.map.iter().map(|(id, _)| *id));
for item in tcx.lang_items().items().iter() {
if let Some(did) = *item {
if let Some(node_id) = tcx.hir.as_local_node_id(did) {

View File

@@ -64,9 +64,7 @@ impl<'a, 'tcx> Iterator for Preorder<'a, 'tcx> {
let data = &self.mir[idx];
if let Some(ref term) = data.terminator {
for &succ in term.successors() {
self.worklist.push(succ);
}
self.worklist.extend(term.successors());
}
return Some((idx, data));

View File

@@ -899,9 +899,7 @@ macro_rules! options {
-> bool {
match v {
Some(s) => {
for s in s.split_whitespace() {
slot.push(s.to_string());
}
slot.extend(s.split_whitespace().map(|s| s.to_string()));
true
},
None => false,

View File

@@ -438,9 +438,9 @@ fn to_pretty_impl_header(tcx: TyCtxt, impl_def_id: DefId) -> Option<String> {
}
pretty_predicates.push(p.to_string());
}
for ty in types_without_default_bounds {
pretty_predicates.push(format!("{}: ?Sized", ty));
}
pretty_predicates.extend(
types_without_default_bounds.iter().map(|ty| format!("{}: ?Sized", ty))
);
if !pretty_predicates.is_empty() {
write!(w, "\n where {}", pretty_predicates.join(", ")).unwrap();
}

View File

@@ -152,9 +152,7 @@ fn path_relative_from(path: &Path, base: &Path) -> Option<PathBuf> {
(Some(_), Some(b)) if b == Component::ParentDir => return None,
(Some(a), Some(_)) => {
comps.push(Component::ParentDir);
for _ in itb {
comps.push(Component::ParentDir);
}
comps.extend(itb.map(|_| Component::ParentDir));
comps.push(a);
comps.extend(ita.by_ref());
break;

View File

@@ -39,6 +39,7 @@ use rustc::util::common::path2cstr;
use libc::{c_uint, c_longlong};
use std::ffi::CString;
use std::fmt::Write;
use std::iter;
use std::ptr;
use std::path::{Path, PathBuf};
use syntax::ast;
@@ -364,18 +365,16 @@ fn subroutine_type_metadata<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
&signature,
);
let mut signature_metadata: Vec<DIType> = Vec::with_capacity(signature.inputs().len() + 1);
// return type
signature_metadata.push(match signature.output().sty {
ty::TyTuple(ref tys) if tys.is_empty() => ptr::null_mut(),
_ => type_metadata(cx, signature.output(), span)
});
// regular arguments
for &argument_type in signature.inputs() {
signature_metadata.push(type_metadata(cx, argument_type, span));
}
let signature_metadata: Vec<DIType> = iter::once(
// return type
match signature.output().sty {
ty::TyTuple(ref tys) if tys.is_empty() => ptr::null_mut(),
_ => type_metadata(cx, signature.output(), span)
}
).chain(
// regular arguments
signature.inputs().iter().map(|argument_type| type_metadata(cx, argument_type, span))
).collect();
return_if_metadata_created_in_meantime!(cx, unique_type_id);
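
The debuginfo hunk above replaces the push calls with iter::once(..).chain(..).collect(), building a Vec whose first element is computed differently from the rest. A standalone sketch of that shape, with made-up data:

    use std::iter;

    fn main() {
        let arg_bits = [16_u32, 32, 64];

        // First element computed one way, the rest mapped from a slice.
        let parts: Vec<String> = iter::once("ret: ()".to_string())
            .chain(arg_bits.iter().map(|bits| format!("arg: u{}", bits)))
            .collect();

        assert_eq!(parts, vec!["ret: ()", "arg: u16", "arg: u32", "arg: u64"]);
    }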

View File

@@ -352,9 +352,10 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
if sig.abi == Abi::RustCall && !sig.inputs().is_empty() {
if let ty::TyTuple(args) = sig.inputs()[sig.inputs().len() - 1].sty {
for &argument_type in args {
signature.push(type_metadata(cx, argument_type, syntax_pos::DUMMY_SP));
}
signature.extend(
args.iter().map(|argument_type|
type_metadata(cx, argument_type, syntax_pos::DUMMY_SP))
);
}
}

View File

@@ -1588,10 +1588,7 @@ pub fn in_rustc_thread<F, R>(f: F) -> Result<R, Box<dyn Any + Send>>
/// debugging, since some ICEs only happens with non-default compiler flags
/// (and the users don't always report them).
fn extra_compiler_flags() -> Option<(Vec<String>, bool)> {
let mut args = Vec::new();
for arg in env::args_os() {
args.push(arg.to_string_lossy().to_string());
}
let args = env::args_os().map(|arg| arg.to_string_lossy().to_string()).collect::<Vec<_>>();
// Avoid printing help because of empty args. This can suggest the compiler
// itself is not the program root (consider RLS).

View File

@@ -204,10 +204,9 @@ pub fn write_counts(count_file: &mut File, counts: &mut HashMap<String,QueryMetr
use rustc::util::common::duration_to_secs_str;
use std::cmp::Reverse;
let mut data = vec![];
for (ref cons, ref qm) in counts.iter() {
data.push((cons.clone(), qm.count.clone(), qm.dur_total.clone(), qm.dur_self.clone()));
};
let mut data = counts.iter().map(|(ref cons, ref qm)|
(cons.clone(), qm.count.clone(), qm.dur_total.clone(), qm.dur_self.clone())
).collect::<Vec<_>>();
data.sort_by_key(|k| Reverse(k.3));
for (cons, count, dur_total, dur_self) in data {
write!(count_file, "{}, {}, {}, {}\n",

View File

@@ -275,12 +275,8 @@ pub fn get_param(llfn: ValueRef, index: c_uint) -> ValueRef {
fn get_params(llfn: ValueRef) -> Vec<ValueRef> {
unsafe {
let num_params = LLVMCountParams(llfn);
let mut params = Vec::with_capacity(num_params as usize);
for idx in 0..num_params {
params.push(LLVMGetParam(llfn, idx));
}
params
(0..num_params).map(|idx| LLVMGetParam(llfn, idx)).collect()
}
}

View File

@@ -406,10 +406,9 @@ impl<'a, 'tcx> Inliner<'a, 'tcx> {
local_map.push(idx);
}
for p in callee_mir.promoted.iter().cloned() {
let idx = caller_mir.promoted.push(p);
promoted_map.push(idx);
}
promoted_map.extend(
callee_mir.promoted.iter().cloned().map(|p| caller_mir.promoted.push(p))
);
// If the call is something like `a[*i] = f(i)`, where
// `i : &mut usize`, then just duplicating the `a[*i]`
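
In the inliner hunk above, the closure passed to extend() has a side effect: each promoted item is pushed into the caller's collection, and the value that gets collected is the index it received. A sketch of that pattern with plain Vecs standing in for the MIR IndexVecs:

    fn main() {
        let callee_promoted = vec!["p0", "p1"];
        let mut caller_promoted = vec!["existing"];
        let mut promoted_map = Vec::new();

        // Each item is appended to the caller's collection as a side effect;
        // the collected value is the index it landed at.
        promoted_map.extend(callee_promoted.iter().cloned().map(|p| {
            caller_promoted.push(p);
            caller_promoted.len() - 1
        }));

        assert_eq!(promoted_map, vec![1, 2]);
        assert_eq!(caller_promoted, vec!["existing", "p0", "p1"]);
    }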

View File

@@ -3831,9 +3831,9 @@ impl<'a> Resolver<'a> {
}
// Add primitive types to the mix
if filter_fn(Def::PrimTy(TyBool)) {
for (name, _) in &self.primitive_type_table.primitive_types {
names.push(*name);
}
names.extend(
self.primitive_type_table.primitive_types.iter().map(|(name, _)| name)
)
}
} else {
// Search in module.

View File

@@ -1318,14 +1318,13 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> {
};
// Make a comma-separated list of names of imported modules.
let mut names = vec![];
let glob_map = &self.save_ctxt.analysis.glob_map;
let glob_map = glob_map.as_ref().unwrap();
if glob_map.contains_key(&id) {
for n in glob_map.get(&id).unwrap() {
names.push(n.to_string());
}
}
let names = if glob_map.contains_key(&id) {
glob_map.get(&id).unwrap().iter().map(|n| n.to_string()).collect()
} else {
Vec::new()
};
let sub_span = self.span.sub_span_of_token(use_tree.span,
token::BinOp(token::Star));

View File

@@ -962,19 +962,21 @@ fn generics_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
&["<closure_kind>", "<closure_signature>"][..]
};
for (i, &arg) in dummy_args.iter().enumerate() {
params.push(ty::GenericParamDef {
index: type_start + i as u32,
name: Symbol::intern(arg).as_interned_str(),
def_id,
pure_wrt_drop: false,
kind: ty::GenericParamDefKind::Type {
has_default: false,
object_lifetime_default: rl::Set1::Empty,
synthetic: None,
},
});
}
params.extend(
dummy_args.iter().enumerate().map(|(i, &arg)|
ty::GenericParamDef {
index: type_start + i as u32,
name: Symbol::intern(arg).as_interned_str(),
def_id,
pure_wrt_drop: false,
kind: ty::GenericParamDefKind::Type {
has_default: false,
object_lifetime_default: rl::Set1::Empty,
synthetic: None,
},
}
)
);
tcx.with_freevars(node_id, |fv| {
params.extend(fv.iter().zip((dummy_args.len() as u32)..).map(|(_, i)| {
@@ -1651,10 +1653,7 @@ fn explicit_predicates_of<'a, 'tcx>(
&mut projections);
predicates.push(trait_ref.to_predicate());
for projection in &projections {
predicates.push(projection.to_predicate());
}
predicates.extend(projections.iter().map(|p| p.to_predicate()));
}
&hir::GenericBound::Outlives(ref lifetime) => {

View File

@@ -203,9 +203,7 @@ pub fn run_core(search_paths: SearchPaths,
intra_link_resolution_failure_name.to_owned(),
missing_docs.to_owned()];
for (lint, _) in &cmd_lints {
whitelisted_lints.push(lint.clone());
}
whitelisted_lints.extend(cmd_lints.iter().map(|(lint, _)| lint).cloned());
let lints = lint::builtin::HardwiredLints.get_lints()
.into_iter()

View File

@@ -722,9 +722,7 @@ where R: 'static + Send,
},
_ => continue,
};
for p in value.as_str().split_whitespace() {
sink.push(p.to_string());
}
sink.extend(value.as_str().split_whitespace().map(|p| p.to_string()));
}
if attr.is_word() && name == Some("document_private_items") {

View File

@@ -1364,9 +1364,7 @@ impl Stack {
// Used by Parser to insert StackElement::Key elements at the top of the stack.
fn push_key(&mut self, key: string::String) {
self.stack.push(InternalKey(self.str_buffer.len() as u16, key.len() as u16));
for c in key.as_bytes() {
self.str_buffer.push(*c);
}
self.str_buffer.extend(key.as_bytes());
}
// Used by Parser to insert StackElement::Index elements at the top of the stack.
@@ -2212,9 +2210,7 @@ impl ::Decoder for Decoder {
};
match o.remove(&"fields".to_string()) {
Some(Json::Array(l)) => {
for field in l.into_iter().rev() {
self.stack.push(field);
}
self.stack.extend(l.into_iter().rev());
},
Some(val) => {
return Err(ExpectedError("Array".to_owned(), format!("{}", val)))
@@ -2346,9 +2342,7 @@
{
let array = expect!(self.pop(), Array)?;
let len = array.len();
for v in array.into_iter().rev() {
self.stack.push(v);
}
self.stack.extend(array.into_iter().rev());
f(self, len)
}

View File

@@ -13,6 +13,7 @@ use ffi::OsStr;
use os::unix::ffi::OsStrExt;
use fmt;
use io::{self, Error, ErrorKind};
use iter;
use libc::{EXIT_SUCCESS, EXIT_FAILURE};
use path::{Path, PathBuf};
use sys::fd::FileDesc;
@@ -296,11 +297,11 @@ impl Command {
t!(callback());
}
let mut args: Vec<[usize; 2]> = Vec::new();
args.push([self.program.as_ptr() as usize, self.program.len()]);
for arg in self.args.iter() {
args.push([arg.as_ptr() as usize, arg.len()]);
}
let args: Vec<[usize; 2]> = iter::once(
[self.program.as_ptr() as usize, self.program.len()]
).chain(
self.args.iter().map(|arg| [arg.as_ptr() as usize, arg.len()])
).collect();
self.env.apply();

View File

@@ -487,9 +487,7 @@ fn make_command_line(prog: &OsStr, args: &[OsString]) -> io::Result<Vec<u16>> {
} else {
if x == '"' as u16 {
// Add n+1 backslashes to total 2n+1 before internal '"'.
for _ in 0..(backslashes+1) {
cmd.push('\\' as u16);
}
cmd.extend((0..(backslashes + 1)).map(|_| '\\' as u16));
}
backslashes = 0;
}
@@ -498,9 +496,7 @@
if quote {
// Add n backslashes to total 2n before ending '"'.
for _ in 0..backslashes {
cmd.push('\\' as u16);
}
cmd.extend((0..backslashes).map(|_| '\\' as u16));
cmd.push('"' as u16);
}
Ok(())
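
Both hunks above append the same element a computed number of times. As a standalone sketch, (0..n).map(|_| x) and iter::repeat(x).take(n) feed extend() equally well:

    use std::iter;

    fn main() {
        let backslashes: usize = 3;

        let mut cmd: Vec<u16> = Vec::new();
        cmd.extend((0..backslashes).map(|_| '\\' as u16));
        cmd.push('"' as u16);

        let mut cmd2: Vec<u16> = Vec::new();
        cmd2.extend(iter::repeat('\\' as u16).take(backslashes));
        cmd2.push('"' as u16);

        assert_eq!(cmd, cmd2);
    }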

View File

@@ -500,10 +500,7 @@ impl Pat {
PatKind::Slice(pats, None, _) if pats.len() == 1 =>
pats[0].to_ty().map(TyKind::Slice)?,
PatKind::Tuple(pats, None) => {
let mut tys = Vec::new();
for pat in pats {
tys.push(pat.to_ty()?);
}
let tys = pats.iter().map(|pat| pat.to_ty()).collect::<Option<Vec<_>>>()?;
TyKind::Tup(tys)
}
_ => return None,
@@ -949,10 +946,7 @@ impl Expr {
ExprKind::Array(exprs) if exprs.len() == 1 =>
exprs[0].to_ty().map(TyKind::Slice)?,
ExprKind::Tup(exprs) => {
let mut tys = Vec::new();
for expr in exprs {
tys.push(expr.to_ty()?);
}
let tys = exprs.iter().map(|expr| expr.to_ty()).collect::<Option<Vec<_>>>()?;
TyKind::Tup(tys)
}
ExprKind::Binary(binop, lhs, rhs) if binop.node == BinOpKind::Add =>
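
The two hunks above rely on collecting an iterator of Option<T> directly into Option<Vec<T>>: the collect yields None as soon as any element is None (the same works for Result). A standalone sketch:

    fn to_numbers(words: &[&str]) -> Option<Vec<i64>> {
        // One failed parse makes the whole collect yield None.
        words.iter().map(|w| w.parse::<i64>().ok()).collect()
    }

    fn main() {
        assert_eq!(to_numbers(&["1", "2", "3"]), Some(vec![1, 2, 3]));
        assert_eq!(to_numbers(&["1", "two", "3"]), None);
    }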

View File

@@ -265,9 +265,7 @@ impl<'a> StringReader<'a> {
m.push(c);
}
_ => {
for c in c.escape_default() {
m.push(c);
}
m.extend(c.escape_default());
}
}
}

View File

@@ -131,8 +131,8 @@ fn decodable_substructure(cx: &mut ExtCtxt,
StaticEnum(_, ref fields) => {
let variant = cx.ident_of("i");
let mut arms = Vec::new();
let mut variants = Vec::new();
let mut arms = Vec::with_capacity(fields.len() + 1);
let mut variants = Vec::with_capacity(fields.len());
let rvariant_arg = cx.ident_of("read_enum_variant_arg");
for (i, &(ident, v_span, ref parts)) in fields.iter().enumerate() {

View File

@@ -188,6 +188,7 @@ pub use self::StaticFields::*;
pub use self::SubstructureFields::*;
use std::cell::RefCell;
use std::iter;
use std::vec;
use rustc_target::spec::abi::Abi;
@@ -558,15 +559,13 @@ impl<'a> TraitDef<'a> {
// type being derived upon
self.additional_bounds.iter().map(|p| {
cx.trait_bound(p.to_path(cx, self.span, type_ident, generics))
}).collect();
// require the current trait
bounds.push(cx.trait_bound(trait_path.clone()));
// also add in any bounds from the declaration
for declared_bound in &param.bounds {
bounds.push((*declared_bound).clone());
}
}).chain(
// require the current trait
iter::once(cx.trait_bound(trait_path.clone()))
).chain(
// also add in any bounds from the declaration
param.bounds.iter().cloned()
).collect();
cx.typaram(self.span, param.ident, vec![], bounds, None)
}

View File

@@ -95,9 +95,8 @@ fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure)
_ => cx.span_bug(trait_span, "impossible substructure in `derive(Hash)`"),
};
for &FieldInfo { ref self_, span, .. } in fields {
stmts.push(call_hash(span, self_.clone()));
}
stmts.extend(fields.iter().map(|FieldInfo { ref self_, span, .. }|
call_hash(*span, self_.clone())));
cx.expr_block(cx.block(trait_span, stmts))
}

View File

@@ -406,10 +406,7 @@ impl<'a, 'b> Context<'a, 'b> {
// Map the arguments
for i in 0..args_len {
let ref arg_types = self.arg_types[i];
let mut arg_offsets = Vec::with_capacity(arg_types.len());
for offset in arg_types {
arg_offsets.push(sofar + *offset);
}
let arg_offsets = arg_types.iter().map(|offset| sofar + *offset).collect::<Vec<_>>();
self.arg_index_map.push(arg_offsets);
sofar += self.arg_unique_types[i].len();
}
@@ -581,10 +578,12 @@ impl<'a, 'b> Context<'a, 'b> {
/// Actually builds the expression which the format_args! block will be
/// expanded to
fn into_expr(self) -> P<ast::Expr> {
let mut locals = Vec::new();
let mut counts = Vec::new();
let mut pats = Vec::new();
let mut heads = Vec::new();
let mut locals = Vec::with_capacity(
(0..self.args.len()).map(|i| self.arg_unique_types[i].len()).sum()
);
let mut counts = Vec::with_capacity(self.count_args.len());
let mut pats = Vec::with_capacity(self.args.len());
let mut heads = Vec::with_capacity(self.args.len());
let names_pos: Vec<_> = (0..self.args.len())
.map(|i| self.ecx.ident_of(&format!("arg{}", i)).gensym())