remove redundant closures (clippy::redundant_closure)

Matthias Krüger 2020-12-30 04:19:09 +01:00
parent 18cb4ad3b9
commit e2272cdffc
11 changed files with 42 additions and 45 deletions
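
The clippy::redundant_closure lint fires when a closure does nothing but forward its arguments to an existing callable; plain functions, tuple-struct constructors, and enum-variant constructors already have the right function type, so the closure wrapper can be dropped. A minimal, self-contained sketch of the before/after shapes touched in this commit (the names `Wrapper`, `groups`, and the literal values are illustrative, not from the compiler sources):

    use std::collections::HashMap;

    fn main() {
        // `|| Vec::new()` only calls `Vec::new`, so the bare path works for
        // `or_insert_with`, which accepts any `FnOnce() -> V`.
        let mut groups: HashMap<&str, Vec<i32>> = HashMap::new();
        groups.entry("evens").or_insert_with(Vec::new).push(2);

        // Tuple-struct and enum-variant constructors are ordinary functions,
        // so `|x| Some(x)` and `|n| Wrapper(n)` collapse to `Some` and `Wrapper`.
        struct Wrapper(u32);
        let wrapped: Vec<Wrapper> = (0..3).map(Wrapper).collect();
        let options: Vec<Option<u32>> = (0..3).map(Some).collect();

        // Functions returning `Option` can be passed straight to `filter_map`.
        let chars: Vec<char> = (60u32..70).filter_map(char::from_u32).collect();

        assert_eq!(groups["evens"], vec![2]);
        assert_eq!(wrapped[1].0, 1);
        assert_eq!(options[2], Some(2));
        assert_eq!(chars.len(), 10);
    }

Every hunk below is an instance of one of these shapes.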

View File

@@ -292,7 +292,7 @@ fn add_unreachable_coverage<'tcx>(
if let Some(non_codegenned_file_name) = tcx.covered_file_name(non_codegenned_def_id) {
let def_ids = unreachable_def_ids_by_file
.entry(*non_codegenned_file_name)
- .or_insert_with(|| Vec::new());
+ .or_insert_with(Vec::new);
def_ids.push(non_codegenned_def_id);
}
}

View File

@@ -170,30 +170,30 @@ impl<'tcx> FunctionCoverage<'tcx> {
// `expression_index`s lower than the referencing `Expression`. Therefore, it is
// reasonable to look up the new index of an expression operand while the `new_indexes`
// vector is only complete up to the current `ExpressionIndex`.
- let id_to_counter =
- |new_indexes: &IndexVec<InjectedExpressionIndex, Option<MappedExpressionIndex>>,
- id: ExpressionOperandId| {
- if id == ExpressionOperandId::ZERO {
- Some(Counter::zero())
- } else if id.index() < self.counters.len() {
- // Note: Some codegen-injected Counters may be only referenced by `Expression`s,
- // and may not have their own `CodeRegion`s,
- let index = CounterValueReference::from(id.index());
- Some(Counter::counter_value_reference(index))
- } else {
- let index = self.expression_index(u32::from(id));
- self.expressions
- .get(index)
- .expect("expression id is out of range")
- .as_ref()
- // If an expression was optimized out, assume it would have produced a count
- // of zero. This ensures that expressions dependent on optimized-out
- // expressions are still valid.
- .map_or(Some(Counter::zero()), |_| {
- new_indexes[index].map(|new_index| Counter::expression(new_index))
- })
- }
- };
+ let id_to_counter = |new_indexes: &IndexVec<
+ InjectedExpressionIndex,
+ Option<MappedExpressionIndex>,
+ >,
+ id: ExpressionOperandId| {
+ if id == ExpressionOperandId::ZERO {
+ Some(Counter::zero())
+ } else if id.index() < self.counters.len() {
+ // Note: Some codegen-injected Counters may be only referenced by `Expression`s,
+ // and may not have their own `CodeRegion`s,
+ let index = CounterValueReference::from(id.index());
+ Some(Counter::counter_value_reference(index))
+ } else {
+ let index = self.expression_index(u32::from(id));
+ self.expressions
+ .get(index)
+ .expect("expression id is out of range")
+ .as_ref()
+ // If an expression was optimized out, assume it would have produced a count
+ // of zero. This ensures that expressions dependent on optimized-out
+ // expressions are still valid.
+ .map_or(Some(Counter::zero()), |_| new_indexes[index].map(Counter::expression))
+ }
+ };
for (original_index, expression) in
self.expressions.iter_enumerated().filter_map(|(original_index, entry)| {

View File

@@ -198,7 +198,7 @@ impl<E: rustc_serialize::Encoder> Encodable<E> for PackedFingerprint {
impl<D: rustc_serialize::Decoder> Decodable<D> for PackedFingerprint {
#[inline]
fn decode(d: &mut D) -> Result<Self, D::Error> {
- Fingerprint::decode(d).map(|f| PackedFingerprint(f))
+ Fingerprint::decode(d).map(PackedFingerprint)
}
}

View File

@@ -92,7 +92,7 @@ crate fn matches_codepattern(a: &str, b: &str) -> bool {
/// Advances the given peekable `Iterator` until it reaches a non-whitespace character.
fn scan_for_non_ws_or_end<I: Iterator<Item = char>>(iter: &mut Peekable<I>) {
- while iter.peek().copied().map(|c| rustc_lexer::is_whitespace(c)) == Some(true) {
+ while iter.peek().copied().map(rustc_lexer::is_whitespace) == Some(true) {
iter.next();
}
}
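
The helper above only swaps `|c| rustc_lexer::is_whitespace(c)` for the bare function path. A standalone sketch of the same loop, assuming std's `char::is_whitespace` as a stand-in for the compiler-internal `rustc_lexer::is_whitespace`:

    use std::iter::Peekable;

    /// Advances the iterator until the next item is not whitespace (or the end).
    fn scan_for_non_ws_or_end<I: Iterator<Item = char>>(iter: &mut Peekable<I>) {
        // `char::is_whitespace` takes `self` by value, so the bare path is a
        // drop-in replacement for the forwarding closure.
        while iter.peek().copied().map(char::is_whitespace) == Some(true) {
            iter.next();
        }
    }

    fn main() {
        let mut it = "   abc".chars().peekable();
        scan_for_non_ws_or_end(&mut it);
        assert_eq!(it.next(), Some('a'));
    }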

View File

@@ -55,7 +55,7 @@ impl NodeLabels<&'static str> {
fn to_opt_strs(self) -> Vec<Option<&'static str>> {
match self {
UnlabelledNodes(len) => vec![None; len],
- AllNodesLabelled(lbls) => lbls.into_iter().map(|l| Some(l)).collect(),
+ AllNodesLabelled(lbls) => lbls.into_iter().map(Some).collect(),
SomeNodesLabelled(lbls) => lbls.into_iter().collect(),
}
}

View File

@@ -1429,22 +1429,21 @@ impl<'tcx> ToPredicate<'tcx> for ConstnessAnd<PolyTraitPredicate<'tcx>> {
impl<'tcx> ToPredicate<'tcx> for PolyRegionOutlivesPredicate<'tcx> {
fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
- self.map_bound(|value| PredicateAtom::RegionOutlives(value))
+ self.map_bound(PredicateAtom::RegionOutlives)
.potentially_quantified(tcx, PredicateKind::ForAll)
}
}
impl<'tcx> ToPredicate<'tcx> for PolyTypeOutlivesPredicate<'tcx> {
fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
- self.map_bound(|value| PredicateAtom::TypeOutlives(value))
+ self.map_bound(PredicateAtom::TypeOutlives)
.potentially_quantified(tcx, PredicateKind::ForAll)
}
}
impl<'tcx> ToPredicate<'tcx> for PolyProjectionPredicate<'tcx> {
fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
- self.map_bound(|value| PredicateAtom::Projection(value))
- .potentially_quantified(tcx, PredicateKind::ForAll)
+ self.map_bound(PredicateAtom::Projection).potentially_quantified(tcx, PredicateKind::ForAll)
}
}
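
All three ToPredicate impls above wrap a bound value in a single-field enum variant, and a variant constructor with a field is itself a function of that field. A tiny illustration (the `u32` payloads are invented; the real `PredicateAtom` variants carry rustc-internal types):

    #[derive(Debug, PartialEq)]
    enum PredicateAtom {
        RegionOutlives(u32),
        TypeOutlives(u32),
    }

    fn main() {
        // `PredicateAtom::RegionOutlives` has type `fn(u32) -> PredicateAtom`,
        // so it slots in wherever `|value| PredicateAtom::RegionOutlives(value)` did.
        let region: Vec<PredicateAtom> = (0..2).map(PredicateAtom::RegionOutlives).collect();
        let ty: Vec<PredicateAtom> = (0..2).map(PredicateAtom::TypeOutlives).collect();
        assert_eq!(region[0], PredicateAtom::RegionOutlives(0));
        assert_eq!(ty[1], PredicateAtom::TypeOutlives(1));
    }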

View File

@@ -130,7 +130,7 @@ const RUSTC_COVERAGE_DEBUG_OPTIONS: &str = "RUSTC_COVERAGE_DEBUG_OPTIONS";
pub(super) fn debug_options<'a>() -> &'a DebugOptions {
static DEBUG_OPTIONS: SyncOnceCell<DebugOptions> = SyncOnceCell::new();
- &DEBUG_OPTIONS.get_or_init(|| DebugOptions::from_env())
+ &DEBUG_OPTIONS.get_or_init(DebugOptions::from_env)
}
/// Parses and maintains coverage-specific debug options captured from the environment variable
@@ -430,7 +430,7 @@ impl GraphvizData
{
bcb_to_coverage_spans_with_counters
.entry(bcb)
- .or_insert_with(|| Vec::new())
+ .or_insert_with(Vec::new)
.push((coverage_span.clone(), counter_kind.clone()));
}
}
@@ -456,7 +456,7 @@ impl GraphvizData {
if let Some(bcb_to_dependency_counters) = self.some_bcb_to_dependency_counters.as_mut() {
bcb_to_dependency_counters
.entry(bcb)
- .or_insert_with(|| Vec::new())
+ .or_insert_with(Vec::new)
.push(counter_kind.clone());
}
}
@@ -527,8 +527,8 @@ impl UsedExpressions {
pub fn add_expression_operands(&mut self, expression: &CoverageKind) {
if let Some(used_expression_operands) = self.some_used_expression_operands.as_mut() {
if let CoverageKind::Expression { id, lhs, rhs, .. } = *expression {
- used_expression_operands.entry(lhs).or_insert_with(|| Vec::new()).push(id);
- used_expression_operands.entry(rhs).or_insert_with(|| Vec::new()).push(id);
+ used_expression_operands.entry(lhs).or_insert_with(Vec::new).push(id);
+ used_expression_operands.entry(rhs).or_insert_with(Vec::new).push(id);
}
}
}
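
The debug_options hunk above passes a plain constructor to a lazy one-time initializer, which accepts any `FnOnce() -> T` just like `or_insert_with`. A minimal sketch of that shape using std's `OnceLock` in place of the internal `SyncOnceCell` (the `verbose` field and the `MY_DEBUG` variable are made up for illustration):

    use std::sync::OnceLock;

    // Hypothetical stand-in for the real DebugOptions type.
    #[derive(Debug)]
    struct DebugOptions {
        verbose: bool,
    }

    impl DebugOptions {
        // Plays the role of `DebugOptions::from_env` in the diff above.
        fn from_env() -> Self {
            DebugOptions { verbose: std::env::var("MY_DEBUG").is_ok() }
        }
    }

    fn debug_options() -> &'static DebugOptions {
        static DEBUG_OPTIONS: OnceLock<DebugOptions> = OnceLock::new();
        // A fn item such as `DebugOptions::from_env` already is an
        // `FnOnce() -> DebugOptions`, so wrapping it in `|| ...` adds nothing.
        DEBUG_OPTIONS.get_or_init(DebugOptions::from_env)
    }

    fn main() {
        println!("verbose debug output: {}", debug_options().verbose);
    }

For the repeated `entry(..).or_insert_with(Vec::new)` calls in the same file, `.or_default()` would be an even shorter spelling, since `Vec<T>` implements `Default`.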

View File

@@ -394,7 +394,7 @@ impl BasicCoverageBlockData {
let operand = counter_kind.as_operand_id();
if let Some(replaced) = self
.edge_from_bcbs
- .get_or_insert_with(|| FxHashMap::default())
+ .get_or_insert_with(FxHashMap::default)
.insert(from_bcb, counter_kind)
{
Error::from_string(format!(

View File

@@ -12,7 +12,7 @@ macro_rules! test {
let (lines, multi_byte_chars, non_narrow_chars) =
analyze_source_file($text, BytePos($source_file_start_pos));
- let expected_lines: Vec<BytePos> = $lines.into_iter().map(|pos| BytePos(pos)).collect();
+ let expected_lines: Vec<BytePos> = $lines.into_iter().map(BytePos).collect();
assert_eq!(lines, expected_lines);

View File

@@ -4,7 +4,7 @@ use super::*;
fn test_lev_distance() {
use std::char::{from_u32, MAX};
// Test bytelength agnosticity
- for c in (0..MAX as u32).filter_map(|i| from_u32(i)).map(|i| i.to_string()) {
+ for c in (0..MAX as u32).filter_map(from_u32).map(|i| i.to_string()) {
assert_eq!(lev_distance(&c[..], &c[..]), 0);
}

View File

@@ -1256,17 +1256,15 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
})
});
- let regular_trait_predicates = existential_trait_refs.map(|trait_ref| {
- trait_ref.map_bound(|trait_ref| ty::ExistentialPredicate::Trait(trait_ref))
- });
+ let regular_trait_predicates = existential_trait_refs
+ .map(|trait_ref| trait_ref.map_bound(ty::ExistentialPredicate::Trait));
let auto_trait_predicates = auto_traits.into_iter().map(|trait_ref| {
ty::Binder::dummy(ty::ExistentialPredicate::AutoTrait(trait_ref.trait_ref().def_id()))
});
let mut v = regular_trait_predicates
.chain(auto_trait_predicates)
.chain(
- existential_projections
- .map(|x| x.map_bound(|x| ty::ExistentialPredicate::Projection(x))),
+ existential_projections.map(|x| x.map_bound(ty::ExistentialPredicate::Projection)),
)
.collect::<SmallVec<[_; 8]>>();
v.sort_by(|a, b| a.skip_binder().stable_cmp(tcx, &b.skip_binder()));