remove redundant closures (clippy::redundant_closure)

Matthias Krüger 2020-12-30 04:19:09 +01:00
parent 18cb4ad3b9
commit e2272cdffc
11 changed files with 42 additions and 45 deletions
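
The lint behind this commit, clippy::redundant_closure, fires when a closure does nothing but forward its arguments to another callable, because the callable can be passed directly. A minimal, self-contained sketch of the before/after shape of these edits; the `Widget` type and the `ids` data are invented for illustration and are not part of the commit:

    // `Widget` is a made-up newtype standing in for types like PackedFingerprint below.
    #[allow(dead_code)]
    struct Widget(u32);

    fn main() {
        let ids: Vec<u32> = vec![1, 2, 3];

        // Before: the closure only forwards `id` to `Widget`, which is what
        // clippy::redundant_closure flags.
        let before: Vec<Widget> = ids.iter().copied().map(|id| Widget(id)).collect();

        // After: a tuple-struct constructor is itself a `fn(u32) -> Widget`,
        // so it can be passed to `map` directly.
        let after: Vec<Widget> = ids.iter().copied().map(Widget).collect();

        assert_eq!(before.len(), after.len());
    }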


@@ -292,7 +292,7 @@ fn add_unreachable_coverage<'tcx>(
         if let Some(non_codegenned_file_name) = tcx.covered_file_name(non_codegenned_def_id) {
             let def_ids = unreachable_def_ids_by_file
                 .entry(*non_codegenned_file_name)
-                .or_insert_with(|| Vec::new());
+                .or_insert_with(Vec::new);
             def_ids.push(non_codegenned_def_id);
         }
     }
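
Several hunks in this commit follow the same `entry(..).or_insert_with(Vec::new)` shape. It works because `Vec::new` already has the signature `fn() -> Vec<T>`, which is exactly the `FnOnce() -> V` that `or_insert_with` expects, so the `|| Vec::new()` wrapper adds nothing. A self-contained sketch with invented data (the real code above groups DefIds by covered file name):

    use std::collections::HashMap;

    fn main() {
        // Invented stand-in data: (file name, id) pairs.
        let pairs = [("a.rs", 1), ("b.rs", 2), ("a.rs", 3)];

        let mut by_file: HashMap<&str, Vec<i32>> = HashMap::new();
        for &(file, id) in &pairs {
            // `Vec::new` is `fn() -> Vec<i32>` here, exactly the `FnOnce() -> V`
            // that `or_insert_with` expects, so no closure wrapper is needed.
            by_file.entry(file).or_insert_with(Vec::new).push(id);
        }

        assert_eq!(by_file["a.rs"], vec![1, 3]);
    }

For value types that implement `Default`, such as `Vec`, `.or_default()` is an even shorter equivalent, though this commit only removes the redundant closures.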


@@ -170,30 +170,30 @@ impl<'tcx> FunctionCoverage<'tcx> {
         // `expression_index`s lower than the referencing `Expression`. Therefore, it is
         // reasonable to look up the new index of an expression operand while the `new_indexes`
         // vector is only complete up to the current `ExpressionIndex`.
-        let id_to_counter =
-            |new_indexes: &IndexVec<InjectedExpressionIndex, Option<MappedExpressionIndex>>,
-             id: ExpressionOperandId| {
-                if id == ExpressionOperandId::ZERO {
-                    Some(Counter::zero())
-                } else if id.index() < self.counters.len() {
-                    // Note: Some codegen-injected Counters may be only referenced by `Expression`s,
-                    // and may not have their own `CodeRegion`s,
-                    let index = CounterValueReference::from(id.index());
-                    Some(Counter::counter_value_reference(index))
-                } else {
-                    let index = self.expression_index(u32::from(id));
-                    self.expressions
-                        .get(index)
-                        .expect("expression id is out of range")
-                        .as_ref()
-                        // If an expression was optimized out, assume it would have produced a count
-                        // of zero. This ensures that expressions dependent on optimized-out
-                        // expressions are still valid.
-                        .map_or(Some(Counter::zero()), |_| {
-                            new_indexes[index].map(|new_index| Counter::expression(new_index))
-                        })
-                }
-            };
+        let id_to_counter = |new_indexes: &IndexVec<
+            InjectedExpressionIndex,
+            Option<MappedExpressionIndex>,
+        >,
+                             id: ExpressionOperandId| {
+            if id == ExpressionOperandId::ZERO {
+                Some(Counter::zero())
+            } else if id.index() < self.counters.len() {
+                // Note: Some codegen-injected Counters may be only referenced by `Expression`s,
+                // and may not have their own `CodeRegion`s,
+                let index = CounterValueReference::from(id.index());
+                Some(Counter::counter_value_reference(index))
+            } else {
+                let index = self.expression_index(u32::from(id));
+                self.expressions
+                    .get(index)
+                    .expect("expression id is out of range")
+                    .as_ref()
+                    // If an expression was optimized out, assume it would have produced a count
+                    // of zero. This ensures that expressions dependent on optimized-out
+                    // expressions are still valid.
+                    .map_or(Some(Counter::zero()), |_| new_indexes[index].map(Counter::expression))
+            }
+        };
         for (original_index, expression) in
             self.expressions.iter_enumerated().filter_map(|(original_index, entry)| {


@@ -198,7 +198,7 @@ impl<E: rustc_serialize::Encoder> Encodable<E> for PackedFingerprint {
 impl<D: rustc_serialize::Decoder> Decodable<D> for PackedFingerprint {
     #[inline]
     fn decode(d: &mut D) -> Result<Self, D::Error> {
-        Fingerprint::decode(d).map(|f| PackedFingerprint(f))
+        Fingerprint::decode(d).map(PackedFingerprint)
     }
 }


@@ -92,7 +92,7 @@ crate fn matches_codepattern(a: &str, b: &str) -> bool {
 /// Advances the given peekable `Iterator` until it reaches a non-whitespace character.
 fn scan_for_non_ws_or_end<I: Iterator<Item = char>>(iter: &mut Peekable<I>) {
-    while iter.peek().copied().map(|c| rustc_lexer::is_whitespace(c)) == Some(true) {
+    while iter.peek().copied().map(rustc_lexer::is_whitespace) == Some(true) {
         iter.next();
     }
 }


@@ -55,7 +55,7 @@ impl NodeLabels<&'static str> {
     fn to_opt_strs(self) -> Vec<Option<&'static str>> {
         match self {
             UnlabelledNodes(len) => vec![None; len],
-            AllNodesLabelled(lbls) => lbls.into_iter().map(|l| Some(l)).collect(),
+            AllNodesLabelled(lbls) => lbls.into_iter().map(Some).collect(),
             SomeNodesLabelled(lbls) => lbls.into_iter().collect(),
         }
     }
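
The `map(Some)` change works because enum variant constructors, like tuple-struct constructors, are ordinary functions. A small sketch; the `Shape` enum is invented and only illustrates the same rewrite applied to `Some` above and to the `PredicateAtom::*` variants further down:

    #[allow(dead_code)]
    #[derive(Debug, PartialEq)]
    enum Shape {
        Circle(f64),
        Square(f64),
    }

    fn main() {
        // `Shape::Circle` has type `fn(f64) -> Shape`, so it replaces `|r| Shape::Circle(r)`.
        let circles: Vec<Shape> = vec![1.0, 2.5].into_iter().map(Shape::Circle).collect();
        assert_eq!(circles, vec![Shape::Circle(1.0), Shape::Circle(2.5)]);

        // `Option::Some` is no different, which is what `map(Some)` relies on.
        let labelled: Vec<Option<&str>> = vec!["a", "b"].into_iter().map(Some).collect();
        assert_eq!(labelled, vec![Some("a"), Some("b")]);
    }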


@@ -1429,22 +1429,21 @@ impl<'tcx> ToPredicate<'tcx> for ConstnessAnd<PolyTraitPredicate<'tcx>> {
 impl<'tcx> ToPredicate<'tcx> for PolyRegionOutlivesPredicate<'tcx> {
     fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
-        self.map_bound(|value| PredicateAtom::RegionOutlives(value))
+        self.map_bound(PredicateAtom::RegionOutlives)
             .potentially_quantified(tcx, PredicateKind::ForAll)
     }
 }
 
 impl<'tcx> ToPredicate<'tcx> for PolyTypeOutlivesPredicate<'tcx> {
     fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
-        self.map_bound(|value| PredicateAtom::TypeOutlives(value))
+        self.map_bound(PredicateAtom::TypeOutlives)
             .potentially_quantified(tcx, PredicateKind::ForAll)
     }
 }
 
 impl<'tcx> ToPredicate<'tcx> for PolyProjectionPredicate<'tcx> {
     fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
-        self.map_bound(|value| PredicateAtom::Projection(value))
-            .potentially_quantified(tcx, PredicateKind::ForAll)
+        self.map_bound(PredicateAtom::Projection).potentially_quantified(tcx, PredicateKind::ForAll)
     }
 }


@@ -130,7 +130,7 @@ const RUSTC_COVERAGE_DEBUG_OPTIONS: &str = "RUSTC_COVERAGE_DEBUG_OPTIONS";
 pub(super) fn debug_options<'a>() -> &'a DebugOptions {
     static DEBUG_OPTIONS: SyncOnceCell<DebugOptions> = SyncOnceCell::new();
 
-    &DEBUG_OPTIONS.get_or_init(|| DebugOptions::from_env())
+    &DEBUG_OPTIONS.get_or_init(DebugOptions::from_env)
 }
 
 /// Parses and maintains coverage-specific debug options captured from the environment variable
@@ -430,7 +430,7 @@ impl GraphvizData {
         {
             bcb_to_coverage_spans_with_counters
                 .entry(bcb)
-                .or_insert_with(|| Vec::new())
+                .or_insert_with(Vec::new)
                 .push((coverage_span.clone(), counter_kind.clone()));
         }
     }
@@ -456,7 +456,7 @@ impl GraphvizData {
         if let Some(bcb_to_dependency_counters) = self.some_bcb_to_dependency_counters.as_mut() {
             bcb_to_dependency_counters
                 .entry(bcb)
-                .or_insert_with(|| Vec::new())
+                .or_insert_with(Vec::new)
                 .push(counter_kind.clone());
         }
     }
@@ -527,8 +527,8 @@ impl UsedExpressions {
     pub fn add_expression_operands(&mut self, expression: &CoverageKind) {
         if let Some(used_expression_operands) = self.some_used_expression_operands.as_mut() {
             if let CoverageKind::Expression { id, lhs, rhs, .. } = *expression {
-                used_expression_operands.entry(lhs).or_insert_with(|| Vec::new()).push(id);
-                used_expression_operands.entry(rhs).or_insert_with(|| Vec::new()).push(id);
+                used_expression_operands.entry(lhs).or_insert_with(Vec::new).push(id);
+                used_expression_operands.entry(rhs).or_insert_with(Vec::new).push(id);
             }
         }
     }


@@ -394,7 +394,7 @@ impl BasicCoverageBlockData {
         let operand = counter_kind.as_operand_id();
         if let Some(replaced) = self
             .edge_from_bcbs
-            .get_or_insert_with(|| FxHashMap::default())
+            .get_or_insert_with(FxHashMap::default)
             .insert(from_bcb, counter_kind)
         {
             Error::from_string(format!(
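
`get_or_insert_with(FxHashMap::default)` is the same idea applied to `Option::get_or_insert_with`: any zero-argument function, including the `default` constructor of a concrete type, satisfies the expected `FnOnce() -> T`. A sketch using a plain `HashMap` as a stand-in for rustc's `FxHashMap`, with invented keys and values:

    use std::collections::HashMap;

    fn main() {
        // Stand-in for the optional map that the real code lazily initializes.
        let mut edge_counters: Option<HashMap<u32, &str>> = None;

        // `HashMap::default` (like `FxHashMap::default`) is a zero-argument
        // function, so no `|| FxHashMap::default()` wrapper is required.
        let replaced = edge_counters
            .get_or_insert_with(HashMap::default)
            .insert(3, "counter");

        assert!(replaced.is_none());
        assert_eq!(edge_counters.unwrap()[&3], "counter");
    }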


@@ -12,7 +12,7 @@ macro_rules! test {
             let (lines, multi_byte_chars, non_narrow_chars) =
                 analyze_source_file($text, BytePos($source_file_start_pos));
 
-            let expected_lines: Vec<BytePos> = $lines.into_iter().map(|pos| BytePos(pos)).collect();
+            let expected_lines: Vec<BytePos> = $lines.into_iter().map(BytePos).collect();
             assert_eq!(lines, expected_lines);


@@ -4,7 +4,7 @@ use super::*;
 fn test_lev_distance() {
     use std::char::{from_u32, MAX};
     // Test bytelength agnosticity
-    for c in (0..MAX as u32).filter_map(|i| from_u32(i)).map(|i| i.to_string()) {
+    for c in (0..MAX as u32).filter_map(from_u32).map(|i| i.to_string()) {
        assert_eq!(lev_distance(&c[..], &c[..]), 0);
     }
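
Free functions such as `from_u32` and `rustc_lexer::is_whitespace` can likewise be named directly wherever a closure is expected; clippy is generally careful to suggest this only when the argument types already line up without extra borrows or conversions, which is why closures like `|i| i.to_string()` (a method call) are left alone in this commit. A trivial sketch with an invented helper:

    // `double` is an invented helper, not part of the commit.
    fn double(x: u32) -> u32 {
        x * 2
    }

    fn main() {
        // Redundant form: the closure only forwards `i` to `double`.
        let via_closure: Vec<u32> = (1..=3).map(|i| double(i)).collect();

        // Preferred form: pass the function item itself.
        let direct: Vec<u32> = (1..=3).map(double).collect();

        assert_eq!(via_closure, direct);
    }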


@@ -1256,17 +1256,15 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
             })
         });
 
-        let regular_trait_predicates = existential_trait_refs.map(|trait_ref| {
-            trait_ref.map_bound(|trait_ref| ty::ExistentialPredicate::Trait(trait_ref))
-        });
+        let regular_trait_predicates = existential_trait_refs
+            .map(|trait_ref| trait_ref.map_bound(ty::ExistentialPredicate::Trait));
         let auto_trait_predicates = auto_traits.into_iter().map(|trait_ref| {
             ty::Binder::dummy(ty::ExistentialPredicate::AutoTrait(trait_ref.trait_ref().def_id()))
         });
         let mut v = regular_trait_predicates
             .chain(auto_trait_predicates)
             .chain(
-                existential_projections
-                    .map(|x| x.map_bound(|x| ty::ExistentialPredicate::Projection(x))),
+                existential_projections.map(|x| x.map_bound(ty::ExistentialPredicate::Projection)),
             )
             .collect::<SmallVec<[_; 8]>>();
         v.sort_by(|a, b| a.skip_binder().stable_cmp(tcx, &b.skip_binder()));