Rollup merge of #70254 - matthiaskrgr:cl4ppy, r=Centril

A couple more Clippy fixes (let_and_return, if_same_then_else)

* summarize if-else-code with identical blocks (clippy::if_same_then_else)
* don't create variable bindings just to return the bound value immediately (clippy::let_and_return)
This commit is contained in:
Dylan DPC 2020-03-22 15:48:41 +01:00 committed by GitHub
commit 8fe8bad96b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 34 additions and 67 deletions

View File

@ -145,8 +145,7 @@ mod hack {
unsafe { unsafe {
let len = b.len(); let len = b.len();
let b = Box::into_raw(b); let b = Box::into_raw(b);
let xs = Vec::from_raw_parts(b as *mut T, len, len); Vec::from_raw_parts(b as *mut T, len, len)
xs
} }
} }

View File

@ -1044,9 +1044,7 @@ pub(super) fn index_hir<'tcx>(tcx: TyCtxt<'tcx>, cnum: CrateNum) -> &'tcx Indexe
collector.finalize_and_compute_crate_hash(crate_disambiguator, &*tcx.cstore, cmdline_args) collector.finalize_and_compute_crate_hash(crate_disambiguator, &*tcx.cstore, cmdline_args)
}; };
let map = tcx.arena.alloc(IndexedHir { crate_hash, map }); tcx.arena.alloc(IndexedHir { crate_hash, map })
map
} }
/// Identical to the `PpAnn` implementation for `hir::Crate`, /// Identical to the `PpAnn` implementation for `hir::Crate`,

View File

@ -81,9 +81,7 @@ fn get_rpaths(config: &mut RPathConfig<'_>, libs: &[PathBuf]) -> Vec<String> {
rpaths.extend_from_slice(&fallback_rpaths); rpaths.extend_from_slice(&fallback_rpaths);
// Remove duplicates // Remove duplicates
let rpaths = minimize_rpaths(&rpaths); minimize_rpaths(&rpaths)
rpaths
} }
fn get_rpaths_relative_to_output(config: &mut RPathConfig<'_>, libs: &[PathBuf]) -> Vec<String> { fn get_rpaths_relative_to_output(config: &mut RPathConfig<'_>, libs: &[PathBuf]) -> Vec<String> {

View File

@ -288,7 +288,7 @@ fn generate_lto_work<B: ExtraBackendMethods>(
B::run_thin_lto(cgcx, needs_thin_lto, import_only_modules).unwrap_or_else(|e| e.raise()) B::run_thin_lto(cgcx, needs_thin_lto, import_only_modules).unwrap_or_else(|e| e.raise())
}; };
let result = lto_modules lto_modules
.into_iter() .into_iter()
.map(|module| { .map(|module| {
let cost = module.cost(); let cost = module.cost();
@ -303,9 +303,7 @@ fn generate_lto_work<B: ExtraBackendMethods>(
0, 0,
) )
})) }))
.collect(); .collect()
result
} }
pub struct CompiledModules { pub struct CompiledModules {

View File

@ -555,7 +555,7 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
// avoid allocations in those cases. We also don't use `indices` to // avoid allocations in those cases. We also don't use `indices` to
// determine if a kind has been seen before until the limit of 8 has // determine if a kind has been seen before until the limit of 8 has
// been exceeded, to also avoid allocations for `indices`. // been exceeded, to also avoid allocations for `indices`.
let var = if !var_values.spilled() { if !var_values.spilled() {
// `var_values` is stack-allocated. `indices` isn't used yet. Do a // `var_values` is stack-allocated. `indices` isn't used yet. Do a
// direct linear search of `var_values`. // direct linear search of `var_values`.
if let Some(idx) = var_values.iter().position(|&k| k == kind) { if let Some(idx) = var_values.iter().position(|&k| k == kind) {
@ -589,9 +589,7 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
assert_eq!(variables.len(), var_values.len()); assert_eq!(variables.len(), var_values.len());
BoundVar::new(variables.len() - 1) BoundVar::new(variables.len() - 1)
}) })
}; }
var
} }
/// Shorthand helper that creates a canonical region variable for /// Shorthand helper that creates a canonical region variable for

View File

@ -94,14 +94,12 @@ mod dl {
let result = f(); let result = f();
let last_error = libc::dlerror() as *const _; let last_error = libc::dlerror() as *const _;
let ret = if ptr::null() == last_error { if ptr::null() == last_error {
Ok(result) Ok(result)
} else { } else {
let s = CStr::from_ptr(last_error).to_bytes(); let s = CStr::from_ptr(last_error).to_bytes();
Err(str::from_utf8(s).unwrap().to_owned()) Err(str::from_utf8(s).unwrap().to_owned())
}; }
ret
} }
} }

View File

@ -184,14 +184,13 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
.. ..
} = self.builder; } = self.builder;
*rev_lookup.projections.entry((base, elem.lift())).or_insert_with(move || { *rev_lookup.projections.entry((base, elem.lift())).or_insert_with(move || {
let path = MoveDataBuilder::new_move_path( MoveDataBuilder::new_move_path(
move_paths, move_paths,
path_map, path_map,
init_path_map, init_path_map,
Some(base), Some(base),
mk_place(*tcx), mk_place(*tcx),
); )
path
}) })
} }

View File

@ -398,7 +398,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
where where
F: FnOnce(&mut Self) -> InterpResult<'tcx, T>, F: FnOnce(&mut Self) -> InterpResult<'tcx, T>,
{ {
let r = match f(self) { match f(self) {
Ok(val) => Some(val), Ok(val) => Some(val),
Err(error) => { Err(error) => {
// Some errors shouldn't come up because creating them causes // Some errors shouldn't come up because creating them causes
@ -412,8 +412,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
); );
None None
} }
}; }
r
} }
fn eval_constant(&mut self, c: &Constant<'tcx>, source_info: SourceInfo) -> Option<OpTy<'tcx>> { fn eval_constant(&mut self, c: &Constant<'tcx>, source_info: SourceInfo) -> Option<OpTy<'tcx>> {

View File

@ -187,14 +187,12 @@ impl<'a> StringReader<'a> {
rustc_lexer::TokenKind::LineComment => { rustc_lexer::TokenKind::LineComment => {
let string = self.str_from(start); let string = self.str_from(start);
// comments with only more "/"s are not doc comments // comments with only more "/"s are not doc comments
let tok = if comments::is_line_doc_comment(string) { if comments::is_line_doc_comment(string) {
self.forbid_bare_cr(start, string, "bare CR not allowed in doc-comment"); self.forbid_bare_cr(start, string, "bare CR not allowed in doc-comment");
token::DocComment(Symbol::intern(string)) token::DocComment(Symbol::intern(string))
} else { } else {
token::Comment token::Comment
}; }
tok
} }
rustc_lexer::TokenKind::BlockComment { terminated } => { rustc_lexer::TokenKind::BlockComment { terminated } => {
let string = self.str_from(start); let string = self.str_from(start);
@ -212,14 +210,12 @@ impl<'a> StringReader<'a> {
self.fatal_span_(start, last_bpos, msg).raise(); self.fatal_span_(start, last_bpos, msg).raise();
} }
let tok = if is_doc_comment { if is_doc_comment {
self.forbid_bare_cr(start, string, "bare CR not allowed in block doc-comment"); self.forbid_bare_cr(start, string, "bare CR not allowed in block doc-comment");
token::DocComment(Symbol::intern(string)) token::DocComment(Symbol::intern(string))
} else { } else {
token::Comment token::Comment
}; }
tok
} }
rustc_lexer::TokenKind::Whitespace => token::Whitespace, rustc_lexer::TokenKind::Whitespace => token::Whitespace,
rustc_lexer::TokenKind::Ident | rustc_lexer::TokenKind::RawIdent => { rustc_lexer::TokenKind::Ident | rustc_lexer::TokenKind::RawIdent => {

View File

@ -217,13 +217,7 @@ impl<'a> Parser<'a> {
/// Parses the RHS of a local variable declaration (e.g., '= 14;'). /// Parses the RHS of a local variable declaration (e.g., '= 14;').
fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> { fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
if self.eat(&token::Eq) { if self.eat(&token::Eq) || skip_eq { Ok(Some(self.parse_expr()?)) } else { Ok(None) }
Ok(Some(self.parse_expr()?))
} else if skip_eq {
Ok(Some(self.parse_expr()?))
} else {
Ok(None)
}
} }
/// Parses a block. No inner attributes are allowed. /// Parses a block. No inner attributes are allowed.

View File

@ -59,8 +59,7 @@ crate struct ImportSuggestion {
/// `source_map` functions and this function to something more robust. /// `source_map` functions and this function to something more robust.
fn reduce_impl_span_to_impl_keyword(sm: &SourceMap, impl_span: Span) -> Span { fn reduce_impl_span_to_impl_keyword(sm: &SourceMap, impl_span: Span) -> Span {
let impl_span = sm.span_until_char(impl_span, '<'); let impl_span = sm.span_until_char(impl_span, '<');
let impl_span = sm.span_until_whitespace(impl_span); sm.span_until_whitespace(impl_span)
impl_span
} }
impl<'a> Resolver<'a> { impl<'a> Resolver<'a> {

View File

@ -1871,7 +1871,7 @@ impl<'a> Resolver<'a> {
// No adjustments // No adjustments
} }
} }
let result = self.resolve_ident_in_module_unadjusted_ext( self.resolve_ident_in_module_unadjusted_ext(
module, module,
ident, ident,
ns, ns,
@ -1879,8 +1879,7 @@ impl<'a> Resolver<'a> {
false, false,
record_used, record_used,
path_span, path_span,
); )
result
} }
fn resolve_crate_root(&mut self, ident: Ident) -> Module<'a> { fn resolve_crate_root(&mut self, ident: Ident) -> Module<'a> {

View File

@ -1069,16 +1069,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
} }
}); });
let elt_ts_iter = elts.iter().enumerate().map(|(i, e)| { let elt_ts_iter = elts.iter().enumerate().map(|(i, e)| match flds {
let t = match flds { Some(ref fs) if i < fs.len() => {
Some(ref fs) if i < fs.len() => { let ety = fs[i].expect_ty();
let ety = fs[i].expect_ty(); self.check_expr_coercable_to_type(&e, ety);
self.check_expr_coercable_to_type(&e, ety); ety
ety }
} _ => self.check_expr_with_expectation(&e, NoExpectation),
_ => self.check_expr_with_expectation(&e, NoExpectation),
};
t
}); });
let tuple = self.tcx.mk_tup(elt_ts_iter); let tuple = self.tcx.mk_tup(elt_ts_iter);
if tuple.references_error() { if tuple.references_error() {

View File

@ -3654,14 +3654,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Otherwise, fall back to the immutable version. // Otherwise, fall back to the immutable version.
let (imm_tr, imm_op) = self.resolve_place_op(op, false); let (imm_tr, imm_op) = self.resolve_place_op(op, false);
let method = match (method, imm_tr) { match (method, imm_tr) {
(None, Some(trait_did)) => { (None, Some(trait_did)) => {
self.lookup_method_in_trait(span, imm_op, trait_did, base_ty, Some(arg_tys)) self.lookup_method_in_trait(span, imm_op, trait_did, base_ty, Some(arg_tys))
} }
(method, _) => method, (method, _) => method,
}; }
method
} }
fn check_method_argument_types( fn check_method_argument_types(

View File

@ -507,7 +507,7 @@ pub fn print_const(cx: &DocContext<'_>, n: &'tcx ty::Const<'_>) -> String {
} }
pub fn print_evaluated_const(cx: &DocContext<'_>, def_id: DefId) -> Option<String> { pub fn print_evaluated_const(cx: &DocContext<'_>, def_id: DefId) -> Option<String> {
let value = cx.tcx.const_eval_poly(def_id).ok().and_then(|val| { cx.tcx.const_eval_poly(def_id).ok().and_then(|val| {
let ty = cx.tcx.type_of(def_id); let ty = cx.tcx.type_of(def_id);
match (val, &ty.kind) { match (val, &ty.kind) {
(_, &ty::Ref(..)) => None, (_, &ty::Ref(..)) => None,
@ -518,9 +518,7 @@ pub fn print_evaluated_const(cx: &DocContext<'_>, def_id: DefId) -> Option<Strin
} }
_ => None, _ => None,
} }
}); })
value
} }
fn format_integer_with_underscore_sep(num: &str) -> String { fn format_integer_with_underscore_sep(num: &str) -> String {

View File

@ -666,13 +666,12 @@ fn get_index_search_type(item: &clean::Item) -> Option<IndexItemFunctionType> {
} }
fn get_index_type(clean_type: &clean::Type) -> RenderType { fn get_index_type(clean_type: &clean::Type) -> RenderType {
let t = RenderType { RenderType {
ty: clean_type.def_id(), ty: clean_type.def_id(),
idx: None, idx: None,
name: get_index_type_name(clean_type, true).map(|s| s.to_ascii_lowercase()), name: get_index_type_name(clean_type, true).map(|s| s.to_ascii_lowercase()),
generics: get_generics(clean_type), generics: get_generics(clean_type),
}; }
t
} }
fn get_index_type_name(clean_type: &clean::Type, accept_generic: bool) -> Option<String> { fn get_index_type_name(clean_type: &clean::Type, accept_generic: bool) -> Option<String> {