Rollup merge of #80944 - LingMan:map_or, r=nagisa

Use Option::map_or instead of `.map(..).unwrap_or(..)`

``@rustbot`` modify labels +C-cleanup +T-compiler
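For reference, the two forms are equivalent for `Option`; `map_or` takes the fallback first and the mapping closure second, so the fallback no longer trails at the end of the chain. A minimal sketch of the pattern being replaced (illustrative values only, not taken from the diff):

```rust
fn main() {
    let opt: Option<u32> = Some(41);

    // Old pattern: map to a new value, then supply a fallback for None.
    let old_style = opt.map(|x| x + 1).unwrap_or(0);

    // New pattern: Option::map_or(fallback, f) does the same in one call.
    let new_style = opt.map_or(0, |x| x + 1);

    assert_eq!(old_style, new_style);
    assert_eq!(None::<u32>.map_or(0, |x| x + 1), 0);
}
```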
Yuki Okushi 2021-01-15 18:26:14 +09:00 committed by GitHub
commit a584d87417
50 changed files with 67 additions and 79 deletions

@@ -273,7 +273,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
if !generic_args.parenthesized && !has_lifetimes {
generic_args.args = self
.elided_path_lifetimes(
- first_generic_span.map(|s| s.shrink_to_lo()).unwrap_or(segment.ident.span),
+ first_generic_span.map_or(segment.ident.span, |s| s.shrink_to_lo()),
expected_lifetimes,
)
.map(GenericArg::Lifetime)

@@ -370,7 +370,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
gate_feature_post!(
&self,
negative_impls,
- span.to(of_trait.as_ref().map(|t| t.path.span).unwrap_or(span)),
+ span.to(of_trait.as_ref().map_or(span, |t| t.path.span)),
"negative trait bounds are not yet fully implemented; \
use marker types for now"
);

@@ -1002,8 +1002,7 @@ pub unsafe fn with_llvm_pmb(
// reasonable defaults and prepare it to actually populate the pass
// manager.
let builder = llvm::LLVMPassManagerBuilderCreate();
- let opt_size =
- config.opt_size.map(|x| to_llvm_opt_settings(x).1).unwrap_or(llvm::CodeGenOptSizeNone);
+ let opt_size = config.opt_size.map_or(llvm::CodeGenOptSizeNone, |x| to_llvm_opt_settings(x).1);
let inline_threshold = config.inline_threshold;
let pgo_gen_path = get_pgo_gen_path(config);
let pgo_use_path = get_pgo_use_path(config);

@@ -166,7 +166,7 @@ fn get_linker(
_ => match flavor {
LinkerFlavor::Lld(f) => Command::lld(linker, f),
LinkerFlavor::Msvc if sess.opts.cg.linker.is_none() && sess.target.linker.is_none() => {
- Command::new(msvc_tool.as_ref().map(|t| t.path()).unwrap_or(linker))
+ Command::new(msvc_tool.as_ref().map_or(linker, |t| t.path()))
}
_ => Command::new(linker),
},

@@ -166,7 +166,7 @@ impl SelfProfilerRef {
// If there is no SelfProfiler then the filter mask is set to NONE,
// ensuring that nothing ever tries to actually access it.
let event_filter_mask =
- profiler.as_ref().map(|p| p.event_filter_mask).unwrap_or(EventFilter::empty());
+ profiler.as_ref().map_or(EventFilter::empty(), |p| p.event_filter_mask);
SelfProfilerRef {
profiler,

@@ -1236,7 +1236,7 @@ pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) {
}
// If backtraces are enabled, also print the query stack
- let backtrace = env::var_os("RUST_BACKTRACE").map(|x| &x != "0").unwrap_or(false);
+ let backtrace = env::var_os("RUST_BACKTRACE").map_or(false, |x| &x != "0");
let num_frames = if backtrace { None } else { Some(2) };

@@ -804,7 +804,7 @@ impl HandlerInner {
}
fn treat_err_as_bug(&self) -> bool {
- self.flags.treat_err_as_bug.map(|c| self.err_count() >= c).unwrap_or(false)
+ self.flags.treat_err_as_bug.map_or(false, |c| self.err_count() >= c)
}
fn print_error_count(&mut self, registry: &Registry) {
@@ -913,7 +913,7 @@ impl HandlerInner {
// This is technically `self.treat_err_as_bug()` but `delay_span_bug` is called before
// incrementing `err_count` by one, so we need to +1 the comparing.
// FIXME: Would be nice to increment err_count in a more coherent way.
- if self.flags.treat_err_as_bug.map(|c| self.err_count() + 1 >= c).unwrap_or(false) {
+ if self.flags.treat_err_as_bug.map_or(false, |c| self.err_count() + 1 >= c) {
// FIXME: don't abort here if report_delayed_bugs is off
self.span_bug(sp, msg);
}

@@ -423,7 +423,7 @@ impl<'a> StripUnconfigured<'a> {
/// If attributes are not allowed on expressions, emit an error for `attr`
pub fn maybe_emit_expr_attr_err(&self, attr: &Attribute) {
- if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) {
+ if !self.features.map_or(true, |features| features.stmt_expr_attributes) {
let mut err = feature_err(
&self.sess.parse_sess,
sym::stmt_expr_attributes,

@@ -500,7 +500,7 @@ fn inner_parse_loop<'root, 'tt>(
if idx == len && item.sep.is_some() {
// We have a separator, and it is the current token. We can advance past the
// separator token.
- if item.sep.as_ref().map(|sep| token_name_eq(token, sep)).unwrap_or(false) {
+ if item.sep.as_ref().map_or(false, |sep| token_name_eq(token, sep)) {
item.idx += 1;
next_items.push(item);
}

@@ -203,7 +203,7 @@ fn macro_rules_dummy_expander<'cx>(
}
fn trace_macros_note(cx_expansions: &mut FxHashMap<Span, Vec<String>>, sp: Span, message: String) {
- let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
+ let sp = sp.macro_backtrace().last().map_or(sp, |trace| trace.call_site);
cx_expansions.entry(sp).or_default().push(message);
}

@@ -99,10 +99,10 @@ pub(super) fn parse(
}
_ => token.span,
},
- tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+ tree => tree.as_ref().map_or(span, tokenstream::TokenTree::span),
}
}
- tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
+ tree => tree.as_ref().map_or(start_sp, tokenstream::TokenTree::span),
};
if node_id != DUMMY_NODE_ID {
// Macros loaded from other crates have dummy node ids.
@@ -250,7 +250,7 @@ fn parse_kleene_op(
Some(op) => Ok(Ok((op, token.span))),
None => Ok(Err(token)),
},
- tree => Err(tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span)),
+ tree => Err(tree.as_ref().map_or(span, tokenstream::TokenTree::span)),
}
}

@@ -561,7 +561,7 @@ impl WhereClause<'_> {
/// in `fn foo<T>(t: T) where T: Foo,` so we don't suggest two trailing commas.
pub fn tail_span_for_suggestion(&self) -> Span {
let end = self.span_for_predicates_or_empty_place().shrink_to_hi();
- self.predicates.last().map(|p| p.span()).unwrap_or(end).shrink_to_hi().to(end)
+ self.predicates.last().map_or(end, |p| p.span()).shrink_to_hi().to(end)
}
}

@@ -2118,7 +2118,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
let consider = format!(
"{} {}...",
msg,
- if type_param_span.map(|(_, _, is_impl_trait)| is_impl_trait).unwrap_or(false) {
+ if type_param_span.map_or(false, |(_, _, is_impl_trait)| is_impl_trait) {
format!(" `{}` to `{}`", sub, bound_kind)
} else {
format!("`{}: {}`", bound_kind, sub)

@@ -1533,7 +1533,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
// Note: if these two lines are combined into one we get
// dynamic borrow errors on `self.inner`.
let known = self.inner.borrow_mut().type_variables().probe(v).known();
- known.map(|t| self.shallow_resolve_ty(t)).unwrap_or(typ)
+ known.map_or(typ, |t| self.shallow_resolve_ty(t))
}
ty::Infer(ty::IntVar(v)) => self

@@ -647,8 +647,7 @@ pub fn transparent_newtype_field<'a, 'tcx>(
let param_env = tcx.param_env(variant.def_id);
for field in &variant.fields {
let field_ty = tcx.type_of(field.did);
- let is_zst =
- tcx.layout_of(param_env.and(field_ty)).map(|layout| layout.is_zst()).unwrap_or(false);
+ let is_zst = tcx.layout_of(param_env.and(field_ty)).map_or(false, |layout| layout.is_zst());
if !is_zst {
return Some(field);

@@ -529,8 +529,8 @@ trait UnusedDelimLint {
pprust::expr_to_string(value)
};
let keep_space = (
- left_pos.map(|s| s >= value.span.lo()).unwrap_or(false),
- right_pos.map(|s| s <= value.span.hi()).unwrap_or(false),
+ left_pos.map_or(false, |s| s >= value.span.lo()),
+ right_pos.map_or(false, |s| s <= value.span.hi()),
);
self.emit_unused_delims(cx, value.span, &expr_text, ctx.into(), keep_space);
}

@@ -429,7 +429,7 @@ fn add_query_description_impl(
};
let (tcx, desc) = modifiers.desc;
- let tcx = tcx.as_ref().map(|t| quote! { #t }).unwrap_or(quote! { _ });
+ let tcx = tcx.as_ref().map_or(quote! { _ }, |t| quote! { #t });
let desc = quote! {
#[allow(unused_variables)]

@@ -326,7 +326,7 @@ impl<'a> CrateLoader<'a> {
self.verify_no_symbol_conflicts(&crate_root)?;
let private_dep =
- self.sess.opts.externs.get(&name.as_str()).map(|e| e.is_private_dep).unwrap_or(false);
+ self.sess.opts.externs.get(&name.as_str()).map_or(false, |e| e.is_private_dep);
// Claim this crate number and cache it
let cnum = self.cstore.alloc_new_crate_num();

@@ -132,7 +132,7 @@ impl ItemLikeVisitor<'tcx> for Collector<'tcx> {
impl Collector<'tcx> {
fn register_native_lib(&mut self, span: Option<Span>, lib: NativeLib) {
- if lib.name.as_ref().map(|&s| s == kw::Empty).unwrap_or(false) {
+ if lib.name.as_ref().map_or(false, |&s| s == kw::Empty) {
match span {
Some(span) => {
struct_span_err!(

@@ -815,7 +815,7 @@ impl<'hir> Map<'hir> {
/// Given a node ID, gets a list of attributes associated with the AST
/// corresponding to the node-ID.
pub fn attrs(&self, id: HirId) -> &'hir [ast::Attribute] {
- let attrs = self.find_entry(id).map(|entry| match entry.node {
+ self.find_entry(id).map_or(&[], |entry| match entry.node {
Node::Param(a) => &a.attrs[..],
Node::Local(l) => &l.attrs[..],
Node::Item(i) => &i.attrs[..],
@@ -842,8 +842,7 @@ impl<'hir> Map<'hir> {
| Node::Block(..)
| Node::Lifetime(..)
| Node::Visibility(..) => &[],
- });
- attrs.unwrap_or(&[])
+ })
}
/// Gets the span of the definition of the specified HIR node.

@@ -82,7 +82,7 @@ impl<'tcx> ConstKind<'tcx> {
/// Tries to evaluate the constant if it is `Unevaluated`. If that doesn't succeed, return the
/// unevaluated constant.
pub fn eval(self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> Self {
- self.try_eval(tcx, param_env).and_then(Result::ok).map(ConstKind::Value).unwrap_or(self)
+ self.try_eval(tcx, param_env).and_then(Result::ok).map_or(self, ConstKind::Value)
}
#[inline]

@@ -1338,7 +1338,7 @@ impl<'tcx> TyCtxt<'tcx> {
}
pub fn serialize_query_result_cache(self, encoder: &mut FileEncoder) -> FileEncodeResult {
- self.queries.on_disk_cache.as_ref().map(|c| c.serialize(self, encoder)).unwrap_or(Ok(()))
+ self.queries.on_disk_cache.as_ref().map_or(Ok(()), |c| c.serialize(self, encoder))
}
/// If `true`, we should use the MIR-based borrowck, but also
@@ -2601,7 +2601,7 @@ impl<'tcx> TyCtxt<'tcx> {
}
pub fn is_late_bound(self, id: HirId) -> bool {
- self.is_late_bound_map(id.owner).map(|set| set.contains(&id.local_id)).unwrap_or(false)
+ self.is_late_bound_map(id.owner).map_or(false, |set| set.contains(&id.local_id))
}
pub fn object_lifetime_defaults(self, id: HirId) -> Option<&'tcx [ObjectLifetimeDefault]> {

@@ -535,7 +535,7 @@ fn polymorphize<'tcx>(
} else {
None
};
- let has_upvars = upvars_ty.map(|ty| ty.tuple_fields().count() > 0).unwrap_or(false);
+ let has_upvars = upvars_ty.map_or(false, |ty| ty.tuple_fields().count() > 0);
debug!("polymorphize: upvars_ty={:?} has_upvars={:?}", upvars_ty, has_upvars);
struct PolymorphizationFolder<'tcx> {

@@ -149,7 +149,7 @@ impl<'tcx> BorrowSet<'tcx> {
}
crate fn activations_at_location(&self, location: Location) -> &[BorrowIndex] {
- self.activation_map.get(&location).map(|activations| &activations[..]).unwrap_or(&[])
+ self.activation_map.get(&location).map_or(&[], |activations| &activations[..])
}
crate fn len(&self) -> usize {

@@ -75,7 +75,7 @@ impl BorrowExplanation {
LaterUseKind::FakeLetRead => "stored here",
LaterUseKind::Other => "used here",
};
- if !borrow_span.map(|sp| sp.overlaps(var_or_use_span)).unwrap_or(false) {
+ if !borrow_span.map_or(false, |sp| sp.overlaps(var_or_use_span)) {
err.span_label(
var_or_use_span,
format!("{}borrow later {}", borrow_desc, message),

@@ -370,7 +370,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
#[inline(always)]
pub fn cur_span(&self) -> Span {
- self.stack().last().map(|f| f.current_span()).unwrap_or(self.tcx.span)
+ self.stack().last().map_or(self.tcx.span, |f| f.current_span())
}
#[inline(always)]

@@ -47,8 +47,7 @@ where
let index = index
.try_into()
.expect("more generic parameters than can fit into a `u32`");
- let is_used =
- unused_params.contains(index).map(|unused| !unused).unwrap_or(true);
+ let is_used = unused_params.contains(index).map_or(true, |unused| !unused);
// Only recurse when generic parameters in fns, closures and generators
// are used and require substitution.
match (is_used, subst.needs_subst()) {

@@ -247,8 +247,7 @@ where
for (mono_item, linkage) in cgu.items() {
let symbol_name = mono_item.symbol_name(tcx).name;
let symbol_hash_start = symbol_name.rfind('h');
- let symbol_hash =
- symbol_hash_start.map(|i| &symbol_name[i..]).unwrap_or("<no hash>");
+ let symbol_hash = symbol_hash_start.map_or("<no hash>", |i| &symbol_name[i..]);
debug!(
" - {} [{:?}] [{}] estimated size {}",

@@ -113,7 +113,7 @@ fn get_arm_identity_info<'a, 'tcx>(
test: impl Fn(&'a Statement<'tcx>) -> bool,
mut action: impl FnMut(usize, &'a Statement<'tcx>),
) {
- while stmt_iter.peek().map(|(_, stmt)| test(stmt)).unwrap_or(false) {
+ while stmt_iter.peek().map_or(false, |(_, stmt)| test(stmt)) {
let (idx, stmt) = stmt_iter.next().unwrap();
action(idx, stmt);
@@ -635,7 +635,7 @@ impl<'a, 'tcx> SimplifyBranchSameOptimizationFinder<'a, 'tcx> {
})
.peekable();
- let bb_first = iter_bbs_reachable.peek().map(|(idx, _)| *idx).unwrap_or(&targets_and_values[0]);
+ let bb_first = iter_bbs_reachable.peek().map_or(&targets_and_values[0], |(idx, _)| *idx);
let mut all_successors_equivalent = StatementEquality::TrivialEqual;
// All successor basic blocks must be equal or contain statements that are pairwise considered equal.

@@ -952,7 +952,7 @@ fn is_useful<'p, 'tcx>(
assert!(rows.iter().all(|r| r.len() == v.len()));
// FIXME(Nadrieril): Hack to work around type normalization issues (see #72476).
- let ty = matrix.heads().next().map(|r| r.ty).unwrap_or(v.head().ty);
+ let ty = matrix.heads().next().map_or(v.head().ty, |r| r.ty);
let pcx = PatCtxt { cx, ty, span: v.head().span, is_top_level };
debug!("is_useful_expand_first_col: ty={:#?}, expanding {:#?}", pcx.ty, v.head());

@@ -511,7 +511,7 @@ impl<'a> Parser<'a> {
//
// `x.foo::<u32>>>(3)`
let parsed_angle_bracket_args =
- segment.args.as_ref().map(|args| args.is_angle_bracketed()).unwrap_or(false);
+ segment.args.as_ref().map_or(false, |args| args.is_angle_bracketed());
debug!(
"check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",

@@ -347,7 +347,7 @@ impl<'a> Parser<'a> {
let mut pos = pos;
// This handles the raw string case, the raw argument is the number of #
// in r###"..."### (we need to add one because of the `r`).
- let raw = self.style.map(|raw| raw + 1).unwrap_or(0);
+ let raw = self.style.map_or(0, |raw| raw + 1);
for skip in &self.skips {
if pos > *skip {
pos += 1;
@@ -814,7 +814,7 @@ fn find_skips_from_snippet(
skips
}
- let r_start = str_style.map(|r| r + 1).unwrap_or(0);
+ let r_start = str_style.map_or(0, |r| r + 1);
let r_end = str_style.unwrap_or(0);
let s = &snippet[r_start + 1..snippet.len() - r_end - 1];
(find_skips(s, str_style.is_some()), true)

@@ -953,7 +953,7 @@ impl<K: DepKind> DepGraph<K> {
// Returns true if the given node has been marked as green during the
// current compilation session. Used in various assertions
pub fn is_green(&self, dep_node: &DepNode<K>) -> bool {
- self.node_color(dep_node).map(|c| c.is_green()).unwrap_or(false)
+ self.node_color(dep_node).map_or(false, |c| c.is_green())
}
// This method loads all on-disk cacheable query results into memory, so

@@ -1925,7 +1925,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
{
// Check if we wrote `str::from_utf8` instead of `std::str::from_utf8`
let item_span =
- path.iter().last().map(|segment| segment.ident.span).unwrap_or(span);
+ path.iter().last().map_or(span, |segment| segment.ident.span);
let mut hm = self.r.session.confused_type_with_std_module.borrow_mut();
hm.insert(item_span, span);

@@ -264,7 +264,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
// The current function has a `self' parameter, but we were unable to resolve
// a reference to `self`. This can only happen if the `self` identifier we
// are resolving came from a different hygiene context.
- if fn_kind.decl().inputs.get(0).map(|p| p.is_self()).unwrap_or(false) {
+ if fn_kind.decl().inputs.get(0).map_or(false, |p| p.is_self()) {
err.span_label(*span, "this function has a `self` parameter, but a macro invocation can only access identifiers it receives from parameters");
} else {
let doesnt = if is_assoc_fn {
@@ -1452,8 +1452,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
}
} else {
let needs_placeholder = |def_id: DefId, kind: CtorKind| {
- let has_no_fields =
- self.r.field_names.get(&def_id).map(|f| f.is_empty()).unwrap_or(false);
+ let has_no_fields = self.r.field_names.get(&def_id).map_or(false, |f| f.is_empty());
match kind {
CtorKind::Const => false,
CtorKind::Fn | CtorKind::Fictive if has_no_fields => false,

@@ -1052,7 +1052,7 @@ pub struct ResolverArenas<'a> {
impl<'a> ResolverArenas<'a> {
fn alloc_module(&'a self, module: ModuleData<'a>) -> Module<'a> {
let module = self.modules.alloc(module);
- if module.def_id().map(|def_id| def_id.is_local()).unwrap_or(true) {
+ if module.def_id().map_or(true, |def_id| def_id.is_local()) {
self.local_modules.borrow_mut().push(module);
}
module
@@ -3031,7 +3031,7 @@ impl<'a> Resolver<'a> {
let duplicate = new_binding.res().opt_def_id() == old_binding.res().opt_def_id();
let has_dummy_span = new_binding.span.is_dummy() || old_binding.span.is_dummy();
let from_item =
- self.extern_prelude.get(&ident).map(|entry| entry.introduced_by_item).unwrap_or(true);
+ self.extern_prelude.get(&ident).map_or(true, |entry| entry.introduced_by_item);
// Only suggest removing an import if both bindings are to the same def, if both spans
// aren't dummy spans. Further, if both bindings are imports, then the ident must have
// been introduced by a item.

@@ -670,7 +670,7 @@ impl<'tcx> SaveContext<'tcx> {
) -> Option<Ref> {
// Returns true if the path is function type sugar, e.g., `Fn(A) -> B`.
fn fn_type(seg: &hir::PathSegment<'_>) -> bool {
- seg.args.map(|args| args.parenthesized).unwrap_or(false)
+ seg.args.map_or(false, |args| args.parenthesized)
}
let res = self.get_path_res(id);

@@ -1358,7 +1358,7 @@ pub fn build_session(
let optimization_fuel_crate = sopts.debugging_opts.fuel.as_ref().map(|i| i.0.clone());
let optimization_fuel = Lock::new(OptimizationFuel {
- remaining: sopts.debugging_opts.fuel.as_ref().map(|i| i.1).unwrap_or(0),
+ remaining: sopts.debugging_opts.fuel.as_ref().map_or(0, |i| i.1),
out_of_fuel: false,
});
let print_fuel_crate = sopts.debugging_opts.print_fuel.clone();

@@ -539,7 +539,7 @@ impl SourceMap {
pub fn is_line_before_span_empty(&self, sp: Span) -> bool {
match self.span_to_prev_source(sp) {
- Ok(s) => s.split('\n').last().map(|l| l.trim_start().is_empty()).unwrap_or(false),
+ Ok(s) => s.split('\n').last().map_or(false, |l| l.trim_start().is_empty()),
Err(_) => false,
}
}
@@ -568,7 +568,7 @@ impl SourceMap {
// asserting that the line numbers here are all indeed 1-based.
let hi_line = hi.line.saturating_sub(1);
for line_index in lo.line.saturating_sub(1)..hi_line {
- let line_len = lo.file.get_line(line_index).map(|s| s.chars().count()).unwrap_or(0);
+ let line_len = lo.file.get_line(line_index).map_or(0, |s| s.chars().count());
lines.push(LineInfo { line_index, start_col, end_col: CharPos::from_usize(line_len) });
start_col = CharPos::from_usize(0);
}

@@ -107,7 +107,7 @@ fn t7() {
fn span_from_selection(input: &str, selection: &str) -> Span {
assert_eq!(input.len(), selection.len());
let left_index = selection.find('~').unwrap() as u32;
- let right_index = selection.rfind('~').map(|x| x as u32).unwrap_or(left_index);
+ let right_index = selection.rfind('~').map_or(left_index, |x| x as u32);
Span::with_root_ctxt(BytePos(left_index), BytePos(right_index + 1))
}

@@ -830,7 +830,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
.collect::<Vec<ArgKind>>(),
),
Node::Ctor(ref variant_data) => {
- let span = variant_data.ctor_hir_id().map(|id| hir.span(id)).unwrap_or(DUMMY_SP);
+ let span = variant_data.ctor_hir_id().map_or(DUMMY_SP, |id| hir.span(id));
let span = sm.guess_head_span(span);
(span, vec![ArgKind::empty(); variant_data.fields().len()])
}

@@ -1259,8 +1259,8 @@ pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, adt: &'tcx ty
let layout = tcx.layout_of(param_env.and(ty));
// We are currently checking the type this field came from, so it must be local
let span = tcx.hir().span_if_local(field.did).unwrap();
- let zst = layout.map(|layout| layout.is_zst()).unwrap_or(false);
- let align1 = layout.map(|layout| layout.align.abi.bytes() == 1).unwrap_or(false);
+ let zst = layout.map_or(false, |layout| layout.is_zst());
+ let align1 = layout.map_or(false, |layout| layout.align.abi.bytes() == 1);
(span, zst, align1)
});

@@ -364,13 +364,14 @@ fn check_region_bounds_on_impl_item<'tcx>(
if trait_params != impl_params {
let item_kind = assoc_item_kind_str(impl_m);
let def_span = tcx.sess.source_map().guess_head_span(span);
- let span = tcx.hir().get_generics(impl_m.def_id).map(|g| g.span).unwrap_or(def_span);
+ let span = tcx.hir().get_generics(impl_m.def_id).map_or(def_span, |g| g.span);
let generics_span = if let Some(sp) = tcx.hir().span_if_local(trait_m.def_id) {
let def_sp = tcx.sess.source_map().guess_head_span(sp);
- Some(tcx.hir().get_generics(trait_m.def_id).map(|g| g.span).unwrap_or(def_sp))
+ Some(tcx.hir().get_generics(trait_m.def_id).map_or(def_sp, |g| g.span))
} else {
None
};
tcx.sess.emit_err(LifetimesOrBoundsMismatchOnTrait {
span,
item_kind,

@@ -904,8 +904,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
{
// Return directly on cache hit. This is useful to avoid doubly reporting
// errors with default match binding modes. See #44614.
- let def =
- cached_result.map(|(kind, def_id)| Res::Def(kind, def_id)).unwrap_or(Res::Err);
+ let def = cached_result.map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id));
return (def, Some(ty), slice::from_ref(&**item_segment));
}
let item_name = item_segment.ident;
@@ -932,7 +931,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Write back the new resolution.
self.write_resolution(hir_id, result);
(
- result.map(|(kind, def_id)| Res::Def(kind, def_id)).unwrap_or(Res::Err),
+ result.map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id)),
Some(ty),
slice::from_ref(&**item_segment),
)

@@ -816,7 +816,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Some(match &elem.kind {
// Point at the tail expression when possible.
hir::ExprKind::Block(block, _) => {
- block.expr.as_ref().map(|e| e.span).unwrap_or(block.span)
+ block.expr.as_ref().map_or(block.span, |e| e.span)
}
_ => elem.span,
})
@@ -888,7 +888,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Write back the new resolution.
self.write_resolution(hir_id, result);
- (result.map(|(kind, def_id)| Res::Def(kind, def_id)).unwrap_or(Res::Err), ty)
+ (result.map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id)), ty)
}
QPath::LangItem(lang_item, span) => {
self.resolve_lang_item_path(lang_item, span, hir_id)

@@ -1193,7 +1193,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.any(|imp_did| {
let imp = self.tcx.impl_trait_ref(imp_did).unwrap();
let imp_simp = simplify_type(self.tcx, imp.self_ty(), true);
- imp_simp.map(|s| s == simp_rcvr_ty).unwrap_or(false)
+ imp_simp.map_or(false, |s| s == simp_rcvr_ty)
})
{
explicitly_negative.push(candidate);
@@ -1270,11 +1270,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
match ty.kind() {
ty::Adt(def, _) => def.did.is_local(),
ty::Foreign(did) => did.is_local(),
- ty::Dynamic(ref tr, ..) => {
- tr.principal().map(|d| d.def_id().is_local()).unwrap_or(false)
- }
+ ty::Dynamic(ref tr, ..) => tr.principal().map_or(false, |d| d.def_id().is_local()),
ty::Param(_) => true,
// Everything else (primitive types, etc.) is effectively

@@ -108,7 +108,7 @@ pub fn check_item_well_formed(tcx: TyCtxt<'_>, def_id: LocalDefId) {
.impl_trait_ref(tcx.hir().local_def_id(item.hir_id))
.map_or(false, |trait_ref| tcx.trait_is_auto(trait_ref.def_id));
if let (hir::Defaultness::Default { .. }, true) = (impl_.defaultness, is_auto) {
- let sp = impl_.of_trait.as_ref().map(|t| t.path.span).unwrap_or(item.span);
+ let sp = impl_.of_trait.as_ref().map_or(item.span, |t| t.path.span);
let mut err =
tcx.sess.struct_span_err(sp, "impls of auto traits cannot be default");
err.span_labels(impl_.defaultness_span, "default because of this");

@@ -1656,7 +1656,7 @@ fn impl_polarity(tcx: TyCtxt<'_>, def_id: DefId) -> ty::ImplPolarity {
..
}) => {
if is_rustc_reservation {
- let span = span.to(of_trait.as_ref().map(|t| t.path.span).unwrap_or(*span));
+ let span = span.to(of_trait.as_ref().map_or(*span, |t| t.path.span));
tcx.sess.span_err(span, "reservation impls can't be negative");
}
ty::ImplPolarity::Negative

@@ -654,9 +654,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
// Then we see that to get the same result, we must start with
// `deref { deref { place_foo }}` instead of `place_foo` since the pattern is now `Some(x,)`
// and not `&&Some(x,)`, even though its assigned type is that of `&&Some(x,)`.
- for _ in
- 0..self.typeck_results.pat_adjustments().get(pat.hir_id).map(|v| v.len()).unwrap_or(0)
- {
+ for _ in 0..self.typeck_results.pat_adjustments().get(pat.hir_id).map_or(0, |v| v.len()) {
debug!("cat_pattern: applying adjustment to place_with_id={:?}", place_with_id);
place_with_id = self.cat_deref(pat, place_with_id)?;
}

@@ -99,7 +99,7 @@ impl<'cx, 'tcx> ItemLikeVisitor<'tcx> for InferVisitor<'cx, 'tcx> {
// we walk the crates again and re-calculate predicates for all
// items.
let item_predicates_len: usize =
- self.global_inferred_outlives.get(&item_did.to_def_id()).map(|p| p.len()).unwrap_or(0);
+ self.global_inferred_outlives.get(&item_did.to_def_id()).map_or(0, |p| p.len());
if item_required_predicates.len() > item_predicates_len {
*self.predicates_added = true;
self.global_inferred_outlives.insert(item_did.to_def_id(), item_required_predicates);