Auto merge of #78956 - jonas-schievink:rollup-r53giob, r=jonas-schievink
Rollup of 11 pull requests

Successful merges:

 - #78216 (Duration::zero() -> Duration::ZERO)
 - #78354 (Support enable/disable sanitizers/profiler per target)
 - #78417 (BTreeMap: split off most code of append)
 - #78832 (look at assoc ct, check the type of nodes)
 - #78873 (Add flags customizing behaviour of MIR inlining)
 - #78899 (Support inlining diverging function calls)
 - #78923 (Cleanup and comment intra-doc link pass)
 - #78929 (rustc_target: Move target env "gnu" from `linux_base` to `linux_gnu_base`)
 - #78930 (rustc_target: Remove `TargetOptions::is_like_android`)
 - #78942 (Fix typo in comment)
 - #78947 (Ship llvm-cov through llvm-tools)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
commit 77180db6f8
@@ -554,6 +554,8 @@ fn test_debugging_options_tracking_hash() {
     tracked!(function_sections, Some(false));
     tracked!(human_readable_cgu_names, true);
     tracked!(inline_in_all_cgus, Some(true));
+    tracked!(inline_mir_threshold, 123);
+    tracked!(inline_mir_hint_threshold, 123);
     tracked!(insert_sideeffect, true);
     tracked!(instrument_coverage, true);
     tracked!(instrument_mcount, true);
@@ -16,9 +16,6 @@ use crate::transform::MirPass;
 use std::iter;
 use std::ops::{Range, RangeFrom};
 
-const DEFAULT_THRESHOLD: usize = 50;
-const HINT_THRESHOLD: usize = 100;
-
 const INSTR_COST: usize = 5;
 const CALL_PENALTY: usize = 25;
 const LANDINGPAD_PENALTY: usize = 50;
@@ -31,7 +28,8 @@ pub struct Inline;
 #[derive(Copy, Clone, Debug)]
 struct CallSite<'tcx> {
     callee: Instance<'tcx>,
-    bb: BasicBlock,
+    block: BasicBlock,
+    target: Option<BasicBlock>,
     source_info: SourceInfo,
 }
 
@@ -175,8 +173,7 @@ impl Inliner<'tcx> {
 
         // Only consider direct calls to functions
         let terminator = bb_data.terminator();
-        // FIXME: Handle inlining of diverging calls
-        if let TerminatorKind::Call { func: ref op, destination: Some(_), .. } = terminator.kind {
+        if let TerminatorKind::Call { func: ref op, ref destination, .. } = terminator.kind {
             if let ty::FnDef(callee_def_id, substs) = *op.ty(caller_body, self.tcx).kind() {
                 // To resolve an instance its substs have to be fully normalized, so
                 // we do this here.
@@ -190,7 +187,12 @@ impl Inliner<'tcx> {
                     return None;
                 }
 
-                return Some(CallSite { callee, bb, source_info: terminator.source_info });
+                return Some(CallSite {
+                    callee,
+                    block: bb,
+                    target: destination.map(|(_, target)| target),
+                    source_info: terminator.source_info,
+                });
             }
         }
 
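Note: the `destination` of a MIR `Call` terminator is `None` for diverging calls, so the new `target: Option<BasicBlock>` field simply forwards that optionality. A minimal standalone sketch of the mapping, using stand-in types rather than the real rustc internals:

```rust
// Stand-in types; the real rustc TerminatorKind and BasicBlock are more involved.
#[derive(Copy, Clone, Debug, PartialEq)]
struct BasicBlock(usize);

#[allow(dead_code)]
enum TerminatorKind {
    Call { destination: Option<(&'static str, BasicBlock)> },
    Goto { target: BasicBlock },
}

fn call_target(kind: TerminatorKind) -> Option<BasicBlock> {
    match kind {
        // A converging call records where control resumes; a diverging one has no target.
        TerminatorKind::Call { destination } => destination.map(|(_place, target)| target),
        _ => None,
    }
}

fn main() {
    let converging = TerminatorKind::Call { destination: Some(("_0", BasicBlock(3))) };
    let diverging = TerminatorKind::Call { destination: None };
    assert_eq!(call_target(converging), Some(BasicBlock(3)));
    assert_eq!(call_target(diverging), None);
}
```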
@@ -248,7 +250,11 @@ impl Inliner<'tcx> {
             }
         }
 
-        let mut threshold = if hinted { HINT_THRESHOLD } else { DEFAULT_THRESHOLD };
+        let mut threshold = if hinted {
+            self.tcx.sess.opts.debugging_opts.inline_mir_hint_threshold
+        } else {
+            self.tcx.sess.opts.debugging_opts.inline_mir_threshold
+        };
 
         // Significantly lower the threshold for inlining cold functions
         if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
@@ -398,9 +404,9 @@ impl Inliner<'tcx> {
         caller_body: &mut Body<'tcx>,
         mut callee_body: Body<'tcx>,
     ) {
-        let terminator = caller_body[callsite.bb].terminator.take().unwrap();
+        let terminator = caller_body[callsite.block].terminator.take().unwrap();
         match terminator.kind {
-            TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
+            TerminatorKind::Call { args, destination, cleanup, .. } => {
                 // If the call is something like `a[*i] = f(i)`, where
                 // `i : &mut usize`, then just duplicating the `a[*i]`
                 // Place could result in two different locations if `f`
@@ -417,35 +423,31 @@ impl Inliner<'tcx> {
                     false
                 }
 
-                let dest = if dest_needs_borrow(destination.0) {
-                    trace!("creating temp for return destination");
-                    let dest = Rvalue::Ref(
-                        self.tcx.lifetimes.re_erased,
-                        BorrowKind::Mut { allow_two_phase_borrow: false },
-                        destination.0,
-                    );
-
-                    let ty = dest.ty(caller_body, self.tcx);
-
-                    let temp = LocalDecl::new(ty, callsite.source_info.span);
-
-                    let tmp = caller_body.local_decls.push(temp);
-                    let tmp = Place::from(tmp);
-
-                    let stmt = Statement {
-                        source_info: callsite.source_info,
-                        kind: StatementKind::Assign(box (tmp, dest)),
-                    };
-                    caller_body[callsite.bb].statements.push(stmt);
-                    self.tcx.mk_place_deref(tmp)
+                let dest = if let Some((destination_place, _)) = destination {
+                    if dest_needs_borrow(destination_place) {
+                        trace!("creating temp for return destination");
+                        let dest = Rvalue::Ref(
+                            self.tcx.lifetimes.re_erased,
+                            BorrowKind::Mut { allow_two_phase_borrow: false },
+                            destination_place,
+                        );
+                        let dest_ty = dest.ty(caller_body, self.tcx);
+                        let temp = Place::from(self.new_call_temp(caller_body, &callsite, dest_ty));
+                        caller_body[callsite.block].statements.push(Statement {
+                            source_info: callsite.source_info,
+                            kind: StatementKind::Assign(box (temp, dest)),
+                        });
+                        self.tcx.mk_place_deref(temp)
+                    } else {
+                        destination_place
+                    }
                 } else {
-                    destination.0
+                    trace!("creating temp for return place");
+                    Place::from(self.new_call_temp(caller_body, &callsite, callee_body.return_ty()))
                 };
 
-                let return_block = destination.1;
-
                 // Copy the arguments if needed.
-                let args: Vec<_> = self.make_call_args(args, &callsite, caller_body, return_block);
+                let args: Vec<_> = self.make_call_args(args, &callsite, caller_body);
 
                 let mut integrator = Integrator {
                     args: &args,
@@ -453,7 +455,7 @@ impl Inliner<'tcx> {
                     new_scopes: SourceScope::new(caller_body.source_scopes.len())..,
                     new_blocks: BasicBlock::new(caller_body.basic_blocks().len())..,
                     destination: dest,
-                    return_block,
+                    return_block: callsite.target,
                     cleanup_block: cleanup,
                     in_cleanup_block: false,
                     tcx: self.tcx,
@@ -502,7 +504,7 @@ impl Inliner<'tcx> {
                 caller_body.var_debug_info.extend(callee_body.var_debug_info.drain(..));
                 caller_body.basic_blocks_mut().extend(callee_body.basic_blocks_mut().drain(..));
 
-                caller_body[callsite.bb].terminator = Some(Terminator {
+                caller_body[callsite.block].terminator = Some(Terminator {
                     source_info: callsite.source_info,
                     kind: TerminatorKind::Goto { target: integrator.map_block(START_BLOCK) },
                 });
@@ -526,7 +528,6 @@ impl Inliner<'tcx> {
         args: Vec<Operand<'tcx>>,
         callsite: &CallSite<'tcx>,
         caller_body: &mut Body<'tcx>,
-        return_block: BasicBlock,
     ) -> Vec<Local> {
         let tcx = self.tcx;
 
@@ -557,18 +558,8 @@ impl Inliner<'tcx> {
         // `callee_body.spread_arg == None`, instead of special-casing closures.
         if tcx.is_closure(callsite.callee.def_id()) {
             let mut args = args.into_iter();
-            let self_ = self.create_temp_if_necessary(
-                args.next().unwrap(),
-                callsite,
-                caller_body,
-                return_block,
-            );
-            let tuple = self.create_temp_if_necessary(
-                args.next().unwrap(),
-                callsite,
-                caller_body,
-                return_block,
-            );
+            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
+            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
             assert!(args.next().is_none());
 
             let tuple = Place::from(tuple);
@@ -588,13 +579,13 @@ impl Inliner<'tcx> {
                     Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty.expect_ty()));
 
                 // Spill to a local to make e.g., `tmp0`.
-                self.create_temp_if_necessary(tuple_field, callsite, caller_body, return_block)
+                self.create_temp_if_necessary(tuple_field, callsite, caller_body)
             });
 
             closure_ref_arg.chain(tuple_tmp_args).collect()
         } else {
             args.into_iter()
-                .map(|a| self.create_temp_if_necessary(a, callsite, caller_body, return_block))
+                .map(|a| self.create_temp_if_necessary(a, callsite, caller_body))
                 .collect()
         }
     }
@@ -606,46 +597,52 @@ impl Inliner<'tcx> {
         arg: Operand<'tcx>,
         callsite: &CallSite<'tcx>,
         caller_body: &mut Body<'tcx>,
-        return_block: BasicBlock,
     ) -> Local {
         // FIXME: Analysis of the usage of the arguments to avoid
         // unnecessary temporaries.
 
+        // Reuse the operand if it is a moved temporary.
        if let Operand::Move(place) = &arg {
            if let Some(local) = place.as_local() {
                if caller_body.local_kind(local) == LocalKind::Temp {
-                    // Reuse the operand if it's a temporary already
                    return local;
                }
            }
        }
 
+        // Otherwise, create a temporary for the argument.
        trace!("creating temp for argument {:?}", arg);
-        // Otherwise, create a temporary for the arg
-        let arg = Rvalue::Use(arg);
-
-        let ty = arg.ty(caller_body, self.tcx);
-
-        let arg_tmp = LocalDecl::new(ty, callsite.source_info.span);
-        let arg_tmp = caller_body.local_decls.push(arg_tmp);
-
-        caller_body[callsite.bb].statements.push(Statement {
+        let arg_ty = arg.ty(caller_body, self.tcx);
+        let local = self.new_call_temp(caller_body, callsite, arg_ty);
+        caller_body[callsite.block].statements.push(Statement {
            source_info: callsite.source_info,
-            kind: StatementKind::StorageLive(arg_tmp),
+            kind: StatementKind::Assign(box (Place::from(local), Rvalue::Use(arg))),
        });
-        caller_body[callsite.bb].statements.push(Statement {
-            source_info: callsite.source_info,
-            kind: StatementKind::Assign(box (Place::from(arg_tmp), arg)),
-        });
-        caller_body[return_block].statements.insert(
-            0,
-            Statement {
-                source_info: callsite.source_info,
-                kind: StatementKind::StorageDead(arg_tmp),
-            },
-        );
+        local
+    }
 
-        arg_tmp
+    /// Introduces a new temporary into the caller body that is live for the duration of the call.
+    fn new_call_temp(
+        &self,
+        caller_body: &mut Body<'tcx>,
+        callsite: &CallSite<'tcx>,
+        ty: Ty<'tcx>,
+    ) -> Local {
+        let local = caller_body.local_decls.push(LocalDecl::new(ty, callsite.source_info.span));
+
+        caller_body[callsite.block].statements.push(Statement {
+            source_info: callsite.source_info,
+            kind: StatementKind::StorageLive(local),
+        });
+
+        if let Some(block) = callsite.target {
+            caller_body[block].statements.insert(
+                0,
+                Statement {
+                    source_info: callsite.source_info,
+                    kind: StatementKind::StorageDead(local),
+                },
+            );
+        }
+
+        local
     }
 }
 
@@ -670,7 +667,7 @@ struct Integrator<'a, 'tcx> {
     new_scopes: RangeFrom<SourceScope>,
     new_blocks: RangeFrom<BasicBlock>,
     destination: Place<'tcx>,
-    return_block: BasicBlock,
+    return_block: Option<BasicBlock>,
     cleanup_block: Option<BasicBlock>,
     in_cleanup_block: bool,
     tcx: TyCtxt<'tcx>,
@@ -816,7 +813,11 @@ impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
                 }
             }
             TerminatorKind::Return => {
-                terminator.kind = TerminatorKind::Goto { target: self.return_block };
+                terminator.kind = if let Some(tgt) = self.return_block {
+                    TerminatorKind::Goto { target: tgt }
+                } else {
+                    TerminatorKind::Unreachable
+                }
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
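Note: the counterpart of the `Option<BasicBlock>` target is the `Return` rewrite above — when an inlined body returns but there is no block to resume in (a diverging call site), the terminator becomes `Unreachable`. A self-contained model of that rewrite with stand-in types, not the real rustc MIR:

```rust
#[derive(Debug, PartialEq, Clone, Copy)]
struct BasicBlock(usize);

#[derive(Debug, PartialEq)]
#[allow(dead_code)]
enum TerminatorKind {
    Return,
    Goto { target: BasicBlock },
    Unreachable,
}

// Return jumps back to the caller if a resume target exists, otherwise it is unreachable.
fn rewrite_return(return_block: Option<BasicBlock>) -> TerminatorKind {
    match return_block {
        Some(target) => TerminatorKind::Goto { target },
        None => TerminatorKind::Unreachable,
    }
}

fn main() {
    assert_eq!(rewrite_return(Some(BasicBlock(7))), TerminatorKind::Goto { target: BasicBlock(7) });
    assert_eq!(rewrite_return(None), TerminatorKind::Unreachable);
}
```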
@@ -929,6 +929,10 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
         (default: no)"),
     incremental_verify_ich: bool = (false, parse_bool, [UNTRACKED],
         "verify incr. comp. hashes of green query instances (default: no)"),
+    inline_mir_threshold: usize = (50, parse_uint, [TRACKED],
+        "a default MIR inlining threshold (default: 50)"),
+    inline_mir_hint_threshold: usize = (100, parse_uint, [TRACKED],
+        "inlining threshold for functions with inline hint (default: 100)"),
     inline_in_all_cgus: Option<bool> = (None, parse_opt_bool, [TRACKED],
         "control whether `#[inline]` functions are in all CGUs"),
     input_stats: bool = (false, parse_bool, [UNTRACKED],
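Note: these tracked `-Z` options replace the hard-coded `DEFAULT_THRESHOLD`/`HINT_THRESHOLD` constants removed above; the inliner reads them from the session instead. A hedged standalone sketch of the selection logic, using a stand-in struct rather than the real `DebuggingOptions`:

```rust
// Hypothetical stand-in for the relevant subset of rustc's debugging options.
struct DebuggingOpts {
    inline_mir_threshold: usize,      // set via -Z inline-mir-threshold=<n>
    inline_mir_hint_threshold: usize, // set via -Z inline-mir-hint-threshold=<n>
}

// Same choice the inliner makes above: #[inline]-hinted callees get the larger budget.
fn pick_threshold(opts: &DebuggingOpts, hinted: bool) -> usize {
    if hinted { opts.inline_mir_hint_threshold } else { opts.inline_mir_threshold }
}

fn main() {
    let opts = DebuggingOpts { inline_mir_threshold: 50, inline_mir_hint_threshold: 100 };
    assert_eq!(pick_threshold(&opts, false), 50);
    assert_eq!(pick_threshold(&opts, true), 100);
}
```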
@@ -1,7 +1,7 @@
 use crate::spec::{Target, TargetOptions};
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.max_atomic_width = Some(128);
 
     Target {
@@ -1,7 +1,7 @@
 use crate::spec::{LinkerFlavor, TargetOptions};
 
 pub fn opts() -> TargetOptions {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.os = "android".to_string();
     // Many of the symbols defined in compiler-rt are also defined in libgcc.
     // Android's linker doesn't like that by default.
@@ -9,7 +9,6 @@ pub fn opts() -> TargetOptions {
         .get_mut(&LinkerFlavor::Gcc)
         .unwrap()
         .push("-Wl,--allow-multiple-definition".to_string());
-    base.is_like_android = true;
     base.dwarf_version = Some(2);
     base.position_independent_executables = true;
     base.has_elf_tls = false;
@@ -1,7 +1,7 @@
 use crate::spec::{Target, TargetOptions};
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.max_atomic_width = Some(64);
     Target {
         llvm_target: "arm-unknown-linux-gnueabi".to_string(),
@@ -1,7 +1,7 @@
 use crate::spec::{Target, TargetOptions};
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.max_atomic_width = Some(64);
     Target {
         llvm_target: "arm-unknown-linux-gnueabihf".to_string(),
@@ -1,7 +1,7 @@
 use crate::spec::{Target, TargetOptions};
 
 pub fn target() -> Target {
-    let base = super::linux_base::opts();
+    let base = super::linux_gnu_base::opts();
     Target {
         llvm_target: "armv4t-unknown-linux-gnueabi".to_string(),
         pointer_width: 32,
@@ -1,7 +1,7 @@
 use crate::spec::{Target, TargetOptions};
 
 pub fn target() -> Target {
-    let base = super::linux_base::opts();
+    let base = super::linux_gnu_base::opts();
     Target {
         llvm_target: "armv5te-unknown-linux-gnueabi".to_string(),
         pointer_width: 32,
@@ -4,7 +4,7 @@ use crate::spec::{Target, TargetOptions};
 // hardfloat.
 
 pub fn target() -> Target {
-    let base = super::linux_base::opts();
+    let base = super::linux_gnu_base::opts();
     Target {
         llvm_target: "armv7-unknown-linux-gnueabi".to_string(),
         pointer_width: 32,
@@ -4,7 +4,7 @@ use crate::spec::{Target, TargetOptions};
 // thumb-mode. See the thumbv7neon variant for enabling both.
 
 pub fn target() -> Target {
-    let base = super::linux_base::opts();
+    let base = super::linux_gnu_base::opts();
     Target {
         llvm_target: "armv7-unknown-linux-gnueabihf".to_string(),
         pointer_width: 32,
@@ -1,7 +1,7 @@
 use crate::spec::{LinkerFlavor, Target};
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.cpu = "pentium4".to_string();
     base.max_atomic_width = Some(64);
     base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
@@ -20,7 +20,6 @@ pub fn opts() -> TargetOptions {
 
     TargetOptions {
         os: "linux".to_string(),
-        env: "gnu".to_string(),
         dynamic_linking: true,
         executables: true,
         os_family: Some("unix".to_string()),

compiler/rustc_target/src/spec/linux_gnu_base.rs (new file)
@@ -0,0 +1,5 @@
use crate::spec::TargetOptions;

pub fn opts() -> TargetOptions {
    TargetOptions { env: "gnu".to_string(), ..super::linux_base::opts() }
}
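Note: the new `linux_gnu_base` module layers `env: "gnu"` on top of `linux_base` with struct-update syntax; targets that want the GNU environment switch their base, everything else keeps `linux_base`. A minimal standalone model of that layering, with a hypothetical trimmed-down stand-in for `TargetOptions`:

```rust
// Hypothetical stand-in for rustc_target's much larger TargetOptions.
#[derive(Clone, Debug, PartialEq, Default)]
struct TargetOptions {
    os: String,
    env: String,
    dynamic_linking: bool,
}

fn linux_base() -> TargetOptions {
    TargetOptions { os: "linux".to_string(), dynamic_linking: true, ..Default::default() }
}

// Mirrors linux_gnu_base::opts(): only the env field differs from the plain Linux base.
fn linux_gnu_base() -> TargetOptions {
    TargetOptions { env: "gnu".to_string(), ..linux_base() }
}

fn main() {
    assert_eq!(linux_base().env, "");
    assert_eq!(linux_gnu_base().env, "gnu");
    assert_eq!(linux_gnu_base().os, "linux");
}
```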
@@ -14,7 +14,7 @@ pub fn target() -> Target {
             max_atomic_width: Some(64),
             mcount: "_mcount".to_string(),
 
-            ..super::linux_base::opts()
+            ..super::linux_gnu_base::opts()
         },
     }
 }
@@ -13,7 +13,7 @@ pub fn target() -> Target {
             max_atomic_width: Some(64),
             mcount: "_mcount".to_string(),
 
-            ..super::linux_base::opts()
+            ..super::linux_gnu_base::opts()
         },
     }
 }
@@ -13,7 +13,7 @@ pub fn target() -> Target {
             max_atomic_width: Some(32),
             mcount: "_mcount".to_string(),
 
-            ..super::linux_base::opts()
+            ..super::linux_gnu_base::opts()
         },
     }
 }
@@ -13,7 +13,7 @@ pub fn target() -> Target {
             max_atomic_width: Some(32),
             mcount: "_mcount".to_string(),
 
-            ..super::linux_base::opts()
+            ..super::linux_gnu_base::opts()
         },
     }
 }
@@ -13,7 +13,7 @@ pub fn target() -> Target {
             max_atomic_width: Some(32),
             mcount: "_mcount".to_string(),
 
-            ..super::linux_base::opts()
+            ..super::linux_gnu_base::opts()
         },
     }
 }
@@ -13,7 +13,7 @@ pub fn target() -> Target {
             max_atomic_width: Some(32),
             mcount: "_mcount".to_string(),
 
-            ..super::linux_base::opts()
+            ..super::linux_gnu_base::opts()
         },
     }
 }
@@ -14,7 +14,7 @@ pub fn target() -> Target {
             max_atomic_width: Some(64),
             mcount: "_mcount".to_string(),
 
-            ..super::linux_base::opts()
+            ..super::linux_gnu_base::opts()
         },
     }
 }
@@ -13,7 +13,7 @@ pub fn target() -> Target {
             max_atomic_width: Some(64),
             mcount: "_mcount".to_string(),
 
-            ..super::linux_base::opts()
+            ..super::linux_gnu_base::opts()
         },
     }
 }
@@ -64,6 +64,7 @@ mod hermit_kernel_base;
 mod illumos_base;
 mod l4re_base;
 mod linux_base;
+mod linux_gnu_base;
 mod linux_kernel_base;
 mod linux_musl_base;
 mod linux_uclibc_base;
@@ -823,9 +824,6 @@ pub struct TargetOptions {
     /// library naming convention. Defaults to false.
     pub is_like_windows: bool,
     pub is_like_msvc: bool,
-    /// Whether the target toolchain is like Android's. Only useful for compiling against Android.
-    /// Defaults to false.
-    pub is_like_android: bool,
     /// Whether the target toolchain is like Emscripten's. Only useful for compiling with
     /// Emscripten toolchain.
     /// Defaults to false.
@@ -1033,7 +1031,6 @@ impl Default for TargetOptions {
             is_like_osx: false,
             is_like_solaris: false,
             is_like_windows: false,
-            is_like_android: false,
             is_like_emscripten: false,
             is_like_msvc: false,
             is_like_fuchsia: false,
@@ -1476,7 +1473,6 @@ impl Target {
         key!(is_like_windows, bool);
         key!(is_like_msvc, bool);
         key!(is_like_emscripten, bool);
-        key!(is_like_android, bool);
         key!(is_like_fuchsia, bool);
         key!(dwarf_version, Option<u32>);
         key!(linker_is_gnu, bool);
@@ -1712,7 +1708,6 @@ impl ToJson for Target {
         target_option_val!(is_like_windows);
         target_option_val!(is_like_msvc);
         target_option_val!(is_like_emscripten);
-        target_option_val!(is_like_android);
         target_option_val!(is_like_fuchsia);
         target_option_val!(dwarf_version);
         target_option_val!(linker_is_gnu);
@@ -1,7 +1,7 @@
 use crate::spec::{LinkerFlavor, RelroLevel, Target, TargetOptions};
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.cpu = "ppc64".to_string();
     base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
     base.max_atomic_width = Some(64);
@@ -1,7 +1,7 @@
 use crate::spec::{LinkerFlavor, Target, TargetOptions};
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.cpu = "ppc64le".to_string();
     base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
     base.max_atomic_width = Some(64);
@@ -1,7 +1,7 @@
 use crate::spec::{LinkerFlavor, Target, TargetOptions};
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
     base.max_atomic_width = Some(32);
 
@@ -1,7 +1,7 @@
 use crate::spec::{LinkerFlavor, Target, TargetOptions};
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-mspe".to_string());
     base.max_atomic_width = Some(32);
 
@@ -13,7 +13,7 @@ pub fn target() -> Target {
             features: "+m,+a,+f,+d,+c".to_string(),
             llvm_abiname: "ilp32d".to_string(),
             max_atomic_width: Some(32),
-            ..super::linux_base::opts()
+            ..super::linux_gnu_base::opts()
         },
     }
 }
@@ -13,7 +13,7 @@ pub fn target() -> Target {
             features: "+m,+a,+f,+d,+c".to_string(),
             llvm_abiname: "lp64d".to_string(),
             max_atomic_width: Some(64),
-            ..super::linux_base::opts()
+            ..super::linux_gnu_base::opts()
         },
     }
 }
@@ -1,7 +1,7 @@
 use crate::spec::Target;
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.endian = "big".to_string();
     // z10 is the oldest CPU supported by LLVM
     base.cpu = "z10".to_string();
@@ -1,7 +1,7 @@
 use crate::spec::Target;
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.endian = "big".to_string();
     base.cpu = "v9".to_string();
     base.max_atomic_width = Some(64);
@@ -1,7 +1,7 @@
 use crate::spec::{LinkerFlavor, Target};
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.endian = "big".to_string();
     base.cpu = "v9".to_string();
     base.max_atomic_width = Some(64);
@@ -7,7 +7,7 @@ use crate::spec::{Target, TargetOptions};
 // https://static.docs.arm.com/ddi0406/cd/DDI0406C_d_armv7ar_arm.pdf
 
 pub fn target() -> Target {
-    let base = super::linux_base::opts();
+    let base = super::linux_gnu_base::opts();
     Target {
         llvm_target: "armv7-unknown-linux-gnueabihf".to_string(),
         pointer_width: 32,
@@ -1,7 +1,7 @@
 use crate::spec::{LinkerFlavor, Target};
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.cpu = "x86-64".to_string();
     base.max_atomic_width = Some(64);
     base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
@@ -1,7 +1,7 @@
 use crate::spec::{LinkerFlavor, Target};
 
 pub fn target() -> Target {
-    let mut base = super::linux_base::opts();
+    let mut base = super::linux_gnu_base::opts();
     base.cpu = "x86-64".to_string();
     base.max_atomic_width = Some(64);
     base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-mx32".to_string());
@@ -512,6 +512,13 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> {
             block = &self.body.basic_blocks()[next];
         } else {
             assert_eq!(self.locals[mir::RETURN_PLACE], self.nodes.last().unwrap());
+            // `AbstractConst`s should not contain any promoteds as they require references which
+            // are not allowed.
+            assert!(!self.nodes.iter().any(|n| matches!(
+                n.node,
+                Node::Leaf(ty::Const { val: ty::ConstKind::Unevaluated(_, _, Some(_)), ty: _ })
+            )));
+
             self.nodes[self.locals[mir::RETURN_PLACE]].used = true;
             if let Some(&unused) = self.nodes.iter().find(|n| !n.used) {
                 self.error(Some(unused.span), "dead code")?;
@@ -609,6 +616,10 @@ pub(super) fn try_unify<'tcx>(
         (Node::Leaf(a_ct), Node::Leaf(b_ct)) => {
             let a_ct = a_ct.subst(tcx, a.substs);
             let b_ct = b_ct.subst(tcx, b.substs);
+            if a_ct.ty != b_ct.ty {
+                return false;
+            }
+
             match (a_ct.val, b_ct.val) {
                 // We can just unify errors with everything to reduce the amount of
                 // emitted errors here.
@@ -621,6 +632,12 @@ pub(super) fn try_unify<'tcx>(
                 // we do not want to use `assert_eq!(a(), b())` to infer that `N` and `M` have to be `1`. This
                 // means that we only allow inference variables if they are equal.
                 (ty::ConstKind::Infer(a_val), ty::ConstKind::Infer(b_val)) => a_val == b_val,
+                // We may want to instead recurse into unevaluated constants here. That may require some
+                // care to prevent infinite recursion, so let's just ignore this for now.
+                (
+                    ty::ConstKind::Unevaluated(a_def, a_substs, None),
+                    ty::ConstKind::Unevaluated(b_def, b_substs, None),
+                ) => a_def == b_def && a_substs == b_substs,
                 // FIXME(const_evaluatable_checked): We may want to either actually try
                 // to evaluate `a_ct` and `b_ct` if they are are fully concrete or something like
                 // this, for now we just return false here.
@@ -586,6 +586,15 @@ changelog-seen = 2
 # build native code.
 #android-ndk = "/path/to/ndk"
 
+# Build the sanitizer runtimes for this target.
+# This option will override the same option under [build] section.
+#sanitizers = false
+
+# Build the profiler runtime for this target(required when compiling with options that depend
+# on this runtime, such as `-C profile-generate` or `-Z instrument-coverage`).
+# This option will override the same option under [build] section.
+#profiler = false
+
 # Force static or dynamic linkage of the standard library for this target. If
 # this target is a host for rustc, this will also affect the linkage of the
 # compiler itself. This is useful for building rustc on targets that normally
library/alloc/src/collections/btree/append.rs (new file)
@@ -0,0 +1,124 @@
use super::map::MIN_LEN;
use super::merge_iter::MergeIterInner;
use super::node::{self, ForceResult::*, Root};
use core::iter::FusedIterator;

impl<K, V> Root<K, V> {
    /// Appends all key-value pairs from the union of two ascending iterators,
    /// incrementing a `length` variable along the way. The latter makes it
    /// easier for the caller to avoid a leak when a drop handler panicks.
    ///
    /// If both iterators produce the same key, this method drops the pair from
    /// the left iterator and appends the pair from the right iterator.
    ///
    /// If you want the tree to end up in a strictly ascending order, like for
    /// a `BTreeMap`, both iterators should produce keys in strictly ascending
    /// order, each greater than all keys in the tree, including any keys
    /// already in the tree upon entry.
    pub fn append_from_sorted_iters<I>(&mut self, left: I, right: I, length: &mut usize)
    where
        K: Ord,
        I: Iterator<Item = (K, V)> + FusedIterator,
    {
        // We prepare to merge `left` and `right` into a sorted sequence in linear time.
        let iter = MergeIter(MergeIterInner::new(left, right));

        // Meanwhile, we build a tree from the sorted sequence in linear time.
        self.bulk_push(iter, length)
    }

    /// Pushes all key-value pairs to the end of the tree, incrementing a
    /// `length` variable along the way. The latter makes it easier for the
    /// caller to avoid a leak when the iterator panicks.
    fn bulk_push<I>(&mut self, iter: I, length: &mut usize)
    where
        I: Iterator<Item = (K, V)>,
    {
        let mut cur_node = self.node_as_mut().last_leaf_edge().into_node();
        // Iterate through all key-value pairs, pushing them into nodes at the right level.
        for (key, value) in iter {
            // Try to push key-value pair into the current leaf node.
            if cur_node.len() < node::CAPACITY {
                cur_node.push(key, value);
            } else {
                // No space left, go up and push there.
                let mut open_node;
                let mut test_node = cur_node.forget_type();
                loop {
                    match test_node.ascend() {
                        Ok(parent) => {
                            let parent = parent.into_node();
                            if parent.len() < node::CAPACITY {
                                // Found a node with space left, push here.
                                open_node = parent;
                                break;
                            } else {
                                // Go up again.
                                test_node = parent.forget_type();
                            }
                        }
                        Err(_) => {
                            // We are at the top, create a new root node and push there.
                            open_node = self.push_internal_level();
                            break;
                        }
                    }
                }

                // Push key-value pair and new right subtree.
                let tree_height = open_node.height() - 1;
                let mut right_tree = Root::new_leaf();
                for _ in 0..tree_height {
                    right_tree.push_internal_level();
                }
                open_node.push(key, value, right_tree);

                // Go down to the right-most leaf again.
                cur_node = open_node.forget_type().last_leaf_edge().into_node();
            }

            // Increment length every iteration, to make sure the map drops
            // the appended elements even if advancing the iterator panicks.
            *length += 1;
        }
        self.fix_right_edge();
    }

    fn fix_right_edge(&mut self) {
        // Handle underfull nodes, start from the top.
        let mut cur_node = self.node_as_mut();
        while let Internal(internal) = cur_node.force() {
            // Check if right-most child is underfull.
            let mut last_edge = internal.last_edge();
            let right_child_len = last_edge.reborrow().descend().len();
            if right_child_len < MIN_LEN {
                // We need to steal.
                let mut last_kv = match last_edge.left_kv() {
                    Ok(left) => left,
                    Err(_) => unreachable!(),
                };
                last_kv.bulk_steal_left(MIN_LEN - right_child_len);
                last_edge = last_kv.right_edge();
            }

            // Go further down.
            cur_node = last_edge.descend();
        }
    }
}

// An iterator for merging two sorted sequences into one
struct MergeIter<K, V, I: Iterator<Item = (K, V)>>(MergeIterInner<I>);

impl<K: Ord, V, I> Iterator for MergeIter<K, V, I>
where
    I: Iterator<Item = (K, V)> + FusedIterator,
{
    type Item = (K, V);

    /// If two keys are equal, returns the key-value pair from the right source.
    fn next(&mut self) -> Option<(K, V)> {
        let (a_next, b_next) = self.0.nexts(|a: &(K, V), b: &(K, V)| K::cmp(&a.0, &b.0));
        b_next.or(a_next)
    }
}
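Note: the doc comment above specifies that on duplicate keys the pair from the right iterator wins, and the `BTreeMap::append` changes below route the public API through `append_from_sorted_iters`. A quick usage example of that observable behaviour on stable `BTreeMap`:

```rust
use std::collections::BTreeMap;

fn main() {
    let mut left = BTreeMap::new();
    left.insert(1, "left");
    left.insert(2, "left");

    let mut right = BTreeMap::new();
    right.insert(2, "right");
    right.insert(3, "right");

    // `append` drains `right` into `left` in linear time.
    left.append(&mut right);

    assert!(right.is_empty());
    // On the duplicate key 2, the value from the right map wins.
    assert_eq!(left[&2], "right");
    assert_eq!(left.len(), 3);
}
```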
@@ -9,7 +9,6 @@ use core::ops::{Index, RangeBounds};
 use core::ptr;
 
 use super::borrow::DormantMutRef;
-use super::merge_iter::MergeIterInner;
 use super::node::{self, marker, ForceResult::*, Handle, NodeRef};
 use super::search::{self, SearchResult::*};
 use super::unwrap_unchecked;
@@ -458,9 +457,6 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for RangeMut<'_, K, V> {
     }
 }
 
-// An iterator for merging two sorted sequences into one
-struct MergeIter<K, V, I: Iterator<Item = (K, V)>>(MergeIterInner<I>);
-
 impl<K: Ord, V> BTreeMap<K, V> {
     /// Makes a new empty BTreeMap.
     ///
@@ -908,13 +904,10 @@ impl<K: Ord, V> BTreeMap<K, V> {
             return;
         }
 
-        // First, we merge `self` and `other` into a sorted sequence in linear time.
         let self_iter = mem::take(self).into_iter();
         let other_iter = mem::take(other).into_iter();
-        let iter = MergeIter(MergeIterInner::new(self_iter, other_iter));
-
-        // Second, we build a tree from the sorted sequence in linear time.
-        self.from_sorted_iter(iter);
+        let root = BTreeMap::ensure_is_owned(&mut self.root);
+        root.append_from_sorted_iters(self_iter, other_iter, &mut self.length)
     }
 
     /// Constructs a double-ended iterator over a sub-range of elements in the map.
@@ -1039,78 +1032,6 @@ impl<K: Ord, V> BTreeMap<K, V> {
         }
     }
 
-    fn from_sorted_iter<I: Iterator<Item = (K, V)>>(&mut self, iter: I) {
-        let root = Self::ensure_is_owned(&mut self.root);
-        let mut cur_node = root.node_as_mut().last_leaf_edge().into_node();
-        // Iterate through all key-value pairs, pushing them into nodes at the right level.
-        for (key, value) in iter {
-            // Try to push key-value pair into the current leaf node.
-            if cur_node.len() < node::CAPACITY {
-                cur_node.push(key, value);
-            } else {
-                // No space left, go up and push there.
-                let mut open_node;
-                let mut test_node = cur_node.forget_type();
-                loop {
-                    match test_node.ascend() {
-                        Ok(parent) => {
-                            let parent = parent.into_node();
-                            if parent.len() < node::CAPACITY {
-                                // Found a node with space left, push here.
-                                open_node = parent;
-                                break;
-                            } else {
-                                // Go up again.
-                                test_node = parent.forget_type();
-                            }
-                        }
-                        Err(_) => {
-                            // We are at the top, create a new root node and push there.
-                            open_node = root.push_internal_level();
-                            break;
-                        }
-                    }
-                }
-
-                // Push key-value pair and new right subtree.
-                let tree_height = open_node.height() - 1;
-                let mut right_tree = node::Root::new_leaf();
-                for _ in 0..tree_height {
-                    right_tree.push_internal_level();
-                }
-                open_node.push(key, value, right_tree);
-
-                // Go down to the right-most leaf again.
-                cur_node = open_node.forget_type().last_leaf_edge().into_node();
-            }
-
-            self.length += 1;
-        }
-        Self::fix_right_edge(root)
-    }
-
-    fn fix_right_edge(root: &mut node::Root<K, V>) {
-        // Handle underfull nodes, start from the top.
-        let mut cur_node = root.node_as_mut();
-        while let Internal(internal) = cur_node.force() {
-            // Check if right-most child is underfull.
-            let mut last_edge = internal.last_edge();
-            let right_child_len = last_edge.reborrow().descend().len();
-            if right_child_len < MIN_LEN {
-                // We need to steal.
-                let mut last_kv = match last_edge.left_kv() {
-                    Ok(left) => left,
-                    Err(_) => unreachable!(),
-                };
-                last_kv.bulk_steal_left(MIN_LEN - right_child_len);
-                last_edge = last_kv.right_edge();
-            }
-
-            // Go further down.
-            cur_node = last_edge.descend();
-        }
-    }
-
     /// Splits the collection into two at the given key. Returns everything after the given key,
     /// including the key.
     ///
@@ -2220,18 +2141,5 @@ impl<K, V> BTreeMap<K, V> {
     }
 }
 
-impl<K: Ord, V, I> Iterator for MergeIter<K, V, I>
-where
-    I: Iterator<Item = (K, V)> + ExactSizeIterator + FusedIterator,
-{
-    type Item = (K, V);
-
-    /// If two keys are equal, returns the key/value-pair from the right source.
-    fn next(&mut self) -> Option<(K, V)> {
-        let (a_next, b_next) = self.0.nexts(|a: &(K, V), b: &(K, V)| K::cmp(&a.0, &b.0));
-        b_next.or(a_next)
-    }
-}
-
 #[cfg(test)]
 mod tests;
@@ -1685,6 +1685,33 @@ create_append_test!(test_append_239, 239);
 #[cfg(not(miri))] // Miri is too slow
 create_append_test!(test_append_1700, 1700);
 
+#[test]
+fn test_append_drop_leak() {
+    static DROPS: AtomicUsize = AtomicUsize::new(0);
+
+    struct D;
+
+    impl Drop for D {
+        fn drop(&mut self) {
+            if DROPS.fetch_add(1, Ordering::SeqCst) == 0 {
+                panic!("panic in `drop`");
+            }
+        }
+    }
+
+    let mut left = BTreeMap::new();
+    let mut right = BTreeMap::new();
+    left.insert(0, D);
+    left.insert(1, D); // first to be dropped during append
+    left.insert(2, D);
+    right.insert(1, D);
+    right.insert(2, D);
+
+    catch_unwind(move || left.append(&mut right)).unwrap_err();
+
+    assert_eq!(DROPS.load(Ordering::SeqCst), 4); // Rust issue #47949 ate one little piggy
+}
+
 fn rand_data(len: usize) -> Vec<(u32, u32)> {
     assert!(len * 2 <= 70029); // from that point on numbers repeat
     let mut rng = DeterministicRng::new();
@@ -2,27 +2,25 @@ use core::cmp::Ordering;
 use core::fmt::{self, Debug};
 use core::iter::FusedIterator;
 
-/// Core of an iterator that merges the output of two ascending iterators,
+/// Core of an iterator that merges the output of two strictly ascending iterators,
 /// for instance a union or a symmetric difference.
-pub struct MergeIterInner<I>
-where
-    I: Iterator,
-{
+pub struct MergeIterInner<I: Iterator> {
     a: I,
     b: I,
     peeked: Option<Peeked<I>>,
 }
 
-/// Benchmarks faster than wrapping both iterators in a Peekable.
+/// Benchmarks faster than wrapping both iterators in a Peekable,
+/// probably because we can afford to impose a FusedIterator bound.
 #[derive(Clone, Debug)]
 enum Peeked<I: Iterator> {
     A(I::Item),
     B(I::Item),
 }
 
-impl<I> Clone for MergeIterInner<I>
+impl<I: Iterator> Clone for MergeIterInner<I>
 where
-    I: Clone + Iterator,
+    I: Clone,
     I::Item: Clone,
 {
     fn clone(&self) -> Self {
@@ -30,20 +28,17 @@ where
     }
 }
 
-impl<I> Debug for MergeIterInner<I>
+impl<I: Iterator> Debug for MergeIterInner<I>
 where
-    I: Iterator + Debug,
+    I: Debug,
     I::Item: Debug,
 {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("MergeIterInner").field(&self.a).field(&self.b).finish()
+        f.debug_tuple("MergeIterInner").field(&self.a).field(&self.b).field(&self.peeked).finish()
     }
 }
 
-impl<I> MergeIterInner<I>
-where
-    I: ExactSizeIterator + FusedIterator,
-{
+impl<I: Iterator> MergeIterInner<I> {
     /// Creates a new core for an iterator merging a pair of sources.
     pub fn new(a: I, b: I) -> Self {
         MergeIterInner { a, b, peeked: None }
@@ -52,13 +47,17 @@ where
     /// Returns the next pair of items stemming from the pair of sources
     /// being merged. If both returned options contain a value, that value
     /// is equal and occurs in both sources. If one of the returned options
-    /// contains a value, that value doesn't occur in the other source.
-    /// If neither returned option contains a value, iteration has finished
-    /// and subsequent calls will return the same empty pair.
+    /// contains a value, that value doesn't occur in the other source (or
+    /// the sources are not strictly ascending). If neither returned option
+    /// contains a value, iteration has finished and subsequent calls will
+    /// return the same empty pair.
     pub fn nexts<Cmp: Fn(&I::Item, &I::Item) -> Ordering>(
         &mut self,
         cmp: Cmp,
-    ) -> (Option<I::Item>, Option<I::Item>) {
+    ) -> (Option<I::Item>, Option<I::Item>)
+    where
+        I: FusedIterator,
+    {
         let mut a_next;
         let mut b_next;
         match self.peeked.take() {
@@ -86,7 +85,10 @@ where
     }
 
     /// Returns a pair of upper bounds for the `size_hint` of the final iterator.
-    pub fn lens(&self) -> (usize, usize) {
+    pub fn lens(&self) -> (usize, usize)
+    where
+        I: ExactSizeIterator,
+    {
         match self.peeked {
             Some(Peeked::A(_)) => (1 + self.a.len(), self.b.len()),
             Some(Peeked::B(_)) => (self.a.len(), 1 + self.b.len()),
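Note: a rough standalone model of what `MergeIterInner::nexts` does — pull the smaller head from two sorted, fused sources and report both heads when they compare equal. This sketch uses two `Peekable`s for simplicity; the real type keeps a single peeked item, which the doc comment above says benchmarks faster:

```rust
use std::cmp::Ordering;
use std::iter::Peekable;

// Simplified model: returns (from_a, from_b); both are Some when the heads are equal.
fn nexts<I: Iterator<Item = u32>>(
    a: &mut Peekable<I>,
    b: &mut Peekable<I>,
) -> (Option<u32>, Option<u32>) {
    match (a.peek(), b.peek()) {
        (Some(x), Some(y)) => match x.cmp(y) {
            Ordering::Less => (a.next(), None),
            Ordering::Greater => (None, b.next()),
            Ordering::Equal => (a.next(), b.next()),
        },
        (Some(_), None) => (a.next(), None),
        (None, Some(_)) => (None, b.next()),
        (None, None) => (None, None),
    }
}

fn main() {
    let mut a = vec![1, 2, 4].into_iter().peekable();
    let mut b = vec![2, 3].into_iter().peekable();
    assert_eq!(nexts(&mut a, &mut b), (Some(1), None));
    assert_eq!(nexts(&mut a, &mut b), (Some(2), Some(2)));
    assert_eq!(nexts(&mut a, &mut b), (None, Some(3)));
    assert_eq!(nexts(&mut a, &mut b), (Some(4), None));
    assert_eq!(nexts(&mut a, &mut b), (None, None));
}
```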
@@ -1,3 +1,4 @@
+mod append;
 mod borrow;
 pub mod map;
 mod mem;
@@ -108,18 +108,20 @@ impl Duration {
     #[unstable(feature = "duration_constants", issue = "57391")]
     pub const NANOSECOND: Duration = Duration::from_nanos(1);
 
-    /// The minimum duration.
+    /// A duration of zero time.
     ///
     /// # Examples
     ///
     /// ```
-    /// #![feature(duration_constants)]
+    /// #![feature(duration_zero)]
     /// use std::time::Duration;
     ///
-    /// assert_eq!(Duration::MIN, Duration::new(0, 0));
+    /// let duration = Duration::ZERO;
+    /// assert!(duration.is_zero());
+    /// assert_eq!(duration.as_nanos(), 0);
     /// ```
-    #[unstable(feature = "duration_constants", issue = "57391")]
-    pub const MIN: Duration = Duration::from_nanos(0);
+    #[unstable(feature = "duration_zero", issue = "73544")]
+    pub const ZERO: Duration = Duration::from_nanos(0);
 
     /// The maximum duration.
     ///
@@ -166,24 +168,6 @@ impl Duration {
         Duration { secs, nanos }
     }
 
-    /// Creates a new `Duration` that spans no time.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// #![feature(duration_zero)]
-    /// use std::time::Duration;
-    ///
-    /// let duration = Duration::zero();
-    /// assert!(duration.is_zero());
-    /// assert_eq!(duration.as_nanos(), 0);
-    /// ```
-    #[unstable(feature = "duration_zero", issue = "73544")]
-    #[inline]
-    pub const fn zero() -> Duration {
-        Duration { secs: 0, nanos: 0 }
-    }
-
     /// Creates a new `Duration` from the specified number of whole seconds.
     ///
     /// # Examples
     ///
@@ -277,7 +261,7 @@ impl Duration {
     /// #![feature(duration_zero)]
     /// use std::time::Duration;
     ///
-    /// assert!(Duration::zero().is_zero());
+    /// assert!(Duration::ZERO.is_zero());
     /// assert!(Duration::new(0, 0).is_zero());
     /// assert!(Duration::from_nanos(0).is_zero());
     /// assert!(Duration::from_secs(0).is_zero());
@@ -536,18 +520,18 @@ impl Duration {
         }
     }
 
-    /// Saturating `Duration` subtraction. Computes `self - other`, returning [`Duration::MIN`]
+    /// Saturating `Duration` subtraction. Computes `self - other`, returning [`Duration::ZERO`]
     /// if the result would be negative or if overflow occurred.
     ///
     /// # Examples
     ///
     /// ```
     /// #![feature(duration_saturating_ops)]
-    /// #![feature(duration_constants)]
+    /// #![feature(duration_zero)]
     /// use std::time::Duration;
     ///
     /// assert_eq!(Duration::new(0, 1).saturating_sub(Duration::new(0, 0)), Duration::new(0, 1));
-    /// assert_eq!(Duration::new(0, 0).saturating_sub(Duration::new(0, 1)), Duration::MIN);
+    /// assert_eq!(Duration::new(0, 0).saturating_sub(Duration::new(0, 1)), Duration::ZERO);
     /// ```
     #[unstable(feature = "duration_saturating_ops", issue = "76416")]
     #[inline]
@@ -555,7 +539,7 @@ impl Duration {
     pub const fn saturating_sub(self, rhs: Duration) -> Duration {
         match self.checked_sub(rhs) {
             Some(res) => res,
-            None => Duration::MIN,
+            None => Duration::ZERO,
         }
     }
 
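Note: with `Duration::zero()` folded into the `Duration::ZERO` associated constant, callers on a nightly toolchain of this period opt into the `duration_zero` (and, for `SECOND`, `duration_constants`) features and use the constants directly, mirroring the doc examples in the diff:

```rust
#![feature(duration_zero)]
#![feature(duration_constants)]
#![feature(duration_saturating_ops)]

use std::time::Duration;

fn main() {
    // The ZERO constant replaces the old Duration::zero() constructor.
    assert!(Duration::ZERO.is_zero());
    assert_eq!(Duration::ZERO, Duration::new(0, 0));

    // Saturating subtraction now bottoms out at ZERO instead of MIN.
    assert_eq!(Duration::ZERO.saturating_sub(Duration::SECOND), Duration::ZERO);
}
```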
@@ -108,24 +108,24 @@ fn sub() {
 
 #[test]
 fn checked_sub() {
-    let zero = Duration::new(0, 0);
-    let one_nano = Duration::new(0, 1);
-    let one_sec = Duration::new(1, 0);
-    assert_eq!(one_nano.checked_sub(zero), Some(Duration::new(0, 1)));
-    assert_eq!(one_sec.checked_sub(one_nano), Some(Duration::new(0, 999_999_999)));
-    assert_eq!(zero.checked_sub(one_nano), None);
-    assert_eq!(zero.checked_sub(one_sec), None);
+    assert_eq!(Duration::NANOSECOND.checked_sub(Duration::ZERO), Some(Duration::NANOSECOND));
+    assert_eq!(
+        Duration::SECOND.checked_sub(Duration::NANOSECOND),
+        Some(Duration::new(0, 999_999_999))
+    );
+    assert_eq!(Duration::ZERO.checked_sub(Duration::NANOSECOND), None);
+    assert_eq!(Duration::ZERO.checked_sub(Duration::SECOND), None);
 }
 
 #[test]
 fn saturating_sub() {
-    let zero = Duration::new(0, 0);
-    let one_nano = Duration::new(0, 1);
-    let one_sec = Duration::new(1, 0);
-    assert_eq!(one_nano.saturating_sub(zero), Duration::new(0, 1));
-    assert_eq!(one_sec.saturating_sub(one_nano), Duration::new(0, 999_999_999));
-    assert_eq!(zero.saturating_sub(one_nano), Duration::MIN);
-    assert_eq!(zero.saturating_sub(one_sec), Duration::MIN);
+    assert_eq!(Duration::NANOSECOND.saturating_sub(Duration::ZERO), Duration::NANOSECOND);
+    assert_eq!(
+        Duration::SECOND.saturating_sub(Duration::NANOSECOND),
+        Duration::new(0, 999_999_999)
+    );
+    assert_eq!(Duration::ZERO.saturating_sub(Duration::NANOSECOND), Duration::ZERO);
+    assert_eq!(Duration::ZERO.saturating_sub(Duration::SECOND), Duration::ZERO);
 }
 
 #[test]
@@ -337,87 +337,82 @@ fn duration_const() {
     const SUB_SEC_NANOS: u32 = DURATION.subsec_nanos();
     assert_eq!(SUB_SEC_NANOS, 123_456_789);
 
-    const ZERO: Duration = Duration::zero();
-    assert_eq!(ZERO, Duration::new(0, 0));
-
-    const IS_ZERO: bool = ZERO.is_zero();
+    const IS_ZERO: bool = Duration::ZERO.is_zero();
     assert!(IS_ZERO);
 
-    const ONE: Duration = Duration::new(1, 0);
-
-    const SECONDS: u64 = ONE.as_secs();
+    const SECONDS: u64 = Duration::SECOND.as_secs();
     assert_eq!(SECONDS, 1);
 
     const FROM_SECONDS: Duration = Duration::from_secs(1);
-    assert_eq!(FROM_SECONDS, ONE);
+    assert_eq!(FROM_SECONDS, Duration::SECOND);
 
-    const SECONDS_F32: f32 = ONE.as_secs_f32();
+    const SECONDS_F32: f32 = Duration::SECOND.as_secs_f32();
     assert_eq!(SECONDS_F32, 1.0);
 
     const FROM_SECONDS_F32: Duration = Duration::from_secs_f32(1.0);
-    assert_eq!(FROM_SECONDS_F32, ONE);
+    assert_eq!(FROM_SECONDS_F32, Duration::SECOND);
 
-    const SECONDS_F64: f64 = ONE.as_secs_f64();
+    const SECONDS_F64: f64 = Duration::SECOND.as_secs_f64();
     assert_eq!(SECONDS_F64, 1.0);
 
     const FROM_SECONDS_F64: Duration = Duration::from_secs_f64(1.0);
-    assert_eq!(FROM_SECONDS_F64, ONE);
+    assert_eq!(FROM_SECONDS_F64, Duration::SECOND);
 
-    const MILLIS: u128 = ONE.as_millis();
+    const MILLIS: u128 = Duration::SECOND.as_millis();
     assert_eq!(MILLIS, 1_000);
 
     const FROM_MILLIS: Duration = Duration::from_millis(1_000);
-    assert_eq!(FROM_MILLIS, ONE);
+    assert_eq!(FROM_MILLIS, Duration::SECOND);
 
-    const MICROS: u128 = ONE.as_micros();
+    const MICROS: u128 = Duration::SECOND.as_micros();
     assert_eq!(MICROS, 1_000_000);
 
     const FROM_MICROS: Duration = Duration::from_micros(1_000_000);
-    assert_eq!(FROM_MICROS, ONE);
+    assert_eq!(FROM_MICROS, Duration::SECOND);
 
-    const NANOS: u128 = ONE.as_nanos();
+    const NANOS: u128 = Duration::SECOND.as_nanos();
     assert_eq!(NANOS, 1_000_000_000);
 
     const FROM_NANOS: Duration = Duration::from_nanos(1_000_000_000);
-    assert_eq!(FROM_NANOS, ONE);
+    assert_eq!(FROM_NANOS, Duration::SECOND);
 
     const MAX: Duration = Duration::new(u64::MAX, 999_999_999);
 
-    const CHECKED_ADD: Option<Duration> = MAX.checked_add(ONE);
+    const CHECKED_ADD: Option<Duration> = MAX.checked_add(Duration::SECOND);
     assert_eq!(CHECKED_ADD, None);
 
-    const CHECKED_SUB: Option<Duration> = ZERO.checked_sub(ONE);
+    const CHECKED_SUB: Option<Duration> = Duration::ZERO.checked_sub(Duration::SECOND);
     assert_eq!(CHECKED_SUB, None);
 
-    const CHECKED_MUL: Option<Duration> = ONE.checked_mul(1);
-    assert_eq!(CHECKED_MUL, Some(ONE));
+    const CHECKED_MUL: Option<Duration> = Duration::SECOND.checked_mul(1);
+    assert_eq!(CHECKED_MUL, Some(Duration::SECOND));
 
-    const MUL_F32: Duration = ONE.mul_f32(1.0);
-    assert_eq!(MUL_F32, ONE);
+    const MUL_F32: Duration = Duration::SECOND.mul_f32(1.0);
+    assert_eq!(MUL_F32, Duration::SECOND);
 
-    const MUL_F64: Duration = ONE.mul_f64(1.0);
-    assert_eq!(MUL_F64, ONE);
+    const MUL_F64: Duration = Duration::SECOND.mul_f64(1.0);
+    assert_eq!(MUL_F64, Duration::SECOND);
 
-    const CHECKED_DIV: Option<Duration> = ONE.checked_div(1);
-    assert_eq!(CHECKED_DIV, Some(ONE));
+    const CHECKED_DIV: Option<Duration> = Duration::SECOND.checked_div(1);
+    assert_eq!(CHECKED_DIV, Some(Duration::SECOND));
 
-    const DIV_F32: Duration = ONE.div_f32(1.0);
-    assert_eq!(DIV_F32, ONE);
+    const DIV_F32: Duration = Duration::SECOND.div_f32(1.0);
+    assert_eq!(DIV_F32, Duration::SECOND);
 
-    const DIV_F64: Duration = ONE.div_f64(1.0);
-    assert_eq!(DIV_F64, ONE);
+    const DIV_F64: Duration = Duration::SECOND.div_f64(1.0);
+    assert_eq!(DIV_F64, Duration::SECOND);
 
-    const DIV_DURATION_F32: f32 = ONE.div_duration_f32(ONE);
+    const DIV_DURATION_F32: f32 = Duration::SECOND.div_duration_f32(Duration::SECOND);
     assert_eq!(DIV_DURATION_F32, 1.0);
 
-    const DIV_DURATION_F64: f64 = ONE.div_duration_f64(ONE);
+    const DIV_DURATION_F64: f64 = Duration::SECOND.div_duration_f64(Duration::SECOND);
     assert_eq!(DIV_DURATION_F64, 1.0);
 
-    const SATURATING_ADD: Duration = MAX.saturating_add(ONE);
+    const SATURATING_ADD: Duration = MAX.saturating_add(Duration::SECOND);
     assert_eq!(SATURATING_ADD, MAX);
 
-    const SATURATING_SUB: Duration = ZERO.saturating_sub(ONE);
-    assert_eq!(SATURATING_SUB, ZERO);
+    const SATURATING_SUB: Duration = Duration::ZERO.saturating_sub(Duration::SECOND);
+    assert_eq!(SATURATING_SUB, Duration::ZERO);
 
     const SATURATING_MUL: Duration = MAX.saturating_mul(2);
     assert_eq!(SATURATING_MUL, MAX);
@@ -257,6 +257,7 @@
 #![feature(doc_spotlight)]
 #![feature(dropck_eyepatch)]
 #![feature(duration_constants)]
+#![feature(duration_zero)]
 #![feature(exact_size_is_empty)]
 #![feature(exhaustive_patterns)]
 #![feature(extend_one)]
@@ -5,7 +5,7 @@ macro_rules! assert_almost_eq {
         let (a, b) = ($a, $b);
         if a != b {
             let (a, b) = if a > b { (a, b) } else { (b, a) };
-            assert!(a - Duration::new(0, 1000) <= b, "{:?} is not almost equal to {:?}", a, b);
+            assert!(a - Duration::from_micros(1) <= b, "{:?} is not almost equal to {:?}", a, b);
         }
     }};
 }
@@ -34,7 +34,7 @@ fn instant_math() {
     assert_almost_eq!(b - dur, a);
     assert_almost_eq!(a + dur, b);
 
-    let second = Duration::new(1, 0);
+    let second = Duration::SECOND;
     assert_almost_eq!(a - second + second, a);
     assert_almost_eq!(a.checked_sub(second).unwrap().checked_add(second).unwrap(), a);
 
@@ -65,24 +65,24 @@ fn instant_math_is_associative() {
 #[should_panic]
 fn instant_duration_since_panic() {
     let a = Instant::now();
-    (a - Duration::new(1, 0)).duration_since(a);
+    (a - Duration::SECOND).duration_since(a);
 }
 
 #[test]
 fn instant_checked_duration_since_nopanic() {
     let now = Instant::now();
-    let earlier = now - Duration::new(1, 0);
-    let later = now + Duration::new(1, 0);
+    let earlier = now - Duration::SECOND;
+    let later = now + Duration::SECOND;
     assert_eq!(earlier.checked_duration_since(now), None);
-    assert_eq!(later.checked_duration_since(now), Some(Duration::new(1, 0)));
-    assert_eq!(now.checked_duration_since(now), Some(Duration::new(0, 0)));
+    assert_eq!(later.checked_duration_since(now), Some(Duration::SECOND));
+    assert_eq!(now.checked_duration_since(now), Some(Duration::ZERO));
 }
 
 #[test]
 fn instant_saturating_duration_since_nopanic() {
     let a = Instant::now();
-    let ret = (a - Duration::new(1, 0)).saturating_duration_since(a);
-    assert_eq!(ret, Duration::new(0, 0));
+    let ret = (a - Duration::SECOND).saturating_duration_since(a);
+    assert_eq!(ret, Duration::ZERO);
 }
 
 #[test]
@@ -90,7 +90,7 @@ fn system_time_math() {
     let a = SystemTime::now();
     let b = SystemTime::now();
     match b.duration_since(a) {
-        Ok(dur) if dur == Duration::new(0, 0) => {
+        Ok(Duration::ZERO) => {
             assert_almost_eq!(a, b);
         }
         Ok(dur) => {
@@ -106,16 +106,16 @@ fn system_time_math() {
         }
     }
 
-    let second = Duration::new(1, 0);
+    let second = Duration::SECOND;
     assert_almost_eq!(a.duration_since(a - second).unwrap(), second);
     assert_almost_eq!(a.duration_since(a + second).unwrap_err().duration(), second);
 
     assert_almost_eq!(a - second + second, a);
     assert_almost_eq!(a.checked_sub(second).unwrap().checked_add(second).unwrap(), a);
 
-    let one_second_from_epoch = UNIX_EPOCH + Duration::new(1, 0);
+    let one_second_from_epoch = UNIX_EPOCH + Duration::SECOND;
     let one_second_from_epoch2 =
-        UNIX_EPOCH + Duration::new(0, 500_000_000) + Duration::new(0, 500_000_000);
+        UNIX_EPOCH + Duration::from_millis(500) + Duration::from_millis(500);
     assert_eq!(one_second_from_epoch, one_second_from_epoch2);
 
     // checked_add_duration will not panic on overflow
@@ -141,12 +141,12 @@ fn system_time_elapsed() {
 #[test]
 fn since_epoch() {
     let ts = SystemTime::now();
-    let a = ts.duration_since(UNIX_EPOCH + Duration::new(1, 0)).unwrap();
+    let a = ts.duration_since(UNIX_EPOCH + Duration::SECOND).unwrap();
     let b = ts.duration_since(UNIX_EPOCH).unwrap();
     assert!(b > a);
-    assert_eq!(b - a, Duration::new(1, 0));
+    assert_eq!(b - a, Duration::SECOND);
 
-    let thirty_years = Duration::new(1, 0) * 60 * 60 * 24 * 365 * 30;
+    let thirty_years = Duration::SECOND * 60 * 60 * 24 * 365 * 30;
 
     // Right now for CI this test is run in an emulator, and apparently the
     // aarch64 emulator's sense of time is that we're still living in the
@ -264,7 +264,7 @@ impl<'a> ShouldRun<'a> {
|
||||
/// `all_krates` should probably be removed at some point.
|
||||
pub fn all_krates(mut self, name: &str) -> Self {
|
||||
let mut set = BTreeSet::new();
|
||||
for krate in self.builder.in_tree_crates(name) {
|
||||
for krate in self.builder.in_tree_crates(name, None) {
|
||||
let path = krate.local_path(self.builder);
|
||||
set.insert(path);
|
||||
}
|
||||
@ -277,7 +277,7 @@ impl<'a> ShouldRun<'a> {
|
||||
///
|
||||
/// `make_run` will be called separately for each matching command-line path.
|
||||
pub fn krate(mut self, name: &str) -> Self {
|
||||
for krate in self.builder.in_tree_crates(name) {
|
||||
for krate in self.builder.in_tree_crates(name, None) {
|
||||
let path = krate.local_path(self.builder);
|
||||
self.paths.insert(PathSet::one(path));
|
||||
}
|
||||
|
@ -108,7 +108,7 @@ impl Step for Std {
|
||||
// Explicitly pass -p for all dependencies krates -- this will force cargo
|
||||
// to also check the tests/benches/examples for these crates, rather
|
||||
// than just the leaf crate.
|
||||
for krate in builder.in_tree_crates("test") {
|
||||
for krate in builder.in_tree_crates("test", Some(target)) {
|
||||
cargo.arg("-p").arg(krate.name);
|
||||
}
|
||||
|
||||
@ -172,7 +172,7 @@ impl Step for Rustc {
|
||||
// Explicitly pass -p for all compiler krates -- this will force cargo
|
||||
// to also check the tests/benches/examples for these crates, rather
|
||||
// than just the leaf crate.
|
||||
for krate in builder.in_tree_crates("rustc-main") {
|
||||
for krate in builder.in_tree_crates("rustc-main", Some(target)) {
|
||||
cargo.arg("-p").arg(krate.name);
|
||||
}
|
||||
|
||||
|
@ -143,7 +143,7 @@ fn copy_third_party_objects(
|
||||
}
|
||||
}
|
||||
|
||||
if builder.config.sanitizers && compiler.stage != 0 {
|
||||
if builder.config.sanitizers_enabled(target) && compiler.stage != 0 {
|
||||
// The sanitizers are only copied in stage1 or above,
|
||||
// to avoid creating dependency on LLVM.
|
||||
target_deps.extend(
|
||||
@ -251,7 +251,7 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
|
||||
.arg("--features")
|
||||
.arg(features);
|
||||
} else {
|
||||
let mut features = builder.std_features();
|
||||
let mut features = builder.std_features(target);
|
||||
features.push_str(compiler_builtins_c_feature);
|
||||
|
||||
cargo
|
||||
|
@ -279,6 +279,8 @@ pub struct Target {
|
||||
pub ranlib: Option<PathBuf>,
|
||||
pub linker: Option<PathBuf>,
|
||||
pub ndk: Option<PathBuf>,
|
||||
pub sanitizers: bool,
|
||||
pub profiler: bool,
|
||||
pub crt_static: Option<bool>,
|
||||
pub musl_root: Option<PathBuf>,
|
||||
pub musl_libdir: Option<PathBuf>,
|
||||
@ -503,6 +505,8 @@ struct TomlTarget {
|
||||
llvm_config: Option<String>,
|
||||
llvm_filecheck: Option<String>,
|
||||
android_ndk: Option<String>,
|
||||
sanitizers: Option<bool>,
|
||||
profiler: Option<bool>,
|
||||
crt_static: Option<bool>,
|
||||
musl_root: Option<String>,
|
||||
musl_libdir: Option<String>,
|
||||
@ -890,6 +894,8 @@ impl Config {
|
||||
target.musl_libdir = cfg.musl_libdir.map(PathBuf::from);
|
||||
target.wasi_root = cfg.wasi_root.map(PathBuf::from);
|
||||
target.qemu_rootfs = cfg.qemu_rootfs.map(PathBuf::from);
|
||||
target.sanitizers = cfg.sanitizers.unwrap_or(build.sanitizers.unwrap_or_default());
|
||||
target.profiler = cfg.profiler.unwrap_or(build.profiler.unwrap_or_default());
|
||||
|
||||
config.target_config.insert(TargetSelection::from_user(&triple), target);
|
||||
}
|
||||
@ -999,6 +1005,22 @@ impl Config {
self.verbose > 1
}

pub fn sanitizers_enabled(&self, target: TargetSelection) -> bool {
self.target_config.get(&target).map(|t| t.sanitizers).unwrap_or(self.sanitizers)
}

pub fn any_sanitizers_enabled(&self) -> bool {
self.target_config.values().any(|t| t.sanitizers) || self.sanitizers
}

pub fn profiler_enabled(&self, target: TargetSelection) -> bool {
self.target_config.get(&target).map(|t| t.profiler).unwrap_or(self.profiler)
}

pub fn any_profiler_enabled(&self) -> bool {
self.target_config.values().any(|t| t.profiler) || self.profiler
}

pub fn llvm_enabled(&self) -> bool {
self.rust_codegen_backends.contains(&INTERNER.intern_str("llvm"))
}
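The new `sanitizers`/`profiler` fields on `TomlTarget` above mean these switches can now be set per target in `config.toml` as well as globally. A minimal sketch of such a configuration, assuming the usual `[build]` and `[target.<triple>]` section layout; the triple and the values are illustrative only and not taken from this diff:

[build]
# global defaults, as before
sanitizers = false
profiler = false

[target.x86_64-unknown-linux-gnu]
# per-target overrides, read back through sanitizers_enabled()/profiler_enabled()
sanitizers = true
profiler = true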
@ -535,8 +535,12 @@ impl Step for Rustc {
|
||||
// Find dependencies for top level crates.
|
||||
let mut compiler_crates = HashSet::new();
|
||||
for root_crate in &["rustc_driver", "rustc_codegen_llvm", "rustc_codegen_ssa"] {
|
||||
compiler_crates
|
||||
.extend(builder.in_tree_crates(root_crate).into_iter().map(|krate| krate.name));
|
||||
compiler_crates.extend(
|
||||
builder
|
||||
.in_tree_crates(root_crate, Some(target))
|
||||
.into_iter()
|
||||
.map(|krate| krate.name),
|
||||
);
|
||||
}
|
||||
|
||||
for krate in &compiler_crates {
|
||||
|
@ -169,17 +169,18 @@ pub use crate::config::Config;
|
||||
pub use crate::flags::Subcommand;
|
||||
|
||||
const LLVM_TOOLS: &[&str] = &[
|
||||
"llvm-nm", // used to inspect binaries; it shows symbol names, their sizes and visibility
|
||||
"llvm-objcopy", // used to transform ELFs into binary format which flashing tools consume
|
||||
"llvm-objdump", // used to disassemble programs
|
||||
"llvm-cov", // used to generate coverage report
|
||||
"llvm-nm", // used to inspect binaries; it shows symbol names, their sizes and visibility
|
||||
"llvm-objcopy", // used to transform ELFs into binary format which flashing tools consume
|
||||
"llvm-objdump", // used to disassemble programs
|
||||
"llvm-profdata", // used to inspect and merge files generated by profiles
|
||||
"llvm-readobj", // used to get information from ELFs/objects that the other tools don't provide
|
||||
"llvm-size", // used to prints the size of the linker sections of a program
|
||||
"llvm-strip", // used to discard symbols from binary files to reduce their size
|
||||
"llvm-ar", // used for creating and modifying archive files
|
||||
"llvm-dis", // used to disassemble LLVM bitcode
|
||||
"llc", // used to compile LLVM bytecode
|
||||
"opt", // used to optimize LLVM bytecode
|
||||
"llvm-readobj", // used to get information from ELFs/objects that the other tools don't provide
|
||||
"llvm-size", // used to prints the size of the linker sections of a program
|
||||
"llvm-strip", // used to discard symbols from binary files to reduce their size
|
||||
"llvm-ar", // used for creating and modifying archive files
|
||||
"llvm-dis", // used to disassemble LLVM bitcode
|
||||
"llc", // used to compile LLVM bytecode
|
||||
"opt", // used to optimize LLVM bytecode
|
||||
];
|
||||
|
||||
pub const VERSION: usize = 2;
|
||||
@ -541,7 +542,7 @@ impl Build {
|
||||
|
||||
/// Gets the space-separated set of activated features for the standard
|
||||
/// library.
|
||||
fn std_features(&self) -> String {
|
||||
fn std_features(&self, target: TargetSelection) -> String {
|
||||
let mut features = "panic-unwind".to_string();
|
||||
|
||||
match self.config.llvm_libunwind.unwrap_or_default() {
|
||||
@ -552,7 +553,7 @@ impl Build {
|
||||
if self.config.backtrace {
|
||||
features.push_str(" backtrace");
|
||||
}
|
||||
if self.config.profiler {
|
||||
if self.config.profiler_enabled(target) {
|
||||
features.push_str(" profiler");
|
||||
}
|
||||
features
|
||||
@ -1115,7 +1116,7 @@ impl Build {
|
||||
/// Returns a Vec of all the dependencies of the given root crate,
|
||||
/// including transitive dependencies and the root itself. Only includes
|
||||
/// "local" crates (those in the local source tree, not from a registry).
|
||||
fn in_tree_crates(&self, root: &str) -> Vec<&Crate> {
|
||||
fn in_tree_crates(&self, root: &str, target: Option<TargetSelection>) -> Vec<&Crate> {
|
||||
let mut ret = Vec::new();
|
||||
let mut list = vec![INTERNER.intern_str(root)];
|
||||
let mut visited = HashSet::new();
|
||||
@ -1136,7 +1137,10 @@ impl Build {
|
||||
// metadata::build.
|
||||
if visited.insert(dep)
|
||||
&& dep != "build_helper"
|
||||
&& (dep != "profiler_builtins" || self.config.profiler)
|
||||
&& (dep != "profiler_builtins"
|
||||
|| target
|
||||
.map(|t| self.config.profiler_enabled(t))
|
||||
.unwrap_or(self.config.any_profiler_enabled()))
|
||||
&& (dep != "rustc_codegen_llvm" || self.config.llvm_enabled())
|
||||
{
|
||||
list.push(*dep);
|
||||
|
@ -91,7 +91,7 @@ pub fn check(build: &mut Build) {
|
||||
.unwrap_or(true)
|
||||
})
|
||||
.any(|build_llvm_ourselves| build_llvm_ourselves);
|
||||
if building_llvm || build.config.sanitizers {
|
||||
if building_llvm || build.config.any_sanitizers_enabled() {
|
||||
cmd_finder.must_have("cmake");
|
||||
}
|
||||
|
||||
|
@ -1271,11 +1271,11 @@ note: if you're sure you want to do this, please open an issue as to why. In the
|
||||
cmd.env("RUSTC_BOOTSTRAP", "1");
|
||||
builder.add_rust_test_threads(&mut cmd);
|
||||
|
||||
if builder.config.sanitizers {
|
||||
if builder.config.sanitizers_enabled(target) {
|
||||
cmd.env("RUSTC_SANITIZER_SUPPORT", "1");
|
||||
}
|
||||
|
||||
if builder.config.profiler {
|
||||
if builder.config.profiler_enabled(target) {
|
||||
cmd.env("RUSTC_PROFILER_SUPPORT", "1");
|
||||
}
|
||||
|
||||
@ -1591,7 +1591,7 @@ impl Step for CrateLibrustc {
|
||||
let builder = run.builder;
|
||||
let compiler = builder.compiler(builder.top_stage, run.build_triple());
|
||||
|
||||
for krate in builder.in_tree_crates("rustc-main") {
|
||||
for krate in builder.in_tree_crates("rustc-main", Some(run.target)) {
|
||||
if krate.path.ends_with(&run.path) {
|
||||
let test_kind = builder.kind.into();
|
||||
|
||||
@ -1698,7 +1698,7 @@ impl Step for Crate {
|
||||
});
|
||||
};
|
||||
|
||||
for krate in builder.in_tree_crates("test") {
|
||||
for krate in builder.in_tree_crates("test", Some(run.target)) {
|
||||
if krate.path.ends_with(&run.path) {
|
||||
make(Mode::Std, krate);
|
||||
}
|
||||
|
@ -1,3 +1,7 @@
//! This module implements [RFC 1946]: Intra-rustdoc-links
//!
//! [RFC 1946]: https://github.com/rust-lang/rfcs/blob/master/text/1946-intra-rustdoc-links.md

use rustc_ast as ast;
use rustc_data_structures::stable_set::FxHashSet;
use rustc_errors::{Applicability, DiagnosticBuilder};
@ -27,7 +31,7 @@ use std::cell::Cell;
|
||||
use std::mem;
|
||||
use std::ops::Range;
|
||||
|
||||
use crate::clean::*;
|
||||
use crate::clean::{self, Crate, GetDefId, Import, Item, ItemLink, PrimitiveType};
|
||||
use crate::core::DocContext;
|
||||
use crate::fold::DocFolder;
|
||||
use crate::html::markdown::markdown_links;
|
||||
@ -42,10 +46,10 @@ pub const COLLECT_INTRA_DOC_LINKS: Pass = Pass {
|
||||
};
|
||||
|
||||
pub fn collect_intra_doc_links(krate: Crate, cx: &DocContext<'_>) -> Crate {
|
||||
let mut coll = LinkCollector::new(cx);
|
||||
coll.fold_crate(krate)
|
||||
LinkCollector::new(cx).fold_crate(krate)
|
||||
}
|
||||
|
||||
/// Top-level errors emitted by this pass.
|
||||
enum ErrorKind<'a> {
|
||||
Resolve(Box<ResolutionFailure<'a>>),
|
||||
AnchorFailure(AnchorFailure),
|
||||
@ -58,18 +62,37 @@ impl<'a> From<ResolutionFailure<'a>> for ErrorKind<'a> {
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
/// A link failed to resolve.
|
||||
enum ResolutionFailure<'a> {
|
||||
/// This resolved, but with the wrong namespace.
|
||||
/// `Namespace` is the expected namespace (as opposed to the actual).
|
||||
WrongNamespace(Res, Namespace),
|
||||
///
|
||||
/// `Namespace` is the namespace specified with a disambiguator
|
||||
/// (as opposed to the actual namespace of the `Res`).
|
||||
WrongNamespace(Res, /* disambiguated */ Namespace),
|
||||
/// The link failed to resolve. `resolution_failure` should look to see if there's
|
||||
/// a more helpful error that can be given.
|
||||
NotResolved { module_id: DefId, partial_res: Option<Res>, unresolved: Cow<'a, str> },
|
||||
/// should not ever happen
|
||||
NotResolved {
|
||||
/// The scope the link was resolved in.
|
||||
module_id: DefId,
|
||||
/// If part of the link resolved, this has the `Res`.
|
||||
///
|
||||
/// In `[std::io::Error::x]`, `std::io::Error` would be a partial resolution.
|
||||
partial_res: Option<Res>,
|
||||
/// The remaining unresolved path segments.
|
||||
///
|
||||
/// In `[std::io::Error::x]`, `x` would be unresolved.
|
||||
unresolved: Cow<'a, str>,
|
||||
},
|
||||
/// This happens when rustdoc can't determine the parent scope for an item.
|
||||
///
|
||||
/// It is always a bug in rustdoc.
|
||||
NoParentItem,
|
||||
/// This link has malformed generic parameters; e.g., the angle brackets are unbalanced.
|
||||
MalformedGenerics(MalformedGenerics),
|
||||
/// used to communicate that this should be ignored, but shouldn't be reported to the user
|
||||
/// Used to communicate that this should be ignored, but shouldn't be reported to the user
|
||||
///
|
||||
/// This happens when there is no disambiguator and one of the namespaces
|
||||
/// failed to resolve.
|
||||
Dummy,
|
||||
}
|
||||
|
||||
@ -115,7 +138,9 @@ enum MalformedGenerics {
|
||||
}
|
||||
|
||||
impl ResolutionFailure<'a> {
|
||||
// This resolved fully (not just partially) but is erroneous for some other reason
|
||||
/// This resolved fully (not just partially) but is erroneous for some other reason
|
||||
///
|
||||
/// Returns the full resolution of the link, if present.
|
||||
fn full_res(&self) -> Option<Res> {
|
||||
match self {
|
||||
Self::WrongNamespace(res, _) => Some(*res),
|
||||
@ -125,13 +150,30 @@ impl ResolutionFailure<'a> {
|
||||
}
|
||||
|
||||
enum AnchorFailure {
|
||||
/// User error: `[std#x#y]` is not valid
|
||||
MultipleAnchors,
|
||||
/// The anchor provided by the user conflicts with Rustdoc's generated anchor.
|
||||
///
|
||||
/// This is an unfortunate state of affairs. Not every item that can be
|
||||
/// linked to has its own page; sometimes it is a subheading within a page,
|
||||
/// like for associated items. In those cases, rustdoc uses an anchor to
|
||||
/// link to the subheading. Since you can't have two anchors for the same
|
||||
/// link, Rustdoc disallows having a user-specified anchor.
|
||||
///
|
||||
/// Most of the time this is fine, because you can just link to the page of
|
||||
/// the item if you want to provide your own anchor. For primitives, though,
|
||||
/// rustdoc uses the anchor as a side channel to know which page to link to;
|
||||
/// it doesn't show up in the generated link. Ideally, rustdoc would remove
|
||||
/// this limitation, allowing you to link to subheaders on primitives.
|
||||
RustdocAnchorConflict(Res),
|
||||
}
|
||||
|
||||
struct LinkCollector<'a, 'tcx> {
|
||||
cx: &'a DocContext<'tcx>,
|
||||
// NOTE: this may not necessarily be a module in the current crate
|
||||
/// A stack of modules used to decide what scope to resolve in.
|
||||
///
|
||||
/// The last module will be used if the parent scope of the current item is
|
||||
/// unknown.
|
||||
mod_ids: Vec<DefId>,
|
||||
/// This is used to store the kind of associated items,
|
||||
/// because `clean` and the disambiguator code expect them to be different.
|
||||
@ -144,6 +186,12 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
LinkCollector { cx, mod_ids: Vec::new(), kind_side_channel: Cell::new(None) }
|
||||
}
|
||||
|
||||
/// Given a full link, parse it as an [enum struct variant].
|
||||
///
|
||||
/// In particular, this will return an error whenever there aren't three
|
||||
/// full path segments left in the link.
|
||||
///
|
||||
/// [enum struct variant]: hir::VariantData::Struct
|
||||
fn variant_field(
|
||||
&self,
|
||||
path_str: &'path str,
|
||||
@ -235,6 +283,10 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Given a primitive type, try to resolve an associated item.
|
||||
///
|
||||
/// HACK(jynelson): `item_str` is passed in instead of derived from `item_name` so the
|
||||
/// lifetimes on `&'path` will work.
|
||||
fn resolve_primitive_associated_item(
|
||||
&self,
|
||||
prim_ty: hir::PrimTy,
|
||||
@ -286,7 +338,9 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
}
|
||||
|
||||
/// Resolves a string as a macro.
|
||||
fn macro_resolve(
|
||||
///
|
||||
/// FIXME(jynelson): Can this be unified with `resolve()`?
|
||||
fn resolve_macro(
|
||||
&self,
|
||||
path_str: &'a str,
|
||||
module_id: DefId,
|
||||
@ -294,6 +348,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
let cx = self.cx;
|
||||
let path = ast::Path::from_ident(Ident::from_str(path_str));
|
||||
cx.enter_resolver(|resolver| {
|
||||
// FIXME(jynelson): does this really need 3 separate lookups?
|
||||
if let Ok((Some(ext), res)) = resolver.resolve_macro_path(
|
||||
&path,
|
||||
None,
|
||||
@ -326,6 +381,11 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
})
|
||||
}
|
||||
|
||||
/// Convenience wrapper around `resolve_str_path_error`.
|
||||
///
|
||||
/// This also handles resolving `true` and `false` as booleans.
|
||||
/// NOTE: `resolve_str_path_error` knows only about paths, not about types.
|
||||
/// Associated items will never be resolved by this function.
|
||||
fn resolve_path(&self, path_str: &str, ns: Namespace, module_id: DefId) -> Option<Res> {
|
||||
let result = self.cx.enter_resolver(|resolver| {
|
||||
resolver.resolve_str_path_error(DUMMY_SP, &path_str, ns, module_id)
|
||||
@ -339,12 +399,13 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolves a string as a path within a particular namespace. Also returns an optional
|
||||
/// URL fragment in the case of variants and methods.
|
||||
/// Resolves a string as a path within a particular namespace. Returns an
|
||||
/// optional URL fragment in the case of variants and methods.
|
||||
fn resolve<'path>(
|
||||
&self,
|
||||
path_str: &'path str,
|
||||
ns: Namespace,
|
||||
// FIXME(#76467): This is for `Self`, and it's wrong.
|
||||
current_item: &Option<String>,
|
||||
module_id: DefId,
|
||||
extra_fragment: &Option<String>,
|
||||
@ -353,15 +414,13 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
|
||||
if let Some(res) = self.resolve_path(path_str, ns, module_id) {
|
||||
match res {
|
||||
// FIXME(#76467): make this fallthrough to lookup the associated
|
||||
// item a separate function.
|
||||
Res::Def(DefKind::AssocFn | DefKind::AssocConst, _) => {
|
||||
assert_eq!(ns, ValueNS);
|
||||
// Fall through: In case this is a trait item, skip the
|
||||
// early return and try looking for the trait.
|
||||
}
|
||||
Res::Def(DefKind::AssocTy, _) => {
|
||||
assert_eq!(ns, TypeNS);
|
||||
// Fall through: In case this is a trait item, skip the
|
||||
// early return and try looking for the trait.
|
||||
}
|
||||
Res::Def(DefKind::Variant, _) => {
|
||||
return handle_variant(cx, res, extra_fragment);
|
||||
@ -410,7 +469,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
})?;
|
||||
|
||||
// FIXME: are these both necessary?
|
||||
let ty_res = if let Some(ty_res) = is_primitive(&path_root, TypeNS)
|
||||
let ty_res = if let Some(ty_res) = resolve_primitive(&path_root, TypeNS)
|
||||
.map(|(_, res)| res)
|
||||
.or_else(|| self.resolve_path(&path_root, TypeNS, module_id))
|
||||
{
|
||||
@ -452,8 +511,8 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
// There should only ever be one associated item that matches from any inherent impl
|
||||
.next()
|
||||
// Check if item_name belongs to `impl SomeTrait for SomeItem`
|
||||
// This gives precedence to `impl SomeItem`:
|
||||
// Although having both would be ambiguous, use impl version for compat. sake.
|
||||
// FIXME(#74563): This gives precedence to `impl SomeItem`:
|
||||
// Although having both would be ambiguous, use impl version for compatibility's sake.
|
||||
// To handle that properly resolve() would have to support
|
||||
// something like [`ambi_fn`](<SomeStruct as SomeTrait>::ambi_fn)
|
||||
.or_else(|| {
|
||||
@ -480,6 +539,8 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
})
|
||||
} else if ns == Namespace::ValueNS {
|
||||
debug!("looking for variants or fields named {} for {:?}", item_name, did);
|
||||
// FIXME(jynelson): why is this different from
|
||||
// `variant_field`?
|
||||
match cx.tcx.type_of(did).kind() {
|
||||
ty::Adt(def, _) => {
|
||||
let field = if def.is_enum() {
|
||||
@ -577,7 +638,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
) -> Option<Res> {
|
||||
// resolve() can't be used for macro namespace
|
||||
let result = match ns {
|
||||
Namespace::MacroNS => self.macro_resolve(path_str, module_id).map_err(ErrorKind::from),
|
||||
Namespace::MacroNS => self.resolve_macro(path_str, module_id).map_err(ErrorKind::from),
|
||||
Namespace::TypeNS | Namespace::ValueNS => self
|
||||
.resolve(path_str, ns, current_item, module_id, extra_fragment)
|
||||
.map(|(res, _)| res),
|
||||
@ -593,6 +654,11 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Look to see if a resolved item has an associated item named `item_name`.
|
||||
///
|
||||
/// Given `[std::io::Error::source]`, where `source` is unresolved, this would
|
||||
/// find `std::error::Error::source` and return
|
||||
/// `<io::Error as error::Error>::source`.
|
||||
fn resolve_associated_trait_item(
|
||||
did: DefId,
|
||||
module: DefId,
|
||||
@ -601,12 +667,12 @@ fn resolve_associated_trait_item(
|
||||
cx: &DocContext<'_>,
|
||||
) -> Option<(ty::AssocKind, DefId)> {
|
||||
let ty = cx.tcx.type_of(did);
|
||||
// First consider automatic impls: `impl From<T> for T`
|
||||
// First consider blanket impls: `impl From<T> for T`
|
||||
let implicit_impls = crate::clean::get_auto_trait_and_blanket_impls(cx, ty, did);
|
||||
let mut candidates: Vec<_> = implicit_impls
|
||||
.flat_map(|impl_outer| {
|
||||
match impl_outer.inner {
|
||||
ImplItem(impl_) => {
|
||||
clean::ImplItem(impl_) => {
|
||||
debug!("considering auto or blanket impl for trait {:?}", impl_.trait_);
|
||||
// Give precedence to methods that were overridden
|
||||
if !impl_.provided_trait_methods.contains(&*item_name.as_str()) {
|
||||
@ -669,7 +735,7 @@ fn resolve_associated_trait_item(
|
||||
.map(|assoc| (assoc.kind, assoc.def_id))
|
||||
}));
|
||||
}
|
||||
// FIXME: warn about ambiguity
|
||||
// FIXME(#74563): warn about ambiguity
|
||||
debug!("the candidates were {:?}", candidates);
|
||||
candidates.pop()
|
||||
}
|
||||
@ -719,20 +785,15 @@ fn traits_implemented_by(cx: &DocContext<'_>, type_: DefId, module: DefId) -> Fx
|
||||
iter.collect()
|
||||
}
|
||||
|
||||
/// Check for resolve collisions between a trait and its derive
|
||||
/// Check for resolve collisions between a trait and its derive.
|
||||
///
|
||||
/// These are common and we should just resolve to the trait in that case
|
||||
/// These are common and we should just resolve to the trait in that case.
|
||||
fn is_derive_trait_collision<T>(ns: &PerNS<Result<(Res, T), ResolutionFailure<'_>>>) -> bool {
|
||||
if let PerNS {
|
||||
matches!(*ns, PerNS {
|
||||
type_ns: Ok((Res::Def(DefKind::Trait, _), _)),
|
||||
macro_ns: Ok((Res::Def(DefKind::Macro(MacroKind::Derive), _), _)),
|
||||
..
|
||||
} = *ns
|
||||
{
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
|
||||
@ -772,29 +833,30 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
|
||||
}
|
||||
|
||||
let current_item = match item.inner {
|
||||
ModuleItem(..) => {
|
||||
clean::ModuleItem(..) => {
|
||||
if item.attrs.inner_docs {
|
||||
if item.def_id.is_top_level_module() { item.name.clone() } else { None }
|
||||
} else {
|
||||
match parent_node.or(self.mod_ids.last().copied()) {
|
||||
Some(parent) if !parent.is_top_level_module() => {
|
||||
// FIXME: can we pull the parent module's name from elsewhere?
|
||||
Some(self.cx.tcx.item_name(parent).to_string())
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
ImplItem(Impl { ref for_, .. }) => {
|
||||
clean::ImplItem(clean::Impl { ref for_, .. }) => {
|
||||
for_.def_id().map(|did| self.cx.tcx.item_name(did).to_string())
|
||||
}
|
||||
// we don't display docs on `extern crate` items anyway, so don't process them.
|
||||
ExternCrateItem(..) => {
|
||||
clean::ExternCrateItem(..) => {
|
||||
debug!("ignoring extern crate item {:?}", item.def_id);
|
||||
return self.fold_item_recur(item);
|
||||
}
|
||||
ImportItem(Import { kind: ImportKind::Simple(ref name, ..), .. }) => Some(name.clone()),
|
||||
MacroItem(..) => None,
|
||||
clean::ImportItem(Import { kind: clean::ImportKind::Simple(ref name, ..), .. }) => {
|
||||
Some(name.clone())
|
||||
}
|
||||
clean::MacroItem(..) => None,
|
||||
_ => item.name.clone(),
|
||||
};
|
||||
|
||||
@ -803,6 +865,8 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
|
||||
}
|
||||
|
||||
// find item's parent to resolve `Self` in item's docs below
|
||||
// FIXME(#76467, #75809): this is a mess and doesn't handle cross-crate
|
||||
// re-exports
|
||||
let parent_name = self.cx.as_local_hir_id(item.def_id).and_then(|item_hir| {
|
||||
let parent_hir = self.cx.tcx.hir().get_parent_item(item_hir);
|
||||
let item_parent = self.cx.tcx.hir().find(parent_hir);
|
||||
@ -870,7 +934,6 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
|
||||
};
|
||||
// NOTE: if there are links that start in one crate and end in another, this will not resolve them.
|
||||
// This is a degenerate case and it's not supported by rustdoc.
|
||||
// FIXME: this will break links that start in `#[doc = ...]` and end as a sugared doc. Should this be supported?
|
||||
for (ori_link, link_range) in markdown_links(&combined_docs) {
|
||||
let link = self.resolve_link(
|
||||
&item,
|
||||
@ -888,15 +951,13 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
if item.is_mod() && !item.attrs.inner_docs {
|
||||
self.mod_ids.push(item.def_id);
|
||||
}
|
||||
|
||||
if item.is_mod() {
|
||||
if !item.attrs.inner_docs {
|
||||
self.mod_ids.push(item.def_id);
|
||||
}
|
||||
|
||||
let ret = self.fold_item_recur(item);
|
||||
|
||||
self.mod_ids.pop();
|
||||
|
||||
ret
|
||||
} else {
|
||||
self.fold_item_recur(item)
|
||||
@ -905,6 +966,9 @@ impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
|
||||
}
|
||||
|
||||
impl LinkCollector<'_, '_> {
|
||||
/// This is the entry point for resolving an intra-doc link.
|
||||
///
|
||||
/// FIXME(jynelson): this is way too many arguments
|
||||
fn resolve_link(
|
||||
&self,
|
||||
item: &Item,
|
||||
@ -943,130 +1007,121 @@ impl LinkCollector<'_, '_> {
|
||||
} else {
|
||||
(parts[0], None)
|
||||
};
|
||||
let resolved_self;
|
||||
let link_text;
|
||||
let mut path_str;
|
||||
let disambiguator;
|
||||
let stripped_path_string;
|
||||
let (mut res, mut fragment) = {
|
||||
path_str = if let Ok((d, path)) = Disambiguator::from_str(&link) {
|
||||
disambiguator = Some(d);
|
||||
path
|
||||
} else {
|
||||
disambiguator = None;
|
||||
&link
|
||||
}
|
||||
.trim();
|
||||
|
||||
if path_str.contains(|ch: char| !(ch.is_alphanumeric() || ":_<>, ".contains(ch))) {
|
||||
return None;
|
||||
}
|
||||
|
||||
// We stripped `()` and `!` when parsing the disambiguator.
|
||||
// Add them back to be displayed, but not prefix disambiguators.
|
||||
link_text = disambiguator
|
||||
.map(|d| d.display_for(path_str))
|
||||
.unwrap_or_else(|| path_str.to_owned());
|
||||
|
||||
// In order to correctly resolve intra-doc-links we need to
|
||||
// pick a base AST node to work from. If the documentation for
|
||||
// this module came from an inner comment (//!) then we anchor
|
||||
// our name resolution *inside* the module. If, on the other
|
||||
// hand it was an outer comment (///) then we anchor the name
|
||||
// resolution in the parent module on the basis that the names
|
||||
// used are more likely to be intended to be parent names. For
|
||||
// this, we set base_node to None for inner comments since
|
||||
// we've already pushed this node onto the resolution stack but
|
||||
// for outer comments we explicitly try and resolve against the
|
||||
// parent_node first.
|
||||
let base_node = if item.is_mod() && item.attrs.inner_docs {
|
||||
self.mod_ids.last().copied()
|
||||
} else {
|
||||
parent_node
|
||||
};
|
||||
|
||||
let mut module_id = if let Some(id) = base_node {
|
||||
id
|
||||
} else {
|
||||
debug!("attempting to resolve item without parent module: {}", path_str);
|
||||
let err_kind = ResolutionFailure::NoParentItem.into();
|
||||
resolution_failure(
|
||||
self,
|
||||
&item,
|
||||
path_str,
|
||||
disambiguator,
|
||||
dox,
|
||||
link_range,
|
||||
smallvec![err_kind],
|
||||
);
|
||||
return None;
|
||||
};
|
||||
|
||||
// replace `Self` with suitable item's parent name
|
||||
if path_str.starts_with("Self::") {
|
||||
if let Some(ref name) = parent_name {
|
||||
resolved_self = format!("{}::{}", name, &path_str[6..]);
|
||||
path_str = &resolved_self;
|
||||
}
|
||||
} else if path_str.starts_with("crate::") {
|
||||
use rustc_span::def_id::CRATE_DEF_INDEX;
|
||||
|
||||
// HACK(jynelson): rustc_resolve thinks that `crate` is the crate currently being documented.
|
||||
// But rustdoc wants it to mean the crate this item was originally present in.
|
||||
// To work around this, remove it and resolve relative to the crate root instead.
|
||||
// HACK(jynelson)(2): If we just strip `crate::` then suddenly primitives become ambiguous
|
||||
// (consider `crate::char`). Instead, change it to `self::`. This works because 'self' is now the crate root.
|
||||
resolved_self = format!("self::{}", &path_str["crate::".len()..]);
|
||||
path_str = &resolved_self;
|
||||
module_id = DefId { krate, index: CRATE_DEF_INDEX };
|
||||
}
|
||||
|
||||
// Strip generics from the path.
|
||||
if path_str.contains(['<', '>'].as_slice()) {
|
||||
stripped_path_string = match strip_generics_from_path(path_str) {
|
||||
Ok(path) => path,
|
||||
Err(err_kind) => {
|
||||
debug!("link has malformed generics: {}", path_str);
|
||||
resolution_failure(
|
||||
self,
|
||||
&item,
|
||||
path_str,
|
||||
disambiguator,
|
||||
dox,
|
||||
link_range,
|
||||
smallvec![err_kind],
|
||||
);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
path_str = &stripped_path_string;
|
||||
}
|
||||
|
||||
// Sanity check to make sure we don't have any angle brackets after stripping generics.
|
||||
assert!(!path_str.contains(['<', '>'].as_slice()));
|
||||
|
||||
// The link is not an intra-doc link if it still contains commas or spaces after
|
||||
// stripping generics.
|
||||
if path_str.contains([',', ' '].as_slice()) {
|
||||
return None;
|
||||
}
|
||||
|
||||
match self.resolve_with_disambiguator(
|
||||
disambiguator,
|
||||
item,
|
||||
dox,
|
||||
path_str,
|
||||
current_item,
|
||||
module_id,
|
||||
extra_fragment,
|
||||
&ori_link,
|
||||
link_range.clone(),
|
||||
) {
|
||||
Some(x) => x,
|
||||
None => return None,
|
||||
}
|
||||
// Parse and strip the disambiguator from the link, if present.
|
||||
let (mut path_str, disambiguator) = if let Ok((d, path)) = Disambiguator::from_str(&link) {
|
||||
(path.trim(), Some(d))
|
||||
} else {
|
||||
(link.trim(), None)
|
||||
};
|
||||
|
||||
if path_str.contains(|ch: char| !(ch.is_alphanumeric() || ":_<>, ".contains(ch))) {
|
||||
return None;
|
||||
}
|
||||
|
||||
// We stripped `()` and `!` when parsing the disambiguator.
|
||||
// Add them back to be displayed, but not prefix disambiguators.
|
||||
let link_text =
|
||||
disambiguator.map(|d| d.display_for(path_str)).unwrap_or_else(|| path_str.to_owned());
|
||||
|
||||
// In order to correctly resolve intra-doc-links we need to
|
||||
// pick a base AST node to work from. If the documentation for
|
||||
// this module came from an inner comment (//!) then we anchor
|
||||
// our name resolution *inside* the module. If, on the other
|
||||
// hand it was an outer comment (///) then we anchor the name
|
||||
// resolution in the parent module on the basis that the names
|
||||
// used are more likely to be intended to be parent names. For
|
||||
// this, we set base_node to None for inner comments since
|
||||
// we've already pushed this node onto the resolution stack but
|
||||
// for outer comments we explicitly try and resolve against the
|
||||
// parent_node first.
|
||||
let base_node = if item.is_mod() && item.attrs.inner_docs {
|
||||
self.mod_ids.last().copied()
|
||||
} else {
|
||||
parent_node
|
||||
};
|
||||
|
||||
let mut module_id = if let Some(id) = base_node {
|
||||
id
|
||||
} else {
|
||||
// This is a bug.
|
||||
debug!("attempting to resolve item without parent module: {}", path_str);
|
||||
let err_kind = ResolutionFailure::NoParentItem.into();
|
||||
resolution_failure(
|
||||
self,
|
||||
&item,
|
||||
path_str,
|
||||
disambiguator,
|
||||
dox,
|
||||
link_range,
|
||||
smallvec![err_kind],
|
||||
);
|
||||
return None;
|
||||
};
|
||||
|
||||
let resolved_self;
|
||||
// replace `Self` with suitable item's parent name
|
||||
if path_str.starts_with("Self::") {
|
||||
if let Some(ref name) = parent_name {
|
||||
resolved_self = format!("{}::{}", name, &path_str[6..]);
|
||||
path_str = &resolved_self;
|
||||
}
|
||||
} else if path_str.starts_with("crate::") {
|
||||
use rustc_span::def_id::CRATE_DEF_INDEX;
|
||||
|
||||
// HACK(jynelson): rustc_resolve thinks that `crate` is the crate currently being documented.
|
||||
// But rustdoc wants it to mean the crate this item was originally present in.
|
||||
// To work around this, remove it and resolve relative to the crate root instead.
|
||||
// HACK(jynelson)(2): If we just strip `crate::` then suddenly primitives become ambiguous
|
||||
// (consider `crate::char`). Instead, change it to `self::`. This works because 'self' is now the crate root.
|
||||
// FIXME(#78696): This doesn't always work.
|
||||
resolved_self = format!("self::{}", &path_str["crate::".len()..]);
|
||||
path_str = &resolved_self;
|
||||
module_id = DefId { krate, index: CRATE_DEF_INDEX };
|
||||
}
|
||||
|
||||
// Strip generics from the path.
|
||||
let stripped_path_string;
|
||||
if path_str.contains(['<', '>'].as_slice()) {
|
||||
stripped_path_string = match strip_generics_from_path(path_str) {
|
||||
Ok(path) => path,
|
||||
Err(err_kind) => {
|
||||
debug!("link has malformed generics: {}", path_str);
|
||||
resolution_failure(
|
||||
self,
|
||||
&item,
|
||||
path_str,
|
||||
disambiguator,
|
||||
dox,
|
||||
link_range,
|
||||
smallvec![err_kind],
|
||||
);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
path_str = &stripped_path_string;
|
||||
}
|
||||
// Sanity check to make sure we don't have any angle brackets after stripping generics.
|
||||
assert!(!path_str.contains(['<', '>'].as_slice()));
|
||||
|
||||
// The link is not an intra-doc link if it still contains commas or spaces after
|
||||
// stripping generics.
|
||||
if path_str.contains([',', ' '].as_slice()) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let (mut res, mut fragment) = self.resolve_with_disambiguator(
|
||||
disambiguator,
|
||||
item,
|
||||
dox,
|
||||
path_str,
|
||||
current_item,
|
||||
module_id,
|
||||
extra_fragment,
|
||||
&ori_link,
|
||||
link_range.clone(),
|
||||
)?;
|
||||
|
||||
// Check for a primitive which might conflict with a module
|
||||
// Report the ambiguity and require that the user specify which one they meant.
|
||||
// FIXME: could there ever be a primitive not in the type namespace?
|
||||
@ -1075,7 +1130,7 @@ impl LinkCollector<'_, '_> {
|
||||
None | Some(Disambiguator::Namespace(Namespace::TypeNS) | Disambiguator::Primitive)
|
||||
) && !matches!(res, Res::PrimTy(_))
|
||||
{
|
||||
if let Some((path, prim)) = is_primitive(path_str, TypeNS) {
|
||||
if let Some((path, prim)) = resolve_primitive(path_str, TypeNS) {
|
||||
// `prim@char`
|
||||
if matches!(disambiguator, Some(Disambiguator::Primitive)) {
|
||||
if fragment.is_some() {
|
||||
@ -1168,11 +1223,13 @@ impl LinkCollector<'_, '_> {
|
||||
privacy_error(cx, &item, &path_str, dox, link_range);
|
||||
}
|
||||
}
|
||||
let id = register_res(cx, res);
|
||||
let id = clean::register_res(cx, res);
|
||||
Some(ItemLink { link: ori_link, link_text, did: Some(id), fragment })
|
||||
}
|
||||
}
|
||||
|
||||
/// After parsing the disambiguator, resolve the main part of the link.
|
||||
// FIXME(jynelson): wow this is just so much
|
||||
fn resolve_with_disambiguator(
|
||||
&self,
|
||||
disambiguator: Option<Disambiguator>,
|
||||
@ -1232,7 +1289,7 @@ impl LinkCollector<'_, '_> {
|
||||
// Try everything!
|
||||
let mut candidates = PerNS {
|
||||
macro_ns: self
|
||||
.macro_resolve(path_str, base_node)
|
||||
.resolve_macro(path_str, base_node)
|
||||
.map(|res| (res, extra_fragment.clone())),
|
||||
type_ns: match self.resolve(
|
||||
path_str,
|
||||
@ -1320,10 +1377,10 @@ impl LinkCollector<'_, '_> {
|
||||
}
|
||||
}
|
||||
Some(MacroNS) => {
|
||||
match self.macro_resolve(path_str, base_node) {
|
||||
match self.resolve_macro(path_str, base_node) {
|
||||
Ok(res) => Some((res, extra_fragment)),
|
||||
Err(mut kind) => {
|
||||
// `macro_resolve` only looks in the macro namespace. Try to give a better error if possible.
|
||||
// `resolve_macro` only looks in the macro namespace. Try to give a better error if possible.
|
||||
for &ns in &[TypeNS, ValueNS] {
|
||||
if let Some(res) = self.check_full_res(
|
||||
ns,
|
||||
@ -1354,9 +1411,15 @@ impl LinkCollector<'_, '_> {
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
/// Disambiguators for a link.
|
||||
enum Disambiguator {
|
||||
/// `prim@`
|
||||
///
|
||||
/// This is buggy, see <https://github.com/rust-lang/rust/pull/77875#discussion_r503583103>
|
||||
Primitive,
|
||||
/// `struct@` or `f()`
|
||||
Kind(DefKind),
|
||||
/// `type@`
|
||||
Namespace(Namespace),
|
||||
}
|
||||
|
||||
@ -1373,7 +1436,7 @@ impl Disambiguator {
|
||||
}
|
||||
}
|
||||
|
||||
/// (disambiguator, path_str)
|
||||
/// Given a link, parse and return `(disambiguator, path_str)`
|
||||
fn from_str(link: &str) -> Result<(Self, &str), ()> {
|
||||
use Disambiguator::{Kind, Namespace as NS, Primitive};
|
||||
|
||||
@ -1424,6 +1487,7 @@ impl Disambiguator {
|
||||
}
|
||||
}
|
||||
|
||||
/// Used for error reporting.
|
||||
fn suggestion(self) -> Suggestion {
|
||||
let kind = match self {
|
||||
Disambiguator::Primitive => return Suggestion::Prefix("prim"),
|
||||
@ -1490,9 +1554,13 @@ impl Disambiguator {
|
||||
}
|
||||
}
|
||||
|
||||
/// A suggestion to show in a diagnostic.
|
||||
enum Suggestion {
|
||||
/// `struct@`
|
||||
Prefix(&'static str),
|
||||
/// `f()`
|
||||
Function,
|
||||
/// `m!`
|
||||
Macro,
|
||||
}
|
||||
|
||||
@ -1582,6 +1650,11 @@ fn report_diagnostic(
|
||||
});
|
||||
}
|
||||
|
||||
/// Reports a link that failed to resolve.
|
||||
///
|
||||
/// This also tries to resolve any intermediate path segments that weren't
|
||||
/// handled earlier. For example, if passed `Item::Crate(std)` and `path_str`
|
||||
/// `std::io::Error::x`, this will resolve `std::io::Error`.
|
||||
fn resolution_failure(
|
||||
collector: &LinkCollector<'_, '_>,
|
||||
item: &Item,
|
||||
@ -1816,6 +1889,7 @@ fn resolution_failure(
|
||||
);
|
||||
}
|
||||
|
||||
/// Report an anchor failure.
|
||||
fn anchor_failure(
|
||||
cx: &DocContext<'_>,
|
||||
item: &Item,
|
||||
@ -1840,6 +1914,7 @@ fn anchor_failure(
|
||||
});
|
||||
}
|
||||
|
||||
/// Report an ambiguity error, where there were multiple possible resolutions.
|
||||
fn ambiguity_error(
|
||||
cx: &DocContext<'_>,
|
||||
item: &Item,
|
||||
@ -1886,6 +1961,8 @@ fn ambiguity_error(
|
||||
});
|
||||
}
|
||||
|
||||
/// In case of an ambiguity or mismatched disambiguator, suggest the correct
|
||||
/// disambiguator.
|
||||
fn suggest_disambiguator(
|
||||
disambiguator: Disambiguator,
|
||||
diag: &mut DiagnosticBuilder<'_>,
|
||||
@ -1911,6 +1988,7 @@ fn suggest_disambiguator(
|
||||
}
|
||||
}
|
||||
|
||||
/// Report a link from a public item to a private one.
|
||||
fn privacy_error(
|
||||
cx: &DocContext<'_>,
|
||||
item: &Item,
|
||||
@ -1978,7 +2056,8 @@ const PRIMITIVES: &[(Symbol, Res)] = &[
(sym::char, Res::PrimTy(hir::PrimTy::Char)),
];

fn is_primitive(path_str: &str, ns: Namespace) -> Option<(Symbol, Res)> {
/// Resolve a primitive type or value.
fn resolve_primitive(path_str: &str, ns: Namespace) -> Option<(Symbol, Res)> {
is_bool_value(path_str, ns).or_else(|| {
if ns == TypeNS {
// FIXME: this should be replaced by a lookup in PrimitiveTypeTable
@ -1990,6 +2069,7 @@ fn is_primitive(path_str: &str, ns: Namespace) -> Option<(Symbol, Res)> {
})
}

/// Resolve a primitive value.
fn is_bool_value(path_str: &str, ns: Namespace) -> Option<(Symbol, Res)> {
if ns == TypeNS && (path_str == "true" || path_str == "false") {
Some((sym::bool, Res::PrimTy(hir::PrimTy::Bool)))
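To make the primitive handling and the disambiguators documented above concrete, here is a hedged illustration of the doc-comment link syntax this pass resolves; the item names are hypothetical and only the link forms (`prim@`, `struct@`, `type@`, `()`, `!`) are taken from the code above:

/// Resolves as a primitive: [prim@char], [bool].
/// Forces a kind or namespace: [struct@Foo], [type@Bar].
/// Suffix disambiguators for functions and macros: [f()], [m!].
pub struct Example;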
src/test/mir-opt/inline/inline-diverging.rs (new file, 40 lines)
@ -0,0 +1,40 @@
// Tests inlining of diverging calls.
//
// ignore-wasm32-bare compiled with panic=abort by default
#![crate_type = "lib"]

// EMIT_MIR inline_diverging.f.Inline.diff
pub fn f() {
sleep();
}

// EMIT_MIR inline_diverging.g.Inline.diff
pub fn g(i: i32) -> u32 {
if i > 0 {
i as u32
} else {
panic();
}
}

// EMIT_MIR inline_diverging.h.Inline.diff
pub fn h() {
call_twice(sleep);
}

#[inline(always)]
pub fn call_twice<R, F: Fn() -> R>(f: F) -> (R, R) {
let a = f();
let b = f();
(a, b)
}

#[inline(always)]
fn panic() -> ! {
panic!();
}

#[inline(always)]
fn sleep() -> ! {
loop {}
}
src/test/mir-opt/inline/inline-options.rs (new file, 19 lines)
@ -0,0 +1,19 @@
// Checks that inlining threshold can be controlled with
// inline-mir-threshold and inline-hint-threshold options.
//
// compile-flags: -Zinline-mir-threshold=90
// compile-flags: -Zinline-mir-hint-threshold=50

// EMIT_MIR inline_options.main.Inline.after.mir
fn main() {
not_inlined();
inlined::<u32>();
}

// Cost is approximately 3 * 25 + 5 = 80.
#[inline]
pub fn not_inlined() { g(); g(); g(); }
pub fn inlined<T>() { g(); g(); g(); }

#[inline(never)]
fn g() {}
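For anyone trying the new knobs outside the test suite, a hedged example invocation follows; the two threshold flags are the nightly `-Z` options exercised above, while the exact MIR opt level required for inlining to fire is an assumption here, not something this diff states:

rustc -Z mir-opt-level=2 -Z inline-mir-threshold=90 -Z inline-mir-hint-threshold=50 --emit=mir lib.rs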
src/test/mir-opt/inline/inline_diverging.f.Inline.diff (new file, 26 lines)
@ -0,0 +1,26 @@
|
||||
- // MIR for `f` before Inline
|
||||
+ // MIR for `f` after Inline
|
||||
|
||||
fn f() -> () {
|
||||
let mut _0: (); // return place in scope 0 at $DIR/inline-diverging.rs:7:12: 7:12
|
||||
let mut _1: !; // in scope 0 at $DIR/inline-diverging.rs:7:12: 9:2
|
||||
let _2: !; // in scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
|
||||
+ let mut _3: !; // in scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
|
||||
+ scope 1 (inlined sleep) { // at $DIR/inline-diverging.rs:8:5: 8:12
|
||||
+ }
|
||||
|
||||
bb0: {
|
||||
StorageLive(_2); // scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
|
||||
- sleep(); // scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
|
||||
- // mir::Constant
|
||||
- // + span: $DIR/inline-diverging.rs:8:5: 8:10
|
||||
- // + literal: Const { ty: fn() -> ! {sleep}, val: Value(Scalar(<ZST>)) }
|
||||
+ StorageLive(_3); // scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
|
||||
+ goto -> bb1; // scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
|
||||
+ }
|
||||
+
|
||||
+ bb1: {
|
||||
+ goto -> bb1; // scope 1 at $DIR/inline-diverging.rs:8:5: 8:12
|
||||
}
|
||||
}
|
||||
|
src/test/mir-opt/inline/inline_diverging.g.Inline.diff (new file, 52 lines)
@ -0,0 +1,52 @@
|
||||
- // MIR for `g` before Inline
|
||||
+ // MIR for `g` after Inline
|
||||
|
||||
fn g(_1: i32) -> u32 {
|
||||
debug i => _1; // in scope 0 at $DIR/inline-diverging.rs:12:10: 12:11
|
||||
let mut _0: u32; // return place in scope 0 at $DIR/inline-diverging.rs:12:21: 12:24
|
||||
let mut _2: bool; // in scope 0 at $DIR/inline-diverging.rs:13:8: 13:13
|
||||
let mut _3: i32; // in scope 0 at $DIR/inline-diverging.rs:13:8: 13:9
|
||||
let mut _4: i32; // in scope 0 at $DIR/inline-diverging.rs:14:9: 14:10
|
||||
let mut _5: !; // in scope 0 at $DIR/inline-diverging.rs:15:12: 17:6
|
||||
let _6: !; // in scope 0 at $DIR/inline-diverging.rs:16:9: 16:16
|
||||
+ let mut _7: !; // in scope 0 at $DIR/inline-diverging.rs:16:9: 16:16
|
||||
+ scope 1 (inlined panic) { // at $DIR/inline-diverging.rs:16:9: 16:16
|
||||
+ }
|
||||
|
||||
bb0: {
|
||||
StorageLive(_2); // scope 0 at $DIR/inline-diverging.rs:13:8: 13:13
|
||||
StorageLive(_3); // scope 0 at $DIR/inline-diverging.rs:13:8: 13:9
|
||||
_3 = _1; // scope 0 at $DIR/inline-diverging.rs:13:8: 13:9
|
||||
_2 = Gt(move _3, const 0_i32); // scope 0 at $DIR/inline-diverging.rs:13:8: 13:13
|
||||
StorageDead(_3); // scope 0 at $DIR/inline-diverging.rs:13:12: 13:13
|
||||
switchInt(_2) -> [false: bb1, otherwise: bb2]; // scope 0 at $DIR/inline-diverging.rs:13:5: 17:6
|
||||
}
|
||||
|
||||
bb1: {
|
||||
StorageLive(_6); // scope 0 at $DIR/inline-diverging.rs:16:9: 16:16
|
||||
- panic(); // scope 0 at $DIR/inline-diverging.rs:16:9: 16:16
|
||||
+ StorageLive(_7); // scope 0 at $DIR/inline-diverging.rs:16:9: 16:16
|
||||
+ begin_panic::<&str>(const "explicit panic"); // scope 1 at $DIR/inline-diverging.rs:16:9: 16:16
|
||||
// mir::Constant
|
||||
- // + span: $DIR/inline-diverging.rs:16:9: 16:14
|
||||
- // + literal: Const { ty: fn() -> ! {panic}, val: Value(Scalar(<ZST>)) }
|
||||
+ // + span: $DIR/inline-diverging.rs:16:9: 16:16
|
||||
+ // + literal: Const { ty: fn(&str) -> ! {std::rt::begin_panic::<&str>}, val: Value(Scalar(<ZST>)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: &str
|
||||
+ // + val: Value(Slice { data: Allocation { bytes: [101, 120, 112, 108, 105, 99, 105, 116, 32, 112, 97, 110, 105, 99], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [16383], len: Size { raw: 14 } }, size: Size { raw: 14 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 14 })
|
||||
+ // mir::Constant
|
||||
+ // + span: $DIR/inline-diverging.rs:16:9: 16:16
|
||||
+ // + literal: Const { ty: &str, val: Value(Slice { data: Allocation { bytes: [101, 120, 112, 108, 105, 99, 105, 116, 32, 112, 97, 110, 105, 99], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [16383], len: Size { raw: 14 } }, size: Size { raw: 14 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 14 }) }
|
||||
}
|
||||
|
||||
bb2: {
|
||||
StorageLive(_4); // scope 0 at $DIR/inline-diverging.rs:14:9: 14:10
|
||||
_4 = _1; // scope 0 at $DIR/inline-diverging.rs:14:9: 14:10
|
||||
_0 = move _4 as u32 (Misc); // scope 0 at $DIR/inline-diverging.rs:14:9: 14:17
|
||||
StorageDead(_4); // scope 0 at $DIR/inline-diverging.rs:14:16: 14:17
|
||||
StorageDead(_2); // scope 0 at $DIR/inline-diverging.rs:18:1: 18:2
|
||||
return; // scope 0 at $DIR/inline-diverging.rs:18:2: 18:2
|
||||
}
|
||||
}
|
||||
|
src/test/mir-opt/inline/inline_diverging.h.Inline.diff (new file, 58 lines)
@ -0,0 +1,58 @@
|
||||
- // MIR for `h` before Inline
|
||||
+ // MIR for `h` after Inline
|
||||
|
||||
fn h() -> () {
|
||||
let mut _0: (); // return place in scope 0 at $DIR/inline-diverging.rs:21:12: 21:12
|
||||
let _1: (!, !); // in scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ let mut _2: fn() -> ! {sleep}; // in scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ let mut _7: (); // in scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ let mut _8: (); // in scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ scope 1 (inlined call_twice::<!, fn() -> ! {sleep}>) { // at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ debug f => _2; // in scope 1 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ let _3: !; // in scope 1 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ let mut _4: &fn() -> ! {sleep}; // in scope 1 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ let mut _5: &fn() -> ! {sleep}; // in scope 1 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ let mut _6: !; // in scope 1 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ scope 2 {
|
||||
+ debug a => _3; // in scope 2 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ scope 3 {
|
||||
+ debug b => _6; // in scope 3 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ }
|
||||
+ scope 6 (inlined <fn() -> ! {sleep} as Fn<()>>::call - shim(fn() -> ! {sleep})) { // at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ scope 7 (inlined sleep) { // at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ scope 4 (inlined <fn() -> ! {sleep} as Fn<()>>::call - shim(fn() -> ! {sleep})) { // at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ scope 5 (inlined sleep) { // at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
|
||||
bb0: {
|
||||
StorageLive(_1); // scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
- _1 = call_twice::<!, fn() -> ! {sleep}>(sleep) -> bb1; // scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ StorageLive(_2); // scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ _2 = sleep; // scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
// mir::Constant
|
||||
- // + span: $DIR/inline-diverging.rs:22:5: 22:15
|
||||
- // + literal: Const { ty: fn(fn() -> ! {sleep}) -> (!, !) {call_twice::<!, fn() -> ! {sleep}>}, val: Value(Scalar(<ZST>)) }
|
||||
- // mir::Constant
|
||||
// + span: $DIR/inline-diverging.rs:22:16: 22:21
|
||||
// + literal: Const { ty: fn() -> ! {sleep}, val: Value(Scalar(<ZST>)) }
|
||||
+ StorageLive(_3); // scope 1 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ StorageLive(_4); // scope 1 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ _4 = &_2; // scope 1 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ StorageLive(_7); // scope 1 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ _7 = const (); // scope 1 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
+ goto -> bb1; // scope 5 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
}
|
||||
|
||||
bb1: {
|
||||
- StorageDead(_1); // scope 0 at $DIR/inline-diverging.rs:22:22: 22:23
|
||||
- _0 = const (); // scope 0 at $DIR/inline-diverging.rs:21:12: 23:2
|
||||
- return; // scope 0 at $DIR/inline-diverging.rs:23:2: 23:2
|
||||
+ goto -> bb1; // scope 5 at $DIR/inline-diverging.rs:22:5: 22:22
|
||||
}
|
||||
}
|
||||
|
@ -18,6 +18,7 @@
|
||||
StorageLive(_2); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
|
||||
_2 = Box(std::vec::Vec<u32>); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
|
||||
- (*_2) = Vec::<u32>::new() -> [return: bb1, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
|
||||
+ StorageLive(_4); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
|
||||
+ _4 = &mut (*_2); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
|
||||
+ ((*_4).0: alloc::raw_vec::RawVec<u32>) = const alloc::raw_vec::RawVec::<u32> { ptr: Unique::<u32> { pointer: {0x4 as *const u32}, _marker: PhantomData::<u32> }, cap: 0_usize, alloc: std::alloc::Global }; // scope 2 at $DIR/inline-into-box-place.rs:8:33: 8:43
|
||||
+ // ty::Const
|
||||
@ -34,6 +35,7 @@
|
||||
+ // + user_ty: UserType(0)
|
||||
+ // + literal: Const { ty: alloc::raw_vec::RawVec<u32>, val: Value(ByRef { alloc: Allocation { bytes: [4, 0, 0, 0, 0, 0, 0, 0], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [255], len: Size { raw: 8 } }, size: Size { raw: 8 }, align: Align { pow2: 2 }, mutability: Not, extra: () }, offset: Size { raw: 0 } }) }
|
||||
+ ((*_4).1: usize) = const 0_usize; // scope 2 at $DIR/inline-into-box-place.rs:8:33: 8:43
|
||||
+ StorageDead(_4); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
|
||||
_1 = move _2; // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
|
||||
StorageDead(_2); // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
|
||||
_0 = const (); // scope 0 at $DIR/inline-into-box-place.rs:7:11: 9:2
|
||||
|
@ -18,6 +18,7 @@
|
||||
StorageLive(_2); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
|
||||
_2 = Box(std::vec::Vec<u32>); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
|
||||
- (*_2) = Vec::<u32>::new() -> [return: bb1, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
|
||||
+ StorageLive(_4); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
|
||||
+ _4 = &mut (*_2); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
|
||||
+ ((*_4).0: alloc::raw_vec::RawVec<u32>) = const alloc::raw_vec::RawVec::<u32> { ptr: Unique::<u32> { pointer: {0x4 as *const u32}, _marker: PhantomData::<u32> }, cap: 0_usize, alloc: std::alloc::Global }; // scope 2 at $DIR/inline-into-box-place.rs:8:33: 8:43
|
||||
+ // ty::Const
|
||||
@ -34,6 +35,7 @@
|
||||
+ // + user_ty: UserType(0)
|
||||
+ // + literal: Const { ty: alloc::raw_vec::RawVec<u32>, val: Value(ByRef { alloc: Allocation { bytes: [4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [65535], len: Size { raw: 16 } }, size: Size { raw: 16 }, align: Align { pow2: 3 }, mutability: Not, extra: () }, offset: Size { raw: 0 } }) }
|
||||
+ ((*_4).1: usize) = const 0_usize; // scope 2 at $DIR/inline-into-box-place.rs:8:33: 8:43
|
||||
+ StorageDead(_4); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
|
||||
_1 = move _2; // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
|
||||
StorageDead(_2); // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
|
||||
_0 = const (); // scope 0 at $DIR/inline-into-box-place.rs:7:11: 9:2
|
||||
|
src/test/mir-opt/inline/inline_options.main.Inline.after.mir (new file, 56 lines)
@ -0,0 +1,56 @@
|
||||
// MIR for `main` after Inline
|
||||
|
||||
fn main() -> () {
|
||||
let mut _0: (); // return place in scope 0 at $DIR/inline-options.rs:8:11: 8:11
|
||||
let _1: (); // in scope 0 at $DIR/inline-options.rs:9:5: 9:18
|
||||
let _2: (); // in scope 0 at $DIR/inline-options.rs:10:5: 10:21
|
||||
scope 1 (inlined inlined::<u32>) { // at $DIR/inline-options.rs:10:5: 10:21
|
||||
let _3: (); // in scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
let _4: (); // in scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
let _5: (); // in scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
}
|
||||
|
||||
bb0: {
|
||||
StorageLive(_1); // scope 0 at $DIR/inline-options.rs:9:5: 9:18
|
||||
_1 = not_inlined() -> bb1; // scope 0 at $DIR/inline-options.rs:9:5: 9:18
|
||||
// mir::Constant
|
||||
// + span: $DIR/inline-options.rs:9:5: 9:16
|
||||
// + literal: Const { ty: fn() {not_inlined}, val: Value(Scalar(<ZST>)) }
|
||||
}
|
||||
|
||||
bb1: {
|
||||
StorageDead(_1); // scope 0 at $DIR/inline-options.rs:9:18: 9:19
|
||||
StorageLive(_2); // scope 0 at $DIR/inline-options.rs:10:5: 10:21
|
||||
StorageLive(_3); // scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
_3 = g() -> bb2; // scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
// mir::Constant
|
||||
// + span: $DIR/inline-options.rs:10:5: 10:21
|
||||
// + literal: Const { ty: fn() {g}, val: Value(Scalar(<ZST>)) }
|
||||
}
|
||||
|
||||
bb2: {
|
||||
StorageDead(_3); // scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
StorageLive(_4); // scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
_4 = g() -> bb3; // scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
// mir::Constant
|
||||
// + span: $DIR/inline-options.rs:10:5: 10:21
|
||||
// + literal: Const { ty: fn() {g}, val: Value(Scalar(<ZST>)) }
|
||||
}
|
||||
|
||||
bb3: {
|
||||
StorageDead(_4); // scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
StorageLive(_5); // scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
_5 = g() -> bb4; // scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
// mir::Constant
|
||||
// + span: $DIR/inline-options.rs:10:5: 10:21
|
||||
// + literal: Const { ty: fn() {g}, val: Value(Scalar(<ZST>)) }
|
||||
}
|
||||
|
||||
bb4: {
|
||||
StorageDead(_5); // scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
_2 = const (); // scope 1 at $DIR/inline-options.rs:10:5: 10:21
|
||||
StorageDead(_2); // scope 0 at $DIR/inline-options.rs:10:21: 10:22
|
||||
_0 = const (); // scope 0 at $DIR/inline-options.rs:8:11: 11:2
|
||||
return; // scope 0 at $DIR/inline-options.rs:11:2: 11:2
|
||||
}
|
||||
}
|
@ -0,0 +1,31 @@
// run-pass
#![feature(const_generics, const_evaluatable_checked)]
#![allow(incomplete_features)]

pub trait BlockCipher {
const BLOCK_SIZE: usize;
}

struct FooCipher;
impl BlockCipher for FooCipher {
const BLOCK_SIZE: usize = 64;
}

struct BarCipher;
impl BlockCipher for BarCipher {
const BLOCK_SIZE: usize = 32;
}

pub struct Block<C>(C);

pub fn test<C: BlockCipher, const M: usize>()
where
[u8; M - C::BLOCK_SIZE]: Sized,
{
let _ = [0; M - C::BLOCK_SIZE];
}

fn main() {
test::<FooCipher, 128>();
test::<BarCipher, 64>();
}
@ -0,0 +1,16 @@
#![feature(const_generics, const_evaluatable_checked)]
#![allow(incomplete_features)]

use std::mem::size_of;
use std::marker::PhantomData;

struct Foo<T>(PhantomData<T>);

fn test<T>() -> [u8; size_of::<T>()] {
[0; size_of::<Foo<T>>()]
//~^ ERROR unconstrained generic constant
}

fn main() {
test::<u32>();
}
@ -0,0 +1,14 @@
error: unconstrained generic constant
--> $DIR/different-fn.rs:10:9
|
LL | [0; size_of::<Foo<T>>()]
| ^^^^^^^^^^^^^^^^^^^
|
help: consider adding a `where` bound for this expression
--> $DIR/different-fn.rs:10:9
|
LL | [0; size_of::<Foo<T>>()]
| ^^^^^^^^^^^^^^^^^^^

error: aborting due to previous error
@ -3089,7 +3089,7 @@ impl<'tcx> Visitor<'tcx> for IterFunctionVisitor {
}
}

/// Detect the occurences of calls to `iter` or `into_iter` for the
/// Detect the occurrences of calls to `iter` or `into_iter` for the
/// given identifier
fn detect_iter_and_into_iters<'tcx>(block: &'tcx Block<'tcx>, identifier: Ident) -> Option<Vec<IterFunction>> {
let mut visitor = IterFunctionVisitor {