Auto merge of #82663 - jyn514:rollup-xh3cb0c, r=jyn514

Rollup of 8 pull requests

Successful merges:

 - #81210 (BTreeMap: correct node size test case for choices of B)
 - #82360 (config.toml parsing error improvements)
 - #82428 (Update mdbook)
 - #82480 (Remove `ENABLE_DOWNLOAD_RUSTC` constant)
 - #82578 (Add some diagnostic items for Clippy)
 - #82620 (Apply lint restrictions from renamed lints)
 - #82635 (Fix typos in rustc_infer::infer::nll_relate)
 - #82645 (Clarify that SyncOnceCell::set blocks.)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2021-03-01 20:52:47 +00:00
commit 4f20caa625
29 changed files with 164 additions and 163 deletions

View File

@ -1767,7 +1767,7 @@ dependencies = [
"regex",
"serde",
"serde_json",
"shlex",
"shlex 0.1.1",
]
[[package]]
@ -2122,9 +2122,9 @@ dependencies = [
[[package]]
name = "mdbook"
version = "0.4.6"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3d948b64449003363127ed6c6139f03273982c3fe97da4cb3dee933e38ce38f"
checksum = "28f6a882f3880ec68e96f60d6b543c34941e2f307ad10e2992e4db9acfe96529"
dependencies = [
"ammonia",
"anyhow",
@ -2142,7 +2142,7 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
"shlex",
"shlex 1.0.0",
"tempfile",
"toml",
]
@ -4858,6 +4858,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"
[[package]]
name = "shlex"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42a568c8f2cd051a4d283bd6eb0343ac214c1b0f1ac19f93e1175b2dee38c73d"
[[package]]
name = "signal-hook-registry"
version = "1.2.1"

View File

@ -44,7 +44,7 @@ where
{
infcx: &'me InferCtxt<'me, 'tcx>,
/// Callback to use when we deduce an outlives relationship
/// Callback to use when we deduce an outlives relationship.
delegate: D,
/// How are we relating `a` and `b`?
@ -768,7 +768,7 @@ impl<'me, 'tcx> TypeVisitor<'tcx> for ScopeInstantiator<'me, 'tcx> {
}
}
/// The "type generalize" is used when handling inference variables.
/// The "type generalizer" is used when handling inference variables.
///
/// The basic strategy for handling a constraint like `?A <: B` is to
/// apply a "generalization strategy" to the type `B` -- this replaces

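Editor's note (not part of the commit): the doc comment above only sketches what "generalization" does for a constraint like `?A <: B`. The snippet below is a deliberately toy model of the idea, not rustc's `TypeGeneralizer` (the real one also deals with regions, universes, and occurs checks): copy `B`, replacing every inference variable it contains with a fresh one, assign that copy to `?A`, and then relate the copy against `B` structurally.

```rust
// Toy sketch only: names and the `Ty` shape here are invented for
// illustration and do not correspond to rustc's actual data structures.
#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Int,
    Infer(u32),   // an inference variable such as `?B`
    Vec(Box<Ty>), // stand-in for a type constructor like `Vec<T>`
}

struct InferCtxt {
    next_var: u32,
}

impl InferCtxt {
    fn fresh_var(&mut self) -> Ty {
        let v = Ty::Infer(self.next_var);
        self.next_var += 1;
        v
    }

    /// The "generalizer": same shape as `ty`, but with fresh inference
    /// variables in place of any it contains.
    fn generalize(&mut self, ty: &Ty) -> Ty {
        match ty {
            Ty::Int => Ty::Int,
            Ty::Infer(_) => self.fresh_var(),
            Ty::Vec(inner) => Ty::Vec(Box::new(self.generalize(inner))),
        }
    }
}

fn main() {
    let mut infcx = InferCtxt { next_var: 2 };
    // Constraint: ?0 <: Vec<?1>. Generalizing the right-hand side gives
    // Vec<?2>; `?0` is assigned Vec<?2>, and what remains is the
    // structural constraint Vec<?2> <: Vec<?1>.
    let b = Ty::Vec(Box::new(Ty::Infer(1)));
    let generalized = infcx.generalize(&b);
    println!("?0 := {:?}; then relate {:?} <: {:?}", generalized, generalized, b);
}
```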
View File

@ -321,17 +321,18 @@ impl<'s> LintLevelsBuilder<'s> {
None
};
let name = meta_item.path.segments.last().expect("empty lint name").ident.name;
match store.check_lint_name(&name.as_str(), tool_name) {
let lint_result = store.check_lint_name(&name.as_str(), tool_name);
match &lint_result {
CheckLintNameResult::Ok(ids) => {
let src = LintLevelSource::Node(name, li.span(), reason);
for &id in ids {
for &id in *ids {
self.check_gated_lint(id, attr.span);
self.insert_spec(&mut specs, id, (level, src));
}
}
CheckLintNameResult::Tool(result) => {
match result {
match *result {
Ok(ids) => {
let complete_name = &format!("{}::{}", tool_name.unwrap(), name);
let src = LintLevelSource::Node(
@ -343,7 +344,7 @@ impl<'s> LintLevelsBuilder<'s> {
self.insert_spec(&mut specs, *id, (level, src));
}
}
Err((Some(ids), new_lint_name)) => {
Err((Some(ids), ref new_lint_name)) => {
let lint = builtin::RENAMED_AND_REMOVED_LINTS;
let (lvl, src) =
self.sets.get_lint_level(lint, self.cur, Some(&specs), &sess);
@ -392,21 +393,21 @@ impl<'s> LintLevelsBuilder<'s> {
CheckLintNameResult::Warning(msg, renamed) => {
let lint = builtin::RENAMED_AND_REMOVED_LINTS;
let (level, src) =
let (renamed_lint_level, src) =
self.sets.get_lint_level(lint, self.cur, Some(&specs), &sess);
struct_lint_level(
self.sess,
lint,
level,
renamed_lint_level,
src,
Some(li.span().into()),
|lint| {
let mut err = lint.build(&msg);
if let Some(new_name) = renamed {
if let Some(new_name) = &renamed {
err.span_suggestion(
li.span(),
"use the new name",
new_name,
new_name.to_string(),
Applicability::MachineApplicable,
);
}
@ -444,6 +445,22 @@ impl<'s> LintLevelsBuilder<'s> {
);
}
}
// If this lint was renamed, apply the new lint instead of ignoring the attribute.
// This happens outside of the match because the new lint should be applied even if
// we don't warn about the name change.
if let CheckLintNameResult::Warning(_, Some(new_name)) = lint_result {
// Ignore any errors or warnings that happen because the new name is inaccurate
if let CheckLintNameResult::Ok(ids) =
store.check_lint_name(&new_name, tool_name)
{
let src =
LintLevelSource::Node(Symbol::intern(&new_name), li.span(), reason);
for &id in ids {
self.check_gated_lint(id, attr.span);
self.insert_spec(&mut specs, id, (level, src));
}
}
}
}
}

View File

@ -126,6 +126,9 @@ symbols! {
Argument,
ArgumentV1,
Arguments,
BTreeMap,
BTreeSet,
BinaryHeap,
C,
CString,
Center,
@ -163,6 +166,7 @@ symbols! {
Iterator,
Layout,
Left,
LinkedList,
LintPass,
None,
Ok,
@ -191,6 +195,7 @@ symbols! {
RangeToInclusive,
Rc,
Ready,
Receiver,
Result,
Return,
Right,
@ -592,6 +597,8 @@ symbols! {
gt,
half_open_range_patterns,
hash,
hashmap_type,
hashset_type,
hexagon_target_feature,
hidden,
homogeneous_aggregate,
@ -1256,6 +1263,7 @@ symbols! {
variant_count,
vec,
vec_type,
vecdeque_type,
version,
vis,
visible_private_types,

View File

@ -637,7 +637,9 @@ changelog-seen = 2
# The full path to the musl libdir.
#musl-libdir = musl-root/lib
# The root location of the `wasm32-wasi` sysroot.
# The root location of the `wasm32-wasi` sysroot. Only used for the
# `wasm32-wasi` target. If you are building wasm32-wasi target, make sure to
# create a `[target.wasm32-wasi]` section and move this field there.
#wasi-root = "..."
# Used in testing for configuring where the QEMU images are located, you

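The comment added in the hunk above tells users to move `wasi-root` into a per-target section. Purely as an illustration (the path below is a placeholder, not something this commit prescribes), the stanza it asks for looks like:

```toml
[target.wasm32-wasi]
# Point this at wherever your wasi-libc / wasi-sdk sysroot is installed.
wasi-root = "/path/to/wasi-sysroot"
```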
View File

@ -296,11 +296,6 @@ fn fat_val_map(n: usize) -> BTreeMap<usize, [usize; FAT]> {
(0..n).map(|i| (i, [i; FAT])).collect::<BTreeMap<_, _>>()
}
// The returned map has large keys and values.
fn fat_map(n: usize) -> BTreeMap<[usize; FAT], [usize; FAT]> {
(0..n).map(|i| ([i; FAT], [i; FAT])).collect::<BTreeMap<_, _>>()
}
#[bench]
pub fn clone_slim_100(b: &mut Bencher) {
let src = slim_map(100);
@ -513,74 +508,3 @@ pub fn clone_fat_val_100_and_remove_half(b: &mut Bencher) {
map
})
}
#[bench]
pub fn clone_fat_100(b: &mut Bencher) {
let src = fat_map(100);
b.iter(|| src.clone())
}
#[bench]
pub fn clone_fat_100_and_clear(b: &mut Bencher) {
let src = fat_map(100);
b.iter(|| src.clone().clear())
}
#[bench]
pub fn clone_fat_100_and_drain_all(b: &mut Bencher) {
let src = fat_map(100);
b.iter(|| src.clone().drain_filter(|_, _| true).count())
}
#[bench]
pub fn clone_fat_100_and_drain_half(b: &mut Bencher) {
let src = fat_map(100);
b.iter(|| {
let mut map = src.clone();
assert_eq!(map.drain_filter(|i, _| i[0] % 2 == 0).count(), 100 / 2);
assert_eq!(map.len(), 100 / 2);
})
}
#[bench]
pub fn clone_fat_100_and_into_iter(b: &mut Bencher) {
let src = fat_map(100);
b.iter(|| src.clone().into_iter().count())
}
#[bench]
pub fn clone_fat_100_and_pop_all(b: &mut Bencher) {
let src = fat_map(100);
b.iter(|| {
let mut map = src.clone();
while map.pop_first().is_some() {}
map
});
}
#[bench]
pub fn clone_fat_100_and_remove_all(b: &mut Bencher) {
let src = fat_map(100);
b.iter(|| {
let mut map = src.clone();
while let Some(elt) = map.iter().map(|(&i, _)| i).next() {
let v = map.remove(&elt);
debug_assert!(v.is_some());
}
map
});
}
#[bench]
pub fn clone_fat_100_and_remove_half(b: &mut Bencher) {
let src = fat_map(100);
b.iter(|| {
let mut map = src.clone();
for i in (0..100).step_by(2) {
let v = map.remove(&[i; FAT]);
debug_assert!(v.is_some());
}
assert_eq!(map.len(), 100 / 2);
map
})
}

View File

@ -247,6 +247,7 @@ use super::SpecExtend;
/// [peek]: BinaryHeap::peek
/// [peek\_mut]: BinaryHeap::peek_mut
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BinaryHeap")]
pub struct BinaryHeap<T> {
data: Vec<T>,
}

View File

@ -138,6 +138,7 @@ pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT;
/// *stat += random_stat_buff();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BTreeMap")]
pub struct BTreeMap<K, V> {
root: Option<Root<K, V>>,
length: usize,

View File

@ -136,8 +136,9 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>
}
}
// Tests our value of MIN_INSERTS_HEIGHT_2. It may change according to the
// implementation of insertion, but it's best to be aware of when it does.
// Tests our value of MIN_INSERTS_HEIGHT_2. Failure may mean you just need to
// adapt that value to match a change in node::CAPACITY or the choices made
// during insertion, otherwise other test cases may fail or be less useful.
#[test]
fn test_levels() {
let mut map = BTreeMap::new();

View File

@ -95,7 +95,7 @@ fn test_partial_eq() {
#[cfg(target_arch = "x86_64")]
fn test_sizes() {
assert_eq!(core::mem::size_of::<LeafNode<(), ()>>(), 16);
assert_eq!(core::mem::size_of::<LeafNode<i64, i64>>(), 16 + CAPACITY * 8 * 2);
assert_eq!(core::mem::size_of::<InternalNode<(), ()>>(), 112);
assert_eq!(core::mem::size_of::<InternalNode<i64, i64>>(), 112 + CAPACITY * 8 * 2);
assert_eq!(core::mem::size_of::<LeafNode<i64, i64>>(), 16 + CAPACITY * 2 * 8);
assert_eq!(core::mem::size_of::<InternalNode<(), ()>>(), 16 + (CAPACITY + 1) * 8);
assert_eq!(core::mem::size_of::<InternalNode<i64, i64>>(), 16 + (CAPACITY * 3 + 1) * 8);
}
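A quick check of the arithmetic above (with the then-default B = 6, so CAPACITY = 2 * B - 1 = 11, on x86_64): the new expressions evaluate to the same byte counts the old assertions hard-coded, but keep tracking the layout if B is ever changed. LeafNode<i64, i64>: 16 + 11 * 2 * 8 = 192; InternalNode<(), ()>: 16 + (11 + 1) * 8 = 112, the literal the old test used; InternalNode<i64, i64>: 16 + (11 * 3 + 1) * 8 = 288. The (CAPACITY + 1) * 8 term is the internal node's array of child-edge pointers at 8 bytes each.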

View File

@ -61,6 +61,7 @@ use super::Recover;
/// ```
#[derive(Hash, PartialEq, Eq, Ord, PartialOrd)]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BTreeSet")]
pub struct BTreeSet<T> {
map: BTreeMap<T, ()>,
}

View File

@ -35,6 +35,7 @@ mod tests;
/// array-based containers are generally faster,
/// more memory efficient, and make better use of CPU cache.
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "LinkedList")]
pub struct LinkedList<T> {
head: Option<NonNull<Node<T>>>,
tail: Option<NonNull<Node<T>>>,

View File

@ -177,7 +177,10 @@ impl<T> SyncOnceCell<T> {
/// Sets the contents of this cell to `value`.
///
/// Returns `Ok(())` if the cell's value was updated.
/// May block if another thread is currently attempting to initialize the cell. The cell is
/// guaranteed to contain a value when set returns, though not necessarily the one provided.
///
/// Returns `Ok(())` if the cell's value was set by this call.
///
/// # Examples
///

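A rough sketch (not part of this commit) of the behaviour the new doc text describes. It is written against `std::sync::OnceLock`, the name under which `SyncOnceCell` was later stabilized with the same `set` contract; at the time of this commit the type lived in `std::lazy` behind `#![feature(once_cell)]`.

```rust
use std::sync::OnceLock;
use std::thread;

// Two threads race to initialize the same cell.
static CELL: OnceLock<u32> = OnceLock::new();

fn main() {
    let handles: Vec<_> = (0..2u32)
        .map(|i| {
            thread::spawn(move || {
                // At most one of these calls wins. A losing thread may block
                // while the winner initializes the cell, then gets its value
                // back in `Err`. Either way, once `set` has returned the cell
                // is guaranteed to be initialized -- just not necessarily
                // with the value this thread passed in.
                let won = CELL.set(i).is_ok();
                (i, won, *CELL.get().unwrap())
            })
        })
        .collect();

    for handle in handles {
        let (i, won, seen) = handle.join().unwrap();
        println!("thread {i}: won = {won}, cell holds {seen}");
    }
}
```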
View File

@ -310,6 +310,7 @@ mod cache_aligned;
/// println!("{}", recv.recv().unwrap()); // Received after 2 seconds
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "Receiver")]
pub struct Receiver<T> {
inner: UnsafeCell<Flavor<T>>,
}

View File

@ -57,14 +57,6 @@ pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash {
/// `true` here can still be overwritten by `should_run` calling `default_condition`.
const DEFAULT: bool = false;
/// Whether this step should be run even when `download-rustc` is set.
///
/// Most steps are not important when the compiler is downloaded, since they will be included in
/// the pre-compiled sysroot. Steps can set this to `true` to be built anyway.
///
/// When in doubt, set this to `false`.
const ENABLE_DOWNLOAD_RUSTC: bool = false;
/// If true, then this rule should be skipped if --target was specified, but --host was not
const ONLY_HOSTS: bool = false;
@ -107,7 +99,6 @@ impl RunConfig<'_> {
struct StepDescription {
default: bool,
enable_download_rustc: bool,
only_hosts: bool,
should_run: fn(ShouldRun<'_>) -> ShouldRun<'_>,
make_run: fn(RunConfig<'_>),
@ -162,7 +153,6 @@ impl StepDescription {
fn from<S: Step>() -> StepDescription {
StepDescription {
default: S::DEFAULT,
enable_download_rustc: S::ENABLE_DOWNLOAD_RUSTC,
only_hosts: S::ONLY_HOSTS,
should_run: S::should_run,
make_run: S::make_run,
@ -179,14 +169,6 @@ impl StepDescription {
"{:?} not skipped for {:?} -- not in {:?}",
pathset, self.name, builder.config.exclude
);
} else if builder.config.download_rustc && !self.enable_download_rustc {
if !builder.config.dry_run {
eprintln!(
"Not running {} because its artifacts have been downloaded from CI (`download-rustc` is set)",
self.name
);
}
return;
}
// Determine the targets participating in this rule.

View File

@ -62,7 +62,6 @@ fn cargo_subcommand(kind: Kind) -> &'static str {
impl Step for Std {
type Output = ();
const DEFAULT: bool = true;
const ENABLE_DOWNLOAD_RUSTC: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.all_krates("test")
@ -156,7 +155,6 @@ impl Step for Rustc {
type Output = ();
const ONLY_HOSTS: bool = true;
const DEFAULT: bool = true;
const ENABLE_DOWNLOAD_RUSTC: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.all_krates("rustc-main")
@ -235,7 +233,6 @@ impl Step for CodegenBackend {
type Output = ();
const ONLY_HOSTS: bool = true;
const DEFAULT: bool = true;
const ENABLE_DOWNLOAD_RUSTC: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.paths(&["compiler/rustc_codegen_cranelift", "rustc_codegen_cranelift"])
@ -293,7 +290,6 @@ macro_rules! tool_check_step {
type Output = ();
const ONLY_HOSTS: bool = true;
const DEFAULT: bool = true;
const ENABLE_DOWNLOAD_RUSTC: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path($path)

View File

@ -63,6 +63,12 @@ impl Step for Std {
let target = self.target;
let compiler = self.compiler;
// These artifacts were already copied (in `impl Step for Sysroot`).
// Don't recompile them.
if builder.config.download_rustc {
return;
}
if builder.config.keep_stage.contains(&compiler.stage)
|| builder.config.keep_stage_std.contains(&compiler.stage)
{
@ -178,7 +184,9 @@ fn copy_self_contained_objects(
// To do that we have to distribute musl startup objects as a part of Rust toolchain
// and link with them manually in the self-contained mode.
if target.contains("musl") {
let srcdir = builder.musl_libdir(target).unwrap();
let srcdir = builder.musl_libdir(target).unwrap_or_else(|| {
panic!("Target {:?} does not have a \"musl-libdir\" key", target.triple)
});
for &obj in &["crt1.o", "Scrt1.o", "rcrt1.o", "crti.o", "crtn.o"] {
copy_and_stamp(
builder,
@ -196,7 +204,12 @@ fn copy_self_contained_objects(
target_deps.push((target, DependencyType::TargetSelfContained));
}
} else if target.ends_with("-wasi") {
let srcdir = builder.wasi_root(target).unwrap().join("lib/wasm32-wasi");
let srcdir = builder
.wasi_root(target)
.unwrap_or_else(|| {
panic!("Target {:?} does not have a \"wasi-root\" key", target.triple)
})
.join("lib/wasm32-wasi");
for &obj in &["crt1.o", "crt1-reactor.o"] {
copy_and_stamp(
builder,
@ -500,6 +513,13 @@ impl Step for Rustc {
let compiler = self.compiler;
let target = self.target;
if builder.config.download_rustc {
// Copy the existing artifacts instead of rebuilding them.
// NOTE: this path is only taken for tools linking to rustc-dev.
builder.ensure(Sysroot { compiler });
return;
}
builder.ensure(Std { compiler, target });
if builder.config.keep_stage.contains(&compiler.stage) {

View File

@ -483,7 +483,6 @@ pub struct Rustdoc {
impl Step for Rustdoc {
type Output = PathBuf;
const DEFAULT: bool = true;
const ENABLE_DOWNLOAD_RUSTC: bool = true;
const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {

View File

@ -0,0 +1,9 @@
// compile-flags: --crate-type lib
#![deny(single_use_lifetime)]
//~^ WARNING renamed
//~| NOTE `#[warn(renamed_and_removed_lints)]` on by default
//~| NOTE defined here
fn _foo<'a>(_x: &'a u32) {}
//~^ ERROR only used once
//~| NOTE this lifetime
//~| NOTE is used only here

View File

@ -0,0 +1,28 @@
warning: lint `single_use_lifetime` has been renamed to `single_use_lifetimes`
--> $DIR/renamed-lints-still-apply.rs:2:9
|
LL | #![deny(single_use_lifetime)]
| ^^^^^^^^^^^^^^^^^^^ help: use the new name: `single_use_lifetimes`
|
= note: `#[warn(renamed_and_removed_lints)]` on by default
error: lifetime parameter `'a` only used once
--> $DIR/renamed-lints-still-apply.rs:6:9
|
LL | fn _foo<'a>(_x: &'a u32) {}
| ^^ -- ...is used only here
| |
| this lifetime...
|
note: the lint level is defined here
--> $DIR/renamed-lints-still-apply.rs:2:9
|
LL | #![deny(single_use_lifetime)]
| ^^^^^^^^^^^^^^^^^^^
help: elide the single-use lifetime
|
LL | fn _foo(_x: &u32) {}
| -- --
error: aborting due to previous error; 1 warning emitted

View File

@ -9,6 +9,7 @@ use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::map::Map;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
use rustc_span::sym;
declare_clippy_lint! {
/// **What it does:** Checks for uses of `contains_key` + `insert` on `HashMap`
@ -111,7 +112,7 @@ fn check_cond<'a>(cx: &LateContext<'_>, check: &'a Expr<'a>) -> Option<(&'static
return if match_type(cx, obj_ty, &paths::BTREEMAP) {
Some(("BTreeMap", map, key))
}
else if is_type_diagnostic_item(cx, obj_ty, sym!(hashmap_type)) {
else if is_type_diagnostic_item(cx, obj_ty, sym::hashmap_type) {
Some(("HashMap", map, key))
}
else {

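For context (not part of the commit): the lint documented in the hunk above is Clippy's `map_entry` lint, which this change switches to a diagnostic-item check for `HashMap`. Roughly, it targets the first shape below and suggests something like the entry-API form that follows.

```rust
use std::collections::HashMap;

fn main() {
    let mut counts: HashMap<&str, u32> = HashMap::new();

    // Pattern the lint looks for: a `contains_key` check guarding an `insert`,
    // which looks the key up twice.
    if !counts.contains_key("key") {
        counts.insert("key", 1);
    }

    // Approximately the rewrite Clippy suggests: a single entry-API call
    // that does one lookup.
    counts.entry("key").or_insert(1);

    assert_eq!(counts["key"], 1);
}
```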
View File

@ -1010,7 +1010,7 @@ fn is_slice_like<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'_>) -> bool {
_ => false,
};
is_slice || is_type_diagnostic_item(cx, ty, sym::vec_type) || is_type_diagnostic_item(cx, ty, sym!(vecdeque_type))
is_slice || is_type_diagnostic_item(cx, ty, sym::vec_type) || is_type_diagnostic_item(cx, ty, sym::vecdeque_type)
}
fn fetch_cloned_expr<'tcx>(expr: &'tcx Expr<'tcx>) -> &'tcx Expr<'tcx> {
@ -1908,7 +1908,7 @@ fn check_for_loop_over_map_kv<'tcx>(
_ => arg,
};
if is_type_diagnostic_item(cx, ty, sym!(hashmap_type)) || match_type(cx, ty, &paths::BTREEMAP) {
if is_type_diagnostic_item(cx, ty, sym::hashmap_type) || match_type(cx, ty, &paths::BTREEMAP) {
span_lint_and_then(
cx,
FOR_KV_MAP,
@ -2386,9 +2386,9 @@ fn is_ref_iterable_type(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
is_iterable_array(ty, cx) ||
is_type_diagnostic_item(cx, ty, sym::vec_type) ||
match_type(cx, ty, &paths::LINKED_LIST) ||
is_type_diagnostic_item(cx, ty, sym!(hashmap_type)) ||
is_type_diagnostic_item(cx, ty, sym!(hashset_type)) ||
is_type_diagnostic_item(cx, ty, sym!(vecdeque_type)) ||
is_type_diagnostic_item(cx, ty, sym::hashmap_type) ||
is_type_diagnostic_item(cx, ty, sym::hashset_type) ||
is_type_diagnostic_item(cx, ty, sym::vecdeque_type) ||
match_type(cx, ty, &paths::BINARY_HEAP) ||
match_type(cx, ty, &paths::BTREEMAP) ||
match_type(cx, ty, &paths::BTREESET)
@ -2922,9 +2922,9 @@ fn check_needless_collect_direct_usage<'tcx>(expr: &'tcx Expr<'_>, cx: &LateCont
then {
let ty = cx.typeck_results().node_type(ty.hir_id);
if is_type_diagnostic_item(cx, ty, sym::vec_type) ||
is_type_diagnostic_item(cx, ty, sym!(vecdeque_type)) ||
is_type_diagnostic_item(cx, ty, sym::vecdeque_type) ||
match_type(cx, ty, &paths::BTREEMAP) ||
is_type_diagnostic_item(cx, ty, sym!(hashmap_type)) {
is_type_diagnostic_item(cx, ty, sym::hashmap_type) {
if method.ident.name == sym!(len) {
let span = shorten_needless_collect_span(expr);
span_lint_and_sugg(
@ -2992,7 +2992,7 @@ fn check_needless_collect_indirect_usage<'tcx>(expr: &'tcx Expr<'_>, cx: &LateCo
if let Some(GenericArg::Type(ref ty)) = generic_args.args.get(0);
if let ty = cx.typeck_results().node_type(ty.hir_id);
if is_type_diagnostic_item(cx, ty, sym::vec_type) ||
is_type_diagnostic_item(cx, ty, sym!(vecdeque_type)) ||
is_type_diagnostic_item(cx, ty, sym::vecdeque_type) ||
match_type(cx, ty, &paths::LINKED_LIST);
if let Some(iter_calls) = detect_iter_and_into_iters(block, *ident);
if iter_calls.len() == 1;

View File

@ -24,7 +24,7 @@ use rustc_middle::ty::{self, TraitRef, Ty, TyS};
use rustc_semver::RustcVersion;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::Span;
use rustc_span::symbol::{sym, SymbolStr};
use rustc_span::symbol::{sym, Symbol, SymbolStr};
use rustc_typeck::hir_ty_to_ty;
use crate::consts::{constant, Constant};
@ -2598,7 +2598,7 @@ fn lint_iter_nth<'tcx>(
"slice"
} else if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&iter_args[0]), sym::vec_type) {
"Vec"
} else if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&iter_args[0]), sym!(vecdeque_type)) {
} else if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&iter_args[0]), sym::vecdeque_type) {
"VecDeque"
} else {
let nth_args = nth_and_iter_args[0];
@ -2652,10 +2652,10 @@ fn lint_get_unwrap<'tcx>(cx: &LateContext<'tcx>, expr: &hir::Expr<'_>, get_args:
} else if is_type_diagnostic_item(cx, expr_ty, sym::vec_type) {
needs_ref = get_args_str.parse::<usize>().is_ok();
"Vec"
} else if is_type_diagnostic_item(cx, expr_ty, sym!(vecdeque_type)) {
} else if is_type_diagnostic_item(cx, expr_ty, sym::vecdeque_type) {
needs_ref = get_args_str.parse::<usize>().is_ok();
"VecDeque"
} else if !is_mut && is_type_diagnostic_item(cx, expr_ty, sym!(hashmap_type)) {
} else if !is_mut && is_type_diagnostic_item(cx, expr_ty, sym::hashmap_type) {
needs_ref = true;
"HashMap"
} else if !is_mut && match_type(cx, expr_ty, &paths::BTREEMAP) {
@ -3619,7 +3619,7 @@ fn lint_asref(cx: &LateContext<'_>, expr: &hir::Expr<'_>, call_name: &str, as_re
}
}
fn ty_has_iter_method(cx: &LateContext<'_>, self_ref_ty: Ty<'_>) -> Option<(&'static str, &'static str)> {
fn ty_has_iter_method(cx: &LateContext<'_>, self_ref_ty: Ty<'_>) -> Option<(Symbol, &'static str)> {
has_iter_method(cx, self_ref_ty).map(|ty_name| {
let mutbl = match self_ref_ty.kind() {
ty::Ref(_, _, mutbl) => mutbl,

View File

@ -199,7 +199,7 @@ fn check_for_slice<'a>(cx: &LateContext<'_>, lhs1: &'a Expr<'_>, lhs2: &'a Expr<
if matches!(ty.kind(), ty::Slice(_))
|| matches!(ty.kind(), ty::Array(_, _))
|| is_type_diagnostic_item(cx, ty, sym::vec_type)
|| is_type_diagnostic_item(cx, ty, sym!(vecdeque_type))
|| is_type_diagnostic_item(cx, ty, sym::vecdeque_type)
{
return Slice::Swappable(lhs1, idx1, idx2);
}

View File

@ -2680,14 +2680,14 @@ impl<'tcx> ImplicitHasherType<'tcx> {
let ty = hir_ty_to_ty(cx.tcx, hir_ty);
if is_type_diagnostic_item(cx, ty, sym!(hashmap_type)) && params_len == 2 {
if is_type_diagnostic_item(cx, ty, sym::hashmap_type) && params_len == 2 {
Some(ImplicitHasherType::HashMap(
hir_ty.span,
ty,
snippet(cx, params[0].span, "K"),
snippet(cx, params[1].span, "V"),
))
} else if is_type_diagnostic_item(cx, ty, sym!(hashset_type)) && params_len == 1 {
} else if is_type_diagnostic_item(cx, ty, sym::hashset_type) && params_len == 1 {
Some(ImplicitHasherType::HashSet(
hir_ty.span,
ty,

View File

@ -5,6 +5,7 @@ use rustc_middle::ty::{Adt, Ty};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_target::abi::LayoutOf as _;
use rustc_typeck::hir_ty_to_ty;
use rustc_span::sym;
use crate::utils::{is_normalizable, is_type_diagnostic_item, match_type, paths, span_lint_and_help};
@ -47,7 +48,7 @@ impl LateLintPass<'_> for ZeroSizedMapValues {
if !hir_ty.span.from_expansion();
if !in_trait_impl(cx, hir_ty.hir_id);
let ty = ty_from_hir_ty(cx, hir_ty);
if is_type_diagnostic_item(cx, ty, sym!(hashmap_type)) || match_type(cx, ty, &paths::BTREEMAP);
if is_type_diagnostic_item(cx, ty, sym::hashmap_type) || match_type(cx, ty, &paths::BTREEMAP);
if let Adt(_, ref substs) = ty.kind();
let ty = substs.type_at(1);
// Do this to prevent `layout_of` crashing, being unable to fully normalize `ty`.

View File

@ -18,6 +18,7 @@ use rustc_hir::intravisit::{NestedVisitorMap, Visitor};
use rustc_hir::{Block, Expr, ExprKind, Path, QPath};
use rustc_lint::LateContext;
use rustc_middle::hir::map::Map;
use rustc_span::sym;
/// Is the expr pure (is it free from side-effects)?
/// This function is named so to stress that it isn't exhaustive and returns FNs.
@ -99,7 +100,7 @@ fn identify_some_potentially_expensive_patterns<'tcx>(cx: &LateContext<'tcx>, ex
ExprKind::Call(..) => !is_ctor_or_promotable_const_function(self.cx, expr),
ExprKind::Index(obj, _) => {
let ty = self.cx.typeck_results().expr_ty(obj);
is_type_diagnostic_item(self.cx, ty, sym!(hashmap_type))
is_type_diagnostic_item(self.cx, ty, sym::hashmap_type)
|| match_type(self.cx, ty, &paths::BTREEMAP)
},
ExprKind::MethodCall(..) => true,

View File

@ -1295,24 +1295,24 @@ pub fn any_parent_is_automatically_derived(tcx: TyCtxt<'_>, node: HirId) -> bool
}
/// Returns true if ty has `iter` or `iter_mut` methods
pub fn has_iter_method(cx: &LateContext<'_>, probably_ref_ty: Ty<'_>) -> Option<&'static str> {
pub fn has_iter_method(cx: &LateContext<'_>, probably_ref_ty: Ty<'_>) -> Option<Symbol> {
// FIXME: instead of this hard-coded list, we should check if `<adt>::iter`
// exists and has the desired signature. Unfortunately FnCtxt is not exported
// so we can't use its `lookup_method` method.
let into_iter_collections: [&[&str]; 13] = [
&paths::VEC,
&paths::OPTION,
&paths::RESULT,
&paths::BTREESET,
&paths::BTREEMAP,
&paths::VEC_DEQUE,
&paths::LINKED_LIST,
&paths::BINARY_HEAP,
&paths::HASHSET,
&paths::HASHMAP,
&paths::PATH_BUF,
&paths::PATH,
&paths::RECEIVER,
let into_iter_collections: &[Symbol] = &[
sym::vec_type,
sym::option_type,
sym::result_type,
sym::BTreeMap,
sym::BTreeSet,
sym::vecdeque_type,
sym::LinkedList,
sym::BinaryHeap,
sym::hashset_type,
sym::hashmap_type,
sym::PathBuf,
sym::Path,
sym::Receiver,
];
let ty_to_check = match probably_ref_ty.kind() {
@ -1321,15 +1321,15 @@ pub fn has_iter_method(cx: &LateContext<'_>, probably_ref_ty: Ty<'_>) -> Option<
};
let def_id = match ty_to_check.kind() {
ty::Array(..) => return Some("array"),
ty::Slice(..) => return Some("slice"),
ty::Array(..) => return Some(sym::array),
ty::Slice(..) => return Some(sym::slice),
ty::Adt(adt, _) => adt.did,
_ => return None,
};
for path in &into_iter_collections {
if match_def_path(cx, def_id, path) {
return Some(*path.last().unwrap());
for &name in into_iter_collections {
if cx.tcx.is_diagnostic_item(name, def_id) {
return Some(cx.tcx.item_name(def_id));
}
}
None

View File

@ -99,7 +99,6 @@ pub(super) const PANIC_ANY: [&str; 3] = ["std", "panic", "panic_any"];
pub const PARKING_LOT_MUTEX_GUARD: [&str; 2] = ["parking_lot", "MutexGuard"];
pub const PARKING_LOT_RWLOCK_READ_GUARD: [&str; 2] = ["parking_lot", "RwLockReadGuard"];
pub const PARKING_LOT_RWLOCK_WRITE_GUARD: [&str; 2] = ["parking_lot", "RwLockWriteGuard"];
pub const PATH: [&str; 3] = ["std", "path", "Path"];
pub const PATH_BUF: [&str; 3] = ["std", "path", "PathBuf"];
pub const PATH_BUF_AS_PATH: [&str; 4] = ["std", "path", "PathBuf", "as_path"];
pub const PATH_TO_PATH_BUF: [&str; 4] = ["std", "path", "Path", "to_path_buf"];
@ -116,7 +115,6 @@ pub const PUSH_STR: [&str; 4] = ["alloc", "string", "String", "push_str"];
pub const RANGE_ARGUMENT_TRAIT: [&str; 3] = ["core", "ops", "RangeBounds"];
pub const RC: [&str; 3] = ["alloc", "rc", "Rc"];
pub const RC_PTR_EQ: [&str; 4] = ["alloc", "rc", "Rc", "ptr_eq"];
pub const RECEIVER: [&str; 4] = ["std", "sync", "mpsc", "Receiver"];
pub const REFCELL_REF: [&str; 3] = ["core", "cell", "Ref"];
pub const REFCELL_REFMUT: [&str; 3] = ["core", "cell", "RefMut"];
pub const REGEX_BUILDER_NEW: [&str; 5] = ["regex", "re_builder", "unicode", "RegexBuilder", "new"];