Auto merge of #72671 - flip1995:clippyup, r=Xanewok

Update Clippy, RLS, and rustfmt

r? @Dylan-DPC

This makes Clippy test-pass again: 3089c3b

Otherwise this includes bugfixes and a few new lints.

Fixes #72231
Fixes #72232
bors 2020-05-29 11:16:45 +00:00
commit 96dd4690c3
89 changed files with 1609 additions and 482 deletions

View File

@ -551,10 +551,12 @@ dependencies = [
"lazy_static",
"pulldown-cmark 0.7.1",
"quine-mc_cluskey",
"quote 1.0.2",
"regex-syntax",
"semver 0.9.0",
"serde",
"smallvec 1.4.0",
"syn 1.0.11",
"toml",
"unicode-normalization",
"url 2.1.0",
@ -2784,9 +2786,9 @@ dependencies = [
[[package]]
name = "racer"
version = "2.1.33"
version = "2.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54322b696f7df20e0d79d0244a1088f387b7164a5f17987c4ab984dec1a23e42"
checksum = "cc9caecf1286a3ed28d3ae35207a178ba12e58de95540781e5c6cba05e0f0833"
dependencies = [
"bitflags",
"clap",
@ -3212,9 +3214,9 @@ dependencies = [
[[package]]
name = "rustc-ap-arena"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81dfcfbb0ddfd533abf8c076e3b49d1e5042d1962526a12ce2c66d514b24cca3"
checksum = "fdaf0295fc40b10ec1091aad1a1760b4bb3b4e7c4f77d543d1a2e9d50a01e6b1"
dependencies = [
"rustc-ap-rustc_data_structures",
"smallvec 1.4.0",
@ -3222,15 +3224,15 @@ dependencies = [
[[package]]
name = "rustc-ap-graphviz"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7490bb07b014a7f9531bde33c905a805e08095dbefdb4c9988a1b19fe6d019fd"
checksum = "8028e8cdb4eb71810d0c22a5a5e1e3106c81123be63ce7f044b6d4ac100d8941"
[[package]]
name = "rustc-ap-rustc_ast"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "189f16dbb8dd11089274c9ced58b0cae9e1ea3e434a58f3db683817eda849e58"
checksum = "16e9e502bb3a5568433db1cf2fb1f1e1074934636069cf744ad7c77b58e1428e"
dependencies = [
"log",
"rustc-ap-rustc_data_structures",
@ -3245,9 +3247,9 @@ dependencies = [
[[package]]
name = "rustc-ap-rustc_ast_passes"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbe619609b56a617fa986332b066d53270093c816d8ff8281fc90e1dbe74c1cc"
checksum = "faf35ffecab28f97f7ac01cf6a13afaca6408529d15eb95f317a43b2ffb88933"
dependencies = [
"itertools 0.8.0",
"log",
@ -3264,21 +3266,20 @@ dependencies = [
[[package]]
name = "rustc-ap-rustc_ast_pretty"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26ab1495f7b420e937688749c1da5763aaabd6ebe8cacb758665a0b8481da094"
checksum = "3684ed43dc552f1e030e3f7a5a300a7a834bdda4e9e00ab80284be4220d8c603"
dependencies = [
"log",
"rustc-ap-rustc_ast",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_span",
]
[[package]]
name = "rustc-ap-rustc_attr"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e057495724c60729c1d1d9d49374e0b3ebd6d3481cd161b2871f52fe017b7b5"
checksum = "31b413927daa666983b3b49227f9ac218aa29254546abdb585f20cd71c391870"
dependencies = [
"rustc-ap-rustc_ast",
"rustc-ap-rustc_ast_pretty",
@ -3289,19 +3290,19 @@ dependencies = [
"rustc-ap-rustc_session",
"rustc-ap-rustc_span",
"rustc-ap-serialize",
"smallvec 1.4.0",
"version_check",
]
[[package]]
name = "rustc-ap-rustc_data_structures"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2130997667833692f4bec4681d0e73b066d5a01dac1d8a68f22068b82bf173a"
checksum = "4b1c6069e5c522657f1c6f5ab33074e097092f48e804cc896d337e319aacbd60"
dependencies = [
"bitflags",
"cfg-if",
"crossbeam-utils 0.7.2",
"ena 0.13.1",
"ena 0.14.0",
"indexmap",
"jobserver",
"lazy_static",
@ -3317,14 +3318,15 @@ dependencies = [
"rustc-rayon-core",
"smallvec 1.4.0",
"stable_deref_trait",
"stacker",
"winapi 0.3.8",
]
[[package]]
name = "rustc-ap-rustc_errors"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "908e1ea187c6bb368af4ba6db980001e920515e67371ddc4086e749baabe6080"
checksum = "0c374e89b3c9714869ef86076942155383804ba6778c26be2169d324563c31f9"
dependencies = [
"annotate-snippets",
"atty",
@ -3340,9 +3342,9 @@ dependencies = [
[[package]]
name = "rustc-ap-rustc_expand"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50066a75bca872ff933b0ee8a582d18ef1876c8054a392f60c39e538446bfb00"
checksum = "259d2a7aa7a12f3c99a4ce4123643ec065f1a26f8e89be1f9bedd9757ea53fdc"
dependencies = [
"log",
"rustc-ap-rustc_ast",
@ -3362,9 +3364,9 @@ dependencies = [
[[package]]
name = "rustc-ap-rustc_feature"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96fb53e1710e6de7c2e371ca56c857b79f9b399aba58aa6b6fbed6e2f677d3f6"
checksum = "c0296fbc29b629d5ae2ebee1bbf0407bb22de04d26d87216c20899b79579ccb3"
dependencies = [
"lazy_static",
"rustc-ap-rustc_data_structures",
@ -3373,15 +3375,15 @@ dependencies = [
[[package]]
name = "rustc-ap-rustc_fs_util"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3f91357e5e468fc2729211571d769723c728a34e200d90a70164e945f881e09"
checksum = "34734f6cc681399630acd836a14207c6b5b9671a290cc7cad0354b0a4d71b3c9"
[[package]]
name = "rustc-ap-rustc_index"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32220c3e6cdf226f38e4474b747dca15f3106bb680c74f10b299af3f6cdb1663"
checksum = "d1e4508753d71d3523209c2ca5086db15a1413e71ebf17ad5412bb7ced5e44c2"
dependencies = [
"rustc-ap-serialize",
"smallvec 1.4.0",
@ -3389,18 +3391,18 @@ dependencies = [
[[package]]
name = "rustc-ap-rustc_lexer"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b324d2a2bacad344e53e182e5ca04ffb74745b932849aa074f8f7fec8177da5"
checksum = "42b9fcd8407e322908a721262fbc0b35b5f3c35bb173a26dd1e0070bde336e33"
dependencies = [
"unicode-xid 0.2.0",
]
[[package]]
name = "rustc-ap-rustc_macros"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59686c56d5f1b3ed47d0f070c257ed35caf24ecf2d744dd11fe44b1014baee0f"
checksum = "3d104115a689367d2e0bcd99f37e0ebd6b9c8c78bab0d9cbea5bae86323601b5"
dependencies = [
"proc-macro2 1.0.3",
"quote 1.0.2",
@ -3410,9 +3412,9 @@ dependencies = [
[[package]]
name = "rustc-ap-rustc_parse"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dfb0c11c591ec5f87bbadb10819795abc9035ff79a26703c1b6c9487ac51f49"
checksum = "afaaab91853fc5a3916785ccae727a4433359d9787c260d42b96a2265fe5b287"
dependencies = [
"bitflags",
"log",
@ -3424,15 +3426,14 @@ dependencies = [
"rustc-ap-rustc_lexer",
"rustc-ap-rustc_session",
"rustc-ap-rustc_span",
"smallvec 1.4.0",
"unicode-normalization",
]
[[package]]
name = "rustc-ap-rustc_session"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d1a194b1a81d7233ee492847638dc9ebdb7d084300e5ade8dea0ceaa98f95b9"
checksum = "86e756a57ce6ce1b868e35e64a7e10ab28d49ece80d7c661b07aff5afc6e5d2d"
dependencies = [
"getopts",
"log",
@ -3450,9 +3451,9 @@ dependencies = [
[[package]]
name = "rustc-ap-rustc_span"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a648146050fed6b58e681ec22488e728f60e16036bb7497c9815e3debd1e4242"
checksum = "21031c3396ee452f4c6e994b67513a633055c57c86d00336afd9d63149518f34"
dependencies = [
"cfg-if",
"log",
@ -3469,9 +3470,9 @@ dependencies = [
[[package]]
name = "rustc-ap-rustc_target"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28cf28798f0988b808e3616713630e4098d68c6f1f41052a2f7e922e094da744"
checksum = "ff21badfbead5b0050391eaad8840f2e4fcb03b6b0fc6006f447443529e9ae6e"
dependencies = [
"bitflags",
"log",
@ -3484,9 +3485,9 @@ dependencies = [
[[package]]
name = "rustc-ap-serialize"
version = "654.0.0"
version = "659.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "756e8f526ec7906e132188bf25e3c10a6ee42ab77294ecb3b3602647f0508eef"
checksum = "768b5a305669d934522712bc13502962edfde5128ea63b9e7db4000410be1dc6"
dependencies = [
"indexmap",
"smallvec 1.4.0",
@ -4459,7 +4460,7 @@ dependencies = [
[[package]]
name = "rustfmt-nightly"
version = "1.4.14"
version = "1.4.15"
dependencies = [
"annotate-snippets",
"bytecount",
@ -4477,6 +4478,7 @@ dependencies = [
"regex",
"rustc-ap-rustc_ast",
"rustc-ap-rustc_ast_pretty",
"rustc-ap-rustc_attr",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_errors",
"rustc-ap-rustc_expand",

View File

@ -252,6 +252,10 @@ pub fn prepare_tool_cargo(
// own copy
cargo.env("LZMA_API_STATIC", "1");
// CFG_RELEASE is needed by rustfmt (and possibly other tools) which
// import rustc-ap-rustc_attr which requires this to be set for the
// `#[cfg(version(...))]` attribute.
cargo.env("CFG_RELEASE", builder.rust_release());
cargo.env("CFG_RELEASE_CHANNEL", &builder.config.channel);
cargo.env("CFG_VERSION", builder.rust_version());
cargo.env("CFG_RELEASE_NUM", channel::CFG_RELEASE_NUM);

View File

@ -49,7 +49,7 @@ jobs:
run: cargo update
- name: Cache cargo dir
uses: actions/cache@v1
uses: actions/cache@v2
with:
path: ~/.cargo
key: ${{ runner.os }}-x86_64-unknown-linux-gnu-${{ hashFiles('Cargo.lock') }}

View File

@ -94,7 +94,7 @@ jobs:
run: cargo update
- name: Cache cargo dir
uses: actions/cache@v1
uses: actions/cache@v2
with:
path: ~/.cargo
key: ${{ runner.os }}-${{ matrix.host }}-${{ hashFiles('Cargo.lock') }}
@ -190,7 +190,7 @@ jobs:
run: cargo update
- name: Cache cargo dir
uses: actions/cache@v1
uses: actions/cache@v2
with:
path: ~/.cargo
key: ${{ runner.os }}-x86_64-unknown-linux-gnu-${{ hashFiles('Cargo.lock') }}
@ -269,7 +269,7 @@ jobs:
run: cargo update
- name: Cache cargo dir
uses: actions/cache@v1
uses: actions/cache@v2
with:
path: ~/.cargo
key: ${{ runner.os }}-x86_64-unknown-linux-gnu-${{ hashFiles('Cargo.lock') }}
@ -312,7 +312,7 @@ jobs:
name: bors test finished
if: github.event.pusher.name == 'bors' && success()
runs-on: ubuntu-latest
needs: [base, integration]
needs: [changelog, base, integration_build, integration]
steps:
- name: Mark the job as successful
@ -322,7 +322,7 @@ jobs:
name: bors test finished
if: github.event.pusher.name == 'bors' && (failure() || cancelled())
runs-on: ubuntu-latest
needs: [base, integration]
needs: [changelog, base, integration_build, integration]
steps:
- name: Mark the job as a failure

View File

@ -1439,6 +1439,7 @@ Released 2018-09-13
[`match_same_arms`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_same_arms
[`match_single_binding`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_single_binding
[`match_wild_err_arm`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_wild_err_arm
[`match_wildcard_for_single_variants`]: https://rust-lang.github.io/rust-clippy/master/index.html#match_wildcard_for_single_variants
[`maybe_infinite_iter`]: https://rust-lang.github.io/rust-clippy/master/index.html#maybe_infinite_iter
[`mem_discriminant_non_enum`]: https://rust-lang.github.io/rust-clippy/master/index.html#mem_discriminant_non_enum
[`mem_forget`]: https://rust-lang.github.io/rust-clippy/master/index.html#mem_forget

View File

@ -155,47 +155,77 @@ That's why the `else_if_without_else` example uses the `register_early_pass` fun
## Fixing build failures caused by Rust
Clippy will sometimes fail to build from source because building it depends on unstable internal Rust features. Most of
the time we have to adapt to the changes, and only very rarely is there an actual bug in Rust. Fixing build failures
caused by Rust updates can be a good way to learn about Rust internals.
Clippy currently gets built with the `rustc` of the `rust-lang/rust` `master`
branch. Most of the time we have to adapt to the changes, and only very rarely
is there an actual bug in Rust.
In order to find out why Clippy does not work properly with a new Rust commit, you can use the [rust-toolstate commit
history][toolstate_commit_history]. You will then have to look for the last commit that contains
`test-pass -> build-fail` or `test-pass -> test-fail` for the `clippy-driver` component.
[Here][toolstate_commit] is an example.
If you decide to make Clippy work again with a Rust commit that breaks it, you
have to sync the `rust-lang/rust-clippy` repository with the `subtree` copy of
Clippy in the `rust-lang/rust` repository.
The commit message contains a link to the PR. The PRs are usually small enough to discover the breaking API change and
if they are bigger, they likely include some discussion that may help you to fix Clippy.
For general information about `subtree`s in the Rust repository see [Rust's
`CONTRIBUTING.md`][subtree].
To check if Clippy is available for a specific target platform, you can check
the [rustup component history][rustup_component_history].
Here is a TL;DR version of the sync process (all of the following commands have
to be run inside the `rust` directory):
If you decide to make Clippy work again with a Rust commit that breaks it,
you probably want to install the latest Rust from master locally and run Clippy
using that version of Rust.
1. Clone the [`rust-lang/rust`] repository
2. Sync the changes to the rust-copy of Clippy to your Clippy fork:
```bash
# Make sure to change `your-github-name` to your github name in the following command
git subtree push -P src/tools/clippy git@github.com:your-github-name/rust-clippy sync-from-rust
```
_Note:_ This will directly push to the remote repository. You can also push
to your local copy by replacing the remote address with `/path/to/rust-clippy`
directory.
You can set up the master toolchain by running `./setup-toolchain.sh`. That script will install
[rustup-toolchain-install-master][rtim] and master toolchain, then run `rustup override set master`.
_Note:_ Most of the time you have to create a merge commit in the
`rust-clippy` repo (this has to be done in the Clippy repo, not in the
rust-copy of Clippy):
```bash
git fetch origin && git fetch upstream
git checkout sync-from-rust
git merge upstream/master
```
3. Open a PR to `rust-lang/rust-clippy` and wait for it to get merged (to
accelerate the process ping the `@rust-lang/clippy` team in your PR and/or
~~annoy~~ ask them in the [Discord] channel.)
4. Sync the `rust-lang/rust-clippy` master to the rust-copy of Clippy:
```bash
git checkout -b sync-from-clippy
git subtree pull -P src/tools/clippy https://github.com/rust-lang/rust-clippy master
```
5. Open a PR to [`rust-lang/rust`]
After fixing the build failure on this repository, we can submit a pull request
to [`rust-lang/rust`] to fix the toolstate.
To submit a pull request, you should follow these steps:
Also, you may want to define remotes, so you don't have to type out the remote
addresses on every sync. You can do this with the following commands (these
commands still have to be run inside the `rust` directory):
```bash
# Assuming you already cloned the rust-lang/rust repo and you're in the correct directory
git submodule update --remote src/tools/clippy
cargo update -p clippy
git add -u
git commit -m "Update Clippy"
./x.py test -i --stage 1 src/tools/clippy # This is optional and should succeed anyway
# Open a PR in rust-lang/rust
# Set clippy-upstream remote for pulls
$ git remote add clippy-upstream https://github.com/rust-lang/rust-clippy
# Make sure to not push to the upstream repo
$ git remote set-url --push clippy-upstream DISABLED
# Set clippy-origin remote to your fork for pushes
$ git remote add clippy-origin git@github.com:your-github-name/rust-clippy
# Set a local remote
$ git remote add clippy-local /path/to/rust-clippy
```
[rustup_component_history]: https://rust-lang.github.io/rustup-components-history
[toolstate_commit_history]: https://github.com/rust-lang-nursery/rust-toolstate/commits/master
[toolstate_commit]: https://github.com/rust-lang-nursery/rust-toolstate/commit/aad74d8294e198a7cf8ac81a91aebb7f3bbcf727
[rtim]: https://github.com/kennytm/rustup-toolchain-install-master
You can then sync with the remote names from above, e.g.:
```bash
$ git subtree push -P src/tools/clippy clippy-local sync-from-rust
```
_Note:_ The first time you run `git subtree push`, a cache has to be built. This
involves going through the complete Clippy history once, so you have to
increase the stack limit, which you can do with `ulimit -s 60000`. For this to
work, you will also need the fix for `git subtree` available
[here][gitgitgadget-pr].
[gitgitgadget-pr]: https://github.com/gitgitgadget/git/pull/493
[subtree]: https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.md#external-dependencies-subtree
[`rust-lang/rust`]: https://github.com/rust-lang/rust
## Issue and PR triage

View File

@ -1,91 +1,111 @@
use crate::clippy_project_root;
use std::fs::{File, OpenOptions};
use std::io;
use std::fs::{self, OpenOptions};
use std::io::prelude::*;
use std::io::ErrorKind;
use std::path::Path;
use std::io::{self, ErrorKind};
use std::path::{Path, PathBuf};
/// Creates files required to implement and test a new lint and runs `update_lints`.
///
/// # Errors
///
/// This function errors, if the files couldn't be created
pub fn create(pass: Option<&str>, lint_name: Option<&str>, category: Option<&str>) -> Result<(), io::Error> {
let pass = pass.expect("`pass` argument is validated by clap");
let lint_name = lint_name.expect("`name` argument is validated by clap");
let category = category.expect("`category` argument is validated by clap");
struct LintData<'a> {
pass: &'a str,
name: &'a str,
category: &'a str,
project_root: PathBuf,
}
match open_files(lint_name) {
Ok((mut test_file, mut lint_file)) => {
let (pass_type, pass_lifetimes, pass_import, context_import) = match pass {
"early" => ("EarlyLintPass", "", "use rustc_ast::ast::*;", "EarlyContext"),
"late" => ("LateLintPass", "<'_, '_>", "use rustc_hir::*;", "LateContext"),
_ => {
unreachable!("`pass_type` should only ever be `early` or `late`!");
},
};
trait Context {
fn context<C: AsRef<str>>(self, text: C) -> Self;
}
let camel_case_name = to_camel_case(lint_name);
if let Err(e) = test_file.write_all(get_test_file_contents(lint_name).as_bytes()) {
return Err(io::Error::new(
ErrorKind::Other,
format!("Could not write to test file: {}", e),
));
};
if let Err(e) = lint_file.write_all(
get_lint_file_contents(
pass_type,
pass_lifetimes,
lint_name,
&camel_case_name,
category,
pass_import,
context_import,
)
.as_bytes(),
) {
return Err(io::Error::new(
ErrorKind::Other,
format!("Could not write to lint file: {}", e),
));
}
Ok(())
},
Err(e) => Err(io::Error::new(
ErrorKind::Other,
format!("Unable to create lint: {}", e),
)),
impl<T> Context for io::Result<T> {
fn context<C: AsRef<str>>(self, text: C) -> Self {
match self {
Ok(t) => Ok(t),
Err(e) => {
let message = format!("{}: {}", text.as_ref(), e);
Err(io::Error::new(ErrorKind::Other, message))
},
}
}
}
fn open_files(lint_name: &str) -> Result<(File, File), io::Error> {
let project_root = clippy_project_root();
/// Creates the files required to implement and test a new lint and runs `update_lints`.
///
/// # Errors
///
/// This function errors out if the files couldn't be created or written to.
pub fn create(pass: Option<&str>, lint_name: Option<&str>, category: Option<&str>) -> io::Result<()> {
let lint = LintData {
pass: pass.expect("`pass` argument is validated by clap"),
name: lint_name.expect("`name` argument is validated by clap"),
category: category.expect("`category` argument is validated by clap"),
project_root: clippy_project_root(),
};
let test_file_path = project_root.join("tests").join("ui").join(format!("{}.rs", lint_name));
let lint_file_path = project_root
.join("clippy_lints")
.join("src")
.join(format!("{}.rs", lint_name));
create_lint(&lint).context("Unable to create lint implementation")?;
create_test(&lint).context("Unable to create a test for the new lint")
}
if Path::new(&test_file_path).exists() {
return Err(io::Error::new(
ErrorKind::AlreadyExists,
format!("test file {:?} already exists", test_file_path),
));
}
if Path::new(&lint_file_path).exists() {
return Err(io::Error::new(
ErrorKind::AlreadyExists,
format!("lint file {:?} already exists", lint_file_path),
));
fn create_lint(lint: &LintData) -> io::Result<()> {
let (pass_type, pass_lifetimes, pass_import, context_import) = match lint.pass {
"early" => ("EarlyLintPass", "", "use rustc_ast::ast::*;", "EarlyContext"),
"late" => ("LateLintPass", "<'_, '_>", "use rustc_hir::*;", "LateContext"),
_ => {
unreachable!("`pass_type` should only ever be `early` or `late`!");
},
};
let camel_case_name = to_camel_case(lint.name);
let lint_contents = get_lint_file_contents(
pass_type,
pass_lifetimes,
lint.name,
&camel_case_name,
lint.category,
pass_import,
context_import,
);
let lint_path = format!("clippy_lints/src/{}.rs", lint.name);
write_file(lint.project_root.join(&lint_path), lint_contents.as_bytes())
}
fn create_test(lint: &LintData) -> io::Result<()> {
fn create_project_layout<P: Into<PathBuf>>(lint_name: &str, location: P, case: &str, hint: &str) -> io::Result<()> {
let mut path = location.into().join(case);
fs::create_dir(&path)?;
write_file(path.join("Cargo.toml"), get_manifest_contents(lint_name, hint))?;
path.push("src");
fs::create_dir(&path)?;
let header = format!("// compile-flags: --crate-name={}", lint_name);
write_file(path.join("main.rs"), get_test_file_contents(lint_name, Some(&header)))?;
Ok(())
}
let test_file = OpenOptions::new().write(true).create_new(true).open(test_file_path)?;
let lint_file = OpenOptions::new().write(true).create_new(true).open(lint_file_path)?;
if lint.category == "cargo" {
let relative_test_dir = format!("tests/ui-cargo/{}", lint.name);
let test_dir = lint.project_root.join(relative_test_dir);
fs::create_dir(&test_dir)?;
Ok((test_file, lint_file))
create_project_layout(lint.name, &test_dir, "fail", "Content that triggers the lint goes here")?;
create_project_layout(lint.name, &test_dir, "pass", "This file should not trigger the lint")
} else {
let test_path = format!("tests/ui/{}.rs", lint.name);
let test_contents = get_test_file_contents(lint.name, None);
write_file(lint.project_root.join(test_path), test_contents)
}
}
fn write_file<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> io::Result<()> {
fn inner(path: &Path, contents: &[u8]) -> io::Result<()> {
OpenOptions::new()
.write(true)
.create_new(true)
.open(path)?
.write_all(contents)
}
inner(path.as_ref(), contents.as_ref()).context(format!("writing to file: {}", path.as_ref().display()))
}
fn to_camel_case(name: &str) -> String {
@ -100,8 +120,8 @@ fn to_camel_case(name: &str) -> String {
.collect()
}
fn get_test_file_contents(lint_name: &str) -> String {
format!(
fn get_test_file_contents(lint_name: &str, header_commands: Option<&str>) -> String {
let mut contents = format!(
"#![warn(clippy::{})]
fn main() {{
@ -109,6 +129,26 @@ fn main() {{
}}
",
lint_name
);
if let Some(header) = header_commands {
contents = format!("{}\n{}", header, contents);
}
contents
}
fn get_manifest_contents(lint_name: &str, hint: &str) -> String {
format!(
r#"
# {}
[package]
name = "{}"
version = "0.1.0"
publish = false
"#,
hint, lint_name
)
}
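As an aside, the small `Context` trait introduced above only prefixes an `io::Error` with a human-readable message. A self-contained sketch of the same pattern, using nothing but the standard library (the file path is made up):

```rust
use std::io::{self, ErrorKind};

trait Context {
    fn context<C: AsRef<str>>(self, text: C) -> Self;
}

impl<T> Context for io::Result<T> {
    fn context<C: AsRef<str>>(self, text: C) -> Self {
        // Keep the Ok value, but wrap any error with the given prefix.
        self.map_err(|e| io::Error::new(ErrorKind::Other, format!("{}: {}", text.as_ref(), e)))
    }
}

fn main() {
    // For a missing file this prints something like
    // `Err(Custom { kind: Other, error: "reading config: ..." })`.
    let res = std::fs::read_to_string("/nonexistent/config.toml").context("reading config");
    println!("{:?}", res.map(|s| s.len()));
}
```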

View File

@ -32,6 +32,8 @@ semver = "0.9.0"
# NOTE: cargo requires serde feat in its url dep
# see <https://github.com/rust-lang/rust/pull/63587#issuecomment-522343864>
url = { version = "2.1.0", features = ["serde"] }
quote = "1"
syn = { version = "1", features = ["full"] }
[features]
deny-warnings = []

View File

@ -248,7 +248,6 @@ declare_lint_pass!(Attributes => [
INLINE_ALWAYS,
DEPRECATED_SEMVER,
USELESS_ATTRIBUTE,
EMPTY_LINE_AFTER_OUTER_ATTR,
UNKNOWN_CLIPPY_LINTS,
]);
@ -480,36 +479,6 @@ fn check_attrs(cx: &LateContext<'_, '_>, span: Span, name: Name, attrs: &[Attrib
}
for attr in attrs {
let attr_item = if let AttrKind::Normal(ref attr) = attr.kind {
attr
} else {
continue;
};
if attr.style == AttrStyle::Outer {
if attr_item.args.inner_tokens().is_empty() || !is_present_in_source(cx, attr.span) {
return;
}
let begin_of_attr_to_item = Span::new(attr.span.lo(), span.lo(), span.ctxt());
let end_of_attr_to_item = Span::new(attr.span.hi(), span.lo(), span.ctxt());
if let Some(snippet) = snippet_opt(cx, end_of_attr_to_item) {
let lines = snippet.split('\n').collect::<Vec<_>>();
let lines = without_block_comments(lines);
if lines.iter().filter(|l| l.trim().is_empty()).count() > 2 {
span_lint(
cx,
EMPTY_LINE_AFTER_OUTER_ATTR,
begin_of_attr_to_item,
"Found an empty line after an outer attribute. \
Perhaps you forgot to add a `!` to make it an inner attribute?",
);
}
}
}
if let Some(values) = attr.meta_item_list() {
if values.len() != 1 || !attr.check_name(sym!(inline)) {
continue;
@ -551,15 +520,57 @@ fn is_word(nmi: &NestedMetaItem, expected: Symbol) -> bool {
}
}
declare_lint_pass!(EarlyAttributes => [DEPRECATED_CFG_ATTR, MISMATCHED_TARGET_OS]);
declare_lint_pass!(EarlyAttributes => [
DEPRECATED_CFG_ATTR,
MISMATCHED_TARGET_OS,
EMPTY_LINE_AFTER_OUTER_ATTR,
]);
impl EarlyLintPass for EarlyAttributes {
fn check_item(&mut self, cx: &EarlyContext<'_>, item: &rustc_ast::ast::Item) {
check_empty_line_after_outer_attr(cx, item);
}
fn check_attribute(&mut self, cx: &EarlyContext<'_>, attr: &Attribute) {
check_deprecated_cfg_attr(cx, attr);
check_mismatched_target_os(cx, attr);
}
}
fn check_empty_line_after_outer_attr(cx: &EarlyContext<'_>, item: &rustc_ast::ast::Item) {
for attr in &item.attrs {
let attr_item = if let AttrKind::Normal(ref attr) = attr.kind {
attr
} else {
return;
};
if attr.style == AttrStyle::Outer {
if attr_item.args.inner_tokens().is_empty() || !is_present_in_source(cx, attr.span) {
return;
}
let begin_of_attr_to_item = Span::new(attr.span.lo(), item.span.lo(), item.span.ctxt());
let end_of_attr_to_item = Span::new(attr.span.hi(), item.span.lo(), item.span.ctxt());
if let Some(snippet) = snippet_opt(cx, end_of_attr_to_item) {
let lines = snippet.split('\n').collect::<Vec<_>>();
let lines = without_block_comments(lines);
if lines.iter().filter(|l| l.trim().is_empty()).count() > 2 {
span_lint(
cx,
EMPTY_LINE_AFTER_OUTER_ATTR,
begin_of_attr_to_item,
"Found an empty line after an outer attribute. \
Perhaps you forgot to add a `!` to make it an inner attribute?",
);
}
}
}
}
}
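For reference, a small example of the situation this relocated check flags (hypothetical code, shown only to illustrate the trigger):

```rust
// EMPTY_LINE_AFTER_OUTER_ATTR fires when an outer attribute is separated
// from its item by blank lines, which often means an inner attribute
// (`#![...]`) was intended.
#[allow(dead_code)]



fn probably_detached() {}

fn main() {
    probably_detached();
}
```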
fn check_deprecated_cfg_attr(cx: &EarlyContext<'_>, attr: &Attribute) {
if_chain! {
// check cfg_attr

View File

@ -23,6 +23,7 @@ declare_clippy_lint! {
/// [package]
/// name = "clippy"
/// version = "0.0.212"
/// authors = ["Someone <someone@rust-lang.org>"]
/// description = "A bunch of helpful lints to avoid common pitfalls in Rust"
/// repository = "https://github.com/rust-lang/rust-clippy"
/// readme = "README.md"

View File

@ -77,7 +77,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for FloatLiteral {
let type_suffix = match lit_float_ty {
LitFloatType::Suffixed(FloatTy::F32) => Some("f32"),
LitFloatType::Suffixed(FloatTy::F64) => Some("f64"),
_ => None
LitFloatType::Unsuffixed => None
};
let (is_whole, mut float_str) = match fty {
FloatTy::F32 => {

View File

@ -346,13 +346,8 @@ mod reexport {
/// level (i.e `#![cfg_attr(...)]`) will still be expanded even when using a pre-expansion pass.
///
/// Used in `./src/driver.rs`.
pub fn register_pre_expansion_lints(store: &mut rustc_lint::LintStore, conf: &Conf) {
pub fn register_pre_expansion_lints(store: &mut rustc_lint::LintStore) {
store.register_pre_expansion_pass(|| box write::Write::default());
store.register_pre_expansion_pass(|| box redundant_field_names::RedundantFieldNames);
let single_char_binding_names_threshold = conf.single_char_binding_names_threshold;
store.register_pre_expansion_pass(move || box non_expressive_names::NonExpressiveNames {
single_char_binding_names_threshold,
});
store.register_pre_expansion_pass(|| box attrs::EarlyAttributes);
store.register_pre_expansion_pass(|| box dbg_macro::DbgMacro);
}
@ -638,6 +633,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
&matches::MATCH_OVERLAPPING_ARM,
&matches::MATCH_REF_PATS,
&matches::MATCH_SINGLE_BINDING,
&matches::MATCH_WILDCARD_FOR_SINGLE_VARIANTS,
&matches::MATCH_WILD_ERR_ARM,
&matches::REST_PAT_IN_FULLY_BOUND_STRUCTS,
&matches::SINGLE_MATCH,
@ -1065,6 +1061,11 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|| box match_on_vec_items::MatchOnVecItems);
store.register_early_pass(|| box manual_non_exhaustive::ManualNonExhaustive);
store.register_late_pass(|| box manual_async_fn::ManualAsyncFn);
store.register_early_pass(|| box redundant_field_names::RedundantFieldNames);
let single_char_binding_names_threshold = conf.single_char_binding_names_threshold;
store.register_early_pass(move || box non_expressive_names::NonExpressiveNames {
single_char_binding_names_threshold,
});
store.register_group(true, "clippy::restriction", Some("clippy_restriction"), vec![
LintId::of(&arithmetic::FLOAT_ARITHMETIC),
@ -1139,6 +1140,8 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
LintId::of(&macro_use::MACRO_USE_IMPORTS),
LintId::of(&match_on_vec_items::MATCH_ON_VEC_ITEMS),
LintId::of(&matches::MATCH_BOOL),
LintId::of(&matches::MATCH_WILDCARD_FOR_SINGLE_VARIANTS),
LintId::of(&matches::MATCH_WILD_ERR_ARM),
LintId::of(&matches::SINGLE_MATCH_ELSE),
LintId::of(&methods::FILTER_MAP),
LintId::of(&methods::FILTER_MAP_NEXT),
@ -1283,7 +1286,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
LintId::of(&matches::MATCH_OVERLAPPING_ARM),
LintId::of(&matches::MATCH_REF_PATS),
LintId::of(&matches::MATCH_SINGLE_BINDING),
LintId::of(&matches::MATCH_WILD_ERR_ARM),
LintId::of(&matches::SINGLE_MATCH),
LintId::of(&matches::WILDCARD_IN_OR_PATTERNS),
LintId::of(&mem_discriminant::MEM_DISCRIMINANT_NON_ENUM),
@ -1474,7 +1476,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
LintId::of(&matches::INFALLIBLE_DESTRUCTURING_MATCH),
LintId::of(&matches::MATCH_OVERLAPPING_ARM),
LintId::of(&matches::MATCH_REF_PATS),
LintId::of(&matches::MATCH_WILD_ERR_ARM),
LintId::of(&matches::SINGLE_MATCH),
LintId::of(&mem_replace::MEM_REPLACE_OPTION_WITH_NONE),
LintId::of(&mem_replace::MEM_REPLACE_WITH_DEFAULT),

View File

@ -168,7 +168,7 @@ declare_clippy_lint! {
/// **What it does:** Checks for arms that match all errors with `Err(_)`
/// and take drastic actions like `panic!`.
///
/// **Why is this bad?** It is generally a bad practice, just like
/// **Why is this bad?** It is generally a bad practice, similar to
/// catching all exceptions in java with `catch(Exception)`
///
/// **Known problems:** None.
@ -182,7 +182,7 @@ declare_clippy_lint! {
/// }
/// ```
pub MATCH_WILD_ERR_ARM,
style,
pedantic,
"a `match` with `Err(_)` arm and take drastic actions"
}
@ -220,7 +220,7 @@ declare_clippy_lint! {
/// # enum Foo { A(usize), B(usize) }
/// # let x = Foo::B(1);
/// match x {
/// A => {},
/// Foo::A(_) => {},
/// _ => {},
/// }
/// ```
@ -229,6 +229,40 @@ declare_clippy_lint! {
"a wildcard enum match arm using `_`"
}
declare_clippy_lint! {
/// **What it does:** Checks for wildcard enum matches for a single variant.
///
/// **Why is this bad?** New enum variants added by library updates can be missed.
///
/// **Known problems:** Suggested replacements may not use the correct path to the enum
/// if it is not present in the current scope.
///
/// **Example:**
///
/// ```rust
/// # enum Foo { A, B, C }
/// # let x = Foo::B;
/// match x {
/// Foo::A => {},
/// Foo::B => {},
/// _ => {},
/// }
/// ```
/// Use instead:
/// ```rust
/// # enum Foo { A, B, C }
/// # let x = Foo::B;
/// match x {
/// Foo::A => {},
/// Foo::B => {},
/// Foo::C => {},
/// }
/// ```
pub MATCH_WILDCARD_FOR_SINGLE_VARIANTS,
pedantic,
"a wildcard enum match for a single variant"
}
declare_clippy_lint! {
/// **What it does:** Checks for wildcard pattern used with others patterns in same match arm.
///
@ -356,6 +390,7 @@ impl_lint_pass!(Matches => [
MATCH_WILD_ERR_ARM,
MATCH_AS_REF,
WILDCARD_ENUM_MATCH_ARM,
MATCH_WILDCARD_FOR_SINGLE_VARIANTS,
WILDCARD_IN_OR_PATTERNS,
MATCH_SINGLE_BINDING,
INFALLIBLE_DESTRUCTURING_MATCH,
@ -676,7 +711,7 @@ fn check_wild_err_arm(cx: &LateContext<'_, '_>, ex: &Expr<'_>, arms: &[Arm<'_>])
arm.pat.span,
&format!("`Err({})` matches all errors", &ident_bind_name),
None,
"match each error separately or use the error output",
"match each error separately or use the error output, or use `.except(msg)` if the error case is unreachable",
);
}
}
@ -729,9 +764,21 @@ fn check_wild_enum_match(cx: &LateContext<'_, '_>, ex: &Expr<'_>, arms: &[Arm<'_
if let QPath::Resolved(_, p) = path {
missing_variants.retain(|e| e.ctor_def_id != Some(p.res.def_id()));
}
} else if let PatKind::TupleStruct(ref path, ..) = arm.pat.kind {
} else if let PatKind::TupleStruct(ref path, ref patterns, ..) = arm.pat.kind {
if let QPath::Resolved(_, p) = path {
missing_variants.retain(|e| e.ctor_def_id != Some(p.res.def_id()));
// Some simple checks for exhaustive patterns.
// There is room for improvement to detect more cases,
// but it can be more expensive to do so.
let is_pattern_exhaustive = |pat: &&Pat<'_>| {
if let PatKind::Wild | PatKind::Binding(.., None) = pat.kind {
true
} else {
false
}
};
if patterns.iter().all(is_pattern_exhaustive) {
missing_variants.retain(|e| e.ctor_def_id != Some(p.res.def_id()));
}
}
}
}
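To illustrate the heuristic added above with a hypothetical enum: a tuple-struct arm whose sub-patterns are all wildcards or plain bindings is treated as covering its variant, so a trailing wildcard arm can be reported; arms with more specific sub-patterns are not.

```rust
enum Foo {
    A(u32),
    B,
}

fn describe(x: &Foo) -> &'static str {
    match x {
        // `Foo::A(_)` (or `Foo::A(n)`) covers the whole variant, so no
        // wildcard arm is needed; with `Foo::A(1)` instead, a `_` arm
        // would still be required and would not be linted.
        Foo::A(_) => "some A",
        Foo::B => "B",
    }
}

fn main() {
    println!("{}", describe(&Foo::A(7)));
}
```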
@ -766,6 +813,19 @@ fn check_wild_enum_match(cx: &LateContext<'_, '_>, ex: &Expr<'_>, arms: &[Arm<'_
}
}
if suggestion.len() == 1 {
// No need to check for non-exhaustive enum as in that case len would be greater than 1
span_lint_and_sugg(
cx,
MATCH_WILDCARD_FOR_SINGLE_VARIANTS,
wildcard_span,
message,
"try this",
suggestion[0].clone(),
Applicability::MaybeIncorrect,
)
};
span_lint_and_sugg(
cx,
WILDCARD_ENUM_MATCH_ARM,
@ -773,7 +833,7 @@ fn check_wild_enum_match(cx: &LateContext<'_, '_>, ex: &Expr<'_>, arms: &[Arm<'_
message,
"try this",
suggestion.join(" | "),
Applicability::MachineApplicable,
Applicability::MaybeIncorrect,
)
}
}

View File

@ -1496,17 +1496,14 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for Methods {
if let ty::Opaque(def_id, _) = ret_ty.kind {
// one of the associated types must be Self
for predicate in cx.tcx.predicates_of(def_id).predicates {
match predicate.0.kind() {
ty::PredicateKind::Projection(poly_projection_predicate) => {
let binder = poly_projection_predicate.ty();
let associated_type = binder.skip_binder();
if let ty::PredicateKind::Projection(poly_projection_predicate) = predicate.0.kind() {
let binder = poly_projection_predicate.ty();
let associated_type = binder.skip_binder();
// walk the associated type and check for Self
if contains_self_ty(associated_type) {
return;
}
},
_ => {},
// walk the associated type and check for Self
if contains_self_ty(associated_type) {
return;
}
}
}
}
@ -1617,6 +1614,21 @@ fn lint_or_fun_call<'a, 'tcx>(
or_has_args: bool,
span: Span,
) {
if let hir::ExprKind::MethodCall(ref path, _, ref args) = &arg.kind {
if path.ident.as_str() == "len" {
let ty = walk_ptrs_ty(cx.tables.expr_ty(&args[0]));
match ty.kind {
ty::Slice(_) | ty::Array(_, _) => return,
_ => (),
}
if match_type(cx, ty, &paths::VEC) {
return;
}
}
}
// (path, fn_has_argument, methods, suffix)
let know_types: &[(&[_], _, &[_], _)] = &[
(&paths::BTREEMAP_ENTRY, false, &["or_insert"], "with"),

View File

@ -379,7 +379,7 @@ impl EarlyLintPass for MiscEarlyLints {
let left_binding = match left {
BindingMode::ByRef(Mutability::Mut) => "ref mut ",
BindingMode::ByRef(Mutability::Not) => "ref ",
_ => "",
BindingMode::ByValue(..) => "",
};
if let PatKind::Wild = right.kind {

View File

@ -113,7 +113,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingConstForFn {
return;
}
},
_ => return,
FnKind::Closure(..) => return,
}
let mir = cx.tcx.optimized_mir(def_id);

View File

@ -1,11 +1,14 @@
//! lint on multiple versions of a crate being used
use crate::utils::{run_lints, span_lint};
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_hir::{Crate, CRATE_HIR_ID};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::DUMMY_SP;
use cargo_metadata::{DependencyKind, MetadataCommand, Node, Package, PackageId};
use if_chain::if_chain;
use itertools::Itertools;
declare_clippy_lint! {
@ -39,30 +42,61 @@ impl LateLintPass<'_, '_> for MultipleCrateVersions {
return;
}
let metadata = if let Ok(metadata) = cargo_metadata::MetadataCommand::new().exec() {
let metadata = if let Ok(metadata) = MetadataCommand::new().exec() {
metadata
} else {
span_lint(cx, MULTIPLE_CRATE_VERSIONS, DUMMY_SP, "could not read cargo metadata");
return;
};
let local_name = cx.tcx.crate_name(LOCAL_CRATE).as_str();
let mut packages = metadata.packages;
packages.sort_by(|a, b| a.name.cmp(&b.name));
for (name, group) in &packages.into_iter().group_by(|p| p.name.clone()) {
let group: Vec<cargo_metadata::Package> = group.collect();
if_chain! {
if let Some(resolve) = &metadata.resolve;
if let Some(local_id) = packages
.iter()
.find_map(|p| if p.name == *local_name { Some(&p.id) } else { None });
then {
for (name, group) in &packages.iter().group_by(|p| p.name.clone()) {
let group: Vec<&Package> = group.collect();
if group.len() > 1 {
let versions = group.into_iter().map(|p| p.version).join(", ");
if group.len() <= 1 {
continue;
}
span_lint(
cx,
MULTIPLE_CRATE_VERSIONS,
DUMMY_SP,
&format!("multiple versions for dependency `{}`: {}", name, versions),
);
if group.iter().all(|p| is_normal_dep(&resolve.nodes, local_id, &p.id)) {
let mut versions: Vec<_> = group.into_iter().map(|p| &p.version).collect();
versions.sort();
let versions = versions.iter().join(", ");
span_lint(
cx,
MULTIPLE_CRATE_VERSIONS,
DUMMY_SP,
&format!("multiple versions for dependency `{}`: {}", name, versions),
);
}
}
}
}
}
}
fn is_normal_dep(nodes: &[Node], local_id: &PackageId, dep_id: &PackageId) -> bool {
fn depends_on(node: &Node, dep_id: &PackageId) -> bool {
node.deps.iter().any(|dep| {
dep.pkg == *dep_id
&& dep
.dep_kinds
.iter()
.any(|info| matches!(info.kind, DependencyKind::Normal))
})
}
nodes
.iter()
.filter(|node| depends_on(node, dep_id))
.any(|node| node.id == *local_id || is_normal_dep(nodes, local_id, &node.id))
}
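The recursion above asks whether the local crate reaches the duplicated package through normal dependencies only, so duplicates pulled in purely via dev or build dependencies are ignored. A self-contained toy model of that check (it does not use `cargo_metadata`; the names and graph are invented):

```rust
#[derive(PartialEq)]
enum DepKind {
    Normal,
    Dev,
}

struct Node {
    id: &'static str,
    deps: Vec<(&'static str, DepKind)>,
}

fn is_normal_dep(nodes: &[Node], local_id: &str, dep_id: &str) -> bool {
    fn depends_on(node: &Node, dep_id: &str) -> bool {
        node.deps
            .iter()
            .any(|(id, kind)| *id == dep_id && *kind == DepKind::Normal)
    }

    // Walk backwards from the dependency towards the local crate, only
    // following edges that are normal dependencies.
    nodes
        .iter()
        .filter(|node| depends_on(node, dep_id))
        .any(|node| node.id == local_id || is_normal_dep(nodes, local_id, node.id))
}

fn main() {
    let nodes = vec![
        Node { id: "local", deps: vec![("a", DepKind::Normal), ("b", DepKind::Dev)] },
        Node { id: "a", deps: vec![("winapi 0.2", DepKind::Normal)] },
        Node { id: "b", deps: vec![("winapi 0.3", DepKind::Normal)] },
    ];
    // Only the copy reachable through normal dependencies counts.
    println!("{}", is_normal_dep(&nodes, "local", "winapi 0.2")); // true
    println!("{}", is_normal_dep(&nodes, "local", "winapi 0.3")); // false
}
```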

View File

@ -6,7 +6,7 @@ use rustc_middle::ty::{self, Ty};
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Detects giving a mutable reference to a function that only
/// **What it does:** Detects passing a mutable reference to a function that only
/// requires an immutable reference.
///
/// **Why is this bad?** The immutable reference rules out all other references

View File

@ -86,7 +86,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NeedlessPassByValue {
}
},
FnKind::Method(..) => (),
_ => return,
FnKind::Closure(..) => return,
}
// Exclude non-inherent impls

View File

@ -1,27 +1,20 @@
use crate::utils::paths;
use crate::utils::sugg::DiagnosticBuilderExt;
use crate::utils::{get_trait_def_id, implements_trait, return_ty, same_tys, span_lint_hir_and_then};
use crate::utils::{get_trait_def_id, return_ty, same_tys, span_lint_hir_and_then};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::HirIdSet;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{self, Ty};
use rustc_middle::ty::Ty;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::Span;
declare_clippy_lint! {
/// **What it does:** Checks for types with a `fn new() -> Self` method and no
/// implementation of
/// [`Default`](https://doc.rust-lang.org/std/default/trait.Default.html).
///
/// It detects both the case when a manual
/// [`Default`](https://doc.rust-lang.org/std/default/trait.Default.html)
/// implementation is required and also when it can be created with
/// `#[derive(Default)]`
///
/// **Why is this bad?** The user might expect to be able to use
/// [`Default`](https://doc.rust-lang.org/std/default/trait.Default.html) as the
/// type can be constructed without arguments.
@ -40,46 +33,17 @@ declare_clippy_lint! {
/// }
/// ```
///
/// Instead, use:
/// To fix the lint, add a `Default` implementation that delegates to `new`:
///
/// ```ignore
/// struct Foo(Bar);
///
/// impl Default for Foo {
/// fn default() -> Self {
/// Foo(Bar::new())
/// Foo::new()
/// }
/// }
/// ```
///
/// Or, if
/// [`Default`](https://doc.rust-lang.org/std/default/trait.Default.html)
/// can be derived by `#[derive(Default)]`:
///
/// ```rust
/// struct Foo;
///
/// impl Foo {
/// fn new() -> Self {
/// Foo
/// }
/// }
/// ```
///
/// Instead, use:
///
/// ```rust
/// #[derive(Default)]
/// struct Foo;
///
/// impl Foo {
/// fn new() -> Self {
/// Foo
/// }
/// }
/// ```
///
/// You can also have `new()` call `Default::default()`.
pub NEW_WITHOUT_DEFAULT,
style,
"`fn new() -> Self` method without `Default` implementation"
@ -126,8 +90,8 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NewWithoutDefault {
return;
}
if sig.decl.inputs.is_empty() && name == sym!(new) && cx.access_levels.is_reachable(id) {
let self_did = cx.tcx.hir().local_def_id(cx.tcx.hir().get_parent_item(id));
let self_ty = cx.tcx.type_of(self_did);
let self_def_id = cx.tcx.hir().local_def_id(cx.tcx.hir().get_parent_item(id));
let self_ty = cx.tcx.type_of(self_def_id);
if_chain! {
if same_tys(cx, self_ty, return_ty(cx, id));
if let Some(default_trait_id) = get_trait_def_id(cx, &paths::DEFAULT_TRAIT);
@ -148,56 +112,35 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NewWithoutDefault {
// generics
if_chain! {
if let Some(ref impling_types) = self.impling_types;
if let Some(self_def) = cx.tcx.type_of(self_did).ty_adt_def();
if let Some(self_def_id) = self_def.did.as_local();
if let Some(self_def) = cx.tcx.type_of(self_def_id).ty_adt_def();
if let Some(self_local_did) = self_def.did.as_local();
then {
let self_id = cx.tcx.hir().local_def_id_to_hir_id(self_def_id);
let self_id = cx.tcx.hir().local_def_id_to_hir_id(self_local_did);
if impling_types.contains(&self_id) {
return;
}
}
}
if let Some(sp) = can_derive_default(self_ty, cx, default_trait_id) {
span_lint_hir_and_then(
cx,
NEW_WITHOUT_DEFAULT,
id,
impl_item.span,
&format!(
"you should consider deriving a `Default` implementation for `{}`",
self_ty
),
|diag| {
diag.suggest_item_with_attr(
cx,
sp,
"try this",
"#[derive(Default)]",
Applicability::MaybeIncorrect,
);
});
} else {
span_lint_hir_and_then(
cx,
NEW_WITHOUT_DEFAULT,
id,
impl_item.span,
&format!(
"you should consider adding a `Default` implementation for `{}`",
self_ty
),
|diag| {
diag.suggest_prepend_item(
cx,
item.span,
"try this",
&create_new_without_default_suggest_msg(self_ty),
Applicability::MaybeIncorrect,
);
},
);
}
span_lint_hir_and_then(
cx,
NEW_WITHOUT_DEFAULT,
id,
impl_item.span,
&format!(
"you should consider adding a `Default` implementation for `{}`",
self_ty
),
|diag| {
diag.suggest_prepend_item(
cx,
item.span,
"try this",
&create_new_without_default_suggest_msg(self_ty),
Applicability::MaybeIncorrect,
);
},
);
}
}
}
@ -217,18 +160,3 @@ fn create_new_without_default_suggest_msg(ty: Ty<'_>) -> String {
}}
}}", ty)
}
fn can_derive_default<'t, 'c>(ty: Ty<'t>, cx: &LateContext<'c, 't>, default_trait_id: DefId) -> Option<Span> {
match ty.kind {
ty::Adt(adt_def, substs) if adt_def.is_struct() => {
for field in adt_def.all_fields() {
let f_ty = field.ty(cx.tcx, substs);
if !implements_trait(cx, f_ty, default_trait_id, &[]) {
return None;
}
}
Some(cx.tcx.def_span(adt_def.did))
},
_ => None,
}
}

View File

@ -5,6 +5,7 @@ use rustc_ast::ast::{
use rustc_ast::attr;
use rustc_ast::visit::{walk_block, walk_expr, walk_pat, Visitor};
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::Span;
use rustc_span::symbol::{Ident, SymbolStr};
@ -131,7 +132,11 @@ struct SimilarNamesNameVisitor<'a, 'tcx, 'b>(&'b mut SimilarNamesLocalVisitor<'a
impl<'a, 'tcx, 'b> Visitor<'tcx> for SimilarNamesNameVisitor<'a, 'tcx, 'b> {
fn visit_pat(&mut self, pat: &'tcx Pat) {
match pat.kind {
PatKind::Ident(_, ident, _) => self.check_ident(ident),
PatKind::Ident(_, ident, _) => {
if !pat.span.from_expansion() {
self.check_ident(ident);
}
},
PatKind::Struct(_, ref fields, _) => {
for field in fields {
if !field.is_shorthand {
@ -354,12 +359,20 @@ impl<'a, 'tcx> Visitor<'tcx> for SimilarNamesLocalVisitor<'a, 'tcx> {
impl EarlyLintPass for NonExpressiveNames {
fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
if in_external_macro(cx.sess, item.span) {
return;
}
if let ItemKind::Fn(_, ref sig, _, Some(ref blk)) = item.kind {
do_check(self, cx, &item.attrs, &sig.decl, blk);
}
}
fn check_impl_item(&mut self, cx: &EarlyContext<'_>, item: &AssocItem) {
if in_external_macro(cx.sess, item.span) {
return;
}
if let AssocItemKind::Fn(_, ref sig, _, Some(ref blk)) = item.kind {
do_check(self, cx, &item.attrs, &sig.decl, blk);
}

View File

@ -2,7 +2,7 @@
use crate::utils::ptr::get_spans;
use crate::utils::{
is_type_diagnostic_item, match_qpath, match_type, paths, snippet_opt, span_lint, span_lint_and_sugg,
is_allowed, is_type_diagnostic_item, match_qpath, match_type, paths, snippet_opt, span_lint, span_lint_and_sugg,
span_lint_and_then, walk_ptrs_hir_ty,
};
use if_chain::if_chain;
@ -150,8 +150,16 @@ fn check_fn(cx: &LateContext<'_, '_>, decl: &FnDecl<'_>, fn_id: HirId, opt_body_
let fn_def_id = cx.tcx.hir().local_def_id(fn_id);
let sig = cx.tcx.fn_sig(fn_def_id);
let fn_ty = sig.skip_binder();
let body = opt_body_id.map(|id| cx.tcx.hir().body(id));
for (idx, (arg, ty)) in decl.inputs.iter().zip(fn_ty.inputs()).enumerate() {
// Honor the allow attribute on parameters. See issue 5644.
if let Some(body) = &body {
if is_allowed(cx, PTR_ARG, body.params[idx].hir_id) {
continue;
}
}
if let ty::Ref(_, ty, Mutability::Not) = ty.kind {
if is_type_diagnostic_item(cx, ty, sym!(vec_type)) {
let mut ty_snippet = None;
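A sketch of what honoring the allow attribute on parameters (issue 5644) permits; the function is hypothetical:

```rust
// `clippy::ptr_arg` would normally suggest `&[u64]` here; the attribute on
// the parameter now silences it for this argument only.
fn total(#[allow(clippy::ptr_arg)] xs: &Vec<u64>) -> u64 {
    xs.iter().sum()
}

fn main() {
    println!("{}", total(&vec![1, 2, 3]));
}
```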

View File

@ -241,14 +241,14 @@ fn check_inclusive_range_minus_one(cx: &LateContext<'_, '_>, expr: &Expr<'_>) {
}
fn check_reversed_empty_range(cx: &LateContext<'_, '_>, expr: &Expr<'_>) {
fn inside_indexing_expr(cx: &LateContext<'_, '_>, expr: &Expr<'_>) -> bool {
matches!(
get_parent_expr(cx, expr),
Some(Expr {
fn inside_indexing_expr<'a>(cx: &'a LateContext<'_, '_>, expr: &Expr<'_>) -> Option<&'a Expr<'a>> {
match get_parent_expr(cx, expr) {
parent_expr @ Some(Expr {
kind: ExprKind::Index(..),
..
})
)
}) => parent_expr,
_ => None,
}
}
fn is_empty_range(limits: RangeLimits, ordering: Ordering) -> bool {
@ -267,18 +267,32 @@ fn check_reversed_empty_range(cx: &LateContext<'_, '_>, expr: &Expr<'_>) {
if let Some(ordering) = Constant::partial_cmp(cx.tcx, ty, &start_idx, &end_idx);
if is_empty_range(limits, ordering);
then {
if inside_indexing_expr(cx, expr) {
if let Some(parent_expr) = inside_indexing_expr(cx, expr) {
let (reason, outcome) = if ordering == Ordering::Equal {
("empty", "always yield an empty slice")
} else {
("reversed", "panic at run-time")
};
span_lint(
span_lint_and_then(
cx,
REVERSED_EMPTY_RANGES,
expr.span,
&format!("this range is {} and using it to index a slice will {}", reason, outcome),
|diag| {
if_chain! {
if ordering == Ordering::Equal;
if let ty::Slice(slice_ty) = cx.tables.expr_ty(parent_expr).kind;
then {
diag.span_suggestion(
parent_expr.span,
"if you want an empty slice, use",
format!("[] as &[{}]", slice_ty),
Applicability::MaybeIncorrect
);
}
}
}
);
} else {
span_lint_and_then(
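A sketch of the kind of code the new suggestion path targets (the array and indices are made up): indexing with an empty range always yields an empty slice, and when the indexed value is a slice the lint can now point at the explicit empty-slice form.

```rust
fn main() {
    let x = [1u8, 2, 3, 4, 5];

    // Flagged by REVERSED_EMPTY_RANGES: the range is empty, so indexing
    // always yields an empty slice.
    let a = &x[5..5];

    // What the suggestion amounts to: an explicit empty slice.
    let b: &[u8] = &[];

    assert_eq!(a, b);
}
```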

View File

@ -2,6 +2,7 @@ use crate::utils::span_lint_and_sugg;
use rustc_ast::ast::{Expr, ExprKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
@ -36,6 +37,9 @@ declare_lint_pass!(RedundantFieldNames => [REDUNDANT_FIELD_NAMES]);
impl EarlyLintPass for RedundantFieldNames {
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
if in_external_macro(cx.sess, expr.span) {
return;
}
if let ExprKind::Struct(_, ref fields, _) = expr.kind {
for field in fields {
if field.is_shorthand {

View File

@ -161,7 +161,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TriviallyCopyPassByRef {
}
},
FnKind::Method(..) => (),
_ => return,
FnKind::Closure(..) => return,
}
// Exclude non-inherent impls

View File

@ -1,14 +1,17 @@
use crate::utils::{
match_def_path, match_trait_method, paths, same_tys, snippet, snippet_with_macro_callsite, span_lint_and_sugg,
is_type_diagnostic_item, match_def_path, match_trait_method, paths, same_tys, snippet, snippet_with_macro_callsite,
span_lint_and_help, span_lint_and_sugg,
};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, HirId, MatchSource};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_session::{declare_tool_lint, impl_lint_pass};
declare_clippy_lint! {
/// **What it does:** Checks for `Into`/`From`/`IntoIter` calls that useless converts
/// to the same type as caller.
/// **What it does:** Checks for `Into`, `TryInto`, `From`, `TryFrom`, `IntoIter` calls
/// that uselessly convert to the same type as the caller.
///
/// **Why is this bad?** Redundant code.
///
@ -26,7 +29,7 @@ declare_clippy_lint! {
/// ```
pub USELESS_CONVERSION,
complexity,
"calls to `Into`/`From`/`IntoIter` that performs useless conversions to the same type"
"calls to `Into`, `TryInto`, `From`, `TryFrom`, `IntoIter` that performs useless conversions to the same type"
}
#[derive(Default)]
@ -36,6 +39,7 @@ pub struct UselessConversion {
impl_lint_pass!(UselessConversion => [USELESS_CONVERSION]);
#[allow(clippy::too_many_lines)]
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UselessConversion {
fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, e: &'tcx Expr<'_>) {
if e.span.from_expansion() {
@ -63,12 +67,11 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UselessConversion {
let b = cx.tables.expr_ty(&args[0]);
if same_tys(cx, a, b) {
let sugg = snippet_with_macro_callsite(cx, args[0].span, "<expr>").to_string();
span_lint_and_sugg(
cx,
USELESS_CONVERSION,
e.span,
"useless conversion",
"useless conversion to the same type",
"consider removing `.into()`",
sugg,
Applicability::MachineApplicable, // snippet
@ -84,22 +87,70 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UselessConversion {
cx,
USELESS_CONVERSION,
e.span,
"useless conversion",
"useless conversion to the same type",
"consider removing `.into_iter()`",
sugg,
Applicability::MachineApplicable, // snippet
);
}
}
if match_trait_method(cx, e, &paths::TRY_INTO_TRAIT) && &*name.ident.as_str() == "try_into" {
if_chain! {
let a = cx.tables.expr_ty(e);
let b = cx.tables.expr_ty(&args[0]);
if is_type_diagnostic_item(cx, a, sym!(result_type));
if let ty::Adt(_, substs) = a.kind;
if let Some(a_type) = substs.types().next();
if same_tys(cx, a_type, b);
then {
span_lint_and_help(
cx,
USELESS_CONVERSION,
e.span,
"useless conversion to the same type",
None,
"consider removing `.try_into()`",
);
}
}
}
},
ExprKind::Call(ref path, ref args) => {
if let ExprKind::Path(ref qpath) = path.kind {
if let Some(def_id) = cx.tables.qpath_res(qpath, path.hir_id).opt_def_id() {
if match_def_path(cx, def_id, &paths::FROM_FROM) {
let a = cx.tables.expr_ty(e);
let b = cx.tables.expr_ty(&args[0]);
if same_tys(cx, a, b) {
if_chain! {
if args.len() == 1;
if let ExprKind::Path(ref qpath) = path.kind;
if let Some(def_id) = cx.tables.qpath_res(qpath, path.hir_id).opt_def_id();
let a = cx.tables.expr_ty(e);
let b = cx.tables.expr_ty(&args[0]);
then {
if_chain! {
if match_def_path(cx, def_id, &paths::TRY_FROM);
if is_type_diagnostic_item(cx, a, sym!(result_type));
if let ty::Adt(_, substs) = a.kind;
if let Some(a_type) = substs.types().next();
if same_tys(cx, a_type, b);
then {
let hint = format!("consider removing `{}()`", snippet(cx, path.span, "TryFrom::try_from"));
span_lint_and_help(
cx,
USELESS_CONVERSION,
e.span,
"useless conversion to the same type",
None,
&hint,
);
}
}
if_chain! {
if match_def_path(cx, def_id, &paths::FROM_FROM);
if same_tys(cx, a, b);
then {
let sugg = snippet(cx, args[0].span.source_callsite(), "<expr>").into_owned();
let sugg_msg =
format!("consider removing `{}()`", snippet(cx, path.span, "From::from"));
@ -107,7 +158,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UselessConversion {
cx,
USELESS_CONVERSION,
e.span,
"useless conversion",
"useless conversion to the same type",
&sugg_msg,
sugg,
Applicability::MachineApplicable, // snippet
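A sketch of the conversions the extended lint now covers; the values are arbitrary:

```rust
use std::convert::TryInto;

fn main() {
    let n: i32 = 5;

    // Should now be flagged: `try_into` from i32 to i32 is a useless
    // conversion to the same type.
    let same: Result<i32, _> = n.try_into();

    // A meaningful conversion, left alone by the lint.
    let narrowed: Result<u8, _> = 300i32.try_into();

    println!("{:?} {:?}", same, narrowed);
}
```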

View File

@ -120,10 +120,12 @@ define_Conf! {
"GPLv2", "GPLv3",
"GitHub", "GitLab",
"IPv4", "IPv6",
"JavaScript",
"ClojureScript", "CoffeeScript", "JavaScript", "PureScript", "TypeScript",
"NaN", "NaNs",
"OAuth",
"OpenGL", "OpenSSH", "OpenSSL", "OpenStreetMap",
"OCaml",
"OpenGL", "OpenMP", "OpenSSH", "OpenSSL", "OpenStreetMap",
"TensorFlow",
"TrueType",
"iOS", "macOS",
"TeX", "LaTeX", "BibTeX", "BibLaTeX",

View File

@ -289,21 +289,21 @@ fn print_expr(cx: &LateContext<'_, '_>, expr: &hir::Expr<'_>, indent: usize) {
println!("{}operands:", ind);
for op in asm.operands {
match op {
hir::InlineAsmOperand::In { expr, .. } => print_expr(cx, expr, indent + 1),
hir::InlineAsmOperand::In { expr, .. }
| hir::InlineAsmOperand::InOut { expr, .. }
| hir::InlineAsmOperand::Const { expr }
| hir::InlineAsmOperand::Sym { expr } => print_expr(cx, expr, indent + 1),
hir::InlineAsmOperand::Out { expr, .. } => {
if let Some(expr) = expr {
print_expr(cx, expr, indent + 1);
}
},
hir::InlineAsmOperand::InOut { expr, .. } => print_expr(cx, expr, indent + 1),
hir::InlineAsmOperand::SplitInOut { in_expr, out_expr, .. } => {
print_expr(cx, in_expr, indent + 1);
if let Some(out_expr) = out_expr {
print_expr(cx, out_expr, indent + 1);
}
},
hir::InlineAsmOperand::Const { expr } => print_expr(cx, expr, indent + 1),
hir::InlineAsmOperand::Sym { expr } => print_expr(cx, expr, indent + 1),
}
}
},

View File

@ -358,7 +358,7 @@ pub fn trait_ref_of_method<'tcx>(cx: &LateContext<'_, 'tcx>, hir_id: HirId) -> O
pub fn has_drop<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
match ty.ty_adt_def() {
Some(def) => def.has_dtor(cx.tcx),
_ => false,
None => false,
}
}

View File

@ -128,8 +128,10 @@ pub const TO_OWNED_METHOD: [&str; 4] = ["alloc", "borrow", "ToOwned", "to_owned"
pub const TO_STRING: [&str; 3] = ["alloc", "string", "ToString"];
pub const TO_STRING_METHOD: [&str; 4] = ["alloc", "string", "ToString", "to_string"];
pub const TRANSMUTE: [&str; 4] = ["core", "intrinsics", "", "transmute"];
pub const TRY_FROM: [&str; 4] = ["core", "convert", "TryFrom", "try_from"];
pub const TRY_FROM_ERROR: [&str; 4] = ["std", "ops", "Try", "from_error"];
pub const TRY_INTO_RESULT: [&str; 4] = ["std", "ops", "Try", "into_result"];
pub const TRY_INTO_TRAIT: [&str; 3] = ["core", "convert", "TryInto"];
pub const VEC: [&str; 3] = ["alloc", "vec", "Vec"];
pub const VEC_AS_MUT_SLICE: [&str; 4] = ["alloc", "vec", "Vec", "as_mut_slice"];
pub const VEC_AS_SLICE: [&str; 4] = ["alloc", "vec", "Vec", "as_slice"];

View File

@ -530,7 +530,7 @@ pub trait DiagnosticBuilderExt<'a, T: LintContext> {
/// Suggest to add an item before another.
///
/// The item should not be indented (expect for inner indentation).
/// The item should not be indented (except for inner indentation).
///
/// # Example
///

View File

@ -42,8 +42,10 @@ case), and we don't need type information so it will have an early pass type
`cargo dev new_lint --name=foo_functions --pass=early --category=pedantic`
(category will default to nursery if not provided). This command will create
two files: `tests/ui/foo_functions.rs` and `clippy_lints/src/foo_functions.rs`,
as well as run `cargo dev update_lints` to register the new lint. Next, we'll
open up these files and add our lint!
as well as run `cargo dev update_lints` to register the new lint. For cargo lints,
two project hierarchies (fail/pass) will be created by default under `tests/ui-cargo`.
Next, we'll open up these files and add our lint!
## Testing
@ -105,6 +107,24 @@ our lint, we need to commit the generated `.stderr` files, too. In general, you
should only commit files changed by `tests/ui/update-all-references.sh` for the
specific lint you are creating/editing.
### Cargo lints
For cargo lints, the process of testing differs in that we are interested in
the `Cargo.toml` manifest file. We also need a minimal crate associated
with that manifest.
If our new lint is named e.g. `foo_categories`, after running `cargo dev new_lint`
we will find by default two new crates, each with its manifest file:
* `tests/ui-cargo/foo_categories/fail/Cargo.toml`: this file should cause the new lint to raise an error.
* `tests/ui-cargo/foo_categories/pass/Cargo.toml`: this file should not trigger the lint.
If you need more cases, you can copy one of those crates (under `foo_categories`) and rename it.
The process of generating the `.stderr` file is the same, and prepending the `TESTNAME`
variable to `cargo uitest` works here too; however, the script that updates the references
lives at a different path: `tests/ui-cargo/update-all-references.sh`.
## Rustfix tests
If the lint you are working on is making use of structured suggestions, the
@ -445,6 +465,7 @@ Here are some pointers to things you are likely going to need for every lint:
* [`from_expansion`][from_expansion] and [`in_external_macro`][in_external_macro]
* [`Span`][span]
* [`Applicability`][applicability]
* [Common tools for writing lints](common_tools_writing_lints.md) helps with common operations
* [The rustc-dev-guide][rustc-dev-guide] explains a lot of internal compiler concepts
* [The nightly rustc docs][nightly_docs] which has been linked to throughout
this guide

View File

@ -0,0 +1,152 @@
# Common tools for writing lints
You may need the following tips for common operations when writing lints.
- [Common tools for writing lints](#common-tools-for-writing-lints)
- [Retrieving the type of an expression](#retrieving-the-type-of-an-expression)
- [Checking if a type implements a specific trait](#checking-if-a-type-implements-a-specific-trait)
- [Dealing with macros](#dealing-with-macros)
Useful Rustc dev guide links:
- [Stages of compilation](https://rustc-dev-guide.rust-lang.org/compiler-src.html#the-main-stages-of-compilation)
- [Type checking](https://rustc-dev-guide.rust-lang.org/type-checking.html)
- [Ty module](https://rustc-dev-guide.rust-lang.org/ty.html)
# Retrieving the type of an expression
Sometimes you may want to retrieve the type `Ty` of an expression `Expr`, for example to answer the following questions:
- which type does this expression correspond to (using its [`TyKind`][TyKind])?
- is it a sized type?
- is it a primitive type?
- does it implement a trait?
This operation is performed with the [`expr_ty()`][expr_ty] method of the [`TypeckTables`][TypeckTables] struct,
which gives you access to the underlying structure [`TyS`][TyS].
Example of use:
```rust
impl LateLintPass<'_, '_> for MyStructLint {
fn check_expr(&mut self, cx: &LateContext<'_, '_>, expr: &Expr<'_>) {
// Get type of `expr`
let ty = cx.tables.expr_ty(expr);
// Match its kind to enter its type
match ty.kind {
ty::Adt(adt_def, _) if adt_def.is_struct() => println!("Our `expr` is a struct!"),
_ => ()
}
}
}
```
Similarly, [`TypeckTables`][TypeckTables] provides the [`pat_ty()`][pat_ty] method
to retrieve the type of a pattern, as sketched below.
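A minimal sketch in the same style as the snippet above (the lint name and the reaction to the matched kind are illustrative, not part of Clippy):
```rust
impl LateLintPass<'_, '_> for MyStructLint {
    fn check_pat(&mut self, cx: &LateContext<'_, '_>, pat: &Pat<'_>) {
        // Get the type of the pattern, analogous to `expr_ty` for expressions
        let ty = cx.tables.pat_ty(pat);
        // React only to patterns whose type is a reference
        if let ty::Ref(..) = ty.kind {
            // `pat` has a reference type
        }
    }
}
```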
Two notable items here:
- `cx` is the lint context [`LateContext`][LateContext].
The two most useful data structures here are `tcx` and `tables`,
which let us jump to type definitions and other compilation stages such as the HIR.
- `tables` is a [`TypeckTables`][TypeckTables] created by the type-checking step;
it includes useful information such as the types of expressions and how methods are resolved.
# Checking if a type implements a specific trait
There are two ways to do this, depending on whether the target trait is part of the lang items.
```rust
use crate::utils::{implements_trait, match_trait_method, paths};
impl LateLintPass<'_, '_> for MyStructLint {
fn check_expr(&mut self, cx: &LateContext<'_, '_>, expr: &Expr<'_>) {
// 1. Using expression and Clippy's convenient method
// we use `match_trait_method` function from Clippy's toolbox
if match_trait_method(cx, expr, &paths::INTO) {
// `expr` implements `Into` trait
}
// 2. Using type context `TyCtxt`
let ty = cx.tables.expr_ty(expr);
if cx.tcx.lang_items()
// we are looking for the `DefId` of `Drop` trait in lang items
.drop_trait()
// then we use it with our type `ty` by calling `implements_trait` from Clippy's utils
.map_or(false, |id| implements_trait(cx, ty, id, &[])) {
// `expr` implements `Drop` trait
}
}
}
```
> Prefer using lang items if the target trait is available there.
A list of the paths defined for Clippy can be found in [paths.rs][paths].
We access lang items through the type context `tcx`. `tcx` is of type [`TyCtxt`][TyCtxt] and is defined in the `rustc_middle` crate.
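For illustration, a small sketch (again with an illustrative lint name) that compares the type of an expression against one of those predefined paths using the `match_type` helper from Clippy's utils:
```rust
use crate::utils::{match_type, paths};

impl LateLintPass<'_, '_> for MyStructLint {
    fn check_expr(&mut self, cx: &LateContext<'_, '_>, expr: &Expr<'_>) {
        // Compare the type of `expr` against the path defined for `Vec` in `paths.rs`
        let ty = cx.tables.expr_ty(expr);
        if match_type(cx, ty, &paths::VEC) {
            // `expr` has type `Vec<_>`
        }
    }
}
```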
# Dealing with macros
There are several helpers in Clippy's utils to deal with macros:
- `in_macro()`: detects whether the given span was produced by a macro expansion
You may want to use this, for example, to avoid linting inside any macro.
```rust
macro_rules! foo {
($param:expr) => {
match $param {
"bar" => println!("whatever"),
_ => ()
}
};
}
foo!("bar");
// if we lint the `match` of `foo` call and test its span
assert_eq!(in_macro(match_span), true);
```
- `in_external_macro()`: detects whether the given span comes from an external macro defined in a foreign crate
You may want to use it, for example, to avoid linting inside macros from other crates.
```rust
#[macro_use]
extern crate a_crate_with_macros;
// `foo` is defined in `a_crate_with_macros`
foo!("bar");
// if we lint the `match` of `foo` call and test its span
assert_eq!(in_external_macro(cx.sess(), match_span), true);
```
- `differing_macro_contexts()`: returns true if the two given spans are not from the same context
```rust
macro_rules! m {
($a:expr, $b:expr) => {
if $a.is_some() {
$b;
}
}
}
let x: Option<u32> = Some(42);
m!(x, x.unwrap());
// These spans are not from the same context
// x.is_some() is from inside the macro
// x.unwrap() is from outside the macro
assert_eq!(differing_macro_contexts(x_is_some_span, x_unwrap_span), true);
```
[TyS]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TyS.html
[TyKind]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/enum.TyKind.html
[TypeckTables]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TypeckTables.html
[expr_ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TypeckTables.html#method.expr_ty
[LateContext]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/struct.LateContext.html
[TyCtxt]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/context/struct.TyCtxt.html
[pat_ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/context/struct.TypeckTables.html#method.pat_ty
[paths]: ../clippy_lints/src/utils/paths.rs

View File

@ -0,0 +1 @@
../LICENSE-APACHE

View File

@ -0,0 +1 @@
../LICENSE-MIT

View File

@ -32,5 +32,5 @@ else
TOOLCHAIN=()
fi
rustup-toolchain-install-master -f -n master "${TOOLCHAIN[@]}" -c rustc-dev -- "$RUST_COMMIT"
rustup-toolchain-install-master -f -n master "${TOOLCHAIN[@]}" -c rustc-dev -c llvm-tools -- "$RUST_COMMIT"
rustup override set master

View File

@ -78,7 +78,7 @@ impl rustc_driver::Callbacks for ClippyCallbacks {
let conf = clippy_lints::read_conf(&[], &sess);
clippy_lints::register_plugins(&mut lint_store, &sess, &conf);
clippy_lints::register_pre_expansion_lints(&mut lint_store, &conf);
clippy_lints::register_pre_expansion_lints(&mut lint_store);
clippy_lints::register_renamed(&mut lint_store);
}));

View File

@ -1195,11 +1195,18 @@ pub static ref ALL_LINTS: Vec<Lint> = vec![
},
Lint {
name: "match_wild_err_arm",
group: "style",
group: "pedantic",
desc: "a `match` with `Err(_)` arm and take drastic actions",
deprecation: None,
module: "matches",
},
Lint {
name: "match_wildcard_for_single_variants",
group: "pedantic",
desc: "a wildcard enum match for a single variant",
deprecation: None,
module: "matches",
},
Lint {
name: "maybe_infinite_iter",
group: "pedantic",
@ -2414,7 +2421,7 @@ pub static ref ALL_LINTS: Vec<Lint> = vec![
Lint {
name: "useless_conversion",
group: "complexity",
desc: "calls to `Into`/`From`/`IntoIter` that performs useless conversions to the same type",
desc: "calls to `Into`, `TryInto`, `From`, `TryFrom`, `IntoIter` that performs useless conversions to the same type",
deprecation: None,
module: "useless_conversion",
},

View File

@ -38,13 +38,13 @@ fn clippy_driver_path() -> PathBuf {
// as what we manually pass to `cargo` invocation
fn third_party_crates() -> String {
use std::collections::HashMap;
static CRATES: &[&str] = &["serde", "serde_derive", "regex", "clippy_lints"];
static CRATES: &[&str] = &["serde", "serde_derive", "regex", "clippy_lints", "syn", "quote"];
let dep_dir = cargo::TARGET_LIB.join("deps");
let mut crates: HashMap<&str, PathBuf> = HashMap::with_capacity(CRATES.len());
for entry in fs::read_dir(dep_dir).unwrap() {
let path = match entry {
Ok(entry) => entry.path(),
_ => continue,
Err(_) => continue,
};
if let Some(name) = path.file_name().and_then(OsStr::to_str) {
for dep in CRATES {
@ -101,54 +101,136 @@ fn run_mode(cfg: &mut compiletest::Config) {
compiletest::run_tests(&cfg);
}
#[allow(clippy::identity_conversion)]
fn run_ui_toml_tests(config: &compiletest::Config, mut tests: Vec<tester::TestDescAndFn>) -> Result<bool, io::Error> {
let mut result = true;
let opts = compiletest::test_opts(config);
for dir in fs::read_dir(&config.src_base)? {
let dir = dir?;
if !dir.file_type()?.is_dir() {
continue;
}
let dir_path = dir.path();
set_var("CARGO_MANIFEST_DIR", &dir_path);
for file in fs::read_dir(&dir_path)? {
let file = file?;
let file_path = file.path();
if file.file_type()?.is_dir() {
continue;
}
if file_path.extension() != Some(OsStr::new("rs")) {
continue;
}
let paths = compiletest::common::TestPaths {
file: file_path,
base: config.src_base.clone(),
relative_dir: dir_path.file_name().unwrap().into(),
};
let test_name = compiletest::make_test_name(&config, &paths);
let index = tests
.iter()
.position(|test| test.desc.name == test_name)
.expect("The test should be in there");
result &= tester::run_tests_console(&opts, vec![tests.swap_remove(index)])?;
}
}
Ok(result)
}
fn run_ui_toml(config: &mut compiletest::Config) {
fn run_tests(config: &compiletest::Config, mut tests: Vec<tester::TestDescAndFn>) -> Result<bool, io::Error> {
let mut result = true;
let opts = compiletest::test_opts(config);
for dir in fs::read_dir(&config.src_base)? {
let dir = dir?;
if !dir.file_type()?.is_dir() {
continue;
}
let dir_path = dir.path();
set_var("CARGO_MANIFEST_DIR", &dir_path);
for file in fs::read_dir(&dir_path)? {
let file = file?;
let file_path = file.path();
if file.file_type()?.is_dir() {
continue;
}
if file_path.extension() != Some(OsStr::new("rs")) {
continue;
}
let paths = compiletest::common::TestPaths {
file: file_path,
base: config.src_base.clone(),
relative_dir: dir_path.file_name().unwrap().into(),
};
let test_name = compiletest::make_test_name(&config, &paths);
let index = tests
.iter()
.position(|test| test.desc.name == test_name)
.expect("The test should be in there");
result &= tester::run_tests_console(&opts, vec![tests.swap_remove(index)])?;
}
}
Ok(result)
}
config.mode = TestMode::Ui;
config.src_base = Path::new("tests").join("ui-toml").canonicalize().unwrap();
let tests = compiletest::make_tests(&config);
let res = run_ui_toml_tests(&config, tests);
let res = run_tests(&config, tests);
match res {
Ok(true) => {},
Ok(false) => panic!("Some tests failed"),
Err(e) => {
println!("I/O failure during tests: {:?}", e);
panic!("I/O failure during tests: {:?}", e);
},
}
}
fn run_ui_cargo(config: &mut compiletest::Config) {
if cargo::is_rustc_test_suite() {
return;
}
fn run_tests(
config: &compiletest::Config,
filter: &Option<String>,
mut tests: Vec<tester::TestDescAndFn>,
) -> Result<bool, io::Error> {
let mut result = true;
let opts = compiletest::test_opts(config);
for dir in fs::read_dir(&config.src_base)? {
let dir = dir?;
if !dir.file_type()?.is_dir() {
continue;
}
// Use the filter if provided
let dir_path = dir.path();
match &filter {
Some(name) if !dir_path.ends_with(name) => continue,
_ => {},
}
for case in fs::read_dir(&dir_path)? {
let case = case?;
if !case.file_type()?.is_dir() {
continue;
}
let src_path = case.path().join("src");
env::set_current_dir(&src_path)?;
for file in fs::read_dir(&src_path)? {
let file = file?;
if file.file_type()?.is_dir() {
continue;
}
// Search for the main file to avoid running a test for each file in the project
let file_path = file.path();
match file_path.file_name().and_then(OsStr::to_str) {
Some("main.rs") => {},
_ => continue,
}
let paths = compiletest::common::TestPaths {
file: file_path,
base: config.src_base.clone(),
relative_dir: src_path.strip_prefix(&config.src_base).unwrap().into(),
};
let test_name = compiletest::make_test_name(&config, &paths);
let index = tests
.iter()
.position(|test| test.desc.name == test_name)
.expect("The test should be in there");
result &= tester::run_tests_console(&opts, vec![tests.swap_remove(index)])?;
}
}
}
Ok(result)
}
config.mode = TestMode::Ui;
config.src_base = Path::new("tests").join("ui-cargo").canonicalize().unwrap();
let tests = compiletest::make_tests(&config);
let current_dir = env::current_dir().unwrap();
let filter = env::var("TESTNAME").ok();
let res = run_tests(&config, &filter, tests);
env::set_current_dir(current_dir).unwrap();
match res {
Ok(true) => {},
Ok(false) => panic!("Some tests failed"),
Err(e) => {
panic!("I/O failure during tests: {:?}", e);
},
}
}
@ -165,4 +247,5 @@ fn compile_test() {
let mut config = default_config();
run_mode(&mut config);
run_ui_toml(&mut config);
run_ui_cargo(&mut config);
}

View File

@ -0,0 +1,4 @@
[package]
name = "cargo_common_metadata"
version = "0.1.0"
publish = false

View File

@ -0,0 +1,4 @@
// compile-flags: --crate-name=cargo_common_metadata
#![warn(clippy::cargo_common_metadata)]
fn main() {}

View File

@ -0,0 +1,18 @@
error: package `cargo_common_metadata` is missing `package.authors` metadata
|
= note: `-D clippy::cargo-common-metadata` implied by `-D warnings`
error: package `cargo_common_metadata` is missing `package.description` metadata
error: package `cargo_common_metadata` is missing `either package.license or package.license_file` metadata
error: package `cargo_common_metadata` is missing `package.repository` metadata
error: package `cargo_common_metadata` is missing `package.readme` metadata
error: package `cargo_common_metadata` is missing `package.keywords` metadata
error: package `cargo_common_metadata` is missing `package.categories` metadata
error: aborting due to 7 previous errors

View File

@ -0,0 +1,11 @@
[package]
name = "cargo_common_metadata"
version = "0.1.0"
publish = false
authors = ["Random person from the Internet <someone@someplace.org>"]
description = "A test package for the cargo_common_metadata lint"
repository = "https://github.com/someone/cargo_common_metadata"
readme = "README.md"
license = "MIT OR Apache-2.0"
keywords = ["metadata", "lint", "clippy"]
categories = ["development-tools::testing"]

View File

@ -0,0 +1,4 @@
// compile-flags: --crate-name=cargo_common_metadata
#![warn(clippy::cargo_common_metadata)]
fn main() {}

View File

@ -0,0 +1,17 @@
# Should not lint for dev or build dependencies. See issue 5041.
[package]
name = "multiple_crate_versions"
version = "0.1.0"
publish = false
# One of the versions of winapi is only a dev dependency: allowed
[dependencies]
ctrlc = "=3.1.0"
[dev-dependencies]
ansi_term = "=0.11.0"
# Both versions of winapi are a build dependency: allowed
[build-dependencies]
ctrlc = "=3.1.0"
ansi_term = "=0.11.0"

View File

@ -0,0 +1,4 @@
// compile-flags: --crate-name=multiple_crate_versions
#![warn(clippy::multiple_crate_versions)]
fn main() {}

View File

@ -0,0 +1,8 @@
[package]
name = "multiple_crate_versions"
version = "0.1.0"
publish = false
[dependencies]
ctrlc = "=3.1.0"
ansi_term = "=0.11.0"

View File

@ -0,0 +1,4 @@
// compile-flags: --crate-name=multiple_crate_versions
#![warn(clippy::multiple_crate_versions)]
fn main() {}

View File

@ -0,0 +1,6 @@
error: multiple versions for dependency `winapi`: 0.2.8, 0.3.8
|
= note: `-D clippy::multiple-crate-versions` implied by `-D warnings`
error: aborting due to previous error

View File

@ -0,0 +1,8 @@
[package]
name = "cargo_common_metadata"
version = "0.1.0"
publish = false
[dependencies]
regex = "1.3.7"
serde = "1.0.110"

View File

@ -0,0 +1,4 @@
// compile-flags: --crate-name=multiple_crate_versions
#![warn(clippy::multiple_crate_versions)]
fn main() {}

View File

@ -0,0 +1,18 @@
#!/bin/bash
#
# A script to update the references for all tests. The idea is that
# you do a run, which will generate files in the build directory
# containing the (normalized) actual output of the compiler. You then
# run this script, which will copy those files over. If you find
# yourself manually editing a foo.stderr file, you're doing it wrong.
#
# See also `update-references.sh` if you just want to update a single test.
if [[ "$1" == "--help" || "$1" == "-h" ]]; then
echo "usage: $0"
fi
BUILD_DIR=$PWD/target/debug/test_build_base
MY_DIR=$(dirname "$0")
cd "$MY_DIR" || exit
find . -name '*.rs' -exec ./update-references.sh "$BUILD_DIR" {} +

View File

@ -0,0 +1,38 @@
#!/bin/bash
# A script to update the references for particular tests. The idea is
# that you do a run, which will generate files in the build directory
# containing the (normalized) actual output of the compiler. This
# script will then copy that output and replace the "expected output"
# files. You can then commit the changes.
#
# If you find yourself manually editing a foo.stderr file, you're
# doing it wrong.
if [[ "$1" == "--help" || "$1" == "-h" || "$1" == "" || "$2" == "" ]]; then
echo "usage: $0 <build-directory> <relative-path-to-rs-files>"
echo ""
echo "For example:"
echo " $0 ../../../build/x86_64-apple-darwin/test/ui *.rs */*.rs"
fi
MYDIR=$(dirname "$0")
BUILD_DIR="$1"
shift
while [[ "$1" != "" ]]; do
STDERR_NAME="${1/%.rs/.stderr}"
STDOUT_NAME="${1/%.rs/.stdout}"
shift
if [[ -f "$BUILD_DIR"/"$STDOUT_NAME" ]] && \
! (cmp -s -- "$BUILD_DIR"/"$STDOUT_NAME" "$MYDIR"/"$STDOUT_NAME"); then
echo updating "$MYDIR"/"$STDOUT_NAME"
cp "$BUILD_DIR"/"$STDOUT_NAME" "$MYDIR"/"$STDOUT_NAME"
fi
if [[ -f "$BUILD_DIR"/"$STDERR_NAME" ]] && \
! (cmp -s -- "$BUILD_DIR"/"$STDERR_NAME" "$MYDIR"/"$STDERR_NAME"); then
echo updating "$MYDIR"/"$STDERR_NAME"
cp "$BUILD_DIR"/"$STDERR_NAME" "$MYDIR"/"$STDERR_NAME"
fi
done

View File

@ -0,0 +1,7 @@
[package]
name = "wildcard_dependencies"
version = "0.1.0"
publish = false
[dependencies]
regex = "*"

View File

@ -0,0 +1,4 @@
// compile-flags: --crate-name=wildcard_dependencies
#![warn(clippy::wildcard_dependencies)]
fn main() {}

View File

@ -0,0 +1,6 @@
error: wildcard dependency for `regex`
|
= note: `-D clippy::wildcard-dependencies` implied by `-D warnings`
error: aborting due to previous error

View File

@ -0,0 +1,7 @@
[package]
name = "wildcard_dependencies"
version = "0.1.0"
publish = false
[dependencies]
regex = "1"

View File

@ -0,0 +1,4 @@
// compile-flags: --crate-name=wildcard_dependencies
#![warn(clippy::wildcard_dependencies)]
fn main() {}

View File

@ -0,0 +1,37 @@
// no-prefer-dynamic
#![crate_type = "proc-macro"]
#![feature(repr128, proc_macro_hygiene, proc_macro_quote)]
#![allow(clippy::useless_conversion)]
extern crate proc_macro;
extern crate quote;
extern crate syn;
use proc_macro::TokenStream;
use quote::{quote, quote_spanned};
use syn::parse_macro_input;
use syn::{parse_quote, ItemTrait, TraitItem};
#[proc_macro_attribute]
pub fn fake_async_trait(_args: TokenStream, input: TokenStream) -> TokenStream {
let mut item = parse_macro_input!(input as ItemTrait);
for inner in &mut item.items {
if let TraitItem::Method(method) = inner {
let sig = &method.sig;
let block = &mut method.default;
if let Some(block) = block {
let brace = block.brace_token;
let my_block = quote_spanned!( brace.span => {
// Should not trigger `empty_line_after_outer_attr`
#[crate_type = "lib"]
#sig #block
Vec::new()
});
*block = parse_quote!(#my_block);
}
}
}
TokenStream::from(quote!(#item))
}

View File

@ -1,6 +1,6 @@
#![allow(clippy::all)]
#![warn(clippy::cognitive_complexity)]
#![allow(unused)]
#![allow(unused, unused_crate_dependencies)]
#[rustfmt::skip]
fn main() {

View File

@ -1,5 +1,5 @@
#![warn(clippy::cognitive_complexity)]
#![warn(unused)]
#![warn(unused, clippy::cognitive_complexity)]
#![allow(unused_crate_dependencies)]
fn main() {
kaboom();

View File

@ -1,8 +1,12 @@
// aux-build:proc_macro_attr.rs
#![warn(clippy::empty_line_after_outer_attr)]
#![allow(clippy::assertions_on_constants)]
#![feature(custom_inner_attributes)]
#![rustfmt::skip]
#[macro_use]
extern crate proc_macro_attr;
// This should produce a warning
#[crate_type = "lib"]
@ -93,4 +97,17 @@ pub struct S;
/* test */
pub struct T;
fn main() { }
// This should not produce a warning
// See https://github.com/rust-lang/rust-clippy/issues/5567
#[fake_async_trait]
pub trait Bazz {
fn foo() -> Vec<u8> {
let _i = "";
vec![]
}
}
fn main() {}

View File

@ -1,5 +1,5 @@
error: Found an empty line after an outer attribute. Perhaps you forgot to add a `!` to make it an inner attribute?
--> $DIR/empty_line_after_outer_attribute.rs:7:1
--> $DIR/empty_line_after_outer_attribute.rs:11:1
|
LL | / #[crate_type = "lib"]
LL | |
@ -10,7 +10,7 @@ LL | | fn with_one_newline_and_comment() { assert!(true) }
= note: `-D clippy::empty-line-after-outer-attr` implied by `-D warnings`
error: Found an empty line after an outer attribute. Perhaps you forgot to add a `!` to make it an inner attribute?
--> $DIR/empty_line_after_outer_attribute.rs:19:1
--> $DIR/empty_line_after_outer_attribute.rs:23:1
|
LL | / #[crate_type = "lib"]
LL | |
@ -18,7 +18,7 @@ LL | | fn with_one_newline() { assert!(true) }
| |_
error: Found an empty line after an outer attribute. Perhaps you forgot to add a `!` to make it an inner attribute?
--> $DIR/empty_line_after_outer_attribute.rs:24:1
--> $DIR/empty_line_after_outer_attribute.rs:28:1
|
LL | / #[crate_type = "lib"]
LL | |
@ -27,7 +27,7 @@ LL | | fn with_two_newlines() { assert!(true) }
| |_
error: Found an empty line after an outer attribute. Perhaps you forgot to add a `!` to make it an inner attribute?
--> $DIR/empty_line_after_outer_attribute.rs:31:1
--> $DIR/empty_line_after_outer_attribute.rs:35:1
|
LL | / #[crate_type = "lib"]
LL | |
@ -35,7 +35,7 @@ LL | | enum Baz {
| |_
error: Found an empty line after an outer attribute. Perhaps you forgot to add a `!` to make it an inner attribute?
--> $DIR/empty_line_after_outer_attribute.rs:39:1
--> $DIR/empty_line_after_outer_attribute.rs:43:1
|
LL | / #[crate_type = "lib"]
LL | |
@ -43,7 +43,7 @@ LL | | struct Foo {
| |_
error: Found an empty line after an outer attribute. Perhaps you forgot to add a `!` to make it an inner attribute?
--> $DIR/empty_line_after_outer_attribute.rs:47:1
--> $DIR/empty_line_after_outer_attribute.rs:51:1
|
LL | / #[crate_type = "lib"]
LL | |

View File

@ -47,17 +47,32 @@ error: future cannot be sent between threads safely
--> $DIR/future_not_send.rs:20:63
|
LL | async fn private_future2(rc: Rc<[u8]>, cell: &Cell<usize>) -> bool {
| ^^^^
| ^^^^ future returned by `private_future2` is not `Send`
|
note: captured value is not `Send`
--> $DIR/future_not_send.rs:20:26
|
LL | async fn private_future2(rc: Rc<[u8]>, cell: &Cell<usize>) -> bool {
| ^^ has type `std::rc::Rc<[u8]>` which is not `Send`
= note: `std::rc::Rc<[u8]>` doesn't implement `std::marker::Send`
note: captured value is not `Send`
--> $DIR/future_not_send.rs:20:40
|
LL | async fn private_future2(rc: Rc<[u8]>, cell: &Cell<usize>) -> bool {
| ^^^^ has type `&std::cell::Cell<usize>` which is not `Send`
= note: `std::cell::Cell<usize>` doesn't implement `std::marker::Sync`
error: future cannot be sent between threads safely
--> $DIR/future_not_send.rs:24:43
|
LL | pub async fn public_future2(rc: Rc<[u8]>) {}
| ^
| ^ future returned by `public_future2` is not `Send`
|
note: captured value is not `Send`
--> $DIR/future_not_send.rs:24:29
|
LL | pub async fn public_future2(rc: Rc<[u8]>) {}
| ^^ has type `std::rc::Rc<[u8]>` which is not `Send`
= note: `std::rc::Rc<[u8]>` doesn't implement `std::marker::Send`
error: future cannot be sent between threads safely
@ -117,8 +132,13 @@ error: future cannot be sent between threads safely
--> $DIR/future_not_send.rs:66:34
|
LL | async fn unclear_future<T>(t: T) {}
| ^
| ^ future returned by `unclear_future` is not `Send`
|
note: captured value is not `Send`
--> $DIR/future_not_send.rs:66:28
|
LL | async fn unclear_future<T>(t: T) {}
| ^ has type `T` which is not `Send`
= note: `T` doesn't implement `std::marker::Send`
error: aborting due to 8 previous errors

View File

@ -5,7 +5,7 @@ LL | Err(_) => panic!("err"),
| ^^^^^^
|
= note: `-D clippy::match-wild-err-arm` implied by `-D warnings`
= note: match each error separately or use the error output
= note: match each error separately or use the error output, or use `.except(msg)` if the error case is unreachable
error: `Err(_)` matches all errors
--> $DIR/match_wild_err_arm.rs:17:9
@ -13,7 +13,7 @@ error: `Err(_)` matches all errors
LL | Err(_) => panic!(),
| ^^^^^^
|
= note: match each error separately or use the error output
= note: match each error separately or use the error output, or use `.except(msg)` if the error case is unreachable
error: `Err(_)` matches all errors
--> $DIR/match_wild_err_arm.rs:23:9
@ -21,7 +21,7 @@ error: `Err(_)` matches all errors
LL | Err(_) => {
| ^^^^^^
|
= note: match each error separately or use the error output
= note: match each error separately or use the error output, or use `.except(msg)` if the error case is unreachable
error: `Err(_e)` matches all errors
--> $DIR/match_wild_err_arm.rs:31:9
@ -29,7 +29,7 @@ error: `Err(_e)` matches all errors
LL | Err(_e) => panic!(),
| ^^^^^^^
|
= note: match each error separately or use the error output
= note: match each error separately or use the error output, or use `.except(msg)` if the error case is unreachable
error: aborting due to 4 previous errors

View File

@ -0,0 +1,59 @@
// run-rustfix
#![warn(clippy::match_wildcard_for_single_variants)]
#![allow(dead_code)]
enum Foo {
A,
B,
C,
}
enum Color {
Red,
Green,
Blue,
Rgb(u8, u8, u8),
}
fn main() {
let f = Foo::A;
match f {
Foo::A => {},
Foo::B => {},
Foo::C => {},
}
let color = Color::Red;
// check exhaustive bindings
match color {
Color::Red => {},
Color::Green => {},
Color::Rgb(_r, _g, _b) => {},
Color::Blue => {},
}
// check exhaustive wild
match color {
Color::Red => {},
Color::Green => {},
Color::Rgb(..) => {},
Color::Blue => {},
}
match color {
Color::Red => {},
Color::Green => {},
Color::Rgb(_, _, _) => {},
Color::Blue => {},
}
// shouldn't lint as there is one missing variant
// and one that isn't exhaustively covered
match color {
Color::Red => {},
Color::Green => {},
Color::Rgb(255, _, _) => {},
_ => {},
}
}

View File

@ -0,0 +1,59 @@
// run-rustfix
#![warn(clippy::match_wildcard_for_single_variants)]
#![allow(dead_code)]
enum Foo {
A,
B,
C,
}
enum Color {
Red,
Green,
Blue,
Rgb(u8, u8, u8),
}
fn main() {
let f = Foo::A;
match f {
Foo::A => {},
Foo::B => {},
_ => {},
}
let color = Color::Red;
// check exhaustive bindings
match color {
Color::Red => {},
Color::Green => {},
Color::Rgb(_r, _g, _b) => {},
_ => {},
}
// check exhaustive wild
match color {
Color::Red => {},
Color::Green => {},
Color::Rgb(..) => {},
_ => {},
}
match color {
Color::Red => {},
Color::Green => {},
Color::Rgb(_, _, _) => {},
_ => {},
}
// shouldn't lint as there is one missing variant
// and one that isn't exhaustively covered
match color {
Color::Red => {},
Color::Green => {},
Color::Rgb(255, _, _) => {},
_ => {},
}
}

View File

@ -0,0 +1,28 @@
error: wildcard match will miss any future added variants
--> $DIR/match_wildcard_for_single_variants.rs:24:9
|
LL | _ => {},
| ^ help: try this: `Foo::C`
|
= note: `-D clippy::match-wildcard-for-single-variants` implied by `-D warnings`
error: wildcard match will miss any future added variants
--> $DIR/match_wildcard_for_single_variants.rs:34:9
|
LL | _ => {},
| ^ help: try this: `Color::Blue`
error: wildcard match will miss any future added variants
--> $DIR/match_wildcard_for_single_variants.rs:42:9
|
LL | _ => {},
| ^ help: try this: `Color::Blue`
error: wildcard match will miss any future added variants
--> $DIR/match_wildcard_for_single_variants.rs:48:9
|
LL | _ => {},
| ^ help: try this: `Color::Blue`
error: aborting due to 4 previous errors

View File

@ -148,4 +148,15 @@ impl AllowDerive {
}
}
pub struct NewNotEqualToDerive {
foo: i32,
}
impl NewNotEqualToDerive {
// This `new` implementation is not equal to a derived `Default`, so do not suggest deriving.
pub fn new() -> Self {
NewNotEqualToDerive { foo: 1 }
}
}
fn main() {}

View File

@ -1,4 +1,4 @@
error: you should consider deriving a `Default` implementation for `Foo`
error: you should consider adding a `Default` implementation for `Foo`
--> $DIR/new_without_default.rs:8:5
|
LL | / pub fn new() -> Foo {
@ -9,10 +9,14 @@ LL | | }
= note: `-D clippy::new-without-default` implied by `-D warnings`
help: try this
|
LL | #[derive(Default)]
LL | impl Default for Foo {
LL | fn default() -> Self {
LL | Self::new()
LL | }
LL | }
|
error: you should consider deriving a `Default` implementation for `Bar`
error: you should consider adding a `Default` implementation for `Bar`
--> $DIR/new_without_default.rs:16:5
|
LL | / pub fn new() -> Self {
@ -22,7 +26,11 @@ LL | | }
|
help: try this
|
LL | #[derive(Default)]
LL | impl Default for Bar {
LL | fn default() -> Self {
LL | Self::new()
LL | }
LL | }
|
error: you should consider adding a `Default` implementation for `LtKo<'c>`
@ -42,5 +50,22 @@ LL | }
LL | }
|
error: aborting due to 3 previous errors
error: you should consider adding a `Default` implementation for `NewNotEqualToDerive`
--> $DIR/new_without_default.rs:157:5
|
LL | / pub fn new() -> Self {
LL | | NewNotEqualToDerive { foo: 1 }
LL | | }
| |_____^
|
help: try this
|
LL | impl Default for NewNotEqualToDerive {
LL | fn default() -> Self {
LL | Self::new()
LL | }
LL | }
|
error: aborting due to 4 previous errors

View File

@ -60,3 +60,28 @@ fn main() {
// The lint allows this
let expr = Some(Some(true));
}
extern crate serde;
mod issue_4298 {
use serde::{Deserialize, Deserializer, Serialize};
use std::borrow::Cow;
#[derive(Serialize, Deserialize)]
struct Foo<'a> {
#[serde(deserialize_with = "func")]
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
#[serde(borrow)]
// FIXME: should not lint here
#[allow(clippy::option_option)]
foo: Option<Option<Cow<'a, str>>>,
}
#[allow(clippy::option_option)]
fn func<'a, D>(_: D) -> Result<Option<Option<Cow<'a, str>>>, D::Error>
where
D: Deserializer<'a>,
{
Ok(Some(Some(Cow::Borrowed("hi"))))
}
}

View File

@ -58,5 +58,11 @@ error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enu
LL | Struct { x: Option<Option<u8>> },
| ^^^^^^^^^^^^^^^^^^
error: aborting due to 9 previous errors
error: consider using `Option<T>` instead of `Option<Option<T>>` or a custom enum if you need to distinguish all 3 cases
--> $DIR/option_option.rs:77:14
|
LL | foo: Option<Option<Cow<'a, str>>>,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 10 previous errors

View File

@ -95,6 +95,15 @@ fn test_or_with_ctors() {
let b = "b".to_string();
let _ = Some(Bar("a".to_string(), Duration::from_secs(1)))
.or(Some(Bar(b, Duration::from_secs(2))));
let vec = vec!["foo"];
let _ = opt.ok_or(vec.len());
let array = ["foo"];
let _ = opt.ok_or(array.len());
let slice = &["foo"][..];
let _ = opt.ok_or(slice.len());
}
// Issue 4514 - early return

View File

@ -95,6 +95,15 @@ fn test_or_with_ctors() {
let b = "b".to_string();
let _ = Some(Bar("a".to_string(), Duration::from_secs(1)))
.or(Some(Bar(b, Duration::from_secs(2))));
let vec = vec!["foo"];
let _ = opt.ok_or(vec.len());
let array = ["foo"];
let _ = opt.ok_or(array.len());
let slice = &["foo"][..];
let _ = opt.ok_or(slice.len());
}
// Issue 4514 - early return

View File

@ -71,7 +71,6 @@ fn false_positive_capacity_too(x: &String) -> String {
#[allow(dead_code)]
fn test_cow_with_ref(c: &Cow<[i32]>) {}
#[allow(dead_code)]
fn test_cow(c: Cow<[i32]>) {
let _c = c;
}
@ -84,3 +83,34 @@ trait Foo2 {
impl Foo2 for String {
fn do_string(&self) {}
}
// Check that the allow attribute on parameters is honored
mod issue_5644 {
use std::borrow::Cow;
fn allowed(
#[allow(clippy::ptr_arg)] _v: &Vec<u32>,
#[allow(clippy::ptr_arg)] _s: &String,
#[allow(clippy::ptr_arg)] _c: &Cow<[i32]>,
) {
}
struct S {}
impl S {
fn allowed(
#[allow(clippy::ptr_arg)] _v: &Vec<u32>,
#[allow(clippy::ptr_arg)] _s: &String,
#[allow(clippy::ptr_arg)] _c: &Cow<[i32]>,
) {
}
}
trait T {
fn allowed(
#[allow(clippy::ptr_arg)] _v: &Vec<u32>,
#[allow(clippy::ptr_arg)] _s: &String,
#[allow(clippy::ptr_arg)] _c: &Cow<[i32]>,
) {
}
}
}

View File

@ -9,12 +9,12 @@ fn main() {
let offset_isize = 1_isize;
unsafe {
ptr.add(offset_usize);
ptr.offset(offset_isize as isize);
ptr.offset(offset_u8 as isize);
let _ = ptr.add(offset_usize);
let _ = ptr.offset(offset_isize as isize);
let _ = ptr.offset(offset_u8 as isize);
ptr.wrapping_add(offset_usize);
ptr.wrapping_offset(offset_isize as isize);
ptr.wrapping_offset(offset_u8 as isize);
let _ = ptr.wrapping_add(offset_usize);
let _ = ptr.wrapping_offset(offset_isize as isize);
let _ = ptr.wrapping_offset(offset_u8 as isize);
}
}

View File

@ -9,12 +9,12 @@ fn main() {
let offset_isize = 1_isize;
unsafe {
ptr.offset(offset_usize as isize);
ptr.offset(offset_isize as isize);
ptr.offset(offset_u8 as isize);
let _ = ptr.offset(offset_usize as isize);
let _ = ptr.offset(offset_isize as isize);
let _ = ptr.offset(offset_u8 as isize);
ptr.wrapping_offset(offset_usize as isize);
ptr.wrapping_offset(offset_isize as isize);
ptr.wrapping_offset(offset_u8 as isize);
let _ = ptr.wrapping_offset(offset_usize as isize);
let _ = ptr.wrapping_offset(offset_isize as isize);
let _ = ptr.wrapping_offset(offset_u8 as isize);
}
}

View File

@ -1,16 +1,16 @@
error: use of `offset` with a `usize` casted to an `isize`
--> $DIR/ptr_offset_with_cast.rs:12:9
--> $DIR/ptr_offset_with_cast.rs:12:17
|
LL | ptr.offset(offset_usize as isize);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `ptr.add(offset_usize)`
LL | let _ = ptr.offset(offset_usize as isize);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `ptr.add(offset_usize)`
|
= note: `-D clippy::ptr-offset-with-cast` implied by `-D warnings`
error: use of `wrapping_offset` with a `usize` casted to an `isize`
--> $DIR/ptr_offset_with_cast.rs:16:9
--> $DIR/ptr_offset_with_cast.rs:16:17
|
LL | ptr.wrapping_offset(offset_usize as isize);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `ptr.wrapping_add(offset_usize)`
LL | let _ = ptr.wrapping_offset(offset_usize as isize);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `ptr.wrapping_add(offset_usize)`
error: aborting due to 2 previous errors

View File

@ -4,18 +4,23 @@
const ANSWER: i32 = 42;
fn main() {
let arr = [1, 2, 3, 4, 5];
// These should be linted:
(21..=42).rev().for_each(|x| println!("{}", x));
let _ = (21..ANSWER).rev().filter(|x| x % 2 == 0).take(10).collect::<Vec<_>>();
for _ in (-42..=-21).rev() {}
for _ in (21u32..42u32).rev() {}
let _ = &[] as &[i32];
// These should be ignored as they are not empty ranges:
(21..=42).for_each(|x| println!("{}", x));
(21..42).for_each(|x| println!("{}", x));
let arr = [1, 2, 3, 4, 5];
let _ = &arr[1..=3];
let _ = &arr[1..3];

View File

@ -4,18 +4,23 @@
const ANSWER: i32 = 42;
fn main() {
let arr = [1, 2, 3, 4, 5];
// These should be linted:
(42..=21).for_each(|x| println!("{}", x));
let _ = (ANSWER..21).filter(|x| x % 2 == 0).take(10).collect::<Vec<_>>();
for _ in -21..=-42 {}
for _ in 42u32..21u32 {}
let _ = &arr[3..3];
// These should be ignored as they are not empty ranges:
(21..=42).for_each(|x| println!("{}", x));
(21..42).for_each(|x| println!("{}", x));
let arr = [1, 2, 3, 4, 5];
let _ = &arr[1..=3];
let _ = &arr[1..3];

View File

@ -1,5 +1,5 @@
error: this range is empty so it will yield no values
--> $DIR/reversed_empty_ranges_fixable.rs:7:5
--> $DIR/reversed_empty_ranges_fixable.rs:11:5
|
LL | (42..=21).for_each(|x| println!("{}", x));
| ^^^^^^^^^
@ -11,7 +11,7 @@ LL | (21..=42).rev().for_each(|x| println!("{}", x));
| ^^^^^^^^^^^^^^^
error: this range is empty so it will yield no values
--> $DIR/reversed_empty_ranges_fixable.rs:8:13
--> $DIR/reversed_empty_ranges_fixable.rs:12:13
|
LL | let _ = (ANSWER..21).filter(|x| x % 2 == 0).take(10).collect::<Vec<_>>();
| ^^^^^^^^^^^^
@ -22,7 +22,7 @@ LL | let _ = (21..ANSWER).rev().filter(|x| x % 2 == 0).take(10).collect::<Ve
| ^^^^^^^^^^^^^^^^^^
error: this range is empty so it will yield no values
--> $DIR/reversed_empty_ranges_fixable.rs:10:14
--> $DIR/reversed_empty_ranges_fixable.rs:14:14
|
LL | for _ in -21..=-42 {}
| ^^^^^^^^^
@ -33,7 +33,7 @@ LL | for _ in (-42..=-21).rev() {}
| ^^^^^^^^^^^^^^^^^
error: this range is empty so it will yield no values
--> $DIR/reversed_empty_ranges_fixable.rs:11:14
--> $DIR/reversed_empty_ranges_fixable.rs:15:14
|
LL | for _ in 42u32..21u32 {}
| ^^^^^^^^^^^^
@ -43,5 +43,11 @@ help: consider using the following if you are attempting to iterate over this ra
LL | for _ in (21u32..42u32).rev() {}
| ^^^^^^^^^^^^^^^^^^^^
error: aborting due to 4 previous errors
error: this range is empty and using it to index a slice will always yield an empty slice
--> $DIR/reversed_empty_ranges_fixable.rs:17:18
|
LL | let _ = &arr[3..3];
| ----^^^^- help: if you want an empty slice, use: `[] as &[i32]`
error: aborting due to 5 previous errors

View File

@ -9,7 +9,6 @@ fn main() {
let arr = [1, 2, 3, 4, 5];
let _ = &arr[3usize..=1usize];
let _ = &arr[SOME_NUM..1];
let _ = &arr[3..3];
for _ in ANSWER..ANSWER {}
}

View File

@ -18,17 +18,11 @@ error: this range is reversed and using it to index a slice will panic at run-ti
LL | let _ = &arr[SOME_NUM..1];
| ^^^^^^^^^^^
error: this range is empty and using it to index a slice will always yield an empty slice
--> $DIR/reversed_empty_ranges_unfixable.rs:12:18
|
LL | let _ = &arr[3..3];
| ^^^^
error: this range is empty so it will yield no values
--> $DIR/reversed_empty_ranges_unfixable.rs:14:14
--> $DIR/reversed_empty_ranges_unfixable.rs:13:14
|
LL | for _ in ANSWER..ANSWER {}
| ^^^^^^^^^^^^^^
error: aborting due to 5 previous errors
error: aborting due to 4 previous errors

View File

@ -1,4 +1,4 @@
error: useless conversion
error: useless conversion to the same type
--> $DIR/useless_conversion.rs:6:13
|
LL | let _ = T::from(val);
@ -10,55 +10,55 @@ note: the lint level is defined here
LL | #![deny(clippy::useless_conversion)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
error: useless conversion
error: useless conversion to the same type
--> $DIR/useless_conversion.rs:7:5
|
LL | val.into()
| ^^^^^^^^^^ help: consider removing `.into()`: `val`
error: useless conversion
error: useless conversion to the same type
--> $DIR/useless_conversion.rs:19:22
|
LL | let _: i32 = 0i32.into();
| ^^^^^^^^^^^ help: consider removing `.into()`: `0i32`
error: useless conversion
error: useless conversion to the same type
--> $DIR/useless_conversion.rs:51:21
|
LL | let _: String = "foo".to_string().into();
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into()`: `"foo".to_string()`
error: useless conversion
error: useless conversion to the same type
--> $DIR/useless_conversion.rs:52:21
|
LL | let _: String = From::from("foo".to_string());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `From::from()`: `"foo".to_string()`
error: useless conversion
error: useless conversion to the same type
--> $DIR/useless_conversion.rs:53:13
|
LL | let _ = String::from("foo".to_string());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `String::from()`: `"foo".to_string()`
error: useless conversion
error: useless conversion to the same type
--> $DIR/useless_conversion.rs:54:13
|
LL | let _ = String::from(format!("A: {:04}", 123));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `String::from()`: `format!("A: {:04}", 123)`
error: useless conversion
error: useless conversion to the same type
--> $DIR/useless_conversion.rs:55:13
|
LL | let _ = "".lines().into_iter();
| ^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `"".lines()`
error: useless conversion
error: useless conversion to the same type
--> $DIR/useless_conversion.rs:56:13
|
LL | let _ = vec![1, 2, 3].into_iter().into_iter();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider removing `.into_iter()`: `vec![1, 2, 3].into_iter()`
error: useless conversion
error: useless conversion to the same type
--> $DIR/useless_conversion.rs:57:21
|
LL | let _: String = format!("Hello {}", "world").into();

View File

@ -0,0 +1,42 @@
#![deny(clippy::useless_conversion)]
use std::convert::{TryFrom, TryInto};
fn test_generic<T: Copy>(val: T) -> T {
let _ = T::try_from(val).unwrap();
val.try_into().unwrap()
}
fn test_generic2<T: Copy + Into<i32> + Into<U>, U: From<T>>(val: T) {
// ok
let _: i32 = val.try_into().unwrap();
let _: U = val.try_into().unwrap();
let _ = U::try_from(val).unwrap();
}
fn main() {
test_generic(10i32);
test_generic2::<i32, i32>(10i32);
let _: String = "foo".try_into().unwrap();
let _: String = TryFrom::try_from("foo").unwrap();
let _ = String::try_from("foo").unwrap();
#[allow(clippy::useless_conversion)]
{
let _ = String::try_from("foo").unwrap();
let _: String = "foo".try_into().unwrap();
}
let _: String = "foo".to_string().try_into().unwrap();
let _: String = TryFrom::try_from("foo".to_string()).unwrap();
let _ = String::try_from("foo".to_string()).unwrap();
let _ = String::try_from(format!("A: {:04}", 123)).unwrap();
let _: String = format!("Hello {}", "world").try_into().unwrap();
let _: String = "".to_owned().try_into().unwrap();
let _: String = match String::from("_").try_into() {
Ok(a) => a,
Err(_) => "".into(),
};
// FIXME this is a false negative
#[allow(clippy::cmp_owned)]
if String::from("a") == TryInto::<String>::try_into(String::from("a")).unwrap() {}
}

View File

@ -0,0 +1,79 @@
error: useless conversion to the same type
--> $DIR/useless_conversion_try.rs:6:13
|
LL | let _ = T::try_from(val).unwrap();
| ^^^^^^^^^^^^^^^^
|
note: the lint level is defined here
--> $DIR/useless_conversion_try.rs:1:9
|
LL | #![deny(clippy::useless_conversion)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
= help: consider removing `T::try_from()`
error: useless conversion to the same type
--> $DIR/useless_conversion_try.rs:7:5
|
LL | val.try_into().unwrap()
| ^^^^^^^^^^^^^^
|
= help: consider removing `.try_into()`
error: useless conversion to the same type
--> $DIR/useless_conversion_try.rs:29:21
|
LL | let _: String = "foo".to_string().try_into().unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: consider removing `.try_into()`
error: useless conversion to the same type
--> $DIR/useless_conversion_try.rs:30:21
|
LL | let _: String = TryFrom::try_from("foo".to_string()).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: consider removing `TryFrom::try_from()`
error: useless conversion to the same type
--> $DIR/useless_conversion_try.rs:31:13
|
LL | let _ = String::try_from("foo".to_string()).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: consider removing `String::try_from()`
error: useless conversion to the same type
--> $DIR/useless_conversion_try.rs:32:13
|
LL | let _ = String::try_from(format!("A: {:04}", 123)).unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: consider removing `String::try_from()`
error: useless conversion to the same type
--> $DIR/useless_conversion_try.rs:33:21
|
LL | let _: String = format!("Hello {}", "world").try_into().unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: consider removing `.try_into()`
error: useless conversion to the same type
--> $DIR/useless_conversion_try.rs:34:21
|
LL | let _: String = "".to_owned().try_into().unwrap();
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: consider removing `.try_into()`
error: useless conversion to the same type
--> $DIR/useless_conversion_try.rs:35:27
|
LL | let _: String = match String::from("_").try_into() {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: consider removing `.try_into()`
error: aborting due to 9 previous errors

@ -1 +1 @@
Subproject commit 1cb7c09eb245454648bdecd61fa93bace3041b6d
Subproject commit 085f24b9ecbc0e90d204cab1c111c4abe4608ce0

View File

@ -22,6 +22,7 @@ features = [
"basetsd",
"consoleapi",
"errhandlingapi",
"fibersapi",
"ioapiset",
"jobapi",
"jobapi2",

@ -1 +1 @@
Subproject commit a5cb5d26833cfda6fa2ed35735448953f728bd5e
Subproject commit aedff61f7ac4fc2b287ff76d33f2584e1f63a3af