Rollup merge of #73178 - petrochenkov:explint, r=varkor
expand: More precise locations for expansion-time lints

First commit: a macro expansion doesn't have a `NodeId` associated with it, but it has a parent `DefId` which we can use for linting. The observable effect is that lints associated with macro expansions can now be `allow`ed at a finer-grained level than the whole crate.

Second commit: each macro definition has a `NodeId` which we can use for linting, unless that macro definition was decoded from another crate.
This commit is contained in: commit 657a41fe73
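To make the first commit's effect concrete, here is a minimal sketch of the finer-grained control it enables (not code from this PR; `render` and `included.rs` are hypothetical names): the expansion-time `incomplete_include` lint can now be allowed on the item that contains the macro call instead of only at the crate root.

    // Sketch only: previously silencing this lint required a crate-level
    // `#![allow(incomplete_include)]`; after this change the attribute can
    // sit directly on the enclosing item.
    #[allow(incomplete_include)]
    fn render() -> u32 {
        // `included.rs` is assumed to contain more than a single expression,
        // which is what triggers the expansion-time lint.
        include!("included.rs")
    }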
@@ -122,6 +122,7 @@ pub fn expand_include<'cx>(

     struct ExpandResult<'a> {
         p: Parser<'a>,
+        node_id: ast::NodeId,
     }
     impl<'a> base::MacResult for ExpandResult<'a> {
         fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
@@ -130,7 +131,7 @@ pub fn expand_include<'cx>(
                 self.p.sess.buffer_lint(
                     &INCOMPLETE_INCLUDE,
                     self.p.token.span,
-                    ast::CRATE_NODE_ID,
+                    self.node_id,
                     "include macro expected single expression in source",
                 );
             }
@@ -158,7 +159,7 @@ pub fn expand_include<'cx>(
         }
     }

-    Box::new(ExpandResult { p })
+    Box::new(ExpandResult { p, node_id: cx.resolver.lint_node_id(cx.current_expansion.id) })
 }

 // include_str! : read the given file, insert it as a literal string expr
@@ -915,6 +915,9 @@ pub trait Resolver

     fn check_unused_macros(&mut self);

+    /// Some parent node that is close enough to the given macro call.
+    fn lint_node_id(&mut self, expn_id: ExpnId) -> NodeId;
+
     fn has_derive_copy(&self, expn_id: ExpnId) -> bool;
     fn add_derive_copy(&mut self, expn_id: ExpnId);
     fn cfg_accessible(&mut self, expn_id: ExpnId, path: &ast::Path) -> Result<bool, Indeterminate>;
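For orientation, a hedged sketch of how a built-in macro expander can buffer an expansion-time lint against the new, more precise node id, mirroring the `expand_include` change above (`MY_LINT`, `span`, and the message are placeholders, not items from this PR):

    // Sketch: inside an expander with access to `cx: &mut ExtCtxt<'_>`.
    // Map the current expansion to the closest enclosing `NodeId` and buffer
    // the lint against it instead of `ast::CRATE_NODE_ID`.
    let node_id = cx.resolver.lint_node_id(cx.current_expansion.id);
    cx.parse_sess.buffer_lint(&MY_LINT, span, node_id, "placeholder message");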
@@ -106,7 +106,7 @@
 //! bound.
 use crate::mbe::{KleeneToken, TokenTree};

-use rustc_ast::ast::NodeId;
+use rustc_ast::ast::{NodeId, DUMMY_NODE_ID};
 use rustc_ast::token::{DelimToken, Token, TokenKind};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_session::lint::builtin::META_VARIABLE_MISUSE;
@@ -626,5 +626,8 @@ fn ops_is_prefix(
 }

 fn buffer_lint(sess: &ParseSess, span: MultiSpan, node_id: NodeId, message: &str) {
+    // Macros loaded from other crates have dummy node ids.
+    if node_id != DUMMY_NODE_ID {
         sess.buffer_lint(&META_VARIABLE_MISUSE, span, node_id, message);
+    }
 }
@@ -383,7 +383,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
                 }
             }
             TokenTree::MetaVarDecl(span, _, id) if id.name == kw::Invalid => {
-                if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
+                if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() {
                     return Err((span, "missing fragment specifier".to_string()));
                 }
             }
@@ -566,7 +566,7 @@ fn inner_parse_loop<'root, 'tt>(

             // We need to match a metavar (but the identifier is invalid)... this is an error
             TokenTree::MetaVarDecl(span, _, id) if id.name == kw::Invalid => {
-                if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
+                if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() {
                     return Error(span, "missing fragment specifier".to_string());
                 }
             }
@@ -474,7 +474,9 @@ pub fn compile_declarative_macro(
         .map(|m| {
             if let MatchedNonterminal(ref nt) = *m {
                 if let NtTT(ref tt) = **nt {
-                    let tt = mbe::quoted::parse(tt.clone().into(), true, sess).pop().unwrap();
+                    let tt = mbe::quoted::parse(tt.clone().into(), true, sess, def.id)
+                        .pop()
+                        .unwrap();
                     valid &= check_lhs_nt_follows(sess, features, &def.attrs, &tt);
                     return tt;
                 }
@@ -491,7 +493,9 @@ pub fn compile_declarative_macro(
         .map(|m| {
             if let MatchedNonterminal(ref nt) = *m {
                 if let NtTT(ref tt) = **nt {
-                    return mbe::quoted::parse(tt.clone().into(), false, sess).pop().unwrap();
+                    return mbe::quoted::parse(tt.clone().into(), false, sess, def.id)
+                        .pop()
+                        .unwrap();
                 }
             }
             sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
@@ -509,9 +513,7 @@ pub fn compile_declarative_macro(
         valid &= check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
     }

-    // We use CRATE_NODE_ID instead of `def.id` otherwise we may emit buffered lints for a node id
-    // that is not lint-checked and trigger the "failed to process buffered lint here" bug.
-    valid &= macro_check::check_meta_variables(sess, ast::CRATE_NODE_ID, def.span, &lhses, &rhses);
+    valid &= macro_check::check_meta_variables(sess, def.id, def.span, &lhses, &rhses);

     let (transparency, transparency_error) = attr::find_transparency(&def.attrs, macro_rules);
     match transparency_error {
@@ -1,6 +1,7 @@
 use crate::mbe::macro_parser;
 use crate::mbe::{Delimited, KleeneOp, KleeneToken, SequenceRepetition, TokenTree};

+use rustc_ast::ast::{NodeId, DUMMY_NODE_ID};
 use rustc_ast::token::{self, Token};
 use rustc_ast::tokenstream;
 use rustc_ast_pretty::pprust;
@@ -36,6 +37,7 @@ pub(super) fn parse(
     input: tokenstream::TokenStream,
     expect_matchers: bool,
     sess: &ParseSess,
+    node_id: NodeId,
 ) -> Vec<TokenTree> {
     // Will contain the final collection of `self::TokenTree`
     let mut result = Vec::new();
@@ -46,7 +48,7 @@ pub(super) fn parse(
     while let Some(tree) = trees.next() {
         // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
         // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
-        let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
+        let tree = parse_tree(tree, &mut trees, expect_matchers, sess, node_id);
         match tree {
             TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {
@@ -65,7 +67,10 @@ pub(super) fn parse(
                     }
                     tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
                 };
-                sess.missing_fragment_specifiers.borrow_mut().insert(span);
+                if node_id != DUMMY_NODE_ID {
+                    // Macros loaded from other crates have dummy node ids.
+                    sess.missing_fragment_specifiers.borrow_mut().insert(span, node_id);
+                }
                 result.push(TokenTree::MetaVarDecl(span, ident, Ident::invalid()));
             }

@@ -96,6 +101,7 @@ fn parse_tree(
     trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
     expect_matchers: bool,
     sess: &ParseSess,
+    node_id: NodeId,
 ) -> TokenTree {
     // Depending on what `tree` is, we could be parsing different parts of a macro
     match tree {
@@ -111,7 +117,7 @@ fn parse_tree(
                 sess.span_diagnostic.span_err(span.entire(), &msg);
             }
             // Parse the contents of the sequence itself
-            let sequence = parse(tts, expect_matchers, sess);
+            let sequence = parse(tts, expect_matchers, sess, node_id);
             // Get the Kleene operator and optional separator
             let (separator, kleene) = parse_sep_and_kleene_op(trees, span.entire(), sess);
             // Count the number of captured "names" (i.e., named metavars)
@@ -158,7 +164,7 @@ fn parse_tree(
         // descend into the delimited set and further parse it.
         tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
             span,
-            Lrc::new(Delimited { delim, tts: parse(tts, expect_matchers, sess) }),
+            Lrc::new(Delimited { delim, tts: parse(tts, expect_matchers, sess, node_id) }),
         ),
     }
 }
@@ -519,6 +519,12 @@ impl Definitions {
         let old_index = self.placeholder_field_indices.insert(node_id, index);
         assert!(old_index.is_none(), "placeholder field index is reset for a node ID");
     }
+
+    pub fn lint_node_id(&mut self, expn_id: ExpnId) -> ast::NodeId {
+        self.invocation_parents
+            .get(&expn_id)
+            .map_or(ast::CRATE_NODE_ID, |id| self.def_id_to_node_id[*id])
+    }
 }

 impl DefPathData {
@@ -307,16 +307,21 @@ fn configure_and_expand_inner<'a>(
         ecx.check_unused_macros();
     });

-    let mut missing_fragment_specifiers: Vec<_> =
-        ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect();
-    missing_fragment_specifiers.sort();
+    let mut missing_fragment_specifiers: Vec<_> = ecx
+        .parse_sess
+        .missing_fragment_specifiers
+        .borrow()
+        .iter()
+        .map(|(span, node_id)| (*span, *node_id))
+        .collect();
+    missing_fragment_specifiers.sort_unstable_by_key(|(span, _)| *span);

     let recursion_limit_hit = ecx.reduced_recursion_limit.is_some();

-    for span in missing_fragment_specifiers {
+    for (span, node_id) in missing_fragment_specifiers {
         let lint = lint::builtin::MISSING_FRAGMENT_SPECIFIER;
         let msg = "missing fragment specifier";
-        resolver.lint_buffer().buffer_lint(lint, ast::CRATE_NODE_ID, span, msg);
+        resolver.lint_buffer().buffer_lint(lint, node_id, span, msg);
     }
     if cfg!(windows) {
         env::set_var("PATH", &old_path);
@@ -288,7 +288,8 @@ impl<'a> base::Resolver for Resolver<'a> {

         // Derives are not included when `invocations` are collected, so we have to add them here.
         let parent_scope = &ParentScope { derives, ..parent_scope };
-        let (ext, res) = self.smart_resolve_macro_path(path, kind, parent_scope, force)?;
+        let node_id = self.lint_node_id(eager_expansion_root);
+        let (ext, res) = self.smart_resolve_macro_path(path, kind, parent_scope, node_id, force)?;

         let span = invoc.span();
         invoc_id.set_expn_data(ext.expn_data(
@@ -338,6 +339,10 @@ impl<'a> base::Resolver for Resolver<'a> {
         }
     }

+    fn lint_node_id(&mut self, expn_id: ExpnId) -> NodeId {
+        self.definitions.lint_node_id(expn_id)
+    }
+
     fn has_derive_copy(&self, expn_id: ExpnId) -> bool {
         self.containers_deriving_copy.contains(&expn_id)
     }
@@ -390,6 +395,7 @@ impl<'a> Resolver<'a> {
         path: &ast::Path,
         kind: MacroKind,
         parent_scope: &ParentScope<'a>,
+        node_id: NodeId,
         force: bool,
     ) -> Result<(Lrc<SyntaxExtension>, Res), Indeterminate> {
         let (ext, res) = match self.resolve_macro_path(path, Some(kind), parent_scope, true, force)
@@ -430,7 +436,7 @@ impl<'a> Resolver<'a> {
             _ => panic!("expected `DefKind::Macro` or `Res::NonMacroAttr`"),
         };

-        self.check_stability_and_deprecation(&ext, path);
+        self.check_stability_and_deprecation(&ext, path, node_id);

         Ok(if ext.macro_kind() != kind {
             let expected = kind.descr_expected();
@@ -984,13 +990,17 @@ impl<'a> Resolver<'a> {
         }
     }

-    fn check_stability_and_deprecation(&mut self, ext: &SyntaxExtension, path: &ast::Path) {
+    fn check_stability_and_deprecation(
+        &mut self,
+        ext: &SyntaxExtension,
+        path: &ast::Path,
+        node_id: NodeId,
+    ) {
         let span = path.span;
         if let Some(stability) = &ext.stability {
             if let StabilityLevel::Unstable { reason, issue, is_soft } = stability.level {
                 let feature = stability.feature;
                 if !self.active_features.contains(&feature) && !span.allows_unstable(feature) {
-                    let node_id = ast::CRATE_NODE_ID;
                     let lint_buffer = &mut self.lint_buffer;
                     let soft_handler =
                         |lint, span, msg: &_| lint_buffer.buffer_lint(lint, node_id, span, msg);
@@ -606,6 +606,7 @@ declare_lint_pass! {
         INLINE_NO_SANITIZE,
         ASM_SUB_REGISTER,
         UNSAFE_OP_IN_UNSAFE_FN,
+        INCOMPLETE_INCLUDE,
     ]
 }

@@ -119,7 +119,7 @@ pub struct ParseSess {
     pub unstable_features: UnstableFeatures,
     pub config: CrateConfig,
     pub edition: Edition,
-    pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
+    pub missing_fragment_specifiers: Lock<FxHashMap<Span, NodeId>>,
     /// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
     pub raw_identifier_spans: Lock<Vec<Span>>,
     /// Used to determine and report recursive module inclusions.
@@ -150,7 +150,7 @@ impl ParseSess {
             unstable_features: UnstableFeatures::from_environment(),
             config: FxHashSet::default(),
             edition: ExpnId::root().expn_data().edition,
-            missing_fragment_specifiers: Lock::new(FxHashSet::default()),
+            missing_fragment_specifiers: Default::default(),
             raw_identifier_spans: Lock::new(Vec::new()),
             included_mod_stack: Lock::new(vec![]),
             source_map,
src/test/ui/lint/expansion-time-include.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
+// ignore-test auxiliary file for expansion-time.rs
+
+1
+2
src/test/ui/lint/expansion-time.rs (new file, 23 lines)
@@ -0,0 +1,23 @@
+// check-pass
+
+#[warn(meta_variable_misuse)]
+macro_rules! foo {
+    ( $($i:ident)* ) => { $($i)+ }; //~ WARN meta-variable repeats with different Kleene operator
+}
+
+#[warn(missing_fragment_specifier)]
+macro_rules! m { ($i) => {} } //~ WARN missing fragment specifier
+                              //~| WARN this was previously accepted
+
+#[warn(soft_unstable)]
+mod benches {
+    #[bench] //~ WARN use of unstable library feature 'test'
+             //~| WARN this was previously accepted
+    fn foo() {}
+}
+
+#[warn(incomplete_include)]
+fn main() {
+    // WARN see in the stderr file, the warning points to the included file.
+    include!("expansion-time-include.rs");
+}
src/test/ui/lint/expansion-time.stderr (new file, 56 lines)
@@ -0,0 +1,56 @@
+warning: meta-variable repeats with different Kleene operator
+  --> $DIR/expansion-time.rs:5:29
+   |
+LL |     ( $($i:ident)* ) => { $($i)+ };
+   |                  -          ^^ - conflicting repetition
+   |                  |
+   |                  expected repetition
+   |
+note: the lint level is defined here
+  --> $DIR/expansion-time.rs:3:8
+   |
+LL | #[warn(meta_variable_misuse)]
+   |        ^^^^^^^^^^^^^^^^^^^^
+
+warning: missing fragment specifier
+  --> $DIR/expansion-time.rs:9:19
+   |
+LL | macro_rules! m { ($i) => {} }
+   |                   ^^
+   |
+note: the lint level is defined here
+  --> $DIR/expansion-time.rs:8:8
+   |
+LL | #[warn(missing_fragment_specifier)]
+   |        ^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #40107 <https://github.com/rust-lang/rust/issues/40107>
+
+warning: use of unstable library feature 'test': `bench` is a part of custom test frameworks which are unstable
+  --> $DIR/expansion-time.rs:14:7
+   |
+LL |     #[bench]
+   |       ^^^^^
+   |
+note: the lint level is defined here
+  --> $DIR/expansion-time.rs:12:8
+   |
+LL | #[warn(soft_unstable)]
+   |        ^^^^^^^^^^^^^
+   = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+   = note: for more information, see issue #64266 <https://github.com/rust-lang/rust/issues/64266>
+
+warning: include macro expected single expression in source
+  --> $DIR/expansion-time-include.rs:4:1
+   |
+LL | 2
+   | ^
+   |
+note: the lint level is defined here
+  --> $DIR/expansion-time.rs:19:8
+   |
+LL | #[warn(incomplete_include)]
+   |        ^^^^^^^^^^^^^^^^^^
+
+warning: 4 warnings emitted
+