Properly handle attributes on statements

We now collect tokens for the underlying node wrapped by `StmtKind`
instead of storing tokens directly in `Stmt`.
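
As a quick illustration, here is a self-contained, simplified model of that change (not rustc's real types; the accessor names mirror the `tokens`/`set_tokens` methods added to `Stmt` in the diff below):

    // Simplified model: tokens live on the wrapped node, and `Stmt` only
    // delegates to them through accessors instead of owning them itself.
    struct Tokens(String);

    struct Item { tokens: Option<Tokens> }

    enum StmtKind {
        Item(Item),
        Empty,
    }

    struct Stmt { kind: StmtKind }

    impl Stmt {
        fn tokens(&self) -> Option<&Tokens> {
            match &self.kind {
                StmtKind::Item(item) => item.tokens.as_ref(),
                StmtKind::Empty => None,
            }
        }
        fn set_tokens(&mut self, tokens: Option<Tokens>) {
            match &mut self.kind {
                StmtKind::Item(item) => item.tokens = tokens,
                StmtKind::Empty => {}
            }
        }
    }

    fn main() {
        let mut stmt = Stmt { kind: StmtKind::Item(Item { tokens: None }) };
        // Callers no longer read or write a `tokens` field on `Stmt` directly.
        if stmt.tokens().is_none() {
            stmt.set_tokens(Some(Tokens("struct MyStruct ;".to_string())));
        }
        assert!(stmt.tokens().is_some());
    }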

`LazyTokenStream` now supports capturing a trailing semicolon after it
is initially constructed. This allows us to avoid refactoring statement
parsing to wrap the parsing of the semicolon in `parse_tokens`.
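
A rough, self-contained model of that mechanism (not the real `LazyTokenStream`; the actual implementation is the flag-based `LazyTokenStreamImpl` change further down): token capture is frozen once the parsed node is built, so the trailing semicolon is recorded as a flag and only appended when the stream is materialized.

    // Toy stand-in for a lazily materialized token stream that can be told,
    // after the fact, that one trailing `;` should also be included.
    #[derive(Clone)]
    struct LazyTokens {
        captured: Vec<String>,
        trailing_semi: bool,
    }

    impl LazyTokens {
        fn add_trailing_semi(&self) -> LazyTokens {
            // Mirrors the flag-setting approach in `LazyTokenStreamImpl` below.
            let mut new = self.clone();
            new.trailing_semi = true;
            new
        }

        fn create_token_stream(&self) -> Vec<String> {
            let mut tokens = self.captured.clone();
            if self.trailing_semi {
                tokens.push(";".to_string());
            }
            tokens
        }
    }

    fn main() {
        let captured = LazyTokens {
            captured: vec!["struct".into(), "MyStruct".into()],
            trailing_semi: false,
        };
        // The parser eats the real `;` from the input, then records it here.
        let with_semi = captured.add_trailing_semi();
        assert_eq!(with_semi.create_token_stream().last().map(String::as_str), Some(";"));
    }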

Attributes on item statements
(e.g. `fn foo() { #[bar] struct MyStruct; }`) are now treated as
item attributes, not statement attributes, which is consistent with how
we handle attributes on other kinds of statements. The feature-gating
code is adjusted so that proc-macro attributes are still allowed on item
statements on stable.
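
For a concrete, compilable illustration of the position in question (shown here with the built-in `allow` attribute; the feature-gating change keeps the same placement working for proc-macro attributes on stable):

    fn foo() {
        // An attribute on an item *statement*: it is now recorded as an
        // attribute of the item `MyStruct` itself, not of the enclosing
        // statement, matching how attributes on other statement kinds are
        // handled.
        #[allow(unused)]
        struct MyStruct;
    }

    fn main() {
        foo();
    }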

Two built-in macros (`#[global_allocator]` and `#[test]`) needed to be
adjusted to support being passed `Annotatable::Stmt`.
Aaron Hill 2020-11-17 14:27:44 -05:00
parent 72da5a9d85
commit de88bf148b
20 changed files with 485 additions and 187 deletions

View File

@ -901,10 +901,39 @@ pub struct Stmt {
pub id: NodeId,
pub kind: StmtKind,
pub span: Span,
pub tokens: Option<LazyTokenStream>,
}
impl Stmt {
pub fn tokens(&self) -> Option<&LazyTokenStream> {
match self.kind {
StmtKind::Local(ref local) => local.tokens.as_ref(),
StmtKind::Item(ref item) => item.tokens.as_ref(),
StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => expr.tokens.as_ref(),
StmtKind::Empty => None,
StmtKind::MacCall(ref mac) => mac.tokens.as_ref(),
}
}
pub fn tokens_mut(&mut self) -> Option<&mut LazyTokenStream> {
match self.kind {
StmtKind::Local(ref mut local) => local.tokens.as_mut(),
StmtKind::Item(ref mut item) => item.tokens.as_mut(),
StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens.as_mut(),
StmtKind::Empty => None,
StmtKind::MacCall(ref mut mac) => mac.tokens.as_mut(),
}
}
pub fn set_tokens(&mut self, tokens: Option<LazyTokenStream>) {
match self.kind {
StmtKind::Local(ref mut local) => local.tokens = tokens,
StmtKind::Item(ref mut item) => item.tokens = tokens,
StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens = tokens,
StmtKind::Empty => {}
StmtKind::MacCall(ref mut mac) => mac.tokens = tokens,
}
}
pub fn has_trailing_semicolon(&self) -> bool {
match &self.kind {
StmtKind::Semi(_) => true,
@ -912,18 +941,25 @@ impl Stmt {
_ => false,
}
}
/// Converts a parsed `Stmt` to a `Stmt` with
/// a trailing semicolon.
///
/// This only modifies the parsed AST struct, not the attached
/// `LazyTokenStream`. The parser is responsible for calling
/// `CreateTokenStream::add_trailing_semi` when there is actually
/// a semicolon in the tokenstream.
pub fn add_trailing_semicolon(mut self) -> Self {
self.kind = match self.kind {
StmtKind::Expr(expr) => StmtKind::Semi(expr),
StmtKind::MacCall(mac) => {
StmtKind::MacCall(mac.map(|MacCallStmt { mac, style: _, attrs }| MacCallStmt {
mac,
style: MacStmtStyle::Semicolon,
attrs,
StmtKind::MacCall(mac.map(|MacCallStmt { mac, style: _, attrs, tokens }| {
MacCallStmt { mac, style: MacStmtStyle::Semicolon, attrs, tokens }
}))
}
kind => kind,
};
self
}
@ -963,6 +999,7 @@ pub struct MacCallStmt {
pub mac: MacCall,
pub style: MacStmtStyle,
pub attrs: AttrVec,
pub tokens: Option<LazyTokenStream>,
}
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug)]
@ -988,6 +1025,7 @@ pub struct Local {
pub init: Option<P<Expr>>,
pub span: Span,
pub attrs: AttrVec,
pub tokens: Option<LazyTokenStream>,
}
/// An arm of a 'match'.

View File

@ -576,13 +576,14 @@ pub fn noop_visit_parenthesized_parameter_data<T: MutVisitor>(
}
pub fn noop_visit_local<T: MutVisitor>(local: &mut P<Local>, vis: &mut T) {
let Local { id, pat, ty, init, span, attrs } = local.deref_mut();
let Local { id, pat, ty, init, span, attrs, tokens } = local.deref_mut();
vis.visit_id(id);
vis.visit_pat(pat);
visit_opt(ty, |ty| vis.visit_ty(ty));
visit_opt(init, |init| vis.visit_expr(init));
vis.visit_span(span);
visit_thin_attrs(attrs, vis);
visit_lazy_tts(tokens, vis);
}
pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
@ -1325,16 +1326,12 @@ pub fn noop_filter_map_expr<T: MutVisitor>(mut e: P<Expr>, vis: &mut T) -> Optio
}
pub fn noop_flat_map_stmt<T: MutVisitor>(
Stmt { kind, mut span, mut id, mut tokens }: Stmt,
Stmt { kind, mut span, mut id }: Stmt,
vis: &mut T,
) -> SmallVec<[Stmt; 1]> {
vis.visit_id(&mut id);
vis.visit_span(&mut span);
visit_lazy_tts(&mut tokens, vis);
noop_flat_map_stmt_kind(kind, vis)
.into_iter()
.map(|kind| Stmt { id, kind, span, tokens: tokens.clone() })
.collect()
noop_flat_map_stmt_kind(kind, vis).into_iter().map(|kind| Stmt { id, kind, span }).collect()
}
pub fn noop_flat_map_stmt_kind<T: MutVisitor>(
@ -1351,9 +1348,10 @@ pub fn noop_flat_map_stmt_kind<T: MutVisitor>(
StmtKind::Semi(expr) => vis.filter_map_expr(expr).into_iter().map(StmtKind::Semi).collect(),
StmtKind::Empty => smallvec![StmtKind::Empty],
StmtKind::MacCall(mut mac) => {
let MacCallStmt { mac: mac_, style: _, attrs } = mac.deref_mut();
let MacCallStmt { mac: mac_, style: _, attrs, tokens } = mac.deref_mut();
vis.visit_mac_call(mac_);
visit_thin_attrs(attrs, vis);
visit_lazy_tts(tokens, vis);
smallvec![StmtKind::MacCall(mac)]
}
}

View File

@ -121,10 +121,14 @@ where
}
pub trait CreateTokenStream: sync::Send + sync::Sync {
fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream>;
fn create_token_stream(&self) -> TokenStream;
}
impl CreateTokenStream for TokenStream {
fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
panic!("Cannot call `add_trailing_semi` on a `TokenStream`!");
}
fn create_token_stream(&self) -> TokenStream {
self.clone()
}
@ -141,6 +145,13 @@ impl LazyTokenStream {
LazyTokenStream(Lrc::new(Box::new(inner)))
}
/// Extends the captured stream by one token,
/// which must be a trailing semicolon. This
/// affects the `TokenStream` created by `make_tokenstream`.
pub fn add_trailing_semi(&self) -> LazyTokenStream {
LazyTokenStream(Lrc::new(self.0.add_trailing_semi()))
}
pub fn create_token_stream(&self) -> TokenStream {
self.0.create_token_stream()
}

View File

@ -686,7 +686,7 @@ pub fn walk_stmt<'a, V: Visitor<'a>>(visitor: &mut V, statement: &'a Stmt) {
StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => visitor.visit_expr(expr),
StmtKind::Empty => {}
StmtKind::MacCall(ref mac) => {
let MacCallStmt { ref mac, style: _, ref attrs } = **mac;
let MacCallStmt { ref mac, style: _, ref attrs, tokens: _ } = **mac;
visitor.visit_mac_call(mac);
for attr in attrs.iter() {
visitor.visit_attribute(attr);

View File

@ -132,6 +132,7 @@ fn stmt_let_underscore(cx: &mut ExtCtxt<'_>, sp: Span, expr: P<ast::Expr>) -> as
id: ast::DUMMY_NODE_ID,
span: sp,
attrs: ast::AttrVec::new(),
tokens: None,
});
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp, tokens: None }
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp }
}

View File

@ -64,7 +64,6 @@ impl MultiItemModifier for BuiltinDerive {
id: ast::DUMMY_NODE_ID,
kind: ast::StmtKind::Item(a.expect_item()),
span,
tokens: None,
})));
});
} else {

View File

@ -374,7 +374,6 @@ macro_rules! make_stmts_default {
id: ast::DUMMY_NODE_ID,
span: e.span,
kind: ast::StmtKind::Expr(e),
tokens: None
}]
})
};
@ -617,7 +616,6 @@ impl MacResult for DummyResult {
id: ast::DUMMY_NODE_ID,
kind: ast::StmtKind::Expr(DummyResult::raw_expr(self.span, self.is_error)),
span: self.span,
tokens: None
}])
}

View File

@ -140,12 +140,7 @@ impl<'a> ExtCtxt<'a> {
}
pub fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt {
ast::Stmt {
id: ast::DUMMY_NODE_ID,
span: expr.span,
kind: ast::StmtKind::Expr(expr),
tokens: None,
}
ast::Stmt { id: ast::DUMMY_NODE_ID, span: expr.span, kind: ast::StmtKind::Expr(expr) }
}
pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: Ident, ex: P<ast::Expr>) -> ast::Stmt {
@ -162,13 +157,9 @@ impl<'a> ExtCtxt<'a> {
id: ast::DUMMY_NODE_ID,
span: sp,
attrs: AttrVec::new(),
});
ast::Stmt {
id: ast::DUMMY_NODE_ID,
kind: ast::StmtKind::Local(local),
span: sp,
tokens: None,
}
});
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp }
}
// Generates `let _: Type;`, which is usually used for type assertions.
@ -180,17 +171,13 @@ impl<'a> ExtCtxt<'a> {
id: ast::DUMMY_NODE_ID,
span,
attrs: AttrVec::new(),
tokens: None,
});
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span, tokens: None }
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span }
}
pub fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> ast::Stmt {
ast::Stmt {
id: ast::DUMMY_NODE_ID,
kind: ast::StmtKind::Item(item),
span: sp,
tokens: None,
}
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Item(item), span: sp }
}
pub fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block> {
@ -200,7 +187,6 @@ impl<'a> ExtCtxt<'a> {
id: ast::DUMMY_NODE_ID,
span: expr.span,
kind: ast::StmtKind::Expr(expr),
tokens: None,
}],
)
}

View File

@ -1274,12 +1274,6 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
// we'll expand attributes on expressions separately
if !stmt.is_expr() {
let attr = if stmt.is_item() {
// FIXME: Implement proper token collection for statements
if let StmtKind::Item(item) = &mut stmt.kind {
stmt.tokens = item.tokens.take()
} else {
unreachable!()
};
self.take_first_attr(&mut stmt)
} else {
// Ignore derives on non-item statements for backwards compatibility.
@ -1295,7 +1289,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
}
if let StmtKind::MacCall(mac) = stmt.kind {
let MacCallStmt { mac, style, attrs } = mac.into_inner();
let MacCallStmt { mac, style, attrs, tokens: _ } = mac.into_inner();
self.check_attributes(&attrs);
let mut placeholder =
self.collect_bang(mac, stmt.span, AstFragmentKind::Stmts).make_stmts();
@ -1312,10 +1306,10 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
}
// The placeholder expander gives ids to statements, so we avoid folding the id here.
let ast::Stmt { id, kind, span, tokens } = stmt;
let ast::Stmt { id, kind, span } = stmt;
noop_flat_map_stmt_kind(kind, self)
.into_iter()
.map(|kind| ast::Stmt { id, kind, span, tokens: tokens.clone() })
.map(|kind| ast::Stmt { id, kind, span })
.collect()
}

View File

@ -104,8 +104,9 @@ pub fn placeholder(
mac: mac_placeholder(),
style: ast::MacStmtStyle::Braces,
attrs: ast::AttrVec::new(),
tokens: None,
});
ast::Stmt { id, span, kind: ast::StmtKind::MacCall(mac), tokens: None }
ast::Stmt { id, span, kind: ast::StmtKind::MacCall(mac) }
}]),
AstFragmentKind::Arms => AstFragment::Arms(smallvec![ast::Arm {
attrs: Default::default(),
@ -331,12 +332,8 @@ impl<'a, 'b> MutVisitor for PlaceholderExpander<'a, 'b> {
// FIXME: We will need to preserve the original semicolon token and
// span as part of #15701
let empty_stmt = ast::Stmt {
id: ast::DUMMY_NODE_ID,
kind: ast::StmtKind::Empty,
span: DUMMY_SP,
tokens: None,
};
let empty_stmt =
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Empty, span: DUMMY_SP };
if let Some(stmt) = stmts.pop() {
if stmt.has_trailing_semicolon() {

View File

@ -810,7 +810,6 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a, '_> {
id: resolver.next_node_id(),
kind: ast::StmtKind::Expr(expr),
span: rustc_span::DUMMY_SP,
tokens: None,
}
}
@ -827,7 +826,6 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a, '_> {
id: self.resolver.next_node_id(),
span: rustc_span::DUMMY_SP,
kind: ast::StmtKind::Expr(loop_expr),
tokens: None,
};
if self.within_static_or_const {

View File

@ -6,6 +6,7 @@
#![feature(or_patterns)]
use rustc_ast as ast;
use rustc_ast::attr::HasAttrs;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{self, LazyTokenStream, TokenStream, TokenTree};
use rustc_ast_pretty::pprust;
@ -249,29 +250,23 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
// before we fall back to the stringification.
let convert_tokens =
|tokens: &Option<LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
|tokens: Option<&LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
let tokens = match *nt {
Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
Nonterminal::NtBlock(ref block) => convert_tokens(&block.tokens),
Nonterminal::NtStmt(ref stmt) => {
// FIXME: We currently only collect tokens for `:stmt`
// matchers in `macro_rules!` macros. When we start collecting
// tokens for attributes on statements, we will need to prepend
// attributes here
convert_tokens(&stmt.tokens)
}
Nonterminal::NtPat(ref pat) => convert_tokens(&pat.tokens),
Nonterminal::NtTy(ref ty) => convert_tokens(&ty.tokens),
Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
Nonterminal::NtStmt(ref stmt) => prepend_attrs(stmt.attrs(), stmt.tokens()),
Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.as_ref()),
Nonterminal::NtIdent(ident, is_raw) => {
Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
}
Nonterminal::NtLifetime(ident) => {
Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
}
Nonterminal::NtMeta(ref attr) => convert_tokens(&attr.tokens),
Nonterminal::NtPath(ref path) => convert_tokens(&path.tokens),
Nonterminal::NtVis(ref vis) => convert_tokens(&vis.tokens),
Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.as_ref()),
Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.as_ref()),
Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.as_ref()),
Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
if expr.tokens.is_none() {

View File

@ -1213,14 +1213,20 @@ impl<'a> Parser<'a> {
//
// This also makes `Parser` very cheap to clone, since
// there is no intermediate collection buffer to clone.
#[derive(Clone)]
struct LazyTokenStreamImpl {
start_token: (Token, Spacing),
cursor_snapshot: TokenCursor,
num_calls: usize,
desugar_doc_comments: bool,
trailing_semi: bool,
}
impl CreateTokenStream for LazyTokenStreamImpl {
fn create_token_stream(&self) -> TokenStream {
let mut num_calls = self.num_calls;
if self.trailing_semi {
num_calls += 1;
}
// The token produced by the final call to `next` or `next_desugared`
// was not actually consumed by the callback. The combination
// of chaining the initial token and using `take` produces the desired
@ -1228,17 +1234,25 @@ impl<'a> Parser<'a> {
// and omit the final token otherwise.
let mut cursor_snapshot = self.cursor_snapshot.clone();
let tokens = std::iter::once(self.start_token.clone())
.chain((0..self.num_calls).map(|_| {
.chain((0..num_calls).map(|_| {
if self.desugar_doc_comments {
cursor_snapshot.next_desugared()
} else {
cursor_snapshot.next()
}
}))
.take(self.num_calls);
.take(num_calls);
make_token_stream(tokens)
}
fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
if self.trailing_semi {
panic!("Called `add_trailing_semi` twice!");
}
let mut new = self.clone();
new.trailing_semi = true;
Box::new(new)
}
}
let lazy_impl = LazyTokenStreamImpl {
@ -1246,6 +1260,7 @@ impl<'a> Parser<'a> {
num_calls: self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls,
cursor_snapshot,
desugar_doc_comments: self.desugar_doc_comments,
trailing_semi: false,
};
Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
}

View File

@ -117,8 +117,8 @@ impl<'a> Parser<'a> {
let (stmt, tokens) = self.collect_tokens(|this| this.parse_stmt())?;
match stmt {
Some(mut s) => {
if s.tokens.is_none() {
s.tokens = tokens;
if s.tokens().is_none() {
s.set_tokens(tokens);
}
token::NtStmt(s)
}

View File

@ -7,8 +7,10 @@ use super::{BlockMode, Parser, Restrictions, SemiColonMode};
use crate::maybe_whole;
use rustc_ast as ast;
use rustc_ast::attr::HasAttrs;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, TokenKind};
use rustc_ast::tokenstream::LazyTokenStream;
use rustc_ast::util::classify;
use rustc_ast::{AttrStyle, AttrVec, Attribute, MacCall, MacCallStmt, MacStmtStyle};
use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, Local, Stmt, StmtKind, DUMMY_NODE_ID};
@ -31,45 +33,75 @@ impl<'a> Parser<'a> {
}
fn parse_stmt_without_recovery(&mut self) -> PResult<'a, Option<Stmt>> {
maybe_whole!(self, NtStmt, |x| Some(x));
let attrs = self.parse_outer_attributes()?;
let mut attrs = self.parse_outer_attributes()?;
let has_attrs = !attrs.is_empty();
let lo = self.token.span;
let stmt = if self.eat_keyword(kw::Let) {
self.parse_local_mk(lo, attrs.into())?
} else if self.is_kw_followed_by_ident(kw::Mut) {
self.recover_stmt_local(lo, attrs.into(), "missing keyword", "let mut")?
} else if self.is_kw_followed_by_ident(kw::Auto) {
self.bump(); // `auto`
let msg = "write `let` instead of `auto` to introduce a new variable";
self.recover_stmt_local(lo, attrs.into(), msg, "let")?
} else if self.is_kw_followed_by_ident(sym::var) {
self.bump(); // `var`
let msg = "write `let` instead of `var` to introduce a new variable";
self.recover_stmt_local(lo, attrs.into(), msg, "let")?
} else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
// We have avoided contextual keywords like `union`, items with `crate` visibility,
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
// that starts like a path (1 token), but is in fact not a path.
// Also, we avoid stealing syntax from `parse_item_`.
self.parse_stmt_path_start(lo, attrs)?
} else if let Some(item) = self.parse_item_common(attrs.clone(), false, true, |_| true)? {
// FIXME: Bad copy of attrs
self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
} else if self.eat(&token::Semi) {
// Do not attempt to parse an expression if we're done here.
self.error_outer_attrs(&attrs);
self.mk_stmt(lo, StmtKind::Empty)
} else if self.token != token::CloseDelim(token::Brace) {
// Remainder are line-expr stmts.
let e = self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs.into()))?;
self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
} else {
self.error_outer_attrs(&attrs);
return Ok(None);
maybe_whole!(self, NtStmt, |stmt| {
let mut stmt = stmt;
stmt.visit_attrs(|stmt_attrs| {
mem::swap(stmt_attrs, &mut attrs);
stmt_attrs.extend(attrs);
});
Some(stmt)
});
let parse_stmt_inner = |this: &mut Self| {
let stmt = if this.eat_keyword(kw::Let) {
this.parse_local_mk(lo, attrs.into())?
} else if this.is_kw_followed_by_ident(kw::Mut) {
this.recover_stmt_local(lo, attrs.into(), "missing keyword", "let mut")?
} else if this.is_kw_followed_by_ident(kw::Auto) {
this.bump(); // `auto`
let msg = "write `let` instead of `auto` to introduce a new variable";
this.recover_stmt_local(lo, attrs.into(), msg, "let")?
} else if this.is_kw_followed_by_ident(sym::var) {
this.bump(); // `var`
let msg = "write `let` instead of `var` to introduce a new variable";
this.recover_stmt_local(lo, attrs.into(), msg, "let")?
} else if this.check_path()
&& !this.token.is_qpath_start()
&& !this.is_path_start_item()
{
// We have avoided contextual keywords like `union`, items with `crate` visibility,
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
// that starts like a path (1 token), but is in fact not a path.
// Also, we avoid stealing syntax from `parse_item_`.
this.parse_stmt_path_start(lo, attrs)?
} else if let Some(item) =
this.parse_item_common(attrs.clone(), false, true, |_| true)?
{
// FIXME: Bad copy of attrs
this.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
} else if this.eat(&token::Semi) {
// Do not attempt to parse an expression if we're done here.
this.error_outer_attrs(&attrs);
this.mk_stmt(lo, StmtKind::Empty)
} else if this.token != token::CloseDelim(token::Brace) {
// Remainder are line-expr stmts.
let e = this.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs.into()))?;
this.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
} else {
this.error_outer_attrs(&attrs);
return Ok(None);
};
Ok(Some(stmt))
};
Ok(Some(stmt))
let stmt = if has_attrs {
let (mut stmt, tokens) = self.collect_tokens(parse_stmt_inner)?;
if let Some(stmt) = &mut stmt {
// If we already have tokens (e.g. due to encountering an `NtStmt`),
// use those instead.
if stmt.tokens().is_none() {
stmt.set_tokens(tokens);
}
}
stmt
} else {
parse_stmt_inner(self)?
};
Ok(stmt)
}
fn parse_stmt_path_start(&mut self, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, Stmt> {
@ -107,7 +139,7 @@ impl<'a> Parser<'a> {
let kind = if delim == token::Brace || self.token == token::Semi || self.token == token::Eof
{
StmtKind::MacCall(P(MacCallStmt { mac, style, attrs }))
StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None }))
} else {
// Since none of the above applied, this is an expression statement macro.
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new());
@ -219,7 +251,7 @@ impl<'a> Parser<'a> {
}
};
let hi = if self.token == token::Semi { self.token.span } else { self.prev_token.span };
Ok(P(ast::Local { ty, pat, init, id: DUMMY_NODE_ID, span: lo.to(hi), attrs }))
Ok(P(ast::Local { ty, pat, init, id: DUMMY_NODE_ID, span: lo.to(hi), attrs, tokens: None }))
}
/// Parses the RHS of a local variable declaration (e.g., '= 14;').
@ -376,6 +408,12 @@ impl<'a> Parser<'a> {
None => return Ok(None),
};
let add_semi_token = |tokens: Option<&mut LazyTokenStream>| {
if let Some(tokens) = tokens {
*tokens = tokens.add_trailing_semi();
}
};
let mut eat_semi = true;
match stmt.kind {
// Expression without semicolon.
@ -417,6 +455,7 @@ impl<'a> Parser<'a> {
*expr = self.mk_expr_err(sp);
}
}
StmtKind::Expr(_) | StmtKind::MacCall(_) => {}
StmtKind::Local(ref mut local) => {
if let Err(e) = self.expect_semi() {
// We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover.
@ -430,6 +469,9 @@ impl<'a> Parser<'a> {
}
}
eat_semi = false;
// We just checked that there's a semicolon in the tokenstream,
// so capture it
add_semi_token(local.tokens.as_mut());
}
StmtKind::Empty => eat_semi = false,
_ => {}
@ -437,6 +479,9 @@ impl<'a> Parser<'a> {
if eat_semi && self.eat(&token::Semi) {
stmt = stmt.add_trailing_semicolon();
// We just checked that we have a semicolon in the tokenstream,
// so capture it
add_semi_token(stmt.tokens_mut());
}
stmt.span = stmt.span.to(self.prev_token.span);
Ok(Some(stmt))
@ -447,7 +492,7 @@ impl<'a> Parser<'a> {
}
pub(super) fn mk_stmt(&self, span: Span, kind: StmtKind) -> Stmt {
Stmt { id: DUMMY_NODE_ID, kind, span, tokens: None }
Stmt { id: DUMMY_NODE_ID, kind, span }
}
pub(super) fn mk_stmt_err(&self, span: Span) -> Stmt {

View File

@ -25,6 +25,8 @@ fn print_str(string: &'static str) {
macro_rules! make_stmt {
($stmt:stmt) => {
#[print_attr]
#[allow(unused)]
$stmt
}
}

View File

@ -1,43 +1,84 @@
PRINT-ATTR INPUT (DISPLAY): #[allow(unused)] struct Foo { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:29:9: 29:10 (#12),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/allowed-attr-stmt-expr.rs:29:11: 29:16 (#12),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "unused",
span: $DIR/allowed-attr-stmt-expr.rs:29:17: 29:23 (#12),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:29:16: 29:24 (#12),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:29:10: 29:25 (#12),
},
Ident {
ident: "struct",
span: $DIR/allowed-attr-stmt-expr.rs:42:16: 42:22 (#0),
},
Ident {
ident: "Foo",
span: $DIR/allowed-attr-stmt-expr.rs:42:23: 42:26 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/allowed-attr-stmt-expr.rs:42:27: 42:29 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[expect_let] let string = "Hello, world!" ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:45:5: 45:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "expect_let",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:45:7: 45:17 (#0),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:45:6: 45:18 (#0),
},
Ident {
ident: "let",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:46:5: 46:8 (#0),
},
Ident {
ident: "string",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:46:9: 46:15 (#0),
},
Punct {
ch: '=',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:46:16: 46:17 (#0),
},
Literal {
kind: Str,
symbol: "Hello, world!",
suffix: None,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:46:18: 46:33 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:46:33: 46:34 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[expect_print_stmt] println ! ("{}", string) ;
@ -45,26 +86,26 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:49:5: 49:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "expect_print_stmt",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:49:7: 49:24 (#0),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:49:6: 49:25 (#0),
},
Ident {
ident: "println",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:50:5: 50:12 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:50:12: 50:13 (#0),
},
Group {
delimiter: Parenthesis,
@ -73,36 +114,36 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
kind: Str,
symbol: "{}",
suffix: None,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:50:14: 50:18 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:50:18: 50:19 (#0),
},
Ident {
ident: "string",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:50:20: 50:26 (#0),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:50:13: 50:27 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:50:27: 50:28 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): second_make_stmt ! (#[allow(dead_code)] struct Bar { }) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "second_make_stmt",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:53:5: 53:21 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:53:21: 53:22 (#0),
},
Group {
delimiter: Parenthesis,
@ -110,78 +151,149 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:53:23: 53:24 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:53:25: 53:30 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "dead_code",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:53:31: 53:40 (#0),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:53:30: 53:41 (#0),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:53:24: 53:42 (#0),
},
Ident {
ident: "struct",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:53:43: 53:49 (#0),
},
Ident {
ident: "Bar",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:53:50: 53:53 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:53:54: 53:56 (#0),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:53:22: 53:57 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:53:57: 53:58 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct Other { }
PRINT-ATTR INPUT (DISPLAY): #[allow(unused)] #[allow(dead_code)] struct Bar { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:29:9: 29:10 (#34),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/allowed-attr-stmt-expr.rs:29:11: 29:16 (#34),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "unused",
span: $DIR/allowed-attr-stmt-expr.rs:29:17: 29:23 (#34),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:29:16: 29:24 (#34),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:29:10: 29:25 (#34),
},
Punct {
ch: '#',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:53:23: 53:24 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/allowed-attr-stmt-expr.rs:53:25: 53:30 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "dead_code",
span: $DIR/allowed-attr-stmt-expr.rs:53:31: 53:40 (#0),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:53:30: 53:41 (#0),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:53:24: 53:42 (#0),
},
Ident {
ident: "struct",
span: $DIR/allowed-attr-stmt-expr.rs:53:43: 53:49 (#0),
},
Ident {
ident: "Bar",
span: $DIR/allowed-attr-stmt-expr.rs:53:50: 53:53 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/allowed-attr-stmt-expr.rs:53:54: 53:56 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct Other { } ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:56:5: 56:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:56:7: 56:18 (#0),
},
],
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:56:6: 56:19 (#0),
},
Ident {
ident: "struct",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:57:5: 57:11 (#0),
},
Ident {
ident: "Other",
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:57:12: 57:17 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/allowed-attr-stmt-expr.rs:57:18: 57:20 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/allowed-attr-stmt-expr.rs:57:20: 57:21 (#0),
},
]

View File

@ -24,6 +24,8 @@ fn print_str(string: &'static str) {
macro_rules! make_stmt {
($stmt:stmt) => {
#[print_attr]
#[allow(unused)]
$stmt
}
}

View File

@ -8,7 +8,7 @@ LL | #[expect_print_expr]
= help: add `#![feature(stmt_expr_attributes)]` to the crate attributes to enable
error[E0658]: attributes on expressions are experimental
--> $DIR/attr-stmt-expr.rs:55:5
--> $DIR/attr-stmt-expr.rs:57:5
|
LL | #[expect_expr]
| ^^^^^^^^^^^^^^

View File

@ -1,43 +1,84 @@
PRINT-ATTR INPUT (DISPLAY): #[allow(unused)] struct Foo { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:28:9: 28:10 (#12),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/attr-stmt-expr.rs:28:11: 28:16 (#12),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "unused",
span: $DIR/attr-stmt-expr.rs:28:17: 28:23 (#12),
},
],
span: $DIR/attr-stmt-expr.rs:28:16: 28:24 (#12),
},
],
span: $DIR/attr-stmt-expr.rs:28:10: 28:25 (#12),
},
Ident {
ident: "struct",
span: $DIR/attr-stmt-expr.rs:40:16: 40:22 (#0),
},
Ident {
ident: "Foo",
span: $DIR/attr-stmt-expr.rs:40:23: 40:26 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/attr-stmt-expr.rs:40:27: 40:29 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[expect_let] let string = "Hello, world!" ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:43:5: 43:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "expect_let",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:43:7: 43:17 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:43:6: 43:18 (#0),
},
Ident {
ident: "let",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:44:5: 44:8 (#0),
},
Ident {
ident: "string",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:44:9: 44:15 (#0),
},
Punct {
ch: '=',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:44:16: 44:17 (#0),
},
Literal {
kind: Str,
symbol: "Hello, world!",
suffix: None,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:44:18: 44:33 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:44:33: 44:34 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[expect_print_stmt] println ! ("{}", string) ;
@ -45,26 +86,26 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:47:5: 47:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "expect_print_stmt",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:47:7: 47:24 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:47:6: 47:25 (#0),
},
Ident {
ident: "println",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:48:5: 48:12 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:48:12: 48:13 (#0),
},
Group {
delimiter: Parenthesis,
@ -73,36 +114,36 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
kind: Str,
symbol: "{}",
suffix: None,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:48:14: 48:18 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:48:18: 48:19 (#0),
},
Ident {
ident: "string",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:48:20: 48:26 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:48:13: 48:27 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:48:27: 48:28 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): second_make_stmt ! (#[allow(dead_code)] struct Bar { }) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "second_make_stmt",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:51:5: 51:21 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:51:21: 51:22 (#0),
},
Group {
delimiter: Parenthesis,
@ -110,48 +151,114 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:51:23: 51:24 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:51:25: 51:30 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "dead_code",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:51:31: 51:40 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:51:30: 51:41 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:51:24: 51:42 (#0),
},
Ident {
ident: "struct",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:51:43: 51:49 (#0),
},
Ident {
ident: "Bar",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:51:50: 51:53 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:51:54: 51:56 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:51:22: 51:57 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:51:57: 51:58 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[allow(unused)] #[allow(dead_code)] struct Bar { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:28:9: 28:10 (#34),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/attr-stmt-expr.rs:28:11: 28:16 (#34),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "unused",
span: $DIR/attr-stmt-expr.rs:28:17: 28:23 (#34),
},
],
span: $DIR/attr-stmt-expr.rs:28:16: 28:24 (#34),
},
],
span: $DIR/attr-stmt-expr.rs:28:10: 28:25 (#34),
},
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:51:23: 51:24 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/attr-stmt-expr.rs:51:25: 51:30 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "dead_code",
span: $DIR/attr-stmt-expr.rs:51:31: 51:40 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:51:30: 51:41 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:51:24: 51:42 (#0),
},
Ident {
ident: "struct",
span: $DIR/attr-stmt-expr.rs:51:43: 51:49 (#0),
},
Ident {
ident: "Bar",
span: $DIR/attr-stmt-expr.rs:51:50: 51:53 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/attr-stmt-expr.rs:51:54: 51:56 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct Other { }
@ -159,29 +266,29 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:54:5: 54:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:54:7: 54:18 (#0),
},
],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:54:6: 54:19 (#0),
},
Ident {
ident: "struct",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:55:5: 55:11 (#0),
},
Ident {
ident: "Other",
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:55:12: 55:17 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0),
span: $DIR/attr-stmt-expr.rs:55:18: 55:20 (#0),
},
]