Remove token::FlattenGroup
parent a5764de00b
commit b37434ef31
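In short: the `FlattenGroup` enum is removed. `TokenKind::Interpolated` now carries only the `Lrc<Nonterminal>`, and the "pass an AST fragment to an attribute/derive macro as a single nonterminal" hack is tracked on the proc-macro bridge's `Group` as a plain `bool`. An abridged sketch of the resulting shapes (taken from the diff below; surrounding variants, fields, and types are elided, so this is an illustration rather than the full definitions):

    // rustc_ast::token, after this commit: no FlattenGroup enum.
    pub enum TokenKind {
        // ...
        /// An interpolated AST fragment; the flatten flag no longer travels with it.
        Interpolated(Lrc<Nonterminal>),
        // ...
    }

    // Proc-macro bridge Group: the pretty-printing compatibility hack becomes a bool.
    pub struct Group {
        // ... delimiter, stream, span ...
        /// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
        flatten: bool,
    }

Call sites follow suit: matches on `token::Interpolated(nt, _)` become `token::Interpolated(nt)`, constructors write `flatten: false` (or `flatten: nt.pretty_printing_compatibility_hack()` where the hack is still needed), and `matches!(group.flatten, FlattenGroup::Yes)` becomes `if group.flatten`.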
@@ -475,7 +475,7 @@ impl MetaItem {
 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
 Path { span, segments }
 }
-Some(TokenTree::Token(Token { kind: token::Interpolated(nt, _), .. })) => match *nt {
+Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt {
 token::Nonterminal::NtMeta(ref item) => return item.meta(item.path.span),
 token::Nonterminal::NtPath(ref path) => path.clone(),
 _ => return None,

@@ -656,7 +656,7 @@ pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
 *span = ident.span;
 return; // Avoid visiting the span for the second time.
 }
-token::Interpolated(nt, _) => {
+token::Interpolated(nt) => {
 let mut nt = Lrc::make_mut(nt);
 vis.visit_interpolated(&mut nt);
 }

@@ -182,15 +182,6 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
 .contains(&name)
 }
 
-/// A hack used to pass AST fragments to attribute and derive macros
-/// as a single nonterminal token instead of a token stream.
-/// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
-pub enum FlattenGroup {
-Yes,
-No,
-}
-
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
 pub enum TokenKind {
 /* Expression-operator symbols. */

@@ -245,7 +236,7 @@ pub enum TokenKind {
 /// treat regular and interpolated lifetime identifiers in the same way.
 Lifetime(Symbol),
 
-Interpolated(Lrc<Nonterminal>, FlattenGroup),
+Interpolated(Lrc<Nonterminal>),
 
 // Can be expanded into several tokens.
 /// A doc comment.

@@ -352,7 +343,7 @@ impl Token {
 /// if they keep spans or perform edition checks.
 pub fn uninterpolated_span(&self) -> Span {
 match &self.kind {
-Interpolated(nt, _) => nt.span(),
+Interpolated(nt) => nt.span(),
 _ => self.span,
 }
 }

@@ -391,7 +382,7 @@ impl Token {
 ModSep | // global path
 Lifetime(..) | // labeled loop
 Pound => true, // expression attributes
-Interpolated(ref nt, _) => match **nt {
+Interpolated(ref nt) => match **nt {
 NtLiteral(..) |
 NtExpr(..) |
 NtBlock(..) |

@@ -417,7 +408,7 @@ impl Token {
 Lifetime(..) | // lifetime bound in trait object
 Lt | BinOp(Shl) | // associated path
 ModSep => true, // global path
-Interpolated(ref nt, _) => match **nt {
+Interpolated(ref nt) => match **nt {
 NtTy(..) | NtPath(..) => true,
 _ => false,
 },

@@ -429,7 +420,7 @@ impl Token {
 pub fn can_begin_const_arg(&self) -> bool {
 match self.kind {
 OpenDelim(Brace) => true,
-Interpolated(ref nt, _) => match **nt {
+Interpolated(ref nt) => match **nt {
 NtExpr(..) | NtBlock(..) | NtLiteral(..) => true,
 _ => false,
 },

@@ -464,7 +455,7 @@ impl Token {
 match self.uninterpolate().kind {
 Literal(..) | BinOp(Minus) => true,
 Ident(name, false) if name.is_bool_lit() => true,
-Interpolated(ref nt, _) => match &**nt {
+Interpolated(ref nt) => match &**nt {
 NtLiteral(_) => true,
 NtExpr(e) => match &e.kind {
 ast::ExprKind::Lit(_) => true,

@@ -485,7 +476,7 @@ impl Token {
 // otherwise returns the original token.
 pub fn uninterpolate(&self) -> Cow<'_, Token> {
 match &self.kind {
-Interpolated(nt, _) => match **nt {
+Interpolated(nt) => match **nt {
 NtIdent(ident, is_raw) => {
 Cow::Owned(Token::new(Ident(ident.name, is_raw), ident.span))
 }

@@ -532,7 +523,7 @@ impl Token {
 
 /// Returns `true` if the token is an interpolated path.
 fn is_path(&self) -> bool {
-if let Interpolated(ref nt, _) = self.kind {
+if let Interpolated(ref nt) = self.kind {
 if let NtPath(..) = **nt {
 return true;
 }

@@ -544,7 +535,7 @@ impl Token {
 /// That is, is this a pre-parsed expression dropped into the token stream
 /// (which happens while parsing the result of macro expansion)?
 pub fn is_whole_expr(&self) -> bool {
-if let Interpolated(ref nt, _) = self.kind {
+if let Interpolated(ref nt) = self.kind {
 if let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtIdent(..) | NtBlock(_) = **nt {
 return true;
 }

@@ -555,7 +546,7 @@ impl Token {
 
 // Is the token an interpolated block (`$b:block`)?
 pub fn is_whole_block(&self) -> bool {
-if let Interpolated(ref nt, _) = self.kind {
+if let Interpolated(ref nt) = self.kind {
 if let NtBlock(..) = **nt {
 return true;
 }

@@ -205,7 +205,7 @@ impl Lit {
 token::Lit::new(token::Bool, name, None)
 }
 token::Literal(lit) => lit,
-token::Interpolated(ref nt, _) => {
+token::Interpolated(ref nt) => {
 if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt {
 if let ast::ExprKind::Lit(lit) = &expr.kind {
 return Ok(lit.clone());

@@ -1027,7 +1027,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
 
 fn lower_token(&mut self, token: Token) -> TokenStream {
 match token.kind {
-token::Interpolated(nt, _) => {
+token::Interpolated(nt) => {
 let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span);
 self.lower_token_stream(tts)
 }

@@ -278,7 +278,7 @@ fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option<Span>)
 token::Shebang(s) => format!("/* shebang: {}*/", s),
 token::Unknown(s) => s.to_string(),
 
-token::Interpolated(ref nt, _) => nonterminal_to_string(nt),
+token::Interpolated(ref nt) => nonterminal_to_string(nt),
 }
 }
 

@@ -371,7 +371,7 @@ where
 impl MutVisitor for AvoidInterpolatedIdents {
 fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
 if let tokenstream::TokenTree::Token(token) = tt {
-if let token::Interpolated(nt, _) = &token.kind {
+if let token::Interpolated(nt) = &token.kind {
 if let token::NtIdent(ident, is_raw) = **nt {
 *tt = tokenstream::TokenTree::token(
 token::Ident(ident.name, is_raw),

@@ -790,7 +790,7 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool {
 },
 sym::block => match token.kind {
 token::OpenDelim(token::Brace) => true,
-token::Interpolated(ref nt, _) => match **nt {
+token::Interpolated(ref nt) => match **nt {
 token::NtItem(_)
 | token::NtPat(_)
 | token::NtTy(_)

@@ -804,7 +804,7 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool {
 },
 sym::path | sym::meta => match token.kind {
 token::ModSep | token::Ident(..) => true,
-token::Interpolated(ref nt, _) => match **nt {
+token::Interpolated(ref nt) => match **nt {
 token::NtPath(_) | token::NtMeta(_) => true,
 _ => may_be_ident(&nt),
 },

@@ -823,12 +823,12 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool {
 token::ModSep | // path
 token::Lt | // path (UFCS constant)
 token::BinOp(token::Shl) => true, // path (double UFCS)
-token::Interpolated(ref nt, _) => may_be_ident(nt),
+token::Interpolated(ref nt) => may_be_ident(nt),
 _ => false,
 },
 sym::lifetime => match token.kind {
 token::Lifetime(_) => true,
-token::Interpolated(ref nt, _) => match **nt {
+token::Interpolated(ref nt) => match **nt {
 token::NtLifetime(_) | token::NtTT(_) => true,
 _ => false,
 },

@@ -4,7 +4,7 @@ use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
 
 use rustc_ast::ast::MacCall;
 use rustc_ast::mut_visit::{self, MutVisitor};
-use rustc_ast::token::{self, FlattenGroup, NtTT, Token};
+use rustc_ast::token::{self, NtTT, Token};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;

@@ -240,10 +240,7 @@ pub(super) fn transcribe<'a>(
 result.push(tt.clone().into());
 } else {
 marker.visit_span(&mut sp);
-let token = TokenTree::token(
-token::Interpolated(nt.clone(), FlattenGroup::No),
-sp,
-);
+let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
 result.push(token.into());
 }
 } else {

@@ -2,7 +2,7 @@ use crate::base::{self, *};
 use crate::proc_macro_server;
 
 use rustc_ast::ast::{self, ItemKind, MetaItemKind, NestedMetaItem};
-use rustc_ast::token::{self, FlattenGroup};
+use rustc_ast::token;
 use rustc_ast::tokenstream::{TokenStream, TokenTree};
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Applicability, ErrorReported};

@@ -105,8 +105,7 @@ impl MultiItemModifier for ProcMacroDerive {
 
 let item = token::NtItem(item);
 let input = if item.pretty_printing_compatibility_hack() {
-TokenTree::token(token::Interpolated(Lrc::new(item), FlattenGroup::Yes), DUMMY_SP)
-.into()
+TokenTree::token(token::Interpolated(Lrc::new(item)), DUMMY_SP).into()
 } else {
 nt_to_tokenstream(&item, ecx.parse_sess, DUMMY_SP)
 };

@@ -1,7 +1,7 @@
 use crate::base::ExtCtxt;
 
 use rustc_ast::ast;
-use rustc_ast::token::{self, FlattenGroup};
+use rustc_ast::token;
 use rustc_ast::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
 use rustc_ast::util::comments;
 use rustc_ast_pretty::pprust;

@@ -60,12 +60,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
 let Token { kind, span } = match tree {
 tokenstream::TokenTree::Delimited(span, delim, tts) => {
 let delimiter = Delimiter::from_internal(delim);
-return TokenTree::Group(Group {
-delimiter,
-stream: tts,
-span,
-flatten: FlattenGroup::No,
-});
+return TokenTree::Group(Group { delimiter, stream: tts, span, flatten: false });
 }
 tokenstream::TokenTree::Token(token) => token,
 };

@@ -172,7 +167,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
 delimiter: Delimiter::Bracket,
 stream,
 span: DelimSpan::from_single(span),
-flatten: FlattenGroup::No,
+flatten: false,
 }));
 if style == ast::AttrStyle::Inner {
 stack.push(tt!(Punct::new('!', false)));

@@ -180,17 +175,13 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
 tt!(Punct::new('#', false))
 }
 
-Interpolated(nt, _) => {
+Interpolated(nt) => {
 let stream = nt_to_tokenstream(&nt, sess, span);
 TokenTree::Group(Group {
 delimiter: Delimiter::None,
 stream,
 span: DelimSpan::from_single(span),
-flatten: if nt.pretty_printing_compatibility_hack() {
-FlattenGroup::Yes
-} else {
-FlattenGroup::No
-},
+flatten: nt.pretty_printing_compatibility_hack(),
 })
 }
 

@@ -297,7 +288,7 @@ pub struct Group {
 /// A hack used to pass AST fragments to attribute and derive macros
 /// as a single nonterminal token instead of a token stream.
 /// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
-flatten: FlattenGroup,
+flatten: bool,
 }
 
 #[derive(Copy, Clone, PartialEq, Eq, Hash)]

@@ -457,7 +448,7 @@ impl server::TokenStreamIter for Rustc<'_> {
 // Such token needs to be "unwrapped" and not represented as a delimited group.
 // FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
 if let TokenTree::Group(ref group) = tree {
-if matches!(group.flatten, FlattenGroup::Yes) {
+if group.flatten {
 iter.cursor.append(group.stream.clone());
 continue;
 }

@@ -473,7 +464,7 @@ impl server::Group for Rustc<'_> {
 delimiter,
 stream,
 span: DelimSpan::from_single(server::Span::call_site(self)),
-flatten: FlattenGroup::No,
+flatten: false,
 }
 }
 fn delimiter(&mut self, group: &Self::Group) -> Delimiter {

@@ -155,7 +155,7 @@ impl<'a> Parser<'a> {
 /// The delimiters or `=` are still put into the resulting token stream.
 pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> {
 let item = match self.token.kind {
-token::Interpolated(ref nt, _) => match **nt {
+token::Interpolated(ref nt) => match **nt {
 Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()),
 _ => None,
 },

@@ -254,7 +254,7 @@ impl<'a> Parser<'a> {
 /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
 pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
 let nt_meta = match self.token.kind {
-token::Interpolated(ref nt, _) => match **nt {
+token::Interpolated(ref nt) => match **nt {
 token::NtMeta(ref e) => Some(e.clone()),
 _ => None,
 },

@@ -26,7 +26,7 @@ use std::mem;
 /// `token::Interpolated` tokens.
 macro_rules! maybe_whole_expr {
 ($p:expr) => {
-if let token::Interpolated(nt, _) = &$p.token.kind {
+if let token::Interpolated(nt) = &$p.token.kind {
 match &**nt {
 token::NtExpr(e) | token::NtLiteral(e) => {
 let e = e.clone();

@@ -1780,7 +1780,7 @@ impl<'a> Parser<'a> {
 
 fn is_named_param(&self) -> bool {
 let offset = match self.token.kind {
-token::Interpolated(ref nt, _) => match **nt {
+token::Interpolated(ref nt) => match **nt {
 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
 _ => 0,
 },

@@ -54,7 +54,7 @@ enum BlockMode {
 #[macro_export]
 macro_rules! maybe_whole {
 ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
-if let token::Interpolated(nt, _) = &$p.token.kind {
+if let token::Interpolated(nt) = &$p.token.kind {
 if let token::$constructor(x) = &**nt {
 let $x = x.clone();
 $p.bump();

@@ -69,7 +69,7 @@ macro_rules! maybe_whole {
 macro_rules! maybe_recover_from_interpolated_ty_qpath {
 ($self: expr, $allow_qpath_recovery: expr) => {
 if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
-if let token::Interpolated(nt, _) = &$self.token.kind {
+if let token::Interpolated(nt) = &$self.token.kind {
 if let token::NtTy(ty) = &**nt {
 let ty = ty.clone();
 $self.bump();

@@ -922,7 +922,7 @@ impl<'a> Parser<'a> {
 if self.eat(&token::Eq) {
 let eq_span = self.prev_token.span;
 let mut is_interpolated_expr = false;
-if let token::Interpolated(nt, _) = &self.token.kind {
+if let token::Interpolated(nt) = &self.token.kind {
 if let token::NtExpr(..) = **nt {
 is_interpolated_expr = true;
 }

@@ -515,7 +515,7 @@ impl<'a> Parser<'a> {
 self.recover_additional_muts();
 
 // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
-if let token::Interpolated(ref nt, _) = self.token.kind {
+if let token::Interpolated(ref nt) = self.token.kind {
 if let token::NtPat(_) = **nt {
 self.expected_ident_found().emit();
 }

@@ -1325,7 +1325,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> {
 }
 
 fn visit_token(&mut self, t: Token) {
-if let token::Interpolated(nt, _) = t.kind {
+if let token::Interpolated(nt) = t.kind {
 if let token::NtExpr(ref expr) = *nt {
 if let ast::ExprKind::MacCall(..) = expr.kind {
 self.visit_invoc(expr.id);

@@ -256,7 +256,7 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> {
 }
 
 fn visit_token(&mut self, t: Token) {
-if let token::Interpolated(nt, _) = t.kind {
+if let token::Interpolated(nt) = t.kind {
 if let token::NtExpr(ref expr) = *nt {
 if let ExprKind::MacCall(..) = expr.kind {
 self.visit_macro_invoc(expr.id);