Remove `token::FlattenGroup`

Vadim Petrochenkov 2020-07-01 13:16:49 +03:00
parent a5764de00b
commit b37434ef31
18 changed files with 42 additions and 64 deletions
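
In outline: the two-variant `FlattenGroup` enum is deleted, the second field of `token::Interpolated` is dropped, and the proc-macro bridge's `Group` now tracks the flattening hack as a plain `bool` computed from `nt.pretty_printing_compatibility_hack()`. A minimal, self-contained sketch of the shape of the change (stand-in types only; `Lrc` and `Nonterminal` are rustc-internal, so this does not compile against rustc itself):

```rust
// Before/after sketch with stand-in types.
#![allow(dead_code)]
use std::rc::Rc as Lrc; // stand-in for rustc_data_structures::sync::Lrc

struct Nonterminal; // stand-in for rustc_ast::token::Nonterminal

// Before this commit: the variant carried an extra FlattenGroup marker.
enum FlattenGroup { Yes, No }
enum OldTokenKind {
    Interpolated(Lrc<Nonterminal>, FlattenGroup),
}

// After this commit: the marker is gone; whether a proc-macro group should be
// flattened is derived from the nonterminal and stored as a plain bool there.
enum NewTokenKind {
    Interpolated(Lrc<Nonterminal>),
}

fn main() {}
```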

View File

@@ -475,7 +475,7 @@ impl MetaItem {
let span = span.with_hi(segments.last().unwrap().ident.span.hi());
Path { span, segments }
}
Some(TokenTree::Token(Token { kind: token::Interpolated(nt, _), .. })) => match *nt {
Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt {
token::Nonterminal::NtMeta(ref item) => return item.meta(item.path.span),
token::Nonterminal::NtPath(ref path) => path.clone(),
_ => return None,

View File

@@ -656,7 +656,7 @@ pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
*span = ident.span;
return; // Avoid visiting the span for the second time.
}
token::Interpolated(nt, _) => {
token::Interpolated(nt) => {
let mut nt = Lrc::make_mut(nt);
vis.visit_interpolated(&mut nt);
}

View File

@@ -182,15 +182,6 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
.contains(&name)
}
/// A hack used to pass AST fragments to attribute and derive macros
/// as a single nonterminal token instead of a token stream.
/// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
pub enum FlattenGroup {
Yes,
No,
}
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
pub enum TokenKind {
/* Expression-operator symbols. */
@@ -245,7 +236,7 @@ pub enum TokenKind {
/// treat regular and interpolated lifetime identifiers in the same way.
Lifetime(Symbol),
Interpolated(Lrc<Nonterminal>, FlattenGroup),
Interpolated(Lrc<Nonterminal>),
// Can be expanded into several tokens.
/// A doc comment.
@@ -352,7 +343,7 @@ impl Token {
/// if they keep spans or perform edition checks.
pub fn uninterpolated_span(&self) -> Span {
match &self.kind {
Interpolated(nt, _) => nt.span(),
Interpolated(nt) => nt.span(),
_ => self.span,
}
}
@@ -391,7 +382,7 @@ impl Token {
ModSep | // global path
Lifetime(..) | // labeled loop
Pound => true, // expression attributes
Interpolated(ref nt, _) => match **nt {
Interpolated(ref nt) => match **nt {
NtLiteral(..) |
NtExpr(..) |
NtBlock(..) |
@@ -417,7 +408,7 @@ impl Token {
Lifetime(..) | // lifetime bound in trait object
Lt | BinOp(Shl) | // associated path
ModSep => true, // global path
Interpolated(ref nt, _) => match **nt {
Interpolated(ref nt) => match **nt {
NtTy(..) | NtPath(..) => true,
_ => false,
},
@@ -429,7 +420,7 @@ impl Token {
pub fn can_begin_const_arg(&self) -> bool {
match self.kind {
OpenDelim(Brace) => true,
Interpolated(ref nt, _) => match **nt {
Interpolated(ref nt) => match **nt {
NtExpr(..) | NtBlock(..) | NtLiteral(..) => true,
_ => false,
},
@@ -464,7 +455,7 @@ impl Token {
match self.uninterpolate().kind {
Literal(..) | BinOp(Minus) => true,
Ident(name, false) if name.is_bool_lit() => true,
Interpolated(ref nt, _) => match &**nt {
Interpolated(ref nt) => match &**nt {
NtLiteral(_) => true,
NtExpr(e) => match &e.kind {
ast::ExprKind::Lit(_) => true,
@@ -485,7 +476,7 @@ impl Token {
// otherwise returns the original token.
pub fn uninterpolate(&self) -> Cow<'_, Token> {
match &self.kind {
Interpolated(nt, _) => match **nt {
Interpolated(nt) => match **nt {
NtIdent(ident, is_raw) => {
Cow::Owned(Token::new(Ident(ident.name, is_raw), ident.span))
}
@@ -532,7 +523,7 @@ impl Token {
/// Returns `true` if the token is an interpolated path.
fn is_path(&self) -> bool {
if let Interpolated(ref nt, _) = self.kind {
if let Interpolated(ref nt) = self.kind {
if let NtPath(..) = **nt {
return true;
}
@@ -544,7 +535,7 @@ impl Token {
/// That is, is this a pre-parsed expression dropped into the token stream
/// (which happens while parsing the result of macro expansion)?
pub fn is_whole_expr(&self) -> bool {
if let Interpolated(ref nt, _) = self.kind {
if let Interpolated(ref nt) = self.kind {
if let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtIdent(..) | NtBlock(_) = **nt {
return true;
}
@@ -555,7 +546,7 @@ impl Token {
// Is the token an interpolated block (`$b:block`)?
pub fn is_whole_block(&self) -> bool {
if let Interpolated(ref nt, _) = self.kind {
if let Interpolated(ref nt) = self.kind {
if let NtBlock(..) = **nt {
return true;
}

View File

@@ -205,7 +205,7 @@ impl Lit {
token::Lit::new(token::Bool, name, None)
}
token::Literal(lit) => lit,
token::Interpolated(ref nt, _) => {
token::Interpolated(ref nt) => {
if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt {
if let ast::ExprKind::Lit(lit) = &expr.kind {
return Ok(lit.clone());

View File

@@ -1027,7 +1027,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
fn lower_token(&mut self, token: Token) -> TokenStream {
match token.kind {
token::Interpolated(nt, _) => {
token::Interpolated(nt) => {
let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span);
self.lower_token_stream(tts)
}

View File

@@ -278,7 +278,7 @@ fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option<Span>)
token::Shebang(s) => format!("/* shebang: {}*/", s),
token::Unknown(s) => s.to_string(),
token::Interpolated(ref nt, _) => nonterminal_to_string(nt),
token::Interpolated(ref nt) => nonterminal_to_string(nt),
}
}

View File

@@ -371,7 +371,7 @@ where
impl MutVisitor for AvoidInterpolatedIdents {
fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
if let tokenstream::TokenTree::Token(token) = tt {
if let token::Interpolated(nt, _) = &token.kind {
if let token::Interpolated(nt) = &token.kind {
if let token::NtIdent(ident, is_raw) = **nt {
*tt = tokenstream::TokenTree::token(
token::Ident(ident.name, is_raw),

View File

@@ -790,7 +790,7 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool {
},
sym::block => match token.kind {
token::OpenDelim(token::Brace) => true,
token::Interpolated(ref nt, _) => match **nt {
token::Interpolated(ref nt) => match **nt {
token::NtItem(_)
| token::NtPat(_)
| token::NtTy(_)
@@ -804,7 +804,7 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool {
},
sym::path | sym::meta => match token.kind {
token::ModSep | token::Ident(..) => true,
token::Interpolated(ref nt, _) => match **nt {
token::Interpolated(ref nt) => match **nt {
token::NtPath(_) | token::NtMeta(_) => true,
_ => may_be_ident(&nt),
},
@@ -823,12 +823,12 @@ fn may_begin_with(token: &Token, name: Symbol) -> bool {
token::ModSep | // path
token::Lt | // path (UFCS constant)
token::BinOp(token::Shl) => true, // path (double UFCS)
token::Interpolated(ref nt, _) => may_be_ident(nt),
token::Interpolated(ref nt) => may_be_ident(nt),
_ => false,
},
sym::lifetime => match token.kind {
token::Lifetime(_) => true,
token::Interpolated(ref nt, _) => match **nt {
token::Interpolated(ref nt) => match **nt {
token::NtLifetime(_) | token::NtTT(_) => true,
_ => false,
},

View File

@@ -4,7 +4,7 @@ use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
use rustc_ast::ast::MacCall;
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::token::{self, FlattenGroup, NtTT, Token};
use rustc_ast::token::{self, NtTT, Token};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
@@ -240,10 +240,7 @@ pub(super) fn transcribe<'a>(
result.push(tt.clone().into());
} else {
marker.visit_span(&mut sp);
let token = TokenTree::token(
token::Interpolated(nt.clone(), FlattenGroup::No),
sp,
);
let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
result.push(token.into());
}
} else {

View File

@@ -2,7 +2,7 @@ use crate::base::{self, *};
use crate::proc_macro_server;
use rustc_ast::ast::{self, ItemKind, MetaItemKind, NestedMetaItem};
use rustc_ast::token::{self, FlattenGroup};
use rustc_ast::token;
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Applicability, ErrorReported};
@@ -105,8 +105,7 @@ impl MultiItemModifier for ProcMacroDerive {
let item = token::NtItem(item);
let input = if item.pretty_printing_compatibility_hack() {
TokenTree::token(token::Interpolated(Lrc::new(item), FlattenGroup::Yes), DUMMY_SP)
.into()
TokenTree::token(token::Interpolated(Lrc::new(item)), DUMMY_SP).into()
} else {
nt_to_tokenstream(&item, ecx.parse_sess, DUMMY_SP)
};

View File

@@ -1,7 +1,7 @@
use crate::base::ExtCtxt;
use rustc_ast::ast;
use rustc_ast::token::{self, FlattenGroup};
use rustc_ast::token;
use rustc_ast::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
use rustc_ast::util::comments;
use rustc_ast_pretty::pprust;
@@ -60,12 +60,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
let Token { kind, span } = match tree {
tokenstream::TokenTree::Delimited(span, delim, tts) => {
let delimiter = Delimiter::from_internal(delim);
return TokenTree::Group(Group {
delimiter,
stream: tts,
span,
flatten: FlattenGroup::No,
});
return TokenTree::Group(Group { delimiter, stream: tts, span, flatten: false });
}
tokenstream::TokenTree::Token(token) => token,
};
@@ -172,7 +167,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
delimiter: Delimiter::Bracket,
stream,
span: DelimSpan::from_single(span),
flatten: FlattenGroup::No,
flatten: false,
}));
if style == ast::AttrStyle::Inner {
stack.push(tt!(Punct::new('!', false)));
@@ -180,17 +175,13 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
tt!(Punct::new('#', false))
}
Interpolated(nt, _) => {
Interpolated(nt) => {
let stream = nt_to_tokenstream(&nt, sess, span);
TokenTree::Group(Group {
delimiter: Delimiter::None,
stream,
span: DelimSpan::from_single(span),
flatten: if nt.pretty_printing_compatibility_hack() {
FlattenGroup::Yes
} else {
FlattenGroup::No
},
flatten: nt.pretty_printing_compatibility_hack(),
})
}
@@ -297,7 +288,7 @@ pub struct Group {
/// A hack used to pass AST fragments to attribute and derive macros
/// as a single nonterminal token instead of a token stream.
/// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
flatten: FlattenGroup,
flatten: bool,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
@@ -457,7 +448,7 @@ impl server::TokenStreamIter for Rustc<'_> {
// Such token needs to be "unwrapped" and not represented as a delimited group.
// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
if let TokenTree::Group(ref group) = tree {
if matches!(group.flatten, FlattenGroup::Yes) {
if group.flatten {
iter.cursor.append(group.stream.clone());
continue;
}
@@ -473,7 +464,7 @@ impl server::Group for Rustc<'_> {
delimiter,
stream,
span: DelimSpan::from_single(server::Span::call_site(self)),
flatten: FlattenGroup::No,
flatten: false,
}
}
fn delimiter(&mut self, group: &Self::Group) -> Delimiter {
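
As a side note on the file above: since `FlattenGroup` had only `Yes`/`No` variants with no extra payload, the bridge-side check shrinks from `matches!(group.flatten, FlattenGroup::Yes)` to a direct boolean test. A tiny standalone sketch (the `Group` here is a stand-in, not the real proc-macro server type):

```rust
// Stand-in for the proc-macro server's Group; only the flatten flag matters here.
struct Group {
    flatten: bool,
}

fn main() {
    // After the change, the flag is set directly from
    // `nt.pretty_printing_compatibility_hack()` and checked as a plain bool.
    let group = Group { flatten: true };
    if group.flatten {
        println!("append the group's stream directly instead of a delimited group");
    }
}
```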

View File

@@ -155,7 +155,7 @@ impl<'a> Parser<'a> {
/// The delimiters or `=` are still put into the resulting token stream.
pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> {
let item = match self.token.kind {
token::Interpolated(ref nt, _) => match **nt {
token::Interpolated(ref nt) => match **nt {
Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()),
_ => None,
},
@@ -254,7 +254,7 @@ impl<'a> Parser<'a> {
/// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
let nt_meta = match self.token.kind {
token::Interpolated(ref nt, _) => match **nt {
token::Interpolated(ref nt) => match **nt {
token::NtMeta(ref e) => Some(e.clone()),
_ => None,
},

View File

@@ -26,7 +26,7 @@ use std::mem;
/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr {
($p:expr) => {
if let token::Interpolated(nt, _) = &$p.token.kind {
if let token::Interpolated(nt) = &$p.token.kind {
match &**nt {
token::NtExpr(e) | token::NtLiteral(e) => {
let e = e.clone();

View File

@@ -1780,7 +1780,7 @@ impl<'a> Parser<'a> {
fn is_named_param(&self) -> bool {
let offset = match self.token.kind {
token::Interpolated(ref nt, _) => match **nt {
token::Interpolated(ref nt) => match **nt {
token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
_ => 0,
},

View File

@@ -54,7 +54,7 @@ enum BlockMode {
#[macro_export]
macro_rules! maybe_whole {
($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
if let token::Interpolated(nt, _) = &$p.token.kind {
if let token::Interpolated(nt) = &$p.token.kind {
if let token::$constructor(x) = &**nt {
let $x = x.clone();
$p.bump();
@@ -69,7 +69,7 @@ macro_rules! maybe_whole {
macro_rules! maybe_recover_from_interpolated_ty_qpath {
($self: expr, $allow_qpath_recovery: expr) => {
if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
if let token::Interpolated(nt, _) = &$self.token.kind {
if let token::Interpolated(nt) = &$self.token.kind {
if let token::NtTy(ty) = &**nt {
let ty = ty.clone();
$self.bump();
@@ -922,7 +922,7 @@ impl<'a> Parser<'a> {
if self.eat(&token::Eq) {
let eq_span = self.prev_token.span;
let mut is_interpolated_expr = false;
if let token::Interpolated(nt, _) = &self.token.kind {
if let token::Interpolated(nt) = &self.token.kind {
if let token::NtExpr(..) = **nt {
is_interpolated_expr = true;
}

View File

@@ -515,7 +515,7 @@ impl<'a> Parser<'a> {
self.recover_additional_muts();
// Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
if let token::Interpolated(ref nt, _) = self.token.kind {
if let token::Interpolated(ref nt) = self.token.kind {
if let token::NtPat(_) = **nt {
self.expected_ident_found().emit();
}

View File

@@ -1325,7 +1325,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> {
}
fn visit_token(&mut self, t: Token) {
if let token::Interpolated(nt, _) = t.kind {
if let token::Interpolated(nt) = t.kind {
if let token::NtExpr(ref expr) = *nt {
if let ast::ExprKind::MacCall(..) = expr.kind {
self.visit_invoc(expr.id);

View File

@@ -256,7 +256,7 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> {
}
fn visit_token(&mut self, t: Token) {
if let token::Interpolated(nt, _) = t.kind {
if let token::Interpolated(nt) = t.kind {
if let token::NtExpr(ref expr) = *nt {
if let ExprKind::MacCall(..) = expr.kind {
self.visit_macro_invoc(expr.id);