syntax: Use Token in TokenTree::Token
commit e0127dbf81
parent a3425edb46
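In outline: `TokenTree::Token` previously carried a span and a token kind as two separate payloads; after this commit it wraps the `Token` struct, which bundles both. A minimal sketch of the before/after shape, simplified from the hunks below (`Span`, `TokenKind`, `DelimSpan`, `DelimToken`, and `TokenStream` are the existing syntax types):

    // Before: span and kind side by side in the tree.
    pub enum TokenTree {
        Token(Span, TokenKind),
        Delimited(DelimSpan, DelimToken, TokenStream),
    }

    // After: the tree holds a whole `Token`, plus a helper so old
    // construction sites stay a one-liner.
    pub enum TokenTree {
        Token(Token),
        Delimited(DelimSpan, DelimToken, TokenStream),
    }

    pub struct Token {
        pub kind: TokenKind,
        pub span: Span,
    }

    impl TokenTree {
        pub fn token(span: Span, kind: TokenKind) -> TokenTree {
            TokenTree::Token(Token { kind, span })
        }
    }

Construction sites change mechanically from `TokenTree::Token(sp, kind)` to `TokenTree::token(sp, kind)`, and pattern matches change from `TokenTree::Token(span, kind)` to `TokenTree::Token(token)` with `token.span`/`token.kind` projections.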
@@ -67,7 +67,7 @@ use syntax::source_map::CompilerDesugaringKind::IfTemporary;
 use syntax::std_inject;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::tokenstream::{TokenStream, TokenTree};
-use syntax::parse::token::{self, TokenKind};
+use syntax::parse::token::{self, Token};
 use syntax::visit::{self, Visitor};
 use syntax_pos::{DUMMY_SP, edition, Span};

@@ -1328,7 +1328,7 @@ impl<'a> LoweringContext<'a> {

     fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
         match tree {
-            TokenTree::Token(span, token) => self.lower_token(token, span),
+            TokenTree::Token(token) => self.lower_token(token),
             TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
                 span,
                 delim,
@@ -1337,13 +1337,13 @@ impl<'a> LoweringContext<'a> {
         }
     }

-    fn lower_token(&mut self, token: TokenKind, span: Span) -> TokenStream {
-        match token {
+    fn lower_token(&mut self, token: Token) -> TokenStream {
+        match token.kind {
             token::Interpolated(nt) => {
-                let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
+                let tts = nt.to_tokenstream(&self.sess.parse_sess, token.span);
                 self.lower_token_stream(tts)
             }
-            other => TokenTree::Token(span, other).into(),
+            _ => TokenTree::Token(token).into(),
         }
     }
@@ -261,9 +261,8 @@ for tokenstream::TokenTree {
                           hasher: &mut StableHasher<W>) {
         mem::discriminant(self).hash_stable(hcx, hasher);
         match *self {
-            tokenstream::TokenTree::Token(span, ref token) => {
-                span.hash_stable(hcx, hasher);
-                hash_token(token, hcx, hasher);
+            tokenstream::TokenTree::Token(ref token) => {
+                token.hash_stable(hcx, hasher);
             }
             tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
                 span.hash_stable(hcx, hasher);
@@ -306,70 +305,75 @@ impl_stable_hash_for!(struct token::Lit {
     suffix
 });

-fn hash_token<'a, 'gcx, W: StableHasherResult>(
-    token: &token::TokenKind,
-    hcx: &mut StableHashingContext<'a>,
-    hasher: &mut StableHasher<W>,
-) {
-    mem::discriminant(token).hash_stable(hcx, hasher);
-    match *token {
-        token::Eq |
-        token::Lt |
-        token::Le |
-        token::EqEq |
-        token::Ne |
-        token::Ge |
-        token::Gt |
-        token::AndAnd |
-        token::OrOr |
-        token::Not |
-        token::Tilde |
-        token::At |
-        token::Dot |
-        token::DotDot |
-        token::DotDotDot |
-        token::DotDotEq |
-        token::Comma |
-        token::Semi |
-        token::Colon |
-        token::ModSep |
-        token::RArrow |
-        token::LArrow |
-        token::FatArrow |
-        token::Pound |
-        token::Dollar |
-        token::Question |
-        token::SingleQuote |
-        token::Whitespace |
-        token::Comment |
-        token::Eof => {}
-
-        token::BinOp(bin_op_token) |
-        token::BinOpEq(bin_op_token) => {
-            std_hash::Hash::hash(&bin_op_token, hasher);
-        }
-
-        token::OpenDelim(delim_token) |
-        token::CloseDelim(delim_token) => {
-            std_hash::Hash::hash(&delim_token, hasher);
-        }
-        token::Literal(lit) => lit.hash_stable(hcx, hasher),
-
-        token::Ident(ident, is_raw) => {
-            ident.name.hash_stable(hcx, hasher);
-            is_raw.hash_stable(hcx, hasher);
-        }
-        token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
-
-        token::Interpolated(_) => {
-            bug!("interpolated tokens should not be present in the HIR")
-        }
-
-        token::DocComment(val) |
-        token::Shebang(val) => val.hash_stable(hcx, hasher),
-    }
-}
+impl<'a> HashStable<StableHashingContext<'a>> for token::TokenKind {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'a>,
+                                          hasher: &mut StableHasher<W>) {
+        mem::discriminant(self).hash_stable(hcx, hasher);
+        match *self {
+            token::Eq |
+            token::Lt |
+            token::Le |
+            token::EqEq |
+            token::Ne |
+            token::Ge |
+            token::Gt |
+            token::AndAnd |
+            token::OrOr |
+            token::Not |
+            token::Tilde |
+            token::At |
+            token::Dot |
+            token::DotDot |
+            token::DotDotDot |
+            token::DotDotEq |
+            token::Comma |
+            token::Semi |
+            token::Colon |
+            token::ModSep |
+            token::RArrow |
+            token::LArrow |
+            token::FatArrow |
+            token::Pound |
+            token::Dollar |
+            token::Question |
+            token::SingleQuote |
+            token::Whitespace |
+            token::Comment |
+            token::Eof => {}
+
+            token::BinOp(bin_op_token) |
+            token::BinOpEq(bin_op_token) => {
+                std_hash::Hash::hash(&bin_op_token, hasher);
+            }
+
+            token::OpenDelim(delim_token) |
+            token::CloseDelim(delim_token) => {
+                std_hash::Hash::hash(&delim_token, hasher);
+            }
+            token::Literal(lit) => lit.hash_stable(hcx, hasher),
+
+            token::Ident(ident, is_raw) => {
+                ident.name.hash_stable(hcx, hasher);
+                is_raw.hash_stable(hcx, hasher);
+            }
+            token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
+
+            token::Interpolated(_) => {
+                bug!("interpolated tokens should not be present in the HIR")
+            }
+
+            token::DocComment(val) |
+            token::Shebang(val) => val.hash_stable(hcx, hasher),
+        }
+    }
+}
+
+impl_stable_hash_for!(struct token::Token {
+    kind,
+    span
+});

 impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItem {
     MetaItem(meta_item),
     Literal(lit)
@@ -1414,11 +1414,11 @@ impl KeywordIdents {
     fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) {
         for tt in tokens.into_trees() {
            match tt {
-                TokenTree::Token(span, tok) => match tok.ident() {
+                TokenTree::Token(token) => match token.ident() {
                    // only report non-raw idents
                    Some((ident, false)) => {
                        self.check_ident_token(cx, UnderMacro(true), ast::Ident {
-                            span: span.substitute_dummy(ident.span),
+                            span: token.span.substitute_dummy(ident.span),
                            ..ident
                        });
                    }
@@ -234,7 +234,7 @@ impl<'a> Classifier<'a> {
             // reference or dereference operator or a reference or pointer type, instead of the
             // bit-and or multiplication operator.
             token::BinOp(token::And) | token::BinOp(token::Star)
-                if self.lexer.peek().kind != token::Whitespace => Class::RefKeyWord,
+                if self.lexer.peek() != token::Whitespace => Class::RefKeyWord,

             // Consider this as part of a macro invocation if there was a
             // leading identifier.
@@ -335,7 +335,7 @@ impl<'a> Classifier<'a> {
                     sym::Option | sym::Result => Class::PreludeTy,
                     sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal,

-                    _ if token.kind.is_reserved_ident() => Class::KeyWord,
+                    _ if token.is_reserved_ident() => Class::KeyWord,

                     _ => {
                         if self.in_macro_nonterminal {
@@ -20,7 +20,7 @@ use crate::source_map::{BytePos, Spanned, dummy_spanned};
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use crate::parse::parser::Parser;
 use crate::parse::{self, ParseSess, PResult};
-use crate::parse::token::{self, TokenKind};
+use crate::parse::token::{self, Token, TokenKind};
 use crate::ptr::P;
 use crate::symbol::{sym, Symbol};
 use crate::ThinVec;
@@ -465,9 +465,9 @@ impl MetaItem {
                 let mod_sep_span = Span::new(last_pos,
                                              segment.ident.span.lo(),
                                              segment.ident.span.ctxt());
-                idents.push(TokenTree::Token(mod_sep_span, token::ModSep).into());
+                idents.push(TokenTree::token(mod_sep_span, token::ModSep).into());
             }
-            idents.push(TokenTree::Token(segment.ident.span,
+            idents.push(TokenTree::token(segment.ident.span,
                                          TokenKind::from_ast_ident(segment.ident)).into());
             last_pos = segment.ident.span.hi();
         }
@@ -480,10 +480,10 @@ impl MetaItem {
     {
         // FIXME: Share code with `parse_path`.
         let path = match tokens.next() {
-            Some(TokenTree::Token(span, token @ token::Ident(..))) |
-            Some(TokenTree::Token(span, token @ token::ModSep)) => 'arm: {
-                let mut segments = if let token::Ident(ident, _) = token {
-                    if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
+            Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
+            Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
+                let mut segments = if let token::Ident(ident, _) = kind {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
                         tokens.next();
                         vec![PathSegment::from_ident(ident.with_span_pos(span))]
                     } else {
@@ -493,13 +493,12 @@ impl MetaItem {
                     vec![PathSegment::path_root(span)]
                 };
                 loop {
-                    if let Some(TokenTree::Token(span,
-                                                 token::Ident(ident, _))) = tokens.next() {
+                    if let Some(TokenTree::Token(Token { kind: token::Ident(ident, _), span })) = tokens.next() {
                         segments.push(PathSegment::from_ident(ident.with_span_pos(span)));
                     } else {
                         return None;
                     }
-                    if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
                         tokens.next();
                     } else {
                         break;
@@ -508,7 +507,7 @@ impl MetaItem {
                 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
                 Path { span, segments }
             }
-            Some(TokenTree::Token(_, token::Interpolated(nt))) => match *nt {
+            Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt {
                 token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident),
                 token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
                 token::Nonterminal::NtPath(ref path) => path.clone(),
@@ -533,7 +532,7 @@ impl MetaItemKind {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                let mut vec = vec![TokenTree::Token(span, token::Eq).into()];
+                let mut vec = vec![TokenTree::token(span, token::Eq).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);
                 TokenStream::new(vec)
             }
@@ -541,7 +540,7 @@ impl MetaItemKind {
                 let mut tokens = Vec::new();
                 for (i, item) in list.iter().enumerate() {
                     if i > 0 {
-                        tokens.push(TokenTree::Token(span, token::Comma).into());
+                        tokens.push(TokenTree::token(span, token::Comma).into());
                     }
                     item.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }
@@ -558,10 +557,10 @@ impl MetaItemKind {
         where I: Iterator<Item = TokenTree>,
     {
         let delimited = match tokens.peek().cloned() {
-            Some(TokenTree::Token(_, token::Eq)) => {
+            Some(TokenTree::Token(token)) if token == token::Eq => {
                 tokens.next();
-                return if let Some(TokenTree::Token(span, token)) = tokens.next() {
-                    Lit::from_token(&token, span).ok().map(MetaItemKind::NameValue)
+                return if let Some(TokenTree::Token(token)) = tokens.next() {
+                    Lit::from_token(&token, token.span).ok().map(MetaItemKind::NameValue)
                 } else {
                     None
                 };
@@ -579,7 +578,7 @@ impl MetaItemKind {
             let item = NestedMetaItem::from_tokens(&mut tokens)?;
             result.push(item);
             match tokens.next() {
-                None | Some(TokenTree::Token(_, token::Comma)) => {}
+                None | Some(TokenTree::Token(Token { kind: token::Comma, .. })) => {}
                 _ => return None,
             }
         }
@@ -605,8 +604,8 @@ impl NestedMetaItem {
     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItem>
         where I: Iterator<Item = TokenTree>,
     {
-        if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
-            if let Ok(lit) = Lit::from_token(&token, span) {
+        if let Some(TokenTree::Token(token)) = tokens.peek().cloned() {
+            if let Ok(lit) = Lit::from_token(&token, token.span) {
                 tokens.next();
                 return Some(NestedMetaItem::Literal(lit));
             }
@@ -5,7 +5,7 @@ use crate::ast::{self, Ident, Name};
 use crate::source_map;
 use crate::ext::base::{ExtCtxt, MacEager, MacResult};
 use crate::ext::build::AstBuilder;
-use crate::parse::token;
+use crate::parse::token::{self, Token};
 use crate::ptr::P;
 use crate::symbol::kw;
 use crate::tokenstream::{TokenTree};
@@ -34,7 +34,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                    token_tree: &[TokenTree])
                                    -> Box<dyn MacResult+'cx> {
     let code = match (token_tree.len(), token_tree.get(0)) {
-        (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
+        (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. }))) => code,
         _ => unreachable!()
     };

@@ -72,12 +72,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
         token_tree.get(1),
         token_tree.get(2)
     ) {
-        (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
+        (1, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })), None, None) => {
             (code, None)
         },
-        (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
-            Some(&TokenTree::Token(_, token::Comma)),
-            Some(&TokenTree::Token(_, token::Literal(token::Lit { symbol, .. })))) => {
+        (3, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })),
+            Some(&TokenTree::Token(Token { kind: token::Comma, .. })),
+            Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. }))) => {
             (code, Some(symbol))
         }
         _ => unreachable!()
@@ -143,9 +143,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
     let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
         (
             // Crate name.
-            &TokenTree::Token(_, token::Ident(ref crate_name, _)),
+            &TokenTree::Token(Token { kind: token::Ident(ref crate_name, _), .. }),
             // DIAGNOSTICS ident.
-            &TokenTree::Token(_, token::Ident(ref name, _))
+            &TokenTree::Token(Token { kind: token::Ident(ref name, _), .. })
         ) => (*&crate_name, name),
         _ => unreachable!()
     };
@@ -265,10 +265,12 @@ impl<F> TTMacroExpander for F

 impl MutVisitor for AvoidInterpolatedIdents {
     fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
-        if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt {
-            if let token::NtIdent(ident, is_raw) = **nt {
-                *tt = tokenstream::TokenTree::Token(ident.span,
-                                                    token::Ident(ident, is_raw));
+        if let tokenstream::TokenTree::Token(token) = tt {
+            if let token::Interpolated(nt) = &token.kind {
+                if let token::NtIdent(ident, is_raw) = **nt {
+                    *tt = tokenstream::TokenTree::token(ident.span,
+                                                        token::Ident(ident, is_raw));
+                }
             }
         }
         mut_visit::noop_visit_tt(tt, self)
@@ -585,7 +585,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 }
                 AttrProcMacro(ref mac, ..) => {
                     self.gate_proc_macro_attr_item(attr.span, &item);
-                    let item_tok = TokenTree::Token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
+                    let item_tok = TokenTree::token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
                         Annotatable::Item(item) => token::NtItem(item),
                         Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                         Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
@@ -78,7 +78,7 @@ use crate::ast::Ident;
 use crate::ext::tt::quoted::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};
-use crate::parse::token::{self, DocComment, Nonterminal, TokenKind};
+use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind};
 use crate::print::pprust;
 use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::{DelimSpan, TokenStream};
@@ -609,7 +609,8 @@ fn inner_parse_loop<'root, 'tt>(
             //
             // At the beginning of the loop, if we reach the end of the delimited submatcher,
             // we pop the stack to backtrack out of the descent.
-            seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
+            seq @ TokenTree::Delimited(..) |
+            seq @ TokenTree::Token(Token { kind: DocComment(..), .. }) => {
                 let lower_elts = mem::replace(&mut item.top_elts, Tt(seq));
                 let idx = item.idx;
                 item.stack.push(MatcherTtFrame {
@@ -621,7 +622,7 @@ fn inner_parse_loop<'root, 'tt>(
             }

             // We just matched a normal token. We can just advance the parser.
-            TokenTree::Token(_, ref t) if token_name_eq(t, token) => {
+            TokenTree::Token(t) if token_name_eq(&t, token) => {
                 item.idx += 1;
                 next_items.push(item);
             }
@@ -11,7 +11,7 @@ use crate::ext::tt::transcribe::transcribe;
 use crate::feature_gate::Features;
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::Parser;
-use crate::parse::token::{self, NtTT};
+use crate::parse::token::{self, Token, NtTT};
 use crate::parse::token::TokenKind::*;
 use crate::symbol::{Symbol, kw, sym};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
@@ -270,7 +270,7 @@ pub fn compile(
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-                quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
+                quoted::TokenTree::token(DUMMY_SP, token::FatArrow),
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
             ],
             separator: Some(if body.legacy { token::Semi } else { token::Comma }),
@@ -279,7 +279,7 @@ pub fn compile(
         })),
         // to phase into semicolon-termination instead of semicolon-separation
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-            tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
+            tts: vec![quoted::TokenTree::token(DUMMY_SP, token::Semi)],
             separator: None,
             op: quoted::KleeneOp::ZeroOrMore,
             num_captures: 0
@@ -613,7 +613,7 @@ impl FirstSets {

                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+                            first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
                         }

                         // Reverse scan: Sequence comes before `first`.
@@ -663,7 +663,7 @@ impl FirstSets {

                     if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                     subfirst.maybe_empty) {
-                        first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+                        first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
                     }

                     assert!(first.maybe_empty);
@@ -869,7 +869,7 @@ fn check_matcher_core(sess: &ParseSess,
                 let mut new;
                 let my_suffix = if let Some(ref u) = seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sp.entire(), u.clone()));
+                    new.add_one_maybe(TokenTree::token(sp.entire(), u.clone()));
                     &new
                 } else {
                     &suffix_first
@@ -1015,7 +1015,7 @@ enum IsInFollow {
 fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
     use quoted::TokenTree;

-    if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
+    if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
         // closing a token tree can never be matched by any fragment;
         // iow, we always require that `(` and `)` match, etc.
         IsInFollow::Yes
@@ -1033,8 +1033,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
         },
         "stmt" | "expr"  => {
             let tokens = vec!["`=>`", "`,`", "`;`"];
-            match *tok {
-                TokenTree::Token(_, ref tok) => match *tok {
+            match tok {
+                TokenTree::Token(token) => match token.kind {
                     FatArrow | Comma | Semi => IsInFollow::Yes,
                     _ => IsInFollow::No(tokens),
                 },
@@ -1043,8 +1043,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
         },
         "pat" => {
             let tokens = vec!["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
-            match *tok {
-                TokenTree::Token(_, ref tok) => match *tok {
+            match tok {
+                TokenTree::Token(token) => match token.kind {
                     FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
                     Ident(i, false) if i.name == kw::If ||
                                        i.name == kw::In => IsInFollow::Yes,
@@ -1058,8 +1058,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                 "`{`", "`[`", "`=>`", "`,`", "`>`","`=`", "`:`", "`;`", "`|`", "`as`",
                 "`where`",
             ];
-            match *tok {
-                TokenTree::Token(_, ref tok) => match *tok {
+            match tok {
+                TokenTree::Token(token) => match token.kind {
                     OpenDelim(token::DelimToken::Brace) |
                     OpenDelim(token::DelimToken::Bracket) |
                     Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
@@ -1089,8 +1089,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
         "vis" => {
             // Explicitly disallow `priv`, on the off chance it comes back.
             let tokens = vec!["`,`", "an ident", "a type"];
-            match *tok {
-                TokenTree::Token(_, ref tok) => match *tok {
+            match tok {
+                TokenTree::Token(token) => match token.kind {
                     Comma => IsInFollow::Yes,
                     Ident(i, is_raw) if is_raw || i.name != kw::Priv =>
                         IsInFollow::Yes,
@@ -1150,7 +1150,7 @@ fn is_legal_fragment_specifier(_sess: &ParseSess,

 fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
-        quoted::TokenTree::Token(_, ref tok) => crate::print::pprust::token_to_string(tok),
+        quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
         quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
         _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
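A side effect visible in the `is_in_follow` hunks above: matching on `tok` (a reference) instead of `*tok` lets the new arms bind the inner `Token` through default binding modes, so the explicit `ref` patterns disappear. A standalone sketch of the same idiom (illustrative types only, not from the commit):

    enum Tree { Token(String), Other }

    fn name(tree: &Tree) -> Option<&str> {
        match tree {
            // `token` is bound as `&String` automatically; no `ref` needed.
            Tree::Token(token) => Some(token.as_str()),
            Tree::Other => None,
        }
    }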
@@ -2,7 +2,8 @@ use crate::ast::NodeId;
 use crate::early_buffered_lints::BufferedEarlyLintId;
 use crate::ext::tt::macro_parser;
 use crate::feature_gate::Features;
-use crate::parse::{token, ParseSess};
+use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::ParseSess;
 use crate::print::pprust;
 use crate::tokenstream::{self, DelimSpan};
 use crate::ast;
@@ -39,7 +40,7 @@ impl Delimited {
         } else {
             span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
         };
-        TokenTree::Token(open_span, self.open_token())
+        TokenTree::token(open_span, self.open_token())
     }

     /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -49,7 +50,7 @@ impl Delimited {
         } else {
             span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
-        TokenTree::Token(close_span, self.close_token())
+        TokenTree::token(close_span, self.close_token())
     }
 }

@@ -81,7 +82,7 @@ pub enum KleeneOp {
 /// are "first-class" token trees. Useful for parsing macros.
 #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum TokenTree {
-    Token(Span, token::TokenKind),
+    Token(Token),
     Delimited(DelimSpan, Lrc<Delimited>),
     /// A kleene-style repetition sequence
     Sequence(DelimSpan, Lrc<SequenceRepetition>),
@@ -144,13 +145,17 @@ impl TokenTree {
     /// Retrieves the `TokenTree`'s span.
     pub fn span(&self) -> Span {
         match *self {
-            TokenTree::Token(sp, _)
-            | TokenTree::MetaVar(sp, _)
-            | TokenTree::MetaVarDecl(sp, _, _) => sp,
-            TokenTree::Delimited(sp, _)
-            | TokenTree::Sequence(sp, _) => sp.entire(),
+            TokenTree::Token(Token { span, .. })
+            | TokenTree::MetaVar(span, _)
+            | TokenTree::MetaVarDecl(span, _, _) => span,
+            TokenTree::Delimited(span, _)
+            | TokenTree::Sequence(span, _) => span.entire(),
         }
     }
+
+    crate fn token(span: Span, kind: TokenKind) -> TokenTree {
+        TokenTree::Token(Token { kind, span })
+    }
 }

 /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
@@ -205,14 +210,14 @@ pub fn parse(
         match tree {
             TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {
-                    Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
-                        Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
+                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => match trees.next() {
+                        Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
                             Some((kind, _)) => {
-                                let span = end_sp.with_lo(start_sp.lo());
+                                let span = token.span.with_lo(start_sp.lo());
                                 result.push(TokenTree::MetaVarDecl(span, ident, kind));
                                 continue;
                             }
-                            _ => end_sp,
+                            _ => token.span,
                         },
                         tree => tree
                             .as_ref()
@@ -270,7 +275,7 @@ where
     // Depending on what `tree` is, we could be parsing different parts of a macro
    match tree {
        // `tree` is a `$` token. Look at the next token in `trees`
-        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
+        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() {
            // `tree` is followed by a delimited set of token trees. This indicates the beginning
            // of a repetition sequence in the macro (e.g. `$(pat)*`).
            Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
@@ -316,33 +321,33 @@ where

            // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
            // metavariable that names the crate of the invocation.
-            Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
+            Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
                let (ident, is_raw) = token.ident().unwrap();
-                let span = ident_span.with_lo(span.lo());
+                let span = token.span.with_lo(span.lo());
                if ident.name == kw::Crate && !is_raw {
                    let ident = ast::Ident::new(kw::DollarCrate, ident.span);
-                    TokenTree::Token(span, token::Ident(ident, is_raw))
+                    TokenTree::token(span, token::Ident(ident, is_raw))
                } else {
                    TokenTree::MetaVar(span, ident)
                }
            }

            // `tree` is followed by a random token. This is an error.
-            Some(tokenstream::TokenTree::Token(span, tok)) => {
+            Some(tokenstream::TokenTree::Token(token)) => {
                let msg = format!(
                    "expected identifier, found `{}`",
-                    pprust::token_to_string(&tok)
+                    pprust::token_to_string(&token),
                );
-                sess.span_diagnostic.span_err(span, &msg);
-                TokenTree::MetaVar(span, ast::Ident::invalid())
+                sess.span_diagnostic.span_err(token.span, &msg);
+                TokenTree::MetaVar(token.span, ast::Ident::invalid())
            }

            // There are no more tokens. Just return the `$` we already have.
-            None => TokenTree::Token(span, token::Dollar),
+            None => TokenTree::token(span, token::Dollar),
        },

        // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
+        tokenstream::TokenTree::Token(token) => TokenTree::Token(token),

        // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
        // descend into the delimited set and further parse it.
@@ -380,17 +385,14 @@ fn kleene_op(token: &token::TokenKind) -> Option<KleeneOp> {
 /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
 /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
 /// - Err(span) if the next token tree is not a token
-fn parse_kleene_op<I>(
-    input: &mut I,
-    span: Span,
-) -> Result<Result<(KleeneOp, Span), (token::TokenKind, Span)>, Span>
+fn parse_kleene_op<I>(input: &mut I, span: Span) -> Result<Result<(KleeneOp, Span), Token>, Span>
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
     match input.next() {
-        Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
-            Some(op) => Ok(Ok((op, span))),
-            None => Ok(Err((tok, span))),
+        Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) {
+            Some(op) => Ok(Ok((op, token.span))),
+            None => Ok(Err(token)),
         },
         tree => Err(tree
             .as_ref()
@@ -466,7 +468,7 @@ where
     assert_eq!(op, KleeneOp::ZeroOrOne);

     // Lookahead at #2. If it is a KleenOp, then #1 is a separator.
-    let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() {
+    let is_1_sep = if let Some(tokenstream::TokenTree::Token(tok2)) = input.peek() {
         kleene_op(tok2).is_some()
     } else {
         false
@@ -504,7 +506,7 @@ where
             }

             // #2 is a random token (this is an error) :(
-            Ok(Err((_, _))) => op1_span,
+            Ok(Err(_)) => op1_span,

             // #2 is not even a token at all :(
             Err(_) => op1_span,
@@ -524,7 +526,7 @@ where
         }

         // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
             // #2 is a `?`, which is not allowed as a Kleene op in 2015 edition,
             // but is allowed in the 2018 edition
             Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
@@ -539,10 +541,10 @@ where
             }

             // #2 is a KleeneOp :D
-            Ok(Ok((op, _))) => return (Some(tok), op),
+            Ok(Ok((op, _))) => return (Some(token.kind), op),

             // #2 is a random token :(
-            Ok(Err((_, span))) => span,
+            Ok(Err(token)) => token.span,

             // #2 is not a token at all :(
             Err(span) => span,
@@ -580,12 +582,12 @@ where
         Ok(Ok((op, _))) => return (None, op),

         // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
             // #2 is the `?` Kleene op, which does not take a separator (error)
             Ok(Ok((op, _op2_span))) if op == KleeneOp::ZeroOrOne => {
                 // Error!
                 sess.span_diagnostic.span_err(
-                    span,
+                    token.span,
                     "the `?` macro repetition operator does not take a separator",
                 );

@@ -594,10 +596,10 @@ where
             }

             // #2 is a KleeneOp :D
-            Ok(Ok((op, _))) => return (Some(tok), op),
+            Ok(Ok((op, _))) => return (Some(token.kind), op),

             // #2 is a random token :(
-            Ok(Err((_, span))) => span,
+            Ok(Err(token)) => token.span,

             // #2 is not a token at all :(
             Err(span) => span,
@@ -119,7 +119,7 @@ pub fn transcribe(
                         Some((tt, _)) => tt.span(),
                         None => DUMMY_SP,
                     };
-                    result.push(TokenTree::Token(prev_span, sep).into());
+                    result.push(TokenTree::token(prev_span, sep).into());
                 }
                 continue;
             }
@@ -225,7 +225,7 @@ pub fn transcribe(
                         result.push(tt.clone().into());
                     } else {
                         sp = sp.apply_mark(cx.current_expansion.mark);
-                        let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
+                        let token = TokenTree::token(sp, token::Interpolated(nt.clone()));
                         result.push(token.into());
                     }
                 } else {
@@ -241,8 +241,8 @@ pub fn transcribe(
                     let ident =
                         Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                     sp = sp.apply_mark(cx.current_expansion.mark);
-                    result.push(TokenTree::Token(sp, token::Dollar).into());
-                    result.push(TokenTree::Token(sp, token::TokenKind::from_ast_ident(ident)).into());
+                    result.push(TokenTree::token(sp, token::Dollar).into());
+                    result.push(TokenTree::token(sp, token::TokenKind::from_ast_ident(ident)).into());
                 }
             }

@@ -259,9 +259,9 @@ pub fn transcribe(

             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
-            quoted::TokenTree::Token(sp, tok) => {
+            quoted::TokenTree::Token(token) => {
                 let mut marker = Marker(cx.current_expansion.mark);
-                let mut tt = TokenTree::Token(sp, tok);
+                let mut tt = TokenTree::Token(token);
                 noop_visit_tt(&mut tt, &mut marker);
                 result.push(tt.into());
             }
@@ -1958,9 +1958,11 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                 name,
                 template
             ),
-            None => if let Some(TokenTree::Token(_, token::Eq)) = attr.tokens.trees().next() {
-                // All key-value attributes are restricted to meta-item syntax.
-                attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok();
+            None => if let Some(TokenTree::Token(token)) = attr.tokens.trees().next() {
+                if token == token::Eq {
+                    // All key-value attributes are restricted to meta-item syntax.
+                    attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok();
+                }
             }
         }
     }
@@ -10,6 +10,7 @@
 #![deny(rust_2018_idioms)]
 #![deny(internal)]

+#![feature(bind_by_move_pattern_guards)]
 #![feature(crate_visibility_modifier)]
 #![feature(label_break_value)]
 #![feature(nll)]
@@ -9,7 +9,7 @@

 use crate::ast::*;
 use crate::source_map::{Spanned, respan};
-use crate::parse::token::{self, TokenKind};
+use crate::parse::token::{self, Token, TokenKind};
 use crate::ptr::P;
 use crate::ThinVec;
 use crate::tokenstream::*;
@@ -576,9 +576,9 @@ pub fn noop_visit_arg<T: MutVisitor>(Arg { id, pat, ty }: &mut Arg, vis: &mut T)

 pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
     match tt {
-        TokenTree::Token(span, tok) => {
+        TokenTree::Token(Token { kind, span }) => {
+            vis.visit_token(kind);
             vis.visit_span(span);
-            vis.visit_token(tok);
         }
         TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
             vis.visit_span(open);
@@ -157,7 +157,7 @@ impl<'a> Parser<'a> {
                    self.check(&token::OpenDelim(DelimToken::Brace)) {
             self.parse_token_tree().into()
         } else if self.eat(&token::Eq) {
-            let eq = TokenTree::Token(self.prev_span, token::Eq);
+            let eq = TokenTree::token(self.prev_span, token::Eq);
             let mut is_interpolated_expr = false;
             if let token::Interpolated(nt) = &self.token {
                 if let token::NtExpr(..) = **nt {
@@ -1596,8 +1596,8 @@ mod tests {
                            "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                            .to_string());
         let id = Ident::from_str("fn");
-        assert_eq!(string_reader.next_token().kind, token::Comment);
-        assert_eq!(string_reader.next_token().kind, token::Whitespace);
+        assert_eq!(string_reader.next_token(), token::Comment);
+        assert_eq!(string_reader.next_token(), token::Whitespace);
         let tok1 = string_reader.next_token();
         let tok2 = Token {
             kind: token::Ident(id, false),
@@ -1605,7 +1605,7 @@ mod tests {
         };
         assert_eq!(tok1.kind, tok2.kind);
         assert_eq!(tok1.span, tok2.span);
-        assert_eq!(string_reader.next_token().kind, token::Whitespace);
+        assert_eq!(string_reader.next_token(), token::Whitespace);
         // the 'main' id is already read:
         assert_eq!(string_reader.pos.clone(), BytePos(28));
         // read another token:
@@ -1625,7 +1625,7 @@ mod tests {
     // of tokens (stop checking after exhausting the expected vec)
     fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
         for expected_tok in &expected {
-            assert_eq!(&string_reader.next_token().kind, expected_tok);
+            assert_eq!(&string_reader.next_token(), expected_tok);
         }
     }

@@ -1683,7 +1683,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(),
                        mk_lit(token::Char, "a", None));
         })
     }
@@ -1693,7 +1693,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(),
                        mk_lit(token::Char, " ", None));
         })
     }
@@ -1703,7 +1703,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(),
                        mk_lit(token::Char, "\\n", None));
         })
     }
@@ -1713,7 +1713,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(),
                        token::Lifetime(Ident::from_str("'abc")));
         })
     }
@@ -1723,7 +1723,7 @@ mod tests {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
                        mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
         })
     }
@@ -1735,10 +1735,10 @@ mod tests {
         let sh = mk_sess(sm.clone());
         macro_rules! test {
             ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
-                assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().kind,
+                assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
                            mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
                 // with a whitespace separator:
-                assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().kind,
+                assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
                            mk_lit(token::$tok_type, $tok_contents, None));
             }}
         }
@@ -1753,11 +1753,11 @@ mod tests {
             test!("1.0", Float, "1.0");
             test!("1.0e10", Float, "1.0e10");

-            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(),
                        mk_lit(token::Integer, "2", Some("us")));
-            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
                        mk_lit(token::StrRaw(3), "raw", Some("suffix")));
-            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
                        mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
         })
     }
@@ -1775,11 +1775,8 @@ mod tests {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
             let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
-            match lexer.next_token().kind {
-                token::Comment => {}
-                _ => panic!("expected a comment!"),
-            }
-            assert_eq!(lexer.next_token().kind, mk_lit(token::Char, "a", None));
+            assert_eq!(lexer.next_token(), token::Comment);
+            assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
         })
     }

@@ -1792,9 +1789,8 @@ mod tests {
             let comment = lexer.next_token();
             assert_eq!(comment.kind, token::Comment);
             assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
-            assert_eq!(lexer.next_token().kind, token::Whitespace);
-            assert_eq!(lexer.next_token().kind,
-                       token::DocComment(Symbol::intern("/// test")));
+            assert_eq!(lexer.next_token(), token::Whitespace);
+            assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
         })
     }
 }
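The lexer-test hunks above compile without the `.kind` projection because `Token` compares directly against a `TokenKind` (the `impl PartialEq<TokenKind> for Token` visible in the token.rs section further down this diff). A minimal standalone model of that pattern, with illustrative names rather than the compiler's:

    #[derive(PartialEq)]
    enum Kind { Comment, Whitespace }

    struct Tok { kind: Kind, pos: u32 }

    impl PartialEq<Kind> for Tok {
        fn eq(&self, other: &Kind) -> bool {
            self.kind == *other
        }
    }

    fn demo() {
        let t = Tok { kind: Kind::Comment, pos: 0 };
        assert!(t == Kind::Comment); // no `.kind` needed at the call site
    }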
@@ -203,7 +203,7 @@ impl<'a> TokenTreesReader<'a> {
                 Err(err)
             },
             _ => {
-                let tt = TokenTree::Token(self.span, self.token.clone());
+                let tt = TokenTree::token(self.span, self.token.clone());
                 // Note that testing for joint-ness here is done via the raw
                 // source span as the joint-ness is a property of the raw source
                 // rather than wanting to take `override_span` into account.
@@ -261,7 +261,7 @@ impl Lit {
             token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
             _ => token::Literal(self.token),
         };
-        TokenTree::Token(self.span, token).into()
+        TokenTree::token(self.span, token).into()
     }
 }
@@ -385,6 +385,7 @@ mod tests {
     use crate::ast::{self, Ident, PatKind};
     use crate::attr::first_attr_value_str_by_name;
     use crate::ptr::P;
+    use crate::parse::token::Token;
     use crate::print::pprust::item_to_string;
     use crate::tokenstream::{DelimSpan, TokenTree};
     use crate::util::parser_testing::string_to_stream;
@@ -426,9 +427,9 @@ mod tests {
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (
                 4,
-                Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))),
-                Some(&TokenTree::Token(_, token::Not)),
-                Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
+                Some(&TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. })),
+                Some(&TokenTree::Token(Token { kind: token::Not, .. })),
+                Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
                 Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
             )
             if name_macro_rules.name == sym::macro_rules
@@ -438,7 +439,7 @@ mod tests {
                     (
                         3,
                         Some(&TokenTree::Delimited(_, first_delim, ref first_tts)),
-                        Some(&TokenTree::Token(_, token::FatArrow)),
+                        Some(&TokenTree::Token(Token { kind: token::FatArrow, .. })),
                         Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
                     )
                     if macro_delim == token::Paren => {
@@ -446,8 +447,8 @@ mod tests {
                         match (tts.len(), tts.get(0), tts.get(1)) {
                             (
                                 2,
-                                Some(&TokenTree::Token(_, token::Dollar)),
-                                Some(&TokenTree::Token(_, token::Ident(ident, false))),
+                                Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
+                                Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
                             )
                             if first_delim == token::Paren && ident.name.as_str() == "a" => {},
                             _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
@@ -456,8 +457,8 @@ mod tests {
                         match (tts.len(), tts.get(0), tts.get(1)) {
                             (
                                 2,
-                                Some(&TokenTree::Token(_, token::Dollar)),
-                                Some(&TokenTree::Token(_, token::Ident(ident, false))),
+                                Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
+                                Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
                             )
                             if second_delim == token::Paren && ident.name.as_str() == "a" => {},
                             _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
@@ -477,16 +478,16 @@ mod tests {
         let tts = string_to_stream("fn a (b : i32) { b; }".to_string());

         let expected = TokenStream::new(vec![
-            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
-            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
+            TokenTree::token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
+            TokenTree::token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
             TokenTree::Delimited(
                 DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                 token::DelimToken::Paren,
                 TokenStream::new(vec![
-                    TokenTree::Token(sp(6, 7),
+                    TokenTree::token(sp(6, 7),
                                      token::Ident(Ident::from_str("b"), false)).into(),
-                    TokenTree::Token(sp(8, 9), token::Colon).into(),
-                    TokenTree::Token(sp(10, 13),
+                    TokenTree::token(sp(8, 9), token::Colon).into(),
+                    TokenTree::token(sp(10, 13),
                                      token::Ident(Ident::from_str("i32"), false)).into(),
                 ]).into(),
             ).into(),
@@ -494,9 +495,9 @@ mod tests {
                 DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                 token::DelimToken::Brace,
                 TokenStream::new(vec![
-                    TokenTree::Token(sp(17, 18),
+                    TokenTree::token(sp(17, 18),
                                      token::Ident(Ident::from_str("b"), false)).into(),
-                    TokenTree::Token(sp(18, 19), token::Semi).into(),
+                    TokenTree::token(sp(18, 19), token::Semi).into(),
                 ]).into(),
             ).into()
         ]);
@@ -318,7 +318,7 @@ impl TokenCursor {
         }

         match tree {
-            TokenTree::Token(span, kind) => return Token { kind, span },
+            TokenTree::Token(token) => return token,
             TokenTree::Delimited(sp, delim, tts) => {
                 let frame = TokenCursorFrame::new(sp, delim, &tts);
                 self.stack.push(mem::replace(&mut self.frame, frame));
@@ -353,9 +353,9 @@ impl TokenCursor {
             delim_span,
             token::Bracket,
             [
-                TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
-                TokenTree::Token(sp, token::Eq),
-                TokenTree::Token(sp, token::TokenKind::lit(
+                TokenTree::token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
+                TokenTree::token(sp, token::Eq),
+                TokenTree::token(sp, token::TokenKind::lit(
                     token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
                 )),
             ]
@@ -366,10 +366,10 @@ impl TokenCursor {
             delim_span,
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
-                [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
+                [TokenTree::token(sp, token::Pound), TokenTree::token(sp, token::Not), body]
                     .iter().cloned().collect::<TokenStream>().into()
             } else {
-                [TokenTree::Token(sp, token::Pound), body]
+                [TokenTree::token(sp, token::Pound), body]
                     .iter().cloned().collect::<TokenStream>().into()
             },
         )));
@@ -1052,7 +1052,7 @@ impl<'a> Parser<'a> {

         f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
             Some(tree) => match tree {
-                TokenTree::Token(_, tok) => tok,
+                TokenTree::Token(token) => token.kind,
                 TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
             },
             None => token::CloseDelim(self.token_cursor.frame.delim),
@@ -1065,7 +1065,7 @@ impl<'a> Parser<'a> {
         }

         match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
-            Some(TokenTree::Token(span, _)) => span,
+            Some(TokenTree::Token(token)) => token.span,
             Some(TokenTree::Delimited(span, ..)) => span.entire(),
             None => self.look_ahead_span(dist - 1),
         }
@@ -2675,7 +2675,7 @@ impl<'a> Parser<'a> {
             _ => {
                 let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
                 self.bump();
-                TokenTree::Token(span, token)
+                TokenTree::token(span, token)
             }
         }
     }
@@ -4344,7 +4344,7 @@ impl<'a> Parser<'a> {
             };
             TokenStream::new(vec![
                 args.into(),
-                TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
+                TokenTree::token(token_lo.to(self.prev_span), token::FatArrow).into(),
                 body.into(),
             ])
         } else {
@@ -18,6 +18,7 @@ use log::info;

 use std::fmt;
 use std::mem;
+use std::ops::Deref;
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert_size;
 use rustc_data_structures::sync::Lrc;
@@ -165,7 +166,7 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
     ].contains(&ident.name)
 }

-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
     Eq,
@@ -235,7 +236,7 @@ pub enum TokenKind {
 #[cfg(target_arch = "x86_64")]
 static_assert_size!(TokenKind, 16);

-#[derive(Clone, Debug)]
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
 pub struct Token {
     pub kind: TokenKind,
     pub span: Span,
@@ -614,6 +615,14 @@ impl PartialEq<TokenKind> for Token {
     }
 }

+// FIXME: Remove this after all necessary methods are moved from `TokenKind` to `Token`.
+impl Deref for Token {
+    type Target = TokenKind;
+    fn deref(&self) -> &Self::Target {
+        &self.kind
+    }
+}
+
 #[derive(Clone, RustcEncodable, RustcDecodable)]
 /// For interpolation during macro expansion.
 pub enum Nonterminal {
@@ -704,11 +713,11 @@ impl Nonterminal {
             }
             Nonterminal::NtIdent(ident, is_raw) => {
                 let token = Ident(ident, is_raw);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(ident.span, token).into())
             }
             Nonterminal::NtLifetime(ident) => {
                 let token = Lifetime(ident);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(ident.span, token).into())
             }
             Nonterminal::NtTT(ref tt) => {
                 Some(tt.clone().into())
@@ -794,7 +803,7 @@ fn prepend_attrs(sess: &ParseSess,
         if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
             let ident = attr.path.segments[0].ident;
             let token = Ident(ident, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::Token(ident.span, token));
+            brackets.push(tokenstream::TokenTree::token(ident.span, token));

         // ... and for more complicated paths, fall back to a reparse hack that
         // should eventually be removed.
@@ -808,7 +817,7 @@ fn prepend_attrs(sess: &ParseSess,
         // The span we list here for `#` and for `[ ... ]` are both wrong in
         // that it encompasses more than each token, but it hopefully is "good
         // enough" for now at least.
-        builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
+        builder.push(tokenstream::TokenTree::token(attr.span, Pound));
         let delim_span = DelimSpan::from_single(attr.span);
         builder.push(tokenstream::TokenTree::Delimited(
             delim_span, DelimToken::Bracket, brackets.build().into()));
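The transitional `impl Deref for Token` added above (marked FIXME in the diff) is what lets call sites such as `token.is_reserved_ident()` keep compiling while the methods still live on `TokenKind`: method resolution auto-derefs through `&self.kind`. A small standalone model of the mechanism, with illustrative names:

    use std::ops::Deref;

    struct Kind;
    impl Kind {
        fn is_reserved(&self) -> bool { false }
    }

    struct Tok { kind: Kind }

    impl Deref for Tok {
        type Target = Kind;
        fn deref(&self) -> &Kind { &self.kind }
    }

    fn demo(t: &Tok) -> bool {
        t.is_reserved() // resolves via Deref to `Kind::is_reserved`
    }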
@@ -724,10 +724,10 @@ pub trait PrintState<'a> {
     /// expression arguments as expressions). It can be done! I think.
     fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
         match tt {
-            TokenTree::Token(_, ref tk) => {
-                self.writer().word(token_to_string(tk))?;
-                match *tk {
-                    parse::token::DocComment(..) => {
+            TokenTree::Token(ref token) => {
+                self.writer().word(token_to_string(&token))?;
+                match token.kind {
+                    token::DocComment(..) => {
                         self.writer().hardbreak()
                     }
                     _ => Ok(())
@ -16,7 +16,7 @@
|
||||
use crate::ext::base;
|
||||
use crate::ext::tt::{macro_parser, quoted};
|
||||
use crate::parse::Directory;
|
||||
use crate::parse::token::{self, DelimToken, TokenKind};
|
||||
use crate::parse::token::{self, DelimToken, Token, TokenKind};
|
||||
use crate::print::pprust;
|
||||
|
||||
use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
|
||||
@ -44,7 +44,7 @@ use std::{fmt, iter, mem};
|
||||
#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
|
||||
pub enum TokenTree {
|
||||
/// A single token
|
||||
Token(Span, token::TokenKind),
|
||||
Token(Token),
|
||||
/// A delimited sequence of token trees
|
||||
Delimited(DelimSpan, DelimToken, TokenStream),
|
||||
}
|
||||
@ -53,8 +53,7 @@ pub enum TokenTree {
|
||||
#[cfg(parallel_compiler)]
|
||||
fn _dummy()
|
||||
where
|
||||
Span: Send + Sync,
|
||||
token::TokenKind: Send + Sync,
|
||||
Token: Send + Sync,
|
||||
DelimSpan: Send + Sync,
|
||||
DelimToken: Send + Sync,
|
||||
TokenStream: Send + Sync,
|
||||
@ -86,12 +85,11 @@ impl TokenTree {
|
||||
/// Checks if this TokenTree is equal to the other, regardless of span information.
|
||||
pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
|
||||
match (self, other) {
|
||||
(&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
|
||||
(&TokenTree::Delimited(_, delim, ref tts),
|
||||
&TokenTree::Delimited(_, delim2, ref tts2)) => {
|
||||
(TokenTree::Token(token), TokenTree::Token(token2)) => token.kind == token2.kind,
|
||||
(TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
|
||||
delim == delim2 && tts.eq_unspanned(&tts2)
|
||||
}
|
||||
(_, _) => false,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
@ -102,37 +100,36 @@ impl TokenTree {
|
||||
// different method.
|
||||
pub fn probably_equal_for_proc_macro(&self, other: &TokenTree) -> bool {
|
||||
match (self, other) {
|
||||
(&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => {
|
||||
tk.probably_equal_for_proc_macro(tk2)
|
||||
(TokenTree::Token(token), TokenTree::Token(token2)) => {
|
||||
token.probably_equal_for_proc_macro(token2)
|
||||
}
|
||||
(&TokenTree::Delimited(_, delim, ref tts),
|
||||
&TokenTree::Delimited(_, delim2, ref tts2)) => {
|
||||
(TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
|
||||
delim == delim2 && tts.probably_equal_for_proc_macro(&tts2)
|
||||
}
|
||||
(_, _) => false,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Retrieves the TokenTree's span.
|
||||
pub fn span(&self) -> Span {
|
||||
match *self {
|
||||
TokenTree::Token(sp, _) => sp,
|
||||
match self {
|
||||
TokenTree::Token(token) => token.span,
|
||||
TokenTree::Delimited(sp, ..) => sp.entire(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Modify the `TokenTree`'s span in-place.
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
match *self {
|
||||
TokenTree::Token(ref mut sp, _) => *sp = span,
|
||||
TokenTree::Delimited(ref mut sp, ..) => *sp = DelimSpan::from_single(span),
|
||||
match self {
|
||||
TokenTree::Token(token) => token.span = span,
|
||||
TokenTree::Delimited(dspan, ..) => *dspan = DelimSpan::from_single(span),
|
||||
}
|
||||
}
|
||||
|
||||
/// Indicates if the stream is a token that is equal to the provided token.
|
||||
pub fn eq_token(&self, t: TokenKind) -> bool {
|
||||
match *self {
|
||||
TokenTree::Token(_, ref tk) => *tk == t,
|
||||
match self {
|
||||
TokenTree::Token(token) => *token == t,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
@ -141,6 +138,10 @@ impl TokenTree {
|
||||
TokenStream::new(vec![(self, Joint)])
|
||||
}
|
||||
|
||||
pub fn token(span: Span, kind: TokenKind) -> TokenTree {
|
||||
TokenTree::Token(Token { kind, span })
|
||||
}
|
||||

    /// Returns the opening delimiter as a token tree.
    pub fn open_tt(span: Span, delim: DelimToken) -> TokenTree {
        let open_span = if span.is_dummy() {
@ -148,7 +149,7 @@ impl TokenTree {
        } else {
            span.with_hi(span.lo() + BytePos(delim.len() as u32))
        };
        TokenTree::Token(open_span, token::OpenDelim(delim))
        TokenTree::token(open_span, token::OpenDelim(delim))
    }

    /// Returns the closing delimiter as a token tree.
@ -158,7 +159,7 @@ impl TokenTree {
        } else {
            span.with_lo(span.hi() - BytePos(delim.len() as u32))
        };
        TokenTree::Token(close_span, token::CloseDelim(delim))
        TokenTree::token(close_span, token::CloseDelim(delim))
    }
}
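A hedged worked example of the delimiter spans (my numbers): for a non-dummy group span covering bytes 10..20 with `DelimToken::Paren` (`delim.len()` of 1), `open_tt` spans just the opening byte and `close_tt` just the closing one:

    // open_tt:  span.with_hi(span.lo() + BytePos(1))  => 10..11
    // close_tt: span.with_lo(span.hi() - BytePos(1))  => 19..20
    let open = TokenTree::open_tt(span, DelimToken::Paren);
    assert_eq!(open.span(), span.with_hi(span.lo() + BytePos(1)));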
@ -201,18 +202,17 @@ impl TokenStream {
        while let Some((pos, ts)) = iter.next() {
            if let Some((_, next)) = iter.peek() {
                let sp = match (&ts, &next) {
                    (_, (TokenTree::Token(_, token::Comma), _)) => continue,
                    ((TokenTree::Token(sp, token_left), NonJoint),
                     (TokenTree::Token(_, token_right), _))
                    (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
                    ((TokenTree::Token(token_left), NonJoint), (TokenTree::Token(token_right), _))
                        if ((token_left.is_ident() && !token_left.is_reserved_ident())
                            || token_left.is_lit()) &&
                            ((token_right.is_ident() && !token_right.is_reserved_ident())
                            || token_right.is_lit()) => *sp,
                            || token_right.is_lit()) => token_left.span,
                    ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
                    _ => continue,
                };
                let sp = sp.shrink_to_hi();
                let comma = (TokenTree::Token(sp, token::Comma), NonJoint);
                let comma = (TokenTree::token(sp, token::Comma), NonJoint);
                suggestion = Some((pos, comma, sp));
            }
        }
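A hedged trace of when the comma suggestion fires (my example): given adjacent non-reserved idents `a b` in a NonJoint stream, the match selects `token_left.span`, and the comma is proposed at its high end:

    // token_left = `a` (span 0..1), token_right = `b` (span 2..3)
    // sp = token_left.span        => 0..1
    // sp.shrink_to_hi()           => 1..1, the position where `,` is suggested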
@ -241,12 +241,6 @@ impl From<TokenTree> for TreeAndJoint {
    }
}

impl From<TokenKind> for TokenStream {
    fn from(token: TokenKind) -> TokenStream {
        TokenTree::Token(DUMMY_SP, token).into()
    }
}

impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<SmallVec<_>>())
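With `impl From<TokenKind> for TokenStream` gone, a bare kind no longer converts implicitly. A hedged sketch of the replacement spelling (mine), using the same `DUMMY_SP` the removed impl supplied:

    // Before: let ts: TokenStream = token::Comma.into();
    let ts: TokenStream = TokenTree::token(DUMMY_SP, token::Comma).into();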
@ -349,22 +343,25 @@ impl TokenStream {
        // streams, making a comparison between a token stream generated from an
        // AST and a token stream which was parsed into an AST more reliable.
        fn semantic_tree(tree: &TokenTree) -> bool {
            match tree {
                // The pretty printer tends to add trailing commas to
                // everything, and in particular, after struct fields.
                | TokenTree::Token(_, token::Comma)
                // The pretty printer emits `NoDelim` as whitespace.
                | TokenTree::Token(_, token::OpenDelim(DelimToken::NoDelim))
                | TokenTree::Token(_, token::CloseDelim(DelimToken::NoDelim))
                // The pretty printer collapses many semicolons into one.
                | TokenTree::Token(_, token::Semi)
                // The pretty printer collapses whitespace arbitrarily and can
                // introduce whitespace from `NoDelim`.
                | TokenTree::Token(_, token::Whitespace)
                // The pretty printer can turn `$crate` into `::crate_name`
                | TokenTree::Token(_, token::ModSep) => false,
                _ => true
            if let TokenTree::Token(token) = tree {
                if let
                    // The pretty printer tends to add trailing commas to
                    // everything, and in particular, after struct fields.
                    | token::Comma
                    // The pretty printer emits `NoDelim` as whitespace.
                    | token::OpenDelim(DelimToken::NoDelim)
                    | token::CloseDelim(DelimToken::NoDelim)
                    // The pretty printer collapses many semicolons into one.
                    | token::Semi
                    // The pretty printer collapses whitespace arbitrarily and can
                    // introduce whitespace from `NoDelim`.
                    | token::Whitespace
                    // The pretty printer can turn `$crate` into `::crate_name`
                    | token::ModSep = token.kind {
                    return false;
                }
            }
            true
        }

        let mut t1 = self.trees().filter(semantic_tree);
@ -430,13 +427,13 @@ impl TokenStreamBuilder {
    pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
        let stream = stream.into();
        let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint);
        if let Some(TokenTree::Token(last_span, last_tok)) = last_tree_if_joint {
            if let Some((TokenTree::Token(span, tok), is_joint)) = stream.first_tree_and_joint() {
                if let Some(glued_tok) = last_tok.glue(tok) {
        if let Some(TokenTree::Token(last_token)) = last_tree_if_joint {
            if let Some((TokenTree::Token(token), is_joint)) = stream.first_tree_and_joint() {
                if let Some(glued_tok) = last_token.kind.glue(token.kind) {
                    let last_stream = self.0.pop().unwrap();
                    self.push_all_but_last_tree(&last_stream);
                    let glued_span = last_span.to(span);
                    let glued_tt = TokenTree::Token(glued_span, glued_tok);
                    let glued_span = last_token.span.to(token.span);
                    let glued_tt = TokenTree::token(glued_span, glued_tok);
                    let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
                    self.0.push(glued_tokenstream);
                    self.push_all_but_first_tree(&stream);
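A hedged sketch of the gluing path (mine; `test_dotdotdot` below exercises it with dots, and I am assuming `Eq` glues with `Eq` to `EqEq`):

    let mut builder = TokenStreamBuilder::new();
    builder.push(TokenTree::token(sp(0, 1), token::Eq).joint());
    builder.push(TokenTree::token(sp(1, 2), token::Eq));
    // If the glue succeeds, the built stream holds a single `==` token
    // whose span is sp(0, 1).to(sp(1, 2)), i.e. 0..2.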
@ -663,7 +660,7 @@ mod tests {
        with_default_globals(|| {
            let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
            let test1: TokenStream =
                TokenTree::Token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into();
                TokenTree::token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into();
            let test2 = string_to_ts("foo(bar::baz)");

            assert_eq!(test0.is_empty(), true);
@ -676,9 +673,9 @@ mod tests {
    fn test_dotdotdot() {
        with_default_globals(|| {
            let mut builder = TokenStreamBuilder::new();
            builder.push(TokenTree::Token(sp(0, 1), token::Dot).joint());
            builder.push(TokenTree::Token(sp(1, 2), token::Dot).joint());
            builder.push(TokenTree::Token(sp(2, 3), token::Dot));
            builder.push(TokenTree::token(sp(0, 1), token::Dot).joint());
            builder.push(TokenTree::token(sp(1, 2), token::Dot).joint());
            builder.push(TokenTree::token(sp(2, 3), token::Dot));
            let stream = builder.build();
            assert!(stream.eq_unspanned(&string_to_ts("...")));
            assert_eq!(stream.trees().count(), 1);
@ -855,7 +855,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute)

pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
    match tt {
        TokenTree::Token(_, tok) => visitor.visit_token(tok),
        TokenTree::Token(token) => visitor.visit_token(token.kind),
        TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts),
    }
}
@ -9,7 +9,8 @@ use errors::DiagnosticBuilder;
use syntax::ast;
use syntax::ext::base::{self, *};
use syntax::feature_gate;
use syntax::parse::{self, token};
use syntax::parse;
use syntax::parse::token::{self, Token};
use syntax::ptr::P;
use syntax::symbol::{kw, sym, Symbol};
use syntax::ast::AsmDialect;
@ -86,8 +87,8 @@ fn parse_inline_asm<'a>(
    let first_colon = tts.iter()
        .position(|tt| {
            match *tt {
                tokenstream::TokenTree::Token(_, token::Colon) |
                tokenstream::TokenTree::Token(_, token::ModSep) => true,
                tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) |
                tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true,
                _ => false,
            }
        })
@ -29,7 +29,7 @@ pub fn expand_assert<'cx>(
    let panic_call = Mac_ {
        path: Path::from_ident(Ident::new(sym::panic, sp)),
        tts: custom_message.unwrap_or_else(|| {
            TokenStream::from(TokenTree::Token(
            TokenStream::from(TokenTree::token(
                DUMMY_SP,
                TokenKind::lit(token::Str, Symbol::intern(&format!(
                    "assertion failed: {}",
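A hedged sketch of building such a string-literal token directly (mine; the `None` suffix mirrors the `TokenKind::lit` call in the proc_macro server hunk further down):

    let msg = TokenKind::lit(token::Str, Symbol::intern("assertion failed: cond"), None);
    let tt = TokenTree::token(DUMMY_SP, msg);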
@ -3,7 +3,7 @@ use rustc_data_structures::thin_vec::ThinVec;
use syntax::ast;
use syntax::ext::base::{self, *};
use syntax::feature_gate;
use syntax::parse::token;
use syntax::parse::token::{self, Token};
use syntax::ptr::P;
use syntax_pos::Span;
use syntax_pos::symbol::{Symbol, sym};
@ -30,7 +30,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
    for (i, e) in tts.iter().enumerate() {
        if i & 1 == 1 {
            match *e {
                TokenTree::Token(_, token::Comma) => {}
                TokenTree::Token(Token { kind: token::Comma, .. }) => {}
                _ => {
                    cx.span_err(sp, "concat_idents! expecting comma.");
                    return DummyResult::any(sp);
@ -38,7 +38,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
            }
        } else {
            match *e {
                TokenTree::Token(_, token::Ident(ident, _)) =>
                TokenTree::Token(Token { kind: token::Ident(ident, _), .. }) =>
                    res_str.push_str(&ident.as_str()),
                _ => {
                    cx.span_err(sp, "concat_idents! requires ident args.");
@ -69,7 +69,7 @@ impl MultiItemModifier for ProcMacroDerive {
        MarkAttrs(&self.attrs).visit_item(&item);

        let token = token::Interpolated(Lrc::new(token::NtItem(item)));
        let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into();
        let input = tokenstream::TokenTree::token(DUMMY_SP, token).into();

        let server = proc_macro_server::Rustc::new(ecx);
        let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
@ -55,7 +55,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
        use syntax::parse::token::*;

        let joint = is_joint == Joint;
        let (span, token) = match tree {
        let Token { kind, span } = match tree {
            tokenstream::TokenTree::Delimited(span, delim, tts) => {
                let delimiter = Delimiter::from_internal(delim);
                return TokenTree::Group(Group {
@ -64,7 +64,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                    span,
                });
            }
            tokenstream::TokenTree::Token(span, token) => (span, token),
            tokenstream::TokenTree::Token(token) => token,
        };

        macro_rules! tt {
@ -93,7 +93,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
            }};
        }

        match token {
        match kind {
            Eq => op!('='),
            Lt => op!('<'),
            Le => op!('<', '='),
@ -164,7 +164,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                    TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                ]
                .into_iter()
                .map(|token| tokenstream::TokenTree::Token(span, token))
                .map(|kind| tokenstream::TokenTree::token(span, kind))
                .collect();
                stack.push(TokenTree::Group(Group {
                    delimiter: Delimiter::Bracket,
@ -212,7 +212,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
            }
            TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
                let token = Ident(ast::Ident::new(sym, span), is_raw);
                return tokenstream::TokenTree::Token(span, token).into();
                return tokenstream::TokenTree::token(span, token).into();
            }
            TokenTree::Literal(self::Literal {
                lit: token::Lit { kind: token::Integer, symbol, suffix },
@ -221,8 +221,8 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
                let minus = BinOp(BinOpToken::Minus);
                let symbol = Symbol::intern(&symbol.as_str()[1..]);
                let integer = TokenKind::lit(token::Integer, symbol, suffix);
                let a = tokenstream::TokenTree::Token(span, minus);
                let b = tokenstream::TokenTree::Token(span, integer);
                let a = tokenstream::TokenTree::token(span, minus);
                let b = tokenstream::TokenTree::token(span, integer);
                return vec![a, b].into_iter().collect();
            }
            TokenTree::Literal(self::Literal {
@ -232,16 +232,16 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
                let minus = BinOp(BinOpToken::Minus);
                let symbol = Symbol::intern(&symbol.as_str()[1..]);
                let float = TokenKind::lit(token::Float, symbol, suffix);
                let a = tokenstream::TokenTree::Token(span, minus);
                let b = tokenstream::TokenTree::Token(span, float);
                let a = tokenstream::TokenTree::token(span, minus);
                let b = tokenstream::TokenTree::token(span, float);
                return vec![a, b].into_iter().collect();
            }
            TokenTree::Literal(self::Literal { lit, span }) => {
                return tokenstream::TokenTree::Token(span, Literal(lit)).into()
                return tokenstream::TokenTree::token(span, Literal(lit)).into()
            }
        };

        let token = match ch {
        let kind = match ch {
            '=' => Eq,
            '<' => Lt,
            '>' => Gt,
@ -267,7 +267,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
            _ => unreachable!(),
        };

        let tree = tokenstream::TokenTree::Token(span, token);
        let tree = tokenstream::TokenTree::token(span, kind);
        TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
    }
}
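A hedged walk-through of the negative-literal arms above (mine): a proc-macro `Literal` carrying `-42` cannot be one Rust token, so it lowers to two trees that share the literal's span:

    // symbol = "-42" => minus sign split off, remainder reinterned as "42"
    // a = tokenstream::TokenTree::token(span, BinOp(BinOpToken::Minus))
    // b = tokenstream::TokenTree::token(span, TokenKind::lit(token::Integer, Symbol::intern("42"), suffix))
    // result: vec![a, b].into_iter().collect::<TokenStream>()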
@ -17,10 +17,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt<'_>,
    }

    match (tt.len(), tt.first()) {
        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::True) => {
        (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::True) => {
            cx.set_trace_macros(true);
        }
        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::False) => {
        (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::False) => {
            cx.set_trace_macros(false);
        }
        _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
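A hedged usage reminder (mine; `trace_macros` is a nightly feature): the expander accepts exactly one `true` or `false` token:

    trace_macros!(true);   // subsequent macro expansions are traced
    trace_macros!(false);  // tracing off; anything else hits the span_err arm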