Rollup merge of #39419 - jseyfried:simplify_tokentree, r=nrc
Simplify `TokenTree` and fix `macro_rules!` bugs

This PR
- fixes #39390, fixes #39403, and fixes #39404 (each is a [breaking-change], see issues for examples),
- fixes #39889,
- simplifies and optimizes macro invocation parsing,
- cleans up `ext::tt::transcribe`,
- removes `tokenstream::TokenTree::Sequence` and `Token::MatchNt`,
  - instead, adds a new type `ext::tt::quoted::TokenTree` for use by `macro_rules!` (`ext::tt`),
- removes `parser.quote_depth` and `parser.parsing_token_tree`, and
- removes `quote_matcher!`.
  - Instead, use `quote_tokens!` and parse the result with `ext::tt::quoted::parse`, passing `expect_matchers=true`.
  - I found no outside uses of `quote_matcher!` when searching Rust code on Github.

r? @nrc
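For plugin authors, the `quote_matcher!` removal is a mechanical replacement. A hedged sketch of the migration (rustc-internal API; assumes an `ExtCtxt` named `cx` is in scope):

```rust
// Before (removed): let matcher = quote_matcher!(cx, $e:expr);
// After: build ordinary token trees, then re-parse them as a matcher.
let tts = quote_tokens!(cx, $e:expr);
let matcher = syntax::ext::tt::quoted::parse(&tts, /* expect_matchers */ true, cx.parse_sess);
```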
commit 4ba49ab39f
@@ -173,8 +173,7 @@ impl FromStr for TokenStream {
         __internal::with_parse_sess(|sess| {
             let src = src.to_string();
             let name = "<proc-macro source code>".to_string();
-            let tts = try!(parse::parse_tts_from_source_str(name, src, sess)
-                .map_err(parse_to_lex_err));
+            let tts = parse::parse_tts_from_source_str(name, src, sess);

             Ok(__internal::token_stream_wrap(tts.into_iter().collect()))
         })
@@ -119,7 +119,6 @@ impl Quote for TokenTree {
                 ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP,
                                                             (quote delimited))
             },
-            _ => panic!("unexpected `TokenTree::Sequence` in `qquote`"),
         }
     }
 }
@@ -236,6 +236,12 @@ declare_lint! {
     "detects use of struct constructors that would be invisible with new visibility rules"
 }

+declare_lint! {
+    pub MISSING_FRAGMENT_SPECIFIER,
+    Warn,
+    "detects missing fragment specifiers in unused `macro_rules!` patterns"
+}
+
 declare_lint! {
     pub DEPRECATED,
     Warn,
@@ -286,6 +292,7 @@ impl LintPass for HardwiredLints {
             LEGACY_DIRECTORY_OWNERSHIP,
             LEGACY_IMPORTS,
             LEGACY_CONSTRUCTOR_VISIBILITY,
+            MISSING_FRAGMENT_SPECIFIER,
             DEPRECATED
         )
     }
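As an illustration of what the new lint catches (my own example, not part of the diff): a matcher metavariable with no `:kind` annotation has no fragment specifier. After this change it is a hard error when the arm is used, and this future-incompatibility warning when it is unused (issue #40107).

```rust
macro_rules! m {
    ($i) => {};       // missing fragment specifier: lints today, errors eventually
    ($j:ident) => {}; // OK: `ident` is a valid fragment specifier
}
```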
@@ -688,6 +688,14 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,

     let krate = ecx.monotonic_expander().expand_crate(krate);

+    let mut missing_fragment_specifiers: Vec<_> =
+        ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect();
+    missing_fragment_specifiers.sort();
+    for span in missing_fragment_specifiers {
+        let lint = lint::builtin::MISSING_FRAGMENT_SPECIFIER;
+        let msg = "missing fragment specifier".to_string();
+        sess.add_lint(lint, ast::CRATE_NODE_ID, span, msg);
+    }
     if ecx.parse_sess.span_diagnostic.err_count() - ecx.resolve_err_count > err_count {
         ecx.parse_sess.span_diagnostic.abort_if_errors();
     }
@@ -1044,26 +1044,6 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
                     self.hash_token_tree(sub_tt);
                 }
             }
-            tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
-                hash_span!(self, span);
-                let tokenstream::SequenceRepetition {
-                    ref tts,
-                    ref separator,
-                    op,
-                    num_captures,
-                } = **sequence_repetition;
-
-                tts.len().hash(self.st);
-                for sub_tt in tts {
-                    self.hash_token_tree(sub_tt);
-                }
-                self.hash_discriminant(separator);
-                if let Some(ref separator) = *separator {
-                    self.hash_token(separator, span);
-                }
-                op.hash(self.st);
-                num_captures.hash(self.st);
-            }
         }
     }

@@ -1129,10 +1109,6 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
             token::Token::Ident(ident) |
             token::Token::Lifetime(ident) |
             token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st),
-            token::Token::MatchNt(ident1, ident2) => {
-                ident1.name.as_str().hash(self.st);
-                ident2.name.as_str().hash(self.st);
-            }

             token::Token::Interpolated(ref non_terminal) => {
                 // FIXME(mw): This could be implemented properly. It's just a
@@ -247,6 +247,10 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) {
             id: LintId::of(LEGACY_CONSTRUCTOR_VISIBILITY),
             reference: "issue #39207 <https://github.com/rust-lang/rust/issues/39207>",
         },
+        FutureIncompatibleInfo {
+            id: LintId::of(MISSING_FRAGMENT_SPECIFIER),
+            reference: "issue #40107 <https://github.com/rust-lang/rust/issues/40107>",
+        },
         ]);

     // Register renamed and removed lints
@@ -287,7 +287,7 @@ impl<'a> SpanUtils<'a> {
         let mut toks = toks.parse_all_token_trees().unwrap().into_iter();
         let mut prev = toks.next().unwrap();

-        let first_span = prev.get_span();
+        let first_span = prev.span();
         let mut angle_count = 0;
         for tok in toks {
             if let TokenTree::Token(_, ref tok) = prev {
@@ -305,10 +305,10 @@ impl<'a> SpanUtils<'a> {
                 continue;
             }
             if let TokenTree::Token(_, token::Semi) = tok {
-                return self.snippet(mk_sp(first_span.lo, prev.get_span().hi));
+                return self.snippet(mk_sp(first_span.lo, prev.span().hi));
             } else if let TokenTree::Delimited(_, ref d) = tok {
                 if d.delim == token::Brace {
-                    return self.snippet(mk_sp(first_span.lo, prev.get_span().hi));
+                    return self.snippet(mk_sp(first_span.lo, prev.span().hi));
                 }
             }
             prev = tok;
@@ -315,7 +315,7 @@ impl<'a> Classifier<'a> {
             token::Lifetime(..) => Class::Lifetime,

             token::Underscore | token::Eof | token::Interpolated(..) |
-            token::MatchNt(..) | token::SubstNt(..) | token::Tilde | token::At => Class::None,
+            token::SubstNt(..) | token::Tilde | token::At => Class::None,
         };

         // Anything that didn't return above is the simple case where we the
@@ -211,7 +211,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
         };

         // FIXME(jseyfried) merge with `self.visit_macro()`
-        let matchers = def.body.chunks(4).map(|arm| arm[0].get_span()).collect();
+        let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
         om.macros.push(Macro {
             def_id: def_id,
             attrs: def.attrs.clone().into(),
@@ -521,7 +521,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
     // convert each exported_macro into a doc item
     fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro {
         // Extract the spans of all matchers. They represent the "interface" of the macro.
-        let matchers = def.body.chunks(4).map(|arm| arm[0].get_span()).collect();
+        let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();

         Macro {
             def_id: self.cx.tcx.hir.local_def_id(def.id),
@@ -14,10 +14,9 @@ use ext::base::ExtCtxt;
 use ext::base;
 use ext::build::AstBuilder;
 use parse::parser::{Parser, PathStyle};
 use parse::token::*;
 use parse::token;
 use ptr::P;
-use tokenstream::{self, TokenTree};
+use tokenstream::TokenTree;
-

 /// Quasiquoting works via token trees.
@@ -356,14 +355,35 @@ pub mod rt {
         }

         fn parse_tts(&self, s: String) -> Vec<TokenTree> {
-            panictry!(parse::parse_tts_from_source_str(
-                "<quote expansion>".to_string(),
-                s,
-                self.parse_sess()))
+            parse::parse_tts_from_source_str("<quote expansion>".to_string(), s, self.parse_sess())
         }
     }
 }

+// Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`.
+pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
+    use std::rc::Rc;
+    use tokenstream::Delimited;
+
+    let mut results = Vec::new();
+    let mut result = Vec::new();
+    for tree in tts {
+        match tree {
+            TokenTree::Token(_, token::OpenDelim(..)) => {
+                results.push(::std::mem::replace(&mut result, Vec::new()));
+            }
+            TokenTree::Token(span, token::CloseDelim(delim)) => {
+                let tree =
+                    TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, tts: result }));
+                result = results.pop().unwrap();
+                result.push(tree);
+            }
+            tree @ _ => result.push(tree),
+        }
+    }
+    result
+}
+
 // These panicking parsing functions are used by the quote_*!() syntax extensions,
 // but shouldn't be used otherwise.
 pub fn parse_expr_panic(parser: &mut Parser) -> P<Expr> {
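The stack discipline in `unflatten` is easiest to see on a toy token type. A self-contained analogue (the types and names here are mine, not rustc's):

```rust
#[derive(Debug, PartialEq)]
enum Tree {
    Tok(char),
    Group(Vec<Tree>),
}

fn unflatten(tokens: &str) -> Vec<Tree> {
    let mut stack = Vec::new(); // one saved partial result per open delimiter
    let mut result = Vec::new();
    for c in tokens.chars() {
        match c {
            // An open delimiter saves the current level and starts a new one.
            '(' => stack.push(std::mem::take(&mut result)),
            // A close delimiter wraps the current level and resumes the outer one.
            ')' => {
                let group = Tree::Group(std::mem::take(&mut result));
                result = stack.pop().expect("unbalanced delimiters");
                result.push(group);
            }
            c => result.push(Tree::Tok(c)),
        }
    }
    result
}

fn main() {
    assert_eq!(
        unflatten("a(bc)"),
        vec![Tree::Tok('a'), Tree::Group(vec![Tree::Tok('b'), Tree::Tok('c')])]
    );
}
```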
@@ -510,20 +530,6 @@ pub fn expand_quote_path(cx: &mut ExtCtxt,
     base::MacEager::expr(expanded)
 }

-pub fn expand_quote_matcher(cx: &mut ExtCtxt,
-                            sp: Span,
-                            tts: &[TokenTree])
-                            -> Box<base::MacResult+'static> {
-    let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
-    let mut vector = mk_stmts_let(cx, sp);
-    vector.extend(statements_mk_tts(cx, &tts[..], true));
-    vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
-    let block = cx.expr_block(cx.block(sp, vector));
-
-    let expanded = expand_wrapper(cx, sp, cx_expr, block, &[&["syntax", "ext", "quote", "rt"]]);
-    base::MacEager::expr(expanded)
-}
-
 fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> {
     strs.iter().map(|s| ast::Ident::from_str(s)).collect()
 }
@@ -669,12 +675,6 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
                                 vec![mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))]);
         }

-        token::MatchNt(name, kind) => {
-            return cx.expr_call(sp,
-                                mk_token_path(cx, sp, "MatchNt"),
-                                vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]);
-        }
-
         token::Interpolated(_) => panic!("quote! with interpolated token"),

         _ => ()
@@ -712,9 +712,9 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
     mk_token_path(cx, sp, name)
 }

-fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> {
+fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt> {
     match *tt {
-        TokenTree::Token(sp, SubstNt(ident)) => {
+        TokenTree::Token(sp, token::Ident(ident)) if quoted => {
             // tt.extend($ident.to_tokens(ext_cx))

             let e_to_toks =
@@ -733,13 +733,6 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> {

             vec![cx.stmt_expr(e_push)]
         }
-        ref tt @ TokenTree::Token(_, MatchNt(..)) if !matcher => {
-            let mut seq = vec![];
-            for i in 0..tt.len() {
-                seq.push(tt.get_tt(i));
-            }
-            statements_mk_tts(cx, &seq[..], matcher)
-        }
         TokenTree::Token(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
             let e_tok = cx.expr_call(sp,
@@ -753,77 +746,17 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> {
             vec![cx.stmt_expr(e_push)]
         },
         TokenTree::Delimited(span, ref delimed) => {
-            statements_mk_tt(cx, &delimed.open_tt(span), matcher).into_iter()
-                .chain(delimed.tts.iter()
-                                  .flat_map(|tt| statements_mk_tt(cx, tt, matcher)))
-                .chain(statements_mk_tt(cx, &delimed.close_tt(span), matcher))
-                .collect()
-        },
-        TokenTree::Sequence(sp, ref seq) => {
-            if !matcher {
-                panic!("TokenTree::Sequence in quote!");
-            }
-
-            let e_sp = cx.expr_ident(sp, id_ext("_sp"));
-
-            let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
-            let mut tts_stmts = vec![stmt_let_tt];
-            tts_stmts.extend(statements_mk_tts(cx, &seq.tts[..], matcher));
-            tts_stmts.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
-            let e_tts = cx.expr_block(cx.block(sp, tts_stmts));
-
-            let e_separator = match seq.separator {
-                Some(ref sep) => cx.expr_some(sp, expr_mk_token(cx, sp, sep)),
-                None => cx.expr_none(sp),
-            };
-            let e_op = match seq.op {
-                tokenstream::KleeneOp::ZeroOrMore => "ZeroOrMore",
-                tokenstream::KleeneOp::OneOrMore => "OneOrMore",
-            };
-            let e_op_idents = vec![
-                id_ext("syntax"),
-                id_ext("tokenstream"),
-                id_ext("KleeneOp"),
-                id_ext(e_op),
-            ];
-            let e_op = cx.expr_path(cx.path_global(sp, e_op_idents));
-            let fields = vec![cx.field_imm(sp, id_ext("tts"), e_tts),
-                              cx.field_imm(sp, id_ext("separator"), e_separator),
-                              cx.field_imm(sp, id_ext("op"), e_op),
-                              cx.field_imm(sp, id_ext("num_captures"),
-                                           cx.expr_usize(sp, seq.num_captures))];
-            let seq_path = vec![id_ext("syntax"),
-                                id_ext("tokenstream"),
-                                id_ext("SequenceRepetition")];
-            let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields);
-            let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"),
-                                                        id_ext("rc"),
-                                                        id_ext("Rc"),
-                                                        id_ext("new")],
-                                               vec![e_seq_struct]);
-            let e_tok = cx.expr_call(sp,
-                                     mk_tt_path(cx, sp, "Sequence"),
-                                     vec![e_sp, e_rc_new]);
-            let e_push =
-                cx.expr_method_call(sp,
-                                    cx.expr_ident(sp, id_ext("tt")),
-                                    id_ext("push"),
-                                    vec![e_tok]);
-            vec![cx.stmt_expr(e_push)]
+            let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
+            stmts.extend(statements_mk_tts(cx, &delimed.tts));
+            stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
+            stmts
         }
     }
 }

 fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree])
                             -> (P<ast::Expr>, Vec<TokenTree>) {
-    // NB: It appears that the main parser loses its mind if we consider
-    // $foo as a SubstNt during the main parse, so we have to re-parse
-    // under quote_depth > 0. This is silly and should go away; the _guess_ is
-    // it has to do with transition away from supporting old-style macros, so
-    // try removing it when enough of them are gone.

     let mut p = cx.new_parser_from_tts(tts);
-    p.quote_depth += 1;
-
     let cx_expr = panictry!(p.parse_expr());
     if !p.eat(&token::Comma) {
@@ -877,24 +810,31 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> {
     vec![stmt_let_sp, stmt_let_tt]
 }

-fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree], matcher: bool) -> Vec<ast::Stmt> {
+fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree]) -> Vec<ast::Stmt> {
     let mut ss = Vec::new();
+    let mut quoted = false;
     for tt in tts {
-        ss.extend(statements_mk_tt(cx, tt, matcher));
+        quoted = match *tt {
+            TokenTree::Token(_, token::Dollar) if !quoted => true,
+            _ => {
+                ss.extend(statements_mk_tt(cx, tt, quoted));
+                false
+            }
+        };
     }
     ss
 }

-fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree])
-              -> (P<ast::Expr>, P<ast::Expr>) {
+fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<ast::Expr>) {
     let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);

     let mut vector = mk_stmts_let(cx, sp);
-    vector.extend(statements_mk_tts(cx, &tts[..], false));
+    vector.extend(statements_mk_tts(cx, &tts[..]));
     vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
     let block = cx.expr_block(cx.block(sp, vector));
+    let unflatten = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext("unflatten")];

-    (cx_expr, block)
+    (cx_expr, cx.expr_call_global(sp, unflatten, vec![block]))
 }

 fn expand_wrapper(cx: &ExtCtxt,
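The rewritten `statements_mk_tts` threads a `quoted` flag through the loop: a bare `$` is consumed and marks the *next* token as an interpolation. A self-contained analogue of that state machine (names are mine):

```rust
fn classify(tokens: &[&str]) -> Vec<String> {
    let mut out = Vec::new();
    let mut quoted = false; // true right after an unconsumed `$`
    for t in tokens {
        quoted = match *t {
            "$" if !quoted => true, // swallow the `$`; interpolate what follows
            t => {
                out.push(if quoted {
                    format!("interp({})", t)
                } else {
                    format!("tok({})", t)
                });
                false
            }
        };
    }
    out
}

fn main() {
    assert_eq!(classify(&["foo", "$", "x"]), ["tok(foo)", "interp(x)"]);
}
```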
@@ -82,13 +82,13 @@ use ast::Ident;
 use syntax_pos::{self, BytePos, mk_sp, Span};
 use codemap::Spanned;
 use errors::FatalError;
+use ext::tt::quoted;
 use parse::{Directory, ParseSess};
 use parse::parser::{PathStyle, Parser};
-use parse::token::{DocComment, MatchNt, SubstNt};
-use parse::token::{Token, Nonterminal};
-use parse::token;
+use parse::token::{self, DocComment, Token, Nonterminal};
 use print::pprust;
-use tokenstream::{self, TokenTree};
+use symbol::keywords;
+use tokenstream::TokenTree;
 use util::small_vector::SmallVector;

 use std::mem;
@@ -101,8 +101,8 @@ use std::collections::hash_map::Entry::{Vacant, Occupied};

 #[derive(Clone)]
 enum TokenTreeOrTokenTreeVec {
-    Tt(tokenstream::TokenTree),
-    TtSeq(Vec<tokenstream::TokenTree>),
+    Tt(quoted::TokenTree),
+    TtSeq(Vec<quoted::TokenTree>),
 }

 impl TokenTreeOrTokenTreeVec {
@@ -113,7 +113,7 @@ impl TokenTreeOrTokenTreeVec {
         }
     }

-    fn get_tt(&self, index: usize) -> TokenTree {
+    fn get_tt(&self, index: usize) -> quoted::TokenTree {
         match *self {
             TtSeq(ref v) => v[index].clone(),
             Tt(ref tt) => tt.get_tt(index),
@@ -144,7 +144,9 @@ struct MatcherPos {

 pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;

-pub fn count_names(ms: &[TokenTree]) -> usize {
+pub fn count_names(ms: &[quoted::TokenTree]) -> usize {
+    use self::quoted::TokenTree;
+
     ms.iter().fold(0, |count, elt| {
         count + match *elt {
             TokenTree::Sequence(_, ref seq) => {
@@ -153,7 +155,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
             TokenTree::Delimited(_, ref delim) => {
                 count_names(&delim.tts)
             }
-            TokenTree::Token(_, MatchNt(..)) => {
+            TokenTree::MetaVarDecl(..) => {
                 1
             }
             TokenTree::Token(..) => 0,
@@ -161,7 +163,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
     })
 }

-fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> {
+fn initial_matcher_pos(ms: Vec<quoted::TokenTree>, lo: BytePos) -> Box<MatcherPos> {
     let match_idx_hi = count_names(&ms[..]);
     let matches = create_matches(match_idx_hi);
     Box::new(MatcherPos {
@@ -200,22 +202,30 @@ pub enum NamedMatch {
     MatchedNonterminal(Rc<Nonterminal>)
 }

-fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[TokenTree], mut res: I) -> NamedParseResult {
-    fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(m: &TokenTree, mut res: &mut I,
+fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[quoted::TokenTree], mut res: I)
+           -> NamedParseResult {
+    use self::quoted::TokenTree;
+
+    fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, m: &TokenTree, mut res: &mut I,
                                                ret_val: &mut HashMap<Ident, Rc<NamedMatch>>)
                                                -> Result<(), (syntax_pos::Span, String)> {
         match *m {
             TokenTree::Sequence(_, ref seq) => {
                 for next_m in &seq.tts {
-                    n_rec(next_m, res.by_ref(), ret_val)?
+                    n_rec(sess, next_m, res.by_ref(), ret_val)?
                 }
             }
             TokenTree::Delimited(_, ref delim) => {
                 for next_m in &delim.tts {
-                    n_rec(next_m, res.by_ref(), ret_val)?;
+                    n_rec(sess, next_m, res.by_ref(), ret_val)?;
                 }
             }
-            TokenTree::Token(sp, MatchNt(bind_name, _)) => {
+            TokenTree::MetaVarDecl(span, _, id) if id.name == keywords::Invalid.name() => {
+                if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
+                    return Err((span, "missing fragment specifier".to_string()));
+                }
+            }
+            TokenTree::MetaVarDecl(sp, bind_name, _) => {
                 match ret_val.entry(bind_name) {
                     Vacant(spot) => {
                         spot.insert(res.next().unwrap());
@@ -225,9 +235,6 @@ fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[TokenTree], mut res: I) -> Na
                     }
                 }
             }
-            TokenTree::Token(sp, SubstNt(..)) => {
-                return Err((sp, "missing fragment specifier".to_string()))
-            }
             TokenTree::Token(..) => (),
         }

@@ -236,7 +243,7 @@ fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[TokenTree], mut res: I) -> Na

     let mut ret_val = HashMap::new();
     for m in ms {
-        match n_rec(m, res.by_ref(), &mut ret_val) {
+        match n_rec(sess, m, res.by_ref(), &mut ret_val) {
             Ok(_) => {},
             Err((sp, msg)) => return Error(sp, msg),
         }
@@ -276,11 +283,15 @@ fn create_matches(len: usize) -> Vec<Vec<Rc<NamedMatch>>> {
     (0..len).into_iter().map(|_| Vec::new()).collect()
 }

-fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
+fn inner_parse_loop(sess: &ParseSess,
+                    cur_eis: &mut SmallVector<Box<MatcherPos>>,
                     next_eis: &mut Vec<Box<MatcherPos>>,
                     eof_eis: &mut SmallVector<Box<MatcherPos>>,
                     bb_eis: &mut SmallVector<Box<MatcherPos>>,
-                    token: &Token, span: &syntax_pos::Span) -> ParseResult<()> {
+                    token: &Token,
+                    span: &syntax_pos::Span) -> ParseResult<()> {
+    use self::quoted::TokenTree;
+
     while let Some(mut ei) = cur_eis.pop() {
         // When unzipped trees end, remove them
         while ei.idx >= ei.top_elts.len() {
@@ -346,7 +357,7 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
             match ei.top_elts.get_tt(idx) {
                 /* need to descend into sequence */
                 TokenTree::Sequence(sp, seq) => {
-                    if seq.op == tokenstream::KleeneOp::ZeroOrMore {
+                    if seq.op == quoted::KleeneOp::ZeroOrMore {
                         // Examine the case where there are 0 matches of this sequence
                         let mut new_ei = ei.clone();
                         new_ei.match_cur += seq.num_captures;
@@ -372,7 +383,12 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
                         top_elts: Tt(TokenTree::Sequence(sp, seq)),
                     }));
                 }
-                TokenTree::Token(_, MatchNt(..)) => {
+                TokenTree::MetaVarDecl(span, _, id) if id.name == keywords::Invalid.name() => {
+                    if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
+                        return Error(span, "missing fragment specifier".to_string());
+                    }
+                }
+                TokenTree::MetaVarDecl(..) => {
                     // Built-in nonterminals never start with these tokens,
                     // so we can eliminate them from consideration.
                     match *token {
@@ -380,9 +396,6 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
                         _ => bb_eis.push(ei),
                     }
                 }
-                TokenTree::Token(sp, SubstNt(..)) => {
-                    return Error(sp, "missing fragment specifier".to_string())
-                }
                 seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
                     let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
                     let idx = ei.idx;
@@ -406,8 +419,13 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
     Success(())
 }

-pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: Option<Directory>)
+pub fn parse(sess: &ParseSess,
+             tts: Vec<TokenTree>,
+             ms: &[quoted::TokenTree],
+             directory: Option<Directory>)
             -> NamedParseResult {
+    use self::quoted::TokenTree;
+
     let mut parser = Parser::new(sess, tts, directory, true);
     let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
     let mut next_eis = Vec::new(); // or proceed normally
@@ -417,7 +435,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory:
         let mut eof_eis = SmallVector::new();
         assert!(next_eis.is_empty());

-        match inner_parse_loop(&mut cur_eis, &mut next_eis, &mut eof_eis, &mut bb_eis,
+        match inner_parse_loop(sess, &mut cur_eis, &mut next_eis, &mut eof_eis, &mut bb_eis,
                                &parser.token, &parser.span) {
             Success(_) => {},
             Failure(sp, tok) => return Failure(sp, tok),
@@ -430,7 +448,8 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory:
         /* error messages here could be improved with links to orig. rules */
         if token_name_eq(&parser.token, &token::Eof) {
             if eof_eis.len() == 1 {
-                return nameize(ms, eof_eis[0].matches.iter_mut().map(|mut dv| dv.pop().unwrap()));
+                let matches = eof_eis[0].matches.iter_mut().map(|mut dv| dv.pop().unwrap());
+                return nameize(sess, ms, matches);
             } else if eof_eis.len() > 1 {
                 return Error(parser.span, "ambiguity: multiple successful parses".to_string());
             } else {
@@ -438,7 +457,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory:
             }
         } else if (!bb_eis.is_empty() && !next_eis.is_empty()) || bb_eis.len() > 1 {
             let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) {
-                TokenTree::Token(_, MatchNt(bind, name)) => {
+                TokenTree::MetaVarDecl(_, bind, name) => {
                     format!("{} ('{}')", name, bind)
                 }
                 _ => panic!()
@@ -460,7 +479,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory:
             parser.bump();
         } else /* bb_eis.len() == 1 */ {
             let mut ei = bb_eis.pop().unwrap();
-            if let TokenTree::Token(span, MatchNt(_, ident)) = ei.top_elts.get_tt(ei.idx) {
+            if let TokenTree::MetaVarDecl(span, _, ident) = ei.top_elts.get_tt(ei.idx) {
                 let match_cur = ei.match_cur;
                 ei.matches[match_cur].push(Rc::new(MatchedNonterminal(
                     Rc::new(parse_nt(&mut parser, span, &ident.name.as_str())))));
@@ -479,10 +498,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory:
 fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
     match name {
         "tt" => {
-            p.quote_depth += 1; //but in theory, non-quoted tts might be useful
-            let tt = panictry!(p.parse_token_tree());
-            p.quote_depth -= 1;
-            return token::NtTT(tt);
+            return token::NtTT(panictry!(p.parse_token_tree()));
         }
         _ => {}
     }
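With `quote_depth` gone, a `tt` matcher simply parses one token tree. What that buys users is unchanged and can be checked with an ordinary macro (my example):

```rust
macro_rules! first_tt {
    // `$t:tt` binds exactly one token tree; the rest is swallowed.
    ($t:tt $($rest:tt)*) => { stringify!($t) };
}

fn main() {
    assert_eq!(first_tt!(foo + bar), "foo");
}
```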
@@ -16,14 +16,15 @@ use ext::expand::{Expansion, ExpansionKind};
 use ext::tt::macro_parser::{Success, Error, Failure};
 use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
 use ext::tt::macro_parser::{parse, parse_failure_msg};
+use ext::tt::quoted;
 use ext::tt::transcribe::transcribe;
 use parse::{Directory, ParseSess};
 use parse::parser::Parser;
-use parse::token::{self, NtTT, Token};
+use parse::token::{self, NtTT};
 use parse::token::Token::*;
 use print;
 use symbol::Symbol;
-use tokenstream::{self, TokenTree};
+use tokenstream::TokenTree;

 use std::collections::{HashMap};
 use std::collections::hash_map::{Entry};
@@ -58,8 +59,8 @@ impl<'a> ParserAnyMacro<'a> {

 struct MacroRulesMacroExpander {
     name: ast::Ident,
-    lhses: Vec<TokenTree>,
-    rhses: Vec<TokenTree>,
+    lhses: Vec<quoted::TokenTree>,
+    rhses: Vec<quoted::TokenTree>,
     valid: bool,
 }

@@ -86,8 +87,8 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                           sp: Span,
                           name: ast::Ident,
                           arg: &[TokenTree],
-                          lhses: &[TokenTree],
-                          rhses: &[TokenTree])
+                          lhses: &[quoted::TokenTree],
+                          rhses: &[quoted::TokenTree])
                           -> Box<MacResult+'cx> {
     if cx.trace_macros() {
         println!("{}! {{ {} }}",
@@ -101,7 +102,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,

     for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
         let lhs_tt = match *lhs {
-            TokenTree::Delimited(_, ref delim) => &delim.tts[..],
+            quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
             _ => cx.span_bug(sp, "malformed macro lhs")
         };

@@ -109,7 +110,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
             Success(named_matches) => {
                 let rhs = match rhses[i] {
                     // ignore delimiters
-                    TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
+                    quoted::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
                     _ => cx.span_bug(sp, "malformed macro rhs"),
                 };
                 // rhs has holes (`$id` and `$(...)`) that need to be filled
@@ -164,24 +165,22 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
     // $( $lhs:tt => $rhs:tt );+
     // ...quasiquoting this would be nice.
     // These spans won't matter, anyways
-    let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt"));
-    let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt"));
     let argument_gram = vec![
-        TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
+        quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition {
             tts: vec![
-                TokenTree::Token(DUMMY_SP, match_lhs_tok),
-                TokenTree::Token(DUMMY_SP, token::FatArrow),
-                TokenTree::Token(DUMMY_SP, match_rhs_tok),
+                quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
+                quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
+                quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
             ],
             separator: Some(token::Semi),
-            op: tokenstream::KleeneOp::OneOrMore,
+            op: quoted::KleeneOp::OneOrMore,
             num_captures: 2,
         })),
         // to phase into semicolon-termination instead of semicolon-separation
-        TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
-            tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)],
+        quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition {
+            tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
             separator: None,
-            op: tokenstream::KleeneOp::ZeroOrMore,
+            op: quoted::KleeneOp::ZeroOrMore,
             num_captures: 0
         })),
     ];
@@ -206,12 +205,13 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
                     if let NtTT(ref tt) = **nt {
-                        valid &= check_lhs_nt_follows(sess, tt);
-                        return (*tt).clone();
+                        let tt = quoted::parse(&[tt.clone()], true, sess).pop().unwrap();
+                        valid &= check_lhs_nt_follows(sess, &tt);
+                        return tt;
                     }
                 }
                 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
-            }).collect::<Vec<TokenTree>>()
+            }).collect::<Vec<quoted::TokenTree>>()
         }
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
     };
@@ -221,11 +221,11 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
                     if let NtTT(ref tt) = **nt {
-                        return (*tt).clone();
+                        return quoted::parse(&[tt.clone()], false, sess).pop().unwrap();
                     }
                 }
                 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
-            }).collect()
+            }).collect::<Vec<quoted::TokenTree>>()
         }
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
     };
@@ -249,14 +249,14 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
     NormalTT(exp, Some(def.span), attr::contains_name(&def.attrs, "allow_internal_unstable"))
 }

-fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool {
+fn check_lhs_nt_follows(sess: &ParseSess, lhs: &quoted::TokenTree) -> bool {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
     match lhs {
-        &TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts),
+        &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts),
         _ => {
             let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
-            sess.span_diagnostic.span_err(lhs.get_span(), msg);
+            sess.span_diagnostic.span_err(lhs.span(), msg);
             false
         }
     }
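The `argument_gram` built above is just the matcher `$( $lhs:tt => $rhs:tt );+ $(;)*` expressed as data: every `macro_rules!` body must be one or more `lhs => rhs` arms separated by `;`, with optional trailing semicolons. For example (my example):

```rust
macro_rules! m {
    (a) => { 1 };
    (b) => { 2 };
}

fn main() {
    assert_eq!(m!(a) + m!(b), 3);
}
```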
@@ -266,10 +266,11 @@ fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool {

 /// Check that the lhs contains no repetition which could match an empty token
 /// tree, because then the matcher would hang indefinitely.
-fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool {
+fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
+    use self::quoted::TokenTree;
     for tt in tts {
         match *tt {
-            TokenTree::Token(_, _) => (),
+            TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => (),
             TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) {
                 return false;
             },
@@ -278,7 +279,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool {
                 if seq.tts.iter().all(|seq_tt| {
                     match *seq_tt {
                         TokenTree::Sequence(_, ref sub_seq) =>
-                            sub_seq.op == tokenstream::KleeneOp::ZeroOrMore,
+                            sub_seq.op == quoted::KleeneOp::ZeroOrMore,
                         _ => false,
                     }
                 }) {
@@ -296,15 +297,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool {
     true
 }

-fn check_rhs(sess: &ParseSess, rhs: &TokenTree) -> bool {
+fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool {
     match *rhs {
-        TokenTree::Delimited(..) => return true,
-        _ => sess.span_diagnostic.span_err(rhs.get_span(), "macro rhs must be delimited")
+        quoted::TokenTree::Delimited(..) => return true,
+        _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited")
     }
     false
 }

-fn check_matcher(sess: &ParseSess, matcher: &[TokenTree]) -> bool {
+fn check_matcher(sess: &ParseSess, matcher: &[quoted::TokenTree]) -> bool {
     let first_sets = FirstSets::new(matcher);
     let empty_suffix = TokenSet::empty();
     let err = sess.span_diagnostic.err_count();
@@ -335,7 +336,9 @@ struct FirstSets {
 }

 impl FirstSets {
-    fn new(tts: &[TokenTree]) -> FirstSets {
+    fn new(tts: &[quoted::TokenTree]) -> FirstSets {
+        use self::quoted::TokenTree;
+
         let mut sets = FirstSets { first: HashMap::new() };
         build_recur(&mut sets, tts);
         return sets;
@@ -347,13 +350,12 @@ impl FirstSets {
             let mut first = TokenSet::empty();
             for tt in tts.iter().rev() {
                 match *tt {
-                    TokenTree::Token(sp, ref tok) => {
-                        first.replace_with((sp, tok.clone()));
+                    TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
+                        first.replace_with(tt.clone());
                     }
                     TokenTree::Delimited(span, ref delimited) => {
                         build_recur(sets, &delimited.tts[..]);
-                        first.replace_with((delimited.open_tt(span).span(),
-                                            Token::OpenDelim(delimited.delim)));
+                        first.replace_with(delimited.open_tt(span));
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
                         let subfirst = build_recur(sets, &seq_rep.tts[..]);
@@ -378,11 +380,11 @@ impl FirstSets {

                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe((sp, sep.clone()));
+                            first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
                         }

                         // Reverse scan: Sequence comes before `first`.
-                        if subfirst.maybe_empty || seq_rep.op == tokenstream::KleeneOp::ZeroOrMore {
+                        if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore {
                             // If sequence is potentially empty, then
                             // union them (preserving first emptiness).
                             first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
@@ -401,18 +403,19 @@ impl FirstSets {

     // walks forward over `tts` until all potential FIRST tokens are
     // identified.
-    fn first(&self, tts: &[TokenTree]) -> TokenSet {
+    fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
+        use self::quoted::TokenTree;
+
         let mut first = TokenSet::empty();
         for tt in tts.iter() {
             assert!(first.maybe_empty);
             match *tt {
-                TokenTree::Token(sp, ref tok) => {
-                    first.add_one((sp, tok.clone()));
+                TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
+                    first.add_one(tt.clone());
                     return first;
                 }
                 TokenTree::Delimited(span, ref delimited) => {
-                    first.add_one((delimited.open_tt(span).span(),
-                                   Token::OpenDelim(delimited.delim)));
+                    first.add_one(delimited.open_tt(span));
                     return first;
                 }
                 TokenTree::Sequence(sp, ref seq_rep) => {
@@ -424,13 +427,13 @@ impl FirstSets {

                     if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                     subfirst.maybe_empty) {
-                        first.add_one_maybe((sp, sep.clone()));
+                        first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
                     }

                     assert!(first.maybe_empty);
                     first.add_all(subfirst);
                     if subfirst.maybe_empty ||
-                       seq_rep.op == tokenstream::KleeneOp::ZeroOrMore {
+                       seq_rep.op == quoted::KleeneOp::ZeroOrMore {
                         // continue scanning for more first
                         // tokens, but also make sure we
                         // restore empty-tracking state
@@ -460,8 +463,8 @@ impl FirstSets {
     }
 }

-// A set of Tokens, which may include MatchNt tokens (for
-// macro-by-example syntactic variables). It also carries the
+// A set of `quoted::TokenTree`s, which may include `TokenTree::Match`s
+// (for macro-by-example syntactic variables). It also carries the
 // `maybe_empty` flag; that is true if and only if the matcher can
 // match an empty token sequence.
 //
@@ -472,7 +475,7 @@ impl FirstSets {
 // (Notably, we must allow for *-op to occur zero times.)
 #[derive(Clone, Debug)]
 struct TokenSet {
-    tokens: Vec<(Span, Token)>,
+    tokens: Vec<quoted::TokenTree>,
     maybe_empty: bool,
 }

@@ -482,13 +485,13 @@ impl TokenSet {

     // Returns the set `{ tok }` for the single-token (and thus
     // non-empty) sequence [tok].
-    fn singleton(tok: (Span, Token)) -> Self {
+    fn singleton(tok: quoted::TokenTree) -> Self {
         TokenSet { tokens: vec![tok], maybe_empty: false }
     }

     // Changes self to be the set `{ tok }`.
     // Since `tok` is always present, marks self as non-empty.
-    fn replace_with(&mut self, tok: (Span, Token)) {
+    fn replace_with(&mut self, tok: quoted::TokenTree) {
         self.tokens.clear();
         self.tokens.push(tok);
         self.maybe_empty = false;
@@ -503,7 +506,7 @@ impl TokenSet {
     }

     // Adds `tok` to the set for `self`, marking sequence as non-empty.
-    fn add_one(&mut self, tok: (Span, Token)) {
+    fn add_one(&mut self, tok: quoted::TokenTree) {
         if !self.tokens.contains(&tok) {
             self.tokens.push(tok);
         }
@@ -511,7 +514,7 @@ impl TokenSet {
     }

     // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
-    fn add_one_maybe(&mut self, tok: (Span, Token)) {
+    fn add_one_maybe(&mut self, tok: quoted::TokenTree) {
         if !self.tokens.contains(&tok) {
             self.tokens.push(tok);
         }
@@ -549,9 +552,9 @@ impl TokenSet {
 // see `FirstSets::new`.
 fn check_matcher_core(sess: &ParseSess,
                       first_sets: &FirstSets,
-                      matcher: &[TokenTree],
+                      matcher: &[quoted::TokenTree],
                       follow: &TokenSet) -> TokenSet {
-    use print::pprust::token_to_string;
+    use self::quoted::TokenTree;

     let mut last = TokenSet::empty();

@@ -576,11 +579,11 @@ fn check_matcher_core(sess: &ParseSess,
         // First, update `last` so that it corresponds to the set
        // of NT tokens that might end the sequence `... token`.
         match *token {
-            TokenTree::Token(sp, ref tok) => {
+            TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
                 let can_be_followed_by_any;
-                if let Err(bad_frag) = has_legal_fragment_specifier(tok) {
+                if let Err(bad_frag) = has_legal_fragment_specifier(token) {
                     let msg = format!("invalid fragment specifier `{}`", bad_frag);
-                    sess.span_diagnostic.struct_span_err(sp, &msg)
+                    sess.span_diagnostic.struct_span_err(token.span(), &msg)
                         .help("valid fragment specifiers are `ident`, `block`, \
                               `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
                               and `item`")
@@ -589,7 +592,7 @@ fn check_matcher_core(sess: &ParseSess,
                     // from error messages.)
                     can_be_followed_by_any = true;
                 } else {
-                    can_be_followed_by_any = token_can_be_followed_by_any(tok);
+                    can_be_followed_by_any = token_can_be_followed_by_any(token);
                 }

                 if can_be_followed_by_any {
@@ -599,13 +602,12 @@ fn check_matcher_core(sess: &ParseSess,
                     // followed by anything against SUFFIX.
                     continue 'each_token;
                 } else {
-                    last.replace_with((sp, tok.clone()));
+                    last.replace_with(token.clone());
                     suffix_first = build_suffix_first();
                 }
             }
             TokenTree::Delimited(span, ref d) => {
-                let my_suffix = TokenSet::singleton((d.close_tt(span).span(),
-                                                     Token::CloseDelim(d.delim)));
+                let my_suffix = TokenSet::singleton(d.close_tt(span));
                 check_matcher_core(sess, first_sets, &d.tts, &my_suffix);
                 // don't track non NT tokens
                 last.replace_with_irrelevant();
@@ -629,7 +631,7 @@ fn check_matcher_core(sess: &ParseSess,
                 let mut new;
                 let my_suffix = if let Some(ref u) = seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe((sp, u.clone()));
+                    new.add_one_maybe(TokenTree::Token(sp, u.clone()));
                     &new
                 } else {
                     &suffix_first
@@ -655,12 +657,13 @@ fn check_matcher_core(sess: &ParseSess,

         // Now `last` holds the complete set of NT tokens that could
         // end the sequence before SUFFIX. Check that every one works with `suffix`.
-        'each_last: for &(_sp, ref t) in &last.tokens {
-            if let MatchNt(ref name, ref frag_spec) = *t {
-                for &(sp, ref next_token) in &suffix_first.tokens {
+        'each_last: for token in &last.tokens {
+            if let TokenTree::MetaVarDecl(_, ref name, ref frag_spec) = *token {
+                for next_token in &suffix_first.tokens {
                     match is_in_follow(next_token, &frag_spec.name.as_str()) {
                         Err((msg, help)) => {
-                            sess.span_diagnostic.struct_span_err(sp, &msg).help(help).emit();
+                            sess.span_diagnostic.struct_span_err(next_token.span(), &msg)
+                                .help(help).emit();
                             // don't bother reporting every source of
                             // conflict for a particular element of `last`.
                             continue 'each_last;
@@ -676,12 +679,12 @@ fn check_matcher_core(sess: &ParseSess,
                         };

                         sess.span_diagnostic.span_err(
-                            sp,
+                            next_token.span(),
                             &format!("`${name}:{frag}` {may_be} followed by `{next}`, which \
                                       is not allowed for `{frag}` fragments",
                                      name=name,
                                      frag=frag_spec,
-                                     next=token_to_string(next_token),
+                                     next=quoted_tt_to_string(next_token),
                                      may_be=may_be)
                         );
                     }
@@ -693,8 +696,8 @@ fn check_matcher_core(sess: &ParseSess,
     last
 }

-fn token_can_be_followed_by_any(tok: &Token) -> bool {
-    if let &MatchNt(_, ref frag_spec) = tok {
+fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool {
+    if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
         frag_can_be_followed_by_any(&frag_spec.name.as_str())
     } else {
         // (Non NT's can always be followed by anything in matchers.)
@@ -732,8 +735,10 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool {
 /// break macros that were relying on that binary operator as a
 /// separator.
 // when changing this do not forget to update doc/book/macros.md!
-fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)> {
-    if let &CloseDelim(_) = tok {
+fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'static str)> {
+    use self::quoted::TokenTree;
+
+    if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
         // closing a token tree can never be matched by any fragment;
         // iow, we always require that `(` and `)` match, etc.
         Ok(true)
@@ -749,27 +754,30 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)>
             // maintain
             Ok(true)
         },
-        "stmt" | "expr" => {
-            match *tok {
+        "stmt" | "expr" => match *tok {
+            TokenTree::Token(_, ref tok) => match *tok {
                 FatArrow | Comma | Semi => Ok(true),
                 _ => Ok(false)
-            }
+            },
+            _ => Ok(false),
         },
-        "pat" => {
-            match *tok {
+        "pat" => match *tok {
+            TokenTree::Token(_, ref tok) => match *tok {
                 FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
                 Ident(i) if i.name == "if" || i.name == "in" => Ok(true),
                 _ => Ok(false)
-            }
+            },
+            _ => Ok(false),
         },
-        "path" | "ty" => {
-            match *tok {
+        "path" | "ty" => match *tok {
+            TokenTree::Token(_, ref tok) => match *tok {
                 OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
                 Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
-                MatchNt(_, ref frag) if frag.name == "block" => Ok(true),
                 Ident(i) if i.name == "as" || i.name == "where" => Ok(true),
                 _ => Ok(false)
-            }
+            },
+            TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true),
+            _ => Ok(false),
         },
         "ident" => {
             // being a single token, idents are harmless
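Concretely, the follow-set rules encoded here mean an `expr` fragment may only be followed by `=>`, `,`, or `;`. A quick demonstration (my example; the rejected form is left commented out):

```rust
// macro_rules! bad { ($e:expr + $f:expr) => { $e + $f } }
// error: `$e:expr` is followed by `+`, which is not allowed for `expr` fragments

macro_rules! ok {
    ($e:expr, $f:expr) => { $e + $f };
}

fn main() {
    assert_eq!(ok!(1, 2), 3);
}
```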
@@ -780,6 +788,7 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)>
             // harmless
             Ok(true)
         },
+        "" => Ok(true), // keywords::Invalid
         _ => Err((format!("invalid fragment specifier `{}`", frag),
                   "valid fragment specifiers are `ident`, `block`, \
                   `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
@@ -788,9 +797,9 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)>
     }
 }

-fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> {
+fn has_legal_fragment_specifier(tok: &quoted::TokenTree) -> Result<(), String> {
     debug!("has_legal_fragment_specifier({:?})", tok);
-    if let &MatchNt(_, ref frag_spec) = tok {
+    if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
         let s = &frag_spec.name.as_str();
         if !is_legal_fragment_specifier(s) {
             return Err(s.to_string());
@@ -802,7 +811,15 @@ fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> {
 fn is_legal_fragment_specifier(frag: &str) -> bool {
     match frag {
         "item" | "block" | "stmt" | "expr" | "pat" |
-        "path" | "ty" | "ident" | "meta" | "tt" => true,
+        "path" | "ty" | "ident" | "meta" | "tt" | "" => true,
         _ => false,
     }
 }
+
+fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
+    match *tt {
+        quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
+        quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
+        _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} in follow set checker"),
+    }
+}
@@ -0,0 +1,234 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use ast;
+use ext::tt::macro_parser;
+use parse::{ParseSess, token};
+use print::pprust;
+use symbol::{keywords, Symbol};
+use syntax_pos::{DUMMY_SP, Span, BytePos};
+use tokenstream;
+
+use std::rc::Rc;
+
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+pub struct Delimited {
+    pub delim: token::DelimToken,
+    pub tts: Vec<TokenTree>,
+}
+
+impl Delimited {
+    pub fn open_token(&self) -> token::Token {
+        token::OpenDelim(self.delim)
+    }
+
+    pub fn close_token(&self) -> token::Token {
+        token::CloseDelim(self.delim)
+    }
+
+    pub fn open_tt(&self, span: Span) -> TokenTree {
+        let open_span = match span {
+            DUMMY_SP => DUMMY_SP,
+            _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span },
+        };
+        TokenTree::Token(open_span, self.open_token())
+    }
+
+    pub fn close_tt(&self, span: Span) -> TokenTree {
+        let close_span = match span {
+            DUMMY_SP => DUMMY_SP,
+            _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span },
+        };
+        TokenTree::Token(close_span, self.close_token())
+    }
+}
+
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+pub struct SequenceRepetition {
+    /// The sequence of token trees
+    pub tts: Vec<TokenTree>,
+    /// The optional separator
+    pub separator: Option<token::Token>,
+    /// Whether the sequence can be repeated zero (*), or one or more times (+)
+    pub op: KleeneOp,
+    /// The number of `Match`s that appear in the sequence (and subsequences)
+    pub num_captures: usize,
+}
+
+/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
+/// for token sequences.
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+pub enum KleeneOp {
+    ZeroOrMore,
+    OneOrMore,
+}
+
+/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
+/// are "first-class" token trees.
+#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
+pub enum TokenTree {
+    Token(Span, token::Token),
+    Delimited(Span, Rc<Delimited>),
+    /// A kleene-style repetition sequence with a span
+    Sequence(Span, Rc<SequenceRepetition>),
+    /// Matches a nonterminal. This is only used in the left hand side of MBE macros.
+    MetaVarDecl(Span, ast::Ident /* name to bind */, ast::Ident /* kind of nonterminal */),
+}
+
+impl TokenTree {
+    pub fn len(&self) -> usize {
+        match *self {
+            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
+                token::NoDelim => delimed.tts.len(),
+                _ => delimed.tts.len() + 2,
+            },
+            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
+            _ => 0,
+        }
+    }
+
+    pub fn get_tt(&self, index: usize) -> TokenTree {
+        match (self, index) {
+            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
+                delimed.tts[index].clone()
+            }
+            (&TokenTree::Delimited(span, ref delimed), _) => {
+                if index == 0 {
+                    return delimed.open_tt(span);
+                }
+                if index == delimed.tts.len() + 1 {
+                    return delimed.close_tt(span);
+                }
+                delimed.tts[index - 1].clone()
+            }
+            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
+            _ => panic!("Cannot expand a token tree"),
+        }
+    }
+
+    /// Retrieve the TokenTree's span.
+    pub fn span(&self) -> Span {
+        match *self {
+            TokenTree::Token(sp, _) |
+            TokenTree::MetaVarDecl(sp, _, _) |
+            TokenTree::Delimited(sp, _) |
+            TokenTree::Sequence(sp, _) => sp,
+        }
+    }
+}
+
+pub fn parse(input: &[tokenstream::TokenTree], expect_matchers: bool, sess: &ParseSess)
+             -> Vec<TokenTree> {
+    let mut result = Vec::new();
+    let mut trees = input.iter().cloned();
+    while let Some(tree) = trees.next() {
+        let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
+        match tree {
+            TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => {
+                let span = match trees.next() {
+                    Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
+                        Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => {
+                            let span = Span { lo: start_sp.lo, ..end_sp };
+                            result.push(TokenTree::MetaVarDecl(span, ident, kind));
+                            continue
+                        }
+                        tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+                    },
+                    tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
+                };
+                sess.missing_fragment_specifiers.borrow_mut().insert(span);
+                result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident()));
+            }
+            _ => result.push(tree),
+        }
+    }
+    result
+}
+
+fn parse_tree<I>(tree: tokenstream::TokenTree,
+                 trees: &mut I,
+                 expect_matchers: bool,
+                 sess: &ParseSess)
+                 -> TokenTree
+    where I: Iterator<Item = tokenstream::TokenTree>,
+{
+    match tree {
+        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
+            Some(tokenstream::TokenTree::Delimited(span, ref delimited)) => {
+                if delimited.delim != token::Paren {
+                    let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
+                    let msg = format!("expected `(`, found `{}`", tok);
+                    sess.span_diagnostic.span_err(span, &msg);
+                }
+                let sequence = parse(&delimited.tts, expect_matchers, sess);
+                let (separator, op) = parse_sep_and_kleene_op(trees, span, sess);
+                let name_captures = macro_parser::count_names(&sequence);
+                TokenTree::Sequence(span, Rc::new(SequenceRepetition {
+                    tts: sequence,
+                    separator: separator,
+                    op: op,
+                    num_captures: name_captures,
+                }))
+            }
+            Some(tokenstream::TokenTree::Token(ident_span, token::Ident(ident))) => {
+                let span = Span { lo: span.lo, ..ident_span };
+                if ident.name == keywords::Crate.name() {
+                    let ident = ast::Ident { name: Symbol::intern("$crate"), ..ident };
+                    TokenTree::Token(span, token::Ident(ident))
+                } else {
+                    TokenTree::Token(span, token::SubstNt(ident))
+                }
+            }
+            Some(tokenstream::TokenTree::Token(span, tok)) => {
+                let msg = format!("expected identifier, found `{}`", pprust::token_to_string(&tok));
+                sess.span_diagnostic.span_err(span, &msg);
+                TokenTree::Token(span, token::SubstNt(keywords::Invalid.ident()))
+            }
+            None => TokenTree::Token(span, token::Dollar),
+        },
+        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
+        tokenstream::TokenTree::Delimited(span, delimited) => {
+            TokenTree::Delimited(span, Rc::new(Delimited {
+                delim: delimited.delim,
+                tts: parse(&delimited.tts, expect_matchers, sess),
+            }))
+        }
+    }
+}
+
+fn parse_sep_and_kleene_op<I>(input: &mut I, span: Span, sess: &ParseSess)
+                              -> (Option<token::Token>, KleeneOp)
+    where I: Iterator<Item = tokenstream::TokenTree>,
+{
+    fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
+        match *token {
+            token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
+            token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
+            _ => None,
+        }
+    }
+
+    let span = match input.next() {
+        Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
+            Some(op) => return (None, op),
+            None => match input.next() {
+                Some(tokenstream::TokenTree::Token(span, tok2)) => match kleene_op(&tok2) {
+                    Some(op) => return (Some(tok), op),
+                    None => span,
+                },
+                tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+            },
+        },
+        tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+    };
+
+    sess.span_diagnostic.span_err(span, "expected `*` or `+`");
+    (None, KleeneOp::ZeroOrMore)
+}
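`parse_sep_and_kleene_op` accepts either a bare `*`/`+` after a `$(...)` group, or a single separator token followed by one. In ordinary macro syntax (my example):

```rust
macro_rules! csv {
    // separator `,` plus `KleeneOp::ZeroOrMore`
    ($($e:expr),*) => { vec![$($e),*] };
}

fn main() {
    assert_eq!(csv!(1, 2, 3), vec![1, 2, 3]);
    let empty: Vec<i32> = csv!();
    assert!(empty.is_empty());
}
```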
@@ -7,99 +7,201 @@
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use self::LockstepIterSize::*;

use ast::Ident;
use errors::Handler;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use parse::token::{self, MatchNt, SubstNt, Token, NtIdent, NtTT};
use ext::tt::quoted;
use parse::token::{self, SubstNt, Token, NtIdent, NtTT};
use syntax_pos::{Span, DUMMY_SP};
use tokenstream::{self, TokenTree};
use tokenstream::{TokenTree, Delimited};
use util::small_vector::SmallVector;

use std::rc::Rc;
use std::mem;
use std::ops::Add;
use std::collections::HashMap;

///an unzipping of `TokenTree`s
#[derive(Clone)]
struct TtFrame {
    forest: TokenTree,
    idx: usize,
    dotdotdoted: bool,
    sep: Option<Token>,
// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
enum Frame {
    Delimited {
        forest: Rc<quoted::Delimited>,
        idx: usize,
        span: Span,
    },
    Sequence {
        forest: Rc<quoted::SequenceRepetition>,
        idx: usize,
        sep: Option<Token>,
    },
}

#[derive(Clone)]
struct TtReader<'a> {
    sp_diag: &'a Handler,
    /// the unzipped tree:
    stack: SmallVector<TtFrame>,
    /* for MBE-style macro transcription */
    interpolations: HashMap<Ident, Rc<NamedMatch>>,
impl Frame {
    fn new(tts: Vec<quoted::TokenTree>) -> Frame {
        let forest = Rc::new(quoted::Delimited { delim: token::NoDelim, tts: tts });
        Frame::Delimited { forest: forest, idx: 0, span: DUMMY_SP }
    }
}

    repeat_idx: Vec<usize>,
    repeat_len: Vec<usize>,
impl Iterator for Frame {
    type Item = quoted::TokenTree;

    fn next(&mut self) -> Option<quoted::TokenTree> {
        match *self {
            Frame::Delimited { ref forest, ref mut idx, .. } => {
                *idx += 1;
                forest.tts.get(*idx - 1).cloned()
            }
            Frame::Sequence { ref forest, ref mut idx, .. } => {
                *idx += 1;
                forest.tts.get(*idx - 1).cloned()
            }
        }
    }
}
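
// Illustration (not part of the diff): each `Frame` is just a cursor into a
// shared (`Rc`) vector of token trees, so frames are cheap and the same
// subtree can be walked once per repetition. A minimal sketch of the idea:
use std::rc::Rc;

struct Frame { forest: Rc<Vec<u32>>, idx: usize }

impl Iterator for Frame {
    type Item = u32;
    fn next(&mut self) -> Option<u32> {
        self.idx += 1;
        self.forest.get(self.idx - 1).copied()
    }
}

fn main() {
    let forest = Rc::new(vec![1, 2, 3]);
    // Two cursors over the same allocation, as happens when a `$(...)`
    // body is replayed for each repetition:
    let a: Vec<_> = Frame { forest: forest.clone(), idx: 0 }.collect();
    let b: Vec<_> = Frame { forest, idx: 0 }.collect();
    assert_eq!(a, b);
}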

/// This can do Macro-By-Example transcription. On the other hand, if
/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
/// `src` contains no `TokenTree::{Sequence, Match}`s, or `SubstNt`s, `interp` can
/// (and should) be None.
pub fn transcribe(sp_diag: &Handler,
                  interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                  src: Vec<tokenstream::TokenTree>)
                  src: Vec<quoted::TokenTree>)
                  -> Vec<TokenTree> {
    let mut r = TtReader {
        sp_diag: sp_diag,
        stack: SmallVector::one(TtFrame {
            forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
                tts: src,
                // doesn't matter. This merely holds the root unzipping.
                separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0
            })),
            idx: 0,
            dotdotdoted: false,
            sep: None,
        }),
        interpolations: match interp { /* just a convenience */
            None => HashMap::new(),
            Some(x) => x,
        },
        repeat_idx: Vec::new(),
        repeat_len: Vec::new(),
    };
    let mut stack = SmallVector::one(Frame::new(src));
    let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */
    let mut repeats = Vec::new();
    let mut result = Vec::new();
    let mut result_stack = Vec::new();

    let mut tts = Vec::new();
    let mut prev_span = DUMMY_SP;
    while let Some(tt) = tt_next_token(&mut r, prev_span) {
        prev_span = tt.span();
        tts.push(tt);
    }
    tts
}

fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> {
    r.repeat_idx.iter().fold(start, |ad, idx| {
        match *ad {
            MatchedNonterminal(_) => {
                // end of the line; duplicate henceforth
                ad.clone()
    loop {
        let tree = if let Some(tree) = stack.last_mut().unwrap().next() {
            tree
        } else {
            if let Frame::Sequence { ref mut idx, ref sep, .. } = *stack.last_mut().unwrap() {
                let (ref mut repeat_idx, repeat_len) = *repeats.last_mut().unwrap();
                *repeat_idx += 1;
                if *repeat_idx < repeat_len {
                    *idx = 0;
                    if let Some(sep) = sep.clone() {
                        // repeat same span, I guess
                        let prev_span = result.last().map(TokenTree::span).unwrap_or(DUMMY_SP);
                        result.push(TokenTree::Token(prev_span, sep));
                    }
                    continue
                }
            }
            MatchedSeq(ref ads, _) => ads[*idx].clone()

            match stack.pop().unwrap() {
                Frame::Sequence { .. } => {
                    repeats.pop();
                }
                Frame::Delimited { forest, span, .. } => {
                    if result_stack.is_empty() {
                        return result;
                    }
                    let tree = TokenTree::Delimited(span, Rc::new(Delimited {
                        delim: forest.delim,
                        tts: result,
                    }));
                    result = result_stack.pop().unwrap();
                    result.push(tree);
                }
            }
            continue
        };

        match tree {
            quoted::TokenTree::Sequence(sp, seq) => {
                // FIXME(pcwalton): Bad copy.
                match lockstep_iter_size(&quoted::TokenTree::Sequence(sp, seq.clone()),
                                         &interpolations,
                                         &repeats) {
                    LockstepIterSize::Unconstrained => {
                        panic!(sp_diag.span_fatal(
                            sp.clone(), /* blame macro writer */
                            "attempted to repeat an expression \
                             containing no syntax \
                             variables matched as repeating at this depth"));
                    }
                    LockstepIterSize::Contradiction(ref msg) => {
                        // FIXME #2887 blame macro invoker instead
                        panic!(sp_diag.span_fatal(sp.clone(), &msg[..]));
                    }
                    LockstepIterSize::Constraint(len, _) => {
                        if len == 0 {
                            if seq.op == quoted::KleeneOp::OneOrMore {
                                // FIXME #2887 blame invoker
                                panic!(sp_diag.span_fatal(sp.clone(),
                                                          "this must repeat at least once"));
                            }
                        } else {
                            repeats.push((0, len));
                            stack.push(Frame::Sequence {
                                idx: 0,
                                sep: seq.separator.clone(),
                                forest: seq,
                            });
                        }
                    }
                }
            }
            // FIXME #2887: think about span stuff here
            quoted::TokenTree::Token(sp, SubstNt(ident)) => {
                match lookup_cur_matched(ident, &interpolations, &repeats) {
                    None => result.push(TokenTree::Token(sp, SubstNt(ident))),
                    Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
                        match **nt {
                            // sidestep the interpolation tricks for ident because
                            // (a) idents can be in lots of places, so it'd be a pain
                            // (b) we actually can, since it's a token.
                            NtIdent(ref sn) => {
                                result.push(TokenTree::Token(sn.span, token::Ident(sn.node)));
                            }
                            NtTT(ref tt) => result.push(tt.clone()),
                            _ => {
                                // FIXME(pcwalton): Bad copy
                                result.push(TokenTree::Token(sp, token::Interpolated(nt.clone())));
                            }
                        }
                    } else {
                        panic!(sp_diag.span_fatal(
                            sp, /* blame the macro writer */
                            &format!("variable '{}' is still repeating at this depth", ident)));
                    }
                }
            }
            quoted::TokenTree::Delimited(span, delimited) => {
                stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
                result_stack.push(mem::replace(&mut result, Vec::new()));
            }
            quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok)),
            quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl`"),
        }
    })
    }
}
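
// Illustration (not part of the diff): `transcribe` avoids recursion by pairing
// the frame stack with a `result_stack` — entering a delimited subtree parks
// the tokens built so far and starts a fresh `result`; leaving it pops the
// parked vector and pushes the finished subtree. A minimal sketch of that
// discipline, with hypothetical types:
enum Step { Open, Close, Leaf(char) }

#[derive(Debug, PartialEq)]
enum Tree { Leaf(char), Group(Vec<Tree>) }

fn build(steps: &[Step]) -> Vec<Tree> {
    let mut result = Vec::new();
    let mut result_stack = Vec::new();
    for step in steps {
        match step {
            Step::Open => result_stack.push(std::mem::take(&mut result)),
            Step::Close => {
                // restore the parked vector, wrapping what we built meanwhile
                let inner = std::mem::replace(
                    &mut result,
                    result_stack.pop().expect("unbalanced"),
                );
                result.push(Tree::Group(inner));
            }
            Step::Leaf(c) => result.push(Tree::Leaf(*c)),
        }
    }
    result
}

fn main() {
    use Step::*;
    // `a ( b )` builds [Leaf('a'), Group([Leaf('b')])]
    let trees = build(&[Leaf('a'), Open, Leaf('b'), Close]);
    assert_eq!(trees, vec![Tree::Leaf('a'), Tree::Group(vec![Tree::Leaf('b')])]);
}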

fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
    let matched_opt = r.interpolations.get(&name).cloned();
    matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
fn lookup_cur_matched(ident: Ident,
                      interpolations: &HashMap<Ident, Rc<NamedMatch>>,
                      repeats: &[(usize, usize)])
                      -> Option<Rc<NamedMatch>> {
    interpolations.get(&ident).map(|matched| {
        repeats.iter().fold(matched.clone(), |ad, &(idx, _)| {
            match *ad {
                MatchedNonterminal(_) => {
                    // end of the line; duplicate henceforth
                    ad.clone()
                }
                MatchedSeq(ref ads, _) => ads[idx].clone()
            }
        })
    })
}
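
// Illustration (not part of the diff): a variable matched under nested `$(...)`
// yields nested sequences; the current binding is found by folding the active
// repetition indices over them. A sketch with a simplified match type:
#[derive(Clone, Debug, PartialEq)]
enum Matched { Leaf(&'static str), Seq(Vec<Matched>) }

fn lookup(matched: &Matched, repeats: &[usize]) -> Matched {
    repeats.iter().fold(matched.clone(), |m, &idx| match m {
        leaf @ Matched::Leaf(_) => leaf, // already a leaf: duplicated at deeper levels
        Matched::Seq(items) => items[idx].clone(),
    })
}

fn main() {
    // `$( $( $x )* )*` with x = [[a, b], [c]]:
    let m = Matched::Seq(vec![
        Matched::Seq(vec![Matched::Leaf("a"), Matched::Leaf("b")]),
        Matched::Seq(vec![Matched::Leaf("c")]),
    ]);
    assert_eq!(lookup(&m, &[0, 1]), Matched::Leaf("b"));
    assert_eq!(lookup(&m, &[1, 0]), Matched::Leaf("c"));
}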

#[derive(Clone)]
enum LockstepIterSize {
    LisUnconstrained,
    LisConstraint(usize, Ident),
    LisContradiction(String),
    Unconstrained,
    Constraint(usize, Ident),
    Contradiction(String),
}

impl Add for LockstepIterSize {

@@ -107,168 +209,47 @@ impl Add for LockstepIterSize {

    fn add(self, other: LockstepIterSize) -> LockstepIterSize {
        match self {
            LisUnconstrained => other,
            LisContradiction(_) => self,
            LisConstraint(l_len, ref l_id) => match other {
                LisUnconstrained => self.clone(),
                LisContradiction(_) => other,
                LisConstraint(r_len, _) if l_len == r_len => self.clone(),
                LisConstraint(r_len, r_id) => {
                    LisContradiction(format!("inconsistent lockstep iteration: \
                                              '{}' has {} items, but '{}' has {}",
                                             l_id, l_len, r_id, r_len))
            LockstepIterSize::Unconstrained => other,
            LockstepIterSize::Contradiction(_) => self,
            LockstepIterSize::Constraint(l_len, ref l_id) => match other {
                LockstepIterSize::Unconstrained => self.clone(),
                LockstepIterSize::Contradiction(_) => other,
                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self.clone(),
                LockstepIterSize::Constraint(r_len, r_id) => {
                    let msg = format!("inconsistent lockstep iteration: \
                                       '{}' has {} items, but '{}' has {}",
                                      l_id, l_len, r_id, r_len);
                    LockstepIterSize::Contradiction(msg)
                }
            },
        }
    }
}
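
// Illustration (not part of the diff): the lockstep size behaves like a small
// join — Unconstrained is the identity, equal constraints merge, and unequal
// ones poison the result. A runnable sketch with Option<usize> standing in for
// Unconstrained/Constraint and a plain error message for Contradiction:
fn join(a: Option<usize>, b: Option<usize>) -> Result<Option<usize>, String> {
    match (a, b) {
        (None, x) | (x, None) => Ok(x), // Unconstrained on either side
        (Some(l), Some(r)) if l == r => Ok(Some(l)),
        (Some(l), Some(r)) => Err(format!("inconsistent lockstep iteration: {} vs {}", l, r)),
    }
}

fn main() {
    assert_eq!(join(None, Some(3)), Ok(Some(3)));
    assert_eq!(join(Some(3), Some(3)), Ok(Some(3)));
    assert!(join(Some(2), Some(3)).is_err());
}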

fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
    match *t {
fn lockstep_iter_size(tree: &quoted::TokenTree,
                      interpolations: &HashMap<Ident, Rc<NamedMatch>>,
                      repeats: &[(usize, usize)])
                      -> LockstepIterSize {
    use self::quoted::TokenTree;
    match *tree {
        TokenTree::Delimited(_, ref delimed) => {
            delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
                size + lockstep_iter_size(tt, r)
            delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size + lockstep_iter_size(tt, interpolations, repeats)
            })
        },
        TokenTree::Sequence(_, ref seq) => {
            seq.tts.iter().fold(LisUnconstrained, |size, tt| {
                size + lockstep_iter_size(tt, r)
            seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size + lockstep_iter_size(tt, interpolations, repeats)
            })
        },
        TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) =>
            match lookup_cur_matched(r, name) {
        TokenTree::Token(_, SubstNt(name)) | TokenTree::MetaVarDecl(_, name, _) =>
            match lookup_cur_matched(name, interpolations, repeats) {
                Some(matched) => match *matched {
                    MatchedNonterminal(_) => LisUnconstrained,
                    MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name),
                    MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
                    MatchedSeq(ref ads, _) => LockstepIterSize::Constraint(ads.len(), name),
                },
                _ => LisUnconstrained
                _ => LockstepIterSize::Unconstrained
            },
        TokenTree::Token(..) => LisUnconstrained,
    }
}

/// Return the next token from the TtReader.
/// EFFECT: advances the reader's token field
fn tt_next_token(r: &mut TtReader, prev_span: Span) -> Option<TokenTree> {
    loop {
        let should_pop = if let Some(frame) = r.stack.last() {
            if frame.idx < frame.forest.len() {
                break;
            }
            !frame.dotdotdoted || *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1
        } else {
            return None;
        };

        /* done with this set; pop or repeat? */
        if should_pop {
            let prev = r.stack.pop().unwrap();
            if let Some(frame) = r.stack.last_mut() {
                frame.idx += 1;
            } else {
                return None;
            }
            if prev.dotdotdoted {
                r.repeat_idx.pop();
                r.repeat_len.pop();
            }
        } else { /* repeat */
            *r.repeat_idx.last_mut().unwrap() += 1;
            r.stack.last_mut().unwrap().idx = 0;
            if let Some(tk) = r.stack.last().unwrap().sep.clone() {
                return Some(TokenTree::Token(prev_span, tk)); // repeat same span, I guess
            }
        }
    }
    loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting
              with a `TokenTree::Token`, even though it won't happen */
        let t = {
            let frame = r.stack.last().unwrap();
            // FIXME(pcwalton): Bad copy.
            frame.forest.get_tt(frame.idx)
        };
        match t {
            TokenTree::Sequence(sp, seq) => {
                // FIXME(pcwalton): Bad copy.
                match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()),
                                         r) {
                    LisUnconstrained => {
                        panic!(r.sp_diag.span_fatal(
                            sp.clone(), /* blame macro writer */
                            "attempted to repeat an expression \
                             containing no syntax \
                             variables matched as repeating at this depth"));
                    }
                    LisContradiction(ref msg) => {
                        // FIXME #2887 blame macro invoker instead
                        panic!(r.sp_diag.span_fatal(sp.clone(), &msg[..]));
                    }
                    LisConstraint(len, _) => {
                        if len == 0 {
                            if seq.op == tokenstream::KleeneOp::OneOrMore {
                                // FIXME #2887 blame invoker
                                panic!(r.sp_diag.span_fatal(sp.clone(),
                                                            "this must repeat at least once"));
                            }

                            r.stack.last_mut().unwrap().idx += 1;
                            return tt_next_token(r, prev_span);
                        }
                        r.repeat_len.push(len);
                        r.repeat_idx.push(0);
                        r.stack.push(TtFrame {
                            idx: 0,
                            dotdotdoted: true,
                            sep: seq.separator.clone(),
                            forest: TokenTree::Sequence(sp, seq),
                        });
                    }
                }
            }
            // FIXME #2887: think about span stuff here
            TokenTree::Token(sp, SubstNt(ident)) => {
                r.stack.last_mut().unwrap().idx += 1;
                match lookup_cur_matched(r, ident) {
                    None => {
                        return Some(TokenTree::Token(sp, SubstNt(ident)));
                        // this can't be 0 length, just like TokenTree::Delimited
                    }
                    Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
                        match **nt {
                            // sidestep the interpolation tricks for ident because
                            // (a) idents can be in lots of places, so it'd be a pain
                            // (b) we actually can, since it's a token.
                            NtIdent(ref sn) => {
                                return Some(TokenTree::Token(sn.span, token::Ident(sn.node)));
                            }
                            NtTT(ref tt) => return Some(tt.clone()),
                            _ => {
                                // FIXME(pcwalton): Bad copy
                                return Some(TokenTree::Token(sp, token::Interpolated(nt.clone())));
                            }
                        }
                    } else {
                        panic!(r.sp_diag.span_fatal(
                            sp, /* blame the macro writer */
                            &format!("variable '{}' is still repeating at this depth", ident)));
                    }
                }
            }
            // TokenTree::Delimited or any token that can be unzipped
            seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, MatchNt(..)) => {
                // do not advance the idx yet
                r.stack.push(TtFrame {
                    forest: seq,
                    idx: 0,
                    dotdotdoted: false,
                    sep: None
                });
                // if this could be 0-length, we'd need to potentially recur here
            }
            tt @ TokenTree::Token(..) => {
                r.stack.last_mut().unwrap().idx += 1;
                return Some(tt);
            }
        }
        TokenTree::Token(..) => LockstepIterSize::Unconstrained,
    }
}

@@ -551,13 +551,6 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
                }
            ))
        },
        TokenTree::Sequence(span, ref seq) =>
            TokenTree::Sequence(fld.new_span(span),
                                Rc::new(SequenceRepetition {
                                    tts: fld.fold_tts(&seq.tts),
                                    separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
                                    ..**seq
                                })),
    }
}

@@ -578,7 +571,6 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token
            token::Interpolated(Rc::new(fld.fold_interpolated(nt)))
        }
        token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)),
        token::MatchNt(name, kind) => token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind)),
        _ => t
    }
}

@@ -139,6 +139,7 @@ pub mod ext {
        pub mod transcribe;
        pub mod macro_parser;
        pub mod macro_rules;
        pub mod quoted;
    }
}

@@ -1693,6 +1693,7 @@ mod tests {
    use feature_gate::UnstableFeatures;
    use parse::token;
    use std::cell::RefCell;
    use std::collections::HashSet;
    use std::io;
    use std::rc::Rc;

@@ -1704,6 +1705,7 @@ mod tests {
            config: CrateConfig::new(),
            included_mod_stack: RefCell::new(Vec::new()),
            code_map: cm,
            missing_fragment_specifiers: RefCell::new(HashSet::new()),
        }
    }

@@ -46,6 +46,7 @@ pub struct ParseSess {
    pub span_diagnostic: Handler,
    pub unstable_features: UnstableFeatures,
    pub config: CrateConfig,
    pub missing_fragment_specifiers: RefCell<HashSet<Span>>,
    /// Used to determine and report recursive mod inclusions
    included_mod_stack: RefCell<Vec<PathBuf>>,
    code_map: Rc<CodeMap>,

@@ -66,6 +67,7 @@ impl ParseSess {
            span_diagnostic: handler,
            unstable_features: UnstableFeatures::from_environment(),
            config: HashSet::new(),
            missing_fragment_specifiers: RefCell::new(HashSet::new()),
            included_mod_stack: RefCell::new(vec![]),
            code_map: code_map
        }
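
// Illustration (not part of the diff): `RefCell<HashSet<Span>>` lets deeply
// nested parsing code record offending spans without threading `&mut`
// everywhere; a later pass drains the set and reports. A minimal sketch,
// with u32 standing in for Span:
use std::cell::RefCell;
use std::collections::HashSet;

struct Sess { missing: RefCell<HashSet<u32>> }

fn parse(sess: &Sess, pos: u32, has_specifier: bool) {
    if !has_specifier {
        sess.missing.borrow_mut().insert(pos); // record through a shared reference
    }
}

fn main() {
    let sess = Sess { missing: RefCell::new(HashSet::new()) };
    parse(&sess, 7, false);
    parse(&sess, 9, true);
    let mut spans: Vec<_> = sess.missing.borrow().iter().cloned().collect();
    spans.sort(); // deterministic order before emitting diagnostics
    assert_eq!(spans, vec![7]);
}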
@@ -139,13 +141,9 @@ pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
    new_parser_from_source_str(sess, name, source).parse_stmt()
}

// Warning: This parses with quote_depth > 0, which is not the default.
pub fn parse_tts_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
                                     -> PResult<'a, Vec<tokenstream::TokenTree>> {
    let mut p = new_parser_from_source_str(sess, name, source);
    p.quote_depth += 1;
    // right now this is re-creating the token trees from ... token trees.
    p.parse_all_token_trees()
                                     -> Vec<tokenstream::TokenTree> {
    filemap_to_tts(sess, sess.codemap().new_filemap(name, None, source))
}

// Create a new parser from a source string

@@ -986,7 +984,7 @@ mod tests {
        _ => panic!("not a macro"),
    };

    let span = tts.iter().rev().next().unwrap().get_span();
    let span = tts.iter().rev().next().unwrap().span();

    match sess.codemap().span_to_snippet(span) {
        Ok(s) => assert_eq!(&s[..], "{ body }"),

@@ -43,19 +43,16 @@ use {ast, attr};
use codemap::{self, CodeMap, Spanned, spanned, respan};
use syntax_pos::{self, Span, Pos, BytePos, mk_sp};
use errors::{self, DiagnosticBuilder};
use ext::tt::macro_parser;
use parse;
use parse::classify;
use parse::{self, classify, token};
use parse::common::SeqSep;
use parse::lexer::TokenAndSpan;
use parse::obsolete::ObsoleteSyntax;
use parse::token::{self, MatchNt, SubstNt};
use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
use util::parser::{AssocOp, Fixity};
use print::pprust;
use ptr::P;
use parse::PResult;
use tokenstream::{self, Delimited, SequenceRepetition, TokenTree};
use tokenstream::{Delimited, TokenTree};
use symbol::{Symbol, keywords};
use util::ThinVec;

@@ -168,8 +165,6 @@ pub struct Parser<'a> {
    /// the previous token kind
    prev_token_kind: PrevTokenKind,
    pub restrictions: Restrictions,
    pub quote_depth: usize, // not (yet) related to the quasiquoter
    parsing_token_tree: bool,
    /// The set of seen errors about obsolete syntax. Used to suppress
    /// extra detail when the same error is seen twice
    pub obsolete_set: HashSet<ObsoleteSyntax>,

@@ -329,8 +324,6 @@ impl<'a> Parser<'a> {
            prev_span: syntax_pos::DUMMY_SP,
            prev_token_kind: PrevTokenKind::Other,
            restrictions: Restrictions::empty(),
            quote_depth: 0,
            parsing_token_tree: false,
            obsolete_set: HashSet::new(),
            directory: Directory { path: PathBuf::new(), ownership: DirectoryOwnership::Owned },
            root_module_name: None,

@@ -359,20 +352,11 @@ impl<'a> Parser<'a> {
                if i + 1 < tts.len() {
                    self.tts.push((tts, i + 1));
                }
                // FIXME(jseyfried): remove after fixing #39390 in #39419.
                if self.quote_depth > 0 {
                    if let TokenTree::Sequence(sp, _) = tt {
                        self.span_err(sp, "attempted to repeat an expression containing no \
                                           syntax variables matched as repeating at this depth");
                    }
                }
                match tt {
                    TokenTree::Token(sp, tok) => TokenAndSpan { tok: tok, sp: sp },
                    _ if tt.len() > 0 => {
                        self.tts.push((tt, 0));
                        continue
                    }
                    _ => continue,
                if let TokenTree::Token(sp, tok) = tt {
                    TokenAndSpan { tok: tok, sp: sp }
                } else {
                    self.tts.push((tt, 0));
                    continue
                }
            } else {
                TokenAndSpan { tok: token::Eof, sp: self.span }

@@ -997,7 +981,6 @@ impl<'a> Parser<'a> {
                tok = match tts.get_tt(i) {
                    TokenTree::Token(_, tok) => tok,
                    TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
                    TokenTree::Sequence(..) => token::Dollar,
                };
            }
        }

@@ -1187,10 +1170,7 @@ impl<'a> Parser<'a> {
        self.expect(&token::Not)?;

        // eat a matched-delimiter token tree:
        let delim = self.expect_open_delim()?;
        let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
                                        SeqSep::none(),
                                        |pp| pp.parse_token_tree())?;
        let (delim, tts) = self.expect_delimited_token_tree()?;
        if delim != token::Brace {
            self.expect(&token::Semi)?
        }

@@ -1448,10 +1428,7 @@ impl<'a> Parser<'a> {
            let path = self.parse_path(PathStyle::Type)?;
            if self.eat(&token::Not) {
                // MACRO INVOCATION
                let delim = self.expect_open_delim()?;
                let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
                                                SeqSep::none(),
                                                |p| p.parse_token_tree())?;
                let (_, tts) = self.expect_delimited_token_tree()?;
                let hi = self.span.hi;
                TyKind::Mac(spanned(lo, hi, Mac_ { path: path, tts: tts }))
            } else {

@@ -2045,13 +2022,12 @@ impl<'a> Parser<'a> {
        })
    }

    fn expect_open_delim(&mut self) -> PResult<'a, token::DelimToken> {
        self.expected_tokens.push(TokenType::Token(token::Gt));
    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, Vec<TokenTree>)> {
        match self.token {
            token::OpenDelim(delim) => {
                self.bump();
                Ok(delim)
            },
            token::OpenDelim(delim) => self.parse_token_tree().map(|tree| match tree {
                TokenTree::Delimited(_, delimited) => (delim, delimited.tts.clone()),
                _ => unreachable!(),
            }),
            _ => Err(self.fatal("expected open delimiter")),
        }
    }
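
// Illustration (not part of the diff): every macro-invocation site now shares
// one helper that eats a full delimited token tree and hands back the
// delimiter plus the inner tokens. A sketch of the call-site shape, with
// hypothetical simplified types:
#[derive(Debug, PartialEq)]
enum Delim { Paren, Brace }

fn expect_delimited(src: &str) -> Result<(Delim, String), String> {
    let (open, close, delim) = match src.chars().next() {
        Some('(') => ('(', ')', Delim::Paren),
        Some('{') => ('{', '}', Delim::Brace),
        _ => return Err("expected open delimiter".to_string()),
    };
    let inner = src.trim_start_matches(open).trim_end_matches(close);
    Ok((delim, inner.to_string()))
}

fn main() {
    // `foo! { body }` style call sites all reduce to one pattern:
    let (delim, tts) = expect_delimited("{ body }").unwrap();
    assert_eq!(delim, Delim::Brace);
    assert_eq!(tts.trim(), "body");
    assert!(expect_delimited("body").is_err());
}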

@@ -2261,10 +2237,7 @@ impl<'a> Parser<'a> {
            // `!`, as an operator, is prefix, so we know this isn't that
            if self.eat(&token::Not) {
                // MACRO INVOCATION expression
                let delim = self.expect_open_delim()?;
                let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
                                                SeqSep::none(),
                                                |p| p.parse_token_tree())?;
                let (_, tts) = self.expect_delimited_token_tree()?;
                let hi = self.prev_span.hi;
                return Ok(self.mk_mac_expr(lo, hi, Mac_ { path: pth, tts: tts }, attrs));
            }

@@ -2586,139 +2559,22 @@ impl<'a> Parser<'a> {
        return Ok(e);
    }

    // Parse unquoted tokens after a `$` in a token tree
    fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
        let mut sp = self.span;
        let name = match self.token {
            token::Dollar => {
                self.bump();

                if self.token == token::OpenDelim(token::Paren) {
                    let Spanned { node: seq, span: seq_span } = self.parse_seq(
                        &token::OpenDelim(token::Paren),
                        &token::CloseDelim(token::Paren),
                        SeqSep::none(),
                        |p| p.parse_token_tree()
                    )?;
                    let (sep, repeat) = self.parse_sep_and_kleene_op()?;
                    let name_num = macro_parser::count_names(&seq);
                    return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi),
                                                  Rc::new(SequenceRepetition {
                                                      tts: seq,
                                                      separator: sep,
                                                      op: repeat,
                                                      num_captures: name_num
                                                  })));
                } else if self.token.is_keyword(keywords::Crate) {
                    let ident = match self.token {
                        token::Ident(id) => ast::Ident { name: Symbol::intern("$crate"), ..id },
                        _ => unreachable!(),
                    };
                    self.bump();
                    return Ok(TokenTree::Token(sp, token::Ident(ident)));
                } else {
                    sp = mk_sp(sp.lo, self.span.hi);
                    self.parse_ident().unwrap_or_else(|mut e| {
                        e.emit();
                        keywords::Invalid.ident()
                    })
                }
            }
            token::SubstNt(name) => {
                self.bump();
                name
            }
            _ => unreachable!()
        };
        // continue by trying to parse the `:ident` after `$name`
        if self.token == token::Colon &&
                self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword()) {
            self.bump();
            sp = mk_sp(sp.lo, self.span.hi);
            let nt_kind = self.parse_ident()?;
            Ok(TokenTree::Token(sp, MatchNt(name, nt_kind)))
        } else {
            Ok(TokenTree::Token(sp, SubstNt(name)))
        }
    }

    pub fn check_unknown_macro_variable(&mut self) {
        if self.quote_depth == 0 && !self.parsing_token_tree {
            match self.token {
                token::SubstNt(name) =>
                    self.fatal(&format!("unknown macro variable `{}`", name)).emit(),
                _ => {}
            }
        }
    }

    /// Parse an optional separator followed by a Kleene-style
    /// repetition token (+ or *).
    pub fn parse_sep_and_kleene_op(&mut self)
                                   -> PResult<'a, (Option<token::Token>, tokenstream::KleeneOp)> {
        fn parse_kleene_op<'a>(parser: &mut Parser<'a>) ->
                PResult<'a, Option<tokenstream::KleeneOp>> {
            match parser.token {
                token::BinOp(token::Star) => {
                    parser.bump();
                    Ok(Some(tokenstream::KleeneOp::ZeroOrMore))
                },
                token::BinOp(token::Plus) => {
                    parser.bump();
                    Ok(Some(tokenstream::KleeneOp::OneOrMore))
                },
                _ => Ok(None)
            }
        };

        if let Some(kleene_op) = parse_kleene_op(self)? {
            return Ok((None, kleene_op));
        }

        let separator = match self.token {
            token::CloseDelim(..) => None,
            _ => Some(self.bump_and_get()),
        };
        match parse_kleene_op(self)? {
            Some(zerok) => Ok((separator, zerok)),
            None => return Err(self.fatal("expected `*` or `+`"))
        if let token::SubstNt(name) = self.token {
            self.fatal(&format!("unknown macro variable `{}`", name)).emit()
        }
    }

    /// parse a single token tree from the input.
    pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
        // FIXME #6994: currently, this is too eager. It
        // parses token trees but also identifies TokenType::Sequence's
        // and token::SubstNt's; it's too early to know yet
        // whether something will be a nonterminal or a seq
        // yet.
        match self.token {
            token::OpenDelim(delim) => {
                if self.quote_depth == 0 && self.tts.last().map(|&(_, i)| i == 1).unwrap_or(false) {
                    let tt = self.tts.pop().unwrap().0;
                    self.bump();
                    return Ok(tt);
                }

                let parsing_token_tree = ::std::mem::replace(&mut self.parsing_token_tree, true);
                let lo = self.span.lo;
            token::OpenDelim(..) => {
                let tt = self.tts.pop().unwrap().0;
                self.span = tt.span();
                self.bump();
                let tts = self.parse_seq_to_before_tokens(&[&token::CloseDelim(token::Brace),
                                                            &token::CloseDelim(token::Paren),
                                                            &token::CloseDelim(token::Bracket)],
                                                          SeqSep::none(),
                                                          |p| p.parse_token_tree(),
                                                          |mut e| e.emit());
                self.parsing_token_tree = parsing_token_tree;
                self.bump();

                Ok(TokenTree::Delimited(Span { lo: lo, ..self.prev_span }, Rc::new(Delimited {
                    delim: delim,
                    tts: tts,
                })))
                return Ok(tt);
            },
            token::CloseDelim(..) | token::Eof => Ok(TokenTree::Token(self.span, token::Eof)),
            token::Dollar | token::SubstNt(..) if self.quote_depth > 0 => self.parse_unquoted(),
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => Ok(TokenTree::Token(self.span, self.bump_and_get())),
        }
    }
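
// Illustration (not part of the diff): since the lexer already produced whole
// token trees, `parse_token_tree` on an open delimiter can simply pop the
// pre-built tree off the cursor stack instead of re-parsing up to the matching
// close. A sketch of that "pop, don't re-parse" idea:
#[derive(Clone, Debug, PartialEq)]
enum Tree { Token(char), Delimited(Vec<Tree>) }

fn next_tree(stack: &mut Vec<Tree>) -> Option<Tree> {
    stack.pop() // the whole subtree comes back in O(1)
}

fn main() {
    let mut stack = vec![Tree::Delimited(vec![Tree::Token('a'), Tree::Token('b')])];
    assert_eq!(
        next_tree(&mut stack),
        Some(Tree::Delimited(vec![Tree::Token('a'), Tree::Token('b')]))
    );
    assert_eq!(next_tree(&mut stack), None);
}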

@@ -3528,10 +3384,7 @@ impl<'a> Parser<'a> {
            token::Not if qself.is_none() => {
                // Parse macro invocation
                self.bump();
                let delim = self.expect_open_delim()?;
                let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
                                                SeqSep::none(),
                                                |p| p.parse_token_tree())?;
                let (_, tts) = self.expect_delimited_token_tree()?;
                let mac = spanned(lo, self.prev_span.hi, Mac_ { path: path, tts: tts });
                pat = PatKind::Mac(mac);
            }

@@ -3831,12 +3684,7 @@ impl<'a> Parser<'a> {
            },
        };

        let tts = self.parse_unspanned_seq(
            &token::OpenDelim(delim),
            &token::CloseDelim(delim),
            SeqSep::none(),
            |p| p.parse_token_tree()
        )?;
        let (_, tts) = self.expect_delimited_token_tree()?;
        let hi = self.prev_span.hi;

        let style = if delim == token::Brace {

@@ -4744,10 +4592,7 @@ impl<'a> Parser<'a> {
        self.expect(&token::Not)?;

        // eat a matched-delimiter token tree:
        let delim = self.expect_open_delim()?;
        let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
                                        SeqSep::none(),
                                        |p| p.parse_token_tree())?;
        let (delim, tts) = self.expect_delimited_token_tree()?;
        if delim != token::Brace {
            self.expect(&token::Semi)?
        }

@@ -5893,10 +5738,7 @@ impl<'a> Parser<'a> {
            keywords::Invalid.ident() // no special identifier
        };
        // eat a matched-delimiter token tree:
        let delim = self.expect_open_delim()?;
        let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
                                        SeqSep::none(),
                                        |p| p.parse_token_tree())?;
        let (delim, tts) = self.expect_delimited_token_tree()?;
        if delim != token::Brace {
            if !self.eat(&token::Semi) {
                let prev_span = self.prev_span;

@@ -50,8 +50,8 @@ pub enum DelimToken {
}

impl DelimToken {
    pub fn len(&self) -> u32 {
        if *self == NoDelim { 0 } else { 1 }
    pub fn len(self) -> usize {
        if self == NoDelim { 0 } else { 1 }
    }
}

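// Illustration (not part of the diff): `len` is the printed width of one
// delimiter, and `NoDelim` prints nothing, so span arithmetic such as
// `span.lo + len` stays correct for invisible delimiters. A sketch:
#[derive(Clone, Copy, PartialEq)]
enum DelimToken { Paren, NoDelim }

impl DelimToken {
    fn len(self) -> usize {
        if self == DelimToken::NoDelim { 0 } else { 1 }
    }
}

fn open_token_end(span_lo: usize, delim: DelimToken) -> usize {
    span_lo + delim.len() // where the opening delimiter's span ends
}

fn main() {
    assert_eq!(open_token_end(10, DelimToken::Paren), 11);
    assert_eq!(open_token_end(10, DelimToken::NoDelim), 10); // zero-width
}
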
@@ -152,9 +152,6 @@ pub enum Token {
    // Can be expanded into several tokens.
    /// Doc comment
    DocComment(ast::Name),
    // In left-hand-sides of MBE macros:
    /// Parse a nonterminal (name to bind, name of NT)
    MatchNt(ast::Ident, ast::Ident),
    // In right-hand-sides of MBE macros:
    /// A syntactic variable that will be filled in by macro expansion.
    SubstNt(ast::Ident),

@@ -271,7 +271,6 @@ pub fn token_to_string(tok: &Token) -> String {
        /* Other */
        token::DocComment(s) => s.to_string(),
        token::SubstNt(s) => format!("${}", s),
        token::MatchNt(s, t) => format!("${}:{}", s, t),
        token::Eof => "<eof>".to_string(),
        token::Whitespace => " ".to_string(),
        token::Comment => "/* */".to_string(),

@@ -1475,20 +1474,6 @@ impl<'a> State<'a> {
                space(&mut self.s)?;
                word(&mut self.s, &token_to_string(&delimed.close_token()))
            },
            TokenTree::Sequence(_, ref seq) => {
                word(&mut self.s, "$(")?;
                for tt_elt in &seq.tts {
                    self.print_tt(tt_elt)?;
                }
                word(&mut self.s, ")")?;
                if let Some(ref tk) = seq.separator {
                    word(&mut self.s, &token_to_string(tk))?;
                }
                match seq.op {
                    tokenstream::KleeneOp::ZeroOrMore => word(&mut self.s, "*"),
                    tokenstream::KleeneOp::OneOrMore => word(&mut self.s, "+"),
                }
            }
        }
    }

@@ -12,9 +12,7 @@
//!
//! TokenStreams represent syntactic objects before they are converted into ASTs.
//! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s,
//! which are themselves either a single Token, a Delimited subsequence of tokens,
//! or a SequenceRepetition specifier (for the purpose of sequence generation during macro
//! expansion).
//! which are themselves a single `Token` or a `Delimited` subsequence of tokens.
//!
//! ## Ownership
//! TokenStreams are persistent data structures constructed as ropes with reference

@@ -28,10 +26,10 @@ use ast::{self, AttrStyle, LitKind};
use syntax_pos::{BytePos, Span, DUMMY_SP};
use codemap::Spanned;
use ext::base;
use ext::tt::macro_parser;
use ext::tt::{macro_parser, quoted};
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use parse::{self, Directory};
use parse::token::{self, Token, Lit, Nonterminal};
use parse::token::{self, Token, Lit};
use print::pprust;
use serialize::{Decoder, Decodable, Encoder, Encodable};
use symbol::Symbol;

@@ -64,7 +62,7 @@ impl Delimited {
    pub fn open_tt(&self, span: Span) -> TokenTree {
        let open_span = match span {
            DUMMY_SP => DUMMY_SP,
            _ => Span { hi: span.lo + BytePos(self.delim.len()), ..span },
            _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span },
        };
        TokenTree::Token(open_span, self.open_token())
    }

@@ -73,7 +71,7 @@ impl Delimited {
    pub fn close_tt(&self, span: Span) -> TokenTree {
        let close_span = match span {
            DUMMY_SP => DUMMY_SP,
            _ => Span { lo: span.hi - BytePos(self.delim.len()), ..span },
            _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span },
        };
        TokenTree::Token(close_span, self.close_token())
    }

@@ -84,27 +82,6 @@ impl Delimited {
    }
}

/// A sequence of token trees
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct SequenceRepetition {
    /// The sequence of token trees
    pub tts: Vec<TokenTree>,
    /// The optional separator
    pub separator: Option<token::Token>,
    /// Whether the sequence can be repeated zero (*), or one or more times (+)
    pub op: KleeneOp,
    /// The number of `MatchNt`s that appear in the sequence (and subsequences)
    pub num_captures: usize,
}

/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum KleeneOp {
    ZeroOrMore,
    OneOrMore,
}

/// When the main rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token-tree. This is a very
/// loose structure, such that all sorts of different AST-fragments can

@@ -123,10 +100,6 @@ pub enum TokenTree {
    Token(Span, token::Token),
    /// A delimited sequence of token trees
    Delimited(Span, Rc<Delimited>),

    // This only makes sense in MBE macros.
    /// A kleene-style repetition sequence with a span
    Sequence(Span, Rc<SequenceRepetition>),
}

impl TokenTree {

@@ -138,15 +111,10 @@ impl TokenTree {
                AttrStyle::Inner => 3,
            }
        }
        TokenTree::Token(_, token::Interpolated(ref nt)) => {
            if let Nonterminal::NtTT(..) = **nt { 1 } else { 0 }
        },
        TokenTree::Token(_, token::MatchNt(..)) => 3,
        TokenTree::Delimited(_, ref delimed) => match delimed.delim {
            token::NoDelim => delimed.tts.len(),
            _ => delimed.tts.len() + 2,
        },
        TokenTree::Sequence(_, ref seq) => seq.tts.len(),
        TokenTree::Token(..) => 0,
    }
}

@@ -197,30 +165,12 @@ impl TokenTree {
            }
            delimed.tts[index - 1].clone()
        }
        (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
            let v = [TokenTree::Token(sp, token::SubstNt(name)),
                     TokenTree::Token(sp, token::Colon),
                     TokenTree::Token(sp, token::Ident(kind))];
            v[index].clone()
        }
        (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
        _ => panic!("Cannot expand a token tree"),
    }
}

/// Returns the `Span` corresponding to this token tree.
pub fn get_span(&self) -> Span {
    match *self {
        TokenTree::Token(span, _) => span,
        TokenTree::Delimited(span, _) => span,
        TokenTree::Sequence(span, _) => span,
    }
}

/// Use this token tree as a matcher to parse given tts.
pub fn parse(cx: &base::ExtCtxt,
             mtch: &[TokenTree],
             tts: &[TokenTree])
pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: &[TokenTree])
             -> macro_parser::NamedParseResult {
    // `None` is because we're not interpolating
    let directory = Directory {

@@ -252,9 +202,7 @@ impl TokenTree {
    /// Retrieve the TokenTree's span.
    pub fn span(&self) -> Span {
        match *self {
            TokenTree::Token(sp, _) |
            TokenTree::Delimited(sp, _) |
            TokenTree::Sequence(sp, _) => sp,
            TokenTree::Token(sp, _) | TokenTree::Delimited(sp, _) => sp,
        }
    }

@@ -79,7 +79,6 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver,
    quote_pat: expand_quote_pat,
    quote_arm: expand_quote_arm,
    quote_stmt: expand_quote_stmt,
    quote_matcher: expand_quote_matcher,
    quote_attr: expand_quote_attr,
    quote_arg: expand_quote_arg,
    quote_block: expand_quote_block,

@@ -54,8 +54,6 @@ pub fn main() {
    //~^ ERROR cannot find macro `quote_arm!` in this scope
    let x = quote_stmt!(ecx, 3);
    //~^ ERROR cannot find macro `quote_stmt!` in this scope
    let x = quote_matcher!(ecx, 3);
    //~^ ERROR cannot find macro `quote_matcher!` in this scope
    let x = quote_attr!(ecx, 3);
    //~^ ERROR cannot find macro `quote_attr!` in this scope
    let x = quote_arg!(ecx, 3);

@@ -8,9 +8,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

macro_rules! m { ($t:tt) => { $t } }
macro_rules! m { ($($t:tt)*) => { $($t)* } }

fn main() {
    m!($t); //~ ERROR unknown macro variable
    //~| ERROR expected expression
    m!($t); //~ ERROR expected expression
}

@@ -0,0 +1,18 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![deny(missing_fragment_specifier)] //~ NOTE lint level defined here

macro_rules! m { ($i) => {} }
//~^ ERROR missing fragment specifier
//~| WARN previously accepted
//~| NOTE issue #40107

fn main() {}

@@ -9,7 +9,7 @@
// except according to those terms.

macro_rules! foo {
    ($a:expr) => $a; //~ ERROR macro rhs must be delimited
    ($a:expr) => a; //~ ERROR macro rhs must be delimited
}

fn main() {

@@ -17,16 +17,5 @@ macro_rules! foo {

foo!(Box);

macro_rules! bar {
    ($x:tt) => {
        macro_rules! baz {
            ($x:tt, $y:tt) => { ($x, $y) }
        }
    }
}

#[rustc_error]
fn main() { //~ ERROR compilation successful
    bar!($y);
    let _: (i8, i16) = baz!(0i8, 0i16);
}
fn main() {} //~ ERROR compilation successful

@@ -9,7 +9,7 @@
// except according to those terms.

macro_rules! my_precioooous {
    $($t:tt)* => (1); //~ ERROR invalid macro matcher
    t => (1); //~ ERROR invalid macro matcher
}

fn main() {

@@ -12,7 +12,9 @@

macro_rules! foo {
    { $+ } => { //~ ERROR expected identifier, found `+`
                //~^ ERROR missing fragment specifier
        $(x)(y) //~ ERROR expected `*` or `+`
                //~^ ERROR no rules expected the token `)`
    }
}

foo!();

@@ -23,6 +23,7 @@ use syntax::ast::{Ident, Pat};
use syntax::tokenstream::{TokenTree};
use syntax::ext::base::{ExtCtxt, MacResult, MacEager};
use syntax::ext::build::AstBuilder;
use syntax::ext::tt::quoted;
use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
use syntax::ext::tt::macro_parser::{Success, Failure, Error};
use syntax::ext::tt::macro_parser::parse_failure_msg;

@@ -33,7 +34,8 @@ use rustc_plugin::Registry;
fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree])
                      -> Box<MacResult + 'static> {

    let mbe_matcher = quote_matcher!(cx, $matched:expr, $($pat:pat)|+);
    let mbe_matcher = quote_tokens!(cx, $$matched:expr, $$($$pat:pat)|+);
    let mbe_matcher = quoted::parse(&mbe_matcher, true, cx.parse_sess);
    let map = match TokenTree::parse(cx, &mbe_matcher, args) {
        Success(map) => map,
        Failure(_, tok) => {

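// Illustration (not part of the diff): `quote_matcher!` is replaced by two
// steps — quote plain tokens (the doubled `$$` escapes `$` inside
// `quote_tokens!`), then reinterpret them as a matcher with
// `quoted::parse(&tts, true, sess)`. A self-contained stand-in showing the
// same escape-then-reparse shape, with hypothetical helper names:
fn quote_tokens(src: &str) -> String {
    src.replace("$$", "$") // quoting step: `$$` collapses to a literal `$`
}

fn parse_matcher(tts: &str, expect_matchers: bool) -> Vec<String> {
    assert!(expect_matchers); // matcher-position parse, like quoted::parse(.., true, ..)
    tts.split_whitespace().map(str::to_string).collect()
}

fn main() {
    let tts = quote_tokens("$$matched:expr $$($$pat:pat)|+");
    assert_eq!(tts, "$matched:expr $($pat:pat)|+");
    let matcher = parse_matcher(&tts, true);
    assert_eq!(matcher.len(), 2);
}
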
@@ -14,11 +14,7 @@
#![feature(plugin)]
#![plugin(procedural_mbe_matching)]

#[no_link]
extern crate procedural_mbe_matching;

pub fn main() {
    let abc = 123u32;
    assert_eq!(matches!(Some(123), None | Some(0)), false);
    assert_eq!(matches!(Some(123), None | Some(123)), true);
    assert_eq!(matches!(true, true), true);

@@ -0,0 +1,27 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// force-host
// no-prefer-dynamic

#![crate_type = "proc-macro"]

extern crate proc_macro;
use proc_macro::TokenStream;

#[proc_macro_derive(Issue39889)]
pub fn f(_input: TokenStream) -> TokenStream {
    let rules = r#"
        macro_rules! id {
            ($($tt:tt)*) => { $($tt)* };
        }
    "#;
    rules.parse().unwrap()
}
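
// Illustration (not part of the diff): a derive can emit any tokens, including
// a `macro_rules!` definition, by parsing a string into a TokenStream — which
// is what the regression test above exercises (issue #39889, where such
// `$(...)` repetitions in proc-macro output failed to round-trip). A crude
// stand-alone check sketching the parse step:
fn looks_like_macro_rules(src: &str) -> bool {
    // structural sanity check standing in for TokenStream::from_str
    src.contains("macro_rules!") && src.matches('{').count() == src.matches('}').count()
}

fn main() {
    let rules = r#"
        macro_rules! id {
            ($($tt:tt)*) => { $($tt)* };
        }
    "#;
    assert!(looks_like_macro_rules(rules));
}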
@@ -0,0 +1,22 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// aux-build:issue-39889.rs

#![feature(proc_macro)]
#![allow(unused)]

extern crate issue_39889;
use issue_39889::Issue39889;

#[derive(Issue39889)]
struct S;

fn main() {}

@@ -37,7 +37,6 @@ fn syntax_extension(cx: &ExtCtxt) {

    let _l: P<syntax::ast::Ty> = quote_ty!(cx, &isize);

    let _m: Vec<syntax::tokenstream::TokenTree> = quote_matcher!(cx, $($foo:tt,)* bar);
    let _n: syntax::ast::Attribute = quote_attr!(cx, #![cfg(foo, bar = "baz")]);

    let _o: Option<P<syntax::ast::Item>> = quote_item!(cx, fn foo<T: ?Sized>() {});

@@ -9,7 +9,6 @@
// except according to those terms.

fn main() {
    println!("{}", { macro_rules! x { ($()*) => {} } 33 });
    //~^ ERROR no syntax variables matched as repeating at this depth
    println!("{}", { macro_rules! x { ($(t:tt)*) => {} } 33 });
}