Add LazyTokenStream.

This commit is contained in:
Jeffrey Seyfried 2017-03-29 01:55:01 +00:00
parent e42836b208
commit 7d493bdd2a
13 changed files with 151 additions and 89 deletions

View File

@ -42,6 +42,7 @@
#![feature(staged_api)]
#![feature(lang_items)]
#[macro_use]
extern crate syntax;
extern crate syntax_pos;
@ -50,7 +51,8 @@ use std::str::FromStr;
use syntax::ast;
use syntax::errors::DiagnosticBuilder;
use syntax::parse::{self, token};
use syntax::parse::{self, token, parse_stream_from_source_str};
use syntax::print::pprust;
use syntax::symbol;
use syntax::tokenstream;
use syntax_pos::DUMMY_SP;
@ -337,8 +339,18 @@ impl Iterator for TokenIter {
type Item = TokenTree;
fn next(&mut self) -> Option<TokenTree> {
self.next.take().or_else(|| self.cursor.next_as_stream())
.map(|next| TokenTree::from_raw(next, &mut self.next))
loop {
let next =
unwrap_or!(self.next.take().or_else(|| self.cursor.next_as_stream()), return None);
let tree = TokenTree::from_raw(next, &mut self.next);
if tree.span.0 == DUMMY_SP {
if let TokenKind::Sequence(Delimiter::None, stream) = tree.kind {
self.cursor.insert(stream.0);
continue
}
}
return Some(tree);
}
}
}
@ -449,7 +461,14 @@ impl TokenTree {
Ident(ident) | Lifetime(ident) => TokenKind::Word(Symbol(ident.name)),
Literal(..) | DocComment(..) => TokenKind::Literal(self::Literal(token)),
Interpolated(..) => unimplemented!(),
Interpolated(ref nt) => __internal::with_sess(|(sess, _)| {
TokenKind::Sequence(Delimiter::None, TokenStream(nt.1.force(|| {
// FIXME(jseyfried): Avoid this pretty-print + reparse hack
let name = "<macro expansion>".to_owned();
let source = pprust::token_to_string(&token);
parse_stream_from_source_str(name, source, sess, Some(span))
})))
}),
OpenDelim(..) | CloseDelim(..) => unreachable!(),
Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
@ -530,20 +549,21 @@ pub mod __internal {
pub use self::quote::{Quoter, __rt};
use std::cell::Cell;
use std::rc::Rc;
use syntax::ast;
use syntax::ext::base::ExtCtxt;
use syntax::ext::hygiene::Mark;
use syntax::ptr::P;
use syntax::parse::{self, token, ParseSess};
use syntax::parse::{self, ParseSess};
use syntax::parse::token::{self, Token};
use syntax::tokenstream;
use syntax_pos::DUMMY_SP;
use super::{TokenStream, LexError};
pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
let (span, token) = (item.span, token::Interpolated(Rc::new(token::NtItem(item))));
TokenStream(tokenstream::TokenTree::Token(span, token).into())
let token = Token::interpolated(token::NtItem(item));
TokenStream(tokenstream::TokenTree::Token(DUMMY_SP, token).into())
}
pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream {

View File

@ -1057,7 +1057,7 @@ impl MetaItem {
{
let (mut span, name) = match tokens.next() {
Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match **nt {
Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 {
token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name),
token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
_ => return None,
@ -1229,7 +1229,7 @@ impl LitKind {
match token {
Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)),
Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)),
Token::Interpolated(ref nt) => match **nt {
Token::Interpolated(ref nt) => match nt.0 {
token::NtExpr(ref v) => match v.node {
ExprKind::Lit(ref lit) => Some(lit.node.clone()),
_ => None,

View File

@ -215,7 +215,7 @@ impl<F> TTMacroExpander for F
impl Folder for AvoidInterpolatedIdents {
fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree {
if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt {
if let token::NtIdent(ident) = **nt {
if let token::NtIdent(ident) = nt.0 {
return tokenstream::TokenTree::Token(ident.span, token::Ident(ident.node));
}
}

View File

@ -21,15 +21,15 @@ use ext::placeholders::{placeholder, PlaceholderExpander};
use feature_gate::{self, Features, is_builtin_attr};
use fold;
use fold::*;
use parse::{filemap_to_stream, ParseSess, DirectoryOwnership, PResult, token};
use parse::{DirectoryOwnership, PResult};
use parse::token::{self, Token};
use parse::parser::Parser;
use print::pprust;
use ptr::P;
use std_inject;
use symbol::Symbol;
use symbol::keywords;
use syntax_pos::{Span, DUMMY_SP};
use tokenstream::TokenStream;
use tokenstream::{TokenStream, TokenTree};
use util::small_vector::SmallVector;
use visit::Visitor;
@ -427,11 +427,13 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
kind.expect_from_annotatables(items)
}
SyntaxExtension::AttrProcMacro(ref mac) => {
let item_toks = stream_for_item(&item, self.cx.parse_sess);
let span = Span { ctxt: self.cx.backtrace(), ..attr.span };
let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks);
self.parse_expansion(tok_result, kind, &attr.path, span)
let item_tok = TokenTree::Token(DUMMY_SP, Token::interpolated(match item {
Annotatable::Item(item) => token::NtItem(item),
Annotatable::TraitItem(item) => token::NtTraitItem(item.unwrap()),
Annotatable::ImplItem(item) => token::NtImplItem(item.unwrap()),
})).into();
let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_tok);
self.parse_expansion(tok_result, kind, &attr.path, attr.span)
}
SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) => {
self.cx.span_err(attr.span, &format!("`{}` is a derive mode", attr.path));
@ -769,28 +771,6 @@ pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute
.map(|i| attrs.remove(i))
}
// These are pretty nasty. Ideally, we would keep the tokens around, linked from
// the AST. However, we don't so we need to create new ones. Since the item might
// have come from a macro expansion (possibly only in part), we can't use the
// existing codemap.
//
// Therefore, we must use the pretty printer (yuck) to turn the AST node into a
// string, which we then re-tokenise (double yuck), but first we have to patch
// the pretty-printed string on to the end of the existing codemap (infinity-yuck).
fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream {
let text = match *item {
Annotatable::Item(ref i) => pprust::item_to_string(i),
Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti),
Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii),
};
string_to_stream(text, parse_sess, item.span())
}
fn string_to_stream(text: String, parse_sess: &ParseSess, span: Span) -> TokenStream {
let filename = String::from("<macro expansion>");
filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text), Some(span))
}
impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
let mut expr = self.cfg.configure_expr(expr).unwrap();

View File

@ -30,9 +30,9 @@ pub mod rt {
use ast;
use codemap::Spanned;
use ext::base::ExtCtxt;
use parse::{self, token, classify};
use parse::{self, classify};
use parse::token::{self, Token};
use ptr::P;
use std::rc::Rc;
use symbol::Symbol;
use tokenstream::{self, TokenTree, TokenStream};
@ -82,70 +82,70 @@ pub mod rt {
impl ToTokens for ast::Path {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtPath(self.clone());
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
}
}
impl ToTokens for ast::Ty {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtTy(P(self.clone()));
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
}
}
impl ToTokens for ast::Block {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtBlock(P(self.clone()));
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
}
}
impl ToTokens for ast::Generics {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtGenerics(self.clone());
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
}
}
impl ToTokens for ast::WhereClause {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtWhereClause(self.clone());
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
}
}
impl ToTokens for P<ast::Item> {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtItem(self.clone());
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
}
}
impl ToTokens for ast::ImplItem {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtImplItem(self.clone());
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
}
}
impl ToTokens for P<ast::ImplItem> {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtImplItem((**self).clone());
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
}
}
impl ToTokens for ast::TraitItem {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtTraitItem(self.clone());
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
}
}
impl ToTokens for ast::Stmt {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtStmt(self.clone());
let mut tts = vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))];
let mut tts = vec![TokenTree::Token(self.span, Token::interpolated(nt))];
// Some statements require a trailing semicolon.
if classify::stmt_ends_with_semi(&self.node) {
@ -159,35 +159,35 @@ pub mod rt {
impl ToTokens for P<ast::Expr> {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtExpr(self.clone());
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
}
}
impl ToTokens for P<ast::Pat> {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtPat(self.clone());
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
}
}
impl ToTokens for ast::Arm {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtArm(self.clone());
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
}
}
impl ToTokens for ast::Arg {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtArg(self.clone());
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
}
}
impl ToTokens for P<ast::Block> {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtBlock(self.clone());
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
}
}
@ -215,7 +215,7 @@ pub mod rt {
impl ToTokens for ast::MetaItem {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let nt = token::NtMeta(self.clone());
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
}
}

View File

@ -156,7 +156,7 @@ pub fn transcribe(cx: &ExtCtxt,
result.push(tt.clone().into());
} else {
sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark);
let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
let token = TokenTree::Token(sp, Token::interpolated((**nt).clone()));
result.push(token.into());
}
} else {

View File

@ -22,7 +22,7 @@ use ast::*;
use ast;
use syntax_pos::Span;
use codemap::{Spanned, respan};
use parse::token;
use parse::token::{self, Token};
use ptr::P;
use symbol::keywords;
use tokenstream::*;
@ -586,7 +586,7 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token
Ok(nt) => nt,
Err(nt) => (*nt).clone(),
};
token::Interpolated(Rc::new(fld.fold_interpolated(nt)))
Token::interpolated(fld.fold_interpolated(nt.0))
}
_ => t
}

View File

@ -151,7 +151,7 @@ impl<'a> Parser<'a> {
pub fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
let meta = match self.token {
token::Interpolated(ref nt) => match **nt {
token::Interpolated(ref nt) => match nt.0 {
Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
_ => None,
},
@ -223,7 +223,7 @@ impl<'a> Parser<'a> {
/// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
let nt_meta = match self.token {
token::Interpolated(ref nt) => match **nt {
token::Interpolated(ref nt) => match nt.0 {
token::NtMeta(ref e) => Some(e.clone()),
_ => None,
},

View File

@ -107,7 +107,7 @@ pub enum BlockMode {
macro_rules! maybe_whole_expr {
($p:expr) => {
if let token::Interpolated(nt) = $p.token.clone() {
match *nt {
match nt.0 {
token::NtExpr(ref e) => {
$p.bump();
return Ok((*e).clone());
@ -134,7 +134,7 @@ macro_rules! maybe_whole_expr {
macro_rules! maybe_whole {
($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
if let token::Interpolated(nt) = $p.token.clone() {
if let token::$constructor($x) = (*nt).clone() {
if let token::$constructor($x) = nt.0.clone() {
$p.bump();
return Ok($e);
}
@ -1620,7 +1620,7 @@ impl<'a> Parser<'a> {
/// Matches token_lit = LIT_INTEGER | ...
pub fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
let out = match self.token {
token::Interpolated(ref nt) => match **nt {
token::Interpolated(ref nt) => match nt.0 {
token::NtExpr(ref v) => match v.node {
ExprKind::Lit(ref lit) => { lit.node.clone() }
_ => { return self.unexpected_last(&self.token); }
@ -1791,7 +1791,7 @@ impl<'a> Parser<'a> {
/// This is used when parsing derive macro paths in `#[derive]` attributes.
pub fn parse_path_allowing_meta(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> {
let meta_ident = match self.token {
token::Interpolated(ref nt) => match **nt {
token::Interpolated(ref nt) => match nt.0 {
token::NtMeta(ref meta) => match meta.node {
ast::MetaItemKind::Word => Some(ast::Ident::with_empty_ctxt(meta.name)),
_ => None,
@ -2635,7 +2635,7 @@ impl<'a> Parser<'a> {
}
token::Interpolated(ref nt) => {
self.meta_var_span = Some(self.span);
match **nt {
match nt.0 {
token::NtIdent(ident) => ident,
_ => return,
}

View File

@ -16,9 +16,11 @@ pub use self::Token::*;
use ast::{self};
use ptr::P;
use serialize::{Decodable, Decoder, Encodable, Encoder};
use symbol::keywords;
use tokenstream::TokenTree;
use tokenstream::{TokenStream, TokenTree};
use std::cell::RefCell;
use std::fmt;
use std::rc::Rc;
@ -168,7 +170,7 @@ pub enum Token {
Lifetime(ast::Ident),
/* For interpolation */
Interpolated(Rc<Nonterminal>),
Interpolated(Rc<(Nonterminal, LazyTokenStream)>),
// Can be expanded into several tokens.
/// Doc comment
DocComment(ast::Name),
@ -187,6 +189,10 @@ pub enum Token {
}
impl Token {
/// Wraps `nt` in a `Token::Interpolated`, pairing it with a fresh,
/// not-yet-forced `LazyTokenStream` cache. The `Rc` makes the
/// (nonterminal, lazy stream) pair cheap to clone between tokens.
pub fn interpolated(nt: Nonterminal) -> Token {
Token::Interpolated(Rc::new((nt, LazyTokenStream::new())))
}
/// Returns `true` if the token starts with '>'.
pub fn is_like_gt(&self) -> bool {
match *self {
@ -211,7 +217,7 @@ impl Token {
Lt | BinOp(Shl) | // associated path
ModSep | // global path
Pound => true, // expression attributes
Interpolated(ref nt) => match **nt {
Interpolated(ref nt) => match nt.0 {
NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true,
_ => false,
},
@ -234,7 +240,7 @@ impl Token {
Lifetime(..) | // lifetime bound in trait object
Lt | BinOp(Shl) | // associated path
ModSep => true, // global path
Interpolated(ref nt) => match **nt {
Interpolated(ref nt) => match nt.0 {
NtIdent(..) | NtTy(..) | NtPath(..) => true,
_ => false,
},
@ -253,7 +259,7 @@ impl Token {
pub fn ident(&self) -> Option<ast::Ident> {
match *self {
Ident(ident) => Some(ident),
Interpolated(ref nt) => match **nt {
Interpolated(ref nt) => match nt.0 {
NtIdent(ident) => Some(ident.node),
_ => None,
},
@ -285,7 +291,7 @@ impl Token {
/// Returns `true` if the token is an interpolated path.
pub fn is_path(&self) -> bool {
if let Interpolated(ref nt) = *self {
if let NtPath(..) = **nt {
if let NtPath(..) = nt.0 {
return true;
}
}
@ -461,3 +467,38 @@ pub fn is_op(tok: &Token) -> bool {
_ => true,
}
}
/// A cache for a `TokenStream` that is computed on demand: the inner
/// `Option` is `None` until the first call to `force` fills it in.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct LazyTokenStream(RefCell<Option<TokenStream>>);
impl LazyTokenStream {
    /// Creates a `LazyTokenStream` whose stream has not been computed yet.
    pub fn new() -> Self {
        LazyTokenStream(RefCell::new(None))
    }

    /// Returns the cached token stream, invoking `f` to compute and
    /// cache it on the first call; later calls ignore `f` and return
    /// a clone of the cached stream.
    ///
    /// Note: the `RefCell` is mutably borrowed while `f` runs, so `f`
    /// must not re-enter `force` on the same `LazyTokenStream`.
    pub fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream {
        let mut cached = self.0.borrow_mut();
        if cached.is_none() {
            *cached = Some(f());
        }
        cached.as_ref().unwrap().clone()
    }
}
// The lazy cache is deliberately not serialized: encoding writes
// nothing, and the matching `Decodable` impl restores an unforced
// (empty) cache to be recomputed on demand.
impl Encodable for LazyTokenStream {
fn encode<S: Encoder>(&self, _: &mut S) -> Result<(), S::Error> {
// Nothing to write; the cached stream is transient state.
Ok(())
}
}
// Decoding always yields a fresh, unforced `LazyTokenStream` — the
// `Encodable` impl wrote nothing, so there is nothing to read; the
// stream will be recomputed lazily via `force` after deserialization.
impl Decodable for LazyTokenStream {
fn decode<D: Decoder>(_: &mut D) -> Result<LazyTokenStream, D::Error> {
Ok(LazyTokenStream::new())
}
}
// Hashes the cached `Option<TokenStream>` directly. The hash therefore
// differs before and after the stream is forced; this is consistent
// with the derived `PartialEq`, which also compares the raw cache.
impl ::std::hash::Hash for LazyTokenStream {
fn hash<H: ::std::hash::Hasher>(&self, hasher: &mut H) {
self.0.borrow().hash(hasher);
}
}

View File

@ -275,7 +275,7 @@ pub fn token_to_string(tok: &Token) -> String {
token::Comment => "/* */".to_string(),
token::Shebang(s) => format!("/* shebang: {}*/", s),
token::Interpolated(ref nt) => match **nt {
token::Interpolated(ref nt) => match nt.0 {
token::NtExpr(ref e) => expr_to_string(e),
token::NtMeta(ref e) => meta_item_to_string(e),
token::NtTy(ref e) => ty_to_string(e),

View File

@ -348,6 +348,10 @@ struct StreamCursor {
}
impl StreamCursor {
/// Builds a cursor positioned at the start of `stream`, with no
/// saved enclosing streams on the stack.
fn new(stream: RcSlice<TokenStream>) -> Self {
    StreamCursor { index: 0, stack: Vec::new(), stream: stream }
}
fn next_as_stream(&mut self) -> Option<TokenStream> {
loop {
if self.index < self.stream.len() {
@ -355,10 +359,7 @@ impl StreamCursor {
let next = self.stream[self.index - 1].clone();
match next.kind {
TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) => return Some(next),
TokenStreamKind::Stream(stream) => {
self.stack.push((mem::replace(&mut self.stream, stream),
mem::replace(&mut self.index, 0)));
}
TokenStreamKind::Stream(stream) => self.insert(stream),
TokenStreamKind::Empty => {}
}
} else if let Some((stream, index)) = self.stack.pop() {
@ -369,6 +370,11 @@ impl StreamCursor {
}
}
}
/// Descends into `stream`: the current stream and position are saved
/// on the stack (to be resumed later) and iteration restarts at the
/// beginning of `stream`.
fn insert(&mut self, stream: RcSlice<TokenStream>) {
    let outer_stream = mem::replace(&mut self.stream, stream);
    let outer_index = mem::replace(&mut self.index, 0);
    self.stack.push((outer_stream, outer_index));
}
}
impl Iterator for Cursor {
@ -388,9 +394,7 @@ impl Cursor {
TokenStreamKind::Empty => CursorKind::Empty,
TokenStreamKind::Tree(tree) => CursorKind::Tree(tree, false),
TokenStreamKind::JointTree(tree) => CursorKind::JointTree(tree, false),
TokenStreamKind::Stream(stream) => {
CursorKind::Stream(StreamCursor { stream: stream, index: 0, stack: Vec::new() })
}
TokenStreamKind::Stream(stream) => CursorKind::Stream(StreamCursor::new(stream)),
})
}
@ -408,13 +412,30 @@ impl Cursor {
Some(stream)
}
pub fn original_stream(self) -> TokenStream {
/// Splices `stream` into the cursor so that its trees are yielded
/// before whatever remains at the cursor's current position.
///
/// - An empty `stream` is a no-op.
/// - An exhausted (`Empty`) cursor is simply replaced by `stream`.
/// - A single-tree cursor is rebuilt as the concatenation of its
///   original stream and `stream`; if the single tree had already been
///   consumed, one `next()` steps past it again so iteration resumes
///   at the start of the inserted stream.
/// - A multi-tree cursor delegates to `StreamCursor::insert`.
pub fn insert(&mut self, stream: TokenStream) {
match self.0 {
_ if stream.is_empty() => return,
CursorKind::Empty => *self = stream.trees(),
CursorKind::Tree(_, consumed) | CursorKind::JointTree(_, consumed) => {
// Rebuild the cursor over [original tree, inserted stream].
*self = TokenStream::concat(vec![self.original_stream(), stream]).trees();
if consumed {
// Re-skip the tree that was already yielded.
self.next();
}
}
CursorKind::Stream(ref mut cursor) => {
// NOTE(review): `.0.unwrap()` assumes a non-empty stream always
// converts to a `ThinTokenStream` with `Some` contents — the
// emptiness guard above appears to guarantee this; confirm.
cursor.insert(ThinTokenStream::from(stream).0.unwrap());
}
}
}
pub fn original_stream(&self) -> TokenStream {
match self.0 {
CursorKind::Empty => TokenStream::empty(),
CursorKind::Tree(tree, _) => tree.into(),
CursorKind::JointTree(tree, _) => tree.joint(),
CursorKind::Stream(cursor) => TokenStream::concat_rc_slice({
cursor.stack.get(0).cloned().map(|(stream, _)| stream).unwrap_or(cursor.stream)
CursorKind::Tree(ref tree, _) => tree.clone().into(),
CursorKind::JointTree(ref tree, _) => tree.clone().joint(),
CursorKind::Stream(ref cursor) => TokenStream::concat_rc_slice({
cursor.stack.get(0).cloned().map(|(stream, _)| stream)
.unwrap_or(cursor.stream.clone())
}),
}
}

View File

@ -24,7 +24,7 @@ pub fn attr_with_args(args: TokenStream, input: TokenStream) -> TokenStream {
let input = input.to_string();
assert_eq!(input, "fn foo ( ) { }");
assert_eq!(input, "fn foo() { }");
r#"
fn foo() -> &'static str { "Hello, world!" }