Remove crate proc_macro_tokens.

Jeffrey Seyfried 2017-01-22 09:09:33 +00:00
parent 2dc60b1180
commit 31417efcd3
7 changed files with 1 addition and 205 deletions

mk/crates.mk

@@ -60,7 +60,7 @@ RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_
 	rustc_data_structures rustc_platform_intrinsics rustc_errors \
 	rustc_plugin rustc_metadata rustc_passes rustc_save_analysis \
 	rustc_const_eval rustc_const_math rustc_incremental proc_macro
-HOST_CRATES := syntax syntax_ext proc_macro_tokens proc_macro_plugin syntax_pos $(RUSTC_CRATES) \
+HOST_CRATES := syntax syntax_ext proc_macro_plugin syntax_pos $(RUSTC_CRATES) \
 	rustdoc fmt_macros flate arena graphviz log serialize
 TOOLS := compiletest rustdoc rustc rustbook error_index_generator
@@ -102,7 +102,6 @@ DEPS_syntax := std term serialize log arena libc rustc_bitflags std_unicode rust
 DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros proc_macro
 DEPS_proc_macro := syntax syntax_pos rustc_plugin log
 DEPS_syntax_pos := serialize
-DEPS_proc_macro_tokens := syntax syntax_pos log
 DEPS_proc_macro_plugin := syntax syntax_pos rustc_plugin
 DEPS_rustc_const_math := std syntax log serialize rustc_i128

src/Cargo.lock generated

@@ -213,13 +213,6 @@ dependencies = [
  "syntax_pos 0.0.0",
 ]
 
-[[package]]
-name = "proc_macro_tokens"
-version = "0.0.0"
-dependencies = [
- "syntax 0.0.0",
-]
-
 [[package]]
 name = "rand"
 version = "0.0.0"

src/libproc_macro_tokens/Cargo.toml

@@ -1,12 +0,0 @@
[package]
authors = ["The Rust Project Developers"]
name = "proc_macro_tokens"
version = "0.0.0"
build = false

[lib]
path = "lib.rs"
crate-type = ["dylib"]

[dependencies]
syntax = { path = "../libsyntax" }

src/libproc_macro_tokens/build.rs

@@ -1,85 +0,0 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use syntax::ast::Ident;
use syntax::codemap::DUMMY_SP;
use syntax::parse::token::{self, Token};
use syntax::symbol::keywords;
use syntax::tokenstream::{self, TokenTree, TokenStream};

use std::rc::Rc;

/// A wrapper around `TokenStream::concat` to avoid extra namespace specification and
/// provide TokenStream concatenation as a generic operator.
pub fn concat(ts1: TokenStream, ts2: TokenStream) -> TokenStream {
    TokenStream::concat([ts1, ts2].iter().cloned())
}

/// Checks if two identifiers have the same name, disregarding context. This allows us to
/// fake 'reserved' keywords.
// FIXME We really want `free-identifier-=?` (a la Dybvig 1993). von Tander 2007 is
// probably the easiest way to do that.
pub fn ident_eq(tident: &TokenTree, id: Ident) -> bool {
    let tid = match *tident {
        TokenTree::Token(_, Token::Ident(ref id)) => id,
        _ => {
            return false;
        }
    };
    tid.name == id.name
}

// ____________________________________________________________________________________________
// Conversion operators

/// Convert a `&str` into a Token.
pub fn str_to_token_ident(s: &str) -> Token {
    Token::Ident(Ident::from_str(s))
}

/// Converts a keyword (from `syntax::parse::token::keywords`) into a Token that
/// corresponds to it.
pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token {
    Token::Ident(Ident::from_str(&kw.name().as_str()[..]))
}

// ____________________________________________________________________________________________
// Build Procedures

/// Generically takes a `ts` and delimiter and returns `ts` delimited by the specified
/// delimiter.
pub fn build_delimited(ts: TokenStream, delim: token::DelimToken) -> TokenStream {
    TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
        delim: delim,
        open_span: DUMMY_SP,
        tts: ts.trees().cloned().collect(),
        close_span: DUMMY_SP,
    })).into()
}

/// Takes `ts` and returns `[ts]`.
pub fn build_bracket_delimited(ts: TokenStream) -> TokenStream {
    build_delimited(ts, token::DelimToken::Bracket)
}

/// Takes `ts` and returns `{ts}`.
pub fn build_brace_delimited(ts: TokenStream) -> TokenStream {
    build_delimited(ts, token::DelimToken::Brace)
}

/// Takes `ts` and returns `(ts)`.
pub fn build_paren_delimited(ts: TokenStream) -> TokenStream {
    build_delimited(ts, token::DelimToken::Paren)
}

/// Constructs `()`.
pub fn build_empty_args() -> TokenStream {
    build_paren_delimited(TokenStream::empty())
}

src/libproc_macro_tokens/lib.rs

@@ -1,64 +0,0 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! # Proc_Macro
//!
//! A library for procedural macro writers.
//!
//! ## Usage
//! This crate provides the prelude (at libproc_macro_tokens::prelude), which
//! provides a number of operations:
//! - `concat`, for concatenating two TokenStreams.
//! - `ident_eq`, for checking if two identifiers are equal regardless of syntax context.
//! - `str_to_token_ident`, for converting an `&str` into a Token.
//! - `keyword_to_token_delim`, for converting a `parse::token::keywords::Keyword` into a
//!   Token.
//! - `build_delimited`, for creating a new TokenStream from an existing one and a delimiter
//!   by wrapping the TokenStream in the delimiter.
//! - `build_bracket_delimited`, `build_brace_delimited`, and `build_paren_delimited`, for
//!   easing the above.
//! - `build_empty_args`, which returns a TokenStream containing `()`.
//! - `lex`, which takes an `&str` and returns the TokenStream it represents.
//!
//! ## TokenStreams
//!
//! TokenStreams serve as the basis of the macro system. They are, in essence, vectors of
//! TokenTrees, where indexing treats delimited values as a single term. That is, the term
//! `even(a+c) && even(b)` will be indexibly encoded as `even | (a+c) | even | (b)` where,
//! in reality, `(a+c)` is actually a decorated pointer to `a | + | c`.
//!
//! If a user has a TokenStream that is a single, delimited value, they can use
//! `maybe_delimited` to destruct it and receive the internal vector as a new TokenStream
//! as:
//! ```
//! `(a+c)`.maybe_delimited() ~> Some(a | + | c)`
//! ```
//!
//! Check the TokenStream documentation for more information; the structure also provides
//! cheap concatenation and slicing.
//!

#![crate_name = "proc_macro_tokens"]
#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]

#![feature(staged_api)]
#![feature(rustc_private)]

extern crate syntax;

pub mod build;
pub mod parse;
pub mod prelude;
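
The crate docs above single out `ident_eq` as the way to fake 'reserved' keywords by comparing identifier names while ignoring syntax context. Here is a hedged sketch of that check under the same pre-removal assumptions as before; `starts_with_unless` and the keyword `unless` are made up for illustration.

// Hypothetical sketch; same pre-removal nightly / rustc_private assumptions.
#![feature(rustc_private)]

extern crate proc_macro_tokens;
extern crate syntax;

use proc_macro_tokens::build::ident_eq;
use syntax::ast::Ident;
use syntax::tokenstream::TokenStream;

/// True if the first token tree of `ts` is the identifier `unless`,
/// regardless of the syntax context it came from.
fn starts_with_unless(ts: &TokenStream) -> bool {
    ts.trees().next().map_or(false, |tt| ident_eq(tt, Ident::from_str("unless")))
}

fn main() {
    // An empty stream has no leading identifier at all.
    assert!(!starts_with_unless(&TokenStream::empty()));
}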

src/libproc_macro_tokens/parse.rs

@@ -1,23 +0,0 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Parsing utilities for writing procedural macros.

use syntax::parse::{ParseSess, filemap_to_tts};
use syntax::tokenstream::TokenStream;

/// Map a string to tts, using a made-up filename. For example, `lex("15")` will return a
/// TokenStream containing the literal 15.
pub fn lex(source_str: &str) -> TokenStream {
    let sess = ParseSess::new();
    let filemap =
        sess.codemap().new_filemap("<procmacro_lex>".to_string(), None, source_str.to_owned());
    filemap_to_tts(&sess, filemap).into_iter().collect()
}
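
Since `lex` produces an ordinary TokenStream, it slots directly into the build helpers from build.rs above. A small, hypothetical pairing under the same pre-removal assumptions:

// Hypothetical sketch; same pre-removal nightly / rustc_private assumptions.
#![feature(rustc_private)]

extern crate proc_macro_tokens;

use proc_macro_tokens::build::build_bracket_delimited;
use proc_macro_tokens::parse::lex;

fn main() {
    // `lex` turns source text into the token stream `1 , 2 , 3`...
    let elems = lex("1, 2, 3");
    // ...and the delimiter helper wraps it in brackets: `[1, 2, 3]`.
    let _array = build_bracket_delimited(elems);
}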

src/libproc_macro_tokens/prelude.rs

@@ -1,12 +0,0 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

pub use build::*;
pub use parse::*;