Move `syntax::util::interner` -> `syntax::symbol`, cleanup.

This commit is contained in:
Jeffrey Seyfried 2016-11-16 08:21:52 +00:00
parent f177a00ac9
commit d2f8fb0a0a
102 changed files with 752 additions and 806 deletions

View File

@ -34,8 +34,9 @@ use syntax::codemap::Span;
use syntax::ext::base::*; use syntax::ext::base::*;
use syntax::ext::base; use syntax::ext::base;
use syntax::ext::proc_macro_shim::build_block_emitter; use syntax::ext::proc_macro_shim::build_block_emitter;
use syntax::parse::token::{self, Token, gensym_ident, str_to_ident}; use syntax::parse::token::{self, Token};
use syntax::print::pprust; use syntax::print::pprust;
use syntax::symbol::Symbol;
use syntax::tokenstream::{TokenTree, TokenStream}; use syntax::tokenstream::{TokenTree, TokenStream};
// ____________________________________________________________________________________________ // ____________________________________________________________________________________________
@ -124,7 +125,7 @@ fn qquote_iter<'cx>(cx: &'cx mut ExtCtxt, depth: i64, ts: TokenStream) -> (Bindi
} // produce an error or something first } // produce an error or something first
let exp = vec![exp.unwrap().to_owned()]; let exp = vec![exp.unwrap().to_owned()];
debug!("RHS: {:?}", exp.clone()); debug!("RHS: {:?}", exp.clone());
let new_id = gensym_ident("tmp"); let new_id = Ident::with_empty_ctxt(Symbol::gensym("tmp"));
debug!("RHS TS: {:?}", TokenStream::from_tts(exp.clone())); debug!("RHS TS: {:?}", TokenStream::from_tts(exp.clone()));
debug!("RHS TS TT: {:?}", TokenStream::from_tts(exp.clone()).to_vec()); debug!("RHS TS TT: {:?}", TokenStream::from_tts(exp.clone()).to_vec());
bindings.push((new_id, TokenStream::from_tts(exp))); bindings.push((new_id, TokenStream::from_tts(exp)));
@ -179,7 +180,7 @@ fn unravel_concats(tss: Vec<TokenStream>) -> TokenStream {
}; };
while let Some(ts) = pushes.pop() { while let Some(ts) = pushes.pop() {
output = build_fn_call(str_to_ident("concat"), output = build_fn_call(Ident::from_str("concat"),
concat(concat(ts, concat(concat(ts,
from_tokens(vec![Token::Comma])), from_tokens(vec![Token::Comma])),
output)); output));
@ -209,18 +210,19 @@ fn convert_complex_tts<'cx>(cx: &'cx mut ExtCtxt, tts: Vec<QTT>) -> (Bindings, T
// FIXME handle sequence repetition tokens // FIXME handle sequence repetition tokens
QTT::QDL(qdl) => { QTT::QDL(qdl) => {
debug!(" QDL: {:?} ", qdl.tts); debug!(" QDL: {:?} ", qdl.tts);
let new_id = gensym_ident("qdl_tmp"); let new_id = Ident::with_empty_ctxt(Symbol::gensym("qdl_tmp"));
let mut cct_rec = convert_complex_tts(cx, qdl.tts); let mut cct_rec = convert_complex_tts(cx, qdl.tts);
bindings.append(&mut cct_rec.0); bindings.append(&mut cct_rec.0);
bindings.push((new_id, cct_rec.1)); bindings.push((new_id, cct_rec.1));
let sep = build_delim_tok(qdl.delim); let sep = build_delim_tok(qdl.delim);
pushes.push(build_mod_call(vec![str_to_ident("proc_macro_tokens"), pushes.push(build_mod_call(
str_to_ident("build"), vec![Ident::from_str("proc_macro_tokens"),
str_to_ident("build_delimited")], Ident::from_str("build"),
concat(from_tokens(vec![Token::Ident(new_id)]), Ident::from_str("build_delimited")],
concat(lex(","), sep)))); concat(from_tokens(vec![Token::Ident(new_id)]), concat(lex(","), sep)),
));
} }
QTT::QIdent(t) => { QTT::QIdent(t) => {
pushes.push(TokenStream::from_tts(vec![t])); pushes.push(TokenStream::from_tts(vec![t]));
@ -250,13 +252,13 @@ fn unravel(binds: Bindings) -> TokenStream {
/// Checks if the Ident is `unquote`. /// Checks if the Ident is `unquote`.
fn is_unquote(id: Ident) -> bool { fn is_unquote(id: Ident) -> bool {
let qq = str_to_ident("unquote"); let qq = Ident::from_str("unquote");
id.name == qq.name // We disregard context; unquote is _reserved_ id.name == qq.name // We disregard context; unquote is _reserved_
} }
/// Checks if the Ident is `quote`. /// Checks if the Ident is `quote`.
fn is_qquote(id: Ident) -> bool { fn is_qquote(id: Ident) -> bool {
let qq = str_to_ident("qquote"); let qq = Ident::from_str("qquote");
id.name == qq.name // We disregard context; qquote is _reserved_ id.name == qq.name // We disregard context; qquote is _reserved_
} }
@ -266,7 +268,8 @@ mod int_build {
use syntax::ast::{self, Ident}; use syntax::ast::{self, Ident};
use syntax::codemap::{DUMMY_SP}; use syntax::codemap::{DUMMY_SP};
use syntax::parse::token::{self, Token, keywords, str_to_ident}; use syntax::parse::token::{self, Token, Lit};
use syntax::symbol::keywords;
use syntax::tokenstream::{TokenTree, TokenStream}; use syntax::tokenstream::{TokenTree, TokenStream};
// ____________________________________________________________________________________________ // ____________________________________________________________________________________________
@ -277,19 +280,19 @@ mod int_build {
build_paren_delimited(build_vec(build_token_tt(t)))) build_paren_delimited(build_vec(build_token_tt(t))))
} }
pub fn emit_lit(l: token::Lit, n: Option<ast::Name>) -> TokenStream { pub fn emit_lit(l: Lit, n: Option<ast::Name>) -> TokenStream {
let suf = match n { let suf = match n {
Some(n) => format!("Some(ast::Name({}))", n.0), Some(n) => format!("Some(ast::Name({}))", n.as_u32()),
None => "None".to_string(), None => "None".to_string(),
}; };
let lit = match l { let lit = match l {
token::Lit::Byte(n) => format!("Lit::Byte(token::intern(\"{}\"))", n.to_string()), Lit::Byte(n) => format!("Lit::Byte(Symbol::intern(\"{}\"))", n.to_string()),
token::Lit::Char(n) => format!("Lit::Char(token::intern(\"{}\"))", n.to_string()), Lit::Char(n) => format!("Lit::Char(Symbol::intern(\"{}\"))", n.to_string()),
token::Lit::Integer(n) => format!("Lit::Integer(token::intern(\"{}\"))", n.to_string()), Lit::Float(n) => format!("Lit::Float(Symbol::intern(\"{}\"))", n.to_string()),
token::Lit::Float(n) => format!("Lit::Float(token::intern(\"{}\"))", n.to_string()), Lit::Str_(n) => format!("Lit::Str_(Symbol::intern(\"{}\"))", n.to_string()),
token::Lit::Str_(n) => format!("Lit::Str_(token::intern(\"{}\"))", n.to_string()), Lit::Integer(n) => format!("Lit::Integer(Symbol::intern(\"{}\"))", n.to_string()),
token::Lit::ByteStr(n) => format!("Lit::ByteStr(token::intern(\"{}\"))", n.to_string()), Lit::ByteStr(n) => format!("Lit::ByteStr(Symbol::intern(\"{}\"))", n.to_string()),
_ => panic!("Unsupported literal"), _ => panic!("Unsupported literal"),
}; };
@ -388,9 +391,10 @@ mod int_build {
Token::Underscore => lex("_"), Token::Underscore => lex("_"),
Token::Literal(lit, sfx) => emit_lit(lit, sfx), Token::Literal(lit, sfx) => emit_lit(lit, sfx),
// fix ident expansion information... somehow // fix ident expansion information... somehow
Token::Ident(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))", ident.name)), Token::Ident(ident) =>
Token::Lifetime(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))", lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)),
ident.name)), Token::Lifetime(ident) =>
lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)),
_ => panic!("Unhandled case!"), _ => panic!("Unhandled case!"),
} }
} }
@ -408,7 +412,7 @@ mod int_build {
/// Takes `input` and returns `vec![input]`. /// Takes `input` and returns `vec![input]`.
pub fn build_vec(ts: TokenStream) -> TokenStream { pub fn build_vec(ts: TokenStream) -> TokenStream {
build_mac_call(str_to_ident("vec"), ts) build_mac_call(Ident::from_str("vec"), ts)
// tts.clone().to_owned() // tts.clone().to_owned()
} }

View File

@ -13,7 +13,8 @@ extern crate syntax_pos;
use syntax::ast::Ident; use syntax::ast::Ident;
use syntax::codemap::DUMMY_SP; use syntax::codemap::DUMMY_SP;
use syntax::parse::token::{self, Token, keywords, str_to_ident}; use syntax::parse::token::{self, Token};
use syntax::symbol::keywords;
use syntax::tokenstream::{self, TokenTree, TokenStream}; use syntax::tokenstream::{self, TokenTree, TokenStream};
use std::rc::Rc; use std::rc::Rc;
@ -43,13 +44,13 @@ pub fn ident_eq(tident: &TokenTree, id: Ident) -> bool {
/// Convert a `&str` into a Token. /// Convert a `&str` into a Token.
pub fn str_to_token_ident(s: &str) -> Token { pub fn str_to_token_ident(s: &str) -> Token {
Token::Ident(str_to_ident(s)) Token::Ident(Ident::from_str(s))
} }
/// Converts a keyword (from `syntax::parse::token::keywords`) into a Token that /// Converts a keyword (from `syntax::parse::token::keywords`) into a Token that
/// corresponds to it. /// corresponds to it.
pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token { pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token {
Token::Ident(str_to_ident(&kw.name().as_str()[..])) Token::Ident(Ident::from_str(&kw.name().as_str()[..]))
} }
// ____________________________________________________________________________________________ // ____________________________________________________________________________________________

View File

@ -53,8 +53,8 @@ use syntax::ast::*;
use syntax::errors; use syntax::errors;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::codemap::{respan, Spanned}; use syntax::codemap::{respan, Spanned};
use syntax::parse::token;
use syntax::std_inject; use syntax::std_inject;
use syntax::symbol::{Symbol, keywords};
use syntax::visit::{self, Visitor}; use syntax::visit::{self, Visitor};
use syntax_pos::Span; use syntax_pos::Span;
@ -149,7 +149,7 @@ impl<'a> LoweringContext<'a> {
} }
fn str_to_ident(&self, s: &'static str) -> Name { fn str_to_ident(&self, s: &'static str) -> Name {
token::gensym(s) Symbol::gensym(s)
} }
fn with_parent_def<T, F>(&mut self, parent_id: NodeId, f: F) -> T fn with_parent_def<T, F>(&mut self, parent_id: NodeId, f: F) -> T
@ -400,8 +400,8 @@ impl<'a> LoweringContext<'a> {
// Don't expose `Self` (recovered "keyword used as ident" parse error). // Don't expose `Self` (recovered "keyword used as ident" parse error).
// `rustc::ty` expects `Self` to be only used for a trait's `Self`. // `rustc::ty` expects `Self` to be only used for a trait's `Self`.
// Instead, use gensym("Self") to create a distinct name that looks the same. // Instead, use gensym("Self") to create a distinct name that looks the same.
if name == token::keywords::SelfType.name() { if name == keywords::SelfType.name() {
name = token::gensym("Self"); name = Symbol::gensym("Self");
} }
hir::TyParam { hir::TyParam {
@ -540,7 +540,7 @@ impl<'a> LoweringContext<'a> {
hir::StructField { hir::StructField {
span: f.span, span: f.span,
id: f.id, id: f.id,
name: f.ident.map(|ident| ident.name).unwrap_or(token::intern(&index.to_string())), name: f.ident.map(|ident| ident.name).unwrap_or(Symbol::intern(&index.to_string())),
vis: self.lower_visibility(&f.vis), vis: self.lower_visibility(&f.vis),
ty: self.lower_ty(&f.ty), ty: self.lower_ty(&f.ty),
attrs: self.lower_attrs(&f.attrs), attrs: self.lower_attrs(&f.attrs),
@ -1189,7 +1189,7 @@ impl<'a> LoweringContext<'a> {
e.span, e.span,
hir::PopUnstableBlock, hir::PopUnstableBlock,
ThinVec::new()); ThinVec::new());
this.field(token::intern(s), signal_block, ast_expr.span) this.field(Symbol::intern(s), signal_block, ast_expr.span)
}).collect(); }).collect();
let attrs = ast_expr.attrs.clone(); let attrs = ast_expr.attrs.clone();
@ -1953,9 +1953,9 @@ impl<'a> LoweringContext<'a> {
fn std_path_components(&mut self, components: &[&str]) -> Vec<Name> { fn std_path_components(&mut self, components: &[&str]) -> Vec<Name> {
let mut v = Vec::new(); let mut v = Vec::new();
if let Some(s) = self.crate_root { if let Some(s) = self.crate_root {
v.push(token::intern(s)); v.push(Symbol::intern(s));
} }
v.extend(components.iter().map(|s| token::intern(s))); v.extend(components.iter().map(|s| Symbol::intern(s)));
return v; return v;
} }

View File

@ -19,7 +19,7 @@ use middle::cstore::InlinedItem;
use syntax::ast::*; use syntax::ast::*;
use syntax::ext::hygiene::Mark; use syntax::ext::hygiene::Mark;
use syntax::visit; use syntax::visit;
use syntax::parse::token::{self, keywords}; use syntax::symbol::{Symbol, keywords};
/// Creates def ids for nodes in the HIR. /// Creates def ids for nodes in the HIR.
pub struct DefCollector<'a> { pub struct DefCollector<'a> {
@ -169,7 +169,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
this.with_parent(variant_def_index, |this| { this.with_parent(variant_def_index, |this| {
for (index, field) in v.node.data.fields().iter().enumerate() { for (index, field) in v.node.data.fields().iter().enumerate() {
let name = field.ident.map(|ident| ident.name) let name = field.ident.map(|ident| ident.name)
.unwrap_or_else(|| token::intern(&index.to_string())); .unwrap_or_else(|| Symbol::intern(&index.to_string()));
this.create_def(field.id, DefPathData::Field(name.as_str())); this.create_def(field.id, DefPathData::Field(name.as_str()));
} }
@ -188,7 +188,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
for (index, field) in struct_def.fields().iter().enumerate() { for (index, field) in struct_def.fields().iter().enumerate() {
let name = field.ident.map(|ident| ident.name.as_str()) let name = field.ident.map(|ident| ident.name.as_str())
.unwrap_or(token::intern(&index.to_string()).as_str()); .unwrap_or(Symbol::intern(&index.to_string()).as_str());
this.create_def(field.id, DefPathData::Field(name)); this.create_def(field.id, DefPathData::Field(name));
} }
} }

View File

@ -14,7 +14,7 @@ use std::fmt::Write;
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher; use std::collections::hash_map::DefaultHasher;
use syntax::ast; use syntax::ast;
use syntax::parse::token::{self, InternedString}; use syntax::symbol::{Symbol, InternedString};
use ty::TyCtxt; use ty::TyCtxt;
use util::nodemap::NodeMap; use util::nodemap::NodeMap;
@ -328,7 +328,7 @@ impl DefPathData {
LifetimeDef(ref name) | LifetimeDef(ref name) |
EnumVariant(ref name) | EnumVariant(ref name) |
Binding(ref name) | Binding(ref name) |
Field(ref name) => Some(token::intern(name)), Field(ref name) => Some(Symbol::intern(name)),
Impl | Impl |
CrateRoot | CrateRoot |

View File

@ -40,8 +40,8 @@ use syntax::codemap::{self, respan, Spanned};
use syntax::abi::Abi; use syntax::abi::Abi;
use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, AsmDialect}; use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, AsmDialect};
use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem}; use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
use syntax::parse::token::{keywords, InternedString};
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::{keywords, InternedString};
use syntax::tokenstream::TokenTree; use syntax::tokenstream::TokenTree;
use syntax::util::ThinVec; use syntax::util::ThinVec;

View File

@ -13,13 +13,14 @@ pub use self::AnnNode::*;
use syntax::abi::Abi; use syntax::abi::Abi;
use syntax::ast; use syntax::ast;
use syntax::codemap::{CodeMap, Spanned}; use syntax::codemap::{CodeMap, Spanned};
use syntax::parse::token::{self, keywords, BinOpToken}; use syntax::parse::token::{self, BinOpToken};
use syntax::parse::lexer::comments; use syntax::parse::lexer::comments;
use syntax::print::pp::{self, break_offset, word, space, hardbreak}; use syntax::print::pp::{self, break_offset, word, space, hardbreak};
use syntax::print::pp::{Breaks, eof}; use syntax::print::pp::{Breaks, eof};
use syntax::print::pp::Breaks::{Consistent, Inconsistent}; use syntax::print::pp::Breaks::{Consistent, Inconsistent};
use syntax::print::pprust::{self as ast_pp, PrintState}; use syntax::print::pprust::{self as ast_pp, PrintState};
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::keywords;
use syntax_pos::{self, BytePos}; use syntax_pos::{self, BytePos};
use errors; use errors;

View File

@ -91,8 +91,8 @@ use std::cell::{Cell, RefCell};
use std::char::from_u32; use std::char::from_u32;
use std::fmt; use std::fmt;
use syntax::ast; use syntax::ast;
use syntax::parse::token;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax_pos::{self, Pos, Span}; use syntax_pos::{self, Pos, Span};
use errors::DiagnosticBuilder; use errors::DiagnosticBuilder;
@ -1219,7 +1219,7 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> {
names.push(lt_name); names.push(lt_name);
} }
names.sort(); names.sort();
let name = token::intern(&names[0]); let name = Symbol::intern(&names[0]);
return (name_to_dummy_lifetime(name), Kept); return (name_to_dummy_lifetime(name), Kept);
} }
return (self.life_giver.give_lifetime(), Fresh); return (self.life_giver.give_lifetime(), Fresh);
@ -1931,7 +1931,7 @@ impl LifeGiver {
let mut s = String::from("'"); let mut s = String::from("'");
s.push_str(&num_to_string(self.counter.get())); s.push_str(&num_to_string(self.counter.get()));
if !self.taken.contains(&s) { if !self.taken.contains(&s) {
lifetime = name_to_dummy_lifetime(token::intern(&s[..])); lifetime = name_to_dummy_lifetime(Symbol::intern(&s));
self.generated.borrow_mut().push(lifetime); self.generated.borrow_mut().push(lifetime);
break; break;
} }

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use syntax::parse::token::InternedString; use syntax::symbol::InternedString;
use syntax::ast; use syntax::ast;
use std::rc::Rc; use std::rc::Rc;
use hir::def_id::DefId; use hir::def_id::DefId;

View File

@ -39,7 +39,7 @@ use syntax::ast;
use syntax::attr; use syntax::attr;
use syntax::ext::base::SyntaxExtension; use syntax::ext::base::SyntaxExtension;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::parse::token::InternedString; use syntax::symbol::InternedString;
use syntax_pos::Span; use syntax_pos::Span;
use rustc_back::target::Target; use rustc_back::target::Target;
use hir; use hir;

View File

@ -30,7 +30,7 @@ use middle::weak_lang_items;
use util::nodemap::FxHashMap; use util::nodemap::FxHashMap;
use syntax::ast; use syntax::ast;
use syntax::parse::token::InternedString; use syntax::symbol::InternedString;
use hir::itemlikevisit::ItemLikeVisitor; use hir::itemlikevisit::ItemLikeVisitor;
use hir; use hir;

View File

@ -123,8 +123,8 @@ use std::io::prelude::*;
use std::io; use std::io;
use std::rc::Rc; use std::rc::Rc;
use syntax::ast::{self, NodeId}; use syntax::ast::{self, NodeId};
use syntax::parse::token::keywords;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::keywords;
use syntax_pos::Span; use syntax_pos::Span;
use hir::Expr; use hir::Expr;

View File

@ -27,7 +27,7 @@ use middle::region;
use ty; use ty;
use std::mem::replace; use std::mem::replace;
use syntax::ast; use syntax::ast;
use syntax::parse::token::keywords; use syntax::symbol::keywords;
use syntax_pos::Span; use syntax_pos::Span;
use util::nodemap::NodeMap; use util::nodemap::NodeMap;

View File

@ -21,7 +21,7 @@ use hir::def::Def;
use hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, DefIndex, LOCAL_CRATE}; use hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, DefIndex, LOCAL_CRATE};
use ty::{self, TyCtxt, AdtKind}; use ty::{self, TyCtxt, AdtKind};
use middle::privacy::AccessLevels; use middle::privacy::AccessLevels;
use syntax::parse::token::InternedString; use syntax::symbol::InternedString;
use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::{Span, DUMMY_SP};
use syntax::ast; use syntax::ast;
use syntax::ast::{NodeId, Attribute}; use syntax::ast::{NodeId, Attribute};

View File

@ -16,7 +16,7 @@ use middle::lang_items;
use rustc_back::PanicStrategy; use rustc_back::PanicStrategy;
use syntax::ast; use syntax::ast;
use syntax::parse::token::InternedString; use syntax::symbol::InternedString;
use syntax_pos::Span; use syntax_pos::Span;
use hir::intravisit::Visitor; use hir::intravisit::Visitor;
use hir::intravisit; use hir::intravisit;

View File

@ -25,8 +25,8 @@ use lint;
use middle::cstore; use middle::cstore;
use syntax::ast::{self, IntTy, UintTy}; use syntax::ast::{self, IntTy, UintTy};
use syntax::parse::{self, token}; use syntax::parse;
use syntax::parse::token::InternedString; use syntax::symbol::{Symbol, InternedString};
use syntax::feature_gate::UnstableFeatures; use syntax::feature_gate::UnstableFeatures;
use errors::{ColorConfig, FatalError, Handler}; use errors::{ColorConfig, FatalError, Handler};
@ -927,7 +927,7 @@ pub fn default_lib_output() -> CrateType {
} }
pub fn default_configuration(sess: &Session) -> ast::CrateConfig { pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
use syntax::parse::token::intern_and_get_ident as intern; use syntax::symbol::intern_and_get_ident as intern;
let end = &sess.target.target.target_endian; let end = &sess.target.target.target_endian;
let arch = &sess.target.target.arch; let arch = &sess.target.target.arch;
@ -947,33 +947,33 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
let mut ret = HashSet::new(); let mut ret = HashSet::new();
// Target bindings. // Target bindings.
ret.insert((token::intern("target_os"), Some(intern(os)))); ret.insert((Symbol::intern("target_os"), Some(intern(os))));
ret.insert((token::intern("target_family"), Some(fam.clone()))); ret.insert((Symbol::intern("target_family"), Some(fam.clone())));
ret.insert((token::intern("target_arch"), Some(intern(arch)))); ret.insert((Symbol::intern("target_arch"), Some(intern(arch))));
ret.insert((token::intern("target_endian"), Some(intern(end)))); ret.insert((Symbol::intern("target_endian"), Some(intern(end))));
ret.insert((token::intern("target_pointer_width"), Some(intern(wordsz)))); ret.insert((Symbol::intern("target_pointer_width"), Some(intern(wordsz))));
ret.insert((token::intern("target_env"), Some(intern(env)))); ret.insert((Symbol::intern("target_env"), Some(intern(env))));
ret.insert((token::intern("target_vendor"), Some(intern(vendor)))); ret.insert((Symbol::intern("target_vendor"), Some(intern(vendor))));
if &fam == "windows" || &fam == "unix" { if &fam == "windows" || &fam == "unix" {
ret.insert((token::intern(&fam), None)); ret.insert((Symbol::intern(&fam), None));
} }
if sess.target.target.options.has_elf_tls { if sess.target.target.options.has_elf_tls {
ret.insert((token::intern("target_thread_local"), None)); ret.insert((Symbol::intern("target_thread_local"), None));
} }
for &i in &[8, 16, 32, 64, 128] { for &i in &[8, 16, 32, 64, 128] {
if i <= max_atomic_width { if i <= max_atomic_width {
let s = i.to_string(); let s = i.to_string();
ret.insert((token::intern("target_has_atomic"), Some(intern(&s)))); ret.insert((Symbol::intern("target_has_atomic"), Some(intern(&s))));
if &s == wordsz { if &s == wordsz {
ret.insert((token::intern("target_has_atomic"), Some(intern("ptr")))); ret.insert((Symbol::intern("target_has_atomic"), Some(intern("ptr"))));
} }
} }
} }
if sess.opts.debug_assertions { if sess.opts.debug_assertions {
ret.insert((token::intern("debug_assertions"), None)); ret.insert((Symbol::intern("debug_assertions"), None));
} }
if sess.opts.crate_types.contains(&CrateTypeProcMacro) { if sess.opts.crate_types.contains(&CrateTypeProcMacro) {
ret.insert((token::intern("proc_macro"), None)); ret.insert((Symbol::intern("proc_macro"), None));
} }
return ret; return ret;
} }
@ -986,7 +986,7 @@ pub fn build_configuration(sess: &Session,
let default_cfg = default_configuration(sess); let default_cfg = default_configuration(sess);
// If the user wants a test runner, then add the test cfg // If the user wants a test runner, then add the test cfg
if sess.opts.test { if sess.opts.test {
user_cfg.insert((token::intern("test"), None)); user_cfg.insert((Symbol::intern("test"), None));
} }
user_cfg.extend(default_cfg.iter().cloned()); user_cfg.extend(default_cfg.iter().cloned());
user_cfg user_cfg

View File

@ -28,7 +28,7 @@ use syntax::json::JsonEmitter;
use syntax::feature_gate; use syntax::feature_gate;
use syntax::parse; use syntax::parse;
use syntax::parse::ParseSess; use syntax::parse::ParseSess;
use syntax::parse::token; use syntax::symbol::{Symbol, InternedString};
use syntax::{ast, codemap}; use syntax::{ast, codemap};
use syntax::feature_gate::AttributeType; use syntax::feature_gate::AttributeType;
use syntax_pos::{Span, MultiSpan}; use syntax_pos::{Span, MultiSpan};
@ -89,7 +89,7 @@ pub struct Session {
// forms a unique global identifier for the crate. It is used to allow // forms a unique global identifier for the crate. It is used to allow
// multiple crates with the same name to coexist. See the // multiple crates with the same name to coexist. See the
// trans::back::symbol_names module for more information. // trans::back::symbol_names module for more information.
pub crate_disambiguator: RefCell<token::InternedString>, pub crate_disambiguator: RefCell<InternedString>,
pub features: RefCell<feature_gate::Features>, pub features: RefCell<feature_gate::Features>,
/// The maximum recursion limit for potentially infinitely recursive /// The maximum recursion limit for potentially infinitely recursive
@ -129,7 +129,7 @@ pub struct PerfStats {
} }
impl Session { impl Session {
pub fn local_crate_disambiguator(&self) -> token::InternedString { pub fn local_crate_disambiguator(&self) -> InternedString {
self.crate_disambiguator.borrow().clone() self.crate_disambiguator.borrow().clone()
} }
pub fn struct_span_warn<'a, S: Into<MultiSpan>>(&'a self, pub fn struct_span_warn<'a, S: Into<MultiSpan>>(&'a self,
@ -610,7 +610,7 @@ pub fn build_session_(sopts: config::Options,
plugin_attributes: RefCell::new(Vec::new()), plugin_attributes: RefCell::new(Vec::new()),
crate_types: RefCell::new(Vec::new()), crate_types: RefCell::new(Vec::new()),
dependency_formats: RefCell::new(FxHashMap()), dependency_formats: RefCell::new(FxHashMap()),
crate_disambiguator: RefCell::new(token::intern("").as_str()), crate_disambiguator: RefCell::new(Symbol::intern("").as_str()),
features: RefCell::new(feature_gate::Features::new()), features: RefCell::new(feature_gate::Features::new()),
recursion_limit: Cell::new(64), recursion_limit: Cell::new(64),
next_node_id: Cell::new(NodeId::new(1)), next_node_id: Cell::new(NodeId::new(1)),

View File

@ -26,8 +26,8 @@ use super::util;
use hir::def_id::DefId; use hir::def_id::DefId;
use infer::InferOk; use infer::InferOk;
use rustc_data_structures::snapshot_map::{Snapshot, SnapshotMap}; use rustc_data_structures::snapshot_map::{Snapshot, SnapshotMap};
use syntax::parse::token;
use syntax::ast; use syntax::ast;
use syntax::symbol::Symbol;
use ty::subst::Subst; use ty::subst::Subst;
use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt}; use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt};
use ty::fold::{TypeFoldable, TypeFolder}; use ty::fold::{TypeFoldable, TypeFolder};
@ -1245,7 +1245,7 @@ fn confirm_callable_candidate<'cx, 'gcx, 'tcx>(
let predicate = ty::Binder(ty::ProjectionPredicate { // (1) recreate binder here let predicate = ty::Binder(ty::ProjectionPredicate { // (1) recreate binder here
projection_ty: ty::ProjectionTy { projection_ty: ty::ProjectionTy {
trait_ref: trait_ref, trait_ref: trait_ref,
item_name: token::intern(FN_OUTPUT_NAME), item_name: Symbol::intern(FN_OUTPUT_NAME),
}, },
ty: ret_type ty: ret_type
}); });

View File

@ -49,7 +49,7 @@ use std::rc::Rc;
use std::iter; use std::iter;
use syntax::ast::{self, Name, NodeId}; use syntax::ast::{self, Name, NodeId};
use syntax::attr; use syntax::attr;
use syntax::parse::token::{self, keywords}; use syntax::symbol::{InternedString, intern_and_get_ident, keywords};
use hir; use hir;
@ -561,7 +561,7 @@ pub struct GlobalCtxt<'tcx> {
/// The definite name of the current crate after taking into account /// The definite name of the current crate after taking into account
/// attributes, commandline parameters, etc. /// attributes, commandline parameters, etc.
pub crate_name: token::InternedString, pub crate_name: InternedString,
/// Data layout specification for the current target. /// Data layout specification for the current target.
pub data_layout: TargetDataLayout, pub data_layout: TargetDataLayout,
@ -574,7 +574,7 @@ pub struct GlobalCtxt<'tcx> {
/// Map from function to the `#[derive]` mode that it's defining. Only used /// Map from function to the `#[derive]` mode that it's defining. Only used
/// by `proc-macro` crates. /// by `proc-macro` crates.
pub derive_macros: RefCell<NodeMap<token::InternedString>>, pub derive_macros: RefCell<NodeMap<InternedString>>,
} }
impl<'tcx> GlobalCtxt<'tcx> { impl<'tcx> GlobalCtxt<'tcx> {
@ -588,7 +588,7 @@ impl<'tcx> GlobalCtxt<'tcx> {
} }
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn crate_name(self, cnum: CrateNum) -> token::InternedString { pub fn crate_name(self, cnum: CrateNum) -> InternedString {
if cnum == LOCAL_CRATE { if cnum == LOCAL_CRATE {
self.crate_name.clone() self.crate_name.clone()
} else { } else {
@ -596,7 +596,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
} }
} }
pub fn original_crate_name(self, cnum: CrateNum) -> token::InternedString { pub fn original_crate_name(self, cnum: CrateNum) -> InternedString {
if cnum == LOCAL_CRATE { if cnum == LOCAL_CRATE {
self.crate_name.clone() self.crate_name.clone()
} else { } else {
@ -604,7 +604,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
} }
} }
pub fn crate_disambiguator(self, cnum: CrateNum) -> token::InternedString { pub fn crate_disambiguator(self, cnum: CrateNum) -> InternedString {
if cnum == LOCAL_CRATE { if cnum == LOCAL_CRATE {
self.sess.local_crate_disambiguator() self.sess.local_crate_disambiguator()
} else { } else {
@ -835,7 +835,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
custom_coerce_unsized_kinds: RefCell::new(DefIdMap()), custom_coerce_unsized_kinds: RefCell::new(DefIdMap()),
cast_kinds: RefCell::new(NodeMap()), cast_kinds: RefCell::new(NodeMap()),
fragment_infos: RefCell::new(DefIdMap()), fragment_infos: RefCell::new(DefIdMap()),
crate_name: token::intern_and_get_ident(crate_name), crate_name: intern_and_get_ident(crate_name),
data_layout: data_layout, data_layout: data_layout,
layout_cache: RefCell::new(FxHashMap()), layout_cache: RefCell::new(FxHashMap()),
layout_depth: Cell::new(0), layout_depth: Cell::new(0),

View File

@ -12,7 +12,7 @@ use hir::map::DefPathData;
use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use ty::{self, Ty, TyCtxt}; use ty::{self, Ty, TyCtxt};
use syntax::ast; use syntax::ast;
use syntax::parse::token; use syntax::symbol::Symbol;
use std::cell::Cell; use std::cell::Cell;
@ -136,7 +136,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
cur_path.push(self.sess.cstore.def_key(cur_def) cur_path.push(self.sess.cstore.def_key(cur_def)
.disambiguated_data.data.get_opt_name().unwrap_or_else(|| .disambiguated_data.data.get_opt_name().unwrap_or_else(||
token::intern("<unnamed>"))); Symbol::intern("<unnamed>")));
match visible_parent_map.get(&cur_def) { match visible_parent_map.get(&cur_def) {
Some(&def) => cur_def = def, Some(&def) => cur_def = def,
None => return false, None => return false,

View File

@ -44,7 +44,7 @@ use std::vec::IntoIter;
use std::mem; use std::mem;
use syntax::ast::{self, Name, NodeId}; use syntax::ast::{self, Name, NodeId};
use syntax::attr; use syntax::attr;
use syntax::parse::token::{self, InternedString}; use syntax::symbol::{Symbol, InternedString};
use syntax_pos::{DUMMY_SP, Span}; use syntax_pos::{DUMMY_SP, Span};
use rustc_const_math::ConstInt; use rustc_const_math::ConstInt;
@ -2344,7 +2344,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
if let Some(id) = self.map.as_local_node_id(id) { if let Some(id) = self.map.as_local_node_id(id) {
self.map.name(id) self.map.name(id)
} else if id.index == CRATE_DEF_INDEX { } else if id.index == CRATE_DEF_INDEX {
token::intern(&self.sess.cstore.original_crate_name(id.krate)) Symbol::intern(&self.sess.cstore.original_crate_name(id.krate))
} else { } else {
let def_key = self.sess.cstore.def_key(id); let def_key = self.sess.cstore.def_key(id);
// The name of a StructCtor is that of its struct parent. // The name of a StructCtor is that of its struct parent.

View File

@ -23,7 +23,7 @@ use std::fmt;
use std::ops; use std::ops;
use syntax::abi; use syntax::abi;
use syntax::ast::{self, Name}; use syntax::ast::{self, Name};
use syntax::parse::token::{keywords, InternedString}; use syntax::symbol::{keywords, InternedString};
use serialize; use serialize;

View File

@ -25,8 +25,8 @@ use std::fmt;
use std::usize; use std::usize;
use syntax::abi::Abi; use syntax::abi::Abi;
use syntax::parse::token;
use syntax::ast::CRATE_NODE_ID; use syntax::ast::CRATE_NODE_ID;
use syntax::symbol::Symbol;
use hir; use hir;
pub fn verbose() -> bool { pub fn verbose() -> bool {
@ -284,7 +284,7 @@ fn in_binder<'a, 'gcx, 'tcx, T, U>(f: &mut fmt::Formatter,
ty::BrAnon(_) | ty::BrAnon(_) |
ty::BrFresh(_) | ty::BrFresh(_) |
ty::BrEnv => { ty::BrEnv => {
let name = token::intern("'r"); let name = Symbol::intern("'r");
let _ = write!(f, "{}", name); let _ = write!(f, "{}", name);
ty::BrNamed(tcx.map.local_def_id(CRATE_NODE_ID), ty::BrNamed(tcx.map.local_def_id(CRATE_NODE_ID),
name, name,

View File

@ -53,7 +53,8 @@ use std::path::{Path, PathBuf};
use syntax::{ast, diagnostics, visit}; use syntax::{ast, diagnostics, visit};
use syntax::attr; use syntax::attr;
use syntax::ext::base::ExtCtxt; use syntax::ext::base::ExtCtxt;
use syntax::parse::{self, PResult, token}; use syntax::parse::{self, PResult};
use syntax::symbol::{self, Symbol};
use syntax::util::node_count::NodeCounter; use syntax::util::node_count::NodeCounter;
use syntax; use syntax;
use syntax_ext; use syntax_ext;
@ -561,7 +562,7 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session,
*sess.crate_types.borrow_mut() = collect_crate_types(sess, &krate.attrs); *sess.crate_types.borrow_mut() = collect_crate_types(sess, &krate.attrs);
*sess.crate_disambiguator.borrow_mut() = *sess.crate_disambiguator.borrow_mut() =
token::intern(&compute_crate_disambiguator(sess)).as_str(); Symbol::intern(&compute_crate_disambiguator(sess)).as_str();
time(time_passes, "recursion limit", || { time(time_passes, "recursion limit", || {
middle::recursion_limit::update_recursion_limit(sess, &krate); middle::recursion_limit::update_recursion_limit(sess, &krate);
@ -1360,6 +1361,6 @@ pub fn build_output_filenames(input: &Input,
pub fn reset_thread_local_state() { pub fn reset_thread_local_state() {
// These may be left in an incoherent state after a previous compile. // These may be left in an incoherent state after a previous compile.
syntax::ext::hygiene::reset_hygiene_data(); syntax::ext::hygiene::reset_hygiene_data();
// `clear_ident_interner` can be used to free memory, but it does not restore the initial state. // `clear_interner` can be used to free memory, but it does not restore the initial state.
token::reset_ident_interner(); symbol::reset_interner();
} }

View File

@ -450,15 +450,15 @@ impl<'ast> PrinterSupport<'ast> for HygieneAnnotation<'ast> {
impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> { impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> {
fn post(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> { fn post(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> {
match node { match node {
pprust::NodeIdent(&ast::Ident { name: ast::Name(nm), ctxt }) => { pprust::NodeIdent(&ast::Ident { name, ctxt }) => {
pp::space(&mut s.s)?; pp::space(&mut s.s)?;
// FIXME #16420: this doesn't display the connections // FIXME #16420: this doesn't display the connections
// between syntax contexts // between syntax contexts
s.synth_comment(format!("{}{:?}", nm, ctxt)) s.synth_comment(format!("{}{:?}", name.as_u32(), ctxt))
} }
pprust::NodeName(&ast::Name(nm)) => { pprust::NodeName(&name) => {
pp::space(&mut s.s)?; pp::space(&mut s.s)?;
s.synth_comment(nm.to_string()) s.synth_comment(name.as_u32().to_string())
} }
_ => Ok(()), _ => Ok(()),
} }

View File

@ -13,7 +13,7 @@ use llvm::LLVMRustHasFeature;
use rustc::session::Session; use rustc::session::Session;
use rustc_trans::back::write::create_target_machine; use rustc_trans::back::write::create_target_machine;
use syntax::feature_gate::UnstableFeatures; use syntax::feature_gate::UnstableFeatures;
use syntax::parse::token::{self, intern_and_get_ident as intern}; use syntax::symbol::{Symbol, intern_and_get_ident as intern};
use libc::c_char; use libc::c_char;
// WARNING: the features must be known to LLVM or the feature // WARNING: the features must be known to LLVM or the feature
@ -40,7 +40,7 @@ pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) {
_ => &[], _ => &[],
}; };
let tf = token::intern("target_feature"); let tf = Symbol::intern("target_feature");
for feat in whitelist { for feat in whitelist {
assert_eq!(feat.chars().last(), Some('\0')); assert_eq!(feat.chars().last(), Some('\0'));
if unsafe { LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } { if unsafe { LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } {

View File

@ -18,6 +18,7 @@ use syntax::abi::Abi;
use syntax::ast::{self, Name, NodeId}; use syntax::ast::{self, Name, NodeId};
use syntax::attr; use syntax::attr;
use syntax::parse::token; use syntax::parse::token;
use syntax::symbol::InternedString;
use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
use syntax::tokenstream; use syntax::tokenstream;
use rustc::hir; use rustc::hir;
@ -169,8 +170,8 @@ enum SawAbiComponent<'a> {
// FIXME (#14132): should we include (some function of) // FIXME (#14132): should we include (some function of)
// ident.ctxt as well? // ident.ctxt as well?
SawIdent(token::InternedString), SawIdent(InternedString),
SawStructDef(token::InternedString), SawStructDef(InternedString),
SawLifetime, SawLifetime,
SawLifetimeDef(usize), SawLifetimeDef(usize),
@ -232,11 +233,11 @@ enum SawAbiComponent<'a> {
#[derive(Hash)] #[derive(Hash)]
enum SawExprComponent<'a> { enum SawExprComponent<'a> {
SawExprLoop(Option<token::InternedString>), SawExprLoop(Option<InternedString>),
SawExprField(token::InternedString), SawExprField(InternedString),
SawExprTupField(usize), SawExprTupField(usize),
SawExprBreak(Option<token::InternedString>), SawExprBreak(Option<InternedString>),
SawExprAgain(Option<token::InternedString>), SawExprAgain(Option<InternedString>),
SawExprBox, SawExprBox,
SawExprArray, SawExprArray,

View File

@ -48,7 +48,7 @@ use rustc::hir::def_id::DefId;
use rustc::hir::itemlikevisit::ItemLikeVisitor; use rustc::hir::itemlikevisit::ItemLikeVisitor;
use syntax::ast::{self, Attribute, NestedMetaItem}; use syntax::ast::{self, Attribute, NestedMetaItem};
use rustc_data_structures::fx::{FxHashSet, FxHashMap}; use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use syntax::parse::token; use syntax::symbol::Symbol;
use syntax_pos::Span; use syntax_pos::Span;
use rustc::ty::TyCtxt; use rustc::ty::TyCtxt;
use ich::Fingerprint; use ich::Fingerprint;
@ -286,7 +286,7 @@ fn check_config(tcx: TyCtxt, attr: &ast::Attribute) -> bool {
fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> ast::Name { fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> ast::Name {
if let Some(value) = item.value_str() { if let Some(value) = item.value_str() {
token::intern(&value) Symbol::intern(&value)
} else { } else {
let msg = if let Some(name) = item.name() { let msg = if let Some(name) = item.name() {
format!("associated value expected for `{}`", name) format!("associated value expected for `{}`", name)

View File

@ -20,7 +20,7 @@ use std::collections::hash_map::Entry::{Occupied, Vacant};
use syntax::ast; use syntax::ast;
use syntax::attr; use syntax::attr;
use syntax::feature_gate::{BUILTIN_ATTRIBUTES, AttributeType}; use syntax::feature_gate::{BUILTIN_ATTRIBUTES, AttributeType};
use syntax::parse::token::keywords; use syntax::symbol::keywords;
use syntax::ptr::P; use syntax::ptr::P;
use syntax_pos::Span; use syntax_pos::Span;
@ -48,7 +48,7 @@ impl UnusedMut {
let name = path1.node; let name = path1.node;
if let hir::BindByValue(hir::MutMutable) = mode { if let hir::BindByValue(hir::MutMutable) = mode {
if !name.as_str().starts_with("_") { if !name.as_str().starts_with("_") {
match mutables.entry(name.0 as usize) { match mutables.entry(name) {
Vacant(entry) => { Vacant(entry) => {
entry.insert(vec![id]); entry.insert(vec![id]);
} }

View File

@ -37,7 +37,7 @@ use syntax::abi::Abi;
use syntax::attr; use syntax::attr;
use syntax::ext::base::SyntaxExtension; use syntax::ext::base::SyntaxExtension;
use syntax::feature_gate::{self, GateIssue}; use syntax::feature_gate::{self, GateIssue};
use syntax::parse::token::{self, InternedString}; use syntax::symbol::{Symbol, InternedString};
use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::{Span, DUMMY_SP};
use log; use log;
@ -582,11 +582,11 @@ impl<'a> CrateLoader<'a> {
trait_name: &str, trait_name: &str,
expand: fn(TokenStream) -> TokenStream, expand: fn(TokenStream) -> TokenStream,
attributes: &[&'static str]) { attributes: &[&'static str]) {
let attrs = attributes.iter().cloned().map(token::intern).collect(); let attrs = attributes.iter().cloned().map(Symbol::intern).collect();
let derive = SyntaxExtension::CustomDerive( let derive = SyntaxExtension::CustomDerive(
Box::new(CustomDerive::new(expand, attrs)) Box::new(CustomDerive::new(expand, attrs))
); );
self.0.push((token::intern(trait_name), Rc::new(derive))); self.0.push((Symbol::intern(trait_name), Rc::new(derive)));
} }
} }

View File

@ -31,7 +31,8 @@ use rustc_back::PanicStrategy;
use std::path::PathBuf; use std::path::PathBuf;
use syntax::ast; use syntax::ast;
use syntax::attr; use syntax::attr;
use syntax::parse::{token, new_parser_from_source_str}; use syntax::parse::new_parser_from_source_str;
use syntax::symbol::{InternedString, intern_and_get_ident};
use syntax_pos::mk_sp; use syntax_pos::mk_sp;
use rustc::hir::svh::Svh; use rustc::hir::svh::Svh;
use rustc_back::target::Target; use rustc_back::target::Target;
@ -262,14 +263,14 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {
self.get_crate_data(cnum).panic_strategy() self.get_crate_data(cnum).panic_strategy()
} }
fn crate_name(&self, cnum: CrateNum) -> token::InternedString fn crate_name(&self, cnum: CrateNum) -> InternedString
{ {
token::intern_and_get_ident(&self.get_crate_data(cnum).name[..]) intern_and_get_ident(&self.get_crate_data(cnum).name[..])
} }
fn original_crate_name(&self, cnum: CrateNum) -> token::InternedString fn original_crate_name(&self, cnum: CrateNum) -> InternedString
{ {
token::intern_and_get_ident(&self.get_crate_data(cnum).name()) intern_and_get_ident(&self.get_crate_data(cnum).name())
} }
fn extern_crate(&self, cnum: CrateNum) -> Option<ExternCrate> fn extern_crate(&self, cnum: CrateNum) -> Option<ExternCrate>
@ -282,9 +283,9 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {
self.get_crate_hash(cnum) self.get_crate_hash(cnum)
} }
fn crate_disambiguator(&self, cnum: CrateNum) -> token::InternedString fn crate_disambiguator(&self, cnum: CrateNum) -> InternedString
{ {
token::intern_and_get_ident(&self.get_crate_data(cnum).disambiguator()) intern_and_get_ident(&self.get_crate_data(cnum).disambiguator())
} }
fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option<DefId> fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option<DefId>

View File

@ -34,7 +34,7 @@ use std::rc::Rc;
use std::u32; use std::u32;
use syntax::ast::{self, CRATE_NODE_ID}; use syntax::ast::{self, CRATE_NODE_ID};
use syntax::attr; use syntax::attr;
use syntax; use syntax::symbol::Symbol;
use syntax_pos; use syntax_pos;
use rustc::hir::{self, PatKind}; use rustc::hir::{self, PatKind};
@ -600,7 +600,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
if let PatKind::Binding(_, ref path1, _) = arg.pat.node { if let PatKind::Binding(_, ref path1, _) = arg.pat.node {
path1.node path1.node
} else { } else {
syntax::parse::token::intern("") Symbol::intern("")
} }
})) }))
} }
@ -1119,7 +1119,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let deps = get_ordered_deps(self.cstore); let deps = get_ordered_deps(self.cstore);
self.lazy_seq(deps.iter().map(|&(_, ref dep)| { self.lazy_seq(deps.iter().map(|&(_, ref dep)| {
CrateDep { CrateDep {
name: syntax::parse::token::intern(dep.name()), name: Symbol::intern(dep.name()),
hash: dep.hash(), hash: dep.hash(),
kind: dep.dep_kind.get(), kind: dep.dep_kind.get(),
} }

View File

@ -18,7 +18,7 @@ use rustc::util::nodemap::NodeMap;
use rustc::hir; use rustc::hir;
use syntax::abi::Abi; use syntax::abi::Abi;
use syntax::ast; use syntax::ast;
use syntax::parse::token::keywords; use syntax::symbol::keywords;
use syntax_pos::Span; use syntax_pos::Span;
use rustc_data_structures::indexed_vec::{IndexVec, Idx}; use rustc_data_structures::indexed_vec::{IndexVec, Idx};

View File

@ -29,7 +29,7 @@ use rustc::hir::map::blocks::FnLikeNode;
use rustc::infer::InferCtxt; use rustc::infer::InferCtxt;
use rustc::ty::subst::Subst; use rustc::ty::subst::Subst;
use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::{self, Ty, TyCtxt};
use syntax::parse::token; use syntax::symbol::{Symbol, InternedString};
use rustc::hir; use rustc::hir;
use rustc_const_math::{ConstInt, ConstUsize}; use rustc_const_math::{ConstInt, ConstUsize};
@ -121,7 +121,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
self.tcx.mk_nil() self.tcx.mk_nil()
} }
pub fn str_literal(&mut self, value: token::InternedString) -> Literal<'tcx> { pub fn str_literal(&mut self, value: InternedString) -> Literal<'tcx> {
Literal::Value { value: ConstVal::Str(value) } Literal::Value { value: ConstVal::Str(value) }
} }
@ -145,7 +145,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
self_ty: Ty<'tcx>, self_ty: Ty<'tcx>,
params: &[Ty<'tcx>]) params: &[Ty<'tcx>])
-> (Ty<'tcx>, Literal<'tcx>) { -> (Ty<'tcx>, Literal<'tcx>) {
let method_name = token::intern(method_name); let method_name = Symbol::intern(method_name);
let substs = self.tcx.mk_substs_trait(self_ty, params); let substs = self.tcx.mk_substs_trait(self_ty, params);
for item in self.tcx.associated_items(trait_def_id) { for item in self.tcx.associated_items(trait_def_id) {
if item.kind == ty::AssociatedKind::Method && item.name == method_name { if item.kind == ty::AssociatedKind::Method && item.name == method_name {

View File

@ -21,7 +21,8 @@ use rustc::session::Session;
use syntax::ast::*; use syntax::ast::*;
use syntax::attr; use syntax::attr;
use syntax::codemap::Spanned; use syntax::codemap::Spanned;
use syntax::parse::token::{self, keywords}; use syntax::parse::token;
use syntax::symbol::keywords;
use syntax::visit::{self, Visitor}; use syntax::visit::{self, Visitor};
use syntax_pos::Span; use syntax_pos::Span;
use errors; use errors;

View File

@ -17,7 +17,7 @@ use rustc::mir::transform::MirMapPass;
use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT}; use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT};
use syntax::ext::base::MacroExpanderFn; use syntax::ext::base::MacroExpanderFn;
use syntax::parse::token; use syntax::symbol::Symbol;
use syntax::ast; use syntax::ast;
use syntax::feature_gate::AttributeType; use syntax::feature_gate::AttributeType;
use syntax_pos::Span; use syntax_pos::Span;
@ -121,7 +121,7 @@ impl<'a> Registry<'a> {
/// It builds for you a `NormalTT` that calls `expander`, /// It builds for you a `NormalTT` that calls `expander`,
/// and also takes care of interning the macro's name. /// and also takes care of interning the macro's name.
pub fn register_macro(&mut self, name: &str, expander: MacroExpanderFn) { pub fn register_macro(&mut self, name: &str, expander: MacroExpanderFn) {
self.register_syntax_extension(token::intern(name), self.register_syntax_extension(Symbol::intern(name),
NormalTT(Box::new(expander), None, false)); NormalTT(Box::new(expander), None, false));
} }

View File

@ -40,7 +40,7 @@ use syntax::ext::base::Determinacy::Undetermined;
use syntax::ext::expand::mark_tts; use syntax::ext::expand::mark_tts;
use syntax::ext::hygiene::Mark; use syntax::ext::hygiene::Mark;
use syntax::ext::tt::macro_rules; use syntax::ext::tt::macro_rules;
use syntax::parse::token::keywords; use syntax::symbol::keywords;
use syntax::visit::{self, Visitor}; use syntax::visit::{self, Visitor};
use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::{Span, DUMMY_SP};

View File

@ -57,7 +57,7 @@ use syntax::ext::hygiene::{Mark, SyntaxContext};
use syntax::ast::{self, FloatTy}; use syntax::ast::{self, FloatTy};
use syntax::ast::{CRATE_NODE_ID, Name, NodeId, Ident, SpannedIdent, IntTy, UintTy}; use syntax::ast::{CRATE_NODE_ID, Name, NodeId, Ident, SpannedIdent, IntTy, UintTy};
use syntax::ext::base::SyntaxExtension; use syntax::ext::base::SyntaxExtension;
use syntax::parse::token::{self, keywords}; use syntax::symbol::{Symbol, InternedString, keywords};
use syntax::util::lev_distance::find_best_match_for_name; use syntax::util::lev_distance::find_best_match_for_name;
use syntax::visit::{self, FnKind, Visitor}; use syntax::visit::{self, FnKind, Visitor};
@ -90,7 +90,7 @@ mod resolve_imports;
enum SuggestionType { enum SuggestionType {
Macro(String), Macro(String),
Function(token::InternedString), Function(InternedString),
NotFound, NotFound,
} }
@ -1039,7 +1039,7 @@ impl PrimitiveTypeTable {
} }
fn intern(&mut self, string: &str, primitive_type: PrimTy) { fn intern(&mut self, string: &str, primitive_type: PrimTy) {
self.primitive_types.insert(token::intern(string), primitive_type); self.primitive_types.insert(Symbol::intern(string), primitive_type);
} }
} }
@ -3606,7 +3606,7 @@ fn module_to_string(module: Module) -> String {
} }
} else { } else {
// danger, shouldn't be ident? // danger, shouldn't be ident?
names.push(token::str_to_ident("<opaque>")); names.push(Ident::from_str("<opaque>"));
collect_mod(names, module.parent.unwrap()); collect_mod(names, module.parent.unwrap());
} }
} }

View File

@ -39,7 +39,8 @@ use std::collections::hash_map::DefaultHasher;
use std::hash::*; use std::hash::*;
use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID}; use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
use syntax::parse::token::{self, keywords}; use syntax::parse::token;
use syntax::symbol::keywords;
use syntax::visit::{self, Visitor}; use syntax::visit::{self, Visitor};
use syntax::print::pprust::{path_to_string, ty_to_string, bounds_to_string, generics_to_string}; use syntax::print::pprust::{path_to_string, ty_to_string, bounds_to_string, generics_to_string};
use syntax::ptr::P; use syntax::ptr::P;

View File

@ -54,7 +54,8 @@ use std::path::{Path, PathBuf};
use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID}; use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
use syntax::parse::lexer::comments::strip_doc_comment_decoration; use syntax::parse::lexer::comments::strip_doc_comment_decoration;
use syntax::parse::token::{self, keywords}; use syntax::parse::token;
use syntax::symbol::{Symbol, keywords};
use syntax::visit::{self, Visitor}; use syntax::visit::{self, Visitor};
use syntax::print::pprust::{ty_to_string, arg_to_string}; use syntax::print::pprust::{ty_to_string, arg_to_string};
use syntax::codemap::MacroAttribute; use syntax::codemap::MacroAttribute;
@ -728,7 +729,7 @@ impl Visitor for PathCollector {
} }
fn docs_for_attrs(attrs: &[Attribute]) -> String { fn docs_for_attrs(attrs: &[Attribute]) -> String {
let doc = token::intern("doc"); let doc = Symbol::intern("doc");
let mut result = String::new(); let mut result = String::new();
for attr in attrs { for attr in attrs {

View File

@ -18,7 +18,8 @@ use std::path::Path;
use syntax::ast; use syntax::ast;
use syntax::parse::lexer::{self, Reader, StringReader}; use syntax::parse::lexer::{self, Reader, StringReader};
use syntax::parse::token::{self, keywords, Token}; use syntax::parse::token::{self, Token};
use syntax::symbol::keywords;
use syntax_pos::*; use syntax_pos::*;
#[derive(Clone)] #[derive(Clone)]

View File

@ -29,7 +29,7 @@
use rustc::ty::TyCtxt; use rustc::ty::TyCtxt;
use syntax::ast; use syntax::ast;
use syntax::parse::token; use syntax::symbol::Symbol;
use {ModuleSource, ModuleTranslation}; use {ModuleSource, ModuleTranslation};
@ -117,7 +117,7 @@ impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> {
for item in attr.meta_item_list().unwrap_or(&[]) { for item in attr.meta_item_list().unwrap_or(&[]) {
if item.check_name(name) { if item.check_name(name) {
if let Some(value) = item.value_str() { if let Some(value) = item.value_str() {
return token::intern(&value); return Symbol::intern(&value);
} else { } else {
self.tcx.sess.span_fatal( self.tcx.sess.span_fatal(
item.span, item.span,

View File

@ -113,7 +113,7 @@ use rustc::hir::map::definitions::{DefPath, DefPathData};
use rustc::util::common::record_time; use rustc::util::common::record_time;
use syntax::attr; use syntax::attr;
use syntax::parse::token::{self, InternedString}; use syntax::symbol::{Symbol, InternedString, intern_and_get_ident};
fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
@ -275,7 +275,7 @@ impl ItemPathBuffer for SymbolPathBuffer {
} }
fn push(&mut self, text: &str) { fn push(&mut self, text: &str) {
self.names.push(token::intern(text).as_str()); self.names.push(Symbol::intern(text).as_str());
} }
} }
@ -288,7 +288,7 @@ pub fn exported_name_from_type_and_prefix<'a, 'tcx>(scx: &SharedCrateContext<'a,
krate: LOCAL_CRATE, krate: LOCAL_CRATE,
}; };
let hash = get_symbol_hash(scx, &empty_def_path, t, None); let hash = get_symbol_hash(scx, &empty_def_path, t, None);
let path = [token::intern_and_get_ident(prefix)]; let path = [intern_and_get_ident(prefix)];
mangle(path.iter().cloned(), &hash) mangle(path.iter().cloned(), &hash)
} }

View File

@ -52,8 +52,7 @@ use std::ffi::CString;
use std::cell::{Cell, RefCell, Ref}; use std::cell::{Cell, RefCell, Ref};
use syntax::ast; use syntax::ast;
use syntax::parse::token::InternedString; use syntax::symbol::{Symbol, InternedString};
use syntax::parse::token;
use syntax_pos::{DUMMY_SP, Span}; use syntax_pos::{DUMMY_SP, Span};
pub use context::{CrateContext, SharedCrateContext}; pub use context::{CrateContext, SharedCrateContext};
@ -225,7 +224,7 @@ impl<'a, 'tcx> VariantInfo<'tcx> {
VariantInfo { VariantInfo {
discr: Disr(0), discr: Disr(0),
fields: v.iter().enumerate().map(|(i, &t)| { fields: v.iter().enumerate().map(|(i, &t)| {
Field(token::intern(&i.to_string()), t) Field(Symbol::intern(&i.to_string()), t)
}).collect() }).collect()
} }
} }

View File

@ -42,7 +42,7 @@ use std::ptr;
use std::rc::Rc; use std::rc::Rc;
use std::str; use std::str;
use syntax::ast; use syntax::ast;
use syntax::parse::token::InternedString; use syntax::symbol::InternedString;
use abi::FnType; use abi::FnType;
pub struct Stats { pub struct Stats {

View File

@ -45,9 +45,8 @@ use std::fmt::Write;
use std::path::Path; use std::path::Path;
use std::ptr; use std::ptr;
use std::rc::Rc; use std::rc::Rc;
use syntax::util::interner::Interner;
use syntax::ast; use syntax::ast;
use syntax::parse::token; use syntax::symbol::{Interner, InternedString};
use syntax_pos::{self, Span}; use syntax_pos::{self, Span};
@ -1566,7 +1565,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
fn get_enum_discriminant_name(cx: &CrateContext, fn get_enum_discriminant_name(cx: &CrateContext,
def_id: DefId) def_id: DefId)
-> token::InternedString { -> InternedString {
cx.tcx().item_name(def_id).as_str() cx.tcx().item_name(def_id).as_str()
} }
} }

View File

@ -30,7 +30,7 @@ use rustc::ty::{self, Ty};
use Disr; use Disr;
use rustc::hir; use rustc::hir;
use syntax::ast; use syntax::ast;
use syntax::parse::token; use syntax::symbol::intern_and_get_ident;
use rustc::session::Session; use rustc::session::Session;
use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::{Span, DUMMY_SP};
@ -208,7 +208,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
} }
(_, "type_name") => { (_, "type_name") => {
let tp_ty = substs.type_at(0); let tp_ty = substs.type_at(0);
let ty_name = token::intern_and_get_ident(&tp_ty.to_string()); let ty_name = intern_and_get_ident(&tp_ty.to_string());
C_str_slice(ccx, ty_name) C_str_slice(ccx, ty_name)
} }
(_, "type_id") => { (_, "type_id") => {

View File

@ -30,7 +30,7 @@ use glue;
use type_::Type; use type_::Type;
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use syntax::parse::token; use syntax::symbol::intern_and_get_ident;
use super::{MirContext, LocalRef}; use super::{MirContext, LocalRef};
use super::analyze::CleanupKind; use super::analyze::CleanupKind;
@ -321,7 +321,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
// Get the location information. // Get the location information.
let loc = bcx.sess().codemap().lookup_char_pos(span.lo); let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
let filename = token::intern_and_get_ident(&loc.file.name); let filename = intern_and_get_ident(&loc.file.name);
let filename = C_str_slice(bcx.ccx(), filename); let filename = C_str_slice(bcx.ccx(), filename);
let line = C_u32(bcx.ccx(), loc.line as u32); let line = C_u32(bcx.ccx(), loc.line as u32);
@ -351,7 +351,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
const_err) const_err)
} }
mir::AssertMessage::Math(ref err) => { mir::AssertMessage::Math(ref err) => {
let msg_str = token::intern_and_get_ident(err.description()); let msg_str = intern_and_get_ident(err.description());
let msg_str = C_str_slice(bcx.ccx(), msg_str); let msg_str = C_str_slice(bcx.ccx(), msg_str);
let msg_file_line = C_struct(bcx.ccx(), let msg_file_line = C_struct(bcx.ccx(),
&[msg_str, filename, line], &[msg_str, filename, line],

View File

@ -21,7 +21,7 @@ use machine;
use type_of; use type_of;
use syntax_pos::{DUMMY_SP, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; use syntax_pos::{DUMMY_SP, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
use syntax::parse::token::keywords; use syntax::symbol::keywords;
use std::cell::Ref; use std::cell::Ref;
use std::iter; use std::iter;

View File

@ -132,7 +132,7 @@ use std::sync::Arc;
use std::collections::hash_map::DefaultHasher; use std::collections::hash_map::DefaultHasher;
use symbol_map::SymbolMap; use symbol_map::SymbolMap;
use syntax::ast::NodeId; use syntax::ast::NodeId;
use syntax::parse::token::{self, InternedString}; use syntax::symbol::{InternedString, intern_and_get_ident};
use trans_item::TransItem; use trans_item::TransItem;
use util::nodemap::{FxHashMap, FxHashSet}; use util::nodemap::{FxHashMap, FxHashSet};
@ -542,11 +542,11 @@ fn compute_codegen_unit_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mod_path.push_str(".volatile"); mod_path.push_str(".volatile");
} }
return token::intern_and_get_ident(&mod_path[..]); return intern_and_get_ident(&mod_path[..]);
} }
fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString { fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString {
token::intern_and_get_ident(&format!("{}{}{}", intern_and_get_ident(&format!("{}{}{}",
crate_name, crate_name,
NUMBERED_CODEGEN_UNIT_MARKER, NUMBERED_CODEGEN_UNIT_MARKER,
index)[..]) index)[..])

View File

@ -71,7 +71,7 @@ use util::nodemap::{NodeMap, FxHashSet};
use std::cell::RefCell; use std::cell::RefCell;
use syntax::{abi, ast}; use syntax::{abi, ast};
use syntax::feature_gate::{GateIssue, emit_feature_err}; use syntax::feature_gate::{GateIssue, emit_feature_err};
use syntax::parse::token::{self, keywords}; use syntax::symbol::{Symbol, keywords};
use syntax_pos::{Span, Pos}; use syntax_pos::{Span, Pos};
use errors::DiagnosticBuilder; use errors::DiagnosticBuilder;
@ -645,7 +645,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o {
}; };
let output_binding = ConvertedBinding { let output_binding = ConvertedBinding {
item_name: token::intern(FN_OUTPUT_NAME), item_name: Symbol::intern(FN_OUTPUT_NAME),
ty: output, ty: output,
span: output_span span: output_span
}; };

View File

@ -20,7 +20,7 @@ use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
use rustc::hir; use rustc::hir;
use syntax_pos::Span; use syntax_pos::Span;
use syntax::parse::token; use syntax::symbol::Symbol;
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug)]
enum AutoderefKind { enum AutoderefKind {
@ -120,7 +120,7 @@ impl<'a, 'gcx, 'tcx> Autoderef<'a, 'gcx, 'tcx> {
let normalized = traits::normalize_projection_type(&mut selcx, let normalized = traits::normalize_projection_type(&mut selcx,
ty::ProjectionTy { ty::ProjectionTy {
trait_ref: trait_ref, trait_ref: trait_ref,
item_name: token::intern("Target"), item_name: Symbol::intern("Target"),
}, },
cause, cause,
0); 0);
@ -198,7 +198,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
(PreferMutLvalue, Some(trait_did)) => { (PreferMutLvalue, Some(trait_did)) => {
self.lookup_method_in_trait(span, self.lookup_method_in_trait(span,
base_expr, base_expr,
token::intern("deref_mut"), Symbol::intern("deref_mut"),
trait_did, trait_did,
base_ty, base_ty,
None) None)
@ -211,7 +211,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
(None, Some(trait_did)) => { (None, Some(trait_did)) => {
self.lookup_method_in_trait(span, self.lookup_method_in_trait(span,
base_expr, base_expr,
token::intern("deref"), Symbol::intern("deref"),
trait_did, trait_did,
base_ty, base_ty,
None) None)

View File

@ -16,7 +16,7 @@ use hir::def_id::{DefId, LOCAL_CRATE};
use hir::print; use hir::print;
use rustc::{infer, traits}; use rustc::{infer, traits};
use rustc::ty::{self, LvaluePreference, Ty}; use rustc::ty::{self, LvaluePreference, Ty};
use syntax::parse::token; use syntax::symbol::Symbol;
use syntax::ptr::P; use syntax::ptr::P;
use syntax_pos::Span; use syntax_pos::Span;
@ -160,9 +160,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
-> Option<ty::MethodCallee<'tcx>> { -> Option<ty::MethodCallee<'tcx>> {
// Try the options that are least restrictive on the caller first. // Try the options that are least restrictive on the caller first.
for &(opt_trait_def_id, method_name) in for &(opt_trait_def_id, method_name) in
&[(self.tcx.lang_items.fn_trait(), token::intern("call")), &[(self.tcx.lang_items.fn_trait(), Symbol::intern("call")),
(self.tcx.lang_items.fn_mut_trait(), token::intern("call_mut")), (self.tcx.lang_items.fn_mut_trait(), Symbol::intern("call_mut")),
(self.tcx.lang_items.fn_once_trait(), token::intern("call_once"))] { (self.tcx.lang_items.fn_once_trait(), Symbol::intern("call_once"))] {
let trait_def_id = match opt_trait_def_id { let trait_def_id = match opt_trait_def_id {
Some(def_id) => def_id, Some(def_id) => def_id,
None => continue, None => continue,

View File

@ -21,7 +21,7 @@ use {CrateCtxt, require_same_types};
use syntax::abi::Abi; use syntax::abi::Abi;
use syntax::ast; use syntax::ast;
use syntax::parse::token; use syntax::symbol::Symbol;
use syntax_pos::Span; use syntax_pos::Span;
use rustc::hir; use rustc::hir;
@ -75,7 +75,7 @@ fn equate_intrinsic_type<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
/// and in libcore/intrinsics.rs /// and in libcore/intrinsics.rs
pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) {
fn param<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, n: u32) -> Ty<'tcx> { fn param<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, n: u32) -> Ty<'tcx> {
let name = token::intern(&format!("P{}", n)); let name = Symbol::intern(&format!("P{}", n));
ccx.tcx.mk_param(n, name) ccx.tcx.mk_param(n, name)
} }
@ -326,7 +326,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) {
pub fn check_platform_intrinsic_type(ccx: &CrateCtxt, pub fn check_platform_intrinsic_type(ccx: &CrateCtxt,
it: &hir::ForeignItem) { it: &hir::ForeignItem) {
let param = |n| { let param = |n| {
let name = token::intern(&format!("P{}", n)); let name = Symbol::intern(&format!("P{}", n));
ccx.tcx.mk_param(n, name) ccx.tcx.mk_param(n, name)
}; };

View File

@ -115,8 +115,8 @@ use syntax::ast;
use syntax::attr; use syntax::attr;
use syntax::codemap::{self, original_sp, Spanned}; use syntax::codemap::{self, original_sp, Spanned};
use syntax::feature_gate::{GateIssue, emit_feature_err}; use syntax::feature_gate::{GateIssue, emit_feature_err};
use syntax::parse::token::{self, InternedString, keywords};
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::{Symbol, InternedString, keywords};
use syntax::util::lev_distance::find_best_match_for_name; use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::{self, BytePos, Span}; use syntax_pos::{self, BytePos, Span};
@ -2369,7 +2369,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
(PreferMutLvalue, Some(trait_did)) => { (PreferMutLvalue, Some(trait_did)) => {
self.lookup_method_in_trait_adjusted(expr.span, self.lookup_method_in_trait_adjusted(expr.span,
Some(&base_expr), Some(&base_expr),
token::intern("index_mut"), Symbol::intern("index_mut"),
trait_did, trait_did,
autoderefs, autoderefs,
unsize, unsize,
@ -2384,7 +2384,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
(None, Some(trait_did)) => { (None, Some(trait_did)) => {
self.lookup_method_in_trait_adjusted(expr.span, self.lookup_method_in_trait_adjusted(expr.span,
Some(&base_expr), Some(&base_expr),
token::intern("index"), Symbol::intern("index"),
trait_did, trait_did,
autoderefs, autoderefs,
unsize, unsize,

View File

@ -14,7 +14,7 @@ use super::FnCtxt;
use hir::def_id::DefId; use hir::def_id::DefId;
use rustc::ty::{Ty, TypeFoldable, PreferMutLvalue}; use rustc::ty::{Ty, TypeFoldable, PreferMutLvalue};
use syntax::ast; use syntax::ast;
use syntax::parse::token; use syntax::symbol::Symbol;
use rustc::hir; use rustc::hir;
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
@ -182,7 +182,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
let rhs_ty_var = self.next_ty_var(); let rhs_ty_var = self.next_ty_var();
let return_ty = match self.lookup_op_method(expr, lhs_ty, vec![rhs_ty_var], let return_ty = match self.lookup_op_method(expr, lhs_ty, vec![rhs_ty_var],
token::intern(name), trait_def_id, Symbol::intern(name), trait_def_id,
lhs_expr) { lhs_expr) {
Ok(return_ty) => return_ty, Ok(return_ty) => return_ty,
Err(()) => { Err(()) => {
@ -248,9 +248,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
-> Ty<'tcx> -> Ty<'tcx>
{ {
assert!(op.is_by_value()); assert!(op.is_by_value());
match self.lookup_op_method(ex, operand_ty, vec![], let mname = Symbol::intern(mname);
token::intern(mname), trait_did, match self.lookup_op_method(ex, operand_ty, vec![], mname, trait_did, operand_expr) {
operand_expr) {
Ok(t) => t, Ok(t) => t,
Err(()) => { Err(()) => {
self.type_error_message(ex.span, |actual| { self.type_error_message(ex.span, |actual| {

View File

@ -79,7 +79,7 @@ use rustc_const_math::ConstInt;
use std::cell::RefCell; use std::cell::RefCell;
use syntax::{abi, ast, attr}; use syntax::{abi, ast, attr};
use syntax::parse::token::{self, keywords}; use syntax::symbol::{Symbol, keywords};
use syntax_pos::Span; use syntax_pos::Span;
use rustc::hir::{self, map as hir_map, print as pprust}; use rustc::hir::{self, map as hir_map, print as pprust};
@ -585,7 +585,7 @@ fn convert_closure<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
let upvar_decls : Vec<_> = tcx.with_freevars(node_id, |fv| { let upvar_decls : Vec<_> = tcx.with_freevars(node_id, |fv| {
fv.iter().enumerate().map(|(i, _)| ty::TypeParameterDef { fv.iter().enumerate().map(|(i, _)| ty::TypeParameterDef {
index: (base_generics.count() as u32) + (i as u32), index: (base_generics.count() as u32) + (i as u32),
name: token::intern("<upvar>"), name: Symbol::intern("<upvar>"),
def_id: def_id, def_id: def_id,
default_def_id: base_def_id, default_def_id: base_def_id,
default: None, default: None,

View File

@ -14,15 +14,16 @@ pub use self::TyParamBound::*;
pub use self::UnsafeSource::*; pub use self::UnsafeSource::*;
pub use self::ViewPath_::*; pub use self::ViewPath_::*;
pub use self::PathParameters::*; pub use self::PathParameters::*;
pub use symbol::Symbol as Name;
pub use util::ThinVec; pub use util::ThinVec;
use syntax_pos::{mk_sp, Span, DUMMY_SP, ExpnId}; use syntax_pos::{mk_sp, Span, DUMMY_SP, ExpnId};
use codemap::{respan, Spanned}; use codemap::{respan, Spanned};
use abi::Abi; use abi::Abi;
use ext::hygiene::SyntaxContext; use ext::hygiene::SyntaxContext;
use parse::token::{self, keywords, InternedString};
use print::pprust; use print::pprust;
use ptr::P; use ptr::P;
use symbol::{Symbol, keywords, InternedString};
use tokenstream::{TokenTree}; use tokenstream::{TokenTree};
use std::collections::HashSet; use std::collections::HashSet;
@ -32,60 +33,24 @@ use std::u32;
use serialize::{self, Encodable, Decodable, Encoder, Decoder}; use serialize::{self, Encodable, Decodable, Encoder, Decoder};
/// A name is a part of an identifier, representing a string or gensym. It's
/// the result of interning.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Name(pub u32);
/// An identifier contains a Name (index into the interner /// An identifier contains a Name (index into the interner
/// table) and a SyntaxContext to track renaming and /// table) and a SyntaxContext to track renaming and
/// macro expansion per Flatt et al., "Macros That Work Together" /// macro expansion per Flatt et al., "Macros That Work Together"
#[derive(Clone, Copy, PartialEq, Eq, Hash)] #[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Ident { pub struct Ident {
pub name: Name, pub name: Symbol,
pub ctxt: SyntaxContext pub ctxt: SyntaxContext
} }
impl Name {
pub fn as_str(self) -> token::InternedString {
token::InternedString::new_from_name(self)
}
}
impl fmt::Debug for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}({})", self, self.0)
}
}
impl fmt::Display for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.as_str(), f)
}
}
impl Encodable for Name {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(&self.as_str())
}
}
impl Decodable for Name {
fn decode<D: Decoder>(d: &mut D) -> Result<Name, D::Error> {
Ok(token::intern(&d.read_str()?))
}
}
impl<'a> ::std::cmp::PartialEq<&'a str> for Name {
fn eq(&self, other: &&str) -> bool {
*self.as_str() == **other
}
}
impl Ident { impl Ident {
pub const fn with_empty_ctxt(name: Name) -> Ident { pub const fn with_empty_ctxt(name: Name) -> Ident {
Ident { name: name, ctxt: SyntaxContext::empty() } Ident { name: name, ctxt: SyntaxContext::empty() }
} }
/// Maps a string to an identifier with an empty syntax context.
pub fn from_str(s: &str) -> Ident {
Ident::with_empty_ctxt(Symbol::intern(s))
}
} }
impl fmt::Debug for Ident { impl fmt::Debug for Ident {

View File

@ -23,9 +23,9 @@ use syntax_pos::{Span, BytePos, DUMMY_SP};
use errors::Handler; use errors::Handler;
use feature_gate::{Features, GatedCfg}; use feature_gate::{Features, GatedCfg};
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use parse::token::InternedString; use parse::ParseSess;
use parse::{ParseSess, token};
use ptr::P; use ptr::P;
use symbol::{self, Symbol, InternedString};
use util::ThinVec; use util::ThinVec;
use std::cell::{RefCell, Cell}; use std::cell::{RefCell, Cell};
@ -278,8 +278,8 @@ impl Attribute {
if self.is_sugared_doc { if self.is_sugared_doc {
let comment = self.value_str().unwrap(); let comment = self.value_str().unwrap();
let meta = mk_name_value_item_str( let meta = mk_name_value_item_str(
token::intern("doc"), Symbol::intern("doc"),
token::intern_and_get_ident(&strip_doc_comment_decoration( symbol::intern_and_get_ident(&strip_doc_comment_decoration(
&comment))); &comment)));
if self.style == ast::AttrStyle::Outer { if self.style == ast::AttrStyle::Outer {
f(&mk_attr_outer(self.id, meta)) f(&mk_attr_outer(self.id, meta))
@ -392,7 +392,7 @@ pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos, hi: By
style: style, style: style,
value: MetaItem { value: MetaItem {
span: mk_sp(lo, hi), span: mk_sp(lo, hi),
name: token::intern("doc"), name: Symbol::intern("doc"),
node: MetaItemKind::NameValue(lit), node: MetaItemKind::NameValue(lit),
}, },
is_sugared_doc: true, is_sugared_doc: true,

View File

@ -19,6 +19,7 @@ use ext::base::{ExtCtxt, MacEager, MacResult};
use ext::build::AstBuilder; use ext::build::AstBuilder;
use parse::token; use parse::token;
use ptr::P; use ptr::P;
use symbol::Symbol;
use tokenstream::{TokenTree}; use tokenstream::{TokenTree};
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
@ -141,7 +142,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
)); ));
} }
}); });
let sym = Ident::with_empty_ctxt(token::gensym(&format!( let sym = Ident::with_empty_ctxt(Symbol::gensym(&format!(
"__register_diagnostic_{}", code "__register_diagnostic_{}", code
))); )));
MacEager::items(SmallVector::many(vec![ MacEager::items(SmallVector::many(vec![

View File

@ -20,8 +20,8 @@ use ext::hygiene::Mark;
use fold::{self, Folder}; use fold::{self, Folder};
use parse::{self, parser}; use parse::{self, parser};
use parse::token; use parse::token;
use parse::token::{InternedString, str_to_ident};
use ptr::P; use ptr::P;
use symbol::{Symbol, InternedString};
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
use std::path::PathBuf; use std::path::PathBuf;
@ -735,7 +735,7 @@ impl<'a> ExtCtxt<'a> {
self.ecfg.trace_mac = x self.ecfg.trace_mac = x
} }
pub fn ident_of(&self, st: &str) -> ast::Ident { pub fn ident_of(&self, st: &str) -> ast::Ident {
str_to_ident(st) ast::Ident::from_str(st)
} }
pub fn std_path(&self, components: &[&str]) -> Vec<ast::Ident> { pub fn std_path(&self, components: &[&str]) -> Vec<ast::Ident> {
let mut v = Vec::new(); let mut v = Vec::new();
@ -746,7 +746,7 @@ impl<'a> ExtCtxt<'a> {
return v return v
} }
pub fn name_of(&self, st: &str) -> ast::Name { pub fn name_of(&self, st: &str) -> ast::Name {
token::intern(st) Symbol::intern(st)
} }
} }

View File

@ -14,8 +14,8 @@ use attr;
use syntax_pos::{Span, DUMMY_SP, Pos}; use syntax_pos::{Span, DUMMY_SP, Pos};
use codemap::{dummy_spanned, respan, Spanned}; use codemap::{dummy_spanned, respan, Spanned};
use ext::base::ExtCtxt; use ext::base::ExtCtxt;
use parse::token::{self, keywords, InternedString};
use ptr::P; use ptr::P;
use symbol::{intern_and_get_ident, keywords, InternedString};
// Transitional reexports so qquote can find the paths it is looking for // Transitional reexports so qquote can find the paths it is looking for
mod syntax { mod syntax {
@ -787,8 +787,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
fn expr_fail(&self, span: Span, msg: InternedString) -> P<ast::Expr> { fn expr_fail(&self, span: Span, msg: InternedString) -> P<ast::Expr> {
let loc = self.codemap().lookup_char_pos(span.lo); let loc = self.codemap().lookup_char_pos(span.lo);
let expr_file = self.expr_str(span, let expr_file = self.expr_str(span, intern_and_get_ident(&loc.file.name));
token::intern_and_get_ident(&loc.file.name));
let expr_line = self.expr_u32(span, loc.line as u32); let expr_line = self.expr_u32(span, loc.line as u32);
let expr_file_line_tuple = self.expr_tuple(span, vec![expr_file, expr_line]); let expr_file_line_tuple = self.expr_tuple(span, vec![expr_file, expr_line]);
let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple); let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);

View File

@ -23,10 +23,11 @@ use fold;
use fold::*; use fold::*;
use parse::{ParseSess, PResult, lexer}; use parse::{ParseSess, PResult, lexer};
use parse::parser::Parser; use parse::parser::Parser;
use parse::token::{self, keywords}; use parse::token;
use print::pprust; use print::pprust;
use ptr::P; use ptr::P;
use std_inject; use std_inject;
use symbol::keywords;
use tokenstream::{TokenTree, TokenStream}; use tokenstream::{TokenTree, TokenStream};
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
use visit::Visitor; use visit::Visitor;
@ -190,7 +191,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate {
self.cx.crate_root = std_inject::injected_crate_name(&krate); self.cx.crate_root = std_inject::injected_crate_name(&krate);
let mut module = ModuleData { let mut module = ModuleData {
mod_path: vec![token::str_to_ident(&self.cx.ecfg.crate_name)], mod_path: vec![Ident::from_str(&self.cx.ecfg.crate_name)],
directory: PathBuf::from(self.cx.codemap().span_to_filename(krate.span)), directory: PathBuf::from(self.cx.codemap().span_to_filename(krate.span)),
}; };
module.directory.pop(); module.directory.pop();
@ -246,7 +247,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
self.cx.resolver.resolve_macro(scope, &mac.node.path, force) self.cx.resolver.resolve_macro(scope, &mac.node.path, force)
} }
InvocationKind::Attr { ref attr, .. } => { InvocationKind::Attr { ref attr, .. } => {
let ident = ast::Ident::with_empty_ctxt(attr.name()); let ident = Ident::with_empty_ctxt(attr.name());
let path = ast::Path::from_ident(attr.span, ident); let path = ast::Path::from_ident(attr.span, ident);
self.cx.resolver.resolve_macro(scope, &path, force) self.cx.resolver.resolve_macro(scope, &path, force)
} }

View File

@ -13,8 +13,8 @@ use codemap::{DUMMY_SP, dummy_spanned};
use ext::base::ExtCtxt; use ext::base::ExtCtxt;
use ext::expand::{Expansion, ExpansionKind}; use ext::expand::{Expansion, ExpansionKind};
use fold::*; use fold::*;
use parse::token::{intern, keywords};
use ptr::P; use ptr::P;
use symbol::{Symbol, keywords};
use util::move_map::MoveMap; use util::move_map::MoveMap;
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
@ -227,7 +227,7 @@ pub fn reconstructed_macro_rules(def: &ast::MacroDef) -> Expansion {
span: DUMMY_SP, span: DUMMY_SP,
global: false, global: false,
segments: vec![ast::PathSegment { segments: vec![ast::PathSegment {
identifier: ast::Ident::with_empty_ctxt(intern("macro_rules")), identifier: ast::Ident::with_empty_ctxt(Symbol::intern("macro_rules")),
parameters: ast::PathParameters::none(), parameters: ast::PathParameters::none(),
}], }],
}, },

View File

@ -66,6 +66,7 @@ pub mod prelude {
pub use ast::Ident; pub use ast::Ident;
pub use codemap::{DUMMY_SP, Span}; pub use codemap::{DUMMY_SP, Span};
pub use ext::base::{ExtCtxt, MacResult}; pub use ext::base::{ExtCtxt, MacResult};
pub use parse::token::{self, Token, DelimToken, keywords, str_to_ident}; pub use parse::token::{self, Token, DelimToken};
pub use symbol::keywords;
pub use tokenstream::{TokenTree, TokenStream}; pub use tokenstream::{TokenTree, TokenStream};
} }

View File

@ -33,6 +33,7 @@ pub mod rt {
use parse::{self, token, classify}; use parse::{self, token, classify};
use ptr::P; use ptr::P;
use std::rc::Rc; use std::rc::Rc;
use symbol;
use tokenstream::{self, TokenTree}; use tokenstream::{self, TokenTree};
@ -239,7 +240,7 @@ pub mod rt {
impl ToTokens for str { impl ToTokens for str {
fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
let lit = ast::LitKind::Str( let lit = ast::LitKind::Str(
token::intern_and_get_ident(self), ast::StrStyle::Cooked); symbol::intern_and_get_ident(self), ast::StrStyle::Cooked);
dummy_spanned(lit).to_tokens(cx) dummy_spanned(lit).to_tokens(cx)
} }
} }
@ -527,12 +528,12 @@ pub fn expand_quote_matcher(cx: &mut ExtCtxt,
base::MacEager::expr(expanded) base::MacEager::expr(expanded)
} }
fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> { fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> {
strs.iter().map(|str| str_to_ident(&(*str))).collect() strs.iter().map(|s| ast::Ident::from_str(s)).collect()
} }
fn id_ext(str: &str) -> ast::Ident { fn id_ext(s: &str) -> ast::Ident {
str_to_ident(str) ast::Ident::from_str(s)
} }
// Lift an ident to the expr that evaluates to that ident. // Lift an ident to the expr that evaluates to that ident.

View File

@ -17,6 +17,7 @@ use parse::token;
use parse; use parse;
use print::pprust; use print::pprust;
use ptr::P; use ptr::P;
use symbol;
use tokenstream; use tokenstream;
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
@ -60,15 +61,14 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
let topmost = cx.expansion_cause(); let topmost = cx.expansion_cause();
let loc = cx.codemap().lookup_char_pos(topmost.lo); let loc = cx.codemap().lookup_char_pos(topmost.lo);
let filename = token::intern_and_get_ident(&loc.file.name); let filename = symbol::intern_and_get_ident(&loc.file.name);
base::MacEager::expr(cx.expr_str(topmost, filename)) base::MacEager::expr(cx.expr_str(topmost, filename))
} }
pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'static> { -> Box<base::MacResult+'static> {
let s = pprust::tts_to_string(tts); let s = pprust::tts_to_string(tts);
base::MacEager::expr(cx.expr_str(sp, base::MacEager::expr(cx.expr_str(sp, symbol::intern_and_get_ident(&s)))
token::intern_and_get_ident(&s[..])))
} }
pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
@ -77,9 +77,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
let mod_path = &cx.current_expansion.module.mod_path; let mod_path = &cx.current_expansion.module.mod_path;
let string = mod_path.iter().map(|x| x.to_string()).collect::<Vec<String>>().join("::"); let string = mod_path.iter().map(|x| x.to_string()).collect::<Vec<String>>().join("::");
base::MacEager::expr(cx.expr_str( base::MacEager::expr(cx.expr_str(sp, symbol::intern_and_get_ident(&string)))
sp,
token::intern_and_get_ident(&string[..])))
} }
/// include! : parse the given file as an expr /// include! : parse the given file as an expr
@ -144,7 +142,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
// Add this input file to the code map to make it available as // Add this input file to the code map to make it available as
// dependency information // dependency information
let filename = format!("{}", file.display()); let filename = format!("{}", file.display());
let interned = token::intern_and_get_ident(&src[..]); let interned = symbol::intern_and_get_ident(&src);
cx.codemap().new_filemap_and_lines(&filename, None, &src); cx.codemap().new_filemap_and_lines(&filename, None, &src);
base::MacEager::expr(cx.expr_str(sp, interned)) base::MacEager::expr(cx.expr_str(sp, interned))

View File

@ -20,9 +20,10 @@ use ext::tt::macro_parser::{parse, parse_failure_msg};
use parse::ParseSess; use parse::ParseSess;
use parse::lexer::new_tt_reader; use parse::lexer::new_tt_reader;
use parse::parser::{Parser, Restrictions}; use parse::parser::{Parser, Restrictions};
use parse::token::{self, gensym_ident, NtTT, Token}; use parse::token::{self, NtTT, Token};
use parse::token::Token::*; use parse::token::Token::*;
use print; use print;
use symbol::Symbol;
use tokenstream::{self, TokenTree}; use tokenstream::{self, TokenTree};
use std::collections::{HashMap}; use std::collections::{HashMap};
@ -187,16 +188,16 @@ impl IdentMacroExpander for MacroRulesExpander {
/// Converts a `macro_rules!` invocation into a syntax extension. /// Converts a `macro_rules!` invocation into a syntax extension.
pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
let lhs_nm = gensym_ident("lhs"); let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
let rhs_nm = gensym_ident("rhs"); let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));
// The pattern that macro_rules matches. // The pattern that macro_rules matches.
// The grammar for macro_rules! is: // The grammar for macro_rules! is:
// $( $lhs:tt => $rhs:tt );+ // $( $lhs:tt => $rhs:tt );+
// ...quasiquoting this would be nice. // ...quasiquoting this would be nice.
// These spans won't matter, anyways // These spans won't matter, anyways
let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt")); let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt"));
let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt")); let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt"));
let argument_gram = vec![ let argument_gram = vec![
TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
tts: vec![ tts: vec![

View File

@ -33,7 +33,7 @@ use syntax_pos::Span;
use errors::{DiagnosticBuilder, Handler}; use errors::{DiagnosticBuilder, Handler};
use visit::{self, FnKind, Visitor}; use visit::{self, FnKind, Visitor};
use parse::ParseSess; use parse::ParseSess;
use parse::token::InternedString; use symbol::InternedString;
use std::ascii::AsciiExt; use std::ascii::AsciiExt;
use std::env; use std::env;

View File

@ -22,8 +22,9 @@ use ast::*;
use ast; use ast;
use syntax_pos::Span; use syntax_pos::Span;
use codemap::{Spanned, respan}; use codemap::{Spanned, respan};
use parse::token::{self, keywords}; use parse::token;
use ptr::P; use ptr::P;
use symbol::keywords;
use tokenstream::*; use tokenstream::*;
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
use util::move_map::MoveMap; use util::move_map::MoveMap;

View File

@ -83,7 +83,6 @@ pub mod diagnostics {
pub mod diagnostic_list; pub mod diagnostic_list;
pub mod util { pub mod util {
pub mod interner;
pub mod lev_distance; pub mod lev_distance;
pub mod node_count; pub mod node_count;
pub mod parser; pub mod parser;
@ -118,6 +117,7 @@ pub mod ptr;
pub mod show_span; pub mod show_span;
pub mod std_inject; pub mod std_inject;
pub mod str; pub mod str;
pub mod symbol;
pub mod test; pub mod test;
pub mod tokenstream; pub mod tokenstream;
pub mod visit; pub mod visit;

View File

@ -8,13 +8,14 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use ast; use ast::{self, Ident};
use syntax_pos::{self, BytePos, CharPos, Pos, Span}; use syntax_pos::{self, BytePos, CharPos, Pos, Span};
use codemap::CodeMap; use codemap::CodeMap;
use errors::{FatalError, Handler, DiagnosticBuilder}; use errors::{FatalError, Handler, DiagnosticBuilder};
use ext::tt::transcribe::tt_next_token; use ext::tt::transcribe::tt_next_token;
use parse::token::{self, keywords, str_to_ident}; use parse::token;
use str::char_at; use str::char_at;
use symbol::{Symbol, keywords};
use rustc_unicode::property::Pattern_White_Space; use rustc_unicode::property::Pattern_White_Space;
use std::borrow::Cow; use std::borrow::Cow;
@ -350,13 +351,13 @@ impl<'a> StringReader<'a> {
/// single-byte delimiter). /// single-byte delimiter).
pub fn name_from(&self, start: BytePos) -> ast::Name { pub fn name_from(&self, start: BytePos) -> ast::Name {
debug!("taking an ident from {:?} to {:?}", start, self.pos); debug!("taking an ident from {:?} to {:?}", start, self.pos);
self.with_str_from(start, token::intern) self.with_str_from(start, Symbol::intern)
} }
/// As name_from, with an explicit endpoint. /// As name_from, with an explicit endpoint.
pub fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name { pub fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name {
debug!("taking an ident from {:?} to {:?}", start, end); debug!("taking an ident from {:?} to {:?}", start, end);
self.with_str_from_to(start, end, token::intern) self.with_str_from_to(start, end, Symbol::intern)
} }
/// Calls `f` with a string slice of the source text spanning from `start` /// Calls `f` with a string slice of the source text spanning from `start`
@ -492,7 +493,7 @@ impl<'a> StringReader<'a> {
if string == "_" { if string == "_" {
None None
} else { } else {
Some(token::intern(string)) Some(Symbol::intern(string))
} }
}) })
} }
@ -540,7 +541,7 @@ impl<'a> StringReader<'a> {
self.with_str_from(start_bpos, |string| { self.with_str_from(start_bpos, |string| {
// comments with only more "/"s are not doc comments // comments with only more "/"s are not doc comments
let tok = if is_doc_comment(string) { let tok = if is_doc_comment(string) {
token::DocComment(token::intern(string)) token::DocComment(Symbol::intern(string))
} else { } else {
token::Comment token::Comment
}; };
@ -669,7 +670,7 @@ impl<'a> StringReader<'a> {
} else { } else {
string.into() string.into()
}; };
token::DocComment(token::intern(&string[..])) token::DocComment(Symbol::intern(&string[..]))
} else { } else {
token::Comment token::Comment
}; };
@ -758,7 +759,7 @@ impl<'a> StringReader<'a> {
self.err_span_(start_bpos, self.err_span_(start_bpos,
self.pos, self.pos,
"no valid digits found for number"); "no valid digits found for number");
return token::Integer(token::intern("0")); return token::Integer(Symbol::intern("0"));
} }
// might be a float, but don't be greedy if this is actually an // might be a float, but don't be greedy if this is actually an
@ -1097,7 +1098,7 @@ impl<'a> StringReader<'a> {
token::Underscore token::Underscore
} else { } else {
// FIXME: perform NFKC normalization here. (Issue #2253) // FIXME: perform NFKC normalization here. (Issue #2253)
token::Ident(str_to_ident(string)) token::Ident(Ident::from_str(string))
} }
})); }));
} }
@ -1277,13 +1278,13 @@ impl<'a> StringReader<'a> {
// expansion purposes. See #12512 for the gory details of why // expansion purposes. See #12512 for the gory details of why
// this is necessary. // this is necessary.
let ident = self.with_str_from(start, |lifetime_name| { let ident = self.with_str_from(start, |lifetime_name| {
str_to_ident(&format!("'{}", lifetime_name)) Ident::from_str(&format!("'{}", lifetime_name))
}); });
// Conjure up a "keyword checking ident" to make sure that // Conjure up a "keyword checking ident" to make sure that
// the lifetime name is not a keyword. // the lifetime name is not a keyword.
let keyword_checking_ident = self.with_str_from(start, |lifetime_name| { let keyword_checking_ident = self.with_str_from(start, |lifetime_name| {
str_to_ident(lifetime_name) Ident::from_str(lifetime_name)
}); });
let keyword_checking_token = &token::Ident(keyword_checking_ident); let keyword_checking_token = &token::Ident(keyword_checking_ident);
let last_bpos = self.pos; let last_bpos = self.pos;
@ -1310,7 +1311,7 @@ impl<'a> StringReader<'a> {
let id = if valid { let id = if valid {
self.name_from(start) self.name_from(start)
} else { } else {
token::intern("0") Symbol::intern("0")
}; };
self.bump(); // advance ch past token self.bump(); // advance ch past token
let suffix = self.scan_optional_raw_name(); let suffix = self.scan_optional_raw_name();
@ -1352,7 +1353,7 @@ impl<'a> StringReader<'a> {
let id = if valid { let id = if valid {
self.name_from(start_bpos + BytePos(1)) self.name_from(start_bpos + BytePos(1))
} else { } else {
token::intern("??") Symbol::intern("??")
}; };
self.bump(); self.bump();
let suffix = self.scan_optional_raw_name(); let suffix = self.scan_optional_raw_name();
@ -1424,7 +1425,7 @@ impl<'a> StringReader<'a> {
let id = if valid { let id = if valid {
self.name_from_to(content_start_bpos, content_end_bpos) self.name_from_to(content_start_bpos, content_end_bpos)
} else { } else {
token::intern("??") Symbol::intern("??")
}; };
let suffix = self.scan_optional_raw_name(); let suffix = self.scan_optional_raw_name();
return Ok(token::Literal(token::StrRaw(id, hash_count), suffix)); return Ok(token::Literal(token::StrRaw(id, hash_count), suffix));
@ -1551,7 +1552,7 @@ impl<'a> StringReader<'a> {
let id = if valid { let id = if valid {
self.name_from(start) self.name_from(start)
} else { } else {
token::intern("?") Symbol::intern("?")
}; };
self.bump(); // advance ch past token self.bump(); // advance ch past token
return token::Byte(id); return token::Byte(id);
@ -1584,7 +1585,7 @@ impl<'a> StringReader<'a> {
let id = if valid { let id = if valid {
self.name_from(start) self.name_from(start)
} else { } else {
token::intern("??") Symbol::intern("??")
}; };
self.bump(); self.bump();
return token::ByteStr(id); return token::ByteStr(id);
@ -1700,11 +1701,11 @@ fn ident_continue(c: Option<char>) -> bool {
mod tests { mod tests {
use super::*; use super::*;
use ast::Ident;
use syntax_pos::{BytePos, Span, NO_EXPANSION}; use syntax_pos::{BytePos, Span, NO_EXPANSION};
use codemap::CodeMap; use codemap::CodeMap;
use errors; use errors;
use parse::token; use parse::token;
use parse::token::str_to_ident;
use std::io; use std::io;
use std::rc::Rc; use std::rc::Rc;
@ -1732,7 +1733,7 @@ mod tests {
&sh, &sh,
"/* my source file */ fn main() { println!(\"zebra\"); }\n" "/* my source file */ fn main() { println!(\"zebra\"); }\n"
.to_string()); .to_string());
let id = str_to_ident("fn"); let id = Ident::from_str("fn");
assert_eq!(string_reader.next_token().tok, token::Comment); assert_eq!(string_reader.next_token().tok, token::Comment);
assert_eq!(string_reader.next_token().tok, token::Whitespace); assert_eq!(string_reader.next_token().tok, token::Whitespace);
let tok1 = string_reader.next_token(); let tok1 = string_reader.next_token();
@ -1813,7 +1814,7 @@ mod tests {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone()); let sh = mk_sh(cm.clone());
assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
token::Literal(token::Char(token::intern("a")), None)); token::Literal(token::Char(Symbol::intern("a")), None));
} }
#[test] #[test]
@ -1821,7 +1822,7 @@ mod tests {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone()); let sh = mk_sh(cm.clone());
assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
token::Literal(token::Char(token::intern(" ")), None)); token::Literal(token::Char(Symbol::intern(" ")), None));
} }
#[test] #[test]
@ -1829,7 +1830,7 @@ mod tests {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone()); let sh = mk_sh(cm.clone());
assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
token::Literal(token::Char(token::intern("\\n")), None)); token::Literal(token::Char(Symbol::intern("\\n")), None));
} }
#[test] #[test]
@ -1847,7 +1848,7 @@ mod tests {
assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
.next_token() .next_token()
.tok, .tok,
token::Literal(token::StrRaw(token::intern("\"#a\\b\x00c\""), 3), None)); token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
} }
#[test] #[test]
@ -1857,11 +1858,11 @@ mod tests {
macro_rules! test { macro_rules! test {
($input: expr, $tok_type: ident, $tok_contents: expr) => {{ ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok, assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok,
token::Literal(token::$tok_type(token::intern($tok_contents)), token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
Some(token::intern("suffix")))); Some(Symbol::intern("suffix"))));
// with a whitespace separator: // with a whitespace separator:
assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok, assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok,
token::Literal(token::$tok_type(token::intern($tok_contents)), token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
None)); None));
}} }}
} }
@ -1877,14 +1878,14 @@ mod tests {
test!("1.0e10", Float, "1.0e10"); test!("1.0e10", Float, "1.0e10");
assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok,
token::Literal(token::Integer(token::intern("2")), token::Literal(token::Integer(Symbol::intern("2")),
Some(token::intern("us")))); Some(Symbol::intern("us"))));
assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
token::Literal(token::StrRaw(token::intern("raw"), 3), token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
Some(token::intern("suffix")))); Some(Symbol::intern("suffix"))));
assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
token::Literal(token::ByteStrRaw(token::intern("raw"), 3), token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
Some(token::intern("suffix")))); Some(Symbol::intern("suffix"))));
} }
#[test] #[test]
@ -1904,7 +1905,7 @@ mod tests {
_ => panic!("expected a comment!"), _ => panic!("expected a comment!"),
} }
assert_eq!(lexer.next_token().tok, assert_eq!(lexer.next_token().tok,
token::Literal(token::Char(token::intern("a")), None)); token::Literal(token::Char(Symbol::intern("a")), None));
} }
#[test] #[test]
@ -1917,6 +1918,6 @@ mod tests {
assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7))); assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7)));
assert_eq!(lexer.next_token().tok, token::Whitespace); assert_eq!(lexer.next_token().tok, token::Whitespace);
assert_eq!(lexer.next_token().tok, assert_eq!(lexer.next_token().tok,
token::DocComment(token::intern("/// test"))); token::DocComment(Symbol::intern("/// test")));
} }
} }

View File

@ -16,9 +16,9 @@ use syntax_pos::{self, Span, FileMap};
use errors::{Handler, ColorConfig, DiagnosticBuilder}; use errors::{Handler, ColorConfig, DiagnosticBuilder};
use feature_gate::UnstableFeatures; use feature_gate::UnstableFeatures;
use parse::parser::Parser; use parse::parser::Parser;
use parse::token::InternedString;
use ptr::P; use ptr::P;
use str::char_at; use str::char_at;
use symbol::{self, InternedString};
use tokenstream; use tokenstream;
use std::cell::RefCell; use std::cell::RefCell;
@ -372,7 +372,7 @@ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
s[1..].chars().all(|c| '0' <= c && c <= '9') s[1..].chars().all(|c| '0' <= c && c <= '9')
} }
fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, fn filtered_float_lit(data: InternedString, suffix: Option<&str>,
sd: &Handler, sp: Span) -> ast::LitKind { sd: &Handler, sp: Span) -> ast::LitKind {
debug!("filtered_float_lit: {}, {:?}", data, suffix); debug!("filtered_float_lit: {}, {:?}", data, suffix);
match suffix.as_ref().map(|s| &**s) { match suffix.as_ref().map(|s| &**s) {
@ -400,7 +400,7 @@ pub fn float_lit(s: &str, suffix: Option<InternedString>,
debug!("float_lit: {:?}, {:?}", s, suffix); debug!("float_lit: {:?}, {:?}", s, suffix);
// FIXME #2252: bounds checking float literals is deferred until trans // FIXME #2252: bounds checking float literals is deferred until trans
let s = s.chars().filter(|&c| c != '_').collect::<String>(); let s = s.chars().filter(|&c| c != '_').collect::<String>();
let data = token::intern_and_get_ident(&s); let data = symbol::intern_and_get_ident(&s);
filtered_float_lit(data, suffix.as_ref().map(|s| &**s), sd, sp) filtered_float_lit(data, suffix.as_ref().map(|s| &**s), sd, sp)
} }
@ -530,7 +530,7 @@ pub fn integer_lit(s: &str,
2 => sd.span_err(sp, "binary float literal is not supported"), 2 => sd.span_err(sp, "binary float literal is not supported"),
_ => () _ => ()
} }
let ident = token::intern_and_get_ident(&s); let ident = symbol::intern_and_get_ident(&s);
return filtered_float_lit(ident, Some(&suf), sd, sp) return filtered_float_lit(ident, Some(&suf), sd, sp)
} }
} }

View File

@ -48,13 +48,14 @@ use parse::classify;
use parse::common::SeqSep; use parse::common::SeqSep;
use parse::lexer::{Reader, TokenAndSpan}; use parse::lexer::{Reader, TokenAndSpan};
use parse::obsolete::ObsoleteSyntax; use parse::obsolete::ObsoleteSyntax;
use parse::token::{self, intern, keywords, MatchNt, SubstNt, InternedString}; use parse::token::{self, MatchNt, SubstNt};
use parse::{new_sub_parser_from_file, ParseSess}; use parse::{new_sub_parser_from_file, ParseSess};
use util::parser::{AssocOp, Fixity}; use util::parser::{AssocOp, Fixity};
use print::pprust; use print::pprust;
use ptr::P; use ptr::P;
use parse::PResult; use parse::PResult;
use tokenstream::{self, Delimited, SequenceRepetition, TokenTree}; use tokenstream::{self, Delimited, SequenceRepetition, TokenTree};
use symbol::{self, Symbol, keywords, InternedString};
use util::ThinVec; use util::ThinVec;
use std::collections::HashSet; use std::collections::HashSet;
@ -1537,13 +1538,13 @@ impl<'a> Parser<'a> {
token::Str_(s) => { token::Str_(s) => {
(true, (true,
LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())), LitKind::Str(symbol::intern_and_get_ident(&parse::str_lit(&s.as_str())),
ast::StrStyle::Cooked)) ast::StrStyle::Cooked))
} }
token::StrRaw(s, n) => { token::StrRaw(s, n) => {
(true, (true,
LitKind::Str( LitKind::Str(
token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())), symbol::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
ast::StrStyle::Raw(n))) ast::StrStyle::Raw(n)))
} }
token::ByteStr(i) => token::ByteStr(i) =>
@ -2627,7 +2628,7 @@ impl<'a> Parser<'a> {
}))); })));
} else if self.token.is_keyword(keywords::Crate) { } else if self.token.is_keyword(keywords::Crate) {
let ident = match self.token { let ident = match self.token {
token::Ident(id) => ast::Ident { name: token::intern("$crate"), ..id }, token::Ident(id) => ast::Ident { name: Symbol::intern("$crate"), ..id },
_ => unreachable!(), _ => unreachable!(),
}; };
self.bump(); self.bump();
@ -4835,7 +4836,7 @@ impl<'a> Parser<'a> {
Visibility::Inherited => (), Visibility::Inherited => (),
_ => { _ => {
let is_macro_rules: bool = match self.token { let is_macro_rules: bool = match self.token {
token::Ident(sid) => sid.name == intern("macro_rules"), token::Ident(sid) => sid.name == Symbol::intern("macro_rules"),
_ => false, _ => false,
}; };
if is_macro_rules { if is_macro_rules {

View File

@ -16,13 +16,10 @@ pub use self::Token::*;
use ast::{self}; use ast::{self};
use ptr::P; use ptr::P;
use util::interner::Interner; use symbol::keywords;
use tokenstream; use tokenstream;
use serialize::{Decodable, Decoder, Encodable, Encoder};
use std::cell::RefCell;
use std::fmt; use std::fmt;
use std::ops::Deref;
use std::rc::Rc; use std::rc::Rc;
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
@ -335,266 +332,3 @@ impl fmt::Debug for Nonterminal {
} }
} }
} }
// In this macro, there is the requirement that the name (the number) must be monotonically
// increasing by one in the special identifiers, starting at 0; the same holds for the keywords,
// except starting from the next number instead of zero.
macro_rules! declare_keywords {(
$( ($index: expr, $konst: ident, $string: expr) )*
) => {
pub mod keywords {
use ast;
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Keyword {
ident: ast::Ident,
}
impl Keyword {
#[inline] pub fn ident(self) -> ast::Ident { self.ident }
#[inline] pub fn name(self) -> ast::Name { self.ident.name }
}
$(
#[allow(non_upper_case_globals)]
pub const $konst: Keyword = Keyword {
ident: ast::Ident::with_empty_ctxt(ast::Name($index))
};
)*
}
fn mk_fresh_ident_interner() -> IdentInterner {
Interner::prefill(&[$($string,)*])
}
}}
// NB: leaving holes in the ident table is bad! a different ident will get
// interned with the id from the hole, but it will be between the min and max
// of the reserved words, and thus tagged as "reserved".
// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`,
// this should be rarely necessary though if the keywords are kept in alphabetic order.
declare_keywords! {
// Invalid identifier
(0, Invalid, "")
// Strict keywords used in the language.
(1, As, "as")
(2, Box, "box")
(3, Break, "break")
(4, Const, "const")
(5, Continue, "continue")
(6, Crate, "crate")
(7, Else, "else")
(8, Enum, "enum")
(9, Extern, "extern")
(10, False, "false")
(11, Fn, "fn")
(12, For, "for")
(13, If, "if")
(14, Impl, "impl")
(15, In, "in")
(16, Let, "let")
(17, Loop, "loop")
(18, Match, "match")
(19, Mod, "mod")
(20, Move, "move")
(21, Mut, "mut")
(22, Pub, "pub")
(23, Ref, "ref")
(24, Return, "return")
(25, SelfValue, "self")
(26, SelfType, "Self")
(27, Static, "static")
(28, Struct, "struct")
(29, Super, "super")
(30, Trait, "trait")
(31, True, "true")
(32, Type, "type")
(33, Unsafe, "unsafe")
(34, Use, "use")
(35, Where, "where")
(36, While, "while")
// Keywords reserved for future use.
(37, Abstract, "abstract")
(38, Alignof, "alignof")
(39, Become, "become")
(40, Do, "do")
(41, Final, "final")
(42, Macro, "macro")
(43, Offsetof, "offsetof")
(44, Override, "override")
(45, Priv, "priv")
(46, Proc, "proc")
(47, Pure, "pure")
(48, Sizeof, "sizeof")
(49, Typeof, "typeof")
(50, Unsized, "unsized")
(51, Virtual, "virtual")
(52, Yield, "yield")
// Weak keywords, have special meaning only in specific contexts.
(53, Default, "default")
(54, StaticLifetime, "'static")
(55, Union, "union")
}
// looks like we can get rid of this completely...
pub type IdentInterner = Interner;
// if an interner exists in TLS, return it. Otherwise, prepare a
// fresh one.
// FIXME(eddyb) #8726 This should probably use a thread-local reference.
pub fn with_ident_interner<T, F: FnOnce(&mut IdentInterner) -> T>(f: F) -> T {
thread_local!(static KEY: RefCell<IdentInterner> = {
RefCell::new(mk_fresh_ident_interner())
});
KEY.with(|interner| f(&mut *interner.borrow_mut()))
}
/// Reset the ident interner to its initial state.
pub fn reset_ident_interner() {
with_ident_interner(|interner| *interner = mk_fresh_ident_interner());
}
/// Represents a string stored in the thread-local interner. Because the
/// interner lives for the life of the thread, this can be safely treated as an
/// immortal string, as long as it never crosses between threads.
///
/// FIXME(pcwalton): You must be careful about what you do in the destructors
/// of objects stored in TLS, because they may run after the interner is
/// destroyed. In particular, they must not access string contents. This can
/// be fixed in the future by just leaking all strings until thread death
/// somehow.
#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)]
pub struct InternedString {
string: Rc<str>,
}
impl InternedString {
#[inline]
pub fn new(string: &'static str) -> InternedString {
InternedString {
string: Rc::__from_str(string),
}
}
#[inline]
pub fn new_from_name(name: ast::Name) -> InternedString {
with_ident_interner(|interner| InternedString { string: interner.get(name) })
}
}
impl Deref for InternedString {
type Target = str;
fn deref(&self) -> &str { &self.string }
}
impl fmt::Debug for InternedString {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.string, f)
}
}
impl fmt::Display for InternedString {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.string, f)
}
}
impl<'a> PartialEq<&'a str> for InternedString {
#[inline(always)]
fn eq(&self, other: & &'a str) -> bool {
PartialEq::eq(&self.string[..], *other)
}
#[inline(always)]
fn ne(&self, other: & &'a str) -> bool {
PartialEq::ne(&self.string[..], *other)
}
}
impl<'a> PartialEq<InternedString> for &'a str {
#[inline(always)]
fn eq(&self, other: &InternedString) -> bool {
PartialEq::eq(*self, &other.string[..])
}
#[inline(always)]
fn ne(&self, other: &InternedString) -> bool {
PartialEq::ne(*self, &other.string[..])
}
}
impl PartialEq<str> for InternedString {
#[inline(always)]
fn eq(&self, other: &str) -> bool {
PartialEq::eq(&self.string[..], other)
}
#[inline(always)]
fn ne(&self, other: &str) -> bool {
PartialEq::ne(&self.string[..], other)
}
}
impl PartialEq<InternedString> for str {
#[inline(always)]
fn eq(&self, other: &InternedString) -> bool {
PartialEq::eq(self, &other.string[..])
}
#[inline(always)]
fn ne(&self, other: &InternedString) -> bool {
PartialEq::ne(self, &other.string[..])
}
}
impl Decodable for InternedString {
fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> {
Ok(intern(&d.read_str()?).as_str())
}
}
impl Encodable for InternedString {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(&self.string)
}
}
/// Interns and returns the string contents of an identifier, using the
/// thread-local interner.
#[inline]
pub fn intern_and_get_ident(s: &str) -> InternedString {
intern(s).as_str()
}
/// Maps a string to its interned representation.
#[inline]
pub fn intern(s: &str) -> ast::Name {
with_ident_interner(|interner| interner.intern(s))
}
/// gensym's a new usize, using the current interner.
#[inline]
pub fn gensym(s: &str) -> ast::Name {
with_ident_interner(|interner| interner.gensym(s))
}
/// Maps a string to an identifier with an empty syntax context.
#[inline]
pub fn str_to_ident(s: &str) -> ast::Ident {
ast::Ident::with_empty_ctxt(intern(s))
}
/// Maps a string to a gensym'ed identifier.
#[inline]
pub fn gensym_ident(s: &str) -> ast::Ident {
ast::Ident::with_empty_ctxt(gensym(s))
}
// create a fresh name that maps to the same string as the old one.
// note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src)));
// that is, that the new name and the old one are connected to ptr_eq strings.
pub fn fresh_name(src: ast::Ident) -> ast::Name {
with_ident_interner(|interner| interner.gensym_copy(src.name))
// following: debug version. Could work in final except that it's incompatible with
// good error messages and uses of struct names in ambiguous could-be-binding
// locations. Also definitely destroys the guarantee given above about ptr_eq.
/*let num = rand::thread_rng().gen_uint_range(0,0xffff);
gensym(format!("{}_{}",ident_to_string(src),num))*/
}

View File

@ -19,7 +19,7 @@ use attr;
use codemap::{self, CodeMap}; use codemap::{self, CodeMap};
use syntax_pos::{self, BytePos}; use syntax_pos::{self, BytePos};
use errors; use errors;
use parse::token::{self, keywords, BinOpToken, Token}; use parse::token::{self, BinOpToken, Token};
use parse::lexer::comments; use parse::lexer::comments;
use parse; use parse;
use print::pp::{self, break_offset, word, space, zerobreak, hardbreak}; use print::pp::{self, break_offset, word, space, zerobreak, hardbreak};
@ -27,6 +27,7 @@ use print::pp::{Breaks, eof};
use print::pp::Breaks::{Consistent, Inconsistent}; use print::pp::Breaks::{Consistent, Inconsistent};
use ptr::P; use ptr::P;
use std_inject; use std_inject;
use symbol::{Symbol, keywords};
use tokenstream::{self, TokenTree}; use tokenstream::{self, TokenTree};
use std::ascii; use std::ascii;
@ -119,13 +120,13 @@ pub fn print_crate<'a>(cm: &'a CodeMap,
// of the feature gate, so we fake them up here. // of the feature gate, so we fake them up here.
// #![feature(prelude_import)] // #![feature(prelude_import)]
let prelude_import_meta = attr::mk_list_word_item(token::intern("prelude_import")); let prelude_import_meta = attr::mk_list_word_item(Symbol::intern("prelude_import"));
let list = attr::mk_list_item(token::intern("feature"), vec![prelude_import_meta]); let list = attr::mk_list_item(Symbol::intern("feature"), vec![prelude_import_meta]);
let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), list); let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), list);
try!(s.print_attribute(&fake_attr)); try!(s.print_attribute(&fake_attr));
// #![no_std] // #![no_std]
let no_std_meta = attr::mk_word_item(token::intern("no_std")); let no_std_meta = attr::mk_word_item(Symbol::intern("no_std"));
let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), no_std_meta); let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), no_std_meta);
try!(s.print_attribute(&fake_attr)); try!(s.print_attribute(&fake_attr));
} }

View File

@ -10,10 +10,10 @@
use ast; use ast;
use attr; use attr;
use symbol::{Symbol, keywords};
use syntax_pos::{DUMMY_SP, Span}; use syntax_pos::{DUMMY_SP, Span};
use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute}; use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute};
use parse::token::{intern, keywords}; use parse::ParseSess;
use parse::{token, ParseSess};
use ptr::P; use ptr::P;
/// Craft a span that will be ignored by the stability lint's /// Craft a span that will be ignored by the stability lint's
@ -23,7 +23,7 @@ fn ignored_span(sess: &ParseSess, sp: Span) -> Span {
let info = ExpnInfo { let info = ExpnInfo {
call_site: DUMMY_SP, call_site: DUMMY_SP,
callee: NameAndSpan { callee: NameAndSpan {
format: MacroAttribute(intern("std_inject")), format: MacroAttribute(Symbol::intern("std_inject")),
span: None, span: None,
allow_internal_unstable: true, allow_internal_unstable: true,
} }
@ -53,14 +53,14 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess,
None => return krate, None => return krate,
}; };
let crate_name = token::intern(&alt_std_name.unwrap_or(name.to_string())); let crate_name = Symbol::intern(&alt_std_name.unwrap_or(name.to_string()));
krate.module.items.insert(0, P(ast::Item { krate.module.items.insert(0, P(ast::Item {
attrs: vec![attr::mk_attr_outer(attr::mk_attr_id(), attrs: vec![attr::mk_attr_outer(attr::mk_attr_id(),
attr::mk_word_item(token::intern("macro_use")))], attr::mk_word_item(Symbol::intern("macro_use")))],
vis: ast::Visibility::Inherited, vis: ast::Visibility::Inherited,
node: ast::ItemKind::ExternCrate(Some(crate_name)), node: ast::ItemKind::ExternCrate(Some(crate_name)),
ident: token::str_to_ident(name), ident: ast::Ident::from_str(name),
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
span: DUMMY_SP, span: DUMMY_SP,
})); }));
@ -70,7 +70,7 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess,
attrs: vec![ast::Attribute { attrs: vec![ast::Attribute {
style: ast::AttrStyle::Outer, style: ast::AttrStyle::Outer,
value: ast::MetaItem { value: ast::MetaItem {
name: token::intern("prelude_import"), name: Symbol::intern("prelude_import"),
node: ast::MetaItemKind::Word, node: ast::MetaItemKind::Word,
span: span, span: span,
}, },
@ -82,7 +82,7 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess,
node: ast::ItemKind::Use(P(codemap::dummy_spanned(ast::ViewPathGlob(ast::Path { node: ast::ItemKind::Use(P(codemap::dummy_spanned(ast::ViewPathGlob(ast::Path {
global: false, global: false,
segments: vec![name, "prelude", "v1"].into_iter().map(|name| ast::PathSegment { segments: vec![name, "prelude", "v1"].into_iter().map(|name| ast::PathSegment {
identifier: token::str_to_ident(name), identifier: ast::Ident::from_str(name),
parameters: ast::PathParameters::none(), parameters: ast::PathParameters::none(),
}).collect(), }).collect(),
span: span, span: span,

339
src/libsyntax/symbol.rs Normal file
View File

@ -0,0 +1,339 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! An "interner" is a data structure that associates values with usize tags and
//! allows bidirectional lookup; i.e. given a value, one can easily find the
//! type, and vice versa.
use serialize::{Decodable, Decoder, Encodable, Encoder};
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt;
use std::rc::Rc;
/// A symbol is an interned or gensymed string.
///
/// Interned symbols with the same contents compare equal; gensymed symbols
/// get a fresh index each time, so they never compare equal to any earlier
/// symbol even when the string contents match.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Symbol(u32);

impl Symbol {
    /// Maps a string to its interned representation.
    pub fn intern(string: &str) -> Self {
        with_interner(|interner| interner.intern(string))
    }

    /// Gensyms a fresh symbol with the given string contents, using the
    /// current interner. The result is distinct from every existing symbol.
    pub fn gensym(string: &str) -> Self {
        with_interner(|interner| interner.gensym(string))
    }

    /// Returns the string contents as an `InternedString`. The backing `Rc`
    /// lives in the thread-local interner, so the result must not cross
    /// thread boundaries.
    pub fn as_str(self) -> InternedString {
        with_interner(|interner| InternedString { string: interner.get(self) })
    }

    /// The raw interner index backing this symbol.
    pub fn as_u32(self) -> u32 {
        self.0
    }
}
impl fmt::Debug for Symbol {
    // Shows both the contents and the raw index, e.g. `foo(42)`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}({})", self, self.0)
    }
}

impl fmt::Display for Symbol {
    // Displays as the underlying string contents.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&self.as_str(), f)
    }
}

impl Encodable for Symbol {
    // Symbols serialize as their string contents, not their index; indices
    // depend on interning order and are not stable across interners.
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        s.emit_str(&self.as_str())
    }
}

impl Decodable for Symbol {
    // Decoding re-interns the contents, so a decoded symbol is always an
    // interned (non-gensym) symbol.
    fn decode<D: Decoder>(d: &mut D) -> Result<Symbol, D::Error> {
        Ok(Symbol::intern(&d.read_str()?))
    }
}

impl<'a> PartialEq<&'a str> for Symbol {
    // Convenience comparison against string literals.
    fn eq(&self, other: &&str) -> bool {
        *self.as_str() == **other
    }
}
/// Bidirectional map between strings and `Symbol` indices.
///
/// `strings` is indexed by symbol id; `names` maps interned contents back to
/// their id. Gensymed entries appear only in `strings`, never in `names`.
#[derive(Default)]
pub struct Interner {
    names: HashMap<Rc<str>, Symbol>,
    strings: Vec<Rc<str>>,
}

impl Interner {
    /// Creates an empty interner.
    pub fn new() -> Self {
        Default::default()
    }

    /// Builds an interner whose first entries are `init`, in order.
    fn prefill(init: &[&str]) -> Self {
        let mut interner = Interner::new();
        for &entry in init {
            interner.intern(entry);
        }
        interner
    }

    /// Returns the existing symbol for `string`, or allocates the next index
    /// for it.
    pub fn intern(&mut self, string: &str) -> Symbol {
        if let Some(&existing) = self.names.get(string) {
            return existing;
        }

        let fresh = Symbol(self.strings.len() as u32);
        let owned = Rc::__from_str(string);
        self.strings.push(owned.clone());
        self.names.insert(owned, fresh);
        fresh
    }

    /// Allocates a brand-new symbol for `string` without recording it in
    /// `names`, so a later `intern` of the same contents cannot collide
    /// with it.
    fn gensym(&mut self, string: &str) -> Symbol {
        let fresh = Symbol(self.strings.len() as u32);
        self.strings.push(Rc::__from_str(string));
        fresh
    }

    /// Looks up the string contents of `name`.
    pub fn get(&self, name: Symbol) -> Rc<str> {
        self.strings[name.0 as usize].clone()
    }
}
// In this macro, there is the requirement that the name (the number) must be monotonically
// increasing by one in the special identifiers, starting at 0; the same holds for the keywords,
// except starting from the next number instead of zero.
//
// The macro expands to:
//  * a `keywords` module with one `Keyword` constant per entry, whose ident's
//    `Name` is the entry's index, and
//  * `Interner::fresh()`, which prefills the interner with every keyword
//    string in order, so the constant indices line up with the interned ids.
macro_rules! declare_keywords {(
    $( ($index: expr, $konst: ident, $string: expr) )*
) => {
    pub mod keywords {
        use ast;
        #[derive(Clone, Copy, PartialEq, Eq)]
        pub struct Keyword {
            ident: ast::Ident,
        }
        impl Keyword {
            #[inline] pub fn ident(self) -> ast::Ident { self.ident }
            #[inline] pub fn name(self) -> ast::Name { self.ident.name }
        }
        $(
            #[allow(non_upper_case_globals)]
            pub const $konst: Keyword = Keyword {
                ident: ast::Ident::with_empty_ctxt(ast::Name($index))
            };
        )*
    }

    impl Interner {
        fn fresh() -> Self {
            Interner::prefill(&[$($string,)*])
        }
    }
}}

// NB: leaving holes in the ident table is bad! a different ident will get
// interned with the id from the hole, but it will be between the min and max
// of the reserved words, and thus tagged as "reserved".
// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`,
// this should be rarely necessary though if the keywords are kept in alphabetic order.
declare_keywords! {
    // Invalid identifier
    (0,  Invalid,        "")

    // Strict keywords used in the language.
    (1,  As,             "as")
    (2,  Box,            "box")
    (3,  Break,          "break")
    (4,  Const,          "const")
    (5,  Continue,       "continue")
    (6,  Crate,          "crate")
    (7,  Else,           "else")
    (8,  Enum,           "enum")
    (9,  Extern,         "extern")
    (10, False,          "false")
    (11, Fn,             "fn")
    (12, For,            "for")
    (13, If,             "if")
    (14, Impl,           "impl")
    (15, In,             "in")
    (16, Let,            "let")
    (17, Loop,           "loop")
    (18, Match,          "match")
    (19, Mod,            "mod")
    (20, Move,           "move")
    (21, Mut,            "mut")
    (22, Pub,            "pub")
    (23, Ref,            "ref")
    (24, Return,         "return")
    (25, SelfValue,      "self")
    (26, SelfType,       "Self")
    (27, Static,         "static")
    (28, Struct,         "struct")
    (29, Super,          "super")
    (30, Trait,          "trait")
    (31, True,           "true")
    (32, Type,           "type")
    (33, Unsafe,         "unsafe")
    (34, Use,            "use")
    (35, Where,          "where")
    (36, While,          "while")

    // Keywords reserved for future use.
    (37, Abstract,       "abstract")
    (38, Alignof,        "alignof")
    (39, Become,         "become")
    (40, Do,             "do")
    (41, Final,          "final")
    (42, Macro,          "macro")
    (43, Offsetof,       "offsetof")
    (44, Override,       "override")
    (45, Priv,           "priv")
    (46, Proc,           "proc")
    (47, Pure,           "pure")
    (48, Sizeof,         "sizeof")
    (49, Typeof,         "typeof")
    (50, Unsized,        "unsized")
    (51, Virtual,        "virtual")
    (52, Yield,          "yield")

    // Weak keywords, have special meaning only in specific contexts.
    (53, Default,        "default")
    (54, StaticLifetime, "'static")
    (55, Union,          "union")
}
// If an interner exists in TLS, return it. Otherwise, prepare a fresh one.
// The interner is thread-local, so symbols created on one thread must not be
// resolved on another.
fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T {
    thread_local!(static INTERNER: RefCell<Interner> = {
        RefCell::new(Interner::fresh())
    });
    INTERNER.with(|interner| f(&mut *interner.borrow_mut()))
}

/// Reset the ident interner to its initial (keywords-only) state. Symbols
/// created since thread start will no longer resolve to their old contents.
pub fn reset_interner() {
    with_interner(|interner| *interner = Interner::fresh());
}
/// Represents a string stored in the thread-local interner. Because the
/// interner lives for the life of the thread, this can be safely treated as an
/// immortal string, as long as it never crosses between threads.
///
/// FIXME(pcwalton): You must be careful about what you do in the destructors
/// of objects stored in TLS, because they may run after the interner is
/// destroyed. In particular, they must not access string contents. This can
/// be fixed in the future by just leaking all strings until thread death
/// somehow.
#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)]
pub struct InternedString {
    string: Rc<str>,
}

impl InternedString {
    /// Wraps a `'static` string directly, without going through the interner.
    pub fn new(string: &'static str) -> InternedString {
        InternedString {
            string: Rc::__from_str(string),
        }
    }
}
impl ::std::ops::Deref for InternedString {
    type Target = str;
    // Lets an `InternedString` be used anywhere a `&str` is expected.
    fn deref(&self) -> &str { &self.string }
}

impl fmt::Debug for InternedString {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&self.string, f)
    }
}

impl fmt::Display for InternedString {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&self.string, f)
    }
}

// Symmetric string comparisons so `interned == "foo"` and `"foo" == interned`
// both work, against `&str` and `str` alike.
impl<'a> PartialEq<&'a str> for InternedString {
    fn eq(&self, other: & &'a str) -> bool {
        PartialEq::eq(&self.string[..], *other)
    }
}

impl<'a> PartialEq<InternedString> for &'a str {
    fn eq(&self, other: &InternedString) -> bool {
        PartialEq::eq(*self, &other.string[..])
    }
}

impl PartialEq<str> for InternedString {
    fn eq(&self, other: &str) -> bool {
        PartialEq::eq(&self.string[..], other)
    }
}

impl PartialEq<InternedString> for str {
    fn eq(&self, other: &InternedString) -> bool {
        PartialEq::eq(self, &other.string[..])
    }
}

impl Decodable for InternedString {
    // Decodes via the interner so the contents are backed by interner storage.
    fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> {
        Ok(Symbol::intern(&d.read_str()?).as_str())
    }
}

impl Encodable for InternedString {
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        s.emit_str(&self.string)
    }
}
/// Interns and returns the string contents of an identifier, using the
/// thread-local interner. Shorthand for `Symbol::intern(s).as_str()`.
#[inline]
pub fn intern_and_get_ident(s: &str) -> InternedString {
    Symbol::intern(s).as_str()
}
#[cfg(test)]
mod tests {
    use super::*;
    use ast::Name;

    /// Exercises intern/gensym index assignment: interned strings are
    /// deduplicated, gensyms always take the next free index.
    #[test]
    fn interner_tests() {
        let mut i: Interner = Interner::new();
        // first one is zero:
        assert_eq!(i.intern("dog"), Name(0));
        // re-use gets the same entry:
        assert_eq!(i.intern("dog"), Name(0));
        // different string gets a different #:
        assert_eq!(i.intern("cat"), Name(1));
        assert_eq!(i.intern("cat"), Name(1));
        // dog is still at zero
        assert_eq!(i.intern("dog"), Name(0));
        // gensym gets the next free index (2), even for a fresh string
        // (the old comment claimed 3, contradicting the assertion below):
        assert_eq!(i.gensym("zebra"), Name(2));
        // gensym of same string gets a new number each time:
        assert_eq!(i.gensym("zebra"), Name(3));
        // gensym of an *existing* interned string also gets a new number:
        assert_eq!(i.gensym("dog"), Name(4));
    }
}

View File

@ -34,11 +34,11 @@ use ext::expand::ExpansionConfig;
use fold::Folder; use fold::Folder;
use util::move_map::MoveMap; use util::move_map::MoveMap;
use fold; use fold;
use parse::token::{intern, keywords, InternedString};
use parse::{token, ParseSess}; use parse::{token, ParseSess};
use print::pprust; use print::pprust;
use ast; use ast::{self, Ident};
use ptr::P; use ptr::P;
use symbol::{self, Symbol, keywords, InternedString};
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
enum ShouldPanic { enum ShouldPanic {
@ -48,7 +48,7 @@ enum ShouldPanic {
struct Test { struct Test {
span: Span, span: Span,
path: Vec<ast::Ident> , path: Vec<Ident> ,
bench: bool, bench: bool,
ignore: bool, ignore: bool,
should_panic: ShouldPanic should_panic: ShouldPanic
@ -57,14 +57,14 @@ struct Test {
struct TestCtxt<'a> { struct TestCtxt<'a> {
sess: &'a ParseSess, sess: &'a ParseSess,
span_diagnostic: &'a errors::Handler, span_diagnostic: &'a errors::Handler,
path: Vec<ast::Ident>, path: Vec<Ident>,
ext_cx: ExtCtxt<'a>, ext_cx: ExtCtxt<'a>,
testfns: Vec<Test>, testfns: Vec<Test>,
reexport_test_harness_main: Option<InternedString>, reexport_test_harness_main: Option<InternedString>,
is_test_crate: bool, is_test_crate: bool,
// top-level re-export submodule, filled out after folding is finished // top-level re-export submodule, filled out after folding is finished
toplevel_reexport: Option<ast::Ident>, toplevel_reexport: Option<Ident>,
} }
// Traverse the crate, collecting all the test functions, eliding any // Traverse the crate, collecting all the test functions, eliding any
@ -91,10 +91,10 @@ pub fn modify_for_testing(sess: &ParseSess,
struct TestHarnessGenerator<'a> { struct TestHarnessGenerator<'a> {
cx: TestCtxt<'a>, cx: TestCtxt<'a>,
tests: Vec<ast::Ident>, tests: Vec<Ident>,
// submodule name, gensym'd identifier for re-exports // submodule name, gensym'd identifier for re-exports
tested_submods: Vec<(ast::Ident, ast::Ident)>, tested_submods: Vec<(Ident, Ident)>,
} }
impl<'a> fold::Folder for TestHarnessGenerator<'a> { impl<'a> fold::Folder for TestHarnessGenerator<'a> {
@ -191,8 +191,8 @@ impl fold::Folder for EntryPointCleaner {
EntryPointType::MainAttr | EntryPointType::MainAttr |
EntryPointType::Start => EntryPointType::Start =>
folded.map(|ast::Item {id, ident, attrs, node, vis, span}| { folded.map(|ast::Item {id, ident, attrs, node, vis, span}| {
let allow_str = token::intern("allow"); let allow_str = Symbol::intern("allow");
let dead_code_str = token::intern("dead_code"); let dead_code_str = Symbol::intern("dead_code");
let word_vec = vec![attr::mk_list_word_item(dead_code_str)]; let word_vec = vec![attr::mk_list_word_item(dead_code_str)];
let allow_dead_code_item = attr::mk_list_item(allow_str, word_vec); let allow_dead_code_item = attr::mk_list_item(allow_str, word_vec);
let allow_dead_code = attr::mk_attr_outer(attr::mk_attr_id(), let allow_dead_code = attr::mk_attr_outer(attr::mk_attr_id(),
@ -222,15 +222,18 @@ impl fold::Folder for EntryPointCleaner {
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac } fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac }
} }
fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec<ast::Ident>, fn mk_reexport_mod(cx: &mut TestCtxt,
tested_submods: Vec<(ast::Ident, ast::Ident)>) -> (P<ast::Item>, ast::Ident) { parent: ast::NodeId,
let super_ = token::str_to_ident("super"); tests: Vec<Ident>,
tested_submods: Vec<(Ident, Ident)>)
-> (P<ast::Item>, Ident) {
let super_ = Ident::from_str("super");
// Generate imports with `#[allow(private_in_public)]` to work around issue #36768. // Generate imports with `#[allow(private_in_public)]` to work around issue #36768.
let allow_private_in_public = cx.ext_cx.attribute(DUMMY_SP, cx.ext_cx.meta_list( let allow_private_in_public = cx.ext_cx.attribute(DUMMY_SP, cx.ext_cx.meta_list(
DUMMY_SP, DUMMY_SP,
token::intern("allow"), Symbol::intern("allow"),
vec![cx.ext_cx.meta_list_item_word(DUMMY_SP, token::intern("private_in_public"))], vec![cx.ext_cx.meta_list_item_word(DUMMY_SP, Symbol::intern("private_in_public"))],
)); ));
let items = tests.into_iter().map(|r| { let items = tests.into_iter().map(|r| {
cx.ext_cx.item_use_simple(DUMMY_SP, ast::Visibility::Public, cx.ext_cx.item_use_simple(DUMMY_SP, ast::Visibility::Public,
@ -247,7 +250,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec<ast::Ident
items: items, items: items,
}; };
let sym = token::gensym_ident("__test_reexports"); let sym = Ident::with_empty_ctxt(Symbol::gensym("__test_reexports"));
let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent }; let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent); cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent);
let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item { let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item {
@ -286,7 +289,7 @@ fn generate_test_harness(sess: &ParseSess,
cx.ext_cx.bt_push(ExpnInfo { cx.ext_cx.bt_push(ExpnInfo {
call_site: DUMMY_SP, call_site: DUMMY_SP,
callee: NameAndSpan { callee: NameAndSpan {
format: MacroAttribute(intern("test")), format: MacroAttribute(Symbol::intern("test")),
span: None, span: None,
allow_internal_unstable: false, allow_internal_unstable: false,
} }
@ -306,7 +309,7 @@ fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
let info = ExpnInfo { let info = ExpnInfo {
call_site: DUMMY_SP, call_site: DUMMY_SP,
callee: NameAndSpan { callee: NameAndSpan {
format: MacroAttribute(intern("test")), format: MacroAttribute(Symbol::intern("test")),
span: None, span: None,
allow_internal_unstable: true, allow_internal_unstable: true,
} }
@ -456,7 +459,7 @@ mod __test {
*/ */
fn mk_std(cx: &TestCtxt) -> P<ast::Item> { fn mk_std(cx: &TestCtxt) -> P<ast::Item> {
let id_test = token::str_to_ident("test"); let id_test = Ident::from_str("test");
let (vi, vis, ident) = if cx.is_test_crate { let (vi, vis, ident) = if cx.is_test_crate {
(ast::ItemKind::Use( (ast::ItemKind::Use(
P(nospan(ast::ViewPathSimple(id_test, P(nospan(ast::ViewPathSimple(id_test,
@ -487,16 +490,17 @@ fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
let ecx = &cx.ext_cx; let ecx = &cx.ext_cx;
// test::test_main_static // test::test_main_static
let test_main_path = ecx.path(sp, vec![token::str_to_ident("test"), let test_main_path =
token::str_to_ident("test_main_static")]); ecx.path(sp, vec![Ident::from_str("test"), Ident::from_str("test_main_static")]);
// test::test_main_static(...) // test::test_main_static(...)
let test_main_path_expr = ecx.expr_path(test_main_path); let test_main_path_expr = ecx.expr_path(test_main_path);
let tests_ident_expr = ecx.expr_ident(sp, token::str_to_ident("TESTS")); let tests_ident_expr = ecx.expr_ident(sp, Ident::from_str("TESTS"));
let call_test_main = ecx.expr_call(sp, test_main_path_expr, let call_test_main = ecx.expr_call(sp, test_main_path_expr,
vec![tests_ident_expr]); vec![tests_ident_expr]);
let call_test_main = ecx.stmt_expr(call_test_main); let call_test_main = ecx.stmt_expr(call_test_main);
// #![main] // #![main]
let main_meta = ecx.meta_word(sp, token::intern("main")); let main_meta = ecx.meta_word(sp, Symbol::intern("main"));
let main_attr = ecx.attribute(sp, main_meta); let main_attr = ecx.attribute(sp, main_meta);
// pub fn main() { ... } // pub fn main() { ... }
let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![])); let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![]));
@ -506,7 +510,7 @@ fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
dummy_spanned(ast::Constness::NotConst), dummy_spanned(ast::Constness::NotConst),
::abi::Abi::Rust, ast::Generics::default(), main_body); ::abi::Abi::Rust, ast::Generics::default(), main_body);
let main = P(ast::Item { let main = P(ast::Item {
ident: token::str_to_ident("main"), ident: Ident::from_str("main"),
attrs: vec![main_attr], attrs: vec![main_attr],
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
node: main, node: main,
@ -533,7 +537,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
items: vec![import, mainfn, tests], items: vec![import, mainfn, tests],
}; };
let item_ = ast::ItemKind::Mod(testmod); let item_ = ast::ItemKind::Mod(testmod);
let mod_ident = token::gensym_ident("__test"); let mod_ident = Ident::with_empty_ctxt(Symbol::gensym("__test"));
let mut expander = cx.ext_cx.monotonic_expander(); let mut expander = cx.ext_cx.monotonic_expander();
let item = expander.fold_item(P(ast::Item { let item = expander.fold_item(P(ast::Item {
@ -546,11 +550,11 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
})).pop().unwrap(); })).pop().unwrap();
let reexport = cx.reexport_test_harness_main.as_ref().map(|s| { let reexport = cx.reexport_test_harness_main.as_ref().map(|s| {
// building `use <ident> = __test::main` // building `use <ident> = __test::main`
let reexport_ident = token::str_to_ident(&s); let reexport_ident = Ident::from_str(&s);
let use_path = let use_path =
nospan(ast::ViewPathSimple(reexport_ident, nospan(ast::ViewPathSimple(reexport_ident,
path_node(vec![mod_ident, token::str_to_ident("main")]))); path_node(vec![mod_ident, Ident::from_str("main")])));
expander.fold_item(P(ast::Item { expander.fold_item(P(ast::Item {
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
@ -571,7 +575,7 @@ fn nospan<T>(t: T) -> codemap::Spanned<T> {
codemap::Spanned { node: t, span: DUMMY_SP } codemap::Spanned { node: t, span: DUMMY_SP }
} }
fn path_node(ids: Vec<ast::Ident> ) -> ast::Path { fn path_node(ids: Vec<Ident>) -> ast::Path {
ast::Path { ast::Path {
span: DUMMY_SP, span: DUMMY_SP,
global: false, global: false,
@ -582,7 +586,7 @@ fn path_node(ids: Vec<ast::Ident> ) -> ast::Path {
} }
} }
fn path_name_i(idents: &[ast::Ident]) -> String { fn path_name_i(idents: &[Ident]) -> String {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad") // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
idents.iter().map(|i| i.to_string()).collect::<Vec<String>>().join("::") idents.iter().map(|i| i.to_string()).collect::<Vec<String>>().join("::")
} }
@ -660,7 +664,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P<ast::Expr> {
// path to the #[test] function: "foo::bar::baz" // path to the #[test] function: "foo::bar::baz"
let path_string = path_name_i(&path[..]); let path_string = path_name_i(&path[..]);
let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[..])); let name_expr = ecx.expr_str(span, symbol::intern_and_get_ident(&path_string[..]));
// self::test::StaticTestName($name_expr) // self::test::StaticTestName($name_expr)
let name_expr = ecx.expr_call(span, let name_expr = ecx.expr_call(span,

View File

@ -34,6 +34,7 @@ use parse::lexer;
use parse; use parse;
use parse::token::{self, Token, Lit, Nonterminal}; use parse::token::{self, Token, Lit, Nonterminal};
use print::pprust; use print::pprust;
use symbol::{self, Symbol};
use std::fmt; use std::fmt;
use std::iter::*; use std::iter::*;
@ -173,10 +174,10 @@ impl TokenTree {
TokenTree::Delimited(sp, Rc::new(Delimited { TokenTree::Delimited(sp, Rc::new(Delimited {
delim: token::Bracket, delim: token::Bracket,
open_span: sp, open_span: sp,
tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))), tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
TokenTree::Token(sp, token::Eq), TokenTree::Token(sp, token::Eq),
TokenTree::Token(sp, token::Literal( TokenTree::Token(sp, token::Literal(
token::StrRaw(token::intern(&stripped), num_of_hashes), None))], token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))],
close_span: sp, close_span: sp,
})) }))
} }
@ -295,7 +296,7 @@ impl TokenTree {
pub fn maybe_str(&self) -> Option<ast::Lit> { pub fn maybe_str(&self) -> Option<ast::Lit> {
match *self { match *self {
TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => { TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())), let l = LitKind::Str(symbol::intern_and_get_ident(&parse::str_lit(&s.as_str())),
ast::StrStyle::Cooked); ast::StrStyle::Cooked);
Some(Spanned { Some(Spanned {
node: l, node: l,
@ -303,7 +304,7 @@ impl TokenTree {
}) })
} }
TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => { TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())), let l = LitKind::Str(symbol::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
ast::StrStyle::Raw(n)); ast::StrStyle::Raw(n));
Some(Spanned { Some(Spanned {
node: l, node: l,

View File

@ -1,111 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! An "interner" is a data structure that associates values with usize tags and
//! allows bidirectional lookup; i.e. given a value, one can easily find the
//! tag, and given a tag, one can recover the value.
use ast::Name;
use std::collections::HashMap;
use std::rc::Rc;
// Bidirectional map between strings and `Name` tags. `strings` is the
// authoritative store indexed by `Name.0`; `names` holds only the
// non-gensym entries so gensyms never collide on lookup.
#[derive(Default)]
pub struct Interner {
// reverse lookup (text -> Name) for interned, non-gensym entries only
names: HashMap<Rc<str>, Name>,
// forward lookup: Name(i) maps to strings[i]; includes gensym entries
strings: Vec<Rc<str>>,
}
/// When traits can extend traits, we should extend `Index<Name>` to get `[]`.
impl Interner {
    /// Creates an empty interner.
    pub fn new() -> Self {
        Interner::default()
    }

    /// Creates an interner pre-seeded with `init`; names are assigned in
    /// order starting from `Name(0)`.
    pub fn prefill(init: &[&str]) -> Self {
        let mut this = Interner::new();
        for &string in init {
            this.intern(string);
        }
        this
    }

    /// Interns `string`, returning the existing `Name` if the same text was
    /// interned before, or allocating a fresh one otherwise.
    pub fn intern(&mut self, string: &str) -> Name {
        if let Some(&name) = self.names.get(string) {
            return name;
        }

        let name = Name(self.strings.len() as u32);
        // `Rc::from` (stable `impl From<&str> for Rc<str>`) replaces the
        // removed unstable `Rc::__from_str` helper; both copy the text into
        // a new `Rc<str>` allocation.
        let string: Rc<str> = Rc::from(string);
        self.strings.push(string.clone());
        self.names.insert(string, name);
        name
    }

    /// Creates a fresh `Name` for `string` that never compares equal to any
    /// interned name, even one with identical text.
    pub fn gensym(&mut self, string: &str) -> Name {
        let gensym = Name(self.strings.len() as u32);
        // leave out of `names` to avoid colliding with interned entries
        self.strings.push(Rc::from(string));
        gensym
    }

    /// Create a gensym with the same name as an existing entry.
    pub fn gensym_copy(&mut self, name: Name) -> Name {
        let gensym = Name(self.strings.len() as u32);
        // leave out of `names` to avoid colliding; clones the `Rc` so the
        // text allocation is shared rather than copied
        let string = self.strings[name.0 as usize].clone();
        self.strings.push(string);
        gensym
    }

    /// Returns the string for `name`.
    ///
    /// # Panics
    /// Panics if `name` was not produced by this interner.
    pub fn get(&self, name: Name) -> Rc<str> {
        self.strings[name.0 as usize].clone()
    }

    /// Looks up an interned (non-gensym) name by its text.
    pub fn find(&self, string: &str) -> Option<Name> {
        self.names.get(string).cloned()
    }
}
#[cfg(test)]
mod tests {
use super::*;
use ast::Name;
// Exercises the interner invariants: intern deduplicates by text,
// gensym always allocates, and tags index into the string table in
// allocation order.
#[test]
fn interner_tests() {
let mut i: Interner = Interner::new();
// first one is zero:
assert_eq!(i.intern("dog"), Name(0));
// re-use gets the same entry:
assert_eq!(i.intern ("dog"), Name(0));
// different string gets a different #:
assert_eq!(i.intern("cat"), Name(1));
assert_eq!(i.intern("cat"), Name(1));
// dog is still at zero
assert_eq!(i.intern("dog"), Name(0));
// gensym always allocates a fresh tag, here 2:
assert_eq!(i.gensym("zebra"), Name(2));
// gensym of the same string still gets a new number:
assert_eq!(i.gensym("zebra"), Name(3));
// gensym of *existing* string gets new number:
assert_eq!(i.gensym("dog"), Name(4));
// gensym tests again with gensym_copy:
assert_eq!(i.gensym_copy(Name(2)), Name(5));
assert_eq!(&*i.get(Name(5)), "zebra");
assert_eq!(i.gensym_copy(Name(2)), Name(6));
assert_eq!(&*i.get(Name(6)), "zebra");
// every tag recovers its original text:
assert_eq!(&*i.get(Name(0)), "dog");
assert_eq!(&*i.get(Name(1)), "cat");
assert_eq!(&*i.get(Name(2)), "zebra");
assert_eq!(&*i.get(Name(3)), "zebra");
assert_eq!(&*i.get(Name(4)), "dog");
}
}

View File

@ -10,7 +10,7 @@
use ast::Name; use ast::Name;
use std::cmp; use std::cmp;
use parse::token::InternedString; use symbol::InternedString;
/// To find the Levenshtein distance between two strings /// To find the Levenshtein distance between two strings
pub fn lev_distance(a: &str, b: &str) -> usize { pub fn lev_distance(a: &str, b: &str) -> usize {

View File

@ -7,7 +7,8 @@
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use parse::token::{Token, BinOpToken, keywords}; use parse::token::{Token, BinOpToken};
use symbol::keywords;
use ast::BinOpKind; use ast::BinOpKind;
/// Associative operator with precedence. /// Associative operator with precedence.

View File

@ -17,9 +17,9 @@ use syntax::codemap;
use syntax::ext::base; use syntax::ext::base;
use syntax::ext::base::*; use syntax::ext::base::*;
use syntax::feature_gate; use syntax::feature_gate;
use syntax::parse::token::intern;
use syntax::parse::{self, token}; use syntax::parse::{self, token};
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::{self, Symbol, InternedString};
use syntax::ast::AsmDialect; use syntax::ast::AsmDialect;
use syntax_pos::Span; use syntax_pos::Span;
use syntax::tokenstream; use syntax::tokenstream;
@ -73,7 +73,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
}) })
.unwrap_or(tts.len()); .unwrap_or(tts.len());
let mut p = cx.new_parser_from_tts(&tts[first_colon..]); let mut p = cx.new_parser_from_tts(&tts[first_colon..]);
let mut asm = token::InternedString::new(""); let mut asm = InternedString::new("");
let mut asm_str_style = None; let mut asm_str_style = None;
let mut outputs = Vec::new(); let mut outputs = Vec::new();
let mut inputs = Vec::new(); let mut inputs = Vec::new();
@ -139,7 +139,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
let output = match ch.next() { let output = match ch.next() {
Some('=') => None, Some('=') => None,
Some('+') => { Some('+') => {
Some(token::intern_and_get_ident(&format!("={}", ch.as_str()))) Some(symbol::intern_and_get_ident(&format!("={}", ch.as_str())))
} }
_ => { _ => {
cx.span_err(span, "output operand constraint lacks '=' or '+'"); cx.span_err(span, "output operand constraint lacks '=' or '+'");
@ -242,7 +242,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
let expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { let expn_id = cx.codemap().record_expansion(codemap::ExpnInfo {
call_site: sp, call_site: sp,
callee: codemap::NameAndSpan { callee: codemap::NameAndSpan {
format: codemap::MacroBang(intern("asm")), format: codemap::MacroBang(Symbol::intern("asm")),
span: None, span: None,
allow_internal_unstable: false, allow_internal_unstable: false,
}, },
@ -251,7 +251,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
MacEager::expr(P(ast::Expr { MacEager::expr(P(ast::Expr {
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::InlineAsm(P(ast::InlineAsm { node: ast::ExprKind::InlineAsm(P(ast::InlineAsm {
asm: token::intern_and_get_ident(&asm), asm: symbol::intern_and_get_ident(&asm),
asm_str_style: asm_str_style.unwrap(), asm_str_style: asm_str_style.unwrap(),
outputs: outputs, outputs: outputs,
inputs: inputs, inputs: inputs,

View File

@ -11,7 +11,7 @@
use syntax::ast; use syntax::ast;
use syntax::ext::base; use syntax::ext::base;
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::parse::token; use syntax::symbol::intern_and_get_ident;
use syntax_pos; use syntax_pos;
use syntax::tokenstream; use syntax::tokenstream;
@ -57,5 +57,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
} }
} }
} }
base::MacEager::expr(cx.expr_str(sp, token::intern_and_get_ident(&accumulator[..]))) base::MacEager::expr(cx.expr_str(sp, intern_and_get_ident(&accumulator[..])))
} }

View File

@ -13,7 +13,6 @@ use syntax::ext::base::*;
use syntax::ext::base; use syntax::ext::base;
use syntax::feature_gate; use syntax::feature_gate;
use syntax::parse::token; use syntax::parse::token;
use syntax::parse::token::str_to_ident;
use syntax::ptr::P; use syntax::ptr::P;
use syntax_pos::Span; use syntax_pos::Span;
use syntax::tokenstream::TokenTree; use syntax::tokenstream::TokenTree;
@ -51,7 +50,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
} }
} }
} }
let res = str_to_ident(&res_str); let res = ast::Ident::from_str(&res_str);
struct Result { struct Result {
ident: ast::Ident, ident: ast::Ident,

View File

@ -15,8 +15,8 @@ use syntax::ast::{self, Expr, Generics, ItemKind, MetaItem, VariantData};
use syntax::attr; use syntax::attr;
use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::parse::token::{self, keywords};
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::{Symbol, keywords};
use syntax_pos::Span; use syntax_pos::Span;
pub fn expand_deriving_clone(cx: &mut ExtCtxt, pub fn expand_deriving_clone(cx: &mut ExtCtxt,
@ -74,7 +74,7 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt,
_ => cx.span_bug(span, "#[derive(Clone)] on trait item or impl item"), _ => cx.span_bug(span, "#[derive(Clone)] on trait item or impl item"),
} }
let inline = cx.meta_word(span, token::intern("inline")); let inline = cx.meta_word(span, Symbol::intern("inline"));
let attrs = vec![cx.attribute(span, inline)]; let attrs = vec![cx.attribute(span, inline)];
let trait_def = TraitDef { let trait_def = TraitDef {
span: span, span: span,

View File

@ -14,8 +14,8 @@ use deriving::generic::ty::*;
use syntax::ast::{self, Expr, MetaItem}; use syntax::ast::{self, Expr, MetaItem};
use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::parse::token;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax_pos::Span; use syntax_pos::Span;
pub fn expand_deriving_eq(cx: &mut ExtCtxt, pub fn expand_deriving_eq(cx: &mut ExtCtxt,
@ -23,9 +23,9 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt,
mitem: &MetaItem, mitem: &MetaItem,
item: &Annotatable, item: &Annotatable,
push: &mut FnMut(Annotatable)) { push: &mut FnMut(Annotatable)) {
let inline = cx.meta_word(span, token::intern("inline")); let inline = cx.meta_word(span, Symbol::intern("inline"));
let hidden = cx.meta_list_item_word(span, token::intern("hidden")); let hidden = cx.meta_list_item_word(span, Symbol::intern("hidden"));
let doc = cx.meta_list(span, token::intern("doc"), vec![hidden]); let doc = cx.meta_list(span, Symbol::intern("doc"), vec![hidden]);
let attrs = vec![cx.attribute(span, inline), cx.attribute(span, doc)]; let attrs = vec![cx.attribute(span, inline), cx.attribute(span, doc)];
let trait_def = TraitDef { let trait_def = TraitDef {
span: span, span: span,

View File

@ -14,8 +14,8 @@ use deriving::generic::ty::*;
use syntax::ast::{self, Expr, MetaItem}; use syntax::ast::{self, Expr, MetaItem};
use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::parse::token;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax_pos::Span; use syntax_pos::Span;
pub fn expand_deriving_ord(cx: &mut ExtCtxt, pub fn expand_deriving_ord(cx: &mut ExtCtxt,
@ -23,7 +23,7 @@ pub fn expand_deriving_ord(cx: &mut ExtCtxt,
mitem: &MetaItem, mitem: &MetaItem,
item: &Annotatable, item: &Annotatable,
push: &mut FnMut(Annotatable)) { push: &mut FnMut(Annotatable)) {
let inline = cx.meta_word(span, token::intern("inline")); let inline = cx.meta_word(span, Symbol::intern("inline"));
let attrs = vec![cx.attribute(span, inline)]; let attrs = vec![cx.attribute(span, inline)];
let trait_def = TraitDef { let trait_def = TraitDef {
span: span, span: span,

View File

@ -14,8 +14,8 @@ use deriving::generic::ty::*;
use syntax::ast::{BinOpKind, Expr, MetaItem}; use syntax::ast::{BinOpKind, Expr, MetaItem};
use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::parse::token;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax_pos::Span; use syntax_pos::Span;
pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt, pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt,
@ -64,7 +64,7 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt,
macro_rules! md { macro_rules! md {
($name:expr, $f:ident) => { { ($name:expr, $f:ident) => { {
let inline = cx.meta_word(span, token::intern("inline")); let inline = cx.meta_word(span, Symbol::intern("inline"));
let attrs = vec![cx.attribute(span, inline)]; let attrs = vec![cx.attribute(span, inline)];
MethodDef { MethodDef {
name: $name, name: $name,

View File

@ -16,8 +16,8 @@ use deriving::generic::ty::*;
use syntax::ast::{self, BinOpKind, Expr, MetaItem}; use syntax::ast::{self, BinOpKind, Expr, MetaItem};
use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::parse::token;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax_pos::Span; use syntax_pos::Span;
pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt,
@ -27,7 +27,7 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt,
push: &mut FnMut(Annotatable)) { push: &mut FnMut(Annotatable)) {
macro_rules! md { macro_rules! md {
($name:expr, $op:expr, $equal:expr) => { { ($name:expr, $op:expr, $equal:expr) => { {
let inline = cx.meta_word(span, token::intern("inline")); let inline = cx.meta_word(span, Symbol::intern("inline"));
let attrs = vec![cx.attribute(span, inline)]; let attrs = vec![cx.attribute(span, inline)];
MethodDef { MethodDef {
name: $name, name: $name,
@ -51,7 +51,7 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt,
vec![Box::new(ordering_ty)], vec![Box::new(ordering_ty)],
true)); true));
let inline = cx.meta_word(span, token::intern("inline")); let inline = cx.meta_word(span, Symbol::intern("inline"));
let attrs = vec![cx.attribute(span, inline)]; let attrs = vec![cx.attribute(span, inline)];
let partial_cmp_def = MethodDef { let partial_cmp_def = MethodDef {

View File

@ -11,11 +11,10 @@
use deriving::generic::*; use deriving::generic::*;
use deriving::generic::ty::*; use deriving::generic::ty::*;
use syntax::ast; use syntax::ast::{self, Ident};
use syntax::ast::{Expr, MetaItem}; use syntax::ast::{Expr, MetaItem};
use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::parse::token;
use syntax::ptr::P; use syntax::ptr::P;
use syntax_pos::{DUMMY_SP, Span}; use syntax_pos::{DUMMY_SP, Span};
@ -71,7 +70,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
let span = Span { expn_id: cx.backtrace(), ..span }; let span = Span { expn_id: cx.backtrace(), ..span };
let name = cx.expr_lit(span, let name = cx.expr_lit(span,
ast::LitKind::Str(ident.name.as_str(), ast::StrStyle::Cooked)); ast::LitKind::Str(ident.name.as_str(), ast::StrStyle::Cooked));
let builder = token::str_to_ident("builder"); let builder = Ident::from_str("builder");
let builder_expr = cx.expr_ident(span, builder.clone()); let builder_expr = cx.expr_ident(span, builder.clone());
let fmt = substr.nonself_args[0].clone(); let fmt = substr.nonself_args[0].clone();
@ -83,7 +82,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
if !is_struct { if !is_struct {
// tuple struct/"normal" variant // tuple struct/"normal" variant
let expr = let expr =
cx.expr_method_call(span, fmt, token::str_to_ident("debug_tuple"), vec![name]); cx.expr_method_call(span, fmt, Ident::from_str("debug_tuple"), vec![name]);
stmts.push(cx.stmt_let(DUMMY_SP, true, builder, expr)); stmts.push(cx.stmt_let(DUMMY_SP, true, builder, expr));
for field in fields { for field in fields {
@ -93,7 +92,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
let expr = cx.expr_method_call(span, let expr = cx.expr_method_call(span,
builder_expr.clone(), builder_expr.clone(),
token::str_to_ident("field"), Ident::from_str("field"),
vec![field]); vec![field]);
// Use `let _ = expr;` to avoid triggering the // Use `let _ = expr;` to avoid triggering the
@ -103,7 +102,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
} else { } else {
// normal struct/struct variant // normal struct/struct variant
let expr = let expr =
cx.expr_method_call(span, fmt, token::str_to_ident("debug_struct"), vec![name]); cx.expr_method_call(span, fmt, Ident::from_str("debug_struct"), vec![name]);
stmts.push(cx.stmt_let(DUMMY_SP, true, builder, expr)); stmts.push(cx.stmt_let(DUMMY_SP, true, builder, expr));
for field in fields { for field in fields {
@ -116,7 +115,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
let field = cx.expr_addr_of(field.span, field); let field = cx.expr_addr_of(field.span, field);
let expr = cx.expr_method_call(span, let expr = cx.expr_method_call(span,
builder_expr.clone(), builder_expr.clone(),
token::str_to_ident("field"), Ident::from_str("field"),
vec![name, field]); vec![name, field]);
stmts.push(stmt_let_undescore(cx, span, expr)); stmts.push(stmt_let_undescore(cx, span, expr));
} }
@ -126,7 +125,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
_ => unreachable!(), _ => unreachable!(),
}; };
let expr = cx.expr_method_call(span, builder_expr, token::str_to_ident("finish"), vec![]); let expr = cx.expr_method_call(span, builder_expr, Ident::from_str("finish"), vec![]);
stmts.push(cx.stmt_expr(expr)); stmts.push(cx.stmt_expr(expr));
let block = cx.block(span, stmts); let block = cx.block(span, stmts);

View File

@ -18,9 +18,9 @@ use syntax::ast;
use syntax::ast::{Expr, MetaItem, Mutability}; use syntax::ast::{Expr, MetaItem, Mutability};
use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::InternedString;
use syntax::symbol::intern_and_get_ident;
use syntax_pos::Span; use syntax_pos::Span;
pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt, pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt,
@ -202,10 +202,7 @@ fn decode_static_fields<F>(cx: &mut ExtCtxt,
let fields = fields.iter() let fields = fields.iter()
.enumerate() .enumerate()
.map(|(i, &span)| { .map(|(i, &span)| {
getarg(cx, getarg(cx, span, intern_and_get_ident(&format!("_field{}", i)), i)
span,
token::intern_and_get_ident(&format!("_field{}", i)),
i)
}) })
.collect(); .collect();

View File

@ -14,8 +14,8 @@ use deriving::generic::ty::*;
use syntax::ast::{Expr, MetaItem}; use syntax::ast::{Expr, MetaItem};
use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::parse::token;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax_pos::Span; use syntax_pos::Span;
pub fn expand_deriving_default(cx: &mut ExtCtxt, pub fn expand_deriving_default(cx: &mut ExtCtxt,
@ -23,7 +23,7 @@ pub fn expand_deriving_default(cx: &mut ExtCtxt,
mitem: &MetaItem, mitem: &MetaItem,
item: &Annotatable, item: &Annotatable,
push: &mut FnMut(Annotatable)) { push: &mut FnMut(Annotatable)) {
let inline = cx.meta_word(span, token::intern("inline")); let inline = cx.meta_word(span, Symbol::intern("inline"));
let attrs = vec![cx.attribute(span, inline)]; let attrs = vec![cx.attribute(span, inline)];
let trait_def = TraitDef { let trait_def = TraitDef {
span: span, span: span,

View File

@ -95,8 +95,8 @@ use deriving::generic::ty::*;
use syntax::ast::{Expr, ExprKind, MetaItem, Mutability}; use syntax::ast::{Expr, ExprKind, MetaItem, Mutability};
use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::parse::token;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::intern_and_get_ident;
use syntax_pos::Span; use syntax_pos::Span;
pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt, pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt,
@ -193,7 +193,7 @@ fn encodable_substructure(cx: &mut ExtCtxt,
for (i, &FieldInfo { name, ref self_, span, .. }) in fields.iter().enumerate() { for (i, &FieldInfo { name, ref self_, span, .. }) in fields.iter().enumerate() {
let name = match name { let name = match name {
Some(id) => id.name.as_str(), Some(id) => id.name.as_str(),
None => token::intern_and_get_ident(&format!("_field{}", i)), None => intern_and_get_ident(&format!("_field{}", i)),
}; };
let self_ref = cx.expr_addr_of(span, self_.clone()); let self_ref = cx.expr_addr_of(span, self_.clone());
let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]); let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]);

View File

@ -198,8 +198,8 @@ use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::codemap::{self, dummy_spanned, respan}; use syntax::codemap::{self, dummy_spanned, respan};
use syntax::util::move_map::MoveMap; use syntax::util::move_map::MoveMap;
use syntax::parse::token::{self, keywords};
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::{Symbol, keywords};
use syntax_pos::{DUMMY_SP, Span}; use syntax_pos::{DUMMY_SP, Span};
use errors::Handler; use errors::Handler;
@ -639,13 +639,13 @@ impl<'a> TraitDef<'a> {
let attr = cx.attribute(self.span, let attr = cx.attribute(self.span,
cx.meta_word(self.span, cx.meta_word(self.span,
token::intern("automatically_derived"))); Symbol::intern("automatically_derived")));
// Just mark it now since we know that it'll end up used downstream // Just mark it now since we know that it'll end up used downstream
attr::mark_used(&attr); attr::mark_used(&attr);
let opt_trait_ref = Some(trait_ref); let opt_trait_ref = Some(trait_ref);
let unused_qual = { let unused_qual = {
let word = cx.meta_list_item_word(self.span, token::intern("unused_qualifications")); let word = cx.meta_list_item_word(self.span, Symbol::intern("unused_qualifications"));
cx.attribute(self.span, cx.meta_list(self.span, token::intern("allow"), vec![word])) cx.attribute(self.span, cx.meta_list(self.span, Symbol::intern("allow"), vec![word]))
}; };
let mut a = vec![attr, unused_qual]; let mut a = vec![attr, unused_qual];

View File

@ -16,8 +16,8 @@ use syntax::codemap;
use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension}; use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension};
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::feature_gate::{self, emit_feature_err}; use syntax::feature_gate::{self, emit_feature_err};
use syntax::parse::token;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax_pos::Span; use syntax_pos::Span;
macro_rules! pathvec { macro_rules! pathvec {
@ -80,7 +80,7 @@ fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span {
expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { expn_id: cx.codemap().record_expansion(codemap::ExpnInfo {
call_site: span, call_site: span,
callee: codemap::NameAndSpan { callee: codemap::NameAndSpan {
format: codemap::MacroAttribute(token::intern(attr_name)), format: codemap::MacroAttribute(Symbol::intern(attr_name)),
span: Some(span), span: Some(span),
allow_internal_unstable: true, allow_internal_unstable: true,
}, },
@ -105,7 +105,7 @@ pub fn expand_derive(cx: &mut ExtCtxt,
} }
}; };
let derive = token::intern("derive"); let derive = Symbol::intern("derive");
let mut derive_attrs = Vec::new(); let mut derive_attrs = Vec::new();
item = item.map_attrs(|attrs| { item = item.map_attrs(|attrs| {
let partition = attrs.into_iter().partition(|attr| attr.name() == derive); let partition = attrs.into_iter().partition(|attr| attr.name() == derive);
@ -176,7 +176,7 @@ pub fn expand_derive(cx: &mut ExtCtxt,
feature_gate::EXPLAIN_CUSTOM_DERIVE); feature_gate::EXPLAIN_CUSTOM_DERIVE);
} else { } else {
cx.span_warn(titem.span, feature_gate::EXPLAIN_DEPR_CUSTOM_DERIVE); cx.span_warn(titem.span, feature_gate::EXPLAIN_DEPR_CUSTOM_DERIVE);
let name = token::intern(&format!("derive_{}", tname)); let name = Symbol::intern(&format!("derive_{}", tname));
let mitem = cx.meta_word(titem.span, name); let mitem = cx.meta_word(titem.span, name);
new_attributes.push(cx.attribute(mitem.span, mitem)); new_attributes.push(cx.attribute(mitem.span, mitem));
} }
@ -251,10 +251,10 @@ pub fn expand_derive(cx: &mut ExtCtxt,
// RFC #1445. `#[derive(PartialEq, Eq)]` adds a (trusted) // RFC #1445. `#[derive(PartialEq, Eq)]` adds a (trusted)
// `#[structural_match]` attribute. // `#[structural_match]` attribute.
let (partial_eq, eq) = (token::intern("PartialEq"), token::intern("Eq")); let (partial_eq, eq) = (Symbol::intern("PartialEq"), Symbol::intern("Eq"));
if traits.iter().any(|t| t.name() == Some(partial_eq)) && if traits.iter().any(|t| t.name() == Some(partial_eq)) &&
traits.iter().any(|t| t.name() == Some(eq)) { traits.iter().any(|t| t.name() == Some(eq)) {
let structural_match = token::intern("structural_match"); let structural_match = Symbol::intern("structural_match");
let span = allow_unstable(cx, span, "derive(PartialEq, Eq)"); let span = allow_unstable(cx, span, "derive(PartialEq, Eq)");
let meta = cx.meta_word(span, structural_match); let meta = cx.meta_word(span, structural_match);
item = item.map(|mut i| { item = item.map(|mut i| {
@ -267,10 +267,10 @@ pub fn expand_derive(cx: &mut ExtCtxt,
// the same as the copy implementation. // the same as the copy implementation.
// //
// Add a marker attribute here picked up during #[derive(Clone)] // Add a marker attribute here picked up during #[derive(Clone)]
let (copy, clone) = (token::intern("Copy"), token::intern("Clone")); let (copy, clone) = (Symbol::intern("Copy"), Symbol::intern("Clone"));
if traits.iter().any(|t| t.name() == Some(clone)) && if traits.iter().any(|t| t.name() == Some(clone)) &&
traits.iter().any(|t| t.name() == Some(copy)) { traits.iter().any(|t| t.name() == Some(copy)) {
let marker = token::intern("rustc_copy_clone_marker"); let marker = Symbol::intern("rustc_copy_clone_marker");
let span = allow_unstable(cx, span, "derive(Copy, Clone)"); let span = allow_unstable(cx, span, "derive(Copy, Clone)");
let meta = cx.meta_word(span, marker); let meta = cx.meta_word(span, marker);
item = item.map(|mut i| { item = item.map(|mut i| {
@ -282,14 +282,14 @@ pub fn expand_derive(cx: &mut ExtCtxt,
let mut items = Vec::new(); let mut items = Vec::new();
for titem in traits.iter() { for titem in traits.iter() {
let tname = titem.word().unwrap().name(); let tname = titem.word().unwrap().name();
let name = token::intern(&format!("derive({})", tname)); let name = Symbol::intern(&format!("derive({})", tname));
let mitem = cx.meta_word(titem.span, name); let mitem = cx.meta_word(titem.span, name);
let span = Span { let span = Span {
expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { expn_id: cx.codemap().record_expansion(codemap::ExpnInfo {
call_site: titem.span, call_site: titem.span,
callee: codemap::NameAndSpan { callee: codemap::NameAndSpan {
format: codemap::MacroAttribute(token::intern(&format!("derive({})", tname))), format: codemap::MacroAttribute(Symbol::intern(&format!("derive({})", tname))),
span: Some(titem.span), span: Some(titem.span),
allow_internal_unstable: true, allow_internal_unstable: true,
}, },
@ -408,7 +408,7 @@ fn call_intrinsic(cx: &ExtCtxt,
span.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { span.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo {
call_site: span, call_site: span,
callee: codemap::NameAndSpan { callee: codemap::NameAndSpan {
format: codemap::MacroAttribute(token::intern("derive")), format: codemap::MacroAttribute(Symbol::intern("derive")),
span: Some(span), span: Some(span),
allow_internal_unstable: true, allow_internal_unstable: true,
}, },

View File

@ -17,7 +17,7 @@ use syntax::ast;
use syntax::ext::base::*; use syntax::ext::base::*;
use syntax::ext::base; use syntax::ext::base;
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::parse::token; use syntax::symbol::intern_and_get_ident;
use syntax_pos::Span; use syntax_pos::Span;
use syntax::tokenstream; use syntax::tokenstream;
@ -49,7 +49,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt,
Ok(s) => { Ok(s) => {
cx.expr_call_global(sp, cx.expr_call_global(sp,
cx.std_path(&["option", "Option", "Some"]), cx.std_path(&["option", "Option", "Some"]),
vec![cx.expr_str(sp, token::intern_and_get_ident(&s[..]))]) vec![cx.expr_str(sp, intern_and_get_ident(&s[..]))])
} }
}; };
MacEager::expr(e) MacEager::expr(e)
@ -73,7 +73,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt,
Some((v, _style)) => v, Some((v, _style)) => v,
}; };
let msg = match exprs.next() { let msg = match exprs.next() {
None => token::intern_and_get_ident(&format!("environment variable `{}` not defined", var)), None => intern_and_get_ident(&format!("environment variable `{}` not defined", var)),
Some(second) => { Some(second) => {
match expr_to_string(cx, second, "expected string literal") { match expr_to_string(cx, second, "expected string literal") {
None => return DummyResult::expr(sp), None => return DummyResult::expr(sp),
@ -92,7 +92,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt,
cx.span_err(sp, &msg); cx.span_err(sp, &msg);
cx.expr_usize(sp, 0) cx.expr_usize(sp, 0)
} }
Ok(s) => cx.expr_str(sp, token::intern_and_get_ident(&s)), Ok(s) => cx.expr_str(sp, intern_and_get_ident(&s)),
}; };
MacEager::expr(e) MacEager::expr(e)
} }

View File

@ -17,8 +17,9 @@ use syntax::ast;
use syntax::ext::base::*; use syntax::ext::base::*;
use syntax::ext::base; use syntax::ext::base;
use syntax::ext::build::AstBuilder; use syntax::ext::build::AstBuilder;
use syntax::parse::token::{self, keywords}; use syntax::parse::token;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::symbol::{self, keywords};
use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::{Span, DUMMY_SP};
use syntax::tokenstream; use syntax::tokenstream;
@ -369,7 +370,7 @@ impl<'a, 'b> Context<'a, 'b> {
/// Translate the accumulated string literals to a literal expression /// Translate the accumulated string literals to a literal expression
fn trans_literal_string(&mut self) -> P<ast::Expr> { fn trans_literal_string(&mut self) -> P<ast::Expr> {
let sp = self.fmtsp; let sp = self.fmtsp;
let s = token::intern_and_get_ident(&self.literal); let s = symbol::intern_and_get_ident(&self.literal);
self.literal.clear(); self.literal.clear();
self.ecx.expr_str(sp, s) self.ecx.expr_str(sp, s)
} }

View File

@ -53,7 +53,7 @@ use std::rc::Rc;
use syntax::ast; use syntax::ast;
use syntax::ext::base::{MacroExpanderFn, NormalTT, IdentTT, MultiModifier, NamedSyntaxExtension}; use syntax::ext::base::{MacroExpanderFn, NormalTT, IdentTT, MultiModifier, NamedSyntaxExtension};
use syntax::ext::tt::macro_rules::MacroRulesExpander; use syntax::ext::tt::macro_rules::MacroRulesExpander;
use syntax::parse::token::intern; use syntax::symbol::Symbol;
pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver,
user_exts: Vec<NamedSyntaxExtension>, user_exts: Vec<NamedSyntaxExtension>,
@ -62,11 +62,11 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver,
resolver.add_ext(ast::Ident::with_empty_ctxt(name), Rc::new(ext)); resolver.add_ext(ast::Ident::with_empty_ctxt(name), Rc::new(ext));
}; };
register(intern("macro_rules"), IdentTT(Box::new(MacroRulesExpander), None, false)); register(Symbol::intern("macro_rules"), IdentTT(Box::new(MacroRulesExpander), None, false));
macro_rules! register { macro_rules! register {
($( $name:ident: $f:expr, )*) => { $( ($( $name:ident: $f:expr, )*) => { $(
register(intern(stringify!($name)), register(Symbol::intern(stringify!($name)),
NormalTT(Box::new($f as MacroExpanderFn), None, false)); NormalTT(Box::new($f as MacroExpanderFn), None, false));
)* } )* }
} }
@ -112,9 +112,10 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver,
} }
// format_args uses `unstable` things internally. // format_args uses `unstable` things internally.
register(intern("format_args"), NormalTT(Box::new(format::expand_format_args), None, true)); register(Symbol::intern("format_args"),
NormalTT(Box::new(format::expand_format_args), None, true));
register(intern("derive"), MultiModifier(Box::new(deriving::expand_derive))); register(Symbol::intern("derive"), MultiModifier(Box::new(deriving::expand_derive)));
for (name, ext) in user_exts { for (name, ext) in user_exts {
register(name, ext); register(name, ext);

Some files were not shown because too many files have changed in this diff Show More