From d2f8fb0a0a9dd98ea9d6a01620f1a21f425236c0 Mon Sep 17 00:00:00 2001
From: Jeffrey Seyfried
Date: Wed, 16 Nov 2016 08:21:52 +0000
Subject: [PATCH] Move `syntax::util::interner` -> `syntax::symbol`, cleanup.

---
 src/libproc_macro_plugin/qquote.rs | 52 +--
 src/libproc_macro_tokens/build.rs | 7 +-
 src/librustc/hir/lowering.rs | 16 +-
 src/librustc/hir/map/def_collector.rs | 6 +-
 src/librustc/hir/map/definitions.rs | 4 +-
 src/librustc/hir/mod.rs | 2 +-
 src/librustc/hir/print.rs | 3 +-
 src/librustc/infer/error_reporting.rs | 6 +-
 src/librustc/middle/const_val.rs | 2 +-
 src/librustc/middle/cstore.rs | 2 +-
 src/librustc/middle/lang_items.rs | 2 +-
 src/librustc/middle/liveness.rs | 2 +-
 src/librustc/middle/resolve_lifetime.rs | 2 +-
 src/librustc/middle/stability.rs | 2 +-
 src/librustc/middle/weak_lang_items.rs | 2 +-
 src/librustc/session/config.rs | 34 +-
 src/librustc/session/mod.rs | 8 +-
 src/librustc/traits/project.rs | 4 +-
 src/librustc/ty/context.rs | 14 +-
 src/librustc/ty/item_path.rs | 4 +-
 src/librustc/ty/mod.rs | 4 +-
 src/librustc/ty/sty.rs | 2 +-
 src/librustc/util/ppaux.rs | 4 +-
 src/librustc_driver/driver.rs | 9 +-
 src/librustc_driver/pretty.rs | 8 +-
 src/librustc_driver/target_features.rs | 4 +-
 .../calculate_svh/svh_visitor.rs | 13 +-
 .../persist/dirty_clean.rs | 4 +-
 src/librustc_lint/unused.rs | 4 +-
 src/librustc_metadata/creader.rs | 6 +-
 src/librustc_metadata/cstore_impl.rs | 15 +-
 src/librustc_metadata/encoder.rs | 6 +-
 src/librustc_mir/build/mod.rs | 2 +-
 src/librustc_mir/hair/cx/mod.rs | 6 +-
 src/librustc_passes/ast_validation.rs | 3 +-
 src/librustc_plugin/registry.rs | 4 +-
 src/librustc_resolve/build_reduced_graph.rs | 2 +-
 src/librustc_resolve/lib.rs | 8 +-
 src/librustc_save_analysis/dump_visitor.rs | 3 +-
 src/librustc_save_analysis/lib.rs | 5 +-
 src/librustc_save_analysis/span_utils.rs | 3 +-
 src/librustc_trans/assert_module_sources.rs | 4 +-
 src/librustc_trans/back/symbol_names.rs | 6 +-
 src/librustc_trans/common.rs | 5 +-
 src/librustc_trans/context.rs | 2 +-
 src/librustc_trans/debuginfo/metadata.rs | 5 +-
 src/librustc_trans/intrinsic.rs | 4 +-
 src/librustc_trans/mir/block.rs | 6 +-
 src/librustc_trans/mir/mod.rs | 2 +-
 src/librustc_trans/partitioning.rs | 6 +-
 src/librustc_typeck/astconv.rs | 4 +-
 src/librustc_typeck/check/autoderef.rs | 8 +-
 src/librustc_typeck/check/callee.rs | 8 +-
 src/librustc_typeck/check/intrinsic.rs | 6 +-
 src/librustc_typeck/check/mod.rs | 6 +-
 src/librustc_typeck/check/op.rs | 9 +-
 src/librustc_typeck/collect.rs | 4 +-
 src/libsyntax/ast.rs | 51 +--
 src/libsyntax/attr.rs | 10 +-
 src/libsyntax/diagnostics/plugin.rs | 3 +-
 src/libsyntax/ext/base.rs | 6 +-
 src/libsyntax/ext/build.rs | 5 +-
 src/libsyntax/ext/expand.rs | 7 +-
 src/libsyntax/ext/placeholders.rs | 4 +-
 src/libsyntax/ext/proc_macro_shim.rs | 3 +-
 src/libsyntax/ext/quote.rs | 11 +-
 src/libsyntax/ext/source_util.rs | 12 +-
 src/libsyntax/ext/tt/macro_rules.rs | 11 +-
 src/libsyntax/feature_gate.rs | 2 +-
 src/libsyntax/fold.rs | 3 +-
 src/libsyntax/lib.rs | 2 +-
 src/libsyntax/parse/lexer/mod.rs | 67 ++--
 src/libsyntax/parse/mod.rs | 8 +-
 src/libsyntax/parse/parser.rs | 11 +-
 src/libsyntax/parse/token.rs | 268 +-------------
 src/libsyntax/print/pprust.rs | 9 +-
 src/libsyntax/std_inject.rs | 16 +-
 src/libsyntax/symbol.rs | 339 ++++++++++++++++++
 src/libsyntax/test.rs | 62 ++--
 src/libsyntax/tokenstream.rs | 9 +-
 src/libsyntax/util/interner.rs | 111 ------
 src/libsyntax/util/lev_distance.rs | 2 +-
 src/libsyntax/util/parser.rs | 3 +-
 src/libsyntax_ext/asm.rs | 10 +-
 src/libsyntax_ext/concat.rs | 4 +-
 src/libsyntax_ext/concat_idents.rs | 3 +-
 src/libsyntax_ext/deriving/clone.rs | 4 +-
 src/libsyntax_ext/deriving/cmp/eq.rs | 8 +-
 src/libsyntax_ext/deriving/cmp/ord.rs | 4 +-
 src/libsyntax_ext/deriving/cmp/partial_eq.rs | 4 +-
 src/libsyntax_ext/deriving/cmp/partial_ord.rs | 6 +-
 src/libsyntax_ext/deriving/debug.rs | 15 +-
 src/libsyntax_ext/deriving/decodable.rs | 9 +-
 src/libsyntax_ext/deriving/default.rs | 4 +-
 src/libsyntax_ext/deriving/encodable.rs | 4 +-
 src/libsyntax_ext/deriving/generic/mod.rs | 8 +-
 src/libsyntax_ext/deriving/mod.rs | 22 +-
 src/libsyntax_ext/env.rs | 8 +-
 src/libsyntax_ext/format.rs | 5 +-
 src/libsyntax_ext/lib.rs | 11 +-
 src/libsyntax_ext/proc_macro_registrar.rs | 18 +-
 src/libsyntax_ext/trace_macros.rs | 2 +-
 102 files changed, 752 insertions(+), 806 deletions(-)
 create mode 100644 src/libsyntax/symbol.rs
 delete mode 100644 src/libsyntax/util/interner.rs

diff --git a/src/libproc_macro_plugin/qquote.rs b/src/libproc_macro_plugin/qquote.rs
index e5a3abc2ea9..1ae906e0aa4 100644
--- a/src/libproc_macro_plugin/qquote.rs
+++ b/src/libproc_macro_plugin/qquote.rs
@@ -34,8 +34,9 @@ use syntax::codemap::Span;
 use syntax::ext::base::*;
 use syntax::ext::base;
 use syntax::ext::proc_macro_shim::build_block_emitter;
-use syntax::parse::token::{self, Token, gensym_ident, str_to_ident};
+use syntax::parse::token::{self, Token};
 use syntax::print::pprust;
+use syntax::symbol::Symbol;
 use syntax::tokenstream::{TokenTree, TokenStream};
 
 // ____________________________________________________________________________________________
@@ -124,7 +125,7 @@ fn qquote_iter<'cx>(cx: &'cx mut ExtCtxt, depth: i64, ts: TokenStream) -> (Bindi
                 } // produce an error or something first
                 let exp = vec![exp.unwrap().to_owned()];
                 debug!("RHS: {:?}", exp.clone());
-                let new_id = gensym_ident("tmp");
+                let new_id = Ident::with_empty_ctxt(Symbol::gensym("tmp"));
                 debug!("RHS TS: {:?}", TokenStream::from_tts(exp.clone()));
                 debug!("RHS TS TT: {:?}", TokenStream::from_tts(exp.clone()).to_vec());
                 bindings.push((new_id, TokenStream::from_tts(exp)));
@@ -179,7 +180,7 @@ fn unravel_concats(tss: Vec) -> TokenStream {
     };
 
     while let Some(ts) = pushes.pop() {
-        output = build_fn_call(str_to_ident("concat"),
+        output = build_fn_call(Ident::from_str("concat"),
                                concat(concat(ts,
                                              from_tokens(vec![Token::Comma])),
                                       output));
@@ -209,18 +210,19 @@ fn convert_complex_tts<'cx>(cx: &'cx mut ExtCtxt, tts: Vec) -> (Bindings, T
             // FIXME handle sequence repetition tokens
             QTT::QDL(qdl) => {
                 debug!(" QDL: {:?} ", qdl.tts);
-                let new_id = gensym_ident("qdl_tmp");
+                let new_id = Ident::with_empty_ctxt(Symbol::gensym("qdl_tmp"));
                 let mut cct_rec = convert_complex_tts(cx, qdl.tts);
                 bindings.append(&mut cct_rec.0);
                 bindings.push((new_id, cct_rec.1));
 
                 let sep = build_delim_tok(qdl.delim);
 
-                pushes.push(build_mod_call(vec![str_to_ident("proc_macro_tokens"),
-                                                str_to_ident("build"),
-                                                str_to_ident("build_delimited")],
-                                           concat(from_tokens(vec![Token::Ident(new_id)]),
-                                                  concat(lex(","), sep))));
+                pushes.push(build_mod_call(
+                    vec![Ident::from_str("proc_macro_tokens"),
+                         Ident::from_str("build"),
+                         Ident::from_str("build_delimited")],
+                    concat(from_tokens(vec![Token::Ident(new_id)]), concat(lex(","), sep)),
+                ));
             }
             QTT::QIdent(t) => {
                 pushes.push(TokenStream::from_tts(vec![t]));
@@ -250,13 +252,13 @@ fn unravel(binds: Bindings) -> TokenStream {
 
 /// Checks if the Ident is `unquote`.
fn is_unquote(id: Ident) -> bool { - let qq = str_to_ident("unquote"); + let qq = Ident::from_str("unquote"); id.name == qq.name // We disregard context; unquote is _reserved_ } /// Checks if the Ident is `quote`. fn is_qquote(id: Ident) -> bool { - let qq = str_to_ident("qquote"); + let qq = Ident::from_str("qquote"); id.name == qq.name // We disregard context; qquote is _reserved_ } @@ -266,7 +268,8 @@ mod int_build { use syntax::ast::{self, Ident}; use syntax::codemap::{DUMMY_SP}; - use syntax::parse::token::{self, Token, keywords, str_to_ident}; + use syntax::parse::token::{self, Token, Lit}; + use syntax::symbol::keywords; use syntax::tokenstream::{TokenTree, TokenStream}; // ____________________________________________________________________________________________ @@ -277,19 +280,19 @@ mod int_build { build_paren_delimited(build_vec(build_token_tt(t)))) } - pub fn emit_lit(l: token::Lit, n: Option) -> TokenStream { + pub fn emit_lit(l: Lit, n: Option) -> TokenStream { let suf = match n { - Some(n) => format!("Some(ast::Name({}))", n.0), + Some(n) => format!("Some(ast::Name({}))", n.as_u32()), None => "None".to_string(), }; let lit = match l { - token::Lit::Byte(n) => format!("Lit::Byte(token::intern(\"{}\"))", n.to_string()), - token::Lit::Char(n) => format!("Lit::Char(token::intern(\"{}\"))", n.to_string()), - token::Lit::Integer(n) => format!("Lit::Integer(token::intern(\"{}\"))", n.to_string()), - token::Lit::Float(n) => format!("Lit::Float(token::intern(\"{}\"))", n.to_string()), - token::Lit::Str_(n) => format!("Lit::Str_(token::intern(\"{}\"))", n.to_string()), - token::Lit::ByteStr(n) => format!("Lit::ByteStr(token::intern(\"{}\"))", n.to_string()), + Lit::Byte(n) => format!("Lit::Byte(Symbol::intern(\"{}\"))", n.to_string()), + Lit::Char(n) => format!("Lit::Char(Symbol::intern(\"{}\"))", n.to_string()), + Lit::Float(n) => format!("Lit::Float(Symbol::intern(\"{}\"))", n.to_string()), + Lit::Str_(n) => format!("Lit::Str_(Symbol::intern(\"{}\"))", n.to_string()), + Lit::Integer(n) => format!("Lit::Integer(Symbol::intern(\"{}\"))", n.to_string()), + Lit::ByteStr(n) => format!("Lit::ByteStr(Symbol::intern(\"{}\"))", n.to_string()), _ => panic!("Unsupported literal"), }; @@ -388,9 +391,10 @@ mod int_build { Token::Underscore => lex("_"), Token::Literal(lit, sfx) => emit_lit(lit, sfx), // fix ident expansion information... somehow - Token::Ident(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))", ident.name)), - Token::Lifetime(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))", - ident.name)), + Token::Ident(ident) => + lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)), + Token::Lifetime(ident) => + lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)), _ => panic!("Unhandled case!"), } } @@ -408,7 +412,7 @@ mod int_build { /// Takes `input` and returns `vec![input]`. 
pub fn build_vec(ts: TokenStream) -> TokenStream { - build_mac_call(str_to_ident("vec"), ts) + build_mac_call(Ident::from_str("vec"), ts) // tts.clone().to_owned() } diff --git a/src/libproc_macro_tokens/build.rs b/src/libproc_macro_tokens/build.rs index 7b7590b863b..d39aba0aa77 100644 --- a/src/libproc_macro_tokens/build.rs +++ b/src/libproc_macro_tokens/build.rs @@ -13,7 +13,8 @@ extern crate syntax_pos; use syntax::ast::Ident; use syntax::codemap::DUMMY_SP; -use syntax::parse::token::{self, Token, keywords, str_to_ident}; +use syntax::parse::token::{self, Token}; +use syntax::symbol::keywords; use syntax::tokenstream::{self, TokenTree, TokenStream}; use std::rc::Rc; @@ -43,13 +44,13 @@ pub fn ident_eq(tident: &TokenTree, id: Ident) -> bool { /// Convert a `&str` into a Token. pub fn str_to_token_ident(s: &str) -> Token { - Token::Ident(str_to_ident(s)) + Token::Ident(Ident::from_str(s)) } /// Converts a keyword (from `syntax::parse::token::keywords`) into a Token that /// corresponds to it. pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token { - Token::Ident(str_to_ident(&kw.name().as_str()[..])) + Token::Ident(Ident::from_str(&kw.name().as_str()[..])) } // ____________________________________________________________________________________________ diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 05c4ae52180..9547e09afe0 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -53,8 +53,8 @@ use syntax::ast::*; use syntax::errors; use syntax::ptr::P; use syntax::codemap::{respan, Spanned}; -use syntax::parse::token; use syntax::std_inject; +use syntax::symbol::{Symbol, keywords}; use syntax::visit::{self, Visitor}; use syntax_pos::Span; @@ -149,7 +149,7 @@ impl<'a> LoweringContext<'a> { } fn str_to_ident(&self, s: &'static str) -> Name { - token::gensym(s) + Symbol::gensym(s) } fn with_parent_def(&mut self, parent_id: NodeId, f: F) -> T @@ -400,8 +400,8 @@ impl<'a> LoweringContext<'a> { // Don't expose `Self` (recovered "keyword used as ident" parse error). // `rustc::ty` expects `Self` to be only used for a trait's `Self`. // Instead, use gensym("Self") to create a distinct name that looks the same. 
- if name == token::keywords::SelfType.name() { - name = token::gensym("Self"); + if name == keywords::SelfType.name() { + name = Symbol::gensym("Self"); } hir::TyParam { @@ -540,7 +540,7 @@ impl<'a> LoweringContext<'a> { hir::StructField { span: f.span, id: f.id, - name: f.ident.map(|ident| ident.name).unwrap_or(token::intern(&index.to_string())), + name: f.ident.map(|ident| ident.name).unwrap_or(Symbol::intern(&index.to_string())), vis: self.lower_visibility(&f.vis), ty: self.lower_ty(&f.ty), attrs: self.lower_attrs(&f.attrs), @@ -1189,7 +1189,7 @@ impl<'a> LoweringContext<'a> { e.span, hir::PopUnstableBlock, ThinVec::new()); - this.field(token::intern(s), signal_block, ast_expr.span) + this.field(Symbol::intern(s), signal_block, ast_expr.span) }).collect(); let attrs = ast_expr.attrs.clone(); @@ -1953,9 +1953,9 @@ impl<'a> LoweringContext<'a> { fn std_path_components(&mut self, components: &[&str]) -> Vec { let mut v = Vec::new(); if let Some(s) = self.crate_root { - v.push(token::intern(s)); + v.push(Symbol::intern(s)); } - v.extend(components.iter().map(|s| token::intern(s))); + v.extend(components.iter().map(|s| Symbol::intern(s))); return v; } diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index 421843a7f11..0d4c0c9689f 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -19,7 +19,7 @@ use middle::cstore::InlinedItem; use syntax::ast::*; use syntax::ext::hygiene::Mark; use syntax::visit; -use syntax::parse::token::{self, keywords}; +use syntax::symbol::{Symbol, keywords}; /// Creates def ids for nodes in the HIR. pub struct DefCollector<'a> { @@ -169,7 +169,7 @@ impl<'a> visit::Visitor for DefCollector<'a> { this.with_parent(variant_def_index, |this| { for (index, field) in v.node.data.fields().iter().enumerate() { let name = field.ident.map(|ident| ident.name) - .unwrap_or_else(|| token::intern(&index.to_string())); + .unwrap_or_else(|| Symbol::intern(&index.to_string())); this.create_def(field.id, DefPathData::Field(name.as_str())); } @@ -188,7 +188,7 @@ impl<'a> visit::Visitor for DefCollector<'a> { for (index, field) in struct_def.fields().iter().enumerate() { let name = field.ident.map(|ident| ident.name.as_str()) - .unwrap_or(token::intern(&index.to_string()).as_str()); + .unwrap_or(Symbol::intern(&index.to_string()).as_str()); this.create_def(field.id, DefPathData::Field(name)); } } diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index 38157c7e565..eafcb8b6f43 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -14,7 +14,7 @@ use std::fmt::Write; use std::hash::{Hash, Hasher}; use std::collections::hash_map::DefaultHasher; use syntax::ast; -use syntax::parse::token::{self, InternedString}; +use syntax::symbol::{Symbol, InternedString}; use ty::TyCtxt; use util::nodemap::NodeMap; @@ -328,7 +328,7 @@ impl DefPathData { LifetimeDef(ref name) | EnumVariant(ref name) | Binding(ref name) | - Field(ref name) => Some(token::intern(name)), + Field(ref name) => Some(Symbol::intern(name)), Impl | CrateRoot | diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 9f5ff6914b0..9d8f6bb5c95 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -40,8 +40,8 @@ use syntax::codemap::{self, respan, Spanned}; use syntax::abi::Abi; use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, AsmDialect}; use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem}; -use 
syntax::parse::token::{keywords, InternedString}; use syntax::ptr::P; +use syntax::symbol::{keywords, InternedString}; use syntax::tokenstream::TokenTree; use syntax::util::ThinVec; diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index 807bbec3b58..ccb43644b31 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -13,13 +13,14 @@ pub use self::AnnNode::*; use syntax::abi::Abi; use syntax::ast; use syntax::codemap::{CodeMap, Spanned}; -use syntax::parse::token::{self, keywords, BinOpToken}; +use syntax::parse::token::{self, BinOpToken}; use syntax::parse::lexer::comments; use syntax::print::pp::{self, break_offset, word, space, hardbreak}; use syntax::print::pp::{Breaks, eof}; use syntax::print::pp::Breaks::{Consistent, Inconsistent}; use syntax::print::pprust::{self as ast_pp, PrintState}; use syntax::ptr::P; +use syntax::symbol::keywords; use syntax_pos::{self, BytePos}; use errors; diff --git a/src/librustc/infer/error_reporting.rs b/src/librustc/infer/error_reporting.rs index 58caac4034e..8db09d0b73d 100644 --- a/src/librustc/infer/error_reporting.rs +++ b/src/librustc/infer/error_reporting.rs @@ -91,8 +91,8 @@ use std::cell::{Cell, RefCell}; use std::char::from_u32; use std::fmt; use syntax::ast; -use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::{self, Pos, Span}; use errors::DiagnosticBuilder; @@ -1219,7 +1219,7 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> { names.push(lt_name); } names.sort(); - let name = token::intern(&names[0]); + let name = Symbol::intern(&names[0]); return (name_to_dummy_lifetime(name), Kept); } return (self.life_giver.give_lifetime(), Fresh); @@ -1931,7 +1931,7 @@ impl LifeGiver { let mut s = String::from("'"); s.push_str(&num_to_string(self.counter.get())); if !self.taken.contains(&s) { - lifetime = name_to_dummy_lifetime(token::intern(&s[..])); + lifetime = name_to_dummy_lifetime(Symbol::intern(&s)); self.generated.borrow_mut().push(lifetime); break; } diff --git a/src/librustc/middle/const_val.rs b/src/librustc/middle/const_val.rs index 3482971cd19..9677082a43a 100644 --- a/src/librustc/middle/const_val.rs +++ b/src/librustc/middle/const_val.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use syntax::parse::token::InternedString; +use syntax::symbol::InternedString; use syntax::ast; use std::rc::Rc; use hir::def_id::DefId; diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index 2ca2f69cf22..fea0a6edffe 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -39,7 +39,7 @@ use syntax::ast; use syntax::attr; use syntax::ext::base::SyntaxExtension; use syntax::ptr::P; -use syntax::parse::token::InternedString; +use syntax::symbol::InternedString; use syntax_pos::Span; use rustc_back::target::Target; use hir; diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index 9b4b1396669..1b61163574e 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -30,7 +30,7 @@ use middle::weak_lang_items; use util::nodemap::FxHashMap; use syntax::ast; -use syntax::parse::token::InternedString; +use syntax::symbol::InternedString; use hir::itemlikevisit::ItemLikeVisitor; use hir; diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index d381188d56b..cf53fcf2dac 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -123,8 +123,8 @@ use std::io::prelude::*; use std::io; use std::rc::Rc; use syntax::ast::{self, NodeId}; -use syntax::parse::token::keywords; use syntax::ptr::P; +use syntax::symbol::keywords; use syntax_pos::Span; use hir::Expr; diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index f682dfbf1be..a0043d0a886 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -27,7 +27,7 @@ use middle::region; use ty; use std::mem::replace; use syntax::ast; -use syntax::parse::token::keywords; +use syntax::symbol::keywords; use syntax_pos::Span; use util::nodemap::NodeMap; diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 7e4efc7ddca..4b8433e3bf6 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -21,7 +21,7 @@ use hir::def::Def; use hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, DefIndex, LOCAL_CRATE}; use ty::{self, TyCtxt, AdtKind}; use middle::privacy::AccessLevels; -use syntax::parse::token::InternedString; +use syntax::symbol::InternedString; use syntax_pos::{Span, DUMMY_SP}; use syntax::ast; use syntax::ast::{NodeId, Attribute}; diff --git a/src/librustc/middle/weak_lang_items.rs b/src/librustc/middle/weak_lang_items.rs index 12d32bf31b1..bee049df985 100644 --- a/src/librustc/middle/weak_lang_items.rs +++ b/src/librustc/middle/weak_lang_items.rs @@ -16,7 +16,7 @@ use middle::lang_items; use rustc_back::PanicStrategy; use syntax::ast; -use syntax::parse::token::InternedString; +use syntax::symbol::InternedString; use syntax_pos::Span; use hir::intravisit::Visitor; use hir::intravisit; diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index d22d5d915f6..9fd80acea89 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -25,8 +25,8 @@ use lint; use middle::cstore; use syntax::ast::{self, IntTy, UintTy}; -use syntax::parse::{self, token}; -use syntax::parse::token::InternedString; +use syntax::parse; +use syntax::symbol::{Symbol, InternedString}; use syntax::feature_gate::UnstableFeatures; use errors::{ColorConfig, FatalError, Handler}; @@ -927,7 +927,7 @@ pub fn default_lib_output() -> CrateType { } pub fn default_configuration(sess: &Session) -> ast::CrateConfig { - use 
syntax::parse::token::intern_and_get_ident as intern; + use syntax::symbol::intern_and_get_ident as intern; let end = &sess.target.target.target_endian; let arch = &sess.target.target.arch; @@ -947,33 +947,33 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig { let mut ret = HashSet::new(); // Target bindings. - ret.insert((token::intern("target_os"), Some(intern(os)))); - ret.insert((token::intern("target_family"), Some(fam.clone()))); - ret.insert((token::intern("target_arch"), Some(intern(arch)))); - ret.insert((token::intern("target_endian"), Some(intern(end)))); - ret.insert((token::intern("target_pointer_width"), Some(intern(wordsz)))); - ret.insert((token::intern("target_env"), Some(intern(env)))); - ret.insert((token::intern("target_vendor"), Some(intern(vendor)))); + ret.insert((Symbol::intern("target_os"), Some(intern(os)))); + ret.insert((Symbol::intern("target_family"), Some(fam.clone()))); + ret.insert((Symbol::intern("target_arch"), Some(intern(arch)))); + ret.insert((Symbol::intern("target_endian"), Some(intern(end)))); + ret.insert((Symbol::intern("target_pointer_width"), Some(intern(wordsz)))); + ret.insert((Symbol::intern("target_env"), Some(intern(env)))); + ret.insert((Symbol::intern("target_vendor"), Some(intern(vendor)))); if &fam == "windows" || &fam == "unix" { - ret.insert((token::intern(&fam), None)); + ret.insert((Symbol::intern(&fam), None)); } if sess.target.target.options.has_elf_tls { - ret.insert((token::intern("target_thread_local"), None)); + ret.insert((Symbol::intern("target_thread_local"), None)); } for &i in &[8, 16, 32, 64, 128] { if i <= max_atomic_width { let s = i.to_string(); - ret.insert((token::intern("target_has_atomic"), Some(intern(&s)))); + ret.insert((Symbol::intern("target_has_atomic"), Some(intern(&s)))); if &s == wordsz { - ret.insert((token::intern("target_has_atomic"), Some(intern("ptr")))); + ret.insert((Symbol::intern("target_has_atomic"), Some(intern("ptr")))); } } } if sess.opts.debug_assertions { - ret.insert((token::intern("debug_assertions"), None)); + ret.insert((Symbol::intern("debug_assertions"), None)); } if sess.opts.crate_types.contains(&CrateTypeProcMacro) { - ret.insert((token::intern("proc_macro"), None)); + ret.insert((Symbol::intern("proc_macro"), None)); } return ret; } @@ -986,7 +986,7 @@ pub fn build_configuration(sess: &Session, let default_cfg = default_configuration(sess); // If the user wants a test runner, then add the test cfg if sess.opts.test { - user_cfg.insert((token::intern("test"), None)); + user_cfg.insert((Symbol::intern("test"), None)); } user_cfg.extend(default_cfg.iter().cloned()); user_cfg diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 9becbd99eb3..a9a4553f176 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -28,7 +28,7 @@ use syntax::json::JsonEmitter; use syntax::feature_gate; use syntax::parse; use syntax::parse::ParseSess; -use syntax::parse::token; +use syntax::symbol::{Symbol, InternedString}; use syntax::{ast, codemap}; use syntax::feature_gate::AttributeType; use syntax_pos::{Span, MultiSpan}; @@ -89,7 +89,7 @@ pub struct Session { // forms a unique global identifier for the crate. It is used to allow // multiple crates with the same name to coexist. See the // trans::back::symbol_names module for more information. 
- pub crate_disambiguator: RefCell, + pub crate_disambiguator: RefCell, pub features: RefCell, /// The maximum recursion limit for potentially infinitely recursive @@ -129,7 +129,7 @@ pub struct PerfStats { } impl Session { - pub fn local_crate_disambiguator(&self) -> token::InternedString { + pub fn local_crate_disambiguator(&self) -> InternedString { self.crate_disambiguator.borrow().clone() } pub fn struct_span_warn<'a, S: Into>(&'a self, @@ -610,7 +610,7 @@ pub fn build_session_(sopts: config::Options, plugin_attributes: RefCell::new(Vec::new()), crate_types: RefCell::new(Vec::new()), dependency_formats: RefCell::new(FxHashMap()), - crate_disambiguator: RefCell::new(token::intern("").as_str()), + crate_disambiguator: RefCell::new(Symbol::intern("").as_str()), features: RefCell::new(feature_gate::Features::new()), recursion_limit: Cell::new(64), next_node_id: Cell::new(NodeId::new(1)), diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index a2d45fa2714..76bead99343 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -26,8 +26,8 @@ use super::util; use hir::def_id::DefId; use infer::InferOk; use rustc_data_structures::snapshot_map::{Snapshot, SnapshotMap}; -use syntax::parse::token; use syntax::ast; +use syntax::symbol::Symbol; use ty::subst::Subst; use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt}; use ty::fold::{TypeFoldable, TypeFolder}; @@ -1245,7 +1245,7 @@ fn confirm_callable_candidate<'cx, 'gcx, 'tcx>( let predicate = ty::Binder(ty::ProjectionPredicate { // (1) recreate binder here projection_ty: ty::ProjectionTy { trait_ref: trait_ref, - item_name: token::intern(FN_OUTPUT_NAME), + item_name: Symbol::intern(FN_OUTPUT_NAME), }, ty: ret_type }); diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 45450456e8a..d187fed732c 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -49,7 +49,7 @@ use std::rc::Rc; use std::iter; use syntax::ast::{self, Name, NodeId}; use syntax::attr; -use syntax::parse::token::{self, keywords}; +use syntax::symbol::{InternedString, intern_and_get_ident, keywords}; use hir; @@ -561,7 +561,7 @@ pub struct GlobalCtxt<'tcx> { /// The definite name of the current crate after taking into account /// attributes, commandline parameters, etc. - pub crate_name: token::InternedString, + pub crate_name: InternedString, /// Data layout specification for the current target. pub data_layout: TargetDataLayout, @@ -574,7 +574,7 @@ pub struct GlobalCtxt<'tcx> { /// Map from function to the `#[derive]` mode that it's defining. Only used /// by `proc-macro` crates. 
- pub derive_macros: RefCell>, + pub derive_macros: RefCell>, } impl<'tcx> GlobalCtxt<'tcx> { @@ -588,7 +588,7 @@ impl<'tcx> GlobalCtxt<'tcx> { } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - pub fn crate_name(self, cnum: CrateNum) -> token::InternedString { + pub fn crate_name(self, cnum: CrateNum) -> InternedString { if cnum == LOCAL_CRATE { self.crate_name.clone() } else { @@ -596,7 +596,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - pub fn original_crate_name(self, cnum: CrateNum) -> token::InternedString { + pub fn original_crate_name(self, cnum: CrateNum) -> InternedString { if cnum == LOCAL_CRATE { self.crate_name.clone() } else { @@ -604,7 +604,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - pub fn crate_disambiguator(self, cnum: CrateNum) -> token::InternedString { + pub fn crate_disambiguator(self, cnum: CrateNum) -> InternedString { if cnum == LOCAL_CRATE { self.sess.local_crate_disambiguator() } else { @@ -835,7 +835,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { custom_coerce_unsized_kinds: RefCell::new(DefIdMap()), cast_kinds: RefCell::new(NodeMap()), fragment_infos: RefCell::new(DefIdMap()), - crate_name: token::intern_and_get_ident(crate_name), + crate_name: intern_and_get_ident(crate_name), data_layout: data_layout, layout_cache: RefCell::new(FxHashMap()), layout_depth: Cell::new(0), diff --git a/src/librustc/ty/item_path.rs b/src/librustc/ty/item_path.rs index 8222583d9a7..92d5b734f17 100644 --- a/src/librustc/ty/item_path.rs +++ b/src/librustc/ty/item_path.rs @@ -12,7 +12,7 @@ use hir::map::DefPathData; use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use ty::{self, Ty, TyCtxt}; use syntax::ast; -use syntax::parse::token; +use syntax::symbol::Symbol; use std::cell::Cell; @@ -136,7 +136,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { cur_path.push(self.sess.cstore.def_key(cur_def) .disambiguated_data.data.get_opt_name().unwrap_or_else(|| - token::intern(""))); + Symbol::intern(""))); match visible_parent_map.get(&cur_def) { Some(&def) => cur_def = def, None => return false, diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index f5c23401a4e..f7f34c0af3a 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -44,7 +44,7 @@ use std::vec::IntoIter; use std::mem; use syntax::ast::{self, Name, NodeId}; use syntax::attr; -use syntax::parse::token::{self, InternedString}; +use syntax::symbol::{Symbol, InternedString}; use syntax_pos::{DUMMY_SP, Span}; use rustc_const_math::ConstInt; @@ -2344,7 +2344,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { if let Some(id) = self.map.as_local_node_id(id) { self.map.name(id) } else if id.index == CRATE_DEF_INDEX { - token::intern(&self.sess.cstore.original_crate_name(id.krate)) + Symbol::intern(&self.sess.cstore.original_crate_name(id.krate)) } else { let def_key = self.sess.cstore.def_key(id); // The name of a StructCtor is that of its struct parent. 
diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs index 56466d59682..81896ecfb53 100644 --- a/src/librustc/ty/sty.rs +++ b/src/librustc/ty/sty.rs @@ -23,7 +23,7 @@ use std::fmt; use std::ops; use syntax::abi; use syntax::ast::{self, Name}; -use syntax::parse::token::{keywords, InternedString}; +use syntax::symbol::{keywords, InternedString}; use serialize; diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index a63c7ba6a25..d04825d5604 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -25,8 +25,8 @@ use std::fmt; use std::usize; use syntax::abi::Abi; -use syntax::parse::token; use syntax::ast::CRATE_NODE_ID; +use syntax::symbol::Symbol; use hir; pub fn verbose() -> bool { @@ -284,7 +284,7 @@ fn in_binder<'a, 'gcx, 'tcx, T, U>(f: &mut fmt::Formatter, ty::BrAnon(_) | ty::BrFresh(_) | ty::BrEnv => { - let name = token::intern("'r"); + let name = Symbol::intern("'r"); let _ = write!(f, "{}", name); ty::BrNamed(tcx.map.local_def_id(CRATE_NODE_ID), name, diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 487bcf92f54..f864f083402 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -53,7 +53,8 @@ use std::path::{Path, PathBuf}; use syntax::{ast, diagnostics, visit}; use syntax::attr; use syntax::ext::base::ExtCtxt; -use syntax::parse::{self, PResult, token}; +use syntax::parse::{self, PResult}; +use syntax::symbol::{self, Symbol}; use syntax::util::node_count::NodeCounter; use syntax; use syntax_ext; @@ -561,7 +562,7 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session, *sess.crate_types.borrow_mut() = collect_crate_types(sess, &krate.attrs); *sess.crate_disambiguator.borrow_mut() = - token::intern(&compute_crate_disambiguator(sess)).as_str(); + Symbol::intern(&compute_crate_disambiguator(sess)).as_str(); time(time_passes, "recursion limit", || { middle::recursion_limit::update_recursion_limit(sess, &krate); @@ -1360,6 +1361,6 @@ pub fn build_output_filenames(input: &Input, pub fn reset_thread_local_state() { // These may be left in an incoherent state after a previous compile. syntax::ext::hygiene::reset_hygiene_data(); - // `clear_ident_interner` can be used to free memory, but it does not restore the initial state. - token::reset_ident_interner(); + // `clear_interner` can be used to free memory, but it does not restore the initial state. 
+ symbol::reset_interner(); } diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index ecbf28c1082..6c99c9d71b8 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -450,15 +450,15 @@ impl<'ast> PrinterSupport<'ast> for HygieneAnnotation<'ast> { impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> { fn post(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> { match node { - pprust::NodeIdent(&ast::Ident { name: ast::Name(nm), ctxt }) => { + pprust::NodeIdent(&ast::Ident { name, ctxt }) => { pp::space(&mut s.s)?; // FIXME #16420: this doesn't display the connections // between syntax contexts - s.synth_comment(format!("{}{:?}", nm, ctxt)) + s.synth_comment(format!("{}{:?}", name.as_u32(), ctxt)) } - pprust::NodeName(&ast::Name(nm)) => { + pprust::NodeName(&name) => { pp::space(&mut s.s)?; - s.synth_comment(nm.to_string()) + s.synth_comment(name.as_u32().to_string()) } _ => Ok(()), } diff --git a/src/librustc_driver/target_features.rs b/src/librustc_driver/target_features.rs index 34073dfd24d..677b9bca976 100644 --- a/src/librustc_driver/target_features.rs +++ b/src/librustc_driver/target_features.rs @@ -13,7 +13,7 @@ use llvm::LLVMRustHasFeature; use rustc::session::Session; use rustc_trans::back::write::create_target_machine; use syntax::feature_gate::UnstableFeatures; -use syntax::parse::token::{self, intern_and_get_ident as intern}; +use syntax::symbol::{Symbol, intern_and_get_ident as intern}; use libc::c_char; // WARNING: the features must be known to LLVM or the feature @@ -40,7 +40,7 @@ pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) { _ => &[], }; - let tf = token::intern("target_feature"); + let tf = Symbol::intern("target_feature"); for feat in whitelist { assert_eq!(feat.chars().last(), Some('\0')); if unsafe { LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } { diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs index a1a7c05f774..91646aa7f5d 100644 --- a/src/librustc_incremental/calculate_svh/svh_visitor.rs +++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs @@ -18,6 +18,7 @@ use syntax::abi::Abi; use syntax::ast::{self, Name, NodeId}; use syntax::attr; use syntax::parse::token; +use syntax::symbol::InternedString; use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; use syntax::tokenstream; use rustc::hir; @@ -169,8 +170,8 @@ enum SawAbiComponent<'a> { // FIXME (#14132): should we include (some function of) // ident.ctxt as well? 
- SawIdent(token::InternedString), - SawStructDef(token::InternedString), + SawIdent(InternedString), + SawStructDef(InternedString), SawLifetime, SawLifetimeDef(usize), @@ -232,11 +233,11 @@ enum SawAbiComponent<'a> { #[derive(Hash)] enum SawExprComponent<'a> { - SawExprLoop(Option), - SawExprField(token::InternedString), + SawExprLoop(Option), + SawExprField(InternedString), SawExprTupField(usize), - SawExprBreak(Option), - SawExprAgain(Option), + SawExprBreak(Option), + SawExprAgain(Option), SawExprBox, SawExprArray, diff --git a/src/librustc_incremental/persist/dirty_clean.rs b/src/librustc_incremental/persist/dirty_clean.rs index c22416e1120..4d759b66617 100644 --- a/src/librustc_incremental/persist/dirty_clean.rs +++ b/src/librustc_incremental/persist/dirty_clean.rs @@ -48,7 +48,7 @@ use rustc::hir::def_id::DefId; use rustc::hir::itemlikevisit::ItemLikeVisitor; use syntax::ast::{self, Attribute, NestedMetaItem}; use rustc_data_structures::fx::{FxHashSet, FxHashMap}; -use syntax::parse::token; +use syntax::symbol::Symbol; use syntax_pos::Span; use rustc::ty::TyCtxt; use ich::Fingerprint; @@ -286,7 +286,7 @@ fn check_config(tcx: TyCtxt, attr: &ast::Attribute) -> bool { fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> ast::Name { if let Some(value) = item.value_str() { - token::intern(&value) + Symbol::intern(&value) } else { let msg = if let Some(name) = item.name() { format!("associated value expected for `{}`", name) diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index 5ff131b0684..535c8094bed 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -20,7 +20,7 @@ use std::collections::hash_map::Entry::{Occupied, Vacant}; use syntax::ast; use syntax::attr; use syntax::feature_gate::{BUILTIN_ATTRIBUTES, AttributeType}; -use syntax::parse::token::keywords; +use syntax::symbol::keywords; use syntax::ptr::P; use syntax_pos::Span; @@ -48,7 +48,7 @@ impl UnusedMut { let name = path1.node; if let hir::BindByValue(hir::MutMutable) = mode { if !name.as_str().starts_with("_") { - match mutables.entry(name.0 as usize) { + match mutables.entry(name) { Vacant(entry) => { entry.insert(vec![id]); } diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 525b8f8227b..3b162f31a94 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -37,7 +37,7 @@ use syntax::abi::Abi; use syntax::attr; use syntax::ext::base::SyntaxExtension; use syntax::feature_gate::{self, GateIssue}; -use syntax::parse::token::{self, InternedString}; +use syntax::symbol::{Symbol, InternedString}; use syntax_pos::{Span, DUMMY_SP}; use log; @@ -582,11 +582,11 @@ impl<'a> CrateLoader<'a> { trait_name: &str, expand: fn(TokenStream) -> TokenStream, attributes: &[&'static str]) { - let attrs = attributes.iter().cloned().map(token::intern).collect(); + let attrs = attributes.iter().cloned().map(Symbol::intern).collect(); let derive = SyntaxExtension::CustomDerive( Box::new(CustomDerive::new(expand, attrs)) ); - self.0.push((token::intern(trait_name), Rc::new(derive))); + self.0.push((Symbol::intern(trait_name), Rc::new(derive))); } } diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 2018d829597..7ab7a32bbc4 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -31,7 +31,8 @@ use rustc_back::PanicStrategy; use std::path::PathBuf; use syntax::ast; use syntax::attr; -use syntax::parse::{token, new_parser_from_source_str}; +use 
syntax::parse::new_parser_from_source_str; +use syntax::symbol::{InternedString, intern_and_get_ident}; use syntax_pos::mk_sp; use rustc::hir::svh::Svh; use rustc_back::target::Target; @@ -262,14 +263,14 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { self.get_crate_data(cnum).panic_strategy() } - fn crate_name(&self, cnum: CrateNum) -> token::InternedString + fn crate_name(&self, cnum: CrateNum) -> InternedString { - token::intern_and_get_ident(&self.get_crate_data(cnum).name[..]) + intern_and_get_ident(&self.get_crate_data(cnum).name[..]) } - fn original_crate_name(&self, cnum: CrateNum) -> token::InternedString + fn original_crate_name(&self, cnum: CrateNum) -> InternedString { - token::intern_and_get_ident(&self.get_crate_data(cnum).name()) + intern_and_get_ident(&self.get_crate_data(cnum).name()) } fn extern_crate(&self, cnum: CrateNum) -> Option @@ -282,9 +283,9 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { self.get_crate_hash(cnum) } - fn crate_disambiguator(&self, cnum: CrateNum) -> token::InternedString + fn crate_disambiguator(&self, cnum: CrateNum) -> InternedString { - token::intern_and_get_ident(&self.get_crate_data(cnum).disambiguator()) + intern_and_get_ident(&self.get_crate_data(cnum).disambiguator()) } fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 3ab542442a1..c11694e561a 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -34,7 +34,7 @@ use std::rc::Rc; use std::u32; use syntax::ast::{self, CRATE_NODE_ID}; use syntax::attr; -use syntax; +use syntax::symbol::Symbol; use syntax_pos; use rustc::hir::{self, PatKind}; @@ -600,7 +600,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { if let PatKind::Binding(_, ref path1, _) = arg.pat.node { path1.node } else { - syntax::parse::token::intern("") + Symbol::intern("") } })) } @@ -1119,7 +1119,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let deps = get_ordered_deps(self.cstore); self.lazy_seq(deps.iter().map(|&(_, ref dep)| { CrateDep { - name: syntax::parse::token::intern(dep.name()), + name: Symbol::intern(dep.name()), hash: dep.hash(), kind: dep.dep_kind.get(), } diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 458a952543e..d281b2a32d0 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -18,7 +18,7 @@ use rustc::util::nodemap::NodeMap; use rustc::hir; use syntax::abi::Abi; use syntax::ast; -use syntax::parse::token::keywords; +use syntax::symbol::keywords; use syntax_pos::Span; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; diff --git a/src/librustc_mir/hair/cx/mod.rs b/src/librustc_mir/hair/cx/mod.rs index 038300068fc..83809ba7635 100644 --- a/src/librustc_mir/hair/cx/mod.rs +++ b/src/librustc_mir/hair/cx/mod.rs @@ -29,7 +29,7 @@ use rustc::hir::map::blocks::FnLikeNode; use rustc::infer::InferCtxt; use rustc::ty::subst::Subst; use rustc::ty::{self, Ty, TyCtxt}; -use syntax::parse::token; +use syntax::symbol::{Symbol, InternedString}; use rustc::hir; use rustc_const_math::{ConstInt, ConstUsize}; @@ -121,7 +121,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> { self.tcx.mk_nil() } - pub fn str_literal(&mut self, value: token::InternedString) -> Literal<'tcx> { + pub fn str_literal(&mut self, value: InternedString) -> Literal<'tcx> { Literal::Value { value: ConstVal::Str(value) } } @@ -145,7 +145,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> { self_ty: Ty<'tcx>, params: &[Ty<'tcx>]) -> (Ty<'tcx>, Literal<'tcx>) { - let 
method_name = token::intern(method_name); + let method_name = Symbol::intern(method_name); let substs = self.tcx.mk_substs_trait(self_ty, params); for item in self.tcx.associated_items(trait_def_id) { if item.kind == ty::AssociatedKind::Method && item.name == method_name { diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 828efbf3731..7c6dc6b0880 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -21,7 +21,8 @@ use rustc::session::Session; use syntax::ast::*; use syntax::attr; use syntax::codemap::Spanned; -use syntax::parse::token::{self, keywords}; +use syntax::parse::token; +use syntax::symbol::keywords; use syntax::visit::{self, Visitor}; use syntax_pos::Span; use errors; diff --git a/src/librustc_plugin/registry.rs b/src/librustc_plugin/registry.rs index 88e248e2efa..d9fd89b4a77 100644 --- a/src/librustc_plugin/registry.rs +++ b/src/librustc_plugin/registry.rs @@ -17,7 +17,7 @@ use rustc::mir::transform::MirMapPass; use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT}; use syntax::ext::base::MacroExpanderFn; -use syntax::parse::token; +use syntax::symbol::Symbol; use syntax::ast; use syntax::feature_gate::AttributeType; use syntax_pos::Span; @@ -121,7 +121,7 @@ impl<'a> Registry<'a> { /// It builds for you a `NormalTT` that calls `expander`, /// and also takes care of interning the macro's name. pub fn register_macro(&mut self, name: &str, expander: MacroExpanderFn) { - self.register_syntax_extension(token::intern(name), + self.register_syntax_extension(Symbol::intern(name), NormalTT(Box::new(expander), None, false)); } diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 8a973ab4d95..6ec825a0637 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -40,7 +40,7 @@ use syntax::ext::base::Determinacy::Undetermined; use syntax::ext::expand::mark_tts; use syntax::ext::hygiene::Mark; use syntax::ext::tt::macro_rules; -use syntax::parse::token::keywords; +use syntax::symbol::keywords; use syntax::visit::{self, Visitor}; use syntax_pos::{Span, DUMMY_SP}; diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index a3a60e4f6d7..ec6e762100d 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -57,7 +57,7 @@ use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::ast::{self, FloatTy}; use syntax::ast::{CRATE_NODE_ID, Name, NodeId, Ident, SpannedIdent, IntTy, UintTy}; use syntax::ext::base::SyntaxExtension; -use syntax::parse::token::{self, keywords}; +use syntax::symbol::{Symbol, InternedString, keywords}; use syntax::util::lev_distance::find_best_match_for_name; use syntax::visit::{self, FnKind, Visitor}; @@ -90,7 +90,7 @@ mod resolve_imports; enum SuggestionType { Macro(String), - Function(token::InternedString), + Function(InternedString), NotFound, } @@ -1039,7 +1039,7 @@ impl PrimitiveTypeTable { } fn intern(&mut self, string: &str, primitive_type: PrimTy) { - self.primitive_types.insert(token::intern(string), primitive_type); + self.primitive_types.insert(Symbol::intern(string), primitive_type); } } @@ -3606,7 +3606,7 @@ fn module_to_string(module: Module) -> String { } } else { // danger, shouldn't be ident? 
- names.push(token::str_to_ident("")); + names.push(Ident::from_str("")); collect_mod(names, module.parent.unwrap()); } } diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index e83c2359979..87b2b88fe33 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -39,7 +39,8 @@ use std::collections::hash_map::DefaultHasher; use std::hash::*; use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID}; -use syntax::parse::token::{self, keywords}; +use syntax::parse::token; +use syntax::symbol::keywords; use syntax::visit::{self, Visitor}; use syntax::print::pprust::{path_to_string, ty_to_string, bounds_to_string, generics_to_string}; use syntax::ptr::P; diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index e5589b04108..563d87a09b8 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -54,7 +54,8 @@ use std::path::{Path, PathBuf}; use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID}; use syntax::parse::lexer::comments::strip_doc_comment_decoration; -use syntax::parse::token::{self, keywords}; +use syntax::parse::token; +use syntax::symbol::{Symbol, keywords}; use syntax::visit::{self, Visitor}; use syntax::print::pprust::{ty_to_string, arg_to_string}; use syntax::codemap::MacroAttribute; @@ -728,7 +729,7 @@ impl Visitor for PathCollector { } fn docs_for_attrs(attrs: &[Attribute]) -> String { - let doc = token::intern("doc"); + let doc = Symbol::intern("doc"); let mut result = String::new(); for attr in attrs { diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index 9ec764b82f8..e06aefd865f 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -18,7 +18,8 @@ use std::path::Path; use syntax::ast; use syntax::parse::lexer::{self, Reader, StringReader}; -use syntax::parse::token::{self, keywords, Token}; +use syntax::parse::token::{self, Token}; +use syntax::symbol::keywords; use syntax_pos::*; #[derive(Clone)] diff --git a/src/librustc_trans/assert_module_sources.rs b/src/librustc_trans/assert_module_sources.rs index 0bdf66e4589..e3ebb0fabfc 100644 --- a/src/librustc_trans/assert_module_sources.rs +++ b/src/librustc_trans/assert_module_sources.rs @@ -29,7 +29,7 @@ use rustc::ty::TyCtxt; use syntax::ast; -use syntax::parse::token; +use syntax::symbol::Symbol; use {ModuleSource, ModuleTranslation}; @@ -117,7 +117,7 @@ impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> { for item in attr.meta_item_list().unwrap_or(&[]) { if item.check_name(name) { if let Some(value) = item.value_str() { - return token::intern(&value); + return Symbol::intern(&value); } else { self.tcx.sess.span_fatal( item.span, diff --git a/src/librustc_trans/back/symbol_names.rs b/src/librustc_trans/back/symbol_names.rs index 0ad663f05b4..30143b335d6 100644 --- a/src/librustc_trans/back/symbol_names.rs +++ b/src/librustc_trans/back/symbol_names.rs @@ -113,7 +113,7 @@ use rustc::hir::map::definitions::{DefPath, DefPathData}; use rustc::util::common::record_time; use syntax::attr; -use syntax::parse::token::{self, InternedString}; +use syntax::symbol::{Symbol, InternedString, intern_and_get_ident}; fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, @@ -275,7 +275,7 @@ impl ItemPathBuffer for SymbolPathBuffer { } fn push(&mut self, text: &str) { - self.names.push(token::intern(text).as_str()); + self.names.push(Symbol::intern(text).as_str()); 
} } @@ -288,7 +288,7 @@ pub fn exported_name_from_type_and_prefix<'a, 'tcx>(scx: &SharedCrateContext<'a, krate: LOCAL_CRATE, }; let hash = get_symbol_hash(scx, &empty_def_path, t, None); - let path = [token::intern_and_get_ident(prefix)]; + let path = [intern_and_get_ident(prefix)]; mangle(path.iter().cloned(), &hash) } diff --git a/src/librustc_trans/common.rs b/src/librustc_trans/common.rs index df70a6e8116..29925d964da 100644 --- a/src/librustc_trans/common.rs +++ b/src/librustc_trans/common.rs @@ -52,8 +52,7 @@ use std::ffi::CString; use std::cell::{Cell, RefCell, Ref}; use syntax::ast; -use syntax::parse::token::InternedString; -use syntax::parse::token; +use syntax::symbol::{Symbol, InternedString}; use syntax_pos::{DUMMY_SP, Span}; pub use context::{CrateContext, SharedCrateContext}; @@ -225,7 +224,7 @@ impl<'a, 'tcx> VariantInfo<'tcx> { VariantInfo { discr: Disr(0), fields: v.iter().enumerate().map(|(i, &t)| { - Field(token::intern(&i.to_string()), t) + Field(Symbol::intern(&i.to_string()), t) }).collect() } } diff --git a/src/librustc_trans/context.rs b/src/librustc_trans/context.rs index 7657fc7d1c8..771c5ef6d9d 100644 --- a/src/librustc_trans/context.rs +++ b/src/librustc_trans/context.rs @@ -42,7 +42,7 @@ use std::ptr; use std::rc::Rc; use std::str; use syntax::ast; -use syntax::parse::token::InternedString; +use syntax::symbol::InternedString; use abi::FnType; pub struct Stats { diff --git a/src/librustc_trans/debuginfo/metadata.rs b/src/librustc_trans/debuginfo/metadata.rs index 5b9ef78ddc2..cecf698fdc0 100644 --- a/src/librustc_trans/debuginfo/metadata.rs +++ b/src/librustc_trans/debuginfo/metadata.rs @@ -45,9 +45,8 @@ use std::fmt::Write; use std::path::Path; use std::ptr; use std::rc::Rc; -use syntax::util::interner::Interner; use syntax::ast; -use syntax::parse::token; +use syntax::symbol::{Interner, InternedString}; use syntax_pos::{self, Span}; @@ -1566,7 +1565,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, fn get_enum_discriminant_name(cx: &CrateContext, def_id: DefId) - -> token::InternedString { + -> InternedString { cx.tcx().item_name(def_id).as_str() } } diff --git a/src/librustc_trans/intrinsic.rs b/src/librustc_trans/intrinsic.rs index b1b09d3ca20..98294cb8695 100644 --- a/src/librustc_trans/intrinsic.rs +++ b/src/librustc_trans/intrinsic.rs @@ -30,7 +30,7 @@ use rustc::ty::{self, Ty}; use Disr; use rustc::hir; use syntax::ast; -use syntax::parse::token; +use syntax::symbol::intern_and_get_ident; use rustc::session::Session; use syntax_pos::{Span, DUMMY_SP}; @@ -208,7 +208,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, } (_, "type_name") => { let tp_ty = substs.type_at(0); - let ty_name = token::intern_and_get_ident(&tp_ty.to_string()); + let ty_name = intern_and_get_ident(&tp_ty.to_string()); C_str_slice(ccx, ty_name) } (_, "type_id") => { diff --git a/src/librustc_trans/mir/block.rs b/src/librustc_trans/mir/block.rs index b22bcf9825a..ce1507e8181 100644 --- a/src/librustc_trans/mir/block.rs +++ b/src/librustc_trans/mir/block.rs @@ -30,7 +30,7 @@ use glue; use type_::Type; use rustc_data_structures::fx::FxHashMap; -use syntax::parse::token; +use syntax::symbol::intern_and_get_ident; use super::{MirContext, LocalRef}; use super::analyze::CleanupKind; @@ -321,7 +321,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { // Get the location information. 
let loc = bcx.sess().codemap().lookup_char_pos(span.lo); - let filename = token::intern_and_get_ident(&loc.file.name); + let filename = intern_and_get_ident(&loc.file.name); let filename = C_str_slice(bcx.ccx(), filename); let line = C_u32(bcx.ccx(), loc.line as u32); @@ -351,7 +351,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { const_err) } mir::AssertMessage::Math(ref err) => { - let msg_str = token::intern_and_get_ident(err.description()); + let msg_str = intern_and_get_ident(err.description()); let msg_str = C_str_slice(bcx.ccx(), msg_str); let msg_file_line = C_struct(bcx.ccx(), &[msg_str, filename, line], diff --git a/src/librustc_trans/mir/mod.rs b/src/librustc_trans/mir/mod.rs index 12b17c26cbc..12cbfcef7d2 100644 --- a/src/librustc_trans/mir/mod.rs +++ b/src/librustc_trans/mir/mod.rs @@ -21,7 +21,7 @@ use machine; use type_of; use syntax_pos::{DUMMY_SP, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; -use syntax::parse::token::keywords; +use syntax::symbol::keywords; use std::cell::Ref; use std::iter; diff --git a/src/librustc_trans/partitioning.rs b/src/librustc_trans/partitioning.rs index 09a1cbd319a..06b6c950293 100644 --- a/src/librustc_trans/partitioning.rs +++ b/src/librustc_trans/partitioning.rs @@ -132,7 +132,7 @@ use std::sync::Arc; use std::collections::hash_map::DefaultHasher; use symbol_map::SymbolMap; use syntax::ast::NodeId; -use syntax::parse::token::{self, InternedString}; +use syntax::symbol::{InternedString, intern_and_get_ident}; use trans_item::TransItem; use util::nodemap::{FxHashMap, FxHashSet}; @@ -542,11 +542,11 @@ fn compute_codegen_unit_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, mod_path.push_str(".volatile"); } - return token::intern_and_get_ident(&mod_path[..]); + return intern_and_get_ident(&mod_path[..]); } fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString { - token::intern_and_get_ident(&format!("{}{}{}", + intern_and_get_ident(&format!("{}{}{}", crate_name, NUMBERED_CODEGEN_UNIT_MARKER, index)[..]) diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 9bde6b0c4d9..adb660c25ae 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -71,7 +71,7 @@ use util::nodemap::{NodeMap, FxHashSet}; use std::cell::RefCell; use syntax::{abi, ast}; use syntax::feature_gate::{GateIssue, emit_feature_err}; -use syntax::parse::token::{self, keywords}; +use syntax::symbol::{Symbol, keywords}; use syntax_pos::{Span, Pos}; use errors::DiagnosticBuilder; @@ -645,7 +645,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { }; let output_binding = ConvertedBinding { - item_name: token::intern(FN_OUTPUT_NAME), + item_name: Symbol::intern(FN_OUTPUT_NAME), ty: output, span: output_span }; diff --git a/src/librustc_typeck/check/autoderef.rs b/src/librustc_typeck/check/autoderef.rs index 900c22a8176..e72dba858c5 100644 --- a/src/librustc_typeck/check/autoderef.rs +++ b/src/librustc_typeck/check/autoderef.rs @@ -20,7 +20,7 @@ use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue}; use rustc::hir; use syntax_pos::Span; -use syntax::parse::token; +use syntax::symbol::Symbol; #[derive(Copy, Clone, Debug)] enum AutoderefKind { @@ -120,7 +120,7 @@ impl<'a, 'gcx, 'tcx> Autoderef<'a, 'gcx, 'tcx> { let normalized = traits::normalize_projection_type(&mut selcx, ty::ProjectionTy { trait_ref: trait_ref, - item_name: token::intern("Target"), + item_name: Symbol::intern("Target"), }, cause, 0); @@ -198,7 +198,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { (PreferMutLvalue, Some(trait_did)) => { 
self.lookup_method_in_trait(span, base_expr, - token::intern("deref_mut"), + Symbol::intern("deref_mut"), trait_did, base_ty, None) @@ -211,7 +211,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { (None, Some(trait_did)) => { self.lookup_method_in_trait(span, base_expr, - token::intern("deref"), + Symbol::intern("deref"), trait_did, base_ty, None) diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index 7606a5b7a4d..548f37cea06 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -16,7 +16,7 @@ use hir::def_id::{DefId, LOCAL_CRATE}; use hir::print; use rustc::{infer, traits}; use rustc::ty::{self, LvaluePreference, Ty}; -use syntax::parse::token; +use syntax::symbol::Symbol; use syntax::ptr::P; use syntax_pos::Span; @@ -160,9 +160,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { -> Option> { // Try the options that are least restrictive on the caller first. for &(opt_trait_def_id, method_name) in - &[(self.tcx.lang_items.fn_trait(), token::intern("call")), - (self.tcx.lang_items.fn_mut_trait(), token::intern("call_mut")), - (self.tcx.lang_items.fn_once_trait(), token::intern("call_once"))] { + &[(self.tcx.lang_items.fn_trait(), Symbol::intern("call")), + (self.tcx.lang_items.fn_mut_trait(), Symbol::intern("call_mut")), + (self.tcx.lang_items.fn_once_trait(), Symbol::intern("call_once"))] { let trait_def_id = match opt_trait_def_id { Some(def_id) => def_id, None => continue, diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs index 77106b8b0c3..a07573a7b9e 100644 --- a/src/librustc_typeck/check/intrinsic.rs +++ b/src/librustc_typeck/check/intrinsic.rs @@ -21,7 +21,7 @@ use {CrateCtxt, require_same_types}; use syntax::abi::Abi; use syntax::ast; -use syntax::parse::token; +use syntax::symbol::Symbol; use syntax_pos::Span; use rustc::hir; @@ -75,7 +75,7 @@ fn equate_intrinsic_type<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, /// and in libcore/intrinsics.rs pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { fn param<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, n: u32) -> Ty<'tcx> { - let name = token::intern(&format!("P{}", n)); + let name = Symbol::intern(&format!("P{}", n)); ccx.tcx.mk_param(n, name) } @@ -326,7 +326,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { pub fn check_platform_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { let param = |n| { - let name = token::intern(&format!("P{}", n)); + let name = Symbol::intern(&format!("P{}", n)); ccx.tcx.mk_param(n, name) }; diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index f08178e49fb..7f03c51b9ea 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -115,8 +115,8 @@ use syntax::ast; use syntax::attr; use syntax::codemap::{self, original_sp, Spanned}; use syntax::feature_gate::{GateIssue, emit_feature_err}; -use syntax::parse::token::{self, InternedString, keywords}; use syntax::ptr::P; +use syntax::symbol::{Symbol, InternedString, keywords}; use syntax::util::lev_distance::find_best_match_for_name; use syntax_pos::{self, BytePos, Span}; @@ -2369,7 +2369,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { (PreferMutLvalue, Some(trait_did)) => { self.lookup_method_in_trait_adjusted(expr.span, Some(&base_expr), - token::intern("index_mut"), + Symbol::intern("index_mut"), trait_did, autoderefs, unsize, @@ -2384,7 +2384,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { (None, Some(trait_did)) => { 
self.lookup_method_in_trait_adjusted(expr.span, Some(&base_expr), - token::intern("index"), + Symbol::intern("index"), trait_did, autoderefs, unsize, diff --git a/src/librustc_typeck/check/op.rs b/src/librustc_typeck/check/op.rs index 8b4975b7e3a..adb8c6be42b 100644 --- a/src/librustc_typeck/check/op.rs +++ b/src/librustc_typeck/check/op.rs @@ -14,7 +14,7 @@ use super::FnCtxt; use hir::def_id::DefId; use rustc::ty::{Ty, TypeFoldable, PreferMutLvalue}; use syntax::ast; -use syntax::parse::token; +use syntax::symbol::Symbol; use rustc::hir; impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { @@ -182,7 +182,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let rhs_ty_var = self.next_ty_var(); let return_ty = match self.lookup_op_method(expr, lhs_ty, vec![rhs_ty_var], - token::intern(name), trait_def_id, + Symbol::intern(name), trait_def_id, lhs_expr) { Ok(return_ty) => return_ty, Err(()) => { @@ -248,9 +248,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { -> Ty<'tcx> { assert!(op.is_by_value()); - match self.lookup_op_method(ex, operand_ty, vec![], - token::intern(mname), trait_did, - operand_expr) { + let mname = Symbol::intern(mname); + match self.lookup_op_method(ex, operand_ty, vec![], mname, trait_did, operand_expr) { Ok(t) => t, Err(()) => { self.type_error_message(ex.span, |actual| { diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 535b6bcdcba..48d79a3ba4c 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -79,7 +79,7 @@ use rustc_const_math::ConstInt; use std::cell::RefCell; use syntax::{abi, ast, attr}; -use syntax::parse::token::{self, keywords}; +use syntax::symbol::{Symbol, keywords}; use syntax_pos::Span; use rustc::hir::{self, map as hir_map, print as pprust}; @@ -585,7 +585,7 @@ fn convert_closure<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let upvar_decls : Vec<_> = tcx.with_freevars(node_id, |fv| { fv.iter().enumerate().map(|(i, _)| ty::TypeParameterDef { index: (base_generics.count() as u32) + (i as u32), - name: token::intern(""), + name: Symbol::intern(""), def_id: def_id, default_def_id: base_def_id, default: None, diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 45970a63ab4..a001985ded9 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -14,15 +14,16 @@ pub use self::TyParamBound::*; pub use self::UnsafeSource::*; pub use self::ViewPath_::*; pub use self::PathParameters::*; +pub use symbol::Symbol as Name; pub use util::ThinVec; use syntax_pos::{mk_sp, Span, DUMMY_SP, ExpnId}; use codemap::{respan, Spanned}; use abi::Abi; use ext::hygiene::SyntaxContext; -use parse::token::{self, keywords, InternedString}; use print::pprust; use ptr::P; +use symbol::{Symbol, keywords, InternedString}; use tokenstream::{TokenTree}; use std::collections::HashSet; @@ -32,60 +33,24 @@ use std::u32; use serialize::{self, Encodable, Decodable, Encoder, Decoder}; -/// A name is a part of an identifier, representing a string or gensym. It's -/// the result of interning. 
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Name(pub u32); - /// An identifier contains a Name (index into the interner /// table) and a SyntaxContext to track renaming and /// macro expansion per Flatt et al., "Macros That Work Together" #[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Ident { - pub name: Name, + pub name: Symbol, pub ctxt: SyntaxContext } -impl Name { - pub fn as_str(self) -> token::InternedString { - token::InternedString::new_from_name(self) - } -} - -impl fmt::Debug for Name { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}({})", self, self.0) - } -} - -impl fmt::Display for Name { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.as_str(), f) - } -} - -impl Encodable for Name { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(&self.as_str()) - } -} - -impl Decodable for Name { - fn decode(d: &mut D) -> Result { - Ok(token::intern(&d.read_str()?)) - } -} - -impl<'a> ::std::cmp::PartialEq<&'a str> for Name { - fn eq(&self, other: &&str) -> bool { - *self.as_str() == **other - } -} - impl Ident { pub const fn with_empty_ctxt(name: Name) -> Ident { Ident { name: name, ctxt: SyntaxContext::empty() } } + + /// Maps a string to an identifier with an empty syntax context. + pub fn from_str(s: &str) -> Ident { + Ident::with_empty_ctxt(Symbol::intern(s)) + } } impl fmt::Debug for Ident { diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index cd93b8e96e4..4fce739efe2 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -23,9 +23,9 @@ use syntax_pos::{Span, BytePos, DUMMY_SP}; use errors::Handler; use feature_gate::{Features, GatedCfg}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; -use parse::token::InternedString; -use parse::{ParseSess, token}; +use parse::ParseSess; use ptr::P; +use symbol::{self, Symbol, InternedString}; use util::ThinVec; use std::cell::{RefCell, Cell}; @@ -278,8 +278,8 @@ impl Attribute { if self.is_sugared_doc { let comment = self.value_str().unwrap(); let meta = mk_name_value_item_str( - token::intern("doc"), - token::intern_and_get_ident(&strip_doc_comment_decoration( + Symbol::intern("doc"), + symbol::intern_and_get_ident(&strip_doc_comment_decoration( &comment))); if self.style == ast::AttrStyle::Outer { f(&mk_attr_outer(self.id, meta)) @@ -392,7 +392,7 @@ pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos, hi: By style: style, value: MetaItem { span: mk_sp(lo, hi), - name: token::intern("doc"), + name: Symbol::intern("doc"), node: MetaItemKind::NameValue(lit), }, is_sugared_doc: true, diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 81c8e0bdb82..aa81d7afcb3 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -19,6 +19,7 @@ use ext::base::{ExtCtxt, MacEager, MacResult}; use ext::build::AstBuilder; use parse::token; use ptr::P; +use symbol::Symbol; use tokenstream::{TokenTree}; use util::small_vector::SmallVector; @@ -141,7 +142,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, )); } }); - let sym = Ident::with_empty_ctxt(token::gensym(&format!( + let sym = Ident::with_empty_ctxt(Symbol::gensym(&format!( "__register_diagnostic_{}", code ))); MacEager::items(SmallVector::many(vec![ diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 7f66b060052..0fd360ba2e4 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -20,8 +20,8 
@@ use ext::hygiene::Mark; use fold::{self, Folder}; use parse::{self, parser}; use parse::token; -use parse::token::{InternedString, str_to_ident}; use ptr::P; +use symbol::{Symbol, InternedString}; use util::small_vector::SmallVector; use std::path::PathBuf; @@ -735,7 +735,7 @@ impl<'a> ExtCtxt<'a> { self.ecfg.trace_mac = x } pub fn ident_of(&self, st: &str) -> ast::Ident { - str_to_ident(st) + ast::Ident::from_str(st) } pub fn std_path(&self, components: &[&str]) -> Vec { let mut v = Vec::new(); @@ -746,7 +746,7 @@ impl<'a> ExtCtxt<'a> { return v } pub fn name_of(&self, st: &str) -> ast::Name { - token::intern(st) + Symbol::intern(st) } } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index f488e1cf95c..b96a4624508 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -14,8 +14,8 @@ use attr; use syntax_pos::{Span, DUMMY_SP, Pos}; use codemap::{dummy_spanned, respan, Spanned}; use ext::base::ExtCtxt; -use parse::token::{self, keywords, InternedString}; use ptr::P; +use symbol::{intern_and_get_ident, keywords, InternedString}; // Transitional reexports so qquote can find the paths it is looking for mod syntax { @@ -787,8 +787,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn expr_fail(&self, span: Span, msg: InternedString) -> P { let loc = self.codemap().lookup_char_pos(span.lo); - let expr_file = self.expr_str(span, - token::intern_and_get_ident(&loc.file.name)); + let expr_file = self.expr_str(span, intern_and_get_ident(&loc.file.name)); let expr_line = self.expr_u32(span, loc.line as u32); let expr_file_line_tuple = self.expr_tuple(span, vec![expr_file, expr_line]); let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index a2d42e14592..f033b3400d0 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -23,10 +23,11 @@ use fold; use fold::*; use parse::{ParseSess, PResult, lexer}; use parse::parser::Parser; -use parse::token::{self, keywords}; +use parse::token; use print::pprust; use ptr::P; use std_inject; +use symbol::keywords; use tokenstream::{TokenTree, TokenStream}; use util::small_vector::SmallVector; use visit::Visitor; @@ -190,7 +191,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { self.cx.crate_root = std_inject::injected_crate_name(&krate); let mut module = ModuleData { - mod_path: vec![token::str_to_ident(&self.cx.ecfg.crate_name)], + mod_path: vec![Ident::from_str(&self.cx.ecfg.crate_name)], directory: PathBuf::from(self.cx.codemap().span_to_filename(krate.span)), }; module.directory.pop(); @@ -246,7 +247,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.cx.resolver.resolve_macro(scope, &mac.node.path, force) } InvocationKind::Attr { ref attr, .. 
} => { - let ident = ast::Ident::with_empty_ctxt(attr.name()); + let ident = Ident::with_empty_ctxt(attr.name()); let path = ast::Path::from_ident(attr.span, ident); self.cx.resolver.resolve_macro(scope, &path, force) } diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs index e323dd2f623..4fe57a8345e 100644 --- a/src/libsyntax/ext/placeholders.rs +++ b/src/libsyntax/ext/placeholders.rs @@ -13,8 +13,8 @@ use codemap::{DUMMY_SP, dummy_spanned}; use ext::base::ExtCtxt; use ext::expand::{Expansion, ExpansionKind}; use fold::*; -use parse::token::{intern, keywords}; use ptr::P; +use symbol::{Symbol, keywords}; use util::move_map::MoveMap; use util::small_vector::SmallVector; @@ -227,7 +227,7 @@ pub fn reconstructed_macro_rules(def: &ast::MacroDef) -> Expansion { span: DUMMY_SP, global: false, segments: vec![ast::PathSegment { - identifier: ast::Ident::with_empty_ctxt(intern("macro_rules")), + identifier: ast::Ident::with_empty_ctxt(Symbol::intern("macro_rules")), parameters: ast::PathParameters::none(), }], }, diff --git a/src/libsyntax/ext/proc_macro_shim.rs b/src/libsyntax/ext/proc_macro_shim.rs index dc3a01f41bc..21ce89a6dd5 100644 --- a/src/libsyntax/ext/proc_macro_shim.rs +++ b/src/libsyntax/ext/proc_macro_shim.rs @@ -66,6 +66,7 @@ pub mod prelude { pub use ast::Ident; pub use codemap::{DUMMY_SP, Span}; pub use ext::base::{ExtCtxt, MacResult}; - pub use parse::token::{self, Token, DelimToken, keywords, str_to_ident}; + pub use parse::token::{self, Token, DelimToken}; + pub use symbol::keywords; pub use tokenstream::{TokenTree, TokenStream}; } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index fa38f21e9b3..0bd018603d2 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -33,6 +33,7 @@ pub mod rt { use parse::{self, token, classify}; use ptr::P; use std::rc::Rc; + use symbol; use tokenstream::{self, TokenTree}; @@ -239,7 +240,7 @@ pub mod rt { impl ToTokens for str { fn to_tokens(&self, cx: &ExtCtxt) -> Vec { let lit = ast::LitKind::Str( - token::intern_and_get_ident(self), ast::StrStyle::Cooked); + symbol::intern_and_get_ident(self), ast::StrStyle::Cooked); dummy_spanned(lit).to_tokens(cx) } } @@ -527,12 +528,12 @@ pub fn expand_quote_matcher(cx: &mut ExtCtxt, base::MacEager::expr(expanded) } -fn ids_ext(strs: Vec ) -> Vec { - strs.iter().map(|str| str_to_ident(&(*str))).collect() +fn ids_ext(strs: Vec) -> Vec { + strs.iter().map(|s| ast::Ident::from_str(s)).collect() } -fn id_ext(str: &str) -> ast::Ident { - str_to_ident(str) +fn id_ext(s: &str) -> ast::Ident { + ast::Ident::from_str(s) } // Lift an ident to the expr that evaluates to that ident. 
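The hunks above swap the free functions token::str_to_ident and token::intern for the associated constructors ast::Ident::from_str and Symbol::intern. A minimal sketch of the new call sites, assuming the in-tree syntax crate as it stands after this patch (the demo function below is illustrative only and not part of the patch):

    use syntax::ast::Ident;
    use syntax::symbol::Symbol;

    fn demo() {
        // Previously token::intern("doc") and token::str_to_ident("doc").
        let name = Symbol::intern("doc");
        let ident = Ident::from_str("doc");
        // from_str is with_empty_ctxt(Symbol::intern(..)), so the two agree on the name.
        assert_eq!(ident.name, name);
        // Gensyms still mint a fresh symbol on every call, even for equal strings.
        assert!(Symbol::gensym("doc") != Symbol::gensym("doc"));
    }
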
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index bda84cdaf39..7893ad839ea 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -17,6 +17,7 @@ use parse::token; use parse; use print::pprust; use ptr::P; +use symbol; use tokenstream; use util::small_vector::SmallVector; @@ -60,15 +61,14 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) let topmost = cx.expansion_cause(); let loc = cx.codemap().lookup_char_pos(topmost.lo); - let filename = token::intern_and_get_ident(&loc.file.name); + let filename = symbol::intern_and_get_ident(&loc.file.name); base::MacEager::expr(cx.expr_str(topmost, filename)) } pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { let s = pprust::tts_to_string(tts); - base::MacEager::expr(cx.expr_str(sp, - token::intern_and_get_ident(&s[..]))) + base::MacEager::expr(cx.expr_str(sp, symbol::intern_and_get_ident(&s))) } pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) @@ -77,9 +77,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) let mod_path = &cx.current_expansion.module.mod_path; let string = mod_path.iter().map(|x| x.to_string()).collect::>().join("::"); - base::MacEager::expr(cx.expr_str( - sp, - token::intern_and_get_ident(&string[..]))) + base::MacEager::expr(cx.expr_str(sp, symbol::intern_and_get_ident(&string))) } /// include! : parse the given file as an expr @@ -144,7 +142,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT // Add this input file to the code map to make it available as // dependency information let filename = format!("{}", file.display()); - let interned = token::intern_and_get_ident(&src[..]); + let interned = symbol::intern_and_get_ident(&src); cx.codemap().new_filemap_and_lines(&filename, None, &src); base::MacEager::expr(cx.expr_str(sp, interned)) diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 552d4de9617..4c37a0a71e2 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -20,9 +20,10 @@ use ext::tt::macro_parser::{parse, parse_failure_msg}; use parse::ParseSess; use parse::lexer::new_tt_reader; use parse::parser::{Parser, Restrictions}; -use parse::token::{self, gensym_ident, NtTT, Token}; +use parse::token::{self, NtTT, Token}; use parse::token::Token::*; use print; +use symbol::Symbol; use tokenstream::{self, TokenTree}; use std::collections::{HashMap}; @@ -187,16 +188,16 @@ impl IdentMacroExpander for MacroRulesExpander { /// Converts a `macro_rules!` invocation into a syntax extension. pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { - let lhs_nm = gensym_ident("lhs"); - let rhs_nm = gensym_ident("rhs"); + let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs")); + let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs")); // The pattern that macro_rules matches. // The grammar for macro_rules! is: // $( $lhs:tt => $rhs:tt );+ // ...quasiquoting this would be nice. 
// These spans won't matter, anyways - let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt")); - let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt")); + let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt")); + let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt")); let argument_gram = vec![ TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { tts: vec![ diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 19f5e860b27..0f94018997c 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -33,7 +33,7 @@ use syntax_pos::Span; use errors::{DiagnosticBuilder, Handler}; use visit::{self, FnKind, Visitor}; use parse::ParseSess; -use parse::token::InternedString; +use symbol::InternedString; use std::ascii::AsciiExt; use std::env; diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index b6c87155d6b..44ce8668c54 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -22,8 +22,9 @@ use ast::*; use ast; use syntax_pos::Span; use codemap::{Spanned, respan}; -use parse::token::{self, keywords}; +use parse::token; use ptr::P; +use symbol::keywords; use tokenstream::*; use util::small_vector::SmallVector; use util::move_map::MoveMap; diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 34280812421..15b93a4864b 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -83,7 +83,6 @@ pub mod diagnostics { pub mod diagnostic_list; pub mod util { - pub mod interner; pub mod lev_distance; pub mod node_count; pub mod parser; @@ -118,6 +117,7 @@ pub mod ptr; pub mod show_span; pub mod std_inject; pub mod str; +pub mod symbol; pub mod test; pub mod tokenstream; pub mod visit; diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index cf48c445c80..7048be1478b 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -8,13 +8,14 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast; +use ast::{self, Ident}; use syntax_pos::{self, BytePos, CharPos, Pos, Span}; use codemap::CodeMap; use errors::{FatalError, Handler, DiagnosticBuilder}; use ext::tt::transcribe::tt_next_token; -use parse::token::{self, keywords, str_to_ident}; +use parse::token; use str::char_at; +use symbol::{Symbol, keywords}; use rustc_unicode::property::Pattern_White_Space; use std::borrow::Cow; @@ -350,13 +351,13 @@ impl<'a> StringReader<'a> { /// single-byte delimiter). pub fn name_from(&self, start: BytePos) -> ast::Name { debug!("taking an ident from {:?} to {:?}", start, self.pos); - self.with_str_from(start, token::intern) + self.with_str_from(start, Symbol::intern) } /// As name_from, with an explicit endpoint. 
pub fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name { debug!("taking an ident from {:?} to {:?}", start, end); - self.with_str_from_to(start, end, token::intern) + self.with_str_from_to(start, end, Symbol::intern) } /// Calls `f` with a string slice of the source text spanning from `start` @@ -492,7 +493,7 @@ impl<'a> StringReader<'a> { if string == "_" { None } else { - Some(token::intern(string)) + Some(Symbol::intern(string)) } }) } @@ -540,7 +541,7 @@ impl<'a> StringReader<'a> { self.with_str_from(start_bpos, |string| { // comments with only more "/"s are not doc comments let tok = if is_doc_comment(string) { - token::DocComment(token::intern(string)) + token::DocComment(Symbol::intern(string)) } else { token::Comment }; @@ -669,7 +670,7 @@ impl<'a> StringReader<'a> { } else { string.into() }; - token::DocComment(token::intern(&string[..])) + token::DocComment(Symbol::intern(&string[..])) } else { token::Comment }; @@ -758,7 +759,7 @@ impl<'a> StringReader<'a> { self.err_span_(start_bpos, self.pos, "no valid digits found for number"); - return token::Integer(token::intern("0")); + return token::Integer(Symbol::intern("0")); } // might be a float, but don't be greedy if this is actually an @@ -1097,7 +1098,7 @@ impl<'a> StringReader<'a> { token::Underscore } else { // FIXME: perform NFKC normalization here. (Issue #2253) - token::Ident(str_to_ident(string)) + token::Ident(Ident::from_str(string)) } })); } @@ -1277,13 +1278,13 @@ impl<'a> StringReader<'a> { // expansion purposes. See #12512 for the gory details of why // this is necessary. let ident = self.with_str_from(start, |lifetime_name| { - str_to_ident(&format!("'{}", lifetime_name)) + Ident::from_str(&format!("'{}", lifetime_name)) }); // Conjure up a "keyword checking ident" to make sure that // the lifetime name is not a keyword. 
let keyword_checking_ident = self.with_str_from(start, |lifetime_name| { - str_to_ident(lifetime_name) + Ident::from_str(lifetime_name) }); let keyword_checking_token = &token::Ident(keyword_checking_ident); let last_bpos = self.pos; @@ -1310,7 +1311,7 @@ impl<'a> StringReader<'a> { let id = if valid { self.name_from(start) } else { - token::intern("0") + Symbol::intern("0") }; self.bump(); // advance ch past token let suffix = self.scan_optional_raw_name(); @@ -1352,7 +1353,7 @@ impl<'a> StringReader<'a> { let id = if valid { self.name_from(start_bpos + BytePos(1)) } else { - token::intern("??") + Symbol::intern("??") }; self.bump(); let suffix = self.scan_optional_raw_name(); @@ -1424,7 +1425,7 @@ impl<'a> StringReader<'a> { let id = if valid { self.name_from_to(content_start_bpos, content_end_bpos) } else { - token::intern("??") + Symbol::intern("??") }; let suffix = self.scan_optional_raw_name(); return Ok(token::Literal(token::StrRaw(id, hash_count), suffix)); @@ -1551,7 +1552,7 @@ impl<'a> StringReader<'a> { let id = if valid { self.name_from(start) } else { - token::intern("?") + Symbol::intern("?") }; self.bump(); // advance ch past token return token::Byte(id); @@ -1584,7 +1585,7 @@ impl<'a> StringReader<'a> { let id = if valid { self.name_from(start) } else { - token::intern("??") + Symbol::intern("??") }; self.bump(); return token::ByteStr(id); @@ -1700,11 +1701,11 @@ fn ident_continue(c: Option<char>) -> bool { mod tests { use super::*; + use ast::Ident; use syntax_pos::{BytePos, Span, NO_EXPANSION}; use codemap::CodeMap; use errors; use parse::token; - use parse::token::str_to_ident; use std::io; use std::rc::Rc; @@ -1732,7 +1733,7 @@ mod tests { &sh, "/* my source file */ fn main() { println!(\"zebra\"); }\n" .to_string()); - let id = str_to_ident("fn"); + let id = Ident::from_str("fn"); assert_eq!(string_reader.next_token().tok, token::Comment); assert_eq!(string_reader.next_token().tok, token::Whitespace); let tok1 = string_reader.next_token(); @@ -1813,7 +1814,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, - token::Literal(token::Char(token::intern("a")), None)); + token::Literal(token::Char(Symbol::intern("a")), None)); } #[test] @@ -1821,7 +1822,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, - token::Literal(token::Char(token::intern(" ")), None)); + token::Literal(token::Char(Symbol::intern(" ")), None)); } #[test] @@ -1829,7 +1830,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, - token::Literal(token::Char(token::intern("\\n")), None)); + token::Literal(token::Char(Symbol::intern("\\n")), None)); } #[test] @@ -1847,7 +1848,7 @@ mod tests { assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) .next_token() .tok, - token::Literal(token::StrRaw(token::intern("\"#a\\b\x00c\""), 3), None)); + token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None)); } #[test] @@ -1857,11 +1858,11 @@ mod tests { macro_rules!
test { ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok, - token::Literal(token::$tok_type(token::intern($tok_contents)), - Some(token::intern("suffix")))); + token::Literal(token::$tok_type(Symbol::intern($tok_contents)), + Some(Symbol::intern("suffix")))); // with a whitespace separator: assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok, - token::Literal(token::$tok_type(token::intern($tok_contents)), + token::Literal(token::$tok_type(Symbol::intern($tok_contents)), None)); }} } @@ -1877,14 +1878,14 @@ mod tests { test!("1.0e10", Float, "1.0e10"); assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok, - token::Literal(token::Integer(token::intern("2")), - Some(token::intern("us")))); + token::Literal(token::Integer(Symbol::intern("2")), + Some(Symbol::intern("us")))); assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, - token::Literal(token::StrRaw(token::intern("raw"), 3), - Some(token::intern("suffix")))); + token::Literal(token::StrRaw(Symbol::intern("raw"), 3), + Some(Symbol::intern("suffix")))); assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, - token::Literal(token::ByteStrRaw(token::intern("raw"), 3), - Some(token::intern("suffix")))); + token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3), + Some(Symbol::intern("suffix")))); } #[test] @@ -1904,7 +1905,7 @@ mod tests { _ => panic!("expected a comment!"), } assert_eq!(lexer.next_token().tok, - token::Literal(token::Char(token::intern("a")), None)); + token::Literal(token::Char(Symbol::intern("a")), None)); } #[test] @@ -1917,6 +1918,6 @@ mod tests { assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7))); assert_eq!(lexer.next_token().tok, token::Whitespace); assert_eq!(lexer.next_token().tok, - token::DocComment(token::intern("/// test"))); + token::DocComment(Symbol::intern("/// test"))); } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 19e8b711ba4..a44f78b3c3a 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -16,9 +16,9 @@ use syntax_pos::{self, Span, FileMap}; use errors::{Handler, ColorConfig, DiagnosticBuilder}; use feature_gate::UnstableFeatures; use parse::parser::Parser; -use parse::token::InternedString; use ptr::P; use str::char_at; +use symbol::{self, InternedString}; use tokenstream; use std::cell::RefCell; @@ -372,7 +372,7 @@ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { s[1..].chars().all(|c| '0' <= c && c <= '9') } -fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, +fn filtered_float_lit(data: InternedString, suffix: Option<&str>, sd: &Handler, sp: Span) -> ast::LitKind { debug!("filtered_float_lit: {}, {:?}", data, suffix); match suffix.as_ref().map(|s| &**s) { @@ -400,7 +400,7 @@ pub fn float_lit(s: &str, suffix: Option, debug!("float_lit: {:?}, {:?}", s, suffix); // FIXME #2252: bounds checking float literals is deferred until trans let s = s.chars().filter(|&c| c != '_').collect::(); - let data = token::intern_and_get_ident(&s); + let data = symbol::intern_and_get_ident(&s); filtered_float_lit(data, suffix.as_ref().map(|s| &**s), sd, sp) } @@ -530,7 +530,7 @@ pub fn integer_lit(s: &str, 2 => sd.span_err(sp, "binary float literal is not supported"), _ => () } - let ident = token::intern_and_get_ident(&s); + let ident = symbol::intern_and_get_ident(&s); return filtered_float_lit(ident, Some(&suf), sd, sp) } } diff --git 
a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 98ce00c7d38..13c701795a8 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -48,13 +48,14 @@ use parse::classify; use parse::common::SeqSep; use parse::lexer::{Reader, TokenAndSpan}; use parse::obsolete::ObsoleteSyntax; -use parse::token::{self, intern, keywords, MatchNt, SubstNt, InternedString}; +use parse::token::{self, MatchNt, SubstNt}; use parse::{new_sub_parser_from_file, ParseSess}; use util::parser::{AssocOp, Fixity}; use print::pprust; use ptr::P; use parse::PResult; use tokenstream::{self, Delimited, SequenceRepetition, TokenTree}; +use symbol::{self, Symbol, keywords, InternedString}; use util::ThinVec; use std::collections::HashSet; @@ -1537,13 +1538,13 @@ impl<'a> Parser<'a> { token::Str_(s) => { (true, - LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())), + LitKind::Str(symbol::intern_and_get_ident(&parse::str_lit(&s.as_str())), ast::StrStyle::Cooked)) } token::StrRaw(s, n) => { (true, LitKind::Str( - token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())), + symbol::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())), ast::StrStyle::Raw(n))) } token::ByteStr(i) => @@ -2627,7 +2628,7 @@ impl<'a> Parser<'a> { }))); } else if self.token.is_keyword(keywords::Crate) { let ident = match self.token { - token::Ident(id) => ast::Ident { name: token::intern("$crate"), ..id }, + token::Ident(id) => ast::Ident { name: Symbol::intern("$crate"), ..id }, _ => unreachable!(), }; self.bump(); @@ -4835,7 +4836,7 @@ impl<'a> Parser<'a> { Visibility::Inherited => (), _ => { let is_macro_rules: bool = match self.token { - token::Ident(sid) => sid.name == intern("macro_rules"), + token::Ident(sid) => sid.name == Symbol::intern("macro_rules"), _ => false, }; if is_macro_rules { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 4aaa028ef75..8ac39dd462e 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -16,13 +16,10 @@ pub use self::Token::*; use ast::{self}; use ptr::P; -use util::interner::Interner; +use symbol::keywords; use tokenstream; -use serialize::{Decodable, Decoder, Encodable, Encoder}; -use std::cell::RefCell; use std::fmt; -use std::ops::Deref; use std::rc::Rc; #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] @@ -335,266 +332,3 @@ impl fmt::Debug for Nonterminal { } } } - -// In this macro, there is the requirement that the name (the number) must be monotonically -// increasing by one in the special identifiers, starting at 0; the same holds for the keywords, -// except starting from the next number instead of zero. -macro_rules! declare_keywords {( - $( ($index: expr, $konst: ident, $string: expr) )* -) => { - pub mod keywords { - use ast; - #[derive(Clone, Copy, PartialEq, Eq)] - pub struct Keyword { - ident: ast::Ident, - } - impl Keyword { - #[inline] pub fn ident(self) -> ast::Ident { self.ident } - #[inline] pub fn name(self) -> ast::Name { self.ident.name } - } - $( - #[allow(non_upper_case_globals)] - pub const $konst: Keyword = Keyword { - ident: ast::Ident::with_empty_ctxt(ast::Name($index)) - }; - )* - } - - fn mk_fresh_ident_interner() -> IdentInterner { - Interner::prefill(&[$($string,)*]) - } -}} - -// NB: leaving holes in the ident table is bad! a different ident will get -// interned with the id from the hole, but it will be between the min and max -// of the reserved words, and thus tagged as "reserved". 
-// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`, -// this should be rarely necessary though if the keywords are kept in alphabetic order. -declare_keywords! { - // Invalid identifier - (0, Invalid, "") - - // Strict keywords used in the language. - (1, As, "as") - (2, Box, "box") - (3, Break, "break") - (4, Const, "const") - (5, Continue, "continue") - (6, Crate, "crate") - (7, Else, "else") - (8, Enum, "enum") - (9, Extern, "extern") - (10, False, "false") - (11, Fn, "fn") - (12, For, "for") - (13, If, "if") - (14, Impl, "impl") - (15, In, "in") - (16, Let, "let") - (17, Loop, "loop") - (18, Match, "match") - (19, Mod, "mod") - (20, Move, "move") - (21, Mut, "mut") - (22, Pub, "pub") - (23, Ref, "ref") - (24, Return, "return") - (25, SelfValue, "self") - (26, SelfType, "Self") - (27, Static, "static") - (28, Struct, "struct") - (29, Super, "super") - (30, Trait, "trait") - (31, True, "true") - (32, Type, "type") - (33, Unsafe, "unsafe") - (34, Use, "use") - (35, Where, "where") - (36, While, "while") - - // Keywords reserved for future use. - (37, Abstract, "abstract") - (38, Alignof, "alignof") - (39, Become, "become") - (40, Do, "do") - (41, Final, "final") - (42, Macro, "macro") - (43, Offsetof, "offsetof") - (44, Override, "override") - (45, Priv, "priv") - (46, Proc, "proc") - (47, Pure, "pure") - (48, Sizeof, "sizeof") - (49, Typeof, "typeof") - (50, Unsized, "unsized") - (51, Virtual, "virtual") - (52, Yield, "yield") - - // Weak keywords, have special meaning only in specific contexts. - (53, Default, "default") - (54, StaticLifetime, "'static") - (55, Union, "union") -} - -// looks like we can get rid of this completely... -pub type IdentInterner = Interner; - -// if an interner exists in TLS, return it. Otherwise, prepare a -// fresh one. -// FIXME(eddyb) #8726 This should probably use a thread-local reference. -pub fn with_ident_interner T>(f: F) -> T { - thread_local!(static KEY: RefCell = { - RefCell::new(mk_fresh_ident_interner()) - }); - KEY.with(|interner| f(&mut *interner.borrow_mut())) -} - -/// Reset the ident interner to its initial state. -pub fn reset_ident_interner() { - with_ident_interner(|interner| *interner = mk_fresh_ident_interner()); -} - -/// Represents a string stored in the thread-local interner. Because the -/// interner lives for the life of the thread, this can be safely treated as an -/// immortal string, as long as it never crosses between threads. -/// -/// FIXME(pcwalton): You must be careful about what you do in the destructors -/// of objects stored in TLS, because they may run after the interner is -/// destroyed. In particular, they must not access string contents. This can -/// be fixed in the future by just leaking all strings until thread death -/// somehow. 
-#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)] -pub struct InternedString { - string: Rc, -} - -impl InternedString { - #[inline] - pub fn new(string: &'static str) -> InternedString { - InternedString { - string: Rc::__from_str(string), - } - } - - #[inline] - pub fn new_from_name(name: ast::Name) -> InternedString { - with_ident_interner(|interner| InternedString { string: interner.get(name) }) - } -} - -impl Deref for InternedString { - type Target = str; - - fn deref(&self) -> &str { &self.string } -} - -impl fmt::Debug for InternedString { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Debug::fmt(&self.string, f) - } -} - -impl fmt::Display for InternedString { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.string, f) - } -} - -impl<'a> PartialEq<&'a str> for InternedString { - #[inline(always)] - fn eq(&self, other: & &'a str) -> bool { - PartialEq::eq(&self.string[..], *other) - } - #[inline(always)] - fn ne(&self, other: & &'a str) -> bool { - PartialEq::ne(&self.string[..], *other) - } -} - -impl<'a> PartialEq for &'a str { - #[inline(always)] - fn eq(&self, other: &InternedString) -> bool { - PartialEq::eq(*self, &other.string[..]) - } - #[inline(always)] - fn ne(&self, other: &InternedString) -> bool { - PartialEq::ne(*self, &other.string[..]) - } -} - -impl PartialEq for InternedString { - #[inline(always)] - fn eq(&self, other: &str) -> bool { - PartialEq::eq(&self.string[..], other) - } - #[inline(always)] - fn ne(&self, other: &str) -> bool { - PartialEq::ne(&self.string[..], other) - } -} - -impl PartialEq for str { - #[inline(always)] - fn eq(&self, other: &InternedString) -> bool { - PartialEq::eq(self, &other.string[..]) - } - #[inline(always)] - fn ne(&self, other: &InternedString) -> bool { - PartialEq::ne(self, &other.string[..]) - } -} - -impl Decodable for InternedString { - fn decode(d: &mut D) -> Result { - Ok(intern(&d.read_str()?).as_str()) - } -} - -impl Encodable for InternedString { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(&self.string) - } -} - -/// Interns and returns the string contents of an identifier, using the -/// thread-local interner. -#[inline] -pub fn intern_and_get_ident(s: &str) -> InternedString { - intern(s).as_str() -} - -/// Maps a string to its interned representation. -#[inline] -pub fn intern(s: &str) -> ast::Name { - with_ident_interner(|interner| interner.intern(s)) -} - -/// gensym's a new usize, using the current interner. -#[inline] -pub fn gensym(s: &str) -> ast::Name { - with_ident_interner(|interner| interner.gensym(s)) -} - -/// Maps a string to an identifier with an empty syntax context. -#[inline] -pub fn str_to_ident(s: &str) -> ast::Ident { - ast::Ident::with_empty_ctxt(intern(s)) -} - -/// Maps a string to a gensym'ed identifier. -#[inline] -pub fn gensym_ident(s: &str) -> ast::Ident { - ast::Ident::with_empty_ctxt(gensym(s)) -} - -// create a fresh name that maps to the same string as the old one. -// note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src))); -// that is, that the new name and the old one are connected to ptr_eq strings. -pub fn fresh_name(src: ast::Ident) -> ast::Name { - with_ident_interner(|interner| interner.gensym_copy(src.name)) - // following: debug version. Could work in final except that it's incompatible with - // good error messages and uses of struct names in ambiguous could-be-binding - // locations. Also definitely destroys the guarantee given above about ptr_eq. 
- /*let num = rand::thread_rng().gen_uint_range(0,0xffff); - gensym(format!("{}_{}",ident_to_string(src),num))*/ -} diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index da766d17672..52ff17d1c12 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -19,7 +19,7 @@ use attr; use codemap::{self, CodeMap}; use syntax_pos::{self, BytePos}; use errors; -use parse::token::{self, keywords, BinOpToken, Token}; +use parse::token::{self, BinOpToken, Token}; use parse::lexer::comments; use parse; use print::pp::{self, break_offset, word, space, zerobreak, hardbreak}; @@ -27,6 +27,7 @@ use print::pp::{Breaks, eof}; use print::pp::Breaks::{Consistent, Inconsistent}; use ptr::P; use std_inject; +use symbol::{Symbol, keywords}; use tokenstream::{self, TokenTree}; use std::ascii; @@ -119,13 +120,13 @@ pub fn print_crate<'a>(cm: &'a CodeMap, // of the feature gate, so we fake them up here. // #![feature(prelude_import)] - let prelude_import_meta = attr::mk_list_word_item(token::intern("prelude_import")); - let list = attr::mk_list_item(token::intern("feature"), vec![prelude_import_meta]); + let prelude_import_meta = attr::mk_list_word_item(Symbol::intern("prelude_import")); + let list = attr::mk_list_item(Symbol::intern("feature"), vec![prelude_import_meta]); let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), list); try!(s.print_attribute(&fake_attr)); // #![no_std] - let no_std_meta = attr::mk_word_item(token::intern("no_std")); + let no_std_meta = attr::mk_word_item(Symbol::intern("no_std")); let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), no_std_meta); try!(s.print_attribute(&fake_attr)); } diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 166d95911b9..6a291ad9c40 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -10,10 +10,10 @@ use ast; use attr; +use symbol::{Symbol, keywords}; use syntax_pos::{DUMMY_SP, Span}; use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute}; -use parse::token::{intern, keywords}; -use parse::{token, ParseSess}; +use parse::ParseSess; use ptr::P; /// Craft a span that will be ignored by the stability lint's @@ -23,7 +23,7 @@ fn ignored_span(sess: &ParseSess, sp: Span) -> Span { let info = ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { - format: MacroAttribute(intern("std_inject")), + format: MacroAttribute(Symbol::intern("std_inject")), span: None, allow_internal_unstable: true, } @@ -53,14 +53,14 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess, None => return krate, }; - let crate_name = token::intern(&alt_std_name.unwrap_or(name.to_string())); + let crate_name = Symbol::intern(&alt_std_name.unwrap_or(name.to_string())); krate.module.items.insert(0, P(ast::Item { attrs: vec![attr::mk_attr_outer(attr::mk_attr_id(), - attr::mk_word_item(token::intern("macro_use")))], + attr::mk_word_item(Symbol::intern("macro_use")))], vis: ast::Visibility::Inherited, node: ast::ItemKind::ExternCrate(Some(crate_name)), - ident: token::str_to_ident(name), + ident: ast::Ident::from_str(name), id: ast::DUMMY_NODE_ID, span: DUMMY_SP, })); @@ -70,7 +70,7 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess, attrs: vec![ast::Attribute { style: ast::AttrStyle::Outer, value: ast::MetaItem { - name: token::intern("prelude_import"), + name: Symbol::intern("prelude_import"), node: ast::MetaItemKind::Word, span: span, }, @@ -82,7 +82,7 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess, node: ast::ItemKind::Use(P(codemap::dummy_spanned(ast::ViewPathGlob(ast::Path { global: 
false, segments: vec![name, "prelude", "v1"].into_iter().map(|name| ast::PathSegment { - identifier: token::str_to_ident(name), + identifier: ast::Ident::from_str(name), parameters: ast::PathParameters::none(), }).collect(), span: span, diff --git a/src/libsyntax/symbol.rs b/src/libsyntax/symbol.rs new file mode 100644 index 00000000000..9620b8412b4 --- /dev/null +++ b/src/libsyntax/symbol.rs @@ -0,0 +1,339 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! An "interner" is a data structure that associates values with usize tags and +//! allows bidirectional lookup; i.e. given a value, one can easily find the +//! type, and vice versa. + +use serialize::{Decodable, Decoder, Encodable, Encoder}; +use std::cell::RefCell; +use std::collections::HashMap; +use std::fmt; +use std::rc::Rc; + +/// A symbol is an interned or gensymed string. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Symbol(u32); + +impl Symbol { + /// Maps a string to its interned representation. + pub fn intern(string: &str) -> Self { + with_interner(|interner| interner.intern(string)) + } + + /// gensym's a new usize, using the current interner. + pub fn gensym(string: &str) -> Self { + with_interner(|interner| interner.gensym(string)) + } + + pub fn as_str(self) -> InternedString { + with_interner(|interner| InternedString { string: interner.get(self) }) + } + + pub fn as_u32(self) -> u32 { + self.0 + } +} + +impl fmt::Debug for Symbol { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}({})", self, self.0) + } +} + +impl fmt::Display for Symbol { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.as_str(), f) + } +} + +impl Encodable for Symbol { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_str(&self.as_str()) + } +} + +impl Decodable for Symbol { + fn decode(d: &mut D) -> Result { + Ok(Symbol::intern(&d.read_str()?)) + } +} + +impl<'a> PartialEq<&'a str> for Symbol { + fn eq(&self, other: &&str) -> bool { + *self.as_str() == **other + } +} + +#[derive(Default)] +pub struct Interner { + names: HashMap, Symbol>, + strings: Vec>, +} + +impl Interner { + pub fn new() -> Self { + Interner::default() + } + + fn prefill(init: &[&str]) -> Self { + let mut this = Interner::new(); + for &string in init { + this.intern(string); + } + this + } + + pub fn intern(&mut self, string: &str) -> Symbol { + if let Some(&name) = self.names.get(string) { + return name; + } + + let name = Symbol(self.strings.len() as u32); + let string = Rc::__from_str(string); + self.strings.push(string.clone()); + self.names.insert(string, name); + name + } + + fn gensym(&mut self, string: &str) -> Symbol { + let gensym = Symbol(self.strings.len() as u32); + // leave out of `names` to avoid colliding + self.strings.push(Rc::__from_str(string)); + gensym + } + + pub fn get(&self, name: Symbol) -> Rc { + self.strings[name.0 as usize].clone() + } +} + +// In this macro, there is the requirement that the name (the number) must be monotonically +// increasing by one in the special identifiers, starting at 0; the same holds for the keywords, +// except starting from the next number instead of zero. +macro_rules! 
declare_keywords {( + $( ($index: expr, $konst: ident, $string: expr) )* +) => { + pub mod keywords { + use ast; + #[derive(Clone, Copy, PartialEq, Eq)] + pub struct Keyword { + ident: ast::Ident, + } + impl Keyword { + #[inline] pub fn ident(self) -> ast::Ident { self.ident } + #[inline] pub fn name(self) -> ast::Name { self.ident.name } + } + $( + #[allow(non_upper_case_globals)] + pub const $konst: Keyword = Keyword { + ident: ast::Ident::with_empty_ctxt(ast::Name($index)) + }; + )* + } + + impl Interner { + fn fresh() -> Self { + Interner::prefill(&[$($string,)*]) + } + } +}} + +// NB: leaving holes in the ident table is bad! a different ident will get +// interned with the id from the hole, but it will be between the min and max +// of the reserved words, and thus tagged as "reserved". +// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`, +// this should be rarely necessary though if the keywords are kept in alphabetic order. +declare_keywords! { + // Invalid identifier + (0, Invalid, "") + + // Strict keywords used in the language. + (1, As, "as") + (2, Box, "box") + (3, Break, "break") + (4, Const, "const") + (5, Continue, "continue") + (6, Crate, "crate") + (7, Else, "else") + (8, Enum, "enum") + (9, Extern, "extern") + (10, False, "false") + (11, Fn, "fn") + (12, For, "for") + (13, If, "if") + (14, Impl, "impl") + (15, In, "in") + (16, Let, "let") + (17, Loop, "loop") + (18, Match, "match") + (19, Mod, "mod") + (20, Move, "move") + (21, Mut, "mut") + (22, Pub, "pub") + (23, Ref, "ref") + (24, Return, "return") + (25, SelfValue, "self") + (26, SelfType, "Self") + (27, Static, "static") + (28, Struct, "struct") + (29, Super, "super") + (30, Trait, "trait") + (31, True, "true") + (32, Type, "type") + (33, Unsafe, "unsafe") + (34, Use, "use") + (35, Where, "where") + (36, While, "while") + + // Keywords reserved for future use. + (37, Abstract, "abstract") + (38, Alignof, "alignof") + (39, Become, "become") + (40, Do, "do") + (41, Final, "final") + (42, Macro, "macro") + (43, Offsetof, "offsetof") + (44, Override, "override") + (45, Priv, "priv") + (46, Proc, "proc") + (47, Pure, "pure") + (48, Sizeof, "sizeof") + (49, Typeof, "typeof") + (50, Unsized, "unsized") + (51, Virtual, "virtual") + (52, Yield, "yield") + + // Weak keywords, have special meaning only in specific contexts. + (53, Default, "default") + (54, StaticLifetime, "'static") + (55, Union, "union") +} + +// If an interner exists in TLS, return it. Otherwise, prepare a fresh one. +fn with_interner T>(f: F) -> T { + thread_local!(static INTERNER: RefCell = { + RefCell::new(Interner::fresh()) + }); + INTERNER.with(|interner| f(&mut *interner.borrow_mut())) +} + +/// Reset the ident interner to its initial state. +pub fn reset_interner() { + with_interner(|interner| *interner = Interner::fresh()); +} + +/// Represents a string stored in the thread-local interner. Because the +/// interner lives for the life of the thread, this can be safely treated as an +/// immortal string, as long as it never crosses between threads. +/// +/// FIXME(pcwalton): You must be careful about what you do in the destructors +/// of objects stored in TLS, because they may run after the interner is +/// destroyed. In particular, they must not access string contents. This can +/// be fixed in the future by just leaking all strings until thread death +/// somehow. 
+#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)] +pub struct InternedString { + string: Rc, +} + +impl InternedString { + pub fn new(string: &'static str) -> InternedString { + InternedString { + string: Rc::__from_str(string), + } + } +} + +impl ::std::ops::Deref for InternedString { + type Target = str; + fn deref(&self) -> &str { &self.string } +} + +impl fmt::Debug for InternedString { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&self.string, f) + } +} + +impl fmt::Display for InternedString { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.string, f) + } +} + +impl<'a> PartialEq<&'a str> for InternedString { + fn eq(&self, other: & &'a str) -> bool { + PartialEq::eq(&self.string[..], *other) + } +} + +impl<'a> PartialEq for &'a str { + fn eq(&self, other: &InternedString) -> bool { + PartialEq::eq(*self, &other.string[..]) + } +} + +impl PartialEq for InternedString { + fn eq(&self, other: &str) -> bool { + PartialEq::eq(&self.string[..], other) + } +} + +impl PartialEq for str { + fn eq(&self, other: &InternedString) -> bool { + PartialEq::eq(self, &other.string[..]) + } +} + +impl Decodable for InternedString { + fn decode(d: &mut D) -> Result { + Ok(Symbol::intern(&d.read_str()?).as_str()) + } +} + +impl Encodable for InternedString { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_str(&self.string) + } +} + +/// Interns and returns the string contents of an identifier, using the +/// thread-local interner. +#[inline] +pub fn intern_and_get_ident(s: &str) -> InternedString { + Symbol::intern(s).as_str() +} + +#[cfg(test)] +mod tests { + use super::*; + use ast::Name; + + #[test] + fn interner_tests() { + let mut i: Interner = Interner::new(); + // first one is zero: + assert_eq!(i.intern("dog"), Name(0)); + // re-use gets the same entry: + assert_eq!(i.intern ("dog"), Name(0)); + // different string gets a different #: + assert_eq!(i.intern("cat"), Name(1)); + assert_eq!(i.intern("cat"), Name(1)); + // dog is still at zero + assert_eq!(i.intern("dog"), Name(0)); + // gensym gets 3 + assert_eq!(i.gensym("zebra"), Name(2)); + // gensym of same string gets new number : + assert_eq!(i.gensym("zebra"), Name(3)); + // gensym of *existing* string gets new number: + assert_eq!(i.gensym("dog"), Name(4)); + } +} diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 194bd4a9085..271de16e7ac 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -34,11 +34,11 @@ use ext::expand::ExpansionConfig; use fold::Folder; use util::move_map::MoveMap; use fold; -use parse::token::{intern, keywords, InternedString}; use parse::{token, ParseSess}; use print::pprust; -use ast; +use ast::{self, Ident}; use ptr::P; +use symbol::{self, Symbol, keywords, InternedString}; use util::small_vector::SmallVector; enum ShouldPanic { @@ -48,7 +48,7 @@ enum ShouldPanic { struct Test { span: Span, - path: Vec , + path: Vec , bench: bool, ignore: bool, should_panic: ShouldPanic @@ -57,14 +57,14 @@ struct Test { struct TestCtxt<'a> { sess: &'a ParseSess, span_diagnostic: &'a errors::Handler, - path: Vec, + path: Vec, ext_cx: ExtCtxt<'a>, testfns: Vec, reexport_test_harness_main: Option, is_test_crate: bool, // top-level re-export submodule, filled out after folding is finished - toplevel_reexport: Option, + toplevel_reexport: Option, } // Traverse the crate, collecting all the test functions, eliding any @@ -91,10 +91,10 @@ pub fn modify_for_testing(sess: &ParseSess, struct TestHarnessGenerator<'a> { cx: 
TestCtxt<'a>, - tests: Vec, + tests: Vec, // submodule name, gensym'd identifier for re-exports - tested_submods: Vec<(ast::Ident, ast::Ident)>, + tested_submods: Vec<(Ident, Ident)>, } impl<'a> fold::Folder for TestHarnessGenerator<'a> { @@ -191,8 +191,8 @@ impl fold::Folder for EntryPointCleaner { EntryPointType::MainAttr | EntryPointType::Start => folded.map(|ast::Item {id, ident, attrs, node, vis, span}| { - let allow_str = token::intern("allow"); - let dead_code_str = token::intern("dead_code"); + let allow_str = Symbol::intern("allow"); + let dead_code_str = Symbol::intern("dead_code"); let word_vec = vec![attr::mk_list_word_item(dead_code_str)]; let allow_dead_code_item = attr::mk_list_item(allow_str, word_vec); let allow_dead_code = attr::mk_attr_outer(attr::mk_attr_id(), @@ -222,15 +222,18 @@ impl fold::Folder for EntryPointCleaner { fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac } } -fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec, - tested_submods: Vec<(ast::Ident, ast::Ident)>) -> (P, ast::Ident) { - let super_ = token::str_to_ident("super"); +fn mk_reexport_mod(cx: &mut TestCtxt, + parent: ast::NodeId, + tests: Vec, + tested_submods: Vec<(Ident, Ident)>) + -> (P, Ident) { + let super_ = Ident::from_str("super"); // Generate imports with `#[allow(private_in_public)]` to work around issue #36768. let allow_private_in_public = cx.ext_cx.attribute(DUMMY_SP, cx.ext_cx.meta_list( DUMMY_SP, - token::intern("allow"), - vec![cx.ext_cx.meta_list_item_word(DUMMY_SP, token::intern("private_in_public"))], + Symbol::intern("allow"), + vec![cx.ext_cx.meta_list_item_word(DUMMY_SP, Symbol::intern("private_in_public"))], )); let items = tests.into_iter().map(|r| { cx.ext_cx.item_use_simple(DUMMY_SP, ast::Visibility::Public, @@ -247,7 +250,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec Span { let info = ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { - format: MacroAttribute(intern("test")), + format: MacroAttribute(Symbol::intern("test")), span: None, allow_internal_unstable: true, } @@ -456,7 +459,7 @@ mod __test { */ fn mk_std(cx: &TestCtxt) -> P { - let id_test = token::str_to_ident("test"); + let id_test = Ident::from_str("test"); let (vi, vis, ident) = if cx.is_test_crate { (ast::ItemKind::Use( P(nospan(ast::ViewPathSimple(id_test, @@ -487,16 +490,17 @@ fn mk_main(cx: &mut TestCtxt) -> P { let ecx = &cx.ext_cx; // test::test_main_static - let test_main_path = ecx.path(sp, vec![token::str_to_ident("test"), - token::str_to_ident("test_main_static")]); + let test_main_path = + ecx.path(sp, vec![Ident::from_str("test"), Ident::from_str("test_main_static")]); + // test::test_main_static(...) let test_main_path_expr = ecx.expr_path(test_main_path); - let tests_ident_expr = ecx.expr_ident(sp, token::str_to_ident("TESTS")); + let tests_ident_expr = ecx.expr_ident(sp, Ident::from_str("TESTS")); let call_test_main = ecx.expr_call(sp, test_main_path_expr, vec![tests_ident_expr]); let call_test_main = ecx.stmt_expr(call_test_main); // #![main] - let main_meta = ecx.meta_word(sp, token::intern("main")); + let main_meta = ecx.meta_word(sp, Symbol::intern("main")); let main_attr = ecx.attribute(sp, main_meta); // pub fn main() { ... 
} let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![])); @@ -506,7 +510,7 @@ fn mk_main(cx: &mut TestCtxt) -> P { dummy_spanned(ast::Constness::NotConst), ::abi::Abi::Rust, ast::Generics::default(), main_body); let main = P(ast::Item { - ident: token::str_to_ident("main"), + ident: Ident::from_str("main"), attrs: vec![main_attr], id: ast::DUMMY_NODE_ID, node: main, @@ -533,7 +537,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P, Option>) { items: vec![import, mainfn, tests], }; let item_ = ast::ItemKind::Mod(testmod); - let mod_ident = token::gensym_ident("__test"); + let mod_ident = Ident::with_empty_ctxt(Symbol::gensym("__test")); let mut expander = cx.ext_cx.monotonic_expander(); let item = expander.fold_item(P(ast::Item { @@ -546,11 +550,11 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P, Option>) { })).pop().unwrap(); let reexport = cx.reexport_test_harness_main.as_ref().map(|s| { // building `use = __test::main` - let reexport_ident = token::str_to_ident(&s); + let reexport_ident = Ident::from_str(&s); let use_path = nospan(ast::ViewPathSimple(reexport_ident, - path_node(vec![mod_ident, token::str_to_ident("main")]))); + path_node(vec![mod_ident, Ident::from_str("main")]))); expander.fold_item(P(ast::Item { id: ast::DUMMY_NODE_ID, @@ -571,7 +575,7 @@ fn nospan(t: T) -> codemap::Spanned { codemap::Spanned { node: t, span: DUMMY_SP } } -fn path_node(ids: Vec ) -> ast::Path { +fn path_node(ids: Vec) -> ast::Path { ast::Path { span: DUMMY_SP, global: false, @@ -582,7 +586,7 @@ fn path_node(ids: Vec ) -> ast::Path { } } -fn path_name_i(idents: &[ast::Ident]) -> String { +fn path_name_i(idents: &[Ident]) -> String { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") idents.iter().map(|i| i.to_string()).collect::>().join("::") } @@ -660,7 +664,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P { // path to the #[test] function: "foo::bar::baz" let path_string = path_name_i(&path[..]); - let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[..])); + let name_expr = ecx.expr_str(span, symbol::intern_and_get_ident(&path_string[..])); // self::test::StaticTestName($name_expr) let name_expr = ecx.expr_call(span, diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 9ef6c07e489..7ca4650a3ae 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -34,6 +34,7 @@ use parse::lexer; use parse; use parse::token::{self, Token, Lit, Nonterminal}; use print::pprust; +use symbol::{self, Symbol}; use std::fmt; use std::iter::*; @@ -173,10 +174,10 @@ impl TokenTree { TokenTree::Delimited(sp, Rc::new(Delimited { delim: token::Bracket, open_span: sp, - tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))), + tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))), TokenTree::Token(sp, token::Eq), TokenTree::Token(sp, token::Literal( - token::StrRaw(token::intern(&stripped), num_of_hashes), None))], + token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))], close_span: sp, })) } @@ -295,7 +296,7 @@ impl TokenTree { pub fn maybe_str(&self) -> Option { match *self { TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => { - let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())), + let l = LitKind::Str(symbol::intern_and_get_ident(&parse::str_lit(&s.as_str())), ast::StrStyle::Cooked); Some(Spanned { node: l, @@ -303,7 +304,7 @@ impl TokenTree { }) } TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => { - let l = 
LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
+                let l = LitKind::Str(symbol::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
                                      ast::StrStyle::Raw(n));
                 Some(Spanned {
                     node: l,
diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs
deleted file mode 100644
index f56c6cedcd1..00000000000
--- a/src/libsyntax/util/interner.rs
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! An "interner" is a data structure that associates values with usize tags and
-//! allows bidirectional lookup; i.e. given a value, one can easily find the
-//! type, and vice versa.
-
-use ast::Name;
-
-use std::collections::HashMap;
-use std::rc::Rc;
-
-#[derive(Default)]
-pub struct Interner {
-    names: HashMap<Rc<str>, Name>,
-    strings: Vec<Rc<str>>,
-}
-
-/// When traits can extend traits, we should extend index<Name,T> to get []
-impl Interner {
-    pub fn new() -> Self {
-        Interner::default()
-    }
-
-    pub fn prefill(init: &[&str]) -> Self {
-        let mut this = Interner::new();
-        for &string in init {
-            this.intern(string);
-        }
-        this
-    }
-
-    pub fn intern(&mut self, string: &str) -> Name {
-        if let Some(&name) = self.names.get(string) {
-            return name;
-        }
-
-        let name = Name(self.strings.len() as u32);
-        let string = Rc::__from_str(string);
-        self.strings.push(string.clone());
-        self.names.insert(string, name);
-        name
-    }
-
-    pub fn gensym(&mut self, string: &str) -> Name {
-        let gensym = Name(self.strings.len() as u32);
-        // leave out of `names` to avoid colliding
-        self.strings.push(Rc::__from_str(string));
-        gensym
-    }
-
-    /// Create a gensym with the same name as an existing entry.
-    pub fn gensym_copy(&mut self, name: Name) -> Name {
-        let gensym = Name(self.strings.len() as u32);
-        // leave out of `names` to avoid colliding
-        let string = self.strings[name.0 as usize].clone();
-        self.strings.push(string);
-        gensym
-    }
-
-    pub fn get(&self, name: Name) -> Rc<str> {
-        self.strings[name.0 as usize].clone()
-    }
-
-    pub fn find(&self, string: &str) -> Option<Name> {
-        self.names.get(string).cloned()
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use ast::Name;
-
-    #[test]
-    fn interner_tests() {
-        let mut i: Interner = Interner::new();
-        // first one is zero:
-        assert_eq!(i.intern("dog"), Name(0));
-        // re-use gets the same entry:
-        assert_eq!(i.intern ("dog"), Name(0));
-        // different string gets a different #:
-        assert_eq!(i.intern("cat"), Name(1));
-        assert_eq!(i.intern("cat"), Name(1));
-        // dog is still at zero
-        assert_eq!(i.intern("dog"), Name(0));
-        // gensym gets 3
-        assert_eq!(i.gensym("zebra"), Name(2));
-        // gensym of same string gets new number :
-        assert_eq!(i.gensym("zebra"), Name(3));
-        // gensym of *existing* string gets new number:
-        assert_eq!(i.gensym("dog"), Name(4));
-        // gensym tests again with gensym_copy:
-        assert_eq!(i.gensym_copy(Name(2)), Name(5));
-        assert_eq!(&*i.get(Name(5)), "zebra");
-        assert_eq!(i.gensym_copy(Name(2)), Name(6));
-        assert_eq!(&*i.get(Name(6)), "zebra");
-        assert_eq!(&*i.get(Name(0)), "dog");
-        assert_eq!(&*i.get(Name(1)), "cat");
-        assert_eq!(&*i.get(Name(2)), "zebra");
-        assert_eq!(&*i.get(Name(3)), "zebra");
-        assert_eq!(&*i.get(Name(4)), "dog");
-    }
-}
diff --git a/src/libsyntax/util/lev_distance.rs b/src/libsyntax/util/lev_distance.rs
index e0796c34e57..0d6df2cfcb6 100644
--- a/src/libsyntax/util/lev_distance.rs
+++ b/src/libsyntax/util/lev_distance.rs
@@ -10,7 +10,7 @@
 
 use ast::Name;
 use std::cmp;
-use parse::token::InternedString;
+use symbol::InternedString;
 
 /// To find the Levenshtein distance between two strings
 pub fn lev_distance(a: &str, b: &str) -> usize {
diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs
index df4eb1c9ed7..ce24fe1eb61 100644
--- a/src/libsyntax/util/parser.rs
+++ b/src/libsyntax/util/parser.rs
@@ -7,7 +7,8 @@
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
-use parse::token::{Token, BinOpToken, keywords};
+use parse::token::{Token, BinOpToken};
+use symbol::keywords;
 use ast::BinOpKind;
 
 /// Associative operator with precedence.
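The remaining hunks (asm.rs, concat.rs, the deriving modules, env.rs, format.rs, and the proc-macro registrar below) apply the same mechanical renames: `token::intern` becomes `Symbol::intern`, `token::str_to_ident` becomes `Ident::from_str`, `token::gensym`/`gensym_ident` become `Symbol::gensym`, and `token::intern_and_get_ident` moves to `syntax::symbol`. The following is a minimal, self-contained sketch of the interner being relocated; the `Symbol` wrapper, the thread-local storage, and the method names are assumptions made for illustration, since the new `src/libsyntax/symbol.rs` is not reproduced in this excerpt.

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    pub struct Symbol(u32);

    #[derive(Default)]
    pub struct Interner {
        names: HashMap<Rc<str>, Symbol>,
        strings: Vec<Rc<str>>,
    }

    impl Interner {
        // Interning the same string twice returns the same Symbol.
        pub fn intern(&mut self, string: &str) -> Symbol {
            if let Some(&sym) = self.names.get(string) {
                return sym;
            }
            let sym = Symbol(self.strings.len() as u32);
            let string: Rc<str> = Rc::from(string);
            self.strings.push(string.clone());
            self.names.insert(string, sym);
            sym
        }

        // A gensym is stored but never entered into `names`, so it can
        // never compare equal to an ordinary symbol with the same text.
        pub fn gensym(&mut self, string: &str) -> Symbol {
            let sym = Symbol(self.strings.len() as u32);
            self.strings.push(Rc::from(string));
            sym
        }

        pub fn get(&self, sym: Symbol) -> Rc<str> {
            self.strings[sym.0 as usize].clone()
        }
    }

    thread_local!(static INTERNER: RefCell<Interner> = RefCell::new(Interner::default()));

    impl Symbol {
        // Mirrors the `Symbol::intern("...")` call sites introduced by this patch.
        pub fn intern(string: &str) -> Symbol {
            INTERNER.with(|i| i.borrow_mut().intern(string))
        }

        pub fn gensym(string: &str) -> Symbol {
            INTERNER.with(|i| i.borrow_mut().gensym(string))
        }

        pub fn as_str(self) -> Rc<str> {
            INTERNER.with(|i| i.borrow().get(self))
        }
    }

    fn main() {
        let a = Symbol::intern("allow");
        let b = Symbol::intern("allow");
        assert_eq!(a, b);                        // interning is idempotent
        assert_ne!(Symbol::gensym("allow"), a);  // a gensym is always fresh
        assert_eq!(&*a.as_str(), "allow");
    }

Interning keeps symbol comparison cheap (a `u32` compare), while `gensym` guarantees that generated names such as the test harness's `__test` module or the proc-macro `registrar` cannot collide with user identifiers of the same spelling.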
diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index e4d0cb74046..58fd83e17e1 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -17,9 +17,9 @@ use syntax::codemap; use syntax::ext::base; use syntax::ext::base::*; use syntax::feature_gate; -use syntax::parse::token::intern; use syntax::parse::{self, token}; use syntax::ptr::P; +use syntax::symbol::{self, Symbol, InternedString}; use syntax::ast::AsmDialect; use syntax_pos::Span; use syntax::tokenstream; @@ -73,7 +73,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, }) .unwrap_or(tts.len()); let mut p = cx.new_parser_from_tts(&tts[first_colon..]); - let mut asm = token::InternedString::new(""); + let mut asm = InternedString::new(""); let mut asm_str_style = None; let mut outputs = Vec::new(); let mut inputs = Vec::new(); @@ -139,7 +139,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, let output = match ch.next() { Some('=') => None, Some('+') => { - Some(token::intern_and_get_ident(&format!("={}", ch.as_str()))) + Some(symbol::intern_and_get_ident(&format!("={}", ch.as_str()))) } _ => { cx.span_err(span, "output operand constraint lacks '=' or '+'"); @@ -242,7 +242,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, let expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { call_site: sp, callee: codemap::NameAndSpan { - format: codemap::MacroBang(intern("asm")), + format: codemap::MacroBang(Symbol::intern("asm")), span: None, allow_internal_unstable: false, }, @@ -251,7 +251,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, MacEager::expr(P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::InlineAsm(P(ast::InlineAsm { - asm: token::intern_and_get_ident(&asm), + asm: symbol::intern_and_get_ident(&asm), asm_str_style: asm_str_style.unwrap(), outputs: outputs, inputs: inputs, diff --git a/src/libsyntax_ext/concat.rs b/src/libsyntax_ext/concat.rs index 02b44f2d012..45df1ace985 100644 --- a/src/libsyntax_ext/concat.rs +++ b/src/libsyntax_ext/concat.rs @@ -11,7 +11,7 @@ use syntax::ast; use syntax::ext::base; use syntax::ext::build::AstBuilder; -use syntax::parse::token; +use syntax::symbol::intern_and_get_ident; use syntax_pos; use syntax::tokenstream; @@ -57,5 +57,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, } } } - base::MacEager::expr(cx.expr_str(sp, token::intern_and_get_ident(&accumulator[..]))) + base::MacEager::expr(cx.expr_str(sp, intern_and_get_ident(&accumulator[..]))) } diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs index e56c6e2229a..b26e33eb384 100644 --- a/src/libsyntax_ext/concat_idents.rs +++ b/src/libsyntax_ext/concat_idents.rs @@ -13,7 +13,6 @@ use syntax::ext::base::*; use syntax::ext::base; use syntax::feature_gate; use syntax::parse::token; -use syntax::parse::token::str_to_ident; use syntax::ptr::P; use syntax_pos::Span; use syntax::tokenstream::TokenTree; @@ -51,7 +50,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, } } } - let res = str_to_ident(&res_str); + let res = ast::Ident::from_str(&res_str); struct Result { ident: ast::Ident, diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index c2a166e0819..d14b59d6c70 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -15,8 +15,8 @@ use syntax::ast::{self, Expr, Generics, ItemKind, MetaItem, VariantData}; use syntax::attr; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token::{self, keywords}; use syntax::ptr::P; +use 
syntax::symbol::{Symbol, keywords}; use syntax_pos::Span; pub fn expand_deriving_clone(cx: &mut ExtCtxt, @@ -74,7 +74,7 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt, _ => cx.span_bug(span, "#[derive(Clone)] on trait item or impl item"), } - let inline = cx.meta_word(span, token::intern("inline")); + let inline = cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; let trait_def = TraitDef { span: span, diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index a1be573551e..6ab5987a159 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -14,8 +14,8 @@ use deriving::generic::ty::*; use syntax::ast::{self, Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; pub fn expand_deriving_eq(cx: &mut ExtCtxt, @@ -23,9 +23,9 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt, mitem: &MetaItem, item: &Annotatable, push: &mut FnMut(Annotatable)) { - let inline = cx.meta_word(span, token::intern("inline")); - let hidden = cx.meta_list_item_word(span, token::intern("hidden")); - let doc = cx.meta_list(span, token::intern("doc"), vec![hidden]); + let inline = cx.meta_word(span, Symbol::intern("inline")); + let hidden = cx.meta_list_item_word(span, Symbol::intern("hidden")); + let doc = cx.meta_list(span, Symbol::intern("doc"), vec![hidden]); let attrs = vec![cx.attribute(span, inline), cx.attribute(span, doc)]; let trait_def = TraitDef { span: span, diff --git a/src/libsyntax_ext/deriving/cmp/ord.rs b/src/libsyntax_ext/deriving/cmp/ord.rs index 85d29f5e213..9fc3d997585 100644 --- a/src/libsyntax_ext/deriving/cmp/ord.rs +++ b/src/libsyntax_ext/deriving/cmp/ord.rs @@ -14,8 +14,8 @@ use deriving::generic::ty::*; use syntax::ast::{self, Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; pub fn expand_deriving_ord(cx: &mut ExtCtxt, @@ -23,7 +23,7 @@ pub fn expand_deriving_ord(cx: &mut ExtCtxt, mitem: &MetaItem, item: &Annotatable, push: &mut FnMut(Annotatable)) { - let inline = cx.meta_word(span, token::intern("inline")); + let inline = cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; let trait_def = TraitDef { span: span, diff --git a/src/libsyntax_ext/deriving/cmp/partial_eq.rs b/src/libsyntax_ext/deriving/cmp/partial_eq.rs index 1b6ccfd012c..f2a050ce971 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_eq.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_eq.rs @@ -14,8 +14,8 @@ use deriving::generic::ty::*; use syntax::ast::{BinOpKind, Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt, @@ -64,7 +64,7 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt, macro_rules! 
md { ($name:expr, $f:ident) => { { - let inline = cx.meta_word(span, token::intern("inline")); + let inline = cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; MethodDef { name: $name, diff --git a/src/libsyntax_ext/deriving/cmp/partial_ord.rs b/src/libsyntax_ext/deriving/cmp/partial_ord.rs index 7d76722ee97..ce4d549d696 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_ord.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_ord.rs @@ -16,8 +16,8 @@ use deriving::generic::ty::*; use syntax::ast::{self, BinOpKind, Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, @@ -27,7 +27,7 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, push: &mut FnMut(Annotatable)) { macro_rules! md { ($name:expr, $op:expr, $equal:expr) => { { - let inline = cx.meta_word(span, token::intern("inline")); + let inline = cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; MethodDef { name: $name, @@ -51,7 +51,7 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, vec![Box::new(ordering_ty)], true)); - let inline = cx.meta_word(span, token::intern("inline")); + let inline = cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; let partial_cmp_def = MethodDef { diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs index f367fed9cc2..4d181052d40 100644 --- a/src/libsyntax_ext/deriving/debug.rs +++ b/src/libsyntax_ext/deriving/debug.rs @@ -11,11 +11,10 @@ use deriving::generic::*; use deriving::generic::ty::*; -use syntax::ast; +use syntax::ast::{self, Ident}; use syntax::ast::{Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token; use syntax::ptr::P; use syntax_pos::{DUMMY_SP, Span}; @@ -71,7 +70,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P P P P P P unreachable!(), }; - let expr = cx.expr_method_call(span, builder_expr, token::str_to_ident("finish"), vec![]); + let expr = cx.expr_method_call(span, builder_expr, Ident::from_str("finish"), vec![]); stmts.push(cx.stmt_expr(expr)); let block = cx.block(span, stmts); diff --git a/src/libsyntax_ext/deriving/decodable.rs b/src/libsyntax_ext/deriving/decodable.rs index dc1f7b4e620..4ce4edef282 100644 --- a/src/libsyntax_ext/deriving/decodable.rs +++ b/src/libsyntax_ext/deriving/decodable.rs @@ -18,9 +18,9 @@ use syntax::ast; use syntax::ast::{Expr, MetaItem, Mutability}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token::InternedString; -use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::InternedString; +use syntax::symbol::intern_and_get_ident; use syntax_pos::Span; pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt, @@ -202,10 +202,7 @@ fn decode_static_fields(cx: &mut ExtCtxt, let fields = fields.iter() .enumerate() .map(|(i, &span)| { - getarg(cx, - span, - token::intern_and_get_ident(&format!("_field{}", i)), - i) + getarg(cx, span, intern_and_get_ident(&format!("_field{}", i)), i) }) .collect(); diff --git a/src/libsyntax_ext/deriving/default.rs b/src/libsyntax_ext/deriving/default.rs index 0b97213394b..69391f48c22 100644 --- a/src/libsyntax_ext/deriving/default.rs +++ b/src/libsyntax_ext/deriving/default.rs @@ -14,8 +14,8 @@ use 
deriving::generic::ty::*; use syntax::ast::{Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; pub fn expand_deriving_default(cx: &mut ExtCtxt, @@ -23,7 +23,7 @@ pub fn expand_deriving_default(cx: &mut ExtCtxt, mitem: &MetaItem, item: &Annotatable, push: &mut FnMut(Annotatable)) { - let inline = cx.meta_word(span, token::intern("inline")); + let inline = cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; let trait_def = TraitDef { span: span, diff --git a/src/libsyntax_ext/deriving/encodable.rs b/src/libsyntax_ext/deriving/encodable.rs index ebbddc6e480..0a139778483 100644 --- a/src/libsyntax_ext/deriving/encodable.rs +++ b/src/libsyntax_ext/deriving/encodable.rs @@ -95,8 +95,8 @@ use deriving::generic::ty::*; use syntax::ast::{Expr, ExprKind, MetaItem, Mutability}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::intern_and_get_ident; use syntax_pos::Span; pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt, @@ -193,7 +193,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, for (i, &FieldInfo { name, ref self_, span, .. }) in fields.iter().enumerate() { let name = match name { Some(id) => id.name.as_str(), - None => token::intern_and_get_ident(&format!("_field{}", i)), + None => intern_and_get_ident(&format!("_field{}", i)), }; let self_ref = cx.expr_addr_of(span, self_.clone()); let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]); diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index b245e399f1c..63cd7678321 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -198,8 +198,8 @@ use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; use syntax::codemap::{self, dummy_spanned, respan}; use syntax::util::move_map::MoveMap; -use syntax::parse::token::{self, keywords}; use syntax::ptr::P; +use syntax::symbol::{Symbol, keywords}; use syntax_pos::{DUMMY_SP, Span}; use errors::Handler; @@ -639,13 +639,13 @@ impl<'a> TraitDef<'a> { let attr = cx.attribute(self.span, cx.meta_word(self.span, - token::intern("automatically_derived"))); + Symbol::intern("automatically_derived"))); // Just mark it now since we know that it'll end up used downstream attr::mark_used(&attr); let opt_trait_ref = Some(trait_ref); let unused_qual = { - let word = cx.meta_list_item_word(self.span, token::intern("unused_qualifications")); - cx.attribute(self.span, cx.meta_list(self.span, token::intern("allow"), vec![word])) + let word = cx.meta_list_item_word(self.span, Symbol::intern("unused_qualifications")); + cx.attribute(self.span, cx.meta_list(self.span, Symbol::intern("allow"), vec![word])) }; let mut a = vec![attr, unused_qual]; diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs index ef4c02931ab..535d7de19e3 100644 --- a/src/libsyntax_ext/deriving/mod.rs +++ b/src/libsyntax_ext/deriving/mod.rs @@ -16,8 +16,8 @@ use syntax::codemap; use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension}; use syntax::ext::build::AstBuilder; use syntax::feature_gate::{self, emit_feature_err}; -use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; macro_rules! 
pathvec { @@ -80,7 +80,7 @@ fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span { expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { call_site: span, callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(token::intern(attr_name)), + format: codemap::MacroAttribute(Symbol::intern(attr_name)), span: Some(span), allow_internal_unstable: true, }, @@ -105,7 +105,7 @@ pub fn expand_derive(cx: &mut ExtCtxt, } }; - let derive = token::intern("derive"); + let derive = Symbol::intern("derive"); let mut derive_attrs = Vec::new(); item = item.map_attrs(|attrs| { let partition = attrs.into_iter().partition(|attr| attr.name() == derive); @@ -176,7 +176,7 @@ pub fn expand_derive(cx: &mut ExtCtxt, feature_gate::EXPLAIN_CUSTOM_DERIVE); } else { cx.span_warn(titem.span, feature_gate::EXPLAIN_DEPR_CUSTOM_DERIVE); - let name = token::intern(&format!("derive_{}", tname)); + let name = Symbol::intern(&format!("derive_{}", tname)); let mitem = cx.meta_word(titem.span, name); new_attributes.push(cx.attribute(mitem.span, mitem)); } @@ -251,10 +251,10 @@ pub fn expand_derive(cx: &mut ExtCtxt, // RFC #1445. `#[derive(PartialEq, Eq)]` adds a (trusted) // `#[structural_match]` attribute. - let (partial_eq, eq) = (token::intern("PartialEq"), token::intern("Eq")); + let (partial_eq, eq) = (Symbol::intern("PartialEq"), Symbol::intern("Eq")); if traits.iter().any(|t| t.name() == Some(partial_eq)) && traits.iter().any(|t| t.name() == Some(eq)) { - let structural_match = token::intern("structural_match"); + let structural_match = Symbol::intern("structural_match"); let span = allow_unstable(cx, span, "derive(PartialEq, Eq)"); let meta = cx.meta_word(span, structural_match); item = item.map(|mut i| { @@ -267,10 +267,10 @@ pub fn expand_derive(cx: &mut ExtCtxt, // the same as the copy implementation. 
// // Add a marker attribute here picked up during #[derive(Clone)] - let (copy, clone) = (token::intern("Copy"), token::intern("Clone")); + let (copy, clone) = (Symbol::intern("Copy"), Symbol::intern("Clone")); if traits.iter().any(|t| t.name() == Some(clone)) && traits.iter().any(|t| t.name() == Some(copy)) { - let marker = token::intern("rustc_copy_clone_marker"); + let marker = Symbol::intern("rustc_copy_clone_marker"); let span = allow_unstable(cx, span, "derive(Copy, Clone)"); let meta = cx.meta_word(span, marker); item = item.map(|mut i| { @@ -282,14 +282,14 @@ pub fn expand_derive(cx: &mut ExtCtxt, let mut items = Vec::new(); for titem in traits.iter() { let tname = titem.word().unwrap().name(); - let name = token::intern(&format!("derive({})", tname)); + let name = Symbol::intern(&format!("derive({})", tname)); let mitem = cx.meta_word(titem.span, name); let span = Span { expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { call_site: titem.span, callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(token::intern(&format!("derive({})", tname))), + format: codemap::MacroAttribute(Symbol::intern(&format!("derive({})", tname))), span: Some(titem.span), allow_internal_unstable: true, }, @@ -408,7 +408,7 @@ fn call_intrinsic(cx: &ExtCtxt, span.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { call_site: span, callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(token::intern("derive")), + format: codemap::MacroAttribute(Symbol::intern("derive")), span: Some(span), allow_internal_unstable: true, }, diff --git a/src/libsyntax_ext/env.rs b/src/libsyntax_ext/env.rs index 5c081b98962..f0cefe70ead 100644 --- a/src/libsyntax_ext/env.rs +++ b/src/libsyntax_ext/env.rs @@ -17,7 +17,7 @@ use syntax::ast; use syntax::ext::base::*; use syntax::ext::base; use syntax::ext::build::AstBuilder; -use syntax::parse::token; +use syntax::symbol::intern_and_get_ident; use syntax_pos::Span; use syntax::tokenstream; @@ -49,7 +49,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, Ok(s) => { cx.expr_call_global(sp, cx.std_path(&["option", "Option", "Some"]), - vec![cx.expr_str(sp, token::intern_and_get_ident(&s[..]))]) + vec![cx.expr_str(sp, intern_and_get_ident(&s[..]))]) } }; MacEager::expr(e) @@ -73,7 +73,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, Some((v, _style)) => v, }; let msg = match exprs.next() { - None => token::intern_and_get_ident(&format!("environment variable `{}` not defined", var)), + None => intern_and_get_ident(&format!("environment variable `{}` not defined", var)), Some(second) => { match expr_to_string(cx, second, "expected string literal") { None => return DummyResult::expr(sp), @@ -92,7 +92,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, cx.span_err(sp, &msg); cx.expr_usize(sp, 0) } - Ok(s) => cx.expr_str(sp, token::intern_and_get_ident(&s)), + Ok(s) => cx.expr_str(sp, intern_and_get_ident(&s)), }; MacEager::expr(e) } diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index 6eba8baf5b8..ed3271f01e7 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -17,8 +17,9 @@ use syntax::ast; use syntax::ext::base::*; use syntax::ext::base; use syntax::ext::build::AstBuilder; -use syntax::parse::token::{self, keywords}; +use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::{self, keywords}; use syntax_pos::{Span, DUMMY_SP}; use syntax::tokenstream; @@ -369,7 +370,7 @@ impl<'a, 'b> Context<'a, 'b> { /// Translate the accumulated string literals to a literal expression fn 
trans_literal_string(&mut self) -> P { let sp = self.fmtsp; - let s = token::intern_and_get_ident(&self.literal); + let s = symbol::intern_and_get_ident(&self.literal); self.literal.clear(); self.ecx.expr_str(sp, s) } diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs index 1ebac19b4f0..e83fd55cd71 100644 --- a/src/libsyntax_ext/lib.rs +++ b/src/libsyntax_ext/lib.rs @@ -53,7 +53,7 @@ use std::rc::Rc; use syntax::ast; use syntax::ext::base::{MacroExpanderFn, NormalTT, IdentTT, MultiModifier, NamedSyntaxExtension}; use syntax::ext::tt::macro_rules::MacroRulesExpander; -use syntax::parse::token::intern; +use syntax::symbol::Symbol; pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, user_exts: Vec, @@ -62,11 +62,11 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, resolver.add_ext(ast::Ident::with_empty_ctxt(name), Rc::new(ext)); }; - register(intern("macro_rules"), IdentTT(Box::new(MacroRulesExpander), None, false)); + register(Symbol::intern("macro_rules"), IdentTT(Box::new(MacroRulesExpander), None, false)); macro_rules! register { ($( $name:ident: $f:expr, )*) => { $( - register(intern(stringify!($name)), + register(Symbol::intern(stringify!($name)), NormalTT(Box::new($f as MacroExpanderFn), None, false)); )* } } @@ -112,9 +112,10 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, } // format_args uses `unstable` things internally. - register(intern("format_args"), NormalTT(Box::new(format::expand_format_args), None, true)); + register(Symbol::intern("format_args"), + NormalTT(Box::new(format::expand_format_args), None, true)); - register(intern("derive"), MultiModifier(Box::new(deriving::expand_derive))); + register(Symbol::intern("derive"), MultiModifier(Box::new(deriving::expand_derive))); for (name, ext) in user_exts { register(name, ext); diff --git a/src/libsyntax_ext/proc_macro_registrar.rs b/src/libsyntax_ext/proc_macro_registrar.rs index 1165eb46bf0..f9843cf7294 100644 --- a/src/libsyntax_ext/proc_macro_registrar.rs +++ b/src/libsyntax_ext/proc_macro_registrar.rs @@ -17,10 +17,10 @@ use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::expand::ExpansionConfig; use syntax::parse::ParseSess; -use syntax::parse::token; use syntax::feature_gate::Features; use syntax::fold::Folder; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::{Span, DUMMY_SP}; use syntax::visit::{self, Visitor}; @@ -271,23 +271,23 @@ fn mk_registrar(cx: &mut ExtCtxt, let eid = cx.codemap().record_expansion(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { - format: MacroAttribute(token::intern("proc_macro")), + format: MacroAttribute(Symbol::intern("proc_macro")), span: None, allow_internal_unstable: true, } }); let span = Span { expn_id: eid, ..DUMMY_SP }; - let proc_macro = token::str_to_ident("proc_macro"); + let proc_macro = Ident::from_str("proc_macro"); let krate = cx.item(span, proc_macro, Vec::new(), ast::ItemKind::ExternCrate(None)); - let __internal = token::str_to_ident("__internal"); - let registry = token::str_to_ident("Registry"); - let registrar = token::str_to_ident("registrar"); - let register_custom_derive = token::str_to_ident("register_custom_derive"); + let __internal = Ident::from_str("__internal"); + let registry = Ident::from_str("Registry"); + let registrar = Ident::from_str("registrar"); + let register_custom_derive = Ident::from_str("register_custom_derive"); let stmts = custom_derives.iter().map(|cd| { let path = cx.path_global(cd.span, vec![cd.function_name]); 
let trait_name = cx.expr_str(cd.span, cd.trait_name.as_str()); @@ -316,14 +316,14 @@ fn mk_registrar(cx: &mut ExtCtxt, cx.ty(span, ast::TyKind::Tup(Vec::new())), cx.block(span, stmts)); - let derive_registrar = cx.meta_word(span, token::intern("rustc_derive_registrar")); + let derive_registrar = cx.meta_word(span, Symbol::intern("rustc_derive_registrar")); let derive_registrar = cx.attribute(span, derive_registrar); let func = func.map(|mut i| { i.attrs.push(derive_registrar); i.vis = ast::Visibility::Public; i }); - let ident = ast::Ident::with_empty_ctxt(token::gensym("registrar")); + let ident = ast::Ident::with_empty_ctxt(Symbol::gensym("registrar")); let module = cx.item_mod(span, span, ident, Vec::new(), vec![krate, func]).map(|mut i| { i.vis = ast::Visibility::Public; i diff --git a/src/libsyntax_ext/trace_macros.rs b/src/libsyntax_ext/trace_macros.rs index 9578af68100..48be8e0c53c 100644 --- a/src/libsyntax_ext/trace_macros.rs +++ b/src/libsyntax_ext/trace_macros.rs @@ -11,7 +11,7 @@ use syntax::ext::base::ExtCtxt; use syntax::ext::base; use syntax::feature_gate; -use syntax::parse::token::keywords; +use syntax::symbol::keywords; use syntax_pos::Span; use syntax::tokenstream::TokenTree;
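One detail the renames above rely on: imports of `keywords` now come from `syntax::symbol` rather than `syntax::parse::token`. This works because the interner is prefilled with the keyword strings in a fixed order, so each keyword's symbol index is a stable constant. The sketch below only illustrates the idea; the keyword list, the chosen index, and the `prefill` helper are assumptions, not the real table in `symbol.rs`.

    // Illustration of prefilled keyword indices (assumed list, not rustc's).
    fn prefill(init: &[&str]) -> Vec<String> {
        // The position in this Vec stands in for the Symbol's u32 in the real interner.
        init.iter().map(|s| s.to_string()).collect()
    }

    // Hypothetical: "super" sits at index 2 of the prefill list above.
    const SUPER_INDEX: usize = 2;

    fn main() {
        let table = prefill(&["static", "self", "super", "as"]);
        // Because the prefill order is fixed, a keyword can be a plain constant.
        assert_eq!(table[SUPER_INDEX], "super");
    }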