diff --git a/src/librustc/driver/session.rs b/src/librustc/driver/session.rs index 05302b8cca1..749c065de32 100644 --- a/src/librustc/driver/session.rs +++ b/src/librustc/driver/session.rs @@ -294,12 +294,17 @@ impl Session_ { self.opts.optimize == No && !self.debugging_opt(no_debug_borrows) } + // pointless function, now... pub fn str_of(@self, id: ast::ident) -> @~str { - token::get_ident_interner().get(id) + token::ident_to_str(id) } + + // pointless function, now... pub fn ident_of(@self, st: &str) -> ast::ident { - token::get_ident_interner().intern(st) + token::str_to_ident(st) } + + // pointless function, now... pub fn intr(@self) -> @syntax::parse::token::ident_interner { token::get_ident_interner() } diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index 0e263c78bfe..642df89da6e 100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -22,6 +22,7 @@ use core::vec; use syntax::attr; use syntax::codemap::{span, dummy_sp}; use syntax::diagnostic::span_handler; +use syntax::parse::token; use syntax::parse::token::ident_interner; use syntax::visit; use syntax::ast; @@ -176,7 +177,7 @@ fn visit_item(e: @mut Env, i: @ast::item) { } nn } - None => e.intr.get(i.ident) + None => token::ident_to_str(i.ident) }; if attr::find_attrs_by_name(i.attrs, "nolink").is_empty() { already_added = @@ -235,7 +236,7 @@ fn resolve_crate(e: @mut Env, hash: @~str, span: span) -> ast::crate_num { - let metas = metas_with_ident(@/*bad*/copy *e.intr.get(ident), metas); + let metas = metas_with_ident(token::ident_to_str(ident), metas); match existing_match(e, metas, hash) { None => { @@ -276,7 +277,7 @@ fn resolve_crate(e: @mut Env, match attr::last_meta_item_value_str_by_name(load_ctxt.metas, "name") { Some(v) => v, - None => e.intr.get(ident), + None => token::ident_to_str(ident), }; let cmeta = @cstore::crate_metadata { name: cname, @@ -305,10 +306,11 @@ fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map { for decoder::get_crate_deps(e.intr, cdata).each |dep| { let extrn_cnum = dep.cnum; let cname = dep.name; + let cname_str = token::ident_to_str(dep.name); let cmetas = metas_with(dep.vers, @~"vers", ~[]); debug!("resolving dep crate %s ver: %s hash: %s", - *e.intr.get(dep.name), *dep.vers, *dep.hash); - match existing_match(e, metas_with_ident(e.intr.get(cname), + *cname_str, *dep.vers, *dep.hash); + match existing_match(e, metas_with_ident(cname_str, copy cmetas), dep.hash) { Some(local_cnum) => { diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index bb35af19807..93136aca18a 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -40,6 +40,7 @@ use syntax::parse::token::{StringRef, ident_interner, special_idents}; use syntax::print::pprust; use syntax::{ast, ast_util}; use syntax::codemap; +use syntax::parse::token; type cmd = @crate_metadata; @@ -297,10 +298,10 @@ fn item_path(intr: @ident_interner, item_doc: ebml::Doc) -> ast_map::path { for reader::docs(path_doc) |tag, elt_doc| { if tag == tag_path_elt_mod { let str = reader::doc_as_str(elt_doc); - result.push(ast_map::path_mod(intr.intern(str))); + result.push(ast_map::path_mod(token::str_to_ident(str))); } else if tag == tag_path_elt_name { let str = reader::doc_as_str(elt_doc); - result.push(ast_map::path_name(intr.intern(str))); + result.push(ast_map::path_name(token::str_to_ident(str))); } else { // ignore tag_path_len element } @@ -314,8 +315,8 @@ fn item_name(intr: @ident_interner, item: ebml::Doc) -> 
ast::ident { do reader::with_doc_data(name) |data| { let string = str::from_bytes_slice(data); match intr.find_equiv(&StringRef(string)) { - None => intr.intern(string), - Some(val) => val, + None => token::str_to_ident(string), + Some(val) => ast::new_ident(val), } } } @@ -843,7 +844,7 @@ pub fn get_type_name_if_impl(intr: @ident_interner, } for reader::tagged_docs(item, tag_item_impl_type_basename) |doc| { - return Some(intr.intern(str::from_bytes(reader::doc_data(doc)))); + return Some(token::str_to_ident(str::from_bytes(reader::doc_data(doc)))); } return None; @@ -1095,7 +1096,7 @@ pub fn get_crate_deps(intr: @ident_interner, data: @~[u8]) -> ~[crate_dep] { } for reader::tagged_docs(depsdoc, tag_crate_dep) |depdoc| { deps.push(crate_dep {cnum: crate_num, - name: intr.intern(docstr(depdoc, tag_crate_dep_name)), + name: token::str_to_ident(docstr(depdoc, tag_crate_dep_name)), vers: @docstr(depdoc, tag_crate_dep_vers), hash: @docstr(depdoc, tag_crate_dep_hash)}); crate_num += 1; @@ -1109,7 +1110,7 @@ fn list_crate_deps(intr: @ident_interner, data: @~[u8], out: @io::Writer) { for get_crate_deps(intr, data).each |dep| { out.write_str( fmt!("%d %s-%s-%s\n", - dep.cnum, *intr.get(dep.name), *dep.hash, *dep.vers)); + dep.cnum, *token::ident_to_str(dep.name), *dep.hash, *dep.vers)); } out.write_str("\n"); diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index 871bc65afc6..427ec80740d 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -1055,7 +1055,7 @@ fn encode_info_for_item(ecx: @EncodeContext, tcx.sess.span_unimpl( item.span, fmt!("Method %s is both provided and static", - *tcx.sess.intr().get(method_ty.ident))); + *token::ident_to_str(method_ty.ident))); } encode_type_param_bounds(ebml_w, ecx, &m.generics.ty_params); diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index 182f1e9078c..08155d675fe 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -19,6 +19,7 @@ use metadata::filesearch::FileSearch; use metadata::filesearch; use syntax::codemap::span; use syntax::diagnostic::span_handler; +use syntax::parse::token; use syntax::parse::token::ident_interner; use syntax::print::pprust; use syntax::{ast, attr}; @@ -59,7 +60,7 @@ pub fn load_library_crate(cx: &Context) -> (~str, @~[u8]) { None => { cx.diag.span_fatal( cx.span, fmt!("can't find crate for `%s`", - *cx.intr.get(cx.ident))); + *token::ident_to_str(cx.ident))); } } } diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index b9b78e5827d..8bde4e37d54 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -1168,7 +1168,7 @@ impl fake_ext_ctxt for fake_session { } } fn ident_of(&self, st: &str) -> ast::ident { - token::get_ident_interner().intern(st) + token::str_to_ident(st) } } diff --git a/src/librustc/middle/borrowck/mod.rs b/src/librustc/middle/borrowck/mod.rs index 0834d036083..e3fc0bde72a 100644 --- a/src/librustc/middle/borrowck/mod.rs +++ b/src/librustc/middle/borrowck/mod.rs @@ -30,6 +30,7 @@ use syntax::ast; use syntax::ast_map; use syntax::visit; use syntax::codemap::span; +use syntax::parse::token; macro_rules! 
if_ok( ($inp: expr) => ( @@ -711,7 +712,7 @@ impl BorrowckCtxt { LpVar(id) => { match self.tcx.items.find(&id) { Some(&ast_map::node_local(ident)) => { - str::push_str(out, *self.tcx.sess.intr().get(ident)); + str::push_str(out, *token::ident_to_str(ident)); } r => { self.tcx.sess.bug( @@ -726,7 +727,7 @@ impl BorrowckCtxt { match fname { mc::NamedField(fname) => { str::push_char(out, '.'); - str::push_str(out, *self.tcx.sess.intr().get(fname)); + str::push_str(out, *token::ident_to_str(fname)); } mc::PositionalField(idx) => { str::push_char(out, '#'); // invent a notation here diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs index 39564dcff25..d2f3f875275 100644 --- a/src/librustc/middle/privacy.rs +++ b/src/librustc/middle/privacy.rs @@ -235,8 +235,7 @@ pub fn check_crate(tcx: ty::ctxt, if field.ident != ident { loop; } if field.vis == private { tcx.sess.span_err(span, fmt!("field `%s` is private", - *token::get_ident_interner() - .get(ident))); + *token::ident_to_str(ident))); } break; } @@ -245,7 +244,7 @@ pub fn check_crate(tcx: ty::ctxt, // Given the ID of a method, checks to ensure it's in scope. let check_method_common: @fn(span: span, method_id: def_id, - name: &ident) = + name: ident) = |span, method_id, name| { if method_id.crate == local_crate { let is_private = method_is_private(span, method_id.node); @@ -256,8 +255,7 @@ pub fn check_crate(tcx: ty::ctxt, !privileged_items.contains(&(container_id.node))) { tcx.sess.span_err(span, fmt!("method `%s` is private", - *token::get_ident_interner() - .get(*name))); + *token::ident_to_str(name))); } } else { let visibility = @@ -265,8 +263,7 @@ pub fn check_crate(tcx: ty::ctxt, if visibility != public { tcx.sess.span_err(span, fmt!("method `%s` is private", - *token::get_ident_interner() - .get(*name))); + *token::ident_to_str(name))); } } }; @@ -278,7 +275,7 @@ pub fn check_crate(tcx: ty::ctxt, match def { def_static_method(method_id, _, _) => { debug!("found static method def, checking it"); - check_method_common(span, method_id, path.idents.last()) + check_method_common(span, method_id, *path.idents.last()) } def_fn(def_id, _) => { if def_id.crate == local_crate { @@ -286,19 +283,13 @@ pub fn check_crate(tcx: ty::ctxt, !privileged_items.contains(&def_id.node) { tcx.sess.span_err(span, fmt!("function `%s` is private", - *token::get_ident_interner() - .get(copy *path - .idents - .last()))); + *token::ident_to_str(*path.idents.last()))); } } else if csearch::get_item_visibility(tcx.sess.cstore, def_id) != public { tcx.sess.span_err(span, fmt!("function `%s` is private", - *token::get_ident_interner() - .get(copy *path - .idents - .last()))); + *token::ident_to_str(*path.idents.last()))); } } _ => {} @@ -312,7 +303,7 @@ pub fn check_crate(tcx: ty::ctxt, |span, origin, ident| { match *origin { method_static(method_id) => { - check_method_common(span, method_id, &ident) + check_method_common(span, method_id, ident) } method_param(method_param { trait_id: trait_id, @@ -328,10 +319,7 @@ pub fn check_crate(tcx: ty::ctxt, match item.node { item_trait(_, _, ref methods) => { if method_num >= (*methods).len() { - tcx.sess.span_bug(span, "method \ - number \ - out of \ - range?!"); + tcx.sess.span_bug(span, "method number out of range?!"); } match (*methods)[method_num] { provided(method) @@ -339,14 +327,9 @@ pub fn check_crate(tcx: ty::ctxt, !privileged_items .contains(&(trait_id.node)) => { tcx.sess.span_err(span, - fmt!("method - `%s` \ - is \ - private", - *token::get_ident_interner() - .get - (method - .ident))); 
+ fmt!("method `%s` is private", + *token::ident_to_str(method + .ident))); } provided(_) | required(_) => { // Required methods can't be @@ -355,20 +338,15 @@ pub fn check_crate(tcx: ty::ctxt, } } _ => { - tcx.sess.span_bug(span, "trait wasn't \ - actually a \ - trait?!"); + tcx.sess.span_bug(span, "trait wasn't actually a trait?!"); } } } Some(_) => { - tcx.sess.span_bug(span, "trait wasn't an \ - item?!"); + tcx.sess.span_bug(span, "trait wasn't an item?!"); } None => { - tcx.sess.span_bug(span, "trait item wasn't \ - found in the AST \ - map?!"); + tcx.sess.span_bug(span, "trait item wasn't found in the AST map?!"); } } } else { diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index 5004ee670ff..6b306186444 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -728,7 +728,7 @@ impl PrimitiveTypeTable { intr: @ident_interner, string: &str, primitive_type: prim_ty) { - let ident = intr.intern(string); + let ident = token::str_to_ident(string); self.primitive_types.insert(ident, primitive_type); } } @@ -2948,11 +2948,11 @@ impl Resolver { // top of the crate otherwise. let mut containing_module; let mut i; - if *interner.get(module_path[0]) == ~"self" { + if *token::ident_to_str(module_path[0]) == ~"self" { containing_module = self.get_nearest_normal_module_parent_or_self(module_); i = 1; - } else if *interner.get(module_path[0]) == ~"super" { + } else if *token::ident_to_str(module_path[0]) == ~"super" { containing_module = self.get_nearest_normal_module_parent_or_self(module_); i = 0; // We'll handle `super` below. @@ -2962,7 +2962,7 @@ impl Resolver { // Now loop through all the `super`s we find. while i < module_path.len() && - *interner.get(module_path[i]) == ~"super" { + *token::ident_to_str(module_path[i]) == ~"super" { debug!("(resolving module prefix) resolving `super` at %s", self.module_to_str(containing_module)); match self.get_nearest_normal_module_parent(containing_module) { diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index cce7a577ff7..846d3cd2610 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -2505,7 +2505,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::node_id) -> ValueRef { } ast::foreign_item_const(*) => { let typ = ty::node_id_to_type(tcx, ni.id); - let ident = token::get_ident_interner().get(ni.ident); + let ident = token::ident_to_str(ni.ident); let g = do str::as_c_str(*ident) |buf| { unsafe { llvm::LLVMAddGlobal(ccx.llmod, diff --git a/src/librustc/middle/trans/common.rs b/src/librustc/middle/trans/common.rs index c923c49decc..8831cde1105 100644 --- a/src/librustc/middle/trans/common.rs +++ b/src/librustc/middle/trans/common.rs @@ -52,16 +52,19 @@ use core::vec; use syntax::ast::ident; use syntax::ast_map::{path, path_elt}; use syntax::codemap::span; +use syntax::parse::token; use syntax::parse::token::ident_interner; use syntax::{ast, ast_map}; use syntax::abi::{X86, X86_64, Arm, Mips}; +// NOTE: this thunk is totally pointless now that we're not passing +// interners around... 
pub type namegen = @fn(s: &str) -> ident; pub fn new_namegen(intr: @ident_interner) -> namegen { let f: @fn(s: &str) -> ident = |prefix| { - intr.gensym(fmt!("%s_%u", - prefix, - intr.gensym(prefix).name)) + token::str_to_ident(fmt!("%s_%u", + prefix, + token::gensym(prefix))) }; f } diff --git a/src/librustc/middle/trans/foreign.rs b/src/librustc/middle/trans/foreign.rs index e2eaef9cbfe..e494d5f0db9 100644 --- a/src/librustc/middle/trans/foreign.rs +++ b/src/librustc/middle/trans/foreign.rs @@ -344,7 +344,7 @@ pub fn trans_foreign_mod(ccx: @CrateContext, } } ast::foreign_item_const(*) => { - let ident = token::get_ident_interner().get(foreign_item.ident); + let ident = token::ident_to_str(foreign_item.ident); ccx.item_symbols.insert(foreign_item.id, copy *ident); } } diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index c3c8ddbf7c9..1764db8ba6d 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -655,7 +655,7 @@ impl Repr for ty::Method { impl Repr for ast::ident { fn repr(&self, tcx: ctxt) -> ~str { - copy *tcx.sess.intr().get(*self) + copy *token::ident_to_str(*self) } } diff --git a/src/librustdoc/extract.rs b/src/librustdoc/extract.rs index 920a72077f0..54722fae2d7 100644 --- a/src/librustdoc/extract.rs +++ b/src/librustdoc/extract.rs @@ -18,14 +18,14 @@ use doc; use core::vec; use syntax::ast; -use syntax::parse::token::{ident_interner}; +use syntax; +use syntax::parse::token::{ident_interner, ident_to_str}; use syntax::parse::token; // Hack; rather than thread an interner through everywhere, rely on // thread-local data pub fn to_str(id: ast::ident) -> ~str { - let intr = token::get_ident_interner(); - return copy *(*intr).get(id); + return copy *ident_to_str(id); } pub fn interner() -> @ident_interner { diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 2cd8e5dea67..45c801f0994 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -15,7 +15,7 @@ use core::prelude::*; use codemap::{span, spanned}; use abi::AbiSet; use opt_vec::OptVec; -use parse::token::get_ident_interner; +use parse::token::{get_ident_interner, ident_to_str, interner_get, str_to_ident}; use core::hashmap::HashMap; use core::option::Option; @@ -32,6 +32,9 @@ use extra::serialize::{Encodable, Decodable, Encoder, Decoder}; #[deriving(Eq)] pub struct ident { name: Name, ctxt: SyntaxContext } +/// Construct an identifier with the given name and an empty context: +pub fn new_ident(name: Name) -> ident { ident {name: name, ctxt: empty_ctxt}} + // a SyntaxContext represents a chain of macro-expandings // and renamings. 
Each macro expansion corresponds to // a fresh uint @@ -81,13 +84,13 @@ pub type Mrk = uint; impl Encodable for ident { fn encode(&self, s: &mut S) { - s.emit_str(*get_ident_interner().get(*self)); + s.emit_str(*interner_get(self.name)); } } impl Decodable for ident { fn decode(d: &mut D) -> ident { - get_ident_interner().intern(d.read_str()) + str_to_ident(d.read_str()) } } diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index 53150bb1010..ec77ba88530 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -58,8 +58,8 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: &str, itr: @ident_interner) -> ~str { let strs = do p.map |e| { match *e { - path_mod(s) => copy *itr.get(s), - path_name(s) => copy *itr.get(s) + path_mod(s) => copy *itr.get(s.name), + path_name(s) => copy *itr.get(s.name) } }; str::connect(strs, sep) @@ -68,9 +68,9 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: &str, itr: @ident_interner) pub fn path_ident_to_str(p: &path, i: ident, itr: @ident_interner) -> ~str { if vec::is_empty(*p) { //FIXME /* FIXME (#2543) */ copy *i - copy *itr.get(i) + copy *itr.get(i.name) } else { - fmt!("%s::%s", path_to_str(*p, itr), *itr.get(i)) + fmt!("%s::%s", path_to_str(*p, itr), *itr.get(i.name)) } } @@ -80,8 +80,8 @@ pub fn path_to_str(p: &[path_elt], itr: @ident_interner) -> ~str { pub fn path_elt_to_str(pe: path_elt, itr: @ident_interner) -> ~str { match pe { - path_mod(s) => copy *itr.get(s), - path_name(s) => copy *itr.get(s) + path_mod(s) => copy *itr.get(s.name), + path_name(s) => copy *itr.get(s.name) } } @@ -356,16 +356,16 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str { } Some(&node_method(m, _, path)) => { fmt!("method %s in %s (id=%?)", - *itr.get(m.ident), path_to_str(*path, itr), id) + *itr.get(m.ident.name), path_to_str(*path, itr), id) } Some(&node_trait_method(ref tm, _, path)) => { let m = ast_util::trait_method_to_ty_method(&**tm); fmt!("method %s in %s (id=%?)", - *itr.get(m.ident), path_to_str(*path, itr), id) + *itr.get(m.ident.name), path_to_str(*path, itr), id) } Some(&node_variant(ref variant, _, path)) => { fmt!("variant %s in %s (id=%?)", - *itr.get(variant.node.name), path_to_str(*path, itr), id) + *itr.get(variant.node.name.name), path_to_str(*path, itr), id) } Some(&node_expr(expr)) => { fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id) @@ -381,7 +381,7 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str { fmt!("arg (id=%?)", id) } Some(&node_local(ident)) => { - fmt!("local (id=%?, name=%s)", id, *itr.get(ident)) + fmt!("local (id=%?, name=%s)", id, *itr.get(ident.name)) } Some(&node_block(_)) => { fmt!("block") diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 446610d987d..cf90da4ffae 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -26,7 +26,7 @@ use core::to_bytes; pub fn path_name_i(idents: &[ident], intr: @token::ident_interner) -> ~str { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") - str::connect(idents.map(|i| copy *intr.get(*i)), "::") + str::connect(idents.map(|i| copy *token::interner_get(i.name)), "::") } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index bf9f32b6030..97df2c6554e 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -19,7 +19,7 @@ use diagnostic::span_handler; use ext; use parse; use parse::token; -use parse::token::{intern, get_ident_interner}; +use parse::token::{ident_to_str, intern, get_ident_interner, 
str_to_ident}; use core::hashmap::HashMap; use core::vec; @@ -310,10 +310,10 @@ impl ExtCtxt { *self.trace_mac = x } pub fn str_of(&self, id: ast::ident) -> ~str { - copy *get_ident_interner().get(id) + copy *ident_to_str(id) } pub fn ident_of(&self, st: &str) -> ast::ident { - get_ident_interner().intern(st) + str_to_ident(st) } } diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index cdb57adb7bb..d46e0efcd0e 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -15,7 +15,7 @@ use codemap::span; use ext::base::*; use ext::base; use parse::token; -use parse::token::{get_ident_interner}; +use parse::token::{get_ident_interner, str_to_ident}; pub fn expand_syntax_ext(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { @@ -34,7 +34,7 @@ pub fn expand_syntax_ext(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree]) } } } - let res = get_ident_interner().intern(res_str); + let res = str_to_ident(res_str); let e = @ast::expr { id: cx.next_id(), diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 06a03ad5a9b..1e95f62e3e8 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -25,7 +25,7 @@ use ext::base::*; use fold::*; use parse; use parse::{parse_item_from_source_str}; -use parse::token::{get_ident_interner,intern}; +use parse::token::{get_ident_interner, ident_to_str, intern}; use core::vec; @@ -50,7 +50,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv, separators")); } let extname = pth.idents[0]; - let extnamestr = get_ident_interner().get(extname); + let extnamestr = ident_to_str(extname); // leaving explicit deref here to highlight unbox op: match (*extsbox).find(&extname.name) { None => { @@ -218,7 +218,7 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv, let extname = pth.idents[0]; let interner = get_ident_interner(); - let extnamestr = interner.get(extname); + let extnamestr = ident_to_str(extname); let expanded = match (*extsbox).find(&extname.name) { None => cx.span_fatal(pth.span, fmt!("macro undefined: '%s!'", *extnamestr)), @@ -228,7 +228,7 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv, cx.span_fatal(pth.span, fmt!("macro %s! 
expects no ident argument, \ given '%s'", *extnamestr, - *interner.get(it.ident))); + *ident_to_str(it.ident))); } cx.bt_push(ExpandedFrom(CallInfo { call_site: it.span, @@ -316,7 +316,7 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv, separators")); } let extname = pth.idents[0]; - let extnamestr = get_ident_interner().get(extname); + let extnamestr = ident_to_str(extname); let (fully_expanded, sp) = match (*extsbox).find(&extname.name) { None => cx.span_fatal(pth.span, fmt!("macro undefined: '%s'", *extnamestr)), @@ -735,7 +735,7 @@ mod test { use codemap; use codemap::spanned; use parse; - use parse::token::{get_ident_interner}; + use parse::token::{gensym, get_ident_interner}; use core::io; use core::option::{None, Some}; use util::parser_testing::{string_to_item_and_sess}; @@ -849,7 +849,7 @@ mod test { }; let table = @mut new_sctable(); let a_name = 100; // enforced by testing_interner - let a2_name = get_ident_interner().gensym("a2").name; + let a2_name = gensym("a2"); let renamer = new_ident_renamer(ast::ident{name:a_name,ctxt:empty_ctxt}, a2_name,table); let renamed_ast = fun_to_ident_folder(renamer).fold_item(item_ast).get(); diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index 46b1334835f..6648992dc61 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -25,7 +25,7 @@ use ext::build::AstBuilder; use core::option; use core::unstable::extfmt::ct::*; use core::vec; -use parse::token::{get_ident_interner}; +use parse::token::{get_ident_interner, str_to_ident}; pub fn expand_syntax_ext(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree]) -> base::MacResult { @@ -54,12 +54,11 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span, pieces: ~[Piece], args: ~[@ast::expr]) -> @ast::expr { fn make_path_vec(cx: @ExtCtxt, ident: &str) -> ~[ast::ident] { - let intr = get_ident_interner(); - return ~[intr.intern("std"), - intr.intern("unstable"), - intr.intern("extfmt"), - intr.intern("rt"), - intr.intern(ident)]; + return ~[str_to_ident("std"), + str_to_ident("unstable"), + str_to_ident("extfmt"), + str_to_ident("rt"), + str_to_ident(ident)]; } fn make_rt_path_expr(cx: @ExtCtxt, sp: span, nm: &str) -> @ast::expr { let path = make_path_vec(cx, nm); @@ -113,15 +112,14 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span, fn make_conv_struct(cx: @ExtCtxt, sp: span, flags_expr: @ast::expr, width_expr: @ast::expr, precision_expr: @ast::expr, ty_expr: @ast::expr) -> @ast::expr { - let intr = get_ident_interner(); cx.expr_struct( sp, cx.path_global(sp, make_path_vec(cx, "Conv")), ~[ - cx.field_imm(sp, intr.intern("flags"), flags_expr), - cx.field_imm(sp, intr.intern("width"), width_expr), - cx.field_imm(sp, intr.intern("precision"), precision_expr), - cx.field_imm(sp, intr.intern("ty"), ty_expr) + cx.field_imm(sp, str_to_ident("flags"), flags_expr), + cx.field_imm(sp, str_to_ident("width"), width_expr), + cx.field_imm(sp, str_to_ident("precision"), precision_expr), + cx.field_imm(sp, str_to_ident("ty"), ty_expr) ] ) } @@ -256,11 +254,11 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span, let nargs = args.len(); /* 'ident' is the local buffer building up the result of fmt! 
*/ - let ident = get_ident_interner().intern("__fmtbuf"); + let ident = str_to_ident("__fmtbuf"); let buf = || cx.expr_ident(fmt_sp, ident); - let core_ident = get_ident_interner().intern("std"); - let str_ident = get_ident_interner().intern("str"); - let push_ident = get_ident_interner().intern("push_str"); + let core_ident = str_to_ident("std"); + let str_ident = str_to_ident("str"); + let push_ident = str_to_ident("push_str"); let mut stms = ~[]; /* Translate each piece (portion of the fmt expression) by invoking the diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs index 732094655f2..68641d3f3e7 100644 --- a/src/libsyntax/ext/pipes/parse_proto.rs +++ b/src/libsyntax/ext/pipes/parse_proto.rs @@ -17,7 +17,7 @@ use ext::pipes::proto::*; use parse::common::SeqSep; use parse::parser; use parse::token; -use parse::token::{get_ident_interner}; +use parse::token::{get_ident_interner, interner_get}; pub trait proto_parser { fn parse_proto(&self, id: ~str) -> protocol; @@ -43,11 +43,11 @@ impl proto_parser for parser::Parser { fn parse_state(&self, proto: protocol) { let id = self.parse_ident(); - let name = copy *get_ident_interner().get(id); + let name = copy *interner_get(id.name); self.expect(&token::COLON); let dir = match copy *self.token { - token::IDENT(n, _) => get_ident_interner().get(n), + token::IDENT(n, _) => interner_get(n.name), _ => fail!() }; self.bump(); @@ -78,7 +78,7 @@ impl proto_parser for parser::Parser { } fn parse_message(&self, state: state) { - let mname = copy *get_ident_interner().get(self.parse_ident()); + let mname = copy *interner_get(self.parse_ident().name); let args = if *self.token == token::LPAREN { self.parse_unspanned_seq( @@ -97,7 +97,7 @@ impl proto_parser for parser::Parser { let next = match *self.token { token::IDENT(_, _) => { - let name = copy *get_ident_interner().get(self.parse_ident()); + let name = copy *interner_get(self.parse_ident().name); let ntys = if *self.token == token::LT { self.parse_unspanned_seq( &token::LT, diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index bfa4faa8b28..f8835b51aa5 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -80,7 +80,7 @@ pub mod rt { impl ToSource for ast::ident { fn to_source(&self, cx: @ExtCtxt) -> ~str { - copy *get_ident_interner().get(*self) + copy *interner_get(self.name) } } @@ -418,11 +418,11 @@ pub fn expand_quote_stmt(cx: @ExtCtxt, } fn ids_ext(cx: @ExtCtxt, strs: ~[~str]) -> ~[ast::ident] { - strs.map(|str| get_ident_interner().intern(*str)) + strs.map(|str| str_to_ident(*str)) } fn id_ext(cx: @ExtCtxt, str: &str) -> ast::ident { - get_ident_interner().intern(str) + str_to_ident(str) } // Lift an ident to the expr that evaluates to that ident. diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 1b23b96a732..86077a5c0b3 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -19,7 +19,7 @@ use codemap; use parse::lexer::*; //resolve bug? 
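The ext/fmt.rs and ext/quote.rs hunks above show the call-site shape used throughout this patch: interner method calls become free functions over the thread-local interner. A hedged sketch of the three helpers side by side, as it might look inside libsyntax (the `demo` function and the "flags" string are purely illustrative):

```rust
use ast;
use parse::token::{str_to_ident, ident_to_str, interner_get};

// Demonstration only: all three helpers consult the thread-local
// interner that get_ident_interner() lazily creates, so no
// @ident_interner needs to be threaded through the call chain.
fn demo() {
    let id: ast::ident = str_to_ident("flags");  // intern + empty syntax context
    let s: @~str = ident_to_str(id);             // ident back to its string
    let _same: @~str = interner_get(id.name);    // same lookup, by raw Name
    debug!("interned %s as %u", *s, id.name);
}
```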
use parse::ParseSess; use parse::parser::Parser; -use parse::token::{Token, EOF, to_str, nonterminal, get_ident_interner}; +use parse::token::{Token, EOF, to_str, nonterminal, get_ident_interner, ident_to_str}; use parse::token; use core::hashmap::HashMap; @@ -201,13 +201,13 @@ pub fn nameize(p_s: @mut ParseSess, ms: &[matcher], res: &[@named_match]) }; } codemap::spanned { - node: match_nonterminal(bind_name, _, idx), span: sp + node: match_nonterminal(ref bind_name, _, idx), span: sp } => { - if ret_val.contains_key(&bind_name) { + if ret_val.contains_key(bind_name) { p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: "+ - *get_ident_interner().get(bind_name)) + *ident_to_str(*bind_name)) } - ret_val.insert(bind_name, res[idx]); + ret_val.insert(*bind_name, res[idx]); } } } @@ -372,9 +372,9 @@ pub fn parse( || bb_eis.len() > 1u { let nts = str::connect(vec::map(bb_eis, |ei| { match ei.elts[ei.idx].node { - match_nonterminal(bind,name,_) => { - fmt!("%s ('%s')", *get_ident_interner().get(name), - *get_ident_interner().get(bind)) + match_nonterminal(ref bind,ref name,_) => { + fmt!("%s ('%s')", *ident_to_str(*name), + *ident_to_str(*bind)) } _ => fail!() } }), " or "); @@ -396,9 +396,9 @@ pub fn parse( let mut ei = bb_eis.pop(); match ei.elts[ei.idx].node { - match_nonterminal(_, name, idx) => { + match_nonterminal(_, ref name, idx) => { ei.matches[idx].push(@matched_nonterminal( - parse_nt(&rust_parser, *get_ident_interner().get(name)))); + parse_nt(&rust_parser, *ident_to_str(*name)))); ei.idx += 1u; } _ => fail!() diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 13393b76663..be2c50d223d 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -21,7 +21,7 @@ use ext::tt::macro_parser::{named_match, matched_seq, matched_nonterminal}; use ext::tt::macro_parser::{parse, parse_or_else, success, failure}; use parse::lexer::{new_tt_reader, reader}; use parse::parser::Parser; -use parse::token::{get_ident_interner, special_idents}; +use parse::token::{get_ident_interner, special_idents, gensym_ident, ident_to_str}; use parse::token::{FAT_ARROW, SEMI, nt_matchers, nt_tt}; use print; @@ -38,8 +38,8 @@ pub fn add_new_extension(cx: @ExtCtxt, spanned { node: copy m, span: dummy_sp() } } - let lhs_nm = get_ident_interner().gensym("lhs"); - let rhs_nm = get_ident_interner().gensym("rhs"); + let lhs_nm = gensym_ident("lhs"); + let rhs_nm = gensym_ident("rhs"); // The grammar for macro_rules! 
is: // $( $lhs:mtcs => $rhs:tt );+ @@ -151,7 +151,7 @@ pub fn add_new_extension(cx: @ExtCtxt, |cx, sp, arg| generic_extension(cx, sp, name, arg, *lhses, *rhses); return MRDef(MacroDef{ - name: copy *get_ident_interner().get(name), + name: copy *ident_to_str(name), ext: NormalTT(base::SyntaxExpanderTT{expander: exp, span: Some(sp)}) }); } diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index b508b342478..8ff66dc62e3 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -15,7 +15,8 @@ use ast::{token_tree, tt_delim, tt_tok, tt_seq, tt_nonterminal,ident}; use codemap::{span, dummy_sp}; use diagnostic::span_handler; use ext::tt::macro_parser::{named_match, matched_seq, matched_nonterminal}; -use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident, ident_interner, get_ident_interner}; +use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident, ident_interner}; +use parse::token::{ident_to_str, get_ident_interner}; use parse::lexer::TokenAndSpan; use core::hashmap::HashMap; @@ -126,7 +127,7 @@ fn lookup_cur_matched(r: &mut TtReader, name: ident) -> @named_match { Some(s) => lookup_cur_matched_by_matched(r, s), None => { r.sp_diag.span_fatal(r.cur_span, fmt!("unknown macro variable `%s`", - *r.interner.get(name))); + *r.interner.get(name.name))); } } } @@ -139,13 +140,13 @@ fn lockstep_iter_size(t: &token_tree, r: &mut TtReader) -> lis { match lhs { lis_unconstrained => copy rhs, lis_contradiction(_) => copy lhs, - lis_constraint(l_len, l_id) => match rhs { + lis_constraint(l_len, ref l_id) => match rhs { lis_unconstrained => copy lhs, lis_contradiction(_) => copy rhs, lis_constraint(r_len, _) if l_len == r_len => copy lhs, - lis_constraint(r_len, r_id) => { - let l_n = copy *get_ident_interner().get(l_id); - let r_n = copy *get_ident_interner().get(r_id); + lis_constraint(r_len, ref r_id) => { + let l_n = copy *ident_to_str(*l_id); + let r_n = copy *ident_to_str(*r_id); lis_contradiction(fmt!("Inconsistent lockstep iteration: \ '%s' has %u items, but '%s' has %u", l_n, l_len, r_n, r_len)) @@ -295,7 +296,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { r.sp_diag.span_fatal( copy r.cur_span, /* blame the macro writer */ fmt!("variable '%s' is still repeating at this depth", - *get_ident_interner().get(ident))); + *ident_to_str(ident))); } } } diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index fe93b5b688f..7c6b2774d77 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -17,7 +17,7 @@ use diagnostic::span_handler; use ext::tt::transcribe::{tt_next_token}; use ext::tt::transcribe::{dup_tt_reader}; use parse::token; -use parse::token::{get_ident_interner}; +use parse::token::{get_ident_interner, str_to_ident}; use core::char; use core::either; @@ -275,7 +275,7 @@ fn consume_any_line_comment(rdr: @mut StringReader) // but comments with only more "/"s are not if !is_line_non_doc_comment(acc) { return Some(TokenAndSpan{ - tok: token::DOC_COMMENT(get_ident_interner().intern(acc)), + tok: token::DOC_COMMENT(str_to_ident(acc)), sp: codemap::mk_sp(start_bpos, rdr.pos) }); } @@ -329,7 +329,7 @@ fn consume_block_comment(rdr: @mut StringReader) // but comments with only "*"s between two "/"s are not if !is_block_non_doc_comment(acc) { return Some(TokenAndSpan{ - tok: token::DOC_COMMENT(get_ident_interner().intern(acc)), + tok: token::DOC_COMMENT(str_to_ident(acc)), sp: codemap::mk_sp(start_bpos, rdr.pos) }); } @@ -475,12 +475,12 @@ fn scan_number(c: char, rdr: 
@mut StringReader) -> token::Token { if c == '3' && n == '2' { bump(rdr); bump(rdr); - return token::LIT_FLOAT(get_ident_interner().intern(num_str), + return token::LIT_FLOAT(str_to_ident(num_str), ast::ty_f32); } else if c == '6' && n == '4' { bump(rdr); bump(rdr); - return token::LIT_FLOAT(get_ident_interner().intern(num_str), + return token::LIT_FLOAT(str_to_ident(num_str), ast::ty_f64); /* FIXME (#2252): if this is out of range for either a 32-bit or 64-bit float, it won't be noticed till the @@ -492,9 +492,9 @@ fn scan_number(c: char, rdr: @mut StringReader) -> token::Token { } if is_float { if is_machine_float { - return token::LIT_FLOAT(get_ident_interner().intern(num_str), ast::ty_f); + return token::LIT_FLOAT(str_to_ident(num_str), ast::ty_f); } - return token::LIT_FLOAT_UNSUFFIXED(get_ident_interner().intern(num_str)); + return token::LIT_FLOAT_UNSUFFIXED(str_to_ident(num_str)); } else { if str::len(num_str) == 0u { rdr.fatal(~"no valid digits found for number"); @@ -557,7 +557,7 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token { let is_mod_name = c == ':' && nextch(rdr) == ':'; // FIXME: perform NFKC normalization here. (Issue #2253) - return token::IDENT(get_ident_interner().intern(accum_str), is_mod_name); + return token::IDENT(str_to_ident(accum_str), is_mod_name); } if is_dec_digit(c) { return scan_number(c, rdr); @@ -667,7 +667,7 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token { lifetime_name.push_char(rdr.curr); bump(rdr); } - return token::LIFETIME(get_ident_interner().intern(lifetime_name)); + return token::LIFETIME(str_to_ident(lifetime_name)); } // Otherwise it is a character constant: @@ -740,7 +740,7 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token { } } bump(rdr); - return token::LIT_STR(get_ident_interner().intern(accum_str)); + return token::LIT_STR(str_to_ident(accum_str)); } '-' => { if nextch(rdr) == '>' { @@ -784,7 +784,7 @@ mod test { use core::option::None; use diagnostic; use parse::token; - use parse::token::{get_ident_interner}; + use parse::token::{get_ident_interner, str_to_ident}; // represents a testing reader (incl. 
both reader and interner) struct Env { @@ -809,7 +809,7 @@ mod test { let Env {interner: ident_interner, string_reader} = setup(~"/* my source file */ \ fn main() { io::println(~\"zebra\"); }\n"); - let id = ident_interner.intern("fn"); + let id = str_to_ident("fn"); let tok1 = string_reader.next_token(); let tok2 = TokenAndSpan{ tok:token::IDENT(id, false), @@ -820,7 +820,7 @@ mod test { // read another token: let tok3 = string_reader.next_token(); let tok4 = TokenAndSpan{ - tok:token::IDENT(ident_interner.intern("main"), false), + tok:token::IDENT(str_to_ident("main"), false), sp:span {lo:BytePos(24),hi:BytePos(28),expn_info: None}}; assert_eq!(tok3,tok4); // the lparen is already read: @@ -839,7 +839,7 @@ mod test { // make the identifier by looking up the string in the interner fn mk_ident (env: Env, id: &str, is_mod_name: bool) -> token::Token { - token::IDENT (get_ident_interner().intern(id),is_mod_name) + token::IDENT (str_to_ident(id),is_mod_name) } #[test] fn doublecolonparsing () { @@ -898,7 +898,7 @@ mod test { let env = setup(~"'abc"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); - let id = get_ident_interner().intern("abc"); + let id = token::str_to_ident("abc"); assert_eq!(tok, token::LIFETIME(id)); } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 7ca2756c211..cfc2eaca241 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -348,8 +348,8 @@ mod test { use ast_util::new_ident; use abi; use parse::parser::Parser; - use parse::token::intern; - use util::parser_testing::{string_to_tts_and_sess,string_to_parser}; + use parse::token::{intern, str_to_ident}; + use util::parser_testing::{string_to_tts_and_sess, string_to_parser}; use util::parser_testing::{string_to_expr, string_to_item}; use util::parser_testing::{string_to_stmt}; diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 6260e7c460c..54845849ebb 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -85,7 +85,7 @@ use parse::obsolete::{ObsoleteLifetimeNotation, ObsoleteConstManagedPointer}; use parse::obsolete::{ObsoletePurity, ObsoleteStaticMethod}; use parse::obsolete::{ObsoleteConstItem, ObsoleteFixedLengthVectorType}; use parse::obsolete::{ObsoleteNamedExternModule, ObsoleteMultipleLocalDecl}; -use parse::token::{can_begin_expr, get_ident_interner, is_ident, is_ident_or_path}; +use parse::token::{can_begin_expr, get_ident_interner, ident_to_str, is_ident, is_ident_or_path}; use parse::token::{is_plain_ident, INTERPOLATED, keywords, special_idents, token_to_binop}; use parse::token; use parse::{new_sub_parser_from_file, next_node_id, ParseSess}; @@ -333,7 +333,7 @@ impl Parser { pub fn get_id(&self) -> node_id { next_node_id(self.sess) } pub fn id_to_str(&self, id: ident) -> @~str { - get_ident_interner().get(id) + get_ident_interner().get(id.name) } // is this one of the keywords that signals a closure type? 
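The `.get(id)` to `.get(id.name)` edits here and in ast_map.rs follow from the interner now being keyed by `Name` (a plain uint) rather than by `ast::ident`; the syntax context lives only on the ident. A small sketch of that relationship using the `new_ident` constructor this patch adds to `ast` (the `name_roundtrip` helper is hypothetical):

```rust
use ast;
use parse::token::{intern, interner_get};

// An ident is now an interned Name (a uint) plus a syntax context;
// the interner itself only ever sees the Name.
fn name_roundtrip(s: &str) -> @~str {
    let n: ast::Name = intern(s);            // index into the interner
    let id: ast::ident = ast::new_ident(n);  // attach the empty context
    interner_get(id.name)                    // look the string back up
}
```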
@@ -3370,7 +3370,7 @@ impl Parser { } if fields.len() == 0 { self.fatal(fmt!("Unit-like struct should be written as `struct %s;`", - *get_ident_interner().get(class_name))); + *get_ident_interner().get(class_name.name))); } self.bump(); } else if *self.token == token::LPAREN { @@ -3582,7 +3582,7 @@ impl Parser { } fn push_mod_path(&self, id: ident, attrs: ~[ast::attribute]) { - let default_path = get_ident_interner().get(id); + let default_path = token::interner_get(id.name); let file_path = match ::attr::first_attr_value_str_by_name( attrs, "path") { @@ -3605,7 +3605,7 @@ impl Parser { let prefix = prefix.dir_path(); let mod_path_stack = &*self.mod_path_stack; let mod_path = Path(".").push_many(*mod_path_stack); - let default_path = *get_ident_interner().get(id) + ".rs"; + let default_path = *token::interner_get(id.name) + ".rs"; let file_path = match ::attr::first_attr_value_str_by_name( outer_attrs, "path") { Some(d) => { @@ -3980,7 +3980,7 @@ impl Parser { match *self.token { token::LIT_STR(s) => { self.bump(); - let the_string = self.id_to_str(s); + let the_string = ident_to_str(s); let mut words = ~[]; for str::each_word(*the_string) |s| { words.push(s) } let mut abis = AbiSet::empty(); @@ -4542,7 +4542,7 @@ impl Parser { match *self.token { token::LIT_STR(s) => { self.bump(); - self.id_to_str(s) + ident_to_str(s) } _ => self.fatal("expected string literal") } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index b5882a71dd3..400d52d5a52 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -11,6 +11,7 @@ use core::prelude::*; use ast; +use ast::Name; use ast_util; use parse::token; use util::interner::StrInterner; @@ -176,29 +177,29 @@ pub fn to_str(in: @ident_interner, t: &Token) -> ~str { u.to_str() + ast_util::uint_ty_to_str(t) } LIT_INT_UNSUFFIXED(i) => { i.to_str() } - LIT_FLOAT(s, t) => { - let mut body = copy *in.get(s); + LIT_FLOAT(ref s, t) => { + let mut body = copy *ident_to_str(*s); if body.ends_with(".") { body += "0"; // `10.f` is not a float literal } body + ast_util::float_ty_to_str(t) } - LIT_FLOAT_UNSUFFIXED(s) => { - let mut body = copy *in.get(s); + LIT_FLOAT_UNSUFFIXED(ref s) => { + let mut body = copy *ident_to_str(*s); if body.ends_with(".") { body += "0"; // `10.f` is not a float literal } body } - LIT_STR(s) => { ~"\"" + str::escape_default(*in.get(s)) + "\"" } + LIT_STR(ref s) => { ~"\"" + str::escape_default(*ident_to_str(*s)) + "\"" } /* Name components */ - IDENT(s, _) => copy *in.get(s), - LIFETIME(s) => fmt!("'%s", *in.get(s)), + IDENT(s, _) => copy *in.get(s.name), + LIFETIME(s) => fmt!("'%s", *in.get(s.name)), UNDERSCORE => ~"_", /* Other */ - DOC_COMMENT(s) => copy *in.get(s), + DOC_COMMENT(ref s) => copy *ident_to_str(*s), EOF => ~"", INTERPOLATED(ref nt) => { match nt { @@ -394,27 +395,22 @@ pub struct ident_interner { } impl ident_interner { - // I'm torn as to whether these should produce idents or - // just uints. - pub fn intern(&self, val: &str) -> ast::ident { - ast::ident { name: self.interner.intern(val), ctxt: 0 } + pub fn intern(&self, val: &str) -> Name { + self.interner.intern(val) } - pub fn gensym(&self, val: &str) -> ast::ident { - ast::ident { name: self.interner.gensym(val), ctxt: 0 } + pub fn gensym(&self, val: &str) -> Name { + self.interner.gensym(val) } - pub fn get(&self, idx: ast::ident) -> @~str { - self.interner.get(idx.name) + pub fn get(&self, idx: Name) -> @~str { + self.interner.get(idx) } + // is this really something that should be exposed? 
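Further down in this impl, `find_equiv` now hands back the raw `Name`; callers that need an ident, such as `metadata::decoder::item_name` earlier in the patch, wrap it with `ast::new_ident`. A sketch of that caller-side pattern, mirroring the decoder call site (the `lookup_or_intern` helper is made up for illustration):

```rust
use ast;

// Look a string up without re-interning it when it is already present;
// fall back to interning it otherwise.
fn lookup_or_intern(s: &str) -> ast::ident {
    match get_ident_interner().find_equiv(&StringRef(s)) {
        Some(name) => ast::new_ident(name), // already interned: wrap the Name
        None => str_to_ident(s),            // new string: intern it now
    }
}
```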
pub fn len(&self) -> uint { self.interner.len() } - pub fn find_equiv>(&self, val: &Q) -> Option { - match self.interner.find_equiv(val) { - Some(v) => Some(ast::ident { name: v, ctxt: 0 }), - None => None, - } + pub fn find_equiv>(&self, val: &Q) + -> Option { + self.interner.find_equiv(val) } } @@ -532,9 +528,35 @@ pub fn mk_fake_ident_interner() -> @ident_interner { } // maps a string to its interned representation -pub fn intern(str : &str) -> uint { +pub fn intern(str : &str) -> Name { let interner = get_ident_interner(); - interner.intern(str).name + interner.intern(str) +} + +// gensyms a new uint, using the current interner +pub fn gensym(str : &str) -> Name { + let interner = get_ident_interner(); + interner.gensym(str) +} + +// map an interned representation back to a string +pub fn interner_get(name : Name) -> @~str { + get_ident_interner().get(name) +} + +// maps an identifier to the string that it corresponds to +pub fn ident_to_str(id : ast::ident) -> @~str { + interner_get(id.name) +} + +// maps a string to an identifier with an empty syntax context +pub fn str_to_ident(str : &str) -> ast::ident { + ast::new_ident(intern(str)) +} + +// maps a string to a gensym'ed identifier +pub fn gensym_ident(str : &str) -> ast::ident { + ast::new_ident(gensym(str)) } /** diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 7a3eddbd573..0e8612cbffb 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -21,7 +21,7 @@ use codemap::{CodeMap, BytePos}; use codemap; use diagnostic; use parse::classify::expr_is_simple_block; -use parse::token::ident_interner; +use parse::token::{ident_interner, ident_to_str}; use parse::{comments, token}; use parse; use print::pp::{break_offset, word, space, zerobreak, hardbreak}; @@ -1475,7 +1475,7 @@ pub fn print_decl(s: @ps, decl: @ast::decl) { } pub fn print_ident(s: @ps, ident: ast::ident) { - word(s.s, *s.intr.get(ident)); + word(s.s, *ident_to_str(ident)); } pub fn print_for_decl(s: @ps, loc: @ast::local, coll: @ast::expr) { @@ -2237,6 +2237,7 @@ mod test { use core::cmp::Eq; use core::option::None; use parse; + use parse::token; fn string_check (given : &T, expected: &T) { if !(given == expected) { @@ -2246,8 +2247,7 @@ mod test { #[test] fn test_fun_to_str() { - let mock_interner = parse::token::mk_fake_ident_interner(); - let abba_ident = mock_interner.intern("abba"); + let abba_ident = token::str_to_ident("abba"); let decl = ast::fn_decl { inputs: ~[], @@ -2258,14 +2258,13 @@ mod test { }; let generics = ast_util::empty_generics(); assert_eq!(&fun_to_str(&decl, ast::impure_fn, abba_ident, - None, &generics, mock_interner), + None, &generics, token::get_ident_interner()), &~"fn abba()"); } #[test] fn test_variant_to_str() { - let mock_interner = parse::token::mk_fake_ident_interner(); - let ident = mock_interner.intern("principal_skinner"); + let ident = token::str_to_ident("principal_skinner"); let var = codemap::respan(codemap::dummy_sp(), ast::variant_ { name: ident, @@ -2277,7 +2276,7 @@ mod test { vis: ast::public, }); - let varstr = variant_to_str(&var,mock_interner); + let varstr = variant_to_str(&var,token::get_ident_interner()); assert_eq!(&varstr,&~"pub principal_skinner"); } } diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index 1c2210c96b6..2b74ef7fbea 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -10,11 +10,11 @@ use core::option::{Option,None}; use ast; -use parse::parser::Parser; use 
parse::{new_parse_sess}; - -use syntax::parse::{ParseSess,string_to_filemap,filemap_to_tts}; -use syntax::parse::{new_parser_from_source_str}; +use parse::{ParseSess,string_to_filemap,filemap_to_tts}; +use parse::{new_parser_from_source_str}; +use parse::parser::Parser; +use parse::token; // map a string to tts, using a made-up filename: return both the token_trees // and the ParseSess @@ -58,3 +58,13 @@ pub fn string_to_stmt (source_str : @~str) -> @ast::stmt { string_to_parser(source_str).parse_stmt(~[]) } +// parse a string, return a pat. Uses "irrefutable"... which doesn't +// (currently) affect parsing. +pub fn string_to_pat(source_str : @~str) -> @ast::pat { + string_to_parser(source_str).parse_pat() +} + +// convert a vector of strings to a vector of ast::idents +pub fn strs_to_idents(ids: ~[&str]) -> ~[ast::ident] { + ids.map(|u| token::str_to_ident(*u)) +}
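A hedged sketch of how the two helpers added above might be exercised from a #[test] in libsyntax; the test body, the pattern string, and the identifier strings are illustrative only, not part of the patch:

```rust
use util::parser_testing::{string_to_pat, strs_to_idents};

#[test]
fn try_new_parser_testing_helpers() {
    // Parse a pattern, the same way string_to_expr parses an expression.
    let pat = string_to_pat(@~"Some(x)");
    debug!("parsed pat: %?", pat);

    // Turn a vector of string slices into idents in one call.
    let ids = strs_to_idents(~["a", "b", "c"]);
    assert_eq!(ids.len(), 3u);
}
```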