auto merge of #6938 : jbclements/rust/hygiene_fns_and_cleanup, r=jbclements

I'm close to flipping the switch on hygiene for let-bound identifiers.  This commit adds a bunch of support functions for that change... but also a huge amount of cleanup in syntax.rc. The most interesting of these are

- the use of TLS for the interners everywhere.  We had already breached the "no-global-state" dam by using TLS for encoding, and it saves a lot of code just to use it everywhere.
Also, there were many places where two or more interners were passed in attached to different structures, and the danger of having those diverge seemed greater than the danger of having a single one get big and heavy. If the interner size proves to be a problem, it should be quite simple to add a "parameterize"-like dynamic binding form--because we don't have interesting continuation operations (or tail calling, mostly) this should just be a case of a mutation followed by another later mutation. Again, this is only if the interner gets too big.

- I renamed the "repr" field of the identifier to "name". I can see the case for "repr" when there's only one field in the structure, but that's no longer the case; there's now a name and a context (both are uints).

- the interner now just maps between strings and uints, rather than between idents and uints. The former state made perfect sense when identifiers didn't have syntax contexts, but that's no longer the case.

I've run this patch against a fairly recent incoming, and it appears to pass all tests. Let's see if it can be merged....
This commit is contained in:
bors 2013-06-05 13:10:45 -07:00
commit 0409f86106
50 changed files with 875 additions and 694 deletions

View File

@ -734,7 +734,7 @@ pub fn mangle_internal_name_by_path(ccx: @CrateContext, path: path) -> ~str {
}
pub fn mangle_internal_name_by_seq(ccx: @CrateContext, flav: &str) -> ~str {
return fmt!("%s_%u", flav, (ccx.names)(flav).repr);
return fmt!("%s_%u", flav, (ccx.names)(flav).name);
}

View File

@ -39,6 +39,7 @@ use syntax::attr;
use syntax::codemap;
use syntax::diagnostic;
use syntax::parse;
use syntax::parse::token;
use syntax::print::{pp, pprust};
use syntax;
@ -230,7 +231,7 @@ pub fn compile_rest(sess: Session,
sess.filesearch,
session::sess_os_to_meta_os(sess.targ_cfg.os),
sess.opts.is_static,
sess.parse_sess.interner));
token::get_ident_interner()));
let lang_items = time(time_passes, ~"language item collection", ||
middle::lang_items::collect_language_items(crate, sess));
@ -455,7 +456,7 @@ pub fn pretty_print_input(sess: Session, cfg: ast::crate_cfg, input: &input,
let is_expanded = upto != cu_parse;
let src = sess.codemap.get_filemap(source_name(input)).src;
do io::with_str_reader(*src) |rdr| {
pprust::print_crate(sess.codemap, sess.parse_sess.interner,
pprust::print_crate(sess.codemap, token::get_ident_interner(),
sess.span_diagnostic, crate.unwrap(),
source_name(input),
rdr, io::stdout(), ann, is_expanded);
@ -754,7 +755,7 @@ pub fn build_session_(sopts: @session::options,
let target_cfg = build_target_config(sopts, demitter);
let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler,
cm);
let cstore = @mut cstore::mk_cstore(p_s.interner);
let cstore = @mut cstore::mk_cstore(token::get_ident_interner());
let filesearch = filesearch::mk_filesearch(
&sopts.maybe_sysroot,
sopts.target_triple,
@ -963,7 +964,7 @@ pub fn early_error(emitter: diagnostic::Emitter, msg: ~str) -> ! {
pub fn list_metadata(sess: Session, path: &Path, out: @io::Writer) {
metadata::loader::list_file_metadata(
sess.parse_sess.interner,
token::get_ident_interner(),
session::sess_os_to_meta_os(sess.targ_cfg.os), path, out);
}

View File

@ -26,6 +26,7 @@ use syntax::diagnostic;
use syntax::parse::ParseSess;
use syntax::{ast, codemap};
use syntax::abi;
use syntax::parse::token;
use syntax;
use core::hashmap::HashMap;
@ -293,14 +294,19 @@ impl Session_ {
self.opts.optimize == No && !self.debugging_opt(no_debug_borrows)
}
// pointless function, now...
pub fn str_of(@self, id: ast::ident) -> @~str {
self.parse_sess.interner.get(id)
token::ident_to_str(&id)
}
// pointless function, now...
pub fn ident_of(@self, st: &str) -> ast::ident {
self.parse_sess.interner.intern(st)
token::str_to_ident(st)
}
// pointless function, now...
pub fn intr(@self) -> @syntax::parse::token::ident_interner {
self.parse_sess.interner
token::get_ident_interner()
}
}

View File

@ -22,6 +22,7 @@ use syntax::codemap::{dummy_sp, span, ExpandedFrom, CallInfo, NameAndSpan};
use syntax::codemap;
use syntax::ext::base::ExtCtxt;
use syntax::fold;
use syntax::parse::token;
use syntax::print::pprust;
use syntax::{ast, ast_util};
@ -143,7 +144,7 @@ fn fold_item(cx: @mut TestCtxt, i: @ast::item, fld: @fold::ast_fold)
-> Option<@ast::item> {
cx.path.push(i.ident);
debug!("current path: %s",
ast_util::path_name_i(copy cx.path, cx.sess.parse_sess.interner));
ast_util::path_name_i(copy cx.path));
if is_test_fn(cx, i) || is_bench_fn(i) {
match i.node {
@ -411,13 +412,10 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::expr {
let ext_cx = cx.ext_cx;
debug!("encoding %s", ast_util::path_name_i(path,
cx.sess.parse_sess.interner));
debug!("encoding %s", ast_util::path_name_i(path));
let name_lit: ast::lit =
nospan(ast::lit_str(@ast_util::path_name_i(
path,
cx.sess.parse_sess.interner)));
nospan(ast::lit_str(@ast_util::path_name_i(path)));
let name_expr = @ast::expr {
id: cx.sess.next_node_id(),

View File

@ -22,6 +22,7 @@ use core::vec;
use syntax::attr;
use syntax::codemap::{span, dummy_sp};
use syntax::diagnostic::span_handler;
use syntax::parse::token;
use syntax::parse::token::ident_interner;
use syntax::visit;
use syntax::ast;
@ -176,7 +177,7 @@ fn visit_item(e: @mut Env, i: @ast::item) {
}
nn
}
None => e.intr.get(i.ident)
None => token::ident_to_str(&i.ident)
};
if attr::find_attrs_by_name(i.attrs, "nolink").is_empty() {
already_added =
@ -235,7 +236,7 @@ fn resolve_crate(e: @mut Env,
hash: @~str,
span: span)
-> ast::crate_num {
let metas = metas_with_ident(@/*bad*/copy *e.intr.get(ident), metas);
let metas = metas_with_ident(token::ident_to_str(&ident), metas);
match existing_match(e, metas, hash) {
None => {
@ -276,7 +277,7 @@ fn resolve_crate(e: @mut Env,
match attr::last_meta_item_value_str_by_name(load_ctxt.metas,
"name") {
Some(v) => v,
None => e.intr.get(ident),
None => token::ident_to_str(&ident),
};
let cmeta = @cstore::crate_metadata {
name: cname,
@ -305,10 +306,11 @@ fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map {
for decoder::get_crate_deps(e.intr, cdata).each |dep| {
let extrn_cnum = dep.cnum;
let cname = dep.name;
let cname_str = token::ident_to_str(&dep.name);
let cmetas = metas_with(dep.vers, @~"vers", ~[]);
debug!("resolving dep crate %s ver: %s hash: %s",
*e.intr.get(dep.name), *dep.vers, *dep.hash);
match existing_match(e, metas_with_ident(e.intr.get(cname),
*cname_str, *dep.vers, *dep.hash);
match existing_match(e, metas_with_ident(cname_str,
copy cmetas),
dep.hash) {
Some(local_cnum) => {

View File

@ -40,6 +40,7 @@ use syntax::parse::token::{StringRef, ident_interner, special_idents};
use syntax::print::pprust;
use syntax::{ast, ast_util};
use syntax::codemap;
use syntax::parse::token;
type cmd = @crate_metadata;
@ -297,10 +298,10 @@ fn item_path(intr: @ident_interner, item_doc: ebml::Doc) -> ast_map::path {
for reader::docs(path_doc) |tag, elt_doc| {
if tag == tag_path_elt_mod {
let str = reader::doc_as_str(elt_doc);
result.push(ast_map::path_mod(intr.intern(str)));
result.push(ast_map::path_mod(token::str_to_ident(str)));
} else if tag == tag_path_elt_name {
let str = reader::doc_as_str(elt_doc);
result.push(ast_map::path_name(intr.intern(str)));
result.push(ast_map::path_name(token::str_to_ident(str)));
} else {
// ignore tag_path_len element
}
@ -314,8 +315,8 @@ fn item_name(intr: @ident_interner, item: ebml::Doc) -> ast::ident {
do reader::with_doc_data(name) |data| {
let string = str::from_bytes_slice(data);
match intr.find_equiv(&StringRef(string)) {
None => intr.intern(string),
Some(val) => val,
None => token::str_to_ident(string),
Some(val) => ast::new_ident(val),
}
}
}
@ -843,7 +844,7 @@ pub fn get_type_name_if_impl(intr: @ident_interner,
}
for reader::tagged_docs(item, tag_item_impl_type_basename) |doc| {
return Some(intr.intern(str::from_bytes(reader::doc_data(doc))));
return Some(token::str_to_ident(str::from_bytes(reader::doc_data(doc))));
}
return None;
@ -1095,7 +1096,7 @@ pub fn get_crate_deps(intr: @ident_interner, data: @~[u8]) -> ~[crate_dep] {
}
for reader::tagged_docs(depsdoc, tag_crate_dep) |depdoc| {
deps.push(crate_dep {cnum: crate_num,
name: intr.intern(docstr(depdoc, tag_crate_dep_name)),
name: token::str_to_ident(docstr(depdoc, tag_crate_dep_name)),
vers: @docstr(depdoc, tag_crate_dep_vers),
hash: @docstr(depdoc, tag_crate_dep_hash)});
crate_num += 1;
@ -1109,7 +1110,7 @@ fn list_crate_deps(intr: @ident_interner, data: @~[u8], out: @io::Writer) {
for get_crate_deps(intr, data).each |dep| {
out.write_str(
fmt!("%d %s-%s-%s\n",
dep.cnum, *intr.get(dep.name), *dep.hash, *dep.vers));
dep.cnum, *token::ident_to_str(&dep.name), *dep.hash, *dep.vers));
}
out.write_str("\n");

View File

@ -43,6 +43,7 @@ use syntax::opt_vec::OptVec;
use syntax::opt_vec;
use syntax::parse::token::special_idents;
use syntax::{ast_util, visit};
use syntax::parse::token;
use syntax;
use writer = extra::ebml::writer;
@ -141,8 +142,7 @@ fn add_to_index(ecx: @EncodeContext,
full_path.push(name);
index.push(
entry {
val: ast_util::path_name_i(full_path,
ecx.tcx.sess.parse_sess.interner),
val: ast_util::path_name_i(full_path),
pos: ebml_w.writer.tell()
});
}
@ -485,8 +485,7 @@ fn encode_info_for_mod(ecx: @EncodeContext,
(%?/%?)",
*ecx.tcx.sess.str_of(ident),
did,
ast_map::node_id_to_str(ecx.tcx.items, did, ecx.tcx
.sess.parse_sess.interner));
ast_map::node_id_to_str(ecx.tcx.items, did, token::get_ident_interner()));
ebml_w.start_tag(tag_mod_impl);
ebml_w.wr_str(def_to_str(local_def(did)));
@ -1055,7 +1054,7 @@ fn encode_info_for_item(ecx: @EncodeContext,
tcx.sess.span_unimpl(
item.span,
fmt!("Method %s is both provided and static",
*tcx.sess.intr().get(method_ty.ident)));
*token::ident_to_str(&method_ty.ident)));
}
encode_type_param_bounds(ebml_w, ecx,
&m.generics.ty_params);

View File

@ -19,6 +19,7 @@ use metadata::filesearch::FileSearch;
use metadata::filesearch;
use syntax::codemap::span;
use syntax::diagnostic::span_handler;
use syntax::parse::token;
use syntax::parse::token::ident_interner;
use syntax::print::pprust;
use syntax::{ast, attr};
@ -59,7 +60,7 @@ pub fn load_library_crate(cx: &Context) -> (~str, @~[u8]) {
None => {
cx.diag.span_fatal(
cx.span, fmt!("can't find crate for `%s`",
*cx.intr.get(cx.ident)));
*token::ident_to_str(&cx.ident)));
}
}
}

View File

@ -41,6 +41,7 @@ use syntax::codemap::span;
use syntax::codemap;
use syntax::fold::*;
use syntax::fold;
use syntax::parse::token;
use syntax;
use writer = extra::ebml::writer;
@ -86,7 +87,7 @@ pub fn encode_inlined_item(ecx: @e::EncodeContext,
ii: ast::inlined_item,
maps: Maps) {
debug!("> Encoding inlined item: %s::%s (%u)",
ast_map::path_to_str(path, ecx.tcx.sess.parse_sess.interner),
ast_map::path_to_str(path, token::get_ident_interner()),
*ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell());
@ -99,7 +100,7 @@ pub fn encode_inlined_item(ecx: @e::EncodeContext,
ebml_w.end_tag();
debug!("< Encoded inlined fn: %s::%s (%u)",
ast_map::path_to_str(path, ecx.tcx.sess.parse_sess.interner),
ast_map::path_to_str(path, token::get_ident_interner()),
*ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell());
}
@ -119,7 +120,7 @@ pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
None => None,
Some(ast_doc) => {
debug!("> Decoding inlined fn: %s::?",
ast_map::path_to_str(path, tcx.sess.parse_sess.interner));
ast_map::path_to_str(path, token::get_ident_interner()));
let mut ast_dsr = reader::Decoder(ast_doc);
let from_id_range = Decodable::decode(&mut ast_dsr);
let to_id_range = reserve_id_range(dcx.tcx.sess, from_id_range);
@ -132,7 +133,7 @@ pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
let ii = renumber_ast(xcx, raw_ii);
debug!("Fn named: %s", *tcx.sess.str_of(ii.ident()));
debug!("< Decoded inlined fn: %s::%s",
ast_map::path_to_str(path, tcx.sess.parse_sess.interner),
ast_map::path_to_str(path, token::get_ident_interner()),
*tcx.sess.str_of(ii.ident()));
ast_map::map_decoded_item(tcx.sess.diagnostic(),
dcx.tcx.items, path, &ii);
@ -1167,7 +1168,7 @@ impl fake_ext_ctxt for fake_session {
}
}
fn ident_of(&self, st: &str) -> ast::ident {
self.interner.intern(st)
token::str_to_ident(st)
}
}
@ -1236,9 +1237,9 @@ fn test_simplification() {
match (item_out, item_exp) {
(ast::ii_item(item_out), ast::ii_item(item_exp)) => {
assert!(pprust::item_to_str(item_out,
ext_cx.parse_sess().interner)
token::get_ident_interner())
== pprust::item_to_str(item_exp,
ext_cx.parse_sess().interner));
token::get_ident_interner()));
}
_ => fail!()
}

View File

@ -30,6 +30,7 @@ use syntax::ast;
use syntax::ast_map;
use syntax::visit;
use syntax::codemap::span;
use syntax::parse::token;
macro_rules! if_ok(
($inp: expr) => (
@ -710,8 +711,8 @@ impl BorrowckCtxt {
match *loan_path {
LpVar(id) => {
match self.tcx.items.find(&id) {
Some(&ast_map::node_local(ident)) => {
str::push_str(out, *self.tcx.sess.intr().get(ident));
Some(&ast_map::node_local(ref ident)) => {
str::push_str(out, *token::ident_to_str(ident));
}
r => {
self.tcx.sess.bug(
@ -724,9 +725,9 @@ impl BorrowckCtxt {
LpExtend(lp_base, _, LpInterior(mc::InteriorField(fname))) => {
self.append_loan_path_to_str_from_interior(lp_base, out);
match fname {
mc::NamedField(fname) => {
mc::NamedField(ref fname) => {
str::push_char(out, '.');
str::push_str(out, *self.tcx.sess.intr().get(fname));
str::push_str(out, *token::ident_to_str(fname));
}
mc::PositionalField(idx) => {
str::push_char(out, '#'); // invent a notation here

View File

@ -35,6 +35,7 @@ use syntax::ast_util::{Private, Public, is_local};
use syntax::ast_util::{variant_visibility_to_privacy, visibility_to_privacy};
use syntax::attr;
use syntax::codemap::span;
use syntax::parse::token;
use syntax::visit;
pub fn check_crate(tcx: ty::ctxt,
@ -120,7 +121,7 @@ pub fn check_crate(tcx: ty::ctxt,
ast_map::node_id_to_str(
tcx.items,
method_id,
tcx.sess.parse_sess.interner)));
token::get_ident_interner())));
}
None => {
tcx.sess.span_bug(span, "method not found in \
@ -184,7 +185,7 @@ pub fn check_crate(tcx: ty::ctxt,
ast_map::node_id_to_str(
tcx.items,
method_id,
tcx.sess.parse_sess.interner)));
token::get_ident_interner())));
}
None => {
tcx.sess.span_bug(span, "method not found in \
@ -216,9 +217,7 @@ pub fn check_crate(tcx: ty::ctxt,
ast_map::node_id_to_str(
tcx.items,
item_id,
tcx.sess
.parse_sess
.interner)));
token::get_ident_interner())));
}
None => {
tcx.sess.span_bug(span, "item not found in AST map?!");
@ -236,8 +235,7 @@ pub fn check_crate(tcx: ty::ctxt,
if field.ident != ident { loop; }
if field.vis == private {
tcx.sess.span_err(span, fmt!("field `%s` is private",
*tcx.sess.parse_sess.interner
.get(ident)));
*token::ident_to_str(&ident)));
}
break;
}
@ -257,10 +255,7 @@ pub fn check_crate(tcx: ty::ctxt,
!privileged_items.contains(&(container_id.node))) {
tcx.sess.span_err(span,
fmt!("method `%s` is private",
*tcx.sess
.parse_sess
.interner
.get(*name)));
*token::ident_to_str(name)));
}
} else {
let visibility =
@ -268,8 +263,7 @@ pub fn check_crate(tcx: ty::ctxt,
if visibility != public {
tcx.sess.span_err(span,
fmt!("method `%s` is private",
*tcx.sess.parse_sess.interner
.get(*name)));
*token::ident_to_str(name)));
}
}
};
@ -289,23 +283,13 @@ pub fn check_crate(tcx: ty::ctxt,
!privileged_items.contains(&def_id.node) {
tcx.sess.span_err(span,
fmt!("function `%s` is private",
*tcx.sess
.parse_sess
.interner
.get(copy *path
.idents
.last())));
*token::ident_to_str(path.idents.last())));
}
} else if csearch::get_item_visibility(tcx.sess.cstore,
def_id) != public {
tcx.sess.span_err(span,
fmt!("function `%s` is private",
*tcx.sess
.parse_sess
.interner
.get(copy *path
.idents
.last())));
*token::ident_to_str(path.idents.last())));
}
}
_ => {}
@ -335,10 +319,7 @@ pub fn check_crate(tcx: ty::ctxt,
match item.node {
item_trait(_, _, ref methods) => {
if method_num >= (*methods).len() {
tcx.sess.span_bug(span, "method \
number \
out of \
range?!");
tcx.sess.span_bug(span, "method number out of range?!");
}
match (*methods)[method_num] {
provided(method)
@ -346,17 +327,9 @@ pub fn check_crate(tcx: ty::ctxt,
!privileged_items
.contains(&(trait_id.node)) => {
tcx.sess.span_err(span,
fmt!("method
`%s` \
is \
private",
*tcx
.sess
.parse_sess
.interner
.get
(method
.ident)));
fmt!("method `%s` is private",
*token::ident_to_str(&method
.ident)));
}
provided(_) | required(_) => {
// Required methods can't be
@ -365,20 +338,15 @@ pub fn check_crate(tcx: ty::ctxt,
}
}
_ => {
tcx.sess.span_bug(span, "trait wasn't \
actually a \
trait?!");
tcx.sess.span_bug(span, "trait wasn't actually a trait?!");
}
}
}
Some(_) => {
tcx.sess.span_bug(span, "trait wasn't an \
item?!");
tcx.sess.span_bug(span, "trait wasn't an item?!");
}
None => {
tcx.sess.span_bug(span, "trait item wasn't \
found in the AST \
map?!");
tcx.sess.span_bug(span, "trait item wasn't found in the AST map?!");
}
}
} else {

View File

@ -30,6 +30,7 @@ use core::hashmap::{HashMap, HashSet};
use syntax::ast_map;
use syntax::codemap::span;
use syntax::print::pprust;
use syntax::parse::token;
use syntax::parse::token::special_idents;
use syntax::{ast, visit};
@ -595,7 +596,7 @@ impl DetermineRpCtxt {
debug!("add_rp() variance for %s: %? == %? ^ %?",
ast_map::node_id_to_str(self.ast_map, id,
self.sess.parse_sess.interner),
token::get_ident_interner()),
joined_variance, old_variance, variance);
if Some(joined_variance) != old_variance {
@ -614,9 +615,9 @@ impl DetermineRpCtxt {
debug!("add dependency from %d -> %d (%s -> %s) with variance %?",
from, self.item_id,
ast_map::node_id_to_str(self.ast_map, from,
self.sess.parse_sess.interner),
token::get_ident_interner()),
ast_map::node_id_to_str(self.ast_map, self.item_id,
self.sess.parse_sess.interner),
token::get_ident_interner()),
copy self.ambient_variance);
let vec = match self.dep_map.find(&from) {
Some(&vec) => vec,
@ -951,7 +952,7 @@ pub fn determine_rp_in_crate(sess: Session,
debug!("item %? (%s) is parameterized with variance %?",
key,
ast_map::node_id_to_str(ast_map, key,
sess.parse_sess.interner),
token::get_ident_interner()),
value);
}
"----"

View File

@ -28,6 +28,7 @@ use syntax::ast_util::{path_to_ident, walk_pat, trait_method_to_ty_method};
use syntax::ast_util::{Privacy, Public, Private};
use syntax::ast_util::{variant_visibility_to_privacy, visibility_to_privacy};
use syntax::attr::{attr_metas, contains_name};
use syntax::parse::token;
use syntax::parse::token::ident_interner;
use syntax::parse::token::special_idents;
use syntax::print::pprust::path_to_str;
@ -727,7 +728,7 @@ impl PrimitiveTypeTable {
intr: @ident_interner,
string: &str,
primitive_type: prim_ty) {
let ident = intr.intern(string);
let ident = token::str_to_ident(string);
self.primitive_types.insert(ident, primitive_type);
}
}
@ -805,8 +806,7 @@ pub fn Resolver(session: Session,
self_ident: special_idents::self_,
type_self_ident: special_idents::type_self,
primitive_type_table: @PrimitiveTypeTable(session.
parse_sess.interner),
primitive_type_table: @PrimitiveTypeTable(token::get_ident_interner()),
namespaces: ~[ TypeNS, ValueNS ],
@ -2942,17 +2942,17 @@ impl Resolver {
module_: @mut Module,
module_path: &[ident])
-> ResolveResult<ModulePrefixResult> {
let interner = self.session.parse_sess.interner;
let interner = token::get_ident_interner();
// Start at the current module if we see `self` or `super`, or at the
// top of the crate otherwise.
let mut containing_module;
let mut i;
if *interner.get(module_path[0]) == ~"self" {
if *token::ident_to_str(&module_path[0]) == ~"self" {
containing_module =
self.get_nearest_normal_module_parent_or_self(module_);
i = 1;
} else if *interner.get(module_path[0]) == ~"super" {
} else if *token::ident_to_str(&module_path[0]) == ~"super" {
containing_module =
self.get_nearest_normal_module_parent_or_self(module_);
i = 0; // We'll handle `super` below.
@ -2962,7 +2962,7 @@ impl Resolver {
// Now loop through all the `super`s we find.
while i < module_path.len() &&
*interner.get(module_path[i]) == ~"super" {
*token::ident_to_str(&module_path[i]) == ~"super" {
debug!("(resolving module prefix) resolving `super` at %s",
self.module_to_str(containing_module));
match self.get_nearest_normal_module_parent(containing_module) {

View File

@ -78,7 +78,8 @@ use syntax::ast_map::{path, path_elt_to_str, path_name};
use syntax::ast_util::{local_def, path_to_ident};
use syntax::attr;
use syntax::codemap::span;
use syntax::parse::token::special_idents;
use syntax::parse::token;
use syntax::parse::token::{special_idents};
use syntax::print::pprust::stmt_to_str;
use syntax::visit;
use syntax::{ast, ast_util, codemap, ast_map};
@ -2257,10 +2258,10 @@ pub fn register_fn_fuller(ccx: @CrateContext,
-> ValueRef {
debug!("register_fn_fuller creating fn for item %d with path %s",
node_id,
ast_map::path_to_str(path, ccx.sess.parse_sess.interner));
ast_map::path_to_str(path, token::get_ident_interner()));
let ps = if attr::attrs_contains_name(attrs, "no_mangle") {
path_elt_to_str(*path.last(), ccx.sess.parse_sess.interner)
path_elt_to_str(*path.last(), token::get_ident_interner())
} else {
mangle_exported_name(ccx, /*bad*/copy path, node_type)
};
@ -2504,7 +2505,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::node_id) -> ValueRef {
}
ast::foreign_item_const(*) => {
let typ = ty::node_id_to_type(tcx, ni.id);
let ident = ccx.sess.parse_sess.interner.get(ni.ident);
let ident = token::ident_to_str(&ni.ident);
let g = do str::as_c_str(*ident) |buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod,
@ -3069,7 +3070,7 @@ pub fn trans_crate(sess: session::Session,
lib::llvm::associate_type(tn, @"tydesc", tydesc_type);
let crate_map = decl_crate_map(sess, link_meta, llmod);
let dbg_cx = if sess.opts.debuginfo {
Some(debuginfo::mk_ctxt(copy llmod_id, sess.parse_sess.interner))
Some(debuginfo::mk_ctxt(copy llmod_id, token::get_ident_interner()))
} else {
None
};
@ -3104,7 +3105,7 @@ pub fn trans_crate(sess: session::Session,
lltypes: @mut HashMap::new(),
llsizingtypes: @mut HashMap::new(),
adt_reprs: @mut HashMap::new(),
names: new_namegen(sess.parse_sess.interner),
names: new_namegen(token::get_ident_interner()),
next_addrspace: new_addrspace_gen(),
symbol_hasher: symbol_hasher,
type_hashcodes: @mut HashMap::new(),

View File

@ -52,16 +52,19 @@ use core::vec;
use syntax::ast::ident;
use syntax::ast_map::{path, path_elt};
use syntax::codemap::span;
use syntax::parse::token;
use syntax::parse::token::ident_interner;
use syntax::{ast, ast_map};
use syntax::abi::{X86, X86_64, Arm, Mips};
// NOTE: this thunk is totally pointless now that we're not passing
// interners around...
pub type namegen = @fn(s: &str) -> ident;
pub fn new_namegen(intr: @ident_interner) -> namegen {
let f: @fn(s: &str) -> ident = |prefix| {
intr.gensym(fmt!("%s_%u",
prefix,
intr.gensym(prefix).repr))
token::str_to_ident(fmt!("%s_%u",
prefix,
token::gensym(prefix)))
};
f
}
@ -1195,7 +1198,7 @@ pub fn C_cstr(cx: @CrateContext, s: @~str) -> ValueRef {
llvm::LLVMConstString(buf, s.len() as c_uint, False)
};
let g =
str::as_c_str(fmt!("str%u", (cx.names)("str").repr),
str::as_c_str(fmt!("str%u", (cx.names)("str").name),
|buf| llvm::LLVMAddGlobal(cx.llmod, val_ty(sc), buf));
llvm::LLVMSetInitializer(g, sc);
llvm::LLVMSetGlobalConstant(g, True);
@ -1287,7 +1290,7 @@ pub fn C_bytes_plus_null(bytes: &[u8]) -> ValueRef {
pub fn C_shape(ccx: @CrateContext, bytes: ~[u8]) -> ValueRef {
unsafe {
let llshape = C_bytes_plus_null(bytes);
let name = fmt!("shape%u", (ccx.names)("shape").repr);
let name = fmt!("shape%u", (ccx.names)("shape").name);
let llglobal = str::as_c_str(name, |buf| {
llvm::LLVMAddGlobal(ccx.llmod, val_ty(llshape), buf)
});

View File

@ -41,6 +41,7 @@ use syntax::{ast, ast_util};
use syntax::{attr, ast_map};
use syntax::opt_vec;
use syntax::parse::token::special_idents;
use syntax::parse::token;
use syntax::abi::{X86, X86_64, Arm, Mips};
use syntax::abi::{RustIntrinsic, Rust, Stdcall, Fastcall,
Cdecl, Aapcs, C};
@ -343,8 +344,7 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
}
}
ast::foreign_item_const(*) => {
let ident = ccx.sess.parse_sess.interner.get(
foreign_item.ident);
let ident = token::ident_to_str(&foreign_item.ident);
ccx.item_symbols.insert(foreign_item.id, copy *ident);
}
}

View File

@ -42,6 +42,7 @@ use syntax::ast;
use syntax::ast::*;
use syntax::ast_map;
use syntax::ast_util;
use syntax::parse::token;
use syntax::visit;
pub type type_uses = uint; // Bitmask
@ -170,7 +171,7 @@ pub fn type_uses_for(ccx: @CrateContext, fn_id: def_id, n_tps: uint)
ast_map::node_id_to_str(
ccx.tcx.items,
fn_id_loc.node,
ccx.tcx.sess.parse_sess.interner)));
token::get_ident_interner())));
}
}
let Context { uses, _ } = cx;

View File

@ -43,6 +43,7 @@ use syntax::ast_util;
use syntax::attr;
use syntax::codemap::span;
use syntax::codemap;
use syntax::parse::token;
use syntax::parse::token::special_idents;
use syntax::{ast, ast_map};
use syntax::opt_vec::OptVec;
@ -2678,7 +2679,7 @@ impl cmp::TotalOrd for bound_region {
(&ty::br_anon(ref a1), &ty::br_anon(ref a2)) => a1.cmp(a2),
(&ty::br_anon(*), _) => cmp::Less,
(&ty::br_named(ref a1), &ty::br_named(ref a2)) => a1.repr.cmp(&a2.repr),
(&ty::br_named(ref a1), &ty::br_named(ref a2)) => a1.name.cmp(&a2.name),
(&ty::br_named(*), _) => cmp::Less,
(&ty::br_cap_avoid(ref a1, @ref b1),
@ -2819,7 +2820,7 @@ pub fn node_id_to_trait_ref(cx: ctxt, id: ast::node_id) -> @ty::TraitRef {
None => cx.sess.bug(
fmt!("node_id_to_trait_ref: no trait ref for node `%s`",
ast_map::node_id_to_str(cx.items, id,
cx.sess.parse_sess.interner)))
token::get_ident_interner())))
}
}
@ -2830,7 +2831,7 @@ pub fn node_id_to_type(cx: ctxt, id: ast::node_id) -> t {
None => cx.sess.bug(
fmt!("node_id_to_type: no type for node `%s`",
ast_map::node_id_to_str(cx.items, id,
cx.sess.parse_sess.interner)))
token::get_ident_interner())))
}
}
@ -3821,7 +3822,7 @@ pub fn substd_enum_variants(cx: ctxt,
}
pub fn item_path_str(cx: ctxt, id: ast::def_id) -> ~str {
ast_map::path_to_str(item_path(cx, id), cx.sess.parse_sess.interner)
ast_map::path_to_str(item_path(cx, id), token::get_ident_interner())
}
pub enum DtorKind {
@ -4107,7 +4108,7 @@ pub fn lookup_struct_fields(cx: ctxt, did: ast::def_id) -> ~[field_ty] {
cx.sess.bug(
fmt!("struct ID not bound to an item: %s",
ast_map::node_id_to_str(cx.items, did.node,
cx.sess.parse_sess.interner)));
token::get_ident_interner())));
}
}
}

View File

@ -127,6 +127,7 @@ use syntax::codemap::span;
use syntax::codemap;
use syntax::opt_vec::OptVec;
use syntax::opt_vec;
use syntax::parse::token;
use syntax::parse::token::special_idents;
use syntax::print::pprust;
use syntax::visit;
@ -815,7 +816,7 @@ impl FnCtxt {
fmt!("no type for node %d: %s in fcx %s",
id, ast_map::node_id_to_str(
self.tcx().items, id,
self.tcx().sess.parse_sess.interner),
token::get_ident_interner()),
self.tag()));
}
}
@ -829,7 +830,7 @@ impl FnCtxt {
fmt!("no type substs for node %d: %s in fcx %s",
id, ast_map::node_id_to_str(
self.tcx().items, id,
self.tcx().sess.parse_sess.interner),
token::get_ident_interner()),
self.tag()));
}
}

View File

@ -28,6 +28,7 @@ use middle::typeck;
use syntax::abi::AbiSet;
use syntax::ast_map;
use syntax::codemap::span;
use syntax::parse::token;
use syntax::print::pprust;
use syntax::{ast, ast_util};
@ -200,7 +201,7 @@ pub fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str {
_ => { cx.sess.bug(
fmt!("re_scope refers to %s",
ast_map::node_id_to_str(cx.items, node_id,
cx.sess.parse_sess.interner))) }
token::get_ident_interner()))) }
}
}
@ -654,7 +655,7 @@ impl Repr for ty::Method {
impl Repr for ast::ident {
fn repr(&self, tcx: ctxt) -> ~str {
copy *tcx.sess.intr().get(*self)
copy *token::ident_to_str(self)
}
}

View File

@ -18,16 +18,17 @@ use doc;
use core::vec;
use syntax::ast;
use syntax::parse::token::{ident_interner};
use syntax::parse::token::{ident_interner, ident_to_str};
use syntax::parse::token;
// Hack; rather than thread an interner through everywhere, rely on
// thread-local data
// Hack-Becomes-Feature: using thread-local-state everywhere...
pub fn to_str(id: ast::ident) -> ~str {
let intr = token::get_ident_interner();
return copy *(*intr).get(id);
return copy *ident_to_str(&id);
}
// get rid of this pointless function:
pub fn interner() -> @ident_interner {
return token::get_ident_interner();
}

View File

@ -24,6 +24,7 @@ use pass::Pass;
use core::vec;
use syntax::ast;
use syntax::print::pprust;
use syntax::parse::token;
use syntax::ast_map;
pub fn mk_pass() -> Pass {
@ -76,7 +77,7 @@ fn get_fn_sig(srv: astsrv::Srv, fn_id: doc::AstId) -> Option<~str> {
node: ast::foreign_item_fn(ref decl, purity, ref tys), _
}, _, _, _) => {
Some(pprust::fun_to_str(decl, purity, ident, None, tys,
extract::interner()))
token::get_ident_interner()))
}
_ => fail!("get_fn_sig: fn_id not bound to a fn item")
}

View File

@ -174,7 +174,7 @@ fn run(repl: Repl, input: ~str) -> Repl {
_ => fail!()
};
debug!("recording input into repl history");
record(repl, blk, sess.parse_sess.interner)
record(repl, blk, token::get_ident_interner())
}
// Compiles a crate given by the filename as a library if the compiled

View File

@ -25,7 +25,7 @@ distributions like normal and exponential.
use core::rand::RngUtil;
fn main() {
let rng = rand::rng();
let mut rng = rand::rng();
if rng.gen() { // bool
println(fmt!("int: %d, uint: %u", rng.gen(), rng.gen()))
}

View File

@ -15,7 +15,7 @@ use core::prelude::*;
use codemap::{span, spanned};
use abi::AbiSet;
use opt_vec::OptVec;
use parse::token::get_ident_interner;
use parse::token::{ident_to_str, interner_get, str_to_ident};
use core::hashmap::HashMap;
use core::option::Option;
@ -25,12 +25,15 @@ use core::to_str::ToStr;
use extra::serialize::{Encodable, Decodable, Encoder, Decoder};
// an identifier contains an index into the interner
// table and a SyntaxContext to track renaming and
// an identifier contains a Name (index into the interner
// table) and a SyntaxContext to track renaming and
// macro expansion per Flatt et al., "Macros
// That Work Together"
#[deriving(Eq)]
pub struct ident { repr: Name, ctxt: SyntaxContext }
pub struct ident { name: Name, ctxt: SyntaxContext }
/// Construct an identifier with the given name and an empty context:
pub fn new_ident(name: Name) -> ident { ident {name: name, ctxt: empty_ctxt}}
// a SyntaxContext represents a chain of macro-expandings
// and renamings. Each macro expansion corresponds to
@ -72,7 +75,8 @@ pub enum SyntaxContext_ {
IllegalCtxt()
}
// a name represents an identifier
// a name is a part of an identifier, representing a string
// or gensym. It's the result of interning.
pub type Name = uint;
// a mark represents a unique id associated
// with a macro expansion
@ -80,22 +84,20 @@ pub type Mrk = uint;
impl<S:Encoder> Encodable<S> for ident {
fn encode(&self, s: &mut S) {
let intr = get_ident_interner();
s.emit_str(*(*intr).get(*self));
s.emit_str(*interner_get(self.name));
}
}
impl<D:Decoder> Decodable<D> for ident {
fn decode(d: &mut D) -> ident {
let intr = get_ident_interner();
(*intr).intern(d.read_str())
str_to_ident(d.read_str())
}
}
impl to_bytes::IterBytes for ident {
#[inline(always)]
fn iter_bytes(&self, lsb0: bool, f: to_bytes::Cb) -> bool {
self.repr.iter_bytes(lsb0, f)
self.name.iter_bytes(lsb0, f)
}
}
@ -385,6 +387,7 @@ pub type stmt = spanned<stmt_>;
#[deriving(Eq, Encodable, Decodable)]
pub enum stmt_ {
// could be an item or a local (let) binding:
stmt_decl(@decl, node_id),
// expr without trailing semi-colon (must have unit type):
@ -414,7 +417,9 @@ pub type decl = spanned<decl_>;
#[deriving(Eq, Encodable, Decodable)]
pub enum decl_ {
// a local (let) binding:
decl_local(@local),
// an item binding:
decl_item(@item),
}

View File

@ -58,8 +58,8 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: &str, itr: @ident_interner)
-> ~str {
let strs = do p.map |e| {
match *e {
path_mod(s) => copy *itr.get(s),
path_name(s) => copy *itr.get(s)
path_mod(s) => copy *itr.get(s.name),
path_name(s) => copy *itr.get(s.name)
}
};
str::connect(strs, sep)
@ -68,9 +68,9 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: &str, itr: @ident_interner)
pub fn path_ident_to_str(p: &path, i: ident, itr: @ident_interner) -> ~str {
if vec::is_empty(*p) {
//FIXME /* FIXME (#2543) */ copy *i
copy *itr.get(i)
copy *itr.get(i.name)
} else {
fmt!("%s::%s", path_to_str(*p, itr), *itr.get(i))
fmt!("%s::%s", path_to_str(*p, itr), *itr.get(i.name))
}
}
@ -80,8 +80,8 @@ pub fn path_to_str(p: &[path_elt], itr: @ident_interner) -> ~str {
pub fn path_elt_to_str(pe: path_elt, itr: @ident_interner) -> ~str {
match pe {
path_mod(s) => copy *itr.get(s),
path_name(s) => copy *itr.get(s)
path_mod(s) => copy *itr.get(s.name),
path_name(s) => copy *itr.get(s.name)
}
}
@ -356,16 +356,16 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
}
Some(&node_method(m, _, path)) => {
fmt!("method %s in %s (id=%?)",
*itr.get(m.ident), path_to_str(*path, itr), id)
*itr.get(m.ident.name), path_to_str(*path, itr), id)
}
Some(&node_trait_method(ref tm, _, path)) => {
let m = ast_util::trait_method_to_ty_method(&**tm);
fmt!("method %s in %s (id=%?)",
*itr.get(m.ident), path_to_str(*path, itr), id)
*itr.get(m.ident.name), path_to_str(*path, itr), id)
}
Some(&node_variant(ref variant, _, path)) => {
fmt!("variant %s in %s (id=%?)",
*itr.get(variant.node.name), path_to_str(*path, itr), id)
*itr.get(variant.node.name.name), path_to_str(*path, itr), id)
}
Some(&node_expr(expr)) => {
fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id)
@ -381,7 +381,7 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
fmt!("arg (id=%?)", id)
}
Some(&node_local(ident)) => {
fmt!("local (id=%?, name=%s)", id, *itr.get(ident))
fmt!("local (id=%?, name=%s)", id, *itr.get(ident.name))
}
Some(&node_block(_)) => {
fmt!("block")

View File

@ -14,6 +14,8 @@ use ast::*;
use ast;
use ast_util;
use codemap::{span, spanned};
use core::cast;
use core::local_data;
use opt_vec;
use parse::token;
use visit;
@ -24,12 +26,11 @@ use core::option;
use core::str;
use core::to_bytes;
pub fn path_name_i(idents: &[ident], intr: @token::ident_interner) -> ~str {
pub fn path_name_i(idents: &[ident]) -> ~str {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
str::connect(idents.map(|i| copy *intr.get(*i)), "::")
str::connect(idents.map(|i| copy *token::interner_get(i.name)), "::")
}
pub fn path_to_ident(p: @Path) -> ident { copy *p.idents.last() }
pub fn local_def(id: node_id) -> def_id {
@ -620,16 +621,22 @@ pub enum Privacy {
// HYGIENE FUNCTIONS
/// Construct an identifier with the given repr and an empty context:
pub fn new_ident(repr: uint) -> ident { ident {repr: repr, ctxt: 0}}
/// Construct an identifier with the given name and an empty context:
pub fn new_ident(name: Name) -> ident { ident {name: name, ctxt: 0}}
/// Extend a syntax context with a given mark
pub fn new_mark (m:Mrk, tail:SyntaxContext,table:&mut SCTable)
pub fn new_mark(m:Mrk, tail:SyntaxContext) -> SyntaxContext {
new_mark_internal(m,tail,get_sctable())
}
// Extend a syntax context with a given mark and table
// FIXME #4536 : currently pub to allow testing
pub fn new_mark_internal(m:Mrk, tail:SyntaxContext,table:&mut SCTable)
-> SyntaxContext {
let key = (tail,m);
// FIXME #5074 : can't use more natural style because we're missing
// flow-sensitivity. Results in two lookups on a hash table hit.
// also applies to new_rename, below.
// also applies to new_rename_internal, below.
// let try_lookup = table.mark_memo.find(&key);
match table.mark_memo.contains_key(&key) {
false => {
@ -647,7 +654,13 @@ pub fn new_mark (m:Mrk, tail:SyntaxContext,table:&mut SCTable)
}
/// Extend a syntax context with a given rename
pub fn new_rename (id:ident, to:Name, tail:SyntaxContext, table: &mut SCTable)
pub fn new_rename(id:ident, to:Name, tail:SyntaxContext) -> SyntaxContext {
new_rename_internal(id, to, tail, get_sctable())
}
// Extend a syntax context with a given rename and sctable
// FIXME #4536 : currently pub to allow testing
pub fn new_rename_internal(id:ident, to:Name, tail:SyntaxContext, table: &mut SCTable)
-> SyntaxContext {
let key = (tail,id,to);
// FIXME #5074
@ -669,7 +682,8 @@ pub fn new_rename (id:ident, to:Name, tail:SyntaxContext, table: &mut SCTable)
/// Make a fresh syntax context table with EmptyCtxt in slot zero
/// and IllegalCtxt in slot one.
pub fn new_sctable() -> SCTable {
// FIXME #4536 : currently pub to allow testing
pub fn new_sctable_internal() -> SCTable {
SCTable {
table: ~[EmptyCtxt,IllegalCtxt],
mark_memo: HashMap::new(),
@ -677,6 +691,23 @@ pub fn new_sctable() -> SCTable {
}
}
// fetch the SCTable from TLS, create one if it doesn't yet exist.
pub fn get_sctable() -> @mut SCTable {
unsafe {
// Task-local-data keys in this era are identified by a function
// pointer; fabricate a unique key by transmuting a sentinel
// address. NOTE(review): (-4, 0) is presumably chosen so it cannot
// collide with other hand-rolled TLS keys (e.g. the interner's) —
// confirm against the other local_data keys in this crate.
let sctable_key = (cast::transmute::<(uint, uint),
&fn(v: @@mut SCTable)>(
(-4 as uint, 0u)));
match local_data::local_data_get(sctable_key) {
// first access in this task: build a fresh table and stash it
None => {
let new_table = @@mut new_sctable_internal();
local_data::local_data_set(sctable_key,new_table);
*new_table
},
// already present: strip the outer @ and hand back the table
Some(intr) => *intr
}
}
}
/// Add a value to the end of a vec, return its index
fn idx_push<T>(vec: &mut ~[T], val: T) -> uint {
vec.push(val);
@ -684,19 +715,25 @@ fn idx_push<T>(vec: &mut ~[T], val: T) -> uint {
}
/// Resolve a syntax object to a name, per MTWT.
pub fn resolve (id : ident, table : &mut SCTable) -> Name {
pub fn resolve(id : ident) -> Name {
resolve_internal(id, get_sctable())
}
// Resolve a syntax object to a name, per MTWT.
// FIXME #4536 : currently pub to allow testing
pub fn resolve_internal(id : ident, table : &mut SCTable) -> Name {
match table.table[id.ctxt] {
EmptyCtxt => id.repr,
EmptyCtxt => id.name,
// ignore marks here:
Mark(_,subctxt) => resolve (ident{repr:id.repr, ctxt: subctxt},table),
Mark(_,subctxt) => resolve_internal(ident{name:id.name, ctxt: subctxt},table),
// do the rename if necessary:
Rename(ident{repr,ctxt},toname,subctxt) => {
Rename(ident{name,ctxt},toname,subctxt) => {
// this could be cached or computed eagerly:
let resolvedfrom = resolve(ident{repr:repr,ctxt:ctxt},table);
let resolvedthis = resolve(ident{repr:id.repr,ctxt:subctxt},table);
let resolvedfrom = resolve_internal(ident{name:name,ctxt:ctxt},table);
let resolvedthis = resolve_internal(ident{name:id.name,ctxt:subctxt},table);
if ((resolvedthis == resolvedfrom)
&& (marksof (ctxt,resolvedthis,table)
== marksof (subctxt,resolvedthis,table))) {
&& (marksof(ctxt,resolvedthis,table)
== marksof(subctxt,resolvedthis,table))) {
toname
} else {
resolvedthis
@ -777,11 +814,11 @@ mod test {
// convert a list of uints to an @~[ident]
// (ignores the interner completely)
fn uints_to_idents (uints: &~[uint]) -> @~[ident] {
@uints.map(|u|{ ident {repr:*u, ctxt: empty_ctxt} })
@uints.map(|u|{ ident {name:*u, ctxt: empty_ctxt} })
}
fn id (u : uint, s: SyntaxContext) -> ident {
ident{repr:u, ctxt: s}
ident{name:u, ctxt: s}
}
// because of the SCTable, I now need a tidy way of
@ -798,8 +835,8 @@ mod test {
-> SyntaxContext {
tscs.foldr(tail, |tsc : &TestSC,tail : SyntaxContext|
{match *tsc {
M(mrk) => new_mark(mrk,tail,table),
R(ident,name) => new_rename(ident,name,tail,table)}})
M(mrk) => new_mark_internal(mrk,tail,table),
R(ident,name) => new_rename_internal(ident,name,tail,table)}})
}
// gather a SyntaxContext back into a vector of TestSCs
@ -824,7 +861,7 @@ mod test {
}
#[test] fn test_unfold_refold(){
let mut t = new_sctable();
let mut t = new_sctable_internal();
let test_sc = ~[M(3),R(id(101,0),14),M(9)];
assert_eq!(unfold_test_sc(copy test_sc,empty_ctxt,&mut t),4);
@ -838,11 +875,11 @@ mod test {
// in a vector. v[0] will be the outermost mark.
fn unfold_marks(mrks:~[Mrk],tail:SyntaxContext,table: &mut SCTable) -> SyntaxContext {
mrks.foldr(tail, |mrk:&Mrk,tail:SyntaxContext|
{new_mark(*mrk,tail,table)})
{new_mark_internal(*mrk,tail,table)})
}
#[test] fn unfold_marks_test() {
let mut t = new_sctable();
let mut t = new_sctable_internal();
assert_eq!(unfold_marks(~[3,7],empty_ctxt,&mut t),3);
assert_eq!(t.table[2],Mark(7,0));
@ -852,7 +889,7 @@ mod test {
#[test] fn test_marksof () {
let stopname = 242;
let name1 = 243;
let mut t = new_sctable();
let mut t = new_sctable_internal();
assert_eq!(marksof (empty_ctxt,stopname,&t),~[]);
// FIXME #5074: ANF'd to dodge nested calls
{ let ans = unfold_marks(~[4,98],empty_ctxt,&mut t);
@ -866,13 +903,13 @@ mod test {
// rename where stop doesn't match:
{ let chain = ~[M(9),
R(id(name1,
new_mark (4, empty_ctxt,&mut t)),
new_mark_internal (4, empty_ctxt,&mut t)),
100101102),
M(14)];
let ans = unfold_test_sc(chain,empty_ctxt,&mut t);
assert_eq! (marksof (ans, stopname, &t), ~[9,14]);}
// rename where stop does match
{ let name1sc = new_mark(4, empty_ctxt, &mut t);
{ let name1sc = new_mark_internal(4, empty_ctxt, &mut t);
let chain = ~[M(9),
R(id(name1, name1sc),
stopname),
@ -884,30 +921,30 @@ mod test {
#[test] fn resolve_tests () {
let a = 40;
let mut t = new_sctable();
let mut t = new_sctable_internal();
// - ctxt is MT
assert_eq!(resolve(id(a,empty_ctxt),&mut t),a);
assert_eq!(resolve_internal(id(a,empty_ctxt),&mut t),a);
// - simple ignored marks
{ let sc = unfold_marks(~[1,2,3],empty_ctxt,&mut t);
assert_eq!(resolve(id(a,sc),&mut t),a);}
assert_eq!(resolve_internal(id(a,sc),&mut t),a);}
// - orthogonal rename where names don't match
{ let sc = unfold_test_sc(~[R(id(50,empty_ctxt),51),M(12)],empty_ctxt,&mut t);
assert_eq!(resolve(id(a,sc),&mut t),a);}
assert_eq!(resolve_internal(id(a,sc),&mut t),a);}
// - rename where names do match, but marks don't
{ let sc1 = new_mark(1,empty_ctxt,&mut t);
{ let sc1 = new_mark_internal(1,empty_ctxt,&mut t);
let sc = unfold_test_sc(~[R(id(a,sc1),50),
M(1),
M(2)],
empty_ctxt,&mut t);
assert_eq!(resolve(id(a,sc),&mut t), a);}
assert_eq!(resolve_internal(id(a,sc),&mut t), a);}
// - rename where names and marks match
{ let sc1 = unfold_test_sc(~[M(1),M(2)],empty_ctxt,&mut t);
let sc = unfold_test_sc(~[R(id(a,sc1),50),M(1),M(2)],empty_ctxt,&mut t);
assert_eq!(resolve(id(a,sc),&mut t), 50); }
assert_eq!(resolve_internal(id(a,sc),&mut t), 50); }
// - rename where names and marks match by literal sharing
{ let sc1 = unfold_test_sc(~[M(1),M(2)],empty_ctxt,&mut t);
let sc = unfold_test_sc(~[R(id(a,sc1),50)],sc1,&mut t);
assert_eq!(resolve(id(a,sc),&mut t), 50); }
assert_eq!(resolve_internal(id(a,sc),&mut t), 50); }
// - two renames of the same var.. can only happen if you use
// local-expand to prevent the inner binding from being renamed
// during the rename-pass caused by the first:
@ -915,28 +952,28 @@ mod test {
{ let sc = unfold_test_sc(~[R(id(a,empty_ctxt),50),
R(id(a,empty_ctxt),51)],
empty_ctxt,&mut t);
assert_eq!(resolve(id(a,sc),&mut t), 51); }
assert_eq!(resolve_internal(id(a,sc),&mut t), 51); }
// the simplest double-rename:
{ let a_to_a50 = new_rename(id(a,empty_ctxt),50,empty_ctxt,&mut t);
let a50_to_a51 = new_rename(id(a,a_to_a50),51,a_to_a50,&mut t);
assert_eq!(resolve(id(a,a50_to_a51),&mut t),51);
{ let a_to_a50 = new_rename_internal(id(a,empty_ctxt),50,empty_ctxt,&mut t);
let a50_to_a51 = new_rename_internal(id(a,a_to_a50),51,a_to_a50,&mut t);
assert_eq!(resolve_internal(id(a,a50_to_a51),&mut t),51);
// mark on the outside doesn't stop rename:
let sc = new_mark(9,a50_to_a51,&mut t);
assert_eq!(resolve(id(a,sc),&mut t),51);
let sc = new_mark_internal(9,a50_to_a51,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t),51);
// but mark on the inside does:
let a50_to_a51_b = unfold_test_sc(~[R(id(a,a_to_a50),51),
M(9)],
a_to_a50,
&mut t);
assert_eq!(resolve(id(a,a50_to_a51_b),&mut t),50);}
assert_eq!(resolve_internal(id(a,a50_to_a51_b),&mut t),50);}
}
#[test] fn hashing_tests () {
let mut t = new_sctable();
assert_eq!(new_mark(12,empty_ctxt,&mut t),2);
assert_eq!(new_mark(13,empty_ctxt,&mut t),3);
let mut t = new_sctable_internal();
assert_eq!(new_mark_internal(12,empty_ctxt,&mut t),2);
assert_eq!(new_mark_internal(13,empty_ctxt,&mut t),3);
// using the same one again should result in the same index:
assert_eq!(new_mark(12,empty_ctxt,&mut t),2);
assert_eq!(new_mark_internal(12,empty_ctxt,&mut t),2);
// I'm assuming that the rename table will behave the same....
}

View File

@ -11,6 +11,7 @@
use core::prelude::*;
use ast;
use ast::Name;
use codemap;
use codemap::{CodeMap, span, ExpnInfo, ExpandedFrom};
use codemap::CallInfo;
@ -18,6 +19,7 @@ use diagnostic::span_handler;
use ext;
use parse;
use parse::token;
use parse::token::{ident_to_str, intern, str_to_ident};
use core::hashmap::HashMap;
use core::vec;
@ -91,29 +93,33 @@ pub enum SyntaxExtension {
IdentTT(SyntaxExpanderTTItem),
}
// The SyntaxEnv is the environment that's threaded through the expansion
// of macros. It contains bindings for macros, and also a special binding
// for " block" (not a legal identifier) that maps to a BlockInfo
pub type SyntaxEnv = @mut MapChain<Name, Transformer>;
// Name : the domain of SyntaxEnvs
// want to change these to uints....
// note that we use certain strings that are not legal as identifiers
// to indicate, for instance, how blocks are supposed to behave.
type Name = @~str;
// Transformer : the codomain of SyntaxEnvs
// NB: it may seem crazy to lump both of these into one environment;
// what would it mean to bind "foo" to BlockLimit(true)? The idea
// is that this follows the lead of MTWT, and accommodates growth
// toward a more uniform syntax syntax (sorry) where blocks are just
// another kind of transformer.
pub enum Transformer {
// this identifier maps to a syntax extension or macro
SE(SyntaxExtension),
// should blocks occurring here limit macro scopes?
ScopeMacros(bool)
// blockinfo : this is ... well, it's simpler than threading
// another whole data stack-structured data structure through
// expansion. Basically, there's an invariant that every
// map must contain a binding for " block".
BlockInfo(BlockInfo)
}
pub struct BlockInfo {
// should macros escape from this scope?
macros_escape : bool,
// what are the pending renames?
pending_renames : @mut RenameList
}
// a list of ident->name renamings
type RenameList = ~[(ast::ident,Name)];
// The base map of methods for expanding syntax extension
// AST nodes into full ASTs
pub fn syntax_expander_table() -> SyntaxEnv {
@ -127,77 +133,80 @@ pub fn syntax_expander_table() -> SyntaxEnv {
}
let mut syntax_expanders = HashMap::new();
// NB identifier starts with space, and can't conflict with legal idents
syntax_expanders.insert(@~" block",
@ScopeMacros(true));
syntax_expanders.insert(@~"macro_rules",
syntax_expanders.insert(intern(&" block"),
@BlockInfo(BlockInfo{
macros_escape : false,
pending_renames : @mut ~[]
}));
syntax_expanders.insert(intern(&"macro_rules"),
builtin_item_tt(
ext::tt::macro_rules::add_new_extension));
syntax_expanders.insert(@~"fmt",
syntax_expanders.insert(intern(&"fmt"),
builtin_normal_tt(ext::fmt::expand_syntax_ext));
syntax_expanders.insert(
@~"auto_encode",
intern(&"auto_encode"),
@SE(ItemDecorator(ext::auto_encode::expand_auto_encode)));
syntax_expanders.insert(
@~"auto_decode",
intern(&"auto_decode"),
@SE(ItemDecorator(ext::auto_encode::expand_auto_decode)));
syntax_expanders.insert(@~"env",
syntax_expanders.insert(intern(&"env"),
builtin_normal_tt(ext::env::expand_syntax_ext));
syntax_expanders.insert(@~"bytes",
syntax_expanders.insert(intern("bytes"),
builtin_normal_tt(ext::bytes::expand_syntax_ext));
syntax_expanders.insert(@~"concat_idents",
syntax_expanders.insert(intern("concat_idents"),
builtin_normal_tt(
ext::concat_idents::expand_syntax_ext));
syntax_expanders.insert(@~"log_syntax",
syntax_expanders.insert(intern(&"log_syntax"),
builtin_normal_tt(
ext::log_syntax::expand_syntax_ext));
syntax_expanders.insert(@~"deriving",
syntax_expanders.insert(intern(&"deriving"),
@SE(ItemDecorator(
ext::deriving::expand_meta_deriving)));
// Quasi-quoting expanders
syntax_expanders.insert(@~"quote_tokens",
syntax_expanders.insert(intern(&"quote_tokens"),
builtin_normal_tt(ext::quote::expand_quote_tokens));
syntax_expanders.insert(@~"quote_expr",
syntax_expanders.insert(intern(&"quote_expr"),
builtin_normal_tt(ext::quote::expand_quote_expr));
syntax_expanders.insert(@~"quote_ty",
syntax_expanders.insert(intern(&"quote_ty"),
builtin_normal_tt(ext::quote::expand_quote_ty));
syntax_expanders.insert(@~"quote_item",
syntax_expanders.insert(intern(&"quote_item"),
builtin_normal_tt(ext::quote::expand_quote_item));
syntax_expanders.insert(@~"quote_pat",
syntax_expanders.insert(intern(&"quote_pat"),
builtin_normal_tt(ext::quote::expand_quote_pat));
syntax_expanders.insert(@~"quote_stmt",
syntax_expanders.insert(intern(&"quote_stmt"),
builtin_normal_tt(ext::quote::expand_quote_stmt));
syntax_expanders.insert(@~"line",
syntax_expanders.insert(intern(&"line"),
builtin_normal_tt(
ext::source_util::expand_line));
syntax_expanders.insert(@~"col",
syntax_expanders.insert(intern(&"col"),
builtin_normal_tt(
ext::source_util::expand_col));
syntax_expanders.insert(@~"file",
syntax_expanders.insert(intern(&"file"),
builtin_normal_tt(
ext::source_util::expand_file));
syntax_expanders.insert(@~"stringify",
syntax_expanders.insert(intern(&"stringify"),
builtin_normal_tt(
ext::source_util::expand_stringify));
syntax_expanders.insert(@~"include",
syntax_expanders.insert(intern(&"include"),
builtin_normal_tt(
ext::source_util::expand_include));
syntax_expanders.insert(@~"include_str",
syntax_expanders.insert(intern(&"include_str"),
builtin_normal_tt(
ext::source_util::expand_include_str));
syntax_expanders.insert(@~"include_bin",
syntax_expanders.insert(intern(&"include_bin"),
builtin_normal_tt(
ext::source_util::expand_include_bin));
syntax_expanders.insert(@~"module_path",
syntax_expanders.insert(intern(&"module_path"),
builtin_normal_tt(
ext::source_util::expand_mod));
syntax_expanders.insert(@~"proto",
syntax_expanders.insert(intern(&"proto"),
builtin_item_tt(ext::pipes::expand_proto));
syntax_expanders.insert(@~"asm",
syntax_expanders.insert(intern(&"asm"),
builtin_normal_tt(ext::asm::expand_asm));
syntax_expanders.insert(
@~"trace_macros",
intern(&"trace_macros"),
builtin_normal_tt(ext::trace_macros::expand_trace_macros));
MapChain::new(~syntax_expanders)
}
@ -301,10 +310,10 @@ impl ExtCtxt {
*self.trace_mac = x
}
pub fn str_of(&self, id: ast::ident) -> ~str {
copy *self.parse_sess.interner.get(id)
copy *ident_to_str(&id)
}
pub fn ident_of(&self, st: &str) -> ast::ident {
self.parse_sess.interner.intern(st)
str_to_ident(st)
}
}
@ -470,6 +479,15 @@ impl <K: Eq + Hash + IterBytes ,V: Copy> MapChain<K,V>{
}
}
// Look up `key` in only the outermost (most recently pushed) frame of
// the chain; inherited frames underneath are deliberately ignored.
fn find_in_topmost_frame(&self, key: &K) -> Option<@V> {
// both variants carry their own map as the first field; no
// recursion into the tail of a ConsMapChain happens here
let map = match *self {
BaseMapChain(ref map) => map,
ConsMapChain(ref map,_) => map
};
// strip one layer of indirection off the pointer.
map.find(key).map(|r| {**r})
}
// insert the binding into the top-level map
fn insert (&mut self, key: K, ext: @V) -> bool {
// can't abstract over get_map because of flow sensitivity...
@ -478,7 +496,40 @@ impl <K: Eq + Hash + IterBytes ,V: Copy> MapChain<K,V>{
ConsMapChain (~ref mut map,_) => map.insert(key,ext)
}
}
// insert the binding into the topmost frame for which the binding
// associated with 'n' exists and satisfies pred
// ... there are definitely some opportunities for abstraction
// here that I'm ignoring. (e.g., manufacturing a predicate on
// the maps in the chain, and using an abstract "find".
fn insert_into_frame(&mut self, key: K, ext: @V, n: K, pred: &fn(&@V)->bool) {
match *self {
// base case: this frame must itself satisfy the predicate,
// otherwise the caller's invariant (some frame satisfies it)
// was violated and we fail loudly rather than bind elsewhere.
BaseMapChain (~ref mut map) => {
if satisfies_pred(map,&n,pred) {
map.insert(key,ext);
} else {
fail!(~"expected map chain containing satisfying frame")
}
},
// cons case: bind in this frame if its `n` binding satisfies
// the predicate, otherwise recur into the rest of the chain.
ConsMapChain (~ref mut map, rest) => {
if satisfies_pred(map,&n,pred) {
map.insert(key,ext);
} else {
rest.insert_into_frame(key,ext,n,pred)
}
}
}
}
}
// returns true if the binding for 'n' satisfies 'pred' in 'map'
fn satisfies_pred<K : Eq + Hash + IterBytes,V>(map : &mut HashMap<K,V>,
n: &K,
pred: &fn(&V)->bool)
-> bool {
// an absent binding never satisfies the predicate
match map.find(n) {
Some(ref v) => (pred(*v)),
None => false
}
}
#[cfg(test)]

View File

@ -15,6 +15,7 @@ use codemap::span;
use ext::base::*;
use ext::base;
use parse::token;
use parse::token::{str_to_ident};
pub fn expand_syntax_ext(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
@ -33,7 +34,7 @@ pub fn expand_syntax_ext(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
}
}
}
let res = cx.parse_sess().interner.intern(res_str);
let res = str_to_ident(res_str);
let e = @ast::expr {
id: cx.next_id(),

View File

@ -11,11 +11,11 @@
use core::prelude::*;
use ast::{blk_, attribute_, attr_outer, meta_word};
use ast::{crate, expr_, expr_mac, mac_invoc_tt};
use ast::{item_mac, stmt_, stmt_mac, stmt_expr, stmt_semi};
use ast::{crate, decl_local, expr_, expr_mac, mac_invoc_tt};
use ast::{item_mac, local_, stmt_, stmt_decl, stmt_mac, stmt_expr, stmt_semi};
use ast::{SCTable, illegal_ctxt};
use ast;
use ast_util::{new_rename, new_mark, resolve};
use ast_util::{new_rename, new_mark, resolve, get_sctable};
use attr;
use codemap;
use codemap::{span, CallInfo, ExpandedFrom, NameAndSpan, spanned};
@ -23,6 +23,9 @@ use ext::base::*;
use fold::*;
use parse;
use parse::{parse_item_from_source_str};
use parse::token::{ident_to_str, intern, fresh_name};
use visit;
use visit::{Visitor,mk_vt};
use core::vec;
@ -46,15 +49,14 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
fmt!("expected macro name without module \
separators"));
}
/* using idents and token::special_idents would make the
the macro names be hygienic */
let extname = cx.parse_sess().interner.get(pth.idents[0]);
let extname = &pth.idents[0];
let extnamestr = ident_to_str(extname);
// leaving explicit deref here to highlight unbox op:
match (*extsbox).find(&extname) {
match (*extsbox).find(&extname.name) {
None => {
cx.span_fatal(
pth.span,
fmt!("macro undefined: '%s'", *extname))
fmt!("macro undefined: '%s'", *extnamestr))
}
Some(@SE(NormalTT(SyntaxExpanderTT{
expander: exp,
@ -63,7 +65,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
cx.bt_push(ExpandedFrom(CallInfo {
call_site: s,
callee: NameAndSpan {
name: copy *extname,
name: copy *extnamestr,
span: exp_sp,
},
}));
@ -76,7 +78,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
pth.span,
fmt!(
"non-expr macro in expr pos: %s",
*extname
*extnamestr
)
)
}
@ -92,7 +94,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
_ => {
cx.span_fatal(
pth.span,
fmt!("'%s' is not a tt-style macro", *extname)
fmt!("'%s' is not a tt-style macro", *extnamestr)
)
}
}
@ -129,7 +131,7 @@ pub fn expand_mod_items(extsbox: @mut SyntaxEnv,
do vec::foldr(item.attrs, ~[*item]) |attr, items| {
let mname = attr::get_attr_name(attr);
match (*extsbox).find(&mname) {
match (*extsbox).find(&intern(*mname)) {
Some(@SE(ItemDecorator(dec_fn))) => {
cx.bt_push(ExpandedFrom(CallInfo {
call_site: attr.span,
@ -153,16 +155,20 @@ pub fn expand_mod_items(extsbox: @mut SyntaxEnv,
// eval $e with a new exts frame:
macro_rules! with_exts_frame (
($extsboxexpr:expr,$e:expr) =>
($extsboxexpr:expr,$macros_escape:expr,$e:expr) =>
({let extsbox = $extsboxexpr;
let oldexts = *extsbox;
*extsbox = oldexts.push_frame();
extsbox.insert(intern(special_block_name),
@BlockInfo(BlockInfo{macros_escape:$macros_escape,pending_renames:@mut ~[]}));
let result = $e;
*extsbox = oldexts;
result
})
)
static special_block_name : &'static str = " block";
// When we enter a module, record it, for the sake of `module!`
pub fn expand_item(extsbox: @mut SyntaxEnv,
cx: @ExtCtxt,
@ -180,14 +186,8 @@ pub fn expand_item(extsbox: @mut SyntaxEnv,
match it.node {
ast::item_mod(_) | ast::item_foreign_mod(_) => {
cx.mod_push(it.ident);
let result =
// don't push a macro scope for macro_escape:
if contains_macro_escape(it.attrs) {
orig(it,fld)
} else {
// otherwise, push a scope:
with_exts_frame!(extsbox,orig(it,fld))
};
let macro_escape = contains_macro_escape(it.attrs);
let result = with_exts_frame!(extsbox,macro_escape,orig(it,fld));
cx.mod_pop();
result
}
@ -203,31 +203,6 @@ pub fn contains_macro_escape (attrs: &[ast::attribute]) -> bool {
attrs.any(|attr| "macro_escape" == *attr::get_attr_name(attr))
}
// this macro disables (one layer of) macro
// scoping, to allow a block to add macro bindings
// to its parent env
macro_rules! without_macro_scoping(
($extsexpr:expr,$exp:expr) =>
({
// only evaluate this once:
let exts = $extsexpr;
// capture the existing binding:
let existingBlockBinding =
match exts.find(&@~" block"){
Some(binding) => binding,
None => cx.bug("expected to find \" block\" binding")
};
// this prevents the block from limiting the macros' scope:
exts.insert(@~" block",@ScopeMacros(false));
let result = $exp;
// reset the block binding. Note that since the original
// one may have been inherited, this procedure may wind
// up introducing a block binding where one didn't exist
// before.
exts.insert(@~" block",existingBlockBinding);
result
}))
// Support for item-position macro invocations, exactly the same
// logic as for expression-position macro invocations.
pub fn expand_item_mac(extsbox: @mut SyntaxEnv,
@ -241,22 +216,23 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv,
_ => cx.span_bug(it.span, "invalid item macro invocation")
};
let extname = cx.parse_sess().interner.get(pth.idents[0]);
let expanded = match (*extsbox).find(&extname) {
let extname = &pth.idents[0];
let extnamestr = ident_to_str(extname);
let expanded = match (*extsbox).find(&extname.name) {
None => cx.span_fatal(pth.span,
fmt!("macro undefined: '%s!'", *extname)),
fmt!("macro undefined: '%s!'", *extnamestr)),
Some(@SE(NormalTT(ref expand))) => {
if it.ident != parse::token::special_idents::invalid {
cx.span_fatal(pth.span,
fmt!("macro %s! expects no ident argument, \
given '%s'", *extname,
*cx.parse_sess().interner.get(it.ident)));
given '%s'", *extnamestr,
*ident_to_str(&it.ident)));
}
cx.bt_push(ExpandedFrom(CallInfo {
call_site: it.span,
callee: NameAndSpan {
name: copy *extname,
name: copy *extnamestr,
span: expand.span
}
}));
@ -266,29 +242,29 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv,
if it.ident == parse::token::special_idents::invalid {
cx.span_fatal(pth.span,
fmt!("macro %s! expects an ident argument",
*extname));
*extnamestr));
}
cx.bt_push(ExpandedFrom(CallInfo {
call_site: it.span,
callee: NameAndSpan {
name: copy *extname,
name: copy *extnamestr,
span: expand.span
}
}));
((*expand).expander)(cx, it.span, it.ident, tts)
}
_ => cx.span_fatal(
it.span, fmt!("%s! is not legal in item position", *extname))
it.span, fmt!("%s! is not legal in item position", *extnamestr))
};
let maybe_it = match expanded {
MRItem(it) => fld.fold_item(it),
MRExpr(_) => cx.span_fatal(pth.span,
~"expr macro in item position: "
+ *extname),
+ *extnamestr),
MRAny(_, item_maker, _) => item_maker().chain(|i| {fld.fold_item(i)}),
MRDef(ref mdef) => {
extsbox.insert(@/*bad*/ copy mdef.name, @SE((*mdef).ext));
insert_macro(*extsbox,intern(mdef.name), @SE((*mdef).ext));
None
}
};
@ -296,6 +272,23 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv,
return maybe_it;
}
// insert a macro into the innermost frame that doesn't have the
// macro_escape tag.
fn insert_macro(exts: SyntaxEnv, name: ast::Name, transformer: @Transformer) {
// a frame accepts new macro bindings iff its " block" BlockInfo says
// macros do NOT escape it; frames whose macros escape are skipped so
// the binding lands in the enclosing (non-escaping) scope instead
let is_non_escaping_block =
|t : &@Transformer| -> bool{
match t {
&@BlockInfo(BlockInfo {macros_escape:false,_}) => true,
&@BlockInfo(BlockInfo {_}) => false,
// invariant: every frame binds " block" to a BlockInfo
_ => fail!(fmt!("special identifier %? was bound to a non-BlockInfo",
special_block_name))
}
};
exts.insert_into_frame(name,transformer,intern(special_block_name),
is_non_escaping_block)
}
// expand a stmt
pub fn expand_stmt(extsbox: @mut SyntaxEnv,
cx: @ExtCtxt,
@ -321,16 +314,17 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
fmt!("expected macro name without module \
separators"));
}
let extname = cx.parse_sess().interner.get(pth.idents[0]);
let (fully_expanded, sp) = match (*extsbox).find(&extname) {
let extname = &pth.idents[0];
let extnamestr = ident_to_str(extname);
let (fully_expanded, sp) = match (*extsbox).find(&extname.name) {
None =>
cx.span_fatal(pth.span, fmt!("macro undefined: '%s'", *extname)),
cx.span_fatal(pth.span, fmt!("macro undefined: '%s'", *extnamestr)),
Some(@SE(NormalTT(
SyntaxExpanderTT{expander: exp, span: exp_sp}))) => {
cx.bt_push(ExpandedFrom(CallInfo {
call_site: sp,
callee: NameAndSpan { name: copy *extname, span: exp_sp }
callee: NameAndSpan { name: copy *extnamestr, span: exp_sp }
}));
let expanded = match exp(cx, mac.span, tts) {
MRExpr(e) =>
@ -339,7 +333,7 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
MRAny(_,_,stmt_mkr) => stmt_mkr(),
_ => cx.span_fatal(
pth.span,
fmt!("non-stmt macro in stmt pos: %s", *extname))
fmt!("non-stmt macro in stmt pos: %s", *extnamestr))
};
//keep going, outside-in
@ -360,7 +354,7 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
_ => {
cx.span_fatal(pth.span,
fmt!("'%s' is not a tt-style macro", *extname))
fmt!("'%s' is not a tt-style macro", *extnamestr))
}
};
@ -371,7 +365,33 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
}
// return a visitor that extracts the pat_ident paths
// from a given pattern and puts them in a mutable
// array (passed in to the traversal)
pub fn new_name_finder() -> @Visitor<@mut ~[ast::ident]> {
let default_visitor = visit::default_visitor();
@Visitor{
// accumulate bound identifiers into the shared vector that is
// threaded through the traversal as the visitor's state
visit_pat : |p:@ast::pat,ident_accum:@mut ~[ast::ident],v:visit::vt<@mut ~[ast::ident]>| {
match *p {
// we found a pat_ident!
ast::pat{id:_, node: ast::pat_ident(_,path,ref inner), span:_} => {
match path {
// a path of length one:
// only a non-global, single-segment path is a plain
// let-style binding worth recording
@ast::Path{global: false,idents: [id], span:_,rp:_,types:_} =>
ident_accum.push(id),
// I believe these must be enums...
_ => ()
}
// visit optional subpattern of pat_ident:
for inner.each |subpat: &@ast::pat| { (v.visit_pat)(*subpat, ident_accum, v) }
}
// use the default traversal for non-pat_idents
_ => visit::visit_pat(p,ident_accum,v)
}
},
.. *default_visitor
}
}
pub fn expand_block(extsbox: @mut SyntaxEnv,
cx: @ExtCtxt,
@ -380,21 +400,49 @@ pub fn expand_block(extsbox: @mut SyntaxEnv,
fld: @ast_fold,
orig: @fn(&blk_, span, @ast_fold) -> (blk_, span))
-> (blk_, span) {
match (*extsbox).find(&@~" block") {
// no scope limit on macros in this block, no need
// to push an exts frame:
Some(@ScopeMacros(false)) => {
orig (blk,sp,fld)
},
// this block should limit the scope of its macros:
Some(@ScopeMacros(true)) => {
// see note below about treatment of exts table
with_exts_frame!(extsbox,orig(blk,sp,fld))
},
_ => cx.span_bug(sp, "expected ScopeMacros binding for \" block\"")
// see note below about treatment of exts table
with_exts_frame!(extsbox,false,orig(blk,sp,fld))
}
// get the (innermost) BlockInfo from an exts stack
fn get_block_info(exts : SyntaxEnv) -> BlockInfo {
// the " block" pseudo-identifier is expected to be bound in the
// topmost frame (with_exts_frame! installs it on every push)
match exts.find_in_topmost_frame(&intern(special_block_name)) {
Some(@BlockInfo(bi)) => bi,
// any other binding (or none) breaks the SyntaxEnv invariant.
// NOTE(review): the message formats the literal @~" block" where
// the sibling insert_macro uses special_block_name — same text,
// but consider unifying on the constant.
_ => fail!(fmt!("special identifier %? was bound to a non-BlockInfo",
@~" block"))
}
}
// given a mutable list of renames, return a tree-folder that applies those
// renames.
fn renames_to_fold(renames : @mut ~[(ast::ident,ast::Name)]) -> @ast_fold {
let afp = default_ast_fold();
let f_pre = @AstFoldFns {
fold_ident: |id,_| {
// the individual elements are memoized... it would
// also be possible to memoize on the whole list at once.
// fold each (from,to) pair onto the identifier's existing
// syntax context via new_rename; the TLS SCTable memoizes
// each individual rename node
let new_ctxt = renames.foldl(id.ctxt,|ctxt,&(from,to)| {
new_rename(from,to,*ctxt)
});
// the name itself is untouched — only the context grows
ast::ident{name:id.name,ctxt:new_ctxt}
},
.. *afp
};
make_fold(f_pre)
}
// perform a bunch of renames
fn apply_pending_renames(folder : @ast_fold, stmt : ast::stmt) -> @ast::stmt {
match folder.fold_stmt(&stmt) {
Some(s) => s,
// fold_stmt yields None only when a statement is dropped
// outright; a pure renaming fold should never do that
None => fail!(fmt!("renaming of stmt produced None"))
}
}
pub fn new_span(cx: @ExtCtxt, sp: span) -> span {
/* this discards information in the case of macro-defining macros */
return span {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()};
@ -663,35 +711,33 @@ pub fn fun_to_ident_folder(f: @fn(ast::ident)->ast::ident) -> @ast_fold{
// update the ctxts in a path to get a rename node
pub fn new_ident_renamer(from: ast::ident,
to: ast::Name,
table: @mut SCTable) ->
to: ast::Name) ->
@fn(ast::ident)->ast::ident {
|id : ast::ident|
ast::ident{
repr: id.repr,
ctxt: new_rename(from,to,id.ctxt,table)
name: id.name,
ctxt: new_rename(from,to,id.ctxt)
}
}
// update the ctxts in a path to get a mark node
pub fn new_ident_marker(mark: uint,
table: @mut SCTable) ->
pub fn new_ident_marker(mark: uint) ->
@fn(ast::ident)->ast::ident {
|id : ast::ident|
ast::ident{
repr: id.repr,
ctxt: new_mark(mark,id.ctxt,table)
name: id.name,
ctxt: new_mark(mark,id.ctxt)
}
}
// perform resolution (in the MTWT sense) on all of the
// idents in the tree. This is the final step in expansion.
pub fn new_ident_resolver(table: @mut SCTable) ->
pub fn new_ident_resolver() ->
@fn(ast::ident)->ast::ident {
|id : ast::ident|
ast::ident {
repr : resolve(id,table),
name : resolve(id),
ctxt : illegal_ctxt
}
}
@ -702,13 +748,17 @@ mod test {
use super::*;
use ast;
use ast::{attribute_, attr_outer, meta_word, empty_ctxt};
use ast_util::{new_sctable};
use ast_util::{get_sctable};
use codemap;
use codemap::spanned;
use parse;
use parse::token::{gensym, intern, get_ident_interner};
use print::pprust;
use util::parser_testing::{string_to_item, string_to_pat, strs_to_idents};
use visit::{mk_vt,Visitor};
use core::io;
use core::option::{None, Some};
use util::parser_testing::{string_to_item_and_sess};
// make sure that fail! is present
#[test] fn fail_exists_test () {
@ -812,20 +862,33 @@ mod test {
#[test]
fn renaming () {
let (maybe_item_ast,sess) = string_to_item_and_sess(@~"fn a() -> int { let b = 13; b} ");
let maybe_item_ast = string_to_item(@~"fn a() -> int { let b = 13; b }");
let item_ast = match maybe_item_ast {
Some(x) => x,
None => fail!("test case fail")
};
let table = @mut new_sctable();
let a_name = 100; // enforced by testing_interner
let a2_name = sess.interner.gensym("a2").repr;
let renamer = new_ident_renamer(ast::ident{repr:a_name,ctxt:empty_ctxt},
a2_name,table);
let a_name = intern("a");
let a2_name = intern("a2");
let renamer = new_ident_renamer(ast::ident{name:a_name,ctxt:empty_ctxt},
a2_name);
let renamed_ast = fun_to_ident_folder(renamer).fold_item(item_ast).get();
let resolver = new_ident_resolver(table);
let resolver = new_ident_resolver();
let resolved_ast = fun_to_ident_folder(resolver).fold_item(renamed_ast).get();
io::print(fmt!("ast: %?\n",resolved_ast))
let resolved_as_str = pprust::item_to_str(resolved_ast,
get_ident_interner());
assert_eq!(resolved_as_str,~"fn a2() -> int { let b = 13; b }");
}
// sigh... it looks like I have two different renaming mechanisms, now...
#[test]
fn pat_idents(){
let pat = string_to_pat(@~"(a,Foo{x:c @ (b,9),y:Bar(4,d)})");
let pat_idents = new_name_finder();
let idents = @mut ~[];
((*pat_idents).visit_pat)(pat,idents, mk_vt(pat_idents));
assert_eq!(idents,@mut strs_to_idents(~["a","c","b","d"]));
}
}

View File

@ -25,6 +25,7 @@ use ext::build::AstBuilder;
use core::option;
use core::unstable::extfmt::ct::*;
use core::vec;
use parse::token::{str_to_ident};
pub fn expand_syntax_ext(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
@ -53,12 +54,11 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span,
pieces: ~[Piece], args: ~[@ast::expr])
-> @ast::expr {
fn make_path_vec(cx: @ExtCtxt, ident: &str) -> ~[ast::ident] {
let intr = cx.parse_sess().interner;
return ~[intr.intern("std"),
intr.intern("unstable"),
intr.intern("extfmt"),
intr.intern("rt"),
intr.intern(ident)];
return ~[str_to_ident("std"),
str_to_ident("unstable"),
str_to_ident("extfmt"),
str_to_ident("rt"),
str_to_ident(ident)];
}
fn make_rt_path_expr(cx: @ExtCtxt, sp: span, nm: &str) -> @ast::expr {
let path = make_path_vec(cx, nm);
@ -112,15 +112,14 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span,
fn make_conv_struct(cx: @ExtCtxt, sp: span, flags_expr: @ast::expr,
width_expr: @ast::expr, precision_expr: @ast::expr,
ty_expr: @ast::expr) -> @ast::expr {
let intr = cx.parse_sess().interner;
cx.expr_struct(
sp,
cx.path_global(sp, make_path_vec(cx, "Conv")),
~[
cx.field_imm(sp, intr.intern("flags"), flags_expr),
cx.field_imm(sp, intr.intern("width"), width_expr),
cx.field_imm(sp, intr.intern("precision"), precision_expr),
cx.field_imm(sp, intr.intern("ty"), ty_expr)
cx.field_imm(sp, str_to_ident("flags"), flags_expr),
cx.field_imm(sp, str_to_ident("width"), width_expr),
cx.field_imm(sp, str_to_ident("precision"), precision_expr),
cx.field_imm(sp, str_to_ident("ty"), ty_expr)
]
)
}
@ -255,11 +254,11 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span,
let nargs = args.len();
/* 'ident' is the local buffer building up the result of fmt! */
let ident = cx.parse_sess().interner.intern("__fmtbuf");
let ident = str_to_ident("__fmtbuf");
let buf = || cx.expr_ident(fmt_sp, ident);
let core_ident = cx.parse_sess().interner.intern("std");
let str_ident = cx.parse_sess().interner.intern("str");
let push_ident = cx.parse_sess().interner.intern("push_str");
let core_ident = str_to_ident("std");
let str_ident = str_to_ident("str");
let push_ident = str_to_ident("push_str");
let mut stms = ~[];
/* Translate each piece (portion of the fmt expression) by invoking the

View File

@ -15,6 +15,7 @@ use codemap;
use ext::base::*;
use ext::base;
use print;
use parse::token::{get_ident_interner};
use core::io;
use core::vec;
@ -28,7 +29,7 @@ pub fn expand_syntax_ext(cx: @ExtCtxt,
io::stdout().write_line(
print::pprust::tt_to_str(
ast::tt_delim(vec::to_owned(tt)),
cx.parse_sess().interner));
get_ident_interner()));
//trivial expression
MRExpr(@ast::expr {

View File

@ -52,7 +52,6 @@ use ext::pipes::pipec::gen_init;
use ext::pipes::proto::visit;
use parse::lexer::{new_tt_reader, reader};
use parse::parser::Parser;
use core::option::None;
pub mod ast_builder;
@ -68,7 +67,6 @@ pub fn expand_proto(cx: @ExtCtxt, _sp: span, id: ast::ident,
let sess = cx.parse_sess();
let cfg = cx.cfg();
let tt_rdr = new_tt_reader(copy cx.parse_sess().span_diagnostic,
cx.parse_sess().interner,
None,
copy tt);
let rdr = tt_rdr as @reader;

View File

@ -17,6 +17,7 @@ use ext::pipes::proto::*;
use parse::common::SeqSep;
use parse::parser;
use parse::token;
use parse::token::{interner_get};
pub trait proto_parser {
fn parse_proto(&self, id: ~str) -> protocol;
@ -42,11 +43,11 @@ impl proto_parser for parser::Parser {
fn parse_state(&self, proto: protocol) {
let id = self.parse_ident();
let name = copy *self.interner.get(id);
let name = copy *interner_get(id.name);
self.expect(&token::COLON);
let dir = match copy *self.token {
token::IDENT(n, _) => self.interner.get(n),
token::IDENT(n, _) => interner_get(n.name),
_ => fail!()
};
self.bump();
@ -77,7 +78,7 @@ impl proto_parser for parser::Parser {
}
fn parse_message(&self, state: state) {
let mname = copy *self.interner.get(self.parse_ident());
let mname = copy *interner_get(self.parse_ident().name);
let args = if *self.token == token::LPAREN {
self.parse_unspanned_seq(
@ -96,7 +97,7 @@ impl proto_parser for parser::Parser {
let next = match *self.token {
token::IDENT(_, _) => {
let name = copy *self.interner.get(self.parse_ident());
let name = copy *interner_get(self.parse_ident().name);
let ntys = if *self.token == token::LT {
self.parse_unspanned_seq(
&token::LT,

View File

@ -325,10 +325,10 @@ impl gen_init for protocol {
cx.parse_item(fmt!("pub fn init%s() -> (server::%s, client::%s)\
{ pub use std::pipes::HasBuffer; %s }",
start_state.generics.to_source(cx),
start_state.to_ty(cx).to_source(cx),
start_state.to_ty(cx).to_source(cx),
body.to_source(cx)))
start_state.generics.to_source(),
start_state.to_ty(cx).to_source(),
start_state.to_ty(cx).to_source(),
body.to_source()))
}
fn gen_buffer_init(&self, ext_cx: @ExtCtxt) -> @ast::expr {

View File

@ -63,11 +63,11 @@ pub mod rt {
trait ToSource : ToTokens {
// Takes a thing and generates a string containing rust code for it.
pub fn to_source(cx: @ExtCtxt) -> ~str;
pub fn to_source() -> ~str;
// If you can make source, you can definitely make tokens.
pub fn to_tokens(cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
@ -75,80 +75,80 @@ pub mod rt {
pub trait ToSource {
// Takes a thing and generates a string containing rust code for it.
pub fn to_source(&self, cx: @ExtCtxt) -> ~str;
pub fn to_source(&self) -> ~str;
}
impl ToSource for ast::ident {
fn to_source(&self, cx: @ExtCtxt) -> ~str {
copy *cx.parse_sess().interner.get(*self)
fn to_source(&self) -> ~str {
copy *ident_to_str(self)
}
}
impl ToSource for @ast::item {
fn to_source(&self, cx: @ExtCtxt) -> ~str {
item_to_str(*self, cx.parse_sess().interner)
fn to_source(&self) -> ~str {
item_to_str(*self, get_ident_interner())
}
}
impl<'self> ToSource for &'self [@ast::item] {
fn to_source(&self, cx: @ExtCtxt) -> ~str {
str::connect(self.map(|i| i.to_source(cx)), "\n\n")
fn to_source(&self) -> ~str {
str::connect(self.map(|i| i.to_source()), ~"\n\n")
}
}
impl ToSource for @ast::Ty {
fn to_source(&self, cx: @ExtCtxt) -> ~str {
ty_to_str(*self, cx.parse_sess().interner)
fn to_source(&self) -> ~str {
ty_to_str(*self, get_ident_interner())
}
}
impl<'self> ToSource for &'self [@ast::Ty] {
fn to_source(&self, cx: @ExtCtxt) -> ~str {
str::connect(self.map(|i| i.to_source(cx)), ", ")
fn to_source(&self) -> ~str {
str::connect(self.map(|i| i.to_source()), ~", ")
}
}
impl ToSource for Generics {
fn to_source(&self, cx: @ExtCtxt) -> ~str {
pprust::generics_to_str(self, cx.parse_sess().interner)
fn to_source(&self) -> ~str {
pprust::generics_to_str(self, get_ident_interner())
}
}
impl ToSource for @ast::expr {
fn to_source(&self, cx: @ExtCtxt) -> ~str {
pprust::expr_to_str(*self, cx.parse_sess().interner)
fn to_source(&self) -> ~str {
pprust::expr_to_str(*self, get_ident_interner())
}
}
impl ToSource for ast::blk {
fn to_source(&self, cx: @ExtCtxt) -> ~str {
pprust::block_to_str(self, cx.parse_sess().interner)
fn to_source(&self) -> ~str {
pprust::block_to_str(self, get_ident_interner())
}
}
impl<'self> ToSource for &'self str {
fn to_source(&self, _cx: @ExtCtxt) -> ~str {
fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_str(@str::to_owned(*self)));
pprust::lit_to_str(@lit)
}
}
impl ToSource for int {
fn to_source(&self, _cx: @ExtCtxt) -> ~str {
fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i));
pprust::lit_to_str(@lit)
}
}
impl ToSource for i8 {
fn to_source(&self, _cx: @ExtCtxt) -> ~str {
fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i8));
pprust::lit_to_str(@lit)
}
}
impl ToSource for i16 {
fn to_source(&self, _cx: @ExtCtxt) -> ~str {
fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i16));
pprust::lit_to_str(@lit)
}
@ -156,49 +156,49 @@ pub mod rt {
impl ToSource for i32 {
fn to_source(&self, _cx: @ExtCtxt) -> ~str {
fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i32));
pprust::lit_to_str(@lit)
}
}
impl ToSource for i64 {
fn to_source(&self, _cx: @ExtCtxt) -> ~str {
fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i64));
pprust::lit_to_str(@lit)
}
}
impl ToSource for uint {
fn to_source(&self, _cx: @ExtCtxt) -> ~str {
fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u));
pprust::lit_to_str(@lit)
}
}
impl ToSource for u8 {
fn to_source(&self, _cx: @ExtCtxt) -> ~str {
fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u8));
pprust::lit_to_str(@lit)
}
}
impl ToSource for u16 {
fn to_source(&self, _cx: @ExtCtxt) -> ~str {
fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u16));
pprust::lit_to_str(@lit)
}
}
impl ToSource for u32 {
fn to_source(&self, _cx: @ExtCtxt) -> ~str {
fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u32));
pprust::lit_to_str(@lit)
}
}
impl ToSource for u64 {
fn to_source(&self, _cx: @ExtCtxt) -> ~str {
fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u64));
pprust::lit_to_str(@lit)
}
@ -208,115 +208,115 @@ pub mod rt {
impl ToTokens for ast::ident {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for @ast::item {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl<'self> ToTokens for &'self [@ast::item] {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for @ast::Ty {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl<'self> ToTokens for &'self [@ast::Ty] {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for Generics {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for @ast::expr {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for ast::blk {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl<'self> ToTokens for &'self str {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for int {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for i8 {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for i16 {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for i32 {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for i64 {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for uint {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for u8 {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for u16 {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for u32 {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
impl ToTokens for u64 {
fn to_tokens(&self, cx: @ExtCtxt) -> ~[token_tree] {
cx.parse_tts(self.to_source(cx))
cx.parse_tts(self.to_source())
}
}
@ -417,25 +417,25 @@ pub fn expand_quote_stmt(cx: @ExtCtxt,
~[e_attrs], tts))
}
fn ids_ext(cx: @ExtCtxt, strs: ~[~str]) -> ~[ast::ident] {
strs.map(|str| cx.parse_sess().interner.intern(*str))
fn ids_ext(strs: ~[~str]) -> ~[ast::ident] {
strs.map(|str| str_to_ident(*str))
}
fn id_ext(cx: @ExtCtxt, str: &str) -> ast::ident {
cx.parse_sess().interner.intern(str)
fn id_ext(str: &str) -> ast::ident {
str_to_ident(str)
}
// Lift an ident to the expr that evaluates to that ident.
fn mk_ident(cx: @ExtCtxt, sp: span, ident: ast::ident) -> @ast::expr {
let e_str = cx.expr_str(sp, cx.str_of(ident));
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext(cx, "ext_cx")),
id_ext(cx, "ident_of"),
cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("ident_of"),
~[e_str])
}
fn mk_bytepos(cx: @ExtCtxt, sp: span, bpos: BytePos) -> @ast::expr {
let path = id_ext(cx, "BytePos");
let path = id_ext("BytePos");
let arg = cx.expr_uint(sp, bpos.to_uint());
cx.expr_call_ident(sp, path, ~[arg])
}
@ -453,7 +453,7 @@ fn mk_binop(cx: @ExtCtxt, sp: span, bop: token::binop) -> @ast::expr {
SHL => "SHL",
SHR => "SHR"
};
cx.expr_ident(sp, id_ext(cx, name))
cx.expr_ident(sp, id_ext(name))
}
fn mk_token(cx: @ExtCtxt, sp: span, tok: &token::Token) -> @ast::expr {
@ -461,12 +461,12 @@ fn mk_token(cx: @ExtCtxt, sp: span, tok: &token::Token) -> @ast::expr {
match *tok {
BINOP(binop) => {
return cx.expr_call_ident(sp,
id_ext(cx, "BINOP"),
id_ext("BINOP"),
~[mk_binop(cx, sp, binop)]);
}
BINOPEQ(binop) => {
return cx.expr_call_ident(sp,
id_ext(cx, "BINOPEQ"),
id_ext("BINOPEQ"),
~[mk_binop(cx, sp, binop)]);
}
@ -479,12 +479,12 @@ fn mk_token(cx: @ExtCtxt, sp: span, tok: &token::Token) -> @ast::expr {
ast::ty_i32 => ~"ty_i32",
ast::ty_i64 => ~"ty_i64"
};
let e_ity = cx.expr_ident(sp, id_ext(cx, s_ity));
let e_ity = cx.expr_ident(sp, id_ext(s_ity));
let e_i64 = cx.expr_lit(sp, ast::lit_int(i, ast::ty_i64));
return cx.expr_call_ident(sp,
id_ext(cx, "LIT_INT"),
id_ext("LIT_INT"),
~[e_i64, e_ity]);
}
@ -496,12 +496,12 @@ fn mk_token(cx: @ExtCtxt, sp: span, tok: &token::Token) -> @ast::expr {
ast::ty_u32 => ~"ty_u32",
ast::ty_u64 => ~"ty_u64"
};
let e_uty = cx.expr_ident(sp, id_ext(cx, s_uty));
let e_uty = cx.expr_ident(sp, id_ext(s_uty));
let e_u64 = cx.expr_lit(sp, ast::lit_uint(u, ast::ty_u64));
return cx.expr_call_ident(sp,
id_ext(cx, "LIT_UINT"),
id_ext("LIT_UINT"),
~[e_u64, e_uty]);
}
@ -509,7 +509,7 @@ fn mk_token(cx: @ExtCtxt, sp: span, tok: &token::Token) -> @ast::expr {
let e_i64 = cx.expr_lit(sp, ast::lit_int(i, ast::ty_i64));
return cx.expr_call_ident(sp,
id_ext(cx, "LIT_INT_UNSUFFIXED"),
id_ext("LIT_INT_UNSUFFIXED"),
~[e_i64]);
}
@ -519,37 +519,37 @@ fn mk_token(cx: @ExtCtxt, sp: span, tok: &token::Token) -> @ast::expr {
ast::ty_f32 => ~"ty_f32",
ast::ty_f64 => ~"ty_f64"
};
let e_fty = cx.expr_ident(sp, id_ext(cx, s_fty));
let e_fty = cx.expr_ident(sp, id_ext(s_fty));
let e_fident = mk_ident(cx, sp, fident);
return cx.expr_call_ident(sp,
id_ext(cx, "LIT_FLOAT"),
id_ext("LIT_FLOAT"),
~[e_fident, e_fty]);
}
LIT_STR(ident) => {
return cx.expr_call_ident(sp,
id_ext(cx, "LIT_STR"),
id_ext("LIT_STR"),
~[mk_ident(cx, sp, ident)]);
}
IDENT(ident, b) => {
return cx.expr_call_ident(sp,
id_ext(cx, "IDENT"),
id_ext("IDENT"),
~[mk_ident(cx, sp, ident),
cx.expr_bool(sp, b)]);
}
LIFETIME(ident) => {
return cx.expr_call_ident(sp,
id_ext(cx, "LIFETIME"),
id_ext("LIFETIME"),
~[mk_ident(cx, sp, ident)]);
}
DOC_COMMENT(ident) => {
return cx.expr_call_ident(sp,
id_ext(cx, "DOC_COMMENT"),
id_ext("DOC_COMMENT"),
~[mk_ident(cx, sp, ident)]);
}
@ -593,7 +593,7 @@ fn mk_token(cx: @ExtCtxt, sp: span, tok: &token::Token) -> @ast::expr {
EOF => "EOF",
_ => fail!()
};
cx.expr_ident(sp, id_ext(cx, name))
cx.expr_ident(sp, id_ext(name))
}
@ -603,14 +603,14 @@ fn mk_tt(cx: @ExtCtxt, sp: span, tt: &ast::token_tree)
match *tt {
ast::tt_tok(sp, ref tok) => {
let e_sp = cx.expr_ident(sp, id_ext(cx, "sp"));
let e_sp = cx.expr_ident(sp, id_ext("sp"));
let e_tok = cx.expr_call_ident(sp,
id_ext(cx, "tt_tok"),
id_ext("tt_tok"),
~[e_sp, mk_token(cx, sp, tok)]);
let e_push =
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext(cx, "tt")),
id_ext(cx, "push"),
cx.expr_ident(sp, id_ext("tt")),
id_ext("push"),
~[e_tok]);
~[cx.stmt_expr(e_push)]
}
@ -625,13 +625,13 @@ fn mk_tt(cx: @ExtCtxt, sp: span, tt: &ast::token_tree)
let e_to_toks =
cx.expr_method_call(sp,
cx.expr_ident(sp, ident),
id_ext(cx, "to_tokens"),
~[cx.expr_ident(sp, id_ext(cx, "ext_cx"))]);
id_ext("to_tokens"),
~[cx.expr_ident(sp, id_ext("ext_cx"))]);
let e_push =
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext(cx, "tt")),
id_ext(cx, "push_all_move"),
cx.expr_ident(sp, id_ext("tt")),
id_ext("push_all_move"),
~[e_to_toks]);
~[cx.stmt_expr(e_push)]
@ -671,10 +671,10 @@ fn expand_tts(cx: @ExtCtxt,
// import the runtime module, followed by a tt-building expression.
let uses = ~[ cx.view_use_glob(sp, ast::public,
ids_ext(cx, ~[~"syntax",
~"ext",
~"quote",
~"rt"])) ];
ids_ext(~[~"syntax",
~"ext",
~"quote",
~"rt"])) ];
// We also bind a single value, sp, to ext_cx.call_site()
//
@ -703,23 +703,23 @@ fn expand_tts(cx: @ExtCtxt,
// worth the hassle.
let e_sp = cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext(cx, "ext_cx")),
id_ext(cx, "call_site"),
cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("call_site"),
~[]);
let stmt_let_sp = cx.stmt_let(sp, false,
id_ext(cx, "sp"),
id_ext("sp"),
e_sp);
let stmt_let_tt = cx.stmt_let(sp, true,
id_ext(cx, "tt"),
id_ext("tt"),
cx.expr_vec_uniq(sp, ~[]));
cx.expr_blk(
cx.blk_all(sp, uses,
~[stmt_let_sp,
stmt_let_tt] + mk_tts(cx, sp, tts),
Some(cx.expr_ident(sp, id_ext(cx, "tt")))))
Some(cx.expr_ident(sp, id_ext("tt")))))
}
fn expand_parse_call(cx: @ExtCtxt,
@ -730,25 +730,25 @@ fn expand_parse_call(cx: @ExtCtxt,
let tts_expr = expand_tts(cx, sp, tts);
let cfg_call = || cx.expr_method_call(
sp, cx.expr_ident(sp, id_ext(cx, "ext_cx")),
id_ext(cx, "cfg"), ~[]);
sp, cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("cfg"), ~[]);
let parse_sess_call = || cx.expr_method_call(
sp, cx.expr_ident(sp, id_ext(cx, "ext_cx")),
id_ext(cx, "parse_sess"), ~[]);
sp, cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("parse_sess"), ~[]);
let new_parser_call =
cx.expr_call_global(sp,
ids_ext(cx, ~[~"syntax",
~"ext",
~"quote",
~"rt",
~"new_parser_from_tts"]),
ids_ext(~[~"syntax",
~"ext",
~"quote",
~"rt",
~"new_parser_from_tts"]),
~[parse_sess_call(),
cfg_call(),
tts_expr]);
cx.expr_method_call(sp, new_parser_call,
id_ext(cx, parse_method),
id_ext(parse_method),
arg_exprs)
}

View File

@ -18,6 +18,7 @@ use ext::base::*;
use ext::base;
use ext::build::AstBuilder;
use parse;
use parse::token::{get_ident_interner};
use print::pprust;
use core::io;
@ -65,7 +66,7 @@ pub fn expand_file(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
pub fn expand_stringify(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
let s = pprust::tts_to_str(tts, cx.parse_sess().interner);
let s = pprust::tts_to_str(tts, get_ident_interner());
base::MRExpr(cx.expr_str(sp, s))
}

View File

@ -28,7 +28,6 @@ pub fn expand_trace_macros(cx: @ExtCtxt,
let cfg = cx.cfg();
let tt_rdr = new_tt_reader(
copy cx.parse_sess().span_diagnostic,
cx.parse_sess().interner,
None,
vec::to_owned(tt)
);

View File

@ -19,7 +19,7 @@ use codemap;
use parse::lexer::*; //resolve bug?
use parse::ParseSess;
use parse::parser::Parser;
use parse::token::{Token, EOF, to_str, nonterminal};
use parse::token::{Token, EOF, to_str, nonterminal, get_ident_interner, ident_to_str};
use parse::token;
use core::hashmap::HashMap;
@ -201,13 +201,13 @@ pub fn nameize(p_s: @mut ParseSess, ms: &[matcher], res: &[@named_match])
};
}
codemap::spanned {
node: match_nonterminal(bind_name, _, idx), span: sp
node: match_nonterminal(ref bind_name, _, idx), span: sp
} => {
if ret_val.contains_key(&bind_name) {
if ret_val.contains_key(bind_name) {
p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: "+
*p_s.interner.get(bind_name))
*ident_to_str(bind_name))
}
ret_val.insert(bind_name, res[idx]);
ret_val.insert(*bind_name, res[idx]);
}
}
}
@ -372,9 +372,9 @@ pub fn parse(
|| bb_eis.len() > 1u {
let nts = str::connect(vec::map(bb_eis, |ei| {
match ei.elts[ei.idx].node {
match_nonterminal(bind,name,_) => {
fmt!("%s ('%s')", *sess.interner.get(name),
*sess.interner.get(bind))
match_nonterminal(ref bind,ref name,_) => {
fmt!("%s ('%s')", *ident_to_str(name),
*ident_to_str(bind))
}
_ => fail!()
} }), " or ");
@ -384,7 +384,7 @@ pub fn parse(
nts, next_eis.len()));
} else if (bb_eis.len() == 0u && next_eis.len() == 0u) {
return failure(sp, ~"No rules expected the token: "
+ to_str(rdr.interner(), &tok));
+ to_str(get_ident_interner(), &tok));
} else if (next_eis.len() > 0u) {
/* Now process the next token */
while(next_eis.len() > 0u) {
@ -396,9 +396,9 @@ pub fn parse(
let mut ei = bb_eis.pop();
match ei.elts[ei.idx].node {
match_nonterminal(_, name, idx) => {
match_nonterminal(_, ref name, idx) => {
ei.matches[idx].push(@matched_nonterminal(
parse_nt(&rust_parser, *sess.interner.get(name))));
parse_nt(&rust_parser, *ident_to_str(name))));
ei.idx += 1u;
}
_ => fail!()
@ -430,7 +430,7 @@ pub fn parse_nt(p: &Parser, name: &str) -> nonterminal {
"ident" => match *p.token {
token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) }
_ => p.fatal(~"expected ident, found "
+ token::to_str(p.reader.interner(), &copy *p.token))
+ token::to_str(get_ident_interner(), &copy *p.token))
},
"path" => token::nt_path(p.parse_path_with_tps(false)),
"tt" => {

View File

@ -21,7 +21,7 @@ use ext::tt::macro_parser::{named_match, matched_seq, matched_nonterminal};
use ext::tt::macro_parser::{parse, parse_or_else, success, failure};
use parse::lexer::{new_tt_reader, reader};
use parse::parser::Parser;
use parse::token::special_idents;
use parse::token::{get_ident_interner, special_idents, gensym_ident, ident_to_str};
use parse::token::{FAT_ARROW, SEMI, nt_matchers, nt_tt};
use print;
@ -38,8 +38,8 @@ pub fn add_new_extension(cx: @ExtCtxt,
spanned { node: copy m, span: dummy_sp() }
}
let lhs_nm = cx.parse_sess().interner.gensym("lhs");
let rhs_nm = cx.parse_sess().interner.gensym("rhs");
let lhs_nm = gensym_ident("lhs");
let rhs_nm = gensym_ident("rhs");
// The grammar for macro_rules! is:
// $( $lhs:mtcs => $rhs:tt );+
@ -57,7 +57,7 @@ pub fn add_new_extension(cx: @ExtCtxt,
// Parse the macro_rules! invocation (`none` is for no interpolations):
let arg_reader = new_tt_reader(copy cx.parse_sess().span_diagnostic,
cx.parse_sess().interner, None, copy arg);
None, copy arg);
let argument_map = parse_or_else(cx.parse_sess(),
cx.cfg(),
arg_reader as @reader,
@ -85,7 +85,7 @@ pub fn add_new_extension(cx: @ExtCtxt,
cx.str_of(name),
print::pprust::tt_to_str(
ast::tt_delim(vec::to_owned(arg)),
cx.parse_sess().interner)));
get_ident_interner())));
}
// Which arm's failure should we report? (the one furthest along)
@ -93,7 +93,6 @@ pub fn add_new_extension(cx: @ExtCtxt,
let mut best_fail_msg = ~"internal error: ran no matchers";
let s_d = cx.parse_sess().span_diagnostic;
let itr = cx.parse_sess().interner;
for lhses.eachi |i, lhs| { // try each arm's matchers
match *lhs {
@ -101,7 +100,6 @@ pub fn add_new_extension(cx: @ExtCtxt,
// `none` is because we're not interpolating
let arg_rdr = new_tt_reader(
s_d,
itr,
None,
vec::to_owned(arg)
) as @reader;
@ -122,7 +120,7 @@ pub fn add_new_extension(cx: @ExtCtxt,
_ => cx.span_bug(sp, "bad thing in rhs")
};
// rhs has holes ( `$id` and `$(...)` that need filled)
let trncbr = new_tt_reader(s_d, itr, Some(named_matches),
let trncbr = new_tt_reader(s_d, Some(named_matches),
rhs);
let p = @Parser(cx.parse_sess(),
cx.cfg(),
@ -151,7 +149,7 @@ pub fn add_new_extension(cx: @ExtCtxt,
|cx, sp, arg| generic_extension(cx, sp, name, arg, *lhses, *rhses);
return MRDef(MacroDef{
name: copy *cx.parse_sess().interner.get(name),
name: copy *ident_to_str(&name),
ext: NormalTT(base::SyntaxExpanderTT{expander: exp, span: Some(sp)})
});
}

View File

@ -15,7 +15,8 @@ use ast::{token_tree, tt_delim, tt_tok, tt_seq, tt_nonterminal,ident};
use codemap::{span, dummy_sp};
use diagnostic::span_handler;
use ext::tt::macro_parser::{named_match, matched_seq, matched_nonterminal};
use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident, ident_interner};
use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident};
use parse::token::{ident_to_str};
use parse::lexer::TokenAndSpan;
use core::hashmap::HashMap;
@ -33,7 +34,6 @@ struct TtFrame {
pub struct TtReader {
sp_diag: @span_handler,
interner: @ident_interner,
// the unzipped tree:
stack: @mut TtFrame,
/* for MBE-style macro transcription */
@ -49,13 +49,11 @@ pub struct TtReader {
* `src` contains no `tt_seq`s and `tt_nonterminal`s, `interp` can (and
* should) be none. */
pub fn new_tt_reader(sp_diag: @span_handler,
itr: @ident_interner,
interp: Option<HashMap<ident,@named_match>>,
src: ~[ast::token_tree])
-> @mut TtReader {
let r = @mut TtReader {
sp_diag: sp_diag,
interner: itr,
stack: @mut TtFrame {
forest: @mut src,
idx: 0u,
@ -93,7 +91,6 @@ fn dup_tt_frame(f: @mut TtFrame) -> @mut TtFrame {
pub fn dup_tt_reader(r: @mut TtReader) -> @mut TtReader {
@mut TtReader {
sp_diag: r.sp_diag,
interner: r.interner,
stack: dup_tt_frame(r.stack),
repeat_idx: copy r.repeat_idx,
repeat_len: copy r.repeat_len,
@ -126,7 +123,7 @@ fn lookup_cur_matched(r: &mut TtReader, name: ident) -> @named_match {
Some(s) => lookup_cur_matched_by_matched(r, s),
None => {
r.sp_diag.span_fatal(r.cur_span, fmt!("unknown macro variable `%s`",
*r.interner.get(name)));
*ident_to_str(&name)));
}
}
}
@ -139,13 +136,13 @@ fn lockstep_iter_size(t: &token_tree, r: &mut TtReader) -> lis {
match lhs {
lis_unconstrained => copy rhs,
lis_contradiction(_) => copy lhs,
lis_constraint(l_len, l_id) => match rhs {
lis_constraint(l_len, ref l_id) => match rhs {
lis_unconstrained => copy lhs,
lis_contradiction(_) => copy rhs,
lis_constraint(r_len, _) if l_len == r_len => copy lhs,
lis_constraint(r_len, r_id) => {
let l_n = copy *r.interner.get(l_id);
let r_n = copy *r.interner.get(r_id);
lis_constraint(r_len, ref r_id) => {
let l_n = copy *ident_to_str(l_id);
let r_n = copy *ident_to_str(r_id);
lis_contradiction(fmt!("Inconsistent lockstep iteration: \
'%s' has %u items, but '%s' has %u",
l_n, l_len, r_n, r_len))
@ -295,7 +292,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
r.sp_diag.span_fatal(
copy r.cur_span, /* blame the macro writer */
fmt!("variable '%s' is still repeating at this depth",
*r.interner.get(ident)));
*ident_to_str(&ident)));
}
}
}

View File

@ -18,6 +18,7 @@ use parse::lexer::{StringReader, bump, is_eof, nextch, TokenAndSpan};
use parse::lexer::{is_line_non_doc_comment, is_block_non_doc_comment};
use parse::lexer;
use parse::token;
use parse::token::{get_ident_interner};
use parse;
use core::io;
@ -323,12 +324,9 @@ pub fn gather_comments_and_literals(span_diagnostic:
srdr: @io::Reader)
-> (~[cmnt], ~[lit]) {
let src = @str::from_bytes(srdr.read_whole_stream());
let itr = parse::token::mk_fake_ident_interner();
let cm = CodeMap::new();
let filemap = cm.new_filemap(path, src);
let rdr = lexer::new_low_level_string_reader(span_diagnostic,
filemap,
itr);
let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap);
let mut comments: ~[cmnt] = ~[];
let mut literals: ~[lit] = ~[];
@ -358,7 +356,7 @@ pub fn gather_comments_and_literals(span_diagnostic:
debug!("tok lit: %s", s);
literals.push(lit {lit: s, pos: sp.lo});
} else {
debug!("tok: %s", token::to_str(rdr.interner, &tok));
debug!("tok: %s", token::to_str(get_ident_interner(), &tok));
}
first_read = false;
}

View File

@ -16,6 +16,7 @@ use parse::lexer::reader;
use parse::parser::Parser;
use parse::token::keywords;
use parse::token;
use parse::token::{get_ident_interner};
use opt_vec;
use opt_vec::OptVec;
@ -49,13 +50,13 @@ pub fn seq_sep_none() -> SeqSep {
// maps any token back to a string. not necessary if you know it's
// an identifier....
pub fn token_to_str(reader: @reader, token: &token::Token) -> ~str {
token::to_str(reader.interner(), token)
token::to_str(get_ident_interner(), token)
}
impl Parser {
// convert a token to a string using self's reader
pub fn token_to_str(&self, token: &token::Token) -> ~str {
token::to_str(self.reader.interner(), token)
token::to_str(get_ident_interner(), token)
}
// convert the current token to a string using self's reader
@ -142,7 +143,7 @@ impl Parser {
// true. Otherwise, return false.
pub fn eat_keyword(&self, kw: keywords::Keyword) -> bool {
let is_kw = match *self.token {
token::IDENT(sid, false) => kw.to_ident().repr == sid.repr,
token::IDENT(sid, false) => kw.to_ident().name == sid.name,
_ => false
};
if is_kw { self.bump() }

View File

@ -17,6 +17,7 @@ use diagnostic::span_handler;
use ext::tt::transcribe::{tt_next_token};
use ext::tt::transcribe::{dup_tt_reader};
use parse::token;
use parse::token::{str_to_ident};
use core::char;
use core::either;
@ -30,7 +31,6 @@ pub trait reader {
fn next_token(@mut self) -> TokenAndSpan;
fn fatal(@mut self, ~str) -> !;
fn span_diag(@mut self) -> @span_handler;
fn interner(@mut self) -> @token::ident_interner;
fn peek(@mut self) -> TokenAndSpan;
fn dup(@mut self) -> @reader;
}
@ -50,25 +50,22 @@ pub struct StringReader {
// The last character to be read
curr: char,
filemap: @codemap::FileMap,
interner: @token::ident_interner,
/* cached: */
peek_tok: token::Token,
peek_span: span
}
pub fn new_string_reader(span_diagnostic: @span_handler,
filemap: @codemap::FileMap,
itr: @token::ident_interner)
filemap: @codemap::FileMap)
-> @mut StringReader {
let r = new_low_level_string_reader(span_diagnostic, filemap, itr);
let r = new_low_level_string_reader(span_diagnostic, filemap);
string_advance_token(r); /* fill in peek_* */
return r;
}
/* For comments.rs, which hackily pokes into 'pos' and 'curr' */
pub fn new_low_level_string_reader(span_diagnostic: @span_handler,
filemap: @codemap::FileMap,
itr: @token::ident_interner)
filemap: @codemap::FileMap)
-> @mut StringReader {
// Force the initial reader bump to start on a fresh line
let initial_char = '\n';
@ -79,7 +76,6 @@ pub fn new_low_level_string_reader(span_diagnostic: @span_handler,
col: CharPos(0),
curr: initial_char,
filemap: filemap,
interner: itr,
/* dummy values; not read */
peek_tok: token::EOF,
peek_span: codemap::dummy_sp()
@ -100,7 +96,6 @@ fn dup_string_reader(r: @mut StringReader) -> @mut StringReader {
col: r.col,
curr: r.curr,
filemap: r.filemap,
interner: r.interner,
peek_tok: copy r.peek_tok,
peek_span: copy r.peek_span
}
@ -121,7 +116,6 @@ impl reader for StringReader {
self.span_diagnostic.span_fatal(copy self.peek_span, m)
}
fn span_diag(@mut self) -> @span_handler { self.span_diagnostic }
fn interner(@mut self) -> @token::ident_interner { self.interner }
fn peek(@mut self) -> TokenAndSpan {
TokenAndSpan {
tok: copy self.peek_tok,
@ -138,7 +132,6 @@ impl reader for TtReader {
self.sp_diag.span_fatal(copy self.cur_span, m);
}
fn span_diag(@mut self) -> @span_handler { self.sp_diag }
fn interner(@mut self) -> @token::ident_interner { self.interner }
fn peek(@mut self) -> TokenAndSpan {
TokenAndSpan {
tok: copy self.cur_tok,
@ -277,7 +270,7 @@ fn consume_any_line_comment(rdr: @mut StringReader)
// but comments with only more "/"s are not
if !is_line_non_doc_comment(acc) {
return Some(TokenAndSpan{
tok: token::DOC_COMMENT(rdr.interner.intern(acc)),
tok: token::DOC_COMMENT(str_to_ident(acc)),
sp: codemap::mk_sp(start_bpos, rdr.pos)
});
}
@ -331,7 +324,7 @@ fn consume_block_comment(rdr: @mut StringReader)
// but comments with only "*"s between two "/"s are not
if !is_block_non_doc_comment(acc) {
return Some(TokenAndSpan{
tok: token::DOC_COMMENT(rdr.interner.intern(acc)),
tok: token::DOC_COMMENT(str_to_ident(acc)),
sp: codemap::mk_sp(start_bpos, rdr.pos)
});
}
@ -477,12 +470,12 @@ fn scan_number(c: char, rdr: @mut StringReader) -> token::Token {
if c == '3' && n == '2' {
bump(rdr);
bump(rdr);
return token::LIT_FLOAT(rdr.interner.intern(num_str),
return token::LIT_FLOAT(str_to_ident(num_str),
ast::ty_f32);
} else if c == '6' && n == '4' {
bump(rdr);
bump(rdr);
return token::LIT_FLOAT(rdr.interner.intern(num_str),
return token::LIT_FLOAT(str_to_ident(num_str),
ast::ty_f64);
/* FIXME (#2252): if this is out of range for either a
32-bit or 64-bit float, it won't be noticed till the
@ -494,9 +487,9 @@ fn scan_number(c: char, rdr: @mut StringReader) -> token::Token {
}
if is_float {
if is_machine_float {
return token::LIT_FLOAT(rdr.interner.intern(num_str), ast::ty_f);
return token::LIT_FLOAT(str_to_ident(num_str), ast::ty_f);
}
return token::LIT_FLOAT_UNSUFFIXED(rdr.interner.intern(num_str));
return token::LIT_FLOAT_UNSUFFIXED(str_to_ident(num_str));
} else {
if str::len(num_str) == 0u {
rdr.fatal(~"no valid digits found for number");
@ -559,7 +552,7 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
let is_mod_name = c == ':' && nextch(rdr) == ':';
// FIXME: perform NFKC normalization here. (Issue #2253)
return token::IDENT(rdr.interner.intern(accum_str), is_mod_name);
return token::IDENT(str_to_ident(accum_str), is_mod_name);
}
if is_dec_digit(c) {
return scan_number(c, rdr);
@ -669,7 +662,7 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
lifetime_name.push_char(rdr.curr);
bump(rdr);
}
return token::LIFETIME(rdr.interner.intern(lifetime_name));
return token::LIFETIME(str_to_ident(lifetime_name));
}
// Otherwise it is a character constant:
@ -742,7 +735,7 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
}
}
bump(rdr);
return token::LIT_STR(rdr.interner.intern(accum_str));
return token::LIT_STR(str_to_ident(accum_str));
}
'-' => {
if nextch(rdr) == '>' {
@ -786,10 +779,10 @@ mod test {
use core::option::None;
use diagnostic;
use parse::token;
use parse::token::{str_to_ident};
// represents a testing reader (incl. both reader and interner)
struct Env {
interner: @token::ident_interner,
string_reader: @mut StringReader
}
@ -797,20 +790,18 @@ mod test {
fn setup(teststr: ~str) -> Env {
let cm = CodeMap::new();
let fm = cm.new_filemap(~"zebra.rs", @teststr);
let ident_interner = token::get_ident_interner();
let span_handler =
diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm);
Env {
interner: ident_interner,
string_reader: new_string_reader(span_handler,fm,ident_interner)
string_reader: new_string_reader(span_handler,fm)
}
}
#[test] fn t1 () {
let Env {interner: ident_interner, string_reader} =
let Env {string_reader} =
setup(~"/* my source file */ \
fn main() { io::println(~\"zebra\"); }\n");
let id = ident_interner.intern("fn");
let id = str_to_ident("fn");
let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan{
tok:token::IDENT(id, false),
@ -821,7 +812,7 @@ mod test {
// read another token:
let tok3 = string_reader.next_token();
let tok4 = TokenAndSpan{
tok:token::IDENT(ident_interner.intern("main"), false),
tok:token::IDENT(str_to_ident("main"), false),
sp:span {lo:BytePos(24),hi:BytePos(28),expn_info: None}};
assert_eq!(tok3,tok4);
// the lparen is already read:
@ -839,39 +830,39 @@ mod test {
}
// make the identifier by looking up the string in the interner
fn mk_ident (env: Env, id: &str, is_mod_name: bool) -> token::Token {
token::IDENT (env.interner.intern(id),is_mod_name)
fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
token::IDENT (str_to_ident(id),is_mod_name)
}
#[test] fn doublecolonparsing () {
let env = setup (~"a b");
check_tokenization (env,
~[mk_ident (env,"a",false),
mk_ident (env,"b",false)]);
~[mk_ident("a",false),
mk_ident("b",false)]);
}
#[test] fn dcparsing_2 () {
let env = setup (~"a::b");
check_tokenization (env,
~[mk_ident (env,"a",true),
~[mk_ident("a",true),
token::MOD_SEP,
mk_ident (env,"b",false)]);
mk_ident("b",false)]);
}
#[test] fn dcparsing_3 () {
let env = setup (~"a ::b");
check_tokenization (env,
~[mk_ident (env,"a",false),
~[mk_ident("a",false),
token::MOD_SEP,
mk_ident (env,"b",false)]);
mk_ident("b",false)]);
}
#[test] fn dcparsing_4 () {
let env = setup (~"a:: b");
check_tokenization (env,
~[mk_ident (env,"a",true),
~[mk_ident("a",true),
token::MOD_SEP,
mk_ident (env,"b",false)]);
mk_ident("b",false)]);
}
#[test] fn character_a() {
@ -899,7 +890,7 @@ mod test {
let env = setup(~"'abc");
let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token();
let id = env.interner.intern("abc");
let id = token::str_to_ident("abc");
assert_eq!(tok, token::LIFETIME(id));
}

View File

@ -19,7 +19,6 @@ use diagnostic::{span_handler, mk_span_handler, mk_handler, Emitter};
use parse::attr::parser_attr;
use parse::lexer::reader;
use parse::parser::Parser;
use parse::token::{ident_interner, get_ident_interner};
use core::io;
use core::option::{None, Option, Some};
@ -43,14 +42,10 @@ pub mod classify;
pub mod obsolete;
// info about a parsing session.
// This structure and the reader both have
// an interner associated with them. If they're
// not the same, bad things can happen.
pub struct ParseSess {
cm: @codemap::CodeMap, // better be the same as the one in the reader!
next_id: node_id,
span_diagnostic: @span_handler, // better be the same as the one in the reader!
interner: @ident_interner,
}
pub fn new_parse_sess(demitter: Option<Emitter>) -> @mut ParseSess {
@ -59,7 +54,6 @@ pub fn new_parse_sess(demitter: Option<Emitter>) -> @mut ParseSess {
cm: cm,
next_id: 1,
span_diagnostic: mk_span_handler(mk_handler(demitter), cm),
interner: get_ident_interner(),
}
}
@ -70,7 +64,6 @@ pub fn new_parse_sess_special_handler(sh: @span_handler,
cm: cm,
next_id: 1,
span_diagnostic: sh,
interner: get_ident_interner(),
}
}
@ -312,9 +305,7 @@ pub fn filemap_to_tts(sess: @mut ParseSess, filemap: @FileMap)
// it appears to me that the cfg doesn't matter here... indeed,
// parsing tt's probably shouldn't require a parser at all.
let cfg = ~[];
let srdr = lexer::new_string_reader(copy sess.span_diagnostic,
filemap,
sess.interner);
let srdr = lexer::new_string_reader(copy sess.span_diagnostic, filemap);
let p1 = Parser(sess, cfg, srdr as @reader);
p1.parse_all_token_trees()
}
@ -325,7 +316,6 @@ pub fn tts_to_parser(sess: @mut ParseSess,
cfg: ast::crate_cfg) -> Parser {
let trdr = lexer::new_tt_reader(
copy sess.span_diagnostic,
sess.interner,
None,
tts
);
@ -351,12 +341,13 @@ mod test {
use codemap::{span, BytePos, spanned};
use opt_vec;
use ast;
use ast::{new_ident};
use abi;
use parse::parser::Parser;
use parse::token::intern;
use util::parser_testing::{string_to_tts_and_sess,string_to_parser};
use parse::token::{intern, str_to_ident};
use util::parser_testing::{string_to_tts_and_sess, string_to_parser};
use util::parser_testing::{string_to_expr, string_to_item};
use util::parser_testing::{string_to_stmt};
use util::parser_testing::{string_to_stmt, strs_to_idents};
// map a string to tts, return the tt without its parsesess
fn string_to_tts_only(source_str : @~str) -> ~[ast::token_tree] {
@ -377,17 +368,12 @@ mod test {
span{lo:BytePos(a),hi:BytePos(b),expn_info:None}
}
// convert a vector of uints to a vector of ast::idents
fn ints_to_idents(ids: ~[~str]) -> ~[ast::ident] {
ids.map(|u| intern(*u))
}
#[test] fn path_exprs_1 () {
assert_eq!(string_to_expr(@~"a"),
@ast::expr{id:1,
node:ast::expr_path(@ast::Path {span:sp(0,1),
global:false,
idents:~[intern("a")],
idents:~[str_to_ident("a")],
rp:None,
types:~[]}),
span:sp(0,1)})
@ -399,7 +385,7 @@ mod test {
node:ast::expr_path(
@ast::Path {span:sp(0,6),
global:true,
idents:ints_to_idents(~[~"a",~"b"]),
idents:strs_to_idents(~["a","b"]),
rp:None,
types:~[]}),
span:sp(0,6)})
@ -449,7 +435,7 @@ mod test {
node:ast::expr_path(
@ast::Path{span:sp(7,8),
global:false,
idents:~[intern("d")],
idents:~[str_to_ident("d")],
rp:None,
types:~[]
}),
@ -466,7 +452,7 @@ mod test {
@ast::Path{
span:sp(0,1),
global:false,
idents:~[intern("b")],
idents:~[str_to_ident("b")],
rp:None,
types: ~[]}),
span: sp(0,1)},
@ -487,7 +473,7 @@ mod test {
@ast::Path{
span:sp(0,1),
global:false,
idents:~[intern("b")],
idents:~[str_to_ident("b")],
rp: None,
types: ~[]},
None // no idea
@ -506,7 +492,7 @@ mod test {
span:sp(4,4), // this is bizarre...
// check this in the original parser?
global:false,
idents:~[intern("int")],
idents:~[str_to_ident("int")],
rp: None,
types: ~[]},
2),
@ -516,7 +502,7 @@ mod test {
@ast::Path{
span:sp(0,1),
global:false,
idents:~[intern("b")],
idents:~[str_to_ident("b")],
rp: None,
types: ~[]},
None // no idea
@ -532,7 +518,7 @@ mod test {
// assignment order of the node_ids.
assert_eq!(string_to_item(@~"fn a (b : int) { b; }"),
Some(
@ast::item{ident:intern("a"),
@ast::item{ident:str_to_ident("a"),
attrs:~[],
id: 9, // fixme
node: ast::item_fn(ast::fn_decl{
@ -542,7 +528,7 @@ mod test {
node: ast::ty_path(@ast::Path{
span:sp(10,13),
global:false,
idents:~[intern("int")],
idents:~[str_to_ident("int")],
rp: None,
types: ~[]},
2),
@ -553,7 +539,7 @@ mod test {
@ast::Path{
span:sp(6,7),
global:false,
idents:~[intern("b")],
idents:~[str_to_ident("b")],
rp: None,
types: ~[]},
None // no idea
@ -583,7 +569,7 @@ mod test {
@ast::Path{
span:sp(17,18),
global:false,
idents:~[intern("b")],
idents:~[str_to_ident("b")],
rp:None,
types: ~[]}),
span: sp(17,18)},

View File

@ -85,7 +85,7 @@ use parse::obsolete::{ObsoleteLifetimeNotation, ObsoleteConstManagedPointer};
use parse::obsolete::{ObsoletePurity, ObsoleteStaticMethod};
use parse::obsolete::{ObsoleteConstItem, ObsoleteFixedLengthVectorType};
use parse::obsolete::{ObsoleteNamedExternModule, ObsoleteMultipleLocalDecl};
use parse::token::{can_begin_expr, is_ident, is_ident_or_path};
use parse::token::{can_begin_expr, get_ident_interner, ident_to_str, is_ident, is_ident_or_path};
use parse::token::{is_plain_ident, INTERPOLATED, keywords, special_idents, token_to_binop};
use parse::token;
use parse::{new_sub_parser_from_file, next_node_id, ParseSess};
@ -219,7 +219,7 @@ pub fn Parser(sess: @mut ParseSess,
rdr: @reader)
-> Parser {
let tok0 = copy rdr.next_token();
let interner = rdr.interner();
let interner = get_ident_interner();
Parser {
reader: rdr,
@ -333,7 +333,7 @@ impl Parser {
pub fn get_id(&self) -> node_id { next_node_id(self.sess) }
pub fn id_to_str(&self, id: ident) -> @~str {
self.sess.interner.get(id)
get_ident_interner().get(id.name)
}
// is this one of the keywords that signals a closure type?
@ -2628,6 +2628,13 @@ impl Parser {
// to the macro clause of parse_item_or_view_item. This
// could use some cleanup, it appears to me.
// whoops! I now have a guess: I'm guessing the "parens-only"
// rule here is deliberate, to allow macro users to use parens
// for things that should be parsed as stmt_mac, and braces
// for things that should expand into items. Tricky, and
// somewhat awkward... and probably undocumented. Of course,
// I could just be wrong.
check_expected_item(self, item_attrs);
// Potential trouble: if we allow macros with paths instead of
@ -3363,7 +3370,7 @@ impl Parser {
}
if fields.len() == 0 {
self.fatal(fmt!("Unit-like struct should be written as `struct %s;`",
*self.interner.get(class_name)));
*get_ident_interner().get(class_name.name)));
}
self.bump();
} else if *self.token == token::LPAREN {
@ -3575,7 +3582,7 @@ impl Parser {
}
fn push_mod_path(&self, id: ident, attrs: ~[ast::attribute]) {
let default_path = self.sess.interner.get(id);
let default_path = token::interner_get(id.name);
let file_path = match ::attr::first_attr_value_str_by_name(
attrs, "path") {
@ -3598,7 +3605,7 @@ impl Parser {
let prefix = prefix.dir_path();
let mod_path_stack = &*self.mod_path_stack;
let mod_path = Path(".").push_many(*mod_path_stack);
let default_path = *self.sess.interner.get(id) + ".rs";
let default_path = *token::interner_get(id.name) + ".rs";
let file_path = match ::attr::first_attr_value_str_by_name(
outer_attrs, "path") {
Some(d) => {
@ -3973,7 +3980,7 @@ impl Parser {
match *self.token {
token::LIT_STR(s) => {
self.bump();
let the_string = self.id_to_str(s);
let the_string = ident_to_str(&s);
let mut words = ~[];
for str::each_word(*the_string) |s| { words.push(s) }
let mut abis = AbiSet::empty();
@ -4535,7 +4542,7 @@ impl Parser {
match *self.token {
token::LIT_STR(s) => {
self.bump();
self.id_to_str(s)
ident_to_str(&s)
}
_ => self.fatal("expected string literal")
}

View File

@ -11,6 +11,7 @@
use core::prelude::*;
use ast;
use ast::Name;
use ast_util;
use parse::token;
use util::interner::StrInterner;
@ -21,6 +22,9 @@ use core::char;
use core::cmp::Equiv;
use core::local_data;
use core::str;
use core::hashmap::HashSet;
use core::rand;
use core::rand::RngUtil;
use core::to_bytes;
#[deriving(Encodable, Decodable, Eq)]
@ -176,29 +180,29 @@ pub fn to_str(in: @ident_interner, t: &Token) -> ~str {
u.to_str() + ast_util::uint_ty_to_str(t)
}
LIT_INT_UNSUFFIXED(i) => { i.to_str() }
LIT_FLOAT(s, t) => {
let mut body = copy *in.get(s);
LIT_FLOAT(ref s, t) => {
let mut body = copy *ident_to_str(s);
if body.ends_with(".") {
body += "0"; // `10.f` is not a float literal
}
body + ast_util::float_ty_to_str(t)
}
LIT_FLOAT_UNSUFFIXED(s) => {
let mut body = copy *in.get(s);
LIT_FLOAT_UNSUFFIXED(ref s) => {
let mut body = copy *ident_to_str(s);
if body.ends_with(".") {
body += "0"; // `10.f` is not a float literal
}
body
}
LIT_STR(s) => { ~"\"" + str::escape_default(*in.get(s)) + "\"" }
LIT_STR(ref s) => { ~"\"" + str::escape_default(*ident_to_str(s)) + "\"" }
/* Name components */
IDENT(s, _) => copy *in.get(s),
LIFETIME(s) => fmt!("'%s", *in.get(s)),
IDENT(s, _) => copy *in.get(s.name),
LIFETIME(s) => fmt!("'%s", *in.get(s.name)),
UNDERSCORE => ~"_",
/* Other */
DOC_COMMENT(s) => copy *in.get(s),
DOC_COMMENT(ref s) => copy *ident_to_str(s),
EOF => ~"<eof>",
INTERPOLATED(ref nt) => {
match nt {
@ -304,47 +308,47 @@ pub fn is_bar(t: &Token) -> bool {
pub mod special_idents {
use ast::ident;
pub static underscore : ident = ident { repr: 0, ctxt: 0};
pub static anon : ident = ident { repr: 1, ctxt: 0};
pub static invalid : ident = ident { repr: 2, ctxt: 0}; // ''
pub static unary : ident = ident { repr: 3, ctxt: 0};
pub static not_fn : ident = ident { repr: 4, ctxt: 0};
pub static idx_fn : ident = ident { repr: 5, ctxt: 0};
pub static unary_minus_fn : ident = ident { repr: 6, ctxt: 0};
pub static clownshoes_extensions : ident = ident { repr: 7, ctxt: 0};
pub static underscore : ident = ident { name: 0, ctxt: 0};
pub static anon : ident = ident { name: 1, ctxt: 0};
pub static invalid : ident = ident { name: 2, ctxt: 0}; // ''
pub static unary : ident = ident { name: 3, ctxt: 0};
pub static not_fn : ident = ident { name: 4, ctxt: 0};
pub static idx_fn : ident = ident { name: 5, ctxt: 0};
pub static unary_minus_fn : ident = ident { name: 6, ctxt: 0};
pub static clownshoes_extensions : ident = ident { name: 7, ctxt: 0};
pub static self_ : ident = ident { repr: 8, ctxt: 0}; // 'self'
pub static self_ : ident = ident { name: 8, ctxt: 0}; // 'self'
/* for matcher NTs */
pub static item : ident = ident { repr: 9, ctxt: 0};
pub static block : ident = ident { repr: 10, ctxt: 0};
pub static stmt : ident = ident { repr: 11, ctxt: 0};
pub static pat : ident = ident { repr: 12, ctxt: 0};
pub static expr : ident = ident { repr: 13, ctxt: 0};
pub static ty : ident = ident { repr: 14, ctxt: 0};
pub static ident : ident = ident { repr: 15, ctxt: 0};
pub static path : ident = ident { repr: 16, ctxt: 0};
pub static tt : ident = ident { repr: 17, ctxt: 0};
pub static matchers : ident = ident { repr: 18, ctxt: 0};
pub static item : ident = ident { name: 9, ctxt: 0};
pub static block : ident = ident { name: 10, ctxt: 0};
pub static stmt : ident = ident { name: 11, ctxt: 0};
pub static pat : ident = ident { name: 12, ctxt: 0};
pub static expr : ident = ident { name: 13, ctxt: 0};
pub static ty : ident = ident { name: 14, ctxt: 0};
pub static ident : ident = ident { name: 15, ctxt: 0};
pub static path : ident = ident { name: 16, ctxt: 0};
pub static tt : ident = ident { name: 17, ctxt: 0};
pub static matchers : ident = ident { name: 18, ctxt: 0};
pub static str : ident = ident { repr: 19, ctxt: 0}; // for the type
pub static str : ident = ident { name: 19, ctxt: 0}; // for the type
/* outside of libsyntax */
pub static ty_visitor : ident = ident { repr: 20, ctxt: 0};
pub static arg : ident = ident { repr: 21, ctxt: 0};
pub static descrim : ident = ident { repr: 22, ctxt: 0};
pub static clownshoe_abi : ident = ident { repr: 23, ctxt: 0};
pub static clownshoe_stack_shim : ident = ident { repr: 24, ctxt: 0};
pub static tydesc : ident = ident { repr: 25, ctxt: 0};
pub static main : ident = ident { repr: 26, ctxt: 0};
pub static opaque : ident = ident { repr: 27, ctxt: 0};
pub static blk : ident = ident { repr: 28, ctxt: 0};
pub static statik : ident = ident { repr: 29, ctxt: 0};
pub static intrinsic : ident = ident { repr: 30, ctxt: 0};
pub static clownshoes_foreign_mod: ident = ident { repr: 31, ctxt: 0};
pub static unnamed_field: ident = ident { repr: 32, ctxt: 0};
pub static c_abi: ident = ident { repr: 33, ctxt: 0};
pub static type_self: ident = ident { repr: 34, ctxt: 0}; // `Self`
pub static ty_visitor : ident = ident { name: 20, ctxt: 0};
pub static arg : ident = ident { name: 21, ctxt: 0};
pub static descrim : ident = ident { name: 22, ctxt: 0};
pub static clownshoe_abi : ident = ident { name: 23, ctxt: 0};
pub static clownshoe_stack_shim : ident = ident { name: 24, ctxt: 0};
pub static tydesc : ident = ident { name: 25, ctxt: 0};
pub static main : ident = ident { name: 26, ctxt: 0};
pub static opaque : ident = ident { name: 27, ctxt: 0};
pub static blk : ident = ident { name: 28, ctxt: 0};
pub static statik : ident = ident { name: 29, ctxt: 0};
pub static intrinsic : ident = ident { name: 30, ctxt: 0};
pub static clownshoes_foreign_mod: ident = ident { name: 31, ctxt: 0};
pub static unnamed_field: ident = ident { name: 32, ctxt: 0};
pub static c_abi: ident = ident { name: 33, ctxt: 0};
pub static type_self: ident = ident { name: 34, ctxt: 0}; // `Self`
}
pub struct StringRef<'self>(&'self str);
@ -394,25 +398,22 @@ pub struct ident_interner {
}
impl ident_interner {
pub fn intern(&self, val: &str) -> ast::ident {
ast::ident { repr: self.interner.intern(val), ctxt: 0 }
pub fn intern(&self, val: &str) -> Name {
self.interner.intern(val)
}
pub fn gensym(&self, val: &str) -> ast::ident {
ast::ident { repr: self.interner.gensym(val), ctxt: 0 }
pub fn gensym(&self, val: &str) -> Name {
self.interner.gensym(val)
}
pub fn get(&self, idx: ast::ident) -> @~str {
self.interner.get(idx.repr)
pub fn get(&self, idx: Name) -> @~str {
self.interner.get(idx)
}
// is this really something that should be exposed?
pub fn len(&self) -> uint {
self.interner.len()
}
pub fn find_equiv<Q:Hash +
IterBytes +
Equiv<@~str>>(&self, val: &Q) -> Option<ast::ident> {
match self.interner.find_equiv(val) {
Some(v) => Some(ast::ident { repr: v, ctxt: 0 }),
None => None,
}
pub fn find_equiv<Q:Hash + IterBytes + Equiv<@~str>>(&self, val: &Q)
-> Option<Name> {
self.interner.find_equiv(val)
}
}
@ -530,11 +531,51 @@ pub fn mk_fake_ident_interner() -> @ident_interner {
}
// maps a string to its interned representation
pub fn intern(str : &str) -> ast::ident {
pub fn intern(str : &str) -> Name {
let interner = get_ident_interner();
interner.intern(str)
}
// gensyms a new uint, using the current interner
pub fn gensym(str : &str) -> Name {
let interner = get_ident_interner();
interner.gensym(str)
}
// map an interned representation back to a string
pub fn interner_get(name : Name) -> @~str {
get_ident_interner().get(name)
}
// maps an identifier to the string that it corresponds to
pub fn ident_to_str(id : &ast::ident) -> @~str {
interner_get(id.name)
}
// maps a string to an identifier with an empty syntax context
pub fn str_to_ident(str : &str) -> ast::ident {
ast::new_ident(intern(str))
}
// maps a string to a gensym'ed identifier
pub fn gensym_ident(str : &str) -> ast::ident {
ast::new_ident(gensym(str))
}
// create a fresh name. In principle, this is just a
// gensym, but for debugging purposes, you'd like the
// resulting name to have a suggestive stringify, without
// paying the cost of guaranteeing that the name is
// truly unique. I'm going to try to strike a balance
// by using a gensym with a name that has a random number
// at the end. So, the gensym guarantees the uniqueness,
// and the int helps to avoid confusion.
pub fn fresh_name(src_name : &str) -> Name {
let num = rand::rng().gen_uint_range(0,0xffff);
gensym(fmt!("%s_%u",src_name,num))
}
/**
* All the valid words that have meaning in the Rust language.
*
@ -590,42 +631,42 @@ pub mod keywords {
impl Keyword {
pub fn to_ident(&self) -> ident {
match *self {
As => ident { repr: 35, ctxt: 0 },
Break => ident { repr: 36, ctxt: 0 },
Const => ident { repr: 37, ctxt: 0 },
Copy => ident { repr: 38, ctxt: 0 },
Do => ident { repr: 39, ctxt: 0 },
Else => ident { repr: 41, ctxt: 0 },
Enum => ident { repr: 42, ctxt: 0 },
Extern => ident { repr: 43, ctxt: 0 },
False => ident { repr: 44, ctxt: 0 },
Fn => ident { repr: 45, ctxt: 0 },
For => ident { repr: 46, ctxt: 0 },
If => ident { repr: 47, ctxt: 0 },
Impl => ident { repr: 48, ctxt: 0 },
Let => ident { repr: 49, ctxt: 0 },
__Log => ident { repr: 50, ctxt: 0 },
Loop => ident { repr: 51, ctxt: 0 },
Match => ident { repr: 52, ctxt: 0 },
Mod => ident { repr: 53, ctxt: 0 },
Mut => ident { repr: 54, ctxt: 0 },
Once => ident { repr: 55, ctxt: 0 },
Priv => ident { repr: 56, ctxt: 0 },
Pub => ident { repr: 57, ctxt: 0 },
Pure => ident { repr: 58, ctxt: 0 },
Ref => ident { repr: 59, ctxt: 0 },
Return => ident { repr: 60, ctxt: 0 },
Static => ident { repr: 29, ctxt: 0 },
Self => ident { repr: 8, ctxt: 0 },
Struct => ident { repr: 61, ctxt: 0 },
Super => ident { repr: 62, ctxt: 0 },
True => ident { repr: 63, ctxt: 0 },
Trait => ident { repr: 64, ctxt: 0 },
Type => ident { repr: 65, ctxt: 0 },
Unsafe => ident { repr: 66, ctxt: 0 },
Use => ident { repr: 67, ctxt: 0 },
While => ident { repr: 68, ctxt: 0 },
Be => ident { repr: 69, ctxt: 0 },
As => ident { name: 35, ctxt: 0 },
Break => ident { name: 36, ctxt: 0 },
Const => ident { name: 37, ctxt: 0 },
Copy => ident { name: 38, ctxt: 0 },
Do => ident { name: 39, ctxt: 0 },
Else => ident { name: 41, ctxt: 0 },
Enum => ident { name: 42, ctxt: 0 },
Extern => ident { name: 43, ctxt: 0 },
False => ident { name: 44, ctxt: 0 },
Fn => ident { name: 45, ctxt: 0 },
For => ident { name: 46, ctxt: 0 },
If => ident { name: 47, ctxt: 0 },
Impl => ident { name: 48, ctxt: 0 },
Let => ident { name: 49, ctxt: 0 },
__Log => ident { name: 50, ctxt: 0 },
Loop => ident { name: 51, ctxt: 0 },
Match => ident { name: 52, ctxt: 0 },
Mod => ident { name: 53, ctxt: 0 },
Mut => ident { name: 54, ctxt: 0 },
Once => ident { name: 55, ctxt: 0 },
Priv => ident { name: 56, ctxt: 0 },
Pub => ident { name: 57, ctxt: 0 },
Pure => ident { name: 58, ctxt: 0 },
Ref => ident { name: 59, ctxt: 0 },
Return => ident { name: 60, ctxt: 0 },
Static => ident { name: 29, ctxt: 0 },
Self => ident { name: 8, ctxt: 0 },
Struct => ident { name: 61, ctxt: 0 },
Super => ident { name: 62, ctxt: 0 },
True => ident { name: 63, ctxt: 0 },
Trait => ident { name: 64, ctxt: 0 },
Type => ident { name: 65, ctxt: 0 },
Unsafe => ident { name: 66, ctxt: 0 },
Use => ident { name: 67, ctxt: 0 },
While => ident { name: 68, ctxt: 0 },
Be => ident { name: 69, ctxt: 0 },
}
}
}
@ -633,14 +674,14 @@ pub mod keywords {
pub fn is_keyword(kw: keywords::Keyword, tok: &Token) -> bool {
match *tok {
token::IDENT(sid, false) => { kw.to_ident().repr == sid.repr }
token::IDENT(sid, false) => { kw.to_ident().name == sid.name }
_ => { false }
}
}
pub fn is_any_keyword(tok: &Token) -> bool {
match *tok {
token::IDENT(sid, false) => match sid.repr {
token::IDENT(sid, false) => match sid.name {
8 | 29 | 35 .. 69 => true,
_ => false,
},
@ -650,7 +691,7 @@ pub fn is_any_keyword(tok: &Token) -> bool {
pub fn is_strict_keyword(tok: &Token) -> bool {
match *tok {
token::IDENT(sid, false) => match sid.repr {
token::IDENT(sid, false) => match sid.name {
8 | 29 | 35 .. 68 => true,
_ => false,
},
@ -660,10 +701,21 @@ pub fn is_strict_keyword(tok: &Token) -> bool {
pub fn is_reserved_keyword(tok: &Token) -> bool {
match *tok {
token::IDENT(sid, false) => match sid.repr {
token::IDENT(sid, false) => match sid.name {
69 => true,
_ => false,
},
_ => false,
}
}
#[cfg(test)]
mod test {
use super::*;
use std::io;
#[test] fn t1() {
let a = fresh_name("ghi");
io::println(fmt!("interned name: %u,\ntextual name: %s\n",
a,*interner_get(a)));
}
}

View File

@ -21,7 +21,7 @@ use codemap::{CodeMap, BytePos};
use codemap;
use diagnostic;
use parse::classify::expr_is_simple_block;
use parse::token::ident_interner;
use parse::token::{ident_interner, ident_to_str};
use parse::{comments, token};
use parse;
use print::pp::{break_offset, word, space, zerobreak, hardbreak};
@ -1475,7 +1475,7 @@ pub fn print_decl(s: @ps, decl: @ast::decl) {
}
pub fn print_ident(s: @ps, ident: ast::ident) {
word(s.s, *s.intr.get(ident));
word(s.s, *ident_to_str(&ident));
}
pub fn print_for_decl(s: @ps, loc: @ast::local, coll: @ast::expr) {
@ -2236,7 +2236,7 @@ mod test {
use codemap;
use core::cmp::Eq;
use core::option::None;
use parse;
use parse::token;
fn string_check<T:Eq> (given : &T, expected: &T) {
if !(given == expected) {
@ -2246,8 +2246,7 @@ mod test {
#[test]
fn test_fun_to_str() {
let mock_interner = parse::token::mk_fake_ident_interner();
let abba_ident = mock_interner.intern("abba");
let abba_ident = token::str_to_ident("abba");
let decl = ast::fn_decl {
inputs: ~[],
@ -2258,14 +2257,13 @@ mod test {
};
let generics = ast_util::empty_generics();
assert_eq!(&fun_to_str(&decl, ast::impure_fn, abba_ident,
None, &generics, mock_interner),
None, &generics, token::get_ident_interner()),
&~"fn abba()");
}
#[test]
fn test_variant_to_str() {
let mock_interner = parse::token::mk_fake_ident_interner();
let ident = mock_interner.intern("principal_skinner");
let ident = token::str_to_ident("principal_skinner");
let var = codemap::respan(codemap::dummy_sp(), ast::variant_ {
name: ident,
@ -2277,7 +2275,7 @@ mod test {
vis: ast::public,
});
let varstr = variant_to_str(&var,mock_interner);
let varstr = variant_to_str(&var,token::get_ident_interner());
assert_eq!(&varstr,&~"pub principal_skinner");
}
}

View File

@ -64,9 +64,6 @@ impl<T:Eq + IterBytes + Hash + Const + Copy> Interner<T> {
new_idx
}
// this isn't "pure" in the traditional sense, because it can go from
// failing to returning a value as items are interned. But for typestate,
// where we first check a pred and then rely on it, ceasing to fail is ok.
pub fn get(&self, idx: uint) -> T { self.vect[idx] }
pub fn len(&self) -> uint { let vect = &*self.vect; vect.len() }

View File

@ -10,11 +10,11 @@
use core::option::{Option,None};
use ast;
use parse::parser::Parser;
use parse::{new_parse_sess};
use syntax::parse::{ParseSess,string_to_filemap,filemap_to_tts};
use syntax::parse::{new_parser_from_source_str};
use parse::{ParseSess,string_to_filemap,filemap_to_tts};
use parse::{new_parser_from_source_str};
use parse::parser::Parser;
use parse::token;
// map a string to tts, using a made-up filename: return both the token_trees
// and the ParseSess
@ -54,7 +54,18 @@ pub fn string_to_item_and_sess (source_str : @~str) -> (Option<@ast::item>,@mut
(p.parse_item(~[]),ps)
}
pub fn string_to_stmt (source_str : @~str) -> @ast::stmt {
// parse a string, return a stmt
pub fn string_to_stmt(source_str : @~str) -> @ast::stmt {
string_to_parser(source_str).parse_stmt(~[])
}
// parse a string, return a pat. Uses "irrefutable"... which doesn't
// (currently) affect parsing.
pub fn string_to_pat(source_str : @~str) -> @ast::pat {
string_to_parser(source_str).parse_pat()
}
// convert a vector of strings to a vector of ast::idents
pub fn strs_to_idents(ids: ~[&str]) -> ~[ast::ident] {
ids.map(|u| token::str_to_ident(*u))
}