libsyntax: Remove uses of ~str from libsyntax, and fix fallout

commit 7f8f3dcf17
parent e454851813
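Every change below follows one mechanical pattern: values that used to be built as the owned `~str` type are now built as `StrBuf` (pre-1.0 Rust's growable owned string, later renamed to `String`), call sites convert with `.to_strbuf()`, and places that need a borrowed string slice ask for one explicitly with `.as_slice()`. As a rough sketch of that pattern transposed to modern Rust, where `String` stands in for `StrBuf` and `&str` for the slice type (the function names here are illustrative, not from this commit):

```rust
// Hedged illustration of the ~str -> StrBuf migration, written in modern
// Rust: String plays the role of StrBuf, &str the role of the borrowed
// slice that `.as_slice()` produced in the 2014 API.

// Before: fn node_to_str(id: u32) -> ~str { ... }
// After: the helper returns the owned, growable buffer type.
fn node_to_str(id: u32) -> String {
    // format! already yields an owned buffer, so no extra conversion is
    // needed here (the diff adds `.to_strbuf()` for the same effect).
    format!("node (id={})", id)
}

// Callers that need a slice borrow it explicitly, mirroring the
// `.as_slice()` calls this diff inserts throughout.
fn print_node(id: u32) {
    let owned = node_to_str(id);
    let borrowed: &str = owned.as_str(); // `.as_slice()` in the 2014 API
    println!("{}", borrowed);
}

fn main() {
    print_node(7);
}
```

The same two moves (owned buffer at the definition, explicit borrow at the use) account for nearly every hunk that follows.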
@@ -536,7 +536,7 @@ pub fn crate_id_hash(crate_id: &CrateId) -> ~str {
     // the crate id in the hash because lookups are only done by (name/vers),
     // not by path.
     let mut s = Sha256::new();
-    s.input_str(crate_id.short_name_with_version());
+    s.input_str(crate_id.short_name_with_version().as_slice());
     truncated_hash_result(&mut s).slice_to(8).to_owned()
 }

@@ -566,7 +566,7 @@ fn symbol_hash(tcx: &ty::ctxt,
     // to be independent of one another in the crate.

     symbol_hasher.reset();
-    symbol_hasher.input_str(link_meta.crateid.name);
+    symbol_hasher.input_str(link_meta.crateid.name.as_slice());
     symbol_hasher.input_str("-");
     symbol_hasher.input_str(link_meta.crate_hash.as_str());
     symbol_hasher.input_str("-");
@@ -143,8 +143,8 @@ pub fn build_configuration(sess: &Session) -> ast::CrateConfig {
 fn parse_cfgspecs(cfgspecs: Vec<~str> )
                   -> ast::CrateConfig {
     cfgspecs.move_iter().map(|s| {
-        parse::parse_meta_from_source_str("cfgspec".to_str(),
-                                          s,
+        parse::parse_meta_from_source_str("cfgspec".to_strbuf(),
+                                          s.to_strbuf(),
                                           Vec::new(),
                                           &parse::new_parse_sess())
     }).collect::<ast::CrateConfig>()
@@ -175,8 +175,8 @@ pub fn phase_1_parse_input(sess: &Session, cfg: ast::CrateConfig, input: &Input)
             parse::parse_crate_from_file(&(*file), cfg.clone(), &sess.parse_sess)
         }
         StrInput(ref src) => {
-            parse::parse_crate_from_source_str(anon_src(),
-                                               (*src).clone(),
+            parse::parse_crate_from_source_str(anon_src().to_strbuf(),
+                                               src.to_strbuf(),
                                                cfg.clone(),
                                                &sess.parse_sess)
         }
@@ -528,7 +528,7 @@ fn write_out_deps(sess: &Session,
     // write Makefile-compatible dependency rules
     let files: Vec<~str> = sess.codemap().files.borrow()
                                .iter().filter(|fmap| fmap.is_real_file())
-                               .map(|fmap| fmap.name.clone())
+                               .map(|fmap| fmap.name.to_owned())
                                .collect();
     let mut file = try!(io::File::create(&deps_filename));
     for path in out_filenames.iter() {
@@ -604,20 +604,20 @@ impl pprust::PpAnn for IdentifiedAnnotation {
         match node {
             pprust::NodeItem(item) => {
                 try!(pp::space(&mut s.s));
-                s.synth_comment(item.id.to_str())
+                s.synth_comment(item.id.to_str().to_strbuf())
             }
             pprust::NodeBlock(blk) => {
                 try!(pp::space(&mut s.s));
-                s.synth_comment("block ".to_owned() + blk.id.to_str())
+                s.synth_comment((format!("block {}", blk.id)).to_strbuf())
             }
             pprust::NodeExpr(expr) => {
                 try!(pp::space(&mut s.s));
-                try!(s.synth_comment(expr.id.to_str()));
+                try!(s.synth_comment(expr.id.to_str().to_strbuf()));
                 s.pclose()
             }
             pprust::NodePat(pat) => {
                 try!(pp::space(&mut s.s));
-                s.synth_comment("pat ".to_owned() + pat.id.to_str())
+                s.synth_comment((format!("pat {}", pat.id)).to_strbuf())
             }
         }
     }
@@ -692,7 +692,7 @@ pub fn pretty_print_input(sess: Session,
             pprust::print_crate(sess.codemap(),
                                 sess.diagnostic(),
                                 &krate,
-                                src_name,
+                                src_name.to_strbuf(),
                                 &mut rdr,
                                 out,
                                 &IdentifiedAnnotation,
@@ -707,7 +707,7 @@ pub fn pretty_print_input(sess: Session,
             pprust::print_crate(annotation.analysis.ty_cx.sess.codemap(),
                                 annotation.analysis.ty_cx.sess.diagnostic(),
                                 &krate,
-                                src_name,
+                                src_name.to_strbuf(),
                                 &mut rdr,
                                 out,
                                 &annotation,
@@ -717,7 +717,7 @@ pub fn pretty_print_input(sess: Session,
             pprust::print_crate(sess.codemap(),
                                 sess.diagnostic(),
                                 &krate,
-                                src_name,
+                                src_name.to_strbuf(),
                                 &mut rdr,
                                 out,
                                 &pprust::NoAnn,
@@ -471,7 +471,8 @@ cgoptions!(
 )

 // Seems out of place, but it uses session, so I'm putting it here
-pub fn expect<T:Clone>(sess: &Session, opt: Option<T>, msg: || -> ~str) -> T {
+pub fn expect<T:Clone>(sess: &Session, opt: Option<T>, msg: || -> StrBuf)
+              -> T {
     diagnostic::expect(sess.diagnostic(), opt, msg)
 }

@@ -168,7 +168,7 @@ fn generate_test_harness(sess: &Session, krate: ast::Crate)
     cx.ext_cx.bt_push(ExpnInfo {
         call_site: DUMMY_SP,
         callee: NameAndSpan {
-            name: "test".to_owned(),
+            name: "test".to_strbuf(),
             format: MacroAttribute,
             span: None
         }
@@ -398,7 +398,7 @@ fn mk_tests(cx: &TestCtxt) -> @ast::Item {

 fn is_test_crate(krate: &ast::Crate) -> bool {
     match attr::find_crateid(krate.attrs.as_slice()) {
-        Some(ref s) if "test" == s.name => true,
+        Some(ref s) if "test" == s.name.as_slice() => true,
         _ => false
     }
 }
@@ -427,7 +427,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {

     let name_lit: ast::Lit =
         nospan(ast::LitStr(token::intern_and_get_ident(
-                   ast_util::path_name_i(path.as_slice())),
+                   ast_util::path_name_i(path.as_slice()).as_slice()),
                ast::CookedStr));

     let name_expr = @ast::Expr {
@@ -352,10 +352,11 @@ fn parse_crate_attrs(sess: &session::Session, input: &d::Input) ->
                                                   &sess.parse_sess)
         }
         d::StrInput(ref src) => {
-            parse::parse_crate_attrs_from_source_str(d::anon_src(),
-                                                     (*src).clone(),
-                                                     Vec::new(),
-                                                     &sess.parse_sess)
+            parse::parse_crate_attrs_from_source_str(
+                d::anon_src().to_strbuf(),
+                src.to_strbuf(),
+                Vec::new(),
+                &sess.parse_sess)
         }
     };
     result.move_iter().collect()
@@ -428,11 +428,11 @@ impl<'a> CrateLoader for Loader<'a> {
         };
         let macros = decoder::get_exported_macros(library.metadata.as_slice());
         let registrar = decoder::get_macro_registrar_fn(library.metadata.as_slice()).map(|id| {
-            decoder::get_symbol(library.metadata.as_slice(), id)
+            decoder::get_symbol(library.metadata.as_slice(), id).to_strbuf()
         });
         let mc = MacroCrate {
             lib: library.dylib.clone(),
-            macros: macros.move_iter().collect(),
+            macros: macros.move_iter().map(|x| x.to_strbuf()).collect(),
             registrar_symbol: registrar,
         };
         if should_link {
@@ -207,12 +207,17 @@ pub fn get_field_type(tcx: &ty::ctxt, class_id: ast::DefId,
     let all_items = reader::get_doc(reader::Doc(cdata.data()), tag_items);
     let class_doc = expect(tcx.sess.diagnostic(),
                            decoder::maybe_find_item(class_id.node, all_items),
-                           || format!("get_field_type: class ID {:?} not found",
-                                      class_id) );
+                           || {
+                               (format!("get_field_type: class ID {:?} not found",
+                                        class_id)).to_strbuf()
+                           });
     let the_field = expect(tcx.sess.diagnostic(),
                            decoder::maybe_find_item(def.node, class_doc),
-                           || format!("get_field_type: in class {:?}, field ID {:?} not found",
-                                      class_id, def) );
+                           || {
+                               (format!("get_field_type: in class {:?}, field ID {:?} not found",
+                                        class_id,
+                                        def)).to_strbuf()
+                           });
     let ty = decoder::item_type(def, the_field, tcx, &*cdata);
     ty::ty_param_bounds_and_ty {
         generics: ty::Generics {type_param_defs: Rc::new(Vec::new()),
@@ -1563,7 +1563,7 @@ impl<'a, 'b, 'c> Visitor<()> for MacroDefVisitor<'a, 'b, 'c> {
                 let def = self.ecx.tcx.sess.codemap().span_to_snippet(item.span)
                     .expect("Unable to find source for macro");
                 self.ebml_w.start_tag(tag_macro_def);
-                self.ebml_w.wr_str(def);
+                self.ebml_w.wr_str(def.as_slice());
                 self.ebml_w.end_tag();
             }
             _ => {}
@@ -112,8 +112,8 @@ impl<'a, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, O> {
                 "".to_owned()
             };

-            try!(ps.synth_comment(format!("id {}: {}{}{}", id, entry_str,
-                                          gens_str, kills_str)));
+            try!(ps.synth_comment((format!("id {}: {}{}{}", id, entry_str,
+                                           gens_str, kills_str)).to_strbuf()));
             try!(pp::space(&mut ps.s));
         }
         Ok(())
@@ -569,7 +569,7 @@ fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: ~str, sp: Span) {
 fn check_pat(cx: &mut Context, pat: &Pat) {
     let var_name = match pat.node {
         PatWild => Some("_".to_owned()),
-        PatIdent(_, ref path, _) => Some(path_to_str(path)),
+        PatIdent(_, ref path, _) => Some(path_to_str(path).to_owned()),
         _ => None
     };

@@ -357,7 +357,7 @@ enum FieldName {
 impl<'a> PrivacyVisitor<'a> {
     // used when debugging
     fn nodestr(&self, id: ast::NodeId) -> ~str {
-        self.tcx.map.node_to_str(id)
+        self.tcx.map.node_to_str(id).to_owned()
     }

     // Determines whether the given definition is public from the point of view
@@ -3167,12 +3167,12 @@ impl<'a> Resolver<'a> {
                              .codemap()
                              .span_to_snippet(imports.get(index).span)
                              .unwrap();
-                if sn.contains("::") {
+                if sn.as_slice().contains("::") {
                     self.resolve_error(imports.get(index).span,
                                        "unresolved import");
                 } else {
                     let err = format!("unresolved import (maybe you meant `{}::*`?)",
-                                      sn.slice(0, sn.len()));
+                                      sn.as_slice().slice(0, sn.len()));
                     self.resolve_error(imports.get(index).span, err);
                 }
             }
@@ -1145,7 +1145,11 @@ pub fn new_fn_ctxt<'a>(ccx: &'a CrateContext,
     for p in param_substs.iter() { p.validate(); }

     debug!("new_fn_ctxt(path={}, id={}, param_substs={})",
-           if id == -1 { "".to_owned() } else { ccx.tcx.map.path_to_str(id) },
+           if id == -1 {
+               "".to_owned()
+           } else {
+               ccx.tcx.map.path_to_str(id).to_owned()
+           },
            id, param_substs.map(|s| s.repr(ccx.tcx())));

     let substd_output_type = match param_substs {
@@ -1458,7 +1462,7 @@ pub fn trans_fn(ccx: &CrateContext,
                 param_substs: Option<&param_substs>,
                 id: ast::NodeId,
                 attrs: &[ast::Attribute]) {
-    let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id));
+    let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id).to_owned());
     debug!("trans_fn(param_substs={})", param_substs.map(|s| s.repr(ccx.tcx())));
     let _icx = push_ctxt("trans_fn");
     let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx(), id));
@@ -2161,9 +2165,10 @@ pub fn trans_crate(krate: ast::Crate,
     // crashes if the module identifer is same as other symbols
     // such as a function name in the module.
     // 1. http://llvm.org/bugs/show_bug.cgi?id=11479
-    let llmod_id = link_meta.crateid.name + ".rs";
+    let mut llmod_id = link_meta.crateid.name.clone();
+    llmod_id.push_str(".rs");

-    let ccx = CrateContext::new(llmod_id, tcx, exp_map2,
+    let ccx = CrateContext::new(llmod_id.as_slice(), tcx, exp_map2,
                                 Sha256::new(), link_meta, reachable);
     {
         let _icx = push_ctxt("text");
@@ -364,7 +364,7 @@ pub fn trans_fn_ref_with_vtables(
     let map_node = session::expect(
         ccx.sess(),
         tcx.map.find(def_id.node),
-        || format!("local item should be in ast map"));
+        || "local item should be in ast map".to_strbuf());

     match map_node {
         ast_map::NodeForeignItem(_) => {
@@ -426,7 +426,7 @@ impl<'a> Block<'a> {
     }

     pub fn node_id_to_str(&self, id: ast::NodeId) -> ~str {
-        self.tcx().map.node_to_str(id)
+        self.tcx().map.node_to_str(id).to_owned()
     }

     pub fn expr_to_str(&self, e: &ast::Expr) -> ~str {
@@ -839,7 +839,10 @@ pub fn filename_and_line_num_from_span(bcx: &Block, span: Span)
                                        -> (ValueRef, ValueRef) {
     let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
     let filename_cstr = C_cstr(bcx.ccx(),
-                               token::intern_and_get_ident(loc.file.name), true);
+                               token::intern_and_get_ident(loc.file
+                                                              .name
+                                                              .as_slice()),
+                               true);
     let filename = build::PointerCast(bcx, filename_cstr, Type::i8p(bcx.ccx()));
     let line = C_int(bcx.ccx(), loc.line as int);
     (filename, line)
@@ -345,7 +345,11 @@ pub fn trans_fail<'a>(
     let v_fail_str = C_cstr(ccx, fail_str, true);
     let _icx = push_ctxt("trans_fail_value");
     let loc = bcx.sess().codemap().lookup_char_pos(sp.lo);
-    let v_filename = C_cstr(ccx, token::intern_and_get_ident(loc.file.name), true);
+    let v_filename = C_cstr(ccx,
+                            token::intern_and_get_ident(loc.file
+                                                           .name
+                                                           .as_slice()),
+                            true);
     let v_line = loc.line as int;
     let v_str = PointerCast(bcx, v_fail_str, Type::i8p(ccx));
     let v_filename = PointerCast(bcx, v_filename, Type::i8p(ccx));
@@ -330,7 +330,7 @@ pub fn create_global_var_metadata(cx: &CrateContext,
     };

     let filename = span_start(cx, span).file.name.clone();
-    let file_metadata = file_metadata(cx, filename);
+    let file_metadata = file_metadata(cx, filename.as_slice());

     let is_local_to_unit = is_node_local_to_unit(cx, node_id);
     let loc = span_start(cx, span);
@@ -700,7 +700,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
     }

     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());

     let function_type_metadata = unsafe {
         let fn_signature = get_function_signature(cx, fn_ast_id, fn_decl, param_substs, span);
@@ -1011,7 +1011,7 @@ fn compile_unit_metadata(cx: &CrateContext) {
     });

     fn fallback_path(cx: &CrateContext) -> CString {
-        cx.link_meta.crateid.name.to_c_str()
+        cx.link_meta.crateid.name.as_slice().to_c_str()
     }
 }

@@ -1025,7 +1025,7 @@ fn declare_local(bcx: &Block,
     let cx: &CrateContext = bcx.ccx();

     let filename = span_start(cx, span).file.name.clone();
-    let file_metadata = file_metadata(cx, filename);
+    let file_metadata = file_metadata(cx, filename.as_slice());

     let name = token::get_ident(variable_ident);
     let loc = span_start(cx, span);
@@ -1277,7 +1277,7 @@ fn prepare_struct_metadata(cx: &CrateContext,
     let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);

     let file_name = span_start(cx, definition_span).file.name.clone();
-    let file_metadata = file_metadata(cx, file_name);
+    let file_metadata = file_metadata(cx, file_name.as_slice());

     let struct_metadata_stub = create_struct_stub(cx,
                                                   struct_llvm_type,
@@ -1371,7 +1371,7 @@ fn prepare_tuple_metadata(cx: &CrateContext,
     let tuple_llvm_type = type_of::type_of(cx, tuple_type);

     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());

     UnfinishedMetadata {
         cache_id: cache_id_for_type(tuple_type),
@@ -1533,7 +1533,7 @@ fn prepare_enum_metadata(cx: &CrateContext,

     let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id);
     let loc = span_start(cx, definition_span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());

     // For empty enums there is an early exit. Just describe it as an empty struct with the
     // appropriate type name
@@ -1903,7 +1903,7 @@ fn boxed_type_metadata(cx: &CrateContext,
     ];

     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());

     return composite_type_metadata(
         cx,
@@ -2004,7 +2004,7 @@ fn vec_metadata(cx: &CrateContext,
     assert!(member_descriptions.len() == member_llvm_types.len());

     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());

     composite_type_metadata(
         cx,
@@ -2055,7 +2055,7 @@ fn vec_slice_metadata(cx: &CrateContext,
     assert!(member_descriptions.len() == member_llvm_types.len());

     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());

     return composite_type_metadata(
         cx,
@@ -2081,7 +2081,7 @@ fn subroutine_type_metadata(cx: &CrateContext,
                             span: Span)
                          -> DICompositeType {
     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());

     let mut signature_metadata: Vec<DIType> =
         Vec::with_capacity(signature.inputs.len() + 1);
@@ -2126,7 +2126,7 @@ fn trait_metadata(cx: &CrateContext,
     let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);

     let file_name = span_start(cx, definition_span).file.name.clone();
-    let file_metadata = file_metadata(cx, file_name);
+    let file_metadata = file_metadata(cx, file_name.as_slice());

     let trait_llvm_type = type_of::type_of(cx, trait_type);

@@ -2420,7 +2420,7 @@ fn populate_scope_map(cx: &CrateContext,
                                  &mut HashMap<ast::NodeId, DIScope>|) {
         // Create a new lexical scope and push it onto the stack
         let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo);
-        let file_metadata = file_metadata(cx, loc.file.name);
+        let file_metadata = file_metadata(cx, loc.file.name.as_slice());
         let parent_scope = scope_stack.last().unwrap().scope_metadata;

         let scope_metadata = unsafe {
@@ -2538,7 +2538,10 @@ fn populate_scope_map(cx: &CrateContext,
             if need_new_scope {
                 // Create a new lexical scope and push it onto the stack
                 let loc = cx.sess().codemap().lookup_char_pos(pat.span.lo);
-                let file_metadata = file_metadata(cx, loc.file.name);
+                let file_metadata = file_metadata(cx,
+                                                  loc.file
+                                                     .name
+                                                     .as_slice());
                 let parent_scope = scope_stack.last().unwrap().scope_metadata;

                 let scope_metadata = unsafe {
@@ -2860,7 +2863,10 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc<NamespaceTree
     ty::with_path(cx.tcx(), def_id, |path| {
         // prepend crate name if not already present
         let krate = if def_id.krate == ast::LOCAL_CRATE {
-            let crate_namespace_ident = token::str_to_ident(cx.link_meta.crateid.name);
+            let crate_namespace_ident = token::str_to_ident(cx.link_meta
                                                               .crateid
                                                               .name
                                                               .as_slice());
             Some(ast_map::PathMod(crate_namespace_ident.name))
         } else {
             None
@@ -109,9 +109,11 @@ pub fn monomorphic_fn(ccx: &CrateContext,
     let map_node = session::expect(
         ccx.sess(),
         ccx.tcx.map.find(fn_id.node),
-        || format!("while monomorphizing {:?}, couldn't find it in the \
-                    item map (may have attempted to monomorphize an item \
-                    defined in a different crate?)", fn_id));
+        || {
+            (format!("while monomorphizing {:?}, couldn't find it in the \
+                      item map (may have attempted to monomorphize an item \
+                      defined in a different crate?)", fn_id)).to_strbuf()
+        });

     match map_node {
         ast_map::NodeForeignItem(_) => {
@@ -3709,7 +3709,7 @@ pub fn substd_enum_variants(cx: &ctxt,
 }

 pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> ~str {
-    with_path(cx, id, |path| ast_map::path_to_str(path))
+    with_path(cx, id, |path| ast_map::path_to_str(path)).to_owned()
 }

 pub enum DtorKind {
@@ -341,9 +341,9 @@ pub fn ty_to_str(cx: &ctxt, typ: t) -> ~str {
       ty_bot => "!".to_owned(),
       ty_bool => "bool".to_owned(),
       ty_char => "char".to_owned(),
-      ty_int(t) => ast_util::int_ty_to_str(t, None),
-      ty_uint(t) => ast_util::uint_ty_to_str(t, None),
-      ty_float(t) => ast_util::float_ty_to_str(t),
+      ty_int(t) => ast_util::int_ty_to_str(t, None).to_owned(),
+      ty_uint(t) => ast_util::uint_ty_to_str(t, None).to_owned(),
+      ty_float(t) => ast_util::float_ty_to_str(t).to_owned(),
       ty_box(typ) => "@".to_owned() + ty_to_str(cx, typ),
       ty_uniq(typ) => "~".to_owned() + ty_to_str(cx, typ),
       ty_ptr(ref tm) => "*".to_owned() + mt_to_str(cx, tm),
@@ -870,7 +870,7 @@ impl Repr for ty::BuiltinBounds {

 impl Repr for Span {
     fn repr(&self, tcx: &ctxt) -> ~str {
-        tcx.sess.codemap().span_to_str(*self)
+        tcx.sess.codemap().span_to_str(*self).to_owned()
     }
 }

@@ -92,7 +92,7 @@ impl<'a> Clean<Crate> for visit_ast::RustdocVisitor<'a> {
         let id = link::find_crate_id(self.attrs.as_slice(),
                                      t_outputs.out_filestem);
         Crate {
-            name: id.name,
+            name: id.name.to_owned(),
             module: Some(self.module.clean()),
             externs: externs,
         }
@@ -1239,7 +1239,7 @@ impl ToSource for syntax::codemap::Span {
         let ctxt = super::ctxtkey.get().unwrap();
         let cm = ctxt.sess().codemap().clone();
         let sn = match cm.span_to_snippet(*self) {
-            Some(x) => x,
+            Some(x) => x.to_owned(),
             None => "".to_owned()
         };
         debug!("got snippet {}", sn);
@@ -27,7 +27,9 @@ use t = syntax::parse::token;
 /// Highlights some source code, returning the HTML output.
 pub fn highlight(src: &str, class: Option<&str>) -> ~str {
     let sess = parse::new_parse_sess();
-    let fm = parse::string_to_filemap(&sess, src.to_owned(), "<stdin>".to_owned());
+    let fm = parse::string_to_filemap(&sess,
+                                      src.to_strbuf(),
+                                      "<stdin>".to_strbuf());

     let mut out = io::MemWriter::new();
     doit(&sess,
@@ -70,11 +72,11 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader, class: Option<&
                     hi: test,
                     expn_info: None,
                 }).unwrap();
-                if snip.contains("/") {
+                if snip.as_slice().contains("/") {
                     try!(write!(out, "<span class='comment'>{}</span>",
-                                Escape(snip)));
+                                Escape(snip.as_slice())));
                 } else {
-                    try!(write!(out, "{}", Escape(snip)));
+                    try!(write!(out, "{}", Escape(snip.as_slice())));
                 }
             }
             last = next.sp.hi;
@@ -171,10 +173,10 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader, class: Option<&
         // stringifying this token
         let snip = sess.span_diagnostic.cm.span_to_snippet(next.sp).unwrap();
         if klass == "" {
-            try!(write!(out, "{}", Escape(snip)));
+            try!(write!(out, "{}", Escape(snip.as_slice())));
         } else {
             try!(write!(out, "<span class='{}'>{}</span>", klass,
-                        Escape(snip)));
+                        Escape(snip.as_slice())));
         }
     }

@@ -79,7 +79,7 @@ impl<'a, T: Copy> Iterator<T> for Values<'a, T> {
 /// The type of the iterator used by with_path.
 pub type PathElems<'a, 'b> = iter::Chain<Values<'a, PathElem>, LinkedPath<'b>>;

-pub fn path_to_str<PI: Iterator<PathElem>>(mut path: PI) -> ~str {
+pub fn path_to_str<PI: Iterator<PathElem>>(mut path: PI) -> StrBuf {
     let itr = token::get_ident_interner();

     path.fold(StrBuf::new(), |mut s, e| {
@@ -89,7 +89,7 @@ pub fn path_to_str<PI: Iterator<PathElem>>(mut path: PI) -> ~str {
         }
         s.push_str(e.as_slice());
         s
-    }).into_owned()
+    }).to_strbuf()
 }

 #[deriving(Clone)]
@@ -322,11 +322,11 @@ impl Map {
         self.with_path_next(id, None, f)
     }

-    pub fn path_to_str(&self, id: NodeId) -> ~str {
+    pub fn path_to_str(&self, id: NodeId) -> StrBuf {
         self.with_path(id, |path| path_to_str(path))
     }

-    fn path_to_str_with_ident(&self, id: NodeId, i: Ident) -> ~str {
+    fn path_to_str_with_ident(&self, id: NodeId, i: Ident) -> StrBuf {
         self.with_path(id, |path| {
             path_to_str(path.chain(Some(PathName(i.name)).move_iter()))
         })
@@ -405,7 +405,7 @@ impl Map {
         }
     }

-    pub fn node_to_str(&self, id: NodeId) -> ~str {
+    pub fn node_to_str(&self, id: NodeId) -> StrBuf {
         node_id_to_str(self, id)
     }
 }
@@ -650,7 +650,7 @@ pub fn map_decoded_item<F: FoldOps>(map: &Map,
     ii
 }

-fn node_id_to_str(map: &Map, id: NodeId) -> ~str {
+fn node_id_to_str(map: &Map, id: NodeId) -> StrBuf {
     match map.find(id) {
         Some(NodeItem(item)) => {
             let path_str = map.path_to_str_with_ident(id, item.ident);
@@ -666,51 +666,58 @@ fn node_id_to_str(map: &Map, id: NodeId) -> ~str {
                 ItemImpl(..) => "impl",
                 ItemMac(..) => "macro"
             };
-            format!("{} {} (id={})", item_str, path_str, id)
+            (format!("{} {} (id={})", item_str, path_str, id)).to_strbuf()
         }
         Some(NodeForeignItem(item)) => {
             let path_str = map.path_to_str_with_ident(id, item.ident);
-            format!("foreign item {} (id={})", path_str, id)
+            (format!("foreign item {} (id={})", path_str, id)).to_strbuf()
         }
         Some(NodeMethod(m)) => {
-            format!("method {} in {} (id={})",
+            (format!("method {} in {} (id={})",
                     token::get_ident(m.ident),
-                    map.path_to_str(id), id)
+                    map.path_to_str(id), id)).to_strbuf()
         }
         Some(NodeTraitMethod(ref tm)) => {
             let m = ast_util::trait_method_to_ty_method(&**tm);
-            format!("method {} in {} (id={})",
+            (format!("method {} in {} (id={})",
                     token::get_ident(m.ident),
-                    map.path_to_str(id), id)
+                    map.path_to_str(id), id)).to_strbuf()
         }
         Some(NodeVariant(ref variant)) => {
-            format!("variant {} in {} (id={})",
+            (format!("variant {} in {} (id={})",
                     token::get_ident(variant.node.name),
-                    map.path_to_str(id), id)
+                    map.path_to_str(id), id)).to_strbuf()
         }
         Some(NodeExpr(expr)) => {
-            format!("expr {} (id={})", pprust::expr_to_str(expr), id)
+            (format!("expr {} (id={})",
+                     pprust::expr_to_str(expr), id)).to_strbuf()
         }
         Some(NodeStmt(stmt)) => {
-            format!("stmt {} (id={})", pprust::stmt_to_str(stmt), id)
+            (format!("stmt {} (id={})",
+                     pprust::stmt_to_str(stmt), id)).to_strbuf()
         }
         Some(NodeArg(pat)) => {
-            format!("arg {} (id={})", pprust::pat_to_str(pat), id)
+            (format!("arg {} (id={})",
+                     pprust::pat_to_str(pat), id)).to_strbuf()
         }
         Some(NodeLocal(pat)) => {
-            format!("local {} (id={})", pprust::pat_to_str(pat), id)
+            (format!("local {} (id={})",
+                     pprust::pat_to_str(pat), id)).to_strbuf()
         }
         Some(NodeBlock(block)) => {
-            format!("block {} (id={})", pprust::block_to_str(block), id)
+            (format!("block {} (id={})",
+                     pprust::block_to_str(block), id)).to_strbuf()
         }
         Some(NodeStructCtor(_)) => {
-            format!("struct_ctor {} (id={})", map.path_to_str(id), id)
+            (format!("struct_ctor {} (id={})",
+                     map.path_to_str(id), id)).to_strbuf()
         }
         Some(NodeLifetime(ref l)) => {
-            format!("lifetime {} (id={})", pprust::lifetime_to_str(*l), id)
+            (format!("lifetime {} (id={})",
+                     pprust::lifetime_to_str(*l), id)).to_strbuf()
         }
         None => {
-            format!("unknown node (id={})", id)
+            (format!("unknown node (id={})", id)).to_strbuf()
         }
     }
 }
@@ -24,11 +24,11 @@ use std::cmp;
 use std::strbuf::StrBuf;
 use std::u32;

-pub fn path_name_i(idents: &[Ident]) -> ~str {
+pub fn path_name_i(idents: &[Ident]) -> StrBuf {
     // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
     idents.iter().map(|i| {
-        token::get_ident(*i).get().to_str()
-    }).collect::<Vec<~str>>().connect("::")
+        token::get_ident(*i).get().to_strbuf()
+    }).collect::<Vec<StrBuf>>().connect("::").to_strbuf()
 }

 // totally scary function: ignores all but the last element, should have
@@ -134,7 +134,7 @@ pub fn is_path(e: @Expr) -> bool {

 // Get a string representation of a signed int type, with its value.
 // We want to avoid "45int" and "-3int" in favor of "45" and "-3"
-pub fn int_ty_to_str(t: IntTy, val: Option<i64>) -> ~str {
+pub fn int_ty_to_str(t: IntTy, val: Option<i64>) -> StrBuf {
     let s = match t {
         TyI if val.is_some() => "",
         TyI => "int",
@@ -145,8 +145,8 @@ pub fn int_ty_to_str(t: IntTy, val: Option<i64>) -> ~str {
     };

     match val {
-        Some(n) => format!("{}{}", n, s),
-        None => s.to_owned()
+        Some(n) => format!("{}{}", n, s).to_strbuf(),
+        None => s.to_strbuf()
     }
 }

@@ -161,7 +161,7 @@ pub fn int_ty_max(t: IntTy) -> u64 {

 // Get a string representation of an unsigned int type, with its value.
 // We want to avoid "42uint" in favor of "42u"
-pub fn uint_ty_to_str(t: UintTy, val: Option<u64>) -> ~str {
+pub fn uint_ty_to_str(t: UintTy, val: Option<u64>) -> StrBuf {
     let s = match t {
         TyU if val.is_some() => "u",
         TyU => "uint",
@@ -172,8 +172,8 @@ pub fn uint_ty_to_str(t: UintTy, val: Option<u64>) -> ~str {
     };

     match val {
-        Some(n) => format!("{}{}", n, s),
-        None => s.to_owned()
+        Some(n) => format!("{}{}", n, s).to_strbuf(),
+        None => s.to_strbuf()
     }
 }

@@ -186,8 +186,12 @@ pub fn uint_ty_max(t: UintTy) -> u64 {
     }
 }

-pub fn float_ty_to_str(t: FloatTy) -> ~str {
-    match t { TyF32 => "f32".to_owned(), TyF64 => "f64".to_owned(), TyF128 => "f128".to_owned() }
+pub fn float_ty_to_str(t: FloatTy) -> StrBuf {
+    match t {
+        TyF32 => "f32".to_strbuf(),
+        TyF64 => "f64".to_strbuf(),
+        TyF128 => "f128".to_strbuf(),
+    }
 }

 pub fn is_call_expr(e: @Expr) -> bool {
@@ -252,11 +256,11 @@ pub fn unguarded_pat(a: &Arm) -> Option<Vec<@Pat> > {
 /// listed as `__extensions__::method_name::hash`, with no indication
 /// of the type).
 pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: &Ty) -> Ident {
-    let mut pretty = StrBuf::from_owned_str(pprust::ty_to_str(ty));
+    let mut pretty = pprust::ty_to_str(ty);
     match *trait_ref {
         Some(ref trait_ref) => {
             pretty.push_char('.');
-            pretty.push_str(pprust::path_to_str(&trait_ref.path));
+            pretty.push_str(pprust::path_to_str(&trait_ref.path).as_slice());
         }
         None => {}
     }
@@ -125,7 +125,7 @@ impl AttributeMethods for Attribute {
             let meta = mk_name_value_item_str(
                 InternedString::new("doc"),
                 token::intern_and_get_ident(strip_doc_comment_decoration(
-                        comment.get())));
+                        comment.get()).as_slice()));
             mk_attr(meta)
         } else {
             *self
@@ -189,7 +189,7 @@ pub enum MacroFormat {
 pub struct NameAndSpan {
     /// The name of the macro that was invoked to create the thing
     /// with this Span.
-    pub name: ~str,
+    pub name: StrBuf,
     /// The format with which the macro was invoked.
     pub format: MacroFormat,
     /// The span of the macro definition itself. The macro may not
@@ -220,7 +220,7 @@ pub struct ExpnInfo {
     pub callee: NameAndSpan
 }

-pub type FileName = ~str;
+pub type FileName = StrBuf;

 pub struct FileLines {
     pub file: Rc<FileMap>,
@@ -242,7 +242,7 @@ pub struct FileMap {
     /// e.g. `<anon>`
     pub name: FileName,
     /// The complete source code
-    pub src: ~str,
+    pub src: StrBuf,
     /// The start position of this source in the CodeMap
     pub start_pos: BytePos,
     /// Locations of lines beginnings in the source code
@@ -270,14 +270,14 @@ impl FileMap {
     }

     // get a line from the list of pre-computed line-beginnings
-    pub fn get_line(&self, line: int) -> ~str {
+    pub fn get_line(&self, line: int) -> StrBuf {
         let mut lines = self.lines.borrow_mut();
         let begin: BytePos = *lines.get(line as uint) - self.start_pos;
         let begin = begin.to_uint();
-        let slice = self.src.slice_from(begin);
+        let slice = self.src.as_slice().slice_from(begin);
         match slice.find('\n') {
-            Some(e) => slice.slice_to(e).to_owned(),
-            None => slice.to_owned()
+            Some(e) => slice.slice_to(e).to_strbuf(),
+            None => slice.to_strbuf()
         }
     }

@@ -291,7 +291,8 @@ impl FileMap {
     }

     pub fn is_real_file(&self) -> bool {
-        !(self.name.starts_with("<") && self.name.ends_with(">"))
+        !(self.name.as_slice().starts_with("<") &&
+          self.name.as_slice().ends_with(">"))
     }
 }

@@ -306,7 +307,7 @@ impl CodeMap {
         }
     }

-    pub fn new_filemap(&self, filename: FileName, src: ~str) -> Rc<FileMap> {
+    pub fn new_filemap(&self, filename: FileName, src: StrBuf) -> Rc<FileMap> {
         let mut files = self.files.borrow_mut();
         let start_pos = match files.last() {
             None => 0,
@@ -316,10 +317,10 @@ impl CodeMap {
         // Remove utf-8 BOM if any.
         // FIXME #12884: no efficient/safe way to remove from the start of a string
         // and reuse the allocation.
-        let mut src = if src.starts_with("\ufeff") {
+        let mut src = if src.as_slice().starts_with("\ufeff") {
             StrBuf::from_str(src.as_slice().slice_from(3))
         } else {
-            StrBuf::from_owned_str(src)
+            StrBuf::from_str(src.as_slice())
         };

         // Append '\n' in case it's not already there.
@@ -332,7 +333,7 @@ impl CodeMap {

         let filemap = Rc::new(FileMap {
             name: filename,
-            src: src.into_owned(),
+            src: src.to_strbuf(),
             start_pos: Pos::from_uint(start_pos),
             lines: RefCell::new(Vec::new()),
             multibyte_chars: RefCell::new(Vec::new()),
@@ -343,9 +344,12 @@ impl CodeMap {
         filemap
     }

-    pub fn mk_substr_filename(&self, sp: Span) -> ~str {
+    pub fn mk_substr_filename(&self, sp: Span) -> StrBuf {
         let pos = self.lookup_char_pos(sp.lo);
-        format!("<{}:{}:{}>", pos.file.name, pos.line, pos.col.to_uint() + 1)
+        (format!("<{}:{}:{}>",
+                 pos.file.name,
+                 pos.line,
+                 pos.col.to_uint() + 1)).to_strbuf()
     }

     /// Lookup source information about a BytePos
@@ -356,26 +360,30 @@ impl CodeMap {
     pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
         let loc = self.lookup_char_pos(pos);
         LocWithOpt {
-            filename: loc.file.name.to_str(),
+            filename: loc.file.name.to_strbuf(),
             line: loc.line,
             col: loc.col,
             file: Some(loc.file)
         }
     }

-    pub fn span_to_str(&self, sp: Span) -> ~str {
+    pub fn span_to_str(&self, sp: Span) -> StrBuf {
         if self.files.borrow().len() == 0 && sp == DUMMY_SP {
-            return "no-location".to_owned();
+            return "no-location".to_strbuf();
         }

         let lo = self.lookup_char_pos_adj(sp.lo);
         let hi = self.lookup_char_pos_adj(sp.hi);
-        return format!("{}:{}:{}: {}:{}", lo.filename,
-                       lo.line, lo.col.to_uint() + 1, hi.line, hi.col.to_uint() + 1)
+        return (format!("{}:{}:{}: {}:{}",
+                        lo.filename,
+                        lo.line,
+                        lo.col.to_uint() + 1,
+                        hi.line,
+                        hi.col.to_uint() + 1)).to_strbuf()
     }

     pub fn span_to_filename(&self, sp: Span) -> FileName {
-        self.lookup_char_pos(sp.lo).file.name.to_str()
+        self.lookup_char_pos(sp.lo).file.name.to_strbuf()
     }

     pub fn span_to_lines(&self, sp: Span) -> FileLines {
@@ -388,7 +396,7 @@ impl CodeMap {
         FileLines {file: lo.file, lines: lines}
     }

-    pub fn span_to_snippet(&self, sp: Span) -> Option<~str> {
+    pub fn span_to_snippet(&self, sp: Span) -> Option<StrBuf> {
         let begin = self.lookup_byte_offset(sp.lo);
         let end = self.lookup_byte_offset(sp.hi);

@@ -399,13 +407,14 @@ impl CodeMap {
         if begin.fm.start_pos != end.fm.start_pos {
             None
         } else {
-            Some(begin.fm.src.slice( begin.pos.to_uint(), end.pos.to_uint()).to_owned())
+            Some(begin.fm.src.as_slice().slice(begin.pos.to_uint(),
+                                               end.pos.to_uint()).to_strbuf())
         }
     }

     pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> {
         for fm in self.files.borrow().iter() {
-            if filename == fm.name {
+            if filename == fm.name.as_slice() {
                 return fm.clone();
             }
         }
@@ -526,19 +535,21 @@ mod test {
     #[test]
     fn t1 () {
         let cm = CodeMap::new();
-        let fm = cm.new_filemap("blork.rs".to_owned(),"first line.\nsecond line".to_owned());
+        let fm = cm.new_filemap("blork.rs".to_strbuf(),
+                                "first line.\nsecond line".to_strbuf());
         fm.next_line(BytePos(0));
-        assert_eq!(&fm.get_line(0),&"first line.".to_owned());
+        assert_eq!(&fm.get_line(0),&"first line.".to_strbuf());
         // TESTING BROKEN BEHAVIOR:
         fm.next_line(BytePos(10));
-        assert_eq!(&fm.get_line(1),&".".to_owned());
+        assert_eq!(&fm.get_line(1), &".".to_strbuf());
     }

     #[test]
     #[should_fail]
     fn t2 () {
         let cm = CodeMap::new();
-        let fm = cm.new_filemap("blork.rs".to_owned(),"first line.\nsecond line".to_owned());
+        let fm = cm.new_filemap("blork.rs".to_strbuf(),
+                                "first line.\nsecond line".to_strbuf());
         // TESTING *REALLY* BROKEN BEHAVIOR:
         fm.next_line(BytePos(0));
         fm.next_line(BytePos(10));
@@ -547,9 +558,12 @@ mod test {

     fn init_code_map() -> CodeMap {
         let cm = CodeMap::new();
-        let fm1 = cm.new_filemap("blork.rs".to_owned(),"first line.\nsecond line".to_owned());
-        let fm2 = cm.new_filemap("empty.rs".to_owned(),"".to_owned());
-        let fm3 = cm.new_filemap("blork2.rs".to_owned(),"first line.\nsecond line".to_owned());
+        let fm1 = cm.new_filemap("blork.rs".to_strbuf(),
+                                 "first line.\nsecond line".to_strbuf());
+        let fm2 = cm.new_filemap("empty.rs".to_strbuf(),
+                                 "".to_strbuf());
+        let fm3 = cm.new_filemap("blork2.rs".to_strbuf(),
+                                 "first line.\nsecond line".to_strbuf());

         fm1.next_line(BytePos(0));
         fm1.next_line(BytePos(12));
@@ -566,11 +580,11 @@ mod test {
         let cm = init_code_map();

         let fmabp1 = cm.lookup_byte_offset(BytePos(22));
-        assert_eq!(fmabp1.fm.name, "blork.rs".to_owned());
+        assert_eq!(fmabp1.fm.name, "blork.rs".to_strbuf());
         assert_eq!(fmabp1.pos, BytePos(22));

         let fmabp2 = cm.lookup_byte_offset(BytePos(24));
-        assert_eq!(fmabp2.fm.name, "blork2.rs".to_owned());
+        assert_eq!(fmabp2.fm.name, "blork2.rs".to_strbuf());
         assert_eq!(fmabp2.pos, BytePos(0));
     }

@@ -592,12 +606,12 @@ mod test {
         let cm = init_code_map();

         let loc1 = cm.lookup_char_pos(BytePos(22));
-        assert_eq!(loc1.file.name, "blork.rs".to_owned());
+        assert_eq!(loc1.file.name, "blork.rs".to_strbuf());
         assert_eq!(loc1.line, 2);
         assert_eq!(loc1.col, CharPos(10));

         let loc2 = cm.lookup_char_pos(BytePos(24));
-        assert_eq!(loc2.file.name, "blork2.rs".to_owned());
+        assert_eq!(loc2.file.name, "blork2.rs".to_strbuf());
         assert_eq!(loc2.line, 1);
         assert_eq!(loc2.col, CharPos(0));
     }
@@ -605,8 +619,11 @@ mod test {
     fn init_code_map_mbc() -> CodeMap {
         let cm = CodeMap::new();
         // € is a three byte utf8 char.
-        let fm1 = cm.new_filemap("blork.rs".to_owned(),"fir€st €€€€ line.\nsecond line".to_owned());
-        let fm2 = cm.new_filemap("blork2.rs".to_owned(),"first line€€.\n€ second line".to_owned());
+        let fm1 =
+            cm.new_filemap("blork.rs".to_strbuf(),
+                           "fir€st €€€€ line.\nsecond line".to_strbuf());
+        let fm2 = cm.new_filemap("blork2.rs".to_strbuf(),
+                                 "first line€€.\n€ second line".to_strbuf());

         fm1.next_line(BytePos(0));
         fm1.next_line(BytePos(22));
@@ -650,7 +667,7 @@ mod test {
         let span = Span {lo: BytePos(12), hi: BytePos(23), expn_info: None};
         let file_lines = cm.span_to_lines(span);

-        assert_eq!(file_lines.file.name, "blork.rs".to_owned());
+        assert_eq!(file_lines.file.name, "blork.rs".to_strbuf());
         assert_eq!(file_lines.lines.len(), 1);
         assert_eq!(*file_lines.lines.get(0), 1u);
     }
@@ -662,7 +679,7 @@ mod test {
         let span = Span {lo: BytePos(12), hi: BytePos(23), expn_info: None};
         let snippet = cm.span_to_snippet(span);

-        assert_eq!(snippet, Some("second line".to_owned()));
+        assert_eq!(snippet, Some("second line".to_strbuf()));
     }

     #[test]
@@ -672,6 +689,6 @@ mod test {
         let span = Span {lo: BytePos(12), hi: BytePos(23), expn_info: None};
         let sstr = cm.span_to_str(span);

-        assert_eq!(sstr, "blork.rs:2:1: 2:12".to_owned());
+        assert_eq!(sstr, "blork.rs:2:1: 2:12".to_strbuf());
     }
 }
@@ -24,11 +24,11 @@ use std::from_str::FromStr;
 pub struct CrateId {
     /// A path which represents the codes origin. By convention this is the
     /// URL, without `http://` or `https://` prefix, to the crate's repository
-    pub path: ~str,
+    pub path: StrBuf,
     /// The name of the crate.
-    pub name: ~str,
+    pub name: StrBuf,
     /// The version of the crate.
-    pub version: Option<~str>,
+    pub version: Option<StrBuf>,
 }

 impl fmt::Show for CrateId {
@@ -38,7 +38,8 @@ impl fmt::Show for CrateId {
             None => "0.0",
             Some(ref version) => version.as_slice(),
         };
-        if self.path == self.name || self.path.ends_with(format!("/{}", self.name)) {
+        if self.path == self.name ||
+           self.path.as_slice().ends_with(format!("/{}", self.name)) {
             write!(f.buf, "\\#{}", version)
         } else {
             write!(f.buf, "\\#{}:{}", self.name, version)
@@ -60,7 +61,7 @@ impl FromStr for CrateId {
         let inferred_name = *path_pieces.get(0);

         let (name, version) = if pieces.len() == 1 {
-            (inferred_name.to_owned(), None)
+            (inferred_name.to_strbuf(), None)
         } else {
             let hash_pieces: Vec<&str> = pieces.get(1)
                                                .splitn(':', 1)
@@ -72,16 +73,16 @@ impl FromStr for CrateId {
         };

         let name = if !hash_name.is_empty() {
-            hash_name.to_owned()
+            hash_name.to_strbuf()
         } else {
-            inferred_name.to_owned()
+            inferred_name.to_strbuf()
         };

         let version = if !hash_version.is_empty() {
             if hash_version == "0.0" {
                 None
             } else {
-                Some(hash_version.to_owned())
+                Some(hash_version.to_strbuf())
             }
         } else {
             None
@@ -91,7 +92,7 @@ impl FromStr for CrateId {
         };

         Some(CrateId {
-            path: path.clone(),
+            path: path.to_strbuf(),
             name: name,
             version: version,
         })
@@ -106,8 +107,8 @@ impl CrateId {
         }
     }

-    pub fn short_name_with_version(&self) -> ~str {
-        format!("{}-{}", self.name, self.version_or_default())
+    pub fn short_name_with_version(&self) -> StrBuf {
+        (format!("{}-{}", self.name, self.version_or_default())).to_strbuf()
     }

     pub fn matches(&self, other: &CrateId) -> bool {
@@ -123,17 +124,17 @@ impl CrateId {
 #[test]
 fn bare_name() {
     let crateid: CrateId = from_str("foo").expect("valid crateid");
-    assert_eq!(crateid.name, "foo".to_owned());
+    assert_eq!(crateid.name, "foo".to_strbuf());
     assert_eq!(crateid.version, None);
-    assert_eq!(crateid.path, "foo".to_owned());
+    assert_eq!(crateid.path, "foo".to_strbuf());
 }

 #[test]
 fn bare_name_single_char() {
     let crateid: CrateId = from_str("f").expect("valid crateid");
-    assert_eq!(crateid.name, "f".to_owned());
+    assert_eq!(crateid.name, "f".to_strbuf());
     assert_eq!(crateid.version, None);
-    assert_eq!(crateid.path, "f".to_owned());
+    assert_eq!(crateid.path, "f".to_strbuf());
 }

 #[test]
@@ -145,17 +146,17 @@ fn empty_crateid() {
 #[test]
 fn simple_path() {
     let crateid: CrateId = from_str("example.com/foo/bar").expect("valid crateid");
-    assert_eq!(crateid.name, "bar".to_owned());
+    assert_eq!(crateid.name, "bar".to_strbuf());
     assert_eq!(crateid.version, None);
-    assert_eq!(crateid.path, "example.com/foo/bar".to_owned());
+    assert_eq!(crateid.path, "example.com/foo/bar".to_strbuf());
 }

 #[test]
 fn simple_version() {
     let crateid: CrateId = from_str("foo#1.0").expect("valid crateid");
-    assert_eq!(crateid.name, "foo".to_owned());
-    assert_eq!(crateid.version, Some("1.0".to_owned()));
-    assert_eq!(crateid.path, "foo".to_owned());
+    assert_eq!(crateid.name, "foo".to_strbuf());
+    assert_eq!(crateid.version, Some("1.0".to_strbuf()));
+    assert_eq!(crateid.path, "foo".to_strbuf());
 }

 #[test]
@@ -173,39 +174,39 @@ fn path_ends_with_slash() {
 #[test]
 fn path_and_version() {
     let crateid: CrateId = from_str("example.com/foo/bar#1.0").expect("valid crateid");
-    assert_eq!(crateid.name, "bar".to_owned());
-    assert_eq!(crateid.version, Some("1.0".to_owned()));
-    assert_eq!(crateid.path, "example.com/foo/bar".to_owned());
+    assert_eq!(crateid.name, "bar".to_strbuf());
+    assert_eq!(crateid.version, Some("1.0".to_strbuf()));
+    assert_eq!(crateid.path, "example.com/foo/bar".to_strbuf());
 }

 #[test]
 fn single_chars() {
     let crateid: CrateId = from_str("a/b#1").expect("valid crateid");
-    assert_eq!(crateid.name, "b".to_owned());
-    assert_eq!(crateid.version, Some("1".to_owned()));
-    assert_eq!(crateid.path, "a/b".to_owned());
+    assert_eq!(crateid.name, "b".to_strbuf());
+    assert_eq!(crateid.version, Some("1".to_strbuf()));
+    assert_eq!(crateid.path, "a/b".to_strbuf());
 }

 #[test]
 fn missing_version() {
     let crateid: CrateId = from_str("foo#").expect("valid crateid");
-    assert_eq!(crateid.name, "foo".to_owned());
+    assert_eq!(crateid.name, "foo".to_strbuf());
     assert_eq!(crateid.version, None);
-    assert_eq!(crateid.path, "foo".to_owned());
+    assert_eq!(crateid.path, "foo".to_strbuf());
 }

 #[test]
 fn path_and_name() {
     let crateid: CrateId = from_str("foo/rust-bar#bar:1.0").expect("valid crateid");
-    assert_eq!(crateid.name, "bar".to_owned());
-    assert_eq!(crateid.version, Some("1.0".to_owned()));
-    assert_eq!(crateid.path, "foo/rust-bar".to_owned());
+    assert_eq!(crateid.name, "bar".to_strbuf());
+    assert_eq!(crateid.version, Some("1.0".to_strbuf()));
+    assert_eq!(crateid.path, "foo/rust-bar".to_strbuf());
 }

 #[test]
 fn empty_name() {
     let crateid: CrateId = from_str("foo/bar#:1.0").expect("valid crateid");
-    assert_eq!(crateid.name, "bar".to_owned());
-    assert_eq!(crateid.version, Some("1.0".to_owned()));
-    assert_eq!(crateid.path, "foo/bar".to_owned());
+    assert_eq!(crateid.name, "bar".to_strbuf());
+    assert_eq!(crateid.version, Some("1.0".to_strbuf()));
+    assert_eq!(crateid.path, "foo/bar".to_strbuf());
 }
@@ -320,12 +320,12 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
             // the span)
             let span_end = Span { lo: sp.hi, hi: sp.hi, expn_info: sp.expn_info};
             let ses = cm.span_to_str(span_end);
-            try!(print_diagnostic(dst, ses, lvl, msg));
+            try!(print_diagnostic(dst, ses.as_slice(), lvl, msg));
             if rsp.is_full_span() {
                 try!(custom_highlight_lines(dst, cm, sp, lvl, lines));
             }
         } else {
-            try!(print_diagnostic(dst, ss, lvl, msg));
+            try!(print_diagnostic(dst, ss.as_slice(), lvl, msg));
             if rsp.is_full_span() {
                 try!(highlight_lines(dst, cm, sp, lvl, lines));
             }
@@ -378,7 +378,7 @@ fn highlight_lines(err: &mut EmitterWriter,
         }
         let orig = fm.get_line(*lines.lines.get(0) as int);
         for pos in range(0u, left-skip) {
-            let cur_char = orig[pos] as char;
+            let cur_char = orig.as_slice()[pos] as char;
             // Whenever a tab occurs on the previous line, we insert one on
             // the error-point-squiggly-line as well (instead of a space).
             // That way the squiggly line will usually appear in the correct
@@ -452,24 +452,28 @@ fn print_macro_backtrace(w: &mut EmitterWriter,
                          sp: Span)
                       -> io::IoResult<()> {
     for ei in sp.expn_info.iter() {
-        let ss = ei.callee.span.as_ref().map_or("".to_owned(), |span| cm.span_to_str(*span));
+        let ss = ei.callee
+                   .span
+                   .as_ref()
+                   .map_or("".to_strbuf(), |span| cm.span_to_str(*span));
         let (pre, post) = match ei.callee.format {
             codemap::MacroAttribute => ("#[", "]"),
             codemap::MacroBang => ("", "!")
         };
-        try!(print_diagnostic(w, ss, Note,
+        try!(print_diagnostic(w, ss.as_slice(), Note,
                               format!("in expansion of {}{}{}", pre,
                                       ei.callee.name, post)));
         let ss = cm.span_to_str(ei.call_site);
-        try!(print_diagnostic(w, ss, Note, "expansion site"));
+        try!(print_diagnostic(w, ss.as_slice(), Note, "expansion site"));
         try!(print_macro_backtrace(w, cm, ei.call_site));
     }
     Ok(())
 }

-pub fn expect<T:Clone>(diag: &SpanHandler, opt: Option<T>, msg: || -> ~str) -> T {
+pub fn expect<T:Clone>(diag: &SpanHandler, opt: Option<T>, msg: || -> StrBuf)
+              -> T {
     match opt {
         Some(ref t) => (*t).clone(),
-        None => diag.handler().bug(msg()),
+        None => diag.handler().bug(msg().as_slice()),
     }
 }
@@ -30,7 +30,7 @@ use collections::HashMap;
 // ast::MacInvocTT.

 pub struct MacroDef {
-    pub name: ~str,
+    pub name: StrBuf,
     pub ext: SyntaxExtension
 }

@@ -361,8 +361,8 @@ pub fn syntax_expander_table() -> SyntaxEnv {

 pub struct MacroCrate {
     pub lib: Option<Path>,
-    pub macros: Vec<~str>,
-    pub registrar_symbol: Option<~str>,
+    pub macros: Vec<StrBuf>,
+    pub registrar_symbol: Option<StrBuf>,
 }

 pub trait CrateLoader {
@@ -425,7 +425,7 @@ impl<'a> ExtCtxt<'a> {
     pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); }
     pub fn mod_path(&self) -> Vec<ast::Ident> {
         let mut v = Vec::new();
-        v.push(token::str_to_ident(self.ecfg.crate_id.name));
+        v.push(token::str_to_ident(self.ecfg.crate_id.name.as_slice()));
         v.extend(self.mod_path.iter().map(|a| *a));
         return v;
     }
@@ -540,14 +540,14 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt,
                                sp: Span,
                                tts: &[ast::TokenTree],
                                name: &str)
-                               -> Option<~str> {
+                               -> Option<StrBuf> {
     if tts.len() != 1 {
         cx.span_err(sp, format!("{} takes 1 argument.", name));
     } else {
         match tts[0] {
             ast::TTTok(_, token::LIT_STR(ident))
             | ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
-                return Some(token::get_ident(ident).get().to_str())
+                return Some(token::get_ident(ident).get().to_strbuf())
             }
             _ => cx.span_err(sp, format!("{} requires a string.", name)),
         }
@@ -639,7 +639,9 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
             vec!(
                 self.expr_str(span, msg),
                 self.expr_str(span,
-                              token::intern_and_get_ident(loc.file.name)),
+                              token::intern_and_get_ident(loc.file
                                                              .name
                                                              .as_slice())),
                 self.expr_uint(span, loc.line)))
     }

|
||||
to_set.expn_info = Some(@codemap::ExpnInfo {
|
||||
call_site: to_set,
|
||||
callee: codemap::NameAndSpan {
|
||||
name: format!("deriving({})", trait_name),
|
||||
name: format!("deriving({})", trait_name).to_strbuf(),
|
||||
format: codemap::MacroAttribute,
|
||||
span: Some(self.span)
|
||||
}
|
||||
|
@ -30,7 +30,7 @@ pub fn expand_option_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
|
||||
Some(v) => v
|
||||
};
|
||||
|
||||
let e = match os::getenv(var) {
|
||||
let e = match os::getenv(var.as_slice()) {
|
||||
None => {
|
||||
cx.expr_path(cx.path_all(sp,
|
||||
true,
|
||||
|
@ -71,7 +71,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
fld.cx.bt_push(ExpnInfo {
call_site: e.span,
callee: NameAndSpan {
name: extnamestr.get().to_str(),
name: extnamestr.get().to_strbuf(),
format: MacroBang,
span: exp_span,
},
@ -270,7 +270,7 @@ pub fn expand_item(it: @ast::Item, fld: &mut MacroExpander)
fld.cx.bt_push(ExpnInfo {
call_site: attr.span,
callee: NameAndSpan {
name: mname.get().to_str(),
name: mname.get().to_strbuf(),
format: MacroAttribute,
span: None
}
@ -334,7 +334,7 @@ fn expand_item_modifiers(mut it: @ast::Item, fld: &mut MacroExpander)
fld.cx.bt_push(ExpnInfo {
call_site: attr.span,
callee: NameAndSpan {
name: mname.get().to_str(),
name: mname.get().to_strbuf(),
format: MacroAttribute,
span: None,
}
@ -393,7 +393,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
fld.cx.bt_push(ExpnInfo {
call_site: it.span,
callee: NameAndSpan {
name: extnamestr.get().to_str(),
name: extnamestr.get().to_strbuf(),
format: MacroBang,
span: span
}
@ -412,7 +412,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
fld.cx.bt_push(ExpnInfo {
call_site: it.span,
callee: NameAndSpan {
name: extnamestr.get().to_str(),
name: extnamestr.get().to_strbuf(),
format: MacroBang,
span: span
}
@ -433,7 +433,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
Some(MacroDef { name, ext }) => {
// yikes... no idea how to apply the mark to this. I'm afraid
// we're going to have to wait-and-see on this one.
fld.extsbox.insert(intern(name), ext);
fld.extsbox.insert(intern(name.as_slice()), ext);
if attr::contains_name(it.attrs.as_slice(), "macro_export") {
SmallVector::one(it)
} else {
@ -493,6 +493,7 @@ fn load_extern_macros(krate: &ast::ViewItem, fld: &mut MacroExpander) {
_ => unreachable!()
};
let name = format!("<{} macros>", token::get_ident(crate_name));
let name = name.to_strbuf();

for source in macros.iter() {
let item = parse::parse_item_from_source_str(name.clone(),
@ -524,11 +525,12 @@ fn load_extern_macros(krate: &ast::ViewItem, fld: &mut MacroExpander) {
};

unsafe {
let registrar: MacroCrateRegistrationFun = match lib.symbol(registrar) {
Ok(registrar) => registrar,
// again fatal if we can't register macros
Err(err) => fld.cx.span_fatal(krate.span, err)
};
let registrar: MacroCrateRegistrationFun =
match lib.symbol(registrar.as_slice()) {
Ok(registrar) => registrar,
// again fatal if we can't register macros
Err(err) => fld.cx.span_fatal(krate.span, err)
};
registrar(|name, extension| {
let extension = match extension {
NormalTT(ext, _) => NormalTT(ext, Some(krate.span)),
@ -576,7 +578,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
fld.cx.bt_push(ExpnInfo {
call_site: s.span,
callee: NameAndSpan {
name: extnamestr.get().to_str(),
name: extnamestr.get().to_strbuf(),
format: MacroBang,
span: exp_span,
}
@ -1020,10 +1022,10 @@ mod test {
#[should_fail]
#[test] fn macros_cant_escape_fns_test () {
let src = "fn bogus() {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }".to_owned();
fn inty() -> int { z!() }".to_strbuf();
let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str(
"<test>".to_owned(),
"<test>".to_strbuf(),
src,
Vec::new(), &sess);
// should fail:
@ -1040,10 +1042,10 @@ mod test {
#[should_fail]
#[test] fn macros_cant_escape_mods_test () {
let src = "mod foo {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }".to_owned();
fn inty() -> int { z!() }".to_strbuf();
let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str(
"<test>".to_owned(),
"<test>".to_strbuf(),
src,
Vec::new(), &sess);
// should fail:
@ -1059,10 +1061,10 @@ mod test {
// macro_escape modules shouldn't cause macros to leave scope
#[test] fn macros_can_escape_flattened_mods_test () {
let src = "#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }".to_owned();
fn inty() -> int { z!() }".to_strbuf();
let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str(
"<test>".to_owned(),
"<test>".to_strbuf(),
src,
Vec::new(), &sess);
// should fail:
@ -1100,7 +1102,7 @@ mod test {
}
}

fn expand_crate_str(crate_str: ~str) -> ast::Crate {
fn expand_crate_str(crate_str: StrBuf) -> ast::Crate {
let ps = parse::new_parse_sess();
let crate_ast = string_to_parser(&ps, crate_str).parse_crate_mod();
// the cfg argument actually does matter, here...
@ -1118,13 +1120,14 @@ mod test {
// println!("expanded: {:?}\n",expanded_ast);
//mtwt_resolve_crate(expanded_ast)
//}
//fn expand_and_resolve_and_pretty_print (crate_str: @str) -> ~str {
//fn expand_and_resolve_and_pretty_print (crate_str: @str) -> StrBuf {
//let resolved_ast = expand_and_resolve(crate_str);
//pprust::to_str(&resolved_ast,fake_print_crate,get_ident_interner())
//}

#[test] fn macro_tokens_should_match(){
expand_crate_str("macro_rules! m((a)=>(13)) fn main(){m!(a);}".to_owned());
expand_crate_str(
"macro_rules! m((a)=>(13)) fn main(){m!(a);}".to_strbuf());
}

// renaming tests expand a crate and then check that the bindings match
@ -1182,7 +1185,7 @@ mod test {
let (teststr, bound_connections, bound_ident_check) = match *t {
(ref str,ref conns, bic) => (str.to_owned(), conns.clone(), bic)
};
let cr = expand_crate_str(teststr.to_owned());
let cr = expand_crate_str(teststr.to_strbuf());
// find the bindings:
let mut name_finder = new_name_finder(Vec::new());
visit::walk_crate(&mut name_finder,&cr,());
@ -1257,7 +1260,7 @@ mod test {
let crate_str = "macro_rules! fmt_wrap(($b:expr)=>($b.to_str()))
macro_rules! foo_module (() => (mod generated { fn a() { let xx = 147; fmt_wrap!(xx);}}))
foo_module!()
".to_owned();
".to_strbuf();
let cr = expand_crate_str(crate_str);
// find the xx binding
let mut name_finder = new_name_finder(Vec::new());
@ -1303,7 +1306,8 @@ foo_module!()

#[test]
fn pat_idents(){
let pat = string_to_pat("(a,Foo{x:c @ (b,9),y:Bar(4,d)})".to_owned());
let pat = string_to_pat(
"(a,Foo{x:c @ (b,9),y:Bar(4,d)})".to_strbuf());
let mut pat_idents = new_name_finder(Vec::new());
pat_idents.visit_pat(pat, ());
assert_eq!(pat_idents.ident_accumulator,

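// Illustrative sketch, not part of the commit: every ExpnInfo hunk above makes
// the same change, building the callee name as an owned StrBuf via .to_strbuf()
// where .to_str() used to produce a ~str. Assembled, the new form reads:
fld.cx.bt_push(ExpnInfo {
    call_site: e.span,
    callee: NameAndSpan {
        name: extnamestr.get().to_strbuf(),
        format: MacroBang,
        span: exp_span,
    },
});
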
@ -23,14 +23,14 @@ use collections::{HashMap, HashSet};

#[deriving(Eq)]
enum ArgumentType {
Known(~str),
Known(StrBuf),
Unsigned,
String,
}

enum Position {
Exact(uint),
Named(~str),
Named(StrBuf),
}

struct Context<'a, 'b> {
@ -45,13 +45,13 @@ struct Context<'a, 'b> {
// Note that we keep a side-array of the ordering of the named arguments
// found to be sure that we can translate them in the same order that they
// were declared in.
names: HashMap<~str, @ast::Expr>,
name_types: HashMap<~str, ArgumentType>,
name_ordering: Vec<~str>,
names: HashMap<StrBuf, @ast::Expr>,
name_types: HashMap<StrBuf, ArgumentType>,
name_ordering: Vec<StrBuf>,

// Collection of the compiled `rt::Piece` structures
pieces: Vec<@ast::Expr> ,
name_positions: HashMap<~str, uint>,
name_positions: HashMap<StrBuf, uint>,
method_statics: Vec<@ast::Item> ,

// Updated as arguments are consumed or methods are entered
@ -68,10 +68,10 @@ struct Context<'a, 'b> {
/// Some((fmtstr, unnamed arguments, ordering of named arguments,
/// named arguments))
fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<~str>,
HashMap<~str, @ast::Expr>)>) {
-> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<StrBuf>,
HashMap<StrBuf, @ast::Expr>)>) {
let mut args = Vec::new();
let mut names = HashMap::<~str, @ast::Expr>::new();
let mut names = HashMap::<StrBuf, @ast::Expr>::new();
let mut order = Vec::new();

let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(),
@ -131,8 +131,8 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
continue
}
}
order.push(name.to_str());
names.insert(name.to_str(), e);
order.push(name.to_strbuf());
names.insert(name.to_strbuf(), e);
} else {
args.push(p.parse_expr());
}
@ -171,13 +171,13 @@ impl<'a, 'b> Context<'a, 'b> {
Exact(i)
}
parse::ArgumentIs(i) => Exact(i),
parse::ArgumentNamed(s) => Named(s.to_str()),
parse::ArgumentNamed(s) => Named(s.to_strbuf()),
};

// and finally the method being applied
match arg.method {
None => {
let ty = Known(arg.format.ty.to_str());
let ty = Known(arg.format.ty.to_strbuf());
self.verify_arg_type(pos, ty);
}
Some(ref method) => { self.verify_method(pos, *method); }
@ -199,7 +199,7 @@ impl<'a, 'b> Context<'a, 'b> {
self.verify_arg_type(Exact(i), Unsigned);
}
parse::CountIsName(s) => {
self.verify_arg_type(Named(s.to_str()), Unsigned);
self.verify_arg_type(Named(s.to_strbuf()), Unsigned);
}
parse::CountIsNextParam => {
if self.check_positional_ok() {
@ -822,8 +822,8 @@ pub fn expand_args(ecx: &mut ExtCtxt, sp: Span,
pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
extra: @ast::Expr,
efmt: @ast::Expr, args: Vec<@ast::Expr>,
name_ordering: Vec<~str>,
names: HashMap<~str, @ast::Expr>) -> @ast::Expr {
name_ordering: Vec<StrBuf>,
names: HashMap<StrBuf, @ast::Expr>) -> @ast::Expr {
let arg_types = Vec::from_fn(args.len(), |_| None);
let mut cx = Context {
ecx: ecx,

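// Illustrative sketch, not part of the commit: with format!'s argument tables
// keyed by owned StrBufs instead of ~str, names are converted once on insertion:
let mut names = HashMap::<StrBuf, @ast::Expr>::new();
let mut order = Vec::new();
order.push(name.to_strbuf());
names.insert(name.to_strbuf(), e);
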
@ -55,7 +55,7 @@ pub mod rt {

trait ToSource : ToTokens {
// Takes a thing and generates a string containing rust code for it.
pub fn to_source() -> ~str;
pub fn to_source() -> StrBuf;

// If you can make source, you can definitely make tokens.
pub fn to_tokens(cx: &ExtCtxt) -> ~[TokenTree] {
@ -67,59 +67,67 @@ pub mod rt {

pub trait ToSource {
// Takes a thing and generates a string containing rust code for it.
fn to_source(&self) -> ~str;
fn to_source(&self) -> StrBuf;
}

impl ToSource for ast::Ident {
fn to_source(&self) -> ~str {
get_ident(*self).get().to_str()
fn to_source(&self) -> StrBuf {
get_ident(*self).get().to_strbuf()
}
}

impl ToSource for @ast::Item {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
pprust::item_to_str(*self)
}
}

impl<'a> ToSource for &'a [@ast::Item] {
fn to_source(&self) -> ~str {
self.iter().map(|i| i.to_source()).collect::<Vec<~str>>().connect("\n\n")
fn to_source(&self) -> StrBuf {
self.iter()
.map(|i| i.to_source())
.collect::<Vec<StrBuf>>()
.connect("\n\n")
.to_strbuf()
}
}

impl ToSource for ast::Ty {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
pprust::ty_to_str(self)
}
}

impl<'a> ToSource for &'a [ast::Ty] {
fn to_source(&self) -> ~str {
self.iter().map(|i| i.to_source()).collect::<Vec<~str>>().connect(", ")
fn to_source(&self) -> StrBuf {
self.iter()
.map(|i| i.to_source())
.collect::<Vec<StrBuf>>()
.connect(", ")
.to_strbuf()
}
}

impl ToSource for Generics {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
pprust::generics_to_str(self)
}
}

impl ToSource for @ast::Expr {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
pprust::expr_to_str(*self)
}
}

impl ToSource for ast::Block {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
pprust::block_to_str(self)
}
}

impl<'a> ToSource for &'a str {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitStr(
token::intern_and_get_ident(*self), ast::CookedStr));
pprust::lit_to_str(&lit)
@ -127,41 +135,41 @@ pub mod rt {
}

impl ToSource for () {
fn to_source(&self) -> ~str {
"()".to_owned()
fn to_source(&self) -> StrBuf {
"()".to_strbuf()
}
}

impl ToSource for bool {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitBool(*self));
pprust::lit_to_str(&lit)
}
}

impl ToSource for char {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitChar(*self));
pprust::lit_to_str(&lit)
}
}

impl ToSource for int {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI));
pprust::lit_to_str(&lit)
}
}

impl ToSource for i8 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI8));
pprust::lit_to_str(&lit)
}
}

impl ToSource for i16 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI16));
pprust::lit_to_str(&lit)
}
@ -169,49 +177,49 @@ pub mod rt {


impl ToSource for i32 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI32));
pprust::lit_to_str(&lit)
}
}

impl ToSource for i64 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI64));
pprust::lit_to_str(&lit)
}
}

impl ToSource for uint {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU));
pprust::lit_to_str(&lit)
}
}

impl ToSource for u8 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU8));
pprust::lit_to_str(&lit)
}
}

impl ToSource for u16 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU16));
pprust::lit_to_str(&lit)
}
}

impl ToSource for u32 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU32));
pprust::lit_to_str(&lit)
}
}

impl ToSource for u64 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU64));
pprust::lit_to_str(&lit)
}
@ -263,17 +271,17 @@ pub mod rt {
impl_to_tokens!(u64)

pub trait ExtParseUtils {
fn parse_item(&self, s: ~str) -> @ast::Item;
fn parse_expr(&self, s: ~str) -> @ast::Expr;
fn parse_stmt(&self, s: ~str) -> @ast::Stmt;
fn parse_tts(&self, s: ~str) -> Vec<ast::TokenTree> ;
fn parse_item(&self, s: StrBuf) -> @ast::Item;
fn parse_expr(&self, s: StrBuf) -> @ast::Expr;
fn parse_stmt(&self, s: StrBuf) -> @ast::Stmt;
fn parse_tts(&self, s: StrBuf) -> Vec<ast::TokenTree> ;
}

impl<'a> ExtParseUtils for ExtCtxt<'a> {

fn parse_item(&self, s: ~str) -> @ast::Item {
fn parse_item(&self, s: StrBuf) -> @ast::Item {
let res = parse::parse_item_from_source_str(
"<quote expansion>".to_str(),
"<quote expansion>".to_strbuf(),
s,
self.cfg(),
self.parse_sess());
@ -286,23 +294,23 @@ pub mod rt {
}
}

fn parse_stmt(&self, s: ~str) -> @ast::Stmt {
parse::parse_stmt_from_source_str("<quote expansion>".to_str(),
fn parse_stmt(&self, s: StrBuf) -> @ast::Stmt {
parse::parse_stmt_from_source_str("<quote expansion>".to_strbuf(),
s,
self.cfg(),
Vec::new(),
self.parse_sess())
}

fn parse_expr(&self, s: ~str) -> @ast::Expr {
parse::parse_expr_from_source_str("<quote expansion>".to_str(),
fn parse_expr(&self, s: StrBuf) -> @ast::Expr {
parse::parse_expr_from_source_str("<quote expansion>".to_strbuf(),
s,
self.cfg(),
self.parse_sess())
}

fn parse_tts(&self, s: ~str) -> Vec<ast::TokenTree> {
parse::parse_tts_from_source_str("<quote expansion>".to_str(),
fn parse_tts(&self, s: StrBuf) -> Vec<ast::TokenTree> {
parse::parse_tts_from_source_str("<quote expansion>".to_strbuf(),
s,
self.cfg(),
self.parse_sess())
@ -367,8 +375,8 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt,
base::MacExpr::new(expanded)
}

fn ids_ext(strs: Vec<~str> ) -> Vec<ast::Ident> {
strs.iter().map(|str| str_to_ident(*str)).collect()
fn ids_ext(strs: Vec<StrBuf> ) -> Vec<ast::Ident> {
strs.iter().map(|str| str_to_ident((*str).as_slice())).collect()
}

fn id_ext(str: &str) -> ast::Ident {
@ -678,11 +686,11 @@ fn expand_wrapper(cx: &ExtCtxt,
sp: Span,
cx_expr: @ast::Expr,
expr: @ast::Expr) -> @ast::Expr {
let uses = vec!( cx.view_use_glob(sp, ast::Inherited,
ids_ext(vec!("syntax".to_owned(),
"ext".to_owned(),
"quote".to_owned(),
"rt".to_owned()))) );
let uses = vec![ cx.view_use_glob(sp, ast::Inherited,
ids_ext(vec!["syntax".to_strbuf(),
"ext".to_strbuf(),
"quote".to_strbuf(),
"rt".to_strbuf()])) ];

let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr);

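// Illustrative sketch, not part of the commit: in this snapshot connect() on a
// Vec<StrBuf> still yields a ~str, hence the trailing .to_strbuf() in the
// slice impls above:
fn to_source(&self) -> StrBuf {
    self.iter()
        .map(|i| i.to_source())
        .collect::<Vec<StrBuf>>()
        .connect(", ")
        .to_strbuf()
}
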
@ -57,14 +57,15 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])

let topmost = topmost_expn_info(cx.backtrace().unwrap());
let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo);
let filename = token::intern_and_get_ident(loc.file.name);
let filename = token::intern_and_get_ident(loc.file.name.as_slice());
base::MacExpr::new(cx.expr_str(topmost.call_site, filename))
}

pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult> {
let s = pprust::tts_to_str(tts);
base::MacExpr::new(cx.expr_str(sp, token::intern_and_get_ident(s)))
base::MacExpr::new(cx.expr_str(sp,
token::intern_and_get_ident(s.as_slice())))
}

pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
@ -72,8 +73,8 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
base::check_zero_tts(cx, sp, tts, "module_path!");
let string = cx.mod_path()
.iter()
.map(|x| token::get_ident(*x).get().to_str())
.collect::<Vec<~str>>()
.map(|x| token::get_ident(*x).get().to_strbuf())
.collect::<Vec<StrBuf>>()
.connect("::");
base::MacExpr::new(cx.expr_str(sp, token::intern_and_get_ident(string)))
}
@ -117,9 +118,9 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
Some(src) => {
// Add this input file to the code map to make it available as
// dependency information
let filename = file.display().to_str();
let filename = file.display().to_str().to_strbuf();
let interned = token::intern_and_get_ident(src);
cx.codemap().new_filemap(filename, src.to_owned());
cx.codemap().new_filemap(filename, src.to_strbuf());

base::MacExpr::new(cx.expr_str(sp, interned))
}
@ -161,7 +162,7 @@ fn topmost_expn_info(expn_info: @codemap::ExpnInfo) -> @codemap::ExpnInfo {
..
} => {
// Don't recurse into file using "include!"
if "include" == *name {
if "include" == name.as_slice() {
expn_info
} else {
topmost_expn_info(next_expn_info)

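// Illustrative sketch, not part of the commit: interning still takes &str, so
// owned StrBuf values are lent out through .as_slice() at the call boundary:
let filename = token::intern_and_get_ident(loc.file.name.as_slice());
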
@ -201,8 +201,8 @@ pub fn nameize(p_s: &ParseSess, ms: &[Matcher], res: &[Rc<NamedMatch>])

pub enum ParseResult {
Success(HashMap<Ident, Rc<NamedMatch>>),
Failure(codemap::Span, ~str),
Error(codemap::Span, ~str)
Failure(codemap::Span, StrBuf),
Error(codemap::Span, StrBuf)
}

pub fn parse_or_else(sess: &ParseSess,
@ -212,8 +212,12 @@ pub fn parse_or_else(sess: &ParseSess,
-> HashMap<Ident, Rc<NamedMatch>> {
match parse(sess, cfg, rdr, ms.as_slice()) {
Success(m) => m,
Failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str),
Error(sp, str) => sess.span_diagnostic.span_fatal(sp, str)
Failure(sp, str) => {
sess.span_diagnostic.span_fatal(sp, str.as_slice())
}
Error(sp, str) => {
sess.span_diagnostic.span_fatal(sp, str.as_slice())
}
}
}

@ -366,9 +370,9 @@ pub fn parse(sess: &ParseSess,
}
return Success(nameize(sess, ms, v.as_slice()));
} else if eof_eis.len() > 1u {
return Error(sp, "ambiguity: multiple successful parses".to_owned());
return Error(sp, "ambiguity: multiple successful parses".to_strbuf());
} else {
return Failure(sp, "unexpected end of macro invocation".to_owned());
return Failure(sp, "unexpected end of macro invocation".to_strbuf());
}
} else {
if (bb_eis.len() > 0u && next_eis.len() > 0u)
@ -376,19 +380,19 @@ pub fn parse(sess: &ParseSess,
let nts = bb_eis.iter().map(|ei| {
match ei.elts.get(ei.idx).node {
MatchNonterminal(bind, name, _) => {
format!("{} ('{}')",
(format!("{} ('{}')",
token::get_ident(name),
token::get_ident(bind))
token::get_ident(bind))).to_strbuf()
}
_ => fail!()
} }).collect::<Vec<~str>>().connect(" or ");
} }).collect::<Vec<StrBuf>>().connect(" or ");
return Error(sp, format!(
"local ambiguity: multiple parsing options: \
built-in NTs {} or {} other options.",
nts, next_eis.len()));
nts, next_eis.len()).to_strbuf());
} else if bb_eis.len() == 0u && next_eis.len() == 0u {
return Failure(sp, format!("no rules expected the token `{}`",
token::to_str(&tok)));
token::to_str(&tok)).to_strbuf());
} else if next_eis.len() > 0u {
/* Now process the next token */
while next_eis.len() > 0u {
@ -436,7 +440,8 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
token::IDENT(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
_ => {
let token_str = token::to_str(&p.token);
p.fatal("expected ident, found ".to_owned() + token_str)
p.fatal((format!("expected ident, found {}",
token_str.as_slice())).as_slice())
}
},
"path" => {

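// Illustrative sketch, not part of the commit: the Failure/Error payloads are
// now owned StrBufs while span_fatal keeps its &str parameter, so callers
// borrow at the diagnostic boundary:
match parse(sess, cfg, rdr, ms.as_slice()) {
    Success(m) => m,
    Failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str.as_slice()),
    Error(sp, str) => sess.span_diagnostic.span_fatal(sp, str.as_slice())
}
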
@ -132,7 +132,7 @@ fn generic_extension(cx: &ExtCtxt,

// Which arm's failure should we report? (the one furthest along)
let mut best_fail_spot = DUMMY_SP;
let mut best_fail_msg = "internal error: ran no matchers".to_owned();
let mut best_fail_msg = "internal error: ran no matchers".to_strbuf();

for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
match **lhs {
@ -177,13 +177,13 @@ fn generic_extension(cx: &ExtCtxt,
best_fail_spot = sp;
best_fail_msg = (*msg).clone();
},
Error(sp, ref msg) => cx.span_fatal(sp, (*msg))
Error(sp, ref msg) => cx.span_fatal(sp, msg.as_slice())
}
}
_ => cx.bug("non-matcher found in parsed lhses")
}
}
cx.span_fatal(best_fail_spot, best_fail_msg);
cx.span_fatal(best_fail_spot, best_fail_msg.as_slice());
}

// this procedure performs the expansion of the
@ -247,7 +247,7 @@ pub fn add_new_extension(cx: &mut ExtCtxt,

box MacroRulesDefiner {
def: RefCell::new(Some(MacroDef {
name: token::get_ident(name).to_str(),
name: token::get_ident(name).to_str().to_strbuf(),
ext: NormalTT(exp, Some(sp))
}))
} as Box<MacResult>

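// Illustrative sketch, not part of the commit: the same owned-buffer /
// borrowed-view split as above; the message lives in a StrBuf and is lent to
// span_fatal only when reported:
let mut best_fail_msg = "internal error: ran no matchers".to_strbuf();
cx.span_fatal(best_fail_spot, best_fail_msg.as_slice());
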
@ -100,7 +100,7 @@ fn lookup_cur_matched(r: &TtReader, name: Ident) -> Rc<NamedMatch> {
enum LockstepIterSize {
LisUnconstrained,
LisConstraint(uint, Ident),
LisContradiction(~str),
LisContradiction(StrBuf),
}

fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize {
@ -116,7 +116,7 @@ fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize {
let r_n = token::get_ident(r_id);
LisContradiction(format!("inconsistent lockstep iteration: \
'{}' has {} items, but '{}' has {}",
l_n, l_len, r_n, r_len))
l_n, l_len, r_n, r_len).to_strbuf())
}
}
}
@ -223,7 +223,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
}
LisContradiction(ref msg) => {
// FIXME #2887 blame macro invoker instead
r.sp_diag.span_fatal(sp.clone(), *msg);
r.sp_diag.span_fatal(sp.clone(), msg.as_slice());
}
LisConstraint(len, _) => {
if len == 0 {

@ -949,7 +949,7 @@ mod test {
let pred_val = $pred;
let a_val = $a;
let b_val = $b;
if !(pred_val(a_val,b_val)) {
if !(pred_val(a_val.as_slice(),b_val.as_slice())) {
fail!("expected args satisfying {}, got {:?} and {:?}",
$predname, a_val, b_val);
}
@ -961,12 +961,13 @@ mod test {
#[test] fn ident_transformation () {
let mut zz_fold = ToZzIdentFolder;
let ast = string_to_crate(
"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_owned());
"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_strbuf());
let folded_crate = zz_fold.fold_crate(ast);
assert_pred!(matches_codepattern,
"matches_codepattern",
pprust::to_str(|s| fake_print_crate(s, &folded_crate)),
"#[a]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_owned());
assert_pred!(
matches_codepattern,
"matches_codepattern",
pprust::to_str(|s| fake_print_crate(s, &folded_crate)),
"#[a]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_strbuf());
}

// even inside macro defs....
@ -974,11 +975,12 @@ mod test {
let mut zz_fold = ToZzIdentFolder;
let ast = string_to_crate(
"macro_rules! a {(b $c:expr $(d $e:token)f+ => \
(g $(d $d $e)+))} ".to_owned());
(g $(d $d $e)+))} ".to_strbuf());
let folded_crate = zz_fold.fold_crate(ast);
assert_pred!(matches_codepattern,
"matches_codepattern",
pprust::to_str(|s| fake_print_crate(s, &folded_crate)),
"zz!zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)))".to_owned());
assert_pred!(
matches_codepattern,
"matches_codepattern",
pprust::to_str(|s| fake_print_crate(s, &folded_crate)),
"zz!zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)))".to_strbuf());
}
}

@ -33,7 +33,7 @@ pub enum CommentStyle {
#[deriving(Clone)]
pub struct Comment {
pub style: CommentStyle,
pub lines: Vec<~str>,
pub lines: Vec<StrBuf>,
pub pos: BytePos,
}

@ -53,35 +53,40 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle {
}
}

pub fn strip_doc_comment_decoration(comment: &str) -> ~str {
pub fn strip_doc_comment_decoration(comment: &str) -> StrBuf {
/// remove whitespace-only lines from the start/end of lines
fn vertical_trim(lines: Vec<~str> ) -> Vec<~str> {
fn vertical_trim(lines: Vec<StrBuf> ) -> Vec<StrBuf> {
let mut i = 0u;
let mut j = lines.len();
// first line of all-stars should be omitted
if lines.len() > 0 && lines.get(0).chars().all(|c| c == '*') {
if lines.len() > 0 &&
lines.get(0).as_slice().chars().all(|c| c == '*') {
i += 1;
}
while i < j && lines.get(i).trim().is_empty() {
while i < j && lines.get(i).as_slice().trim().is_empty() {
i += 1;
}
// like the first, a last line of all stars should be omitted
if j > i && lines.get(j - 1).chars().skip(1).all(|c| c == '*') {
if j > i && lines.get(j - 1)
.as_slice()
.chars()
.skip(1)
.all(|c| c == '*') {
j -= 1;
}
while j > i && lines.get(j - 1).trim().is_empty() {
while j > i && lines.get(j - 1).as_slice().trim().is_empty() {
j -= 1;
}
return lines.slice(i, j).iter().map(|x| (*x).clone()).collect();
}

/// remove a "[ \t]*\*" block from each line, if possible
fn horizontal_trim(lines: Vec<~str> ) -> Vec<~str> {
fn horizontal_trim(lines: Vec<StrBuf> ) -> Vec<StrBuf> {
let mut i = uint::MAX;
let mut can_trim = true;
let mut first = true;
for line in lines.iter() {
for (j, c) in line.chars().enumerate() {
for (j, c) in line.as_slice().chars().enumerate() {
if j > i || !"* \t".contains_char(c) {
can_trim = false;
break;
@ -105,7 +110,9 @@ pub fn strip_doc_comment_decoration(comment: &str) -> ~str {
}

if can_trim {
lines.iter().map(|line| line.slice(i + 1, line.len()).to_owned()).collect()
lines.iter().map(|line| {
line.as_slice().slice(i + 1, line.len()).to_strbuf()
}).collect()
} else {
lines
}
@ -115,39 +122,41 @@ pub fn strip_doc_comment_decoration(comment: &str) -> ~str {
static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"];
for prefix in ONLINERS.iter() {
if comment.starts_with(*prefix) {
return comment.slice_from(prefix.len()).to_owned();
return comment.slice_from(prefix.len()).to_strbuf();
}
}

if comment.starts_with("/*") {
let lines = comment.slice(3u, comment.len() - 2u)
.lines_any()
.map(|s| s.to_owned())
.collect::<Vec<~str> >();
.map(|s| s.to_strbuf())
.collect::<Vec<StrBuf> >();

let lines = vertical_trim(lines);
let lines = horizontal_trim(lines);

return lines.connect("\n");
return lines.connect("\n").to_strbuf();
}

fail!("not a doc-comment: {}", comment);
}

fn read_to_eol(rdr: &mut StringReader) -> ~str {
fn read_to_eol(rdr: &mut StringReader) -> StrBuf {
let mut val = StrBuf::new();
while !rdr.curr_is('\n') && !is_eof(rdr) {
val.push_char(rdr.curr.unwrap());
bump(rdr);
}
if rdr.curr_is('\n') { bump(rdr); }
return val.into_owned();
return val
}

fn read_one_line_comment(rdr: &mut StringReader) -> ~str {
fn read_one_line_comment(rdr: &mut StringReader) -> StrBuf {
let val = read_to_eol(rdr);
assert!((val[0] == '/' as u8 && val[1] == '/' as u8) ||
(val[0] == '#' as u8 && val[1] == '!' as u8));
assert!((val.as_slice()[0] == '/' as u8 &&
val.as_slice()[1] == '/' as u8) ||
(val.as_slice()[0] == '#' as u8 &&
val.as_slice()[1] == '!' as u8));
return val;
}

@ -193,11 +202,12 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool,
comments: &mut Vec<Comment>) {
debug!(">>> line comments");
let p = rdr.last_pos;
let mut lines: Vec<~str> = Vec::new();
let mut lines: Vec<StrBuf> = Vec::new();
while rdr.curr_is('/') && nextch_is(rdr, '/') {
let line = read_one_line_comment(rdr);
debug!("{}", line);
if is_doc_comment(line) { // doc-comments are not put in comments
// Doc comments are not put in comments.
if is_doc_comment(line.as_slice()) {
break;
}
lines.push(line);
@ -231,14 +241,16 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<uint> {
return Some(cursor);
}

fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<~str> ,
s: ~str, col: CharPos) {
fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<StrBuf> ,
s: StrBuf, col: CharPos) {
let len = s.len();
let s1 = match all_whitespace(s, col) {
let s1 = match all_whitespace(s.as_slice(), col) {
Some(col) => {
if col < len {
s.slice(col, len).to_owned()
} else { "".to_owned() }
s.as_slice().slice(col, len).to_strbuf()
} else {
"".to_strbuf()
}
}
None => s,
};
@ -251,7 +263,7 @@ fn read_block_comment(rdr: &mut StringReader,
comments: &mut Vec<Comment> ) {
debug!(">>> block comment");
let p = rdr.last_pos;
let mut lines: Vec<~str> = Vec::new();
let mut lines: Vec<StrBuf> = Vec::new();
let col = rdr.col;
bump(rdr);
bump(rdr);
@ -273,17 +285,17 @@ fn read_block_comment(rdr: &mut StringReader,
return
}
assert!(!curr_line.as_slice().contains_char('\n'));
lines.push(curr_line.into_owned());
lines.push(curr_line);
} else {
let mut level: int = 1;
while level > 0 {
debug!("=== block comment level {}", level);
if is_eof(rdr) {
rdr.fatal("unterminated block comment".to_owned());
rdr.fatal("unterminated block comment".to_strbuf());
}
if rdr.curr_is('\n') {
trim_whitespace_prefix_and_push_line(&mut lines,
curr_line.into_owned(),
curr_line,
col);
curr_line = StrBuf::new();
bump(rdr);
@ -306,7 +318,7 @@ fn read_block_comment(rdr: &mut StringReader,
}
if curr_line.len() != 0 {
trim_whitespace_prefix_and_push_line(&mut lines,
curr_line.into_owned(),
curr_line,
col);
}
}
@ -344,7 +356,7 @@ fn consume_comment(rdr: &mut StringReader,

#[deriving(Clone)]
pub struct Literal {
pub lit: ~str,
pub lit: StrBuf,
pub pos: BytePos,
}

@ -352,11 +364,11 @@ pub struct Literal {
// probably not a good thing.
pub fn gather_comments_and_literals(span_diagnostic:
&diagnostic::SpanHandler,
path: ~str,
path: StrBuf,
srdr: &mut io::Reader)
-> (Vec<Comment>, Vec<Literal>) {
let src = srdr.read_to_end().unwrap();
let src = str::from_utf8(src.as_slice()).unwrap().to_owned();
let src = str::from_utf8(src.as_slice()).unwrap().to_strbuf();
let cm = CodeMap::new();
let filemap = cm.new_filemap(path, src);
let mut rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap);
@ -387,7 +399,7 @@ pub fn gather_comments_and_literals(span_diagnostic:
if token::is_lit(&tok) {
with_str_from(&rdr, bstart, |s| {
debug!("tok lit: {}", s);
literals.push(Literal {lit: s.to_owned(), pos: sp.lo});
literals.push(Literal {lit: s.to_strbuf(), pos: sp.lo});
})
} else {
debug!("tok: {}", token::to_str(&tok));
@ -405,41 +417,41 @@ mod test {
#[test] fn test_block_doc_comment_1() {
let comment = "/**\n * Test \n ** Test\n * Test\n*/";
let stripped = strip_doc_comment_decoration(comment);
assert_eq!(stripped, " Test \n* Test\n Test".to_owned());
assert_eq!(stripped, " Test \n* Test\n Test".to_strbuf());
}

#[test] fn test_block_doc_comment_2() {
let comment = "/**\n * Test\n * Test\n*/";
let stripped = strip_doc_comment_decoration(comment);
assert_eq!(stripped, " Test\n Test".to_owned());
assert_eq!(stripped, " Test\n Test".to_strbuf());
}

#[test] fn test_block_doc_comment_3() {
let comment = "/**\n let a: *int;\n *a = 5;\n*/";
let stripped = strip_doc_comment_decoration(comment);
assert_eq!(stripped, " let a: *int;\n *a = 5;".to_owned());
assert_eq!(stripped, " let a: *int;\n *a = 5;".to_strbuf());
}

#[test] fn test_block_doc_comment_4() {
let comment = "/*******************\n test\n *********************/";
let stripped = strip_doc_comment_decoration(comment);
assert_eq!(stripped, " test".to_owned());
assert_eq!(stripped, " test".to_strbuf());
}

#[test] fn test_line_doc_comment() {
let stripped = strip_doc_comment_decoration("/// test");
assert_eq!(stripped, " test".to_owned());
assert_eq!(stripped, " test".to_strbuf());
let stripped = strip_doc_comment_decoration("///! test");
assert_eq!(stripped, " test".to_owned());
assert_eq!(stripped, " test".to_strbuf());
let stripped = strip_doc_comment_decoration("// test");
assert_eq!(stripped, " test".to_owned());
assert_eq!(stripped, " test".to_strbuf());
let stripped = strip_doc_comment_decoration("// test");
assert_eq!(stripped, " test".to_owned());
assert_eq!(stripped, " test".to_strbuf());
let stripped = strip_doc_comment_decoration("///test");
assert_eq!(stripped, "test".to_owned());
assert_eq!(stripped, "test".to_strbuf());
let stripped = strip_doc_comment_decoration("///!test");
assert_eq!(stripped, "test".to_owned());
assert_eq!(stripped, "test".to_strbuf());
let stripped = strip_doc_comment_decoration("//test");
assert_eq!(stripped, "test".to_owned());
assert_eq!(stripped, "test".to_strbuf());
}
}

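// Illustrative sketch, not part of the commit: when the accumulator type and
// the return type now agree, the old into_owned() copy to ~str disappears and
// the buffer is returned directly:
fn read_to_eol(rdr: &mut StringReader) -> StrBuf {
    let mut val = StrBuf::new();
    while !rdr.curr_is('\n') && !is_eof(rdr) {
        val.push_char(rdr.curr.unwrap());
        bump(rdr);
    }
    if rdr.curr_is('\n') { bump(rdr); }
    return val
}
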
@ -28,7 +28,7 @@ pub use ext::tt::transcribe::{TtReader, new_tt_reader};
pub trait Reader {
fn is_eof(&self) -> bool;
fn next_token(&mut self) -> TokenAndSpan;
fn fatal(&self, ~str) -> !;
fn fatal(&self, StrBuf) -> !;
fn span_diag<'a>(&'a self) -> &'a SpanHandler;
fn peek(&self) -> TokenAndSpan;
}
@ -101,8 +101,8 @@ impl<'a> Reader for StringReader<'a> {
string_advance_token(self);
ret_val
}
fn fatal(&self, m: ~str) -> ! {
self.span_diagnostic.span_fatal(self.peek_span, m)
fn fatal(&self, m: StrBuf) -> ! {
self.span_diagnostic.span_fatal(self.peek_span, m.as_slice())
}
fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.span_diagnostic }
fn peek(&self) -> TokenAndSpan {
@ -123,8 +123,8 @@ impl<'a> Reader for TtReader<'a> {
debug!("TtReader: r={:?}", r);
r
}
fn fatal(&self, m: ~str) -> ! {
self.sp_diag.span_fatal(self.cur_span, m);
fn fatal(&self, m: StrBuf) -> ! {
self.sp_diag.span_fatal(self.cur_span, m.as_slice());
}
fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.sp_diag }
fn peek(&self) -> TokenAndSpan {
@ -139,7 +139,7 @@ impl<'a> Reader for TtReader<'a> {
fn fatal_span(rdr: &mut StringReader,
from_pos: BytePos,
to_pos: BytePos,
m: ~str)
m: StrBuf)
-> ! {
rdr.peek_span = codemap::mk_sp(from_pos, to_pos);
rdr.fatal(m);
@ -150,13 +150,13 @@ fn fatal_span(rdr: &mut StringReader,
fn fatal_span_char(rdr: &mut StringReader,
from_pos: BytePos,
to_pos: BytePos,
m: ~str,
m: StrBuf,
c: char)
-> ! {
let mut m = StrBuf::from_owned_str(m);
let mut m = m;
m.push_str(": ");
char::escape_default(c, |c| m.push_char(c));
fatal_span(rdr, from_pos, to_pos, m.into_owned());
fatal_span(rdr, from_pos, to_pos, m.into_strbuf());
}

// report a lexical error spanning [`from_pos`, `to_pos`), appending the
@ -164,14 +164,14 @@ fn fatal_span_char(rdr: &mut StringReader,
fn fatal_span_verbose(rdr: &mut StringReader,
from_pos: BytePos,
to_pos: BytePos,
m: ~str)
m: StrBuf)
-> ! {
let mut m = StrBuf::from_owned_str(m);
let mut m = m;
m.push_str(": ");
let from = byte_offset(rdr, from_pos).to_uint();
let to = byte_offset(rdr, to_pos).to_uint();
m.push_str(rdr.filemap.src.slice(from, to));
fatal_span(rdr, from_pos, to_pos, m.into_owned());
m.push_str(rdr.filemap.src.as_slice().slice(from, to));
fatal_span(rdr, from_pos, to_pos, m);
}

// EFFECT: advance peek_tok and peek_span to refer to the next token.
@ -218,7 +218,7 @@ fn with_str_from_to<T>(
end: BytePos,
f: |s: &str| -> T)
-> T {
f(rdr.filemap.src.slice(
f(rdr.filemap.src.as_slice().slice(
byte_offset(rdr, start).to_uint(),
byte_offset(rdr, end).to_uint()))
}
@ -231,7 +231,10 @@ pub fn bump(rdr: &mut StringReader) {
if current_byte_offset < rdr.filemap.src.len() {
assert!(rdr.curr.is_some());
let last_char = rdr.curr.unwrap();
let next = rdr.filemap.src.char_range_at(current_byte_offset);
let next = rdr.filemap
.src
.as_slice()
.char_range_at(current_byte_offset);
let byte_offset_diff = next.next - current_byte_offset;
rdr.pos = rdr.pos + Pos::from_uint(byte_offset_diff);
rdr.curr = Some(next.ch);
@ -256,7 +259,7 @@ pub fn is_eof(rdr: &StringReader) -> bool {
pub fn nextch(rdr: &StringReader) -> Option<char> {
let offset = byte_offset(rdr, rdr.pos).to_uint();
if offset < rdr.filemap.src.len() {
Some(rdr.filemap.src.char_at(offset))
Some(rdr.filemap.src.as_slice().char_at(offset))
} else {
None
}
@ -400,9 +403,9 @@ fn consume_block_comment(rdr: &mut StringReader) -> Option<TokenAndSpan> {
while level > 0 {
if is_eof(rdr) {
let msg = if is_doc_comment {
"unterminated block doc-comment".to_owned()
"unterminated block doc-comment".to_strbuf()
} else {
"unterminated block comment".to_owned()
"unterminated block comment".to_strbuf()
};
fatal_span(rdr, start_bpos, rdr.last_pos, msg);
} else if rdr.curr_is('/') && nextch_is(rdr, '*') {
@ -438,7 +441,7 @@ fn consume_block_comment(rdr: &mut StringReader) -> Option<TokenAndSpan> {
if res.is_some() { res } else { consume_whitespace_and_comments(rdr) }
}

fn scan_exponent(rdr: &mut StringReader, start_bpos: BytePos) -> Option<~str> {
fn scan_exponent(rdr: &mut StringReader, start_bpos: BytePos) -> Option<StrBuf> {
// \x00 hits the `return None` case immediately, so this is fine.
let mut c = rdr.curr.unwrap_or('\x00');
let mut rslt = StrBuf::new();
@ -452,16 +455,18 @@ fn scan_exponent(rdr: &mut StringReader, start_bpos: BytePos) -> Option<~str> {
}
let exponent = scan_digits(rdr, 10u);
if exponent.len() > 0u {
rslt.push_str(exponent);
return Some(rslt.into_owned());
rslt.push_str(exponent.as_slice());
return Some(rslt);
} else {
fatal_span(rdr, start_bpos, rdr.last_pos,
"scan_exponent: bad fp literal".to_owned());
"scan_exponent: bad fp literal".to_strbuf());
}
} else { return None::<~str>; }
} else {
return None::<StrBuf>;
}
}

fn scan_digits(rdr: &mut StringReader, radix: uint) -> ~str {
fn scan_digits(rdr: &mut StringReader, radix: uint) -> StrBuf {
let mut rslt = StrBuf::new();
loop {
let c = rdr.curr;
@ -471,7 +476,7 @@ fn scan_digits(rdr: &mut StringReader, radix: uint) -> ~str {
rslt.push_char(c.unwrap());
bump(rdr);
}
_ => return rslt.into_owned()
_ => return rslt
}
};
}
@ -479,12 +484,14 @@ fn scan_digits(rdr: &mut StringReader, radix: uint) -> ~str {
fn check_float_base(rdr: &mut StringReader, start_bpos: BytePos, last_bpos: BytePos,
base: uint) {
match base {
16u => fatal_span(rdr, start_bpos, last_bpos,
"hexadecimal float literal is not supported".to_owned()),
16u => {
fatal_span(rdr, start_bpos, last_bpos,
"hexadecimal float literal is not supported".to_strbuf())
}
8u => fatal_span(rdr, start_bpos, last_bpos,
"octal float literal is not supported".to_owned()),
"octal float literal is not supported".to_strbuf()),
2u => fatal_span(rdr, start_bpos, last_bpos,
"binary float literal is not supported".to_owned()),
"binary float literal is not supported".to_strbuf()),
_ => ()
}
}
@ -508,7 +515,7 @@ fn scan_number(c: char, rdr: &mut StringReader) -> token::Token {
bump(rdr);
base = 2u;
}
num_str = StrBuf::from_owned_str(scan_digits(rdr, base));
num_str = scan_digits(rdr, base);
c = rdr.curr.unwrap_or('\x00');
nextch(rdr);
if c == 'u' || c == 'i' {
@ -544,13 +551,13 @@ fn scan_number(c: char, rdr: &mut StringReader) -> token::Token {
}
if num_str.len() == 0u {
fatal_span(rdr, start_bpos, rdr.last_pos,
"no valid digits found for number".to_owned());
"no valid digits found for number".to_strbuf());
}
let parsed = match from_str_radix::<u64>(num_str.as_slice(),
base as uint) {
Some(p) => p,
None => fatal_span(rdr, start_bpos, rdr.last_pos,
"int literal is too large".to_owned())
"int literal is too large".to_strbuf())
};

match tp {
@ -564,12 +571,12 @@ fn scan_number(c: char, rdr: &mut StringReader) -> token::Token {
bump(rdr);
let dec_part = scan_digits(rdr, 10u);
num_str.push_char('.');
num_str.push_str(dec_part);
num_str.push_str(dec_part.as_slice());
}
match scan_exponent(rdr, start_bpos) {
Some(ref s) => {
is_float = true;
num_str.push_str(*s);
num_str.push_str(s.as_slice());
}
None => ()
}
@ -601,7 +608,7 @@ fn scan_number(c: char, rdr: &mut StringReader) -> token::Token {
return token::LIT_FLOAT(str_to_ident(num_str.as_slice()), ast::TyF128);
}
fatal_span(rdr, start_bpos, rdr.last_pos,
"expected `f32`, `f64` or `f128` suffix".to_owned());
"expected `f32`, `f64` or `f128` suffix".to_strbuf());
}
if is_float {
check_float_base(rdr, start_bpos, rdr.last_pos, base);
@ -610,13 +617,13 @@ fn scan_number(c: char, rdr: &mut StringReader) -> token::Token {
} else {
if num_str.len() == 0u {
fatal_span(rdr, start_bpos, rdr.last_pos,
"no valid digits found for number".to_owned());
"no valid digits found for number".to_strbuf());
}
let parsed = match from_str_radix::<u64>(num_str.as_slice(),
base as uint) {
Some(p) => p,
None => fatal_span(rdr, start_bpos, rdr.last_pos,
"int literal is too large".to_owned())
"int literal is too large".to_strbuf())
};

debug!("lexing {} as an unsuffixed integer literal",
@ -632,9 +639,12 @@ fn scan_numeric_escape(rdr: &mut StringReader, n_hex_digits: uint) -> char {
while i != 0u && !is_eof(rdr) {
let n = rdr.curr;
if !is_hex_digit(n) {
fatal_span_char(rdr, rdr.last_pos, rdr.pos,
"illegal character in numeric character escape".to_owned(),
n.unwrap());
fatal_span_char(
rdr,
rdr.last_pos,
rdr.pos,
"illegal character in numeric character escape".to_strbuf(),
n.unwrap());
}
bump(rdr);
accum_int *= 16;
@ -643,13 +653,13 @@ fn scan_numeric_escape(rdr: &mut StringReader, n_hex_digits: uint) -> char {
}
if i != 0 && is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos,
"unterminated numeric character escape".to_owned());
"unterminated numeric character escape".to_strbuf());
}

match char::from_u32(accum_int as u32) {
Some(x) => x,
None => fatal_span(rdr, start_bpos, rdr.last_pos,
"illegal numeric character escape".to_owned())
"illegal numeric character escape".to_strbuf())
}
}

@ -819,11 +829,11 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
if token::is_keyword(token::keywords::Self, tok) {
fatal_span(rdr, start, rdr.last_pos,
"invalid lifetime name: 'self \
is no longer a special lifetime".to_owned());
is no longer a special lifetime".to_strbuf());
} else if token::is_any_keyword(tok) &&
!token::is_keyword(token::keywords::Static, tok) {
fatal_span(rdr, start, rdr.last_pos,
"invalid lifetime name".to_owned());
"invalid lifetime name".to_strbuf());
} else {
return token::LIFETIME(ident);
}
@ -851,16 +861,24 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
'u' => scan_numeric_escape(rdr, 4u),
'U' => scan_numeric_escape(rdr, 8u),
c2 => {
fatal_span_char(rdr, escaped_pos, rdr.last_pos,
"unknown character escape".to_owned(), c2)
fatal_span_char(rdr,
escaped_pos,
rdr.last_pos,
"unknown character \
escape".to_strbuf(),
c2)
}
}
}
}
}
'\t' | '\n' | '\r' | '\'' => {
fatal_span_char(rdr, start, rdr.last_pos,
"character constant must be escaped".to_owned(), c2);
fatal_span_char(
rdr,
start,
rdr.last_pos,
"character constant must be escaped".to_strbuf(),
c2);
}
_ => {}
}
@ -871,7 +889,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
// ascii single quote.
start - BytePos(1),
rdr.last_pos,
"unterminated character constant".to_owned());
"unterminated character constant".to_strbuf());
}
bump(rdr); // advance curr past token
return token::LIT_CHAR(c2);
@ -883,7 +901,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
while !rdr.curr_is('"') {
if is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos,
"unterminated double quote string".to_owned());
"unterminated double quote string".to_strbuf());
}

let ch = rdr.curr.unwrap();
@ -892,7 +910,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
'\\' => {
if is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos,
"unterminated double quote string".to_owned());
"unterminated double quote string".to_strbuf());
}

let escaped = rdr.curr.unwrap();
@ -918,7 +936,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
}
c2 => {
fatal_span_char(rdr, escaped_pos, rdr.last_pos,
"unknown string escape".to_owned(), c2);
"unknown string escape".to_strbuf(), c2);
}
}
}
@ -939,11 +957,11 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {

if is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos,
"unterminated raw string".to_owned());
"unterminated raw string".to_strbuf());
} else if !rdr.curr_is('"') {
fatal_span_char(rdr, start_bpos, rdr.last_pos,
"only `#` is allowed in raw string delimitation; \
found illegal character".to_owned(),
found illegal character".to_strbuf(),
rdr.curr.unwrap());
}
bump(rdr);
@ -952,7 +970,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
'outer: loop {
if is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos,
"unterminated raw string".to_owned());
"unterminated raw string".to_strbuf());
}
if rdr.curr_is('"') {
content_end_bpos = rdr.last_pos;
@ -1000,7 +1018,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
'%' => { return binop(rdr, token::PERCENT); }
c => {
fatal_span_char(rdr, rdr.last_pos, rdr.pos,
"unknown start of token".to_owned(), c);
"unknown start of token".to_strbuf(), c);
}
}
}
@ -1027,8 +1045,8 @@ mod test {

// open a string reader for the given string
fn setup<'a>(span_handler: &'a diagnostic::SpanHandler,
teststr: ~str) -> StringReader<'a> {
let fm = span_handler.cm.new_filemap("zebra.rs".to_owned(), teststr);
teststr: StrBuf) -> StringReader<'a> {
let fm = span_handler.cm.new_filemap("zebra.rs".to_strbuf(), teststr);
new_string_reader(span_handler, fm)
}

@ -1036,7 +1054,7 @@ mod test {
let span_handler = mk_sh();
let mut string_reader = setup(&span_handler,
"/* my source file */ \
fn main() { println!(\"zebra\"); }\n".to_owned());
fn main() { println!(\"zebra\"); }\n".to_strbuf());
let id = str_to_ident("fn");
let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan{
@ -1069,54 +1087,56 @@ mod test {
}

#[test] fn doublecolonparsing () {
check_tokenization(setup(&mk_sh(), "a b".to_owned()),
check_tokenization(setup(&mk_sh(), "a b".to_strbuf()),
vec!(mk_ident("a",false),
mk_ident("b",false)));
}

#[test] fn dcparsing_2 () {
check_tokenization(setup(&mk_sh(), "a::b".to_owned()),
check_tokenization(setup(&mk_sh(), "a::b".to_strbuf()),
vec!(mk_ident("a",true),
token::MOD_SEP,
mk_ident("b",false)));
}

#[test] fn dcparsing_3 () {
check_tokenization(setup(&mk_sh(), "a ::b".to_owned()),
check_tokenization(setup(&mk_sh(), "a ::b".to_strbuf()),
vec!(mk_ident("a",false),
token::MOD_SEP,
mk_ident("b",false)));
}

#[test] fn dcparsing_4 () {
check_tokenization(setup(&mk_sh(), "a:: b".to_owned()),
check_tokenization(setup(&mk_sh(), "a:: b".to_strbuf()),
vec!(mk_ident("a",true),
token::MOD_SEP,
mk_ident("b",false)));
}

#[test] fn character_a() {
assert_eq!(setup(&mk_sh(), "'a'".to_owned()).next_token().tok,
assert_eq!(setup(&mk_sh(), "'a'".to_strbuf()).next_token().tok,
token::LIT_CHAR('a'));
}

#[test] fn character_space() {
assert_eq!(setup(&mk_sh(), "' '".to_owned()).next_token().tok,
assert_eq!(setup(&mk_sh(), "' '".to_strbuf()).next_token().tok,
token::LIT_CHAR(' '));
}

#[test] fn character_escaped() {
assert_eq!(setup(&mk_sh(), "'\\n'".to_owned()).next_token().tok,
assert_eq!(setup(&mk_sh(), "'\\n'".to_strbuf()).next_token().tok,
token::LIT_CHAR('\n'));
}

#[test] fn lifetime_name() {
assert_eq!(setup(&mk_sh(), "'abc".to_owned()).next_token().tok,
assert_eq!(setup(&mk_sh(), "'abc".to_strbuf()).next_token().tok,
token::LIFETIME(token::str_to_ident("abc")));
}

#[test] fn raw_string() {
assert_eq!(setup(&mk_sh(), "r###\"\"#a\\b\x00c\"\"###".to_owned()).next_token().tok,
assert_eq!(setup(&mk_sh(),
"r###\"\"#a\\b\x00c\"\"###".to_strbuf()).next_token()
.tok,
token::LIT_STR_RAW(token::str_to_ident("\"#a\\b\x00c\""), 3));
}

@ -1127,7 +1147,8 @@ mod test {
}

#[test] fn nested_block_comments() {
assert_eq!(setup(&mk_sh(), "/* /* */ */'a'".to_owned()).next_token().tok,
assert_eq!(setup(&mk_sh(),
"/* /* */ */'a'".to_strbuf()).next_token().tok,
token::LIT_CHAR('a'));
}

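// Illustrative sketch, not part of the commit: Reader::fatal now takes the
// owned StrBuf and borrows it only when handing the message to the diagnostic:
fn fatal(&self, m: StrBuf) -> ! {
    self.span_diagnostic.span_fatal(self.peek_span, m.as_slice())
}
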
@ -77,8 +77,8 @@ pub fn parse_crate_attrs_from_file(
|
||||
inner
|
||||
}
|
||||
|
||||
pub fn parse_crate_from_source_str(name: ~str,
|
||||
source: ~str,
|
||||
pub fn parse_crate_from_source_str(name: StrBuf,
|
||||
source: StrBuf,
|
||||
cfg: ast::CrateConfig,
|
||||
sess: &ParseSess)
|
||||
-> ast::Crate {
|
||||
@ -89,8 +89,8 @@ pub fn parse_crate_from_source_str(name: ~str,
|
||||
maybe_aborted(p.parse_crate_mod(),p)
|
||||
}
|
||||
|
||||
pub fn parse_crate_attrs_from_source_str(name: ~str,
|
||||
source: ~str,
|
||||
pub fn parse_crate_attrs_from_source_str(name: StrBuf,
|
||||
source: StrBuf,
|
||||
cfg: ast::CrateConfig,
|
||||
sess: &ParseSess)
|
||||
-> Vec<ast::Attribute> {
|
||||
@ -102,8 +102,8 @@ pub fn parse_crate_attrs_from_source_str(name: ~str,
|
||||
inner
|
||||
}
|
||||
|
||||
pub fn parse_expr_from_source_str(name: ~str,
|
||||
source: ~str,
|
||||
pub fn parse_expr_from_source_str(name: StrBuf,
|
||||
source: StrBuf,
|
||||
cfg: ast::CrateConfig,
|
||||
sess: &ParseSess)
|
||||
-> @ast::Expr {
|
||||
@ -111,8 +111,8 @@ pub fn parse_expr_from_source_str(name: ~str,
|
||||
maybe_aborted(p.parse_expr(), p)
|
||||
}
|
||||
|
||||
pub fn parse_item_from_source_str(name: ~str,
|
||||
source: ~str,
|
||||
pub fn parse_item_from_source_str(name: StrBuf,
|
||||
source: StrBuf,
|
||||
cfg: ast::CrateConfig,
|
||||
sess: &ParseSess)
|
||||
-> Option<@ast::Item> {
|
||||
@ -121,8 +121,8 @@ pub fn parse_item_from_source_str(name: ~str,
|
||||
maybe_aborted(p.parse_item(attrs),p)
|
||||
}
|
||||
|
||||
pub fn parse_meta_from_source_str(name: ~str,
|
||||
source: ~str,
|
||||
pub fn parse_meta_from_source_str(name: StrBuf,
|
||||
source: StrBuf,
|
||||
cfg: ast::CrateConfig,
|
||||
sess: &ParseSess)
|
||||
-> @ast::MetaItem {
|
||||
@ -130,8 +130,8 @@ pub fn parse_meta_from_source_str(name: ~str,
|
||||
maybe_aborted(p.parse_meta_item(),p)
|
||||
}
|
||||
|
||||
pub fn parse_stmt_from_source_str(name: ~str,
|
||||
source: ~str,
|
||||
pub fn parse_stmt_from_source_str(name: StrBuf,
|
||||
source: StrBuf,
|
||||
cfg: ast::CrateConfig,
|
||||
attrs: Vec<ast::Attribute> ,
|
||||
sess: &ParseSess)
|
||||
@ -145,8 +145,8 @@ pub fn parse_stmt_from_source_str(name: ~str,
|
||||
maybe_aborted(p.parse_stmt(attrs),p)
|
||||
}
|
||||
|
||||
pub fn parse_tts_from_source_str(name: ~str,
|
||||
source: ~str,
|
||||
pub fn parse_tts_from_source_str(name: StrBuf,
|
||||
source: StrBuf,
|
||||
cfg: ast::CrateConfig,
|
||||
sess: &ParseSess)
|
||||
-> Vec<ast::TokenTree> {
|
||||
@ -164,8 +164,8 @@ pub fn parse_tts_from_source_str(name: ~str,
|
||||
// Create a new parser from a source string
|
||||
pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
|
||||
cfg: ast::CrateConfig,
|
||||
name: ~str,
|
||||
source: ~str)
|
||||
name: StrBuf,
|
||||
source: StrBuf)
|
||||
-> Parser<'a> {
|
||||
filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg)
|
||||
}
|
||||
@ -225,8 +225,8 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
|
||||
};
|
||||
match str::from_utf8(bytes.as_slice()) {
|
||||
Some(s) => {
|
||||
return string_to_filemap(sess, s.to_owned(),
|
||||
path.as_str().unwrap().to_str())
|
||||
return string_to_filemap(sess, s.to_strbuf(),
|
||||
path.as_str().unwrap().to_strbuf())
|
||||
}
|
||||
None => err(format!("{} is not UTF-8 encoded", path.display())),
|
||||
}
|
||||
@ -235,7 +235,7 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
|
||||
|
||||
// given a session and a string, add the string to
|
||||
// the session's codemap and return the new filemap
|
||||
pub fn string_to_filemap(sess: &ParseSess, source: ~str, path: ~str)
|
||||
pub fn string_to_filemap(sess: &ParseSess, source: StrBuf, path: StrBuf)
|
||||
-> Rc<FileMap> {
|
||||
sess.span_diagnostic.cm.new_filemap(path, source)
|
||||
}
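
With both parameters owned, callers of `string_to_filemap` hand the source text and path over by value and `new_filemap` stores them without another copy. A sketch of a call under the new signature (the source text and file name are made up):

// illustrative only: feeding an in-memory source into the codemap
let sess = new_parse_sess();
let filemap = string_to_filemap(&sess,
                                "fn main() {}".to_strbuf(),
                                "example.rs".to_strbuf());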

@ -284,11 +284,11 @@ mod test {
use util::parser_testing::{string_to_expr, string_to_item};
use util::parser_testing::string_to_stmt;

fn to_json_str<'a, E: Encodable<json::Encoder<'a>, io::IoError>>(val: &E) -> ~str {
fn to_json_str<'a, E: Encodable<json::Encoder<'a>, io::IoError>>(val: &E) -> StrBuf {
let mut writer = MemWriter::new();
let mut encoder = json::Encoder::new(&mut writer as &mut io::Writer);
let _ = val.encode(&mut encoder);
str::from_utf8(writer.unwrap().as_slice()).unwrap().to_owned()
str::from_utf8(writer.unwrap().as_slice()).unwrap().to_strbuf()
}

// produce a codemap::span
@ -297,7 +297,7 @@ mod test {
}

#[test] fn path_exprs_1() {
assert!(string_to_expr("a".to_owned()) ==
assert!(string_to_expr("a".to_strbuf()) ==
@ast::Expr{
id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(ast::Path {
@ -316,7 +316,7 @@ mod test {
}

#[test] fn path_exprs_2 () {
assert!(string_to_expr("::a::b".to_owned()) ==
assert!(string_to_expr("::a::b".to_strbuf()) ==
@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(ast::Path {
@ -341,12 +341,12 @@ mod test {

#[should_fail]
#[test] fn bad_path_expr_1() {
string_to_expr("::abc::def::return".to_owned());
string_to_expr("::abc::def::return".to_strbuf());
}

// check the token-tree-ization of macros
#[test] fn string_to_tts_macro () {
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_owned());
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_strbuf());
let tts: &[ast::TokenTree] = tts.as_slice();
match tts {
[ast::TTTok(_,_),
@ -399,7 +399,7 @@ mod test {
}

#[test] fn string_to_tts_1 () {
let tts = string_to_tts("fn a (b : int) { b; }".to_owned());
let tts = string_to_tts("fn a (b : int) { b; }".to_strbuf());
assert_eq!(to_json_str(&tts),
"[\
{\
@ -523,12 +523,12 @@ mod test {
]\
]\
}\
]".to_owned()
]".to_strbuf()
);
}

#[test] fn ret_expr() {
assert!(string_to_expr("return d".to_owned()) ==
assert!(string_to_expr("return d".to_strbuf()) ==
@ast::Expr{
id: ast::DUMMY_NODE_ID,
node:ast::ExprRet(Some(@ast::Expr{
@ -551,7 +551,7 @@ mod test {
}

#[test] fn parse_stmt_1 () {
assert!(string_to_stmt("b;".to_owned()) ==
assert!(string_to_stmt("b;".to_strbuf()) ==
@Spanned{
node: ast::StmtExpr(@ast::Expr {
id: ast::DUMMY_NODE_ID,
@ -578,7 +578,7 @@ mod test {

#[test] fn parse_ident_pat () {
let sess = new_parse_sess();
let mut parser = string_to_parser(&sess, "b".to_owned());
let mut parser = string_to_parser(&sess, "b".to_strbuf());
assert!(parser.parse_pat() ==
@ast::Pat{id: ast::DUMMY_NODE_ID,
node: ast::PatIdent(
@ -602,7 +602,7 @@ mod test {
// check the contents of the tt manually:
#[test] fn parse_fundecl () {
// this test depends on the intern order of "fn" and "int"
assert!(string_to_item("fn a (b : int) { b; }".to_owned()) ==
assert!(string_to_item("fn a (b : int) { b; }".to_strbuf()) ==
Some(
@ast::Item{ident:str_to_ident("a"),
attrs:Vec::new(),
@ -694,13 +694,13 @@ mod test {

#[test] fn parse_exprs () {
// just make sure that they parse....
string_to_expr("3 + 4".to_owned());
string_to_expr("a::z.froob(b,@(987+3))".to_owned());
string_to_expr("3 + 4".to_strbuf());
string_to_expr("a::z.froob(b,@(987+3))".to_strbuf());
}

#[test] fn attrs_fix_bug () {
string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
-> Result<@Writer, ~str> {
-> Result<@Writer, StrBuf> {
#[cfg(windows)]
fn wb() -> c_int {
(O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
@ -710,7 +710,7 @@ mod test {
fn wb() -> c_int { O_WRONLY as c_int }

let mut fflags: c_int = wb();
}".to_owned());
}".to_strbuf());
}

}

@ -123,7 +123,7 @@ impl<'a> ParserObsoleteMethods for Parser<'a> {
),
ObsoleteManagedString => (
"managed string",
"use `Rc<~str>` instead of a managed string"
"use `Rc<StrBuf>` instead of a managed string"
),
ObsoleteManagedVec => (
"managed vector",

@ -345,12 +345,12 @@ fn is_plain_ident_or_underscore(t: &token::Token) -> bool {

impl<'a> Parser<'a> {
// convert a token to a string using self's reader
pub fn token_to_str(token: &token::Token) -> ~str {
pub fn token_to_str(token: &token::Token) -> StrBuf {
token::to_str(token)
}

// convert the current token to a string using self's reader
pub fn this_token_to_str(&mut self) -> ~str {
pub fn this_token_to_str(&mut self) -> StrBuf {
Parser::token_to_str(&self.token)
}

@ -385,11 +385,17 @@ impl<'a> Parser<'a> {
pub fn expect_one_of(&mut self,
edible: &[token::Token],
inedible: &[token::Token]) {
fn tokens_to_str(tokens: &[token::Token]) -> ~str {
fn tokens_to_str(tokens: &[token::Token]) -> StrBuf {
let mut i = tokens.iter();
// This might be a sign we need a connect method on Iterator.
let b = i.next().map_or("".to_owned(), |t| Parser::token_to_str(t));
i.fold(b, |b,a| b + "`, `" + Parser::token_to_str(a))
let b = i.next()
.map_or("".to_strbuf(), |t| Parser::token_to_str(t));
i.fold(b, |b,a| {
let mut b = b;
b.push_str("`, `");
b.push_str(Parser::token_to_str(a).as_slice());
b
})
}
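
`StrBuf` does not support `+` concatenation the way `~str` did, so the old one-line fold becomes an explicit accumulator grown with `push_str`. The same pattern in isolation, as a hypothetical helper in this commit's dialect (not part of the patch):

// illustrative only: joining pieces into a StrBuf without `+`
fn join(items: &[&str], sep: &str) -> StrBuf {
    let mut i = items.iter();
    let b = i.next().map_or("".to_strbuf(), |s| s.to_strbuf());
    i.fold(b, |b, s| {
        let mut b = b;
        b.push_str(sep);
        b.push_str(*s);
        b
    })
}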
if edible.contains(&self.token) {
self.bump();
@ -3898,7 +3904,7 @@ impl<'a> Parser<'a> {
(ident, ItemImpl(generics, opt_trait, ty, meths), Some(inner_attrs))
}

// parse a::B<~str,int>
// parse a::B<StrBuf,int>
fn parse_trait_ref(&mut self) -> TraitRef {
ast::TraitRef {
path: self.parse_path(LifetimeAndTypesWithoutColons).path,
@ -3906,7 +3912,7 @@ impl<'a> Parser<'a> {
}
}

// parse B + C<~str,int> + D
// parse B + C<StrBuf,int> + D
fn parse_trait_ref_list(&mut self, ket: &token::Token) -> Vec<TraitRef> {
self.parse_seq_to_before_end(
ket,

@ -137,58 +137,62 @@ impl fmt::Show for Nonterminal {
}
}

pub fn binop_to_str(o: BinOp) -> ~str {
pub fn binop_to_str(o: BinOp) -> StrBuf {
match o {
PLUS => "+".to_owned(),
MINUS => "-".to_owned(),
STAR => "*".to_owned(),
SLASH => "/".to_owned(),
PERCENT => "%".to_owned(),
CARET => "^".to_owned(),
AND => "&".to_owned(),
OR => "|".to_owned(),
SHL => "<<".to_owned(),
SHR => ">>".to_owned()
PLUS => "+".to_strbuf(),
MINUS => "-".to_strbuf(),
STAR => "*".to_strbuf(),
SLASH => "/".to_strbuf(),
PERCENT => "%".to_strbuf(),
CARET => "^".to_strbuf(),
AND => "&".to_strbuf(),
OR => "|".to_strbuf(),
SHL => "<<".to_strbuf(),
SHR => ">>".to_strbuf()
}
}

pub fn to_str(t: &Token) -> ~str {
pub fn to_str(t: &Token) -> StrBuf {
match *t {
EQ => "=".to_owned(),
LT => "<".to_owned(),
LE => "<=".to_owned(),
EQEQ => "==".to_owned(),
NE => "!=".to_owned(),
GE => ">=".to_owned(),
GT => ">".to_owned(),
NOT => "!".to_owned(),
TILDE => "~".to_owned(),
OROR => "||".to_owned(),
ANDAND => "&&".to_owned(),
EQ => "=".to_strbuf(),
LT => "<".to_strbuf(),
LE => "<=".to_strbuf(),
EQEQ => "==".to_strbuf(),
NE => "!=".to_strbuf(),
GE => ">=".to_strbuf(),
GT => ">".to_strbuf(),
NOT => "!".to_strbuf(),
TILDE => "~".to_strbuf(),
OROR => "||".to_strbuf(),
ANDAND => "&&".to_strbuf(),
BINOP(op) => binop_to_str(op),
BINOPEQ(op) => binop_to_str(op) + "=",
BINOPEQ(op) => {
let mut s = binop_to_str(op);
s.push_str("=");
s
}

/* Structural symbols */
AT => "@".to_owned(),
DOT => ".".to_owned(),
DOTDOT => "..".to_owned(),
DOTDOTDOT => "...".to_owned(),
COMMA => ",".to_owned(),
SEMI => ";".to_owned(),
COLON => ":".to_owned(),
MOD_SEP => "::".to_owned(),
RARROW => "->".to_owned(),
LARROW => "<-".to_owned(),
DARROW => "<->".to_owned(),
FAT_ARROW => "=>".to_owned(),
LPAREN => "(".to_owned(),
RPAREN => ")".to_owned(),
LBRACKET => "[".to_owned(),
RBRACKET => "]".to_owned(),
LBRACE => "{".to_owned(),
RBRACE => "}".to_owned(),
POUND => "#".to_owned(),
DOLLAR => "$".to_owned(),
AT => "@".to_strbuf(),
DOT => ".".to_strbuf(),
DOTDOT => "..".to_strbuf(),
DOTDOTDOT => "...".to_strbuf(),
COMMA => ",".to_strbuf(),
SEMI => ";".to_strbuf(),
COLON => ":".to_strbuf(),
MOD_SEP => "::".to_strbuf(),
RARROW => "->".to_strbuf(),
LARROW => "<-".to_strbuf(),
DARROW => "<->".to_strbuf(),
FAT_ARROW => "=>".to_strbuf(),
LPAREN => "(".to_strbuf(),
RPAREN => ")".to_strbuf(),
LBRACKET => "[".to_strbuf(),
RBRACKET => "]".to_strbuf(),
LBRACE => "{".to_strbuf(),
RBRACE => "}".to_strbuf(),
POUND => "#".to_strbuf(),
DOLLAR => "$".to_strbuf(),

/* Literals */
LIT_CHAR(c) => {
@ -197,63 +201,64 @@ pub fn to_str(t: &Token) -> ~str {
res.push_char(c);
});
res.push_char('\'');
res.into_owned()
res
}
LIT_INT(i, t) => ast_util::int_ty_to_str(t, Some(i)),
LIT_UINT(u, t) => ast_util::uint_ty_to_str(t, Some(u)),
LIT_INT_UNSUFFIXED(i) => { i.to_str() }
LIT_INT_UNSUFFIXED(i) => { i.to_str().to_strbuf() }
LIT_FLOAT(s, t) => {
let mut body = StrBuf::from_str(get_ident(s).get());
if body.as_slice().ends_with(".") {
body.push_char('0'); // `10.f` is not a float literal
}
body.push_str(ast_util::float_ty_to_str(t));
body.into_owned()
body.push_str(ast_util::float_ty_to_str(t).as_slice());
body
}
LIT_FLOAT_UNSUFFIXED(s) => {
let mut body = StrBuf::from_str(get_ident(s).get());
if body.as_slice().ends_with(".") {
body.push_char('0'); // `10.f` is not a float literal
}
body.into_owned()
body
}
LIT_STR(s) => {
format!("\"{}\"", get_ident(s).get().escape_default())
(format!("\"{}\"", get_ident(s).get().escape_default())).to_strbuf()
}
LIT_STR_RAW(s, n) => {
format!("r{delim}\"{string}\"{delim}",
delim="#".repeat(n), string=get_ident(s))
(format!("r{delim}\"{string}\"{delim}",
delim="#".repeat(n), string=get_ident(s))).to_strbuf()
}

/* Name components */
IDENT(s, _) => get_ident(s).get().to_str(),
IDENT(s, _) => get_ident(s).get().to_strbuf(),
LIFETIME(s) => {
format!("'{}", get_ident(s))
(format!("'{}", get_ident(s))).to_strbuf()
}
UNDERSCORE => "_".to_owned(),
UNDERSCORE => "_".to_strbuf(),

/* Other */
DOC_COMMENT(s) => get_ident(s).get().to_str(),
EOF => "<eof>".to_owned(),
DOC_COMMENT(s) => get_ident(s).get().to_strbuf(),
EOF => "<eof>".to_strbuf(),
INTERPOLATED(ref nt) => {
match nt {
&NtExpr(e) => ::print::pprust::expr_to_str(e),
&NtMeta(e) => ::print::pprust::meta_item_to_str(e),
_ => {
"an interpolated ".to_owned() +
match *nt {
NtItem(..) => "item".to_owned(),
NtBlock(..) => "block".to_owned(),
NtStmt(..) => "statement".to_owned(),
NtPat(..) => "pattern".to_owned(),
NtMeta(..) => fail!("should have been handled"),
NtExpr(..) => fail!("should have been handled above"),
NtTy(..) => "type".to_owned(),
NtIdent(..) => "identifier".to_owned(),
NtPath(..) => "path".to_owned(),
NtTT(..) => "tt".to_owned(),
NtMatchers(..) => "matcher sequence".to_owned()
}
let mut s = "an interpolated ".to_strbuf();
match *nt {
NtItem(..) => s.push_str("item"),
NtBlock(..) => s.push_str("block"),
NtStmt(..) => s.push_str("statement"),
NtPat(..) => s.push_str("pattern"),
NtMeta(..) => fail!("should have been handled"),
NtExpr(..) => fail!("should have been handled above"),
NtTy(..) => s.push_str("type"),
NtIdent(..) => s.push_str("identifier"),
NtPath(..) => s.push_str("path"),
NtTT(..) => s.push_str("tt"),
NtMatchers(..) => s.push_str("matcher sequence")
};
s
}
}
}
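
All of `token::to_str` now yields `StrBuf`, which is why the `+`-based arms (`BINOPEQ` and the interpolated-nonterminal description) are rewritten around a mutable buffer and `push_str`. A small usage sketch, hypothetical but in the same dialect:

// sketch: token texts come back as StrBuf; compare through as_slice()
assert!(to_str(&FAT_ARROW).as_slice() == "=>");
assert!(to_str(&BINOP(PLUS)).as_slice() == "+");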

@ -84,7 +84,7 @@ pub struct BeginToken {

#[deriving(Clone)]
pub enum Token {
String(~str, int),
String(StrBuf, int),
Break(BreakToken),
Begin(BeginToken),
End,
@ -109,13 +109,13 @@ impl Token {
}
}

pub fn tok_str(t: Token) -> ~str {
pub fn tok_str(t: Token) -> StrBuf {
match t {
String(s, len) => return format!("STR({},{})", s, len),
Break(_) => return "BREAK".to_owned(),
Begin(_) => return "BEGIN".to_owned(),
End => return "END".to_owned(),
Eof => return "EOF".to_owned()
String(s, len) => return format!("STR({},{})", s, len).to_strbuf(),
Break(_) => return "BREAK".to_strbuf(),
Begin(_) => return "BEGIN".to_strbuf(),
End => return "END".to_strbuf(),
Eof => return "EOF".to_strbuf()
}
}

@ -124,7 +124,7 @@ pub fn buf_str(toks: Vec<Token>,
left: uint,
right: uint,
lim: uint)
-> ~str {
-> StrBuf {
let n = toks.len();
assert_eq!(n, szs.len());
let mut i = left;
@ -140,7 +140,7 @@ pub fn buf_str(toks: Vec<Token>,
i %= n;
}
s.push_char(']');
return s.into_owned();
return s.into_strbuf();
}

pub enum PrintStackBreak {
@ -585,7 +585,7 @@ impl Printer {
assert_eq!(l, len);
// assert!(l <= space);
self.space -= len;
self.print_str(s)
self.print_str(s.as_slice())
}
Eof => {
// Eof should never get here.
@ -625,15 +625,15 @@ pub fn end(p: &mut Printer) -> io::IoResult<()> { p.pretty_print(End) }
pub fn eof(p: &mut Printer) -> io::IoResult<()> { p.pretty_print(Eof) }

pub fn word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
p.pretty_print(String(/* bad */ wrd.to_str(), wrd.len() as int))
p.pretty_print(String(/* bad */ wrd.to_strbuf(), wrd.len() as int))
}

pub fn huge_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
p.pretty_print(String(/* bad */ wrd.to_str(), SIZE_INFINITY))
p.pretty_print(String(/* bad */ wrd.to_strbuf(), SIZE_INFINITY))
}

pub fn zero_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
p.pretty_print(String(/* bad */ wrd.to_str(), 0))
p.pretty_print(String(/* bad */ wrd.to_strbuf(), 0))
}

pub fn spaces(p: &mut Printer, n: uint) -> io::IoResult<()> {
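
The printer's word helpers keep taking `&str`; each now allocates the owned `StrBuf` stored in the `String` token, and the `/* bad */` comments flag that extra copy. A hypothetical caller is unchanged; this sketch (not from the patch) just exercises the signatures shown above:

// sketch: callers still pass slices; the printer owns the StrBuf internally
fn emit_fn_keyword(p: &mut Printer) -> io::IoResult<()> {
    try!(word(p, "fn"));
    spaces(p, 1)
}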

@ -97,7 +97,7 @@ pub static default_columns: uint = 78u;
pub fn print_crate<'a>(cm: &'a CodeMap,
span_diagnostic: &diagnostic::SpanHandler,
krate: &ast::Crate,
filename: ~str,
filename: StrBuf,
input: &mut io::Reader,
out: Box<io::Writer>,
ann: &'a PpAnn,
@ -132,7 +132,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap,
eof(&mut s.s)
}

pub fn to_str(f: |&mut State| -> IoResult<()>) -> ~str {
pub fn to_str(f: |&mut State| -> IoResult<()>) -> StrBuf {
let mut s = rust_printer(box MemWriter::new());
f(&mut s).unwrap();
eof(&mut s.s).unwrap();
@ -143,65 +143,65 @@ pub fn to_str(f: |&mut State| -> IoResult<()>) -> ~str {
let (_, wr): (uint, Box<MemWriter>) = cast::transmute_copy(&s.s.out);
let result = str::from_utf8_owned(wr.get_ref().to_owned()).unwrap();
cast::forget(wr);
result
result.to_strbuf()
}
}
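
Every pprust `*_to_str` helper below funnels through this `to_str`, which runs the printing closure against an in-memory writer and now hands the bytes back as a `StrBuf`. A hypothetical call site, assuming `expr` is some parsed `&ast::Expr` whose source read `a + b`:

// sketch: pretty-printing now yields StrBuf, so compare via as_slice()
let printed = expr_to_str(expr);
assert!(printed.as_slice() == "a + b");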

pub fn ty_to_str(ty: &ast::Ty) -> ~str {
pub fn ty_to_str(ty: &ast::Ty) -> StrBuf {
to_str(|s| s.print_type(ty))
}

pub fn pat_to_str(pat: &ast::Pat) -> ~str {
pub fn pat_to_str(pat: &ast::Pat) -> StrBuf {
to_str(|s| s.print_pat(pat))
}

pub fn expr_to_str(e: &ast::Expr) -> ~str {
pub fn expr_to_str(e: &ast::Expr) -> StrBuf {
to_str(|s| s.print_expr(e))
}

pub fn lifetime_to_str(e: &ast::Lifetime) -> ~str {
pub fn lifetime_to_str(e: &ast::Lifetime) -> StrBuf {
to_str(|s| s.print_lifetime(e))
}

pub fn tt_to_str(tt: &ast::TokenTree) -> ~str {
pub fn tt_to_str(tt: &ast::TokenTree) -> StrBuf {
to_str(|s| s.print_tt(tt))
}

pub fn tts_to_str(tts: &[ast::TokenTree]) -> ~str {
pub fn tts_to_str(tts: &[ast::TokenTree]) -> StrBuf {
to_str(|s| s.print_tts(&tts))
}

pub fn stmt_to_str(stmt: &ast::Stmt) -> ~str {
pub fn stmt_to_str(stmt: &ast::Stmt) -> StrBuf {
to_str(|s| s.print_stmt(stmt))
}

pub fn item_to_str(i: &ast::Item) -> ~str {
pub fn item_to_str(i: &ast::Item) -> StrBuf {
to_str(|s| s.print_item(i))
}

pub fn generics_to_str(generics: &ast::Generics) -> ~str {
pub fn generics_to_str(generics: &ast::Generics) -> StrBuf {
to_str(|s| s.print_generics(generics))
}

pub fn ty_method_to_str(p: &ast::TypeMethod) -> ~str {
pub fn ty_method_to_str(p: &ast::TypeMethod) -> StrBuf {
to_str(|s| s.print_ty_method(p))
}

pub fn method_to_str(p: &ast::Method) -> ~str {
pub fn method_to_str(p: &ast::Method) -> StrBuf {
to_str(|s| s.print_method(p))
}

pub fn fn_block_to_str(p: &ast::FnDecl) -> ~str {
pub fn fn_block_to_str(p: &ast::FnDecl) -> StrBuf {
to_str(|s| s.print_fn_block_args(p))
}

pub fn path_to_str(p: &ast::Path) -> ~str {
pub fn path_to_str(p: &ast::Path) -> StrBuf {
to_str(|s| s.print_path(p, false))
}

pub fn fun_to_str(decl: &ast::FnDecl, fn_style: ast::FnStyle, name: ast::Ident,
opt_explicit_self: Option<ast::ExplicitSelf_>,
generics: &ast::Generics) -> ~str {
generics: &ast::Generics) -> StrBuf {
to_str(|s| {
try!(s.print_fn(decl, Some(fn_style), abi::Rust,
name, generics, opt_explicit_self, ast::Inherited));
@ -210,7 +210,7 @@ pub fn fun_to_str(decl: &ast::FnDecl, fn_style: ast::FnStyle, name: ast::Ident,
})
}

pub fn block_to_str(blk: &ast::Block) -> ~str {
pub fn block_to_str(blk: &ast::Block) -> StrBuf {
to_str(|s| {
// containing cbox, will be closed by print-block at }
try!(s.cbox(indent_unit));
@ -220,30 +220,30 @@ pub fn block_to_str(blk: &ast::Block) -> ~str {
})
}

pub fn meta_item_to_str(mi: &ast::MetaItem) -> ~str {
pub fn meta_item_to_str(mi: &ast::MetaItem) -> StrBuf {
to_str(|s| s.print_meta_item(mi))
}

pub fn attribute_to_str(attr: &ast::Attribute) -> ~str {
pub fn attribute_to_str(attr: &ast::Attribute) -> StrBuf {
to_str(|s| s.print_attribute(attr))
}

pub fn lit_to_str(l: &ast::Lit) -> ~str {
pub fn lit_to_str(l: &ast::Lit) -> StrBuf {
to_str(|s| s.print_literal(l))
}

pub fn explicit_self_to_str(explicit_self: ast::ExplicitSelf_) -> ~str {
pub fn explicit_self_to_str(explicit_self: ast::ExplicitSelf_) -> StrBuf {
to_str(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {}))
}

pub fn variant_to_str(var: &ast::Variant) -> ~str {
pub fn variant_to_str(var: &ast::Variant) -> StrBuf {
to_str(|s| s.print_variant(var))
}

pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> ~str {
pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> StrBuf {
match vis {
ast::Public => format!("pub {}", s),
ast::Inherited => s.to_owned()
ast::Public => format!("pub {}", s).to_strbuf(),
ast::Inherited => s.to_strbuf()
}
}
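
At this stage of the migration `format!` still produces a `~str`, which is why `.to_strbuf()` is appended at each boundary where a `StrBuf` is required. The same conversion in miniature (variable names made up):

// illustrative: format! -> ~str, converted at the StrBuf boundary
let qualified = format!("pub {}", "struct").to_strbuf();
assert!(qualified.as_slice() == "pub struct");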

@ -366,10 +366,10 @@ impl<'a> State<'a> {

// Synthesizes a comment that was not textually present in the original source
// file.
pub fn synth_comment(&mut self, text: ~str) -> IoResult<()> {
pub fn synth_comment(&mut self, text: StrBuf) -> IoResult<()> {
try!(word(&mut self.s, "/*"));
try!(space(&mut self.s));
try!(word(&mut self.s, text));
try!(word(&mut self.s, text.as_slice()));
try!(space(&mut self.s));
word(&mut self.s, "*/")
}
@ -552,7 +552,8 @@ impl<'a> State<'a> {
self.end() // end the outer fn box
}
ast::ForeignItemStatic(t, m) => {
try!(self.head(visibility_qualified(item.vis, "static")));
try!(self.head(visibility_qualified(item.vis,
"static").as_slice()));
if m {
try!(self.word_space("mut"));
}
@ -573,7 +574,8 @@ impl<'a> State<'a> {
try!(self.ann.pre(self, NodeItem(item)));
match item.node {
ast::ItemStatic(ty, m, expr) => {
try!(self.head(visibility_qualified(item.vis, "static")));
try!(self.head(visibility_qualified(item.vis,
"static").as_slice()));
if m == ast::MutMutable {
try!(self.word_space("mut"));
}
@ -602,7 +604,8 @@ impl<'a> State<'a> {
try!(self.print_block_with_attrs(body, item.attrs.as_slice()));
}
ast::ItemMod(ref _mod) => {
try!(self.head(visibility_qualified(item.vis, "mod")));
try!(self.head(visibility_qualified(item.vis,
"mod").as_slice()));
try!(self.print_ident(item.ident));
try!(self.nbsp());
try!(self.bopen());
@ -619,7 +622,8 @@ impl<'a> State<'a> {
ast::ItemTy(ty, ref params) => {
try!(self.ibox(indent_unit));
try!(self.ibox(0u));
try!(self.word_nbsp(visibility_qualified(item.vis, "type")));
try!(self.word_nbsp(visibility_qualified(item.vis,
"type").as_slice()));
try!(self.print_ident(item.ident));
try!(self.print_generics(params));
try!(self.end()); // end the inner ibox
@ -643,12 +647,14 @@ impl<'a> State<'a> {
if struct_def.is_virtual {
try!(self.word_space("virtual"));
}
try!(self.head(visibility_qualified(item.vis, "struct")));
try!(self.head(visibility_qualified(item.vis,
"struct").as_slice()));
try!(self.print_struct(struct_def, generics, item.ident, item.span));
}

ast::ItemImpl(ref generics, ref opt_trait, ty, ref methods) => {
try!(self.head(visibility_qualified(item.vis, "impl")));
try!(self.head(visibility_qualified(item.vis,
"impl").as_slice()));
if generics.is_parameterized() {
try!(self.print_generics(generics));
try!(space(&mut self.s));
@ -674,7 +680,8 @@ impl<'a> State<'a> {
try!(self.bclose(item.span));
}
ast::ItemTrait(ref generics, ref sized, ref traits, ref methods) => {
try!(self.head(visibility_qualified(item.vis, "trait")));
try!(self.head(visibility_qualified(item.vis,
"trait").as_slice()));
try!(self.print_ident(item.ident));
try!(self.print_generics(generics));
if *sized == ast::DynSize {
@ -723,7 +730,7 @@ impl<'a> State<'a> {
generics: &ast::Generics, ident: ast::Ident,
span: codemap::Span,
visibility: ast::Visibility) -> IoResult<()> {
try!(self.head(visibility_qualified(visibility, "enum")));
try!(self.head(visibility_qualified(visibility, "enum").as_slice()));
try!(self.print_ident(ident));
try!(self.print_generics(generics));
try!(space(&mut self.s));
@ -825,7 +832,7 @@ impl<'a> State<'a> {
match *tt {
ast::TTDelim(ref tts) => self.print_tts(&(tts.as_slice())),
ast::TTTok(_, ref tk) => {
word(&mut self.s, parse::token::to_str(tk))
word(&mut self.s, parse::token::to_str(tk).as_slice())
}
ast::TTSeq(_, ref tts, ref sep, zerok) => {
try!(word(&mut self.s, "$("));
@ -835,7 +842,8 @@ impl<'a> State<'a> {
try!(word(&mut self.s, ")"));
match *sep {
Some(ref tk) => {
try!(word(&mut self.s, parse::token::to_str(tk)));
try!(word(&mut self.s,
parse::token::to_str(tk).as_slice()));
}
None => ()
}
@ -2189,7 +2197,7 @@ impl<'a> State<'a> {
try!(self.maybe_print_comment(lit.span.lo));
match self.next_lit(lit.span.lo) {
Some(ref ltrl) => {
return word(&mut self.s, (*ltrl).lit);
return word(&mut self.s, (*ltrl).lit.as_slice());
}
_ => ()
}
@ -2202,16 +2210,19 @@ impl<'a> State<'a> {
word(&mut self.s, res.into_owned())
}
ast::LitInt(i, t) => {
word(&mut self.s, ast_util::int_ty_to_str(t, Some(i)))
word(&mut self.s,
ast_util::int_ty_to_str(t, Some(i)).as_slice())
}
ast::LitUint(u, t) => {
word(&mut self.s, ast_util::uint_ty_to_str(t, Some(u)))
word(&mut self.s,
ast_util::uint_ty_to_str(t, Some(u)).as_slice())
}
ast::LitIntUnsuffixed(i) => {
word(&mut self.s, format!("{}", i))
}
ast::LitFloat(ref f, t) => {
word(&mut self.s, f.get() + ast_util::float_ty_to_str(t))
word(&mut self.s,
f.get() + ast_util::float_ty_to_str(t).as_slice())
}
ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()),
ast::LitNil => word(&mut self.s, "()"),
@ -2266,7 +2277,7 @@ impl<'a> State<'a> {
comments::Mixed => {
assert_eq!(cmnt.lines.len(), 1u);
try!(zerobreak(&mut self.s));
try!(word(&mut self.s, *cmnt.lines.get(0)));
try!(word(&mut self.s, cmnt.lines.get(0).as_slice()));
zerobreak(&mut self.s)
}
comments::Isolated => {
@ -2275,7 +2286,7 @@ impl<'a> State<'a> {
// Don't print empty lines because they will end up as trailing
// whitespace
if !line.is_empty() {
try!(word(&mut self.s, *line));
try!(word(&mut self.s, line.as_slice()));
}
try!(hardbreak(&mut self.s));
}
@ -2284,13 +2295,13 @@ impl<'a> State<'a> {
comments::Trailing => {
try!(word(&mut self.s, " "));
if cmnt.lines.len() == 1u {
try!(word(&mut self.s, *cmnt.lines.get(0)));
try!(word(&mut self.s, cmnt.lines.get(0).as_slice()));
hardbreak(&mut self.s)
} else {
try!(self.ibox(0u));
for line in cmnt.lines.iter() {
if !line.is_empty() {
try!(word(&mut self.s, *line));
try!(word(&mut self.s, line.as_slice()));
}
try!(hardbreak(&mut self.s));
}
@ -2300,7 +2311,7 @@ impl<'a> State<'a> {
comments::BlankLine => {
// We need to do at least one, possibly two hardbreaks.
let is_semi = match self.s.last_token() {
pp::String(s, _) => ";" == s,
pp::String(s, _) => ";" == s.as_slice(),
_ => false
};
if is_semi || self.is_begin() || self.is_end() {
@ -2371,8 +2382,9 @@ impl<'a> State<'a> {
opt_fn_style: Option<ast::FnStyle>,
abi: abi::Abi,
vis: ast::Visibility) -> IoResult<()> {
try!(word(&mut self.s, visibility_qualified(vis, "")));
try!(word(&mut self.s, visibility_qualified(vis, "").as_slice()));
try!(self.print_opt_fn_style(opt_fn_style));

if abi != abi::Rust {
try!(self.word_nbsp("extern"));
try!(self.word_nbsp(abi.to_str()));
@ -2420,7 +2432,7 @@ mod test {
let generics = ast_util::empty_generics();
assert_eq!(&fun_to_str(&decl, ast::NormalFn, abba_ident,
None, &generics),
&"fn abba()".to_owned());
&"fn abba()".to_strbuf());
}

#[test]
@ -2438,6 +2450,6 @@ mod test {
});

let varstr = variant_to_str(&var);
assert_eq!(&varstr,&"pub principal_skinner".to_owned());
assert_eq!(&varstr,&"pub principal_skinner".to_strbuf());
}
}

@ -92,7 +92,7 @@ impl<T: TotalEq + Hash + Clone + 'static> Interner<T> {

#[deriving(Clone, Eq, Hash, Ord)]
pub struct RcStr {
string: Rc<~str>,
string: Rc<StrBuf>,
}

impl TotalEq for RcStr {}
@ -106,7 +106,7 @@ impl TotalOrd for RcStr {
impl Str for RcStr {
#[inline]
fn as_slice<'a>(&'a self) -> &'a str {
let s: &'a str = *self.string;
let s: &'a str = self.string.as_slice();
s
}
}
@ -121,7 +121,7 @@ impl fmt::Show for RcStr {
impl RcStr {
pub fn new(string: &str) -> RcStr {
RcStr {
string: Rc::new(string.to_owned()),
string: Rc::new(string.to_strbuf()),
}
}
}
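
The interner's `RcStr` now holds `Rc<StrBuf>`; since that no longer derefs straight to `&str`, the `Str` impl reaches through `as_slice()` on the inner buffer. The public surface is unchanged, as this small sketch (not from the patch) shows:

// sketch: construction and slice access work as before
let interned = RcStr::new("swap");
assert!(interned.as_slice() == "swap");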

@ -16,17 +16,21 @@ use parse::parser::Parser;
use parse::token;

// map a string to tts, using a made-up filename:
pub fn string_to_tts(source_str: ~str) -> Vec<ast::TokenTree> {
pub fn string_to_tts(source_str: StrBuf) -> Vec<ast::TokenTree> {
let ps = new_parse_sess();
filemap_to_tts(&ps, string_to_filemap(&ps, source_str,"bogofile".to_owned()))
filemap_to_tts(&ps,
string_to_filemap(&ps, source_str, "bogofile".to_strbuf()))
}

// map string to parser (via tts)
pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: ~str) -> Parser<'a> {
new_parser_from_source_str(ps, Vec::new(), "bogofile".to_owned(), source_str)
pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: StrBuf) -> Parser<'a> {
new_parser_from_source_str(ps,
Vec::new(),
"bogofile".to_strbuf(),
source_str)
}

fn with_error_checking_parse<T>(s: ~str, f: |&mut Parser| -> T) -> T {
fn with_error_checking_parse<T>(s: StrBuf, f: |&mut Parser| -> T) -> T {
let ps = new_parse_sess();
let mut p = string_to_parser(&ps, s);
let x = f(&mut p);
@ -35,28 +39,28 @@ fn with_error_checking_parse<T>(s: ~str, f: |&mut Parser| -> T) -> T {
}

// parse a string, return a crate.
pub fn string_to_crate (source_str : ~str) -> ast::Crate {
pub fn string_to_crate (source_str : StrBuf) -> ast::Crate {
with_error_checking_parse(source_str, |p| {
p.parse_crate_mod()
})
}

// parse a string, return an expr
pub fn string_to_expr (source_str : ~str) -> @ast::Expr {
pub fn string_to_expr (source_str : StrBuf) -> @ast::Expr {
with_error_checking_parse(source_str, |p| {
p.parse_expr()
})
}

// parse a string, return an item
pub fn string_to_item (source_str : ~str) -> Option<@ast::Item> {
pub fn string_to_item (source_str : StrBuf) -> Option<@ast::Item> {
with_error_checking_parse(source_str, |p| {
p.parse_item(Vec::new())
})
}

// parse a string, return a stmt
pub fn string_to_stmt(source_str : ~str) -> @ast::Stmt {
pub fn string_to_stmt(source_str : StrBuf) -> @ast::Stmt {
with_error_checking_parse(source_str, |p| {
p.parse_stmt(Vec::new())
})
@ -64,7 +68,7 @@ pub fn string_to_stmt(source_str : ~str) -> @ast::Stmt {

// parse a string, return a pat. Uses "irrefutable"... which doesn't
// (currently) affect parsing.
pub fn string_to_pat(source_str: ~str) -> @ast::Pat {
pub fn string_to_pat(source_str: StrBuf) -> @ast::Pat {
string_to_parser(&new_parse_sess(), source_str).parse_pat()
}
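
With the testing helpers converted, test code builds its inputs with `.to_strbuf()` end to end. A short sketch of the updated entry points, using the same snippet literals that appear in the tests above:

// sketch: the helpers now take owned StrBuf inputs
let _expr = string_to_expr("3 + 4".to_strbuf());
let _item = string_to_item("fn a (b : int) { b; }".to_strbuf());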