Use @str instead of @~str in libsyntax and librustc. Fixes #5048.

This almost removes the StringRef wrapper, since all strings are
Equiv-alent now. Removes a lot of `/* bad */ copy *`'s, and converts
several things to be &'static str (the lint table and the intrinsics
table).

There are many instances of .to_managed(), unfortunately.
Huon Wilson 2013-06-13 03:02:55 +10:00
parent 641910dc13
commit 096f6f56a8
94 changed files with 1259 additions and 1283 deletions
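For context on the shape of the change (this is not code from the commit): `@~str` was a managed box pointing at a separately allocated owned string, whereas `@str` puts the string data directly in the managed allocation; and once string lookups go through `Equiv`, a map keyed by strings can be queried with any string-like borrow, which is what lets the `StringRef` wrapper disappear. A minimal modern-Rust sketch of those two ideas, with `Rc` standing in for the old `@` managed pointer and `Borrow`-based `HashMap` lookup standing in for `Equiv`:

```rust
use std::collections::HashMap;
use std::rc::Rc;

fn main() {
    // `@~str` was roughly this shape: a shared pointer to a separately
    // allocated owned string (two allocations between you and the bytes).
    let doubly_boxed: Rc<String> = Rc::new(String::from("target_os"));

    // `@str` is roughly this shape: a shared pointer whose payload *is*
    // the string data (a single allocation).
    let singly_boxed: Rc<str> = Rc::from("target_os");
    assert_eq!(doubly_boxed.as_str(), &*singly_boxed);

    // The old `find_equiv` corresponds to today's Borrow-based lookup:
    // a map keyed by owned strings can be queried with a plain `&str`,
    // so no wrapper type (the old `StringRef`) and no temporary key
    // allocation is needed.
    let mut cfg: HashMap<String, bool> = HashMap::new();
    cfg.insert(String::from("target_os"), true);
    assert_eq!(cfg.get("target_os"), Some(&true));

    println!("{} / {}", doubly_boxed, singly_boxed);
}
```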


@ -345,13 +345,13 @@ pub fn check_variants_T<T:Copy>(crate: @ast::crate,
intr,
span_handler,
crate2,
fname.to_str(),
fname.to_managed(),
rdr,
a,
pprust::no_ann(),
false)
};
@string
string.to_managed()
};
match cx.mode {
tm_converge => check_roundtrip_convergence(str3, 1),
@ -361,9 +361,9 @@ pub fn check_variants_T<T:Copy>(crate: @ast::crate,
thing_label,
i,
j);
let safe_to_run = !(content_is_dangerous_to_run(*str3)
let safe_to_run = !(content_is_dangerous_to_run(str3)
|| has_raw_pointers(crate2));
check_whole_compiler(*str3,
check_whole_compiler(str3,
&Path(file_label),
safe_to_run);
}
@ -502,15 +502,15 @@ pub fn check_compiling(filename: &Path) -> happiness {
}
pub fn parse_and_print(code: @~str) -> ~str {
pub fn parse_and_print(code: @str) -> @str {
let filename = Path("tmp.rs");
let sess = parse::new_parse_sess(option::None);
write_file(&filename, *code);
let crate = parse::parse_crate_from_source_str(filename.to_str(),
write_file(&filename, code);
let crate = parse::parse_crate_from_source_str(filename.to_str().to_managed(),
code,
~[],
sess);
do io::with_str_reader(*code) |rdr| {
do io::with_str_reader(code) |rdr| {
let filename = filename.to_str();
do as_str |a| {
pprust::print_crate(sess.cm,
@ -518,12 +518,12 @@ pub fn parse_and_print(code: @~str) -> ~str {
token::mk_fake_ident_interner(),
copy sess.span_diagnostic,
crate,
filename.to_str(),
filename.to_managed(),
rdr,
a,
pprust::no_ann(),
false)
}
}.to_managed()
}
}
@ -598,15 +598,15 @@ pub fn file_might_not_converge(filename: &Path) -> bool {
return false;
}
pub fn check_roundtrip_convergence(code: @~str, maxIters: uint) {
pub fn check_roundtrip_convergence(code: @str, maxIters: uint) {
let mut i = 0u;
let mut newv = code;
let mut oldv = code;
while i < maxIters {
oldv = newv;
if content_might_not_converge(*oldv) { return; }
newv = @parse_and_print(oldv);
if content_might_not_converge(oldv) { return; }
newv = parse_and_print(oldv);
if oldv == newv { break; }
i += 1u;
}
@ -615,8 +615,8 @@ pub fn check_roundtrip_convergence(code: @~str, maxIters: uint) {
error!("Converged after %u iterations", i);
} else {
error!("Did not converge after %u iterations!", i);
write_file(&Path("round-trip-a.rs"), *oldv);
write_file(&Path("round-trip-b.rs"), *newv);
write_file(&Path("round-trip-a.rs"), oldv);
write_file(&Path("round-trip-b.rs"), newv);
run::process_status("diff", [~"-w", ~"-u", ~"round-trip-a.rs", ~"round-trip-b.rs"]);
fail!("Mismatch");
}
@ -626,8 +626,8 @@ pub fn check_convergence(files: &[Path]) {
error!("pp convergence tests: %u files", files.len());
for files.each |file| {
if !file_might_not_converge(file) {
let s = @result::get(&io::read_whole_file_str(file));
if !content_might_not_converge(*s) {
let s = result::get(&io::read_whole_file_str(file)).to_managed();
if !content_might_not_converge(s) {
error!("pp converge: %s", file.to_str());
// Change from 7u to 2u once
// https://github.com/mozilla/rust/issues/850 is fixed
@ -646,14 +646,14 @@ pub fn check_variants(files: &[Path], cx: Context) {
loop;
}
let s = @result::get(&io::read_whole_file_str(file));
if contains(*s, "#") {
let s = result::get(&io::read_whole_file_str(file)).to_managed();
if s.contains_char('#') {
loop; // Macros are confusing
}
if cx.mode == tm_converge && content_might_not_converge(*s) {
if cx.mode == tm_converge && content_might_not_converge(s) {
loop;
}
if cx.mode == tm_run && content_is_dangerous_to_compile(*s) {
if cx.mode == tm_run && content_is_dangerous_to_compile(s) {
loop;
}
@ -661,11 +661,11 @@ pub fn check_variants(files: &[Path], cx: Context) {
error!("check_variants: %?", file_str);
let sess = parse::new_parse_sess(None);
let crate = parse::parse_crate_from_source_str(file_str.to_str(),
let crate = parse::parse_crate_from_source_str(file_str.to_managed(),
s,
~[],
sess);
io::with_str_reader(*s, |rdr| {
io::with_str_reader(s, |rdr| {
let file_str = file_str.to_str();
error!("%s",
as_str(|a| {
@ -675,7 +675,7 @@ pub fn check_variants(files: &[Path], cx: Context) {
token::mk_fake_ident_interner(),
copy sess.span_diagnostic,
crate,
file_str.to_str(),
file_str.to_managed(),
rdr,
a,
pprust::no_ann(),


@ -493,16 +493,16 @@ pub fn build_link_meta(sess: Session,
let linkage_metas = attr::find_linkage_metas(c.node.attrs);
attr::require_unique_names(sess.diagnostic(), linkage_metas);
for linkage_metas.each |meta| {
if *attr::get_meta_item_name(*meta) == ~"name" {
if "name" == attr::get_meta_item_name(*meta) {
match attr::get_meta_item_value_str(*meta) {
// Changing attr would avoid the need for the copy
// here
Some(v) => { name = Some(v.to_managed()); }
Some(v) => { name = Some(v); }
None => cmh_items.push(*meta)
}
} else if *attr::get_meta_item_name(*meta) == ~"vers" {
} else if "vers" == attr::get_meta_item_name(*meta) {
match attr::get_meta_item_value_str(*meta) {
Some(v) => { vers = Some(v.to_managed()); }
Some(v) => { vers = Some(v); }
None => cmh_items.push(*meta)
}
} else { cmh_items.push(*meta); }
@ -518,7 +518,7 @@ pub fn build_link_meta(sess: Session,
// This calculates CMH as defined above
fn crate_meta_extras_hash(symbol_hasher: &mut hash::State,
cmh_items: ~[@ast::meta_item],
dep_hashes: ~[~str]) -> @str {
dep_hashes: ~[@str]) -> @str {
fn len_and_str(s: &str) -> ~str {
fmt!("%u_%s", s.len(), s)
}
@ -532,14 +532,14 @@ pub fn build_link_meta(sess: Session,
fn hash(symbol_hasher: &mut hash::State, m: &@ast::meta_item) {
match m.node {
ast::meta_name_value(key, value) => {
write_string(symbol_hasher, len_and_str(*key));
write_string(symbol_hasher, len_and_str(key));
write_string(symbol_hasher, len_and_str_lit(value));
}
ast::meta_word(name) => {
write_string(symbol_hasher, len_and_str(*name));
write_string(symbol_hasher, len_and_str(name));
}
ast::meta_list(name, ref mis) => {
write_string(symbol_hasher, len_and_str(*name));
write_string(symbol_hasher, len_and_str(name));
for mis.each |m_| {
hash(symbol_hasher, m_);
}
@ -706,7 +706,7 @@ pub fn mangle(sess: Session, ss: path) -> ~str {
for ss.each |s| {
match *s { path_name(s) | path_mod(s) => {
let sani = sanitize(*sess.str_of(s));
let sani = sanitize(sess.str_of(s));
n += fmt!("%u%s", sani.len(), sani);
} }
}
@ -912,7 +912,7 @@ pub fn link_args(sess: Session,
}
let ula = cstore::get_used_link_args(cstore);
for ula.each |arg| { args.push(/*bad*/copy *arg); }
for ula.each |arg| { args.push(arg.to_owned()); }
// Add all the link args for external crates.
do cstore::iter_crate_data(cstore) |crate_num, _| {


@ -55,31 +55,31 @@ pub enum pp_mode {
* The name used for source code that doesn't originate in a file
* (e.g. source from stdin or a string)
*/
pub fn anon_src() -> ~str { ~"<anon>" }
pub fn anon_src() -> @str { @"<anon>" }
pub fn source_name(input: &input) -> ~str {
pub fn source_name(input: &input) -> @str {
match *input {
file_input(ref ifile) => ifile.to_str(),
file_input(ref ifile) => ifile.to_str().to_managed(),
str_input(_) => anon_src()
}
}
pub fn default_configuration(sess: Session, argv0: @~str, input: &input) ->
pub fn default_configuration(sess: Session, argv0: @str, input: &input) ->
ast::crate_cfg {
let libc = match sess.targ_cfg.os {
session::os_win32 => ~"msvcrt.dll",
session::os_macos => ~"libc.dylib",
session::os_linux => ~"libc.so.6",
session::os_android => ~"libc.so",
session::os_freebsd => ~"libc.so.7"
session::os_win32 => @"msvcrt.dll",
session::os_macos => @"libc.dylib",
session::os_linux => @"libc.so.6",
session::os_android => @"libc.so",
session::os_freebsd => @"libc.so.7"
// _ { "libc.so" }
};
let tos = match sess.targ_cfg.os {
session::os_win32 => ~"win32",
session::os_macos => ~"macos",
session::os_linux => ~"linux",
session::os_android => ~"android",
session::os_freebsd => ~"freebsd"
session::os_win32 => @"win32",
session::os_macos => @"macos",
session::os_linux => @"linux",
session::os_android => @"android",
session::os_freebsd => @"freebsd"
// _ { "libc.so" }
};
@ -88,47 +88,47 @@ pub fn default_configuration(sess: Session, argv0: @~str, input: &input) ->
// ARM is bi-endian, however using NDK seems to default
// to little-endian unless a flag is provided.
let (end,arch,wordsz) = match sess.targ_cfg.arch {
abi::X86 => (~"little",~"x86",~"32"),
abi::X86_64 => (~"little",~"x86_64",~"64"),
abi::Arm => (~"little",~"arm",~"32"),
abi::Mips => (~"big",~"mips",~"32")
abi::X86 => (@"little",@"x86",@"32"),
abi::X86_64 => (@"little",@"x86_64",@"64"),
abi::Arm => (@"little",@"arm",@"32"),
abi::Mips => (@"big",@"mips",@"32")
};
return ~[ // Target bindings.
attr::mk_word_item(@os::FAMILY.to_owned()),
mk(@~"target_os", @tos),
mk(@~"target_family", @os::FAMILY.to_owned()),
mk(@~"target_arch", @arch),
mk(@~"target_endian", @end),
mk(@~"target_word_size", @wordsz),
mk(@~"target_libc", @libc),
attr::mk_word_item(os::FAMILY.to_managed()),
mk(@"target_os", tos),
mk(@"target_family", os::FAMILY.to_managed()),
mk(@"target_arch", arch),
mk(@"target_endian", end),
mk(@"target_word_size", wordsz),
mk(@"target_libc", libc),
// Build bindings.
mk(@~"build_compiler", argv0),
mk(@~"build_input", @source_name(input))];
mk(@"build_compiler", argv0),
mk(@"build_input", source_name(input))];
}
pub fn append_configuration(cfg: ast::crate_cfg, name: ~str)
pub fn append_configuration(cfg: ast::crate_cfg, name: @str)
-> ast::crate_cfg {
if attr::contains_name(cfg, name) {
cfg
} else {
vec::append_one(cfg, attr::mk_word_item(@name))
vec::append_one(cfg, attr::mk_word_item(name))
}
}
pub fn build_configuration(sess: Session, argv0: @~str, input: &input) ->
pub fn build_configuration(sess: Session, argv0: @str, input: &input) ->
ast::crate_cfg {
// Combine the configuration requested by the session (command line) with
// some default and generated configuration items
let default_cfg = default_configuration(sess, argv0, input);
let user_cfg = /*bad*/copy sess.opts.cfg;
// If the user wants a test runner, then add the test cfg
let user_cfg = if sess.opts.test { append_configuration(user_cfg, ~"test") }
let user_cfg = if sess.opts.test { append_configuration(user_cfg, @"test") }
else { user_cfg };
// If the user requested GC, then add the GC cfg
let user_cfg = append_configuration(
user_cfg,
if sess.opts.gc { ~"gc" } else { ~"nogc" });
if sess.opts.gc { @"gc" } else { @"nogc" });
return vec::append(user_cfg, default_cfg);
}
@ -137,7 +137,7 @@ fn parse_cfgspecs(cfgspecs: ~[~str],
demitter: diagnostic::Emitter) -> ast::crate_cfg {
do vec::map_consume(cfgspecs) |s| {
let sess = parse::new_parse_sess(Some(demitter));
parse::parse_meta_from_source_str(~"cfgspec", @s, ~[], sess)
parse::parse_meta_from_source_str(@"cfgspec", s.to_managed(), ~[], sess)
}
}
@ -145,7 +145,8 @@ pub enum input {
/// Load source from file
file_input(Path),
/// The string is the source
str_input(~str)
// FIXME (#2319): Don't really want to box the source string
str_input(@str)
}
pub fn parse_input(sess: Session, cfg: ast::crate_cfg, input: &input)
@ -154,10 +155,9 @@ pub fn parse_input(sess: Session, cfg: ast::crate_cfg, input: &input)
file_input(ref file) => {
parse::parse_crate_from_file(&(*file), cfg, sess.parse_sess)
}
str_input(ref src) => {
// FIXME (#2319): Don't really want to box the source string
str_input(src) => {
parse::parse_crate_from_source_str(
anon_src(), @(/*bad*/copy *src), cfg, sess.parse_sess)
anon_src(), src, cfg, sess.parse_sess)
}
}
}
@ -455,7 +455,7 @@ pub fn pretty_print_input(sess: Session, cfg: ast::crate_cfg, input: &input,
};
let is_expanded = upto != cu_parse;
let src = sess.codemap.get_filemap(source_name(input)).src;
do io::with_str_reader(*src) |rdr| {
do io::with_str_reader(src) |rdr| {
pprust::print_crate(sess.codemap, token::get_ident_interner(),
sess.span_diagnostic, crate.unwrap(),
source_name(input),
@ -566,7 +566,7 @@ pub fn host_triple() -> ~str {
};
}
pub fn build_session_options(binary: @~str,
pub fn build_session_options(binary: @str,
matches: &getopts::Matches,
demitter: diagnostic::Emitter)
-> @session::options {
@ -595,7 +595,7 @@ pub fn build_session_options(binary: @~str,
getopts::opt_strs(matches, level_name));
for flags.each |lint_name| {
let lint_name = lint_name.replace("-", "_");
match lint_dict.find(&lint_name) {
match lint_dict.find_equiv(&lint_name) {
None => {
early_error(demitter, fmt!("unknown %s flag: %s",
level_name, lint_name));
@ -895,8 +895,8 @@ pub fn build_output_filenames(input: &input,
};
let mut stem = match *input {
file_input(ref ifile) => (*ifile).filestem().get(),
str_input(_) => ~"rust_out"
file_input(ref ifile) => (*ifile).filestem().get().to_managed(),
str_input(_) => @"rust_out"
};
// If a linkage name meta is present, we use it as the link name
@ -906,7 +906,7 @@ pub fn build_output_filenames(input: &input,
let maybe_matches = attr::find_meta_items_by_name(linkage_metas, "name");
if !maybe_matches.is_empty() {
match attr::get_meta_item_value_str(maybe_matches[0]) {
Some(s) => stem = copy *s,
Some(s) => stem = s,
_ => ()
}
}
@ -982,9 +982,9 @@ mod test {
Err(f) => fail!("test_switch_implies_cfg_test: %s", getopts::fail_str(f))
};
let sessopts = build_session_options(
@~"rustc", matches, diagnostic::emit);
@"rustc", matches, diagnostic::emit);
let sess = build_session(sessopts, diagnostic::emit);
let cfg = build_configuration(sess, @~"whatever", &str_input(~""));
let cfg = build_configuration(sess, @"whatever", &str_input(@""));
assert!((attr::contains_name(cfg, "test")));
}
@ -1000,9 +1000,9 @@ mod test {
}
};
let sessopts = build_session_options(
@~"rustc", matches, diagnostic::emit);
@"rustc", matches, diagnostic::emit);
let sess = build_session(sessopts, diagnostic::emit);
let cfg = build_configuration(sess, @~"whatever", &str_input(~""));
let cfg = build_configuration(sess, @"whatever", &str_input(@""));
let test_items = attr::find_meta_items_by_name(cfg, "test");
assert_eq!(test_items.len(), 1u);
}


@ -150,7 +150,7 @@ pub struct options {
// will be added to the crate AST node. This should not be used for
// anything except building the full crate config prior to parsing.
cfg: ast::crate_cfg,
binary: @~str,
binary: @str,
test: bool,
parse_only: bool,
no_trans: bool,
@ -295,7 +295,7 @@ impl Session_ {
}
// pointless function, now...
pub fn str_of(@self, id: ast::ident) -> @~str {
pub fn str_of(@self, id: ast::ident) -> @str {
token::ident_to_str(&id)
}
@ -331,7 +331,7 @@ pub fn basic_options() -> @options {
target_triple: host_triple(),
target_feature: ~"",
cfg: ~[],
binary: @~"rustc",
binary: @"rustc",
test: false,
parse_only: false,
no_trans: false,
@ -361,7 +361,7 @@ pub fn building_library(req_crate_type: crate_type,
match syntax::attr::first_attr_value_str_by_name(
crate.node.attrs,
"crate_type") {
Some(@~"lib") => true,
Some(s) if "lib" == s => true,
_ => false
}
}
@ -389,22 +389,22 @@ mod test {
use syntax::ast;
use syntax::codemap;
fn make_crate_type_attr(t: ~str) -> ast::attribute {
fn make_crate_type_attr(t: @str) -> ast::attribute {
codemap::respan(codemap::dummy_sp(), ast::attribute_ {
style: ast::attr_outer,
value: @codemap::respan(codemap::dummy_sp(),
ast::meta_name_value(
@~"crate_type",
@"crate_type",
codemap::respan(codemap::dummy_sp(),
ast::lit_str(@t)))),
ast::lit_str(t)))),
is_sugared_doc: false
})
}
fn make_crate(with_bin: bool, with_lib: bool) -> @ast::crate {
let mut attrs = ~[];
if with_bin { attrs += [make_crate_type_attr(~"bin")]; }
if with_lib { attrs += [make_crate_type_attr(~"lib")]; }
if with_bin { attrs += [make_crate_type_attr(@"bin")]; }
if with_lib { attrs += [make_crate_type_attr(@"lib")]; }
@codemap::respan(codemap::dummy_sp(), ast::crate_ {
module: ast::_mod { view_items: ~[], items: ~[] },
attrs: attrs,


@ -202,7 +202,7 @@ pub fn metas_in_cfg(cfg: ast::crate_cfg,
cfg_metas.any(|cfg_meta| {
cfg_meta.all(|cfg_mi| {
match cfg_mi.node {
ast::meta_list(s, ref it) if *s == ~"not"
ast::meta_list(s, ref it) if "not" == s
=> it.all(|mi| !attr::contains(cfg, *mi)),
_ => attr::contains(cfg, *cfg_mi)
}


@ -17,9 +17,9 @@ use syntax::ast;
use syntax::codemap::spanned;
pub fn inject_intrinsic(sess: Session, crate: @ast::crate) -> @ast::crate {
let intrinsic_module = @(include_str!("intrinsic.rs").to_owned());
let intrinsic_module = include_str!("intrinsic.rs").to_managed();
let item = parse::parse_item_from_source_str(~"<intrinsic>",
let item = parse::parse_item_from_source_str(@"<intrinsic>",
intrinsic_module,
/*bad*/copy sess.opts.cfg,
~[],


@ -49,8 +49,8 @@ fn inject_libstd_ref(sess: Session, crate: @ast::crate) -> @ast::crate {
spanned(ast::attribute_ {
style: ast::attr_inner,
value: @spanned(ast::meta_name_value(
@~"vers",
spanned(ast::lit_str(@STD_VERSION.to_str()))
@"vers",
spanned(ast::lit_str(STD_VERSION.to_managed()))
)),
is_sugared_doc: false
})


@ -52,7 +52,7 @@ pub fn modify_for_testing(sess: session::Session,
// configuration, either with the '--test' or '--cfg test'
// command line options.
let should_test = attr::contains(crate.node.config,
attr::mk_word_item(@~"test"));
attr::mk_word_item(@"test"));
if should_test {
generate_test_harness(sess, crate)
@ -76,7 +76,7 @@ fn generate_test_harness(sess: session::Session,
ext_cx.bt_push(ExpandedFrom(CallInfo {
call_site: dummy_sp(),
callee: NameAndSpan {
name: ~"test",
name: @"test",
span: None
}
}));
@ -111,7 +111,7 @@ fn fold_mod(cx: @mut TestCtxt,
fn nomain(cx: @mut TestCtxt, item: @ast::item) -> @ast::item {
if !*cx.sess.building_library {
@ast::item{attrs: item.attrs.filtered(|attr| {
*attr::get_attr_name(attr) != ~"main"
"main" != attr::get_attr_name(attr)
}),.. copy *item}
} else { item }
}
@ -272,9 +272,9 @@ mod __test {
*/
fn mk_std(cx: &TestCtxt) -> @ast::view_item {
let vers = ast::lit_str(@~"0.7-pre");
let vers = ast::lit_str(@"0.7-pre");
let vers = nospan(vers);
let mi = ast::meta_name_value(@~"vers", vers);
let mi = ast::meta_name_value(@"vers", vers);
let mi = nospan(mi);
let id_std = cx.sess.ident_of("extra");
let vi = if is_std(cx) {
@ -321,7 +321,7 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::item {
// This attribute tells resolve to let us call unexported functions
let resolve_unexported_attr =
attr::mk_attr(attr::mk_word_item(@~"!resolve_unexported"));
attr::mk_attr(attr::mk_word_item(@"!resolve_unexported"));
let item = ast::item {
ident: cx.sess.ident_of("__test"),
@ -376,7 +376,7 @@ fn is_std(cx: &TestCtxt) -> bool {
let is_std = {
let items = attr::find_linkage_metas(cx.crate.node.attrs);
match attr::last_meta_item_value_str_by_name(items, "name") {
Some(@~"extra") => true,
Some(s) if "extra" == s => true,
_ => false
}
};
@ -413,7 +413,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::expr {
debug!("encoding %s", ast_util::path_name_i(path));
let name_lit: ast::lit =
nospan(ast::lit_str(@ast_util::path_name_i(path)));
nospan(ast::lit_str(ast_util::path_name_i(path).to_managed()));
let name_expr = @ast::expr {
id: cx.sess.next_node_id(),


@ -60,7 +60,7 @@ pub fn read_crates(diag: @span_handler,
struct cache_entry {
cnum: int,
span: span,
hash: @~str,
hash: @str,
metas: @~[@ast::meta_item]
}
@ -100,12 +100,12 @@ fn warn_if_multiple_versions(e: @mut Env,
if matches.len() != 1u {
diag.handler().warn(
fmt!("using multiple versions of crate `%s`", *name));
fmt!("using multiple versions of crate `%s`", name));
for matches.each |match_| {
diag.span_note(match_.span, "used here");
let attrs = ~[
attr::mk_attr(attr::mk_list_item(
@~"link", /*bad*/copy *match_.metas))
@"link", /*bad*/copy *match_.metas))
];
loader::note_linkage_attrs(e.intr, diag, attrs);
}
@ -133,7 +133,7 @@ fn visit_crate(e: @mut Env, c: &ast::crate) {
for link_args.each |a| {
match attr::get_meta_item_value_str(attr::attr_meta(*a)) {
Some(ref linkarg) => {
cstore::add_used_link_args(cstore, **linkarg);
cstore::add_used_link_args(cstore, *linkarg);
}
None => {/* fallthrough */ }
}
@ -145,7 +145,7 @@ fn visit_view_item(e: @mut Env, i: @ast::view_item) {
ast::view_item_extern_mod(ident, ref meta_items, id) => {
debug!("resolving extern mod stmt. ident: %?, meta: %?",
ident, *meta_items);
let cnum = resolve_crate(e, ident, copy *meta_items, @~"", i.span);
let cnum = resolve_crate(e, ident, copy *meta_items, @"", i.span);
cstore::add_extern_mod_stmt_cnum(e.cstore, id, cnum);
}
_ => ()
@ -169,7 +169,7 @@ fn visit_item(e: @mut Env, i: @ast::item) {
match attr::first_attr_value_str_by_name(i.attrs,
"link_name") {
Some(nn) => {
if *nn == ~"" {
if nn.is_empty() {
e.diag.span_fatal(
i.span,
"empty #[link_name] not allowed; use \
@ -184,7 +184,7 @@ fn visit_item(e: @mut Env, i: @ast::item) {
!cstore::add_used_library(cstore, foreign_name);
}
if !link_args.is_empty() && already_added {
e.diag.span_fatal(i.span, ~"library '" + *foreign_name +
e.diag.span_fatal(i.span, ~"library '" + foreign_name +
"' already added: can't specify link_args.");
}
}
@ -194,7 +194,7 @@ fn visit_item(e: @mut Env, i: @ast::item) {
for link_args.each |a| {
match attr::get_meta_item_value_str(attr::attr_meta(*a)) {
Some(linkarg) => {
cstore::add_used_link_args(cstore, *linkarg);
cstore::add_used_link_args(cstore, linkarg);
}
None => { /* fallthrough */ }
}
@ -204,9 +204,9 @@ fn visit_item(e: @mut Env, i: @ast::item) {
}
}
fn metas_with(ident: @~str, key: @~str, metas: ~[@ast::meta_item])
fn metas_with(ident: @str, key: @str, metas: ~[@ast::meta_item])
-> ~[@ast::meta_item] {
let name_items = attr::find_meta_items_by_name(metas, *key);
let name_items = attr::find_meta_items_by_name(metas, key);
if name_items.is_empty() {
vec::append_one(metas, attr::mk_name_value_item_str(key, ident))
} else {
@ -214,12 +214,12 @@ fn metas_with(ident: @~str, key: @~str, metas: ~[@ast::meta_item])
}
}
fn metas_with_ident(ident: @~str, metas: ~[@ast::meta_item])
fn metas_with_ident(ident: @str, metas: ~[@ast::meta_item])
-> ~[@ast::meta_item] {
metas_with(ident, @~"name", metas)
metas_with(ident, @"name", metas)
}
fn existing_match(e: @mut Env, metas: &[@ast::meta_item], hash: @~str)
fn existing_match(e: @mut Env, metas: &[@ast::meta_item], hash: @str)
-> Option<int> {
for e.crate_cache.each |c| {
if loader::metadata_matches(*c.metas, metas)
@ -233,7 +233,7 @@ fn existing_match(e: @mut Env, metas: &[@ast::meta_item], hash: @~str)
fn resolve_crate(e: @mut Env,
ident: ast::ident,
metas: ~[@ast::meta_item],
hash: @~str,
hash: @str,
span: span)
-> ast::crate_num {
let metas = metas_with_ident(token::ident_to_str(&ident), metas);
@ -307,9 +307,9 @@ fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map {
let extrn_cnum = dep.cnum;
let cname = dep.name;
let cname_str = token::ident_to_str(&dep.name);
let cmetas = metas_with(dep.vers, @~"vers", ~[]);
let cmetas = metas_with(dep.vers, @"vers", ~[]);
debug!("resolving dep crate %s ver: %s hash: %s",
*cname_str, *dep.vers, *dep.hash);
cname_str, dep.vers, dep.hash);
match existing_match(e, metas_with_ident(cname_str,
copy cmetas),
dep.hash) {


@ -74,7 +74,7 @@ pub fn get_item_path(tcx: ty::ctxt, def: ast::def_id) -> ast_map::path {
// FIXME #1920: This path is not always correct if the crate is not linked
// into the root namespace.
vec::append(~[ast_map::path_mod(tcx.sess.ident_of(
*cdata.name))], path)
cdata.name))], path)
}
pub enum found_ast {


@ -31,7 +31,7 @@ use syntax::parse::token::ident_interner;
pub type cnum_map = @mut HashMap<ast::crate_num, ast::crate_num>;
pub struct crate_metadata {
name: @~str,
name: @str,
data: @~[u8],
cnum_map: cnum_map,
cnum: ast::crate_num
@ -41,8 +41,8 @@ pub struct CStore {
priv metas: HashMap <ast::crate_num, @crate_metadata>,
priv extern_mod_crate_map: extern_mod_crate_map,
priv used_crate_files: ~[Path],
priv used_libraries: ~[~str],
priv used_link_args: ~[~str],
priv used_libraries: ~[@str],
priv used_link_args: ~[@str],
intr: @ident_interner
}
@ -65,12 +65,12 @@ pub fn get_crate_data(cstore: &CStore, cnum: ast::crate_num)
return *cstore.metas.get(&cnum);
}
pub fn get_crate_hash(cstore: &CStore, cnum: ast::crate_num) -> @~str {
pub fn get_crate_hash(cstore: &CStore, cnum: ast::crate_num) -> @str {
let cdata = get_crate_data(cstore, cnum);
decoder::get_crate_hash(cdata.data)
}
pub fn get_crate_vers(cstore: &CStore, cnum: ast::crate_num) -> @~str {
pub fn get_crate_vers(cstore: &CStore, cnum: ast::crate_num) -> @str {
let cdata = get_crate_data(cstore, cnum);
decoder::get_crate_vers(cdata.data)
}
@ -102,26 +102,28 @@ pub fn get_used_crate_files(cstore: &CStore) -> ~[Path] {
return /*bad*/copy cstore.used_crate_files;
}
pub fn add_used_library(cstore: &mut CStore, lib: @~str) -> bool {
assert!(*lib != ~"");
pub fn add_used_library(cstore: &mut CStore, lib: @str) -> bool {
assert!(!lib.is_empty());
if cstore.used_libraries.contains(&*lib) { return false; }
cstore.used_libraries.push(/*bad*/ copy *lib);
if cstore.used_libraries.contains(&lib) { return false; }
cstore.used_libraries.push(lib);
true
}
pub fn get_used_libraries(cstore: &CStore) -> ~[~str] {
/*bad*/copy cstore.used_libraries
pub fn get_used_libraries<'a>(cstore: &'a CStore) -> &'a [@str] {
let slice: &'a [@str] = cstore.used_libraries;
slice
}
pub fn add_used_link_args(cstore: &mut CStore, args: &str) {
for args.split_iter(' ').advance |s| {
cstore.used_link_args.push(s.to_owned());
cstore.used_link_args.push(s.to_managed());
}
}
pub fn get_used_link_args(cstore: &CStore) -> ~[~str] {
/*bad*/copy cstore.used_link_args
pub fn get_used_link_args<'a>(cstore: &'a CStore) -> &'a [@str] {
let slice: &'a [@str] = cstore.used_link_args;
slice
}
pub fn add_extern_mod_stmt_cnum(cstore: &mut CStore,
@ -138,15 +140,15 @@ pub fn find_extern_mod_stmt_cnum(cstore: &CStore,
// returns hashes of crates directly used by this crate. Hashes are sorted by
// (crate name, crate version, crate hash) in lexicographic order (not semver)
pub fn get_dep_hashes(cstore: &CStore) -> ~[~str] {
struct crate_hash { name: @~str, vers: @~str, hash: @~str }
pub fn get_dep_hashes(cstore: &CStore) -> ~[@str] {
struct crate_hash { name: @str, vers: @str, hash: @str }
let mut result = ~[];
for cstore.extern_mod_crate_map.each_value |&cnum| {
let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data);
let vers = decoder::get_crate_vers(cdata.data);
debug!("Add hash[%s]: %s %s", *cdata.name, *vers, *hash);
debug!("Add hash[%s]: %s %s", cdata.name, vers, hash);
result.push(crate_hash {
name: cdata.name,
vers: vers,
@ -160,8 +162,8 @@ pub fn get_dep_hashes(cstore: &CStore) -> ~[~str] {
debug!("sorted:");
for sorted.each |x| {
debug!(" hash[%s]: %s", *x.name, *x.hash);
debug!(" hash[%s]: %s", x.name, x.hash);
}
sorted.map(|ch| /*bad*/copy *ch.hash)
sorted.map(|ch| ch.hash)
}
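The `cstore` hunk above also switches `get_used_libraries`/`get_used_link_args` from handing back a `/*bad*/ copy` of the whole vector to returning a borrowed slice. A rough modern-Rust sketch of that pattern, assuming `Rc<str>` in place of `@str` and a struct cut down to a single field:

```rust
use std::rc::Rc;

// A cut-down stand-in for the CStore in the hunk above; `Rc<str>`
// plays the role of the old `@str` managed string.
struct CStore {
    used_link_args: Vec<Rc<str>>,
}

impl CStore {
    // Mirrors add_used_link_args: split on spaces, store shared strings.
    fn add_used_link_args(&mut self, args: &str) {
        for s in args.split(' ') {
            self.used_link_args.push(Rc::from(s));
        }
    }

    // Returning a borrowed slice replaces the old `/*bad*/ copy` of the
    // whole vector; callers only need to iterate over it.
    fn get_used_link_args(&self) -> &[Rc<str>] {
        &self.used_link_args
    }
}

fn main() {
    let mut cstore = CStore { used_link_args: Vec::new() };
    cstore.add_used_link_args("-lfoo -lbar");
    for arg in cstore.get_used_link_args() {
        println!("{}", arg);
    }
}
```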


@ -36,7 +36,7 @@ use extra::serialize::Decodable;
use syntax::ast_map;
use syntax::attr;
use syntax::diagnostic::span_handler;
use syntax::parse::token::{StringRef, ident_interner, special_idents};
use syntax::parse::token::{ident_interner, special_idents};
use syntax::print::pprust;
use syntax::{ast, ast_util};
use syntax::codemap;
@ -311,7 +311,7 @@ fn item_path(item_doc: ebml::Doc) -> ast_map::path {
fn item_name(intr: @ident_interner, item: ebml::Doc) -> ast::ident {
let name = reader::get_doc(item, tag_paths_data_name);
let string = name.as_str_slice();
match intr.find_equiv(&StringRef(string)) {
match intr.find_equiv(&string) {
None => token::str_to_ident(string),
Some(val) => ast::new_ident(val),
}
@ -985,23 +985,23 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::meta_item] {
let mut items: ~[@ast::meta_item] = ~[];
for reader::tagged_docs(md, tag_meta_item_word) |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = nd.as_str();
items.push(attr::mk_word_item(@n));
let n = nd.as_str_slice().to_managed();
items.push(attr::mk_word_item(n));
};
for reader::tagged_docs(md, tag_meta_item_name_value) |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let vd = reader::get_doc(meta_item_doc, tag_meta_item_value);
let n = nd.as_str();
let v = vd.as_str();
let n = nd.as_str_slice().to_managed();
let v = vd.as_str_slice().to_managed();
// FIXME (#623): Should be able to decode meta_name_value variants,
// but currently the encoder just drops them
items.push(attr::mk_name_value_item_str(@n, @v));
items.push(attr::mk_name_value_item_str(n, v));
};
for reader::tagged_docs(md, tag_meta_item_list) |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = nd.as_str();
let n = nd.as_str_slice().to_managed();
let subitems = get_meta_items(meta_item_doc);
items.push(attr::mk_list_item(@n, subitems));
items.push(attr::mk_list_item(n, subitems));
};
return items;
}
@ -1058,8 +1058,8 @@ pub fn get_crate_attributes(data: @~[u8]) -> ~[ast::attribute] {
pub struct crate_dep {
cnum: ast::crate_num,
name: ast::ident,
vers: @~str,
hash: @~str
vers: @str,
hash: @str
}
pub fn get_crate_deps(data: @~[u8]) -> ~[crate_dep] {
@ -1067,14 +1067,15 @@ pub fn get_crate_deps(data: @~[u8]) -> ~[crate_dep] {
let cratedoc = reader::Doc(data);
let depsdoc = reader::get_doc(cratedoc, tag_crate_deps);
let mut crate_num = 1;
fn docstr(doc: ebml::Doc, tag_: uint) -> ~str {
reader::get_doc(doc, tag_).as_str()
fn docstr(doc: ebml::Doc, tag_: uint) -> @str {
let d = reader::get_doc(doc, tag_);
d.as_str_slice().to_managed()
}
for reader::tagged_docs(depsdoc, tag_crate_dep) |depdoc| {
deps.push(crate_dep {cnum: crate_num,
name: token::str_to_ident(docstr(depdoc, tag_crate_dep_name)),
vers: @docstr(depdoc, tag_crate_dep_vers),
hash: @docstr(depdoc, tag_crate_dep_hash)});
vers: docstr(depdoc, tag_crate_dep_vers),
hash: docstr(depdoc, tag_crate_dep_hash)});
crate_num += 1;
};
return deps;
@ -1086,25 +1087,25 @@ fn list_crate_deps(data: @~[u8], out: @io::Writer) {
for get_crate_deps(data).each |dep| {
out.write_str(
fmt!("%d %s-%s-%s\n",
dep.cnum, *token::ident_to_str(&dep.name), *dep.hash, *dep.vers));
dep.cnum, token::ident_to_str(&dep.name), dep.hash, dep.vers));
}
out.write_str("\n");
}
pub fn get_crate_hash(data: @~[u8]) -> @~str {
pub fn get_crate_hash(data: @~[u8]) -> @str {
let cratedoc = reader::Doc(data);
let hashdoc = reader::get_doc(cratedoc, tag_crate_hash);
@hashdoc.as_str()
hashdoc.as_str_slice().to_managed()
}
pub fn get_crate_vers(data: @~[u8]) -> @~str {
pub fn get_crate_vers(data: @~[u8]) -> @str {
let attrs = decoder::get_crate_attributes(data);
let linkage_attrs = attr::find_linkage_metas(attrs);
match attr::last_meta_item_value_str_by_name(linkage_attrs, "vers") {
Some(ver) => ver,
None => @~"0.0"
None => @"0.0"
}
}
@ -1126,7 +1127,7 @@ pub fn list_crate_metadata(intr: @ident_interner, bytes: @~[u8],
out: @io::Writer) {
let hash = get_crate_hash(bytes);
let md = reader::Doc(bytes);
list_crate_attributes(intr, md, *hash, out);
list_crate_attributes(intr, md, hash, out);
list_crate_deps(bytes, out);
}


@ -62,7 +62,7 @@ pub struct EncodeParams {
reachable: reachable::map,
reexports2: middle::resolve::ExportMap2,
item_symbols: @mut HashMap<ast::node_id, ~str>,
discrim_symbols: @mut HashMap<ast::node_id, @~str>,
discrim_symbols: @mut HashMap<ast::node_id, @str>,
link_meta: LinkMeta,
cstore: @mut cstore::CStore,
encode_inlined_item: encode_inlined_item
@ -89,7 +89,7 @@ pub struct EncodeContext {
reachable: reachable::map,
reexports2: middle::resolve::ExportMap2,
item_symbols: @mut HashMap<ast::node_id, ~str>,
discrim_symbols: @mut HashMap<ast::node_id, @~str>,
discrim_symbols: @mut HashMap<ast::node_id, @str>,
link_meta: LinkMeta,
cstore: @mut cstore::CStore,
encode_inlined_item: encode_inlined_item,
@ -103,14 +103,14 @@ pub fn reachable(ecx: @EncodeContext, id: node_id) -> bool {
fn encode_name(ecx: @EncodeContext,
ebml_w: &mut writer::Encoder,
name: ident) {
ebml_w.wr_tagged_str(tag_paths_data_name, *ecx.tcx.sess.str_of(name));
ebml_w.wr_tagged_str(tag_paths_data_name, ecx.tcx.sess.str_of(name));
}
fn encode_impl_type_basename(ecx: @EncodeContext,
ebml_w: &mut writer::Encoder,
name: ident) {
ebml_w.wr_tagged_str(tag_item_impl_type_basename,
*ecx.tcx.sess.str_of(name));
ecx.tcx.sess.str_of(name));
}
pub fn encode_def_id(ebml_w: &mut writer::Encoder, id: def_id) {
@ -362,7 +362,7 @@ fn encode_path(ecx: @EncodeContext,
ast_map::path_name(name) => (tag_path_elt_name, name)
};
ebml_w.wr_tagged_str(tag, *ecx.tcx.sess.str_of(name));
ebml_w.wr_tagged_str(tag, ecx.tcx.sess.str_of(name));
}
ebml_w.start_tag(tag_path);
@ -380,13 +380,13 @@ fn encode_reexported_static_method(ecx: @EncodeContext,
method_def_id: def_id,
method_ident: ident) {
debug!("(encode reexported static method) %s::%s",
*exp.name, *ecx.tcx.sess.str_of(method_ident));
exp.name, ecx.tcx.sess.str_of(method_ident));
ebml_w.start_tag(tag_items_data_item_reexport);
ebml_w.start_tag(tag_items_data_item_reexport_def_id);
ebml_w.wr_str(def_to_str(method_def_id));
ebml_w.end_tag();
ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(*exp.name + "::" + *ecx.tcx.sess.str_of(method_ident));
ebml_w.wr_str(fmt!("%s::%s", exp.name, ecx.tcx.sess.str_of(method_ident)));
ebml_w.end_tag();
ebml_w.end_tag();
}
@ -449,17 +449,17 @@ fn encode_reexported_static_methods(ecx: @EncodeContext,
// encoded metadata for static methods relative to Bar,
// but not yet for Foo.
//
if mod_path != *path || *exp.name != *original_name {
if mod_path != *path || exp.name != original_name {
if !encode_reexported_static_base_methods(ecx, ebml_w, exp) {
if encode_reexported_static_trait_methods(ecx, ebml_w, exp) {
debug!(fmt!("(encode reexported static methods) %s \
[trait]",
*original_name));
original_name));
}
}
else {
debug!(fmt!("(encode reexported static methods) %s [base]",
*original_name));
original_name));
}
}
}
@ -486,7 +486,7 @@ fn encode_info_for_mod(ecx: @EncodeContext,
let (ident, did) = (item.ident, item.id);
debug!("(encoding info for module) ... encoding impl %s \
(%?/%?)",
*ecx.tcx.sess.str_of(ident),
ecx.tcx.sess.str_of(ident),
did,
ast_map::node_id_to_str(ecx.tcx.items, did, token::get_ident_interner()));
@ -507,13 +507,13 @@ fn encode_info_for_mod(ecx: @EncodeContext,
debug!("(encoding info for module) found reexports for %d", id);
for exports.each |exp| {
debug!("(encoding info for module) reexport '%s' for %d",
*exp.name, id);
exp.name, id);
ebml_w.start_tag(tag_items_data_item_reexport);
ebml_w.start_tag(tag_items_data_item_reexport_def_id);
ebml_w.wr_str(def_to_str(exp.def_id));
ebml_w.end_tag();
ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(*exp.name);
ebml_w.wr_str(exp.name);
ebml_w.end_tag();
ebml_w.end_tag();
encode_reexported_static_methods(ecx, ebml_w, path, exp);
@ -622,7 +622,7 @@ fn encode_info_for_struct(ecx: @EncodeContext,
global_index.push(entry {val: id, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
debug!("encode_info_for_struct: doing %s %d",
*tcx.sess.str_of(nm), id);
tcx.sess.str_of(nm), id);
encode_struct_field_family(ebml_w, vis);
encode_name(ecx, ebml_w, nm);
encode_path(ecx, ebml_w, path, ast_map::path_name(nm));
@ -648,7 +648,7 @@ fn encode_info_for_ctor(ecx: @EncodeContext,
encode_type_param_bounds(ebml_w, ecx, &generics.ty_params);
let its_ty = node_id_to_type(ecx.tcx, id);
debug!("fn name = %s ty = %s its node id = %d",
*ecx.tcx.sess.str_of(ident),
ecx.tcx.sess.str_of(ident),
ty_to_str(ecx.tcx, its_ty), id);
encode_type(ecx, ebml_w, its_ty);
encode_path(ecx, ebml_w, path, ast_map::path_name(ident));
@ -708,7 +708,7 @@ fn encode_info_for_method(ecx: @EncodeContext,
owner_generics: &ast::Generics,
method_generics: &ast::Generics) {
debug!("encode_info_for_method: %d %s %u %u", m.id,
*ecx.tcx.sess.str_of(m.ident),
ecx.tcx.sess.str_of(m.ident),
owner_generics.ty_params.len(),
method_generics.ty_params.len());
ebml_w.start_tag(tag_items_data_item);
@ -1058,7 +1058,7 @@ fn encode_info_for_item(ecx: @EncodeContext,
tcx.sess.span_unimpl(
item.span,
fmt!("Method %s is both provided and static",
*token::ident_to_str(&method_ty.ident)));
token::ident_to_str(&method_ty.ident)));
}
encode_type_param_bounds(ebml_w, ecx,
&m.generics.ty_params);
@ -1278,11 +1278,11 @@ fn synthesize_crate_attrs(ecx: @EncodeContext,
assert!(!ecx.link_meta.vers.is_empty());
let name_item =
attr::mk_name_value_item_str(@~"name",
@ecx.link_meta.name.to_owned());
attr::mk_name_value_item_str(@"name",
ecx.link_meta.name);
let vers_item =
attr::mk_name_value_item_str(@~"vers",
@ecx.link_meta.vers.to_owned());
attr::mk_name_value_item_str(@"vers",
ecx.link_meta.vers);
let other_items =
{
@ -1291,7 +1291,7 @@ fn synthesize_crate_attrs(ecx: @EncodeContext,
};
let meta_items = vec::append(~[name_item, vers_item], other_items);
let link_item = attr::mk_list_item(@~"link", meta_items);
let link_item = attr::mk_list_item(@"link", meta_items);
return attr::mk_attr(link_item);
}
@ -1300,15 +1300,15 @@ fn synthesize_crate_attrs(ecx: @EncodeContext,
let mut found_link_attr = false;
for crate.node.attrs.each |attr| {
attrs.push(
if *attr::get_attr_name(attr) != ~"link" {
/*bad*/copy *attr
if "link" != attr::get_attr_name(attr) {
copy *attr
} else {
match attr.node.value.node {
meta_list(_, ref l) => {
found_link_attr = true;;
synthesize_link_attr(ecx, /*bad*/copy *l)
}
_ => /*bad*/copy *attr
_ => copy *attr
}
});
}
@ -1329,7 +1329,7 @@ fn encode_crate_deps(ecx: @EncodeContext,
let mut deps = ~[];
do cstore::iter_crate_data(cstore) |key, val| {
let dep = decoder::crate_dep {cnum: key,
name: ecx.tcx.sess.ident_of(/*bad*/ copy *val.name),
name: ecx.tcx.sess.ident_of(val.name),
vers: decoder::get_crate_vers(val.data),
hash: decoder::get_crate_hash(val.data)};
deps.push(dep);


@ -48,7 +48,7 @@ pub struct Context {
span: span,
ident: ast::ident,
metas: ~[@ast::meta_item],
hash: @~str,
hash: @str,
os: os,
is_static: bool,
intr: @ident_interner
@ -60,7 +60,7 @@ pub fn load_library_crate(cx: &Context) -> (~str, @~[u8]) {
None => {
cx.diag.span_fatal(
cx.span, fmt!("can't find crate for `%s`",
*token::ident_to_str(&cx.ident)));
token::ident_to_str(&cx.ident)));
}
}
}
@ -89,7 +89,7 @@ fn find_library_crate_aux(
filesearch: @filesearch::FileSearch
) -> Option<(~str, @~[u8])> {
let crate_name = crate_name_from_metas(cx.metas);
let prefix: ~str = prefix + *crate_name + "-";
let prefix: ~str = prefix + crate_name + "-";
let suffix: ~str = /*bad*/copy suffix;
let mut matches = ~[];
@ -128,7 +128,7 @@ fn find_library_crate_aux(
Some(/*bad*/copy matches[0])
} else {
cx.diag.span_err(
cx.span, fmt!("multiple matching crates for `%s`", *crate_name));
cx.span, fmt!("multiple matching crates for `%s`", crate_name));
cx.diag.handler().note("candidates:");
for matches.each |&(ident, data)| {
cx.diag.handler().note(fmt!("path: %s", ident));
@ -140,7 +140,7 @@ fn find_library_crate_aux(
}
}
pub fn crate_name_from_metas(metas: &[@ast::meta_item]) -> @~str {
pub fn crate_name_from_metas(metas: &[@ast::meta_item]) -> @str {
let name_items = attr::find_meta_items_by_name(metas, "name");
match name_items.last_opt() {
Some(i) => {
@ -166,7 +166,7 @@ pub fn note_linkage_attrs(intr: @ident_interner,
fn crate_matches(crate_data: @~[u8],
metas: &[@ast::meta_item],
hash: @~str) -> bool {
hash: @str) -> bool {
let attrs = decoder::get_crate_attributes(crate_data);
let linkage_metas = attr::find_linkage_metas(attrs);
if !hash.is_empty() {


@ -41,7 +41,7 @@ pub struct ctxt {
pub struct ty_abbrev {
pos: uint,
len: uint,
s: @~str
s: @str
}
pub enum abbrev_ctxt {
@ -60,12 +60,12 @@ pub fn enc_ty(w: @io::Writer, cx: @ctxt, t: ty::t) {
match cx.abbrevs {
ac_no_abbrevs => {
let result_str = match cx.tcx.short_names_cache.find(&t) {
Some(&s) => /*bad*/copy *s,
Some(&s) => s,
None => {
let s = do io::with_str_writer |wr| {
enc_sty(wr, cx, /*bad*/copy ty::get(t).sty);
};
cx.tcx.short_names_cache.insert(t, @copy s);
}.to_managed();
cx.tcx.short_names_cache.insert(t, s);
s
}
};
@ -73,7 +73,7 @@ pub fn enc_ty(w: @io::Writer, cx: @ctxt, t: ty::t) {
}
ac_use_abbrevs(abbrevs) => {
match abbrevs.find(&t) {
Some(a) => { w.write_str(*a.s); return; }
Some(a) => { w.write_str(a.s); return; }
None => {}
}
let pos = w.tell();
@ -89,8 +89,8 @@ pub fn enc_ty(w: @io::Writer, cx: @ctxt, t: ty::t) {
let abbrev_len = 3u + estimate_sz(pos) + estimate_sz(len);
if abbrev_len < len {
// I.e. it's actually an abbreviation.
let s = fmt!("#%x:%x#", pos, len);
let a = ty_abbrev { pos: pos, len: len, s: @s };
let s = fmt!("#%x:%x#", pos, len).to_managed();
let a = ty_abbrev { pos: pos, len: len, s: s };
abbrevs.insert(t, a);
}
return;
@ -171,7 +171,7 @@ fn enc_bound_region(w: @io::Writer, cx: @ctxt, br: ty::bound_region) {
}
ty::br_named(s) => {
w.write_char('[');
w.write_str(*cx.tcx.sess.str_of(s));
w.write_str(cx.tcx.sess.str_of(s));
w.write_char(']')
}
ty::br_cap_avoid(id, br) => {


@ -88,7 +88,7 @@ pub fn encode_inlined_item(ecx: @e::EncodeContext,
maps: Maps) {
debug!("> Encoding inlined item: %s::%s (%u)",
ast_map::path_to_str(path, token::get_ident_interner()),
*ecx.tcx.sess.str_of(ii.ident()),
ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell());
let id_range = ast_util::compute_id_range_for_inlined_item(&ii);
@ -101,7 +101,7 @@ pub fn encode_inlined_item(ecx: @e::EncodeContext,
debug!("< Encoded inlined fn: %s::%s (%u)",
ast_map::path_to_str(path, token::get_ident_interner()),
*ecx.tcx.sess.str_of(ii.ident()),
ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell());
}
@ -131,10 +131,10 @@ pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
};
let raw_ii = decode_ast(ast_doc);
let ii = renumber_ast(xcx, raw_ii);
debug!("Fn named: %s", *tcx.sess.str_of(ii.ident()));
debug!("Fn named: %s", tcx.sess.str_of(ii.ident()));
debug!("< Decoded inlined fn: %s::%s",
ast_map::path_to_str(path, token::get_ident_interner()),
*tcx.sess.str_of(ii.ident()));
tcx.sess.str_of(ii.ident()));
ast_map::map_decoded_item(tcx.sess.diagnostic(),
dcx.tcx.items, path, &ii);
decode_side_tables(xcx, ast_doc);


@ -711,7 +711,7 @@ impl BorrowckCtxt {
LpVar(id) => {
match self.tcx.items.find(&id) {
Some(&ast_map::node_local(ref ident)) => {
out.push_str(*token::ident_to_str(ident));
out.push_str(token::ident_to_str(ident));
}
r => {
self.tcx.sess.bug(
@ -726,7 +726,7 @@ impl BorrowckCtxt {
match fname {
mc::NamedField(ref fname) => {
out.push_char('.');
out.push_str(*token::ident_to_str(fname));
out.push_str(token::ident_to_str(fname));
}
mc::PositionalField(idx) => {
out.push_char('#'); // invent a notation here


@ -144,8 +144,8 @@ pub fn check_exhaustive(cx: @MatchCheckCtxt, sp: span, pats: ~[@pat]) {
match ty::get(ty).sty {
ty::ty_bool => {
match (*ctor) {
val(const_bool(true)) => Some(@~"true"),
val(const_bool(false)) => Some(@~"false"),
val(const_bool(true)) => Some(@"true"),
val(const_bool(false)) => Some(@"false"),
_ => None
}
}
@ -165,7 +165,7 @@ pub fn check_exhaustive(cx: @MatchCheckCtxt, sp: span, pats: ~[@pat]) {
}
ty::ty_unboxed_vec(*) | ty::ty_evec(*) => {
match *ctor {
vec(n) => Some(@fmt!("vectors of length %u", n)),
vec(n) => Some(fmt!("vectors of length %u", n).to_managed()),
_ => None
}
}
@ -174,7 +174,7 @@ pub fn check_exhaustive(cx: @MatchCheckCtxt, sp: span, pats: ~[@pat]) {
}
};
let msg = ~"non-exhaustive patterns" + match ext {
Some(ref s) => ~": " + **s + " not covered",
Some(ref s) => fmt!(": %s not covered", *s),
None => ~""
};
cx.tcx.sess.span_err(sp, msg);


@ -236,14 +236,14 @@ pub enum const_val {
const_float(f64),
const_int(i64),
const_uint(u64),
const_str(~str),
const_str(@str),
const_bool(bool)
}
pub fn eval_const_expr(tcx: middle::ty::ctxt, e: @expr) -> const_val {
match eval_const_expr_partial(tcx, e) {
Ok(ref r) => (/*bad*/copy *r),
Err(ref s) => tcx.sess.span_fatal(e.span, *s)
Ok(r) => r,
Err(s) => tcx.sess.span_fatal(e.span, s)
}
}
@ -409,13 +409,13 @@ pub fn eval_const_expr_partial(tcx: middle::ty::ctxt, e: @expr)
pub fn lit_to_const(lit: @lit) -> const_val {
match lit.node {
lit_str(s) => const_str(/*bad*/copy *s),
lit_str(s) => const_str(s),
lit_int(n, _) => const_int(n),
lit_uint(n, _) => const_uint(n),
lit_int_unsuffixed(n) => const_int(n),
lit_float(n, _) => const_float(float::from_str(*n).get() as f64),
lit_float(n, _) => const_float(float::from_str(n).get() as f64),
lit_float_unsuffixed(n) =>
const_float(float::from_str(*n).get() as f64),
const_float(float::from_str(n).get() as f64),
lit_nil => const_int(0i64),
lit_bool(b) => const_bool(b)
}


@ -269,50 +269,50 @@ fn LanguageItemCollector(crate: @crate,
-> LanguageItemCollector {
let mut item_refs = HashMap::new();
item_refs.insert(@~"const", ConstTraitLangItem as uint);
item_refs.insert(@~"copy", CopyTraitLangItem as uint);
item_refs.insert(@~"owned", OwnedTraitLangItem as uint);
item_refs.insert(@~"sized", SizedTraitLangItem as uint);
item_refs.insert(@"const", ConstTraitLangItem as uint);
item_refs.insert(@"copy", CopyTraitLangItem as uint);
item_refs.insert(@"owned", OwnedTraitLangItem as uint);
item_refs.insert(@"sized", SizedTraitLangItem as uint);
item_refs.insert(@~"drop", DropTraitLangItem as uint);
item_refs.insert(@"drop", DropTraitLangItem as uint);
item_refs.insert(@~"add", AddTraitLangItem as uint);
item_refs.insert(@~"sub", SubTraitLangItem as uint);
item_refs.insert(@~"mul", MulTraitLangItem as uint);
item_refs.insert(@~"div", DivTraitLangItem as uint);
item_refs.insert(@~"rem", RemTraitLangItem as uint);
item_refs.insert(@~"neg", NegTraitLangItem as uint);
item_refs.insert(@~"not", NotTraitLangItem as uint);
item_refs.insert(@~"bitxor", BitXorTraitLangItem as uint);
item_refs.insert(@~"bitand", BitAndTraitLangItem as uint);
item_refs.insert(@~"bitor", BitOrTraitLangItem as uint);
item_refs.insert(@~"shl", ShlTraitLangItem as uint);
item_refs.insert(@~"shr", ShrTraitLangItem as uint);
item_refs.insert(@~"index", IndexTraitLangItem as uint);
item_refs.insert(@"add", AddTraitLangItem as uint);
item_refs.insert(@"sub", SubTraitLangItem as uint);
item_refs.insert(@"mul", MulTraitLangItem as uint);
item_refs.insert(@"div", DivTraitLangItem as uint);
item_refs.insert(@"rem", RemTraitLangItem as uint);
item_refs.insert(@"neg", NegTraitLangItem as uint);
item_refs.insert(@"not", NotTraitLangItem as uint);
item_refs.insert(@"bitxor", BitXorTraitLangItem as uint);
item_refs.insert(@"bitand", BitAndTraitLangItem as uint);
item_refs.insert(@"bitor", BitOrTraitLangItem as uint);
item_refs.insert(@"shl", ShlTraitLangItem as uint);
item_refs.insert(@"shr", ShrTraitLangItem as uint);
item_refs.insert(@"index", IndexTraitLangItem as uint);
item_refs.insert(@~"eq", EqTraitLangItem as uint);
item_refs.insert(@~"ord", OrdTraitLangItem as uint);
item_refs.insert(@"eq", EqTraitLangItem as uint);
item_refs.insert(@"ord", OrdTraitLangItem as uint);
item_refs.insert(@~"str_eq", StrEqFnLangItem as uint);
item_refs.insert(@~"uniq_str_eq", UniqStrEqFnLangItem as uint);
item_refs.insert(@~"annihilate", AnnihilateFnLangItem as uint);
item_refs.insert(@~"log_type", LogTypeFnLangItem as uint);
item_refs.insert(@~"fail_", FailFnLangItem as uint);
item_refs.insert(@~"fail_bounds_check",
item_refs.insert(@"str_eq", StrEqFnLangItem as uint);
item_refs.insert(@"uniq_str_eq", UniqStrEqFnLangItem as uint);
item_refs.insert(@"annihilate", AnnihilateFnLangItem as uint);
item_refs.insert(@"log_type", LogTypeFnLangItem as uint);
item_refs.insert(@"fail_", FailFnLangItem as uint);
item_refs.insert(@"fail_bounds_check",
FailBoundsCheckFnLangItem as uint);
item_refs.insert(@~"exchange_malloc", ExchangeMallocFnLangItem as uint);
item_refs.insert(@~"exchange_free", ExchangeFreeFnLangItem as uint);
item_refs.insert(@~"malloc", MallocFnLangItem as uint);
item_refs.insert(@~"free", FreeFnLangItem as uint);
item_refs.insert(@~"borrow_as_imm", BorrowAsImmFnLangItem as uint);
item_refs.insert(@~"borrow_as_mut", BorrowAsMutFnLangItem as uint);
item_refs.insert(@~"return_to_mut", ReturnToMutFnLangItem as uint);
item_refs.insert(@~"check_not_borrowed",
item_refs.insert(@"exchange_malloc", ExchangeMallocFnLangItem as uint);
item_refs.insert(@"exchange_free", ExchangeFreeFnLangItem as uint);
item_refs.insert(@"malloc", MallocFnLangItem as uint);
item_refs.insert(@"free", FreeFnLangItem as uint);
item_refs.insert(@"borrow_as_imm", BorrowAsImmFnLangItem as uint);
item_refs.insert(@"borrow_as_mut", BorrowAsMutFnLangItem as uint);
item_refs.insert(@"return_to_mut", ReturnToMutFnLangItem as uint);
item_refs.insert(@"check_not_borrowed",
CheckNotBorrowedFnLangItem as uint);
item_refs.insert(@~"strdup_uniq", StrDupUniqFnLangItem as uint);
item_refs.insert(@~"record_borrow", RecordBorrowFnLangItem as uint);
item_refs.insert(@~"unrecord_borrow", UnrecordBorrowFnLangItem as uint);
item_refs.insert(@~"start", StartFnLangItem as uint);
item_refs.insert(@"strdup_uniq", StrDupUniqFnLangItem as uint);
item_refs.insert(@"record_borrow", RecordBorrowFnLangItem as uint);
item_refs.insert(@"unrecord_borrow", UnrecordBorrowFnLangItem as uint);
item_refs.insert(@"start", StartFnLangItem as uint);
LanguageItemCollector {
crate: crate,
@ -328,7 +328,7 @@ struct LanguageItemCollector {
crate: @crate,
session: Session,
item_refs: HashMap<@~str, uint>,
item_refs: HashMap<@str, uint>,
}
impl LanguageItemCollector {
@ -366,9 +366,9 @@ impl LanguageItemCollector {
pub fn match_and_collect_item(&mut self,
item_def_id: def_id,
key: @~str,
value: @~str) {
if *key != ~"lang" {
key: @str,
value: @str) {
if "lang" != key {
return; // Didn't match.
}
@ -419,7 +419,7 @@ impl LanguageItemCollector {
for self.item_refs.each |&key, &item_ref| {
match self.items.items[item_ref] {
None => {
self.session.err(fmt!("no item found for `%s`", *key));
self.session.err(fmt!("no item found for `%s`", key));
}
Some(_) => {
// OK.


@ -119,7 +119,7 @@ struct LintSpec {
default: level
}
pub type LintDict = HashMap<~str, LintSpec>;
pub type LintDict = HashMap<&'static str, LintSpec>;
enum AttributedNode<'self> {
Item(@ast::item),
@ -290,7 +290,7 @@ static lint_table: &'static [(&'static str, LintSpec)] = &[
pub fn get_lint_dict() -> LintDict {
let mut map = HashMap::new();
for lint_table.each|&(k, v)| {
map.insert(k.to_str(), v);
map.insert(k, v);
}
return map;
}
@ -352,10 +352,10 @@ impl Context {
}
}
fn lint_to_str(&self, lint: lint) -> ~str {
fn lint_to_str(&self, lint: lint) -> &'static str {
for self.dict.each |k, v| {
if v.lint == lint {
return copy *k;
return *k;
}
}
fail!("unregistered lint %?", lint);
@ -405,13 +405,13 @@ impl Context {
// specified closure
let mut pushed = 0u;
for each_lint(self.tcx.sess, attrs) |meta, level, lintname| {
let lint = match self.dict.find(lintname) {
let lint = match self.dict.find_equiv(&lintname) {
None => {
self.span_lint(
unrecognized_lint,
meta.span,
fmt!("unknown `%s` attribute: `%s`",
level_to_str(level), *lintname));
level_to_str(level), lintname));
loop
}
Some(lint) => { lint.lint }
@ -422,7 +422,7 @@ impl Context {
self.tcx.sess.span_err(meta.span,
fmt!("%s(%s) overruled by outer forbid(%s)",
level_to_str(level),
*lintname, *lintname));
lintname, lintname));
loop;
}
@ -498,7 +498,7 @@ impl Context {
pub fn each_lint(sess: session::Session,
attrs: &[ast::attribute],
f: &fn(@ast::meta_item, level, &~str) -> bool) -> bool
f: &fn(@ast::meta_item, level, @str) -> bool) -> bool
{
for [allow, warn, deny, forbid].each |&level| {
let level_name = level_to_str(level);
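The lint-table hunk above replaces owned-string keys with `&'static str` keys and switches the lookup to `find_equiv`, so a lint name built at runtime (`-` normalised to `_`) can be used as the key without allocating an owned copy. A rough modern-Rust analogue, with a placeholder field standing in for the real `LintSpec` contents and `Borrow`-based `HashMap::get` standing in for `find_equiv`:

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, Debug)]
struct LintSpec {
    default_level: u8, // placeholder for the real lint/level fields
}

// Stand-in for the &'static str lint table: the names live in the
// binary, so building the dictionary copies no string data.
static LINT_TABLE: &[(&str, LintSpec)] = &[
    ("unused_variable", LintSpec { default_level: 1 }),
    ("dead_assignment", LintSpec { default_level: 1 }),
];

fn get_lint_dict() -> HashMap<&'static str, LintSpec> {
    LINT_TABLE.iter().copied().collect()
}

fn main() {
    let dict = get_lint_dict();
    // The command line hands us an owned string; like the old
    // `find_equiv`, the lookup borrows it rather than turning it into
    // a new owned key.
    let flag = "unused-variable".replace('-', "_");
    match dict.get(flag.as_str()) {
        Some(spec) => println!("{} -> {:?}", flag, spec),
        None => println!("unknown lint flag: {}", flag),
    }
}
```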


@ -310,12 +310,12 @@ impl IrMaps {
}
}
pub fn variable_name(&mut self, var: Variable) -> @~str {
pub fn variable_name(&mut self, var: Variable) -> @str {
match self.var_kinds[*var] {
Local(LocalInfo { ident: nm, _ }) | Arg(_, nm) => {
self.tcx.sess.str_of(nm)
},
ImplicitRet => @~"<implicit-ret>"
ImplicitRet => @"<implicit-ret>"
}
}
@ -1578,12 +1578,12 @@ impl Liveness {
FreeVarNode(span) => {
self.tcx.sess.span_err(
span,
fmt!("capture of %s: `%s`", msg, *name));
fmt!("capture of %s: `%s`", msg, name));
}
ExprNode(span) => {
self.tcx.sess.span_err(
span,
fmt!("use of %s: `%s`", msg, *name));
fmt!("use of %s: `%s`", msg, name));
}
ExitNode | VarDefNode(_) => {
self.tcx.sess.span_bug(
@ -1593,7 +1593,7 @@ impl Liveness {
}
}
pub fn should_warn(&self, var: Variable) -> Option<@~str> {
pub fn should_warn(&self, var: Variable) -> Option<@str> {
let name = self.ir.variable_name(var);
if name[0] == ('_' as u8) { None } else { Some(name) }
}
@ -1638,10 +1638,10 @@ impl Liveness {
if is_assigned {
self.tcx.sess.add_lint(unused_variable, id, sp,
fmt!("variable `%s` is assigned to, \
but never used", **name));
but never used", *name));
} else {
self.tcx.sess.add_lint(unused_variable, id, sp,
fmt!("unused variable: `%s`", **name));
fmt!("unused variable: `%s`", *name));
}
}
true
@ -1659,7 +1659,7 @@ impl Liveness {
let r = self.should_warn(var);
for r.iter().advance |name| {
self.tcx.sess.add_lint(dead_assignment, id, sp,
fmt!("value assigned to `%s` is never read", **name));
fmt!("value assigned to `%s` is never read", *name));
}
}
}


@ -1201,7 +1201,7 @@ pub fn ptr_sigil(ptr: ptr_kind) -> ~str {
impl Repr for InteriorKind {
fn repr(&self, tcx: ty::ctxt) -> ~str {
match *self {
InteriorField(NamedField(fld)) => copy *tcx.sess.str_of(fld),
InteriorField(NamedField(fld)) => tcx.sess.str_of(fld).to_owned(),
InteriorField(PositionalField(i)) => fmt!("#%?", i),
InteriorElement(_) => ~"[]",
}


@ -235,7 +235,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
if field.ident != ident { loop; }
if field.vis == private {
tcx.sess.span_err(span, fmt!("field `%s` is private",
*token::ident_to_str(&ident)));
token::ident_to_str(&ident)));
}
break;
}
@ -255,7 +255,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
!privileged_items.contains(&(container_id.node))) {
tcx.sess.span_err(span,
fmt!("method `%s` is private",
*token::ident_to_str(name)));
token::ident_to_str(name)));
}
} else {
let visibility =
@ -263,7 +263,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
if visibility != public {
tcx.sess.span_err(span,
fmt!("method `%s` is private",
*token::ident_to_str(name)));
token::ident_to_str(name)));
}
}
};
@ -283,13 +283,13 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
!privileged_items.contains(&def_id.node) {
tcx.sess.span_err(span,
fmt!("function `%s` is private",
*token::ident_to_str(path.idents.last())));
token::ident_to_str(path.idents.last())));
}
} else if csearch::get_item_visibility(tcx.sess.cstore,
def_id) != public {
tcx.sess.span_err(span,
fmt!("function `%s` is private",
*token::ident_to_str(path.idents.last())));
token::ident_to_str(path.idents.last())));
}
}
_ => {}
@ -328,7 +328,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
.contains(&(trait_id.node)) => {
tcx.sess.span_err(span,
fmt!("method `%s` is private",
*token::ident_to_str(&method
token::ident_to_str(&method
.ident)));
}
provided(_) | required(_) => {


@ -84,7 +84,7 @@ pub type TraitMap = HashMap<node_id,@mut ~[def_id]>;
pub type ExportMap2 = @mut HashMap<node_id, ~[Export2]>;
pub struct Export2 {
name: @~str, // The name of the target.
name: @str, // The name of the target.
def_id: def_id, // The definition of the target.
reexport: bool, // Whether this is a reexport.
}
@ -1035,14 +1035,14 @@ impl Resolver {
self.session.span_err(sp,
fmt!("duplicate definition of %s `%s`",
namespace_to_str(ns),
*self.session.str_of(name)));
self.session.str_of(name)));
{
let r = child.span_for_namespace(ns);
for r.iter().advance |sp| {
self.session.span_note(*sp,
fmt!("first definition of %s %s here:",
namespace_to_str(ns),
*self.session.str_of(name)));
self.session.str_of(name)));
}
}
}
@ -1695,7 +1695,7 @@ impl Resolver {
debug!("(building reduced graph for \
external crate) ... adding \
trait method '%s'",
*self.session.str_of(method_name));
self.session.str_of(method_name));
// Add it to the trait info if not static.
if explicit_self != sty_static {
@ -1824,7 +1824,7 @@ impl Resolver {
visibility,
&mut modules,
child_name_bindings,
*self.session.str_of(
self.session.str_of(
final_ident),
final_ident,
new_parent);
@ -1843,7 +1843,7 @@ impl Resolver {
debug!("(building reduced graph for \
external crate) processing \
static methods for type name %s",
*self.session.str_of(
self.session.str_of(
final_ident));
let (child_name_bindings, new_parent) =
@ -1894,7 +1894,7 @@ impl Resolver {
debug!("(building reduced graph for \
external crate) creating \
static method '%s'",
*self.session.str_of(ident));
self.session.str_of(ident));
let (method_name_bindings, _) =
self.add_child(
@ -1945,7 +1945,7 @@ impl Resolver {
directive: privacy %? %s::%s",
privacy,
self.idents_to_str(directive.module_path),
*self.session.str_of(target));
self.session.str_of(target));
match module_.import_resolutions.find(&target) {
Some(&resolution) => {
@ -2054,7 +2054,7 @@ impl Resolver {
Failed => {
// We presumably emitted an error. Continue.
let msg = fmt!("failed to resolve import `%s`",
*self.import_path_to_str(
self.import_path_to_str(
import_directive.module_path,
*import_directive.subclass));
self.session.span_err(import_directive.span, msg);
@ -2077,30 +2077,30 @@ impl Resolver {
let mut result = ~"";
for idents.each |ident| {
if first { first = false; } else { result += "::" };
result += *self.session.str_of(*ident);
result += self.session.str_of(*ident);
};
return result;
}
pub fn import_directive_subclass_to_str(@mut self,
subclass: ImportDirectiveSubclass)
-> @~str {
-> @str {
match subclass {
SingleImport(_target, source) => self.session.str_of(source),
GlobImport => @~"*"
GlobImport => @"*"
}
}
pub fn import_path_to_str(@mut self,
idents: &[ident],
subclass: ImportDirectiveSubclass)
-> @~str {
-> @str {
if idents.is_empty() {
self.import_directive_subclass_to_str(subclass)
} else {
@fmt!("%s::%s",
self.idents_to_str(idents),
*self.import_directive_subclass_to_str(subclass))
(fmt!("%s::%s",
self.idents_to_str(idents),
self.import_directive_subclass_to_str(subclass))).to_managed()
}
}
@ -2221,9 +2221,9 @@ impl Resolver {
-> ResolveResult<()> {
debug!("(resolving single import) resolving `%s` = `%s::%s` from \
`%s`",
*self.session.str_of(target),
self.session.str_of(target),
self.module_to_str(containing_module),
*self.session.str_of(source),
self.session.str_of(source),
self.module_to_str(module_));
// We need to resolve both namespaces for this to succeed.
@ -2427,12 +2427,12 @@ impl Resolver {
let span = directive.span;
if resolve_fail {
self.session.span_err(span, fmt!("unresolved import: there is no `%s` in `%s`",
*self.session.str_of(source),
self.session.str_of(source),
self.module_to_str(containing_module)));
return Failed;
} else if priv_fail {
self.session.span_err(span, fmt!("unresolved import: found `%s` in `%s` but it is \
private", *self.session.str_of(source),
private", self.session.str_of(source),
self.module_to_str(containing_module)));
return Failed;
}
@ -2535,7 +2535,7 @@ impl Resolver {
debug!("(resolving glob import) writing resolution `%s` in `%s` \
to `%s`, privacy=%?",
*self.session.str_of(ident),
self.session.str_of(ident),
self.module_to_str(containing_module),
self.module_to_str(module_),
copy dest_import_resolution.privacy);
@ -2604,17 +2604,17 @@ impl Resolver {
fmt!("unresolved import. maybe \
a missing `extern mod \
%s`?",
*segment_name));
segment_name));
return Failed;
}
self.session.span_err(span, fmt!("unresolved import: could not find `%s` in \
`%s`.", *segment_name, module_name));
`%s`.", segment_name, module_name));
return Failed;
}
Indeterminate => {
debug!("(resolving module path for import) module \
resolution is indeterminate: %s",
*self.session.str_of(name));
self.session.str_of(name));
return Indeterminate;
}
Success(target) => {
@ -2628,7 +2628,7 @@ impl Resolver {
self.session.span_err(span,
fmt!("not a \
module `%s`",
*self.session.
self.session.
str_of(
name)));
return Failed;
@ -2656,7 +2656,7 @@ impl Resolver {
// There are no type bindings at all.
self.session.span_err(span,
fmt!("not a module `%s`",
*self.session.str_of(
self.session.str_of(
name)));
return Failed;
}
@ -2783,7 +2783,7 @@ impl Resolver {
-> ResolveResult<Target> {
debug!("(resolving item in lexical scope) resolving `%s` in \
namespace %? in `%s`",
*self.session.str_of(name),
self.session.str_of(name),
namespace,
self.module_to_str(module_));
@ -2997,11 +2997,11 @@ impl Resolver {
// top of the crate otherwise.
let mut containing_module;
let mut i;
if *token::ident_to_str(&module_path[0]) == ~"self" {
if "self" == token::ident_to_str(&module_path[0]) {
containing_module =
self.get_nearest_normal_module_parent_or_self(module_);
i = 1;
} else if *token::ident_to_str(&module_path[0]) == ~"super" {
} else if "super" == token::ident_to_str(&module_path[0]) {
containing_module =
self.get_nearest_normal_module_parent_or_self(module_);
i = 0; // We'll handle `super` below.
@ -3011,7 +3011,7 @@ impl Resolver {
// Now loop through all the `super`s we find.
while i < module_path.len() &&
*token::ident_to_str(&module_path[i]) == ~"super" {
"super" == token::ident_to_str(&module_path[i]) {
debug!("(resolving module prefix) resolving `super` at %s",
self.module_to_str(containing_module));
match self.get_nearest_normal_module_parent(containing_module) {
@ -3039,7 +3039,7 @@ impl Resolver {
name_search_type: NameSearchType)
-> ResolveResult<Target> {
debug!("(resolving name in module) resolving `%s` in `%s`",
*self.session.str_of(name),
self.session.str_of(name),
self.module_to_str(module_));
// First, check the direct children of the module.
@ -3112,7 +3112,7 @@ impl Resolver {
// We're out of luck.
debug!("(resolving name in module) failed to resolve `%s`",
*self.session.str_of(name));
self.session.str_of(name));
return Failed;
}
@ -3230,7 +3230,7 @@ impl Resolver {
(Some(d), Some(Public)) => {
debug!("(computing exports) YES: %s '%s' => %?",
if reexport { ~"reexport" } else { ~"export"},
*self.session.str_of(ident),
self.session.str_of(ident),
def_id_of_def(d));
exports2.push(Export2 {
reexport: reexport,
@ -3252,7 +3252,7 @@ impl Resolver {
module_: @mut Module) {
for module_.children.each |ident, namebindings| {
debug!("(computing exports) maybe export '%s'",
*self.session.str_of(*ident));
self.session.str_of(*ident));
self.add_exports_of_namebindings(&mut *exports2,
*ident,
*namebindings,
@ -3268,14 +3268,14 @@ impl Resolver {
for module_.import_resolutions.each |ident, importresolution| {
if importresolution.privacy != Public {
debug!("(computing exports) not reexporting private `%s`",
*self.session.str_of(*ident));
self.session.str_of(*ident));
loop;
}
for [ TypeNS, ValueNS ].each |ns| {
match importresolution.target_for_namespace(*ns) {
Some(target) => {
debug!("(computing exports) maybe reexport '%s'",
*self.session.str_of(*ident));
self.session.str_of(*ident));
self.add_exports_of_namebindings(&mut *exports2,
*ident,
target.bindings,
@ -3318,7 +3318,7 @@ impl Resolver {
match orig_module.children.find(&name) {
None => {
debug!("!!! (with scope) didn't find `%s` in `%s`",
*self.session.str_of(name),
self.session.str_of(name),
self.module_to_str(orig_module));
}
Some(name_bindings) => {
@ -3326,7 +3326,7 @@ impl Resolver {
None => {
debug!("!!! (with scope) didn't find module \
for `%s` in `%s`",
*self.session.str_of(name),
self.session.str_of(name),
self.module_to_str(orig_module));
}
Some(module_) => {
@ -3503,7 +3503,7 @@ impl Resolver {
pub fn resolve_item(@mut self, item: @item, visitor: ResolveVisitor) {
debug!("(resolving item) resolving %s",
*self.session.str_of(item.ident));
self.session.str_of(item.ident));
// Items with the !resolve_unexported attribute are X-ray contexts.
// This is used to allow the test runner to run unexported tests.
@ -4044,7 +4044,7 @@ impl Resolver {
p.span,
fmt!("variable `%s` from pattern #1 is \
not bound in pattern #%u",
*self.session.str_of(key), i + 1));
self.session.str_of(key), i + 1));
}
Some(binding_i) => {
if binding_0.binding_mode != binding_i.binding_mode {
@ -4052,7 +4052,7 @@ impl Resolver {
binding_i.span,
fmt!("variable `%s` is bound with different \
mode in pattern #%u than in pattern #1",
*self.session.str_of(key), i + 1));
self.session.str_of(key), i + 1));
}
}
}
@ -4064,7 +4064,7 @@ impl Resolver {
binding.span,
fmt!("variable `%s` from pattern #%u is \
not bound in pattern #1",
*self.session.str_of(key), i + 1));
self.session.str_of(key), i + 1));
}
}
}
@ -4148,7 +4148,7 @@ impl Resolver {
Some(def) => {
debug!("(resolving type) resolved `%s` to \
type %?",
*self.session.str_of(
self.session.str_of(
*path.idents.last()),
def);
result_def = Some(def);
@ -4224,7 +4224,7 @@ impl Resolver {
if mode == RefutableMode => {
debug!("(resolving pattern) resolving `%s` to \
struct or enum variant",
*self.session.str_of(ident));
self.session.str_of(ident));
self.enforce_default_binding_mode(
pattern,
@ -4238,13 +4238,13 @@ impl Resolver {
shadows an enum \
variant or unit-like \
struct in scope",
*self.session
self.session
.str_of(ident)));
}
FoundConst(def) if mode == RefutableMode => {
debug!("(resolving pattern) resolving `%s` to \
constant",
*self.session.str_of(ident));
self.session.str_of(ident));
self.enforce_default_binding_mode(
pattern,
@ -4259,7 +4259,7 @@ impl Resolver {
}
BareIdentifierPatternUnresolved => {
debug!("(resolving pattern) binding `%s`",
*self.session.str_of(ident));
self.session.str_of(ident));
let is_mutable = mutability == Mutable;
@ -4350,7 +4350,7 @@ impl Resolver {
self.session.span_err(
path.span,
fmt!("`%s` is not an enum variant or constant",
*self.session.str_of(
self.session.str_of(
*path.idents.last())));
}
None => {
@ -4378,7 +4378,7 @@ impl Resolver {
self.session.span_err(
path.span,
fmt!("`%s` is not an enum variant, struct or const",
*self.session.str_of(
self.session.str_of(
*path.idents.last())));
}
None => {
@ -4753,7 +4753,7 @@ impl Resolver {
Some(dl_def(def)) => {
debug!("(resolving path in local ribs) resolved `%s` to \
local: %?",
*self.session.str_of(ident),
self.session.str_of(ident),
def);
return Some(def);
}
@ -4811,7 +4811,7 @@ impl Resolver {
Some(def) => {
debug!("(resolving item path in lexical scope) \
resolved `%s` to item",
*self.session.str_of(ident));
self.session.str_of(ident));
return Some(def);
}
}
@ -4828,17 +4828,17 @@ impl Resolver {
pub fn find_best_match_for_name(@mut self,
name: &str,
max_distance: uint)
-> Option<~str> {
-> Option<@str> {
let this = &mut *self;
let mut maybes: ~[~str] = ~[];
let mut maybes: ~[@str] = ~[];
let mut values: ~[uint] = ~[];
let mut j = this.value_ribs.len();
while j != 0 {
j -= 1;
for this.value_ribs[j].bindings.each_key |&k| {
vec::push(&mut maybes, copy *this.session.str_of(k));
vec::push(&mut maybes, this.session.str_of(k));
vec::push(&mut values, uint::max_value);
}
}
@ -4857,7 +4857,7 @@ impl Resolver {
values[smallest] != uint::max_value &&
values[smallest] < name.len() + 2 &&
values[smallest] <= max_distance &&
maybes[smallest] != name.to_owned() {
name != maybes[smallest] {
Some(vec::swap_remove(&mut maybes, smallest))
@ -4882,7 +4882,7 @@ impl Resolver {
match field.node.kind {
unnamed_field => {},
named_field(ident, _) => {
if str::eq_slice(*this.session.str_of(ident),
if str::eq_slice(this.session.str_of(ident),
name) {
return true
}
@ -5007,7 +5007,7 @@ impl Resolver {
self.session.span_err(expr.span,
fmt!("use of undeclared label \
`%s`",
*self.session.str_of(
self.session.str_of(
label))),
Some(dl_def(def @ def_label(_))) => {
self.record_def(expr.id, def)
@ -5122,7 +5122,7 @@ impl Resolver {
pub fn search_for_traits_containing_method(@mut self, name: ident)
-> ~[def_id] {
debug!("(searching for traits containing method) looking for '%s'",
*self.session.str_of(name));
self.session.str_of(name));
let mut found_traits = ~[];
@ -5227,7 +5227,7 @@ impl Resolver {
debug!("(adding trait info) found trait %d:%d for method '%s'",
trait_def_id.crate,
trait_def_id.node,
*self.session.str_of(name));
self.session.str_of(name));
found_traits.push(trait_def_id);
}
@ -5346,7 +5346,7 @@ impl Resolver {
debug!("Children:");
for module_.children.each_key |&name| {
debug!("* %s", *self.session.str_of(name));
debug!("* %s", self.session.str_of(name));
}
debug!("Import resolutions:");
@ -5369,7 +5369,7 @@ impl Resolver {
}
}
debug!("* %s:%s%s", *self.session.str_of(*name),
debug!("* %s:%s%s", self.session.str_of(*name),
value_repr, type_repr);
}
}

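A rough sketch of the suggestion lookup (`find_best_match_for_name`) after the change, again with `Rc<str>` as the modern stand-in for `@str`: candidate names stay shared strings and are compared against the borrowed `&str` query without any `.to_owned()` allocation. The edit-distance helper and all names below are made up for illustration.

    use std::rc::Rc;

    fn find_best_match(name: &str, candidates: &[Rc<str>]) -> Option<Rc<str>> {
        candidates
            .iter()
            // An exact match is not a useful suggestion; `&**c` is a plain &str,
            // so the comparison allocates nothing.
            .filter(|&c| &**c != name)
            .min_by_key(|&c| edit_distance(name, &**c))
            .cloned()
    }

    // Tiny Levenshtein distance, enough for the sketch.
    fn edit_distance(a: &str, b: &str) -> usize {
        let b_chars: Vec<char> = b.chars().collect();
        let mut prev: Vec<usize> = (0..=b_chars.len()).collect();
        for (i, ca) in a.chars().enumerate() {
            let mut cur = vec![i + 1];
            for (j, &cb) in b_chars.iter().enumerate() {
                let cost = if ca == cb { 0 } else { 1 };
                cur.push((prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1));
            }
            prev = cur;
        }
        *prev.last().unwrap()
    }

    fn main() {
        let ribs: Vec<Rc<str>> = vec![Rc::from("count"), Rc::from("counter"), Rc::from("total")];
        println!("{:?}", find_best_match("totol", &ribs)); // prints Some("total")
    }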

@ -1699,7 +1699,7 @@ pub fn trans_match_inner(scope_cx: block,
// Special case for empty types
let fail_cx = @mut None;
let f: mk_fail = || mk_fail(scope_cx, discr_expr.span,
@~"scrutinizing value that can't exist", fail_cx);
@"scrutinizing value that can't exist", fail_cx);
Some(f)
} else {
None
@ -1731,7 +1731,7 @@ pub fn trans_match_inner(scope_cx: block,
bcx = controlflow::join_blocks(scope_cx, arm_cxs);
return bcx;
fn mk_fail(bcx: block, sp: span, msg: @~str,
fn mk_fail(bcx: block, sp: span, msg: @str,
finished: @mut Option<BasicBlockRef>) -> BasicBlockRef {
match *finished { Some(bb) => return bb, _ => () }
let fail_cx = sub_block(bcx, "case_fallthrough");


@ -33,7 +33,7 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
// Prepare the output operands
let outputs = do ia.outputs.map |&(c, out)| {
constraints.push(copy *c);
constraints.push(c);
aoutputs.push(unpack_result!(bcx, {
callee::trans_arg_expr(bcx,
@ -69,7 +69,7 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
// Now the input operands
let inputs = do ia.inputs.map |&(c, in)| {
constraints.push(copy *c);
constraints.push(c);
unpack_result!(bcx, {
callee::trans_arg_expr(bcx,
@ -90,14 +90,14 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
let mut constraints = constraints.connect(",");
let mut clobbers = getClobbers();
if *ia.clobbers != ~"" && clobbers != ~"" {
clobbers = *ia.clobbers + "," + clobbers;
if !ia.clobbers.is_empty() && !clobbers.is_empty() {
clobbers = fmt!("%s,%s", ia.clobbers, clobbers);
} else {
clobbers += *ia.clobbers;
clobbers += ia.clobbers;
};
// Add the clobbers to our constraints list
if clobbers != ~"" && constraints != ~"" {
if !clobbers.is_empty() && !constraints.is_empty() {
constraints += ",";
constraints += clobbers;
} else {
@ -122,7 +122,7 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
ast::asm_intel => lib::llvm::AD_Intel
};
let r = do str::as_c_str(*ia.asm) |a| {
let r = do str::as_c_str(ia.asm) |a| {
do str::as_c_str(constraints) |c| {
InlineAsmCall(bcx, a, c, inputs, output, ia.volatile, ia.alignstack, dialect)
}

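A small sketch of the clobber joining above, with `format!` playing the role of `fmt!`: emptiness is tested with `is_empty()` rather than by comparing against `~""`, and the pieces are joined without dereferencing an `@~str`. The clobber strings themselves are invented for the example.

    fn join_clobbers(asm_clobbers: &str, default_clobbers: &str) -> String {
        if !asm_clobbers.is_empty() && !default_clobbers.is_empty() {
            format!("{},{}", asm_clobbers, default_clobbers)
        } else {
            // At most one side is non-empty, so plain concatenation gives the same result.
            format!("{}{}", asm_clobbers, default_clobbers)
        }
    }

    fn main() {
        assert_eq!(join_clobbers("~{eax}", "~{dirflag}"), "~{eax},~{dirflag}");
        assert_eq!(join_clobbers("", "~{dirflag}"), "~{dirflag}");
        assert_eq!(join_clobbers("~{eax}", ""), "~{eax}");
        println!("ok");
    }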

@ -485,9 +485,9 @@ pub fn set_glue_inlining(f: ValueRef, t: ty::t) {
// Double-check that we never ask LLVM to declare the same symbol twice. It
// silently mangles such symbols, breaking our linkage model.
pub fn note_unique_llvm_symbol(ccx: @CrateContext, sym: @~str) {
pub fn note_unique_llvm_symbol(ccx: @CrateContext, sym: @str) {
if ccx.all_llvm_symbols.contains(&sym) {
ccx.sess.bug(~"duplicate LLVM symbol: " + *sym);
ccx.sess.bug(~"duplicate LLVM symbol: " + sym);
}
ccx.all_llvm_symbols.insert(sym);
}
@ -569,7 +569,7 @@ pub fn compare_scalar_types(cx: block,
rslt(
controlflow::trans_fail(
cx, None,
@~"attempt to compare values of type type"),
@"attempt to compare values of type type"),
C_nil())
}
_ => {
@ -791,9 +791,9 @@ pub fn cast_shift_rhs(op: ast::binop,
pub fn fail_if_zero(cx: block, span: span, divrem: ast::binop,
rhs: ValueRef, rhs_t: ty::t) -> block {
let text = if divrem == ast::div {
@~"attempted to divide by zero"
@"attempted to divide by zero"
} else {
@~"attempted remainder with a divisor of zero"
@"attempted remainder with a divisor of zero"
};
let is_zero = match ty::get(rhs_t).sty {
ty::ty_int(t) => {
@ -1056,19 +1056,19 @@ pub fn load_if_immediate(cx: block, v: ValueRef, t: ty::t) -> ValueRef {
return v;
}
pub fn trans_trace(bcx: block, sp_opt: Option<span>, trace_str: @~str) {
pub fn trans_trace(bcx: block, sp_opt: Option<span>, trace_str: @str) {
if !bcx.sess().trace() { return; }
let _icx = bcx.insn_ctxt("trans_trace");
add_comment(bcx, *trace_str);
add_comment(bcx, trace_str);
let V_trace_str = C_cstr(bcx.ccx(), trace_str);
let (V_filename, V_line) = match sp_opt {
Some(sp) => {
let sess = bcx.sess();
let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo);
(C_cstr(bcx.ccx(), @/*bad*/copy loc.file.name), loc.line as int)
(C_cstr(bcx.ccx(), loc.file.name), loc.line as int)
}
None => {
(C_cstr(bcx.ccx(), @~"<runtime>"), 0)
(C_cstr(bcx.ccx(), @"<runtime>"), 0)
}
};
let ccx = bcx.ccx();
@ -1187,7 +1187,7 @@ pub fn new_block(cx: fn_ctxt, parent: Option<block>, kind: block_kind,
special_idents::invalid
};
unsafe {
let llbb = str::as_c_str(*cx.ccx.sess.str_of(s), |buf| {
let llbb = str::as_c_str(cx.ccx.sess.str_of(s), |buf| {
llvm::LLVMAppendBasicBlockInContext(cx.ccx.llcx, cx.llfn, buf)
});
let bcx = mk_block(llbb,
@ -1308,7 +1308,7 @@ pub fn cleanup_and_leave(bcx: block,
if bcx.sess().trace() {
trans_trace(
bcx, None,
@fmt!("cleanup_and_leave(%s)", cur.to_str()));
(fmt!("cleanup_and_leave(%s)", cur.to_str())).to_managed());
}
match cur.kind {
@ -1427,7 +1427,7 @@ pub fn alloc_local(cx: block, local: @ast::local) -> block {
let val = alloc_ty(cx, t);
if cx.sess().opts.debuginfo {
for simple_name.iter().advance |name| {
str::as_c_str(*cx.ccx().sess.str_of(*name), |buf| {
str::as_c_str(cx.ccx().sess.str_of(*name), |buf| {
unsafe {
llvm::LLVMSetValueName(val, buf)
}
@ -1453,12 +1453,8 @@ pub fn call_memcpy(cx: block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, a
let _icx = cx.insn_ctxt("call_memcpy");
let ccx = cx.ccx();
let key = match ccx.sess.targ_cfg.arch {
X86 | Arm | Mips => {
~"llvm.memcpy.p0i8.p0i8.i32"
}
X86_64 => {
~"llvm.memcpy.p0i8.p0i8.i64"
}
X86 | Arm | Mips => "llvm.memcpy.p0i8.p0i8.i32",
X86_64 => "llvm.memcpy.p0i8.p0i8.i64"
};
let memcpy = *ccx.intrinsics.get(&key);
let src_ptr = PointerCast(cx, src, T_ptr(T_i8()));
@ -1499,15 +1495,10 @@ pub fn memzero(cx: block, llptr: ValueRef, llty: TypeRef) {
let _icx = cx.insn_ctxt("memzero");
let ccx = cx.ccx();
let intrinsic_key;
match ccx.sess.targ_cfg.arch {
X86 | Arm | Mips => {
intrinsic_key = ~"llvm.memset.p0i8.i32";
}
X86_64 => {
intrinsic_key = ~"llvm.memset.p0i8.i64";
}
}
let intrinsic_key = match ccx.sess.targ_cfg.arch {
X86 | Arm | Mips => "llvm.memset.p0i8.i32",
X86_64 => "llvm.memset.p0i8.i64"
};
let llintrinsicfn = *ccx.intrinsics.get(&intrinsic_key);
let llptr = PointerCast(cx, llptr, T_ptr(T_i8()));
@ -2512,7 +2503,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::node_id) -> ValueRef {
ast::foreign_item_const(*) => {
let typ = ty::node_id_to_type(tcx, ni.id);
let ident = token::ident_to_str(&ni.ident);
let g = do str::as_c_str(*ident) |buf| {
let g = do str::as_c_str(ident) |buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod,
type_of(ccx, typ),
@ -2607,10 +2598,10 @@ pub fn trans_constant(ccx: @CrateContext, it: @ast::item) {
path_name(variant.node.name),
path_name(special_idents::descrim)
]);
let s = @mangle_exported_name(ccx, p, ty::mk_int());
let s = mangle_exported_name(ccx, p, ty::mk_int()).to_managed();
let disr_val = vi[i].disr_val;
note_unique_llvm_symbol(ccx, s);
let discrim_gvar = str::as_c_str(*s, |buf| {
let discrim_gvar = str::as_c_str(s, |buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type, buf)
}
@ -2649,7 +2640,7 @@ pub fn p2i(ccx: @CrateContext, v: ValueRef) -> ValueRef {
}
}
pub fn declare_intrinsics(llmod: ModuleRef) -> HashMap<~str, ValueRef> {
pub fn declare_intrinsics(llmod: ModuleRef) -> HashMap<&'static str, ValueRef> {
let T_memcpy32_args: ~[TypeRef] =
~[T_ptr(T_i8()), T_ptr(T_i8()), T_i32(), T_i32(), T_i1()];
let T_memcpy64_args: ~[TypeRef] =
@ -2783,80 +2774,80 @@ pub fn declare_intrinsics(llmod: ModuleRef) -> HashMap<~str, ValueRef> {
T_fn([T_i64()], T_i64()));
let mut intrinsics = HashMap::new();
intrinsics.insert(~"llvm.gcroot", gcroot);
intrinsics.insert(~"llvm.gcread", gcread);
intrinsics.insert(~"llvm.memcpy.p0i8.p0i8.i32", memcpy32);
intrinsics.insert(~"llvm.memcpy.p0i8.p0i8.i64", memcpy64);
intrinsics.insert(~"llvm.memmove.p0i8.p0i8.i32", memmove32);
intrinsics.insert(~"llvm.memmove.p0i8.p0i8.i64", memmove64);
intrinsics.insert(~"llvm.memset.p0i8.i32", memset32);
intrinsics.insert(~"llvm.memset.p0i8.i64", memset64);
intrinsics.insert(~"llvm.trap", trap);
intrinsics.insert(~"llvm.frameaddress", frameaddress);
intrinsics.insert(~"llvm.sqrt.f32", sqrtf32);
intrinsics.insert(~"llvm.sqrt.f64", sqrtf64);
intrinsics.insert(~"llvm.powi.f32", powif32);
intrinsics.insert(~"llvm.powi.f64", powif64);
intrinsics.insert(~"llvm.sin.f32", sinf32);
intrinsics.insert(~"llvm.sin.f64", sinf64);
intrinsics.insert(~"llvm.cos.f32", cosf32);
intrinsics.insert(~"llvm.cos.f64", cosf64);
intrinsics.insert(~"llvm.pow.f32", powf32);
intrinsics.insert(~"llvm.pow.f64", powf64);
intrinsics.insert(~"llvm.exp.f32", expf32);
intrinsics.insert(~"llvm.exp.f64", expf64);
intrinsics.insert(~"llvm.exp2.f32", exp2f32);
intrinsics.insert(~"llvm.exp2.f64", exp2f64);
intrinsics.insert(~"llvm.log.f32", logf32);
intrinsics.insert(~"llvm.log.f64", logf64);
intrinsics.insert(~"llvm.log10.f32", log10f32);
intrinsics.insert(~"llvm.log10.f64", log10f64);
intrinsics.insert(~"llvm.log2.f32", log2f32);
intrinsics.insert(~"llvm.log2.f64", log2f64);
intrinsics.insert(~"llvm.fma.f32", fmaf32);
intrinsics.insert(~"llvm.fma.f64", fmaf64);
intrinsics.insert(~"llvm.fabs.f32", fabsf32);
intrinsics.insert(~"llvm.fabs.f64", fabsf64);
intrinsics.insert(~"llvm.floor.f32", floorf32);
intrinsics.insert(~"llvm.floor.f64", floorf64);
intrinsics.insert(~"llvm.ceil.f32", ceilf32);
intrinsics.insert(~"llvm.ceil.f64", ceilf64);
intrinsics.insert(~"llvm.trunc.f32", truncf32);
intrinsics.insert(~"llvm.trunc.f64", truncf64);
intrinsics.insert(~"llvm.ctpop.i8", ctpop8);
intrinsics.insert(~"llvm.ctpop.i16", ctpop16);
intrinsics.insert(~"llvm.ctpop.i32", ctpop32);
intrinsics.insert(~"llvm.ctpop.i64", ctpop64);
intrinsics.insert(~"llvm.ctlz.i8", ctlz8);
intrinsics.insert(~"llvm.ctlz.i16", ctlz16);
intrinsics.insert(~"llvm.ctlz.i32", ctlz32);
intrinsics.insert(~"llvm.ctlz.i64", ctlz64);
intrinsics.insert(~"llvm.cttz.i8", cttz8);
intrinsics.insert(~"llvm.cttz.i16", cttz16);
intrinsics.insert(~"llvm.cttz.i32", cttz32);
intrinsics.insert(~"llvm.cttz.i64", cttz64);
intrinsics.insert(~"llvm.bswap.i16", bswap16);
intrinsics.insert(~"llvm.bswap.i32", bswap32);
intrinsics.insert(~"llvm.bswap.i64", bswap64);
intrinsics.insert("llvm.gcroot", gcroot);
intrinsics.insert("llvm.gcread", gcread);
intrinsics.insert("llvm.memcpy.p0i8.p0i8.i32", memcpy32);
intrinsics.insert("llvm.memcpy.p0i8.p0i8.i64", memcpy64);
intrinsics.insert("llvm.memmove.p0i8.p0i8.i32", memmove32);
intrinsics.insert("llvm.memmove.p0i8.p0i8.i64", memmove64);
intrinsics.insert("llvm.memset.p0i8.i32", memset32);
intrinsics.insert("llvm.memset.p0i8.i64", memset64);
intrinsics.insert("llvm.trap", trap);
intrinsics.insert("llvm.frameaddress", frameaddress);
intrinsics.insert("llvm.sqrt.f32", sqrtf32);
intrinsics.insert("llvm.sqrt.f64", sqrtf64);
intrinsics.insert("llvm.powi.f32", powif32);
intrinsics.insert("llvm.powi.f64", powif64);
intrinsics.insert("llvm.sin.f32", sinf32);
intrinsics.insert("llvm.sin.f64", sinf64);
intrinsics.insert("llvm.cos.f32", cosf32);
intrinsics.insert("llvm.cos.f64", cosf64);
intrinsics.insert("llvm.pow.f32", powf32);
intrinsics.insert("llvm.pow.f64", powf64);
intrinsics.insert("llvm.exp.f32", expf32);
intrinsics.insert("llvm.exp.f64", expf64);
intrinsics.insert("llvm.exp2.f32", exp2f32);
intrinsics.insert("llvm.exp2.f64", exp2f64);
intrinsics.insert("llvm.log.f32", logf32);
intrinsics.insert("llvm.log.f64", logf64);
intrinsics.insert("llvm.log10.f32", log10f32);
intrinsics.insert("llvm.log10.f64", log10f64);
intrinsics.insert("llvm.log2.f32", log2f32);
intrinsics.insert("llvm.log2.f64", log2f64);
intrinsics.insert("llvm.fma.f32", fmaf32);
intrinsics.insert("llvm.fma.f64", fmaf64);
intrinsics.insert("llvm.fabs.f32", fabsf32);
intrinsics.insert("llvm.fabs.f64", fabsf64);
intrinsics.insert("llvm.floor.f32", floorf32);
intrinsics.insert("llvm.floor.f64", floorf64);
intrinsics.insert("llvm.ceil.f32", ceilf32);
intrinsics.insert("llvm.ceil.f64", ceilf64);
intrinsics.insert("llvm.trunc.f32", truncf32);
intrinsics.insert("llvm.trunc.f64", truncf64);
intrinsics.insert("llvm.ctpop.i8", ctpop8);
intrinsics.insert("llvm.ctpop.i16", ctpop16);
intrinsics.insert("llvm.ctpop.i32", ctpop32);
intrinsics.insert("llvm.ctpop.i64", ctpop64);
intrinsics.insert("llvm.ctlz.i8", ctlz8);
intrinsics.insert("llvm.ctlz.i16", ctlz16);
intrinsics.insert("llvm.ctlz.i32", ctlz32);
intrinsics.insert("llvm.ctlz.i64", ctlz64);
intrinsics.insert("llvm.cttz.i8", cttz8);
intrinsics.insert("llvm.cttz.i16", cttz16);
intrinsics.insert("llvm.cttz.i32", cttz32);
intrinsics.insert("llvm.cttz.i64", cttz64);
intrinsics.insert("llvm.bswap.i16", bswap16);
intrinsics.insert("llvm.bswap.i32", bswap32);
intrinsics.insert("llvm.bswap.i64", bswap64);
return intrinsics;
}
pub fn declare_dbg_intrinsics(llmod: ModuleRef,
intrinsics: &mut HashMap<~str, ValueRef>) {
intrinsics: &mut HashMap<&'static str, ValueRef>) {
let declare =
decl_cdecl_fn(llmod, "llvm.dbg.declare",
T_fn([T_metadata(), T_metadata()], T_void()));
let value =
decl_cdecl_fn(llmod, "llvm.dbg.value",
T_fn([T_metadata(), T_i64(), T_metadata()], T_void()));
intrinsics.insert(~"llvm.dbg.declare", declare);
intrinsics.insert(~"llvm.dbg.value", value);
intrinsics.insert("llvm.dbg.declare", declare);
intrinsics.insert("llvm.dbg.value", value);
}
pub fn trap(bcx: block) {
let v: ~[ValueRef] = ~[];
match bcx.ccx().intrinsics.find(&~"llvm.trap") {
match bcx.ccx().intrinsics.find(& &"llvm.trap") {
Some(&x) => { Call(bcx, x, v); },
_ => bcx.sess().bug("unbound llvm.trap in trap")
}
@ -2891,7 +2882,7 @@ pub fn create_module_map(ccx: @CrateContext) -> ValueRef {
lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage);
let mut elts: ~[ValueRef] = ~[];
for ccx.module_data.each |key, &val| {
let elt = C_struct([p2i(ccx, C_cstr(ccx, @/*bad*/ copy *key)),
let elt = C_struct([p2i(ccx, C_cstr(ccx, /* bad */key.to_managed())),
p2i(ccx, val)]);
elts.push(elt);
}
@ -2934,9 +2925,10 @@ pub fn fill_crate_map(ccx: @CrateContext, map: ValueRef) {
let cstore = ccx.sess.cstore;
while cstore::have_crate_data(cstore, i) {
let cdata = cstore::get_crate_data(cstore, i);
let nm = ~"_rust_crate_map_" + *cdata.name +
"_" + *cstore::get_crate_vers(cstore, i) +
"_" + *cstore::get_crate_hash(cstore, i);
let nm = fmt!("_rust_crate_map_%s_%s_%s",
cdata.name,
cstore::get_crate_vers(cstore, i),
cstore::get_crate_hash(cstore, i));
let cr = str::as_c_str(nm, |buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type, buf)

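A sketch of the reshaped intrinsics table: the keys are plain `&'static str` literals, so building and probing the map never allocates or copies a string, which is also why the call sites above now pass double references like `& &"llvm.trap"` to `find`. `ValueRef` is faked as an integer here; only the keying scheme mirrors the patch.

    use std::collections::HashMap;

    type ValueRef = usize; // stand-in for the LLVM handle

    fn declare_intrinsics() -> HashMap<&'static str, ValueRef> {
        let mut intrinsics = HashMap::new();
        intrinsics.insert("llvm.memcpy.p0i8.p0i8.i32", 1);
        intrinsics.insert("llvm.memcpy.p0i8.p0i8.i64", 2);
        intrinsics.insert("llvm.trap", 3);
        intrinsics
    }

    fn main() {
        let intrinsics = declare_intrinsics();
        // The lookup key is itself a &'static str, so a reference to it is a
        // double reference, mirroring `& &"llvm.trap"` in the patch.
        let key: &'static str = "llvm.trap";
        let trap = intrinsics.get(&key).copied().expect("unbound llvm.trap");
        println!("llvm.trap -> {}", trap);
    }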

@ -165,7 +165,7 @@ pub struct CrateContext {
td: TargetData,
tn: @TypeNames,
externs: ExternMap,
intrinsics: HashMap<~str, ValueRef>,
intrinsics: HashMap<&'static str, ValueRef>,
item_vals: @mut HashMap<ast::node_id, ValueRef>,
exp_map2: resolve::ExportMap2,
reachable: reachable::map,
@ -173,7 +173,7 @@ pub struct CrateContext {
link_meta: LinkMeta,
enum_sizes: @mut HashMap<ty::t, uint>,
discrims: @mut HashMap<ast::def_id, ValueRef>,
discrim_symbols: @mut HashMap<ast::node_id, @~str>,
discrim_symbols: @mut HashMap<ast::node_id, @str>,
tydescs: @mut HashMap<ty::t, @mut tydesc_info>,
// Set when running emit_tydescs to enforce that no more tydescs are
// created.
@ -188,7 +188,7 @@ pub struct CrateContext {
// Cache generated vtables
vtables: @mut HashMap<mono_id, ValueRef>,
// Cache of constant strings,
const_cstr_cache: @mut HashMap<@~str, ValueRef>,
const_cstr_cache: @mut HashMap<@str, ValueRef>,
// Reverse-direction for const ptrs cast from globals.
// Key is an int, cast from a ValueRef holding a *T,
@ -215,7 +215,7 @@ pub struct CrateContext {
symbol_hasher: @mut hash::State,
type_hashcodes: @mut HashMap<ty::t, @str>,
type_short_names: @mut HashMap<ty::t, ~str>,
all_llvm_symbols: @mut HashSet<@~str>,
all_llvm_symbols: @mut HashSet<@str>,
tcx: ty::ctxt,
maps: astencode::Maps,
stats: @mut Stats,
@ -1176,14 +1176,14 @@ pub fn C_u8(i: uint) -> ValueRef {
// This is a 'c-like' raw string, which differs from
// our boxed-and-length-annotated strings.
pub fn C_cstr(cx: @CrateContext, s: @~str) -> ValueRef {
pub fn C_cstr(cx: @CrateContext, s: @str) -> ValueRef {
unsafe {
match cx.const_cstr_cache.find(&s) {
Some(&llval) => return llval,
None => ()
}
let sc = do str::as_c_str(*s) |buf| {
let sc = do str::as_c_str(s) |buf| {
llvm::LLVMConstStringInContext(cx.llcx, buf, s.len() as c_uint,
False)
};
@ -1202,7 +1202,7 @@ pub fn C_cstr(cx: @CrateContext, s: @~str) -> ValueRef {
// NB: Do not use `do_spill_noroot` to make this into a constant string, or
// you will be kicked off fast isel. See issue #4352 for an example of this.
pub fn C_estr_slice(cx: @CrateContext, s: @~str) -> ValueRef {
pub fn C_estr_slice(cx: @CrateContext, s: @str) -> ValueRef {
unsafe {
let len = s.len();
let cs = llvm::LLVMConstPointerCast(C_cstr(cx, s), T_ptr(T_i8()));
@ -1441,7 +1441,7 @@ pub fn path_str(sess: session::Session, p: &[path_elt]) -> ~str {
ast_map::path_name(s) | ast_map::path_mod(s) => {
if first { first = false; }
else { r += "::"; }
r += *sess.str_of(s);
r += sess.str_of(s);
}
}
}
@ -1564,7 +1564,7 @@ pub fn dummy_substs(tps: ~[ty::t]) -> ty::substs {
pub fn filename_and_line_num_from_span(bcx: block,
span: span) -> (ValueRef, ValueRef) {
let loc = bcx.sess().parse_sess.cm.lookup_char_pos(span.lo);
let filename_cstr = C_cstr(bcx.ccx(), @/*bad*/copy loc.file.name);
let filename_cstr = C_cstr(bcx.ccx(), loc.file.name);
let filename = build::PointerCast(bcx, filename_cstr, T_ptr(T_i8()));
let line = C_int(bcx.ccx(), loc.line as int);
(filename, line)

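A loose sketch of `const_cstr_cache` after the change, with `Rc<str>` standing in for `@str` and `CString` for the LLVM constant: constant C strings are memoized per shared key, so repeated requests for the same literal reuse one entry instead of cloning an owned `~str` key each time. The `Ctx` type and its method are illustrative only.

    use std::collections::HashMap;
    use std::ffi::CString;
    use std::rc::Rc;

    struct Ctx {
        const_cstr_cache: HashMap<Rc<str>, CString>,
    }

    impl Ctx {
        fn c_cstr(&mut self, s: Rc<str>) -> &CString {
            self.const_cstr_cache
                .entry(Rc::clone(&s))
                // Only the first request for a given string builds the C constant.
                .or_insert_with(|| CString::new(&*s).expect("no interior NUL"))
        }
    }

    fn main() {
        let mut cx = Ctx { const_cstr_cache: HashMap::new() };
        let msg: Rc<str> = Rc::from("attempted to divide by zero");
        println!("{:?}", cx.c_cstr(Rc::clone(&msg)));
        cx.c_cstr(msg); // same key: the cached entry is reused
        println!("cache entries: {}", cx.const_cstr_cache.len()); // 1
    }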

@ -54,12 +54,12 @@ pub fn const_lit(cx: @CrateContext, e: @ast::expr, lit: ast::lit)
ty_to_str(cx.tcx, lit_int_ty)))
}
}
ast::lit_float(fs, t) => C_floating(/*bad*/copy *fs, T_float_ty(cx, t)),
ast::lit_float(fs, t) => C_floating(fs, T_float_ty(cx, t)),
ast::lit_float_unsuffixed(fs) => {
let lit_float_ty = ty::node_id_to_type(cx.tcx, e.id);
match ty::get(lit_float_ty).sty {
ty::ty_float(t) => {
C_floating(/*bad*/copy *fs, T_float_ty(cx, t))
C_floating(fs, T_float_ty(cx, t))
}
_ => {
cx.sess.span_bug(lit.span,


@ -348,13 +348,13 @@ pub fn trans_fail_expr(bcx: block,
ppaux::ty_to_str(tcx, arg_datum.ty));
}
}
_ => trans_fail(bcx, sp_opt, @~"explicit failure")
_ => trans_fail(bcx, sp_opt, @"explicit failure")
}
}
pub fn trans_fail(bcx: block,
sp_opt: Option<span>,
fail_str: @~str)
fail_str: @str)
-> block {
let _icx = bcx.insn_ctxt("trans_fail");
let V_fail_str = C_cstr(bcx.ccx(), fail_str);
@ -371,11 +371,11 @@ fn trans_fail_value(bcx: block,
Some(sp) => {
let sess = bcx.sess();
let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo);
(C_cstr(bcx.ccx(), @/*bad*/ copy loc.file.name),
(C_cstr(bcx.ccx(), loc.file.name),
loc.line as int)
}
None => {
(C_cstr(bcx.ccx(), @~"<runtime>"), 0)
(C_cstr(bcx.ccx(), @"<runtime>"), 0)
}
};
let V_str = PointerCast(bcx, V_fail_str, T_ptr(T_i8()));


@ -316,7 +316,7 @@ fn create_block(cx: block) -> @Metadata<BlockMetadata> {
None => create_function(cx.fcx).node,
Some(bcx) => create_block(bcx).node
};
let file_node = create_file(cx.ccx(), fname);
let file_node = create_file(cx.ccx(), /* bad */ fname.to_owned());
let unique_id = match cache.find(&LexicalBlockTag) {
option::Some(v) => v.len() as int,
option::None => 0
@ -383,7 +383,7 @@ fn create_basic_type(cx: @CrateContext, t: ty::t, span: span)
};
let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname);
let file_node = create_file(cx, fname.to_owned());
let cu_node = create_compile_unit(cx);
let (size, align) = size_and_align_of(cx, t);
let lldata = ~[lltag(tg),
@ -420,7 +420,7 @@ fn create_pointer_type(cx: @CrateContext, t: ty::t, span: span,
}*/
let (size, align) = size_and_align_of(cx, t);
let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname);
let file_node = create_file(cx, fname.to_owned());
//let cu_node = create_compile_unit(cx, fname);
let name = ty_to_str(cx.tcx, t);
let llnode = create_derived_type(tg, file_node.node, name, 0, size * 8,
@ -438,7 +438,7 @@ fn create_pointer_type(cx: @CrateContext, t: ty::t, span: span,
struct StructCtxt {
file: ValueRef,
name: @~str,
name: @str,
line: int,
members: ~[ValueRef],
total_size: int,
@ -447,7 +447,7 @@ struct StructCtxt {
fn finish_structure(cx: @mut StructCtxt) -> ValueRef {
return create_composite_type(StructureTypeTag,
*cx.name,
cx.name,
cx.file,
cx.line,
cx.total_size,
@ -457,7 +457,7 @@ fn finish_structure(cx: @mut StructCtxt) -> ValueRef {
Some(/*bad*/copy cx.members));
}
fn create_structure(file: @Metadata<FileMetadata>, name: @~str, line: int)
fn create_structure(file: @Metadata<FileMetadata>, name: @str, line: int)
-> @mut StructCtxt {
let cx = @mut StructCtxt {
file: file.node,
@ -501,14 +501,14 @@ fn add_member(cx: @mut StructCtxt,
fn create_struct(cx: @CrateContext, t: ty::t, fields: ~[ty::field],
span: span) -> @Metadata<TyDescMetadata> {
let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname);
let scx = create_structure(file_node, @ty_to_str(cx.tcx, t),
let file_node = create_file(cx, fname.to_owned());
let scx = create_structure(file_node, (ty_to_str(cx.tcx, t)).to_managed(),
line_from_span(cx.sess.codemap, span) as int);
for fields.each |field| {
let field_t = field.mt.ty;
let ty_md = create_ty(cx, field_t, span);
let (size, align) = size_and_align_of(cx, field_t);
add_member(scx, *cx.sess.str_of(field.ident),
add_member(scx, cx.sess.str_of(field.ident),
line_from_span(cx.sess.codemap, span) as int,
size as int, align as int, ty_md.node);
}
@ -524,7 +524,7 @@ fn create_struct(cx: @CrateContext, t: ty::t, fields: ~[ty::field],
fn create_tuple(cx: @CrateContext, t: ty::t, elements: &[ty::t], span: span)
-> @Metadata<TyDescMetadata> {
let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname);
let file_node = create_file(cx, fname.to_owned());
let scx = create_structure(file_node,
cx.sess.str_of(
((/*bad*/copy cx.dbg_cx).get().names)
@ -566,12 +566,12 @@ fn create_boxed_type(cx: @CrateContext, contents: ty::t,
option::None {}
}*/
let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname);
let file_node = create_file(cx, fname.to_owned());
//let cu_node = create_compile_unit_metadata(cx, fname);
let int_t = ty::mk_int();
let refcount_type = create_basic_type(cx, int_t, span);
let name = ty_to_str(cx.tcx, contents);
let scx = create_structure(file_node, @fmt!("box<%s>", name), 0);
let scx = create_structure(file_node, (fmt!("box<%s>", name)).to_managed(), 0);
add_member(scx, "refcnt", 0, sys::size_of::<uint>() as int,
sys::min_align_of::<uint>() as int, refcount_type.node);
// the tydesc and other pointers should be irrelevant to the
@ -628,7 +628,7 @@ fn create_fixed_vec(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t,
len: int, span: span) -> @Metadata<TyDescMetadata> {
let t_md = create_ty(cx, elem_t, span);
let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname);
let file_node = create_file(cx, fname.to_owned());
let (size, align) = size_and_align_of(cx, elem_t);
let subrange = llmdnode([lltag(SubrangeTag), lli64(0), lli64(len - 1)]);
let name = fmt!("[%s]", ty_to_str(cx.tcx, elem_t));
@ -647,10 +647,10 @@ fn create_boxed_vec(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t,
vec_ty_span: codemap::span)
-> @Metadata<TyDescMetadata> {
let fname = filename_from_span(cx, vec_ty_span);
let file_node = create_file(cx, fname);
let file_node = create_file(cx, fname.to_owned());
let elem_ty_md = create_ty(cx, elem_t, vec_ty_span);
let vec_scx = create_structure(file_node,
@/*bad*/ copy ty_to_str(cx.tcx, vec_t), 0);
ty_to_str(cx.tcx, vec_t).to_managed(), 0);
let size_t_type = create_basic_type(cx, ty::mk_uint(), vec_ty_span);
add_member(vec_scx, "fill", 0, sys::size_of::<libc::size_t>() as int,
sys::min_align_of::<libc::size_t>() as int, size_t_type.node);
@ -673,7 +673,7 @@ fn create_boxed_vec(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t,
}
};
let box_scx = create_structure(file_node, @fmt!("box<%s>", name), 0);
let box_scx = create_structure(file_node, (fmt!("box<%s>", name)).to_managed(), 0);
let int_t = ty::mk_int();
let refcount_type = create_basic_type(cx, int_t, vec_ty_span);
add_member(box_scx, "refcnt", 0, sys::size_of::<uint>() as int,
@ -698,11 +698,11 @@ fn create_boxed_vec(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t,
fn create_vec_slice(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t, span: span)
-> @Metadata<TyDescMetadata> {
let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname);
let file_node = create_file(cx, fname.to_owned());
let elem_ty_md = create_ty(cx, elem_t, span);
let uint_type = create_basic_type(cx, ty::mk_uint(), span);
let elem_ptr = create_pointer_type(cx, elem_t, span, elem_ty_md);
let scx = create_structure(file_node, @ty_to_str(cx.tcx, vec_t), 0);
let scx = create_structure(file_node, ty_to_str(cx.tcx, vec_t).to_managed(), 0);
let (_, ptr_size, ptr_align) = voidptr();
add_member(scx, "vec", 0, ptr_size, ptr_align, elem_ptr.node);
add_member(scx, "length", 0, sys::size_of::<uint>() as int,
@ -720,7 +720,7 @@ fn create_vec_slice(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t, span: span)
fn create_fn_ty(cx: @CrateContext, fn_ty: ty::t, inputs: ~[ty::t], output: ty::t,
span: span) -> @Metadata<TyDescMetadata> {
let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname);
let file_node = create_file(cx, fname.to_owned());
let (vp, _, _) = voidptr();
let output_md = create_ty(cx, output, span);
let output_ptr_md = create_pointer_type(cx, output, span, output_md);
@ -817,8 +817,8 @@ fn create_ty(cx: @CrateContext, t: ty::t, span: span)
}
}
fn filename_from_span(cx: @CrateContext, sp: codemap::span) -> ~str {
/*bad*/copy cx.sess.codemap.lookup_char_pos(sp.lo).file.name
fn filename_from_span(cx: @CrateContext, sp: codemap::span) -> @str {
cx.sess.codemap.lookup_char_pos(sp.lo).file.name
}
fn create_var(type_tag: int, context: ValueRef, name: &str, file: ValueRef,
@ -853,12 +853,12 @@ pub fn create_local_var(bcx: block, local: @ast::local)
let loc = cx.sess.codemap.lookup_char_pos(local.span.lo);
let ty = node_id_type(bcx, local.node.id);
let tymd = create_ty(cx, ty, local.node.ty.span);
let filemd = create_file(cx, /*bad*/copy loc.file.name);
let filemd = create_file(cx, /*bad*/ loc.file.name.to_owned());
let context = match bcx.parent {
None => create_function(bcx.fcx).node,
Some(_) => create_block(bcx).node
};
let mdnode = create_var(tg, context, *cx.sess.str_of(name),
let mdnode = create_var(tg, context, cx.sess.str_of(name),
filemd.node, loc.line as int, tymd.node);
let mdval = @Metadata {
node: mdnode,
@ -878,7 +878,7 @@ pub fn create_local_var(bcx: block, local: @ast::local)
}
};
let declargs = ~[llmdnode([llptr]), mdnode];
trans::build::Call(bcx, *cx.intrinsics.get(&~"llvm.dbg.declare"),
trans::build::Call(bcx, *cx.intrinsics.get(&("llvm.dbg.declare")),
declargs);
return mdval;
}
@ -896,12 +896,12 @@ pub fn create_arg(bcx: block, arg: ast::arg, sp: span)
}
let loc = cx.sess.codemap.lookup_char_pos(sp.lo);
if loc.file.name == ~"<intrinsic>" {
if "<intrinsic>" == loc.file.name {
return None;
}
let ty = node_id_type(bcx, arg.id);
let tymd = create_ty(cx, ty, arg.ty.span);
let filemd = create_file(cx, /*bad*/copy loc.file.name);
let filemd = create_file(cx, /* bad */ loc.file.name.to_owned());
let context = create_function(bcx.fcx);
match arg.pat.node {
@ -910,7 +910,7 @@ pub fn create_arg(bcx: block, arg: ast::arg, sp: span)
let mdnode = create_var(
tg,
context.node,
*cx.sess.str_of(*path.idents.last()),
cx.sess.str_of(*path.idents.last()),
filemd.node,
loc.line as int,
tymd.node
@ -927,7 +927,7 @@ pub fn create_arg(bcx: block, arg: ast::arg, sp: span)
let llptr = fcx.llargs.get_copy(&arg.id);
let declargs = ~[llmdnode([llptr]), mdnode];
trans::build::Call(bcx,
*cx.intrinsics.get(&~"llvm.dbg.declare"),
*cx.intrinsics.get(&("llvm.dbg.declare")),
declargs);
return Some(mdval);
}
@ -1000,7 +1000,7 @@ pub fn create_function(fcx: fn_ctxt) -> @Metadata<SubProgramMetadata> {
}
let loc = cx.sess.codemap.lookup_char_pos(sp.lo);
let file_node = create_file(cx, copy loc.file.name).node;
let file_node = create_file(cx, loc.file.name.to_owned()).node;
let ty_node = if cx.sess.opts.extra_debuginfo {
match ret_ty.node {
ast::ty_nil => llnull(),
@ -1017,9 +1017,9 @@ pub fn create_function(fcx: fn_ctxt) -> @Metadata<SubProgramMetadata> {
let fn_metadata = ~[lltag(SubprogramTag),
llunused(),
file_node,
llstr(*cx.sess.str_of(ident)),
llstr(cx.sess.str_of(ident)),
//XXX fully-qualified C++ name:
llstr(*cx.sess.str_of(ident)),
llstr(cx.sess.str_of(ident)),
llstr(""), //XXX MIPS name?????
file_node,
lli32(loc.line as int),

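A sketch of the boundary conversions this file now needs: file names flow around as shared strings (`@str` in the patch, `Rc<str>` here), while the metadata constructor still wants an owned string, hence the scattered `/* bad */ .to_owned()` calls. Both functions below are stand-ins, not the real `create_file`/`filename_from_span`.

    use std::rc::Rc;

    fn filename_from_span(loc_file_name: &Rc<str>) -> Rc<str> {
        Rc::clone(loc_file_name) // cheap: a refcount bump, no character copy
    }

    fn create_file(name: String) -> String {
        // stands in for the metadata-node constructor that still takes an owned string
        format!("file node for {}", name)
    }

    fn main() {
        let loc_file_name: Rc<str> = Rc::from("src/debuginfo.rs");
        let fname = filename_from_span(&loc_file_name);
        // The one real copy happens at the API boundary, mirroring the
        // /* bad */ .to_owned() calls in the patch.
        println!("{}", create_file(fname.to_string()));
    }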

@ -452,7 +452,7 @@ fn trans_to_datum_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
fn trans_rvalue_datum_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
let _icx = bcx.insn_ctxt("trans_rvalue_datum_unadjusted");
trace_span!(bcx, expr.span, @shorten(bcx.expr_to_str(expr)));
trace_span!(bcx, expr.span, shorten(bcx.expr_to_str(expr)));
match expr.node {
ast::expr_path(_) | ast::expr_self => {
@ -507,7 +507,7 @@ fn trans_rvalue_stmt_unadjusted(bcx: block, expr: @ast::expr) -> block {
return bcx;
}
trace_span!(bcx, expr.span, @shorten(bcx.expr_to_str(expr)));
trace_span!(bcx, expr.span, shorten(bcx.expr_to_str(expr)));
match expr.node {
ast::expr_break(label_opt) => {
@ -560,7 +560,7 @@ fn trans_rvalue_dps_unadjusted(bcx: block, expr: @ast::expr,
let _icx = bcx.insn_ctxt("trans_rvalue_dps_unadjusted");
let tcx = bcx.tcx();
trace_span!(bcx, expr.span, @shorten(bcx.expr_to_str(expr)));
trace_span!(bcx, expr.span, shorten(bcx.expr_to_str(expr)));
match expr.node {
ast::expr_paren(e) => {
@ -821,7 +821,7 @@ fn trans_lvalue_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
debug!("trans_lvalue(expr=%s)", bcx.expr_to_str(expr));
let _indenter = indenter();
trace_span!(bcx, expr.span, @shorten(bcx.expr_to_str(expr)));
trace_span!(bcx, expr.span, shorten(bcx.expr_to_str(expr)));
return match expr.node {
ast::expr_paren(e) => {
@ -1703,6 +1703,6 @@ fn trans_assign_op(bcx: block,
return result_datum.copy_to_datum(bcx, DROP_EXISTING, dst_datum);
}
fn shorten(x: ~str) -> ~str {
if x.char_len() > 60 { x.slice_chars(0, 60).to_owned() } else { x }
fn shorten(x: &str) -> @str {
(if x.char_len() > 60 {x.slice_chars(0, 60)} else {x}).to_managed()
}

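A sketch of the new `shorten()` in modern terms: accept any `&str`, cut at 60 characters, and hand back a shared string (`Rc<str>` in place of `@str`). Character-based truncation is kept so multi-byte text is never split mid-codepoint.

    use std::rc::Rc;

    fn shorten(x: &str) -> Rc<str> {
        // Byte offset of the 61st character, or the whole string if it is shorter.
        let cut = x
            .char_indices()
            .nth(60)
            .map(|(byte_idx, _)| byte_idx)
            .unwrap_or(x.len());
        Rc::from(&x[..cut])
    }

    fn main() {
        let long = "x".repeat(80);
        assert_eq!(shorten(&long).len(), 60);
        assert_eq!(&*shorten("short expr"), "short expr");
        println!("ok");
    }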

@ -55,7 +55,7 @@ fn abi_info(ccx: @CrateContext) -> @cabi::ABIInfo {
}
}
pub fn link_name(ccx: @CrateContext, i: @ast::foreign_item) -> @~str {
pub fn link_name(ccx: @CrateContext, i: @ast::foreign_item) -> @str {
match attr::first_attr_value_str_by_name(i.attrs, "link_name") {
None => ccx.sess.str_of(i.ident),
Some(ln) => ln,
@ -345,7 +345,7 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
}
ast::foreign_item_const(*) => {
let ident = token::ident_to_str(&foreign_item.ident);
ccx.item_symbols.insert(foreign_item.id, copy *ident);
ccx.item_symbols.insert(foreign_item.id, /* bad */ident.to_owned());
}
}
}
@ -403,9 +403,9 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
}
let lname = link_name(ccx, foreign_item);
let llbasefn = base_fn(ccx, *lname, tys, cc);
let llbasefn = base_fn(ccx, lname, tys, cc);
// Name the shim function
let shim_name = *lname + "__c_stack_shim";
let shim_name = fmt!("%s__c_stack_shim", lname);
build_shim_fn_(ccx,
shim_name,
llbasefn,
@ -433,12 +433,12 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
item: @ast::foreign_item,
tys: &ShimTypes,
cc: lib::llvm::CallConv) {
debug!("build_direct_fn(%s)", *link_name(ccx, item));
debug!("build_direct_fn(%s)", link_name(ccx, item));
let fcx = new_fn_ctxt(ccx, ~[], decl, tys.fn_sig.output, None);
let bcx = top_scope_block(fcx, None);
let lltop = bcx.llbb;
let llbasefn = base_fn(ccx, *link_name(ccx, item), tys, cc);
let llbasefn = base_fn(ccx, link_name(ccx, item), tys, cc);
let ty = ty::lookup_item_type(ccx.tcx,
ast_util::local_def(item.id)).ty;
let ret_ty = ty::ty_fn_ret(ty);
@ -460,12 +460,12 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
item: @ast::foreign_item,
tys: &ShimTypes,
cc: lib::llvm::CallConv) {
debug!("build_fast_ffi_fn(%s)", *link_name(ccx, item));
debug!("build_fast_ffi_fn(%s)", link_name(ccx, item));
let fcx = new_fn_ctxt(ccx, ~[], decl, tys.fn_sig.output, None);
let bcx = top_scope_block(fcx, None);
let lltop = bcx.llbb;
let llbasefn = base_fn(ccx, *link_name(ccx, item), tys, cc);
let llbasefn = base_fn(ccx, link_name(ccx, item), tys, cc);
set_no_inline(fcx.llfn);
set_fixed_stack_segment(fcx.llfn);
let ty = ty::lookup_item_type(ccx.tcx,
@ -553,7 +553,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
substs: @param_substs,
attributes: &[ast::attribute],
ref_id: Option<ast::node_id>) {
debug!("trans_intrinsic(item.ident=%s)", *ccx.sess.str_of(item.ident));
debug!("trans_intrinsic(item.ident=%s)", ccx.sess.str_of(item.ident));
let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx, item.id));
@ -574,8 +574,8 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let mut bcx = top_scope_block(fcx, None);
let lltop = bcx.llbb;
let first_real_arg = fcx.arg_pos(0u);
match *ccx.sess.str_of(item.ident) {
~"atomic_cxchg" => {
match ccx.sess.str_of(item.ident).as_slice() {
"atomic_cxchg" => {
let old = AtomicCmpXchg(bcx,
get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
@ -583,7 +583,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
SequentiallyConsistent);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_cxchg_acq" => {
"atomic_cxchg_acq" => {
let old = AtomicCmpXchg(bcx,
get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
@ -591,7 +591,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
Acquire);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_cxchg_rel" => {
"atomic_cxchg_rel" => {
let old = AtomicCmpXchg(bcx,
get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
@ -599,100 +599,100 @@ pub fn trans_intrinsic(ccx: @CrateContext,
Release);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_load" => {
"atomic_load" => {
let old = AtomicLoad(bcx,
get_param(decl, first_real_arg),
SequentiallyConsistent);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_load_acq" => {
"atomic_load_acq" => {
let old = AtomicLoad(bcx,
get_param(decl, first_real_arg),
Acquire);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_store" => {
"atomic_store" => {
AtomicStore(bcx,
get_param(decl, first_real_arg + 1u),
get_param(decl, first_real_arg),
SequentiallyConsistent);
}
~"atomic_store_rel" => {
"atomic_store_rel" => {
AtomicStore(bcx,
get_param(decl, first_real_arg + 1u),
get_param(decl, first_real_arg),
Release);
}
~"atomic_xchg" => {
"atomic_xchg" => {
let old = AtomicRMW(bcx, Xchg,
get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
SequentiallyConsistent);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_xchg_acq" => {
"atomic_xchg_acq" => {
let old = AtomicRMW(bcx, Xchg,
get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
Acquire);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_xchg_rel" => {
"atomic_xchg_rel" => {
let old = AtomicRMW(bcx, Xchg,
get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
Release);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_xadd" => {
"atomic_xadd" => {
let old = AtomicRMW(bcx, lib::llvm::Add,
get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
SequentiallyConsistent);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_xadd_acq" => {
"atomic_xadd_acq" => {
let old = AtomicRMW(bcx, lib::llvm::Add,
get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
Acquire);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_xadd_rel" => {
"atomic_xadd_rel" => {
let old = AtomicRMW(bcx, lib::llvm::Add,
get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
Release);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_xsub" => {
"atomic_xsub" => {
let old = AtomicRMW(bcx, lib::llvm::Sub,
get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
SequentiallyConsistent);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_xsub_acq" => {
"atomic_xsub_acq" => {
let old = AtomicRMW(bcx, lib::llvm::Sub,
get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
Acquire);
Store(bcx, old, fcx.llretptr.get());
}
~"atomic_xsub_rel" => {
"atomic_xsub_rel" => {
let old = AtomicRMW(bcx, lib::llvm::Sub,
get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u),
Release);
Store(bcx, old, fcx.llretptr.get());
}
~"size_of" => {
"size_of" => {
let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty);
Store(bcx, C_uint(ccx, machine::llsize_of_real(ccx, lltp_ty)),
fcx.llretptr.get());
}
~"move_val" => {
"move_val" => {
// Create a datum reflecting the value being moved.
// Use `appropriate_mode` so that the datum is by ref
// if the value is non-immediate. Note that, with
@ -705,7 +705,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
bcx = src.move_to(bcx, DROP_EXISTING,
get_param(decl, first_real_arg));
}
~"move_val_init" => {
"move_val_init" => {
// See comments for `"move_val"`.
let tp_ty = substs.tys[0];
let mode = appropriate_mode(tp_ty);
@ -713,19 +713,19 @@ pub fn trans_intrinsic(ccx: @CrateContext,
ty: tp_ty, mode: mode};
bcx = src.move_to(bcx, INIT, get_param(decl, first_real_arg));
}
~"min_align_of" => {
"min_align_of" => {
let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty);
Store(bcx, C_uint(ccx, machine::llalign_of_min(ccx, lltp_ty)),
fcx.llretptr.get());
}
~"pref_align_of"=> {
"pref_align_of"=> {
let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty);
Store(bcx, C_uint(ccx, machine::llalign_of_pref(ccx, lltp_ty)),
fcx.llretptr.get());
}
~"get_tydesc" => {
"get_tydesc" => {
let tp_ty = substs.tys[0];
let static_ti = get_tydesc(ccx, tp_ty);
glue::lazily_emit_all_tydesc_glue(ccx, static_ti);
@ -735,18 +735,18 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let td = PointerCast(bcx, static_ti.tydesc, T_ptr(T_nil()));
Store(bcx, td, fcx.llretptr.get());
}
~"init" => {
"init" => {
let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty);
if !ty::type_is_nil(tp_ty) {
Store(bcx, C_null(lltp_ty), fcx.llretptr.get());
}
}
~"uninit" => {
"uninit" => {
// Do nothing, this is effectively a no-op
}
~"forget" => {}
~"transmute" => {
"forget" => {}
"transmute" => {
let (in_type, out_type) = (substs.tys[0], substs.tys[1]);
let llintype = type_of::type_of(ccx, in_type);
let llouttype = type_of::type_of(ccx, out_type);
@ -792,13 +792,13 @@ pub fn trans_intrinsic(ccx: @CrateContext,
call_memcpy(bcx, lldestptr, llsrcptr, llsize, 1);
}
}
~"needs_drop" => {
"needs_drop" => {
let tp_ty = substs.tys[0];
Store(bcx,
C_bool(ty::type_needs_drop(ccx.tcx, tp_ty)),
fcx.llretptr.get());
}
~"visit_tydesc" => {
"visit_tydesc" => {
let td = get_param(decl, first_real_arg);
let visitor = get_param(decl, first_real_arg + 1u);
//let llvisitorptr = alloca(bcx, val_ty(visitor));
@ -810,8 +810,8 @@ pub fn trans_intrinsic(ccx: @CrateContext,
abi::tydesc_field_visit_glue,
None);
}
~"frame_address" => {
let frameaddress = *ccx.intrinsics.get(&~"llvm.frameaddress");
"frame_address" => {
let frameaddress = *ccx.intrinsics.get(& &"llvm.frameaddress");
let frameaddress_val = Call(bcx, frameaddress, [C_i32(0i32)]);
let star_u8 = ty::mk_imm_ptr(
bcx.tcx(),
@ -836,7 +836,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
|bcx| Callee {bcx: bcx, data: Closure(datum)},
ArgVals(arg_vals), Ignore, DontAutorefArg);
}
~"morestack_addr" => {
"morestack_addr" => {
// XXX This is a hack to grab the address of this particular
// native function. There should be a general in-language
// way to do this
@ -847,7 +847,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
T_ptr(T_nil()));
Store(bcx, morestack_addr, fcx.llretptr.get());
}
~"memcpy32" => {
"memcpy32" => {
let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32);
@ -857,10 +857,10 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let src_ptr = PointerCast(bcx, get_param(decl, first_real_arg + 1), T_ptr(T_i8()));
let count = get_param(decl, first_real_arg + 2);
let volatile = C_i1(false);
let llfn = *bcx.ccx().intrinsics.get(&~"llvm.memcpy.p0i8.p0i8.i32");
let llfn = *bcx.ccx().intrinsics.get(& &"llvm.memcpy.p0i8.p0i8.i32");
Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align, volatile]);
}
~"memcpy64" => {
"memcpy64" => {
let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32);
@ -870,10 +870,10 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let src_ptr = PointerCast(bcx, get_param(decl, first_real_arg + 1), T_ptr(T_i8()));
let count = get_param(decl, first_real_arg + 2);
let volatile = C_i1(false);
let llfn = *bcx.ccx().intrinsics.get(&~"llvm.memcpy.p0i8.p0i8.i64");
let llfn = *bcx.ccx().intrinsics.get(& &"llvm.memcpy.p0i8.p0i8.i64");
Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align, volatile]);
}
~"memmove32" => {
"memmove32" => {
let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32);
@ -883,10 +883,10 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let src_ptr = PointerCast(bcx, get_param(decl, first_real_arg + 1), T_ptr(T_i8()));
let count = get_param(decl, first_real_arg + 2);
let volatile = C_i1(false);
let llfn = *bcx.ccx().intrinsics.get(&~"llvm.memmove.p0i8.p0i8.i32");
let llfn = *bcx.ccx().intrinsics.get(& &"llvm.memmove.p0i8.p0i8.i32");
Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align, volatile]);
}
~"memmove64" => {
"memmove64" => {
let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32);
@ -896,10 +896,10 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let src_ptr = PointerCast(bcx, get_param(decl, first_real_arg + 1), T_ptr(T_i8()));
let count = get_param(decl, first_real_arg + 2);
let volatile = C_i1(false);
let llfn = *bcx.ccx().intrinsics.get(&~"llvm.memmove.p0i8.p0i8.i64");
let llfn = *bcx.ccx().intrinsics.get(& &"llvm.memmove.p0i8.p0i8.i64");
Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align, volatile]);
}
~"memset32" => {
"memset32" => {
let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32);
@ -909,10 +909,10 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let val = get_param(decl, first_real_arg + 1);
let count = get_param(decl, first_real_arg + 2);
let volatile = C_i1(false);
let llfn = *bcx.ccx().intrinsics.get(&~"llvm.memset.p0i8.i32");
let llfn = *bcx.ccx().intrinsics.get(& &"llvm.memset.p0i8.i32");
Call(bcx, llfn, [dst_ptr, val, Mul(bcx, size, count), align, volatile]);
}
~"memset64" => {
"memset64" => {
let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32);
@ -922,248 +922,248 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let val = get_param(decl, first_real_arg + 1);
let count = get_param(decl, first_real_arg + 2);
let volatile = C_i1(false);
let llfn = *bcx.ccx().intrinsics.get(&~"llvm.memset.p0i8.i64");
let llfn = *bcx.ccx().intrinsics.get(& &"llvm.memset.p0i8.i64");
Call(bcx, llfn, [dst_ptr, val, Mul(bcx, size, count), align, volatile]);
}
~"sqrtf32" => {
"sqrtf32" => {
let x = get_param(decl, first_real_arg);
let sqrtf = *ccx.intrinsics.get(&~"llvm.sqrt.f32");
let sqrtf = *ccx.intrinsics.get(& &"llvm.sqrt.f32");
Store(bcx, Call(bcx, sqrtf, [x]), fcx.llretptr.get());
}
~"sqrtf64" => {
"sqrtf64" => {
let x = get_param(decl, first_real_arg);
let sqrtf = *ccx.intrinsics.get(&~"llvm.sqrt.f64");
let sqrtf = *ccx.intrinsics.get(& &"llvm.sqrt.f64");
Store(bcx, Call(bcx, sqrtf, [x]), fcx.llretptr.get());
}
~"powif32" => {
"powif32" => {
let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u);
let powif = *ccx.intrinsics.get(&~"llvm.powi.f32");
let powif = *ccx.intrinsics.get(& &"llvm.powi.f32");
Store(bcx, Call(bcx, powif, [a, x]), fcx.llretptr.get());
}
~"powif64" => {
"powif64" => {
let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u);
let powif = *ccx.intrinsics.get(&~"llvm.powi.f64");
let powif = *ccx.intrinsics.get(& &"llvm.powi.f64");
Store(bcx, Call(bcx, powif, [a, x]), fcx.llretptr.get());
}
~"sinf32" => {
"sinf32" => {
let x = get_param(decl, first_real_arg);
let sinf = *ccx.intrinsics.get(&~"llvm.sin.f32");
let sinf = *ccx.intrinsics.get(& &"llvm.sin.f32");
Store(bcx, Call(bcx, sinf, [x]), fcx.llretptr.get());
}
~"sinf64" => {
"sinf64" => {
let x = get_param(decl, first_real_arg);
let sinf = *ccx.intrinsics.get(&~"llvm.sin.f64");
let sinf = *ccx.intrinsics.get(& &"llvm.sin.f64");
Store(bcx, Call(bcx, sinf, [x]), fcx.llretptr.get());
}
~"cosf32" => {
"cosf32" => {
let x = get_param(decl, first_real_arg);
let cosf = *ccx.intrinsics.get(&~"llvm.cos.f32");
let cosf = *ccx.intrinsics.get(& &"llvm.cos.f32");
Store(bcx, Call(bcx, cosf, [x]), fcx.llretptr.get());
}
~"cosf64" => {
"cosf64" => {
let x = get_param(decl, first_real_arg);
let cosf = *ccx.intrinsics.get(&~"llvm.cos.f64");
let cosf = *ccx.intrinsics.get(& &"llvm.cos.f64");
Store(bcx, Call(bcx, cosf, [x]), fcx.llretptr.get());
}
~"powf32" => {
"powf32" => {
let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u);
let powf = *ccx.intrinsics.get(&~"llvm.pow.f32");
let powf = *ccx.intrinsics.get(& &"llvm.pow.f32");
Store(bcx, Call(bcx, powf, [a, x]), fcx.llretptr.get());
}
~"powf64" => {
"powf64" => {
let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u);
let powf = *ccx.intrinsics.get(&~"llvm.pow.f64");
let powf = *ccx.intrinsics.get(& &"llvm.pow.f64");
Store(bcx, Call(bcx, powf, [a, x]), fcx.llretptr.get());
}
~"expf32" => {
"expf32" => {
let x = get_param(decl, first_real_arg);
let expf = *ccx.intrinsics.get(&~"llvm.exp.f32");
let expf = *ccx.intrinsics.get(& &"llvm.exp.f32");
Store(bcx, Call(bcx, expf, [x]), fcx.llretptr.get());
}
~"expf64" => {
"expf64" => {
let x = get_param(decl, first_real_arg);
let expf = *ccx.intrinsics.get(&~"llvm.exp.f64");
let expf = *ccx.intrinsics.get(& &"llvm.exp.f64");
Store(bcx, Call(bcx, expf, [x]), fcx.llretptr.get());
}
~"exp2f32" => {
"exp2f32" => {
let x = get_param(decl, first_real_arg);
let exp2f = *ccx.intrinsics.get(&~"llvm.exp2.f32");
let exp2f = *ccx.intrinsics.get(& &"llvm.exp2.f32");
Store(bcx, Call(bcx, exp2f, [x]), fcx.llretptr.get());
}
~"exp2f64" => {
"exp2f64" => {
let x = get_param(decl, first_real_arg);
let exp2f = *ccx.intrinsics.get(&~"llvm.exp2.f64");
let exp2f = *ccx.intrinsics.get(& &"llvm.exp2.f64");
Store(bcx, Call(bcx, exp2f, [x]), fcx.llretptr.get());
}
~"logf32" => {
"logf32" => {
let x = get_param(decl, first_real_arg);
let logf = *ccx.intrinsics.get(&~"llvm.log.f32");
let logf = *ccx.intrinsics.get(& &"llvm.log.f32");
Store(bcx, Call(bcx, logf, [x]), fcx.llretptr.get());
}
~"logf64" => {
"logf64" => {
let x = get_param(decl, first_real_arg);
let logf = *ccx.intrinsics.get(&~"llvm.log.f64");
let logf = *ccx.intrinsics.get(& &"llvm.log.f64");
Store(bcx, Call(bcx, logf, [x]), fcx.llretptr.get());
}
~"log10f32" => {
"log10f32" => {
let x = get_param(decl, first_real_arg);
let log10f = *ccx.intrinsics.get(&~"llvm.log10.f32");
let log10f = *ccx.intrinsics.get(& &"llvm.log10.f32");
Store(bcx, Call(bcx, log10f, [x]), fcx.llretptr.get());
}
~"log10f64" => {
"log10f64" => {
let x = get_param(decl, first_real_arg);
let log10f = *ccx.intrinsics.get(&~"llvm.log10.f64");
let log10f = *ccx.intrinsics.get(& &"llvm.log10.f64");
Store(bcx, Call(bcx, log10f, [x]), fcx.llretptr.get());
}
~"log2f32" => {
"log2f32" => {
let x = get_param(decl, first_real_arg);
let log2f = *ccx.intrinsics.get(&~"llvm.log2.f32");
let log2f = *ccx.intrinsics.get(& &"llvm.log2.f32");
Store(bcx, Call(bcx, log2f, [x]), fcx.llretptr.get());
}
~"log2f64" => {
"log2f64" => {
let x = get_param(decl, first_real_arg);
let log2f = *ccx.intrinsics.get(&~"llvm.log2.f64");
let log2f = *ccx.intrinsics.get(& &"llvm.log2.f64");
Store(bcx, Call(bcx, log2f, [x]), fcx.llretptr.get());
}
~"fmaf32" => {
"fmaf32" => {
let a = get_param(decl, first_real_arg);
let b = get_param(decl, first_real_arg + 1u);
let c = get_param(decl, first_real_arg + 2u);
let fmaf = *ccx.intrinsics.get(&~"llvm.fma.f32");
let fmaf = *ccx.intrinsics.get(& &"llvm.fma.f32");
Store(bcx, Call(bcx, fmaf, [a, b, c]), fcx.llretptr.get());
}
~"fmaf64" => {
"fmaf64" => {
let a = get_param(decl, first_real_arg);
let b = get_param(decl, first_real_arg + 1u);
let c = get_param(decl, first_real_arg + 2u);
let fmaf = *ccx.intrinsics.get(&~"llvm.fma.f64");
let fmaf = *ccx.intrinsics.get(& &"llvm.fma.f64");
Store(bcx, Call(bcx, fmaf, [a, b, c]), fcx.llretptr.get());
}
~"fabsf32" => {
"fabsf32" => {
let x = get_param(decl, first_real_arg);
let fabsf = *ccx.intrinsics.get(&~"llvm.fabs.f32");
let fabsf = *ccx.intrinsics.get(& &"llvm.fabs.f32");
Store(bcx, Call(bcx, fabsf, [x]), fcx.llretptr.get());
}
~"fabsf64" => {
"fabsf64" => {
let x = get_param(decl, first_real_arg);
let fabsf = *ccx.intrinsics.get(&~"llvm.fabs.f64");
let fabsf = *ccx.intrinsics.get(& &"llvm.fabs.f64");
Store(bcx, Call(bcx, fabsf, [x]), fcx.llretptr.get());
}
~"floorf32" => {
"floorf32" => {
let x = get_param(decl, first_real_arg);
let floorf = *ccx.intrinsics.get(&~"llvm.floor.f32");
let floorf = *ccx.intrinsics.get(& &"llvm.floor.f32");
Store(bcx, Call(bcx, floorf, [x]), fcx.llretptr.get());
}
~"floorf64" => {
"floorf64" => {
let x = get_param(decl, first_real_arg);
let floorf = *ccx.intrinsics.get(&~"llvm.floor.f64");
let floorf = *ccx.intrinsics.get(& &"llvm.floor.f64");
Store(bcx, Call(bcx, floorf, [x]), fcx.llretptr.get());
}
~"ceilf32" => {
"ceilf32" => {
let x = get_param(decl, first_real_arg);
let ceilf = *ccx.intrinsics.get(&~"llvm.ceil.f32");
let ceilf = *ccx.intrinsics.get(& &"llvm.ceil.f32");
Store(bcx, Call(bcx, ceilf, [x]), fcx.llretptr.get());
}
~"ceilf64" => {
"ceilf64" => {
let x = get_param(decl, first_real_arg);
let ceilf = *ccx.intrinsics.get(&~"llvm.ceil.f64");
let ceilf = *ccx.intrinsics.get(& &"llvm.ceil.f64");
Store(bcx, Call(bcx, ceilf, [x]), fcx.llretptr.get());
}
~"truncf32" => {
"truncf32" => {
let x = get_param(decl, first_real_arg);
let truncf = *ccx.intrinsics.get(&~"llvm.trunc.f32");
let truncf = *ccx.intrinsics.get(& &"llvm.trunc.f32");
Store(bcx, Call(bcx, truncf, [x]), fcx.llretptr.get());
}
~"truncf64" => {
"truncf64" => {
let x = get_param(decl, first_real_arg);
let truncf = *ccx.intrinsics.get(&~"llvm.trunc.f64");
let truncf = *ccx.intrinsics.get(& &"llvm.trunc.f64");
Store(bcx, Call(bcx, truncf, [x]), fcx.llretptr.get());
}
~"ctpop8" => {
"ctpop8" => {
let x = get_param(decl, first_real_arg);
let ctpop = *ccx.intrinsics.get(&~"llvm.ctpop.i8");
let ctpop = *ccx.intrinsics.get(& &"llvm.ctpop.i8");
Store(bcx, Call(bcx, ctpop, [x]), fcx.llretptr.get())
}
~"ctpop16" => {
"ctpop16" => {
let x = get_param(decl, first_real_arg);
let ctpop = *ccx.intrinsics.get(&~"llvm.ctpop.i16");
let ctpop = *ccx.intrinsics.get(& &"llvm.ctpop.i16");
Store(bcx, Call(bcx, ctpop, [x]), fcx.llretptr.get())
}
~"ctpop32" => {
"ctpop32" => {
let x = get_param(decl, first_real_arg);
let ctpop = *ccx.intrinsics.get(&~"llvm.ctpop.i32");
let ctpop = *ccx.intrinsics.get(& &"llvm.ctpop.i32");
Store(bcx, Call(bcx, ctpop, [x]), fcx.llretptr.get())
}
~"ctpop64" => {
"ctpop64" => {
let x = get_param(decl, first_real_arg);
let ctpop = *ccx.intrinsics.get(&~"llvm.ctpop.i64");
let ctpop = *ccx.intrinsics.get(& &"llvm.ctpop.i64");
Store(bcx, Call(bcx, ctpop, [x]), fcx.llretptr.get())
}
~"ctlz8" => {
"ctlz8" => {
let x = get_param(decl, first_real_arg);
let y = C_i1(false);
let ctlz = *ccx.intrinsics.get(&~"llvm.ctlz.i8");
let ctlz = *ccx.intrinsics.get(& &"llvm.ctlz.i8");
Store(bcx, Call(bcx, ctlz, [x, y]), fcx.llretptr.get())
}
~"ctlz16" => {
"ctlz16" => {
let x = get_param(decl, first_real_arg);
let y = C_i1(false);
let ctlz = *ccx.intrinsics.get(&~"llvm.ctlz.i16");
let ctlz = *ccx.intrinsics.get(& &"llvm.ctlz.i16");
Store(bcx, Call(bcx, ctlz, [x, y]), fcx.llretptr.get())
}
~"ctlz32" => {
"ctlz32" => {
let x = get_param(decl, first_real_arg);
let y = C_i1(false);
let ctlz = *ccx.intrinsics.get(&~"llvm.ctlz.i32");
let ctlz = *ccx.intrinsics.get(& &"llvm.ctlz.i32");
Store(bcx, Call(bcx, ctlz, [x, y]), fcx.llretptr.get())
}
~"ctlz64" => {
"ctlz64" => {
let x = get_param(decl, first_real_arg);
let y = C_i1(false);
let ctlz = *ccx.intrinsics.get(&~"llvm.ctlz.i64");
let ctlz = *ccx.intrinsics.get(& &"llvm.ctlz.i64");
Store(bcx, Call(bcx, ctlz, [x, y]), fcx.llretptr.get())
}
~"cttz8" => {
"cttz8" => {
let x = get_param(decl, first_real_arg);
let y = C_i1(false);
let cttz = *ccx.intrinsics.get(&~"llvm.cttz.i8");
let cttz = *ccx.intrinsics.get(& &"llvm.cttz.i8");
Store(bcx, Call(bcx, cttz, [x, y]), fcx.llretptr.get())
}
~"cttz16" => {
"cttz16" => {
let x = get_param(decl, first_real_arg);
let y = C_i1(false);
let cttz = *ccx.intrinsics.get(&~"llvm.cttz.i16");
let cttz = *ccx.intrinsics.get(& &"llvm.cttz.i16");
Store(bcx, Call(bcx, cttz, [x, y]), fcx.llretptr.get())
}
~"cttz32" => {
"cttz32" => {
let x = get_param(decl, first_real_arg);
let y = C_i1(false);
let cttz = *ccx.intrinsics.get(&~"llvm.cttz.i32");
let cttz = *ccx.intrinsics.get(& &"llvm.cttz.i32");
Store(bcx, Call(bcx, cttz, [x, y]), fcx.llretptr.get())
}
~"cttz64" => {
"cttz64" => {
let x = get_param(decl, first_real_arg);
let y = C_i1(false);
let cttz = *ccx.intrinsics.get(&~"llvm.cttz.i64");
let cttz = *ccx.intrinsics.get(& &"llvm.cttz.i64");
Store(bcx, Call(bcx, cttz, [x, y]), fcx.llretptr.get())
}
~"bswap16" => {
"bswap16" => {
let x = get_param(decl, first_real_arg);
let cttz = *ccx.intrinsics.get(&~"llvm.bswap.i16");
let cttz = *ccx.intrinsics.get(& &"llvm.bswap.i16");
Store(bcx, Call(bcx, cttz, [x]), fcx.llretptr.get())
}
~"bswap32" => {
"bswap32" => {
let x = get_param(decl, first_real_arg);
let cttz = *ccx.intrinsics.get(&~"llvm.bswap.i32");
let cttz = *ccx.intrinsics.get(& &"llvm.bswap.i32");
Store(bcx, Call(bcx, cttz, [x]), fcx.llretptr.get())
}
~"bswap64" => {
"bswap64" => {
let x = get_param(decl, first_real_arg);
let cttz = *ccx.intrinsics.get(&~"llvm.bswap.i64");
let cttz = *ccx.intrinsics.get(& &"llvm.bswap.i64");
Store(bcx, Call(bcx, cttz, [x]), fcx.llretptr.get())
}
_ => {

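The doubled borrow in lookups such as *bcx.ccx().intrinsics.get(& &"llvm.memset.p0i8.i64") above follows from the new key type: the intrinsics table is now keyed by &'static str, and HashMap::get borrows its key, so the caller passes a reference to an already-borrowed string literal. A minimal sketch of that shape, in the pre-1.0 dialect this diff uses, with a toy table standing in for ccx.intrinsics (the literal sigils simply mirror the spelling in the hunks above):

use core::hashmap::HashMap;

fn lookup_demo() -> int {
    // Toy stand-in for the intrinsics table: &'static str keys, int values.
    let mut table: HashMap<&'static str, int> = HashMap::new();
    table.insert(&"llvm.sqrt.f32", 1);
    // get() borrows its key, and the key type is itself a borrowed string,
    // hence the doubled `& &` on the literal at the call site.
    *table.get(& &"llvm.sqrt.f32")
}
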
View File

@ -686,10 +686,10 @@ pub fn declare_tydesc(ccx: @CrateContext, t: ty::t) -> @mut tydesc_info {
let llsize = llsize_of(ccx, llty);
let llalign = llalign_of(ccx, llty);
let addrspace = declare_tydesc_addrspace(ccx, t);
let name = @mangle_internal_name_by_type_and_seq(ccx, t, "tydesc");
let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc").to_managed();
note_unique_llvm_symbol(ccx, name);
debug!("+++ declare_tydesc %s %s", ppaux::ty_to_str(ccx.tcx, t), *name);
let gvar = str::as_c_str(*name, |buf| {
debug!("+++ declare_tydesc %s %s", ppaux::ty_to_str(ccx.tcx, t), name);
let gvar = str::as_c_str(name, |buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type, buf)
}
@ -715,10 +715,10 @@ pub fn declare_generic_glue(ccx: @CrateContext, t: ty::t, llfnty: TypeRef,
name: ~str) -> ValueRef {
let _icx = ccx.insn_ctxt("declare_generic_glue");
let name = name;
let fn_nm = @mangle_internal_name_by_type_and_seq(ccx, t, (~"glue_" + name));
debug!("%s is for type %s", *fn_nm, ppaux::ty_to_str(ccx.tcx, t));
let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, (~"glue_" + name)).to_managed();
debug!("%s is for type %s", fn_nm, ppaux::ty_to_str(ccx.tcx, t));
note_unique_llvm_symbol(ccx, fn_nm);
let llfn = decl_cdecl_fn(ccx.llmod, *fn_nm, llfnty);
let llfn = decl_cdecl_fn(ccx.llmod, fn_nm, llfnty);
set_glue_inlining(llfn, t);
return llfn;
}

View File

@ -139,7 +139,7 @@ pub fn static_size_of_enum(cx: @CrateContext, t: ty::t) -> uint {
});
debug!("static_size_of_enum: variant %s type %s",
*cx.tcx.sess.str_of(variant.name),
cx.tcx.sess.str_of(variant.name),
ty_str(cx.tn, T_struct(lltypes, false)));
let this_size = llsize_of_real(cx, T_struct(lltypes, false));

View File

@ -339,7 +339,7 @@ pub fn trans_static_method_callee(bcx: block,
}
};
debug!("trans_static_method_callee: method_id=%?, callee_id=%?, \
name=%s", method_id, callee_id, *ccx.sess.str_of(mname));
name=%s", method_id, callee_id, ccx.sess.str_of(mname));
let vtbls = resolve_vtables_in_fn_ctxt(
bcx.fcx, ccx.maps.vtable_map.get_copy(&callee_id));
@ -791,7 +791,7 @@ pub fn make_vtable(ccx: @CrateContext,
let tbl = C_struct(components);
let vtable = ccx.sess.str_of((ccx.names)("vtable"));
let vt_gvar = do str::as_c_str(*vtable) |buf| {
let vt_gvar = do str::as_c_str(vtable) |buf| {
llvm::LLVMAddGlobal(ccx.llmod, val_ty(tbl), buf)
};
llvm::LLVMSetInitializer(vt_gvar, tbl);
@ -827,16 +827,15 @@ pub fn make_impl_vtable(bcx: block,
ty::mk_bare_fn(tcx, copy im.fty));
if im.generics.has_type_params() || ty::type_has_self(fty) {
debug!("(making impl vtable) method has self or type params: %s",
*tcx.sess.str_of(im.ident));
tcx.sess.str_of(im.ident));
C_null(T_ptr(T_nil()))
} else {
debug!("(making impl vtable) adding method to vtable: %s",
*tcx.sess.str_of(im.ident));
tcx.sess.str_of(im.ident));
let m_id = method_with_name_or_default(ccx, impl_id, im.ident);
trans_fn_ref_with_vtables(bcx, m_id, 0,
substs, Some(vtables)).llfn
}
};

View File

@ -164,7 +164,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
ccx.monomorphizing.insert(fn_id, depth + 1);
let pt = vec::append(/*bad*/copy *pt,
[path_name((ccx.names)(*ccx.sess.str_of(name)))]);
[path_name((ccx.names)(ccx.sess.str_of(name)))]);
let s = mangle_exported_name(ccx, /*bad*/copy pt, mono_ty);
let mk_lldecl = || {

View File

@ -50,7 +50,7 @@ impl Reflector {
C_int(self.bcx.ccx(), i)
}
pub fn c_slice(&mut self, s: @~str) -> ValueRef {
pub fn c_slice(&mut self, s: @str) -> ValueRef {
// We're careful to not use first class aggregates here because that
// will kick us off fast isel. (Issue #4352.)
let bcx = self.bcx;

View File

@ -250,7 +250,7 @@ pub fn trans_slice_vstore(bcx: block,
pub fn trans_lit_str(bcx: block,
lit_expr: @ast::expr,
str_lit: @~str,
str_lit: @str,
dest: Dest)
-> block {
//!

View File

@ -118,43 +118,43 @@ pub fn type_uses_for(ccx: @CrateContext, fn_id: def_id, n_tps: uint)
_,
_) => {
if abi.is_intrinsic() {
let flags = match *cx.ccx.sess.str_of(i.ident) {
~"size_of" | ~"pref_align_of" | ~"min_align_of" |
~"uninit" | ~"init" | ~"transmute" | ~"move_val" |
~"move_val_init" => use_repr,
let flags = match cx.ccx.sess.str_of(i.ident).as_slice() {
"size_of" | "pref_align_of" | "min_align_of" |
"uninit" | "init" | "transmute" | "move_val" |
"move_val_init" => use_repr,
~"get_tydesc" | ~"needs_drop" => use_tydesc,
"get_tydesc" | "needs_drop" => use_tydesc,
~"atomic_cxchg" | ~"atomic_cxchg_acq"|
~"atomic_cxchg_rel"| ~"atomic_load" |
~"atomic_load_acq" | ~"atomic_store" |
~"atomic_store_rel"| ~"atomic_xchg" |
~"atomic_xadd" | ~"atomic_xsub" |
~"atomic_xchg_acq" | ~"atomic_xadd_acq" |
~"atomic_xsub_acq" | ~"atomic_xchg_rel" |
~"atomic_xadd_rel" | ~"atomic_xsub_rel" => 0,
"atomic_cxchg" | "atomic_cxchg_acq"|
"atomic_cxchg_rel"| "atomic_load" |
"atomic_load_acq" | "atomic_store" |
"atomic_store_rel"| "atomic_xchg" |
"atomic_xadd" | "atomic_xsub" |
"atomic_xchg_acq" | "atomic_xadd_acq" |
"atomic_xsub_acq" | "atomic_xchg_rel" |
"atomic_xadd_rel" | "atomic_xsub_rel" => 0,
~"visit_tydesc" | ~"forget" | ~"frame_address" |
~"morestack_addr" => 0,
"visit_tydesc" | "forget" | "frame_address" |
"morestack_addr" => 0,
~"memcpy32" | ~"memcpy64" | ~"memmove32" | ~"memmove64" |
~"memset32" | ~"memset64" => use_repr,
"memcpy32" | "memcpy64" | "memmove32" | "memmove64" |
"memset32" | "memset64" => use_repr,
~"sqrtf32" | ~"sqrtf64" | ~"powif32" | ~"powif64" |
~"sinf32" | ~"sinf64" | ~"cosf32" | ~"cosf64" |
~"powf32" | ~"powf64" | ~"expf32" | ~"expf64" |
~"exp2f32" | ~"exp2f64" | ~"logf32" | ~"logf64" |
~"log10f32"| ~"log10f64"| ~"log2f32" | ~"log2f64" |
~"fmaf32" | ~"fmaf64" | ~"fabsf32" | ~"fabsf64" |
~"floorf32"| ~"floorf64"| ~"ceilf32" | ~"ceilf64" |
~"truncf32"| ~"truncf64" => 0,
"sqrtf32" | "sqrtf64" | "powif32" | "powif64" |
"sinf32" | "sinf64" | "cosf32" | "cosf64" |
"powf32" | "powf64" | "expf32" | "expf64" |
"exp2f32" | "exp2f64" | "logf32" | "logf64" |
"log10f32"| "log10f64"| "log2f32" | "log2f64" |
"fmaf32" | "fmaf64" | "fabsf32" | "fabsf64" |
"floorf32"| "floorf64"| "ceilf32" | "ceilf64" |
"truncf32"| "truncf64" => 0,
~"ctpop8" | ~"ctpop16" | ~"ctpop32" | ~"ctpop64" => 0,
"ctpop8" | "ctpop16" | "ctpop32" | "ctpop64" => 0,
~"ctlz8" | ~"ctlz16" | ~"ctlz32" | ~"ctlz64" => 0,
~"cttz8" | ~"cttz16" | ~"cttz32" | ~"cttz64" => 0,
"ctlz8" | "ctlz16" | "ctlz32" | "ctlz64" => 0,
"cttz8" | "cttz16" | "cttz32" | "cttz64" => 0,
~"bswap16" | ~"bswap32" | ~"bswap64" => 0,
"bswap16" | "bswap32" | "bswap64" => 0,
// would be cool to make these an enum instead of strings!
_ => fail!("unknown intrinsic in type_use")

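The rewrite of the intrinsic-flags match above is the most common pattern in this commit: sess.str_of now hands back an @str, so the code takes a borrowed slice of it and the arms become bare string literals instead of ~"..." allocations. A minimal sketch of that shape in the same dialect (intrinsic_flags and the flag values are made up for illustration):

fn intrinsic_flags(name: @str) -> uint {
    // Match on a &str view of the managed string so the arms can be
    // plain string literals.
    match name.as_slice() {
        "size_of" | "pref_align_of" | "min_align_of" => 1u,
        "forget" | "morestack_addr" => 0u,
        _ => 0u
    }
}

As the "would be cool to make these an enum" comment above suggests, a proper enum would eventually replace these string comparisons altogether.
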
View File

@ -117,8 +117,8 @@ fn root(datum: &Datum,
if bcx.sess().trace() {
trans_trace(
bcx, None,
@fmt!("preserving until end of scope %d",
root_info.scope));
(fmt!("preserving until end of scope %d",
root_info.scope)).to_managed());
}
// First, root the datum. Note that we must zero this value,

View File

@ -281,7 +281,7 @@ struct ctxt_ {
tcache: type_cache,
rcache: creader_cache,
ccache: constness_cache,
short_names_cache: @mut HashMap<t, @~str>,
short_names_cache: @mut HashMap<t, @str>,
needs_unwind_cleanup_cache: @mut HashMap<t, bool>,
tc_cache: @mut HashMap<uint, TypeContents>,
ast_ty_to_ty_cache: @mut HashMap<node_id, ast_ty_to_ty_cache_entry>,
@ -3366,7 +3366,7 @@ pub fn field_idx_strict(tcx: ty::ctxt, id: ast::ident, fields: &[field])
for fields.each |f| { if f.ident == id { return i; } i += 1u; }
tcx.sess.bug(fmt!(
"No field named `%s` found in the list of fields `%?`",
*tcx.sess.str_of(id),
tcx.sess.str_of(id),
fields.map(|f| tcx.sess.str_of(f.ident))));
}
@ -3514,8 +3514,8 @@ pub fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str {
terr_record_fields(values) => {
fmt!("expected a record with field `%s` but found one with field \
`%s`",
*cx.sess.str_of(values.expected),
*cx.sess.str_of(values.found))
cx.sess.str_of(values.expected),
cx.sess.str_of(values.found))
}
terr_arg_count => ~"incorrect number of function parameters",
terr_regions_does_not_outlive(*) => {
@ -3549,7 +3549,7 @@ pub fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str {
trait_store_to_str(cx, (*values).found))
}
terr_in_field(err, fname) => {
fmt!("in field `%s`, %s", *cx.sess.str_of(fname),
fmt!("in field `%s`, %s", cx.sess.str_of(fname),
type_err_to_str(cx, err))
}
terr_sorts(values) => {

View File

@ -297,7 +297,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
tcx.sess.span_err(span,
fmt!("struct `%s` does not have a field
named `%s`", name,
*tcx.sess.str_of(field.ident)));
tcx.sess.str_of(field.ident)));
}
}
}
@ -310,7 +310,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
}
tcx.sess.span_err(span,
fmt!("pattern does not mention field `%s`",
*tcx.sess.str_of(field.ident)));
tcx.sess.str_of(field.ident)));
}
}
}

View File

@ -490,7 +490,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
if pat_util::pat_is_binding(fcx.ccx.tcx.def_map, p) => {
assign(p.id, None);
debug!("Pattern binding %s is assigned to %s",
*tcx.sess.str_of(path.idents[0]),
tcx.sess.str_of(path.idents[0]),
fcx.infcx().ty_to_str(
fcx.inh.locals.get_copy(&p.id)));
}
@ -557,7 +557,7 @@ pub fn check_no_duplicate_fields(tcx: ty::ctxt,
match orig_sp {
Some(orig_sp) => {
tcx.sess.span_err(sp, fmt!("Duplicate field name %s in record type declaration",
*tcx.sess.str_of(id)));
tcx.sess.str_of(id)));
tcx.sess.span_note(orig_sp, "First declaration of this field occurred here");
break;
}
@ -599,7 +599,7 @@ pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) {
ast::item_impl(_, _, _, ref ms) => {
let rp = ccx.tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
debug!("item_impl %s with id %d rp %?",
*ccx.tcx.sess.str_of(it.ident), it.id, rp);
ccx.tcx.sess.str_of(it.ident), it.id, rp);
for ms.each |m| {
check_method(ccx, *m);
}
@ -1396,7 +1396,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
fmt!("type `%s` does not implement any method in scope \
named `%s`",
actual,
*fcx.ccx.tcx.sess.str_of(method_name))
fcx.ccx.tcx.sess.str_of(method_name))
},
expr_t,
None);
@ -1772,7 +1772,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
|actual| {
fmt!("attempted to take value of method `%s` on type `%s` \
(try writing an anonymous function)",
*tcx.sess.str_of(field), actual)
tcx.sess.str_of(field), actual)
},
expr_t, None);
}
@ -1783,7 +1783,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
|actual| {
fmt!("attempted access of field `%s` on type `%s`, \
but no field with that name was found",
*tcx.sess.str_of(field), actual)
tcx.sess.str_of(field), actual)
},
expr_t, None);
}
@ -1821,14 +1821,14 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
tcx.sess.span_err(
field.span,
fmt!("structure has no field named `%s`",
*tcx.sess.str_of(field.node.ident)));
tcx.sess.str_of(field.node.ident)));
error_happened = true;
}
Some((_, true)) => {
tcx.sess.span_err(
field.span,
fmt!("field `%s` specified more than once",
*tcx.sess.str_of(field.node.ident)));
tcx.sess.str_of(field.node.ident)));
error_happened = true;
}
Some((field_id, false)) => {
@ -1862,7 +1862,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let (_, seen) = *class_field_map.get(&name);
if !seen {
missing_fields.push(
~"`" + *tcx.sess.str_of(name) + "`");
~"`" + tcx.sess.str_of(name) + "`");
}
}
@ -3424,7 +3424,7 @@ pub fn check_bounds_are_used(ccx: @mut CrateCtxt,
if !*b {
ccx.tcx.sess.span_err(
span, fmt!("type parameter `%s` is unused",
*ccx.tcx.sess.str_of(tps.get(i).ident)));
ccx.tcx.sess.str_of(tps.get(i).ident)));
}
}
}
@ -3435,14 +3435,15 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
}
let tcx = ccx.tcx;
let (n_tps, inputs, output) = match *ccx.tcx.sess.str_of(it.ident) {
~"size_of" |
~"pref_align_of" | ~"min_align_of" => (1u, ~[], ty::mk_uint()),
~"init" => (1u, ~[], param(ccx, 0u)),
~"uninit" => (1u, ~[], param(ccx, 0u)),
~"forget" => (1u, ~[ param(ccx, 0) ], ty::mk_nil()),
~"transmute" => (2, ~[ param(ccx, 0) ], param(ccx, 1)),
~"move_val" | ~"move_val_init" => {
let str = ccx.tcx.sess.str_of(it.ident);
let (n_tps, inputs, output) = match str.as_slice() {
"size_of" |
"pref_align_of" | "min_align_of" => (1u, ~[], ty::mk_uint()),
"init" => (1u, ~[], param(ccx, 0u)),
"uninit" => (1u, ~[], param(ccx, 0u)),
"forget" => (1u, ~[ param(ccx, 0) ], ty::mk_nil()),
"transmute" => (2, ~[ param(ccx, 0) ], param(ccx, 1)),
"move_val" | "move_val_init" => {
(1u,
~[
ty::mk_mut_rptr(tcx, ty::re_bound(ty::br_anon(0)), param(ccx, 0)),
@ -3450,9 +3451,9 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
],
ty::mk_nil())
}
~"needs_drop" => (1u, ~[], ty::mk_bool()),
"needs_drop" => (1u, ~[], ty::mk_bool()),
~"atomic_cxchg" | ~"atomic_cxchg_acq"| ~"atomic_cxchg_rel" => {
"atomic_cxchg" | "atomic_cxchg_acq"| "atomic_cxchg_rel" => {
(0,
~[
ty::mk_mut_rptr(tcx,
@ -3463,14 +3464,14 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
],
ty::mk_int())
}
~"atomic_load" | ~"atomic_load_acq" => {
"atomic_load" | "atomic_load_acq" => {
(0,
~[
ty::mk_imm_rptr(tcx, ty::re_bound(ty::br_anon(0)), ty::mk_int())
],
ty::mk_int())
}
~"atomic_store" | ~"atomic_store_rel" => {
"atomic_store" | "atomic_store_rel" => {
(0,
~[
ty::mk_mut_rptr(tcx, ty::re_bound(ty::br_anon(0)), ty::mk_int()),
@ -3478,9 +3479,9 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
],
ty::mk_nil())
}
~"atomic_xchg" | ~"atomic_xadd" | ~"atomic_xsub" |
~"atomic_xchg_acq" | ~"atomic_xadd_acq" | ~"atomic_xsub_acq" |
~"atomic_xchg_rel" | ~"atomic_xadd_rel" | ~"atomic_xsub_rel" => {
"atomic_xchg" | "atomic_xadd" | "atomic_xsub" |
"atomic_xchg_acq" | "atomic_xadd_acq" | "atomic_xsub_acq" |
"atomic_xchg_rel" | "atomic_xadd_rel" | "atomic_xsub_rel" => {
(0,
~[
ty::mk_mut_rptr(tcx, ty::re_bound(ty::br_anon(0)), ty::mk_int()),
@ -3489,11 +3490,11 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
ty::mk_int())
}
~"get_tydesc" => {
"get_tydesc" => {
// FIXME (#3730): return *intrinsic::tydesc, not *()
(1u, ~[], ty::mk_nil_ptr(ccx.tcx))
}
~"visit_tydesc" => {
"visit_tydesc" => {
let tydesc_name = special_idents::tydesc;
assert!(tcx.intrinsic_defs.contains_key(&tydesc_name));
let (_, tydesc_ty) = tcx.intrinsic_defs.get_copy(&tydesc_name);
@ -3504,7 +3505,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
});
(0, ~[ td_ptr, visitor_object_ty ], ty::mk_nil())
}
~"frame_address" => {
"frame_address" => {
let fty = ty::mk_closure(ccx.tcx, ty::ClosureTy {
purity: ast::impure_fn,
sigil: ast::BorrowedSigil,
@ -3519,10 +3520,10 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
});
(0u, ~[fty], ty::mk_nil())
}
~"morestack_addr" => {
"morestack_addr" => {
(0u, ~[], ty::mk_nil_ptr(ccx.tcx))
}
~"memcpy32" => {
"memcpy32" => {
(1,
~[
ty::mk_ptr(tcx, ty::mt {
@ -3537,7 +3538,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
],
ty::mk_nil())
}
~"memcpy64" => {
"memcpy64" => {
(1,
~[
ty::mk_ptr(tcx, ty::mt {
@ -3552,7 +3553,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
],
ty::mk_nil())
}
~"memmove32" => {
"memmove32" => {
(1,
~[
ty::mk_ptr(tcx, ty::mt {
@ -3567,7 +3568,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
],
ty::mk_nil())
}
~"memmove64" => {
"memmove64" => {
(1,
~[
ty::mk_ptr(tcx, ty::mt {
@ -3582,7 +3583,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
],
ty::mk_nil())
}
~"memset32" => {
"memset32" => {
(1,
~[
ty::mk_ptr(tcx, ty::mt {
@ -3594,7 +3595,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
],
ty::mk_nil())
}
~"memset64" => {
"memset64" => {
(1,
~[
ty::mk_ptr(tcx, ty::mt {
@ -3606,75 +3607,75 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
],
ty::mk_nil())
}
~"sqrtf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"sqrtf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"powif32" => {
"sqrtf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"sqrtf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"powif32" => {
(0,
~[ ty::mk_f32(), ty::mk_i32() ],
ty::mk_f32())
}
~"powif64" => {
"powif64" => {
(0,
~[ ty::mk_f64(), ty::mk_i32() ],
ty::mk_f64())
}
~"sinf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"sinf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"cosf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"cosf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"powf32" => {
"sinf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"sinf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"cosf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"cosf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"powf32" => {
(0,
~[ ty::mk_f32(), ty::mk_f32() ],
ty::mk_f32())
}
~"powf64" => {
"powf64" => {
(0,
~[ ty::mk_f64(), ty::mk_f64() ],
ty::mk_f64())
}
~"expf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"expf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"exp2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"exp2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"logf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"logf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"log10f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"log10f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"log2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"log2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"fmaf32" => {
"expf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"expf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"exp2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"exp2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"logf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"logf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"log10f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"log10f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"log2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"log2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"fmaf32" => {
(0,
~[ ty::mk_f32(), ty::mk_f32(), ty::mk_f32() ],
ty::mk_f32())
}
~"fmaf64" => {
"fmaf64" => {
(0,
~[ ty::mk_f64(), ty::mk_f64(), ty::mk_f64() ],
ty::mk_f64())
}
~"fabsf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"fabsf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"floorf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"floorf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"ceilf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"ceilf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"truncf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"truncf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"ctpop8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
~"ctpop16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
~"ctpop32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
~"ctpop64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
~"ctlz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
~"ctlz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
~"ctlz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
~"ctlz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
~"cttz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
~"cttz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
~"cttz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
~"cttz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
~"bswap16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
~"bswap32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
~"bswap64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
"fabsf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"fabsf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"floorf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"floorf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"ceilf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"ceilf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"truncf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
"truncf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
"ctpop8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
"ctpop16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
"ctpop32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
"ctpop64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
"ctlz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
"ctlz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
"ctlz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
"ctlz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
"cttz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
"cttz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
"cttz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
"cttz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
"bswap16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
"bswap32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
"bswap64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
ref other => {
tcx.sess.span_err(it.span,
fmt!("unrecognized intrinsic function: `%s`",

View File

@ -251,7 +251,7 @@ impl CoherenceChecker {
if associated_traits.len() == 0 {
debug!("(checking implementation) no associated traits for item \
'%s'",
*self.crate_context.tcx.sess.str_of(item.ident));
self.crate_context.tcx.sess.str_of(item.ident));
match get_base_type_def_id(self.inference_context,
item.span,
@ -278,7 +278,7 @@ impl CoherenceChecker {
associated_trait.ref_id);
debug!("(checking implementation) adding impl for trait '%s', item '%s'",
trait_ref.repr(self.crate_context.tcx),
*self.crate_context.tcx.sess.str_of(item.ident));
self.crate_context.tcx.sess.str_of(item.ident));
self.instantiate_default_methods(item.id, trait_ref);
@ -401,7 +401,7 @@ impl CoherenceChecker {
// method to that entry.
debug!("(checking implementation) adding method `%s` \
to entry for existing trait",
*self.crate_context.tcx.sess.str_of(
self.crate_context.tcx.sess.str_of(
provided_method_info.method_info.ident));
mis.push(provided_method_info);
}
@ -409,7 +409,7 @@ impl CoherenceChecker {
// If the trait doesn't have an entry yet, create one.
debug!("(checking implementation) creating new entry \
for method `%s`",
*self.crate_context.tcx.sess.str_of(
self.crate_context.tcx.sess.str_of(
provided_method_info.method_info.ident));
pmm.insert(local_def(impl_id),
@mut ~[provided_method_info]);
@ -742,7 +742,7 @@ impl CoherenceChecker {
tcx.sess.span_err(trait_ref_span,
fmt!("missing method `%s`",
*tcx.sess.str_of(method.ident)));
tcx.sess.str_of(method.ident)));
}
}
@ -794,7 +794,7 @@ impl CoherenceChecker {
for all_provided_methods.each |provided_method| {
debug!(
"(creating impl) adding provided method `%s` to impl",
*sess.str_of(provided_method.method_info.ident));
sess.str_of(provided_method.method_info.ident));
vec::push(all_methods, provided_method.method_info);
}
}
@ -909,7 +909,7 @@ impl CoherenceChecker {
session.bug(fmt!(
"no base type for external impl \
with no trait: %s (type %s)!",
*session.str_of(implementation.ident),
session.str_of(implementation.ident),
ty_to_str(self.crate_context.tcx,self_type.ty)));
}
Some(_) => {

View File

@ -471,7 +471,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
cm.span,
fmt!("method `%s` has a `%s` declaration in the impl, \
but not in the trait",
*tcx.sess.str_of(trait_m.ident),
tcx.sess.str_of(trait_m.ident),
explicit_self_to_str(impl_m.explicit_self, tcx.sess.intr())));
return;
}
@ -480,7 +480,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
cm.span,
fmt!("method `%s` has a `%s` declaration in the trait, \
but not in the impl",
*tcx.sess.str_of(trait_m.ident),
tcx.sess.str_of(trait_m.ident),
explicit_self_to_str(trait_m.explicit_self, tcx.sess.intr())));
return;
}
@ -496,7 +496,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
cm.span,
fmt!("method `%s` has %u type %s, but its trait \
declaration has %u type %s",
*tcx.sess.str_of(trait_m.ident),
tcx.sess.str_of(trait_m.ident),
num_impl_m_type_params,
pluralize(num_impl_m_type_params, ~"parameter"),
num_trait_m_type_params,
@ -509,7 +509,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
cm.span,
fmt!("method `%s` has %u parameter%s \
but the trait has %u",
*tcx.sess.str_of(trait_m.ident),
tcx.sess.str_of(trait_m.ident),
impl_m.fty.sig.inputs.len(),
if impl_m.fty.sig.inputs.len() == 1 { "" } else { "s" },
trait_m.fty.sig.inputs.len()));
@ -533,7 +533,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
which is not required by \
the corresponding type parameter \
in the trait declaration",
*tcx.sess.str_of(trait_m.ident),
tcx.sess.str_of(trait_m.ident),
i,
extra_bounds.user_string(tcx)));
return;
@ -551,7 +551,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
type parameter %u has %u trait %s, but the \
corresponding type parameter in \
the trait declaration has %u trait %s",
*tcx.sess.str_of(trait_m.ident),
tcx.sess.str_of(trait_m.ident),
i, impl_param_def.bounds.trait_bounds.len(),
pluralize(impl_param_def.bounds.trait_bounds.len(),
~"bound"),
@ -652,7 +652,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
tcx.sess.span_err(
cm.span,
fmt!("method `%s` has an incompatible type: %s",
*tcx.sess.str_of(trait_m.ident),
tcx.sess.str_of(trait_m.ident),
ty::type_err_to_str(tcx, terr)));
ty::note_and_explain_type_err(tcx, terr);
}
@ -700,7 +700,7 @@ pub fn check_methods_against_trait(ccx: &CrateCtxt,
tcx.sess.span_err(
impl_m.span,
fmt!("method `%s` is not a member of trait `%s`",
*tcx.sess.str_of(impl_m.mty.ident),
tcx.sess.str_of(impl_m.mty.ident),
path_to_str(a_trait_ty.path, tcx.sess.intr())));
}
}
@ -829,7 +829,7 @@ pub fn convert(ccx: &CrateCtxt, it: @ast::item) {
let tcx = ccx.tcx;
let rp = tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
debug!("convert: item %s with id %d rp %?",
*tcx.sess.str_of(it.ident), it.id, rp);
tcx.sess.str_of(it.ident), it.id, rp);
match it.node {
// These don't define types.
ast::item_foreign_mod(_) | ast::item_mod(_) => {}
@ -1084,7 +1084,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: @ast::item)
ty: ty::mk_bare_fn(ccx.tcx, tofd)
};
debug!("type of %s (id %d) is %s",
*tcx.sess.str_of(it.ident),
tcx.sess.str_of(it.ident),
it.id,
ppaux::ty_to_str(tcx, tpt.ty));
ccx.tcx.tcache.insert(local_def(it.id), tpt);

View File

@ -236,9 +236,9 @@ pub fn run_compiler(args: &~[~str], demitter: diagnostic::Emitter) {
::core::logging::console_off();
let mut args = /*bad*/copy *args;
let binary = @args.shift();
let binary = args.shift().to_managed();
if args.is_empty() { usage(*binary); return; }
if args.is_empty() { usage(binary); return; }
let matches =
&match getopts::groups::getopts(args, optgroups()) {
@ -249,7 +249,7 @@ pub fn run_compiler(args: &~[~str], demitter: diagnostic::Emitter) {
};
if opt_present(matches, "h") || opt_present(matches, "help") {
usage(*binary);
usage(binary);
return;
}
@ -276,16 +276,16 @@ pub fn run_compiler(args: &~[~str], demitter: diagnostic::Emitter) {
}
if opt_present(matches, "v") || opt_present(matches, "version") {
version(*binary);
version(binary);
return;
}
let input = match matches.free.len() {
0u => early_error(demitter, ~"no input filename given"),
1u => {
let ifile = /*bad*/copy matches.free[0];
if ifile == ~"-" {
let ifile = matches.free[0].as_slice();
if "-" == ifile {
let src = str::from_bytes(io::stdin().read_whole_stream());
str_input(src)
str_input(src.to_managed())
} else {
file_input(Path(ifile))
}

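The driver changes above show the conversion going the other way: owned ~str values coming out of os::args()-style APIs become managed strings with .to_managed(), after which they compare directly against &str literals with no deref. A small sketch of that idiom, with an invented helper name:

fn input_label(owned: ~str) -> @str {
    // ~str -> @str; the managed string then compares against literals
    // without the old `*` deref.
    let name = owned.to_managed();
    if "-" == name { @"stdin" } else { name }
}
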
View File

@ -159,7 +159,7 @@ pub fn bound_region_to_str_space(cx: ctxt,
if cx.sess.verbose() { return fmt!("%s%? ", prefix, br); }
match br {
br_named(id) => fmt!("%s'%s ", prefix, *cx.sess.str_of(id)),
br_named(id) => fmt!("%s'%s ", prefix, cx.sess.str_of(id)),
br_self => fmt!("%s'self ", prefix),
br_anon(_) => prefix.to_str(),
br_fresh(_) => prefix.to_str(),
@ -323,7 +323,7 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
match ident {
Some(i) => {
s.push_char(' ');
s.push_str(*cx.sess.str_of(i));
s.push_str(cx.sess.str_of(i));
}
_ => { }
}
@ -389,7 +389,7 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
&m.fty.sig) + ";"
}
fn field_to_str(cx: ctxt, f: field) -> ~str {
return *cx.sess.str_of(f.ident) + ": " + mt_to_str(cx, &f.mt);
return fmt!("%s: %s", cx.sess.str_of(f.ident), mt_to_str(cx, &f.mt));
}
// if there is an id, print that instead of the structural type:
@ -656,7 +656,7 @@ impl Repr for ty::Method {
impl Repr for ast::ident {
fn repr(&self, _tcx: ctxt) -> ~str {
copy *token::ident_to_str(self)
token::ident_to_str(self).to_owned()
}
}

View File

@ -41,7 +41,7 @@ pub struct Ctxt {
type SrvOwner<'self,T> = &'self fn(srv: Srv) -> T;
pub type CtxtHandler<T> = ~fn(ctxt: Ctxt) -> T;
type Parser = ~fn(Session, s: ~str) -> @ast::crate;
type Parser = ~fn(Session, s: @str) -> @ast::crate;
enum Msg {
HandleRequest(~fn(Ctxt)),
@ -68,7 +68,7 @@ fn run<T>(owner: SrvOwner<T>, source: ~str, parse: Parser) -> T {
let source = Cell::new(source);
let parse = Cell::new(parse);
do task::spawn {
act(&po, source.take(), parse.take());
act(&po, source.take().to_managed(), parse.take());
}
let srv_ = Srv {
@ -80,12 +80,12 @@ fn run<T>(owner: SrvOwner<T>, source: ~str, parse: Parser) -> T {
res
}
fn act(po: &Port<Msg>, source: ~str, parse: Parser) {
fn act(po: &Port<Msg>, source: @str, parse: Parser) {
let sess = build_session();
let ctxt = build_ctxt(
sess,
parse(sess, copy source)
parse(sess, source)
);
let mut keep_going = true;

View File

@ -41,13 +41,13 @@ pub fn parse_crate(attrs: ~[ast::attribute]) -> CrateAttrs {
let name = attr::last_meta_item_value_str_by_name(link_metas, "name");
CrateAttrs {
name: name.map(|s| copy **s)
name: name.map(|s| s.to_owned())
}
}
pub fn parse_desc(attrs: ~[ast::attribute]) -> Option<~str> {
let doc_strs = do doc_metas(attrs).filter_mapped |meta| {
attr::get_meta_item_value_str(*meta).map(|s| copy **s)
attr::get_meta_item_value_str(*meta).map(|s| s.to_owned())
};
if doc_strs.is_empty() {
None

View File

@ -25,7 +25,7 @@ use syntax::parse::token;
// thread-local data
// Hack-Becomes-Feature: using thread-local-state everywhere...
pub fn to_str(id: ast::ident) -> ~str {
return copy *ident_to_str(&id);
/* bad */ ident_to_str(&id).to_owned()
}
// get rid of this pointless function:

View File

@ -23,9 +23,9 @@ pub fn from_file(file: &Path) -> @ast::crate {
file, ~[], parse::new_parse_sess(None))
}
pub fn from_str(source: ~str) -> @ast::crate {
pub fn from_str(source: @str) -> @ast::crate {
parse::parse_crate_from_source_str(
~"-", @source, ~[], parse::new_parse_sess(None))
@"-", source, ~[], parse::new_parse_sess(None))
}
pub fn from_file_sess(sess: session::Session, file: &Path) -> @ast::crate {
@ -33,11 +33,11 @@ pub fn from_file_sess(sess: session::Session, file: &Path) -> @ast::crate {
file, cfg(sess, file_input(copy *file)), sess.parse_sess)
}
pub fn from_str_sess(sess: session::Session, source: ~str) -> @ast::crate {
pub fn from_str_sess(sess: session::Session, source: @str) -> @ast::crate {
parse::parse_crate_from_source_str(
~"-", @copy source, cfg(sess, str_input(source)), sess.parse_sess)
@"-", source, cfg(sess, str_input(source)), sess.parse_sess)
}
fn cfg(sess: session::Session, input: driver::input) -> ast::crate_cfg {
driver::build_configuration(sess, @~"rustdoc", &input)
driver::build_configuration(sess, @"rustdoc", &input)
}

View File

@ -117,7 +117,7 @@ fn record(mut repl: Repl, blk: &ast::blk, intr: @token::ident_interner) -> Repl
/// Run an input string in a Repl, returning the new Repl.
fn run(repl: Repl, input: ~str) -> Repl {
let binary = @copy repl.binary;
let binary = repl.binary.to_managed();
let options = @session::options {
crate_type: session::unknown_crate,
binary: binary,
@ -130,7 +130,7 @@ fn run(repl: Repl, input: ~str) -> Repl {
let head = include_str!("wrapper.rs").to_owned();
let foot = fmt!("fn main() {\n%s\n%s\n\nprint({\n%s\n})\n}",
repl.view_items, repl.stmts, input);
let wrapped = driver::str_input(head + foot);
let wrapped = driver::str_input((head + foot).to_managed());
debug!("inputting %s", head + foot);
@ -186,7 +186,7 @@ fn run(repl: Repl, input: ~str) -> Repl {
fn compile_crate(src_filename: ~str, binary: ~str) -> Option<bool> {
match do task::try {
let src_path = Path(src_filename);
let binary = @copy binary;
let binary = binary.to_managed();
let options = @session::options {
binary: binary,
addl_lib_search_paths: @mut ~[os::getcwd()],

View File

@ -100,7 +100,7 @@ impl<'self> PkgScript<'self> {
/// a PkgScript that we can then execute
fn parse<'a>(script: Path, workspace: &Path, id: &'a PkgId) -> PkgScript<'a> {
// Get the executable name that was invoked
let binary = @copy os::args()[0];
let binary = os::args()[0].to_managed();
// Build the rustc session data structures to pass
// to the compiler
let options = @session::options {
@ -145,7 +145,7 @@ impl<'self> PkgScript<'self> {
let root = r.pop().pop().pop().pop(); // :-\
debug!("Root is %s, calling compile_rest", root.to_str());
let exe = self.build_dir.push(~"pkg" + util::exe_suffix());
let binary = @copy os::args()[0];
let binary = os::args()[0].to_managed();
util::compile_crate_from_input(&self.input,
&self.build_dir,
sess,

View File

@ -80,7 +80,7 @@ fn fold_mod(_ctx: @mut ReadyCtx,
fn strip_main(item: @ast::item) -> @ast::item {
@ast::item {
attrs: do item.attrs.filtered |attr| {
*attr::get_attr_name(attr) != ~"main"
"main" != attr::get_attr_name(attr)
},
.. copy *item
}
@ -109,7 +109,7 @@ fn fold_item(ctx: @mut ReadyCtx,
ast::meta_list(_, ref mis) => {
for mis.each |mi| {
match mi.node {
ast::meta_word(cmd) => cmds.push(copy *cmd),
ast::meta_word(cmd) => cmds.push(cmd.to_owned()),
_ => {}
};
}
@ -205,7 +205,7 @@ pub fn compile_input(ctxt: &Ctx,
// tjc: by default, use the package ID name as the link name
// not sure if we should support anything else
let binary = @(copy os::args()[0]);
let binary = os::args()[0].to_managed();
debug!("flags: %s", flags.connect(" "));
debug!("cfgs: %s", cfgs.connect(" "));
@ -270,11 +270,11 @@ pub fn compile_input(ctxt: &Ctx,
debug!("Injecting link name: %s", short_name_to_use);
crate = @codemap::respan(crate.span, ast::crate_ {
attrs: ~[mk_attr(@dummy_spanned(
meta_list(@~"link",
~[@dummy_spanned(meta_name_value(@~"name",
mk_string_lit(@short_name_to_use))),
@dummy_spanned(meta_name_value(@~"vers",
mk_string_lit(@pkg_id.version.to_str_nonempty())))])))],
meta_list(@"link",
~[@dummy_spanned(meta_name_value(@"name",
mk_string_lit(short_name_to_use.to_managed()))),
@dummy_spanned(meta_name_value(@"vers",
mk_string_lit(pkg_id.version.to_str_nonempty().to_managed())))])))],
..copy crate.node});
}
@ -363,24 +363,24 @@ fn find_and_install_dependencies(ctxt: &Ctx,
None => ()
};
let lib_name = sess.str_of(lib_ident);
match find_library_in_search_path(my_ctxt.sysroot_opt, *lib_name) {
match find_library_in_search_path(my_ctxt.sysroot_opt, lib_name) {
Some(installed_path) => {
debug!("It exists: %s", installed_path.to_str());
}
None => {
// Try to install it
let pkg_id = PkgId::new(*lib_name);
let pkg_id = PkgId::new(lib_name);
my_ctxt.install(&my_workspace, &pkg_id);
let built_lib =
built_library_in_workspace(&pkg_id,
&my_workspace).expect(fmt!("find_and_install_dependencies: \
I thought I already built %s, but the library doesn't seem \
to exist", *lib_name));
to exist", lib_name));
// Also, add an additional search path
let installed_path = target_library_in_workspace(&my_workspace,
&built_lib).pop();
debug!("Great, I installed %s, and it's in %s",
*lib_name, installed_path.to_str());
lib_name, installed_path.to_str());
save(installed_path);
}
}
@ -415,7 +415,7 @@ pub fn link_exe(src: &Path, dest: &Path) -> bool {
}
}
pub fn mk_string_lit(s: @~str) -> ast::lit {
pub fn mk_string_lit(s: @str) -> ast::lit {
spanned {
node: ast::lit_str(s),
span: dummy_sp()

View File

@ -84,7 +84,7 @@ pub type Mrk = uint;
impl<S:Encoder> Encodable<S> for ident {
fn encode(&self, s: &mut S) {
s.emit_str(*interner_get(self.name));
s.emit_str(interner_get(self.name));
}
}
@ -228,9 +228,9 @@ pub type meta_item = spanned<meta_item_>;
#[deriving(Eq, Encodable, Decodable)]
pub enum meta_item_ {
meta_word(@~str),
meta_list(@~str, ~[@meta_item]),
meta_name_value(@~str, lit),
meta_word(@str),
meta_list(@str, ~[@meta_item]),
meta_name_value(@str, lit),
}
pub type blk = spanned<blk_>;
@ -634,12 +634,12 @@ pub type lit = spanned<lit_>;
#[deriving(Eq, Encodable, Decodable)]
pub enum lit_ {
lit_str(@~str),
lit_str(@str),
lit_int(i64, int_ty),
lit_uint(u64, uint_ty),
lit_int_unsuffixed(i64),
lit_float(@~str, float_ty),
lit_float_unsuffixed(@~str),
lit_float(@str, float_ty),
lit_float_unsuffixed(@str),
lit_nil,
lit_bool(bool),
}
@ -819,10 +819,10 @@ pub enum asm_dialect {
#[deriving(Eq, Encodable, Decodable)]
pub struct inline_asm {
asm: @~str,
clobbers: @~str,
inputs: ~[(@~str, @expr)],
outputs: ~[(@~str, @expr)],
asm: @str,
clobbers: @str,
inputs: ~[(@str, @expr)],
outputs: ~[(@str, @expr)],
volatile: bool,
alignstack: bool,
dialect: asm_dialect

View File

@ -58,8 +58,8 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: &str, itr: @ident_interner)
-> ~str {
let strs = do p.map |e| {
match *e {
path_mod(s) => copy *itr.get(s.name),
path_name(s) => copy *itr.get(s.name)
path_mod(s) => itr.get(s.name),
path_name(s) => itr.get(s.name)
}
};
strs.connect(sep)
@ -68,9 +68,9 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: &str, itr: @ident_interner)
pub fn path_ident_to_str(p: &path, i: ident, itr: @ident_interner) -> ~str {
if p.is_empty() {
//FIXME /* FIXME (#2543) */ copy *i
copy *itr.get(i.name)
itr.get(i.name).to_owned()
} else {
fmt!("%s::%s", path_to_str(*p, itr), *itr.get(i.name))
fmt!("%s::%s", path_to_str(*p, itr), itr.get(i.name))
}
}
@ -80,8 +80,8 @@ pub fn path_to_str(p: &[path_elt], itr: @ident_interner) -> ~str {
pub fn path_elt_to_str(pe: path_elt, itr: @ident_interner) -> ~str {
match pe {
path_mod(s) => copy *itr.get(s.name),
path_name(s) => copy *itr.get(s.name)
path_mod(s) => itr.get(s.name).to_owned(),
path_name(s) => itr.get(s.name).to_owned()
}
}
@ -359,16 +359,16 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
}
Some(&node_method(m, _, path)) => {
fmt!("method %s in %s (id=%?)",
*itr.get(m.ident.name), path_to_str(*path, itr), id)
itr.get(m.ident.name), path_to_str(*path, itr), id)
}
Some(&node_trait_method(ref tm, _, path)) => {
let m = ast_util::trait_method_to_ty_method(&**tm);
fmt!("method %s in %s (id=%?)",
*itr.get(m.ident.name), path_to_str(*path, itr), id)
itr.get(m.ident.name), path_to_str(*path, itr), id)
}
Some(&node_variant(ref variant, _, path)) => {
fmt!("variant %s in %s (id=%?)",
*itr.get(variant.node.name.name), path_to_str(*path, itr), id)
itr.get(variant.node.name.name), path_to_str(*path, itr), id)
}
Some(&node_expr(expr)) => {
fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id)
@ -384,7 +384,7 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
fmt!("arg (id=%?)", id)
}
Some(&node_local(ident)) => {
fmt!("local (id=%?, name=%s)", id, *itr.get(ident.name))
fmt!("local (id=%?, name=%s)", id, itr.get(ident.name))
}
Some(&node_block(_)) => {
fmt!("block")

View File

@ -28,7 +28,7 @@ use core::iterator::IteratorUtil;
pub fn path_name_i(idents: &[ident]) -> ~str {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
idents.map(|i| copy *token::interner_get(i.name)).connect("::")
idents.map(|i| token::interner_get(i.name)).connect("::")
}
pub fn path_to_ident(p: @Path) -> ident { copy *p.idents.last() }
@ -815,7 +815,7 @@ mod test {
assert_eq!(copy s,~[14]);
}
// convert a list of uints to an @~[ident]
// convert a list of uints to an @[ident]
// (ignores the interner completely)
fn uints_to_idents (uints: &~[uint]) -> @~[ident] {
@uints.map(|u|{ ident {name:*u, ctxt: empty_ctxt} })

View File

@ -26,23 +26,23 @@ use extra;
/* Constructors */
pub fn mk_name_value_item_str(name: @~str, value: @~str)
pub fn mk_name_value_item_str(name: @str, value: @str)
-> @ast::meta_item {
let value_lit = dummy_spanned(ast::lit_str(value));
mk_name_value_item(name, value_lit)
}
pub fn mk_name_value_item(name: @~str, value: ast::lit)
pub fn mk_name_value_item(name: @str, value: ast::lit)
-> @ast::meta_item {
@dummy_spanned(ast::meta_name_value(name, value))
}
pub fn mk_list_item(name: @~str, items: ~[@ast::meta_item]) ->
pub fn mk_list_item(name: @str, items: ~[@ast::meta_item]) ->
@ast::meta_item {
@dummy_spanned(ast::meta_list(name, items))
}
pub fn mk_word_item(name: @~str) -> @ast::meta_item {
pub fn mk_word_item(name: @str) -> @ast::meta_item {
@dummy_spanned(ast::meta_word(name))
}
@ -52,13 +52,13 @@ pub fn mk_attr(item: @ast::meta_item) -> ast::attribute {
is_sugared_doc: false })
}
pub fn mk_sugared_doc_attr(text: ~str,
pub fn mk_sugared_doc_attr(text: @str,
lo: BytePos, hi: BytePos) -> ast::attribute {
let style = doc_comment_style(text);
let lit = spanned(lo, hi, ast::lit_str(@text));
let lit = spanned(lo, hi, ast::lit_str(text));
let attr = ast::attribute_ {
style: style,
value: @spanned(lo, hi, ast::meta_name_value(@~"doc", lit)),
value: @spanned(lo, hi, ast::meta_name_value(@"doc", lit)),
is_sugared_doc: true
};
spanned(lo, hi, attr)
@ -78,8 +78,8 @@ pub fn attr_metas(attrs: &[ast::attribute]) -> ~[@ast::meta_item] {
pub fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute {
if attr.node.is_sugared_doc {
let comment = get_meta_item_value_str(attr.node.value).get();
let meta = mk_name_value_item_str(@~"doc",
@strip_doc_comment_decoration(*comment));
let meta = mk_name_value_item_str(@"doc",
strip_doc_comment_decoration(comment).to_managed());
mk_attr(meta)
} else {
*attr
@ -88,11 +88,11 @@ pub fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute {
/* Accessors */
pub fn get_attr_name(attr: &ast::attribute) -> @~str {
pub fn get_attr_name(attr: &ast::attribute) -> @str {
get_meta_item_name(attr.node.value)
}
pub fn get_meta_item_name(meta: @ast::meta_item) -> @~str {
pub fn get_meta_item_name(meta: @ast::meta_item) -> @str {
match meta.node {
ast::meta_word(n) => n,
ast::meta_name_value(n, _) => n,
@ -104,7 +104,7 @@ pub fn get_meta_item_name(meta: @ast::meta_item) -> @~str {
* Gets the string value if the meta_item is a meta_name_value variant
* containing a string, otherwise none
*/
pub fn get_meta_item_value_str(meta: @ast::meta_item) -> Option<@~str> {
pub fn get_meta_item_value_str(meta: @ast::meta_item) -> Option<@str> {
match meta.node {
ast::meta_name_value(_, v) => {
match v.node {
@ -130,7 +130,7 @@ pub fn get_meta_item_list(meta: @ast::meta_item)
* a tuple containing the name and string value, otherwise `none`
*/
pub fn get_name_value_str_pair(item: @ast::meta_item)
-> Option<(@~str, @~str)> {
-> Option<(@str, @str)> {
match attr::get_meta_item_value_str(item) {
Some(value) => {
let name = attr::get_meta_item_name(item);
@ -147,7 +147,7 @@ pub fn get_name_value_str_pair(item: @ast::meta_item)
pub fn find_attrs_by_name(attrs: &[ast::attribute], name: &str) ->
~[ast::attribute] {
do vec::filter_mapped(attrs) |a| {
if name == *get_attr_name(a) {
if name == get_attr_name(a) {
Some(*a)
} else {
None
@ -160,7 +160,7 @@ pub fn find_meta_items_by_name(metas: &[@ast::meta_item], name: &str) ->
~[@ast::meta_item] {
let mut rs = ~[];
for metas.each |mi| {
if name == *get_meta_item_name(*mi) {
if name == get_meta_item_name(*mi) {
rs.push(*mi)
}
}
@ -214,7 +214,7 @@ pub fn attrs_contains_name(attrs: &[ast::attribute], name: &str) -> bool {
}
pub fn first_attr_value_str_by_name(attrs: &[ast::attribute], name: &str)
-> Option<@~str> {
-> Option<@str> {
let mattrs = find_attrs_by_name(attrs, name);
if mattrs.len() > 0 {
@ -232,7 +232,7 @@ fn last_meta_item_by_name(items: &[@ast::meta_item], name: &str)
}
pub fn last_meta_item_value_str_by_name(items: &[@ast::meta_item], name: &str)
-> Option<@~str> {
-> Option<@str> {
match last_meta_item_by_name(items, name) {
Some(item) => {
@ -282,7 +282,7 @@ pub fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: &str) ->
~[@ast::meta_item] {
return vec::filter_mapped(items, |item| {
if name != *get_meta_item_name(*item) {
if name != get_meta_item_name(*item) {
Some(*item)
} else {
None
@ -316,8 +316,8 @@ pub fn find_inline_attr(attrs: &[ast::attribute]) -> inline_attr {
// FIXME (#2809)---validate the usage of #[inline] and #[inline(always)]
do attrs.iter().fold(ia_none) |ia,attr| {
match attr.node.value.node {
ast::meta_word(@~"inline") => ia_hint,
ast::meta_list(@~"inline", ref items) => {
ast::meta_word(s) if "inline" == s => ia_hint,
ast::meta_list(s, ref items) if "inline" == s => {
if !find_meta_items_by_name(*items, "always").is_empty() {
ia_always
} else if !find_meta_items_by_name(*items, "never").is_empty() {
@ -341,7 +341,7 @@ pub fn require_unique_names(diagnostic: @span_handler,
// FIXME: How do I silence the warnings? --pcw (#2619)
if !set.insert(name) {
diagnostic.span_fatal(meta.span,
fmt!("duplicate meta item `%s`", *name));
fmt!("duplicate meta item `%s`", name));
}
}
}

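The find_inline_attr hunk above shows the pattern used where an @~str literal had been part of the match pattern itself: the new code binds the @str and tests it in a guard (ast::meta_word(s) if "inline" == s) instead of matching on a string literal inside the variant. A minimal sketch of the same shape, with a toy enum standing in for meta_item_:

enum Meta {
    Word(@str),
    NameValue(@str, int)   // int is a placeholder for the real payload
}

fn is_inline(m: Meta) -> bool {
    match m {
        // bind the managed string and compare it in the guard
        Word(s) if "inline" == s => true,
        NameValue(s, _) if "inline" == s => true,
        _ => false
    }
}
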
View File

@ -184,7 +184,7 @@ pub struct Loc {
// Actually, *none* of the clients use the filename *or* file field;
// perhaps they should just be removed.
pub struct LocWithOpt {
filename: ~str,
filename: FileName,
line: uint,
col: CharPos,
file: Option<@FileMap>,
@ -193,7 +193,7 @@ pub struct LocWithOpt {
// used to be structural records. Better names, anyone?
pub struct FileMapAndLine {fm: @FileMap, line: uint}
pub struct FileMapAndBytePos {fm: @FileMap, pos: BytePos}
pub struct NameAndSpan {name: ~str, span: Option<span>}
pub struct NameAndSpan {name: @str, span: Option<span>}
impl to_bytes::IterBytes for NameAndSpan {
fn iter_bytes(&self, lsb0: bool, f: to_bytes::Cb) -> bool {
@ -227,7 +227,7 @@ impl to_bytes::IterBytes for ExpnInfo {
}
}
pub type FileName = ~str;
pub type FileName = @str;
pub struct FileLines
{
@ -261,7 +261,7 @@ pub struct FileMap {
/// Extra information used by qquote
substr: FileSubstr,
/// The complete source code
src: @~str,
src: @str,
/// The start position of this source in the CodeMap
start_pos: BytePos,
/// Locations of lines beginnings in the source code
@ -316,14 +316,14 @@ impl CodeMap {
}
/// Add a new FileMap to the CodeMap and return it
pub fn new_filemap(&self, filename: FileName, src: @~str) -> @FileMap {
pub fn new_filemap(&self, filename: FileName, src: @str) -> @FileMap {
return self.new_filemap_w_substr(filename, FssNone, src);
}
pub fn new_filemap_w_substr(&self,
filename: FileName,
substr: FileSubstr,
src: @~str)
src: @str)
-> @FileMap {
let files = &mut *self.files;
let start_pos = if files.len() == 0 {
@ -362,7 +362,7 @@ impl CodeMap {
match (loc.file.substr) {
FssNone =>
LocWithOpt {
filename: /* FIXME (#2543) */ copy loc.file.name,
filename: loc.file.name,
line: loc.line,
col: loc.col,
file: Some(loc.file)},
@ -421,8 +421,8 @@ impl CodeMap {
begin.pos.to_uint(), end.pos.to_uint()).to_owned();
}
pub fn get_filemap(&self, filename: ~str) -> @FileMap {
for self.files.each |fm| { if fm.name == filename { return *fm; } }
pub fn get_filemap(&self, filename: &str) -> @FileMap {
for self.files.each |fm| { if filename == fm.name { return *fm; } }
//XXjdm the following triggers a mismatched type bug
// (or expected function, found _|_)
fail!(); // ("asking for " + filename + " which we don't know about");
@ -532,7 +532,7 @@ mod test {
#[test]
fn t1 () {
let cm = CodeMap::new();
let fm = cm.new_filemap(~"blork.rs",@~"first line.\nsecond line");
let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line");
fm.next_line(BytePos(0));
assert_eq!(&fm.get_line(0),&~"first line.");
// TESTING BROKEN BEHAVIOR:
@ -544,7 +544,7 @@ mod test {
#[should_fail]
fn t2 () {
let cm = CodeMap::new();
let fm = cm.new_filemap(~"blork.rs",@~"first line.\nsecond line");
let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line");
// TESTING *REALLY* BROKEN BEHAVIOR:
fm.next_line(BytePos(0));
fm.next_line(BytePos(10));

View File

@ -306,8 +306,8 @@ fn highlight_lines(cm: @codemap::CodeMap,
fn print_macro_backtrace(cm: @codemap::CodeMap, sp: span) {
for sp.expn_info.iter().advance |ei| {
let ss = ei.callee.span.map_default(@~"", |span| @cm.span_to_str(*span));
print_diagnostic(*ss, note,
let ss = ei.callee.span.map_default(~"", |span| cm.span_to_str(*span));
print_diagnostic(ss, note,
fmt!("in expansion of %s!", ei.callee.name));
let ss = cm.span_to_str(ei.call_site);
print_diagnostic(ss, note, "expansion site");

View File

@ -45,7 +45,7 @@ pub fn expand_asm(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
cx.cfg(),
tts.to_owned());
let mut asm = ~"";
let mut asm = @"";
let mut outputs = ~[];
let mut inputs = ~[];
let mut cons = ~"";
@ -113,7 +113,7 @@ pub fn expand_asm(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
p.eat(&token::COMMA);
}
let clob = ~"~{" + *p.parse_str() + "}";
let clob = fmt!("~{%s}", p.parse_str());
clobs.push(clob);
}
@ -122,11 +122,11 @@ pub fn expand_asm(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
Options => {
let option = p.parse_str();
if "volatile" == *option {
if "volatile" == option {
volatile = true;
} else if "alignstack" == *option {
} else if "alignstack" == option {
alignstack = true;
} else if "intel" == *option {
} else if "intel" == option {
dialect = ast::asm_intel;
}
@ -176,8 +176,8 @@ pub fn expand_asm(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
MRExpr(@ast::expr {
id: cx.next_id(),
node: ast::expr_inline_asm(ast::inline_asm {
asm: @asm,
clobbers: @cons,
asm: asm,
clobbers: cons.to_managed(),
inputs: inputs,
outputs: outputs,
volatile: volatile,

View File

@ -33,7 +33,7 @@ use core::hashmap::HashMap;
// ast::mac_invoc_tt.
pub struct MacroDef {
name: ~str,
name: @str,
ext: SyntaxExtension
}
@ -308,18 +308,18 @@ impl ExtCtxt {
pub fn set_trace_macros(&self, x: bool) {
*self.trace_mac = x
}
pub fn str_of(&self, id: ast::ident) -> ~str {
copy *ident_to_str(&id)
pub fn str_of(&self, id: ast::ident) -> @str {
ident_to_str(&id)
}
pub fn ident_of(&self, st: &str) -> ast::ident {
str_to_ident(st)
}
}
pub fn expr_to_str(cx: @ExtCtxt, expr: @ast::expr, err_msg: ~str) -> ~str {
pub fn expr_to_str(cx: @ExtCtxt, expr: @ast::expr, err_msg: ~str) -> @str {
match expr.node {
ast::expr_lit(l) => match l.node {
ast::lit_str(s) => copy *s,
ast::lit_str(s) => s,
_ => cx.span_fatal(l.span, err_msg)
},
_ => cx.span_fatal(expr.span, err_msg)
@ -350,7 +350,7 @@ pub fn check_zero_tts(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree],
pub fn get_single_str_from_tts(cx: @ExtCtxt,
sp: span,
tts: &[ast::token_tree],
name: &str) -> ~str {
name: &str) -> @str {
if tts.len() != 1 {
cx.span_fatal(sp, fmt!("%s takes 1 argument.", name));
}
@ -538,25 +538,25 @@ mod test {
#[test] fn testenv () {
let mut a = HashMap::new();
a.insert (@~"abc",@15);
a.insert (@"abc",@15);
let m = MapChain::new(~a);
m.insert (@~"def",@16);
// FIXME: #4492 (ICE) assert_eq!(m.find(&@~"abc"),Some(@15));
// .... assert_eq!(m.find(&@~"def"),Some(@16));
assert_eq!(*(m.find(&@~"abc").get()),15);
assert_eq!(*(m.find(&@~"def").get()),16);
m.insert (@"def",@16);
// FIXME: #4492 (ICE) assert_eq!(m.find(&@"abc"),Some(@15));
// .... assert_eq!(m.find(&@"def"),Some(@16));
assert_eq!(*(m.find(&@"abc").get()),15);
assert_eq!(*(m.find(&@"def").get()),16);
let n = m.push_frame();
// old bindings are still present:
assert_eq!(*(n.find(&@~"abc").get()),15);
assert_eq!(*(n.find(&@~"def").get()),16);
n.insert (@~"def",@17);
assert_eq!(*(n.find(&@"abc").get()),15);
assert_eq!(*(n.find(&@"def").get()),16);
n.insert (@"def",@17);
// n shows the new binding
assert_eq!(*(n.find(&@~"abc").get()),15);
assert_eq!(*(n.find(&@~"def").get()),17);
assert_eq!(*(n.find(&@"abc").get()),15);
assert_eq!(*(n.find(&@"def").get()),17);
// ... but m still has the old ones
// FIXME: #4492: assert_eq!(m.find(&@~"abc"),Some(@15));
// FIXME: #4492: assert_eq!(m.find(&@~"def"),Some(@16));
assert_eq!(*(m.find(&@~"abc").get()),15);
assert_eq!(*(m.find(&@~"def").get()),16);
// FIXME: #4492: assert_eq!(m.find(&@"abc"),Some(@15));
// FIXME: #4492: assert_eq!(m.find(&@"def"),Some(@16));
assert_eq!(*(m.find(&@"abc").get()),15);
assert_eq!(*(m.find(&@"def").get()),16);
}
}


@ -126,8 +126,8 @@ pub trait AstBuilder {
fn expr_vec(&self, sp: span, exprs: ~[@ast::expr]) -> @ast::expr;
fn expr_vec_uniq(&self, sp: span, exprs: ~[@ast::expr]) -> @ast::expr;
fn expr_vec_slice(&self, sp: span, exprs: ~[@ast::expr]) -> @ast::expr;
fn expr_str(&self, sp: span, s: ~str) -> @ast::expr;
fn expr_str_uniq(&self, sp: span, s: ~str) -> @ast::expr;
fn expr_str(&self, sp: span, s: @str) -> @ast::expr;
fn expr_str_uniq(&self, sp: span, s: @str) -> @ast::expr;
fn expr_unreachable(&self, span: span) -> @ast::expr;
@ -215,9 +215,9 @@ pub trait AstBuilder {
fn attribute(&self, sp: span, mi: @ast::meta_item) -> ast::attribute;
fn meta_word(&self, sp: span, w: ~str) -> @ast::meta_item;
fn meta_list(&self, sp: span, name: ~str, mis: ~[@ast::meta_item]) -> @ast::meta_item;
fn meta_name_value(&self, sp: span, name: ~str, value: ast::lit_) -> @ast::meta_item;
fn meta_word(&self, sp: span, w: @str) -> @ast::meta_item;
fn meta_list(&self, sp: span, name: @str, mis: ~[@ast::meta_item]) -> @ast::meta_item;
fn meta_name_value(&self, sp: span, name: @str, value: ast::lit_) -> @ast::meta_item;
fn view_use(&self, sp: span,
vis: ast::visibility, vp: ~[@ast::view_path]) -> @ast::view_item;
@ -521,10 +521,10 @@ impl AstBuilder for @ExtCtxt {
fn expr_vec_slice(&self, sp: span, exprs: ~[@ast::expr]) -> @ast::expr {
self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::expr_vstore_slice)
}
fn expr_str(&self, sp: span, s: ~str) -> @ast::expr {
self.expr_lit(sp, ast::lit_str(@s))
fn expr_str(&self, sp: span, s: @str) -> @ast::expr {
self.expr_lit(sp, ast::lit_str(s))
}
fn expr_str_uniq(&self, sp: span, s: ~str) -> @ast::expr {
fn expr_str_uniq(&self, sp: span, s: @str) -> @ast::expr {
self.expr_vstore(sp, self.expr_str(sp, s), ast::expr_vstore_uniq)
}
@ -540,8 +540,8 @@ impl AstBuilder for @ExtCtxt {
self.ident_of("fail_with"),
],
~[
self.expr_str(span, ~"internal error: entered unreachable code"),
self.expr_str(span, copy loc.file.name),
self.expr_str(span, @"internal error: entered unreachable code"),
self.expr_str(span, loc.file.name),
self.expr_uint(span, loc.line),
])
}
@ -791,14 +791,14 @@ impl AstBuilder for @ExtCtxt {
})
}
fn meta_word(&self, sp: span, w: ~str) -> @ast::meta_item {
@respan(sp, ast::meta_word(@w))
fn meta_word(&self, sp: span, w: @str) -> @ast::meta_item {
@respan(sp, ast::meta_word(w))
}
fn meta_list(&self, sp: span, name: ~str, mis: ~[@ast::meta_item]) -> @ast::meta_item {
@respan(sp, ast::meta_list(@name, mis))
fn meta_list(&self, sp: span, name: @str, mis: ~[@ast::meta_item]) -> @ast::meta_item {
@respan(sp, ast::meta_list(name, mis))
}
fn meta_name_value(&self, sp: span, name: ~str, value: ast::lit_) -> @ast::meta_item {
@respan(sp, ast::meta_name_value(@name, respan(sp, value)))
fn meta_name_value(&self, sp: span, name: @str, value: ast::lit_) -> @ast::meta_item {
@respan(sp, ast::meta_name_value(name, respan(sp, value)))
}
fn view_use(&self, sp: span,


@ -72,7 +72,7 @@ fn decodable_substructure(cx: @ExtCtxt, span: span,
};
let read_struct_field = cx.ident_of("read_struct_field");
let getarg = |name: ~str, field: uint| {
let getarg = |name: @str, field: uint| {
cx.expr_method_call(span, blkdecoder, read_struct_field,
~[cx.expr_str(span, name),
cx.expr_uint(span, field),
@ -86,7 +86,7 @@ fn decodable_substructure(cx: @ExtCtxt, span: span,
} else {
let mut fields = vec::with_capacity(n);
for uint::range(0, n) |i| {
fields.push(getarg(fmt!("_field%u", i), i));
fields.push(getarg(fmt!("_field%u", i).to_managed(), i));
}
cx.expr_call_ident(span, substr.type_ident, fields)
}


@ -127,7 +127,7 @@ fn encodable_substructure(cx: @ExtCtxt, span: span,
for fields.eachi |i, f| {
let (name, val) = match *f {
(Some(id), e, _) => (cx.str_of(id), e),
(None, e, _) => (fmt!("_field%u", i), e)
(None, e, _) => (fmt!("_field%u", i).to_managed(), e)
};
let enc = cx.expr_method_call(span, val, encode, ~[blkencoder]);
let lambda = cx.lambda_expr_1(span, enc, blkarg);


@ -364,7 +364,7 @@ impl<'self> TraitDef<'self> {
let doc_attr = cx.attribute(
span,
cx.meta_name_value(span,
~"doc", ast::lit_str(@~"Automatically derived.")));
@"doc", ast::lit_str(@"Automatically derived.")));
cx.item(
span,
::parse::token::special_idents::clownshoes_extensions,


@ -82,23 +82,23 @@ pub fn expand_meta_deriving(cx: @ExtCtxt,
meta_word(tname) => {
macro_rules! expand(($func:path) => ($func(cx, titem.span,
titem, in_items)));
match *tname {
~"Clone" => expand!(clone::expand_deriving_clone),
~"DeepClone" => expand!(clone::expand_deriving_deep_clone),
match tname.as_slice() {
"Clone" => expand!(clone::expand_deriving_clone),
"DeepClone" => expand!(clone::expand_deriving_deep_clone),
~"IterBytes" => expand!(iter_bytes::expand_deriving_iter_bytes),
"IterBytes" => expand!(iter_bytes::expand_deriving_iter_bytes),
~"Encodable" => expand!(encodable::expand_deriving_encodable),
~"Decodable" => expand!(decodable::expand_deriving_decodable),
"Encodable" => expand!(encodable::expand_deriving_encodable),
"Decodable" => expand!(decodable::expand_deriving_decodable),
~"Eq" => expand!(eq::expand_deriving_eq),
~"TotalEq" => expand!(totaleq::expand_deriving_totaleq),
~"Ord" => expand!(ord::expand_deriving_ord),
~"TotalOrd" => expand!(totalord::expand_deriving_totalord),
"Eq" => expand!(eq::expand_deriving_eq),
"TotalEq" => expand!(totaleq::expand_deriving_totaleq),
"Ord" => expand!(ord::expand_deriving_ord),
"TotalOrd" => expand!(totalord::expand_deriving_totalord),
~"Rand" => expand!(rand::expand_deriving_rand),
"Rand" => expand!(rand::expand_deriving_rand),
~"ToStr" => expand!(to_str::expand_deriving_to_str),
"ToStr" => expand!(to_str::expand_deriving_to_str),
ref tname => {
cx.span_err(titem.span, fmt!("unknown \


@ -33,8 +33,8 @@ pub fn expand_syntax_ext(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
// Option<str> rather than just a maybe-empty string.
let e = match os::getenv(var) {
None => cx.expr_str(sp, ~""),
Some(ref s) => cx.expr_str(sp, copy *s)
None => cx.expr_str(sp, @""),
Some(s) => cx.expr_str(sp, s.to_managed())
};
MRExpr(e)
}


@ -57,7 +57,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
None => {
cx.span_fatal(
pth.span,
fmt!("macro undefined: '%s'", *extnamestr))
fmt!("macro undefined: '%s'", extnamestr))
}
Some(@SE(NormalTT(SyntaxExpanderTT{
expander: exp,
@ -66,7 +66,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
cx.bt_push(ExpandedFrom(CallInfo {
call_site: s,
callee: NameAndSpan {
name: copy *extnamestr,
name: extnamestr,
span: exp_sp,
},
}));
@ -79,7 +79,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
pth.span,
fmt!(
"non-expr macro in expr pos: %s",
*extnamestr
extnamestr
)
)
}
@ -95,7 +95,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
_ => {
cx.span_fatal(
pth.span,
fmt!("'%s' is not a tt-style macro", *extnamestr)
fmt!("'%s' is not a tt-style macro", extnamestr)
)
}
}
@ -132,12 +132,12 @@ pub fn expand_mod_items(extsbox: @mut SyntaxEnv,
do item.attrs.rev_iter().fold(~[*item]) |items, attr| {
let mname = attr::get_attr_name(attr);
match (*extsbox).find(&intern(*mname)) {
match (*extsbox).find(&intern(mname)) {
Some(@SE(ItemDecorator(dec_fn))) => {
cx.bt_push(ExpandedFrom(CallInfo {
call_site: attr.span,
callee: NameAndSpan {
name: /*bad*/ copy *mname,
name: mname,
span: None
}
}));
@ -201,7 +201,7 @@ pub fn expand_item(extsbox: @mut SyntaxEnv,
// does this attribute list contain "macro_escape" ?
pub fn contains_macro_escape (attrs: &[ast::attribute]) -> bool {
attrs.any(|attr| "macro_escape" == *attr::get_attr_name(attr))
attrs.any(|attr| "macro_escape" == attr::get_attr_name(attr))
}
// Support for item-position macro invocations, exactly the same
@ -221,19 +221,19 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv,
let extnamestr = ident_to_str(extname);
let expanded = match (*extsbox).find(&extname.name) {
None => cx.span_fatal(pth.span,
fmt!("macro undefined: '%s!'", *extnamestr)),
fmt!("macro undefined: '%s!'", extnamestr)),
Some(@SE(NormalTT(ref expand))) => {
if it.ident != parse::token::special_idents::invalid {
cx.span_fatal(pth.span,
fmt!("macro %s! expects no ident argument, \
given '%s'", *extnamestr,
*ident_to_str(&it.ident)));
given '%s'", extnamestr,
ident_to_str(&it.ident)));
}
cx.bt_push(ExpandedFrom(CallInfo {
call_site: it.span,
callee: NameAndSpan {
name: copy *extnamestr,
name: extnamestr,
span: expand.span
}
}));
@ -243,26 +243,25 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv,
if it.ident == parse::token::special_idents::invalid {
cx.span_fatal(pth.span,
fmt!("macro %s! expects an ident argument",
*extnamestr));
extnamestr));
}
cx.bt_push(ExpandedFrom(CallInfo {
call_site: it.span,
callee: NameAndSpan {
name: copy *extnamestr,
name: extnamestr,
span: expand.span
}
}));
((*expand).expander)(cx, it.span, it.ident, tts)
}
_ => cx.span_fatal(
it.span, fmt!("%s! is not legal in item position", *extnamestr))
it.span, fmt!("%s! is not legal in item position", extnamestr))
};
let maybe_it = match expanded {
MRItem(it) => fld.fold_item(it),
MRExpr(_) => cx.span_fatal(pth.span,
~"expr macro in item position: "
+ *extnamestr),
fmt!("expr macro in item position: %s", extnamestr)),
MRAny(_, item_maker, _) => item_maker().chain(|i| {fld.fold_item(i)}),
MRDef(ref mdef) => {
insert_macro(*extsbox,intern(mdef.name), @SE((*mdef).ext));
@ -319,13 +318,13 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
let extnamestr = ident_to_str(extname);
let (fully_expanded, sp) = match (*extsbox).find(&extname.name) {
None =>
cx.span_fatal(pth.span, fmt!("macro undefined: '%s'", *extnamestr)),
cx.span_fatal(pth.span, fmt!("macro undefined: '%s'", extnamestr)),
Some(@SE(NormalTT(
SyntaxExpanderTT{expander: exp, span: exp_sp}))) => {
cx.bt_push(ExpandedFrom(CallInfo {
call_site: sp,
callee: NameAndSpan { name: copy *extnamestr, span: exp_sp }
callee: NameAndSpan { name: extnamestr, span: exp_sp }
}));
let expanded = match exp(cx, mac.span, tts) {
MRExpr(e) =>
@ -334,7 +333,7 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
MRAny(_,_,stmt_mkr) => stmt_mkr(),
_ => cx.span_fatal(
pth.span,
fmt!("non-stmt macro in stmt pos: %s", *extnamestr))
fmt!("non-stmt macro in stmt pos: %s", extnamestr))
};
//keep going, outside-in
@ -355,7 +354,7 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
_ => {
cx.span_fatal(pth.span,
fmt!("'%s' is not a tt-style macro", *extnamestr))
fmt!("'%s' is not a tt-style macro", extnamestr))
}
};
@ -414,7 +413,7 @@ fn get_block_info(exts : SyntaxEnv) -> BlockInfo {
match exts.find_in_topmost_frame(&intern(special_block_name)) {
Some(@BlockInfo(bi)) => bi,
_ => fail!(fmt!("special identifier %? was bound to a non-BlockInfo",
@~" block"))
@" block"))
}
}
@ -456,9 +455,9 @@ pub fn new_span(cx: @ExtCtxt, sp: span) -> span {
// the default compilation environment. It would be much nicer to use
// a mechanism like syntax_quote to ensure hygiene.
pub fn core_macros() -> ~str {
pub fn core_macros() -> @str {
return
~"pub mod macros {
@"pub mod macros {
macro_rules! ignore (($($x:tt)*) => (()))
macro_rules! error (
@ -679,7 +678,7 @@ pub fn expand_crate(parse_sess: @mut parse::ParseSess,
node: attribute_ {
style: attr_outer,
value: @spanned {
node: meta_word(@~"macro_escape"),
node: meta_word(@"macro_escape"),
span: codemap::dummy_sp(),
},
is_sugared_doc: false,
@ -687,8 +686,8 @@ pub fn expand_crate(parse_sess: @mut parse::ParseSess,
}
];
let cm = match parse_item_from_source_str(~"<core-macros>",
@core_macros(),
let cm = match parse_item_from_source_str(@"<core-macros>",
core_macros(),
copy cfg,
attrs,
parse_sess) {
@ -764,11 +763,11 @@ mod test {
// make sure that fail! is present
#[test] fn fail_exists_test () {
let src = ~"fn main() { fail!(\"something appropriately gloomy\");}";
let src = @"fn main() { fail!(\"something appropriately gloomy\");}";
let sess = parse::new_parse_sess(None);
let crate_ast = parse::parse_crate_from_source_str(
~"<test>",
@src,
@"<test>",
src,
~[],sess);
expand_crate(sess,~[],crate_ast);
}
@ -779,12 +778,12 @@ mod test {
// make sure that macros can leave scope
#[should_fail]
#[test] fn macros_cant_escape_fns_test () {
let src = ~"fn bogus() {macro_rules! z (() => (3+4))}\
let src = @"fn bogus() {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }";
let sess = parse::new_parse_sess(None);
let crate_ast = parse::parse_crate_from_source_str(
~"<test>",
@src,
@"<test>",
src,
~[],sess);
// should fail:
expand_crate(sess,~[],crate_ast);
@ -793,12 +792,12 @@ mod test {
// make sure that macros can leave scope for modules
#[should_fail]
#[test] fn macros_cant_escape_mods_test () {
let src = ~"mod foo {macro_rules! z (() => (3+4))}\
let src = @"mod foo {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }";
let sess = parse::new_parse_sess(None);
let crate_ast = parse::parse_crate_from_source_str(
~"<test>",
@src,
@"<test>",
src,
~[],sess);
// should fail:
expand_crate(sess,~[],crate_ast);
@ -806,19 +805,19 @@ mod test {
// macro_escape modules shouldn't cause macros to leave scope
#[test] fn macros_can_escape_flattened_mods_test () {
let src = ~"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
let src = @"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }";
let sess = parse::new_parse_sess(None);
let crate_ast = parse::parse_crate_from_source_str(
~"<test>",
@src,
@"<test>",
src,
~[], sess);
// should fail:
expand_crate(sess,~[],crate_ast);
}
#[test] fn core_macros_must_parse () {
let src = ~"
let src = @"
pub mod macros {
macro_rules! ignore (($($x:tt)*) => (()))
@ -828,9 +827,9 @@ mod test {
let sess = parse::new_parse_sess(None);
let cfg = ~[];
let item_ast = parse::parse_item_from_source_str(
~"<test>",
@src,
cfg,~[make_dummy_attr (@~"macro_escape")],sess);
@"<test>",
src,
cfg,~[make_dummy_attr (@"macro_escape")],sess);
match item_ast {
Some(_) => (), // success
None => fail!("expected this to parse")
@ -838,9 +837,9 @@ mod test {
}
#[test] fn test_contains_flatten (){
let attr1 = make_dummy_attr (@~"foo");
let attr2 = make_dummy_attr (@~"bar");
let escape_attr = make_dummy_attr (@~"macro_escape");
let attr1 = make_dummy_attr (@"foo");
let attr2 = make_dummy_attr (@"bar");
let escape_attr = make_dummy_attr (@"macro_escape");
let attrs1 = ~[attr1, escape_attr, attr2];
assert_eq!(contains_macro_escape (attrs1),true);
let attrs2 = ~[attr1,attr2];
@ -848,7 +847,7 @@ mod test {
}
// make a "meta_word" outer attribute with the given name
fn make_dummy_attr(s: @~str) -> ast::attribute {
fn make_dummy_attr(s: @str) -> ast::attribute {
spanned {
span:codemap::dummy_sp(),
node: attribute_ {
@ -864,7 +863,7 @@ mod test {
#[test]
fn renaming () {
let maybe_item_ast = string_to_item(@~"fn a() -> int { let b = 13; b }");
let maybe_item_ast = string_to_item(@"fn a() -> int { let b = 13; b }");
let item_ast = match maybe_item_ast {
Some(x) => x,
None => fail!("test case fail")
@ -887,7 +886,7 @@ mod test {
#[test]
fn pat_idents(){
let pat = string_to_pat(@~"(a,Foo{x:c @ (b,9),y:Bar(4,d)})");
let pat = string_to_pat(@"(a,Foo{x:c @ (b,9),y:Bar(4,d)})");
let pat_idents = new_name_finder();
let idents = @mut ~[];
((*pat_idents).visit_pat)(pat, (idents, mk_vt(pat_idents)));


@ -274,12 +274,13 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span,
then there's no need for it to be mutable */
if i == 0 {
stms.push(cx.stmt_let(fmt_sp, npieces > 1,
ident, cx.expr_str_uniq(fmt_sp, s)));
ident, cx.expr_str_uniq(fmt_sp, s.to_managed())));
} else {
// we call the push_str function because the
// bootstrap doesn't seem to work if we call the
// method.
let args = ~[cx.expr_mut_addr_of(fmt_sp, buf()), cx.expr_str(fmt_sp, s)];
let args = ~[cx.expr_mut_addr_of(fmt_sp, buf()),
cx.expr_str(fmt_sp, s.to_managed())];
let call = cx.expr_call_global(fmt_sp,
~[core_ident,
str_ident,
@ -303,7 +304,7 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span,
must be initialized as an empty string */
if i == 0 {
stms.push(cx.stmt_let(fmt_sp, true, ident,
cx.expr_str_uniq(fmt_sp, ~"")));
cx.expr_str_uniq(fmt_sp, @"")));
}
stms.push(cx.stmt_expr(make_new_conv(cx, fmt_sp, conv,
args[n], buf())));


@ -51,7 +51,7 @@ impl proto::visitor<(), (), ()> for @ExtCtxt {
}
}
fn visit_message(&self, name: ~str, _span: span, _tys: &[@ast::Ty],
fn visit_message(&self, name: @str, _span: span, _tys: &[@ast::Ty],
this: state, next: Option<next_state>) {
match next {
Some(ref next_state) => {


@ -20,13 +20,13 @@ use parse::token;
use parse::token::{interner_get};
pub trait proto_parser {
fn parse_proto(&self, id: ~str) -> protocol;
fn parse_proto(&self, id: @str) -> protocol;
fn parse_state(&self, proto: protocol);
fn parse_message(&self, state: state);
}
impl proto_parser for parser::Parser {
fn parse_proto(&self, id: ~str) -> protocol {
fn parse_proto(&self, id: @str) -> protocol {
let proto = protocol(id, *self.span);
self.parse_seq_to_before_end(
@ -43,7 +43,7 @@ impl proto_parser for parser::Parser {
fn parse_state(&self, proto: protocol) {
let id = self.parse_ident();
let name = copy *interner_get(id.name);
let name = interner_get(id.name);
self.expect(&token::COLON);
let dir = match copy *self.token {
@ -51,9 +51,9 @@ impl proto_parser for parser::Parser {
_ => fail!()
};
self.bump();
let dir = match dir {
@~"send" => send,
@~"recv" => recv,
let dir = match dir.as_slice() {
"send" => send,
"recv" => recv,
_ => fail!()
};
@ -78,7 +78,7 @@ impl proto_parser for parser::Parser {
}
fn parse_message(&self, state: state) {
let mname = copy *interner_get(self.parse_ident().name);
let mname = interner_get(self.parse_ident().name);
let args = if *self.token == token::LPAREN {
self.parse_unspanned_seq(
@ -97,7 +97,7 @@ impl proto_parser for parser::Parser {
let next = match *self.token {
token::IDENT(_, _) => {
let name = copy *interner_get(self.parse_ident().name);
let name = interner_get(self.parse_ident().name);
let ntys = if *self.token == token::LT {
self.parse_unspanned_seq(
&token::LT,


@ -101,7 +101,7 @@ impl gen_send for message {
name,
vec::append_one(
arg_names.map(|x| cx.str_of(*x)),
~"s").connect(", "));
@"s").connect(", "));
if !try {
body += fmt!("::std::pipes::send(pipe, message);\n");
@ -114,7 +114,7 @@ impl gen_send for message {
} else { ::std::pipes::rt::make_none() } }");
}
let body = cx.parse_expr(body);
let body = cx.parse_expr(body.to_managed());
let mut rty = cx.ty_path(path(~[next.data_name()],
span)
@ -123,7 +123,7 @@ impl gen_send for message {
rty = cx.ty_option(rty);
}
let name = cx.ident_of(if try { ~"try_" + name } else { name } );
let name = if try {cx.ident_of(~"try_" + name)} else {cx.ident_of(name)};
cx.item_fn_poly(dummy_sp(),
name,
@ -173,12 +173,12 @@ impl gen_send for message {
} }");
}
let body = cx.parse_expr(body);
let body = cx.parse_expr(body.to_managed());
let name = if try { ~"try_" + name } else { name };
let name = if try {cx.ident_of(~"try_" + name)} else {cx.ident_of(name)};
cx.item_fn_poly(dummy_sp(),
cx.ident_of(name),
name,
args_ast,
if try {
cx.ty_option(cx.ty_nil())
@ -326,7 +326,7 @@ impl gen_init for protocol {
start_state.generics.to_source(),
start_state.to_ty(cx).to_source(),
start_state.to_ty(cx).to_source(),
body.to_source()))
body.to_source()).to_managed())
}
fn gen_buffer_init(&self, ext_cx: @ExtCtxt) -> @ast::expr {
@ -358,10 +358,10 @@ impl gen_init for protocol {
self.states.map_to_vec(
|s| ext_cx.parse_stmt(
fmt!("data.%s.set_buffer(buffer)",
s.name))),
s.name).to_managed())),
Some(ext_cx.parse_expr(fmt!(
"::std::ptr::to_mut_unsafe_ptr(&mut (data.%s))",
self.states[0].name)))));
self.states[0].name).to_managed()))));
quote_expr!({
let buffer = $buffer;
@ -459,9 +459,9 @@ impl gen_init for protocol {
let allows = cx.attribute(
copy self.span,
cx.meta_list(copy self.span,
~"allow",
~[cx.meta_word(copy self.span, ~"non_camel_case_types"),
cx.meta_word(copy self.span, ~"unused_mut")]));
@"allow",
~[cx.meta_word(copy self.span, @"non_camel_case_types"),
cx.meta_word(copy self.span, @"unused_mut")]));
cx.item_mod(copy self.span, cx.ident_of(copy self.name),
~[allows], ~[], items)
}


@ -38,17 +38,17 @@ impl direction {
}
pub struct next_state {
state: ~str,
state: @str,
tys: ~[@ast::Ty],
}
// name, span, data, current state, next state
pub struct message(~str, span, ~[@ast::Ty], state, Option<next_state>);
pub struct message(@str, span, ~[@ast::Ty], state, Option<next_state>);
impl message {
pub fn name(&mut self) -> ~str {
pub fn name(&mut self) -> @str {
match *self {
message(ref id, _, _, _, _) => copy *id
message(id, _, _, _, _) => id
}
}
@ -70,7 +70,7 @@ pub type state = @state_;
pub struct state_ {
id: uint,
name: ~str,
name: @str,
ident: ast::ident,
span: span,
dir: direction,
@ -81,7 +81,7 @@ pub struct state_ {
impl state_ {
pub fn add_message(@self,
name: ~str,
name: @str,
span: span,
data: ~[@ast::Ty],
next: Option<next_state>) {
@ -122,11 +122,11 @@ impl state_ {
pub type protocol = @mut protocol_;
pub fn protocol(name: ~str, span: span) -> protocol {
pub fn protocol(name: @str, span: span) -> protocol {
@mut protocol_(name, span)
}
pub fn protocol_(name: ~str, span: span) -> protocol_ {
pub fn protocol_(name: @str, span: span) -> protocol_ {
protocol_ {
name: name,
span: span,
@ -136,7 +136,7 @@ pub fn protocol_(name: ~str, span: span) -> protocol_ {
}
pub struct protocol_ {
name: ~str,
name: @str,
span: span,
states: @mut ~[state],
@ -181,7 +181,7 @@ impl protocol_ {
impl protocol_ {
pub fn add_state_poly(@mut self,
name: ~str,
name: @str,
ident: ast::ident,
dir: direction,
generics: ast::Generics)
@ -208,7 +208,7 @@ impl protocol_ {
pub trait visitor<Tproto, Tstate, Tmessage> {
fn visit_proto(&self, proto: protocol, st: &[Tstate]) -> Tproto;
fn visit_state(&self, state: state, m: &[Tmessage]) -> Tstate;
fn visit_message(&self, name: ~str, spane: span, tys: &[@ast::Ty],
fn visit_message(&self, name: @str, spane: span, tys: &[@ast::Ty],
this: state, next: Option<next_state>) -> Tmessage;
}


@ -43,8 +43,6 @@ pub mod rt {
pub use parse::new_parser_from_tts;
pub use codemap::{BytePos, span, dummy_spanned};
use print::pprust::{item_to_str, ty_to_str};
pub trait ToTokens {
pub fn to_tokens(&self, _cx: @ExtCtxt) -> ~[token_tree];
}
@ -71,132 +69,132 @@ pub mod rt {
pub trait ToSource {
// Takes a thing and generates a string containing rust code for it.
pub fn to_source(&self) -> ~str;
pub fn to_source(&self) -> @str;
}
impl ToSource for ast::ident {
fn to_source(&self) -> ~str {
copy *ident_to_str(self)
fn to_source(&self) -> @str {
ident_to_str(self)
}
}
impl ToSource for @ast::item {
fn to_source(&self) -> ~str {
item_to_str(*self, get_ident_interner())
fn to_source(&self) -> @str {
pprust::item_to_str(*self, get_ident_interner()).to_managed()
}
}
impl<'self> ToSource for &'self [@ast::item] {
fn to_source(&self) -> ~str {
self.map(|i| i.to_source()).connect("\n\n")
fn to_source(&self) -> @str {
self.map(|i| i.to_source()).connect("\n\n").to_managed()
}
}
impl ToSource for @ast::Ty {
fn to_source(&self) -> ~str {
ty_to_str(*self, get_ident_interner())
fn to_source(&self) -> @str {
pprust::ty_to_str(*self, get_ident_interner()).to_managed()
}
}
impl<'self> ToSource for &'self [@ast::Ty] {
fn to_source(&self) -> ~str {
self.map(|i| i.to_source()).connect(", ")
fn to_source(&self) -> @str {
self.map(|i| i.to_source()).connect(", ").to_managed()
}
}
impl ToSource for Generics {
fn to_source(&self) -> ~str {
pprust::generics_to_str(self, get_ident_interner())
fn to_source(&self) -> @str {
pprust::generics_to_str(self, get_ident_interner()).to_managed()
}
}
impl ToSource for @ast::expr {
fn to_source(&self) -> ~str {
pprust::expr_to_str(*self, get_ident_interner())
fn to_source(&self) -> @str {
pprust::expr_to_str(*self, get_ident_interner()).to_managed()
}
}
impl ToSource for ast::blk {
fn to_source(&self) -> ~str {
pprust::block_to_str(self, get_ident_interner())
fn to_source(&self) -> @str {
pprust::block_to_str(self, get_ident_interner()).to_managed()
}
}
impl<'self> ToSource for &'self str {
fn to_source(&self) -> ~str {
let lit = dummy_spanned(ast::lit_str(@self.to_owned()));
pprust::lit_to_str(@lit)
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_str(self.to_managed()));
pprust::lit_to_str(@lit).to_managed()
}
}
impl ToSource for int {
fn to_source(&self) -> ~str {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i));
pprust::lit_to_str(@lit)
pprust::lit_to_str(@lit).to_managed()
}
}
impl ToSource for i8 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i8));
pprust::lit_to_str(@lit)
pprust::lit_to_str(@lit).to_managed()
}
}
impl ToSource for i16 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i16));
pprust::lit_to_str(@lit)
pprust::lit_to_str(@lit).to_managed()
}
}
impl ToSource for i32 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i32));
pprust::lit_to_str(@lit)
pprust::lit_to_str(@lit).to_managed()
}
}
impl ToSource for i64 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i64));
pprust::lit_to_str(@lit)
pprust::lit_to_str(@lit).to_managed()
}
}
impl ToSource for uint {
fn to_source(&self) -> ~str {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u));
pprust::lit_to_str(@lit)
pprust::lit_to_str(@lit).to_managed()
}
}
impl ToSource for u8 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u8));
pprust::lit_to_str(@lit)
pprust::lit_to_str(@lit).to_managed()
}
}
impl ToSource for u16 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u16));
pprust::lit_to_str(@lit)
pprust::lit_to_str(@lit).to_managed()
}
}
impl ToSource for u32 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u32));
pprust::lit_to_str(@lit)
pprust::lit_to_str(@lit).to_managed()
}
}
impl ToSource for u64 {
fn to_source(&self) -> ~str {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u64));
pprust::lit_to_str(@lit)
pprust::lit_to_str(@lit).to_managed()
}
}
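
The ToSource comment above ("takes a thing and generates a string containing rust code for it") now yields @str throughout. A rough sketch of the round trip, assuming an @ExtCtxt named cx is in scope and using the ExtParseUtils methods shown in the next hunk; the snippet text is illustrative:

    let e = cx.parse_expr(@"1 + 2");   // ExtParseUtils::parse_expr now takes @str
    let src: @str = e.to_source();     // pretty-prints back to roughly "1 + 2" as @str
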
@ -317,18 +315,18 @@ pub mod rt {
}
pub trait ExtParseUtils {
fn parse_item(&self, s: ~str) -> @ast::item;
fn parse_expr(&self, s: ~str) -> @ast::expr;
fn parse_stmt(&self, s: ~str) -> @ast::stmt;
fn parse_tts(&self, s: ~str) -> ~[ast::token_tree];
fn parse_item(&self, s: @str) -> @ast::item;
fn parse_expr(&self, s: @str) -> @ast::expr;
fn parse_stmt(&self, s: @str) -> @ast::stmt;
fn parse_tts(&self, s: @str) -> ~[ast::token_tree];
}
impl ExtParseUtils for ExtCtxt {
fn parse_item(&self, s: ~str) -> @ast::item {
fn parse_item(&self, s: @str) -> @ast::item {
let res = parse::parse_item_from_source_str(
~"<quote expansion>",
@(copy s),
@"<quote expansion>",
s,
self.cfg(),
~[],
self.parse_sess());
@ -341,27 +339,27 @@ pub mod rt {
}
}
fn parse_stmt(&self, s: ~str) -> @ast::stmt {
fn parse_stmt(&self, s: @str) -> @ast::stmt {
parse::parse_stmt_from_source_str(
~"<quote expansion>",
@(copy s),
@"<quote expansion>",
s,
self.cfg(),
~[],
self.parse_sess())
}
fn parse_expr(&self, s: ~str) -> @ast::expr {
fn parse_expr(&self, s: @str) -> @ast::expr {
parse::parse_expr_from_source_str(
~"<quote expansion>",
@(copy s),
@"<quote expansion>",
s,
self.cfg(),
self.parse_sess())
}
fn parse_tts(&self, s: ~str) -> ~[ast::token_tree] {
fn parse_tts(&self, s: @str) -> ~[ast::token_tree] {
parse::parse_tts_from_source_str(
~"<quote expansion>",
@(copy s),
@"<quote expansion>",
s,
self.cfg(),
self.parse_sess())
}


@ -59,21 +59,21 @@ pub fn expand_file(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
let topmost = topmost_expn_info(cx.backtrace().get());
let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo);
let filename = copy loc.file.name;
let filename = loc.file.name;
base::MRExpr(cx.expr_str(topmost.call_site, filename))
}
pub fn expand_stringify(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
let s = pprust::tts_to_str(tts, get_ident_interner());
base::MRExpr(cx.expr_str(sp, s))
base::MRExpr(cx.expr_str(sp, s.to_managed()))
}
pub fn expand_mod(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
base::check_zero_tts(cx, sp, tts, "module_path!");
base::MRExpr(cx.expr_str(sp,
cx.mod_path().map(|x| cx.str_of(*x)).connect("::")))
cx.mod_path().map(|x| cx.str_of(*x)).connect("::").to_managed()))
}
// include! : parse the given file as an expr
@ -94,13 +94,13 @@ pub fn expand_include_str(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
let file = get_single_str_from_tts(cx, sp, tts, "include_str!");
let res = io::read_whole_file_str(&res_rel_file(cx, sp, &Path(file)));
match res {
result::Ok(_) => { /* Continue. */ }
result::Err(ref e) => {
cx.parse_sess().span_diagnostic.handler().fatal((*e));
result::Ok(res) => {
base::MRExpr(cx.expr_str(sp, res.to_managed()))
}
result::Err(e) => {
cx.span_fatal(sp, e);
}
}
base::MRExpr(cx.expr_str(sp, result::unwrap(res)))
}
pub fn expand_include_bin(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
@ -131,7 +131,7 @@ fn topmost_expn_info(expn_info: @codemap::ExpnInfo) -> @codemap::ExpnInfo {
_
}) => {
// Don't recurse into file using "include!"
if *name == ~"include" {
if "include" == *name {
expn_info
} else {
topmost_expn_info(next_expn_info)


@ -205,7 +205,7 @@ pub fn nameize(p_s: @mut ParseSess, ms: &[matcher], res: &[@named_match])
} => {
if ret_val.contains_key(bind_name) {
p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: "+
*ident_to_str(bind_name))
ident_to_str(bind_name))
}
ret_val.insert(*bind_name, res[idx]);
}
@ -373,8 +373,8 @@ pub fn parse(
let nts = bb_eis.map(|ei| {
match ei.elts[ei.idx].node {
match_nonterminal(ref bind,ref name,_) => {
fmt!("%s ('%s')", *ident_to_str(name),
*ident_to_str(bind))
fmt!("%s ('%s')", ident_to_str(name),
ident_to_str(bind))
}
_ => fail!()
} }).connect(" or ");
@ -398,7 +398,7 @@ pub fn parse(
match ei.elts[ei.idx].node {
match_nonterminal(_, ref name, idx) => {
ei.matches[idx].push(@matched_nonterminal(
parse_nt(&rust_parser, *ident_to_str(name))));
parse_nt(&rust_parser, ident_to_str(name))));
ei.idx += 1u;
}
_ => fail!()


@ -148,7 +148,7 @@ pub fn add_new_extension(cx: @ExtCtxt,
|cx, sp, arg| generic_extension(cx, sp, name, arg, *lhses, *rhses);
return MRDef(MacroDef{
name: copy *ident_to_str(&name),
name: ident_to_str(&name),
ext: NormalTT(base::SyntaxExpanderTT{expander: exp, span: Some(sp)})
});
}


@ -121,7 +121,7 @@ fn lookup_cur_matched(r: &mut TtReader, name: ident) -> @named_match {
Some(s) => lookup_cur_matched_by_matched(r, s),
None => {
r.sp_diag.span_fatal(r.cur_span, fmt!("unknown macro variable `%s`",
*ident_to_str(&name)));
ident_to_str(&name)));
}
}
}
@ -139,8 +139,8 @@ fn lockstep_iter_size(t: &token_tree, r: &mut TtReader) -> lis {
lis_contradiction(_) => copy rhs,
lis_constraint(r_len, _) if l_len == r_len => copy lhs,
lis_constraint(r_len, ref r_id) => {
let l_n = copy *ident_to_str(l_id);
let r_n = copy *ident_to_str(r_id);
let l_n = ident_to_str(l_id);
let r_n = ident_to_str(r_id);
lis_contradiction(fmt!("Inconsistent lockstep iteration: \
'%s' has %u items, but '%s' has %u",
l_n, l_len, r_n, r_len))
@ -290,7 +290,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
r.sp_diag.span_fatal(
copy r.cur_span, /* blame the macro writer */
fmt!("variable '%s' is still repeating at this depth",
*ident_to_str(&ident)));
ident_to_str(&ident)));
}
}
}


@ -990,7 +990,7 @@ mod test {
// make sure idents get transformed everywhere
#[test] fn ident_transformation () {
let zz_fold = fun_to_ident_folder(to_zz());
let ast = string_to_crate(@~"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}");
let ast = string_to_crate(@"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}");
assert_pred!(matches_codepattern,
"matches_codepattern",
pprust::to_str(zz_fold.fold_crate(ast),fake_print_crate,
@ -1001,7 +1001,7 @@ mod test {
// even inside macro defs....
#[test] fn ident_transformation_in_defs () {
let zz_fold = fun_to_ident_folder(to_zz());
let ast = string_to_crate(@~"macro_rules! a {(b $c:expr $(d $e:token)f+
let ast = string_to_crate(@"macro_rules! a {(b $c:expr $(d $e:token)f+
=> (g $(d $d $e)+))} ");
assert_pred!(matches_codepattern,
"matches_codepattern",


@ -46,7 +46,7 @@ impl parser_attr for Parser {
}
token::DOC_COMMENT(s) => {
let attr = ::attr::mk_sugared_doc_attr(
copy *self.id_to_str(s),
self.id_to_str(s),
self.span.lo,
self.span.hi
);
@ -119,7 +119,7 @@ impl parser_attr for Parser {
}
token::DOC_COMMENT(s) => {
let attr = ::attr::mk_sugared_doc_attr(
copy *self.id_to_str(s),
self.id_to_str(s),
self.span.lo,
self.span.hi
);


@ -320,10 +320,10 @@ pub struct lit {
// probably not a good thing.
pub fn gather_comments_and_literals(span_diagnostic:
@diagnostic::span_handler,
path: ~str,
path: @str,
srdr: @io::Reader)
-> (~[cmnt], ~[lit]) {
let src = @str::from_bytes(srdr.read_whole_stream());
let src = str::from_bytes(srdr.read_whole_stream()).to_managed();
let cm = CodeMap::new();
let filemap = cm.new_filemap(path, src);
let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap);


@ -158,7 +158,7 @@ impl Parser {
self.fatal(
fmt!(
"expected `%s`, found `%s`",
*self.id_to_str(kw.to_ident()),
self.id_to_str(kw.to_ident()),
self.this_token_to_str()
)
);


@ -40,7 +40,7 @@ pub struct TokenAndSpan {tok: token::Token, sp: span}
pub struct StringReader {
span_diagnostic: @span_handler,
src: @~str,
src: @str,
// The absolute offset within the codemap of the next character to read
pos: BytePos,
// The absolute offset within the codemap of the last character read(curr)
@ -176,7 +176,7 @@ pub fn with_str_from<T>(rdr: @mut StringReader, start: BytePos, f: &fn(s: &str)
pub fn bump(rdr: &mut StringReader) {
rdr.last_pos = rdr.pos;
let current_byte_offset = byte_offset(rdr, rdr.pos).to_uint();
if current_byte_offset < (*rdr.src).len() {
if current_byte_offset < (rdr.src).len() {
assert!(rdr.curr != -1 as char);
let last_char = rdr.curr;
let next = rdr.src.char_range_at(current_byte_offset);
@ -202,7 +202,7 @@ pub fn is_eof(rdr: @mut StringReader) -> bool {
}
pub fn nextch(rdr: @mut StringReader) -> char {
let offset = byte_offset(rdr, rdr.pos).to_uint();
if offset < (*rdr.src).len() {
if offset < (rdr.src).len() {
return rdr.src.char_at(offset);
} else { return -1 as char; }
}
@ -801,9 +801,9 @@ mod test {
}
// open a string reader for the given string
fn setup(teststr: ~str) -> Env {
fn setup(teststr: @str) -> Env {
let cm = CodeMap::new();
let fm = cm.new_filemap(~"zebra.rs", @teststr);
let fm = cm.new_filemap(@"zebra.rs", teststr);
let span_handler =
diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm);
Env {
@ -813,7 +813,7 @@ mod test {
#[test] fn t1 () {
let Env {string_reader} =
setup(~"/* my source file */ \
setup(@"/* my source file */ \
fn main() { io::println(~\"zebra\"); }\n");
let id = str_to_ident("fn");
let tok1 = string_reader.next_token();
@ -849,14 +849,14 @@ mod test {
}
#[test] fn doublecolonparsing () {
let env = setup (~"a b");
let env = setup (@"a b");
check_tokenization (env,
~[mk_ident("a",false),
mk_ident("b",false)]);
}
#[test] fn dcparsing_2 () {
let env = setup (~"a::b");
let env = setup (@"a::b");
check_tokenization (env,
~[mk_ident("a",true),
token::MOD_SEP,
@ -864,7 +864,7 @@ mod test {
}
#[test] fn dcparsing_3 () {
let env = setup (~"a ::b");
let env = setup (@"a ::b");
check_tokenization (env,
~[mk_ident("a",false),
token::MOD_SEP,
@ -872,7 +872,7 @@ mod test {
}
#[test] fn dcparsing_4 () {
let env = setup (~"a:: b");
let env = setup (@"a:: b");
check_tokenization (env,
~[mk_ident("a",true),
token::MOD_SEP,
@ -880,28 +880,28 @@ mod test {
}
#[test] fn character_a() {
let env = setup(~"'a'");
let env = setup(@"'a'");
let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token();
assert_eq!(tok,token::LIT_INT('a' as i64, ast::ty_char));
}
#[test] fn character_space() {
let env = setup(~"' '");
let env = setup(@"' '");
let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token();
assert_eq!(tok, token::LIT_INT(' ' as i64, ast::ty_char));
}
#[test] fn character_escaped() {
let env = setup(~"'\n'");
let env = setup(@"'\n'");
let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token();
assert_eq!(tok, token::LIT_INT('\n' as i64, ast::ty_char));
}
#[test] fn lifetime_name() {
let env = setup(~"'abc");
let env = setup(@"'abc");
let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token();
let id = token::str_to_ident("abc");


@ -82,38 +82,38 @@ pub fn parse_crate_from_file(
}
pub fn parse_crate_from_source_str(
name: ~str,
source: @~str,
name: @str,
source: @str,
cfg: ast::crate_cfg,
sess: @mut ParseSess
) -> @ast::crate {
let p = new_parser_from_source_str(
sess,
/*bad*/ copy cfg,
/*bad*/ copy name,
name,
source
);
maybe_aborted(p.parse_crate_mod(),p)
}
pub fn parse_expr_from_source_str(
name: ~str,
source: @~str,
name: @str,
source: @str,
cfg: ast::crate_cfg,
sess: @mut ParseSess
) -> @ast::expr {
let p = new_parser_from_source_str(
sess,
cfg,
/*bad*/ copy name,
name,
source
);
maybe_aborted(p.parse_expr(), p)
}
pub fn parse_item_from_source_str(
name: ~str,
source: @~str,
name: @str,
source: @str,
cfg: ast::crate_cfg,
attrs: ~[ast::attribute],
sess: @mut ParseSess
@ -121,30 +121,30 @@ pub fn parse_item_from_source_str(
let p = new_parser_from_source_str(
sess,
cfg,
/*bad*/ copy name,
name,
source
);
maybe_aborted(p.parse_item(attrs),p)
}
pub fn parse_meta_from_source_str(
name: ~str,
source: @~str,
name: @str,
source: @str,
cfg: ast::crate_cfg,
sess: @mut ParseSess
) -> @ast::meta_item {
let p = new_parser_from_source_str(
sess,
cfg,
/*bad*/ copy name,
name,
source
);
maybe_aborted(p.parse_meta_item(),p)
}
pub fn parse_stmt_from_source_str(
name: ~str,
source: @~str,
name: @str,
source: @str,
cfg: ast::crate_cfg,
attrs: ~[ast::attribute],
sess: @mut ParseSess
@ -152,22 +152,22 @@ pub fn parse_stmt_from_source_str(
let p = new_parser_from_source_str(
sess,
cfg,
/*bad*/ copy name,
name,
source
);
maybe_aborted(p.parse_stmt(attrs),p)
}
pub fn parse_tts_from_source_str(
name: ~str,
source: @~str,
name: @str,
source: @str,
cfg: ast::crate_cfg,
sess: @mut ParseSess
) -> ~[ast::token_tree] {
let p = new_parser_from_source_str(
sess,
cfg,
/*bad*/ copy name,
name,
source
);
*p.quote_depth += 1u;
@ -182,8 +182,8 @@ pub fn parse_tts_from_source_str(
// result.
pub fn parse_from_source_str<T>(
f: &fn(&Parser) -> T,
name: ~str, ss: codemap::FileSubstr,
source: @~str,
name: @str, ss: codemap::FileSubstr,
source: @str,
cfg: ast::crate_cfg,
sess: @mut ParseSess
) -> T {
@ -213,8 +213,8 @@ pub fn next_node_id(sess: @mut ParseSess) -> node_id {
// Create a new parser from a source string
pub fn new_parser_from_source_str(sess: @mut ParseSess,
cfg: ast::crate_cfg,
name: ~str,
source: @~str)
name: @str,
source: @str)
-> Parser {
filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg)
}
@ -223,9 +223,9 @@ pub fn new_parser_from_source_str(sess: @mut ParseSess,
// is specified as a substring of another file.
pub fn new_parser_from_source_substr(sess: @mut ParseSess,
cfg: ast::crate_cfg,
name: ~str,
name: @str,
ss: codemap::FileSubstr,
source: @~str)
source: @str)
-> Parser {
filemap_to_parser(sess,substring_to_filemap(sess,source,name,ss),cfg)
}
@ -275,7 +275,7 @@ pub fn new_parser_from_tts(sess: @mut ParseSess,
pub fn file_to_filemap(sess: @mut ParseSess, path: &Path, spanopt: Option<span>)
-> @FileMap {
match io::read_whole_file_str(path) {
Ok(src) => string_to_filemap(sess, @src, path.to_str()),
Ok(src) => string_to_filemap(sess, src.to_managed(), path.to_str().to_managed()),
Err(e) => {
match spanopt {
Some(span) => sess.span_diagnostic.span_fatal(span, e),
@ -287,14 +287,14 @@ pub fn file_to_filemap(sess: @mut ParseSess, path: &Path, spanopt: Option<span>)
// given a session and a string, add the string to
// the session's codemap and return the new filemap
pub fn string_to_filemap(sess: @mut ParseSess, source: @~str, path: ~str)
pub fn string_to_filemap(sess: @mut ParseSess, source: @str, path: @str)
-> @FileMap {
sess.cm.new_filemap(path, source)
}
// given a session and a string and a path and a FileSubStr, add
// the string to the CodeMap and return the new FileMap
pub fn substring_to_filemap(sess: @mut ParseSess, source: @~str, path: ~str,
pub fn substring_to_filemap(sess: @mut ParseSess, source: @str, path: @str,
filesubstr: FileSubstr) -> @FileMap {
sess.cm.new_filemap_w_substr(path,filesubstr,source)
}
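
A small sketch of what the comment above describes ("add the string to the session's codemap and return the new filemap") with the new @str arguments; the source text and the <example> name are made up, and parse::new_parse_sess(None) is used as in the tests elsewhere in this diff:

    let sess = parse::new_parse_sess(None);
    // both source and path are @str, matching the signature above
    let fm = string_to_filemap(sess, @"fn main() { }", @"<example>");
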
@ -349,7 +349,7 @@ mod test {
use util::parser_testing::{string_to_stmt, strs_to_idents};
// map a string to tts, return the tt without its parsesess
fn string_to_tts_only(source_str : @~str) -> ~[ast::token_tree] {
fn string_to_tts_only(source_str : @str) -> ~[ast::token_tree] {
let (tts,_ps) = string_to_tts_and_sess(source_str);
tts
}
@ -368,7 +368,7 @@ mod test {
}
#[test] fn path_exprs_1 () {
assert_eq!(string_to_expr(@~"a"),
assert_eq!(string_to_expr(@"a"),
@ast::expr{id:1,
node:ast::expr_path(@ast::Path {span:sp(0,1),
global:false,
@ -379,7 +379,7 @@ mod test {
}
#[test] fn path_exprs_2 () {
assert_eq!(string_to_expr(@~"::a::b"),
assert_eq!(string_to_expr(@"::a::b"),
@ast::expr{id:1,
node:ast::expr_path(
@ast::Path {span:sp(0,6),
@ -394,11 +394,11 @@ mod test {
// marked as `#[should_fail]`.
/*#[should_fail]
#[test] fn bad_path_expr_1() {
string_to_expr(@~"::abc::def::return");
string_to_expr(@"::abc::def::return");
}*/
#[test] fn string_to_tts_1 () {
let (tts,_ps) = string_to_tts_and_sess(@~"fn a (b : int) { b; }");
let (tts,_ps) = string_to_tts_and_sess(@"fn a (b : int) { b; }");
assert_eq!(to_json_str(@tts),
~"[\
[\"tt_tok\",null,[\"IDENT\",\"fn\",false]],\
@ -427,7 +427,7 @@ mod test {
}
#[test] fn ret_expr() {
assert_eq!(string_to_expr(@~"return d"),
assert_eq!(string_to_expr(@"return d"),
@ast::expr{id:2,
node:ast::expr_ret(
Some(@ast::expr{id:1,
@ -443,7 +443,7 @@ mod test {
}
#[test] fn parse_stmt_1 () {
assert_eq!(string_to_stmt(@~"b;"),
assert_eq!(string_to_stmt(@"b;"),
@spanned{
node: ast::stmt_expr(@ast::expr{
id: 1,
@ -465,7 +465,7 @@ mod test {
}
#[test] fn parse_ident_pat () {
let parser = string_to_parser(@~"b");
let parser = string_to_parser(@"b");
assert_eq!(parser.parse_pat(),
@ast::pat{id:1, // fixme
node: ast::pat_ident(ast::bind_infer,
@ -482,7 +482,7 @@ mod test {
}
#[test] fn parse_arg () {
let parser = string_to_parser(@~"b : int");
let parser = string_to_parser(@"b : int");
assert_eq!(parser.parse_arg_general(true),
ast::arg{
is_mutbl: false,
@ -515,7 +515,7 @@ mod test {
#[test] fn parse_fundecl () {
// this test depends on the intern order of "fn" and "int", and on the
// assignment order of the node_ids.
assert_eq!(string_to_item(@~"fn a (b : int) { b; }"),
assert_eq!(string_to_item(@"fn a (b : int) { b; }"),
Some(
@ast::item{ident:str_to_ident("a"),
attrs:~[],
@ -585,12 +585,12 @@ mod test {
#[test] fn parse_exprs () {
// just make sure that they parse....
string_to_expr(@~"3 + 4");
string_to_expr(@~"a::z.froob(b,@(987+3))");
string_to_expr(@"3 + 4");
string_to_expr(@"a::z.froob(b,@(987+3))");
}
#[test] fn attrs_fix_bug () {
string_to_item(@~"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
string_to_item(@"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
-> Result<@Writer, ~str> {
#[cfg(windows)]
fn wb() -> c_int {


@ -259,7 +259,7 @@ impl Parser {
-> bool {
match *token {
token::IDENT(sid, _) => {
str::eq_slice(*self.id_to_str(sid), ident)
str::eq_slice(self.id_to_str(sid), ident)
}
_ => false
}


@ -96,7 +96,6 @@ use core::iterator::IteratorUtil;
use core::either::Either;
use core::either;
use core::hashmap::HashSet;
use core::str;
use core::vec;
#[deriving(Eq)]
@ -263,7 +262,7 @@ pub struct Parser {
/// extra detail when the same error is seen twice
obsolete_set: @mut HashSet<ObsoleteSyntax>,
/// Used to determine the path to externally loaded source files
mod_path_stack: @mut ~[~str],
mod_path_stack: @mut ~[@str],
}
@ -333,7 +332,7 @@ impl Parser {
}
pub fn get_id(&self) -> node_id { next_node_id(self.sess) }
pub fn id_to_str(&self, id: ident) -> @~str {
pub fn id_to_str(&self, id: ident) -> @str {
get_ident_interner().get(id.name)
}
@ -2886,7 +2885,7 @@ impl Parser {
loop {
match *self.token {
token::LIFETIME(lifetime) => {
if str::eq_slice(*self.id_to_str(lifetime), "static") {
if "static" == self.id_to_str(lifetime) {
result.push(RegionTyParamBound);
} else {
self.span_err(*self.span,
@ -2898,11 +2897,11 @@ impl Parser {
let obsolete_bound = match *self.token {
token::MOD_SEP => false,
token::IDENT(sid, _) => {
match *self.id_to_str(sid) {
~"send" |
~"copy" |
~"const" |
~"owned" => {
match self.id_to_str(sid).as_slice() {
"send" |
"copy" |
"const" |
"owned" => {
self.obsolete(
*self.span,
ObsoleteLowerCaseKindBounds);
@ -3364,7 +3363,7 @@ impl Parser {
}
if fields.len() == 0 {
self.fatal(fmt!("Unit-like struct should be written as `struct %s;`",
*get_ident_interner().get(class_name.name)));
get_ident_interner().get(class_name.name)));
}
self.bump();
} else if *self.token == token::LPAREN {
@ -3580,8 +3579,8 @@ impl Parser {
let file_path = match ::attr::first_attr_value_str_by_name(
attrs, "path") {
Some(d) => copy *d,
None => copy *default_path
Some(d) => d,
None => default_path
};
self.mod_path_stack.push(file_path)
}
@ -3599,13 +3598,13 @@ impl Parser {
let prefix = prefix.dir_path();
let mod_path_stack = &*self.mod_path_stack;
let mod_path = Path(".").push_many(*mod_path_stack);
let default_path = *token::interner_get(id.name) + ".rs";
let default_path = token::interner_get(id.name).to_owned() + ".rs";
let file_path = match ::attr::first_attr_value_str_by_name(
outer_attrs, "path") {
Some(d) => {
let path = Path(copy *d);
let path = Path(d);
if !path.is_absolute {
mod_path.push(copy *d)
mod_path.push(d)
} else {
path
}
@ -3637,9 +3636,9 @@ impl Parser {
let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs);
return (ast::item_mod(m0), mod_attrs);
fn cdir_path_opt(default: ~str, attrs: ~[ast::attribute]) -> ~str {
fn cdir_path_opt(default: @str, attrs: ~[ast::attribute]) -> @str {
match ::attr::first_attr_value_str_by_name(attrs, "path") {
Some(d) => copy *d,
Some(d) => d,
None => default
}
}
@ -4263,7 +4262,7 @@ impl Parser {
let first_ident = self.parse_ident();
let mut path = ~[first_ident];
debug!("parsed view_path: %s", *self.id_to_str(first_ident));
debug!("parsed view_path: %s", self.id_to_str(first_ident));
match *self.token {
token::EQ => {
// x = foo::bar
@ -4528,7 +4527,7 @@ impl Parser {
config: copy self.cfg })
}
pub fn parse_str(&self) -> @~str {
pub fn parse_str(&self) -> @str {
match *self.token {
token::LIT_STR(s) => {
self.bump();


@ -21,10 +21,8 @@ use core::cast;
use core::char;
use core::cmp::Equiv;
use core::local_data;
use core::str;
use core::rand;
use core::rand::RngUtil;
use core::to_bytes;
#[deriving(Encodable, Decodable, Eq)]
pub enum binop {
@ -180,28 +178,28 @@ pub fn to_str(in: @ident_interner, t: &Token) -> ~str {
}
LIT_INT_UNSUFFIXED(i) => { i.to_str() }
LIT_FLOAT(ref s, t) => {
let mut body = copy *ident_to_str(s);
let mut body = ident_to_str(s).to_owned();
if body.ends_with(".") {
body += "0"; // `10.f` is not a float literal
}
body + ast_util::float_ty_to_str(t)
}
LIT_FLOAT_UNSUFFIXED(ref s) => {
let mut body = copy *ident_to_str(s);
let mut body = ident_to_str(s).to_owned();
if body.ends_with(".") {
body += "0"; // `10.f` is not a float literal
}
body
}
LIT_STR(ref s) => { ~"\"" + ident_to_str(s).escape_default() + "\"" }
LIT_STR(ref s) => { fmt!("\"%s\"", ident_to_str(s).escape_default()) }
/* Name components */
IDENT(s, _) => copy *in.get(s.name),
LIFETIME(s) => fmt!("'%s", *in.get(s.name)),
IDENT(s, _) => in.get(s.name).to_owned(),
LIFETIME(s) => fmt!("'%s", in.get(s.name)),
UNDERSCORE => ~"_",
/* Other */
DOC_COMMENT(ref s) => copy *ident_to_str(s),
DOC_COMMENT(ref s) => ident_to_str(s).to_owned(),
EOF => ~"<eof>",
INTERPOLATED(ref nt) => {
match nt {
@ -350,20 +348,6 @@ pub mod special_idents {
pub static type_self: ident = ident { name: 34, ctxt: 0}; // `Self`
}
pub struct StringRef<'self>(&'self str);
impl<'self> Equiv<@~str> for StringRef<'self> {
#[inline(always)]
fn equiv(&self, other: &@~str) -> bool { str::eq_slice(**self, **other) }
}
impl<'self> to_bytes::IterBytes for StringRef<'self> {
#[inline(always)]
fn iter_bytes(&self, lsb0: bool, f: to_bytes::Cb) -> bool {
(**self).iter_bytes(lsb0, f)
}
}
/**
* Maps a token to a record specifying the corresponding binary
* operator
@ -403,14 +387,14 @@ impl ident_interner {
pub fn gensym(&self, val: &str) -> Name {
self.interner.gensym(val)
}
pub fn get(&self, idx: Name) -> @~str {
pub fn get(&self, idx: Name) -> @str {
self.interner.get(idx)
}
// is this really something that should be exposed?
pub fn len(&self) -> uint {
self.interner.len()
}
pub fn find_equiv<Q:Hash + IterBytes + Equiv<@~str>>(&self, val: &Q)
pub fn find_equiv<Q:Hash + IterBytes + Equiv<@str>>(&self, val: &Q)
-> Option<Name> {
self.interner.find_equiv(val)
}
@ -542,12 +526,12 @@ pub fn gensym(str : &str) -> Name {
}
// map an interned representation back to a string
pub fn interner_get(name : Name) -> @~str {
pub fn interner_get(name : Name) -> @str {
get_ident_interner().get(name)
}
// maps an identifier to the string that it corresponds to
pub fn ident_to_str(id : &ast::ident) -> @~str {
pub fn ident_to_str(id : &ast::ident) -> @str {
interner_get(id.name)
}
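
A quick sketch of the round trip described by the two comments above, using str_to_ident from elsewhere in this diff; the string "foo" is illustrative:

    let id = str_to_ident("foo");      // intern "foo" and get its ident
    let s: @str = ident_to_str(&id);   // @str straight from the interner, no @~str box
    assert!("foo" == s);               // &str and @str now compare directly
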
@ -715,6 +699,6 @@ mod test {
#[test] fn t1() {
let a = fresh_name("ghi");
io::println(fmt!("interned name: %u,\ntextual name: %s\n",
a,*interner_get(a)));
a,interner_get(a)));
}
}


@ -80,7 +80,7 @@ pub struct begin_t {
}
pub enum token {
STRING(@~str, int),
STRING(@str, int),
BREAK(break_t),
BEGIN(begin_t),
END,
@ -107,7 +107,7 @@ impl token {
pub fn tok_str(t: token) -> ~str {
match t {
STRING(s, len) => return fmt!("STR(%s,%d)", *s, len),
STRING(s, len) => return fmt!("STR(%s,%d)", s, len),
BREAK(_) => return ~"BREAK",
BEGIN(_) => return ~"BEGIN",
END => return ~"END",
@ -335,11 +335,11 @@ impl Printer {
STRING(s, len) => {
if self.scan_stack_empty {
debug!("pp STRING('%s')/print ~[%u,%u]",
*s, self.left, self.right);
s, self.left, self.right);
self.print(t, len);
} else {
debug!("pp STRING('%s')/buffer ~[%u,%u]",
*s, self.left, self.right);
s, self.left, self.right);
self.advance_right();
self.token[self.right] = t;
self.size[self.right] = len;
@ -534,11 +534,11 @@ impl Printer {
}
}
STRING(s, len) => {
debug!("print STRING(%s)", *s);
debug!("print STRING(%s)", s);
assert_eq!(L, len);
// assert!(L <= space);
self.space -= len;
self.print_str(*s);
self.print_str(s);
}
EOF => {
// EOF should never get here.
@ -572,15 +572,15 @@ pub fn end(p: @mut Printer) { p.pretty_print(END); }
pub fn eof(p: @mut Printer) { p.pretty_print(EOF); }
pub fn word(p: @mut Printer, wrd: &str) {
p.pretty_print(STRING(@/*bad*/ wrd.to_owned(), wrd.len() as int));
p.pretty_print(STRING(/* bad */ wrd.to_managed(), wrd.len() as int));
}
pub fn huge_word(p: @mut Printer, wrd: &str) {
p.pretty_print(STRING(@/*bad*/ wrd.to_owned(), size_infinity));
p.pretty_print(STRING(/* bad */ wrd.to_managed(), size_infinity));
}
pub fn zero_word(p: @mut Printer, wrd: &str) {
p.pretty_print(STRING(@/*bad*/ wrd.to_owned(), 0));
p.pretty_print(STRING(/* bad */ wrd.to_managed(), 0));
}
pub fn spaces(p: @mut Printer, n: uint) { break_offset(p, n, 0); }


@ -111,14 +111,14 @@ pub fn print_crate(cm: @CodeMap,
intr: @ident_interner,
span_diagnostic: @diagnostic::span_handler,
crate: @ast::crate,
filename: ~str,
filename: @str,
in: @io::Reader,
out: @io::Writer,
ann: pp_ann,
is_expanded: bool) {
let (cmnts, lits) = comments::gather_comments_and_literals(
span_diagnostic,
copy filename,
filename,
in
);
let s = @ps {
@ -860,7 +860,7 @@ pub fn print_attribute(s: @ps, attr: ast::attribute) {
if attr.node.is_sugared_doc {
let meta = attr::attr_meta(attr);
let comment = attr::get_meta_item_value_str(meta).get();
word(s.s, *comment);
word(s.s, comment);
} else {
word(s.s, "#[");
print_meta_item(s, attr.node.value);
@ -1400,10 +1400,10 @@ pub fn print_expr(s: @ps, expr: @ast::expr) {
word(s.s, "asm!");
}
popen(s);
print_string(s, *a.asm);
print_string(s, a.asm);
word_space(s, ":");
for a.outputs.each |&(co, o)| {
print_string(s, *co);
print_string(s, co);
popen(s);
print_expr(s, o);
pclose(s);
@ -1411,14 +1411,14 @@ pub fn print_expr(s: @ps, expr: @ast::expr) {
}
word_space(s, ":");
for a.inputs.each |&(co, o)| {
print_string(s, *co);
print_string(s, co);
popen(s);
print_expr(s, o);
pclose(s);
word_space(s, ",");
}
word_space(s, ":");
print_string(s, *a.clobbers);
print_string(s, a.clobbers);
pclose(s);
}
ast::expr_mac(ref m) => print_mac(s, m),
@ -1474,7 +1474,7 @@ pub fn print_decl(s: @ps, decl: @ast::decl) {
}
pub fn print_ident(s: @ps, ident: ast::ident) {
word(s.s, *ident_to_str(&ident));
word(s.s, ident_to_str(&ident));
}
pub fn print_for_decl(s: @ps, loc: @ast::local, coll: @ast::expr) {
@ -1776,14 +1776,14 @@ pub fn print_generics(s: @ps, generics: &ast::Generics) {
pub fn print_meta_item(s: @ps, item: @ast::meta_item) {
ibox(s, indent_unit);
match item.node {
ast::meta_word(name) => word(s.s, *name),
ast::meta_word(name) => word(s.s, name),
ast::meta_name_value(name, value) => {
word_space(s, *name);
word_space(s, name);
word_space(s, "=");
print_literal(s, @value);
}
ast::meta_list(name, ref items) => {
word(s.s, *name);
word(s.s, name);
popen(s);
commasep(
s,
@ -1995,7 +1995,7 @@ pub fn print_literal(s: @ps, lit: @ast::lit) {
_ => ()
}
match lit.node {
ast::lit_str(st) => print_string(s, *st),
ast::lit_str(st) => print_string(s, st),
ast::lit_int(ch, ast::ty_char) => {
word(s.s, ~"'" + char::escape_default(ch as char) + "'");
}
@ -2023,9 +2023,9 @@ pub fn print_literal(s: @ps, lit: @ast::lit) {
}
}
ast::lit_float(f, t) => {
word(s.s, *f + ast_util::float_ty_to_str(t));
word(s.s, f.to_owned() + ast_util::float_ty_to_str(t));
}
ast::lit_float_unsuffixed(f) => word(s.s, *f),
ast::lit_float_unsuffixed(f) => word(s.s, f),
ast::lit_nil => word(s.s, "()"),
ast::lit_bool(val) => {
if val { word(s.s, "true"); } else { word(s.s, "false"); }
@ -2101,7 +2101,7 @@ pub fn print_comment(s: @ps, cmnt: &comments::cmnt) {
// We need to do at least one, possibly two hardbreaks.
let is_semi =
match s.s.last_token() {
pp::STRING(s, _) => *s == ~";",
pp::STRING(s, _) => ";" == s,
_ => false
};
if is_semi || is_begin(s) || is_end(s) { hardbreak(s.s); }


@ -19,7 +19,6 @@ use core::prelude::*;
use core::cmp::Equiv;
use core::hashmap::HashMap;
use syntax::parse::token::StringRef;
pub struct Interner<T> {
priv map: @mut HashMap<T, uint>,
@ -80,8 +79,8 @@ impl<T:Eq + IterBytes + Hash + Const + Copy> Interner<T> {
// A StrInterner differs from Interner<String> in that it accepts
// borrowed pointers rather than @ ones, resulting in less allocation.
pub struct StrInterner {
priv map: @mut HashMap<@~str, uint>,
priv vect: @mut ~[@~str],
priv map: @mut HashMap<@str, uint>,
priv vect: @mut ~[@str],
}
// when traits can extend traits, we should extend index<uint,T> to get []
@ -95,37 +94,38 @@ impl StrInterner {
pub fn prefill(init: &[&str]) -> StrInterner {
let rv = StrInterner::new();
for init.each() |v| { rv.intern(*v); }
for init.each |&v| { rv.intern(v); }
rv
}
pub fn intern(&self, val: &str) -> uint {
match self.map.find_equiv(&StringRef(val)) {
match self.map.find_equiv(&val) {
Some(&idx) => return idx,
None => (),
}
let new_idx = self.len();
self.map.insert(@val.to_owned(), new_idx);
self.vect.push(@val.to_owned());
let val = val.to_managed();
self.map.insert(val, new_idx);
self.vect.push(val);
new_idx
}
pub fn gensym(&self, val: &str) -> uint {
let new_idx = self.len();
// leave out of .map to avoid colliding
self.vect.push(@val.to_owned());
self.vect.push(val.to_managed());
new_idx
}
// this isn't "pure" in the traditional sense, because it can go from
// failing to returning a value as items are interned. But for typestate,
// where we first check a pred and then rely on it, ceasing to fail is ok.
pub fn get(&self, idx: uint) -> @~str { self.vect[idx] }
pub fn get(&self, idx: uint) -> @str { self.vect[idx] }
pub fn len(&self) -> uint { let vect = &*self.vect; vect.len() }
pub fn find_equiv<Q:Hash + IterBytes + Equiv<@~str>>(&self, val: &Q)
pub fn find_equiv<Q:Hash + IterBytes + Equiv<@str>>(&self, val: &Q)
-> Option<uint> {
match self.map.find_equiv(val) {
Some(v) => Some(*v),
@ -140,41 +140,41 @@ mod tests {
#[test]
#[should_fail]
fn i1 () {
let i : Interner<@~str> = Interner::new();
let i : Interner<@str> = Interner::new();
i.get(13);
}
#[test]
fn i2 () {
let i : Interner<@~str> = Interner::new();
let i : Interner<@str> = Interner::new();
// first one is zero:
assert_eq!(i.intern (@~"dog"), 0);
assert_eq!(i.intern (@"dog"), 0);
// re-use gets the same entry:
assert_eq!(i.intern (@~"dog"), 0);
assert_eq!(i.intern (@"dog"), 0);
// different string gets a different #:
assert_eq!(i.intern (@~"cat"), 1);
assert_eq!(i.intern (@~"cat"), 1);
assert_eq!(i.intern (@"cat"), 1);
assert_eq!(i.intern (@"cat"), 1);
// dog is still at zero
assert_eq!(i.intern (@~"dog"), 0);
assert_eq!(i.intern (@"dog"), 0);
// gensym gets 3
assert_eq!(i.gensym (@~"zebra" ), 2);
assert_eq!(i.gensym (@"zebra" ), 2);
// gensym of same string gets new number :
assert_eq!(i.gensym (@~"zebra" ), 3);
assert_eq!(i.gensym (@"zebra" ), 3);
// gensym of *existing* string gets new number:
assert_eq!(i.gensym (@~"dog"), 4);
assert_eq!(i.get(0), @~"dog");
assert_eq!(i.get(1), @~"cat");
assert_eq!(i.get(2), @~"zebra");
assert_eq!(i.get(3), @~"zebra");
assert_eq!(i.get(4), @~"dog");
assert_eq!(i.gensym (@"dog"), 4);
assert_eq!(i.get(0), @"dog");
assert_eq!(i.get(1), @"cat");
assert_eq!(i.get(2), @"zebra");
assert_eq!(i.get(3), @"zebra");
assert_eq!(i.get(4), @"dog");
}
#[test]
fn i3 () {
let i : Interner<@~str> = Interner::prefill([@~"Alan",@~"Bob",@~"Carol"]);
assert_eq!(i.get(0), @~"Alan");
assert_eq!(i.get(1), @~"Bob");
assert_eq!(i.get(2), @~"Carol");
assert_eq!(i.intern(@~"Bob"), 1);
let i : Interner<@str> = Interner::prefill([@"Alan",@"Bob",@"Carol"]);
assert_eq!(i.get(0), @"Alan");
assert_eq!(i.get(1), @"Bob");
assert_eq!(i.get(2), @"Carol");
assert_eq!(i.intern(@"Bob"), 1);
}
}
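
The comment at the top of this file ties StrInterner's design to allocation: lookups take a borrowed &str (via find_equiv), so a hit never allocates, and only a genuinely new string gets copied into the table; gensym deliberately skips the map so the same text can receive a fresh index. A compact sketch of that intern/gensym split in present-day Rust — using String and usize rather than the @str managed strings this diff introduces:

use std::collections::HashMap;

#[derive(Default)]
struct StrInterner {
    map: HashMap<String, usize>,
    vect: Vec<String>,
}

impl StrInterner {
    fn intern(&mut self, val: &str) -> usize {
        // borrowed lookup: a hit allocates nothing
        if let Some(&idx) = self.map.get(val) {
            return idx;
        }
        let new_idx = self.vect.len();
        self.map.insert(val.to_owned(), new_idx);
        self.vect.push(val.to_owned());
        new_idx
    }

    fn gensym(&mut self, val: &str) -> usize {
        let new_idx = self.vect.len();
        // left out of `map`, so later interns of the same text cannot collide with it
        self.vect.push(val.to_owned());
        new_idx
    }

    fn get(&self, idx: usize) -> &str {
        &self.vect[idx]
    }
}

fn main() {
    let mut i = StrInterner::default();
    assert_eq!(i.intern("dog"), 0); // first entry is zero
    assert_eq!(i.intern("dog"), 0); // re-use finds the same entry
    assert_eq!(i.intern("cat"), 1); // a different string gets a different index
    assert_eq!(i.gensym("dog"), 2); // gensym of an existing string still gets a new index
    assert_eq!(i.get(2), "dog");
}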

View File

@ -18,50 +18,50 @@ use parse::token;
// map a string to tts, using a made-up filename: return both the token_trees
// and the ParseSess
pub fn string_to_tts_and_sess (source_str : @~str) -> (~[ast::token_tree],@mut ParseSess) {
pub fn string_to_tts_and_sess (source_str : @str) -> (~[ast::token_tree],@mut ParseSess) {
let ps = new_parse_sess(None);
(filemap_to_tts(ps,string_to_filemap(ps,source_str,~"bogofile")),ps)
(filemap_to_tts(ps,string_to_filemap(ps,source_str,@"bogofile")),ps)
}
pub fn string_to_parser_and_sess(source_str: @~str) -> (Parser,@mut ParseSess) {
pub fn string_to_parser_and_sess(source_str: @str) -> (Parser,@mut ParseSess) {
let ps = new_parse_sess(None);
(new_parser_from_source_str(ps,~[],~"bogofile",source_str),ps)
(new_parser_from_source_str(ps,~[],@"bogofile",source_str),ps)
}
// map string to parser (via tts)
pub fn string_to_parser(source_str: @~str) -> Parser {
pub fn string_to_parser(source_str: @str) -> Parser {
let (p,_) = string_to_parser_and_sess(source_str);
p
}
pub fn string_to_crate (source_str : @~str) -> @ast::crate {
pub fn string_to_crate (source_str : @str) -> @ast::crate {
string_to_parser(source_str).parse_crate_mod()
}
// parse a string, return an expr
pub fn string_to_expr (source_str : @~str) -> @ast::expr {
pub fn string_to_expr (source_str : @str) -> @ast::expr {
string_to_parser(source_str).parse_expr()
}
// parse a string, return an item
pub fn string_to_item (source_str : @~str) -> Option<@ast::item> {
pub fn string_to_item (source_str : @str) -> Option<@ast::item> {
string_to_parser(source_str).parse_item(~[])
}
// parse a string, return an item and the ParseSess
pub fn string_to_item_and_sess (source_str : @~str) -> (Option<@ast::item>,@mut ParseSess) {
pub fn string_to_item_and_sess (source_str : @str) -> (Option<@ast::item>,@mut ParseSess) {
let (p,ps) = string_to_parser_and_sess(source_str);
(p.parse_item(~[]),ps)
}
// parse a string, return a stmt
pub fn string_to_stmt(source_str : @~str) -> @ast::stmt {
pub fn string_to_stmt(source_str : @str) -> @ast::stmt {
string_to_parser(source_str).parse_stmt(~[])
}
// parse a string, return a pat. Uses "irrefutable"... which doesn't
// (currently) affect parsing.
pub fn string_to_pat(source_str : @~str) -> @ast::pat {
pub fn string_to_pat(source_str : @str) -> @ast::pat {
string_to_parser(source_str).parse_pat()
}
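
All of these helpers follow one pattern: the shared setup wraps an in-memory string under a made-up filename ("bogofile"), and each public function is a one-liner that forwards to a single parse entry point. A loose, self-contained sketch of that pattern in present-day Rust — ParseSess, Parser, and first_token here are made-up stand-ins, not the libsyntax types:

struct ParseSess {
    // (made-up filename, in-memory source); nothing touches the filesystem
    files: Vec<(String, String)>,
}

struct Parser {
    source: String,
}

fn new_parse_sess() -> ParseSess {
    ParseSess { files: Vec::new() }
}

fn string_to_parser_and_sess(source_str: &str) -> (Parser, ParseSess) {
    let mut sess = new_parse_sess();
    // "bogofile" is only a label for diagnostics; the source stays in memory
    sess.files.push(("bogofile".to_string(), source_str.to_string()));
    (Parser { source: source_str.to_string() }, sess)
}

fn string_to_parser(source_str: &str) -> Parser {
    let (p, _sess) = string_to_parser_and_sess(source_str);
    p
}

impl Parser {
    // stand-in for parse_expr / parse_item / parse_stmt / parse_pat
    fn first_token(&self) -> Option<&str> {
        self.source.split_whitespace().next()
    }
}

// same one-liner shape as string_to_expr / string_to_item above
fn string_to_first_token(source_str: &str) -> Option<String> {
    string_to_parser(source_str).first_token().map(|t| t.to_owned())
}

fn main() {
    assert_eq!(string_to_first_token("fn a() { }"), Some("fn".to_string()));
}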