Use @str instead of @~str in libsyntax and librustc. Fixes #5048.

This almost removes the StringRef wrapper, since all strings are
Equiv-alent now. It also removes a lot of `/* bad */ copy *`
expressions, and converts several things (the lint table and the
intrinsics table) to `&'static str`.

There are many instances of `.to_managed()`, unfortunately.
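For readers skimming the diff, here is a minimal before/after sketch of the pattern this commit applies throughout libsyntax and librustc. It uses the pre-1.0 syntax of this era, and the function names are made up for illustration (they do not appear in the diff):

```rust
// Old shape: @~str is a managed box around an owned string, so callers
// deref through two pointers (*name) and often need a /* bad */ copy
// to get a ~str back out.
fn crate_msg_old(name: @~str) -> ~str {
    fmt!("crate `%s`", *name)
}

// New shape: @str is a managed string directly. It is Equiv to &str and
// ~str, so it works with find_equiv()-style lookups and compares against
// string literals, and it is copied by pointer rather than by buffer.
fn crate_msg_new(name: @str) -> ~str {
    fmt!("crate `%s`", name)
}

// Getting an @str from a &str or ~str still allocates, hence the many
// .to_managed() calls this commit adds:
//     let name: @str = "rustc".to_managed();
// String literals can also take the sigil directly: @"rustc".
```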
Huon Wilson, 2013-06-13 03:02:55 +10:00
commit 096f6f56a8 (parent 641910dc13)
94 changed files with 1259 additions and 1283 deletions


@ -345,13 +345,13 @@ pub fn check_variants_T<T:Copy>(crate: @ast::crate,
intr, intr,
span_handler, span_handler,
crate2, crate2,
fname.to_str(), fname.to_managed(),
rdr, rdr,
a, a,
pprust::no_ann(), pprust::no_ann(),
false) false)
}; };
@string string.to_managed()
}; };
match cx.mode { match cx.mode {
tm_converge => check_roundtrip_convergence(str3, 1), tm_converge => check_roundtrip_convergence(str3, 1),
@ -361,9 +361,9 @@ pub fn check_variants_T<T:Copy>(crate: @ast::crate,
thing_label, thing_label,
i, i,
j); j);
let safe_to_run = !(content_is_dangerous_to_run(*str3) let safe_to_run = !(content_is_dangerous_to_run(str3)
|| has_raw_pointers(crate2)); || has_raw_pointers(crate2));
check_whole_compiler(*str3, check_whole_compiler(str3,
&Path(file_label), &Path(file_label),
safe_to_run); safe_to_run);
} }
@ -502,15 +502,15 @@ pub fn check_compiling(filename: &Path) -> happiness {
} }
pub fn parse_and_print(code: @~str) -> ~str { pub fn parse_and_print(code: @str) -> @str {
let filename = Path("tmp.rs"); let filename = Path("tmp.rs");
let sess = parse::new_parse_sess(option::None); let sess = parse::new_parse_sess(option::None);
write_file(&filename, *code); write_file(&filename, code);
let crate = parse::parse_crate_from_source_str(filename.to_str(), let crate = parse::parse_crate_from_source_str(filename.to_str().to_managed(),
code, code,
~[], ~[],
sess); sess);
do io::with_str_reader(*code) |rdr| { do io::with_str_reader(code) |rdr| {
let filename = filename.to_str(); let filename = filename.to_str();
do as_str |a| { do as_str |a| {
pprust::print_crate(sess.cm, pprust::print_crate(sess.cm,
@ -518,12 +518,12 @@ pub fn parse_and_print(code: @~str) -> ~str {
token::mk_fake_ident_interner(), token::mk_fake_ident_interner(),
copy sess.span_diagnostic, copy sess.span_diagnostic,
crate, crate,
filename.to_str(), filename.to_managed(),
rdr, rdr,
a, a,
pprust::no_ann(), pprust::no_ann(),
false) false)
} }.to_managed()
} }
} }
@ -598,15 +598,15 @@ pub fn file_might_not_converge(filename: &Path) -> bool {
return false; return false;
} }
pub fn check_roundtrip_convergence(code: @~str, maxIters: uint) { pub fn check_roundtrip_convergence(code: @str, maxIters: uint) {
let mut i = 0u; let mut i = 0u;
let mut newv = code; let mut newv = code;
let mut oldv = code; let mut oldv = code;
while i < maxIters { while i < maxIters {
oldv = newv; oldv = newv;
if content_might_not_converge(*oldv) { return; } if content_might_not_converge(oldv) { return; }
newv = @parse_and_print(oldv); newv = parse_and_print(oldv);
if oldv == newv { break; } if oldv == newv { break; }
i += 1u; i += 1u;
} }
@ -615,8 +615,8 @@ pub fn check_roundtrip_convergence(code: @~str, maxIters: uint) {
error!("Converged after %u iterations", i); error!("Converged after %u iterations", i);
} else { } else {
error!("Did not converge after %u iterations!", i); error!("Did not converge after %u iterations!", i);
write_file(&Path("round-trip-a.rs"), *oldv); write_file(&Path("round-trip-a.rs"), oldv);
write_file(&Path("round-trip-b.rs"), *newv); write_file(&Path("round-trip-b.rs"), newv);
run::process_status("diff", [~"-w", ~"-u", ~"round-trip-a.rs", ~"round-trip-b.rs"]); run::process_status("diff", [~"-w", ~"-u", ~"round-trip-a.rs", ~"round-trip-b.rs"]);
fail!("Mismatch"); fail!("Mismatch");
} }
@ -626,8 +626,8 @@ pub fn check_convergence(files: &[Path]) {
error!("pp convergence tests: %u files", files.len()); error!("pp convergence tests: %u files", files.len());
for files.each |file| { for files.each |file| {
if !file_might_not_converge(file) { if !file_might_not_converge(file) {
let s = @result::get(&io::read_whole_file_str(file)); let s = result::get(&io::read_whole_file_str(file)).to_managed();
if !content_might_not_converge(*s) { if !content_might_not_converge(s) {
error!("pp converge: %s", file.to_str()); error!("pp converge: %s", file.to_str());
// Change from 7u to 2u once // Change from 7u to 2u once
// https://github.com/mozilla/rust/issues/850 is fixed // https://github.com/mozilla/rust/issues/850 is fixed
@ -646,14 +646,14 @@ pub fn check_variants(files: &[Path], cx: Context) {
loop; loop;
} }
let s = @result::get(&io::read_whole_file_str(file)); let s = result::get(&io::read_whole_file_str(file)).to_managed();
if contains(*s, "#") { if s.contains_char('#') {
loop; // Macros are confusing loop; // Macros are confusing
} }
if cx.mode == tm_converge && content_might_not_converge(*s) { if cx.mode == tm_converge && content_might_not_converge(s) {
loop; loop;
} }
if cx.mode == tm_run && content_is_dangerous_to_compile(*s) { if cx.mode == tm_run && content_is_dangerous_to_compile(s) {
loop; loop;
} }
@ -661,11 +661,11 @@ pub fn check_variants(files: &[Path], cx: Context) {
error!("check_variants: %?", file_str); error!("check_variants: %?", file_str);
let sess = parse::new_parse_sess(None); let sess = parse::new_parse_sess(None);
let crate = parse::parse_crate_from_source_str(file_str.to_str(), let crate = parse::parse_crate_from_source_str(file_str.to_managed(),
s, s,
~[], ~[],
sess); sess);
io::with_str_reader(*s, |rdr| { io::with_str_reader(s, |rdr| {
let file_str = file_str.to_str(); let file_str = file_str.to_str();
error!("%s", error!("%s",
as_str(|a| { as_str(|a| {
@ -675,7 +675,7 @@ pub fn check_variants(files: &[Path], cx: Context) {
token::mk_fake_ident_interner(), token::mk_fake_ident_interner(),
copy sess.span_diagnostic, copy sess.span_diagnostic,
crate, crate,
file_str.to_str(), file_str.to_managed(),
rdr, rdr,
a, a,
pprust::no_ann(), pprust::no_ann(),


@ -493,16 +493,16 @@ pub fn build_link_meta(sess: Session,
let linkage_metas = attr::find_linkage_metas(c.node.attrs); let linkage_metas = attr::find_linkage_metas(c.node.attrs);
attr::require_unique_names(sess.diagnostic(), linkage_metas); attr::require_unique_names(sess.diagnostic(), linkage_metas);
for linkage_metas.each |meta| { for linkage_metas.each |meta| {
if *attr::get_meta_item_name(*meta) == ~"name" { if "name" == attr::get_meta_item_name(*meta) {
match attr::get_meta_item_value_str(*meta) { match attr::get_meta_item_value_str(*meta) {
// Changing attr would avoid the need for the copy // Changing attr would avoid the need for the copy
// here // here
Some(v) => { name = Some(v.to_managed()); } Some(v) => { name = Some(v); }
None => cmh_items.push(*meta) None => cmh_items.push(*meta)
} }
} else if *attr::get_meta_item_name(*meta) == ~"vers" { } else if "vers" == attr::get_meta_item_name(*meta) {
match attr::get_meta_item_value_str(*meta) { match attr::get_meta_item_value_str(*meta) {
Some(v) => { vers = Some(v.to_managed()); } Some(v) => { vers = Some(v); }
None => cmh_items.push(*meta) None => cmh_items.push(*meta)
} }
} else { cmh_items.push(*meta); } } else { cmh_items.push(*meta); }
@ -518,7 +518,7 @@ pub fn build_link_meta(sess: Session,
// This calculates CMH as defined above // This calculates CMH as defined above
fn crate_meta_extras_hash(symbol_hasher: &mut hash::State, fn crate_meta_extras_hash(symbol_hasher: &mut hash::State,
cmh_items: ~[@ast::meta_item], cmh_items: ~[@ast::meta_item],
dep_hashes: ~[~str]) -> @str { dep_hashes: ~[@str]) -> @str {
fn len_and_str(s: &str) -> ~str { fn len_and_str(s: &str) -> ~str {
fmt!("%u_%s", s.len(), s) fmt!("%u_%s", s.len(), s)
} }
@ -532,14 +532,14 @@ pub fn build_link_meta(sess: Session,
fn hash(symbol_hasher: &mut hash::State, m: &@ast::meta_item) { fn hash(symbol_hasher: &mut hash::State, m: &@ast::meta_item) {
match m.node { match m.node {
ast::meta_name_value(key, value) => { ast::meta_name_value(key, value) => {
write_string(symbol_hasher, len_and_str(*key)); write_string(symbol_hasher, len_and_str(key));
write_string(symbol_hasher, len_and_str_lit(value)); write_string(symbol_hasher, len_and_str_lit(value));
} }
ast::meta_word(name) => { ast::meta_word(name) => {
write_string(symbol_hasher, len_and_str(*name)); write_string(symbol_hasher, len_and_str(name));
} }
ast::meta_list(name, ref mis) => { ast::meta_list(name, ref mis) => {
write_string(symbol_hasher, len_and_str(*name)); write_string(symbol_hasher, len_and_str(name));
for mis.each |m_| { for mis.each |m_| {
hash(symbol_hasher, m_); hash(symbol_hasher, m_);
} }
@ -706,7 +706,7 @@ pub fn mangle(sess: Session, ss: path) -> ~str {
for ss.each |s| { for ss.each |s| {
match *s { path_name(s) | path_mod(s) => { match *s { path_name(s) | path_mod(s) => {
let sani = sanitize(*sess.str_of(s)); let sani = sanitize(sess.str_of(s));
n += fmt!("%u%s", sani.len(), sani); n += fmt!("%u%s", sani.len(), sani);
} } } }
} }
@ -912,7 +912,7 @@ pub fn link_args(sess: Session,
} }
let ula = cstore::get_used_link_args(cstore); let ula = cstore::get_used_link_args(cstore);
for ula.each |arg| { args.push(/*bad*/copy *arg); } for ula.each |arg| { args.push(arg.to_owned()); }
// Add all the link args for external crates. // Add all the link args for external crates.
do cstore::iter_crate_data(cstore) |crate_num, _| { do cstore::iter_crate_data(cstore) |crate_num, _| {


@ -55,31 +55,31 @@ pub enum pp_mode {
* The name used for source code that doesn't originate in a file * The name used for source code that doesn't originate in a file
* (e.g. source from stdin or a string) * (e.g. source from stdin or a string)
*/ */
pub fn anon_src() -> ~str { ~"<anon>" } pub fn anon_src() -> @str { @"<anon>" }
pub fn source_name(input: &input) -> ~str { pub fn source_name(input: &input) -> @str {
match *input { match *input {
file_input(ref ifile) => ifile.to_str(), file_input(ref ifile) => ifile.to_str().to_managed(),
str_input(_) => anon_src() str_input(_) => anon_src()
} }
} }
pub fn default_configuration(sess: Session, argv0: @~str, input: &input) -> pub fn default_configuration(sess: Session, argv0: @str, input: &input) ->
ast::crate_cfg { ast::crate_cfg {
let libc = match sess.targ_cfg.os { let libc = match sess.targ_cfg.os {
session::os_win32 => ~"msvcrt.dll", session::os_win32 => @"msvcrt.dll",
session::os_macos => ~"libc.dylib", session::os_macos => @"libc.dylib",
session::os_linux => ~"libc.so.6", session::os_linux => @"libc.so.6",
session::os_android => ~"libc.so", session::os_android => @"libc.so",
session::os_freebsd => ~"libc.so.7" session::os_freebsd => @"libc.so.7"
// _ { "libc.so" } // _ { "libc.so" }
}; };
let tos = match sess.targ_cfg.os { let tos = match sess.targ_cfg.os {
session::os_win32 => ~"win32", session::os_win32 => @"win32",
session::os_macos => ~"macos", session::os_macos => @"macos",
session::os_linux => ~"linux", session::os_linux => @"linux",
session::os_android => ~"android", session::os_android => @"android",
session::os_freebsd => ~"freebsd" session::os_freebsd => @"freebsd"
// _ { "libc.so" } // _ { "libc.so" }
}; };
@ -88,47 +88,47 @@ pub fn default_configuration(sess: Session, argv0: @~str, input: &input) ->
// ARM is bi-endian, however using NDK seems to default // ARM is bi-endian, however using NDK seems to default
// to little-endian unless a flag is provided. // to little-endian unless a flag is provided.
let (end,arch,wordsz) = match sess.targ_cfg.arch { let (end,arch,wordsz) = match sess.targ_cfg.arch {
abi::X86 => (~"little",~"x86",~"32"), abi::X86 => (@"little",@"x86",@"32"),
abi::X86_64 => (~"little",~"x86_64",~"64"), abi::X86_64 => (@"little",@"x86_64",@"64"),
abi::Arm => (~"little",~"arm",~"32"), abi::Arm => (@"little",@"arm",@"32"),
abi::Mips => (~"big",~"mips",~"32") abi::Mips => (@"big",@"mips",@"32")
}; };
return ~[ // Target bindings. return ~[ // Target bindings.
attr::mk_word_item(@os::FAMILY.to_owned()), attr::mk_word_item(os::FAMILY.to_managed()),
mk(@~"target_os", @tos), mk(@"target_os", tos),
mk(@~"target_family", @os::FAMILY.to_owned()), mk(@"target_family", os::FAMILY.to_managed()),
mk(@~"target_arch", @arch), mk(@"target_arch", arch),
mk(@~"target_endian", @end), mk(@"target_endian", end),
mk(@~"target_word_size", @wordsz), mk(@"target_word_size", wordsz),
mk(@~"target_libc", @libc), mk(@"target_libc", libc),
// Build bindings. // Build bindings.
mk(@~"build_compiler", argv0), mk(@"build_compiler", argv0),
mk(@~"build_input", @source_name(input))]; mk(@"build_input", source_name(input))];
} }
pub fn append_configuration(cfg: ast::crate_cfg, name: ~str) pub fn append_configuration(cfg: ast::crate_cfg, name: @str)
-> ast::crate_cfg { -> ast::crate_cfg {
if attr::contains_name(cfg, name) { if attr::contains_name(cfg, name) {
cfg cfg
} else { } else {
vec::append_one(cfg, attr::mk_word_item(@name)) vec::append_one(cfg, attr::mk_word_item(name))
} }
} }
pub fn build_configuration(sess: Session, argv0: @~str, input: &input) -> pub fn build_configuration(sess: Session, argv0: @str, input: &input) ->
ast::crate_cfg { ast::crate_cfg {
// Combine the configuration requested by the session (command line) with // Combine the configuration requested by the session (command line) with
// some default and generated configuration items // some default and generated configuration items
let default_cfg = default_configuration(sess, argv0, input); let default_cfg = default_configuration(sess, argv0, input);
let user_cfg = /*bad*/copy sess.opts.cfg; let user_cfg = /*bad*/copy sess.opts.cfg;
// If the user wants a test runner, then add the test cfg // If the user wants a test runner, then add the test cfg
let user_cfg = if sess.opts.test { append_configuration(user_cfg, ~"test") } let user_cfg = if sess.opts.test { append_configuration(user_cfg, @"test") }
else { user_cfg }; else { user_cfg };
// If the user requested GC, then add the GC cfg // If the user requested GC, then add the GC cfg
let user_cfg = append_configuration( let user_cfg = append_configuration(
user_cfg, user_cfg,
if sess.opts.gc { ~"gc" } else { ~"nogc" }); if sess.opts.gc { @"gc" } else { @"nogc" });
return vec::append(user_cfg, default_cfg); return vec::append(user_cfg, default_cfg);
} }
@ -137,7 +137,7 @@ fn parse_cfgspecs(cfgspecs: ~[~str],
demitter: diagnostic::Emitter) -> ast::crate_cfg { demitter: diagnostic::Emitter) -> ast::crate_cfg {
do vec::map_consume(cfgspecs) |s| { do vec::map_consume(cfgspecs) |s| {
let sess = parse::new_parse_sess(Some(demitter)); let sess = parse::new_parse_sess(Some(demitter));
parse::parse_meta_from_source_str(~"cfgspec", @s, ~[], sess) parse::parse_meta_from_source_str(@"cfgspec", s.to_managed(), ~[], sess)
} }
} }
@ -145,7 +145,8 @@ pub enum input {
/// Load source from file /// Load source from file
file_input(Path), file_input(Path),
/// The string is the source /// The string is the source
str_input(~str) // FIXME (#2319): Don't really want to box the source string
str_input(@str)
} }
pub fn parse_input(sess: Session, cfg: ast::crate_cfg, input: &input) pub fn parse_input(sess: Session, cfg: ast::crate_cfg, input: &input)
@ -154,10 +155,9 @@ pub fn parse_input(sess: Session, cfg: ast::crate_cfg, input: &input)
file_input(ref file) => { file_input(ref file) => {
parse::parse_crate_from_file(&(*file), cfg, sess.parse_sess) parse::parse_crate_from_file(&(*file), cfg, sess.parse_sess)
} }
str_input(ref src) => { str_input(src) => {
// FIXME (#2319): Don't really want to box the source string
parse::parse_crate_from_source_str( parse::parse_crate_from_source_str(
anon_src(), @(/*bad*/copy *src), cfg, sess.parse_sess) anon_src(), src, cfg, sess.parse_sess)
} }
} }
} }
@ -455,7 +455,7 @@ pub fn pretty_print_input(sess: Session, cfg: ast::crate_cfg, input: &input,
}; };
let is_expanded = upto != cu_parse; let is_expanded = upto != cu_parse;
let src = sess.codemap.get_filemap(source_name(input)).src; let src = sess.codemap.get_filemap(source_name(input)).src;
do io::with_str_reader(*src) |rdr| { do io::with_str_reader(src) |rdr| {
pprust::print_crate(sess.codemap, token::get_ident_interner(), pprust::print_crate(sess.codemap, token::get_ident_interner(),
sess.span_diagnostic, crate.unwrap(), sess.span_diagnostic, crate.unwrap(),
source_name(input), source_name(input),
@ -566,7 +566,7 @@ pub fn host_triple() -> ~str {
}; };
} }
pub fn build_session_options(binary: @~str, pub fn build_session_options(binary: @str,
matches: &getopts::Matches, matches: &getopts::Matches,
demitter: diagnostic::Emitter) demitter: diagnostic::Emitter)
-> @session::options { -> @session::options {
@ -595,7 +595,7 @@ pub fn build_session_options(binary: @~str,
getopts::opt_strs(matches, level_name)); getopts::opt_strs(matches, level_name));
for flags.each |lint_name| { for flags.each |lint_name| {
let lint_name = lint_name.replace("-", "_"); let lint_name = lint_name.replace("-", "_");
match lint_dict.find(&lint_name) { match lint_dict.find_equiv(&lint_name) {
None => { None => {
early_error(demitter, fmt!("unknown %s flag: %s", early_error(demitter, fmt!("unknown %s flag: %s",
level_name, lint_name)); level_name, lint_name));
@ -895,8 +895,8 @@ pub fn build_output_filenames(input: &input,
}; };
let mut stem = match *input { let mut stem = match *input {
file_input(ref ifile) => (*ifile).filestem().get(), file_input(ref ifile) => (*ifile).filestem().get().to_managed(),
str_input(_) => ~"rust_out" str_input(_) => @"rust_out"
}; };
// If a linkage name meta is present, we use it as the link name // If a linkage name meta is present, we use it as the link name
@ -906,7 +906,7 @@ pub fn build_output_filenames(input: &input,
let maybe_matches = attr::find_meta_items_by_name(linkage_metas, "name"); let maybe_matches = attr::find_meta_items_by_name(linkage_metas, "name");
if !maybe_matches.is_empty() { if !maybe_matches.is_empty() {
match attr::get_meta_item_value_str(maybe_matches[0]) { match attr::get_meta_item_value_str(maybe_matches[0]) {
Some(s) => stem = copy *s, Some(s) => stem = s,
_ => () _ => ()
} }
} }
@ -982,9 +982,9 @@ mod test {
Err(f) => fail!("test_switch_implies_cfg_test: %s", getopts::fail_str(f)) Err(f) => fail!("test_switch_implies_cfg_test: %s", getopts::fail_str(f))
}; };
let sessopts = build_session_options( let sessopts = build_session_options(
@~"rustc", matches, diagnostic::emit); @"rustc", matches, diagnostic::emit);
let sess = build_session(sessopts, diagnostic::emit); let sess = build_session(sessopts, diagnostic::emit);
let cfg = build_configuration(sess, @~"whatever", &str_input(~"")); let cfg = build_configuration(sess, @"whatever", &str_input(@""));
assert!((attr::contains_name(cfg, "test"))); assert!((attr::contains_name(cfg, "test")));
} }
@ -1000,9 +1000,9 @@ mod test {
} }
}; };
let sessopts = build_session_options( let sessopts = build_session_options(
@~"rustc", matches, diagnostic::emit); @"rustc", matches, diagnostic::emit);
let sess = build_session(sessopts, diagnostic::emit); let sess = build_session(sessopts, diagnostic::emit);
let cfg = build_configuration(sess, @~"whatever", &str_input(~"")); let cfg = build_configuration(sess, @"whatever", &str_input(@""));
let test_items = attr::find_meta_items_by_name(cfg, "test"); let test_items = attr::find_meta_items_by_name(cfg, "test");
assert_eq!(test_items.len(), 1u); assert_eq!(test_items.len(), 1u);
} }


@ -150,7 +150,7 @@ pub struct options {
// will be added to the crate AST node. This should not be used for // will be added to the crate AST node. This should not be used for
// anything except building the full crate config prior to parsing. // anything except building the full crate config prior to parsing.
cfg: ast::crate_cfg, cfg: ast::crate_cfg,
binary: @~str, binary: @str,
test: bool, test: bool,
parse_only: bool, parse_only: bool,
no_trans: bool, no_trans: bool,
@ -295,7 +295,7 @@ impl Session_ {
} }
// pointless function, now... // pointless function, now...
pub fn str_of(@self, id: ast::ident) -> @~str { pub fn str_of(@self, id: ast::ident) -> @str {
token::ident_to_str(&id) token::ident_to_str(&id)
} }
@ -331,7 +331,7 @@ pub fn basic_options() -> @options {
target_triple: host_triple(), target_triple: host_triple(),
target_feature: ~"", target_feature: ~"",
cfg: ~[], cfg: ~[],
binary: @~"rustc", binary: @"rustc",
test: false, test: false,
parse_only: false, parse_only: false,
no_trans: false, no_trans: false,
@ -361,7 +361,7 @@ pub fn building_library(req_crate_type: crate_type,
match syntax::attr::first_attr_value_str_by_name( match syntax::attr::first_attr_value_str_by_name(
crate.node.attrs, crate.node.attrs,
"crate_type") { "crate_type") {
Some(@~"lib") => true, Some(s) if "lib" == s => true,
_ => false _ => false
} }
} }
@ -389,22 +389,22 @@ mod test {
use syntax::ast; use syntax::ast;
use syntax::codemap; use syntax::codemap;
fn make_crate_type_attr(t: ~str) -> ast::attribute { fn make_crate_type_attr(t: @str) -> ast::attribute {
codemap::respan(codemap::dummy_sp(), ast::attribute_ { codemap::respan(codemap::dummy_sp(), ast::attribute_ {
style: ast::attr_outer, style: ast::attr_outer,
value: @codemap::respan(codemap::dummy_sp(), value: @codemap::respan(codemap::dummy_sp(),
ast::meta_name_value( ast::meta_name_value(
@~"crate_type", @"crate_type",
codemap::respan(codemap::dummy_sp(), codemap::respan(codemap::dummy_sp(),
ast::lit_str(@t)))), ast::lit_str(t)))),
is_sugared_doc: false is_sugared_doc: false
}) })
} }
fn make_crate(with_bin: bool, with_lib: bool) -> @ast::crate { fn make_crate(with_bin: bool, with_lib: bool) -> @ast::crate {
let mut attrs = ~[]; let mut attrs = ~[];
if with_bin { attrs += [make_crate_type_attr(~"bin")]; } if with_bin { attrs += [make_crate_type_attr(@"bin")]; }
if with_lib { attrs += [make_crate_type_attr(~"lib")]; } if with_lib { attrs += [make_crate_type_attr(@"lib")]; }
@codemap::respan(codemap::dummy_sp(), ast::crate_ { @codemap::respan(codemap::dummy_sp(), ast::crate_ {
module: ast::_mod { view_items: ~[], items: ~[] }, module: ast::_mod { view_items: ~[], items: ~[] },
attrs: attrs, attrs: attrs,


@ -202,7 +202,7 @@ pub fn metas_in_cfg(cfg: ast::crate_cfg,
cfg_metas.any(|cfg_meta| { cfg_metas.any(|cfg_meta| {
cfg_meta.all(|cfg_mi| { cfg_meta.all(|cfg_mi| {
match cfg_mi.node { match cfg_mi.node {
ast::meta_list(s, ref it) if *s == ~"not" ast::meta_list(s, ref it) if "not" == s
=> it.all(|mi| !attr::contains(cfg, *mi)), => it.all(|mi| !attr::contains(cfg, *mi)),
_ => attr::contains(cfg, *cfg_mi) _ => attr::contains(cfg, *cfg_mi)
} }


@ -17,9 +17,9 @@ use syntax::ast;
use syntax::codemap::spanned; use syntax::codemap::spanned;
pub fn inject_intrinsic(sess: Session, crate: @ast::crate) -> @ast::crate { pub fn inject_intrinsic(sess: Session, crate: @ast::crate) -> @ast::crate {
let intrinsic_module = @(include_str!("intrinsic.rs").to_owned()); let intrinsic_module = include_str!("intrinsic.rs").to_managed();
let item = parse::parse_item_from_source_str(~"<intrinsic>", let item = parse::parse_item_from_source_str(@"<intrinsic>",
intrinsic_module, intrinsic_module,
/*bad*/copy sess.opts.cfg, /*bad*/copy sess.opts.cfg,
~[], ~[],


@ -49,8 +49,8 @@ fn inject_libstd_ref(sess: Session, crate: @ast::crate) -> @ast::crate {
spanned(ast::attribute_ { spanned(ast::attribute_ {
style: ast::attr_inner, style: ast::attr_inner,
value: @spanned(ast::meta_name_value( value: @spanned(ast::meta_name_value(
@~"vers", @"vers",
spanned(ast::lit_str(@STD_VERSION.to_str())) spanned(ast::lit_str(STD_VERSION.to_managed()))
)), )),
is_sugared_doc: false is_sugared_doc: false
}) })


@ -52,7 +52,7 @@ pub fn modify_for_testing(sess: session::Session,
// configuration, either with the '--test' or '--cfg test' // configuration, either with the '--test' or '--cfg test'
// command line options. // command line options.
let should_test = attr::contains(crate.node.config, let should_test = attr::contains(crate.node.config,
attr::mk_word_item(@~"test")); attr::mk_word_item(@"test"));
if should_test { if should_test {
generate_test_harness(sess, crate) generate_test_harness(sess, crate)
@ -76,7 +76,7 @@ fn generate_test_harness(sess: session::Session,
ext_cx.bt_push(ExpandedFrom(CallInfo { ext_cx.bt_push(ExpandedFrom(CallInfo {
call_site: dummy_sp(), call_site: dummy_sp(),
callee: NameAndSpan { callee: NameAndSpan {
name: ~"test", name: @"test",
span: None span: None
} }
})); }));
@ -111,7 +111,7 @@ fn fold_mod(cx: @mut TestCtxt,
fn nomain(cx: @mut TestCtxt, item: @ast::item) -> @ast::item { fn nomain(cx: @mut TestCtxt, item: @ast::item) -> @ast::item {
if !*cx.sess.building_library { if !*cx.sess.building_library {
@ast::item{attrs: item.attrs.filtered(|attr| { @ast::item{attrs: item.attrs.filtered(|attr| {
*attr::get_attr_name(attr) != ~"main" "main" != attr::get_attr_name(attr)
}),.. copy *item} }),.. copy *item}
} else { item } } else { item }
} }
@ -272,9 +272,9 @@ mod __test {
*/ */
fn mk_std(cx: &TestCtxt) -> @ast::view_item { fn mk_std(cx: &TestCtxt) -> @ast::view_item {
let vers = ast::lit_str(@~"0.7-pre"); let vers = ast::lit_str(@"0.7-pre");
let vers = nospan(vers); let vers = nospan(vers);
let mi = ast::meta_name_value(@~"vers", vers); let mi = ast::meta_name_value(@"vers", vers);
let mi = nospan(mi); let mi = nospan(mi);
let id_std = cx.sess.ident_of("extra"); let id_std = cx.sess.ident_of("extra");
let vi = if is_std(cx) { let vi = if is_std(cx) {
@ -321,7 +321,7 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::item {
// This attribute tells resolve to let us call unexported functions // This attribute tells resolve to let us call unexported functions
let resolve_unexported_attr = let resolve_unexported_attr =
attr::mk_attr(attr::mk_word_item(@~"!resolve_unexported")); attr::mk_attr(attr::mk_word_item(@"!resolve_unexported"));
let item = ast::item { let item = ast::item {
ident: cx.sess.ident_of("__test"), ident: cx.sess.ident_of("__test"),
@ -376,7 +376,7 @@ fn is_std(cx: &TestCtxt) -> bool {
let is_std = { let is_std = {
let items = attr::find_linkage_metas(cx.crate.node.attrs); let items = attr::find_linkage_metas(cx.crate.node.attrs);
match attr::last_meta_item_value_str_by_name(items, "name") { match attr::last_meta_item_value_str_by_name(items, "name") {
Some(@~"extra") => true, Some(s) if "extra" == s => true,
_ => false _ => false
} }
}; };
@ -413,7 +413,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::expr {
debug!("encoding %s", ast_util::path_name_i(path)); debug!("encoding %s", ast_util::path_name_i(path));
let name_lit: ast::lit = let name_lit: ast::lit =
nospan(ast::lit_str(@ast_util::path_name_i(path))); nospan(ast::lit_str(ast_util::path_name_i(path).to_managed()));
let name_expr = @ast::expr { let name_expr = @ast::expr {
id: cx.sess.next_node_id(), id: cx.sess.next_node_id(),


@ -60,7 +60,7 @@ pub fn read_crates(diag: @span_handler,
struct cache_entry { struct cache_entry {
cnum: int, cnum: int,
span: span, span: span,
hash: @~str, hash: @str,
metas: @~[@ast::meta_item] metas: @~[@ast::meta_item]
} }
@ -100,12 +100,12 @@ fn warn_if_multiple_versions(e: @mut Env,
if matches.len() != 1u { if matches.len() != 1u {
diag.handler().warn( diag.handler().warn(
fmt!("using multiple versions of crate `%s`", *name)); fmt!("using multiple versions of crate `%s`", name));
for matches.each |match_| { for matches.each |match_| {
diag.span_note(match_.span, "used here"); diag.span_note(match_.span, "used here");
let attrs = ~[ let attrs = ~[
attr::mk_attr(attr::mk_list_item( attr::mk_attr(attr::mk_list_item(
@~"link", /*bad*/copy *match_.metas)) @"link", /*bad*/copy *match_.metas))
]; ];
loader::note_linkage_attrs(e.intr, diag, attrs); loader::note_linkage_attrs(e.intr, diag, attrs);
} }
@ -133,7 +133,7 @@ fn visit_crate(e: @mut Env, c: &ast::crate) {
for link_args.each |a| { for link_args.each |a| {
match attr::get_meta_item_value_str(attr::attr_meta(*a)) { match attr::get_meta_item_value_str(attr::attr_meta(*a)) {
Some(ref linkarg) => { Some(ref linkarg) => {
cstore::add_used_link_args(cstore, **linkarg); cstore::add_used_link_args(cstore, *linkarg);
} }
None => {/* fallthrough */ } None => {/* fallthrough */ }
} }
@ -145,7 +145,7 @@ fn visit_view_item(e: @mut Env, i: @ast::view_item) {
ast::view_item_extern_mod(ident, ref meta_items, id) => { ast::view_item_extern_mod(ident, ref meta_items, id) => {
debug!("resolving extern mod stmt. ident: %?, meta: %?", debug!("resolving extern mod stmt. ident: %?, meta: %?",
ident, *meta_items); ident, *meta_items);
let cnum = resolve_crate(e, ident, copy *meta_items, @~"", i.span); let cnum = resolve_crate(e, ident, copy *meta_items, @"", i.span);
cstore::add_extern_mod_stmt_cnum(e.cstore, id, cnum); cstore::add_extern_mod_stmt_cnum(e.cstore, id, cnum);
} }
_ => () _ => ()
@ -169,7 +169,7 @@ fn visit_item(e: @mut Env, i: @ast::item) {
match attr::first_attr_value_str_by_name(i.attrs, match attr::first_attr_value_str_by_name(i.attrs,
"link_name") { "link_name") {
Some(nn) => { Some(nn) => {
if *nn == ~"" { if nn.is_empty() {
e.diag.span_fatal( e.diag.span_fatal(
i.span, i.span,
"empty #[link_name] not allowed; use \ "empty #[link_name] not allowed; use \
@ -184,7 +184,7 @@ fn visit_item(e: @mut Env, i: @ast::item) {
!cstore::add_used_library(cstore, foreign_name); !cstore::add_used_library(cstore, foreign_name);
} }
if !link_args.is_empty() && already_added { if !link_args.is_empty() && already_added {
e.diag.span_fatal(i.span, ~"library '" + *foreign_name + e.diag.span_fatal(i.span, ~"library '" + foreign_name +
"' already added: can't specify link_args."); "' already added: can't specify link_args.");
} }
} }
@ -194,7 +194,7 @@ fn visit_item(e: @mut Env, i: @ast::item) {
for link_args.each |a| { for link_args.each |a| {
match attr::get_meta_item_value_str(attr::attr_meta(*a)) { match attr::get_meta_item_value_str(attr::attr_meta(*a)) {
Some(linkarg) => { Some(linkarg) => {
cstore::add_used_link_args(cstore, *linkarg); cstore::add_used_link_args(cstore, linkarg);
} }
None => { /* fallthrough */ } None => { /* fallthrough */ }
} }
@ -204,9 +204,9 @@ fn visit_item(e: @mut Env, i: @ast::item) {
} }
} }
fn metas_with(ident: @~str, key: @~str, metas: ~[@ast::meta_item]) fn metas_with(ident: @str, key: @str, metas: ~[@ast::meta_item])
-> ~[@ast::meta_item] { -> ~[@ast::meta_item] {
let name_items = attr::find_meta_items_by_name(metas, *key); let name_items = attr::find_meta_items_by_name(metas, key);
if name_items.is_empty() { if name_items.is_empty() {
vec::append_one(metas, attr::mk_name_value_item_str(key, ident)) vec::append_one(metas, attr::mk_name_value_item_str(key, ident))
} else { } else {
@ -214,12 +214,12 @@ fn metas_with(ident: @~str, key: @~str, metas: ~[@ast::meta_item])
} }
} }
fn metas_with_ident(ident: @~str, metas: ~[@ast::meta_item]) fn metas_with_ident(ident: @str, metas: ~[@ast::meta_item])
-> ~[@ast::meta_item] { -> ~[@ast::meta_item] {
metas_with(ident, @~"name", metas) metas_with(ident, @"name", metas)
} }
fn existing_match(e: @mut Env, metas: &[@ast::meta_item], hash: @~str) fn existing_match(e: @mut Env, metas: &[@ast::meta_item], hash: @str)
-> Option<int> { -> Option<int> {
for e.crate_cache.each |c| { for e.crate_cache.each |c| {
if loader::metadata_matches(*c.metas, metas) if loader::metadata_matches(*c.metas, metas)
@ -233,7 +233,7 @@ fn existing_match(e: @mut Env, metas: &[@ast::meta_item], hash: @~str)
fn resolve_crate(e: @mut Env, fn resolve_crate(e: @mut Env,
ident: ast::ident, ident: ast::ident,
metas: ~[@ast::meta_item], metas: ~[@ast::meta_item],
hash: @~str, hash: @str,
span: span) span: span)
-> ast::crate_num { -> ast::crate_num {
let metas = metas_with_ident(token::ident_to_str(&ident), metas); let metas = metas_with_ident(token::ident_to_str(&ident), metas);
@ -307,9 +307,9 @@ fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map {
let extrn_cnum = dep.cnum; let extrn_cnum = dep.cnum;
let cname = dep.name; let cname = dep.name;
let cname_str = token::ident_to_str(&dep.name); let cname_str = token::ident_to_str(&dep.name);
let cmetas = metas_with(dep.vers, @~"vers", ~[]); let cmetas = metas_with(dep.vers, @"vers", ~[]);
debug!("resolving dep crate %s ver: %s hash: %s", debug!("resolving dep crate %s ver: %s hash: %s",
*cname_str, *dep.vers, *dep.hash); cname_str, dep.vers, dep.hash);
match existing_match(e, metas_with_ident(cname_str, match existing_match(e, metas_with_ident(cname_str,
copy cmetas), copy cmetas),
dep.hash) { dep.hash) {


@ -74,7 +74,7 @@ pub fn get_item_path(tcx: ty::ctxt, def: ast::def_id) -> ast_map::path {
// FIXME #1920: This path is not always correct if the crate is not linked // FIXME #1920: This path is not always correct if the crate is not linked
// into the root namespace. // into the root namespace.
vec::append(~[ast_map::path_mod(tcx.sess.ident_of( vec::append(~[ast_map::path_mod(tcx.sess.ident_of(
*cdata.name))], path) cdata.name))], path)
} }
pub enum found_ast { pub enum found_ast {


@ -31,7 +31,7 @@ use syntax::parse::token::ident_interner;
pub type cnum_map = @mut HashMap<ast::crate_num, ast::crate_num>; pub type cnum_map = @mut HashMap<ast::crate_num, ast::crate_num>;
pub struct crate_metadata { pub struct crate_metadata {
name: @~str, name: @str,
data: @~[u8], data: @~[u8],
cnum_map: cnum_map, cnum_map: cnum_map,
cnum: ast::crate_num cnum: ast::crate_num
@ -41,8 +41,8 @@ pub struct CStore {
priv metas: HashMap <ast::crate_num, @crate_metadata>, priv metas: HashMap <ast::crate_num, @crate_metadata>,
priv extern_mod_crate_map: extern_mod_crate_map, priv extern_mod_crate_map: extern_mod_crate_map,
priv used_crate_files: ~[Path], priv used_crate_files: ~[Path],
priv used_libraries: ~[~str], priv used_libraries: ~[@str],
priv used_link_args: ~[~str], priv used_link_args: ~[@str],
intr: @ident_interner intr: @ident_interner
} }
@ -65,12 +65,12 @@ pub fn get_crate_data(cstore: &CStore, cnum: ast::crate_num)
return *cstore.metas.get(&cnum); return *cstore.metas.get(&cnum);
} }
pub fn get_crate_hash(cstore: &CStore, cnum: ast::crate_num) -> @~str { pub fn get_crate_hash(cstore: &CStore, cnum: ast::crate_num) -> @str {
let cdata = get_crate_data(cstore, cnum); let cdata = get_crate_data(cstore, cnum);
decoder::get_crate_hash(cdata.data) decoder::get_crate_hash(cdata.data)
} }
pub fn get_crate_vers(cstore: &CStore, cnum: ast::crate_num) -> @~str { pub fn get_crate_vers(cstore: &CStore, cnum: ast::crate_num) -> @str {
let cdata = get_crate_data(cstore, cnum); let cdata = get_crate_data(cstore, cnum);
decoder::get_crate_vers(cdata.data) decoder::get_crate_vers(cdata.data)
} }
@ -102,26 +102,28 @@ pub fn get_used_crate_files(cstore: &CStore) -> ~[Path] {
return /*bad*/copy cstore.used_crate_files; return /*bad*/copy cstore.used_crate_files;
} }
pub fn add_used_library(cstore: &mut CStore, lib: @~str) -> bool { pub fn add_used_library(cstore: &mut CStore, lib: @str) -> bool {
assert!(*lib != ~""); assert!(!lib.is_empty());
if cstore.used_libraries.contains(&*lib) { return false; } if cstore.used_libraries.contains(&lib) { return false; }
cstore.used_libraries.push(/*bad*/ copy *lib); cstore.used_libraries.push(lib);
true true
} }
pub fn get_used_libraries(cstore: &CStore) -> ~[~str] { pub fn get_used_libraries<'a>(cstore: &'a CStore) -> &'a [@str] {
/*bad*/copy cstore.used_libraries let slice: &'a [@str] = cstore.used_libraries;
slice
} }
pub fn add_used_link_args(cstore: &mut CStore, args: &str) { pub fn add_used_link_args(cstore: &mut CStore, args: &str) {
for args.split_iter(' ').advance |s| { for args.split_iter(' ').advance |s| {
cstore.used_link_args.push(s.to_owned()); cstore.used_link_args.push(s.to_managed());
} }
} }
pub fn get_used_link_args(cstore: &CStore) -> ~[~str] { pub fn get_used_link_args<'a>(cstore: &'a CStore) -> &'a [@str] {
/*bad*/copy cstore.used_link_args let slice: &'a [@str] = cstore.used_link_args;
slice
} }
pub fn add_extern_mod_stmt_cnum(cstore: &mut CStore, pub fn add_extern_mod_stmt_cnum(cstore: &mut CStore,
@ -138,15 +140,15 @@ pub fn find_extern_mod_stmt_cnum(cstore: &CStore,
// returns hashes of crates directly used by this crate. Hashes are sorted by // returns hashes of crates directly used by this crate. Hashes are sorted by
// (crate name, crate version, crate hash) in lexicographic order (not semver) // (crate name, crate version, crate hash) in lexicographic order (not semver)
pub fn get_dep_hashes(cstore: &CStore) -> ~[~str] { pub fn get_dep_hashes(cstore: &CStore) -> ~[@str] {
struct crate_hash { name: @~str, vers: @~str, hash: @~str } struct crate_hash { name: @str, vers: @str, hash: @str }
let mut result = ~[]; let mut result = ~[];
for cstore.extern_mod_crate_map.each_value |&cnum| { for cstore.extern_mod_crate_map.each_value |&cnum| {
let cdata = cstore::get_crate_data(cstore, cnum); let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data); let hash = decoder::get_crate_hash(cdata.data);
let vers = decoder::get_crate_vers(cdata.data); let vers = decoder::get_crate_vers(cdata.data);
debug!("Add hash[%s]: %s %s", *cdata.name, *vers, *hash); debug!("Add hash[%s]: %s %s", cdata.name, vers, hash);
result.push(crate_hash { result.push(crate_hash {
name: cdata.name, name: cdata.name,
vers: vers, vers: vers,
@ -160,8 +162,8 @@ pub fn get_dep_hashes(cstore: &CStore) -> ~[~str] {
debug!("sorted:"); debug!("sorted:");
for sorted.each |x| { for sorted.each |x| {
debug!(" hash[%s]: %s", *x.name, *x.hash); debug!(" hash[%s]: %s", x.name, x.hash);
} }
sorted.map(|ch| /*bad*/copy *ch.hash) sorted.map(|ch| ch.hash)
} }


@ -36,7 +36,7 @@ use extra::serialize::Decodable;
use syntax::ast_map; use syntax::ast_map;
use syntax::attr; use syntax::attr;
use syntax::diagnostic::span_handler; use syntax::diagnostic::span_handler;
use syntax::parse::token::{StringRef, ident_interner, special_idents}; use syntax::parse::token::{ident_interner, special_idents};
use syntax::print::pprust; use syntax::print::pprust;
use syntax::{ast, ast_util}; use syntax::{ast, ast_util};
use syntax::codemap; use syntax::codemap;
@ -311,7 +311,7 @@ fn item_path(item_doc: ebml::Doc) -> ast_map::path {
fn item_name(intr: @ident_interner, item: ebml::Doc) -> ast::ident { fn item_name(intr: @ident_interner, item: ebml::Doc) -> ast::ident {
let name = reader::get_doc(item, tag_paths_data_name); let name = reader::get_doc(item, tag_paths_data_name);
let string = name.as_str_slice(); let string = name.as_str_slice();
match intr.find_equiv(&StringRef(string)) { match intr.find_equiv(&string) {
None => token::str_to_ident(string), None => token::str_to_ident(string),
Some(val) => ast::new_ident(val), Some(val) => ast::new_ident(val),
} }
@ -985,23 +985,23 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::meta_item] {
let mut items: ~[@ast::meta_item] = ~[]; let mut items: ~[@ast::meta_item] = ~[];
for reader::tagged_docs(md, tag_meta_item_word) |meta_item_doc| { for reader::tagged_docs(md, tag_meta_item_word) |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name); let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = nd.as_str(); let n = nd.as_str_slice().to_managed();
items.push(attr::mk_word_item(@n)); items.push(attr::mk_word_item(n));
}; };
for reader::tagged_docs(md, tag_meta_item_name_value) |meta_item_doc| { for reader::tagged_docs(md, tag_meta_item_name_value) |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name); let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let vd = reader::get_doc(meta_item_doc, tag_meta_item_value); let vd = reader::get_doc(meta_item_doc, tag_meta_item_value);
let n = nd.as_str(); let n = nd.as_str_slice().to_managed();
let v = vd.as_str(); let v = vd.as_str_slice().to_managed();
// FIXME (#623): Should be able to decode meta_name_value variants, // FIXME (#623): Should be able to decode meta_name_value variants,
// but currently the encoder just drops them // but currently the encoder just drops them
items.push(attr::mk_name_value_item_str(@n, @v)); items.push(attr::mk_name_value_item_str(n, v));
}; };
for reader::tagged_docs(md, tag_meta_item_list) |meta_item_doc| { for reader::tagged_docs(md, tag_meta_item_list) |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name); let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = nd.as_str(); let n = nd.as_str_slice().to_managed();
let subitems = get_meta_items(meta_item_doc); let subitems = get_meta_items(meta_item_doc);
items.push(attr::mk_list_item(@n, subitems)); items.push(attr::mk_list_item(n, subitems));
}; };
return items; return items;
} }
@ -1058,8 +1058,8 @@ pub fn get_crate_attributes(data: @~[u8]) -> ~[ast::attribute] {
pub struct crate_dep { pub struct crate_dep {
cnum: ast::crate_num, cnum: ast::crate_num,
name: ast::ident, name: ast::ident,
vers: @~str, vers: @str,
hash: @~str hash: @str
} }
pub fn get_crate_deps(data: @~[u8]) -> ~[crate_dep] { pub fn get_crate_deps(data: @~[u8]) -> ~[crate_dep] {
@ -1067,14 +1067,15 @@ pub fn get_crate_deps(data: @~[u8]) -> ~[crate_dep] {
let cratedoc = reader::Doc(data); let cratedoc = reader::Doc(data);
let depsdoc = reader::get_doc(cratedoc, tag_crate_deps); let depsdoc = reader::get_doc(cratedoc, tag_crate_deps);
let mut crate_num = 1; let mut crate_num = 1;
fn docstr(doc: ebml::Doc, tag_: uint) -> ~str { fn docstr(doc: ebml::Doc, tag_: uint) -> @str {
reader::get_doc(doc, tag_).as_str() let d = reader::get_doc(doc, tag_);
d.as_str_slice().to_managed()
} }
for reader::tagged_docs(depsdoc, tag_crate_dep) |depdoc| { for reader::tagged_docs(depsdoc, tag_crate_dep) |depdoc| {
deps.push(crate_dep {cnum: crate_num, deps.push(crate_dep {cnum: crate_num,
name: token::str_to_ident(docstr(depdoc, tag_crate_dep_name)), name: token::str_to_ident(docstr(depdoc, tag_crate_dep_name)),
vers: @docstr(depdoc, tag_crate_dep_vers), vers: docstr(depdoc, tag_crate_dep_vers),
hash: @docstr(depdoc, tag_crate_dep_hash)}); hash: docstr(depdoc, tag_crate_dep_hash)});
crate_num += 1; crate_num += 1;
}; };
return deps; return deps;
@ -1086,25 +1087,25 @@ fn list_crate_deps(data: @~[u8], out: @io::Writer) {
for get_crate_deps(data).each |dep| { for get_crate_deps(data).each |dep| {
out.write_str( out.write_str(
fmt!("%d %s-%s-%s\n", fmt!("%d %s-%s-%s\n",
dep.cnum, *token::ident_to_str(&dep.name), *dep.hash, *dep.vers)); dep.cnum, token::ident_to_str(&dep.name), dep.hash, dep.vers));
} }
out.write_str("\n"); out.write_str("\n");
} }
pub fn get_crate_hash(data: @~[u8]) -> @~str { pub fn get_crate_hash(data: @~[u8]) -> @str {
let cratedoc = reader::Doc(data); let cratedoc = reader::Doc(data);
let hashdoc = reader::get_doc(cratedoc, tag_crate_hash); let hashdoc = reader::get_doc(cratedoc, tag_crate_hash);
@hashdoc.as_str() hashdoc.as_str_slice().to_managed()
} }
pub fn get_crate_vers(data: @~[u8]) -> @~str { pub fn get_crate_vers(data: @~[u8]) -> @str {
let attrs = decoder::get_crate_attributes(data); let attrs = decoder::get_crate_attributes(data);
let linkage_attrs = attr::find_linkage_metas(attrs); let linkage_attrs = attr::find_linkage_metas(attrs);
match attr::last_meta_item_value_str_by_name(linkage_attrs, "vers") { match attr::last_meta_item_value_str_by_name(linkage_attrs, "vers") {
Some(ver) => ver, Some(ver) => ver,
None => @~"0.0" None => @"0.0"
} }
} }
@ -1126,7 +1127,7 @@ pub fn list_crate_metadata(intr: @ident_interner, bytes: @~[u8],
out: @io::Writer) { out: @io::Writer) {
let hash = get_crate_hash(bytes); let hash = get_crate_hash(bytes);
let md = reader::Doc(bytes); let md = reader::Doc(bytes);
list_crate_attributes(intr, md, *hash, out); list_crate_attributes(intr, md, hash, out);
list_crate_deps(bytes, out); list_crate_deps(bytes, out);
} }


@ -62,7 +62,7 @@ pub struct EncodeParams {
reachable: reachable::map, reachable: reachable::map,
reexports2: middle::resolve::ExportMap2, reexports2: middle::resolve::ExportMap2,
item_symbols: @mut HashMap<ast::node_id, ~str>, item_symbols: @mut HashMap<ast::node_id, ~str>,
discrim_symbols: @mut HashMap<ast::node_id, @~str>, discrim_symbols: @mut HashMap<ast::node_id, @str>,
link_meta: LinkMeta, link_meta: LinkMeta,
cstore: @mut cstore::CStore, cstore: @mut cstore::CStore,
encode_inlined_item: encode_inlined_item encode_inlined_item: encode_inlined_item
@ -89,7 +89,7 @@ pub struct EncodeContext {
reachable: reachable::map, reachable: reachable::map,
reexports2: middle::resolve::ExportMap2, reexports2: middle::resolve::ExportMap2,
item_symbols: @mut HashMap<ast::node_id, ~str>, item_symbols: @mut HashMap<ast::node_id, ~str>,
discrim_symbols: @mut HashMap<ast::node_id, @~str>, discrim_symbols: @mut HashMap<ast::node_id, @str>,
link_meta: LinkMeta, link_meta: LinkMeta,
cstore: @mut cstore::CStore, cstore: @mut cstore::CStore,
encode_inlined_item: encode_inlined_item, encode_inlined_item: encode_inlined_item,
@ -103,14 +103,14 @@ pub fn reachable(ecx: @EncodeContext, id: node_id) -> bool {
fn encode_name(ecx: @EncodeContext, fn encode_name(ecx: @EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
name: ident) { name: ident) {
ebml_w.wr_tagged_str(tag_paths_data_name, *ecx.tcx.sess.str_of(name)); ebml_w.wr_tagged_str(tag_paths_data_name, ecx.tcx.sess.str_of(name));
} }
fn encode_impl_type_basename(ecx: @EncodeContext, fn encode_impl_type_basename(ecx: @EncodeContext,
ebml_w: &mut writer::Encoder, ebml_w: &mut writer::Encoder,
name: ident) { name: ident) {
ebml_w.wr_tagged_str(tag_item_impl_type_basename, ebml_w.wr_tagged_str(tag_item_impl_type_basename,
*ecx.tcx.sess.str_of(name)); ecx.tcx.sess.str_of(name));
} }
pub fn encode_def_id(ebml_w: &mut writer::Encoder, id: def_id) { pub fn encode_def_id(ebml_w: &mut writer::Encoder, id: def_id) {
@ -362,7 +362,7 @@ fn encode_path(ecx: @EncodeContext,
ast_map::path_name(name) => (tag_path_elt_name, name) ast_map::path_name(name) => (tag_path_elt_name, name)
}; };
ebml_w.wr_tagged_str(tag, *ecx.tcx.sess.str_of(name)); ebml_w.wr_tagged_str(tag, ecx.tcx.sess.str_of(name));
} }
ebml_w.start_tag(tag_path); ebml_w.start_tag(tag_path);
@ -380,13 +380,13 @@ fn encode_reexported_static_method(ecx: @EncodeContext,
method_def_id: def_id, method_def_id: def_id,
method_ident: ident) { method_ident: ident) {
debug!("(encode reexported static method) %s::%s", debug!("(encode reexported static method) %s::%s",
*exp.name, *ecx.tcx.sess.str_of(method_ident)); exp.name, ecx.tcx.sess.str_of(method_ident));
ebml_w.start_tag(tag_items_data_item_reexport); ebml_w.start_tag(tag_items_data_item_reexport);
ebml_w.start_tag(tag_items_data_item_reexport_def_id); ebml_w.start_tag(tag_items_data_item_reexport_def_id);
ebml_w.wr_str(def_to_str(method_def_id)); ebml_w.wr_str(def_to_str(method_def_id));
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_items_data_item_reexport_name); ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(*exp.name + "::" + *ecx.tcx.sess.str_of(method_ident)); ebml_w.wr_str(fmt!("%s::%s", exp.name, ecx.tcx.sess.str_of(method_ident)));
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.end_tag(); ebml_w.end_tag();
} }
@ -449,17 +449,17 @@ fn encode_reexported_static_methods(ecx: @EncodeContext,
// encoded metadata for static methods relative to Bar, // encoded metadata for static methods relative to Bar,
// but not yet for Foo. // but not yet for Foo.
// //
if mod_path != *path || *exp.name != *original_name { if mod_path != *path || exp.name != original_name {
if !encode_reexported_static_base_methods(ecx, ebml_w, exp) { if !encode_reexported_static_base_methods(ecx, ebml_w, exp) {
if encode_reexported_static_trait_methods(ecx, ebml_w, exp) { if encode_reexported_static_trait_methods(ecx, ebml_w, exp) {
debug!(fmt!("(encode reexported static methods) %s \ debug!(fmt!("(encode reexported static methods) %s \
[trait]", [trait]",
*original_name)); original_name));
} }
} }
else { else {
debug!(fmt!("(encode reexported static methods) %s [base]", debug!(fmt!("(encode reexported static methods) %s [base]",
*original_name)); original_name));
} }
} }
} }
@ -486,7 +486,7 @@ fn encode_info_for_mod(ecx: @EncodeContext,
let (ident, did) = (item.ident, item.id); let (ident, did) = (item.ident, item.id);
debug!("(encoding info for module) ... encoding impl %s \ debug!("(encoding info for module) ... encoding impl %s \
(%?/%?)", (%?/%?)",
*ecx.tcx.sess.str_of(ident), ecx.tcx.sess.str_of(ident),
did, did,
ast_map::node_id_to_str(ecx.tcx.items, did, token::get_ident_interner())); ast_map::node_id_to_str(ecx.tcx.items, did, token::get_ident_interner()));
@ -507,13 +507,13 @@ fn encode_info_for_mod(ecx: @EncodeContext,
debug!("(encoding info for module) found reexports for %d", id); debug!("(encoding info for module) found reexports for %d", id);
for exports.each |exp| { for exports.each |exp| {
debug!("(encoding info for module) reexport '%s' for %d", debug!("(encoding info for module) reexport '%s' for %d",
*exp.name, id); exp.name, id);
ebml_w.start_tag(tag_items_data_item_reexport); ebml_w.start_tag(tag_items_data_item_reexport);
ebml_w.start_tag(tag_items_data_item_reexport_def_id); ebml_w.start_tag(tag_items_data_item_reexport_def_id);
ebml_w.wr_str(def_to_str(exp.def_id)); ebml_w.wr_str(def_to_str(exp.def_id));
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.start_tag(tag_items_data_item_reexport_name); ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(*exp.name); ebml_w.wr_str(exp.name);
ebml_w.end_tag(); ebml_w.end_tag();
ebml_w.end_tag(); ebml_w.end_tag();
encode_reexported_static_methods(ecx, ebml_w, path, exp); encode_reexported_static_methods(ecx, ebml_w, path, exp);
@ -622,7 +622,7 @@ fn encode_info_for_struct(ecx: @EncodeContext,
global_index.push(entry {val: id, pos: ebml_w.writer.tell()}); global_index.push(entry {val: id, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item); ebml_w.start_tag(tag_items_data_item);
debug!("encode_info_for_struct: doing %s %d", debug!("encode_info_for_struct: doing %s %d",
*tcx.sess.str_of(nm), id); tcx.sess.str_of(nm), id);
encode_struct_field_family(ebml_w, vis); encode_struct_field_family(ebml_w, vis);
encode_name(ecx, ebml_w, nm); encode_name(ecx, ebml_w, nm);
encode_path(ecx, ebml_w, path, ast_map::path_name(nm)); encode_path(ecx, ebml_w, path, ast_map::path_name(nm));
@ -648,7 +648,7 @@ fn encode_info_for_ctor(ecx: @EncodeContext,
encode_type_param_bounds(ebml_w, ecx, &generics.ty_params); encode_type_param_bounds(ebml_w, ecx, &generics.ty_params);
let its_ty = node_id_to_type(ecx.tcx, id); let its_ty = node_id_to_type(ecx.tcx, id);
debug!("fn name = %s ty = %s its node id = %d", debug!("fn name = %s ty = %s its node id = %d",
*ecx.tcx.sess.str_of(ident), ecx.tcx.sess.str_of(ident),
ty_to_str(ecx.tcx, its_ty), id); ty_to_str(ecx.tcx, its_ty), id);
encode_type(ecx, ebml_w, its_ty); encode_type(ecx, ebml_w, its_ty);
encode_path(ecx, ebml_w, path, ast_map::path_name(ident)); encode_path(ecx, ebml_w, path, ast_map::path_name(ident));
@ -708,7 +708,7 @@ fn encode_info_for_method(ecx: @EncodeContext,
owner_generics: &ast::Generics, owner_generics: &ast::Generics,
method_generics: &ast::Generics) { method_generics: &ast::Generics) {
debug!("encode_info_for_method: %d %s %u %u", m.id, debug!("encode_info_for_method: %d %s %u %u", m.id,
*ecx.tcx.sess.str_of(m.ident), ecx.tcx.sess.str_of(m.ident),
owner_generics.ty_params.len(), owner_generics.ty_params.len(),
method_generics.ty_params.len()); method_generics.ty_params.len());
ebml_w.start_tag(tag_items_data_item); ebml_w.start_tag(tag_items_data_item);
@ -1058,7 +1058,7 @@ fn encode_info_for_item(ecx: @EncodeContext,
tcx.sess.span_unimpl( tcx.sess.span_unimpl(
item.span, item.span,
fmt!("Method %s is both provided and static", fmt!("Method %s is both provided and static",
*token::ident_to_str(&method_ty.ident))); token::ident_to_str(&method_ty.ident)));
} }
encode_type_param_bounds(ebml_w, ecx, encode_type_param_bounds(ebml_w, ecx,
&m.generics.ty_params); &m.generics.ty_params);
@ -1278,11 +1278,11 @@ fn synthesize_crate_attrs(ecx: @EncodeContext,
assert!(!ecx.link_meta.vers.is_empty()); assert!(!ecx.link_meta.vers.is_empty());
let name_item = let name_item =
attr::mk_name_value_item_str(@~"name", attr::mk_name_value_item_str(@"name",
@ecx.link_meta.name.to_owned()); ecx.link_meta.name);
let vers_item = let vers_item =
attr::mk_name_value_item_str(@~"vers", attr::mk_name_value_item_str(@"vers",
@ecx.link_meta.vers.to_owned()); ecx.link_meta.vers);
let other_items = let other_items =
{ {
@ -1291,7 +1291,7 @@ fn synthesize_crate_attrs(ecx: @EncodeContext,
}; };
let meta_items = vec::append(~[name_item, vers_item], other_items); let meta_items = vec::append(~[name_item, vers_item], other_items);
let link_item = attr::mk_list_item(@~"link", meta_items); let link_item = attr::mk_list_item(@"link", meta_items);
return attr::mk_attr(link_item); return attr::mk_attr(link_item);
} }
@ -1300,15 +1300,15 @@ fn synthesize_crate_attrs(ecx: @EncodeContext,
let mut found_link_attr = false; let mut found_link_attr = false;
for crate.node.attrs.each |attr| { for crate.node.attrs.each |attr| {
attrs.push( attrs.push(
if *attr::get_attr_name(attr) != ~"link" { if "link" != attr::get_attr_name(attr) {
/*bad*/copy *attr copy *attr
} else { } else {
match attr.node.value.node { match attr.node.value.node {
meta_list(_, ref l) => { meta_list(_, ref l) => {
found_link_attr = true;; found_link_attr = true;;
synthesize_link_attr(ecx, /*bad*/copy *l) synthesize_link_attr(ecx, /*bad*/copy *l)
} }
_ => /*bad*/copy *attr _ => copy *attr
} }
}); });
} }
@ -1329,7 +1329,7 @@ fn encode_crate_deps(ecx: @EncodeContext,
let mut deps = ~[]; let mut deps = ~[];
do cstore::iter_crate_data(cstore) |key, val| { do cstore::iter_crate_data(cstore) |key, val| {
let dep = decoder::crate_dep {cnum: key, let dep = decoder::crate_dep {cnum: key,
name: ecx.tcx.sess.ident_of(/*bad*/ copy *val.name), name: ecx.tcx.sess.ident_of(val.name),
vers: decoder::get_crate_vers(val.data), vers: decoder::get_crate_vers(val.data),
hash: decoder::get_crate_hash(val.data)}; hash: decoder::get_crate_hash(val.data)};
deps.push(dep); deps.push(dep);


@ -48,7 +48,7 @@ pub struct Context {
span: span, span: span,
ident: ast::ident, ident: ast::ident,
metas: ~[@ast::meta_item], metas: ~[@ast::meta_item],
hash: @~str, hash: @str,
os: os, os: os,
is_static: bool, is_static: bool,
intr: @ident_interner intr: @ident_interner
@ -60,7 +60,7 @@ pub fn load_library_crate(cx: &Context) -> (~str, @~[u8]) {
None => { None => {
cx.diag.span_fatal( cx.diag.span_fatal(
cx.span, fmt!("can't find crate for `%s`", cx.span, fmt!("can't find crate for `%s`",
*token::ident_to_str(&cx.ident))); token::ident_to_str(&cx.ident)));
} }
} }
} }
@ -89,7 +89,7 @@ fn find_library_crate_aux(
filesearch: @filesearch::FileSearch filesearch: @filesearch::FileSearch
) -> Option<(~str, @~[u8])> { ) -> Option<(~str, @~[u8])> {
let crate_name = crate_name_from_metas(cx.metas); let crate_name = crate_name_from_metas(cx.metas);
let prefix: ~str = prefix + *crate_name + "-"; let prefix: ~str = prefix + crate_name + "-";
let suffix: ~str = /*bad*/copy suffix; let suffix: ~str = /*bad*/copy suffix;
let mut matches = ~[]; let mut matches = ~[];
@ -128,7 +128,7 @@ fn find_library_crate_aux(
Some(/*bad*/copy matches[0]) Some(/*bad*/copy matches[0])
} else { } else {
cx.diag.span_err( cx.diag.span_err(
cx.span, fmt!("multiple matching crates for `%s`", *crate_name)); cx.span, fmt!("multiple matching crates for `%s`", crate_name));
cx.diag.handler().note("candidates:"); cx.diag.handler().note("candidates:");
for matches.each |&(ident, data)| { for matches.each |&(ident, data)| {
cx.diag.handler().note(fmt!("path: %s", ident)); cx.diag.handler().note(fmt!("path: %s", ident));
@ -140,7 +140,7 @@ fn find_library_crate_aux(
} }
} }
pub fn crate_name_from_metas(metas: &[@ast::meta_item]) -> @~str { pub fn crate_name_from_metas(metas: &[@ast::meta_item]) -> @str {
let name_items = attr::find_meta_items_by_name(metas, "name"); let name_items = attr::find_meta_items_by_name(metas, "name");
match name_items.last_opt() { match name_items.last_opt() {
Some(i) => { Some(i) => {
@ -166,7 +166,7 @@ pub fn note_linkage_attrs(intr: @ident_interner,
fn crate_matches(crate_data: @~[u8], fn crate_matches(crate_data: @~[u8],
metas: &[@ast::meta_item], metas: &[@ast::meta_item],
hash: @~str) -> bool { hash: @str) -> bool {
let attrs = decoder::get_crate_attributes(crate_data); let attrs = decoder::get_crate_attributes(crate_data);
let linkage_metas = attr::find_linkage_metas(attrs); let linkage_metas = attr::find_linkage_metas(attrs);
if !hash.is_empty() { if !hash.is_empty() {


@ -41,7 +41,7 @@ pub struct ctxt {
pub struct ty_abbrev { pub struct ty_abbrev {
pos: uint, pos: uint,
len: uint, len: uint,
s: @~str s: @str
} }
pub enum abbrev_ctxt { pub enum abbrev_ctxt {
@ -60,12 +60,12 @@ pub fn enc_ty(w: @io::Writer, cx: @ctxt, t: ty::t) {
match cx.abbrevs { match cx.abbrevs {
ac_no_abbrevs => { ac_no_abbrevs => {
let result_str = match cx.tcx.short_names_cache.find(&t) { let result_str = match cx.tcx.short_names_cache.find(&t) {
Some(&s) => /*bad*/copy *s, Some(&s) => s,
None => { None => {
let s = do io::with_str_writer |wr| { let s = do io::with_str_writer |wr| {
enc_sty(wr, cx, /*bad*/copy ty::get(t).sty); enc_sty(wr, cx, /*bad*/copy ty::get(t).sty);
}; }.to_managed();
cx.tcx.short_names_cache.insert(t, @copy s); cx.tcx.short_names_cache.insert(t, s);
s s
} }
}; };
@ -73,7 +73,7 @@ pub fn enc_ty(w: @io::Writer, cx: @ctxt, t: ty::t) {
} }
ac_use_abbrevs(abbrevs) => { ac_use_abbrevs(abbrevs) => {
match abbrevs.find(&t) { match abbrevs.find(&t) {
Some(a) => { w.write_str(*a.s); return; } Some(a) => { w.write_str(a.s); return; }
None => {} None => {}
} }
let pos = w.tell(); let pos = w.tell();
@ -89,8 +89,8 @@ pub fn enc_ty(w: @io::Writer, cx: @ctxt, t: ty::t) {
let abbrev_len = 3u + estimate_sz(pos) + estimate_sz(len); let abbrev_len = 3u + estimate_sz(pos) + estimate_sz(len);
if abbrev_len < len { if abbrev_len < len {
// I.e. it's actually an abbreviation. // I.e. it's actually an abbreviation.
let s = fmt!("#%x:%x#", pos, len); let s = fmt!("#%x:%x#", pos, len).to_managed();
let a = ty_abbrev { pos: pos, len: len, s: @s }; let a = ty_abbrev { pos: pos, len: len, s: s };
abbrevs.insert(t, a); abbrevs.insert(t, a);
} }
return; return;
@ -171,7 +171,7 @@ fn enc_bound_region(w: @io::Writer, cx: @ctxt, br: ty::bound_region) {
} }
ty::br_named(s) => { ty::br_named(s) => {
w.write_char('['); w.write_char('[');
w.write_str(*cx.tcx.sess.str_of(s)); w.write_str(cx.tcx.sess.str_of(s));
w.write_char(']') w.write_char(']')
} }
ty::br_cap_avoid(id, br) => { ty::br_cap_avoid(id, br) => {


@ -88,7 +88,7 @@ pub fn encode_inlined_item(ecx: @e::EncodeContext,
maps: Maps) { maps: Maps) {
debug!("> Encoding inlined item: %s::%s (%u)", debug!("> Encoding inlined item: %s::%s (%u)",
ast_map::path_to_str(path, token::get_ident_interner()), ast_map::path_to_str(path, token::get_ident_interner()),
*ecx.tcx.sess.str_of(ii.ident()), ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell()); ebml_w.writer.tell());
let id_range = ast_util::compute_id_range_for_inlined_item(&ii); let id_range = ast_util::compute_id_range_for_inlined_item(&ii);
@ -101,7 +101,7 @@ pub fn encode_inlined_item(ecx: @e::EncodeContext,
debug!("< Encoded inlined fn: %s::%s (%u)", debug!("< Encoded inlined fn: %s::%s (%u)",
ast_map::path_to_str(path, token::get_ident_interner()), ast_map::path_to_str(path, token::get_ident_interner()),
*ecx.tcx.sess.str_of(ii.ident()), ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell()); ebml_w.writer.tell());
} }
@ -131,10 +131,10 @@ pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
}; };
let raw_ii = decode_ast(ast_doc); let raw_ii = decode_ast(ast_doc);
let ii = renumber_ast(xcx, raw_ii); let ii = renumber_ast(xcx, raw_ii);
debug!("Fn named: %s", *tcx.sess.str_of(ii.ident())); debug!("Fn named: %s", tcx.sess.str_of(ii.ident()));
debug!("< Decoded inlined fn: %s::%s", debug!("< Decoded inlined fn: %s::%s",
ast_map::path_to_str(path, token::get_ident_interner()), ast_map::path_to_str(path, token::get_ident_interner()),
*tcx.sess.str_of(ii.ident())); tcx.sess.str_of(ii.ident()));
ast_map::map_decoded_item(tcx.sess.diagnostic(), ast_map::map_decoded_item(tcx.sess.diagnostic(),
dcx.tcx.items, path, &ii); dcx.tcx.items, path, &ii);
decode_side_tables(xcx, ast_doc); decode_side_tables(xcx, ast_doc);


@ -711,7 +711,7 @@ impl BorrowckCtxt {
LpVar(id) => { LpVar(id) => {
match self.tcx.items.find(&id) { match self.tcx.items.find(&id) {
Some(&ast_map::node_local(ref ident)) => { Some(&ast_map::node_local(ref ident)) => {
out.push_str(*token::ident_to_str(ident)); out.push_str(token::ident_to_str(ident));
} }
r => { r => {
self.tcx.sess.bug( self.tcx.sess.bug(
@ -726,7 +726,7 @@ impl BorrowckCtxt {
match fname { match fname {
mc::NamedField(ref fname) => { mc::NamedField(ref fname) => {
out.push_char('.'); out.push_char('.');
out.push_str(*token::ident_to_str(fname)); out.push_str(token::ident_to_str(fname));
} }
mc::PositionalField(idx) => { mc::PositionalField(idx) => {
out.push_char('#'); // invent a notation here out.push_char('#'); // invent a notation here


@ -144,8 +144,8 @@ pub fn check_exhaustive(cx: @MatchCheckCtxt, sp: span, pats: ~[@pat]) {
match ty::get(ty).sty { match ty::get(ty).sty {
ty::ty_bool => { ty::ty_bool => {
match (*ctor) { match (*ctor) {
val(const_bool(true)) => Some(@~"true"), val(const_bool(true)) => Some(@"true"),
val(const_bool(false)) => Some(@~"false"), val(const_bool(false)) => Some(@"false"),
_ => None _ => None
} }
} }
@ -165,7 +165,7 @@ pub fn check_exhaustive(cx: @MatchCheckCtxt, sp: span, pats: ~[@pat]) {
} }
ty::ty_unboxed_vec(*) | ty::ty_evec(*) => { ty::ty_unboxed_vec(*) | ty::ty_evec(*) => {
match *ctor { match *ctor {
vec(n) => Some(@fmt!("vectors of length %u", n)), vec(n) => Some(fmt!("vectors of length %u", n).to_managed()),
_ => None _ => None
} }
} }
@ -174,7 +174,7 @@ pub fn check_exhaustive(cx: @MatchCheckCtxt, sp: span, pats: ~[@pat]) {
} }
}; };
let msg = ~"non-exhaustive patterns" + match ext { let msg = ~"non-exhaustive patterns" + match ext {
Some(ref s) => ~": " + **s + " not covered", Some(ref s) => fmt!(": %s not covered", *s),
None => ~"" None => ~""
}; };
cx.tcx.sess.span_err(sp, msg); cx.tcx.sess.span_err(sp, msg);
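A note on the hunks above: fixed text can now be written directly as an @str literal (@"true"), while dynamically formatted text still comes out of fmt! as a ~str and is copied into managed storage with .to_managed(). A minimal sketch of that pattern in the 2013-era syntax used here (illustrative only; the helper name is hypothetical and it will not compile with a later toolchain):

    // Sketch of the pattern above: an @str literal for static text,
    // .to_managed() to copy a freshly formatted ~str into an @str.
    fn ctor_text(is_bool_witness: bool, len: uint) -> @str {
        if is_bool_witness {
            @"true"                                          // literal @str, no copy
        } else {
            fmt!("vectors of length %u", len).to_managed()   // ~str -> @str copy
        }
    }
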


@ -236,14 +236,14 @@ pub enum const_val {
const_float(f64), const_float(f64),
const_int(i64), const_int(i64),
const_uint(u64), const_uint(u64),
const_str(~str), const_str(@str),
const_bool(bool) const_bool(bool)
} }
pub fn eval_const_expr(tcx: middle::ty::ctxt, e: @expr) -> const_val { pub fn eval_const_expr(tcx: middle::ty::ctxt, e: @expr) -> const_val {
match eval_const_expr_partial(tcx, e) { match eval_const_expr_partial(tcx, e) {
Ok(ref r) => (/*bad*/copy *r), Ok(r) => r,
Err(ref s) => tcx.sess.span_fatal(e.span, *s) Err(s) => tcx.sess.span_fatal(e.span, s)
} }
} }
@ -409,13 +409,13 @@ pub fn eval_const_expr_partial(tcx: middle::ty::ctxt, e: @expr)
pub fn lit_to_const(lit: @lit) -> const_val { pub fn lit_to_const(lit: @lit) -> const_val {
match lit.node { match lit.node {
lit_str(s) => const_str(/*bad*/copy *s), lit_str(s) => const_str(s),
lit_int(n, _) => const_int(n), lit_int(n, _) => const_int(n),
lit_uint(n, _) => const_uint(n), lit_uint(n, _) => const_uint(n),
lit_int_unsuffixed(n) => const_int(n), lit_int_unsuffixed(n) => const_int(n),
lit_float(n, _) => const_float(float::from_str(*n).get() as f64), lit_float(n, _) => const_float(float::from_str(n).get() as f64),
lit_float_unsuffixed(n) => lit_float_unsuffixed(n) =>
const_float(float::from_str(*n).get() as f64), const_float(float::from_str(n).get() as f64),
lit_nil => const_int(0i64), lit_nil => const_int(0i64),
lit_bool(b) => const_bool(b) lit_bool(b) => const_bool(b)
} }
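With const_str carrying an @str instead of a ~str, a const_val is cheap to duplicate (an @str is a shallow copy of a managed pointer), which is what lets the /*bad*/copy calls above disappear. A rough sketch mirroring the new eval_const_expr in the era's syntax (the wrapper name is hypothetical; types assumed from the surrounding code):

    // Sketch only: returning the const_val by value is now a shallow copy
    // of a managed pointer rather than a deep ~str copy.
    pub fn eval_or_fail(tcx: middle::ty::ctxt, e: @expr) -> const_val {
        match eval_const_expr_partial(tcx, e) {
            Ok(r) => r,                               // const_val copied shallowly
            Err(s) => tcx.sess.span_fatal(e.span, s)  // s: @str, also a cheap copy
        }
    }
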


@ -269,50 +269,50 @@ fn LanguageItemCollector(crate: @crate,
-> LanguageItemCollector { -> LanguageItemCollector {
let mut item_refs = HashMap::new(); let mut item_refs = HashMap::new();
item_refs.insert(@~"const", ConstTraitLangItem as uint); item_refs.insert(@"const", ConstTraitLangItem as uint);
item_refs.insert(@~"copy", CopyTraitLangItem as uint); item_refs.insert(@"copy", CopyTraitLangItem as uint);
item_refs.insert(@~"owned", OwnedTraitLangItem as uint); item_refs.insert(@"owned", OwnedTraitLangItem as uint);
item_refs.insert(@~"sized", SizedTraitLangItem as uint); item_refs.insert(@"sized", SizedTraitLangItem as uint);
item_refs.insert(@~"drop", DropTraitLangItem as uint); item_refs.insert(@"drop", DropTraitLangItem as uint);
item_refs.insert(@~"add", AddTraitLangItem as uint); item_refs.insert(@"add", AddTraitLangItem as uint);
item_refs.insert(@~"sub", SubTraitLangItem as uint); item_refs.insert(@"sub", SubTraitLangItem as uint);
item_refs.insert(@~"mul", MulTraitLangItem as uint); item_refs.insert(@"mul", MulTraitLangItem as uint);
item_refs.insert(@~"div", DivTraitLangItem as uint); item_refs.insert(@"div", DivTraitLangItem as uint);
item_refs.insert(@~"rem", RemTraitLangItem as uint); item_refs.insert(@"rem", RemTraitLangItem as uint);
item_refs.insert(@~"neg", NegTraitLangItem as uint); item_refs.insert(@"neg", NegTraitLangItem as uint);
item_refs.insert(@~"not", NotTraitLangItem as uint); item_refs.insert(@"not", NotTraitLangItem as uint);
item_refs.insert(@~"bitxor", BitXorTraitLangItem as uint); item_refs.insert(@"bitxor", BitXorTraitLangItem as uint);
item_refs.insert(@~"bitand", BitAndTraitLangItem as uint); item_refs.insert(@"bitand", BitAndTraitLangItem as uint);
item_refs.insert(@~"bitor", BitOrTraitLangItem as uint); item_refs.insert(@"bitor", BitOrTraitLangItem as uint);
item_refs.insert(@~"shl", ShlTraitLangItem as uint); item_refs.insert(@"shl", ShlTraitLangItem as uint);
item_refs.insert(@~"shr", ShrTraitLangItem as uint); item_refs.insert(@"shr", ShrTraitLangItem as uint);
item_refs.insert(@~"index", IndexTraitLangItem as uint); item_refs.insert(@"index", IndexTraitLangItem as uint);
item_refs.insert(@~"eq", EqTraitLangItem as uint); item_refs.insert(@"eq", EqTraitLangItem as uint);
item_refs.insert(@~"ord", OrdTraitLangItem as uint); item_refs.insert(@"ord", OrdTraitLangItem as uint);
item_refs.insert(@~"str_eq", StrEqFnLangItem as uint); item_refs.insert(@"str_eq", StrEqFnLangItem as uint);
item_refs.insert(@~"uniq_str_eq", UniqStrEqFnLangItem as uint); item_refs.insert(@"uniq_str_eq", UniqStrEqFnLangItem as uint);
item_refs.insert(@~"annihilate", AnnihilateFnLangItem as uint); item_refs.insert(@"annihilate", AnnihilateFnLangItem as uint);
item_refs.insert(@~"log_type", LogTypeFnLangItem as uint); item_refs.insert(@"log_type", LogTypeFnLangItem as uint);
item_refs.insert(@~"fail_", FailFnLangItem as uint); item_refs.insert(@"fail_", FailFnLangItem as uint);
item_refs.insert(@~"fail_bounds_check", item_refs.insert(@"fail_bounds_check",
FailBoundsCheckFnLangItem as uint); FailBoundsCheckFnLangItem as uint);
item_refs.insert(@~"exchange_malloc", ExchangeMallocFnLangItem as uint); item_refs.insert(@"exchange_malloc", ExchangeMallocFnLangItem as uint);
item_refs.insert(@~"exchange_free", ExchangeFreeFnLangItem as uint); item_refs.insert(@"exchange_free", ExchangeFreeFnLangItem as uint);
item_refs.insert(@~"malloc", MallocFnLangItem as uint); item_refs.insert(@"malloc", MallocFnLangItem as uint);
item_refs.insert(@~"free", FreeFnLangItem as uint); item_refs.insert(@"free", FreeFnLangItem as uint);
item_refs.insert(@~"borrow_as_imm", BorrowAsImmFnLangItem as uint); item_refs.insert(@"borrow_as_imm", BorrowAsImmFnLangItem as uint);
item_refs.insert(@~"borrow_as_mut", BorrowAsMutFnLangItem as uint); item_refs.insert(@"borrow_as_mut", BorrowAsMutFnLangItem as uint);
item_refs.insert(@~"return_to_mut", ReturnToMutFnLangItem as uint); item_refs.insert(@"return_to_mut", ReturnToMutFnLangItem as uint);
item_refs.insert(@~"check_not_borrowed", item_refs.insert(@"check_not_borrowed",
CheckNotBorrowedFnLangItem as uint); CheckNotBorrowedFnLangItem as uint);
item_refs.insert(@~"strdup_uniq", StrDupUniqFnLangItem as uint); item_refs.insert(@"strdup_uniq", StrDupUniqFnLangItem as uint);
item_refs.insert(@~"record_borrow", RecordBorrowFnLangItem as uint); item_refs.insert(@"record_borrow", RecordBorrowFnLangItem as uint);
item_refs.insert(@~"unrecord_borrow", UnrecordBorrowFnLangItem as uint); item_refs.insert(@"unrecord_borrow", UnrecordBorrowFnLangItem as uint);
item_refs.insert(@~"start", StartFnLangItem as uint); item_refs.insert(@"start", StartFnLangItem as uint);
LanguageItemCollector { LanguageItemCollector {
crate: crate, crate: crate,
@ -328,7 +328,7 @@ struct LanguageItemCollector {
crate: @crate, crate: @crate,
session: Session, session: Session,
item_refs: HashMap<@~str, uint>, item_refs: HashMap<@str, uint>,
} }
impl LanguageItemCollector { impl LanguageItemCollector {
@ -366,9 +366,9 @@ impl LanguageItemCollector {
pub fn match_and_collect_item(&mut self, pub fn match_and_collect_item(&mut self,
item_def_id: def_id, item_def_id: def_id,
key: @~str, key: @str,
value: @~str) { value: @str) {
if *key != ~"lang" { if "lang" != key {
return; // Didn't match. return; // Didn't match.
} }
@ -419,7 +419,7 @@ impl LanguageItemCollector {
for self.item_refs.each |&key, &item_ref| { for self.item_refs.each |&key, &item_ref| {
match self.items.items[item_ref] { match self.items.items[item_ref] {
None => { None => {
self.session.err(fmt!("no item found for `%s`", *key)); self.session.err(fmt!("no item found for `%s`", key));
} }
Some(_) => { Some(_) => {
// OK. // OK.


@ -119,7 +119,7 @@ struct LintSpec {
default: level default: level
} }
pub type LintDict = HashMap<~str, LintSpec>; pub type LintDict = HashMap<&'static str, LintSpec>;
enum AttributedNode<'self> { enum AttributedNode<'self> {
Item(@ast::item), Item(@ast::item),
@ -290,7 +290,7 @@ static lint_table: &'static [(&'static str, LintSpec)] = &[
pub fn get_lint_dict() -> LintDict { pub fn get_lint_dict() -> LintDict {
let mut map = HashMap::new(); let mut map = HashMap::new();
for lint_table.each|&(k, v)| { for lint_table.each|&(k, v)| {
map.insert(k.to_str(), v); map.insert(k, v);
} }
return map; return map;
} }
@ -352,10 +352,10 @@ impl Context {
} }
} }
fn lint_to_str(&self, lint: lint) -> ~str { fn lint_to_str(&self, lint: lint) -> &'static str {
for self.dict.each |k, v| { for self.dict.each |k, v| {
if v.lint == lint { if v.lint == lint {
return copy *k; return *k;
} }
} }
fail!("unregistered lint %?", lint); fail!("unregistered lint %?", lint);
@ -405,13 +405,13 @@ impl Context {
// specified closure // specified closure
let mut pushed = 0u; let mut pushed = 0u;
for each_lint(self.tcx.sess, attrs) |meta, level, lintname| { for each_lint(self.tcx.sess, attrs) |meta, level, lintname| {
let lint = match self.dict.find(lintname) { let lint = match self.dict.find_equiv(&lintname) {
None => { None => {
self.span_lint( self.span_lint(
unrecognized_lint, unrecognized_lint,
meta.span, meta.span,
fmt!("unknown `%s` attribute: `%s`", fmt!("unknown `%s` attribute: `%s`",
level_to_str(level), *lintname)); level_to_str(level), lintname));
loop loop
} }
Some(lint) => { lint.lint } Some(lint) => { lint.lint }
@ -422,7 +422,7 @@ impl Context {
self.tcx.sess.span_err(meta.span, self.tcx.sess.span_err(meta.span,
fmt!("%s(%s) overruled by outer forbid(%s)", fmt!("%s(%s) overruled by outer forbid(%s)",
level_to_str(level), level_to_str(level),
*lintname, *lintname)); lintname, lintname));
loop; loop;
} }
@ -498,7 +498,7 @@ impl Context {
pub fn each_lint(sess: session::Session, pub fn each_lint(sess: session::Session,
attrs: &[ast::attribute], attrs: &[ast::attribute],
f: &fn(@ast::meta_item, level, &~str) -> bool) -> bool f: &fn(@ast::meta_item, level, @str) -> bool) -> bool
{ {
for [allow, warn, deny, forbid].each |&level| { for [allow, warn, deny, forbid].each |&level| {
let level_name = level_to_str(level); let level_name = level_to_str(level);
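The lint dictionary above is now keyed by &'static str while attribute names arrive as @str, so lookups go through find_equiv, which accepts any key type that is Equiv to the stored one and so avoids allocating a fresh ~str per lookup. A rough sketch of that lookup in the era's syntax (the period's HashMap API is assumed; the helper name is hypothetical):

    // Sketch only: LintDict = HashMap<&'static str, LintSpec>, as above.
    fn lookup_lint(dict: &LintDict, lintname: @str) -> Option<lint> {
        // find_equiv probes the &'static str keys with an @str, no copy needed.
        match dict.find_equiv(&lintname) {
            Some(spec) => Some(spec.lint),
            None => None
        }
    }
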


@ -310,12 +310,12 @@ impl IrMaps {
} }
} }
pub fn variable_name(&mut self, var: Variable) -> @~str { pub fn variable_name(&mut self, var: Variable) -> @str {
match self.var_kinds[*var] { match self.var_kinds[*var] {
Local(LocalInfo { ident: nm, _ }) | Arg(_, nm) => { Local(LocalInfo { ident: nm, _ }) | Arg(_, nm) => {
self.tcx.sess.str_of(nm) self.tcx.sess.str_of(nm)
}, },
ImplicitRet => @~"<implicit-ret>" ImplicitRet => @"<implicit-ret>"
} }
} }
@ -1578,12 +1578,12 @@ impl Liveness {
FreeVarNode(span) => { FreeVarNode(span) => {
self.tcx.sess.span_err( self.tcx.sess.span_err(
span, span,
fmt!("capture of %s: `%s`", msg, *name)); fmt!("capture of %s: `%s`", msg, name));
} }
ExprNode(span) => { ExprNode(span) => {
self.tcx.sess.span_err( self.tcx.sess.span_err(
span, span,
fmt!("use of %s: `%s`", msg, *name)); fmt!("use of %s: `%s`", msg, name));
} }
ExitNode | VarDefNode(_) => { ExitNode | VarDefNode(_) => {
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
@ -1593,7 +1593,7 @@ impl Liveness {
} }
} }
pub fn should_warn(&self, var: Variable) -> Option<@~str> { pub fn should_warn(&self, var: Variable) -> Option<@str> {
let name = self.ir.variable_name(var); let name = self.ir.variable_name(var);
if name[0] == ('_' as u8) { None } else { Some(name) } if name[0] == ('_' as u8) { None } else { Some(name) }
} }
@ -1638,10 +1638,10 @@ impl Liveness {
if is_assigned { if is_assigned {
self.tcx.sess.add_lint(unused_variable, id, sp, self.tcx.sess.add_lint(unused_variable, id, sp,
fmt!("variable `%s` is assigned to, \ fmt!("variable `%s` is assigned to, \
but never used", **name)); but never used", *name));
} else { } else {
self.tcx.sess.add_lint(unused_variable, id, sp, self.tcx.sess.add_lint(unused_variable, id, sp,
fmt!("unused variable: `%s`", **name)); fmt!("unused variable: `%s`", *name));
} }
} }
true true
@ -1659,7 +1659,7 @@ impl Liveness {
let r = self.should_warn(var); let r = self.should_warn(var);
for r.iter().advance |name| { for r.iter().advance |name| {
self.tcx.sess.add_lint(dead_assignment, id, sp, self.tcx.sess.add_lint(dead_assignment, id, sp,
fmt!("value assigned to `%s` is never read", **name)); fmt!("value assigned to `%s` is never read", *name));
} }
} }
} }


@ -1201,7 +1201,7 @@ pub fn ptr_sigil(ptr: ptr_kind) -> ~str {
impl Repr for InteriorKind { impl Repr for InteriorKind {
fn repr(&self, tcx: ty::ctxt) -> ~str { fn repr(&self, tcx: ty::ctxt) -> ~str {
match *self { match *self {
InteriorField(NamedField(fld)) => copy *tcx.sess.str_of(fld), InteriorField(NamedField(fld)) => tcx.sess.str_of(fld).to_owned(),
InteriorField(PositionalField(i)) => fmt!("#%?", i), InteriorField(PositionalField(i)) => fmt!("#%?", i),
InteriorElement(_) => ~"[]", InteriorElement(_) => ~"[]",
} }


@ -235,7 +235,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
if field.ident != ident { loop; } if field.ident != ident { loop; }
if field.vis == private { if field.vis == private {
tcx.sess.span_err(span, fmt!("field `%s` is private", tcx.sess.span_err(span, fmt!("field `%s` is private",
*token::ident_to_str(&ident))); token::ident_to_str(&ident)));
} }
break; break;
} }
@ -255,7 +255,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
!privileged_items.contains(&(container_id.node))) { !privileged_items.contains(&(container_id.node))) {
tcx.sess.span_err(span, tcx.sess.span_err(span,
fmt!("method `%s` is private", fmt!("method `%s` is private",
*token::ident_to_str(name))); token::ident_to_str(name)));
} }
} else { } else {
let visibility = let visibility =
@ -263,7 +263,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
if visibility != public { if visibility != public {
tcx.sess.span_err(span, tcx.sess.span_err(span,
fmt!("method `%s` is private", fmt!("method `%s` is private",
*token::ident_to_str(name))); token::ident_to_str(name)));
} }
} }
}; };
@ -283,13 +283,13 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
!privileged_items.contains(&def_id.node) { !privileged_items.contains(&def_id.node) {
tcx.sess.span_err(span, tcx.sess.span_err(span,
fmt!("function `%s` is private", fmt!("function `%s` is private",
*token::ident_to_str(path.idents.last()))); token::ident_to_str(path.idents.last())));
} }
} else if csearch::get_item_visibility(tcx.sess.cstore, } else if csearch::get_item_visibility(tcx.sess.cstore,
def_id) != public { def_id) != public {
tcx.sess.span_err(span, tcx.sess.span_err(span,
fmt!("function `%s` is private", fmt!("function `%s` is private",
*token::ident_to_str(path.idents.last()))); token::ident_to_str(path.idents.last())));
} }
} }
_ => {} _ => {}
@ -328,7 +328,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
.contains(&(trait_id.node)) => { .contains(&(trait_id.node)) => {
tcx.sess.span_err(span, tcx.sess.span_err(span,
fmt!("method `%s` is private", fmt!("method `%s` is private",
*token::ident_to_str(&method token::ident_to_str(&method
.ident))); .ident)));
} }
provided(_) | required(_) => { provided(_) | required(_) => {


@ -84,7 +84,7 @@ pub type TraitMap = HashMap<node_id,@mut ~[def_id]>;
pub type ExportMap2 = @mut HashMap<node_id, ~[Export2]>; pub type ExportMap2 = @mut HashMap<node_id, ~[Export2]>;
pub struct Export2 { pub struct Export2 {
name: @~str, // The name of the target. name: @str, // The name of the target.
def_id: def_id, // The definition of the target. def_id: def_id, // The definition of the target.
reexport: bool, // Whether this is a reexport. reexport: bool, // Whether this is a reexport.
} }
@ -1035,14 +1035,14 @@ impl Resolver {
self.session.span_err(sp, self.session.span_err(sp,
fmt!("duplicate definition of %s `%s`", fmt!("duplicate definition of %s `%s`",
namespace_to_str(ns), namespace_to_str(ns),
*self.session.str_of(name))); self.session.str_of(name)));
{ {
let r = child.span_for_namespace(ns); let r = child.span_for_namespace(ns);
for r.iter().advance |sp| { for r.iter().advance |sp| {
self.session.span_note(*sp, self.session.span_note(*sp,
fmt!("first definition of %s %s here:", fmt!("first definition of %s %s here:",
namespace_to_str(ns), namespace_to_str(ns),
*self.session.str_of(name))); self.session.str_of(name)));
} }
} }
} }
@ -1695,7 +1695,7 @@ impl Resolver {
debug!("(building reduced graph for \ debug!("(building reduced graph for \
external crate) ... adding \ external crate) ... adding \
trait method '%s'", trait method '%s'",
*self.session.str_of(method_name)); self.session.str_of(method_name));
// Add it to the trait info if not static. // Add it to the trait info if not static.
if explicit_self != sty_static { if explicit_self != sty_static {
@ -1824,7 +1824,7 @@ impl Resolver {
visibility, visibility,
&mut modules, &mut modules,
child_name_bindings, child_name_bindings,
*self.session.str_of( self.session.str_of(
final_ident), final_ident),
final_ident, final_ident,
new_parent); new_parent);
@ -1843,7 +1843,7 @@ impl Resolver {
debug!("(building reduced graph for \ debug!("(building reduced graph for \
external crate) processing \ external crate) processing \
static methods for type name %s", static methods for type name %s",
*self.session.str_of( self.session.str_of(
final_ident)); final_ident));
let (child_name_bindings, new_parent) = let (child_name_bindings, new_parent) =
@ -1894,7 +1894,7 @@ impl Resolver {
debug!("(building reduced graph for \ debug!("(building reduced graph for \
external crate) creating \ external crate) creating \
static method '%s'", static method '%s'",
*self.session.str_of(ident)); self.session.str_of(ident));
let (method_name_bindings, _) = let (method_name_bindings, _) =
self.add_child( self.add_child(
@ -1945,7 +1945,7 @@ impl Resolver {
directive: privacy %? %s::%s", directive: privacy %? %s::%s",
privacy, privacy,
self.idents_to_str(directive.module_path), self.idents_to_str(directive.module_path),
*self.session.str_of(target)); self.session.str_of(target));
match module_.import_resolutions.find(&target) { match module_.import_resolutions.find(&target) {
Some(&resolution) => { Some(&resolution) => {
@ -2054,7 +2054,7 @@ impl Resolver {
Failed => { Failed => {
// We presumably emitted an error. Continue. // We presumably emitted an error. Continue.
let msg = fmt!("failed to resolve import `%s`", let msg = fmt!("failed to resolve import `%s`",
*self.import_path_to_str( self.import_path_to_str(
import_directive.module_path, import_directive.module_path,
*import_directive.subclass)); *import_directive.subclass));
self.session.span_err(import_directive.span, msg); self.session.span_err(import_directive.span, msg);
@ -2077,30 +2077,30 @@ impl Resolver {
let mut result = ~""; let mut result = ~"";
for idents.each |ident| { for idents.each |ident| {
if first { first = false; } else { result += "::" }; if first { first = false; } else { result += "::" };
result += *self.session.str_of(*ident); result += self.session.str_of(*ident);
}; };
return result; return result;
} }
pub fn import_directive_subclass_to_str(@mut self, pub fn import_directive_subclass_to_str(@mut self,
subclass: ImportDirectiveSubclass) subclass: ImportDirectiveSubclass)
-> @~str { -> @str {
match subclass { match subclass {
SingleImport(_target, source) => self.session.str_of(source), SingleImport(_target, source) => self.session.str_of(source),
GlobImport => @~"*" GlobImport => @"*"
} }
} }
pub fn import_path_to_str(@mut self, pub fn import_path_to_str(@mut self,
idents: &[ident], idents: &[ident],
subclass: ImportDirectiveSubclass) subclass: ImportDirectiveSubclass)
-> @~str { -> @str {
if idents.is_empty() { if idents.is_empty() {
self.import_directive_subclass_to_str(subclass) self.import_directive_subclass_to_str(subclass)
} else { } else {
@fmt!("%s::%s", (fmt!("%s::%s",
self.idents_to_str(idents), self.idents_to_str(idents),
*self.import_directive_subclass_to_str(subclass)) self.import_directive_subclass_to_str(subclass))).to_managed()
} }
} }
@ -2221,9 +2221,9 @@ impl Resolver {
-> ResolveResult<()> { -> ResolveResult<()> {
debug!("(resolving single import) resolving `%s` = `%s::%s` from \ debug!("(resolving single import) resolving `%s` = `%s::%s` from \
`%s`", `%s`",
*self.session.str_of(target), self.session.str_of(target),
self.module_to_str(containing_module), self.module_to_str(containing_module),
*self.session.str_of(source), self.session.str_of(source),
self.module_to_str(module_)); self.module_to_str(module_));
// We need to resolve both namespaces for this to succeed. // We need to resolve both namespaces for this to succeed.
@ -2427,12 +2427,12 @@ impl Resolver {
let span = directive.span; let span = directive.span;
if resolve_fail { if resolve_fail {
self.session.span_err(span, fmt!("unresolved import: there is no `%s` in `%s`", self.session.span_err(span, fmt!("unresolved import: there is no `%s` in `%s`",
*self.session.str_of(source), self.session.str_of(source),
self.module_to_str(containing_module))); self.module_to_str(containing_module)));
return Failed; return Failed;
} else if priv_fail { } else if priv_fail {
self.session.span_err(span, fmt!("unresolved import: found `%s` in `%s` but it is \ self.session.span_err(span, fmt!("unresolved import: found `%s` in `%s` but it is \
private", *self.session.str_of(source), private", self.session.str_of(source),
self.module_to_str(containing_module))); self.module_to_str(containing_module)));
return Failed; return Failed;
} }
@ -2535,7 +2535,7 @@ impl Resolver {
debug!("(resolving glob import) writing resolution `%s` in `%s` \ debug!("(resolving glob import) writing resolution `%s` in `%s` \
to `%s`, privacy=%?", to `%s`, privacy=%?",
*self.session.str_of(ident), self.session.str_of(ident),
self.module_to_str(containing_module), self.module_to_str(containing_module),
self.module_to_str(module_), self.module_to_str(module_),
copy dest_import_resolution.privacy); copy dest_import_resolution.privacy);
@ -2604,17 +2604,17 @@ impl Resolver {
fmt!("unresolved import. maybe \ fmt!("unresolved import. maybe \
a missing `extern mod \ a missing `extern mod \
%s`?", %s`?",
*segment_name)); segment_name));
return Failed; return Failed;
} }
self.session.span_err(span, fmt!("unresolved import: could not find `%s` in \ self.session.span_err(span, fmt!("unresolved import: could not find `%s` in \
`%s`.", *segment_name, module_name)); `%s`.", segment_name, module_name));
return Failed; return Failed;
} }
Indeterminate => { Indeterminate => {
debug!("(resolving module path for import) module \ debug!("(resolving module path for import) module \
resolution is indeterminate: %s", resolution is indeterminate: %s",
*self.session.str_of(name)); self.session.str_of(name));
return Indeterminate; return Indeterminate;
} }
Success(target) => { Success(target) => {
@ -2628,7 +2628,7 @@ impl Resolver {
self.session.span_err(span, self.session.span_err(span,
fmt!("not a \ fmt!("not a \
module `%s`", module `%s`",
*self.session. self.session.
str_of( str_of(
name))); name)));
return Failed; return Failed;
@ -2656,7 +2656,7 @@ impl Resolver {
// There are no type bindings at all. // There are no type bindings at all.
self.session.span_err(span, self.session.span_err(span,
fmt!("not a module `%s`", fmt!("not a module `%s`",
*self.session.str_of( self.session.str_of(
name))); name)));
return Failed; return Failed;
} }
@ -2783,7 +2783,7 @@ impl Resolver {
-> ResolveResult<Target> { -> ResolveResult<Target> {
debug!("(resolving item in lexical scope) resolving `%s` in \ debug!("(resolving item in lexical scope) resolving `%s` in \
namespace %? in `%s`", namespace %? in `%s`",
*self.session.str_of(name), self.session.str_of(name),
namespace, namespace,
self.module_to_str(module_)); self.module_to_str(module_));
@ -2997,11 +2997,11 @@ impl Resolver {
// top of the crate otherwise. // top of the crate otherwise.
let mut containing_module; let mut containing_module;
let mut i; let mut i;
if *token::ident_to_str(&module_path[0]) == ~"self" { if "self" == token::ident_to_str(&module_path[0]) {
containing_module = containing_module =
self.get_nearest_normal_module_parent_or_self(module_); self.get_nearest_normal_module_parent_or_self(module_);
i = 1; i = 1;
} else if *token::ident_to_str(&module_path[0]) == ~"super" { } else if "super" == token::ident_to_str(&module_path[0]) {
containing_module = containing_module =
self.get_nearest_normal_module_parent_or_self(module_); self.get_nearest_normal_module_parent_or_self(module_);
i = 0; // We'll handle `super` below. i = 0; // We'll handle `super` below.
@ -3011,7 +3011,7 @@ impl Resolver {
// Now loop through all the `super`s we find. // Now loop through all the `super`s we find.
while i < module_path.len() && while i < module_path.len() &&
*token::ident_to_str(&module_path[i]) == ~"super" { "super" == token::ident_to_str(&module_path[i]) {
debug!("(resolving module prefix) resolving `super` at %s", debug!("(resolving module prefix) resolving `super` at %s",
self.module_to_str(containing_module)); self.module_to_str(containing_module));
match self.get_nearest_normal_module_parent(containing_module) { match self.get_nearest_normal_module_parent(containing_module) {
@ -3039,7 +3039,7 @@ impl Resolver {
name_search_type: NameSearchType) name_search_type: NameSearchType)
-> ResolveResult<Target> { -> ResolveResult<Target> {
debug!("(resolving name in module) resolving `%s` in `%s`", debug!("(resolving name in module) resolving `%s` in `%s`",
*self.session.str_of(name), self.session.str_of(name),
self.module_to_str(module_)); self.module_to_str(module_));
// First, check the direct children of the module. // First, check the direct children of the module.
@ -3112,7 +3112,7 @@ impl Resolver {
// We're out of luck. // We're out of luck.
debug!("(resolving name in module) failed to resolve `%s`", debug!("(resolving name in module) failed to resolve `%s`",
*self.session.str_of(name)); self.session.str_of(name));
return Failed; return Failed;
} }
@ -3230,7 +3230,7 @@ impl Resolver {
(Some(d), Some(Public)) => { (Some(d), Some(Public)) => {
debug!("(computing exports) YES: %s '%s' => %?", debug!("(computing exports) YES: %s '%s' => %?",
if reexport { ~"reexport" } else { ~"export"}, if reexport { ~"reexport" } else { ~"export"},
*self.session.str_of(ident), self.session.str_of(ident),
def_id_of_def(d)); def_id_of_def(d));
exports2.push(Export2 { exports2.push(Export2 {
reexport: reexport, reexport: reexport,
@ -3252,7 +3252,7 @@ impl Resolver {
module_: @mut Module) { module_: @mut Module) {
for module_.children.each |ident, namebindings| { for module_.children.each |ident, namebindings| {
debug!("(computing exports) maybe export '%s'", debug!("(computing exports) maybe export '%s'",
*self.session.str_of(*ident)); self.session.str_of(*ident));
self.add_exports_of_namebindings(&mut *exports2, self.add_exports_of_namebindings(&mut *exports2,
*ident, *ident,
*namebindings, *namebindings,
@ -3268,14 +3268,14 @@ impl Resolver {
for module_.import_resolutions.each |ident, importresolution| { for module_.import_resolutions.each |ident, importresolution| {
if importresolution.privacy != Public { if importresolution.privacy != Public {
debug!("(computing exports) not reexporting private `%s`", debug!("(computing exports) not reexporting private `%s`",
*self.session.str_of(*ident)); self.session.str_of(*ident));
loop; loop;
} }
for [ TypeNS, ValueNS ].each |ns| { for [ TypeNS, ValueNS ].each |ns| {
match importresolution.target_for_namespace(*ns) { match importresolution.target_for_namespace(*ns) {
Some(target) => { Some(target) => {
debug!("(computing exports) maybe reexport '%s'", debug!("(computing exports) maybe reexport '%s'",
*self.session.str_of(*ident)); self.session.str_of(*ident));
self.add_exports_of_namebindings(&mut *exports2, self.add_exports_of_namebindings(&mut *exports2,
*ident, *ident,
target.bindings, target.bindings,
@ -3318,7 +3318,7 @@ impl Resolver {
match orig_module.children.find(&name) { match orig_module.children.find(&name) {
None => { None => {
debug!("!!! (with scope) didn't find `%s` in `%s`", debug!("!!! (with scope) didn't find `%s` in `%s`",
*self.session.str_of(name), self.session.str_of(name),
self.module_to_str(orig_module)); self.module_to_str(orig_module));
} }
Some(name_bindings) => { Some(name_bindings) => {
@ -3326,7 +3326,7 @@ impl Resolver {
None => { None => {
debug!("!!! (with scope) didn't find module \ debug!("!!! (with scope) didn't find module \
for `%s` in `%s`", for `%s` in `%s`",
*self.session.str_of(name), self.session.str_of(name),
self.module_to_str(orig_module)); self.module_to_str(orig_module));
} }
Some(module_) => { Some(module_) => {
@ -3503,7 +3503,7 @@ impl Resolver {
pub fn resolve_item(@mut self, item: @item, visitor: ResolveVisitor) { pub fn resolve_item(@mut self, item: @item, visitor: ResolveVisitor) {
debug!("(resolving item) resolving %s", debug!("(resolving item) resolving %s",
*self.session.str_of(item.ident)); self.session.str_of(item.ident));
// Items with the !resolve_unexported attribute are X-ray contexts. // Items with the !resolve_unexported attribute are X-ray contexts.
// This is used to allow the test runner to run unexported tests. // This is used to allow the test runner to run unexported tests.
@ -4044,7 +4044,7 @@ impl Resolver {
p.span, p.span,
fmt!("variable `%s` from pattern #1 is \ fmt!("variable `%s` from pattern #1 is \
not bound in pattern #%u", not bound in pattern #%u",
*self.session.str_of(key), i + 1)); self.session.str_of(key), i + 1));
} }
Some(binding_i) => { Some(binding_i) => {
if binding_0.binding_mode != binding_i.binding_mode { if binding_0.binding_mode != binding_i.binding_mode {
@ -4052,7 +4052,7 @@ impl Resolver {
binding_i.span, binding_i.span,
fmt!("variable `%s` is bound with different \ fmt!("variable `%s` is bound with different \
mode in pattern #%u than in pattern #1", mode in pattern #%u than in pattern #1",
*self.session.str_of(key), i + 1)); self.session.str_of(key), i + 1));
} }
} }
} }
@ -4064,7 +4064,7 @@ impl Resolver {
binding.span, binding.span,
fmt!("variable `%s` from pattern #%u is \ fmt!("variable `%s` from pattern #%u is \
not bound in pattern #1", not bound in pattern #1",
*self.session.str_of(key), i + 1)); self.session.str_of(key), i + 1));
} }
} }
} }
@ -4148,7 +4148,7 @@ impl Resolver {
Some(def) => { Some(def) => {
debug!("(resolving type) resolved `%s` to \ debug!("(resolving type) resolved `%s` to \
type %?", type %?",
*self.session.str_of( self.session.str_of(
*path.idents.last()), *path.idents.last()),
def); def);
result_def = Some(def); result_def = Some(def);
@ -4224,7 +4224,7 @@ impl Resolver {
if mode == RefutableMode => { if mode == RefutableMode => {
debug!("(resolving pattern) resolving `%s` to \ debug!("(resolving pattern) resolving `%s` to \
struct or enum variant", struct or enum variant",
*self.session.str_of(ident)); self.session.str_of(ident));
self.enforce_default_binding_mode( self.enforce_default_binding_mode(
pattern, pattern,
@ -4238,13 +4238,13 @@ impl Resolver {
shadows an enum \ shadows an enum \
variant or unit-like \ variant or unit-like \
struct in scope", struct in scope",
*self.session self.session
.str_of(ident))); .str_of(ident)));
} }
FoundConst(def) if mode == RefutableMode => { FoundConst(def) if mode == RefutableMode => {
debug!("(resolving pattern) resolving `%s` to \ debug!("(resolving pattern) resolving `%s` to \
constant", constant",
*self.session.str_of(ident)); self.session.str_of(ident));
self.enforce_default_binding_mode( self.enforce_default_binding_mode(
pattern, pattern,
@ -4259,7 +4259,7 @@ impl Resolver {
} }
BareIdentifierPatternUnresolved => { BareIdentifierPatternUnresolved => {
debug!("(resolving pattern) binding `%s`", debug!("(resolving pattern) binding `%s`",
*self.session.str_of(ident)); self.session.str_of(ident));
let is_mutable = mutability == Mutable; let is_mutable = mutability == Mutable;
@ -4350,7 +4350,7 @@ impl Resolver {
self.session.span_err( self.session.span_err(
path.span, path.span,
fmt!("`%s` is not an enum variant or constant", fmt!("`%s` is not an enum variant or constant",
*self.session.str_of( self.session.str_of(
*path.idents.last()))); *path.idents.last())));
} }
None => { None => {
@ -4378,7 +4378,7 @@ impl Resolver {
self.session.span_err( self.session.span_err(
path.span, path.span,
fmt!("`%s` is not an enum variant, struct or const", fmt!("`%s` is not an enum variant, struct or const",
*self.session.str_of( self.session.str_of(
*path.idents.last()))); *path.idents.last())));
} }
None => { None => {
@ -4753,7 +4753,7 @@ impl Resolver {
Some(dl_def(def)) => { Some(dl_def(def)) => {
debug!("(resolving path in local ribs) resolved `%s` to \ debug!("(resolving path in local ribs) resolved `%s` to \
local: %?", local: %?",
*self.session.str_of(ident), self.session.str_of(ident),
def); def);
return Some(def); return Some(def);
} }
@ -4811,7 +4811,7 @@ impl Resolver {
Some(def) => { Some(def) => {
debug!("(resolving item path in lexical scope) \ debug!("(resolving item path in lexical scope) \
resolved `%s` to item", resolved `%s` to item",
*self.session.str_of(ident)); self.session.str_of(ident));
return Some(def); return Some(def);
} }
} }
@ -4828,17 +4828,17 @@ impl Resolver {
pub fn find_best_match_for_name(@mut self, pub fn find_best_match_for_name(@mut self,
name: &str, name: &str,
max_distance: uint) max_distance: uint)
-> Option<~str> { -> Option<@str> {
let this = &mut *self; let this = &mut *self;
let mut maybes: ~[~str] = ~[]; let mut maybes: ~[@str] = ~[];
let mut values: ~[uint] = ~[]; let mut values: ~[uint] = ~[];
let mut j = this.value_ribs.len(); let mut j = this.value_ribs.len();
while j != 0 { while j != 0 {
j -= 1; j -= 1;
for this.value_ribs[j].bindings.each_key |&k| { for this.value_ribs[j].bindings.each_key |&k| {
vec::push(&mut maybes, copy *this.session.str_of(k)); vec::push(&mut maybes, this.session.str_of(k));
vec::push(&mut values, uint::max_value); vec::push(&mut values, uint::max_value);
} }
} }
@ -4857,7 +4857,7 @@ impl Resolver {
values[smallest] != uint::max_value && values[smallest] != uint::max_value &&
values[smallest] < name.len() + 2 && values[smallest] < name.len() + 2 &&
values[smallest] <= max_distance && values[smallest] <= max_distance &&
maybes[smallest] != name.to_owned() { name != maybes[smallest] {
Some(vec::swap_remove(&mut maybes, smallest)) Some(vec::swap_remove(&mut maybes, smallest))
@ -4882,7 +4882,7 @@ impl Resolver {
match field.node.kind { match field.node.kind {
unnamed_field => {}, unnamed_field => {},
named_field(ident, _) => { named_field(ident, _) => {
if str::eq_slice(*this.session.str_of(ident), if str::eq_slice(this.session.str_of(ident),
name) { name) {
return true return true
} }
@ -5007,7 +5007,7 @@ impl Resolver {
self.session.span_err(expr.span, self.session.span_err(expr.span,
fmt!("use of undeclared label \ fmt!("use of undeclared label \
`%s`", `%s`",
*self.session.str_of( self.session.str_of(
label))), label))),
Some(dl_def(def @ def_label(_))) => { Some(dl_def(def @ def_label(_))) => {
self.record_def(expr.id, def) self.record_def(expr.id, def)
@ -5122,7 +5122,7 @@ impl Resolver {
pub fn search_for_traits_containing_method(@mut self, name: ident) pub fn search_for_traits_containing_method(@mut self, name: ident)
-> ~[def_id] { -> ~[def_id] {
debug!("(searching for traits containing method) looking for '%s'", debug!("(searching for traits containing method) looking for '%s'",
*self.session.str_of(name)); self.session.str_of(name));
let mut found_traits = ~[]; let mut found_traits = ~[];
@ -5227,7 +5227,7 @@ impl Resolver {
debug!("(adding trait info) found trait %d:%d for method '%s'", debug!("(adding trait info) found trait %d:%d for method '%s'",
trait_def_id.crate, trait_def_id.crate,
trait_def_id.node, trait_def_id.node,
*self.session.str_of(name)); self.session.str_of(name));
found_traits.push(trait_def_id); found_traits.push(trait_def_id);
} }
@ -5346,7 +5346,7 @@ impl Resolver {
debug!("Children:"); debug!("Children:");
for module_.children.each_key |&name| { for module_.children.each_key |&name| {
debug!("* %s", *self.session.str_of(name)); debug!("* %s", self.session.str_of(name));
} }
debug!("Import resolutions:"); debug!("Import resolutions:");
@ -5369,7 +5369,7 @@ impl Resolver {
} }
} }
debug!("* %s:%s%s", *self.session.str_of(*name), debug!("* %s:%s%s", self.session.str_of(*name),
value_repr, type_repr); value_repr, type_repr);
} }
} }
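Most of the churn in this file is the removal of a single deref: session.str_of() now returns an @str rather than an @~str, and %s in fmt! formats it directly, so the leading * goes away at every call site. A one-function sketch in the era's syntax (hypothetical helper; illustrative only):

    fn describe(sess: Session, name: ident) -> ~str {
        // Before: fmt!("resolving `%s`", *sess.str_of(name))  -- deref the @~str
        // After:  str_of returns @str, which %s accepts as-is.
        fmt!("resolving `%s`", sess.str_of(name))
    }
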


@ -1699,7 +1699,7 @@ pub fn trans_match_inner(scope_cx: block,
// Special case for empty types // Special case for empty types
let fail_cx = @mut None; let fail_cx = @mut None;
let f: mk_fail = || mk_fail(scope_cx, discr_expr.span, let f: mk_fail = || mk_fail(scope_cx, discr_expr.span,
@~"scrutinizing value that can't exist", fail_cx); @"scrutinizing value that can't exist", fail_cx);
Some(f) Some(f)
} else { } else {
None None
@ -1731,7 +1731,7 @@ pub fn trans_match_inner(scope_cx: block,
bcx = controlflow::join_blocks(scope_cx, arm_cxs); bcx = controlflow::join_blocks(scope_cx, arm_cxs);
return bcx; return bcx;
fn mk_fail(bcx: block, sp: span, msg: @~str, fn mk_fail(bcx: block, sp: span, msg: @str,
finished: @mut Option<BasicBlockRef>) -> BasicBlockRef { finished: @mut Option<BasicBlockRef>) -> BasicBlockRef {
match *finished { Some(bb) => return bb, _ => () } match *finished { Some(bb) => return bb, _ => () }
let fail_cx = sub_block(bcx, "case_fallthrough"); let fail_cx = sub_block(bcx, "case_fallthrough");


@ -33,7 +33,7 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
// Prepare the output operands // Prepare the output operands
let outputs = do ia.outputs.map |&(c, out)| { let outputs = do ia.outputs.map |&(c, out)| {
constraints.push(copy *c); constraints.push(c);
aoutputs.push(unpack_result!(bcx, { aoutputs.push(unpack_result!(bcx, {
callee::trans_arg_expr(bcx, callee::trans_arg_expr(bcx,
@ -69,7 +69,7 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
// Now the input operands // Now the input operands
let inputs = do ia.inputs.map |&(c, in)| { let inputs = do ia.inputs.map |&(c, in)| {
constraints.push(copy *c); constraints.push(c);
unpack_result!(bcx, { unpack_result!(bcx, {
callee::trans_arg_expr(bcx, callee::trans_arg_expr(bcx,
@ -90,14 +90,14 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
let mut constraints = constraints.connect(","); let mut constraints = constraints.connect(",");
let mut clobbers = getClobbers(); let mut clobbers = getClobbers();
if *ia.clobbers != ~"" && clobbers != ~"" { if !ia.clobbers.is_empty() && !clobbers.is_empty() {
clobbers = *ia.clobbers + "," + clobbers; clobbers = fmt!("%s,%s", ia.clobbers, clobbers);
} else { } else {
clobbers += *ia.clobbers; clobbers += ia.clobbers;
}; };
// Add the clobbers to our constraints list // Add the clobbers to our constraints list
if clobbers != ~"" && constraints != ~"" { if !clobbers.is_empty() && !constraints.is_empty() {
constraints += ","; constraints += ",";
constraints += clobbers; constraints += clobbers;
} else { } else {
@ -122,7 +122,7 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
ast::asm_intel => lib::llvm::AD_Intel ast::asm_intel => lib::llvm::AD_Intel
}; };
let r = do str::as_c_str(*ia.asm) |a| { let r = do str::as_c_str(ia.asm) |a| {
do str::as_c_str(constraints) |c| { do str::as_c_str(constraints) |c| {
InlineAsmCall(bcx, a, c, inputs, output, ia.volatile, ia.alignstack, dialect) InlineAsmCall(bcx, a, c, inputs, output, ia.volatile, ia.alignstack, dialect)
} }


@ -485,9 +485,9 @@ pub fn set_glue_inlining(f: ValueRef, t: ty::t) {
// Double-check that we never ask LLVM to declare the same symbol twice. It // Double-check that we never ask LLVM to declare the same symbol twice. It
// silently mangles such symbols, breaking our linkage model. // silently mangles such symbols, breaking our linkage model.
pub fn note_unique_llvm_symbol(ccx: @CrateContext, sym: @~str) { pub fn note_unique_llvm_symbol(ccx: @CrateContext, sym: @str) {
if ccx.all_llvm_symbols.contains(&sym) { if ccx.all_llvm_symbols.contains(&sym) {
ccx.sess.bug(~"duplicate LLVM symbol: " + *sym); ccx.sess.bug(~"duplicate LLVM symbol: " + sym);
} }
ccx.all_llvm_symbols.insert(sym); ccx.all_llvm_symbols.insert(sym);
} }
@ -569,7 +569,7 @@ pub fn compare_scalar_types(cx: block,
rslt( rslt(
controlflow::trans_fail( controlflow::trans_fail(
cx, None, cx, None,
@~"attempt to compare values of type type"), @"attempt to compare values of type type"),
C_nil()) C_nil())
} }
_ => { _ => {
@ -791,9 +791,9 @@ pub fn cast_shift_rhs(op: ast::binop,
pub fn fail_if_zero(cx: block, span: span, divrem: ast::binop, pub fn fail_if_zero(cx: block, span: span, divrem: ast::binop,
rhs: ValueRef, rhs_t: ty::t) -> block { rhs: ValueRef, rhs_t: ty::t) -> block {
let text = if divrem == ast::div { let text = if divrem == ast::div {
@~"attempted to divide by zero" @"attempted to divide by zero"
} else { } else {
@~"attempted remainder with a divisor of zero" @"attempted remainder with a divisor of zero"
}; };
let is_zero = match ty::get(rhs_t).sty { let is_zero = match ty::get(rhs_t).sty {
ty::ty_int(t) => { ty::ty_int(t) => {
@ -1056,19 +1056,19 @@ pub fn load_if_immediate(cx: block, v: ValueRef, t: ty::t) -> ValueRef {
return v; return v;
} }
pub fn trans_trace(bcx: block, sp_opt: Option<span>, trace_str: @~str) { pub fn trans_trace(bcx: block, sp_opt: Option<span>, trace_str: @str) {
if !bcx.sess().trace() { return; } if !bcx.sess().trace() { return; }
let _icx = bcx.insn_ctxt("trans_trace"); let _icx = bcx.insn_ctxt("trans_trace");
add_comment(bcx, *trace_str); add_comment(bcx, trace_str);
let V_trace_str = C_cstr(bcx.ccx(), trace_str); let V_trace_str = C_cstr(bcx.ccx(), trace_str);
let (V_filename, V_line) = match sp_opt { let (V_filename, V_line) = match sp_opt {
Some(sp) => { Some(sp) => {
let sess = bcx.sess(); let sess = bcx.sess();
let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo); let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo);
(C_cstr(bcx.ccx(), @/*bad*/copy loc.file.name), loc.line as int) (C_cstr(bcx.ccx(), loc.file.name), loc.line as int)
} }
None => { None => {
(C_cstr(bcx.ccx(), @~"<runtime>"), 0) (C_cstr(bcx.ccx(), @"<runtime>"), 0)
} }
}; };
let ccx = bcx.ccx(); let ccx = bcx.ccx();
@ -1187,7 +1187,7 @@ pub fn new_block(cx: fn_ctxt, parent: Option<block>, kind: block_kind,
special_idents::invalid special_idents::invalid
}; };
unsafe { unsafe {
let llbb = str::as_c_str(*cx.ccx.sess.str_of(s), |buf| { let llbb = str::as_c_str(cx.ccx.sess.str_of(s), |buf| {
llvm::LLVMAppendBasicBlockInContext(cx.ccx.llcx, cx.llfn, buf) llvm::LLVMAppendBasicBlockInContext(cx.ccx.llcx, cx.llfn, buf)
}); });
let bcx = mk_block(llbb, let bcx = mk_block(llbb,
@ -1308,7 +1308,7 @@ pub fn cleanup_and_leave(bcx: block,
if bcx.sess().trace() { if bcx.sess().trace() {
trans_trace( trans_trace(
bcx, None, bcx, None,
@fmt!("cleanup_and_leave(%s)", cur.to_str())); (fmt!("cleanup_and_leave(%s)", cur.to_str())).to_managed());
} }
match cur.kind { match cur.kind {
@ -1427,7 +1427,7 @@ pub fn alloc_local(cx: block, local: @ast::local) -> block {
let val = alloc_ty(cx, t); let val = alloc_ty(cx, t);
if cx.sess().opts.debuginfo { if cx.sess().opts.debuginfo {
for simple_name.iter().advance |name| { for simple_name.iter().advance |name| {
str::as_c_str(*cx.ccx().sess.str_of(*name), |buf| { str::as_c_str(cx.ccx().sess.str_of(*name), |buf| {
unsafe { unsafe {
llvm::LLVMSetValueName(val, buf) llvm::LLVMSetValueName(val, buf)
} }
@ -1453,12 +1453,8 @@ pub fn call_memcpy(cx: block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, a
let _icx = cx.insn_ctxt("call_memcpy"); let _icx = cx.insn_ctxt("call_memcpy");
let ccx = cx.ccx(); let ccx = cx.ccx();
let key = match ccx.sess.targ_cfg.arch { let key = match ccx.sess.targ_cfg.arch {
X86 | Arm | Mips => { X86 | Arm | Mips => "llvm.memcpy.p0i8.p0i8.i32",
~"llvm.memcpy.p0i8.p0i8.i32" X86_64 => "llvm.memcpy.p0i8.p0i8.i64"
}
X86_64 => {
~"llvm.memcpy.p0i8.p0i8.i64"
}
}; };
let memcpy = *ccx.intrinsics.get(&key); let memcpy = *ccx.intrinsics.get(&key);
let src_ptr = PointerCast(cx, src, T_ptr(T_i8())); let src_ptr = PointerCast(cx, src, T_ptr(T_i8()));
@ -1499,15 +1495,10 @@ pub fn memzero(cx: block, llptr: ValueRef, llty: TypeRef) {
let _icx = cx.insn_ctxt("memzero"); let _icx = cx.insn_ctxt("memzero");
let ccx = cx.ccx(); let ccx = cx.ccx();
let intrinsic_key; let intrinsic_key = match ccx.sess.targ_cfg.arch {
match ccx.sess.targ_cfg.arch { X86 | Arm | Mips => "llvm.memset.p0i8.i32",
X86 | Arm | Mips => { X86_64 => "llvm.memset.p0i8.i64"
intrinsic_key = ~"llvm.memset.p0i8.i32"; };
}
X86_64 => {
intrinsic_key = ~"llvm.memset.p0i8.i64";
}
}
let llintrinsicfn = *ccx.intrinsics.get(&intrinsic_key); let llintrinsicfn = *ccx.intrinsics.get(&intrinsic_key);
let llptr = PointerCast(cx, llptr, T_ptr(T_i8())); let llptr = PointerCast(cx, llptr, T_ptr(T_i8()));
@ -2512,7 +2503,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::node_id) -> ValueRef {
ast::foreign_item_const(*) => { ast::foreign_item_const(*) => {
let typ = ty::node_id_to_type(tcx, ni.id); let typ = ty::node_id_to_type(tcx, ni.id);
let ident = token::ident_to_str(&ni.ident); let ident = token::ident_to_str(&ni.ident);
let g = do str::as_c_str(*ident) |buf| { let g = do str::as_c_str(ident) |buf| {
unsafe { unsafe {
llvm::LLVMAddGlobal(ccx.llmod, llvm::LLVMAddGlobal(ccx.llmod,
type_of(ccx, typ), type_of(ccx, typ),
@ -2607,10 +2598,10 @@ pub fn trans_constant(ccx: @CrateContext, it: @ast::item) {
path_name(variant.node.name), path_name(variant.node.name),
path_name(special_idents::descrim) path_name(special_idents::descrim)
]); ]);
let s = @mangle_exported_name(ccx, p, ty::mk_int()); let s = mangle_exported_name(ccx, p, ty::mk_int()).to_managed();
let disr_val = vi[i].disr_val; let disr_val = vi[i].disr_val;
note_unique_llvm_symbol(ccx, s); note_unique_llvm_symbol(ccx, s);
let discrim_gvar = str::as_c_str(*s, |buf| { let discrim_gvar = str::as_c_str(s, |buf| {
unsafe { unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type, buf) llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type, buf)
} }
@ -2649,7 +2640,7 @@ pub fn p2i(ccx: @CrateContext, v: ValueRef) -> ValueRef {
} }
} }
pub fn declare_intrinsics(llmod: ModuleRef) -> HashMap<~str, ValueRef> { pub fn declare_intrinsics(llmod: ModuleRef) -> HashMap<&'static str, ValueRef> {
let T_memcpy32_args: ~[TypeRef] = let T_memcpy32_args: ~[TypeRef] =
~[T_ptr(T_i8()), T_ptr(T_i8()), T_i32(), T_i32(), T_i1()]; ~[T_ptr(T_i8()), T_ptr(T_i8()), T_i32(), T_i32(), T_i1()];
let T_memcpy64_args: ~[TypeRef] = let T_memcpy64_args: ~[TypeRef] =
@ -2783,80 +2774,80 @@ pub fn declare_intrinsics(llmod: ModuleRef) -> HashMap<~str, ValueRef> {
T_fn([T_i64()], T_i64())); T_fn([T_i64()], T_i64()));
let mut intrinsics = HashMap::new(); let mut intrinsics = HashMap::new();
intrinsics.insert(~"llvm.gcroot", gcroot); intrinsics.insert("llvm.gcroot", gcroot);
intrinsics.insert(~"llvm.gcread", gcread); intrinsics.insert("llvm.gcread", gcread);
intrinsics.insert(~"llvm.memcpy.p0i8.p0i8.i32", memcpy32); intrinsics.insert("llvm.memcpy.p0i8.p0i8.i32", memcpy32);
intrinsics.insert(~"llvm.memcpy.p0i8.p0i8.i64", memcpy64); intrinsics.insert("llvm.memcpy.p0i8.p0i8.i64", memcpy64);
intrinsics.insert(~"llvm.memmove.p0i8.p0i8.i32", memmove32); intrinsics.insert("llvm.memmove.p0i8.p0i8.i32", memmove32);
intrinsics.insert(~"llvm.memmove.p0i8.p0i8.i64", memmove64); intrinsics.insert("llvm.memmove.p0i8.p0i8.i64", memmove64);
intrinsics.insert(~"llvm.memset.p0i8.i32", memset32); intrinsics.insert("llvm.memset.p0i8.i32", memset32);
intrinsics.insert(~"llvm.memset.p0i8.i64", memset64); intrinsics.insert("llvm.memset.p0i8.i64", memset64);
intrinsics.insert(~"llvm.trap", trap); intrinsics.insert("llvm.trap", trap);
intrinsics.insert(~"llvm.frameaddress", frameaddress); intrinsics.insert("llvm.frameaddress", frameaddress);
intrinsics.insert(~"llvm.sqrt.f32", sqrtf32); intrinsics.insert("llvm.sqrt.f32", sqrtf32);
intrinsics.insert(~"llvm.sqrt.f64", sqrtf64); intrinsics.insert("llvm.sqrt.f64", sqrtf64);
intrinsics.insert(~"llvm.powi.f32", powif32); intrinsics.insert("llvm.powi.f32", powif32);
intrinsics.insert(~"llvm.powi.f64", powif64); intrinsics.insert("llvm.powi.f64", powif64);
intrinsics.insert(~"llvm.sin.f32", sinf32); intrinsics.insert("llvm.sin.f32", sinf32);
intrinsics.insert(~"llvm.sin.f64", sinf64); intrinsics.insert("llvm.sin.f64", sinf64);
intrinsics.insert(~"llvm.cos.f32", cosf32); intrinsics.insert("llvm.cos.f32", cosf32);
intrinsics.insert(~"llvm.cos.f64", cosf64); intrinsics.insert("llvm.cos.f64", cosf64);
intrinsics.insert(~"llvm.pow.f32", powf32); intrinsics.insert("llvm.pow.f32", powf32);
intrinsics.insert(~"llvm.pow.f64", powf64); intrinsics.insert("llvm.pow.f64", powf64);
intrinsics.insert(~"llvm.exp.f32", expf32); intrinsics.insert("llvm.exp.f32", expf32);
intrinsics.insert(~"llvm.exp.f64", expf64); intrinsics.insert("llvm.exp.f64", expf64);
intrinsics.insert(~"llvm.exp2.f32", exp2f32); intrinsics.insert("llvm.exp2.f32", exp2f32);
intrinsics.insert(~"llvm.exp2.f64", exp2f64); intrinsics.insert("llvm.exp2.f64", exp2f64);
intrinsics.insert(~"llvm.log.f32", logf32); intrinsics.insert("llvm.log.f32", logf32);
intrinsics.insert(~"llvm.log.f64", logf64); intrinsics.insert("llvm.log.f64", logf64);
intrinsics.insert(~"llvm.log10.f32", log10f32); intrinsics.insert("llvm.log10.f32", log10f32);
intrinsics.insert(~"llvm.log10.f64", log10f64); intrinsics.insert("llvm.log10.f64", log10f64);
intrinsics.insert(~"llvm.log2.f32", log2f32); intrinsics.insert("llvm.log2.f32", log2f32);
intrinsics.insert(~"llvm.log2.f64", log2f64); intrinsics.insert("llvm.log2.f64", log2f64);
intrinsics.insert(~"llvm.fma.f32", fmaf32); intrinsics.insert("llvm.fma.f32", fmaf32);
intrinsics.insert(~"llvm.fma.f64", fmaf64); intrinsics.insert("llvm.fma.f64", fmaf64);
intrinsics.insert(~"llvm.fabs.f32", fabsf32); intrinsics.insert("llvm.fabs.f32", fabsf32);
intrinsics.insert(~"llvm.fabs.f64", fabsf64); intrinsics.insert("llvm.fabs.f64", fabsf64);
intrinsics.insert(~"llvm.floor.f32", floorf32); intrinsics.insert("llvm.floor.f32", floorf32);
intrinsics.insert(~"llvm.floor.f64", floorf64); intrinsics.insert("llvm.floor.f64", floorf64);
intrinsics.insert(~"llvm.ceil.f32", ceilf32); intrinsics.insert("llvm.ceil.f32", ceilf32);
intrinsics.insert(~"llvm.ceil.f64", ceilf64); intrinsics.insert("llvm.ceil.f64", ceilf64);
intrinsics.insert(~"llvm.trunc.f32", truncf32); intrinsics.insert("llvm.trunc.f32", truncf32);
intrinsics.insert(~"llvm.trunc.f64", truncf64); intrinsics.insert("llvm.trunc.f64", truncf64);
intrinsics.insert(~"llvm.ctpop.i8", ctpop8); intrinsics.insert("llvm.ctpop.i8", ctpop8);
intrinsics.insert(~"llvm.ctpop.i16", ctpop16); intrinsics.insert("llvm.ctpop.i16", ctpop16);
intrinsics.insert(~"llvm.ctpop.i32", ctpop32); intrinsics.insert("llvm.ctpop.i32", ctpop32);
intrinsics.insert(~"llvm.ctpop.i64", ctpop64); intrinsics.insert("llvm.ctpop.i64", ctpop64);
intrinsics.insert(~"llvm.ctlz.i8", ctlz8); intrinsics.insert("llvm.ctlz.i8", ctlz8);
intrinsics.insert(~"llvm.ctlz.i16", ctlz16); intrinsics.insert("llvm.ctlz.i16", ctlz16);
intrinsics.insert(~"llvm.ctlz.i32", ctlz32); intrinsics.insert("llvm.ctlz.i32", ctlz32);
intrinsics.insert(~"llvm.ctlz.i64", ctlz64); intrinsics.insert("llvm.ctlz.i64", ctlz64);
intrinsics.insert(~"llvm.cttz.i8", cttz8); intrinsics.insert("llvm.cttz.i8", cttz8);
intrinsics.insert(~"llvm.cttz.i16", cttz16); intrinsics.insert("llvm.cttz.i16", cttz16);
intrinsics.insert(~"llvm.cttz.i32", cttz32); intrinsics.insert("llvm.cttz.i32", cttz32);
intrinsics.insert(~"llvm.cttz.i64", cttz64); intrinsics.insert("llvm.cttz.i64", cttz64);
intrinsics.insert(~"llvm.bswap.i16", bswap16); intrinsics.insert("llvm.bswap.i16", bswap16);
intrinsics.insert(~"llvm.bswap.i32", bswap32); intrinsics.insert("llvm.bswap.i32", bswap32);
intrinsics.insert(~"llvm.bswap.i64", bswap64); intrinsics.insert("llvm.bswap.i64", bswap64);
return intrinsics; return intrinsics;
} }
pub fn declare_dbg_intrinsics(llmod: ModuleRef, pub fn declare_dbg_intrinsics(llmod: ModuleRef,
intrinsics: &mut HashMap<~str, ValueRef>) { intrinsics: &mut HashMap<&'static str, ValueRef>) {
let declare = let declare =
decl_cdecl_fn(llmod, "llvm.dbg.declare", decl_cdecl_fn(llmod, "llvm.dbg.declare",
T_fn([T_metadata(), T_metadata()], T_void())); T_fn([T_metadata(), T_metadata()], T_void()));
let value = let value =
decl_cdecl_fn(llmod, "llvm.dbg.value", decl_cdecl_fn(llmod, "llvm.dbg.value",
T_fn([T_metadata(), T_i64(), T_metadata()], T_void())); T_fn([T_metadata(), T_i64(), T_metadata()], T_void()));
intrinsics.insert(~"llvm.dbg.declare", declare); intrinsics.insert("llvm.dbg.declare", declare);
intrinsics.insert(~"llvm.dbg.value", value); intrinsics.insert("llvm.dbg.value", value);
} }
pub fn trap(bcx: block) { pub fn trap(bcx: block) {
let v: ~[ValueRef] = ~[]; let v: ~[ValueRef] = ~[];
match bcx.ccx().intrinsics.find(&~"llvm.trap") { match bcx.ccx().intrinsics.find(& &"llvm.trap") {
Some(&x) => { Call(bcx, x, v); }, Some(&x) => { Call(bcx, x, v); },
_ => bcx.sess().bug("unbound llvm.trap in trap") _ => bcx.sess().bug("unbound llvm.trap in trap")
} }
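The doubled ampersand above is the new lookup idiom for the intrinsics table: its key type is now &'static str, and find/get borrow the key, so callers pass a reference to a static string slice. A minimal sketch, assuming the same HashMap<&'static str, ValueRef> and imports as the surrounding module:

    // Sketch only; ValueRef and HashMap come from the surrounding module.
    fn find_trap(intrinsics: &HashMap<&'static str, ValueRef>) -> Option<ValueRef> {
        // With key type K = &'static str, find expects &K, i.e. & &'static str.
        match intrinsics.find(& &"llvm.trap") {
            Some(&x) => Some(x),   // ValueRef is cheap to copy out of the table
            None => None
        }
    }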
@ -2891,7 +2882,7 @@ pub fn create_module_map(ccx: @CrateContext) -> ValueRef {
lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage); lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage);
let mut elts: ~[ValueRef] = ~[]; let mut elts: ~[ValueRef] = ~[];
for ccx.module_data.each |key, &val| { for ccx.module_data.each |key, &val| {
let elt = C_struct([p2i(ccx, C_cstr(ccx, @/*bad*/ copy *key)), let elt = C_struct([p2i(ccx, C_cstr(ccx, /* bad */key.to_managed())),
p2i(ccx, val)]); p2i(ccx, val)]);
elts.push(elt); elts.push(elt);
} }
@ -2934,9 +2925,10 @@ pub fn fill_crate_map(ccx: @CrateContext, map: ValueRef) {
let cstore = ccx.sess.cstore; let cstore = ccx.sess.cstore;
while cstore::have_crate_data(cstore, i) { while cstore::have_crate_data(cstore, i) {
let cdata = cstore::get_crate_data(cstore, i); let cdata = cstore::get_crate_data(cstore, i);
let nm = ~"_rust_crate_map_" + *cdata.name + let nm = fmt!("_rust_crate_map_%s_%s_%s",
"_" + *cstore::get_crate_vers(cstore, i) + cdata.name,
"_" + *cstore::get_crate_hash(cstore, i); cstore::get_crate_vers(cstore, i),
cstore::get_crate_hash(cstore, i));
let cr = str::as_c_str(nm, |buf| { let cr = str::as_c_str(nm, |buf| {
unsafe { unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type, buf) llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type, buf)
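The crate-map symbol is now built with fmt! in a single pass instead of chained ~str concatenation with copies. A sketch with made-up metadata values standing in for cdata.name and the cstore version/hash (all @str in this commit):

    // Hypothetical stand-ins; the real values come from cstore.
    let name = @"std";
    let vers = @"0.7-pre";
    let hash = @"0123abcd";
    let nm = fmt!("_rust_crate_map_%s_%s_%s", name, vers, hash);
    assert!(nm == ~"_rust_crate_map_std_0.7-pre_0123abcd");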

@ -165,7 +165,7 @@ pub struct CrateContext {
td: TargetData, td: TargetData,
tn: @TypeNames, tn: @TypeNames,
externs: ExternMap, externs: ExternMap,
intrinsics: HashMap<~str, ValueRef>, intrinsics: HashMap<&'static str, ValueRef>,
item_vals: @mut HashMap<ast::node_id, ValueRef>, item_vals: @mut HashMap<ast::node_id, ValueRef>,
exp_map2: resolve::ExportMap2, exp_map2: resolve::ExportMap2,
reachable: reachable::map, reachable: reachable::map,
@ -173,7 +173,7 @@ pub struct CrateContext {
link_meta: LinkMeta, link_meta: LinkMeta,
enum_sizes: @mut HashMap<ty::t, uint>, enum_sizes: @mut HashMap<ty::t, uint>,
discrims: @mut HashMap<ast::def_id, ValueRef>, discrims: @mut HashMap<ast::def_id, ValueRef>,
discrim_symbols: @mut HashMap<ast::node_id, @~str>, discrim_symbols: @mut HashMap<ast::node_id, @str>,
tydescs: @mut HashMap<ty::t, @mut tydesc_info>, tydescs: @mut HashMap<ty::t, @mut tydesc_info>,
// Set when running emit_tydescs to enforce that no more tydescs are // Set when running emit_tydescs to enforce that no more tydescs are
// created. // created.
@ -188,7 +188,7 @@ pub struct CrateContext {
// Cache generated vtables // Cache generated vtables
vtables: @mut HashMap<mono_id, ValueRef>, vtables: @mut HashMap<mono_id, ValueRef>,
// Cache of constant strings, // Cache of constant strings,
const_cstr_cache: @mut HashMap<@~str, ValueRef>, const_cstr_cache: @mut HashMap<@str, ValueRef>,
// Reverse-direction for const ptrs cast from globals. // Reverse-direction for const ptrs cast from globals.
// Key is an int, cast from a ValueRef holding a *T, // Key is an int, cast from a ValueRef holding a *T,
@ -215,7 +215,7 @@ pub struct CrateContext {
symbol_hasher: @mut hash::State, symbol_hasher: @mut hash::State,
type_hashcodes: @mut HashMap<ty::t, @str>, type_hashcodes: @mut HashMap<ty::t, @str>,
type_short_names: @mut HashMap<ty::t, ~str>, type_short_names: @mut HashMap<ty::t, ~str>,
all_llvm_symbols: @mut HashSet<@~str>, all_llvm_symbols: @mut HashSet<@str>,
tcx: ty::ctxt, tcx: ty::ctxt,
maps: astencode::Maps, maps: astencode::Maps,
stats: @mut Stats, stats: @mut Stats,
@ -1176,14 +1176,14 @@ pub fn C_u8(i: uint) -> ValueRef {
// This is a 'c-like' raw string, which differs from // This is a 'c-like' raw string, which differs from
// our boxed-and-length-annotated strings. // our boxed-and-length-annotated strings.
pub fn C_cstr(cx: @CrateContext, s: @~str) -> ValueRef { pub fn C_cstr(cx: @CrateContext, s: @str) -> ValueRef {
unsafe { unsafe {
match cx.const_cstr_cache.find(&s) { match cx.const_cstr_cache.find(&s) {
Some(&llval) => return llval, Some(&llval) => return llval,
None => () None => ()
} }
let sc = do str::as_c_str(*s) |buf| { let sc = do str::as_c_str(s) |buf| {
llvm::LLVMConstStringInContext(cx.llcx, buf, s.len() as c_uint, llvm::LLVMConstStringInContext(cx.llcx, buf, s.len() as c_uint,
False) False)
}; };
@ -1202,7 +1202,7 @@ pub fn C_cstr(cx: @CrateContext, s: @~str) -> ValueRef {
// NB: Do not use `do_spill_noroot` to make this into a constant string, or // NB: Do not use `do_spill_noroot` to make this into a constant string, or
// you will be kicked off fast isel. See issue #4352 for an example of this. // you will be kicked off fast isel. See issue #4352 for an example of this.
pub fn C_estr_slice(cx: @CrateContext, s: @~str) -> ValueRef { pub fn C_estr_slice(cx: @CrateContext, s: @str) -> ValueRef {
unsafe { unsafe {
let len = s.len(); let len = s.len();
let cs = llvm::LLVMConstPointerCast(C_cstr(cx, s), T_ptr(T_i8())); let cs = llvm::LLVMConstPointerCast(C_cstr(cx, s), T_ptr(T_i8()));
@ -1441,7 +1441,7 @@ pub fn path_str(sess: session::Session, p: &[path_elt]) -> ~str {
ast_map::path_name(s) | ast_map::path_mod(s) => { ast_map::path_name(s) | ast_map::path_mod(s) => {
if first { first = false; } if first { first = false; }
else { r += "::"; } else { r += "::"; }
r += *sess.str_of(s); r += sess.str_of(s);
} }
} }
} }
@ -1564,7 +1564,7 @@ pub fn dummy_substs(tps: ~[ty::t]) -> ty::substs {
pub fn filename_and_line_num_from_span(bcx: block, pub fn filename_and_line_num_from_span(bcx: block,
span: span) -> (ValueRef, ValueRef) { span: span) -> (ValueRef, ValueRef) {
let loc = bcx.sess().parse_sess.cm.lookup_char_pos(span.lo); let loc = bcx.sess().parse_sess.cm.lookup_char_pos(span.lo);
let filename_cstr = C_cstr(bcx.ccx(), @/*bad*/copy loc.file.name); let filename_cstr = C_cstr(bcx.ccx(), loc.file.name);
let filename = build::PointerCast(bcx, filename_cstr, T_ptr(T_i8())); let filename = build::PointerCast(bcx, filename_cstr, T_ptr(T_i8()));
let line = C_int(bcx.ccx(), loc.line as int); let line = C_int(bcx.ccx(), loc.line as int);
(filename, line) (filename, line)
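Passing loc.file.name straight through works because the codemap filename is @str now, and @str compares and hashes by content, so it can key the const_cstr_cache without the old @/*bad*/ copy. A tiny sketch of the property relied on (the filename is illustrative):

    // Two @str values with equal contents compare equal however they were made,
    // so HashMap<@str, ValueRef> lookups in C_cstr hit the cache.
    let literal: @str = @"tmp.rs";
    let converted: @str = (~"tmp.rs").to_managed();
    assert!(literal == converted);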

@ -54,12 +54,12 @@ pub fn const_lit(cx: @CrateContext, e: @ast::expr, lit: ast::lit)
ty_to_str(cx.tcx, lit_int_ty))) ty_to_str(cx.tcx, lit_int_ty)))
} }
} }
ast::lit_float(fs, t) => C_floating(/*bad*/copy *fs, T_float_ty(cx, t)), ast::lit_float(fs, t) => C_floating(fs, T_float_ty(cx, t)),
ast::lit_float_unsuffixed(fs) => { ast::lit_float_unsuffixed(fs) => {
let lit_float_ty = ty::node_id_to_type(cx.tcx, e.id); let lit_float_ty = ty::node_id_to_type(cx.tcx, e.id);
match ty::get(lit_float_ty).sty { match ty::get(lit_float_ty).sty {
ty::ty_float(t) => { ty::ty_float(t) => {
C_floating(/*bad*/copy *fs, T_float_ty(cx, t)) C_floating(fs, T_float_ty(cx, t))
} }
_ => { _ => {
cx.sess.span_bug(lit.span, cx.sess.span_bug(lit.span,

@ -348,13 +348,13 @@ pub fn trans_fail_expr(bcx: block,
ppaux::ty_to_str(tcx, arg_datum.ty)); ppaux::ty_to_str(tcx, arg_datum.ty));
} }
} }
_ => trans_fail(bcx, sp_opt, @~"explicit failure") _ => trans_fail(bcx, sp_opt, @"explicit failure")
} }
} }
pub fn trans_fail(bcx: block, pub fn trans_fail(bcx: block,
sp_opt: Option<span>, sp_opt: Option<span>,
fail_str: @~str) fail_str: @str)
-> block { -> block {
let _icx = bcx.insn_ctxt("trans_fail"); let _icx = bcx.insn_ctxt("trans_fail");
let V_fail_str = C_cstr(bcx.ccx(), fail_str); let V_fail_str = C_cstr(bcx.ccx(), fail_str);
@ -371,11 +371,11 @@ fn trans_fail_value(bcx: block,
Some(sp) => { Some(sp) => {
let sess = bcx.sess(); let sess = bcx.sess();
let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo); let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo);
(C_cstr(bcx.ccx(), @/*bad*/ copy loc.file.name), (C_cstr(bcx.ccx(), loc.file.name),
loc.line as int) loc.line as int)
} }
None => { None => {
(C_cstr(bcx.ccx(), @~"<runtime>"), 0) (C_cstr(bcx.ccx(), @"<runtime>"), 0)
} }
}; };
let V_str = PointerCast(bcx, V_fail_str, T_ptr(T_i8())); let V_str = PointerCast(bcx, V_fail_str, T_ptr(T_i8()));

@ -316,7 +316,7 @@ fn create_block(cx: block) -> @Metadata<BlockMetadata> {
None => create_function(cx.fcx).node, None => create_function(cx.fcx).node,
Some(bcx) => create_block(bcx).node Some(bcx) => create_block(bcx).node
}; };
let file_node = create_file(cx.ccx(), fname); let file_node = create_file(cx.ccx(), /* bad */ fname.to_owned());
let unique_id = match cache.find(&LexicalBlockTag) { let unique_id = match cache.find(&LexicalBlockTag) {
option::Some(v) => v.len() as int, option::Some(v) => v.len() as int,
option::None => 0 option::None => 0
@ -383,7 +383,7 @@ fn create_basic_type(cx: @CrateContext, t: ty::t, span: span)
}; };
let fname = filename_from_span(cx, span); let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname); let file_node = create_file(cx, fname.to_owned());
let cu_node = create_compile_unit(cx); let cu_node = create_compile_unit(cx);
let (size, align) = size_and_align_of(cx, t); let (size, align) = size_and_align_of(cx, t);
let lldata = ~[lltag(tg), let lldata = ~[lltag(tg),
@ -420,7 +420,7 @@ fn create_pointer_type(cx: @CrateContext, t: ty::t, span: span,
}*/ }*/
let (size, align) = size_and_align_of(cx, t); let (size, align) = size_and_align_of(cx, t);
let fname = filename_from_span(cx, span); let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname); let file_node = create_file(cx, fname.to_owned());
//let cu_node = create_compile_unit(cx, fname); //let cu_node = create_compile_unit(cx, fname);
let name = ty_to_str(cx.tcx, t); let name = ty_to_str(cx.tcx, t);
let llnode = create_derived_type(tg, file_node.node, name, 0, size * 8, let llnode = create_derived_type(tg, file_node.node, name, 0, size * 8,
@ -438,7 +438,7 @@ fn create_pointer_type(cx: @CrateContext, t: ty::t, span: span,
struct StructCtxt { struct StructCtxt {
file: ValueRef, file: ValueRef,
name: @~str, name: @str,
line: int, line: int,
members: ~[ValueRef], members: ~[ValueRef],
total_size: int, total_size: int,
@ -447,7 +447,7 @@ struct StructCtxt {
fn finish_structure(cx: @mut StructCtxt) -> ValueRef { fn finish_structure(cx: @mut StructCtxt) -> ValueRef {
return create_composite_type(StructureTypeTag, return create_composite_type(StructureTypeTag,
*cx.name, cx.name,
cx.file, cx.file,
cx.line, cx.line,
cx.total_size, cx.total_size,
@ -457,7 +457,7 @@ fn finish_structure(cx: @mut StructCtxt) -> ValueRef {
Some(/*bad*/copy cx.members)); Some(/*bad*/copy cx.members));
} }
fn create_structure(file: @Metadata<FileMetadata>, name: @~str, line: int) fn create_structure(file: @Metadata<FileMetadata>, name: @str, line: int)
-> @mut StructCtxt { -> @mut StructCtxt {
let cx = @mut StructCtxt { let cx = @mut StructCtxt {
file: file.node, file: file.node,
@ -501,14 +501,14 @@ fn add_member(cx: @mut StructCtxt,
fn create_struct(cx: @CrateContext, t: ty::t, fields: ~[ty::field], fn create_struct(cx: @CrateContext, t: ty::t, fields: ~[ty::field],
span: span) -> @Metadata<TyDescMetadata> { span: span) -> @Metadata<TyDescMetadata> {
let fname = filename_from_span(cx, span); let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname); let file_node = create_file(cx, fname.to_owned());
let scx = create_structure(file_node, @ty_to_str(cx.tcx, t), let scx = create_structure(file_node, (ty_to_str(cx.tcx, t)).to_managed(),
line_from_span(cx.sess.codemap, span) as int); line_from_span(cx.sess.codemap, span) as int);
for fields.each |field| { for fields.each |field| {
let field_t = field.mt.ty; let field_t = field.mt.ty;
let ty_md = create_ty(cx, field_t, span); let ty_md = create_ty(cx, field_t, span);
let (size, align) = size_and_align_of(cx, field_t); let (size, align) = size_and_align_of(cx, field_t);
add_member(scx, *cx.sess.str_of(field.ident), add_member(scx, cx.sess.str_of(field.ident),
line_from_span(cx.sess.codemap, span) as int, line_from_span(cx.sess.codemap, span) as int,
size as int, align as int, ty_md.node); size as int, align as int, ty_md.node);
} }
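The debuginfo names above are produced as owned strings (ty_to_str, fmt!) and converted once with to_managed(), since create_structure and StructCtxt.name now take @str. A brief sketch with a stand-in for the ty_to_str result:

    // `pretty` stands in for ty_to_str(cx.tcx, t), which returns ~str.
    let pretty: ~str = ~"Foo<int>";
    let name: @str = pretty.to_managed();                      // stored in StructCtxt.name
    let box_name: @str = fmt!("box<%s>", pretty).to_managed(); // same pattern as create_boxed_type
    assert!(box_name == @"box<Foo<int>>");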
@ -524,7 +524,7 @@ fn create_struct(cx: @CrateContext, t: ty::t, fields: ~[ty::field],
fn create_tuple(cx: @CrateContext, t: ty::t, elements: &[ty::t], span: span) fn create_tuple(cx: @CrateContext, t: ty::t, elements: &[ty::t], span: span)
-> @Metadata<TyDescMetadata> { -> @Metadata<TyDescMetadata> {
let fname = filename_from_span(cx, span); let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname); let file_node = create_file(cx, fname.to_owned());
let scx = create_structure(file_node, let scx = create_structure(file_node,
cx.sess.str_of( cx.sess.str_of(
((/*bad*/copy cx.dbg_cx).get().names) ((/*bad*/copy cx.dbg_cx).get().names)
@ -566,12 +566,12 @@ fn create_boxed_type(cx: @CrateContext, contents: ty::t,
option::None {} option::None {}
}*/ }*/
let fname = filename_from_span(cx, span); let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname); let file_node = create_file(cx, fname.to_owned());
//let cu_node = create_compile_unit_metadata(cx, fname); //let cu_node = create_compile_unit_metadata(cx, fname);
let int_t = ty::mk_int(); let int_t = ty::mk_int();
let refcount_type = create_basic_type(cx, int_t, span); let refcount_type = create_basic_type(cx, int_t, span);
let name = ty_to_str(cx.tcx, contents); let name = ty_to_str(cx.tcx, contents);
let scx = create_structure(file_node, @fmt!("box<%s>", name), 0); let scx = create_structure(file_node, (fmt!("box<%s>", name)).to_managed(), 0);
add_member(scx, "refcnt", 0, sys::size_of::<uint>() as int, add_member(scx, "refcnt", 0, sys::size_of::<uint>() as int,
sys::min_align_of::<uint>() as int, refcount_type.node); sys::min_align_of::<uint>() as int, refcount_type.node);
// the tydesc and other pointers should be irrelevant to the // the tydesc and other pointers should be irrelevant to the
@ -628,7 +628,7 @@ fn create_fixed_vec(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t,
len: int, span: span) -> @Metadata<TyDescMetadata> { len: int, span: span) -> @Metadata<TyDescMetadata> {
let t_md = create_ty(cx, elem_t, span); let t_md = create_ty(cx, elem_t, span);
let fname = filename_from_span(cx, span); let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname); let file_node = create_file(cx, fname.to_owned());
let (size, align) = size_and_align_of(cx, elem_t); let (size, align) = size_and_align_of(cx, elem_t);
let subrange = llmdnode([lltag(SubrangeTag), lli64(0), lli64(len - 1)]); let subrange = llmdnode([lltag(SubrangeTag), lli64(0), lli64(len - 1)]);
let name = fmt!("[%s]", ty_to_str(cx.tcx, elem_t)); let name = fmt!("[%s]", ty_to_str(cx.tcx, elem_t));
@ -647,10 +647,10 @@ fn create_boxed_vec(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t,
vec_ty_span: codemap::span) vec_ty_span: codemap::span)
-> @Metadata<TyDescMetadata> { -> @Metadata<TyDescMetadata> {
let fname = filename_from_span(cx, vec_ty_span); let fname = filename_from_span(cx, vec_ty_span);
let file_node = create_file(cx, fname); let file_node = create_file(cx, fname.to_owned());
let elem_ty_md = create_ty(cx, elem_t, vec_ty_span); let elem_ty_md = create_ty(cx, elem_t, vec_ty_span);
let vec_scx = create_structure(file_node, let vec_scx = create_structure(file_node,
@/*bad*/ copy ty_to_str(cx.tcx, vec_t), 0); ty_to_str(cx.tcx, vec_t).to_managed(), 0);
let size_t_type = create_basic_type(cx, ty::mk_uint(), vec_ty_span); let size_t_type = create_basic_type(cx, ty::mk_uint(), vec_ty_span);
add_member(vec_scx, "fill", 0, sys::size_of::<libc::size_t>() as int, add_member(vec_scx, "fill", 0, sys::size_of::<libc::size_t>() as int,
sys::min_align_of::<libc::size_t>() as int, size_t_type.node); sys::min_align_of::<libc::size_t>() as int, size_t_type.node);
@ -673,7 +673,7 @@ fn create_boxed_vec(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t,
} }
}; };
let box_scx = create_structure(file_node, @fmt!("box<%s>", name), 0); let box_scx = create_structure(file_node, (fmt!("box<%s>", name)).to_managed(), 0);
let int_t = ty::mk_int(); let int_t = ty::mk_int();
let refcount_type = create_basic_type(cx, int_t, vec_ty_span); let refcount_type = create_basic_type(cx, int_t, vec_ty_span);
add_member(box_scx, "refcnt", 0, sys::size_of::<uint>() as int, add_member(box_scx, "refcnt", 0, sys::size_of::<uint>() as int,
@ -698,11 +698,11 @@ fn create_boxed_vec(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t,
fn create_vec_slice(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t, span: span) fn create_vec_slice(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t, span: span)
-> @Metadata<TyDescMetadata> { -> @Metadata<TyDescMetadata> {
let fname = filename_from_span(cx, span); let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname); let file_node = create_file(cx, fname.to_owned());
let elem_ty_md = create_ty(cx, elem_t, span); let elem_ty_md = create_ty(cx, elem_t, span);
let uint_type = create_basic_type(cx, ty::mk_uint(), span); let uint_type = create_basic_type(cx, ty::mk_uint(), span);
let elem_ptr = create_pointer_type(cx, elem_t, span, elem_ty_md); let elem_ptr = create_pointer_type(cx, elem_t, span, elem_ty_md);
let scx = create_structure(file_node, @ty_to_str(cx.tcx, vec_t), 0); let scx = create_structure(file_node, ty_to_str(cx.tcx, vec_t).to_managed(), 0);
let (_, ptr_size, ptr_align) = voidptr(); let (_, ptr_size, ptr_align) = voidptr();
add_member(scx, "vec", 0, ptr_size, ptr_align, elem_ptr.node); add_member(scx, "vec", 0, ptr_size, ptr_align, elem_ptr.node);
add_member(scx, "length", 0, sys::size_of::<uint>() as int, add_member(scx, "length", 0, sys::size_of::<uint>() as int,
@ -720,7 +720,7 @@ fn create_vec_slice(cx: @CrateContext, vec_t: ty::t, elem_t: ty::t, span: span)
fn create_fn_ty(cx: @CrateContext, fn_ty: ty::t, inputs: ~[ty::t], output: ty::t, fn create_fn_ty(cx: @CrateContext, fn_ty: ty::t, inputs: ~[ty::t], output: ty::t,
span: span) -> @Metadata<TyDescMetadata> { span: span) -> @Metadata<TyDescMetadata> {
let fname = filename_from_span(cx, span); let fname = filename_from_span(cx, span);
let file_node = create_file(cx, fname); let file_node = create_file(cx, fname.to_owned());
let (vp, _, _) = voidptr(); let (vp, _, _) = voidptr();
let output_md = create_ty(cx, output, span); let output_md = create_ty(cx, output, span);
let output_ptr_md = create_pointer_type(cx, output, span, output_md); let output_ptr_md = create_pointer_type(cx, output, span, output_md);
@ -817,8 +817,8 @@ fn create_ty(cx: @CrateContext, t: ty::t, span: span)
} }
} }
fn filename_from_span(cx: @CrateContext, sp: codemap::span) -> ~str { fn filename_from_span(cx: @CrateContext, sp: codemap::span) -> @str {
/*bad*/copy cx.sess.codemap.lookup_char_pos(sp.lo).file.name cx.sess.codemap.lookup_char_pos(sp.lo).file.name
} }
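filename_from_span now hands back the codemap's @str directly instead of copying; the create_file call sites above convert with to_owned() because create_file still appears to take an owned ~str here. A sketch of that boundary under that assumption:

    // Assumption: create_file(cx, name) still wants ~str, as the call sites above suggest.
    fn file_node_for(cx: @CrateContext, sp: codemap::span) {
        let fname: @str = filename_from_span(cx, sp);
        // /* bad */ one owned copy remains until create_file itself moves to @str.
        let _file_node = create_file(cx, fname.to_owned());
    }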
fn create_var(type_tag: int, context: ValueRef, name: &str, file: ValueRef, fn create_var(type_tag: int, context: ValueRef, name: &str, file: ValueRef,
@ -853,12 +853,12 @@ pub fn create_local_var(bcx: block, local: @ast::local)
let loc = cx.sess.codemap.lookup_char_pos(local.span.lo); let loc = cx.sess.codemap.lookup_char_pos(local.span.lo);
let ty = node_id_type(bcx, local.node.id); let ty = node_id_type(bcx, local.node.id);
let tymd = create_ty(cx, ty, local.node.ty.span); let tymd = create_ty(cx, ty, local.node.ty.span);
let filemd = create_file(cx, /*bad*/copy loc.file.name); let filemd = create_file(cx, /*bad*/ loc.file.name.to_owned());
let context = match bcx.parent { let context = match bcx.parent {
None => create_function(bcx.fcx).node, None => create_function(bcx.fcx).node,
Some(_) => create_block(bcx).node Some(_) => create_block(bcx).node
}; };
let mdnode = create_var(tg, context, *cx.sess.str_of(name), let mdnode = create_var(tg, context, cx.sess.str_of(name),
filemd.node, loc.line as int, tymd.node); filemd.node, loc.line as int, tymd.node);
let mdval = @Metadata { let mdval = @Metadata {
node: mdnode, node: mdnode,
@ -878,7 +878,7 @@ pub fn create_local_var(bcx: block, local: @ast::local)
} }
}; };
let declargs = ~[llmdnode([llptr]), mdnode]; let declargs = ~[llmdnode([llptr]), mdnode];
trans::build::Call(bcx, *cx.intrinsics.get(&~"llvm.dbg.declare"), trans::build::Call(bcx, *cx.intrinsics.get(&("llvm.dbg.declare")),
declargs); declargs);
return mdval; return mdval;
} }
@ -896,12 +896,12 @@ pub fn create_arg(bcx: block, arg: ast::arg, sp: span)
} }
let loc = cx.sess.codemap.lookup_char_pos(sp.lo); let loc = cx.sess.codemap.lookup_char_pos(sp.lo);
if loc.file.name == ~"<intrinsic>" { if "<intrinsic>" == loc.file.name {
return None; return None;
} }
let ty = node_id_type(bcx, arg.id); let ty = node_id_type(bcx, arg.id);
let tymd = create_ty(cx, ty, arg.ty.span); let tymd = create_ty(cx, ty, arg.ty.span);
let filemd = create_file(cx, /*bad*/copy loc.file.name); let filemd = create_file(cx, /* bad */ loc.file.name.to_owned());
let context = create_function(bcx.fcx); let context = create_function(bcx.fcx);
match arg.pat.node { match arg.pat.node {
@ -910,7 +910,7 @@ pub fn create_arg(bcx: block, arg: ast::arg, sp: span)
let mdnode = create_var( let mdnode = create_var(
tg, tg,
context.node, context.node,
*cx.sess.str_of(*path.idents.last()), cx.sess.str_of(*path.idents.last()),
filemd.node, filemd.node,
loc.line as int, loc.line as int,
tymd.node tymd.node
@ -927,7 +927,7 @@ pub fn create_arg(bcx: block, arg: ast::arg, sp: span)
let llptr = fcx.llargs.get_copy(&arg.id); let llptr = fcx.llargs.get_copy(&arg.id);
let declargs = ~[llmdnode([llptr]), mdnode]; let declargs = ~[llmdnode([llptr]), mdnode];
trans::build::Call(bcx, trans::build::Call(bcx,
*cx.intrinsics.get(&~"llvm.dbg.declare"), *cx.intrinsics.get(&("llvm.dbg.declare")),
declargs); declargs);
return Some(mdval); return Some(mdval);
} }
@ -1000,7 +1000,7 @@ pub fn create_function(fcx: fn_ctxt) -> @Metadata<SubProgramMetadata> {
} }
let loc = cx.sess.codemap.lookup_char_pos(sp.lo); let loc = cx.sess.codemap.lookup_char_pos(sp.lo);
let file_node = create_file(cx, copy loc.file.name).node; let file_node = create_file(cx, loc.file.name.to_owned()).node;
let ty_node = if cx.sess.opts.extra_debuginfo { let ty_node = if cx.sess.opts.extra_debuginfo {
match ret_ty.node { match ret_ty.node {
ast::ty_nil => llnull(), ast::ty_nil => llnull(),
@ -1017,9 +1017,9 @@ pub fn create_function(fcx: fn_ctxt) -> @Metadata<SubProgramMetadata> {
let fn_metadata = ~[lltag(SubprogramTag), let fn_metadata = ~[lltag(SubprogramTag),
llunused(), llunused(),
file_node, file_node,
llstr(*cx.sess.str_of(ident)), llstr(cx.sess.str_of(ident)),
//XXX fully-qualified C++ name: //XXX fully-qualified C++ name:
llstr(*cx.sess.str_of(ident)), llstr(cx.sess.str_of(ident)),
llstr(""), //XXX MIPS name????? llstr(""), //XXX MIPS name?????
file_node, file_node,
lli32(loc.line as int), lli32(loc.line as int),

@ -452,7 +452,7 @@ fn trans_to_datum_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
fn trans_rvalue_datum_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock { fn trans_rvalue_datum_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
let _icx = bcx.insn_ctxt("trans_rvalue_datum_unadjusted"); let _icx = bcx.insn_ctxt("trans_rvalue_datum_unadjusted");
trace_span!(bcx, expr.span, @shorten(bcx.expr_to_str(expr))); trace_span!(bcx, expr.span, shorten(bcx.expr_to_str(expr)));
match expr.node { match expr.node {
ast::expr_path(_) | ast::expr_self => { ast::expr_path(_) | ast::expr_self => {
@ -507,7 +507,7 @@ fn trans_rvalue_stmt_unadjusted(bcx: block, expr: @ast::expr) -> block {
return bcx; return bcx;
} }
trace_span!(bcx, expr.span, @shorten(bcx.expr_to_str(expr))); trace_span!(bcx, expr.span, shorten(bcx.expr_to_str(expr)));
match expr.node { match expr.node {
ast::expr_break(label_opt) => { ast::expr_break(label_opt) => {
@ -560,7 +560,7 @@ fn trans_rvalue_dps_unadjusted(bcx: block, expr: @ast::expr,
let _icx = bcx.insn_ctxt("trans_rvalue_dps_unadjusted"); let _icx = bcx.insn_ctxt("trans_rvalue_dps_unadjusted");
let tcx = bcx.tcx(); let tcx = bcx.tcx();
trace_span!(bcx, expr.span, @shorten(bcx.expr_to_str(expr))); trace_span!(bcx, expr.span, shorten(bcx.expr_to_str(expr)));
match expr.node { match expr.node {
ast::expr_paren(e) => { ast::expr_paren(e) => {
@ -821,7 +821,7 @@ fn trans_lvalue_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
debug!("trans_lvalue(expr=%s)", bcx.expr_to_str(expr)); debug!("trans_lvalue(expr=%s)", bcx.expr_to_str(expr));
let _indenter = indenter(); let _indenter = indenter();
trace_span!(bcx, expr.span, @shorten(bcx.expr_to_str(expr))); trace_span!(bcx, expr.span, shorten(bcx.expr_to_str(expr)));
return match expr.node { return match expr.node {
ast::expr_paren(e) => { ast::expr_paren(e) => {
@ -1703,6 +1703,6 @@ fn trans_assign_op(bcx: block,
return result_datum.copy_to_datum(bcx, DROP_EXISTING, dst_datum); return result_datum.copy_to_datum(bcx, DROP_EXISTING, dst_datum);
} }
fn shorten(x: ~str) -> ~str { fn shorten(x: &str) -> @str {
if x.char_len() > 60 { x.slice_chars(0, 60).to_owned() } else { x } (if x.char_len() > 60 {x.slice_chars(0, 60)} else {x}).to_managed()
} }
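shorten now borrows its input and returns a managed @str, so the trace_span! call sites above pass the expr_to_str result without boxing it first. Illustrative uses of the 60-character cut-off:

    // Short inputs come back whole; long ones are clipped to their first 60 chars.
    let short = "x + y";
    assert!(shorten(short) == short.to_managed());
    let long = "0123456789012345678901234567890123456789012345678901234567890123456789";
    assert!(shorten(long) == long.slice_chars(0, 60).to_managed());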

@ -55,7 +55,7 @@ fn abi_info(ccx: @CrateContext) -> @cabi::ABIInfo {
} }
} }
pub fn link_name(ccx: @CrateContext, i: @ast::foreign_item) -> @~str { pub fn link_name(ccx: @CrateContext, i: @ast::foreign_item) -> @str {
match attr::first_attr_value_str_by_name(i.attrs, "link_name") { match attr::first_attr_value_str_by_name(i.attrs, "link_name") {
None => ccx.sess.str_of(i.ident), None => ccx.sess.str_of(i.ident),
Some(ln) => ln, Some(ln) => ln,
@ -345,7 +345,7 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
} }
ast::foreign_item_const(*) => { ast::foreign_item_const(*) => {
let ident = token::ident_to_str(&foreign_item.ident); let ident = token::ident_to_str(&foreign_item.ident);
ccx.item_symbols.insert(foreign_item.id, copy *ident); ccx.item_symbols.insert(foreign_item.id, /* bad */ident.to_owned());
} }
} }
} }
@ -403,9 +403,9 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
} }
let lname = link_name(ccx, foreign_item); let lname = link_name(ccx, foreign_item);
let llbasefn = base_fn(ccx, *lname, tys, cc); let llbasefn = base_fn(ccx, lname, tys, cc);
// Name the shim function // Name the shim function
let shim_name = *lname + "__c_stack_shim"; let shim_name = fmt!("%s__c_stack_shim", lname);
build_shim_fn_(ccx, build_shim_fn_(ccx,
shim_name, shim_name,
llbasefn, llbasefn,
@ -433,12 +433,12 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
item: @ast::foreign_item, item: @ast::foreign_item,
tys: &ShimTypes, tys: &ShimTypes,
cc: lib::llvm::CallConv) { cc: lib::llvm::CallConv) {
debug!("build_direct_fn(%s)", *link_name(ccx, item)); debug!("build_direct_fn(%s)", link_name(ccx, item));
let fcx = new_fn_ctxt(ccx, ~[], decl, tys.fn_sig.output, None); let fcx = new_fn_ctxt(ccx, ~[], decl, tys.fn_sig.output, None);
let bcx = top_scope_block(fcx, None); let bcx = top_scope_block(fcx, None);
let lltop = bcx.llbb; let lltop = bcx.llbb;
let llbasefn = base_fn(ccx, *link_name(ccx, item), tys, cc); let llbasefn = base_fn(ccx, link_name(ccx, item), tys, cc);
let ty = ty::lookup_item_type(ccx.tcx, let ty = ty::lookup_item_type(ccx.tcx,
ast_util::local_def(item.id)).ty; ast_util::local_def(item.id)).ty;
let ret_ty = ty::ty_fn_ret(ty); let ret_ty = ty::ty_fn_ret(ty);
@ -460,12 +460,12 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
item: @ast::foreign_item, item: @ast::foreign_item,
tys: &ShimTypes, tys: &ShimTypes,
cc: lib::llvm::CallConv) { cc: lib::llvm::CallConv) {
debug!("build_fast_ffi_fn(%s)", *link_name(ccx, item)); debug!("build_fast_ffi_fn(%s)", link_name(ccx, item));
let fcx = new_fn_ctxt(ccx, ~[], decl, tys.fn_sig.output, None); let fcx = new_fn_ctxt(ccx, ~[], decl, tys.fn_sig.output, None);
let bcx = top_scope_block(fcx, None); let bcx = top_scope_block(fcx, None);
let lltop = bcx.llbb; let lltop = bcx.llbb;
let llbasefn = base_fn(ccx, *link_name(ccx, item), tys, cc); let llbasefn = base_fn(ccx, link_name(ccx, item), tys, cc);
set_no_inline(fcx.llfn); set_no_inline(fcx.llfn);
set_fixed_stack_segment(fcx.llfn); set_fixed_stack_segment(fcx.llfn);
let ty = ty::lookup_item_type(ccx.tcx, let ty = ty::lookup_item_type(ccx.tcx,
@ -553,7 +553,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
substs: @param_substs, substs: @param_substs,
attributes: &[ast::attribute], attributes: &[ast::attribute],
ref_id: Option<ast::node_id>) { ref_id: Option<ast::node_id>) {
debug!("trans_intrinsic(item.ident=%s)", *ccx.sess.str_of(item.ident)); debug!("trans_intrinsic(item.ident=%s)", ccx.sess.str_of(item.ident));
let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx, item.id)); let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx, item.id));
@ -574,8 +574,8 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let mut bcx = top_scope_block(fcx, None); let mut bcx = top_scope_block(fcx, None);
let lltop = bcx.llbb; let lltop = bcx.llbb;
let first_real_arg = fcx.arg_pos(0u); let first_real_arg = fcx.arg_pos(0u);
match *ccx.sess.str_of(item.ident) { match ccx.sess.str_of(item.ident).as_slice() {
~"atomic_cxchg" => { "atomic_cxchg" => {
let old = AtomicCmpXchg(bcx, let old = AtomicCmpXchg(bcx,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
@ -583,7 +583,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
SequentiallyConsistent); SequentiallyConsistent);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_cxchg_acq" => { "atomic_cxchg_acq" => {
let old = AtomicCmpXchg(bcx, let old = AtomicCmpXchg(bcx,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
@ -591,7 +591,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
Acquire); Acquire);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_cxchg_rel" => { "atomic_cxchg_rel" => {
let old = AtomicCmpXchg(bcx, let old = AtomicCmpXchg(bcx,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
@ -599,100 +599,100 @@ pub fn trans_intrinsic(ccx: @CrateContext,
Release); Release);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_load" => { "atomic_load" => {
let old = AtomicLoad(bcx, let old = AtomicLoad(bcx,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
SequentiallyConsistent); SequentiallyConsistent);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_load_acq" => { "atomic_load_acq" => {
let old = AtomicLoad(bcx, let old = AtomicLoad(bcx,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
Acquire); Acquire);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_store" => { "atomic_store" => {
AtomicStore(bcx, AtomicStore(bcx,
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
SequentiallyConsistent); SequentiallyConsistent);
} }
~"atomic_store_rel" => { "atomic_store_rel" => {
AtomicStore(bcx, AtomicStore(bcx,
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
Release); Release);
} }
~"atomic_xchg" => { "atomic_xchg" => {
let old = AtomicRMW(bcx, Xchg, let old = AtomicRMW(bcx, Xchg,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
SequentiallyConsistent); SequentiallyConsistent);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_xchg_acq" => { "atomic_xchg_acq" => {
let old = AtomicRMW(bcx, Xchg, let old = AtomicRMW(bcx, Xchg,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
Acquire); Acquire);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_xchg_rel" => { "atomic_xchg_rel" => {
let old = AtomicRMW(bcx, Xchg, let old = AtomicRMW(bcx, Xchg,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
Release); Release);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_xadd" => { "atomic_xadd" => {
let old = AtomicRMW(bcx, lib::llvm::Add, let old = AtomicRMW(bcx, lib::llvm::Add,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
SequentiallyConsistent); SequentiallyConsistent);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_xadd_acq" => { "atomic_xadd_acq" => {
let old = AtomicRMW(bcx, lib::llvm::Add, let old = AtomicRMW(bcx, lib::llvm::Add,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
Acquire); Acquire);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_xadd_rel" => { "atomic_xadd_rel" => {
let old = AtomicRMW(bcx, lib::llvm::Add, let old = AtomicRMW(bcx, lib::llvm::Add,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
Release); Release);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_xsub" => { "atomic_xsub" => {
let old = AtomicRMW(bcx, lib::llvm::Sub, let old = AtomicRMW(bcx, lib::llvm::Sub,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
SequentiallyConsistent); SequentiallyConsistent);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_xsub_acq" => { "atomic_xsub_acq" => {
let old = AtomicRMW(bcx, lib::llvm::Sub, let old = AtomicRMW(bcx, lib::llvm::Sub,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
Acquire); Acquire);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"atomic_xsub_rel" => { "atomic_xsub_rel" => {
let old = AtomicRMW(bcx, lib::llvm::Sub, let old = AtomicRMW(bcx, lib::llvm::Sub,
get_param(decl, first_real_arg), get_param(decl, first_real_arg),
get_param(decl, first_real_arg + 1u), get_param(decl, first_real_arg + 1u),
Release); Release);
Store(bcx, old, fcx.llretptr.get()); Store(bcx, old, fcx.llretptr.get());
} }
~"size_of" => { "size_of" => {
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
Store(bcx, C_uint(ccx, machine::llsize_of_real(ccx, lltp_ty)), Store(bcx, C_uint(ccx, machine::llsize_of_real(ccx, lltp_ty)),
fcx.llretptr.get()); fcx.llretptr.get());
} }
~"move_val" => { "move_val" => {
// Create a datum reflecting the value being moved. // Create a datum reflecting the value being moved.
// Use `appropriate_mode` so that the datum is by ref // Use `appropriate_mode` so that the datum is by ref
// if the value is non-immediate. Note that, with // if the value is non-immediate. Note that, with
@ -705,7 +705,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
bcx = src.move_to(bcx, DROP_EXISTING, bcx = src.move_to(bcx, DROP_EXISTING,
get_param(decl, first_real_arg)); get_param(decl, first_real_arg));
} }
~"move_val_init" => { "move_val_init" => {
// See comments for `"move_val"`. // See comments for `"move_val"`.
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
let mode = appropriate_mode(tp_ty); let mode = appropriate_mode(tp_ty);
@ -713,19 +713,19 @@ pub fn trans_intrinsic(ccx: @CrateContext,
ty: tp_ty, mode: mode}; ty: tp_ty, mode: mode};
bcx = src.move_to(bcx, INIT, get_param(decl, first_real_arg)); bcx = src.move_to(bcx, INIT, get_param(decl, first_real_arg));
} }
~"min_align_of" => { "min_align_of" => {
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
Store(bcx, C_uint(ccx, machine::llalign_of_min(ccx, lltp_ty)), Store(bcx, C_uint(ccx, machine::llalign_of_min(ccx, lltp_ty)),
fcx.llretptr.get()); fcx.llretptr.get());
} }
~"pref_align_of"=> { "pref_align_of"=> {
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
Store(bcx, C_uint(ccx, machine::llalign_of_pref(ccx, lltp_ty)), Store(bcx, C_uint(ccx, machine::llalign_of_pref(ccx, lltp_ty)),
fcx.llretptr.get()); fcx.llretptr.get());
} }
~"get_tydesc" => { "get_tydesc" => {
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
let static_ti = get_tydesc(ccx, tp_ty); let static_ti = get_tydesc(ccx, tp_ty);
glue::lazily_emit_all_tydesc_glue(ccx, static_ti); glue::lazily_emit_all_tydesc_glue(ccx, static_ti);
@ -735,18 +735,18 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let td = PointerCast(bcx, static_ti.tydesc, T_ptr(T_nil())); let td = PointerCast(bcx, static_ti.tydesc, T_ptr(T_nil()));
Store(bcx, td, fcx.llretptr.get()); Store(bcx, td, fcx.llretptr.get());
} }
~"init" => { "init" => {
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
if !ty::type_is_nil(tp_ty) { if !ty::type_is_nil(tp_ty) {
Store(bcx, C_null(lltp_ty), fcx.llretptr.get()); Store(bcx, C_null(lltp_ty), fcx.llretptr.get());
} }
} }
~"uninit" => { "uninit" => {
// Do nothing, this is effectively a no-op // Do nothing, this is effectively a no-op
} }
~"forget" => {} "forget" => {}
~"transmute" => { "transmute" => {
let (in_type, out_type) = (substs.tys[0], substs.tys[1]); let (in_type, out_type) = (substs.tys[0], substs.tys[1]);
let llintype = type_of::type_of(ccx, in_type); let llintype = type_of::type_of(ccx, in_type);
let llouttype = type_of::type_of(ccx, out_type); let llouttype = type_of::type_of(ccx, out_type);
@ -792,13 +792,13 @@ pub fn trans_intrinsic(ccx: @CrateContext,
call_memcpy(bcx, lldestptr, llsrcptr, llsize, 1); call_memcpy(bcx, lldestptr, llsrcptr, llsize, 1);
} }
} }
~"needs_drop" => { "needs_drop" => {
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
Store(bcx, Store(bcx,
C_bool(ty::type_needs_drop(ccx.tcx, tp_ty)), C_bool(ty::type_needs_drop(ccx.tcx, tp_ty)),
fcx.llretptr.get()); fcx.llretptr.get());
} }
~"visit_tydesc" => { "visit_tydesc" => {
let td = get_param(decl, first_real_arg); let td = get_param(decl, first_real_arg);
let visitor = get_param(decl, first_real_arg + 1u); let visitor = get_param(decl, first_real_arg + 1u);
//let llvisitorptr = alloca(bcx, val_ty(visitor)); //let llvisitorptr = alloca(bcx, val_ty(visitor));
@ -810,8 +810,8 @@ pub fn trans_intrinsic(ccx: @CrateContext,
abi::tydesc_field_visit_glue, abi::tydesc_field_visit_glue,
None); None);
} }
~"frame_address" => { "frame_address" => {
let frameaddress = *ccx.intrinsics.get(&~"llvm.frameaddress"); let frameaddress = *ccx.intrinsics.get(& &"llvm.frameaddress");
let frameaddress_val = Call(bcx, frameaddress, [C_i32(0i32)]); let frameaddress_val = Call(bcx, frameaddress, [C_i32(0i32)]);
let star_u8 = ty::mk_imm_ptr( let star_u8 = ty::mk_imm_ptr(
bcx.tcx(), bcx.tcx(),
@ -836,7 +836,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
|bcx| Callee {bcx: bcx, data: Closure(datum)}, |bcx| Callee {bcx: bcx, data: Closure(datum)},
ArgVals(arg_vals), Ignore, DontAutorefArg); ArgVals(arg_vals), Ignore, DontAutorefArg);
} }
~"morestack_addr" => { "morestack_addr" => {
// XXX This is a hack to grab the address of this particular // XXX This is a hack to grab the address of this particular
// native function. There should be a general in-language // native function. There should be a general in-language
// way to do this // way to do this
@ -847,7 +847,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
T_ptr(T_nil())); T_ptr(T_nil()));
Store(bcx, morestack_addr, fcx.llretptr.get()); Store(bcx, morestack_addr, fcx.llretptr.get());
} }
~"memcpy32" => { "memcpy32" => {
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32); let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32);
@ -857,10 +857,10 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let src_ptr = PointerCast(bcx, get_param(decl, first_real_arg + 1), T_ptr(T_i8())); let src_ptr = PointerCast(bcx, get_param(decl, first_real_arg + 1), T_ptr(T_i8()));
let count = get_param(decl, first_real_arg + 2); let count = get_param(decl, first_real_arg + 2);
let volatile = C_i1(false); let volatile = C_i1(false);
let llfn = *bcx.ccx().intrinsics.get(&~"llvm.memcpy.p0i8.p0i8.i32"); let llfn = *bcx.ccx().intrinsics.get(& &"llvm.memcpy.p0i8.p0i8.i32");
Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align, volatile]); Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align, volatile]);
} }
~"memcpy64" => { "memcpy64" => {
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32); let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32);
@ -870,10 +870,10 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let src_ptr = PointerCast(bcx, get_param(decl, first_real_arg + 1), T_ptr(T_i8())); let src_ptr = PointerCast(bcx, get_param(decl, first_real_arg + 1), T_ptr(T_i8()));
let count = get_param(decl, first_real_arg + 2); let count = get_param(decl, first_real_arg + 2);
let volatile = C_i1(false); let volatile = C_i1(false);
let llfn = *bcx.ccx().intrinsics.get(&~"llvm.memcpy.p0i8.p0i8.i64"); let llfn = *bcx.ccx().intrinsics.get(& &"llvm.memcpy.p0i8.p0i8.i64");
Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align, volatile]); Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align, volatile]);
} }
~"memmove32" => { "memmove32" => {
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32); let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32);
@ -883,10 +883,10 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let src_ptr = PointerCast(bcx, get_param(decl, first_real_arg + 1), T_ptr(T_i8())); let src_ptr = PointerCast(bcx, get_param(decl, first_real_arg + 1), T_ptr(T_i8()));
let count = get_param(decl, first_real_arg + 2); let count = get_param(decl, first_real_arg + 2);
let volatile = C_i1(false); let volatile = C_i1(false);
let llfn = *bcx.ccx().intrinsics.get(&~"llvm.memmove.p0i8.p0i8.i32"); let llfn = *bcx.ccx().intrinsics.get(& &"llvm.memmove.p0i8.p0i8.i32");
Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align, volatile]); Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align, volatile]);
} }
~"memmove64" => { "memmove64" => {
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32); let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32);
@ -896,10 +896,10 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let src_ptr = PointerCast(bcx, get_param(decl, first_real_arg + 1), T_ptr(T_i8())); let src_ptr = PointerCast(bcx, get_param(decl, first_real_arg + 1), T_ptr(T_i8()));
let count = get_param(decl, first_real_arg + 2); let count = get_param(decl, first_real_arg + 2);
let volatile = C_i1(false); let volatile = C_i1(false);
let llfn = *bcx.ccx().intrinsics.get(&~"llvm.memmove.p0i8.p0i8.i64"); let llfn = *bcx.ccx().intrinsics.get(& &"llvm.memmove.p0i8.p0i8.i64");
Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align, volatile]); Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align, volatile]);
} }
~"memset32" => { "memset32" => {
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32); let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32);
@ -909,10 +909,10 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let val = get_param(decl, first_real_arg + 1); let val = get_param(decl, first_real_arg + 1);
let count = get_param(decl, first_real_arg + 2); let count = get_param(decl, first_real_arg + 2);
let volatile = C_i1(false); let volatile = C_i1(false);
let llfn = *bcx.ccx().intrinsics.get(&~"llvm.memset.p0i8.i32"); let llfn = *bcx.ccx().intrinsics.get(& &"llvm.memset.p0i8.i32");
Call(bcx, llfn, [dst_ptr, val, Mul(bcx, size, count), align, volatile]); Call(bcx, llfn, [dst_ptr, val, Mul(bcx, size, count), align, volatile]);
} }
~"memset64" => { "memset64" => {
let tp_ty = substs.tys[0]; let tp_ty = substs.tys[0];
let lltp_ty = type_of::type_of(ccx, tp_ty); let lltp_ty = type_of::type_of(ccx, tp_ty);
let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32); let align = C_i32(machine::llalign_of_min(ccx, lltp_ty) as i32);
@ -922,248 +922,248 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let val = get_param(decl, first_real_arg + 1); let val = get_param(decl, first_real_arg + 1);
let count = get_param(decl, first_real_arg + 2); let count = get_param(decl, first_real_arg + 2);
let volatile = C_i1(false); let volatile = C_i1(false);
let llfn = *bcx.ccx().intrinsics.get(&~"llvm.memset.p0i8.i64"); let llfn = *bcx.ccx().intrinsics.get(& &"llvm.memset.p0i8.i64");
Call(bcx, llfn, [dst_ptr, val, Mul(bcx, size, count), align, volatile]); Call(bcx, llfn, [dst_ptr, val, Mul(bcx, size, count), align, volatile]);
} }
~"sqrtf32" => { "sqrtf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let sqrtf = *ccx.intrinsics.get(&~"llvm.sqrt.f32"); let sqrtf = *ccx.intrinsics.get(& &"llvm.sqrt.f32");
Store(bcx, Call(bcx, sqrtf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, sqrtf, [x]), fcx.llretptr.get());
} }
~"sqrtf64" => { "sqrtf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let sqrtf = *ccx.intrinsics.get(&~"llvm.sqrt.f64"); let sqrtf = *ccx.intrinsics.get(& &"llvm.sqrt.f64");
Store(bcx, Call(bcx, sqrtf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, sqrtf, [x]), fcx.llretptr.get());
} }
~"powif32" => { "powif32" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u); let x = get_param(decl, first_real_arg + 1u);
let powif = *ccx.intrinsics.get(&~"llvm.powi.f32"); let powif = *ccx.intrinsics.get(& &"llvm.powi.f32");
Store(bcx, Call(bcx, powif, [a, x]), fcx.llretptr.get()); Store(bcx, Call(bcx, powif, [a, x]), fcx.llretptr.get());
} }
~"powif64" => { "powif64" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u); let x = get_param(decl, first_real_arg + 1u);
let powif = *ccx.intrinsics.get(&~"llvm.powi.f64"); let powif = *ccx.intrinsics.get(& &"llvm.powi.f64");
Store(bcx, Call(bcx, powif, [a, x]), fcx.llretptr.get()); Store(bcx, Call(bcx, powif, [a, x]), fcx.llretptr.get());
} }
~"sinf32" => { "sinf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let sinf = *ccx.intrinsics.get(&~"llvm.sin.f32"); let sinf = *ccx.intrinsics.get(& &"llvm.sin.f32");
Store(bcx, Call(bcx, sinf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, sinf, [x]), fcx.llretptr.get());
} }
~"sinf64" => { "sinf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let sinf = *ccx.intrinsics.get(&~"llvm.sin.f64"); let sinf = *ccx.intrinsics.get(& &"llvm.sin.f64");
Store(bcx, Call(bcx, sinf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, sinf, [x]), fcx.llretptr.get());
} }
~"cosf32" => { "cosf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cosf = *ccx.intrinsics.get(&~"llvm.cos.f32"); let cosf = *ccx.intrinsics.get(& &"llvm.cos.f32");
Store(bcx, Call(bcx, cosf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, cosf, [x]), fcx.llretptr.get());
} }
~"cosf64" => { "cosf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cosf = *ccx.intrinsics.get(&~"llvm.cos.f64"); let cosf = *ccx.intrinsics.get(& &"llvm.cos.f64");
Store(bcx, Call(bcx, cosf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, cosf, [x]), fcx.llretptr.get());
} }
~"powf32" => { "powf32" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u); let x = get_param(decl, first_real_arg + 1u);
let powf = *ccx.intrinsics.get(&~"llvm.pow.f32"); let powf = *ccx.intrinsics.get(& &"llvm.pow.f32");
Store(bcx, Call(bcx, powf, [a, x]), fcx.llretptr.get()); Store(bcx, Call(bcx, powf, [a, x]), fcx.llretptr.get());
} }
~"powf64" => { "powf64" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let x = get_param(decl, first_real_arg + 1u); let x = get_param(decl, first_real_arg + 1u);
let powf = *ccx.intrinsics.get(&~"llvm.pow.f64"); let powf = *ccx.intrinsics.get(& &"llvm.pow.f64");
Store(bcx, Call(bcx, powf, [a, x]), fcx.llretptr.get()); Store(bcx, Call(bcx, powf, [a, x]), fcx.llretptr.get());
} }
~"expf32" => { "expf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let expf = *ccx.intrinsics.get(&~"llvm.exp.f32"); let expf = *ccx.intrinsics.get(& &"llvm.exp.f32");
Store(bcx, Call(bcx, expf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, expf, [x]), fcx.llretptr.get());
} }
~"expf64" => { "expf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let expf = *ccx.intrinsics.get(&~"llvm.exp.f64"); let expf = *ccx.intrinsics.get(& &"llvm.exp.f64");
Store(bcx, Call(bcx, expf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, expf, [x]), fcx.llretptr.get());
} }
~"exp2f32" => { "exp2f32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let exp2f = *ccx.intrinsics.get(&~"llvm.exp2.f32"); let exp2f = *ccx.intrinsics.get(& &"llvm.exp2.f32");
Store(bcx, Call(bcx, exp2f, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, exp2f, [x]), fcx.llretptr.get());
} }
~"exp2f64" => { "exp2f64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let exp2f = *ccx.intrinsics.get(&~"llvm.exp2.f64"); let exp2f = *ccx.intrinsics.get(& &"llvm.exp2.f64");
Store(bcx, Call(bcx, exp2f, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, exp2f, [x]), fcx.llretptr.get());
} }
~"logf32" => { "logf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let logf = *ccx.intrinsics.get(&~"llvm.log.f32"); let logf = *ccx.intrinsics.get(& &"llvm.log.f32");
Store(bcx, Call(bcx, logf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, logf, [x]), fcx.llretptr.get());
} }
~"logf64" => { "logf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let logf = *ccx.intrinsics.get(&~"llvm.log.f64"); let logf = *ccx.intrinsics.get(& &"llvm.log.f64");
Store(bcx, Call(bcx, logf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, logf, [x]), fcx.llretptr.get());
} }
~"log10f32" => { "log10f32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let log10f = *ccx.intrinsics.get(&~"llvm.log10.f32"); let log10f = *ccx.intrinsics.get(& &"llvm.log10.f32");
Store(bcx, Call(bcx, log10f, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, log10f, [x]), fcx.llretptr.get());
} }
~"log10f64" => { "log10f64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let log10f = *ccx.intrinsics.get(&~"llvm.log10.f64"); let log10f = *ccx.intrinsics.get(& &"llvm.log10.f64");
Store(bcx, Call(bcx, log10f, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, log10f, [x]), fcx.llretptr.get());
} }
~"log2f32" => { "log2f32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let log2f = *ccx.intrinsics.get(&~"llvm.log2.f32"); let log2f = *ccx.intrinsics.get(& &"llvm.log2.f32");
Store(bcx, Call(bcx, log2f, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, log2f, [x]), fcx.llretptr.get());
} }
~"log2f64" => { "log2f64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let log2f = *ccx.intrinsics.get(&~"llvm.log2.f64"); let log2f = *ccx.intrinsics.get(& &"llvm.log2.f64");
Store(bcx, Call(bcx, log2f, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, log2f, [x]), fcx.llretptr.get());
} }
~"fmaf32" => { "fmaf32" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let b = get_param(decl, first_real_arg + 1u); let b = get_param(decl, first_real_arg + 1u);
let c = get_param(decl, first_real_arg + 2u); let c = get_param(decl, first_real_arg + 2u);
let fmaf = *ccx.intrinsics.get(&~"llvm.fma.f32"); let fmaf = *ccx.intrinsics.get(& &"llvm.fma.f32");
Store(bcx, Call(bcx, fmaf, [a, b, c]), fcx.llretptr.get()); Store(bcx, Call(bcx, fmaf, [a, b, c]), fcx.llretptr.get());
} }
~"fmaf64" => { "fmaf64" => {
let a = get_param(decl, first_real_arg); let a = get_param(decl, first_real_arg);
let b = get_param(decl, first_real_arg + 1u); let b = get_param(decl, first_real_arg + 1u);
let c = get_param(decl, first_real_arg + 2u); let c = get_param(decl, first_real_arg + 2u);
let fmaf = *ccx.intrinsics.get(&~"llvm.fma.f64"); let fmaf = *ccx.intrinsics.get(& &"llvm.fma.f64");
Store(bcx, Call(bcx, fmaf, [a, b, c]), fcx.llretptr.get()); Store(bcx, Call(bcx, fmaf, [a, b, c]), fcx.llretptr.get());
} }
~"fabsf32" => { "fabsf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let fabsf = *ccx.intrinsics.get(&~"llvm.fabs.f32"); let fabsf = *ccx.intrinsics.get(& &"llvm.fabs.f32");
Store(bcx, Call(bcx, fabsf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, fabsf, [x]), fcx.llretptr.get());
} }
~"fabsf64" => { "fabsf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let fabsf = *ccx.intrinsics.get(&~"llvm.fabs.f64"); let fabsf = *ccx.intrinsics.get(& &"llvm.fabs.f64");
Store(bcx, Call(bcx, fabsf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, fabsf, [x]), fcx.llretptr.get());
} }
~"floorf32" => { "floorf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let floorf = *ccx.intrinsics.get(&~"llvm.floor.f32"); let floorf = *ccx.intrinsics.get(& &"llvm.floor.f32");
Store(bcx, Call(bcx, floorf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, floorf, [x]), fcx.llretptr.get());
} }
~"floorf64" => { "floorf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let floorf = *ccx.intrinsics.get(&~"llvm.floor.f64"); let floorf = *ccx.intrinsics.get(& &"llvm.floor.f64");
Store(bcx, Call(bcx, floorf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, floorf, [x]), fcx.llretptr.get());
} }
~"ceilf32" => { "ceilf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ceilf = *ccx.intrinsics.get(&~"llvm.ceil.f32"); let ceilf = *ccx.intrinsics.get(& &"llvm.ceil.f32");
Store(bcx, Call(bcx, ceilf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, ceilf, [x]), fcx.llretptr.get());
} }
~"ceilf64" => { "ceilf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ceilf = *ccx.intrinsics.get(&~"llvm.ceil.f64"); let ceilf = *ccx.intrinsics.get(& &"llvm.ceil.f64");
Store(bcx, Call(bcx, ceilf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, ceilf, [x]), fcx.llretptr.get());
} }
~"truncf32" => { "truncf32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let truncf = *ccx.intrinsics.get(&~"llvm.trunc.f32"); let truncf = *ccx.intrinsics.get(& &"llvm.trunc.f32");
Store(bcx, Call(bcx, truncf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, truncf, [x]), fcx.llretptr.get());
} }
~"truncf64" => { "truncf64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let truncf = *ccx.intrinsics.get(&~"llvm.trunc.f64"); let truncf = *ccx.intrinsics.get(& &"llvm.trunc.f64");
Store(bcx, Call(bcx, truncf, [x]), fcx.llretptr.get()); Store(bcx, Call(bcx, truncf, [x]), fcx.llretptr.get());
} }
~"ctpop8" => { "ctpop8" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ctpop = *ccx.intrinsics.get(&~"llvm.ctpop.i8"); let ctpop = *ccx.intrinsics.get(& &"llvm.ctpop.i8");
Store(bcx, Call(bcx, ctpop, [x]), fcx.llretptr.get()) Store(bcx, Call(bcx, ctpop, [x]), fcx.llretptr.get())
} }
~"ctpop16" => { "ctpop16" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ctpop = *ccx.intrinsics.get(&~"llvm.ctpop.i16"); let ctpop = *ccx.intrinsics.get(& &"llvm.ctpop.i16");
Store(bcx, Call(bcx, ctpop, [x]), fcx.llretptr.get()) Store(bcx, Call(bcx, ctpop, [x]), fcx.llretptr.get())
} }
~"ctpop32" => { "ctpop32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ctpop = *ccx.intrinsics.get(&~"llvm.ctpop.i32"); let ctpop = *ccx.intrinsics.get(& &"llvm.ctpop.i32");
Store(bcx, Call(bcx, ctpop, [x]), fcx.llretptr.get()) Store(bcx, Call(bcx, ctpop, [x]), fcx.llretptr.get())
} }
~"ctpop64" => { "ctpop64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let ctpop = *ccx.intrinsics.get(&~"llvm.ctpop.i64"); let ctpop = *ccx.intrinsics.get(& &"llvm.ctpop.i64");
Store(bcx, Call(bcx, ctpop, [x]), fcx.llretptr.get()) Store(bcx, Call(bcx, ctpop, [x]), fcx.llretptr.get())
} }
~"ctlz8" => { "ctlz8" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let ctlz = *ccx.intrinsics.get(&~"llvm.ctlz.i8"); let ctlz = *ccx.intrinsics.get(& &"llvm.ctlz.i8");
Store(bcx, Call(bcx, ctlz, [x, y]), fcx.llretptr.get()) Store(bcx, Call(bcx, ctlz, [x, y]), fcx.llretptr.get())
} }
~"ctlz16" => { "ctlz16" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let ctlz = *ccx.intrinsics.get(&~"llvm.ctlz.i16"); let ctlz = *ccx.intrinsics.get(& &"llvm.ctlz.i16");
Store(bcx, Call(bcx, ctlz, [x, y]), fcx.llretptr.get()) Store(bcx, Call(bcx, ctlz, [x, y]), fcx.llretptr.get())
} }
~"ctlz32" => { "ctlz32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let ctlz = *ccx.intrinsics.get(&~"llvm.ctlz.i32"); let ctlz = *ccx.intrinsics.get(& &"llvm.ctlz.i32");
Store(bcx, Call(bcx, ctlz, [x, y]), fcx.llretptr.get()) Store(bcx, Call(bcx, ctlz, [x, y]), fcx.llretptr.get())
} }
~"ctlz64" => { "ctlz64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let ctlz = *ccx.intrinsics.get(&~"llvm.ctlz.i64"); let ctlz = *ccx.intrinsics.get(& &"llvm.ctlz.i64");
Store(bcx, Call(bcx, ctlz, [x, y]), fcx.llretptr.get()) Store(bcx, Call(bcx, ctlz, [x, y]), fcx.llretptr.get())
} }
~"cttz8" => { "cttz8" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let cttz = *ccx.intrinsics.get(&~"llvm.cttz.i8"); let cttz = *ccx.intrinsics.get(& &"llvm.cttz.i8");
Store(bcx, Call(bcx, cttz, [x, y]), fcx.llretptr.get()) Store(bcx, Call(bcx, cttz, [x, y]), fcx.llretptr.get())
} }
~"cttz16" => { "cttz16" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let cttz = *ccx.intrinsics.get(&~"llvm.cttz.i16"); let cttz = *ccx.intrinsics.get(& &"llvm.cttz.i16");
Store(bcx, Call(bcx, cttz, [x, y]), fcx.llretptr.get()) Store(bcx, Call(bcx, cttz, [x, y]), fcx.llretptr.get())
} }
~"cttz32" => { "cttz32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let cttz = *ccx.intrinsics.get(&~"llvm.cttz.i32"); let cttz = *ccx.intrinsics.get(& &"llvm.cttz.i32");
Store(bcx, Call(bcx, cttz, [x, y]), fcx.llretptr.get()) Store(bcx, Call(bcx, cttz, [x, y]), fcx.llretptr.get())
} }
~"cttz64" => { "cttz64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let y = C_i1(false); let y = C_i1(false);
let cttz = *ccx.intrinsics.get(&~"llvm.cttz.i64"); let cttz = *ccx.intrinsics.get(& &"llvm.cttz.i64");
Store(bcx, Call(bcx, cttz, [x, y]), fcx.llretptr.get()) Store(bcx, Call(bcx, cttz, [x, y]), fcx.llretptr.get())
} }
~"bswap16" => { "bswap16" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cttz = *ccx.intrinsics.get(&~"llvm.bswap.i16"); let cttz = *ccx.intrinsics.get(& &"llvm.bswap.i16");
Store(bcx, Call(bcx, cttz, [x]), fcx.llretptr.get()) Store(bcx, Call(bcx, cttz, [x]), fcx.llretptr.get())
} }
~"bswap32" => { "bswap32" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cttz = *ccx.intrinsics.get(&~"llvm.bswap.i32"); let cttz = *ccx.intrinsics.get(& &"llvm.bswap.i32");
Store(bcx, Call(bcx, cttz, [x]), fcx.llretptr.get()) Store(bcx, Call(bcx, cttz, [x]), fcx.llretptr.get())
} }
~"bswap64" => { "bswap64" => {
let x = get_param(decl, first_real_arg); let x = get_param(decl, first_real_arg);
let cttz = *ccx.intrinsics.get(&~"llvm.bswap.i64"); let cttz = *ccx.intrinsics.get(& &"llvm.bswap.i64");
Store(bcx, Call(bcx, cttz, [x]), fcx.llretptr.get()) Store(bcx, Call(bcx, cttz, [x]), fcx.llretptr.get())
} }
_ => { _ => {
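Each arm above fetches the LLVM declaration from a table that is now keyed by `&'static str` rather than `~str`, which is why the lookups read `get(& &"llvm....")`: `get` wants a reference to the key type, and the key type is itself a string slice. A minimal sketch of that lookup shape in current Rust, with made-up integer handles standing in for the LLVM values:

    use std::collections::HashMap;

    fn main() {
        // Hypothetical stand-in for ccx.intrinsics: declarations keyed by
        // &'static str, so building the table allocates no string data.
        let mut intrinsics: HashMap<&'static str, u32> = HashMap::new();
        intrinsics.insert("llvm.fma.f64", 0xf3a);
        intrinsics.insert("llvm.fabs.f32", 0xfab);

        // The key is already a reference, so the lookup passes a
        // reference-to-reference (spelled `& &"llvm.fma.f64"` in the
        // 2013 code above).
        let fmaf = *intrinsics.get(&"llvm.fma.f64").unwrap();
        assert_eq!(fmaf, 0xf3a);
    }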

View File

@ -686,10 +686,10 @@ pub fn declare_tydesc(ccx: @CrateContext, t: ty::t) -> @mut tydesc_info {
let llsize = llsize_of(ccx, llty); let llsize = llsize_of(ccx, llty);
let llalign = llalign_of(ccx, llty); let llalign = llalign_of(ccx, llty);
let addrspace = declare_tydesc_addrspace(ccx, t); let addrspace = declare_tydesc_addrspace(ccx, t);
let name = @mangle_internal_name_by_type_and_seq(ccx, t, "tydesc"); let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc").to_managed();
note_unique_llvm_symbol(ccx, name); note_unique_llvm_symbol(ccx, name);
debug!("+++ declare_tydesc %s %s", ppaux::ty_to_str(ccx.tcx, t), *name); debug!("+++ declare_tydesc %s %s", ppaux::ty_to_str(ccx.tcx, t), name);
let gvar = str::as_c_str(*name, |buf| { let gvar = str::as_c_str(name, |buf| {
unsafe { unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type, buf) llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type, buf)
} }
@ -715,10 +715,10 @@ pub fn declare_generic_glue(ccx: @CrateContext, t: ty::t, llfnty: TypeRef,
name: ~str) -> ValueRef { name: ~str) -> ValueRef {
let _icx = ccx.insn_ctxt("declare_generic_glue"); let _icx = ccx.insn_ctxt("declare_generic_glue");
let name = name; let name = name;
let fn_nm = @mangle_internal_name_by_type_and_seq(ccx, t, (~"glue_" + name)); let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, (~"glue_" + name)).to_managed();
debug!("%s is for type %s", *fn_nm, ppaux::ty_to_str(ccx.tcx, t)); debug!("%s is for type %s", fn_nm, ppaux::ty_to_str(ccx.tcx, t));
note_unique_llvm_symbol(ccx, fn_nm); note_unique_llvm_symbol(ccx, fn_nm);
let llfn = decl_cdecl_fn(ccx.llmod, *fn_nm, llfnty); let llfn = decl_cdecl_fn(ccx.llmod, fn_nm, llfnty);
set_glue_inlining(llfn, t); set_glue_inlining(llfn, t);
return llfn; return llfn;
} }

View File

@ -139,7 +139,7 @@ pub fn static_size_of_enum(cx: @CrateContext, t: ty::t) -> uint {
}); });
debug!("static_size_of_enum: variant %s type %s", debug!("static_size_of_enum: variant %s type %s",
*cx.tcx.sess.str_of(variant.name), cx.tcx.sess.str_of(variant.name),
ty_str(cx.tn, T_struct(lltypes, false))); ty_str(cx.tn, T_struct(lltypes, false)));
let this_size = llsize_of_real(cx, T_struct(lltypes, false)); let this_size = llsize_of_real(cx, T_struct(lltypes, false));

View File

@ -339,7 +339,7 @@ pub fn trans_static_method_callee(bcx: block,
} }
}; };
debug!("trans_static_method_callee: method_id=%?, callee_id=%?, \ debug!("trans_static_method_callee: method_id=%?, callee_id=%?, \
name=%s", method_id, callee_id, *ccx.sess.str_of(mname)); name=%s", method_id, callee_id, ccx.sess.str_of(mname));
let vtbls = resolve_vtables_in_fn_ctxt( let vtbls = resolve_vtables_in_fn_ctxt(
bcx.fcx, ccx.maps.vtable_map.get_copy(&callee_id)); bcx.fcx, ccx.maps.vtable_map.get_copy(&callee_id));
@ -791,7 +791,7 @@ pub fn make_vtable(ccx: @CrateContext,
let tbl = C_struct(components); let tbl = C_struct(components);
let vtable = ccx.sess.str_of((ccx.names)("vtable")); let vtable = ccx.sess.str_of((ccx.names)("vtable"));
let vt_gvar = do str::as_c_str(*vtable) |buf| { let vt_gvar = do str::as_c_str(vtable) |buf| {
llvm::LLVMAddGlobal(ccx.llmod, val_ty(tbl), buf) llvm::LLVMAddGlobal(ccx.llmod, val_ty(tbl), buf)
}; };
llvm::LLVMSetInitializer(vt_gvar, tbl); llvm::LLVMSetInitializer(vt_gvar, tbl);
@ -827,16 +827,15 @@ pub fn make_impl_vtable(bcx: block,
ty::mk_bare_fn(tcx, copy im.fty)); ty::mk_bare_fn(tcx, copy im.fty));
if im.generics.has_type_params() || ty::type_has_self(fty) { if im.generics.has_type_params() || ty::type_has_self(fty) {
debug!("(making impl vtable) method has self or type params: %s", debug!("(making impl vtable) method has self or type params: %s",
*tcx.sess.str_of(im.ident)); tcx.sess.str_of(im.ident));
C_null(T_ptr(T_nil())) C_null(T_ptr(T_nil()))
} else { } else {
debug!("(making impl vtable) adding method to vtable: %s", debug!("(making impl vtable) adding method to vtable: %s",
*tcx.sess.str_of(im.ident)); tcx.sess.str_of(im.ident));
let m_id = method_with_name_or_default(ccx, impl_id, im.ident); let m_id = method_with_name_or_default(ccx, impl_id, im.ident);
trans_fn_ref_with_vtables(bcx, m_id, 0, trans_fn_ref_with_vtables(bcx, m_id, 0,
substs, Some(vtables)).llfn substs, Some(vtables)).llfn
} }
}; };

View File

@ -164,7 +164,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
ccx.monomorphizing.insert(fn_id, depth + 1); ccx.monomorphizing.insert(fn_id, depth + 1);
let pt = vec::append(/*bad*/copy *pt, let pt = vec::append(/*bad*/copy *pt,
[path_name((ccx.names)(*ccx.sess.str_of(name)))]); [path_name((ccx.names)(ccx.sess.str_of(name)))]);
let s = mangle_exported_name(ccx, /*bad*/copy pt, mono_ty); let s = mangle_exported_name(ccx, /*bad*/copy pt, mono_ty);
let mk_lldecl = || { let mk_lldecl = || {

View File

@ -50,7 +50,7 @@ impl Reflector {
C_int(self.bcx.ccx(), i) C_int(self.bcx.ccx(), i)
} }
pub fn c_slice(&mut self, s: @~str) -> ValueRef { pub fn c_slice(&mut self, s: @str) -> ValueRef {
// We're careful to not use first class aggregates here because that // We're careful to not use first class aggregates here because that
// will kick us off fast isel. (Issue #4352.) // will kick us off fast isel. (Issue #4352.)
let bcx = self.bcx; let bcx = self.bcx;

View File

@ -250,7 +250,7 @@ pub fn trans_slice_vstore(bcx: block,
pub fn trans_lit_str(bcx: block, pub fn trans_lit_str(bcx: block,
lit_expr: @ast::expr, lit_expr: @ast::expr,
str_lit: @~str, str_lit: @str,
dest: Dest) dest: Dest)
-> block { -> block {
//! //!

View File

@ -118,43 +118,43 @@ pub fn type_uses_for(ccx: @CrateContext, fn_id: def_id, n_tps: uint)
_, _,
_) => { _) => {
if abi.is_intrinsic() { if abi.is_intrinsic() {
let flags = match *cx.ccx.sess.str_of(i.ident) { let flags = match cx.ccx.sess.str_of(i.ident).as_slice() {
~"size_of" | ~"pref_align_of" | ~"min_align_of" | "size_of" | "pref_align_of" | "min_align_of" |
~"uninit" | ~"init" | ~"transmute" | ~"move_val" | "uninit" | "init" | "transmute" | "move_val" |
~"move_val_init" => use_repr, "move_val_init" => use_repr,
~"get_tydesc" | ~"needs_drop" => use_tydesc, "get_tydesc" | "needs_drop" => use_tydesc,
~"atomic_cxchg" | ~"atomic_cxchg_acq"| "atomic_cxchg" | "atomic_cxchg_acq"|
~"atomic_cxchg_rel"| ~"atomic_load" | "atomic_cxchg_rel"| "atomic_load" |
~"atomic_load_acq" | ~"atomic_store" | "atomic_load_acq" | "atomic_store" |
~"atomic_store_rel"| ~"atomic_xchg" | "atomic_store_rel"| "atomic_xchg" |
~"atomic_xadd" | ~"atomic_xsub" | "atomic_xadd" | "atomic_xsub" |
~"atomic_xchg_acq" | ~"atomic_xadd_acq" | "atomic_xchg_acq" | "atomic_xadd_acq" |
~"atomic_xsub_acq" | ~"atomic_xchg_rel" | "atomic_xsub_acq" | "atomic_xchg_rel" |
~"atomic_xadd_rel" | ~"atomic_xsub_rel" => 0, "atomic_xadd_rel" | "atomic_xsub_rel" => 0,
~"visit_tydesc" | ~"forget" | ~"frame_address" | "visit_tydesc" | "forget" | "frame_address" |
~"morestack_addr" => 0, "morestack_addr" => 0,
~"memcpy32" | ~"memcpy64" | ~"memmove32" | ~"memmove64" | "memcpy32" | "memcpy64" | "memmove32" | "memmove64" |
~"memset32" | ~"memset64" => use_repr, "memset32" | "memset64" => use_repr,
~"sqrtf32" | ~"sqrtf64" | ~"powif32" | ~"powif64" | "sqrtf32" | "sqrtf64" | "powif32" | "powif64" |
~"sinf32" | ~"sinf64" | ~"cosf32" | ~"cosf64" | "sinf32" | "sinf64" | "cosf32" | "cosf64" |
~"powf32" | ~"powf64" | ~"expf32" | ~"expf64" | "powf32" | "powf64" | "expf32" | "expf64" |
~"exp2f32" | ~"exp2f64" | ~"logf32" | ~"logf64" | "exp2f32" | "exp2f64" | "logf32" | "logf64" |
~"log10f32"| ~"log10f64"| ~"log2f32" | ~"log2f64" | "log10f32"| "log10f64"| "log2f32" | "log2f64" |
~"fmaf32" | ~"fmaf64" | ~"fabsf32" | ~"fabsf64" | "fmaf32" | "fmaf64" | "fabsf32" | "fabsf64" |
~"floorf32"| ~"floorf64"| ~"ceilf32" | ~"ceilf64" | "floorf32"| "floorf64"| "ceilf32" | "ceilf64" |
~"truncf32"| ~"truncf64" => 0, "truncf32"| "truncf64" => 0,
~"ctpop8" | ~"ctpop16" | ~"ctpop32" | ~"ctpop64" => 0, "ctpop8" | "ctpop16" | "ctpop32" | "ctpop64" => 0,
~"ctlz8" | ~"ctlz16" | ~"ctlz32" | ~"ctlz64" => 0, "ctlz8" | "ctlz16" | "ctlz32" | "ctlz64" => 0,
~"cttz8" | ~"cttz16" | ~"cttz32" | ~"cttz64" => 0, "cttz8" | "cttz16" | "cttz32" | "cttz64" => 0,
~"bswap16" | ~"bswap32" | ~"bswap64" => 0, "bswap16" | "bswap32" | "bswap64" => 0,
// would be cool to make these an enum instead of strings! // would be cool to make these an enum instead of strings!
_ => fail!("unknown intrinsic in type_use") _ => fail!("unknown intrinsic in type_use")
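The flag table above now borrows the interned name (`as_slice()`) and matches it against plain string-literal patterns instead of allocating a `~"..."` pattern per arm. The equivalent move in current Rust is matching on `as_str()`; a small illustrative sketch with invented flag values:

    const USE_REPR: u32 = 1;
    const USE_TYDESC: u32 = 2;

    fn type_use_flags(name: &str) -> u32 {
        // String-literal patterns are &'static str, so no per-arm allocation.
        match name {
            "size_of" | "init" | "transmute" | "move_val_init" => USE_REPR,
            "get_tydesc" | "needs_drop" => USE_TYDESC,
            "forget" | "frame_address" | "morestack_addr" => 0,
            _ => panic!("unknown intrinsic in type_use"),
        }
    }

    fn main() {
        let interned = String::from("needs_drop");
        // Borrow the owned string once, then match it against the table.
        assert_eq!(type_use_flags(interned.as_str()), USE_TYDESC);
    }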

View File

@ -117,8 +117,8 @@ fn root(datum: &Datum,
if bcx.sess().trace() { if bcx.sess().trace() {
trans_trace( trans_trace(
bcx, None, bcx, None,
@fmt!("preserving until end of scope %d", (fmt!("preserving until end of scope %d",
root_info.scope)); root_info.scope)).to_managed());
} }
// First, root the datum. Note that we must zero this value, // First, root the datum. Note that we must zero this value,

View File

@ -281,7 +281,7 @@ struct ctxt_ {
tcache: type_cache, tcache: type_cache,
rcache: creader_cache, rcache: creader_cache,
ccache: constness_cache, ccache: constness_cache,
short_names_cache: @mut HashMap<t, @~str>, short_names_cache: @mut HashMap<t, @str>,
needs_unwind_cleanup_cache: @mut HashMap<t, bool>, needs_unwind_cleanup_cache: @mut HashMap<t, bool>,
tc_cache: @mut HashMap<uint, TypeContents>, tc_cache: @mut HashMap<uint, TypeContents>,
ast_ty_to_ty_cache: @mut HashMap<node_id, ast_ty_to_ty_cache_entry>, ast_ty_to_ty_cache: @mut HashMap<node_id, ast_ty_to_ty_cache_entry>,
@ -3366,7 +3366,7 @@ pub fn field_idx_strict(tcx: ty::ctxt, id: ast::ident, fields: &[field])
for fields.each |f| { if f.ident == id { return i; } i += 1u; } for fields.each |f| { if f.ident == id { return i; } i += 1u; }
tcx.sess.bug(fmt!( tcx.sess.bug(fmt!(
"No field named `%s` found in the list of fields `%?`", "No field named `%s` found in the list of fields `%?`",
*tcx.sess.str_of(id), tcx.sess.str_of(id),
fields.map(|f| tcx.sess.str_of(f.ident)))); fields.map(|f| tcx.sess.str_of(f.ident))));
} }
@ -3514,8 +3514,8 @@ pub fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str {
terr_record_fields(values) => { terr_record_fields(values) => {
fmt!("expected a record with field `%s` but found one with field \ fmt!("expected a record with field `%s` but found one with field \
`%s`", `%s`",
*cx.sess.str_of(values.expected), cx.sess.str_of(values.expected),
*cx.sess.str_of(values.found)) cx.sess.str_of(values.found))
} }
terr_arg_count => ~"incorrect number of function parameters", terr_arg_count => ~"incorrect number of function parameters",
terr_regions_does_not_outlive(*) => { terr_regions_does_not_outlive(*) => {
@ -3549,7 +3549,7 @@ pub fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str {
trait_store_to_str(cx, (*values).found)) trait_store_to_str(cx, (*values).found))
} }
terr_in_field(err, fname) => { terr_in_field(err, fname) => {
fmt!("in field `%s`, %s", *cx.sess.str_of(fname), fmt!("in field `%s`, %s", cx.sess.str_of(fname),
type_err_to_str(cx, err)) type_err_to_str(cx, err))
} }
terr_sorts(values) => { terr_sorts(values) => {
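`short_names_cache` above becomes a `HashMap<t, @str>`, i.e. the cached short names are shared, immutable strings rather than boxed owned ones. A rough modern analogue, with `Rc<str>` standing in for `@str` and a plain integer standing in for `ty::t`:

    use std::collections::HashMap;
    use std::rc::Rc;

    // Hypothetical stand-in for short_names_cache: repeated pretty-printing
    // of the same type id reuses one shared allocation.
    struct NameCache {
        cache: HashMap<u32, Rc<str>>,
    }

    impl NameCache {
        fn new() -> Self {
            NameCache { cache: HashMap::new() }
        }

        fn short_name(&mut self, ty_id: u32) -> Rc<str> {
            self.cache
                .entry(ty_id)
                .or_insert_with(|| Rc::from(format!("ty{}", ty_id)))
                .clone()
        }
    }

    fn main() {
        let mut c = NameCache::new();
        let a = c.short_name(7);
        let b = c.short_name(7);
        assert!(Rc::ptr_eq(&a, &b)); // second lookup shares the first allocation
    }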

View File

@ -297,7 +297,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
tcx.sess.span_err(span, tcx.sess.span_err(span,
fmt!("struct `%s` does not have a field fmt!("struct `%s` does not have a field
named `%s`", name, named `%s`", name,
*tcx.sess.str_of(field.ident))); tcx.sess.str_of(field.ident)));
} }
} }
} }
@ -310,7 +310,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
} }
tcx.sess.span_err(span, tcx.sess.span_err(span,
fmt!("pattern does not mention field `%s`", fmt!("pattern does not mention field `%s`",
*tcx.sess.str_of(field.ident))); tcx.sess.str_of(field.ident)));
} }
} }
} }

View File

@ -490,7 +490,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
if pat_util::pat_is_binding(fcx.ccx.tcx.def_map, p) => { if pat_util::pat_is_binding(fcx.ccx.tcx.def_map, p) => {
assign(p.id, None); assign(p.id, None);
debug!("Pattern binding %s is assigned to %s", debug!("Pattern binding %s is assigned to %s",
*tcx.sess.str_of(path.idents[0]), tcx.sess.str_of(path.idents[0]),
fcx.infcx().ty_to_str( fcx.infcx().ty_to_str(
fcx.inh.locals.get_copy(&p.id))); fcx.inh.locals.get_copy(&p.id)));
} }
@ -557,7 +557,7 @@ pub fn check_no_duplicate_fields(tcx: ty::ctxt,
match orig_sp { match orig_sp {
Some(orig_sp) => { Some(orig_sp) => {
tcx.sess.span_err(sp, fmt!("Duplicate field name %s in record type declaration", tcx.sess.span_err(sp, fmt!("Duplicate field name %s in record type declaration",
*tcx.sess.str_of(id))); tcx.sess.str_of(id)));
tcx.sess.span_note(orig_sp, "First declaration of this field occurred here"); tcx.sess.span_note(orig_sp, "First declaration of this field occurred here");
break; break;
} }
@ -599,7 +599,7 @@ pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) {
ast::item_impl(_, _, _, ref ms) => { ast::item_impl(_, _, _, ref ms) => {
let rp = ccx.tcx.region_paramd_items.find(&it.id).map_consume(|x| *x); let rp = ccx.tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
debug!("item_impl %s with id %d rp %?", debug!("item_impl %s with id %d rp %?",
*ccx.tcx.sess.str_of(it.ident), it.id, rp); ccx.tcx.sess.str_of(it.ident), it.id, rp);
for ms.each |m| { for ms.each |m| {
check_method(ccx, *m); check_method(ccx, *m);
} }
@ -1396,7 +1396,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
fmt!("type `%s` does not implement any method in scope \ fmt!("type `%s` does not implement any method in scope \
named `%s`", named `%s`",
actual, actual,
*fcx.ccx.tcx.sess.str_of(method_name)) fcx.ccx.tcx.sess.str_of(method_name))
}, },
expr_t, expr_t,
None); None);
@ -1772,7 +1772,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
|actual| { |actual| {
fmt!("attempted to take value of method `%s` on type `%s` \ fmt!("attempted to take value of method `%s` on type `%s` \
(try writing an anonymous function)", (try writing an anonymous function)",
*tcx.sess.str_of(field), actual) tcx.sess.str_of(field), actual)
}, },
expr_t, None); expr_t, None);
} }
@ -1783,7 +1783,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
|actual| { |actual| {
fmt!("attempted access of field `%s` on type `%s`, \ fmt!("attempted access of field `%s` on type `%s`, \
but no field with that name was found", but no field with that name was found",
*tcx.sess.str_of(field), actual) tcx.sess.str_of(field), actual)
}, },
expr_t, None); expr_t, None);
} }
@ -1821,14 +1821,14 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
tcx.sess.span_err( tcx.sess.span_err(
field.span, field.span,
fmt!("structure has no field named `%s`", fmt!("structure has no field named `%s`",
*tcx.sess.str_of(field.node.ident))); tcx.sess.str_of(field.node.ident)));
error_happened = true; error_happened = true;
} }
Some((_, true)) => { Some((_, true)) => {
tcx.sess.span_err( tcx.sess.span_err(
field.span, field.span,
fmt!("field `%s` specified more than once", fmt!("field `%s` specified more than once",
*tcx.sess.str_of(field.node.ident))); tcx.sess.str_of(field.node.ident)));
error_happened = true; error_happened = true;
} }
Some((field_id, false)) => { Some((field_id, false)) => {
@ -1862,7 +1862,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let (_, seen) = *class_field_map.get(&name); let (_, seen) = *class_field_map.get(&name);
if !seen { if !seen {
missing_fields.push( missing_fields.push(
~"`" + *tcx.sess.str_of(name) + "`"); ~"`" + tcx.sess.str_of(name) + "`");
} }
} }
@ -3424,7 +3424,7 @@ pub fn check_bounds_are_used(ccx: @mut CrateCtxt,
if !*b { if !*b {
ccx.tcx.sess.span_err( ccx.tcx.sess.span_err(
span, fmt!("type parameter `%s` is unused", span, fmt!("type parameter `%s` is unused",
*ccx.tcx.sess.str_of(tps.get(i).ident))); ccx.tcx.sess.str_of(tps.get(i).ident)));
} }
} }
} }
@ -3435,14 +3435,15 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
} }
let tcx = ccx.tcx; let tcx = ccx.tcx;
let (n_tps, inputs, output) = match *ccx.tcx.sess.str_of(it.ident) { let str = ccx.tcx.sess.str_of(it.ident);
~"size_of" | let (n_tps, inputs, output) = match str.as_slice() {
~"pref_align_of" | ~"min_align_of" => (1u, ~[], ty::mk_uint()), "size_of" |
~"init" => (1u, ~[], param(ccx, 0u)), "pref_align_of" | "min_align_of" => (1u, ~[], ty::mk_uint()),
~"uninit" => (1u, ~[], param(ccx, 0u)), "init" => (1u, ~[], param(ccx, 0u)),
~"forget" => (1u, ~[ param(ccx, 0) ], ty::mk_nil()), "uninit" => (1u, ~[], param(ccx, 0u)),
~"transmute" => (2, ~[ param(ccx, 0) ], param(ccx, 1)), "forget" => (1u, ~[ param(ccx, 0) ], ty::mk_nil()),
~"move_val" | ~"move_val_init" => { "transmute" => (2, ~[ param(ccx, 0) ], param(ccx, 1)),
"move_val" | "move_val_init" => {
(1u, (1u,
~[ ~[
ty::mk_mut_rptr(tcx, ty::re_bound(ty::br_anon(0)), param(ccx, 0)), ty::mk_mut_rptr(tcx, ty::re_bound(ty::br_anon(0)), param(ccx, 0)),
@ -3450,9 +3451,9 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
], ],
ty::mk_nil()) ty::mk_nil())
} }
~"needs_drop" => (1u, ~[], ty::mk_bool()), "needs_drop" => (1u, ~[], ty::mk_bool()),
~"atomic_cxchg" | ~"atomic_cxchg_acq"| ~"atomic_cxchg_rel" => { "atomic_cxchg" | "atomic_cxchg_acq"| "atomic_cxchg_rel" => {
(0, (0,
~[ ~[
ty::mk_mut_rptr(tcx, ty::mk_mut_rptr(tcx,
@ -3463,14 +3464,14 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
], ],
ty::mk_int()) ty::mk_int())
} }
~"atomic_load" | ~"atomic_load_acq" => { "atomic_load" | "atomic_load_acq" => {
(0, (0,
~[ ~[
ty::mk_imm_rptr(tcx, ty::re_bound(ty::br_anon(0)), ty::mk_int()) ty::mk_imm_rptr(tcx, ty::re_bound(ty::br_anon(0)), ty::mk_int())
], ],
ty::mk_int()) ty::mk_int())
} }
~"atomic_store" | ~"atomic_store_rel" => { "atomic_store" | "atomic_store_rel" => {
(0, (0,
~[ ~[
ty::mk_mut_rptr(tcx, ty::re_bound(ty::br_anon(0)), ty::mk_int()), ty::mk_mut_rptr(tcx, ty::re_bound(ty::br_anon(0)), ty::mk_int()),
@ -3478,9 +3479,9 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
], ],
ty::mk_nil()) ty::mk_nil())
} }
~"atomic_xchg" | ~"atomic_xadd" | ~"atomic_xsub" | "atomic_xchg" | "atomic_xadd" | "atomic_xsub" |
~"atomic_xchg_acq" | ~"atomic_xadd_acq" | ~"atomic_xsub_acq" | "atomic_xchg_acq" | "atomic_xadd_acq" | "atomic_xsub_acq" |
~"atomic_xchg_rel" | ~"atomic_xadd_rel" | ~"atomic_xsub_rel" => { "atomic_xchg_rel" | "atomic_xadd_rel" | "atomic_xsub_rel" => {
(0, (0,
~[ ~[
ty::mk_mut_rptr(tcx, ty::re_bound(ty::br_anon(0)), ty::mk_int()), ty::mk_mut_rptr(tcx, ty::re_bound(ty::br_anon(0)), ty::mk_int()),
@ -3489,11 +3490,11 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
ty::mk_int()) ty::mk_int())
} }
~"get_tydesc" => { "get_tydesc" => {
// FIXME (#3730): return *intrinsic::tydesc, not *() // FIXME (#3730): return *intrinsic::tydesc, not *()
(1u, ~[], ty::mk_nil_ptr(ccx.tcx)) (1u, ~[], ty::mk_nil_ptr(ccx.tcx))
} }
~"visit_tydesc" => { "visit_tydesc" => {
let tydesc_name = special_idents::tydesc; let tydesc_name = special_idents::tydesc;
assert!(tcx.intrinsic_defs.contains_key(&tydesc_name)); assert!(tcx.intrinsic_defs.contains_key(&tydesc_name));
let (_, tydesc_ty) = tcx.intrinsic_defs.get_copy(&tydesc_name); let (_, tydesc_ty) = tcx.intrinsic_defs.get_copy(&tydesc_name);
@ -3504,7 +3505,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
}); });
(0, ~[ td_ptr, visitor_object_ty ], ty::mk_nil()) (0, ~[ td_ptr, visitor_object_ty ], ty::mk_nil())
} }
~"frame_address" => { "frame_address" => {
let fty = ty::mk_closure(ccx.tcx, ty::ClosureTy { let fty = ty::mk_closure(ccx.tcx, ty::ClosureTy {
purity: ast::impure_fn, purity: ast::impure_fn,
sigil: ast::BorrowedSigil, sigil: ast::BorrowedSigil,
@ -3519,10 +3520,10 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
}); });
(0u, ~[fty], ty::mk_nil()) (0u, ~[fty], ty::mk_nil())
} }
~"morestack_addr" => { "morestack_addr" => {
(0u, ~[], ty::mk_nil_ptr(ccx.tcx)) (0u, ~[], ty::mk_nil_ptr(ccx.tcx))
} }
~"memcpy32" => { "memcpy32" => {
(1, (1,
~[ ~[
ty::mk_ptr(tcx, ty::mt { ty::mk_ptr(tcx, ty::mt {
@ -3537,7 +3538,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
], ],
ty::mk_nil()) ty::mk_nil())
} }
~"memcpy64" => { "memcpy64" => {
(1, (1,
~[ ~[
ty::mk_ptr(tcx, ty::mt { ty::mk_ptr(tcx, ty::mt {
@ -3552,7 +3553,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
], ],
ty::mk_nil()) ty::mk_nil())
} }
~"memmove32" => { "memmove32" => {
(1, (1,
~[ ~[
ty::mk_ptr(tcx, ty::mt { ty::mk_ptr(tcx, ty::mt {
@ -3567,7 +3568,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
], ],
ty::mk_nil()) ty::mk_nil())
} }
~"memmove64" => { "memmove64" => {
(1, (1,
~[ ~[
ty::mk_ptr(tcx, ty::mt { ty::mk_ptr(tcx, ty::mt {
@ -3582,7 +3583,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
], ],
ty::mk_nil()) ty::mk_nil())
} }
~"memset32" => { "memset32" => {
(1, (1,
~[ ~[
ty::mk_ptr(tcx, ty::mt { ty::mk_ptr(tcx, ty::mt {
@ -3594,7 +3595,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
], ],
ty::mk_nil()) ty::mk_nil())
} }
~"memset64" => { "memset64" => {
(1, (1,
~[ ~[
ty::mk_ptr(tcx, ty::mt { ty::mk_ptr(tcx, ty::mt {
@ -3606,75 +3607,75 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
], ],
ty::mk_nil()) ty::mk_nil())
} }
~"sqrtf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "sqrtf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"sqrtf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "sqrtf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"powif32" => { "powif32" => {
(0, (0,
~[ ty::mk_f32(), ty::mk_i32() ], ~[ ty::mk_f32(), ty::mk_i32() ],
ty::mk_f32()) ty::mk_f32())
} }
~"powif64" => { "powif64" => {
(0, (0,
~[ ty::mk_f64(), ty::mk_i32() ], ~[ ty::mk_f64(), ty::mk_i32() ],
ty::mk_f64()) ty::mk_f64())
} }
~"sinf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "sinf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"sinf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "sinf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"cosf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "cosf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"cosf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "cosf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"powf32" => { "powf32" => {
(0, (0,
~[ ty::mk_f32(), ty::mk_f32() ], ~[ ty::mk_f32(), ty::mk_f32() ],
ty::mk_f32()) ty::mk_f32())
} }
~"powf64" => { "powf64" => {
(0, (0,
~[ ty::mk_f64(), ty::mk_f64() ], ~[ ty::mk_f64(), ty::mk_f64() ],
ty::mk_f64()) ty::mk_f64())
} }
~"expf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "expf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"expf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "expf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"exp2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "exp2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"exp2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "exp2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"logf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "logf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"logf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "logf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"log10f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "log10f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"log10f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "log10f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"log2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "log2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"log2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "log2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"fmaf32" => { "fmaf32" => {
(0, (0,
~[ ty::mk_f32(), ty::mk_f32(), ty::mk_f32() ], ~[ ty::mk_f32(), ty::mk_f32(), ty::mk_f32() ],
ty::mk_f32()) ty::mk_f32())
} }
~"fmaf64" => { "fmaf64" => {
(0, (0,
~[ ty::mk_f64(), ty::mk_f64(), ty::mk_f64() ], ~[ ty::mk_f64(), ty::mk_f64(), ty::mk_f64() ],
ty::mk_f64()) ty::mk_f64())
} }
~"fabsf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "fabsf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"fabsf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "fabsf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"floorf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "floorf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"floorf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "floorf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"ceilf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "ceilf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"ceilf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "ceilf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"truncf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()), "truncf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
~"truncf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()), "truncf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
~"ctpop8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()), "ctpop8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
~"ctpop16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()), "ctpop16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
~"ctpop32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()), "ctpop32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
~"ctpop64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()), "ctpop64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
~"ctlz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()), "ctlz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
~"ctlz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()), "ctlz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
~"ctlz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()), "ctlz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
~"ctlz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()), "ctlz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
~"cttz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()), "cttz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
~"cttz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()), "cttz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
~"cttz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()), "cttz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
~"cttz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()), "cttz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
~"bswap16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()), "bswap16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
~"bswap32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()), "bswap32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
~"bswap64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()), "bswap64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
ref other => { ref other => {
tcx.sess.span_err(it.span, tcx.sess.span_err(it.span,
fmt!("unrecognized intrinsic function: `%s`", fmt!("unrecognized intrinsic function: `%s`",
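`check_intrinsic_type` above binds the interned name once (`let str = ...`) and then matches `str.as_slice()` against a table mapping each intrinsic to its type-parameter count, argument types, and return type. A stripped-down sketch of that table shape, using plain strings as hypothetical type descriptors rather than the compiler's `ty::t`:

    // (number of type parameters, argument types, return type)
    type Sig = (usize, &'static [&'static str], &'static str);

    fn intrinsic_sig(name: &str) -> Option<Sig> {
        match name {
            "size_of" | "pref_align_of" | "min_align_of" => Some((1, &[], "uint")),
            "transmute" => Some((2, &["T"], "U")),
            "sqrtf32" | "sinf32" | "cosf32" => Some((0, &["f32"], "f32")),
            "ctpop8" | "ctlz8" | "cttz8" => Some((0, &["i8"], "i8")),
            _ => None, // reported as an unrecognized intrinsic
        }
    }

    fn main() {
        let (n_tps, args, ret) = intrinsic_sig("sqrtf32").unwrap();
        assert_eq!((n_tps, args.len(), ret), (0, 1, "f32"));
    }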

View File

@ -251,7 +251,7 @@ impl CoherenceChecker {
if associated_traits.len() == 0 { if associated_traits.len() == 0 {
debug!("(checking implementation) no associated traits for item \ debug!("(checking implementation) no associated traits for item \
'%s'", '%s'",
*self.crate_context.tcx.sess.str_of(item.ident)); self.crate_context.tcx.sess.str_of(item.ident));
match get_base_type_def_id(self.inference_context, match get_base_type_def_id(self.inference_context,
item.span, item.span,
@ -278,7 +278,7 @@ impl CoherenceChecker {
associated_trait.ref_id); associated_trait.ref_id);
debug!("(checking implementation) adding impl for trait '%s', item '%s'", debug!("(checking implementation) adding impl for trait '%s', item '%s'",
trait_ref.repr(self.crate_context.tcx), trait_ref.repr(self.crate_context.tcx),
*self.crate_context.tcx.sess.str_of(item.ident)); self.crate_context.tcx.sess.str_of(item.ident));
self.instantiate_default_methods(item.id, trait_ref); self.instantiate_default_methods(item.id, trait_ref);
@ -401,7 +401,7 @@ impl CoherenceChecker {
// method to that entry. // method to that entry.
debug!("(checking implementation) adding method `%s` \ debug!("(checking implementation) adding method `%s` \
to entry for existing trait", to entry for existing trait",
*self.crate_context.tcx.sess.str_of( self.crate_context.tcx.sess.str_of(
provided_method_info.method_info.ident)); provided_method_info.method_info.ident));
mis.push(provided_method_info); mis.push(provided_method_info);
} }
@ -409,7 +409,7 @@ impl CoherenceChecker {
// If the trait doesn't have an entry yet, create one. // If the trait doesn't have an entry yet, create one.
debug!("(checking implementation) creating new entry \ debug!("(checking implementation) creating new entry \
for method `%s`", for method `%s`",
*self.crate_context.tcx.sess.str_of( self.crate_context.tcx.sess.str_of(
provided_method_info.method_info.ident)); provided_method_info.method_info.ident));
pmm.insert(local_def(impl_id), pmm.insert(local_def(impl_id),
@mut ~[provided_method_info]); @mut ~[provided_method_info]);
@ -742,7 +742,7 @@ impl CoherenceChecker {
tcx.sess.span_err(trait_ref_span, tcx.sess.span_err(trait_ref_span,
fmt!("missing method `%s`", fmt!("missing method `%s`",
*tcx.sess.str_of(method.ident))); tcx.sess.str_of(method.ident)));
} }
} }
@ -794,7 +794,7 @@ impl CoherenceChecker {
for all_provided_methods.each |provided_method| { for all_provided_methods.each |provided_method| {
debug!( debug!(
"(creating impl) adding provided method `%s` to impl", "(creating impl) adding provided method `%s` to impl",
*sess.str_of(provided_method.method_info.ident)); sess.str_of(provided_method.method_info.ident));
vec::push(all_methods, provided_method.method_info); vec::push(all_methods, provided_method.method_info);
} }
} }
@ -909,7 +909,7 @@ impl CoherenceChecker {
session.bug(fmt!( session.bug(fmt!(
"no base type for external impl \ "no base type for external impl \
with no trait: %s (type %s)!", with no trait: %s (type %s)!",
*session.str_of(implementation.ident), session.str_of(implementation.ident),
ty_to_str(self.crate_context.tcx,self_type.ty))); ty_to_str(self.crate_context.tcx,self_type.ty)));
} }
Some(_) => { Some(_) => {

View File

@ -471,7 +471,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
cm.span, cm.span,
fmt!("method `%s` has a `%s` declaration in the impl, \ fmt!("method `%s` has a `%s` declaration in the impl, \
but not in the trait", but not in the trait",
*tcx.sess.str_of(trait_m.ident), tcx.sess.str_of(trait_m.ident),
explicit_self_to_str(impl_m.explicit_self, tcx.sess.intr()))); explicit_self_to_str(impl_m.explicit_self, tcx.sess.intr())));
return; return;
} }
@ -480,7 +480,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
cm.span, cm.span,
fmt!("method `%s` has a `%s` declaration in the trait, \ fmt!("method `%s` has a `%s` declaration in the trait, \
but not in the impl", but not in the impl",
*tcx.sess.str_of(trait_m.ident), tcx.sess.str_of(trait_m.ident),
explicit_self_to_str(trait_m.explicit_self, tcx.sess.intr()))); explicit_self_to_str(trait_m.explicit_self, tcx.sess.intr())));
return; return;
} }
@ -496,7 +496,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
cm.span, cm.span,
fmt!("method `%s` has %u type %s, but its trait \ fmt!("method `%s` has %u type %s, but its trait \
declaration has %u type %s", declaration has %u type %s",
*tcx.sess.str_of(trait_m.ident), tcx.sess.str_of(trait_m.ident),
num_impl_m_type_params, num_impl_m_type_params,
pluralize(num_impl_m_type_params, ~"parameter"), pluralize(num_impl_m_type_params, ~"parameter"),
num_trait_m_type_params, num_trait_m_type_params,
@ -509,7 +509,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
cm.span, cm.span,
fmt!("method `%s` has %u parameter%s \ fmt!("method `%s` has %u parameter%s \
but the trait has %u", but the trait has %u",
*tcx.sess.str_of(trait_m.ident), tcx.sess.str_of(trait_m.ident),
impl_m.fty.sig.inputs.len(), impl_m.fty.sig.inputs.len(),
if impl_m.fty.sig.inputs.len() == 1 { "" } else { "s" }, if impl_m.fty.sig.inputs.len() == 1 { "" } else { "s" },
trait_m.fty.sig.inputs.len())); trait_m.fty.sig.inputs.len()));
@ -533,7 +533,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
which is not required by \ which is not required by \
the corresponding type parameter \ the corresponding type parameter \
in the trait declaration", in the trait declaration",
*tcx.sess.str_of(trait_m.ident), tcx.sess.str_of(trait_m.ident),
i, i,
extra_bounds.user_string(tcx))); extra_bounds.user_string(tcx)));
return; return;
@ -551,7 +551,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
type parameter %u has %u trait %s, but the \ type parameter %u has %u trait %s, but the \
corresponding type parameter in \ corresponding type parameter in \
the trait declaration has %u trait %s", the trait declaration has %u trait %s",
*tcx.sess.str_of(trait_m.ident), tcx.sess.str_of(trait_m.ident),
i, impl_param_def.bounds.trait_bounds.len(), i, impl_param_def.bounds.trait_bounds.len(),
pluralize(impl_param_def.bounds.trait_bounds.len(), pluralize(impl_param_def.bounds.trait_bounds.len(),
~"bound"), ~"bound"),
@ -652,7 +652,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
tcx.sess.span_err( tcx.sess.span_err(
cm.span, cm.span,
fmt!("method `%s` has an incompatible type: %s", fmt!("method `%s` has an incompatible type: %s",
*tcx.sess.str_of(trait_m.ident), tcx.sess.str_of(trait_m.ident),
ty::type_err_to_str(tcx, terr))); ty::type_err_to_str(tcx, terr)));
ty::note_and_explain_type_err(tcx, terr); ty::note_and_explain_type_err(tcx, terr);
} }
@ -700,7 +700,7 @@ pub fn check_methods_against_trait(ccx: &CrateCtxt,
tcx.sess.span_err( tcx.sess.span_err(
impl_m.span, impl_m.span,
fmt!("method `%s` is not a member of trait `%s`", fmt!("method `%s` is not a member of trait `%s`",
*tcx.sess.str_of(impl_m.mty.ident), tcx.sess.str_of(impl_m.mty.ident),
path_to_str(a_trait_ty.path, tcx.sess.intr()))); path_to_str(a_trait_ty.path, tcx.sess.intr())));
} }
} }
@ -829,7 +829,7 @@ pub fn convert(ccx: &CrateCtxt, it: @ast::item) {
let tcx = ccx.tcx; let tcx = ccx.tcx;
let rp = tcx.region_paramd_items.find(&it.id).map_consume(|x| *x); let rp = tcx.region_paramd_items.find(&it.id).map_consume(|x| *x);
debug!("convert: item %s with id %d rp %?", debug!("convert: item %s with id %d rp %?",
*tcx.sess.str_of(it.ident), it.id, rp); tcx.sess.str_of(it.ident), it.id, rp);
match it.node { match it.node {
// These don't define types. // These don't define types.
ast::item_foreign_mod(_) | ast::item_mod(_) => {} ast::item_foreign_mod(_) | ast::item_mod(_) => {}
@ -1084,7 +1084,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: @ast::item)
ty: ty::mk_bare_fn(ccx.tcx, tofd) ty: ty::mk_bare_fn(ccx.tcx, tofd)
}; };
debug!("type of %s (id %d) is %s", debug!("type of %s (id %d) is %s",
*tcx.sess.str_of(it.ident), tcx.sess.str_of(it.ident),
it.id, it.id,
ppaux::ty_to_str(tcx, tpt.ty)); ppaux::ty_to_str(tcx, tpt.ty));
ccx.tcx.tcache.insert(local_def(it.id), tpt); ccx.tcx.tcache.insert(local_def(it.id), tpt);

View File

@ -236,9 +236,9 @@ pub fn run_compiler(args: &~[~str], demitter: diagnostic::Emitter) {
::core::logging::console_off(); ::core::logging::console_off();
let mut args = /*bad*/copy *args; let mut args = /*bad*/copy *args;
let binary = @args.shift(); let binary = args.shift().to_managed();
if args.is_empty() { usage(*binary); return; } if args.is_empty() { usage(binary); return; }
let matches = let matches =
&match getopts::groups::getopts(args, optgroups()) { &match getopts::groups::getopts(args, optgroups()) {
@ -249,7 +249,7 @@ pub fn run_compiler(args: &~[~str], demitter: diagnostic::Emitter) {
}; };
if opt_present(matches, "h") || opt_present(matches, "help") { if opt_present(matches, "h") || opt_present(matches, "help") {
usage(*binary); usage(binary);
return; return;
} }
@ -276,16 +276,16 @@ pub fn run_compiler(args: &~[~str], demitter: diagnostic::Emitter) {
} }
if opt_present(matches, "v") || opt_present(matches, "version") { if opt_present(matches, "v") || opt_present(matches, "version") {
version(*binary); version(binary);
return; return;
} }
let input = match matches.free.len() { let input = match matches.free.len() {
0u => early_error(demitter, ~"no input filename given"), 0u => early_error(demitter, ~"no input filename given"),
1u => { 1u => {
let ifile = /*bad*/copy matches.free[0]; let ifile = matches.free[0].as_slice();
if ifile == ~"-" { if "-" == ifile {
let src = str::from_bytes(io::stdin().read_whole_stream()); let src = str::from_bytes(io::stdin().read_whole_stream());
str_input(src) str_input(src.to_managed())
} else { } else {
file_input(Path(ifile)) file_input(Path(ifile))
} }

View File

@ -159,7 +159,7 @@ pub fn bound_region_to_str_space(cx: ctxt,
if cx.sess.verbose() { return fmt!("%s%? ", prefix, br); } if cx.sess.verbose() { return fmt!("%s%? ", prefix, br); }
match br { match br {
br_named(id) => fmt!("%s'%s ", prefix, *cx.sess.str_of(id)), br_named(id) => fmt!("%s'%s ", prefix, cx.sess.str_of(id)),
br_self => fmt!("%s'self ", prefix), br_self => fmt!("%s'self ", prefix),
br_anon(_) => prefix.to_str(), br_anon(_) => prefix.to_str(),
br_fresh(_) => prefix.to_str(), br_fresh(_) => prefix.to_str(),
@ -323,7 +323,7 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
match ident { match ident {
Some(i) => { Some(i) => {
s.push_char(' '); s.push_char(' ');
s.push_str(*cx.sess.str_of(i)); s.push_str(cx.sess.str_of(i));
} }
_ => { } _ => { }
} }
@ -389,7 +389,7 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
&m.fty.sig) + ";" &m.fty.sig) + ";"
} }
fn field_to_str(cx: ctxt, f: field) -> ~str { fn field_to_str(cx: ctxt, f: field) -> ~str {
return *cx.sess.str_of(f.ident) + ": " + mt_to_str(cx, &f.mt); return fmt!("%s: %s", cx.sess.str_of(f.ident), mt_to_str(cx, &f.mt));
} }
// if there is an id, print that instead of the structural type: // if there is an id, print that instead of the structural type:
@ -656,7 +656,7 @@ impl Repr for ty::Method {
impl Repr for ast::ident { impl Repr for ast::ident {
fn repr(&self, _tcx: ctxt) -> ~str { fn repr(&self, _tcx: ctxt) -> ~str {
copy *token::ident_to_str(self) token::ident_to_str(self).to_owned()
} }
} }

View File

@ -41,7 +41,7 @@ pub struct Ctxt {
type SrvOwner<'self,T> = &'self fn(srv: Srv) -> T; type SrvOwner<'self,T> = &'self fn(srv: Srv) -> T;
pub type CtxtHandler<T> = ~fn(ctxt: Ctxt) -> T; pub type CtxtHandler<T> = ~fn(ctxt: Ctxt) -> T;
type Parser = ~fn(Session, s: ~str) -> @ast::crate; type Parser = ~fn(Session, s: @str) -> @ast::crate;
enum Msg { enum Msg {
HandleRequest(~fn(Ctxt)), HandleRequest(~fn(Ctxt)),
@ -68,7 +68,7 @@ fn run<T>(owner: SrvOwner<T>, source: ~str, parse: Parser) -> T {
let source = Cell::new(source); let source = Cell::new(source);
let parse = Cell::new(parse); let parse = Cell::new(parse);
do task::spawn { do task::spawn {
act(&po, source.take(), parse.take()); act(&po, source.take().to_managed(), parse.take());
} }
let srv_ = Srv { let srv_ = Srv {
@ -80,12 +80,12 @@ fn run<T>(owner: SrvOwner<T>, source: ~str, parse: Parser) -> T {
res res
} }
fn act(po: &Port<Msg>, source: ~str, parse: Parser) { fn act(po: &Port<Msg>, source: @str, parse: Parser) {
let sess = build_session(); let sess = build_session();
let ctxt = build_ctxt( let ctxt = build_ctxt(
sess, sess,
parse(sess, copy source) parse(sess, source)
); );
let mut keep_going = true; let mut keep_going = true;

View File

@ -41,13 +41,13 @@ pub fn parse_crate(attrs: ~[ast::attribute]) -> CrateAttrs {
let name = attr::last_meta_item_value_str_by_name(link_metas, "name"); let name = attr::last_meta_item_value_str_by_name(link_metas, "name");
CrateAttrs { CrateAttrs {
name: name.map(|s| copy **s) name: name.map(|s| s.to_owned())
} }
} }
pub fn parse_desc(attrs: ~[ast::attribute]) -> Option<~str> { pub fn parse_desc(attrs: ~[ast::attribute]) -> Option<~str> {
let doc_strs = do doc_metas(attrs).filter_mapped |meta| { let doc_strs = do doc_metas(attrs).filter_mapped |meta| {
attr::get_meta_item_value_str(*meta).map(|s| copy **s) attr::get_meta_item_value_str(*meta).map(|s| s.to_owned())
}; };
if doc_strs.is_empty() { if doc_strs.is_empty() {
None None

View File

@ -25,7 +25,7 @@ use syntax::parse::token;
// thread-local data // thread-local data
// Hack-Becomes-Feature: using thread-local-state everywhere... // Hack-Becomes-Feature: using thread-local-state everywhere...
pub fn to_str(id: ast::ident) -> ~str { pub fn to_str(id: ast::ident) -> ~str {
return copy *ident_to_str(&id); /* bad */ ident_to_str(&id).to_owned()
} }
// get rid of this pointless function: // get rid of this pointless function:

View File

@ -23,9 +23,9 @@ pub fn from_file(file: &Path) -> @ast::crate {
file, ~[], parse::new_parse_sess(None)) file, ~[], parse::new_parse_sess(None))
} }
pub fn from_str(source: ~str) -> @ast::crate { pub fn from_str(source: @str) -> @ast::crate {
parse::parse_crate_from_source_str( parse::parse_crate_from_source_str(
~"-", @source, ~[], parse::new_parse_sess(None)) @"-", source, ~[], parse::new_parse_sess(None))
} }
pub fn from_file_sess(sess: session::Session, file: &Path) -> @ast::crate { pub fn from_file_sess(sess: session::Session, file: &Path) -> @ast::crate {
@ -33,11 +33,11 @@ pub fn from_file_sess(sess: session::Session, file: &Path) -> @ast::crate {
file, cfg(sess, file_input(copy *file)), sess.parse_sess) file, cfg(sess, file_input(copy *file)), sess.parse_sess)
} }
pub fn from_str_sess(sess: session::Session, source: ~str) -> @ast::crate { pub fn from_str_sess(sess: session::Session, source: @str) -> @ast::crate {
parse::parse_crate_from_source_str( parse::parse_crate_from_source_str(
~"-", @copy source, cfg(sess, str_input(source)), sess.parse_sess) @"-", source, cfg(sess, str_input(source)), sess.parse_sess)
} }
fn cfg(sess: session::Session, input: driver::input) -> ast::crate_cfg { fn cfg(sess: session::Session, input: driver::input) -> ast::crate_cfg {
driver::build_configuration(sess, @~"rustdoc", &input) driver::build_configuration(sess, @"rustdoc", &input)
} }

View File

@ -117,7 +117,7 @@ fn record(mut repl: Repl, blk: &ast::blk, intr: @token::ident_interner) -> Repl
/// Run an input string in a Repl, returning the new Repl. /// Run an input string in a Repl, returning the new Repl.
fn run(repl: Repl, input: ~str) -> Repl { fn run(repl: Repl, input: ~str) -> Repl {
let binary = @copy repl.binary; let binary = repl.binary.to_managed();
let options = @session::options { let options = @session::options {
crate_type: session::unknown_crate, crate_type: session::unknown_crate,
binary: binary, binary: binary,
@ -130,7 +130,7 @@ fn run(repl: Repl, input: ~str) -> Repl {
let head = include_str!("wrapper.rs").to_owned(); let head = include_str!("wrapper.rs").to_owned();
let foot = fmt!("fn main() {\n%s\n%s\n\nprint({\n%s\n})\n}", let foot = fmt!("fn main() {\n%s\n%s\n\nprint({\n%s\n})\n}",
repl.view_items, repl.stmts, input); repl.view_items, repl.stmts, input);
let wrapped = driver::str_input(head + foot); let wrapped = driver::str_input((head + foot).to_managed());
debug!("inputting %s", head + foot); debug!("inputting %s", head + foot);
@ -186,7 +186,7 @@ fn run(repl: Repl, input: ~str) -> Repl {
fn compile_crate(src_filename: ~str, binary: ~str) -> Option<bool> { fn compile_crate(src_filename: ~str, binary: ~str) -> Option<bool> {
match do task::try { match do task::try {
let src_path = Path(src_filename); let src_path = Path(src_filename);
let binary = @copy binary; let binary = binary.to_managed();
let options = @session::options { let options = @session::options {
binary: binary, binary: binary,
addl_lib_search_paths: @mut ~[os::getcwd()], addl_lib_search_paths: @mut ~[os::getcwd()],

View File

@ -100,7 +100,7 @@ impl<'self> PkgScript<'self> {
/// a PkgScript that we can then execute /// a PkgScript that we can then execute
fn parse<'a>(script: Path, workspace: &Path, id: &'a PkgId) -> PkgScript<'a> { fn parse<'a>(script: Path, workspace: &Path, id: &'a PkgId) -> PkgScript<'a> {
// Get the executable name that was invoked // Get the executable name that was invoked
let binary = @copy os::args()[0]; let binary = os::args()[0].to_managed();
// Build the rustc session data structures to pass // Build the rustc session data structures to pass
// to the compiler // to the compiler
let options = @session::options { let options = @session::options {
@ -145,7 +145,7 @@ impl<'self> PkgScript<'self> {
let root = r.pop().pop().pop().pop(); // :-\ let root = r.pop().pop().pop().pop(); // :-\
debug!("Root is %s, calling compile_rest", root.to_str()); debug!("Root is %s, calling compile_rest", root.to_str());
let exe = self.build_dir.push(~"pkg" + util::exe_suffix()); let exe = self.build_dir.push(~"pkg" + util::exe_suffix());
let binary = @copy os::args()[0]; let binary = os::args()[0].to_managed();
util::compile_crate_from_input(&self.input, util::compile_crate_from_input(&self.input,
&self.build_dir, &self.build_dir,
sess, sess,

View File

@ -80,7 +80,7 @@ fn fold_mod(_ctx: @mut ReadyCtx,
fn strip_main(item: @ast::item) -> @ast::item { fn strip_main(item: @ast::item) -> @ast::item {
@ast::item { @ast::item {
attrs: do item.attrs.filtered |attr| { attrs: do item.attrs.filtered |attr| {
*attr::get_attr_name(attr) != ~"main" "main" != attr::get_attr_name(attr)
}, },
.. copy *item .. copy *item
} }
@ -109,7 +109,7 @@ fn fold_item(ctx: @mut ReadyCtx,
ast::meta_list(_, ref mis) => { ast::meta_list(_, ref mis) => {
for mis.each |mi| { for mis.each |mi| {
match mi.node { match mi.node {
ast::meta_word(cmd) => cmds.push(copy *cmd), ast::meta_word(cmd) => cmds.push(cmd.to_owned()),
_ => {} _ => {}
}; };
} }
@ -205,7 +205,7 @@ pub fn compile_input(ctxt: &Ctx,
// tjc: by default, use the package ID name as the link name // tjc: by default, use the package ID name as the link name
// not sure if we should support anything else // not sure if we should support anything else
let binary = @(copy os::args()[0]); let binary = os::args()[0].to_managed();
debug!("flags: %s", flags.connect(" ")); debug!("flags: %s", flags.connect(" "));
debug!("cfgs: %s", cfgs.connect(" ")); debug!("cfgs: %s", cfgs.connect(" "));
@ -270,11 +270,11 @@ pub fn compile_input(ctxt: &Ctx,
debug!("Injecting link name: %s", short_name_to_use); debug!("Injecting link name: %s", short_name_to_use);
crate = @codemap::respan(crate.span, ast::crate_ { crate = @codemap::respan(crate.span, ast::crate_ {
attrs: ~[mk_attr(@dummy_spanned( attrs: ~[mk_attr(@dummy_spanned(
meta_list(@~"link", meta_list(@"link",
~[@dummy_spanned(meta_name_value(@~"name", ~[@dummy_spanned(meta_name_value(@"name",
mk_string_lit(@short_name_to_use))), mk_string_lit(short_name_to_use.to_managed()))),
@dummy_spanned(meta_name_value(@~"vers", @dummy_spanned(meta_name_value(@"vers",
mk_string_lit(@pkg_id.version.to_str_nonempty())))])))], mk_string_lit(pkg_id.version.to_str_nonempty().to_managed())))])))],
..copy crate.node}); ..copy crate.node});
} }
@ -363,24 +363,24 @@ fn find_and_install_dependencies(ctxt: &Ctx,
None => () None => ()
}; };
let lib_name = sess.str_of(lib_ident); let lib_name = sess.str_of(lib_ident);
match find_library_in_search_path(my_ctxt.sysroot_opt, *lib_name) { match find_library_in_search_path(my_ctxt.sysroot_opt, lib_name) {
Some(installed_path) => { Some(installed_path) => {
debug!("It exists: %s", installed_path.to_str()); debug!("It exists: %s", installed_path.to_str());
} }
None => { None => {
// Try to install it // Try to install it
let pkg_id = PkgId::new(*lib_name); let pkg_id = PkgId::new(lib_name);
my_ctxt.install(&my_workspace, &pkg_id); my_ctxt.install(&my_workspace, &pkg_id);
let built_lib = let built_lib =
built_library_in_workspace(&pkg_id, built_library_in_workspace(&pkg_id,
&my_workspace).expect(fmt!("find_and_install_dependencies: \ &my_workspace).expect(fmt!("find_and_install_dependencies: \
I thought I already built %s, but the library doesn't seem \ I thought I already built %s, but the library doesn't seem \
to exist", *lib_name)); to exist", lib_name));
// Also, add an additional search path // Also, add an additional search path
let installed_path = target_library_in_workspace(&my_workspace, let installed_path = target_library_in_workspace(&my_workspace,
&built_lib).pop(); &built_lib).pop();
debug!("Great, I installed %s, and it's in %s", debug!("Great, I installed %s, and it's in %s",
*lib_name, installed_path.to_str()); lib_name, installed_path.to_str());
save(installed_path); save(installed_path);
} }
} }
@ -415,7 +415,7 @@ pub fn link_exe(src: &Path, dest: &Path) -> bool {
} }
} }
pub fn mk_string_lit(s: @~str) -> ast::lit { pub fn mk_string_lit(s: @str) -> ast::lit {
spanned { spanned {
node: ast::lit_str(s), node: ast::lit_str(s),
span: dummy_sp() span: dummy_sp()
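`compile_input` above injects a `#[link(name = "...", vers = "...")]` attribute by nesting `meta_name_value` items inside a `meta_list`, with `mk_string_lit` now taking `@str`. A toy model of that nesting, using hypothetical types rather than the real AST:

    #[derive(Debug)]
    enum Meta {
        NameValue(&'static str, String),
        List(&'static str, Vec<Meta>),
    }

    // Mirrors the injected #[link(name = "...", vers = "...")] attribute.
    fn link_attr(short_name: &str, vers: &str) -> Meta {
        Meta::List(
            "link",
            vec![
                Meta::NameValue("name", short_name.to_string()),
                Meta::NameValue("vers", vers.to_string()),
            ],
        )
    }

    fn main() {
        println!("{:?}", link_attr("mypkg", "0.1"));
    }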

View File

@ -84,7 +84,7 @@ pub type Mrk = uint;
impl<S:Encoder> Encodable<S> for ident { impl<S:Encoder> Encodable<S> for ident {
fn encode(&self, s: &mut S) { fn encode(&self, s: &mut S) {
s.emit_str(*interner_get(self.name)); s.emit_str(interner_get(self.name));
} }
} }
@ -228,9 +228,9 @@ pub type meta_item = spanned<meta_item_>;
#[deriving(Eq, Encodable, Decodable)] #[deriving(Eq, Encodable, Decodable)]
pub enum meta_item_ { pub enum meta_item_ {
meta_word(@~str), meta_word(@str),
meta_list(@~str, ~[@meta_item]), meta_list(@str, ~[@meta_item]),
meta_name_value(@~str, lit), meta_name_value(@str, lit),
} }
pub type blk = spanned<blk_>; pub type blk = spanned<blk_>;
@ -634,12 +634,12 @@ pub type lit = spanned<lit_>;
#[deriving(Eq, Encodable, Decodable)] #[deriving(Eq, Encodable, Decodable)]
pub enum lit_ { pub enum lit_ {
lit_str(@~str), lit_str(@str),
lit_int(i64, int_ty), lit_int(i64, int_ty),
lit_uint(u64, uint_ty), lit_uint(u64, uint_ty),
lit_int_unsuffixed(i64), lit_int_unsuffixed(i64),
lit_float(@~str, float_ty), lit_float(@str, float_ty),
lit_float_unsuffixed(@~str), lit_float_unsuffixed(@str),
lit_nil, lit_nil,
lit_bool(bool), lit_bool(bool),
} }
@ -819,10 +819,10 @@ pub enum asm_dialect {
#[deriving(Eq, Encodable, Decodable)] #[deriving(Eq, Encodable, Decodable)]
pub struct inline_asm { pub struct inline_asm {
asm: @~str, asm: @str,
clobbers: @~str, clobbers: @str,
inputs: ~[(@~str, @expr)], inputs: ~[(@str, @expr)],
outputs: ~[(@~str, @expr)], outputs: ~[(@str, @expr)],
volatile: bool, volatile: bool,
alignstack: bool, alignstack: bool,
dialect: asm_dialect dialect: asm_dialect
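The AST enums above (`meta_item_`, `lit_`, `inline_asm`) now carry `@str` payloads: shared, immutable strings that are passed around by bumping a reference count instead of deep-copying a boxed `~str`. The closest modern analogue is `Rc<str>`; a small sketch of the sharing behaviour, with a hypothetical literal type:

    use std::rc::Rc;

    #[derive(Clone, Debug, PartialEq)]
    struct StrLit {
        // Shared, immutable payload: cloning the literal bumps a refcount
        // instead of copying the character data (the role @str plays above).
        value: Rc<str>,
    }

    fn main() {
        let value: Rc<str> = Rc::from("hello");
        let a = StrLit { value: value.clone() };
        let b = a.clone(); // no string bytes copied
        assert_eq!(a, b);
        assert_eq!(Rc::strong_count(&value), 3); // value, a, b share one buffer
    }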

View File

@ -58,8 +58,8 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: &str, itr: @ident_interner)
-> ~str { -> ~str {
let strs = do p.map |e| { let strs = do p.map |e| {
match *e { match *e {
path_mod(s) => copy *itr.get(s.name), path_mod(s) => itr.get(s.name),
path_name(s) => copy *itr.get(s.name) path_name(s) => itr.get(s.name)
} }
}; };
strs.connect(sep) strs.connect(sep)
@ -68,9 +68,9 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: &str, itr: @ident_interner)
pub fn path_ident_to_str(p: &path, i: ident, itr: @ident_interner) -> ~str { pub fn path_ident_to_str(p: &path, i: ident, itr: @ident_interner) -> ~str {
if p.is_empty() { if p.is_empty() {
//FIXME /* FIXME (#2543) */ copy *i //FIXME /* FIXME (#2543) */ copy *i
copy *itr.get(i.name) itr.get(i.name).to_owned()
} else { } else {
fmt!("%s::%s", path_to_str(*p, itr), *itr.get(i.name)) fmt!("%s::%s", path_to_str(*p, itr), itr.get(i.name))
} }
} }
@ -80,8 +80,8 @@ pub fn path_to_str(p: &[path_elt], itr: @ident_interner) -> ~str {
pub fn path_elt_to_str(pe: path_elt, itr: @ident_interner) -> ~str { pub fn path_elt_to_str(pe: path_elt, itr: @ident_interner) -> ~str {
match pe { match pe {
path_mod(s) => copy *itr.get(s.name), path_mod(s) => itr.get(s.name).to_owned(),
path_name(s) => copy *itr.get(s.name) path_name(s) => itr.get(s.name).to_owned()
} }
} }
@ -359,16 +359,16 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
} }
Some(&node_method(m, _, path)) => { Some(&node_method(m, _, path)) => {
fmt!("method %s in %s (id=%?)", fmt!("method %s in %s (id=%?)",
*itr.get(m.ident.name), path_to_str(*path, itr), id) itr.get(m.ident.name), path_to_str(*path, itr), id)
} }
Some(&node_trait_method(ref tm, _, path)) => { Some(&node_trait_method(ref tm, _, path)) => {
let m = ast_util::trait_method_to_ty_method(&**tm); let m = ast_util::trait_method_to_ty_method(&**tm);
fmt!("method %s in %s (id=%?)", fmt!("method %s in %s (id=%?)",
*itr.get(m.ident.name), path_to_str(*path, itr), id) itr.get(m.ident.name), path_to_str(*path, itr), id)
} }
Some(&node_variant(ref variant, _, path)) => { Some(&node_variant(ref variant, _, path)) => {
fmt!("variant %s in %s (id=%?)", fmt!("variant %s in %s (id=%?)",
*itr.get(variant.node.name.name), path_to_str(*path, itr), id) itr.get(variant.node.name.name), path_to_str(*path, itr), id)
} }
Some(&node_expr(expr)) => { Some(&node_expr(expr)) => {
fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id) fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id)
@ -384,7 +384,7 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
fmt!("arg (id=%?)", id) fmt!("arg (id=%?)", id)
} }
Some(&node_local(ident)) => { Some(&node_local(ident)) => {
fmt!("local (id=%?, name=%s)", id, *itr.get(ident.name)) fmt!("local (id=%?, name=%s)", id, itr.get(ident.name))
} }
Some(&node_block(_)) => { Some(&node_block(_)) => {
fmt!("block") fmt!("block")
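
A sketch of the boundary this change draws, not part of the diff (itr and id are assumed to be an @ident_interner and an ident already in scope): interner lookups hand back the shared @str, and .to_owned() is called only where a ~str is genuinely required.

    let name: @str = itr.get(id.name);   // shared, no copy
    let owned: ~str = name.to_owned();   // allocate only when an owned string is needed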

View File

@ -28,7 +28,7 @@ use core::iterator::IteratorUtil;
pub fn path_name_i(idents: &[ident]) -> ~str { pub fn path_name_i(idents: &[ident]) -> ~str {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad") // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
idents.map(|i| copy *token::interner_get(i.name)).connect("::") idents.map(|i| token::interner_get(i.name)).connect("::")
} }
pub fn path_to_ident(p: @Path) -> ident { copy *p.idents.last() } pub fn path_to_ident(p: @Path) -> ident { copy *p.idents.last() }
@ -815,7 +815,7 @@ mod test {
assert_eq!(copy s,~[14]); assert_eq!(copy s,~[14]);
} }
// convert a list of uints to an @~[ident] // convert a list of uints to an @[ident]
// (ignores the interner completely) // (ignores the interner completely)
fn uints_to_idents (uints: &~[uint]) -> @~[ident] { fn uints_to_idents (uints: &~[uint]) -> @~[ident] {
@uints.map(|u|{ ident {name:*u, ctxt: empty_ctxt} }) @uints.map(|u|{ ident {name:*u, ctxt: empty_ctxt} })

View File

@ -26,23 +26,23 @@ use extra;
/* Constructors */ /* Constructors */
pub fn mk_name_value_item_str(name: @~str, value: @~str) pub fn mk_name_value_item_str(name: @str, value: @str)
-> @ast::meta_item { -> @ast::meta_item {
let value_lit = dummy_spanned(ast::lit_str(value)); let value_lit = dummy_spanned(ast::lit_str(value));
mk_name_value_item(name, value_lit) mk_name_value_item(name, value_lit)
} }
pub fn mk_name_value_item(name: @~str, value: ast::lit) pub fn mk_name_value_item(name: @str, value: ast::lit)
-> @ast::meta_item { -> @ast::meta_item {
@dummy_spanned(ast::meta_name_value(name, value)) @dummy_spanned(ast::meta_name_value(name, value))
} }
pub fn mk_list_item(name: @~str, items: ~[@ast::meta_item]) -> pub fn mk_list_item(name: @str, items: ~[@ast::meta_item]) ->
@ast::meta_item { @ast::meta_item {
@dummy_spanned(ast::meta_list(name, items)) @dummy_spanned(ast::meta_list(name, items))
} }
pub fn mk_word_item(name: @~str) -> @ast::meta_item { pub fn mk_word_item(name: @str) -> @ast::meta_item {
@dummy_spanned(ast::meta_word(name)) @dummy_spanned(ast::meta_word(name))
} }
@ -52,13 +52,13 @@ pub fn mk_attr(item: @ast::meta_item) -> ast::attribute {
is_sugared_doc: false }) is_sugared_doc: false })
} }
pub fn mk_sugared_doc_attr(text: ~str, pub fn mk_sugared_doc_attr(text: @str,
lo: BytePos, hi: BytePos) -> ast::attribute { lo: BytePos, hi: BytePos) -> ast::attribute {
let style = doc_comment_style(text); let style = doc_comment_style(text);
let lit = spanned(lo, hi, ast::lit_str(@text)); let lit = spanned(lo, hi, ast::lit_str(text));
let attr = ast::attribute_ { let attr = ast::attribute_ {
style: style, style: style,
value: @spanned(lo, hi, ast::meta_name_value(@~"doc", lit)), value: @spanned(lo, hi, ast::meta_name_value(@"doc", lit)),
is_sugared_doc: true is_sugared_doc: true
}; };
spanned(lo, hi, attr) spanned(lo, hi, attr)
@ -78,8 +78,8 @@ pub fn attr_metas(attrs: &[ast::attribute]) -> ~[@ast::meta_item] {
pub fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute { pub fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute {
if attr.node.is_sugared_doc { if attr.node.is_sugared_doc {
let comment = get_meta_item_value_str(attr.node.value).get(); let comment = get_meta_item_value_str(attr.node.value).get();
let meta = mk_name_value_item_str(@~"doc", let meta = mk_name_value_item_str(@"doc",
@strip_doc_comment_decoration(*comment)); strip_doc_comment_decoration(comment).to_managed());
mk_attr(meta) mk_attr(meta)
} else { } else {
*attr *attr
@ -88,11 +88,11 @@ pub fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute {
/* Accessors */ /* Accessors */
pub fn get_attr_name(attr: &ast::attribute) -> @~str { pub fn get_attr_name(attr: &ast::attribute) -> @str {
get_meta_item_name(attr.node.value) get_meta_item_name(attr.node.value)
} }
pub fn get_meta_item_name(meta: @ast::meta_item) -> @~str { pub fn get_meta_item_name(meta: @ast::meta_item) -> @str {
match meta.node { match meta.node {
ast::meta_word(n) => n, ast::meta_word(n) => n,
ast::meta_name_value(n, _) => n, ast::meta_name_value(n, _) => n,
@ -104,7 +104,7 @@ pub fn get_meta_item_name(meta: @ast::meta_item) -> @~str {
* Gets the string value if the meta_item is a meta_name_value variant * Gets the string value if the meta_item is a meta_name_value variant
* containing a string, otherwise none * containing a string, otherwise none
*/ */
pub fn get_meta_item_value_str(meta: @ast::meta_item) -> Option<@~str> { pub fn get_meta_item_value_str(meta: @ast::meta_item) -> Option<@str> {
match meta.node { match meta.node {
ast::meta_name_value(_, v) => { ast::meta_name_value(_, v) => {
match v.node { match v.node {
@ -130,7 +130,7 @@ pub fn get_meta_item_list(meta: @ast::meta_item)
* a tuple containing the name and string value, otherwise `none` * a tuple containing the name and string value, otherwise `none`
*/ */
pub fn get_name_value_str_pair(item: @ast::meta_item) pub fn get_name_value_str_pair(item: @ast::meta_item)
-> Option<(@~str, @~str)> { -> Option<(@str, @str)> {
match attr::get_meta_item_value_str(item) { match attr::get_meta_item_value_str(item) {
Some(value) => { Some(value) => {
let name = attr::get_meta_item_name(item); let name = attr::get_meta_item_name(item);
@ -147,7 +147,7 @@ pub fn get_name_value_str_pair(item: @ast::meta_item)
pub fn find_attrs_by_name(attrs: &[ast::attribute], name: &str) -> pub fn find_attrs_by_name(attrs: &[ast::attribute], name: &str) ->
~[ast::attribute] { ~[ast::attribute] {
do vec::filter_mapped(attrs) |a| { do vec::filter_mapped(attrs) |a| {
if name == *get_attr_name(a) { if name == get_attr_name(a) {
Some(*a) Some(*a)
} else { } else {
None None
@ -160,7 +160,7 @@ pub fn find_meta_items_by_name(metas: &[@ast::meta_item], name: &str) ->
~[@ast::meta_item] { ~[@ast::meta_item] {
let mut rs = ~[]; let mut rs = ~[];
for metas.each |mi| { for metas.each |mi| {
if name == *get_meta_item_name(*mi) { if name == get_meta_item_name(*mi) {
rs.push(*mi) rs.push(*mi)
} }
} }
@ -214,7 +214,7 @@ pub fn attrs_contains_name(attrs: &[ast::attribute], name: &str) -> bool {
} }
pub fn first_attr_value_str_by_name(attrs: &[ast::attribute], name: &str) pub fn first_attr_value_str_by_name(attrs: &[ast::attribute], name: &str)
-> Option<@~str> { -> Option<@str> {
let mattrs = find_attrs_by_name(attrs, name); let mattrs = find_attrs_by_name(attrs, name);
if mattrs.len() > 0 { if mattrs.len() > 0 {
@ -232,7 +232,7 @@ fn last_meta_item_by_name(items: &[@ast::meta_item], name: &str)
} }
pub fn last_meta_item_value_str_by_name(items: &[@ast::meta_item], name: &str) pub fn last_meta_item_value_str_by_name(items: &[@ast::meta_item], name: &str)
-> Option<@~str> { -> Option<@str> {
match last_meta_item_by_name(items, name) { match last_meta_item_by_name(items, name) {
Some(item) => { Some(item) => {
@ -282,7 +282,7 @@ pub fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: &str) ->
~[@ast::meta_item] { ~[@ast::meta_item] {
return vec::filter_mapped(items, |item| { return vec::filter_mapped(items, |item| {
if name != *get_meta_item_name(*item) { if name != get_meta_item_name(*item) {
Some(*item) Some(*item)
} else { } else {
None None
@ -316,8 +316,8 @@ pub fn find_inline_attr(attrs: &[ast::attribute]) -> inline_attr {
// FIXME (#2809)---validate the usage of #[inline] and #[inline(always)] // FIXME (#2809)---validate the usage of #[inline] and #[inline(always)]
do attrs.iter().fold(ia_none) |ia,attr| { do attrs.iter().fold(ia_none) |ia,attr| {
match attr.node.value.node { match attr.node.value.node {
ast::meta_word(@~"inline") => ia_hint, ast::meta_word(s) if "inline" == s => ia_hint,
ast::meta_list(@~"inline", ref items) => { ast::meta_list(s, ref items) if "inline" == s => {
if !find_meta_items_by_name(*items, "always").is_empty() { if !find_meta_items_by_name(*items, "always").is_empty() {
ia_always ia_always
} else if !find_meta_items_by_name(*items, "never").is_empty() { } else if !find_meta_items_by_name(*items, "never").is_empty() {
@ -341,7 +341,7 @@ pub fn require_unique_names(diagnostic: @span_handler,
// FIXME: How do I silence the warnings? --pcw (#2619) // FIXME: How do I silence the warnings? --pcw (#2619)
if !set.insert(name) { if !set.insert(name) {
diagnostic.span_fatal(meta.span, diagnostic.span_fatal(meta.span,
fmt!("duplicate meta item `%s`", *name)); fmt!("duplicate meta item `%s`", name));
} }
} }
} }
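
Rough usage sketch for the reworked constructors and accessors in this file, not part of the diff (attribute names and values invented): meta items are built from @str, and their names compare directly against &str literals, which is what lets the `== get_attr_name(..)` checks above drop their dereferences.

    let doc  = mk_name_value_item_str(@"doc", @"Adds two numbers.");
    let word = mk_word_item(@"inline");
    assert!("doc" == get_meta_item_name(doc));   // &str vs @str, no `*` needed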

View File

@ -184,7 +184,7 @@ pub struct Loc {
// Actually, *none* of the clients use the filename *or* file field; // Actually, *none* of the clients use the filename *or* file field;
// perhaps they should just be removed. // perhaps they should just be removed.
pub struct LocWithOpt { pub struct LocWithOpt {
filename: ~str, filename: FileName,
line: uint, line: uint,
col: CharPos, col: CharPos,
file: Option<@FileMap>, file: Option<@FileMap>,
@ -193,7 +193,7 @@ pub struct LocWithOpt {
// used to be structural records. Better names, anyone? // used to be structural records. Better names, anyone?
pub struct FileMapAndLine {fm: @FileMap, line: uint} pub struct FileMapAndLine {fm: @FileMap, line: uint}
pub struct FileMapAndBytePos {fm: @FileMap, pos: BytePos} pub struct FileMapAndBytePos {fm: @FileMap, pos: BytePos}
pub struct NameAndSpan {name: ~str, span: Option<span>} pub struct NameAndSpan {name: @str, span: Option<span>}
impl to_bytes::IterBytes for NameAndSpan { impl to_bytes::IterBytes for NameAndSpan {
fn iter_bytes(&self, lsb0: bool, f: to_bytes::Cb) -> bool { fn iter_bytes(&self, lsb0: bool, f: to_bytes::Cb) -> bool {
@ -227,7 +227,7 @@ impl to_bytes::IterBytes for ExpnInfo {
} }
} }
pub type FileName = ~str; pub type FileName = @str;
pub struct FileLines pub struct FileLines
{ {
@ -261,7 +261,7 @@ pub struct FileMap {
/// Extra information used by qquote /// Extra information used by qquote
substr: FileSubstr, substr: FileSubstr,
/// The complete source code /// The complete source code
src: @~str, src: @str,
/// The start position of this source in the CodeMap /// The start position of this source in the CodeMap
start_pos: BytePos, start_pos: BytePos,
/// Locations of lines beginnings in the source code /// Locations of lines beginnings in the source code
@ -316,14 +316,14 @@ impl CodeMap {
} }
/// Add a new FileMap to the CodeMap and return it /// Add a new FileMap to the CodeMap and return it
pub fn new_filemap(&self, filename: FileName, src: @~str) -> @FileMap { pub fn new_filemap(&self, filename: FileName, src: @str) -> @FileMap {
return self.new_filemap_w_substr(filename, FssNone, src); return self.new_filemap_w_substr(filename, FssNone, src);
} }
pub fn new_filemap_w_substr(&self, pub fn new_filemap_w_substr(&self,
filename: FileName, filename: FileName,
substr: FileSubstr, substr: FileSubstr,
src: @~str) src: @str)
-> @FileMap { -> @FileMap {
let files = &mut *self.files; let files = &mut *self.files;
let start_pos = if files.len() == 0 { let start_pos = if files.len() == 0 {
@ -362,7 +362,7 @@ impl CodeMap {
match (loc.file.substr) { match (loc.file.substr) {
FssNone => FssNone =>
LocWithOpt { LocWithOpt {
filename: /* FIXME (#2543) */ copy loc.file.name, filename: loc.file.name,
line: loc.line, line: loc.line,
col: loc.col, col: loc.col,
file: Some(loc.file)}, file: Some(loc.file)},
@ -421,8 +421,8 @@ impl CodeMap {
begin.pos.to_uint(), end.pos.to_uint()).to_owned(); begin.pos.to_uint(), end.pos.to_uint()).to_owned();
} }
pub fn get_filemap(&self, filename: ~str) -> @FileMap { pub fn get_filemap(&self, filename: &str) -> @FileMap {
for self.files.each |fm| { if fm.name == filename { return *fm; } } for self.files.each |fm| { if filename == fm.name { return *fm; } }
//XXjdm the following triggers a mismatched type bug //XXjdm the following triggers a mismatched type bug
// (or expected function, found _|_) // (or expected function, found _|_)
fail!(); // ("asking for " + filename + " which we don't know about"); fail!(); // ("asking for " + filename + " which we don't know about");
@ -532,7 +532,7 @@ mod test {
#[test] #[test]
fn t1 () { fn t1 () {
let cm = CodeMap::new(); let cm = CodeMap::new();
let fm = cm.new_filemap(~"blork.rs",@~"first line.\nsecond line"); let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line");
fm.next_line(BytePos(0)); fm.next_line(BytePos(0));
assert_eq!(&fm.get_line(0),&~"first line."); assert_eq!(&fm.get_line(0),&~"first line.");
// TESTING BROKEN BEHAVIOR: // TESTING BROKEN BEHAVIOR:
@ -544,7 +544,7 @@ mod test {
#[should_fail] #[should_fail]
fn t2 () { fn t2 () {
let cm = CodeMap::new(); let cm = CodeMap::new();
let fm = cm.new_filemap(~"blork.rs",@~"first line.\nsecond line"); let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line");
// TESTING *REALLY* BROKEN BEHAVIOR: // TESTING *REALLY* BROKEN BEHAVIOR:
fm.next_line(BytePos(0)); fm.next_line(BytePos(0));
fm.next_line(BytePos(10)); fm.next_line(BytePos(10));
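
Outside the tests, the same pattern looks roughly like this (a sketch with an invented file name and source): FileName is now @str, and lookups take a plain &str.

    let cm = CodeMap::new();
    let fm = cm.new_filemap(@"lib.rs", @"fn main() {}");
    let found = cm.get_filemap("lib.rs");   // &str argument, compared directly against the @str name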

View File

@ -306,8 +306,8 @@ fn highlight_lines(cm: @codemap::CodeMap,
fn print_macro_backtrace(cm: @codemap::CodeMap, sp: span) { fn print_macro_backtrace(cm: @codemap::CodeMap, sp: span) {
for sp.expn_info.iter().advance |ei| { for sp.expn_info.iter().advance |ei| {
let ss = ei.callee.span.map_default(@~"", |span| @cm.span_to_str(*span)); let ss = ei.callee.span.map_default(~"", |span| cm.span_to_str(*span));
print_diagnostic(*ss, note, print_diagnostic(ss, note,
fmt!("in expansion of %s!", ei.callee.name)); fmt!("in expansion of %s!", ei.callee.name));
let ss = cm.span_to_str(ei.call_site); let ss = cm.span_to_str(ei.call_site);
print_diagnostic(ss, note, "expansion site"); print_diagnostic(ss, note, "expansion site");

View File

@ -45,7 +45,7 @@ pub fn expand_asm(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
cx.cfg(), cx.cfg(),
tts.to_owned()); tts.to_owned());
let mut asm = ~""; let mut asm = @"";
let mut outputs = ~[]; let mut outputs = ~[];
let mut inputs = ~[]; let mut inputs = ~[];
let mut cons = ~""; let mut cons = ~"";
@ -113,7 +113,7 @@ pub fn expand_asm(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
p.eat(&token::COMMA); p.eat(&token::COMMA);
} }
let clob = ~"~{" + *p.parse_str() + "}"; let clob = fmt!("~{%s}", p.parse_str());
clobs.push(clob); clobs.push(clob);
} }
@ -122,11 +122,11 @@ pub fn expand_asm(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
Options => { Options => {
let option = p.parse_str(); let option = p.parse_str();
if "volatile" == *option { if "volatile" == option {
volatile = true; volatile = true;
} else if "alignstack" == *option { } else if "alignstack" == option {
alignstack = true; alignstack = true;
} else if "intel" == *option { } else if "intel" == option {
dialect = ast::asm_intel; dialect = ast::asm_intel;
} }
@ -176,8 +176,8 @@ pub fn expand_asm(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
MRExpr(@ast::expr { MRExpr(@ast::expr {
id: cx.next_id(), id: cx.next_id(),
node: ast::expr_inline_asm(ast::inline_asm { node: ast::expr_inline_asm(ast::inline_asm {
asm: @asm, asm: asm,
clobbers: @cons, clobbers: cons.to_managed(),
inputs: inputs, inputs: inputs,
outputs: outputs, outputs: outputs,
volatile: volatile, volatile: volatile,

View File

@ -33,7 +33,7 @@ use core::hashmap::HashMap;
// ast::mac_invoc_tt. // ast::mac_invoc_tt.
pub struct MacroDef { pub struct MacroDef {
name: ~str, name: @str,
ext: SyntaxExtension ext: SyntaxExtension
} }
@ -308,18 +308,18 @@ impl ExtCtxt {
pub fn set_trace_macros(&self, x: bool) { pub fn set_trace_macros(&self, x: bool) {
*self.trace_mac = x *self.trace_mac = x
} }
pub fn str_of(&self, id: ast::ident) -> ~str { pub fn str_of(&self, id: ast::ident) -> @str {
copy *ident_to_str(&id) ident_to_str(&id)
} }
pub fn ident_of(&self, st: &str) -> ast::ident { pub fn ident_of(&self, st: &str) -> ast::ident {
str_to_ident(st) str_to_ident(st)
} }
} }
pub fn expr_to_str(cx: @ExtCtxt, expr: @ast::expr, err_msg: ~str) -> ~str { pub fn expr_to_str(cx: @ExtCtxt, expr: @ast::expr, err_msg: ~str) -> @str {
match expr.node { match expr.node {
ast::expr_lit(l) => match l.node { ast::expr_lit(l) => match l.node {
ast::lit_str(s) => copy *s, ast::lit_str(s) => s,
_ => cx.span_fatal(l.span, err_msg) _ => cx.span_fatal(l.span, err_msg)
}, },
_ => cx.span_fatal(expr.span, err_msg) _ => cx.span_fatal(expr.span, err_msg)
@ -350,7 +350,7 @@ pub fn check_zero_tts(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree],
pub fn get_single_str_from_tts(cx: @ExtCtxt, pub fn get_single_str_from_tts(cx: @ExtCtxt,
sp: span, sp: span,
tts: &[ast::token_tree], tts: &[ast::token_tree],
name: &str) -> ~str { name: &str) -> @str {
if tts.len() != 1 { if tts.len() != 1 {
cx.span_fatal(sp, fmt!("%s takes 1 argument.", name)); cx.span_fatal(sp, fmt!("%s takes 1 argument.", name));
} }
@ -538,25 +538,25 @@ mod test {
#[test] fn testenv () { #[test] fn testenv () {
let mut a = HashMap::new(); let mut a = HashMap::new();
a.insert (@~"abc",@15); a.insert (@"abc",@15);
let m = MapChain::new(~a); let m = MapChain::new(~a);
m.insert (@~"def",@16); m.insert (@"def",@16);
// FIXME: #4492 (ICE) assert_eq!(m.find(&@~"abc"),Some(@15)); // FIXME: #4492 (ICE) assert_eq!(m.find(&@"abc"),Some(@15));
// .... assert_eq!(m.find(&@~"def"),Some(@16)); // .... assert_eq!(m.find(&@"def"),Some(@16));
assert_eq!(*(m.find(&@~"abc").get()),15); assert_eq!(*(m.find(&@"abc").get()),15);
assert_eq!(*(m.find(&@~"def").get()),16); assert_eq!(*(m.find(&@"def").get()),16);
let n = m.push_frame(); let n = m.push_frame();
// old bindings are still present: // old bindings are still present:
assert_eq!(*(n.find(&@~"abc").get()),15); assert_eq!(*(n.find(&@"abc").get()),15);
assert_eq!(*(n.find(&@~"def").get()),16); assert_eq!(*(n.find(&@"def").get()),16);
n.insert (@~"def",@17); n.insert (@"def",@17);
// n shows the new binding // n shows the new binding
assert_eq!(*(n.find(&@~"abc").get()),15); assert_eq!(*(n.find(&@"abc").get()),15);
assert_eq!(*(n.find(&@~"def").get()),17); assert_eq!(*(n.find(&@"def").get()),17);
// ... but m still has the old ones // ... but m still has the old ones
// FIXME: #4492: assert_eq!(m.find(&@~"abc"),Some(@15)); // FIXME: #4492: assert_eq!(m.find(&@"abc"),Some(@15));
// FIXME: #4492: assert_eq!(m.find(&@~"def"),Some(@16)); // FIXME: #4492: assert_eq!(m.find(&@"def"),Some(@16));
assert_eq!(*(m.find(&@~"abc").get()),15); assert_eq!(*(m.find(&@"abc").get()),15);
assert_eq!(*(m.find(&@~"def").get()),16); assert_eq!(*(m.find(&@"def").get()),16);
} }
} }
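
Sketch of the ident helpers after this change, not part of the diff (cx is assumed to be an @ExtCtxt in scope, the name is invented): str_of now returns the interned @str itself instead of copying it out.

    let id = cx.ident_of("foo");      // &str in
    let name: @str = cx.str_of(id);   // @str out, no copy of the interned string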

View File

@ -126,8 +126,8 @@ pub trait AstBuilder {
fn expr_vec(&self, sp: span, exprs: ~[@ast::expr]) -> @ast::expr; fn expr_vec(&self, sp: span, exprs: ~[@ast::expr]) -> @ast::expr;
fn expr_vec_uniq(&self, sp: span, exprs: ~[@ast::expr]) -> @ast::expr; fn expr_vec_uniq(&self, sp: span, exprs: ~[@ast::expr]) -> @ast::expr;
fn expr_vec_slice(&self, sp: span, exprs: ~[@ast::expr]) -> @ast::expr; fn expr_vec_slice(&self, sp: span, exprs: ~[@ast::expr]) -> @ast::expr;
fn expr_str(&self, sp: span, s: ~str) -> @ast::expr; fn expr_str(&self, sp: span, s: @str) -> @ast::expr;
fn expr_str_uniq(&self, sp: span, s: ~str) -> @ast::expr; fn expr_str_uniq(&self, sp: span, s: @str) -> @ast::expr;
fn expr_unreachable(&self, span: span) -> @ast::expr; fn expr_unreachable(&self, span: span) -> @ast::expr;
@ -215,9 +215,9 @@ pub trait AstBuilder {
fn attribute(&self, sp: span, mi: @ast::meta_item) -> ast::attribute; fn attribute(&self, sp: span, mi: @ast::meta_item) -> ast::attribute;
fn meta_word(&self, sp: span, w: ~str) -> @ast::meta_item; fn meta_word(&self, sp: span, w: @str) -> @ast::meta_item;
fn meta_list(&self, sp: span, name: ~str, mis: ~[@ast::meta_item]) -> @ast::meta_item; fn meta_list(&self, sp: span, name: @str, mis: ~[@ast::meta_item]) -> @ast::meta_item;
fn meta_name_value(&self, sp: span, name: ~str, value: ast::lit_) -> @ast::meta_item; fn meta_name_value(&self, sp: span, name: @str, value: ast::lit_) -> @ast::meta_item;
fn view_use(&self, sp: span, fn view_use(&self, sp: span,
vis: ast::visibility, vp: ~[@ast::view_path]) -> @ast::view_item; vis: ast::visibility, vp: ~[@ast::view_path]) -> @ast::view_item;
@ -521,10 +521,10 @@ impl AstBuilder for @ExtCtxt {
fn expr_vec_slice(&self, sp: span, exprs: ~[@ast::expr]) -> @ast::expr { fn expr_vec_slice(&self, sp: span, exprs: ~[@ast::expr]) -> @ast::expr {
self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::expr_vstore_slice) self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::expr_vstore_slice)
} }
fn expr_str(&self, sp: span, s: ~str) -> @ast::expr { fn expr_str(&self, sp: span, s: @str) -> @ast::expr {
self.expr_lit(sp, ast::lit_str(@s)) self.expr_lit(sp, ast::lit_str(s))
} }
fn expr_str_uniq(&self, sp: span, s: ~str) -> @ast::expr { fn expr_str_uniq(&self, sp: span, s: @str) -> @ast::expr {
self.expr_vstore(sp, self.expr_str(sp, s), ast::expr_vstore_uniq) self.expr_vstore(sp, self.expr_str(sp, s), ast::expr_vstore_uniq)
} }
@ -540,8 +540,8 @@ impl AstBuilder for @ExtCtxt {
self.ident_of("fail_with"), self.ident_of("fail_with"),
], ],
~[ ~[
self.expr_str(span, ~"internal error: entered unreachable code"), self.expr_str(span, @"internal error: entered unreachable code"),
self.expr_str(span, copy loc.file.name), self.expr_str(span, loc.file.name),
self.expr_uint(span, loc.line), self.expr_uint(span, loc.line),
]) ])
} }
@ -791,14 +791,14 @@ impl AstBuilder for @ExtCtxt {
}) })
} }
fn meta_word(&self, sp: span, w: ~str) -> @ast::meta_item { fn meta_word(&self, sp: span, w: @str) -> @ast::meta_item {
@respan(sp, ast::meta_word(@w)) @respan(sp, ast::meta_word(w))
} }
fn meta_list(&self, sp: span, name: ~str, mis: ~[@ast::meta_item]) -> @ast::meta_item { fn meta_list(&self, sp: span, name: @str, mis: ~[@ast::meta_item]) -> @ast::meta_item {
@respan(sp, ast::meta_list(@name, mis)) @respan(sp, ast::meta_list(name, mis))
} }
fn meta_name_value(&self, sp: span, name: ~str, value: ast::lit_) -> @ast::meta_item { fn meta_name_value(&self, sp: span, name: @str, value: ast::lit_) -> @ast::meta_item {
@respan(sp, ast::meta_name_value(@name, respan(sp, value))) @respan(sp, ast::meta_name_value(name, respan(sp, value)))
} }
fn view_use(&self, sp: span, fn view_use(&self, sp: span,
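
For illustration only (cx: @ExtCtxt and sp: span assumed in scope, names invented): the builder methods take @str directly, so attribute construction reads like the `allow` list built later in this commit.

    let greeting = cx.expr_str(sp, @"hello world");
    let allow = cx.meta_list(sp, @"allow",
                             ~[cx.meta_word(sp, @"unused_mut")]);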

View File

@ -72,7 +72,7 @@ fn decodable_substructure(cx: @ExtCtxt, span: span,
}; };
let read_struct_field = cx.ident_of("read_struct_field"); let read_struct_field = cx.ident_of("read_struct_field");
let getarg = |name: ~str, field: uint| { let getarg = |name: @str, field: uint| {
cx.expr_method_call(span, blkdecoder, read_struct_field, cx.expr_method_call(span, blkdecoder, read_struct_field,
~[cx.expr_str(span, name), ~[cx.expr_str(span, name),
cx.expr_uint(span, field), cx.expr_uint(span, field),
@ -86,7 +86,7 @@ fn decodable_substructure(cx: @ExtCtxt, span: span,
} else { } else {
let mut fields = vec::with_capacity(n); let mut fields = vec::with_capacity(n);
for uint::range(0, n) |i| { for uint::range(0, n) |i| {
fields.push(getarg(fmt!("_field%u", i), i)); fields.push(getarg(fmt!("_field%u", i).to_managed(), i));
} }
cx.expr_call_ident(span, substr.type_ident, fields) cx.expr_call_ident(span, substr.type_ident, fields)
} }
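
The fmt!(..).to_managed() pattern above is the general escape hatch when a freshly formatted string has to become @str; a minimal sketch, not part of the diff:

    let owned: ~str = fmt!("_field%u", 3);    // freshly built, uniquely owned
    let managed: @str = owned.to_managed();   // copied once into managed storage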

View File

@ -127,7 +127,7 @@ fn encodable_substructure(cx: @ExtCtxt, span: span,
for fields.eachi |i, f| { for fields.eachi |i, f| {
let (name, val) = match *f { let (name, val) = match *f {
(Some(id), e, _) => (cx.str_of(id), e), (Some(id), e, _) => (cx.str_of(id), e),
(None, e, _) => (fmt!("_field%u", i), e) (None, e, _) => (fmt!("_field%u", i).to_managed(), e)
}; };
let enc = cx.expr_method_call(span, val, encode, ~[blkencoder]); let enc = cx.expr_method_call(span, val, encode, ~[blkencoder]);
let lambda = cx.lambda_expr_1(span, enc, blkarg); let lambda = cx.lambda_expr_1(span, enc, blkarg);

View File

@ -364,7 +364,7 @@ impl<'self> TraitDef<'self> {
let doc_attr = cx.attribute( let doc_attr = cx.attribute(
span, span,
cx.meta_name_value(span, cx.meta_name_value(span,
~"doc", ast::lit_str(@~"Automatically derived."))); @"doc", ast::lit_str(@"Automatically derived.")));
cx.item( cx.item(
span, span,
::parse::token::special_idents::clownshoes_extensions, ::parse::token::special_idents::clownshoes_extensions,

View File

@ -82,23 +82,23 @@ pub fn expand_meta_deriving(cx: @ExtCtxt,
meta_word(tname) => { meta_word(tname) => {
macro_rules! expand(($func:path) => ($func(cx, titem.span, macro_rules! expand(($func:path) => ($func(cx, titem.span,
titem, in_items))); titem, in_items)));
match *tname { match tname.as_slice() {
~"Clone" => expand!(clone::expand_deriving_clone), "Clone" => expand!(clone::expand_deriving_clone),
~"DeepClone" => expand!(clone::expand_deriving_deep_clone), "DeepClone" => expand!(clone::expand_deriving_deep_clone),
~"IterBytes" => expand!(iter_bytes::expand_deriving_iter_bytes), "IterBytes" => expand!(iter_bytes::expand_deriving_iter_bytes),
~"Encodable" => expand!(encodable::expand_deriving_encodable), "Encodable" => expand!(encodable::expand_deriving_encodable),
~"Decodable" => expand!(decodable::expand_deriving_decodable), "Decodable" => expand!(decodable::expand_deriving_decodable),
~"Eq" => expand!(eq::expand_deriving_eq), "Eq" => expand!(eq::expand_deriving_eq),
~"TotalEq" => expand!(totaleq::expand_deriving_totaleq), "TotalEq" => expand!(totaleq::expand_deriving_totaleq),
~"Ord" => expand!(ord::expand_deriving_ord), "Ord" => expand!(ord::expand_deriving_ord),
~"TotalOrd" => expand!(totalord::expand_deriving_totalord), "TotalOrd" => expand!(totalord::expand_deriving_totalord),
~"Rand" => expand!(rand::expand_deriving_rand), "Rand" => expand!(rand::expand_deriving_rand),
~"ToStr" => expand!(to_str::expand_deriving_to_str), "ToStr" => expand!(to_str::expand_deriving_to_str),
ref tname => { ref tname => {
cx.span_err(titem.span, fmt!("unknown \ cx.span_err(titem.span, fmt!("unknown \

View File

@ -33,8 +33,8 @@ pub fn expand_syntax_ext(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
// Option<str> rather than just a maybe-empty string. // Option<str> rather than just a maybe-empty string.
let e = match os::getenv(var) { let e = match os::getenv(var) {
None => cx.expr_str(sp, ~""), None => cx.expr_str(sp, @""),
Some(ref s) => cx.expr_str(sp, copy *s) Some(s) => cx.expr_str(sp, s.to_managed())
}; };
MRExpr(e) MRExpr(e)
} }

View File

@ -57,7 +57,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
None => { None => {
cx.span_fatal( cx.span_fatal(
pth.span, pth.span,
fmt!("macro undefined: '%s'", *extnamestr)) fmt!("macro undefined: '%s'", extnamestr))
} }
Some(@SE(NormalTT(SyntaxExpanderTT{ Some(@SE(NormalTT(SyntaxExpanderTT{
expander: exp, expander: exp,
@ -66,7 +66,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
cx.bt_push(ExpandedFrom(CallInfo { cx.bt_push(ExpandedFrom(CallInfo {
call_site: s, call_site: s,
callee: NameAndSpan { callee: NameAndSpan {
name: copy *extnamestr, name: extnamestr,
span: exp_sp, span: exp_sp,
}, },
})); }));
@ -79,7 +79,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
pth.span, pth.span,
fmt!( fmt!(
"non-expr macro in expr pos: %s", "non-expr macro in expr pos: %s",
*extnamestr extnamestr
) )
) )
} }
@ -95,7 +95,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
_ => { _ => {
cx.span_fatal( cx.span_fatal(
pth.span, pth.span,
fmt!("'%s' is not a tt-style macro", *extnamestr) fmt!("'%s' is not a tt-style macro", extnamestr)
) )
} }
} }
@ -132,12 +132,12 @@ pub fn expand_mod_items(extsbox: @mut SyntaxEnv,
do item.attrs.rev_iter().fold(~[*item]) |items, attr| { do item.attrs.rev_iter().fold(~[*item]) |items, attr| {
let mname = attr::get_attr_name(attr); let mname = attr::get_attr_name(attr);
match (*extsbox).find(&intern(*mname)) { match (*extsbox).find(&intern(mname)) {
Some(@SE(ItemDecorator(dec_fn))) => { Some(@SE(ItemDecorator(dec_fn))) => {
cx.bt_push(ExpandedFrom(CallInfo { cx.bt_push(ExpandedFrom(CallInfo {
call_site: attr.span, call_site: attr.span,
callee: NameAndSpan { callee: NameAndSpan {
name: /*bad*/ copy *mname, name: mname,
span: None span: None
} }
})); }));
@ -201,7 +201,7 @@ pub fn expand_item(extsbox: @mut SyntaxEnv,
// does this attribute list contain "macro_escape" ? // does this attribute list contain "macro_escape" ?
pub fn contains_macro_escape (attrs: &[ast::attribute]) -> bool { pub fn contains_macro_escape (attrs: &[ast::attribute]) -> bool {
attrs.any(|attr| "macro_escape" == *attr::get_attr_name(attr)) attrs.any(|attr| "macro_escape" == attr::get_attr_name(attr))
} }
// Support for item-position macro invocations, exactly the same // Support for item-position macro invocations, exactly the same
@ -221,19 +221,19 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv,
let extnamestr = ident_to_str(extname); let extnamestr = ident_to_str(extname);
let expanded = match (*extsbox).find(&extname.name) { let expanded = match (*extsbox).find(&extname.name) {
None => cx.span_fatal(pth.span, None => cx.span_fatal(pth.span,
fmt!("macro undefined: '%s!'", *extnamestr)), fmt!("macro undefined: '%s!'", extnamestr)),
Some(@SE(NormalTT(ref expand))) => { Some(@SE(NormalTT(ref expand))) => {
if it.ident != parse::token::special_idents::invalid { if it.ident != parse::token::special_idents::invalid {
cx.span_fatal(pth.span, cx.span_fatal(pth.span,
fmt!("macro %s! expects no ident argument, \ fmt!("macro %s! expects no ident argument, \
given '%s'", *extnamestr, given '%s'", extnamestr,
*ident_to_str(&it.ident))); ident_to_str(&it.ident)));
} }
cx.bt_push(ExpandedFrom(CallInfo { cx.bt_push(ExpandedFrom(CallInfo {
call_site: it.span, call_site: it.span,
callee: NameAndSpan { callee: NameAndSpan {
name: copy *extnamestr, name: extnamestr,
span: expand.span span: expand.span
} }
})); }));
@ -243,26 +243,25 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv,
if it.ident == parse::token::special_idents::invalid { if it.ident == parse::token::special_idents::invalid {
cx.span_fatal(pth.span, cx.span_fatal(pth.span,
fmt!("macro %s! expects an ident argument", fmt!("macro %s! expects an ident argument",
*extnamestr)); extnamestr));
} }
cx.bt_push(ExpandedFrom(CallInfo { cx.bt_push(ExpandedFrom(CallInfo {
call_site: it.span, call_site: it.span,
callee: NameAndSpan { callee: NameAndSpan {
name: copy *extnamestr, name: extnamestr,
span: expand.span span: expand.span
} }
})); }));
((*expand).expander)(cx, it.span, it.ident, tts) ((*expand).expander)(cx, it.span, it.ident, tts)
} }
_ => cx.span_fatal( _ => cx.span_fatal(
it.span, fmt!("%s! is not legal in item position", *extnamestr)) it.span, fmt!("%s! is not legal in item position", extnamestr))
}; };
let maybe_it = match expanded { let maybe_it = match expanded {
MRItem(it) => fld.fold_item(it), MRItem(it) => fld.fold_item(it),
MRExpr(_) => cx.span_fatal(pth.span, MRExpr(_) => cx.span_fatal(pth.span,
~"expr macro in item position: " fmt!("expr macro in item position: %s", extnamestr)),
+ *extnamestr),
MRAny(_, item_maker, _) => item_maker().chain(|i| {fld.fold_item(i)}), MRAny(_, item_maker, _) => item_maker().chain(|i| {fld.fold_item(i)}),
MRDef(ref mdef) => { MRDef(ref mdef) => {
insert_macro(*extsbox,intern(mdef.name), @SE((*mdef).ext)); insert_macro(*extsbox,intern(mdef.name), @SE((*mdef).ext));
@ -319,13 +318,13 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
let extnamestr = ident_to_str(extname); let extnamestr = ident_to_str(extname);
let (fully_expanded, sp) = match (*extsbox).find(&extname.name) { let (fully_expanded, sp) = match (*extsbox).find(&extname.name) {
None => None =>
cx.span_fatal(pth.span, fmt!("macro undefined: '%s'", *extnamestr)), cx.span_fatal(pth.span, fmt!("macro undefined: '%s'", extnamestr)),
Some(@SE(NormalTT( Some(@SE(NormalTT(
SyntaxExpanderTT{expander: exp, span: exp_sp}))) => { SyntaxExpanderTT{expander: exp, span: exp_sp}))) => {
cx.bt_push(ExpandedFrom(CallInfo { cx.bt_push(ExpandedFrom(CallInfo {
call_site: sp, call_site: sp,
callee: NameAndSpan { name: copy *extnamestr, span: exp_sp } callee: NameAndSpan { name: extnamestr, span: exp_sp }
})); }));
let expanded = match exp(cx, mac.span, tts) { let expanded = match exp(cx, mac.span, tts) {
MRExpr(e) => MRExpr(e) =>
@ -334,7 +333,7 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
MRAny(_,_,stmt_mkr) => stmt_mkr(), MRAny(_,_,stmt_mkr) => stmt_mkr(),
_ => cx.span_fatal( _ => cx.span_fatal(
pth.span, pth.span,
fmt!("non-stmt macro in stmt pos: %s", *extnamestr)) fmt!("non-stmt macro in stmt pos: %s", extnamestr))
}; };
//keep going, outside-in //keep going, outside-in
@ -355,7 +354,7 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
_ => { _ => {
cx.span_fatal(pth.span, cx.span_fatal(pth.span,
fmt!("'%s' is not a tt-style macro", *extnamestr)) fmt!("'%s' is not a tt-style macro", extnamestr))
} }
}; };
@ -414,7 +413,7 @@ fn get_block_info(exts : SyntaxEnv) -> BlockInfo {
match exts.find_in_topmost_frame(&intern(special_block_name)) { match exts.find_in_topmost_frame(&intern(special_block_name)) {
Some(@BlockInfo(bi)) => bi, Some(@BlockInfo(bi)) => bi,
_ => fail!(fmt!("special identifier %? was bound to a non-BlockInfo", _ => fail!(fmt!("special identifier %? was bound to a non-BlockInfo",
@~" block")) @" block"))
} }
} }
@ -456,9 +455,9 @@ pub fn new_span(cx: @ExtCtxt, sp: span) -> span {
// the default compilation environment. It would be much nicer to use // the default compilation environment. It would be much nicer to use
// a mechanism like syntax_quote to ensure hygiene. // a mechanism like syntax_quote to ensure hygiene.
pub fn core_macros() -> ~str { pub fn core_macros() -> @str {
return return
~"pub mod macros { @"pub mod macros {
macro_rules! ignore (($($x:tt)*) => (())) macro_rules! ignore (($($x:tt)*) => (()))
macro_rules! error ( macro_rules! error (
@ -679,7 +678,7 @@ pub fn expand_crate(parse_sess: @mut parse::ParseSess,
node: attribute_ { node: attribute_ {
style: attr_outer, style: attr_outer,
value: @spanned { value: @spanned {
node: meta_word(@~"macro_escape"), node: meta_word(@"macro_escape"),
span: codemap::dummy_sp(), span: codemap::dummy_sp(),
}, },
is_sugared_doc: false, is_sugared_doc: false,
@ -687,8 +686,8 @@ pub fn expand_crate(parse_sess: @mut parse::ParseSess,
} }
]; ];
let cm = match parse_item_from_source_str(~"<core-macros>", let cm = match parse_item_from_source_str(@"<core-macros>",
@core_macros(), core_macros(),
copy cfg, copy cfg,
attrs, attrs,
parse_sess) { parse_sess) {
@ -764,11 +763,11 @@ mod test {
// make sure that fail! is present // make sure that fail! is present
#[test] fn fail_exists_test () { #[test] fn fail_exists_test () {
let src = ~"fn main() { fail!(\"something appropriately gloomy\");}"; let src = @"fn main() { fail!(\"something appropriately gloomy\");}";
let sess = parse::new_parse_sess(None); let sess = parse::new_parse_sess(None);
let crate_ast = parse::parse_crate_from_source_str( let crate_ast = parse::parse_crate_from_source_str(
~"<test>", @"<test>",
@src, src,
~[],sess); ~[],sess);
expand_crate(sess,~[],crate_ast); expand_crate(sess,~[],crate_ast);
} }
@ -779,12 +778,12 @@ mod test {
// make sure that macros can leave scope // make sure that macros can leave scope
#[should_fail] #[should_fail]
#[test] fn macros_cant_escape_fns_test () { #[test] fn macros_cant_escape_fns_test () {
let src = ~"fn bogus() {macro_rules! z (() => (3+4))}\ let src = @"fn bogus() {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }"; fn inty() -> int { z!() }";
let sess = parse::new_parse_sess(None); let sess = parse::new_parse_sess(None);
let crate_ast = parse::parse_crate_from_source_str( let crate_ast = parse::parse_crate_from_source_str(
~"<test>", @"<test>",
@src, src,
~[],sess); ~[],sess);
// should fail: // should fail:
expand_crate(sess,~[],crate_ast); expand_crate(sess,~[],crate_ast);
@ -793,12 +792,12 @@ mod test {
// make sure that macros can leave scope for modules // make sure that macros can leave scope for modules
#[should_fail] #[should_fail]
#[test] fn macros_cant_escape_mods_test () { #[test] fn macros_cant_escape_mods_test () {
let src = ~"mod foo {macro_rules! z (() => (3+4))}\ let src = @"mod foo {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }"; fn inty() -> int { z!() }";
let sess = parse::new_parse_sess(None); let sess = parse::new_parse_sess(None);
let crate_ast = parse::parse_crate_from_source_str( let crate_ast = parse::parse_crate_from_source_str(
~"<test>", @"<test>",
@src, src,
~[],sess); ~[],sess);
// should fail: // should fail:
expand_crate(sess,~[],crate_ast); expand_crate(sess,~[],crate_ast);
@ -806,19 +805,19 @@ mod test {
// macro_escape modules shouldn't cause macros to leave scope // macro_escape modules shouldn't cause macros to leave scope
#[test] fn macros_can_escape_flattened_mods_test () { #[test] fn macros_can_escape_flattened_mods_test () {
let src = ~"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\ let src = @"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }"; fn inty() -> int { z!() }";
let sess = parse::new_parse_sess(None); let sess = parse::new_parse_sess(None);
let crate_ast = parse::parse_crate_from_source_str( let crate_ast = parse::parse_crate_from_source_str(
~"<test>", @"<test>",
@src, src,
~[], sess); ~[], sess);
// should fail: // should fail:
expand_crate(sess,~[],crate_ast); expand_crate(sess,~[],crate_ast);
} }
#[test] fn core_macros_must_parse () { #[test] fn core_macros_must_parse () {
let src = ~" let src = @"
pub mod macros { pub mod macros {
macro_rules! ignore (($($x:tt)*) => (())) macro_rules! ignore (($($x:tt)*) => (()))
@ -828,9 +827,9 @@ mod test {
let sess = parse::new_parse_sess(None); let sess = parse::new_parse_sess(None);
let cfg = ~[]; let cfg = ~[];
let item_ast = parse::parse_item_from_source_str( let item_ast = parse::parse_item_from_source_str(
~"<test>", @"<test>",
@src, src,
cfg,~[make_dummy_attr (@~"macro_escape")],sess); cfg,~[make_dummy_attr (@"macro_escape")],sess);
match item_ast { match item_ast {
Some(_) => (), // success Some(_) => (), // success
None => fail!("expected this to parse") None => fail!("expected this to parse")
@ -838,9 +837,9 @@ mod test {
} }
#[test] fn test_contains_flatten (){ #[test] fn test_contains_flatten (){
let attr1 = make_dummy_attr (@~"foo"); let attr1 = make_dummy_attr (@"foo");
let attr2 = make_dummy_attr (@~"bar"); let attr2 = make_dummy_attr (@"bar");
let escape_attr = make_dummy_attr (@~"macro_escape"); let escape_attr = make_dummy_attr (@"macro_escape");
let attrs1 = ~[attr1, escape_attr, attr2]; let attrs1 = ~[attr1, escape_attr, attr2];
assert_eq!(contains_macro_escape (attrs1),true); assert_eq!(contains_macro_escape (attrs1),true);
let attrs2 = ~[attr1,attr2]; let attrs2 = ~[attr1,attr2];
@ -848,7 +847,7 @@ mod test {
} }
// make a "meta_word" outer attribute with the given name // make a "meta_word" outer attribute with the given name
fn make_dummy_attr(s: @~str) -> ast::attribute { fn make_dummy_attr(s: @str) -> ast::attribute {
spanned { spanned {
span:codemap::dummy_sp(), span:codemap::dummy_sp(),
node: attribute_ { node: attribute_ {
@ -864,7 +863,7 @@ mod test {
#[test] #[test]
fn renaming () { fn renaming () {
let maybe_item_ast = string_to_item(@~"fn a() -> int { let b = 13; b }"); let maybe_item_ast = string_to_item(@"fn a() -> int { let b = 13; b }");
let item_ast = match maybe_item_ast { let item_ast = match maybe_item_ast {
Some(x) => x, Some(x) => x,
None => fail!("test case fail") None => fail!("test case fail")
@ -887,7 +886,7 @@ mod test {
#[test] #[test]
fn pat_idents(){ fn pat_idents(){
let pat = string_to_pat(@~"(a,Foo{x:c @ (b,9),y:Bar(4,d)})"); let pat = string_to_pat(@"(a,Foo{x:c @ (b,9),y:Bar(4,d)})");
let pat_idents = new_name_finder(); let pat_idents = new_name_finder();
let idents = @mut ~[]; let idents = @mut ~[];
((*pat_idents).visit_pat)(pat, (idents, mk_vt(pat_idents))); ((*pat_idents).visit_pat)(pat, (idents, mk_vt(pat_idents)));
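
The tests in this file already drive the new parse entry points; as a standalone sketch (file name and source invented), both the file name and the source text are managed strings now.

    let sess = parse::new_parse_sess(None);
    let crate_ast = parse::parse_crate_from_source_str(
        @"<example>",          // FileName is @str
        @"fn main() { }",      // source text is @str as well
        ~[], sess);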

View File

@ -274,12 +274,13 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span,
then there's no need for it to be mutable */ then there's no need for it to be mutable */
if i == 0 { if i == 0 {
stms.push(cx.stmt_let(fmt_sp, npieces > 1, stms.push(cx.stmt_let(fmt_sp, npieces > 1,
ident, cx.expr_str_uniq(fmt_sp, s))); ident, cx.expr_str_uniq(fmt_sp, s.to_managed())));
} else { } else {
// we call the push_str function because the // we call the push_str function because the
// bootstrap doesn't seem to work if we call the // bootstrap doesn't seem to work if we call the
// method. // method.
let args = ~[cx.expr_mut_addr_of(fmt_sp, buf()), cx.expr_str(fmt_sp, s)]; let args = ~[cx.expr_mut_addr_of(fmt_sp, buf()),
cx.expr_str(fmt_sp, s.to_managed())];
let call = cx.expr_call_global(fmt_sp, let call = cx.expr_call_global(fmt_sp,
~[core_ident, ~[core_ident,
str_ident, str_ident,
@ -303,7 +304,7 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span,
must be initialized as an empty string */ must be initialized as an empty string */
if i == 0 { if i == 0 {
stms.push(cx.stmt_let(fmt_sp, true, ident, stms.push(cx.stmt_let(fmt_sp, true, ident,
cx.expr_str_uniq(fmt_sp, ~""))); cx.expr_str_uniq(fmt_sp, @"")));
} }
stms.push(cx.stmt_expr(make_new_conv(cx, fmt_sp, conv, stms.push(cx.stmt_expr(make_new_conv(cx, fmt_sp, conv,
args[n], buf()))); args[n], buf())));

View File

@ -51,7 +51,7 @@ impl proto::visitor<(), (), ()> for @ExtCtxt {
} }
} }
fn visit_message(&self, name: ~str, _span: span, _tys: &[@ast::Ty], fn visit_message(&self, name: @str, _span: span, _tys: &[@ast::Ty],
this: state, next: Option<next_state>) { this: state, next: Option<next_state>) {
match next { match next {
Some(ref next_state) => { Some(ref next_state) => {

View File

@ -20,13 +20,13 @@ use parse::token;
use parse::token::{interner_get}; use parse::token::{interner_get};
pub trait proto_parser { pub trait proto_parser {
fn parse_proto(&self, id: ~str) -> protocol; fn parse_proto(&self, id: @str) -> protocol;
fn parse_state(&self, proto: protocol); fn parse_state(&self, proto: protocol);
fn parse_message(&self, state: state); fn parse_message(&self, state: state);
} }
impl proto_parser for parser::Parser { impl proto_parser for parser::Parser {
fn parse_proto(&self, id: ~str) -> protocol { fn parse_proto(&self, id: @str) -> protocol {
let proto = protocol(id, *self.span); let proto = protocol(id, *self.span);
self.parse_seq_to_before_end( self.parse_seq_to_before_end(
@ -43,7 +43,7 @@ impl proto_parser for parser::Parser {
fn parse_state(&self, proto: protocol) { fn parse_state(&self, proto: protocol) {
let id = self.parse_ident(); let id = self.parse_ident();
let name = copy *interner_get(id.name); let name = interner_get(id.name);
self.expect(&token::COLON); self.expect(&token::COLON);
let dir = match copy *self.token { let dir = match copy *self.token {
@ -51,9 +51,9 @@ impl proto_parser for parser::Parser {
_ => fail!() _ => fail!()
}; };
self.bump(); self.bump();
let dir = match dir { let dir = match dir.as_slice() {
@~"send" => send, "send" => send,
@~"recv" => recv, "recv" => recv,
_ => fail!() _ => fail!()
}; };
@ -78,7 +78,7 @@ impl proto_parser for parser::Parser {
} }
fn parse_message(&self, state: state) { fn parse_message(&self, state: state) {
let mname = copy *interner_get(self.parse_ident().name); let mname = interner_get(self.parse_ident().name);
let args = if *self.token == token::LPAREN { let args = if *self.token == token::LPAREN {
self.parse_unspanned_seq( self.parse_unspanned_seq(
@ -97,7 +97,7 @@ impl proto_parser for parser::Parser {
let next = match *self.token { let next = match *self.token {
token::IDENT(_, _) => { token::IDENT(_, _) => {
let name = copy *interner_get(self.parse_ident().name); let name = interner_get(self.parse_ident().name);
let ntys = if *self.token == token::LT { let ntys = if *self.token == token::LT {
self.parse_unspanned_seq( self.parse_unspanned_seq(
&token::LT, &token::LT,

View File

@ -101,7 +101,7 @@ impl gen_send for message {
name, name,
vec::append_one( vec::append_one(
arg_names.map(|x| cx.str_of(*x)), arg_names.map(|x| cx.str_of(*x)),
~"s").connect(", ")); @"s").connect(", "));
if !try { if !try {
body += fmt!("::std::pipes::send(pipe, message);\n"); body += fmt!("::std::pipes::send(pipe, message);\n");
@ -114,7 +114,7 @@ impl gen_send for message {
} else { ::std::pipes::rt::make_none() } }"); } else { ::std::pipes::rt::make_none() } }");
} }
let body = cx.parse_expr(body); let body = cx.parse_expr(body.to_managed());
let mut rty = cx.ty_path(path(~[next.data_name()], let mut rty = cx.ty_path(path(~[next.data_name()],
span) span)
@ -123,7 +123,7 @@ impl gen_send for message {
rty = cx.ty_option(rty); rty = cx.ty_option(rty);
} }
let name = cx.ident_of(if try { ~"try_" + name } else { name } ); let name = if try {cx.ident_of(~"try_" + name)} else {cx.ident_of(name)};
cx.item_fn_poly(dummy_sp(), cx.item_fn_poly(dummy_sp(),
name, name,
@ -173,12 +173,12 @@ impl gen_send for message {
} }"); } }");
} }
let body = cx.parse_expr(body); let body = cx.parse_expr(body.to_managed());
let name = if try { ~"try_" + name } else { name }; let name = if try {cx.ident_of(~"try_" + name)} else {cx.ident_of(name)};
cx.item_fn_poly(dummy_sp(), cx.item_fn_poly(dummy_sp(),
cx.ident_of(name), name,
args_ast, args_ast,
if try { if try {
cx.ty_option(cx.ty_nil()) cx.ty_option(cx.ty_nil())
@ -326,7 +326,7 @@ impl gen_init for protocol {
start_state.generics.to_source(), start_state.generics.to_source(),
start_state.to_ty(cx).to_source(), start_state.to_ty(cx).to_source(),
start_state.to_ty(cx).to_source(), start_state.to_ty(cx).to_source(),
body.to_source())) body.to_source()).to_managed())
} }
fn gen_buffer_init(&self, ext_cx: @ExtCtxt) -> @ast::expr { fn gen_buffer_init(&self, ext_cx: @ExtCtxt) -> @ast::expr {
@ -358,10 +358,10 @@ impl gen_init for protocol {
self.states.map_to_vec( self.states.map_to_vec(
|s| ext_cx.parse_stmt( |s| ext_cx.parse_stmt(
fmt!("data.%s.set_buffer(buffer)", fmt!("data.%s.set_buffer(buffer)",
s.name))), s.name).to_managed())),
Some(ext_cx.parse_expr(fmt!( Some(ext_cx.parse_expr(fmt!(
"::std::ptr::to_mut_unsafe_ptr(&mut (data.%s))", "::std::ptr::to_mut_unsafe_ptr(&mut (data.%s))",
self.states[0].name))))); self.states[0].name).to_managed()))));
quote_expr!({ quote_expr!({
let buffer = $buffer; let buffer = $buffer;
@ -459,9 +459,9 @@ impl gen_init for protocol {
let allows = cx.attribute( let allows = cx.attribute(
copy self.span, copy self.span,
cx.meta_list(copy self.span, cx.meta_list(copy self.span,
~"allow", @"allow",
~[cx.meta_word(copy self.span, ~"non_camel_case_types"), ~[cx.meta_word(copy self.span, @"non_camel_case_types"),
cx.meta_word(copy self.span, ~"unused_mut")])); cx.meta_word(copy self.span, @"unused_mut")]));
cx.item_mod(copy self.span, cx.ident_of(copy self.name), cx.item_mod(copy self.span, cx.ident_of(copy self.name),
~[allows], ~[], items) ~[allows], ~[], items)
} }

View File

@ -38,17 +38,17 @@ impl direction {
} }
pub struct next_state { pub struct next_state {
state: ~str, state: @str,
tys: ~[@ast::Ty], tys: ~[@ast::Ty],
} }
// name, span, data, current state, next state // name, span, data, current state, next state
pub struct message(~str, span, ~[@ast::Ty], state, Option<next_state>); pub struct message(@str, span, ~[@ast::Ty], state, Option<next_state>);
impl message { impl message {
pub fn name(&mut self) -> ~str { pub fn name(&mut self) -> @str {
match *self { match *self {
message(ref id, _, _, _, _) => copy *id message(id, _, _, _, _) => id
} }
} }
@ -70,7 +70,7 @@ pub type state = @state_;
pub struct state_ { pub struct state_ {
id: uint, id: uint,
name: ~str, name: @str,
ident: ast::ident, ident: ast::ident,
span: span, span: span,
dir: direction, dir: direction,
@ -81,7 +81,7 @@ pub struct state_ {
impl state_ { impl state_ {
pub fn add_message(@self, pub fn add_message(@self,
name: ~str, name: @str,
span: span, span: span,
data: ~[@ast::Ty], data: ~[@ast::Ty],
next: Option<next_state>) { next: Option<next_state>) {
@ -122,11 +122,11 @@ impl state_ {
pub type protocol = @mut protocol_; pub type protocol = @mut protocol_;
pub fn protocol(name: ~str, span: span) -> protocol { pub fn protocol(name: @str, span: span) -> protocol {
@mut protocol_(name, span) @mut protocol_(name, span)
} }
pub fn protocol_(name: ~str, span: span) -> protocol_ { pub fn protocol_(name: @str, span: span) -> protocol_ {
protocol_ { protocol_ {
name: name, name: name,
span: span, span: span,
@ -136,7 +136,7 @@ pub fn protocol_(name: ~str, span: span) -> protocol_ {
} }
pub struct protocol_ { pub struct protocol_ {
name: ~str, name: @str,
span: span, span: span,
states: @mut ~[state], states: @mut ~[state],
@ -181,7 +181,7 @@ impl protocol_ {
impl protocol_ { impl protocol_ {
pub fn add_state_poly(@mut self, pub fn add_state_poly(@mut self,
name: ~str, name: @str,
ident: ast::ident, ident: ast::ident,
dir: direction, dir: direction,
generics: ast::Generics) generics: ast::Generics)
@ -208,7 +208,7 @@ impl protocol_ {
pub trait visitor<Tproto, Tstate, Tmessage> { pub trait visitor<Tproto, Tstate, Tmessage> {
fn visit_proto(&self, proto: protocol, st: &[Tstate]) -> Tproto; fn visit_proto(&self, proto: protocol, st: &[Tstate]) -> Tproto;
fn visit_state(&self, state: state, m: &[Tmessage]) -> Tstate; fn visit_state(&self, state: state, m: &[Tmessage]) -> Tstate;
fn visit_message(&self, name: ~str, spane: span, tys: &[@ast::Ty], fn visit_message(&self, name: @str, spane: span, tys: &[@ast::Ty],
this: state, next: Option<next_state>) -> Tmessage; this: state, next: Option<next_state>) -> Tmessage;
} }

View File

@ -43,8 +43,6 @@ pub mod rt {
pub use parse::new_parser_from_tts; pub use parse::new_parser_from_tts;
pub use codemap::{BytePos, span, dummy_spanned}; pub use codemap::{BytePos, span, dummy_spanned};
use print::pprust::{item_to_str, ty_to_str};
pub trait ToTokens { pub trait ToTokens {
pub fn to_tokens(&self, _cx: @ExtCtxt) -> ~[token_tree]; pub fn to_tokens(&self, _cx: @ExtCtxt) -> ~[token_tree];
} }
@ -71,132 +69,132 @@ pub mod rt {
pub trait ToSource { pub trait ToSource {
// Takes a thing and generates a string containing rust code for it. // Takes a thing and generates a string containing rust code for it.
pub fn to_source(&self) -> ~str; pub fn to_source(&self) -> @str;
} }
impl ToSource for ast::ident { impl ToSource for ast::ident {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
copy *ident_to_str(self) ident_to_str(self)
} }
} }
impl ToSource for @ast::item { impl ToSource for @ast::item {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
item_to_str(*self, get_ident_interner()) pprust::item_to_str(*self, get_ident_interner()).to_managed()
} }
} }
impl<'self> ToSource for &'self [@ast::item] { impl<'self> ToSource for &'self [@ast::item] {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
self.map(|i| i.to_source()).connect("\n\n") self.map(|i| i.to_source()).connect("\n\n").to_managed()
} }
} }
impl ToSource for @ast::Ty { impl ToSource for @ast::Ty {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
ty_to_str(*self, get_ident_interner()) pprust::ty_to_str(*self, get_ident_interner()).to_managed()
} }
} }
impl<'self> ToSource for &'self [@ast::Ty] { impl<'self> ToSource for &'self [@ast::Ty] {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
self.map(|i| i.to_source()).connect(", ") self.map(|i| i.to_source()).connect(", ").to_managed()
} }
} }
impl ToSource for Generics { impl ToSource for Generics {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
pprust::generics_to_str(self, get_ident_interner()) pprust::generics_to_str(self, get_ident_interner()).to_managed()
} }
} }
impl ToSource for @ast::expr { impl ToSource for @ast::expr {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
pprust::expr_to_str(*self, get_ident_interner()) pprust::expr_to_str(*self, get_ident_interner()).to_managed()
} }
} }
impl ToSource for ast::blk { impl ToSource for ast::blk {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
pprust::block_to_str(self, get_ident_interner()) pprust::block_to_str(self, get_ident_interner()).to_managed()
} }
} }
impl<'self> ToSource for &'self str { impl<'self> ToSource for &'self str {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_str(@self.to_owned())); let lit = dummy_spanned(ast::lit_str(self.to_managed()));
pprust::lit_to_str(@lit) pprust::lit_to_str(@lit).to_managed()
} }
} }
impl ToSource for int { impl ToSource for int {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i)); let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i));
pprust::lit_to_str(@lit) pprust::lit_to_str(@lit).to_managed()
} }
} }
impl ToSource for i8 { impl ToSource for i8 {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i8)); let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i8));
pprust::lit_to_str(@lit) pprust::lit_to_str(@lit).to_managed()
} }
} }
impl ToSource for i16 { impl ToSource for i16 {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i16)); let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i16));
pprust::lit_to_str(@lit) pprust::lit_to_str(@lit).to_managed()
} }
} }
impl ToSource for i32 { impl ToSource for i32 {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i32)); let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i32));
pprust::lit_to_str(@lit) pprust::lit_to_str(@lit).to_managed()
} }
} }
impl ToSource for i64 { impl ToSource for i64 {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i64)); let lit = dummy_spanned(ast::lit_int(*self as i64, ast::ty_i64));
pprust::lit_to_str(@lit) pprust::lit_to_str(@lit).to_managed()
} }
} }
impl ToSource for uint { impl ToSource for uint {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u)); let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u));
pprust::lit_to_str(@lit) pprust::lit_to_str(@lit).to_managed()
} }
} }
impl ToSource for u8 { impl ToSource for u8 {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u8)); let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u8));
pprust::lit_to_str(@lit) pprust::lit_to_str(@lit).to_managed()
} }
} }
impl ToSource for u16 { impl ToSource for u16 {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u16)); let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u16));
pprust::lit_to_str(@lit) pprust::lit_to_str(@lit).to_managed()
} }
} }
impl ToSource for u32 { impl ToSource for u32 {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u32)); let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u32));
pprust::lit_to_str(@lit) pprust::lit_to_str(@lit).to_managed()
} }
} }
impl ToSource for u64 { impl ToSource for u64 {
fn to_source(&self) -> ~str { fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u64)); let lit = dummy_spanned(ast::lit_uint(*self as u64, ast::ty_u64));
pprust::lit_to_str(@lit) pprust::lit_to_str(@lit).to_managed()
} }
} }
@ -317,18 +315,18 @@ pub mod rt {
} }
pub trait ExtParseUtils { pub trait ExtParseUtils {
fn parse_item(&self, s: ~str) -> @ast::item; fn parse_item(&self, s: @str) -> @ast::item;
fn parse_expr(&self, s: ~str) -> @ast::expr; fn parse_expr(&self, s: @str) -> @ast::expr;
fn parse_stmt(&self, s: ~str) -> @ast::stmt; fn parse_stmt(&self, s: @str) -> @ast::stmt;
fn parse_tts(&self, s: ~str) -> ~[ast::token_tree]; fn parse_tts(&self, s: @str) -> ~[ast::token_tree];
} }
impl ExtParseUtils for ExtCtxt { impl ExtParseUtils for ExtCtxt {
fn parse_item(&self, s: ~str) -> @ast::item { fn parse_item(&self, s: @str) -> @ast::item {
let res = parse::parse_item_from_source_str( let res = parse::parse_item_from_source_str(
~"<quote expansion>", @"<quote expansion>",
@(copy s), s,
self.cfg(), self.cfg(),
~[], ~[],
self.parse_sess()); self.parse_sess());
@ -341,27 +339,27 @@ pub mod rt {
} }
} }
fn parse_stmt(&self, s: ~str) -> @ast::stmt { fn parse_stmt(&self, s: @str) -> @ast::stmt {
parse::parse_stmt_from_source_str( parse::parse_stmt_from_source_str(
~"<quote expansion>", @"<quote expansion>",
@(copy s), s,
self.cfg(), self.cfg(),
~[], ~[],
self.parse_sess()) self.parse_sess())
} }
fn parse_expr(&self, s: ~str) -> @ast::expr { fn parse_expr(&self, s: @str) -> @ast::expr {
parse::parse_expr_from_source_str( parse::parse_expr_from_source_str(
~"<quote expansion>", @"<quote expansion>",
@(copy s), s,
self.cfg(), self.cfg(),
self.parse_sess()) self.parse_sess())
} }
fn parse_tts(&self, s: ~str) -> ~[ast::token_tree] { fn parse_tts(&self, s: @str) -> ~[ast::token_tree] {
parse::parse_tts_from_source_str( parse::parse_tts_from_source_str(
~"<quote expansion>", @"<quote expansion>",
@(copy s), s,
self.cfg(), self.cfg(),
self.parse_sess()) self.parse_sess())
} }
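With `ExtParseUtils` taking `@str`, quote-expansion callers can hand over an `@str` literal (or anything `.to_managed()` yields) without the old `@(copy s)` boxing. A hedged usage sketch; the `cx: @ExtCtxt` value and the source snippets are assumed, not taken from the patch:

    // Hypothetical calls; `cx` is an @ExtCtxt already in scope.
    let item = cx.parse_item(@"fn f() { }");
    let expr = cx.parse_expr(@"1 + 2");
    let tts  = cx.parse_tts(@"a :: b");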


@ -59,21 +59,21 @@ pub fn expand_file(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
let topmost = topmost_expn_info(cx.backtrace().get()); let topmost = topmost_expn_info(cx.backtrace().get());
let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo); let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo);
let filename = copy loc.file.name; let filename = loc.file.name;
base::MRExpr(cx.expr_str(topmost.call_site, filename)) base::MRExpr(cx.expr_str(topmost.call_site, filename))
} }
pub fn expand_stringify(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree]) pub fn expand_stringify(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult { -> base::MacResult {
let s = pprust::tts_to_str(tts, get_ident_interner()); let s = pprust::tts_to_str(tts, get_ident_interner());
base::MRExpr(cx.expr_str(sp, s)) base::MRExpr(cx.expr_str(sp, s.to_managed()))
} }
pub fn expand_mod(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree]) pub fn expand_mod(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult { -> base::MacResult {
base::check_zero_tts(cx, sp, tts, "module_path!"); base::check_zero_tts(cx, sp, tts, "module_path!");
base::MRExpr(cx.expr_str(sp, base::MRExpr(cx.expr_str(sp,
cx.mod_path().map(|x| cx.str_of(*x)).connect("::"))) cx.mod_path().map(|x| cx.str_of(*x)).connect("::").to_managed()))
} }
// include! : parse the given file as an expr // include! : parse the given file as an expr
@ -94,13 +94,13 @@ pub fn expand_include_str(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
let file = get_single_str_from_tts(cx, sp, tts, "include_str!"); let file = get_single_str_from_tts(cx, sp, tts, "include_str!");
let res = io::read_whole_file_str(&res_rel_file(cx, sp, &Path(file))); let res = io::read_whole_file_str(&res_rel_file(cx, sp, &Path(file)));
match res { match res {
result::Ok(_) => { /* Continue. */ } result::Ok(res) => {
result::Err(ref e) => { base::MRExpr(cx.expr_str(sp, res.to_managed()))
cx.parse_sess().span_diagnostic.handler().fatal((*e)); }
result::Err(e) => {
cx.span_fatal(sp, e);
} }
} }
base::MRExpr(cx.expr_str(sp, result::unwrap(res)))
} }
pub fn expand_include_bin(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree]) pub fn expand_include_bin(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
@ -131,7 +131,7 @@ fn topmost_expn_info(expn_info: @codemap::ExpnInfo) -> @codemap::ExpnInfo {
_ _
}) => { }) => {
// Don't recurse into file using "include!" // Don't recurse into file using "include!"
if *name == ~"include" { if "include" == *name {
expn_info expn_info
} else { } else {
topmost_expn_info(next_expn_info) topmost_expn_info(next_expn_info)
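The `include_str!` hunk above also reworks its error path: rather than testing the `Result` and unwrapping it later, both arms are handled in one match and the failure is reported through `cx.span_fatal`. Schematically, with the same names as the hunk and an invented file path:

    // Sketch of the new shape only, not a drop-in body.
    match io::read_whole_file_str(&Path("some_file.txt")) {
        result::Ok(res) => base::MRExpr(cx.expr_str(sp, res.to_managed())),
        result::Err(e) => cx.span_fatal(sp, e),
    }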


@ -205,7 +205,7 @@ pub fn nameize(p_s: @mut ParseSess, ms: &[matcher], res: &[@named_match])
} => { } => {
if ret_val.contains_key(bind_name) { if ret_val.contains_key(bind_name) {
p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: "+ p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: "+
*ident_to_str(bind_name)) ident_to_str(bind_name))
} }
ret_val.insert(*bind_name, res[idx]); ret_val.insert(*bind_name, res[idx]);
} }
@ -373,8 +373,8 @@ pub fn parse(
let nts = bb_eis.map(|ei| { let nts = bb_eis.map(|ei| {
match ei.elts[ei.idx].node { match ei.elts[ei.idx].node {
match_nonterminal(ref bind,ref name,_) => { match_nonterminal(ref bind,ref name,_) => {
fmt!("%s ('%s')", *ident_to_str(name), fmt!("%s ('%s')", ident_to_str(name),
*ident_to_str(bind)) ident_to_str(bind))
} }
_ => fail!() _ => fail!()
} }).connect(" or "); } }).connect(" or ");
@ -398,7 +398,7 @@ pub fn parse(
match ei.elts[ei.idx].node { match ei.elts[ei.idx].node {
match_nonterminal(_, ref name, idx) => { match_nonterminal(_, ref name, idx) => {
ei.matches[idx].push(@matched_nonterminal( ei.matches[idx].push(@matched_nonterminal(
parse_nt(&rust_parser, *ident_to_str(name)))); parse_nt(&rust_parser, ident_to_str(name))));
ei.idx += 1u; ei.idx += 1u;
} }
_ => fail!() _ => fail!()


@ -148,7 +148,7 @@ pub fn add_new_extension(cx: @ExtCtxt,
|cx, sp, arg| generic_extension(cx, sp, name, arg, *lhses, *rhses); |cx, sp, arg| generic_extension(cx, sp, name, arg, *lhses, *rhses);
return MRDef(MacroDef{ return MRDef(MacroDef{
name: copy *ident_to_str(&name), name: ident_to_str(&name),
ext: NormalTT(base::SyntaxExpanderTT{expander: exp, span: Some(sp)}) ext: NormalTT(base::SyntaxExpanderTT{expander: exp, span: Some(sp)})
}); });
} }


@ -121,7 +121,7 @@ fn lookup_cur_matched(r: &mut TtReader, name: ident) -> @named_match {
Some(s) => lookup_cur_matched_by_matched(r, s), Some(s) => lookup_cur_matched_by_matched(r, s),
None => { None => {
r.sp_diag.span_fatal(r.cur_span, fmt!("unknown macro variable `%s`", r.sp_diag.span_fatal(r.cur_span, fmt!("unknown macro variable `%s`",
*ident_to_str(&name))); ident_to_str(&name)));
} }
} }
} }
@ -139,8 +139,8 @@ fn lockstep_iter_size(t: &token_tree, r: &mut TtReader) -> lis {
lis_contradiction(_) => copy rhs, lis_contradiction(_) => copy rhs,
lis_constraint(r_len, _) if l_len == r_len => copy lhs, lis_constraint(r_len, _) if l_len == r_len => copy lhs,
lis_constraint(r_len, ref r_id) => { lis_constraint(r_len, ref r_id) => {
let l_n = copy *ident_to_str(l_id); let l_n = ident_to_str(l_id);
let r_n = copy *ident_to_str(r_id); let r_n = ident_to_str(r_id);
lis_contradiction(fmt!("Inconsistent lockstep iteration: \ lis_contradiction(fmt!("Inconsistent lockstep iteration: \
'%s' has %u items, but '%s' has %u", '%s' has %u items, but '%s' has %u",
l_n, l_len, r_n, r_len)) l_n, l_len, r_n, r_len))
@ -290,7 +290,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
r.sp_diag.span_fatal( r.sp_diag.span_fatal(
copy r.cur_span, /* blame the macro writer */ copy r.cur_span, /* blame the macro writer */
fmt!("variable '%s' is still repeating at this depth", fmt!("variable '%s' is still repeating at this depth",
*ident_to_str(&ident))); ident_to_str(&ident)));
} }
} }
} }


@ -990,7 +990,7 @@ mod test {
// make sure idents get transformed everywhere // make sure idents get transformed everywhere
#[test] fn ident_transformation () { #[test] fn ident_transformation () {
let zz_fold = fun_to_ident_folder(to_zz()); let zz_fold = fun_to_ident_folder(to_zz());
let ast = string_to_crate(@~"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}"); let ast = string_to_crate(@"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}");
assert_pred!(matches_codepattern, assert_pred!(matches_codepattern,
"matches_codepattern", "matches_codepattern",
pprust::to_str(zz_fold.fold_crate(ast),fake_print_crate, pprust::to_str(zz_fold.fold_crate(ast),fake_print_crate,
@ -1001,7 +1001,7 @@ mod test {
// even inside macro defs.... // even inside macro defs....
#[test] fn ident_transformation_in_defs () { #[test] fn ident_transformation_in_defs () {
let zz_fold = fun_to_ident_folder(to_zz()); let zz_fold = fun_to_ident_folder(to_zz());
let ast = string_to_crate(@~"macro_rules! a {(b $c:expr $(d $e:token)f+ let ast = string_to_crate(@"macro_rules! a {(b $c:expr $(d $e:token)f+
=> (g $(d $d $e)+))} "); => (g $(d $d $e)+))} ");
assert_pred!(matches_codepattern, assert_pred!(matches_codepattern,
"matches_codepattern", "matches_codepattern",


@ -46,7 +46,7 @@ impl parser_attr for Parser {
} }
token::DOC_COMMENT(s) => { token::DOC_COMMENT(s) => {
let attr = ::attr::mk_sugared_doc_attr( let attr = ::attr::mk_sugared_doc_attr(
copy *self.id_to_str(s), self.id_to_str(s),
self.span.lo, self.span.lo,
self.span.hi self.span.hi
); );
@ -119,7 +119,7 @@ impl parser_attr for Parser {
} }
token::DOC_COMMENT(s) => { token::DOC_COMMENT(s) => {
let attr = ::attr::mk_sugared_doc_attr( let attr = ::attr::mk_sugared_doc_attr(
copy *self.id_to_str(s), self.id_to_str(s),
self.span.lo, self.span.lo,
self.span.hi self.span.hi
); );


@ -320,10 +320,10 @@ pub struct lit {
// probably not a good thing. // probably not a good thing.
pub fn gather_comments_and_literals(span_diagnostic: pub fn gather_comments_and_literals(span_diagnostic:
@diagnostic::span_handler, @diagnostic::span_handler,
path: ~str, path: @str,
srdr: @io::Reader) srdr: @io::Reader)
-> (~[cmnt], ~[lit]) { -> (~[cmnt], ~[lit]) {
let src = @str::from_bytes(srdr.read_whole_stream()); let src = str::from_bytes(srdr.read_whole_stream()).to_managed();
let cm = CodeMap::new(); let cm = CodeMap::new();
let filemap = cm.new_filemap(path, src); let filemap = cm.new_filemap(path, src);
let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap); let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap);


@ -158,7 +158,7 @@ impl Parser {
self.fatal( self.fatal(
fmt!( fmt!(
"expected `%s`, found `%s`", "expected `%s`, found `%s`",
*self.id_to_str(kw.to_ident()), self.id_to_str(kw.to_ident()),
self.this_token_to_str() self.this_token_to_str()
) )
); );


@ -40,7 +40,7 @@ pub struct TokenAndSpan {tok: token::Token, sp: span}
pub struct StringReader { pub struct StringReader {
span_diagnostic: @span_handler, span_diagnostic: @span_handler,
src: @~str, src: @str,
// The absolute offset within the codemap of the next character to read // The absolute offset within the codemap of the next character to read
pos: BytePos, pos: BytePos,
// The absolute offset within the codemap of the last character read(curr) // The absolute offset within the codemap of the last character read(curr)
@ -176,7 +176,7 @@ pub fn with_str_from<T>(rdr: @mut StringReader, start: BytePos, f: &fn(s: &str)
pub fn bump(rdr: &mut StringReader) { pub fn bump(rdr: &mut StringReader) {
rdr.last_pos = rdr.pos; rdr.last_pos = rdr.pos;
let current_byte_offset = byte_offset(rdr, rdr.pos).to_uint(); let current_byte_offset = byte_offset(rdr, rdr.pos).to_uint();
if current_byte_offset < (*rdr.src).len() { if current_byte_offset < (rdr.src).len() {
assert!(rdr.curr != -1 as char); assert!(rdr.curr != -1 as char);
let last_char = rdr.curr; let last_char = rdr.curr;
let next = rdr.src.char_range_at(current_byte_offset); let next = rdr.src.char_range_at(current_byte_offset);
@ -202,7 +202,7 @@ pub fn is_eof(rdr: @mut StringReader) -> bool {
} }
pub fn nextch(rdr: @mut StringReader) -> char { pub fn nextch(rdr: @mut StringReader) -> char {
let offset = byte_offset(rdr, rdr.pos).to_uint(); let offset = byte_offset(rdr, rdr.pos).to_uint();
if offset < (*rdr.src).len() { if offset < (rdr.src).len() {
return rdr.src.char_at(offset); return rdr.src.char_at(offset);
} else { return -1 as char; } } else { return -1 as char; }
} }
@ -801,9 +801,9 @@ mod test {
} }
// open a string reader for the given string // open a string reader for the given string
fn setup(teststr: ~str) -> Env { fn setup(teststr: @str) -> Env {
let cm = CodeMap::new(); let cm = CodeMap::new();
let fm = cm.new_filemap(~"zebra.rs", @teststr); let fm = cm.new_filemap(@"zebra.rs", teststr);
let span_handler = let span_handler =
diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm); diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm);
Env { Env {
@ -813,7 +813,7 @@ mod test {
#[test] fn t1 () { #[test] fn t1 () {
let Env {string_reader} = let Env {string_reader} =
setup(~"/* my source file */ \ setup(@"/* my source file */ \
fn main() { io::println(~\"zebra\"); }\n"); fn main() { io::println(~\"zebra\"); }\n");
let id = str_to_ident("fn"); let id = str_to_ident("fn");
let tok1 = string_reader.next_token(); let tok1 = string_reader.next_token();
@ -849,14 +849,14 @@ mod test {
} }
#[test] fn doublecolonparsing () { #[test] fn doublecolonparsing () {
let env = setup (~"a b"); let env = setup (@"a b");
check_tokenization (env, check_tokenization (env,
~[mk_ident("a",false), ~[mk_ident("a",false),
mk_ident("b",false)]); mk_ident("b",false)]);
} }
#[test] fn dcparsing_2 () { #[test] fn dcparsing_2 () {
let env = setup (~"a::b"); let env = setup (@"a::b");
check_tokenization (env, check_tokenization (env,
~[mk_ident("a",true), ~[mk_ident("a",true),
token::MOD_SEP, token::MOD_SEP,
@ -864,7 +864,7 @@ mod test {
} }
#[test] fn dcparsing_3 () { #[test] fn dcparsing_3 () {
let env = setup (~"a ::b"); let env = setup (@"a ::b");
check_tokenization (env, check_tokenization (env,
~[mk_ident("a",false), ~[mk_ident("a",false),
token::MOD_SEP, token::MOD_SEP,
@ -872,7 +872,7 @@ mod test {
} }
#[test] fn dcparsing_4 () { #[test] fn dcparsing_4 () {
let env = setup (~"a:: b"); let env = setup (@"a:: b");
check_tokenization (env, check_tokenization (env,
~[mk_ident("a",true), ~[mk_ident("a",true),
token::MOD_SEP, token::MOD_SEP,
@ -880,28 +880,28 @@ mod test {
} }
#[test] fn character_a() { #[test] fn character_a() {
let env = setup(~"'a'"); let env = setup(@"'a'");
let TokenAndSpan {tok, sp: _} = let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token(); env.string_reader.next_token();
assert_eq!(tok,token::LIT_INT('a' as i64, ast::ty_char)); assert_eq!(tok,token::LIT_INT('a' as i64, ast::ty_char));
} }
#[test] fn character_space() { #[test] fn character_space() {
let env = setup(~"' '"); let env = setup(@"' '");
let TokenAndSpan {tok, sp: _} = let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token(); env.string_reader.next_token();
assert_eq!(tok, token::LIT_INT(' ' as i64, ast::ty_char)); assert_eq!(tok, token::LIT_INT(' ' as i64, ast::ty_char));
} }
#[test] fn character_escaped() { #[test] fn character_escaped() {
let env = setup(~"'\n'"); let env = setup(@"'\n'");
let TokenAndSpan {tok, sp: _} = let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token(); env.string_reader.next_token();
assert_eq!(tok, token::LIT_INT('\n' as i64, ast::ty_char)); assert_eq!(tok, token::LIT_INT('\n' as i64, ast::ty_char));
} }
#[test] fn lifetime_name() { #[test] fn lifetime_name() {
let env = setup(~"'abc"); let env = setup(@"'abc");
let TokenAndSpan {tok, sp: _} = let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token(); env.string_reader.next_token();
let id = token::str_to_ident("abc"); let id = token::str_to_ident("abc");
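The lexer's `src` field and its test helper now deal in `@str` end to end, so a `FileMap` can be built straight from `@str` literals. A minimal sketch along the lines of `setup` above; the file name and contents are invented:

    // Sketch: constructing a FileMap from @str inputs (pre-1.0 Rust).
    let cm = CodeMap::new();
    let fm = cm.new_filemap(@"example.rs", @"fn main() { }");
    let handler = diagnostic::mk_span_handler(diagnostic::mk_handler(None), @cm);
    let rdr = lexer::new_low_level_string_reader(handler, fm);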


@ -82,38 +82,38 @@ pub fn parse_crate_from_file(
} }
pub fn parse_crate_from_source_str( pub fn parse_crate_from_source_str(
name: ~str, name: @str,
source: @~str, source: @str,
cfg: ast::crate_cfg, cfg: ast::crate_cfg,
sess: @mut ParseSess sess: @mut ParseSess
) -> @ast::crate { ) -> @ast::crate {
let p = new_parser_from_source_str( let p = new_parser_from_source_str(
sess, sess,
/*bad*/ copy cfg, /*bad*/ copy cfg,
/*bad*/ copy name, name,
source source
); );
maybe_aborted(p.parse_crate_mod(),p) maybe_aborted(p.parse_crate_mod(),p)
} }
pub fn parse_expr_from_source_str( pub fn parse_expr_from_source_str(
name: ~str, name: @str,
source: @~str, source: @str,
cfg: ast::crate_cfg, cfg: ast::crate_cfg,
sess: @mut ParseSess sess: @mut ParseSess
) -> @ast::expr { ) -> @ast::expr {
let p = new_parser_from_source_str( let p = new_parser_from_source_str(
sess, sess,
cfg, cfg,
/*bad*/ copy name, name,
source source
); );
maybe_aborted(p.parse_expr(), p) maybe_aborted(p.parse_expr(), p)
} }
pub fn parse_item_from_source_str( pub fn parse_item_from_source_str(
name: ~str, name: @str,
source: @~str, source: @str,
cfg: ast::crate_cfg, cfg: ast::crate_cfg,
attrs: ~[ast::attribute], attrs: ~[ast::attribute],
sess: @mut ParseSess sess: @mut ParseSess
@ -121,30 +121,30 @@ pub fn parse_item_from_source_str(
let p = new_parser_from_source_str( let p = new_parser_from_source_str(
sess, sess,
cfg, cfg,
/*bad*/ copy name, name,
source source
); );
maybe_aborted(p.parse_item(attrs),p) maybe_aborted(p.parse_item(attrs),p)
} }
pub fn parse_meta_from_source_str( pub fn parse_meta_from_source_str(
name: ~str, name: @str,
source: @~str, source: @str,
cfg: ast::crate_cfg, cfg: ast::crate_cfg,
sess: @mut ParseSess sess: @mut ParseSess
) -> @ast::meta_item { ) -> @ast::meta_item {
let p = new_parser_from_source_str( let p = new_parser_from_source_str(
sess, sess,
cfg, cfg,
/*bad*/ copy name, name,
source source
); );
maybe_aborted(p.parse_meta_item(),p) maybe_aborted(p.parse_meta_item(),p)
} }
pub fn parse_stmt_from_source_str( pub fn parse_stmt_from_source_str(
name: ~str, name: @str,
source: @~str, source: @str,
cfg: ast::crate_cfg, cfg: ast::crate_cfg,
attrs: ~[ast::attribute], attrs: ~[ast::attribute],
sess: @mut ParseSess sess: @mut ParseSess
@ -152,22 +152,22 @@ pub fn parse_stmt_from_source_str(
let p = new_parser_from_source_str( let p = new_parser_from_source_str(
sess, sess,
cfg, cfg,
/*bad*/ copy name, name,
source source
); );
maybe_aborted(p.parse_stmt(attrs),p) maybe_aborted(p.parse_stmt(attrs),p)
} }
pub fn parse_tts_from_source_str( pub fn parse_tts_from_source_str(
name: ~str, name: @str,
source: @~str, source: @str,
cfg: ast::crate_cfg, cfg: ast::crate_cfg,
sess: @mut ParseSess sess: @mut ParseSess
) -> ~[ast::token_tree] { ) -> ~[ast::token_tree] {
let p = new_parser_from_source_str( let p = new_parser_from_source_str(
sess, sess,
cfg, cfg,
/*bad*/ copy name, name,
source source
); );
*p.quote_depth += 1u; *p.quote_depth += 1u;
@ -182,8 +182,8 @@ pub fn parse_tts_from_source_str(
// result. // result.
pub fn parse_from_source_str<T>( pub fn parse_from_source_str<T>(
f: &fn(&Parser) -> T, f: &fn(&Parser) -> T,
name: ~str, ss: codemap::FileSubstr, name: @str, ss: codemap::FileSubstr,
source: @~str, source: @str,
cfg: ast::crate_cfg, cfg: ast::crate_cfg,
sess: @mut ParseSess sess: @mut ParseSess
) -> T { ) -> T {
@ -213,8 +213,8 @@ pub fn next_node_id(sess: @mut ParseSess) -> node_id {
// Create a new parser from a source string // Create a new parser from a source string
pub fn new_parser_from_source_str(sess: @mut ParseSess, pub fn new_parser_from_source_str(sess: @mut ParseSess,
cfg: ast::crate_cfg, cfg: ast::crate_cfg,
name: ~str, name: @str,
source: @~str) source: @str)
-> Parser { -> Parser {
filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg) filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg)
} }
@ -223,9 +223,9 @@ pub fn new_parser_from_source_str(sess: @mut ParseSess,
// is specified as a substring of another file. // is specified as a substring of another file.
pub fn new_parser_from_source_substr(sess: @mut ParseSess, pub fn new_parser_from_source_substr(sess: @mut ParseSess,
cfg: ast::crate_cfg, cfg: ast::crate_cfg,
name: ~str, name: @str,
ss: codemap::FileSubstr, ss: codemap::FileSubstr,
source: @~str) source: @str)
-> Parser { -> Parser {
filemap_to_parser(sess,substring_to_filemap(sess,source,name,ss),cfg) filemap_to_parser(sess,substring_to_filemap(sess,source,name,ss),cfg)
} }
@ -275,7 +275,7 @@ pub fn new_parser_from_tts(sess: @mut ParseSess,
pub fn file_to_filemap(sess: @mut ParseSess, path: &Path, spanopt: Option<span>) pub fn file_to_filemap(sess: @mut ParseSess, path: &Path, spanopt: Option<span>)
-> @FileMap { -> @FileMap {
match io::read_whole_file_str(path) { match io::read_whole_file_str(path) {
Ok(src) => string_to_filemap(sess, @src, path.to_str()), Ok(src) => string_to_filemap(sess, src.to_managed(), path.to_str().to_managed()),
Err(e) => { Err(e) => {
match spanopt { match spanopt {
Some(span) => sess.span_diagnostic.span_fatal(span, e), Some(span) => sess.span_diagnostic.span_fatal(span, e),
@ -287,14 +287,14 @@ pub fn file_to_filemap(sess: @mut ParseSess, path: &Path, spanopt: Option<span>)
// given a session and a string, add the string to // given a session and a string, add the string to
// the session's codemap and return the new filemap // the session's codemap and return the new filemap
pub fn string_to_filemap(sess: @mut ParseSess, source: @~str, path: ~str) pub fn string_to_filemap(sess: @mut ParseSess, source: @str, path: @str)
-> @FileMap { -> @FileMap {
sess.cm.new_filemap(path, source) sess.cm.new_filemap(path, source)
} }
// given a session and a string and a path and a FileSubStr, add // given a session and a string and a path and a FileSubStr, add
// the string to the CodeMap and return the new FileMap // the string to the CodeMap and return the new FileMap
pub fn substring_to_filemap(sess: @mut ParseSess, source: @~str, path: ~str, pub fn substring_to_filemap(sess: @mut ParseSess, source: @str, path: @str,
filesubstr: FileSubstr) -> @FileMap { filesubstr: FileSubstr) -> @FileMap {
sess.cm.new_filemap_w_substr(path,filesubstr,source) sess.cm.new_filemap_w_substr(path,filesubstr,source)
} }
@ -349,7 +349,7 @@ mod test {
use util::parser_testing::{string_to_stmt, strs_to_idents}; use util::parser_testing::{string_to_stmt, strs_to_idents};
// map a string to tts, return the tt without its parsesess // map a string to tts, return the tt without its parsesess
fn string_to_tts_only(source_str : @~str) -> ~[ast::token_tree] { fn string_to_tts_only(source_str : @str) -> ~[ast::token_tree] {
let (tts,_ps) = string_to_tts_and_sess(source_str); let (tts,_ps) = string_to_tts_and_sess(source_str);
tts tts
} }
@ -368,7 +368,7 @@ mod test {
} }
#[test] fn path_exprs_1 () { #[test] fn path_exprs_1 () {
assert_eq!(string_to_expr(@~"a"), assert_eq!(string_to_expr(@"a"),
@ast::expr{id:1, @ast::expr{id:1,
node:ast::expr_path(@ast::Path {span:sp(0,1), node:ast::expr_path(@ast::Path {span:sp(0,1),
global:false, global:false,
@ -379,7 +379,7 @@ mod test {
} }
#[test] fn path_exprs_2 () { #[test] fn path_exprs_2 () {
assert_eq!(string_to_expr(@~"::a::b"), assert_eq!(string_to_expr(@"::a::b"),
@ast::expr{id:1, @ast::expr{id:1,
node:ast::expr_path( node:ast::expr_path(
@ast::Path {span:sp(0,6), @ast::Path {span:sp(0,6),
@ -394,11 +394,11 @@ mod test {
// marked as `#[should_fail]`. // marked as `#[should_fail]`.
/*#[should_fail] /*#[should_fail]
#[test] fn bad_path_expr_1() { #[test] fn bad_path_expr_1() {
string_to_expr(@~"::abc::def::return"); string_to_expr(@"::abc::def::return");
}*/ }*/
#[test] fn string_to_tts_1 () { #[test] fn string_to_tts_1 () {
let (tts,_ps) = string_to_tts_and_sess(@~"fn a (b : int) { b; }"); let (tts,_ps) = string_to_tts_and_sess(@"fn a (b : int) { b; }");
assert_eq!(to_json_str(@tts), assert_eq!(to_json_str(@tts),
~"[\ ~"[\
[\"tt_tok\",null,[\"IDENT\",\"fn\",false]],\ [\"tt_tok\",null,[\"IDENT\",\"fn\",false]],\
@ -427,7 +427,7 @@ mod test {
} }
#[test] fn ret_expr() { #[test] fn ret_expr() {
assert_eq!(string_to_expr(@~"return d"), assert_eq!(string_to_expr(@"return d"),
@ast::expr{id:2, @ast::expr{id:2,
node:ast::expr_ret( node:ast::expr_ret(
Some(@ast::expr{id:1, Some(@ast::expr{id:1,
@ -443,7 +443,7 @@ mod test {
} }
#[test] fn parse_stmt_1 () { #[test] fn parse_stmt_1 () {
assert_eq!(string_to_stmt(@~"b;"), assert_eq!(string_to_stmt(@"b;"),
@spanned{ @spanned{
node: ast::stmt_expr(@ast::expr{ node: ast::stmt_expr(@ast::expr{
id: 1, id: 1,
@ -465,7 +465,7 @@ mod test {
} }
#[test] fn parse_ident_pat () { #[test] fn parse_ident_pat () {
let parser = string_to_parser(@~"b"); let parser = string_to_parser(@"b");
assert_eq!(parser.parse_pat(), assert_eq!(parser.parse_pat(),
@ast::pat{id:1, // fixme @ast::pat{id:1, // fixme
node: ast::pat_ident(ast::bind_infer, node: ast::pat_ident(ast::bind_infer,
@ -482,7 +482,7 @@ mod test {
} }
#[test] fn parse_arg () { #[test] fn parse_arg () {
let parser = string_to_parser(@~"b : int"); let parser = string_to_parser(@"b : int");
assert_eq!(parser.parse_arg_general(true), assert_eq!(parser.parse_arg_general(true),
ast::arg{ ast::arg{
is_mutbl: false, is_mutbl: false,
@ -515,7 +515,7 @@ mod test {
#[test] fn parse_fundecl () { #[test] fn parse_fundecl () {
// this test depends on the intern order of "fn" and "int", and on the // this test depends on the intern order of "fn" and "int", and on the
// assignment order of the node_ids. // assignment order of the node_ids.
assert_eq!(string_to_item(@~"fn a (b : int) { b; }"), assert_eq!(string_to_item(@"fn a (b : int) { b; }"),
Some( Some(
@ast::item{ident:str_to_ident("a"), @ast::item{ident:str_to_ident("a"),
attrs:~[], attrs:~[],
@ -585,12 +585,12 @@ mod test {
#[test] fn parse_exprs () { #[test] fn parse_exprs () {
// just make sure that they parse.... // just make sure that they parse....
string_to_expr(@~"3 + 4"); string_to_expr(@"3 + 4");
string_to_expr(@~"a::z.froob(b,@(987+3))"); string_to_expr(@"a::z.froob(b,@(987+3))");
} }
#[test] fn attrs_fix_bug () { #[test] fn attrs_fix_bug () {
string_to_item(@~"pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) string_to_item(@"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
-> Result<@Writer, ~str> { -> Result<@Writer, ~str> {
#[cfg(windows)] #[cfg(windows)]
fn wb() -> c_int { fn wb() -> c_int {


@ -259,7 +259,7 @@ impl Parser {
-> bool { -> bool {
match *token { match *token {
token::IDENT(sid, _) => { token::IDENT(sid, _) => {
str::eq_slice(*self.id_to_str(sid), ident) str::eq_slice(self.id_to_str(sid), ident)
} }
_ => false _ => false
} }


@ -96,7 +96,6 @@ use core::iterator::IteratorUtil;
use core::either::Either; use core::either::Either;
use core::either; use core::either;
use core::hashmap::HashSet; use core::hashmap::HashSet;
use core::str;
use core::vec; use core::vec;
#[deriving(Eq)] #[deriving(Eq)]
@ -263,7 +262,7 @@ pub struct Parser {
/// extra detail when the same error is seen twice /// extra detail when the same error is seen twice
obsolete_set: @mut HashSet<ObsoleteSyntax>, obsolete_set: @mut HashSet<ObsoleteSyntax>,
/// Used to determine the path to externally loaded source files /// Used to determine the path to externally loaded source files
mod_path_stack: @mut ~[~str], mod_path_stack: @mut ~[@str],
} }
@ -333,7 +332,7 @@ impl Parser {
} }
pub fn get_id(&self) -> node_id { next_node_id(self.sess) } pub fn get_id(&self) -> node_id { next_node_id(self.sess) }
pub fn id_to_str(&self, id: ident) -> @~str { pub fn id_to_str(&self, id: ident) -> @str {
get_ident_interner().get(id.name) get_ident_interner().get(id.name)
} }
@ -2886,7 +2885,7 @@ impl Parser {
loop { loop {
match *self.token { match *self.token {
token::LIFETIME(lifetime) => { token::LIFETIME(lifetime) => {
if str::eq_slice(*self.id_to_str(lifetime), "static") { if "static" == self.id_to_str(lifetime) {
result.push(RegionTyParamBound); result.push(RegionTyParamBound);
} else { } else {
self.span_err(*self.span, self.span_err(*self.span,
@ -2898,11 +2897,11 @@ impl Parser {
let obsolete_bound = match *self.token { let obsolete_bound = match *self.token {
token::MOD_SEP => false, token::MOD_SEP => false,
token::IDENT(sid, _) => { token::IDENT(sid, _) => {
match *self.id_to_str(sid) { match self.id_to_str(sid).as_slice() {
~"send" | "send" |
~"copy" | "copy" |
~"const" | "const" |
~"owned" => { "owned" => {
self.obsolete( self.obsolete(
*self.span, *self.span,
ObsoleteLowerCaseKindBounds); ObsoleteLowerCaseKindBounds);
@ -3364,7 +3363,7 @@ impl Parser {
} }
if fields.len() == 0 { if fields.len() == 0 {
self.fatal(fmt!("Unit-like struct should be written as `struct %s;`", self.fatal(fmt!("Unit-like struct should be written as `struct %s;`",
*get_ident_interner().get(class_name.name))); get_ident_interner().get(class_name.name)));
} }
self.bump(); self.bump();
} else if *self.token == token::LPAREN { } else if *self.token == token::LPAREN {
@ -3580,8 +3579,8 @@ impl Parser {
let file_path = match ::attr::first_attr_value_str_by_name( let file_path = match ::attr::first_attr_value_str_by_name(
attrs, "path") { attrs, "path") {
Some(d) => copy *d, Some(d) => d,
None => copy *default_path None => default_path
}; };
self.mod_path_stack.push(file_path) self.mod_path_stack.push(file_path)
} }
@ -3599,13 +3598,13 @@ impl Parser {
let prefix = prefix.dir_path(); let prefix = prefix.dir_path();
let mod_path_stack = &*self.mod_path_stack; let mod_path_stack = &*self.mod_path_stack;
let mod_path = Path(".").push_many(*mod_path_stack); let mod_path = Path(".").push_many(*mod_path_stack);
let default_path = *token::interner_get(id.name) + ".rs"; let default_path = token::interner_get(id.name).to_owned() + ".rs";
let file_path = match ::attr::first_attr_value_str_by_name( let file_path = match ::attr::first_attr_value_str_by_name(
outer_attrs, "path") { outer_attrs, "path") {
Some(d) => { Some(d) => {
let path = Path(copy *d); let path = Path(d);
if !path.is_absolute { if !path.is_absolute {
mod_path.push(copy *d) mod_path.push(d)
} else { } else {
path path
} }
@ -3637,9 +3636,9 @@ impl Parser {
let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs); let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs);
return (ast::item_mod(m0), mod_attrs); return (ast::item_mod(m0), mod_attrs);
fn cdir_path_opt(default: ~str, attrs: ~[ast::attribute]) -> ~str { fn cdir_path_opt(default: @str, attrs: ~[ast::attribute]) -> @str {
match ::attr::first_attr_value_str_by_name(attrs, "path") { match ::attr::first_attr_value_str_by_name(attrs, "path") {
Some(d) => copy *d, Some(d) => d,
None => default None => default
} }
} }
@ -4263,7 +4262,7 @@ impl Parser {
let first_ident = self.parse_ident(); let first_ident = self.parse_ident();
let mut path = ~[first_ident]; let mut path = ~[first_ident];
debug!("parsed view_path: %s", *self.id_to_str(first_ident)); debug!("parsed view_path: %s", self.id_to_str(first_ident));
match *self.token { match *self.token {
token::EQ => { token::EQ => {
// x = foo::bar // x = foo::bar
@ -4528,7 +4527,7 @@ impl Parser {
config: copy self.cfg }) config: copy self.cfg })
} }
pub fn parse_str(&self) -> @~str { pub fn parse_str(&self) -> @str {
match *self.token { match *self.token {
token::LIT_STR(s) => { token::LIT_STR(s) => {
self.bump(); self.bump();
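Because `id_to_str` now returns `@str`, the parser compares it against plain string literals and matches on it via `.as_slice()`, with no `*` deref and no `~`-prefixed patterns. A small sketch of both forms; the value is hypothetical:

    // Sketch: comparing and matching an @str (pre-1.0 Rust).
    let kw: @str = @"send";
    if "static" == kw { /* a 'static lifetime bound */ }
    match kw.as_slice() {
        "send" | "copy" | "const" | "owned" => { /* obsolete kind bound */ }
        _ => { /* anything else */ }
    }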


@ -21,10 +21,8 @@ use core::cast;
use core::char; use core::char;
use core::cmp::Equiv; use core::cmp::Equiv;
use core::local_data; use core::local_data;
use core::str;
use core::rand; use core::rand;
use core::rand::RngUtil; use core::rand::RngUtil;
use core::to_bytes;
#[deriving(Encodable, Decodable, Eq)] #[deriving(Encodable, Decodable, Eq)]
pub enum binop { pub enum binop {
@ -180,28 +178,28 @@ pub fn to_str(in: @ident_interner, t: &Token) -> ~str {
} }
LIT_INT_UNSUFFIXED(i) => { i.to_str() } LIT_INT_UNSUFFIXED(i) => { i.to_str() }
LIT_FLOAT(ref s, t) => { LIT_FLOAT(ref s, t) => {
let mut body = copy *ident_to_str(s); let mut body = ident_to_str(s).to_owned();
if body.ends_with(".") { if body.ends_with(".") {
body += "0"; // `10.f` is not a float literal body += "0"; // `10.f` is not a float literal
} }
body + ast_util::float_ty_to_str(t) body + ast_util::float_ty_to_str(t)
} }
LIT_FLOAT_UNSUFFIXED(ref s) => { LIT_FLOAT_UNSUFFIXED(ref s) => {
let mut body = copy *ident_to_str(s); let mut body = ident_to_str(s).to_owned();
if body.ends_with(".") { if body.ends_with(".") {
body += "0"; // `10.f` is not a float literal body += "0"; // `10.f` is not a float literal
} }
body body
} }
LIT_STR(ref s) => { ~"\"" + ident_to_str(s).escape_default() + "\"" } LIT_STR(ref s) => { fmt!("\"%s\"", ident_to_str(s).escape_default()) }
/* Name components */ /* Name components */
IDENT(s, _) => copy *in.get(s.name), IDENT(s, _) => in.get(s.name).to_owned(),
LIFETIME(s) => fmt!("'%s", *in.get(s.name)), LIFETIME(s) => fmt!("'%s", in.get(s.name)),
UNDERSCORE => ~"_", UNDERSCORE => ~"_",
/* Other */ /* Other */
DOC_COMMENT(ref s) => copy *ident_to_str(s), DOC_COMMENT(ref s) => ident_to_str(s).to_owned(),
EOF => ~"<eof>", EOF => ~"<eof>",
INTERPOLATED(ref nt) => { INTERPOLATED(ref nt) => {
match nt { match nt {
@ -350,20 +348,6 @@ pub mod special_idents {
pub static type_self: ident = ident { name: 34, ctxt: 0}; // `Self` pub static type_self: ident = ident { name: 34, ctxt: 0}; // `Self`
} }
pub struct StringRef<'self>(&'self str);
impl<'self> Equiv<@~str> for StringRef<'self> {
#[inline(always)]
fn equiv(&self, other: &@~str) -> bool { str::eq_slice(**self, **other) }
}
impl<'self> to_bytes::IterBytes for StringRef<'self> {
#[inline(always)]
fn iter_bytes(&self, lsb0: bool, f: to_bytes::Cb) -> bool {
(**self).iter_bytes(lsb0, f)
}
}
/** /**
* Maps a token to a record specifying the corresponding binary * Maps a token to a record specifying the corresponding binary
* operator * operator
@ -403,14 +387,14 @@ impl ident_interner {
pub fn gensym(&self, val: &str) -> Name { pub fn gensym(&self, val: &str) -> Name {
self.interner.gensym(val) self.interner.gensym(val)
} }
pub fn get(&self, idx: Name) -> @~str { pub fn get(&self, idx: Name) -> @str {
self.interner.get(idx) self.interner.get(idx)
} }
// is this really something that should be exposed? // is this really something that should be exposed?
pub fn len(&self) -> uint { pub fn len(&self) -> uint {
self.interner.len() self.interner.len()
} }
pub fn find_equiv<Q:Hash + IterBytes + Equiv<@~str>>(&self, val: &Q) pub fn find_equiv<Q:Hash + IterBytes + Equiv<@str>>(&self, val: &Q)
-> Option<Name> { -> Option<Name> {
self.interner.find_equiv(val) self.interner.find_equiv(val)
} }
@ -542,12 +526,12 @@ pub fn gensym(str : &str) -> Name {
} }
// map an interned representation back to a string // map an interned representation back to a string
pub fn interner_get(name : Name) -> @~str { pub fn interner_get(name : Name) -> @str {
get_ident_interner().get(name) get_ident_interner().get(name)
} }
// maps an identifier to the string that it corresponds to // maps an identifier to the string that it corresponds to
pub fn ident_to_str(id : &ast::ident) -> @~str { pub fn ident_to_str(id : &ast::ident) -> @str {
interner_get(id.name) interner_get(id.name)
} }
@ -715,6 +699,6 @@ mod test {
#[test] fn t1() { #[test] fn t1() {
let a = fresh_name("ghi"); let a = fresh_name("ghi");
io::println(fmt!("interned name: %u,\ntextual name: %s\n", io::println(fmt!("interned name: %u,\ntextual name: %s\n",
a,*interner_get(a))); a,interner_get(a)));
} }
} }
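With `interner_get` and `ident_to_str` returning `@str`, the round trip from a string to an ident and back needs no deref, as the `t1` test above shows. A hedged sketch; the name is arbitrary:

    // Sketch: ident <-> @str round trip (pre-1.0 Rust).
    let id = str_to_ident("zebra");
    let s: @str = ident_to_str(&id);              // was @~str before this commit
    io::println(fmt!("name %u maps to %s", id.name, s));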


@ -80,7 +80,7 @@ pub struct begin_t {
} }
pub enum token { pub enum token {
STRING(@~str, int), STRING(@str, int),
BREAK(break_t), BREAK(break_t),
BEGIN(begin_t), BEGIN(begin_t),
END, END,
@ -107,7 +107,7 @@ impl token {
pub fn tok_str(t: token) -> ~str { pub fn tok_str(t: token) -> ~str {
match t { match t {
STRING(s, len) => return fmt!("STR(%s,%d)", *s, len), STRING(s, len) => return fmt!("STR(%s,%d)", s, len),
BREAK(_) => return ~"BREAK", BREAK(_) => return ~"BREAK",
BEGIN(_) => return ~"BEGIN", BEGIN(_) => return ~"BEGIN",
END => return ~"END", END => return ~"END",
@ -335,11 +335,11 @@ impl Printer {
STRING(s, len) => { STRING(s, len) => {
if self.scan_stack_empty { if self.scan_stack_empty {
debug!("pp STRING('%s')/print ~[%u,%u]", debug!("pp STRING('%s')/print ~[%u,%u]",
*s, self.left, self.right); s, self.left, self.right);
self.print(t, len); self.print(t, len);
} else { } else {
debug!("pp STRING('%s')/buffer ~[%u,%u]", debug!("pp STRING('%s')/buffer ~[%u,%u]",
*s, self.left, self.right); s, self.left, self.right);
self.advance_right(); self.advance_right();
self.token[self.right] = t; self.token[self.right] = t;
self.size[self.right] = len; self.size[self.right] = len;
@ -534,11 +534,11 @@ impl Printer {
} }
} }
STRING(s, len) => { STRING(s, len) => {
debug!("print STRING(%s)", *s); debug!("print STRING(%s)", s);
assert_eq!(L, len); assert_eq!(L, len);
// assert!(L <= space); // assert!(L <= space);
self.space -= len; self.space -= len;
self.print_str(*s); self.print_str(s);
} }
EOF => { EOF => {
// EOF should never get here. // EOF should never get here.
@ -572,15 +572,15 @@ pub fn end(p: @mut Printer) { p.pretty_print(END); }
pub fn eof(p: @mut Printer) { p.pretty_print(EOF); } pub fn eof(p: @mut Printer) { p.pretty_print(EOF); }
pub fn word(p: @mut Printer, wrd: &str) { pub fn word(p: @mut Printer, wrd: &str) {
p.pretty_print(STRING(@/*bad*/ wrd.to_owned(), wrd.len() as int)); p.pretty_print(STRING(/* bad */ wrd.to_managed(), wrd.len() as int));
} }
pub fn huge_word(p: @mut Printer, wrd: &str) { pub fn huge_word(p: @mut Printer, wrd: &str) {
p.pretty_print(STRING(@/*bad*/ wrd.to_owned(), size_infinity)); p.pretty_print(STRING(/* bad */ wrd.to_managed(), size_infinity));
} }
pub fn zero_word(p: @mut Printer, wrd: &str) { pub fn zero_word(p: @mut Printer, wrd: &str) {
p.pretty_print(STRING(@/*bad*/ wrd.to_owned(), 0)); p.pretty_print(STRING(/* bad */ wrd.to_managed(), 0));
} }
pub fn spaces(p: @mut Printer, n: uint) { break_offset(p, n, 0); } pub fn spaces(p: @mut Printer, n: uint) { break_offset(p, n, 0); }
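In the pretty-printer, `STRING` tokens now carry `@str`, and `word`/`huge_word`/`zero_word` build them with `.to_managed()` (still marked `/* bad */` because it copies). A small, hedged sketch of the token side; the text is arbitrary:

    // Sketch: a pp token holding @str instead of @~str (pre-1.0 Rust).
    let t = STRING(@"fn", 2);
    assert_eq!(tok_str(t), ~"STR(fn,2)");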


@ -111,14 +111,14 @@ pub fn print_crate(cm: @CodeMap,
intr: @ident_interner, intr: @ident_interner,
span_diagnostic: @diagnostic::span_handler, span_diagnostic: @diagnostic::span_handler,
crate: @ast::crate, crate: @ast::crate,
filename: ~str, filename: @str,
in: @io::Reader, in: @io::Reader,
out: @io::Writer, out: @io::Writer,
ann: pp_ann, ann: pp_ann,
is_expanded: bool) { is_expanded: bool) {
let (cmnts, lits) = comments::gather_comments_and_literals( let (cmnts, lits) = comments::gather_comments_and_literals(
span_diagnostic, span_diagnostic,
copy filename, filename,
in in
); );
let s = @ps { let s = @ps {
@ -860,7 +860,7 @@ pub fn print_attribute(s: @ps, attr: ast::attribute) {
if attr.node.is_sugared_doc { if attr.node.is_sugared_doc {
let meta = attr::attr_meta(attr); let meta = attr::attr_meta(attr);
let comment = attr::get_meta_item_value_str(meta).get(); let comment = attr::get_meta_item_value_str(meta).get();
word(s.s, *comment); word(s.s, comment);
} else { } else {
word(s.s, "#["); word(s.s, "#[");
print_meta_item(s, attr.node.value); print_meta_item(s, attr.node.value);
@ -1400,10 +1400,10 @@ pub fn print_expr(s: @ps, expr: @ast::expr) {
word(s.s, "asm!"); word(s.s, "asm!");
} }
popen(s); popen(s);
print_string(s, *a.asm); print_string(s, a.asm);
word_space(s, ":"); word_space(s, ":");
for a.outputs.each |&(co, o)| { for a.outputs.each |&(co, o)| {
print_string(s, *co); print_string(s, co);
popen(s); popen(s);
print_expr(s, o); print_expr(s, o);
pclose(s); pclose(s);
@ -1411,14 +1411,14 @@ pub fn print_expr(s: @ps, expr: @ast::expr) {
} }
word_space(s, ":"); word_space(s, ":");
for a.inputs.each |&(co, o)| { for a.inputs.each |&(co, o)| {
print_string(s, *co); print_string(s, co);
popen(s); popen(s);
print_expr(s, o); print_expr(s, o);
pclose(s); pclose(s);
word_space(s, ","); word_space(s, ",");
} }
word_space(s, ":"); word_space(s, ":");
print_string(s, *a.clobbers); print_string(s, a.clobbers);
pclose(s); pclose(s);
} }
ast::expr_mac(ref m) => print_mac(s, m), ast::expr_mac(ref m) => print_mac(s, m),
@ -1474,7 +1474,7 @@ pub fn print_decl(s: @ps, decl: @ast::decl) {
} }
pub fn print_ident(s: @ps, ident: ast::ident) { pub fn print_ident(s: @ps, ident: ast::ident) {
word(s.s, *ident_to_str(&ident)); word(s.s, ident_to_str(&ident));
} }
pub fn print_for_decl(s: @ps, loc: @ast::local, coll: @ast::expr) { pub fn print_for_decl(s: @ps, loc: @ast::local, coll: @ast::expr) {
@ -1776,14 +1776,14 @@ pub fn print_generics(s: @ps, generics: &ast::Generics) {
pub fn print_meta_item(s: @ps, item: @ast::meta_item) { pub fn print_meta_item(s: @ps, item: @ast::meta_item) {
ibox(s, indent_unit); ibox(s, indent_unit);
match item.node { match item.node {
ast::meta_word(name) => word(s.s, *name), ast::meta_word(name) => word(s.s, name),
ast::meta_name_value(name, value) => { ast::meta_name_value(name, value) => {
word_space(s, *name); word_space(s, name);
word_space(s, "="); word_space(s, "=");
print_literal(s, @value); print_literal(s, @value);
} }
ast::meta_list(name, ref items) => { ast::meta_list(name, ref items) => {
word(s.s, *name); word(s.s, name);
popen(s); popen(s);
commasep( commasep(
s, s,
@ -1995,7 +1995,7 @@ pub fn print_literal(s: @ps, lit: @ast::lit) {
_ => () _ => ()
} }
match lit.node { match lit.node {
ast::lit_str(st) => print_string(s, *st), ast::lit_str(st) => print_string(s, st),
ast::lit_int(ch, ast::ty_char) => { ast::lit_int(ch, ast::ty_char) => {
word(s.s, ~"'" + char::escape_default(ch as char) + "'"); word(s.s, ~"'" + char::escape_default(ch as char) + "'");
} }
@ -2023,9 +2023,9 @@ pub fn print_literal(s: @ps, lit: @ast::lit) {
} }
} }
ast::lit_float(f, t) => { ast::lit_float(f, t) => {
word(s.s, *f + ast_util::float_ty_to_str(t)); word(s.s, f.to_owned() + ast_util::float_ty_to_str(t));
} }
ast::lit_float_unsuffixed(f) => word(s.s, *f), ast::lit_float_unsuffixed(f) => word(s.s, f),
ast::lit_nil => word(s.s, "()"), ast::lit_nil => word(s.s, "()"),
ast::lit_bool(val) => { ast::lit_bool(val) => {
if val { word(s.s, "true"); } else { word(s.s, "false"); } if val { word(s.s, "true"); } else { word(s.s, "false"); }
@ -2101,7 +2101,7 @@ pub fn print_comment(s: @ps, cmnt: &comments::cmnt) {
// We need to do at least one, possibly two hardbreaks. // We need to do at least one, possibly two hardbreaks.
let is_semi = let is_semi =
match s.s.last_token() { match s.s.last_token() {
pp::STRING(s, _) => *s == ~";", pp::STRING(s, _) => ";" == s,
_ => false _ => false
}; };
if is_semi || is_begin(s) || is_end(s) { hardbreak(s.s); } if is_semi || is_begin(s) || is_end(s) { hardbreak(s.s); }


@ -19,7 +19,6 @@ use core::prelude::*;
use core::cmp::Equiv; use core::cmp::Equiv;
use core::hashmap::HashMap; use core::hashmap::HashMap;
use syntax::parse::token::StringRef;
pub struct Interner<T> { pub struct Interner<T> {
priv map: @mut HashMap<T, uint>, priv map: @mut HashMap<T, uint>,
@ -80,8 +79,8 @@ impl<T:Eq + IterBytes + Hash + Const + Copy> Interner<T> {
// A StrInterner differs from Interner<String> in that it accepts // A StrInterner differs from Interner<String> in that it accepts
// borrowed pointers rather than @ ones, resulting in less allocation. // borrowed pointers rather than @ ones, resulting in less allocation.
pub struct StrInterner { pub struct StrInterner {
priv map: @mut HashMap<@~str, uint>, priv map: @mut HashMap<@str, uint>,
priv vect: @mut ~[@~str], priv vect: @mut ~[@str],
} }
// when traits can extend traits, we should extend index<uint,T> to get [] // when traits can extend traits, we should extend index<uint,T> to get []
@ -95,37 +94,38 @@ impl StrInterner {
pub fn prefill(init: &[&str]) -> StrInterner { pub fn prefill(init: &[&str]) -> StrInterner {
let rv = StrInterner::new(); let rv = StrInterner::new();
for init.each() |v| { rv.intern(*v); } for init.each |&v| { rv.intern(v); }
rv rv
} }
pub fn intern(&self, val: &str) -> uint { pub fn intern(&self, val: &str) -> uint {
match self.map.find_equiv(&StringRef(val)) { match self.map.find_equiv(&val) {
Some(&idx) => return idx, Some(&idx) => return idx,
None => (), None => (),
} }
let new_idx = self.len(); let new_idx = self.len();
self.map.insert(@val.to_owned(), new_idx); let val = val.to_managed();
self.vect.push(@val.to_owned()); self.map.insert(val, new_idx);
self.vect.push(val);
new_idx new_idx
} }
pub fn gensym(&self, val: &str) -> uint { pub fn gensym(&self, val: &str) -> uint {
let new_idx = self.len(); let new_idx = self.len();
// leave out of .map to avoid colliding // leave out of .map to avoid colliding
self.vect.push(@val.to_owned()); self.vect.push(val.to_managed());
new_idx new_idx
} }
// this isn't "pure" in the traditional sense, because it can go from // this isn't "pure" in the traditional sense, because it can go from
// failing to returning a value as items are interned. But for typestate, // failing to returning a value as items are interned. But for typestate,
// where we first check a pred and then rely on it, ceasing to fail is ok. // where we first check a pred and then rely on it, ceasing to fail is ok.
pub fn get(&self, idx: uint) -> @~str { self.vect[idx] } pub fn get(&self, idx: uint) -> @str { self.vect[idx] }
pub fn len(&self) -> uint { let vect = &*self.vect; vect.len() } pub fn len(&self) -> uint { let vect = &*self.vect; vect.len() }
pub fn find_equiv<Q:Hash + IterBytes + Equiv<@~str>>(&self, val: &Q) pub fn find_equiv<Q:Hash + IterBytes + Equiv<@str>>(&self, val: &Q)
-> Option<uint> { -> Option<uint> {
match self.map.find_equiv(val) { match self.map.find_equiv(val) {
Some(v) => Some(*v), Some(v) => Some(*v),
@ -140,41 +140,41 @@ mod tests {
#[test] #[test]
#[should_fail] #[should_fail]
fn i1 () { fn i1 () {
let i : Interner<@~str> = Interner::new(); let i : Interner<@str> = Interner::new();
i.get(13); i.get(13);
} }
#[test] #[test]
fn i2 () { fn i2 () {
let i : Interner<@~str> = Interner::new(); let i : Interner<@str> = Interner::new();
// first one is zero: // first one is zero:
assert_eq!(i.intern (@~"dog"), 0); assert_eq!(i.intern (@"dog"), 0);
// re-use gets the same entry: // re-use gets the same entry:
assert_eq!(i.intern (@~"dog"), 0); assert_eq!(i.intern (@"dog"), 0);
// different string gets a different #: // different string gets a different #:
assert_eq!(i.intern (@~"cat"), 1); assert_eq!(i.intern (@"cat"), 1);
assert_eq!(i.intern (@~"cat"), 1); assert_eq!(i.intern (@"cat"), 1);
// dog is still at zero // dog is still at zero
assert_eq!(i.intern (@~"dog"), 0); assert_eq!(i.intern (@"dog"), 0);
// gensym gets 3 // gensym gets 3
assert_eq!(i.gensym (@~"zebra" ), 2); assert_eq!(i.gensym (@"zebra" ), 2);
// gensym of same string gets new number : // gensym of same string gets new number :
assert_eq!(i.gensym (@~"zebra" ), 3); assert_eq!(i.gensym (@"zebra" ), 3);
// gensym of *existing* string gets new number: // gensym of *existing* string gets new number:
assert_eq!(i.gensym (@~"dog"), 4); assert_eq!(i.gensym (@"dog"), 4);
assert_eq!(i.get(0), @~"dog"); assert_eq!(i.get(0), @"dog");
assert_eq!(i.get(1), @~"cat"); assert_eq!(i.get(1), @"cat");
assert_eq!(i.get(2), @~"zebra"); assert_eq!(i.get(2), @"zebra");
assert_eq!(i.get(3), @~"zebra"); assert_eq!(i.get(3), @"zebra");
assert_eq!(i.get(4), @~"dog"); assert_eq!(i.get(4), @"dog");
} }
#[test] #[test]
fn i3 () { fn i3 () {
let i : Interner<@~str> = Interner::prefill([@~"Alan",@~"Bob",@~"Carol"]); let i : Interner<@str> = Interner::prefill([@"Alan",@"Bob",@"Carol"]);
assert_eq!(i.get(0), @~"Alan"); assert_eq!(i.get(0), @"Alan");
assert_eq!(i.get(1), @~"Bob"); assert_eq!(i.get(1), @"Bob");
assert_eq!(i.get(2), @~"Carol"); assert_eq!(i.get(2), @"Carol");
assert_eq!(i.intern(@~"Bob"), 1); assert_eq!(i.intern(@"Bob"), 1);
} }
} }
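The `StrInterner` now stores `@str` internally, and since plain string slices are Equiv to `@str`, it no longer needs the `StringRef` wrapper to be queried. A short usage sketch mirroring the tests above:

    // Sketch: StrInterner with @str entries (pre-1.0 Rust).
    let i = StrInterner::new();
    let dog = i.intern("dog");           // interns from a &str, stores @str
    assert_eq!(i.intern("dog"), dog);    // re-interning finds the existing entry
    assert_eq!(i.get(dog), @"dog");      // lookups hand back @str, not @~str
    assert!(i.gensym("dog") != dog);     // gensym always mints a fresh name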


@ -18,50 +18,50 @@ use parse::token;
// map a string to tts, using a made-up filename: return both the token_trees // map a string to tts, using a made-up filename: return both the token_trees
// and the ParseSess // and the ParseSess
pub fn string_to_tts_and_sess (source_str : @~str) -> (~[ast::token_tree],@mut ParseSess) { pub fn string_to_tts_and_sess (source_str : @str) -> (~[ast::token_tree],@mut ParseSess) {
let ps = new_parse_sess(None); let ps = new_parse_sess(None);
(filemap_to_tts(ps,string_to_filemap(ps,source_str,~"bogofile")),ps) (filemap_to_tts(ps,string_to_filemap(ps,source_str,@"bogofile")),ps)
} }
pub fn string_to_parser_and_sess(source_str: @~str) -> (Parser,@mut ParseSess) { pub fn string_to_parser_and_sess(source_str: @str) -> (Parser,@mut ParseSess) {
let ps = new_parse_sess(None); let ps = new_parse_sess(None);
(new_parser_from_source_str(ps,~[],~"bogofile",source_str),ps) (new_parser_from_source_str(ps,~[],@"bogofile",source_str),ps)
} }
// map string to parser (via tts) // map string to parser (via tts)
pub fn string_to_parser(source_str: @~str) -> Parser { pub fn string_to_parser(source_str: @str) -> Parser {
let (p,_) = string_to_parser_and_sess(source_str); let (p,_) = string_to_parser_and_sess(source_str);
p p
} }
pub fn string_to_crate (source_str : @~str) -> @ast::crate { pub fn string_to_crate (source_str : @str) -> @ast::crate {
string_to_parser(source_str).parse_crate_mod() string_to_parser(source_str).parse_crate_mod()
} }
// parse a string, return an expr // parse a string, return an expr
pub fn string_to_expr (source_str : @~str) -> @ast::expr { pub fn string_to_expr (source_str : @str) -> @ast::expr {
string_to_parser(source_str).parse_expr() string_to_parser(source_str).parse_expr()
} }
// parse a string, return an item // parse a string, return an item
pub fn string_to_item (source_str : @~str) -> Option<@ast::item> { pub fn string_to_item (source_str : @str) -> Option<@ast::item> {
string_to_parser(source_str).parse_item(~[]) string_to_parser(source_str).parse_item(~[])
} }
// parse a string, return an item and the ParseSess // parse a string, return an item and the ParseSess
pub fn string_to_item_and_sess (source_str : @~str) -> (Option<@ast::item>,@mut ParseSess) { pub fn string_to_item_and_sess (source_str : @str) -> (Option<@ast::item>,@mut ParseSess) {
let (p,ps) = string_to_parser_and_sess(source_str); let (p,ps) = string_to_parser_and_sess(source_str);
(p.parse_item(~[]),ps) (p.parse_item(~[]),ps)
} }
// parse a string, return a stmt // parse a string, return a stmt
pub fn string_to_stmt(source_str : @~str) -> @ast::stmt { pub fn string_to_stmt(source_str : @str) -> @ast::stmt {
string_to_parser(source_str).parse_stmt(~[]) string_to_parser(source_str).parse_stmt(~[])
} }
// parse a string, return a pat. Uses "irrefutable"... which doesn't // parse a string, return a pat. Uses "irrefutable"... which doesn't
// (currently) affect parsing. // (currently) affect parsing.
pub fn string_to_pat(source_str : @~str) -> @ast::pat { pub fn string_to_pat(source_str : @str) -> @ast::pat {
string_to_parser(source_str).parse_pat() string_to_parser(source_str).parse_pat()
} }
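The parser_testing helpers follow suit and take `@str` source directly from a literal. A couple of hedged examples reusing inputs that already appear in the parser tests above:

    // Sketch: driving the test helpers with @str literals (pre-1.0 Rust).
    let e  = string_to_expr(@"3 + 4");
    let it = string_to_item(@"fn a (b : int) { b; }");
    let st = string_to_stmt(@"b;");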