librustc: Remove all uses of `~str` from librustc.

Author: Patrick Walton
Date:   2014-05-09 18:45:36 -07:00
Parent: e8053b9a7f
Commit: 6559a3675e
83 changed files with 2014 additions and 1439 deletions
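For orientation, the pattern applied throughout this diff is: return owned strings as the growable buffer type (`StrBuf`, which soon after became today's `String`), take borrowed `&str` views explicitly via `.as_slice()`, and build owned results with `format_strbuf!`/`.to_strbuf()` instead of `format!`/`.to_owned()` on the old `~str`. Below is a minimal sketch of the same shape in current Rust, not the 2014 code itself; the `CrateId` stand-in and the `hash` parameter are hypothetical simplifications so the example is self-contained and compiles.

// --- illustrative sketch (not part of the commit) ---
// Modern equivalents: StrBuf -> String, .to_strbuf() -> .to_string(),
// .as_slice() -> .as_str() (or deref coercion), format_strbuf! -> format!.

struct CrateId {
    name: String,
    version: Option<String>,
}

impl CrateId {
    fn version_or_default(&self) -> &str {
        self.version.as_deref().unwrap_or("0.0")
    }
}

// Owned result built with `format!`, the analogue of `format_strbuf!`
// used for functions like `output_lib_filename` in the diff.
fn output_lib_filename(id: &CrateId, hash: &str) -> String {
    format!("{}-{}-{}", id.name, hash, id.version_or_default())
}

// Callers that only need a borrowed view take `&str`, mirroring the
// many `.as_slice()` calls this commit adds at call sites.
fn is_rlib(filename: &str) -> bool {
    filename.ends_with(".rlib")
}

fn main() {
    let id = CrateId { name: "rustc".to_string(), version: None };
    let name = output_lib_filename(&id, "deadbeef");
    // `name.as_str()` is the modern spelling of `.as_slice()`.
    assert!(!is_rlib(name.as_str()));
    println!("{}", name);
}
// --- end sketch ---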


@ -129,12 +129,12 @@ impl<'a> Archive<'a> {
}
/// Lists all files in an archive
pub fn files(&self) -> Vec<~str> {
pub fn files(&self) -> Vec<StrBuf> {
let output = run_ar(self.sess, "t", None, [&self.dst]);
let output = str::from_utf8(output.output.as_slice()).unwrap();
// use lines_any because windows delimits output with `\r\n` instead of
// just `\n`
output.lines_any().map(|s| s.to_owned()).collect()
output.lines_any().map(|s| s.to_strbuf()).collect()
}
fn add_archive(&mut self, archive: &Path, name: &str,


@ -13,56 +13,57 @@ use driver::config::cfg_os_to_meta_os;
use metadata::loader::meta_section_name;
use syntax::abi;
pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::t {
let cc_args = if target_triple.contains("thumb") {
vec!("-mthumb".to_owned())
pub fn get_target_strs(target_triple: StrBuf, target_os: abi::Os) -> target_strs::t {
let cc_args = if target_triple.as_slice().contains("thumb") {
vec!("-mthumb".to_strbuf())
} else {
vec!("-marm".to_owned())
vec!("-marm".to_strbuf())
};
return target_strs::t {
module_asm: "".to_owned(),
module_asm: "".to_strbuf(),
meta_sect_name: meta_section_name(cfg_os_to_meta_os(target_os)).to_owned(),
meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
data_layout: match target_os {
abi::OsMacos => {
"e-p:32:32:32".to_owned() +
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
"-f32:32:32-f64:64:64" +
"-v64:64:64-v128:64:128" +
"-a0:0:64-n32"
"e-p:32:32:32\
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
}
abi::OsWin32 => {
"e-p:32:32:32".to_owned() +
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
"-f32:32:32-f64:64:64" +
"-v64:64:64-v128:64:128" +
"-a0:0:64-n32"
"e-p:32:32:32\
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
}
abi::OsLinux => {
"e-p:32:32:32".to_owned() +
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
"-f32:32:32-f64:64:64" +
"-v64:64:64-v128:64:128" +
"-a0:0:64-n32"
"e-p:32:32:32\
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
}
abi::OsAndroid => {
"e-p:32:32:32".to_owned() +
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
"-f32:32:32-f64:64:64" +
"-v64:64:64-v128:64:128" +
"-a0:0:64-n32"
"e-p:32:32:32\
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
}
abi::OsFreebsd => {
"e-p:32:32:32".to_owned() +
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
"-f32:32:32-f64:64:64" +
"-v64:64:64-v128:64:128" +
"-a0:0:64-n32"
"e-p:32:32:32\
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
}
},


@ -54,15 +54,15 @@ pub enum OutputType {
OutputTypeExe,
}
pub fn llvm_err(sess: &Session, msg: ~str) -> ! {
pub fn llvm_err(sess: &Session, msg: StrBuf) -> ! {
unsafe {
let cstr = llvm::LLVMRustGetLastError();
if cstr == ptr::null() {
sess.fatal(msg);
sess.fatal(msg.as_slice());
} else {
let err = CString::new(cstr, true);
let err = str::from_utf8_lossy(err.as_bytes());
sess.fatal(msg + ": " + err.as_slice());
sess.fatal((msg.as_slice() + ": " + err.as_slice()));
}
}
}
@ -79,7 +79,7 @@ pub fn WriteOutputFile(
let result = llvm::LLVMRustWriteOutputFile(
target, pm, m, output, file_type);
if !result {
llvm_err(sess, "could not write output".to_owned());
llvm_err(sess, "could not write output".to_strbuf());
}
})
}
@ -115,7 +115,7 @@ pub mod write {
fn target_feature<'a>(sess: &'a Session) -> &'a str {
match sess.targ_cfg.os {
abi::OsAndroid => {
if "" == sess.opts.cg.target_feature {
if "" == sess.opts.cg.target_feature.as_slice() {
"+v7"
} else {
sess.opts.cg.target_feature.as_slice()
@ -173,8 +173,12 @@ pub mod write {
}
};
let tm = sess.targ_cfg.target_strs.target_triple.with_c_str(|t| {
sess.opts.cg.target_cpu.with_c_str(|cpu| {
let tm = sess.targ_cfg
.target_strs
.target_triple
.as_slice()
.with_c_str(|t| {
sess.opts.cg.target_cpu.as_slice().with_c_str(|cpu| {
target_feature(sess).with_c_str(|features| {
llvm::LLVMRustCreateTargetMachine(
t, cpu, features,
@ -201,7 +205,7 @@ pub mod write {
// If we're verifying or linting, add them to the function pass
// manager.
let addpass = |pass: &str| {
pass.with_c_str(|s| llvm::LLVMRustAddPass(fpm, s))
pass.as_slice().with_c_str(|s| llvm::LLVMRustAddPass(fpm, s))
};
if !sess.no_verify() { assert!(addpass("verify")); }
@ -212,7 +216,7 @@ pub mod write {
}
for pass in sess.opts.cg.passes.iter() {
pass.with_c_str(|s| {
pass.as_slice().with_c_str(|s| {
if !llvm::LLVMRustAddPass(mpm, s) {
sess.warn(format!("unknown pass {}, ignoring", *pass));
}
@ -355,7 +359,7 @@ pub mod write {
assembly.as_str().unwrap().to_owned()];
debug!("{} '{}'", cc, args.connect("' '"));
match Process::output(cc, args) {
match Process::output(cc.as_slice(), args) {
Ok(prog) => {
if !prog.status.success() {
sess.err(format!("linking with `{}` failed: {}", cc, prog.status));
@ -400,7 +404,7 @@ pub mod write {
if sess.print_llvm_passes() { add("-debug-pass=Structure"); }
for arg in sess.opts.cg.llvm_args.iter() {
add(*arg);
add((*arg).as_slice());
}
}
@ -527,19 +531,20 @@ pub fn find_crate_id(attrs: &[ast::Attribute], out_filestem: &str) -> CrateId {
match attr::find_crateid(attrs) {
None => from_str(out_filestem).unwrap_or_else(|| {
let mut s = out_filestem.chars().filter(|c| c.is_XID_continue());
from_str(s.collect::<~str>()).or(from_str("rust-out")).unwrap()
from_str(s.collect::<StrBuf>()
.to_owned()).or(from_str("rust-out")).unwrap()
}),
Some(s) => s,
}
}
pub fn crate_id_hash(crate_id: &CrateId) -> ~str {
pub fn crate_id_hash(crate_id: &CrateId) -> StrBuf {
// This calculates CMH as defined above. Note that we don't use the path of
// the crate id in the hash because lookups are only done by (name/vers),
// not by path.
let mut s = Sha256::new();
s.input_str(crate_id.short_name_with_version().as_slice());
truncated_hash_result(&mut s).slice_to(8).to_owned()
truncated_hash_result(&mut s).as_slice().slice_to(8).to_strbuf()
}
pub fn build_link_meta(krate: &ast::Crate, out_filestem: &str) -> LinkMeta {
@ -551,10 +556,10 @@ pub fn build_link_meta(krate: &ast::Crate, out_filestem: &str) -> LinkMeta {
return r;
}
fn truncated_hash_result(symbol_hasher: &mut Sha256) -> ~str {
fn truncated_hash_result(symbol_hasher: &mut Sha256) -> StrBuf {
let output = symbol_hasher.result_bytes();
// 64 bits should be enough to avoid collisions.
output.slice_to(8).to_hex()
output.slice_to(8).to_hex().to_strbuf()
}
@ -563,7 +568,7 @@ fn symbol_hash(tcx: &ty::ctxt,
symbol_hasher: &mut Sha256,
t: ty::t,
link_meta: &LinkMeta)
-> ~str {
-> StrBuf {
// NB: do *not* use abbrevs here as we want the symbol names
// to be independent of one another in the crate.
@ -572,16 +577,16 @@ fn symbol_hash(tcx: &ty::ctxt,
symbol_hasher.input_str("-");
symbol_hasher.input_str(link_meta.crate_hash.as_str());
symbol_hasher.input_str("-");
symbol_hasher.input_str(encoder::encoded_ty(tcx, t));
symbol_hasher.input_str(encoder::encoded_ty(tcx, t).as_slice());
// Prefix with 'h' so that it never blends into adjacent digits
let mut hash = StrBuf::from_str("h");
hash.push_str(truncated_hash_result(symbol_hasher));
hash.into_owned()
hash.push_str(truncated_hash_result(symbol_hasher).as_slice());
hash
}
fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> ~str {
fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> StrBuf {
match ccx.type_hashcodes.borrow().find(&t) {
Some(h) => return h.to_str(),
Some(h) => return h.to_strbuf(),
None => {}
}
@ -595,7 +600,7 @@ fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> ~str {
// Name sanitation. LLVM will happily accept identifiers with weird names, but
// gas doesn't!
// gas accepts the following characters in symbols: a-z, A-Z, 0-9, ., _, $
pub fn sanitize(s: &str) -> ~str {
pub fn sanitize(s: &str) -> StrBuf {
let mut result = StrBuf::new();
for c in s.chars() {
match c {
@ -630,11 +635,10 @@ pub fn sanitize(s: &str) -> ~str {
}
// Underscore-qualify anything that didn't start as an ident.
let result = result.into_owned();
if result.len() > 0u &&
result[0] != '_' as u8 &&
! char::is_XID_start(result[0] as char) {
return "_".to_owned() + result;
result.as_slice()[0] != '_' as u8 &&
! char::is_XID_start(result.as_slice()[0] as char) {
return ("_" + result.as_slice()).to_strbuf();
}
return result;
@ -642,7 +646,7 @@ pub fn sanitize(s: &str) -> ~str {
pub fn mangle<PI: Iterator<PathElem>>(mut path: PI,
hash: Option<&str>,
vers: Option<&str>) -> ~str {
vers: Option<&str>) -> StrBuf {
// Follow C++ namespace-mangling style, see
// http://en.wikipedia.org/wiki/Name_mangling for more info.
//
@ -679,10 +683,10 @@ pub fn mangle<PI: Iterator<PathElem>>(mut path: PI,
}
n.push_char('E'); // End name-sequence.
n.into_owned()
n
}
pub fn exported_name(path: PathElems, hash: &str, vers: &str) -> ~str {
pub fn exported_name(path: PathElems, hash: &str, vers: &str) -> StrBuf {
// The version will get mangled to have a leading '_', but it makes more
// sense to lead with a 'v' b/c this is a version...
let vers = if vers.len() > 0 && !char::is_XID_start(vers.char_at(0)) {
@ -695,8 +699,8 @@ pub fn exported_name(path: PathElems, hash: &str, vers: &str) -> ~str {
}
pub fn mangle_exported_name(ccx: &CrateContext, path: PathElems,
t: ty::t, id: ast::NodeId) -> ~str {
let mut hash = StrBuf::from_owned_str(get_symbol_hash(ccx, t));
t: ty::t, id: ast::NodeId) -> StrBuf {
let mut hash = get_symbol_hash(ccx, t);
// Paths can be completely identical for different nodes,
// e.g. `fn foo() { { fn a() {} } { fn a() {} } }`, so we
@ -723,25 +727,28 @@ pub fn mangle_exported_name(ccx: &CrateContext, path: PathElems,
pub fn mangle_internal_name_by_type_and_seq(ccx: &CrateContext,
t: ty::t,
name: &str) -> ~str {
name: &str) -> StrBuf {
let s = ppaux::ty_to_str(ccx.tcx(), t);
let path = [PathName(token::intern(s)),
let path = [PathName(token::intern(s.as_slice())),
gensym_name(name)];
let hash = get_symbol_hash(ccx, t);
mangle(ast_map::Values(path.iter()), Some(hash.as_slice()), None)
}
pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> ~str {
pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> StrBuf {
mangle(path.chain(Some(gensym_name(flav)).move_iter()), None, None)
}
pub fn output_lib_filename(id: &CrateId) -> ~str {
format!("{}-{}-{}", id.name, crate_id_hash(id), id.version_or_default())
pub fn output_lib_filename(id: &CrateId) -> StrBuf {
format_strbuf!("{}-{}-{}",
id.name,
crate_id_hash(id),
id.version_or_default())
}
pub fn get_cc_prog(sess: &Session) -> ~str {
pub fn get_cc_prog(sess: &Session) -> StrBuf {
match sess.opts.cg.linker {
Some(ref linker) => return linker.to_owned(),
Some(ref linker) => return linker.to_strbuf(),
None => {}
}
@ -750,23 +757,23 @@ pub fn get_cc_prog(sess: &Session) -> ~str {
// instead of hard-coded gcc.
// For win32, there is no cc command, so we add a condition to make it use gcc.
match sess.targ_cfg.os {
abi::OsWin32 => return "gcc".to_owned(),
abi::OsWin32 => return "gcc".to_strbuf(),
_ => {},
}
get_system_tool(sess, "cc")
}
pub fn get_ar_prog(sess: &Session) -> ~str {
pub fn get_ar_prog(sess: &Session) -> StrBuf {
match sess.opts.cg.ar {
Some(ref ar) => return ar.to_owned(),
Some(ref ar) => return (*ar).clone(),
None => {}
}
get_system_tool(sess, "ar")
}
fn get_system_tool(sess: &Session, tool: &str) -> ~str {
fn get_system_tool(sess: &Session, tool: &str) -> StrBuf {
match sess.targ_cfg.os {
abi::OsAndroid => match sess.opts.cg.android_cross_path {
Some(ref path) => {
@ -774,14 +781,16 @@ fn get_system_tool(sess: &Session, tool: &str) -> ~str {
"cc" => "gcc",
_ => tool
};
format!("{}/bin/arm-linux-androideabi-{}", *path, tool_str)
format_strbuf!("{}/bin/arm-linux-androideabi-{}",
*path,
tool_str)
}
None => {
sess.fatal(format!("need Android NDK path for the '{}' tool \
(-C android-cross-path)", tool))
}
},
_ => tool.to_owned(),
_ => tool.to_strbuf(),
}
}
@ -1022,7 +1031,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
continue
}
};
a.add_rlib(&p, name, sess.lto()).unwrap();
a.add_rlib(&p, name.as_slice(), sess.lto()).unwrap();
let native_libs = csearch::get_native_libraries(&sess.cstore, cnum);
for &(kind, ref lib) in native_libs.iter() {
let name = match kind {
@ -1057,7 +1066,11 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
// Invoke the system linker
debug!("{} {}", cc_prog, cc_args.connect(" "));
let prog = time(sess.time_passes(), "running linker", (), |()|
Process::output(cc_prog, cc_args.as_slice()));
Process::output(cc_prog.as_slice(),
cc_args.iter()
.map(|x| (*x).to_owned())
.collect::<Vec<_>>()
.as_slice()));
match prog {
Ok(prog) => {
if !prog.status.success() {
@ -1096,20 +1109,20 @@ fn link_args(sess: &Session,
tmpdir: &Path,
trans: &CrateTranslation,
obj_filename: &Path,
out_filename: &Path) -> Vec<~str> {
out_filename: &Path) -> Vec<StrBuf> {
// The default library location, we need this to find the runtime.
// The location of crates will be determined as needed.
// FIXME (#9639): This needs to handle non-utf8 paths
let lib_path = sess.target_filesearch().get_lib_path();
let stage: ~str = "-L".to_owned() + lib_path.as_str().unwrap();
let stage = ("-L".to_owned() + lib_path.as_str().unwrap()).to_strbuf();
let mut args = vec!(stage);
// FIXME (#9639): This needs to handle non-utf8 paths
args.push_all([
"-o".to_owned(), out_filename.as_str().unwrap().to_owned(),
obj_filename.as_str().unwrap().to_owned()]);
"-o".to_strbuf(), out_filename.as_str().unwrap().to_strbuf(),
obj_filename.as_str().unwrap().to_strbuf()]);
// Stack growth requires statically linking a __morestack function. Note
// that this is listed *before* all other libraries, even though it may be
@ -1126,14 +1139,14 @@ fn link_args(sess: &Session,
// line, but inserting this farther to the left makes the
// "rust_stack_exhausted" symbol an outstanding undefined symbol, which
// flags libstd as a required library (or whatever provides the symbol).
args.push("-lmorestack".to_owned());
args.push("-lmorestack".to_strbuf());
// When linking a dynamic library, we put the metadata into a section of the
// executable. This metadata is in a separate object file from the main
// object file, so we link that in here.
if dylib {
let metadata = obj_filename.with_extension("metadata.o");
args.push(metadata.as_str().unwrap().to_owned());
args.push(metadata.as_str().unwrap().to_strbuf());
}
// We want to prevent the compiler from accidentally leaking in any system
@ -1144,7 +1157,7 @@ fn link_args(sess: &Session,
//
// FIXME(#11937) we should invoke the system linker directly
if sess.targ_cfg.os != abi::OsWin32 {
args.push("-nodefaultlibs".to_owned());
args.push("-nodefaultlibs".to_strbuf());
}
// If we're building a dylib, we don't use --gc-sections because LLVM has
@ -1152,20 +1165,20 @@ fn link_args(sess: &Session,
// metadata. If we're building an executable, however, --gc-sections drops
// the size of hello world from 1.8MB to 597K, a 67% reduction.
if !dylib && sess.targ_cfg.os != abi::OsMacos {
args.push("-Wl,--gc-sections".to_owned());
args.push("-Wl,--gc-sections".to_strbuf());
}
if sess.targ_cfg.os == abi::OsLinux {
// GNU-style linkers will use this to omit linking to libraries which
// don't actually fulfill any relocations, but only for libraries which
// follow this flag. Thus, use it before specifying libraries to link to.
args.push("-Wl,--as-needed".to_owned());
args.push("-Wl,--as-needed".to_strbuf());
// GNU-style linkers support optimization with -O. GNU ld doesn't need a
// numeric argument, but other linkers do.
if sess.opts.optimize == config::Default ||
sess.opts.optimize == config::Aggressive {
args.push("-Wl,-O1".to_owned());
args.push("-Wl,-O1".to_strbuf());
}
} else if sess.targ_cfg.os == abi::OsMacos {
// The dead_strip option to the linker specifies that functions and data
@ -1178,14 +1191,14 @@ fn link_args(sess: &Session,
// won't get much benefit from dylibs because LLVM will have already
// stripped away as much as it could. This has not been seen to impact
// link times negatively.
args.push("-Wl,-dead_strip".to_owned());
args.push("-Wl,-dead_strip".to_strbuf());
}
if sess.targ_cfg.os == abi::OsWin32 {
// Make sure that we link to the dynamic libgcc, otherwise cross-module
// DWARF stack unwinding will not work.
// This behavior may be overridden by --link-args "-static-libgcc"
args.push("-shared-libgcc".to_owned());
args.push("-shared-libgcc".to_strbuf());
// And here, we see obscure linker flags #45. On windows, it has been
// found to be necessary to have this flag to compile liblibc.
@ -1212,13 +1225,13 @@ fn link_args(sess: &Session,
//
// [1] - https://sourceware.org/bugzilla/show_bug.cgi?id=13130
// [2] - https://code.google.com/p/go/issues/detail?id=2139
args.push("-Wl,--enable-long-section-names".to_owned());
args.push("-Wl,--enable-long-section-names".to_strbuf());
}
if sess.targ_cfg.os == abi::OsAndroid {
// Many of the symbols defined in compiler-rt are also defined in libgcc.
// Android linker doesn't like that by default.
args.push("-Wl,--allow-multiple-definition".to_owned());
args.push("-Wl,--allow-multiple-definition".to_strbuf());
}
// Take careful note of the ordering of the arguments we pass to the linker
@ -1263,22 +1276,23 @@ fn link_args(sess: &Session,
if dylib {
// On mac we need to tell the linker to let this library be rpathed
if sess.targ_cfg.os == abi::OsMacos {
args.push("-dynamiclib".to_owned());
args.push("-Wl,-dylib".to_owned());
args.push("-dynamiclib".to_strbuf());
args.push("-Wl,-dylib".to_strbuf());
// FIXME (#9639): This needs to handle non-utf8 paths
if !sess.opts.cg.no_rpath {
args.push("-Wl,-install_name,@rpath/".to_owned() +
out_filename.filename_str().unwrap());
args.push(format_strbuf!("-Wl,-install_name,@rpath/{}",
out_filename.filename_str()
.unwrap()));
}
} else {
args.push("-shared".to_owned())
args.push("-shared".to_strbuf())
}
}
if sess.targ_cfg.os == abi::OsFreebsd {
args.push_all(["-L/usr/local/lib".to_owned(),
"-L/usr/local/lib/gcc46".to_owned(),
"-L/usr/local/lib/gcc44".to_owned()]);
args.push_all(["-L/usr/local/lib".to_strbuf(),
"-L/usr/local/lib/gcc46".to_strbuf(),
"-L/usr/local/lib/gcc44".to_strbuf()]);
}
// FIXME (#2397): At some point we want to rpath our guesses as to
@ -1295,7 +1309,7 @@ fn link_args(sess: &Session,
//
// This is the end of the command line, so this library is used to resolve
// *all* undefined symbols in all other libraries, and this is intentional.
args.push("-lcompiler-rt".to_owned());
args.push("-lcompiler-rt".to_strbuf());
// Finally add all the linker arguments provided on the command line along
// with any #[link_args] attributes found inside the crate
@ -1317,16 +1331,16 @@ fn link_args(sess: &Session,
// Also note that the native libraries linked here are only the ones located
// in the current crate. Upstream crates with native library dependencies
// may have their native library pulled in above.
fn add_local_native_libraries(args: &mut Vec<~str>, sess: &Session) {
fn add_local_native_libraries(args: &mut Vec<StrBuf>, sess: &Session) {
for path in sess.opts.addl_lib_search_paths.borrow().iter() {
// FIXME (#9639): This needs to handle non-utf8 paths
args.push("-L" + path.as_str().unwrap().to_owned());
args.push(("-L" + path.as_str().unwrap().to_owned()).to_strbuf());
}
let rustpath = filesearch::rust_path();
for path in rustpath.iter() {
// FIXME (#9639): This needs to handle non-utf8 paths
args.push("-L" + path.as_str().unwrap().to_owned());
args.push(("-L" + path.as_str().unwrap().to_owned()).to_strbuf());
}
// Some platforms take hints about whether a library is static or dynamic.
@ -1340,21 +1354,21 @@ fn add_local_native_libraries(args: &mut Vec<~str>, sess: &Session) {
cstore::NativeUnknown | cstore::NativeStatic => {
if takes_hints {
if kind == cstore::NativeStatic {
args.push("-Wl,-Bstatic".to_owned());
args.push("-Wl,-Bstatic".to_strbuf());
} else {
args.push("-Wl,-Bdynamic".to_owned());
args.push("-Wl,-Bdynamic".to_strbuf());
}
}
args.push("-l" + *l);
args.push(format_strbuf!("-l{}", *l));
}
cstore::NativeFramework => {
args.push("-framework".to_owned());
args.push(l.to_owned());
args.push("-framework".to_strbuf());
args.push(l.to_strbuf());
}
}
}
if takes_hints {
args.push("-Wl,-Bdynamic".to_owned());
args.push("-Wl,-Bdynamic".to_strbuf());
}
}
@ -1363,7 +1377,7 @@ fn add_local_native_libraries(args: &mut Vec<~str>, sess: &Session) {
// Rust crates are not considered at all when creating an rlib output. All
// dependencies will be linked when producing the final output (instead of
// the intermediate rlib version)
fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session,
fn add_upstream_rust_crates(args: &mut Vec<StrBuf>, sess: &Session,
dylib: bool, tmpdir: &Path,
trans: &CrateTranslation) {
// All of the heavy lifting has previously been accomplished by the
@ -1405,16 +1419,16 @@ fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session,
}
// Converts a library file-stem into a cc -l argument
fn unlib(config: &config::Config, stem: &str) -> ~str {
fn unlib(config: &config::Config, stem: &str) -> StrBuf {
if stem.starts_with("lib") && config.os != abi::OsWin32 {
stem.slice(3, stem.len()).to_owned()
stem.slice(3, stem.len()).to_strbuf()
} else {
stem.to_owned()
stem.to_strbuf()
}
}
// Adds the static "rlib" versions of all crates to the command line.
fn add_static_crate(args: &mut Vec<~str>, sess: &Session, tmpdir: &Path,
fn add_static_crate(args: &mut Vec<StrBuf>, sess: &Session, tmpdir: &Path,
cnum: ast::CrateNum, cratepath: Path) {
// When performing LTO on an executable output, all of the
// bytecode from the upstream libraries has already been
@ -1445,21 +1459,21 @@ fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session,
sess.abort_if_errors();
}
}
let dst_str = dst.as_str().unwrap().to_owned();
let dst_str = dst.as_str().unwrap().to_strbuf();
let mut archive = Archive::open(sess, dst);
archive.remove_file(format!("{}.o", name));
let files = archive.files();
if files.iter().any(|s| s.ends_with(".o")) {
if files.iter().any(|s| s.as_slice().ends_with(".o")) {
args.push(dst_str);
}
});
} else {
args.push(cratepath.as_str().unwrap().to_owned());
args.push(cratepath.as_str().unwrap().to_strbuf());
}
}
// Same thing as above, but for dynamic crates instead of static crates.
fn add_dynamic_crate(args: &mut Vec<~str>, sess: &Session,
fn add_dynamic_crate(args: &mut Vec<StrBuf>, sess: &Session,
cratepath: Path) {
// If we're performing LTO, then it should have been previously required
// that all upstream rust dependencies were available in an rlib format.
@ -1468,9 +1482,11 @@ fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session,
// Just need to tell the linker about where the library lives and
// what its name is
let dir = cratepath.dirname_str().unwrap();
if !dir.is_empty() { args.push("-L" + dir); }
if !dir.is_empty() {
args.push(format_strbuf!("-L{}", dir));
}
let libarg = unlib(&sess.targ_cfg, cratepath.filestem_str().unwrap());
args.push("-l" + libarg);
args.push(format_strbuf!("-l{}", libarg));
}
}
@ -1492,7 +1508,7 @@ fn add_upstream_rust_crates(args: &mut Vec<~str>, sess: &Session,
// generic function calls a native function, then the generic function must
// be instantiated in the target crate, meaning that the native symbol must
// also be resolved in the target crate.
fn add_upstream_native_libraries(args: &mut Vec<~str>, sess: &Session) {
fn add_upstream_native_libraries(args: &mut Vec<StrBuf>, sess: &Session) {
// Be sure to use a topological sorting of crates because there may be
// interdependencies between native libraries. When passing -nodefaultlibs,
// for example, almost all native libraries depend on libc, so we have to
@ -1507,10 +1523,12 @@ fn add_upstream_native_libraries(args: &mut Vec<~str>, sess: &Session) {
let libs = csearch::get_native_libraries(&sess.cstore, cnum);
for &(kind, ref lib) in libs.iter() {
match kind {
cstore::NativeUnknown => args.push("-l" + *lib),
cstore::NativeUnknown => {
args.push(format_strbuf!("-l{}", *lib))
}
cstore::NativeFramework => {
args.push("-framework".to_owned());
args.push(lib.to_owned());
args.push("-framework".to_strbuf());
args.push(lib.to_strbuf());
}
cstore::NativeStatic => {
sess.bug("statics shouldn't be propagated");


@ -20,7 +20,7 @@ use libc;
use flate;
pub fn run(sess: &session::Session, llmod: ModuleRef,
tm: TargetMachineRef, reachable: &[~str]) {
tm: TargetMachineRef, reachable: &[StrBuf]) {
if sess.opts.cg.prefer_dynamic {
sess.err("cannot prefer dynamic linking when performing LTO");
sess.note("only 'staticlib' and 'bin' outputs are supported with LTO");
@ -67,13 +67,16 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
if !llvm::LLVMRustLinkInExternalBitcode(llmod,
ptr as *libc::c_char,
bc.len() as libc::size_t) {
link::llvm_err(sess, format!("failed to load bc of `{}`", name));
link::llvm_err(sess,
(format_strbuf!("failed to load bc of `{}`",
name)));
}
});
}
// Internalize everything but the reachable symbols of the current module
let cstrs: Vec<::std::c_str::CString> = reachable.iter().map(|s| s.to_c_str()).collect();
let cstrs: Vec<::std::c_str::CString> =
reachable.iter().map(|s| s.as_slice().to_c_str()).collect();
let arr: Vec<*i8> = cstrs.iter().map(|c| c.with_ref(|p| p)).collect();
let ptr = arr.as_ptr();
unsafe {


@ -13,51 +13,52 @@ use driver::config::cfg_os_to_meta_os;
use metadata::loader::meta_section_name;
use syntax::abi;
pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::t {
pub fn get_target_strs(target_triple: StrBuf, target_os: abi::Os) -> target_strs::t {
return target_strs::t {
module_asm: "".to_owned(),
module_asm: "".to_strbuf(),
meta_sect_name: meta_section_name(cfg_os_to_meta_os(target_os)).to_owned(),
meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
data_layout: match target_os {
abi::OsMacos => {
"E-p:32:32:32".to_owned() +
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
"-f32:32:32-f64:64:64" +
"-v64:64:64-v128:64:128" +
"-a0:0:64-n32"
"E-p:32:32:32\
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
}
abi::OsWin32 => {
"E-p:32:32:32".to_owned() +
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
"-f32:32:32-f64:64:64" +
"-v64:64:64-v128:64:128" +
"-a0:0:64-n32"
"E-p:32:32:32\
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
}
abi::OsLinux => {
"E-p:32:32:32".to_owned() +
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
"-f32:32:32-f64:64:64" +
"-v64:64:64-v128:64:128" +
"-a0:0:64-n32"
"E-p:32:32:32\
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
}
abi::OsAndroid => {
"E-p:32:32:32".to_owned() +
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
"-f32:32:32-f64:64:64" +
"-v64:64:64-v128:64:128" +
"-a0:0:64-n32"
"E-p:32:32:32\
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
}
abi::OsFreebsd => {
"E-p:32:32:32".to_owned() +
"-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64" +
"-f32:32:32-f64:64:64" +
"-v64:64:64-v128:64:128" +
"-a0:0:64-n32"
"E-p:32:32:32\
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
}
},


@ -22,7 +22,7 @@ fn not_win32(os: abi::Os) -> bool {
os != abi::OsWin32
}
pub fn get_rpath_flags(sess: &Session, out_filename: &Path) -> Vec<~str> {
pub fn get_rpath_flags(sess: &Session, out_filename: &Path) -> Vec<StrBuf> {
let os = sess.targ_cfg.os;
// No rpath on windows
@ -33,9 +33,9 @@ pub fn get_rpath_flags(sess: &Session, out_filename: &Path) -> Vec<~str> {
let mut flags = Vec::new();
if sess.targ_cfg.os == abi::OsFreebsd {
flags.push_all(["-Wl,-rpath,/usr/local/lib/gcc46".to_owned(),
"-Wl,-rpath,/usr/local/lib/gcc44".to_owned(),
"-Wl,-z,origin".to_owned()]);
flags.push_all(["-Wl,-rpath,/usr/local/lib/gcc46".to_strbuf(),
"-Wl,-rpath,/usr/local/lib/gcc44".to_strbuf(),
"-Wl,-z,origin".to_strbuf()]);
}
debug!("preparing the RPATH!");
@ -47,16 +47,19 @@ pub fn get_rpath_flags(sess: &Session, out_filename: &Path) -> Vec<~str> {
l.map(|p| p.clone())
}).collect::<Vec<_>>();
let rpaths = get_rpaths(os, sysroot, output, libs.as_slice(),
sess.opts.target_triple);
let rpaths = get_rpaths(os,
sysroot,
output,
libs.as_slice(),
sess.opts.target_triple.as_slice());
flags.push_all(rpaths_to_flags(rpaths.as_slice()).as_slice());
flags
}
pub fn rpaths_to_flags(rpaths: &[~str]) -> Vec<~str> {
pub fn rpaths_to_flags(rpaths: &[StrBuf]) -> Vec<StrBuf> {
let mut ret = Vec::new();
for rpath in rpaths.iter() {
ret.push("-Wl,-rpath," + *rpath);
ret.push(("-Wl,-rpath," + (*rpath).as_slice()).to_strbuf());
}
return ret;
}
@ -65,7 +68,7 @@ fn get_rpaths(os: abi::Os,
sysroot: &Path,
output: &Path,
libs: &[Path],
target_triple: &str) -> Vec<~str> {
target_triple: &str) -> Vec<StrBuf> {
debug!("sysroot: {}", sysroot.display());
debug!("output: {}", output.display());
debug!("libs:");
@ -82,7 +85,7 @@ fn get_rpaths(os: abi::Os,
// And a final backup rpath to the global library location.
let fallback_rpaths = vec!(get_install_prefix_rpath(sysroot, target_triple));
fn log_rpaths(desc: &str, rpaths: &[~str]) {
fn log_rpaths(desc: &str, rpaths: &[StrBuf]) {
debug!("{} rpaths:", desc);
for rpath in rpaths.iter() {
debug!(" {}", *rpath);
@ -102,14 +105,14 @@ fn get_rpaths(os: abi::Os,
fn get_rpaths_relative_to_output(os: abi::Os,
output: &Path,
libs: &[Path]) -> Vec<~str> {
libs: &[Path]) -> Vec<StrBuf> {
libs.iter().map(|a| get_rpath_relative_to_output(os, output, a)).collect()
}
pub fn get_rpath_relative_to_output(os: abi::Os,
output: &Path,
lib: &Path)
-> ~str {
-> StrBuf {
use std::os;
assert!(not_win32(os));
@ -129,10 +132,11 @@ pub fn get_rpath_relative_to_output(os: abi::Os,
let relative = lib.path_relative_from(&output);
let relative = relative.expect("could not create rpath relative to output");
// FIXME (#9639): This needs to handle non-utf8 paths
prefix+"/"+relative.as_str().expect("non-utf8 component in path")
(prefix + "/" + relative.as_str()
.expect("non-utf8 component in path")).to_strbuf()
}
pub fn get_install_prefix_rpath(sysroot: &Path, target_triple: &str) -> ~str {
pub fn get_install_prefix_rpath(sysroot: &Path, target_triple: &str) -> StrBuf {
let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX");
let tlib = filesearch::relative_target_lib_path(sysroot, target_triple);
@ -140,10 +144,10 @@ pub fn get_install_prefix_rpath(sysroot: &Path, target_triple: &str) -> ~str {
path.push(&tlib);
let path = os::make_absolute(&path);
// FIXME (#9639): This needs to handle non-utf8 paths
path.as_str().expect("non-utf8 component in rpath").to_owned()
path.as_str().expect("non-utf8 component in rpath").to_strbuf()
}
pub fn minimize_rpaths(rpaths: &[~str]) -> Vec<~str> {
pub fn minimize_rpaths(rpaths: &[StrBuf]) -> Vec<StrBuf> {
let mut set = HashSet::new();
let mut minimized = Vec::new();
for rpath in rpaths.iter() {
@ -163,8 +167,13 @@ mod test {
#[test]
fn test_rpaths_to_flags() {
let flags = rpaths_to_flags(["path1".to_owned(), "path2".to_owned()]);
assert_eq!(flags, vec!("-Wl,-rpath,path1".to_owned(), "-Wl,-rpath,path2".to_owned()));
let flags = rpaths_to_flags([
"path1".to_strbuf(),
"path2".to_strbuf()
]);
assert_eq!(flags,
vec!("-Wl,-rpath,path1".to_strbuf(),
"-Wl,-rpath,path2".to_strbuf()));
}
#[test]
@ -190,18 +199,37 @@ mod test {
#[test]
fn test_minimize1() {
let res = minimize_rpaths(["rpath1".to_owned(), "rpath2".to_owned(), "rpath1".to_owned()]);
assert!(res.as_slice() == ["rpath1".to_owned(), "rpath2".to_owned()]);
let res = minimize_rpaths([
"rpath1".to_strbuf(),
"rpath2".to_strbuf(),
"rpath1".to_strbuf()
]);
assert!(res.as_slice() == [
"rpath1".to_strbuf(),
"rpath2".to_strbuf()
]);
}
#[test]
fn test_minimize2() {
let res = minimize_rpaths(["1a".to_owned(), "2".to_owned(), "2".to_owned(),
"1a".to_owned(), "4a".to_owned(), "1a".to_owned(),
"2".to_owned(), "3".to_owned(), "4a".to_owned(),
"3".to_owned()]);
assert!(res.as_slice() == ["1a".to_owned(), "2".to_owned(), "4a".to_owned(),
"3".to_owned()]);
let res = minimize_rpaths([
"1a".to_strbuf(),
"2".to_strbuf(),
"2".to_strbuf(),
"1a".to_strbuf(),
"4a".to_strbuf(),
"1a".to_strbuf(),
"2".to_strbuf(),
"3".to_strbuf(),
"4a".to_strbuf(),
"3".to_strbuf()
]);
assert!(res.as_slice() == [
"1a".to_strbuf(),
"2".to_strbuf(),
"4a".to_strbuf(),
"3".to_strbuf()
]);
}
#[test]


@ -54,13 +54,13 @@ use syntax::ast;
#[deriving(Clone, Eq)]
pub struct Svh {
hash: ~str,
hash: StrBuf,
}
impl Svh {
pub fn new(hash: &str) -> Svh {
assert!(hash.len() == 16);
Svh { hash: hash.to_owned() }
Svh { hash: hash.to_strbuf() }
}
pub fn as_str<'a>(&'a self) -> &'a str {


@ -11,9 +11,9 @@
#![allow(non_camel_case_types)]
pub struct t {
pub module_asm: ~str,
pub meta_sect_name: ~str,
pub data_layout: ~str,
pub target_triple: ~str,
pub cc_args: Vec<~str> ,
pub module_asm: StrBuf,
pub meta_sect_name: StrBuf,
pub data_layout: StrBuf,
pub target_triple: StrBuf,
pub cc_args: Vec<StrBuf> ,
}


@ -14,38 +14,41 @@ use driver::config::cfg_os_to_meta_os;
use metadata::loader::meta_section_name;
use syntax::abi;
pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::t {
pub fn get_target_strs(target_triple: StrBuf, target_os: abi::Os)
-> target_strs::t {
return target_strs::t {
module_asm: "".to_owned(),
module_asm: "".to_strbuf(),
meta_sect_name: meta_section_name(cfg_os_to_meta_os(target_os)).to_owned(),
meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
data_layout: match target_os {
abi::OsMacos => {
"e-p:32:32:32-i1:8:8-i8:8:8-i16:16:16".to_owned() +
"-i32:32:32-i64:32:64" +
"-f32:32:32-f64:32:64-v64:64:64" +
"-v128:128:128-a0:0:64-f80:128:128" + "-n8:16:32"
"e-p:32:32:32-i1:8:8-i8:8:8-i16:16:16\
-i32:32:32-i64:32:64\
-f32:32:32-f64:32:64-v64:64:64\
-v128:128:128-a0:0:64-f80:128:128\
-n8:16:32".to_strbuf()
}
abi::OsWin32 => {
"e-p:32:32-f64:64:64-i64:64:64-f80:32:32-n8:16:32".to_owned()
"e-p:32:32-f64:64:64-i64:64:64-f80:32:32-n8:16:32".to_strbuf()
}
abi::OsLinux => {
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_owned()
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_strbuf()
}
abi::OsAndroid => {
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_owned()
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_strbuf()
}
abi::OsFreebsd => {
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_owned()
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_strbuf()
}
},
target_triple: target_triple,
cc_args: vec!("-m32".to_owned()),
cc_args: vec!("-m32".to_strbuf()),
};
}


@ -14,46 +14,47 @@ use driver::config::cfg_os_to_meta_os;
use metadata::loader::meta_section_name;
use syntax::abi;
pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::t {
pub fn get_target_strs(target_triple: StrBuf, target_os: abi::Os) -> target_strs::t {
return target_strs::t {
module_asm: "".to_owned(),
module_asm: "".to_strbuf(),
meta_sect_name: meta_section_name(cfg_os_to_meta_os(target_os)).to_owned(),
meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
data_layout: match target_os {
abi::OsMacos => {
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-".to_owned()+
"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
"s0:64:64-f80:128:128-n8:16:32:64"
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
s0:64:64-f80:128:128-n8:16:32:64".to_strbuf()
}
abi::OsWin32 => {
// FIXME: Test this. Copied from linux (#2398)
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-".to_owned()+
"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
"s0:64:64-f80:128:128-n8:16:32:64-S128"
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
s0:64:64-f80:128:128-n8:16:32:64-S128".to_strbuf()
}
abi::OsLinux => {
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-".to_owned()+
"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
"s0:64:64-f80:128:128-n8:16:32:64-S128"
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
s0:64:64-f80:128:128-n8:16:32:64-S128".to_strbuf()
}
abi::OsAndroid => {
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-".to_owned()+
"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
"s0:64:64-f80:128:128-n8:16:32:64-S128"
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
s0:64:64-f80:128:128-n8:16:32:64-S128".to_strbuf()
}
abi::OsFreebsd => {
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-".to_owned()+
"f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-"+
"s0:64:64-f80:128:128-n8:16:32:64-S128"
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
s0:64:64-f80:128:128-n8:16:32:64-S128".to_strbuf()
}
},
target_triple: target_triple,
cc_args: vec!("-m64".to_owned()),
cc_args: vec!("-m64".to_strbuf()),
};
}


@ -76,7 +76,7 @@ pub struct Options {
// this.
pub addl_lib_search_paths: RefCell<HashSet<Path>>,
pub maybe_sysroot: Option<Path>,
pub target_triple: ~str,
pub target_triple: StrBuf,
// User-specified cfg meta items. The compiler itself will add additional
// items to the crate config, and during parsing the entire crate config
// will be added to the crate AST node. This should not be used for
@ -105,7 +105,7 @@ pub fn basic_options() -> Options {
output_types: Vec::new(),
addl_lib_search_paths: RefCell::new(HashSet::new()),
maybe_sysroot: None,
target_triple: driver::host_triple().to_owned(),
target_triple: driver::host_triple().to_strbuf(),
cfg: Vec::new(),
test: false,
parse_only: false,
@ -247,26 +247,26 @@ macro_rules! cgoptions(
}
}
fn parse_opt_string(slot: &mut Option<~str>, v: Option<&str>) -> bool {
fn parse_opt_string(slot: &mut Option<StrBuf>, v: Option<&str>) -> bool {
match v {
Some(s) => { *slot = Some(s.to_owned()); true },
Some(s) => { *slot = Some(s.to_strbuf()); true },
None => false,
}
}
fn parse_string(slot: &mut ~str, v: Option<&str>) -> bool {
fn parse_string(slot: &mut StrBuf, v: Option<&str>) -> bool {
match v {
Some(s) => { *slot = s.to_owned(); true },
Some(s) => { *slot = s.to_strbuf(); true },
None => false,
}
}
fn parse_list(slot: &mut Vec<~str>, v: Option<&str>)
fn parse_list(slot: &mut Vec<StrBuf>, v: Option<&str>)
-> bool {
match v {
Some(s) => {
for s in s.words() {
slot.push(s.to_owned());
slot.push(s.to_strbuf());
}
true
},
@ -278,23 +278,23 @@ macro_rules! cgoptions(
) )
cgoptions!(
ar: Option<~str> = (None, parse_opt_string,
ar: Option<StrBuf> = (None, parse_opt_string,
"tool to assemble archives with"),
linker: Option<~str> = (None, parse_opt_string,
linker: Option<StrBuf> = (None, parse_opt_string,
"system linker to link outputs with"),
link_args: Vec<~str> = (Vec::new(), parse_list,
link_args: Vec<StrBuf> = (Vec::new(), parse_list,
"extra arguments to pass to the linker (space separated)"),
target_cpu: ~str = ("generic".to_owned(), parse_string,
target_cpu: StrBuf = ("generic".to_strbuf(), parse_string,
"select target processor (llc -mcpu=help for details)"),
target_feature: ~str = ("".to_owned(), parse_string,
target_feature: StrBuf = ("".to_strbuf(), parse_string,
"target specific attributes (llc -mattr=help for details)"),
passes: Vec<~str> = (Vec::new(), parse_list,
passes: Vec<StrBuf> = (Vec::new(), parse_list,
"a list of extra LLVM passes to run (space separated)"),
llvm_args: Vec<~str> = (Vec::new(), parse_list,
llvm_args: Vec<StrBuf> = (Vec::new(), parse_list,
"a list of arguments to pass to llvm (space separated)"),
save_temps: bool = (false, parse_bool,
"save all temporary output files during compilation"),
android_cross_path: Option<~str> = (None, parse_opt_string,
android_cross_path: Option<StrBuf> = (None, parse_opt_string,
"the path to the Android NDK"),
no_rpath: bool = (false, parse_bool,
"disables setting the rpath in libs/exes"),
@ -310,7 +310,7 @@ cgoptions!(
"prefer dynamic linking to static linking"),
no_integrated_as: bool = (false, parse_bool,
"use an external assembler rather than LLVM's integrated one"),
relocation_model: ~str = ("pic".to_owned(), parse_string,
relocation_model: StrBuf = ("pic".to_strbuf(), parse_string,
"choose the relocation model to use (llc -relocation-model for details)"),
)
@ -456,13 +456,16 @@ static architecture_abis : &'static [(&'static str, abi::Architecture)] = &'stat
("mips", abi::Mips)];
pub fn build_target_config(sopts: &Options) -> Config {
let os = match get_os(sopts.target_triple) {
let os = match get_os(sopts.target_triple.as_slice()) {
Some(os) => os,
None => early_error("unknown operating system")
};
let arch = match get_arch(sopts.target_triple) {
let arch = match get_arch(sopts.target_triple.as_slice()) {
Some(arch) => arch,
None => early_error("unknown architecture: " + sopts.target_triple)
None => {
early_error("unknown architecture: " +
sopts.target_triple.as_slice())
}
};
let (int_type, uint_type) = match arch {
abi::X86 => (ast::TyI32, ast::TyU32),
@ -541,7 +544,7 @@ pub fn optgroups() -> Vec<getopts::OptGroup> {
// Convert strings provided as --cfg [cfgspec] into a crate_cfg
fn parse_cfgspecs(cfgspecs: Vec<~str> ) -> ast::CrateConfig {
fn parse_cfgspecs(cfgspecs: Vec<StrBuf> ) -> ast::CrateConfig {
cfgspecs.move_iter().map(|s| {
parse::parse_meta_from_source_str("cfgspec".to_strbuf(),
s.to_strbuf(),
@ -639,7 +642,10 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
}
let sysroot_opt = matches.opt_str("sysroot").map(|m| Path::new(m));
let target = matches.opt_str("target").unwrap_or(driver::host_triple().to_owned());
let target = match matches.opt_str("target") {
Some(supplied_target) => supplied_target.to_strbuf(),
None => driver::host_triple().to_strbuf(),
};
let opt_level = {
if (debugging_opts & NO_OPT) != 0 {
No
@ -689,10 +695,14 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
Path::new(s.as_slice())
}).collect();
let cfg = parse_cfgspecs(matches.opt_strs("cfg").move_iter().collect());
let cfg = parse_cfgspecs(matches.opt_strs("cfg")
.move_iter()
.map(|x| x.to_strbuf())
.collect());
let test = matches.opt_present("test");
let write_dependency_info = (matches.opt_present("dep-info"),
matches.opt_str("dep-info").map(|p| Path::new(p)));
matches.opt_str("dep-info")
.map(|p| Path::new(p)));
let print_metas = (matches.opt_present("crate-id"),
matches.opt_present("crate-name"),


@ -71,9 +71,9 @@ pub fn compile_input(sess: Session,
&sess);
let loader = &mut Loader::new(&sess);
let id = link::find_crate_id(krate.attrs.as_slice(),
outputs.out_filestem);
let (expanded_crate, ast_map) = phase_2_configure_and_expand(&sess, loader,
krate, &id);
outputs.out_filestem.as_slice());
let (expanded_crate, ast_map) =
phase_2_configure_and_expand(&sess, loader, krate, &id);
(outputs, expanded_crate, ast_map)
};
write_out_deps(&sess, input, &outputs, &expanded_crate);
@ -99,14 +99,14 @@ pub fn compile_input(sess: Session,
* The name used for source code that doesn't originate in a file
* (e.g. source from stdin or a string)
*/
pub fn anon_src() -> ~str {
"<anon>".to_str()
pub fn anon_src() -> StrBuf {
"<anon>".to_strbuf()
}
pub fn source_name(input: &Input) -> ~str {
pub fn source_name(input: &Input) -> StrBuf {
match *input {
// FIXME (#9639): This needs to handle non-utf8 paths
FileInput(ref ifile) => ifile.as_str().unwrap().to_str(),
FileInput(ref ifile) => ifile.as_str().unwrap().to_strbuf(),
StrInput(_) => anon_src()
}
}
@ -115,14 +115,14 @@ pub enum Input {
/// Load source from file
FileInput(Path),
/// The string is the source
StrInput(~str)
StrInput(StrBuf)
}
impl Input {
fn filestem(&self) -> ~str {
fn filestem(&self) -> StrBuf {
match *self {
FileInput(ref ifile) => ifile.filestem_str().unwrap().to_str(),
StrInput(_) => "rust_out".to_owned(),
FileInput(ref ifile) => ifile.filestem_str().unwrap().to_strbuf(),
StrInput(_) => "rust_out".to_strbuf(),
}
}
}
@ -354,7 +354,7 @@ pub struct CrateTranslation {
pub metadata_module: ModuleRef,
pub link: LinkMeta,
pub metadata: Vec<u8>,
pub reachable: Vec<~str>,
pub reachable: Vec<StrBuf>,
pub crate_formats: dependency_format::Dependencies,
}
@ -450,7 +450,8 @@ fn write_out_deps(sess: &Session,
input: &Input,
outputs: &OutputFilenames,
krate: &ast::Crate) {
let id = link::find_crate_id(krate.attrs.as_slice(), outputs.out_filestem);
let id = link::find_crate_id(krate.attrs.as_slice(),
outputs.out_filestem.as_slice());
let mut out_filenames = Vec::new();
for output_type in sess.opts.output_types.iter() {
@ -487,9 +488,9 @@ fn write_out_deps(sess: &Session,
let result = (|| {
// Build a list of files used to compile the output and
// write Makefile-compatible dependency rules
let files: Vec<~str> = sess.codemap().files.borrow()
let files: Vec<StrBuf> = sess.codemap().files.borrow()
.iter().filter(|fmap| fmap.is_real_file())
.map(|fmap| fmap.name.to_owned())
.map(|fmap| fmap.name.to_strbuf())
.collect();
let mut file = try!(io::File::create(&deps_filename));
for path in out_filenames.iter() {
@ -567,7 +568,9 @@ impl pprust::PpAnn for TypedAnnotation {
try!(pp::word(&mut s.s, "as"));
try!(pp::space(&mut s.s));
try!(pp::word(&mut s.s,
ppaux::ty_to_str(tcx, ty::expr_ty(tcx, expr))));
ppaux::ty_to_str(
tcx,
ty::expr_ty(tcx, expr)).as_slice()));
s.pclose()
}
_ => Ok(())
@ -581,20 +584,26 @@ pub fn pretty_print_input(sess: Session,
ppm: ::driver::PpMode,
ofile: Option<Path>) {
let krate = phase_1_parse_input(&sess, cfg, input);
let id = link::find_crate_id(krate.attrs.as_slice(), input.filestem());
let id = link::find_crate_id(krate.attrs.as_slice(),
input.filestem().as_slice());
let (krate, ast_map, is_expanded) = match ppm {
PpmExpanded | PpmExpandedIdentified | PpmTyped => {
let loader = &mut Loader::new(&sess);
let (krate, ast_map) = phase_2_configure_and_expand(&sess, loader,
krate, &id);
let (krate, ast_map) = phase_2_configure_and_expand(&sess,
loader,
krate,
&id);
(krate, Some(ast_map), true)
}
_ => (krate, None, false)
};
let src_name = source_name(input);
let src = Vec::from_slice(sess.codemap().get_filemap(src_name).src.as_bytes());
let src = Vec::from_slice(sess.codemap()
.get_filemap(src_name.as_slice())
.src
.as_bytes());
let mut rdr = MemReader::new(src);
let out = match ofile {
@ -666,8 +675,12 @@ pub fn collect_crate_types(session: &Session,
let iter = attrs.iter().filter_map(|a| {
if a.name().equiv(&("crate_type")) {
match a.value_str() {
Some(ref n) if n.equiv(&("rlib")) => Some(config::CrateTypeRlib),
Some(ref n) if n.equiv(&("dylib")) => Some(config::CrateTypeDylib),
Some(ref n) if n.equiv(&("rlib")) => {
Some(config::CrateTypeRlib)
}
Some(ref n) if n.equiv(&("dylib")) => {
Some(config::CrateTypeDylib)
}
Some(ref n) if n.equiv(&("lib")) => {
Some(config::default_lib_output())
}
@ -679,12 +692,16 @@ pub fn collect_crate_types(session: &Session,
session.add_lint(lint::UnknownCrateType,
ast::CRATE_NODE_ID,
a.span,
"invalid `crate_type` value".to_owned());
"invalid `crate_type` \
value".to_strbuf());
None
}
_ => {
session.add_lint(lint::UnknownCrateType, ast::CRATE_NODE_ID,
a.span, "`crate_type` requires a value".to_owned());
session.add_lint(lint::UnknownCrateType,
ast::CRATE_NODE_ID,
a.span,
"`crate_type` requires a \
value".to_strbuf());
None
}
}
@ -704,7 +721,7 @@ pub fn collect_crate_types(session: &Session,
pub struct OutputFilenames {
pub out_directory: Path,
pub out_filestem: ~str,
pub out_filestem: StrBuf,
pub single_output_file: Option<Path>,
}
@ -756,7 +773,7 @@ pub fn build_output_filenames(input: &Input,
let crateid = attr::find_crateid(attrs);
match crateid {
None => {}
Some(crateid) => stem = crateid.name.to_str(),
Some(crateid) => stem = crateid.name.to_strbuf(),
}
OutputFilenames {
out_directory: dirpath,
@ -778,7 +795,7 @@ pub fn build_output_filenames(input: &Input,
}
OutputFilenames {
out_directory: out_file.dir_path(),
out_filestem: out_file.filestem_str().unwrap().to_str(),
out_filestem: out_file.filestem_str().unwrap().to_strbuf(),
single_output_file: ofile,
}
}


@ -56,7 +56,8 @@ fn run_compiler(args: &[~str]) {
let ifile = matches.free.get(0).as_slice();
if ifile == "-" {
let contents = io::stdin().read_to_end().unwrap();
let src = str::from_utf8(contents.as_slice()).unwrap().to_owned();
let src = str::from_utf8(contents.as_slice()).unwrap()
.to_strbuf();
(StrInput(src), None)
} else {
(FileInput(Path::new(ifile)), Some(Path::new(ifile)))
@ -249,9 +250,13 @@ fn print_crate_info(sess: &Session,
// these nasty nested conditions are to avoid doing extra work
if crate_id || crate_name || crate_file_name {
let attrs = parse_crate_attrs(sess, input);
let t_outputs = driver::build_output_filenames(input, odir, ofile,
attrs.as_slice(), sess);
let id = link::find_crate_id(attrs.as_slice(), t_outputs.out_filestem);
let t_outputs = driver::build_output_filenames(input,
odir,
ofile,
attrs.as_slice(),
sess);
let id = link::find_crate_id(attrs.as_slice(),
t_outputs.out_filestem.as_slice());
if crate_id {
println!("{}", id.to_str());


@ -28,7 +28,6 @@ use syntax::{ast, codemap};
use std::os;
use std::cell::{Cell, RefCell};
pub struct Session {
pub targ_cfg: config::Config,
pub opts: config::Options,
@ -43,7 +42,7 @@ pub struct Session {
// expected to be absolute. `None` means that there is no source file.
pub local_crate_source_file: Option<Path>,
pub working_dir: Path,
pub lints: RefCell<NodeMap<Vec<(lint::Lint, codemap::Span, ~str)>>>,
pub lints: RefCell<NodeMap<Vec<(lint::Lint, codemap::Span, StrBuf)>>>,
pub node_id: Cell<ast::NodeId>,
pub crate_types: RefCell<Vec<config::CrateType>>,
pub features: front::feature_gate::Features,
@ -109,7 +108,7 @@ impl Session {
lint: lint::Lint,
id: ast::NodeId,
sp: Span,
msg: ~str) {
msg: StrBuf) {
let mut lints = self.lints.borrow_mut();
match lints.find_mut(&id) {
Some(arr) => { arr.push((lint, sp, msg)); return; }
@ -180,10 +179,9 @@ impl Session {
}
}
pub fn target_filesearch<'a>(&'a self) -> filesearch::FileSearch<'a> {
filesearch::FileSearch::new(
self.sysroot(),
self.opts.target_triple,
&self.opts.addl_lib_search_paths)
filesearch::FileSearch::new(self.sysroot(),
self.opts.target_triple.as_slice(),
&self.opts.addl_lib_search_paths)
}
pub fn host_filesearch<'a>(&'a self) -> filesearch::FileSearch<'a> {
filesearch::FileSearch::new(
@ -245,7 +243,6 @@ pub fn build_session_(sopts: config::Options,
}
}
// Seems out of place, but it uses session, so I'm putting it here
pub fn expect<T:Clone>(sess: &Session, opt: Option<T>, msg: || -> StrBuf)
-> T {


@ -359,7 +359,7 @@ pub fn check_crate(sess: &Session, krate: &ast::Crate) {
sess.add_lint(lint::UnknownFeatures,
ast::CRATE_NODE_ID,
mi.span,
"unknown feature".to_owned());
"unknown feature".to_strbuf());
}
}
}


@ -119,7 +119,6 @@ pub mod lib {
pub mod llvmdeps;
}
pub fn main() {
std::os::set_exit_status(driver::main_args(std::os::args().as_slice()));
}


@ -1840,7 +1840,7 @@ pub fn SetFunctionAttribute(fn_: ValueRef, attr: Attribute) {
/* Memory-managed object interface to type handles. */
pub struct TypeNames {
named_types: RefCell<HashMap<~str, TypeRef>>,
named_types: RefCell<HashMap<StrBuf, TypeRef>>,
}
impl TypeNames {
@ -1851,33 +1851,34 @@ impl TypeNames {
}
pub fn associate_type(&self, s: &str, t: &Type) {
assert!(self.named_types.borrow_mut().insert(s.to_owned(), t.to_ref()));
assert!(self.named_types.borrow_mut().insert(s.to_strbuf(),
t.to_ref()));
}
pub fn find_type(&self, s: &str) -> Option<Type> {
self.named_types.borrow().find_equiv(&s).map(|x| Type::from_ref(*x))
}
pub fn type_to_str(&self, ty: Type) -> ~str {
pub fn type_to_str(&self, ty: Type) -> StrBuf {
unsafe {
let s = llvm::LLVMTypeToString(ty.to_ref());
let ret = from_c_str(s);
free(s as *mut c_void);
ret
ret.to_strbuf()
}
}
pub fn types_to_str(&self, tys: &[Type]) -> ~str {
let strs: Vec<~str> = tys.iter().map(|t| self.type_to_str(*t)).collect();
format!("[{}]", strs.connect(","))
pub fn types_to_str(&self, tys: &[Type]) -> StrBuf {
let strs: Vec<StrBuf> = tys.iter().map(|t| self.type_to_str(*t)).collect();
format_strbuf!("[{}]", strs.connect(",").to_strbuf())
}
pub fn val_to_str(&self, val: ValueRef) -> ~str {
pub fn val_to_str(&self, val: ValueRef) -> StrBuf {
unsafe {
let s = llvm::LLVMValueToString(val);
let ret = from_c_str(s);
free(s as *mut c_void);
ret
ret.to_strbuf()
}
}
}


@ -120,8 +120,11 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
match extract_crate_info(e, i) {
Some(info) => {
let (cnum, _, _) = resolve_crate(e, &None, info.ident,
&info.crate_id, None,
let (cnum, _, _) = resolve_crate(e,
&None,
info.ident.as_slice(),
&info.crate_id,
None,
i.span);
e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
}
@ -130,7 +133,7 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
}
struct CrateInfo {
ident: ~str,
ident: StrBuf,
crate_id: CrateId,
id: ast::NodeId,
should_link: bool,
@ -156,7 +159,7 @@ fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option<CrateInfo> {
None => from_str(ident.get().to_str()).unwrap()
};
Some(CrateInfo {
ident: ident.get().to_str(),
ident: ident.get().to_strbuf(),
crate_id: crate_id,
id: id,
should_link: should_link(i),
@ -237,7 +240,9 @@ fn visit_item(e: &Env, i: &ast::Item) {
if n.get().is_empty() {
e.sess.span_err(m.span, "#[link(name = \"\")] given with empty name");
} else {
e.sess.cstore.add_used_library(n.get().to_owned(), kind);
e.sess
.cstore
.add_used_library(n.get().to_strbuf(), kind);
}
}
None => {}
@ -279,7 +284,7 @@ fn register_crate<'a>(e: &mut Env,
// Stash paths for top-most crate locally if necessary.
let crate_paths = if root.is_none() {
Some(CratePaths {
ident: ident.to_owned(),
ident: ident.to_strbuf(),
dylib: lib.dylib.clone(),
rlib: lib.rlib.clone(),
})
@ -294,7 +299,7 @@ fn register_crate<'a>(e: &mut Env,
let loader::Library{ dylib, rlib, metadata } = lib;
let cmeta = Rc::new( cstore::crate_metadata {
name: crate_id.name.to_owned(),
name: crate_id.name.to_strbuf(),
data: metadata,
cnum_map: cnum_map,
cnum: cnum,
@ -328,7 +333,7 @@ fn resolve_crate<'a>(e: &mut Env,
span: span,
ident: ident,
crate_id: crate_id,
id_hash: id_hash,
id_hash: id_hash.as_slice(),
hash: hash.map(|a| &*a),
filesearch: e.sess.target_filesearch(),
os: config::cfg_os_to_meta_os(e.sess.targ_cfg.os),
@ -391,9 +396,9 @@ impl<'a> CrateLoader for Loader<'a> {
let mut load_ctxt = loader::Context {
sess: self.env.sess,
span: krate.span,
ident: info.ident,
ident: info.ident.as_slice(),
crate_id: &info.crate_id,
id_hash: id_hash,
id_hash: id_hash.as_slice(),
hash: None,
filesearch: self.env.sess.host_filesearch(),
triple: driver::host_triple(),


@ -32,7 +32,7 @@ pub struct StaticMethodInfo {
pub vis: ast::Visibility,
}
pub fn get_symbol(cstore: &cstore::CStore, def: ast::DefId) -> ~str {
pub fn get_symbol(cstore: &cstore::CStore, def: ast::DefId) -> StrBuf {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_symbol(cdata.data(), def.node)
}
@ -86,7 +86,8 @@ pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec<ast_map::PathElem>
// FIXME #1920: This path is not always correct if the crate is not linked
// into the root namespace.
(vec!(ast_map::PathMod(token::intern(cdata.name)))).append(path.as_slice())
(vec!(ast_map::PathMod(token::intern(cdata.name.as_slice())))).append(
path.as_slice())
}
pub enum found_ast {
@ -245,7 +246,7 @@ pub fn get_impl_vtables(tcx: &ty::ctxt,
pub fn get_native_libraries(cstore: &cstore::CStore,
crate_num: ast::CrateNum)
-> Vec<(cstore::NativeLibaryKind, ~str)> {
-> Vec<(cstore::NativeLibaryKind, StrBuf)> {
let cdata = cstore.get_crate_data(crate_num);
decoder::get_native_libraries(&*cdata)
}


@ -38,7 +38,7 @@ pub enum MetadataBlob {
}
pub struct crate_metadata {
pub name: ~str,
pub name: StrBuf,
pub data: MetadataBlob,
pub cnum_map: cnum_map,
pub cnum: ast::CrateNum,
@ -71,8 +71,8 @@ pub struct CStore {
metas: RefCell<HashMap<ast::CrateNum, Rc<crate_metadata>>>,
extern_mod_crate_map: RefCell<extern_mod_crate_map>,
used_crate_sources: RefCell<Vec<CrateSource>>,
used_libraries: RefCell<Vec<(~str, NativeLibaryKind)>>,
used_link_args: RefCell<Vec<~str>>,
used_libraries: RefCell<Vec<(StrBuf, NativeLibaryKind)>>,
used_link_args: RefCell<Vec<StrBuf>>,
pub intr: Rc<IdentInterner>,
}
@ -178,23 +178,23 @@ impl CStore {
libs
}
pub fn add_used_library(&self, lib: ~str, kind: NativeLibaryKind) {
pub fn add_used_library(&self, lib: StrBuf, kind: NativeLibaryKind) {
assert!(!lib.is_empty());
self.used_libraries.borrow_mut().push((lib, kind));
}
pub fn get_used_libraries<'a>(&'a self)
-> &'a RefCell<Vec<(~str, NativeLibaryKind)> > {
-> &'a RefCell<Vec<(StrBuf, NativeLibaryKind)> > {
&self.used_libraries
}
pub fn add_used_link_args(&self, args: &str) {
for s in args.split(' ') {
self.used_link_args.borrow_mut().push(s.to_owned());
self.used_link_args.borrow_mut().push(s.to_strbuf());
}
}
pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell<Vec<~str> > {
pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell<Vec<StrBuf> > {
&self.used_link_args
}
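(Aside, not part of the diff: a minimal sketch of the ownership pattern these cstore hunks apply, using only the pre-1.0 APIs already visible here; `used_libs_to_str` is a hypothetical name, not something the commit adds. Owned strings are held as `StrBuf`, and a `&str` is recovered with `as_slice()` at the point of use.)

// Illustrative sketch only -- hypothetical helper, not part of the commit.
fn used_libs_to_str(libs: &[StrBuf]) -> StrBuf {
    let mut out = StrBuf::new();
    for lib in libs.iter() {
        // as_slice() yields a &str view of the owned StrBuf.
        out.push_str(lib.as_slice());
        out.push_char(' ');
    }
    out
}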


@ -184,8 +184,8 @@ fn item_method_sort(item: ebml::Doc) -> char {
ret
}
fn item_symbol(item: ebml::Doc) -> ~str {
reader::get_doc(item, tag_items_data_item_symbol).as_str()
fn item_symbol(item: ebml::Doc) -> StrBuf {
reader::get_doc(item, tag_items_data_item_symbol).as_str().to_strbuf()
}
fn item_parent_item(d: ebml::Doc) -> Option<ast::DefId> {
@ -451,7 +451,7 @@ pub fn get_impl_vtables(cdata: Cmd,
}
pub fn get_symbol(data: &[u8], id: ast::NodeId) -> ~str {
pub fn get_symbol(data: &[u8], id: ast::NodeId) -> StrBuf {
return item_symbol(lookup_item(id, data));
}
@ -1097,13 +1097,15 @@ pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
let cratedoc = reader::Doc(data);
let depsdoc = reader::get_doc(cratedoc, tag_crate_deps);
let mut crate_num = 1;
fn docstr(doc: ebml::Doc, tag_: uint) -> ~str {
fn docstr(doc: ebml::Doc, tag_: uint) -> StrBuf {
let d = reader::get_doc(doc, tag_);
d.as_str_slice().to_str()
d.as_str_slice().to_strbuf()
}
reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| {
let crate_id = from_str(docstr(depdoc, tag_crate_dep_crateid)).unwrap();
let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash));
let crate_id =
from_str(docstr(depdoc,
tag_crate_dep_crateid).as_slice()).unwrap();
let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash).as_slice());
deps.push(CrateDep {
cnum: crate_num,
crate_id: crate_id,
@ -1144,10 +1146,10 @@ pub fn maybe_get_crate_id(data: &[u8]) -> Option<CrateId> {
})
}
pub fn get_crate_triple(data: &[u8]) -> ~str {
pub fn get_crate_triple(data: &[u8]) -> StrBuf {
let cratedoc = reader::Doc(data);
let triple_doc = reader::maybe_get_doc(cratedoc, tag_crate_triple);
triple_doc.expect("No triple in crate").as_str()
triple_doc.expect("No triple in crate").as_str().to_strbuf()
}
pub fn get_crate_id(data: &[u8]) -> CrateId {
@ -1239,7 +1241,8 @@ pub fn get_trait_of_method(cdata: Cmd, id: ast::NodeId, tcx: &ty::ctxt)
}
pub fn get_native_libraries(cdata: Cmd) -> Vec<(cstore::NativeLibaryKind, ~str)> {
pub fn get_native_libraries(cdata: Cmd)
-> Vec<(cstore::NativeLibaryKind, StrBuf)> {
let libraries = reader::get_doc(reader::Doc(cdata.data()),
tag_native_libraries);
let mut result = Vec::new();
@ -1248,7 +1251,7 @@ pub fn get_native_libraries(cdata: Cmd) -> Vec<(cstore::NativeLibaryKind, ~str)>
let name_doc = reader::get_doc(lib_doc, tag_native_libraries_name);
let kind: cstore::NativeLibaryKind =
FromPrimitive::from_u32(reader::doc_as_u32(kind_doc)).unwrap();
let name = name_doc.as_str();
let name = name_doc.as_str().to_strbuf();
result.push((kind, name));
true
});
@ -1260,12 +1263,12 @@ pub fn get_macro_registrar_fn(data: &[u8]) -> Option<ast::NodeId> {
.map(|doc| FromPrimitive::from_u32(reader::doc_as_u32(doc)).unwrap())
}
pub fn get_exported_macros(data: &[u8]) -> Vec<~str> {
pub fn get_exported_macros(data: &[u8]) -> Vec<StrBuf> {
let macros = reader::get_doc(reader::Doc(data),
tag_exported_macros);
let mut result = Vec::new();
reader::tagged_docs(macros, tag_macro_def, |macro_doc| {
result.push(macro_doc.as_str());
result.push(macro_doc.as_str().to_strbuf());
true
});
result


@ -70,7 +70,7 @@ pub struct EncodeParams<'a> {
pub diag: &'a SpanHandler,
pub tcx: &'a ty::ctxt,
pub reexports2: &'a middle::resolve::ExportMap2,
pub item_symbols: &'a RefCell<NodeMap<~str>>,
pub item_symbols: &'a RefCell<NodeMap<StrBuf>>,
pub non_inlineable_statics: &'a RefCell<NodeSet>,
pub link_meta: &'a LinkMeta,
pub cstore: &'a cstore::CStore,
@ -81,7 +81,7 @@ pub struct EncodeContext<'a> {
pub diag: &'a SpanHandler,
pub tcx: &'a ty::ctxt,
pub reexports2: &'a middle::resolve::ExportMap2,
pub item_symbols: &'a RefCell<NodeMap<~str>>,
pub item_symbols: &'a RefCell<NodeMap<StrBuf>>,
pub non_inlineable_statics: &'a RefCell<NodeSet>,
pub link_meta: &'a LinkMeta,
pub cstore: &'a cstore::CStore,
@ -98,7 +98,7 @@ fn encode_impl_type_basename(ebml_w: &mut Encoder, name: Ident) {
}
pub fn encode_def_id(ebml_w: &mut Encoder, id: DefId) {
ebml_w.wr_tagged_str(tag_def_id, def_to_str(id));
ebml_w.wr_tagged_str(tag_def_id, def_to_str(id).as_slice());
}
#[deriving(Clone)]
@ -139,8 +139,8 @@ fn encode_family(ebml_w: &mut Encoder, c: char) {
ebml_w.end_tag();
}
pub fn def_to_str(did: DefId) -> ~str {
format!("{}:{}", did.krate, did.node)
pub fn def_to_str(did: DefId) -> StrBuf {
format_strbuf!("{}:{}", did.krate, did.node)
}
fn encode_ty_type_param_defs(ebml_w: &mut Encoder,
@ -170,7 +170,7 @@ fn encode_region_param_defs(ebml_w: &mut Encoder,
ebml_w.end_tag();
ebml_w.wr_tagged_str(tag_region_param_def_def_id,
def_to_str(param.def_id));
def_to_str(param.def_id).as_slice());
ebml_w.end_tag();
}
@ -370,10 +370,12 @@ fn encode_reexported_static_method(ebml_w: &mut Encoder,
exp.name, token::get_ident(method_ident));
ebml_w.start_tag(tag_items_data_item_reexport);
ebml_w.start_tag(tag_items_data_item_reexport_def_id);
ebml_w.wr_str(def_to_str(method_def_id));
ebml_w.wr_str(def_to_str(method_def_id).as_slice());
ebml_w.end_tag();
ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(format!("{}::{}", exp.name, token::get_ident(method_ident)));
ebml_w.wr_str(format!("{}::{}",
exp.name,
token::get_ident(method_ident)));
ebml_w.end_tag();
ebml_w.end_tag();
}
@ -447,7 +449,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext,
// encoded metadata for static methods relative to Bar,
// but not yet for Foo.
//
if path_differs || original_name.get() != exp.name {
if path_differs || original_name.get() != exp.name.as_slice() {
if !encode_reexported_static_base_methods(ecx, ebml_w, exp) {
if encode_reexported_static_trait_methods(ecx, ebml_w, exp) {
debug!("(encode reexported static methods) {} \
@ -515,10 +517,10 @@ fn encode_reexports(ecx: &EncodeContext,
id);
ebml_w.start_tag(tag_items_data_item_reexport);
ebml_w.start_tag(tag_items_data_item_reexport_def_id);
ebml_w.wr_str(def_to_str(exp.def_id));
ebml_w.wr_str(def_to_str(exp.def_id).as_slice());
ebml_w.end_tag();
ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(exp.name);
ebml_w.wr_str(exp.name.as_slice());
ebml_w.end_tag();
ebml_w.end_tag();
encode_reexported_static_methods(ecx, ebml_w, path.clone(), exp);
@ -547,12 +549,13 @@ fn encode_info_for_mod(ecx: &EncodeContext,
// Encode info about all the module children.
for item in md.items.iter() {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(item.id)));
ebml_w.wr_str(def_to_str(local_def(item.id)).as_slice());
ebml_w.end_tag();
each_auxiliary_node_id(*item, |auxiliary_node_id| {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(auxiliary_node_id)));
ebml_w.wr_str(def_to_str(local_def(
auxiliary_node_id)).as_slice());
ebml_w.end_tag();
true
});
@ -566,7 +569,7 @@ fn encode_info_for_mod(ecx: &EncodeContext,
did, ecx.tcx.map.node_to_str(did));
ebml_w.start_tag(tag_mod_impl);
ebml_w.wr_str(def_to_str(local_def(did)));
ebml_w.wr_str(def_to_str(local_def(did)).as_slice());
ebml_w.end_tag();
}
_ => {}
@ -931,7 +934,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
// Encode all the items in this module.
for foreign_item in fm.items.iter() {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(foreign_item.id)));
ebml_w.wr_str(def_to_str(local_def(foreign_item.id)).as_slice());
ebml_w.end_tag();
}
encode_visibility(ebml_w, vis);
@ -1111,7 +1114,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
ebml_w.end_tag();
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(method_def_id));
ebml_w.wr_str(def_to_str(method_def_id).as_slice());
ebml_w.end_tag();
}
encode_path(ebml_w, path.clone());
@ -1647,12 +1650,13 @@ fn encode_misc_info(ecx: &EncodeContext,
ebml_w.start_tag(tag_misc_info_crate_items);
for &item in krate.module.items.iter() {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(item.id)));
ebml_w.wr_str(def_to_str(local_def(item.id)).as_slice());
ebml_w.end_tag();
each_auxiliary_node_id(item, |auxiliary_node_id| {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(auxiliary_node_id)));
ebml_w.wr_str(def_to_str(local_def(
auxiliary_node_id)).as_slice());
ebml_w.end_tag();
true
});
@ -1700,11 +1704,11 @@ fn encode_dylib_dependency_formats(ebml_w: &mut Encoder, ecx: &EncodeContext) {
match ecx.tcx.dependency_formats.borrow().find(&config::CrateTypeDylib) {
Some(arr) => {
let s = arr.iter().enumerate().filter_map(|(i, slot)| {
slot.map(|kind| format!("{}:{}", i + 1, match kind {
slot.map(|kind| (format!("{}:{}", i + 1, match kind {
cstore::RequireDynamic => "d",
cstore::RequireStatic => "s",
}))
}).collect::<Vec<~str>>();
})).to_strbuf())
}).collect::<Vec<StrBuf>>();
ebml_w.writer.write(s.connect(",").as_bytes());
}
None => {}
@ -1781,7 +1785,12 @@ fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate)
let mut ebml_w = writer::Encoder(wr);
encode_crate_id(&mut ebml_w, &ecx.link_meta.crateid);
encode_crate_triple(&mut ebml_w, tcx.sess.targ_cfg.target_strs.target_triple);
encode_crate_triple(&mut ebml_w,
tcx.sess
.targ_cfg
.target_strs
.target_triple
.as_slice());
encode_hash(&mut ebml_w, &ecx.link_meta.crate_hash);
encode_dylib_dependency_formats(&mut ebml_w, &ecx);
@ -1861,7 +1870,7 @@ fn encode_metadata_inner(wr: &mut MemWriter, parms: EncodeParams, krate: &Crate)
}
// Get the encoded string for a type
pub fn encoded_ty(tcx: &ty::ctxt, t: ty::t) -> ~str {
pub fn encoded_ty(tcx: &ty::ctxt, t: ty::t) -> StrBuf {
let mut wr = MemWriter::new();
tyencode::enc_ty(&mut wr, &tyencode::ctxt {
diag: tcx.sess.diagnostic(),
@ -1869,5 +1878,5 @@ pub fn encoded_ty(tcx: &ty::ctxt, t: ty::t) -> ~str {
tcx: tcx,
abbrevs: &RefCell::new(HashMap::new())
}, t);
str::from_utf8_owned(wr.get_ref().to_owned()).unwrap()
str::from_utf8_owned(wr.get_ref().to_owned()).unwrap().to_strbuf()
}


@ -186,8 +186,8 @@ static PATH_ENTRY_SEPARATOR: &'static str = ";";
static PATH_ENTRY_SEPARATOR: &'static str = ":";
/// Returns RUST_PATH as a string, without default paths added
pub fn get_rust_path() -> Option<~str> {
os::getenv("RUST_PATH")
pub fn get_rust_path() -> Option<StrBuf> {
os::getenv("RUST_PATH").map(|x| x.to_strbuf())
}
/// Returns the value of RUST_PATH, as a list
@ -199,7 +199,7 @@ pub fn rust_path() -> Vec<Path> {
let mut env_rust_path: Vec<Path> = match get_rust_path() {
Some(env_path) => {
let env_path_components =
env_path.split_str(PATH_ENTRY_SEPARATOR);
env_path.as_slice().split_str(PATH_ENTRY_SEPARATOR);
env_path_components.map(|s| Path::new(s)).collect()
}
None => Vec::new()
@ -236,7 +236,7 @@ pub fn rust_path() -> Vec<Path> {
// The name of the directory rustc expects libraries to be located.
// On Unix should be "lib", on windows "bin"
#[cfg(unix)]
fn find_libdir(sysroot: &Path) -> ~str {
fn find_libdir(sysroot: &Path) -> StrBuf {
// FIXME: This is a quick hack to make the rustc binary able to locate
// Rust libraries in Linux environments where libraries might be installed
// to lib64/lib32. This would be more foolproof by basing the sysroot off
@ -250,21 +250,27 @@ fn find_libdir(sysroot: &Path) -> ~str {
}
#[cfg(target_word_size = "64")]
fn primary_libdir_name() -> ~str { "lib64".to_owned() }
fn primary_libdir_name() -> StrBuf {
"lib64".to_strbuf()
}
#[cfg(target_word_size = "32")]
fn primary_libdir_name() -> ~str { "lib32".to_owned() }
fn primary_libdir_name() -> StrBuf {
"lib32".to_strbuf()
}
fn secondary_libdir_name() -> ~str { "lib".to_owned() }
fn secondary_libdir_name() -> StrBuf {
"lib".to_strbuf()
}
}
#[cfg(windows)]
fn find_libdir(_sysroot: &Path) -> ~str {
"bin".to_owned()
fn find_libdir(_sysroot: &Path) -> StrBuf {
"bin".to_strbuf()
}
// The name of rustc's own place to organize libraries.
// Used to be "rustc", now the default is "rustlib"
pub fn rustlibdir() -> ~str {
"rustlib".to_owned()
pub fn rustlibdir() -> StrBuf {
"rustlib".to_strbuf()
}


@ -61,7 +61,7 @@ pub enum Os {
pub struct CrateMismatch {
path: Path,
got: ~str,
got: StrBuf,
}
pub struct Context<'a> {
@ -92,7 +92,7 @@ pub struct ArchiveMetadata {
}
pub struct CratePaths {
pub ident: ~str,
pub ident: StrBuf,
pub dylib: Option<Path>,
pub rlib: Option<Path>
}
@ -305,7 +305,7 @@ impl<'a> Context<'a> {
//
// If everything checks out, then `Some(hash)` is returned where `hash` is
// the listed hash in the filename itself.
fn try_match(&self, file: &str, prefix: &str, suffix: &str) -> Option<~str>{
fn try_match(&self, file: &str, prefix: &str, suffix: &str) -> Option<StrBuf>{
let middle = file.slice(prefix.len(), file.len() - suffix.len());
debug!("matching -- {}, middle: {}", file, middle);
let mut parts = middle.splitn('-', 1);
@ -319,13 +319,13 @@ impl<'a> Context<'a> {
Some(..) => {} // check the hash
// hash is irrelevant, no version specified
None => return Some(hash.to_owned())
None => return Some(hash.to_strbuf())
}
debug!("matching -- {}, vers ok", file);
// hashes in filenames are prefixes of the "true hash"
if self.id_hash == hash.as_slice() {
debug!("matching -- {}, hash ok", file);
Some(hash.to_owned())
Some(hash.to_strbuf())
} else {
None
}
@ -410,8 +410,10 @@ impl<'a> Context<'a> {
let triple = decoder::get_crate_triple(crate_data);
if triple.as_slice() != self.triple {
info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
self.rejected_via_triple.push(CrateMismatch{ path: libpath.clone(),
got: triple.to_owned() });
self.rejected_via_triple.push(CrateMismatch {
path: libpath.clone(),
got: triple.to_strbuf()
});
return false;
}
@ -420,8 +422,10 @@ impl<'a> Context<'a> {
Some(myhash) => {
if *myhash != hash {
info!("Rejecting via hash: expected {} got {}", *myhash, hash);
self.rejected_via_hash.push(CrateMismatch{ path: libpath.clone(),
got: myhash.as_str().to_owned() });
self.rejected_via_hash.push(CrateMismatch {
path: libpath.clone(),
got: myhash.as_str().to_strbuf()
});
false
} else {
true
@ -481,7 +485,7 @@ impl ArchiveMetadata {
}
// Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, ~str> {
fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, StrBuf> {
let start = time::precise_time_ns();
let ret = get_metadata_section_imp(os, filename);
info!("reading {} => {}ms", filename.filename_display(),
@ -489,9 +493,9 @@ fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, ~str> {
return ret;
}
fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~str> {
fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, StrBuf> {
if !filename.exists() {
return Err(format!("no such file: '{}'", filename.display()));
return Err(format_strbuf!("no such file: '{}'", filename.display()));
}
if filename.filename_str().unwrap().ends_with(".rlib") {
// Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
@ -501,13 +505,17 @@ fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~st
Some(ar) => ar,
None => {
debug!("llvm didn't like `{}`", filename.display());
return Err(format!("failed to read rlib metadata: '{}'",
filename.display()));
return Err(format_strbuf!("failed to read rlib metadata: \
'{}'",
filename.display()));
}
};
return match ArchiveMetadata::new(archive).map(|ar| MetadataArchive(ar)) {
None => return Err(format!("failed to read rlib metadata: '{}'",
filename.display())),
None => {
return Err((format_strbuf!("failed to read rlib metadata: \
'{}'",
filename.display())))
}
Some(blob) => return Ok(blob)
}
}
@ -516,11 +524,16 @@ fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~st
llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf)
});
if mb as int == 0 {
return Err(format!("error reading library: '{}'",filename.display()))
return Err(format_strbuf!("error reading library: '{}'",
filename.display()))
}
let of = match ObjectFile::new(mb) {
Some(of) => of,
_ => return Err(format!("provided path not an object file: '{}'", filename.display()))
_ => {
return Err((format_strbuf!("provided path not an object \
file: '{}'",
filename.display())))
}
};
let si = mk_section_iter(of.llof);
while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
@ -531,7 +544,9 @@ fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~st
if read_meta_section_name(os) == name {
let cbuf = llvm::LLVMGetSectionContents(si.llsi);
let csz = llvm::LLVMGetSectionSize(si.llsi) as uint;
let mut found = Err(format!("metadata not found: '{}'", filename.display()));
let mut found =
Err(format_strbuf!("metadata not found: '{}'",
filename.display()));
let cvbuf: *u8 = mem::transmute(cbuf);
let vlen = encoder::metadata_encoding_version.len();
debug!("checking {} bytes of metadata-version stamp",
@ -539,8 +554,11 @@ fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~st
let minsz = cmp::min(vlen, csz);
let version_ok = slice::raw::buf_as_slice(cvbuf, minsz,
|buf0| buf0 == encoder::metadata_encoding_version);
if !version_ok { return Err(format!("incompatible metadata version found: '{}'",
filename.display())); }
if !version_ok {
return Err((format_strbuf!("incompatible metadata \
version found: '{}'",
filename.display())));
}
let cvbuf1 = cvbuf.offset(vlen as int);
debug!("inflating {} bytes of compressed metadata",
@ -548,8 +566,12 @@ fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~st
slice::raw::buf_as_slice(cvbuf1, csz-vlen, |bytes| {
match flate::inflate_bytes(bytes) {
Some(inflated) => found = Ok(MetadataVec(inflated)),
None => found = Err(format!("failed to decompress metadata for: '{}'",
filename.display()))
None => {
found =
Err(format_strbuf!("failed to decompress \
metadata for: '{}'",
filename.display()))
}
}
});
if found.is_ok() {
@ -558,7 +580,8 @@ fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, ~st
}
llvm::LLVMMoveToNextSection(si.llsi);
}
return Err(format!("metadata not found: '{}'", filename.display()));
return Err(format_strbuf!("metadata not found: '{}'",
filename.display()));
}
}


@ -201,7 +201,7 @@ fn parse_bound_region(st: &mut PState, conv: conv_did) -> ty::BoundRegion {
}
'[' => {
let def = parse_def(st, RegionParameter, |x,y| conv(x,y));
let ident = token::str_to_ident(parse_str(st, ']'));
let ident = token::str_to_ident(parse_str(st, ']').as_slice());
ty::BrNamed(def, ident.name)
}
'f' => {
@ -229,7 +229,7 @@ fn parse_region(st: &mut PState, conv: conv_did) -> ty::Region {
assert_eq!(next(st), '|');
let index = parse_uint(st);
assert_eq!(next(st), '|');
let nm = token::str_to_ident(parse_str(st, ']'));
let nm = token::str_to_ident(parse_str(st, ']').as_slice());
ty::ReEarlyBound(node_id, index, nm.name)
}
'f' => {
@ -264,7 +264,7 @@ fn parse_opt<T>(st: &mut PState, f: |&mut PState| -> T) -> Option<T> {
}
}
fn parse_str(st: &mut PState, term: char) -> ~str {
fn parse_str(st: &mut PState, term: char) -> StrBuf {
let mut result = StrBuf::new();
while peek(st) != term {
unsafe {
@ -272,7 +272,7 @@ fn parse_str(st: &mut PState, term: char) -> ~str {
}
}
next(st);
return result.into_owned();
result
}
fn parse_trait_ref(st: &mut PState, conv: conv_did) -> ty::TraitRef {
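(Aside, not part of the diff: the accumulator pattern `parse_str` now uses -- a `StrBuf` built with `push_char` is returned directly instead of being converted with `into_owned()`. A hedged sketch under that assumption; `take_until` is a hypothetical name.)

// Illustrative sketch only: collect characters up to a terminator.
fn take_until(chars: &[char], term: char) -> StrBuf {
    let mut result = StrBuf::new();
    for &c in chars.iter() {
        if c == term { break; }
        result.push_char(c);
    }
    result // previously this would have been result.into_owned() to yield ~str
}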


@ -35,7 +35,7 @@ macro_rules! mywrite( ($wr:expr, $($arg:tt)*) => (
pub struct ctxt<'a> {
pub diag: &'a SpanHandler,
// Def -> str Callback:
pub ds: fn(DefId) -> ~str,
pub ds: fn(DefId) -> StrBuf,
// The type context.
pub tcx: &'a ty::ctxt,
pub abbrevs: &'a abbrev_map
@ -47,7 +47,7 @@ pub struct ctxt<'a> {
pub struct ty_abbrev {
pos: uint,
len: uint,
s: ~str
s: StrBuf
}
pub type abbrev_map = RefCell<HashMap<ty::t, ty_abbrev>>;
@ -77,7 +77,7 @@ pub fn enc_ty(w: &mut MemWriter, cx: &ctxt, t: ty::t) {
cx.abbrevs.borrow_mut().insert(t, ty_abbrev {
pos: pos as uint,
len: len as uint,
s: format!("\\#{:x}:{:x}\\#", pos, len)
s: format_strbuf!("\\#{:x}:{:x}\\#", pos, len)
});
}
}


@ -1152,12 +1152,12 @@ impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> {
Ok(ty)
}).unwrap();
fn type_string(doc: ebml::Doc) -> ~str {
fn type_string(doc: ebml::Doc) -> StrBuf {
let mut str = StrBuf::new();
for i in range(doc.start, doc.end) {
str.push_char(doc.data[i] as char);
}
str.into_owned()
str
}
}


@ -98,10 +98,10 @@ pub fn check_crate(tcx: &ty::ctxt,
make_stat(&bccx, bccx.stats.stable_paths.get()));
}
fn make_stat(bccx: &BorrowckCtxt, stat: uint) -> ~str {
fn make_stat(bccx: &BorrowckCtxt, stat: uint) -> StrBuf {
let stat_f = stat as f64;
let total = bccx.stats.guaranteed_paths.get() as f64;
format!("{} ({:.0f}%)", stat , stat_f * 100.0 / total)
format_strbuf!("{} ({:.0f}%)", stat , stat_f * 100.0 / total)
}
}
@ -303,8 +303,8 @@ impl BitAnd<RestrictionSet,RestrictionSet> for RestrictionSet {
}
impl Repr for RestrictionSet {
fn repr(&self, _tcx: &ty::ctxt) -> ~str {
format!("RestrictionSet(0x{:x})", self.bits as uint)
fn repr(&self, _tcx: &ty::ctxt) -> StrBuf {
format_strbuf!("RestrictionSet(0x{:x})", self.bits as uint)
}
}
@ -447,7 +447,7 @@ impl<'a> BorrowckCtxt<'a> {
pub fn report(&self, err: BckError) {
self.span_err(
err.span,
self.bckerr_to_str(&err));
self.bckerr_to_str(&err).as_slice());
self.note_and_explain_bckerr(err);
}
@ -572,28 +572,32 @@ impl<'a> BorrowckCtxt<'a> {
self.tcx.sess.span_end_note(s, m);
}
pub fn bckerr_to_str(&self, err: &BckError) -> ~str {
pub fn bckerr_to_str(&self, err: &BckError) -> StrBuf {
match err.code {
err_mutbl => {
let descr = match opt_loan_path(&err.cmt) {
None => format!("{} {}",
err.cmt.mutbl.to_user_str(),
self.cmt_to_str(&*err.cmt)),
Some(lp) => format!("{} {} `{}`",
err.cmt.mutbl.to_user_str(),
self.cmt_to_str(&*err.cmt),
self.loan_path_to_str(&*lp)),
None => {
format_strbuf!("{} {}",
err.cmt.mutbl.to_user_str(),
self.cmt_to_str(&*err.cmt))
}
Some(lp) => {
format_strbuf!("{} {} `{}`",
err.cmt.mutbl.to_user_str(),
self.cmt_to_str(&*err.cmt),
self.loan_path_to_str(&*lp))
}
};
match err.cause {
euv::ClosureCapture(_) => {
format!("closure cannot assign to {}", descr)
format_strbuf!("closure cannot assign to {}", descr)
}
euv::OverloadedOperator |
euv::AddrOf |
euv::RefBinding |
euv::AutoRef => {
format!("cannot borrow {} as mutable", descr)
format_strbuf!("cannot borrow {} as mutable", descr)
}
euv::ClosureInvocation => {
self.tcx.sess.span_bug(err.span,
@ -603,20 +607,24 @@ impl<'a> BorrowckCtxt<'a> {
}
err_out_of_scope(..) => {
let msg = match opt_loan_path(&err.cmt) {
None => format!("borrowed value"),
Some(lp) => format!("`{}`", self.loan_path_to_str(&*lp)),
None => "borrowed value".to_strbuf(),
Some(lp) => {
format_strbuf!("`{}`", self.loan_path_to_str(&*lp))
}
};
format!("{} does not live long enough", msg)
format_strbuf!("{} does not live long enough", msg)
}
err_borrowed_pointer_too_short(..) => {
let descr = match opt_loan_path(&err.cmt) {
Some(lp) => format!("`{}`", self.loan_path_to_str(&*lp)),
Some(lp) => {
format_strbuf!("`{}`", self.loan_path_to_str(&*lp))
}
None => self.cmt_to_str(&*err.cmt),
};
format!("lifetime of {} is too short to guarantee \
its contents can be safely reborrowed",
descr)
format_strbuf!("lifetime of {} is too short to guarantee \
its contents can be safely reborrowed",
descr)
}
}
}
@ -655,7 +663,8 @@ impl<'a> BorrowckCtxt<'a> {
mc::AliasableOther => {
self.tcx.sess.span_err(
span,
format!("{} in an aliasable location", prefix));
format!("{} in an aliasable location",
prefix));
}
mc::AliasableStatic(..) |
mc::AliasableStaticMut(..) => {
@ -696,7 +705,9 @@ impl<'a> BorrowckCtxt<'a> {
err_borrowed_pointer_too_short(loan_scope, ptr_scope, _) => {
let descr = match opt_loan_path(&err.cmt) {
Some(lp) => format!("`{}`", self.loan_path_to_str(&*lp)),
Some(lp) => {
format_strbuf!("`{}`", self.loan_path_to_str(&*lp))
}
None => self.cmt_to_str(&*err.cmt),
};
note_and_explain_region(
@ -764,13 +775,13 @@ impl<'a> BorrowckCtxt<'a> {
}
}
pub fn loan_path_to_str(&self, loan_path: &LoanPath) -> ~str {
pub fn loan_path_to_str(&self, loan_path: &LoanPath) -> StrBuf {
let mut result = StrBuf::new();
self.append_loan_path_to_str(loan_path, &mut result);
result.into_owned()
result
}
pub fn cmt_to_str(&self, cmt: &mc::cmt_) -> ~str {
pub fn cmt_to_str(&self, cmt: &mc::cmt_) -> StrBuf {
self.mc().cmt_to_str(cmt)
}
}
@ -788,38 +799,40 @@ impl DataFlowOperator for LoanDataFlowOperator {
}
impl Repr for Loan {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
format!("Loan_{:?}({}, {:?}, {:?}-{:?}, {})",
self.index,
self.loan_path.repr(tcx),
self.kind,
self.gen_scope,
self.kill_scope,
self.restrictions.repr(tcx))
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
(format!("Loan_{:?}({}, {:?}, {:?}-{:?}, {})",
self.index,
self.loan_path.repr(tcx),
self.kind,
self.gen_scope,
self.kill_scope,
self.restrictions.repr(tcx))).to_strbuf()
}
}
impl Repr for Restriction {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
format!("Restriction({}, {:x})",
self.loan_path.repr(tcx),
self.set.bits as uint)
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
(format!("Restriction({}, {:x})",
self.loan_path.repr(tcx),
self.set.bits as uint)).to_strbuf()
}
}
impl Repr for LoanPath {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match self {
&LpVar(id) => {
format!("$({})", tcx.map.node_to_str(id))
(format!("$({})", tcx.map.node_to_str(id))).to_strbuf()
}
&LpExtend(ref lp, _, LpDeref(_)) => {
format!("{}.*", lp.repr(tcx))
(format!("{}.*", lp.repr(tcx))).to_strbuf()
}
&LpExtend(ref lp, _, LpInterior(ref interior)) => {
format!("{}.{}", lp.repr(tcx), interior.repr(tcx))
(format!("{}.{}",
lp.repr(tcx),
interior.repr(tcx))).to_strbuf()
}
}
}
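(Aside, not part of the diff: where a hunk keeps `format!` -- which in this era still yields the old owned string type -- the result is wrapped with `.to_strbuf()` to satisfy the new `StrBuf` return type, as in the `Repr` impls above. A minimal sketch with a hypothetical `Count` type.)

// Illustrative sketch only -- hypothetical type, not part of the commit.
struct Count {
    n: uint,
}

impl Count {
    fn repr(&self) -> StrBuf {
        // format! output is converted once, at the return boundary.
        (format!("Count({})", self.n)).to_strbuf()
    }
}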


@ -102,9 +102,12 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &Expr, is_const: bool) {
ExprCast(_, _) => {
let ety = ty::expr_ty(v.tcx, e);
if !ty::type_is_numeric(ety) && !ty::type_is_unsafe_ptr(ety) {
v.tcx.sess.span_err(e.span, "can not cast to `".to_owned() +
ppaux::ty_to_str(v.tcx, ety) +
"` in a constant expression");
v.tcx
.sess
.span_err(e.span,
format!("can not cast to `{}` in a constant \
expression",
ppaux::ty_to_str(v.tcx, ety).as_slice()))
}
}
ExprPath(ref pth) => {


@ -33,7 +33,7 @@ use syntax::visit;
use syntax::print::pprust;
fn safe_type_for_static_mut(cx: &ty::ctxt, e: &ast::Expr) -> Option<~str> {
fn safe_type_for_static_mut(cx: &ty::ctxt, e: &ast::Expr) -> Option<StrBuf> {
let node_ty = ty::node_id_to_type(cx, e.id);
let tcontents = ty::type_contents(cx, node_ty);
debug!("safe_type_for_static_mut(dtor={}, managed={}, owned={})",
@ -49,7 +49,8 @@ fn safe_type_for_static_mut(cx: &ty::ctxt, e: &ast::Expr) -> Option<~str> {
return None;
};
Some(format!("mutable static items are not allowed to have {}", suffix))
Some(format_strbuf!("mutable static items are not allowed to have {}",
suffix))
}
struct CheckStaticVisitor<'a> {
@ -61,11 +62,11 @@ pub fn check_crate(tcx: &ty::ctxt, krate: &ast::Crate) {
}
impl<'a> CheckStaticVisitor<'a> {
fn report_error(&self, span: Span, result: Option<~str>) -> bool {
fn report_error(&self, span: Span, result: Option<StrBuf>) -> bool {
match result {
None => { false }
Some(msg) => {
self.tcx.sess.span_err(span, msg);
self.tcx.sess.span_err(span, msg.as_slice());
true
}
}
@ -132,7 +133,8 @@ impl<'a> Visitor<bool> for CheckStaticVisitor<'a> {
ty::ty_enum(did, _) => {
if ty::has_dtor(self.tcx, did) {
self.report_error(e.span,
Some("static items are not allowed to have destructors".to_owned()));
Some("static items are not allowed to have \
destructors".to_strbuf()));
return;
}
}


@ -295,21 +295,21 @@ pub enum const_val {
pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val {
match eval_const_expr_partial(tcx, e) {
Ok(r) => r,
Err(s) => tcx.sess.span_fatal(e.span, s)
Err(s) => tcx.sess.span_fatal(e.span, s.as_slice())
}
}
pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
-> Result<const_val, ~str> {
fn fromb(b: bool) -> Result<const_val, ~str> { Ok(const_int(b as i64)) }
-> Result<const_val, StrBuf> {
fn fromb(b: bool) -> Result<const_val, StrBuf> { Ok(const_int(b as i64)) }
match e.node {
ExprUnary(UnNeg, inner) => {
match eval_const_expr_partial(tcx, inner) {
Ok(const_float(f)) => Ok(const_float(-f)),
Ok(const_int(i)) => Ok(const_int(-i)),
Ok(const_uint(i)) => Ok(const_uint(-i)),
Ok(const_str(_)) => Err("negate on string".to_owned()),
Ok(const_bool(_)) => Err("negate on boolean".to_owned()),
Ok(const_str(_)) => Err("negate on string".to_strbuf()),
Ok(const_bool(_)) => Err("negate on boolean".to_strbuf()),
ref err => ((*err).clone())
}
}
@ -318,7 +318,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
Ok(const_int(i)) => Ok(const_int(!i)),
Ok(const_uint(i)) => Ok(const_uint(!i)),
Ok(const_bool(b)) => Ok(const_bool(!b)),
_ => Err("not on float or string".to_owned())
_ => Err("not on float or string".to_strbuf())
}
}
ExprBinary(op, a, b) => {
@ -337,7 +337,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
BiNe => fromb(a != b),
BiGe => fromb(a >= b),
BiGt => fromb(a > b),
_ => Err("can't do this op on floats".to_owned())
_ => Err("can't do this op on floats".to_strbuf())
}
}
(Ok(const_int(a)), Ok(const_int(b))) => {
@ -345,9 +345,14 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
BiAdd => Ok(const_int(a + b)),
BiSub => Ok(const_int(a - b)),
BiMul => Ok(const_int(a * b)),
BiDiv if b == 0 => Err("attempted to divide by zero".to_owned()),
BiDiv if b == 0 => {
Err("attempted to divide by zero".to_strbuf())
}
BiDiv => Ok(const_int(a / b)),
BiRem if b == 0 => Err("attempted remainder with a divisor of zero".to_owned()),
BiRem if b == 0 => {
Err("attempted remainder with a divisor of \
zero".to_strbuf())
}
BiRem => Ok(const_int(a % b)),
BiAnd | BiBitAnd => Ok(const_int(a & b)),
BiOr | BiBitOr => Ok(const_int(a | b)),
@ -367,9 +372,14 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
BiAdd => Ok(const_uint(a + b)),
BiSub => Ok(const_uint(a - b)),
BiMul => Ok(const_uint(a * b)),
BiDiv if b == 0 => Err("attempted to divide by zero".to_owned()),
BiDiv if b == 0 => {
Err("attempted to divide by zero".to_strbuf())
}
BiDiv => Ok(const_uint(a / b)),
BiRem if b == 0 => Err("attempted remainder with a divisor of zero".to_owned()),
BiRem if b == 0 => {
Err("attempted remainder with a divisor of \
zero".to_strbuf())
}
BiRem => Ok(const_uint(a % b)),
BiAnd | BiBitAnd => Ok(const_uint(a & b)),
BiOr | BiBitOr => Ok(const_uint(a | b)),
@ -389,14 +399,14 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
match op {
BiShl => Ok(const_int(a << b)),
BiShr => Ok(const_int(a >> b)),
_ => Err("can't do this op on an int and uint".to_owned())
_ => Err("can't do this op on an int and uint".to_strbuf())
}
}
(Ok(const_uint(a)), Ok(const_int(b))) => {
match op {
BiShl => Ok(const_uint(a << b)),
BiShr => Ok(const_uint(a >> b)),
_ => Err("can't do this op on a uint and int".to_owned())
_ => Err("can't do this op on a uint and int".to_strbuf())
}
}
(Ok(const_bool(a)), Ok(const_bool(b))) => {
@ -408,10 +418,10 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
BiBitOr => a | b,
BiEq => a == b,
BiNe => a != b,
_ => return Err("can't do this op on bools".to_owned())
_ => return Err("can't do this op on bools".to_strbuf())
}))
}
_ => Err("bad operands for binary".to_owned())
_ => Err("bad operands for binary".to_strbuf())
}
}
ExprCast(base, target_ty) => {
@ -435,7 +445,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
const_uint(u) => Ok(const_float(u as f64)),
const_int(i) => Ok(const_float(i as f64)),
const_float(f) => Ok(const_float(f)),
_ => Err("can't cast float to str".to_owned()),
_ => Err("can't cast float to str".to_strbuf()),
}
}
ty::ty_uint(_) => {
@ -443,7 +453,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
const_uint(u) => Ok(const_uint(u)),
const_int(i) => Ok(const_uint(i as u64)),
const_float(f) => Ok(const_uint(f as u64)),
_ => Err("can't cast str to uint".to_owned()),
_ => Err("can't cast str to uint".to_strbuf()),
}
}
ty::ty_int(_) | ty::ty_bool => {
@ -451,10 +461,10 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
const_uint(u) => Ok(const_int(u as i64)),
const_int(i) => Ok(const_int(i)),
const_float(f) => Ok(const_int(f as i64)),
_ => Err("can't cast str to int".to_owned()),
_ => Err("can't cast str to int".to_strbuf()),
}
}
_ => Err("can't cast this type".to_owned())
_ => Err("can't cast this type".to_strbuf())
}
}
}
@ -462,14 +472,14 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
ExprPath(_) => {
match lookup_const(tcx.ty_ctxt(), e) {
Some(actual_e) => eval_const_expr_partial(tcx.ty_ctxt(), actual_e),
None => Err("non-constant path in constant expr".to_owned())
None => Err("non-constant path in constant expr".to_strbuf())
}
}
ExprLit(lit) => Ok(lit_to_const(lit)),
// If we have a vstore, just keep going; it has to be a string
ExprVstore(e, _) => eval_const_expr_partial(tcx, e),
ExprParen(e) => eval_const_expr_partial(tcx, e),
_ => Err("unsupported constant expr".to_owned())
_ => Err("unsupported constant expr".to_strbuf())
}
}

View File

@ -112,8 +112,11 @@ impl<'a, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, O> {
"".to_owned()
};
try!(ps.synth_comment((format!("id {}: {}{}{}", id, entry_str,
gens_str, kills_str)).to_strbuf()));
try!(ps.synth_comment(format_strbuf!("id {}: {}{}{}",
id,
entry_str,
gens_str,
kills_str)));
try!(pp::space(&mut ps.s));
}
Ok(())
@ -824,11 +827,11 @@ impl<'a, 'b, O:DataFlowOperator> PropagationContext<'a, 'b, O> {
}
}
fn mut_bits_to_str(words: &mut [uint]) -> ~str {
fn mut_bits_to_str(words: &mut [uint]) -> StrBuf {
bits_to_str(words)
}
fn bits_to_str(words: &[uint]) -> ~str {
fn bits_to_str(words: &[uint]) -> StrBuf {
let mut result = StrBuf::new();
let mut sep = '[';
@ -844,7 +847,7 @@ fn bits_to_str(words: &[uint]) -> ~str {
}
}
result.push_char(']');
return result.into_owned();
return result
}
fn copy_bits(in_vec: &[uint], out_vec: &mut [uint]) -> bool {
@ -884,8 +887,8 @@ fn set_bit(words: &mut [uint], bit: uint) -> bool {
oldv != newv
}
fn bit_str(bit: uint) -> ~str {
fn bit_str(bit: uint) -> StrBuf {
let byte = bit >> 8;
let lobits = 1 << (bit & 0xFF);
format!("[{}:{}-{:02x}]", bit, byte, lobits)
format_strbuf!("[{}:{}-{:02x}]", bit, byte, lobits)
}
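(Aside, not part of the diff: the other formatting route used throughout these hunks is `format_strbuf!`, which builds the `StrBuf` directly instead of going through `format!` plus a conversion. A minimal sketch; `bit_label` is a hypothetical name.)

// Illustrative sketch only: same format string as bit_str above.
fn bit_label(bit: uint, byte: uint, lobits: uint) -> StrBuf {
    format_strbuf!("[{}:{}-{:02x}]", bit, byte, lobits)
}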


@ -352,11 +352,17 @@ impl<'a> DeadVisitor<'a> {
false
}
fn warn_dead_code(&mut self, id: ast::NodeId,
span: codemap::Span, ident: ast::Ident) {
self.tcx.sess.add_lint(DeadCode, id, span,
format!("code is never used: `{}`",
token::get_ident(ident)));
fn warn_dead_code(&mut self,
id: ast::NodeId,
span: codemap::Span,
ident: ast::Ident) {
self.tcx
.sess
.add_lint(DeadCode,
id,
span,
format_strbuf!("code is never used: `{}`",
token::get_ident(ident)));
}
}

View File

@ -562,7 +562,7 @@ pub fn check_cast_for_escaping_regions(
}
// Ensure that `ty` has a statically known size (i.e., it has the `Sized` bound).
fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: ~str, sp: Span) {
fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: StrBuf, sp: Span) {
if !ty::type_is_sized(tcx, ty) {
tcx.sess.span_err(sp, format!("variable `{}` has dynamically sized type `{}`",
name, ty_to_str(tcx, ty)));
@ -572,8 +572,8 @@ fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: ~str, sp: Span) {
// Check that any variables in a pattern have types with statically known size.
fn check_pat(cx: &mut Context, pat: &Pat) {
let var_name = match pat.node {
PatWild => Some("_".to_owned()),
PatIdent(_, ref path, _) => Some(path_to_str(path).to_owned()),
PatWild => Some("_".to_strbuf()),
PatIdent(_, ref path, _) => Some(path_to_str(path).to_strbuf()),
_ => None
};

View File

@ -71,12 +71,12 @@ impl LanguageItems {
}
}
pub fn require(&self, it: LangItem) -> Result<ast::DefId, ~str> {
pub fn require(&self, it: LangItem) -> Result<ast::DefId, StrBuf> {
match self.items.get(it as uint) {
&Some(id) => Ok(id),
&None => {
Err(format!("requires `{}` lang_item",
LanguageItems::item_name(it as uint)))
Err(format_strbuf!("requires `{}` lang_item",
LanguageItems::item_name(it as uint)))
}
}
}


@ -1830,7 +1830,7 @@ impl<'a> IdVisitingOperation for Context<'a> {
None => {}
Some(l) => {
for (lint, span, msg) in l.move_iter() {
self.span_lint(lint, span, msg)
self.span_lint(lint, span, msg.as_slice())
}
}
}


@ -150,13 +150,19 @@ enum LiveNodeKind {
ExitNode
}
fn live_node_kind_to_str(lnk: LiveNodeKind, cx: &ty::ctxt) -> ~str {
fn live_node_kind_to_str(lnk: LiveNodeKind, cx: &ty::ctxt) -> StrBuf {
let cm = cx.sess.codemap();
match lnk {
FreeVarNode(s) => format!("Free var node [{}]", cm.span_to_str(s)),
ExprNode(s) => format!("Expr node [{}]", cm.span_to_str(s)),
VarDefNode(s) => format!("Var def node [{}]", cm.span_to_str(s)),
ExitNode => "Exit node".to_owned()
FreeVarNode(s) => {
format_strbuf!("Free var node [{}]", cm.span_to_str(s))
}
ExprNode(s) => {
format_strbuf!("Expr node [{}]", cm.span_to_str(s))
}
VarDefNode(s) => {
format_strbuf!("Var def node [{}]", cm.span_to_str(s))
}
ExitNode => "Exit node".to_strbuf(),
}
}
@ -308,18 +314,20 @@ impl<'a> IrMaps<'a> {
match self.variable_map.find(&node_id) {
Some(&var) => var,
None => {
self.tcx.sess.span_bug(
span, format!("no variable registered for id {}", node_id));
self.tcx
.sess
.span_bug(span, format!("no variable registered for id {}",
node_id));
}
}
}
fn variable_name(&self, var: Variable) -> ~str {
fn variable_name(&self, var: Variable) -> StrBuf {
match self.var_kinds.get(var.get()) {
&Local(LocalInfo { ident: nm, .. }) | &Arg(_, nm) => {
token::get_ident(nm).get().to_str()
token::get_ident(nm).get().to_str().to_strbuf()
},
&ImplicitRet => "<implicit-ret>".to_owned()
&ImplicitRet => "<implicit-ret>".to_strbuf()
}
}
@ -741,7 +749,7 @@ impl<'a> Liveness<'a> {
}
#[allow(unused_must_use)]
fn ln_str(&self, ln: LiveNode) -> ~str {
fn ln_str(&self, ln: LiveNode) -> StrBuf {
let mut wr = io::MemWriter::new();
{
let wr = &mut wr as &mut io::Writer;
@ -751,7 +759,7 @@ impl<'a> Liveness<'a> {
self.write_vars(wr, ln, |idx| self.users.get(idx).writer);
write!(wr, " precedes {}]", self.successors.get(ln.get()).to_str());
}
str::from_utf8(wr.unwrap().as_slice()).unwrap().to_owned()
str::from_utf8(wr.unwrap().as_slice()).unwrap().to_strbuf()
}
fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {
@ -1532,9 +1540,13 @@ impl<'a> Liveness<'a> {
}
}
fn should_warn(&self, var: Variable) -> Option<~str> {
fn should_warn(&self, var: Variable) -> Option<StrBuf> {
let name = self.ir.variable_name(var);
if name.len() == 0 || name[0] == ('_' as u8) { None } else { Some(name) }
if name.len() == 0 || name.as_slice()[0] == ('_' as u8) {
None
} else {
Some(name)
}
}
fn warn_about_unused_args(&self, decl: &FnDecl, entry_ln: LiveNode) {
@ -1581,11 +1593,12 @@ impl<'a> Liveness<'a> {
if is_assigned {
self.ir.tcx.sess.add_lint(UnusedVariable, id, sp,
format!("variable `{}` is assigned to, \
but never used", *name));
format_strbuf!("variable `{}` is assigned to, \
but never used",
*name));
} else {
self.ir.tcx.sess.add_lint(UnusedVariable, id, sp,
format!("unused variable: `{}`", *name));
format_strbuf!("unused variable: `{}`", *name));
}
}
true
@ -1603,7 +1616,8 @@ impl<'a> Liveness<'a> {
let r = self.should_warn(var);
for name in r.iter() {
self.ir.tcx.sess.add_lint(DeadAssignment, id, sp,
format!("value assigned to `{}` is never read", *name));
format_strbuf!("value assigned to `{}` is never read",
*name));
}
}
}


@ -1093,50 +1093,51 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
Ok(())
}
pub fn cmt_to_str(&self, cmt: &cmt_) -> ~str {
pub fn cmt_to_str(&self, cmt: &cmt_) -> StrBuf {
match cmt.cat {
cat_static_item => {
"static item".to_owned()
"static item".to_strbuf()
}
cat_copied_upvar(_) => {
"captured outer variable in a proc".to_owned()
"captured outer variable in a proc".to_strbuf()
}
cat_rvalue(..) => {
"non-lvalue".to_owned()
"non-lvalue".to_strbuf()
}
cat_local(_) => {
"local variable".to_owned()
"local variable".to_strbuf()
}
cat_arg(..) => {
"argument".to_owned()
"argument".to_strbuf()
}
cat_deref(ref base, _, pk) => {
match base.cat {
cat_upvar(..) => {
format!("captured outer variable")
"captured outer variable".to_strbuf()
}
_ => {
format!("dereference of `{}`-pointer", ptr_sigil(pk))
format_strbuf!("dereference of `{}`-pointer",
ptr_sigil(pk))
}
}
}
cat_interior(_, InteriorField(NamedField(_))) => {
"field".to_owned()
"field".to_strbuf()
}
cat_interior(_, InteriorField(PositionalField(_))) => {
"anonymous field".to_owned()
"anonymous field".to_strbuf()
}
cat_interior(_, InteriorElement(VecElement)) => {
"vec content".to_owned()
"vec content".to_strbuf()
}
cat_interior(_, InteriorElement(StrElement)) => {
"str content".to_owned()
"str content".to_strbuf()
}
cat_interior(_, InteriorElement(OtherElement)) => {
"indexed content".to_owned()
"indexed content".to_strbuf()
}
cat_upvar(..) => {
"captured outer variable".to_owned()
"captured outer variable".to_strbuf()
}
cat_discr(ref cmt, _) => {
self.cmt_to_str(&**cmt)
@ -1248,17 +1249,17 @@ impl cmt_ {
}
impl Repr for cmt_ {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
format!("\\{{} id:{} m:{:?} ty:{}\\}",
self.cat.repr(tcx),
self.id,
self.mutbl,
self.ty.repr(tcx))
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
format_strbuf!("\\{{} id:{} m:{:?} ty:{}\\}",
self.cat.repr(tcx),
self.id,
self.mutbl,
self.ty.repr(tcx))
}
}
impl Repr for categorization {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self {
cat_static_item |
cat_rvalue(..) |
@ -1266,21 +1267,19 @@ impl Repr for categorization {
cat_local(..) |
cat_upvar(..) |
cat_arg(..) => {
format!("{:?}", *self)
format_strbuf!("{:?}", *self)
}
cat_deref(ref cmt, derefs, ptr) => {
format!("{}-{}{}->",
cmt.cat.repr(tcx),
ptr_sigil(ptr),
derefs)
format_strbuf!("{}-{}{}->",
cmt.cat.repr(tcx),
ptr_sigil(ptr),
derefs)
}
cat_interior(ref cmt, interior) => {
format!("{}.{}",
cmt.cat.repr(tcx),
interior.repr(tcx))
format_strbuf!("{}.{}", cmt.cat.repr(tcx), interior.repr(tcx))
}
cat_downcast(ref cmt) => {
format!("{}->(enum)", cmt.cat.repr(tcx))
format_strbuf!("{}->(enum)", cmt.cat.repr(tcx))
}
cat_discr(ref cmt, _) => {
cmt.cat.repr(tcx)
@ -1301,13 +1300,13 @@ pub fn ptr_sigil(ptr: PointerKind) -> &'static str {
}
impl Repr for InteriorKind {
fn repr(&self, _tcx: &ty::ctxt) -> ~str {
fn repr(&self, _tcx: &ty::ctxt) -> StrBuf {
match *self {
InteriorField(NamedField(fld)) => {
token::get_name(fld).get().to_str()
token::get_name(fld).get().to_str().to_strbuf()
}
InteriorField(PositionalField(i)) => format!("\\#{:?}", i),
InteriorElement(_) => "[]".to_owned(),
InteriorField(PositionalField(i)) => format_strbuf!("\\#{:?}", i),
InteriorElement(_) => "[]".to_strbuf(),
}
}
}


@ -45,7 +45,7 @@ pub type PublicItems = NodeSet;
/// Result of a checking operation - None => no errors were found. Some => an
/// error and contains the span and message for reporting that error and
/// optionally the same for a note about the error.
type CheckResult = Option<(Span, ~str, Option<(Span, ~str)>)>;
type CheckResult = Option<(Span, StrBuf, Option<(Span, StrBuf)>)>;
////////////////////////////////////////////////////////////////////////////////
/// The parent visitor, used to determine what's the parent of what (node-wise)
@ -356,8 +356,8 @@ enum FieldName {
impl<'a> PrivacyVisitor<'a> {
// used when debugging
fn nodestr(&self, id: ast::NodeId) -> ~str {
self.tcx.map.node_to_str(id).to_owned()
fn nodestr(&self, id: ast::NodeId) -> StrBuf {
self.tcx.map.node_to_str(id).to_strbuf()
}
// Determines whether the given definition is public from the point of view
@ -511,9 +511,11 @@ impl<'a> PrivacyVisitor<'a> {
match result {
None => true,
Some((span, msg, note)) => {
self.tcx.sess.span_err(span, msg);
self.tcx.sess.span_err(span, msg.as_slice());
match note {
Some((span, msg)) => self.tcx.sess.span_note(span, msg),
Some((span, msg)) => {
self.tcx.sess.span_note(span, msg.as_slice())
}
None => {},
}
false
@ -528,7 +530,9 @@ impl<'a> PrivacyVisitor<'a> {
source_did: Option<ast::DefId>, msg: &str) -> CheckResult {
let id = match self.def_privacy(to_check) {
ExternallyDenied => {
return Some((span, format!("{} is private", msg), None))
return Some((span,
format_strbuf!("{} is private", msg),
None))
}
Allowable => return None,
DisallowedBy(id) => id,
@ -539,9 +543,11 @@ impl<'a> PrivacyVisitor<'a> {
// because the item itself is private or because its parent is private
// and its parent isn't in our ancestry.
let (err_span, err_msg) = if id == source_did.unwrap_or(to_check).node {
return Some((span, format!("{} is private", msg), None));
return Some((span,
format_strbuf!("{} is private", msg),
None));
} else {
(span, format!("{} is inaccessible", msg))
(span, format_strbuf!("{} is inaccessible", msg))
};
let item = match self.tcx.map.find(id) {
Some(ast_map::NodeItem(item)) => {
@ -577,8 +583,9 @@ impl<'a> PrivacyVisitor<'a> {
ast::ItemEnum(..) => "enum",
_ => return Some((err_span, err_msg, None))
};
let msg = format!("{} `{}` is private", desc,
token::get_ident(item.ident));
let msg = format_strbuf!("{} `{}` is private",
desc,
token::get_ident(item.ident));
Some((err_span, err_msg, Some((span, msg))))
}
@ -1364,9 +1371,11 @@ impl<'a> Visitor<()> for VisiblePrivateTypesVisitor<'a> {
match t.node {
ast::TyPath(ref p, _, path_id) => {
if self.path_is_private_type(path_id) {
self.tcx.sess.add_lint(lint::VisiblePrivateTypes,
path_id, p.span,
"private type in exported type signature".to_owned());
self.tcx.sess.add_lint(
lint::VisiblePrivateTypes,
path_id, p.span,
"private type in exported type \
signature".to_strbuf());
}
}
_ => {}


@ -57,7 +57,7 @@ pub type TraitMap = NodeMap<Vec<DefId> >;
pub type ExportMap2 = RefCell<NodeMap<Vec<Export2> >>;
pub struct Export2 {
pub name: ~str, // The name of the target.
pub name: StrBuf, // The name of the target.
pub def_id: DefId, // The definition of the target.
}
@ -2046,7 +2046,7 @@ impl<'a> Resolver<'a> {
}
}
fn idents_to_str(&mut self, idents: &[Ident]) -> ~str {
fn idents_to_str(&mut self, idents: &[Ident]) -> StrBuf {
let mut first = true;
let mut result = StrBuf::new();
for ident in idents.iter() {
@ -2057,10 +2057,10 @@ impl<'a> Resolver<'a> {
}
result.push_str(token::get_ident(*ident).get());
};
result.into_owned()
result
}
fn path_idents_to_str(&mut self, path: &Path) -> ~str {
fn path_idents_to_str(&mut self, path: &Path) -> StrBuf {
let identifiers: Vec<ast::Ident> = path.segments
.iter()
.map(|seg| seg.identifier)
@ -2070,25 +2070,26 @@ impl<'a> Resolver<'a> {
fn import_directive_subclass_to_str(&mut self,
subclass: ImportDirectiveSubclass)
-> ~str {
-> StrBuf {
match subclass {
SingleImport(_, source) => {
token::get_ident(source).get().to_str()
token::get_ident(source).get().to_strbuf()
}
GlobImport => "*".to_owned()
GlobImport => "*".to_strbuf()
}
}
fn import_path_to_str(&mut self,
idents: &[Ident],
subclass: ImportDirectiveSubclass)
-> ~str {
-> StrBuf {
if idents.is_empty() {
self.import_directive_subclass_to_str(subclass)
} else {
(format!("{}::{}",
self.idents_to_str(idents),
self.import_directive_subclass_to_str(subclass)))
self.import_directive_subclass_to_str(
subclass))).to_strbuf()
}
}
@ -2219,8 +2220,11 @@ impl<'a> Resolver<'a> {
let lp = match lp {
LastMod(lp) => lp,
LastImport{..} => self.session.span_bug(directive.span,
"Not expecting Import here, must be LastMod"),
LastImport {..} => {
self.session
.span_bug(directive.span,
"not expecting Import here, must be LastMod")
}
};
// We need to resolve both namespaces for this to succeed.
@ -2614,7 +2618,7 @@ impl<'a> Resolver<'a> {
Failed => {
let segment_name = token::get_ident(name);
let module_name = self.module_to_str(&*search_module);
if "???" == module_name {
if "???" == module_name.as_slice() {
let span = Span {
lo: span.lo,
hi: span.lo + Pos::from_uint(segment_name.get().len()),
@ -2732,14 +2736,18 @@ impl<'a> Resolver<'a> {
match module_prefix_result {
Failed => {
let mpath = self.idents_to_str(module_path);
match mpath.rfind(':') {
match mpath.as_slice().rfind(':') {
Some(idx) => {
self.resolve_error(span, format!("unresolved import: could not find `{}` \
in `{}`",
// idx +- 1 to account for the colons
// on either side
mpath.slice_from(idx + 1),
mpath.slice_to(idx - 1)));
self.resolve_error(span,
format!("unresolved import: could \
not find `{}` in `{}`",
// idx +- 1 to account for
// the colons on either
// side
mpath.as_slice()
.slice_from(idx + 1),
mpath.as_slice()
.slice_to(idx - 1)));
},
None => (),
};
@ -3283,7 +3291,7 @@ impl<'a> Resolver<'a> {
debug!("(computing exports) YES: export '{}' => {:?}",
name, def_id_of_def(d));
exports2.push(Export2 {
name: name.get().to_str(),
name: name.get().to_strbuf(),
def_id: def_id_of_def(d)
});
}
@ -4557,10 +4565,11 @@ impl<'a> Resolver<'a> {
let def = self.resolve_module_relative_path(path, namespace);
match (def, unqualified_def) {
(Some((d, _)), Some((ud, _))) if d == ud => {
self.session.add_lint(UnnecessaryQualification,
id,
path.span,
"unnecessary qualification".to_owned());
self.session
.add_lint(UnnecessaryQualification,
id,
path.span,
"unnecessary qualification".to_strbuf());
}
_ => ()
}
@ -4875,7 +4884,7 @@ impl<'a> Resolver<'a> {
}
fn find_best_match_for_name(&mut self, name: &str, max_distance: uint)
-> Option<~str> {
-> Option<StrBuf> {
let this = &mut *self;
let mut maybes: Vec<token::InternedString> = Vec::new();
@ -4907,7 +4916,7 @@ impl<'a> Resolver<'a> {
*values.get(smallest) <= max_distance &&
name != maybes.get(smallest).get() {
Some(maybes.get(smallest).get().to_str())
Some(maybes.get(smallest).get().to_strbuf())
} else {
None
@ -4977,17 +4986,20 @@ impl<'a> Resolver<'a> {
_ =>
// limit search to 5 to reduce the number
// of stupid suggestions
match self.find_best_match_for_name(wrong_name, 5) {
match self.find_best_match_for_name(
wrong_name.as_slice(),
5) {
Some(m) => {
self.resolve_error(expr.span,
format!("unresolved name `{}`. \
Did you mean `{}`?",
wrong_name, m));
wrong_name,
m));
}
None => {
self.resolve_error(expr.span,
format!("unresolved name `{}`.",
wrong_name));
wrong_name.as_slice()));
}
}
}
@ -5240,8 +5252,11 @@ impl<'a> Resolver<'a> {
ViewPathGlob(_, id) => {
if !self.used_imports.contains(&(id, TypeNS)) &&
!self.used_imports.contains(&(id, ValueNS)) {
self.session.add_lint(UnusedImports, id, p.span,
"unused import".to_owned());
self.session
.add_lint(UnusedImports,
id,
p.span,
"unused import".to_strbuf());
}
},
}
@ -5257,19 +5272,27 @@ impl<'a> Resolver<'a> {
// public or private item, we will check the correct thing, dependent on how the import
// is used.
fn finalize_import(&mut self, id: NodeId, span: Span) {
debug!("finalizing import uses for {}", self.session.codemap().span_to_snippet(span));
debug!("finalizing import uses for {}",
self.session.codemap().span_to_snippet(span));
if !self.used_imports.contains(&(id, TypeNS)) &&
!self.used_imports.contains(&(id, ValueNS)) {
self.session.add_lint(UnusedImports, id, span, "unused import".to_owned());
self.session.add_lint(UnusedImports,
id,
span,
"unused import".to_strbuf());
}
let (v_priv, t_priv) = match self.last_private.find(&id) {
Some(&LastImport{value_priv: v,
value_used: _,
type_priv: t,
type_used: _}) => (v, t),
Some(_) => fail!("We should only have LastImport for `use` directives"),
Some(&LastImport {
value_priv: v,
value_used: _,
type_priv: t,
type_used: _
}) => (v, t),
Some(_) => {
fail!("we should only have LastImport for `use` directives")
}
_ => return,
};
@ -5306,7 +5329,7 @@ impl<'a> Resolver<'a> {
//
/// A somewhat inefficient routine to obtain the name of a module.
fn module_to_str(&mut self, module: &Module) -> ~str {
fn module_to_str(&mut self, module: &Module) -> StrBuf {
let mut idents = Vec::new();
fn collect_mod(idents: &mut Vec<ast::Ident>, module: &Module) {
@ -5325,7 +5348,7 @@ impl<'a> Resolver<'a> {
collect_mod(&mut idents, module);
if idents.len() == 0 {
return "???".to_owned();
return "???".to_strbuf();
}
self.idents_to_str(idents.move_iter().rev()
.collect::<Vec<ast::Ident>>()


@ -400,12 +400,12 @@ struct Match<'a, 'b> {
}
impl<'a, 'b> Repr for Match<'a, 'b> {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
if tcx.sess.verbose() {
// for many programs, this just take too long to serialize
self.pats.repr(tcx)
} else {
format!("{} pats", self.pats.len())
format_strbuf!("{} pats", self.pats.len())
}
}
}
@ -1851,11 +1851,14 @@ fn create_bindings_map(bcx: &Block, pat: @ast::Pat) -> BindingsMap {
// but during matching we need to store a *T as explained
// above
llmatch = alloca(bcx, llvariable_ty.ptr_to(), "__llmatch");
trmode = TrByValue(alloca(bcx, llvariable_ty,
bcx.ident(ident)));
trmode = TrByValue(alloca(bcx,
llvariable_ty,
bcx.ident(ident).as_slice()));
}
ast::BindByRef(_) => {
llmatch = alloca(bcx, llvariable_ty, bcx.ident(ident));
llmatch = alloca(bcx,
llvariable_ty,
bcx.ident(ident).as_slice());
trmode = TrByRef;
}
};
@ -2103,7 +2106,7 @@ fn mk_binding_alloca<'a,A>(bcx: &'a Block<'a>,
let ident = ast_util::path_to_ident(path);
// Allocate memory on stack for the binding.
let llval = alloc_ty(bcx, var_ty, bcx.ident(ident));
let llval = alloc_ty(bcx, var_ty, bcx.ident(ident).as_slice());
// Subtle: be sure that we *populate* the memory *before*
// we schedule the cleanup.


@ -65,15 +65,13 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
let mut constraints =
StrBuf::from_str(constraints.iter()
.map(|s| s.get().to_str())
.collect::<Vec<~str>>()
.map(|s| s.get().to_strbuf())
.collect::<Vec<StrBuf>>()
.connect(","));
let mut clobbers = StrBuf::from_str(getClobbers());
let mut clobbers = getClobbers();
if !ia.clobbers.get().is_empty() && !clobbers.is_empty() {
clobbers = StrBuf::from_owned_str(format!("{},{}",
ia.clobbers.get(),
clobbers));
clobbers = format_strbuf!("{},{}", ia.clobbers.get(), clobbers);
} else {
clobbers.push_str(ia.clobbers.get());
}
@ -136,12 +134,12 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
#[cfg(target_arch = "arm")]
#[cfg(target_arch = "mips")]
fn getClobbers() -> ~str {
"".to_owned()
fn getClobbers() -> StrBuf {
"".to_strbuf()
}
#[cfg(target_arch = "x86")]
#[cfg(target_arch = "x86_64")]
fn getClobbers() -> ~str {
"~{dirflag},~{fpsr},~{flags}".to_owned()
fn getClobbers() -> StrBuf {
"~{dirflag},~{fpsr},~{flags}".to_strbuf()
}


@ -124,13 +124,13 @@ pub fn push_ctxt(s: &'static str) -> _InsnCtxt {
pub struct StatRecorder<'a> {
ccx: &'a CrateContext,
name: Option<~str>,
name: Option<StrBuf>,
start: u64,
istart: uint,
}
impl<'a> StatRecorder<'a> {
pub fn new(ccx: &'a CrateContext, name: ~str) -> StatRecorder<'a> {
pub fn new(ccx: &'a CrateContext, name: StrBuf) -> StatRecorder<'a> {
let start = if ccx.sess().trans_stats() {
time::precise_time_ns()
} else {
@ -206,15 +206,19 @@ pub fn decl_cdecl_fn(llmod: ModuleRef,
}
// only use this for foreign function ABIs and glue, use `get_extern_rust_fn` for Rust functions
pub fn get_extern_fn(externs: &mut ExternMap, llmod: ModuleRef,
name: &str, cc: lib::llvm::CallConv,
ty: Type, output: ty::t) -> ValueRef {
pub fn get_extern_fn(externs: &mut ExternMap,
llmod: ModuleRef,
name: &str,
cc: lib::llvm::CallConv,
ty: Type,
output: ty::t)
-> ValueRef {
match externs.find_equiv(&name) {
Some(n) => return *n,
None => {}
}
let f = decl_fn(llmod, name, cc, ty, output);
externs.insert(name.to_owned(), f);
externs.insert(name.to_strbuf(), f);
f
}
@ -231,7 +235,7 @@ fn get_extern_rust_fn(ccx: &CrateContext, inputs: &[ty::t], output: ty::t,
.collect::<Vec<_>>().as_slice(), f)
});
ccx.externs.borrow_mut().insert(name.to_owned(), f);
ccx.externs.borrow_mut().insert(name.to_strbuf(), f);
f
}
@ -315,7 +319,7 @@ pub fn get_extern_const(externs: &mut ExternMap, llmod: ModuleRef,
let c = name.with_c_str(|buf| {
llvm::LLVMAddGlobal(llmod, ty.to_ref(), buf)
});
externs.insert(name.to_owned(), c);
externs.insert(name.to_strbuf(), c);
return c;
}
}
@ -469,9 +473,9 @@ pub fn unset_split_stack(f: ValueRef) {
// Double-check that we never ask LLVM to declare the same symbol twice. It
// silently mangles such symbols, breaking our linkage model.
pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: ~str) {
pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: StrBuf) {
if ccx.all_llvm_symbols.borrow().contains(&sym) {
ccx.sess().bug("duplicate LLVM symbol: ".to_owned() + sym);
ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym));
}
ccx.all_llvm_symbols.borrow_mut().insert(sym);
}
@ -505,8 +509,12 @@ pub fn get_res_dtor(ccx: &CrateContext,
ty::lookup_item_type(tcx, parent_id).ty);
let llty = type_of_dtor(ccx, class_ty);
get_extern_fn(&mut *ccx.externs.borrow_mut(), ccx.llmod, name,
lib::llvm::CCallConv, llty, ty::mk_nil())
get_extern_fn(&mut *ccx.externs.borrow_mut(),
ccx.llmod,
name.as_slice(),
lib::llvm::CCallConv,
llty,
ty::mk_nil())
}
}
@ -829,8 +837,8 @@ pub fn fail_if_zero<'a>(
ICmp(cx, lib::llvm::IntEQ, rhs, zero)
}
_ => {
cx.sess().bug("fail-if-zero on unexpected type: ".to_owned() +
ty_to_str(cx.tcx(), rhs_t));
cx.sess().bug(format!("fail-if-zero on unexpected type: {}",
ty_to_str(cx.tcx(), rhs_t)));
}
};
with_cond(cx, is_zero, |bcx| {
@ -848,15 +856,19 @@ pub fn trans_external_path(ccx: &CrateContext, did: ast::DefId, t: ty::t) -> Val
get_extern_rust_fn(ccx,
fn_ty.sig.inputs.as_slice(),
fn_ty.sig.output,
name,
name.as_slice(),
did)
}
Some(..) | None => {
let c = foreign::llvm_calling_convention(ccx, fn_ty.abi);
let cconv = c.unwrap_or(lib::llvm::CCallConv);
let llty = type_of_fn_from_ty(ccx, t);
get_extern_fn(&mut *ccx.externs.borrow_mut(), ccx.llmod,
name, cconv, llty, fn_ty.sig.output)
get_extern_fn(&mut *ccx.externs.borrow_mut(),
ccx.llmod,
name.as_slice(),
cconv,
llty,
fn_ty.sig.output)
}
}
}
@ -864,12 +876,14 @@ pub fn trans_external_path(ccx: &CrateContext, did: ast::DefId, t: ty::t) -> Val
get_extern_rust_fn(ccx,
f.sig.inputs.as_slice(),
f.sig.output,
name,
name.as_slice(),
did)
}
_ => {
let llty = type_of(ccx, t);
get_extern_const(&mut *ccx.externs.borrow_mut(), ccx.llmod, name,
get_extern_const(&mut *ccx.externs.borrow_mut(),
ccx.llmod,
name.as_slice(),
llty)
}
}
@ -1443,7 +1457,7 @@ pub fn trans_fn(ccx: &CrateContext,
param_substs: Option<&param_substs>,
id: ast::NodeId,
attrs: &[ast::Attribute]) {
let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id).to_owned());
let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id).to_strbuf());
debug!("trans_fn(param_substs={})", param_substs.map(|s| s.repr(ccx.tcx())));
let _icx = push_ctxt("trans_fn");
let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx(), id));
@ -1661,7 +1675,7 @@ pub fn trans_mod(ccx: &CrateContext, m: &ast::Mod) {
}
}
fn finish_register_fn(ccx: &CrateContext, sp: Span, sym: ~str, node_id: ast::NodeId,
fn finish_register_fn(ccx: &CrateContext, sp: Span, sym: StrBuf, node_id: ast::NodeId,
llfn: ValueRef) {
ccx.item_symbols.borrow_mut().insert(node_id, sym);
@ -1676,7 +1690,7 @@ fn finish_register_fn(ccx: &CrateContext, sp: Span, sym: ~str, node_id: ast::Nod
fn register_fn(ccx: &CrateContext,
sp: Span,
sym: ~str,
sym: StrBuf,
node_id: ast::NodeId,
node_type: ty::t)
-> ValueRef {
@ -1692,7 +1706,7 @@ fn register_fn(ccx: &CrateContext,
false,
f.sig.inputs.as_slice(),
f.sig.output,
sym);
sym.as_slice());
finish_register_fn(ccx, sp, sym, node_id, llfn);
llfn
}
@ -1700,14 +1714,14 @@ fn register_fn(ccx: &CrateContext,
// only use this for foreign function ABIs and glue, use `register_fn` for Rust functions
pub fn register_fn_llvmty(ccx: &CrateContext,
sp: Span,
sym: ~str,
sym: StrBuf,
node_id: ast::NodeId,
cc: lib::llvm::CallConv,
fn_ty: Type,
output: ty::t) -> ValueRef {
debug!("register_fn_llvmty id={} sym={}", node_id, sym);
let llfn = decl_fn(ccx.llmod, sym, cc, fn_ty, output);
let llfn = decl_fn(ccx.llmod, sym.as_slice(), cc, fn_ty, output);
finish_register_fn(ccx, sp, sym, node_id, llfn);
llfn
}
@ -1752,7 +1766,7 @@ pub fn create_entry_wrapper(ccx: &CrateContext,
let (start_fn, args) = if use_start_lang_item {
let start_def_id = match ccx.tcx.lang_items.require(StartFnLangItem) {
Ok(id) => id,
Err(s) => { ccx.sess().fatal(s); }
Err(s) => { ccx.sess().fatal(s.as_slice()); }
};
let start_fn = if start_def_id.krate == ast::LOCAL_CRATE {
get_item_val(ccx, start_def_id.node)
@ -1796,15 +1810,15 @@ pub fn create_entry_wrapper(ccx: &CrateContext,
}
fn exported_name(ccx: &CrateContext, id: ast::NodeId,
ty: ty::t, attrs: &[ast::Attribute]) -> ~str {
ty: ty::t, attrs: &[ast::Attribute]) -> StrBuf {
match attr::first_attr_value_str_by_name(attrs, "export_name") {
// Use provided name
Some(name) => name.get().to_owned(),
Some(name) => name.get().to_strbuf(),
_ => ccx.tcx.map.with_path(id, |mut path| {
if attr::contains_name(attrs, "no_mangle") {
// Don't mangle
path.last().unwrap().to_str()
path.last().unwrap().to_str().to_strbuf()
} else {
// Usual name mangling
mangle_exported_name(ccx, path, ty, id)
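
`exported_name` picks the linker-visible name in three steps: an explicit `#[export_name]` value wins, `#[no_mangle]` keeps the plain item name, and anything else goes through `mangle_exported_name`. A reduced sketch of that decision; the boolean flag, the `mangle` closure, and the `_ZN4demo...` output are illustrative stand-ins for the attribute lookups and the real mangler:

    fn exported_name(export_name: Option<&str>,
                     no_mangle: bool,
                     item_name: &str,
                     mangle: impl Fn(&str) -> String) -> String {
        match export_name {
            // #[export_name = "..."]: use the provided name verbatim.
            Some(name) => name.to_string(),
            // #[no_mangle]: keep the source-level item name.
            None if no_mangle => item_name.to_string(),
            // Otherwise apply the usual symbol mangling.
            None => mangle(item_name),
        }
    }

    fn main() {
        let mangled = |s: &str| format!("_ZN4demo{}{}E", s.len(), s);
        assert_eq!(exported_name(Some("my_c_name"), false, "f", &mangled), "my_c_name");
        assert_eq!(exported_name(None, true, "f", &mangled), "f");
        assert_eq!(exported_name(None, false, "f", &mangled), "_ZN4demo1fE");
    }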
@ -1854,7 +1868,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
unsafe {
let llty = llvm::LLVMTypeOf(v);
let g = sym.with_c_str(|buf| {
let g = sym.as_slice().with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, llty, buf)
});
@ -2096,7 +2110,12 @@ pub fn write_metadata(cx: &CrateContext, krate: &ast::Crate) -> Vec<u8> {
});
unsafe {
llvm::LLVMSetInitializer(llglobal, llconst);
cx.sess().targ_cfg.target_strs.meta_sect_name.with_c_str(|buf| {
cx.sess()
.targ_cfg
.target_strs
.meta_sect_name
.as_slice()
.with_c_str(|buf| {
llvm::LLVMSetSection(llglobal, buf)
});
}
@ -2126,7 +2145,8 @@ pub fn trans_crate(krate: ast::Crate,
}
}
let link_meta = link::build_link_meta(&krate, output.out_filestem);
let link_meta = link::build_link_meta(&krate,
output.out_filestem.as_slice());
// Append ".rs" to crate name as LLVM module identifier.
//
@ -2186,8 +2206,8 @@ pub fn trans_crate(krate: ast::Crate,
let link_meta = ccx.link_meta.clone();
let llmod = ccx.llmod;
let mut reachable: Vec<~str> = ccx.reachable.iter().filter_map(|id| {
ccx.item_symbols.borrow().find(id).map(|s| s.to_owned())
let mut reachable: Vec<StrBuf> = ccx.reachable.iter().filter_map(|id| {
ccx.item_symbols.borrow().find(id).map(|s| s.to_strbuf())
}).collect();
// Make sure that some other crucial symbols are not eliminated from the
@ -2196,12 +2216,13 @@ pub fn trans_crate(krate: ast::Crate,
// symbol. This symbol is required for use by the libmorestack library that
// we link in, so we must ensure that this symbol is not internalized (if
// defined in the crate).
reachable.push("main".to_owned());
reachable.push("rust_stack_exhausted".to_owned());
reachable.push("main".to_strbuf());
reachable.push("rust_stack_exhausted".to_strbuf());
// referenced from .eh_frame section on some platforms
reachable.push("rust_eh_personality".to_owned());
reachable.push("rust_eh_personality_catch".to_owned()); // referenced from rt/rust_try.ll
reachable.push("rust_eh_personality".to_strbuf());
// referenced from rt/rust_try.ll
reachable.push("rust_eh_personality_catch".to_strbuf());
let metadata_module = ccx.metadata_llmod;
let formats = ccx.tcx.dependency_formats.borrow().clone();
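
The `reachable` list collects the symbol of every item that survived reachability analysis and then appends names the runtime and unwinder need unconditionally (`main`, `rust_stack_exhausted`, the `rust_eh_personality*` routines), so LLVM's internalize pass cannot strip them. A toy sketch of that collected-plus-hard-coded list, with `u32` node ids and a plain `HashMap` standing in for the compiler's tables:

    use std::collections::HashMap;

    fn reachable_symbols(item_symbols: &HashMap<u32, String>,
                         reachable_ids: &[u32]) -> Vec<String> {
        // Symbols of the items the reachability pass kept...
        let mut reachable: Vec<String> = reachable_ids.iter()
            .filter_map(|id| item_symbols.get(id).cloned())
            .collect();
        // ...plus names referenced from outside the crate's own code.
        for extra in ["main", "rust_stack_exhausted",
                      "rust_eh_personality", "rust_eh_personality_catch"] {
            reachable.push(extra.to_string());
        }
        reachable
    }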

View File

@ -122,7 +122,7 @@ pub fn Invoke(cx: &Block,
terminate(cx, "Invoke");
debug!("Invoke({} with arguments ({}))",
cx.val_to_str(fn_),
args.iter().map(|a| cx.val_to_str(*a)).collect::<Vec<~str>>().connect(", "));
args.iter().map(|a| cx.val_to_str(*a)).collect::<Vec<StrBuf>>().connect(", "));
B(cx).invoke(fn_, args, then, catch, attributes)
}

View File

@ -81,8 +81,7 @@ impl<'a> Builder<'a> {
s.push_char('/');
s.push_str(category);
let s = s.into_owned();
let n = match h.find_equiv(&s) {
let n = match h.find(&s) {
Some(&n) => n,
_ => 0u
};
@ -805,7 +804,7 @@ impl<'a> Builder<'a> {
self.ccx.tn.val_to_str(llfn),
args.iter()
.map(|&v| self.ccx.tn.val_to_str(v))
.collect::<Vec<~str>>()
.collect::<Vec<StrBuf>>()
.connect(", "));
unsafe {

View File

@ -599,7 +599,9 @@ impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {
{
let name = scope.block_name("clean");
debug!("generating cleanups for {}", name);
let bcx_in = self.new_block(label.is_unwind(), name, None);
let bcx_in = self.new_block(label.is_unwind(),
name.as_slice(),
None);
let mut bcx_out = bcx_in;
for cleanup in scope.cleanups.iter().rev() {
if cleanup_is_suitable_for(*cleanup, label) {
@ -649,7 +651,7 @@ impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {
Some(llbb) => { return llbb; }
None => {
let name = last_scope.block_name("unwind");
pad_bcx = self.new_block(true, name, None);
pad_bcx = self.new_block(true, name.as_slice(), None);
last_scope.cached_landing_pad = Some(pad_bcx.llbb);
}
}
@ -731,16 +733,16 @@ impl<'a> CleanupScope<'a> {
self.cleanups.iter().any(|c| c.clean_on_unwind())
}
fn block_name(&self, prefix: &str) -> ~str {
fn block_name(&self, prefix: &str) -> StrBuf {
/*!
* Returns a suitable name to use for the basic block that
* handles this cleanup scope
*/
match self.kind {
CustomScopeKind => format!("{}_custom_", prefix),
AstScopeKind(id) => format!("{}_ast_{}_", prefix, id),
LoopScopeKind(id, _) => format!("{}_loop_{}_", prefix, id),
CustomScopeKind => format_strbuf!("{}_custom_", prefix),
AstScopeKind(id) => format_strbuf!("{}_ast_{}_", prefix, id),
LoopScopeKind(id, _) => format_strbuf!("{}_loop_{}_", prefix, id),
}
}
}

View File

@ -104,8 +104,8 @@ pub struct EnvValue {
}
impl EnvValue {
pub fn to_str(&self, ccx: &CrateContext) -> ~str {
format!("{}({})", self.action, self.datum.to_str(ccx))
pub fn to_str(&self, ccx: &CrateContext) -> StrBuf {
format_strbuf!("{}({})", self.action, self.datum.to_str(ccx))
}
}
@ -354,7 +354,7 @@ pub fn trans_expr_fn<'a>(
true,
f.sig.inputs.as_slice(),
f.sig.output,
s);
s.as_slice());
// set an inline hint for all closures
set_inline_hint(llfn);
@ -417,9 +417,13 @@ pub fn get_wrapper_for_bare_fn(ccx: &CrateContext,
true,
f.sig.inputs.as_slice(),
f.sig.output,
name)
name.as_slice())
} else {
decl_rust_fn(ccx, true, f.sig.inputs.as_slice(), f.sig.output, name)
decl_rust_fn(ccx,
true,
f.sig.inputs.as_slice(),
f.sig.output,
name.as_slice())
};
ccx.closure_bare_wrapper_cache.borrow_mut().insert(fn_ptr, llfn);

View File

@ -173,7 +173,7 @@ pub fn BuilderRef_res(b: BuilderRef) -> BuilderRef_res {
}
}
pub type ExternMap = HashMap<~str, ValueRef>;
pub type ExternMap = HashMap<StrBuf, ValueRef>;
// Here `self_ty` is the real type of the self parameter to this method. It
// will only be set in the case of default methods.
@ -194,12 +194,12 @@ impl param_substs {
}
}
fn param_substs_to_str(this: &param_substs, tcx: &ty::ctxt) -> ~str {
format!("param_substs({})", this.substs.repr(tcx))
fn param_substs_to_str(this: &param_substs, tcx: &ty::ctxt) -> StrBuf {
format_strbuf!("param_substs({})", this.substs.repr(tcx))
}
impl Repr for param_substs {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
param_substs_to_str(self, tcx)
}
}
@ -442,15 +442,15 @@ impl<'a> Block<'a> {
}
pub fn sess(&self) -> &'a Session { self.fcx.ccx.sess() }
pub fn ident(&self, ident: Ident) -> ~str {
token::get_ident(ident).get().to_str()
pub fn ident(&self, ident: Ident) -> StrBuf {
token::get_ident(ident).get().to_strbuf()
}
pub fn node_id_to_str(&self, id: ast::NodeId) -> ~str {
self.tcx().map.node_to_str(id).to_owned()
pub fn node_id_to_str(&self, id: ast::NodeId) -> StrBuf {
self.tcx().map.node_to_str(id).to_strbuf()
}
pub fn expr_to_str(&self, e: &ast::Expr) -> ~str {
pub fn expr_to_str(&self, e: &ast::Expr) -> StrBuf {
e.repr(self.tcx())
}
@ -464,21 +464,21 @@ impl<'a> Block<'a> {
}
}
pub fn val_to_str(&self, val: ValueRef) -> ~str {
pub fn val_to_str(&self, val: ValueRef) -> StrBuf {
self.ccx().tn.val_to_str(val)
}
pub fn llty_str(&self, ty: Type) -> ~str {
pub fn llty_str(&self, ty: Type) -> StrBuf {
self.ccx().tn.type_to_str(ty)
}
pub fn ty_to_str(&self, t: ty::t) -> ~str {
pub fn ty_to_str(&self, t: ty::t) -> StrBuf {
t.repr(self.tcx())
}
pub fn to_str(&self) -> ~str {
pub fn to_str(&self) -> StrBuf {
let blk: *Block = self;
format!("[block {}]", blk)
format_strbuf!("[block {}]", blk)
}
}

View File

@ -45,9 +45,9 @@ pub struct Stats {
pub n_inlines: Cell<uint>,
pub n_closures: Cell<uint>,
pub n_llvm_insns: Cell<uint>,
pub llvm_insns: RefCell<HashMap<~str, uint>>,
pub llvm_insns: RefCell<HashMap<StrBuf, uint>>,
// (ident, time-in-ms, llvm-instructions)
pub fn_stats: RefCell<Vec<(~str, uint, uint)> >,
pub fn_stats: RefCell<Vec<(StrBuf, uint, uint)> >,
}
pub struct CrateContext {
@ -60,7 +60,7 @@ pub struct CrateContext {
pub item_vals: RefCell<NodeMap<ValueRef>>,
pub exp_map2: resolve::ExportMap2,
pub reachable: NodeSet,
pub item_symbols: RefCell<NodeMap<~str>>,
pub item_symbols: RefCell<NodeMap<StrBuf>>,
pub link_meta: LinkMeta,
pub drop_glues: RefCell<HashMap<ty::t, ValueRef>>,
pub tydescs: RefCell<HashMap<ty::t, Rc<tydesc_info>>>,
@ -109,8 +109,8 @@ pub struct CrateContext {
pub llsizingtypes: RefCell<HashMap<ty::t, Type>>,
pub adt_reprs: RefCell<HashMap<ty::t, Rc<adt::Repr>>>,
pub symbol_hasher: RefCell<Sha256>,
pub type_hashcodes: RefCell<HashMap<ty::t, ~str>>,
pub all_llvm_symbols: RefCell<HashSet<~str>>,
pub type_hashcodes: RefCell<HashMap<ty::t, StrBuf>>,
pub all_llvm_symbols: RefCell<HashSet<StrBuf>>,
pub tcx: ty::ctxt,
pub stats: Stats,
pub int_type: Type,
@ -141,16 +141,30 @@ impl CrateContext {
let metadata_llmod = format!("{}_metadata", name).with_c_str(|buf| {
llvm::LLVMModuleCreateWithNameInContext(buf, llcx)
});
tcx.sess.targ_cfg.target_strs.data_layout.with_c_str(|buf| {
tcx.sess
.targ_cfg
.target_strs
.data_layout
.as_slice()
.with_c_str(|buf| {
llvm::LLVMSetDataLayout(llmod, buf);
llvm::LLVMSetDataLayout(metadata_llmod, buf);
});
tcx.sess.targ_cfg.target_strs.target_triple.with_c_str(|buf| {
tcx.sess
.targ_cfg
.target_strs
.target_triple
.as_slice()
.with_c_str(|buf| {
llvm::LLVMRustSetNormalizedTarget(llmod, buf);
llvm::LLVMRustSetNormalizedTarget(metadata_llmod, buf);
});
let td = mk_target_data(tcx.sess.targ_cfg.target_strs.data_layout);
let td = mk_target_data(tcx.sess
.targ_cfg
.target_strs
.data_layout
.as_slice());
let dbg_cx = if tcx.sess.opts.debuginfo != NoDebugInfo {
Some(debuginfo::CrateDebugContext::new(llmod))

View File

@ -40,7 +40,7 @@ pub fn trans_stmt<'a>(cx: &'a Block<'a>,
debug!("trans_stmt({})", s.repr(cx.tcx()));
if cx.sess().asm_comments() {
add_span_comment(cx, s.span, s.repr(cx.tcx()));
add_span_comment(cx, s.span, s.repr(cx.tcx()).as_slice());
}
let mut bcx = cx;

View File

@ -624,11 +624,11 @@ impl<K:KindOps> Datum<K> {
}
#[allow(dead_code)] // useful for debugging
pub fn to_str(&self, ccx: &CrateContext) -> ~str {
format!("Datum({}, {}, {:?})",
ccx.tn.val_to_str(self.val),
ty_to_str(ccx.tcx(), self.ty),
self.kind)
pub fn to_str(&self, ccx: &CrateContext) -> StrBuf {
format_strbuf!("Datum({}, {}, {:?})",
ccx.tn.val_to_str(self.val),
ty_to_str(ccx.tcx(), self.ty),
self.kind)
}
pub fn appropriate_rvalue_mode(&self, ccx: &CrateContext) -> RvalueMode {

View File

@ -178,7 +178,7 @@ pub struct CrateDebugContext {
llcontext: ContextRef,
builder: DIBuilderRef,
current_debug_location: Cell<DebugLocation>,
created_files: RefCell<HashMap<~str, DIFile>>,
created_files: RefCell<HashMap<StrBuf, DIFile>>,
created_types: RefCell<HashMap<uint, DIType>>,
created_enum_disr_types: RefCell<HashMap<ast::DefId, DIType>>,
namespace_map: RefCell<HashMap<Vec<ast::Name>, Rc<NamespaceTreeNode>>>,
@ -343,8 +343,8 @@ pub fn create_global_var_metadata(cx: &CrateContext,
let linkage_name = namespace_node.mangled_name_of_contained_item(var_name);
let var_scope = namespace_node.scope;
var_name.with_c_str(|var_name| {
linkage_name.with_c_str(|linkage_name| {
var_name.as_slice().with_c_str(|var_name| {
linkage_name.as_slice().with_c_str(|linkage_name| {
unsafe {
llvm::LLVMDIBuilderCreateStaticVariable(DIB(cx),
var_scope,
@ -725,7 +725,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
let containing_scope = namespace_node.scope;
(linkage_name, containing_scope)
} else {
(function_name.as_slice().to_owned(), file_metadata)
(function_name.as_slice().to_strbuf(), file_metadata)
};
// Clang sets this parameter to the opening brace of the function's block, so let's do this too.
@ -734,7 +734,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
let is_local_to_unit = is_node_local_to_unit(cx, fn_ast_id);
let fn_metadata = function_name.as_slice().with_c_str(|function_name| {
linkage_name.with_c_str(|linkage_name| {
linkage_name.as_slice().with_c_str(|linkage_name| {
unsafe {
llvm::LLVMDIBuilderCreateFunction(
DIB(cx),
@ -838,7 +838,8 @@ pub fn create_function_debug_context(cx: &CrateContext,
let actual_self_type = self_type.unwrap();
// Add self type name to <...> clause of function name
let actual_self_type_name = ppaux::ty_to_str(cx.tcx(), actual_self_type);
name_to_append_suffix_to.push_str(actual_self_type_name);
name_to_append_suffix_to.push_str(
actual_self_type_name.as_slice());
if generics.is_type_parameterized() {
name_to_append_suffix_to.push_str(",");
@ -882,7 +883,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
let actual_type = *actual_types.get(index);
// Add actual type name to <...> clause of function name
let actual_type_name = ppaux::ty_to_str(cx.tcx(), actual_type);
name_to_append_suffix_to.push_str(actual_type_name);
name_to_append_suffix_to.push_str(actual_type_name.as_slice());
if index != generics.ty_params.len() - 1 {
name_to_append_suffix_to.push_str(",");
@ -1107,7 +1108,7 @@ fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile {
});
let mut created_files = debug_context(cx).created_files.borrow_mut();
created_files.insert(full_path.to_owned(), file_metadata);
created_files.insert(full_path.to_strbuf(), file_metadata);
return file_metadata;
}
@ -1182,7 +1183,7 @@ fn pointer_type_metadata(cx: &CrateContext,
let pointer_llvm_type = type_of::type_of(cx, pointer_type);
let (pointer_size, pointer_align) = size_and_align_of(cx, pointer_llvm_type);
let name = ppaux::ty_to_str(cx.tcx(), pointer_type);
let ptr_metadata = name.with_c_str(|name| {
let ptr_metadata = name.as_slice().with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreatePointerType(
DIB(cx),
@ -1232,9 +1233,9 @@ impl StructMemberDescriptionFactory {
-> Vec<MemberDescription> {
self.fields.iter().map(|field| {
let name = if field.ident.name == special_idents::unnamed_field.name {
"".to_owned()
"".to_strbuf()
} else {
token::get_ident(field.ident).get().to_str()
token::get_ident(field.ident).get().to_strbuf()
};
MemberDescription {
@ -1263,7 +1264,7 @@ fn prepare_struct_metadata(cx: &CrateContext,
let struct_metadata_stub = create_struct_stub(cx,
struct_llvm_type,
struct_name,
struct_name.as_slice(),
containing_scope,
file_metadata,
definition_span);
@ -1335,7 +1336,7 @@ impl TupleMemberDescriptionFactory {
-> Vec<MemberDescription> {
self.component_types.iter().map(|&component_type| {
MemberDescription {
name: "".to_owned(),
name: "".to_strbuf(),
llvm_type: type_of::type_of(cx, component_type),
type_metadata: type_metadata(cx, component_type, self.span),
offset: ComputedMemberOffset,
@ -1359,7 +1360,7 @@ fn prepare_tuple_metadata(cx: &CrateContext,
cache_id: cache_id_for_type(tuple_type),
metadata_stub: create_struct_stub(cx,
tuple_llvm_type,
tuple_name,
tuple_name.as_slice(),
file_metadata,
file_metadata,
span),
@ -1413,7 +1414,7 @@ impl GeneralMemberDescriptionFactory {
self.file_metadata,
codemap::DUMMY_SP);
MemberDescription {
name: "".to_owned(),
name: "".to_strbuf(),
llvm_type: variant_llvm_type,
type_metadata: variant_type_metadata,
offset: FixedMemberOffset { bytes: 0 },
@ -1423,7 +1424,7 @@ impl GeneralMemberDescriptionFactory {
}
struct EnumVariantMemberDescriptionFactory {
args: Vec<(~str, ty::t)> ,
args: Vec<(StrBuf, ty::t)> ,
discriminant_type_metadata: Option<DIType>,
span: Span,
}
@ -1433,7 +1434,7 @@ impl EnumVariantMemberDescriptionFactory {
-> Vec<MemberDescription> {
self.args.iter().enumerate().map(|(i, &(ref name, ty))| {
MemberDescription {
name: name.to_str(),
name: name.to_strbuf(),
llvm_type: type_of::type_of(cx, ty),
type_metadata: match self.discriminant_type_metadata {
Some(metadata) if i == 0 => metadata,
@ -1491,9 +1492,9 @@ fn describe_enum_variant(cx: &CrateContext,
}
// Build an array of (field name, field type) pairs to be captured in the factory closure.
let args: Vec<(~str, ty::t)> = arg_names.iter()
let args: Vec<(StrBuf, ty::t)> = arg_names.iter()
.zip(struct_def.fields.iter())
.map(|(s, &t)| (s.to_str(), t))
.map(|(s, &t)| (s.to_strbuf(), t))
.collect();
let member_description_factory =
@ -1520,13 +1521,14 @@ fn prepare_enum_metadata(cx: &CrateContext,
// For empty enums there is an early exit. Just describe it as an empty struct with the
// appropriate type name
if ty::type_is_empty(cx.tcx(), enum_type) {
let empty_type_metadata = composite_type_metadata(cx,
Type::nil(cx),
enum_name,
[],
containing_scope,
file_metadata,
definition_span);
let empty_type_metadata = composite_type_metadata(
cx,
Type::nil(cx),
enum_name.as_slice(),
[],
containing_scope,
file_metadata,
definition_span);
return FinalMetadata(empty_type_metadata);
}
@ -1621,8 +1623,8 @@ fn prepare_enum_metadata(cx: &CrateContext,
let (enum_type_size, enum_type_align) = size_and_align_of(cx, enum_llvm_type);
let unique_id = generate_unique_type_id("DI_ENUM_");
let enum_metadata = enum_name.with_c_str(|enum_name| {
unique_id.with_c_str(|unique_id| {
let enum_metadata = enum_name.as_slice().with_c_str(|enum_name| {
unique_id.as_slice().with_c_str(|unique_id| {
unsafe {
llvm::LLVMDIBuilderCreateUnionType(
DIB(cx),
@ -1694,7 +1696,7 @@ enum MemberOffset {
}
struct MemberDescription {
name: ~str,
name: StrBuf,
llvm_type: Type,
type_metadata: DIType,
offset: MemberOffset,
@ -1764,7 +1766,7 @@ fn set_members_of_composite_type(cx: &CrateContext,
ComputedMemberOffset => machine::llelement_offset(cx, composite_llvm_type, i)
};
member_description.name.with_c_str(|member_name| {
member_description.name.as_slice().with_c_str(|member_name| {
unsafe {
llvm::LLVMDIBuilderCreateMemberType(
DIB(cx),
@ -1806,7 +1808,7 @@ fn create_struct_stub(cx: &CrateContext,
return unsafe {
struct_type_name.with_c_str(|name| {
unique_id.with_c_str(|unique_id| {
unique_id.as_slice().with_c_str(|unique_id| {
// LLVMDIBuilderCreateStructType() wants an empty array. A null pointer will lead to
// hard to trace and debug LLVM assertions later on in llvm/lib/IR/Value.cpp
let empty_array = create_DIArray(DIB(cx), []);
@ -1853,31 +1855,31 @@ fn boxed_type_metadata(cx: &CrateContext,
let member_descriptions = [
MemberDescription {
name: "refcnt".to_owned(),
name: "refcnt".to_strbuf(),
llvm_type: *member_llvm_types.get(0),
type_metadata: type_metadata(cx, int_type, codemap::DUMMY_SP),
offset: ComputedMemberOffset,
},
MemberDescription {
name: "drop_glue".to_owned(),
name: "drop_glue".to_strbuf(),
llvm_type: *member_llvm_types.get(1),
type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: "prev".to_owned(),
name: "prev".to_strbuf(),
llvm_type: *member_llvm_types.get(2),
type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: "next".to_owned(),
name: "next".to_strbuf(),
llvm_type: *member_llvm_types.get(3),
type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: "val".to_owned(),
name: "val".to_strbuf(),
llvm_type: *member_llvm_types.get(4),
type_metadata: content_type_metadata,
offset: ComputedMemberOffset,
@ -1964,19 +1966,19 @@ fn vec_metadata(cx: &CrateContext,
let member_descriptions = [
MemberDescription {
name: "fill".to_owned(),
name: "fill".to_strbuf(),
llvm_type: *member_llvm_types.get(0),
type_metadata: int_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: "alloc".to_owned(),
name: "alloc".to_strbuf(),
llvm_type: *member_llvm_types.get(1),
type_metadata: int_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: "elements".to_owned(),
name: "elements".to_strbuf(),
llvm_type: *member_llvm_types.get(2),
type_metadata: array_type_metadata,
offset: ComputedMemberOffset,
@ -2021,13 +2023,13 @@ fn vec_slice_metadata(cx: &CrateContext,
let member_descriptions = [
MemberDescription {
name: "data_ptr".to_owned(),
name: "data_ptr".to_strbuf(),
llvm_type: *member_llvm_types.get(0),
type_metadata: type_metadata(cx, data_ptr_type, span),
offset: ComputedMemberOffset,
},
MemberDescription {
name: "length".to_owned(),
name: "length".to_strbuf(),
llvm_type: *member_llvm_types.get(1),
type_metadata: type_metadata(cx, ty::mk_uint(), span),
offset: ComputedMemberOffset,
@ -2042,7 +2044,7 @@ fn vec_slice_metadata(cx: &CrateContext,
return composite_type_metadata(
cx,
slice_llvm_type,
slice_type_name,
slice_type_name.as_slice(),
member_descriptions,
file_metadata,
file_metadata,
@ -2099,11 +2101,15 @@ fn trait_metadata(cx: &CrateContext,
// the trait's methods.
let last = ty::with_path(cx.tcx(), def_id, |mut path| path.last().unwrap());
let ident_string = token::get_name(last.name());
let name = ppaux::trait_store_to_str(cx.tcx(), trait_store) +
ident_string.get();
let mut name = ppaux::trait_store_to_str(cx.tcx(), trait_store);
name.push_str(ident_string.get());
// Add type and region parameters
let name = ppaux::parameterized(cx.tcx(), name, &substs.regions,
substs.tps.as_slice(), def_id, true);
let name = ppaux::parameterized(cx.tcx(),
name.as_slice(),
&substs.regions,
substs.tps.as_slice(),
def_id,
true);
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);
@ -2114,7 +2120,7 @@ fn trait_metadata(cx: &CrateContext,
composite_type_metadata(cx,
trait_llvm_type,
name,
name.as_slice(),
[],
containing_scope,
file_metadata,
@ -2136,7 +2142,8 @@ fn type_metadata(cx: &CrateContext,
pointer_type: ty::t,
type_in_box: ty::t)
-> DIType {
let content_type_name: &str = ppaux::ty_to_str(cx.tcx(), type_in_box);
let content_type_name = ppaux::ty_to_str(cx.tcx(), type_in_box);
let content_type_name = content_type_name.as_slice();
let content_llvm_type = type_of::type_of(cx, type_in_box);
let content_type_metadata = type_metadata(
cx,
@ -2296,10 +2303,12 @@ fn cache_id_for_type(t: ty::t) -> uint {
// Used to avoid LLVM metadata uniquing problems. See `create_struct_stub()` and
// `prepare_enum_metadata()`.
fn generate_unique_type_id(prefix: &'static str) -> ~str {
fn generate_unique_type_id(prefix: &'static str) -> StrBuf {
unsafe {
static mut unique_id_counter: atomics::AtomicUint = atomics::INIT_ATOMIC_UINT;
format!("{}{}", prefix, unique_id_counter.fetch_add(1, atomics::SeqCst))
format_strbuf!("{}{}",
prefix,
unique_id_counter.fetch_add(1, atomics::SeqCst))
}
}
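
`generate_unique_type_id` hands out debug-info type ids by bumping a process-global atomic counter and appending the value to a prefix. The same idea in current Rust, where a `static AtomicUsize` no longer needs the `static mut`/`unsafe` dance shown in the patch:

    use std::sync::atomic::{AtomicUsize, Ordering};

    static UNIQUE_ID_COUNTER: AtomicUsize = AtomicUsize::new(0);

    fn generate_unique_type_id(prefix: &str) -> String {
        // fetch_add returns the previous value, so every caller gets a fresh id.
        let n = UNIQUE_ID_COUNTER.fetch_add(1, Ordering::SeqCst);
        format!("{}{}", prefix, n)
    }

    fn main() {
        assert_eq!(generate_unique_type_id("DI_ENUM_"), "DI_ENUM_0");
        assert_eq!(generate_unique_type_id("DI_ENUM_"), "DI_ENUM_1");
    }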
@ -2796,7 +2805,7 @@ fn populate_scope_map(cx: &CrateContext,
ast::ExprInlineAsm(ast::InlineAsm { inputs: ref inputs,
outputs: ref outputs,
.. }) => {
// inputs, outputs: ~[(~str, @expr)]
// inputs, outputs: ~[(StrBuf, @expr)]
for &(_, exp) in inputs.iter() {
walk_expr(cx, exp, scope_stack, scope_map);
}
@ -2821,7 +2830,7 @@ struct NamespaceTreeNode {
}
impl NamespaceTreeNode {
fn mangled_name_of_contained_item(&self, item_name: &str) -> ~str {
fn mangled_name_of_contained_item(&self, item_name: &str) -> StrBuf {
fn fill_nested(node: &NamespaceTreeNode, output: &mut StrBuf) {
match node.parent {
Some(ref parent) => fill_nested(&*parent.upgrade().unwrap(), output),
@ -2837,7 +2846,7 @@ impl NamespaceTreeNode {
name.push_str(format!("{}", item_name.len()));
name.push_str(item_name);
name.push_char('E');
name.into_owned()
name
}
}
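
`mangled_name_of_contained_item` finishes an Itanium-style symbol: the namespace path written by `fill_nested`, then the item name prefixed with its length, then a closing `E`. A reduced sketch with the namespace tree flattened to a slice of segments; only the length-prefixed suffix and the trailing `E` are visible in the hunk, so the `_ZN` prefix and the per-segment encoding are assumptions about the elided part:

    fn mangle(path: &[&str], item_name: &str) -> String {
        let mut name = String::from("_ZN");
        // Each path segment is emitted as <len><segment>, C++-mangling style.
        for segment in path {
            name.push_str(&segment.len().to_string());
            name.push_str(segment);
        }
        name.push_str(&item_name.len().to_string());
        name.push_str(item_name);
        name.push('E');
        name
    }

    fn main() {
        assert_eq!(mangle(&["std", "io"], "println"), "_ZN3std2io7printlnE");
    }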

View File

@ -86,10 +86,10 @@ pub enum Dest {
}
impl Dest {
pub fn to_str(&self, ccx: &CrateContext) -> ~str {
pub fn to_str(&self, ccx: &CrateContext) -> StrBuf {
match *self {
SaveIn(v) => format!("SaveIn({})", ccx.tn.val_to_str(v)),
Ignore => "Ignore".to_owned()
SaveIn(v) => format_strbuf!("SaveIn({})", ccx.tn.val_to_str(v)),
Ignore => "Ignore".to_strbuf()
}
}
}
@ -545,7 +545,7 @@ fn trans_def<'a>(bcx: &'a Block<'a>,
let symbol = csearch::get_symbol(
&bcx.ccx().sess().cstore,
did);
let llval = symbol.with_c_str(|buf| {
let llval = symbol.as_slice().with_c_str(|buf| {
llvm::LLVMAddGlobal(bcx.ccx().llmod,
llty.to_ref(),
buf)

View File

@ -445,7 +445,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) {
let lname = link_name(foreign_item);
ccx.item_symbols.borrow_mut().insert(foreign_item.id,
lname.get().to_owned());
lname.get().to_strbuf());
}
}
@ -476,7 +476,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) {
pub fn register_rust_fn_with_foreign_abi(ccx: &CrateContext,
sp: Span,
sym: ~str,
sym: StrBuf,
node_id: ast::NodeId)
-> ValueRef {
let _icx = push_ctxt("foreign::register_foreign_fn");
@ -553,7 +553,7 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext,
false,
f.sig.inputs.as_slice(),
f.sig.output,
ps);
ps.as_slice());
base::set_llvm_fn_attrs(attrs, llfn);
base::trans_fn(ccx, decl, body, llfn, None, id, []);
llfn

View File

@ -203,7 +203,7 @@ fn make_visit_glue<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
ty::ReStatic) {
Ok(pair) => pair,
Err(s) => {
bcx.tcx().sess.fatal(s);
bcx.tcx().sess.fatal(s.as_slice());
}
};
let v = PointerCast(bcx, v, type_of(bcx.ccx(), object_ty).ptr_to());
@ -413,14 +413,15 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> tydesc_info {
let llalign = llalign_of(ccx, llty);
let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc");
debug!("+++ declare_tydesc {} {}", ppaux::ty_to_str(ccx.tcx(), t), name);
let gvar = name.with_c_str(|buf| {
let gvar = name.as_slice().with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type().to_ref(), buf)
}
});
note_unique_llvm_symbol(ccx, name);
let ty_name = token::intern_and_get_ident(ppaux::ty_to_str(ccx.tcx(), t));
let ty_name = token::intern_and_get_ident(
ppaux::ty_to_str(ccx.tcx(), t).as_slice());
let ty_name = C_str_slice(ccx, ty_name);
debug!("--- declare_tydesc {}", ppaux::ty_to_str(ccx.tcx(), t));
@ -439,7 +440,10 @@ fn declare_generic_glue(ccx: &CrateContext, t: ty::t, llfnty: Type,
let _icx = push_ctxt("declare_generic_glue");
let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, "glue_".to_owned() + name);
debug!("{} is for type {}", fn_nm, ppaux::ty_to_str(ccx.tcx(), t));
let llfn = decl_cdecl_fn(ccx.llmod, fn_nm, llfnty, ty::mk_nil());
let llfn = decl_cdecl_fn(ccx.llmod,
fn_nm.as_slice(),
llfnty,
ty::mk_nil());
note_unique_llvm_symbol(ccx, fn_nm);
return llfn;
}
@ -452,7 +456,9 @@ fn make_generic_glue(ccx: &CrateContext,
name: &str)
-> ValueRef {
let _icx = push_ctxt("make_generic_glue");
let glue_name = format!("glue {} {}", name, ty_to_short_str(ccx.tcx(), t));
let glue_name = format_strbuf!("glue {} {}",
name,
ty_to_short_str(ccx.tcx(), t));
let _s = StatRecorder::new(ccx, glue_name);
let arena = TypedArena::new();

View File

@ -13,24 +13,24 @@ use middle::trans::type_::Type;
use lib::llvm::ValueRef;
pub trait LlvmRepr {
fn llrepr(&self, ccx: &CrateContext) -> ~str;
fn llrepr(&self, ccx: &CrateContext) -> StrBuf;
}
impl<'a, T:LlvmRepr> LlvmRepr for &'a [T] {
fn llrepr(&self, ccx: &CrateContext) -> ~str {
let reprs: Vec<~str> = self.iter().map(|t| t.llrepr(ccx)).collect();
format!("[{}]", reprs.connect(","))
fn llrepr(&self, ccx: &CrateContext) -> StrBuf {
let reprs: Vec<StrBuf> = self.iter().map(|t| t.llrepr(ccx)).collect();
format_strbuf!("[{}]", reprs.connect(","))
}
}
impl LlvmRepr for Type {
fn llrepr(&self, ccx: &CrateContext) -> ~str {
fn llrepr(&self, ccx: &CrateContext) -> StrBuf {
ccx.tn.type_to_str(*self)
}
}
impl LlvmRepr for ValueRef {
fn llrepr(&self, ccx: &CrateContext) -> ~str {
fn llrepr(&self, ccx: &CrateContext) -> StrBuf {
ccx.tn.val_to_str(*self)
}
}

View File

@ -109,9 +109,10 @@ pub fn monomorphic_fn(ccx: &CrateContext,
ccx.sess(),
ccx.tcx.map.find(fn_id.node),
|| {
(format!("while monomorphizing {:?}, couldn't find it in the \
item map (may have attempted to monomorphize an item \
defined in a different crate?)", fn_id)).to_strbuf()
format_strbuf!("while monomorphizing {:?}, couldn't find it in \
the item map (may have attempted to monomorphize \
an item defined in a different crate?)",
fn_id)
});
match map_node {
@ -212,9 +213,11 @@ pub fn monomorphic_fn(ccx: &CrateContext,
// This shouldn't need to option dance.
let mut hash_id = Some(hash_id);
let mk_lldecl = || {
let lldecl = decl_internal_rust_fn(ccx, false,
let lldecl = decl_internal_rust_fn(ccx,
false,
f.sig.inputs.as_slice(),
f.sig.output, s);
f.sig.output,
s.as_slice());
ccx.monomorphized.borrow_mut().insert(hash_id.take_unwrap(), lldecl);
lldecl
};

View File

@ -254,8 +254,9 @@ impl<'a, 'b> Reflector<'a, 'b> {
}
let extra = (vec!(
self.c_slice(token::intern_and_get_ident(ty_to_str(tcx,
t))),
self.c_slice(
token::intern_and_get_ident(ty_to_str(tcx,
t).as_slice())),
self.c_bool(named_fields),
self.c_uint(fields.len())
)).append(self.c_size_and_align(t).as_slice());
@ -288,7 +289,11 @@ impl<'a, 'b> Reflector<'a, 'b> {
let sym = mangle_internal_name_by_path_and_seq(
ast_map::Values([].iter()).chain(None), "get_disr");
let llfdecl = decl_internal_rust_fn(ccx, false, [opaqueptrty], ty::mk_u64(), sym);
let llfdecl = decl_internal_rust_fn(ccx,
false,
[opaqueptrty],
ty::mk_u64(),
sym.as_slice());
let arena = TypedArena::new();
let fcx = new_fn_ctxt(ccx, llfdecl, -1, false,
ty::mk_u64(), None, None, &arena);
@ -344,7 +349,8 @@ impl<'a, 'b> Reflector<'a, 'b> {
ty::ty_trait(..) => {
let extra = [
self.c_slice(token::intern_and_get_ident(ty_to_str(tcx, t)))
self.c_slice(token::intern_and_get_ident(
ty_to_str(tcx, t).as_slice()))
];
self.visit("trait", extra);
}

View File

@ -73,13 +73,13 @@ pub struct VecTypes {
}
impl VecTypes {
pub fn to_str(&self, ccx: &CrateContext) -> ~str {
format!("VecTypes \\{unit_ty={}, llunit_ty={}, llunit_size={}, \
llunit_alloc_size={}\\}",
ty_to_str(ccx.tcx(), self.unit_ty),
ccx.tn.type_to_str(self.llunit_ty),
ccx.tn.val_to_str(self.llunit_size),
self.llunit_alloc_size)
pub fn to_str(&self, ccx: &CrateContext) -> StrBuf {
format_strbuf!("VecTypes \\{unit_ty={}, llunit_ty={}, \
llunit_size={}, llunit_alloc_size={}\\}",
ty_to_str(ccx.tcx(), self.unit_ty),
ccx.tn.type_to_str(self.llunit_ty),
ccx.tn.val_to_str(self.llunit_size),
self.llunit_alloc_size)
}
}

View File

@ -206,7 +206,7 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
// of the enum's variants refers to the enum itself.
let repr = adt::represent_type(cx, t);
let name = llvm_type_name(cx, an_enum, did, substs.tps.as_slice());
adt::incomplete_type_of(cx, &*repr, name)
adt::incomplete_type_of(cx, &*repr, name.as_slice())
}
ty::ty_box(typ) => {
Type::at_box(cx, type_of(cx, typ)).ptr_to()
@ -264,7 +264,7 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
a_struct,
did,
substs.tps.as_slice());
adt::incomplete_type_of(cx, &*repr, name)
adt::incomplete_type_of(cx, &*repr, name.as_slice())
}
}
@ -301,18 +301,24 @@ pub enum named_ty { a_struct, an_enum }
pub fn llvm_type_name(cx: &CrateContext,
what: named_ty,
did: ast::DefId,
tps: &[ty::t]) -> ~str {
tps: &[ty::t])
-> StrBuf {
let name = match what {
a_struct => { "struct" }
an_enum => { "enum" }
};
let tstr = ppaux::parameterized(cx.tcx(), ty::item_path_str(cx.tcx(), did),
&ty::NonerasedRegions(OwnedSlice::empty()),
tps, did, false);
let tstr = ppaux::parameterized(cx.tcx(),
ty::item_path_str(cx.tcx(),
did).as_slice(),
&ty::NonerasedRegions(
OwnedSlice::empty()),
tps,
did,
false);
if did.krate == 0 {
format!("{}.{}", name, tstr)
format_strbuf!("{}.{}", name, tstr)
} else {
format!("{}.{}[\\#{}]", name, tstr, did.krate)
format_strbuf!("{}.{}[\\#{}]", name, tstr, did.krate)
}
}

View File

@ -278,7 +278,7 @@ pub struct ctxt {
pub freevars: RefCell<freevars::freevar_map>,
pub tcache: type_cache,
pub rcache: creader_cache,
pub short_names_cache: RefCell<HashMap<t, ~str>>,
pub short_names_cache: RefCell<HashMap<t, StrBuf>>,
pub needs_unwind_cleanup_cache: RefCell<HashMap<t, bool>>,
pub tc_cache: RefCell<HashMap<uint, TypeContents>>,
pub ast_ty_to_ty_cache: RefCell<NodeMap<ast_ty_to_ty_cache_entry>>,
@ -1538,7 +1538,7 @@ pub fn substs_is_noop(substs: &substs) -> bool {
substs.self_ty.is_none()
}
pub fn substs_to_str(cx: &ctxt, substs: &substs) -> ~str {
pub fn substs_to_str(cx: &ctxt, substs: &substs) -> StrBuf {
substs.repr(cx)
}
@ -3189,7 +3189,9 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
tcx.sess.bug(format!(
"no field named `{}` found in the list of fields `{:?}`",
token::get_name(name),
fields.iter().map(|f| token::get_ident(f.ident).get().to_str()).collect::<Vec<~str>>()));
fields.iter()
.map(|f| token::get_ident(f.ident).get().to_strbuf())
.collect::<Vec<StrBuf>>()));
}
pub fn method_idx(id: ast::Ident, meths: &[Rc<Method>]) -> Option<uint> {
@ -3212,34 +3214,38 @@ pub fn param_tys_in_type(ty: t) -> Vec<param_ty> {
rslt
}
pub fn ty_sort_str(cx: &ctxt, t: t) -> ~str {
pub fn ty_sort_str(cx: &ctxt, t: t) -> StrBuf {
match get(t).sty {
ty_nil | ty_bot | ty_bool | ty_char | ty_int(_) |
ty_uint(_) | ty_float(_) | ty_str => {
::util::ppaux::ty_to_str(cx, t)
}
ty_enum(id, _) => format!("enum {}", item_path_str(cx, id)),
ty_box(_) => "@-ptr".to_owned(),
ty_uniq(_) => "box".to_owned(),
ty_vec(_, _) => "vector".to_owned(),
ty_ptr(_) => "*-ptr".to_owned(),
ty_rptr(_, _) => "&-ptr".to_owned(),
ty_bare_fn(_) => "extern fn".to_owned(),
ty_closure(_) => "fn".to_owned(),
ty_trait(ref inner) => format!("trait {}", item_path_str(cx, inner.def_id)),
ty_struct(id, _) => format!("struct {}", item_path_str(cx, id)),
ty_tup(_) => "tuple".to_owned(),
ty_infer(TyVar(_)) => "inferred type".to_owned(),
ty_infer(IntVar(_)) => "integral variable".to_owned(),
ty_infer(FloatVar(_)) => "floating-point variable".to_owned(),
ty_param(_) => "type parameter".to_owned(),
ty_self(_) => "self".to_owned(),
ty_err => "type error".to_owned()
ty_enum(id, _) => format_strbuf!("enum {}", item_path_str(cx, id)),
ty_box(_) => "@-ptr".to_strbuf(),
ty_uniq(_) => "box".to_strbuf(),
ty_vec(_, _) => "vector".to_strbuf(),
ty_ptr(_) => "*-ptr".to_strbuf(),
ty_rptr(_, _) => "&-ptr".to_strbuf(),
ty_bare_fn(_) => "extern fn".to_strbuf(),
ty_closure(_) => "fn".to_strbuf(),
ty_trait(ref inner) => {
format_strbuf!("trait {}", item_path_str(cx, inner.def_id))
}
ty_struct(id, _) => {
format_strbuf!("struct {}", item_path_str(cx, id))
}
ty_tup(_) => "tuple".to_strbuf(),
ty_infer(TyVar(_)) => "inferred type".to_strbuf(),
ty_infer(IntVar(_)) => "integral variable".to_strbuf(),
ty_infer(FloatVar(_)) => "floating-point variable".to_strbuf(),
ty_param(_) => "type parameter".to_strbuf(),
ty_self(_) => "self".to_strbuf(),
ty_err => "type error".to_strbuf(),
}
}
pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> ~str {
pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> StrBuf {
/*!
*
* Explains the source of a type err in a short,
@ -3249,126 +3255,145 @@ pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> ~str {
* to present additional details, particularly when
* it comes to lifetime-related errors. */
fn tstore_to_closure(s: &TraitStore) -> ~str {
fn tstore_to_closure(s: &TraitStore) -> StrBuf {
match s {
&UniqTraitStore => "proc".to_owned(),
&RegionTraitStore(..) => "closure".to_owned()
&UniqTraitStore => "proc".to_strbuf(),
&RegionTraitStore(..) => "closure".to_strbuf()
}
}
match *err {
terr_mismatch => "types differ".to_owned(),
terr_mismatch => "types differ".to_strbuf(),
terr_fn_style_mismatch(values) => {
format!("expected {} fn but found {} fn",
values.expected.to_str(), values.found.to_str())
format_strbuf!("expected {} fn but found {} fn",
values.expected.to_str(),
values.found.to_str())
}
terr_abi_mismatch(values) => {
format!("expected {} fn but found {} fn",
values.expected.to_str(), values.found.to_str())
format_strbuf!("expected {} fn but found {} fn",
values.expected.to_str(),
values.found.to_str())
}
terr_onceness_mismatch(values) => {
format!("expected {} fn but found {} fn",
values.expected.to_str(), values.found.to_str())
format_strbuf!("expected {} fn but found {} fn",
values.expected.to_str(),
values.found.to_str())
}
terr_sigil_mismatch(values) => {
format!("expected {}, found {}",
tstore_to_closure(&values.expected),
tstore_to_closure(&values.found))
format_strbuf!("expected {}, found {}",
tstore_to_closure(&values.expected),
tstore_to_closure(&values.found))
}
terr_mutability => "values differ in mutability".to_owned(),
terr_box_mutability => "boxed values differ in mutability".to_owned(),
terr_vec_mutability => "vectors differ in mutability".to_owned(),
terr_ptr_mutability => "pointers differ in mutability".to_owned(),
terr_ref_mutability => "references differ in mutability".to_owned(),
terr_mutability => "values differ in mutability".to_strbuf(),
terr_box_mutability => {
"boxed values differ in mutability".to_strbuf()
}
terr_vec_mutability => "vectors differ in mutability".to_strbuf(),
terr_ptr_mutability => "pointers differ in mutability".to_strbuf(),
terr_ref_mutability => "references differ in mutability".to_strbuf(),
terr_ty_param_size(values) => {
format!("expected a type with {} type params \
but found one with {} type params",
values.expected, values.found)
format_strbuf!("expected a type with {} type params \
but found one with {} type params",
values.expected,
values.found)
}
terr_tuple_size(values) => {
format!("expected a tuple with {} elements \
but found one with {} elements",
values.expected, values.found)
format_strbuf!("expected a tuple with {} elements \
but found one with {} elements",
values.expected,
values.found)
}
terr_record_size(values) => {
format!("expected a record with {} fields \
but found one with {} fields",
values.expected, values.found)
format_strbuf!("expected a record with {} fields \
but found one with {} fields",
values.expected,
values.found)
}
terr_record_mutability => {
"record elements differ in mutability".to_owned()
"record elements differ in mutability".to_strbuf()
}
terr_record_fields(values) => {
format!("expected a record with field `{}` but found one with field \
`{}`",
token::get_ident(values.expected),
token::get_ident(values.found))
format_strbuf!("expected a record with field `{}` but found one \
with field `{}`",
token::get_ident(values.expected),
token::get_ident(values.found))
}
terr_arg_count => {
"incorrect number of function parameters".to_strbuf()
}
terr_arg_count => "incorrect number of function parameters".to_owned(),
terr_regions_does_not_outlive(..) => {
format!("lifetime mismatch")
"lifetime mismatch".to_strbuf()
}
terr_regions_not_same(..) => {
format!("lifetimes are not the same")
"lifetimes are not the same".to_strbuf()
}
terr_regions_no_overlap(..) => {
format!("lifetimes do not intersect")
"lifetimes do not intersect".to_strbuf()
}
terr_regions_insufficiently_polymorphic(br, _) => {
format!("expected bound lifetime parameter {}, \
but found concrete lifetime",
bound_region_ptr_to_str(cx, br))
format_strbuf!("expected bound lifetime parameter {}, \
but found concrete lifetime",
bound_region_ptr_to_str(cx, br))
}
terr_regions_overly_polymorphic(br, _) => {
format!("expected concrete lifetime, \
but found bound lifetime parameter {}",
bound_region_ptr_to_str(cx, br))
format_strbuf!("expected concrete lifetime, \
but found bound lifetime parameter {}",
bound_region_ptr_to_str(cx, br))
}
terr_trait_stores_differ(_, ref values) => {
format!("trait storage differs: expected `{}` but found `{}`",
trait_store_to_str(cx, (*values).expected),
trait_store_to_str(cx, (*values).found))
format_strbuf!("trait storage differs: expected `{}` but found \
`{}`",
trait_store_to_str(cx, (*values).expected),
trait_store_to_str(cx, (*values).found))
}
terr_sorts(values) => {
format!("expected {} but found {}",
ty_sort_str(cx, values.expected),
ty_sort_str(cx, values.found))
format_strbuf!("expected {} but found {}",
ty_sort_str(cx, values.expected),
ty_sort_str(cx, values.found))
}
terr_traits(values) => {
format!("expected trait `{}` but found trait `{}`",
item_path_str(cx, values.expected),
item_path_str(cx, values.found))
format_strbuf!("expected trait `{}` but found trait `{}`",
item_path_str(cx, values.expected),
item_path_str(cx, values.found))
}
terr_builtin_bounds(values) => {
if values.expected.is_empty() {
format!("expected no bounds but found `{}`",
values.found.user_string(cx))
format_strbuf!("expected no bounds but found `{}`",
values.found.user_string(cx))
} else if values.found.is_empty() {
format!("expected bounds `{}` but found no bounds",
values.expected.user_string(cx))
format_strbuf!("expected bounds `{}` but found no bounds",
values.expected.user_string(cx))
} else {
format!("expected bounds `{}` but found bounds `{}`",
values.expected.user_string(cx),
values.found.user_string(cx))
format_strbuf!("expected bounds `{}` but found bounds `{}`",
values.expected.user_string(cx),
values.found.user_string(cx))
}
}
terr_integer_as_char => {
format!("expected an integral type but found `char`")
"expected an integral type but found `char`".to_strbuf()
}
terr_int_mismatch(ref values) => {
format!("expected `{}` but found `{}`",
values.expected.to_str(),
values.found.to_str())
format_strbuf!("expected `{}` but found `{}`",
values.expected.to_str(),
values.found.to_str())
}
terr_float_mismatch(ref values) => {
format!("expected `{}` but found `{}`",
values.expected.to_str(),
values.found.to_str())
format_strbuf!("expected `{}` but found `{}`",
values.expected.to_str(),
values.found.to_str())
}
terr_variadic_mismatch(ref values) => {
format!("expected {} fn but found {} function",
if values.expected { "variadic" } else { "non-variadic" },
if values.found { "variadic" } else { "non-variadic" })
format_strbuf!("expected {} fn but found {} function",
if values.expected {
"variadic"
} else {
"non-variadic"
},
if values.found {
"variadic"
} else {
"non-variadic"
})
}
}
}
@ -3665,8 +3690,8 @@ pub fn substd_enum_variants(cx: &ctxt,
}).collect()
}
pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> ~str {
with_path(cx, id, |path| ast_map::path_to_str(path)).to_owned()
pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> StrBuf {
with_path(cx, id, |path| ast_map::path_to_str(path)).to_strbuf()
}
pub enum DtorKind {
@ -4231,14 +4256,14 @@ pub fn each_bound_trait_and_supertraits(tcx: &ctxt,
return true;
}
pub fn get_tydesc_ty(tcx: &ctxt) -> Result<t, ~str> {
pub fn get_tydesc_ty(tcx: &ctxt) -> Result<t, StrBuf> {
tcx.lang_items.require(TyDescStructLangItem).map(|tydesc_lang_item| {
tcx.intrinsic_defs.borrow().find_copy(&tydesc_lang_item)
.expect("Failed to resolve TyDesc")
})
}
pub fn get_opaque_ty(tcx: &ctxt) -> Result<t, ~str> {
pub fn get_opaque_ty(tcx: &ctxt) -> Result<t, StrBuf> {
tcx.lang_items.require(OpaqueStructLangItem).map(|opaque_lang_item| {
tcx.intrinsic_defs.borrow().find_copy(&opaque_lang_item)
.expect("Failed to resolve Opaque")
@ -4246,7 +4271,7 @@ pub fn get_opaque_ty(tcx: &ctxt) -> Result<t, ~str> {
}
pub fn visitor_object_ty(tcx: &ctxt,
region: ty::Region) -> Result<(Rc<TraitRef>, t), ~str> {
region: ty::Region) -> Result<(Rc<TraitRef>, t), StrBuf> {
let trait_lang_item = match tcx.lang_items.require(TyVisitorTraitLangItem) {
Ok(id) => id,
Err(s) => { return Err(s); }

View File

@ -166,11 +166,15 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
// See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected, actual| {
expected.map_or("".to_owned(), |e| {
format!("mismatched types: expected `{}` but found {}",
e, actual)})},
Some(expected), "a structure pattern".to_owned(),
None);
expected.map_or("".to_strbuf(), |e| {
format_strbuf!("mismatched types: expected `{}` but \
found {}",
e,
actual)
})},
Some(expected),
"a structure pattern".to_strbuf(),
None);
fcx.write_error(pat.id);
kind_name = "[error]";
arg_types = subpats.clone()
@ -217,11 +221,17 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
// See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected, actual| {
expected.map_or("".to_owned(), |e| {
format!("mismatched types: expected `{}` but found {}",
e, actual)})},
Some(expected), "an enum or structure pattern".to_owned(),
None);
expected.map_or("".to_strbuf(),
|e| {
format_strbuf!("mismatched types: expected `{}` but \
found {}",
e,
actual)
})
},
Some(expected),
"an enum or structure pattern".to_strbuf(),
None);
fcx.write_error(pat.id);
kind_name = "[error]";
arg_types = subpats.clone()
@ -446,7 +456,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
debug!("pat_range ending type: {:?}", e_ty);
if !require_same_types(
tcx, Some(fcx.infcx()), false, pat.span, b_ty, e_ty,
|| "mismatched types in range".to_owned())
|| "mismatched types in range".to_strbuf())
{
// no-op
} else if !ty::type_is_numeric(b_ty) && !ty::type_is_char(b_ty) {
@ -540,11 +550,16 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
// See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected, actual| {
expected.map_or("".to_owned(), |e| {
format!("mismatched types: expected `{}` but found {}",
e, actual)})},
Some(expected), "a structure pattern".to_owned(),
None);
expected.map_or("".to_strbuf(),
|e| {
format_strbuf!("mismatched types: expected \
`{}` but found {}",
e,
actual)
})},
Some(expected),
"a structure pattern".to_strbuf(),
None);
match tcx.def_map.borrow().find(&pat.id) {
Some(&ast::DefStruct(supplied_def_id)) => {
check_struct_pat(pcx,
@ -590,17 +605,28 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
}
// use terr_tuple_size if both types are tuples
let type_error = match *s {
ty::ty_tup(ref ex_elts) =>
ty::terr_tuple_size(ty::expected_found{expected: ex_elts.len(),
found: e_count}),
ty::ty_tup(ref ex_elts) => {
ty::terr_tuple_size(ty::expected_found {
expected: ex_elts.len(),
found: e_count
})
}
_ => ty::terr_mismatch
};
// See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
fcx.infcx().type_error_message_str_with_expected(pat.span, |expected, actual| {
expected.map_or("".to_owned(), |e| {
format!("mismatched types: expected `{}` but found {}",
e, actual)})},
Some(expected), "tuple".to_owned(), Some(&type_error));
fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected,
actual| {
expected.map_or("".to_strbuf(), |e| {
format_strbuf!("mismatched types: expected `{}` \
but found {}",
e,
actual)
}
)},
Some(expected),
"tuple".to_strbuf(),
Some(&type_error));
fcx.write_error(pat.id);
}
}
@ -630,11 +656,16 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
fcx.infcx().type_error_message_str_with_expected(
pat.span,
|expected, actual| {
expected.map_or("".to_owned(), |e| {
format!("mismatched types: expected `{}` but found {}",
e, actual)})},
expected.map_or("".to_strbuf(),
|e| {
format_strbuf!("mismatched types: expected `{}` but \
found {}",
e,
actual)
})
},
Some(expected),
"a vector pattern".to_owned(),
"a vector pattern".to_strbuf(),
None);
fcx.write_error(pat.id);
};
@ -648,7 +679,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
fcx.type_error_message(pat.span,
|_| {
"unique vector patterns are no \
longer supported".to_owned()
longer supported".to_strbuf()
},
expected,
None);
@ -716,13 +747,17 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
fcx.infcx().type_error_message_str_with_expected(
span,
|expected, actual| {
expected.map_or("".to_owned(), |e| {
format!("mismatched types: expected `{}` but found {}",
e, actual)})},
expected.map_or("".to_strbuf(), |e| {
format_strbuf!("mismatched types: expected `{}` but \
found {}",
e,
actual)
})
},
Some(expected),
format!("{} pattern", match pointer_kind {
format_strbuf!("{} pattern", match pointer_kind {
Send => "a box",
Borrowed => "an `&`-pointer"
Borrowed => "an `&`-pointer",
}),
None);
fcx.write_error(pat_id);
@ -731,4 +766,8 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
}
#[deriving(Eq)]
pub enum PointerKind { Send, Borrowed }
pub enum PointerKind {
Send,
Borrowed,
}

View File

@ -1498,11 +1498,11 @@ impl<'a> LookupContext<'a> {
self.fcx.tcx()
}
fn ty_to_str(&self, t: ty::t) -> ~str {
fn ty_to_str(&self, t: ty::t) -> StrBuf {
self.fcx.infcx().ty_to_str(t)
}
fn did_to_str(&self, did: DefId) -> ~str {
fn did_to_str(&self, did: DefId) -> StrBuf {
ty::item_path_str(self.tcx(), did)
}
@ -1512,23 +1512,24 @@ impl<'a> LookupContext<'a> {
}
impl Repr for Candidate {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
format!("Candidate(rcvr_ty={}, rcvr_substs={}, method_ty={}, origin={:?})",
self.rcvr_match_condition.repr(tcx),
self.rcvr_substs.repr(tcx),
self.method_ty.repr(tcx),
self.origin)
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
format_strbuf!("Candidate(rcvr_ty={}, rcvr_substs={}, method_ty={}, \
origin={:?})",
self.rcvr_match_condition.repr(tcx),
self.rcvr_substs.repr(tcx),
self.method_ty.repr(tcx),
self.origin)
}
}
impl Repr for RcvrMatchCondition {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self {
RcvrMatchesIfObject(d) => {
format!("RcvrMatchesIfObject({})", d.repr(tcx))
format_strbuf!("RcvrMatchesIfObject({})", d.repr(tcx))
}
RcvrMatchesIfSubtype(t) => {
format!("RcvrMatchesIfSubtype({})", t.repr(tcx))
format_strbuf!("RcvrMatchesIfSubtype({})", t.repr(tcx))
}
}
}

View File

@ -468,7 +468,7 @@ fn check_fn<'a>(ccx: &'a CrateCtxt<'a>,
let ret_ty = fn_sig.output;
debug!("check_fn(arg_tys={:?}, ret_ty={:?})",
arg_tys.iter().map(|&a| ppaux::ty_to_str(tcx, a)).collect::<Vec<~str>>(),
arg_tys.iter().map(|&a| ppaux::ty_to_str(tcx, a)).collect::<Vec<StrBuf>>(),
ppaux::ty_to_str(tcx, ret_ty));
// Create the function context. This is either derived from scratch or,
@ -1089,8 +1089,8 @@ impl<'a> RegionScope for infer::InferCtxt<'a> {
}
impl<'a> FnCtxt<'a> {
pub fn tag(&self) -> ~str {
format!("{}", self as *FnCtxt)
pub fn tag(&self) -> StrBuf {
format_strbuf!("{}", self as *FnCtxt)
}
pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> ty::t {
@ -1164,7 +1164,7 @@ impl<'a> FnCtxt<'a> {
ast_ty_to_ty(self, self.infcx(), ast_t)
}
pub fn pat_to_str(&self, pat: &ast::Pat) -> ~str {
pub fn pat_to_str(&self, pat: &ast::Pat) -> StrBuf {
pat.repr(self.tcx())
}
@ -1271,7 +1271,7 @@ impl<'a> FnCtxt<'a> {
pub fn type_error_message(&self,
sp: Span,
mk_msg: |~str| -> ~str,
mk_msg: |StrBuf| -> StrBuf,
actual_ty: ty::t,
err: Option<&ty::type_err>) {
self.infcx().type_error_message(sp, mk_msg, actual_ty, err);
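
`type_error_message` takes its message as a closure (now `|StrBuf| -> StrBuf`), so the checker hands the rendered "actual type" string to the caller and the caller formats the final diagnostic at the reporting site. A minimal sketch of that callback shape; the `usize` span and `eprintln!` output are simplifications, not the compiler's diagnostic machinery:

    // `actual` is the checker's rendering of the offending type; the closure
    // turns it into the final diagnostic text.
    fn type_error_message(span: usize,
                          mk_msg: impl FnOnce(String) -> String,
                          actual_ty: &str) {
        let msg = mk_msg(actual_ty.to_string());
        eprintln!("error at {}: {}", span, msg);
    }

    fn main() {
        type_error_message(42, |actual| {
            format!("type `{}` cannot be dereferenced", actual)
        }, "uint");
    }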
@ -1787,7 +1787,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
};
debug!("check_argument_types: formal_tys={:?}",
formal_tys.iter().map(|t| fcx.infcx().ty_to_str(*t)).collect::<Vec<~str>>());
formal_tys.iter().map(|t| fcx.infcx().ty_to_str(*t)).collect::<Vec<StrBuf>>());
// Check the arguments.
// We do this in a pretty awful way: first we typecheck any arguments
@ -1863,18 +1863,24 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
match ty::get(arg_ty).sty {
ty::ty_float(ast::TyF32) => {
fcx.type_error_message(arg.span,
|t| format!("can't pass an {} to variadic function, \
cast to c_double", t), arg_ty, None);
|t| {
format_strbuf!("can't pass an {} to variadic \
function, cast to c_double", t)
}, arg_ty, None);
}
ty::ty_int(ast::TyI8) | ty::ty_int(ast::TyI16) | ty::ty_bool => {
fcx.type_error_message(arg.span,
|t| format!("can't pass {} to variadic function, cast to c_int",
t), arg_ty, None);
fcx.type_error_message(arg.span, |t| {
format_strbuf!("can't pass {} to variadic \
function, cast to c_int",
t)
}, arg_ty, None);
}
ty::ty_uint(ast::TyU8) | ty::ty_uint(ast::TyU16) => {
fcx.type_error_message(arg.span,
|t| format!("can't pass {} to variadic function, cast to c_uint",
t), arg_ty, None);
fcx.type_error_message(arg.span, |t| {
format_strbuf!("can't pass {} to variadic \
function, cast to c_uint",
t)
}, arg_ty, None);
}
_ => {}
}
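
The checks above mirror C's default argument promotions for variadic calls: `f32` must be cast to `c_double`, and `bool` plus integer types narrower than `int` to `c_int` or `c_uint`. A sketch of that promotion table as a plain function; `SimpleTy` is a stand-in for the compiler's type representation:

    #[allow(dead_code)]
    enum SimpleTy { F32, I8, I16, Bool, U8, U16, Other }

    // The cast a caller must insert before passing a value through `...` to a
    // C variadic function; None means the type already survives promotion.
    fn variadic_cast_hint(t: &SimpleTy) -> Option<&'static str> {
        match t {
            SimpleTy::F32 => Some("cast to c_double"),
            SimpleTy::I8 | SimpleTy::I16 | SimpleTy::Bool => Some("cast to c_int"),
            SimpleTy::U8 | SimpleTy::U16 => Some("cast to c_uint"),
            SimpleTy::Other => None,
        }
    }

    fn main() {
        assert_eq!(variadic_cast_hint(&SimpleTy::F32), Some("cast to c_double"));
        assert!(variadic_cast_hint(&SimpleTy::Other).is_none());
    }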
@ -1920,8 +1926,8 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
ty::ty_closure(box ty::ClosureTy {sig: ref sig, ..}) => sig,
_ => {
fcx.type_error_message(call_expr.span, |actual| {
format!("expected function but \
found `{}`", actual) }, fn_ty, None);
format_strbuf!("expected function but found `{}`", actual)
}, fn_ty, None);
&error_fn_sig
}
};
@ -1974,8 +1980,10 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
fcx.type_error_message(method_name.span,
|actual| {
format!("type `{}` does not implement any method in scope named `{}`",
actual, token::get_ident(method_name.node))
format_strbuf!("type `{}` does not implement any \
method in scope named `{}`",
actual,
token::get_ident(method_name.node))
},
expr_t,
None);
@ -1984,12 +1992,16 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
fcx.write_error(expr.id);
// Check for potential static matches (missing self parameters)
method::lookup(fcx, expr, rcvr,
method_name.node.name,
expr_t, tps.as_slice(),
DontDerefArgs,
CheckTraitsAndInherentMethods,
DontAutoderefReceiver, ReportStaticMethods);
method::lookup(fcx,
expr,
rcvr,
method_name.node.name,
expr_t,
tps.as_slice(),
DontDerefArgs,
CheckTraitsAndInherentMethods,
DontAutoderefReceiver,
ReportStaticMethods);
ty::mk_err()
}
@ -2123,9 +2135,12 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
if ty::type_is_fp(ty::simd_type(tcx, lhs_t)) {
fcx.type_error_message(expr.span,
|actual| {
format!("binary comparison operation `{}` not supported \
for floating point SIMD vector `{}`",
ast_util::binop_to_str(op), actual)
format_strbuf!("binary comparison \
operation `{}` not \
supported for floating \
point SIMD vector `{}`",
ast_util::binop_to_str(op),
actual)
},
lhs_t,
None
@ -2150,12 +2165,15 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
// type
fcx.write_error(expr.id);
fcx.write_error(rhs.id);
fcx.type_error_message(expr.span, |actual| {
format!("binary operation `{}` cannot be applied \
to type `{}`",
ast_util::binop_to_str(op), actual)},
lhs_t, None)
fcx.type_error_message(expr.span,
|actual| {
format_strbuf!("binary operation `{}` cannot be applied \
to type `{}`",
ast_util::binop_to_str(op),
actual)
},
lhs_t,
None)
}
// Check for overloaded operators if not an assignment.
@ -2164,10 +2182,12 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
} else {
fcx.type_error_message(expr.span,
|actual| {
format!("binary assignment operation \
`{}=` cannot be applied to type `{}`",
ast_util::binop_to_str(op),
actual)
format_strbuf!("binary assignment \
operation `{}=` \
cannot be applied to \
type `{}`",
ast_util::binop_to_str(op),
actual)
},
lhs_t,
None);
@ -2214,8 +2234,10 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
lookup_op_method(fcx, ex, lhs_resolved_t, token::intern(name),
trait_did, [lhs_expr, rhs], DontAutoderefReceiver, || {
fcx.type_error_message(ex.span, |actual| {
format!("binary operation `{}` cannot be applied to type `{}`",
ast_util::binop_to_str(op), actual)
format_strbuf!("binary operation `{}` cannot be applied to \
type `{}`",
ast_util::binop_to_str(op),
actual)
}, lhs_resolved_t, None)
})
}
@ -2230,7 +2252,10 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
lookup_op_method(fcx, ex, rhs_t, token::intern(mname),
trait_did, [rhs_expr], DontAutoderefReceiver, || {
fcx.type_error_message(ex.span, |actual| {
format!("cannot apply unary operator `{}` to type `{}`", op_str, actual)
format_strbuf!("cannot apply unary operator `{}` to type \
`{}`",
op_str,
actual)
}, rhs_t, None);
})
}
@ -2389,8 +2414,10 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
fcx.type_error_message(
expr.span,
|actual| {
format!("attempted to take value of method `{}` on type `{}`",
token::get_name(field), actual)
format_strbuf!("attempted to take value of method \
`{}` on type `{}`",
token::get_name(field),
actual)
},
expr_t, None);
@ -2402,9 +2429,11 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
fcx.type_error_message(
expr.span,
|actual| {
format!("attempted access of field `{}` on type `{}`, \
but no field with that name was found",
token::get_name(field), actual)
format_strbuf!("attempted access of field `{}` on \
type `{}`, but no field with that \
name was found",
token::get_name(field),
actual)
},
expr_t, None);
}
@ -2442,9 +2471,13 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
fcx.type_error_message(
field.ident.span,
|actual| {
format!("structure `{}` has no field named `{}`",
actual, token::get_ident(field.ident.node))
}, struct_ty, None);
format_strbuf!("structure `{}` has no field named \
`{}`",
actual,
token::get_ident(field.ident.node))
},
struct_ty,
None);
error_happened = true;
}
Some((_, true)) => {
@ -2701,7 +2734,8 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
.require(GcLangItem) {
Ok(id) => id,
Err(msg) => {
tcx.sess.span_err(expr.span, msg);
tcx.sess.span_err(expr.span,
msg.as_slice());
ast::DefId {
krate: ast::CRATE_NODE_ID,
node: ast::DUMMY_NODE_ID,
@ -2825,7 +2859,9 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
no longer be dereferenced");
} else {
fcx.type_error_message(expr.span, |actual| {
format!("type `{}` cannot be dereferenced", actual)
format_strbuf!("type `{}` cannot be \
dereferenced",
actual)
}, oprnd_t, None);
}
ty::mk_err()
@ -3066,13 +3102,15 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
_ => {
if ty::type_is_nil(t_e) {
fcx.type_error_message(expr.span, |actual| {
format!("cast from nil: `{}` as `{}`", actual,
fcx.infcx().ty_to_str(t_1))
format_strbuf!("cast from nil: `{}` as `{}`",
actual,
fcx.infcx().ty_to_str(t_1))
}, t_e, None);
} else if ty::type_is_nil(t_1) {
fcx.type_error_message(expr.span, |actual| {
format!("cast to nil: `{}` as `{}`", actual,
fcx.infcx().ty_to_str(t_1))
format_strbuf!("cast to nil: `{}` as `{}`",
actual,
fcx.infcx().ty_to_str(t_1))
}, t_e, None);
}
@ -3092,12 +3130,17 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
let te = fcx.infcx().resolve_type_vars_if_possible(te);
if ty::get(te).sty != ty::ty_uint(ast::TyU8) {
fcx.type_error_message(expr.span, |actual| {
format!("only `u8` can be cast as `char`, not `{}`", actual)
format_strbuf!("only `u8` can be cast as \
`char`, not `{}`",
actual)
}, t_e, None);
}
} else if ty::get(t1).sty == ty::ty_bool {
fcx.tcx().sess.span_err(expr.span,
"cannot cast as `bool`, compare with zero instead");
fcx.tcx()
.sess
.span_err(expr.span,
"cannot cast as `bool`, compare with \
zero instead");
} else if type_is_region_ptr(fcx, expr.span, t_e) &&
type_is_unsafe_ptr(fcx, expr.span, t_1) {
@ -3151,8 +3194,9 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
record the issue number in this comment.
*/
fcx.type_error_message(expr.span, |actual| {
format!("non-scalar cast: `{}` as `{}`", actual,
fcx.infcx().ty_to_str(t_1))
format_strbuf!("non-scalar cast: `{}` as `{}`",
actual,
fcx.infcx().ty_to_str(t_1))
}, t_e, None);
}
}
@ -3271,9 +3315,12 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
|| {
fcx.type_error_message(expr.span,
|actual| {
format!("cannot index a value \
of type `{}`",
actual)
format_strbuf!("cannot \
index a \
value of \
type \
`{}`",
actual)
},
base_t,
None);
@ -3291,7 +3338,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
ppaux::ty_to_str(tcx, fcx.expr_ty(expr)),
match expected {
Some(t) => ppaux::ty_to_str(tcx, t),
_ => "empty".to_owned()
_ => "empty".to_strbuf()
});
unifier();
@ -3300,8 +3347,9 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
pub fn require_uint(fcx: &FnCtxt, sp: Span, t: ty::t) {
if !type_is_uint(fcx, sp, t) {
fcx.type_error_message(sp, |actual| {
format!("mismatched types: expected `uint` type but found `{}`",
actual)
format_strbuf!("mismatched types: expected `uint` type but found \
`{}`",
actual)
}, t, None);
}
}
@ -3309,8 +3357,9 @@ pub fn require_uint(fcx: &FnCtxt, sp: Span, t: ty::t) {
pub fn require_integral(fcx: &FnCtxt, sp: Span, t: ty::t) {
if !type_is_integral(fcx, sp, t) {
fcx.type_error_message(sp, |actual| {
format!("mismatched types: expected integral type but found `{}`",
actual)
format_strbuf!("mismatched types: expected integral type but \
found `{}`",
actual)
}, t, None);
}
}
@ -3439,8 +3488,13 @@ pub fn check_block_with_expected(fcx: &FnCtxt,
ast::StmtExpr(_, _) | ast::StmtSemi(_, _) => true,
_ => false
} {
fcx.ccx.tcx.sess.add_lint(UnreachableCode, s_id, s.span,
"unreachable statement".to_owned());
fcx.ccx
.tcx
.sess
.add_lint(UnreachableCode,
s_id,
s.span,
"unreachable statement".to_strbuf());
warned = true;
}
if ty::type_is_bot(s_ty) {
@ -3461,8 +3515,13 @@ pub fn check_block_with_expected(fcx: &FnCtxt,
},
Some(e) => {
if any_bot && !warned {
fcx.ccx.tcx.sess.add_lint(UnreachableCode, e.id, e.span,
"unreachable expression".to_owned());
fcx.ccx
.tcx
.sess
.add_lint(UnreachableCode,
e.id,
e.span,
"unreachable expression".to_strbuf());
}
check_expr_with_opt_hint(fcx, e, expected);
let ety = fcx.expr_ty(e);
@ -3979,7 +4038,8 @@ pub fn structurally_resolved_type(fcx: &FnCtxt, sp: Span, tp: ty::t) -> ty::t {
Ok(t_s) if !ty::type_is_ty_var(t_s) => t_s,
_ => {
fcx.type_error_message(sp, |_actual| {
"the type of this value must be known in this context".to_owned()
"the type of this value must be known in this \
context".to_strbuf()
}, tp, None);
demand::suptype(fcx, sp, ty::mk_err(), tp);
tp
@ -4183,7 +4243,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
"get_tydesc" => {
let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
Ok(t) => t,
Err(s) => { tcx.sess.span_fatal(it.span, s); }
Err(s) => { tcx.sess.span_fatal(it.span, s.as_slice()); }
};
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {
ty: tydesc_ty,
@ -4199,18 +4259,20 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
tps: Vec::new(),
regions: ty::NonerasedRegions(OwnedSlice::empty())
}) ),
Err(msg) => { tcx.sess.span_fatal(it.span, msg); }
Err(msg) => {
tcx.sess.span_fatal(it.span, msg.as_slice());
}
}
},
"visit_tydesc" => {
let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
Ok(t) => t,
Err(s) => { tcx.sess.span_fatal(it.span, s); }
Err(s) => { tcx.sess.span_fatal(it.span, s.as_slice()); }
};
let region = ty::ReLateBound(it.id, ty::BrAnon(0));
let visitor_object_ty = match ty::visitor_object_ty(tcx, region) {
Ok((_, vot)) => vot,
Err(s) => { tcx.sess.span_fatal(it.span, s); }
Err(s) => { tcx.sess.span_fatal(it.span, s.as_slice()); }
};
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {
@ -4387,10 +4449,12 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
let fty = ty::mk_bare_fn(tcx, ty::BareFnTy {
fn_style: ast::UnsafeFn,
abi: abi::RustIntrinsic,
sig: FnSig {binder_id: it.id,
inputs: inputs,
output: output,
variadic: false}
sig: FnSig {
binder_id: it.id,
inputs: inputs,
output: output,
variadic: false,
}
});
let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id));
let i_n_tps = i_ty.generics.type_param_defs().len();
@ -4399,10 +4463,15 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
of type parameters: found {}, \
expected {}", i_n_tps, n_tps));
} else {
require_same_types(
tcx, None, false, it.span, i_ty.ty, fty,
|| format!("intrinsic has wrong type: \
expected `{}`",
ppaux::ty_to_str(ccx.tcx, fty)));
require_same_types(tcx,
None,
false,
it.span,
i_ty.ty,
fty,
|| {
format_strbuf!("intrinsic has wrong type: expected `{}`",
ppaux::ty_to_str(ccx.tcx, fty))
});
}
}


@ -421,7 +421,9 @@ impl<'a> CoherenceChecker<'a> {
let crate_store = &self.crate_context.tcx.sess.cstore;
let cdata = crate_store.get_crate_data(impl_b.krate);
session.note(
"conflicting implementation in crate `" + cdata.name + "`");
format!("conflicting implementation in crate \
`{}`",
cdata.name));
}
}
}
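The change above replaces ad-hoc string concatenation with a formatting macro when building the coherence note. A small sketch of the two styles side by side in today's Rust, where `StrBuf` corresponds to `String` (the helper function is made up for illustration):

    fn conflict_note(crate_name: &str) -> String {
        // Old style: piecewise concatenation onto an owned string.
        let by_concat =
            "conflicting implementation in crate `".to_string() + crate_name + "`";
        // New style adopted by the diff: a single formatting call.
        let by_format = format!("conflicting implementation in crate `{}`", crate_name);
        assert_eq!(by_concat, by_format);
        by_format
    }

    fn main() {
        println!("{}", conflict_note("collections"));
    }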


@ -438,8 +438,10 @@ pub fn ensure_supertraits(ccx: &CrateCtxt,
}
if sized == ast::StaticSize {
match tcx.lang_items.require(SizedTraitLangItem) {
Ok(def_id) => { ty::try_add_builtin_trait(tcx, def_id, &mut bounds); },
Err(s) => tcx.sess.err(s),
Ok(def_id) => {
ty::try_add_builtin_trait(tcx, def_id, &mut bounds);
}
Err(s) => tcx.sess.err(s.as_slice()),
};
}


@ -71,7 +71,7 @@ use syntax::abi;
pub trait Combine {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a>;
fn tag(&self) -> ~str;
fn tag(&self) -> StrBuf;
fn a_is_expected(&self) -> bool;
fn trace(&self) -> TypeTrace;


@ -103,12 +103,12 @@ pub trait ErrorReporting {
trace: TypeTrace,
terr: &ty::type_err);
fn values_str(&self, values: &ValuePairs) -> Option<~str>;
fn values_str(&self, values: &ValuePairs) -> Option<StrBuf>;
fn expected_found_str<T:UserString+Resolvable>(
&self,
exp_found: &ty::expected_found<T>)
-> Option<~str>;
-> Option<StrBuf>;
fn report_concrete_failure(&self,
origin: SubregionOrigin,
@ -365,7 +365,7 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
ty::note_and_explain_type_err(self.tcx, terr);
}
fn values_str(&self, values: &ValuePairs) -> Option<~str> {
fn values_str(&self, values: &ValuePairs) -> Option<StrBuf> {
/*!
* Returns a string of the form "expected `{}` but found `{}`",
* or None if this is a derived error.
@ -383,7 +383,7 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
fn expected_found_str<T:UserString+Resolvable>(
&self,
exp_found: &ty::expected_found<T>)
-> Option<~str>
-> Option<StrBuf>
{
let expected = exp_found.expected.resolve(self);
if expected.contains_error() {
@ -395,9 +395,9 @@ impl<'a> ErrorReporting for InferCtxt<'a> {
return None;
}
Some(format!("expected `{}` but found `{}`",
expected.user_string(self.tcx),
found.user_string(self.tcx)))
Some(format_strbuf!("expected `{}` but found `{}`",
expected.user_string(self.tcx),
found.user_string(self.tcx)))
}
fn report_concrete_failure(&self,
@ -1449,7 +1449,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt,
// LifeGiver is responsible for generating fresh lifetime names
struct LifeGiver {
taken: HashSet<~str>,
taken: HashSet<StrBuf>,
counter: Cell<uint>,
generated: RefCell<Vec<ast::Lifetime>>,
}
@ -1458,7 +1458,7 @@ impl LifeGiver {
fn with_taken(taken: &[ast::Lifetime]) -> LifeGiver {
let mut taken_ = HashSet::new();
for lt in taken.iter() {
let lt_name = token::get_name(lt.name).get().to_owned();
let lt_name = token::get_name(lt.name).get().to_strbuf();
taken_.insert(lt_name);
}
LifeGiver {
@ -1489,14 +1489,14 @@ impl LifeGiver {
return lifetime;
// 0 .. 25 generates a .. z, 26 .. 51 generates aa .. zz, and so on
fn num_to_str(counter: uint) -> ~str {
fn num_to_str(counter: uint) -> StrBuf {
let mut s = StrBuf::new();
let (n, r) = (counter/26 + 1, counter % 26);
let letter: char = from_u32((r+97) as u32).unwrap();
for _ in range(0, n) {
s.push_char(letter);
}
s.into_owned()
s
}
}
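The `num_to_str` helper above implements the lifetime-name scheme described in the comment: counters 0..25 map to "a".."z", 26..51 to "aa".."zz", and so on, and it now returns the buffer directly instead of calling `into_owned()`. A minimal modern-Rust sketch of the same mapping (with `String` standing in for `StrBuf`):

    fn num_to_str(counter: usize) -> String {
        // How many times the letter repeats, and which letter it is.
        let (n, r) = (counter / 26 + 1, counter % 26);
        let letter = (b'a' + r as u8) as char;
        std::iter::repeat(letter).take(n).collect()
    }

    fn main() {
        assert_eq!(num_to_str(0), "a");
        assert_eq!(num_to_str(25), "z");
        assert_eq!(num_to_str(26), "aa");
        assert_eq!(num_to_str(51), "zz");
        assert_eq!(num_to_str(52), "aaa");
    }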


@ -36,7 +36,7 @@ impl<'f> Glb<'f> {
impl<'f> Combine for Glb<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> ~str { "glb".to_owned() }
fn tag(&self) -> StrBuf { "glb".to_strbuf() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }


@ -35,7 +35,7 @@ impl<'f> Lub<'f> {
impl<'f> Combine for Lub<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> ~str { "lub".to_owned() }
fn tag(&self) -> StrBuf { "lub".to_strbuf() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }


@ -246,15 +246,17 @@ pub enum fixup_err {
region_var_bound_by_region_var(RegionVid, RegionVid)
}
pub fn fixup_err_to_str(f: fixup_err) -> ~str {
pub fn fixup_err_to_str(f: fixup_err) -> StrBuf {
match f {
unresolved_int_ty(_) => "unconstrained integral type".to_owned(),
unresolved_ty(_) => "unconstrained type".to_owned(),
cyclic_ty(_) => "cyclic type of infinite size".to_owned(),
unresolved_region(_) => "unconstrained region".to_owned(),
unresolved_int_ty(_) => "unconstrained integral type".to_strbuf(),
unresolved_ty(_) => "unconstrained type".to_strbuf(),
cyclic_ty(_) => "cyclic type of infinite size".to_strbuf(),
unresolved_region(_) => "unconstrained region".to_strbuf(),
region_var_bound_by_region_var(r1, r2) => {
format!("region var {:?} bound by another region var {:?}; this is \
a bug in rustc", r1, r2)
format_strbuf!("region var {:?} bound by another region var {:?}; \
this is a bug in rustc",
r1,
r2)
}
}
}
@ -649,17 +651,17 @@ impl<'a> InferCtxt<'a> {
self.report_region_errors(&errors); // see error_reporting.rs
}
pub fn ty_to_str(&self, t: ty::t) -> ~str {
pub fn ty_to_str(&self, t: ty::t) -> StrBuf {
ty_to_str(self.tcx,
self.resolve_type_vars_if_possible(t))
}
pub fn tys_to_str(&self, ts: &[ty::t]) -> ~str {
let tstrs: Vec<~str> = ts.iter().map(|t| self.ty_to_str(*t)).collect();
format!("({})", tstrs.connect(", "))
pub fn tys_to_str(&self, ts: &[ty::t]) -> StrBuf {
let tstrs: Vec<StrBuf> = ts.iter().map(|t| self.ty_to_str(*t)).collect();
format_strbuf!("({})", tstrs.connect(", "))
}
pub fn trait_ref_to_str(&self, t: &ty::TraitRef) -> ~str {
pub fn trait_ref_to_str(&self, t: &ty::TraitRef) -> StrBuf {
let t = self.resolve_type_vars_in_trait_ref_if_possible(t);
trait_ref_to_str(self.tcx, &t)
}
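`tys_to_str` above now collects the per-type strings into a `Vec<StrBuf>` and joins them with `connect(", ")` before wrapping the result in parentheses. The modern spelling of the same pattern uses `Vec<String>` and `join` (a hedged sketch; the type names are simplified to plain `&str` here):

    fn tys_to_str(ts: &[&str]) -> String {
        let tstrs: Vec<String> = ts.iter().map(|t| t.to_string()).collect();
        format!("({})", tstrs.join(", "))
    }

    fn main() {
        assert_eq!(tys_to_str(&["int", "bool"]), "(int, bool)");
        assert_eq!(tys_to_str(&[]), "()");
    }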
@ -712,19 +714,19 @@ impl<'a> InferCtxt<'a> {
// errors.
pub fn type_error_message_str(&self,
sp: Span,
mk_msg: |Option<~str>, ~str| -> ~str,
actual_ty: ~str,
mk_msg: |Option<StrBuf>, StrBuf| -> StrBuf,
actual_ty: StrBuf,
err: Option<&ty::type_err>) {
self.type_error_message_str_with_expected(sp, mk_msg, None, actual_ty, err)
}
pub fn type_error_message_str_with_expected(&self,
sp: Span,
mk_msg: |Option<~str>,
~str|
-> ~str,
mk_msg: |Option<StrBuf>,
StrBuf|
-> StrBuf,
expected_ty: Option<ty::t>,
actual_ty: ~str,
actual_ty: StrBuf,
err: Option<&ty::type_err>) {
debug!("hi! expected_ty = {:?}, actual_ty = {}", expected_ty, actual_ty);
@ -751,7 +753,7 @@ impl<'a> InferCtxt<'a> {
pub fn type_error_message(&self,
sp: Span,
mk_msg: |~str| -> ~str,
mk_msg: |StrBuf| -> StrBuf,
actual_ty: ty::t,
err: Option<&ty::type_err>) {
let actual_ty = self.resolve_type_vars_if_possible(actual_ty);
@ -775,10 +777,12 @@ impl<'a> InferCtxt<'a> {
// Don't report an error if expected is ty_err
ty::ty_err => return,
_ => {
// if I leave out : ~str, it infers &str and complains
|actual: ~str| {
format!("mismatched types: expected `{}` but found `{}`",
self.ty_to_str(resolved_expected), actual)
// if I leave out : StrBuf, it infers &str and complains
|actual: StrBuf| {
format_strbuf!("mismatched types: expected `{}` but \
found `{}`",
self.ty_to_str(resolved_expected),
actual)
}
}
};
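The hunks above change the `mk_msg` callbacks to build `StrBuf` rather than `~str`, including the fallback closure used for the "mismatched types" message. The shape of the API is a span, a closure from the rendered actual type to a message, and the offending type. A stripped-down sketch of that callback pattern in today's Rust (the free function and its parameters are stand-ins, not the real `InferCtxt` method):

    fn type_error_message<F>(span: (u32, u32), mk_msg: F, actual_ty: &str)
    where
        F: FnOnce(String) -> String,
    {
        // The caller decides the wording; this function only supplies the type text.
        let msg = mk_msg(actual_ty.to_string());
        eprintln!("error at {:?}: {}", span, msg);
    }

    fn main() {
        type_error_message(
            (10, 14),
            |actual| format!("mismatched types: expected `uint` type but found `{}`", actual),
            "bool",
        );
    }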
@ -818,8 +822,8 @@ impl TypeTrace {
}
impl Repr for TypeTrace {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
format!("TypeTrace({})", self.origin.repr(tcx))
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
format_strbuf!("TypeTrace({})", self.origin.repr(tcx))
}
}
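The long run of `Repr` impls that follows converts each `repr` method to return `StrBuf`; the trait is a context-aware, `Debug`-like pretty-printer keyed on the type context. A toy sketch of the shape of the trait (the `Ctxt` type and the `origin` field are invented for illustration):

    struct Ctxt; // stands in for ty::ctxt

    trait Repr {
        fn repr(&self, tcx: &Ctxt) -> String;
    }

    struct TypeTrace {
        origin: &'static str,
    }

    impl Repr for TypeTrace {
        fn repr(&self, _tcx: &Ctxt) -> String {
            format!("TypeTrace({})", self.origin)
        }
    }

    fn main() {
        let trace = TypeTrace { origin: "Misc(span)" };
        println!("{}", trace.repr(&Ctxt));
    }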
@ -838,15 +842,27 @@ impl TypeOrigin {
}
impl Repr for TypeOrigin {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self {
MethodCompatCheck(a) => format!("MethodCompatCheck({})", a.repr(tcx)),
ExprAssignable(a) => format!("ExprAssignable({})", a.repr(tcx)),
Misc(a) => format!("Misc({})", a.repr(tcx)),
RelateTraitRefs(a) => format!("RelateTraitRefs({})", a.repr(tcx)),
RelateSelfType(a) => format!("RelateSelfType({})", a.repr(tcx)),
MatchExpression(a) => format!("MatchExpression({})", a.repr(tcx)),
IfExpression(a) => format!("IfExpression({})", a.repr(tcx)),
MethodCompatCheck(a) => {
format_strbuf!("MethodCompatCheck({})", a.repr(tcx))
}
ExprAssignable(a) => {
format_strbuf!("ExprAssignable({})", a.repr(tcx))
}
Misc(a) => format_strbuf!("Misc({})", a.repr(tcx)),
RelateTraitRefs(a) => {
format_strbuf!("RelateTraitRefs({})", a.repr(tcx))
}
RelateSelfType(a) => {
format_strbuf!("RelateSelfType({})", a.repr(tcx))
}
MatchExpression(a) => {
format_strbuf!("MatchExpression({})", a.repr(tcx))
}
IfExpression(a) => {
format_strbuf!("IfExpression({})", a.repr(tcx))
}
}
}
}
@ -875,26 +891,44 @@ impl SubregionOrigin {
}
impl Repr for SubregionOrigin {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self {
Subtype(ref a) => format!("Subtype({})", a.repr(tcx)),
InfStackClosure(a) => format!("InfStackClosure({})", a.repr(tcx)),
InvokeClosure(a) => format!("InvokeClosure({})", a.repr(tcx)),
DerefPointer(a) => format!("DerefPointer({})", a.repr(tcx)),
FreeVariable(a, b) => format!("FreeVariable({}, {})", a.repr(tcx), b),
IndexSlice(a) => format!("IndexSlice({})", a.repr(tcx)),
RelateObjectBound(a) => format!("RelateObjectBound({})", a.repr(tcx)),
Reborrow(a) => format!("Reborrow({})", a.repr(tcx)),
ReborrowUpvar(a, b) => format!("ReborrowUpvar({},{:?})", a.repr(tcx), b),
ReferenceOutlivesReferent(_, a) =>
format!("ReferenceOutlivesReferent({})", a.repr(tcx)),
BindingTypeIsNotValidAtDecl(a) =>
format!("BindingTypeIsNotValidAtDecl({})", a.repr(tcx)),
CallRcvr(a) => format!("CallRcvr({})", a.repr(tcx)),
CallArg(a) => format!("CallArg({})", a.repr(tcx)),
CallReturn(a) => format!("CallReturn({})", a.repr(tcx)),
AddrOf(a) => format!("AddrOf({})", a.repr(tcx)),
AutoBorrow(a) => format!("AutoBorrow({})", a.repr(tcx)),
Subtype(ref a) => {
format_strbuf!("Subtype({})", a.repr(tcx))
}
InfStackClosure(a) => {
format_strbuf!("InfStackClosure({})", a.repr(tcx))
}
InvokeClosure(a) => {
format_strbuf!("InvokeClosure({})", a.repr(tcx))
}
DerefPointer(a) => {
format_strbuf!("DerefPointer({})", a.repr(tcx))
}
FreeVariable(a, b) => {
format_strbuf!("FreeVariable({}, {})", a.repr(tcx), b)
}
IndexSlice(a) => {
format_strbuf!("IndexSlice({})", a.repr(tcx))
}
RelateObjectBound(a) => {
format_strbuf!("RelateObjectBound({})", a.repr(tcx))
}
Reborrow(a) => format_strbuf!("Reborrow({})", a.repr(tcx)),
ReborrowUpvar(a, b) => {
format_strbuf!("ReborrowUpvar({},{:?})", a.repr(tcx), b)
}
ReferenceOutlivesReferent(_, a) => {
format_strbuf!("ReferenceOutlivesReferent({})", a.repr(tcx))
}
BindingTypeIsNotValidAtDecl(a) => {
format_strbuf!("BindingTypeIsNotValidAtDecl({})", a.repr(tcx))
}
CallRcvr(a) => format_strbuf!("CallRcvr({})", a.repr(tcx)),
CallArg(a) => format_strbuf!("CallArg({})", a.repr(tcx)),
CallReturn(a) => format_strbuf!("CallReturn({})", a.repr(tcx)),
AddrOf(a) => format_strbuf!("AddrOf({})", a.repr(tcx)),
AutoBorrow(a) => format_strbuf!("AutoBorrow({})", a.repr(tcx)),
}
}
}
@ -918,25 +952,43 @@ impl RegionVariableOrigin {
}
impl Repr for RegionVariableOrigin {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self {
MiscVariable(a) => format!("MiscVariable({})", a.repr(tcx)),
PatternRegion(a) => format!("PatternRegion({})", a.repr(tcx)),
AddrOfRegion(a) => format!("AddrOfRegion({})", a.repr(tcx)),
AddrOfSlice(a) => format!("AddrOfSlice({})", a.repr(tcx)),
Autoref(a) => format!("Autoref({})", a.repr(tcx)),
Coercion(ref a) => format!("Coercion({})", a.repr(tcx)),
EarlyBoundRegion(a, b) => format!("EarlyBoundRegion({},{})",
a.repr(tcx), b.repr(tcx)),
LateBoundRegion(a, b) => format!("LateBoundRegion({},{})",
a.repr(tcx), b.repr(tcx)),
BoundRegionInFnType(a, b) => format!("bound_regionInFnType({},{})",
a.repr(tcx), b.repr(tcx)),
BoundRegionInCoherence(a) => format!("bound_regionInCoherence({})",
a.repr(tcx)),
UpvarRegion(a, b) => format!("UpvarRegion({}, {})",
a.repr(tcx),
b.repr(tcx)),
MiscVariable(a) => {
format_strbuf!("MiscVariable({})", a.repr(tcx))
}
PatternRegion(a) => {
format_strbuf!("PatternRegion({})", a.repr(tcx))
}
AddrOfRegion(a) => {
format_strbuf!("AddrOfRegion({})", a.repr(tcx))
}
AddrOfSlice(a) => format_strbuf!("AddrOfSlice({})", a.repr(tcx)),
Autoref(a) => format_strbuf!("Autoref({})", a.repr(tcx)),
Coercion(ref a) => format_strbuf!("Coercion({})", a.repr(tcx)),
EarlyBoundRegion(a, b) => {
format_strbuf!("EarlyBoundRegion({},{})",
a.repr(tcx),
b.repr(tcx))
}
LateBoundRegion(a, b) => {
format_strbuf!("LateBoundRegion({},{})",
a.repr(tcx),
b.repr(tcx))
}
BoundRegionInFnType(a, b) => {
format_strbuf!("bound_regionInFnType({},{})",
a.repr(tcx),
b.repr(tcx))
}
BoundRegionInCoherence(a) => {
format_strbuf!("bound_regionInCoherence({})", a.repr(tcx))
}
UpvarRegion(a, b) => {
format_strbuf!("UpvarRegion({}, {})",
a.repr(tcx),
b.repr(tcx))
}
}
}
}


@ -1332,16 +1332,28 @@ impl<'a> RegionVarBindings<'a> {
}
impl Repr for Constraint {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self {
ConstrainVarSubVar(a, b) => format!("ConstrainVarSubVar({}, {})",
a.repr(tcx), b.repr(tcx)),
ConstrainRegSubVar(a, b) => format!("ConstrainRegSubVar({}, {})",
a.repr(tcx), b.repr(tcx)),
ConstrainVarSubReg(a, b) => format!("ConstrainVarSubReg({}, {})",
a.repr(tcx), b.repr(tcx)),
ConstrainRegSubReg(a, b) => format!("ConstrainRegSubReg({}, {})",
a.repr(tcx), b.repr(tcx)),
ConstrainVarSubVar(a, b) => {
format_strbuf!("ConstrainVarSubVar({}, {})",
a.repr(tcx),
b.repr(tcx))
}
ConstrainRegSubVar(a, b) => {
format_strbuf!("ConstrainRegSubVar({}, {})",
a.repr(tcx),
b.repr(tcx))
}
ConstrainVarSubReg(a, b) => {
format_strbuf!("ConstrainVarSubReg({}, {})",
a.repr(tcx),
b.repr(tcx))
}
ConstrainRegSubReg(a, b) => {
format_strbuf!("ConstrainRegSubReg({}, {})",
a.repr(tcx),
b.repr(tcx))
}
}
}
}


@ -35,7 +35,7 @@ impl<'f> Sub<'f> {
impl<'f> Combine for Sub<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> ~str { "sub".to_owned() }
fn tag(&self) -> StrBuf { "sub".to_strbuf() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }


@ -34,7 +34,7 @@ struct Env {
krate: @ast::Crate,
tcx: ty::ctxt,
infcx: infer::infer_ctxt,
err_messages: @DVec<~str>
err_messages: @DVec<StrBuf>
}
struct RH {
@ -93,7 +93,7 @@ impl Env {
sub: &[]}]});
}
pub fn lookup_item(&self, names: &[~str]) -> ast::node_id {
pub fn lookup_item(&self, names: &[StrBuf]) -> ast::node_id {
return match search_mod(self, &self.krate.node.module, 0, names) {
Some(id) => id,
None => {
@ -104,7 +104,7 @@ impl Env {
fn search_mod(self: &Env,
m: &ast::Mod,
idx: uint,
names: &[~str]) -> Option<ast::node_id> {
names: &[StrBuf]) -> Option<ast::node_id> {
assert!(idx < names.len());
for item in m.items.iter() {
if self.tcx.sess.str_of(item.ident) == names[idx] {
@ -117,7 +117,7 @@ impl Env {
fn search(self: &Env,
it: @ast::Item,
idx: uint,
names: &[~str]) -> Option<ast::node_id> {
names: &[StrBuf]) -> Option<ast::node_id> {
if idx == names.len() {
return Some(it.id);
}
@ -174,7 +174,7 @@ impl Env {
self.assert_subtype(b, a);
}
pub fn ty_to_str(&self, a: ty::t) -> ~str {
pub fn ty_to_str(&self, a: ty::t) -> StrBuf {
ty_to_str(self.tcx, a)
}


@ -20,75 +20,80 @@ use util::ppaux::{mt_to_str, ty_to_str, trait_ref_to_str};
use syntax::ast;
pub trait InferStr {
fn inf_str(&self, cx: &InferCtxt) -> ~str;
fn inf_str(&self, cx: &InferCtxt) -> StrBuf;
}
impl InferStr for ty::t {
fn inf_str(&self, cx: &InferCtxt) -> ~str {
fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
ty_to_str(cx.tcx, *self)
}
}
impl InferStr for FnSig {
fn inf_str(&self, cx: &InferCtxt) -> ~str {
format!("({}) -> {}",
self.inputs.iter().map(|a| a.inf_str(cx)).collect::<Vec<~str>>().connect(", "),
self.output.inf_str(cx))
fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
format_strbuf!("({}) -> {}",
self.inputs
.iter()
.map(|a| a.inf_str(cx))
.collect::<Vec<StrBuf>>().connect(", "),
self.output.inf_str(cx))
}
}
impl InferStr for ty::mt {
fn inf_str(&self, cx: &InferCtxt) -> ~str {
fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
mt_to_str(cx.tcx, self)
}
}
impl InferStr for ty::Region {
fn inf_str(&self, _cx: &InferCtxt) -> ~str {
format!("{:?}", *self)
fn inf_str(&self, _cx: &InferCtxt) -> StrBuf {
format_strbuf!("{:?}", *self)
}
}
impl<V:InferStr> InferStr for Bound<V> {
fn inf_str(&self, cx: &InferCtxt) -> ~str {
fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
match *self {
Some(ref v) => v.inf_str(cx),
None => "none".to_owned()
Some(ref v) => v.inf_str(cx),
None => "none".to_strbuf()
}
}
}
impl<T:InferStr> InferStr for Bounds<T> {
fn inf_str(&self, cx: &InferCtxt) -> ~str {
format!("\\{{} <: {}\\}",
self.lb.inf_str(cx),
self.ub.inf_str(cx))
fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
format_strbuf!("\\{{} <: {}\\}",
self.lb.inf_str(cx),
self.ub.inf_str(cx))
}
}
impl<V:Vid + ToStr,T:InferStr> InferStr for VarValue<V, T> {
fn inf_str(&self, cx: &InferCtxt) -> ~str {
fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
match *self {
Redirect(ref vid) => format!("Redirect({})", vid.to_str()),
Root(ref pt, rk) => format!("Root({}, {})", pt.inf_str(cx), rk)
Redirect(ref vid) => format_strbuf!("Redirect({})", vid.to_str()),
Root(ref pt, rk) => {
format_strbuf!("Root({}, {})", pt.inf_str(cx), rk)
}
}
}
}
impl InferStr for IntVarValue {
fn inf_str(&self, _cx: &InferCtxt) -> ~str {
self.to_str()
fn inf_str(&self, _cx: &InferCtxt) -> StrBuf {
self.to_str().to_strbuf()
}
}
impl InferStr for ast::FloatTy {
fn inf_str(&self, _cx: &InferCtxt) -> ~str {
self.to_str()
fn inf_str(&self, _cx: &InferCtxt) -> StrBuf {
self.to_str().to_strbuf()
}
}
impl InferStr for ty::TraitRef {
fn inf_str(&self, cx: &InferCtxt) -> ~str {
fn inf_str(&self, cx: &InferCtxt) -> StrBuf {
trait_ref_to_str(cx.tcx, self)
}
}


@ -198,18 +198,18 @@ pub enum vtable_origin {
}
impl Repr for vtable_origin {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
match *self {
vtable_static(def_id, ref tys, ref vtable_res) => {
format!("vtable_static({:?}:{}, {}, {})",
def_id,
ty::item_path_str(tcx, def_id),
tys.repr(tcx),
vtable_res.repr(tcx))
format_strbuf!("vtable_static({:?}:{}, {}, {})",
def_id,
ty::item_path_str(tcx, def_id),
tys.repr(tcx),
vtable_res.repr(tcx))
}
vtable_param(x, y) => {
format!("vtable_param({:?}, {:?})", x, y)
format_strbuf!("vtable_param({:?}, {:?})", x, y)
}
}
}
@ -230,10 +230,10 @@ pub struct impl_res {
}
impl Repr for impl_res {
fn repr(&self, tcx: &ty::ctxt) -> ~str {
format!("impl_res \\{trait_vtables={}, self_vtables={}\\}",
self.trait_vtables.repr(tcx),
self.self_vtables.repr(tcx))
fn repr(&self, tcx: &ty::ctxt) -> StrBuf {
format_strbuf!("impl_res \\{trait_vtables={}, self_vtables={}\\}",
self.trait_vtables.repr(tcx),
self.self_vtables.repr(tcx))
}
}
@ -293,7 +293,7 @@ pub fn require_same_types(tcx: &ty::ctxt,
span: Span,
t1: ty::t,
t2: ty::t,
msg: || -> ~str)
msg: || -> StrBuf)
-> bool {
let result = match maybe_infcx {
None => {
@ -308,8 +308,10 @@ pub fn require_same_types(tcx: &ty::ctxt,
match result {
Ok(_) => true,
Err(ref terr) => {
tcx.sess.span_err(span, msg() + ": " +
ty::type_err_to_str(tcx, terr));
tcx.sess.span_err(span,
format!("{}: {}",
msg(),
ty::type_err_to_str(tcx, terr)));
ty::note_and_explain_type_err(tcx, terr);
false
}
@ -350,8 +352,10 @@ fn check_main_fn_ty(ccx: &CrateCtxt,
});
require_same_types(tcx, None, false, main_span, main_t, se_ty,
|| format!("main function expects type: `{}`",
ppaux::ty_to_str(ccx.tcx, se_ty)));
|| {
format_strbuf!("main function expects type: `{}`",
ppaux::ty_to_str(ccx.tcx, se_ty))
});
}
_ => {
tcx.sess.span_bug(main_span,
@ -399,7 +403,10 @@ fn check_start_fn_ty(ccx: &CrateCtxt,
});
require_same_types(tcx, None, false, start_span, start_t, se_ty,
|| format!("start function expects type: `{}`", ppaux::ty_to_str(ccx.tcx, se_ty)));
|| {
format_strbuf!("start function expects type: `{}`",
ppaux::ty_to_str(ccx.tcx, se_ty))
});
}
_ => {


@ -1001,7 +1001,7 @@ impl<'a> SolveContext<'a> {
// attribute and report an error with various results if found.
if ty::has_attr(tcx, item_def_id, "rustc_variance") {
let found = item_variances.repr(tcx);
tcx.sess.span_err(tcx.map.span(item_id), found);
tcx.sess.span_err(tcx.map.span(item_id), found.as_slice());
}
let newly_added = tcx.item_variance_map.borrow_mut()

File diff suppressed because it is too large.


@ -257,9 +257,9 @@ pub trait Digest {
}
/// Convenience function that retrieves the result of a digest as a
/// ~str in hexadecimal format.
fn result_str(&mut self) -> ~str {
self.result_bytes().as_slice().to_hex()
/// StrBuf in hexadecimal format.
fn result_str(&mut self) -> StrBuf {
self.result_bytes().as_slice().to_hex().to_strbuf()
}
}
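`result_str` above hex-encodes the digest bytes via `to_hex()` and now returns a `StrBuf`. A dependency-free sketch of the same hex encoding in today's Rust (the `to_hex` helper is written out by hand here rather than taken from the `serialize` crate the original uses):

    fn to_hex(bytes: &[u8]) -> String {
        bytes.iter().map(|b| format!("{:02x}", b)).collect()
    }

    fn main() {
        // Example digest bytes -> lowercase hexadecimal string.
        assert_eq!(to_hex(&[0xde, 0xad, 0xbe, 0xef]), "deadbeef");
        assert_eq!(to_hex(&[]), "");
    }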
@ -543,15 +543,15 @@ mod tests {
}
struct Test {
input: ~str,
output_str: ~str,
input: StrBuf,
output_str: StrBuf,
}
fn test_hash<D: Digest>(sh: &mut D, tests: &[Test]) {
// Test that it works when accepting the message all at once
for t in tests.iter() {
sh.reset();
sh.input_str(t.input);
sh.input_str(t.input.as_slice());
let out_str = sh.result_str();
assert!(out_str == t.output_str);
}
@ -563,7 +563,9 @@ mod tests {
let mut left = len;
while left > 0u {
let take = (left + 1u) / 2u;
sh.input_str(t.input.slice(len - left, take + len - left));
sh.input_str(t.input
.as_slice()
.slice(len - left, take + len - left));
left = left - take;
}
let out_str = sh.result_str();
@ -576,19 +578,21 @@ mod tests {
// Examples from wikipedia
let wikipedia_tests = vec!(
Test {
input: "".to_owned(),
input: "".to_strbuf(),
output_str: "e3b0c44298fc1c149afb\
f4c8996fb92427ae41e4649b934ca495991b7852b855".to_owned()
f4c8996fb92427ae41e4649b934ca495991b7852b855".to_strbuf()
},
Test {
input: "The quick brown fox jumps over the lazy dog".to_owned(),
input: "The quick brown fox jumps over the lazy \
dog".to_strbuf(),
output_str: "d7a8fbb307d7809469ca\
9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592".to_owned()
9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592".to_strbuf()
},
Test {
input: "The quick brown fox jumps over the lazy dog.".to_owned(),
input: "The quick brown fox jumps over the lazy \
dog.".to_strbuf(),
output_str: "ef537f25c895bfa78252\
6529a9b63d97aa631564d5d789c2b765448c8635fb6c".to_owned()
6529a9b63d97aa631564d5d789c2b765448c8635fb6c".to_strbuf()
});
let tests = wikipedia_tests;


@ -90,7 +90,7 @@ impl<'a> Clean<Crate> for visit_ast::RustdocVisitor<'a> {
self.attrs.as_slice(),
cx.sess());
let id = link::find_crate_id(self.attrs.as_slice(),
t_outputs.out_filestem);
t_outputs.out_filestem.as_slice());
Crate {
name: id.name.to_owned(),
module: Some(self.module.clean()),


@ -100,7 +100,7 @@ pub fn run(input: &str,
fn runtest(test: &str, cratename: &str, libs: HashSet<Path>, should_fail: bool,
no_run: bool, loose_feature_gating: bool) {
let test = maketest(test, cratename, loose_feature_gating);
let input = driver::StrInput(test);
let input = driver::StrInput(test.to_strbuf());
let sessopts = config::Options {
maybe_sysroot: Some(os::self_exe_path().unwrap().dir_path()),


@ -504,6 +504,7 @@ use slice::{Vector, ImmutableVector};
use slice;
use str::{StrSlice, StrAllocating, UTF16Item, ScalarValue, LoneSurrogate};
use str;
use strbuf::StrBuf;
pub use self::num::radix;
pub use self::num::Radix;
@ -788,6 +789,11 @@ pub fn format(args: &Arguments) -> ~str {
unsafe { format_unsafe(args.fmt, args.args) }
}
/// Temporary transitionary thing.
pub fn format_strbuf(args: &Arguments) -> StrBuf {
unsafe { format_unsafe_strbuf(args.fmt, args.args) }
}
/// The unsafe version of the formatting function.
///
/// This is currently an unsafe function because the types of all arguments
@ -815,6 +821,14 @@ pub unsafe fn format_unsafe(fmt: &[rt::Piece], args: &[Argument]) -> ~str {
return str::from_utf8(output.unwrap().as_slice()).unwrap().to_owned();
}
/// Temporary transitionary thing.
pub unsafe fn format_unsafe_strbuf(fmt: &[rt::Piece], args: &[Argument])
-> StrBuf {
let mut output = MemWriter::new();
write_unsafe(&mut output as &mut io::Writer, fmt, args).unwrap();
return str::from_utf8(output.unwrap().as_slice()).unwrap().into_strbuf();
}
impl<'a> Formatter<'a> {
// First up is the collection of functions used to execute a format string


@ -229,6 +229,14 @@ macro_rules! format(
)
)
/// Temporary transitionary thing.
#[macro_export]
macro_rules! format_strbuf(
($($arg:tt)*) => (
format_args!(::std::fmt::format_strbuf, $($arg)*)
)
)
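The new `format_strbuf!` macro above simply forwards to `::std::fmt::format_strbuf` through `format_args!`, the same shape as `format!` itself. Its modern descendant is `std::fmt::format`, which takes `Arguments` and returns a `String`; a small sketch:

    fn main() {
        // format! expands to roughly this: build Arguments, then render them to a String.
        let rendered: String = std::fmt::format(format_args!("{} + {} = {}", 2, 3, 2 + 3));
        assert_eq!(rendered, "2 + 3 = 5");
        assert_eq!(format!("{} + {} = {}", 2, 3, 2 + 3), rendered);
    }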
/// Use the `format!` syntax to write data into a buffer of type `&mut Writer`.
/// See `std::fmt` for more information.
///