rustc: Remove usage of fmt!

Alex Crichton 2013-09-27 22:38:08 -07:00
parent af3b132285
commit 1b80558be3
100 changed files with 1743 additions and 1739 deletions
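
The change is mechanical: every call to the old printf-style macros (fmt!, and the %-directive forms of debug!, fail!, error!) is replaced by the new formatting macros (format!, debug2!, fail2!, error2!) that take {}-style directives; the 2-suffixed names appear to be transitional names used while both macro families coexisted. Below is a minimal sketch of the mapping, not part of the commit itself: the variable names are illustrative, and the old %-style calls are shown only as comments because fmt! predates Rust 1.0 and no longer compiles.

// Sketch of the conversion pattern applied throughout this commit.
// The {}-style calls still compile on a modern rustc; the %-style
// originals are kept as comments for comparison only.
fn main() {
    let prog = "cc";
    let status = 1;

    // Before this commit (pre-1.0 syntax, no longer valid):
    //   sess.err(fmt!("linking with `%s` failed with code %d", prog, status));
    //   debug!("output: %s", output.to_str());
    //   fail!("unknown debug flag: %s", *debug_flag);

    // After this commit ({}-style directives):
    let msg = format!("linking with `{}` failed with code {}", prog, status);
    println!("{}", msg);
    // %? becomes {:?}, %x becomes {:x}, and %u/%d become plain {}.
}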

@ -131,13 +131,13 @@ pub mod jit {
for cratepath in r.iter() {
let path = cratepath.to_str();
debug!("linking: %s", path);
debug2!("linking: {}", path);
do path.with_c_str |buf_t| {
if !llvm::LLVMRustLoadCrate(manager, buf_t) {
llvm_err(sess, ~"Could not link");
}
debug!("linked: %s", path);
debug2!("linked: {}", path);
}
}
@ -303,7 +303,7 @@ pub mod write {
for pass in sess.opts.custom_passes.iter() {
do pass.with_c_str |s| {
if !llvm::LLVMRustAddPass(mpm, s) {
sess.warn(fmt!("Unknown pass %s, ignoring", *pass));
sess.warn(format!("Unknown pass {}, ignoring", *pass));
}
}
}
@ -381,9 +381,9 @@ pub mod write {
let prog = run::process_output(cc_prog, cc_args);
if prog.status != 0 {
sess.err(fmt!("building with `%s` failed with code %d",
sess.err(format!("building with `{}` failed with code {}",
cc_prog, prog.status));
sess.note(fmt!("%s arguments: %s",
sess.note(format!("{} arguments: {}",
cc_prog, cc_args.connect(" ")));
sess.note(str::from_utf8(prog.error + prog.output));
sess.abort_if_errors();
@ -554,7 +554,7 @@ pub fn build_link_meta(sess: Session,
dep_hashes: ~[@str],
pkg_id: Option<@str>) -> @str {
fn len_and_str(s: &str) -> ~str {
fmt!("%u_%s", s.len(), s)
format!("{}_{}", s.len(), s)
}
fn len_and_str_lit(l: ast::lit) -> ~str {
@ -599,7 +599,7 @@ pub fn build_link_meta(sess: Session,
fn warn_missing(sess: Session, name: &str, default: &str) {
if !*sess.building_library { return; }
sess.warn(fmt!("missing crate link meta `%s`, using `%s` as default",
sess.warn(format!("missing crate link meta `{}`, using `{}` as default",
name, default));
}
@ -612,7 +612,7 @@ pub fn build_link_meta(sess: Session,
// filestem that returned an @str
let name = session::expect(sess,
output.filestem(),
|| fmt!("output file name `%s` doesn't\
|| format!("output file name `{}` doesn't\
appear to have a stem",
output.to_str())).to_managed();
if name.is_empty() {
@ -762,7 +762,7 @@ pub fn mangle(sess: Session, ss: path,
let push = |s: &str| {
let sani = sanitize(s);
n.push_str(fmt!("%u%s", sani.len(), sani));
n.push_str(format!("{}{}", sani.len(), sani));
};
// First, connect each component with <len, name> pairs.
@ -874,7 +874,7 @@ pub fn output_dll_filename(os: session::Os, lm: LinkMeta) -> ~str {
session::OsAndroid => (android::DLL_PREFIX, android::DLL_SUFFIX),
session::OsFreebsd => (freebsd::DLL_PREFIX, freebsd::DLL_SUFFIX),
};
fmt!("%s%s-%s-%s%s", dll_prefix, lm.name, lm.extras_hash, lm.vers, dll_suffix)
format!("{}{}-{}-{}{}", dll_prefix, lm.name, lm.extras_hash, lm.vers, dll_suffix)
}
pub fn get_cc_prog(sess: Session) -> ~str {
@ -890,7 +890,7 @@ pub fn get_cc_prog(sess: Session) -> ~str {
session::OsAndroid =>
match &sess.opts.android_cross_path {
&Some(ref path) => {
fmt!("%s/bin/arm-linux-androideabi-gcc", *path)
format!("{}/bin/arm-linux-androideabi-gcc", *path)
}
&None => {
sess.fatal("need Android NDK path for linking \
@ -915,29 +915,29 @@ pub fn link_binary(sess: Session,
let output = if *sess.building_library {
let long_libname = output_dll_filename(sess.targ_cfg.os, lm);
debug!("link_meta.name: %s", lm.name);
debug!("long_libname: %s", long_libname);
debug!("out_filename: %s", out_filename.to_str());
debug!("dirname(out_filename): %s", out_filename.dir_path().to_str());
debug2!("link_meta.name: {}", lm.name);
debug2!("long_libname: {}", long_libname);
debug2!("out_filename: {}", out_filename.to_str());
debug2!("dirname(out_filename): {}", out_filename.dir_path().to_str());
out_filename.dir_path().push(long_libname)
} else {
out_filename.clone()
};
debug!("output: %s", output.to_str());
debug2!("output: {}", output.to_str());
let cc_args = link_args(sess, obj_filename, out_filename, lm);
debug!("%s link args: %s", cc_prog, cc_args.connect(" "));
debug2!("{} link args: {}", cc_prog, cc_args.connect(" "));
if (sess.opts.debugging_opts & session::print_link_args) != 0 {
io::println(fmt!("%s link args: %s", cc_prog, cc_args.connect(" ")));
io::println(format!("{} link args: {}", cc_prog, cc_args.connect(" ")));
}
// We run 'cc' here
let prog = run::process_output(cc_prog, cc_args);
if 0 != prog.status {
sess.err(fmt!("linking with `%s` failed with code %d",
sess.err(format!("linking with `{}` failed with code {}",
cc_prog, prog.status));
sess.note(fmt!("%s arguments: %s",
sess.note(format!("{} arguments: {}",
cc_prog, cc_args.connect(" ")));
sess.note(str::from_utf8(prog.error + prog.output));
sess.abort_if_errors();
@ -951,7 +951,7 @@ pub fn link_binary(sess: Session,
// Remove the temporary object file if we aren't saving temps
if !sess.opts.save_temps {
if ! os::remove_file(obj_filename) {
sess.warn(fmt!("failed to delete object file `%s`",
sess.warn(format!("failed to delete object file `{}`",
obj_filename.to_str()));
}
}

@ -29,7 +29,7 @@ pub fn get_rpath_flags(sess: session::Session, out_filename: &Path)
return ~[];
}
debug!("preparing the RPATH!");
debug2!("preparing the RPATH!");
let sysroot = sess.filesearch.sysroot();
let output = out_filename;
@ -49,7 +49,7 @@ fn get_sysroot_absolute_rt_lib(sess: session::Session) -> Path {
}
pub fn rpaths_to_flags(rpaths: &[Path]) -> ~[~str] {
rpaths.iter().map(|rpath| fmt!("-Wl,-rpath,%s",rpath.to_str())).collect()
rpaths.iter().map(|rpath| format!("-Wl,-rpath,{}",rpath.to_str())).collect()
}
fn get_rpaths(os: session::Os,
@ -57,13 +57,13 @@ fn get_rpaths(os: session::Os,
output: &Path,
libs: &[Path],
target_triple: &str) -> ~[Path] {
debug!("sysroot: %s", sysroot.to_str());
debug!("output: %s", output.to_str());
debug!("libs:");
debug2!("sysroot: {}", sysroot.to_str());
debug2!("output: {}", output.to_str());
debug2!("libs:");
for libpath in libs.iter() {
debug!(" %s", libpath.to_str());
debug2!(" {}", libpath.to_str());
}
debug!("target_triple: %s", target_triple);
debug2!("target_triple: {}", target_triple);
// Use relative paths to the libraries. Binaries can be moved
// as long as they maintain the relative relationship to the
@ -78,9 +78,9 @@ fn get_rpaths(os: session::Os,
let fallback_rpaths = ~[get_install_prefix_rpath(target_triple)];
fn log_rpaths(desc: &str, rpaths: &[Path]) {
debug!("%s rpaths:", desc);
debug2!("{} rpaths:", desc);
for rpath in rpaths.iter() {
debug!(" %s", rpath.to_str());
debug2!(" {}", rpath.to_str());
}
}
@ -172,7 +172,7 @@ mod test {
let res = get_install_prefix_rpath("triple");
let d = Path(env!("CFG_PREFIX"))
.push_rel(&Path("lib/rustc/triple/lib"));
debug!("test_prefix_path: %s vs. %s",
debug2!("test_prefix_path: {} vs. {}",
res.to_str(),
d.to_str());
assert!(res.to_str().ends_with(d.to_str()));
@ -233,7 +233,7 @@ mod test {
#[test]
fn test_get_absolute_rpath() {
let res = get_absolute_rpath(&Path("lib/libstd.so"));
debug!("test_get_absolute_rpath: %s vs. %s",
debug2!("test_get_absolute_rpath: {} vs. {}",
res.to_str(),
os::make_absolute(&Path("lib")).to_str());

@ -386,7 +386,7 @@ pub fn phase_6_link_output(sess: Session,
pub fn stop_after_phase_3(sess: Session) -> bool {
if sess.opts.no_trans {
debug!("invoked with --no-trans, returning early from compile_input");
debug2!("invoked with --no-trans, returning early from compile_input");
return true;
}
return false;
@ -394,7 +394,7 @@ pub fn stop_after_phase_3(sess: Session) -> bool {
pub fn stop_after_phase_1(sess: Session) -> bool {
if sess.opts.parse_only {
debug!("invoked with --parse-only, returning early from compile_input");
debug2!("invoked with --parse-only, returning early from compile_input");
return true;
}
return false;
@ -402,17 +402,17 @@ pub fn stop_after_phase_1(sess: Session) -> bool {
pub fn stop_after_phase_5(sess: Session) -> bool {
if sess.opts.output_type != link::output_type_exe {
debug!("not building executable, returning early from compile_input");
debug2!("not building executable, returning early from compile_input");
return true;
}
if sess.opts.is_static && *sess.building_library {
debug!("building static library, returning early from compile_input");
debug2!("building static library, returning early from compile_input");
return true;
}
if sess.opts.jit {
debug!("running JIT, returning early from compile_input");
debug2!("running JIT, returning early from compile_input");
return true;
}
return false;
@ -670,7 +670,7 @@ pub fn build_session_options(binary: @str,
let lint_name = lint_name.replace("-", "_");
match lint_dict.find_equiv(&lint_name) {
None => {
early_error(demitter, fmt!("unknown %s flag: %s",
early_error(demitter, format!("unknown {} flag: {}",
level_name, lint_name));
}
Some(lint) => {
@ -690,7 +690,7 @@ pub fn build_session_options(binary: @str,
if name == debug_flag { this_bit = bit; break; }
}
if this_bit == 0u {
early_error(demitter, fmt!("unknown debug flag: %s", *debug_flag))
early_error(demitter, format!("unknown debug flag: {}", *debug_flag))
}
debugging_opts |= this_bit;
}
@ -1033,7 +1033,7 @@ pub fn build_output_filenames(input: &input,
pub fn early_error(emitter: @diagnostic::Emitter, msg: ~str) -> ! {
emitter.emit(None, msg, diagnostic::fatal);
fail!();
fail2!();
}
pub fn list_metadata(sess: Session, path: &Path, out: @io::Writer) {
@ -1058,7 +1058,7 @@ mod test {
let matches =
&match getopts([~"--test"], optgroups()) {
Ok(m) => m,
Err(f) => fail!("test_switch_implies_cfg_test: %s", f.to_err_msg())
Err(f) => fail2!("test_switch_implies_cfg_test: {}", f.to_err_msg())
};
let sessopts = build_session_options(
@"rustc",
@ -1079,7 +1079,8 @@ mod test {
&match getopts([~"--test", ~"--cfg=test"], optgroups()) {
Ok(m) => m,
Err(f) => {
fail!("test_switch_implies_cfg_test_unless_cfg_test: %s", f.to_err_msg());
fail2!("test_switch_implies_cfg_test_unless_cfg_test: {}",
f.to_err_msg());
}
};
let sessopts = build_session_options(

@ -296,7 +296,7 @@ impl Session_ {
// This exists to help with refactoring to eliminate impossible
// cases later on
pub fn impossible_case(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp, fmt!("Impossible case reached: %s", msg));
self.span_bug(sp, format!("Impossible case reached: {}", msg));
}
pub fn verbose(&self) -> bool { self.debugging_opt(verbose) }
pub fn time_passes(&self) -> bool { self.debugging_opt(time_passes) }

@ -78,7 +78,7 @@ impl fold::ast_fold for TestHarnessGenerator {
fn fold_item(&self, i: @ast::item) -> Option<@ast::item> {
self.cx.path.push(i.ident);
debug!("current path: %s",
debug2!("current path: {}",
ast_util::path_name_i(self.cx.path.clone()));
if is_test_fn(self.cx, i) || is_bench_fn(i) {
@ -91,7 +91,7 @@ impl fold::ast_fold for TestHarnessGenerator {
tests");
}
_ => {
debug!("this is a test function");
debug2!("this is a test function");
let test = Test {
span: i.span,
path: self.cx.path.clone(),
@ -100,7 +100,7 @@ impl fold::ast_fold for TestHarnessGenerator {
should_fail: should_fail(i)
};
self.cx.testfns.push(test);
// debug!("have %u test/bench functions",
// debug2!("have {} test/bench functions",
// cx.testfns.len());
}
}
@ -327,7 +327,7 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::item {
span: dummy_sp(),
};
debug!("Synthetic test module:\n%s\n",
debug2!("Synthetic test module:\n{}\n",
pprust::item_to_str(@item.clone(), cx.sess.intr()));
return @item;
@ -381,7 +381,7 @@ fn is_extra(crate: &ast::Crate) -> bool {
}
fn mk_test_descs(cx: &TestCtxt) -> @ast::Expr {
debug!("building test vector from %u tests", cx.testfns.len());
debug2!("building test vector from {} tests", cx.testfns.len());
let mut descs = ~[];
for test in cx.testfns.iter() {
descs.push(mk_test_desc_and_fn_rec(cx, test));
@ -404,7 +404,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {
let span = test.span;
let path = test.path.clone();
debug!("encoding %s", ast_util::path_name_i(path));
debug2!("encoding {}", ast_util::path_name_i(path));
let name_lit: ast::lit =
nospan(ast::lit_str(ast_util::path_name_i(path).to_managed()));

@ -2270,7 +2270,7 @@ impl TypeNames {
Metadata => ~"Metadata",
X86_MMX => ~"X86_MMAX",
Integer => {
fmt!("i%d", llvm::LLVMGetIntTypeWidth(ty.to_ref()) as int)
format!("i{}", llvm::LLVMGetIntTypeWidth(ty.to_ref()) as int)
}
Function => {
let out_ty = ty.return_type();
@ -2278,25 +2278,25 @@ impl TypeNames {
let args =
args.map(|&ty| self.type_to_str_depth(ty, depth-1)).connect(", ");
let out_ty = self.type_to_str_depth(out_ty, depth-1);
fmt!("fn(%s) -> %s", args, out_ty)
format!("fn({}) -> {}", args, out_ty)
}
Struct => {
let tys = ty.field_types();
let tys = tys.map(|&ty| self.type_to_str_depth(ty, depth-1)).connect(", ");
fmt!("{%s}", tys)
format!("\\{{}\\}", tys)
}
Array => {
let el_ty = ty.element_type();
let el_ty = self.type_to_str_depth(el_ty, depth-1);
let len = ty.array_length();
fmt!("[%s x %u]", el_ty, len)
format!("[{} x {}]", el_ty, len)
}
Pointer => {
let el_ty = ty.element_type();
let el_ty = self.type_to_str_depth(el_ty, depth-1);
fmt!("*%s", el_ty)
format!("*{}", el_ty)
}
_ => fail!("Unknown Type Kind (%u)", kind as uint)
_ => fail2!("Unknown Type Kind ({})", kind as uint)
}
}
}
@ -2307,7 +2307,7 @@ impl TypeNames {
pub fn types_to_str(&self, tys: &[Type]) -> ~str {
let strs = tys.map(|t| self.type_to_str(*t));
fmt!("[%s]", strs.connect(","))
format!("[{}]", strs.connect(","))
}
pub fn val_to_str(&self, val: ValueRef) -> ~str {

@ -74,11 +74,11 @@ struct cache_entry {
}
fn dump_crates(crate_cache: &[cache_entry]) {
debug!("resolved crates:");
debug2!("resolved crates:");
for entry in crate_cache.iter() {
debug!("cnum: %?", entry.cnum);
debug!("span: %?", entry.span);
debug!("hash: %?", entry.hash);
debug2!("cnum: {:?}", entry.cnum);
debug2!("span: {:?}", entry.span);
debug2!("hash: {:?}", entry.hash);
}
}
@ -97,7 +97,7 @@ fn warn_if_multiple_versions(e: @mut Env,
if matches.len() != 1u {
diag.handler().warn(
fmt!("using multiple versions of crate `%s`", name));
format!("using multiple versions of crate `{}`", name));
for match_ in matches.iter() {
diag.span_note(match_.span, "used here");
let attrs = ~[
@ -154,7 +154,7 @@ fn visit_view_item(e: @mut Env, i: &ast::view_item) {
}
}
};
debug!("resolving extern mod stmt. ident: %?, meta: %?",
debug2!("resolving extern mod stmt. ident: {:?}, meta: {:?}",
ident, meta_items);
let cnum = resolve_crate(e,
ident,
@ -317,7 +317,7 @@ fn resolve_crate(e: @mut Env,
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map {
debug!("resolving deps of external crate");
debug2!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate
// numbers
let mut cnum_map = HashMap::new();
@ -326,18 +326,18 @@ fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map {
let extrn_cnum = dep.cnum;
let cname_str = token::ident_to_str(&dep.name);
let cmetas = metas_with(dep.vers, @"vers", ~[]);
debug!("resolving dep crate %s ver: %s hash: %s",
debug2!("resolving dep crate {} ver: {} hash: {}",
cname_str, dep.vers, dep.hash);
match existing_match(e,
metas_with_ident(cname_str, cmetas.clone()),
dep.hash) {
Some(local_cnum) => {
debug!("already have it");
debug2!("already have it");
// We've already seen this crate
cnum_map.insert(extrn_cnum, local_cnum);
}
None => {
debug!("need to load it");
debug2!("need to load it");
// This is a new one so we've got to load it
// FIXME (#2404): Need better error reporting than just a bogus
// span.

@ -210,17 +210,17 @@ pub fn get_field_type(tcx: ty::ctxt, class_id: ast::DefId,
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, class_id.crate);
let all_items = reader::get_doc(reader::Doc(cdata.data), tag_items);
debug!("Looking up %?", class_id);
debug2!("Looking up {:?}", class_id);
let class_doc = expect(tcx.diag,
decoder::maybe_find_item(class_id.node, all_items),
|| fmt!("get_field_type: class ID %? not found",
|| format!("get_field_type: class ID {:?} not found",
class_id) );
debug!("looking up %? : %?", def, class_doc);
debug2!("looking up {:?} : {:?}", def, class_doc);
let the_field = expect(tcx.diag,
decoder::maybe_find_item(def.node, class_doc),
|| fmt!("get_field_type: in class %?, field ID %? not found",
|| format!("get_field_type: in class {:?}, field ID {:?} not found",
class_id, def) );
debug!("got field data %?", the_field);
debug2!("got field data {:?}", the_field);
let ty = decoder::item_type(def, the_field, tcx, cdata);
ty::ty_param_bounds_and_ty {
generics: ty::Generics {type_param_defs: @~[],

@ -152,7 +152,7 @@ pub fn get_dep_hashes(cstore: &CStore) -> ~[@str] {
let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data);
let vers = decoder::get_crate_vers(cdata.data);
debug!("Add hash[%s]: %s %s", cdata.name, vers, hash);
debug2!("Add hash[{}]: {} {}", cdata.name, vers, hash);
result.push(crate_hash {
name: cdata.name,
vers: vers,
@ -164,9 +164,9 @@ pub fn get_dep_hashes(cstore: &CStore) -> ~[@str] {
(a.name, a.vers, a.hash) <= (b.name, b.vers, b.hash)
};
debug!("sorted:");
debug2!("sorted:");
for x in sorted.iter() {
debug!(" hash[%s]: %s", x.name, x.hash);
debug2!(" hash[{}]: {}", x.name, x.hash);
}
sorted.map(|ch| ch.hash)

@ -89,7 +89,7 @@ pub fn maybe_find_item(item_id: int, items: ebml::Doc) -> Option<ebml::Doc> {
fn find_item(item_id: int, items: ebml::Doc) -> ebml::Doc {
match maybe_find_item(item_id, items) {
None => fail!("lookup_item: id not found: %d", item_id),
None => fail2!("lookup_item: id not found: {}", item_id),
Some(d) => d
}
}
@ -148,7 +148,7 @@ fn item_family(item: ebml::Doc) -> Family {
'g' => PublicField,
'j' => PrivateField,
'N' => InheritedField,
c => fail!("unexpected family char: %c", c)
c => fail2!("unexpected family char: {}", c)
}
}
@ -160,7 +160,7 @@ fn item_visibility(item: ebml::Doc) -> ast::visibility {
'y' => ast::public,
'n' => ast::private,
'i' => ast::inherited,
_ => fail!("unknown visibility character")
_ => fail2!("unknown visibility character")
}
}
}
@ -494,8 +494,8 @@ pub enum DefLike {
pub fn def_like_to_def(def_like: DefLike) -> ast::Def {
match def_like {
DlDef(def) => return def,
DlImpl(*) => fail!("found impl in def_like_to_def"),
DlField => fail!("found field in def_like_to_def")
DlImpl(*) => fail2!("found impl in def_like_to_def"),
DlField => fail2!("found field in def_like_to_def")
}
}
@ -550,14 +550,14 @@ impl<'self> EachItemContext<'self> {
let def_like = item_to_def_like(doc, def_id, self.cdata.cnum);
match def_like {
DlDef(def) => {
debug!("(iterating over each item of a module) processing \
`%s` (def %?)",
debug2!("(iterating over each item of a module) processing \
`{}` (def {:?})",
*self.path_builder,
def);
}
_ => {
debug!("(iterating over each item of a module) processing \
`%s` (%d:%d)",
debug2!("(iterating over each item of a module) processing \
`{}` ({}:{})",
*self.path_builder,
def_id.crate,
def_id.node);
@ -631,8 +631,8 @@ impl<'self> EachItemContext<'self> {
reader::get_doc(root, tag_items)
};
debug!("(iterating over each item of a module) looking up item \
%d:%d in `%s`, crate %d",
debug2!("(iterating over each item of a module) looking up item \
{}:{} in `{}`, crate {}",
child_def_id.crate,
child_def_id.node,
*self.path_builder,
@ -644,8 +644,8 @@ impl<'self> EachItemContext<'self> {
Some(child_item_doc) => {
// Push the name.
let child_name = item_name(self.intr, child_item_doc);
debug!("(iterating over each item of a module) pushing \
name `%s` onto `%s`",
debug2!("(iterating over each item of a module) pushing \
name `{}` onto `{}`",
token::ident_to_str(&child_name),
*self.path_builder);
let old_len =
@ -682,9 +682,9 @@ impl<'self> EachItemContext<'self> {
let name = name_doc.as_str_slice();
// Push the name.
debug!("(iterating over each item of a module) pushing \
reexported name `%s` onto `%s` (crate %d, orig %d, \
in crate %d)",
debug2!("(iterating over each item of a module) pushing \
reexported name `{}` onto `{}` (crate {}, orig {}, \
in crate {})",
name,
*self.path_builder,
def_id.crate,
@ -899,7 +899,7 @@ pub fn maybe_get_item_ast(cdata: Cmd, tcx: ty::ctxt,
id: ast::NodeId,
decode_inlined_item: decode_inlined_item)
-> csearch::found_ast {
debug!("Looking up item: %d", id);
debug2!("Looking up item: {}", id);
let item_doc = lookup_item(id, cdata.data);
let path = {
let item_path = item_path(item_doc);
@ -964,7 +964,7 @@ fn get_explicit_self(item: ebml::Doc) -> ast::explicit_self_ {
match ch as char {
'i' => ast::MutImmutable,
'm' => ast::MutMutable,
_ => fail!("unknown mutability character: `%c`", ch as char),
_ => fail2!("unknown mutability character: `{}`", ch as char),
}
}
@ -982,7 +982,7 @@ fn get_explicit_self(item: ebml::Doc) -> ast::explicit_self_ {
return ast::sty_region(None, get_mutability(string[1]));
}
_ => {
fail!("unknown self type code: `%c`", explicit_self_kind as char);
fail2!("unknown self type code: `{}`", explicit_self_kind as char);
}
}
}
@ -1163,7 +1163,7 @@ pub fn get_static_methods_if_impl(intr: @ident_interner,
match item_family(impl_method_doc) {
StaticMethod => purity = ast::impure_fn,
UnsafeStaticMethod => purity = ast::unsafe_fn,
_ => fail!()
_ => fail2!()
}
static_impl_methods.push(StaticMethodInfo {
@ -1199,7 +1199,7 @@ fn struct_field_family_to_visibility(family: Family) -> ast::visibility {
PublicField => ast::public,
PrivateField => ast::private,
InheritedField => ast::inherited,
_ => fail!()
_ => fail2!()
}
}
@ -1265,7 +1265,7 @@ fn describe_def(items: ebml::Doc, id: ast::DefId) -> ~str {
if id.crate != ast::LOCAL_CRATE { return ~"external"; }
let it = match maybe_find_item(id.node, items) {
Some(it) => it,
None => fail!("describe_def: item not found %?", id)
None => fail2!("describe_def: item not found {:?}", id)
};
return item_family_to_str(item_family(it));
}
@ -1355,17 +1355,17 @@ fn list_meta_items(intr: @ident_interner,
out: @io::Writer) {
let r = get_meta_items(meta_items);
for mi in r.iter() {
out.write_str(fmt!("%s\n", pprust::meta_item_to_str(*mi, intr)));
out.write_str(format!("{}\n", pprust::meta_item_to_str(*mi, intr)));
}
}
fn list_crate_attributes(intr: @ident_interner, md: ebml::Doc, hash: &str,
out: @io::Writer) {
out.write_str(fmt!("=Crate Attributes (%s)=\n", hash));
out.write_str(format!("=Crate Attributes ({})=\n", hash));
let r = get_attributes(md);
for attr in r.iter() {
out.write_str(fmt!("%s\n", pprust::attribute_to_str(attr, intr)));
out.write_str(format!("{}\n", pprust::attribute_to_str(attr, intr)));
}
out.write_str("\n\n");
@ -1409,7 +1409,7 @@ fn list_crate_deps(data: @~[u8], out: @io::Writer) {
let r = get_crate_deps(data);
for dep in r.iter() {
out.write_str(
fmt!("%d %s-%s-%s\n",
format!("{} {}-{}-{}\n",
dep.cnum, token::ident_to_str(&dep.name), dep.hash, dep.vers));
}
@ -1452,7 +1452,7 @@ pub fn translate_def_id(cdata: Cmd, did: ast::DefId) -> ast::DefId {
match cdata.cnum_map.find(&did.crate) {
option::Some(&n) => ast::DefId { crate: n, node: did.node },
option::None => fail!("didn't find a crate in the cnum_map")
option::None => fail2!("didn't find a crate in the cnum_map")
}
}

@ -185,7 +185,7 @@ fn encode_family(ebml_w: &mut writer::Encoder, c: char) {
}
pub fn def_to_str(did: DefId) -> ~str {
fmt!("%d:%d", did.crate, did.node)
format!("{}:{}", did.crate, did.node)
}
fn encode_ty_type_param_defs(ebml_w: &mut writer::Encoder,
@ -284,12 +284,12 @@ fn encode_symbol(ecx: &EncodeContext,
ebml_w.start_tag(tag_items_data_item_symbol);
match ecx.item_symbols.find(&id) {
Some(x) => {
debug!("encode_symbol(id=%?, str=%s)", id, *x);
debug2!("encode_symbol(id={:?}, str={})", id, *x);
ebml_w.writer.write(x.as_bytes());
}
None => {
ecx.diag.handler().bug(
fmt!("encode_symbol: id not found %d", id));
format!("encode_symbol: id not found {}", id));
}
}
ebml_w.end_tag();
@ -339,7 +339,7 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
path: &[ast_map::path_elt],
index: @mut ~[entry<i64>],
generics: &ast::Generics) {
debug!("encode_enum_variant_info(id=%?)", id);
debug2!("encode_enum_variant_info(id={:?})", id);
let mut disr_val = 0;
let mut i = 0;
@ -425,14 +425,14 @@ fn encode_reexported_static_method(ecx: &EncodeContext,
exp: &middle::resolve::Export2,
method_def_id: DefId,
method_ident: Ident) {
debug!("(encode reexported static method) %s::%s",
debug2!("(encode reexported static method) {}::{}",
exp.name, ecx.tcx.sess.str_of(method_ident));
ebml_w.start_tag(tag_items_data_item_reexport);
ebml_w.start_tag(tag_items_data_item_reexport_def_id);
ebml_w.wr_str(def_to_str(method_def_id));
ebml_w.end_tag();
ebml_w.start_tag(tag_items_data_item_reexport_name);
ebml_w.wr_str(fmt!("%s::%s", exp.name, ecx.tcx.sess.str_of(method_ident)));
ebml_w.wr_str(format!("{}::{}", exp.name, ecx.tcx.sess.str_of(method_ident)));
ebml_w.end_tag();
ebml_w.end_tag();
}
@ -498,14 +498,14 @@ fn encode_reexported_static_methods(ecx: &EncodeContext,
if mod_path != *path || exp.name != original_name {
if !encode_reexported_static_base_methods(ecx, ebml_w, exp) {
if encode_reexported_static_trait_methods(ecx, ebml_w, exp) {
debug!(fmt!("(encode reexported static methods) %s \
[trait]",
original_name));
debug2!("(encode reexported static methods) {} \
[trait]",
original_name);
}
}
else {
debug!(fmt!("(encode reexported static methods) %s [base]",
original_name));
debug2!("(encode reexported static methods) {} [base]",
original_name);
}
}
}
@ -552,13 +552,13 @@ fn encode_reexports(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder,
id: NodeId,
path: &[ast_map::path_elt]) {
debug!("(encoding info for module) encoding reexports for %d", id);
debug2!("(encoding info for module) encoding reexports for {}", id);
match ecx.reexports2.find(&id) {
Some(ref exports) => {
debug!("(encoding info for module) found reexports for %d", id);
debug2!("(encoding info for module) found reexports for {}", id);
for exp in exports.iter() {
debug!("(encoding info for module) reexport '%s' (%d/%d) for \
%d",
debug2!("(encoding info for module) reexport '{}' ({}/{}) for \
{}",
exp.name,
exp.def_id.crate,
exp.def_id.node,
@ -575,7 +575,7 @@ fn encode_reexports(ecx: &EncodeContext,
}
}
None => {
debug!("(encoding info for module) found no reexports for %d",
debug2!("(encoding info for module) found no reexports for {}",
id);
}
}
@ -592,7 +592,7 @@ fn encode_info_for_mod(ecx: &EncodeContext,
encode_def_id(ebml_w, local_def(id));
encode_family(ebml_w, 'm');
encode_name(ecx, ebml_w, name);
debug!("(encoding info for module) encoding info for module ID %d", id);
debug2!("(encoding info for module) encoding info for module ID {}", id);
// Encode info about all the module children.
for item in md.items.iter() {
@ -610,8 +610,8 @@ fn encode_info_for_mod(ecx: &EncodeContext,
match item.node {
item_impl(*) => {
let (ident, did) = (item.ident, item.id);
debug!("(encoding info for module) ... encoding impl %s \
(%?/%?)",
debug2!("(encoding info for module) ... encoding impl {} \
({:?}/{:?})",
ecx.tcx.sess.str_of(ident),
did,
ast_map::node_id_to_str(ecx.tcx.items, did, token::get_ident_interner()));
@ -628,7 +628,7 @@ fn encode_info_for_mod(ecx: &EncodeContext,
// Encode the reexports of this module, if this module is public.
if vis == public {
debug!("(encoding info for module) encoding reexports for %d", id);
debug2!("(encoding info for module) encoding reexports for {}", id);
encode_reexports(ecx, ebml_w, id, path);
}
@ -730,7 +730,7 @@ fn encode_info_for_struct(ecx: &EncodeContext,
index.push(entry {val: id as i64, pos: ebml_w.writer.tell()});
global_index.push(entry {val: id as i64, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
debug!("encode_info_for_struct: doing %s %d",
debug2!("encode_info_for_struct: doing {} {}",
tcx.sess.str_of(nm), id);
encode_struct_field_family(ebml_w, vis);
encode_name(ecx, ebml_w, nm);
@ -794,7 +794,7 @@ fn encode_info_for_method(ecx: &EncodeContext,
parent_id: NodeId,
ast_method_opt: Option<@method>) {
debug!("encode_info_for_method: %? %s", m.def_id,
debug2!("encode_info_for_method: {:?} {}", m.def_id,
ecx.tcx.sess.str_of(m.ident));
ebml_w.start_tag(tag_items_data_item);
@ -834,7 +834,7 @@ fn purity_static_method_family(p: purity) -> char {
match p {
unsafe_fn => 'U',
impure_fn => 'F',
_ => fail!("extern fn can't be static")
_ => fail2!("extern fn can't be static")
}
}
@ -893,7 +893,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
}
let add_to_index: &fn() = || add_to_index_(item, ebml_w, index);
debug!("encoding info for item at %s",
debug2!("encoding info for item at {}",
ecx.tcx.sess.codemap.span_to_str(item.span));
let def_id = local_def(item.id);
@ -1220,7 +1220,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
// Encode inherent implementations for this trait.
encode_inherent_implementations(ecx, ebml_w, def_id);
}
item_mac(*) => fail!("item macros unimplemented")
item_mac(*) => fail2!("item macros unimplemented")
}
}
@ -1279,7 +1279,7 @@ fn my_visit_item(i:@item, items: ast_map::map, ebml_w:&writer::Encoder,
};
encode_info_for_item(ecx, &mut ebml_w, i, index, *pt, vis);
}
_ => fail!("bad item")
_ => fail2!("bad item")
}
}
@ -1287,7 +1287,7 @@ fn my_visit_foreign_item(ni:@foreign_item, items: ast_map::map, ebml_w:&writer::
ecx_ptr:*int, index: @mut ~[entry<i64>]) {
match items.get_copy(&ni.id) {
ast_map::node_foreign_item(_, abi, _, pt) => {
debug!("writing foreign item %s::%s",
debug2!("writing foreign item {}::{}",
ast_map::path_to_str(
*pt,
token::get_ident_interner()),
@ -1304,7 +1304,7 @@ fn my_visit_foreign_item(ni:@foreign_item, items: ast_map::map, ebml_w:&writer::
abi);
}
// case for separate item and foreign-item tables
_ => fail!("bad foreign item")
_ => fail2!("bad foreign item")
}
}

@ -53,7 +53,7 @@ pub fn mk_filesearch(maybe_sysroot: &Option<@Path>,
let mut visited_dirs = HashSet::new();
let mut found = false;
debug!("filesearch: searching additional lib search paths [%?]",
debug2!("filesearch: searching additional lib search paths [{:?}]",
self.addl_lib_search_paths.len());
for path in self.addl_lib_search_paths.iter() {
match f(path) {
@ -63,7 +63,7 @@ pub fn mk_filesearch(maybe_sysroot: &Option<@Path>,
visited_dirs.insert(path.to_str());
}
debug!("filesearch: searching target lib path");
debug2!("filesearch: searching target lib path");
let tlib_path = make_target_lib_path(self.sysroot,
self.target_triple);
if !visited_dirs.contains(&tlib_path.to_str()) {
@ -78,7 +78,7 @@ pub fn mk_filesearch(maybe_sysroot: &Option<@Path>,
let rustpath = rust_path();
for path in rustpath.iter() {
let tlib_path = make_rustpkg_target_lib_path(path, self.target_triple);
debug!("is %s in visited_dirs? %?", tlib_path.to_str(),
debug2!("is {} in visited_dirs? {:?}", tlib_path.to_str(),
visited_dirs.contains(&tlib_path.to_str()));
if !visited_dirs.contains(&tlib_path.to_str()) {
@ -104,7 +104,7 @@ pub fn mk_filesearch(maybe_sysroot: &Option<@Path>,
}
let sysroot = get_sysroot(maybe_sysroot);
debug!("using sysroot = %s", sysroot.to_str());
debug2!("using sysroot = {}", sysroot.to_str());
@FileSearchImpl {
sysroot: sysroot,
addl_lib_search_paths: addl_lib_search_paths,
@ -114,19 +114,19 @@ pub fn mk_filesearch(maybe_sysroot: &Option<@Path>,
pub fn search(filesearch: @FileSearch, pick: pick) {
do filesearch.for_each_lib_search_path() |lib_search_path| {
debug!("searching %s", lib_search_path.to_str());
debug2!("searching {}", lib_search_path.to_str());
let r = os::list_dir_path(lib_search_path);
let mut rslt = FileDoesntMatch;
for path in r.iter() {
debug!("testing %s", path.to_str());
debug2!("testing {}", path.to_str());
let maybe_picked = pick(path);
match maybe_picked {
FileMatches => {
debug!("picked %s", path.to_str());
debug2!("picked {}", path.to_str());
rslt = FileMatches;
}
FileDoesntMatch => {
debug!("rejected %s", path.to_str());
debug2!("rejected {}", path.to_str());
}
}
}
@ -153,7 +153,7 @@ fn make_rustpkg_target_lib_path(dir: &Path,
pub fn get_or_default_sysroot() -> Path {
match os::self_exe_path() {
option::Some(ref p) => (*p).pop(),
option::None => fail!("can't determine value for sysroot")
option::None => fail2!("can't determine value for sysroot")
}
}

@ -59,7 +59,7 @@ pub fn load_library_crate(cx: &Context) -> (~str, @~[u8]) {
Some(t) => t,
None => {
cx.diag.span_fatal(cx.span,
fmt!("can't find crate for `%s`",
format!("can't find crate for `{}`",
cx.ident));
}
}
@ -90,7 +90,7 @@ fn find_library_crate_aux(
) -> Option<(~str, @~[u8])> {
let crate_name = crate_name_from_metas(cx.metas);
// want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
let prefix = fmt!("%s%s-", prefix, crate_name);
let prefix = format!("{}{}-", prefix, crate_name);
let mut matches = ~[];
filesearch::search(filesearch, |path| -> FileMatch {
let path_str = path.filename();
@ -98,20 +98,20 @@ fn find_library_crate_aux(
None => FileDoesntMatch,
Some(path_str) =>
if path_str.starts_with(prefix) && path_str.ends_with(suffix) {
debug!("%s is a candidate", path.to_str());
debug2!("{} is a candidate", path.to_str());
match get_metadata_section(cx.os, path) {
Some(cvec) =>
if !crate_matches(cvec, cx.metas, cx.hash) {
debug!("skipping %s, metadata doesn't match",
debug2!("skipping {}, metadata doesn't match",
path.to_str());
FileDoesntMatch
} else {
debug!("found %s with matching metadata", path.to_str());
debug2!("found {} with matching metadata", path.to_str());
matches.push((path.to_str(), cvec));
FileMatches
},
_ => {
debug!("could not load metadata for %s", path.to_str());
debug2!("could not load metadata for {}", path.to_str());
FileDoesntMatch
}
}
@ -127,12 +127,12 @@ fn find_library_crate_aux(
1 => Some(matches[0]),
_ => {
cx.diag.span_err(
cx.span, fmt!("multiple matching crates for `%s`", crate_name));
cx.span, format!("multiple matching crates for `{}`", crate_name));
cx.diag.handler().note("candidates:");
for pair in matches.iter() {
let ident = pair.first();
let data = pair.second();
cx.diag.handler().note(fmt!("path: %s", ident));
cx.diag.handler().note(format!("path: {}", ident));
let attrs = decoder::get_crate_attributes(data);
note_linkage_attrs(cx.intr, cx.diag, attrs);
}
@ -149,7 +149,7 @@ pub fn crate_name_from_metas(metas: &[@ast::MetaItem]) -> @str {
_ => {}
}
}
fail!("expected to find the crate name")
fail2!("expected to find the crate name")
}
pub fn package_id_from_metas(metas: &[@ast::MetaItem]) -> Option<@str> {
@ -167,7 +167,7 @@ pub fn note_linkage_attrs(intr: @ident_interner,
attrs: ~[ast::Attribute]) {
let r = attr::find_linkage_metas(attrs);
for mi in r.iter() {
diag.handler().note(fmt!("meta: %s", pprust::meta_item_to_str(*mi,intr)));
diag.handler().note(format!("meta: {}", pprust::meta_item_to_str(*mi,intr)));
}
}
@ -188,7 +188,7 @@ pub fn metadata_matches(extern_metas: &[@ast::MetaItem],
// extern_metas: metas we read from the crate
// local_metas: metas we're looking for
debug!("matching %u metadata requirements against %u items",
debug2!("matching {} metadata requirements against {} items",
local_metas.len(), extern_metas.len());
do local_metas.iter().all |needed| {
@ -211,14 +211,14 @@ fn get_metadata_section(os: Os,
while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
let name_buf = llvm::LLVMGetSectionName(si.llsi);
let name = str::raw::from_c_str(name_buf);
debug!("get_metadata_section: name %s", name);
debug2!("get_metadata_section: name {}", name);
if read_meta_section_name(os) == name {
let cbuf = llvm::LLVMGetSectionContents(si.llsi);
let csz = llvm::LLVMGetSectionSize(si.llsi) as uint;
let mut found = None;
let cvbuf: *u8 = cast::transmute(cbuf);
let vlen = encoder::metadata_encoding_version.len();
debug!("checking %u bytes of metadata-version stamp",
debug2!("checking {} bytes of metadata-version stamp",
vlen);
let minsz = num::min(vlen, csz);
let mut version_ok = false;
@ -229,7 +229,7 @@ fn get_metadata_section(os: Os,
if !version_ok { return None; }
let cvbuf1 = ptr::offset(cvbuf, vlen as int);
debug!("inflating %u bytes of compressed metadata",
debug2!("inflating {} bytes of compressed metadata",
csz - vlen);
do vec::raw::buf_as_slice(cvbuf1, csz-vlen) |bytes| {
let inflated = flate::inflate_bytes(bytes);
@ -273,7 +273,7 @@ pub fn list_file_metadata(intr: @ident_interner,
match get_metadata_section(os, path) {
option::Some(bytes) => decoder::list_crate_metadata(intr, bytes, out),
option::None => {
out.write_str(fmt!("could not find metadata in %s.\n", path.to_str()))
out.write_str(format!("could not find metadata in {}.\n", path.to_str()))
}
}
}

@ -80,10 +80,10 @@ fn scan<R>(st: &mut PState, is_last: &fn(char) -> bool,
op: &fn(&[u8]) -> R) -> R
{
let start_pos = st.pos;
debug!("scan: '%c' (start)", st.data[st.pos] as char);
debug2!("scan: '{}' (start)", st.data[st.pos] as char);
while !is_last(st.data[st.pos] as char) {
st.pos += 1;
debug!("scan: '%c'", st.data[st.pos] as char);
debug2!("scan: '{}'", st.data[st.pos] as char);
}
let end_pos = st.pos;
st.pos += 1;
@ -161,7 +161,7 @@ fn parse_sigil(st: &mut PState) -> ast::Sigil {
'@' => ast::ManagedSigil,
'~' => ast::OwnedSigil,
'&' => ast::BorrowedSigil,
c => st.tcx.sess.bug(fmt!("parse_sigil(): bad input '%c'", c))
c => st.tcx.sess.bug(format!("parse_sigil(): bad input '{}'", c))
}
}
@ -179,7 +179,7 @@ fn parse_vstore(st: &mut PState) -> ty::vstore {
'~' => ty::vstore_uniq,
'@' => ty::vstore_box,
'&' => ty::vstore_slice(parse_region(st)),
c => st.tcx.sess.bug(fmt!("parse_vstore(): bad input '%c'", c))
c => st.tcx.sess.bug(format!("parse_vstore(): bad input '{}'", c))
}
}
@ -188,7 +188,7 @@ fn parse_trait_store(st: &mut PState) -> ty::TraitStore {
'~' => ty::UniqTraitStore,
'@' => ty::BoxTraitStore,
'&' => ty::RegionTraitStore(parse_region(st)),
c => st.tcx.sess.bug(fmt!("parse_trait_store(): bad input '%c'", c))
c => st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'", c))
}
}
@ -221,7 +221,7 @@ fn parse_region_substs(st: &mut PState) -> ty::RegionSubsts {
assert_eq!(next(st), '.');
ty::NonerasedRegions(regions)
}
_ => fail!("parse_bound_region: bad input")
_ => fail2!("parse_bound_region: bad input")
}
}
@ -239,7 +239,7 @@ fn parse_bound_region(st: &mut PState) -> ty::bound_region {
assert_eq!(next(st), '|');
ty::br_cap_avoid(id, @parse_bound_region(st))
},
_ => fail!("parse_bound_region: bad input")
_ => fail2!("parse_bound_region: bad input")
}
}
@ -268,7 +268,7 @@ fn parse_region(st: &mut PState) -> ty::Region {
'e' => {
ty::re_static
}
_ => fail!("parse_region: bad input")
_ => fail2!("parse_region: bad input")
}
}
@ -276,7 +276,7 @@ fn parse_opt<T>(st: &mut PState, f: &fn(&mut PState) -> T) -> Option<T> {
match next(st) {
'n' => None,
's' => Some(f(st)),
_ => fail!("parse_opt: bad input")
_ => fail2!("parse_opt: bad input")
}
}
@ -317,7 +317,7 @@ fn parse_ty(st: &mut PState, conv: conv_did) -> ty::t {
'D' => return ty::mk_mach_int(ast::ty_i64),
'f' => return ty::mk_mach_float(ast::ty_f32),
'F' => return ty::mk_mach_float(ast::ty_f64),
_ => fail!("parse_ty: bad numeric type")
_ => fail2!("parse_ty: bad numeric type")
}
}
'c' => return ty::mk_char(),
@ -340,7 +340,7 @@ fn parse_ty(st: &mut PState, conv: conv_did) -> ty::t {
}
'p' => {
let did = parse_def(st, TypeParameter, conv);
debug!("parsed ty_param: did=%?", did);
debug2!("parsed ty_param: did={:?}", did);
return ty::mk_param(st.tcx, parse_uint(st), did);
}
's' => {
@ -417,7 +417,7 @@ fn parse_ty(st: &mut PState, conv: conv_did) -> ty::t {
assert_eq!(next(st), ']');
return ty::mk_struct(st.tcx, did, substs);
}
c => { error!("unexpected char in type string: %c", c); fail!();}
c => { error2!("unexpected char in type string: {}", c); fail2!();}
}
}
@ -467,7 +467,7 @@ fn parse_purity(c: char) -> purity {
'u' => unsafe_fn,
'i' => impure_fn,
'c' => extern_fn,
_ => fail!("parse_purity: bad purity %c", c)
_ => fail2!("parse_purity: bad purity {}", c)
}
}
@ -488,7 +488,7 @@ fn parse_onceness(c: char) -> ast::Onceness {
match c {
'o' => ast::Once,
'm' => ast::Many,
_ => fail!("parse_onceness: bad onceness")
_ => fail2!("parse_onceness: bad onceness")
}
}
@ -539,8 +539,8 @@ pub fn parse_def_id(buf: &[u8]) -> ast::DefId {
let len = buf.len();
while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1u; }
if colon_idx == len {
error!("didn't find ':' when parsing def id");
fail!();
error2!("didn't find ':' when parsing def id");
fail2!();
}
let crate_part = buf.slice(0u, colon_idx);
@ -548,12 +548,12 @@ pub fn parse_def_id(buf: &[u8]) -> ast::DefId {
let crate_num = match uint::parse_bytes(crate_part, 10u) {
Some(cn) => cn as int,
None => fail!("internal error: parse_def_id: crate number expected, but found %?",
None => fail2!("internal error: parse_def_id: crate number expected, but found {:?}",
crate_part)
};
let def_num = match uint::parse_bytes(def_part, 10u) {
Some(dn) => dn as int,
None => fail!("internal error: parse_def_id: id expected, but found %?",
None => fail2!("internal error: parse_def_id: id expected, but found {:?}",
def_part)
};
ast::DefId { crate: crate_num, node: def_num }
@ -599,7 +599,7 @@ fn parse_bounds(st: &mut PState, conv: conv_did) -> ty::ParamBounds {
return param_bounds;
}
_ => {
fail!("parse_bounds: bad bounds")
fail2!("parse_bounds: bad bounds")
}
}
}

@ -86,7 +86,7 @@ pub fn enc_ty(w: @io::Writer, cx: @ctxt, t: ty::t) {
let abbrev_len = 3u + estimate_sz(pos) + estimate_sz(len);
if abbrev_len < len {
// I.e. it's actually an abbreviation.
let s = fmt!("#%x:%x#", pos, len).to_managed();
let s = format!("\\#{:x}:{:x}\\#", pos, len).to_managed();
let a = ty_abbrev { pos: pos, len: len, s: s };
abbrevs.insert(t, a);
}
@ -336,18 +336,18 @@ fn enc_sty(w: @io::Writer, cx: @ctxt, st: &ty::sty) {
}
ty::ty_opaque_box => w.write_char('B'),
ty::ty_struct(def, ref substs) => {
debug!("~~~~ %s", "a[");
debug2!("~~~~ {}", "a[");
w.write_str(&"a[");
let s = (cx.ds)(def);
debug!("~~~~ %s", s);
debug2!("~~~~ {}", s);
w.write_str(s);
debug!("~~~~ %s", "|");
debug2!("~~~~ {}", "|");
w.write_char('|');
enc_substs(w, cx, substs);
debug!("~~~~ %s", "]");
debug2!("~~~~ {}", "]");
w.write_char(']');
}
ty::ty_err => fail!("Shouldn't encode error type")
ty::ty_err => fail2!("Shouldn't encode error type")
}
}

@ -84,7 +84,7 @@ pub fn encode_inlined_item(ecx: &e::EncodeContext,
path: &[ast_map::path_elt],
ii: ast::inlined_item,
maps: Maps) {
debug!("> Encoding inlined item: %s::%s (%u)",
debug2!("> Encoding inlined item: {}::{} ({})",
ast_map::path_to_str(path, token::get_ident_interner()),
ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell());
@ -97,7 +97,7 @@ pub fn encode_inlined_item(ecx: &e::EncodeContext,
encode_side_tables_for_ii(ecx, maps, ebml_w, &ii);
ebml_w.end_tag();
debug!("< Encoded inlined fn: %s::%s (%u)",
debug2!("< Encoded inlined fn: {}::{} ({})",
ast_map::path_to_str(path, token::get_ident_interner()),
ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell());
@ -117,7 +117,7 @@ pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
match par_doc.opt_child(c::tag_ast) {
None => None,
Some(ast_doc) => {
debug!("> Decoding inlined fn: %s::?",
debug2!("> Decoding inlined fn: {}::?",
ast_map::path_to_str(path, token::get_ident_interner()));
let mut ast_dsr = reader::Decoder(ast_doc);
let from_id_range = Decodable::decode(&mut ast_dsr);
@ -129,8 +129,8 @@ pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
};
let raw_ii = decode_ast(ast_doc);
let ii = renumber_ast(xcx, raw_ii);
debug!("Fn named: %s", tcx.sess.str_of(ii.ident()));
debug!("< Decoded inlined fn: %s::%s",
debug2!("Fn named: {}", tcx.sess.str_of(ii.ident()));
debug2!("< Decoded inlined fn: {}::{}",
ast_map::path_to_str(path, token::get_ident_interner()),
tcx.sess.str_of(ii.ident()));
ast_map::map_decoded_item(tcx.sess.diagnostic(),
@ -140,7 +140,7 @@ pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
decode_side_tables(xcx, ast_doc);
match ii {
ast::ii_item(i) => {
debug!(">>> DECODED ITEM >>>\n%s\n<<< DECODED ITEM <<<",
debug2!(">>> DECODED ITEM >>>\n{}\n<<< DECODED ITEM <<<",
syntax::print::pprust::item_to_str(i, tcx.sess.intr()));
}
_ => { }
@ -305,7 +305,7 @@ impl fold::ast_fold for NestedItemsDropper {
node: ast::DeclItem(_),
span: _
}, _) => None,
ast::StmtMac(*) => fail!("unexpanded macro in astencode")
ast::StmtMac(*) => fail2!("unexpanded macro in astencode")
}
}.collect();
let blk_sans_items = ast::Block {
@ -741,7 +741,7 @@ impl vtable_decoder_helpers for reader::Decoder {
)
}
// hard to avoid - user input
_ => fail!("bad enum variant")
_ => fail2!("bad enum variant")
}
}
}
@ -896,7 +896,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
id: ast::NodeId) {
let tcx = ecx.tcx;
debug!("Encoding side tables for id %d", id);
debug2!("Encoding side tables for id {}", id);
{
let r = tcx.def_map.find(&id);
@ -1091,7 +1091,7 @@ impl ebml_decoder_decoder_helpers for reader::Decoder {
xcx.dcx.tcx,
|s, a| this.convert_def_id(xcx, s, a));
debug!("read_ty(%s) = %s",
debug2!("read_ty({}) = {}",
type_string(doc),
ty_to_str(xcx.dcx.tcx, ty));
@ -1176,7 +1176,7 @@ impl ebml_decoder_decoder_helpers for reader::Decoder {
NominalType | TypeWithId => xcx.tr_def_id(did),
TypeParameter => xcx.tr_intern_def_id(did)
};
debug!("convert_def_id(source=%?, did=%?)=%?", source, did, r);
debug2!("convert_def_id(source={:?}, did={:?})={:?}", source, did, r);
return r;
}
}
@ -1189,14 +1189,14 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
let id0 = entry_doc.get(c::tag_table_id as uint).as_int();
let id = xcx.tr_id(id0);
debug!(">> Side table document with tag 0x%x \
found for id %d (orig %d)",
debug2!(">> Side table document with tag 0x{:x} \
found for id {} (orig {})",
tag, id, id0);
match c::astencode_tag::from_uint(tag) {
None => {
xcx.dcx.tcx.sess.bug(
fmt!("unknown tag found in side tables: %x", tag));
format!("unknown tag found in side tables: {:x}", tag));
}
Some(value) => {
let val_doc = entry_doc.get(c::tag_table_val as uint);
@ -1210,7 +1210,7 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
}
c::tag_table_node_type => {
let ty = val_dsr.read_ty(xcx);
debug!("inserting ty for node %?: %s",
debug2!("inserting ty for node {:?}: {}",
id, ty_to_str(dcx.tcx, ty));
dcx.tcx.node_types.insert(id as uint, ty);
}
@ -1257,13 +1257,13 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
}
_ => {
xcx.dcx.tcx.sess.bug(
fmt!("unknown tag found in side tables: %x", tag));
format!("unknown tag found in side tables: {:x}", tag));
}
}
}
}
debug!(">< Side table doc loaded");
debug2!(">< Side table doc loaded");
true
};
}
@ -1381,6 +1381,6 @@ fn test_simplification() {
== pprust::item_to_str(item_exp,
token::get_ident_interner()));
}
_ => fail!()
_ => fail2!()
}
}

@ -65,7 +65,7 @@ pub fn check_loans(bccx: &BorrowckCtxt,
move_data: move_data::FlowedMoveData,
all_loans: &[Loan],
body: &ast::Block) {
debug!("check_loans(body id=%?)", body.id);
debug2!("check_loans(body id={:?})", body.id);
let mut clcx = CheckLoanCtxt {
bccx: bccx,
@ -94,12 +94,12 @@ impl<'self> CheckLoanCtxt<'self> {
MoveWhileBorrowed(loan_path, loan_span) => {
self.bccx.span_err(
cap_var.span,
fmt!("cannot move `%s` into closure \
format!("cannot move `{}` into closure \
because it is borrowed",
self.bccx.loan_path_to_str(move_path)));
self.bccx.span_note(
loan_span,
fmt!("borrow of `%s` occurs here",
format!("borrow of `{}` occurs here",
self.bccx.loan_path_to_str(loan_path)));
}
}
@ -197,10 +197,10 @@ impl<'self> CheckLoanCtxt<'self> {
//! issued when we enter `scope_id` (for example, we do not
//! permit two `&mut` borrows of the same variable).
debug!("check_for_conflicting_loans(scope_id=%?)", scope_id);
debug2!("check_for_conflicting_loans(scope_id={:?})", scope_id);
let new_loan_indices = self.loans_generated_by(scope_id);
debug!("new_loan_indices = %?", new_loan_indices);
debug2!("new_loan_indices = {:?}", new_loan_indices);
do self.each_issued_loan(scope_id) |issued_loan| {
for &new_loan_index in new_loan_indices.iter() {
@ -225,7 +225,7 @@ impl<'self> CheckLoanCtxt<'self> {
//! Checks whether `old_loan` and `new_loan` can safely be issued
//! simultaneously.
debug!("report_error_if_loans_conflict(old_loan=%s, new_loan=%s)",
debug2!("report_error_if_loans_conflict(old_loan={}, new_loan={})",
old_loan.repr(self.tcx()),
new_loan.repr(self.tcx()));
@ -249,8 +249,8 @@ impl<'self> CheckLoanCtxt<'self> {
//! Checks whether the restrictions introduced by `loan1` would
//! prohibit `loan2`. Returns false if an error is reported.
debug!("report_error_if_loan_conflicts_with_restriction(\
loan1=%s, loan2=%s)",
debug2!("report_error_if_loan_conflicts_with_restriction(\
loan1={}, loan2={})",
loan1.repr(self.tcx()),
loan2.repr(self.tcx()));
@ -260,7 +260,7 @@ impl<'self> CheckLoanCtxt<'self> {
ImmutableMutability => RESTR_ALIAS | RESTR_FREEZE,
ConstMutability => RESTR_ALIAS,
};
debug!("illegal_if=%?", illegal_if);
debug2!("illegal_if={:?}", illegal_if);
for restr in loan1.restrictions.iter() {
if !restr.set.intersects(illegal_if) { loop; }
@ -270,12 +270,12 @@ impl<'self> CheckLoanCtxt<'self> {
(MutableMutability, MutableMutability) => {
self.bccx.span_err(
new_loan.span,
fmt!("cannot borrow `%s` as mutable \
format!("cannot borrow `{}` as mutable \
more than once at a time",
self.bccx.loan_path_to_str(new_loan.loan_path)));
self.bccx.span_note(
old_loan.span,
fmt!("second borrow of `%s` as mutable occurs here",
format!("second borrow of `{}` as mutable occurs here",
self.bccx.loan_path_to_str(new_loan.loan_path)));
return false;
}
@ -283,14 +283,14 @@ impl<'self> CheckLoanCtxt<'self> {
_ => {
self.bccx.span_err(
new_loan.span,
fmt!("cannot borrow `%s` as %s because \
it is also borrowed as %s",
format!("cannot borrow `{}` as {} because \
it is also borrowed as {}",
self.bccx.loan_path_to_str(new_loan.loan_path),
self.bccx.mut_to_str(new_loan.mutbl),
self.bccx.mut_to_str(old_loan.mutbl)));
self.bccx.span_note(
old_loan.span,
fmt!("second borrow of `%s` occurs here",
format!("second borrow of `{}` occurs here",
self.bccx.loan_path_to_str(new_loan.loan_path)));
return false;
}
@ -317,7 +317,7 @@ impl<'self> CheckLoanCtxt<'self> {
* is using a moved/uninitialized value
*/
debug!("check_if_path_is_moved(id=%?, use_kind=%?, lp=%s)",
debug2!("check_if_path_is_moved(id={:?}, use_kind={:?}, lp={})",
id, use_kind, lp.repr(self.bccx.tcx));
do self.move_data.each_move_of(id, lp) |move, moved_lp| {
self.bccx.report_use_of_moved_value(
@ -338,7 +338,7 @@ impl<'self> CheckLoanCtxt<'self> {
Some(&adj) => self.bccx.cat_expr_autoderefd(expr, adj)
};
debug!("check_assignment(cmt=%s)", cmt.repr(self.tcx()));
debug2!("check_assignment(cmt={})", cmt.repr(self.tcx()));
// Mutable values can be assigned, as long as they obey loans
// and aliasing restrictions:
@ -372,7 +372,7 @@ impl<'self> CheckLoanCtxt<'self> {
// Otherwise, just a plain error.
self.bccx.span_err(
expr.span,
fmt!("cannot assign to %s %s",
format!("cannot assign to {} {}",
cmt.mutbl.to_user_str(),
self.bccx.cmt_to_str(cmt)));
return;
@ -387,7 +387,7 @@ impl<'self> CheckLoanCtxt<'self> {
let mut cmt = cmt;
loop {
debug!("mark_writes_through_upvars_as_used_mut(cmt=%s)",
debug2!("mark_writes_through_upvars_as_used_mut(cmt={})",
cmt.repr(this.tcx()));
match cmt.cat {
mc::cat_local(id) |
@ -435,7 +435,7 @@ impl<'self> CheckLoanCtxt<'self> {
//! Safety checks related to writes to aliasable, mutable locations
let guarantor = cmt.guarantor();
debug!("check_for_aliasable_mutable_writes(cmt=%s, guarantor=%s)",
debug2!("check_for_aliasable_mutable_writes(cmt={}, guarantor={})",
cmt.repr(this.tcx()), guarantor.repr(this.tcx()));
match guarantor.cat {
mc::cat_deref(b, _, mc::region_ptr(MutMutable, _)) => {
@ -451,7 +451,7 @@ impl<'self> CheckLoanCtxt<'self> {
id: guarantor.id,
derefs: deref_count
};
debug!("Inserting write guard at %?", key);
debug2!("Inserting write guard at {:?}", key);
this.bccx.write_guard_map.insert(key);
}
@ -646,11 +646,11 @@ impl<'self> CheckLoanCtxt<'self> {
loan: &Loan) {
self.bccx.span_err(
expr.span,
fmt!("cannot assign to `%s` because it is borrowed",
format!("cannot assign to `{}` because it is borrowed",
self.bccx.loan_path_to_str(loan_path)));
self.bccx.span_note(
loan.span,
fmt!("borrow of `%s` occurs here",
format!("borrow of `{}` occurs here",
self.bccx.loan_path_to_str(loan_path)));
}
@ -674,12 +674,12 @@ impl<'self> CheckLoanCtxt<'self> {
MoveWhileBorrowed(loan_path, loan_span) => {
self.bccx.span_err(
span,
fmt!("cannot move out of `%s` \
format!("cannot move out of `{}` \
because it is borrowed",
self.bccx.loan_path_to_str(move_path)));
self.bccx.span_note(
loan_span,
fmt!("borrow of `%s` occurs here",
format!("borrow of `{}` occurs here",
self.bccx.loan_path_to_str(loan_path)));
}
}
@ -690,7 +690,7 @@ impl<'self> CheckLoanCtxt<'self> {
pub fn analyze_move_out_from(&self,
expr_id: ast::NodeId,
move_path: @LoanPath) -> MoveError {
debug!("analyze_move_out_from(expr_id=%?, move_path=%s)",
debug2!("analyze_move_out_from(expr_id={:?}, move_path={})",
expr_id, move_path.repr(self.tcx()));
// FIXME(#4384) inadequare if/when we permit `move a.b`
@ -772,12 +772,12 @@ fn check_loans_in_fn<'a>(this: &mut CheckLoanCtxt<'a>,
MoveWhileBorrowed(loan_path, loan_span) => {
this.bccx.span_err(
cap_var.span,
fmt!("cannot move `%s` into closure \
format!("cannot move `{}` into closure \
because it is borrowed",
this.bccx.loan_path_to_str(move_path)));
this.bccx.span_note(
loan_span,
fmt!("borrow of `%s` occurs here",
format!("borrow of `{}` occurs here",
this.bccx.loan_path_to_str(loan_path)));
}
}
@ -794,7 +794,7 @@ fn check_loans_in_expr<'a>(this: &mut CheckLoanCtxt<'a>,
expr: @ast::Expr) {
visit::walk_expr(this, expr, ());
debug!("check_loans_in_expr(expr=%s)",
debug2!("check_loans_in_expr(expr={})",
expr.repr(this.tcx()));
this.check_for_conflicting_loans(expr.id);
@ -805,7 +805,7 @@ fn check_loans_in_expr<'a>(this: &mut CheckLoanCtxt<'a>,
ast::ExprPath(*) => {
if !this.move_data.is_assignee(expr.id) {
let cmt = this.bccx.cat_expr_unadjusted(expr);
debug!("path cmt=%s", cmt.repr(this.tcx()));
debug2!("path cmt={}", cmt.repr(this.tcx()));
let r = opt_loan_path(cmt);
for &lp in r.iter() {
this.check_if_path_is_moved(expr.id, expr.span, MovedInUse, lp);

@ -105,7 +105,7 @@ fn check_is_legal_to_move_from(bccx: &BorrowckCtxt,
mc::cat_deref(_, _, mc::unsafe_ptr(*)) => {
bccx.span_err(
cmt0.span,
fmt!("cannot move out of %s",
format!("cannot move out of {}",
bccx.cmt_to_str(cmt)));
false
}
@ -120,7 +120,7 @@ fn check_is_legal_to_move_from(bccx: &BorrowckCtxt,
};
bccx.span_err(
cmt0.span,
fmt!("cannot move out of %s%s", bccx.cmt_to_str(cmt), once_hint));
format!("cannot move out of {}{}", bccx.cmt_to_str(cmt), once_hint));
false
}
@ -158,7 +158,7 @@ fn check_is_legal_to_move_from(bccx: &BorrowckCtxt,
if ty::has_dtor(bccx.tcx, did) {
bccx.span_err(
cmt0.span,
fmt!("cannot move out of type `%s`, \
format!("cannot move out of type `{}`, \
which defines the `Drop` trait",
b.ty.user_string(bccx.tcx)));
false

@ -27,7 +27,7 @@ pub fn guarantee_lifetime(bccx: &BorrowckCtxt,
cmt: mc::cmt,
loan_region: ty::Region,
loan_mutbl: LoanMutability) {
debug!("guarantee_lifetime(cmt=%s, loan_region=%s)",
debug2!("guarantee_lifetime(cmt={}, loan_region={})",
cmt.repr(bccx.tcx), loan_region.repr(bccx.tcx));
let ctxt = GuaranteeLifetimeContext {bccx: bccx,
item_scope_id: item_scope_id,
@ -101,7 +101,7 @@ impl<'self> GuaranteeLifetimeContext<'self> {
// L-Deref-Managed-Mut-Compiler-Root
self.check_root(cmt, base, derefs, ptr_mutbl, discr_scope);
} else {
debug!("omitting root, base=%s, base_scope=%?",
debug2!("omitting root, base={}, base_scope={:?}",
base.repr(self.tcx()), base_scope);
}
}
@ -189,8 +189,8 @@ impl<'self> GuaranteeLifetimeContext<'self> {
derefs: uint,
ptr_mutbl: ast::Mutability,
discr_scope: Option<ast::NodeId>) {
debug!("check_root(cmt_deref=%s, cmt_base=%s, derefs=%?, ptr_mutbl=%?, \
discr_scope=%?)",
debug2!("check_root(cmt_deref={}, cmt_base={}, derefs={:?}, ptr_mutbl={:?}, \
discr_scope={:?})",
cmt_deref.repr(self.tcx()),
cmt_base.repr(self.tcx()),
derefs,
@ -213,7 +213,7 @@ impl<'self> GuaranteeLifetimeContext<'self> {
// the check above should fail for anything is not re_scope
self.bccx.tcx.sess.span_bug(
cmt_base.span,
fmt!("Cannot issue root for scope region: %?",
format!("Cannot issue root for scope region: {:?}",
self.loan_region));
}
};
@ -247,7 +247,7 @@ impl<'self> GuaranteeLifetimeContext<'self> {
// FIXME(#3511) grow to the nearest cleanup scope---this can
// cause observable errors if freezing!
if !self.bccx.tcx.region_maps.is_cleanup_scope(root_scope) {
debug!("%? is not a cleanup scope, adjusting", root_scope);
debug2!("{:?} is not a cleanup scope, adjusting", root_scope);
let cleanup_scope =
self.bccx.tcx.region_maps.cleanup_scope(root_scope);
@ -255,8 +255,8 @@ impl<'self> GuaranteeLifetimeContext<'self> {
if opt_dyna.is_some() {
self.tcx().sess.span_warn(
self.span,
fmt!("Dynamic freeze scope artifically extended \
(see Issue #6248)"));
format!("Dynamic freeze scope artifically extended \
(see Issue \\#6248)"));
note_and_explain_region(
self.bccx.tcx,
"managed value only needs to be frozen for ",
@ -277,7 +277,7 @@ impl<'self> GuaranteeLifetimeContext<'self> {
let root_info = RootInfo {scope: root_scope, freeze: opt_dyna};
self.bccx.root_map.insert(rm_key, root_info);
debug!("root_key: %? root_info: %?", rm_key, root_info);
debug2!("root_key: {:?} root_info: {:?}", rm_key, root_info);
}
fn check_scope(&self, max_scope: ty::Region) {
@ -310,7 +310,7 @@ impl<'self> GuaranteeLifetimeContext<'self> {
r @ mc::cat_discr(*) => {
self.tcx().sess.span_bug(
cmt.span,
fmt!("illegal guarantor category: %?", r));
format!("illegal guarantor category: {:?}", r));
}
}
}


@ -136,7 +136,7 @@ fn gather_loans_in_fn(this: &mut GatherLoanCtxt,
id: ast::NodeId) {
match fk {
&visit::fk_item_fn(*) | &visit::fk_method(*) => {
fail!("cannot occur, due to visit_item override");
fail2!("cannot occur, due to visit_item override");
}
// Visit closures as part of the containing item.
@ -196,7 +196,7 @@ fn gather_loans_in_expr(this: &mut GatherLoanCtxt,
let bccx = this.bccx;
let tcx = bccx.tcx;
debug!("gather_loans_in_expr(expr=%?/%s)",
debug2!("gather_loans_in_expr(expr={:?}/{})",
ex.id, pprust::expr_to_str(ex, tcx.sess.intr()));
this.id_range.add(ex.id);
@ -330,20 +330,20 @@ impl<'self> GatherLoanCtxt<'self> {
pub fn guarantee_adjustments(&mut self,
expr: @ast::Expr,
adjustment: &ty::AutoAdjustment) {
debug!("guarantee_adjustments(expr=%s, adjustment=%?)",
debug2!("guarantee_adjustments(expr={}, adjustment={:?})",
expr.repr(self.tcx()), adjustment);
let _i = indenter();
match *adjustment {
ty::AutoAddEnv(*) => {
debug!("autoaddenv -- no autoref");
debug2!("autoaddenv -- no autoref");
return;
}
ty::AutoDerefRef(
ty::AutoDerefRef {
autoref: None, _ }) => {
debug!("no autoref");
debug2!("no autoref");
return;
}
@ -355,7 +355,7 @@ impl<'self> GatherLoanCtxt<'self> {
tcx: self.tcx(),
method_map: self.bccx.method_map};
let cmt = mcx.cat_expr_autoderefd(expr, autoderefs);
debug!("after autoderef, cmt=%s", cmt.repr(self.tcx()));
debug2!("after autoderef, cmt={}", cmt.repr(self.tcx()));
match *autoref {
ty::AutoPtr(r, m) => {
@ -412,8 +412,8 @@ impl<'self> GatherLoanCtxt<'self> {
cmt: mc::cmt,
req_mutbl: LoanMutability,
loan_region: ty::Region) {
debug!("guarantee_valid(borrow_id=%?, cmt=%s, \
req_mutbl=%?, loan_region=%?)",
debug2!("guarantee_valid(borrow_id={:?}, cmt={}, \
req_mutbl={:?}, loan_region={:?})",
borrow_id,
cmt.repr(self.tcx()),
req_mutbl,
@ -470,16 +470,16 @@ impl<'self> GatherLoanCtxt<'self> {
ty::re_infer(*) => {
self.tcx().sess.span_bug(
cmt.span,
fmt!("Invalid borrow lifetime: %?", loan_region));
format!("Invalid borrow lifetime: {:?}", loan_region));
}
};
debug!("loan_scope = %?", loan_scope);
debug2!("loan_scope = {:?}", loan_scope);
let gen_scope = self.compute_gen_scope(borrow_id, loan_scope);
debug!("gen_scope = %?", gen_scope);
debug2!("gen_scope = {:?}", gen_scope);
let kill_scope = self.compute_kill_scope(loan_scope, loan_path);
debug!("kill_scope = %?", kill_scope);
debug2!("kill_scope = {:?}", kill_scope);
if req_mutbl == MutableMutability {
self.mark_loan_path_as_mutated(loan_path);
@ -499,7 +499,7 @@ impl<'self> GatherLoanCtxt<'self> {
}
};
debug!("guarantee_valid(borrow_id=%?), loan=%s",
debug2!("guarantee_valid(borrow_id={:?}), loan={}",
borrow_id, loan.repr(self.tcx()));
// let loan_path = loan.loan_path;
@ -785,7 +785,7 @@ impl<'self> GatherLoanCtxt<'self> {
_ => {
self.tcx().sess.span_bug(
pat.span,
fmt!("Type of slice pattern is not a slice"));
format!("Type of slice pattern is not a slice"));
}
}
}


@ -117,7 +117,7 @@ pub fn check_crate(
fn make_stat(bccx: &mut BorrowckCtxt, stat: uint) -> ~str {
let stat_f = stat as float;
let total = bccx.stats.guaranteed_paths as float;
fmt!("%u (%.0f%%)", stat , stat_f * 100f / total)
format!("{} ({:.0f}%)", stat , stat_f * 100f / total)
}
}
@ -135,7 +135,7 @@ fn borrowck_fn(this: &mut BorrowckCtxt,
&visit::fk_item_fn(*) |
&visit::fk_method(*) => {
debug!("borrowck_fn(id=%?)", id);
debug2!("borrowck_fn(id={:?})", id);
// Check the body of fn items.
let (id_range, all_loans, move_data) =
@ -561,7 +561,7 @@ impl BorrowckCtxt {
move_data::Declared => {
self.tcx.sess.span_err(
use_span,
fmt!("%s of possibly uninitialized value: `%s`",
format!("{} of possibly uninitialized value: `{}`",
verb,
self.loan_path_to_str(lp)));
}
@ -569,7 +569,7 @@ impl BorrowckCtxt {
let partially = if lp == moved_lp {""} else {"partially "};
self.tcx.sess.span_err(
use_span,
fmt!("%s of %smoved value: `%s`",
format!("{} of {}moved value: `{}`",
verb,
partially,
self.loan_path_to_str(lp)));
@ -585,7 +585,7 @@ impl BorrowckCtxt {
"moved by default (use `copy` to override)");
self.tcx.sess.span_note(
expr.span,
fmt!("`%s` moved here because it has type `%s`, which is %s",
format!("`{}` moved here because it has type `{}`, which is {}",
self.loan_path_to_str(moved_lp),
expr_ty.user_string(self.tcx), suggestion));
}
@ -594,7 +594,7 @@ impl BorrowckCtxt {
let pat_ty = ty::node_id_to_type(self.tcx, pat.id);
self.tcx.sess.span_note(
pat.span,
fmt!("`%s` moved here because it has type `%s`, \
format!("`{}` moved here because it has type `{}`, \
which is moved by default (use `ref` to override)",
self.loan_path_to_str(moved_lp),
pat_ty.user_string(self.tcx)));
@ -607,8 +607,8 @@ impl BorrowckCtxt {
capture that instead to override)");
self.tcx.sess.span_note(
expr.span,
fmt!("`%s` moved into closure environment here because it \
has type `%s`, which is %s",
format!("`{}` moved into closure environment here because it \
has type `{}`, which is {}",
self.loan_path_to_str(moved_lp),
expr_ty.user_string(self.tcx), suggestion));
}
@ -634,11 +634,11 @@ impl BorrowckCtxt {
&move_data::Assignment) {
self.tcx.sess.span_err(
span,
fmt!("re-assignment of immutable variable `%s`",
format!("re-assignment of immutable variable `{}`",
self.loan_path_to_str(lp)));
self.tcx.sess.span_note(
assign.span,
fmt!("prior assignment occurs here"));
format!("prior assignment occurs here"));
}
pub fn span_err(&self, s: Span, m: &str) {
@ -652,23 +652,23 @@ impl BorrowckCtxt {
pub fn bckerr_to_str(&self, err: BckError) -> ~str {
match err.code {
err_mutbl(lk) => {
fmt!("cannot borrow %s %s as %s",
format!("cannot borrow {} {} as {}",
err.cmt.mutbl.to_user_str(),
self.cmt_to_str(err.cmt),
self.mut_to_str(lk))
}
err_out_of_root_scope(*) => {
fmt!("cannot root managed value long enough")
format!("cannot root managed value long enough")
}
err_out_of_scope(*) => {
fmt!("borrowed value does not live long enough")
format!("borrowed value does not live long enough")
}
err_freeze_aliasable_const => {
// Means that the user borrowed a ~T or enum value
// residing in &const or @const pointer. Terrible
// error message, but then &const and @const are
// supposed to be going away.
fmt!("unsafe borrow of aliasable, const value")
format!("unsafe borrow of aliasable, const value")
}
}
}
@ -686,19 +686,19 @@ impl BorrowckCtxt {
mc::AliasableOther => {
self.tcx.sess.span_err(
span,
fmt!("%s in an aliasable location", prefix));
format!("{} in an aliasable location", prefix));
}
mc::AliasableManaged(ast::MutMutable) => {
// FIXME(#6269) reborrow @mut to &mut
self.tcx.sess.span_err(
span,
fmt!("%s in a `@mut` pointer; \
format!("{} in a `@mut` pointer; \
try borrowing as `&mut` first", prefix));
}
mc::AliasableManaged(m) => {
self.tcx.sess.span_err(
span,
fmt!("%s in a `@%s` pointer; \
format!("{} in a `@{}` pointer; \
try an `@mut` instead",
prefix,
self.mut_to_keyword(m)));
@ -706,7 +706,7 @@ impl BorrowckCtxt {
mc::AliasableBorrowed(m) => {
self.tcx.sess.span_err(
span,
fmt!("%s in a `&%s` pointer; \
format!("{} in a `&{}` pointer; \
try an `&mut` instead",
prefix,
self.mut_to_keyword(m)));
@ -774,7 +774,7 @@ impl BorrowckCtxt {
}
r => {
self.tcx.sess.bug(
fmt!("Loan path LpVar(%?) maps to %?, not local",
format!("Loan path LpVar({:?}) maps to {:?}, not local",
id, r));
}
}
@ -849,7 +849,7 @@ impl DataFlowOperator for LoanDataFlowOperator {
impl Repr for Loan {
fn repr(&self, tcx: ty::ctxt) -> ~str {
fmt!("Loan_%?(%s, %?, %?-%?, %s)",
format!("Loan_{:?}({}, {:?}, {:?}-{:?}, {})",
self.index,
self.loan_path.repr(tcx),
self.mutbl,
@ -861,7 +861,7 @@ impl Repr for Loan {
impl Repr for Restriction {
fn repr(&self, tcx: ty::ctxt) -> ~str {
fmt!("Restriction(%s, %x)",
format!("Restriction({}, {:x})",
self.loan_path.repr(tcx),
self.set.bits as uint)
}
@ -871,15 +871,15 @@ impl Repr for LoanPath {
fn repr(&self, tcx: ty::ctxt) -> ~str {
match self {
&LpVar(id) => {
fmt!("$(%?)", id)
format!("$({:?})", id)
}
&LpExtend(lp, _, LpDeref(_)) => {
fmt!("%s.*", lp.repr(tcx))
format!("{}.*", lp.repr(tcx))
}
&LpExtend(lp, _, LpInterior(ref interior)) => {
fmt!("%s.%s", lp.repr(tcx), interior.repr(tcx))
format!("{}.{}", lp.repr(tcx), interior.repr(tcx))
}
}
}


@ -244,7 +244,7 @@ impl MoveData {
}
};
debug!("move_path(lp=%s, index=%?)",
debug2!("move_path(lp={}, index={:?})",
lp.repr(tcx),
index);
@ -304,7 +304,7 @@ impl MoveData {
* location `id` with kind `kind`.
*/
debug!("add_move(lp=%s, id=%?, kind=%?)",
debug2!("add_move(lp={}, id={:?}, kind={:?})",
lp.repr(tcx),
id,
kind);
@ -334,7 +334,7 @@ impl MoveData {
* location `id` with the given `span`.
*/
debug!("add_assignment(lp=%s, assign_id=%?, assignee_id=%?",
debug2!("add_assignment(lp={}, assign_id={:?}, assignee_id={:?}",
lp.repr(tcx), assign_id, assignee_id);
let path_index = self.move_path(tcx, lp);
@ -348,12 +348,12 @@ impl MoveData {
};
if self.is_var_path(path_index) {
debug!("add_assignment[var](lp=%s, assignment=%u, path_index=%?)",
debug2!("add_assignment[var](lp={}, assignment={}, path_index={:?})",
lp.repr(tcx), self.var_assignments.len(), path_index);
self.var_assignments.push(assignment);
} else {
debug!("add_assignment[path](lp=%s, path_index=%?)",
debug2!("add_assignment[path](lp={}, path_index={:?})",
lp.repr(tcx), path_index);
self.path_assignments.push(assignment);


@ -239,7 +239,7 @@ impl CFGBuilder {
expr_exit
}
ast::ExprForLoop(*) => fail!("non-desugared expr_for_loop"),
ast::ExprForLoop(*) => fail2!("non-desugared expr_for_loop"),
ast::ExprLoop(ref body, _) => {
//
@ -504,13 +504,13 @@ impl CFGBuilder {
}
self.tcx.sess.span_bug(
expr.span,
fmt!("No loop scope for id %?", loop_id));
format!("No loop scope for id {:?}", loop_id));
}
r => {
self.tcx.sess.span_bug(
expr.span,
fmt!("Bad entry `%?` in def_map for label", r));
format!("Bad entry `{:?}` in def_map for label", r));
}
}
}


@ -153,7 +153,7 @@ pub fn check_expr(v: &mut CheckCrateVisitor,
Some(&DefStruct(_)) => { }
Some(&def) => {
debug!("(checking const) found bad def: %?", def);
debug2!("(checking const) found bad def: {:?}", def);
sess.span_err(
e.span,
"paths in constants may only refer to \
@ -266,7 +266,7 @@ impl Visitor<()> for CheckItemRecursionVisitor {
ast_map::node_item(it, _) => {
self.visit_item(it, ());
}
_ => fail!("const not bound to an item")
_ => fail2!("const not bound to an item")
},
_ => ()
},


@ -86,8 +86,8 @@ pub fn check_expr(v: &mut CheckMatchVisitor,
if (*arms).is_empty() {
if !type_is_empty(cx.tcx, pat_ty) {
// We know the type is inhabited, so this must be wrong
cx.tcx.sess.span_err(ex.span, fmt!("non-exhaustive patterns: \
type %s is non-empty",
cx.tcx.sess.span_err(ex.span, format!("non-exhaustive patterns: \
type {} is non-empty",
ty_to_str(cx.tcx, pat_ty)));
}
// If the type *is* empty, it's vacuously exhaustive
@ -180,20 +180,20 @@ pub fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) {
ty::ty_enum(id, _) => {
let vid = match *ctor {
variant(id) => id,
_ => fail!("check_exhaustive: non-variant ctor"),
_ => fail2!("check_exhaustive: non-variant ctor"),
};
let variants = ty::enum_variants(cx.tcx, id);
match variants.iter().find(|v| v.id == vid) {
Some(v) => Some(cx.tcx.sess.str_of(v.name)),
None => {
fail!("check_exhaustive: bad variant in ctor")
fail2!("check_exhaustive: bad variant in ctor")
}
}
}
ty::ty_unboxed_vec(*) | ty::ty_evec(*) => {
match *ctor {
vec(n) => Some(fmt!("vectors of length %u", n).to_managed()),
vec(n) => Some(format!("vectors of length {}", n).to_managed()),
_ => None
}
}
@ -202,7 +202,7 @@ pub fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) {
}
};
let msg = ~"non-exhaustive patterns" + match ext {
Some(ref s) => fmt!(": %s not covered", *s),
Some(ref s) => format!(": {} not covered", *s),
None => ~""
};
cx.tcx.sess.span_err(sp, msg);
@ -408,7 +408,7 @@ pub fn missing_ctor(cx: &MatchCheckCtxt,
return Some(variant(v.id));
}
}
fail!();
fail2!();
} else { None }
}
ty::ty_nil => None,
@ -420,7 +420,7 @@ pub fn missing_ctor(cx: &MatchCheckCtxt,
None => (),
Some(val(const_bool(true))) => true_found = true,
Some(val(const_bool(false))) => false_found = true,
_ => fail!("impossible case")
_ => fail2!("impossible case")
}
}
if true_found && false_found { None }
@ -510,10 +510,10 @@ pub fn ctor_arity(cx: &MatchCheckCtxt, ctor: &ctor, ty: ty::t) -> uint {
ty::ty_box(_) | ty::ty_uniq(_) | ty::ty_rptr(*) => 1u,
ty::ty_enum(eid, _) => {
let id = match *ctor { variant(id) => id,
_ => fail!("impossible case") };
_ => fail2!("impossible case") };
match ty::enum_variants(cx.tcx, eid).iter().find(|v| v.id == id ) {
Some(v) => v.args.len(),
None => fail!("impossible case")
None => fail2!("impossible case")
}
}
ty::ty_struct(cid, _) => ty::lookup_struct_fields(cx.tcx, cid).len(),
@ -584,7 +584,7 @@ pub fn specialize(cx: &MatchCheckCtxt,
}
}
single => true,
_ => fail!("type error")
_ => fail2!("type error")
};
if match_ {
Some(r.tail().to_owned())
@ -631,7 +631,7 @@ pub fn specialize(cx: &MatchCheckCtxt,
}
}
single => true,
_ => fail!("type error")
_ => fail2!("type error")
};
if match_ {
Some(r.tail().to_owned())
@ -693,7 +693,7 @@ pub fn specialize(cx: &MatchCheckCtxt,
_ => {
cx.tcx.sess.span_bug(
pat_span,
fmt!("struct pattern resolved to %s, \
format!("struct pattern resolved to {}, \
not a struct",
ty_to_str(cx.tcx, left_ty)));
}
@ -739,7 +739,7 @@ pub fn specialize(cx: &MatchCheckCtxt,
}
}
single => true,
_ => fail!("type error")
_ => fail2!("type error")
};
if match_ { Some(r.tail().to_owned()) } else { None }
}
@ -748,7 +748,7 @@ pub fn specialize(cx: &MatchCheckCtxt,
val(ref v) => (*v, *v),
range(ref lo, ref hi) => (*lo, *hi),
single => return Some(r.tail().to_owned()),
_ => fail!("type error")
_ => fail2!("type error")
};
let v_lo = eval_const_expr(cx.tcx, lo);
let v_hi = eval_const_expr(cx.tcx, hi);
@ -929,8 +929,8 @@ pub fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
_ => {
cx.tcx.sess.span_bug(
p.span,
fmt!("Binding pattern %d is \
not an identifier: %?",
format!("Binding pattern {} is \
not an identifier: {:?}",
p.id, p.node));
}
}


@ -103,20 +103,20 @@ impl<O:DataFlowOperator> pprust::pp_ann for DataFlowContext<O> {
let gens = self.gens.slice(start, end);
let gens_str = if gens.iter().any(|&u| u != 0) {
fmt!(" gen: %s", bits_to_str(gens))
format!(" gen: {}", bits_to_str(gens))
} else {
~""
};
let kills = self.kills.slice(start, end);
let kills_str = if kills.iter().any(|&u| u != 0) {
fmt!(" kill: %s", bits_to_str(kills))
format!(" kill: {}", bits_to_str(kills))
} else {
~""
};
let comment_str = fmt!("id %d: %s%s%s",
id, entry_str, gens_str, kills_str);
let comment_str = format!("id {}: {}{}{}",
id, entry_str, gens_str, kills_str);
pprust::synth_comment(ps, comment_str);
pp::space(ps.s);
}
@ -131,7 +131,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
bits_per_id: uint) -> DataFlowContext<O> {
let words_per_id = (bits_per_id + uint::bits - 1) / uint::bits;
debug!("DataFlowContext::new(id_range=%?, bits_per_id=%?, words_per_id=%?)",
debug2!("DataFlowContext::new(id_range={:?}, bits_per_id={:?}, words_per_id={:?})",
id_range, bits_per_id, words_per_id);
let gens = ~[];
@ -154,7 +154,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
pub fn add_gen(&mut self, id: ast::NodeId, bit: uint) {
//! Indicates that `id` generates `bit`
debug!("add_gen(id=%?, bit=%?)", id, bit);
debug2!("add_gen(id={:?}, bit={:?})", id, bit);
let (start, end) = self.compute_id_range(id);
{
let gens = self.gens.mut_slice(start, end);
@ -165,7 +165,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
pub fn add_kill(&mut self, id: ast::NodeId, bit: uint) {
//! Indicates that `id` kills `bit`
debug!("add_kill(id=%?, bit=%?)", id, bit);
debug2!("add_kill(id={:?}, bit={:?})", id, bit);
let (start, end) = self.compute_id_range(id);
{
let kills = self.kills.mut_slice(start, end);
@ -176,7 +176,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
fn apply_gen_kill(&mut self, id: ast::NodeId, bits: &mut [uint]) {
//! Applies the gen and kill sets for `id` to `bits`
debug!("apply_gen_kill(id=%?, bits=%s) [before]",
debug2!("apply_gen_kill(id={:?}, bits={}) [before]",
id, mut_bits_to_str(bits));
let (start, end) = self.compute_id_range(id);
let gens = self.gens.slice(start, end);
@ -184,17 +184,17 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
let kills = self.kills.slice(start, end);
bitwise(bits, kills, |a, b| a & !b);
debug!("apply_gen_kill(id=%?, bits=%s) [after]",
debug2!("apply_gen_kill(id={:?}, bits={}) [after]",
id, mut_bits_to_str(bits));
}
fn apply_kill(&mut self, id: ast::NodeId, bits: &mut [uint]) {
debug!("apply_kill(id=%?, bits=%s) [before]",
debug2!("apply_kill(id={:?}, bits={}) [before]",
id, mut_bits_to_str(bits));
let (start, end) = self.compute_id_range(id);
let kills = self.kills.slice(start, end);
bitwise(bits, kills, |a, b| a & !b);
debug!("apply_kill(id=%?, bits=%s) [after]",
debug2!("apply_kill(id={:?}, bits={}) [after]",
id, mut_bits_to_str(bits));
}
@ -242,7 +242,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
}
let (start, end) = self.compute_id_range_frozen(id);
let on_entry = self.on_entry.slice(start, end);
debug!("each_bit_on_entry_frozen(id=%?, on_entry=%s)",
debug2!("each_bit_on_entry_frozen(id={:?}, on_entry={})",
id, bits_to_str(on_entry));
self.each_bit(on_entry, f)
}
@ -255,7 +255,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
let (start, end) = self.compute_id_range(id);
let on_entry = self.on_entry.slice(start, end);
debug!("each_bit_on_entry(id=%?, on_entry=%s)",
debug2!("each_bit_on_entry(id={:?}, on_entry={})",
id, bits_to_str(on_entry));
self.each_bit(on_entry, f)
}
@ -267,7 +267,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
let (start, end) = self.compute_id_range(id);
let gens = self.gens.slice(start, end);
debug!("each_gen_bit(id=%?, gens=%s)",
debug2!("each_gen_bit(id={:?}, gens={})",
id, bits_to_str(gens));
self.each_bit(gens, f)
}
@ -281,7 +281,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
}
let (start, end) = self.compute_id_range_frozen(id);
let gens = self.gens.slice(start, end);
debug!("each_gen_bit(id=%?, gens=%s)",
debug2!("each_gen_bit(id={:?}, gens={})",
id, bits_to_str(gens));
self.each_bit(gens, f)
}
@ -346,8 +346,8 @@ impl<O:DataFlowOperator+Clone+'static> DataFlowContext<O> {
}
}
debug!("Dataflow result:");
debug!("%s", {
debug2!("Dataflow result:");
debug2!("{}", {
let this = @(*self).clone();
this.pretty_print_to(io::stderr(), blk);
""
@ -374,7 +374,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
blk: &ast::Block,
in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) {
debug!("DataFlowContext::walk_block(blk.id=%?, in_out=%s)",
debug2!("DataFlowContext::walk_block(blk.id={:?}, in_out={})",
blk.id, bits_to_str(reslice(in_out)));
self.merge_with_entry_set(blk.id, in_out);
@ -425,7 +425,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
expr: &ast::Expr,
in_out: &mut [uint],
loop_scopes: &mut ~[LoopScope]) {
debug!("DataFlowContext::walk_expr(expr=%s, in_out=%s)",
debug2!("DataFlowContext::walk_expr(expr={}, in_out={})",
expr.repr(self.dfcx.tcx), bits_to_str(reslice(in_out)));
self.merge_with_entry_set(expr.id, in_out);
@ -569,7 +569,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
copy_bits(new_loop_scope.break_bits, in_out);
}
ast::ExprForLoop(*) => fail!("non-desugared expr_for_loop"),
ast::ExprForLoop(*) => fail2!("non-desugared expr_for_loop"),
ast::ExprLoop(ref blk, _) => {
//
@ -756,7 +756,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
let tcx = self.tcx();
let region_maps = tcx.region_maps;
debug!("pop_scopes(from_expr=%s, to_scope=%?, in_out=%s)",
debug2!("pop_scopes(from_expr={}, to_scope={:?}, in_out={})",
from_expr.repr(tcx), to_scope.loop_id,
bits_to_str(reslice(in_out)));
@ -769,7 +769,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
None => {
tcx.sess.span_bug(
from_expr.span,
fmt!("pop_scopes(from_expr=%s, to_scope=%?) \
format!("pop_scopes(from_expr={}, to_scope={:?}) \
to_scope does not enclose from_expr",
from_expr.repr(tcx), to_scope.loop_id));
}
@ -784,7 +784,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
self.pop_scopes(from_expr, to_scope, in_out);
self.dfcx.apply_kill(from_expr.id, in_out);
join_bits(&self.dfcx.oper, reslice(in_out), to_scope.break_bits);
debug!("break_from_to(from_expr=%s, to_scope=%?) final break_bits=%s",
debug2!("break_from_to(from_expr={}, to_scope={:?}) final break_bits={}",
from_expr.repr(self.tcx()),
to_scope.loop_id,
bits_to_str(reslice(in_out)));
@ -833,11 +833,11 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
pat: @ast::Pat,
in_out: &mut [uint],
_loop_scopes: &mut ~[LoopScope]) {
debug!("DataFlowContext::walk_pat(pat=%s, in_out=%s)",
debug2!("DataFlowContext::walk_pat(pat={}, in_out={})",
pat.repr(self.dfcx.tcx), bits_to_str(reslice(in_out)));
do ast_util::walk_pat(pat) |p| {
debug!(" p.id=%? in_out=%s", p.id, bits_to_str(reslice(in_out)));
debug2!(" p.id={:?} in_out={}", p.id, bits_to_str(reslice(in_out)));
self.merge_with_entry_set(p.id, in_out);
self.dfcx.apply_gen_kill(p.id, in_out);
true
@ -882,7 +882,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
None => {
self.tcx().sess.span_bug(
expr.span,
fmt!("No loop scope for id %?", loop_id));
format!("No loop scope for id {:?}", loop_id));
}
}
}
@ -890,7 +890,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
r => {
self.tcx().sess.span_bug(
expr.span,
fmt!("Bad entry `%?` in def_map for label", r));
format!("Bad entry `{:?}` in def_map for label", r));
}
}
}
@ -909,7 +909,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
}
fn add_to_entry_set(&mut self, id: ast::NodeId, pred_bits: &[uint]) {
debug!("add_to_entry_set(id=%?, pred_bits=%s)",
debug2!("add_to_entry_set(id={:?}, pred_bits={})",
id, bits_to_str(pred_bits));
let (start, end) = self.dfcx.compute_id_range(id);
let changed = { // FIXME(#5074) awkward construction
@ -917,7 +917,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
join_bits(&self.dfcx.oper, pred_bits, on_entry)
};
if changed {
debug!("changed entry set for %? to %s",
debug2!("changed entry set for {:?} to {}",
id, bits_to_str(self.dfcx.on_entry.slice(start, end)));
self.changed = true;
}
@ -926,7 +926,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
fn merge_with_entry_set(&mut self,
id: ast::NodeId,
pred_bits: &mut [uint]) {
debug!("merge_with_entry_set(id=%?, pred_bits=%s)",
debug2!("merge_with_entry_set(id={:?}, pred_bits={})",
id, mut_bits_to_str(pred_bits));
let (start, end) = self.dfcx.compute_id_range(id);
let changed = { // FIXME(#5074) awkward construction
@ -936,7 +936,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
changed
};
if changed {
debug!("changed entry set for %? to %s",
debug2!("changed entry set for {:?} to {}",
id, bits_to_str(self.dfcx.on_entry.slice(start, end)));
self.changed = true;
}
@ -957,7 +957,7 @@ fn bits_to_str(words: &[uint]) -> ~str {
let mut v = word;
for _ in range(0u, uint::bytes) {
result.push_char(sep);
result.push_str(fmt!("%02x", v & 0xFF));
result.push_str(format!("{:02x}", v & 0xFF));
v >>= 8;
sep = '-';
}
@ -992,12 +992,12 @@ fn bitwise(out_vec: &mut [uint],
}
fn set_bit(words: &mut [uint], bit: uint) -> bool {
debug!("set_bit: words=%s bit=%s",
debug2!("set_bit: words={} bit={}",
mut_bits_to_str(words), bit_str(bit));
let word = bit / uint::bits;
let bit_in_word = bit % uint::bits;
let bit_mask = 1 << bit_in_word;
debug!("word=%u bit_in_word=%u bit_mask=%u", word, bit_in_word, word);
debug2!("word={} bit_in_word={} bit_mask={}", word, bit_in_word, word);
let oldv = words[word];
let newv = oldv | bit_mask;
words[word] = newv;
@ -1007,7 +1007,7 @@ fn set_bit(words: &mut [uint], bit: uint) -> bool {
fn bit_str(bit: uint) -> ~str {
let byte = bit >> 8;
let lobits = 1 << (bit & 0xFF);
fmt!("[%u:%u-%02x]", bit, byte, lobits)
format!("[{}:{}-{:02x}]", bit, byte, lobits)
}
fn reslice<'a>(v: &'a mut [uint]) -> &'a [uint] {

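Hunks in this section also exercise the numeric directives: %x becomes {:x}, a zero-padded width such as %02x becomes {:02x}, and the precision in %.0f carries over as a precision spec (written {:.0f} in this transitional syntax; today's Rust spells it {:.0}). A small present-day sketch with made-up values, not taken from the commit:

fn main() {
    // Made-up values; this only demonstrates the directive mapping.
    let word: usize = 0x1f3;
    println!("byte={:02x}", word & 0xFF);     // %02x -> {:02x}  => byte=f3
    println!("mask={:x}", 1usize << 5);       // %x   -> {:x}    => mask=20

    let (stat, total) = (3.0f64, 12.0f64);
    // %.0f -> precision spec; modern Rust writes {:.0} (the hunks show {:.0f}).
    println!("{} ({:.0}%)", stat, stat * 100.0 / total);   // => 3 (25%)
}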

@ -58,12 +58,12 @@ impl EffectCheckVisitor {
SafeContext => {
// Report an error.
self.tcx.sess.span_err(span,
fmt!("%s requires unsafe function or block",
format!("{} requires unsafe function or block",
description))
}
UnsafeBlock(block_id) => {
// OK, but record this.
debug!("effect: recording unsafe block as used: %?", block_id);
debug2!("effect: recording unsafe block as used: {:?}", block_id);
let _ = self.tcx.used_unsafe.insert(block_id);
}
UnsafeFn => {}
@ -119,7 +119,7 @@ impl Visitor<()> for EffectCheckVisitor {
match expr.node {
ExprMethodCall(callee_id, _, _, _, _, _) => {
let base_type = ty::node_id_to_type(self.tcx, callee_id);
debug!("effect: method call case, base type is %s",
debug2!("effect: method call case, base type is {}",
ppaux::ty_to_str(self.tcx, base_type));
if type_is_unsafe_function(base_type) {
self.require_unsafe(expr.span,
@ -128,7 +128,7 @@ impl Visitor<()> for EffectCheckVisitor {
}
ExprCall(base, _, _) => {
let base_type = ty::node_id_to_type(self.tcx, base.id);
debug!("effect: call case, base type is %s",
debug2!("effect: call case, base type is {}",
ppaux::ty_to_str(self.tcx, base_type));
if type_is_unsafe_function(base_type) {
self.require_unsafe(expr.span, "call to unsafe function")
@ -136,7 +136,7 @@ impl Visitor<()> for EffectCheckVisitor {
}
ExprUnary(_, UnDeref, base) => {
let base_type = ty::node_id_to_type(self.tcx, base.id);
debug!("effect: unary case, base type is %s",
debug2!("effect: unary case, base type is {}",
ppaux::ty_to_str(self.tcx, base_type));
match ty::get(base_type).sty {
ty_ptr(_) => {


@ -53,7 +53,7 @@ impl Visitor<int> for CollectFreevarsVisitor {
ast::ExprPath(*) | ast::ExprSelf => {
let mut i = 0;
match self.def_map.find(&expr.id) {
None => fail!("path not found"),
None => fail2!("path not found"),
Some(&df) => {
let mut def = df;
while i < depth {
@ -137,7 +137,7 @@ pub fn annotate_freevars(def_map: resolve::DefMap, crate: &ast::Crate) ->
pub fn get_freevars(tcx: ty::ctxt, fid: ast::NodeId) -> freevar_info {
match tcx.freevars.find(&fid) {
None => fail!("get_freevars: %d has no freevars", fid),
None => fail2!("get_freevars: {} has no freevars", fid),
Some(&d) => return d
}
}


@ -343,7 +343,7 @@ mod test {
do graph.each_incoming_edge(start_index) |edge_index, edge| {
assert_eq!(graph.edge_data(edge_index), &edge.data);
assert!(counter < expected_incoming.len());
debug!("counter=%? expected=%? edge_index=%? edge=%?",
debug2!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
counter, expected_incoming[counter], edge_index, edge);
match expected_incoming[counter] {
(ref e, ref n) => {
@ -361,7 +361,7 @@ mod test {
do graph.each_outgoing_edge(start_index) |edge_index, edge| {
assert_eq!(graph.edge_data(edge_index), &edge.data);
assert!(counter < expected_outgoing.len());
debug!("counter=%? expected=%? edge_index=%? edge=%?",
debug2!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
counter, expected_outgoing[counter], edge_index, edge);
match expected_outgoing[counter] {
(ref e, ref n) => {


@ -124,13 +124,13 @@ fn check_impl_of_trait(cx: &mut Context, it: @item, trait_ref: &trait_ref, self_
// If this trait has builtin-kind supertraits, meet them.
let self_ty: ty::t = ty::node_id_to_type(cx.tcx, it.id);
debug!("checking impl with self type %?", ty::get(self_ty).sty);
debug2!("checking impl with self type {:?}", ty::get(self_ty).sty);
do check_builtin_bounds(cx, self_ty, trait_def.bounds) |missing| {
cx.tcx.sess.span_err(self_type.span,
fmt!("the type `%s', which does not fulfill `%s`, cannot implement this \
format!("the type `{}', which does not fulfill `{}`, cannot implement this \
trait", ty_to_str(cx.tcx, self_ty), missing.user_string(cx.tcx)));
cx.tcx.sess.span_note(self_type.span,
fmt!("types implementing this trait must fulfill `%s`",
format!("types implementing this trait must fulfill `{}`",
trait_def.bounds.user_string(cx.tcx)));
}
@ -238,7 +238,7 @@ fn with_appropriate_checker(cx: &Context, id: NodeId,
}
ref s => {
cx.tcx.sess.bug(
fmt!("expect fn type in kind checker, not %?", s));
format!("expect fn type in kind checker, not {:?}", s));
}
}
}
@ -265,7 +265,7 @@ fn check_fn(
}
pub fn check_expr(cx: &mut Context, e: @Expr) {
debug!("kind::check_expr(%s)", expr_to_str(e, cx.tcx.sess.intr()));
debug2!("kind::check_expr({})", expr_to_str(e, cx.tcx.sess.intr()));
// Handle any kind bounds on type parameters
let type_parameter_id = match e.get_callee_id() {
@ -292,9 +292,9 @@ pub fn check_expr(cx: &mut Context, e: @Expr) {
};
if ts.len() != type_param_defs.len() {
// Fail earlier to make debugging easier
fail!("internal error: in kind::check_expr, length \
fail2!("internal error: in kind::check_expr, length \
mismatch between actual and declared bounds: actual = \
%s, declared = %s",
{}, declared = {}",
ts.repr(cx.tcx),
type_param_defs.repr(cx.tcx));
}
@ -375,8 +375,8 @@ pub fn check_typaram_bounds(cx: &Context,
do check_builtin_bounds(cx, ty, type_param_def.bounds.builtin_bounds) |missing| {
cx.tcx.sess.span_err(
sp,
fmt!("instantiating a type parameter with an incompatible type \
`%s`, which does not fulfill `%s`",
format!("instantiating a type parameter with an incompatible type \
`{}`, which does not fulfill `{}`",
ty_to_str(cx.tcx, ty),
missing.user_string(cx.tcx)));
}
@ -390,17 +390,17 @@ pub fn check_freevar_bounds(cx: &Context, sp: Span, ty: ty::t,
// Emit a less mysterious error message in this case.
match referenced_ty {
Some(rty) => cx.tcx.sess.span_err(sp,
fmt!("cannot implicitly borrow variable of type `%s` in a bounded \
stack closure (implicit reference does not fulfill `%s`)",
format!("cannot implicitly borrow variable of type `{}` in a bounded \
stack closure (implicit reference does not fulfill `{}`)",
ty_to_str(cx.tcx, rty), missing.user_string(cx.tcx))),
None => cx.tcx.sess.span_err(sp,
fmt!("cannot capture variable of type `%s`, which does \
not fulfill `%s`, in a bounded closure",
format!("cannot capture variable of type `{}`, which does \
not fulfill `{}`, in a bounded closure",
ty_to_str(cx.tcx, ty), missing.user_string(cx.tcx))),
}
cx.tcx.sess.span_note(
sp,
fmt!("this closure's environment must satisfy `%s`",
format!("this closure's environment must satisfy `{}`",
bounds.user_string(cx.tcx)));
}
}
@ -409,8 +409,8 @@ pub fn check_trait_cast_bounds(cx: &Context, sp: Span, ty: ty::t,
bounds: ty::BuiltinBounds) {
do check_builtin_bounds(cx, ty, bounds) |missing| {
cx.tcx.sess.span_err(sp,
fmt!("cannot pack type `%s`, which does not fulfill \
`%s`, as a trait bounded by %s",
format!("cannot pack type `{}`, which does not fulfill \
`{}`, as a trait bounded by {}",
ty_to_str(cx.tcx, ty), missing.user_string(cx.tcx),
bounds.user_string(cx.tcx)));
}
@ -445,27 +445,27 @@ fn check_imm_free_var(cx: &Context, def: Def, sp: Span) {
_ => {
cx.tcx.sess.span_bug(
sp,
fmt!("unknown def for free variable: %?", def));
format!("unknown def for free variable: {:?}", def));
}
}
}
fn check_copy(cx: &Context, ty: ty::t, sp: Span, reason: &str) {
debug!("type_contents(%s)=%s",
debug2!("type_contents({})={}",
ty_to_str(cx.tcx, ty),
ty::type_contents(cx.tcx, ty).to_str());
if ty::type_moves_by_default(cx.tcx, ty) {
cx.tcx.sess.span_err(
sp, fmt!("copying a value of non-copyable type `%s`",
sp, format!("copying a value of non-copyable type `{}`",
ty_to_str(cx.tcx, ty)));
cx.tcx.sess.span_note(sp, fmt!("%s", reason));
cx.tcx.sess.span_note(sp, format!("{}", reason));
}
}
pub fn check_send(cx: &Context, ty: ty::t, sp: Span) -> bool {
if !ty::type_is_sendable(cx.tcx, ty) {
cx.tcx.sess.span_err(
sp, fmt!("value has non-sendable type `%s`",
sp, format!("value has non-sendable type `{}`",
ty_to_str(cx.tcx, ty)));
false
} else {
@ -565,8 +565,8 @@ pub fn check_cast_for_escaping_regions(
// if !target_regions.iter().any(|t_r| is_subregion_of(cx, *t_r, r)) {
// cx.tcx.sess.span_err(
// source.span,
// fmt!("source contains borrowed pointer with lifetime \
// not found in the target type `%s`",
// format!("source contains borrowed pointer with lifetime \
// not found in the target type `{}`",
// ty_to_str(cx.tcx, target_ty)));
// note_and_explain_region(
// cx.tcx, "source data is only valid for ", r, "");


@ -154,7 +154,7 @@ impl LanguageItems {
pub fn require(&self, it: LangItem) -> Result<DefId, ~str> {
match self.items[it as uint] {
Some(id) => Ok(id),
None => Err(fmt!("requires `%s` lang_item",
None => Err(format!("requires `{}` lang_item",
LanguageItems::item_name(it as uint)))
}
}
@ -398,7 +398,7 @@ impl<'self> LanguageItemCollector<'self> {
// Check for duplicates.
match self.items.items[item_index] {
Some(original_def_id) if original_def_id != item_def_id => {
self.session.err(fmt!("duplicate entry for `%s`",
self.session.err(format!("duplicate entry for `{}`",
LanguageItems::item_name(item_index)));
}
Some(_) | None => {


@ -430,7 +430,7 @@ impl Context {
return *k;
}
}
fail!("unregistered lint %?", lint);
fail2!("unregistered lint {:?}", lint);
}
fn span_lint(&self, lint: lint, span: Span, msg: &str) {
@ -444,9 +444,9 @@ impl Context {
let mut note = None;
let msg = match src {
Default | CommandLine => {
fmt!("%s [-%c %s%s]", msg, match level {
format!("{} [-{} {}{}]", msg, match level {
warn => 'W', deny => 'D', forbid => 'F',
allow => fail!()
allow => fail2!()
}, self.lint_to_str(lint).replace("_", "-"),
if src == Default { " (default)" } else { "" })
},
@ -458,7 +458,7 @@ impl Context {
match level {
warn => { self.tcx.sess.span_warn(span, msg); }
deny | forbid => { self.tcx.sess.span_err(span, msg); }
allow => fail!(),
allow => fail2!(),
}
for &span in note.iter() {
@ -483,7 +483,7 @@ impl Context {
self.span_lint(
unrecognized_lint,
meta.span,
fmt!("unknown `%s` attribute: `%s`",
format!("unknown `{}` attribute: `{}`",
level_to_str(level), lintname));
}
Some(lint) => {
@ -491,7 +491,7 @@ impl Context {
let now = self.get_level(lint);
if now == forbid && level != forbid {
self.tcx.sess.span_err(meta.span,
fmt!("%s(%s) overruled by outer forbid(%s)",
format!("{}({}) overruled by outer forbid({})",
level_to_str(level),
lintname, lintname));
} else if now != level {
@ -757,7 +757,7 @@ impl TypeLimitsLintVisitor {
ast::BiGt => v >= min,
ast::BiGe => v > min,
ast::BiEq | ast::BiNe => v >= min && v <= max,
_ => fail!()
_ => fail2!()
}
}
@ -821,7 +821,7 @@ impl TypeLimitsLintVisitor {
ast::lit_int_unsuffixed(v) => v,
_ => return true
},
_ => fail!()
_ => fail2!()
};
self.is_valid(norm_binop, lit_val, min, max)
}
@ -834,7 +834,7 @@ impl TypeLimitsLintVisitor {
ast::lit_int_unsuffixed(v) => v as u64,
_ => return true
},
_ => fail!()
_ => fail2!()
};
self.is_valid(norm_binop, lit_val, min, max)
}
@ -1071,7 +1071,7 @@ fn check_item_non_camel_case_types(cx: &Context, it: &ast::item) {
if !is_camel_case(cx.tcx, ident) {
cx.span_lint(
non_camel_case_types, span,
fmt!("%s `%s` should have a camel case identifier",
format!("{} `{}` should have a camel case identifier",
sort, cx.tcx.sess.str_of(ident)));
}
}
@ -1437,7 +1437,7 @@ impl StabilityLintVisitor {
None => return
}
}
_ => cx.tcx.sess.bug(fmt!("handle_def: %? not found", id))
_ => cx.tcx.sess.bug(format!("handle_def: {:?} not found", id))
}
} else {
// cross-crate
@ -1466,9 +1466,9 @@ impl StabilityLintVisitor {
let msg = match stability {
Some(attr::Stability { text: Some(ref s), _ }) => {
fmt!("use of %s item: %s", label, *s)
format!("use of {} item: {}", label, *s)
}
_ => fmt!("use of %s item", label)
_ => format!("use of {} item", label)
};
cx.span_lint(lint, sp, msg);
@ -1613,8 +1613,8 @@ pub fn check_crate(tcx: ty::ctxt, crate: &ast::Crate) {
for t in v.iter() {
match *t {
(lint, span, ref msg) =>
tcx.sess.span_bug(span, fmt!("unprocessed lint %? at %s: \
%s",
tcx.sess.span_bug(span, format!("unprocessed lint {:?} at {}: \
{}",
lint,
ast_map::node_id_to_str(
tcx.items,


@ -144,9 +144,9 @@ enum LiveNodeKind {
fn live_node_kind_to_str(lnk: LiveNodeKind, cx: ty::ctxt) -> ~str {
let cm = cx.sess.codemap;
match lnk {
FreeVarNode(s) => fmt!("Free var node [%s]", cm.span_to_str(s)),
ExprNode(s) => fmt!("Expr node [%s]", cm.span_to_str(s)),
VarDefNode(s) => fmt!("Var def node [%s]", cm.span_to_str(s)),
FreeVarNode(s) => format!("Free var node [{}]", cm.span_to_str(s)),
ExprNode(s) => format!("Expr node [{}]", cm.span_to_str(s)),
VarDefNode(s) => format!("Var def node [{}]", cm.span_to_str(s)),
ExitNode => ~"Exit node"
}
}
@ -176,11 +176,11 @@ pub fn check_crate(tcx: ty::ctxt,
}
impl to_str::ToStr for LiveNode {
fn to_str(&self) -> ~str { fmt!("ln(%u)", **self) }
fn to_str(&self) -> ~str { format!("ln({})", **self) }
}
impl to_str::ToStr for Variable {
fn to_str(&self) -> ~str { fmt!("v(%u)", **self) }
fn to_str(&self) -> ~str { format!("v({})", **self) }
}
// ______________________________________________________________________
@ -276,7 +276,7 @@ impl IrMaps {
self.lnks.push(lnk);
self.num_live_nodes += 1;
debug!("%s is of kind %s", ln.to_str(),
debug2!("{} is of kind {}", ln.to_str(),
live_node_kind_to_str(lnk, self.tcx));
ln
@ -288,7 +288,7 @@ impl IrMaps {
let ln = self.add_live_node(lnk);
self.live_node_map.insert(node_id, ln);
debug!("%s is node %d", ln.to_str(), node_id);
debug2!("{} is node {}", ln.to_str(), node_id);
}
pub fn add_variable(&mut self, vk: VarKind) -> Variable {
@ -303,7 +303,7 @@ impl IrMaps {
ImplicitRet => {}
}
debug!("%s is %?", v.to_str(), vk);
debug2!("{} is {:?}", v.to_str(), vk);
v
}
@ -313,7 +313,7 @@ impl IrMaps {
Some(&var) => var,
None => {
self.tcx.sess.span_bug(
span, fmt!("No variable registered for id %d", node_id));
span, format!("No variable registered for id {}", node_id));
}
}
}
@ -367,7 +367,7 @@ fn visit_fn(v: &mut LivenessVisitor,
sp: Span,
id: NodeId,
this: @mut IrMaps) {
debug!("visit_fn: id=%d", id);
debug2!("visit_fn: id={}", id);
let _i = ::util::common::indenter();
// swap in a new set of IR maps for this function body:
@ -376,13 +376,13 @@ fn visit_fn(v: &mut LivenessVisitor,
this.capture_map);
unsafe {
debug!("creating fn_maps: %x", transmute(&*fn_maps));
debug2!("creating fn_maps: {}", transmute::<&IrMaps, *IrMaps>(fn_maps));
}
for arg in decl.inputs.iter() {
do pat_util::pat_bindings(this.tcx.def_map, arg.pat)
|_bm, arg_id, _x, path| {
debug!("adding argument %d", arg_id);
debug2!("adding argument {}", arg_id);
let ident = ast_util::path_to_ident(path);
fn_maps.add_variable(Arg(arg_id, ident));
}
@ -429,7 +429,7 @@ fn visit_fn(v: &mut LivenessVisitor,
fn visit_local(v: &mut LivenessVisitor, local: @Local, this: @mut IrMaps) {
let def_map = this.tcx.def_map;
do pat_util::pat_bindings(def_map, local.pat) |_bm, p_id, sp, path| {
debug!("adding local variable %d", p_id);
debug2!("adding local variable {}", p_id);
let name = ast_util::path_to_ident(path);
this.add_live_node_for_node(p_id, VarDefNode(sp));
let kind = match local.init {
@ -450,7 +450,7 @@ fn visit_arm(v: &mut LivenessVisitor, arm: &Arm, this: @mut IrMaps) {
let def_map = this.tcx.def_map;
for pat in arm.pats.iter() {
do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| {
debug!("adding local variable %d from match with bm %?",
debug2!("adding local variable {} from match with bm {:?}",
p_id, bm);
let name = ast_util::path_to_ident(path);
this.add_live_node_for_node(p_id, VarDefNode(sp));
@ -470,7 +470,7 @@ fn visit_expr(v: &mut LivenessVisitor, expr: @Expr, this: @mut IrMaps) {
// live nodes required for uses or definitions of variables:
ExprPath(_) | ExprSelf => {
let def = this.tcx.def_map.get_copy(&expr.id);
debug!("expr %d: path that leads to %?", expr.id, def);
debug2!("expr {}: path that leads to {:?}", expr.id, def);
if moves::moved_variable_node_id_from_def(def).is_some() {
this.add_live_node_for_node(expr.id, ExprNode(expr.span));
}
@ -515,7 +515,7 @@ fn visit_expr(v: &mut LivenessVisitor, expr: @Expr, this: @mut IrMaps) {
this.add_live_node_for_node(expr.id, ExprNode(expr.span));
visit::walk_expr(v, expr, this);
}
ExprForLoop(*) => fail!("non-desugared expr_for_loop"),
ExprForLoop(*) => fail2!("non-desugared expr_for_loop"),
ExprBinary(_, op, _, _) if ast_util::lazy_binop(op) => {
this.add_live_node_for_node(expr.id, ExprNode(expr.span));
visit::walk_expr(v, expr, this);
@ -609,7 +609,7 @@ impl Liveness {
// code have to agree about which AST nodes are worth
// creating liveness nodes for.
self.tcx.sess.span_bug(
span, fmt!("No live node registered for node %d",
span, format!("No live node registered for node {}",
node_id));
}
}
@ -788,7 +788,7 @@ impl Liveness {
wr.write_str("[ln(");
wr.write_uint(*ln);
wr.write_str(") of kind ");
wr.write_str(fmt!("%?", self.ir.lnks[*ln]));
wr.write_str(format!("{:?}", self.ir.lnks[*ln]));
wr.write_str(" reads");
self.write_vars(wr, ln, |idx| self.users[idx].reader );
wr.write_str(" writes");
@ -819,7 +819,7 @@ impl Liveness {
self.indices2(ln, succ_ln, |idx, succ_idx| {
self.users[idx] = self.users[succ_idx]
});
debug!("init_from_succ(ln=%s, succ=%s)",
debug2!("init_from_succ(ln={}, succ={})",
self.ln_str(ln), self.ln_str(succ_ln));
}
@ -843,7 +843,7 @@ impl Liveness {
}
}
debug!("merge_from_succ(ln=%s, succ=%s, first_merge=%b, changed=%b)",
debug2!("merge_from_succ(ln={}, succ={}, first_merge={}, changed={})",
ln.to_str(), self.ln_str(succ_ln), first_merge, changed);
return changed;
@ -866,7 +866,7 @@ impl Liveness {
self.users[idx].reader = invalid_node();
self.users[idx].writer = invalid_node();
debug!("%s defines %s (idx=%u): %s", writer.to_str(), var.to_str(),
debug2!("{} defines {} (idx={}): {}", writer.to_str(), var.to_str(),
idx, self.ln_str(writer));
}
@ -891,7 +891,7 @@ impl Liveness {
user.used = true;
}
debug!("%s accesses[%x] %s: %s",
debug2!("{} accesses[{:x}] {}: {}",
ln.to_str(), acc, var.to_str(), self.ln_str(ln));
}
@ -902,18 +902,18 @@ impl Liveness {
// effectively a return---this only occurs in `for` loops,
// where the body is really a closure.
debug!("compute: using id for block, %s", block_to_str(body,
debug2!("compute: using id for block, {}", block_to_str(body,
self.tcx.sess.intr()));
let entry_ln: LiveNode =
self.with_loop_nodes(body.id, self.s.exit_ln, self.s.exit_ln,
|| { self.propagate_through_fn_block(decl, body) });
// hack to skip the loop unless debug! is enabled:
debug!("^^ liveness computation results for body %d (entry=%s)",
// hack to skip the loop unless debug2! is enabled:
debug2!("^^ liveness computation results for body {} (entry={})",
{
for ln_idx in range(0u, self.ir.num_live_nodes) {
debug!("%s", self.ln_str(LiveNode(ln_idx)));
debug2!("{}", self.ln_str(LiveNode(ln_idx)));
}
body.id
},
@ -1007,7 +1007,7 @@ impl Liveness {
pub fn propagate_through_expr(&self, expr: @Expr, succ: LiveNode)
-> LiveNode {
debug!("propagate_through_expr: %s",
debug2!("propagate_through_expr: {}",
expr_to_str(expr, self.tcx.sess.intr()));
match expr.node {
@ -1022,7 +1022,7 @@ impl Liveness {
}
ExprFnBlock(_, ref blk) => {
debug!("%s is an expr_fn_block",
debug2!("{} is an expr_fn_block",
expr_to_str(expr, self.tcx.sess.intr()));
/*
@ -1070,7 +1070,7 @@ impl Liveness {
self.propagate_through_loop(expr, Some(cond), blk, succ)
}
ExprForLoop(*) => fail!("non-desugared expr_for_loop"),
ExprForLoop(*) => fail2!("non-desugared expr_for_loop"),
// Note that labels have been resolved, so we don't need to look
// at the label ident
@ -1379,7 +1379,7 @@ impl Liveness {
self.merge_from_succ(ln, succ, first_merge);
first_merge = false;
}
debug!("propagate_through_loop: using id for loop body %d %s",
debug2!("propagate_through_loop: using id for loop body {} {}",
expr.id, block_to_str(body, self.tcx.sess.intr()));
let cond_ln = self.propagate_through_opt_expr(cond, ln);
@ -1407,7 +1407,7 @@ impl Liveness {
cont_ln: LiveNode,
f: &fn() -> R)
-> R {
debug!("with_loop_nodes: %d %u", loop_node_id, *break_ln);
debug2!("with_loop_nodes: {} {}", loop_node_id, *break_ln);
self.loop_scope.push(loop_node_id);
self.break_ln.insert(loop_node_id, break_ln);
self.cont_ln.insert(loop_node_id, cont_ln);
@ -1430,7 +1430,7 @@ fn check_local(this: &mut Liveness, local: @Local) {
// No initializer: the variable might be unused; if not, it
// should not be live at this point.
debug!("check_local() with no initializer");
debug2!("check_local() with no initializer");
do this.pat_bindings(local.pat) |ln, var, sp, id| {
if !this.warn_about_unused(sp, id, ln, var) {
match this.live_on_exit(ln, var) {
@ -1501,7 +1501,7 @@ fn check_expr(this: &mut Liveness, expr: @Expr) {
ExprParen(*) | ExprFnBlock(*) | ExprPath(*) | ExprSelf(*) => {
visit::walk_expr(this, expr, ());
}
ExprForLoop(*) => fail!("non-desugared expr_for_loop")
ExprForLoop(*) => fail2!("non-desugared expr_for_loop")
}
}
@ -1596,17 +1596,17 @@ impl Liveness {
FreeVarNode(span) => {
self.tcx.sess.span_err(
span,
fmt!("capture of %s: `%s`", msg, name));
format!("capture of {}: `{}`", msg, name));
}
ExprNode(span) => {
self.tcx.sess.span_err(
span,
fmt!("use of %s: `%s`", msg, name));
format!("use of {}: `{}`", msg, name));
}
ExitNode | VarDefNode(_) => {
self.tcx.sess.span_bug(
chk_span,
fmt!("illegal reader: %?", lnk));
format!("illegal reader: {:?}", lnk));
}
}
}
@ -1655,11 +1655,11 @@ impl Liveness {
if is_assigned {
self.tcx.sess.add_lint(unused_variable, id, sp,
fmt!("variable `%s` is assigned to, \
format!("variable `{}` is assigned to, \
but never used", *name));
} else {
self.tcx.sess.add_lint(unused_variable, id, sp,
fmt!("unused variable: `%s`", *name));
format!("unused variable: `{}`", *name));
}
}
true
@ -1677,7 +1677,7 @@ impl Liveness {
let r = self.should_warn(var);
for name in r.iter() {
self.tcx.sess.add_lint(dead_assignment, id, sp,
fmt!("value assigned to `%s` is never read", *name));
format!("value assigned to `{}` is never read", *name));
}
}
}


@ -214,7 +214,7 @@ pub fn deref_kind(tcx: ty::ctxt, t: ty::t) -> deref_kind {
Some(k) => k,
None => {
tcx.sess.bug(
fmt!("deref_cat() invoked on non-derefable type %s",
format!("deref_cat() invoked on non-derefable type {}",
ty_to_str(tcx, t)));
}
}
@ -288,7 +288,7 @@ pub struct mem_categorization_ctxt {
impl ToStr for MutabilityCategory {
fn to_str(&self) -> ~str {
fmt!("%?", *self)
format!("{:?}", *self)
}
}
@ -383,7 +383,7 @@ impl mem_categorization_ctxt {
}
pub fn cat_expr_unadjusted(&self, expr: @ast::Expr) -> cmt {
debug!("cat_expr: id=%d expr=%s",
debug2!("cat_expr: id={} expr={}",
expr.id, pprust::expr_to_str(expr, self.tcx.sess.intr()));
let expr_ty = self.expr_ty(expr);
@ -436,7 +436,7 @@ impl mem_categorization_ctxt {
return self.cat_rvalue_node(expr, expr_ty);
}
ast::ExprForLoop(*) => fail!("non-desugared expr_for_loop")
ast::ExprForLoop(*) => fail2!("non-desugared expr_for_loop")
}
}
@ -544,7 +544,7 @@ impl mem_categorization_ctxt {
_ => {
self.tcx.sess.span_bug(
span,
fmt!("Upvar of non-closure %? - %s",
format!("Upvar of non-closure {:?} - {}",
fn_node_id, ty.repr(self.tcx)));
}
}
@ -651,7 +651,7 @@ impl mem_categorization_ctxt {
None => {
self.tcx.sess.span_bug(
node.span(),
fmt!("Explicit deref of non-derefable type: %s",
format!("Explicit deref of non-derefable type: {}",
ty_to_str(self.tcx, base_cmt.ty)));
}
};
@ -741,7 +741,7 @@ impl mem_categorization_ctxt {
None => {
self.tcx.sess.span_bug(
elt.span(),
fmt!("Explicit index of non-index type `%s`",
format!("Explicit index of non-index type `{}`",
ty_to_str(self.tcx, base_cmt.ty)));
}
};
@ -872,7 +872,7 @@ impl mem_categorization_ctxt {
// get the type of the *subpattern* and use that.
let tcx = self.tcx;
debug!("cat_pattern: id=%d pat=%s cmt=%s",
debug2!("cat_pattern: id={} pat={} cmt={}",
pat.id, pprust::pat_to_str(pat, tcx.sess.intr()),
cmt.repr(tcx));
let _i = indenter();
@ -1020,7 +1020,7 @@ impl mem_categorization_ctxt {
~"argument"
}
cat_deref(_, _, pk) => {
fmt!("dereference of %s pointer", ptr_sigil(pk))
format!("dereference of {} pointer", ptr_sigil(pk))
}
cat_interior(_, InteriorField(NamedField(_))) => {
~"field"
@ -1177,7 +1177,7 @@ impl cmt_ {
impl Repr for cmt_ {
fn repr(&self, tcx: ty::ctxt) -> ~str {
fmt!("{%s id:%d m:%? ty:%s}",
format!("\\{{} id:{} m:{:?} ty:{}\\}",
self.cat.repr(tcx),
self.id,
self.mutbl,
@ -1194,19 +1194,19 @@ impl Repr for categorization {
cat_local(*) |
cat_self(*) |
cat_arg(*) => {
fmt!("%?", *self)
format!("{:?}", *self)
}
cat_deref(cmt, derefs, ptr) => {
fmt!("%s->(%s, %u)", cmt.cat.repr(tcx),
format!("{}->({}, {})", cmt.cat.repr(tcx),
ptr_sigil(ptr), derefs)
}
cat_interior(cmt, interior) => {
fmt!("%s.%s",
format!("{}.{}",
cmt.cat.repr(tcx),
interior.repr(tcx))
}
cat_downcast(cmt) => {
fmt!("%s->(enum)", cmt.cat.repr(tcx))
format!("{}->(enum)", cmt.cat.repr(tcx))
}
cat_stack_upvar(cmt) |
cat_discr(cmt, _) => {
@ -1229,7 +1229,7 @@ impl Repr for InteriorKind {
fn repr(&self, _tcx: ty::ctxt) -> ~str {
match *self {
InteriorField(NamedField(fld)) => token::interner_get(fld).to_owned(),
InteriorField(PositionalField(i)) => fmt!("#%?", i),
InteriorField(PositionalField(i)) => format!("\\#{:?}", i),
InteriorElement(_) => ~"[]",
}
}

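One wrinkle the Repr hunks above make visible: in this transitional format! syntax, literal braces and '#' are escaped with a backslash (the source literals read \\{, \\} and \\#), whereas present-day Rust doubles the braces ({{, }}) and treats '#' as ordinary text. A hedged modern-Rust approximation of the same cmt_-style output follows; the field values are invented for illustration and the real rustc types are not reproduced.

// Modern-Rust approximation of the "{cat id:.. m:.. ty:..}" output above;
// the arguments are plain strings/integers chosen for illustration.
fn repr(cat: &str, id: i64, mutbl: &str, ty: &str) -> String {
    // The 2013 hunk writes source "\\{{} id:{} m:{:?} ty:{}\\}";
    // today the literal braces are doubled instead of backslash-escaped.
    format!("{{{} id:{} m:{:?} ty:{}}}", cat, id, mutbl, ty)
}

fn main() {
    println!("{}", repr("cat_local(..)", 42, "McImmutable", "int"));
    // => {cat_local(..) id:42 m:"McImmutable" ty:int}

    // The PositionalField arm writes source "\\#{:?}" for a literal '#';
    // no escape is needed in modern Rust:
    println!("{}", format!("#{:?}", 3usize));   // => #3
}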

@ -275,7 +275,7 @@ impl VisitContext {
* meaning either copied or moved depending on its type.
*/
debug!("consume_expr(expr=%s)",
debug2!("consume_expr(expr={})",
expr.repr(self.tcx));
let expr_ty = ty::expr_ty_adjusted(self.tcx, expr);
@ -293,7 +293,7 @@ impl VisitContext {
* meaning either copied or moved depending on its type.
*/
debug!("consume_block(blk.id=%?)", blk.id);
debug2!("consume_block(blk.id={:?})", blk.id);
for stmt in blk.stmts.iter() {
self.visit_stmt(*stmt, ());
@ -312,7 +312,7 @@ impl VisitContext {
* in turn trigger calls to the subcomponents of `expr`.
*/
debug!("use_expr(expr=%s, mode=%?)",
debug2!("use_expr(expr={}, mode={:?})",
expr.repr(self.tcx),
expr_mode);
@ -326,7 +326,7 @@ impl VisitContext {
_ => expr_mode
};
debug!("comp_mode = %?", comp_mode);
debug2!("comp_mode = {:?}", comp_mode);
match expr.node {
ExprPath(*) | ExprSelf => {
@ -375,7 +375,7 @@ impl VisitContext {
ty::ty_bare_fn(*) => Read,
ref x =>
self.tcx.sess.span_bug(callee.span,
fmt!("non-function type in moves for expr_call: %?", x)),
format!("non-function type in moves for expr_call: {:?}", x)),
};
// Note we're not using consume_expr, which uses type_moves_by_default
// to determine the mode, for this. The reason is that while stack
@ -411,7 +411,7 @@ impl VisitContext {
ref r => {
self.tcx.sess.span_bug(
with_expr.span,
fmt!("bad base expr type in record: %?", r))
format!("bad base expr type in record: {:?}", r))
}
};
@ -435,7 +435,7 @@ impl VisitContext {
if consume_with {
if has_dtor(self.tcx, with_ty) {
self.tcx.sess.span_err(with_expr.span,
fmt!("cannot move out of type `%s`, \
format!("cannot move out of type `{}`, \
which defines the `Drop` trait",
with_ty.user_string(self.tcx)));
}
@ -500,7 +500,7 @@ impl VisitContext {
self.consume_block(blk);
}
ExprForLoop(*) => fail!("non-desugared expr_for_loop"),
ExprForLoop(*) => fail2!("non-desugared expr_for_loop"),
ExprUnary(_, _, lhs) => {
if !self.use_overloaded_operator(expr, lhs, [])
@ -620,7 +620,7 @@ impl VisitContext {
BindByRef(_) => false,
BindInfer => {
let pat_ty = ty::node_id_to_type(self.tcx, id);
debug!("pattern %? %s type is %s",
debug2!("pattern {:?} {} type is {}",
id,
ast_util::path_to_ident(path).repr(self.tcx),
pat_ty.repr(self.tcx));
@ -628,7 +628,7 @@ impl VisitContext {
}
};
debug!("pattern binding %?: bm=%?, binding_moves=%b",
debug2!("pattern binding {:?}: bm={:?}, binding_moves={}",
id, bm, binding_moves);
if binding_moves {
@ -678,7 +678,7 @@ impl VisitContext {
}
pub fn compute_captures(&mut self, fn_expr_id: NodeId) -> @[CaptureVar] {
debug!("compute_capture_vars(fn_expr_id=%?)", fn_expr_id);
debug2!("compute_capture_vars(fn_expr_id={:?})", fn_expr_id);
let _indenter = indenter();
let fn_ty = ty::node_id_to_type(self.tcx, fn_expr_id);
@ -696,7 +696,7 @@ impl VisitContext {
let fvar = &freevars[i];
let fvar_def_id = ast_util::def_id_of_def(fvar.def).node;
let fvar_ty = ty::node_id_to_type(self.tcx, fvar_def_id);
debug!("fvar_def_id=%? fvar_ty=%s",
debug2!("fvar_def_id={:?} fvar_ty={}",
fvar_def_id, ppaux::ty_to_str(self.tcx, fvar_ty));
let mode = if ty::type_moves_by_default(self.tcx, fvar_ty) {
CapMove


@ -111,8 +111,8 @@ impl PrivacyVisitor {
// WRONG
Public
};
debug!("parental_privacy = %?", parental_privacy);
debug!("vis = %?, priv = %?",
debug2!("parental_privacy = {:?}", parental_privacy);
debug2!("vis = {:?}, priv = {:?}",
variant_info.vis,
visibility_to_privacy(variant_info.vis))
// inherited => privacy of the enum item
@ -175,7 +175,7 @@ impl PrivacyVisitor {
}
Some(_) => {
self.tcx.sess.span_bug(span,
fmt!("method_is_private: method was a %s?!",
format!("method_is_private: method was a {}?!",
ast_map::node_id_to_str(
self.tcx.items,
method_id,
@ -205,8 +205,8 @@ impl PrivacyVisitor {
Some(&node_trait_method(_, trait_did, _)) => f(trait_did.node),
Some(_) => {
self.tcx.sess.span_bug(span,
fmt!("local_item_is_private: item was \
a %s?!",
format!("local_item_is_private: item was \
a {}?!",
ast_map::node_id_to_str(
self.tcx.items,
item_id,
@ -227,7 +227,7 @@ impl PrivacyVisitor {
for field in fields.iter() {
if field.name != ident.name { loop; }
if field.vis == private {
self.tcx.sess.span_err(span, fmt!("field `%s` is private",
self.tcx.sess.span_err(span, format!("field `{}` is private",
token::ident_to_str(&ident)));
}
break;
@ -248,7 +248,7 @@ impl PrivacyVisitor {
(container_id.crate != LOCAL_CRATE ||
!self.privileged_items.iter().any(|x| x == &(container_id.node))) {
self.tcx.sess.span_err(span,
fmt!("method `%s` is private",
format!("method `{}` is private",
token::ident_to_str(name)));
}
} else {
@ -256,7 +256,7 @@ impl PrivacyVisitor {
csearch::get_item_visibility(self.tcx.sess.cstore, method_id);
if visibility != public {
self.tcx.sess.span_err(span,
fmt!("method `%s` is private",
format!("method `{}` is private",
token::ident_to_str(name)));
}
}
@ -264,10 +264,10 @@ impl PrivacyVisitor {
// Checks that a private path is in scope.
fn check_path(&mut self, span: Span, def: Def, path: &Path) {
debug!("checking path");
debug2!("checking path");
match def {
DefStaticMethod(method_id, _, _) => {
debug!("found static method def, checking it");
debug2!("found static method def, checking it");
self.check_method_common(span,
method_id,
&path.segments.last().identifier)
@ -277,7 +277,7 @@ impl PrivacyVisitor {
if self.local_item_is_private(span, def_id.node) &&
!self.privileged_items.iter().any(|x| x == &def_id.node) {
self.tcx.sess.span_err(span,
fmt!("function `%s` is private",
format!("function `{}` is private",
token::ident_to_str(
&path.segments
.last()
@ -286,7 +286,7 @@ impl PrivacyVisitor {
//} else if csearch::get_item_visibility(self.tcx.sess.cstore,
// def_id) != public {
// self.tcx.sess.span_err(span,
// fmt!("function `%s` is private",
// format!("function `{}` is private",
// token::ident_to_str(
// &path.segments
// .last()
@ -333,7 +333,7 @@ impl PrivacyVisitor {
!self.privileged_items.iter()
.any(|x| x == &(trait_id.node)) => {
self.tcx.sess.span_err(span,
fmt!("method `%s` is private",
format!("method `{}` is private",
token::ident_to_str(&method
.ident)));
}
@ -476,7 +476,7 @@ impl<'self> Visitor<Context<'self>> for PrivacyVisitor {
ty_struct(id, _)
if id.crate != LOCAL_CRATE || !self.privileged_items.iter()
.any(|x| x == &(id.node)) => {
debug!("(privacy checking) checking field access");
debug2!("(privacy checking) checking field access");
self.check_field(expr.span, id, ident);
}
_ => {}
@ -497,7 +497,7 @@ impl<'self> Visitor<Context<'self>> for PrivacyVisitor {
method map");
}
Some(ref entry) => {
debug!("(privacy checking) checking \
debug2!("(privacy checking) checking \
impl method");
self.check_method(expr.span, &entry.origin, ident);
}
@ -515,7 +515,7 @@ impl<'self> Visitor<Context<'self>> for PrivacyVisitor {
if id.crate != LOCAL_CRATE ||
!self.privileged_items.iter().any(|x| x == &(id.node)) {
for field in (*fields).iter() {
debug!("(privacy checking) checking \
debug2!("(privacy checking) checking \
field in struct literal");
self.check_field(expr.span, id, field.ident);
}
@ -527,7 +527,7 @@ impl<'self> Visitor<Context<'self>> for PrivacyVisitor {
match self.tcx.def_map.get_copy(&expr.id) {
DefVariant(_, variant_id, _) => {
for field in (*fields).iter() {
debug!("(privacy checking) \
debug2!("(privacy checking) \
checking field in \
struct variant \
literal");
@ -582,7 +582,7 @@ impl<'self> Visitor<Context<'self>> for PrivacyVisitor {
if id.crate != LOCAL_CRATE ||
!self.privileged_items.iter().any(|x| x == &(id.node)) {
for field in fields.iter() {
debug!("(privacy checking) checking \
debug2!("(privacy checking) checking \
struct pattern");
self.check_field(pattern.span, id, field.ident);
}
@ -594,7 +594,7 @@ impl<'self> Visitor<Context<'self>> for PrivacyVisitor {
match self.tcx.def_map.find(&pattern.id) {
Some(&DefVariant(_, variant_id, _)) => {
for field in fields.iter() {
debug!("(privacy checking) \
debug2!("(privacy checking) \
checking field in \
struct variant pattern");
self.check_field(pattern.span, variant_id, field.ident);


@ -400,14 +400,14 @@ impl ReachableContext {
let desc = ast_map::node_id_to_str(self.tcx.items,
search_item,
ident_interner);
self.tcx.sess.bug(fmt!("found unexpected thingy in \
worklist: %s",
desc))
self.tcx.sess.bug(format!("found unexpected thingy in \
worklist: {}",
desc))
}
None => {
self.tcx.sess.bug(fmt!("found unmapped ID in worklist: \
%d",
search_item))
self.tcx.sess.bug(format!("found unmapped ID in worklist: \
{}",
search_item))
}
}
}
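
The hunks above swap fmt!'s printf-style directives for format!'s brace placeholders: %s, %d, %u, and %b become {}, while %? becomes {:?}. A minimal sketch of the new placeholders, written against current Rust; the path, id, and option values are illustrative, not taken from the compiler:

    fn main() {
        let path = "lib/foo.rs";
        let id: usize = 42;
        let msg = format!("linking: {}", path);            // old: fmt!("linking: %s", path)
        let step = format!("popped {} from worklist", id); // old: fmt!("popped %u from worklist", id)
        let opt: Option<u32> = None;
        let dump = format!("options={:?}", opt);           // old: fmt!("options=%?", opts)
        assert_eq!(msg, "linking: lib/foo.rs");
        println!("{}\n{}\n{}", msg, step, dump);
    }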


@ -93,13 +93,13 @@ impl RegionMaps {
None => {}
}
debug!("relate_free_regions(sub=%?, sup=%?)", sub, sup);
debug2!("relate_free_regions(sub={:?}, sup={:?})", sub, sup);
self.free_region_map.insert(sub, ~[sup]);
}
pub fn record_parent(&mut self, sub: ast::NodeId, sup: ast::NodeId) {
debug!("record_parent(sub=%?, sup=%?)", sub, sup);
debug2!("record_parent(sub={:?}, sup={:?})", sub, sup);
assert!(sub != sup);
self.scope_map.insert(sub, sup);
@ -125,7 +125,7 @@ impl RegionMaps {
match self.scope_map.find(&id) {
Some(&r) => r,
None => { fail!("No enclosing scope for id %?", id); }
None => { fail2!("No enclosing scope for id {:?}", id); }
}
}
@ -168,7 +168,7 @@ impl RegionMaps {
while superscope != s {
match self.scope_map.find(&s) {
None => {
debug!("is_subscope_of(%?, %?, s=%?)=false",
debug2!("is_subscope_of({:?}, {:?}, s={:?})=false",
subscope, superscope, s);
return false;
@ -177,7 +177,7 @@ impl RegionMaps {
}
}
debug!("is_subscope_of(%?, %?)=true",
debug2!("is_subscope_of({:?}, {:?})=true",
subscope, superscope);
return true;
@ -231,7 +231,7 @@ impl RegionMaps {
* duplicated with the code in infer.rs.
*/
debug!("is_subregion_of(sub_region=%?, super_region=%?)",
debug2!("is_subregion_of(sub_region={:?}, super_region={:?})",
sub_region, super_region);
sub_region == super_region || {
@ -303,7 +303,7 @@ impl RegionMaps {
fn ancestors_of(this: &RegionMaps, scope: ast::NodeId)
-> ~[ast::NodeId]
{
// debug!("ancestors_of(scope=%d)", scope);
// debug2!("ancestors_of(scope={})", scope);
let mut result = ~[scope];
let mut scope = scope;
loop {
@ -314,7 +314,7 @@ impl RegionMaps {
scope = superscope;
}
}
// debug!("ancestors_of_loop(scope=%d)", scope);
// debug2!("ancestors_of_loop(scope={})", scope);
}
}
}
@ -323,7 +323,7 @@ impl RegionMaps {
/// Records the current parent (if any) as the parent of `child_id`.
fn parent_to_expr(visitor: &mut RegionResolutionVisitor,
cx: Context, child_id: ast::NodeId, sp: Span) {
debug!("region::parent_to_expr(span=%?)",
debug2!("region::parent_to_expr(span={:?})",
visitor.sess.codemap.span_to_str(sp));
for parent_id in cx.parent.iter() {
visitor.region_maps.record_parent(child_id, *parent_id);
@ -437,10 +437,10 @@ fn resolve_fn(visitor: &mut RegionResolutionVisitor,
sp: Span,
id: ast::NodeId,
cx: Context) {
debug!("region::resolve_fn(id=%?, \
span=%?, \
body.id=%?, \
cx.parent=%?)",
debug2!("region::resolve_fn(id={:?}, \
span={:?}, \
body.id={:?}, \
cx.parent={:?})",
id,
visitor.sess.codemap.span_to_str(sp),
body.id,
@ -619,7 +619,7 @@ impl DetermineRpCtxt {
Some(v) => join_variance(v, variance)
};
debug!("add_rp() variance for %s: %? == %? ^ %?",
debug2!("add_rp() variance for {}: {:?} == {:?} ^ {:?}",
ast_map::node_id_to_str(self.ast_map, id,
token::get_ident_interner()),
joined_variance, old_variance, variance);
@ -637,7 +637,7 @@ impl DetermineRpCtxt {
/// contains a value of type `from`, so if `from` is
/// region-parameterized, so is the current item.
pub fn add_dep(&mut self, from: ast::NodeId) {
debug!("add dependency from %d -> %d (%s -> %s) with variance %?",
debug2!("add dependency from {} -> {} ({} -> {}) with variance {:?}",
from, self.item_id,
ast_map::node_id_to_str(self.ast_map, from,
token::get_ident_interner()),
@ -715,7 +715,7 @@ impl DetermineRpCtxt {
let old_anon_implies_rp = self.anon_implies_rp;
self.item_id = item_id;
self.anon_implies_rp = anon_implies_rp;
debug!("with_item_id(%d, %b)",
debug2!("with_item_id({}, {})",
item_id,
anon_implies_rp);
let _i = ::util::common::indenter();
@ -787,7 +787,7 @@ fn determine_rp_in_ty(visitor: &mut DetermineRpVisitor,
let sess = cx.sess;
match ty.node {
ast::ty_rptr(ref r, _) => {
debug!("referenced rptr type %s",
debug2!("referenced rptr type {}",
pprust::ty_to_str(ty, sess.intr()));
if cx.region_is_relevant(r) {
@ -797,7 +797,7 @@ fn determine_rp_in_ty(visitor: &mut DetermineRpVisitor,
}
ast::ty_closure(ref f) => {
debug!("referenced fn type: %s",
debug2!("referenced fn type: {}",
pprust::ty_to_str(ty, sess.intr()));
match f.region {
Some(_) => {
@ -837,7 +837,7 @@ fn determine_rp_in_ty(visitor: &mut DetermineRpVisitor,
match csearch::get_region_param(cstore, did) {
None => {}
Some(variance) => {
debug!("reference to external, rp'd type %s",
debug2!("reference to external, rp'd type {}",
pprust::ty_to_str(ty, sess.intr()));
if cx.region_is_relevant(&path.segments.last().lifetime) {
let rv = cx.add_variance(variance);
@ -967,7 +967,7 @@ pub fn determine_rp_in_crate(sess: Session,
while cx.worklist.len() != 0 {
let c_id = cx.worklist.pop();
let c_variance = cx.region_paramd_items.get_copy(&c_id);
debug!("popped %d from worklist", c_id);
debug2!("popped {} from worklist", c_id);
match cx.dep_map.find(&c_id) {
None => {}
Some(deps) => {
@ -980,11 +980,11 @@ pub fn determine_rp_in_crate(sess: Session,
}
}
debug!("%s", {
debug!("Region variance results:");
debug2!("{}", {
debug2!("Region variance results:");
let region_paramd_items = cx.region_paramd_items;
for (&key, &value) in region_paramd_items.iter() {
debug!("item %? (%s) is parameterized with variance %?",
debug2!("item {:?} ({}) is parameterized with variance {:?}",
key,
ast_map::node_id_to_str(ast_map, key,
token::get_ident_interner()),

File diff suppressed because it is too large

@ -123,20 +123,20 @@ fn stack_check_fn<'a>(v: &mut StackCheckVisitor,
}
};
let new_cx = Context {safe_stack: safe_stack};
debug!("stack_check_fn(safe_stack=%b, id=%?)", safe_stack, id);
debug2!("stack_check_fn(safe_stack={}, id={:?})", safe_stack, id);
visit::walk_fn(v, fk, decl, body, sp, id, new_cx);
}
fn stack_check_expr<'a>(v: &mut StackCheckVisitor,
expr: @ast::Expr,
cx: Context) {
debug!("stack_check_expr(safe_stack=%b, expr=%s)",
debug2!("stack_check_expr(safe_stack={}, expr={})",
cx.safe_stack, expr.repr(v.tcx));
if !cx.safe_stack {
match expr.node {
ast::ExprCall(callee, _, _) => {
let callee_ty = ty::expr_ty(v.tcx, callee);
debug!("callee_ty=%s", callee_ty.repr(v.tcx));
debug2!("callee_ty={}", callee_ty.repr(v.tcx));
match ty::get(callee_ty).sty {
ty::ty_bare_fn(ref fty) => {
if !fty.abis.is_rust() && !fty.abis.is_intrinsic() {
@ -177,6 +177,6 @@ fn call_to_extern_fn(v: &mut StackCheckVisitor, callee: @ast::Expr) {
v.tcx.sess.add_lint(lint::cstack,
callee.id,
callee.span,
fmt!("invoking non-Rust fn in fn without \
#[fixed_stack_segment]"));
format!("invoking non-Rust fn in fn without \
\\#[fixed_stack_segment]"));
}


@ -183,9 +183,9 @@ impl Subst for ty::Region {
ty::NonerasedRegions(ref regions) => {
if regions.len() != 1 {
tcx.sess.bug(
fmt!("ty::Region#subst(): \
format!("ty::Region\\#subst(): \
Reference to self region when \
given substs with no self region: %s",
given substs with no self region: {}",
substs.repr(tcx)));
}
*regions.get(0)


@ -264,7 +264,7 @@ fn opt_eq(tcx: ty::ctxt, a: &Opt, b: &Opt) -> bool {
a_expr = e.unwrap();
}
UnitLikeStructLit(_) => {
fail!("UnitLikeStructLit should have been handled \
fail2!("UnitLikeStructLit should have been handled \
above")
}
}
@ -277,14 +277,14 @@ fn opt_eq(tcx: ty::ctxt, a: &Opt, b: &Opt) -> bool {
b_expr = e.unwrap();
}
UnitLikeStructLit(_) => {
fail!("UnitLikeStructLit should have been handled \
fail2!("UnitLikeStructLit should have been handled \
above")
}
}
match const_eval::compare_lit_exprs(tcx, a_expr, b_expr) {
Some(val1) => val1 == 0,
None => fail!("compare_list_exprs: type mismatch"),
None => fail2!("compare_list_exprs: type mismatch"),
}
}
}
@ -294,7 +294,7 @@ fn opt_eq(tcx: ty::ctxt, a: &Opt, b: &Opt) -> bool {
let m2 = const_eval::compare_lit_exprs(tcx, a2, b2);
match (m1, m2) {
(Some(val1), Some(val2)) => (val1 == 0 && val2 == 0),
_ => fail!("compare_list_exprs: type mismatch"),
_ => fail2!("compare_list_exprs: type mismatch"),
}
}
(&var(a, _), &var(b, _)) => a == b,
@ -419,7 +419,7 @@ impl<'self> Repr for Match<'self> {
// for many programs, this just take too long to serialize
self.pats.repr(tcx)
} else {
fmt!("%u pats", self.pats.len())
format!("{} pats", self.pats.len())
}
}
}
@ -439,7 +439,7 @@ fn expand_nested_bindings<'r>(bcx: @mut Block,
col: uint,
val: ValueRef)
-> ~[Match<'r>] {
debug!("expand_nested_bindings(bcx=%s, m=%s, col=%u, val=%s)",
debug2!("expand_nested_bindings(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
col,
@ -472,7 +472,7 @@ fn assert_is_binding_or_wild(bcx: @mut Block, p: @ast::Pat) {
if !pat_is_binding_or_wild(bcx.tcx().def_map, p) {
bcx.sess().span_bug(
p.span,
fmt!("Expected an identifier pattern but found p: %s",
format!("Expected an identifier pattern but found p: {}",
p.repr(bcx.tcx())));
}
}
@ -486,7 +486,7 @@ fn enter_match<'r>(bcx: @mut Block,
val: ValueRef,
e: enter_pat)
-> ~[Match<'r>] {
debug!("enter_match(bcx=%s, m=%s, col=%u, val=%s)",
debug2!("enter_match(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
col,
@ -523,7 +523,7 @@ fn enter_match<'r>(bcx: @mut Block,
}
}
debug!("result=%s", result.repr(bcx.tcx()));
debug2!("result={}", result.repr(bcx.tcx()));
return result;
}
@ -535,7 +535,7 @@ fn enter_default<'r>(bcx: @mut Block,
val: ValueRef,
chk: FailureHandler)
-> ~[Match<'r>] {
debug!("enter_default(bcx=%s, m=%s, col=%u, val=%s)",
debug2!("enter_default(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
col,
@ -605,7 +605,7 @@ fn enter_opt<'r>(bcx: @mut Block,
variant_size: uint,
val: ValueRef)
-> ~[Match<'r>] {
debug!("enter_opt(bcx=%s, m=%s, opt=%?, col=%u, val=%s)",
debug2!("enter_opt(bcx={}, m={}, opt={:?}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
*opt,
@ -741,7 +741,7 @@ fn enter_rec_or_struct<'r>(bcx: @mut Block,
fields: &[ast::Ident],
val: ValueRef)
-> ~[Match<'r>] {
debug!("enter_rec_or_struct(bcx=%s, m=%s, col=%u, val=%s)",
debug2!("enter_rec_or_struct(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
col,
@ -776,7 +776,7 @@ fn enter_tup<'r>(bcx: @mut Block,
val: ValueRef,
n_elts: uint)
-> ~[Match<'r>] {
debug!("enter_tup(bcx=%s, m=%s, col=%u, val=%s)",
debug2!("enter_tup(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
col,
@ -802,7 +802,7 @@ fn enter_tuple_struct<'r>(bcx: @mut Block,
val: ValueRef,
n_elts: uint)
-> ~[Match<'r>] {
debug!("enter_tuple_struct(bcx=%s, m=%s, col=%u, val=%s)",
debug2!("enter_tuple_struct(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
col,
@ -827,7 +827,7 @@ fn enter_box<'r>(bcx: @mut Block,
col: uint,
val: ValueRef)
-> ~[Match<'r>] {
debug!("enter_box(bcx=%s, m=%s, col=%u, val=%s)",
debug2!("enter_box(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
col,
@ -854,7 +854,7 @@ fn enter_uniq<'r>(bcx: @mut Block,
col: uint,
val: ValueRef)
-> ~[Match<'r>] {
debug!("enter_uniq(bcx=%s, m=%s, col=%u, val=%s)",
debug2!("enter_uniq(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
col,
@ -881,7 +881,7 @@ fn enter_region<'r>(bcx: @mut Block,
col: uint,
val: ValueRef)
-> ~[Match<'r>] {
debug!("enter_region(bcx=%s, m=%s, col=%u, val=%s)",
debug2!("enter_region(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
col,
@ -1232,7 +1232,7 @@ impl FailureHandler {
fn handle_fail(&self) -> BasicBlockRef {
match *self {
Infallible => {
fail!("attempted to fail in infallible failure handler!")
fail2!("attempted to fail in infallible failure handler!")
}
JumpToBasicBlock(basic_block) => basic_block,
CustomFailureHandlerClass(custom_failure_handler) => {
@ -1295,7 +1295,7 @@ fn compare_values(cx: @mut Block,
let scratch_rhs = alloca(cx, val_ty(rhs), "__rhs");
Store(cx, rhs, scratch_rhs);
let did = langcall(cx, None,
fmt!("comparison of `%s`", cx.ty_to_str(rhs_t)),
format!("comparison of `{}`", cx.ty_to_str(rhs_t)),
UniqStrEqFnLangItem);
let result = callee::trans_lang_call(cx, did, [scratch_lhs, scratch_rhs], None);
Result {
@ -1305,7 +1305,7 @@ fn compare_values(cx: @mut Block,
}
ty::ty_estr(_) => {
let did = langcall(cx, None,
fmt!("comparison of `%s`", cx.ty_to_str(rhs_t)),
format!("comparison of `{}`", cx.ty_to_str(rhs_t)),
StrEqFnLangItem);
let result = callee::trans_lang_call(cx, did, [lhs, rhs], None);
Result {
@ -1383,7 +1383,7 @@ fn insert_lllocals(bcx: @mut Block,
}
};
debug!("binding %? to %s", binding_info.id, bcx.val_to_str(llval));
debug2!("binding {:?} to {}", binding_info.id, bcx.val_to_str(llval));
llmap.insert(binding_info.id, llval);
if bcx.sess().opts.extra_debuginfo {
@ -1404,7 +1404,7 @@ fn compile_guard(bcx: @mut Block,
vals: &[ValueRef],
chk: FailureHandler)
-> @mut Block {
debug!("compile_guard(bcx=%s, guard_expr=%s, m=%s, vals=%s)",
debug2!("compile_guard(bcx={}, guard_expr={}, m={}, vals={})",
bcx.to_str(),
bcx.expr_to_str(guard_expr),
m.repr(bcx.tcx()),
@ -1458,7 +1458,7 @@ fn compile_submatch(bcx: @mut Block,
m: &[Match],
vals: &[ValueRef],
chk: FailureHandler) {
debug!("compile_submatch(bcx=%s, m=%s, vals=%s)",
debug2!("compile_submatch(bcx={}, m={}, vals={})",
bcx.to_str(),
m.repr(bcx.tcx()),
vec_map_to_str(vals, |v| bcx.val_to_str(*v)));
@ -1624,7 +1624,7 @@ fn compile_submatch_continue(mut bcx: @mut Block,
// Decide what kind of branch we need
let opts = get_options(bcx, m, col);
debug!("options=%?", opts);
debug2!("options={:?}", opts);
let mut kind = no_branch;
let mut test_val = val;
if opts.len() > 0u {
@ -2113,13 +2113,13 @@ fn bind_irrefutable_pat(bcx: @mut Block,
* - binding_mode: is this for an argument or a local variable?
*/
debug!("bind_irrefutable_pat(bcx=%s, pat=%s, binding_mode=%?)",
debug2!("bind_irrefutable_pat(bcx={}, pat={}, binding_mode={:?})",
bcx.to_str(),
pat.repr(bcx.tcx()),
binding_mode);
if bcx.sess().asm_comments() {
add_comment(bcx, fmt!("bind_irrefutable_pat(pat=%s)",
add_comment(bcx, format!("bind_irrefutable_pat(pat={})",
pat.repr(bcx.tcx())));
}
@ -2241,7 +2241,7 @@ fn bind_irrefutable_pat(bcx: @mut Block,
ast::PatVec(*) => {
bcx.tcx().sess.span_bug(
pat.span,
fmt!("vector patterns are never irrefutable!"));
format!("vector patterns are never irrefutable!"));
}
ast::PatWild | ast::PatLit(_) | ast::PatRange(_, _) => ()
}


@ -113,13 +113,13 @@ pub fn represent_node(bcx: @mut Block, node: ast::NodeId) -> @Repr {
/// Decides how to represent a given type.
pub fn represent_type(cx: &mut CrateContext, t: ty::t) -> @Repr {
debug!("Representing: %s", ty_to_str(cx.tcx, t));
debug2!("Representing: {}", ty_to_str(cx.tcx, t));
match cx.adt_reprs.find(&t) {
Some(repr) => return *repr,
None => { }
}
let repr = @represent_type_uncached(cx, t);
debug!("Represented as: %?", repr)
debug2!("Represented as: {:?}", repr)
cx.adt_reprs.insert(t, repr);
return repr;
}
@ -179,7 +179,7 @@ fn represent_type_uncached(cx: &mut CrateContext, t: ty::t) -> Repr {
// non-empty body, explicit discriminants should have
// been rejected by a checker before this point.
if !cases.iter().enumerate().all(|(i,c)| c.discr == (i as Disr)) {
cx.sess.bug(fmt!("non-C-like enum %s with specified \
cx.sess.bug(format!("non-C-like enum {} with specified \
discriminants",
ty::item_path_str(cx.tcx, def_id)))
}


@ -47,7 +47,7 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {
let e = match out.node {
ast::ExprAddrOf(_, e) => e,
_ => fail!("Expression must be addr of")
_ => fail2!("Expression must be addr of")
};
unpack_result!(bcx, {
@ -89,7 +89,7 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {
let mut clobbers = getClobbers();
if !ia.clobbers.is_empty() && !clobbers.is_empty() {
clobbers = fmt!("%s,%s", ia.clobbers, clobbers);
clobbers = format!("{},{}", ia.clobbers, clobbers);
} else {
clobbers.push_str(ia.clobbers);
};
@ -102,7 +102,7 @@ pub fn trans_inline_asm(bcx: @mut Block, ia: &ast::inline_asm) -> @mut Block {
constraints.push_str(clobbers);
}
debug!("Asm Constraints: %?", constraints);
debug2!("Asm Constraints: {:?}", constraints);
let numOutputs = outputs.len();


@ -351,7 +351,7 @@ pub fn malloc_raw_dyn(bcx: @mut Block,
match li.require(it) {
Ok(id) => id,
Err(s) => {
bcx.tcx().sess.fatal(fmt!("allocation of `%s` %s",
bcx.tcx().sess.fatal(format!("allocation of `{}` {}",
bcx.ty_to_str(t), s));
}
}
@ -379,7 +379,7 @@ pub fn malloc_raw_dyn(bcx: @mut Block,
(ty::mk_imm_box,
require_alloc_fn(bcx, t, ClosureExchangeMallocFnLangItem))
}
_ => fail!("heap_exchange already handled")
_ => fail2!("heap_exchange already handled")
};
// Grab the TypeRef type of box_ptr_ty.
@ -911,20 +911,18 @@ pub fn invoke(bcx: @mut Block, llfn: ValueRef, llargs: ~[ValueRef],
}
match bcx.node_info {
None => debug!("invoke at ???"),
None => debug2!("invoke at ???"),
Some(node_info) => {
debug!("invoke at %s",
debug2!("invoke at {}",
bcx.sess().codemap.span_to_str(node_info.span));
}
}
if need_invoke(bcx) {
unsafe {
debug!("invoking %x at %x",
::std::cast::transmute(llfn),
::std::cast::transmute(bcx.llbb));
debug2!("invoking {} at {}", llfn, bcx.llbb);
for &llarg in llargs.iter() {
debug!("arg: %x", ::std::cast::transmute(llarg));
debug2!("arg: {}", llarg);
}
}
let normal_bcx = sub_block(bcx, "normal return");
@ -937,11 +935,9 @@ pub fn invoke(bcx: @mut Block, llfn: ValueRef, llargs: ~[ValueRef],
return (llresult, normal_bcx);
} else {
unsafe {
debug!("calling %x at %x",
::std::cast::transmute(llfn),
::std::cast::transmute(bcx.llbb));
debug2!("calling {} at {}", llfn, bcx.llbb);
for &llarg in llargs.iter() {
debug!("arg: %x", ::std::cast::transmute(llarg));
debug2!("arg: {}", llarg);
}
}
let llresult = Call(bcx, llfn, llargs, attributes);
@ -1092,7 +1088,7 @@ pub fn find_bcx_for_scope(bcx: @mut Block, scope_id: ast::NodeId) -> @mut Block
}
None => {
bcx_sid = match bcx_sid.parent {
None => bcx.tcx().sess.bug(fmt!("no enclosing scope with id %d", scope_id)),
None => bcx.tcx().sess.bug(format!("no enclosing scope with id {}", scope_id)),
Some(bcx_par) => bcx_par
};
bcx_sid.scope
@ -1161,7 +1157,7 @@ pub fn ignore_lhs(_bcx: @mut Block, local: &ast::Local) -> bool {
pub fn init_local(bcx: @mut Block, local: &ast::Local) -> @mut Block {
debug!("init_local(bcx=%s, local.id=%?)",
debug2!("init_local(bcx={}, local.id={:?})",
bcx.to_str(), local.id);
let _indenter = indenter();
@ -1182,7 +1178,7 @@ pub fn init_local(bcx: @mut Block, local: &ast::Local) -> @mut Block {
pub fn trans_stmt(cx: @mut Block, s: &ast::Stmt) -> @mut Block {
let _icx = push_ctxt("trans_stmt");
debug!("trans_stmt(%s)", stmt_to_str(s, cx.tcx().sess.intr()));
debug2!("trans_stmt({})", stmt_to_str(s, cx.tcx().sess.intr()));
if cx.sess().asm_comments() {
add_span_comment(cx, s.span, stmt_to_str(s, cx.ccx().sess.intr()));
@ -1345,12 +1341,12 @@ pub fn cleanup_and_leave(bcx: @mut Block,
let mut bcx = bcx;
let is_lpad = leave == None;
loop {
debug!("cleanup_and_leave: leaving %s", cur.to_str());
debug2!("cleanup_and_leave: leaving {}", cur.to_str());
if bcx.sess().trace() {
trans_trace(
bcx, None,
(fmt!("cleanup_and_leave(%s)", cur.to_str())).to_managed());
(format!("cleanup_and_leave({})", cur.to_str())).to_managed());
}
let mut cur_scope = cur.scope;
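
In the trace calls above, the new code builds the message with format! and then converts it with to_managed() to the @str the 2013 API expected. On current Rust, format! simply yields an owned String, as in this small sketch; the block name is made up:

    fn main() {
        let cur = "block 7";                                   // illustrative stand-in for cur.to_str()
        let s: String = format!("cleanup_and_leave({})", cur); // format! returns an owned String today
        assert_eq!(s, "cleanup_and_leave(block 7)");
        println!("{}", s);
    }
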
@ -1419,12 +1415,12 @@ pub fn cleanup_block(bcx: @mut Block, upto: Option<BasicBlockRef>) -> @mut Block
let mut cur = bcx;
let mut bcx = bcx;
loop {
debug!("cleanup_block: %s", cur.to_str());
debug2!("cleanup_block: {}", cur.to_str());
if bcx.sess().trace() {
trans_trace(
bcx, None,
(fmt!("cleanup_block(%s)", cur.to_str())).to_managed());
(format!("cleanup_block({})", cur.to_str())).to_managed());
}
let mut cur_scope = cur.scope;
@ -1469,7 +1465,7 @@ pub fn with_scope(bcx: @mut Block,
f: &fn(@mut Block) -> @mut Block) -> @mut Block {
let _icx = push_ctxt("with_scope");
debug!("with_scope(bcx=%s, opt_node_info=%?, name=%s)",
debug2!("with_scope(bcx={}, opt_node_info={:?}, name={})",
bcx.to_str(), opt_node_info, name);
let _indenter = indenter();
@ -1599,7 +1595,7 @@ pub fn alloc_ty(bcx: @mut Block, t: ty::t, name: &str) -> ValueRef {
let _icx = push_ctxt("alloc_ty");
let ccx = bcx.ccx();
let ty = type_of::type_of(ccx, t);
assert!(!ty::type_has_params(t), "Type has params: %s", ty_to_str(ccx.tcx, t));
assert!(!ty::type_has_params(t));
let val = alloca(bcx, ty, name);
return val;
}
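
The hunk above drops the formatted assertion message along with fmt!. Today's assert! accepts format!-style arguments directly, so an equivalent message could be kept; a sketch under illustrative names (ty_name stands in for ty_to_str(ccx.tcx, t)):

    fn main() {
        let ty_name = "Foo<T>"; // illustrative
        let has_params = false;
        // Current assert! takes format! arguments, so the message could be preserved:
        assert!(!has_params, "Type has params: {}", ty_name);
        println!("assertion passed");
    }
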
@ -1688,8 +1684,8 @@ pub fn new_fn_ctxt_w_id(ccx: @mut CrateContext,
-> @mut FunctionContext {
for p in param_substs.iter() { p.validate(); }
debug!("new_fn_ctxt_w_id(path=%s, id=%?, \
param_substs=%s)",
debug2!("new_fn_ctxt_w_id(path={}, id={:?}, \
param_substs={})",
path_str(ccx.sess, path),
id,
param_substs.repr(ccx.tcx));
@ -1802,7 +1798,7 @@ pub fn copy_args_to_allocas(fcx: @mut FunctionContext,
args: &[ast::arg],
raw_llargs: &[ValueRef],
arg_tys: &[ty::t]) -> @mut Block {
debug!("copy_args_to_allocas: raw_llargs=%s arg_tys=%s",
debug2!("copy_args_to_allocas: raw_llargs={} arg_tys={}",
raw_llargs.llrepr(fcx.ccx),
arg_tys.repr(fcx.ccx.tcx));
@ -1926,7 +1922,7 @@ pub fn trans_closure(ccx: @mut CrateContext,
let _icx = push_ctxt("trans_closure");
set_uwtable(llfndecl);
debug!("trans_closure(..., param_substs=%s)",
debug2!("trans_closure(..., param_substs={})",
param_substs.repr(ccx.tcx));
let fcx = new_fn_ctxt_w_id(ccx,
@ -2006,7 +2002,7 @@ pub fn trans_fn(ccx: @mut CrateContext,
let the_path_str = path_str(ccx.sess, path);
let _s = StatRecorder::new(ccx, the_path_str);
debug!("trans_fn(self_arg=%?, param_substs=%s)",
debug2!("trans_fn(self_arg={:?}, param_substs={})",
self_arg,
param_substs.repr(ccx.tcx));
let _icx = push_ctxt("trans_fn");
@ -2042,7 +2038,7 @@ fn insert_synthetic_type_entries(bcx: @mut Block,
let tcx = bcx.tcx();
for i in range(0u, fn_args.len()) {
debug!("setting type of argument %u (pat node %d) to %s",
debug2!("setting type of argument {} (pat node {}) to {}",
i, fn_args[i].pat.id, bcx.ty_to_str(arg_tys[i]));
let pat_id = fn_args[i].pat.id;
@ -2141,8 +2137,8 @@ pub fn trans_enum_variant_or_tuple_like_struct<A:IdAndTy>(
let result_ty = match ty::get(ctor_ty).sty {
ty::ty_bare_fn(ref bft) => bft.sig.output,
_ => ccx.sess.bug(
fmt!("trans_enum_variant_or_tuple_like_struct: \
unexpected ctor return type %s",
format!("trans_enum_variant_or_tuple_like_struct: \
unexpected ctor return type {}",
ty_to_str(ccx.tcx, ctor_ty)))
};
@ -2218,7 +2214,7 @@ pub fn trans_item(ccx: @mut CrateContext, item: &ast::item) {
let path = match ccx.tcx.items.get_copy(&item.id) {
ast_map::node_item(_, p) => p,
// tjc: ?
_ => fail!("trans_item"),
_ => fail2!("trans_item"),
};
match item.node {
ast::item_fn(ref decl, purity, _abis, ref generics, ref body) => {
@ -2360,7 +2356,7 @@ pub fn register_fn(ccx: @mut CrateContext,
assert!(f.abis.is_rust() || f.abis.is_intrinsic());
f
}
_ => fail!("expected bare rust fn or an intrinsic")
_ => fail2!("expected bare rust fn or an intrinsic")
};
let llfn = decl_rust_fn(ccx, f.sig.inputs, f.sig.output, sym);
@ -2376,7 +2372,7 @@ pub fn register_fn_llvmty(ccx: @mut CrateContext,
cc: lib::llvm::CallConv,
fn_ty: Type)
-> ValueRef {
debug!("register_fn_fuller creating fn for item %d with path %s",
debug2!("register_fn_fuller creating fn for item {} with path {}",
node_id,
ast_map::path_to_str(item_path(ccx, &node_id), token::get_ident_interner()));
@ -2455,7 +2451,7 @@ pub fn create_entry_wrapper(ccx: @mut CrateContext,
};
(start_fn, args)
} else {
debug!("using user-defined start fn");
debug2!("using user-defined start fn");
let args = ~[
C_null(Type::opaque_box(ccx).ptr_to()),
llvm::LLVMGetParam(llfn, 0 as c_uint),
@ -2503,7 +2499,7 @@ fn exported_name(ccx: &mut CrateContext, path: path, ty: ty::t, attrs: &[ast::At
}
pub fn get_item_val(ccx: @mut CrateContext, id: ast::NodeId) -> ValueRef {
debug!("get_item_val(id=`%?`)", id);
debug2!("get_item_val(id=`{:?}`)", id);
let val = ccx.item_vals.find_copy(&id);
match val {
@ -2525,10 +2521,10 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::NodeId) -> ValueRef {
// we need to get the symbol from csearch instead of
// using the current crate's name/version
// information in the hash of the symbol
debug!("making %s", sym);
debug2!("making {}", sym);
let sym = match ccx.external_srcs.find(&i.id) {
Some(&did) => {
debug!("but found in other crate...");
debug2!("but found in other crate...");
csearch::get_symbol(ccx.sess.cstore, did)
}
None => sym
@ -2579,7 +2575,7 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::NodeId) -> ValueRef {
}
if !inlineable {
debug!("%s not inlined", sym);
debug2!("{} not inlined", sym);
ccx.non_inlineable_statics.insert(id);
}
ccx.item_symbols.insert(i.id, sym);
@ -2600,7 +2596,7 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::NodeId) -> ValueRef {
llfn
}
_ => fail!("get_item_val: weird result in table")
_ => fail2!("get_item_val: weird result in table")
};
match (attr::first_attr_value_str_by_name(i.attrs, "link_section")) {
@ -2616,7 +2612,7 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::NodeId) -> ValueRef {
}
ast_map::node_trait_method(trait_method, _, pth) => {
debug!("get_item_val(): processing a node_trait_method");
debug2!("get_item_val(): processing a node_trait_method");
match *trait_method {
ast::required(_) => {
ccx.sess.bug("unexpected variant: required trait method in \
@ -2673,11 +2669,11 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::NodeId) -> ValueRef {
ast::item_enum(_, _) => {
register_fn(ccx, (*v).span, sym, id, ty)
}
_ => fail!("node_variant, shouldn't happen")
_ => fail2!("node_variant, shouldn't happen")
};
}
ast::struct_variant_kind(_) => {
fail!("struct variant kind unexpected in get_item_val")
fail2!("struct variant kind unexpected in get_item_val")
}
}
set_inline_hint(llfn);
@ -2704,7 +2700,7 @@ pub fn get_item_val(ccx: @mut CrateContext, id: ast::NodeId) -> ValueRef {
}
ref variant => {
ccx.sess.bug(fmt!("get_item_val(): unexpected variant: %?",
ccx.sess.bug(format!("get_item_val(): unexpected variant: {:?}",
variant))
}
};
@ -2959,7 +2955,7 @@ pub fn decl_crate_map(sess: session::Session, mapmeta: LinkMeta,
let cstore = sess.cstore;
while cstore::have_crate_data(cstore, n_subcrates) { n_subcrates += 1; }
let mapname = if *sess.building_library {
fmt!("%s_%s_%s", mapmeta.name, mapmeta.vers, mapmeta.extras_hash)
format!("{}_{}_{}", mapmeta.name, mapmeta.vers, mapmeta.extras_hash)
} else {
~"toplevel"
};
@ -2988,7 +2984,7 @@ pub fn fill_crate_map(ccx: &mut CrateContext, map: ValueRef) {
let cstore = ccx.sess.cstore;
while cstore::have_crate_data(cstore, i) {
let cdata = cstore::get_crate_data(cstore, i);
let nm = fmt!("_rust_crate_map_%s_%s_%s",
let nm = format!("_rust_crate_map_{}_{}_{}",
cdata.name,
cstore::get_crate_vers(cstore, i),
cstore::get_crate_hash(cstore, i));


@ -29,7 +29,7 @@ pub fn terminate(cx: &mut Block, _: &str) {
pub fn check_not_terminated(cx: &Block) {
if cx.terminated {
fail!("already terminated!");
fail2!("already terminated!");
}
}
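
fail! becomes fail2! here for the same reason fmt! becomes format!: the message now uses the brace syntax. In current Rust the macro is spelled panic!; a minimal sketch mirroring the check above, where Block is a stand-in type rather than the compiler's:

    struct Block { terminated: bool }

    fn check_not_terminated(cx: &Block) {
        if cx.terminated {
            // old: fail!("already terminated!"); transitional: fail2!("already terminated!")
            panic!("already terminated!"); // current spelling of the unconditional failure macro
        }
    }

    fn main() {
        let cx = Block { terminated: false };
        check_not_terminated(&cx);
        println!("not terminated");
    }
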
@ -117,7 +117,7 @@ pub fn Invoke(cx: @mut Block,
}
check_not_terminated(cx);
terminate(cx, "Invoke");
debug!("Invoke(%s with arguments (%s))",
debug2!("Invoke({} with arguments ({}))",
cx.val_to_str(Fn),
Args.map(|a| cx.val_to_str(*a)).connect(", "));
B(cx).invoke(Fn, Args, Then, Catch, attributes)


@ -476,7 +476,7 @@ impl Builder {
}
pub fn store(&self, val: ValueRef, ptr: ValueRef) {
debug!("Store %s -> %s",
debug2!("Store {} -> {}",
self.ccx.tn.val_to_str(val),
self.ccx.tn.val_to_str(ptr));
assert!(is_not_null(self.llbuilder));
@ -487,7 +487,7 @@ impl Builder {
}
pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
debug!("Store %s -> %s",
debug2!("Store {} -> {}",
self.ccx.tn.val_to_str(val),
self.ccx.tn.val_to_str(ptr));
self.count_insn("store.atomic");
@ -725,8 +725,8 @@ impl Builder {
pub fn add_span_comment(&self, sp: Span, text: &str) {
if self.ccx.sess.asm_comments() {
let s = fmt!("%s (%s)", text, self.ccx.sess.codemap.span_to_str(sp));
debug!("%s", s);
let s = format!("{} ({})", text, self.ccx.sess.codemap.span_to_str(sp));
debug2!("{}", s);
self.add_comment(s);
}
}
@ -734,7 +734,7 @@ impl Builder {
pub fn add_comment(&self, text: &str) {
if self.ccx.sess.asm_comments() {
let sanitized = text.replace("$", "");
let comment_text = fmt!("# %s", sanitized.replace("\n", "\n\t# "));
let comment_text = format!("\\# {}", sanitized.replace("\n", "\n\t# "));
self.count_insn("inlineasm");
let asm = do comment_text.with_c_str |c| {
unsafe {
@ -758,11 +758,11 @@ impl Builder {
else { lib::llvm::False };
let argtys = do inputs.map |v| {
debug!("Asm Input Type: %?", self.ccx.tn.val_to_str(*v));
debug2!("Asm Input Type: {:?}", self.ccx.tn.val_to_str(*v));
val_ty(*v)
};
debug!("Asm Output Type: %?", self.ccx.tn.type_to_str(output));
debug2!("Asm Output Type: {:?}", self.ccx.tn.type_to_str(output));
let fty = Type::func(argtys, &output);
unsafe {
let v = llvm::LLVMInlineAsm(


@ -49,7 +49,7 @@ fn ty_align(ty: Type) -> uint {
let elt = ty.element_type();
ty_align(elt)
}
_ => fail!("ty_align: unhandled type")
_ => fail2!("ty_align: unhandled type")
}
}
@ -79,7 +79,7 @@ fn ty_size(ty: Type) -> uint {
let eltsz = ty_size(elt);
len * eltsz
}
_ => fail!("ty_size: unhandled type")
_ => fail2!("ty_size: unhandled type")
}
}


@ -51,7 +51,7 @@ fn ty_align(ty: Type) -> uint {
let elt = ty.element_type();
ty_align(elt)
}
_ => fail!("ty_size: unhandled type")
_ => fail2!("ty_size: unhandled type")
}
}
@ -81,7 +81,7 @@ fn ty_size(ty: Type) -> uint {
let eltsz = ty_size(elt);
len * eltsz
}
_ => fail!("ty_size: unhandled type")
_ => fail2!("ty_size: unhandled type")
}
}


@ -112,7 +112,7 @@ fn classify_ty(ty: Type) -> ~[RegClass] {
let elt = ty.element_type();
ty_align(elt)
}
_ => fail!("ty_size: unhandled type")
_ => fail2!("ty_size: unhandled type")
}
}
@ -141,7 +141,7 @@ fn classify_ty(ty: Type) -> ~[RegClass] {
let eltsz = ty_size(elt);
len * eltsz
}
_ => fail!("ty_size: unhandled type")
_ => fail2!("ty_size: unhandled type")
}
}
@ -232,7 +232,7 @@ fn classify_ty(ty: Type) -> ~[RegClass] {
i += 1u;
}
}
_ => fail!("classify: unhandled type")
_ => fail2!("classify: unhandled type")
}
}
@ -325,7 +325,7 @@ fn llreg_ty(cls: &[RegClass]) -> Type {
SSEDs => {
tys.push(Type::f64());
}
_ => fail!("llregtype: unhandled class")
_ => fail2!("llregtype: unhandled class")
}
i += 1u;
}


@ -79,7 +79,7 @@ pub struct Callee {
pub fn trans(bcx: @mut Block, expr: &ast::Expr) -> Callee {
let _icx = push_ctxt("trans_callee");
debug!("callee::trans(expr=%s)", expr.repr(bcx.tcx()));
debug2!("callee::trans(expr={})", expr.repr(bcx.tcx()));
// pick out special kinds of expressions that can be called:
match expr.node {
@ -105,7 +105,7 @@ pub fn trans(bcx: @mut Block, expr: &ast::Expr) -> Callee {
_ => {
bcx.tcx().sess.span_bug(
expr.span,
fmt!("Type of callee is neither bare-fn nor closure: %s",
format!("Type of callee is neither bare-fn nor closure: {}",
bcx.ty_to_str(datum.ty)));
}
}
@ -153,7 +153,7 @@ pub fn trans(bcx: @mut Block, expr: &ast::Expr) -> Callee {
ast::DefSelfTy(*) | ast::DefMethod(*) => {
bcx.tcx().sess.span_bug(
ref_expr.span,
fmt!("Cannot translate def %? \
format!("Cannot translate def {:?} \
to a callable thing!", def));
}
}
@ -180,7 +180,7 @@ pub fn trans_fn_ref(bcx: @mut Block,
let type_params = node_id_type_params(bcx, ref_id);
let vtables = node_vtables(bcx, ref_id);
debug!("trans_fn_ref(def_id=%s, ref_id=%?, type_params=%s, vtables=%s)",
debug2!("trans_fn_ref(def_id={}, ref_id={:?}, type_params={}, vtables={})",
def_id.repr(bcx.tcx()), ref_id, type_params.repr(bcx.tcx()),
vtables.repr(bcx.tcx()));
trans_fn_ref_with_vtables(bcx, def_id, ref_id, type_params, vtables)
@ -266,8 +266,8 @@ pub fn trans_fn_ref_with_vtables(
let ccx = bcx.ccx();
let tcx = ccx.tcx;
debug!("trans_fn_ref_with_vtables(bcx=%s, def_id=%s, ref_id=%?, \
type_params=%s, vtables=%s)",
debug2!("trans_fn_ref_with_vtables(bcx={}, def_id={}, ref_id={:?}, \
type_params={}, vtables={})",
bcx.to_str(),
def_id.repr(bcx.tcx()),
ref_id,
@ -329,11 +329,11 @@ pub fn trans_fn_ref_with_vtables(
resolve_default_method_vtables(bcx, impl_id,
method, &substs, vtables);
debug!("trans_fn_with_vtables - default method: \
substs = %s, trait_subst = %s, \
first_subst = %s, new_subst = %s, \
vtables = %s, \
self_vtable = %s, param_vtables = %s",
debug2!("trans_fn_with_vtables - default method: \
substs = {}, trait_subst = {}, \
first_subst = {}, new_subst = {}, \
vtables = {}, \
self_vtable = {}, param_vtables = {}",
substs.repr(tcx), trait_ref.substs.repr(tcx),
first_subst.repr(tcx), new_substs.repr(tcx),
vtables.repr(tcx),
@ -365,7 +365,7 @@ pub fn trans_fn_ref_with_vtables(
let map_node = session::expect(
ccx.sess,
ccx.tcx.items.find(&def_id.node),
|| fmt!("local item should be in ast map"));
|| format!("local item should be in ast map"));
match *map_node {
ast_map::node_foreign_item(_, abis, _, _) => {
@ -472,7 +472,7 @@ pub fn trans_method_call(in_cx: @mut Block,
dest: expr::Dest)
-> @mut Block {
let _icx = push_ctxt("trans_method_call");
debug!("trans_method_call(call_ex=%s, rcvr=%s)",
debug2!("trans_method_call(call_ex={}, rcvr={})",
call_ex.repr(in_cx.tcx()),
rcvr.repr(in_cx.tcx()));
trans_call_inner(
@ -483,7 +483,7 @@ pub fn trans_method_call(in_cx: @mut Block,
|cx| {
match cx.ccx().maps.method_map.find_copy(&call_ex.id) {
Some(origin) => {
debug!("origin for %s: %s",
debug2!("origin for {}: {}",
call_ex.repr(in_cx.tcx()),
origin.repr(in_cx.tcx()));
@ -562,7 +562,7 @@ pub fn trans_lang_call_with_type_params(bcx: @mut Block,
substituted);
new_llval = PointerCast(callee.bcx, fn_data.llfn, llfnty);
}
_ => fail!()
_ => fail2!()
}
Callee { bcx: callee.bcx, data: Fn(FnData { llfn: new_llval }) }
},
@ -840,7 +840,7 @@ pub fn trans_arg_expr(bcx: @mut Block,
let _icx = push_ctxt("trans_arg_expr");
let ccx = bcx.ccx();
debug!("trans_arg_expr(formal_arg_ty=(%s), self_mode=%?, arg_expr=%s)",
debug2!("trans_arg_expr(formal_arg_ty=({}), self_mode={:?}, arg_expr={})",
formal_arg_ty.repr(bcx.tcx()),
self_mode,
arg_expr.repr(bcx.tcx()));
@ -850,7 +850,7 @@ pub fn trans_arg_expr(bcx: @mut Block,
let arg_datum = arg_datumblock.datum;
let bcx = arg_datumblock.bcx;
debug!(" arg datum: %s", arg_datum.to_str(bcx.ccx()));
debug2!(" arg datum: {}", arg_datum.to_str(bcx.ccx()));
let mut val;
if ty::type_is_bot(arg_datum.ty) {
@ -890,11 +890,11 @@ pub fn trans_arg_expr(bcx: @mut Block,
val = match self_mode {
ty::ByRef => {
debug!("by ref arg with type %s", bcx.ty_to_str(arg_datum.ty));
debug2!("by ref arg with type {}", bcx.ty_to_str(arg_datum.ty));
arg_datum.to_ref_llval(bcx)
}
ty::ByCopy => {
debug!("by copy arg with type %s", bcx.ty_to_str(arg_datum.ty));
debug2!("by copy arg with type {}", bcx.ty_to_str(arg_datum.ty));
arg_datum.to_appropriate_llval(bcx)
}
}
@ -904,12 +904,12 @@ pub fn trans_arg_expr(bcx: @mut Block,
if formal_arg_ty != arg_datum.ty {
// this could happen due to e.g. subtyping
let llformal_arg_ty = type_of::type_of_explicit_arg(ccx, formal_arg_ty);
debug!("casting actual type (%s) to match formal (%s)",
debug2!("casting actual type ({}) to match formal ({})",
bcx.val_to_str(val), bcx.llty_str(llformal_arg_ty));
val = PointerCast(bcx, val, llformal_arg_ty);
}
}
debug!("--- trans_arg_expr passing %s", bcx.val_to_str(val));
debug2!("--- trans_arg_expr passing {}", bcx.val_to_str(val));
return rslt(bcx, val);
}


@ -127,7 +127,7 @@ impl EnvAction {
impl EnvValue {
pub fn to_str(&self, ccx: &CrateContext) -> ~str {
fmt!("%s(%s)", self.action.to_str(), self.datum.to_str(ccx))
format!("{}({})", self.action.to_str(), self.datum.to_str(ccx))
}
}
@ -151,7 +151,7 @@ pub fn mk_closure_tys(tcx: ty::ctxt,
}
});
let cdata_ty = ty::mk_tup(tcx, bound_tys);
debug!("cdata_ty=%s", ty_to_str(tcx, cdata_ty));
debug2!("cdata_ty={}", ty_to_str(tcx, cdata_ty));
return cdata_ty;
}
@ -224,15 +224,15 @@ pub fn store_environment(bcx: @mut Block,
let Result {bcx: bcx, val: llbox} = allocate_cbox(bcx, sigil, cdata_ty);
let llbox = PointerCast(bcx, llbox, llboxptr_ty);
debug!("tuplify_box_ty = %s", ty_to_str(tcx, cbox_ty));
debug2!("tuplify_box_ty = {}", ty_to_str(tcx, cbox_ty));
// Copy expr values into boxed bindings.
let mut bcx = bcx;
for (i, bv) in bound_values.iter().enumerate() {
debug!("Copy %s into closure", bv.to_str(ccx));
debug2!("Copy {} into closure", bv.to_str(ccx));
if ccx.sess.asm_comments() {
add_comment(bcx, fmt!("Copy %s into closure",
add_comment(bcx, format!("Copy {} into closure",
bv.to_str(ccx)));
}
@ -268,7 +268,7 @@ pub fn build_closure(bcx0: @mut Block,
// Package up the captured upvars
let mut env_vals = ~[];
for cap_var in cap_vars.iter() {
debug!("Building closure: captured variable %?", *cap_var);
debug2!("Building closure: captured variable {:?}", *cap_var);
let datum = expr::trans_local_var(bcx, cap_var.def);
match cap_var.mode {
moves::CapRef => {
@ -384,7 +384,7 @@ pub fn trans_expr_fn(bcx: @mut Block,
let fty = node_id_type(bcx, outer_id);
let f = match ty::get(fty).sty {
ty::ty_closure(ref f) => f,
_ => fail!("expected closure")
_ => fail2!("expected closure")
};
let sub_path = vec::append_one(bcx.fcx.path.clone(),


@ -169,7 +169,7 @@ impl param_substs {
}
fn param_substs_to_str(this: &param_substs, tcx: ty::ctxt) -> ~str {
fmt!("param_substs {tys:%s, vtables:%s}",
format!("param_substs \\{tys:{}, vtables:{}\\}",
this.tys.repr(tcx),
this.vtables.repr(tcx))
}
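
The new format string above escapes the literal braces around the struct-like output with backslashes, the transitional syntax used by this commit. Current format! writes literal braces by doubling them instead; a sketch with illustrative values:

    fn main() {
        let tys = "[int]"; // illustrative values, not the compiler's real data
        let vtables = "[]";
        // Diff escapes literal braces as \{ ... \}; current format! doubles them:
        let s = format!("param_substs {{tys:{}, vtables:{}}}", tys, vtables);
        assert_eq!(s, "param_substs {tys:[int], vtables:[]}");
        println!("{}", s);
    }
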
@ -436,7 +436,7 @@ pub fn add_clean(bcx: @mut Block, val: ValueRef, t: ty::t) {
return
}
debug!("add_clean(%s, %s, %s)", bcx.to_str(), bcx.val_to_str(val), t.repr(bcx.tcx()));
debug2!("add_clean({}, {}, {})", bcx.to_str(), bcx.val_to_str(val), t.repr(bcx.tcx()));
let cleanup_type = cleanup_type(bcx.tcx(), t);
do in_scope_cx(bcx, None) |scope_info| {
@ -451,7 +451,7 @@ pub fn add_clean(bcx: @mut Block, val: ValueRef, t: ty::t) {
pub fn add_clean_temp_immediate(cx: @mut Block, val: ValueRef, ty: ty::t) {
if !ty::type_needs_drop(cx.tcx(), ty) { return; }
debug!("add_clean_temp_immediate(%s, %s, %s)",
debug2!("add_clean_temp_immediate({}, {}, {})",
cx.to_str(), cx.val_to_str(val),
ty.repr(cx.tcx()));
let cleanup_type = cleanup_type(cx.tcx(), ty);
@ -480,7 +480,7 @@ pub fn add_clean_temp_mem_in_scope(bcx: @mut Block,
pub fn add_clean_temp_mem_in_scope_(bcx: @mut Block, scope_id: Option<ast::NodeId>,
val: ValueRef, t: ty::t) {
if !ty::type_needs_drop(bcx.tcx(), t) { return; }
debug!("add_clean_temp_mem(%s, %s, %s)",
debug2!("add_clean_temp_mem({}, {}, {})",
bcx.to_str(), bcx.val_to_str(val),
t.repr(bcx.tcx()));
let cleanup_type = cleanup_type(bcx.tcx(), t);
@ -509,7 +509,7 @@ pub fn add_clean_return_to_mut(bcx: @mut Block,
//! box was frozen initially. Here, both `frozen_val_ref` and
//! `bits_val_ref` are in fact pointers to stack slots.
debug!("add_clean_return_to_mut(%s, %s, %s)",
debug2!("add_clean_return_to_mut({}, {}, {})",
bcx.to_str(),
bcx.val_to_str(frozen_val_ref),
bcx.val_to_str(bits_val_ref));
@ -705,8 +705,8 @@ impl Block {
match self.tcx().def_map.find(&nid) {
Some(&v) => v,
None => {
self.tcx().sess.bug(fmt!(
"No def associated with node id %?", nid));
self.tcx().sess.bug(format!(
"No def associated with node id {:?}", nid));
}
}
}
@ -726,8 +726,8 @@ impl Block {
pub fn to_str(&self) -> ~str {
unsafe {
match self.node_info {
Some(node_info) => fmt!("[block %d]", node_info.id),
None => fmt!("[block %x]", transmute(&*self)),
Some(node_info) => format!("[block {}]", node_info.id),
None => format!("[block {}]", transmute::<&Block, *Block>(self)),
}
}
}
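
The replacement above prints a transmuted raw pointer with {}. On current Rust that placeholder requires Display, which raw pointers do not implement, so the pointer form {:p} would be used instead; a minimal sketch:

    fn main() {
        let x = 7u32;
        let p: *const u32 = &x;
        // Current Rust formats raw pointers with {:p} (the Pointer trait), not {}:
        let s = format!("[block {:p}]", p);
        println!("{}", s);
    }
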
@ -763,7 +763,7 @@ pub fn in_scope_cx(cx: @mut Block, scope_id: Option<ast::NodeId>, f: &fn(si: &mu
Some(inf) => match scope_id {
Some(wanted) => match inf.node_info {
Some(NodeInfo { id: actual, _ }) if wanted == actual => {
debug!("in_scope_cx: selected cur=%s (cx=%s)",
debug2!("in_scope_cx: selected cur={} (cx={})",
cur.to_str(), cx.to_str());
f(inf);
return;
@ -771,7 +771,7 @@ pub fn in_scope_cx(cx: @mut Block, scope_id: Option<ast::NodeId>, f: &fn(si: &mu
_ => inf.parent,
},
None => {
debug!("in_scope_cx: selected cur=%s (cx=%s)",
debug2!("in_scope_cx: selected cur={} (cx={})",
cur.to_str(), cx.to_str());
f(inf);
return;
@ -788,7 +788,7 @@ pub fn in_scope_cx(cx: @mut Block, scope_id: Option<ast::NodeId>, f: &fn(si: &mu
pub fn block_parent(cx: @mut Block) -> @mut Block {
match cx.parent {
Some(b) => b,
None => cx.sess().bug(fmt!("block_parent called on root block %?",
None => cx.sess().bug(format!("block_parent called on root block {:?}",
cx))
}
}
@ -881,7 +881,7 @@ pub fn C_cstr(cx: &mut CrateContext, s: @str) -> ValueRef {
};
let gsym = token::gensym("str");
let g = do fmt!("str%u", gsym).with_c_str |buf| {
let g = do format!("str{}", gsym).with_c_str |buf| {
llvm::LLVMAddGlobal(cx.llmod, val_ty(sc).to_ref(), buf)
};
llvm::LLVMSetInitializer(g, sc);
@ -964,7 +964,7 @@ pub fn const_get_elt(cx: &CrateContext, v: ValueRef, us: &[c_uint])
llvm::LLVMConstExtractValue(v, p, len as c_uint)
};
debug!("const_get_elt(v=%s, us=%?, r=%s)",
debug2!("const_get_elt(v={}, us={:?}, r={})",
cx.tn.val_to_str(v), us, cx.tn.val_to_str(r));
return r;
@ -1115,7 +1115,7 @@ pub fn node_id_type_params(bcx: &mut Block, id: ast::NodeId) -> ~[ty::t] {
if !params.iter().all(|t| !ty::type_needs_infer(*t)) {
bcx.sess().bug(
fmt!("Type parameters for node %d include inference types: %s",
format!("Type parameters for node {} include inference types: {}",
id, params.map(|t| bcx.ty_to_str(*t)).connect(",")));
}
@ -1193,7 +1193,7 @@ pub fn resolve_vtable_under_param_substs(tcx: ty::ctxt,
find_vtable(tcx, substs, n_param, n_bound)
}
_ => {
tcx.sess.bug(fmt!(
tcx.sess.bug(format!(
"resolve_vtable_under_param_substs: asked to lookup \
but no vtables in the fn_ctxt!"))
}
@ -1207,7 +1207,7 @@ pub fn find_vtable(tcx: ty::ctxt,
n_param: typeck::param_index,
n_bound: uint)
-> typeck::vtable_origin {
debug!("find_vtable(n_param=%?, n_bound=%u, ps=%s)",
debug2!("find_vtable(n_param={:?}, n_bound={}, ps={})",
n_param, n_bound, ps.repr(tcx));
let param_bounds = match n_param {
@ -1248,7 +1248,7 @@ pub fn langcall(bcx: @mut Block, span: Option<Span>, msg: &str,
match bcx.tcx().lang_items.require(li) {
Ok(id) => id,
Err(s) => {
let msg = fmt!("%s %s", msg, s);
let msg = format!("{} {}", msg, s);
match span {
Some(span) => { bcx.tcx().sess.span_fatal(span, msg); }
None => { bcx.tcx().sess.fatal(msg); }


@ -52,7 +52,7 @@ pub fn const_lit(cx: &mut CrateContext, e: &ast::Expr, lit: ast::lit)
C_integral(Type::uint_from_ty(cx, t), i as u64, false)
}
_ => cx.sess.span_bug(lit.span,
fmt!("integer literal has type %s (expected int or uint)",
format!("integer literal has type {} (expected int or uint)",
ty_to_str(cx.tcx, lit_int_ty)))
}
}
@ -144,14 +144,14 @@ fn const_deref(cx: &mut CrateContext, v: ValueRef, t: ty::t, explicit: bool)
const_deref_newtype(cx, v, t)
}
_ => {
cx.sess.bug(fmt!("Unexpected dereferenceable type %s",
cx.sess.bug(format!("Unexpected dereferenceable type {}",
ty_to_str(cx.tcx, t)))
}
};
(dv, mt.ty)
}
None => {
cx.sess.bug(fmt!("Can't dereference const of type %s",
cx.sess.bug(format!("Can't dereference const of type {}",
ty_to_str(cx.tcx, t)))
}
}
@ -189,8 +189,8 @@ pub fn const_expr(cx: @mut CrateContext, e: &ast::Expr) -> (ValueRef, bool) {
llconst = C_struct([llconst, C_null(Type::opaque_box(cx).ptr_to())])
}
Some(@ty::AutoAddEnv(ref r, ref s)) => {
cx.sess.span_bug(e.span, fmt!("unexpected static function: \
region %? sigil %?", *r, *s))
cx.sess.span_bug(e.span, format!("unexpected static function: \
region {:?} sigil {:?}", *r, *s))
}
Some(@ty::AutoDerefRef(ref adj)) => {
let mut ty = ety;
@ -234,8 +234,8 @@ pub fn const_expr(cx: @mut CrateContext, e: &ast::Expr) -> (ValueRef, bool) {
}
_ => {
cx.sess.span_bug(e.span,
fmt!("unimplemented const \
autoref %?", autoref))
format!("unimplemented const \
autoref {:?}", autoref))
}
}
}
@ -253,7 +253,7 @@ pub fn const_expr(cx: @mut CrateContext, e: &ast::Expr) -> (ValueRef, bool) {
llvm::LLVMDumpValue(llconst);
llvm::LLVMDumpValue(C_undef(llty));
}
cx.sess.bug(fmt!("const %s of type %s has size %u instead of %u",
cx.sess.bug(format!("const {} of type {} has size {} instead of {}",
e.repr(cx.tcx), ty_to_str(cx.tcx, ety),
csize, tsize));
}


@ -252,7 +252,7 @@ impl CrateContext {
pub fn const_inbounds_gepi(&self,
pointer: ValueRef,
indices: &[uint]) -> ValueRef {
debug!("const_inbounds_gepi: pointer=%s indices=%?",
debug2!("const_inbounds_gepi: pointer={} indices={:?}",
self.tn.val_to_str(pointer), indices);
let v: ~[ValueRef] =
indices.iter().map(|i| C_i32(*i as i32)).collect();


@ -50,7 +50,7 @@ pub fn trans_if(bcx: @mut Block,
els: Option<@ast::Expr>,
dest: expr::Dest)
-> @mut Block {
debug!("trans_if(bcx=%s, cond=%s, thn=%?, dest=%s)",
debug2!("trans_if(bcx={}, cond={}, thn={:?}, dest={})",
bcx.to_str(), bcx.expr_to_str(cond), thn.id,
dest.to_str(bcx.ccx()));
let _indenter = indenter();
@ -119,7 +119,7 @@ pub fn trans_if(bcx: @mut Block,
}
};
debug!("then_bcx_in=%s, else_bcx_in=%s",
debug2!("then_bcx_in={}, else_bcx_in={}",
then_bcx_in.to_str(), else_bcx_in.to_str());
CondBr(bcx, cond_val, then_bcx_in.llbb, else_bcx_in.llbb);


@ -242,7 +242,7 @@ impl Datum {
action: CopyAction,
datum: Datum)
-> @mut Block {
debug!("store_to_datum(self=%s, action=%?, datum=%s)",
debug2!("store_to_datum(self={}, action={:?}, datum={})",
self.to_str(bcx.ccx()), action, datum.to_str(bcx.ccx()));
assert!(datum.mode.is_by_ref());
self.store_to(bcx, action, datum.val)
@ -275,7 +275,7 @@ impl Datum {
return bcx;
}
debug!("copy_to(self=%s, action=%?, dst=%s)",
debug2!("copy_to(self={}, action={:?}, dst={})",
self.to_str(bcx.ccx()), action, bcx.val_to_str(dst));
// Watch out for the case where we are writing the copying the
@ -340,7 +340,7 @@ impl Datum {
let _icx = push_ctxt("move_to");
let mut bcx = bcx;
debug!("move_to(self=%s, action=%?, dst=%s)",
debug2!("move_to(self={}, action={:?}, dst={})",
self.to_str(bcx.ccx()), action, bcx.val_to_str(dst));
if ty::type_is_voidish(self.ty) {
@ -380,7 +380,7 @@ impl Datum {
}
ByRef(ZeroMem) => {
bcx.tcx().sess.bug(
fmt!("Cannot add clean to a 'zero-mem' datum"));
format!("Cannot add clean to a 'zero-mem' datum"));
}
}
}
@ -404,7 +404,7 @@ impl Datum {
}
pub fn to_str(&self, ccx: &CrateContext) -> ~str {
fmt!("Datum { val=%s, ty=%s, mode=%? }",
format!("Datum \\{ val={}, ty={}, mode={:?} \\}",
ccx.tn.val_to_str(self.val),
ty_to_str(ccx.tcx, self.ty),
self.mode)
@ -573,8 +573,8 @@ impl Datum {
(unboxed_vec_ty, true)
}
_ => {
bcx.tcx().sess.bug(fmt!(
"box_body() invoked on non-box type %s",
bcx.tcx().sess.bug(format!(
"box_body() invoked on non-box type {}",
ty_to_str(bcx.tcx(), self.ty)));
}
};
@ -620,7 +620,7 @@ impl Datum {
-> (Option<Datum>, @mut Block) {
let ccx = bcx.ccx();
debug!("try_deref(expr_id=%?, derefs=%?, is_auto=%b, self=%?)",
debug2!("try_deref(expr_id={:?}, derefs={:?}, is_auto={}, self={:?})",
expr_id, derefs, is_auto, self.to_str(bcx.ccx()));
let bcx =
@ -745,7 +745,7 @@ impl Datum {
-> DatumBlock {
let _icx = push_ctxt("autoderef");
debug!("autoderef(expr_id=%d, max=%?, self=%?)",
debug2!("autoderef(expr_id={}, max={:?}, self={:?})",
expr_id, max, self.to_str(bcx.ccx()));
let _indenter = indenter();


@ -142,7 +142,7 @@ pub struct CrateDebugContext {
impl CrateDebugContext {
pub fn new(llmod: ModuleRef, crate: ~str) -> CrateDebugContext {
debug!("CrateDebugContext::new");
debug2!("CrateDebugContext::new");
let builder = unsafe { llvm::LLVMDIBuilderCreate(llmod) };
// DIBuilder inherits context from the module, so we'd better use the same one
let llcontext = unsafe { llvm::LLVMGetModuleContext(llmod) };
@ -240,7 +240,7 @@ pub fn finalize(cx: @mut CrateContext) {
return;
}
debug!("finalize");
debug2!("finalize");
compile_unit_metadata(cx);
unsafe {
llvm::LLVMDIBuilderFinalize(DIB(cx));
@ -268,7 +268,8 @@ pub fn create_local_var_metadata(bcx: @mut Block,
let llptr = match bcx.fcx.lllocals.find_copy(&node_id) {
Some(v) => v,
None => {
bcx.tcx().sess.span_bug(span, fmt!("No entry in lllocals table for %?", node_id));
bcx.tcx().sess.span_bug(span,
format!("No entry in lllocals table for {:?}", node_id));
}
};
@ -310,8 +311,8 @@ pub fn create_captured_var_metadata(bcx: @mut Block,
ast_util::path_to_ident(path)
}
_ => {
cx.sess.span_bug(span, fmt!("debuginfo::create_captured_var_metadata() - \
Captured var-id refers to unexpected ast_map variant: %?", ast_item));
cx.sess.span_bug(span, format!("debuginfo::create_captured_var_metadata() - \
Captured var-id refers to unexpected ast_map variant: {:?}", ast_item));
}
};
@ -366,7 +367,7 @@ pub fn create_match_binding_metadata(bcx: @mut Block,
let llptr = match bcx.fcx.lllocals.find_copy(&node_id) {
Some(v) => v,
None => {
bcx.tcx().sess.span_bug(span, fmt!("No entry in lllocals table for %?", node_id));
bcx.tcx().sess.span_bug(span, format!("No entry in lllocals table for {:?}", node_id));
}
};
@ -408,7 +409,7 @@ pub fn create_self_argument_metadata(bcx: @mut Block,
explicit_self.span
}
_ => bcx.ccx().sess.bug(
fmt!("create_self_argument_metadata: unexpected sort of node: %?", fnitem))
format!("create_self_argument_metadata: unexpected sort of node: {:?}", fnitem))
};
let scope_metadata = bcx.fcx.debug_context.get_ref(bcx.ccx(), span).fn_metadata;
@ -459,7 +460,8 @@ pub fn create_argument_metadata(bcx: @mut Block,
let llptr = match bcx.fcx.llargs.find_copy(&node_id) {
Some(v) => v,
None => {
bcx.tcx().sess.span_bug(span, fmt!("No entry in llargs table for %?", node_id));
bcx.tcx().sess.span_bug(span,
format!("No entry in llargs table for {:?}", node_id));
}
};
@ -501,7 +503,7 @@ pub fn set_source_location(fcx: &FunctionContext,
let cx = fcx.ccx;
debug!("set_source_location: %s", cx.sess.codemap.span_to_str(span));
debug2!("set_source_location: {}", cx.sess.codemap.span_to_str(span));
if fcx.debug_context.get_ref(cx, span).source_locations_enabled {
let loc = span_start(cx, span);
@ -574,7 +576,7 @@ pub fn create_function_debug_context(cx: &mut CrateContext,
ast_map::node_expr(ref expr) => {
match expr.node {
ast::ExprFnBlock(ref fn_decl, ref top_level_block) => {
let name = fmt!("fn%u", token::gensym("fn"));
let name = format!("fn{}", token::gensym("fn"));
let name = token::str_to_ident(name);
(name, fn_decl,
// This is not quite right. It should actually inherit the generics of the
@ -606,7 +608,8 @@ pub fn create_function_debug_context(cx: &mut CrateContext,
ast_map::node_struct_ctor(*) => {
return FunctionWithoutDebugInfo;
}
_ => cx.sess.bug(fmt!("create_function_debug_context: unexpected sort of node: %?", fnitem))
_ => cx.sess.bug(format!("create_function_debug_context: \
unexpected sort of node: {:?}", fnitem))
};
// This can be the case for functions inlined from another crate
@ -637,8 +640,8 @@ pub fn create_function_debug_context(cx: &mut CrateContext,
}
None => {
// This branch is only hit when there is a bug in the NamespaceVisitor.
cx.sess.span_warn(span, fmt!("debuginfo: Could not find namespace node for function
with name %s. This is a bug! Please report this to
cx.sess.span_warn(span, format!("debuginfo: Could not find namespace node for function
with name {}. This is a bug! Please report this to
github.com/mozilla/rust/issues", function_name));
(function_name.clone(), file_metadata)
}
@ -870,10 +873,10 @@ fn compile_unit_metadata(cx: @mut CrateContext) {
let dcx = debug_context(cx);
let crate_name: &str = dcx.crate_file;
debug!("compile_unit_metadata: %?", crate_name);
debug2!("compile_unit_metadata: {:?}", crate_name);
let work_dir = cx.sess.working_dir.to_str();
let producer = fmt!("rustc version %s", env!("CFG_VERSION"));
let producer = format!("rustc version {}", env!("CFG_VERSION"));
do crate_name.with_c_str |crate_name| {
do work_dir.with_c_str |work_dir| {
@ -980,7 +983,7 @@ fn file_metadata(cx: &mut CrateContext, full_path: &str) -> DIFile {
None => ()
}
debug!("file_metadata: %s", full_path);
debug2!("file_metadata: {}", full_path);
let work_dir = cx.sess.working_dir.to_str();
let file_name =
@ -1015,14 +1018,14 @@ fn scope_metadata(fcx: &FunctionContext,
let node = fcx.ccx.tcx.items.get_copy(&node_id);
fcx.ccx.sess.span_bug(span,
fmt!("debuginfo: Could not find scope info for node %?", node));
format!("debuginfo: Could not find scope info for node {:?}", node));
}
}
}
fn basic_type_metadata(cx: &mut CrateContext, t: ty::t) -> DIType {
debug!("basic_type_metadata: %?", ty::get(t));
debug2!("basic_type_metadata: {:?}", ty::get(t));
let (name, encoding) = match ty::get(t).sty {
ty::ty_nil | ty::ty_bot => (~"uint", DW_ATE_unsigned),
@ -1340,8 +1343,8 @@ fn describe_variant(cx: &mut CrateContext,
Some(&ast_map::node_variant(ref variant, _, _)) => variant.span,
ref node => {
cx.sess.span_warn(span,
fmt!("debuginfo::enum_metadata()::adt_struct_metadata() - Unexpected node \
type: %?. This is a bug.", node));
format!("debuginfo::enum_metadata()::adt_struct_metadata() - Unexpected node \
type: {:?}. This is a bug.", node));
codemap::dummy_sp()
}
}
@ -1659,7 +1662,7 @@ fn boxed_type_metadata(cx: &mut CrateContext,
span: Span)
-> DICompositeType {
let box_type_name = match content_type_name {
Some(content_type_name) => fmt!("Boxed<%s>", content_type_name),
Some(content_type_name) => format!("Boxed<{}>", content_type_name),
None => ~"BoxedType"
};
@ -1768,7 +1771,7 @@ fn vec_metadata(cx: &mut CrateContext,
let (element_size, element_align) = size_and_align_of(cx, element_llvm_type);
let vec_llvm_type = Type::vec(cx.sess.targ_cfg.arch, &element_llvm_type);
let vec_type_name: &str = fmt!("[%s]", ppaux::ty_to_str(cx.tcx, element_type));
let vec_type_name: &str = format!("[{}]", ppaux::ty_to_str(cx.tcx, element_type));
let member_llvm_types = vec_llvm_type.field_types();
@ -1824,7 +1827,7 @@ fn boxed_vec_metadata(cx: &mut CrateContext,
-> DICompositeType {
let element_llvm_type = type_of::type_of(cx, element_type);
let vec_llvm_type = Type::vec(cx.sess.targ_cfg.arch, &element_llvm_type);
let vec_type_name: &str = fmt!("[%s]", ppaux::ty_to_str(cx.tcx, element_type));
let vec_type_name: &str = format!("[{}]", ppaux::ty_to_str(cx.tcx, element_type));
let vec_metadata = vec_metadata(cx, element_type, span);
return boxed_type_metadata(
@ -1841,7 +1844,7 @@ fn vec_slice_metadata(cx: &mut CrateContext,
span: Span)
-> DICompositeType {
debug!("vec_slice_metadata: %?", ty::get(vec_type));
debug2!("vec_slice_metadata: {:?}", ty::get(vec_type));
let slice_llvm_type = type_of::type_of(cx, vec_type);
let slice_type_name = ppaux::ty_to_str(cx.tcx, vec_type);
@ -1956,10 +1959,10 @@ fn trait_metadata(cx: &mut CrateContext,
}
fn unimplemented_type_metadata(cx: &mut CrateContext, t: ty::t) -> DIType {
debug!("unimplemented_type_metadata: %?", ty::get(t));
debug2!("unimplemented_type_metadata: {:?}", ty::get(t));
let name = ppaux::ty_to_str(cx.tcx, t);
let metadata = do fmt!("NYI<%s>", name).with_c_str |name| {
let metadata = do format!("NYI<{}>", name).with_c_str |name| {
unsafe {
llvm::LLVMDIBuilderCreateBasicType(
DIB(cx),
@ -2008,7 +2011,7 @@ fn type_metadata(cx: &mut CrateContext,
pointer_type_metadata(cx, pointer_type, box_metadata)
}
debug!("type_metadata: %?", ty::get(t));
debug2!("type_metadata: {:?}", ty::get(t));
let sty = &ty::get(t).sty;
let type_metadata = match *sty {
@ -2095,7 +2098,7 @@ fn type_metadata(cx: &mut CrateContext,
ty::ty_opaque_box => {
create_pointer_to_box_metadata(cx, t, ty::mk_nil())
}
_ => cx.sess.bug(fmt!("debuginfo: unexpected type in type_metadata: %?", sty))
_ => cx.sess.bug(format!("debuginfo: unexpected type in type_metadata: {:?}", sty))
};
debug_context(cx).created_types.insert(cache_id, type_metadata);
@ -2127,7 +2130,7 @@ fn set_debug_location(cx: &mut CrateContext, debug_location: DebugLocation) {
match debug_location {
KnownLocation { scope, line, col } => {
debug!("setting debug location to %u %u", line, col);
debug2!("setting debug location to {} {}", line, col);
let elements = [C_i32(line as i32), C_i32(col as i32), scope, ptr::null()];
unsafe {
metadata_node = llvm::LLVMMDNodeInContext(debug_context(cx).llcontext,
@ -2136,7 +2139,7 @@ fn set_debug_location(cx: &mut CrateContext, debug_location: DebugLocation) {
}
}
UnknownLocation => {
debug!("clearing debug location ");
debug2!("clearing debug location ");
metadata_node = ptr::null();
}
};
@ -2202,8 +2205,9 @@ fn get_namespace_and_span_for_item(cx: &mut CrateContext,
let definition_span = match cx.tcx.items.find(&def_id.node) {
Some(&ast_map::node_item(@ast::item { span, _ }, _)) => span,
ref node => {
cx.sess.span_warn(warning_span, fmt!("debuginfo::get_namespace_and_span_for_item() \
- Unexpected node type: %?", *node));
cx.sess.span_warn(warning_span,
format!("debuginfo::get_namespace_and_span_for_item() \
- Unexpected node type: {:?}", *node));
codemap::dummy_sp()
}
};
@ -2682,7 +2686,7 @@ impl NamespaceTreeNode {
let mut name = ~"_ZN";
fill_nested(self, &mut name);
name.push_str(fmt!("%u%s", item_name.len(), item_name));
name.push_str(format!("{}{}", item_name.len(), item_name));
name.push_char('E');
return name;
@ -2695,7 +2699,7 @@ impl NamespaceTreeNode {
None => {}
}
let name = token::ident_to_str(&node.ident);
output.push_str(fmt!("%u%s", name.len(), name));
output.push_str(format!("{}{}", name.len(), name));
}
}
}
@ -2704,7 +2708,7 @@ fn namespace_for_external_item(cx: &mut CrateContext,
item_path: &ast_map::path)
-> @NamespaceTreeNode {
if item_path.len() < 2 {
cx.sess.bug(fmt!("debuginfo::namespace_for_external_item() - Invalid item_path: %s",
cx.sess.bug(format!("debuginfo::namespace_for_external_item() - Invalid item_path: {}",
ast_map::path_to_str(*item_path, token::get_ident_interner())));
}

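For reference, the hunks throughout this commit follow one mechanical mapping from fmt! placeholders to format! braces: %s, %u, %d and %b all become {} (the Display form), while %? becomes {:?} (the Debug form). A minimal sketch in present-day Rust, with names and values that are illustrative only, not taken from the patch:

    fn main() {
        let name = "vec_slice_metadata";
        let line = 1844u32;
        let ok = true;
        let node = ("ExprIndex", 7);
        // was fmt!("%s at line %u (ok = %b)", ...)
        let msg = format!("{} at line {} (ok = {})", name, line, ok);
        // was fmt!("node = %?", node)
        let dbg = format!("node = {:?}", node);
        println!("{}", msg);
        println!("{}", dbg);
    }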

@ -169,7 +169,7 @@ pub enum Dest {
impl Dest {
pub fn to_str(&self, ccx: &CrateContext) -> ~str {
match *self {
SaveIn(v) => fmt!("SaveIn(%s)", ccx.tn.val_to_str(v)),
SaveIn(v) => format!("SaveIn({})", ccx.tn.val_to_str(v)),
Ignore => ~"Ignore"
}
}
@ -182,7 +182,7 @@ fn drop_and_cancel_clean(bcx: @mut Block, dat: Datum) -> @mut Block {
}
pub fn trans_to_datum(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
debug!("trans_to_datum(expr=%s)", bcx.expr_to_str(expr));
debug2!("trans_to_datum(expr={})", bcx.expr_to_str(expr));
let mut bcx = bcx;
let mut datum = unpack_datum!(bcx, trans_to_datum_unadjusted(bcx, expr));
@ -190,7 +190,7 @@ pub fn trans_to_datum(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
None => { return DatumBlock {bcx: bcx, datum: datum}; }
Some(adj) => { adj }
};
debug!("unadjusted datum: %s", datum.to_str(bcx.ccx()));
debug2!("unadjusted datum: {}", datum.to_str(bcx.ccx()));
match *adjustment {
AutoAddEnv(*) => {
datum = unpack_datum!(bcx, add_env(bcx, expr, datum));
@ -232,7 +232,7 @@ pub fn trans_to_datum(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
};
}
}
debug!("after adjustments, datum=%s", datum.to_str(bcx.ccx()));
debug2!("after adjustments, datum={}", datum.to_str(bcx.ccx()));
return DatumBlock {bcx: bcx, datum: datum};
fn auto_ref(bcx: @mut Block, datum: Datum) -> DatumBlock {
@ -287,7 +287,7 @@ pub fn trans_to_datum(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
let tcx = bcx.tcx();
let closure_ty = expr_ty_adjusted(bcx, expr);
debug!("add_env(closure_ty=%s)", closure_ty.repr(tcx));
debug2!("add_env(closure_ty={})", closure_ty.repr(tcx));
let scratch = scratch_datum(bcx, closure_ty, "__adjust", false);
let llfn = GEPi(bcx, scratch.val, [0u, abi::fn_field_code]);
assert_eq!(datum.appropriate_mode(tcx), ByValue);
@ -311,7 +311,7 @@ pub fn trans_to_datum(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
source_datum: Datum) -> DatumBlock {
let tcx = bcx.tcx();
let target_obj_ty = expr_ty_adjusted(bcx, expr);
debug!("auto_borrow_obj(target=%s)",
debug2!("auto_borrow_obj(target={})",
target_obj_ty.repr(tcx));
// Extract source store information
@ -320,7 +320,7 @@ pub fn trans_to_datum(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
_ => {
bcx.sess().span_bug(
expr.span,
fmt!("auto_borrow_trait_obj expected a trait, found %s",
format!("auto_borrow_trait_obj expected a trait, found {}",
source_datum.ty.repr(bcx.tcx())));
}
};
@ -432,7 +432,7 @@ pub fn trans_into(bcx: @mut Block, expr: &ast::Expr, dest: Dest) -> @mut Block {
let ty = expr_ty(bcx, expr);
debug!("trans_into_unadjusted(expr=%s, dest=%s)",
debug2!("trans_into_unadjusted(expr={}, dest={})",
bcx.expr_to_str(expr),
dest.to_str(bcx.ccx()));
let _indenter = indenter();
@ -448,7 +448,7 @@ pub fn trans_into(bcx: @mut Block, expr: &ast::Expr, dest: Dest) -> @mut Block {
};
let kind = bcx.expr_kind(expr);
debug!("expr kind = %?", kind);
debug2!("expr kind = {:?}", kind);
return match kind {
ty::LvalueExpr => {
let datumblock = trans_lvalue_unadjusted(bcx, expr);
@ -490,7 +490,7 @@ fn trans_lvalue(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
Some(_) => {
bcx.sess().span_bug(
expr.span,
fmt!("trans_lvalue() called on an expression \
format!("trans_lvalue() called on an expression \
with adjustments"));
}
};
@ -506,7 +506,7 @@ fn trans_to_datum_unadjusted(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
let mut bcx = bcx;
debug!("trans_to_datum_unadjusted(expr=%s)", bcx.expr_to_str(expr));
debug2!("trans_to_datum_unadjusted(expr={})", bcx.expr_to_str(expr));
let _indenter = indenter();
debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
@ -608,8 +608,8 @@ fn trans_rvalue_datum_unadjusted(bcx: @mut Block, expr: &ast::Expr) -> DatumBloc
_ => {
bcx.tcx().sess.span_bug(
expr.span,
fmt!("trans_rvalue_datum_unadjusted reached \
fall-through case: %?",
format!("trans_rvalue_datum_unadjusted reached \
fall-through case: {:?}",
expr.node));
}
}
@ -662,8 +662,8 @@ fn trans_rvalue_stmt_unadjusted(bcx: @mut Block, expr: &ast::Expr) -> @mut Block
_ => {
bcx.tcx().sess.span_bug(
expr.span,
fmt!("trans_rvalue_stmt_unadjusted reached \
fall-through case: %?",
format!("trans_rvalue_stmt_unadjusted reached \
fall-through case: {:?}",
expr.node));
}
};
@ -718,7 +718,7 @@ fn trans_rvalue_dps_unadjusted(bcx: @mut Block, expr: &ast::Expr,
ast::ExprFnBlock(ref decl, ref body) => {
let expr_ty = expr_ty(bcx, expr);
let sigil = ty::ty_closure_sigil(expr_ty);
debug!("translating fn_block %s with type %s",
debug2!("translating fn_block {} with type {}",
expr_to_str(expr, tcx.sess.intr()),
expr_ty.repr(tcx));
return closure::trans_expr_fn(bcx, sigil, decl, body,
@ -787,7 +787,7 @@ fn trans_rvalue_dps_unadjusted(bcx: @mut Block, expr: &ast::Expr,
_ => {
bcx.tcx().sess.span_bug(
expr.span,
fmt!("trans_rvalue_dps_unadjusted reached fall-through case: %?",
format!("trans_rvalue_dps_unadjusted reached fall-through case: {:?}",
expr.node));
}
}
@ -836,8 +836,8 @@ fn trans_def_dps_unadjusted(bcx: @mut Block, ref_expr: &ast::Expr,
return bcx;
}
_ => {
bcx.tcx().sess.span_bug(ref_expr.span, fmt!(
"Non-DPS def %? referened by %s",
bcx.tcx().sess.span_bug(ref_expr.span, format!(
"Non-DPS def {:?} referened by {}",
def, bcx.node_id_to_str(ref_expr.id)));
}
}
@ -861,8 +861,8 @@ fn trans_def_datum_unadjusted(bcx: @mut Block,
ref_expr.id)
}
_ => {
bcx.tcx().sess.span_bug(ref_expr.span, fmt!(
"Non-DPS def %? referened by %s",
bcx.tcx().sess.span_bug(ref_expr.span, format!(
"Non-DPS def {:?} referened by {}",
def, bcx.node_id_to_str(ref_expr.id)));
}
};
@ -887,7 +887,7 @@ fn trans_lvalue_unadjusted(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
let _icx = push_ctxt("trans_lval");
let mut bcx = bcx;
debug!("trans_lvalue(expr=%s)", bcx.expr_to_str(expr));
debug2!("trans_lvalue(expr={})", bcx.expr_to_str(expr));
let _indenter = indenter();
trace_span!(bcx, expr.span, shorten(bcx.expr_to_str(expr)));
@ -912,7 +912,7 @@ fn trans_lvalue_unadjusted(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
_ => {
bcx.tcx().sess.span_bug(
expr.span,
fmt!("trans_lvalue reached fall-through case: %?",
format!("trans_lvalue reached fall-through case: {:?}",
expr.node));
}
};
@ -978,8 +978,8 @@ fn trans_lvalue_unadjusted(bcx: @mut Block, expr: &ast::Expr) -> DatumBlock {
base_datum.get_vec_base_and_len(bcx, index_expr.span,
index_expr.id, 0);
debug!("trans_index: base %s", bcx.val_to_str(base));
debug!("trans_index: len %s", bcx.val_to_str(len));
debug2!("trans_index: base {}", bcx.val_to_str(base));
debug2!("trans_index: len {}", bcx.val_to_str(len));
let bounds_check = ICmp(bcx, lib::llvm::IntUGE, scaled_ix, len);
let bcx = do with_cond(bcx, bounds_check) |bcx| {
@ -1091,8 +1091,8 @@ pub fn trans_local_var(bcx: @mut Block, def: ast::Def) -> Datum {
}
}
None => {
bcx.sess().bug(fmt!(
"trans_local_var: no llval for upvar %? found", nid));
bcx.sess().bug(format!(
"trans_local_var: no llval for upvar {:?} found", nid));
}
}
}
@ -1106,13 +1106,13 @@ pub fn trans_local_var(bcx: @mut Block, def: ast::Def) -> Datum {
let self_info: ValSelfData = match bcx.fcx.llself {
Some(ref self_info) => *self_info,
None => {
bcx.sess().bug(fmt!(
bcx.sess().bug(format!(
"trans_local_var: reference to self \
out of context with id %?", nid));
out of context with id {:?}", nid));
}
};
debug!("def_self() reference, self_info.t=%s",
debug2!("def_self() reference, self_info.t={}",
self_info.t.repr(bcx.tcx()));
Datum {
@ -1122,8 +1122,8 @@ pub fn trans_local_var(bcx: @mut Block, def: ast::Def) -> Datum {
}
}
_ => {
bcx.sess().unimpl(fmt!(
"unsupported def type in trans_local_var: %?", def));
bcx.sess().unimpl(format!(
"unsupported def type in trans_local_var: {:?}", def));
}
};
@ -1133,12 +1133,12 @@ pub fn trans_local_var(bcx: @mut Block, def: ast::Def) -> Datum {
let v = match table.find(&nid) {
Some(&v) => v,
None => {
bcx.sess().bug(fmt!(
"trans_local_var: no llval for local/arg %? found", nid));
bcx.sess().bug(format!(
"trans_local_var: no llval for local/arg {:?} found", nid));
}
};
let ty = node_id_type(bcx, nid);
debug!("take_local(nid=%?, v=%s, ty=%s)",
debug2!("take_local(nid={:?}, v={}, ty={})",
nid, bcx.val_to_str(v), bcx.ty_to_str(ty));
Datum {
val: v,
@ -1164,8 +1164,8 @@ pub fn with_field_tys<R>(tcx: ty::ctxt,
// We want the *variant* ID here, not the enum ID.
match node_id_opt {
None => {
tcx.sess.bug(fmt!(
"cannot get field types from the enum type %s \
tcx.sess.bug(format!(
"cannot get field types from the enum type {} \
without a node ID",
ty.repr(tcx)));
}
@ -1187,8 +1187,8 @@ pub fn with_field_tys<R>(tcx: ty::ctxt,
}
_ => {
tcx.sess.bug(fmt!(
"cannot get field types from the type %s",
tcx.sess.bug(format!(
"cannot get field types from the type {}",
ty.repr(tcx)));
}
}
@ -1733,14 +1733,14 @@ fn trans_imm_cast(bcx: @mut Block, expr: &ast::Expr,
val_ty(lldiscrim_a),
lldiscrim_a, true),
cast_float => SIToFP(bcx, lldiscrim_a, ll_t_out),
_ => ccx.sess.bug(fmt!("translating unsupported cast: \
%s (%?) -> %s (%?)",
_ => ccx.sess.bug(format!("translating unsupported cast: \
{} ({:?}) -> {} ({:?})",
t_in.repr(ccx.tcx), k_in,
t_out.repr(ccx.tcx), k_out))
}
}
_ => ccx.sess.bug(fmt!("translating unsupported cast: \
%s (%?) -> %s (%?)",
_ => ccx.sess.bug(format!("translating unsupported cast: \
{} ({:?}) -> {} ({:?})",
t_in.repr(ccx.tcx), k_in,
t_out.repr(ccx.tcx), k_out))
};
@ -1757,7 +1757,7 @@ fn trans_assign_op(bcx: @mut Block,
let _icx = push_ctxt("trans_assign_op");
let mut bcx = bcx;
debug!("trans_assign_op(expr=%s)", bcx.expr_to_str(expr));
debug2!("trans_assign_op(expr={})", bcx.expr_to_str(expr));
// Evaluate LHS (destination), which should be an lvalue
let dst_datum = unpack_datum!(bcx, trans_lvalue_unadjusted(bcx, dst));

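The foreign-function hunks below keep their long format strings readable with a trailing backslash; in a Rust string literal that escape swallows the newline and the following indentation, so the logged message comes out on one line. A small present-day sketch, with made-up ABI and module names:

    fn main() {
        let abi = "cdecl";
        let module = "foreign";
        // The backslash-newline drops the line break and the leading spaces
        // of the next line, mirroring the multi-line strings in this patch.
        let msg = format!("ABI `{}` has no suitable ABI \
                           for target architecture in module {}", abi, module);
        assert_eq!(msg, "ABI `cdecl` has no suitable ABI for target architecture in module foreign");
        println!("{}", msg);
    }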

@ -80,13 +80,13 @@ pub fn llvm_calling_convention(ccx: &mut CrateContext,
match *abi {
RustIntrinsic => {
// Intrinsics are emitted by monomorphic fn
ccx.sess.bug(fmt!("Asked to register intrinsic fn"));
ccx.sess.bug(format!("Asked to register intrinsic fn"));
}
Rust => {
// FIXME(#3678) Implement linking to foreign fns with Rust ABI
ccx.sess.unimpl(
fmt!("Foreign functions with Rust ABI"));
format!("Foreign functions with Rust ABI"));
}
Stdcall => lib::llvm::X86StdcallCallConv,
@ -110,9 +110,9 @@ pub fn register_foreign_item_fn(ccx: @mut CrateContext,
* Just adds a LLVM global.
*/
debug!("register_foreign_item_fn(abis=%s, \
path=%s, \
foreign_item.id=%?)",
debug2!("register_foreign_item_fn(abis={}, \
path={}, \
foreign_item.id={:?})",
abis.repr(ccx.tcx),
path.repr(ccx.tcx),
foreign_item.id);
@ -122,9 +122,9 @@ pub fn register_foreign_item_fn(ccx: @mut CrateContext,
None => {
// FIXME(#8357) We really ought to report a span here
ccx.sess.fatal(
fmt!("ABI `%s` has no suitable ABI \
format!("ABI `{}` has no suitable ABI \
for target architecture \
in module %s",
in module {}",
abis.user_string(ccx.tcx),
ast_map::path_to_str(*path,
ccx.sess.intr())));
@ -165,9 +165,9 @@ pub fn trans_native_call(bcx: @mut Block,
let ccx = bcx.ccx();
let tcx = bcx.tcx();
debug!("trans_native_call(callee_ty=%s, \
llfn=%s, \
llretptr=%s)",
debug2!("trans_native_call(callee_ty={}, \
llfn={}, \
llretptr={})",
callee_ty.repr(tcx),
ccx.tn.val_to_str(llfn),
ccx.tn.val_to_str(llretptr));
@ -213,7 +213,7 @@ pub fn trans_native_call(bcx: @mut Block,
// Does Rust pass this argument by pointer?
let rust_indirect = type_of::arg_is_indirect(ccx, fn_sig.inputs[i]);
debug!("argument %u, llarg_rust=%s, rust_indirect=%b, arg_ty=%s",
debug2!("argument {}, llarg_rust={}, rust_indirect={}, arg_ty={}",
i,
ccx.tn.val_to_str(llarg_rust),
rust_indirect,
@ -227,7 +227,7 @@ pub fn trans_native_call(bcx: @mut Block,
llarg_rust = scratch;
}
debug!("llarg_rust=%s (after indirection)",
debug2!("llarg_rust={} (after indirection)",
ccx.tn.val_to_str(llarg_rust));
// Check whether we need to do any casting
@ -236,7 +236,7 @@ pub fn trans_native_call(bcx: @mut Block,
llarg_rust = BitCast(bcx, llarg_rust, foreignarg_ty.ptr_to());
}
debug!("llarg_rust=%s (after casting)",
debug2!("llarg_rust={} (after casting)",
ccx.tn.val_to_str(llarg_rust));
// Finally, load the value if needed for the foreign ABI
@ -247,7 +247,7 @@ pub fn trans_native_call(bcx: @mut Block,
Load(bcx, llarg_rust)
};
debug!("argument %u, llarg_foreign=%s",
debug2!("argument {}, llarg_foreign={}",
i, ccx.tn.val_to_str(llarg_foreign));
llargs_foreign.push(llarg_foreign);
@ -258,7 +258,7 @@ pub fn trans_native_call(bcx: @mut Block,
None => {
// FIXME(#8357) We really ought to report a span here
ccx.sess.fatal(
fmt!("ABI string `%s` has no suitable ABI \
format!("ABI string `{}` has no suitable ABI \
for target architecture",
fn_abis.user_string(ccx.tcx)));
}
@ -284,10 +284,10 @@ pub fn trans_native_call(bcx: @mut Block,
let llrust_ret_ty = llsig.llret_ty;
let llforeign_ret_ty = fn_type.ret_ty.ty;
debug!("llretptr=%s", ccx.tn.val_to_str(llretptr));
debug!("llforeign_retval=%s", ccx.tn.val_to_str(llforeign_retval));
debug!("llrust_ret_ty=%s", ccx.tn.type_to_str(llrust_ret_ty));
debug!("llforeign_ret_ty=%s", ccx.tn.type_to_str(llforeign_ret_ty));
debug2!("llretptr={}", ccx.tn.val_to_str(llretptr));
debug2!("llforeign_retval={}", ccx.tn.val_to_str(llforeign_retval));
debug2!("llrust_ret_ty={}", ccx.tn.type_to_str(llrust_ret_ty));
debug2!("llforeign_ret_ty={}", ccx.tn.type_to_str(llforeign_ret_ty));
if llrust_ret_ty == llforeign_ret_ty {
Store(bcx, llforeign_retval, llretptr);
@ -313,7 +313,7 @@ pub fn trans_native_call(bcx: @mut Block,
let llforeign_align = machine::llalign_of_min(ccx, llforeign_ret_ty);
let llrust_align = machine::llalign_of_min(ccx, llrust_ret_ty);
let llalign = uint::min(llforeign_align, llrust_align);
debug!("llrust_size=%?", llrust_size);
debug2!("llrust_size={:?}", llrust_size);
base::call_memcpy(bcx, llretptr_i8, llscratch_i8,
C_uint(ccx, llrust_size), llalign as u32);
}
@ -372,7 +372,7 @@ pub fn register_rust_fn_with_foreign_abi(ccx: @mut CrateContext,
lib::llvm::CCallConv,
llfn_ty);
add_argument_attributes(&tys, llfn);
debug!("register_rust_fn_with_foreign_abi(node_id=%?, llfn_ty=%s, llfn=%s)",
debug2!("register_rust_fn_with_foreign_abi(node_id={:?}, llfn_ty={}, llfn={})",
node_id, ccx.tn.type_to_str(llfn_ty), ccx.tn.val_to_str(llfn));
llfn
}
@ -416,14 +416,14 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @mut CrateContext,
f
}
_ => {
ccx.sess.bug(fmt!("build_rust_fn: extern fn %s has ty %s, \
ccx.sess.bug(format!("build_rust_fn: extern fn {} has ty {}, \
expected a bare fn ty",
path.repr(tcx),
t.repr(tcx)));
}
};
debug!("build_rust_fn: path=%s id=%? t=%s",
debug2!("build_rust_fn: path={} id={:?} t={}",
path.repr(tcx),
id,
t.repr(tcx));
@ -449,7 +449,7 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @mut CrateContext,
"foreign::trans_rust_fn_with_foreign_abi::build_wrap_fn");
let tcx = ccx.tcx;
debug!("build_wrap_fn(llrustfn=%s, llwrapfn=%s)",
debug2!("build_wrap_fn(llrustfn={}, llwrapfn={})",
ccx.tn.val_to_str(llrustfn),
ccx.tn.val_to_str(llwrapfn));
@ -504,14 +504,14 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @mut CrateContext,
// alloca some scratch space on the stack.
match foreign_outptr {
Some(llforeign_outptr) => {
debug!("out pointer, foreign=%s",
debug2!("out pointer, foreign={}",
ccx.tn.val_to_str(llforeign_outptr));
let llrust_retptr =
llvm::LLVMBuildBitCast(builder,
llforeign_outptr,
llrust_ret_ty.ptr_to().to_ref(),
noname());
debug!("out pointer, foreign=%s (casted)",
debug2!("out pointer, foreign={} (casted)",
ccx.tn.val_to_str(llrust_retptr));
llrust_args.push(llrust_retptr);
return_alloca = None;
@ -524,10 +524,10 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @mut CrateContext,
llrust_ret_ty.to_ref(),
s))
};
debug!("out pointer, \
allocad=%s, \
llrust_ret_ty=%s, \
return_ty=%s",
debug2!("out pointer, \
allocad={}, \
llrust_ret_ty={}, \
return_ty={}",
ccx.tn.val_to_str(slot),
ccx.tn.type_to_str(llrust_ret_ty),
tys.fn_sig.output.repr(tcx));
@ -544,7 +544,7 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @mut CrateContext,
// Push an (null) env pointer
let env_pointer = base::null_env_ptr(ccx);
debug!("env pointer=%s", ccx.tn.val_to_str(env_pointer));
debug2!("env pointer={}", ccx.tn.val_to_str(env_pointer));
llrust_args.push(env_pointer);
// Build up the arguments to the call to the rust function.
@ -558,9 +558,9 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @mut CrateContext,
let foreign_indirect = tys.fn_ty.attrs[foreign_index].is_some();
let mut llforeign_arg = llvm::LLVMGetParam(llwrapfn, foreign_index);
debug!("llforeign_arg #%u: %s",
debug2!("llforeign_arg \\#{}: {}",
i, ccx.tn.val_to_str(llforeign_arg));
debug!("rust_indirect = %b, foreign_indirect = %b",
debug2!("rust_indirect = {}, foreign_indirect = {}",
rust_indirect, foreign_indirect);
// Ensure that the foreign argument is indirect (by
@ -591,14 +591,14 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @mut CrateContext,
llvm::LLVMBuildLoad(builder, llforeign_arg, noname())
};
debug!("llrust_arg #%u: %s",
debug2!("llrust_arg \\#{}: {}",
i, ccx.tn.val_to_str(llrust_arg));
llrust_args.push(llrust_arg);
}
// Perform the call itself
let llrust_ret_val = do llrust_args.as_imm_buf |ptr, len| {
debug!("calling llrustfn = %s", ccx.tn.val_to_str(llrustfn));
debug2!("calling llrustfn = {}", ccx.tn.val_to_str(llrustfn));
llvm::LLVMBuildCall(builder, llrustfn, ptr,
len as c_uint, noname())
};
@ -723,11 +723,11 @@ fn foreign_types_for_fn_ty(ccx: &mut CrateContext,
llsig.llarg_tys,
llsig.llret_ty,
ret_def);
debug!("foreign_types_for_fn_ty(\
ty=%s, \
llsig=%s -> %s, \
fn_ty=%s -> %s, \
ret_def=%b",
debug2!("foreign_types_for_fn_ty(\
ty={}, \
llsig={} -> {}, \
fn_ty={} -> {}, \
ret_def={}",
ty.repr(ccx.tcx),
ccx.tn.types_to_str(llsig.llarg_tys),
ccx.tn.type_to_str(llsig.llret_ty),


@ -213,12 +213,12 @@ pub fn lazily_emit_tydesc_glue(ccx: @mut CrateContext,
match ti.take_glue {
Some(_) => (),
None => {
debug!("+++ lazily_emit_tydesc_glue TAKE %s",
debug2!("+++ lazily_emit_tydesc_glue TAKE {}",
ppaux::ty_to_str(ccx.tcx, ti.ty));
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, "take");
ti.take_glue = Some(glue_fn);
make_generic_glue(ccx, ti.ty, glue_fn, make_take_glue, "take");
debug!("--- lazily_emit_tydesc_glue TAKE %s",
debug2!("--- lazily_emit_tydesc_glue TAKE {}",
ppaux::ty_to_str(ccx.tcx, ti.ty));
}
}
@ -226,12 +226,12 @@ pub fn lazily_emit_tydesc_glue(ccx: @mut CrateContext,
match ti.drop_glue {
Some(_) => (),
None => {
debug!("+++ lazily_emit_tydesc_glue DROP %s",
debug2!("+++ lazily_emit_tydesc_glue DROP {}",
ppaux::ty_to_str(ccx.tcx, ti.ty));
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, "drop");
ti.drop_glue = Some(glue_fn);
make_generic_glue(ccx, ti.ty, glue_fn, make_drop_glue, "drop");
debug!("--- lazily_emit_tydesc_glue DROP %s",
debug2!("--- lazily_emit_tydesc_glue DROP {}",
ppaux::ty_to_str(ccx.tcx, ti.ty));
}
}
@ -239,12 +239,12 @@ pub fn lazily_emit_tydesc_glue(ccx: @mut CrateContext,
match ti.free_glue {
Some(_) => (),
None => {
debug!("+++ lazily_emit_tydesc_glue FREE %s",
debug2!("+++ lazily_emit_tydesc_glue FREE {}",
ppaux::ty_to_str(ccx.tcx, ti.ty));
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, "free");
ti.free_glue = Some(glue_fn);
make_generic_glue(ccx, ti.ty, glue_fn, make_free_glue, "free");
debug!("--- lazily_emit_tydesc_glue FREE %s",
debug2!("--- lazily_emit_tydesc_glue FREE {}",
ppaux::ty_to_str(ccx.tcx, ti.ty));
}
}
@ -252,12 +252,12 @@ pub fn lazily_emit_tydesc_glue(ccx: @mut CrateContext,
match ti.visit_glue {
Some(_) => (),
None => {
debug!("+++ lazily_emit_tydesc_glue VISIT %s",
debug2!("+++ lazily_emit_tydesc_glue VISIT {}",
ppaux::ty_to_str(ccx.tcx, ti.ty));
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, "visit");
ti.visit_glue = Some(glue_fn);
make_generic_glue(ccx, ti.ty, glue_fn, make_visit_glue, "visit");
debug!("--- lazily_emit_tydesc_glue VISIT %s",
debug2!("--- lazily_emit_tydesc_glue VISIT {}",
ppaux::ty_to_str(ccx.tcx, ti.ty));
}
}
@ -658,7 +658,7 @@ pub fn declare_tydesc(ccx: &mut CrateContext, t: ty::t) -> @mut tydesc_info {
let llalign = llalign_of(ccx, llty);
let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc").to_managed();
note_unique_llvm_symbol(ccx, name);
debug!("+++ declare_tydesc %s %s", ppaux::ty_to_str(ccx.tcx, t), name);
debug2!("+++ declare_tydesc {} {}", ppaux::ty_to_str(ccx.tcx, t), name);
let gvar = do name.with_c_str |buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type.to_ref(), buf)
@ -679,7 +679,7 @@ pub fn declare_tydesc(ccx: &mut CrateContext, t: ty::t) -> @mut tydesc_info {
free_glue: None,
visit_glue: None
};
debug!("--- declare_tydesc %s", ppaux::ty_to_str(ccx.tcx, t));
debug2!("--- declare_tydesc {}", ppaux::ty_to_str(ccx.tcx, t));
return inf;
}
@ -689,7 +689,7 @@ pub fn declare_generic_glue(ccx: &mut CrateContext, t: ty::t, llfnty: Type,
name: &str) -> ValueRef {
let _icx = push_ctxt("declare_generic_glue");
let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, (~"glue_" + name)).to_managed();
debug!("%s is for type %s", fn_nm, ppaux::ty_to_str(ccx.tcx, t));
debug2!("{} is for type {}", fn_nm, ppaux::ty_to_str(ccx.tcx, t));
note_unique_llvm_symbol(ccx, fn_nm);
let llfn = decl_cdecl_fn(ccx.llmod, fn_nm, llfnty);
set_glue_inlining(llfn, t);
@ -730,7 +730,7 @@ pub fn make_generic_glue(ccx: @mut CrateContext,
name: &str)
-> ValueRef {
let _icx = push_ctxt("make_generic_glue");
let glue_name = fmt!("glue %s %s", name, ty_to_short_str(ccx.tcx, t));
let glue_name = format!("glue {} {}", name, ty_to_short_str(ccx.tcx, t));
let _s = StatRecorder::new(ccx, glue_name);
make_generic_glue_inner(ccx, t, llfn, helper)
}
@ -789,8 +789,7 @@ pub fn emit_tydescs(ccx: &mut CrateContext) {
}
};
debug!("ti.borrow_offset: %s",
ccx.tn.val_to_str(ti.borrow_offset));
debug2!("ti.borrow_offset: {}", ccx.tn.val_to_str(ti.borrow_offset));
let tydesc = C_named_struct(ccx.tydesc_type,
[ti.size, // size


@ -29,7 +29,7 @@ pub fn maybe_instantiate_inline(ccx: @mut CrateContext, fn_id: ast::DefId)
match ccx.external.find(&fn_id) {
Some(&Some(node_id)) => {
// Already inline
debug!("maybe_instantiate_inline(%s): already inline as node id %d",
debug2!("maybe_instantiate_inline({}): already inline as node id {}",
ty::item_path_str(ccx.tcx, fn_id), node_id);
return local_def(node_id);
}
@ -132,7 +132,7 @@ pub fn maybe_instantiate_inline(ccx: @mut CrateContext, fn_id: ast::DefId)
_ => {
let self_ty = ty::node_id_to_type(ccx.tcx,
mth.self_id);
debug!("calling inline trans_fn with self_ty %s",
debug2!("calling inline trans_fn with self_ty {}",
ty_to_str(ccx.tcx, self_ty));
match mth.explicit_self.node {
ast::sty_value => impl_self(self_ty, ty::ByRef),


@ -39,7 +39,7 @@ pub fn trans_intrinsic(ccx: @mut CrateContext,
substs: @param_substs,
attributes: &[ast::Attribute],
ref_id: Option<ast::NodeId>) {
debug!("trans_intrinsic(item.ident=%s)", ccx.sess.str_of(item.ident));
debug2!("trans_intrinsic(item.ident={})", ccx.sess.str_of(item.ident));
fn simple_llvm_intrinsic(bcx: @mut Block, name: &'static str, num_args: uint) {
assert!(num_args <= 4);
@ -299,13 +299,13 @@ pub fn trans_intrinsic(ccx: @mut CrateContext,
if in_type_size != out_type_size {
let sp = match ccx.tcx.items.get_copy(&ref_id.unwrap()) {
ast_map::node_expr(e) => e.span,
_ => fail!("transmute has non-expr arg"),
_ => fail2!("transmute has non-expr arg"),
};
let pluralize = |n| if 1u == n { "" } else { "s" };
ccx.sess.span_fatal(sp,
fmt!("transmute called on types with \
different sizes: %s (%u bit%s) to \
%s (%u bit%s)",
format!("transmute called on types with \
different sizes: {} ({} bit{}) to \
{} ({} bit{})",
ty_to_str(ccx.tcx, in_type),
in_type_size,
pluralize(in_type_size),


@ -19,7 +19,7 @@ pub trait LlvmRepr {
impl<'self, T:LlvmRepr> LlvmRepr for &'self [T] {
fn llrepr(&self, ccx: &CrateContext) -> ~str {
let reprs = self.map(|t| t.llrepr(ccx));
fmt!("[%s]", reprs.connect(","))
format!("[{}]", reprs.connect(","))
}
}


@ -55,7 +55,7 @@ pub fn trans_impl(ccx: @mut CrateContext,
let _icx = push_ctxt("impl::trans_impl");
let tcx = ccx.tcx;
debug!("trans_impl(path=%s, name=%s, id=%?)",
debug2!("trans_impl(path={}, name={}, id={:?})",
path.repr(tcx), name.repr(tcx), id);
// Both here and below with generic methods, be sure to recurse and look for
@ -117,7 +117,7 @@ pub fn trans_method(ccx: @mut CrateContext,
ty::subst_tps(ccx.tcx, *tys, *self_sub, self_ty)
}
};
debug!("calling trans_fn with self_ty %s",
debug2!("calling trans_fn with self_ty {}",
self_ty.repr(ccx.tcx));
match method.explicit_self.node {
ast::sty_value => impl_self(self_ty, ty::ByRef),
@ -161,7 +161,7 @@ pub fn trans_method_callee(bcx: @mut Block,
-> Callee {
let _icx = push_ctxt("impl::trans_method_callee");
debug!("trans_method_callee(callee_id=%?, this=%s, mentry=%s)",
debug2!("trans_method_callee(callee_id={:?}, this={}, mentry={})",
callee_id,
bcx.expr_to_str(this),
mentry.repr(bcx.tcx()));
@ -199,7 +199,7 @@ pub fn trans_method_callee(bcx: @mut Block,
trait_id, off, vtbl)
}
// how to get rid of this?
None => fail!("trans_method_callee: missing param_substs")
None => fail2!("trans_method_callee: missing param_substs")
}
}
@ -220,8 +220,8 @@ pub fn trans_static_method_callee(bcx: @mut Block,
let _icx = push_ctxt("impl::trans_static_method_callee");
let ccx = bcx.ccx();
debug!("trans_static_method_callee(method_id=%?, trait_id=%s, \
callee_id=%?)",
debug2!("trans_static_method_callee(method_id={:?}, trait_id={}, \
callee_id={:?})",
method_id,
ty::item_path_str(bcx.tcx(), trait_id),
callee_id);
@ -250,17 +250,17 @@ pub fn trans_static_method_callee(bcx: @mut Block,
ast_map::node_trait_method(trait_method, _, _) => {
ast_util::trait_method_to_ty_method(trait_method).ident
}
_ => fail!("callee is not a trait method")
_ => fail2!("callee is not a trait method")
}
} else {
let path = csearch::get_item_path(bcx.tcx(), method_id);
match path[path.len()-1] {
path_pretty_name(s, _) | path_name(s) => { s }
path_mod(_) => { fail!("path doesn't have a name?") }
path_mod(_) => { fail2!("path doesn't have a name?") }
}
};
debug!("trans_static_method_callee: method_id=%?, callee_id=%?, \
name=%s", method_id, callee_id, ccx.sess.str_of(mname));
debug2!("trans_static_method_callee: method_id={:?}, callee_id={:?}, \
name={}", method_id, callee_id, ccx.sess.str_of(mname));
let vtbls = resolve_vtables_in_fn_ctxt(
bcx.fcx, ccx.maps.vtable_map.get_copy(&callee_id));
@ -287,7 +287,7 @@ pub fn trans_static_method_callee(bcx: @mut Block,
FnData {llfn: PointerCast(bcx, lval, llty)}
}
_ => {
fail!("vtable_param left in monomorphized \
fail2!("vtable_param left in monomorphized \
function's vtable substs");
}
}
@ -362,7 +362,7 @@ pub fn trans_monomorphized_callee(bcx: @mut Block,
}
}
typeck::vtable_param(*) => {
fail!("vtable_param left in monomorphized function's vtable substs");
fail2!("vtable_param left in monomorphized function's vtable substs");
}
};
@ -395,13 +395,13 @@ pub fn combine_impl_and_methods_tps(bcx: @mut Block,
let method = ty::method(ccx.tcx, mth_did);
let n_m_tps = method.generics.type_param_defs.len();
let node_substs = node_id_type_params(bcx, callee_id);
debug!("rcvr_substs=%?", rcvr_substs.repr(ccx.tcx));
debug2!("rcvr_substs={:?}", rcvr_substs.repr(ccx.tcx));
let ty_substs
= vec::append(rcvr_substs.to_owned(),
node_substs.tailn(node_substs.len() - n_m_tps));
debug!("n_m_tps=%?", n_m_tps);
debug!("node_substs=%?", node_substs.repr(ccx.tcx));
debug!("ty_substs=%?", ty_substs.repr(ccx.tcx));
debug2!("n_m_tps={:?}", n_m_tps);
debug2!("node_substs={:?}", node_substs.repr(ccx.tcx));
debug2!("ty_substs={:?}", ty_substs.repr(ccx.tcx));
// Now, do the same work for the vtables. The vtables might not
@ -474,13 +474,13 @@ pub fn trans_trait_callee_from_llval(bcx: @mut Block,
let ccx = bcx.ccx();
// Load the data pointer from the object.
debug!("(translating trait callee) loading second index from pair");
debug2!("(translating trait callee) loading second index from pair");
let llboxptr = GEPi(bcx, llpair, [0u, abi::trt_field_box]);
let llbox = Load(bcx, llboxptr);
let llself = PointerCast(bcx, llbox, Type::opaque_box(ccx).ptr_to());
// Load the function from the vtable and cast it to the expected type.
debug!("(translating trait callee) loading method");
debug2!("(translating trait callee) loading method");
let llcallee_ty = type_of_fn_from_ty(ccx, callee_ty);
let llvtable = Load(bcx,
PointerCast(bcx,
@ -524,7 +524,7 @@ pub fn vtable_id(ccx: @mut CrateContext,
}
// can't this be checked at the callee?
_ => fail!("vtable_id")
_ => fail2!("vtable_id")
}
}
@ -578,7 +578,7 @@ pub fn make_vtable(ccx: &mut CrateContext,
let tbl = C_struct(components);
let sym = token::gensym("vtable");
let vt_gvar = do fmt!("vtable%u", sym).with_c_str |buf| {
let vt_gvar = do format!("vtable{}", sym).with_c_str |buf| {
llvm::LLVMAddGlobal(ccx.llmod, val_ty(tbl).to_ref(), buf)
};
llvm::LLVMSetInitializer(vt_gvar, tbl);
@ -611,7 +611,7 @@ fn emit_vtable_methods(bcx: @mut Block,
// the method type from the impl to substitute into.
let m_id = method_with_name(ccx, impl_id, ident.name);
let m = ty::method(tcx, m_id);
debug!("(making impl vtable) emitting method %s at subst %s",
debug2!("(making impl vtable) emitting method {} at subst {}",
m.repr(tcx),
substs.repr(tcx));
let fty = ty::subst_tps(tcx,
@ -619,7 +619,7 @@ fn emit_vtable_methods(bcx: @mut Block,
None,
ty::mk_bare_fn(tcx, m.fty.clone()));
if m.generics.has_type_params() || ty::type_has_self(fty) {
debug!("(making impl vtable) method has self or type params: %s",
debug2!("(making impl vtable) method has self or type params: {}",
tcx.sess.str_of(ident));
C_null(Type::nil().ptr_to())
} else {

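The debug2! and fail2! names above are the transitional macros used while the old and new formatting systems coexisted; roughly speaking, their present-day counterparts are a logging debug! and panic!. A sketch under that assumption, with println! standing in for the logger and an invented vtable_name helper:

    // Sketch only: panic! plays the role of the patch's fail2!, println! stands in
    // for the debug logging macro; the message syntax is the same format! syntax.
    fn vtable_name(sym: u32) -> String {
        format!("vtable{}", sym) // mirrors the patch's format!("vtable{}", sym)
    }

    fn main() {
        let name = vtable_name(17);
        println!("emitting {}", name); // debug2!-style trace
        if name.is_empty() {
            panic!("vtable name must not be empty"); // fail2! in the patch's era
        }
    }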

@ -36,12 +36,12 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
ref_id: Option<ast::NodeId>)
-> (ValueRef, bool)
{
debug!("monomorphic_fn(\
fn_id=%s, \
real_substs=%s, \
vtables=%s, \
self_vtable=%s, \
ref_id=%?)",
debug2!("monomorphic_fn(\
fn_id={}, \
real_substs={}, \
vtables={}, \
self_vtable={}, \
ref_id={:?})",
fn_id.repr(ccx.tcx),
real_substs.repr(ccx.tcx),
vtables.repr(ccx.tcx),
@ -68,17 +68,17 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
must_cast = true;
}
debug!("monomorphic_fn(\
fn_id=%s, \
psubsts=%s, \
hash_id=%?)",
debug2!("monomorphic_fn(\
fn_id={}, \
psubsts={}, \
hash_id={:?})",
fn_id.repr(ccx.tcx),
psubsts.repr(ccx.tcx),
hash_id);
match ccx.monomorphized.find(&hash_id) {
Some(&val) => {
debug!("leaving monomorphic fn %s",
debug2!("leaving monomorphic fn {}",
ty::item_path_str(ccx.tcx, fn_id));
return (val, must_cast);
}
@ -95,7 +95,7 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
let map_node = session::expect(
ccx.sess,
ccx.tcx.items.find_copy(&fn_id.node),
|| fmt!("While monomorphizing %?, couldn't find it in the item map \
|| format!("While monomorphizing {:?}, couldn't find it in the item map \
(may have attempted to monomorphize an item \
defined in a different crate?)", fn_id));
// Get the path so that we can create a symbol
@ -140,7 +140,7 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
ast_map::node_struct_ctor(_, i, pt) => (pt, i.ident, i.span)
};
debug!("monomorphic_fn about to subst into %s", llitem_ty.repr(ccx.tcx));
debug2!("monomorphic_fn about to subst into {}", llitem_ty.repr(ccx.tcx));
let mono_ty = match is_static_provided {
None => ty::subst_tps(ccx.tcx, psubsts.tys,
psubsts.self_ty, llitem_ty),
@ -164,7 +164,7 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
(psubsts.tys.slice(0, idx) +
&[psubsts.self_ty.unwrap()] +
psubsts.tys.tailn(idx));
debug!("static default: changed substitution to %s",
debug2!("static default: changed substitution to {}",
substs.repr(ccx.tcx));
ty::subst_tps(ccx.tcx, substs, None, llitem_ty)
@ -176,7 +176,7 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
assert!(f.abis.is_rust() || f.abis.is_intrinsic());
f
}
_ => fail!("expected bare rust fn or an intrinsic")
_ => fail2!("expected bare rust fn or an intrinsic")
};
ccx.stats.n_monos += 1;
@ -197,7 +197,7 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
let mut pt = (*pt).clone();
pt.push(elt);
let s = mangle_exported_name(ccx, pt.clone(), mono_ty);
debug!("monomorphize_fn mangled to %s", s);
debug2!("monomorphize_fn mangled to {}", s);
let mk_lldecl = || {
let lldecl = decl_internal_rust_fn(ccx, f.sig.inputs, f.sig.output, s);
@ -285,12 +285,12 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
ast_map::node_block(*) |
ast_map::node_callee_scope(*) |
ast_map::node_local(*) => {
ccx.tcx.sess.bug(fmt!("Can't monomorphize a %?", map_node))
ccx.tcx.sess.bug(format!("Can't monomorphize a {:?}", map_node))
}
};
ccx.monomorphizing.insert(fn_id, depth);
debug!("leaving monomorphic fn %s", ty::item_path_str(ccx.tcx, fn_id));
debug2!("leaving monomorphic fn {}", ty::item_path_str(ccx.tcx, fn_id));
(lldecl, must_cast)
}
@ -302,7 +302,7 @@ pub fn make_mono_id(ccx: @mut CrateContext,
let substs_iter = substs.self_ty.iter().chain(substs.tys.iter());
let precise_param_ids: ~[(ty::t, Option<@~[mono_id]>)] = match substs.vtables {
Some(vts) => {
debug!("make_mono_id vtables=%s substs=%s",
debug2!("make_mono_id vtables={} substs={}",
vts.repr(ccx.tcx), substs.tys.repr(ccx.tcx));
let vts_iter = substs.self_vtables.iter().chain(vts.iter());
vts_iter.zip(substs_iter).map(|(vtable, subst)| {


@ -93,15 +93,15 @@ impl Reflector {
let tcx = self.bcx.tcx();
let mth_idx = ty::method_idx(
tcx.sess.ident_of(~"visit_" + ty_name),
*self.visitor_methods).expect(fmt!("Couldn't find visit method \
for %s", ty_name));
*self.visitor_methods).expect(format!("Couldn't find visit method \
for {}", ty_name));
let mth_ty =
ty::mk_bare_fn(tcx, self.visitor_methods[mth_idx].fty.clone());
let v = self.visitor_val;
debug!("passing %u args:", args.len());
debug2!("passing {} args:", args.len());
let mut bcx = self.bcx;
for (i, a) in args.iter().enumerate() {
debug!("arg %u: %s", i, bcx.val_to_str(*a));
debug2!("arg {}: {}", i, bcx.val_to_str(*a));
}
let bool_ty = ty::mk_bool();
let result = unpack_result!(bcx, callee::trans_call_inner(
@ -151,7 +151,7 @@ impl Reflector {
pub fn visit_ty(&mut self, t: ty::t) {
let bcx = self.bcx;
let tcx = bcx.ccx().tcx;
debug!("reflect::visit_ty %s", ty_to_str(bcx.ccx().tcx, t));
debug2!("reflect::visit_ty {}", ty_to_str(bcx.ccx().tcx, t));
match ty::get(t).sty {
ty::ty_bot => self.leaf("bot"),


@ -149,7 +149,7 @@ pub struct VecTypes {
impl VecTypes {
pub fn to_str(&self, ccx: &CrateContext) -> ~str {
fmt!("VecTypes {vec_ty=%s, unit_ty=%s, llunit_ty=%s, llunit_size=%s}",
format!("VecTypes \\{vec_ty={}, unit_ty={}, llunit_ty={}, llunit_size={}\\}",
ty_to_str(ccx.tcx, self.vec_ty),
ty_to_str(ccx.tcx, self.unit_ty),
ccx.tn.type_to_str(self.llunit_ty),
@ -169,7 +169,7 @@ pub fn trans_fixed_vstore(bcx: @mut Block,
// to store the array of the suitable size, so all we have to do is
// generate the content.
debug!("trans_fixed_vstore(vstore_expr=%s, dest=%?)",
debug2!("trans_fixed_vstore(vstore_expr={}, dest={:?})",
bcx.expr_to_str(vstore_expr), dest.to_str(bcx.ccx()));
let _indenter = indenter();
@ -199,7 +199,7 @@ pub fn trans_slice_vstore(bcx: @mut Block,
let ccx = bcx.ccx();
debug!("trans_slice_vstore(vstore_expr=%s, dest=%s)",
debug2!("trans_slice_vstore(vstore_expr={}, dest={})",
bcx.expr_to_str(vstore_expr), dest.to_str(ccx));
let _indenter = indenter();
@ -214,7 +214,7 @@ pub fn trans_slice_vstore(bcx: @mut Block,
// Handle the &[...] case:
let vt = vec_types_from_expr(bcx, vstore_expr);
let count = elements_required(bcx, content_expr);
debug!("vt=%s, count=%?", vt.to_str(ccx), count);
debug2!("vt={}, count={:?}", vt.to_str(ccx), count);
// Make a fixed-length backing array and allocate it on the stack.
let llcount = C_uint(ccx, count);
@ -256,7 +256,7 @@ pub fn trans_lit_str(bcx: @mut Block,
// different from trans_slice_vstore() above because it does need to copy
// the content anywhere.
debug!("trans_lit_str(lit_expr=%s, dest=%s)",
debug2!("trans_lit_str(lit_expr={}, dest={})",
bcx.expr_to_str(lit_expr),
dest.to_str(bcx.ccx()));
let _indenter = indenter();
@ -287,7 +287,7 @@ pub fn trans_uniq_or_managed_vstore(bcx: @mut Block, heap: heap, vstore_expr: &a
// @[...] or ~[...] (also @"..." or ~"...") allocate boxes in the
// appropriate heap and write the array elements into them.
debug!("trans_uniq_or_managed_vstore(vstore_expr=%s, heap=%?)",
debug2!("trans_uniq_or_managed_vstore(vstore_expr={}, heap={:?})",
bcx.expr_to_str(vstore_expr), heap);
let _indenter = indenter();
@ -318,7 +318,7 @@ pub fn trans_uniq_or_managed_vstore(bcx: @mut Block, heap: heap, vstore_expr: &a
_ => {}
}
}
heap_exchange_closure => fail!("vectors use exchange_alloc"),
heap_exchange_closure => fail2!("vectors use exchange_alloc"),
heap_managed | heap_managed_unique => {}
}
@ -330,7 +330,7 @@ pub fn trans_uniq_or_managed_vstore(bcx: @mut Block, heap: heap, vstore_expr: &a
add_clean_free(bcx, val, heap);
let dataptr = get_dataptr(bcx, get_bodyptr(bcx, val, vt.vec_ty));
debug!("alloc_vec() returned val=%s, dataptr=%s",
debug2!("alloc_vec() returned val={}, dataptr={}",
bcx.val_to_str(val), bcx.val_to_str(dataptr));
let bcx = write_content(bcx, &vt, vstore_expr,
@ -350,7 +350,7 @@ pub fn write_content(bcx: @mut Block,
let _icx = push_ctxt("tvec::write_content");
let mut bcx = bcx;
debug!("write_content(vt=%s, dest=%s, vstore_expr=%?)",
debug2!("write_content(vt={}, dest={}, vstore_expr={:?})",
vt.to_str(bcx.ccx()),
dest.to_str(bcx.ccx()),
bcx.expr_to_str(vstore_expr));
@ -383,7 +383,7 @@ pub fn write_content(bcx: @mut Block,
let mut temp_cleanups = ~[];
for (i, element) in elements.iter().enumerate() {
let lleltptr = GEPi(bcx, lldest, [i]);
debug!("writing index %? with lleltptr=%?",
debug2!("writing index {:?} with lleltptr={:?}",
i, bcx.val_to_str(lleltptr));
bcx = expr::trans_into(bcx, *element,
SaveIn(lleltptr));

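The VecTypes string above escapes literal braces as \{ and \}, which was the format! syntax at the time; current Rust doubles the braces instead. A short sketch of the modern form, with illustrative type names:

    fn main() {
        let vec_ty = "~[u8]";
        let unit_ty = "u8";
        // "{{" and "}}" emit literal braces where this patch writes \{ and \}.
        let s = format!("VecTypes {{vec_ty={}, unit_ty={}}}", vec_ty, unit_ty);
        assert_eq!(s, "VecTypes {vec_ty=~[u8], unit_ty=u8}");
        println!("{}", s);
    }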

@ -364,7 +364,7 @@ impl Type {
Double => 64,
X86_FP80 => 80,
FP128 | PPC_FP128 => 128,
_ => fail!("llvm_float_width called on a non-float type")
_ => fail2!("llvm_float_width called on a non-float type")
}
}
}


@ -162,7 +162,7 @@ pub fn sizing_type_of(cx: &mut CrateContext, t: ty::t) -> Type {
}
ty::ty_self(_) | ty::ty_infer(*) | ty::ty_param(*) | ty::ty_err(*) => {
cx.tcx.sess.bug(fmt!("fictitious type %? in sizing_type_of()", ty::get(t).sty))
cx.tcx.sess.bug(format!("fictitious type {:?} in sizing_type_of()", ty::get(t).sty))
}
};
@ -172,7 +172,7 @@ pub fn sizing_type_of(cx: &mut CrateContext, t: ty::t) -> Type {
// NB: If you update this, be sure to update `sizing_type_of()` as well.
pub fn type_of(cx: &mut CrateContext, t: ty::t) -> Type {
debug!("type_of %?: %?", t, ty::get(t));
debug2!("type_of {:?}: {:?}", t, ty::get(t));
// Check the cache.
match cx.lltypes.find(&t) {
@ -335,9 +335,9 @@ pub fn llvm_type_name(cx: &CrateContext,
let tstr = ppaux::parameterized(cx.tcx, ty::item_path_str(cx.tcx, did),
&ty::NonerasedRegions(opt_vec::Empty), tps);
if did.crate == 0 {
fmt!("%s.%s", name, tstr)
format!("{}.{}", name, tstr)
} else {
fmt!("%s.%s[#%d]", name, tstr, did.crate)
format!("{}.{}[\\#{}]", name, tstr, did.crate)
}
}


@ -39,7 +39,7 @@ pub fn root_and_write_guard(datum: &Datum,
expr_id: ast::NodeId,
derefs: uint) -> @mut Block {
let key = root_map_key { id: expr_id, derefs: derefs };
debug!("write_guard::root_and_write_guard(key=%?)", key);
debug2!("write_guard::root_and_write_guard(key={:?})", key);
// root the autoderef'd value, if necessary:
//
@ -66,7 +66,7 @@ pub fn return_to_mut(mut bcx: @mut Block,
bits_val_ref: ValueRef,
filename_val: ValueRef,
line_val: ValueRef) -> @mut Block {
debug!("write_guard::return_to_mut(root_key=%?, %s, %s, %s)",
debug2!("write_guard::return_to_mut(root_key={:?}, {}, {}, {})",
root_key,
bcx.to_str(),
bcx.val_to_str(frozen_val_ref),
@ -111,13 +111,13 @@ fn root(datum: &Datum,
//! case, we will call this function, which will stash a copy
//! away until we exit the scope `scope_id`.
debug!("write_guard::root(root_key=%?, root_info=%?, datum=%?)",
debug2!("write_guard::root(root_key={:?}, root_info={:?}, datum={:?})",
root_key, root_info, datum.to_str(bcx.ccx()));
if bcx.sess().trace() {
trans_trace(
bcx, None,
(fmt!("preserving until end of scope %d",
(format!("preserving until end of scope {}",
root_info.scope)).to_managed());
}
@ -184,7 +184,7 @@ fn root(datum: &Datum,
fn perform_write_guard(datum: &Datum,
bcx: @mut Block,
span: Span) -> @mut Block {
debug!("perform_write_guard");
debug2!("perform_write_guard");
let llval = datum.to_value_llval(bcx);
let (filename, line) = filename_and_line_num_from_span(bcx, span);


@ -794,7 +794,7 @@ impl Vid for TyVid {
}
impl ToStr for TyVid {
fn to_str(&self) -> ~str { fmt!("<V%u>", self.to_uint()) }
fn to_str(&self) -> ~str { format!("<V{}>", self.to_uint()) }
}
impl Vid for IntVid {
@ -802,7 +802,7 @@ impl Vid for IntVid {
}
impl ToStr for IntVid {
fn to_str(&self) -> ~str { fmt!("<VI%u>", self.to_uint()) }
fn to_str(&self) -> ~str { format!("<VI{}>", self.to_uint()) }
}
impl Vid for FloatVid {
@ -810,7 +810,7 @@ impl Vid for FloatVid {
}
impl ToStr for FloatVid {
fn to_str(&self) -> ~str { fmt!("<VF%u>", self.to_uint()) }
fn to_str(&self) -> ~str { format!("<VF{}>", self.to_uint()) }
}
impl Vid for RegionVid {
@ -818,7 +818,7 @@ impl Vid for RegionVid {
}
impl ToStr for RegionVid {
fn to_str(&self) -> ~str { fmt!("%?", self.id) }
fn to_str(&self) -> ~str { format!("{:?}", self.id) }
}
impl ToStr for FnSig {
@ -1515,7 +1515,7 @@ pub fn fold_regions(
fldr: &fn(r: Region, in_fn: bool) -> Region) -> t {
fn do_fold(cx: ctxt, ty: t, in_fn: bool,
fldr: &fn(Region, bool) -> Region) -> t {
debug!("do_fold(ty=%s, in_fn=%b)", ty_to_str(cx, ty), in_fn);
debug2!("do_fold(ty={}, in_fn={})", ty_to_str(cx, ty), in_fn);
if !type_has_regions(ty) { return ty; }
fold_regions_and_ty(
cx, ty,
@ -1656,7 +1656,7 @@ pub fn simd_type(cx: ctxt, ty: t) -> t {
let fields = lookup_struct_fields(cx, did);
lookup_field_type(cx, did, fields[0].id, substs)
}
_ => fail!("simd_type called on invalid type")
_ => fail2!("simd_type called on invalid type")
}
}
@ -1666,14 +1666,14 @@ pub fn simd_size(cx: ctxt, ty: t) -> uint {
let fields = lookup_struct_fields(cx, did);
fields.len()
}
_ => fail!("simd_size called on invalid type")
_ => fail2!("simd_size called on invalid type")
}
}
pub fn get_element_type(ty: t, i: uint) -> t {
match get(ty).sty {
ty_tup(ref ts) => return ts[i],
_ => fail!("get_element_type called on invalid type")
_ => fail2!("get_element_type called on invalid type")
}
}
@ -1950,7 +1950,7 @@ impl ops::Sub<TypeContents,TypeContents> for TypeContents {
impl ToStr for TypeContents {
fn to_str(&self) -> ~str {
fmt!("TypeContents(%s)", self.bits.to_str_radix(2))
format!("TypeContents({})", self.bits.to_str_radix(2))
}
}
@ -2324,7 +2324,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
let mut tc = TC_ALL;
do each_inherited_builtin_bound(cx, bounds, traits) |bound| {
debug!("tc = %s, bound = %?", tc.to_str(), bound);
debug2!("tc = {}, bound = {:?}", tc.to_str(), bound);
tc = tc - match bound {
BoundStatic => TypeContents::nonstatic(cx),
BoundSend => TypeContents::nonsendable(cx),
@ -2334,7 +2334,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
};
}
debug!("result = %s", tc.to_str());
debug2!("result = {}", tc.to_str());
return tc;
// Iterates over all builtin bounds on the type parameter def, including
@ -2364,7 +2364,7 @@ pub fn type_moves_by_default(cx: ctxt, ty: t) -> bool {
pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
fn type_requires(cx: ctxt, seen: &mut ~[DefId],
r_ty: t, ty: t) -> bool {
debug!("type_requires(%s, %s)?",
debug2!("type_requires({}, {})?",
::util::ppaux::ty_to_str(cx, r_ty),
::util::ppaux::ty_to_str(cx, ty));
@ -2373,7 +2373,7 @@ pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
subtypes_require(cx, seen, r_ty, ty)
};
debug!("type_requires(%s, %s)? %b",
debug2!("type_requires({}, {})? {}",
::util::ppaux::ty_to_str(cx, r_ty),
::util::ppaux::ty_to_str(cx, ty),
r);
@ -2382,7 +2382,7 @@ pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
fn subtypes_require(cx: ctxt, seen: &mut ~[DefId],
r_ty: t, ty: t) -> bool {
debug!("subtypes_require(%s, %s)?",
debug2!("subtypes_require({}, {})?",
::util::ppaux::ty_to_str(cx, r_ty),
::util::ppaux::ty_to_str(cx, ty));
@ -2456,7 +2456,7 @@ pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
}
};
debug!("subtypes_require(%s, %s)? %b",
debug2!("subtypes_require({}, {})? {}",
::util::ppaux::ty_to_str(cx, r_ty),
::util::ppaux::ty_to_str(cx, ty),
r);
@ -2473,7 +2473,7 @@ pub fn type_structurally_contains(cx: ctxt,
test: &fn(x: &sty) -> bool)
-> bool {
let sty = &get(ty).sty;
debug!("type_structurally_contains: %s",
debug2!("type_structurally_contains: {}",
::util::ppaux::ty_to_str(cx, ty));
if test(sty) { return true; }
match *sty {
@ -2786,18 +2786,18 @@ pub fn node_id_to_trait_ref(cx: ctxt, id: ast::NodeId) -> @ty::TraitRef {
match cx.trait_refs.find(&id) {
Some(&t) => t,
None => cx.sess.bug(
fmt!("node_id_to_trait_ref: no trait ref for node `%s`",
format!("node_id_to_trait_ref: no trait ref for node `{}`",
ast_map::node_id_to_str(cx.items, id,
token::get_ident_interner())))
}
}
pub fn node_id_to_type(cx: ctxt, id: ast::NodeId) -> t {
//printfln!("%?/%?", id, cx.node_types.len());
//printfln!("{:?}/{:?}", id, cx.node_types.len());
match cx.node_types.find(&(id as uint)) {
Some(&t) => t,
None => cx.sess.bug(
fmt!("node_id_to_type: no type for node `%s`",
format!("node_id_to_type: no type for node `{}`",
ast_map::node_id_to_str(cx.items, id,
token::get_ident_interner())))
}
@ -2820,7 +2820,7 @@ pub fn ty_fn_sig(fty: t) -> FnSig {
ty_bare_fn(ref f) => f.sig.clone(),
ty_closure(ref f) => f.sig.clone(),
ref s => {
fail!("ty_fn_sig() called on non-fn type: %?", s)
fail2!("ty_fn_sig() called on non-fn type: {:?}", s)
}
}
}
@ -2831,7 +2831,7 @@ pub fn ty_fn_args(fty: t) -> ~[t] {
ty_bare_fn(ref f) => f.sig.inputs.clone(),
ty_closure(ref f) => f.sig.inputs.clone(),
ref s => {
fail!("ty_fn_args() called on non-fn type: %?", s)
fail2!("ty_fn_args() called on non-fn type: {:?}", s)
}
}
}
@ -2840,7 +2840,7 @@ pub fn ty_closure_sigil(fty: t) -> Sigil {
match get(fty).sty {
ty_closure(ref f) => f.sigil,
ref s => {
fail!("ty_closure_sigil() called on non-closure type: %?", s)
fail2!("ty_closure_sigil() called on non-closure type: {:?}", s)
}
}
}
@ -2850,7 +2850,7 @@ pub fn ty_fn_purity(fty: t) -> ast::purity {
ty_bare_fn(ref f) => f.purity,
ty_closure(ref f) => f.purity,
ref s => {
fail!("ty_fn_purity() called on non-fn type: %?", s)
fail2!("ty_fn_purity() called on non-fn type: {:?}", s)
}
}
}
@ -2860,7 +2860,7 @@ pub fn ty_fn_ret(fty: t) -> t {
ty_bare_fn(ref f) => f.sig.output,
ty_closure(ref f) => f.sig.output,
ref s => {
fail!("ty_fn_ret() called on non-fn type: %?", s)
fail2!("ty_fn_ret() called on non-fn type: {:?}", s)
}
}
}
@ -2877,7 +2877,7 @@ pub fn ty_vstore(ty: t) -> vstore {
match get(ty).sty {
ty_evec(_, vstore) => vstore,
ty_estr(vstore) => vstore,
ref s => fail!("ty_vstore() called on invalid sty: %?", s)
ref s => fail2!("ty_vstore() called on invalid sty: {:?}", s)
}
}
@ -2891,7 +2891,7 @@ pub fn ty_region(tcx: ctxt,
ref s => {
tcx.sess.span_bug(
span,
fmt!("ty_region() invoked on in appropriate ty: %?", s));
format!("ty_region() invoked on in appropriate ty: {:?}", s));
}
}
}
@ -2902,7 +2902,7 @@ pub fn replace_fn_sig(cx: ctxt, fsty: &sty, new_sig: FnSig) -> t {
ty_closure(ref f) => mk_closure(cx, ClosureTy {sig: new_sig, ..*f}),
ref s => {
cx.sess.bug(
fmt!("ty_fn_sig() called on non-fn type: %?", s));
format!("ty_fn_sig() called on non-fn type: {:?}", s));
}
}
}
@ -2921,8 +2921,8 @@ pub fn replace_closure_return_type(tcx: ctxt, fn_type: t, ret_type: t) -> t {
})
}
_ => {
tcx.sess.bug(fmt!(
"replace_fn_ret() invoked with non-fn-type: %s",
tcx.sess.bug(format!(
"replace_fn_ret() invoked with non-fn-type: {}",
ty_to_str(tcx, fn_type)));
}
}
@ -3003,7 +3003,7 @@ pub fn adjust_ty(cx: ctxt,
}
ref b => {
cx.sess.bug(
fmt!("add_env adjustment on non-bare-fn: %?", b));
format!("add_env adjustment on non-bare-fn: {:?}", b));
}
}
}
@ -3018,7 +3018,7 @@ pub fn adjust_ty(cx: ctxt,
None => {
cx.sess.span_bug(
span,
fmt!("The %uth autoderef failed: %s",
format!("The {}th autoderef failed: {}",
i, ty_to_str(cx,
adjusted_ty)));
}
@ -3075,7 +3075,7 @@ pub fn adjust_ty(cx: ctxt,
ref s => {
cx.sess.span_bug(
span,
fmt!("borrow-vec associated with bad sty: %?",
format!("borrow-vec associated with bad sty: {:?}",
s));
}
}
@ -3094,7 +3094,7 @@ pub fn adjust_ty(cx: ctxt,
ref s => {
cx.sess.span_bug(
span,
fmt!("borrow-fn associated with bad sty: %?",
format!("borrow-fn associated with bad sty: {:?}",
s));
}
}
@ -3110,7 +3110,7 @@ pub fn adjust_ty(cx: ctxt,
ref s => {
cx.sess.span_bug(
span,
fmt!("borrow-trait-obj associated with bad sty: %?",
format!("borrow-trait-obj associated with bad sty: {:?}",
s));
}
}
@ -3185,8 +3185,8 @@ pub fn resolve_expr(tcx: ctxt, expr: &ast::Expr) -> ast::Def {
match tcx.def_map.find(&expr.id) {
Some(&def) => def,
None => {
tcx.sess.span_bug(expr.span, fmt!(
"No def-map entry for expr %?", expr.id));
tcx.sess.span_bug(expr.span, format!(
"No def-map entry for expr {:?}", expr.id));
}
}
}
@ -3244,8 +3244,8 @@ pub fn expr_kind(tcx: ctxt,
ast::DefSelf(*) => LvalueExpr,
def => {
tcx.sess.span_bug(expr.span, fmt!(
"Uncategorized def for expr %?: %?",
tcx.sess.span_bug(expr.span, format!(
"Uncategorized def for expr {:?}: {:?}",
expr.id, def));
}
}
@ -3311,7 +3311,7 @@ pub fn expr_kind(tcx: ctxt,
RvalueStmtExpr
}
ast::ExprForLoop(*) => fail!("non-desugared expr_for_loop"),
ast::ExprForLoop(*) => fail2!("non-desugared expr_for_loop"),
ast::ExprLogLevel |
ast::ExprLit(_) | // Note: lit_str is carved out above
@ -3339,7 +3339,7 @@ pub fn stmt_node_id(s: &ast::Stmt) -> ast::NodeId {
ast::StmtDecl(_, id) | StmtExpr(_, id) | StmtSemi(_, id) => {
return id;
}
ast::StmtMac(*) => fail!("unexpanded macro in trans")
ast::StmtMac(*) => fail2!("unexpanded macro in trans")
}
}
@ -3353,8 +3353,8 @@ pub fn field_idx_strict(tcx: ty::ctxt, name: ast::Name, fields: &[field])
-> uint {
let mut i = 0u;
for f in fields.iter() { if f.ident.name == name { return i; } i += 1u; }
tcx.sess.bug(fmt!(
"No field named `%s` found in the list of fields `%?`",
tcx.sess.bug(format!(
"No field named `{}` found in the list of fields `{:?}`",
token::interner_get(name),
fields.map(|f| tcx.sess.str_of(f.ident))));
}
@ -3418,7 +3418,7 @@ pub fn ty_sort_str(cx: ctxt, t: t) -> ~str {
::util::ppaux::ty_to_str(cx, t)
}
ty_enum(id, _) => fmt!("enum %s", item_path_str(cx, id)),
ty_enum(id, _) => format!("enum {}", item_path_str(cx, id)),
ty_box(_) => ~"@-ptr",
ty_uniq(_) => ~"~-ptr",
ty_evec(_, _) => ~"vector",
@ -3427,8 +3427,8 @@ pub fn ty_sort_str(cx: ctxt, t: t) -> ~str {
ty_rptr(_, _) => ~"&-ptr",
ty_bare_fn(_) => ~"extern fn",
ty_closure(_) => ~"fn",
ty_trait(id, _, _, _, _) => fmt!("trait %s", item_path_str(cx, id)),
ty_struct(id, _) => fmt!("struct %s", item_path_str(cx, id)),
ty_trait(id, _, _, _, _) => format!("trait {}", item_path_str(cx, id)),
ty_struct(id, _) => format!("struct {}", item_path_str(cx, id)),
ty_tup(_) => ~"tuple",
ty_infer(TyVar(_)) => ~"inferred type",
ty_infer(IntVar(_)) => ~"integral variable",
@ -3461,19 +3461,19 @@ pub fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str {
match *err {
terr_mismatch => ~"types differ",
terr_purity_mismatch(values) => {
fmt!("expected %s fn but found %s fn",
format!("expected {} fn but found {} fn",
values.expected.to_str(), values.found.to_str())
}
terr_abi_mismatch(values) => {
fmt!("expected %s fn but found %s fn",
format!("expected {} fn but found {} fn",
values.expected.to_str(), values.found.to_str())
}
terr_onceness_mismatch(values) => {
fmt!("expected %s fn but found %s fn",
format!("expected {} fn but found {} fn",
values.expected.to_str(), values.found.to_str())
}
terr_sigil_mismatch(values) => {
fmt!("expected %s closure, found %s closure",
format!("expected {} closure, found {} closure",
values.expected.to_str(),
values.found.to_str())
}
@ -3483,97 +3483,97 @@ pub fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str {
terr_ptr_mutability => ~"pointers differ in mutability",
terr_ref_mutability => ~"references differ in mutability",
terr_ty_param_size(values) => {
fmt!("expected a type with %u type params \
but found one with %u type params",
format!("expected a type with {} type params \
but found one with {} type params",
values.expected, values.found)
}
terr_tuple_size(values) => {
fmt!("expected a tuple with %u elements \
but found one with %u elements",
format!("expected a tuple with {} elements \
but found one with {} elements",
values.expected, values.found)
}
terr_record_size(values) => {
fmt!("expected a record with %u fields \
but found one with %u fields",
format!("expected a record with {} fields \
but found one with {} fields",
values.expected, values.found)
}
terr_record_mutability => {
~"record elements differ in mutability"
}
terr_record_fields(values) => {
fmt!("expected a record with field `%s` but found one with field \
`%s`",
format!("expected a record with field `{}` but found one with field \
`{}`",
cx.sess.str_of(values.expected),
cx.sess.str_of(values.found))
}
terr_arg_count => ~"incorrect number of function parameters",
terr_regions_does_not_outlive(*) => {
fmt!("lifetime mismatch")
format!("lifetime mismatch")
}
terr_regions_not_same(*) => {
fmt!("lifetimes are not the same")
format!("lifetimes are not the same")
}
terr_regions_no_overlap(*) => {
fmt!("lifetimes do not intersect")
format!("lifetimes do not intersect")
}
terr_regions_insufficiently_polymorphic(br, _) => {
fmt!("expected bound lifetime parameter %s, \
format!("expected bound lifetime parameter {}, \
but found concrete lifetime",
bound_region_ptr_to_str(cx, br))
}
terr_regions_overly_polymorphic(br, _) => {
fmt!("expected concrete lifetime, \
but found bound lifetime parameter %s",
format!("expected concrete lifetime, \
but found bound lifetime parameter {}",
bound_region_ptr_to_str(cx, br))
}
terr_vstores_differ(k, ref values) => {
fmt!("%s storage differs: expected %s but found %s",
format!("{} storage differs: expected {} but found {}",
terr_vstore_kind_to_str(k),
vstore_to_str(cx, (*values).expected),
vstore_to_str(cx, (*values).found))
}
terr_trait_stores_differ(_, ref values) => {
fmt!("trait storage differs: expected %s but found %s",
format!("trait storage differs: expected {} but found {}",
trait_store_to_str(cx, (*values).expected),
trait_store_to_str(cx, (*values).found))
}
terr_in_field(err, fname) => {
fmt!("in field `%s`, %s", cx.sess.str_of(fname),
format!("in field `{}`, {}", cx.sess.str_of(fname),
type_err_to_str(cx, err))
}
terr_sorts(values) => {
fmt!("expected %s but found %s",
format!("expected {} but found {}",
ty_sort_str(cx, values.expected),
ty_sort_str(cx, values.found))
}
terr_traits(values) => {
fmt!("expected trait %s but found trait %s",
format!("expected trait {} but found trait {}",
item_path_str(cx, values.expected),
item_path_str(cx, values.found))
}
terr_builtin_bounds(values) => {
if values.expected.is_empty() {
fmt!("expected no bounds but found `%s`",
format!("expected no bounds but found `{}`",
values.found.user_string(cx))
} else if values.found.is_empty() {
fmt!("expected bounds `%s` but found no bounds",
format!("expected bounds `{}` but found no bounds",
values.expected.user_string(cx))
} else {
fmt!("expected bounds `%s` but found bounds `%s`",
format!("expected bounds `{}` but found bounds `{}`",
values.expected.user_string(cx),
values.found.user_string(cx))
}
}
terr_integer_as_char => {
fmt!("expected an integral type but found char")
format!("expected an integral type but found char")
}
terr_int_mismatch(ref values) => {
fmt!("expected %s but found %s",
format!("expected {} but found {}",
values.expected.to_str(),
values.found.to_str())
}
terr_float_mismatch(ref values) => {
fmt!("expected %s but found %s",
format!("expected {} but found {}",
values.expected.to_str(),
values.found.to_str())
}
@ -3633,7 +3633,7 @@ pub fn provided_trait_methods(cx: ctxt, id: ast::DefId) -> ~[@Method] {
match ast_util::split_trait_methods(*ms) {
(_, p) => p.map(|m| method(cx, ast_util::local_def(m.id)))
},
_ => cx.sess.bug(fmt!("provided_trait_methods: %? is not a trait",
_ => cx.sess.bug(format!("provided_trait_methods: {:?} is not a trait",
id))
}
} else {
@ -3690,7 +3690,7 @@ fn lookup_locally_or_in_crate_store<V:Clone>(
}
if def_id.crate == ast::LOCAL_CRATE {
fail!("No def'n found for %? in tcx.%s", def_id, descr);
fail2!("No def'n found for {:?} in tcx.{}", def_id, descr);
}
let v = load_external();
map.insert(def_id, v.clone());
@ -3733,7 +3733,7 @@ pub fn impl_trait_ref(cx: ctxt, id: ast::DefId) -> Option<@TraitRef> {
None => {}
}
let ret = if id.crate == ast::LOCAL_CRATE {
debug!("(impl_trait_ref) searching for trait impl %?", id);
debug2!("(impl_trait_ref) searching for trait impl {:?}", id);
match cx.items.find(&id.node) {
Some(&ast_map::node_item(@ast::item {
node: ast::item_impl(_, ref opt_trait, _, _),
@ -3979,7 +3979,7 @@ pub fn item_path(cx: ctxt, id: ast::DefId) -> ast_map::path {
}
ref node => {
cx.sess.bug(fmt!("cannot find item_path for node %?", node));
cx.sess.bug(format!("cannot find item_path for node {:?}", node));
}
}
}
@ -4031,7 +4031,7 @@ pub fn enum_variants(cx: ctxt, id: ast::DefId) -> @~[@VariantInfo] {
cx.sess.span_err(e.span, "expected signed integer constant");
}
Err(ref err) => {
cx.sess.span_err(e.span, fmt!("expected constant: %s", (*err)));
cx.sess.span_err(e.span, format!("expected constant: {}", (*err)));
}
},
None => {}
@ -4111,7 +4111,7 @@ pub fn has_attr(tcx: ctxt, did: DefId, attr: &str) -> bool {
attrs: ref attrs,
_
}, _)) => attr::contains_name(*attrs, attr),
_ => tcx.sess.bug(fmt!("has_attr: %? is not an item",
_ => tcx.sess.bug(format!("has_attr: {:?} is not an item",
did))
}
} else {
@ -4182,7 +4182,7 @@ pub fn lookup_struct_fields(cx: ctxt, did: ast::DefId) -> ~[field_ty] {
}
_ => {
cx.sess.bug(
fmt!("struct ID not bound to an item: %s",
format!("struct ID not bound to an item: {}",
ast_map::node_id_to_str(cx.items, did.node,
token::get_ident_interner())));
}
@ -4486,7 +4486,7 @@ pub fn each_bound_trait_and_supertraits(tcx: ctxt,
// Add the given trait ty to the hash map
while i < trait_refs.len() {
debug!("each_bound_trait_and_supertraits(i=%?, trait_ref=%s)",
debug2!("each_bound_trait_and_supertraits(i={:?}, trait_ref={})",
i, trait_refs[i].repr(tcx));
if !f(trait_refs[i]) {
@ -4496,7 +4496,7 @@ pub fn each_bound_trait_and_supertraits(tcx: ctxt,
// Add supertraits to supertrait_set
let supertrait_refs = trait_ref_supertraits(tcx, trait_refs[i]);
for &supertrait_ref in supertrait_refs.iter() {
debug!("each_bound_trait_and_supertraits(supertrait_ref=%s)",
debug2!("each_bound_trait_and_supertraits(supertrait_ref={})",
supertrait_ref.repr(tcx));
let d_id = supertrait_ref.def_id;
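The hunks above and below all apply the same mechanical translation from the printf-style fmt! specifiers to the new brace placeholders: %s, %u, %d (and %b) become {}, %? becomes {:?}, and a literal # is escaped as \# inside the format string (written \\# in source), since the format grammar of that era appears to treat # as a reserved character. As a rough sketch in modern Rust of the placeholder behaviour being migrated to (the values and messages here are illustrative, not taken from this commit):

fn main() {
    let expected = "bounds `Send`";
    let found = "no bounds";
    // `{}` takes over from `%s`/`%u`/`%d` and uses Display formatting.
    let msg = format!("expected {} but found {}", expected, found);
    assert_eq!(msg, "expected bounds `Send` but found no bounds");

    // `{:?}` takes over from `%?` and uses Debug formatting of arbitrary values.
    let node = (42, "item_impl");
    let dbg = format!("cannot find item_path for node {:?}", node);
    assert_eq!(dbg, "cannot find item_path for node (42, \"item_impl\")");
}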


@ -92,12 +92,12 @@ pub fn get_region_reporting_err(
result::Err(ref e) => {
let descr = match a_r {
&None => ~"anonymous lifetime",
&Some(ref a) => fmt!("lifetime %s",
&Some(ref a) => format!("lifetime {}",
lifetime_to_str(a, tcx.sess.intr()))
};
tcx.sess.span_err(
span,
fmt!("Illegal %s: %s",
format!("Illegal {}: {}",
descr, e.msg));
e.replacement
}
@ -157,7 +157,7 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope + Clone + 'static>(
(&None, &Some(_)) => {
tcx.sess.span_err(
path.span,
fmt!("no region bound is allowed on `%s`, \
format!("no region bound is allowed on `{}`, \
which is not declared as containing region pointers",
ty::item_path_str(tcx, def_id)));
opt_vec::Empty
@ -182,7 +182,7 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope + Clone + 'static>(
if decl_generics.type_param_defs.len() != supplied_type_parameter_count {
this.tcx().sess.span_fatal(
path.span,
fmt!("wrong number of type arguments: expected %u but found %u",
format!("wrong number of type arguments: expected {} but found {}",
decl_generics.type_param_defs.len(),
supplied_type_parameter_count));
}
@ -428,7 +428,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope + Clone + 'static>(
ast::ty_path(ref path, ref bounds, id) => {
let a_def = match tcx.def_map.find(&id) {
None => tcx.sess.span_fatal(
ast_ty.span, fmt!("unbound path %s",
ast_ty.span, format!("unbound path {}",
path_to_str(path, tcx.sess.intr()))),
Some(&d) => d
};
@ -446,8 +446,8 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope + Clone + 'static>(
let path_str = path_to_str(path, tcx.sess.intr());
tcx.sess.span_err(
ast_ty.span,
fmt!("reference to trait `%s` where a type is expected; \
try `@%s`, `~%s`, or `&%s`",
format!("reference to trait `{}` where a type is expected; \
try `@{}`, `~{}`, or `&{}`",
path_str, path_str, path_str, path_str));
ty::mk_err()
}
@ -498,7 +498,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope + Clone + 'static>(
}
_ => {
tcx.sess.span_fatal(ast_ty.span,
fmt!("found value name used as a type: %?", a_def));
format!("found value name used as a type: {:?}", a_def));
}
}
}
@ -521,8 +521,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope + Clone + 'static>(
Err(ref r) => {
tcx.sess.span_fatal(
ast_ty.span,
fmt!("expected constant expr for vector length: %s",
*r));
format!("expected constant expr for vector length: {}", *r));
}
}
}
@ -583,7 +582,7 @@ pub fn bound_lifetimes<AC:AstConv>(
if special_idents.iter().any(|&i| i == ast_lifetime.ident) {
this.tcx().sess.span_err(
ast_lifetime.span,
fmt!("illegal lifetime parameter name: `%s`",
format!("illegal lifetime parameter name: `{}`",
lifetime_to_str(ast_lifetime, this.tcx().sess.intr())));
} else {
bound_lifetime_names.push(ast_lifetime.ident);
@ -637,7 +636,7 @@ fn ty_of_method_or_bare_fn<AC:AstConv,RS:RegionScope + Clone + 'static>(
opt_self_info: Option<&SelfInfo>,
decl: &ast::fn_decl) -> (Option<Option<ty::t>>, ty::BareFnTy)
{
debug!("ty_of_bare_fn");
debug2!("ty_of_bare_fn");
// new region names that appear inside of the fn decl are bound to
// that function type
@ -718,7 +717,7 @@ pub fn ty_of_closure<AC:AstConv,RS:RegionScope + Clone + 'static>(
// names or they are provided, but not both.
assert!(lifetimes.is_empty() || expected_sig.is_none());
debug!("ty_of_fn_decl");
debug2!("ty_of_fn_decl");
let _i = indenter();
// resolve the function bound region in the original region
@ -807,7 +806,7 @@ fn conv_builtin_bounds(tcx: ty::ctxt, ast_bounds: &Option<OptVec<ast::TyParamBou
}
tcx.sess.span_fatal(
b.path.span,
fmt!("only the builtin traits can be used \
format!("only the builtin traits can be used \
as closure or object bounds"));
}
ast::RegionTyParamBound => {


@ -166,7 +166,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: @ast::Pat, path: &ast::Path,
fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected, actual| {
expected.map_move_default(~"", |e| {
fmt!("mismatched types: expected `%s` but found %s",
format!("mismatched types: expected `{}` but found {}",
e, actual)})},
Some(expected), ~"a structure pattern",
None);
@ -215,7 +215,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: @ast::Pat, path: &ast::Path,
fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected, actual| {
expected.map_move_default(~"", |e| {
fmt!("mismatched types: expected `%s` but found %s",
format!("mismatched types: expected `{}` but found {}",
e, actual)})},
Some(expected), ~"an enum or structure pattern",
None);
@ -241,7 +241,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: @ast::Pat, path: &ast::Path,
if arg_len > 0 {
// N-ary variant.
if arg_len != subpats_len {
let s = fmt!("this pattern has %u field%s, but the corresponding %s has %u field%s",
let s = format!("this pattern has {} field{}, but the corresponding {} has {} field{}",
subpats_len,
if subpats_len == 1u { ~"" } else { ~"s" },
kind_name,
@ -260,7 +260,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: @ast::Pat, path: &ast::Path,
}
} else if subpats_len > 0 {
tcx.sess.span_err(pat.span,
fmt!("this pattern has %u field%s, but the corresponding %s has no \
format!("this pattern has {} field{}, but the corresponding {} has no \
fields",
subpats_len,
if subpats_len == 1u { "" } else { "s" },
@ -319,7 +319,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
// up its type won't fail
check_pat(pcx, field.pat, ty::mk_err());
tcx.sess.span_err(span,
fmt!("struct `%s` does not have a field named `%s`",
format!("struct `{}` does not have a field named `{}`",
name,
tcx.sess.str_of(field.ident)));
}
@ -333,7 +333,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
loop;
}
tcx.sess.span_err(span,
fmt!("pattern does not mention field `%s`",
format!("pattern does not mention field `{}`",
token::interner_get(field.name)));
}
}
@ -358,7 +358,7 @@ pub fn check_struct_pat(pcx: &pat_ctxt, pat_id: ast::NodeId, span: Span,
Some(&ast::DefStruct(*)) | Some(&ast::DefVariant(*)) => {
let name = pprust::path_to_str(path, tcx.sess.intr());
tcx.sess.span_err(span,
fmt!("mismatched types: expected `%s` but found `%s`",
format!("mismatched types: expected `{}` but found `{}`",
fcx.infcx().ty_to_str(expected),
name));
}
@ -396,8 +396,8 @@ pub fn check_struct_like_enum_variant_pat(pcx: &pat_ctxt,
Some(&ast::DefStruct(*)) | Some(&ast::DefVariant(*)) => {
let name = pprust::path_to_str(path, tcx.sess.intr());
tcx.sess.span_err(span,
fmt!("mismatched types: expected `%s` but \
found `%s`",
format!("mismatched types: expected `{}` but \
found `{}`",
fcx.infcx().ty_to_str(expected),
name));
}
@ -428,8 +428,8 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::Pat, expected: ty::t) {
fcx.infcx().resolve_type_vars_if_possible(fcx.expr_ty(begin));
let e_ty =
fcx.infcx().resolve_type_vars_if_possible(fcx.expr_ty(end));
debug!("pat_range beginning type: %?", b_ty);
debug!("pat_range ending type: %?", e_ty);
debug2!("pat_range beginning type: {:?}", b_ty);
debug2!("pat_range ending type: {:?}", e_ty);
if !require_same_types(
tcx, Some(fcx.infcx()), false, pat.span, b_ty, e_ty,
|| ~"mismatched types in range")
@ -488,7 +488,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::Pat, expected: ty::t) {
}
fcx.write_ty(pat.id, typ);
debug!("(checking match) writing type for pat id %d", pat.id);
debug2!("(checking match) writing type for pat id {}", pat.id);
match sub {
Some(p) => check_pat(pcx, p, expected),
@ -520,7 +520,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::Pat, expected: ty::t) {
fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected, actual| {
expected.map_move_default(~"", |e| {
fmt!("mismatched types: expected `%s` but found %s",
format!("mismatched types: expected `{}` but found {}",
e, actual)})},
Some(expected), ~"a structure pattern",
None);
@ -567,7 +567,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::Pat, expected: ty::t) {
// See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
fcx.infcx().type_error_message_str_with_expected(pat.span, |expected, actual| {
expected.map_move_default(~"", |e| {
fmt!("mismatched types: expected `%s` but found %s",
format!("mismatched types: expected `{}` but found {}",
e, actual)})}, Some(expected), ~"tuple", Some(&type_error));
fcx.write_error(pat.id);
}
@ -617,7 +617,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::Pat, expected: ty::t) {
pat.span,
|expected, actual| {
expected.map_move_default(~"", |e| {
fmt!("mismatched types: expected `%s` but found %s",
format!("mismatched types: expected `{}` but found {}",
e, actual)})},
Some(expected),
~"a vector pattern",
@ -676,10 +676,10 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
span,
|expected, actual| {
expected.map_move_default(~"", |e| {
fmt!("mismatched types: expected `%s` but found %s",
format!("mismatched types: expected `{}` but found {}",
e, actual)})},
Some(expected),
fmt!("%s pattern", match pointer_kind {
format!("{} pattern", match pointer_kind {
Managed => "an @-box",
Send => "a ~-box",
Borrowed => "an &-pointer"


@ -151,18 +151,18 @@ pub fn lookup(
};
let self_ty = structurally_resolved_type(fcx, self_expr.span, self_ty);
debug!("method lookup(self_ty=%s, expr=%s, self_expr=%s)",
debug2!("method lookup(self_ty={}, expr={}, self_expr={})",
self_ty.repr(fcx.tcx()), expr.repr(fcx.tcx()),
self_expr.repr(fcx.tcx()));
debug!("searching inherent candidates");
debug2!("searching inherent candidates");
lcx.push_inherent_candidates(self_ty);
let mme = lcx.search(self_ty);
if mme.is_some() {
return mme;
}
debug!("searching extension candidates");
debug2!("searching extension candidates");
lcx.reset_candidates();
lcx.push_bound_candidates(self_ty);
lcx.push_extension_candidates();
@ -215,7 +215,7 @@ impl<'self> LookupContext<'self> {
let mut self_ty = self_ty;
let mut autoderefs = 0;
loop {
debug!("loop: self_ty=%s autoderefs=%u",
debug2!("loop: self_ty={} autoderefs={}",
self.ty_to_str(self_ty), autoderefs);
match self.deref_args {
@ -397,7 +397,7 @@ impl<'self> LookupContext<'self> {
fn push_inherent_candidates_from_object(&self,
did: DefId,
substs: &ty::substs) {
debug!("push_inherent_candidates_from_object(did=%s, substs=%s)",
debug2!("push_inherent_candidates_from_object(did={}, substs={})",
self.did_to_str(did),
substs_to_str(self.tcx(), substs));
let _indenter = indenter();
@ -446,7 +446,7 @@ impl<'self> LookupContext<'self> {
fn push_inherent_candidates_from_param(&self,
rcvr_ty: ty::t,
param_ty: param_ty) {
debug!("push_inherent_candidates_from_param(param_ty=%?)",
debug2!("push_inherent_candidates_from_param(param_ty={:?})",
param_ty);
let _indenter = indenter();
@ -456,7 +456,7 @@ impl<'self> LookupContext<'self> {
None => {
tcx.sess.span_bug(
self.expr.span,
fmt!("No param def for %?", param_ty));
format!("No param def for {:?}", param_ty));
}
};
@ -523,11 +523,11 @@ impl<'self> LookupContext<'self> {
let cand = mk_cand(bound_trait_ref, method,
pos, this_bound_idx);
debug!("pushing inherent candidate for param: %?", cand);
debug2!("pushing inherent candidate for param: {:?}", cand);
self.inherent_candidates.push(cand);
}
None => {
debug!("trait doesn't contain method: %?",
debug2!("trait doesn't contain method: {:?}",
bound_trait_ref.def_id);
// check next trait or bound
}
@ -557,7 +557,7 @@ impl<'self> LookupContext<'self> {
if !self.impl_dups.insert(impl_info.did) {
return; // already visited
}
debug!("push_candidates_from_impl: %s %s %s",
debug2!("push_candidates_from_impl: {} {} {}",
token::interner_get(self.m_name),
impl_info.ident.repr(self.tcx()),
impl_info.methods.map(|m| m.ident).repr(self.tcx()));
@ -603,8 +603,8 @@ impl<'self> LookupContext<'self> {
match self.search_for_method(self_ty) {
None => None,
Some(mme) => {
debug!("(searching for autoderef'd method) writing \
adjustment (%u) to %d",
debug2!("(searching for autoderef'd method) writing \
adjustment ({}) to {}",
autoderefs,
self.self_expr.id);
self.fcx.write_adjustment(self.self_expr.id, @autoadjust);
@ -795,7 +795,7 @@ impl<'self> LookupContext<'self> {
ty_opaque_closure_ptr(_) | ty_unboxed_vec(_) |
ty_opaque_box | ty_type | ty_infer(TyVar(_)) => {
self.bug(fmt!("Unexpected type: %s",
self.bug(format!("Unexpected type: {}",
self.ty_to_str(self_ty)));
}
}
@ -832,14 +832,14 @@ impl<'self> LookupContext<'self> {
fn search_for_method(&self, rcvr_ty: ty::t)
-> Option<method_map_entry> {
debug!("search_for_method(rcvr_ty=%s)", self.ty_to_str(rcvr_ty));
debug2!("search_for_method(rcvr_ty={})", self.ty_to_str(rcvr_ty));
let _indenter = indenter();
// I am not sure that inherent methods should have higher
// priority, but it is necessary ATM to handle some of the
// existing code.
debug!("searching inherent candidates");
debug2!("searching inherent candidates");
match self.consider_candidates(rcvr_ty, self.inherent_candidates) {
None => {}
Some(mme) => {
@ -847,7 +847,7 @@ impl<'self> LookupContext<'self> {
}
}
debug!("searching extension candidates");
debug2!("searching extension candidates");
match self.consider_candidates(rcvr_ty, self.extension_candidates) {
None => {
return None;
@ -896,7 +896,7 @@ impl<'self> LookupContext<'self> {
let mut j = i + 1;
while j < candidates.len() {
let candidate_b = &candidates[j];
debug!("attempting to merge %? and %?",
debug2!("attempting to merge {:?} and {:?}",
candidate_a, candidate_b);
let candidates_same = match (&candidate_a.origin,
&candidate_b.origin) {
@ -936,7 +936,7 @@ impl<'self> LookupContext<'self> {
let tcx = self.tcx();
let fty = ty::mk_bare_fn(tcx, candidate.method_ty.fty.clone());
debug!("confirm_candidate(expr=%s, candidate=%s, fty=%s)",
debug2!("confirm_candidate(expr={}, candidate={}, fty={})",
self.expr.repr(tcx),
self.cand_to_str(candidate),
self.ty_to_str(fty));
@ -992,11 +992,11 @@ impl<'self> LookupContext<'self> {
};
// Compute the method type with type parameters substituted
debug!("fty=%s all_substs=%s",
debug2!("fty={} all_substs={}",
self.ty_to_str(fty),
ty::substs_to_str(tcx, &all_substs));
let fty = ty::subst(tcx, &all_substs, fty);
debug!("after subst, fty=%s", self.ty_to_str(fty));
debug2!("after subst, fty={}", self.ty_to_str(fty));
// Replace any bound regions that appear in the function
// signature with region variables
@ -1005,7 +1005,7 @@ impl<'self> LookupContext<'self> {
ref s => {
tcx.sess.span_bug(
self.expr.span,
fmt!("Invoking method with non-bare-fn ty: %?", s));
format!("Invoking method with non-bare-fn ty: {:?}", s));
}
};
let (_, opt_transformed_self_ty, fn_sig) =
@ -1019,7 +1019,7 @@ impl<'self> LookupContext<'self> {
purity: bare_fn_ty.purity,
abis: bare_fn_ty.abis.clone(),
});
debug!("after replacing bound regions, fty=%s", self.ty_to_str(fty));
debug2!("after replacing bound regions, fty={}", self.ty_to_str(fty));
let self_mode = get_mode_from_explicit_self(candidate.method_ty.explicit_self);
@ -1032,7 +1032,7 @@ impl<'self> LookupContext<'self> {
rcvr_ty, transformed_self_ty) {
result::Ok(_) => (),
result::Err(_) => {
self.bug(fmt!("%s was a subtype of %s but now is not?",
self.bug(format!("{} was a subtype of {} but now is not?",
self.ty_to_str(rcvr_ty),
self.ty_to_str(transformed_self_ty)));
}
@ -1106,7 +1106,7 @@ impl<'self> LookupContext<'self> {
}
_ => {
self.bug(
fmt!("'impossible' transformed_self_ty: %s",
format!("'impossible' transformed_self_ty: {}",
transformed_self_ty.repr(self.tcx())));
}
}
@ -1189,12 +1189,12 @@ impl<'self> LookupContext<'self> {
// `rcvr_ty` is the type of the expression. It may be a subtype of a
// candidate method's `self_ty`.
fn is_relevant(&self, rcvr_ty: ty::t, candidate: &Candidate) -> bool {
debug!("is_relevant(rcvr_ty=%s, candidate=%s)",
debug2!("is_relevant(rcvr_ty={}, candidate={})",
self.ty_to_str(rcvr_ty), self.cand_to_str(candidate));
return match candidate.method_ty.explicit_self {
sty_static => {
debug!("(is relevant?) explicit self is static");
debug2!("(is relevant?) explicit self is static");
false
}
@ -1203,7 +1203,7 @@ impl<'self> LookupContext<'self> {
}
sty_region(_, m) => {
debug!("(is relevant?) explicit self is a region");
debug2!("(is relevant?) explicit self is a region");
match ty::get(rcvr_ty).sty {
ty::ty_rptr(_, mt) => {
mutability_matches(mt.mutbl, m) &&
@ -1220,7 +1220,7 @@ impl<'self> LookupContext<'self> {
}
sty_box(m) => {
debug!("(is relevant?) explicit self is a box");
debug2!("(is relevant?) explicit self is a box");
match ty::get(rcvr_ty).sty {
ty::ty_box(mt) => {
mutability_matches(mt.mutbl, m) &&
@ -1237,7 +1237,7 @@ impl<'self> LookupContext<'self> {
}
sty_uniq => {
debug!("(is relevant?) explicit self is a unique pointer");
debug2!("(is relevant?) explicit self is a unique pointer");
match ty::get(rcvr_ty).sty {
ty::ty_uniq(mt) => {
rcvr_matches_ty(self.fcx, mt.ty, candidate)
@ -1303,14 +1303,14 @@ impl<'self> LookupContext<'self> {
let span = if did.crate == ast::LOCAL_CRATE {
match self.tcx().items.find(&did.node) {
Some(&ast_map::node_method(m, _, _)) => m.span,
_ => fail!("report_static_candidate: bad item %?", did)
_ => fail2!("report_static_candidate: bad item {:?}", did)
}
} else {
self.expr.span
};
self.tcx().sess.span_note(
span,
fmt!("candidate #%u is `%s`",
format!("candidate \\#{} is `{}`",
(idx+1u),
ty::item_path_str(self.tcx(), did)));
}
@ -1318,7 +1318,7 @@ impl<'self> LookupContext<'self> {
fn report_param_candidate(&self, idx: uint, did: DefId) {
self.tcx().sess.span_note(
self.expr.span,
fmt!("candidate #%u derives from the bound `%s`",
format!("candidate \\#{} derives from the bound `{}`",
(idx+1u),
ty::item_path_str(self.tcx(), did)));
}
@ -1326,8 +1326,8 @@ impl<'self> LookupContext<'self> {
fn report_trait_candidate(&self, idx: uint, did: DefId) {
self.tcx().sess.span_note(
self.expr.span,
fmt!("candidate #%u derives from the type of the receiver, \
which is the trait `%s`",
format!("candidate \\#{} derives from the type of the receiver, \
which is the trait `{}`",
(idx+1u),
ty::item_path_str(self.tcx(), did)));
}
@ -1345,7 +1345,7 @@ impl<'self> LookupContext<'self> {
}
fn cand_to_str(&self, cand: &Candidate) -> ~str {
fmt!("Candidate(rcvr_ty=%s, rcvr_substs=%s, origin=%?)",
format!("Candidate(rcvr_ty={}, rcvr_substs={}, origin={:?})",
cand.rcvr_match_condition.repr(self.tcx()),
ty::substs_to_str(self.tcx(), &cand.rcvr_substs),
cand.origin)
@ -1371,10 +1371,10 @@ impl Repr for RcvrMatchCondition {
fn repr(&self, tcx: ty::ctxt) -> ~str {
match *self {
RcvrMatchesIfObject(d) => {
fmt!("RcvrMatchesIfObject(%s)", d.repr(tcx))
format!("RcvrMatchesIfObject({})", d.repr(tcx))
}
RcvrMatchesIfSubtype(t) => {
fmt!("RcvrMatchesIfSubtype(%s)", t.repr(tcx))
format!("RcvrMatchesIfSubtype({})", t.repr(tcx))
}
}
}


@ -110,7 +110,6 @@ use util::ppaux::{bound_region_ptr_to_str};
use util::ppaux;
use std::cast::transmute;
use std::hashmap::HashMap;
use std::result;
use std::util::replace;
@ -363,7 +362,7 @@ impl Visitor<()> for GatherLocalsVisitor {
_ => Some(self.fcx.to_ty(&local.ty))
};
self.assign(local.id, o_ty);
debug!("Local variable %s is assigned type %s",
debug2!("Local variable {} is assigned type {}",
self.fcx.pat_to_str(local.pat),
self.fcx.infcx().ty_to_str(
self.fcx.inh.locals.get_copy(&local.id)));
@ -376,7 +375,7 @@ impl Visitor<()> for GatherLocalsVisitor {
ast::PatIdent(_, ref path, _)
if pat_util::pat_is_binding(self.fcx.ccx.tcx.def_map, p) => {
self.assign(p.id, None);
debug!("Pattern binding %s is assigned to %s",
debug2!("Pattern binding {} is assigned to {}",
self.tcx.sess.str_of(path.segments[0].identifier),
self.fcx.infcx().ty_to_str(
self.fcx.inh.locals.get_copy(&p.id)));
@ -451,7 +450,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
let arg_tys = fn_sig.inputs.map(|a| *a);
let ret_ty = fn_sig.output;
debug!("check_fn(arg_tys=%?, ret_ty=%?, opt_self_ty=%?)",
debug2!("check_fn(arg_tys={:?}, ret_ty={:?}, opt_self_ty={:?})",
arg_tys.map(|&a| ppaux::ty_to_str(tcx, a)),
ppaux::ty_to_str(tcx, ret_ty),
opt_self_info.map(|si| ppaux::ty_to_str(tcx, si.self_ty)));
@ -511,7 +510,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
// Add the self parameter
for self_info in opt_self_info.iter() {
visit.assign(self_info.self_id, Some(self_info.self_ty));
debug!("self is assigned to %s",
debug2!("self is assigned to {}",
fcx.infcx().ty_to_str(
fcx.inh.locals.get_copy(&self_info.self_id)));
}
@ -565,7 +564,7 @@ pub fn check_no_duplicate_fields(tcx: ty::ctxt,
let orig_sp = field_names.find(&id).map_move(|x| *x);
match orig_sp {
Some(orig_sp) => {
tcx.sess.span_err(sp, fmt!("Duplicate field name %s in record type declaration",
tcx.sess.span_err(sp, format!("Duplicate field name {} in record type declaration",
tcx.sess.str_of(id)));
tcx.sess.span_note(orig_sp, "First declaration of this field occurred here");
break;
@ -589,7 +588,7 @@ pub fn check_struct(ccx: @mut CrateCtxt, id: ast::NodeId, span: Span) {
}
pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) {
debug!("check_item(it.id=%d, it.ident=%s)",
debug2!("check_item(it.id={}, it.ident={})",
it.id,
ty::item_path_str(ccx.tcx, local_def(it.id)));
let _indenter = indenter();
@ -607,7 +606,7 @@ pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) {
}
ast::item_impl(_, _, _, ref ms) => {
let rp = ccx.tcx.region_paramd_items.find(&it.id).map_move(|x| *x);
debug!("item_impl %s with id %d rp %?",
debug2!("item_impl {} with id {} rp {:?}",
ccx.tcx.sess.str_of(it.ident), it.id, rp);
for m in ms.iter() {
check_method(ccx, *m);
@ -645,7 +644,7 @@ pub fn check_item(ccx: @mut CrateCtxt, it: @ast::item) {
if tpt.generics.has_type_params() {
ccx.tcx.sess.span_err(
item.span,
fmt!("foreign items may not have type parameters"));
format!("foreign items may not have type parameters"));
}
}
}
@ -691,7 +690,7 @@ impl FnCtxt {
} else {
result::Err(RegionError {
msg: {
fmt!("named region `%s` not in scope here",
format!("named region `{}` not in scope here",
bound_region_ptr_to_str(self.tcx(), br))
},
replacement: {
@ -722,7 +721,7 @@ impl RegionScope for FnCtxt {
impl FnCtxt {
pub fn tag(&self) -> ~str {
unsafe {
fmt!("%x", transmute(self))
format!("{}", self as *FnCtxt)
}
}
@ -732,7 +731,7 @@ impl FnCtxt {
None => {
self.tcx().sess.span_bug(
span,
fmt!("No type for local variable %?", nid));
format!("No type for local variable {:?}", nid));
}
}
}
@ -743,14 +742,14 @@ impl FnCtxt {
#[inline]
pub fn write_ty(&self, node_id: ast::NodeId, ty: ty::t) {
debug!("write_ty(%d, %s) in fcx %s",
debug2!("write_ty({}, {}) in fcx {}",
node_id, ppaux::ty_to_str(self.tcx(), ty), self.tag());
self.inh.node_types.insert(node_id, ty);
}
pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::substs) {
if !ty::substs_is_noop(&substs) {
debug!("write_substs(%d, %s) in fcx %s",
debug2!("write_substs({}, {}) in fcx {}",
node_id,
ty::substs_to_str(self.tcx(), &substs),
self.tag());
@ -782,7 +781,7 @@ impl FnCtxt {
pub fn write_adjustment(&self,
node_id: ast::NodeId,
adj: @ty::AutoAdjustment) {
debug!("write_adjustment(node_id=%?, adj=%?)", node_id, adj);
debug2!("write_adjustment(node_id={:?}, adj={:?})", node_id, adj);
self.inh.adjustments.insert(node_id, adj);
}
@ -808,7 +807,7 @@ impl FnCtxt {
match self.inh.node_types.find(&ex.id) {
Some(&t) => t,
None => {
self.tcx().sess.bug(fmt!("no type for expr in fcx %s",
self.tcx().sess.bug(format!("no type for expr in fcx {}",
self.tag()));
}
}
@ -819,7 +818,7 @@ impl FnCtxt {
Some(&t) => t,
None => {
self.tcx().sess.bug(
fmt!("no type for node %d: %s in fcx %s",
format!("no type for node {}: {} in fcx {}",
id, ast_map::node_id_to_str(
self.tcx().items, id,
token::get_ident_interner()),
@ -833,7 +832,7 @@ impl FnCtxt {
Some(ts) => (*ts).clone(),
None => {
self.tcx().sess.bug(
fmt!("no type substs for node %d: %s in fcx %s",
format!("no type substs for node {}: {} in fcx {}",
id, ast_map::node_id_to_str(
self.tcx().items, id,
token::get_ident_interner()),
@ -1212,7 +1211,7 @@ fn check_type_parameter_positions_in_path(function_context: @mut FnCtxt,
function_context.tcx()
.sess
.span_err(path.span,
fmt!("this %s has a lifetime \
format!("this {} has a lifetime \
parameter but no \
lifetime was specified",
name))
@ -1221,7 +1220,7 @@ fn check_type_parameter_positions_in_path(function_context: @mut FnCtxt,
function_context.tcx()
.sess
.span_err(path.span,
fmt!("this %s has no lifetime \
format!("this {} has no lifetime \
parameter but a lifetime \
was specified",
name))
@ -1249,10 +1248,10 @@ fn check_type_parameter_positions_in_path(function_context: @mut FnCtxt,
function_context.tcx()
.sess
.span_err(path.span,
fmt!("the %s referenced by this \
path has %u type \
parameter%s, but %u type \
parameter%s were supplied",
format!("the {} referenced by this \
path has {} type \
parameter{}, but {} type \
parameter{} were supplied",
name,
trait_type_parameter_count,
trait_count_suffix,
@ -1283,7 +1282,7 @@ fn check_type_parameter_positions_in_path(function_context: @mut FnCtxt,
function_context.tcx()
.sess
.span_note(typ.span,
fmt!("this is a %?", def));
format!("this is a {:?}", def));
}
}
}
@ -1303,7 +1302,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
expr: @ast::Expr,
expected: Option<ty::t>,
unifier: &fn()) {
debug!(">> typechecking");
debug2!(">> typechecking");
fn check_method_argument_types(
fcx: @mut FnCtxt,
@ -1329,7 +1328,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
_ => {
fcx.tcx().sess.span_bug(
sp,
fmt!("Method without bare fn type"));
format!("Method without bare fn type"));
}
}
}
@ -1366,8 +1365,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
ast::ForSugar => " (including the closure passed by \
the `for` keyword)"
};
let msg = fmt!("this function takes %u parameter%s but \
%u parameter%s supplied%s",
let msg = format!("this function takes {} parameter{} but \
{} parameter{} supplied{}",
expected_arg_count,
if expected_arg_count == 1 {""}
else {"s"},
@ -1381,7 +1380,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
vec::from_elem(supplied_arg_count, ty::mk_err())
};
debug!("check_argument_types: formal_tys=%?",
debug2!("check_argument_types: formal_tys={:?}",
formal_tys.map(|t| fcx.infcx().ty_to_str(*t)));
// Check the arguments.
@ -1393,7 +1392,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let xs = [false, true];
for check_blocks in xs.iter() {
let check_blocks = *check_blocks;
debug!("check_blocks=%b", check_blocks);
debug2!("check_blocks={}", check_blocks);
// More awful hacks: before we check the blocks, try to do
// an "opportunistic" vtable resolution of any trait
@ -1410,7 +1409,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
};
if is_block == check_blocks {
debug!("checking the argument");
debug2!("checking the argument");
let mut formal_ty = formal_tys[i];
match deref_args {
@ -1459,8 +1458,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
match ty::get(output).sty {
ty::ty_bool => {}
_ => fcx.type_error_message(call_expr.span, |actual| {
fmt!("expected `for` closure to return `bool`, \
but found `%s`", actual) },
format!("expected `for` closure to return `bool`, \
but found `{}`", actual) },
output, None)
}
ty::mk_nil()
@ -1508,8 +1507,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
ty::ty_closure(ty::ClosureTy {sig: ref sig, _}) => sig,
_ => {
fcx.type_error_message(call_expr.span, |actual| {
fmt!("expected function but \
found `%s`", actual) }, fn_ty, None);
format!("expected function but \
found `{}`", actual) }, fn_ty, None);
&error_fn_sig
}
};
@ -1564,12 +1563,12 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
method_map.insert(expr.id, (*entry));
}
None => {
debug!("(checking method call) failing expr is %d", expr.id);
debug2!("(checking method call) failing expr is {}", expr.id);
fcx.type_error_message(expr.span,
|actual| {
fmt!("type `%s` does not implement any method in scope \
named `%s`",
format!("type `{}` does not implement any method in scope \
named `{}`",
actual,
fcx.ccx.tcx.sess.str_of(method_name))
},
@ -1721,8 +1720,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
fcx.write_error(expr.id);
fcx.write_error(rhs.id);
fcx.type_error_message(expr.span, |actual| {
fmt!("binary operation %s cannot be applied \
to type `%s`",
format!("binary operation {} cannot be applied \
to type `{}`",
ast_util::binop_to_str(op), actual)},
lhs_t, None)
@ -1742,8 +1741,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
} else {
fcx.type_error_message(expr.span,
|actual| {
fmt!("binary operation %s cannot be \
applied to type `%s`",
format!("binary operation {} cannot be \
applied to type `{}`",
ast_util::binop_to_str(op),
actual)
},
@ -1771,8 +1770,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
Some(ref name) => {
let if_op_unbound = || {
fcx.type_error_message(ex.span, |actual| {
fmt!("binary operation %s cannot be applied \
to type `%s`",
format!("binary operation {} cannot be applied \
to type `{}`",
ast_util::binop_to_str(op), actual)},
lhs_resolved_t, None)
};
@ -1815,7 +1814,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
DoDerefArgs, DontAutoderefReceiver,
|| {
fcx.type_error_message(ex.span, |actual| {
fmt!("cannot apply unary operator `%s` to type `%s`",
format!("cannot apply unary operator `{}` to type `{}`",
op_str, actual)
}, rhs_t, None);
}, expected_t)
@ -1918,7 +1917,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
ty::mk_closure(tcx, fn_ty_copy)
};
debug!("check_expr_fn_with_unifier fty=%s",
debug2!("check_expr_fn_with_unifier fty={}",
fcx.infcx().ty_to_str(fty));
fcx.write_ty(expr.id, fty);
@ -1952,7 +1951,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
// (1) verify that the class id actually has a field called
// field
debug!("class named %s", ppaux::ty_to_str(tcx, base_t));
debug2!("class named {}", ppaux::ty_to_str(tcx, base_t));
let cls_items = ty::lookup_struct_fields(tcx, base_id);
match lookup_field_ty(tcx, base_id, cls_items,
field, &(*substs)) {
@ -1983,7 +1982,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
fcx.type_error_message(
expr.span,
|actual| {
fmt!("attempted to take value of method `%s` on type `%s` \
format!("attempted to take value of method `{}` on type `{}` \
(try writing an anonymous function)",
token::interner_get(field), actual)
},
@ -1994,7 +1993,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
fcx.type_error_message(
expr.span,
|actual| {
fmt!("attempted access of field `%s` on type `%s`, \
format!("attempted access of field `{}` on type `{}`, \
but no field with that name was found",
token::interner_get(field), actual)
},
@ -2032,14 +2031,14 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
None => {
tcx.sess.span_err(
field.span,
fmt!("structure has no field named `%s`",
format!("structure has no field named `{}`",
tcx.sess.str_of(field.ident)));
error_happened = true;
}
Some((_, true)) => {
tcx.sess.span_err(
field.span,
fmt!("field `%s` specified more than once",
format!("field `{}` specified more than once",
tcx.sess.str_of(field.ident)));
error_happened = true;
}
@ -2079,7 +2078,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
}
tcx.sess.span_err(span,
fmt!("missing field%s: %s",
format!("missing field{}: {}",
if missing_fields.len() == 1 {
""
} else {
@ -2419,7 +2418,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
_ => {
fcx.type_error_message(expr.span,
|actual| {
fmt!("type %s cannot be dereferenced", actual)
format!("type {} cannot be dereferenced", actual)
}, oprnd_t, None);
}
}
@ -2567,7 +2566,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
}
}
ast::ExprForLoop(*) =>
fail!("non-desugared expr_for_loop"),
fail2!("non-desugared expr_for_loop"),
ast::ExprLoop(ref body, _) => {
check_block_no_value(fcx, (body));
if !may_break(tcx, expr.id, body) {
@ -2593,8 +2592,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
_ => match expected {
Some(expected_t) => {
fcx.type_error_message(expr.span, |actual| {
fmt!("last argument in `do` call \
has non-closure type: %s",
format!("last argument in `do` call \
has non-closure type: {}",
actual)
}, expected_t, None);
let err_ty = ty::mk_err();
@ -2615,7 +2614,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
demand::suptype(fcx, b.span, inner_ty, fcx.expr_ty(b));
}
// argh
_ => fail!("expected fn ty")
_ => fail2!("expected fn ty")
}
fcx.write_ty(expr.id, fcx.node_ty(b.id));
}
@ -2659,8 +2658,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let t_1 = fcx.to_ty(t);
let t_e = fcx.expr_ty(e);
debug!("t_1=%s", fcx.infcx().ty_to_str(t_1));
debug!("t_e=%s", fcx.infcx().ty_to_str(t_e));
debug2!("t_1={}", fcx.infcx().ty_to_str(t_1));
debug2!("t_e={}", fcx.infcx().ty_to_str(t_e));
if ty::type_is_error(t_e) {
fcx.write_error(id);
@ -2676,12 +2675,12 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
_ => {
if ty::type_is_nil(t_e) {
fcx.type_error_message(expr.span, |actual| {
fmt!("cast from nil: `%s` as `%s`", actual,
format!("cast from nil: `{}` as `{}`", actual,
fcx.infcx().ty_to_str(t_1))
}, t_e, None);
} else if ty::type_is_nil(t_1) {
fcx.type_error_message(expr.span, |actual| {
fmt!("cast to nil: `%s` as `%s`", actual,
format!("cast to nil: `{}` as `{}`", actual,
fcx.infcx().ty_to_str(t_1))
}, t_e, None);
}
@ -2698,7 +2697,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
} else if t_1_is_char {
if ty::get(te).sty != ty::ty_uint(ast::ty_u8) {
fcx.type_error_message(expr.span, |actual| {
fmt!("only `u8` can be cast as `char`, not `%s`", actual)
format!("only `u8` can be cast as `char`, not `{}`", actual)
}, t_e, None);
}
} else if ty::get(t1).sty == ty::ty_bool {
@ -2752,7 +2751,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
record the issue number in this comment.
*/
fcx.type_error_message(expr.span, |actual| {
fmt!("non-scalar cast: `%s` as `%s`", actual,
format!("non-scalar cast: `{}` as `{}`", actual,
fcx.infcx().ty_to_str(t_1))
}, t_e, None);
}
@ -2864,8 +2863,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let error_message = || {
fcx.type_error_message(expr.span,
|actual| {
fmt!("cannot index a value \
of type `%s`",
format!("cannot index a value \
of type `{}`",
actual)
},
base_t,
@ -2889,9 +2888,9 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
}
}
debug!("type of expr(%d) %s is...", expr.id,
debug2!("type of expr({}) {} is...", expr.id,
syntax::print::pprust::expr_to_str(expr, tcx.sess.intr()));
debug!("... %s, expected is %s",
debug2!("... {}, expected is {}",
ppaux::ty_to_str(tcx, fcx.expr_ty(expr)),
match expected {
Some(t) => ppaux::ty_to_str(tcx, t),
@ -2904,7 +2903,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
pub fn require_integral(fcx: @mut FnCtxt, sp: Span, t: ty::t) {
if !type_is_integral(fcx, sp, t) {
fcx.type_error_message(sp, |actual| {
fmt!("mismatched types: expected integral type but found `%s`",
format!("mismatched types: expected integral type but found `{}`",
actual)
}, t, None);
}
@ -3110,9 +3109,9 @@ pub fn check_instantiable(tcx: ty::ctxt,
item_id: ast::NodeId) {
let item_ty = ty::node_id_to_type(tcx, item_id);
if !ty::is_instantiable(tcx, item_ty) {
tcx.sess.span_err(sp, fmt!("this type cannot be instantiated \
tcx.sess.span_err(sp, format!("this type cannot be instantiated \
without an instance of itself; \
consider using `Option<%s>`",
consider using `Option<{}>`",
ppaux::ty_to_str(tcx, item_ty)));
}
}
@ -3171,7 +3170,7 @@ pub fn check_enum_variants(ccx: @mut CrateCtxt,
match v.node.disr_expr {
Some(e) => {
debug!("disr expr, checking %s", pprust::expr_to_str(e, ccx.tcx.sess.intr()));
debug2!("disr expr, checking {}", pprust::expr_to_str(e, ccx.tcx.sess.intr()));
let fcx = blank_fn_ctxt(ccx, rty, e.id);
let declty = ty::mk_int_var(ccx.tcx, fcx.infcx().next_int_var_id());
@ -3187,7 +3186,7 @@ pub fn check_enum_variants(ccx: @mut CrateCtxt,
ccx.tcx.sess.span_err(e.span, "expected signed integer constant");
}
Err(ref err) => {
ccx.tcx.sess.span_err(e.span, fmt!("expected constant: %s", (*err)));
ccx.tcx.sess.span_err(e.span, format!("expected constant: {}", (*err)));
}
}
},
@ -3301,7 +3300,7 @@ pub fn instantiate_path(fcx: @mut FnCtxt,
def: ast::Def,
span: Span,
node_id: ast::NodeId) {
debug!(">>> instantiate_path");
debug2!(">>> instantiate_path");
let ty_param_count = tpt.generics.type_param_defs.len();
let mut ty_substs_len = 0;
@ -3309,7 +3308,7 @@ pub fn instantiate_path(fcx: @mut FnCtxt,
ty_substs_len += segment.types.len()
}
debug!("tpt=%s ty_param_count=%? ty_substs_len=%?",
debug2!("tpt={} ty_param_count={:?} ty_substs_len={:?}",
tpt.repr(fcx.tcx()),
ty_param_count,
ty_substs_len);
@ -3364,13 +3363,13 @@ pub fn instantiate_path(fcx: @mut FnCtxt,
} else if ty_substs_len > user_type_parameter_count {
fcx.ccx.tcx.sess.span_err
(span,
fmt!("too many type parameters provided: expected %u, found %u",
format!("too many type parameters provided: expected {}, found {}",
user_type_parameter_count, ty_substs_len));
fcx.infcx().next_ty_vars(ty_param_count)
} else if ty_substs_len < user_type_parameter_count {
fcx.ccx.tcx.sess.span_err
(span,
fmt!("not enough type parameters provided: expected %u, found %u",
format!("not enough type parameters provided: expected {}, found {}",
user_type_parameter_count, ty_substs_len));
fcx.infcx().next_ty_vars(ty_param_count)
} else {
@ -3408,7 +3407,7 @@ pub fn instantiate_path(fcx: @mut FnCtxt,
};
fcx.write_ty_substs(node_id, tpt.ty, substs);
debug!("<<<");
debug2!("<<<");
}
// Resolves `typ` by a single level if `typ` is a type variable. If no
@ -3504,7 +3503,7 @@ pub fn check_bounds_are_used(ccx: @mut CrateCtxt,
span: Span,
tps: &OptVec<ast::TyParam>,
ty: ty::t) {
debug!("check_bounds_are_used(n_tps=%u, ty=%s)",
debug2!("check_bounds_are_used(n_tps={}, ty={})",
tps.len(), ppaux::ty_to_str(ccx.tcx, ty));
// make a vector of booleans initially false, set to true when used
@ -3517,7 +3516,7 @@ pub fn check_bounds_are_used(ccx: @mut CrateCtxt,
|t| {
match ty::get(t).sty {
ty::ty_param(param_ty {idx, _}) => {
debug!("Found use of ty param #%u", idx);
debug2!("Found use of ty param \\#{}", idx);
tps_used[idx] = true;
}
_ => ()
@ -3528,7 +3527,7 @@ pub fn check_bounds_are_used(ccx: @mut CrateCtxt,
for (i, b) in tps_used.iter().enumerate() {
if !*b {
ccx.tcx.sess.span_err(
span, fmt!("type parameter `%s` is unused",
span, format!("type parameter `{}` is unused",
ccx.tcx.sess.str_of(tps.get(i).ident)));
}
}
@ -3577,7 +3576,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
}
op => {
tcx.sess.span_err(it.span,
fmt!("unrecognized atomic operation function: `%s`",
format!("unrecognized atomic operation function: `{}`",
op));
return;
}
@ -3860,7 +3859,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
ref other => {
tcx.sess.span_err(it.span,
fmt!("unrecognized intrinsic function: `%s`",
format!("unrecognized intrinsic function: `{}`",
*other));
return;
}
@ -3876,14 +3875,14 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
let i_ty = ty::lookup_item_type(ccx.tcx, local_def(it.id));
let i_n_tps = i_ty.generics.type_param_defs.len();
if i_n_tps != n_tps {
tcx.sess.span_err(it.span, fmt!("intrinsic has wrong number \
of type parameters: found %u, \
expected %u", i_n_tps, n_tps));
tcx.sess.span_err(it.span, format!("intrinsic has wrong number \
of type parameters: found {}, \
expected {}", i_n_tps, n_tps));
} else {
require_same_types(
tcx, None, false, it.span, i_ty.ty, fty,
|| fmt!("intrinsic has wrong type: \
expected `%s`",
|| format!("intrinsic has wrong type: \
expected `{}`",
ppaux::ty_to_str(ccx.tcx, fty)));
}
}


@ -68,7 +68,7 @@ fn encl_region_of_def(fcx: @mut FnCtxt, def: ast::Def) -> ty::Region {
}
}
_ => {
tcx.sess.bug(fmt!("unexpected def in encl_region_of_def: %?",
tcx.sess.bug(format!("unexpected def in encl_region_of_def: {:?}",
def))
}
}
@ -211,7 +211,7 @@ fn visit_local(rcx: &mut Rcx, l: @ast::Local) {
fn constrain_bindings_in_pat(pat: @ast::Pat, rcx: &mut Rcx) {
let tcx = rcx.fcx.tcx();
debug!("regionck::visit_pat(pat=%s)", pat.repr(tcx));
debug2!("regionck::visit_pat(pat={})", pat.repr(tcx));
do pat_util::pat_bindings(tcx.def_map, pat) |_, id, span, _| {
// If we have a variable that contains region'd data, that
// data will be accessible from anywhere that the variable is
@ -244,7 +244,7 @@ fn constrain_bindings_in_pat(pat: @ast::Pat, rcx: &mut Rcx) {
}
fn visit_expr(rcx: &mut Rcx, expr: @ast::Expr) {
debug!("regionck::visit_expr(e=%s, repeating_scope=%?)",
debug2!("regionck::visit_expr(e={}, repeating_scope={:?})",
expr.repr(rcx.fcx.tcx()), rcx.repeating_scope);
let has_method_map = rcx.fcx.inh.method_map.contains_key(&expr.id);
@ -302,7 +302,7 @@ fn visit_expr(rcx: &mut Rcx, expr: @ast::Expr) {
{
let r = rcx.fcx.inh.adjustments.find(&expr.id);
for &adjustment in r.iter() {
debug!("adjustment=%?", adjustment);
debug2!("adjustment={:?}", adjustment);
match *adjustment {
@ty::AutoDerefRef(
ty::AutoDerefRef {autoderefs: autoderefs, autoref: opt_autoref}) =>
@ -515,7 +515,7 @@ fn constrain_callee(rcx: &mut Rcx,
//
// tcx.sess.span_bug(
// callee_expr.span,
// fmt!("Calling non-function: %s", callee_ty.repr(tcx)));
// format!("Calling non-function: {}", callee_ty.repr(tcx)));
}
}
}
@ -535,7 +535,7 @@ fn constrain_call(rcx: &mut Rcx,
//! appear in the arguments appropriately.
let tcx = rcx.fcx.tcx();
debug!("constrain_call(call_expr=%s, implicitly_ref_args=%?)",
debug2!("constrain_call(call_expr={}, implicitly_ref_args={:?})",
call_expr.repr(tcx), implicitly_ref_args);
let callee_ty = rcx.resolve_node_type(callee_id);
if ty::type_is_error(callee_ty) {
@ -597,7 +597,7 @@ fn constrain_derefs(rcx: &mut Rcx,
let tcx = rcx.fcx.tcx();
let r_deref_expr = ty::re_scope(deref_expr.id);
for i in range(0u, derefs) {
debug!("constrain_derefs(deref_expr=?, derefd_ty=%s, derefs=%?/%?",
debug2!("constrain_derefs(deref_expr=?, derefd_ty={}, derefs={:?}/{:?}",
rcx.fcx.infcx().ty_to_str(derefd_ty),
i, derefs);
@ -638,7 +638,7 @@ fn constrain_index(rcx: &mut Rcx,
* includes the deref expr.
*/
debug!("constrain_index(index_expr=?, indexed_ty=%s",
debug2!("constrain_index(index_expr=?, indexed_ty={}",
rcx.fcx.infcx().ty_to_str(indexed_ty));
let r_index_expr = ty::re_scope(index_expr.id);
@ -662,13 +662,13 @@ fn constrain_free_variables(rcx: &mut Rcx,
*/
let tcx = rcx.fcx.ccx.tcx;
debug!("constrain_free_variables(%s, %s)",
debug2!("constrain_free_variables({}, {})",
region.repr(tcx), expr.repr(tcx));
for freevar in get_freevars(tcx, expr.id).iter() {
debug!("freevar def is %?", freevar.def);
debug2!("freevar def is {:?}", freevar.def);
let def = freevar.def;
let en_region = encl_region_of_def(rcx.fcx, def);
debug!("en_region = %s", en_region.repr(tcx));
debug2!("en_region = {}", en_region.repr(tcx));
rcx.fcx.mk_subr(true, infer::FreeVariable(freevar.span),
region, en_region);
}
@ -692,8 +692,8 @@ fn constrain_regions_in_type_of_node(
let ty0 = rcx.resolve_node_type(id);
let adjustment = rcx.fcx.inh.adjustments.find_copy(&id);
let ty = ty::adjust_ty(tcx, origin.span(), ty0, adjustment);
debug!("constrain_regions_in_type_of_node(\
ty=%s, ty0=%s, id=%d, minimum_lifetime=%?, adjustment=%?)",
debug2!("constrain_regions_in_type_of_node(\
ty={}, ty0={}, id={}, minimum_lifetime={:?}, adjustment={:?})",
ty_to_str(tcx, ty), ty_to_str(tcx, ty0),
id, minimum_lifetime, adjustment);
constrain_regions_in_type(rcx, minimum_lifetime, origin, ty)
@ -722,12 +722,12 @@ fn constrain_regions_in_type(
let e = rcx.errors_reported;
let tcx = rcx.fcx.ccx.tcx;
debug!("constrain_regions_in_type(minimum_lifetime=%s, ty=%s)",
debug2!("constrain_regions_in_type(minimum_lifetime={}, ty={})",
region_to_str(tcx, "", false, minimum_lifetime),
ty_to_str(tcx, ty));
do relate_nested_regions(tcx, Some(minimum_lifetime), ty) |r_sub, r_sup| {
debug!("relate(r_sub=%s, r_sup=%s)",
debug2!("relate(r_sub={}, r_sup={})",
region_to_str(tcx, "", false, r_sub),
region_to_str(tcx, "", false, r_sup));
@ -813,7 +813,7 @@ pub mod guarantor {
* to the lifetime of its guarantor (if any).
*/
debug!("guarantor::for_addr_of(base=?)");
debug2!("guarantor::for_addr_of(base=?)");
let guarantor = guarantor(rcx, base);
link(rcx, expr.span, expr.id, guarantor);
@ -826,9 +826,9 @@ pub mod guarantor {
* linked to the lifetime of its guarantor (if any).
*/
debug!("regionck::for_match()");
debug2!("regionck::for_match()");
let discr_guarantor = guarantor(rcx, discr);
debug!("discr_guarantor=%s", discr_guarantor.repr(rcx.tcx()));
debug2!("discr_guarantor={}", discr_guarantor.repr(rcx.tcx()));
for arm in arms.iter() {
for pat in arm.pats.iter() {
link_ref_bindings_in_pat(rcx, *pat, discr_guarantor);
@ -847,10 +847,10 @@ pub mod guarantor {
* region pointers.
*/
debug!("guarantor::for_autoref(autoref=%?)", autoref);
debug2!("guarantor::for_autoref(autoref={:?})", autoref);
let mut expr_ct = categorize_unadjusted(rcx, expr);
debug!(" unadjusted cat=%?", expr_ct.cat);
debug2!(" unadjusted cat={:?}", expr_ct.cat);
expr_ct = apply_autoderefs(
rcx, expr, autoderefs, expr_ct);
@ -898,10 +898,10 @@ pub mod guarantor {
*/
let tcx = rcx.tcx();
debug!("guarantor::for_by_ref(expr=%s, callee_scope=%?)",
debug2!("guarantor::for_by_ref(expr={}, callee_scope={:?})",
expr.repr(tcx), callee_scope);
let expr_cat = categorize(rcx, expr);
debug!("guarantor::for_by_ref(expr=%?, callee_scope=%?) category=%?",
debug2!("guarantor::for_by_ref(expr={:?}, callee_scope={:?}) category={:?}",
expr.id, callee_scope, expr_cat);
let minimum_lifetime = ty::re_scope(callee_scope);
for guarantor in expr_cat.guarantor.iter() {
@ -921,7 +921,7 @@ pub mod guarantor {
* to the lifetime of its guarantor (if any).
*/
debug!("link(id=%?, guarantor=%?)", id, guarantor);
debug2!("link(id={:?}, guarantor={:?})", id, guarantor);
let bound = match guarantor {
None => {
@ -939,7 +939,7 @@ pub mod guarantor {
let rptr_ty = rcx.resolve_node_type(id);
if !ty::type_is_bot(rptr_ty) {
let tcx = rcx.fcx.ccx.tcx;
debug!("rptr_ty=%s", ty_to_str(tcx, rptr_ty));
debug2!("rptr_ty={}", ty_to_str(tcx, rptr_ty));
let r = ty::ty_region(tcx, span, rptr_ty);
rcx.fcx.mk_subr(true, infer::Reborrow(span), r, bound);
}
@ -977,7 +977,7 @@ pub mod guarantor {
* `&expr`).
*/
debug!("guarantor()");
debug2!("guarantor()");
match expr.node {
ast::ExprUnary(_, ast::UnDeref, b) => {
let cat = categorize(rcx, b);
@ -1035,15 +1035,15 @@ pub mod guarantor {
rcx.fcx.tcx(), rcx.fcx.inh.method_map, expr));
None
}
ast::ExprForLoop(*) => fail!("non-desugared expr_for_loop"),
ast::ExprForLoop(*) => fail2!("non-desugared expr_for_loop"),
}
}
fn categorize(rcx: &mut Rcx, expr: @ast::Expr) -> ExprCategorization {
debug!("categorize()");
debug2!("categorize()");
let mut expr_ct = categorize_unadjusted(rcx, expr);
debug!("before adjustments, cat=%?", expr_ct.cat);
debug2!("before adjustments, cat={:?}", expr_ct.cat);
match rcx.fcx.inh.adjustments.find(&expr.id) {
Some(&@ty::AutoAddEnv(*)) => {
@ -1056,7 +1056,7 @@ pub mod guarantor {
}
Some(&@ty::AutoDerefRef(ref adjustment)) => {
debug!("adjustment=%?", adjustment);
debug2!("adjustment={:?}", adjustment);
expr_ct = apply_autoderefs(
rcx, expr, adjustment.autoderefs, expr_ct);
@ -1067,7 +1067,7 @@ pub mod guarantor {
Some(ty::AutoUnsafe(_)) => {
expr_ct.cat.guarantor = None;
expr_ct.cat.pointer = OtherPointer;
debug!("autoref, cat=%?", expr_ct.cat);
debug2!("autoref, cat={:?}", expr_ct.cat);
}
Some(ty::AutoPtr(r, _)) |
Some(ty::AutoBorrowVec(r, _)) |
@ -1078,7 +1078,7 @@ pub mod guarantor {
// expression will be some sort of borrowed pointer.
expr_ct.cat.guarantor = None;
expr_ct.cat.pointer = BorrowedPointer(r);
debug!("autoref, cat=%?", expr_ct.cat);
debug2!("autoref, cat={:?}", expr_ct.cat);
}
}
}
@ -1086,14 +1086,14 @@ pub mod guarantor {
None => {}
}
debug!("result=%?", expr_ct.cat);
debug2!("result={:?}", expr_ct.cat);
return expr_ct.cat;
}
fn categorize_unadjusted(rcx: &mut Rcx,
expr: @ast::Expr)
-> ExprCategorizationType {
debug!("categorize_unadjusted()");
debug2!("categorize_unadjusted()");
let guarantor = {
if rcx.fcx.inh.method_map.contains_key(&expr.id) {
@ -1138,12 +1138,12 @@ pub mod guarantor {
None => {
tcx.sess.span_bug(
expr.span,
fmt!("Autoderef but type not derefable: %s",
format!("Autoderef but type not derefable: {}",
ty_to_str(tcx, ct.ty)));
}
}
debug!("autoderef, cat=%?", ct.cat);
debug2!("autoderef, cat={:?}", ct.cat);
}
return ct;
}
@ -1205,7 +1205,7 @@ pub mod guarantor {
* other pointers.
*/
debug!("link_ref_bindings_in_pat(pat=%s, guarantor=%?)",
debug2!("link_ref_bindings_in_pat(pat={}, guarantor={:?})",
rcx.fcx.pat_to_str(pat), guarantor);
match pat.node {


@ -38,15 +38,15 @@ pub fn replace_bound_regions_in_fn_sig(
for &t in opt_self_ty.iter() { all_tys.push(t) }
debug!("replace_bound_regions_in_fn_sig(self_ty=%?, fn_sig=%s, \
all_tys=%?)",
debug2!("replace_bound_regions_in_fn_sig(self_ty={:?}, fn_sig={}, \
all_tys={:?})",
opt_self_ty.map(|t| ppaux::ty_to_str(tcx, *t)),
ppaux::fn_sig_to_str(tcx, fn_sig),
all_tys.map(|t| ppaux::ty_to_str(tcx, *t)));
let _i = indenter();
let isr = do create_bound_region_mapping(tcx, isr, all_tys) |br| {
debug!("br=%?", br);
debug2!("br={:?}", br);
mapf(br)
};
let new_fn_sig = ty::fold_sig(fn_sig, |t| {
@ -54,9 +54,9 @@ pub fn replace_bound_regions_in_fn_sig(
});
let new_self_ty = opt_self_ty.map(|t| replace_bound_regions(tcx, isr, *t));
debug!("result of replace_bound_regions_in_fn_sig: \
new_self_ty=%?, \
fn_sig=%s",
debug2!("result of replace_bound_regions_in_fn_sig: \
new_self_ty={:?}, \
fn_sig={}",
new_self_ty.map(|t| ppaux::ty_to_str(tcx, *t)),
ppaux::fn_sig_to_str(tcx, &new_fn_sig));
@ -146,8 +146,8 @@ pub fn replace_bound_regions_in_fn_sig(
None if in_fn => r,
None => {
tcx.sess.bug(
fmt!("Bound region not found in \
in_scope_regions list: %s",
format!("Bound region not found in \
in_scope_regions list: {}",
region_to_str(tcx, "", false, r)));
}
}
@ -255,7 +255,7 @@ pub fn relate_free_regions(
* Tests: `src/test/compile-fail/regions-free-region-ordering-*.rs`
*/
debug!("relate_free_regions >>");
debug2!("relate_free_regions >>");
let mut all_tys = ~[];
for arg in fn_sig.inputs.iter() {
@ -266,7 +266,7 @@ pub fn relate_free_regions(
}
for &t in all_tys.iter() {
debug!("relate_free_regions(t=%s)", ppaux::ty_to_str(tcx, t));
debug2!("relate_free_regions(t={})", ppaux::ty_to_str(tcx, t));
relate_nested_regions(tcx, None, t, |a, b| {
match (&a, &b) {
(&ty::re_free(free_a), &ty::re_free(free_b)) => {
@ -277,5 +277,5 @@ pub fn relate_free_regions(
})
}
debug!("<< relate_free_regions");
debug2!("<< relate_free_regions");
}


@ -87,9 +87,9 @@ fn lookup_vtables(vcx: &VtableContext,
type_param_defs: &[ty::TypeParameterDef],
substs: &ty::substs,
is_early: bool) -> vtable_res {
debug!("lookup_vtables(location_info=%?, \
type_param_defs=%s, \
substs=%s",
debug2!("lookup_vtables(location_info={:?}, \
type_param_defs={}, \
substs={}",
location_info,
type_param_defs.repr(vcx.tcx()),
substs.repr(vcx.tcx()));
@ -108,11 +108,11 @@ fn lookup_vtables(vcx: &VtableContext,
result.reverse();
assert_eq!(substs.tps.len(), result.len());
debug!("lookup_vtables result(\
location_info=%?, \
type_param_defs=%s, \
substs=%s, \
result=%s)",
debug2!("lookup_vtables result(\
location_info={:?}, \
type_param_defs={}, \
substs={}, \
result={})",
location_info,
type_param_defs.repr(vcx.tcx()),
substs.repr(vcx.tcx()),
@ -142,20 +142,20 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
// Substitute the values of the type parameters that may
// appear in the bound.
let trait_ref = substs.map_default(trait_ref, |substs| {
debug!("about to subst: %s, %s",
debug2!("about to subst: {}, {}",
trait_ref.repr(tcx), substs.repr(tcx));
trait_ref.subst(tcx, *substs)
});
debug!("after subst: %s", trait_ref.repr(tcx));
debug2!("after subst: {}", trait_ref.repr(tcx));
match lookup_vtable(vcx, location_info, ty, trait_ref, is_early) {
Some(vtable) => param_result.push(vtable),
None => {
vcx.tcx().sess.span_fatal(
location_info.span,
fmt!("failed to find an implementation of \
trait %s for %s",
format!("failed to find an implementation of \
trait {} for {}",
vcx.infcx.trait_ref_to_str(trait_ref),
vcx.infcx.ty_to_str(ty)));
}
@ -163,11 +163,11 @@ fn lookup_vtables_for_param(vcx: &VtableContext,
true
};
debug!("lookup_vtables_for_param result(\
location_info=%?, \
type_param_bounds=%s, \
ty=%s, \
result=%s)",
debug2!("lookup_vtables_for_param result(\
location_info={:?}, \
type_param_bounds={}, \
ty={}, \
result={})",
location_info,
type_param_bounds.repr(vcx.tcx()),
ty.repr(vcx.tcx()),
@ -211,7 +211,7 @@ fn relate_trait_refs(vcx: &VtableContext,
let tcx = vcx.tcx();
tcx.sess.span_err(
location_info.span,
fmt!("expected %s, but found %s (%s)",
format!("expected {}, but found {} ({})",
ppaux::trait_ref_to_str(tcx, &r_exp_trait_ref),
ppaux::trait_ref_to_str(tcx, &r_act_trait_ref),
ty::type_err_to_str(tcx, err)));
@ -228,7 +228,7 @@ fn lookup_vtable(vcx: &VtableContext,
is_early: bool)
-> Option<vtable_origin>
{
debug!("lookup_vtable(ty=%s, trait_ref=%s)",
debug2!("lookup_vtable(ty={}, trait_ref={})",
vcx.infcx.ty_to_str(ty),
vcx.infcx.trait_ref_to_str(trait_ref));
let _i = indenter();
@ -291,7 +291,7 @@ fn lookup_vtable_from_bounds(vcx: &VtableContext,
let mut n_bound = 0;
let mut ret = None;
do ty::each_bound_trait_and_supertraits(tcx, bounds) |bound_trait_ref| {
debug!("checking bounds trait %s",
debug2!("checking bounds trait {}",
bound_trait_ref.repr(vcx.tcx()));
if bound_trait_ref.def_id == trait_ref.def_id {
@ -300,7 +300,7 @@ fn lookup_vtable_from_bounds(vcx: &VtableContext,
bound_trait_ref,
trait_ref);
let vtable = vtable_param(param, n_bound);
debug!("found param vtable: %?",
debug2!("found param vtable: {:?}",
vtable);
ret = Some(vtable);
false
@ -383,7 +383,7 @@ fn search_for_vtable(vcx: &VtableContext,
// Now, in the previous example, for_ty is bound to
// the type self_ty, and substs is bound to [T].
debug!("The self ty is %s and its substs are %s",
debug2!("The self ty is {} and its substs are {}",
vcx.infcx.ty_to_str(for_ty),
vcx.infcx.tys_to_str(substs.tps));
@ -397,8 +397,8 @@ fn search_for_vtable(vcx: &VtableContext,
// some value of U) with some_trait<T>. This would fail if T
// and U weren't compatible.
debug!("(checking vtable) @2 relating trait \
ty %s to of_trait_ref %s",
debug2!("(checking vtable) @2 relating trait \
ty {} to of_trait_ref {}",
vcx.infcx.trait_ref_to_str(trait_ref),
vcx.infcx.trait_ref_to_str(of_trait_ref));
@ -435,9 +435,9 @@ fn search_for_vtable(vcx: &VtableContext,
}
};
debug!("The fixed-up substs are %s - \
debug2!("The fixed-up substs are {} - \
they will be unified with the bounds for \
the target ty, %s",
the target ty, {}",
vcx.infcx.tys_to_str(substs_f.tps),
vcx.infcx.trait_ref_to_str(trait_ref));
@ -487,7 +487,7 @@ fn fixup_substs(vcx: &VtableContext,
do fixup_ty(vcx, location_info, t, is_early).map |t_f| {
match ty::get(*t_f).sty {
ty::ty_trait(_, ref substs_f, _, _, _) => (*substs_f).clone(),
_ => fail!("t_f should be a trait")
_ => fail2!("t_f should be a trait")
}
}
}
@ -502,8 +502,8 @@ fn fixup_ty(vcx: &VtableContext,
Err(e) if !is_early => {
tcx.sess.span_fatal(
location_info.span,
fmt!("cannot determine a type \
for this bounded type parameter: %s",
format!("cannot determine a type \
for this bounded type parameter: {}",
fixup_err_to_str(e)))
}
Err(_) => {
@ -533,7 +533,7 @@ fn connect_trait_tps(vcx: &VtableContext,
fn insert_vtables(fcx: @mut FnCtxt,
callee_id: ast::NodeId,
vtables: vtable_res) {
debug!("insert_vtables(callee_id=%d, vtables=%?)",
debug2!("insert_vtables(callee_id={}, vtables={:?})",
callee_id, vtables.repr(fcx.tcx()));
fcx.inh.vtable_map.insert(callee_id, vtables);
}
@ -554,7 +554,7 @@ pub fn location_info_for_item(item: @ast::item) -> LocationInfo {
pub fn early_resolve_expr(ex: @ast::Expr,
fcx: @mut FnCtxt,
is_early: bool) {
debug!("vtable: early_resolve_expr() ex with id %? (early: %b): %s",
debug2!("vtable: early_resolve_expr() ex with id {:?} (early: {}): {}",
ex.id, is_early, expr_to_str(ex, fcx.tcx().sess.intr()));
let _indent = indenter();
@ -562,15 +562,15 @@ pub fn early_resolve_expr(ex: @ast::Expr,
match ex.node {
ast::ExprPath(*) => {
do fcx.opt_node_ty_substs(ex.id) |substs| {
debug!("vtable resolution on parameter bounds for expr %s",
debug2!("vtable resolution on parameter bounds for expr {}",
ex.repr(fcx.tcx()));
let def = cx.tcx.def_map.get_copy(&ex.id);
let did = ast_util::def_id_of_def(def);
let item_ty = ty::lookup_item_type(cx.tcx, did);
debug!("early resolve expr: def %? %?, %?, %s", ex.id, did, def,
debug2!("early resolve expr: def {:?} {:?}, {:?}, {}", ex.id, did, def,
fcx.infcx().ty_to_str(item_ty.ty));
if has_trait_bounds(*item_ty.generics.type_param_defs) {
debug!("early_resolve_expr: looking up vtables for type params %s",
debug2!("early_resolve_expr: looking up vtables for type params {}",
item_ty.generics.type_param_defs.repr(fcx.tcx()));
let vcx = VtableContext { ccx: fcx.ccx, infcx: fcx.infcx() };
let vtbls = lookup_vtables(&vcx, &location_info_for_expr(ex),
@ -596,7 +596,7 @@ pub fn early_resolve_expr(ex: @ast::Expr,
ast::ExprMethodCall(callee_id, _, _, _, _, _) => {
match ty::method_call_type_param_defs(cx.tcx, fcx.inh.method_map, ex.id) {
Some(type_param_defs) => {
debug!("vtable resolution on parameter bounds for method call %s",
debug2!("vtable resolution on parameter bounds for method call {}",
ex.repr(fcx.tcx()));
if has_trait_bounds(*type_param_defs) {
let substs = fcx.node_ty_substs(callee_id);
@ -612,7 +612,7 @@ pub fn early_resolve_expr(ex: @ast::Expr,
}
}
ast::ExprCast(src, _) => {
debug!("vtable resolution on expr %s", ex.repr(fcx.tcx()));
debug2!("vtable resolution on expr {}", ex.repr(fcx.tcx()));
let target_ty = fcx.expr_ty(ex);
match ty::get(target_ty).sty {
// Bounds of type's contents are not checked here, but in kind.rs.
@ -635,7 +635,7 @@ pub fn early_resolve_expr(ex: @ast::Expr,
(&ty::ty_rptr(_, mt), ty::RegionTraitStore(*))
if !mutability_allowed(mt.mutbl, target_mutbl) => {
fcx.tcx().sess.span_err(ex.span,
fmt!("types differ in mutability"));
format!("types differ in mutability"));
}
(&ty::ty_box(mt), ty::BoxTraitStore) |
@ -691,24 +691,24 @@ pub fn early_resolve_expr(ex: @ast::Expr,
(_, ty::UniqTraitStore) => {
fcx.ccx.tcx.sess.span_err(
ex.span,
fmt!("can only cast an ~-pointer \
to a ~-object, not a %s",
format!("can only cast an ~-pointer \
to a ~-object, not a {}",
ty::ty_sort_str(fcx.tcx(), ty)));
}
(_, ty::BoxTraitStore) => {
fcx.ccx.tcx.sess.span_err(
ex.span,
fmt!("can only cast an @-pointer \
to an @-object, not a %s",
format!("can only cast an @-pointer \
to an @-object, not a {}",
ty::ty_sort_str(fcx.tcx(), ty)));
}
(_, ty::RegionTraitStore(_)) => {
fcx.ccx.tcx.sess.span_err(
ex.span,
fmt!("can only cast an &-pointer \
to an &-object, not a %s",
format!("can only cast an &-pointer \
to an &-object, not a {}",
ty::ty_sort_str(fcx.tcx(), ty)));
}
}
@ -753,7 +753,7 @@ pub fn resolve_impl(ccx: @mut CrateCtxt, impl_item: @ast::item) {
trait_bounds: ~[trait_ref]
};
let t = ty::node_id_to_type(ccx.tcx, impl_item.id);
debug!("=== Doing a self lookup now.");
debug2!("=== Doing a self lookup now.");
// Right now, we don't have any place to store this.
// We will need to make one so we can use this information
// for compiling default methods that refer to supertraits.

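Editorial note: the change repeated in every hunk of this commit is a mechanical translation of the old printf-style specifiers accepted by fmt!/debug!/fail! into the brace placeholders accepted by format!/debug2!/fail2!: %s, %d, %u and %b become {}, %? becomes {:?}, and %x becomes {:x}; the argument lists are left untouched. A minimal sketch of the same kind of call sites in present-day Rust (the 2013 syntax such as ~str and @mut no longer compiles, so the names below are illustrative only):

    fn main() {
        let path = "lib/foo.rs";           // stand-in for a crate path
        let snapshot_len = 3u32;
        let undo_item = Some(7);           // any type implementing Debug
        println!("linking: {}", path);             // old: debug!("linking: %s", path)
        println!("snapshot()={}", snapshot_len);   // old: debug!("snapshot()=%u", ...)
        println!("undo_item={:?}", undo_item);     // old: debug!("undo_item=%?", undo_item)
        println!("modes={:x}", 0x2au32);           // old: debug!("modes=%x", modes)
    }

Because only the format strings change, the hunks below consist almost entirely of paired one-line removals and additions.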

@ -41,8 +41,8 @@ fn resolve_type_vars_in_type(fcx: @mut FnCtxt, sp: Span, typ: ty::t)
if !fcx.ccx.tcx.sess.has_errors() {
fcx.ccx.tcx.sess.span_err(
sp,
fmt!("cannot determine a type \
for this expression: %s",
format!("cannot determine a type \
for this expression: {}",
infer::fixup_err_to_str(e)))
}
return None;
@ -70,8 +70,8 @@ fn resolve_method_map_entry(fcx: @mut FnCtxt, sp: Span, id: ast::NodeId) {
for t in r.iter() {
let method_map = fcx.ccx.method_map;
let new_entry = method_map_entry { self_ty: *t, ..*mme };
debug!("writeback::resolve_method_map_entry(id=%?, \
new_entry=%?)",
debug2!("writeback::resolve_method_map_entry(id={:?}, \
new_entry={:?})",
id, new_entry);
method_map.insert(id, new_entry);
}
@ -88,7 +88,7 @@ fn resolve_vtable_map_entry(fcx: @mut FnCtxt, sp: Span, id: ast::NodeId) {
let r_origins = resolve_origins(fcx, sp, *origins);
let vtable_map = fcx.ccx.vtable_map;
vtable_map.insert(id, r_origins);
debug!("writeback::resolve_vtable_map_entry(id=%d, vtables=%?)",
debug2!("writeback::resolve_vtable_map_entry(id={}, vtables={:?})",
id, r_origins.repr(fcx.tcx()));
}
}
@ -128,12 +128,12 @@ fn resolve_type_vars_for_node(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId)
Err(e) => {
// This should not, I think, happen:
fcx.ccx.tcx.sess.span_err(
sp, fmt!("cannot resolve bound for closure: %s",
sp, format!("cannot resolve bound for closure: {}",
infer::fixup_err_to_str(e)));
}
Ok(r1) => {
let resolved_adj = @ty::AutoAddEnv(r1, s);
debug!("Adjustments for node %d: %?", id, resolved_adj);
debug2!("Adjustments for node {}: {:?}", id, resolved_adj);
fcx.tcx().adjustments.insert(id, resolved_adj);
}
}
@ -146,7 +146,7 @@ fn resolve_type_vars_for_node(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId)
Err(e) => {
// This should not, I think, happen.
fcx.ccx.tcx.sess.span_err(
sp, fmt!("cannot resolve scope of borrow: %s",
sp, format!("cannot resolve scope of borrow: {}",
infer::fixup_err_to_str(e)));
r
}
@ -162,7 +162,7 @@ fn resolve_type_vars_for_node(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId)
autoderefs: adj.autoderefs,
autoref: resolved_autoref,
});
debug!("Adjustments for node %d: %?", id, resolved_adj);
debug2!("Adjustments for node {}: {:?}", id, resolved_adj);
fcx.tcx().adjustments.insert(id, resolved_adj);
}
}
@ -176,7 +176,7 @@ fn resolve_type_vars_for_node(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId)
}
Some(t) => {
debug!("resolve_type_vars_for_node(id=%d, n_ty=%s, t=%s)",
debug2!("resolve_type_vars_for_node(id={}, n_ty={}, t={})",
id, ppaux::ty_to_str(tcx, n_ty), ppaux::ty_to_str(tcx, t));
write_ty_to_tcx(tcx, id, t);
let mut ret = Some(t);
@ -284,7 +284,7 @@ fn visit_pat(p: @ast::Pat, wbcx: &mut WbCtxt) {
}
resolve_type_vars_for_node(wbcx, p.span, p.id);
debug!("Type for pattern binding %s (id %d) resolved to %s",
debug2!("Type for pattern binding {} (id {}) resolved to {}",
pat_to_str(p, wbcx.fcx.ccx.tcx.sess.intr()), p.id,
wbcx.fcx.infcx().ty_to_str(
ty::node_id_to_type(wbcx.fcx.ccx.tcx,
@ -297,7 +297,7 @@ fn visit_local(l: @ast::Local, wbcx: &mut WbCtxt) {
let var_ty = wbcx.fcx.local_ty(l.span, l.id);
match resolve_type(wbcx.fcx.infcx(), var_ty, resolve_all | force_all) {
Ok(lty) => {
debug!("Type for local %s (id %d) resolved to %s",
debug2!("Type for local {} (id {}) resolved to {}",
pat_to_str(l.pat, wbcx.fcx.tcx().sess.intr()),
l.id,
wbcx.fcx.infcx().ty_to_str(lty));
@ -306,8 +306,8 @@ fn visit_local(l: @ast::Local, wbcx: &mut WbCtxt) {
Err(e) => {
wbcx.fcx.ccx.tcx.sess.span_err(
l.span,
fmt!("cannot determine a type \
for this local variable: %s",
format!("cannot determine a type \
for this local variable: {}",
infer::fixup_err_to_str(e)));
wbcx.success = false;
}


@ -76,7 +76,7 @@ pub fn get_base_type(inference_context: @mut InferCtxt,
match get(resolved_type).sty {
ty_enum(*) | ty_trait(*) | ty_struct(*) => {
debug!("(getting base type) found base type");
debug2!("(getting base type) found base type");
Some(resolved_type)
}
@ -85,7 +85,7 @@ pub fn get_base_type(inference_context: @mut InferCtxt,
ty_infer(*) | ty_param(*) | ty_self(*) | ty_type | ty_opaque_box |
ty_opaque_closure_ptr(*) | ty_unboxed_vec(*) | ty_err | ty_box(_) |
ty_uniq(_) | ty_ptr(_) | ty_rptr(_, _) => {
debug!("(getting base type) no base type; found %?",
debug2!("(getting base type) no base type; found {:?}",
get(original_type).sty);
None
}
@ -135,7 +135,7 @@ pub fn get_base_type_def_id(inference_context: @mut InferCtxt,
return Some(def_id);
}
_ => {
fail!("get_base_type() returned a type that wasn't an \
fail2!("get_base_type() returned a type that wasn't an \
enum, struct, or trait");
}
}
@ -160,7 +160,7 @@ struct CoherenceCheckVisitor { cc: CoherenceChecker }
impl visit::Visitor<()> for CoherenceCheckVisitor {
fn visit_item(&mut self, item:@item, _:()) {
// debug!("(checking coherence) item '%s'",
// debug2!("(checking coherence) item '{}'",
// self.cc.crate_context.tcx.sess.str_of(item.ident));
match item.node {
@ -266,8 +266,8 @@ impl CoherenceChecker {
// base type.
if associated_traits.len() == 0 {
debug!("(checking implementation) no associated traits for item \
'%s'",
debug2!("(checking implementation) no associated traits for item \
'{}'",
self.crate_context.tcx.sess.str_of(item.ident));
match get_base_type_def_id(self.inference_context,
@ -290,7 +290,7 @@ impl CoherenceChecker {
for associated_trait in associated_traits.iter() {
let trait_ref = ty::node_id_to_trait_ref(
self.crate_context.tcx, associated_trait.ref_id);
debug!("(checking implementation) adding impl for trait '%s', item '%s'",
debug2!("(checking implementation) adding impl for trait '{}', item '{}'",
trait_ref.repr(self.crate_context.tcx),
self.crate_context.tcx.sess.str_of(item.ident));
@ -325,7 +325,7 @@ impl CoherenceChecker {
trait_ref: &ty::TraitRef,
all_methods: &mut ~[@Method]) {
let tcx = self.crate_context.tcx;
debug!("instantiate_default_methods(impl_id=%?, trait_ref=%s)",
debug2!("instantiate_default_methods(impl_id={:?}, trait_ref={})",
impl_id, trait_ref.repr(tcx));
let impl_poly_type = ty::lookup_item_type(tcx, impl_id);
@ -336,7 +336,7 @@ impl CoherenceChecker {
let new_id = tcx.sess.next_node_id();
let new_did = local_def(new_id);
debug!("new_did=%? trait_method=%s", new_did, trait_method.repr(tcx));
debug2!("new_did={:?} trait_method={}", new_did, trait_method.repr(tcx));
// Create substitutions for the various trait parameters.
let new_method_ty =
@ -348,7 +348,7 @@ impl CoherenceChecker {
*trait_method,
Some(trait_method.def_id));
debug!("new_method_ty=%s", new_method_ty.repr(tcx));
debug2!("new_method_ty={}", new_method_ty.repr(tcx));
all_methods.push(new_method_ty);
// construct the polytype for the method based on the method_ty
@ -364,7 +364,7 @@ impl CoherenceChecker {
generics: new_generics,
ty: ty::mk_bare_fn(tcx, new_method_ty.fty.clone())
};
debug!("new_polytype=%s", new_polytype.repr(tcx));
debug2!("new_polytype={}", new_polytype.repr(tcx));
tcx.tcache.insert(new_did, new_polytype);
tcx.methods.insert(new_did, new_method_ty);
@ -440,7 +440,7 @@ impl CoherenceChecker {
let session = self.crate_context.tcx.sess;
session.span_err(
self.span_of_impl(implementation_b),
fmt!("conflicting implementations for trait `%s`",
format!("conflicting implementations for trait `{}`",
ty::item_path_str(self.crate_context.tcx,
trait_def_id)));
session.span_note(self.span_of_impl(implementation_a),
@ -557,11 +557,11 @@ impl CoherenceChecker {
let r = ty::trait_methods(tcx, trait_did);
for method in r.iter() {
debug!("checking for %s", method.ident.repr(tcx));
debug2!("checking for {}", method.ident.repr(tcx));
if provided_names.contains(&method.ident.name) { loop; }
tcx.sess.span_err(trait_ref_span,
fmt!("missing method `%s`",
format!("missing method `{}`",
tcx.sess.str_of(method.ident)));
}
}


@ -129,8 +129,8 @@ impl AstConv for CrateCtxt {
ty_of_foreign_item(self, foreign_item, abis)
}
ref x => {
self.tcx.sess.bug(fmt!("unexpected sort of item \
in get_item_ty(): %?", (*x)));
self.tcx.sess.bug(format!("unexpected sort of item \
in get_item_ty(): {:?}", (*x)));
}
}
}
@ -347,7 +347,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt,
let substd_type_param_defs = m.generics.type_param_defs.subst(tcx, &substs);
new_type_param_defs.push_all(*substd_type_param_defs);
debug!("static method %s type_param_defs=%s ty=%s, substs=%s",
debug2!("static method {} type_param_defs={} ty={}, substs={}",
m.def_id.repr(tcx),
new_type_param_defs.repr(tcx),
ty.repr(tcx),
@ -453,7 +453,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
trait_m: &ty::Method,
trait_substs: &ty::substs,
self_ty: ty::t) {
debug!("compare_impl_method()");
debug2!("compare_impl_method()");
let infcx = infer::new_infer_ctxt(tcx);
let impl_m = &cm.mty;
@ -470,7 +470,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
(&ast::sty_static, _) => {
tcx.sess.span_err(
cm.span,
fmt!("method `%s` has a `%s` declaration in the impl, \
format!("method `{}` has a `{}` declaration in the impl, \
but not in the trait",
tcx.sess.str_of(trait_m.ident),
explicit_self_to_str(&impl_m.explicit_self, tcx.sess.intr())));
@ -479,7 +479,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
(_, &ast::sty_static) => {
tcx.sess.span_err(
cm.span,
fmt!("method `%s` has a `%s` declaration in the trait, \
format!("method `{}` has a `{}` declaration in the trait, \
but not in the impl",
tcx.sess.str_of(trait_m.ident),
explicit_self_to_str(&trait_m.explicit_self, tcx.sess.intr())));
@ -495,8 +495,8 @@ pub fn compare_impl_method(tcx: ty::ctxt,
if num_impl_m_type_params != num_trait_m_type_params {
tcx.sess.span_err(
cm.span,
fmt!("method `%s` has %u type %s, but its trait \
declaration has %u type %s",
format!("method `{}` has {} type {}, but its trait \
declaration has {} type {}",
tcx.sess.str_of(trait_m.ident),
num_impl_m_type_params,
pluralize(num_impl_m_type_params, ~"parameter"),
@ -508,8 +508,8 @@ pub fn compare_impl_method(tcx: ty::ctxt,
if impl_m.fty.sig.inputs.len() != trait_m.fty.sig.inputs.len() {
tcx.sess.span_err(
cm.span,
fmt!("method `%s` has %u parameter%s \
but the trait has %u",
format!("method `{}` has {} parameter{} \
but the trait has {}",
tcx.sess.str_of(trait_m.ident),
impl_m.fty.sig.inputs.len(),
if impl_m.fty.sig.inputs.len() == 1 { "" } else { "s" },
@ -529,8 +529,8 @@ pub fn compare_impl_method(tcx: ty::ctxt,
if !extra_bounds.is_empty() {
tcx.sess.span_err(
cm.span,
fmt!("in method `%s`, \
type parameter %u requires `%s`, \
format!("in method `{}`, \
type parameter {} requires `{}`, \
which is not required by \
the corresponding type parameter \
in the trait declaration",
@ -548,10 +548,10 @@ pub fn compare_impl_method(tcx: ty::ctxt,
{
tcx.sess.span_err(
cm.span,
fmt!("in method `%s`, \
type parameter %u has %u trait %s, but the \
format!("in method `{}`, \
type parameter {} has {} trait {}, but the \
corresponding type parameter in \
the trait declaration has %u trait %s",
the trait declaration has {} trait {}",
tcx.sess.str_of(trait_m.ident),
i, impl_param_def.bounds.trait_bounds.len(),
pluralize(impl_param_def.bounds.trait_bounds.len(),
@ -632,10 +632,10 @@ pub fn compare_impl_method(tcx: ty::ctxt,
// that correspond to the parameters we will find on the impl
// - replace self region with a fresh, dummy region
let impl_fty = {
debug!("impl_fty (pre-subst): %s", ppaux::ty_to_str(tcx, impl_fty));
debug2!("impl_fty (pre-subst): {}", ppaux::ty_to_str(tcx, impl_fty));
replace_bound_self(tcx, impl_fty, dummy_self_r)
};
debug!("impl_fty (post-subst): %s", ppaux::ty_to_str(tcx, impl_fty));
debug2!("impl_fty (post-subst): {}", ppaux::ty_to_str(tcx, impl_fty));
let trait_fty = {
let num_trait_m_type_params = trait_m.generics.type_param_defs.len();
let dummy_tps = do vec::from_fn(num_trait_m_type_params) |i| {
@ -649,11 +649,11 @@ pub fn compare_impl_method(tcx: ty::ctxt,
self_ty: Some(self_ty),
tps: vec::append(trait_tps, dummy_tps)
};
debug!("trait_fty (pre-subst): %s substs=%s",
debug2!("trait_fty (pre-subst): {} substs={}",
trait_fty.repr(tcx), substs.repr(tcx));
ty::subst(tcx, &substs, trait_fty)
};
debug!("trait_fty (post-subst): %s", trait_fty.repr(tcx));
debug2!("trait_fty (post-subst): {}", trait_fty.repr(tcx));
match infer::mk_subty(infcx, false, infer::MethodCompatCheck(cm.span),
impl_fty, trait_fty) {
@ -661,7 +661,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
result::Err(ref terr) => {
tcx.sess.span_err(
cm.span,
fmt!("method `%s` has an incompatible type: %s",
format!("method `{}` has an incompatible type: {}",
tcx.sess.str_of(trait_m.ident),
ty::type_err_to_str(tcx, terr)));
ty::note_and_explain_type_err(tcx, terr);
@ -709,7 +709,7 @@ pub fn check_methods_against_trait(ccx: &CrateCtxt,
// This method is not part of the trait
tcx.sess.span_err(
impl_m.span,
fmt!("method `%s` is not a member of trait `%s`",
format!("method `{}` is not a member of trait `{}`",
tcx.sess.str_of(impl_m.mty.ident),
path_to_str(&a_trait_ty.path, tcx.sess.intr())));
}
@ -835,7 +835,7 @@ pub fn ensure_no_ty_param_bounds(ccx: &CrateCtxt,
if ty_param.bounds.len() > 0 {
ccx.tcx.sess.span_err(
span,
fmt!("trait bounds are not allowed in %s definitions",
format!("trait bounds are not allowed in {} definitions",
thing));
}
}
@ -844,7 +844,7 @@ pub fn ensure_no_ty_param_bounds(ccx: &CrateCtxt,
pub fn convert(ccx: &CrateCtxt, it: &ast::item) {
let tcx = ccx.tcx;
let rp = tcx.region_paramd_items.find(&it.id).map_move(|x| *x);
debug!("convert: item %s with id %d rp %?",
debug2!("convert: item {} with id {} rp {:?}",
tcx.sess.str_of(it.ident), it.id, rp);
match it.node {
// These don't define types.
@ -1000,8 +1000,8 @@ pub fn convert_foreign(ccx: &CrateCtxt, i: &ast::foreign_item) {
let abis = match ccx.tcx.items.find(&i.id) {
Some(&ast_map::node_foreign_item(_, abis, _, _)) => abis,
ref x => {
ccx.tcx.sess.bug(fmt!("unexpected sort of item \
in get_item_ty(): %?", (*x)));
ccx.tcx.sess.bug(format!("unexpected sort of item \
in get_item_ty(): {:?}", (*x)));
}
};
@ -1038,7 +1038,7 @@ pub fn instantiate_trait_ref(ccx: &CrateCtxt,
_ => {
ccx.tcx.sess.span_fatal(
ast_trait_ref.path.span,
fmt!("%s is not a trait",
format!("{} is not a trait",
path_to_str(&ast_trait_ref.path,
ccx.tcx.sess.intr())));
}
@ -1051,7 +1051,7 @@ fn get_trait_def(ccx: &CrateCtxt, trait_id: ast::DefId) -> @ty::TraitDef {
} else {
match ccx.tcx.items.get(&trait_id.node) {
&ast_map::node_item(item, _) => trait_def_of_item(ccx, item),
_ => ccx.tcx.sess.bug(fmt!("get_trait_def(%d): not an item",
_ => ccx.tcx.sess.bug(format!("get_trait_def({}): not an item",
trait_id.node))
}
}
@ -1083,7 +1083,7 @@ pub fn trait_def_of_item(ccx: &CrateCtxt, it: &ast::item) -> @ty::TraitDef {
ref s => {
tcx.sess.span_bug(
it.span,
fmt!("trait_def_of_item invoked on %?", s));
format!("trait_def_of_item invoked on {:?}", s));
}
}
}
@ -1120,7 +1120,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::item)
},
ty: ty::mk_bare_fn(ccx.tcx, tofd)
};
debug!("type of %s (id %d) is %s",
debug2!("type of {} (id {}) is {}",
tcx.sess.str_of(it.ident),
it.id,
ppaux::ty_to_str(tcx, tpt.ty));
@ -1161,7 +1161,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::item)
ast::item_trait(*) => {
tcx.sess.span_bug(
it.span,
fmt!("Invoked ty_of_item on trait"));
format!("Invoked ty_of_item on trait"));
}
ast::item_struct(_, ref generics) => {
let (ty_generics, substs) = mk_item_substs(ccx, generics, rp, None);
@ -1174,8 +1174,8 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::item)
return tpt;
}
ast::item_impl(*) | ast::item_mod(_) |
ast::item_foreign_mod(_) => fail!(),
ast::item_mac(*) => fail!("item macros unimplemented")
ast::item_foreign_mod(_) => fail2!(),
ast::item_mac(*) => fail2!("item macros unimplemented")
}
}
@ -1222,7 +1222,7 @@ pub fn ty_generics(ccx: &CrateCtxt,
def_id: local_def(param.id),
bounds: bounds
};
debug!("def for param: %s", def.repr(ccx.tcx));
debug2!("def for param: {}", def.repr(ccx.tcx));
ccx.tcx.ty_param_defs.insert(param.id, def);
def
}


@ -87,7 +87,7 @@ pub struct Coerce(CombineFields);
impl Coerce {
pub fn tys(&self, a: ty::t, b: ty::t) -> CoerceResult {
debug!("Coerce.tys(%s => %s)",
debug2!("Coerce.tys({} => {})",
a.inf_str(self.infcx),
b.inf_str(self.infcx));
let _indent = indenter();
@ -172,8 +172,8 @@ impl Coerce {
Err(e) => {
self.infcx.tcx.sess.span_bug(
self.trace.origin.span(),
fmt!("Failed to resolve even without \
any force options: %?", e));
format!("Failed to resolve even without \
any force options: {:?}", e));
}
}
}
@ -184,7 +184,7 @@ impl Coerce {
b: ty::t,
mt_b: ty::mt)
-> CoerceResult {
debug!("coerce_borrowed_pointer(a=%s, sty_a=%?, b=%s, mt_b=%?)",
debug2!("coerce_borrowed_pointer(a={}, sty_a={:?}, b={}, mt_b={:?})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx), mt_b);
@ -221,7 +221,7 @@ impl Coerce {
sty_a: &ty::sty,
b: ty::t)
-> CoerceResult {
debug!("coerce_borrowed_string(a=%s, sty_a=%?, b=%s)",
debug2!("coerce_borrowed_string(a={}, sty_a={:?}, b={})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx));
@ -248,7 +248,7 @@ impl Coerce {
b: ty::t,
mt_b: ty::mt)
-> CoerceResult {
debug!("coerce_borrowed_vector(a=%s, sty_a=%?, b=%s)",
debug2!("coerce_borrowed_vector(a={}, sty_a={:?}, b={})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx));
@ -277,7 +277,7 @@ impl Coerce {
b: ty::t,
b_mutbl: ast::Mutability) -> CoerceResult
{
debug!("coerce_borrowed_object(a=%s, sty_a=%?, b=%s)",
debug2!("coerce_borrowed_object(a={}, sty_a={:?}, b={})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx));
@ -306,7 +306,7 @@ impl Coerce {
sty_a: &ty::sty,
b: ty::t)
-> CoerceResult {
debug!("coerce_borrowed_fn(a=%s, sty_a=%?, b=%s)",
debug2!("coerce_borrowed_fn(a={}, sty_a={:?}, b={})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx));
@ -361,7 +361,7 @@ impl Coerce {
* "rust" fn`) into a closure.
*/
debug!("coerce_from_bare_fn(a=%s, b=%s)",
debug2!("coerce_from_bare_fn(a={}, b={})",
a.inf_str(self.infcx), b.inf_str(self.infcx));
if !fn_ty_a.abis.is_rust() {
@ -389,7 +389,7 @@ impl Coerce {
b: ty::t,
mt_b: ty::mt)
-> CoerceResult {
debug!("coerce_unsafe_ptr(a=%s, sty_a=%?, b=%s)",
debug2!("coerce_unsafe_ptr(a={}, sty_a={:?}, b={})",
a.inf_str(self.infcx), sty_a,
b.inf_str(self.infcx));


@ -116,7 +116,7 @@ pub trait Combine {
// substs and one of them has a self_ty and one
// doesn't...? I could be wrong about this.
self.infcx().tcx.sess.bug(
fmt!("substitution a had a self_ty \
format!("substitution a had a self_ty \
and substitution b didn't, \
or vice versa"));
}
@ -270,7 +270,7 @@ pub trait Combine {
fn vstores(&self, vk: ty::terr_vstore_kind,
a: ty::vstore, b: ty::vstore) -> cres<ty::vstore> {
debug!("%s.vstores(a=%?, b=%?)", self.tag(), a, b);
debug2!("{}.vstores(a={:?}, b={:?})", self.tag(), a, b);
match (a, b) {
(ty::vstore_slice(a_r), ty::vstore_slice(b_r)) => {
@ -295,7 +295,7 @@ pub trait Combine {
b: ty::TraitStore)
-> cres<ty::TraitStore> {
debug!("%s.trait_stores(a=%?, b=%?)", self.tag(), a, b);
debug2!("{}.trait_stores(a={:?}, b={:?})", self.tag(), a, b);
match (a, b) {
(ty::RegionTraitStore(a_r), ty::RegionTraitStore(b_r)) => {
@ -365,7 +365,7 @@ pub fn eq_tys<C:Combine>(this: &C, a: ty::t, b: ty::t) -> ures {
pub fn eq_regions<C:Combine>(this: &C, a: ty::Region, b: ty::Region)
-> ures {
debug!("eq_regions(%s, %s)",
debug2!("eq_regions({}, {})",
a.inf_str(this.infcx()),
b.inf_str(this.infcx()));
let sub = this.sub();
@ -406,8 +406,8 @@ pub fn eq_opt_regions<C:Combine>(
// consistently have a region parameter or not have a
// region parameter.
this.infcx().tcx.sess.bug(
fmt!("substitution a had opt_region %s and \
b had opt_region %s",
format!("substitution a had opt_region {} and \
b had opt_region {}",
a.inf_str(this.infcx()),
b.inf_str(this.infcx())));
}
@ -446,7 +446,7 @@ pub fn super_tys<C:Combine>(
(&ty::ty_infer(TyVar(_)), _) |
(_, &ty::ty_infer(TyVar(_))) => {
tcx.sess.bug(
fmt!("%s: bot and var types should have been handled (%s,%s)",
format!("{}: bot and var types should have been handled ({},{})",
this.tag(),
a.inf_str(this.infcx()),
b.inf_str(this.infcx())));


@ -163,7 +163,7 @@ impl ErrorReporting for InferCtxt {
self.tcx.sess.span_err(
trace.origin.span(),
fmt!("%s: %s (%s)",
format!("{}: {} ({})",
message_root_str,
expected_found_str,
ty::type_err_to_str(tcx, terr)));
@ -173,7 +173,7 @@ impl ErrorReporting for InferCtxt {
fn values_str(@mut self, values: &ValuePairs) -> Option<~str> {
/*!
* Returns a string of the form "expected `%s` but found `%s`",
* Returns a string of the form "expected `{}` but found `{}`",
* or None if this is a derived error.
*/
match *values {
@ -201,7 +201,7 @@ impl ErrorReporting for InferCtxt {
return None;
}
Some(fmt!("expected `%s` but found `%s`",
Some(format!("expected `{}` but found `{}`",
expected.user_string(self.tcx),
found.user_string(self.tcx)))
}
@ -284,7 +284,7 @@ impl ErrorReporting for InferCtxt {
infer::IndexSlice(span) => {
self.tcx.sess.span_err(
span,
fmt!("index of slice outside its lifetime"));
format!("index of slice outside its lifetime"));
note_and_explain_region(
self.tcx,
"the slice is only valid for ",
@ -375,7 +375,7 @@ impl ErrorReporting for InferCtxt {
infer::ReferenceOutlivesReferent(ty, span) => {
self.tcx.sess.span_err(
span,
fmt!("in type `%s`, pointer has a longer lifetime than \
format!("in type `{}`, pointer has a longer lifetime than \
the data it references",
ty.user_string(self.tcx)));
note_and_explain_region(
@ -400,7 +400,7 @@ impl ErrorReporting for InferCtxt {
sup_region: Region) {
self.tcx.sess.span_err(
var_origin.span(),
fmt!("cannot infer an appropriate lifetime \
format!("cannot infer an appropriate lifetime \
due to conflicting requirements"));
note_and_explain_region(
@ -411,7 +411,7 @@ impl ErrorReporting for InferCtxt {
self.tcx.sess.span_note(
sup_origin.span(),
fmt!("...due to the following expression"));
format!("...due to the following expression"));
note_and_explain_region(
self.tcx,
@ -421,7 +421,7 @@ impl ErrorReporting for InferCtxt {
self.tcx.sess.span_note(
sub_origin.span(),
fmt!("...due to the following expression"));
format!("...due to the following expression"));
}
fn report_sup_sup_conflict(@mut self,
@ -432,7 +432,7 @@ impl ErrorReporting for InferCtxt {
region2: Region) {
self.tcx.sess.span_err(
var_origin.span(),
fmt!("cannot infer an appropriate lifetime \
format!("cannot infer an appropriate lifetime \
due to conflicting requirements"));
note_and_explain_region(
@ -443,7 +443,7 @@ impl ErrorReporting for InferCtxt {
self.tcx.sess.span_note(
origin1.span(),
fmt!("...due to the following expression"));
format!("...due to the following expression"));
note_and_explain_region(
self.tcx,
@ -453,7 +453,7 @@ impl ErrorReporting for InferCtxt {
self.tcx.sess.span_note(
origin2.span(),
fmt!("...due to the following expression"));
format!("...due to the following expression"));
}
}

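Several diagnostics in this file carry no arguments at all ("index of slice outside its lifetime", "...due to the following expression"), so the conversion only swaps the macro name; there are no placeholders to rewrite. With no placeholders, format! simply yields an owned copy of the literal, as this present-day illustration shows:

    fn main() {
        // A placeholder-free format! is equivalent to converting the
        // literal into an owned string.
        let a: String = format!("index of slice outside its lifetime");
        let b = "index of slice outside its lifetime".to_string();
        assert_eq!(a, b);
    }

The commit keeps the macro wrapper at these call sites rather than switching them to plain literals, presumably to keep the conversion mechanical.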

@ -44,7 +44,7 @@ impl Combine for Glb {
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
let tcx = self.infcx.tcx;
debug!("%s.mts(%s, %s)",
debug2!("{}.mts({}, {})",
self.tag(),
mt_to_str(tcx, a),
mt_to_str(tcx, b));
@ -100,7 +100,7 @@ impl Combine for Glb {
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("%s.regions(%?, %?)",
debug2!("{}.regions({:?}, {:?})",
self.tag(),
a.inf_str(self.infcx),
b.inf_str(self.infcx));
@ -121,7 +121,7 @@ impl Combine for Glb {
// Note: this is a subtle algorithm. For a full explanation,
// please see the large comment in `region_inference.rs`.
debug!("%s.fn_sigs(%?, %?)",
debug2!("{}.fn_sigs({:?}, {:?})",
self.tag(), a.inf_str(self.infcx), b.inf_str(self.infcx));
let _indenter = indenter();
@ -143,7 +143,7 @@ impl Combine for Glb {
// Collect constraints.
let sig0 = if_ok!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh));
debug!("sig0 = %s", sig0.inf_str(self.infcx));
debug2!("sig0 = {}", sig0.inf_str(self.infcx));
// Generalize the regions appearing in fn_ty0 if possible
let new_vars =
@ -155,7 +155,7 @@ impl Combine for Glb {
|r, _in_fn| generalize_region(self, snapshot,
new_vars, a_isr, a_vars, b_vars,
r));
debug!("sig1 = %s", sig1.inf_str(self.infcx));
debug2!("sig1 = {}", sig1.inf_str(self.infcx));
return Ok(sig1);
fn generalize_region(this: &Glb,
@ -237,7 +237,7 @@ impl Combine for Glb {
Some(x) => x,
None => this.infcx.tcx.sess.span_bug(
this.trace.origin.span(),
fmt!("could not find original bound region for %?", r))
format!("could not find original bound region for {:?}", r))
}
}


@ -131,7 +131,7 @@ impl CombineFieldsLatticeMethods for CombineFields {
let a_bounds = node_a.possible_types.clone();
let b_bounds = node_b.possible_types.clone();
debug!("vars(%s=%s <: %s=%s)",
debug2!("vars({}={} <: {}={})",
a_id.to_str(), a_bounds.inf_str(self.infcx),
b_id.to_str(), b_bounds.inf_str(self.infcx));
@ -179,7 +179,7 @@ impl CombineFieldsLatticeMethods for CombineFields {
let a_bounds = &node_a.possible_types;
let b_bounds = &Bounds { lb: None, ub: Some(b.clone()) };
debug!("var_sub_t(%s=%s <: %s)",
debug2!("var_sub_t({}={} <: {})",
a_id.to_str(),
a_bounds.inf_str(self.infcx),
b.inf_str(self.infcx));
@ -203,7 +203,7 @@ impl CombineFieldsLatticeMethods for CombineFields {
let b_id = node_b.root.clone();
let b_bounds = &node_b.possible_types;
debug!("t_sub_var(%s <: %s=%s)",
debug2!("t_sub_var({} <: {}={})",
a.inf_str(self.infcx),
b_id.to_str(),
b_bounds.inf_str(self.infcx));
@ -222,7 +222,7 @@ impl CombineFieldsLatticeMethods for CombineFields {
*
* Combines two bounds into a more general bound. */
debug!("merge_bnd(%s,%s)",
debug2!("merge_bnd({},{})",
a.inf_str(self.infcx),
b.inf_str(self.infcx));
let _r = indenter();
@ -273,7 +273,7 @@ impl CombineFieldsLatticeMethods for CombineFields {
// A \ / A
// B
debug!("merge(%s,%s,%s)",
debug2!("merge({},{},{})",
v_id.to_str(),
a.inf_str(self.infcx),
b.inf_str(self.infcx));
@ -290,7 +290,7 @@ impl CombineFieldsLatticeMethods for CombineFields {
let ub = if_ok!(self.merge_bnd(&a.ub, &b.ub, LatticeValue::glb));
let lb = if_ok!(self.merge_bnd(&a.lb, &b.lb, LatticeValue::lub));
let bounds = Bounds { lb: lb, ub: ub };
debug!("merge(%s): bounds=%s",
debug2!("merge({}): bounds={}",
v_id.to_str(),
bounds.inf_str(self.infcx));
@ -305,7 +305,7 @@ impl CombineFieldsLatticeMethods for CombineFields {
a: &Bound<T>,
b: &Bound<T>)
-> ures {
debug!("bnds(%s <: %s)", a.inf_str(self.infcx),
debug2!("bnds({} <: {})", a.inf_str(self.infcx),
b.inf_str(self.infcx));
let _r = indenter();
@ -370,7 +370,7 @@ pub fn super_lattice_tys<L:LatticeDir + TyLatticeDir + Combine>(
this: &L,
a: ty::t,
b: ty::t) -> cres<ty::t> {
debug!("%s.lattice_tys(%s, %s)", this.tag(),
debug2!("{}.lattice_tys({}, {})", this.tag(),
a.inf_str(this.infcx()),
b.inf_str(this.infcx()));
let _r = indenter();
@ -448,7 +448,7 @@ pub fn lattice_vars<L:LatticeDir + Combine,
let a_bounds = &nde_a.possible_types;
let b_bounds = &nde_b.possible_types;
debug!("%s.lattice_vars(%s=%s <: %s=%s)",
debug2!("{}.lattice_vars({}={} <: {}={})",
this.tag(),
a_vid.to_str(), a_bounds.inf_str(this.infcx()),
b_vid.to_str(), b_bounds.inf_str(this.infcx()));
@ -494,7 +494,7 @@ pub fn lattice_var_and_t<L:LatticeDir + Combine,
// The comments in this function are written for LUB, but they
// apply equally well to GLB if you inverse upper/lower/sub/super/etc.
debug!("%s.lattice_var_and_t(%s=%s <: %s)",
debug2!("{}.lattice_var_and_t({}={} <: {})",
this.tag(),
a_id.to_str(),
a_bounds.inf_str(this.infcx()),
@ -503,13 +503,13 @@ pub fn lattice_var_and_t<L:LatticeDir + Combine,
match this.bnd(a_bounds) {
Some(ref a_bnd) => {
// If a has an upper bound, return the LUB(a.ub, b)
debug!("bnd=Some(%s)", a_bnd.inf_str(this.infcx()));
debug2!("bnd=Some({})", a_bnd.inf_str(this.infcx()));
lattice_dir_op(a_bnd, b)
}
None => {
// If a does not have an upper bound, make b the upper bound of a
// and then return b.
debug!("bnd=None");
debug2!("bnd=None");
let a_bounds = this.with_bnd(a_bounds, (*b).clone());
do this.combine_fields().bnds(&a_bounds.lb, &a_bounds.ub).then {
this.infcx().set(a_id.clone(),
@ -532,7 +532,7 @@ pub fn var_ids<T:Combine>(this: &T, isr: isr_alist) -> ~[RegionVid] {
r => {
this.infcx().tcx.sess.span_bug(
this.trace().origin.span(),
fmt!("Found non-region-vid: %?", r));
format!("Found non-region-vid: {:?}", r));
}
}
true


@ -50,7 +50,7 @@ impl Combine for Lub {
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
let tcx = self.infcx.tcx;
debug!("%s.mts(%s, %s)",
debug2!("{}.mts({}, {})",
self.tag(),
mt_to_str(tcx, a),
mt_to_str(tcx, b));
@ -106,7 +106,7 @@ impl Combine for Lub {
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("%s.regions(%?, %?)",
debug2!("{}.regions({:?}, {:?})",
self.tag(),
a.inf_str(self.infcx),
b.inf_str(self.infcx));
@ -134,7 +134,7 @@ impl Combine for Lub {
// Collect constraints.
let sig0 = if_ok!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh));
debug!("sig0 = %s", sig0.inf_str(self.infcx));
debug2!("sig0 = {}", sig0.inf_str(self.infcx));
// Generalize the regions appearing in sig0 if possible
let new_vars =
@ -154,7 +154,7 @@ impl Combine for Lub {
r0: ty::Region) -> ty::Region {
// Regions that pre-dated the LUB computation stay as they are.
if !is_var_in_set(new_vars, r0) {
debug!("generalize_region(r0=%?): not new variable", r0);
debug2!("generalize_region(r0={:?}): not new variable", r0);
return r0;
}
@ -164,8 +164,8 @@ impl Combine for Lub {
// *related* to regions that pre-date the LUB computation
// stay as they are.
if !tainted.iter().all(|r| is_var_in_set(new_vars, *r)) {
debug!("generalize_region(r0=%?): \
non-new-variables found in %?",
debug2!("generalize_region(r0={:?}): \
non-new-variables found in {:?}",
r0, tainted);
return r0;
}
@ -179,8 +179,8 @@ impl Combine for Lub {
do list::each(a_isr) |pair| {
let (a_br, a_r) = *pair;
if tainted.iter().any(|x| x == &a_r) {
debug!("generalize_region(r0=%?): \
replacing with %?, tainted=%?",
debug2!("generalize_region(r0={:?}): \
replacing with {:?}, tainted={:?}",
r0, a_br, tainted);
ret = Some(ty::re_bound(a_br));
false
@ -193,7 +193,7 @@ impl Combine for Lub {
Some(x) => x,
None => this.infcx.tcx.sess.span_bug(
this.trace.origin.span(),
fmt!("Region %? is not associated with \
format!("Region {:?} is not associated with \
any bound region from A!", r0))
}
}


@ -244,7 +244,7 @@ pub fn fixup_err_to_str(f: fixup_err) -> ~str {
cyclic_ty(_) => ~"cyclic type of infinite size",
unresolved_region(_) => ~"unconstrained region",
region_var_bound_by_region_var(r1, r2) => {
fmt!("region var %? bound by another region var %?; this is \
format!("region var {:?} bound by another region var {:?}; this is \
a bug in rustc", r1, r2)
}
}
@ -285,7 +285,7 @@ pub fn common_supertype(cx: @mut InferCtxt,
* not possible, reports an error and returns ty::err.
*/
debug!("common_supertype(%s, %s)", a.inf_str(cx), b.inf_str(cx));
debug2!("common_supertype({}, {})", a.inf_str(cx), b.inf_str(cx));
let trace = TypeTrace {
origin: origin,
@ -311,7 +311,7 @@ pub fn mk_subty(cx: @mut InferCtxt,
a: ty::t,
b: ty::t)
-> ures {
debug!("mk_subty(%s <: %s)", a.inf_str(cx), b.inf_str(cx));
debug2!("mk_subty({} <: {})", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.commit {
let trace = TypeTrace {
@ -324,7 +324,7 @@ pub fn mk_subty(cx: @mut InferCtxt,
}
pub fn can_mk_subty(cx: @mut InferCtxt, a: ty::t, b: ty::t) -> ures {
debug!("can_mk_subty(%s <: %s)", a.inf_str(cx), b.inf_str(cx));
debug2!("can_mk_subty({} <: {})", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.probe {
let trace = TypeTrace {
@ -341,7 +341,7 @@ pub fn mk_subr(cx: @mut InferCtxt,
origin: SubregionOrigin,
a: ty::Region,
b: ty::Region) {
debug!("mk_subr(%s <: %s)", a.inf_str(cx), b.inf_str(cx));
debug2!("mk_subr({} <: {})", a.inf_str(cx), b.inf_str(cx));
cx.region_vars.start_snapshot();
cx.region_vars.make_subregion(origin, a, b);
cx.region_vars.commit();
@ -353,7 +353,7 @@ pub fn mk_eqty(cx: @mut InferCtxt,
a: ty::t,
b: ty::t)
-> ures {
debug!("mk_eqty(%s <: %s)", a.inf_str(cx), b.inf_str(cx));
debug2!("mk_eqty({} <: {})", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.commit {
let trace = TypeTrace {
@ -373,7 +373,7 @@ pub fn mk_sub_trait_refs(cx: @mut InferCtxt,
b: @ty::TraitRef)
-> ures
{
debug!("mk_sub_trait_refs(%s <: %s)",
debug2!("mk_sub_trait_refs({} <: {})",
a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.commit {
@ -403,7 +403,7 @@ pub fn mk_coercety(cx: @mut InferCtxt,
a: ty::t,
b: ty::t)
-> CoerceResult {
debug!("mk_coercety(%s -> %s)", a.inf_str(cx), b.inf_str(cx));
debug2!("mk_coercety({} -> {})", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.commit {
let trace = TypeTrace {
@ -416,7 +416,7 @@ pub fn mk_coercety(cx: @mut InferCtxt,
}
pub fn can_mk_coercety(cx: @mut InferCtxt, a: ty::t, b: ty::t) -> ures {
debug!("can_mk_coercety(%s -> %s)", a.inf_str(cx), b.inf_str(cx));
debug2!("can_mk_coercety({} -> {})", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.probe {
let trace = TypeTrace {
@ -539,7 +539,7 @@ impl InferCtxt {
}
pub fn rollback_to(&mut self, snapshot: &Snapshot) {
debug!("rollback!");
debug2!("rollback!");
rollback_to(&mut self.ty_var_bindings, snapshot.ty_var_bindings_len);
rollback_to(&mut self.int_var_bindings,
@ -554,7 +554,7 @@ impl InferCtxt {
pub fn commit<T,E>(@mut self, f: &fn() -> Result<T,E>) -> Result<T,E> {
assert!(!self.in_snapshot());
debug!("commit()");
debug2!("commit()");
do indent {
let r = self.try(|| f());
@ -567,7 +567,7 @@ impl InferCtxt {
/// Execute `f`, unroll bindings on failure
pub fn try<T,E>(@mut self, f: &fn() -> Result<T,E>) -> Result<T,E> {
debug!("try()");
debug2!("try()");
do indent {
let snapshot = self.start_snapshot();
let r = f();
@ -581,7 +581,7 @@ impl InferCtxt {
/// Execute `f` then unroll any bindings it creates
pub fn probe<T,E>(@mut self, f: &fn() -> Result<T,E>) -> Result<T,E> {
debug!("probe()");
debug2!("probe()");
do indent {
let snapshot = self.start_snapshot();
let r = f();
@ -654,7 +654,7 @@ impl InferCtxt {
pub fn tys_to_str(@mut self, ts: &[ty::t]) -> ~str {
let tstrs = ts.map(|t| self.ty_to_str(*t));
fmt!("(%s)", tstrs.connect(", "))
format!("({})", tstrs.connect(", "))
}
pub fn trait_ref_to_str(@mut self, t: &ty::TraitRef) -> ~str {
@ -690,8 +690,8 @@ impl InferCtxt {
}
_ => {
self.tcx.sess.bug(
fmt!("resolve_type_vars_if_possible() yielded %s \
when supplied with %s",
format!("resolve_type_vars_if_possible() yielded {} \
when supplied with {}",
self.ty_to_str(dummy0),
self.ty_to_str(dummy1)));
}
@ -725,10 +725,10 @@ impl InferCtxt {
expected_ty: Option<ty::t>,
actual_ty: ~str,
err: Option<&ty::type_err>) {
debug!("hi! expected_ty = %?, actual_ty = %s", expected_ty, actual_ty);
debug2!("hi! expected_ty = {:?}, actual_ty = {}", expected_ty, actual_ty);
let error_str = do err.map_move_default(~"") |t_err| {
fmt!(" (%s)", ty::type_err_to_str(self.tcx, t_err))
format!(" ({})", ty::type_err_to_str(self.tcx, t_err))
};
let resolved_expected = do expected_ty.map_move |e_ty| {
self.resolve_type_vars_if_possible(e_ty)
@ -736,10 +736,10 @@ impl InferCtxt {
if !resolved_expected.map_move_default(false, |e| { ty::type_is_error(e) }) {
match resolved_expected {
None => self.tcx.sess.span_err(sp,
fmt!("%s%s", mk_msg(None, actual_ty), error_str)),
format!("{}{}", mk_msg(None, actual_ty), error_str)),
Some(e) => {
self.tcx.sess.span_err(sp,
fmt!("%s%s", mk_msg(Some(self.ty_to_str(e)), actual_ty), error_str));
format!("{}{}", mk_msg(Some(self.ty_to_str(e)), actual_ty), error_str));
}
}
for err in err.iter() {
@ -776,7 +776,7 @@ impl InferCtxt {
_ => {
// if I leave out : ~str, it infers &str and complains
|actual: ~str| {
fmt!("mismatched types: expected `%s` but found `%s`",
format!("mismatched types: expected `{}` but found `{}`",
self.ty_to_str(resolved_expected), actual)
}
}
@ -792,7 +792,7 @@ impl InferCtxt {
replace_bound_regions_in_fn_sig(self.tcx, @Nil, None, fsig, |br| {
let rvar = self.next_region_var(
BoundRegionInFnType(trace.origin.span(), br));
debug!("Bound region %s maps to %?",
debug2!("Bound region {} maps to {:?}",
bound_region_to_str(self.tcx, "", false, br),
rvar);
rvar
@ -819,7 +819,7 @@ impl TypeTrace {
impl Repr for TypeTrace {
fn repr(&self, tcx: ty::ctxt) -> ~str {
fmt!("TypeTrace(%s)", self.origin.repr(tcx))
format!("TypeTrace({})", self.origin.repr(tcx))
}
}
@ -840,13 +840,13 @@ impl TypeOrigin {
impl Repr for TypeOrigin {
fn repr(&self, tcx: ty::ctxt) -> ~str {
match *self {
MethodCompatCheck(a) => fmt!("MethodCompatCheck(%s)", a.repr(tcx)),
ExprAssignable(a) => fmt!("ExprAssignable(%s)", a.repr(tcx)),
Misc(a) => fmt!("Misc(%s)", a.repr(tcx)),
RelateTraitRefs(a) => fmt!("RelateTraitRefs(%s)", a.repr(tcx)),
RelateSelfType(a) => fmt!("RelateSelfType(%s)", a.repr(tcx)),
MatchExpression(a) => fmt!("MatchExpression(%s)", a.repr(tcx)),
IfExpression(a) => fmt!("IfExpression(%s)", a.repr(tcx)),
MethodCompatCheck(a) => format!("MethodCompatCheck({})", a.repr(tcx)),
ExprAssignable(a) => format!("ExprAssignable({})", a.repr(tcx)),
Misc(a) => format!("Misc({})", a.repr(tcx)),
RelateTraitRefs(a) => format!("RelateTraitRefs({})", a.repr(tcx)),
RelateSelfType(a) => format!("RelateSelfType({})", a.repr(tcx)),
MatchExpression(a) => format!("MatchExpression({})", a.repr(tcx)),
IfExpression(a) => format!("IfExpression({})", a.repr(tcx)),
}
}
}
@ -876,21 +876,23 @@ impl SubregionOrigin {
impl Repr for SubregionOrigin {
fn repr(&self, tcx: ty::ctxt) -> ~str {
match *self {
Subtype(a) => fmt!("Subtype(%s)", a.repr(tcx)),
InfStackClosure(a) => fmt!("InfStackClosure(%s)", a.repr(tcx)),
InvokeClosure(a) => fmt!("InvokeClosure(%s)", a.repr(tcx)),
DerefPointer(a) => fmt!("DerefPointer(%s)", a.repr(tcx)),
FreeVariable(a) => fmt!("FreeVariable(%s)", a.repr(tcx)),
IndexSlice(a) => fmt!("IndexSlice(%s)", a.repr(tcx)),
RelateObjectBound(a) => fmt!("RelateObjectBound(%s)", a.repr(tcx)),
Reborrow(a) => fmt!("Reborrow(%s)", a.repr(tcx)),
ReferenceOutlivesReferent(_, a) => fmt!("ReferenceOutlivesReferent(%s)", a.repr(tcx)),
BindingTypeIsNotValidAtDecl(a) => fmt!("BindingTypeIsNotValidAtDecl(%s)", a.repr(tcx)),
CallRcvr(a) => fmt!("CallRcvr(%s)", a.repr(tcx)),
CallArg(a) => fmt!("CallArg(%s)", a.repr(tcx)),
CallReturn(a) => fmt!("CallReturn(%s)", a.repr(tcx)),
AddrOf(a) => fmt!("AddrOf(%s)", a.repr(tcx)),
AutoBorrow(a) => fmt!("AutoBorrow(%s)", a.repr(tcx)),
Subtype(a) => format!("Subtype({})", a.repr(tcx)),
InfStackClosure(a) => format!("InfStackClosure({})", a.repr(tcx)),
InvokeClosure(a) => format!("InvokeClosure({})", a.repr(tcx)),
DerefPointer(a) => format!("DerefPointer({})", a.repr(tcx)),
FreeVariable(a) => format!("FreeVariable({})", a.repr(tcx)),
IndexSlice(a) => format!("IndexSlice({})", a.repr(tcx)),
RelateObjectBound(a) => format!("RelateObjectBound({})", a.repr(tcx)),
Reborrow(a) => format!("Reborrow({})", a.repr(tcx)),
ReferenceOutlivesReferent(_, a) =>
format!("ReferenceOutlivesReferent({})", a.repr(tcx)),
BindingTypeIsNotValidAtDecl(a) =>
format!("BindingTypeIsNotValidAtDecl({})", a.repr(tcx)),
CallRcvr(a) => format!("CallRcvr({})", a.repr(tcx)),
CallArg(a) => format!("CallArg({})", a.repr(tcx)),
CallReturn(a) => format!("CallReturn({})", a.repr(tcx)),
AddrOf(a) => format!("AddrOf({})", a.repr(tcx)),
AutoBorrow(a) => format!("AutoBorrow({})", a.repr(tcx)),
}
}
}
@ -916,20 +918,20 @@ impl RegionVariableOrigin {
impl Repr for RegionVariableOrigin {
fn repr(&self, tcx: ty::ctxt) -> ~str {
match *self {
MiscVariable(a) => fmt!("MiscVariable(%s)", a.repr(tcx)),
PatternRegion(a) => fmt!("PatternRegion(%s)", a.repr(tcx)),
AddrOfRegion(a) => fmt!("AddrOfRegion(%s)", a.repr(tcx)),
AddrOfSlice(a) => fmt!("AddrOfSlice(%s)", a.repr(tcx)),
Autoref(a) => fmt!("Autoref(%s)", a.repr(tcx)),
Coercion(a) => fmt!("Coercion(%s)", a.repr(tcx)),
BoundRegionInFnCall(a, b) => fmt!("BoundRegionInFnCall(%s,%s)",
MiscVariable(a) => format!("MiscVariable({})", a.repr(tcx)),
PatternRegion(a) => format!("PatternRegion({})", a.repr(tcx)),
AddrOfRegion(a) => format!("AddrOfRegion({})", a.repr(tcx)),
AddrOfSlice(a) => format!("AddrOfSlice({})", a.repr(tcx)),
Autoref(a) => format!("Autoref({})", a.repr(tcx)),
Coercion(a) => format!("Coercion({})", a.repr(tcx)),
BoundRegionInFnCall(a, b) => format!("BoundRegionInFnCall({},{})",
a.repr(tcx), b.repr(tcx)),
BoundRegionInFnType(a, b) => fmt!("BoundRegionInFnType(%s,%s)",
BoundRegionInFnType(a, b) => format!("BoundRegionInFnType({},{})",
a.repr(tcx), b.repr(tcx)),
BoundRegionInTypeOrImpl(a) => fmt!("BoundRegionInTypeOrImpl(%s)",
BoundRegionInTypeOrImpl(a) => format!("BoundRegionInTypeOrImpl({})",
a.repr(tcx)),
BoundRegionInCoherence => fmt!("BoundRegionInCoherence"),
BoundRegionError(a) => fmt!("BoundRegionError(%s)", a.repr(tcx)),
BoundRegionInCoherence => format!("BoundRegionInCoherence"),
BoundRegionError(a) => format!("BoundRegionError({})", a.repr(tcx)),
}
}
}


@ -130,7 +130,7 @@ impl RegionVarBindings {
}
pub fn start_snapshot(&mut self) -> uint {
debug!("RegionVarBindings: snapshot()=%u", self.undo_log.len());
debug2!("RegionVarBindings: snapshot()={}", self.undo_log.len());
if self.in_snapshot() {
self.undo_log.len()
} else {
@ -140,17 +140,17 @@ impl RegionVarBindings {
}
pub fn commit(&mut self) {
debug!("RegionVarBindings: commit()");
debug2!("RegionVarBindings: commit()");
while self.undo_log.len() > 0 {
self.undo_log.pop();
}
}
pub fn rollback_to(&mut self, snapshot: uint) {
debug!("RegionVarBindings: rollback_to(%u)", snapshot);
debug2!("RegionVarBindings: rollback_to({})", snapshot);
while self.undo_log.len() > snapshot {
let undo_item = self.undo_log.pop();
debug!("undo_item=%?", undo_item);
debug2!("undo_item={:?}", undo_item);
match undo_item {
Snapshot => {}
AddVar(vid) => {
@ -181,7 +181,7 @@ impl RegionVarBindings {
if self.in_snapshot() {
self.undo_log.push(AddVar(vid));
}
debug!("created new region variable %? with origin %?",
debug2!("created new region variable {:?} with origin {:?}",
vid, origin.repr(self.tcx));
return vid;
}
@ -218,7 +218,7 @@ impl RegionVarBindings {
// cannot add constraints once regions are resolved
assert!(self.values.is_empty());
debug!("RegionVarBindings: add_constraint(%?)", constraint);
debug2!("RegionVarBindings: add_constraint({:?})", constraint);
if self.constraints.insert(constraint, origin) {
if self.in_snapshot() {
@ -234,7 +234,7 @@ impl RegionVarBindings {
// cannot add constraints once regions are resolved
assert!(self.values.is_empty());
debug!("RegionVarBindings: make_subregion(%?, %?)", sub, sup);
debug2!("RegionVarBindings: make_subregion({:?}, {:?})", sub, sup);
match (sub, sup) {
(re_infer(ReVar(sub_id)), re_infer(ReVar(sup_id))) => {
self.add_constraint(ConstrainVarSubVar(sub_id, sup_id), origin);
@ -248,12 +248,12 @@ impl RegionVarBindings {
(re_bound(br), _) => {
self.tcx.sess.span_bug(
origin.span(),
fmt!("Cannot relate bound region as subregion: %?", br));
format!("Cannot relate bound region as subregion: {:?}", br));
}
(_, re_bound(br)) => {
self.tcx.sess.span_bug(
origin.span(),
fmt!("Cannot relate bound region as superregion: %?", br));
format!("Cannot relate bound region as superregion: {:?}", br));
}
_ => {
self.add_constraint(ConstrainRegSubReg(sub, sup), origin);
@ -269,7 +269,7 @@ impl RegionVarBindings {
// cannot add constraints once regions are resolved
assert!(self.values.is_empty());
debug!("RegionVarBindings: lub_regions(%?, %?)", a, b);
debug2!("RegionVarBindings: lub_regions({:?}, {:?})", a, b);
match (a, b) {
(re_static, _) | (_, re_static) => {
re_static // nothing lives longer than static
@ -292,7 +292,7 @@ impl RegionVarBindings {
// cannot add constraints once regions are resolved
assert!(self.values.is_empty());
debug!("RegionVarBindings: glb_regions(%?, %?)", a, b);
debug2!("RegionVarBindings: glb_regions({:?}, {:?})", a, b);
match (a, b) {
(re_static, r) | (r, re_static) => {
// static lives longer than everything else
@ -312,12 +312,12 @@ impl RegionVarBindings {
if self.values.is_empty() {
self.tcx.sess.span_bug(
self.var_origins[rid.to_uint()].span(),
fmt!("Attempt to resolve region variable before values have \
format!("Attempt to resolve region variable before values have \
been computed!"));
}
let v = self.values.with_ref(|values| values[rid.to_uint()]);
debug!("RegionVarBindings: resolve_var(%?=%u)=%?",
debug2!("RegionVarBindings: resolve_var({:?}={})={:?}",
rid, rid.to_uint(), v);
match v {
Value(r) => r,
@ -367,7 +367,7 @@ impl RegionVarBindings {
}
relate(self, a, re_infer(ReVar(c)));
relate(self, b, re_infer(ReVar(c)));
debug!("combine_vars() c=%?", c);
debug2!("combine_vars() c={:?}", c);
re_infer(ReVar(c))
}
@ -390,7 +390,7 @@ impl RegionVarBindings {
* regions.
*/
debug!("tainted(snapshot=%u, r0=%?)", snapshot, r0);
debug2!("tainted(snapshot={}, r0={:?})", snapshot, r0);
let _indenter = indenter();
let undo_len = self.undo_log.len();
@ -404,7 +404,7 @@ impl RegionVarBindings {
// nb: can't use uint::range() here because result_set grows
let r = result_set[result_index];
debug!("result_index=%u, r=%?", result_index, r);
debug2!("result_index={}, r={:?}", result_index, r);
let mut undo_index = snapshot;
while undo_index < undo_len {
@ -469,7 +469,7 @@ impl RegionVarBindings {
errors are reported.
*/
pub fn resolve_regions(&mut self) -> OptVec<RegionResolutionError> {
debug!("RegionVarBindings: resolve_regions()");
debug2!("RegionVarBindings: resolve_regions()");
let mut errors = opt_vec::Empty;
let v = self.infer_variable_values(&mut errors);
self.values.put_back(v);
@ -496,8 +496,8 @@ impl RegionVarBindings {
(re_infer(ReVar(v_id)), _) | (_, re_infer(ReVar(v_id))) => {
self.tcx.sess.span_bug(
self.var_origins[v_id.to_uint()].span(),
fmt!("lub_concrete_regions invoked with \
non-concrete regions: %?, %?", a, b));
format!("lub_concrete_regions invoked with \
non-concrete regions: {:?}, {:?}", a, b));
}
(f @ re_free(ref fr), re_scope(s_id)) |
@ -582,7 +582,7 @@ impl RegionVarBindings {
a: Region,
b: Region)
-> cres<Region> {
debug!("glb_concrete_regions(%?, %?)", a, b);
debug2!("glb_concrete_regions({:?}, {:?})", a, b);
match (a, b) {
(re_static, r) | (r, re_static) => {
// static lives longer than everything else
@ -598,8 +598,8 @@ impl RegionVarBindings {
(_, re_infer(ReVar(v_id))) => {
self.tcx.sess.span_bug(
self.var_origins[v_id.to_uint()].span(),
fmt!("glb_concrete_regions invoked with \
non-concrete regions: %?, %?", a, b));
format!("glb_concrete_regions invoked with \
non-concrete regions: {:?}, {:?}", a, b));
}
(re_free(ref fr), s @ re_scope(s_id)) |
@ -691,7 +691,7 @@ impl RegionVarBindings {
// scopes or two free regions. So, if one of
// these scopes is a subscope of the other, return
// it. Otherwise fail.
debug!("intersect_scopes(scope_a=%?, scope_b=%?, region_a=%?, region_b=%?)",
debug2!("intersect_scopes(scope_a={:?}, scope_b={:?}, region_a={:?}, region_b={:?})",
scope_a, scope_b, region_a, region_b);
let rm = self.tcx.region_maps;
match rm.nearest_common_ancestor(scope_a, scope_b) {
@ -778,13 +778,13 @@ impl RegionVarBindings {
b_vid: RegionVid,
b_data: &mut VarData)
-> bool {
debug!("expand_node(%?, %? == %?)",
debug2!("expand_node({:?}, {:?} == {:?})",
a_region, b_vid, b_data.value);
b_data.classification = Expanding;
match b_data.value {
NoValue => {
debug!("Setting initial value of %? to %?", b_vid, a_region);
debug2!("Setting initial value of {:?} to {:?}", b_vid, a_region);
b_data.value = Value(a_region);
return true;
@ -796,7 +796,7 @@ impl RegionVarBindings {
return false;
}
debug!("Expanding value of %? from %? to %?",
debug2!("Expanding value of {:?} from {:?} to {:?}",
b_vid, cur_region, lub);
b_data.value = Value(lub);
@ -843,7 +843,7 @@ impl RegionVarBindings {
a_data: &mut VarData,
b_region: Region)
-> bool {
debug!("contract_node(%? == %?/%?, %?)",
debug2!("contract_node({:?} == {:?}/{:?}, {:?})",
a_vid, a_data.value, a_data.classification, b_region);
return match a_data.value {
@ -876,7 +876,7 @@ impl RegionVarBindings {
b_region: Region)
-> bool {
if !this.is_subregion_of(a_region, b_region) {
debug!("Setting %? to ErrorValue: %? not subregion of %?",
debug2!("Setting {:?} to ErrorValue: {:?} not subregion of {:?}",
a_vid, a_region, b_region);
a_data.value = ErrorValue;
}
@ -894,14 +894,14 @@ impl RegionVarBindings {
if glb == a_region {
false
} else {
debug!("Contracting value of %? from %? to %?",
debug2!("Contracting value of {:?} from {:?} to {:?}",
a_vid, a_region, glb);
a_data.value = Value(glb);
true
}
}
Err(_) => {
debug!("Setting %? to ErrorValue: no glb of %?, %?",
debug2!("Setting {:?} to ErrorValue: no glb of {:?}, {:?}",
a_vid, a_region, b_region);
a_data.value = ErrorValue;
false
@ -930,7 +930,7 @@ impl RegionVarBindings {
loop;
}
debug!("ConcreteFailure: !(sub <= sup): sub=%?, sup=%?",
debug2!("ConcreteFailure: !(sub <= sup): sub={:?}, sup={:?}",
sub, sup);
let origin = self.constraints.get_copy(constraint);
errors.push(ConcreteFailure(origin, sub, sup));
@ -943,7 +943,7 @@ impl RegionVarBindings {
errors: &mut OptVec<RegionResolutionError>)
-> ~[VarValue]
{
debug!("extract_values_and_collect_conflicts()");
debug2!("extract_values_and_collect_conflicts()");
// This is the best way that I have found to suppress
// duplicate and related errors. Basically we keep a set of
@ -1095,8 +1095,8 @@ impl RegionVarBindings {
self.tcx.sess.span_bug(
self.var_origins[node_idx.to_uint()].span(),
fmt!("collect_error_for_expanding_node() could not find error \
for var %?, lower_bounds=%s, upper_bounds=%s",
format!("collect_error_for_expanding_node() could not find error \
for var {:?}, lower_bounds={}, upper_bounds={}",
node_idx,
lower_bounds.map(|x| x.region).repr(self.tcx),
upper_bounds.map(|x| x.region).repr(self.tcx)));
@ -1140,8 +1140,8 @@ impl RegionVarBindings {
self.tcx.sess.span_bug(
self.var_origins[node_idx.to_uint()].span(),
fmt!("collect_error_for_contracting_node() could not find error \
for var %?, upper_bounds=%s",
format!("collect_error_for_contracting_node() could not find error \
for var {:?}, upper_bounds={}",
node_idx,
upper_bounds.map(|x| x.region).repr(self.tcx)));
}
@ -1182,8 +1182,8 @@ impl RegionVarBindings {
state.dup_found = true;
}
debug!("collect_concrete_regions(orig_node_idx=%?, node_idx=%?, \
classification=%?)",
debug2!("collect_concrete_regions(orig_node_idx={:?}, node_idx={:?}, \
classification={:?})",
orig_node_idx, node_idx, classification);
// figure out the direction from which this node takes its
@ -1204,7 +1204,7 @@ impl RegionVarBindings {
graph: &RegionGraph,
source_vid: RegionVid,
dir: Direction) {
debug!("process_edges(source_vid=%?, dir=%?)", source_vid, dir);
debug2!("process_edges(source_vid={:?}, dir={:?})", source_vid, dir);
let source_node_index = NodeIndex(source_vid.to_uint());
do graph.each_adjacent_edge(source_node_index, dir) |_, edge| {
@ -1240,17 +1240,17 @@ impl RegionVarBindings {
while changed {
changed = false;
iteration += 1;
debug!("---- %s Iteration #%u", tag, iteration);
debug2!("---- {} Iteration \\#{}", tag, iteration);
for (constraint, _) in self.constraints.iter() {
let edge_changed = body(constraint);
if edge_changed {
debug!("Updated due to constraint %s",
debug2!("Updated due to constraint {}",
constraint.repr(self.tcx));
changed = true;
}
}
}
debug!("---- %s Complete after %u iteration(s)", tag, iteration);
debug2!("---- {} Complete after {} iteration(s)", tag, iteration);
}
}
@ -1258,13 +1258,13 @@ impl RegionVarBindings {
impl Repr for Constraint {
fn repr(&self, tcx: ty::ctxt) -> ~str {
match *self {
ConstrainVarSubVar(a, b) => fmt!("ConstrainVarSubVar(%s, %s)",
ConstrainVarSubVar(a, b) => format!("ConstrainVarSubVar({}, {})",
a.repr(tcx), b.repr(tcx)),
ConstrainRegSubVar(a, b) => fmt!("ConstrainRegSubVar(%s, %s)",
ConstrainRegSubVar(a, b) => format!("ConstrainRegSubVar({}, {})",
a.repr(tcx), b.repr(tcx)),
ConstrainVarSubReg(a, b) => fmt!("ConstrainVarSubReg(%s, %s)",
ConstrainVarSubReg(a, b) => format!("ConstrainVarSubReg({}, {})",
a.repr(tcx), b.repr(tcx)),
ConstrainRegSubReg(a, b) => fmt!("ConstrainRegSubReg(%s, %s)",
ConstrainRegSubReg(a, b) => format!("ConstrainRegSubReg({}, {})",
a.repr(tcx), b.repr(tcx)),
}
}

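Two details of the new syntax recur in this file. Values that already have a string form (anything passed through .repr(tcx) or .inf_str(..)) are printed with {}, while raw compiler values such as Region, Constraint or RegionVid use {:?}, the replacement for the old catch-all %? specifier. The iteration-counter line also escapes the hash as \\#; # was evidently still a reserved character in the early format! grammar, an escape that current Rust no longer needs. A present-day illustration of the two placeholder kinds (the Region enum here is a stand-in, not the compiler's type):

    #[derive(Debug)]
    enum Region {
        ReStatic,
        ReVar(u32),
    }

    fn main() {
        let a = Region::ReStatic;
        let b = Region::ReVar(3);
        // {:?} replaces %? and uses the derived Debug impl;
        // {} replaces %s and expects a ready-made display string.
        println!("lub_concrete_regions({:?}, {:?})", a, b);
        println!("resolved to {}", format!("'r{}", 3));
    }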

@ -104,7 +104,7 @@ impl ResolveState {
pub fn resolve_type_chk(&mut self, typ: ty::t) -> fres<ty::t> {
self.err = None;
debug!("Resolving %s (modes=%x)",
debug2!("Resolving {} (modes={:x})",
ty_to_str(self.infcx.tcx, typ),
self.modes);
@ -116,7 +116,7 @@ impl ResolveState {
assert!(self.v_seen.is_empty());
match self.err {
None => {
debug!("Resolved to %s + %s (modes=%x)",
debug2!("Resolved to {} + {} (modes={:x})",
ty_to_str(self.infcx.tcx, rty),
ty_to_str(self.infcx.tcx, rty),
self.modes);
@ -137,7 +137,7 @@ impl ResolveState {
}
pub fn resolve_type(&mut self, typ: ty::t) -> ty::t {
debug!("resolve_type(%s)", typ.inf_str(self.infcx));
debug2!("resolve_type({})", typ.inf_str(self.infcx));
let _i = indenter();
if !ty::type_needs_infer(typ) {
@ -179,7 +179,7 @@ impl ResolveState {
}
pub fn resolve_region(&mut self, orig: ty::Region) -> ty::Region {
debug!("Resolve_region(%s)", orig.inf_str(self.infcx));
debug2!("Resolve_region({})", orig.inf_str(self.infcx));
match orig {
ty::re_infer(ty::ReVar(rid)) => self.resolve_region_var(rid),
_ => orig


@ -56,7 +56,7 @@ impl Combine for Sub {
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<ty::Region> {
debug!("%s.regions(%s, %s)",
debug2!("{}.regions({}, {})",
self.tag(),
a.inf_str(self.infcx),
b.inf_str(self.infcx));
@ -65,7 +65,7 @@ impl Combine for Sub {
}
fn mts(&self, a: &ty::mt, b: &ty::mt) -> cres<ty::mt> {
debug!("mts(%s <: %s)", a.inf_str(self.infcx), b.inf_str(self.infcx));
debug2!("mts({} <: {})", a.inf_str(self.infcx), b.inf_str(self.infcx));
if a.mutbl != b.mutbl {
return Err(ty::terr_mutability);
@ -110,7 +110,7 @@ impl Combine for Sub {
}
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
debug!("%s.tys(%s, %s)", self.tag(),
debug2!("{}.tys({}, {})", self.tag(),
a.inf_str(self.infcx), b.inf_str(self.infcx));
if a == b { return Ok(a); }
let _indenter = indenter();
@ -143,7 +143,7 @@ impl Combine for Sub {
}
fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
debug!("fn_sigs(a=%s, b=%s)",
debug2!("fn_sigs(a={}, b={})",
a.inf_str(self.infcx), b.inf_str(self.infcx));
let _indenter = indenter();
@ -172,15 +172,15 @@ impl Combine for Sub {
do replace_bound_regions_in_fn_sig(self.infcx.tcx, @Nil,
None, b) |br| {
let skol = self.infcx.region_vars.new_skolemized(br);
debug!("Bound region %s skolemized to %?",
debug2!("Bound region {} skolemized to {:?}",
bound_region_to_str(self.infcx.tcx, "", false, br),
skol);
skol
}
};
debug!("a_sig=%s", a_sig.inf_str(self.infcx));
debug!("b_sig=%s", b_sig.inf_str(self.infcx));
debug2!("a_sig={}", a_sig.inf_str(self.infcx));
debug2!("b_sig={}", b_sig.inf_str(self.infcx));
// Compare types now that bound regions have been replaced.
let sig = if_ok!(super_fn_sigs(self, &a_sig, &b_sig));

View File

@ -100,7 +100,7 @@ impl Env {
return match search_mod(self, &self.crate.node.module, 0, names) {
Some(id) => id,
None => {
fail!("No item found: `%s`", names.connect("::"));
fail2!("No item found: `%s`", names.connect("::"));
}
};
@ -153,7 +153,7 @@ impl Env {
pub fn assert_subtype(&self, a: ty::t, b: ty::t) {
if !self.is_subtype(a, b) {
fail!("%s is not a subtype of %s, but it should be",
fail2!("%s is not a subtype of %s, but it should be",
self.ty_to_str(a),
self.ty_to_str(b));
}
@ -161,7 +161,7 @@ impl Env {
pub fn assert_not_subtype(&self, a: ty::t, b: ty::t) {
if self.is_subtype(a, b) {
fail!("%s is a subtype of %s, but it shouldn't be",
fail2!("%s is a subtype of %s, but it shouldn't be",
self.ty_to_str(a),
self.ty_to_str(b));
}
@ -223,14 +223,14 @@ impl Env {
pub fn glb() -> Glb { Glb(self.infcx.combine_fields(true, dummy_sp())) }
pub fn resolve_regions(exp_count: uint) {
debug!("resolve_regions(%u)", exp_count);
debug2!("resolve_regions(%u)", exp_count);
self.infcx.resolve_regions();
if self.err_messages.len() != exp_count {
for msg in self.err_messages.iter() {
debug!("Error encountered: %s", *msg);
debug2!("Error encountered: %s", *msg);
}
fmt!("Resolving regions encountered %u errors but expected %u!",
format!("Resolving regions encountered %u errors but expected %u!",
self.err_messages.len(),
exp_count);
}
@ -240,7 +240,7 @@ impl Env {
pub fn check_lub(&self, t1: ty::t, t2: ty::t, t_lub: ty::t) {
match self.lub().tys(t1, t2) {
Err(e) => {
fail!("Unexpected error computing LUB: %?", e)
fail2!("Unexpected error computing LUB: %?", e)
}
Ok(t) => {
self.assert_eq(t, t_lub);
@ -256,13 +256,13 @@ impl Env {
/// Checks that `GLB(t1,t2) == t_glb`
pub fn check_glb(&self, t1: ty::t, t2: ty::t, t_glb: ty::t) {
debug!("check_glb(t1=%s, t2=%s, t_glb=%s)",
debug2!("check_glb(t1=%s, t2=%s, t_glb=%s)",
self.ty_to_str(t1),
self.ty_to_str(t2),
self.ty_to_str(t_glb));
match self.glb().tys(t1, t2) {
Err(e) => {
fail!("Unexpected error computing LUB: %?", e)
fail2!("Unexpected error computing LUB: %?", e)
}
Ok(t) => {
self.assert_eq(t, t_glb);
@ -281,7 +281,7 @@ impl Env {
match self.lub().tys(t1, t2) {
Err(_) => {}
Ok(t) => {
fail!("Unexpected success computing LUB: %?", self.ty_to_str(t))
fail2!("Unexpected success computing LUB: %?", self.ty_to_str(t))
}
}
}
@ -291,7 +291,7 @@ impl Env {
match self.glb().tys(t1, t2) {
Err(_) => {}
Ok(t) => {
fail!("Unexpected success computing GLB: %?", self.ty_to_str(t))
fail2!("Unexpected success computing GLB: %?", self.ty_to_str(t))
}
}
}
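
A minimal sketch (not from this commit) of the macro renames the hunks above rely on, with the format strings converted at the same time; check_len is a hypothetical helper:

// debug! becomes debug2! and fail! becomes fail2!; both take ifmt-style strings.
fn check_len(xs: &[uint], expected: uint) {
    debug2!("check_len(len={}, expected={})", xs.len(), expected);
    if xs.len() != expected {
        fail2!("length mismatch: found {} but expected {}", xs.len(), expected);
    }
}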

View File

@ -31,7 +31,7 @@ impl InferStr for ty::t {
impl InferStr for FnSig {
fn inf_str(&self, cx: &InferCtxt) -> ~str {
fmt!("(%s) -> %s",
format!("({}) -> {}",
self.inputs.map(|a| a.inf_str(cx)).connect(", "),
self.output.inf_str(cx))
}
@ -45,7 +45,7 @@ impl InferStr for ty::mt {
impl InferStr for ty::Region {
fn inf_str(&self, _cx: &InferCtxt) -> ~str {
fmt!("%?", *self)
format!("{:?}", *self)
}
}
@ -60,7 +60,7 @@ impl<V:InferStr> InferStr for Bound<V> {
impl<T:InferStr> InferStr for Bounds<T> {
fn inf_str(&self, cx: &InferCtxt) -> ~str {
fmt!("{%s <: %s}",
format!("\\{{} <: {}\\}",
self.lb.inf_str(cx),
self.ub.inf_str(cx))
}
@ -69,8 +69,8 @@ impl<T:InferStr> InferStr for Bounds<T> {
impl<V:Vid + ToStr,T:InferStr> InferStr for VarValue<V, T> {
fn inf_str(&self, cx: &InferCtxt) -> ~str {
match *self {
Redirect(ref vid) => fmt!("Redirect(%s)", vid.to_str()),
Root(ref pt, rk) => fmt!("Root(%s, %s)", pt.inf_str(cx),
Redirect(ref vid) => format!("Redirect({})", vid.to_str()),
Root(ref pt, rk) => format!("Root({}, {})", pt.inf_str(cx),
rk.to_str_radix(10u))
}
}
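
A minimal sketch (not from this commit) of how literal braces and '#' are escaped under the same assumed 0.8-era syntax, mirroring the Bounds string above and the escaped iteration counter earlier in the diff; both helpers are hypothetical:

// '{', '}', and '#' are escaped with a backslash in ifmt strings,
// written as "\\{", "\\}", and "\\#" inside a Rust string literal.
fn bounds_str(lb: &str, ub: &str) -> ~str {
    format!("\\{{} <: {}\\}", lb, ub)       // renders as "{<lb> <: <ub>}"
}
fn iteration_str(n: uint) -> ~str {
    format!("Iteration \\#{}", n)           // renders as "Iteration #<n>"
}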

View File

@ -85,8 +85,8 @@ impl UnifyInferCtxtMethods for InferCtxt {
let var_val = match vb.vals.find(&vid_u) {
Some(&ref var_val) => (*var_val).clone(),
None => {
tcx.sess.bug(fmt!(
"failed lookup of vid `%u`", vid_u));
tcx.sess.bug(format!(
"failed lookup of vid `{}`", vid_u));
}
};
match var_val {
@ -116,7 +116,7 @@ impl UnifyInferCtxtMethods for InferCtxt {
* Sets the value for `vid` to `new_v`. `vid` MUST be a root node!
*/
debug!("Updating variable %s to %s",
debug2!("Updating variable {} to {}",
vid.to_str(), new_v.inf_str(self));
let vb = UnifyVid::appropriate_vals_and_bindings(self);
@ -134,8 +134,8 @@ impl UnifyInferCtxtMethods for InferCtxt {
// Rank optimization: if you don't know what it is, check
// out <http://en.wikipedia.org/wiki/Disjoint-set_data_structure>
debug!("unify(node_a(id=%?, rank=%?), \
node_b(id=%?, rank=%?))",
debug2!("unify(node_a(id={:?}, rank={:?}), \
node_b(id={:?}, rank={:?}))",
node_a.root, node_a.rank,
node_b.root, node_b.rank);

View File

@ -178,7 +178,7 @@ impl Repr for vtable_origin {
fn repr(&self, tcx: ty::ctxt) -> ~str {
match *self {
vtable_static(def_id, ref tys, ref vtable_res) => {
fmt!("vtable_static(%?:%s, %s, %s)",
format!("vtable_static({:?}:{}, {}, {})",
def_id,
ty::item_path_str(tcx, def_id),
tys.repr(tcx),
@ -186,7 +186,7 @@ impl Repr for vtable_origin {
}
vtable_param(x, y) => {
fmt!("vtable_param(%?, %?)", x, y)
format!("vtable_param({:?}, {:?})", x, y)
}
}
}
@ -208,7 +208,7 @@ pub struct impl_res {
impl Repr for impl_res {
fn repr(&self, tcx: ty::ctxt) -> ~str {
fmt!("impl_res {trait_vtables=%s, self_vtables=%s}",
format!("impl_res \\{trait_vtables={}, self_vtables={}\\}",
self.trait_vtables.repr(tcx),
self.self_vtables.repr(tcx))
}
@ -226,7 +226,7 @@ pub struct CrateCtxt {
// Functions that write types into the node type table
pub fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::NodeId, ty: ty::t) {
debug!("write_ty_to_tcx(%d, %s)", node_id, ppaux::ty_to_str(tcx, ty));
debug2!("write_ty_to_tcx({}, {})", node_id, ppaux::ty_to_str(tcx, ty));
assert!(!ty::type_needs_infer(ty));
tcx.node_types.insert(node_id as uint, ty);
}
@ -234,7 +234,7 @@ pub fn write_substs_to_tcx(tcx: ty::ctxt,
node_id: ast::NodeId,
substs: ~[ty::t]) {
if substs.len() > 0u {
debug!("write_substs_to_tcx(%d, %?)", node_id,
debug2!("write_substs_to_tcx({}, {:?})", node_id,
substs.map(|t| ppaux::ty_to_str(tcx, *t)));
assert!(substs.iter().all(|t| !ty::type_needs_infer(*t)));
tcx.node_type_substs.insert(node_id, substs);
@ -361,12 +361,12 @@ fn check_main_fn_ty(ccx: &CrateCtxt,
});
require_same_types(tcx, None, false, main_span, main_t, se_ty,
|| fmt!("main function expects type: `%s`",
|| format!("main function expects type: `{}`",
ppaux::ty_to_str(ccx.tcx, se_ty)));
}
_ => {
tcx.sess.span_bug(main_span,
fmt!("main has a non-function type: found `%s`",
format!("main has a non-function type: found `{}`",
ppaux::ty_to_str(tcx, main_t)));
}
}
@ -409,12 +409,12 @@ fn check_start_fn_ty(ccx: &CrateCtxt,
});
require_same_types(tcx, None, false, start_span, start_t, se_ty,
|| fmt!("start function expects type: `%s`", ppaux::ty_to_str(ccx.tcx, se_ty)));
|| format!("start function expects type: `{}`", ppaux::ty_to_str(ccx.tcx, se_ty)));
}
_ => {
tcx.sess.span_bug(start_span,
fmt!("start has a non-function type: found `%s`",
format!("start has a non-function type: found `{}`",
ppaux::ty_to_str(tcx, start_t)));
}
}

View File

@ -235,7 +235,7 @@ impl RegionScope for TypeRscope {
None => {
// if the self region is used, region parameterization should
// have inferred that this type is RP
fail!("region parameterization should have inferred that \
fail2!("region parameterization should have inferred that \
this type is RP");
}
Some(ref region_parameterization) => {

View File

@ -135,7 +135,7 @@ pub fn version(argv0: &str) {
}
pub fn usage(argv0: &str) {
let message = fmt!("Usage: %s [OPTIONS] INPUT", argv0);
let message = format!("Usage: {} [OPTIONS] INPUT", argv0);
println!("{}\n\
Additional help:
-W help Print 'lint' options and default settings
@ -388,7 +388,7 @@ pub fn monitor(f: ~fn(@diagnostic::Emitter)) {
}
}
// Fail so the process returns a failure code
fail!();
fail2!();
}
}
}

View File

@ -29,9 +29,9 @@ pub fn time<T, U>(do_it: bool, what: ~str, u: U, f: &fn(U) -> T) -> T {
pub fn indent<R>(op: &fn() -> R) -> R {
// Use in conjunction with the log post-processor like `src/etc/indenter`
// to make debug output more readable.
debug!(">>");
debug2!(">>");
let r = op();
debug!("<< (Result = %?)", r);
debug2!("<< (Result = {:?})", r);
r
}
@ -40,7 +40,7 @@ pub struct _indenter {
}
impl Drop for _indenter {
fn drop(&mut self) { debug!("<<"); }
fn drop(&mut self) { debug2!("<<"); }
}
pub fn _indenter(_i: ()) -> _indenter {
@ -50,7 +50,7 @@ pub fn _indenter(_i: ()) -> _indenter {
}
pub fn indenter() -> _indenter {
debug!(">>");
debug2!(">>");
_indenter(())
}
@ -120,7 +120,7 @@ pub fn local_rhs_span(l: @ast::Local, def: Span) -> Span {
pub fn pluralize(n: uint, s: ~str) -> ~str {
if n == 1 { s }
else { fmt!("%ss", s) }
else { format!("{}s", s) }
}
// A set of node IDs (used to keep track of which node IDs are for statements)

View File

@ -51,11 +51,11 @@ pub fn note_and_explain_region(cx: ctxt,
(ref str, Some(span)) => {
cx.sess.span_note(
span,
fmt!("%s%s%s", prefix, (*str), suffix));
format!("{}{}{}", prefix, (*str), suffix));
}
(ref str, None) => {
cx.sess.note(
fmt!("%s%s%s", prefix, (*str), suffix));
format!("{}{}{}", prefix, (*str), suffix));
}
}
}
@ -98,7 +98,7 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region)
}
Some(_) | None => {
// this really should not happen
(fmt!("unknown scope: %d. Please report a bug.", node_id),
(format!("unknown scope: {}. Please report a bug.", node_id),
None)
}
}
@ -106,21 +106,21 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region)
re_free(ref fr) => {
let prefix = match fr.bound_region {
br_anon(idx) => fmt!("the anonymous lifetime #%u defined on",
br_anon(idx) => format!("the anonymous lifetime \\#{} defined on",
idx + 1),
br_fresh(_) => fmt!("an anonymous lifetime defined on"),
_ => fmt!("the lifetime %s as defined on",
br_fresh(_) => format!("an anonymous lifetime defined on"),
_ => format!("the lifetime {} as defined on",
bound_region_ptr_to_str(cx, fr.bound_region))
};
match cx.items.find(&fr.scope_id) {
Some(&ast_map::node_block(ref blk)) => {
let (msg, opt_span) = explain_span(cx, "block", blk.span);
(fmt!("%s %s", prefix, msg), opt_span)
(format!("{} {}", prefix, msg), opt_span)
}
Some(_) | None => {
// this really should not happen
(fmt!("%s node %d", prefix, fr.scope_id), None)
(format!("{} node {}", prefix, fr.scope_id), None)
}
}
}
@ -132,7 +132,7 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region)
// I believe these cases should not occur (except when debugging,
// perhaps)
re_infer(_) | re_bound(_) => {
(fmt!("lifetime %?", region), None)
(format!("lifetime {:?}", region), None)
}
};
@ -140,7 +140,7 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region)
-> (~str, Option<Span>)
{
let lo = cx.sess.codemap.lookup_char_pos_adj(span.lo);
(fmt!("the %s at %u:%u", heading,
(format!("the {} at {}:{}", heading,
lo.line, lo.col.to_uint()), Some(span))
}
}
@ -154,11 +154,11 @@ pub fn bound_region_to_str(cx: ctxt,
br: bound_region) -> ~str {
let space_str = if space { " " } else { "" };
if cx.sess.verbose() { return fmt!("%s%?%s", prefix, br, space_str); }
if cx.sess.verbose() { return format!("{}{:?}{}", prefix, br, space_str); }
match br {
br_named(id) => fmt!("%s'%s%s", prefix, cx.sess.str_of(id), space_str),
br_self => fmt!("%s'self%s", prefix, space_str),
br_named(id) => format!("{}'{}{}", prefix, cx.sess.str_of(id), space_str),
br_self => format!("{}'self{}", prefix, space_str),
br_anon(_) => prefix.to_str(),
br_fresh(_) => prefix.to_str(),
br_cap_avoid(_, br) => bound_region_to_str(cx, prefix, space, *br)
@ -168,37 +168,37 @@ pub fn bound_region_to_str(cx: ctxt,
pub fn re_scope_id_to_str(cx: ctxt, node_id: ast::NodeId) -> ~str {
match cx.items.find(&node_id) {
Some(&ast_map::node_block(ref blk)) => {
fmt!("<block at %s>",
format!("<block at {}>",
cx.sess.codemap.span_to_str(blk.span))
}
Some(&ast_map::node_expr(expr)) => {
match expr.node {
ast::ExprCall(*) => {
fmt!("<call at %s>",
format!("<call at {}>",
cx.sess.codemap.span_to_str(expr.span))
}
ast::ExprMatch(*) => {
fmt!("<match at %s>",
format!("<match at {}>",
cx.sess.codemap.span_to_str(expr.span))
}
ast::ExprAssignOp(*) |
ast::ExprUnary(*) |
ast::ExprBinary(*) |
ast::ExprIndex(*) => {
fmt!("<method at %s>",
format!("<method at {}>",
cx.sess.codemap.span_to_str(expr.span))
}
_ => {
fmt!("<expression at %s>",
format!("<expression at {}>",
cx.sess.codemap.span_to_str(expr.span))
}
}
}
None => {
fmt!("<unknown-%d>", node_id)
format!("<unknown-{}>", node_id)
}
_ => { cx.sess.bug(
fmt!("re_scope refers to %s",
format!("re_scope refers to {}",
ast_map::node_id_to_str(cx.items, node_id,
token::get_ident_interner()))) }
}
@ -215,7 +215,7 @@ pub fn region_to_str(cx: ctxt, prefix: &str, space: bool, region: Region) -> ~st
let space_str = if space { " " } else { "" };
if cx.sess.verbose() {
return fmt!("%s%?%s", prefix, region, space_str);
return format!("{}{:?}{}", prefix, region, space_str);
}
// These printouts are concise. They do not contain all the information
@ -230,8 +230,8 @@ pub fn region_to_str(cx: ctxt, prefix: &str, space: bool, region: Region) -> ~st
bound_region_to_str(cx, prefix, space, br)
}
re_infer(ReVar(_)) => prefix.to_str(),
re_static => fmt!("%s'static%s", prefix, space_str),
re_empty => fmt!("%s'<empty>%s", prefix, space_str)
re_static => format!("{}'static{}", prefix, space_str),
re_empty => format!("{}'<empty>{}", prefix, space_str)
}
}
@ -248,12 +248,12 @@ pub fn mt_to_str(cx: ctxt, m: &mt) -> ~str {
pub fn mt_to_str_wrapped(cx: ctxt, before: &str, m: &mt, after: &str) -> ~str {
let mstr = mutability_to_str(m.mutbl);
return fmt!("%s%s%s%s", mstr, before, ty_to_str(cx, m.ty), after);
return format!("{}{}{}{}", mstr, before, ty_to_str(cx, m.ty), after);
}
pub fn vstore_to_str(cx: ctxt, vs: ty::vstore) -> ~str {
match vs {
ty::vstore_fixed(n) => fmt!("%u", n),
ty::vstore_fixed(n) => format!("{}", n),
ty::vstore_uniq => ~"~",
ty::vstore_box => ~"@",
ty::vstore_slice(r) => region_ptr_to_str(cx, r)
@ -271,17 +271,17 @@ pub fn trait_store_to_str(cx: ctxt, s: ty::TraitStore) -> ~str {
pub fn vstore_ty_to_str(cx: ctxt, mt: &mt, vs: ty::vstore) -> ~str {
match vs {
ty::vstore_fixed(_) => {
fmt!("[%s, .. %s]", mt_to_str(cx, mt), vstore_to_str(cx, vs))
format!("[{}, .. {}]", mt_to_str(cx, mt), vstore_to_str(cx, vs))
}
_ => {
fmt!("%s%s", vstore_to_str(cx, vs), mt_to_str_wrapped(cx, "[", mt, "]"))
format!("{}{}", vstore_to_str(cx, vs), mt_to_str_wrapped(cx, "[", mt, "]"))
}
}
}
pub fn vec_map_to_str<T>(ts: &[T], f: &fn(t: &T) -> ~str) -> ~str {
let tstrs = ts.map(f);
fmt!("[%s]", tstrs.connect(", "))
format!("[{}]", tstrs.connect(", "))
}
pub fn tys_to_str(cx: ctxt, ts: &[t]) -> ~str {
@ -289,7 +289,7 @@ pub fn tys_to_str(cx: ctxt, ts: &[t]) -> ~str {
}
pub fn fn_sig_to_str(cx: ctxt, typ: &ty::FnSig) -> ~str {
fmt!("fn%s -> %s",
format!("fn{} -> {}",
tys_to_str(cx, typ.inputs.map(|a| *a)),
ty_to_str(cx, typ.output))
}
@ -397,7 +397,7 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
&m.fty.sig) + ";"
}
fn field_to_str(cx: ctxt, f: field) -> ~str {
return fmt!("%s: %s", cx.sess.str_of(f.ident), mt_to_str(cx, &f.mt));
return format!("{}: {}", cx.sess.str_of(f.ident), mt_to_str(cx, &f.mt));
}
// if there is an id, print that instead of the structural type:
@ -425,7 +425,7 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
ty_rptr(r, ref tm) => {
region_ptr_to_str(cx, r) + mt_to_str(cx, tm)
}
ty_unboxed_vec(ref tm) => { fmt!("unboxed_vec<%s>", mt_to_str(cx, tm)) }
ty_unboxed_vec(ref tm) => { format!("unboxed_vec<{}>", mt_to_str(cx, tm)) }
ty_type => ~"type",
ty_tup(ref elems) => {
let strs = elems.map(|elem| ty_to_str(cx, *elem));
@ -447,10 +447,10 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
}
None => {
// This should not happen...
fmt!("BUG[%?]", id)
format!("BUG[{:?}]", id)
}
};
if !cx.sess.verbose() { ident } else { fmt!("%s:%?", ident, did) }
if !cx.sess.verbose() { ident } else { format!("{}:{:?}", ident, did) }
}
ty_self(*) => ~"Self",
ty_enum(did, ref substs) | ty_struct(did, ref substs) => {
@ -464,13 +464,13 @@ pub fn ty_to_str(cx: ctxt, typ: t) -> ~str {
let ty = parameterized(cx, base, &substs.regions, substs.tps);
let bound_sep = if bounds.is_empty() { "" } else { ":" };
let bound_str = bounds.repr(cx);
fmt!("%s%s%s%s%s", trait_store_to_str(cx, s), mutability_to_str(mutbl), ty,
format!("{}{}{}{}{}", trait_store_to_str(cx, s), mutability_to_str(mutbl), ty,
bound_sep, bound_str)
}
ty_evec(ref mt, vs) => {
vstore_ty_to_str(cx, mt, vs)
}
ty_estr(vs) => fmt!("%s%s", vstore_to_str(cx, vs), "str"),
ty_estr(vs) => format!("{}{}", vstore_to_str(cx, vs), "str"),
ty_opaque_box => ~"@?",
ty_opaque_closure_ptr(ast::BorrowedSigil) => ~"&closure",
ty_opaque_closure_ptr(ast::ManagedSigil) => ~"@closure",
@ -498,9 +498,9 @@ pub fn parameterized(cx: ctxt,
}
if strs.len() > 0u {
fmt!("%s<%s>", base, strs.connect(","))
format!("{}<{}>", base, strs.connect(","))
} else {
fmt!("%s", base)
format!("{}", base)
}
}
@ -514,7 +514,7 @@ impl<T:Repr> Repr for Option<T> {
fn repr(&self, tcx: ctxt) -> ~str {
match self {
&None => ~"None",
&Some(ref t) => fmt!("Some(%s)", t.repr(tcx))
&Some(ref t) => format!("Some({})", t.repr(tcx))
}
}
}
@ -560,7 +560,7 @@ impl<T:Repr> Repr for ~[T] {
impl Repr for ty::TypeParameterDef {
fn repr(&self, tcx: ctxt) -> ~str {
fmt!("TypeParameterDef {%?, bounds: %s}",
format!("TypeParameterDef \\{{:?}, bounds: {}\\}",
self.def_id, self.bounds.repr(tcx))
}
}
@ -573,7 +573,7 @@ impl Repr for ty::t {
impl Repr for ty::substs {
fn repr(&self, tcx: ctxt) -> ~str {
fmt!("substs(regions=%s, self_ty=%s, tps=%s)",
format!("substs(regions={}, self_ty={}, tps={})",
self.regions.repr(tcx),
self.self_ty.repr(tcx),
self.tps.repr(tcx))
@ -615,7 +615,7 @@ impl Repr for ty::TraitRef {
impl Repr for ast::Expr {
fn repr(&self, tcx: ctxt) -> ~str {
fmt!("expr(%d: %s)",
format!("expr({}: {})",
self.id,
pprust::expr_to_str(self, tcx.sess.intr()))
}
@ -623,7 +623,7 @@ impl Repr for ast::Expr {
impl Repr for ast::Pat {
fn repr(&self, tcx: ctxt) -> ~str {
fmt!("pat(%d: %s)",
format!("pat({}: {})",
self.id,
pprust::pat_to_str(self, tcx.sess.intr()))
}
@ -654,18 +654,18 @@ impl Repr for ast::DefId {
Some(&ast_map::node_trait_method(*)) |
Some(&ast_map::node_variant(*)) |
Some(&ast_map::node_struct_ctor(*)) => {
return fmt!("%?:%s", *self, ty::item_path_str(tcx, *self));
return format!("{:?}:{}", *self, ty::item_path_str(tcx, *self));
}
_ => {}
}
}
return fmt!("%?", *self);
return format!("{:?}", *self);
}
}
impl Repr for ty::ty_param_bounds_and_ty {
fn repr(&self, tcx: ctxt) -> ~str {
fmt!("ty_param_bounds_and_ty {generics: %s, ty: %s}",
format!("ty_param_bounds_and_ty \\{generics: {}, ty: {}\\}",
self.generics.repr(tcx),
self.ty.repr(tcx))
}
@ -673,7 +673,7 @@ impl Repr for ty::ty_param_bounds_and_ty {
impl Repr for ty::Generics {
fn repr(&self, tcx: ctxt) -> ~str {
fmt!("Generics {type_param_defs: %s, region_param: %?}",
format!("Generics \\{type_param_defs: {}, region_param: {:?}\\}",
self.type_param_defs.repr(tcx),
self.region_param)
}
@ -681,8 +681,8 @@ impl Repr for ty::Generics {
impl Repr for ty::Method {
fn repr(&self, tcx: ctxt) -> ~str {
fmt!("method {ident: %s, generics: %s, transformed_self_ty: %s, \
fty: %s, explicit_self: %s, vis: %s, def_id: %s}",
format!("method \\{ident: {}, generics: {}, transformed_self_ty: {}, \
fty: {}, explicit_self: {}, vis: {}, def_id: {}\\}",
self.ident.repr(tcx),
self.generics.repr(tcx),
self.transformed_self_ty.repr(tcx),
@ -701,19 +701,19 @@ impl Repr for ast::Ident {
impl Repr for ast::explicit_self_ {
fn repr(&self, _tcx: ctxt) -> ~str {
fmt!("%?", *self)
format!("{:?}", *self)
}
}
impl Repr for ast::visibility {
fn repr(&self, _tcx: ctxt) -> ~str {
fmt!("%?", *self)
format!("{:?}", *self)
}
}
impl Repr for ty::BareFnTy {
fn repr(&self, tcx: ctxt) -> ~str {
fmt!("BareFnTy {purity: %?, abis: %s, sig: %s}",
format!("BareFnTy \\{purity: {:?}, abis: {}, sig: {}\\}",
self.purity,
self.abis.to_str(),
self.sig.repr(tcx))
@ -728,9 +728,9 @@ impl Repr for ty::FnSig {
impl Repr for typeck::method_map_entry {
fn repr(&self, tcx: ctxt) -> ~str {
fmt!("method_map_entry {self_arg: %s, \
explicit_self: %s, \
origin: %s}",
format!("method_map_entry \\{self_arg: {}, \
explicit_self: {}, \
origin: {}\\}",
self.self_ty.repr(tcx),
self.explicit_self.repr(tcx),
self.origin.repr(tcx))
@ -741,7 +741,7 @@ impl Repr for typeck::method_origin {
fn repr(&self, tcx: ctxt) -> ~str {
match self {
&typeck::method_static(def_id) => {
fmt!("method_static(%s)", def_id.repr(tcx))
format!("method_static({})", def_id.repr(tcx))
}
&typeck::method_param(ref p) => {
p.repr(tcx)
@ -755,7 +755,7 @@ impl Repr for typeck::method_origin {
impl Repr for typeck::method_param {
fn repr(&self, tcx: ctxt) -> ~str {
fmt!("method_param(%s,%?,%?,%?)",
format!("method_param({},{:?},{:?},{:?})",
self.trait_id.repr(tcx),
self.method_num,
self.param_num,
@ -765,7 +765,7 @@ impl Repr for typeck::method_param {
impl Repr for typeck::method_object {
fn repr(&self, tcx: ctxt) -> ~str {
fmt!("method_object(%s,%?,%?)",
format!("method_object({},{:?},{:?})",
self.trait_id.repr(tcx),
self.method_num,
self.real_index)
@ -775,7 +775,7 @@ impl Repr for typeck::method_object {
impl Repr for ty::RegionVid {
fn repr(&self, _tcx: ctxt) -> ~str {
fmt!("%?", *self)
format!("{:?}", *self)
}
}
@ -784,7 +784,7 @@ impl Repr for ty::TraitStore {
match self {
&ty::BoxTraitStore => ~"@Trait",
&ty::UniqTraitStore => ~"~Trait",
&ty::RegionTraitStore(r) => fmt!("&%s Trait", r.repr(tcx))
&ty::RegionTraitStore(r) => format!("&{} Trait", r.repr(tcx))
}
}
}
@ -807,7 +807,7 @@ impl Repr for ast_map::path_elt {
impl Repr for ty::BuiltinBound {
fn repr(&self, _tcx: ctxt) -> ~str {
fmt!("%?", *self)
format!("{:?}", *self)
}
}