syntax: Remove usage of fmt!

Alex Crichton 2013-09-27 21:01:58 -07:00
parent 7e709bfd0d
commit af3b132285
34 changed files with 342 additions and 342 deletions
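This commit mechanically swaps the printf-style `fmt!` directives (`%s`, `%u`, `%?`) for the brace-based `format!` family (`{}`, `{:?}`), and moves `fail!`/`debug!`/`error!` call sites onto their `fail2!`/`debug2!`/`error2!` counterparts, which take the new format strings. For orientation, here is a minimal sketch of the directive mapping in present-day Rust (the 2013 macros themselves no longer exist, so this is not code from the commit):

```rust
fn main() {
    let name = "AbiSet";
    let count = 3u32;

    // old: fmt!("%s has %u items", name, count)
    let s = format!("{} has {} items", name, count);

    // old: fmt!("%?", ...), the catch-all debug directive
    let d = format!("{:?}", (name, count));

    println!("{}", s);
    println!("{}", d);
}
```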


@ -505,7 +505,7 @@ impl<T:Freeze + Send> RWArc<T> {
let inner = x.unwrap(); let inner = x.unwrap();
let RWArcInner { failed: failed, data: data, _ } = inner; let RWArcInner { failed: failed, data: data, _ } = inner;
if failed { if failed {
fail2!(~"Can't unwrap poisoned RWArc - another task failed inside!") fail2!("Can't unwrap poisoned RWArc - another task failed inside!")
} }
data data
} }


@ -125,7 +125,7 @@ pub fn test_main(args: &[~str], tests: ~[TestDescAndFn]) {
let opts = let opts =
match parse_opts(args) { match parse_opts(args) {
Some(Ok(o)) => o, Some(Ok(o)) => o,
Some(Err(msg)) => fail2!(msg), Some(Err(msg)) => fail2!("{}", msg),
None => return None => return
}; };
if !run_tests_console(&opts, tests) { fail2!("Some tests failed"); } if !run_tests_console(&opts, tests) { fail2!("Some tests failed"); }
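The `fail2!(msg)` to `fail2!("{}", msg)` change above is not cosmetic: the new macros treat their first argument as a format string, so a runtime value has to be routed through a `"{}"` literal. Present-day Rust enforces the same rule for `panic!`; a small sketch:

```rust
fn bail(msg: &str) -> ! {
    // `panic!(msg)` with a non-literal argument is rejected in the 2021
    // edition for the same reason fail2!(msg) was rewritten here: the
    // first argument is a format string, so runtime text goes through "{}".
    panic!("{}", msg);
}

fn main() {
    bail("Some tests failed");
}
```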


@ -221,7 +221,7 @@ impl AbiSet {
let data = abi.data(); let data = abi.data();
for other_abi in abis.slice(0, i).iter() { for other_abi in abis.slice(0, i).iter() {
let other_data = other_abi.data(); let other_data = other_abi.data();
debug!("abis=(%?,%?) datas=(%?,%?)", debug2!("abis=({:?},{:?}) datas=({:?},{:?})",
abi, data.abi_arch, abi, data.abi_arch,
other_abi, other_data.abi_arch); other_abi, other_data.abi_arch);
match (&data.abi_arch, &other_data.abi_arch) { match (&data.abi_arch, &other_data.abi_arch) {
@ -273,7 +273,7 @@ impl ToStr for AbiSet {
strs.push(abi.data().name); strs.push(abi.data().name);
true true
}; };
fmt!("\"%s\"", strs.connect(" ")) format!("\"{}\"", strs.connect(" "))
} }
} }
@ -306,7 +306,7 @@ fn cannot_combine(n: Abi, m: Abi) {
(m == a && n == b)); (m == a && n == b));
} }
None => { None => {
fail!("Invalid match not detected"); fail2!("Invalid match not detected");
} }
} }
} }
@ -318,7 +318,7 @@ fn can_combine(n: Abi, m: Abi) {
set.add(m); set.add(m);
match set.check_valid() { match set.check_valid() {
Some((_, _)) => { Some((_, _)) => {
fail!("Valid match declared invalid"); fail2!("Valid match declared invalid");
} }
None => {} None => {}
} }
@ -367,7 +367,7 @@ fn abi_to_str_c_aaps() {
let mut set = AbiSet::empty(); let mut set = AbiSet::empty();
set.add(Aapcs); set.add(Aapcs);
set.add(C); set.add(C);
debug!("set = %s", set.to_str()); debug2!("set = {}", set.to_str());
assert!(set.to_str() == ~"\"aapcs C\""); assert!(set.to_str() == ~"\"aapcs C\"");
} }
@ -375,7 +375,7 @@ fn abi_to_str_c_aaps() {
fn abi_to_str_rust() { fn abi_to_str_rust() {
let mut set = AbiSet::empty(); let mut set = AbiSet::empty();
set.add(Rust); set.add(Rust);
debug!("set = %s", set.to_str()); debug2!("set = {}", set.to_str());
assert!(set.to_str() == ~"\"Rust\""); assert!(set.to_str() == ~"\"Rust\"");
} }
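The hunks above trade `%s` for `{}` and `%?` for `{:?}`. In today's Rust those directives dispatch to the `Display` and `Debug` traits; a minimal sketch with an illustrative type (not taken from the commit):

```rust
use std::fmt;

struct Abi(&'static str);

impl fmt::Display for Abi {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0) // rendered by `{}` (old `%s`)
    }
}

impl fmt::Debug for Abi {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Abi({:?})", self.0) // rendered by `{:?}` (old `%?`)
    }
}

fn main() {
    let abi = Abi("aapcs");
    println!("set = {}", abi);
    println!("set = {:?}", abi);
}
```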


@ -47,8 +47,8 @@ impl Eq for Ident {
// if it should be non-hygienic (most things are), just compare the // if it should be non-hygienic (most things are), just compare the
// 'name' fields of the idents. Or, even better, replace the idents // 'name' fields of the idents. Or, even better, replace the idents
// with Name's. // with Name's.
fail!(fmt!("not allowed to compare these idents: %?, %?. Probably \ fail2!("not allowed to compare these idents: {:?}, {:?}.
related to issue #6993", self, other)); Probably related to issue \\#6993", self, other);
} }
} }
fn ne(&self, other: &Ident) -> bool { fn ne(&self, other: &Ident) -> bool {


@ -66,7 +66,7 @@ pub fn path_ident_to_str(p: &path, i: Ident, itr: @ident_interner) -> ~str {
if p.is_empty() { if p.is_empty() {
itr.get(i.name).to_owned() itr.get(i.name).to_owned()
} else { } else {
fmt!("%s::%s", path_to_str(*p, itr), itr.get(i.name)) format!("{}::{}", path_to_str(*p, itr), itr.get(i.name))
} }
} }
@ -96,7 +96,7 @@ pub fn impl_pretty_name(trait_ref: &Option<trait_ref>,
// XXX: this dollar sign is actually a relic of being one of the // XXX: this dollar sign is actually a relic of being one of the
// very few valid symbol names on unix. These kinds of // very few valid symbol names on unix. These kinds of
// details shouldn't be exposed way up here in the ast. // details shouldn't be exposed way up here in the ast.
let s = fmt!("%s$%s", let s = format!("{}${}",
itr.get(trait_ref.path.segments.last().identifier.name), itr.get(trait_ref.path.segments.last().identifier.name),
itr.get(ty_ident.name)); itr.get(ty_ident.name));
path_pretty_name(Ident::new(itr.gensym(s)), hash) path_pretty_name(Ident::new(itr.gensym(s)), hash)
@ -185,7 +185,7 @@ impl Ctx {
item, item,
p)); p));
} }
_ => fail!("struct def parent wasn't an item") _ => fail2!("struct def parent wasn't an item")
} }
} }
} }
@ -426,7 +426,7 @@ pub fn map_decoded_item(diag: @mut span_handler,
pub fn node_id_to_str(map: map, id: NodeId, itr: @ident_interner) -> ~str { pub fn node_id_to_str(map: map, id: NodeId, itr: @ident_interner) -> ~str {
match map.find(&id) { match map.find(&id) {
None => { None => {
fmt!("unknown node (id=%d)", id) format!("unknown node (id={})", id)
} }
Some(&node_item(item, path)) => { Some(&node_item(item, path)) => {
let path_str = path_ident_to_str(path, item.ident, itr); let path_str = path_ident_to_str(path, item.ident, itr);
@ -442,46 +442,46 @@ pub fn node_id_to_str(map: map, id: NodeId, itr: @ident_interner) -> ~str {
item_impl(*) => ~"impl", item_impl(*) => ~"impl",
item_mac(*) => ~"macro" item_mac(*) => ~"macro"
}; };
fmt!("%s %s (id=%?)", item_str, path_str, id) format!("{} {} (id={})", item_str, path_str, id)
} }
Some(&node_foreign_item(item, abi, _, path)) => { Some(&node_foreign_item(item, abi, _, path)) => {
fmt!("foreign item %s with abi %? (id=%?)", format!("foreign item {} with abi {:?} (id={})",
path_ident_to_str(path, item.ident, itr), abi, id) path_ident_to_str(path, item.ident, itr), abi, id)
} }
Some(&node_method(m, _, path)) => { Some(&node_method(m, _, path)) => {
fmt!("method %s in %s (id=%?)", format!("method {} in {} (id={})",
itr.get(m.ident.name), path_to_str(*path, itr), id) itr.get(m.ident.name), path_to_str(*path, itr), id)
} }
Some(&node_trait_method(ref tm, _, path)) => { Some(&node_trait_method(ref tm, _, path)) => {
let m = ast_util::trait_method_to_ty_method(&**tm); let m = ast_util::trait_method_to_ty_method(&**tm);
fmt!("method %s in %s (id=%?)", format!("method {} in {} (id={})",
itr.get(m.ident.name), path_to_str(*path, itr), id) itr.get(m.ident.name), path_to_str(*path, itr), id)
} }
Some(&node_variant(ref variant, _, path)) => { Some(&node_variant(ref variant, _, path)) => {
fmt!("variant %s in %s (id=%?)", format!("variant {} in {} (id={})",
itr.get(variant.node.name.name), path_to_str(*path, itr), id) itr.get(variant.node.name.name), path_to_str(*path, itr), id)
} }
Some(&node_expr(expr)) => { Some(&node_expr(expr)) => {
fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id) format!("expr {} (id={})", pprust::expr_to_str(expr, itr), id)
} }
Some(&node_callee_scope(expr)) => { Some(&node_callee_scope(expr)) => {
fmt!("callee_scope %s (id=%?)", pprust::expr_to_str(expr, itr), id) format!("callee_scope {} (id={})", pprust::expr_to_str(expr, itr), id)
} }
Some(&node_stmt(stmt)) => { Some(&node_stmt(stmt)) => {
fmt!("stmt %s (id=%?)", format!("stmt {} (id={})",
pprust::stmt_to_str(stmt, itr), id) pprust::stmt_to_str(stmt, itr), id)
} }
Some(&node_arg(pat)) => { Some(&node_arg(pat)) => {
fmt!("arg %s (id=%?)", pprust::pat_to_str(pat, itr), id) format!("arg {} (id={})", pprust::pat_to_str(pat, itr), id)
} }
Some(&node_local(ident)) => { Some(&node_local(ident)) => {
fmt!("local (id=%?, name=%s)", id, itr.get(ident.name)) format!("local (id={}, name={})", id, itr.get(ident.name))
} }
Some(&node_block(ref block)) => { Some(&node_block(ref block)) => {
fmt!("block %s (id=%?)", pprust::block_to_str(block, itr), id) format!("block {} (id={})", pprust::block_to_str(block, itr), id)
} }
Some(&node_struct_ctor(_, _, path)) => { Some(&node_struct_ctor(_, _, path)) => {
fmt!("struct_ctor %s (id=%?)", path_to_str(*path, itr), id) format!("struct_ctor {} (id={})", path_to_str(*path, itr), id)
} }
} }
} }
@ -491,6 +491,6 @@ pub fn node_item_query<Result>(items: map, id: NodeId,
error_msg: ~str) -> Result { error_msg: ~str) -> Result {
match items.find(&id) { match items.find(&id) {
Some(&node_item(it, _)) => query(it), Some(&node_item(it, _)) => query(it),
_ => fail!(error_msg) _ => fail2!("{}", error_msg)
} }
} }
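`format!` returns an owned string, so the name-building code above (`path_ident_to_str`, `node_id_to_str`) translates one-for-one. A stripped-down sketch of the qualified-name construction, with hypothetical helpers standing in for the interner-backed originals:

```rust
fn path_to_str(segments: &[&str]) -> String {
    segments.join("::")
}

fn path_ident_to_str(path: &[&str], ident: &str) -> String {
    if path.is_empty() {
        ident.to_owned()
    } else {
        // old: fmt!("%s::%s", path_to_str(*p, itr), itr.get(i.name))
        format!("{}::{}", path_to_str(path), ident)
    }
}

fn main() {
    assert_eq!(path_ident_to_str(&["syntax", "ast_map"], "node_id_to_str"),
               "syntax::ast_map::node_id_to_str");
    assert_eq!(path_ident_to_str(&[], "main"), "main");
}
```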


@ -45,7 +45,7 @@ pub fn stmt_id(s: &Stmt) -> NodeId {
StmtDecl(_, id) => id, StmtDecl(_, id) => id,
StmtExpr(_, id) => id, StmtExpr(_, id) => id,
StmtSemi(_, id) => id, StmtSemi(_, id) => id,
StmtMac(*) => fail!("attempted to analyze unexpanded stmt") StmtMac(*) => fail2!("attempted to analyze unexpanded stmt")
} }
} }
@ -72,7 +72,7 @@ pub fn def_id_of_def(d: Def) -> DefId {
local_def(id) local_def(id)
} }
DefPrimTy(_) => fail!() DefPrimTy(_) => fail2!()
} }
} }
@ -756,7 +756,7 @@ pub fn new_mark_internal(m:Mrk, tail:SyntaxContext,table:&mut SCTable)
} }
true => { true => {
match table.mark_memo.find(&key) { match table.mark_memo.find(&key) {
None => fail!(~"internal error: key disappeared 2013042901"), None => fail2!("internal error: key disappeared 2013042901"),
Some(idxptr) => {*idxptr} Some(idxptr) => {*idxptr}
} }
} }
@ -783,7 +783,7 @@ pub fn new_rename_internal(id:Ident, to:Name, tail:SyntaxContext, table: &mut SC
} }
true => { true => {
match table.rename_memo.find(&key) { match table.rename_memo.find(&key) {
None => fail!(~"internal error: key disappeared 2013042902"), None => fail2!("internal error: key disappeared 2013042902"),
Some(idxptr) => {*idxptr} Some(idxptr) => {*idxptr}
} }
} }
@ -816,9 +816,9 @@ pub fn get_sctable() -> @mut SCTable {
/// print out an SCTable for debugging /// print out an SCTable for debugging
pub fn display_sctable(table : &SCTable) { pub fn display_sctable(table : &SCTable) {
error!("SC table:"); error2!("SC table:");
for (idx,val) in table.table.iter().enumerate() { for (idx,val) in table.table.iter().enumerate() {
error!("%4u : %?",idx,val); error2!("{:4u} : {:?}",idx,val);
} }
} }
@ -880,7 +880,7 @@ pub fn resolve_internal(id : Ident,
resolvedthis resolvedthis
} }
} }
IllegalCtxt() => fail!(~"expected resolvable context, got IllegalCtxt") IllegalCtxt() => fail2!("expected resolvable context, got IllegalCtxt")
} }
}; };
resolve_table.insert(key,resolved); resolve_table.insert(key,resolved);
@ -921,7 +921,7 @@ pub fn marksof(ctxt: SyntaxContext, stopname: Name, table: &SCTable) -> ~[Mrk] {
loopvar = tl; loopvar = tl;
} }
} }
IllegalCtxt => fail!(~"expected resolvable context, got IllegalCtxt") IllegalCtxt => fail2!("expected resolvable context, got IllegalCtxt")
} }
} }
} }
@ -932,7 +932,7 @@ pub fn mtwt_outer_mark(ctxt: SyntaxContext) -> Mrk {
let sctable = get_sctable(); let sctable = get_sctable();
match sctable.table[ctxt] { match sctable.table[ctxt] {
ast::Mark(mrk,_) => mrk, ast::Mark(mrk,_) => mrk,
_ => fail!("can't retrieve outer mark when outside is not a mark") _ => fail2!("can't retrieve outer mark when outside is not a mark")
} }
} }
@ -1064,7 +1064,7 @@ mod test {
sc = tail; sc = tail;
loop; loop;
} }
IllegalCtxt => fail!("expected resolvable context, got IllegalCtxt") IllegalCtxt => fail2!("expected resolvable context, got IllegalCtxt")
} }
} }
} }
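`display_sctable` above pads the index with the old `%4u` directive, written `{:4u}` in the 2013 brace syntax. The integer-type suffix has since been dropped; a present-day sketch of the same dump over made-up table entries:

```rust
fn main() {
    // Width 4; integers are right-aligned by default, so `{:4}` today
    // does what `%4u` / `{:4u}` did here.
    let table = ["EmptyCtxt", "Mark(1, 0)", "Rename(a, 2, 1)"];
    for (idx, val) in table.iter().enumerate() {
        println!("{:4} : {:?}", idx, val);
    }
}
```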


@ -168,17 +168,17 @@ pub fn mk_sugared_doc_attr(text: @str, lo: BytePos, hi: BytePos) -> Attribute {
/// span included in the `==` comparison a plain MetaItem. /// span included in the `==` comparison a plain MetaItem.
pub fn contains(haystack: &[@ast::MetaItem], pub fn contains(haystack: &[@ast::MetaItem],
needle: @ast::MetaItem) -> bool { needle: @ast::MetaItem) -> bool {
debug!("attr::contains (name=%s)", needle.name()); debug2!("attr::contains (name={})", needle.name());
do haystack.iter().any |item| { do haystack.iter().any |item| {
debug!(" testing: %s", item.name()); debug2!(" testing: {}", item.name());
item.node == needle.node item.node == needle.node
} }
} }
pub fn contains_name<AM: AttrMetaMethods>(metas: &[AM], name: &str) -> bool { pub fn contains_name<AM: AttrMetaMethods>(metas: &[AM], name: &str) -> bool {
debug!("attr::contains_name (name=%s)", name); debug2!("attr::contains_name (name={})", name);
do metas.iter().any |item| { do metas.iter().any |item| {
debug!(" testing: %s", item.name()); debug2!(" testing: {}", item.name());
name == item.name() name == item.name()
} }
} }
@ -279,23 +279,23 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
// this would be much nicer as a chain of iterator adaptors, but // this would be much nicer as a chain of iterator adaptors, but
// this doesn't work. // this doesn't work.
let some_cfg_matches = do metas.any |mi| { let some_cfg_matches = do metas.any |mi| {
debug!("testing name: %s", mi.name()); debug2!("testing name: {}", mi.name());
if "cfg" == mi.name() { // it is a #[cfg()] attribute if "cfg" == mi.name() { // it is a #[cfg()] attribute
debug!("is cfg"); debug2!("is cfg");
no_cfgs = false; no_cfgs = false;
// only #[cfg(...)] ones are understood. // only #[cfg(...)] ones are understood.
match mi.meta_item_list() { match mi.meta_item_list() {
Some(cfg_meta) => { Some(cfg_meta) => {
debug!("is cfg(...)"); debug2!("is cfg(...)");
do cfg_meta.iter().all |cfg_mi| { do cfg_meta.iter().all |cfg_mi| {
debug!("cfg(%s[...])", cfg_mi.name()); debug2!("cfg({}[...])", cfg_mi.name());
match cfg_mi.node { match cfg_mi.node {
ast::MetaList(s, ref not_cfgs) if "not" == s => { ast::MetaList(s, ref not_cfgs) if "not" == s => {
debug!("not!"); debug2!("not!");
// inside #[cfg(not(...))], so these need to all // inside #[cfg(not(...))], so these need to all
// not match. // not match.
not_cfgs.iter().all(|mi| { not_cfgs.iter().all(|mi| {
debug!("cfg(not(%s[...]))", mi.name()); debug2!("cfg(not({}[...]))", mi.name());
!contains(cfg, *mi) !contains(cfg, *mi)
}) })
} }
@ -309,7 +309,7 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
false false
} }
}; };
debug!("test_cfg (no_cfgs=%?, some_cfg_matches=%?)", no_cfgs, some_cfg_matches); debug2!("test_cfg (no_cfgs={}, some_cfg_matches={})", no_cfgs, some_cfg_matches);
no_cfgs || some_cfg_matches no_cfgs || some_cfg_matches
} }
@ -359,7 +359,7 @@ pub fn require_unique_names(diagnostic: @mut span_handler,
if !set.insert(name) { if !set.insert(name) {
diagnostic.span_fatal(meta.span, diagnostic.span_fatal(meta.span,
fmt!("duplicate meta item `%s`", name)); format!("duplicate meta item `{}`", name));
} }
} }
} }


@ -290,7 +290,7 @@ impl CodeMap {
pub fn mk_substr_filename(&self, sp: Span) -> ~str { pub fn mk_substr_filename(&self, sp: Span) -> ~str {
let pos = self.lookup_char_pos(sp.lo); let pos = self.lookup_char_pos(sp.lo);
return fmt!("<%s:%u:%u>", pos.file.name, return format!("<{}:{}:{}>", pos.file.name,
pos.line, pos.col.to_uint()); pos.line, pos.col.to_uint());
} }
@ -336,7 +336,7 @@ impl CodeMap {
let lo = self.lookup_char_pos_adj(sp.lo); let lo = self.lookup_char_pos_adj(sp.lo);
let hi = self.lookup_char_pos_adj(sp.hi); let hi = self.lookup_char_pos_adj(sp.hi);
return fmt!("%s:%u:%u: %u:%u", lo.filename, return format!("{}:{}:{}: {}:{}", lo.filename,
lo.line, lo.col.to_uint(), hi.line, hi.col.to_uint()) lo.line, lo.col.to_uint(), hi.line, hi.col.to_uint())
} }
@ -374,7 +374,7 @@ impl CodeMap {
for fm in self.files.iter() { if filename == fm.name { return *fm; } } for fm in self.files.iter() { if filename == fm.name { return *fm; } }
//XXjdm the following triggers a mismatched type bug //XXjdm the following triggers a mismatched type bug
// (or expected function, found _|_) // (or expected function, found _|_)
fail!(); // ("asking for " + filename + " which we don't know about"); fail2!(); // ("asking for " + filename + " which we don't know about");
} }
} }
@ -393,7 +393,7 @@ impl CodeMap {
} }
} }
if (a >= len) { if (a >= len) {
fail!("position %u does not resolve to a source location", pos.to_uint()) fail2!("position {} does not resolve to a source location", pos.to_uint())
} }
return a; return a;
@ -419,11 +419,11 @@ impl CodeMap {
let chpos = self.bytepos_to_local_charpos(pos); let chpos = self.bytepos_to_local_charpos(pos);
let linebpos = f.lines[a]; let linebpos = f.lines[a];
let linechpos = self.bytepos_to_local_charpos(linebpos); let linechpos = self.bytepos_to_local_charpos(linebpos);
debug!("codemap: byte pos %? is on the line at byte pos %?", debug2!("codemap: byte pos {:?} is on the line at byte pos {:?}",
pos, linebpos); pos, linebpos);
debug!("codemap: char pos %? is on the line at char pos %?", debug2!("codemap: char pos {:?} is on the line at char pos {:?}",
chpos, linechpos); chpos, linechpos);
debug!("codemap: byte is on line: %?", line); debug2!("codemap: byte is on line: {:?}", line);
assert!(chpos >= linechpos); assert!(chpos >= linechpos);
return Loc { return Loc {
file: f, file: f,
@ -435,7 +435,7 @@ impl CodeMap {
fn span_to_str_no_adj(&self, sp: Span) -> ~str { fn span_to_str_no_adj(&self, sp: Span) -> ~str {
let lo = self.lookup_char_pos(sp.lo); let lo = self.lookup_char_pos(sp.lo);
let hi = self.lookup_char_pos(sp.hi); let hi = self.lookup_char_pos(sp.hi);
return fmt!("%s:%u:%u: %u:%u", lo.file.name, return format!("{}:{}:{}: {}:{}", lo.file.name,
lo.line, lo.col.to_uint(), hi.line, hi.col.to_uint()) lo.line, lo.col.to_uint(), hi.line, hi.col.to_uint())
} }
@ -450,7 +450,7 @@ impl CodeMap {
// Converts an absolute BytePos to a CharPos relative to the file it is // Converts an absolute BytePos to a CharPos relative to the file it is
// located in // located in
fn bytepos_to_local_charpos(&self, bpos: BytePos) -> CharPos { fn bytepos_to_local_charpos(&self, bpos: BytePos) -> CharPos {
debug!("codemap: converting %? to char pos", bpos); debug2!("codemap: converting {:?} to char pos", bpos);
let idx = self.lookup_filemap_idx(bpos); let idx = self.lookup_filemap_idx(bpos);
let map = self.files[idx]; let map = self.files[idx];
@ -458,7 +458,7 @@ impl CodeMap {
let mut total_extra_bytes = 0; let mut total_extra_bytes = 0;
for mbc in map.multibyte_chars.iter() { for mbc in map.multibyte_chars.iter() {
debug!("codemap: %?-byte char at %?", mbc.bytes, mbc.pos); debug2!("codemap: {:?}-byte char at {:?}", mbc.bytes, mbc.pos);
if mbc.pos < bpos { if mbc.pos < bpos {
total_extra_bytes += mbc.bytes; total_extra_bytes += mbc.bytes;
// We should never see a byte position in the middle of a // We should never see a byte position in the middle of a


@ -69,7 +69,7 @@ struct CodemapT {
impl span_handler for CodemapT { impl span_handler for CodemapT {
fn span_fatal(@mut self, sp: Span, msg: &str) -> ! { fn span_fatal(@mut self, sp: Span, msg: &str) -> ! {
self.handler.emit(Some((self.cm, sp)), msg, fatal); self.handler.emit(Some((self.cm, sp)), msg, fatal);
fail!(); fail2!();
} }
fn span_err(@mut self, sp: Span, msg: &str) { fn span_err(@mut self, sp: Span, msg: &str) {
self.handler.emit(Some((self.cm, sp)), msg, error); self.handler.emit(Some((self.cm, sp)), msg, error);
@ -95,7 +95,7 @@ impl span_handler for CodemapT {
impl handler for HandlerT { impl handler for HandlerT {
fn fatal(@mut self, msg: &str) -> ! { fn fatal(@mut self, msg: &str) -> ! {
self.emit.emit(None, msg, fatal); self.emit.emit(None, msg, fatal);
fail!(); fail2!();
} }
fn err(@mut self, msg: &str) { fn err(@mut self, msg: &str) {
self.emit.emit(None, msg, error); self.emit.emit(None, msg, error);
@ -116,7 +116,7 @@ impl handler for HandlerT {
0u => return, 0u => return,
1u => s = ~"aborting due to previous error", 1u => s = ~"aborting due to previous error",
_ => { _ => {
s = fmt!("aborting due to %u previous errors", s = format!("aborting due to {} previous errors",
self.err_count); self.err_count);
} }
} }
@ -143,7 +143,7 @@ impl handler for HandlerT {
} }
pub fn ice_msg(msg: &str) -> ~str { pub fn ice_msg(msg: &str) -> ~str {
fmt!("internal compiler error: %s", msg) format!("internal compiler error: {}", msg)
} }
pub fn mk_span_handler(handler: @mut handler, cm: @codemap::CodeMap) pub fn mk_span_handler(handler: @mut handler, cm: @codemap::CodeMap)
@ -228,12 +228,12 @@ fn print_diagnostic(topic: &str, lvl: level, msg: &str) {
let stderr = io::stderr(); let stderr = io::stderr();
if !topic.is_empty() { if !topic.is_empty() {
stderr.write_str(fmt!("%s ", topic)); stderr.write_str(format!("{} ", topic));
} }
print_maybe_styled(fmt!("%s: ", diagnosticstr(lvl)), print_maybe_styled(format!("{}: ", diagnosticstr(lvl)),
term::attr::ForegroundColor(diagnosticcolor(lvl))); term::attr::ForegroundColor(diagnosticcolor(lvl)));
print_maybe_styled(fmt!("%s\n", msg), term::attr::Bold); print_maybe_styled(format!("{}\n", msg), term::attr::Bold);
} }
pub struct DefaultEmitter; pub struct DefaultEmitter;
@ -273,13 +273,13 @@ fn highlight_lines(cm: @codemap::CodeMap,
} }
// Print the offending lines // Print the offending lines
for line in display_lines.iter() { for line in display_lines.iter() {
io::stderr().write_str(fmt!("%s:%u ", fm.name, *line + 1u)); io::stderr().write_str(format!("{}:{} ", fm.name, *line + 1u));
let s = fm.get_line(*line as int) + "\n"; let s = fm.get_line(*line as int) + "\n";
io::stderr().write_str(s); io::stderr().write_str(s);
} }
if elided { if elided {
let last_line = display_lines[display_lines.len() - 1u]; let last_line = display_lines[display_lines.len() - 1u];
let s = fmt!("%s:%u ", fm.name, last_line + 1u); let s = format!("{}:{} ", fm.name, last_line + 1u);
let mut indent = s.len(); let mut indent = s.len();
let mut out = ~""; let mut out = ~"";
while indent > 0u { while indent > 0u {
@ -339,7 +339,7 @@ fn print_macro_backtrace(cm: @codemap::CodeMap, sp: Span) {
for ei in sp.expn_info.iter() { for ei in sp.expn_info.iter() {
let ss = ei.callee.span.map_default(~"", |span| cm.span_to_str(*span)); let ss = ei.callee.span.map_default(~"", |span| cm.span_to_str(*span));
print_diagnostic(ss, note, print_diagnostic(ss, note,
fmt!("in expansion of %s!", ei.callee.name)); format!("in expansion of {}!", ei.callee.name));
let ss = cm.span_to_str(ei.call_site); let ss = cm.span_to_str(ei.call_site);
print_diagnostic(ss, note, "expansion site"); print_diagnostic(ss, note, "expansion site");
print_macro_backtrace(cm, ei.call_site); print_macro_backtrace(cm, ei.call_site);


@ -111,7 +111,7 @@ pub fn expand_asm(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree])
p.eat(&token::COMMA); p.eat(&token::COMMA);
} }
let clob = fmt!("~{%s}", p.parse_str()); let clob = format!("~\\{{}\\}", p.parse_str());
clobs.push(clob); clobs.push(clob);
} }
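The asm clobber string above needs literal braces around the register name, which the 2013 `format!` escapes as `\{` and `\}` (hence the doubled backslashes in the new source line). Current Rust spells the same escape by doubling the braces; a sketch:

```rust
fn main() {
    let reg = "eax";
    // `{{` and `}}` emit literal braces; the inner `{}` interpolates reg.
    let clob = format!("~{{{}}}", reg);
    assert_eq!(clob, "~{eax}");
    println!("{}", clob);
}
```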


@ -423,7 +423,7 @@ pub fn expr_to_str(cx: @ExtCtxt, expr: @ast::Expr, err_msg: &str) -> @str {
pub fn check_zero_tts(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree], pub fn check_zero_tts(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree],
name: &str) { name: &str) {
if tts.len() != 0 { if tts.len() != 0 {
cx.span_fatal(sp, fmt!("%s takes no arguments", name)); cx.span_fatal(sp, format!("{} takes no arguments", name));
} }
} }
@ -433,12 +433,12 @@ pub fn get_single_str_from_tts(cx: @ExtCtxt,
name: &str) name: &str)
-> @str { -> @str {
if tts.len() != 1 { if tts.len() != 1 {
cx.span_fatal(sp, fmt!("%s takes 1 argument.", name)); cx.span_fatal(sp, format!("{} takes 1 argument.", name));
} }
match tts[0] { match tts[0] {
ast::tt_tok(_, token::LIT_STR(ident)) => cx.str_of(ident), ast::tt_tok(_, token::LIT_STR(ident)) => cx.str_of(ident),
_ => cx.span_fatal(sp, fmt!("%s requires a string.", name)), _ => cx.span_fatal(sp, format!("{} requires a string.", name)),
} }
} }
@ -539,11 +539,11 @@ impl <K: Eq + Hash + IterBytes + 'static, V: 'static> MapChain<K,V>{
// names? I think not. // names? I think not.
// delaying implementing this.... // delaying implementing this....
pub fn each_key (&self, _f: &fn (&K)->bool) { pub fn each_key (&self, _f: &fn (&K)->bool) {
fail!("unimplemented 2013-02-15T10:01"); fail2!("unimplemented 2013-02-15T10:01");
} }
pub fn each_value (&self, _f: &fn (&V) -> bool) { pub fn each_value (&self, _f: &fn (&V) -> bool) {
fail!("unimplemented 2013-02-15T10:02"); fail2!("unimplemented 2013-02-15T10:02");
} }
// Returns a copy of the value that the name maps to. // Returns a copy of the value that the name maps to.
@ -586,7 +586,7 @@ impl <K: Eq + Hash + IterBytes + 'static, V: 'static> MapChain<K,V>{
if satisfies_pred(map,&n,pred) { if satisfies_pred(map,&n,pred) {
map.insert(key,ext); map.insert(key,ext);
} else { } else {
fail!(~"expected map chain containing satisfying frame") fail2!("expected map chain containing satisfying frame")
} }
}, },
ConsMapChain (~ref mut map, rest) => { ConsMapChain (~ref mut map, rest) => {


@ -86,10 +86,10 @@ fn cs_clone(
all_fields = af; all_fields = af;
}, },
EnumNonMatching(*) => cx.span_bug(span, EnumNonMatching(*) => cx.span_bug(span,
fmt!("Non-matching enum variants in `deriving(%s)`", format!("Non-matching enum variants in `deriving({})`",
name)), name)),
StaticEnum(*) | StaticStruct(*) => cx.span_bug(span, StaticEnum(*) | StaticStruct(*) => cx.span_bug(span,
fmt!("Static method in `deriving(%s)`", format!("Static method in `deriving({})`",
name)) name))
} }
@ -105,7 +105,7 @@ fn cs_clone(
let ident = match o_id { let ident = match o_id {
Some(i) => i, Some(i) => i,
None => cx.span_bug(span, None => cx.span_bug(span,
fmt!("unnamed field in normal struct in `deriving(%s)`", format!("unnamed field in normal struct in `deriving({})`",
name)) name))
}; };
cx.field_imm(span, ident, subcall(self_f)) cx.field_imm(span, ident, subcall(self_f))


@ -84,7 +84,7 @@ fn decodable_substructure(cx: @ExtCtxt, span: Span,
} else { } else {
let mut fields = vec::with_capacity(n); let mut fields = vec::with_capacity(n);
for i in range(0, n) { for i in range(0, n) {
fields.push(getarg(fmt!("_field%u", i).to_managed(), i)); fields.push(getarg(format!("_field{}", i).to_managed(), i));
} }
cx.expr_call_ident(span, substr.type_ident, fields) cx.expr_call_ident(span, substr.type_ident, fields)
} }


@ -125,7 +125,7 @@ fn encodable_substructure(cx: @ExtCtxt, span: Span,
for (i, f) in fields.iter().enumerate() { for (i, f) in fields.iter().enumerate() {
let (name, val) = match *f { let (name, val) = match *f {
(Some(id), e, _) => (cx.str_of(id), e), (Some(id), e, _) => (cx.str_of(id), e),
(None, e, _) => (fmt!("_field%u", i).to_managed(), e) (None, e, _) => (format!("_field{}", i).to_managed(), e)
}; };
let enc = cx.expr_method_call(span, val, encode, ~[blkencoder]); let enc = cx.expr_method_call(span, val, encode, ~[blkencoder]);
let lambda = cx.lambda_expr_1(span, enc, blkarg); let lambda = cx.lambda_expr_1(span, enc, blkarg);


@ -487,7 +487,7 @@ impl<'self> MethodDef<'self> {
for (i, ty) in self.args.iter().enumerate() { for (i, ty) in self.args.iter().enumerate() {
let ast_ty = ty.to_ty(cx, span, type_ident, generics); let ast_ty = ty.to_ty(cx, span, type_ident, generics);
let ident = cx.ident_of(fmt!("__arg_%u", i)); let ident = cx.ident_of(format!("__arg_{}", i));
arg_tys.push((ident, ast_ty)); arg_tys.push((ident, ast_ty));
let arg_expr = cx.expr_ident(span, ident); let arg_expr = cx.expr_ident(span, ident);
@ -582,7 +582,8 @@ impl<'self> MethodDef<'self> {
for i in range(0u, self_args.len()) { for i in range(0u, self_args.len()) {
let (pat, ident_expr) = create_struct_pattern(cx, span, let (pat, ident_expr) = create_struct_pattern(cx, span,
type_ident, struct_def, type_ident, struct_def,
fmt!("__self_%u", i), ast::MutImmutable); format!("__self_{}", i),
ast::MutImmutable);
patterns.push(pat); patterns.push(pat);
raw_fields.push(ident_expr); raw_fields.push(ident_expr);
} }
@ -767,7 +768,7 @@ impl<'self> MethodDef<'self> {
let current_match_str = if match_count == 0 { let current_match_str = if match_count == 0 {
~"__self" ~"__self"
} else { } else {
fmt!("__arg_%u", match_count) format!("__arg_{}", match_count)
}; };
let mut arms = ~[]; let mut arms = ~[];
@ -948,7 +949,7 @@ fn create_struct_pattern(cx: @ExtCtxt,
} }
}; };
let path = cx.path_ident(span, let path = cx.path_ident(span,
cx.ident_of(fmt!("%s_%u", prefix, i))); cx.ident_of(format!("{}_{}", prefix, i)));
paths.push(path.clone()); paths.push(path.clone());
ident_expr.push((opt_id, cx.expr_path(path))); ident_expr.push((opt_id, cx.expr_path(path)));
} }
@ -993,7 +994,7 @@ fn create_enum_variant_pattern(cx: @ExtCtxt,
let mut ident_expr = ~[]; let mut ident_expr = ~[];
for i in range(0u, variant_args.len()) { for i in range(0u, variant_args.len()) {
let path = cx.path_ident(span, let path = cx.path_ident(span,
cx.ident_of(fmt!("%s_%u", prefix, i))); cx.ident_of(format!("{}_{}", prefix, i)));
paths.push(path.clone()); paths.push(path.clone());
ident_expr.push((None, cx.expr_path(path))); ident_expr.push((None, cx.expr_path(path)));
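The deriving code above manufactures synthetic binding names (`__arg_0`, `__self_1`, and so on) by formatting a prefix and an index before interning the result as an identifier. A stripped-down sketch of that pattern, with a hypothetical helper name:

```rust
fn synthetic_names(prefix: &str, n: usize) -> Vec<String> {
    // old: cx.ident_of(fmt!("%s_%u", prefix, i))
    (0..n).map(|i| format!("{}_{}", prefix, i)).collect()
}

fn main() {
    assert_eq!(synthetic_names("__arg", 3),
               vec!["__arg_0", "__arg_1", "__arg_2"]);
}
```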


@ -101,8 +101,8 @@ pub fn expand_meta_deriving(cx: @ExtCtxt,
"Default" => expand!(default::expand_deriving_default), "Default" => expand!(default::expand_deriving_default),
ref tname => { ref tname => {
cx.span_err(titem.span, fmt!("unknown \ cx.span_err(titem.span, format!("unknown \
`deriving` trait: `%s`", *tname)); `deriving` trait: `{}`", *tname));
in_items in_items
} }
} }


@ -43,7 +43,7 @@ pub fn expand_env(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree])
let var = expr_to_str(cx, exprs[0], "expected string literal"); let var = expr_to_str(cx, exprs[0], "expected string literal");
let msg = match exprs.len() { let msg = match exprs.len() {
1 => fmt!("Environment variable %s not defined", var).to_managed(), 1 => format!("Environment variable {} not defined", var).to_managed(),
2 => expr_to_str(cx, exprs[1], "expected string literal"), 2 => expr_to_str(cx, exprs[1], "expected string literal"),
_ => cx.span_fatal(sp, "env! takes 1 or 2 arguments") _ => cx.span_fatal(sp, "env! takes 1 or 2 arguments")
}; };


@ -51,7 +51,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
if (pth.segments.len() > 1u) { if (pth.segments.len() > 1u) {
cx.span_fatal( cx.span_fatal(
pth.span, pth.span,
fmt!("expected macro name without module \ format!("expected macro name without module \
separators")); separators"));
} }
let extname = &pth.segments[0].identifier; let extname = &pth.segments[0].identifier;
@ -61,7 +61,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
None => { None => {
cx.span_fatal( cx.span_fatal(
pth.span, pth.span,
fmt!("macro undefined: '%s'", extnamestr)) format!("macro undefined: '{}'", extnamestr))
} }
Some(@SE(NormalTT(expandfun, exp_span))) => { Some(@SE(NormalTT(expandfun, exp_span))) => {
cx.bt_push(ExpnInfo { cx.bt_push(ExpnInfo {
@ -92,8 +92,8 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
_ => { _ => {
cx.span_fatal( cx.span_fatal(
pth.span, pth.span,
fmt!( format!(
"non-expr macro in expr pos: %s", "non-expr macro in expr pos: {}",
extnamestr extnamestr
) )
) )
@ -119,7 +119,7 @@ pub fn expand_expr(extsbox: @mut SyntaxEnv,
_ => { _ => {
cx.span_fatal( cx.span_fatal(
pth.span, pth.span,
fmt!("'%s' is not a tt-style macro", extnamestr) format!("'{}' is not a tt-style macro", extnamestr)
) )
} }
} }
@ -353,13 +353,13 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv,
let fm = fresh_mark(); let fm = fresh_mark();
let expanded = match (*extsbox).find(&extname.name) { let expanded = match (*extsbox).find(&extname.name) {
None => cx.span_fatal(pth.span, None => cx.span_fatal(pth.span,
fmt!("macro undefined: '%s!'", extnamestr)), format!("macro undefined: '{}!'", extnamestr)),
Some(@SE(NormalTT(expander, span))) => { Some(@SE(NormalTT(expander, span))) => {
if it.ident.name != parse::token::special_idents::invalid.name { if it.ident.name != parse::token::special_idents::invalid.name {
cx.span_fatal(pth.span, cx.span_fatal(pth.span,
fmt!("macro %s! expects no ident argument, \ format!("macro {}! expects no ident argument, \
given '%s'", extnamestr, given '{}'", extnamestr,
ident_to_str(&it.ident))); ident_to_str(&it.ident)));
} }
cx.bt_push(ExpnInfo { cx.bt_push(ExpnInfo {
@ -377,7 +377,7 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv,
Some(@SE(IdentTT(expander, span))) => { Some(@SE(IdentTT(expander, span))) => {
if it.ident.name == parse::token::special_idents::invalid.name { if it.ident.name == parse::token::special_idents::invalid.name {
cx.span_fatal(pth.span, cx.span_fatal(pth.span,
fmt!("macro %s! expects an ident argument", format!("macro {}! expects an ident argument",
extnamestr)); extnamestr));
} }
cx.bt_push(ExpnInfo { cx.bt_push(ExpnInfo {
@ -393,7 +393,7 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv,
expander.expand(cx, it.span, it.ident, marked_tts, marked_ctxt) expander.expand(cx, it.span, it.ident, marked_tts, marked_ctxt)
} }
_ => cx.span_fatal( _ => cx.span_fatal(
it.span, fmt!("%s! is not legal in item position", extnamestr)) it.span, format!("{}! is not legal in item position", extnamestr))
}; };
let maybe_it = match expanded { let maybe_it = match expanded {
@ -402,7 +402,7 @@ pub fn expand_item_mac(extsbox: @mut SyntaxEnv,
.and_then(|i| fld.fold_item(i)) .and_then(|i| fld.fold_item(i))
} }
MRExpr(_) => { MRExpr(_) => {
cx.span_fatal(pth.span, fmt!("expr macro in item position: %s", extnamestr)) cx.span_fatal(pth.span, format!("expr macro in item position: {}", extnamestr))
} }
MRAny(any_macro) => { MRAny(any_macro) => {
any_macro.make_item() any_macro.make_item()
@ -429,8 +429,8 @@ fn insert_macro(exts: SyntaxEnv, name: ast::Name, transformer: @Transformer) {
match t { match t {
&@BlockInfo(BlockInfo {macros_escape:false,_}) => true, &@BlockInfo(BlockInfo {macros_escape:false,_}) => true,
&@BlockInfo(BlockInfo {_}) => false, &@BlockInfo(BlockInfo {_}) => false,
_ => fail!(fmt!("special identifier %? was bound to a non-BlockInfo", _ => fail2!("special identifier {:?} was bound to a non-BlockInfo",
special_block_name)) special_block_name)
} }
}; };
exts.insert_into_frame(name,transformer,intern(special_block_name), exts.insert_into_frame(name,transformer,intern(special_block_name),
@ -463,7 +463,7 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
let extnamestr = ident_to_str(extname); let extnamestr = ident_to_str(extname);
let fully_expanded: @ast::Stmt = match (*extsbox).find(&extname.name) { let fully_expanded: @ast::Stmt = match (*extsbox).find(&extname.name) {
None => { None => {
cx.span_fatal(pth.span, fmt!("macro undefined: '%s'", extnamestr)) cx.span_fatal(pth.span, format!("macro undefined: '{}'", extnamestr))
} }
Some(@SE(NormalTT(expandfun, exp_span))) => { Some(@SE(NormalTT(expandfun, exp_span))) => {
@ -496,7 +496,7 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
MRAny(any_macro) => any_macro.make_stmt(), MRAny(any_macro) => any_macro.make_stmt(),
_ => cx.span_fatal( _ => cx.span_fatal(
pth.span, pth.span,
fmt!("non-stmt macro in stmt pos: %s", extnamestr)) format!("non-stmt macro in stmt pos: {}", extnamestr))
}; };
let marked_after = mark_stmt(expanded,fm); let marked_after = mark_stmt(expanded,fm);
@ -521,7 +521,7 @@ pub fn expand_stmt(extsbox: @mut SyntaxEnv,
_ => { _ => {
cx.span_fatal(pth.span, cx.span_fatal(pth.span,
fmt!("'%s' is not a tt-style macro", extnamestr)) format!("'{}' is not a tt-style macro", extnamestr))
} }
}; };
@ -741,7 +741,7 @@ pub fn expand_block_elts(exts: SyntaxEnv, b: &Block, fld: &MacroExpander)
fn mustbesome<T>(val : Option<T>) -> T { fn mustbesome<T>(val : Option<T>) -> T {
match val { match val {
Some(v) => v, Some(v) => v,
None => fail!("rename_fold returned None") None => fail2!("rename_fold returned None")
} }
} }
@ -749,8 +749,8 @@ fn mustbesome<T>(val : Option<T>) -> T {
fn get_block_info(exts : SyntaxEnv) -> BlockInfo { fn get_block_info(exts : SyntaxEnv) -> BlockInfo {
match exts.find_in_topmost_frame(&intern(special_block_name)) { match exts.find_in_topmost_frame(&intern(special_block_name)) {
Some(@BlockInfo(bi)) => bi, Some(@BlockInfo(bi)) => bi,
_ => fail!(fmt!("special identifier %? was bound to a non-BlockInfo", _ => fail2!("special identifier {:?} was bound to a non-BlockInfo",
@" block")) @" block")
} }
} }
@ -782,7 +782,7 @@ pub fn renames_to_fold(renames: @mut ~[(ast::Ident,ast::Name)]) -> @ast_fold {
fn apply_pending_renames(folder : @ast_fold, stmt : ast::Stmt) -> @ast::Stmt { fn apply_pending_renames(folder : @ast_fold, stmt : ast::Stmt) -> @ast::Stmt {
match folder.fold_stmt(&stmt) { match folder.fold_stmt(&stmt) {
Some(s) => s, Some(s) => s,
None => fail!(fmt!("renaming of stmt produced None")) None => fail2!("renaming of stmt produced None")
} }
} }
@ -813,7 +813,7 @@ pub fn std_macros() -> @str {
mod fmt_extension { mod fmt_extension {
#[macro_escape]; #[macro_escape];
macro_rules! fmt(($($arg:tt)*) => (oldfmt!($($arg)*))) macro_rules! fmt(($($arg:tt)*) => (oldformat!($($arg)*)))
macro_rules! log( macro_rules! log(
($lvl:expr, $arg:expr) => ({ ($lvl:expr, $arg:expr) => ({
@ -821,7 +821,7 @@ pub fn std_macros() -> @str {
if lvl <= __log_level() { if lvl <= __log_level() {
format_args!(|args| { format_args!(|args| {
::std::logging::log(lvl, args) ::std::logging::log(lvl, args)
}, \"{}\", fmt!(\"%?\", $arg)) }, \"{}\", format!(\"{:?}\", $arg))
} }
}); });
($lvl:expr, $($arg:expr),+) => ({ ($lvl:expr, $($arg:expr),+) => ({
@ -829,7 +829,7 @@ pub fn std_macros() -> @str {
if lvl <= __log_level() { if lvl <= __log_level() {
format_args!(|args| { format_args!(|args| {
::std::logging::log(lvl, args) ::std::logging::log(lvl, args)
}, \"{}\", fmt!($($arg),+)) }, \"{}\", format!($($arg),+))
} }
}) })
) )
@ -842,13 +842,13 @@ pub fn std_macros() -> @str {
macro_rules! fail( macro_rules! fail(
() => ( () => (
fail!(\"explicit failure\") fail2!(\"explicit failure\")
); );
($msg:expr) => ( ($msg:expr) => (
::std::sys::FailWithCause::fail_with($msg, file!(), line!()) ::std::sys::FailWithCause::fail_with($msg, file!(), line!())
); );
($( $arg:expr ),+) => ( ($( $arg:expr ),+) => (
::std::sys::FailWithCause::fail_with(fmt!( $($arg),+ ), file!(), line!()) ::std::sys::FailWithCause::fail_with(format!( $($arg),+ ), file!(), line!())
) )
) )
} }
@ -896,7 +896,7 @@ pub fn std_macros() -> @str {
}; };
($cond:expr, $( $arg:expr ),+) => { ($cond:expr, $( $arg:expr ),+) => {
if !$cond { if !$cond {
::std::sys::FailWithCause::fail_with(fmt!( $($arg),+ ), file!(), line!()) ::std::sys::FailWithCause::fail_with(format!( $($arg),+ ), file!(), line!())
} }
} }
) )
@ -1164,7 +1164,7 @@ pub fn inject_std_macros(parse_sess: @mut parse::ParseSess,
~[], ~[],
parse_sess) { parse_sess) {
Some(item) => item, Some(item) => item,
None => fail!("expected core macros to parse correctly") None => fail2!("expected core macros to parse correctly")
}; };
let injector = @Injector { let injector = @Injector {
@ -1422,16 +1422,16 @@ mod test {
use util::parser_testing::{string_to_pat, string_to_tts, strs_to_idents}; use util::parser_testing::{string_to_pat, string_to_tts, strs_to_idents};
use visit; use visit;
// make sure that fail! is present // make sure that fail2! is present
#[test] fn fail_exists_test () { #[test] fn fail_exists_test () {
let src = @"fn main() { fail!(\"something appropriately gloomy\");}"; let src = @"fn main() { fail2!(\"something appropriately gloomy\");}";
let sess = parse::new_parse_sess(None); let sess = parse::new_parse_sess(None);
let crate_ast = parse::parse_crate_from_source_str( let crate_ast = parse::parse_crate_from_source_str(
@"<test>", @"<test>",
src, src,
~[],sess); ~[],sess);
let crate_ast = inject_std_macros(sess, ~[], crate_ast); let crate_ast = inject_std_macros(sess, ~[], crate_ast);
// don't bother with striping, doesn't affect fail!. // don't bother with striping, doesn't affect fail2!.
expand_crate(sess,~[],crate_ast); expand_crate(sess,~[],crate_ast);
} }
@ -1489,7 +1489,7 @@ mod test {
cfg,~[],sess); cfg,~[],sess);
match item_ast { match item_ast {
Some(_) => (), // success Some(_) => (), // success
None => fail!("expected this to parse") None => fail2!("expected this to parse")
} }
} }
@ -1528,7 +1528,7 @@ mod test {
let marked_once_ctxt = let marked_once_ctxt =
match marked_once[0] { match marked_once[0] {
ast::tt_tok(_,token::IDENT(id,_)) => id.ctxt, ast::tt_tok(_,token::IDENT(id,_)) => id.ctxt,
_ => fail!(fmt!("unexpected shape for marked tts: %?",marked_once[0])) _ => fail2!(format!("unexpected shape for marked tts: {:?}",marked_once[0]))
}; };
assert_eq!(mtwt_marksof(marked_once_ctxt,invalid_name),~[fm]); assert_eq!(mtwt_marksof(marked_once_ctxt,invalid_name),~[fm]);
let remarked = mtwt_cancel_outer_mark(marked_once,marked_once_ctxt); let remarked = mtwt_cancel_outer_mark(marked_once,marked_once_ctxt);
@ -1536,7 +1536,7 @@ mod test {
match remarked[0] { match remarked[0] {
ast::tt_tok(_,token::IDENT(id,_)) => ast::tt_tok(_,token::IDENT(id,_)) =>
assert_eq!(mtwt_marksof(id.ctxt,invalid_name),~[]), assert_eq!(mtwt_marksof(id.ctxt,invalid_name),~[]),
_ => fail!(fmt!("unexpected shape for marked tts: %?",remarked[0])) _ => fail2!(format!("unexpected shape for marked tts: {:?}",remarked[0]))
} }
} }
@ -1583,7 +1583,7 @@ mod test {
//fn expand_and_resolve(crate_str: @str) -> ast::crate { //fn expand_and_resolve(crate_str: @str) -> ast::crate {
//let expanded_ast = expand_crate_str(crate_str); //let expanded_ast = expand_crate_str(crate_str);
// std::io::println(fmt!("expanded: %?\n",expanded_ast)); // std::io::println(format!("expanded: {:?}\n",expanded_ast));
//mtwt_resolve_crate(expanded_ast) //mtwt_resolve_crate(expanded_ast)
//} //}
//fn expand_and_resolve_and_pretty_print (crate_str : @str) -> ~str { //fn expand_and_resolve_and_pretty_print (crate_str : @str) -> ~str {
@ -1693,8 +1693,8 @@ mod test {
invalid_name); invalid_name);
if (!(varref_name==binding_name)){ if (!(varref_name==binding_name)){
std::io::println("uh oh, should match but doesn't:"); std::io::println("uh oh, should match but doesn't:");
std::io::println(fmt!("varref: %?",varref)); std::io::println(format!("varref: {:?}",varref));
std::io::println(fmt!("binding: %?", bindings[binding_idx])); std::io::println(format!("binding: {:?}", bindings[binding_idx]));
ast_util::display_sctable(get_sctable()); ast_util::display_sctable(get_sctable());
} }
assert_eq!(varref_name,binding_name); assert_eq!(varref_name,binding_name);
@ -1712,12 +1712,12 @@ mod test {
println!("text of test case: \"{}\"", teststr); println!("text of test case: \"{}\"", teststr);
println!(""); println!("");
println!("uh oh, matches but shouldn't:"); println!("uh oh, matches but shouldn't:");
std::io::println(fmt!("varref: %?",varref)); std::io::println(format!("varref: {:?}",varref));
// good lord, you can't make a path with 0 segments, can you? // good lord, you can't make a path with 0 segments, can you?
println!("varref's first segment's uint: {}, and string: \"{}\"", println!("varref's first segment's uint: {}, and string: \"{}\"",
varref.segments[0].identifier.name, varref.segments[0].identifier.name,
ident_to_str(&varref.segments[0].identifier)); ident_to_str(&varref.segments[0].identifier));
std::io::println(fmt!("binding: %?", bindings[binding_idx])); std::io::println(format!("binding: {:?}", bindings[binding_idx]));
ast_util::display_sctable(get_sctable()); ast_util::display_sctable(get_sctable());
} }
assert!(!fail); assert!(!fail);
@ -1727,7 +1727,7 @@ mod test {
} }
#[test] fn fmt_in_macro_used_inside_module_macro() { #[test] fn fmt_in_macro_used_inside_module_macro() {
let crate_str = @"macro_rules! fmt_wrap(($b:expr)=>(fmt!(\"left: %?\", $b))) let crate_str = @"macro_rules! fmt_wrap(($b:expr)=>($b.to_str()))
macro_rules! foo_module (() => (mod generated { fn a() { let xx = 147; fmt_wrap!(xx);}})) macro_rules! foo_module (() => (mod generated { fn a() { let xx = 147; fmt_wrap!(xx);}}))
foo_module!() foo_module!()
"; ";
@ -1739,7 +1739,7 @@ foo_module!()
bindings.iter().filter(|b|{@"xx" == (ident_to_str(*b))}).collect(); bindings.iter().filter(|b|{@"xx" == (ident_to_str(*b))}).collect();
let cxbind = match cxbinds { let cxbind = match cxbinds {
[b] => b, [b] => b,
_ => fail!("expected just one binding for ext_cx") _ => fail2!("expected just one binding for ext_cx")
}; };
let resolved_binding = mtwt_resolve(*cxbind); let resolved_binding = mtwt_resolve(*cxbind);
// find all the xx varrefs: // find all the xx varrefs:
@ -1751,15 +1751,16 @@ foo_module!()
}).enumerate() { }).enumerate() {
if (mtwt_resolve(v.segments[0].identifier) != resolved_binding) { if (mtwt_resolve(v.segments[0].identifier) != resolved_binding) {
std::io::println("uh oh, xx binding didn't match xx varref:"); std::io::println("uh oh, xx binding didn't match xx varref:");
std::io::println(fmt!("this is xx varref # %?",idx)); std::io::println(format!("this is xx varref \\# {:?}",idx));
std::io::println(fmt!("binding: %?",cxbind)); std::io::println(format!("binding: {:?}",cxbind));
std::io::println(fmt!("resolves to: %?",resolved_binding)); std::io::println(format!("resolves to: {:?}",resolved_binding));
std::io::println(fmt!("varref: %?",v.segments[0].identifier)); std::io::println(format!("varref: {:?}",v.segments[0].identifier));
std::io::println(fmt!("resolves to: %?",mtwt_resolve(v.segments[0].identifier))); std::io::println(format!("resolves to: {:?}",
mtwt_resolve(v.segments[0].identifier)));
let table = get_sctable(); let table = get_sctable();
std::io::println("SC table:"); std::io::println("SC table:");
for (idx,val) in table.table.iter().enumerate() { for (idx,val) in table.table.iter().enumerate() {
std::io::println(fmt!("%4u : %?",idx,val)); std::io::println(format!("{:4u} : {:?}",idx,val));
} }
} }
assert_eq!(mtwt_resolve(v.segments[0].identifier),resolved_binding); assert_eq!(mtwt_resolve(v.segments[0].identifier),resolved_binding);
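The rewritten `log` arm in `std_macros` above hands a closure to the old `format_args!` so the message is only rendered when the level check passes. Current Rust expresses the same deferred-formatting idea by passing `fmt::Arguments` straight to the sink; a rough sketch with a hypothetical logger:

```rust
use std::fmt;

// Hypothetical sink: it receives already-captured arguments and decides
// whether to render them, so filtered-out calls never build the string.
fn log(level: u32, args: fmt::Arguments<'_>) {
    if level <= 2 {
        println!("[{}] {}", level, args);
    }
}

fn main() {
    let name = "fmt";
    log(1, format_args!("in expansion of {}!", name));
    log(9, format_args!("this one is filtered out: {}", name));
}
```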


@ -34,7 +34,7 @@ pub fn expand_syntax_ext(cx: @ExtCtxt, sp: Span, tts: &[ast::token_tree])
expr_to_str(cx, args[0], expr_to_str(cx, args[0],
"first argument to fmt! must be a string literal."); "first argument to fmt! must be a string literal.");
let fmtspan = args[0].span; let fmtspan = args[0].span;
debug!("Format string: %s", fmt); debug2!("Format string: {}", fmt);
fn parse_fmt_err_(cx: @ExtCtxt, sp: Span, msg: &str) -> ! { fn parse_fmt_err_(cx: @ExtCtxt, sp: Span, msg: &str) -> ! {
cx.span_fatal(sp, msg); cx.span_fatal(sp, msg);
} }
@ -198,53 +198,53 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: Span,
cx.expr_mut_addr_of(arg.span, buf)); cx.expr_mut_addr_of(arg.span, buf));
} }
fn log_conv(c: &Conv) { fn log_conv(c: &Conv) {
debug!("Building conversion:"); debug2!("Building conversion:");
match c.param { match c.param {
Some(p) => { debug!("param: %s", p.to_str()); } Some(p) => { debug2!("param: {}", p.to_str()); }
_ => debug!("param: none") _ => debug2!("param: none")
} }
for f in c.flags.iter() { for f in c.flags.iter() {
match *f { match *f {
FlagLeftJustify => debug!("flag: left justify"), FlagLeftJustify => debug2!("flag: left justify"),
FlagLeftZeroPad => debug!("flag: left zero pad"), FlagLeftZeroPad => debug2!("flag: left zero pad"),
FlagSpaceForSign => debug!("flag: left space pad"), FlagSpaceForSign => debug2!("flag: left space pad"),
FlagSignAlways => debug!("flag: sign always"), FlagSignAlways => debug2!("flag: sign always"),
FlagAlternate => debug!("flag: alternate") FlagAlternate => debug2!("flag: alternate")
} }
} }
match c.width { match c.width {
CountIs(i) => CountIs(i) =>
debug!("width: count is %s", i.to_str()), debug2!("width: count is {}", i.to_str()),
CountIsParam(i) => CountIsParam(i) =>
debug!("width: count is param %s", i.to_str()), debug2!("width: count is param {}", i.to_str()),
CountIsNextParam => debug!("width: count is next param"), CountIsNextParam => debug2!("width: count is next param"),
CountImplied => debug!("width: count is implied") CountImplied => debug2!("width: count is implied")
} }
match c.precision { match c.precision {
CountIs(i) => CountIs(i) =>
debug!("prec: count is %s", i.to_str()), debug2!("prec: count is {}", i.to_str()),
CountIsParam(i) => CountIsParam(i) =>
debug!("prec: count is param %s", i.to_str()), debug2!("prec: count is param {}", i.to_str()),
CountIsNextParam => debug!("prec: count is next param"), CountIsNextParam => debug2!("prec: count is next param"),
CountImplied => debug!("prec: count is implied") CountImplied => debug2!("prec: count is implied")
} }
match c.ty { match c.ty {
TyBool => debug!("type: bool"), TyBool => debug2!("type: bool"),
TyStr => debug!("type: str"), TyStr => debug2!("type: str"),
TyChar => debug!("type: char"), TyChar => debug2!("type: char"),
TyInt(s) => match s { TyInt(s) => match s {
Signed => debug!("type: signed"), Signed => debug2!("type: signed"),
Unsigned => debug!("type: unsigned") Unsigned => debug2!("type: unsigned")
}, },
TyBits => debug!("type: bits"), TyBits => debug2!("type: bits"),
TyHex(cs) => match cs { TyHex(cs) => match cs {
CaseUpper => debug!("type: uhex"), CaseUpper => debug2!("type: uhex"),
CaseLower => debug!("type: lhex"), CaseLower => debug2!("type: lhex"),
}, },
TyOctal => debug!("type: octal"), TyOctal => debug2!("type: octal"),
TyFloat => debug!("type: float"), TyFloat => debug2!("type: float"),
TyPointer => debug!("type: pointer"), TyPointer => debug2!("type: pointer"),
TyPoly => debug!("type: poly") TyPoly => debug2!("type: poly")
} }
} }
@ -319,7 +319,7 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: Span,
let expected_nargs = n + 1u; // n conversions + the fmt string let expected_nargs = n + 1u; // n conversions + the fmt string
if expected_nargs < nargs { if expected_nargs < nargs {
cx.span_fatal cx.span_fatal
(sp, fmt!("too many arguments to fmt!. found %u, expected %u", (sp, format!("too many arguments to fmt!. found {}, expected {}",
nargs, expected_nargs)); nargs, expected_nargs));
} }
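The conversion logger above walks the old directive's flags, width, and precision. For orientation, rough brace-syntax counterparts in present-day Rust (a sketch, not part of this commit):

```rust
fn main() {
    let x = 3.14159f64;
    println!("[{:8.3}]", x);   // width 8, precision 3      (old %8.3f)
    println!("[{:<8.3}]", x);  // left-justified             (old %-8.3f)
    println!("[{:+08.3}]", x); // sign always, zero-padded   (old %+08.3f)
    println!("[{:#x}]", 255);  // alternate form             (old %#x)
}
```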


@ -51,7 +51,7 @@ struct Context {
impl Context { impl Context {
/// Parses the arguments from the given list of tokens, returning None if /// Parses the arguments from the given list of tokens, returning None if
/// there's a parse error so we can continue parsing other fmt! expressions. /// there's a parse error so we can continue parsing other format! expressions.
fn parse_args(&mut self, sp: Span, fn parse_args(&mut self, sp: Span,
tts: &[ast::token_tree]) -> (@ast::Expr, Option<@ast::Expr>) { tts: &[ast::token_tree]) -> (@ast::Expr, Option<@ast::Expr>) {
let p = rsparse::new_parser_from_tts(self.ecx.parse_sess(), let p = rsparse::new_parser_from_tts(self.ecx.parse_sess(),
@ -92,8 +92,8 @@ impl Context {
} }
_ => { _ => {
self.ecx.span_err(*p.span, self.ecx.span_err(*p.span,
fmt!("expected ident for named \ format!("expected ident for named \
argument, but found `%s`", argument, but found `{}`",
p.this_token_to_str())); p.this_token_to_str()));
return (extra, None); return (extra, None);
} }
@ -104,8 +104,8 @@ impl Context {
match self.names.find(&name) { match self.names.find(&name) {
None => {} None => {}
Some(prev) => { Some(prev) => {
self.ecx.span_err(e.span, fmt!("duplicate argument \ self.ecx.span_err(e.span, format!("duplicate argument \
named `%s`", name)); named `{}`", name));
self.ecx.parse_sess.span_diagnostic.span_note( self.ecx.parse_sess.span_diagnostic.span_note(
prev.span, "previously here"); prev.span, "previously here");
loop loop
@ -207,13 +207,13 @@ impl Context {
match arm.selector { match arm.selector {
Left(name) => { Left(name) => {
self.ecx.span_err(self.fmtsp, self.ecx.span_err(self.fmtsp,
fmt!("duplicate selector \ format!("duplicate selector \
`%?`", name)); `{:?}`", name));
} }
Right(idx) => { Right(idx) => {
self.ecx.span_err(self.fmtsp, self.ecx.span_err(self.fmtsp,
fmt!("duplicate selector \ format!("duplicate selector \
`=%u`", idx)); `={}`", idx));
} }
} }
} }
@ -227,7 +227,7 @@ impl Context {
for arm in arms.iter() { for arm in arms.iter() {
if !seen_cases.insert(arm.selector) { if !seen_cases.insert(arm.selector) {
self.ecx.span_err(self.fmtsp, self.ecx.span_err(self.fmtsp,
fmt!("duplicate selector `%s`", format!("duplicate selector `{}`",
arm.selector)); arm.selector));
} else if arm.selector == "" { } else if arm.selector == "" {
self.ecx.span_err(self.fmtsp, self.ecx.span_err(self.fmtsp,
@ -245,8 +245,8 @@ impl Context {
match arg { match arg {
Left(arg) => { Left(arg) => {
if arg < 0 || self.args.len() <= arg { if arg < 0 || self.args.len() <= arg {
let msg = fmt!("invalid reference to argument `%u` (there \ let msg = format!("invalid reference to argument `{}` (there \
are %u arguments)", arg, self.args.len()); are {} arguments)", arg, self.args.len());
self.ecx.span_err(self.fmtsp, msg); self.ecx.span_err(self.fmtsp, msg);
return; return;
} }
@ -260,7 +260,7 @@ impl Context {
let span = match self.names.find(&name) { let span = match self.names.find(&name) {
Some(e) => e.span, Some(e) => e.span,
None => { None => {
let msg = fmt!("there is no argument named `%s`", name); let msg = format!("there is no argument named `{}`", name);
self.ecx.span_err(self.fmtsp, msg); self.ecx.span_err(self.fmtsp, msg);
return; return;
} }
@ -298,20 +298,20 @@ impl Context {
match (cur, ty) { match (cur, ty) {
(Known(cur), Known(ty)) => { (Known(cur), Known(ty)) => {
self.ecx.span_err(sp, self.ecx.span_err(sp,
fmt!("argument redeclared with type `%s` when \ format!("argument redeclared with type `{}` when \
it was previously `%s`", ty, cur)); it was previously `{}`", ty, cur));
} }
(Known(cur), _) => { (Known(cur), _) => {
self.ecx.span_err(sp, self.ecx.span_err(sp,
fmt!("argument used to format with `%s` was \ format!("argument used to format with `{}` was \
attempted to not be used for formatting", attempted to not be used for formatting",
cur)); cur));
} }
(_, Known(ty)) => { (_, Known(ty)) => {
self.ecx.span_err(sp, self.ecx.span_err(sp,
fmt!("argument previously used as a format \ format!("argument previously used as a format \
argument attempted to be used as `%s`", argument attempted to be used as `{}`",
ty)); ty));
} }
(_, _) => { (_, _) => {
self.ecx.span_err(sp, "argument declared with multiple formats"); self.ecx.span_err(sp, "argument declared with multiple formats");
@ -405,7 +405,7 @@ impl Context {
}).collect(); }).collect();
let (lr, selarg) = match arm.selector { let (lr, selarg) = match arm.selector {
Left(t) => { Left(t) => {
let p = ctpath(fmt!("%?", t)); let p = ctpath(format!("{:?}", t));
let p = self.ecx.path_global(sp, p); let p = self.ecx.path_global(sp, p);
(self.ecx.ident_of("Left"), (self.ecx.ident_of("Left"),
self.ecx.expr_path(p)) self.ecx.expr_path(p))
@ -444,7 +444,7 @@ impl Context {
~[] ~[]
), None); ), None);
let st = ast::item_static(ty, ast::MutImmutable, method); let st = ast::item_static(ty, ast::MutImmutable, method);
let static_name = self.ecx.ident_of(fmt!("__static_method_%u", let static_name = self.ecx.ident_of(format!("__static_method_{}",
self.method_statics.len())); self.method_statics.len()));
// Flag these statics as `address_insignificant` so LLVM can // Flag these statics as `address_insignificant` so LLVM can
// merge duplicate globals as much as possible (which we're // merge duplicate globals as much as possible (which we're
@ -538,7 +538,7 @@ impl Context {
} }
} }
/// Actually builds the expression which the ifmt! block will be expanded /// Actually builds the expression which the iformat! block will be expanded
/// to /// to
fn to_expr(&self, extra: @ast::Expr) -> @ast::Expr { fn to_expr(&self, extra: @ast::Expr) -> @ast::Expr {
let mut lets = ~[]; let mut lets = ~[];
@ -584,13 +584,13 @@ impl Context {
// foo(bar(&1)) // foo(bar(&1))
// the lifetime of `1` doesn't outlast the call to `bar`, so it's not // the lifetime of `1` doesn't outlast the call to `bar`, so it's not
// vald for the call to `foo`. To work around this all arguments to the // vald for the call to `foo`. To work around this all arguments to the
// fmt! string are shoved into locals. Furthermore, we shove the address // format! string are shoved into locals. Furthermore, we shove the address
// of each variable because we don't want to move out of the arguments // of each variable because we don't want to move out of the arguments
// passed to this function. // passed to this function.
for (i, &e) in self.args.iter().enumerate() { for (i, &e) in self.args.iter().enumerate() {
if self.arg_types[i].is_none() { loop } // error already generated if self.arg_types[i].is_none() { loop } // error already generated
let name = self.ecx.ident_of(fmt!("__arg%u", i)); let name = self.ecx.ident_of(format!("__arg{}", i));
let e = self.ecx.expr_addr_of(e.span, e); let e = self.ecx.expr_addr_of(e.span, e);
lets.push(self.ecx.stmt_let(e.span, false, name, e)); lets.push(self.ecx.stmt_let(e.span, false, name, e));
locals.push(self.format_arg(e.span, Left(i), locals.push(self.format_arg(e.span, Left(i),
@ -599,7 +599,7 @@ impl Context {
for (&name, &e) in self.names.iter() { for (&name, &e) in self.names.iter() {
if !self.name_types.contains_key(&name) { loop } if !self.name_types.contains_key(&name) { loop }
let lname = self.ecx.ident_of(fmt!("__arg%s", name)); let lname = self.ecx.ident_of(format!("__arg{}", name));
let e = self.ecx.expr_addr_of(e.span, e); let e = self.ecx.expr_addr_of(e.span, e);
lets.push(self.ecx.stmt_let(e.span, false, lname, e)); lets.push(self.ecx.stmt_let(e.span, false, lname, e));
names[*self.name_positions.get(&name)] = names[*self.name_positions.get(&name)] =
@ -662,8 +662,8 @@ impl Context {
"x" => "LowerHex", "x" => "LowerHex",
"X" => "UpperHex", "X" => "UpperHex",
_ => { _ => {
self.ecx.span_err(sp, fmt!("unknown format trait \ self.ecx.span_err(sp, format!("unknown format trait \
`%s`", tyname)); `{}`", tyname));
"Dummy" "Dummy"
} }
} }
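
Taken together, the replacements in this commit follow a small directive mapping from the old fmt!-style strings to the new format!-style ones; a condensed reference, inferred from the string rewrites in this commit and the trait table just above:

    // old fmt! directive        new format! placeholder
    //   %s  %u  %d          ->    {}
    //   %?                  ->    {:?}
    //   %x  /  %X           ->    {:x}  /  {:X}    (LowerHex / UpperHex)
    //   literal { or }      ->    \{ or \}         (written "\\{" in source)
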

View File

@ -255,8 +255,8 @@ pub mod rt {
match res { match res {
Some(ast) => ast, Some(ast) => ast,
None => { None => {
error!("Parse error with ```\n%s\n```", s); error2!("Parse error with ```\n{}\n```", s);
fail!() fail2!()
} }
} }
} }
@ -484,7 +484,7 @@ fn mk_token(cx: @ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr {
~[mk_ident(cx, sp, ident)]); ~[mk_ident(cx, sp, ident)]);
} }
INTERPOLATED(_) => fail!("quote! with interpolated token"), INTERPOLATED(_) => fail2!("quote! with interpolated token"),
_ => () _ => ()
} }
@ -522,7 +522,7 @@ fn mk_token(cx: @ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr {
DOLLAR => "DOLLAR", DOLLAR => "DOLLAR",
UNDERSCORE => "UNDERSCORE", UNDERSCORE => "UNDERSCORE",
EOF => "EOF", EOF => "EOF",
_ => fail!() _ => fail2!()
}; };
cx.expr_ident(sp, id_ext(name)) cx.expr_ident(sp, id_ext(name))
} }
@ -547,7 +547,7 @@ fn mk_tt(cx: @ExtCtxt, sp: Span, tt: &ast::token_tree)
} }
ast::tt_delim(ref tts) => mk_tts(cx, sp, **tts), ast::tt_delim(ref tts) => mk_tts(cx, sp, **tts),
ast::tt_seq(*) => fail!("tt_seq in quote!"), ast::tt_seq(*) => fail2!("tt_seq in quote!"),
ast::tt_nonterminal(sp, ident) => { ast::tt_nonterminal(sp, ident) => {

View File

@ -122,7 +122,7 @@ pub struct MatcherPos {
pub fn copy_up(mpu: &matcher_pos_up) -> ~MatcherPos { pub fn copy_up(mpu: &matcher_pos_up) -> ~MatcherPos {
match *mpu { match *mpu {
matcher_pos_up(Some(ref mp)) => (*mp).clone(), matcher_pos_up(Some(ref mp)) => (*mp).clone(),
_ => fail!() _ => fail2!()
} }
} }
@ -384,14 +384,14 @@ pub fn parse(
let nts = bb_eis.map(|ei| { let nts = bb_eis.map(|ei| {
match ei.elts[ei.idx].node { match ei.elts[ei.idx].node {
match_nonterminal(ref bind,ref name,_) => { match_nonterminal(ref bind,ref name,_) => {
fmt!("%s ('%s')", ident_to_str(name), format!("{} ('{}')", ident_to_str(name),
ident_to_str(bind)) ident_to_str(bind))
} }
_ => fail!() _ => fail2!()
} }).connect(" or "); } }).connect(" or ");
return error(sp, fmt!( return error(sp, format!(
"Local ambiguity: multiple parsing options: \ "Local ambiguity: multiple parsing options: \
built-in NTs %s or %u other options.", built-in NTs {} or {} other options.",
nts, next_eis.len())); nts, next_eis.len()));
} else if (bb_eis.len() == 0u && next_eis.len() == 0u) { } else if (bb_eis.len() == 0u && next_eis.len() == 0u) {
return failure(sp, ~"No rules expected the token: " return failure(sp, ~"No rules expected the token: "
@ -412,7 +412,7 @@ pub fn parse(
parse_nt(&rust_parser, ident_to_str(name)))); parse_nt(&rust_parser, ident_to_str(name))));
ei.idx += 1u; ei.idx += 1u;
} }
_ => fail!() _ => fail2!()
} }
cur_eis.push(ei); cur_eis.push(ei);

View File

@ -117,7 +117,7 @@ fn lookup_cur_matched(r: &mut TtReader, name: Ident) -> @named_match {
match r.interpolations.find_copy(&name) { match r.interpolations.find_copy(&name) {
Some(s) => lookup_cur_matched_by_matched(r, s), Some(s) => lookup_cur_matched_by_matched(r, s),
None => { None => {
r.sp_diag.span_fatal(r.cur_span, fmt!("unknown macro variable `%s`", r.sp_diag.span_fatal(r.cur_span, format!("unknown macro variable `{}`",
ident_to_str(&name))); ident_to_str(&name)));
} }
} }
@ -142,9 +142,9 @@ fn lockstep_iter_size(t: &token_tree, r: &mut TtReader) -> lis {
lis_constraint(r_len, ref r_id) => { lis_constraint(r_len, ref r_id) => {
let l_n = ident_to_str(l_id); let l_n = ident_to_str(l_id);
let r_n = ident_to_str(r_id); let r_n = ident_to_str(r_id);
lis_contradiction(fmt!("Inconsistent lockstep iteration: \ lis_contradiction(format!("Inconsistent lockstep iteration: \
'%s' has %u items, but '%s' has %u", '{}' has {} items, but '{}' has {}",
l_n, l_len, r_n, r_len)) l_n, l_len, r_n, r_len))
} }
} }
} }
@ -294,7 +294,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
matched_seq(*) => { matched_seq(*) => {
r.sp_diag.span_fatal( r.sp_diag.span_fatal(
r.cur_span, /* blame the macro writer */ r.cur_span, /* blame the macro writer */
fmt!("variable '%s' is still repeating at this depth", format!("variable '{}' is still repeating at this depth",
ident_to_str(&ident))); ident_to_str(&ident)));
} }
} }
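
The lis_contradiction message above is the macro transcriber's report that two fragments being repeated in lockstep captured different numbers of items. A hypothetical macro (invented for illustration, written in the pre-1.0 macro_rules syntax) that would hit it:

    macro_rules! pairs (
        ($($a:expr),* ; $($b:expr),*) => ( ~[ $( ($a, $b) ),* ] )
    )
    // pairs!(1, 2, 3 ; 4, 5) repeats $a and $b together, so transcription
    // fails roughly as: Inconsistent lockstep iteration:
    //   'a' has 3 items, but 'b' has 2
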

View File

@ -892,7 +892,7 @@ mod test {
let a_val = $a; let a_val = $a;
let b_val = $b; let b_val = $b;
if !(pred_val(a_val,b_val)) { if !(pred_val(a_val,b_val)) {
fail!("expected args satisfying %s, got %? and %?", fail2!("expected args satisfying {}, got {:?} and {:?}",
$predname, a_val, b_val); $predname, a_val, b_val);
} }
} }

View File

@ -66,7 +66,7 @@ impl<T> OptVec<T> {
pub fn get<'a>(&'a self, i: uint) -> &'a T { pub fn get<'a>(&'a self, i: uint) -> &'a T {
match *self { match *self {
Empty => fail!("Invalid index %u", i), Empty => fail2!("Invalid index {}", i),
Vec(ref v) => &v[i] Vec(ref v) => &v[i]
} }
} }

View File

@ -32,7 +32,7 @@ impl parser_attr for Parser {
fn parse_outer_attributes(&self) -> ~[ast::Attribute] { fn parse_outer_attributes(&self) -> ~[ast::Attribute] {
let mut attrs: ~[ast::Attribute] = ~[]; let mut attrs: ~[ast::Attribute] = ~[];
loop { loop {
debug!("parse_outer_attributes: self.token=%?", debug2!("parse_outer_attributes: self.token={:?}",
self.token); self.token);
match *self.token { match *self.token {
token::INTERPOLATED(token::nt_attr(*)) => { token::INTERPOLATED(token::nt_attr(*)) => {
@ -67,7 +67,7 @@ impl parser_attr for Parser {
// if permit_inner is true, then a trailing `;` indicates an inner // if permit_inner is true, then a trailing `;` indicates an inner
// attribute // attribute
fn parse_attribute(&self, permit_inner: bool) -> ast::Attribute { fn parse_attribute(&self, permit_inner: bool) -> ast::Attribute {
debug!("parse_attributes: permit_inner=%? self.token=%?", debug2!("parse_attributes: permit_inner={:?} self.token={:?}",
permit_inner, self.token); permit_inner, self.token);
let (span, value) = match *self.token { let (span, value) = match *self.token {
INTERPOLATED(token::nt_attr(attr)) => { INTERPOLATED(token::nt_attr(attr)) => {
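
For orientation, permit_inner reflects the attribute syntax accepted at this point in the language's history, where a trailing semicolon marks an inner attribute; schematically (attribute names illustrative):

    #[test]                       // outer attribute: applies to the next item
    fn check() { }

    mod m {
        #[allow(unused_imports)]; // trailing `;`: inner attribute of `m` itself
    }
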
@ -85,8 +85,8 @@ impl parser_attr for Parser {
(mk_sp(lo, hi), meta_item) (mk_sp(lo, hi), meta_item)
} }
_ => { _ => {
self.fatal(fmt!("expected `#` but found `%s`", self.fatal(format!("expected `\\#` but found `{}`",
self.this_token_to_str())); self.this_token_to_str()));
} }
}; };
let style = if permit_inner && *self.token == token::SEMI { let style = if permit_inner && *self.token == token::SEMI {

View File

@ -134,7 +134,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> ~str {
return lines.connect("\n"); return lines.connect("\n");
} }
fail!("not a doc-comment: %s", comment); fail2!("not a doc-comment: {}", comment);
} }
fn read_to_eol(rdr: @mut StringReader) -> ~str { fn read_to_eol(rdr: @mut StringReader) -> ~str {
@ -161,7 +161,7 @@ fn consume_non_eol_whitespace(rdr: @mut StringReader) {
} }
fn push_blank_line_comment(rdr: @mut StringReader, comments: &mut ~[cmnt]) { fn push_blank_line_comment(rdr: @mut StringReader, comments: &mut ~[cmnt]) {
debug!(">>> blank-line comment"); debug2!(">>> blank-line comment");
let v: ~[~str] = ~[]; let v: ~[~str] = ~[];
comments.push(cmnt {style: blank_line, lines: v, pos: rdr.last_pos}); comments.push(cmnt {style: blank_line, lines: v, pos: rdr.last_pos});
} }
@ -179,9 +179,9 @@ fn consume_whitespace_counting_blank_lines(rdr: @mut StringReader,
fn read_shebang_comment(rdr: @mut StringReader, code_to_the_left: bool, fn read_shebang_comment(rdr: @mut StringReader, code_to_the_left: bool,
comments: &mut ~[cmnt]) { comments: &mut ~[cmnt]) {
debug!(">>> shebang comment"); debug2!(">>> shebang comment");
let p = rdr.last_pos; let p = rdr.last_pos;
debug!("<<< shebang comment"); debug2!("<<< shebang comment");
comments.push(cmnt { comments.push(cmnt {
style: if code_to_the_left { trailing } else { isolated }, style: if code_to_the_left { trailing } else { isolated },
lines: ~[read_one_line_comment(rdr)], lines: ~[read_one_line_comment(rdr)],
@ -191,19 +191,19 @@ fn read_shebang_comment(rdr: @mut StringReader, code_to_the_left: bool,
fn read_line_comments(rdr: @mut StringReader, code_to_the_left: bool, fn read_line_comments(rdr: @mut StringReader, code_to_the_left: bool,
comments: &mut ~[cmnt]) { comments: &mut ~[cmnt]) {
debug!(">>> line comments"); debug2!(">>> line comments");
let p = rdr.last_pos; let p = rdr.last_pos;
let mut lines: ~[~str] = ~[]; let mut lines: ~[~str] = ~[];
while rdr.curr == '/' && nextch(rdr) == '/' { while rdr.curr == '/' && nextch(rdr) == '/' {
let line = read_one_line_comment(rdr); let line = read_one_line_comment(rdr);
debug!("%s", line); debug2!("{}", line);
if is_doc_comment(line) { // doc-comments are not put in comments if is_doc_comment(line) { // doc-comments are not put in comments
break; break;
} }
lines.push(line); lines.push(line);
consume_non_eol_whitespace(rdr); consume_non_eol_whitespace(rdr);
} }
debug!("<<< line comments"); debug2!("<<< line comments");
if !lines.is_empty() { if !lines.is_empty() {
comments.push(cmnt { comments.push(cmnt {
style: if code_to_the_left { trailing } else { isolated }, style: if code_to_the_left { trailing } else { isolated },
@ -242,14 +242,14 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut ~[~str],
} }
None => s, None => s,
}; };
debug!("pushing line: %s", s1); debug2!("pushing line: {}", s1);
lines.push(s1); lines.push(s1);
} }
fn read_block_comment(rdr: @mut StringReader, fn read_block_comment(rdr: @mut StringReader,
code_to_the_left: bool, code_to_the_left: bool,
comments: &mut ~[cmnt]) { comments: &mut ~[cmnt]) {
debug!(">>> block comment"); debug2!(">>> block comment");
let p = rdr.last_pos; let p = rdr.last_pos;
let mut lines: ~[~str] = ~[]; let mut lines: ~[~str] = ~[];
let col: CharPos = rdr.col; let col: CharPos = rdr.col;
@ -275,7 +275,7 @@ fn read_block_comment(rdr: @mut StringReader,
} else { } else {
let mut level: int = 1; let mut level: int = 1;
while level > 0 { while level > 0 {
debug!("=== block comment level %d", level); debug2!("=== block comment level {}", level);
if is_eof(rdr) { if is_eof(rdr) {
(rdr as @mut reader).fatal(~"unterminated block comment"); (rdr as @mut reader).fatal(~"unterminated block comment");
} }
@ -311,7 +311,7 @@ fn read_block_comment(rdr: @mut StringReader,
if !is_eof(rdr) && rdr.curr != '\n' && lines.len() == 1u { if !is_eof(rdr) && rdr.curr != '\n' && lines.len() == 1u {
style = mixed; style = mixed;
} }
debug!("<<< block comment"); debug2!("<<< block comment");
comments.push(cmnt {style: style, lines: lines, pos: p}); comments.push(cmnt {style: style, lines: lines, pos: p});
} }
@ -324,15 +324,15 @@ fn peeking_at_comment(rdr: @mut StringReader) -> bool {
fn consume_comment(rdr: @mut StringReader, fn consume_comment(rdr: @mut StringReader,
code_to_the_left: bool, code_to_the_left: bool,
comments: &mut ~[cmnt]) { comments: &mut ~[cmnt]) {
debug!(">>> consume comment"); debug2!(">>> consume comment");
if rdr.curr == '/' && nextch(rdr) == '/' { if rdr.curr == '/' && nextch(rdr) == '/' {
read_line_comments(rdr, code_to_the_left, comments); read_line_comments(rdr, code_to_the_left, comments);
} else if rdr.curr == '/' && nextch(rdr) == '*' { } else if rdr.curr == '/' && nextch(rdr) == '*' {
read_block_comment(rdr, code_to_the_left, comments); read_block_comment(rdr, code_to_the_left, comments);
} else if rdr.curr == '#' && nextch(rdr) == '!' { } else if rdr.curr == '#' && nextch(rdr) == '!' {
read_shebang_comment(rdr, code_to_the_left, comments); read_shebang_comment(rdr, code_to_the_left, comments);
} else { fail!(); } } else { fail2!(); }
debug!("<<< consume comment"); debug2!("<<< consume comment");
} }
#[deriving(Clone)] #[deriving(Clone)]
@ -378,11 +378,11 @@ pub fn gather_comments_and_literals(span_diagnostic:
let TokenAndSpan {tok: tok, sp: sp} = rdr.peek(); let TokenAndSpan {tok: tok, sp: sp} = rdr.peek();
if token::is_lit(&tok) { if token::is_lit(&tok) {
do with_str_from(rdr, bstart) |s| { do with_str_from(rdr, bstart) |s| {
debug!("tok lit: %s", s); debug2!("tok lit: {}", s);
literals.push(lit {lit: s.to_owned(), pos: sp.lo}); literals.push(lit {lit: s.to_owned(), pos: sp.lo});
} }
} else { } else {
debug!("tok: %s", token::to_str(get_ident_interner(), &tok)); debug2!("tok: {}", token::to_str(get_ident_interner(), &tok));
} }
first_read = false; first_read = false;
} }

View File

@ -133,7 +133,7 @@ impl reader for TtReader {
fn is_eof(@mut self) -> bool { self.cur_tok == token::EOF } fn is_eof(@mut self) -> bool { self.cur_tok == token::EOF }
fn next_token(@mut self) -> TokenAndSpan { fn next_token(@mut self) -> TokenAndSpan {
let r = tt_next_token(self); let r = tt_next_token(self);
debug!("TtReader: r=%?", r); debug2!("TtReader: r={:?}", r);
return r; return r;
} }
fn fatal(@mut self, m: ~str) -> ! { fn fatal(@mut self, m: ~str) -> ! {
@ -261,7 +261,7 @@ fn hex_digit_val(c: char) -> int {
if in_range(c, '0', '9') { return (c as int) - ('0' as int); } if in_range(c, '0', '9') { return (c as int) - ('0' as int); }
if in_range(c, 'a', 'f') { return (c as int) - ('a' as int) + 10; } if in_range(c, 'a', 'f') { return (c as int) - ('a' as int) + 10; }
if in_range(c, 'A', 'F') { return (c as int) - ('A' as int) + 10; } if in_range(c, 'A', 'F') { return (c as int) - ('A' as int) + 10; }
fail!(); fail2!();
} }
fn bin_digit_value(c: char) -> int { if c == '0' { return 0; } return 1; } fn bin_digit_value(c: char) -> int { if c == '0' { return 0; } return 1; }
@ -569,8 +569,7 @@ fn scan_number(c: char, rdr: @mut StringReader) -> token::Token {
~"int literal is too large") ~"int literal is too large")
}; };
debug!("lexing %s as an unsuffixed integer literal", debug2!("lexing {} as an unsuffixed integer literal", num_str);
num_str);
return token::LIT_INT_UNSUFFIXED(parsed as i64); return token::LIT_INT_UNSUFFIXED(parsed as i64);
} }
} }

View File

@ -416,18 +416,18 @@ mod test {
_ => assert_eq!("wrong 4","correct") _ => assert_eq!("wrong 4","correct")
}, },
_ => { _ => {
error!("failing value 3: %?",first_set); error2!("failing value 3: {:?}",first_set);
assert_eq!("wrong 3","correct") assert_eq!("wrong 3","correct")
} }
}, },
_ => { _ => {
error!("failing value 2: %?",delim_elts); error2!("failing value 2: {:?}",delim_elts);
assert_eq!("wrong","correct"); assert_eq!("wrong","correct");
} }
}, },
_ => { _ => {
error!("failing value: %?",tts); error2!("failing value: {:?}",tts);
assert_eq!("wrong 1","correct"); assert_eq!("wrong 1","correct");
} }
} }

View File

@ -261,10 +261,10 @@ impl ParserObsoleteMethods for Parser {
kind: ObsoleteSyntax, kind: ObsoleteSyntax,
kind_str: &str, kind_str: &str,
desc: &str) { desc: &str) {
self.span_err(sp, fmt!("obsolete syntax: %s", kind_str)); self.span_err(sp, format!("obsolete syntax: {}", kind_str));
if !self.obsolete_set.contains(&kind) { if !self.obsolete_set.contains(&kind) {
self.sess.span_diagnostic.handler().note(fmt!("%s", desc)); self.sess.span_diagnostic.handler().note(format!("{}", desc));
self.obsolete_set.insert(kind); self.obsolete_set.insert(kind);
} }
} }

View File

@ -362,8 +362,8 @@ impl Parser {
pub fn unexpected_last(&self, t: &token::Token) -> ! { pub fn unexpected_last(&self, t: &token::Token) -> ! {
self.span_fatal( self.span_fatal(
*self.last_span, *self.last_span,
fmt!( format!(
"unexpected token: `%s`", "unexpected token: `{}`",
self.token_to_str(t) self.token_to_str(t)
) )
); );
@ -371,8 +371,8 @@ impl Parser {
pub fn unexpected(&self) -> ! { pub fn unexpected(&self) -> ! {
self.fatal( self.fatal(
fmt!( format!(
"unexpected token: `%s`", "unexpected token: `{}`",
self.this_token_to_str() self.this_token_to_str()
) )
); );
@ -385,8 +385,8 @@ impl Parser {
self.bump(); self.bump();
} else { } else {
self.fatal( self.fatal(
fmt!( format!(
"expected `%s` but found `%s`", "expected `{}` but found `{}`",
self.token_to_str(t), self.token_to_str(t),
self.this_token_to_str() self.this_token_to_str()
) )
@ -414,9 +414,9 @@ impl Parser {
let actual = self.this_token_to_str(); let actual = self.this_token_to_str();
self.fatal( self.fatal(
if expected.len() != 1 { if expected.len() != 1 {
fmt!("expected one of `%s` but found `%s`", expect, actual) format!("expected one of `{}` but found `{}`", expect, actual)
} else { } else {
fmt!("expected `%s` but found `%s`", expect, actual) format!("expected `{}` but found `{}`", expect, actual)
} }
) )
} }
@ -444,7 +444,7 @@ impl Parser {
// followed by some token from the set edible + inedible. Recover // followed by some token from the set edible + inedible. Recover
// from anticipated input errors, discarding erroneous characters. // from anticipated input errors, discarding erroneous characters.
pub fn commit_expr(&self, e: @Expr, edible: &[token::Token], inedible: &[token::Token]) { pub fn commit_expr(&self, e: @Expr, edible: &[token::Token], inedible: &[token::Token]) {
debug!("commit_expr %?", e); debug2!("commit_expr {:?}", e);
match e.node { match e.node {
ExprPath(*) => { ExprPath(*) => {
// might be unit-struct construction; check for recoverable input error. // might be unit-struct construction; check for recoverable input error.
@ -464,7 +464,7 @@ impl Parser {
// followed by some token from the set edible + inedible. Check // followed by some token from the set edible + inedible. Check
// for recoverable input errors, discarding erroneous characters. // for recoverable input errors, discarding erroneous characters.
pub fn commit_stmt(&self, s: @Stmt, edible: &[token::Token], inedible: &[token::Token]) { pub fn commit_stmt(&self, s: @Stmt, edible: &[token::Token], inedible: &[token::Token]) {
debug!("commit_stmt %?", s); debug2!("commit_stmt {:?}", s);
let _s = s; // unused, but future checks might want to inspect `s`. let _s = s; // unused, but future checks might want to inspect `s`.
if self.last_token.map_default(false, |t|is_ident_or_path(*t)) { if self.last_token.map_default(false, |t|is_ident_or_path(*t)) {
let expected = vec::append(edible.to_owned(), inedible); let expected = vec::append(edible.to_owned(), inedible);
@ -490,8 +490,8 @@ impl Parser {
} }
_ => { _ => {
self.fatal( self.fatal(
fmt!( format!(
"expected ident, found `%s`", "expected ident, found `{}`",
self.this_token_to_str() self.this_token_to_str()
) )
); );
@ -536,8 +536,8 @@ impl Parser {
pub fn expect_keyword(&self, kw: keywords::Keyword) { pub fn expect_keyword(&self, kw: keywords::Keyword) {
if !self.eat_keyword(kw) { if !self.eat_keyword(kw) {
self.fatal( self.fatal(
fmt!( format!(
"expected `%s`, found `%s`", "expected `{}`, found `{}`",
self.id_to_str(kw.to_ident()).to_str(), self.id_to_str(kw.to_ident()).to_str(),
self.this_token_to_str() self.this_token_to_str()
) )
@ -549,14 +549,14 @@ impl Parser {
pub fn check_strict_keywords(&self) { pub fn check_strict_keywords(&self) {
if token::is_strict_keyword(self.token) { if token::is_strict_keyword(self.token) {
self.span_err(*self.last_span, self.span_err(*self.last_span,
fmt!("found `%s` in ident position", self.this_token_to_str())); format!("found `{}` in ident position", self.this_token_to_str()));
} }
} }
// signal an error if the current token is a reserved keyword // signal an error if the current token is a reserved keyword
pub fn check_reserved_keywords(&self) { pub fn check_reserved_keywords(&self) {
if token::is_reserved_keyword(self.token) { if token::is_reserved_keyword(self.token) {
self.fatal(fmt!("`%s` is a reserved keyword", self.this_token_to_str())); self.fatal(format!("`{}` is a reserved keyword", self.this_token_to_str()));
} }
} }
@ -571,7 +571,7 @@ impl Parser {
self.span.lo + BytePos(1u), self.span.lo + BytePos(1u),
self.span.hi self.span.hi
), ),
_ => self.fatal(fmt!("expected `%s`, found `%s`", _ => self.fatal(format!("expected `{}`, found `{}`",
self.token_to_str(&token::GT), self.token_to_str(&token::GT),
self.this_token_to_str())) self.this_token_to_str()))
} }
@ -938,13 +938,13 @@ impl Parser {
}; };
let hi = p.last_span.hi; let hi = p.last_span.hi;
debug!("parse_trait_methods(): trait method signature ends in \ debug2!("parse_trait_methods(): trait method signature ends in \
`%s`", `{}`",
self.this_token_to_str()); self.this_token_to_str());
match *p.token { match *p.token {
token::SEMI => { token::SEMI => {
p.bump(); p.bump();
debug!("parse_trait_methods(): parsing required method"); debug2!("parse_trait_methods(): parsing required method");
// NB: at the moment, visibility annotations on required // NB: at the moment, visibility annotations on required
// methods are ignored; this could change. // methods are ignored; this could change.
if vis != ast::inherited { if vis != ast::inherited {
@ -963,7 +963,7 @@ impl Parser {
}) })
} }
token::LBRACE => { token::LBRACE => {
debug!("parse_trait_methods(): parsing provided method"); debug2!("parse_trait_methods(): parsing provided method");
let (inner_attrs, body) = let (inner_attrs, body) =
p.parse_inner_attrs_and_block(); p.parse_inner_attrs_and_block();
let attrs = vec::append(attrs, inner_attrs); let attrs = vec::append(attrs, inner_attrs);
@ -984,8 +984,8 @@ impl Parser {
_ => { _ => {
p.fatal( p.fatal(
fmt!( format!(
"expected `;` or `{` but found `%s`", "expected `;` or `\\{` but found `{}`",
self.this_token_to_str() self.this_token_to_str()
) )
); );
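
Several replacement strings in this file grow extra backslashes because format! treats `{` and `}` as special; at this stage a literal brace is written with a backslash escape, which doubles in Rust source. The change just above amounts to:

    // old:  fmt!("expected `;` or `{` but found `%s`", self.this_token_to_str())
    // new:  format!("expected `;` or `\\{` but found `{}`", self.this_token_to_str())
    //       where "\\{" in the source is the escape \{ , i.e. a literal '{'
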
@ -1153,8 +1153,7 @@ impl Parser {
} = self.parse_path(LifetimeAndTypesAndBounds); } = self.parse_path(LifetimeAndTypesAndBounds);
ty_path(path, bounds, ast::DUMMY_NODE_ID) ty_path(path, bounds, ast::DUMMY_NODE_ID)
} else { } else {
self.fatal(fmt!("expected type, found token %?", self.fatal(format!("expected type, found token {:?}", *self.token));
*self.token));
}; };
let sp = mk_sp(lo, self.last_span.hi); let sp = mk_sp(lo, self.last_span.hi);
@ -1245,7 +1244,7 @@ impl Parser {
_ => 0 _ => 0
}; };
debug!("parser is_named_argument offset:%u", offset); debug2!("parser is_named_argument offset:{}", offset);
if offset == 0 { if offset == 0 {
is_plain_ident_or_underscore(&*self.token) is_plain_ident_or_underscore(&*self.token)
@ -1261,7 +1260,7 @@ impl Parser {
pub fn parse_arg_general(&self, require_name: bool) -> arg { pub fn parse_arg_general(&self, require_name: bool) -> arg {
let is_mutbl = self.eat_keyword(keywords::Mut); let is_mutbl = self.eat_keyword(keywords::Mut);
let pat = if require_name || self.is_named_argument() { let pat = if require_name || self.is_named_argument() {
debug!("parse_arg_general parse_pat (require_name:%?)", debug2!("parse_arg_general parse_pat (require_name:{:?})",
require_name); require_name);
self.parse_arg_mode(); self.parse_arg_mode();
let pat = self.parse_pat(); let pat = self.parse_pat();
@ -1273,7 +1272,7 @@ impl Parser {
self.expect(&token::COLON); self.expect(&token::COLON);
pat pat
} else { } else {
debug!("parse_arg_general ident_to_pat"); debug2!("parse_arg_general ident_to_pat");
ast_util::ident_to_pat(ast::DUMMY_NODE_ID, ast_util::ident_to_pat(ast::DUMMY_NODE_ID,
*self.last_span, *self.last_span,
special_idents::invalid) special_idents::invalid)
@ -1581,7 +1580,7 @@ impl Parser {
} }
_ => { _ => {
self.fatal(fmt!("Expected a lifetime name")); self.fatal(format!("Expected a lifetime name"));
} }
} }
} }
@ -1614,7 +1613,7 @@ impl Parser {
token::GT => { return res; } token::GT => { return res; }
token::BINOP(token::SHR) => { return res; } token::BINOP(token::SHR) => { return res; }
_ => { _ => {
self.fatal(fmt!("expected `,` or `>` after lifetime name, got: %?", self.fatal(format!("expected `,` or `>` after lifetime name, got: {:?}",
*self.token)); *self.token));
} }
} }
@ -2067,8 +2066,8 @@ impl Parser {
token::RPAREN | token::RBRACE | token::RBRACKET token::RPAREN | token::RBRACE | token::RBRACKET
=> { => {
p.fatal( p.fatal(
fmt!( format!(
"incorrect close delimiter: `%s`", "incorrect close delimiter: `{}`",
p.this_token_to_str() p.this_token_to_str()
) )
); );
@ -2561,10 +2560,10 @@ impl Parser {
// There may be other types of expressions that can // There may be other types of expressions that can
// represent the callee in `for` and `do` expressions // represent the callee in `for` and `do` expressions
// but they aren't represented by tests // but they aren't represented by tests
debug!("sugary call on %?", e.node); debug2!("sugary call on {:?}", e.node);
self.span_fatal( self.span_fatal(
e.span, e.span,
fmt!("`%s` must be followed by a block call", keyword)); format!("`{}` must be followed by a block call", keyword));
} }
} }
} }
@ -2759,8 +2758,8 @@ impl Parser {
self.bump(); self.bump();
if *self.token != token::RBRACE { if *self.token != token::RBRACE {
self.fatal( self.fatal(
fmt!( format!(
"expected `}`, found `%s`", "expected `\\}`, found `{}`",
self.this_token_to_str() self.this_token_to_str()
) )
); );
@ -3543,8 +3542,8 @@ impl Parser {
fn expect_self_ident(&self) { fn expect_self_ident(&self) {
if !self.is_self_ident() { if !self.is_self_ident() {
self.fatal( self.fatal(
fmt!( format!(
"expected `self` but found `%s`", "expected `self` but found `{}`",
self.this_token_to_str() self.this_token_to_str()
) )
); );
@ -3682,8 +3681,8 @@ impl Parser {
} }
_ => { _ => {
self.fatal( self.fatal(
fmt!( format!(
"expected `,` or `)`, found `%s`", "expected `,` or `)`, found `{}`",
self.this_token_to_str() self.this_token_to_str()
) )
); );
@ -3920,7 +3919,7 @@ impl Parser {
} }
} }
if fields.len() == 0 { if fields.len() == 0 {
self.fatal(fmt!("Unit-like struct definition should be written as `struct %s;`", self.fatal(format!("Unit-like struct definition should be written as `struct {};`",
get_ident_interner().get(class_name.name))); get_ident_interner().get(class_name.name)));
} }
self.bump(); self.bump();
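
The diagnostic above rejects a braced struct with zero fields; under the rule it enforces, the empty form must be spelled as a unit-like struct:

    struct Marker { }   // rejected: Unit-like struct definition should be
                        //           written as `struct Marker;`
    struct Marker;      // accepted
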
@ -3949,9 +3948,9 @@ impl Parser {
fields = ~[]; fields = ~[];
} else { } else {
self.fatal( self.fatal(
fmt!( format!(
"expected `{`, `(`, or `;` after struct name \ "expected `\\{`, `(`, or `;` after struct name \
but found `%s`", but found `{}`",
self.this_token_to_str() self.this_token_to_str()
) )
); );
@ -3995,7 +3994,7 @@ impl Parser {
token::RBRACE => {} token::RBRACE => {}
_ => { _ => {
self.span_fatal(*self.span, self.span_fatal(*self.span,
fmt!("expected `,`, or '}' but found `%s`", format!("expected `,`, or '\\}' but found `{}`",
self.this_token_to_str())); self.this_token_to_str()));
} }
} }
@ -4064,7 +4063,7 @@ impl Parser {
attrs = attrs_remaining + attrs; attrs = attrs_remaining + attrs;
first = false; first = false;
} }
debug!("parse_mod_items: parse_item_or_view_item(attrs=%?)", debug2!("parse_mod_items: parse_item_or_view_item(attrs={:?})",
attrs); attrs);
match self.parse_item_or_view_item(attrs, match self.parse_item_or_view_item(attrs,
true /* macros allowed */) { true /* macros allowed */) {
@ -4075,7 +4074,7 @@ impl Parser {
the module"); the module");
} }
_ => { _ => {
self.fatal(fmt!("expected item but found `%s`", self.fatal(format!("expected item but found `{}`",
self.this_token_to_str())); self.this_token_to_str()));
} }
} }
@ -4167,11 +4166,11 @@ impl Parser {
(true, false) => default_path, (true, false) => default_path,
(false, true) => secondary_path, (false, true) => secondary_path,
(false, false) => { (false, false) => {
self.span_fatal(id_sp, fmt!("file not found for module `%s`", mod_name)); self.span_fatal(id_sp, format!("file not found for module `{}`", mod_name));
} }
(true, true) => { (true, true) => {
self.span_fatal(id_sp, self.span_fatal(id_sp,
fmt!("file for module `%s` found at both %s and %s", format!("file for module `{}` found at both {} and {}",
mod_name, default_path_str, secondary_path_str)); mod_name, default_path_str, secondary_path_str));
} }
} }
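
The default_path/secondary_path pair referred to here are the two files the parser probes for an out-of-line module; finding neither or both is fatal. With illustrative paths:

    // `mod colors;` appearing in src/lib.rs makes the parser look for:
    //     src/colors.rs          (default_path)
    //     src/colors/mod.rs      (secondary_path)
    // neither present  ->  "file not found for module `colors`"
    // both present     ->  "file for module `colors` found at both ... and ..."
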
@ -4323,7 +4322,7 @@ impl Parser {
self.expect_keyword(keywords::Mod); self.expect_keyword(keywords::Mod);
} else if *self.token != token::LBRACE { } else if *self.token != token::LBRACE {
self.span_fatal(*self.span, self.span_fatal(*self.span,
fmt!("expected `{` or `mod` but found `%s`", format!("expected `\\{` or `mod` but found `{}`",
self.this_token_to_str())); self.this_token_to_str()));
} }
@ -4340,8 +4339,8 @@ impl Parser {
_ => { _ => {
if must_be_named_mod { if must_be_named_mod {
self.span_fatal(*self.span, self.span_fatal(*self.span,
fmt!("expected foreign module name but \ format!("expected foreign module name but \
found `%s`", found `{}`",
self.this_token_to_str())); self.this_token_to_str()));
} }
@ -4566,7 +4565,7 @@ impl Parser {
if abis.contains(abi) { if abis.contains(abi) {
self.span_err( self.span_err(
*self.span, *self.span,
fmt!("ABI `%s` appears twice", format!("ABI `{}` appears twice",
word)); word));
} else { } else {
abis.add(abi); abis.add(abi);
@ -4576,9 +4575,9 @@ impl Parser {
None => { None => {
self.span_err( self.span_err(
*self.span, *self.span,
fmt!("illegal ABI: \ format!("illegal ABI: \
expected one of [%s], \ expected one of [{}], \
found `%s`", found `{}`",
abi::all_names().connect(", "), abi::all_names().connect(", "),
word)); word));
} }
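
These two span_err calls come from scanning the word list inside an `extern "..."` string, which at this time may name several ABIs. Hypothetical declarations that would trigger each message:

    // extern "C C" fn f();      ->  ABI `C` appears twice
    // extern "bogus" fn g();    ->  illegal ABI: expected one of [...], found `bogus`
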
@ -4853,7 +4852,7 @@ impl Parser {
let first_ident = self.parse_ident(); let first_ident = self.parse_ident();
let mut path = ~[first_ident]; let mut path = ~[first_ident];
debug!("parsed view_path: %s", self.id_to_str(first_ident)); debug2!("parsed view_path: {}", self.id_to_str(first_ident));
match *self.token { match *self.token {
token::EQ => { token::EQ => {
// x = foo::bar // x = foo::bar
@ -5061,7 +5060,7 @@ impl Parser {
break; break;
} }
iovi_foreign_item(_) => { iovi_foreign_item(_) => {
fail!(); fail2!();
} }
} }
attrs = self.parse_outer_attributes(); attrs = self.parse_outer_attributes();
@ -5084,7 +5083,7 @@ impl Parser {
items.push(item) items.push(item)
} }
iovi_foreign_item(_) => { iovi_foreign_item(_) => {
fail!(); fail2!();
} }
} }
} }

View File

@ -193,11 +193,11 @@ pub fn to_str(input: @ident_interner, t: &Token) -> ~str {
} }
body body
} }
LIT_STR(ref s) => { fmt!("\"%s\"", ident_to_str(s).escape_default()) } LIT_STR(ref s) => { format!("\"{}\"", ident_to_str(s).escape_default()) }
/* Name components */ /* Name components */
IDENT(s, _) => input.get(s.name).to_owned(), IDENT(s, _) => input.get(s.name).to_owned(),
LIFETIME(s) => fmt!("'%s", input.get(s.name)), LIFETIME(s) => format!("'{}", input.get(s.name)),
UNDERSCORE => ~"_", UNDERSCORE => ~"_",
/* Other */ /* Other */
@ -214,8 +214,8 @@ pub fn to_str(input: @ident_interner, t: &Token) -> ~str {
nt_block(*) => ~"block", nt_block(*) => ~"block",
nt_stmt(*) => ~"statement", nt_stmt(*) => ~"statement",
nt_pat(*) => ~"pattern", nt_pat(*) => ~"pattern",
nt_attr(*) => fail!("should have been handled"), nt_attr(*) => fail2!("should have been handled"),
nt_expr(*) => fail!("should have been handled above"), nt_expr(*) => fail2!("should have been handled above"),
nt_ty(*) => ~"type", nt_ty(*) => ~"type",
nt_ident(*) => ~"identifier", nt_ident(*) => ~"identifier",
nt_path(*) => ~"path", nt_path(*) => ~"path",
@ -269,7 +269,7 @@ pub fn flip_delimiter(t: &token::Token) -> token::Token {
RPAREN => LPAREN, RPAREN => LPAREN,
RBRACE => LBRACE, RBRACE => LBRACE,
RBRACKET => LBRACKET, RBRACKET => LBRACKET,
_ => fail!() _ => fail2!()
} }
} }
@ -553,7 +553,7 @@ pub fn fresh_name(src : &ast::Ident) -> Name {
// good error messages and uses of struct names in ambiguous could-be-binding // good error messages and uses of struct names in ambiguous could-be-binding
// locations. Also definitely destroys the guarantee given above about ptr_eq. // locations. Also definitely destroys the guarantee given above about ptr_eq.
/*let num = rand::rng().gen_uint_range(0,0xffff); /*let num = rand::rng().gen_uint_range(0,0xffff);
gensym(fmt!("%s_%u",ident_to_str(src),num))*/ gensym(format!("{}_{}",ident_to_str(src),num))*/
} }
// it looks like there oughta be a str_ptr_eq fn, but no one bothered to implement it? // it looks like there oughta be a str_ptr_eq fn, but no one bothered to implement it?

View File

@ -111,7 +111,7 @@ impl token {
pub fn tok_str(t: token) -> ~str { pub fn tok_str(t: token) -> ~str {
match t { match t {
STRING(s, len) => return fmt!("STR(%s,%d)", s, len), STRING(s, len) => return format!("STR({},{})", s, len),
BREAK(_) => return ~"BREAK", BREAK(_) => return ~"BREAK",
BEGIN(_) => return ~"BEGIN", BEGIN(_) => return ~"BEGIN",
END => return ~"END", END => return ~"END",
@ -131,7 +131,7 @@ pub fn buf_str(toks: ~[token], szs: ~[int], left: uint, right: uint,
if i != left { if i != left {
s.push_str(", "); s.push_str(", ");
} }
s.push_str(fmt!("%d=%s", szs[i], tok_str(toks[i]))); s.push_str(format!("{}={}", szs[i], tok_str(toks[i])));
i += 1u; i += 1u;
i %= n; i %= n;
} }
@ -152,7 +152,7 @@ pub fn mk_printer(out: @io::Writer, linewidth: uint) -> @mut Printer {
// Yes 3, it makes the ring buffers big enough to never // Yes 3, it makes the ring buffers big enough to never
// fall behind. // fall behind.
let n: uint = 3 * linewidth; let n: uint = 3 * linewidth;
debug!("mk_printer %u", linewidth); debug2!("mk_printer {}", linewidth);
let token: ~[token] = vec::from_elem(n, EOF); let token: ~[token] = vec::from_elem(n, EOF);
let size: ~[int] = vec::from_elem(n, 0); let size: ~[int] = vec::from_elem(n, 0);
let scan_stack: ~[uint] = vec::from_elem(n, 0u); let scan_stack: ~[uint] = vec::from_elem(n, 0u);
@ -288,7 +288,7 @@ impl Printer {
self.token[self.right] = t; self.token[self.right] = t;
} }
pub fn pretty_print(&mut self, t: token) { pub fn pretty_print(&mut self, t: token) {
debug!("pp ~[%u,%u]", self.left, self.right); debug2!("pp ~[{},{}]", self.left, self.right);
match t { match t {
EOF => { EOF => {
if !self.scan_stack_empty { if !self.scan_stack_empty {
@ -305,7 +305,7 @@ impl Printer {
self.left = 0u; self.left = 0u;
self.right = 0u; self.right = 0u;
} else { self.advance_right(); } } else { self.advance_right(); }
debug!("pp BEGIN(%d)/buffer ~[%u,%u]", debug2!("pp BEGIN({})/buffer ~[{},{}]",
b.offset, self.left, self.right); b.offset, self.left, self.right);
self.token[self.right] = t; self.token[self.right] = t;
self.size[self.right] = -self.right_total; self.size[self.right] = -self.right_total;
@ -313,10 +313,10 @@ impl Printer {
} }
END => { END => {
if self.scan_stack_empty { if self.scan_stack_empty {
debug!("pp END/print ~[%u,%u]", self.left, self.right); debug2!("pp END/print ~[{},{}]", self.left, self.right);
self.print(t, 0); self.print(t, 0);
} else { } else {
debug!("pp END/buffer ~[%u,%u]", self.left, self.right); debug2!("pp END/buffer ~[{},{}]", self.left, self.right);
self.advance_right(); self.advance_right();
self.token[self.right] = t; self.token[self.right] = t;
self.size[self.right] = -1; self.size[self.right] = -1;
@ -330,7 +330,7 @@ impl Printer {
self.left = 0u; self.left = 0u;
self.right = 0u; self.right = 0u;
} else { self.advance_right(); } } else { self.advance_right(); }
debug!("pp BREAK(%d)/buffer ~[%u,%u]", debug2!("pp BREAK({})/buffer ~[{},{}]",
b.offset, self.left, self.right); b.offset, self.left, self.right);
self.check_stack(0); self.check_stack(0);
self.scan_push(self.right); self.scan_push(self.right);
@ -340,11 +340,11 @@ impl Printer {
} }
STRING(s, len) => { STRING(s, len) => {
if self.scan_stack_empty { if self.scan_stack_empty {
debug!("pp STRING('%s')/print ~[%u,%u]", debug2!("pp STRING('{}')/print ~[{},{}]",
s, self.left, self.right); s, self.left, self.right);
self.print(t, len); self.print(t, len);
} else { } else {
debug!("pp STRING('%s')/buffer ~[%u,%u]", debug2!("pp STRING('{}')/buffer ~[{},{}]",
s, self.left, self.right); s, self.left, self.right);
self.advance_right(); self.advance_right();
self.token[self.right] = t; self.token[self.right] = t;
@ -356,14 +356,14 @@ impl Printer {
} }
} }
pub fn check_stream(&mut self) { pub fn check_stream(&mut self) {
debug!("check_stream ~[%u, %u] with left_total=%d, right_total=%d", debug2!("check_stream ~[{}, {}] with left_total={}, right_total={}",
self.left, self.right, self.left_total, self.right_total); self.left, self.right, self.left_total, self.right_total);
if self.right_total - self.left_total > self.space { if self.right_total - self.left_total > self.space {
debug!("scan window is %d, longer than space on line (%d)", debug2!("scan window is {}, longer than space on line ({})",
self.right_total - self.left_total, self.space); self.right_total - self.left_total, self.space);
if !self.scan_stack_empty { if !self.scan_stack_empty {
if self.left == self.scan_stack[self.bottom] { if self.left == self.scan_stack[self.bottom] {
debug!("setting %u to infinity and popping", self.left); debug2!("setting {} to infinity and popping", self.left);
self.size[self.scan_pop_bottom()] = size_infinity; self.size[self.scan_pop_bottom()] = size_infinity;
} }
} }
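
For readers new to this module: the printer consumes a token stream in the style of Oppen's pretty-printing algorithm, and check_stream above is where it decides whether buffered text still fits on the line. An informal summary of the tokens handled in this impl:

    // BEGIN(b)       open a logical group (consistent or inconsistent breaking)
    // STRING(s, len) literal text occupying `len` columns
    // BREAK(b)       a breakable blank: a space if the group fits, else newline + indent
    // END            close the innermost group
    //
    // check_stream: once right_total - left_total exceeds the remaining space,
    // the oldest unresolved entry on the scan stack has its size forced to
    // "infinity" (it will certainly break) and tokens are flushed from the left.
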
@ -372,7 +372,7 @@ impl Printer {
} }
} }
pub fn scan_push(&mut self, x: uint) { pub fn scan_push(&mut self, x: uint) {
debug!("scan_push %u", x); debug2!("scan_push {}", x);
if self.scan_stack_empty { if self.scan_stack_empty {
self.scan_stack_empty = false; self.scan_stack_empty = false;
} else { } else {
@ -408,7 +408,7 @@ impl Printer {
assert!((self.right != self.left)); assert!((self.right != self.left));
} }
pub fn advance_left(&mut self, x: token, L: int) { pub fn advance_left(&mut self, x: token, L: int) {
debug!("advnce_left ~[%u,%u], sizeof(%u)=%d", self.left, self.right, debug2!("advnce_left ~[{},{}], sizeof({})={}", self.left, self.right,
self.left, L); self.left, L);
if L >= 0 { if L >= 0 {
self.print(x, L); self.print(x, L);
@ -451,13 +451,13 @@ impl Printer {
} }
} }
pub fn print_newline(&mut self, amount: int) { pub fn print_newline(&mut self, amount: int) {
debug!("NEWLINE %d", amount); debug2!("NEWLINE {}", amount);
(*self.out).write_str("\n"); (*self.out).write_str("\n");
self.pending_indentation = 0; self.pending_indentation = 0;
self.indent(amount); self.indent(amount);
} }
pub fn indent(&mut self, amount: int) { pub fn indent(&mut self, amount: int) {
debug!("INDENT %d", amount); debug2!("INDENT {}", amount);
self.pending_indentation += amount; self.pending_indentation += amount;
} }
pub fn get_top(&mut self) -> print_stack_elt { pub fn get_top(&mut self) -> print_stack_elt {
@ -480,9 +480,9 @@ impl Printer {
(*self.out).write_str(s); (*self.out).write_str(s);
} }
pub fn print(&mut self, x: token, L: int) { pub fn print(&mut self, x: token, L: int) {
debug!("print %s %d (remaining line space=%d)", tok_str(x), L, debug2!("print {} {} (remaining line space={})", tok_str(x), L,
self.space); self.space);
debug!("%s", buf_str(self.token.clone(), debug2!("{}", buf_str(self.token.clone(),
self.size.clone(), self.size.clone(),
self.left, self.left,
self.right, self.right,
@ -491,13 +491,13 @@ impl Printer {
BEGIN(b) => { BEGIN(b) => {
if L > self.space { if L > self.space {
let col = self.margin - self.space + b.offset; let col = self.margin - self.space + b.offset;
debug!("print BEGIN -> push broken block at col %d", col); debug2!("print BEGIN -> push broken block at col {}", col);
self.print_stack.push(print_stack_elt { self.print_stack.push(print_stack_elt {
offset: col, offset: col,
pbreak: broken(b.breaks) pbreak: broken(b.breaks)
}); });
} else { } else {
debug!("print BEGIN -> push fitting block"); debug2!("print BEGIN -> push fitting block");
self.print_stack.push(print_stack_elt { self.print_stack.push(print_stack_elt {
offset: 0, offset: 0,
pbreak: fits pbreak: fits
@ -505,7 +505,7 @@ impl Printer {
} }
} }
END => { END => {
debug!("print END -> pop END"); debug2!("print END -> pop END");
let print_stack = &mut *self.print_stack; let print_stack = &mut *self.print_stack;
assert!((print_stack.len() != 0u)); assert!((print_stack.len() != 0u));
print_stack.pop(); print_stack.pop();
@ -514,24 +514,24 @@ impl Printer {
let top = self.get_top(); let top = self.get_top();
match top.pbreak { match top.pbreak {
fits => { fits => {
debug!("print BREAK(%d) in fitting block", b.blank_space); debug2!("print BREAK({}) in fitting block", b.blank_space);
self.space -= b.blank_space; self.space -= b.blank_space;
self.indent(b.blank_space); self.indent(b.blank_space);
} }
broken(consistent) => { broken(consistent) => {
debug!("print BREAK(%d+%d) in consistent block", debug2!("print BREAK({}+{}) in consistent block",
top.offset, b.offset); top.offset, b.offset);
self.print_newline(top.offset + b.offset); self.print_newline(top.offset + b.offset);
self.space = self.margin - (top.offset + b.offset); self.space = self.margin - (top.offset + b.offset);
} }
broken(inconsistent) => { broken(inconsistent) => {
if L > self.space { if L > self.space {
debug!("print BREAK(%d+%d) w/ newline in inconsistent", debug2!("print BREAK({}+{}) w/ newline in inconsistent",
top.offset, b.offset); top.offset, b.offset);
self.print_newline(top.offset + b.offset); self.print_newline(top.offset + b.offset);
self.space = self.margin - (top.offset + b.offset); self.space = self.margin - (top.offset + b.offset);
} else { } else {
debug!("print BREAK(%d) w/o newline in inconsistent", debug2!("print BREAK({}) w/o newline in inconsistent",
b.blank_space); b.blank_space);
self.indent(b.blank_space); self.indent(b.blank_space);
self.space -= b.blank_space; self.space -= b.blank_space;
@ -540,7 +540,7 @@ impl Printer {
} }
} }
STRING(s, len) => { STRING(s, len) => {
debug!("print STRING(%s)", s); debug2!("print STRING({})", s);
assert_eq!(L, len); assert_eq!(L, len);
// assert!(L <= space); // assert!(L <= space);
self.space -= len; self.space -= len;
@ -548,7 +548,7 @@ impl Printer {
} }
EOF => { EOF => {
// EOF should never get here. // EOF should never get here.
fail!(); fail2!();
} }
} }
} }

View File

@ -453,10 +453,10 @@ pub fn print_type(s: @ps, ty: &ast::Ty) {
word(s.s, ")"); word(s.s, ")");
} }
ast::ty_mac(_) => { ast::ty_mac(_) => {
fail!("print_type doesn't know how to print a ty_mac"); fail2!("print_type doesn't know how to print a ty_mac");
} }
ast::ty_infer => { ast::ty_infer => {
fail!("print_type shouldn't see a ty_infer"); fail2!("print_type shouldn't see a ty_infer");
} }
} }
@ -709,7 +709,7 @@ pub fn print_struct(s: @ps,
popen(s); popen(s);
do commasep(s, inconsistent, struct_def.fields) |s, field| { do commasep(s, inconsistent, struct_def.fields) |s, field| {
match field.node.kind { match field.node.kind {
ast::named_field(*) => fail!("unexpected named field"), ast::named_field(*) => fail2!("unexpected named field"),
ast::unnamed_field => { ast::unnamed_field => {
maybe_print_comment(s, field.span.lo); maybe_print_comment(s, field.span.lo);
print_type(s, &field.node.ty); print_type(s, &field.node.ty);
@ -728,7 +728,7 @@ pub fn print_struct(s: @ps,
for field in struct_def.fields.iter() { for field in struct_def.fields.iter() {
match field.node.kind { match field.node.kind {
ast::unnamed_field => fail!("unexpected unnamed field"), ast::unnamed_field => fail2!("unexpected unnamed field"),
ast::named_field(ident, visibility) => { ast::named_field(ident, visibility) => {
hardbreak_if_not_bol(s); hardbreak_if_not_bol(s);
maybe_print_comment(s, field.span.lo); maybe_print_comment(s, field.span.lo);
@ -1017,7 +1017,7 @@ pub fn print_if(s: @ps, test: &ast::Expr, blk: &ast::Block,
} }
// BLEAH, constraints would be great here // BLEAH, constraints would be great here
_ => { _ => {
fail!("print_if saw if with weird alternative"); fail2!("print_if saw if with weird alternative");
} }
} }
} }
@ -1042,7 +1042,7 @@ pub fn print_mac(s: @ps, m: &ast::mac) {
pub fn print_vstore(s: @ps, t: ast::Vstore) { pub fn print_vstore(s: @ps, t: ast::Vstore) {
match t { match t {
ast::VstoreFixed(Some(i)) => word(s.s, fmt!("%u", i)), ast::VstoreFixed(Some(i)) => word(s.s, format!("{}", i)),
ast::VstoreFixed(None) => word(s.s, "_"), ast::VstoreFixed(None) => word(s.s, "_"),
ast::VstoreUniq => word(s.s, "~"), ast::VstoreUniq => word(s.s, "~"),
ast::VstoreBox => word(s.s, "@"), ast::VstoreBox => word(s.s, "@"),
@ -1319,7 +1319,7 @@ pub fn print_expr(s: @ps, expr: &ast::Expr) {
} }
end(s); // close enclosing cbox end(s); // close enclosing cbox
} }
None => fail!() None => fail2!()
} }
} else { } else {
// the block will close the pattern's ibox // the block will close the pattern's ibox
@ -2299,7 +2299,7 @@ mod test {
fn string_check<T:Eq> (given : &T, expected: &T) { fn string_check<T:Eq> (given : &T, expected: &T) {
if !(given == expected) { if !(given == expected) {
fail!("given %?, expected %?", given, expected); fail2!("given {:?}, expected {:?}", given, expected);
} }
} }