Fixed error messages starting with uppercase

Error messages cleaned in librustc/middle
Error messages cleaned in libsyntax
Error messages cleaned in libsyntax more aggressively
Error messages cleaned in librustc more aggressively
Fixed affected tests
Fixed other failing tests
Last failing tests fixed
parent 35518514c4
commit ee3fa68fed
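The change below is mechanical: the first word of each diagnostic string is lowercased so that compiler messages read consistently. As an illustration only (this snippet is not part of the commit; the identifiers are taken from the first hunk below), the pattern is:

    // before: diagnostic text began with a capital letter
    llvm_err(sess, ~"Could not write output");
    // after: diagnostic text begins lowercase
    llvm_err(sess, ~"could not write output");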
@@ -76,7 +76,7 @@ pub fn WriteOutputFile(
 let result = llvm::LLVMRustWriteOutputFile(
 Target, PM, M, Output, FileType);
 if !result {
-llvm_err(sess, ~"Could not write output");
+llvm_err(sess, ~"could not write output");
 }
 })
 }
@@ -189,7 +189,7 @@ pub mod write {
 for pass in sess.opts.custom_passes.iter() {
 pass.with_c_str(|s| {
 if !llvm::LLVMRustAddPass(mpm, s) {
-sess.warn(format!("Unknown pass {}, ignoring", *pass));
+sess.warn(format!("unknown pass {}, ignoring", *pass));
 }
 })
 }
@@ -876,11 +876,11 @@ fn link_binary_output(sess: Session,
 let obj_is_writeable = is_writeable(&obj_filename);
 let out_is_writeable = is_writeable(&out_filename);
 if !out_is_writeable {
-sess.fatal(format!("Output file {} is not writeable -- check its permissions.",
+sess.fatal(format!("output file {} is not writeable -- check its permissions.",
 out_filename.display()));
 }
 else if !obj_is_writeable {
-sess.fatal(format!("Object file {} is not writeable -- check its permissions.",
+sess.fatal(format!("object file {} is not writeable -- check its permissions.",
 obj_filename.display()));
 }

@@ -308,7 +308,7 @@ impl Session_ {
 // This exists to help with refactoring to eliminate impossible
 // cases later on
 pub fn impossible_case(&self, sp: Span, msg: &str) -> ! {
-self.span_bug(sp, format!("Impossible case reached: {}", msg));
+self.span_bug(sp, format!("impossible case reached: {}", msg));
 }
 pub fn verbose(&self) -> bool { self.debugging_opt(VERBOSE) }
 pub fn time_passes(&self) -> bool { self.debugging_opt(TIME_PASSES) }

@@ -527,7 +527,7 @@ fn parse_sig(st: &mut PState, conv: conv_did) -> ty::FnSig {
 let variadic = match next(st) {
 'V' => true,
 'N' => false,
-r => fail!(format!("Bad variadic: {}", r)),
+r => fail!(format!("bad variadic: {}", r)),
 };
 let ret_ty = parse_ty(st, |x,y| conv(x,y));
 ty::FnSig {binder_id: id,

@@ -195,7 +195,7 @@ fn enc_region(w: &mut MemWriter, cx: @ctxt, r: ty::Region) {
 }
 ty::ReInfer(_) => {
 // these should not crop up after typeck
-cx.diag.handler().bug("Cannot encode region variables");
+cx.diag.handler().bug("cannot encode region variables");
 }
 }
 }
@@ -320,7 +320,7 @@ fn enc_sty(w: &mut MemWriter, cx: @ctxt, st: &ty::sty) {
 enc_bare_fn_ty(w, cx, f);
 }
 ty::ty_infer(_) => {
-cx.diag.handler().bug("Cannot encode inference variable types");
+cx.diag.handler().bug("cannot encode inference variable types");
 }
 ty::ty_param(param_ty {idx: id, def_id: did}) => {
 mywrite!(w, "p{}|{}", (cx.ds)(did), id);
@@ -334,7 +334,7 @@ fn enc_sty(w: &mut MemWriter, cx: @ctxt, st: &ty::sty) {
 enc_substs(w, cx, substs);
 mywrite!(w, "]");
 }
-ty::ty_err => fail!("Shouldn't encode error type")
+ty::ty_err => fail!("shouldn't encode error type")
 }
 }

@@ -215,7 +215,7 @@ impl<'a> GuaranteeLifetimeContext<'a> {
 // the check above should fail for anything is not ReScope
 self.bccx.tcx.sess.span_bug(
 cmt_base.span,
-format!("Cannot issue root for scope region: {:?}",
+format!("cannot issue root for scope region: {:?}",
 self.loan_region));
 }
 };

@@ -496,7 +496,7 @@ impl<'a> GatherLoanCtxt<'a> {
 ty::ReInfer(..) => {
 self.tcx().sess.span_bug(
 cmt.span,
-format!("Invalid borrow lifetime: {:?}", loan_region));
+format!("invalid borrow lifetime: {:?}", loan_region));
 }
 };
 debug!("loan_scope = {:?}", loan_scope);
@@ -820,7 +820,7 @@ impl<'a> GatherLoanCtxt<'a> {
 _ => {
 self.tcx().sess.span_bug(
 pat.span,
-format!("Type of slice pattern is not a slice"));
+format!("type of slice pattern is not a slice"));
 }
 }
 }

@@ -769,14 +769,14 @@ impl BorrowckCtxt {
 }
 _ => {
 self.tcx.sess.bug(
-format!("Loan path LpVar({:?}) maps to {:?}, not local",
+format!("loan path LpVar({:?}) maps to {:?}, not local",
 id, pat));
 }
 }
 }
 r => {
 self.tcx.sess.bug(
-format!("Loan path LpVar({:?}) maps to {:?}, not local",
+format!("loan path LpVar({:?}) maps to {:?}, not local",
 id, r));
 }
 }

@@ -494,7 +494,7 @@ impl MoveData {
 dfcx_assign.add_kill(kill_id, assignment_index);
 }
 LpExtend(..) => {
-tcx.sess.bug("Var assignment for non var path");
+tcx.sess.bug("var assignment for non var path");
 }
 }
 }

@@ -507,13 +507,13 @@ impl CFGBuilder {
 }
 self.tcx.sess.span_bug(
 expr.span,
-format!("No loop scope for id {:?}", loop_id));
+format!("no loop scope for id {:?}", loop_id));
 }

 r => {
 self.tcx.sess.span_bug(
 expr.span,
-format!("Bad entry `{:?}` in def_map for label", r));
+format!("bad entry `{:?}` in def_map for label", r));
 }
 }
 }

@@ -971,7 +971,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
 _ => {
 cx.tcx.sess.span_bug(
 p.span,
-format!("Binding pattern {} is \
+format!("binding pattern {} is \
 not an identifier: {:?}",
 p.id, p.node));
 }

@@ -342,8 +342,8 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
 Ok(const_float(f)) => Ok(const_float(-f)),
 Ok(const_int(i)) => Ok(const_int(-i)),
 Ok(const_uint(i)) => Ok(const_uint(-i)),
-Ok(const_str(_)) => Err(~"Negate on string"),
-Ok(const_bool(_)) => Err(~"Negate on boolean"),
+Ok(const_str(_)) => Err(~"negate on string"),
+Ok(const_bool(_)) => Err(~"negate on boolean"),
 ref err => ((*err).clone())
 }
 }
@@ -352,7 +352,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
 Ok(const_int(i)) => Ok(const_int(!i)),
 Ok(const_uint(i)) => Ok(const_uint(!i)),
 Ok(const_bool(b)) => Ok(const_bool(!b)),
-_ => Err(~"Not on float or string")
+_ => Err(~"not on float or string")
 }
 }
 ExprBinary(_, op, a, b) => {
@@ -371,7 +371,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
 BiNe => fromb(a != b),
 BiGe => fromb(a >= b),
 BiGt => fromb(a > b),
-_ => Err(~"Can't do this op on floats")
+_ => Err(~"can't do this op on floats")
 }
 }
 (Ok(const_int(a)), Ok(const_int(b))) => {
@@ -423,14 +423,14 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
 match op {
 BiShl => Ok(const_int(a << b)),
 BiShr => Ok(const_int(a >> b)),
-_ => Err(~"Can't do this op on an int and uint")
+_ => Err(~"can't do this op on an int and uint")
 }
 }
 (Ok(const_uint(a)), Ok(const_int(b))) => {
 match op {
 BiShl => Ok(const_uint(a << b)),
 BiShr => Ok(const_uint(a >> b)),
-_ => Err(~"Can't do this op on a uint and int")
+_ => Err(~"can't do this op on a uint and int")
 }
 }
 (Ok(const_bool(a)), Ok(const_bool(b))) => {
@@ -442,10 +442,10 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
 BiBitOr => a | b,
 BiEq => a == b,
 BiNe => a != b,
-_ => return Err(~"Can't do this op on bools")
+_ => return Err(~"can't do this op on bools")
 }))
 }
-_ => Err(~"Bad operands for binary")
+_ => Err(~"bad operands for binary")
 }
 }
 ExprCast(base, target_ty) => {
@@ -456,7 +456,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
 .or_else(|| astconv::ast_ty_to_prim_ty(tcx.ty_ctxt(), target_ty))
 .unwrap_or_else(|| tcx.ty_ctxt().sess.span_fatal(
 target_ty.span,
-format!("Target type not found for const cast")
+format!("target type not found for const cast")
 ));

 let base = eval_const_expr_partial(tcx, base);
@@ -469,7 +469,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
 const_uint(u) => Ok(const_float(u as f64)),
 const_int(i) => Ok(const_float(i as f64)),
 const_float(f) => Ok(const_float(f)),
-_ => Err(~"Can't cast float to str"),
+_ => Err(~"can't cast float to str"),
 }
 }
 ty::ty_uint(_) => {
@@ -477,7 +477,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
 const_uint(u) => Ok(const_uint(u)),
 const_int(i) => Ok(const_uint(i as u64)),
 const_float(f) => Ok(const_uint(f as u64)),
-_ => Err(~"Can't cast str to uint"),
+_ => Err(~"can't cast str to uint"),
 }
 }
 ty::ty_int(_) | ty::ty_bool => {
@@ -485,10 +485,10 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
 const_uint(u) => Ok(const_int(u as i64)),
 const_int(i) => Ok(const_int(i)),
 const_float(f) => Ok(const_int(f as i64)),
-_ => Err(~"Can't cast str to int"),
+_ => Err(~"can't cast str to int"),
 }
 }
-_ => Err(~"Can't cast this type")
+_ => Err(~"can't cast this type")
 }
 }
 }
@@ -496,14 +496,14 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
 ExprPath(_) => {
 match lookup_const(tcx.ty_ctxt(), e) {
 Some(actual_e) => eval_const_expr_partial(&tcx.ty_ctxt(), actual_e),
-None => Err(~"Non-constant path in constant expr")
+None => Err(~"non-constant path in constant expr")
 }
 }
 ExprLit(lit) => Ok(lit_to_const(lit)),
 // If we have a vstore, just keep going; it has to be a string
 ExprVstore(e, _) => eval_const_expr_partial(tcx, e),
 ExprParen(e) => eval_const_expr_partial(tcx, e),
-_ => Err(~"Unsupported constant expr")
+_ => Err(~"unsupported constant expr")
 }
 }

@@ -879,7 +879,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
 None => {
 self.tcx().sess.span_bug(
 expr.span,
-format!("No loop scope for id {:?}", loop_id));
+format!("no loop scope for id {:?}", loop_id));
 }
 }
 }
@@ -887,7 +887,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
 r => {
 self.tcx().sess.span_bug(
 expr.span,
-format!("Bad entry `{:?}` in def_map for label", r));
+format!("bad entry `{:?}` in def_map for label", r));
 }
 }
 }

@@ -328,7 +328,7 @@ impl IrMaps {
 Some(&var) => var,
 None => {
 self.tcx.sess.span_bug(
-span, format!("No variable registered for id {}", node_id));
+span, format!("no variable registered for id {}", node_id));
 }
 }
 }
@@ -627,7 +627,7 @@ impl Liveness {
 // code have to agree about which AST nodes are worth
 // creating liveness nodes for.
 self.tcx.sess.span_bug(
-span, format!("No live node registered for node {}",
+span, format!("no live node registered for node {}",
 node_id));
 }
 }
@@ -759,7 +759,7 @@ impl Liveness {
 let def_map = self.tcx.def_map.borrow();
 match def_map.get().find(&id) {
 Some(&DefLabel(loop_id)) => loop_id,
-_ => self.tcx.sess.span_bug(sp, "Label on break/loop \
+_ => self.tcx.sess.span_bug(sp, "label on break/loop \
 doesn't refer to a loop")
 }
 }
@@ -1152,7 +1152,7 @@ impl Liveness {
 match break_ln.get().find(&sc) {
 Some(&b) => b,
 None => self.tcx.sess.span_bug(expr.span,
-"Break to unknown label")
+"break to unknown label")
 }
 }
@@ -1167,7 +1167,7 @@ impl Liveness {
 match cont_ln.get().find(&sc) {
 Some(&b) => b,
 None => self.tcx.sess.span_bug(expr.span,
-"Loop to unknown label")
+"loop to unknown label")
 }
 }

@@ -548,7 +548,7 @@ impl mem_categorization_ctxt {
 _ => {
 self.tcx.sess.span_bug(
 span,
-format!("Upvar of non-closure {:?} - {}",
+format!("upvar of non-closure {:?} - {}",
 fn_node_id, ty.repr(self.tcx)));
 }
 }
@@ -652,7 +652,7 @@ impl mem_categorization_ctxt {
 None => {
 self.tcx.sess.span_bug(
 node.span(),
-format!("Explicit deref of non-derefable type: {}",
+format!("explicit deref of non-derefable type: {}",
 ty_to_str(self.tcx, base_cmt.ty)));
 }
 };
@@ -745,7 +745,7 @@ impl mem_categorization_ctxt {
 None => {
 self.tcx.sess.span_bug(
 elt.span(),
-format!("Explicit index of non-index type `{}`",
+format!("explicit index of non-index type `{}`",
 ty_to_str(self.tcx, base_cmt.ty)));
 }
 };

@@ -142,7 +142,7 @@ impl RegionMaps {
 let scope_map = self.scope_map.borrow();
 match scope_map.get().find(&id) {
 Some(&r) => r,
-None => { fail!("No enclosing scope for id {}", id); }
+None => { fail!("no enclosing scope for id {}", id); }
 }
 }
@@ -154,7 +154,7 @@ impl RegionMaps {
 let var_map = self.var_map.borrow();
 match var_map.get().find(&var_id) {
 Some(&r) => r,
-None => { fail!("No enclosing scope for id {}", var_id); }
+None => { fail!("no enclosing scope for id {}", var_id); }
 }
 }
@@ -554,7 +554,7 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor,
 None => {
 visitor.sess.span_bug(
 local.span,
-"Local without enclosing block");
+"local without enclosing block");
 }
 };

@@ -1378,7 +1378,7 @@ impl Resolver {
 }
 match method_map.get().find_mut(name) {
 Some(s) => { s.insert(def_id); },
-_ => fail!("Can't happen"),
+_ => fail!("can't happen"),
 }
 }
@@ -1696,7 +1696,7 @@ impl Resolver {
 }
 match method_map.get().find_mut(name) {
 Some(s) => { s.insert(def_id); },
-_ => fail!("Can't happen"),
+_ => fail!("can't happen"),
 }
 }
@@ -3992,7 +3992,7 @@ impl Resolver {
 self.resolve_error(field.span,
 format!("field `{}` is already declared", ident_str));
 self.session.span_note(prev_field.span,
-"Previously declared here");
+"previously declared here");
 },
 None => {
 ident_map.insert(ident, field);
@@ -4476,7 +4476,7 @@ impl Resolver {
 // in the same disjunct, which is an
 // error
 self.resolve_error(pattern.span,
-format!("Identifier `{}` is bound more \
+format!("identifier `{}` is bound more \
 than once in the same pattern",
 path_to_str(path, self.session
 .intr())));

@@ -297,7 +297,7 @@ impl LifetimeContext {
 def: ast::DefRegion) {
 if lifetime_ref.id == ast::DUMMY_NODE_ID {
 self.sess.span_bug(lifetime_ref.span,
-"Lifetime reference not renumbered, \
+"lifetime reference not renumbered, \
 probably a bug in syntax::fold");
 }

@@ -484,7 +484,7 @@ fn assert_is_binding_or_wild(bcx: &Block, p: @ast::Pat) {
 if !pat_is_binding_or_wild(bcx.tcx().def_map, p) {
 bcx.sess().span_bug(
 p.span,
-format!("Expected an identifier pattern but found p: {}",
+format!("expected an identifier pattern but found p: {}",
 p.repr(bcx.tcx())));
 }
 }

@@ -434,7 +434,7 @@ fn generic_type_of(cx: &CrateContext, r: &Repr, name: Option<&str>, sizing: bool
 Type::array(&Type::i64(), align_units),
 a if a.population_count() == 1 => Type::array(&Type::vector(&Type::i32(), a / 4),
 align_units),
-_ => fail!("Unsupported enum alignment: {:?}", align)
+_ => fail!("unsupported enum alignment: {:?}", align)
 };
 assert_eq!(machine::llalign_of_min(cx, pad_ty) as u64, align);
 assert_eq!(align % discr_size, 0);

@@ -103,7 +103,7 @@ fn trans<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) -> Callee<'a> {
 _ => {
 bcx.tcx().sess.span_bug(
 expr.span,
-format!("Type of callee is neither bare-fn nor closure: {}",
+format!("type of callee is neither bare-fn nor closure: {}",
 bcx.ty_to_str(datum.ty)));
 }
 }
@@ -151,7 +151,7 @@ fn trans<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) -> Callee<'a> {
 ast::DefSelfTy(..) | ast::DefMethod(..) => {
 bcx.tcx().sess.span_bug(
 ref_expr.span,
-format!("Cannot translate def {:?} \
+format!("cannot translate def {:?} \
 to a callable thing!", def));
 }
 }

@@ -211,7 +211,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
 _ => {}
 }
 }
-self.ccx.tcx.sess.bug("No loop scope found");
+self.ccx.tcx.sess.bug("no loop scope found");
 }

 fn normal_exit_block(&'a self,
@@ -337,7 +337,7 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
 }

 self.ccx.tcx.sess.bug(
-format!("No cleanup scope {} found",
+format!("no cleanup scope {} found",
 ast_map::node_id_to_str(self.ccx.tcx.items, cleanup_scope,
 token::get_ident_interner())));
 }
@@ -548,7 +548,7 @@ impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {

 LoopExit(id, _) => {
 self.ccx.tcx.sess.bug(format!(
-"Cannot exit from scope {:?}, \
+"cannot exit from scope {:?}, \
 not in scope", id));
 }
 }

@@ -472,7 +472,7 @@ impl<'a> Block<'a> {
 Some(&v) => v,
 None => {
 self.tcx().sess.bug(format!(
-"No def associated with node id {:?}", nid));
+"no def associated with node id {:?}", nid));
 }
 }
 }
@@ -852,7 +852,7 @@ pub fn node_id_type_params(bcx: &Block, id: ast::NodeId) -> ~[ty::t] {

 if !params.iter().all(|t| !ty::type_needs_infer(*t)) {
 bcx.sess().bug(
-format!("Type parameters for node {} include inference types: {}",
+format!("type parameters for node {} include inference types: {}",
 id, params.map(|t| bcx.ty_to_str(*t)).connect(",")));
 }

@@ -146,14 +146,14 @@ fn const_deref(cx: &CrateContext, v: ValueRef, t: ty::t, explicit: bool)
 const_deref_newtype(cx, v, t)
 }
 _ => {
-cx.sess.bug(format!("Unexpected dereferenceable type {}",
+cx.sess.bug(format!("unexpected dereferenceable type {}",
 ty_to_str(cx.tcx, t)))
 }
 };
 (dv, mt.ty)
 }
 None => {
-cx.sess.bug(format!("Can't dereference const of type {}",
+cx.sess.bug(format!("can't dereference const of type {}",
 ty_to_str(cx.tcx, t)))
 }
 }

@@ -298,7 +298,7 @@ pub fn create_local_var_metadata(bcx: &Block, local: &ast::Local) {
 Some(datum) => datum,
 None => {
 bcx.tcx().sess.span_bug(span,
-format!("No entry in lllocals table for {:?}",
+format!("no entry in lllocals table for {:?}",
 node_id));
 }
 }
@@ -440,7 +440,7 @@ pub fn create_argument_metadata(bcx: &Block, arg: &ast::Arg) {
 Some(v) => v,
 None => {
 bcx.tcx().sess.span_bug(span,
-format!("No entry in llargs table for {:?}",
+format!("no entry in llargs table for {:?}",
 node_id));
 }
 }

@@ -81,17 +81,17 @@ pub fn llvm_calling_convention(ccx: &CrateContext,
 match abi {
 RustIntrinsic => {
 // Intrinsics are emitted by monomorphic fn
-ccx.sess.bug(format!("Asked to register intrinsic fn"));
+ccx.sess.bug(format!("asked to register intrinsic fn"));
 }

 Rust => {
 // FIXME(#3678) Implement linking to foreign fns with Rust ABI
 ccx.sess.unimpl(
-format!("Foreign functions with Rust ABI"));
+format!("foreign functions with Rust ABI"));
 }

 // It's the ABI's job to select this, not us.
-System => ccx.sess.bug("System abi should be selected elsewhere"),
+System => ccx.sess.bug("system abi should be selected elsewhere"),

 Stdcall => lib::llvm::X86StdcallCallConv,
 Fastcall => lib::llvm::X86FastcallCallConv,
@@ -365,7 +365,7 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
 (abis, (*path).clone())
 }
 _ => {
-fail!("Unable to find foreign item in tcx.items \
+fail!("unable to find foreign item in tcx.items \
 table.")
 }
 };

@@ -227,7 +227,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
 "acq" => lib::llvm::Acquire,
 "rel" => lib::llvm::Release,
 "acqrel" => lib::llvm::AcquireRelease,
-_ => ccx.sess.fatal("Unknown ordering in atomic intrinsic")
+_ => ccx.sess.fatal("unknown ordering in atomic intrinsic")
 }
 };
@@ -268,7 +268,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
 "min" => lib::llvm::Min,
 "umax" => lib::llvm::UMax,
 "umin" => lib::llvm::UMin,
-_ => ccx.sess.fatal("Unknown atomic operation")
+_ => ccx.sess.fatal("unknown atomic operation")
 };

 let old = AtomicRMW(bcx, atom_op, get_param(decl, first_real_arg),

@@ -98,7 +98,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
 session::expect(
 ccx.sess,
 ccx.tcx.items.find(fn_id.node),
-|| format!("While monomorphizing {:?}, couldn't find it in the \
+|| format!("while monomorphizing {:?}, couldn't find it in the \
 item map (may have attempted to monomorphize an item \
 defined in a different crate?)", fn_id))
 };
@@ -286,7 +286,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
 d
 }
 _ => {
-ccx.tcx.sess.bug(format!("Can't monomorphize a {:?}",
+ccx.tcx.sess.bug(format!("can't monomorphize a {:?}",
 map_node))
 }
 }
@@ -310,7 +310,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
 ast_map::NodeBlock(..) |
 ast_map::NodeCalleeScope(..) |
 ast_map::NodeLocal(..) => {
-ccx.tcx.sess.bug(format!("Can't monomorphize a {:?}", map_node))
+ccx.tcx.sess.bug(format!("can't monomorphize a {:?}", map_node))
 }
 };

@@ -95,7 +95,7 @@ impl<'a> Reflector<'a> {
 let tcx = self.bcx.tcx();
 let mth_idx = ty::method_idx(
 tcx.sess.ident_of(~"visit_" + ty_name),
-*self.visitor_methods).expect(format!("Couldn't find visit method \
+*self.visitor_methods).expect(format!("couldn't find visit method \
 for {}", ty_name));
 let mth_ty =
 ty::mk_bare_fn(tcx, self.visitor_methods[mth_idx].fty.clone());

@@ -404,7 +404,7 @@ pub fn write_content<'a>(
 }
 _ => {
 bcx.tcx().sess.span_bug(content_expr.span,
-"Unexpected evec content");
+"unexpected evec content");
 }
 }
 }
@@ -465,7 +465,7 @@ pub fn write_content<'a>(
 }
 _ => {
 bcx.tcx().sess.span_bug(content_expr.span,
-"Unexpected vec content");
+"unexpected vec content");
 }
 }
 }
@@ -498,7 +498,7 @@ pub fn elements_required(bcx: &Block, content_expr: &ast::Expr) -> uint {
 ast::LitStr(ref s, _) => s.get().len(),
 _ => {
 bcx.tcx().sess.span_bug(content_expr.span,
-"Unexpected evec content")
+"unexpected evec content")
 }
 }
 },
@@ -507,7 +507,7 @@ pub fn elements_required(bcx: &Block, content_expr: &ast::Expr) -> uint {
 ty::eval_repeat_count(&bcx.tcx(), count_expr)
 }
 _ => bcx.tcx().sess.span_bug(content_expr.span,
-"Unexpected vec content")
+"unexpected vec content")
 }
 }

@@ -2101,7 +2101,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
 ty_type => TC::None,

 ty_err => {
-cx.sess.bug("Asked to compute contents of error type");
+cx.sess.bug("asked to compute contents of error type");
 }
 };
@@ -2926,7 +2926,7 @@ pub fn adjust_ty(cx: ctxt,
 None => {
 cx.sess.span_bug(
 span,
-format!("The {}th autoderef failed: \
+format!("the {}th autoderef failed: \
 {}",
 i,
 ty_to_str(cx, adjusted_ty)));
@@ -3132,7 +3132,7 @@ pub fn resolve_expr(tcx: ctxt, expr: &ast::Expr) -> ast::Def {
 Some(&def) => def,
 None => {
 tcx.sess.span_bug(expr.span, format!(
-"No def-map entry for expr {:?}", expr.id));
+"no def-map entry for expr {:?}", expr.id));
 }
 }
 }
@@ -3209,7 +3209,7 @@ pub fn expr_kind(tcx: ctxt,

 def => {
 tcx.sess.span_bug(expr.span, format!(
-"Uncategorized def for expr {:?}: {:?}",
+"uncategorized def for expr {:?}: {:?}",
 expr.id, def));
 }
 }
@@ -3335,7 +3335,7 @@ pub fn field_idx_strict(tcx: ty::ctxt, name: ast::Name, fields: &[field])
 for f in fields.iter() { if f.ident.name == name { return i; } i += 1u; }
 let string = token::get_ident(name);
 tcx.sess.bug(format!(
-"No field named `{}` found in the list of fields `{:?}`",
+"no field named `{}` found in the list of fields `{:?}`",
 string.get(),
 fields.map(|f| tcx.sess.str_of(f.ident))));
 }
@@ -3687,7 +3687,7 @@ fn lookup_locally_or_in_crate_store<V:Clone>(
 }

 if def_id.crate == ast::LOCAL_CRATE {
-fail!("No def'n found for {:?} in tcx.{}", def_id, descr);
+fail!("no def'n found for {:?} in tcx.{}", def_id, descr);
 }
 let v = load_external();
 map.insert(def_id, v.clone());

@@ -789,7 +789,7 @@ impl<'a> LookupContext<'a> {
 ty_err => None,

 ty_unboxed_vec(_) | ty_type | ty_infer(TyVar(_)) => {
-self.bug(format!("Unexpected type: {}",
+self.bug(format!("unexpected type: {}",
 self.ty_to_str(self_ty)));
 }
 }

@@ -517,9 +517,9 @@ pub fn check_no_duplicate_fields(tcx: ty::ctxt,
 let orig_sp = field_names.find(&id).map(|x| *x);
 match orig_sp {
 Some(orig_sp) => {
-tcx.sess.span_err(sp, format!("Duplicate field name {} in record type declaration",
+tcx.sess.span_err(sp, format!("duplicate field name {} in record type declaration",
 tcx.sess.str_of(id)));
-tcx.sess.span_note(orig_sp, "First declaration of this field occurred here");
+tcx.sess.span_note(orig_sp, "first declaration of this field occurred here");
 break;
 }
 None => {
@@ -1006,7 +1006,7 @@ impl FnCtxt {
 None => {
 self.tcx().sess.span_bug(
 span,
-format!("No type for local variable {:?}", nid));
+format!("no type for local variable {:?}", nid));
 }
 }
 }
@@ -1619,7 +1619,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt,
 _ => {
 fcx.tcx().sess.span_bug(
 sp,
-format!("Method without bare fn type"));
+format!("method without bare fn type"));
 }
 }
 }

@@ -473,7 +473,7 @@ fn check_expr_fn_block(rcx: &mut Rcx,
 _ => {
 tcx.sess.span_bug(
 expr.span,
-"Expected expr_fn_block");
+"expected expr_fn_block");
 }
 }
 }
@@ -1189,7 +1189,7 @@ pub mod guarantor {
 None => {
 tcx.sess.span_bug(
 expr.span,
-format!("Autoderef but type not derefable: {}",
+format!("autoderef but type not derefable: {}",
 ty_to_str(tcx, ct.ty)));
 }
 }

@@ -430,7 +430,7 @@ pub fn ensure_supertraits(ccx: &CrateCtxt,
 if ty_trait_refs.iter().any(|other_trait| other_trait.def_id == trait_ref.def_id) {
 // This means a trait inherited from the same supertrait more
 // than once.
-tcx.sess.span_err(sp, "Duplicate supertrait in trait declaration");
+tcx.sess.span_err(sp, "duplicate supertrait in trait declaration");
 break;
 } else {
 ty_trait_refs.push(trait_ref);
@@ -891,7 +891,7 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item)
 ast::ItemTrait(..) => {
 tcx.sess.span_bug(
 it.span,
-format!("Invoked ty_of_item on trait"));
+format!("invoked ty_of_item on trait"));
 }
 ast::ItemStruct(_, ref generics) => {
 let ty_generics = ty_generics(ccx, generics, 0);

@@ -202,7 +202,7 @@ impl<'f> Coerce<'f> {
 Err(e) => {
 self.get_ref().infcx.tcx.sess.span_bug(
 self.get_ref().trace.origin.span(),
-format!("Failed to resolve even without \
+format!("failed to resolve even without \
 any force options: {:?}", e));
 }
 }

@@ -528,7 +528,7 @@ pub fn var_ids<T:Combine>(this: &T,
 r => {
 this.infcx().tcx.sess.span_bug(
 this.trace().origin.span(),
-format!("Found non-region-vid: {:?}", r));
+format!("found non-region-vid: {:?}", r));
 }
 }).collect()
 }

@@ -233,7 +233,7 @@ impl RegionVarBindings {
 self.bound_count.set(sc + 1);

 if sc >= self.bound_count.get() {
-self.tcx.sess.bug("Rollover in RegionInference new_bound()");
+self.tcx.sess.bug("rollover in RegionInference new_bound()");
 }

 ReLateBound(binder_id, BrFresh(sc))
@@ -278,7 +278,7 @@ impl RegionVarBindings {
 (_, ReLateBound(..)) => {
 self.tcx.sess.span_bug(
 origin.span(),
-format!("Cannot relate bound region: {} <= {}",
+format!("cannot relate bound region: {} <= {}",
 sub.repr(self.tcx),
 sup.repr(self.tcx)));
 }
@@ -351,7 +351,7 @@ impl RegionVarBindings {
 let var_origins = self.var_origins.borrow();
 self.tcx.sess.span_bug(
 var_origins.get()[rid.to_uint()].span(),
-format!("Attempt to resolve region variable before \
+format!("attempt to resolve region variable before \
 values have been computed!"))
 }
 Some(ref values) => values[rid.to_uint()]
@@ -544,7 +544,7 @@ impl RegionVarBindings {
 (ReEarlyBound(..), _) |
 (_, ReEarlyBound(..)) => {
 self.tcx.sess.bug(
-format!("Cannot relate bound region: LUB({}, {})",
+format!("cannot relate bound region: LUB({}, {})",
 a.repr(self.tcx),
 b.repr(self.tcx)));
 }
@@ -646,7 +646,7 @@ impl RegionVarBindings {
 (ReEarlyBound(..), _) |
 (_, ReEarlyBound(..)) => {
 self.tcx.sess.bug(
-format!("Cannot relate bound region: GLB({}, {})",
+format!("cannot relate bound region: GLB({}, {})",
 a.repr(self.tcx),
 b.repr(self.tcx)));
 }

@@ -97,7 +97,7 @@ impl Env {
 return match search_mod(self, &self.crate.node.module, 0, names) {
 Some(id) => id,
 None => {
-fail!("No item found: `%s`", names.connect("::"));
+fail!("no item found: `%s`", names.connect("::"));
 }
 };
@@ -230,7 +230,7 @@ impl Env {
 for msg in self.err_messages.iter() {
 debug!("Error encountered: %s", *msg);
 }
-format!("Resolving regions encountered %u errors but expected %u!",
+format!("resolving regions encountered %u errors but expected %u!",
 self.err_messages.len(),
 exp_count);
 }
@@ -240,7 +240,7 @@ impl Env {
 pub fn check_lub(&self, t1: ty::t, t2: ty::t, t_lub: ty::t) {
 match self.lub().tys(t1, t2) {
 Err(e) => {
-fail!("Unexpected error computing LUB: %?", e)
+fail!("unexpected error computing LUB: %?", e)
 }
 Ok(t) => {
 self.assert_eq(t, t_lub);
@@ -262,7 +262,7 @@ impl Env {
 self.ty_to_str(t_glb));
 match self.glb().tys(t1, t2) {
 Err(e) => {
-fail!("Unexpected error computing LUB: %?", e)
+fail!("unexpected error computing LUB: %?", e)
 }
 Ok(t) => {
 self.assert_eq(t, t_glb);
@@ -281,7 +281,7 @@ impl Env {
 match self.lub().tys(t1, t2) {
 Err(_) => {}
 Ok(t) => {
-fail!("Unexpected success computing LUB: %?", self.ty_to_str(t))
+fail!("unexpected success computing LUB: %?", self.ty_to_str(t))
 }
 }
 }
@@ -291,7 +291,7 @@ impl Env {
 match self.glb().tys(t1, t2) {
 Err(_) => {}
 Ok(t) => {
-fail!("Unexpected success computing GLB: %?", self.ty_to_str(t))
+fail!("unexpected success computing GLB: %?", self.ty_to_str(t))
 }
 }
 }

@@ -718,7 +718,7 @@ impl<'a> ConstraintContext<'a> {
 ty::ty_infer(..) | ty::ty_err |
 ty::ty_type | ty::ty_unboxed_vec(..) => {
 self.tcx().sess.bug(
-format!("Unexpected type encountered in \
+format!("unexpected type encountered in \
 variance inference: {}",
 ty.repr(self.tcx())));
 }
@@ -804,7 +804,7 @@ impl<'a> ConstraintContext<'a> {
 ty::ReEmpty => {
 // We don't expect to see anything but 'static or bound
 // regions when visiting member types or method types.
-self.tcx().sess.bug(format!("Unexpected region encountered in \
+self.tcx().sess.bug(format!("unexpected region encountered in \
 variance inference: {}",
 region.repr(self.tcx())));
 }

@@ -64,12 +64,12 @@ fn add_bytes_to_bits<T: Int + CheckedAdd + ToBits>(bits: T, bytes: T) -> T {
 let (new_high_bits, new_low_bits) = bytes.to_bits();

 if new_high_bits > Zero::zero() {
-fail!("Numeric overflow occured.")
+fail!("numeric overflow occured.")
 }

 match bits.checked_add(&new_low_bits) {
 Some(x) => return x,
-None => fail!("Numeric overflow occured.")
+None => fail!("numeric overflow occured.")
 }
 }

@@ -323,7 +323,7 @@ fn cannot_combine(n: Abi, m: Abi) {
 (m == a && n == b));
 }
 None => {
-fail!("Invalid match not detected");
+fail!("invalid match not detected");
 }
 }
 }
@@ -335,7 +335,7 @@ fn can_combine(n: Abi, m: Abi) {
 set.add(m);
 match set.check_valid() {
 Some((_, _)) => {
-fail!("Valid match declared invalid");
+fail!("valid match declared invalid");
 }
 None => {}
 }

@@ -40,7 +40,7 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) ->
 // u8 literal, push to vector expression
 ast::LitUint(v, ast::TyU8) => {
 if v > 0xFF {
-cx.span_err(expr.span, "Too large u8 literal in bytes!")
+cx.span_err(expr.span, "too large u8 literal in bytes!")
 } else {
 bytes.push(cx.expr_u8(expr.span, v as u8));
 }
@@ -49,9 +49,9 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) ->
 // integer literal, push to vector expression
 ast::LitIntUnsuffixed(v) => {
 if v > 0xFF {
-cx.span_err(expr.span, "Too large integer literal in bytes!")
+cx.span_err(expr.span, "too large integer literal in bytes!")
 } else if v < 0 {
-cx.span_err(expr.span, "Negative integer literal in bytes!")
+cx.span_err(expr.span, "negative integer literal in bytes!")
 } else {
 bytes.push(cx.expr_u8(expr.span, v as u8));
 }
@@ -62,14 +62,14 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) ->
 if char::from_u32(v).unwrap().is_ascii() {
 bytes.push(cx.expr_u8(expr.span, v as u8));
 } else {
-cx.span_err(expr.span, "Non-ascii char literal in bytes!")
+cx.span_err(expr.span, "non-ascii char literal in bytes!")
 }
 }

-_ => cx.span_err(expr.span, "Unsupported literal in bytes!")
+_ => cx.span_err(expr.span, "unsupported literal in bytes!")
 },

-_ => cx.span_err(expr.span, "Non-literal in bytes!")
+_ => cx.span_err(expr.span, "non-literal in bytes!")
 }
 }

@@ -92,10 +92,10 @@ fn cs_clone(
 all_fields = af;
 },
 EnumNonMatching(..) => cx.span_bug(trait_span,
-format!("Non-matching enum variants in `deriving({})`",
+format!("non-matching enum variants in `deriving({})`",
 name)),
 StaticEnum(..) | StaticStruct(..) => cx.span_bug(trait_span,
-format!("Static method in `deriving({})`",
+format!("static method in `deriving({})`",
 name))
 }

@@ -75,7 +75,7 @@ fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt, span: Span, substr: &Substru
 */
 let other_f = match other_fs {
 [o_f] => o_f,
-_ => cx.span_bug(span, "Not exactly 2 arguments in `deriving(Ord)`")
+_ => cx.span_bug(span, "not exactly 2 arguments in `deriving(Ord)`")
 };

 let cmp = cx.expr_binary(span, op, self_f, other_f);
@@ -99,7 +99,7 @@ fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt, span: Span, substr: &Substru
 } else {
 self_var > other_var
 }),
-_ => cx.span_bug(span, "Not exactly 2 arguments in `deriving(Ord)`")
+_ => cx.span_bug(span, "not exactly 2 arguments in `deriving(Ord)`")
 }
 },
 cx, span, substr)

@@ -110,7 +110,7 @@ pub fn cs_cmp(cx: &mut ExtCtxt, span: Span,
 let order = ordering_const(cx, span, self_var.cmp(&other_var));
 cx.expr_path(order)
 }
-_ => cx.span_bug(span, "Not exactly 2 arguments in `deriving(TotalOrd)`")
+_ => cx.span_bug(span, "not exactly 2 arguments in `deriving(TotalOrd)`")
 }
 },
 cx, span, substr)

@@ -656,7 +656,7 @@ impl<'a> MethodDef<'a> {
 }).collect()
 }
 [] => { trait_.cx.span_bug(trait_.span,
-"No self arguments to non-static method \
+"no self arguments to non-static method \
 in generic `deriving`") }
 };
@@ -840,7 +840,7 @@ impl<'a> MethodDef<'a> {
 let index = match matching {
 Some(i) => i,
 None => cx.span_bug(trait_.span,
-"Non-matching variants when required to \
+"non-matching variants when required to \
 be matching in generic `deriving`")
 };
@@ -965,7 +965,7 @@ impl<'a> TraitDef<'a> {

 match (just_spans.is_empty(), named_idents.is_empty()) {
 (false, false) => self.cx.span_bug(self.span,
-"A struct with named and unnamed \
+"a struct with named and unnamed \
 fields in generic `deriving`"),
 // named fields
 (_, false) => Named(named_idents),
@@ -1019,7 +1019,7 @@ impl<'a> TraitDef<'a> {
 None
 }
 _ => {
-cx.span_bug(sp, "A struct with named and unnamed fields in `deriving`");
+cx.span_bug(sp, "a struct with named and unnamed fields in `deriving`");
 }
 };
 let path = cx.path_ident(sp, cx.ident_of(format!("{}_{}", prefix, i)));
@@ -1116,7 +1116,7 @@ pub fn cs_fold(use_foldl: bool,
 *all_enums,
 substructure.nonself_args),
 StaticEnum(..) | StaticStruct(..) => {
-cx.span_bug(trait_span, "Static function in `deriving`")
+cx.span_bug(trait_span, "static function in `deriving`")
 }
 }
 }
@@ -1154,7 +1154,7 @@ pub fn cs_same_method(f: |&mut ExtCtxt, Span, ~[@Expr]| -> @Expr,
 *all_enums,
 substructure.nonself_args),
 StaticEnum(..) | StaticStruct(..) => {
-cx.span_bug(trait_span, "Static function in `deriving`")
+cx.span_bug(trait_span, "static function in `deriving`")
 }
 }
 }

@@ -48,7 +48,7 @@ pub fn expand_deriving_iter_bytes(cx: &mut ExtCtxt,
 fn iter_bytes_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> @Expr {
 let (lsb0, f)= match substr.nonself_args {
 [l, f] => (l, f),
-_ => cx.span_bug(trait_span, "Incorrect number of arguments in `deriving(IterBytes)`")
+_ => cx.span_bug(trait_span, "incorrect number of arguments in `deriving(IterBytes)`")
 };
 // Build the "explicitly borrowed" stack closure, "|_buf| f(_buf)".
 let blk_arg = cx.ident_of("_buf");
@@ -82,7 +82,7 @@ fn iter_bytes_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substruc

 fields = fs;
 }
-_ => cx.span_bug(trait_span, "Impossible substructure in `deriving(IterBytes)`")
+_ => cx.span_bug(trait_span, "impossible substructure in `deriving(IterBytes)`")
 }

 for &FieldInfo { self_, span, .. } in fields.iter() {

@@ -68,7 +68,7 @@ pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt,
 fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> @Expr {
 let n = match substr.nonself_args {
 [n] => n,
-_ => cx.span_bug(trait_span, "Incorrect number of arguments in `deriving(FromPrimitive)`")
+_ => cx.span_bug(trait_span, "incorrect number of arguments in `deriving(FromPrimitive)`")
 };

 match *substr.fields {

@@ -178,8 +178,8 @@ impl<'a> Ty<'a> {
 Literal(ref p) => {
 p.to_path(cx, span, self_ty, self_generics)
 }
-Ptr(..) => { cx.span_bug(span, "Pointer in a path in generic `deriving`") }
-Tuple(..) => { cx.span_bug(span, "Tuple in a path in generic `deriving`") }
+Ptr(..) => { cx.span_bug(span, "pointer in a path in generic `deriving`") }
+Tuple(..) => { cx.span_bug(span, "tuple in a path in generic `deriving`") }
 }
 }
 }

@@ -257,7 +257,7 @@ pub mod rt {
 match res {
 Some(ast) => ast,
 None => {
-error!("Parse error");
+error!("parse error");
 fail!()
 }
 }
@@ -589,7 +589,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])

 let cx_expr = p.parse_expr();
 if !p.eat(&token::COMMA) {
-p.fatal("Expected token `,`");
+p.fatal("expected token `,`");
 }

 let tts = p.parse_all_token_trees();

@@ -49,7 +49,7 @@ pub fn find_macro_registrar(diagnostic: @diagnostic::SpanHandler,
 })
 },
 _ => {
-diagnostic.handler().err("Multiple macro registration functions found");
+diagnostic.handler().err("multiple macro registration functions found");
 for &(_, span) in ctx.registrars.iter() {
 diagnostic.span_note(span, "one is here");
 }

@@ -185,7 +185,7 @@ pub fn nameize(p_s: @ParseSess, ms: &[Matcher], res: &[@NamedMatch])
 if ret_val.contains_key(bind_name) {
 let string = token::get_ident(bind_name.name);
 p_s.span_diagnostic
-.span_fatal(sp, "Duplicated bind name: " + string.get())
+.span_fatal(sp, "duplicated bind name: " + string.get())
 }
 ret_val.insert(*bind_name, res[idx]);
 }
@@ -441,6 +441,6 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
 res
 }
 "matchers" => token::NtMatchers(p.parse_matchers()),
-_ => p.fatal(~"Unsupported builtin nonterminal parser: " + name)
+_ => p.fatal(~"unsupported builtin nonterminal parser: " + name)
 }
 }

@ -148,7 +148,7 @@ fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize {
|
|||
LisConstraint(r_len, ref r_id) => {
|
||||
let l_n = token::get_ident(l_id.name);
|
||||
let r_n = token::get_ident(r_id.name);
|
||||
LisContradiction(format!("Inconsistent lockstep iteration: \
|
||||
LisContradiction(format!("inconsistent lockstep iteration: \
|
||||
'{}' has {} items, but '{}' has {}",
|
||||
l_n.get(), l_len, r_n.get(), r_len))
|
||||
}
|
||||
|
|
|
@ -85,7 +85,7 @@ impl<T> OptVec<T> {
|
|||
|
||||
pub fn get<'a>(&'a self, i: uint) -> &'a T {
|
||||
match *self {
|
||||
Empty => fail!("Invalid index {}", i),
|
||||
Empty => fail!("invalid index {}", i),
|
||||
Vec(ref v) => &v[i]
|
||||
}
|
||||
}
|
||||
|
@ -103,7 +103,7 @@ impl<T> OptVec<T> {
|
|||
|
||||
pub fn swap_remove(&mut self, index: uint) {
|
||||
match *self {
|
||||
Empty => { fail!("Index out of bounds"); }
|
||||
Empty => { fail!("index out of bounds"); }
|
||||
Vec(ref mut v) => {
|
||||
assert!(index < v.len());
|
||||
v.swap_remove(index);
|
||||
|
|
|
@ -76,7 +76,7 @@ impl ParserObsoleteMethods for Parser {
|
|||
let (kind_str, desc) = match kind {
|
||||
ObsoleteSwap => (
|
||||
"swap",
|
||||
"Use std::util::{swap, replace} instead"
|
||||
"use std::util::{swap, replace} instead"
|
||||
),
|
||||
ObsoleteUnsafeBlock => (
|
||||
"non-standalone unsafe block",
|
||||
|
|
|
@@ -431,7 +431,7 @@ impl Parser {
 && self.look_ahead(1, |t| *t == token::RBRACE) {
 // matched; signal non-fatal error and recover.
 self.span_err(self.span,
-"Unit-like struct construction is written with no trailing `{ }`");
+"unit-like struct construction is written with no trailing `{ }`");
 self.eat(&token::LBRACE);
 self.eat(&token::RBRACE);
 true
@@ -1601,7 +1601,7 @@ impl Parser {
 };
 }
 _ => {
-self.fatal(format!("Expected a lifetime name"));
+self.fatal(format!("expected a lifetime name"));
 }
 }
 }
@@ -2137,7 +2137,7 @@ impl Parser {
 }
 // There shouldn't really be a span, but it's easier for the test runner
 // if we give it one
-self.fatal("This file contains an un-closed delimiter ");
+self.fatal("this file contains an un-closed delimiter ");
 }
 token::LPAREN | token::LBRACE | token::LBRACKET => {
 let close_delim = token::flip_delimiter(&self.token);
@@ -3957,7 +3957,7 @@ impl Parser {
 }
 if fields.len() == 0 {
 let string = get_ident_interner().get(class_name.name);
-self.fatal(format!("Unit-like struct definition should be written as `struct {};`",
+self.fatal(format!("unit-like struct definition should be written as `struct {};`",
 string.as_slice()));
 }
 self.bump();

@@ -68,7 +68,7 @@ impl<T> SmallVector<T> {
 match *self {
 One(ref v) if idx == 0 => v,
 Many(ref vs) => &vs[idx],
-_ => fail!("Out of bounds access")
+_ => fail!("out of bounds access")
 }
 }

@@ -19,4 +19,4 @@ fn bar() {
 while (i < 1000) {}
 }

-fn main() {} //~ ERROR This file contains an un-closed delimiter
+fn main() {} //~ ERROR this file contains an un-closed delimiter

@@ -20,16 +20,16 @@ fn main()

 let _z = match g(1, 2) {
 g(x, x) => { info!("{:?}", x + x); }
-//~^ ERROR Identifier `x` is bound more than once in the same pattern
+//~^ ERROR identifier `x` is bound more than once in the same pattern
 };

 let _z = match i(l(1, 2), m(3, 4)) {
-i(l(x, _), m(_, x)) //~ ERROR Identifier `x` is bound more than once in the same pattern
+i(l(x, _), m(_, x)) //~ ERROR identifier `x` is bound more than once in the same pattern
 => { error!("{:?}", x + x); }
 };

 let _z = match (1, 2) {
-(x, x) => { x } //~ ERROR Identifier `x` is bound more than once in the same pattern
+(x, x) => { x } //~ ERROR identifier `x` is bound more than once in the same pattern
 };

 }

@@ -10,7 +10,7 @@

 use std::cmp::Eq;

-trait Hahaha: Eq + Eq + Eq + Eq + Eq + //~ ERROR Duplicate supertrait
+trait Hahaha: Eq + Eq + Eq + Eq + Eq + //~ ERROR duplicate supertrait
 Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq +
 Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq +
 Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq + Eq +

@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-// error-pattern: Multiple macro registration functions found
+// error-pattern: multiple macro registration functions found

 #[feature(macro_registrar)];

@@ -13,6 +13,6 @@
 fn main() {
 fn bar(n: int) {
 let _x: [int, ..n];
-//~^ ERROR expected constant expr for vector length: Non-constant path in constant expr
+//~^ ERROR expected constant expr for vector length: non-constant path in constant expr
 }
 }

@@ -12,7 +12,7 @@ struct Foo;

 fn f2() {
 let _end_stmt = Foo { };
-//~^ ERROR: Unit-like struct construction is written with no trailing `{ }`
+//~^ ERROR: unit-like struct construction is written with no trailing `{ }`
 }

 fn main() {}

@@ -12,7 +12,7 @@ struct Foo;

 fn g3() {
 let _mid_tuple = (Foo { }, 2);
-//~^ ERROR: Unit-like struct construction is written with no trailing `{ }`
+//~^ ERROR: unit-like struct construction is written with no trailing `{ }`
 }

 fn main() {}

@@ -12,7 +12,7 @@ struct Foo;

 fn h4() {
 let _end_of_tuple = (3, Foo { });
-//~^ ERROR: Unit-like struct construction is written with no trailing `{ }`
+//~^ ERROR: unit-like struct construction is written with no trailing `{ }`
 }

 fn main() {}

@@ -12,7 +12,7 @@ struct Foo;

 fn i5() {
 let _end_of_block = { Foo { } };
-//~^ ERROR: Unit-like struct construction is written with no trailing `{ }`
+//~^ ERROR: unit-like struct construction is written with no trailing `{ }`
 }

 fn main() {}

@@ -9,6 +9,6 @@
 // except according to those terms.

 struct Foo {}
-//~^ ERROR: Unit-like struct definition should be written as `struct Foo;`
+//~^ ERROR: unit-like struct definition should be written as `struct Foo;`

 fn main() {}

@@ -9,5 +9,5 @@
 // except according to those terms.

 fn main() {
-let vec = bytes!('λ'); //~ ERROR Non-ascii char literal in bytes!
+let vec = bytes!('λ'); //~ ERROR non-ascii char literal in bytes!
 }

@@ -9,5 +9,5 @@
 // except according to those terms.

 fn main() {
-let vec = bytes!(foo); //~ ERROR Non-literal in bytes!
+let vec = bytes!(foo); //~ ERROR non-literal in bytes!
 }

@@ -9,5 +9,5 @@
 // except according to those terms.

 fn main() {
-let vec = bytes!(1024); //~ ERROR Too large integer literal in bytes!
+let vec = bytes!(1024); //~ ERROR too large integer literal in bytes!
 }

@@ -9,5 +9,5 @@
 // except according to those terms.

 fn main() {
-let vec = bytes!(1024u8); //~ ERROR Too large u8 literal in bytes!
+let vec = bytes!(1024u8); //~ ERROR too large u8 literal in bytes!
 }

@@ -9,5 +9,5 @@
 // except according to those terms.

 fn main() {
-let vec = bytes!(-1024); //~ ERROR Non-literal in bytes
+let vec = bytes!(-1024); //~ ERROR non-literal in bytes
 }

@@ -9,5 +9,5 @@
 // except according to those terms.

 fn main() {
-let vec = bytes!(-1024u8); //~ ERROR Non-literal in bytes
+let vec = bytes!(-1024u8); //~ ERROR non-literal in bytes
 }

@@ -9,5 +9,5 @@
 // except according to those terms.

 fn main() {
-let vec = bytes!(45f64); //~ ERROR Unsupported literal in bytes!
+let vec = bytes!(45f64); //~ ERROR unsupported literal in bytes!
 }