Rollup merge of #24487: erickt/syntax

This removes the usage of `#[feature(into_cow, slice_patterns, box_syntax, box_patterns, quote, unsafe_destructor)]` from being used in libsyntax. My main desire for this is that it brings me one step closer to letting [syntex](https://github.com/erickt/rust-syntex) compile with stable Rust. Hopefully this doesn't inconvenience Rust development.
This commit is contained in:
Alex Crichton 2015-04-21 15:23:07 -07:00
commit 37a1f2e3ac
36 changed files with 258 additions and 178 deletions

View File

@ -348,7 +348,7 @@ unsafe extern "C" fn report_inline_asm<'a, 'b>(cgcx: &'a CodegenContext<'a>,
match cgcx.lto_ctxt {
Some((sess, _)) => {
sess.codemap().with_expn_info(ExpnId::from_llvm_cookie(cookie), |info| match info {
sess.codemap().with_expn_info(ExpnId::from_u32(cookie), |info| match info {
Some(ei) => sess.span_err(ei.call_site, msg),
None => sess.err(msg),
});

View File

@ -138,7 +138,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
let kind = llvm::LLVMGetMDKindIDInContext(bcx.ccx().llcx(),
key.as_ptr() as *const c_char, key.len() as c_uint);
let val: llvm::ValueRef = C_i32(bcx.ccx(), ia.expn_id.to_llvm_cookie());
let val: llvm::ValueRef = C_i32(bcx.ccx(), ia.expn_id.into_u32() as i32);
llvm::LLVMSetMetadata(r, kind,
llvm::LLVMMDNodeInContext(bcx.ccx().llcx(), &val, 1));

View File

@ -238,7 +238,7 @@ pub fn name_to_dummy_lifetime(name: Name) -> Lifetime {
pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: Option<&Ty>) -> Ident {
let mut pretty = match ty {
Some(t) => pprust::ty_to_string(t),
None => String::from_str("..")
None => String::from("..")
};
match *trait_ref {

View File

@ -26,7 +26,6 @@ use std::rc::Rc;
use std::fmt;
use libc::c_uint;
use serialize::{Encodable, Decodable, Encoder, Decoder};
@ -287,13 +286,12 @@ pub const NO_EXPANSION: ExpnId = ExpnId(!0);
pub const COMMAND_LINE_EXPN: ExpnId = ExpnId(!1);
impl ExpnId {
pub fn from_llvm_cookie(cookie: c_uint) -> ExpnId {
ExpnId(cookie)
pub fn from_u32(id: u32) -> ExpnId {
ExpnId(id)
}
pub fn to_llvm_cookie(self) -> i32 {
let ExpnId(cookie) = self;
cookie as i32
pub fn into_u32(self) -> u32 {
self.0
}
}
@ -557,9 +555,9 @@ impl CodeMap {
// FIXME #12884: no efficient/safe way to remove from the start of a string
// and reuse the allocation.
let mut src = if src.starts_with("\u{feff}") {
String::from_str(&src[3..])
String::from(&src[3..])
} else {
String::from_str(&src[..])
String::from(&src[..])
};
// Append '\n' in case it's not already there.
@ -594,8 +592,8 @@ impl CodeMap {
pub fn new_imported_filemap(&self,
filename: FileName,
source_len: usize,
file_local_lines: Vec<BytePos>,
file_local_multibyte_chars: Vec<MultiByteChar>)
mut file_local_lines: Vec<BytePos>,
mut file_local_multibyte_chars: Vec<MultiByteChar>)
-> Rc<FileMap> {
let mut files = self.files.borrow_mut();
let start_pos = match files.last() {
@ -606,19 +604,21 @@ impl CodeMap {
let end_pos = Pos::from_usize(start_pos + source_len);
let start_pos = Pos::from_usize(start_pos);
let lines = file_local_lines.map_in_place(|pos| pos + start_pos);
let multibyte_chars = file_local_multibyte_chars.map_in_place(|mbc| MultiByteChar {
pos: mbc.pos + start_pos,
bytes: mbc.bytes
});
for pos in &mut file_local_lines {
*pos = *pos + start_pos;
}
for mbc in &mut file_local_multibyte_chars {
mbc.pos = mbc.pos + start_pos;
}
let filemap = Rc::new(FileMap {
name: filename,
src: None,
start_pos: start_pos,
end_pos: end_pos,
lines: RefCell::new(lines),
multibyte_chars: RefCell::new(multibyte_chars),
lines: RefCell::new(file_local_lines),
multibyte_chars: RefCell::new(file_local_multibyte_chars),
});
files.push(filemap.clone());

View File

@ -284,8 +284,15 @@ impl<'a> fold::Folder for CfgAttrFolder<'a> {
return fold::noop_fold_attribute(attr, self);
}
let (cfg, mi) = match attr.meta_item_list() {
Some([ref cfg, ref mi]) => (cfg, mi),
let attr_list = match attr.meta_item_list() {
Some(attr_list) => attr_list,
None => {
self.diag.span_err(attr.span, "expected `#[cfg_attr(<cfg pattern>, <attr>)]`");
return None;
}
};
let (cfg, mi) = match (attr_list.len(), attr_list.get(0), attr_list.get(1)) {
(2, Some(cfg), Some(mi)) => (cfg, mi),
_ => {
self.diag.span_err(attr.span, "expected `#[cfg_attr(<cfg pattern>, <attr>)]`");
return None;

View File

@ -644,7 +644,7 @@ fn highlight_lines(err: &mut EmitterWriter,
}
try!(write!(&mut err.dst, "{}", s));
let mut s = String::from_str("^");
let mut s = String::from("^");
let count = match lastc {
// Most terminals have a tab stop every eight columns by default
'\t' => 8 - col%8,

View File

@ -54,8 +54,8 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
span: Span,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let code = match token_tree {
[ast::TtToken(_, token::Ident(code, _))] => code,
let code = match (token_tree.len(), token_tree.get(0)) {
(1, Some(&ast::TtToken(_, token::Ident(code, _)))) => code,
_ => unreachable!()
};
with_used_diagnostics(|diagnostics| {
@ -77,20 +77,25 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
));
}
});
MacEager::expr(quote_expr!(ecx, ()))
MacEager::expr(ecx.expr_tuple(span, Vec::new()))
}
pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
span: Span,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let (code, description) = match token_tree {
[ast::TtToken(_, token::Ident(ref code, _))] => {
let (code, description) = match (
token_tree.len(),
token_tree.get(0),
token_tree.get(1),
token_tree.get(2)
) {
(1, Some(&ast::TtToken(_, token::Ident(ref code, _))), None, None) => {
(code, None)
},
[ast::TtToken(_, token::Ident(ref code, _)),
ast::TtToken(_, token::Comma),
ast::TtToken(_, token::Literal(token::StrRaw(description, _), None))] => {
(3, Some(&ast::TtToken(_, token::Ident(ref code, _))),
Some(&ast::TtToken(_, token::Comma)),
Some(&ast::TtToken(_, token::Literal(token::StrRaw(description, _), None)))) => {
(code, Some(description))
}
_ => unreachable!()
@ -123,15 +128,23 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
let sym = Ident::new(token::gensym(&(
"__register_diagnostic_".to_string() + &token::get_ident(*code)
)));
MacEager::items(SmallVector::many(vec![quote_item!(ecx, mod $sym {}).unwrap()]))
MacEager::items(SmallVector::many(vec![
ecx.item_mod(
span,
span,
sym,
Vec::new(),
Vec::new()
)
]))
}
pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
span: Span,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let name = match token_tree {
[ast::TtToken(_, token::Ident(ref name, _))] => name,
let name = match (token_tree.len(), token_tree.get(0)) {
(1, Some(&ast::TtToken(_, token::Ident(ref name, _)))) => name,
_ => unreachable!()
};
@ -148,7 +161,37 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
(descriptions.len(), ecx.expr_vec(span, descriptions))
});
MacEager::items(SmallVector::many(vec![quote_item!(ecx,
pub static $name: [(&'static str, &'static str); $count] = $expr;
).unwrap()]))
let static_ = ecx.lifetime(span, ecx.name_of("'static"));
let ty_str = ecx.ty_rptr(
span,
ecx.ty_ident(span, ecx.ident_of("str")),
Some(static_),
ast::MutImmutable,
);
let ty = ecx.ty(
span,
ast::TyFixedLengthVec(
ecx.ty(
span,
ast::TyTup(vec![ty_str.clone(), ty_str])
),
ecx.expr_usize(span, count),
),
);
MacEager::items(SmallVector::many(vec![
P(ast::Item {
ident: name.clone(),
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemStatic(
ty,
ast::MutImmutable,
expr,
),
vis: ast::Public,
span: span,
})
]))
}

View File

@ -262,10 +262,10 @@ macro_rules! make_MacEager {
impl MacEager {
$(
pub fn $fld(v: $t) -> Box<MacResult> {
box MacEager {
Box::new(MacEager {
$fld: Some(v),
..Default::default()
}
})
}
)*
}
@ -331,7 +331,7 @@ impl DummyResult {
/// Use this as a return value after hitting any errors and
/// calling `span_err`.
pub fn any(sp: Span) -> Box<MacResult+'static> {
box DummyResult { expr_only: false, span: sp }
Box::new(DummyResult { expr_only: false, span: sp })
}
/// Create a default MacResult that can only be an expression.
@ -340,7 +340,7 @@ impl DummyResult {
/// if an error is encountered internally, the user will receive
/// an error that they also used it in the wrong place.
pub fn expr(sp: Span) -> Box<MacResult+'static> {
box DummyResult { expr_only: true, span: sp }
Box::new(DummyResult { expr_only: true, span: sp })
}
/// A plain dummy expression.

View File

@ -106,8 +106,8 @@ pub fn cs_cmp(cx: &mut ExtCtxt, span: Span,
// }
let new = {
let other_f = match other_fs {
[ref o_f] => o_f,
let other_f = match (other_fs.len(), other_fs.get(0)) {
(1, Some(o_f)) => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"),
};

View File

@ -29,8 +29,8 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt,
cs_fold(
true, // use foldl
|cx, span, subexpr, self_f, other_fs| {
let other_f = match other_fs {
[ref o_f] => o_f,
let other_f = match (other_fs.len(), other_fs.get(0)) {
(1, Some(o_f)) => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`")
};
@ -46,8 +46,8 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt,
cs_fold(
true, // use foldl
|cx, span, subexpr, self_f, other_fs| {
let other_f = match other_fs {
[ref o_f] => o_f,
let other_f = match (other_fs.len(), other_fs.get(0)) {
(1, Some(o_f)) => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`")
};

View File

@ -47,7 +47,7 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt,
let ordering_ty = Literal(path_std!(cx, core::cmp::Ordering));
let ret_ty = Literal(Path::new_(pathvec_std!(cx, core::option::Option),
None,
vec![box ordering_ty],
vec![Box::new(ordering_ty)],
true));
let inline = cx.meta_word(span, InternedString::new("inline"));
@ -150,8 +150,8 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span,
// }
let new = {
let other_f = match other_fs {
[ref o_f] => o_f,
let other_f = match (other_fs.len(), other_fs.get(0)) {
(1, Some(o_f)) => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"),
};
@ -208,8 +208,8 @@ fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt,
get use the binops to avoid auto-deref dereferencing too many
layers of pointers, if the type includes pointers.
*/
let other_f = match other_fs {
[ref o_f] => o_f,
let other_f = match (other_fs.len(), other_fs.get(0)) {
(1, Some(o_f)) => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`")
};

View File

@ -68,14 +68,14 @@ fn expand_deriving_decodable_imp(cx: &mut ExtCtxt,
vec!(), true))))
},
explicit_self: None,
args: vec!(Ptr(box Literal(Path::new_local("__D")),
args: vec!(Ptr(Box::new(Literal(Path::new_local("__D"))),
Borrowed(None, MutMutable))),
ret_ty: Literal(Path::new_(
pathvec_std!(cx, core::result::Result),
None,
vec!(box Self_, box Literal(Path::new_(
vec!(Box::new(Self_), Box::new(Literal(Path::new_(
vec!["__D", "Error"], None, vec![], false
))),
)))),
true
)),
attributes: Vec::new(),

View File

@ -144,14 +144,14 @@ fn expand_deriving_encodable_imp(cx: &mut ExtCtxt,
vec!(), true))))
},
explicit_self: borrowed_explicit_self(),
args: vec!(Ptr(box Literal(Path::new_local("__S")),
args: vec!(Ptr(Box::new(Literal(Path::new_local("__S"))),
Borrowed(None, MutMutable))),
ret_ty: Literal(Path::new_(
pathvec_std!(cx, core::result::Result),
None,
vec!(box Tuple(Vec::new()), box Literal(Path::new_(
vec!(Box::new(Tuple(Vec::new())), Box::new(Literal(Path::new_(
vec!["__S", "Error"], None, vec![], false
))),
)))),
true
)),
attributes: Vec::new(),

View File

@ -807,7 +807,7 @@ impl<'a> MethodDef<'a> {
Self_ if nonstatic => {
self_args.push(arg_expr);
}
Ptr(box Self_, _) if nonstatic => {
Ptr(ref ty, _) if **ty == Self_ && nonstatic => {
self_args.push(cx.expr_deref(trait_.span, arg_expr))
}
_ => {
@ -1103,7 +1103,7 @@ impl<'a> MethodDef<'a> {
subpats.push(p);
idents
};
for self_arg_name in self_arg_names.tail() {
for self_arg_name in &self_arg_names[1..] {
let (p, idents) = mk_self_pat(cx, &self_arg_name[..]);
subpats.push(p);
self_pats_idents.push(idents);

View File

@ -24,7 +24,7 @@ use parse::token::special_idents;
use ptr::P;
/// The types of pointers
#[derive(Clone)]
#[derive(Clone, Eq, PartialEq)]
pub enum PtrTy<'a> {
/// &'lifetime mut
Borrowed(Option<&'a str>, ast::Mutability),
@ -34,7 +34,7 @@ pub enum PtrTy<'a> {
/// A path, e.g. `::std::option::Option::<i32>` (global). Has support
/// for type parameters and a lifetime.
#[derive(Clone)]
#[derive(Clone, Eq, PartialEq)]
pub struct Path<'a> {
pub path: Vec<&'a str> ,
pub lifetime: Option<&'a str>,
@ -85,7 +85,7 @@ impl<'a> Path<'a> {
}
/// A type. Supports pointers, Self, and literals
#[derive(Clone)]
#[derive(Clone, Eq, PartialEq)]
pub enum Ty<'a> {
Self_,
/// &/Box/ Ty
@ -109,7 +109,7 @@ pub fn borrowed_explicit_self<'r>() -> Option<Option<PtrTy<'r>>> {
}
pub fn borrowed_self<'r>() -> Ty<'r> {
borrowed(box Self_)
borrowed(Box::new(Self_))
}
pub fn nil_ty<'r>() -> Ty<'r> {

View File

@ -41,7 +41,7 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt,
vec![path_std!(cx, core::hash::Hasher)])],
},
explicit_self: borrowed_explicit_self(),
args: vec!(Ptr(box Literal(arg), Borrowed(None, MutMutable))),
args: vec!(Ptr(Box::new(Literal(arg)), Borrowed(None, MutMutable))),
ret_ty: nil_ty(),
attributes: vec![],
combine_substructure: combine_substructure(Box::new(|a, b, c| {
@ -56,8 +56,8 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt,
}
fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P<Expr> {
let state_expr = match substr.nonself_args {
[ref state_expr] => state_expr,
let state_expr = match (substr.nonself_args.len(), substr.nonself_args.get(0)) {
(1, Some(o_f)) => o_f,
_ => cx.span_bug(trait_span, "incorrect number of arguments in `derive(Hash)`")
};
let call_hash = |span, thing_expr| {

View File

@ -40,7 +40,7 @@ pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt,
args: vec!(Literal(path_local!(i64))),
ret_ty: Literal(Path::new_(pathvec_std!(cx, core::option::Option),
None,
vec!(box Self_),
vec!(Box::new(Self_)),
true)),
// #[inline] liable to cause code-bloat
attributes: attrs.clone(),
@ -55,7 +55,7 @@ pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt,
args: vec!(Literal(path_local!(u64))),
ret_ty: Literal(Path::new_(pathvec_std!(cx, core::option::Option),
None,
vec!(box Self_),
vec!(Box::new(Self_)),
true)),
// #[inline] liable to cause code-bloat
attributes: attrs,
@ -71,8 +71,8 @@ pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt,
}
fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P<Expr> {
let n = match substr.nonself_args {
[ref n] => n,
let n = match (substr.nonself_args.len(), substr.nonself_args.get(0)) {
(1, Some(o_f)) => o_f,
_ => cx.span_bug(trait_span, "incorrect number of arguments in `derive(FromPrimitive)`")
};

View File

@ -25,7 +25,7 @@ pub fn expand_deriving_show(cx: &mut ExtCtxt,
push: &mut FnMut(P<Item>))
{
// &mut ::std::fmt::Formatter
let fmtr = Ptr(box Literal(path_std!(cx, core::fmt::Formatter)),
let fmtr = Ptr(Box::new(Literal(path_std!(cx, core::fmt::Formatter))),
Borrowed(None, ast::MutMutable));
let trait_def = TraitDef {

View File

@ -1962,8 +1962,8 @@ foo_module!();
"xx" == string
}).collect();
let cxbinds: &[&ast::Ident] = &cxbinds[..];
let cxbind = match cxbinds {
[b] => b,
let cxbind = match (cxbinds.len(), cxbinds.get(0)) {
(1, Some(b)) => *b,
_ => panic!("expected just one binding for ext_cx")
};
let resolved_binding = mtwt::resolve(*cxbind);

View File

@ -128,7 +128,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree
}
}
box ExpandResult { p: p }
Box::new(ExpandResult { p: p })
}
// include_str! : read the given file, insert it as a literal string expr

View File

@ -28,12 +28,11 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
return base::DummyResult::any(sp);
}
match tt {
[ast::TtToken(_, ref tok)] if tok.is_keyword(keywords::True) => {
match (tt.len(), tt.first()) {
(1, Some(&ast::TtToken(_, ref tok))) if tok.is_keyword(keywords::True) => {
cx.set_trace_macros(true);
}
[ast::TtToken(_, ref tok)] if tok.is_keyword(keywords::False) => {
(1, Some(&ast::TtToken(_, ref tok))) if tok.is_keyword(keywords::False) => {
cx.set_trace_macros(false);
}
_ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),

View File

@ -165,7 +165,7 @@ pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: ByteP
-> Box<MatcherPos> {
let match_idx_hi = count_names(&ms[..]);
let matches: Vec<_> = (0..match_idx_hi).map(|_| Vec::new()).collect();
box MatcherPos {
Box::new(MatcherPos {
stack: vec![],
top_elts: TtSeq(ms),
sep: sep,
@ -176,7 +176,7 @@ pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: ByteP
match_cur: 0,
match_hi: match_idx_hi,
sp_lo: lo
}
})
}
/// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL:
@ -396,7 +396,7 @@ pub fn parse(sess: &ParseSess,
let matches: Vec<_> = (0..ei.matches.len())
.map(|_| Vec::new()).collect();
let ei_t = ei;
cur_eis.push(box MatcherPos {
cur_eis.push(Box::new(MatcherPos {
stack: vec![],
sep: seq.separator.clone(),
idx: 0,
@ -407,7 +407,7 @@ pub fn parse(sess: &ParseSess,
up: Some(ei_t),
sp_lo: sp.lo,
top_elts: Tt(TtSequence(sp, seq)),
});
}));
}
TtToken(_, MatchNt(..)) => {
// Built-in nonterminals never start with these tokens,
@ -533,7 +533,7 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal {
"ty" => token::NtTy(p.parse_ty()),
// this could be handled like a token, since it is one
"ident" => match p.token {
token::Ident(sn,b) => { panictry!(p.bump()); token::NtIdent(box sn,b) }
token::Ident(sn,b) => { panictry!(p.bump()); token::NtIdent(Box::new(sn),b) }
_ => {
let token_str = pprust::token_to_string(&p.token);
panic!(p.fatal(&format!("expected ident, found {}",
@ -541,7 +541,7 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal {
}
},
"path" => {
token::NtPath(box panictry!(p.parse_path(LifetimeAndTypesWithoutColons)))
token::NtPath(Box::new(panictry!(p.parse_path(LifetimeAndTypesWithoutColons))))
}
"meta" => token::NtMeta(p.parse_meta_item()),
_ => {

View File

@ -192,7 +192,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
panictry!(p.check_unknown_macro_variable());
// Let the context choose how to interpret the result.
// Weird, but useful for X-macros.
return box ParserAnyMacro {
return Box::new(ParserAnyMacro {
parser: RefCell::new(p),
// Pass along the original expansion site and the name of the macro
@ -200,7 +200,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
// macro leaves unparsed tokens.
site_span: sp,
macro_ident: name
}
})
}
Failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo {
best_fail_spot = sp;
@ -281,12 +281,12 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
_ => cx.span_bug(def.span, "wrong-structured rhs")
};
let exp: Box<_> = box MacroRulesMacroExpander {
let exp: Box<_> = Box::new(MacroRulesMacroExpander {
name: def.ident,
imported_from: def.imported_from,
lhses: lhses,
rhses: rhses,
};
});
NormalTT(exp, Some(def.span), def.allow_internal_unstable)
}

View File

@ -294,9 +294,9 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
// sidestep the interpolation tricks for ident because
// (a) idents can be in lots of places, so it'd be a pain
// (b) we actually can, since it's a token.
MatchedNonterminal(NtIdent(box sn, b)) => {
MatchedNonterminal(NtIdent(ref sn, b)) => {
r.cur_span = sp;
r.cur_tok = token::Ident(sn, b);
r.cur_tok = token::Ident(**sn, b);
return ret_val;
}
MatchedNonterminal(ref other_whole_nt) => {

View File

@ -677,10 +677,10 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)),
token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
token::NtIdent(box id, is_mod_name) =>
token::NtIdent(box fld.fold_ident(id), is_mod_name),
token::NtIdent(id, is_mod_name) =>
token::NtIdent(Box::new(fld.fold_ident(*id)), is_mod_name),
token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
token::NtPath(box path) => token::NtPath(box fld.fold_path(path)),
token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&*tt))),
}
}

View File

@ -25,19 +25,13 @@
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/")]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(core)]
#![feature(libc)]
#![feature(quote, unsafe_destructor)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(unicode)]
#![feature(path_ext)]
#![feature(str_char)]
#![feature(into_cow)]
#![feature(slice_patterns)]
extern crate arena;
extern crate fmt_macros;
@ -98,6 +92,7 @@ pub mod parse;
pub mod ptr;
pub mod show_span;
pub mod std_inject;
pub mod str;
pub mod test;
pub mod visit;

View File

@ -13,11 +13,12 @@ pub use self::CommentStyle::*;
use ast;
use codemap::{BytePos, CharPos, CodeMap, Pos};
use diagnostic;
use parse::lexer::{is_whitespace, Reader};
use parse::lexer::{StringReader, TokenAndSpan};
use parse::lexer::is_block_doc_comment;
use parse::lexer::{StringReader, TokenAndSpan};
use parse::lexer::{is_whitespace, Reader};
use parse::lexer;
use print::pprust;
use str::char_at;
use std::io::Read;
use std::usize;
@ -209,7 +210,7 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
let mut col = col.to_usize();
let mut cursor: usize = 0;
while col > 0 && cursor < len {
let ch = s.char_at(cursor);
let ch = char_at(s, cursor);
if !ch.is_whitespace() {
return None;
}
@ -246,7 +247,7 @@ fn read_block_comment(rdr: &mut StringReader,
rdr.bump();
rdr.bump();
let mut curr_line = String::from_str("/*");
let mut curr_line = String::from("/*");
// doc-comments are not really comments, they are attributes
if (rdr.curr_is('*') && !rdr.nextch_is('*')) || rdr.curr_is('!') {

View File

@ -13,10 +13,11 @@ use codemap::{BytePos, CharPos, CodeMap, Pos, Span};
use codemap;
use diagnostic::SpanHandler;
use ext::tt::transcribe::tt_next_token;
use parse::token;
use parse::token::str_to_ident;
use parse::token;
use str::char_at;
use std::borrow::{IntoCow, Cow};
use std::borrow::Cow;
use std::char;
use std::fmt;
use std::mem::replace;
@ -289,11 +290,11 @@ impl<'a> StringReader<'a> {
s: &'b str, errmsg: &'b str) -> Cow<'b, str> {
let mut i = 0;
while i < s.len() {
let ch = s.char_at(i);
let ch = char_at(s, i);
let next = i + ch.len_utf8();
if ch == '\r' {
if next < s.len() && s.char_at(next) == '\n' {
return translate_crlf_(self, start, s, errmsg, i).into_cow();
if next < s.len() && char_at(s, next) == '\n' {
return translate_crlf_(self, start, s, errmsg, i).into();
}
let pos = start + BytePos(i as u32);
let end_pos = start + BytePos(next as u32);
@ -301,19 +302,19 @@ impl<'a> StringReader<'a> {
}
i = next;
}
return s.into_cow();
return s.into();
fn translate_crlf_(rdr: &StringReader, start: BytePos,
s: &str, errmsg: &str, mut i: usize) -> String {
let mut buf = String::with_capacity(s.len());
let mut j = 0;
while i < s.len() {
let ch = s.char_at(i);
let ch = char_at(s, i);
let next = i + ch.len_utf8();
if ch == '\r' {
if j < i { buf.push_str(&s[j..i]); }
j = next;
if next >= s.len() || s.char_at(next) != '\n' {
if next >= s.len() || char_at(s, next) != '\n' {
let pos = start + BytePos(i as u32);
let end_pos = start + BytePos(next as u32);
rdr.err_span_(pos, end_pos, errmsg);
@ -335,7 +336,7 @@ impl<'a> StringReader<'a> {
if current_byte_offset < self.source_text.len() {
assert!(self.curr.is_some());
let last_char = self.curr.unwrap();
let ch = self.source_text.char_at(current_byte_offset);
let ch = char_at(&self.source_text, current_byte_offset);
let next = current_byte_offset + ch.len_utf8();
let byte_offset_diff = next - current_byte_offset;
self.pos = self.pos + Pos::from_usize(byte_offset_diff);
@ -357,7 +358,7 @@ impl<'a> StringReader<'a> {
pub fn nextch(&self) -> Option<char> {
let offset = self.byte_offset(self.pos).to_usize();
if offset < self.source_text.len() {
Some(self.source_text.char_at(offset))
Some(char_at(&self.source_text, offset))
} else {
None
}
@ -371,9 +372,9 @@ impl<'a> StringReader<'a> {
let offset = self.byte_offset(self.pos).to_usize();
let s = &self.source_text[..];
if offset >= s.len() { return None }
let next = offset + s.char_at(offset).len_utf8();
let next = offset + char_at(s, offset).len_utf8();
if next < s.len() {
Some(s.char_at(next))
Some(char_at(s, next))
} else {
None
}
@ -564,7 +565,7 @@ impl<'a> StringReader<'a> {
let string = if has_cr {
self.translate_crlf(start_bpos, string,
"bare CR not allowed in block doc-comment")
} else { string.into_cow() };
} else { string.into() };
token::DocComment(token::intern(&string[..]))
} else {
token::Comment

View File

@ -16,7 +16,7 @@ use diagnostic::{SpanHandler, mk_span_handler, default_handler, Auto, FatalError
use parse::attr::ParserAttr;
use parse::parser::Parser;
use ptr::P;
use str::char_at;
use std::cell::{Cell, RefCell};
use std::fs::File;
@ -288,7 +288,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
// parsing tt's probably shouldn't require a parser at all.
let cfg = Vec::new();
let srdr = lexer::StringReader::new(&sess.span_diagnostic, filemap);
let mut p1 = Parser::new(sess, cfg, box srdr);
let mut p1 = Parser::new(sess, cfg, Box::new(srdr));
panictry!(p1.parse_all_token_trees())
}
@ -297,7 +297,7 @@ pub fn tts_to_parser<'a>(sess: &'a ParseSess,
tts: Vec<ast::TokenTree>,
cfg: ast::CrateConfig) -> Parser<'a> {
let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, None, tts);
let mut p = Parser::new(sess, cfg, box trdr);
let mut p = Parser::new(sess, cfg, Box::new(trdr));
panictry!(p.check_unknown_macro_variable());
p
}
@ -360,7 +360,7 @@ pub mod with_hygiene {
use super::lexer::make_reader_with_embedded_idents as make_reader;
let cfg = Vec::new();
let srdr = make_reader(&sess.span_diagnostic, filemap);
let mut p1 = Parser::new(sess, cfg, box srdr);
let mut p1 = Parser::new(sess, cfg, Box::new(srdr));
panictry!(p1.parse_all_token_trees())
}
}
@ -536,7 +536,7 @@ pub fn raw_str_lit(lit: &str) -> String {
// check if `s` looks like i32 or u1234 etc.
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
s.len() > 1 &&
first_chars.contains(&s.char_at(0)) &&
first_chars.contains(&char_at(s, 0)) &&
s[1..].chars().all(|c| '0' <= c && c <= '9')
}
@ -673,8 +673,8 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) ->
let orig = s;
let mut ty = ast::UnsuffixedIntLit(ast::Plus);
if s.char_at(0) == '0' && s.len() > 1 {
match s.char_at(1) {
if char_at(s, 0) == '0' && s.len() > 1 {
match char_at(s, 1) {
'x' => base = 16,
'o' => base = 8,
'b' => base = 2,
@ -834,28 +834,44 @@ mod test {
fn string_to_tts_macro () {
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
let tts: &[ast::TokenTree] = &tts[..];
match tts {
[ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)),
ast::TtToken(_, token::Not),
ast::TtToken(_, token::Ident(name_zip, token::Plain)),
ast::TtDelimited(_, ref macro_delimed)]
match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
(
4,
Some(&ast::TtToken(_, token::Ident(name_macro_rules, token::Plain))),
Some(&ast::TtToken(_, token::Not)),
Some(&ast::TtToken(_, token::Ident(name_zip, token::Plain))),
Some(&ast::TtDelimited(_, ref macro_delimed)),
)
if name_macro_rules.as_str() == "macro_rules"
&& name_zip.as_str() == "zip" => {
match &macro_delimed.tts[..] {
[ast::TtDelimited(_, ref first_delimed),
ast::TtToken(_, token::FatArrow),
ast::TtDelimited(_, ref second_delimed)]
let tts = &macro_delimed.tts[..];
match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
(
3,
Some(&ast::TtDelimited(_, ref first_delimed)),
Some(&ast::TtToken(_, token::FatArrow)),
Some(&ast::TtDelimited(_, ref second_delimed)),
)
if macro_delimed.delim == token::Paren => {
match &first_delimed.tts[..] {
[ast::TtToken(_, token::Dollar),
ast::TtToken(_, token::Ident(name, token::Plain))]
let tts = &first_delimed.tts[..];
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
Some(&ast::TtToken(_, token::Dollar)),
Some(&ast::TtToken(_, token::Ident(name, token::Plain))),
)
if first_delimed.delim == token::Paren
&& name.as_str() == "a" => {},
_ => panic!("value 3: {:?}", **first_delimed),
}
match &second_delimed.tts[..] {
[ast::TtToken(_, token::Dollar),
ast::TtToken(_, token::Ident(name, token::Plain))]
let tts = &second_delimed.tts[..];
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
Some(&ast::TtToken(_, token::Dollar)),
Some(&ast::TtToken(_, token::Ident(name, token::Plain))),
)
if second_delimed.delim == token::Paren
&& name.as_str() == "a" => {},
_ => panic!("value 4: {:?}", **second_delimed),

View File

@ -78,6 +78,7 @@ use parse::PResult;
use diagnostic::FatalError;
use std::collections::HashSet;
use std::fs;
use std::io::prelude::*;
use std::mem;
use std::path::{Path, PathBuf};
@ -436,10 +437,11 @@ impl<'a> Parser<'a> {
// leave it in the input
Ok(())
} else {
let mut expected = edible.iter().map(|x| TokenType::Token(x.clone()))
.collect::<Vec<_>>();
expected.extend(inedible.iter().map(|x| TokenType::Token(x.clone())));
expected.push_all(&*self.expected_tokens);
let mut expected = edible.iter()
.map(|x| TokenType::Token(x.clone()))
.chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
.chain(self.expected_tokens.iter().cloned())
.collect::<Vec<_>>();
expected.sort_by(|a, b| a.to_string().cmp(&b.to_string()));
expected.dedup();
let expect = tokens_to_string(&expected[..]);
@ -490,8 +492,10 @@ impl<'a> Parser<'a> {
debug!("commit_expr {:?}", e);
if let ExprPath(..) = e.node {
// might be unit-struct construction; check for recoverable input error.
let mut expected = edible.iter().cloned().collect::<Vec<_>>();
expected.push_all(inedible);
let expected = edible.iter()
.cloned()
.chain(inedible.iter().cloned())
.collect::<Vec<_>>();
try!(self.check_for_erroneous_unit_struct_expecting(&expected[..]));
}
self.expect_one_of(edible, inedible)
@ -509,8 +513,10 @@ impl<'a> Parser<'a> {
if self.last_token
.as_ref()
.map_or(false, |t| t.is_ident() || t.is_path()) {
let mut expected = edible.iter().cloned().collect::<Vec<_>>();
expected.push_all(&inedible);
let expected = edible.iter()
.cloned()
.chain(inedible.iter().cloned())
.collect::<Vec<_>>();
try!(self.check_for_erroneous_unit_struct_expecting(&expected));
}
self.expect_one_of(edible, inedible)
@ -897,7 +903,7 @@ impl<'a> Parser<'a> {
self.last_span = self.span;
// Stash token for error recovery (sometimes; clone is not necessarily cheap).
self.last_token = if self.token.is_ident() || self.token.is_path() {
Some(box self.token.clone())
Some(Box::new(self.token.clone()))
} else {
None
};
@ -1187,7 +1193,7 @@ impl<'a> Parser<'a> {
debug!("parse_trait_methods(): parsing provided method");
let (inner_attrs, body) =
try!(p.parse_inner_attrs_and_block());
attrs.push_all(&inner_attrs[..]);
attrs.extend(inner_attrs.iter().cloned());
Some(body)
}
@ -1578,8 +1584,8 @@ impl<'a> Parser<'a> {
token::Interpolated(token::NtPath(_)) => Some(try!(self.bump_and_get())),
_ => None,
};
if let Some(token::Interpolated(token::NtPath(box path))) = found {
return Ok(path);
if let Some(token::Interpolated(token::NtPath(path))) = found {
return Ok(*path);
}
let lo = self.span.lo;
@ -4770,8 +4776,8 @@ impl<'a> Parser<'a> {
let secondary_path_str = format!("{}/mod.rs", mod_name);
let default_path = dir_path.join(&default_path_str[..]);
let secondary_path = dir_path.join(&secondary_path_str[..]);
let default_exists = default_path.exists();
let secondary_exists = secondary_path.exists();
let default_exists = fs::metadata(&default_path).is_ok();
let secondary_exists = fs::metadata(&secondary_path).is_ok();
if !self.owns_directory {
self.span_err(id_sp,
@ -4834,7 +4840,7 @@ impl<'a> Parser<'a> {
let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut();
match included_mod_stack.iter().position(|p| *p == path) {
Some(i) => {
let mut err = String::from_str("circular modules: ");
let mut err = String::from("circular modules: ");
let len = included_mod_stack.len();
for p in &included_mod_stack[i.. len] {
err.push_str(&p.to_string_lossy());

View File

@ -131,7 +131,7 @@ pub fn buf_str(toks: &[Token],
assert_eq!(n, szs.len());
let mut i = left;
let mut l = lim;
let mut s = string::String::from_str("[");
let mut s = string::String::from("[");
while i != right && l != 0 {
l -= 1;
if i != left {

View File

@ -28,7 +28,7 @@ use print::pp::Breaks::{Consistent, Inconsistent};
use ptr::P;
use std_inject;
use std::{ascii, mem};
use std::ascii;
use std::io::{self, Write, Read};
use std::iter;
@ -187,18 +187,13 @@ impl<'a> State<'a> {
pub fn to_string<F>(f: F) -> String where
F: FnOnce(&mut State) -> io::Result<()>,
{
use std::raw::TraitObject;
let mut s = rust_printer(box Vec::new());
f(&mut s).unwrap();
eof(&mut s.s).unwrap();
let wr = unsafe {
// FIXME(pcwalton): A nasty function to extract the string from an `Write`
// that we "know" to be a `Vec<u8>` that works around the lack of checked
// downcasts.
let obj: &TraitObject = mem::transmute(&s.s.out);
mem::transmute::<*mut (), &Vec<u8>>(obj.data)
};
String::from_utf8(wr.clone()).unwrap()
let mut wr = Vec::new();
{
let mut printer = rust_printer(Box::new(&mut wr));
f(&mut printer).unwrap();
eof(&mut printer.s).unwrap();
}
String::from_utf8(wr).unwrap()
}
pub fn binop_to_string(op: BinOpToken) -> &'static str {
@ -2799,13 +2794,13 @@ impl<'a> State<'a> {
match lit.node {
ast::LitStr(ref st, style) => self.print_string(&st, style),
ast::LitByte(byte) => {
let mut res = String::from_str("b'");
let mut res = String::from("b'");
res.extend(ascii::escape_default(byte).map(|c| c as char));
res.push('\'');
word(&mut self.s, &res[..])
}
ast::LitChar(ch) => {
let mut res = String::from_str("'");
let mut res = String::from("'");
res.extend(ch.escape_default());
res.push('\'');
word(&mut self.s, &res[..])

View File

@ -52,7 +52,7 @@ pub struct P<T> {
/// Construct a `P<T>` from a `T` value.
pub fn P<T: 'static>(value: T) -> P<T> {
P {
ptr: box value
ptr: Box::new(value)
}
}

13
src/libsyntax/str.rs Normal file
View File

@ -0,0 +1,13 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/// Returns the `char` whose UTF-8 encoding begins at byte offset `byte`
/// within `s`.
///
/// Panics if `byte` is past the end of `s` or does not lie on a character
/// boundary (via the slicing operation), or if `byte == s.len()` (the tail
/// slice is empty, so there is no char to yield).
pub fn char_at(s: &str, byte: usize) -> char {
    let tail = &s[byte..];
    let mut chars = tail.chars();
    chars.next().unwrap()
}

View File

@ -15,6 +15,7 @@ use parse::new_parser_from_source_str;
use parse::parser::Parser;
use parse::token;
use ptr::P;
use str::char_at;
/// Map a string to tts, using a made-up filename:
pub fn string_to_tts(source_str: String) -> Vec<ast::TokenTree> {
@ -96,24 +97,24 @@ pub fn matches_codepattern(a : &str, b : &str) -> bool {
else if idx_a == a.len() {return false;}
else if idx_b == b.len() {
// maybe the stuff left in a is all ws?
if is_whitespace(a.char_at(idx_a)) {
if is_whitespace(char_at(a, idx_a)) {
return scan_for_non_ws_or_end(a,idx_a) == a.len();
} else {
return false;
}
}
// ws in both given and pattern:
else if is_whitespace(a.char_at(idx_a))
&& is_whitespace(b.char_at(idx_b)) {
else if is_whitespace(char_at(a, idx_a))
&& is_whitespace(char_at(b, idx_b)) {
idx_a = scan_for_non_ws_or_end(a,idx_a);
idx_b = scan_for_non_ws_or_end(b,idx_b);
}
// ws in given only:
else if is_whitespace(a.char_at(idx_a)) {
else if is_whitespace(char_at(a, idx_a)) {
idx_a = scan_for_non_ws_or_end(a,idx_a);
}
// *don't* silently eat ws in expected only.
else if a.char_at(idx_a) == b.char_at(idx_b) {
else if char_at(a, idx_a) == char_at(b, idx_b) {
idx_a += 1;
idx_b += 1;
}
@ -129,7 +130,7 @@ pub fn matches_codepattern(a : &str, b : &str) -> bool {
fn scan_for_non_ws_or_end(a : &str, idx: usize) -> usize {
let mut i = idx;
let len = a.len();
while (i < len) && (is_whitespace(a.char_at(i))) {
while (i < len) && (is_whitespace(char_at(a, i))) {
i += 1;
}
i

View File

@ -64,7 +64,10 @@ impl<T> SmallVector<T> {
let result: &[T] = &[];
result
}
One(ref v) => slice::ref_slice(v),
One(ref v) => {
// FIXME: Could be replaced with `slice::ref_slice(v)` when it is stable.
unsafe { slice::from_raw_parts(v, 1) }
}
Many(ref vs) => vs
}
}