auto merge of #5071 : luqmana/rust/derec, r=pcwalton

Rid libsyntax of records and remove the last remaining piece in `librustc/front/test.rs`.
bors 2013-02-21 09:20:43 -08:00
commit 4a5e8c5218
25 changed files with 330 additions and 241 deletions
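
For context: the patch replaces the old structural records (anonymous `{field: value}` types, still allowed at the time behind `#[legacy_records]`) with named `struct`s, and turns small ad-hoc record return values into plain tuples. The 2013 syntax in the diff (`~str`, `@mut`, `{inner: ..., next: ...}`) no longer compiles, so the sketch below restates the pattern in present-day Rust; `SeqSep`, `Attribute`, and `parse_inner_attrs_and_next` are names borrowed from the diff purely for illustration, not the real libsyntax definitions.

```rust
// A minimal sketch (in present-day Rust) of the record -> struct / tuple
// pattern this commit applies; names are borrowed from the diff for
// illustration only and are not the real libsyntax types.
#[derive(Debug, Clone)]
struct Attribute(String);

// Before: an anonymous record type alias,
//     type seq_sep = { sep: Option<token::Token>, trailing_sep_allowed: bool };
// After: the same data as a named struct.
#[derive(Debug, Clone)]
struct SeqSep {
    sep: Option<char>, // the real code uses token::Token; char keeps the sketch self-contained
    trailing_sep_allowed: bool,
}

fn seq_sep_trailing_allowed(t: char) -> SeqSep {
    SeqSep { sep: Some(t), trailing_sep_allowed: true }
}

// Small ad-hoc records returned from functions become plain tuples, e.g.
// parse_inner_attrs_and_next() -> (inner_attrs, next_outer_attrs).
fn parse_inner_attrs_and_next() -> (Vec<Attribute>, Vec<Attribute>) {
    (vec![Attribute("doc".into())], vec![Attribute("test".into())])
}

fn main() {
    let comma = seq_sep_trailing_allowed(',');
    println!("sep = {:?}, trailing allowed = {}", comma.sep, comma.trailing_sep_allowed);

    // Callers destructure the tuple instead of projecting record fields:
    let (inner, next) = parse_inner_attrs_and_next();
    println!("inner = {:?}, next = {:?}", inner, next);
}
```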

View File

@ -466,7 +466,7 @@ pub fn parse_and_print(code: @~str) -> ~str {
sess.cm,
// Assuming there are no token_trees
syntax::parse::token::mk_fake_ident_interner(),
sess.span_diagnostic,
copy sess.span_diagnostic,
crate,
filename.to_str(),
rdr, a,
@ -622,7 +622,7 @@ pub fn check_variants(files: &[Path], cx: Context) {
sess.cm,
// Assuming no token_trees
syntax::parse::token::mk_fake_ident_interner(),
sess.span_diagnostic,
copy sess.span_diagnostic,
crate,
file_str,
rdr, a,

View File

@ -25,7 +25,7 @@ use syntax::ast::node_id;
use syntax::ast::{int_ty, uint_ty, float_ty};
use syntax::codemap::span;
use syntax::diagnostic;
use syntax::parse::parse_sess;
use syntax::parse::ParseSess;
use syntax::{ast, codemap};
use syntax;
@ -151,7 +151,7 @@ pub struct Session_ {
targ_cfg: @config,
opts: @options,
cstore: @mut metadata::cstore::CStore,
parse_sess: parse_sess,
parse_sess: @mut ParseSess,
codemap: @codemap::CodeMap,
// For a library crate, this is always none
main_fn: @mut Option<(node_id, codemap::span)>,

View File

@ -10,10 +10,6 @@
// Code that generates a test runner to run all the tests in a crate
// XXX - Need to finish off libsyntax first
#[legacy_records];
#[allow(structural_records)];
use core::prelude::*;
use driver::session;
@ -25,7 +21,7 @@ use core::option;
use core::vec;
use syntax::ast_util::*;
use syntax::attr;
use syntax::codemap::{dummy_sp, span, ExpandedFrom};
use syntax::codemap::{dummy_sp, span, ExpandedFrom, CallInfo, NameAndSpan};
use syntax::codemap;
use syntax::fold;
use syntax::print::pprust;
@ -81,11 +77,13 @@ fn generate_test_harness(sess: session::Session,
testfns: ~[]
};
cx.ext_cx.bt_push(ExpandedFrom({
call_site: dummy_sp(),
callie: {
name: ~"test",
span: None}}));
cx.ext_cx.bt_push(ExpandedFrom(CallInfo {
call_site: dummy_sp(),
callee: NameAndSpan {
name: ~"test",
span: None
}
}));
let precursor = @fold::AstFoldFns {
fold_crate: fold::wrap(|a,b| fold_crate(cx, a, b) ),

View File

@ -1193,18 +1193,18 @@ fn decode_item_ast(par_doc: ebml::Doc) -> @ast::item {
#[cfg(test)]
trait fake_ext_ctxt {
fn cfg() -> ast::crate_cfg;
fn parse_sess() -> parse::parse_sess;
fn parse_sess() -> @mut parse::ParseSess;
fn call_site() -> span;
fn ident_of(+st: ~str) -> ast::ident;
}
#[cfg(test)]
type fake_session = parse::parse_sess;
type fake_session = @mut parse::ParseSess;
#[cfg(test)]
impl fake_ext_ctxt for fake_session {
fn cfg() -> ast::crate_cfg { ~[] }
fn parse_sess() -> parse::parse_sess { self }
fn parse_sess() -> @mut parse::ParseSess { self }
fn call_site() -> span {
codemap::span {
lo: codemap::BytePos(0),

View File

@ -196,11 +196,16 @@ pub struct LocWithOpt {
// used to be structural records. Better names, anyone?
pub struct FileMapAndLine {fm: @FileMap, line: uint}
pub struct FileMapAndBytePos {fm: @FileMap, pos: BytePos}
pub struct NameAndSpan {name: ~str, span: Option<span>}
pub struct CallInfo {
call_site: span,
callee: NameAndSpan
}
/// Extra information for tracking macro expansion of spans
pub enum ExpnInfo {
ExpandedFrom({call_site: span,
callie: {name: ~str, span: Option<span>}})
ExpandedFrom(CallInfo)
}
pub type FileName = ~str;
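
The hunk above is the heart of the span-tracking change: the `ExpandedFrom` variant now carries a named `CallInfo` (with a `NameAndSpan` callee) instead of a nested anonymous record. Below is a minimal present-day-Rust sketch of that shape, with `Span` as a stand-in for `codemap::span` and simplified field types; it mirrors, but is not, the libsyntax definitions.

```rust
// Sketch of the ExpnInfo/CallInfo/NameAndSpan shape introduced above.
#[derive(Clone, Copy, Debug)]
struct Span { lo: u32, hi: u32 }

#[derive(Clone, Debug)]
struct NameAndSpan { name: String, span: Option<Span> }

#[derive(Clone, Debug)]
struct CallInfo { call_site: Span, callee: NameAndSpan }

// The enum now wraps a named struct instead of an anonymous record, so the
// bt_push call sites later in this commit read
// ExpandedFrom(CallInfo { call_site, callee: NameAndSpan { .. } }).
#[derive(Clone, Debug)]
enum ExpnInfo { ExpandedFrom(CallInfo) }

fn main() {
    let ei = ExpnInfo::ExpandedFrom(CallInfo {
        call_site: Span { lo: 0, hi: 0 },
        callee: NameAndSpan { name: "test".to_string(), span: None },
    });
    // Consumers such as print_macro_backtrace pattern-match the wrapper back apart:
    match ei {
        ExpnInfo::ExpandedFrom(CallInfo { call_site, callee }) => {
            println!("in expansion of {}! at {:?} ({:?})", callee.name, call_site, callee.span);
        }
    }
}
```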

View File

@ -290,10 +290,10 @@ fn highlight_lines(cm: @codemap::CodeMap,
fn print_macro_backtrace(cm: @codemap::CodeMap, sp: span) {
do option::iter(&sp.expn_info) |ei| {
let ss = option::map_default(&ei.callie.span, @~"",
let ss = option::map_default(&ei.callee.span, @~"",
|span| @cm.span_to_str(*span));
print_diagnostic(*ss, note,
fmt!("in expansion of %s!", ei.callie.name));
fmt!("in expansion of %s!", ei.callee.name));
let ss = cm.span_to_str(ei.call_site);
print_diagnostic(ss, note, ~"expansion site");
print_macro_backtrace(cm, ei.call_site);

View File

@ -13,6 +13,7 @@ use core::prelude::*;
use ast;
use codemap;
use codemap::{CodeMap, span, ExpnInfo, ExpandedFrom, dummy_sp};
use codemap::{CallInfo, NameAndSpan};
use diagnostic::span_handler;
use ext;
use parse;
@ -166,7 +167,7 @@ pub fn syntax_expander_table() -> SyntaxExtensions {
// -> expn_info of their expansion context stored into their span.
pub trait ext_ctxt {
fn codemap(@mut self) -> @CodeMap;
fn parse_sess(@mut self) -> parse::parse_sess;
fn parse_sess(@mut self) -> @mut parse::ParseSess;
fn cfg(@mut self) -> ast::crate_cfg;
fn call_site(@mut self) -> span;
fn print_backtrace(@mut self);
@ -190,10 +191,10 @@ pub trait ext_ctxt {
fn ident_of(@mut self, st: ~str) -> ast::ident;
}
pub fn mk_ctxt(parse_sess: parse::parse_sess,
pub fn mk_ctxt(parse_sess: @mut parse::ParseSess,
cfg: ast::crate_cfg) -> ext_ctxt {
struct CtxtRepr {
parse_sess: parse::parse_sess,
parse_sess: @mut parse::ParseSess,
cfg: ast::crate_cfg,
backtrace: Option<@ExpnInfo>,
mod_path: ~[ast::ident],
@ -201,11 +202,11 @@ pub fn mk_ctxt(parse_sess: parse::parse_sess,
}
impl ext_ctxt for CtxtRepr {
fn codemap(@mut self) -> @CodeMap { self.parse_sess.cm }
fn parse_sess(@mut self) -> parse::parse_sess { self.parse_sess }
fn parse_sess(@mut self) -> @mut parse::ParseSess { self.parse_sess }
fn cfg(@mut self) -> ast::crate_cfg { self.cfg }
fn call_site(@mut self) -> span {
match self.backtrace {
Some(@ExpandedFrom({call_site: cs, _})) => cs,
Some(@ExpandedFrom(CallInfo {call_site: cs, _})) => cs,
None => self.bug(~"missing top span")
}
}
@ -216,18 +217,18 @@ pub fn mk_ctxt(parse_sess: parse::parse_sess,
fn mod_path(@mut self) -> ~[ast::ident] { return self.mod_path; }
fn bt_push(@mut self, ei: codemap::ExpnInfo) {
match ei {
ExpandedFrom({call_site: cs, callie: ref callie}) => {
ExpandedFrom(CallInfo {call_site: cs, callee: ref callee}) => {
self.backtrace =
Some(@ExpandedFrom({
Some(@ExpandedFrom(CallInfo {
call_site: span {lo: cs.lo, hi: cs.hi,
expn_info: self.backtrace},
callie: (*callie)}));
callee: (*callee)}));
}
}
}
fn bt_pop(@mut self) {
match self.backtrace {
Some(@ExpandedFrom({
Some(@ExpandedFrom(CallInfo {
call_site: span {expn_info: prev, _}, _
})) => {
self.backtrace = prev

View File

@ -19,6 +19,11 @@ use ext::build;
use core::dvec;
use core::option;
pub struct Field {
ident: ast::ident,
ex: @ast::expr
}
pub fn mk_expr(cx: ext_ctxt,
sp: codemap::span,
expr: ast::expr_)
@ -147,47 +152,37 @@ pub fn mk_base_str(cx: ext_ctxt, sp: span, s: ~str) -> @ast::expr {
pub fn mk_uniq_str(cx: ext_ctxt, sp: span, s: ~str) -> @ast::expr {
mk_vstore_e(cx, sp, mk_base_str(cx, sp, s), ast::expr_vstore_uniq)
}
pub fn mk_field(sp: span, f: &{ident: ast::ident, ex: @ast::expr})
-> ast::field {
pub fn mk_field(sp: span, f: &Field) -> ast::field {
codemap::spanned {
node: ast::field_ { mutbl: ast::m_imm, ident: f.ident, expr: f.ex },
span: sp,
}
}
pub fn mk_fields(sp: span, fields: ~[{ident: ast::ident, ex: @ast::expr}])
-> ~[ast::field] {
pub fn mk_fields(sp: span, fields: ~[Field]) -> ~[ast::field] {
fields.map(|f| mk_field(sp, f))
}
pub fn mk_rec_e(cx: ext_ctxt,
sp: span,
fields: ~[{ident: ast::ident, ex: @ast::expr}])
-> @ast::expr {
pub fn mk_rec_e(cx: ext_ctxt, sp: span, fields: ~[Field]) -> @ast::expr {
mk_expr(cx, sp, ast::expr_rec(mk_fields(sp, fields),
option::None::<@ast::expr>))
}
pub fn mk_struct_e(cx: ext_ctxt,
sp: span,
ctor_path: ~[ast::ident],
fields: ~[{ident: ast::ident, ex: @ast::expr}])
-> @ast::expr {
pub fn mk_struct_e(cx: ext_ctxt, sp: span, ctor_path: ~[ast::ident],
fields: ~[Field]) -> @ast::expr {
mk_expr(cx, sp,
ast::expr_struct(mk_raw_path(sp, ctor_path),
mk_fields(sp, fields),
option::None::<@ast::expr>))
}
pub fn mk_global_struct_e(cx: ext_ctxt,
sp: span,
pub fn mk_global_struct_e(cx: ext_ctxt, sp: span,
ctor_path: ~[ast::ident],
fields: ~[{ident: ast::ident, ex: @ast::expr}])
fields: ~[Field])
-> @ast::expr {
mk_expr(cx, sp,
ast::expr_struct(mk_raw_path_global(sp, ctor_path),
mk_fields(sp, fields),
option::None::<@ast::expr>))
}
pub fn mk_glob_use(cx: ext_ctxt,
sp: span,
path: ~[ast::ident]) -> @ast::view_item {
pub fn mk_glob_use(cx: ext_ctxt, sp: span, path: ~[ast::ident])
-> @ast::view_item {
let glob = @codemap::spanned {
node: ast::view_path_glob(mk_raw_path(sp, path), cx.next_id()),
span: sp,

View File

@ -14,7 +14,7 @@ use ast::{crate, expr_, expr_mac, mac_invoc_tt};
use ast::{tt_delim, tt_tok, item_mac, stmt_, stmt_mac, stmt_expr, stmt_semi};
use ast;
use attr;
use codemap::{span, ExpandedFrom};
use codemap::{span, CallInfo, ExpandedFrom, NameAndSpan};
use ext::base::*;
use fold::*;
use parse::{parser, parse_expr_from_source_str, new_parser_from_tts};
@ -48,8 +48,12 @@ pub fn expand_expr(exts: SyntaxExtensions, cx: ext_ctxt,
}
Some(NormalTT(SyntaxExpanderTT{expander: exp,
span: exp_sp})) => {
cx.bt_push(ExpandedFrom({call_site: s,
callie: {name: *extname, span: exp_sp}}));
cx.bt_push(ExpandedFrom(CallInfo{
call_site: s,
callee: NameAndSpan {
name: *extname, span: exp_sp
}
}));
let expanded = match exp(cx, (*mac).span, (*tts)) {
MRExpr(e) => e,
@ -105,9 +109,13 @@ pub fn expand_mod_items(exts: SyntaxExtensions, cx: ext_ctxt,
match exts.find(&mname) {
None | Some(NormalTT(_)) | Some(ItemTT(*)) => items,
Some(ItemDecorator(dec_fn)) => {
cx.bt_push(ExpandedFrom({call_site: attr.span,
callie: {name: /*bad*/ copy *mname,
span: None}}));
cx.bt_push(ExpandedFrom(CallInfo {
call_site: attr.span,
callee: NameAndSpan {
name: /*bad*/ copy *mname,
span: None
}
}));
let r = dec_fn(cx, attr.span, attr.node.value, items);
cx.bt_pop();
r
@ -170,9 +178,13 @@ pub fn expand_item_mac(exts: SyntaxExtensions,
given '%s'", *extname,
*cx.parse_sess().interner.get(it.ident)));
}
cx.bt_push(ExpandedFrom({call_site: it.span,
callie: {name: *extname,
span: (*expand).span}}));
cx.bt_push(ExpandedFrom(CallInfo {
call_site: it.span,
callee: NameAndSpan {
name: *extname,
span: (*expand).span
}
}));
((*expand).expander)(cx, it.span, tts)
}
Some(ItemTT(ref expand)) => {
@ -181,9 +193,13 @@ pub fn expand_item_mac(exts: SyntaxExtensions,
fmt!("macro %s! expects an ident argument",
*extname));
}
cx.bt_push(ExpandedFrom({call_site: it.span,
callie: {name: *extname,
span: (*expand).span}}));
cx.bt_push(ExpandedFrom(CallInfo {
call_site: it.span,
callee: NameAndSpan {
name: *extname,
span: (*expand).span
}
}));
((*expand).expander)(cx, it.span, it.ident, tts)
}
_ => cx.span_fatal(
@ -228,8 +244,10 @@ pub fn expand_stmt(exts: SyntaxExtensions, cx: ext_ctxt,
Some(NormalTT(
SyntaxExpanderTT{expander: exp, span: exp_sp})) => {
cx.bt_push(ExpandedFrom(
{call_site: sp, callie: {name: *extname, span: exp_sp}}));
cx.bt_push(ExpandedFrom(CallInfo {
call_site: sp,
callee: NameAndSpan { name: *extname, span: exp_sp }
}));
let expanded = match exp(cx, mac.span, tts) {
MRExpr(e) =>
@codemap::spanned { node: stmt_expr(e, cx.next_id()),
@ -321,7 +339,7 @@ pub fn core_macros() -> ~str {
}";
}
pub fn expand_crate(parse_sess: parse::parse_sess,
pub fn expand_crate(parse_sess: @mut parse::ParseSess,
cfg: ast::crate_cfg, c: @crate) -> @crate {
let exts = syntax_expander_table();
let afp = default_ast_fold();

View File

@ -118,10 +118,18 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
sp,
make_path_vec(cx, @~"Conv"),
~[
{ident: intr.intern(@~"flags"), ex: flags_expr},
{ident: intr.intern(@~"width"), ex: width_expr},
{ident: intr.intern(@~"precision"), ex: precision_expr},
{ident: intr.intern(@~"ty"), ex: ty_expr},
build::Field {
ident: intr.intern(@~"flags"), ex: flags_expr
},
build::Field {
ident: intr.intern(@~"width"), ex: width_expr
},
build::Field {
ident: intr.intern(@~"precision"), ex: precision_expr
},
build::Field {
ident: intr.intern(@~"ty"), ex: ty_expr
},
]
)
}

View File

@ -68,7 +68,7 @@ pub fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident,
tt: ~[ast::token_tree]) -> base::MacResult {
let sess = cx.parse_sess();
let cfg = cx.cfg();
let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic,
let tt_rdr = new_tt_reader(copy cx.parse_sess().span_diagnostic,
cx.parse_sess().interner, None, tt);
let rdr = tt_rdr as reader;
let rust_parser = Parser(sess, cfg, rdr.dup());

View File

@ -11,6 +11,7 @@
// Parsing pipes protocols from token trees.
use ext::pipes::pipec::*;
use parse::common::SeqSep;
use parse::parser;
use parse::token;
@ -26,9 +27,10 @@ pub impl proto_parser for parser::Parser {
fn parse_proto(&self, id: ~str) -> protocol {
let proto = protocol(id, self.span);
self.parse_seq_to_before_end(token::EOF,
{sep: None, trailing_sep_allowed: false},
|self| self.parse_state(proto));
self.parse_seq_to_before_end(token::EOF, SeqSep {
sep: None,
trailing_sep_allowed: false
}, |self| self.parse_state(proto));
return proto;
}
@ -58,9 +60,10 @@ pub impl proto_parser for parser::Parser {
// parse the messages
self.parse_unspanned_seq(
token::LBRACE, token::RBRACE,
{sep: Some(token::COMMA), trailing_sep_allowed: true},
|self| self.parse_message(state));
token::LBRACE, token::RBRACE, SeqSep {
sep: Some(token::COMMA),
trailing_sep_allowed: true
}, |self| self.parse_message(state));
}
fn parse_message(&self, state: state) {
@ -68,10 +71,10 @@ pub impl proto_parser for parser::Parser {
let args = if self.token == token::LPAREN {
self.parse_unspanned_seq(token::LPAREN,
token::RPAREN,
{sep: Some(token::COMMA),
trailing_sep_allowed: true},
|p| p.parse_ty(false))
token::RPAREN, SeqSep {
sep: Some(token::COMMA),
trailing_sep_allowed: true
}, |p| p.parse_ty(false))
}
else { ~[] };
@ -82,10 +85,10 @@ pub impl proto_parser for parser::Parser {
let name = *self.interner.get(self.parse_ident());
let ntys = if self.token == token::LT {
self.parse_unspanned_seq(token::LT,
token::GT,
{sep: Some(token::COMMA),
trailing_sep_allowed: true},
|p| p.parse_ty(false))
token::GT, SeqSep {
sep: Some(token::COMMA),
trailing_sep_allowed: true
}, |p| p.parse_ty(false))
}
else { ~[] };
Some(next_state {state: name, tys: ntys})

View File

@ -10,6 +10,7 @@
use codemap;
use codemap::{FileMap, Loc, Pos, ExpandedFrom, span};
use codemap::{CallInfo, NameAndSpan};
use ext::base::*;
use ext::base;
use ext::build::{mk_base_vec_e, mk_uint, mk_u8, mk_base_str};
@ -22,10 +23,13 @@ use core::str;
use core::vec;
fn topmost_expn_info(expn_info: @codemap::ExpnInfo) -> @codemap::ExpnInfo {
let ExpandedFrom({call_site, _}) = *expn_info;
let ExpandedFrom(CallInfo { call_site, _ }) = *expn_info;
match call_site.expn_info {
Some(next_expn_info) => {
let ExpandedFrom({callie: {name, _}, _}) = *next_expn_info;
let ExpandedFrom(CallInfo {
callee: NameAndSpan {name, _},
_
}) = *next_expn_info;
// Don't recurse into file using "include!"
if name == ~"include" { return expn_info; }

View File

@ -22,7 +22,7 @@ pub fn expand_trace_macros(cx: ext_ctxt, sp: span,
tt: ~[ast::token_tree]) -> base::MacResult {
let sess = cx.parse_sess();
let cfg = cx.cfg();
let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic,
let tt_rdr = new_tt_reader(copy cx.parse_sess().span_diagnostic,
cx.parse_sess().interner, None, tt);
let rdr = tt_rdr as reader;
let rust_parser = Parser(sess, cfg, rdr.dup());

View File

@ -14,7 +14,7 @@ use codemap::{BytePos, mk_sp};
use codemap;
use parse::common::*; //resolve bug?
use parse::lexer::*; //resolve bug?
use parse::parse_sess;
use parse::ParseSess;
use parse::parser::Parser;
use parse::token::{Token, EOF, to_str, nonterminal};
use parse::token;
@ -101,7 +101,7 @@ eof: [a $( a )* a b ·]
nonempty body. */
pub enum matcher_pos_up { /* to break a circularity */
matcher_pos_up(Option<matcher_pos>)
matcher_pos_up(Option<~MatcherPos>)
}
pub fn is_some(&&mpu: matcher_pos_up) -> bool {
@ -111,7 +111,7 @@ pub fn is_some(&&mpu: matcher_pos_up) -> bool {
}
}
pub type matcher_pos = ~{
pub struct MatcherPos {
elts: ~[ast::matcher], // maybe should be /&? Need to understand regions.
sep: Option<Token>,
mut idx: uint,
@ -119,9 +119,9 @@ pub type matcher_pos = ~{
matches: ~[DVec<@named_match>],
match_lo: uint, match_hi: uint,
sp_lo: BytePos,
};
}
pub fn copy_up(&& mpu: matcher_pos_up) -> matcher_pos {
pub fn copy_up(&& mpu: matcher_pos_up) -> ~MatcherPos {
match &mpu {
&matcher_pos_up(Some(ref mp)) => copy (*mp),
_ => fail!()
@ -139,7 +139,7 @@ pub fn count_names(ms: &[matcher]) -> uint {
#[allow(non_implicitly_copyable_typarams)]
pub fn initial_matcher_pos(ms: ~[matcher], sep: Option<Token>, lo: BytePos)
-> matcher_pos {
-> ~MatcherPos {
let mut match_idx_hi = 0u;
for ms.each() |elt| {
match elt.node {
@ -152,9 +152,16 @@ pub fn initial_matcher_pos(ms: ~[matcher], sep: Option<Token>, lo: BytePos)
}
}
}
~{elts: ms, sep: sep, mut idx: 0u, mut up: matcher_pos_up(None),
matches: copy vec::from_fn(count_names(ms), |_i| dvec::DVec()),
match_lo: 0u, match_hi: match_idx_hi, sp_lo: lo}
~MatcherPos {
elts: ms,
sep: sep,
mut idx: 0u,
mut up: matcher_pos_up(None),
matches: copy vec::from_fn(count_names(ms), |_i| dvec::DVec()),
match_lo: 0u,
match_hi: match_idx_hi,
sp_lo: lo
}
}
// named_match is a pattern-match result for a single ast::match_nonterminal:
@ -181,11 +188,11 @@ pub enum named_match {
matched_nonterminal(nonterminal)
}
pub type earley_item = matcher_pos;
pub type earley_item = ~MatcherPos;
pub fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match])
pub fn nameize(p_s: @mut ParseSess, ms: ~[matcher], res: ~[@named_match])
-> HashMap<ident,@named_match> {
fn n_rec(p_s: parse_sess, m: matcher, res: ~[@named_match],
fn n_rec(p_s: @mut ParseSess, m: matcher, res: ~[@named_match],
ret_val: HashMap<ident, @named_match>) {
match m {
codemap::spanned {node: match_tok(_), _} => (),
@ -216,7 +223,7 @@ pub enum parse_result {
error(codemap::span, ~str)
}
pub fn parse_or_else(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader,
pub fn parse_or_else(sess: @mut ParseSess, cfg: ast::crate_cfg, rdr: reader,
ms: ~[matcher]) -> HashMap<ident, @named_match> {
match parse(sess, cfg, rdr, ms) {
success(m) => m,
@ -225,7 +232,7 @@ pub fn parse_or_else(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader,
}
}
pub fn parse(sess: parse_sess,
pub fn parse(sess: @mut ParseSess,
cfg: ast::crate_cfg,
rdr: reader,
ms: ~[matcher])
@ -321,8 +328,10 @@ pub fn parse(sess: parse_sess,
let matches = vec::map(ei.matches, // fresh, same size:
|_m| DVec::<@named_match>());
let ei_t = ei;
cur_eis.push(~{
elts: (*matchers), sep: (*sep), mut idx: 0u,
cur_eis.push(~MatcherPos {
elts: (*matchers),
sep: (*sep),
mut idx: 0u,
mut up: matcher_pos_up(Some(ei_t)),
matches: matches,
match_lo: match_idx_lo, match_hi: match_idx_hi,

View File

@ -53,7 +53,7 @@ pub fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
// Parse the macro_rules! invocation (`none` is for no interpolations):
let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic,
let arg_reader = new_tt_reader(copy cx.parse_sess().span_diagnostic,
cx.parse_sess().interner, None, arg);
let argument_map = parse_or_else(cx.parse_sess(), cx.cfg(),
arg_reader as reader, argument_gram);

View File

@ -24,7 +24,7 @@ pub trait parser_attr {
fn parse_attribute_naked(style: ast::attr_style, lo: BytePos) ->
ast::attribute;
fn parse_inner_attrs_and_next() ->
{inner: ~[ast::attribute], next: ~[ast::attribute]};
(~[ast::attribute], ~[ast::attribute]);
fn parse_meta_item() -> @ast::meta_item;
fn parse_meta_seq() -> ~[@ast::meta_item];
fn parse_optional_meta() -> ~[@ast::meta_item];
@ -82,7 +82,7 @@ impl parser_attr for Parser {
// is an inner attribute of the containing item or an outer attribute of
// the first contained item until we see the semi).
fn parse_inner_attrs_and_next() ->
{inner: ~[ast::attribute], next: ~[ast::attribute]} {
(~[ast::attribute], ~[ast::attribute]) {
let mut inner_attrs: ~[ast::attribute] = ~[];
let mut next_outer_attrs: ~[ast::attribute] = ~[];
loop {
@ -121,7 +121,7 @@ impl parser_attr for Parser {
_ => break
}
}
return {inner: inner_attrs, next: next_outer_attrs};
(inner_attrs, next_outer_attrs)
}
fn parse_meta_item() -> @ast::meta_item {

View File

@ -44,7 +44,11 @@ impl cmp::Eq for cmnt_style {
}
}
pub type cmnt = {style: cmnt_style, lines: ~[~str], pos: BytePos};
pub struct cmnt {
style: cmnt_style,
lines: ~[~str],
pos: BytePos
}
pub fn is_doc_comment(s: &str) -> bool {
(s.starts_with(~"///") && !is_line_non_doc_comment(s)) ||
@ -147,7 +151,7 @@ fn consume_non_eol_whitespace(rdr: @mut StringReader) {
fn push_blank_line_comment(rdr: @mut StringReader, comments: &mut ~[cmnt]) {
debug!(">>> blank-line comment");
let v: ~[~str] = ~[];
comments.push({style: blank_line, lines: v, pos: rdr.last_pos});
comments.push(cmnt {style: blank_line, lines: v, pos: rdr.last_pos});
}
fn consume_whitespace_counting_blank_lines(rdr: @mut StringReader,
@ -166,7 +170,7 @@ fn read_shebang_comment(rdr: @mut StringReader, code_to_the_left: bool,
debug!(">>> shebang comment");
let p = rdr.last_pos;
debug!("<<< shebang comment");
comments.push({
comments.push(cmnt {
style: if code_to_the_left { trailing } else { isolated },
lines: ~[read_one_line_comment(rdr)],
pos: p
@ -189,7 +193,7 @@ fn read_line_comments(rdr: @mut StringReader, code_to_the_left: bool,
}
debug!("<<< line comments");
if !lines.is_empty() {
comments.push({
comments.push(cmnt {
style: if code_to_the_left { trailing } else { isolated },
lines: lines,
pos: p
@ -288,7 +292,7 @@ fn read_block_comment(rdr: @mut StringReader,
style = mixed;
}
debug!("<<< block comment");
comments.push({style: style, lines: lines, pos: p});
comments.push(cmnt {style: style, lines: lines, pos: p});
}
fn peeking_at_comment(rdr: @mut StringReader) -> bool {
@ -311,12 +315,14 @@ fn consume_comment(rdr: @mut StringReader,
debug!("<<< consume comment");
}
pub type lit = {lit: ~str, pos: BytePos};
pub struct lit {
lit: ~str,
pos: BytePos
}
pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
path: ~str,
srdr: io::Reader) ->
{cmnts: ~[cmnt], lits: ~[lit]} {
srdr: io::Reader) -> (~[cmnt], ~[lit]) {
let src = @str::from_bytes(srdr.read_whole_stream());
let itr = parse::token::mk_fake_ident_interner();
let cm = CodeMap::new();
@ -350,12 +356,13 @@ pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
let TokenAndSpan {tok: tok, sp: sp} = rdr.peek();
if token::is_lit(tok) {
let s = get_str_from(rdr, bstart);
literals.push({lit: s, pos: sp.lo});
literals.push(lit {lit: s, pos: sp.lo});
log(debug, ~"tok lit: " + s);
} else {
log(debug, ~"tok: " + token::to_str(rdr.interner, tok));
}
first_read = false;
}
return {cmnts: comments, lits: literals};
(comments, literals)
}

View File

@ -20,21 +20,30 @@ use core::option::{None, Option, Some};
use core::option;
use std::oldmap::HashMap;
// seq_sep : a sequence separator (token)
// SeqSep : a sequence separator (token)
// and whether a trailing separator is allowed.
pub type seq_sep = {
pub struct SeqSep {
sep: Option<token::Token>,
trailing_sep_allowed: bool
};
}
pub fn seq_sep_trailing_disallowed(t: token::Token) -> seq_sep {
return {sep: option::Some(t), trailing_sep_allowed: false};
pub fn seq_sep_trailing_disallowed(t: token::Token) -> SeqSep {
SeqSep {
sep: option::Some(t),
trailing_sep_allowed: false
}
}
pub fn seq_sep_trailing_allowed(t: token::Token) -> seq_sep {
return {sep: option::Some(t), trailing_sep_allowed: true};
pub fn seq_sep_trailing_allowed(t: token::Token) -> SeqSep {
SeqSep {
sep: option::Some(t),
trailing_sep_allowed: true
}
}
pub fn seq_sep_none() -> seq_sep {
return {sep: option::None, trailing_sep_allowed: false};
pub fn seq_sep_none() -> SeqSep {
SeqSep {
sep: option::None,
trailing_sep_allowed: false
}
}
pub fn token_to_str(reader: reader, ++token: token::Token) -> ~str {
@ -253,7 +262,7 @@ pub impl Parser {
// parse a sequence, including the closing delimiter. The function
// f must consume tokens until reaching the next separator or
// closing bracket.
fn parse_seq_to_end<T:Copy>(ket: token::Token, sep: seq_sep,
fn parse_seq_to_end<T:Copy>(ket: token::Token, sep: SeqSep,
f: fn(Parser) -> T) -> ~[T] {
let val = self.parse_seq_to_before_end(ket, sep, f);
self.bump();
@ -263,7 +272,7 @@ pub impl Parser {
// parse a sequence, not including the closing delimiter. The function
// f must consume tokens until reaching the next separator or
// closing bracket.
fn parse_seq_to_before_end<T:Copy>(ket: token::Token, sep: seq_sep,
fn parse_seq_to_before_end<T:Copy>(ket: token::Token, sep: SeqSep,
f: fn(Parser) -> T) -> ~[T] {
let mut first: bool = true;
let mut v: ~[T] = ~[];
@ -286,7 +295,7 @@ pub impl Parser {
// closing bracket.
fn parse_unspanned_seq<T:Copy>(bra: token::Token,
ket: token::Token,
sep: seq_sep,
sep: SeqSep,
f: fn(Parser) -> T) -> ~[T] {
self.expect(bra);
let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
@ -296,7 +305,7 @@ pub impl Parser {
// NB: Do not use this function unless you actually plan to place the
// spanned list in the AST.
fn parse_seq<T:Copy>(bra: token::Token, ket: token::Token, sep: seq_sep,
fn parse_seq<T:Copy>(bra: token::Token, ket: token::Token, sep: SeqSep,
f: fn(Parser) -> T) -> spanned<~[T]> {
let lo = self.span.lo;
self.expect(bra);

View File

@ -75,9 +75,9 @@ fn parse_companion_mod(cx: ctx, prefix: &Path, suffix: &Option<Path>)
let p0 = new_sub_parser_from_file(cx.sess, cx.cfg,
modpath,
codemap::dummy_sp());
let inner_attrs = p0.parse_inner_attrs_and_next();
let m0 = p0.parse_mod_items(token::EOF, inner_attrs.next);
return (m0.view_items, m0.items, inner_attrs.inner);
let (inner, next) = p0.parse_inner_attrs_and_next();
let m0 = p0.parse_mod_items(token::EOF, next);
return (m0.view_items, m0.items, inner);
} else {
return (~[], ~[], ~[]);
}
@ -111,9 +111,9 @@ pub fn eval_src_mod_from_path(cx: ctx, prefix: &Path, path: &Path,
let p0 =
new_sub_parser_from_file(cx.sess, cx.cfg,
&full_path, sp);
let inner_attrs = p0.parse_inner_attrs_and_next();
let mod_attrs = vec::append(outer_attrs, inner_attrs.inner);
let first_item_outer_attrs = inner_attrs.next;
let (inner, next) = p0.parse_inner_attrs_and_next();
let mod_attrs = vec::append(outer_attrs, inner);
let first_item_outer_attrs = next;
let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs);
return (ast::item_mod(m0), mod_attrs);
}

View File

@ -47,33 +47,35 @@ pub mod classify;
/// Reporting obsolete syntax
pub mod obsolete;
pub type parse_sess = @{
pub struct ParseSess {
cm: @codemap::CodeMap,
mut next_id: node_id,
next_id: node_id,
span_diagnostic: span_handler,
interner: @ident_interner,
};
}
pub fn new_parse_sess(demitter: Option<Emitter>) -> parse_sess {
pub fn new_parse_sess(demitter: Option<Emitter>) -> @mut ParseSess {
let cm = @CodeMap::new();
return @{cm: cm,
mut next_id: 1,
span_diagnostic: mk_span_handler(mk_handler(demitter), cm),
interner: mk_ident_interner(),
};
@mut ParseSess {
cm: cm,
next_id: 1,
span_diagnostic: mk_span_handler(mk_handler(demitter), cm),
interner: mk_ident_interner(),
}
}
pub fn new_parse_sess_special_handler(sh: span_handler, cm: @codemap::CodeMap)
-> parse_sess {
return @{cm: cm,
mut next_id: 1,
span_diagnostic: sh,
interner: mk_ident_interner(),
};
-> @mut ParseSess {
@mut ParseSess {
cm: cm,
next_id: 1,
span_diagnostic: sh,
interner: mk_ident_interner(),
}
}
pub fn parse_crate_from_file(input: &Path, cfg: ast::crate_cfg,
sess: parse_sess) -> @ast::crate {
sess: @mut ParseSess) -> @ast::crate {
let p = new_crate_parser_from_file(sess, cfg, input);
let r = p.parse_crate_mod(cfg);
return r;
@ -82,7 +84,7 @@ pub fn parse_crate_from_file(input: &Path, cfg: ast::crate_cfg,
pub fn parse_crate_from_source_str(name: ~str,
source: @~str,
cfg: ast::crate_cfg,
sess: parse_sess) -> @ast::crate {
sess: @mut ParseSess) -> @ast::crate {
let p = new_parser_from_source_str(sess, cfg, name,
codemap::FssNone, source);
let r = p.parse_crate_mod(cfg);
@ -93,7 +95,7 @@ pub fn parse_crate_from_source_str(name: ~str,
pub fn parse_expr_from_source_str(name: ~str,
source: @~str,
cfg: ast::crate_cfg,
sess: parse_sess) -> @ast::expr {
sess: @mut ParseSess) -> @ast::expr {
let p = new_parser_from_source_str(sess, cfg, name,
codemap::FssNone, source);
let r = p.parse_expr();
@ -105,7 +107,7 @@ pub fn parse_item_from_source_str(name: ~str,
source: @~str,
cfg: ast::crate_cfg,
+attrs: ~[ast::attribute],
sess: parse_sess)
sess: @mut ParseSess)
-> Option<@ast::item> {
let p = new_parser_from_source_str(sess, cfg, name,
codemap::FssNone, source);
@ -118,7 +120,7 @@ pub fn parse_stmt_from_source_str(name: ~str,
source: @~str,
cfg: ast::crate_cfg,
+attrs: ~[ast::attribute],
sess: parse_sess) -> @ast::stmt {
sess: @mut ParseSess) -> @ast::stmt {
let p = new_parser_from_source_str(sess, cfg, name,
codemap::FssNone, source);
let r = p.parse_stmt(attrs);
@ -129,7 +131,7 @@ pub fn parse_stmt_from_source_str(name: ~str,
pub fn parse_tts_from_source_str(name: ~str,
source: @~str,
cfg: ast::crate_cfg,
sess: parse_sess) -> ~[ast::token_tree] {
sess: @mut ParseSess) -> ~[ast::token_tree] {
let p = new_parser_from_source_str(sess, cfg, name,
codemap::FssNone, source);
p.quote_depth += 1u;
@ -141,7 +143,7 @@ pub fn parse_tts_from_source_str(name: ~str,
pub fn parse_from_source_str<T>(f: fn (p: Parser) -> T,
name: ~str, ss: codemap::FileSubstr,
source: @~str, cfg: ast::crate_cfg,
sess: parse_sess)
sess: @mut ParseSess)
-> T
{
let p = new_parser_from_source_str(sess, cfg, name, ss,
@ -154,7 +156,7 @@ pub fn parse_from_source_str<T>(f: fn (p: Parser) -> T,
r
}
pub fn next_node_id(sess: parse_sess) -> node_id {
pub fn next_node_id(sess: @mut ParseSess) -> node_id {
let rv = sess.next_id;
sess.next_id += 1;
// ID 0 is reserved for the crate and doesn't actually exist in the AST
@ -162,17 +164,17 @@ pub fn next_node_id(sess: parse_sess) -> node_id {
return rv;
}
pub fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
pub fn new_parser_from_source_str(sess: @mut ParseSess, cfg: ast::crate_cfg,
+name: ~str, +ss: codemap::FileSubstr,
source: @~str) -> Parser {
let filemap = sess.cm.new_filemap_w_substr(name, ss, source);
let srdr = lexer::new_string_reader(sess.span_diagnostic,
let srdr = lexer::new_string_reader(copy sess.span_diagnostic,
filemap,
sess.interner);
return Parser(sess, cfg, srdr as reader);
}
pub fn new_parser_from_file(sess: parse_sess,
pub fn new_parser_from_file(sess: @mut ParseSess,
cfg: ast::crate_cfg,
path: &Path)
-> Result<Parser, ~str> {
@ -180,7 +182,7 @@ pub fn new_parser_from_file(sess: parse_sess,
result::Ok(src) => {
let filemap = sess.cm.new_filemap(path.to_str(), @src);
let srdr = lexer::new_string_reader(sess.span_diagnostic,
let srdr = lexer::new_string_reader(copy sess.span_diagnostic,
filemap,
sess.interner);
Ok(Parser(sess, cfg, srdr as reader))
@ -192,7 +194,7 @@ pub fn new_parser_from_file(sess: parse_sess,
/// Create a new parser for an entire crate, handling errors as appropriate
/// if the file doesn't exist
pub fn new_crate_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
pub fn new_crate_parser_from_file(sess: @mut ParseSess, cfg: ast::crate_cfg,
path: &Path) -> Parser {
match new_parser_from_file(sess, cfg, path) {
Ok(parser) => parser,
@ -204,7 +206,7 @@ pub fn new_crate_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
/// Create a new parser based on a span from an existing parser. Handles
/// error messages correctly when the file does not exist.
pub fn new_sub_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
pub fn new_sub_parser_from_file(sess: @mut ParseSess, cfg: ast::crate_cfg,
path: &Path, sp: span) -> Parser {
match new_parser_from_file(sess, cfg, path) {
Ok(parser) => parser,
@ -214,9 +216,9 @@ pub fn new_sub_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
}
}
pub fn new_parser_from_tts(sess: parse_sess, cfg: ast::crate_cfg,
pub fn new_parser_from_tts(sess: @mut ParseSess, cfg: ast::crate_cfg,
tts: ~[ast::token_tree]) -> Parser {
let trdr = lexer::new_tt_reader(sess.span_diagnostic, sess.interner,
let trdr = lexer::new_tt_reader(copy sess.span_diagnostic, sess.interner,
None, tts);
return Parser(sess, cfg, trdr as reader)
}

View File

@ -80,7 +80,7 @@ use parse::prec::{as_prec, token_to_binop};
use parse::token::{can_begin_expr, is_ident, is_ident_or_path};
use parse::token::{is_plain_ident, INTERPOLATED, special_idents};
use parse::token;
use parse::{new_sub_parser_from_file, next_node_id, parse_sess};
use parse::{new_sub_parser_from_file, next_node_id, ParseSess};
use print::pprust::expr_to_str;
use util::interner::Interner;
@ -179,10 +179,16 @@ pure fn maybe_append(+lhs: ~[attribute], rhs: Option<~[attribute]>)
}
struct ParsedItemsAndViewItems {
attrs_remaining: ~[attribute],
view_items: ~[@view_item],
items: ~[@item],
foreign_items: ~[@foreign_item]
}
/* ident is handled by common.rs */
pub fn Parser(sess: parse_sess
,
pub fn Parser(sess: @mut ParseSess,
cfg: ast::crate_cfg,
+rdr: reader) -> Parser {
@ -213,7 +219,7 @@ pub fn Parser(sess: parse_sess
}
pub struct Parser {
sess: parse_sess,
sess: @mut ParseSess,
cfg: crate_cfg,
mut token: token::Token,
mut span: span,
@ -1706,8 +1712,7 @@ pub impl Parser {
els = Some(elexpr);
hi = elexpr.span.hi;
}
let q = {cond: cond, then: thn, els: els, lo: lo, hi: hi};
self.mk_expr(q.lo, q.hi, expr_if(q.cond, q.then, q.els))
self.mk_expr(lo, hi, expr_if(cond, thn, els))
}
fn parse_fn_expr(sigil: Sigil) -> @expr {
@ -2470,11 +2475,11 @@ pub impl Parser {
maybe_whole!(pair_empty self, nt_block);
fn maybe_parse_inner_attrs_and_next(p: Parser, parse_attrs: bool) ->
{inner: ~[attribute], next: ~[attribute]} {
(~[attribute], ~[attribute]) {
if parse_attrs {
p.parse_inner_attrs_and_next()
} else {
{inner: ~[], next: ~[]}
(~[], ~[])
}
}
@ -2483,7 +2488,7 @@ pub impl Parser {
self.obsolete(copy self.span, ObsoleteUnsafeBlock);
}
self.expect(token::LBRACE);
let {inner: inner, next: next} =
let (inner, next) =
maybe_parse_inner_attrs_and_next(self, parse_attrs);
return (inner, self.parse_block_tail_(lo, default_blk, next));
}
@ -2508,10 +2513,12 @@ pub impl Parser {
let mut stmts = ~[];
let mut expr = None;
let {attrs_remaining: attrs_remaining,
view_items: view_items,
items: items, _} =
self.parse_items_and_view_items(first_item_attrs,
let ParsedItemsAndViewItems {
attrs_remaining: attrs_remaining,
view_items: view_items,
items: items,
_
} = self.parse_items_and_view_items(first_item_attrs,
IMPORTS_AND_ITEMS_ALLOWED, false);
for items.each |item| {
@ -2851,10 +2858,10 @@ pub impl Parser {
}
}
fn parse_fn_header() -> {ident: ident, tps: ~[ty_param]} {
fn parse_fn_header() -> (ident, ~[ty_param]) {
let id = self.parse_value_ident();
let ty_params = self.parse_ty_params();
return {ident: id, tps: ty_params};
(id, ty_params)
}
fn mk_item(+lo: BytePos, +hi: BytePos, +ident: ident,
@ -2869,10 +2876,10 @@ pub impl Parser {
}
fn parse_item_fn(purity: purity) -> item_info {
let t = self.parse_fn_header();
let (ident, tps) = self.parse_fn_header();
let decl = self.parse_fn_decl(|p| p.parse_arg());
let (inner_attrs, body) = self.parse_inner_attrs_and_block(true);
(t.ident, item_fn(decl, purity, t.tps, body), Some(inner_attrs))
(ident, item_fn(decl, purity, tps, body), Some(inner_attrs))
}
fn parse_method_name() -> ident {
@ -3200,10 +3207,12 @@ pub impl Parser {
fn parse_mod_items(term: token::Token,
+first_item_attrs: ~[attribute]) -> _mod {
// Shouldn't be any view items since we've already parsed an item attr
let {attrs_remaining: attrs_remaining,
view_items: view_items,
items: starting_items, _} =
self.parse_items_and_view_items(first_item_attrs,
let ParsedItemsAndViewItems {
attrs_remaining: attrs_remaining,
view_items: view_items,
items: starting_items,
_
} = self.parse_items_and_view_items(first_item_attrs,
VIEW_ITEMS_AND_ITEMS_ALLOWED,
true);
let mut items: ~[@item] = starting_items;
@ -3261,11 +3270,11 @@ pub impl Parser {
} else {
self.push_mod_path(id, outer_attrs);
self.expect(token::LBRACE);
let inner_attrs = self.parse_inner_attrs_and_next();
let m = self.parse_mod_items(token::RBRACE, inner_attrs.next);
let (inner, next) = self.parse_inner_attrs_and_next();
let m = self.parse_mod_items(token::RBRACE, next);
self.expect(token::RBRACE);
self.pop_mod_path();
(id, item_mod(m), Some(inner_attrs.inner))
(id, item_mod(m), Some(inner))
};
// XXX: Transitionary hack to do the template work inside core
@ -3355,9 +3364,9 @@ pub impl Parser {
let p0 =
new_sub_parser_from_file(self.sess, self.cfg,
&full_path, id_sp);
let inner_attrs = p0.parse_inner_attrs_and_next();
let mod_attrs = vec::append(outer_attrs, inner_attrs.inner);
let first_item_outer_attrs = inner_attrs.next;
let (inner, next) = p0.parse_inner_attrs_and_next();
let mod_attrs = vec::append(outer_attrs, inner);
let first_item_outer_attrs = next;
let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs);
return (ast::item_mod(m0), mod_attrs);
@ -3373,13 +3382,13 @@ pub impl Parser {
let lo = self.span.lo;
let vis = self.parse_visibility();
let purity = self.parse_fn_purity();
let t = self.parse_fn_header();
let (ident, tps) = self.parse_fn_header();
let decl = self.parse_fn_decl(|p| p.parse_arg());
let mut hi = self.span.hi;
self.expect(token::SEMI);
@ast::foreign_item { ident: t.ident,
@ast::foreign_item { ident: ident,
attrs: attrs,
node: foreign_item_fn(decl, purity, t.tps),
node: foreign_item_fn(decl, purity, tps),
id: self.get_id(),
span: mk_sp(lo, hi),
vis: vis }
@ -3428,11 +3437,12 @@ pub impl Parser {
+first_item_attrs: ~[attribute])
-> foreign_mod {
// Shouldn't be any view items since we've already parsed an item attr
let {attrs_remaining: attrs_remaining,
view_items: view_items,
items: _,
foreign_items: foreign_items} =
self.parse_items_and_view_items(first_item_attrs,
let ParsedItemsAndViewItems {
attrs_remaining: attrs_remaining,
view_items: view_items,
items: _,
foreign_items: foreign_items
} = self.parse_items_and_view_items(first_item_attrs,
VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED,
true);
@ -3504,17 +3514,13 @@ pub impl Parser {
None => abi = special_idents::c_abi,
}
let extra_attrs = self.parse_inner_attrs_and_next();
let m = self.parse_foreign_mod_items(sort,
abi,
extra_attrs.next);
let (inner, next) = self.parse_inner_attrs_and_next();
let m = self.parse_foreign_mod_items(sort, abi, next);
self.expect(token::RBRACE);
return iovi_item(self.mk_item(lo, self.last_span.hi, ident,
item_foreign_mod(m), visibility,
maybe_append(attrs,
Some(extra_attrs.
inner))));
item_foreign_mod(m), visibility,
maybe_append(attrs, Some(inner))));
}
match abi_opt {
@ -3536,20 +3542,20 @@ pub impl Parser {
})
}
fn parse_type_decl() -> {lo: BytePos, ident: ident} {
fn parse_type_decl() -> (BytePos, ident) {
let lo = self.last_span.lo;
let id = self.parse_ident();
return {lo: lo, ident: id};
(lo, id)
}
fn parse_item_type() -> item_info {
let t = self.parse_type_decl();
let (_, ident) = self.parse_type_decl();
self.parse_region_param();
let tps = self.parse_ty_params();
self.expect(token::EQ);
let ty = self.parse_ty(false);
self.expect(token::SEMI);
(t.ident, item_ty(ty, tps), None)
(ident, item_ty(ty, tps), None)
}
fn parse_region_param() {
@ -4046,10 +4052,7 @@ pub impl Parser {
fn parse_items_and_view_items(+first_item_attrs: ~[attribute],
mode: view_item_parse_mode,
macros_allowed: bool)
-> {attrs_remaining: ~[attribute],
view_items: ~[@view_item],
items: ~[@item],
foreign_items: ~[@foreign_item]} {
-> ParsedItemsAndViewItems {
let mut attrs = vec::append(first_item_attrs,
self.parse_outer_attributes());
@ -4100,21 +4103,23 @@ pub impl Parser {
attrs = self.parse_outer_attributes();
}
{attrs_remaining: attrs,
view_items: view_items,
items: items,
foreign_items: foreign_items}
ParsedItemsAndViewItems {
attrs_remaining: attrs,
view_items: view_items,
items: items,
foreign_items: foreign_items
}
}
// Parses a source module as a crate
fn parse_crate_mod(_cfg: crate_cfg) -> @crate {
let lo = self.span.lo;
let crate_attrs = self.parse_inner_attrs_and_next();
let first_item_outer_attrs = crate_attrs.next;
let (inner, next) = self.parse_inner_attrs_and_next();
let first_item_outer_attrs = next;
let m = self.parse_mod_items(token::EOF, first_item_outer_attrs);
@spanned(lo, self.span.lo,
ast::crate_ { module: m,
attrs: crate_attrs.inner,
attrs: inner,
config: self.cfg })
}

View File

@ -72,9 +72,15 @@ use core::vec;
#[deriving_eq]
pub enum breaks { consistent, inconsistent, }
pub type break_t = {offset: int, blank_space: int};
pub struct break_t {
offset: int,
blank_space: int
}
pub type begin_t = {offset: int, breaks: breaks};
pub struct begin_t {
offset: int,
breaks: breaks
}
pub enum token {
STRING(@~str, int),
@ -90,7 +96,10 @@ pub impl token {
}
fn is_hardbreak_tok(&self) -> bool {
match *self {
BREAK({offset: 0, blank_space: bs }) if bs == size_infinity =>
BREAK(break_t {
offset: 0,
blank_space: bs
}) if bs == size_infinity =>
true,
_ =>
false
@ -128,7 +137,10 @@ pub fn buf_str(toks: ~[token], szs: ~[int], left: uint, right: uint,
pub enum print_stack_break { fits, broken(breaks), }
pub type print_stack_elt = {offset: int, pbreak: print_stack_break};
pub struct print_stack_elt {
offset: int,
pbreak: print_stack_break
}
pub const size_infinity: int = 0xffff;
@ -445,7 +457,10 @@ pub impl Printer {
if n != 0u {
self.print_stack[n - 1u]
} else {
{offset: 0, pbreak: broken(inconsistent)}
print_stack_elt {
offset: 0,
pbreak: broken(inconsistent)
}
}
}
fn print_str(&mut self, s: ~str) {
@ -468,12 +483,16 @@ pub impl Printer {
if L > self.space {
let col = self.margin - self.space + b.offset;
debug!("print BEGIN -> push broken block at col %d", col);
self.print_stack.push({offset: col,
pbreak: broken(b.breaks)});
self.print_stack.push(print_stack_elt {
offset: col,
pbreak: broken(b.breaks)
});
} else {
debug!("print BEGIN -> push fitting block");
self.print_stack.push({offset: 0,
pbreak: fits});
self.print_stack.push(print_stack_elt {
offset: 0,
pbreak: fits
});
}
}
END => {
@ -527,7 +546,10 @@ pub impl Printer {
// Convenience functions to talk to the printer.
pub fn box(p: @mut Printer, indent: uint, b: breaks) {
p.pretty_print(BEGIN({offset: indent as int, breaks: b}));
p.pretty_print(BEGIN(begin_t {
offset: indent as int,
breaks: b
}));
}
pub fn ibox(p: @mut Printer, indent: uint) { box(p, indent, inconsistent); }
@ -535,7 +557,10 @@ pub fn ibox(p: @mut Printer, indent: uint) { box(p, indent, inconsistent); }
pub fn cbox(p: @mut Printer, indent: uint) { box(p, indent, consistent); }
pub fn break_offset(p: @mut Printer, n: uint, off: int) {
p.pretty_print(BREAK({offset: off, blank_space: n as int}));
p.pretty_print(BREAK(break_t {
offset: off,
blank_space: n as int
}));
}
pub fn end(p: @mut Printer) { p.pretty_print(END); }
@ -563,7 +588,7 @@ pub fn space(p: @mut Printer) { spaces(p, 1u); }
pub fn hardbreak(p: @mut Printer) { spaces(p, size_infinity as uint); }
pub fn hardbreak_tok_offset(off: int) -> token {
return BREAK({offset: off, blank_space: size_infinity});
BREAK(break_t {offset: off, blank_space: size_infinity})
}
pub fn hardbreak_tok() -> token { return hardbreak_tok_offset(0); }

View File

@ -108,17 +108,18 @@ pub fn print_crate(cm: @CodeMap, intr: @ident_interner,
span_diagnostic: diagnostic::span_handler,
crate: @ast::crate, filename: ~str, in: io::Reader,
out: io::Writer, ann: pp_ann, is_expanded: bool) {
let r = comments::gather_comments_and_literals(span_diagnostic,
filename, in);
let (cmnts, lits) =
comments::gather_comments_and_literals(span_diagnostic,
filename, in);
let s = @ps {
s: pp::mk_printer(out, default_columns),
cm: Some(cm),
intr: intr,
comments: Some(r.cmnts),
comments: Some(cmnts),
// If the code is post expansion, don't use the table of
// literals, since it doesn't correspond with the literals
// in the AST anymore.
literals: if is_expanded { None } else { Some(r.lits) },
literals: if is_expanded { None } else { Some(lits) },
cur_cmnt_and_lit: @mut CurrentCommentAndLiteral {
cur_cmnt: 0,
cur_lit: 0

View File

@ -17,7 +17,6 @@
#[crate_type = "lib"];
#[legacy_modes];
#[legacy_records];
#[allow(vecs_implicitly_copyable)];
#[allow(non_camel_case_types)];