diff --git a/src/librustsyntax/ext/qquote.rs b/src/librustsyntax/ext/qquote.rs index 02d3c3d095b..2628be0109c 100644 --- a/src/librustsyntax/ext/qquote.rs +++ b/src/librustsyntax/ext/qquote.rs @@ -49,7 +49,7 @@ impl of qq_helper for @ast::expr { } } fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { - mk_path(cx, sp, ["syntax", "parse", "parser", "parse_expr"]) + mk_path(cx, sp, ["syntax", "ext", "qquote", "parse_expr"]) } fn get_fold_fn() -> str {"fold_expr"} } @@ -90,7 +90,7 @@ impl of qq_helper for @ast::pat { fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_pat(self, cx, v);} fn extract_mac() -> option<ast::mac_> {fail} fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { - mk_path(cx, sp, ["syntax", "parse", "parser", "parse_pat"]) + mk_path(cx, sp, ["syntax", "ext", "qquote", "parse_pat"]) } fn get_fold_fn() -> str {"fold_pat"} } @@ -154,29 +154,23 @@ fn expand_ast(ecx: ext_ctxt, _sp: span, ret alt what { "crate" {finish(ecx, body, parse_crate)} - "expr" {finish(ecx, body, parser::parse_expr)} + "expr" {finish(ecx, body, parse_expr)} "ty" {finish(ecx, body, parse_ty)} "item" {finish(ecx, body, parse_item)} "stmt" {finish(ecx, body, parse_stmt)} - "pat" {finish(ecx, body, parser::parse_pat)} + "pat" {finish(ecx, body, parse_pat)} _ {ecx.span_fatal(_sp, "unsupported ast type")} }; } -fn parse_crate(p: parser) -> @ast::crate { - parser::parse_crate_mod(p, []) -} - -fn parse_ty(p: parser) -> @ast::ty { - parser::parse_ty(p, false) -} - -fn parse_stmt(p: parser) -> @ast::stmt { - parser::parse_stmt(p, []) -} +fn parse_crate(p: parser) -> @ast::crate { p.parse_crate_mod([]) } +fn parse_ty(p: parser) -> @ast::ty { p.parse_ty(false) } +fn parse_stmt(p: parser) -> @ast::stmt { p.parse_stmt([]) } +fn parse_expr(p: parser) -> @ast::expr { p.parse_expr() } +fn parse_pat(p: parser) -> @ast::pat { p.parse_pat() } fn parse_item(p: parser) -> @ast::item { - alt parser::parse_item(p, [], ast::public) { + alt p.parse_item([], ast::public) { some(item) { item } none { fail "parse_item: parsing an item failed"; } } diff --git a/src/librustsyntax/parse.rs b/src/librustsyntax/parse.rs index a8000ba4ac0..482dd5930d3 100644 --- a/src/librustsyntax/parse.rs +++ b/src/librustsyntax/parse.rs @@ -46,13 +46,11 @@ fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg, let leading_attrs = attr::parse_inner_attrs_and_next(p); let crate_attrs = leading_attrs.inner; let first_cdir_attr = leading_attrs.next; - let cdirs = parser::parse_crate_directives( - p, token::EOF, first_cdir_attr); + let cdirs = p.parse_crate_directives(token::EOF, first_cdir_attr); sess.chpos = p.reader.chpos; sess.byte_pos = sess.byte_pos + p.reader.pos; let cx = - @{p: p, - sess: sess, + @{sess: sess, cfg: p.cfg}; let (companionmod, _) = path::splitext(path::basename(input)); let (m, attrs) = eval::eval_crate_directives_to_mod( @@ -69,7 +67,7 @@ fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg, fn parse_crate_from_source_file(input: str, cfg: ast::crate_cfg, sess: parse_sess) -> @ast::crate { let p = new_parser_from_file(sess, cfg, input, parser::SOURCE_FILE); - let r = parser::parse_crate_mod(p, cfg); + let r = p.parse_crate_mod(cfg); sess.chpos = p.reader.chpos; sess.byte_pos = sess.byte_pos + p.reader.pos; ret r; @@ -79,7 +77,7 @@ fn parse_crate_from_source_str(name: str, source: @str, cfg: ast::crate_cfg, sess: parse_sess) -> @ast::crate { let p = new_parser_from_source_str( sess, cfg, name, codemap::fss_none, source); - let r = parser::parse_crate_mod(p, cfg); + let r = p.parse_crate_mod(cfg); sess.chpos = 
p.reader.chpos; sess.byte_pos = sess.byte_pos + p.reader.pos; ret r; @@ -89,7 +87,7 @@ fn parse_expr_from_source_str(name: str, source: @str, cfg: ast::crate_cfg, sess: parse_sess) -> @ast::expr { let p = new_parser_from_source_str( sess, cfg, name, codemap::fss_none, source); - let r = parser::parse_expr(p); + let r = p.parse_expr(); sess.chpos = p.reader.chpos; sess.byte_pos = sess.byte_pos + p.reader.pos; ret r; @@ -100,7 +98,7 @@ fn parse_item_from_source_str(name: str, source: @str, cfg: ast::crate_cfg, sess: parse_sess) -> option<@ast::item> { let p = new_parser_from_source_str( sess, cfg, name, codemap::fss_none, source); - let r = parser::parse_item(p, attrs, vis); + let r = p.parse_item(attrs, vis); sess.chpos = p.reader.chpos; sess.byte_pos = sess.byte_pos + p.reader.pos; ret r; @@ -130,23 +128,6 @@ fn next_node_id(sess: parse_sess) -> node_id { ret rv; } -fn new_parser(sess: parse_sess, cfg: ast::crate_cfg, rdr: lexer::reader, - ftype: parser::file_type) -> parser { - let tok0 = lexer::next_token(rdr); - let span0 = ast_util::mk_sp(tok0.chpos, rdr.chpos); - @{sess: sess, - cfg: cfg, - file_type: ftype, - mut token: tok0.tok, - mut span: span0, - mut last_span: span0, - buffer: dvec::dvec(), - mut restriction: parser::UNRESTRICTED, - reader: rdr, - keywords: token::keyword_table(), - restricted_keywords: token::restricted_keyword_table()} -} - fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg, name: str, ss: codemap::file_substr, source: @str) -> parser { @@ -157,7 +138,7 @@ fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg, let itr = @interner::mk(str::hash, str::eq); let rdr = lexer::new_reader(sess.span_diagnostic, filemap, itr); - ret new_parser(sess, cfg, rdr, ftype); + ret parser(sess, cfg, rdr, ftype); } fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: str, @@ -177,5 +158,5 @@ fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: str, sess.cm.files.push(filemap); let itr = @interner::mk(str::hash, str::eq); let rdr = lexer::new_reader(sess.span_diagnostic, filemap, itr); - ret new_parser(sess, cfg, rdr, ftype); + ret parser(sess, cfg, rdr, ftype); } diff --git a/src/librustsyntax/parse/attr.rs b/src/librustsyntax/parse/attr.rs index e8d73915577..800efaff185 100644 --- a/src/librustsyntax/parse/attr.rs +++ b/src/librustsyntax/parse/attr.rs @@ -4,8 +4,6 @@ import common::{parse_seq, seq_sep, expect, parse_ident}; -import parser::{parse_lit, - parse_syntax_ext_naked}; export attr_or_ext; export parse_outer_attributes; @@ -31,7 +29,7 @@ fn parse_outer_attrs_or_ext( || p.look_ahead(1u) == token::LBRACKET || expect_item_next) { p.bump(); - ret some(right(parse_syntax_ext_naked(p, lo))); + ret some(right(p.parse_syntax_ext_naked(lo))); } else { ret none; } } else { ret none; } } @@ -97,7 +95,7 @@ fn parse_meta_item(p: parser) -> @ast::meta_item { alt p.token { token::EQ { p.bump(); - let lit = parse_lit(p); + let lit = p.parse_lit(); let mut hi = p.span.hi; ret @spanned(lo, hi, ast::meta_name_value(ident, lit)); } @@ -115,7 +113,7 @@ fn parse_meta_item(p: parser) -> @ast::meta_item { fn parse_meta_seq(p: parser) -> [@ast::meta_item] { ret parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA), - parse_meta_item, p).node; + p, {|p| parse_meta_item(p)}).node; } fn parse_optional_meta(p: parser) -> [@ast::meta_item] { diff --git a/src/librustsyntax/parse/common.rs b/src/librustsyntax/parse/common.rs index a4d30cf2627..19f436708c1 100644 --- a/src/librustsyntax/parse/common.rs +++ 
b/src/librustsyntax/parse/common.rs @@ -126,8 +126,7 @@ fn expect_gt(p: parser) { } fn parse_seq_to_before_gt<T: copy>(sep: option<token::token>, - f: fn(parser) -> T, - p: parser) -> [T] { + p: parser, f: fn(parser) -> T) -> [T] { let mut first = true; let mut v = []; while p.token != token::GT && p.token != token::BINOP(token::SHR) { @@ -142,27 +141,26 @@ fn parse_seq_to_before_gt<T: copy>(sep: option<token::token>, } fn parse_seq_to_gt<T: copy>(sep: option<token::token>, - f: fn(parser) -> T, p: parser) -> [T] { - let v = parse_seq_to_before_gt(sep, f, p); + p: parser, f: fn(parser) -> T) -> [T] { + let v = parse_seq_to_before_gt(sep, p, f); expect_gt(p); ret v; } fn parse_seq_lt_gt<T: copy>(sep: option<token::token>, - f: fn(parser) -> T, - p: parser) -> spanned<[T]> { + p: parser, f: fn(parser) -> T) -> spanned<[T]> { let lo = p.span.lo; expect(p, token::LT); - let result = parse_seq_to_before_gt::<T>(sep, f, p); + let result = parse_seq_to_before_gt::<T>(sep, p, f); let hi = p.span.hi; expect_gt(p); ret spanned(lo, hi, result); } -fn parse_seq_to_end<T: copy>(ket: token::token, sep: seq_sep, - f: fn(parser) -> T, p: parser) -> [T] { - let val = parse_seq_to_before_end(ket, sep, f, p); +fn parse_seq_to_end<T: copy>(ket: token::token, sep: seq_sep, p: parser, + f: fn(parser) -> T) -> [T] { + let val = parse_seq_to_before_end(ket, sep, p, f); p.bump(); ret val; } @@ -182,9 +180,8 @@ fn seq_sep_none() -> seq_sep { ret {sep: option::none, trailing_opt: false}; } -fn parse_seq_to_before_end<T: copy>(ket: token::token, - sep: seq_sep, - f: fn(parser) -> T, p: parser) -> [T] { +fn parse_seq_to_before_end<T: copy>(ket: token::token, sep: seq_sep, + p: parser, f: fn(parser) -> T) -> [T] { let mut first: bool = true; let mut v: [T] = []; while p.token != ket { @@ -198,12 +195,11 @@ fn parse_seq_to_before_end<T: copy>(ket: token::token, ret v; } -fn parse_seq<T: copy>(bra: token::token, ket: token::token, - sep: seq_sep, f: fn(parser) -> T, - p: parser) -> spanned<[T]> { +fn parse_seq<T: copy>(bra: token::token, ket: token::token, sep: seq_sep, + p: parser, f: fn(parser) -> T) -> spanned<[T]> { let lo = p.span.lo; expect(p, bra); - let result = parse_seq_to_before_end::<T>(ket, sep, f, p); + let result = parse_seq_to_before_end::<T>(ket, sep, p, f); let hi = p.span.hi; p.bump(); ret spanned(lo, hi, result); diff --git a/src/librustsyntax/parse/eval.rs b/src/librustsyntax/parse/eval.rs index b7d400de3dc..1e883083e94 100644 --- a/src/librustsyntax/parse/eval.rs +++ b/src/librustsyntax/parse/eval.rs @@ -1,12 +1,10 @@ -import parser::{parser, - parse_mod_items, SOURCE_FILE}; +import parser::{parser, SOURCE_FILE}; import attr::parse_inner_attrs_and_next; export eval_crate_directives_to_mod; type ctx = - @{p: parser, - sess: parse::parse_sess, + @{sess: parse::parse_sess, cfg: ast::crate_cfg}; fn eval_crate_directives(cx: ctx, cdirs: [@ast::crate_directive], prefix: str, @@ -66,11 +64,10 @@ fn parse_companion_mod(cx: ctx, prefix: str, suffix: option<str>) #debug("looking for companion mod %s", modpath); if file_exists(modpath) { #debug("found companion mod"); - let p0 = new_parser_from_file(cx.sess, cx.cfg, modpath, - SOURCE_FILE); + let p0 = new_parser_from_file(cx.sess, cx.cfg, modpath, SOURCE_FILE); let inner_attrs = parse_inner_attrs_and_next(p0); let first_item_outer_attrs = inner_attrs.next; - let m0 = parse_mod_items(p0, token::EOF, first_item_outer_attrs); + let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs); cx.sess.chpos = p0.reader.chpos; cx.sess.byte_pos = cx.sess.byte_pos + 
p0.reader.pos; ret (m0.view_items, m0.items, inner_attrs.inner); @@ -103,11 +100,10 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: str, let inner_attrs = parse_inner_attrs_and_next(p0); let mod_attrs = attrs + inner_attrs.inner; let first_item_outer_attrs = inner_attrs.next; - let m0 = parse_mod_items(p0, token::EOF, first_item_outer_attrs); + let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs); - let i = - parser::mk_item(p0, cdir.span.lo, cdir.span.hi, id, - ast::item_mod(m0), ast::public, mod_attrs); + let i = p0.mk_item(cdir.span.lo, cdir.span.hi, id, + ast::item_mod(m0), ast::public, mod_attrs); // Thread defids, chpos and byte_pos through the parsers cx.sess.chpos = p0.reader.chpos; cx.sess.byte_pos = cx.sess.byte_pos + p0.reader.pos; diff --git a/src/librustsyntax/parse/parser.rs b/src/librustsyntax/parse/parser.rs index bf55ebf43c6..c81d9ace376 100644 --- a/src/librustsyntax/parse/parser.rs +++ b/src/librustsyntax/parse/parser.rs @@ -17,20 +17,9 @@ import dvec::{dvec, extensions}; export expect; export file_type; -export mk_item; -export restriction; export parser; -export parse_crate_directives; -export parse_crate_mod; export parse_expr; -export parse_item; -export parse_mod_items; export parse_pat; -export parse_seq; -export parse_stmt; -export parse_ty; -export parse_lit; -export parse_syntax_ext_naked; // FIXME: #ast expects to find this here but it's actually defined in `parse` // Fixing this will be easier when we have export decls on individual items -- @@ -39,6 +28,12 @@ export parse_syntax_ext_naked; import parse_from_source_str; export parse_from_source_str; +// TODO: remove these once we go around a snapshot cycle. +// These are here for the old way that #ast (qquote.rs) worked +fn parse_expr(p: parser) -> @ast::expr { p.parse_expr() } +fn parse_pat(p: parser) -> @ast::pat { p.parse_pat() } + + enum restriction { UNRESTRICTED, RESTRICT_STMT_EXPR, @@ -48,21 +43,62 @@ enum restriction { enum file_type { CRATE_FILE, SOURCE_FILE, } -type parser = @{ - sess: parse_sess, - cfg: crate_cfg, - file_type: file_type, - mut token: token::token, - mut span: span, - mut last_span: span, - buffer: dvec<{tok: token::token, span: span}>, - mut restriction: restriction, - reader: reader, - keywords: hashmap<str, ()>, - restricted_keywords: hashmap<str, ()> -}; -impl parser for parser { +// We don't allow single-entry tuples in the true AST; that indicates a +// parenthesized expression. However, we preserve them temporarily while +// parsing because `(while{...})+3` parses differently from `while{...}+3`. +// +// To reflect the fact that the @expr is not a true expr that should be +// part of the AST, we wrap such expressions in the pexpr enum. They +// can then be converted to true expressions by a call to `to_expr()`. 
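The comment above explains why parenthesized expressions are carried through parsing as one-element expr_tup nodes wrapped in pexpr. A rough sketch of how a caller sees this, using only names that appear in this patch (the local variable names are illustrative):

    // For source text "(x)", parse_bottom_expr() yields a pexpr-wrapped expr_tup([x])
    // so precedence code can still tell the expression was parenthesized; to_expr()
    // strips both the wrapper and the one-element tuple before anything reaches the AST.
    let pe = self.parse_bottom_expr();
    let e = self.to_expr(pe);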
+enum pexpr { + pexpr(@expr), +} + +/* + So that we can distinguish a class ctor or dtor + from other class members + */ +enum class_contents { ctor_decl(fn_decl, blk, codemap::span), + dtor_decl(blk, codemap::span), + members([@class_member]) } + +type arg_or_capture_item = either<arg, capture_item>; +type item_info = (ident, item_, option<[attribute]>); + +class parser { + let sess: parse_sess; + let cfg: crate_cfg; + let file_type: file_type; + let mut token: token::token; + let mut span: span; + let mut last_span: span; + let buffer: dvec<{tok: token::token, span: span}>; + let mut restriction: restriction; + let reader: reader; + let keywords: hashmap<str, ()>; + let restricted_keywords: hashmap<str, ()>; + + new(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, + ftype: file_type) { + let tok0 = lexer::next_token(rdr); + let span0 = ast_util::mk_sp(tok0.chpos, rdr.chpos); + self.sess = sess; + self.cfg = cfg; + self.file_type = ftype; + self.token = tok0.tok; + self.span = span0; + self.last_span = span0; + self.buffer = dvec::dvec(); + self.restriction = UNRESTRICTED; + self.reader = rdr; + self.keywords = token::keyword_table(); + self.restricted_keywords = token::restricted_keyword_table(); + } + + //TODO: uncomment when destructors work + //drop {} /* do not copy the parser; its state is tied to outside state */ + fn bump() { self.last_span = self.span; if self.buffer.len() == 0u { @@ -103,898 +139,882 @@ impl parser for parser { interner::get(*self.reader.interner, i) } fn get_id() -> node_id { next_node_id(self.sess) } -} -fn parse_ty_fn(p: parser) -> fn_decl { - fn parse_fn_input_ty(p: parser) -> arg { - let mode = parse_arg_mode(p); - let name = if is_plain_ident(p.token) - && p.look_ahead(1u) == token::COLON { - - let name = parse_value_ident(p); - p.bump(); - name - } else { "" }; - ret {mode: mode, ty: parse_ty(p, false), ident: name, id: p.get_id()}; - } - let inputs = - parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA), - parse_fn_input_ty, p); - // FIXME: constrs is empty because right now, higher-order functions - // can't have constrained types. - // Not sure whether that would be desirable anyway. See #34 for the - // story on constrained types.
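With the record-based parser type replaced by the class above, the free new_parser helper removed from parse.rs earlier in this patch is no longer needed: callers run the class initializer directly and then use the former free functions as methods. A rough sketch of the new construction path, assembled only from calls that appear in these hunks (variable names are illustrative):

    let rdr = lexer::new_reader(sess.span_diagnostic, filemap, itr);
    let p = parser(sess, cfg, rdr, parser::SOURCE_FILE); // class ctor does what new_parser() did
    let e = p.parse_expr();                              // parse entry points are now methods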
- let constrs: [@constr] = []; - let (ret_style, ret_ty) = parse_ret_ty(p); - ret {inputs: inputs.node, output: ret_ty, - purity: impure_fn, cf: ret_style, - constraints: constrs}; -} - -fn parse_ty_methods(p: parser) -> [ty_method] { - parse_seq(token::LBRACE, token::RBRACE, seq_sep_none(), {|p| - let attrs = parse_outer_attributes(p); - let flo = p.span.lo; - let pur = parse_fn_purity(p); - let ident = parse_method_name(p); - let tps = parse_ty_params(p); - let d = parse_ty_fn(p), fhi = p.last_span.hi; - expect(p, token::SEMI); - {ident: ident, attrs: attrs, decl: {purity: pur with d}, tps: tps, - span: mk_sp(flo, fhi)} - }, p).node -} - -fn parse_mt(p: parser) -> mt { - let mutbl = parse_mutability(p); - let t = parse_ty(p, false); - ret {ty: t, mutbl: mutbl}; -} - -fn parse_ty_field(p: parser) -> ty_field { - let lo = p.span.lo; - let mutbl = parse_mutability(p); - let id = parse_ident(p); - expect(p, token::COLON); - let ty = parse_ty(p, false); - ret spanned(lo, ty.span.hi, {ident: id, mt: {ty: ty, mutbl: mutbl}}); -} - -// if i is the jth ident in args, return j -// otherwise, fail -fn ident_index(p: parser, args: [arg], i: ident) -> uint { - let mut j = 0u; - for args.each {|a| if a.ident == i { ret j; } j += 1u; } - p.fatal("unbound variable `" + i + "` in constraint arg"); -} - -fn parse_type_constr_arg(p: parser) -> @ty_constr_arg { - let sp = p.span; - let mut carg = carg_base; - expect(p, token::BINOP(token::STAR)); - if p.token == token::DOT { - // "*..." notation for record fields - p.bump(); - let pth = parse_path_without_tps(p); - carg = carg_ident(pth); - } - // No literals yet, I guess? - ret @{node: carg, span: sp}; -} - -fn parse_constr_arg(args: [arg], p: parser) -> @constr_arg { - let sp = p.span; - let mut carg = carg_base; - if p.token == token::BINOP(token::STAR) { - p.bump(); - } else { - let i: ident = parse_value_ident(p); - carg = carg_ident(ident_index(p, args, i)); - } - ret @{node: carg, span: sp}; -} - -fn parse_ty_constr(fn_args: [arg], p: parser) -> @constr { - let lo = p.span.lo; - let path = parse_path_without_tps(p); - let args: {node: [@constr_arg], span: span} = - parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA), - {|p| parse_constr_arg(fn_args, p)}, p); - ret @spanned(lo, args.span.hi, - {path: path, args: args.node, id: p.get_id()}); -} - -fn parse_constr_in_type(p: parser) -> @ty_constr { - let lo = p.span.lo; - let path = parse_path_without_tps(p); - let args: [@ty_constr_arg] = - parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA), - parse_type_constr_arg, p).node; - let hi = p.span.lo; - let tc: ty_constr_ = {path: path, args: args, id: p.get_id()}; - ret @spanned(lo, hi, tc); -} - - -fn parse_constrs<T: copy>(pser: fn(parser) -> @constr_general<T>, - p: parser) -> - [@constr_general<T>] { - let mut constrs: [@constr_general<T>] = []; - loop { - let constr = pser(p); - constrs += [constr]; - if p.token == token::COMMA { p.bump(); } else { ret constrs; } - }; -} - -fn parse_type_constraints(p: parser) -> [@ty_constr] { - ret parse_constrs(parse_constr_in_type, p); -} - -fn parse_ret_ty(p: parser) -> (ret_style, @ty) { - ret if eat(p, token::RARROW) { - let lo = p.span.lo; - if eat(p, token::NOT) { - (noreturn, @{id: p.get_id(), - node: ty_bot, - span: mk_sp(lo, p.last_span.hi)}) - } else { - (return_val, parse_ty(p, false)) - } - } else { - let pos = p.span.lo; - (return_val, @{id: p.get_id(), - node: ty_nil, - span: mk_sp(pos, pos)}) - } -} - -fn region_from_name(p: parser, s: option<str>) -> @region { - let r = alt s { 
- some (string) { re_named(string) } - none { re_anon } - }; - - @{id: p.get_id(), node: r} -} - -// Parses something like "&x" -fn parse_region(p: parser) -> @region { - expect(p, token::BINOP(token::AND)); - alt p.token { - token::IDENT(sid, _) { - p.bump(); - let n = p.get_str(sid); - region_from_name(p, some(n)) - } - _ { - region_from_name(p, none) - } - } -} - -// Parses something like "&x." (note the trailing dot) -fn parse_region_dot(p: parser) -> @region { - let name = - alt p.token { - token::IDENT(sid, _) if p.look_ahead(1u) == token::DOT { - p.bump(); p.bump(); - some(p.get_str(sid)) - } - _ { none } - }; - region_from_name(p, name) -} - -fn parse_ty(p: parser, colons_before_params: bool) -> @ty { - let lo = p.span.lo; - - alt maybe_parse_dollar_mac(p) { - some(e) { - ret @{id: p.get_id(), - node: ty_mac(spanned(lo, p.span.hi, e)), - span: mk_sp(lo, p.span.hi)}; - } - none {} - } - - let t = if p.token == token::LPAREN { - p.bump(); - if p.token == token::RPAREN { - p.bump(); - ty_nil - } else { - let mut ts = [parse_ty(p, false)]; - while p.token == token::COMMA { - p.bump(); - ts += [parse_ty(p, false)]; - } - let t = if vec::len(ts) == 1u { ts[0].node } - else { ty_tup(ts) }; - expect(p, token::RPAREN); - t - } - } else if p.token == token::AT { - p.bump(); - ty_box(parse_mt(p)) - } else if p.token == token::TILDE { - p.bump(); - ty_uniq(parse_mt(p)) - } else if p.token == token::BINOP(token::STAR) { - p.bump(); - ty_ptr(parse_mt(p)) - } else if p.token == token::LBRACE { - let elems = - parse_seq(token::LBRACE, token::RBRACE, seq_sep_opt(token::COMMA), - parse_ty_field, p); - if vec::len(elems.node) == 0u { unexpected_last(p, token::RBRACE); } - let hi = elems.span.hi; - - let t = ty_rec(elems.node); - if p.token == token::COLON { - p.bump(); - ty_constr(@{id: p.get_id(), - node: t, - span: mk_sp(lo, hi)}, - parse_type_constraints(p)) - } else { t } - } else if p.token == token::LBRACKET { - expect(p, token::LBRACKET); - let t = ty_vec(parse_mt(p)); - expect(p, token::RBRACKET); - t - } else if p.token == token::BINOP(token::AND) { - p.bump(); - let region = parse_region_dot(p); - let mt = parse_mt(p); - ty_rptr(region, mt) - } else if eat_keyword(p, "fn") { - let proto = parse_fn_ty_proto(p); - alt proto { - proto_bare { p.warn("fn is deprecated, use native fn"); } - _ { /* fallthrough */ } - } - ty_fn(proto, parse_ty_fn(p)) - } else if eat_keyword(p, "native") { - expect_keyword(p, "fn"); - ty_fn(proto_bare, parse_ty_fn(p)) - } else if p.token == token::MOD_SEP || is_ident(p.token) { - let path = parse_path_with_tps(p, colons_before_params); - ty_path(path, p.get_id()) - } else { p.fatal("expecting type"); }; - - fn mk_ty(p: parser, t: ty_, lo: uint, hi: uint) -> @ty { - @{id: p.get_id(), - node: t, - span: mk_sp(lo, hi)} - } - - let ty = mk_ty(p, t, lo, p.last_span.hi); - - // Consider a vstore suffix like /@ or /~ - alt maybe_parse_vstore(p) { - none { - ret ty; - } - some(v) { - let t1 = ty_vstore(ty, v); - ret mk_ty(p, t1, lo, p.last_span.hi); - } - } -} - -fn parse_arg_mode(p: parser) -> mode { - if eat(p, token::BINOP(token::AND)) { - expl(by_mutbl_ref) - } else if eat(p, token::BINOP(token::MINUS)) { - expl(by_move) - } else if eat(p, token::ANDAND) { - expl(by_ref) - } else if eat(p, token::BINOP(token::PLUS)) { - if eat(p, token::BINOP(token::PLUS)) { - expl(by_val) - } else { - expl(by_copy) - } - } else { infer(p.get_id()) } -} - -fn parse_capture_item_or( - p: parser, - parse_arg_fn: fn() -> arg_or_capture_item) -> arg_or_capture_item { - - fn 
parse_capture_item(p: parser, is_move: bool) -> capture_item { - let id = p.get_id(); - let sp = mk_sp(p.span.lo, p.span.hi); - let ident = parse_ident(p); - {id: id, is_move: is_move, name: ident, span: sp} - } - - if eat_keyword(p, "move") { - either::right(parse_capture_item(p, true)) - } else if eat_keyword(p, "copy") { - either::right(parse_capture_item(p, false)) - } else { - parse_arg_fn() - } -} - -fn parse_arg(p: parser) -> arg_or_capture_item { - let m = parse_arg_mode(p); - let i = parse_value_ident(p); - expect(p, token::COLON); - let t = parse_ty(p, false); - either::left({mode: m, ty: t, ident: i, id: p.get_id()}) -} - -fn parse_arg_or_capture_item(p: parser) -> arg_or_capture_item { - parse_capture_item_or(p) {|| parse_arg(p) } -} - -fn parse_fn_block_arg(p: parser) -> arg_or_capture_item { - parse_capture_item_or(p) {|| - let m = parse_arg_mode(p); - let i = parse_value_ident(p); - let t = if eat(p, token::COLON) { - parse_ty(p, false) - } else { - @{id: p.get_id(), - node: ty_infer, - span: mk_sp(p.span.lo, p.span.hi)} - }; - either::left({mode: m, ty: t, ident: i, id: p.get_id()}) - } -} - -fn maybe_parse_dollar_mac(p: parser) -> option<mac_> { - alt p.token { - token::DOLLAR { - let lo = p.span.lo; - p.bump(); - alt p.token { - token::LIT_INT(num, ty_i) { - p.bump(); - some(mac_var(num as uint)) - } - token::LPAREN { - p.bump(); - let e = parse_expr(p); - expect(p, token::RPAREN); - let hi = p.last_span.hi; - some(mac_aq(mk_sp(lo,hi), e)) - } - _ { - p.fatal("expected `(` or integer literal"); - } - } - } - _ {none} - } -} - -fn maybe_parse_vstore(p: parser) -> option<vstore> { - if p.token == token::BINOP(token::SLASH) { - p.bump(); - alt p.token { - token::AT { - p.bump(); some(vstore_box) - } - token::TILDE { - p.bump(); some(vstore_uniq) - } - token::UNDERSCORE { - p.bump(); some(vstore_fixed(none)) - } - token::LIT_INT(i, ty_i) if i >= 0i64 { - p.bump(); some(vstore_fixed(some(i as uint))) - } - token::BINOP(token::AND) { - some(vstore_slice(parse_region(p))) - } - _ { - none - } - } - } else { - none - } -} - -fn lit_from_token(p: parser, tok: token::token) -> lit_ { - alt tok { - token::LIT_INT(i, it) { lit_int(i, it) } - token::LIT_UINT(u, ut) { lit_uint(u, ut) } - token::LIT_FLOAT(s, ft) { lit_float(p.get_str(s), ft) } - token::LIT_STR(s) { lit_str(p.get_str(s)) } - token::LPAREN { expect(p, token::RPAREN); lit_nil } - _ { unexpected_last(p, tok); } - } -} - -fn parse_lit(p: parser) -> lit { - let lo = p.span.lo; - let lit = if eat_keyword(p, "true") { - lit_bool(true) - } else if eat_keyword(p, "false") { - lit_bool(false) - } else { - let tok = p.token; - p.bump(); - lit_from_token(p, tok) - }; - ret {node: lit, span: mk_sp(lo, p.last_span.hi)}; -} - -fn parse_path_without_tps(p: parser) -> @path { - parse_path_without_tps_(p, parse_ident, parse_ident) -} - -fn parse_path_without_tps_( - p: parser, parse_ident: fn(parser) -> ident, - parse_last_ident: fn(parser) -> ident) -> @path { - - let lo = p.span.lo; - let global = eat(p, token::MOD_SEP); - let mut ids = []; - loop { - let is_not_last = - p.look_ahead(2u) != token::LT - && p.look_ahead(1u) == token::MOD_SEP; - - if is_not_last { - ids += [parse_ident(p)]; - expect(p, token::MOD_SEP); - } else { - ids += [parse_last_ident(p)]; - break; - } - } - @{span: mk_sp(lo, p.last_span.hi), global: global, - idents: ids, rp: none, types: []} -} - -fn parse_value_path(p: parser) -> @path { - parse_path_without_tps_(p, parse_ident, parse_value_ident) -} - -fn parse_path_with_tps(p: parser, colons: bool) -> @path { - 
#debug["parse_path_with_tps(colons=%b)", colons]; - - let lo = p.span.lo; - let path = parse_path_without_tps(p); - if colons && !eat(p, token::MOD_SEP) { - ret path; - } - - // Parse the region parameter, if any, which will - // be written "foo/&x" - let rp = { - // Hack: avoid parsing vstores like /@ and /~. This is painful - // because the notation for region bounds and the notation for vstores - // is... um... the same. I guess that's my fault. This is still not - // ideal as for str/& we end up parsing more than we ought to and have - // to sort it out later. - if p.token == token::BINOP(token::SLASH) - && p.look_ahead(1u) == token::BINOP(token::AND) { - - expect(p, token::BINOP(token::SLASH)); - some(parse_region(p)) - } else { - none - } - }; - - // Parse any type parameters which may appear: - let tps = { - if p.token == token::LT { - parse_seq_lt_gt(some(token::COMMA), {|p| parse_ty(p, false)}, p) - } else { - {node: [], span: path.span} - } - }; - - ret @{span: mk_sp(lo, tps.span.hi), - rp: rp, - types: tps.node with *path}; -} - -fn parse_mutability(p: parser) -> mutability { - if eat_keyword(p, "mut") { - m_mutbl - } else if eat_keyword(p, "mut") { - m_mutbl - } else if eat_keyword(p, "const") { - m_const - } else { - m_imm - } -} - -fn parse_field(p: parser, sep: token::token) -> field { - let lo = p.span.lo; - let m = parse_mutability(p); - let i = parse_ident(p); - expect(p, sep); - let e = parse_expr(p); - ret spanned(lo, e.span.hi, {mutbl: m, ident: i, expr: e}); -} - -fn mk_expr(p: parser, lo: uint, hi: uint, +node: expr_) -> @expr { - ret @{id: p.get_id(), node: node, span: mk_sp(lo, hi)}; -} - -fn mk_mac_expr(p: parser, lo: uint, hi: uint, m: mac_) -> @expr { - ret @{id: p.get_id(), - node: expr_mac({node: m, span: mk_sp(lo, hi)}), - span: mk_sp(lo, hi)}; -} - -fn mk_lit_u32(p: parser, i: u32) -> @expr { - let span = p.span; - let lv_lit = @{node: lit_uint(i as u64, ty_u32), - span: span}; - - ret @{id: p.get_id(), node: expr_lit(lv_lit), span: span}; -} - -// We don't allow single-entry tuples in the true AST; that indicates a -// parenthesized expression. However, we preserve them temporarily while -// parsing because `(while{...})+3` parses differently from `while{...}+3`. -// -// To reflect the fact that the @expr is not a true expr that should be -// part of the AST, we wrap such expressions in the pexpr enum. They -// can then be converted to true expressions by a call to `to_expr()`. -enum pexpr { - pexpr(@expr), -} - -fn mk_pexpr(p: parser, lo: uint, hi: uint, node: expr_) -> pexpr { - ret pexpr(mk_expr(p, lo, hi, node)); -} - -fn to_expr(e: pexpr) -> @expr { - alt e.node { - expr_tup(es) if vec::len(es) == 1u { es[0u] } - _ { *e } - } -} - -fn parse_bottom_expr(p: parser) -> pexpr { - let lo = p.span.lo; - let mut hi = p.span.hi; - - let mut ex: expr_; - - alt maybe_parse_dollar_mac(p) { - some(x) {ret pexpr(mk_mac_expr(p, lo, p.span.hi, x));} - _ {} - } - - if p.token == token::LPAREN { - p.bump(); - if p.token == token::RPAREN { - hi = p.span.hi; - p.bump(); - let lit = @spanned(lo, hi, lit_nil); - ret mk_pexpr(p, lo, hi, expr_lit(lit)); - } - let mut es = [parse_expr(p)]; - while p.token == token::COMMA { p.bump(); es += [parse_expr(p)]; } - hi = p.span.hi; - expect(p, token::RPAREN); - - // Note: we retain the expr_tup() even for simple - // parenthesized expressions, but only for a "little while". - // This is so that wrappers around parse_bottom_expr() - // can tell whether the expression was parenthesized or not, - // which affects expr_is_complete(). 
- ret mk_pexpr(p, lo, hi, expr_tup(es)); - } else if p.token == token::LBRACE { - p.bump(); - if is_keyword(p, "mut") || - is_plain_ident(p.token) && p.look_ahead(1u) == token::COLON { - let mut fields = [parse_field(p, token::COLON)]; - let mut base = none; - while p.token != token::RBRACE { - if eat_keyword(p, "with") { - base = some(parse_expr(p)); break; - } - expect(p, token::COMMA); - if p.token == token::RBRACE { - // record ends by an optional trailing comma - break; - } - fields += [parse_field(p, token::COLON)]; - } - hi = p.span.hi; - expect(p, token::RBRACE); - ex = expr_rec(fields, base); - } else if token::is_bar(p.token) { - ret pexpr(parse_fn_block_expr(p)); - } else { - let blk = parse_block_tail(p, lo, default_blk); - ret mk_pexpr(p, blk.span.lo, blk.span.hi, expr_block(blk)); - } - } else if eat_keyword(p, "new") { - expect(p, token::LPAREN); - let r = parse_expr(p); - expect(p, token::RPAREN); - let v = parse_expr(p); - ret mk_pexpr(p, lo, p.span.hi, - expr_new(r, p.get_id(), v)); - } else if eat_keyword(p, "if") { - ret pexpr(parse_if_expr(p)); - } else if eat_keyword(p, "for") { - ret pexpr(parse_for_expr(p)); - } else if eat_keyword(p, "while") { - ret pexpr(parse_while_expr(p)); - } else if eat_keyword(p, "loop") { - ret pexpr(parse_loop_expr(p)); - } else if eat_keyword(p, "alt") { - ret pexpr(parse_alt_expr(p)); - } else if eat_keyword(p, "fn") { - let proto = parse_fn_ty_proto(p); - alt proto { - proto_bare { p.fatal("fn expr are deprecated, use fn@"); } - proto_any { p.fatal("fn* cannot be used in an expression"); } - _ { /* fallthrough */ } - } - ret pexpr(parse_fn_expr(p, proto)); - } else if eat_keyword(p, "unchecked") { - ret pexpr(parse_block_expr(p, lo, unchecked_blk)); - } else if eat_keyword(p, "unsafe") { - ret pexpr(parse_block_expr(p, lo, unsafe_blk)); - } else if p.token == token::LBRACKET { - p.bump(); - let mutbl = parse_mutability(p); - let es = - parse_seq_to_end(token::RBRACKET, seq_sep(token::COMMA), - parse_expr, p); - hi = p.span.hi; - ex = expr_vec(es, mutbl); - } else if p.token == token::POUND && p.look_ahead(1u) == token::LT { - p.bump(); - p.bump(); - let ty = parse_ty(p, false); - expect(p, token::GT); - - /* hack: early return to take advantage of specialized function */ - ret pexpr(mk_mac_expr(p, lo, p.span.hi, - mac_embed_type(ty))); - } else if p.token == token::POUND && p.look_ahead(1u) == token::LBRACE { - p.bump(); - p.bump(); - let blk = mac_embed_block( - parse_block_tail(p, lo, default_blk)); - ret pexpr(mk_mac_expr(p, lo, p.span.hi, blk)); - } else if p.token == token::ELLIPSIS { - p.bump(); - ret pexpr(mk_mac_expr(p, lo, p.span.hi, mac_ellipsis)); - } else if p.token == token::POUND { - let ex_ext = parse_syntax_ext(p); - hi = ex_ext.span.hi; - ex = ex_ext.node; - } else if eat_keyword(p, "bind") { - let e = parse_expr_res(p, RESTRICT_NO_CALL_EXPRS); - let es = + fn parse_ty_fn() -> fn_decl { + let inputs = parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA), - parse_expr_or_hole, p); - hi = es.span.hi; - ex = expr_bind(e, es.node); - } else if eat_keyword(p, "fail") { - if can_begin_expr(p.token) { - let e = parse_expr(p); - hi = e.span.hi; - ex = expr_fail(some(e)); - } else { ex = expr_fail(none); } - } else if eat_keyword(p, "log") { - expect(p, token::LPAREN); - let lvl = parse_expr(p); - expect(p, token::COMMA); - let e = parse_expr(p); - ex = expr_log(2, lvl, e); - hi = p.span.hi; - expect(p, token::RPAREN); - } else if eat_keyword(p, "assert") { - let e = parse_expr(p); - ex = expr_assert(e); - hi = 
e.span.hi; - } else if eat_keyword(p, "check") { - /* Should be a predicate (pure boolean function) applied to - arguments that are all either slot variables or literals. - but the typechecker enforces that. */ - let e = parse_expr(p); - hi = e.span.hi; - ex = expr_check(checked_expr, e); - } else if eat_keyword(p, "claim") { - /* Same rules as check, except that if check-claims - is enabled (a command-line flag), then the parser turns - claims into check */ + self) { |p| + let mode = p.parse_arg_mode(); + let name = if is_plain_ident(p.token) + && p.look_ahead(1u) == token::COLON { - let e = parse_expr(p); - hi = e.span.hi; - ex = expr_check(claimed_expr, e); - } else if eat_keyword(p, "ret") { - if can_begin_expr(p.token) { - let e = parse_expr(p); - hi = e.span.hi; - ex = expr_ret(some(e)); - } else { ex = expr_ret(none); } - } else if eat_keyword(p, "break") { - ex = expr_break; - hi = p.span.hi; - } else if eat_keyword(p, "cont") { - ex = expr_cont; - hi = p.span.hi; - } else if eat_keyword(p, "copy") { - let e = parse_expr(p); - ex = expr_copy(e); - hi = e.span.hi; - } else if p.token == token::MOD_SEP || - is_ident(p.token) && !is_keyword(p, "true") && - !is_keyword(p, "false") { - let pth = parse_path_with_tps(p, true); - hi = pth.span.hi; - ex = expr_path(pth); - } else { - let lit = parse_lit(p); - hi = lit.span.hi; - ex = expr_lit(@lit); + let name = parse_value_ident(p); + p.bump(); + name + } else { "" }; + + {mode: mode, ty: p.parse_ty(false), ident: name, + id: p.get_id()} + }; + // FIXME: constrs is empty because right now, higher-order functions + // can't have constrained types. + // Not sure whether that would be desirable anyway. See #34 for the + // story on constrained types. + let constrs: [@constr] = []; + let (ret_style, ret_ty) = self.parse_ret_ty(); + ret {inputs: inputs.node, output: ret_ty, + purity: impure_fn, cf: ret_style, + constraints: constrs}; } - // Vstore is legal following expr_lit(lit_str(...)) and expr_vec(...) - // only. - alt ex { - expr_lit(@{node: lit_str(_), span: _}) | - expr_vec(_, _) { - alt maybe_parse_vstore(p) { - none { } - some(v) { - hi = p.span.hi; - ex = expr_vstore(mk_expr(p, lo, hi, ex), v); + fn parse_ty_methods() -> [ty_method] { + (parse_seq(token::LBRACE, token::RBRACE, seq_sep_none(), self) { |p| + let attrs = parse_outer_attributes(p); + let flo = p.span.lo; + let pur = p.parse_fn_purity(); + let ident = p.parse_method_name(); + let tps = p.parse_ty_params(); + let d = p.parse_ty_fn(), fhi = p.last_span.hi; + expect(p, token::SEMI); + {ident: ident, attrs: attrs, decl: {purity: pur with d}, tps: tps, + span: mk_sp(flo, fhi)} + }).node + } + + fn parse_mt() -> mt { + let mutbl = self.parse_mutability(); + let t = self.parse_ty(false); + ret {ty: t, mutbl: mutbl}; + } + + fn parse_ty_field() -> ty_field { + let lo = self.span.lo; + let mutbl = self.parse_mutability(); + let id = parse_ident(self); + expect(self, token::COLON); + let ty = self.parse_ty(false); + ret spanned(lo, ty.span.hi, {ident: id, mt: {ty: ty, mutbl: mutbl}}); + } + + // if i is the jth ident in args, return j + // otherwise, fail + fn ident_index(args: [arg], i: ident) -> uint { + let mut j = 0u; + for args.each {|a| if a.ident == i { ret j; } j += 1u; } + self.fatal("unbound variable `" + i + "` in constraint arg"); + } + + fn parse_type_constr_arg() -> @ty_constr_arg { + let sp = self.span; + let mut carg = carg_base; + expect(self, token::BINOP(token::STAR)); + if self.token == token::DOT { + // "*..." 
notation for record fields + self.bump(); + let pth = self.parse_path_without_tps(); + carg = carg_ident(pth); + } + // No literals yet, I guess? + ret @{node: carg, span: sp}; + } + + fn parse_constr_arg(args: [arg]) -> @constr_arg { + let sp = self.span; + let mut carg = carg_base; + if self.token == token::BINOP(token::STAR) { + self.bump(); + } else { + let i: ident = parse_value_ident(self); + carg = carg_ident(self.ident_index(args, i)); + } + ret @{node: carg, span: sp}; + } + + fn parse_ty_constr(fn_args: [arg]) -> @constr { + let lo = self.span.lo; + let path = self.parse_path_without_tps(); + let args: {node: [@constr_arg], span: span} = + parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA), + self, {|p| p.parse_constr_arg(fn_args)}); + ret @spanned(lo, args.span.hi, + {path: path, args: args.node, id: self.get_id()}); + } + + fn parse_constr_in_type() -> @ty_constr { + let lo = self.span.lo; + let path = self.parse_path_without_tps(); + let args: [@ty_constr_arg] = + parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA), + self, {|p| p.parse_type_constr_arg()}).node; + let hi = self.span.lo; + let tc: ty_constr_ = {path: path, args: args, id: self.get_id()}; + ret @spanned(lo, hi, tc); + } + + + fn parse_constrs<T: copy>(pser: fn(parser) -> @constr_general<T>) -> + [@constr_general<T>] { + let mut constrs: [@constr_general<T>] = []; + loop { + let constr = pser(self); + constrs += [constr]; + if self.token == token::COMMA { self.bump(); } + else { ret constrs; } + }; + } + + fn parse_type_constraints() -> [@ty_constr] { + ret self.parse_constrs({|p| p.parse_constr_in_type()}); + } + + fn parse_ret_ty() -> (ret_style, @ty) { + ret if eat(self, token::RARROW) { + let lo = self.span.lo; + if eat(self, token::NOT) { + (noreturn, @{id: self.get_id(), + node: ty_bot, + span: mk_sp(lo, self.last_span.hi)}) + } else { + (return_val, self.parse_ty(false)) + } + } else { + let pos = self.span.lo; + (return_val, @{id: self.get_id(), + node: ty_nil, + span: mk_sp(pos, pos)}) + } + } + + fn region_from_name(s: option<str>) -> @region { + let r = alt s { + some (string) { re_named(string) } + none { re_anon } + }; + + @{id: self.get_id(), node: r} + } + + // Parses something like "&x" + fn parse_region() -> @region { + expect(self, token::BINOP(token::AND)); + alt self.token { + token::IDENT(sid, _) { + self.bump(); + let n = self.get_str(sid); + self.region_from_name(some(n)) + } + _ { + self.region_from_name(none) } } - } - _ { } } - ret mk_pexpr(p, lo, hi, ex); -} - -fn parse_block_expr(p: parser, - lo: uint, - blk_mode: blk_check_mode) -> @expr { - expect(p, token::LBRACE); - let blk = parse_block_tail(p, lo, blk_mode); - ret mk_expr(p, blk.span.lo, blk.span.hi, expr_block(blk)); -} - -fn parse_syntax_ext(p: parser) -> @expr { - let lo = p.span.lo; - expect(p, token::POUND); - ret parse_syntax_ext_naked(p, lo); -} - -fn parse_syntax_ext_naked(p: parser, lo: uint) -> @expr { - alt p.token { - token::IDENT(_, _) {} - _ { p.fatal("expected a syntax expander name"); } - } - let pth = parse_path_without_tps(p); - //temporary for a backwards-compatible cycle: - let sep = seq_sep(token::COMMA); - let mut e = none; - if (p.token == token::LPAREN || p.token == token::LBRACKET) { - let es = - if p.token == token::LPAREN { - parse_seq(token::LPAREN, token::RPAREN, - sep, parse_expr, p) - } else { - parse_seq(token::LBRACKET, token::RBRACKET, - sep, parse_expr, p) + // Parses something like "&x." 
(note the trailing dot) + fn parse_region_dot() -> @region { + let name = + alt self.token { + token::IDENT(sid, _) if self.look_ahead(1u) == token::DOT { + self.bump(); self.bump(); + some(self.get_str(sid)) + } + _ { none } }; + self.region_from_name(name) + } + + fn parse_ty(colons_before_params: bool) -> @ty { + let lo = self.span.lo; + + alt self.maybe_parse_dollar_mac() { + some(e) { + ret @{id: self.get_id(), + node: ty_mac(spanned(lo, self.span.hi, e)), + span: mk_sp(lo, self.span.hi)}; + } + none {} + } + + let t = if self.token == token::LPAREN { + self.bump(); + if self.token == token::RPAREN { + self.bump(); + ty_nil + } else { + let mut ts = [self.parse_ty(false)]; + while self.token == token::COMMA { + self.bump(); + ts += [self.parse_ty(false)]; + } + let t = if vec::len(ts) == 1u { ts[0].node } + else { ty_tup(ts) }; + expect(self, token::RPAREN); + t + } + } else if self.token == token::AT { + self.bump(); + ty_box(self.parse_mt()) + } else if self.token == token::TILDE { + self.bump(); + ty_uniq(self.parse_mt()) + } else if self.token == token::BINOP(token::STAR) { + self.bump(); + ty_ptr(self.parse_mt()) + } else if self.token == token::LBRACE { + let elems = parse_seq(token::LBRACE, token::RBRACE, + seq_sep_opt(token::COMMA), self, + {|p| p.parse_ty_field()}); + if vec::len(elems.node) == 0u { + unexpected_last(self, token::RBRACE); + } + let hi = elems.span.hi; + + let t = ty_rec(elems.node); + if self.token == token::COLON { + self.bump(); + ty_constr(@{id: self.get_id(), + node: t, + span: mk_sp(lo, hi)}, + self.parse_type_constraints()) + } else { t } + } else if self.token == token::LBRACKET { + expect(self, token::LBRACKET); + let t = ty_vec(self.parse_mt()); + expect(self, token::RBRACKET); + t + } else if self.token == token::BINOP(token::AND) { + self.bump(); + let region = self.parse_region_dot(); + let mt = self.parse_mt(); + ty_rptr(region, mt) + } else if eat_keyword(self, "fn") { + let proto = self.parse_fn_ty_proto(); + alt proto { + proto_bare { self.warn("fn is deprecated, use native fn"); } + _ { /* fallthrough */ } + } + ty_fn(proto, self.parse_ty_fn()) + } else if eat_keyword(self, "native") { + expect_keyword(self, "fn"); + ty_fn(proto_bare, self.parse_ty_fn()) + } else if self.token == token::MOD_SEP || is_ident(self.token) { + let path = self.parse_path_with_tps(colons_before_params); + ty_path(path, self.get_id()) + } else { self.fatal("expecting type"); }; + + let sp = mk_sp(lo, self.last_span.hi); + ret @{id: self.get_id(), + node: alt self.maybe_parse_vstore() { + // Consider a vstore suffix like /@ or /~ + none { t } + some(v) { + ty_vstore(@{id: self.get_id(), node:t, span: sp}, v) + } }, + span: sp} + } + + fn parse_arg_mode() -> mode { + if eat(self, token::BINOP(token::AND)) { + expl(by_mutbl_ref) + } else if eat(self, token::BINOP(token::MINUS)) { + expl(by_move) + } else if eat(self, token::ANDAND) { + expl(by_ref) + } else if eat(self, token::BINOP(token::PLUS)) { + if eat(self, token::BINOP(token::PLUS)) { + expl(by_val) + } else { + expl(by_copy) + } + } else { infer(self.get_id()) } + } + + fn parse_capture_item_or(parse_arg_fn: fn(parser) -> arg_or_capture_item) + -> arg_or_capture_item { + + fn parse_capture_item(p:parser, is_move: bool) -> capture_item { + let sp = mk_sp(p.span.lo, p.span.hi); + let ident = parse_ident(p); + {id: p.get_id(), is_move: is_move, name: ident, span: sp} + } + + if eat_keyword(self, "move") { + either::right(parse_capture_item(self, true)) + } else if eat_keyword(self, "copy") { + 
either::right(parse_capture_item(self, false)) + } else { + parse_arg_fn(self) + } + } + + fn parse_arg() -> arg_or_capture_item { + let m = self.parse_arg_mode(); + let i = parse_value_ident(self); + expect(self, token::COLON); + let t = self.parse_ty(false); + either::left({mode: m, ty: t, ident: i, id: self.get_id()}) + } + + fn parse_arg_or_capture_item() -> arg_or_capture_item { + self.parse_capture_item_or() {|p| p.parse_arg() } + } + + fn parse_fn_block_arg() -> arg_or_capture_item { + self.parse_capture_item_or() {|p| + let m = p.parse_arg_mode(); + let i = parse_value_ident(p); + let t = if eat(p, token::COLON) { + p.parse_ty(false) + } else { + @{id: p.get_id(), + node: ty_infer, + span: mk_sp(p.span.lo, p.span.hi)} + }; + either::left({mode: m, ty: t, ident: i, id: p.get_id()}) + } + } + + fn maybe_parse_dollar_mac() -> option<mac_> { + alt self.token { + token::DOLLAR { + let lo = self.span.lo; + self.bump(); + alt self.token { + token::LIT_INT(num, ty_i) { + self.bump(); + some(mac_var(num as uint)) + } + token::LPAREN { + self.bump(); + let e = self.parse_expr(); + expect(self, token::RPAREN); + let hi = self.last_span.hi; + some(mac_aq(mk_sp(lo,hi), e)) + } + _ { + self.fatal("expected `(` or integer literal"); + } + } + } + _ {none} + } + } + + fn maybe_parse_vstore() -> option<vstore> { + if self.token == token::BINOP(token::SLASH) { + self.bump(); + alt self.token { + token::AT { + self.bump(); some(vstore_box) + } + token::TILDE { + self.bump(); some(vstore_uniq) + } + token::UNDERSCORE { + self.bump(); some(vstore_fixed(none)) + } + token::LIT_INT(i, ty_i) if i >= 0i64 { + self.bump(); some(vstore_fixed(some(i as uint))) + } + token::BINOP(token::AND) { + some(vstore_slice(self.parse_region())) + } + _ { + none + } + } + } else { + none + } + } + + fn lit_from_token(tok: token::token) -> lit_ { + alt tok { + token::LIT_INT(i, it) { lit_int(i, it) } + token::LIT_UINT(u, ut) { lit_uint(u, ut) } + token::LIT_FLOAT(s, ft) { lit_float(self.get_str(s), ft) } + token::LIT_STR(s) { lit_str(self.get_str(s)) } + token::LPAREN { expect(self, token::RPAREN); lit_nil } + _ { unexpected_last(self, tok); } + } + } + + fn parse_lit() -> lit { + let lo = self.span.lo; + let lit = if eat_keyword(self, "true") { + lit_bool(true) + } else if eat_keyword(self, "false") { + lit_bool(false) + } else { + let tok = self.token; + self.bump(); + self.lit_from_token(tok) + }; + ret {node: lit, span: mk_sp(lo, self.last_span.hi)}; + } + + fn parse_path_without_tps() -> @path { + self.parse_path_without_tps_(parse_ident, parse_ident) + } + + fn parse_path_without_tps_( + parse_ident: fn(parser) -> ident, + parse_last_ident: fn(parser) -> ident) -> @path { + + let lo = self.span.lo; + let global = eat(self, token::MOD_SEP); + let mut ids = []; + loop { + let is_not_last = + self.look_ahead(2u) != token::LT + && self.look_ahead(1u) == token::MOD_SEP; + + if is_not_last { + ids += [parse_ident(self)]; + expect(self, token::MOD_SEP); + } else { + ids += [parse_last_ident(self)]; + break; + } + } + @{span: mk_sp(lo, self.last_span.hi), global: global, + idents: ids, rp: none, types: []} + } + + fn parse_value_path() -> @path { + self.parse_path_without_tps_(parse_ident, parse_value_ident) + } + + fn parse_path_with_tps(colons: bool) -> @path { + #debug["parse_path_with_tps(colons=%b)", colons]; + + let lo = self.span.lo; + let path = self.parse_path_without_tps(); + if colons && !eat(self, token::MOD_SEP) { + ret path; + } + + // Parse the region parameter, if any, which will + // be written "foo/&x" + 
let rp = { + // Hack: avoid parsing vstores like /@ and /~. This is painful + // because the notation for region bounds and the notation for + // vstores is... um... the same. I guess that's my fault. This + // is still not ideal as for str/& we end up parsing more than we + // ought to and have to sort it out later. + if self.token == token::BINOP(token::SLASH) + && self.look_ahead(1u) == token::BINOP(token::AND) { + + expect(self, token::BINOP(token::SLASH)); + some(self.parse_region()) + } else { + none + } + }; + + // Parse any type parameters which may appear: + let tps = { + if self.token == token::LT { + parse_seq_lt_gt(some(token::COMMA), self, + {|p| p.parse_ty(false)}) + } else { + {node: [], span: path.span} + } + }; + + ret @{span: mk_sp(lo, tps.span.hi), + rp: rp, + types: tps.node with *path}; + } + + fn parse_mutability() -> mutability { + if eat_keyword(self, "mut") { + m_mutbl + } else if eat_keyword(self, "mut") { + m_mutbl + } else if eat_keyword(self, "const") { + m_const + } else { + m_imm + } + } + + fn parse_field(sep: token::token) -> field { + let lo = self.span.lo; + let m = self.parse_mutability(); + let i = parse_ident(self); + expect(self, sep); + let e = self.parse_expr(); + ret spanned(lo, e.span.hi, {mutbl: m, ident: i, expr: e}); + } + + fn mk_expr(lo: uint, hi: uint, +node: expr_) -> @expr { + ret @{id: self.get_id(), node: node, span: mk_sp(lo, hi)}; + } + + fn mk_mac_expr(lo: uint, hi: uint, m: mac_) -> @expr { + ret @{id: self.get_id(), + node: expr_mac({node: m, span: mk_sp(lo, hi)}), + span: mk_sp(lo, hi)}; + } + + fn mk_lit_u32(i: u32) -> @expr { + let span = self.span; + let lv_lit = @{node: lit_uint(i as u64, ty_u32), + span: span}; + + ret @{id: self.get_id(), node: expr_lit(lv_lit), span: span}; + } + + fn mk_pexpr(lo: uint, hi: uint, node: expr_) -> pexpr { + ret pexpr(self.mk_expr(lo, hi, node)); + } + + fn to_expr(e: pexpr) -> @expr { + alt e.node { + expr_tup(es) if vec::len(es) == 1u { es[0u] } + _ { *e } + } + } + + fn parse_bottom_expr() -> pexpr { + let lo = self.span.lo; + let mut hi = self.span.hi; + + let mut ex: expr_; + + alt self.maybe_parse_dollar_mac() { + some(x) {ret pexpr(self.mk_mac_expr(lo, self.span.hi, x));} + _ {} + } + + if self.token == token::LPAREN { + self.bump(); + if self.token == token::RPAREN { + hi = self.span.hi; + self.bump(); + let lit = @spanned(lo, hi, lit_nil); + ret self.mk_pexpr(lo, hi, expr_lit(lit)); + } + let mut es = [self.parse_expr()]; + while self.token == token::COMMA { + self.bump(); es += [self.parse_expr()]; + } + hi = self.span.hi; + expect(self, token::RPAREN); + + // Note: we retain the expr_tup() even for simple + // parenthesized expressions, but only for a "little while". + // This is so that wrappers around parse_bottom_expr() + // can tell whether the expression was parenthesized or not, + // which affects expr_is_complete(). 
+ ret self.mk_pexpr(lo, hi, expr_tup(es)); + } else if self.token == token::LBRACE { + self.bump(); + if is_keyword(self, "mut") || + is_plain_ident(self.token) + && self.look_ahead(1u) == token::COLON { + let mut fields = [self.parse_field(token::COLON)]; + let mut base = none; + while self.token != token::RBRACE { + if eat_keyword(self, "with") { + base = some(self.parse_expr()); break; + } + expect(self, token::COMMA); + if self.token == token::RBRACE { + // record ends by an optional trailing comma + break; + } + fields += [self.parse_field(token::COLON)]; + } + hi = self.span.hi; + expect(self, token::RBRACE); + ex = expr_rec(fields, base); + } else if token::is_bar(self.token) { + ret pexpr(self.parse_fn_block_expr()); + } else { + let blk = self.parse_block_tail(lo, default_blk); + ret self.mk_pexpr(blk.span.lo, blk.span.hi, expr_block(blk)); + } + } else if eat_keyword(self, "new") { + expect(self, token::LPAREN); + let r = self.parse_expr(); + expect(self, token::RPAREN); + let v = self.parse_expr(); + ret self.mk_pexpr(lo, self.span.hi, + expr_new(r, self.get_id(), v)); + } else if eat_keyword(self, "if") { + ret pexpr(self.parse_if_expr()); + } else if eat_keyword(self, "for") { + ret pexpr(self.parse_for_expr()); + } else if eat_keyword(self, "while") { + ret pexpr(self.parse_while_expr()); + } else if eat_keyword(self, "loop") { + ret pexpr(self.parse_loop_expr()); + } else if eat_keyword(self, "alt") { + ret pexpr(self.parse_alt_expr()); + } else if eat_keyword(self, "fn") { + let proto = self.parse_fn_ty_proto(); + alt proto { + proto_bare { self.fatal("fn expr are deprecated, use fn@"); } + proto_any { self.fatal("fn* cannot be used in an expression"); } + _ { /* fallthrough */ } + } + ret pexpr(self.parse_fn_expr(proto)); + } else if eat_keyword(self, "unchecked") { + ret pexpr(self.parse_block_expr(lo, unchecked_blk)); + } else if eat_keyword(self, "unsafe") { + ret pexpr(self.parse_block_expr(lo, unsafe_blk)); + } else if self.token == token::LBRACKET { + self.bump(); + let mutbl = self.parse_mutability(); + let es = + parse_seq_to_end(token::RBRACKET, seq_sep(token::COMMA), self, + {|p| p.parse_expr()}); + hi = self.span.hi; + ex = expr_vec(es, mutbl); + } else if self.token == token::POUND + && self.look_ahead(1u) == token::LT { + self.bump(); + self.bump(); + let ty = self.parse_ty(false); + expect(self, token::GT); + + /* hack: early return to take advantage of specialized function */ + ret pexpr(self.mk_mac_expr(lo, self.span.hi, + mac_embed_type(ty))); + } else if self.token == token::POUND + && self.look_ahead(1u) == token::LBRACE { + self.bump(); + self.bump(); + let blk = mac_embed_block( + self.parse_block_tail(lo, default_blk)); + ret pexpr(self.mk_mac_expr(lo, self.span.hi, blk)); + } else if self.token == token::ELLIPSIS { + self.bump(); + ret pexpr(self.mk_mac_expr(lo, self.span.hi, mac_ellipsis)); + } else if self.token == token::POUND { + let ex_ext = self.parse_syntax_ext(); + hi = ex_ext.span.hi; + ex = ex_ext.node; + } else if eat_keyword(self, "bind") { + let e = self.parse_expr_res(RESTRICT_NO_CALL_EXPRS); + let es = + parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA), + self, {|p| p.parse_expr_or_hole()}); + hi = es.span.hi; + ex = expr_bind(e, es.node); + } else if eat_keyword(self, "fail") { + if can_begin_expr(self.token) { + let e = self.parse_expr(); + hi = e.span.hi; + ex = expr_fail(some(e)); + } else { ex = expr_fail(none); } + } else if eat_keyword(self, "log") { + expect(self, token::LPAREN); + let lvl = self.parse_expr(); + 
expect(self, token::COMMA); + let e = self.parse_expr(); + ex = expr_log(2, lvl, e); + hi = self.span.hi; + expect(self, token::RPAREN); + } else if eat_keyword(self, "assert") { + let e = self.parse_expr(); + ex = expr_assert(e); + hi = e.span.hi; + } else if eat_keyword(self, "check") { + /* Should be a predicate (pure boolean function) applied to + arguments that are all either slot variables or literals. + but the typechecker enforces that. */ + let e = self.parse_expr(); + hi = e.span.hi; + ex = expr_check(checked_expr, e); + } else if eat_keyword(self, "claim") { + /* Same rules as check, except that if check-claims + is enabled (a command-line flag), then the parser turns + claims into check */ + + let e = self.parse_expr(); + hi = e.span.hi; + ex = expr_check(claimed_expr, e); + } else if eat_keyword(self, "ret") { + if can_begin_expr(self.token) { + let e = self.parse_expr(); + hi = e.span.hi; + ex = expr_ret(some(e)); + } else { ex = expr_ret(none); } + } else if eat_keyword(self, "break") { + ex = expr_break; + hi = self.span.hi; + } else if eat_keyword(self, "cont") { + ex = expr_cont; + hi = self.span.hi; + } else if eat_keyword(self, "copy") { + let e = self.parse_expr(); + ex = expr_copy(e); + hi = e.span.hi; + } else if self.token == token::MOD_SEP || + is_ident(self.token) && !is_keyword(self, "true") && + !is_keyword(self, "false") { + let pth = self.parse_path_with_tps(true); + hi = pth.span.hi; + ex = expr_path(pth); + } else { + let lit = self.parse_lit(); + hi = lit.span.hi; + ex = expr_lit(@lit); + } + + // Vstore is legal following expr_lit(lit_str(...)) and expr_vec(...) + // only. + alt ex { + expr_lit(@{node: lit_str(_), span: _}) | + expr_vec(_, _) { + alt self.maybe_parse_vstore() { + none { } + some(v) { + hi = self.span.hi; + ex = expr_vstore(self.mk_expr(lo, hi, ex), v); + } + } + } + _ { } + } + + ret self.mk_pexpr(lo, hi, ex); + } + + fn parse_block_expr(lo: uint, blk_mode: blk_check_mode) -> @expr { + expect(self, token::LBRACE); + let blk = self.parse_block_tail(lo, blk_mode); + ret self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk)); + } + + fn parse_syntax_ext() -> @expr { + let lo = self.span.lo; + expect(self, token::POUND); + ret self.parse_syntax_ext_naked(lo); + } + + fn parse_syntax_ext_naked(lo: uint) -> @expr { + alt self.token { + token::IDENT(_, _) {} + _ { self.fatal("expected a syntax expander name"); } + } + let pth = self.parse_path_without_tps(); + //temporary for a backwards-compatible cycle: + let sep = seq_sep(token::COMMA); + let mut e = none; + if (self.token == token::LPAREN || self.token == token::LBRACKET) { + let es = + if self.token == token::LPAREN { + parse_seq(token::LPAREN, token::RPAREN, + sep, self, {|p| p.parse_expr()}) + } else { + parse_seq(token::LBRACKET, token::RBRACKET, + sep, self, {|p| p.parse_expr()}) + }; let hi = es.span.hi; - e = some(mk_expr(p, es.span.lo, hi, - expr_vec(es.node, m_imm))); + e = some(self.mk_expr(es.span.lo, hi, + expr_vec(es.node, m_imm))); } let mut b = none; - if p.token == token::LBRACE { - p.bump(); - let lo = p.span.lo; + if self.token == token::LBRACE { + self.bump(); + let lo = self.span.lo; let mut depth = 1u; while (depth > 0u) { - alt (p.token) { + alt (self.token) { token::LBRACE {depth += 1u;} token::RBRACE {depth -= 1u;} - token::EOF {p.fatal("unexpected EOF in macro body");} + token::EOF {self.fatal("unexpected EOF in macro body");} _ {} } - p.bump(); + self.bump(); } - let hi = p.last_span.lo; + let hi = self.last_span.lo; b = some({span: mk_sp(lo,hi)}); } - ret 
mk_mac_expr(p, lo, p.span.hi, mac_invoc(pth, e, b)); + ret self.mk_mac_expr(lo, self.span.hi, mac_invoc(pth, e, b)); } -fn parse_dot_or_call_expr(p: parser) -> pexpr { - let b = parse_bottom_expr(p); - parse_dot_or_call_expr_with(p, b) -} + fn parse_dot_or_call_expr() -> pexpr { + let b = self.parse_bottom_expr(); + self.parse_dot_or_call_expr_with(b) + } -fn permits_call(p: parser) -> bool { - ret p.restriction != RESTRICT_NO_CALL_EXPRS; -} + fn permits_call() -> bool { + ret self.restriction != RESTRICT_NO_CALL_EXPRS; + } -fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr { - let mut e = e0; - let lo = e.span.lo; - let mut hi = e.span.hi; - loop { - // expr.f - if eat(p, token::DOT) { - alt p.token { - token::IDENT(i, _) { - hi = p.span.hi; - p.bump(); - let tys = if eat(p, token::MOD_SEP) { - expect(p, token::LT); - parse_seq_to_gt(some(token::COMMA), - {|p| parse_ty(p, false)}, p) - } else { [] }; - e = mk_pexpr(p, lo, hi, - expr_field(to_expr(e), - p.get_str(i), - tys)); - } - _ { unexpected(p); } + fn parse_dot_or_call_expr_with(e0: pexpr) -> pexpr { + let mut e = e0; + let lo = e.span.lo; + let mut hi = e.span.hi; + loop { + // expr.f + if eat(self, token::DOT) { + alt self.token { + token::IDENT(i, _) { + hi = self.span.hi; + self.bump(); + let tys = if eat(self, token::MOD_SEP) { + expect(self, token::LT); + parse_seq_to_gt(some(token::COMMA), self, + {|p| p.parse_ty(false)}) + } else { [] }; + e = self.mk_pexpr(lo, hi, expr_field(self.to_expr(e), + self.get_str(i), + tys)); + } + _ { unexpected(self); } + } + cont; } - cont; - } - if expr_is_complete(p, e) { break; } - alt p.token { - // expr(...) - token::LPAREN if permits_call(p) { - let es_opt = - parse_seq(token::LPAREN, token::RPAREN, - seq_sep(token::COMMA), parse_expr_or_hole, p); - hi = es_opt.span.hi; + if self.expr_is_complete(e) { break; } + alt self.token { + // expr(...) + token::LPAREN if self.permits_call() { + let es_opt = + parse_seq(token::LPAREN, token::RPAREN, + seq_sep(token::COMMA), self, + {|p| p.parse_expr_or_hole()}); + hi = es_opt.span.hi; - let nd = - if vec::any(es_opt.node, {|e| option::is_none(e) }) { - expr_bind(to_expr(e), es_opt.node) - } else { - let es = vec::map(es_opt.node) {|e| option::get(e) }; - expr_call(to_expr(e), es, false) - }; - e = mk_pexpr(p, lo, hi, nd); + let nd = + if vec::any(es_opt.node, {|e| option::is_none(e) }) { + expr_bind(self.to_expr(e), es_opt.node) + } else { + let es = vec::map(es_opt.node) {|e| option::get(e) }; + expr_call(self.to_expr(e), es, false) + }; + e = self.mk_pexpr(lo, hi, nd); } // expr {|| ... } - token::LBRACE if (token::is_bar(p.look_ahead(1u)) - && permits_call(p)) { - p.bump(); - let blk = parse_fn_block_expr(p); + token::LBRACE if (token::is_bar(self.look_ahead(1u)) + && self.permits_call()) { + self.bump(); + let blk = self.parse_fn_block_expr(); alt e.node { expr_call(f, args, false) { e = pexpr(@{node: expr_call(f, args + [blk], true) - with *to_expr(e)}); + with *self.to_expr(e)}); } _ { - e = mk_pexpr(p, lo, p.last_span.hi, - expr_call(to_expr(e), [blk], true)); + e = self.mk_pexpr(lo, self.last_span.hi, + expr_call(self.to_expr(e), [blk], true)); } } } // expr[...] 
token::LBRACKET { - p.bump(); - let ix = parse_expr(p); + self.bump(); + let ix = self.parse_expr(); hi = ix.span.hi; - expect(p, token::RBRACKET); - p.get_id(); // see ast_util::op_expr_callee_id - e = mk_pexpr(p, lo, hi, expr_index(to_expr(e), ix)); + expect(self, token::RBRACKET); + self.get_id(); // see ast_util::op_expr_callee_id + e = self.mk_pexpr(lo, hi, expr_index(self.to_expr(e), ix)); } _ { ret e; } @@ -1003,1514 +1023,1520 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr { ret e; } -fn parse_prefix_expr(p: parser) -> pexpr { - let lo = p.span.lo; - let mut hi = p.span.hi; + fn parse_prefix_expr() -> pexpr { + let lo = self.span.lo; + let mut hi = self.span.hi; - let mut ex; - alt p.token { - token::NOT { - p.bump(); - let e = to_expr(parse_prefix_expr(p)); - hi = e.span.hi; - p.get_id(); // see ast_util::op_expr_callee_id - ex = expr_unary(not, e); - } - token::BINOP(b) { - alt b { - token::MINUS { - p.bump(); - let e = to_expr(parse_prefix_expr(p)); + let mut ex; + alt self.token { + token::NOT { + self.bump(); + let e = self.to_expr(self.parse_prefix_expr()); hi = e.span.hi; - p.get_id(); // see ast_util::op_expr_callee_id - ex = expr_unary(neg, e); + self.get_id(); // see ast_util::op_expr_callee_id + ex = expr_unary(not, e); } - token::STAR { - p.bump(); - let e = to_expr(parse_prefix_expr(p)); - hi = e.span.hi; - ex = expr_unary(deref, e); - } - token::AND { - p.bump(); - let m = parse_mutability(p); - let e = to_expr(parse_prefix_expr(p)); - hi = e.span.hi; - ex = expr_addr_of(m, e); - } - _ { ret parse_dot_or_call_expr(p); } - } - } - token::AT { - p.bump(); - let m = parse_mutability(p); - let e = to_expr(parse_prefix_expr(p)); - hi = e.span.hi; - ex = expr_unary(box(m), e); - } - token::TILDE { - p.bump(); - let m = parse_mutability(p); - let e = to_expr(parse_prefix_expr(p)); - hi = e.span.hi; - ex = expr_unary(uniq(m), e); - } - _ { ret parse_dot_or_call_expr(p); } - } - ret mk_pexpr(p, lo, hi, ex); -} - - -fn parse_binops(p: parser) -> @expr { - ret parse_more_binops(p, parse_prefix_expr(p), 0u); -} - -fn parse_more_binops(p: parser, plhs: pexpr, min_prec: uint) -> - @expr { - let lhs = to_expr(plhs); - if expr_is_complete(p, plhs) { ret lhs; } - let peeked = p.token; - if peeked == token::BINOP(token::OR) && - p.restriction == RESTRICT_NO_BAR_OP { ret lhs; } - let cur_opt = token_to_binop(peeked); - alt cur_opt { - some(cur_op) { - let cur_prec = operator_prec(cur_op); - if cur_prec > min_prec { - p.bump(); - let expr = parse_prefix_expr(p); - let rhs = parse_more_binops(p, expr, cur_prec); - p.get_id(); // see ast_util::op_expr_callee_id - let bin = mk_pexpr(p, lhs.span.lo, rhs.span.hi, - expr_binary(cur_op, lhs, rhs)); - ret parse_more_binops(p, bin, min_prec); - } - } - _ {} - } - if as_prec > min_prec && eat_keyword(p, "as") { - let rhs = parse_ty(p, true); - let _as = - mk_pexpr(p, lhs.span.lo, rhs.span.hi, expr_cast(lhs, rhs)); - ret parse_more_binops(p, _as, min_prec); - } - ret lhs; -} - -fn parse_assign_expr(p: parser) -> @expr { - let lo = p.span.lo; - let lhs = parse_binops(p); - alt p.token { - token::EQ { - p.bump(); - let rhs = parse_expr(p); - ret mk_expr(p, lo, rhs.span.hi, expr_assign(lhs, rhs)); - } - token::BINOPEQ(op) { - p.bump(); - let rhs = parse_expr(p); - let mut aop; - alt op { - token::PLUS { aop = add; } - token::MINUS { aop = subtract; } - token::STAR { aop = mul; } - token::SLASH { aop = div; } - token::PERCENT { aop = rem; } - token::CARET { aop = bitxor; } - token::AND { aop = bitand; } - token::OR { aop = bitor; } - 
token::SHL { aop = shl; } - token::SHR { aop = shr; } - } - p.get_id(); // see ast_util::op_expr_callee_id - ret mk_expr(p, lo, rhs.span.hi, expr_assign_op(aop, lhs, rhs)); - } - token::LARROW { - p.bump(); - let rhs = parse_expr(p); - ret mk_expr(p, lo, rhs.span.hi, expr_move(lhs, rhs)); - } - token::DARROW { - p.bump(); - let rhs = parse_expr(p); - ret mk_expr(p, lo, rhs.span.hi, expr_swap(lhs, rhs)); - } - _ {/* fall through */ } - } - ret lhs; -} - -fn parse_if_expr_1(p: parser) -> - {cond: @expr, - then: blk, - els: option<@expr>, - lo: uint, - hi: uint} { - let lo = p.last_span.lo; - let cond = parse_expr(p); - let thn = parse_block(p); - let mut els: option<@expr> = none; - let mut hi = thn.span.hi; - if eat_keyword(p, "else") { - let elexpr = parse_else_expr(p); - els = some(elexpr); - hi = elexpr.span.hi; - } - ret {cond: cond, then: thn, els: els, lo: lo, hi: hi}; -} - -fn parse_if_expr(p: parser) -> @expr { - if eat_keyword(p, "check") { - let q = parse_if_expr_1(p); - ret mk_expr(p, q.lo, q.hi, expr_if_check(q.cond, q.then, q.els)); - } else { - let q = parse_if_expr_1(p); - ret mk_expr(p, q.lo, q.hi, expr_if(q.cond, q.then, q.els)); - } -} - -fn parse_fn_expr(p: parser, proto: proto) -> @expr { - let lo = p.last_span.lo; - - let cc_old = parse_old_skool_capture_clause(p); - - // if we want to allow fn expression argument types to be inferred in the - // future, just have to change parse_arg to parse_fn_block_arg. - let (decl, capture_clause) = - parse_fn_decl(p, impure_fn, parse_arg_or_capture_item); - - let body = parse_block(p); - ret mk_expr(p, lo, body.span.hi, - expr_fn(proto, decl, body, - @(*capture_clause + cc_old))); -} - -fn parse_fn_block_expr(p: parser) -> @expr { - let lo = p.last_span.lo; - let (decl, captures) = parse_fn_block_decl(p); - let body = parse_block_tail(p, lo, default_blk); - ret mk_expr(p, lo, body.span.hi, - expr_fn_block(decl, body, captures)); -} - -fn parse_else_expr(p: parser) -> @expr { - if eat_keyword(p, "if") { - ret parse_if_expr(p); - } else { - let blk = parse_block(p); - ret mk_expr(p, blk.span.lo, blk.span.hi, expr_block(blk)); - } -} - -fn parse_for_expr(p: parser) -> @expr { - let lo = p.last_span; - let call = parse_expr_res(p, RESTRICT_STMT_EXPR); - alt call.node { - expr_call(f, args, true) { - let b_arg = vec::last(args); - let last = mk_expr(p, b_arg.span.lo, b_arg.span.hi, - expr_loop_body(b_arg)); - @{node: expr_call(f, vec::init(args) + [last], true) - with *call} - } - _ { - p.span_fatal(lo, "`for` must be followed by a block call"); - } - } -} - -fn parse_while_expr(p: parser) -> @expr { - let lo = p.last_span.lo; - let cond = parse_expr(p); - let body = parse_block_no_value(p); - let mut hi = body.span.hi; - ret mk_expr(p, lo, hi, expr_while(cond, body)); -} - -fn parse_loop_expr(p: parser) -> @expr { - let lo = p.last_span.lo; - let body = parse_block_no_value(p); - let mut hi = body.span.hi; - ret mk_expr(p, lo, hi, expr_loop(body)); -} - -fn parse_alt_expr(p: parser) -> @expr { - let lo = p.last_span.lo; - let mode = if eat_keyword(p, "check") { alt_check } - else { alt_exhaustive }; - let discriminant = parse_expr(p); - expect(p, token::LBRACE); - let mut arms: [arm] = []; - while p.token != token::RBRACE { - let pats = parse_pats(p); - let mut guard = none; - if eat_keyword(p, "if") { guard = some(parse_expr(p)); } - let blk = parse_block(p); - arms += [{pats: pats, guard: guard, body: blk}]; - } - let mut hi = p.span.hi; - p.bump(); - ret mk_expr(p, lo, hi, expr_alt(discriminant, arms, mode)); -} - -fn parse_expr(p: 
parser) -> @expr { - ret parse_expr_res(p, UNRESTRICTED); -} - -fn parse_expr_or_hole(p: parser) -> option<@expr> { - alt p.token { - token::UNDERSCORE { p.bump(); ret none; } - _ { ret some(parse_expr(p)); } - } -} - -fn parse_expr_res(p: parser, r: restriction) -> @expr { - let old = p.restriction; - p.restriction = r; - let e = parse_assign_expr(p); - p.restriction = old; - ret e; -} - -fn parse_initializer(p: parser) -> option<initializer> { - alt p.token { - token::EQ { - p.bump(); - ret some({op: init_assign, expr: parse_expr(p)}); - } - token::LARROW { - p.bump(); - ret some({op: init_move, expr: parse_expr(p)}); - } - // Now that the the channel is the first argument to receive, - // combining it with an initializer doesn't really make sense. - // case (token::RECV) { - // p.bump(); - // ret some(rec(op = init_recv, - // expr = parse_expr(p))); - // } - _ { - ret none; - } - } -} - -fn parse_pats(p: parser) -> [@pat] { - let mut pats = []; - loop { - pats += [parse_pat(p)]; - if p.token == token::BINOP(token::OR) { p.bump(); } else { ret pats; } - }; -} - -fn parse_pat(p: parser) -> @pat { - let lo = p.span.lo; - let mut hi = p.span.hi; - let mut pat; - alt p.token { - token::UNDERSCORE { p.bump(); pat = pat_wild; } - token::AT { - p.bump(); - let sub = parse_pat(p); - pat = pat_box(sub); - hi = sub.span.hi; - } - token::TILDE { - p.bump(); - let sub = parse_pat(p); - pat = pat_uniq(sub); - hi = sub.span.hi; - } - token::LBRACE { - p.bump(); - let mut fields = []; - let mut etc = false; - let mut first = true; - while p.token != token::RBRACE { - if first { first = false; } else { expect(p, token::COMMA); } - - if p.token == token::UNDERSCORE { - p.bump(); - if p.token != token::RBRACE { - p.fatal("expecting }, found " + - token_to_str(p.reader, p.token)); - } - etc = true; - break; - } - - let lo1 = p.last_span.lo; - let fieldname = if p.look_ahead(1u) == token::COLON { - parse_ident(p) - } else { - parse_value_ident(p) - }; - let hi1 = p.last_span.lo; - let fieldpath = ast_util::ident_to_path(mk_sp(lo1, hi1), - fieldname); - let mut subpat; - if p.token == token::COLON { - p.bump(); - subpat = parse_pat(p); - } else { - subpat = @{id: p.get_id(), - node: pat_ident(fieldpath, none), - span: mk_sp(lo, hi)}; - } - fields += [{ident: fieldname, pat: subpat}]; - } - hi = p.span.hi; - p.bump(); - pat = pat_rec(fields, etc); - } - token::LPAREN { - p.bump(); - if p.token == token::RPAREN { - hi = p.span.hi; - p.bump(); - let lit = @{node: lit_nil, span: mk_sp(lo, hi)}; - let expr = mk_expr(p, lo, hi, expr_lit(lit)); - pat = pat_lit(expr); - } else { - let mut fields = [parse_pat(p)]; - while p.token == token::COMMA { - p.bump(); - fields += [parse_pat(p)]; - } - if vec::len(fields) == 1u { expect(p, token::COMMA); } - hi = p.span.hi; - expect(p, token::RPAREN); - pat = pat_tup(fields); - } - } - tok { - if !is_ident(tok) || is_keyword(p, "true") || is_keyword(p, "false") { - let val = parse_expr_res(p, RESTRICT_NO_BAR_OP); - if eat_keyword(p, "to") { - let end = parse_expr_res(p, RESTRICT_NO_BAR_OP); - hi = end.span.hi; - pat = pat_range(val, end); - } else { - hi = val.span.hi; - pat = pat_lit(val); - } - } else if is_plain_ident(p.token) && - alt p.look_ahead(1u) { - token::LPAREN | token::LBRACKET | token::LT { false } - _ { true } - } { - let name = parse_value_path(p); - let sub = if eat(p, token::AT) { some(parse_pat(p)) } - else { none }; - pat = pat_ident(name, sub); - } else { - let enum_path = parse_path_with_tps(p, true); - hi = enum_path.span.hi; - let mut args: [@pat] = []; 
- let mut star_pat = false; - alt p.token { - token::LPAREN { - alt p.look_ahead(1u) { - token::BINOP(token::STAR) { - // This is a "top constructor only" pat - p.bump(); p.bump(); - star_pat = true; - expect(p, token::RPAREN); - } - _ { - let a = - parse_seq(token::LPAREN, token::RPAREN, - seq_sep(token::COMMA), parse_pat, p); - args = a.node; - hi = a.span.hi; - } - } + token::BINOP(b) { + alt b { + token::MINUS { + self.bump(); + let e = self.to_expr(self.parse_prefix_expr()); + hi = e.span.hi; + self.get_id(); // see ast_util::op_expr_callee_id + ex = expr_unary(neg, e); } - _ { } + token::STAR { + self.bump(); + let e = self.to_expr(self.parse_prefix_expr()); + hi = e.span.hi; + ex = expr_unary(deref, e); + } + token::AND { + self.bump(); + let m = self.parse_mutability(); + let e = self.to_expr(self.parse_prefix_expr()); + hi = e.span.hi; + ex = expr_addr_of(m, e); + } + _ { ret self.parse_dot_or_call_expr(); } } - // at this point, we're not sure whether it's a enum or a bind - if star_pat { - pat = pat_enum(enum_path, none); - } - else if vec::is_empty(args) && - vec::len(enum_path.idents) == 1u { - pat = pat_ident(enum_path, none); - } - else { - pat = pat_enum(enum_path, some(args)); - } - } - } - } - ret @{id: p.get_id(), node: pat, span: mk_sp(lo, hi)}; -} - -fn parse_local(p: parser, is_mutbl: bool, - allow_init: bool) -> @local { - let lo = p.span.lo; - let pat = parse_pat(p); - let mut ty = @{id: p.get_id(), - node: ty_infer, - span: mk_sp(lo, lo)}; - if eat(p, token::COLON) { ty = parse_ty(p, false); } - let init = if allow_init { parse_initializer(p) } else { none }; - ret @spanned(lo, p.last_span.hi, - {is_mutbl: is_mutbl, ty: ty, pat: pat, - init: init, id: p.get_id()}); -} - -fn parse_let(p: parser) -> @decl { - let is_mutbl = eat_keyword(p, "mut"); - let lo = p.span.lo; - let mut locals = [parse_local(p, is_mutbl, true)]; - while eat(p, token::COMMA) { - locals += [parse_local(p, is_mutbl, true)]; - } - ret @spanned(lo, p.last_span.hi, decl_local(locals)); -} - -/* assumes "let" token has already been consumed */ -fn parse_instance_var(p:parser, pr: visibility) -> @class_member { - let mut is_mutbl = class_immutable; - let lo = p.span.lo; - if eat_keyword(p, "mut") { - is_mutbl = class_mutable; - } - if !is_plain_ident(p.token) { - p.fatal("expecting ident"); - } - let name = parse_ident(p); - expect(p, token::COLON); - let ty = parse_ty(p, false); - ret @{node: instance_var(name, ty, is_mutbl, p.get_id(), pr), - span: mk_sp(lo, p.last_span.hi)}; -} - -fn parse_stmt(p: parser, +first_item_attrs: [attribute]) -> @stmt { - fn check_expected_item(p: parser, current_attrs: [attribute]) { - // If we have attributes then we should have an item - if vec::is_not_empty(current_attrs) { - p.fatal("expected item"); - } - } - - let lo = p.span.lo; - if is_keyword(p, "let") { - check_expected_item(p, first_item_attrs); - expect_keyword(p, "let"); - let decl = parse_let(p); - ret @spanned(lo, decl.span.hi, stmt_decl(decl, p.get_id())); - } else { - let mut item_attrs; - alt parse_outer_attrs_or_ext(p, first_item_attrs) { - none { item_attrs = []; } - some(left(attrs)) { item_attrs = attrs; } - some(right(ext)) { - ret @spanned(lo, ext.span.hi, stmt_expr(ext, p.get_id())); } - } - - let item_attrs = first_item_attrs + item_attrs; - - alt parse_item(p, item_attrs, public) { - some(i) { - let mut hi = i.span.hi; - let decl = @spanned(lo, hi, decl_item(i)); - ret @spanned(lo, hi, stmt_decl(decl, p.get_id())); + token::AT { + self.bump(); + let m = self.parse_mutability(); + let e = 
self.to_expr(self.parse_prefix_expr()); + hi = e.span.hi; + ex = expr_unary(box(m), e); } - none() { /* fallthrough */ } + token::TILDE { + self.bump(); + let m = self.parse_mutability(); + let e = self.to_expr(self.parse_prefix_expr()); + hi = e.span.hi; + ex = expr_unary(uniq(m), e); + } + _ { ret self.parse_dot_or_call_expr(); } } - - check_expected_item(p, item_attrs); - - // Remainder are line-expr stmts. - let e = parse_expr_res(p, RESTRICT_STMT_EXPR); - ret @spanned(lo, e.span.hi, stmt_expr(e, p.get_id())); + ret self.mk_pexpr(lo, hi, ex); } -} -fn expr_is_complete(p: parser, e: pexpr) -> bool { - log(debug, ("expr_is_complete", p.restriction, - print::pprust::expr_to_str(*e), - classify::expr_requires_semi_to_be_stmt(*e))); - ret p.restriction == RESTRICT_STMT_EXPR && - !classify::expr_requires_semi_to_be_stmt(*e); -} -fn parse_block(p: parser) -> blk { - let (attrs, blk) = parse_inner_attrs_and_block(p, false); - assert vec::is_empty(attrs); - ret blk; -} + fn parse_binops() -> @expr { + ret self.parse_more_binops(self.parse_prefix_expr(), 0u); + } -fn parse_inner_attrs_and_block( - p: parser, parse_attrs: bool) -> ([attribute], blk) { + fn parse_more_binops(plhs: pexpr, min_prec: uint) -> + @expr { + let lhs = self.to_expr(plhs); + if self.expr_is_complete(plhs) { ret lhs; } + let peeked = self.token; + if peeked == token::BINOP(token::OR) && + self.restriction == RESTRICT_NO_BAR_OP { ret lhs; } + let cur_opt = token_to_binop(peeked); + alt cur_opt { + some(cur_op) { + let cur_prec = operator_prec(cur_op); + if cur_prec > min_prec { + self.bump(); + let expr = self.parse_prefix_expr(); + let rhs = self.parse_more_binops(expr, cur_prec); + self.get_id(); // see ast_util::op_expr_callee_id + let bin = self.mk_pexpr(lhs.span.lo, rhs.span.hi, + expr_binary(cur_op, lhs, rhs)); + ret self.parse_more_binops(bin, min_prec); + } + } + _ {} + } + if as_prec > min_prec && eat_keyword(self, "as") { + let rhs = self.parse_ty(true); + let _as = + self.mk_pexpr(lhs.span.lo, rhs.span.hi, expr_cast(lhs, rhs)); + ret self.parse_more_binops(_as, min_prec); + } + ret lhs; + } - fn maybe_parse_inner_attrs_and_next( - p: parser, parse_attrs: bool) -> - {inner: [attribute], next: [attribute]} { - if parse_attrs { - parse_inner_attrs_and_next(p) + fn parse_assign_expr() -> @expr { + let lo = self.span.lo; + let lhs = self.parse_binops(); + alt self.token { + token::EQ { + self.bump(); + let rhs = self.parse_expr(); + ret self.mk_expr(lo, rhs.span.hi, expr_assign(lhs, rhs)); + } + token::BINOPEQ(op) { + self.bump(); + let rhs = self.parse_expr(); + let mut aop; + alt op { + token::PLUS { aop = add; } + token::MINUS { aop = subtract; } + token::STAR { aop = mul; } + token::SLASH { aop = div; } + token::PERCENT { aop = rem; } + token::CARET { aop = bitxor; } + token::AND { aop = bitand; } + token::OR { aop = bitor; } + token::SHL { aop = shl; } + token::SHR { aop = shr; } + } + self.get_id(); // see ast_util::op_expr_callee_id + ret self.mk_expr(lo, rhs.span.hi, expr_assign_op(aop, lhs, rhs)); + } + token::LARROW { + self.bump(); + let rhs = self.parse_expr(); + ret self.mk_expr(lo, rhs.span.hi, expr_move(lhs, rhs)); + } + token::DARROW { + self.bump(); + let rhs = self.parse_expr(); + ret self.mk_expr(lo, rhs.span.hi, expr_swap(lhs, rhs)); + } + _ {/* fall through */ } + } + ret lhs; + } + + fn parse_if_expr_1() -> + {cond: @expr, + then: blk, + els: option<@expr>, + lo: uint, + hi: uint} { + let lo = self.last_span.lo; + let cond = self.parse_expr(); + let thn = self.parse_block(); + let mut els: 
option<@expr> = none; + let mut hi = thn.span.hi; + if eat_keyword(self, "else") { + let elexpr = self.parse_else_expr(); + els = some(elexpr); + hi = elexpr.span.hi; + } + ret {cond: cond, then: thn, els: els, lo: lo, hi: hi}; + } + + fn parse_if_expr() -> @expr { + if eat_keyword(self, "check") { + let q = self.parse_if_expr_1(); + ret self.mk_expr(q.lo, q.hi, + expr_if_check(q.cond, q.then, q.els)); } else { - {inner: [], next: []} + let q = self.parse_if_expr_1(); + ret self.mk_expr(q.lo, q.hi, expr_if(q.cond, q.then, q.els)); } } - let lo = p.span.lo; - if eat_keyword(p, "unchecked") { - expect(p, token::LBRACE); - let {inner, next} = maybe_parse_inner_attrs_and_next(p, parse_attrs); - ret (inner, parse_block_tail_(p, lo, unchecked_blk, next)); - } else if eat_keyword(p, "unsafe") { - expect(p, token::LBRACE); - let {inner, next} = maybe_parse_inner_attrs_and_next(p, parse_attrs); - ret (inner, parse_block_tail_(p, lo, unsafe_blk, next)); - } else { - expect(p, token::LBRACE); - let {inner, next} = maybe_parse_inner_attrs_and_next(p, parse_attrs); - ret (inner, parse_block_tail_(p, lo, default_blk, next)); - } -} + fn parse_fn_expr(proto: proto) -> @expr { + let lo = self.last_span.lo; -fn parse_block_no_value(p: parser) -> blk { - // We parse blocks that cannot have a value the same as any other block; - // the type checker will make sure that the tail expression (if any) has - // unit type. - ret parse_block(p); -} + let cc_old = self.parse_old_skool_capture_clause(); -// Precondition: already parsed the '{' or '#{' -// I guess that also means "already parsed the 'impure'" if -// necessary, and this should take a qualifier. -// some blocks start with "#{"... -fn parse_block_tail(p: parser, lo: uint, s: blk_check_mode) -> blk { - parse_block_tail_(p, lo, s, []) -} + // if we want to allow fn expression argument types to be inferred in + // the future, just have to change parse_arg to parse_fn_block_arg. 
+ let (decl, capture_clause) = + self.parse_fn_decl(impure_fn, + {|p| p.parse_arg_or_capture_item()}); -fn parse_block_tail_(p: parser, lo: uint, s: blk_check_mode, - +first_item_attrs: [attribute]) -> blk { - let mut stmts = []; - let mut expr = none; - let {attrs_remaining, view_items} = parse_view(p, first_item_attrs, true); - let mut initial_attrs = attrs_remaining; - - if p.token == token::RBRACE && !vec::is_empty(initial_attrs) { - p.fatal("expected item"); + let body = self.parse_block(); + ret self.mk_expr(lo, body.span.hi, + expr_fn(proto, decl, body, + @(*capture_clause + cc_old))); } - while p.token != token::RBRACE { - alt p.token { - token::SEMI { - p.bump(); // empty + fn parse_fn_block_expr() -> @expr { + let lo = self.last_span.lo; + let (decl, captures) = self.parse_fn_block_decl(); + let body = self.parse_block_tail(lo, default_blk); + ret self.mk_expr(lo, body.span.hi, + expr_fn_block(decl, body, captures)); + } + + fn parse_else_expr() -> @expr { + if eat_keyword(self, "if") { + ret self.parse_if_expr(); + } else { + let blk = self.parse_block(); + ret self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk)); + } + } + + fn parse_for_expr() -> @expr { + let lo = self.last_span; + let call = self.parse_expr_res(RESTRICT_STMT_EXPR); + alt call.node { + expr_call(f, args, true) { + let b_arg = vec::last(args); + let last = self.mk_expr(b_arg.span.lo, b_arg.span.hi, + expr_loop_body(b_arg)); + @{node: expr_call(f, vec::init(args) + [last], true) + with *call} } _ { - let stmt = parse_stmt(p, initial_attrs); - initial_attrs = []; - alt stmt.node { - stmt_expr(e, stmt_id) { // Expression without semicolon: - alt p.token { - token::SEMI { - p.bump(); - stmts += [@{node: stmt_semi(e, stmt_id) with *stmt}]; - } - token::RBRACE { - expr = some(e); - } - t { - if classify::stmt_ends_with_semi(*stmt) { - p.fatal("expected ';' or '}' after expression but \ - found '" + token_to_str(p.reader, t) + - "'"); - } - stmts += [stmt]; - } - } - } - - _ { // All other kinds of statements: - stmts += [stmt]; - - if classify::stmt_ends_with_semi(*stmt) { - expect(p, token::SEMI); - } - } - } + self.span_fatal(lo, "`for` must be followed by a block call"); } } } - let mut hi = p.span.hi; - p.bump(); - let bloc = {view_items: view_items, stmts: stmts, expr: expr, - id: p.get_id(), rules: s}; - ret spanned(lo, hi, bloc); -} -fn parse_ty_param(p: parser) -> ty_param { - let mut bounds = []; - let ident = parse_ident(p); - if eat(p, token::COLON) { - while p.token != token::COMMA && p.token != token::GT { - if eat_keyword(p, "send") { bounds += [bound_send]; } - else if eat_keyword(p, "copy") { bounds += [bound_copy]; } - else { bounds += [bound_iface(parse_ty(p, false))]; } - } + fn parse_while_expr() -> @expr { + let lo = self.last_span.lo; + let cond = self.parse_expr(); + let body = self.parse_block_no_value(); + let mut hi = body.span.hi; + ret self.mk_expr(lo, hi, expr_while(cond, body)); } - ret {ident: ident, id: p.get_id(), bounds: @bounds}; -} -fn parse_ty_params(p: parser) -> [ty_param] { - if eat(p, token::LT) { - parse_seq_to_gt(some(token::COMMA), parse_ty_param, p) - } else { [] } -} + fn parse_loop_expr() -> @expr { + let lo = self.last_span.lo; + let body = self.parse_block_no_value(); + let mut hi = body.span.hi; + ret self.mk_expr(lo, hi, expr_loop(body)); + } -// FIXME Remove after snapshot -fn parse_old_skool_capture_clause(p: parser) -> [capture_item] { - fn expect_opt_trailing_semi(p: parser) { - if !eat(p, token::SEMI) { - if p.token != token::RBRACKET { - p.fatal("expecting 
; or ]"); - } + fn parse_alt_expr() -> @expr { + let lo = self.last_span.lo; + let mode = if eat_keyword(self, "check") { alt_check } + else { alt_exhaustive }; + let discriminant = self.parse_expr(); + expect(self, token::LBRACE); + let mut arms: [arm] = []; + while self.token != token::RBRACE { + let pats = self.parse_pats(); + let mut guard = none; + if eat_keyword(self, "if") { guard = some(self.parse_expr()); } + let blk = self.parse_block(); + arms += [{pats: pats, guard: guard, body: blk}]; + } + let mut hi = self.span.hi; + self.bump(); + ret self.mk_expr(lo, hi, expr_alt(discriminant, arms, mode)); + } + + fn parse_expr() -> @expr { + ret self.parse_expr_res(UNRESTRICTED); + } + + fn parse_expr_or_hole() -> option<@expr> { + alt self.token { + token::UNDERSCORE { self.bump(); ret none; } + _ { ret some(self.parse_expr()); } } } - fn eat_ident_list(p: parser, is_move: bool) -> [capture_item] { - let mut res = []; + fn parse_expr_res(r: restriction) -> @expr { + let old = self.restriction; + self.restriction = r; + let e = self.parse_assign_expr(); + self.restriction = old; + ret e; + } + + fn parse_initializer() -> option<initializer> { + alt self.token { + token::EQ { + self.bump(); + ret some({op: init_assign, expr: self.parse_expr()}); + } + token::LARROW { + self.bump(); + ret some({op: init_move, expr: self.parse_expr()}); + } + // Now that the the channel is the first argument to receive, + // combining it with an initializer doesn't really make sense. + // case (token::RECV) { + // self.bump(); + // ret some(rec(op = init_recv, + // expr = self.parse_expr())); + // } + _ { + ret none; + } + } + } + + fn parse_pats() -> [@pat] { + let mut pats = []; loop { - alt p.token { - token::IDENT(_, _) { - let id = p.get_id(); - let sp = mk_sp(p.span.lo, p.span.hi); - let ident = parse_ident(p); - res += [{id:id, is_move: is_move, name:ident, span:sp}]; - if !eat(p, token::COMMA) { - ret res; - } - } - - _ { ret res; } - } + pats += [self.parse_pat()]; + if self.token == token::BINOP(token::OR) { self.bump(); } + else { ret pats; } }; } - let mut cap_items = []; + fn parse_pat() -> @pat { + let lo = self.span.lo; + let mut hi = self.span.hi; + let mut pat; + alt self.token { + token::UNDERSCORE { self.bump(); pat = pat_wild; } + token::AT { + self.bump(); + let sub = self.parse_pat(); + pat = pat_box(sub); + hi = sub.span.hi; + } + token::TILDE { + self.bump(); + let sub = self.parse_pat(); + pat = pat_uniq(sub); + hi = sub.span.hi; + } + token::LBRACE { + self.bump(); + let mut fields = []; + let mut etc = false; + let mut first = true; + while self.token != token::RBRACE { + if first { first = false; } + else { expect(self, token::COMMA); } - if eat(p, token::LBRACKET) { - while !eat(p, token::RBRACKET) { - if eat_keyword(p, "copy") { - cap_items += eat_ident_list(p, false); - expect_opt_trailing_semi(p); - } else if eat_keyword(p, "move") { - cap_items += eat_ident_list(p, true); - expect_opt_trailing_semi(p); + if self.token == token::UNDERSCORE { + self.bump(); + if self.token != token::RBRACE { + self.fatal("expecting }, found " + + token_to_str(self.reader, self.token)); + } + etc = true; + break; + } + + let lo1 = self.last_span.lo; + let fieldname = if self.look_ahead(1u) == token::COLON { + parse_ident(self) + } else { + parse_value_ident(self) + }; + let hi1 = self.last_span.lo; + let fieldpath = ast_util::ident_to_path(mk_sp(lo1, hi1), + fieldname); + let mut subpat; + if self.token == token::COLON { + self.bump(); + subpat = self.parse_pat(); + } else { + subpat = @{id: 
self.get_id(), + node: pat_ident(fieldpath, none), + span: mk_sp(lo, hi)}; + } + fields += [{ident: fieldname, pat: subpat}]; + } + hi = self.span.hi; + self.bump(); + pat = pat_rec(fields, etc); + } + token::LPAREN { + self.bump(); + if self.token == token::RPAREN { + hi = self.span.hi; + self.bump(); + let lit = @{node: lit_nil, span: mk_sp(lo, hi)}; + let expr = self.mk_expr(lo, hi, expr_lit(lit)); + pat = pat_lit(expr); } else { - let s: str = "expecting send, copy, or move clause"; - p.fatal(s); + let mut fields = [self.parse_pat()]; + while self.token == token::COMMA { + self.bump(); + fields += [self.parse_pat()]; + } + if vec::len(fields) == 1u { expect(self, token::COMMA); } + hi = self.span.hi; + expect(self, token::RPAREN); + pat = pat_tup(fields); + } + } + tok { + if !is_ident(tok) || is_keyword(self, "true") + || is_keyword(self, "false") { + let val = self.parse_expr_res(RESTRICT_NO_BAR_OP); + if eat_keyword(self, "to") { + let end = self.parse_expr_res(RESTRICT_NO_BAR_OP); + hi = end.span.hi; + pat = pat_range(val, end); + } else { + hi = val.span.hi; + pat = pat_lit(val); + } + } else if is_plain_ident(self.token) && + alt self.look_ahead(1u) { + token::LPAREN | token::LBRACKET | token::LT { false } + _ { true } + } { + let name = self.parse_value_path(); + let sub = if eat(self, token::AT) { some(self.parse_pat()) } + else { none }; + pat = pat_ident(name, sub); + } else { + let enum_path = self.parse_path_with_tps(true); + hi = enum_path.span.hi; + let mut args: [@pat] = []; + let mut star_pat = false; + alt self.token { + token::LPAREN { + alt self.look_ahead(1u) { + token::BINOP(token::STAR) { + // This is a "top constructor only" pat + self.bump(); self.bump(); + star_pat = true; + expect(self, token::RPAREN); + } + _ { + let a = parse_seq(token::LPAREN, token::RPAREN, + seq_sep(token::COMMA), self, + {|p| p.parse_pat()}); + args = a.node; + hi = a.span.hi; + } + } + } + _ { } + } + // at this point, we're not sure whether it's a enum or a bind + if star_pat { + pat = pat_enum(enum_path, none); + } + else if vec::is_empty(args) && + vec::len(enum_path.idents) == 1u { + pat = pat_ident(enum_path, none); + } + else { + pat = pat_enum(enum_path, some(args)); + } + } + } + } + ret @{id: self.get_id(), node: pat, span: mk_sp(lo, hi)}; + } + + fn parse_local(is_mutbl: bool, + allow_init: bool) -> @local { + let lo = self.span.lo; + let pat = self.parse_pat(); + let mut ty = @{id: self.get_id(), + node: ty_infer, + span: mk_sp(lo, lo)}; + if eat(self, token::COLON) { ty = self.parse_ty(false); } + let init = if allow_init { self.parse_initializer() } else { none }; + ret @spanned(lo, self.last_span.hi, + {is_mutbl: is_mutbl, ty: ty, pat: pat, + init: init, id: self.get_id()}); + } + + fn parse_let() -> @decl { + let is_mutbl = eat_keyword(self, "mut"); + let lo = self.span.lo; + let mut locals = [self.parse_local(is_mutbl, true)]; + while eat(self, token::COMMA) { + locals += [self.parse_local(is_mutbl, true)]; + } + ret @spanned(lo, self.last_span.hi, decl_local(locals)); + } + + /* assumes "let" token has already been consumed */ + fn parse_instance_var(pr: visibility) -> @class_member { + let mut is_mutbl = class_immutable; + let lo = self.span.lo; + if eat_keyword(self, "mut") { + is_mutbl = class_mutable; + } + if !is_plain_ident(self.token) { + self.fatal("expecting ident"); + } + let name = parse_ident(self); + expect(self, token::COLON); + let ty = self.parse_ty(false); + ret @{node: instance_var(name, ty, is_mutbl, self.get_id(), pr), + span: mk_sp(lo, 
self.last_span.hi)}; + } + + fn parse_stmt(+first_item_attrs: [attribute]) -> @stmt { + fn check_expected_item(p: parser, current_attrs: [attribute]) { + // If we have attributes then we should have an item + if vec::is_not_empty(current_attrs) { + p.fatal("expected item"); } } - } - ret cap_items; -} - -type arg_or_capture_item = either<arg, capture_item>; - - -fn parse_fn_decl(p: parser, purity: purity, - parse_arg_fn: fn(parser) -> arg_or_capture_item) - -> (fn_decl, capture_clause) { - - let args_or_capture_items: [arg_or_capture_item] = - parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA), - parse_arg_fn, p).node; - - let inputs = either::lefts(args_or_capture_items); - let capture_clause = @either::rights(args_or_capture_items); - - // Use the args list to translate each bound variable - // mentioned in a constraint to an arg index. - // Seems weird to do this in the parser, but I'm not sure how else to. - let mut constrs = []; - if p.token == token::COLON { - p.bump(); - constrs = parse_constrs({|x| parse_ty_constr(inputs, x) }, p); - } - let (ret_style, ret_ty) = parse_ret_ty(p); - ret ({inputs: inputs, - output: ret_ty, - purity: purity, - cf: ret_style, - constraints: constrs}, capture_clause); -} - -fn parse_fn_block_decl(p: parser) -> (fn_decl, capture_clause) { - let inputs_captures = { - if eat(p, token::OROR) { - [] + let lo = self.span.lo; + if is_keyword(self, "let") { + check_expected_item(self, first_item_attrs); + expect_keyword(self, "let"); + let decl = self.parse_let(); + ret @spanned(lo, decl.span.hi, stmt_decl(decl, self.get_id())); } else { - parse_seq(token::BINOP(token::OR), - token::BINOP(token::OR), - seq_sep(token::COMMA), - parse_fn_block_arg, p).node - } - }; - let output = if eat(p, token::RARROW) { - parse_ty(p, false) - } else { - @{id: p.get_id(), node: ty_infer, span: p.span} - }; - ret ({inputs: either::lefts(inputs_captures), - output: output, - purity: impure_fn, - cf: return_val, - constraints: []}, - @either::rights(inputs_captures)); -} - -fn parse_fn_header(p: parser) -> {ident: ident, tps: [ty_param]} { - let id = parse_value_ident(p); - let ty_params = parse_ty_params(p); - ret {ident: id, tps: ty_params}; -} - -fn mk_item(p: parser, lo: uint, hi: uint, +ident: ident, - +node: item_, vis: visibility, - +attrs: [attribute]) -> @item { - ret @{ident: ident, - attrs: attrs, - id: p.get_id(), - node: node, - vis: vis, - span: mk_sp(lo, hi)}; -} - -type item_info = (ident, item_, option<[attribute]>); - -fn parse_item_fn(p: parser, purity: purity) -> item_info { - let t = parse_fn_header(p); - let (decl, _) = parse_fn_decl(p, purity, parse_arg); - let (inner_attrs, body) = parse_inner_attrs_and_block(p, true); - (t.ident, item_fn(decl, t.tps, body), some(inner_attrs)) -} - -fn parse_method_name(p: parser) -> ident { - alt p.token { - token::BINOP(op) { p.bump(); token::binop_to_str(op) } - token::NOT { p.bump(); "!" 
} - token::LBRACKET { p.bump(); expect(p, token::RBRACKET); "[]" } - _ { - let id = parse_value_ident(p); - if id == "unary" && eat(p, token::BINOP(token::MINUS)) { "unary-" } - else { id } - } - } -} - -fn parse_method(p: parser, pr: visibility) -> @method { - let attrs = parse_outer_attributes(p); - let lo = p.span.lo, pur = parse_fn_purity(p); - let ident = parse_method_name(p); - let tps = parse_ty_params(p); - let (decl, _) = parse_fn_decl(p, pur, parse_arg); - let (inner_attrs, body) = parse_inner_attrs_and_block(p, true); - let attrs = attrs + inner_attrs; - @{ident: ident, attrs: attrs, tps: tps, decl: decl, body: body, - id: p.get_id(), span: mk_sp(lo, body.span.hi), - self_id: p.get_id(), vis: pr} -} - -fn parse_item_iface(p: parser) -> item_info { - let ident = parse_ident(p); - let rp = parse_region_param(p); - let tps = parse_ty_params(p); - let meths = parse_ty_methods(p); - (ident, item_iface(tps, rp, meths), none) -} - -// Parses three variants (with the region/type params always optional): -// impl /&<T: copy> of to_str for [T] { ... } -// impl name/&<T> of to_str for [T] { ... } -// impl name/&<T> for [T] { ... } -fn parse_item_impl(p: parser) -> item_info { - fn wrap_path(p: parser, pt: @path) -> @ty { - @{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span} - } - let mut (ident, rp, tps) = { - if p.token == token::LT { - (none, rp_none, parse_ty_params(p)) - } else if p.token == token::BINOP(token::SLASH) { - (none, parse_region_param(p), parse_ty_params(p)) - } - else if is_keyword(p, "of") { - (none, rp_none, []) - } else { - let id = parse_ident(p); - let rp = parse_region_param(p); - (some(id), rp, parse_ty_params(p)) - } - }; - let ifce = if eat_keyword(p, "of") { - let path = parse_path_with_tps(p, false); - if option::is_none(ident) { - ident = some(vec::last(path.idents)); - } - some(@{path: path, id: p.get_id()}) - } else { none }; - let ident = alt ident { - some(name) { name } - none { expect_keyword(p, "of"); fail; } - }; - expect_keyword(p, "for"); - let ty = parse_ty(p, false); - let mut meths = []; - expect(p, token::LBRACE); - while !eat(p, token::RBRACE) { meths += [parse_method(p, public)]; } - (ident, item_impl(tps, rp, ifce, ty, meths), none) -} - -fn parse_item_res(p: parser) -> item_info { - let ident = parse_value_ident(p); - let rp = parse_region_param(p); - let ty_params = parse_ty_params(p); - expect(p, token::LPAREN); - let arg_ident = parse_value_ident(p); - expect(p, token::COLON); - let t = parse_ty(p, false); - expect(p, token::RPAREN); - let dtor = parse_block_no_value(p); - let decl = { - inputs: [{mode: expl(by_ref), ty: t, - ident: arg_ident, id: p.get_id()}], - output: @{id: p.get_id(), node: ty_nil, - span: ast_util::dummy_sp()}, - purity: impure_fn, - cf: return_val, - constraints: [] - }; - (ident, item_res(decl, ty_params, dtor, - p.get_id(), p.get_id(), rp), none) -} - -// Instantiates ident <i> with references to <typarams> as arguments. Used to -// create a path that refers to a class which will be defined as the return -// type of the ctor function. -fn ident_to_path_tys(p: parser, i: ident, - rp: region_param, - typarams: [ty_param]) -> @path { - let s = p.last_span; - - // Hack. But then, this whole function is in service of a hack. 
- let a_r = alt rp { - rp_none { none } - rp_self { some(region_from_name(p, some("self"))) } - }; - - @{span: s, global: false, idents: [i], - rp: a_r, - types: vec::map(typarams, {|tp| - @{id: p.get_id(), - node: ty_path(ident_to_path(s, tp.ident), p.get_id()), - span: s}}) - } -} - -fn parse_iface_ref(p:parser) -> @iface_ref { - @{path: parse_path_with_tps(p, false), - id: p.get_id()} -} - -fn parse_iface_ref_list(p:parser) -> [@iface_ref] { - parse_seq_to_before_end(token::LBRACE, seq_sep(token::COMMA), - parse_iface_ref, p) -} - -fn parse_item_class(p: parser) -> item_info { - let class_name = parse_value_ident(p); - let rp = parse_region_param(p); - let ty_params = parse_ty_params(p); - let class_path = ident_to_path_tys(p, class_name, rp, ty_params); - let ifaces : [@iface_ref] = if eat_keyword(p, "implements") - { parse_iface_ref_list(p) } - else { [] }; - expect(p, token::LBRACE); - let mut ms: [@class_member] = []; - let ctor_id = p.get_id(); - let mut the_ctor : option<(fn_decl, blk, codemap::span)> = none; - let mut the_dtor : option<(blk, codemap::span)> = none; - while p.token != token::RBRACE { - alt parse_class_item(p, class_path) { - ctor_decl(a_fn_decl, blk, s) { - the_ctor = some((a_fn_decl, blk, s)); - } - dtor_decl(blk, s) { - the_dtor = some((blk, s)); - } - members(mms) { ms += mms; } - } - } - let actual_dtor = option::map(the_dtor) {|dtor| - let (d_body, d_s) = dtor; - {node: {id: p.get_id(), - self_id: p.get_id(), - body: d_body}, - span: d_s}}; - p.bump(); - alt the_ctor { - some((ct_d, ct_b, ct_s)) { - (class_name, - item_class(ty_params, ifaces, ms, { - node: {id: ctor_id, - self_id: p.get_id(), - dec: ct_d, - body: ct_b}, - span: ct_s}, actual_dtor, rp), - none) - } - /* - Is it strange for the parser to check this? - */ - none { - p.fatal("class with no ctor"); - } - } -} - -fn parse_single_class_item(p: parser, vis: visibility) - -> @class_member { - if eat_keyword(p, "let") { - let a_var = parse_instance_var(p, vis); - expect(p, token::SEMI); - ret a_var; - } - else { - let m = parse_method(p, vis); - ret @{node: class_method(m), span: m.span}; - } -} - -/* - So that we can distinguish a class ctor or dtor - from other class members - */ -enum class_contents { ctor_decl(fn_decl, blk, codemap::span), - dtor_decl(blk, codemap::span), - members([@class_member]) } - -fn parse_ctor(p: parser, result_ty: ast::ty_) -> class_contents { - // Can ctors/dtors have attrs? FIXME - let lo = p.last_span.lo; - let (decl_, _) = parse_fn_decl(p, impure_fn, parse_arg); - let decl = {output: @{id: p.get_id(), - node: result_ty, span: decl_.output.span} - with decl_}; - let body = parse_block(p); - ctor_decl(decl, body, mk_sp(lo, p.last_span.hi)) -} - -fn parse_dtor(p: parser) -> class_contents { - // Can ctors/dtors have attrs? 
FIXME - let lo = p.last_span.lo; - let body = parse_block(p); - dtor_decl(body, mk_sp(lo, p.last_span.hi)) -} - -fn parse_class_item(p:parser, class_name_with_tps: @path) - -> class_contents { - if eat_keyword(p, "new") { - // result type is always the type of the class - ret parse_ctor(p, ty_path(class_name_with_tps, - p.get_id())); - } - else if eat_keyword(p, "drop") { - ret parse_dtor(p); - } - else if eat_keyword(p, "priv") { - expect(p, token::LBRACE); - let mut results = []; - while p.token != token::RBRACE { - results += [parse_single_class_item(p, private)]; + let mut item_attrs; + alt parse_outer_attrs_or_ext(self, first_item_attrs) { + none { item_attrs = []; } + some(left(attrs)) { item_attrs = attrs; } + some(right(ext)) { + ret @spanned(lo, ext.span.hi, stmt_expr(ext, self.get_id())); + } } - p.bump(); - ret members(results); + + let item_attrs = first_item_attrs + item_attrs; + + alt self.parse_item(item_attrs, public) { + some(i) { + let mut hi = i.span.hi; + let decl = @spanned(lo, hi, decl_item(i)); + ret @spanned(lo, hi, stmt_decl(decl, self.get_id())); + } + none() { /* fallthrough */ } + } + + check_expected_item(self, item_attrs); + + // Remainder are line-expr stmts. + let e = self.parse_expr_res(RESTRICT_STMT_EXPR); + ret @spanned(lo, e.span.hi, stmt_expr(e, self.get_id())); + } + } + + fn expr_is_complete(e: pexpr) -> bool { + log(debug, ("expr_is_complete", self.restriction, + print::pprust::expr_to_str(*e), + classify::expr_requires_semi_to_be_stmt(*e))); + ret self.restriction == RESTRICT_STMT_EXPR && + !classify::expr_requires_semi_to_be_stmt(*e); + } + + fn parse_block() -> blk { + let (attrs, blk) = self.parse_inner_attrs_and_block(false); + assert vec::is_empty(attrs); + ret blk; + } + + fn parse_inner_attrs_and_block(parse_attrs: bool) -> ([attribute], blk) { + + fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) -> + {inner: [attribute], next: [attribute]} { + if parse_attrs { + parse_inner_attrs_and_next(p) + } else { + {inner: [], next: []} + } + } + + let lo = self.span.lo; + if eat_keyword(self, "unchecked") { + expect(self, token::LBRACE); + let {inner, next} = maybe_parse_inner_attrs_and_next(self, + parse_attrs); + ret (inner, self.parse_block_tail_(lo, unchecked_blk, next)); + } else if eat_keyword(self, "unsafe") { + expect(self, token::LBRACE); + let {inner, next} = maybe_parse_inner_attrs_and_next(self, + parse_attrs); + ret (inner, self.parse_block_tail_(lo, unsafe_blk, next)); + } else { + expect(self, token::LBRACE); + let {inner, next} = maybe_parse_inner_attrs_and_next(self, + parse_attrs); + ret (inner, self.parse_block_tail_(lo, default_blk, next)); + } + } + + fn parse_block_no_value() -> blk { + // We parse blocks that cannot have a value the same as any other + // block; the type checker will make sure that the tail expression (if + // any) has unit type. + ret self.parse_block(); + } + + // Precondition: already parsed the '{' or '#{' + // I guess that also means "already parsed the 'impure'" if + // necessary, and this should take a qualifier. + // some blocks start with "#{"... 
+ fn parse_block_tail(lo: uint, s: blk_check_mode) -> blk { + self.parse_block_tail_(lo, s, []) + } + + fn parse_block_tail_(lo: uint, s: blk_check_mode, + +first_item_attrs: [attribute]) -> blk { + let mut stmts = []; + let mut expr = none; + let {attrs_remaining, view_items} = + self.parse_view(first_item_attrs, true); + let mut initial_attrs = attrs_remaining; + + if self.token == token::RBRACE && !vec::is_empty(initial_attrs) { + self.fatal("expected item"); + } + + while self.token != token::RBRACE { + alt self.token { + token::SEMI { + self.bump(); // empty + } + _ { + let stmt = self.parse_stmt(initial_attrs); + initial_attrs = []; + alt stmt.node { + stmt_expr(e, stmt_id) { // Expression without semicolon: + alt self.token { + token::SEMI { + self.bump(); + stmts += [@{node: stmt_semi(e, stmt_id) with *stmt}]; + } + token::RBRACE { + expr = some(e); + } + t { + if classify::stmt_ends_with_semi(*stmt) { + self.fatal("expected ';' or '}' after expression \ + but found '" + + token_to_str(self.reader, t) + "'"); + } + stmts += [stmt]; + } + } + } + + _ { // All other kinds of statements: + stmts += [stmt]; + + if classify::stmt_ends_with_semi(*stmt) { + expect(self, token::SEMI); + } + } + } + } + } + } + let mut hi = self.span.hi; + self.bump(); + let bloc = {view_items: view_items, stmts: stmts, expr: expr, + id: self.get_id(), rules: s}; + ret spanned(lo, hi, bloc); + } + + fn parse_ty_param() -> ty_param { + let mut bounds = []; + let ident = parse_ident(self); + if eat(self, token::COLON) { + while self.token != token::COMMA && self.token != token::GT { + if eat_keyword(self, "send") { bounds += [bound_send]; } + else if eat_keyword(self, "copy") { bounds += [bound_copy]; } + else { bounds += [bound_iface(self.parse_ty(false))]; } + } + } + ret {ident: ident, id: self.get_id(), bounds: @bounds}; + } + + fn parse_ty_params() -> [ty_param] { + if eat(self, token::LT) { + parse_seq_to_gt(some(token::COMMA), self, + {|p| p.parse_ty_param()}) + } else { [] } + } + + // FIXME Remove after snapshot + fn parse_old_skool_capture_clause() -> [capture_item] { + fn expect_opt_trailing_semi(p: parser) { + if !eat(p, token::SEMI) { + if p.token != token::RBRACKET { + p.fatal("expecting ; or ]"); + } + } + } + + fn eat_ident_list(p: parser, is_move: bool) -> [capture_item] { + let mut res = []; + loop { + alt p.token { + token::IDENT(_, _) { + let id = p.get_id(); + let sp = mk_sp(p.span.lo, p.span.hi); + let ident = parse_ident(p); + res += [{id:id, is_move: is_move, name:ident, span:sp}]; + if !eat(p, token::COMMA) { + ret res; + } + } + + _ { ret res; } + } + }; + } + + let mut cap_items = []; + + if eat(self, token::LBRACKET) { + while !eat(self, token::RBRACKET) { + if eat_keyword(self, "copy") { + cap_items += eat_ident_list(self, false); + expect_opt_trailing_semi(self); + } else if eat_keyword(self, "move") { + cap_items += eat_ident_list(self, true); + expect_opt_trailing_semi(self); + } else { + let s: str = "expecting send, copy, or move clause"; + self.fatal(s); + } + } + } + + ret cap_items; + } + + fn parse_fn_decl(purity: purity, + parse_arg_fn: fn(parser) -> arg_or_capture_item) + -> (fn_decl, capture_clause) { + + let args_or_capture_items: [arg_or_capture_item] = + parse_seq(token::LPAREN, token::RPAREN, seq_sep(token::COMMA), + self, parse_arg_fn).node; + + let inputs = either::lefts(args_or_capture_items); + let capture_clause = @either::rights(args_or_capture_items); + + // Use the args list to translate each bound variable + // mentioned in a constraint to an arg index. 
+ // Seems weird to do this in the parser, but I'm not sure how else to. + let mut constrs = []; + if self.token == token::COLON { + self.bump(); + constrs = self.parse_constrs({|p| p.parse_ty_constr(inputs) }); + } + let (ret_style, ret_ty) = self.parse_ret_ty(); + ret ({inputs: inputs, + output: ret_ty, + purity: purity, + cf: ret_style, + constraints: constrs}, capture_clause); + } + + fn parse_fn_block_decl() -> (fn_decl, capture_clause) { + let inputs_captures = { + if eat(self, token::OROR) { + [] + } else { + parse_seq(token::BINOP(token::OR), token::BINOP(token::OR), + seq_sep(token::COMMA), self, + {|p| p.parse_fn_block_arg()}).node + } + }; + let output = if eat(self, token::RARROW) { + self.parse_ty(false) + } else { + @{id: self.get_id(), node: ty_infer, span: self.span} + }; + ret ({inputs: either::lefts(inputs_captures), + output: output, + purity: impure_fn, + cf: return_val, + constraints: []}, + @either::rights(inputs_captures)); + } + + fn parse_fn_header() -> {ident: ident, tps: [ty_param]} { + let id = parse_value_ident(self); + let ty_params = self.parse_ty_params(); + ret {ident: id, tps: ty_params}; + } + + fn mk_item(lo: uint, hi: uint, +ident: ident, + +node: item_, vis: visibility, + +attrs: [attribute]) -> @item { + ret @{ident: ident, + attrs: attrs, + id: self.get_id(), + node: node, + vis: vis, + span: mk_sp(lo, hi)}; + } + + fn parse_item_fn(purity: purity) -> item_info { + let t = self.parse_fn_header(); + let (decl, _) = self.parse_fn_decl(purity, {|p| p.parse_arg()}); + let (inner_attrs, body) = self.parse_inner_attrs_and_block(true); + (t.ident, item_fn(decl, t.tps, body), some(inner_attrs)) + } + + fn parse_method_name() -> ident { + alt self.token { + token::BINOP(op) { self.bump(); token::binop_to_str(op) } + token::NOT { self.bump(); "!" } + token::LBRACKET { self.bump(); expect(self, token::RBRACKET); "[]" } + _ { + let id = parse_value_ident(self); + if id == "unary" && eat(self, token::BINOP(token::MINUS)) { + "unary-" + } + else { id } + } + } + } + + fn parse_method(pr: visibility) -> @method { + let attrs = parse_outer_attributes(self); + let lo = self.span.lo, pur = self.parse_fn_purity(); + let ident = self.parse_method_name(); + let tps = self.parse_ty_params(); + let (decl, _) = self.parse_fn_decl(pur, {|p| p.parse_arg()}); + let (inner_attrs, body) = self.parse_inner_attrs_and_block(true); + let attrs = attrs + inner_attrs; + @{ident: ident, attrs: attrs, tps: tps, decl: decl, body: body, + id: self.get_id(), span: mk_sp(lo, body.span.hi), + self_id: self.get_id(), vis: pr} + } + + fn parse_item_iface() -> item_info { + let ident = parse_ident(self); + let rp = self.parse_region_param(); + let tps = self.parse_ty_params(); + let meths = self.parse_ty_methods(); + (ident, item_iface(tps, rp, meths), none) + } + + // Parses three variants (with the region/type params always optional): + // impl /&<T: copy> of to_str for [T] { ... } + // impl name/&<T> of to_str for [T] { ... } + // impl name/&<T> for [T] { ... 
} + fn parse_item_impl() -> item_info { + fn wrap_path(p: parser, pt: @path) -> @ty { + @{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span} + } + let mut (ident, rp, tps) = { + if self.token == token::LT { + (none, rp_none, self.parse_ty_params()) + } else if self.token == token::BINOP(token::SLASH) { + (none, self.parse_region_param(), self.parse_ty_params()) + } + else if is_keyword(self, "of") { + (none, rp_none, []) + } else { + let id = parse_ident(self); + let rp = self.parse_region_param(); + (some(id), rp, self.parse_ty_params()) + } + }; + let ifce = if eat_keyword(self, "of") { + let path = self.parse_path_with_tps(false); + if option::is_none(ident) { + ident = some(vec::last(path.idents)); + } + some(@{path: path, id: self.get_id()}) + } else { none }; + let ident = alt ident { + some(name) { name } + none { expect_keyword(self, "of"); fail; } + }; + expect_keyword(self, "for"); + let ty = self.parse_ty(false); + let mut meths = []; + expect(self, token::LBRACE); + while !eat(self, token::RBRACE) { + meths += [self.parse_method(public)]; + } + (ident, item_impl(tps, rp, ifce, ty, meths), none) + } + + fn parse_item_res() -> item_info { + let ident = parse_value_ident(self); + let rp = self.parse_region_param(); + let ty_params = self.parse_ty_params(); + expect(self, token::LPAREN); + let arg_ident = parse_value_ident(self); + expect(self, token::COLON); + let t = self.parse_ty(false); + expect(self, token::RPAREN); + let dtor = self.parse_block_no_value(); + let decl = { + inputs: [{mode: expl(by_ref), ty: t, + ident: arg_ident, id: self.get_id()}], + output: @{id: self.get_id(), node: ty_nil, + span: ast_util::dummy_sp()}, + purity: impure_fn, + cf: return_val, + constraints: [] + }; + (ident, item_res(decl, ty_params, dtor, + self.get_id(), self.get_id(), rp), none) + } + + // Instantiates ident <i> with references to <typarams> as arguments. + // Used to create a path that refers to a class which will be defined as + // the return type of the ctor function. + fn ident_to_path_tys(i: ident, + rp: region_param, + typarams: [ty_param]) -> @path { + let s = self.last_span; + + // Hack. But then, this whole function is in service of a hack. 
+ let a_r = alt rp { + rp_none { none } + rp_self { some(self.region_from_name(some("self"))) } + }; + + @{span: s, global: false, idents: [i], + rp: a_r, + types: vec::map(typarams, {|tp| + @{id: self.get_id(), + node: ty_path(ident_to_path(s, tp.ident), self.get_id()), + span: s}}) + } + } + + fn parse_iface_ref() -> @iface_ref { + @{path: self.parse_path_with_tps(false), + id: self.get_id()} + } + + fn parse_iface_ref_list() -> [@iface_ref] { + parse_seq_to_before_end(token::LBRACE, seq_sep(token::COMMA), self, + {|p| p.parse_iface_ref()}) + } + + fn parse_item_class() -> item_info { + let class_name = parse_value_ident(self); + let rp = self.parse_region_param(); + let ty_params = self.parse_ty_params(); + let class_path = self.ident_to_path_tys(class_name, rp, ty_params); + let ifaces : [@iface_ref] = if eat_keyword(self, "implements") + { self.parse_iface_ref_list() } + else { [] }; + expect(self, token::LBRACE); + let mut ms: [@class_member] = []; + let ctor_id = self.get_id(); + let mut the_ctor : option<(fn_decl, blk, codemap::span)> = none; + let mut the_dtor : option<(blk, codemap::span)> = none; + while self.token != token::RBRACE { + alt self.parse_class_item(class_path) { + ctor_decl(a_fn_decl, blk, s) { + the_ctor = some((a_fn_decl, blk, s)); + } + dtor_decl(blk, s) { + the_dtor = some((blk, s)); + } + members(mms) { ms += mms; } + } + } + let actual_dtor = option::map(the_dtor) {|dtor| + let (d_body, d_s) = dtor; + {node: {id: self.get_id(), + self_id: self.get_id(), + body: d_body}, + span: d_s}}; + self.bump(); + alt the_ctor { + some((ct_d, ct_b, ct_s)) { + (class_name, + item_class(ty_params, ifaces, ms, { + node: {id: ctor_id, + self_id: self.get_id(), + dec: ct_d, + body: ct_b}, + span: ct_s}, actual_dtor, rp), + none) + } + /* + Is it strange for the parser to check this? + */ + none { + self.fatal("class with no ctor"); + } + } + } + + fn parse_single_class_item(vis: visibility) + -> @class_member { + if eat_keyword(self, "let") { + let a_var = self.parse_instance_var(vis); + expect(self, token::SEMI); + ret a_var; + } + else { + let m = self.parse_method(vis); + ret @{node: class_method(m), span: m.span}; + } + } + + fn parse_ctor(result_ty: ast::ty_) -> class_contents { + // Can ctors/dtors have attrs? FIXME + let lo = self.last_span.lo; + let (decl_, _) = self.parse_fn_decl(impure_fn, {|p| p.parse_arg()}); + let decl = {output: @{id: self.get_id(), + node: result_ty, span: decl_.output.span} + with decl_}; + let body = self.parse_block(); + ctor_decl(decl, body, mk_sp(lo, self.last_span.hi)) + } + + fn parse_dtor() -> class_contents { + // Can ctors/dtors have attrs? 
FIXME + let lo = self.last_span.lo; + let body = self.parse_block(); + dtor_decl(body, mk_sp(lo, self.last_span.hi)) + } + + fn parse_class_item(class_name_with_tps: @path) + -> class_contents { + if eat_keyword(self, "new") { + // result type is always the type of the class + ret self.parse_ctor(ty_path(class_name_with_tps, + self.get_id())); + } + else if eat_keyword(self, "drop") { + ret self.parse_dtor(); + } + else if eat_keyword(self, "priv") { + expect(self, token::LBRACE); + let mut results = []; + while self.token != token::RBRACE { + results += [self.parse_single_class_item(private)]; + } + self.bump(); + ret members(results); } else { // Probably need to parse attrs - ret members([parse_single_class_item(p, public)]); + ret members([self.parse_single_class_item(public)]); } } -fn parse_visibility(p: parser, def: visibility) -> visibility { - if eat_keyword(p, "pub") { public } - else if eat_keyword(p, "priv") { private } - else { def } -} + fn parse_visibility(def: visibility) -> visibility { + if eat_keyword(self, "pub") { public } + else if eat_keyword(self, "priv") { private } + else { def } + } -fn parse_mod_items(p: parser, term: token::token, - +first_item_attrs: [attribute]) -> _mod { - // Shouldn't be any view items since we've already parsed an item attr - let {attrs_remaining, view_items} = - parse_view(p, first_item_attrs, false); - let mut items: [@item] = []; - let mut first = true; - while p.token != term { - let mut attrs = parse_outer_attributes(p); - if first { attrs = attrs_remaining + attrs; first = false; } - #debug["parse_mod_items: parse_item(attrs=%?)", attrs]; - let vis = parse_visibility(p, private); - alt parse_item(p, attrs, vis) { - some(i) { items += [i]; } + fn parse_mod_items(term: token::token, + +first_item_attrs: [attribute]) -> _mod { + // Shouldn't be any view items since we've already parsed an item attr + let {attrs_remaining, view_items} = + self.parse_view(first_item_attrs, false); + let mut items: [@item] = []; + let mut first = true; + while self.token != term { + let mut attrs = parse_outer_attributes(self); + if first { attrs = attrs_remaining + attrs; first = false; } + #debug["parse_mod_items: parse_item(attrs=%?)", attrs]; + let vis = self.parse_visibility(private); + alt self.parse_item(attrs, vis) { + some(i) { items += [i]; } + _ { + self.fatal("expected item but found '" + + token_to_str(self.reader, self.token) + "'"); + } + } + #debug["parse_mod_items: attrs=%?", attrs]; + } + + if first && attrs_remaining.len() > 0u { + // We parsed attributes for the first item but didn't find it + self.fatal("expected item"); + } + + ret {view_items: view_items, items: items}; + } + + fn parse_item_const() -> item_info { + let id = parse_value_ident(self); + expect(self, token::COLON); + let ty = self.parse_ty(false); + expect(self, token::EQ); + let e = self.parse_expr(); + expect(self, token::SEMI); + (id, item_const(ty, e), none) + } + + fn parse_item_mod() -> item_info { + let id = parse_ident(self); + expect(self, token::LBRACE); + let inner_attrs = parse_inner_attrs_and_next(self); + let m = self.parse_mod_items(token::RBRACE, inner_attrs.next); + expect(self, token::RBRACE); + (id, item_mod(m), some(inner_attrs.inner)) + } + + fn parse_item_native_fn(+attrs: [attribute], + purity: purity) -> @native_item { + let lo = self.last_span.lo; + let t = self.parse_fn_header(); + let (decl, _) = self.parse_fn_decl(purity, {|p| p.parse_arg()}); + let mut hi = self.span.hi; + expect(self, token::SEMI); + ret @{ident: t.ident, + attrs: attrs, + 
node: native_item_fn(decl, t.tps), + id: self.get_id(), + span: mk_sp(lo, hi)}; + } + + fn parse_fn_purity() -> purity { + if eat_keyword(self, "fn") { impure_fn } + else if eat_keyword(self, "pure") { + expect_keyword(self, "fn"); + pure_fn + } else if eat_keyword(self, "unsafe") { + expect_keyword(self, "fn"); + unsafe_fn + } + else { unexpected(self); } + } + + fn parse_native_item(+attrs: [attribute]) -> + @native_item { + self.parse_item_native_fn(attrs, self.parse_fn_purity()) + } + + fn parse_native_mod_items(+first_item_attrs: [attribute]) -> + native_mod { + // Shouldn't be any view items since we've already parsed an item attr + let {attrs_remaining, view_items} = + self.parse_view(first_item_attrs, false); + let mut items: [@native_item] = []; + let mut initial_attrs = attrs_remaining; + while self.token != token::RBRACE { + let attrs = initial_attrs + parse_outer_attributes(self); + initial_attrs = []; + items += [self.parse_native_item(attrs)]; + } + ret {view_items: view_items, + items: items}; + } + + fn parse_item_native_mod() -> item_info { + expect_keyword(self, "mod"); + let id = parse_ident(self); + expect(self, token::LBRACE); + let more_attrs = parse_inner_attrs_and_next(self); + let m = self.parse_native_mod_items(more_attrs.next); + expect(self, token::RBRACE); + (id, item_native_mod(m), some(more_attrs.inner)) + } + + fn parse_type_decl() -> {lo: uint, ident: ident} { + let lo = self.last_span.lo; + let id = parse_ident(self); + ret {lo: lo, ident: id}; + } + + fn parse_item_type() -> item_info { + let t = self.parse_type_decl(); + let rp = self.parse_region_param(); + let tps = self.parse_ty_params(); + expect(self, token::EQ); + let ty = self.parse_ty(false); + expect(self, token::SEMI); + (t.ident, item_ty(ty, tps, rp), none) + } + + fn parse_region_param() -> region_param { + if eat(self, token::BINOP(token::SLASH)) { + expect(self, token::BINOP(token::AND)); + rp_self + } else { + rp_none + } + } + + fn parse_item_enum(default_vis: visibility) -> item_info { + let id = parse_ident(self); + let rp = self.parse_region_param(); + let ty_params = self.parse_ty_params(); + let mut variants: [variant] = []; + // Newtype syntax + if self.token == token::EQ { + check_restricted_keywords_(self, id); + self.bump(); + let ty = self.parse_ty(false); + expect(self, token::SEMI); + let variant = + spanned(ty.span.lo, ty.span.hi, + {name: id, + attrs: [], + args: [{ty: ty, id: self.get_id()}], + id: self.get_id(), + disr_expr: none, + vis: public}); + ret (id, item_enum([variant], ty_params, rp), none); + } + expect(self, token::LBRACE); + + let mut all_nullary = true, have_disr = false; + + while self.token != token::RBRACE { + let variant_attrs = parse_outer_attributes(self); + let vlo = self.span.lo; + let vis = self.parse_visibility(default_vis); + let ident = parse_value_ident(self); + let mut args = [], disr_expr = none; + if self.token == token::LPAREN { + all_nullary = false; + let arg_tys = parse_seq(token::LPAREN, token::RPAREN, + seq_sep(token::COMMA), self, + {|p| p.parse_ty(false)}); + for arg_tys.node.each {|ty| + args += [{ty: ty, id: self.get_id()}]; + } + } else if eat(self, token::EQ) { + have_disr = true; + disr_expr = some(self.parse_expr()); + } + + let vr = {name: ident, attrs: variant_attrs, + args: args, id: self.get_id(), + disr_expr: disr_expr, vis: vis}; + variants += [spanned(vlo, self.last_span.hi, vr)]; + + if !eat(self, token::COMMA) { break; } + } + expect(self, token::RBRACE); + if (have_disr && !all_nullary) { + self.fatal("discriminator 
values can only be used with a c-like \ + enum"); + } + (id, item_enum(variants, ty_params, rp), none) + } + + fn parse_fn_ty_proto() -> proto { + alt self.token { + token::AT { + self.bump(); + proto_box + } + token::TILDE { + self.bump(); + proto_uniq + } + token::BINOP(token::AND) { + self.bump(); + proto_block + } _ { - p.fatal("expected item but found '" + - token_to_str(p.reader, p.token) + "'"); + proto_any } } - #debug["parse_mod_items: attrs=%?", attrs]; } - if first && attrs_remaining.len() > 0u { - // We parsed attributes for the first item but didn't find the item - p.fatal("expected item"); + fn fn_expr_lookahead(tok: token::token) -> bool { + alt tok { + token::LPAREN | token::AT | token::TILDE | token::BINOP(_) { + true + } + _ { + false + } + } } - ret {view_items: view_items, items: items}; -} - -fn parse_item_const(p: parser) -> item_info { - let id = parse_value_ident(p); - expect(p, token::COLON); - let ty = parse_ty(p, false); - expect(p, token::EQ); - let e = parse_expr(p); - expect(p, token::SEMI); - (id, item_const(ty, e), none) -} - -fn parse_item_mod(p: parser) -> item_info { - let id = parse_ident(p); - expect(p, token::LBRACE); - let inner_attrs = parse_inner_attrs_and_next(p); - let m = parse_mod_items(p, token::RBRACE, inner_attrs.next); - expect(p, token::RBRACE); - (id, item_mod(m), some(inner_attrs.inner)) -} - -fn parse_item_native_fn(p: parser, +attrs: [attribute], - purity: purity) -> @native_item { - let lo = p.last_span.lo; - let t = parse_fn_header(p); - let (decl, _) = parse_fn_decl(p, purity, parse_arg); - let mut hi = p.span.hi; - expect(p, token::SEMI); - ret @{ident: t.ident, - attrs: attrs, - node: native_item_fn(decl, t.tps), - id: p.get_id(), - span: mk_sp(lo, hi)}; -} - -fn parse_fn_purity(p: parser) -> purity { - if eat_keyword(p, "fn") { impure_fn } - else if eat_keyword(p, "pure") { expect_keyword(p, "fn"); pure_fn } - else if eat_keyword(p, "unsafe") { - expect_keyword(p, "fn"); - unsafe_fn + fn parse_item(+attrs: [attribute], vis: visibility) + -> option<@item> { + let lo = self.span.lo; + let (ident, item_, extra_attrs) = if eat_keyword(self, "const") { + self.parse_item_const() + } else if is_keyword(self, "fn") && + !self.fn_expr_lookahead(self.look_ahead(1u)) { + self.bump(); + self.parse_item_fn(impure_fn) + } else if eat_keyword(self, "pure") { + expect_keyword(self, "fn"); + self.parse_item_fn(pure_fn) + } else if is_keyword(self, "unsafe") + && self.look_ahead(1u) != token::LBRACE { + self.bump(); + expect_keyword(self, "fn"); + self.parse_item_fn(unsafe_fn) + } else if eat_keyword(self, "crust") { + expect_keyword(self, "fn"); + self.parse_item_fn(crust_fn) + } else if eat_keyword(self, "mod") { + self.parse_item_mod() + } else if eat_keyword(self, "native") { + self.parse_item_native_mod() + } else if eat_keyword(self, "type") { + self.parse_item_type() + } else if eat_keyword(self, "enum") { + self.parse_item_enum(vis) + } else if eat_keyword(self, "iface") { + self.parse_item_iface() + } else if eat_keyword(self, "impl") { + self.parse_item_impl() + } else if eat_keyword(self, "resource") { + self.parse_item_res() + } else if eat_keyword(self, "class") { + self.parse_item_class() + } else { ret none; }; + some(self.mk_item(lo, self.last_span.hi, ident, item_, vis, + alt extra_attrs { + some(as) { attrs + as } + none { attrs } + })) } - else { unexpected(p); } -} -fn parse_native_item(p: parser, +attrs: [attribute]) -> - @native_item { - parse_item_native_fn(p, attrs, parse_fn_purity(p)) -} - -fn parse_native_mod_items(p: 
parser, +first_item_attrs: [attribute]) -> - native_mod { - // Shouldn't be any view items since we've already parsed an item attr - let {attrs_remaining, view_items} = - parse_view(p, first_item_attrs, false); - let mut items: [@native_item] = []; - let mut initial_attrs = attrs_remaining; - while p.token != token::RBRACE { - let attrs = initial_attrs + parse_outer_attributes(p); - initial_attrs = []; - items += [parse_native_item(p, attrs)]; + fn parse_use() -> view_item_ { + let ident = parse_ident(self); + let metadata = parse_optional_meta(self); + ret view_item_use(ident, metadata, self.get_id()); } - ret {view_items: view_items, - items: items}; -} -fn parse_item_native_mod(p: parser) -> item_info { - expect_keyword(p, "mod"); - let id = parse_ident(p); - expect(p, token::LBRACE); - let more_attrs = parse_inner_attrs_and_next(p); - let m = parse_native_mod_items(p, more_attrs.next); - expect(p, token::RBRACE); - (id, item_native_mod(m), some(more_attrs.inner)) -} - -fn parse_type_decl(p: parser) -> {lo: uint, ident: ident} { - let lo = p.last_span.lo; - let id = parse_ident(p); - ret {lo: lo, ident: id}; -} - -fn parse_item_type(p: parser) -> item_info { - let t = parse_type_decl(p); - let rp = parse_region_param(p); - let tps = parse_ty_params(p); - expect(p, token::EQ); - let ty = parse_ty(p, false); - expect(p, token::SEMI); - (t.ident, item_ty(ty, tps, rp), none) -} - -fn parse_region_param(p: parser) -> region_param { - if eat(p, token::BINOP(token::SLASH)) { - expect(p, token::BINOP(token::AND)); - rp_self - } else { - rp_none - } -} - -fn parse_item_enum(p: parser, default_vis: visibility) -> item_info { - let id = parse_ident(p); - let rp = parse_region_param(p); - let ty_params = parse_ty_params(p); - let mut variants: [variant] = []; - // Newtype syntax - if p.token == token::EQ { - check_restricted_keywords_(p, id); - p.bump(); - let ty = parse_ty(p, false); - expect(p, token::SEMI); - let variant = - spanned(ty.span.lo, ty.span.hi, - {name: id, - attrs: [], - args: [{ty: ty, id: p.get_id()}], - id: p.get_id(), - disr_expr: none, - vis: public}); - ret (id, item_enum([variant], ty_params, rp), none); - } - expect(p, token::LBRACE); - - let mut all_nullary = true, have_disr = false; - - while p.token != token::RBRACE { - let variant_attrs = parse_outer_attributes(p); - let vlo = p.span.lo; - let vis = parse_visibility(p, default_vis); - let ident = parse_value_ident(p); - let mut args = [], disr_expr = none; - if p.token == token::LPAREN { - all_nullary = false; - let arg_tys = parse_seq(token::LPAREN, token::RPAREN, - seq_sep(token::COMMA), - {|p| parse_ty(p, false)}, p); - for arg_tys.node.each {|ty| - args += [{ty: ty, id: p.get_id()}]; + fn parse_view_path() -> @view_path { + let lo = self.span.lo; + let first_ident = parse_ident(self); + let mut path = [first_ident]; + #debug("parsed view_path: %s", first_ident); + alt self.token { + token::EQ { + // x = foo::bar + self.bump(); + path = [parse_ident(self)]; + while self.token == token::MOD_SEP { + self.bump(); + let id = parse_ident(self); + path += [id]; } - } else if eat(p, token::EQ) { - have_disr = true; - disr_expr = some(parse_expr(p)); + let path = @{span: mk_sp(lo, self.span.hi), global: false, + idents: path, rp: none, types: []}; + ret @spanned(lo, self.span.hi, + view_path_simple(first_ident, path, self.get_id())); + } + + token::MOD_SEP { + // foo::bar or foo::{a,b,c} or foo::* + while self.token == token::MOD_SEP { + self.bump(); + + alt self.token { + + token::IDENT(i, _) { + self.bump(); + path += 
[self.get_str(i)]; + } + + // foo::bar::{a,b,c} + token::LBRACE { + let idents = + parse_seq(token::LBRACE, token::RBRACE, + seq_sep(token::COMMA), self, + {|p| parse_path_list_ident(p)}).node; + let path = @{span: mk_sp(lo, self.span.hi), + global: false, idents: path, + rp: none, types: []}; + ret @spanned(lo, self.span.hi, + view_path_list(path, idents, self.get_id())); + } + + // foo::bar::* + token::BINOP(token::STAR) { + self.bump(); + let path = @{span: mk_sp(lo, self.span.hi), + global: false, idents: path, + rp: none, types: []}; + ret @spanned(lo, self.span.hi, + view_path_glob(path, self.get_id())); + } + + _ { break; } + } + } + } + _ { } } - - let vr = {name: ident, attrs: variant_attrs, - args: args, id: p.get_id(), - disr_expr: disr_expr, vis: vis}; - variants += [spanned(vlo, p.last_span.hi, vr)]; - - if !eat(p, token::COMMA) { break; } - } - expect(p, token::RBRACE); - if (have_disr && !all_nullary) { - p.fatal("discriminator values can only be used with a c-like enum"); - } - (id, item_enum(variants, ty_params, rp), none) -} - -fn parse_fn_ty_proto(p: parser) -> proto { - alt p.token { - token::AT { - p.bump(); - proto_box - } - token::TILDE { - p.bump(); - proto_uniq - } - token::BINOP(token::AND) { - p.bump(); - proto_block - } - _ { - proto_any - } - } -} - -fn fn_expr_lookahead(tok: token::token) -> bool { - alt tok { - token::LPAREN | token::AT | token::TILDE | token::BINOP(_) { - true - } - _ { - false - } - } -} - -fn parse_item(p: parser, +attrs: [attribute], vis: visibility) - -> option<@item> { - let lo = p.span.lo; - let (ident, item_, extra_attrs) = if eat_keyword(p, "const") { - parse_item_const(p) - } else if is_keyword(p, "fn") && !fn_expr_lookahead(p.look_ahead(1u)) { - p.bump(); - parse_item_fn(p, impure_fn) - } else if eat_keyword(p, "pure") { - expect_keyword(p, "fn"); - parse_item_fn(p, pure_fn) - } else if is_keyword(p, "unsafe") && p.look_ahead(1u) != token::LBRACE { - p.bump(); - expect_keyword(p, "fn"); - parse_item_fn(p, unsafe_fn) - } else if eat_keyword(p, "crust") { - expect_keyword(p, "fn"); - parse_item_fn(p, crust_fn) - } else if eat_keyword(p, "mod") { - parse_item_mod(p) - } else if eat_keyword(p, "native") { - parse_item_native_mod(p) - } else if eat_keyword(p, "type") { - parse_item_type(p) - } else if eat_keyword(p, "enum") { - parse_item_enum(p, vis) - } else if eat_keyword(p, "iface") { - parse_item_iface(p) - } else if eat_keyword(p, "impl") { - parse_item_impl(p) - } else if eat_keyword(p, "resource") { - parse_item_res(p) - } else if eat_keyword(p, "class") { - parse_item_class(p) - } else { ret none; }; - some(mk_item(p, lo, p.last_span.hi, ident, item_, vis, - alt extra_attrs { - some(as) { attrs + as } - none { attrs } - })) -} - -fn parse_use(p: parser) -> view_item_ { - let ident = parse_ident(p); - let metadata = parse_optional_meta(p); - ret view_item_use(ident, metadata, p.get_id()); -} - -fn parse_view_path(p: parser) -> @view_path { - let lo = p.span.lo; - let first_ident = parse_ident(p); - let mut path = [first_ident]; - #debug("parsed view_path: %s", first_ident); - alt p.token { - token::EQ { - // x = foo::bar - p.bump(); - path = [parse_ident(p)]; - while p.token == token::MOD_SEP { - p.bump(); - let id = parse_ident(p); - path += [id]; - } - let path = @{span: mk_sp(lo, p.span.hi), global: false, + let last = path[vec::len(path) - 1u]; + let path = @{span: mk_sp(lo, self.span.hi), global: false, idents: path, rp: none, types: []}; - ret @spanned(lo, p.span.hi, - view_path_simple(first_ident, path, p.get_id())); - } 
+ ret @spanned(lo, self.span.hi, + view_path_simple(last, path, self.get_id())); + } - token::MOD_SEP { - // foo::bar or foo::{a,b,c} or foo::* - while p.token == token::MOD_SEP { - p.bump(); + fn parse_view_paths() -> [@view_path] { + let mut vp = [self.parse_view_path()]; + while self.token == token::COMMA { + self.bump(); + vp += [self.parse_view_path()]; + } + ret vp; + } - alt p.token { + fn is_view_item() -> bool { + let tok = if !is_keyword(self, "pub") && !is_keyword(self, "priv") { + self.token + } else { self.look_ahead(1u) }; + token_is_keyword(self, "use", tok) + || token_is_keyword(self, "import", tok) + || token_is_keyword(self, "export", tok) + } - token::IDENT(i, _) { - p.bump(); - path += [p.get_str(i)]; + fn parse_view_item(+attrs: [attribute]) -> @view_item { + let lo = self.span.lo, vis = self.parse_visibility(private); + let node = if eat_keyword(self, "use") { + self.parse_use() + } else if eat_keyword(self, "import") { + view_item_import(self.parse_view_paths()) + } else if eat_keyword(self, "export") { + view_item_export(self.parse_view_paths()) + } else { fail; }; + expect(self, token::SEMI); + @{node: node, attrs: attrs, + vis: vis, span: mk_sp(lo, self.last_span.hi)} + } + + fn parse_view(+first_item_attrs: [attribute], + only_imports: bool) -> {attrs_remaining: [attribute], + view_items: [@view_item]} { + let mut attrs = first_item_attrs + parse_outer_attributes(self); + let mut items = []; + while if only_imports { is_keyword(self, "import") } + else { self.is_view_item() } { + items += [self.parse_view_item(attrs)]; + attrs = parse_outer_attributes(self); + } + {attrs_remaining: attrs, view_items: items} + } + + // Parses a source module as a crate + fn parse_crate_mod(_cfg: crate_cfg) -> @crate { + let lo = self.span.lo; + let crate_attrs = parse_inner_attrs_and_next(self); + let first_item_outer_attrs = crate_attrs.next; + let m = self.parse_mod_items(token::EOF, first_item_outer_attrs); + ret @spanned(lo, self.span.lo, + {directives: [], + module: m, + attrs: crate_attrs.inner, + config: self.cfg}); + } + + fn parse_str() -> str { + alt self.token { + token::LIT_STR(s) { self.bump(); self.get_str(s) } + _ { + self.fatal("expected string literal") + } + } + } + + // Logic for parsing crate files (.rc) + // + // Each crate file is a sequence of directives. + // + // Each directive imperatively extends its environment with 0 or more + // items. + fn parse_crate_directive(first_outer_attr: [attribute]) -> + crate_directive { + + // Collect the next attributes + let outer_attrs = first_outer_attr + parse_outer_attributes(self); + // In a crate file outer attributes are only going to apply to mods + let expect_mod = vec::len(outer_attrs) > 0u; + + let lo = self.span.lo; + if expect_mod || is_keyword(self, "mod") { + expect_keyword(self, "mod"); + let id = parse_ident(self); + alt self.token { + // mod x = "foo.rs"; + token::SEMI { + let mut hi = self.span.hi; + self.bump(); + ret spanned(lo, hi, cdir_src_mod(id, outer_attrs)); } - - // foo::bar::{a,b,c} + // mod x = "foo_dir" { ...directives... 
} token::LBRACE { - let idents = - parse_seq(token::LBRACE, token::RBRACE, - seq_sep(token::COMMA), - parse_path_list_ident, p).node; - let path = @{span: mk_sp(lo, p.span.hi), - global: false, idents: path, - rp: none, types: []}; - ret @spanned(lo, p.span.hi, - view_path_list(path, idents, p.get_id())); + self.bump(); + let inner_attrs = parse_inner_attrs_and_next(self); + let mod_attrs = outer_attrs + inner_attrs.inner; + let next_outer_attr = inner_attrs.next; + let cdirs = self.parse_crate_directives(token::RBRACE, + next_outer_attr); + let mut hi = self.span.hi; + expect(self, token::RBRACE); + ret spanned(lo, hi, + cdir_dir_mod(id, cdirs, mod_attrs)); } - - // foo::bar::* - token::BINOP(token::STAR) { - p.bump(); - let path = @{span: mk_sp(lo, p.span.hi), - global: false, idents: path, - rp: none, types: []}; - ret @spanned(lo, p.span.hi, - view_path_glob(path, p.get_id())); - } - - _ { break; } + _ { unexpected(self); } } + } else if self.is_view_item() { + let vi = self.parse_view_item(outer_attrs); + ret spanned(lo, vi.span.hi, cdir_view_item(vi)); + } else { ret self.fatal("expected crate directive"); } + } + + fn parse_crate_directives(term: token::token, + first_outer_attr: [attribute]) -> + [@crate_directive] { + + // This is pretty ugly. If we have an outer attribute then we can't + // accept seeing the terminator next, so if we do see it then fail the + // same way parse_crate_directive would + if vec::len(first_outer_attr) > 0u && self.token == term { + expect_keyword(self, "mod"); } - } - _ { } - } - let last = path[vec::len(path) - 1u]; - let path = @{span: mk_sp(lo, p.span.hi), global: false, - idents: path, rp: none, types: []}; - ret @spanned(lo, p.span.hi, - view_path_simple(last, path, p.get_id())); -} -fn parse_view_paths(p: parser) -> [@view_path] { - let mut vp = [parse_view_path(p)]; - while p.token == token::COMMA { - p.bump(); - vp += [parse_view_path(p)]; - } - ret vp; -} - -fn is_view_item(p: parser) -> bool { - let tok = if !is_keyword(p, "pub") && !is_keyword(p, "priv") { p.token } - else { p.look_ahead(1u) }; - token_is_keyword(p, "use", tok) || token_is_keyword(p, "import", tok) || - token_is_keyword(p, "export", tok) -} - -fn parse_view_item(p: parser, +attrs: [attribute]) -> @view_item { - let lo = p.span.lo, vis = parse_visibility(p, private); - let node = if eat_keyword(p, "use") { - parse_use(p) - } else if eat_keyword(p, "import") { - view_item_import(parse_view_paths(p)) - } else if eat_keyword(p, "export") { - view_item_export(parse_view_paths(p)) - } else { fail; }; - expect(p, token::SEMI); - @{node: node, attrs: attrs, - vis: vis, span: mk_sp(lo, p.last_span.hi)} -} - -fn parse_view(p: parser, +first_item_attrs: [attribute], - only_imports: bool) -> {attrs_remaining: [attribute], - view_items: [@view_item]} { - let mut attrs = first_item_attrs + parse_outer_attributes(p); - let mut items = []; - while if only_imports { is_keyword(p, "import") } - else { is_view_item(p) } { - items += [parse_view_item(p, attrs)]; - attrs = parse_outer_attributes(p); - } - {attrs_remaining: attrs, view_items: items} -} - -// Parses a source module as a crate -fn parse_crate_mod(p: parser, _cfg: crate_cfg) -> @crate { - let lo = p.span.lo; - let crate_attrs = parse_inner_attrs_and_next(p); - let first_item_outer_attrs = crate_attrs.next; - let m = parse_mod_items(p, token::EOF, first_item_outer_attrs); - ret @spanned(lo, p.span.lo, - {directives: [], - module: m, - attrs: crate_attrs.inner, - config: p.cfg}); -} - -fn parse_str(p: parser) -> str { - alt p.token { 
- token::LIT_STR(s) { p.bump(); p.get_str(s) } - _ { - p.fatal("expected string literal") - } - } -} - -// Logic for parsing crate files (.rc) -// -// Each crate file is a sequence of directives. -// -// Each directive imperatively extends its environment with 0 or more items. -fn parse_crate_directive(p: parser, first_outer_attr: [attribute]) -> - crate_directive { - - // Collect the next attributes - let outer_attrs = first_outer_attr + parse_outer_attributes(p); - // In a crate file outer attributes are only going to apply to mods - let expect_mod = vec::len(outer_attrs) > 0u; - - let lo = p.span.lo; - if expect_mod || is_keyword(p, "mod") { - expect_keyword(p, "mod"); - let id = parse_ident(p); - alt p.token { - // mod x = "foo.rs"; - token::SEMI { - let mut hi = p.span.hi; - p.bump(); - ret spanned(lo, hi, cdir_src_mod(id, outer_attrs)); - } - // mod x = "foo_dir" { ...directives... } - token::LBRACE { - p.bump(); - let inner_attrs = parse_inner_attrs_and_next(p); - let mod_attrs = outer_attrs + inner_attrs.inner; - let next_outer_attr = inner_attrs.next; - let cdirs = - parse_crate_directives(p, token::RBRACE, next_outer_attr); - let mut hi = p.span.hi; - expect(p, token::RBRACE); - ret spanned(lo, hi, - cdir_dir_mod(id, cdirs, mod_attrs)); - } - _ { unexpected(p); } + let mut cdirs: [@crate_directive] = []; + let mut first_outer_attr = first_outer_attr; + while self.token != term { + let cdir = @self.parse_crate_directive(first_outer_attr); + cdirs += [cdir]; + first_outer_attr = []; } - } else if is_view_item(p) { - let vi = parse_view_item(p, outer_attrs); - ret spanned(lo, vi.span.hi, cdir_view_item(vi)); - } else { ret p.fatal("expected crate directive"); } -} - -fn parse_crate_directives(p: parser, term: token::token, - first_outer_attr: [attribute]) -> - [@crate_directive] { - - // This is pretty ugly. If we have an outer attribute then we can't accept - // seeing the terminator next, so if we do see it then fail the same way - // parse_crate_directive would - if vec::len(first_outer_attr) > 0u && p.token == term { - expect_keyword(p, "mod"); + ret cdirs; } - - let mut cdirs: [@crate_directive] = []; - let mut first_outer_attr = first_outer_attr; - while p.token != term { - let cdir = @parse_crate_directive(p, first_outer_attr); - cdirs += [cdir]; - first_outer_attr = []; - } - ret cdirs; } - // // Local Variables: // mode: rust
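Editorial note on the hunks above (not part of the patch): every change in this file applies the same mechanical refactoring. A free parsing function of the form fn parse_foo(p: parser, ...) moves into the parser impl, and each call site changes from parse_foo(p, args) or parser::parse_foo(p, args) to self.parse_foo(args) / p.parse_foo(args). The sketch below restates that before/after shape in modern Rust purely for illustration; Parser, Token, and parse_ident are simplified stand-ins invented for this note, not the librustsyntax API.

// Illustration only: simplified stand-ins, not the real librustsyntax types.
enum Token {
    Ident(String),
    Eof,
}

struct Parser {
    tokens: Vec<Token>,
    pos: usize,
}

// Before the change: a free function that threads the parser through
// explicitly, called as parse_ident(&mut p).
fn parse_ident(p: &mut Parser) -> Option<String> {
    match p.tokens.get(p.pos) {
        Some(Token::Ident(name)) => {
            p.pos += 1;
            Some(name.clone())
        }
        _ => None,
    }
}

// After the change: the same body moved into an inherent impl, so call
// sites read p.parse_ident() and the body uses self instead of p.
impl Parser {
    fn parse_ident(&mut self) -> Option<String> {
        match self.tokens.get(self.pos) {
            Some(Token::Ident(name)) => {
                self.pos += 1;
                Some(name.clone())
            }
            _ => None,
        }
    }
}

fn main() {
    let mut p = Parser {
        tokens: vec![Token::Ident("foo".to_string()), Token::Eof],
        pos: 0,
    };
    // Both spellings produce the same result; the patch rewrites call
    // sites from the free-function form to the method form.
    assert_eq!(p.parse_ident(), Some("foo".to_string()));

    p.pos = 0;
    assert_eq!(parse_ident(&mut p), Some("foo".to_string()));
    println!("free function and method parse the same token");
}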