Add session, span tracking, error reporting, and the beginning of a function to parse an item, to rustc.
parent b90e6b93c1
commit eb90be7798
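
In outline, this commit adds a pos record and a span record to util.common, a session object whose span_err prints "filename:lo.line:lo.col:hi.line:hi.col: error: msg" and then fails, a lexer that remembers a marked start position for the token being read, and a parser that carries the session plus lo/hi positions for the current token. Below is a minimal sketch of the two records and the error format in today's Rust, for orientation only; the diff itself is in the 2010 pre-bootstrap dialect, and the panic stands in for the original log-then-fail:

    #[derive(Clone, Copy)]
    struct Pos { line: usize, col: usize }

    struct Span { filename: String, lo: Pos, hi: Pos }

    // Mirrors what session.span_err assembles by hand in the new file below;
    // the original concatenates the pieces manually because #fmt does not exist yet.
    fn span_err(sp: &Span, msg: &str) -> ! {
        panic!("{}:{}:{}:{}:{}: error: {}",
               sp.filename, sp.lo.line, sp.lo.col, sp.hi.line, sp.hi.col, msg);
    }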
@@ -25,9 +25,10 @@ fn main(vec[str] args) {
    log "You want rustboot, the compiler next door.";

    auto i = 0;
    auto sess = session.session();
    for (str filename in args) {
        if (i > 0) {
            auto p = parser.new_parser(filename);
            auto p = parser.new_parser(sess, filename);
            log "opened file: " + filename;
            auto tok = p.peek();
            while (true) {
src/comp/driver/session.rs (normal file, 37 lines added)
@@ -0,0 +1,37 @@
import util.common.span;
import std._uint;

io obj session() {
    io fn span_err(span sp, str msg) {
        let str s = sp.filename;
        s += ':' as u8;
        // We really need #fmt soon!
        s += _uint.to_str(sp.lo.line, 10u);
        s += ':' as u8;
        s += _uint.to_str(sp.lo.col, 10u);
        s += ':' as u8;
        s += _uint.to_str(sp.hi.line, 10u);
        s += ':' as u8;
        s += _uint.to_str(sp.hi.col, 10u);
        s += ": error: ";
        s += msg;
        log s;
        fail;
    }

    io fn err(str msg) {
        let str s = "error: ";
        s += msg;
        log s;
        fail;
    }
}


// Local Variables:
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// compile-command: "make -k -C ../.. 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
// End:
@@ -1,13 +1,14 @@

import std.util.option;
import std.map.hashmap;
import util.common.span;

type ident = str;

type crate = rec( str filename,
                  _mod module);

type block = vec[stmt];
type block = vec[@stmt];

type stmt = tag( stmt_block(block),
                 stmt_decl(@decl),
@@ -20,7 +21,7 @@ type lval = tag( lval_ident(ident),
                 lval_ext(@lval, ident),
                 lval_idx(@lval, @atom) );

type atom = tag( atom_lit(lit));
type atom = tag( atom_lit(@lit), atom_lval(@lval) );

type lit = tag( lit_char(char),
                lit_int(int),
@@ -32,7 +33,7 @@ type ty = tag( ty_nil(),
               ty_int(),
               ty_char() );

type mode = tag( local(), alias() );
type mode = tag( val(), alias() );

type slot = rec(ty ty, mode mode);

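The ast changes above box the nodes that blocks and atoms refer to (vec[@stmt], atom_lit(@lit)), add an atom_lval case, and rename the local mode to val. As a rough modern-Rust picture of the boxing, and only that (@T was a managed pointer, closer in spirit to Rc<T> than Box<T>; the variant set is trimmed to what the hunks show):

    use std::rc::Rc;

    enum Lit { Char(char), Int(i64) }
    enum Lval { Ident(String), Ext(Rc<Lval>, String), Idx(Rc<Lval>, Rc<Atom>) }
    enum Atom { Lit(Rc<Lit>), Lval(Rc<Lval>) }
    enum Stmt { Block(Block), Decl }
    type Block = Vec<Rc<Stmt>>;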
@@ -10,33 +10,46 @@ fn new_str_hash[V]() -> map.hashmap[str,V] {
    ret map.mk_hashmap[str,V](hasher, eqer);
}

type reader = obj {
    fn is_eof() -> bool;
    fn curr() -> char;
    fn next() -> char;
    fn bump();
    fn get_curr_pos() -> tup(str,uint,uint);
    fn get_keywords() -> hashmap[str,token.token];
    fn get_reserved() -> hashmap[str,()];
state type reader = state obj {
    fn is_eof() -> bool;
    fn curr() -> char;
    fn next() -> char;
    state fn bump();
    state fn mark();
    fn get_filename() -> str;
    fn get_mark_pos() -> common.pos;
    fn get_curr_pos() -> common.pos;
    fn get_keywords() -> hashmap[str,token.token];
    fn get_reserved() -> hashmap[str,()];
};

fn new_reader(stdio_reader rdr, str filename) -> reader
{
    obj reader(stdio_reader rdr,
               str filename,
               mutable char c,
               mutable char n,
               mutable uint line,
               mutable uint col,
               hashmap[str,token.token] keywords,
               hashmap[str,()] reserved)
    {
    state obj reader(stdio_reader rdr,
                     str filename,
                     mutable char c,
                     mutable char n,
                     mutable uint mark_line,
                     mutable uint mark_col,
                     mutable uint line,
                     mutable uint col,
                     hashmap[str,token.token] keywords,
                     hashmap[str,()] reserved) {

        fn is_eof() -> bool {
            ret c == (-1) as char;
        }

        fn get_curr_pos() -> tup(str,uint,uint) {
            ret tup(filename, line, col);
        fn get_curr_pos() -> common.pos {
            ret rec(line=line, col=col);
        }

        fn get_mark_pos() -> common.pos {
            ret rec(line=mark_line, col=mark_col);
        }

        fn get_filename() -> str {
            ret filename;
        }

        fn curr() -> char {
@@ -47,7 +60,7 @@ fn new_reader(stdio_reader rdr, str filename) -> reader
            ret n;
        }

        fn bump() {
        state fn bump() {
            c = n;

            if (c == (-1) as char) {
@@ -56,7 +69,7 @@ fn new_reader(stdio_reader rdr, str filename) -> reader

            if (c == '\n') {
                line += 1u;
                col = 0u;
                col = 1u;
            } else {
                col += 1u;
            }
@@ -64,6 +77,11 @@ fn new_reader(stdio_reader rdr, str filename) -> reader
            n = rdr.getc() as char;
        }

        state fn mark() {
            mark_line = line;
            mark_col = col;
        }

        fn get_keywords() -> hashmap[str,token.token] {
            ret keywords;
        }
@@ -171,7 +189,7 @@ fn new_reader(stdio_reader rdr, str filename) -> reader
    keywords.insert("f64", token.MACH(common.ty_f64()));

    ret reader(rdr, filename, rdr.getc() as char, rdr.getc() as char,
               1u, 1u, keywords, reserved);
               1u, 1u, 1u, 1u, keywords, reserved);
}


@@ -229,14 +247,14 @@ fn is_whitespace(char c) -> bool {
    ret c == ' ' || c == '\t' || c == '\r' || c == '\n';
}

fn consume_any_whitespace(reader rdr) {
state fn consume_any_whitespace(reader rdr) {
    while (is_whitespace(rdr.curr())) {
        rdr.bump();
    }
    be consume_any_line_comment(rdr);
}

fn consume_any_line_comment(reader rdr) {
state fn consume_any_line_comment(reader rdr) {
    if (rdr.curr() == '/') {
        alt (rdr.next()) {
            case ('/') {
@@ -259,7 +277,7 @@ fn consume_any_line_comment(reader rdr) {
}


fn consume_block_comment(reader rdr) {
state fn consume_block_comment(reader rdr) {
    let int level = 1;
    while (level > 0) {
        if (rdr.curr() == '/' && rdr.next() == '*') {
@@ -280,7 +298,7 @@ fn consume_block_comment(reader rdr) {
    be consume_any_whitespace(rdr);
}

fn next_token(reader rdr) -> token.token {
state fn next_token(reader rdr) -> token.token {
    auto accum_str = "";
    auto accum_int = 0;

@@ -341,8 +359,7 @@ fn next_token(reader rdr) -> token.token {
        ret token.LIT_INT(accum_int);
    }


fn binop(reader rdr, token.binop op) -> token.token {
state fn binop(reader rdr, token.binop op) -> token.token {
    rdr.bump();
    if (rdr.next() == '=') {
        rdr.bump();
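The reader changes above are the foundation of span tracking: bump() keeps line and col up to date (with col now restarting at 1 rather than 0 after a newline), while the new mark() snapshots the current position into mark_line/mark_col so a later get_mark_pos()/get_curr_pos() pair brackets a token. Here is a minimal sketch of that mechanism in today's Rust; field and method names mirror the diff, the character handling is elided, and the call site of mark() is outside the hunks shown:

    #[derive(Clone, Copy, Debug)]
    struct Pos { line: usize, col: usize }

    struct Reader {
        line: usize,       // advanced by bump() as characters are consumed
        col: usize,        // reset to 1 after a newline
        mark_line: usize,  // snapshot taken by mark()
        mark_col: usize,
    }

    impl Reader {
        fn mark(&mut self) {
            self.mark_line = self.line;
            self.mark_col = self.col;
        }
        fn get_mark_pos(&self) -> Pos { Pos { line: self.mark_line, col: self.mark_col } }
        fn get_curr_pos(&self) -> Pos { Pos { line: self.line, col: self.col } }
    }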
@@ -1,25 +1,94 @@
import std._io;
import driver.session;
import util.common;

state type parser =
    state obj {
        state fn peek() -> token.token;
        state fn bump();
        io fn err(str s);
        fn get_session() -> session.session;
        fn get_span() -> common.span;
    };

fn new_parser(str path) -> parser {
    state obj stdio_parser(mutable token.token tok,
state fn new_parser(session.session sess, str path) -> parser {
    state obj stdio_parser(session.session sess,
                           mutable token.token tok,
                           mutable common.pos lo,
                           mutable common.pos hi,
                           lexer.reader rdr)
    {
        state fn peek() -> token.token {
            ret tok;
        }

        state fn bump() {
            tok = lexer.next_token(rdr);
            lo = rdr.get_mark_pos();
            hi = rdr.get_curr_pos();
        }

        io fn err(str m) {
            auto span = rec(filename = rdr.get_filename(),
                            lo = lo, hi = hi);
            sess.span_err(span, m);
        }

        fn get_session() -> session.session {
            ret sess;
        }

        fn get_span() -> common.span {
            ret rec(filename = rdr.get_filename(),
                    lo = lo, hi = hi);
        }
    }
    auto srdr = _io.new_stdio_reader(path);
    auto rdr = lexer.new_reader(srdr, path);
    ret stdio_parser(lexer.next_token(rdr), rdr);
    auto npos = rdr.get_curr_pos();
    ret stdio_parser(sess, lexer.next_token(rdr), npos, npos, rdr);
}

state fn expect(parser p, token.token t) {
    // FIXME: comparing tags would be good. One of these days.
    if (true /* p.peek() == t */) {
        p.bump();
    } else {
        let str s = "expecting ";
        s += token.to_str(t);
        s += ", found ";
        s += token.to_str(t);
        p.err(s);
    }
}

state fn parse_ident(parser p) -> ast.ident {
    alt (p.peek()) {
        case (token.IDENT(i)) { ret i; }
        case (_) {
            p.err("expecting ident");
            fail;
        }
    }
}

state fn parse_item(parser p) -> tup(ast.ident, ast.item) {
    alt (p.peek()) {
        case (token.FN()) {
            p.bump();
            auto id = parse_ident(p);
            expect(p, token.LPAREN());
            let vec[rec(ast.slot slot, ast.ident ident)] inputs = vec();
            let vec[@ast.stmt] body = vec();
            auto output = rec(ty = ast.ty_nil(), mode = ast.val() );
            let ast._fn f = rec(inputs = inputs,
                                output = output,
                                body = body);
            ret tup(id, ast.item_fn(@f));
        }
    }
    p.err("expecting item");
    fail;
}

//
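Taken together, the new parser threads the session through construction, refreshes lo and hi from the reader on every bump(), and routes err() through sess.span_err with a span built from the reader's filename and those two positions. A condensed sketch of that wiring in today's Rust follows; all type names here are hypothetical stand-ins for the objects in the diff, and bump() is fed its token directly instead of calling the lexer:

    #[derive(Clone, Copy)]
    struct Pos { line: usize, col: usize }
    struct Span { filename: String, lo: Pos, hi: Pos }

    struct Session;
    impl Session {
        fn span_err(&self, sp: &Span, msg: &str) -> ! {
            panic!("{}:{}:{}:{}:{}: error: {}",
                   sp.filename, sp.lo.line, sp.lo.col, sp.hi.line, sp.hi.col, msg);
        }
    }

    // Stand-ins for the lexer pieces from the earlier hunks.
    struct Reader { filename: String, mark: Pos, curr: Pos }
    impl Reader {
        fn get_filename(&self) -> String { self.filename.clone() }
        fn get_mark_pos(&self) -> Pos { self.mark }
        fn get_curr_pos(&self) -> Pos { self.curr }
    }

    enum Token { Fn, Ident(String), Eof }

    struct Parser { sess: Session, tok: Token, lo: Pos, hi: Pos, rdr: Reader }
    impl Parser {
        fn bump(&mut self, next: Token) {
            // In the diff this is tok = lexer.next_token(rdr).
            self.tok = next;
            self.lo = self.rdr.get_mark_pos();
            self.hi = self.rdr.get_curr_pos();
        }
        fn err(&self, msg: &str) -> ! {
            let span = Span { filename: self.rdr.get_filename(), lo: self.lo, hi: self.hi };
            self.sess.span_err(&span, msg)
        }
    }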
@@ -12,6 +12,7 @@ mod fe {

mod driver {
    mod rustc;
    mod session;
}

mod util {
@@ -1,3 +1,7 @@
import std._uint;

type pos = rec(uint line, uint col);
type span = rec(str filename, pos lo, pos hi);

type ty_mach = tag( ty_i8(), ty_i16(), ty_i32(), ty_i64(),
                    ty_u8(), ty_u16(), ty_u32(), ty_u64(),