Commit: remove peek_token from StringReader
This commit is contained in:
parent: 830ff4a592
commit: e9dc95c86e
|
@@ -268,7 +268,7 @@ fn read_block_comment(rdr: &mut StringReader<'_>,
|
||||||
while level > 0 {
|
while level > 0 {
|
||||||
debug!("=== block comment level {}", level);
|
debug!("=== block comment level {}", level);
|
||||||
if rdr.is_eof() {
|
if rdr.is_eof() {
|
||||||
rdr.fatal("unterminated block comment").raise();
|
rdr.fatal_span_(rdr.pos, rdr.pos, "unterminated block comment").raise();
|
||||||
}
|
}
|
||||||
if rdr.ch_is('\n') {
|
if rdr.ch_is('\n') {
|
||||||
trim_whitespace_prefix_and_push_line(&mut lines, curr_line, col);
|
trim_whitespace_prefix_and_push_line(&mut lines, curr_line, col);
|
||||||
|
|
|
@@ -39,7 +39,6 @@ pub struct StringReader<'a> {
|
||||||
/// Stop reading src at this index.
|
/// Stop reading src at this index.
|
||||||
crate end_src_index: usize,
|
crate end_src_index: usize,
|
||||||
// cached:
|
// cached:
|
||||||
peek_token: Token,
|
|
||||||
peek_span_src_raw: Span,
|
peek_span_src_raw: Span,
|
||||||
fatal_errs: Vec<DiagnosticBuilder<'a>>,
|
fatal_errs: Vec<DiagnosticBuilder<'a>>,
|
||||||
// cache a direct reference to the source text, so that we don't have to
|
// cache a direct reference to the source text, so that we don't have to
|
||||||
|
@@ -78,9 +77,7 @@ impl<'a> StringReader<'a> {
|
||||||
/// Returns the next token. EFFECT: advances the string_reader.
|
/// Returns the next token. EFFECT: advances the string_reader.
|
||||||
pub fn try_next_token(&mut self) -> Result<Token, ()> {
|
pub fn try_next_token(&mut self) -> Result<Token, ()> {
|
||||||
assert!(self.fatal_errs.is_empty());
|
assert!(self.fatal_errs.is_empty());
|
||||||
let ret_val = self.peek_token.take();
|
self.advance_token()
|
||||||
self.advance_token()?;
|
|
||||||
Ok(ret_val)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn try_real_token(&mut self) -> Result<Token, ()> {
|
fn try_real_token(&mut self) -> Result<Token, ()> {
|
||||||
|
@@ -120,10 +117,6 @@ impl<'a> StringReader<'a> {
|
||||||
FatalError.raise();
|
FatalError.raise();
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fatal(&self, m: &str) -> FatalError {
|
|
||||||
self.fatal_span(self.peek_token.span, m)
|
|
||||||
}
|
|
||||||
|
|
||||||
crate fn emit_fatal_errors(&mut self) {
|
crate fn emit_fatal_errors(&mut self) {
|
||||||
for err in &mut self.fatal_errs {
|
for err in &mut self.fatal_errs {
|
||||||
err.emit();
|
err.emit();
|
||||||
|
@@ -169,7 +162,6 @@ impl<'a> StringReader<'a> {
|
||||||
ch: Some('\n'),
|
ch: Some('\n'),
|
||||||
source_file,
|
source_file,
|
||||||
end_src_index: src.len(),
|
end_src_index: src.len(),
|
||||||
peek_token: Token::dummy(),
|
|
||||||
peek_span_src_raw: syntax_pos::DUMMY_SP,
|
peek_span_src_raw: syntax_pos::DUMMY_SP,
|
||||||
src,
|
src,
|
||||||
fatal_errs: Vec::new(),
|
fatal_errs: Vec::new(),
|
||||||
|
@@ -267,11 +259,11 @@ impl<'a> StringReader<'a> {
|
||||||
|
|
||||||
/// Advance peek_token to refer to the next token, and
|
/// Advance peek_token to refer to the next token, and
|
||||||
/// possibly update the interner.
|
/// possibly update the interner.
|
||||||
fn advance_token(&mut self) -> Result<(), ()> {
|
fn advance_token(&mut self) -> Result<Token, ()> {
|
||||||
match self.scan_whitespace_or_comment() {
|
match self.scan_whitespace_or_comment() {
|
||||||
Some(comment) => {
|
Some(comment) => {
|
||||||
self.peek_span_src_raw = comment.span;
|
self.peek_span_src_raw = comment.span;
|
||||||
self.peek_token = comment;
|
Ok(comment)
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
let (kind, start_pos, end_pos) = if self.is_eof() {
|
let (kind, start_pos, end_pos) = if self.is_eof() {
|
||||||
|
@@ -281,12 +273,10 @@ impl<'a> StringReader<'a> {
|
||||||
(self.next_token_inner()?, start_pos, self.pos)
|
(self.next_token_inner()?, start_pos, self.pos)
|
||||||
};
|
};
|
||||||
let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
|
let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
|
||||||
self.peek_token = Token::new(kind, real);
|
|
||||||
self.peek_span_src_raw = raw;
|
self.peek_span_src_raw = raw;
|
||||||
|
Ok(Token::new(kind, real))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
|
@@ -1484,17 +1474,17 @@ mod tests {
|
||||||
assert_eq!(tok1.kind, tok2.kind);
|
assert_eq!(tok1.kind, tok2.kind);
|
||||||
assert_eq!(tok1.span, tok2.span);
|
assert_eq!(tok1.span, tok2.span);
|
||||||
assert_eq!(string_reader.next_token(), token::Whitespace);
|
assert_eq!(string_reader.next_token(), token::Whitespace);
|
||||||
// the 'main' id is already read:
|
|
||||||
assert_eq!(string_reader.pos.clone(), BytePos(28));
|
|
||||||
// read another token:
|
// read another token:
|
||||||
let tok3 = string_reader.next_token();
|
let tok3 = string_reader.next_token();
|
||||||
|
assert_eq!(string_reader.pos.clone(), BytePos(28));
|
||||||
let tok4 = Token::new(
|
let tok4 = Token::new(
|
||||||
mk_ident("main"),
|
mk_ident("main"),
|
||||||
Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
|
Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
|
||||||
);
|
);
|
||||||
assert_eq!(tok3.kind, tok4.kind);
|
assert_eq!(tok3.kind, tok4.kind);
|
||||||
assert_eq!(tok3.span, tok4.span);
|
assert_eq!(tok3.span, tok4.span);
|
||||||
// the lparen is already read:
|
|
||||||
|
assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
|
||||||
assert_eq!(string_reader.pos.clone(), BytePos(29))
|
assert_eq!(string_reader.pos.clone(), BytePos(29))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in New Issue