From 256df83f642ff3cfff82b266edc7d9bbe3fd2ecc Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Wed, 3 Jul 2019 12:52:22 +0300
Subject: [PATCH] remove peek_span_src_raw from StringReader

---
 src/librustc_save_analysis/span_utils.rs |  2 +-
 src/libsyntax/parse/lexer/mod.rs         | 98 +++++++++---------
 src/libsyntax/parse/lexer/tokentrees.rs  | 15 ++--
 3 files changed, 46 insertions(+), 69 deletions(-)

diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs
index 8905f475647..fb9919d777d 100644
--- a/src/librustc_save_analysis/span_utils.rs
+++ b/src/librustc_save_analysis/span_utils.rs
@@ -53,7 +53,7 @@ impl<'a> SpanUtils<'a> {
     pub fn sub_span_of_token(&self, span: Span, tok: TokenKind) -> Option<Span> {
         let mut toks = self.retokenise_span(span);
         loop {
-            let next = toks.real_token();
+            let next = toks.next_token();
             if next == token::Eof {
                 return None;
             }
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index a24c72ecc24..8b43b88fbac 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -38,8 +38,6 @@ pub struct StringReader<'a> {
     crate source_file: Lrc<syntax_pos::SourceFile>,
     /// Stop reading src at this index.
     crate end_src_index: usize,
-    // cached:
-    peek_span_src_raw: Span,
     fatal_errs: Vec<DiagnosticBuilder<'a>>,
     // cache a direct reference to the source text, so that we don't have to
     // retrieve it via `self.source_file.src.as_ref().unwrap()` all the time.
@@ -59,7 +57,7 @@ impl<'a> StringReader<'a> {
         (real, raw)
     }
 
-    fn unwrap_or_abort(&mut self, res: Result<Token, ()>) -> Token {
+    fn unwrap_or_abort<T>(&mut self, res: Result<T, ()>) -> T {
         match res {
             Ok(tok) => tok,
             Err(_) => {
@@ -69,36 +67,52 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    fn next_token(&mut self) -> Token where Self: Sized {
+    /// Returns the next token. EFFECT: advances the string_reader.
+    pub fn try_next_token(&mut self) -> Result<Token, ()> {
+        let (token, _raw_span) = self.try_next_token_with_raw_span()?;
+        Ok(token)
+    }
+
+    pub fn next_token(&mut self) -> Token {
         let res = self.try_next_token();
         self.unwrap_or_abort(res)
     }
 
-    /// Returns the next token. EFFECT: advances the string_reader.
-    pub fn try_next_token(&mut self) -> Result<Token, ()> {
-        assert!(self.fatal_errs.is_empty());
-        self.advance_token()
-    }
-
-    fn try_real_token(&mut self) -> Result<Token, ()> {
-        let mut t = self.try_next_token()?;
+    fn try_real_token(&mut self) -> Result<(Token, Span), ()> {
         loop {
-            match t.kind {
-                token::Whitespace | token::Comment | token::Shebang(_) => {
-                    t = self.try_next_token()?;
-                }
-                _ => break,
+            let t = self.try_next_token_with_raw_span()?;
+            match t.0.kind {
+                token::Whitespace | token::Comment | token::Shebang(_) => continue,
+                _ => return Ok(t),
             }
         }
-
-        Ok(t)
     }
 
-    pub fn real_token(&mut self) -> Token {
+    fn real_token(&mut self) -> (Token, Span) {
         let res = self.try_real_token();
         self.unwrap_or_abort(res)
     }
 
+    fn try_next_token_with_raw_span(&mut self) -> Result<(Token, Span), ()> {
+        assert!(self.fatal_errs.is_empty());
+        match self.scan_whitespace_or_comment() {
+            Some(comment) => {
+                let raw_span = comment.span;
+                Ok((comment, raw_span))
+            }
+            None => {
+                let (kind, start_pos, end_pos) = if self.is_eof() {
+                    (token::Eof, self.source_file.end_pos, self.source_file.end_pos)
+                } else {
+                    let start_pos = self.pos;
+                    (self.next_token_inner()?, start_pos, self.pos)
+                };
+                let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
+                Ok((Token::new(kind, real), raw))
+            }
+        }
+    }
+
     #[inline]
     fn is_eof(&self) -> bool {
         self.ch.is_none()
     }
@@ -141,7 +155,6 @@ impl<'a> StringReader<'a> {
                    override_span: Option<Span>) -> Self {
         let mut sr = StringReader::new_raw_internal(sess, source_file, override_span);
         sr.bump();
-
         sr
     }
 
@@ -162,7 +175,6 @@ impl<'a> StringReader<'a> {
             ch: Some('\n'),
             source_file,
             end_src_index: src.len(),
-            peek_span_src_raw: syntax_pos::DUMMY_SP,
             src,
             fatal_errs: Vec::new(),
             override_span,
@@ -172,12 +184,8 @@ impl<'a> StringReader<'a> {
     pub fn new_or_buffered_errs(sess: &'a ParseSess,
                                 source_file: Lrc<syntax_pos::SourceFile>,
                                 override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
-        let mut sr = StringReader::new_raw(sess, source_file, override_span);
-        if sr.advance_token().is_err() {
-            Err(sr.buffer_fatal_errors())
-        } else {
-            Ok(sr)
-        }
+        let sr = StringReader::new_raw(sess, source_file, override_span);
+        Ok(sr)
     }
 
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
@@ -197,11 +205,6 @@ impl<'a> StringReader<'a> {
 
         sr.bump();
 
-        if sr.advance_token().is_err() {
-            sr.emit_fatal_errors();
-            FatalError.raise();
-        }
-
         sr
     }
 
@@ -257,28 +260,6 @@ impl<'a> StringReader<'a> {
         self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), &m[..])
     }
 
-    /// Advance peek_token to refer to the next token, and
-    /// possibly update the interner.
-    fn advance_token(&mut self) -> Result<Token, ()> {
-        match self.scan_whitespace_or_comment() {
-            Some(comment) => {
-                self.peek_span_src_raw = comment.span;
-                Ok(comment)
-            }
-            None => {
-                let (kind, start_pos, end_pos) = if self.is_eof() {
-                    (token::Eof, self.source_file.end_pos, self.source_file.end_pos)
-                } else {
-                    let start_pos = self.pos;
-                    (self.next_token_inner()?, start_pos, self.pos)
-                };
-                let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
-                self.peek_span_src_raw = raw;
-                Ok(Token::new(kind, real))
-            }
-        }
-    }
-
     #[inline]
     fn src_index(&self, pos: BytePos) -> usize {
         (pos - self.source_file.start_pos).to_usize()
     }
@@ -1447,12 +1428,7 @@ mod tests {
                  teststr: String) -> StringReader<'a> {
         let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
-        let mut sr = StringReader::new_raw(sess, sf, None);
-        if sr.advance_token().is_err() {
-            sr.emit_fatal_errors();
-            FatalError.raise();
-        }
-        sr
+        StringReader::new_raw(sess, sf, None)
     }
 
     #[test]
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index 99d9d40a45b..9593a50bdd2 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,4 +1,4 @@
-use syntax_pos::Span;
+use syntax_pos::{Span, DUMMY_SP};
 
 use crate::print::pprust::token_to_string;
 use crate::parse::lexer::{StringReader, UnmatchedBrace};
@@ -11,6 +11,7 @@ impl<'a> StringReader<'a> {
         let mut tt_reader = TokenTreesReader {
             string_reader: self,
             token: Token::dummy(),
+            raw_span: DUMMY_SP,
             open_braces: Vec::new(),
             unmatched_braces: Vec::new(),
             matching_delim_spans: Vec::new(),
@@ -24,6 +25,7 @@ struct TokenTreesReader<'a> {
     string_reader: StringReader<'a>,
     token: Token,
+    raw_span: Span,
     /// Stack of open delimiters and their spans. Used for error message.
     open_braces: Vec<(token::DelimToken, Span)>,
     unmatched_braces: Vec<UnmatchedBrace>,
@@ -206,18 +208,17 @@ impl<'a> TokenTreesReader<'a> {
                 // Note that testing for joint-ness here is done via the raw
                 // source span as the joint-ness is a property of the raw source
                 // rather than wanting to take `override_span` into account.
-                // Additionally, we actually check if the *next* pair of tokens
-                // is joint, but this is equivalent to checking the current pair.
-                let raw = self.string_reader.peek_span_src_raw;
+                let raw_span = self.raw_span;
                 self.real_token();
-                let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo()
-                    && self.token.is_op();
+                let is_joint = raw_span.hi() == self.raw_span.lo() && self.token.is_op();
                 Ok((tt, if is_joint { Joint } else { NonJoint }))
             }
         }
     }
 
     fn real_token(&mut self) {
-        self.token = self.string_reader.real_token();
+        let (token, raw_span) = self.string_reader.real_token();
+        self.token = token;
+        self.raw_span = raw_span;
     }
 }
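
The gist of the change: previously `TokenTreesReader` read the raw span of the peeked token
off the lexer's cached `peek_span_src_raw` field; after this patch the lexer simply hands back
each token's raw span together with the token (`real_token()` now returns `(Token, Span)`),
and the token-trees reader remembers the last raw span itself. Joint-ness then becomes a local
comparison: the previous raw span must end exactly where the next one begins, and the following
token must be an operator (`Token::is_op`). The sketch below is standalone illustrative Rust,
not rustc code: `Span`, `Token`, `StringReader`, `TokenTreesReader`, and `token_is_op` are all
simplified stand-ins for the real types and checks.

// Standalone sketch, not rustc code: simplified stand-ins for Token, Span and
// the two readers, showing the pattern the patch moves to. The lexer returns
// `(Token, Span)` pairs (token plus raw span) and the token-trees reader caches
// the raw span itself instead of reading `peek_span_src_raw` off the lexer.

#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { lo: u32, hi: u32 }

#[derive(Clone, Copy, Debug, PartialEq)]
enum TokenKind { Ident, Whitespace, Lt, Eq, Eof }

#[derive(Clone, Copy, Debug)]
struct Token { kind: TokenKind, span: Span }

// Toy lexer: a pre-tokenized stream; each token is paired with its raw span.
// (In rustc the token's own span may be overridden, the raw span never is.)
struct StringReader { tokens: Vec<(Token, Span)>, idx: usize }

impl StringReader {
    // Mirrors the new `real_token`: skip trivia, return the token plus its raw span.
    fn real_token(&mut self) -> (Token, Span) {
        loop {
            let eof = Span { lo: u32::MAX, hi: u32::MAX };
            let t = self.tokens.get(self.idx).copied()
                .unwrap_or((Token { kind: TokenKind::Eof, span: eof }, eof));
            self.idx += 1;
            match t.0.kind {
                TokenKind::Whitespace => continue,
                _ => return t,
            }
        }
    }
}

// Mirrors `TokenTreesReader`: it, not the lexer, remembers the last raw span.
struct TokenTreesReader { reader: StringReader, token: Token, raw_span: Span }

impl TokenTreesReader {
    fn bump(&mut self) {
        let (token, raw_span) = self.reader.real_token();
        self.token = token;
        self.raw_span = raw_span;
    }

    // Hypothetical stand-in for the real `Token::is_op` check.
    fn token_is_op(&self) -> bool {
        matches!(self.token.kind, TokenKind::Lt | TokenKind::Eq)
    }

    // Is the current token glued to the one that follows it? Joint-ness is a
    // property of the raw source: no gap between the two raw spans.
    fn current_is_joint_with_next(&mut self) -> bool {
        let prev_raw = self.raw_span;
        self.bump();
        prev_raw.hi == self.raw_span.lo && self.token_is_op()
    }
}

fn main() {
    // "<= x": `<` and `=` touch, so they are joint; `=` and `x` do not.
    let sp = |lo, hi| Span { lo, hi };
    let tok = |kind, span| (Token { kind, span }, span);
    let tokens = vec![
        tok(TokenKind::Lt, sp(0, 1)),
        tok(TokenKind::Eq, sp(1, 2)),
        tok(TokenKind::Whitespace, sp(2, 3)),
        tok(TokenKind::Ident, sp(3, 4)),
    ];
    let mut reader = StringReader { tokens, idx: 0 };
    let (token, raw_span) = reader.real_token();
    let mut trees = TokenTreesReader { reader, token, raw_span };
    assert!(trees.current_is_joint_with_next());
    assert!(!trees.current_is_joint_with_next());
    println!("stopped at {:?} {:?}", trees.token.kind, trees.token.span);
}

A side effect of returning the raw span directly is that the check now compares the current
pair of tokens instead of peeking at the next pair, which is why the "we actually check if the
*next* pair of tokens is joint" comment could be deleted.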
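
A smaller point on `unwrap_or_abort`: it becomes generic because it is now used with two
payload types, `Result<Token, ()>` from `try_next_token` and `Result<(Token, Span), ()>` from
`try_real_token`. A toy version (not rustc code; the real one emits the buffered diagnostics
and raises a fatal error instead of panicking) looks like this:

fn unwrap_or_abort<T>(res: Result<T, ()>) -> T {
    match res {
        Ok(v) => v,
        // rustc: self.emit_fatal_errors(); FatalError.raise();
        Err(()) => panic!("fatal lexer error"),
    }
}

fn main() {
    let a: i32 = unwrap_or_abort(Ok(1));
    let b: (i32, i32) = unwrap_or_abort(Ok((2, 3)));
    println!("{} {:?}", a, b);
}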