Condense StringReader's API to a single function

Aleksey Kladov 2020-09-03 15:37:03 +02:00
parent 08deb863bd
commit 4231fbc0a8
4 changed files with 25 additions and 33 deletions
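In short: the two-step entry point (`StringReader::new(...)` followed by `.into_token_trees()`) is replaced by a single `parse_token_trees` function that borrows the source as a plain `&str`, and `StringReader` plus its helpers drop from `crate` visibility to module-private (`pub(super)`). As a rough illustration of the pattern, here is a minimal, self-contained sketch using hypothetical `Lexer`/`Token` types standing in for rustc's real `StringReader`/`TokenStream`; it is not rustc code, just the same constructor-plus-consuming-method shape condensed into one public function.

// Hypothetical stand-ins for rustc's StringReader/TokenStream; illustrative only.
#[derive(Debug)]
pub struct Token(String);

// Before: callers built the reader themselves and then consumed it.
// After: one entry point owns that dance, and the reader stays private.
pub fn parse_tokens(src: &str) -> Vec<Token> {
    Lexer { src, pos: 0 }.into_tokens()
}

struct Lexer<'a> {
    src: &'a str,
    pos: usize,
}

impl<'a> Lexer<'a> {
    fn into_tokens(mut self) -> Vec<Token> {
        let mut tokens = Vec::new();
        while let Some(tok) = self.next_token() {
            tokens.push(tok);
        }
        tokens
    }

    // Trivial whitespace-splitting "lexer"; rustc's real logic is far richer.
    fn next_token(&mut self) -> Option<Token> {
        let rest = &self.src[self.pos..];
        let trimmed = rest.trim_start();
        self.pos += rest.len() - trimmed.len();
        if trimmed.is_empty() {
            return None;
        }
        let end = trimmed.find(char::is_whitespace).unwrap_or(trimmed.len());
        self.pos += end;
        Some(Token(trimmed[..end].to_string()))
    }
}

fn main() {
    println!("{:?}", parse_tokens("fn main ( )"));
}

The payoff of this shape, visible in the hunks below, is that the caller no longer needs to know how the reader is constructed, and the reader type itself can stop being part of the crate-visible API.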

View File

@@ -1,22 +1,19 @@
 use rustc_ast::ast::AttrStyle;
 use rustc_ast::token::{self, CommentKind, Token, TokenKind};
-use rustc_ast::tokenstream::IsJoint;
-use rustc_data_structures::sync::Lrc;
-use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError};
-use rustc_lexer::Base;
-use rustc_lexer::{unescape, RawStrError};
+use rustc_ast::tokenstream::{IsJoint, TokenStream};
+use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError, PResult};
+use rustc_lexer::unescape::{self, Mode};
+use rustc_lexer::{Base, DocStyle, RawStrError};
 use rustc_session::parse::ParseSess;
 use rustc_span::symbol::{sym, Symbol};
 use rustc_span::{BytePos, Pos, Span};
 use std::char;
 use tracing::debug;
 
 mod tokentrees;
 mod unescape_error_reporting;
 mod unicode_chars;
 
-use rustc_lexer::{unescape::Mode, DocStyle};
 use unescape_error_reporting::{emit_unescape_error, push_escaped_char};
 
 #[derive(Clone, Debug)]
@@ -28,7 +25,17 @@ pub struct UnmatchedBrace {
     pub candidate_span: Option<Span>,
 }
 
-crate struct StringReader<'a> {
+crate fn parse_token_trees<'a>(
+    sess: &'a ParseSess,
+    src: &'a str,
+    start_pos: BytePos,
+    override_span: Option<Span>,
+) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+    StringReader { sess, start_pos, pos: start_pos, end_src_index: src.len(), src, override_span }
+        .into_token_trees()
+}
+
+struct StringReader<'a> {
     sess: &'a ParseSess,
     /// Initial position, read-only.
     start_pos: BytePos,
@@ -37,31 +44,11 @@ crate struct StringReader<'a> {
     /// Stop reading src at this index.
     end_src_index: usize,
     /// Source text to tokenize.
-    src: Lrc<String>,
+    src: &'a str,
     override_span: Option<Span>,
 }
 
 impl<'a> StringReader<'a> {
-    crate fn new(
-        sess: &'a ParseSess,
-        source_file: Lrc<rustc_span::SourceFile>,
-        override_span: Option<Span>,
-    ) -> Self {
-        let src = source_file.src.clone().unwrap_or_else(|| {
-            sess.span_diagnostic
-                .bug(&format!("cannot lex `source_file` without source: {}", source_file.name));
-        });
-
-        StringReader {
-            sess,
-            start_pos: source_file.start_pos,
-            pos: source_file.start_pos,
-            end_src_index: src.len(),
-            src,
-            override_span,
-        }
-    }
-
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
         self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
     }

View File

@@ -12,7 +12,7 @@ use rustc_errors::PResult;
 use rustc_span::Span;
 
 impl<'a> StringReader<'a> {
-    crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+    pub(super) fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
         let mut tt_reader = TokenTreesReader {
             string_reader: self,
             token: Token::dummy(),

View File

@@ -332,7 +332,7 @@ const ASCII_ARRAY: &[(char, &str, Option<token::TokenKind>)] = &[
     ('"', "Quotation Mark", None),
 ];
 
-crate fn check_for_substitution<'a>(
+pub(super) fn check_for_substitution<'a>(
     reader: &StringReader<'a>,
     pos: BytePos,
     ch: char,

View File

@@ -200,8 +200,13 @@ pub fn maybe_file_to_stream(
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
 ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
-    let srdr = lexer::StringReader::new(sess, source_file, override_span);
-    let (token_trees, unmatched_braces) = srdr.into_token_trees();
+    let src = source_file.src.as_ref().unwrap_or_else(|| {
+        sess.span_diagnostic
+            .bug(&format!("cannot lex `source_file` without source: {}", source_file.name));
+    });
+
+    let (token_trees, unmatched_braces) =
+        lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);
 
     match token_trees {
         Ok(stream) => Ok((stream, unmatched_braces)),
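On the caller side (the hunk above), `maybe_file_to_stream` now digs the source text out of the `SourceFile` itself and hands the lexer a `&str` plus the file's `start_pos`, so the lexer module no longer has to deal with `SourceFile` or `Lrc` directly. Continuing the hypothetical sketch at the top of this page (reusing its `Token` and `parse_tokens`), the caller-side ownership boundary looks roughly like this; `SourceFileStub` and `file_to_tokens` are illustrative names, not rustc's:

// Hypothetical caller that owns the source buffer and lends it to the lexer,
// mirroring how maybe_file_to_stream now passes `src.as_str()` down.
struct SourceFileStub {
    name: String,
    src: Option<String>, // the text may be absent, as in rustc's SourceFile
}

fn file_to_tokens(file: &SourceFileStub) -> Vec<Token> {
    let src = file
        .src
        .as_ref()
        .unwrap_or_else(|| panic!("cannot lex `{}` without source", file.name));
    parse_tokens(src.as_str())
}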