Auto merge of #25387 - eddyb:syn-file-loader, r=nikomatsakis
This allows compiling entire crates from memory or preprocessing source files before they are tokenized. Minor API refactoring is included, which is a [breaking-change] for libsyntax users:

* `ParseSess::{next_node_id, reserve_node_ids}` moved to rustc's `Session`
* `new_parse_sess` -> `ParseSess::new`
* `new_parse_sess_special_handler` -> `ParseSess::with_span_handler`
* `mk_span_handler` -> `SpanHandler::new`
* `default_handler` -> `Handler::new`
* `mk_handler` -> `Handler::with_emitter`
* `string_to_filemap(sess, source, path)` -> `sess.codemap().new_filemap(path, source)`
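To illustrate the new surface area, here is a minimal sketch (not part of this commit) of a `FileLoader` backed by an in-memory map, wired into a `ParseSess` through the renamed constructors. Only `FileLoader`, `CodeMap::with_file_loader`, `new_filemap`, `Handler::new`, `SpanHandler::new` and `ParseSess::with_span_handler` come from this change; the `MemoryLoader` type, the feature gate and the `main` wrapper are illustrative assumptions for a nightly compiler of this era.

```rust
// Hedged sketch, not part of this commit: compile sources held entirely in
// memory by plugging a custom FileLoader into the CodeMap.
#![feature(rustc_private)]

extern crate syntax;

use std::collections::HashMap;
use std::io;
use std::path::Path;

use syntax::codemap::{CodeMap, FileLoader};
use syntax::diagnostic::{Auto, Handler, SpanHandler};
use syntax::parse::ParseSess;

// Hypothetical loader that never touches the real filesystem.
struct MemoryLoader {
    files: HashMap<String, String>,
}

impl FileLoader for MemoryLoader {
    fn file_exists(&self, path: &Path) -> bool {
        path.to_str().map_or(false, |p| self.files.contains_key(p))
    }

    fn read_file(&self, path: &Path) -> io::Result<String> {
        match path.to_str().and_then(|p| self.files.get(p)) {
            Some(src) => Ok(src.clone()),
            None => Err(io::Error::new(io::ErrorKind::NotFound, "not an in-memory file")),
        }
    }
}

fn main() {
    let mut files = HashMap::new();
    files.insert("lib.rs".to_string(), "pub fn answer() -> i32 { 42 }".to_string());

    // `CodeMap::with_file_loader` is the new constructor; the default
    // `CodeMap::new` keeps using `RealFileLoader`.
    let cm = CodeMap::with_file_loader(Box::new(MemoryLoader { files: files }));

    // Renamed constructors: Handler::new (was default_handler),
    // SpanHandler::new (was mk_span_handler),
    // ParseSess::with_span_handler (was new_parse_sess_special_handler).
    let handler = Handler::new(Auto, None, true);
    let sess = ParseSess::with_span_handler(SpanHandler::new(handler, cm));

    // string_to_filemap(&sess, source, path) is now spelled:
    let fm = sess.codemap().new_filemap("<memory>".to_string(),
                                        "fn main() {}".to_string());
    let _ = fm;
}
```

Because the parser now resolves and reads `mod` files through `sess.codemap().file_exists` and `load_file`, the same `CodeMap` handed to the rest of the driver would route module loading through the in-memory loader instead of `std::fs`.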
This commit is contained in: commit c23a9d42ea
@@ -287,11 +287,9 @@ fn main() {
     let options = config::basic_options();
     let session = session::build_session(options, None,
                                          syntax::diagnostics::registry::Registry::new(&[]));
-    let filemap = parse::string_to_filemap(&session.parse_sess,
-                                           code,
-                                           String::from_str("<n/a>"));
+    let filemap = session.parse_sess.codemap().new_filemap(String::from_str("<n/a>"), code);
     let mut lexer = lexer::StringReader::new(session.diagnostic(), filemap);
-    let ref cm = lexer.span_diagnostic.cm;
+    let cm = session.codemap();
 
     // ANTLR
     let mut token_file = File::open(&Path::new(&args.next().unwrap())).unwrap();
@@ -1866,7 +1866,7 @@ impl FakeExtCtxt for parse::ParseSess {
 
 #[cfg(test)]
 fn mk_ctxt() -> parse::ParseSess {
-    parse::new_parse_sess()
+    parse::ParseSess::new()
 }
 
 #[cfg(test)]
@@ -852,7 +852,7 @@ pub fn parse_cfgspecs(cfgspecs: Vec<String> ) -> ast::CrateConfig {
         parse::parse_meta_from_source_str("cfgspec".to_string(),
                                           s.to_string(),
                                           Vec::new(),
-                                          &parse::new_parse_sess())
+                                          &parse::ParseSess::new())
     }).collect::<ast::CrateConfig>()
 }
 
@@ -64,7 +64,9 @@ pub struct Session {
     /// operations such as auto-dereference and monomorphization.
     pub recursion_limit: Cell<usize>,
 
-    pub can_print_warnings: bool
+    pub can_print_warnings: bool,
+
+    next_node_id: Cell<ast::NodeId>
 }
 
 impl Session {
@@ -213,16 +215,23 @@ impl Session {
             lints.insert(id, vec!((lint_id, sp, msg)));
     }
     pub fn next_node_id(&self) -> ast::NodeId {
-        self.parse_sess.next_node_id()
+        self.reserve_node_ids(1)
     }
     pub fn reserve_node_ids(&self, count: ast::NodeId) -> ast::NodeId {
-        self.parse_sess.reserve_node_ids(count)
+        let id = self.next_node_id.get();
+
+        match id.checked_add(count) {
+            Some(next) => self.next_node_id.set(next),
+            None => self.bug("Input too large, ran out of node ids!")
+        }
+
+        id
     }
     pub fn diagnostic<'a>(&'a self) -> &'a diagnostic::SpanHandler {
         &self.parse_sess.span_diagnostic
     }
     pub fn codemap<'a>(&'a self) -> &'a codemap::CodeMap {
-        &self.parse_sess.span_diagnostic.cm
+        self.parse_sess.codemap()
     }
     // This exists to help with refactoring to eliminate impossible
    // cases later on
@@ -359,9 +368,9 @@ pub fn build_session(sopts: config::Options,
 
     let codemap = codemap::CodeMap::new();
     let diagnostic_handler =
-        diagnostic::default_handler(sopts.color, Some(registry), can_print_warnings);
+        diagnostic::Handler::new(sopts.color, Some(registry), can_print_warnings);
     let span_diagnostic_handler =
-        diagnostic::mk_span_handler(diagnostic_handler, codemap);
+        diagnostic::SpanHandler::new(diagnostic_handler, codemap);
 
     build_session_(sopts, local_crate_source_file, span_diagnostic_handler)
 }
@@ -378,7 +387,7 @@ pub fn build_session_(sopts: config::Options,
         }
     };
     let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
-    let p_s = parse::new_parse_sess_special_handler(span_diagnostic);
+    let p_s = parse::ParseSess::with_span_handler(span_diagnostic);
     let default_sysroot = match sopts.maybe_sysroot {
         Some(_) => None,
         None => Some(filesearch::get_or_default_sysroot())
@@ -421,7 +430,8 @@ pub fn build_session_(sopts: config::Options,
         delayed_span_bug: RefCell::new(None),
         features: RefCell::new(feature_gate::Features::new()),
         recursion_limit: Cell::new(64),
-        can_print_warnings: can_print_warnings
+        can_print_warnings: can_print_warnings,
+        next_node_id: Cell::new(1)
     };
 
     sess
@@ -218,7 +218,7 @@ impl Target {
         // this is 1. ugly, 2. error prone.
 
 
-        let handler = diagnostic::default_handler(diagnostic::Auto, None, true);
+        let handler = diagnostic::Handler::new(diagnostic::Auto, None, true);
 
         let get_req_field = |name: &str| {
             match obj.find(name)
@@ -105,9 +105,9 @@ fn test_env<F>(source_string: &str,
     let codemap =
         CodeMap::new();
     let diagnostic_handler =
-        diagnostic::mk_handler(true, emitter);
+        diagnostic::Handler::with_emitter(true, emitter);
     let span_diagnostic_handler =
-        diagnostic::mk_span_handler(diagnostic_handler, codemap);
+        diagnostic::SpanHandler::new(diagnostic_handler, codemap);
 
     let sess = session::build_session_(options, None, span_diagnostic_handler);
     rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
@@ -21,7 +21,7 @@ use util::common::time;
 use util::common::path2cstr;
 use syntax::codemap;
 use syntax::diagnostic;
-use syntax::diagnostic::{Emitter, Handler, Level, mk_handler};
+use syntax::diagnostic::{Emitter, Handler, Level};
 
 use std::ffi::{CStr, CString};
 use std::fs;
@@ -928,7 +928,7 @@ fn run_work_multithreaded(sess: &Session,
         futures.push(rx);
 
         thread::Builder::new().name(format!("codegen-{}", i)).spawn(move || {
-            let diag_handler = mk_handler(true, box diag_emitter);
+            let diag_handler = Handler::with_emitter(true, box diag_emitter);
 
             // Must construct cgcx inside the proc because it has non-Send
             // fields.
@@ -110,9 +110,9 @@ pub fn run_core(search_paths: SearchPaths, cfgs: Vec<String>, externs: Externs,
     };
 
     let codemap = codemap::CodeMap::new();
-    let diagnostic_handler = diagnostic::default_handler(diagnostic::Auto, None, true);
+    let diagnostic_handler = diagnostic::Handler::new(diagnostic::Auto, None, true);
     let span_diagnostic_handler =
-        diagnostic::mk_span_handler(diagnostic_handler, codemap);
+        diagnostic::SpanHandler::new(diagnostic_handler, codemap);
 
     let sess = session::build_session_(sessopts, cpath,
                                        span_diagnostic_handler);
@@ -24,10 +24,8 @@ use syntax::parse;
 /// Highlights some source code, returning the HTML output.
 pub fn highlight(src: &str, class: Option<&str>, id: Option<&str>) -> String {
     debug!("highlighting: ================\n{}\n==============", src);
-    let sess = parse::new_parse_sess();
-    let fm = parse::string_to_filemap(&sess,
-                                      src.to_string(),
-                                      "<stdin>".to_string());
+    let sess = parse::ParseSess::new();
+    let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
 
     let mut out = Vec::new();
     doit(&sess,
@@ -62,7 +60,7 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
     loop {
         let next = lexer.next_token();
 
-        let snip = |sp| sess.span_diagnostic.cm.span_to_snippet(sp).unwrap();
+        let snip = |sp| sess.codemap().span_to_snippet(sp).unwrap();
 
         if next.tok == token::Eof { break }
 
@@ -178,7 +176,7 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
 
                 // as mentioned above, use the original source code instead of
                 // stringifying this token
-                let snip = sess.span_diagnostic.cm.span_to_snippet(next.sp).unwrap();
+                let snip = sess.codemap().span_to_snippet(next.sp).unwrap();
                 if klass == "" {
                     try!(write!(out, "{}", Escape(&snip)));
                 } else {
@@ -65,9 +65,9 @@ pub fn run(input: &str,
     };
 
     let codemap = CodeMap::new();
-    let diagnostic_handler = diagnostic::default_handler(diagnostic::Auto, None, true);
+    let diagnostic_handler = diagnostic::Handler::new(diagnostic::Auto, None, true);
     let span_diagnostic_handler =
-        diagnostic::mk_span_handler(diagnostic_handler, codemap);
+        diagnostic::SpanHandler::new(diagnostic_handler, codemap);
 
     let sess = session::build_session_(sessopts,
                                        Some(input_path.clone()),
@@ -184,7 +184,7 @@ fn runtest(test: &str, cratename: &str, libs: SearchPaths,
     // it with a sink that is also passed to rustc itself. When this function
     // returns the output of the sink is copied onto the output of our own thread.
     //
-    // The basic idea is to not use a default_handler() for rustc, and then also
+    // The basic idea is to not use a default Handler for rustc, and then also
     // not print things by default to the actual stderr.
     struct Sink(Arc<Mutex<Vec<u8>>>);
     impl Write for Sink {
@@ -206,9 +206,9 @@ fn runtest(test: &str, cratename: &str, libs: SearchPaths,
 
     // Compile the code
     let codemap = CodeMap::new();
-    let diagnostic_handler = diagnostic::mk_handler(true, box emitter);
+    let diagnostic_handler = diagnostic::Handler::with_emitter(true, box emitter);
     let span_diagnostic_handler =
-        diagnostic::mk_span_handler(diagnostic_handler, codemap);
+        diagnostic::SpanHandler::new(diagnostic_handler, codemap);
 
     let sess = session::build_session_(sessopts,
                                        None,
|
@ -21,9 +21,11 @@ pub use self::MacroFormat::*;
|
||||
|
||||
use std::cell::RefCell;
|
||||
use std::ops::{Add, Sub};
|
||||
use std::path::Path;
|
||||
use std::rc::Rc;
|
||||
|
||||
use std::fmt;
|
||||
use std::{fmt, fs};
|
||||
use std::io::{self, Read};
|
||||
|
||||
use serialize::{Encodable, Decodable, Encoder, Decoder};
|
||||
|
||||
@@ -527,6 +529,29 @@ impl FileMap {
     }
 }
 
+/// An abstraction over the fs operations used by the Parser.
+pub trait FileLoader {
+    /// Query the existence of a file.
+    fn file_exists(&self, path: &Path) -> bool;
+
+    /// Read the contents of an UTF-8 file into memory.
+    fn read_file(&self, path: &Path) -> io::Result<String>;
+}
+
+/// A FileLoader that uses std::fs to load real files.
+pub struct RealFileLoader;
+
+impl FileLoader for RealFileLoader {
+    fn file_exists(&self, path: &Path) -> bool {
+        fs::metadata(path).is_ok()
+    }
+
+    fn read_file(&self, path: &Path) -> io::Result<String> {
+        let mut src = String::new();
+        try!(try!(fs::File::open(path)).read_to_string(&mut src));
+        Ok(src)
+    }
+}
+
 // _____________________________________________________________________________
 // CodeMap
@@ -534,7 +559,8 @@ impl FileMap {
 
 pub struct CodeMap {
     pub files: RefCell<Vec<Rc<FileMap>>>,
-    expansions: RefCell<Vec<ExpnInfo>>
+    expansions: RefCell<Vec<ExpnInfo>>,
+    file_loader: Box<FileLoader>
 }
 
 impl CodeMap {
@@ -542,9 +568,27 @@ impl CodeMap {
         CodeMap {
             files: RefCell::new(Vec::new()),
             expansions: RefCell::new(Vec::new()),
+            file_loader: Box::new(RealFileLoader)
         }
     }
 
+    pub fn with_file_loader(file_loader: Box<FileLoader>) -> CodeMap {
+        CodeMap {
+            files: RefCell::new(Vec::new()),
+            expansions: RefCell::new(Vec::new()),
+            file_loader: file_loader
+        }
+    }
+
+    pub fn file_exists(&self, path: &Path) -> bool {
+        self.file_loader.file_exists(path)
+    }
+
+    pub fn load_file(&self, path: &Path) -> io::Result<Rc<FileMap>> {
+        let src = try!(self.file_loader.read_file(path));
+        Ok(self.new_filemap(path.to_str().unwrap().to_string(), src))
+    }
+
     pub fn new_filemap(&self, filename: FileName, mut src: String) -> Rc<FileMap> {
         let mut files = self.files.borrow_mut();
         let start_pos = match files.last() {
@@ -122,6 +122,12 @@ pub struct SpanHandler {
 }
 
 impl SpanHandler {
+    pub fn new(handler: Handler, cm: codemap::CodeMap) -> SpanHandler {
+        SpanHandler {
+            handler: handler,
+            cm: cm,
+        }
+    }
     pub fn span_fatal(&self, sp: Span, msg: &str) -> FatalError {
         self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
         return FatalError;
@@ -187,6 +193,19 @@ pub struct Handler {
 }
 
 impl Handler {
+    pub fn new(color_config: ColorConfig,
+               registry: Option<diagnostics::registry::Registry>,
+               can_emit_warnings: bool) -> Handler {
+        let emitter = Box::new(EmitterWriter::stderr(color_config, registry));
+        Handler::with_emitter(can_emit_warnings, emitter)
+    }
+    pub fn with_emitter(can_emit_warnings: bool, e: Box<Emitter + Send>) -> Handler {
+        Handler {
+            err_count: Cell::new(0),
+            emit: RefCell::new(e),
+            can_emit_warnings: can_emit_warnings
+        }
+    }
     pub fn fatal(&self, msg: &str) -> ! {
         self.emit.borrow_mut().emit(None, msg, None, Fatal);
         panic!(FatalError);
@@ -254,27 +273,6 @@ impl Handler {
     }
 }
 
-pub fn mk_span_handler(handler: Handler, cm: codemap::CodeMap) -> SpanHandler {
-    SpanHandler {
-        handler: handler,
-        cm: cm,
-    }
-}
-
-pub fn default_handler(color_config: ColorConfig,
-                       registry: Option<diagnostics::registry::Registry>,
-                       can_emit_warnings: bool) -> Handler {
-    mk_handler(can_emit_warnings, Box::new(EmitterWriter::stderr(color_config, registry)))
-}
-
-pub fn mk_handler(can_emit_warnings: bool, e: Box<Emitter + Send>) -> Handler {
-    Handler {
-        err_count: Cell::new(0),
-        emit: RefCell::new(e),
-        can_emit_warnings: can_emit_warnings
-    }
-}
-
 #[derive(Copy, PartialEq, Clone, Debug)]
 pub enum Level {
     Bug,
@@ -648,7 +648,7 @@ impl<'a> ExtCtxt<'a> {
         parse::tts_to_parser(self.parse_sess, tts.to_vec(), self.cfg())
     }
 
-    pub fn codemap(&self) -> &'a CodeMap { &self.parse_sess.span_diagnostic.cm }
+    pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() }
     pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
     pub fn cfg(&self) -> ast::CrateConfig { self.cfg.clone() }
     pub fn call_site(&self) -> Span {
@@ -1684,7 +1684,7 @@ mod tests {
     #[test] fn macros_cant_escape_fns_test () {
         let src = "fn bogus() {macro_rules! z (() => (3+4));}\
                    fn inty() -> i32 { z!() }".to_string();
-        let sess = parse::new_parse_sess();
+        let sess = parse::ParseSess::new();
         let crate_ast = parse::parse_crate_from_source_str(
             "<test>".to_string(),
             src,
@@ -1698,7 +1698,7 @@ mod tests {
     #[test] fn macros_cant_escape_mods_test () {
         let src = "mod foo {macro_rules! z (() => (3+4));}\
                    fn inty() -> i32 { z!() }".to_string();
-        let sess = parse::new_parse_sess();
+        let sess = parse::ParseSess::new();
         let crate_ast = parse::parse_crate_from_source_str(
             "<test>".to_string(),
             src,
@@ -1710,7 +1710,7 @@ mod tests {
     #[test] fn macros_can_escape_flattened_mods_test () {
         let src = "#[macro_use] mod foo {macro_rules! z (() => (3+4));}\
                    fn inty() -> i32 { z!() }".to_string();
-        let sess = parse::new_parse_sess();
+        let sess = parse::ParseSess::new();
         let crate_ast = parse::parse_crate_from_source_str(
             "<test>".to_string(),
             src,
@@ -1719,7 +1719,7 @@ mod tests {
     }
 
     fn expand_crate_str(crate_str: String) -> ast::Crate {
-        let ps = parse::new_parse_sess();
+        let ps = parse::ParseSess::new();
         let crate_ast = panictry!(string_to_parser(&ps, crate_str).parse_crate_mod());
         // the cfg argument actually does matter, here...
         expand_crate(&ps,test_ecfg(),vec!(),vec!(),crate_ast)
@@ -1412,8 +1412,8 @@ mod tests {
     fn mk_sh() -> diagnostic::SpanHandler {
         // FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
         let emitter = diagnostic::EmitterWriter::new(Box::new(io::sink()), None);
-        let handler = diagnostic::mk_handler(true, Box::new(emitter));
-        diagnostic::mk_span_handler(handler, CodeMap::new())
+        let handler = diagnostic::Handler::with_emitter(true, Box::new(emitter));
+        diagnostic::SpanHandler::new(handler, CodeMap::new())
     }
 
     // open a string reader for the given string
@@ -12,14 +12,13 @@
 
 use ast;
 use codemap::{Span, CodeMap, FileMap};
-use diagnostic::{SpanHandler, mk_span_handler, default_handler, Auto, FatalError};
+use diagnostic::{SpanHandler, Handler, Auto, FatalError};
 use parse::attr::ParserAttr;
 use parse::parser::Parser;
 use ptr::P;
 use str::char_at;
 
-use std::cell::{Cell, RefCell};
-use std::fs::File;
+use std::cell::RefCell;
 use std::io::Read;
 use std::iter;
 use std::path::{Path, PathBuf};
@@ -44,38 +43,23 @@ pub struct ParseSess {
     pub span_diagnostic: SpanHandler, // better be the same as the one in the reader!
     /// Used to determine and report recursive mod inclusions
     included_mod_stack: RefCell<Vec<PathBuf>>,
-    pub node_id: Cell<ast::NodeId>,
 }
 
-pub fn new_parse_sess() -> ParseSess {
-    ParseSess {
-        span_diagnostic: mk_span_handler(default_handler(Auto, None, true), CodeMap::new()),
-        included_mod_stack: RefCell::new(Vec::new()),
-        node_id: Cell::new(1),
-    }
-}
-
-pub fn new_parse_sess_special_handler(sh: SpanHandler) -> ParseSess {
-    ParseSess {
-        span_diagnostic: sh,
-        included_mod_stack: RefCell::new(Vec::new()),
-        node_id: Cell::new(1),
-    }
-}
-
 impl ParseSess {
-    pub fn next_node_id(&self) -> ast::NodeId {
-        self.reserve_node_ids(1)
+    pub fn new() -> ParseSess {
+        let handler = SpanHandler::new(Handler::new(Auto, None, true), CodeMap::new());
+        ParseSess::with_span_handler(handler)
     }
-    pub fn reserve_node_ids(&self, count: ast::NodeId) -> ast::NodeId {
-        let v = self.node_id.get();
-
-        match v.checked_add(count) {
-            Some(next) => { self.node_id.set(next); }
-            None => panic!("Input too large, ran out of node ids!")
+    pub fn with_span_handler(sh: SpanHandler) -> ParseSess {
+        ParseSess {
+            span_diagnostic: sh,
+            included_mod_stack: RefCell::new(vec![])
         }
     }
 
-        v
+    pub fn codemap(&self) -> &CodeMap {
+        &self.span_diagnostic.cm
     }
 }
 
@@ -189,7 +173,7 @@ pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
                                       name: String,
                                       source: String)
                                       -> Parser<'a> {
-    filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg)
+    filemap_to_parser(sess, sess.codemap().new_filemap(name, source), cfg)
 }
 
 /// Create a new parser, handling errors as appropriate
@@ -235,41 +219,20 @@ pub fn new_parser_from_tts<'a>(sess: &'a ParseSess,
 
 /// Given a session and a path and an optional span (for error reporting),
 /// add the path to the session's codemap and return the new filemap.
-pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
-                       -> Rc<FileMap> {
-    let err = |msg: &str| {
-        match spanopt {
-            Some(sp) => panic!(sess.span_diagnostic.span_fatal(sp, msg)),
-            None => sess.span_diagnostic.handler().fatal(msg),
-        }
-    };
-    let mut bytes = Vec::new();
-    match File::open(path).and_then(|mut f| f.read_to_end(&mut bytes)) {
-        Ok(..) => {}
+fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
+                   -> Rc<FileMap> {
+    match sess.codemap().load_file(path) {
+        Ok(filemap) => filemap,
         Err(e) => {
-            err(&format!("couldn't read {:?}: {}", path.display(), e));
-            unreachable!();
-        }
-    };
-    match str::from_utf8(&bytes[..]).ok() {
-        Some(s) => {
-            string_to_filemap(sess, s.to_string(),
-                              path.to_str().unwrap().to_string())
-        }
-        None => {
-            err(&format!("{:?} is not UTF-8 encoded", path.display()));
-            unreachable!();
+            let msg = format!("couldn't read {:?}: {}", path.display(), e);
+            match spanopt {
+                Some(sp) => panic!(sess.span_diagnostic.span_fatal(sp, &msg)),
+                None => sess.span_diagnostic.handler().fatal(&msg)
             }
         }
     }
 }
 
-/// Given a session and a string, add the string to
-/// the session's codemap and return the new filemap
-pub fn string_to_filemap(sess: &ParseSess, source: String, path: String)
-                         -> Rc<FileMap> {
-    sess.span_diagnostic.cm.new_filemap(path, source)
-}
-
 /// Given a filemap, produce a sequence of token-trees
 pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
                       -> Vec<ast::TokenTree> {
@@ -905,7 +868,7 @@ mod tests {
     }
 
     #[test] fn parse_ident_pat () {
-        let sess = new_parse_sess();
+        let sess = ParseSess::new();
         let mut parser = string_to_parser(&sess, "b".to_string());
         assert!(panictry!(parser.parse_pat_nopanic())
                 == P(ast::Pat{
@@ -1086,7 +1049,7 @@ mod tests {
     }
 
     #[test] fn crlf_doc_comments() {
-        let sess = new_parse_sess();
+        let sess = ParseSess::new();
 
         let name = "<source>".to_string();
         let source = "/// doc comment\r\nfn foo() {}".to_string();
@@ -1109,7 +1072,7 @@ mod tests {
 
     #[test]
     fn ttdelim_span() {
-        let sess = parse::new_parse_sess();
+        let sess = ParseSess::new();
         let expr = parse::parse_expr_from_source_str("foo".to_string(),
             "foo!( fn main() { body } )".to_string(), vec![], &sess);
 
@@ -1123,7 +1086,7 @@ mod tests {
 
         let span = tts.iter().rev().next().unwrap().get_span();
 
-        match sess.span_diagnostic.cm.span_to_snippet(span) {
+        match sess.codemap().span_to_snippet(span) {
             Ok(s) => assert_eq!(&s[..], "{ body }"),
             Err(_) => panic!("could not get snippet"),
         }
@@ -79,7 +79,6 @@ use parse::PResult;
 use diagnostic::FatalError;
 
 use std::collections::HashSet;
-use std::fs;
 use std::io::prelude::*;
 use std::mem;
 use std::path::{Path, PathBuf};
@@ -4840,8 +4839,7 @@ impl<'a> Parser<'a> {
                           outer_attrs: &[ast::Attribute],
                           id_sp: Span)
                           -> PResult<(ast::Item_, Vec<ast::Attribute> )> {
-        let mut prefix = PathBuf::from(&self.sess.span_diagnostic.cm
-                                            .span_to_filename(self.span));
+        let mut prefix = PathBuf::from(&self.sess.codemap().span_to_filename(self.span));
         prefix.pop();
         let mut dir_path = prefix;
         for part in &self.mod_path_stack {
@@ -4857,8 +4855,8 @@ impl<'a> Parser<'a> {
         let secondary_path_str = format!("{}/mod.rs", mod_name);
         let default_path = dir_path.join(&default_path_str[..]);
         let secondary_path = dir_path.join(&secondary_path_str[..]);
-        let default_exists = fs::metadata(&default_path).is_ok();
-        let secondary_exists = fs::metadata(&secondary_path).is_ok();
+        let default_exists = self.sess.codemap().file_exists(&default_path);
+        let secondary_exists = self.sess.codemap().file_exists(&secondary_path);
 
         if !self.owns_directory {
             self.span_err(id_sp,
@@ -301,7 +301,7 @@ fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
             allow_internal_unstable: true,
         }
     };
-    let expn_id = cx.sess.span_diagnostic.cm.record_expansion(info);
+    let expn_id = cx.sess.codemap().record_expansion(info);
     let mut sp = sp;
     sp.expn_id = expn_id;
     return sp;
@@ -9,8 +9,7 @@
 // except according to those terms.
 
 use ast;
-use parse::new_parse_sess;
-use parse::{ParseSess,string_to_filemap,filemap_to_tts};
+use parse::{ParseSess,filemap_to_tts};
 use parse::new_parser_from_source_str;
 use parse::parser::Parser;
 use parse::token;
@@ -19,9 +18,8 @@ use str::char_at;
 
 /// Map a string to tts, using a made-up filename:
 pub fn string_to_tts(source_str: String) -> Vec<ast::TokenTree> {
-    let ps = new_parse_sess();
-    filemap_to_tts(&ps,
-                   string_to_filemap(&ps, source_str, "bogofile".to_string()))
+    let ps = ParseSess::new();
+    filemap_to_tts(&ps, ps.codemap().new_filemap("bogofile".to_string(), source_str))
 }
 
 /// Map string to parser (via tts)
@@ -35,7 +33,7 @@ pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: String) -> Parser<'a>
 fn with_error_checking_parse<T, F>(s: String, f: F) -> T where
     F: FnOnce(&mut Parser) -> T,
 {
-    let ps = new_parse_sess();
+    let ps = ParseSess::new();
     let mut p = string_to_parser(&ps, s);
     let x = f(&mut p);
     p.abort_if_errors();
@@ -75,7 +73,7 @@ pub fn string_to_stmt(source_str : String) -> P<ast::Stmt> {
 pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
     // Binding `sess` and `parser` works around dropck-injected
     // region-inference issues; see #25212, #22323, #22321.
-    let sess = new_parse_sess();
+    let sess = ParseSess::new();
     let mut parser = string_to_parser(&sess, source_str);
     parser.parse_pat()
 }
@@ -20,7 +20,7 @@ use syntax::parse;
 use syntax::print::pprust;
 
 fn main() {
-    let ps = syntax::parse::new_parse_sess();
+    let ps = syntax::parse::ParseSess::new();
     let mut cx = syntax::ext::base::ExtCtxt::new(
         &ps, vec![],
         syntax::ext::expand::ExpansionConfig::default("qquote".to_string()));
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// error-pattern: is not UTF-8
+// error-pattern: did not contain valid UTF-8
 
 fn foo() {
     include!("not-utf8.bin")
@@ -22,7 +22,7 @@ use syntax::parse;
 use syntax::print::pprust;
 
 fn main() {
-    let ps = syntax::parse::new_parse_sess();
+    let ps = syntax::parse::ParseSess::new();
     let mut cx = syntax::ext::base::ExtCtxt::new(
         &ps, vec![],
         syntax::ext::expand::ExpansionConfig::default("qquote".to_string()));
@@ -18,7 +18,7 @@ use syntax::codemap::DUMMY_SP;
 use syntax::print::pprust::*;
 
 fn main() {
-    let ps = syntax::parse::new_parse_sess();
+    let ps = syntax::parse::ParseSess::new();
     let mut cx = syntax::ext::base::ExtCtxt::new(
         &ps, vec![],
         syntax::ext::expand::ExpansionConfig::default("qquote".to_string()));