Fix rustdoc and tests.

Eduard Burtescu 2014-03-17 09:55:41 +02:00
parent e02aa722aa
commit e2ebc8f811
13 changed files with 71 additions and 103 deletions

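The hunks below repeatedly apply one migration: the driver `Session` and the `ParseSess` are no longer `@`-boxed values handed around by copy, but locals passed by reference, with the codemap reached through an accessor instead of a public field. A minimal sketch of the new calling convention, pieced together only from names that appear in the hunks (the option values and surrounding setup are illustrative assumptions, not part of the commit):

// Options is a plain struct now; build it by value with functional-update syntax.
let sessopts = driver::session::Options {
    maybe_sysroot: Some(os::self_exe_path().unwrap().dir_path()),
    ..rustc::driver::session::basic_options().clone()
};
let sess = build_session(sessopts, None);
// Callers borrow the session instead of cloning a managed box,
// and reach the codemap through the codemap() accessor (mirroring the
// `.sess().codemap()` change in the rustdoc hunks) rather than the old field.
let cfg = build_configuration(&sess);
let cm = sess.codemap();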
View File

@@ -1234,7 +1234,7 @@ mod test {
};
let sessopts = build_session_options(matches);
let sess = build_session(sessopts, None);
let cfg = build_configuration(sess);
let cfg = build_configuration(&sess);
assert!((attr::contains_name(cfg.as_slice(), "test")));
}
@@ -1253,7 +1253,7 @@ mod test {
};
let sessopts = build_session_options(matches);
let sess = build_session(sessopts, None);
let cfg = build_configuration(sess);
let cfg = build_configuration(&sess);
let mut test_items = cfg.iter().filter(|m| m.name().equiv(&("test")));
assert!(test_items.next().is_some());
assert!(test_items.next().is_none());

View File

@@ -1517,7 +1517,7 @@ fn test_more() {
#[test]
fn test_simplification() {
let cx = mk_ctxt();
let item = quote_item!(cx,
let item = quote_item!(&cx,
fn new_int_alist<B>() -> alist<int, B> {
fn eq_int(a: int, b: int) -> bool { a == b }
return alist {eq_fn: eq_int, data: Vec::new()};

View File

@@ -684,7 +684,7 @@ impl Clean<Type> for ast::Ty {
fn clean(&self) -> Type {
use syntax::ast::*;
debug!("cleaning type `{:?}`", self);
let codemap = local_data::get(super::ctxtkey, |x| *x.unwrap()).sess().codemap;
let codemap = local_data::get(super::ctxtkey, |x| *x.unwrap()).sess().codemap();
debug!("span corresponds to `{}`", codemap.span_to_str(self.span));
match self.node {
TyNil => Unit,
@@ -866,7 +866,7 @@ pub struct Span {
impl Clean<Span> for syntax::codemap::Span {
fn clean(&self) -> Span {
let cm = local_data::get(super::ctxtkey, |x| *x.unwrap()).sess().codemap;
let cm = local_data::get(super::ctxtkey, |x| *x.unwrap()).sess().codemap();
let filename = cm.span_to_filename(*self);
let lo = cm.lookup_char_pos(self.lo);
let hi = cm.lookup_char_pos(self.hi);
@@ -1180,7 +1180,7 @@ trait ToSource {
impl ToSource for syntax::codemap::Span {
fn to_src(&self) -> ~str {
debug!("converting span {:?} to snippet", self.clean());
let cm = local_data::get(super::ctxtkey, |x| x.unwrap().clone()).sess().codemap.clone();
let cm = local_data::get(super::ctxtkey, |x| x.unwrap().clone()).sess().codemap().clone();
let sn = match cm.span_to_snippet(*self) {
Some(x) => x,
None => ~""

View File

@@ -15,7 +15,6 @@ use rustc::middle::privacy;
use syntax::ast;
use syntax::parse::token;
use syntax::parse;
use syntax;
use std::cell::RefCell;
@@ -60,24 +59,23 @@ fn get_ast_and_resolve(cpath: &Path,
phase_2_configure_and_expand,
phase_3_run_analysis_passes};
let parsesess = parse::new_parse_sess();
let input = FileInput(cpath.clone());
let sessopts = @driver::session::Options {
let sessopts = driver::session::Options {
maybe_sysroot: Some(os::self_exe_path().unwrap().dir_path()),
addl_lib_search_paths: RefCell::new(libs),
crate_types: vec!(driver::session::CrateTypeDylib),
.. (*rustc::driver::session::basic_options()).clone()
..rustc::driver::session::basic_options().clone()
};
let codemap = syntax::codemap::CodeMap::new();
let diagnostic_handler = syntax::diagnostic::default_handler();
let span_diagnostic_handler =
syntax::diagnostic::mk_span_handler(diagnostic_handler, parsesess.cm);
syntax::diagnostic::mk_span_handler(diagnostic_handler, codemap);
let sess = driver::driver::build_session_(sessopts,
Some(cpath.clone()),
parsesess.cm,
span_diagnostic_handler);
let mut cfg = build_configuration(&sess);
@@ -87,7 +85,7 @@ fn get_ast_and_resolve(cpath: &Path,
}
let krate = phase_1_parse_input(&sess, cfg, &input);
let (krate, ast_map) = phase_2_configure_and_expand(&sess, &mut Loader::new(sess),
let (krate, ast_map) = phase_2_configure_and_expand(&sess, &mut Loader::new(&sess),
krate, &from_str("rustdoc").unwrap());
let driver::driver::CrateAnalysis {
exported_items, public_items, ty_cx, ..

View File

@@ -18,7 +18,6 @@ use std::io;
use syntax::parse;
use syntax::parse::lexer;
use syntax::diagnostic;
use syntax::codemap::{BytePos, Span};
use html::escape::Escape;
@@ -28,13 +27,11 @@ use t = syntax::parse::token;
/// Highlights some source code, returning the HTML output.
pub fn highlight(src: &str, class: Option<&str>) -> ~str {
let sess = parse::new_parse_sess();
let handler = diagnostic::default_handler();
let span_handler = diagnostic::mk_span_handler(handler, sess.cm);
let fm = parse::string_to_filemap(&sess, src.to_owned(), ~"<stdin>");
let mut out = io::MemWriter::new();
doit(&sess,
lexer::new_string_reader(span_handler, fm),
lexer::new_string_reader(&sess.span_diagnostic, fm),
class,
&mut out).unwrap();
str::from_utf8_lossy(out.unwrap()).into_owned()
@@ -68,7 +65,7 @@ fn doit(sess: &parse::ParseSess, lexer: lexer::StringReader, class: Option<&str>
// comment. This will classify some whitespace as a comment, but that
// doesn't matter too much for syntax highlighting purposes.
if test > last {
let snip = sess.cm.span_to_snippet(Span {
let snip = sess.span_diagnostic.cm.span_to_snippet(Span {
lo: last,
hi: test,
expn_info: None,
@@ -172,7 +169,7 @@ fn doit(sess: &parse::ParseSess, lexer: lexer::StringReader, class: Option<&str>
// as mentioned above, use the original source code instead of
// stringifying this token
let snip = sess.cm.span_to_snippet(next.sp).unwrap();
let snip = sess.span_diagnostic.cm.span_to_snippet(next.sp).unwrap();
if klass == "" {
try!(write!(out, "{}", Escape(snip)));
} else {

View File

@@ -28,7 +28,6 @@ extern crate time;
#[phase(syntax, link)]
extern crate log;
use std::cell::RefCell;
use std::local_data;
use std::io;
use std::io::{File, MemWriter};

View File

@@ -9,7 +9,6 @@
// except according to those terms.
use std::{str, io};
use std::cell::RefCell;
use std::vec_ng::Vec;
use collections::HashSet;

View File

@@ -23,7 +23,6 @@ use rustc::driver::driver;
use rustc::driver::session;
use rustc::metadata::creader::Loader;
use syntax::diagnostic;
use syntax::parse;
use syntax::codemap::CodeMap;
use core;
@@ -38,29 +37,26 @@ pub fn run(input: &str, libs: HashSet<Path>, mut test_args: ~[~str]) -> int {
let input_path = Path::new(input);
let input = driver::FileInput(input_path.clone());
let sessopts = @session::Options {
let sessopts = session::Options {
maybe_sysroot: Some(os::self_exe_path().unwrap().dir_path()),
addl_lib_search_paths: RefCell::new(libs.clone()),
crate_types: vec!(session::CrateTypeDylib),
.. (*session::basic_options()).clone()
..session::basic_options().clone()
};
let cm = @CodeMap::new();
let codemap = CodeMap::new();
let diagnostic_handler = diagnostic::default_handler();
let span_diagnostic_handler =
diagnostic::mk_span_handler(diagnostic_handler, cm);
let parsesess = parse::new_parse_sess_special_handler(span_diagnostic_handler,
cm);
diagnostic::mk_span_handler(diagnostic_handler, codemap);
let sess = driver::build_session_(sessopts,
Some(input_path),
parsesess.cm,
span_diagnostic_handler);
let cfg = driver::build_configuration(&sess);
let krate = driver::phase_1_parse_input(&sess, cfg, &input);
let (krate, _) = driver::phase_2_configure_and_expand(sess, &mut Loader::new(sess), krate,
let (krate, _) = driver::phase_2_configure_and_expand(&sess, &mut Loader::new(&sess), krate,
&from_str("rustdoc-test").unwrap());
let ctx = @core::DocContext {
@@ -88,10 +84,9 @@ pub fn run(input: &str, libs: HashSet<Path>, mut test_args: ~[~str]) -> int {
fn runtest(test: &str, cratename: &str, libs: HashSet<Path>, should_fail: bool,
no_run: bool, loose_feature_gating: bool) {
let test = maketest(test, cratename, loose_feature_gating);
let parsesess = parse::new_parse_sess();
let input = driver::StrInput(test);
let sessopts = @session::Options {
let sessopts = session::Options {
maybe_sysroot: Some(os::self_exe_path().unwrap().dir_path()),
addl_lib_search_paths: RefCell::new(libs),
crate_types: vec!(session::CrateTypeExecutable),
@@ -100,7 +95,7 @@ fn runtest(test: &str, cratename: &str, libs: HashSet<Path>, should_fail: bool,
prefer_dynamic: true,
.. session::basic_codegen_options()
},
.. (*session::basic_options()).clone()
..session::basic_options().clone()
};
// Shuffle around a few input and output handles here. We're going to pass
@@ -126,13 +121,13 @@ fn runtest(test: &str, cratename: &str, libs: HashSet<Path>, should_fail: bool,
let emitter = diagnostic::EmitterWriter::new(~w2);
// Compile the code
let codemap = CodeMap::new();
let diagnostic_handler = diagnostic::mk_handler(~emitter);
let span_diagnostic_handler =
diagnostic::mk_span_handler(diagnostic_handler, parsesess.cm);
diagnostic::mk_span_handler(diagnostic_handler, codemap);
let sess = driver::build_session_(sessopts,
None,
parsesess.cm,
span_diagnostic_handler);
let outdir = TempDir::new("rustdoctest").expect("rustdoc needs a tempdir");

View File

@@ -1044,7 +1044,7 @@ mod test {
let crate_ast = parse::parse_crate_from_source_str(
~"<test>",
src,
Vec::new(),sess);
Vec::new(), &sess);
// should fail:
let mut loader = ErrLoader;
let cfg = ::syntax::ext::expand::ExpansionConfig {
@@ -1052,7 +1052,7 @@ mod test {
deriving_hash_type_parameter: false,
crate_id: from_str("test").unwrap(),
};
expand_crate(sess,cfg,crate_ast);
expand_crate(&sess,cfg,crate_ast);
}
// make sure that macros can leave scope for modules
@@ -1064,7 +1064,7 @@ mod test {
let crate_ast = parse::parse_crate_from_source_str(
~"<test>",
src,
Vec::new(),sess);
Vec::new(), &sess);
// should fail:
let mut loader = ErrLoader;
let cfg = ::syntax::ext::expand::ExpansionConfig {
@@ -1072,7 +1072,7 @@ mod test {
deriving_hash_type_parameter: false,
crate_id: from_str("test").unwrap(),
};
expand_crate(sess,cfg,crate_ast);
expand_crate(&sess,cfg,crate_ast);
}
// macro_escape modules shouldn't cause macros to leave scope
@@ -1083,7 +1083,7 @@ mod test {
let crate_ast = parse::parse_crate_from_source_str(
~"<test>",
src,
Vec::new(), sess);
Vec::new(), &sess);
// should fail:
let mut loader = ErrLoader;
let cfg = ::syntax::ext::expand::ExpansionConfig {
@@ -1091,7 +1091,7 @@ mod test {
deriving_hash_type_parameter: false,
crate_id: from_str("test").unwrap(),
};
expand_crate(sess, cfg, crate_ast);
expand_crate(&sess, cfg, crate_ast);
}
#[test] fn test_contains_flatten (){
@@ -1127,7 +1127,7 @@ mod test {
fn expand_crate_str(crate_str: ~str) -> ast::Crate {
let ps = parse::new_parse_sess();
let crate_ast = string_to_parser(&ps, source_str).parse_crate_mod();
let crate_ast = string_to_parser(&ps, crate_str).parse_crate_mod();
// the cfg argument actually does matter, here...
let mut loader = ErrLoader;
let cfg = ::syntax::ext::expand::ExpansionConfig {

View File

@@ -880,9 +880,9 @@ mod test {
use super::*;
// this version doesn't care about getting comments or docstrings in.
fn fake_print_crate(s: &mut pprust::State,
krate: &ast::Crate) -> io::IoResult<()> {
pprust::print_mod(s, &krate.module, krate.attrs.as_slice())
fn fake_print_crate<A: pprust::PpAnn>(s: &mut pprust::State<A>,
krate: &ast::Crate) -> io::IoResult<()> {
s.print_mod(&krate.module, krate.attrs.as_slice())
}
// change every identifier to "zz"
@@ -914,9 +914,10 @@ mod test {
let mut zz_fold = ToZzIdentFolder;
let ast = string_to_crate(
~"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}");
let folded_crate = zz_fold.fold_crate(ast);
assert_pred!(matches_codepattern,
"matches_codepattern",
pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate),
pprust::to_str(|s| fake_print_crate(s, &folded_crate)),
~"#[a]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}");
}
@@ -926,9 +927,10 @@ mod test {
let ast = string_to_crate(
~"macro_rules! a {(b $c:expr $(d $e:token)f+ => \
(g $(d $d $e)+))} ");
let folded_crate = zz_fold.fold_crate(ast);
assert_pred!(matches_codepattern,
"matches_codepattern",
pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate),
pprust::to_str(|s| fake_print_crate(s, &folded_crate)),
~"zz!zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)))");
}
}

View File

@@ -1007,28 +1007,24 @@ mod test {
use std::io::util;
use std::vec_ng::Vec;
// represents a testing reader (incl. both reader and interner)
struct Env {
string_reader: StringReader
fn mk_sh() -> diagnostic::SpanHandler {
let emitter = diagnostic::EmitterWriter::new(~util::NullWriter);
let handler = diagnostic::mk_handler(~emitter);
diagnostic::mk_span_handler(handler, CodeMap::new())
}
// open a string reader for the given string
fn setup(teststr: ~str) -> Env {
let cm = CodeMap::new();
let fm = cm.new_filemap(~"zebra.rs", teststr);
let writer = ~util::NullWriter;
let emitter = diagnostic::EmitterWriter::new(writer);
let handler = diagnostic::mk_handler(~emitter);
let span_handler = diagnostic::mk_span_handler(handler, cm);
Env {
string_reader: new_string_reader(span_handler,fm)
}
fn setup<'a>(span_handler: &'a diagnostic::SpanHandler,
teststr: ~str) -> StringReader<'a> {
let fm = span_handler.cm.new_filemap(~"zebra.rs", teststr);
new_string_reader(span_handler, fm)
}
#[test] fn t1 () {
let Env {string_reader} =
setup(~"/* my source file */ \
fn main() { println!(\"zebra\"); }\n");
let span_handler = mk_sh();
let string_reader = setup(&span_handler,
~"/* my source file */ \
fn main() { println!(\"zebra\"); }\n");
let id = str_to_ident("fn");
let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan{
@@ -1049,11 +1045,9 @@ mod test {
// check that the given reader produces the desired stream
// of tokens (stop checking after exhausting the expected vec)
fn check_tokenization (env: Env, expected: Vec<token::Token> ) {
fn check_tokenization (string_reader: StringReader, expected: Vec<token::Token> ) {
for expected_tok in expected.iter() {
let TokenAndSpan {tok:actual_tok, sp: _} =
env.string_reader.next_token();
assert_eq!(&actual_tok,expected_tok);
assert_eq!(&string_reader.next_token().tok, expected_tok);
}
}
@@ -1063,71 +1057,55 @@ mod test {
}
#[test] fn doublecolonparsing () {
let env = setup (~"a b");
check_tokenization (env,
check_tokenization(setup(&mk_sh(), ~"a b"),
vec!(mk_ident("a",false),
mk_ident("b",false)));
}
#[test] fn dcparsing_2 () {
let env = setup (~"a::b");
check_tokenization (env,
check_tokenization(setup(&mk_sh(), ~"a::b"),
vec!(mk_ident("a",true),
token::MOD_SEP,
mk_ident("b",false)));
}
#[test] fn dcparsing_3 () {
let env = setup (~"a ::b");
check_tokenization (env,
check_tokenization(setup(&mk_sh(), ~"a ::b"),
vec!(mk_ident("a",false),
token::MOD_SEP,
mk_ident("b",false)));
}
#[test] fn dcparsing_4 () {
let env = setup (~"a:: b");
check_tokenization (env,
check_tokenization(setup(&mk_sh(), ~"a:: b"),
vec!(mk_ident("a",true),
token::MOD_SEP,
mk_ident("b",false)));
}
#[test] fn character_a() {
let env = setup(~"'a'");
let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token();
assert_eq!(tok,token::LIT_CHAR('a' as u32));
assert_eq!(setup(&mk_sh(), ~"'a'").next_token().tok,
token::LIT_CHAR('a' as u32));
}
#[test] fn character_space() {
let env = setup(~"' '");
let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token();
assert_eq!(tok, token::LIT_CHAR(' ' as u32));
assert_eq!(setup(&mk_sh(), ~"' '").next_token().tok,
token::LIT_CHAR(' ' as u32));
}
#[test] fn character_escaped() {
let env = setup(~"'\\n'");
let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token();
assert_eq!(tok, token::LIT_CHAR('\n' as u32));
assert_eq!(setup(&mk_sh(), ~"'\\n'").next_token().tok,
token::LIT_CHAR('\n' as u32));
}
#[test] fn lifetime_name() {
let env = setup(~"'abc");
let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token();
let id = token::str_to_ident("abc");
assert_eq!(tok, token::LIFETIME(id));
assert_eq!(setup(&mk_sh(), ~"'abc").next_token().tok,
token::LIFETIME(token::str_to_ident("abc")));
}
#[test] fn raw_string() {
let env = setup(~"r###\"\"#a\\b\x00c\"\"###");
let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token();
let id = token::str_to_ident("\"#a\\b\x00c\"");
assert_eq!(tok, token::LIT_STR_RAW(id, 3));
assert_eq!(setup(&mk_sh(), ~"r###\"\"#a\\b\x00c\"\"###").next_token().tok,
token::LIT_STR_RAW(token::str_to_ident("\"#a\\b\x00c\""), 3));
}
#[test] fn line_doc_comments() {
@@ -1137,10 +1115,8 @@ mod test {
}
#[test] fn nested_block_comments() {
let env = setup(~"/* /* */ */'a'");
let TokenAndSpan {tok, sp: _} =
env.string_reader.next_token();
assert_eq!(tok,token::LIT_CHAR('a' as u32));
assert_eq!(setup(&mk_sh(), ~"/* /* */ */'a'").next_token().tok,
token::LIT_CHAR('a' as u32));
}
}

View File

@@ -584,7 +584,8 @@ mod test {
}
#[test] fn parse_ident_pat () {
let mut parser = string_to_parser(&new_parse_sess(), ~"b");
let sess = new_parse_sess();
let mut parser = string_to_parser(&sess, ~"b");
assert!(parser.parse_pat() ==
@ast::Pat{id: ast::DUMMY_NODE_ID,
node: ast::PatIdent(

View File

@@ -29,7 +29,8 @@ pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: ~str) -> Parser<'a> {
}
fn with_error_checking_parse<T>(s: ~str, f: |&mut Parser| -> T) -> T {
let mut p = string_to_parser(&new_parse_sess(), s);
let ps = new_parse_sess();
let mut p = string_to_parser(&ps, s);
let x = f(&mut p);
p.abort_if_errors();
x