Remove syntax and syntax_pos thread locals

This commit is contained in:
John Kåre Alsaker 2018-03-07 02:44:10 +01:00
parent fab632f975
commit cbdf4ec03e
29 changed files with 1212 additions and 998 deletions

2
src/Cargo.lock generated
View File

@@ -2432,6 +2432,7 @@ dependencies = [
"rustc_cratesio_shim 0.0.0", "rustc_cratesio_shim 0.0.0",
"rustc_data_structures 0.0.0", "rustc_data_structures 0.0.0",
"rustc_errors 0.0.0", "rustc_errors 0.0.0",
"scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serialize 0.0.0", "serialize 0.0.0",
"syntax_pos 0.0.0", "syntax_pos 0.0.0",
] ]
@@ -2453,6 +2454,7 @@ name = "syntax_pos"
version = "0.0.0" version = "0.0.0"
dependencies = [ dependencies = [
"rustc_data_structures 0.0.0", "rustc_data_structures 0.0.0",
"scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serialize 0.0.0", "serialize 0.0.0",
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
] ]

View File

@@ -2391,6 +2391,7 @@ mod tests {
use super::{Externs, OutputType, OutputTypes}; use super::{Externs, OutputType, OutputTypes};
use rustc_back::{PanicStrategy, RelroLevel}; use rustc_back::{PanicStrategy, RelroLevel};
use syntax::symbol::Symbol; use syntax::symbol::Symbol;
use syntax;
fn optgroups() -> getopts::Options { fn optgroups() -> getopts::Options {
let mut opts = getopts::Options::new(); let mut opts = getopts::Options::new();
@@ -2411,45 +2412,50 @@ mod tests {
// When the user supplies --test we should implicitly supply --cfg test // When the user supplies --test we should implicitly supply --cfg test
#[test] #[test]
fn test_switch_implies_cfg_test() { fn test_switch_implies_cfg_test() {
let matches = &match optgroups().parse(&["--test".to_string()]) { syntax::with_globals(|| {
Ok(m) => m, let matches = &match optgroups().parse(&["--test".to_string()]) {
Err(f) => panic!("test_switch_implies_cfg_test: {}", f), Ok(m) => m,
}; Err(f) => panic!("test_switch_implies_cfg_test: {}", f),
let registry = errors::registry::Registry::new(&[]); };
let (sessopts, cfg) = build_session_options_and_crate_config(matches); let registry = errors::registry::Registry::new(&[]);
let sess = build_session(sessopts, None, registry); let (sessopts, cfg) = build_session_options_and_crate_config(matches);
let cfg = build_configuration(&sess, cfg); let sess = build_session(sessopts, None, registry);
assert!(cfg.contains(&(Symbol::intern("test"), None))); let cfg = build_configuration(&sess, cfg);
assert!(cfg.contains(&(Symbol::intern("test"), None)));
});
} }
// When the user supplies --test and --cfg test, don't implicitly add // When the user supplies --test and --cfg test, don't implicitly add
// another --cfg test // another --cfg test
#[test] #[test]
fn test_switch_implies_cfg_test_unless_cfg_test() { fn test_switch_implies_cfg_test_unless_cfg_test() {
let matches = &match optgroups().parse(&["--test".to_string(), "--cfg=test".to_string()]) { syntax::with_globals(|| {
Ok(m) => m, let matches = &match optgroups().parse(&["--test".to_string(),
Err(f) => panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f), "--cfg=test".to_string()]) {
}; Ok(m) => m,
let registry = errors::registry::Registry::new(&[]); Err(f) => panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f),
let (sessopts, cfg) = build_session_options_and_crate_config(matches); };
let sess = build_session(sessopts, None, registry); let registry = errors::registry::Registry::new(&[]);
let cfg = build_configuration(&sess, cfg); let (sessopts, cfg) = build_session_options_and_crate_config(matches);
let mut test_items = cfg.iter().filter(|&&(name, _)| name == "test"); let sess = build_session(sessopts, None, registry);
assert!(test_items.next().is_some()); let cfg = build_configuration(&sess, cfg);
assert!(test_items.next().is_none()); let mut test_items = cfg.iter().filter(|&&(name, _)| name == "test");
assert!(test_items.next().is_some());
assert!(test_items.next().is_none());
});
} }
#[test] #[test]
fn test_can_print_warnings() { fn test_can_print_warnings() {
{ syntax::with_globals(|| {
let matches = optgroups().parse(&["-Awarnings".to_string()]).unwrap(); let matches = optgroups().parse(&["-Awarnings".to_string()]).unwrap();
let registry = errors::registry::Registry::new(&[]); let registry = errors::registry::Registry::new(&[]);
let (sessopts, _) = build_session_options_and_crate_config(&matches); let (sessopts, _) = build_session_options_and_crate_config(&matches);
let sess = build_session(sessopts, None, registry); let sess = build_session(sessopts, None, registry);
assert!(!sess.diagnostic().flags.can_emit_warnings); assert!(!sess.diagnostic().flags.can_emit_warnings);
} });
{ syntax::with_globals(|| {
let matches = optgroups() let matches = optgroups()
.parse(&["-Awarnings".to_string(), "-Dwarnings".to_string()]) .parse(&["-Awarnings".to_string(), "-Dwarnings".to_string()])
.unwrap(); .unwrap();
@@ -2457,15 +2463,15 @@ mod tests {
let (sessopts, _) = build_session_options_and_crate_config(&matches); let (sessopts, _) = build_session_options_and_crate_config(&matches);
let sess = build_session(sessopts, None, registry); let sess = build_session(sessopts, None, registry);
assert!(sess.diagnostic().flags.can_emit_warnings); assert!(sess.diagnostic().flags.can_emit_warnings);
} });
{ syntax::with_globals(|| {
let matches = optgroups().parse(&["-Adead_code".to_string()]).unwrap(); let matches = optgroups().parse(&["-Adead_code".to_string()]).unwrap();
let registry = errors::registry::Registry::new(&[]); let registry = errors::registry::Registry::new(&[]);
let (sessopts, _) = build_session_options_and_crate_config(&matches); let (sessopts, _) = build_session_options_and_crate_config(&matches);
let sess = build_session(sessopts, None, registry); let sess = build_session(sessopts, None, registry);
assert!(sess.diagnostic().flags.can_emit_warnings); assert!(sess.diagnostic().flags.can_emit_warnings);
} });
} }
#[test] #[test]

View File

@@ -447,6 +447,17 @@ pub fn run_compiler<'a>(args: &[String],
file_loader: Option<Box<FileLoader + 'static>>, file_loader: Option<Box<FileLoader + 'static>>,
emitter_dest: Option<Box<Write + Send>>) emitter_dest: Option<Box<Write + Send>>)
-> (CompileResult, Option<Session>) -> (CompileResult, Option<Session>)
{
syntax::with_globals(|| {
run_compiler_impl(args, callbacks, file_loader, emitter_dest)
})
}
fn run_compiler_impl<'a>(args: &[String],
callbacks: &mut CompilerCalls<'a>,
file_loader: Option<Box<FileLoader + 'static>>,
emitter_dest: Option<Box<Write + Send>>)
-> (CompileResult, Option<Session>)
{ {
macro_rules! do_or_return {($expr: expr, $sess: expr) => { macro_rules! do_or_return {($expr: expr, $sess: expr) => {
match $expr { match $expr {

View File

@@ -29,6 +29,7 @@ use rustc::hir::map as hir_map;
use rustc::session::{self, config}; use rustc::session::{self, config};
use rustc::session::config::{OutputFilenames, OutputTypes}; use rustc::session::config::{OutputFilenames, OutputTypes};
use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::Lrc;
use syntax;
use syntax::ast; use syntax::ast;
use syntax::abi::Abi; use syntax::abi::Abi;
use syntax::codemap::{CodeMap, FilePathMapping, FileName}; use syntax::codemap::{CodeMap, FilePathMapping, FileName};
@@ -93,9 +94,19 @@ fn errors(msgs: &[&str]) -> (Box<Emitter + Send>, usize) {
} }
fn test_env<F>(source_string: &str, fn test_env<F>(source_string: &str,
(emitter, expected_err_count): (Box<Emitter + Send>, usize), args: (Box<Emitter + Send>, usize),
body: F) body: F)
where F: FnOnce(Env) where F: FnOnce(Env)
{
syntax::with_globals(|| {
test_env_impl(source_string, args, body)
});
}
fn test_env_impl<F>(source_string: &str,
(emitter, expected_err_count): (Box<Emitter + Send>, usize),
body: F)
where F: FnOnce(Env)
{ {
let mut options = config::basic_options(); let mut options = config::basic_options();
options.debugging_opts.verbose = true; options.debugging_opts.verbose = true;

View File

@@ -398,6 +398,7 @@ mod test {
use syntax::ast::*; use syntax::ast::*;
use syntax::codemap::dummy_spanned; use syntax::codemap::dummy_spanned;
use syntax_pos::DUMMY_SP; use syntax_pos::DUMMY_SP;
use syntax::with_globals;
fn word_cfg(s: &str) -> Cfg { fn word_cfg(s: &str) -> Cfg {
Cfg::Cfg(Symbol::intern(s), None) Cfg::Cfg(Symbol::intern(s), None)
@@ -409,479 +410,494 @@ mod test {
#[test] #[test]
fn test_cfg_not() { fn test_cfg_not() {
assert_eq!(!Cfg::False, Cfg::True); with_globals(|| {
assert_eq!(!Cfg::True, Cfg::False); assert_eq!(!Cfg::False, Cfg::True);
assert_eq!(!word_cfg("test"), Cfg::Not(Box::new(word_cfg("test")))); assert_eq!(!Cfg::True, Cfg::False);
assert_eq!( assert_eq!(!word_cfg("test"), Cfg::Not(Box::new(word_cfg("test"))));
!Cfg::All(vec![word_cfg("a"), word_cfg("b")]), assert_eq!(
Cfg::Not(Box::new(Cfg::All(vec![word_cfg("a"), word_cfg("b")]))) !Cfg::All(vec![word_cfg("a"), word_cfg("b")]),
); Cfg::Not(Box::new(Cfg::All(vec![word_cfg("a"), word_cfg("b")])))
assert_eq!( );
!Cfg::Any(vec![word_cfg("a"), word_cfg("b")]), assert_eq!(
Cfg::Not(Box::new(Cfg::Any(vec![word_cfg("a"), word_cfg("b")]))) !Cfg::Any(vec![word_cfg("a"), word_cfg("b")]),
); Cfg::Not(Box::new(Cfg::Any(vec![word_cfg("a"), word_cfg("b")])))
assert_eq!(!Cfg::Not(Box::new(word_cfg("test"))), word_cfg("test")); );
assert_eq!(!Cfg::Not(Box::new(word_cfg("test"))), word_cfg("test"));
})
} }
#[test] #[test]
fn test_cfg_and() { fn test_cfg_and() {
let mut x = Cfg::False; with_globals(|| {
x &= Cfg::True; let mut x = Cfg::False;
assert_eq!(x, Cfg::False); x &= Cfg::True;
assert_eq!(x, Cfg::False);
x = word_cfg("test"); x = word_cfg("test");
x &= Cfg::False; x &= Cfg::False;
assert_eq!(x, Cfg::False); assert_eq!(x, Cfg::False);
x = word_cfg("test2"); x = word_cfg("test2");
x &= Cfg::True; x &= Cfg::True;
assert_eq!(x, word_cfg("test2")); assert_eq!(x, word_cfg("test2"));
x = Cfg::True; x = Cfg::True;
x &= word_cfg("test3"); x &= word_cfg("test3");
assert_eq!(x, word_cfg("test3")); assert_eq!(x, word_cfg("test3"));
x &= word_cfg("test4"); x &= word_cfg("test4");
assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4")])); assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4")]));
x &= word_cfg("test5"); x &= word_cfg("test5");
assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")])); assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")]));
x &= Cfg::All(vec![word_cfg("test6"), word_cfg("test7")]); x &= Cfg::All(vec![word_cfg("test6"), word_cfg("test7")]);
assert_eq!(x, Cfg::All(vec![ assert_eq!(x, Cfg::All(vec![
word_cfg("test3"), word_cfg("test3"),
word_cfg("test4"), word_cfg("test4"),
word_cfg("test5"), word_cfg("test5"),
word_cfg("test6"), word_cfg("test6"),
word_cfg("test7"), word_cfg("test7"),
])); ]));
let mut y = Cfg::Any(vec![word_cfg("a"), word_cfg("b")]); let mut y = Cfg::Any(vec![word_cfg("a"), word_cfg("b")]);
y &= x; y &= x;
assert_eq!(y, Cfg::All(vec![ assert_eq!(y, Cfg::All(vec![
word_cfg("test3"), word_cfg("test3"),
word_cfg("test4"), word_cfg("test4"),
word_cfg("test5"), word_cfg("test5"),
word_cfg("test6"), word_cfg("test6"),
word_cfg("test7"), word_cfg("test7"),
Cfg::Any(vec![word_cfg("a"), word_cfg("b")]), Cfg::Any(vec![word_cfg("a"), word_cfg("b")]),
])); ]));
assert_eq!( assert_eq!(
word_cfg("a") & word_cfg("b") & word_cfg("c"), word_cfg("a") & word_cfg("b") & word_cfg("c"),
Cfg::All(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")]) Cfg::All(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")])
); );
})
} }
#[test] #[test]
fn test_cfg_or() { fn test_cfg_or() {
let mut x = Cfg::True; with_globals(|| {
x |= Cfg::False; let mut x = Cfg::True;
assert_eq!(x, Cfg::True); x |= Cfg::False;
assert_eq!(x, Cfg::True);
x = word_cfg("test"); x = word_cfg("test");
x |= Cfg::True; x |= Cfg::True;
assert_eq!(x, Cfg::True); assert_eq!(x, Cfg::True);
x = word_cfg("test2"); x = word_cfg("test2");
x |= Cfg::False; x |= Cfg::False;
assert_eq!(x, word_cfg("test2")); assert_eq!(x, word_cfg("test2"));
x = Cfg::False; x = Cfg::False;
x |= word_cfg("test3"); x |= word_cfg("test3");
assert_eq!(x, word_cfg("test3")); assert_eq!(x, word_cfg("test3"));
x |= word_cfg("test4"); x |= word_cfg("test4");
assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4")])); assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4")]));
x |= word_cfg("test5"); x |= word_cfg("test5");
assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")])); assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")]));
x |= Cfg::Any(vec![word_cfg("test6"), word_cfg("test7")]); x |= Cfg::Any(vec![word_cfg("test6"), word_cfg("test7")]);
assert_eq!(x, Cfg::Any(vec![ assert_eq!(x, Cfg::Any(vec![
word_cfg("test3"), word_cfg("test3"),
word_cfg("test4"), word_cfg("test4"),
word_cfg("test5"), word_cfg("test5"),
word_cfg("test6"), word_cfg("test6"),
word_cfg("test7"), word_cfg("test7"),
])); ]));
let mut y = Cfg::All(vec![word_cfg("a"), word_cfg("b")]); let mut y = Cfg::All(vec![word_cfg("a"), word_cfg("b")]);
y |= x; y |= x;
assert_eq!(y, Cfg::Any(vec![ assert_eq!(y, Cfg::Any(vec![
word_cfg("test3"), word_cfg("test3"),
word_cfg("test4"), word_cfg("test4"),
word_cfg("test5"), word_cfg("test5"),
word_cfg("test6"), word_cfg("test6"),
word_cfg("test7"), word_cfg("test7"),
Cfg::All(vec![word_cfg("a"), word_cfg("b")]), Cfg::All(vec![word_cfg("a"), word_cfg("b")]),
])); ]));
assert_eq!( assert_eq!(
word_cfg("a") | word_cfg("b") | word_cfg("c"), word_cfg("a") | word_cfg("b") | word_cfg("c"),
Cfg::Any(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")]) Cfg::Any(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")])
); );
})
} }
#[test] #[test]
fn test_parse_ok() { fn test_parse_ok() {
let mi = MetaItem { with_globals(|| {
name: Symbol::intern("all"), let mi = MetaItem {
node: MetaItemKind::Word, name: Symbol::intern("all"),
span: DUMMY_SP, node: MetaItemKind::Word,
}; span: DUMMY_SP,
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all"))); };
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all")));
let mi = MetaItem { let mi = MetaItem {
name: Symbol::intern("all"), name: Symbol::intern("all"),
node: MetaItemKind::NameValue(dummy_spanned(LitKind::Str( node: MetaItemKind::NameValue(dummy_spanned(LitKind::Str(
Symbol::intern("done"), Symbol::intern("done"),
StrStyle::Cooked, StrStyle::Cooked,
))), ))),
span: DUMMY_SP, span: DUMMY_SP,
}; };
assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done"))); assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done")));
let mi = MetaItem { let mi = MetaItem {
name: Symbol::intern("all"), name: Symbol::intern("all"),
node: MetaItemKind::List(vec![ node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"), name: Symbol::intern("a"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"), name: Symbol::intern("b"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
]), ]),
span: DUMMY_SP, span: DUMMY_SP,
}; };
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b"))); assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b")));
let mi = MetaItem { let mi = MetaItem {
name: Symbol::intern("any"), name: Symbol::intern("any"),
node: MetaItemKind::List(vec![ node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"), name: Symbol::intern("a"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"), name: Symbol::intern("b"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
]), ]),
span: DUMMY_SP, span: DUMMY_SP,
}; };
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") | word_cfg("b"))); assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") | word_cfg("b")));
let mi = MetaItem { let mi = MetaItem {
name: Symbol::intern("not"), name: Symbol::intern("not"),
node: MetaItemKind::List(vec![ node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"), name: Symbol::intern("a"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
]), ]),
span: DUMMY_SP, span: DUMMY_SP,
}; };
assert_eq!(Cfg::parse(&mi), Ok(!word_cfg("a"))); assert_eq!(Cfg::parse(&mi), Ok(!word_cfg("a")));
let mi = MetaItem { let mi = MetaItem {
name: Symbol::intern("not"), name: Symbol::intern("not"),
node: MetaItemKind::List(vec![ node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("any"), name: Symbol::intern("any"),
node: MetaItemKind::List(vec![ node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"), name: Symbol::intern("a"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("all"), name: Symbol::intern("all"),
node: MetaItemKind::List(vec![ node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"), name: Symbol::intern("b"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("c"), name: Symbol::intern("c"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
]), ]),
span: DUMMY_SP, span: DUMMY_SP,
})), })),
]), ]),
span: DUMMY_SP, span: DUMMY_SP,
})), })),
]), ]),
span: DUMMY_SP, span: DUMMY_SP,
}; };
assert_eq!(Cfg::parse(&mi), Ok(!(word_cfg("a") | (word_cfg("b") & word_cfg("c"))))); assert_eq!(Cfg::parse(&mi), Ok(!(word_cfg("a") | (word_cfg("b") & word_cfg("c")))));
let mi = MetaItem { let mi = MetaItem {
name: Symbol::intern("all"), name: Symbol::intern("all"),
node: MetaItemKind::List(vec![ node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"), name: Symbol::intern("a"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"), name: Symbol::intern("b"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("c"), name: Symbol::intern("c"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
]), ]),
span: DUMMY_SP, span: DUMMY_SP,
}; };
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b") & word_cfg("c"))); assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b") & word_cfg("c")));
})
} }
#[test] #[test]
fn test_parse_err() { fn test_parse_err() {
let mi = MetaItem { with_globals(|| {
name: Symbol::intern("foo"), let mi = MetaItem {
node: MetaItemKind::NameValue(dummy_spanned(LitKind::Bool(false))), name: Symbol::intern("foo"),
span: DUMMY_SP, node: MetaItemKind::NameValue(dummy_spanned(LitKind::Bool(false))),
}; span: DUMMY_SP,
assert!(Cfg::parse(&mi).is_err()); };
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem { let mi = MetaItem {
name: Symbol::intern("not"), name: Symbol::intern("not"),
node: MetaItemKind::List(vec![ node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"), name: Symbol::intern("a"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"), name: Symbol::intern("b"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
]), ]),
span: DUMMY_SP, span: DUMMY_SP,
}; };
assert!(Cfg::parse(&mi).is_err()); assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem { let mi = MetaItem {
name: Symbol::intern("not"), name: Symbol::intern("not"),
node: MetaItemKind::List(vec![]), node: MetaItemKind::List(vec![]),
span: DUMMY_SP, span: DUMMY_SP,
}; };
assert!(Cfg::parse(&mi).is_err()); assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem { let mi = MetaItem {
name: Symbol::intern("foo"), name: Symbol::intern("foo"),
node: MetaItemKind::List(vec![ node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"), name: Symbol::intern("a"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
]), ]),
span: DUMMY_SP, span: DUMMY_SP,
}; };
assert!(Cfg::parse(&mi).is_err()); assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem { let mi = MetaItem {
name: Symbol::intern("all"), name: Symbol::intern("all"),
node: MetaItemKind::List(vec![ node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("foo"), name: Symbol::intern("foo"),
node: MetaItemKind::List(vec![]), node: MetaItemKind::List(vec![]),
span: DUMMY_SP, span: DUMMY_SP,
})), })),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"), name: Symbol::intern("b"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
]), ]),
span: DUMMY_SP, span: DUMMY_SP,
}; };
assert!(Cfg::parse(&mi).is_err()); assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem { let mi = MetaItem {
name: Symbol::intern("any"), name: Symbol::intern("any"),
node: MetaItemKind::List(vec![ node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"), name: Symbol::intern("a"),
node: MetaItemKind::Word, node: MetaItemKind::Word,
span: DUMMY_SP, span: DUMMY_SP,
})), })),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("foo"), name: Symbol::intern("foo"),
node: MetaItemKind::List(vec![]), node: MetaItemKind::List(vec![]),
span: DUMMY_SP, span: DUMMY_SP,
})), })),
]), ]),
span: DUMMY_SP, span: DUMMY_SP,
}; };
assert!(Cfg::parse(&mi).is_err()); assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem { let mi = MetaItem {
name: Symbol::intern("not"), name: Symbol::intern("not"),
node: MetaItemKind::List(vec![ node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("foo"), name: Symbol::intern("foo"),
node: MetaItemKind::List(vec![]), node: MetaItemKind::List(vec![]),
span: DUMMY_SP, span: DUMMY_SP,
})), })),
]), ]),
span: DUMMY_SP, span: DUMMY_SP,
}; };
assert!(Cfg::parse(&mi).is_err()); assert!(Cfg::parse(&mi).is_err());
})
} }
#[test] #[test]
fn test_render_short_html() { fn test_render_short_html() {
assert_eq!( with_globals(|| {
word_cfg("unix").render_short_html(), assert_eq!(
"Unix" word_cfg("unix").render_short_html(),
); "Unix"
assert_eq!( );
name_value_cfg("target_os", "macos").render_short_html(), assert_eq!(
"macOS" name_value_cfg("target_os", "macos").render_short_html(),
); "macOS"
assert_eq!( );
name_value_cfg("target_pointer_width", "16").render_short_html(), assert_eq!(
"16-bit" name_value_cfg("target_pointer_width", "16").render_short_html(),
); "16-bit"
assert_eq!( );
name_value_cfg("target_endian", "little").render_short_html(), assert_eq!(
"Little-endian" name_value_cfg("target_endian", "little").render_short_html(),
); "Little-endian"
assert_eq!( );
(!word_cfg("windows")).render_short_html(), assert_eq!(
"Non-Windows" (!word_cfg("windows")).render_short_html(),
); "Non-Windows"
assert_eq!( );
(word_cfg("unix") & word_cfg("windows")).render_short_html(), assert_eq!(
"Unix and Windows" (word_cfg("unix") & word_cfg("windows")).render_short_html(),
); "Unix and Windows"
assert_eq!( );
(word_cfg("unix") | word_cfg("windows")).render_short_html(), assert_eq!(
"Unix or Windows" (word_cfg("unix") | word_cfg("windows")).render_short_html(),
); "Unix or Windows"
assert_eq!( );
( assert_eq!(
word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions") (
).render_short_html(), word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions")
"Unix and Windows and debug-assertions enabled" ).render_short_html(),
); "Unix and Windows and debug-assertions enabled"
assert_eq!( );
( assert_eq!(
word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions") (
).render_short_html(), word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")
"Unix or Windows or debug-assertions enabled" ).render_short_html(),
); "Unix or Windows or debug-assertions enabled"
assert_eq!( );
( assert_eq!(
!(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")) (
).render_short_html(), !(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions"))
"Neither Unix nor Windows nor debug-assertions enabled" ).render_short_html(),
); "Neither Unix nor Windows nor debug-assertions enabled"
assert_eq!( );
( assert_eq!(
(word_cfg("unix") & name_value_cfg("target_arch", "x86_64")) | (
(word_cfg("windows") & name_value_cfg("target_pointer_width", "64")) (word_cfg("unix") & name_value_cfg("target_arch", "x86_64")) |
).render_short_html(), (word_cfg("windows") & name_value_cfg("target_pointer_width", "64"))
"Unix and x86-64, or Windows and 64-bit" ).render_short_html(),
); "Unix and x86-64, or Windows and 64-bit"
assert_eq!( );
(!(word_cfg("unix") & word_cfg("windows"))).render_short_html(), assert_eq!(
"Not (Unix and Windows)" (!(word_cfg("unix") & word_cfg("windows"))).render_short_html(),
); "Not (Unix and Windows)"
assert_eq!( );
( assert_eq!(
(word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix") (
).render_short_html(), (word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix")
"(Debug-assertions enabled or Windows) and Unix" ).render_short_html(),
); "(Debug-assertions enabled or Windows) and Unix"
);
})
} }
#[test] #[test]
fn test_render_long_html() { fn test_render_long_html() {
assert_eq!( with_globals(|| {
word_cfg("unix").render_long_html(), assert_eq!(
"This is supported on <strong>Unix</strong> only." word_cfg("unix").render_long_html(),
); "This is supported on <strong>Unix</strong> only."
assert_eq!( );
name_value_cfg("target_os", "macos").render_long_html(), assert_eq!(
"This is supported on <strong>macOS</strong> only." name_value_cfg("target_os", "macos").render_long_html(),
); "This is supported on <strong>macOS</strong> only."
assert_eq!( );
name_value_cfg("target_pointer_width", "16").render_long_html(), assert_eq!(
"This is supported on <strong>16-bit</strong> only." name_value_cfg("target_pointer_width", "16").render_long_html(),
); "This is supported on <strong>16-bit</strong> only."
assert_eq!( );
name_value_cfg("target_endian", "little").render_long_html(), assert_eq!(
"This is supported on <strong>little-endian</strong> only." name_value_cfg("target_endian", "little").render_long_html(),
); "This is supported on <strong>little-endian</strong> only."
assert_eq!( );
(!word_cfg("windows")).render_long_html(), assert_eq!(
"This is supported on <strong>non-Windows</strong> only." (!word_cfg("windows")).render_long_html(),
); "This is supported on <strong>non-Windows</strong> only."
assert_eq!( );
(word_cfg("unix") & word_cfg("windows")).render_long_html(), assert_eq!(
"This is supported on <strong>Unix and Windows</strong> only." (word_cfg("unix") & word_cfg("windows")).render_long_html(),
); "This is supported on <strong>Unix and Windows</strong> only."
assert_eq!( );
(word_cfg("unix") | word_cfg("windows")).render_long_html(), assert_eq!(
"This is supported on <strong>Unix or Windows</strong> only." (word_cfg("unix") | word_cfg("windows")).render_long_html(),
); "This is supported on <strong>Unix or Windows</strong> only."
assert_eq!( );
( assert_eq!(
word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions") (
).render_long_html(), word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions")
"This is supported on <strong>Unix and Windows and debug-assertions enabled</strong> \ ).render_long_html(),
only." "This is supported on <strong>Unix and Windows and debug-assertions enabled\
); </strong> only."
assert_eq!( );
( assert_eq!(
word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions") (
).render_long_html(), word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")
"This is supported on <strong>Unix or Windows or debug-assertions enabled</strong> \ ).render_long_html(),
only." "This is supported on <strong>Unix or Windows or debug-assertions enabled\
); </strong> only."
assert_eq!( );
( assert_eq!(
!(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")) (
).render_long_html(), !(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions"))
"This is supported on <strong>neither Unix nor Windows nor debug-assertions \ ).render_long_html(),
enabled</strong>." "This is supported on <strong>neither Unix nor Windows nor debug-assertions \
); enabled</strong>."
assert_eq!( );
( assert_eq!(
(word_cfg("unix") & name_value_cfg("target_arch", "x86_64")) | (
(word_cfg("windows") & name_value_cfg("target_pointer_width", "64")) (word_cfg("unix") & name_value_cfg("target_arch", "x86_64")) |
).render_long_html(), (word_cfg("windows") & name_value_cfg("target_pointer_width", "64"))
"This is supported on <strong>Unix and x86-64, or Windows and 64-bit</strong> only." ).render_long_html(),
); "This is supported on <strong>Unix and x86-64, or Windows and 64-bit</strong> \
assert_eq!( only."
(!(word_cfg("unix") & word_cfg("windows"))).render_long_html(), );
"This is supported on <strong>not (Unix and Windows)</strong>." assert_eq!(
); (!(word_cfg("unix") & word_cfg("windows"))).render_long_html(),
assert_eq!( "This is supported on <strong>not (Unix and Windows)</strong>."
( );
(word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix") assert_eq!(
).render_long_html(), (
"This is supported on <strong>(debug-assertions enabled or Windows) and Unix</strong> \ (word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix")
only." ).render_long_html(),
); "This is supported on <strong>(debug-assertions enabled or Windows) and Unix\
</strong> only."
);
})
} }
} }

View File

@@ -102,7 +102,9 @@ pub fn main() {
const STACK_SIZE: usize = 32_000_000; // 32MB const STACK_SIZE: usize = 32_000_000; // 32MB
env_logger::init(); env_logger::init();
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || { let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
get_args().map(|args| main_args(&args)).unwrap_or(1) syntax::with_globals(move || {
get_args().map(|args| main_args(&args)).unwrap_or(1)
})
}).unwrap().join().unwrap_or(101); }).unwrap().join().unwrap_or(101);
process::exit(res as i32); process::exit(res as i32);
} }
@ -554,7 +556,8 @@ where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R {
}); });
let (tx, rx) = channel(); let (tx, rx) = channel();
rustc_driver::monitor(move || {
rustc_driver::monitor(move || syntax::with_globals(move || {
use rustc::session::config::Input; use rustc::session::config::Input;
let (mut krate, renderinfo) = let (mut krate, renderinfo) =
@ -623,7 +626,7 @@ where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R {
let krate = pm.run_plugins(krate); let krate = pm.run_plugins(krate);
tx.send(f(Output { krate: krate, renderinfo: renderinfo, passes: passes })).unwrap(); tx.send(f(Output { krate: krate, renderinfo: renderinfo, passes: passes })).unwrap();
}); }));
rx.recv().unwrap() rx.recv().unwrap()
} }

View File

@ -35,6 +35,7 @@ use rustc_resolve::MakeGlobMap;
use syntax::ast; use syntax::ast;
use syntax::codemap::CodeMap; use syntax::codemap::CodeMap;
use syntax::feature_gate::UnstableFeatures; use syntax::feature_gate::UnstableFeatures;
use syntax::with_globals;
use syntax_pos::{BytePos, DUMMY_SP, Pos, Span, FileName}; use syntax_pos::{BytePos, DUMMY_SP, Pos, Span, FileName};
use errors; use errors;
use errors::emitter::ColorConfig; use errors::emitter::ColorConfig;
@ -518,7 +519,7 @@ impl Collector {
let panic = io::set_panic(None); let panic = io::set_panic(None);
let print = io::set_print(None); let print = io::set_print(None);
match { match {
rustc_driver::in_rustc_thread(move || { rustc_driver::in_rustc_thread(move || with_globals(move || {
io::set_panic(panic); io::set_panic(panic);
io::set_print(print); io::set_print(print);
run_test(&test, run_test(&test,
@ -536,7 +537,7 @@ impl Collector {
&opts, &opts,
maybe_sysroot, maybe_sysroot,
linker) linker)
}) }))
} { } {
Ok(()) => (), Ok(()) => (),
Err(err) => panic::resume_unwind(err), Err(err) => panic::resume_unwind(err),

View File

@ -12,6 +12,7 @@ crate-type = ["dylib"]
bitflags = "1.0" bitflags = "1.0"
serialize = { path = "../libserialize" } serialize = { path = "../libserialize" }
log = "0.4" log = "0.4"
scoped-tls = "0.1"
syntax_pos = { path = "../libsyntax_pos" } syntax_pos = { path = "../libsyntax_pos" }
rustc_cratesio_shim = { path = "../librustc_cratesio_shim" } rustc_cratesio_shim = { path = "../librustc_cratesio_shim" }
rustc_errors = { path = "../librustc_errors" } rustc_errors = { path = "../librustc_errors" }

View File

@ -30,15 +30,10 @@ use ptr::P;
use symbol::Symbol; use symbol::Symbol;
use tokenstream::{TokenStream, TokenTree, Delimited}; use tokenstream::{TokenStream, TokenTree, Delimited};
use util::ThinVec; use util::ThinVec;
use GLOBALS;
use std::cell::RefCell;
use std::iter; use std::iter;
thread_local! {
static USED_ATTRS: RefCell<Vec<u64>> = RefCell::new(Vec::new());
static KNOWN_ATTRS: RefCell<Vec<u64>> = RefCell::new(Vec::new());
}
enum AttrError { enum AttrError {
MultipleItem(Name), MultipleItem(Name),
UnknownMetaItem(Name), UnknownMetaItem(Name),
@ -65,22 +60,24 @@ fn handle_errors(diag: &Handler, span: Span, error: AttrError) {
pub fn mark_used(attr: &Attribute) { pub fn mark_used(attr: &Attribute) {
debug!("Marking {:?} as used.", attr); debug!("Marking {:?} as used.", attr);
let AttrId(id) = attr.id; let AttrId(id) = attr.id;
USED_ATTRS.with(|slot| { GLOBALS.with(|globals| {
let mut slot = globals.used_attrs.lock();
let idx = (id / 64) as usize; let idx = (id / 64) as usize;
let shift = id % 64; let shift = id % 64;
if slot.borrow().len() <= idx { if slot.len() <= idx {
slot.borrow_mut().resize(idx + 1, 0); slot.resize(idx + 1, 0);
} }
slot.borrow_mut()[idx] |= 1 << shift; slot[idx] |= 1 << shift;
}); });
} }
pub fn is_used(attr: &Attribute) -> bool { pub fn is_used(attr: &Attribute) -> bool {
let AttrId(id) = attr.id; let AttrId(id) = attr.id;
USED_ATTRS.with(|slot| { GLOBALS.with(|globals| {
let slot = globals.used_attrs.lock();
let idx = (id / 64) as usize; let idx = (id / 64) as usize;
let shift = id % 64; let shift = id % 64;
slot.borrow().get(idx).map(|bits| bits & (1 << shift) != 0) slot.get(idx).map(|bits| bits & (1 << shift) != 0)
.unwrap_or(false) .unwrap_or(false)
}) })
} }
@ -88,22 +85,24 @@ pub fn is_used(attr: &Attribute) -> bool {
pub fn mark_known(attr: &Attribute) { pub fn mark_known(attr: &Attribute) {
debug!("Marking {:?} as known.", attr); debug!("Marking {:?} as known.", attr);
let AttrId(id) = attr.id; let AttrId(id) = attr.id;
KNOWN_ATTRS.with(|slot| { GLOBALS.with(|globals| {
let mut slot = globals.known_attrs.lock();
let idx = (id / 64) as usize; let idx = (id / 64) as usize;
let shift = id % 64; let shift = id % 64;
if slot.borrow().len() <= idx { if slot.len() <= idx {
slot.borrow_mut().resize(idx + 1, 0); slot.resize(idx + 1, 0);
} }
slot.borrow_mut()[idx] |= 1 << shift; slot[idx] |= 1 << shift;
}); });
} }
pub fn is_known(attr: &Attribute) -> bool { pub fn is_known(attr: &Attribute) -> bool {
let AttrId(id) = attr.id; let AttrId(id) = attr.id;
KNOWN_ATTRS.with(|slot| { GLOBALS.with(|globals| {
let slot = globals.known_attrs.lock();
let idx = (id / 64) as usize; let idx = (id / 64) as usize;
let shift = id % 64; let shift = id % 64;
slot.borrow().get(idx).map(|bits| bits & (1 << shift) != 0) slot.get(idx).map(|bits| bits & (1 << shift) != 0)
.unwrap_or(false) .unwrap_or(false)
}) })
} }

View File

@ -1386,6 +1386,7 @@ mod tests {
use util::parser_testing::{string_to_crate, matches_codepattern}; use util::parser_testing::{string_to_crate, matches_codepattern};
use print::pprust; use print::pprust;
use fold; use fold;
use with_globals;
use super::*; use super::*;
// this version doesn't care about getting comments or docstrings in. // this version doesn't care about getting comments or docstrings in.
@ -1423,28 +1424,32 @@ mod tests {
// make sure idents get transformed everywhere // make sure idents get transformed everywhere
#[test] fn ident_transformation () { #[test] fn ident_transformation () {
let mut zz_fold = ToZzIdentFolder; with_globals(|| {
let ast = string_to_crate( let mut zz_fold = ToZzIdentFolder;
"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string()); let ast = string_to_crate(
let folded_crate = zz_fold.fold_crate(ast); "#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string());
assert_pred!( let folded_crate = zz_fold.fold_crate(ast);
matches_codepattern, assert_pred!(
"matches_codepattern", matches_codepattern,
pprust::to_string(|s| fake_print_crate(s, &folded_crate)), "matches_codepattern",
"#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()); pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
"#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string());
})
} }
// even inside macro defs.... // even inside macro defs....
#[test] fn ident_transformation_in_defs () { #[test] fn ident_transformation_in_defs () {
let mut zz_fold = ToZzIdentFolder; with_globals(|| {
let ast = string_to_crate( let mut zz_fold = ToZzIdentFolder;
"macro_rules! a {(b $c:expr $(d $e:token)f+ => \ let ast = string_to_crate(
(g $(d $d $e)+))} ".to_string()); "macro_rules! a {(b $c:expr $(d $e:token)f+ => \
let folded_crate = zz_fold.fold_crate(ast); (g $(d $d $e)+))} ".to_string());
assert_pred!( let folded_crate = zz_fold.fold_crate(ast);
matches_codepattern, assert_pred!(
"matches_codepattern", matches_codepattern,
pprust::to_string(|s| fake_print_crate(s, &folded_crate)), "matches_codepattern",
"macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string()); pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
"macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string());
})
} }
} }

View File

@ -39,9 +39,12 @@ extern crate std_unicode;
pub extern crate rustc_errors as errors; pub extern crate rustc_errors as errors;
extern crate syntax_pos; extern crate syntax_pos;
extern crate rustc_data_structures; extern crate rustc_data_structures;
#[macro_use] extern crate scoped_tls;
extern crate serialize as rustc_serialize; // used by deriving extern crate serialize as rustc_serialize; // used by deriving
use rustc_data_structures::sync::Lock;
// A variant of 'try!' that panics on an Err. This is used as a crutch on the // A variant of 'try!' that panics on an Err. This is used as a crutch on the
// way towards a non-panic!-prone parser. It should be used for fatal parsing // way towards a non-panic!-prone parser. It should be used for fatal parsing
// errors; eventually we plan to convert all code using panictry to just use // errors; eventually we plan to convert all code using panictry to just use
@ -72,6 +75,33 @@ macro_rules! unwrap_or {
} }
} }
struct Globals {
used_attrs: Lock<Vec<u64>>,
known_attrs: Lock<Vec<u64>>,
syntax_pos_globals: syntax_pos::Globals,
}
impl Globals {
fn new() -> Globals {
Globals {
used_attrs: Lock::new(Vec::new()),
known_attrs: Lock::new(Vec::new()),
syntax_pos_globals: syntax_pos::Globals::new(),
}
}
}
pub fn with_globals<F, R>(f: F) -> R
where F: FnOnce() -> R
{
let globals = Globals::new();
GLOBALS.set(&globals, || {
syntax_pos::GLOBALS.set(&globals.syntax_pos_globals, f)
})
}
scoped_thread_local!(static GLOBALS: Globals);
#[macro_use] #[macro_use]
pub mod diagnostics { pub mod diagnostics {
#[macro_use] #[macro_use]

View File

@ -1766,6 +1766,7 @@ mod tests {
use std::path::PathBuf; use std::path::PathBuf;
use diagnostics::plugin::ErrorMap; use diagnostics::plugin::ErrorMap;
use rustc_data_structures::sync::Lock; use rustc_data_structures::sync::Lock;
use with_globals;
fn mk_sess(cm: Lrc<CodeMap>) -> ParseSess { fn mk_sess(cm: Lrc<CodeMap>) -> ParseSess {
let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
Some(cm.clone()), Some(cm.clone()),
@ -1794,33 +1795,35 @@ mod tests {
#[test] #[test]
fn t1() { fn t1() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let mut string_reader = setup(&cm, let sh = mk_sess(cm.clone());
&sh, let mut string_reader = setup(&cm,
"/* my source file */ fn main() { println!(\"zebra\"); }\n" &sh,
.to_string()); "/* my source file */ fn main() { println!(\"zebra\"); }\n"
let id = Ident::from_str("fn"); .to_string());
assert_eq!(string_reader.next_token().tok, token::Comment); let id = Ident::from_str("fn");
assert_eq!(string_reader.next_token().tok, token::Whitespace); assert_eq!(string_reader.next_token().tok, token::Comment);
let tok1 = string_reader.next_token(); assert_eq!(string_reader.next_token().tok, token::Whitespace);
let tok2 = TokenAndSpan { let tok1 = string_reader.next_token();
tok: token::Ident(id), let tok2 = TokenAndSpan {
sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION), tok: token::Ident(id),
}; sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
assert_eq!(tok1, tok2); };
assert_eq!(string_reader.next_token().tok, token::Whitespace); assert_eq!(tok1, tok2);
// the 'main' id is already read: assert_eq!(string_reader.next_token().tok, token::Whitespace);
assert_eq!(string_reader.pos.clone(), BytePos(28)); // the 'main' id is already read:
// read another token: assert_eq!(string_reader.pos.clone(), BytePos(28));
let tok3 = string_reader.next_token(); // read another token:
let tok4 = TokenAndSpan { let tok3 = string_reader.next_token();
tok: token::Ident(Ident::from_str("main")), let tok4 = TokenAndSpan {
sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION), tok: token::Ident(Ident::from_str("main")),
}; sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
assert_eq!(tok3, tok4); };
// the lparen is already read: assert_eq!(tok3, tok4);
assert_eq!(string_reader.pos.clone(), BytePos(29)) // the lparen is already read:
assert_eq!(string_reader.pos.clone(), BytePos(29))
})
} }
// check that the given reader produces the desired stream // check that the given reader produces the desired stream
@ -1838,113 +1841,133 @@ mod tests {
#[test] #[test]
fn doublecolonparsing() { fn doublecolonparsing() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
check_tokenization(setup(&cm, &sh, "a b".to_string()), let sh = mk_sess(cm.clone());
vec![mk_ident("a"), token::Whitespace, mk_ident("b")]); check_tokenization(setup(&cm, &sh, "a b".to_string()),
vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
})
} }
#[test] #[test]
fn dcparsing_2() { fn dcparsing_2() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
check_tokenization(setup(&cm, &sh, "a::b".to_string()), let sh = mk_sess(cm.clone());
vec![mk_ident("a"), token::ModSep, mk_ident("b")]); check_tokenization(setup(&cm, &sh, "a::b".to_string()),
vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
})
} }
#[test] #[test]
fn dcparsing_3() { fn dcparsing_3() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
check_tokenization(setup(&cm, &sh, "a ::b".to_string()), let sh = mk_sess(cm.clone());
vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]); check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
})
} }
#[test] #[test]
fn dcparsing_4() { fn dcparsing_4() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
check_tokenization(setup(&cm, &sh, "a:: b".to_string()), let sh = mk_sess(cm.clone());
vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
})
} }
#[test] #[test]
fn character_a() { fn character_a() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, let sh = mk_sess(cm.clone());
token::Literal(token::Char(Symbol::intern("a")), None)); assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("a")), None));
})
} }
#[test] #[test]
fn character_space() { fn character_space() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, let sh = mk_sess(cm.clone());
token::Literal(token::Char(Symbol::intern(" ")), None)); assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern(" ")), None));
})
} }
#[test] #[test]
fn character_escaped() { fn character_escaped() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, let sh = mk_sess(cm.clone());
token::Literal(token::Char(Symbol::intern("\\n")), None)); assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("\\n")), None));
})
} }
#[test] #[test]
fn lifetime_name() { fn lifetime_name() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok, let sh = mk_sess(cm.clone());
token::Lifetime(Ident::from_str("'abc"))); assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
token::Lifetime(Ident::from_str("'abc")));
})
} }
#[test] #[test]
fn raw_string() { fn raw_string() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) let sh = mk_sess(cm.clone());
.next_token() assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
.tok, .next_token()
token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None)); .tok,
token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
})
} }
#[test] #[test]
fn literal_suffixes() { fn literal_suffixes() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
macro_rules! test { let sh = mk_sess(cm.clone());
($input: expr, $tok_type: ident, $tok_contents: expr) => {{ macro_rules! test {
assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok, ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
token::Literal(token::$tok_type(Symbol::intern($tok_contents)), assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok,
Some(Symbol::intern("suffix")))); token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
// with a whitespace separator: Some(Symbol::intern("suffix"))));
assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok, // with a whitespace separator:
token::Literal(token::$tok_type(Symbol::intern($tok_contents)), assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok,
None)); token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
}} None));
} }}
}
test!("'a'", Char, "a"); test!("'a'", Char, "a");
test!("b'a'", Byte, "a"); test!("b'a'", Byte, "a");
test!("\"a\"", Str_, "a"); test!("\"a\"", Str_, "a");
test!("b\"a\"", ByteStr, "a"); test!("b\"a\"", ByteStr, "a");
test!("1234", Integer, "1234"); test!("1234", Integer, "1234");
test!("0b101", Integer, "0b101"); test!("0b101", Integer, "0b101");
test!("0xABC", Integer, "0xABC"); test!("0xABC", Integer, "0xABC");
test!("1.0", Float, "1.0"); test!("1.0", Float, "1.0");
test!("1.0e10", Float, "1.0e10"); test!("1.0e10", Float, "1.0e10");
assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok,
token::Literal(token::Integer(Symbol::intern("2")), token::Literal(token::Integer(Symbol::intern("2")),
Some(Symbol::intern("us")))); Some(Symbol::intern("us"))));
assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
token::Literal(token::StrRaw(Symbol::intern("raw"), 3), token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
Some(Symbol::intern("suffix")))); Some(Symbol::intern("suffix"))));
assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3), token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
Some(Symbol::intern("suffix")))); Some(Symbol::intern("suffix"))));
})
} }
#[test] #[test]
@ -1956,27 +1979,31 @@ mod tests {
#[test] #[test]
fn nested_block_comments() { fn nested_block_comments() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string()); let sh = mk_sess(cm.clone());
match lexer.next_token().tok { let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string());
token::Comment => {} match lexer.next_token().tok {
_ => panic!("expected a comment!"), token::Comment => {}
} _ => panic!("expected a comment!"),
assert_eq!(lexer.next_token().tok, }
token::Literal(token::Char(Symbol::intern("a")), None)); assert_eq!(lexer.next_token().tok,
token::Literal(token::Char(Symbol::intern("a")), None));
})
} }
#[test] #[test]
fn crlf_comments() { fn crlf_comments() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty())); with_globals(|| {
let sh = mk_sess(cm.clone()); let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); let sh = mk_sess(cm.clone());
let comment = lexer.next_token(); let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
assert_eq!(comment.tok, token::Comment); let comment = lexer.next_token();
assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7))); assert_eq!(comment.tok, token::Comment);
assert_eq!(lexer.next_token().tok, token::Whitespace); assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7)));
assert_eq!(lexer.next_token().tok, assert_eq!(lexer.next_token().tok, token::Whitespace);
token::DocComment(Symbol::intern("/// test"))); assert_eq!(lexer.next_token().tok,
token::DocComment(Symbol::intern("/// test")));
})
} }
} }

View File

@ -680,6 +680,7 @@ mod tests {
use util::parser_testing::{string_to_stream, string_to_parser}; use util::parser_testing::{string_to_stream, string_to_parser};
use util::parser_testing::{string_to_expr, string_to_item, string_to_stmt}; use util::parser_testing::{string_to_expr, string_to_item, string_to_stmt};
use util::ThinVec; use util::ThinVec;
use with_globals;
// produce a syntax_pos::span // produce a syntax_pos::span
fn sp(a: u32, b: u32) -> Span { fn sp(a: u32, b: u32) -> Span {
@ -691,156 +692,170 @@ mod tests {
} }
#[test] fn path_exprs_1() { #[test] fn path_exprs_1() {
assert!(string_to_expr("a".to_string()) == with_globals(|| {
P(ast::Expr{ assert!(string_to_expr("a".to_string()) ==
id: ast::DUMMY_NODE_ID, P(ast::Expr{
node: ast::ExprKind::Path(None, ast::Path { id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
span: sp(0, 1),
segments: vec![str2seg("a", 0, 1)],
}),
span: sp(0, 1), span: sp(0, 1),
segments: vec![str2seg("a", 0, 1)], attrs: ThinVec::new(),
}), }))
span: sp(0, 1), })
attrs: ThinVec::new(),
}))
} }
#[test] fn path_exprs_2 () { #[test] fn path_exprs_2 () {
assert!(string_to_expr("::a::b".to_string()) == with_globals(|| {
P(ast::Expr { assert!(string_to_expr("::a::b".to_string()) ==
id: ast::DUMMY_NODE_ID, P(ast::Expr {
node: ast::ExprKind::Path(None, ast::Path { id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
span: sp(0, 6),
segments: vec![ast::PathSegment::crate_root(sp(0, 2)),
str2seg("a", 2, 3),
str2seg("b", 5, 6)]
}),
span: sp(0, 6), span: sp(0, 6),
segments: vec![ast::PathSegment::crate_root(sp(0, 2)), attrs: ThinVec::new(),
str2seg("a", 2, 3), }))
str2seg("b", 5, 6)] })
}),
span: sp(0, 6),
attrs: ThinVec::new(),
}))
} }
#[should_panic] #[should_panic]
#[test] fn bad_path_expr_1() { #[test] fn bad_path_expr_1() {
string_to_expr("::abc::def::return".to_string()); with_globals(|| {
string_to_expr("::abc::def::return".to_string());
})
} }
// check the token-tree-ization of macros // check the token-tree-ization of macros
#[test] #[test]
fn string_to_tts_macro () { fn string_to_tts_macro () {
let tts: Vec<_> = with_globals(|| {
string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect(); let tts: Vec<_> =
let tts: &[TokenTree] = &tts[..]; string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
let tts: &[TokenTree] = &tts[..];
match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
( (
4, 4,
Some(&TokenTree::Token(_, token::Ident(name_macro_rules))), Some(&TokenTree::Token(_, token::Ident(name_macro_rules))),
Some(&TokenTree::Token(_, token::Not)), Some(&TokenTree::Token(_, token::Not)),
Some(&TokenTree::Token(_, token::Ident(name_zip))), Some(&TokenTree::Token(_, token::Ident(name_zip))),
Some(&TokenTree::Delimited(_, ref macro_delimed)), Some(&TokenTree::Delimited(_, ref macro_delimed)),
) )
if name_macro_rules.name == "macro_rules" if name_macro_rules.name == "macro_rules"
&& name_zip.name == "zip" => { && name_zip.name == "zip" => {
let tts = &macro_delimed.stream().trees().collect::<Vec<_>>(); let tts = &macro_delimed.stream().trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
( (
3, 3,
Some(&TokenTree::Delimited(_, ref first_delimed)), Some(&TokenTree::Delimited(_, ref first_delimed)),
Some(&TokenTree::Token(_, token::FatArrow)), Some(&TokenTree::Token(_, token::FatArrow)),
Some(&TokenTree::Delimited(_, ref second_delimed)), Some(&TokenTree::Delimited(_, ref second_delimed)),
) )
if macro_delimed.delim == token::Paren => { if macro_delimed.delim == token::Paren => {
let tts = &first_delimed.stream().trees().collect::<Vec<_>>(); let tts = &first_delimed.stream().trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) { match (tts.len(), tts.get(0), tts.get(1)) {
( (
2, 2,
Some(&TokenTree::Token(_, token::Dollar)), Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident))), Some(&TokenTree::Token(_, token::Ident(ident))),
) )
if first_delimed.delim == token::Paren && ident.name == "a" => {}, if first_delimed.delim == token::Paren && ident.name == "a" => {},
_ => panic!("value 3: {:?}", *first_delimed), _ => panic!("value 3: {:?}", *first_delimed),
} }
let tts = &second_delimed.stream().trees().collect::<Vec<_>>(); let tts = &second_delimed.stream().trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) { match (tts.len(), tts.get(0), tts.get(1)) {
( (
2, 2,
Some(&TokenTree::Token(_, token::Dollar)), Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident))), Some(&TokenTree::Token(_, token::Ident(ident))),
) )
if second_delimed.delim == token::Paren if second_delimed.delim == token::Paren
&& ident.name == "a" => {}, && ident.name == "a" => {},
_ => panic!("value 4: {:?}", *second_delimed), _ => panic!("value 4: {:?}", *second_delimed),
} }
}, },
_ => panic!("value 2: {:?}", *macro_delimed), _ => panic!("value 2: {:?}", *macro_delimed),
} }
}, },
_ => panic!("value: {:?}",tts), _ => panic!("value: {:?}",tts),
} }
})
} }
#[test] #[test]
fn string_to_tts_1() { fn string_to_tts_1() {
let tts = string_to_stream("fn a (b : i32) { b; }".to_string()); with_globals(|| {
let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
let expected = TokenStream::concat(vec![ let expected = TokenStream::concat(vec![
TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(), TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(),
TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(), TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(),
TokenTree::Delimited( TokenTree::Delimited(
sp(5, 14), sp(5, 14),
tokenstream::Delimited { tokenstream::Delimited {
delim: token::DelimToken::Paren, delim: token::DelimToken::Paren,
tts: TokenStream::concat(vec![ tts: TokenStream::concat(vec![
TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(), TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(),
TokenTree::Token(sp(8, 9), token::Colon).into(), TokenTree::Token(sp(8, 9), token::Colon).into(),
TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))).into(), TokenTree::Token(sp(10, 13),
]).into(), token::Ident(Ident::from_str("i32"))).into(),
}).into(), ]).into(),
TokenTree::Delimited( }).into(),
sp(15, 21), TokenTree::Delimited(
tokenstream::Delimited { sp(15, 21),
delim: token::DelimToken::Brace, tokenstream::Delimited {
tts: TokenStream::concat(vec![ delim: token::DelimToken::Brace,
TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(), tts: TokenStream::concat(vec![
TokenTree::Token(sp(18, 19), token::Semi).into(), TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(),
]).into(), TokenTree::Token(sp(18, 19), token::Semi).into(),
}).into() ]).into(),
]); }).into()
]);
assert_eq!(tts, expected); assert_eq!(tts, expected);
})
} }
#[test] fn ret_expr() { #[test] fn ret_expr() {
assert!(string_to_expr("return d".to_string()) == with_globals(|| {
P(ast::Expr{ assert!(string_to_expr("return d".to_string()) ==
id: ast::DUMMY_NODE_ID, P(ast::Expr{
node:ast::ExprKind::Ret(Some(P(ast::Expr{
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
node:ast::ExprKind::Path(None, ast::Path{ node:ast::ExprKind::Ret(Some(P(ast::Expr{
span: sp(7, 8), id: ast::DUMMY_NODE_ID,
segments: vec![str2seg("d", 7, 8)], node:ast::ExprKind::Path(None, ast::Path{
}), span: sp(7, 8),
span:sp(7,8), segments: vec![str2seg("d", 7, 8)],
}),
span:sp(7,8),
attrs: ThinVec::new(),
}))),
span:sp(0,8),
attrs: ThinVec::new(), attrs: ThinVec::new(),
}))), }))
span:sp(0,8), })
attrs: ThinVec::new(),
}))
} }
#[test] fn parse_stmt_1 () { #[test] fn parse_stmt_1 () {
assert!(string_to_stmt("b;".to_string()) == with_globals(|| {
Some(ast::Stmt { assert!(string_to_stmt("b;".to_string()) ==
node: ast::StmtKind::Expr(P(ast::Expr { Some(ast::Stmt {
id: ast::DUMMY_NODE_ID, node: ast::StmtKind::Expr(P(ast::Expr {
node: ast::ExprKind::Path(None, ast::Path { id: ast::DUMMY_NODE_ID,
span:sp(0,1), node: ast::ExprKind::Path(None, ast::Path {
segments: vec![str2seg("b", 0, 1)], span:sp(0,1),
}), segments: vec![str2seg("b", 0, 1)],
span: sp(0,1), }),
attrs: ThinVec::new()})), span: sp(0,1),
id: ast::DUMMY_NODE_ID, attrs: ThinVec::new()})),
span: sp(0,1)})) id: ast::DUMMY_NODE_ID,
span: sp(0,1)}))
})
} }
fn parser_done(p: Parser){ fn parser_done(p: Parser){
@ -848,120 +863,128 @@ mod tests {
} }
#[test] fn parse_ident_pat () { #[test] fn parse_ident_pat () {
let sess = ParseSess::new(FilePathMapping::empty()); with_globals(|| {
let mut parser = string_to_parser(&sess, "b".to_string()); let sess = ParseSess::new(FilePathMapping::empty());
assert!(panictry!(parser.parse_pat()) let mut parser = string_to_parser(&sess, "b".to_string());
== P(ast::Pat{ assert!(panictry!(parser.parse_pat())
id: ast::DUMMY_NODE_ID, == P(ast::Pat{
node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable), id: ast::DUMMY_NODE_ID,
Spanned{ span:sp(0, 1), node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable),
node: Ident::from_str("b") Spanned{ span:sp(0, 1),
}, node: Ident::from_str("b")
None), },
span: sp(0,1)})); None),
parser_done(parser); span: sp(0,1)}));
parser_done(parser);
})
} }
// check the contents of the tt manually: // check the contents of the tt manually:
#[test] fn parse_fundecl () { #[test] fn parse_fundecl () {
// this test depends on the intern order of "fn" and "i32" with_globals(|| {
let item = string_to_item("fn a (b : i32) { b; }".to_string()).map(|m| { // this test depends on the intern order of "fn" and "i32"
m.map(|mut m| { let item = string_to_item("fn a (b : i32) { b; }".to_string()).map(|m| {
m.tokens = None; m.map(|mut m| {
m m.tokens = None;
}) m
}); })
assert_eq!(item, });
Some( assert_eq!(item,
P(ast::Item{ident:Ident::from_str("a"), Some(
attrs:Vec::new(), P(ast::Item{ident:Ident::from_str("a"),
id: ast::DUMMY_NODE_ID, attrs:Vec::new(),
tokens: None, id: ast::DUMMY_NODE_ID,
node: ast::ItemKind::Fn(P(ast::FnDecl { tokens: None,
inputs: vec![ast::Arg{ node: ast::ItemKind::Fn(P(ast::FnDecl {
ty: P(ast::Ty{id: ast::DUMMY_NODE_ID, inputs: vec![ast::Arg{
node: ast::TyKind::Path(None, ast::Path{ ty: P(ast::Ty{id: ast::DUMMY_NODE_ID,
span:sp(10,13), node: ast::TyKind::Path(None, ast::Path{
segments: vec![str2seg("i32", 10, 13)], span:sp(10,13),
segments: vec![str2seg("i32", 10, 13)],
}),
span:sp(10,13)
}), }),
span:sp(10,13) pat: P(ast::Pat {
}),
pat: P(ast::Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(
ast::BindingMode::ByValue(
ast::Mutability::Immutable),
Spanned{
span: sp(6,7),
node: Ident::from_str("b")},
None
),
span: sp(6,7)
}),
id: ast::DUMMY_NODE_ID
}],
output: ast::FunctionRetTy::Default(sp(15, 15)),
variadic: false
}),
ast::Unsafety::Normal,
Spanned {
span: sp(0,2),
node: ast::Constness::NotConst,
},
Abi::Rust,
ast::Generics{
params: Vec::new(),
where_clause: ast::WhereClause {
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
predicates: Vec::new(), node: PatKind::Ident(
ast::BindingMode::ByValue(
ast::Mutability::Immutable),
Spanned{
span: sp(6,7),
node: Ident::from_str("b")},
None
),
span: sp(6,7)
}),
id: ast::DUMMY_NODE_ID
}],
output: ast::FunctionRetTy::Default(sp(15, 15)),
variadic: false
}),
ast::Unsafety::Normal,
Spanned {
span: sp(0,2),
node: ast::Constness::NotConst,
},
Abi::Rust,
ast::Generics{
params: Vec::new(),
where_clause: ast::WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
span: syntax_pos::DUMMY_SP,
},
span: syntax_pos::DUMMY_SP, span: syntax_pos::DUMMY_SP,
}, },
span: syntax_pos::DUMMY_SP, P(ast::Block {
}, stmts: vec![ast::Stmt {
P(ast::Block { node: ast::StmtKind::Semi(P(ast::Expr{
stmts: vec![ast::Stmt { id: ast::DUMMY_NODE_ID,
node: ast::StmtKind::Semi(P(ast::Expr{ node: ast::ExprKind::Path(None,
ast::Path{
span:sp(17,18),
segments: vec![str2seg("b", 17, 18)],
}),
span: sp(17,18),
attrs: ThinVec::new()})),
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, span: sp(17,19)}],
ast::Path{
span:sp(17,18),
segments: vec![str2seg("b", 17, 18)],
}),
span: sp(17,18),
attrs: ThinVec::new()})),
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
span: sp(17,19)}], rules: ast::BlockCheckMode::Default, // no idea
id: ast::DUMMY_NODE_ID, span: sp(15,21),
rules: ast::BlockCheckMode::Default, // no idea recovered: false,
span: sp(15,21), })),
recovered: false, vis: respan(sp(0, 0), ast::VisibilityKind::Inherited),
})), span: sp(0,21)})));
vis: respan(sp(0, 0), ast::VisibilityKind::Inherited), })
span: sp(0,21)})));
} }
#[test] fn parse_use() { #[test] fn parse_use() {
let use_s = "use foo::bar::baz;"; with_globals(|| {
let vitem = string_to_item(use_s.to_string()).unwrap(); let use_s = "use foo::bar::baz;";
let vitem_s = item_to_string(&vitem); let vitem = string_to_item(use_s.to_string()).unwrap();
assert_eq!(&vitem_s[..], use_s); let vitem_s = item_to_string(&vitem);
assert_eq!(&vitem_s[..], use_s);
let use_s = "use foo::bar as baz;"; let use_s = "use foo::bar as baz;";
let vitem = string_to_item(use_s.to_string()).unwrap(); let vitem = string_to_item(use_s.to_string()).unwrap();
let vitem_s = item_to_string(&vitem); let vitem_s = item_to_string(&vitem);
assert_eq!(&vitem_s[..], use_s); assert_eq!(&vitem_s[..], use_s);
})
} }
#[test] fn parse_extern_crate() { #[test] fn parse_extern_crate() {
let ex_s = "extern crate foo;"; with_globals(|| {
let vitem = string_to_item(ex_s.to_string()).unwrap(); let ex_s = "extern crate foo;";
let vitem_s = item_to_string(&vitem); let vitem = string_to_item(ex_s.to_string()).unwrap();
assert_eq!(&vitem_s[..], ex_s); let vitem_s = item_to_string(&vitem);
assert_eq!(&vitem_s[..], ex_s);
let ex_s = "extern crate foo as bar;"; let ex_s = "extern crate foo as bar;";
let vitem = string_to_item(ex_s.to_string()).unwrap(); let vitem = string_to_item(ex_s.to_string()).unwrap();
let vitem_s = item_to_string(&vitem); let vitem_s = item_to_string(&vitem);
assert_eq!(&vitem_s[..], ex_s); assert_eq!(&vitem_s[..], ex_s);
})
} }
fn get_spans_of_pat_idents(src: &str) -> Vec<Span> { fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
@ -988,31 +1011,36 @@ mod tests {
} }
#[test] fn span_of_self_arg_pat_idents_are_correct() { #[test] fn span_of_self_arg_pat_idents_are_correct() {
with_globals(|| {
let srcs = ["impl z { fn a (&self, &myarg: i32) {} }", let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
"impl z { fn a (&mut self, &myarg: i32) {} }", "impl z { fn a (&mut self, &myarg: i32) {} }",
"impl z { fn a (&'a self, &myarg: i32) {} }", "impl z { fn a (&'a self, &myarg: i32) {} }",
"impl z { fn a (self, &myarg: i32) {} }", "impl z { fn a (self, &myarg: i32) {} }",
"impl z { fn a (self: Foo, &myarg: i32) {} }", "impl z { fn a (self: Foo, &myarg: i32) {} }",
]; ];
for &src in &srcs { for &src in &srcs {
let spans = get_spans_of_pat_idents(src); let spans = get_spans_of_pat_idents(src);
let (lo, hi) = (spans[0].lo(), spans[0].hi()); let (lo, hi) = (spans[0].lo(), spans[0].hi());
assert!("self" == &src[lo.to_usize()..hi.to_usize()], assert!("self" == &src[lo.to_usize()..hi.to_usize()],
"\"{}\" != \"self\". src=\"{}\"", "\"{}\" != \"self\". src=\"{}\"",
&src[lo.to_usize()..hi.to_usize()], src) &src[lo.to_usize()..hi.to_usize()], src)
} }
})
} }
#[test] fn parse_exprs () { #[test] fn parse_exprs () {
// just make sure that they parse.... with_globals(|| {
string_to_expr("3 + 4".to_string()); // just make sure that they parse....
string_to_expr("a::z.froob(b,&(987+3))".to_string()); string_to_expr("3 + 4".to_string());
string_to_expr("a::z.froob(b,&(987+3))".to_string());
})
} }
#[test] fn attrs_fix_bug () { #[test] fn attrs_fix_bug () {
string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) with_globals(|| {
string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
-> Result<Box<Writer>, String> { -> Result<Box<Writer>, String> {
#[cfg(windows)] #[cfg(windows)]
fn wb() -> c_int { fn wb() -> c_int {
@ -1024,49 +1052,54 @@ mod tests {
let mut fflags: c_int = wb(); let mut fflags: c_int = wb();
}".to_string()); }".to_string());
})
} }
#[test] fn crlf_doc_comments() { #[test] fn crlf_doc_comments() {
let sess = ParseSess::new(FilePathMapping::empty()); with_globals(|| {
let sess = ParseSess::new(FilePathMapping::empty());
let name = FileName::Custom("source".to_string()); let name = FileName::Custom("source".to_string());
let source = "/// doc comment\r\nfn foo() {}".to_string(); let source = "/// doc comment\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name.clone(), source, &sess) let item = parse_item_from_source_str(name.clone(), source, &sess)
.unwrap().unwrap(); .unwrap().unwrap();
let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
assert_eq!(doc, "/// doc comment"); assert_eq!(doc, "/// doc comment");
let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name.clone(), source, &sess) let item = parse_item_from_source_str(name.clone(), source, &sess)
.unwrap().unwrap(); .unwrap().unwrap();
let docs = item.attrs.iter().filter(|a| a.path == "doc") let docs = item.attrs.iter().filter(|a| a.path == "doc")
.map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>(); .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
assert_eq!(&docs[..], b); assert_eq!(&docs[..], b);
let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap(); let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap();
let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
assert_eq!(doc, "/** doc comment\n * with CRLF */"); assert_eq!(doc, "/** doc comment\n * with CRLF */");
});
} }
#[test] #[test]
fn ttdelim_span() { fn ttdelim_span() {
let sess = ParseSess::new(FilePathMapping::empty()); with_globals(|| {
let expr = parse::parse_expr_from_source_str(PathBuf::from("foo").into(), let sess = ParseSess::new(FilePathMapping::empty());
"foo!( fn main() { body } )".to_string(), &sess).unwrap(); let expr = parse::parse_expr_from_source_str(PathBuf::from("foo").into(),
"foo!( fn main() { body } )".to_string(), &sess).unwrap();
let tts: Vec<_> = match expr.node { let tts: Vec<_> = match expr.node {
ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(), ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
_ => panic!("not a macro"), _ => panic!("not a macro"),
}; };
let span = tts.iter().rev().next().unwrap().span(); let span = tts.iter().rev().next().unwrap().span();
match sess.codemap().span_to_snippet(span) { match sess.codemap().span_to_snippet(span) {
Ok(s) => assert_eq!(&s[..], "{ body }"), Ok(s) => assert_eq!(&s[..], "{ body }"),
Err(_) => panic!("could not get snippet"), Err(_) => panic!("could not get snippet"),
} }
});
} }
// This tests that when parsing a string (rather than a file) we don't try // This tests that when parsing a string (rather than a file) we don't try
@ -1074,17 +1107,19 @@ mod tests {
// See `recurse_into_file_modules` in the parser. // See `recurse_into_file_modules` in the parser.
#[test] #[test]
fn out_of_line_mod() { fn out_of_line_mod() {
let sess = ParseSess::new(FilePathMapping::empty()); with_globals(|| {
let item = parse_item_from_source_str( let sess = ParseSess::new(FilePathMapping::empty());
PathBuf::from("foo").into(), let item = parse_item_from_source_str(
"mod foo { struct S; mod this_does_not_exist; }".to_owned(), PathBuf::from("foo").into(),
&sess, "mod foo { struct S; mod this_does_not_exist; }".to_owned(),
).unwrap().unwrap(); &sess,
).unwrap().unwrap();
if let ast::ItemKind::Mod(ref m) = item.node { if let ast::ItemKind::Mod(ref m) = item.node {
assert!(m.items.len() == 2); assert!(m.items.len() == 2);
} else { } else {
panic!(); panic!();
} }
});
} }
} }

View File

@ -3178,36 +3178,41 @@ mod tests {
use ast; use ast;
use codemap; use codemap;
use syntax_pos; use syntax_pos;
use with_globals;
#[test] #[test]
fn test_fun_to_string() { fn test_fun_to_string() {
let abba_ident = ast::Ident::from_str("abba"); with_globals(|| {
let abba_ident = ast::Ident::from_str("abba");
let decl = ast::FnDecl { let decl = ast::FnDecl {
inputs: Vec::new(), inputs: Vec::new(),
output: ast::FunctionRetTy::Default(syntax_pos::DUMMY_SP), output: ast::FunctionRetTy::Default(syntax_pos::DUMMY_SP),
variadic: false variadic: false
}; };
let generics = ast::Generics::default(); let generics = ast::Generics::default();
assert_eq!(fun_to_string(&decl, ast::Unsafety::Normal, assert_eq!(fun_to_string(&decl, ast::Unsafety::Normal,
ast::Constness::NotConst, ast::Constness::NotConst,
abba_ident, &generics), abba_ident, &generics),
"fn abba()"); "fn abba()");
})
} }
#[test] #[test]
fn test_variant_to_string() { fn test_variant_to_string() {
let ident = ast::Ident::from_str("principal_skinner"); with_globals(|| {
let ident = ast::Ident::from_str("principal_skinner");
let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ { let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ {
name: ident, name: ident,
attrs: Vec::new(), attrs: Vec::new(),
// making this up as I go.... ? // making this up as I go.... ?
data: ast::VariantData::Unit(ast::DUMMY_NODE_ID), data: ast::VariantData::Unit(ast::DUMMY_NODE_ID),
disr_expr: None, disr_expr: None,
}); });
let varstr = variant_to_string(&var); let varstr = variant_to_string(&var);
assert_eq!(varstr, "principal_skinner"); assert_eq!(varstr, "principal_skinner");
})
} }
} }

View File

@ -18,6 +18,7 @@ use std::str;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use std::path::Path; use std::path::Path;
use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan}; use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan};
use with_globals;
/// Identify a position in the text by the Nth occurrence of a string. /// Identify a position in the text by the Nth occurrence of a string.
struct Position { struct Position {
@ -46,37 +47,39 @@ impl<T: Write> Write for Shared<T> {
} }
fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) { fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) {
let output = Arc::new(Mutex::new(Vec::new())); with_globals(|| {
let output = Arc::new(Mutex::new(Vec::new()));
let code_map = Lrc::new(CodeMap::new(FilePathMapping::empty())); let code_map = Lrc::new(CodeMap::new(FilePathMapping::empty()));
code_map.new_filemap_and_lines(Path::new("test.rs"), &file_text); code_map.new_filemap_and_lines(Path::new("test.rs"), &file_text);
let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end); let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
let mut msp = MultiSpan::from_span(primary_span); let mut msp = MultiSpan::from_span(primary_span);
for span_label in span_labels { for span_label in span_labels {
let span = make_span(&file_text, &span_label.start, &span_label.end); let span = make_span(&file_text, &span_label.start, &span_label.end);
msp.push_span_label(span, span_label.label.to_string()); msp.push_span_label(span, span_label.label.to_string());
println!("span: {:?} label: {:?}", span, span_label.label); println!("span: {:?} label: {:?}", span, span_label.label);
println!("text: {:?}", code_map.span_to_snippet(span)); println!("text: {:?}", code_map.span_to_snippet(span));
} }
let emitter = EmitterWriter::new(Box::new(Shared { data: output.clone() }), let emitter = EmitterWriter::new(Box::new(Shared { data: output.clone() }),
Some(code_map.clone()), Some(code_map.clone()),
false, false,
false); false);
let handler = Handler::with_emitter(true, false, Box::new(emitter)); let handler = Handler::with_emitter(true, false, Box::new(emitter));
handler.span_err(msp, "foo"); handler.span_err(msp, "foo");
assert!(expected_output.chars().next() == Some('\n'), assert!(expected_output.chars().next() == Some('\n'),
"expected output should begin with newline"); "expected output should begin with newline");
let expected_output = &expected_output[1..]; let expected_output = &expected_output[1..];
let bytes = output.lock().unwrap(); let bytes = output.lock().unwrap();
let actual_output = str::from_utf8(&bytes).unwrap(); let actual_output = str::from_utf8(&bytes).unwrap();
println!("expected output:\n------\n{}------", expected_output); println!("expected output:\n------\n{}------", expected_output);
println!("actual output:\n------\n{}------", actual_output); println!("actual output:\n------\n{}------", actual_output);
assert!(expected_output == actual_output) assert!(expected_output == actual_output)
})
} }
fn make_span(file_text: &str, start: &Position, end: &Position) -> Span { fn make_span(file_text: &str, start: &Position, end: &Position) -> Span {

View File

@ -599,6 +599,7 @@ impl Hash for ThinTokenStream {
mod tests { mod tests {
use super::*; use super::*;
use syntax::ast::Ident; use syntax::ast::Ident;
use with_globals;
use syntax_pos::{Span, BytePos, NO_EXPANSION}; use syntax_pos::{Span, BytePos, NO_EXPANSION};
use parse::token::Token; use parse::token::Token;
use util::parser_testing::string_to_stream; use util::parser_testing::string_to_stream;
@ -613,67 +614,83 @@ mod tests {
#[test] #[test]
fn test_concat() { fn test_concat() {
let test_res = string_to_ts("foo::bar::baz"); with_globals(|| {
let test_fst = string_to_ts("foo::bar"); let test_res = string_to_ts("foo::bar::baz");
let test_snd = string_to_ts("::baz"); let test_fst = string_to_ts("foo::bar");
let eq_res = TokenStream::concat(vec![test_fst, test_snd]); let test_snd = string_to_ts("::baz");
assert_eq!(test_res.trees().count(), 5); let eq_res = TokenStream::concat(vec![test_fst, test_snd]);
assert_eq!(eq_res.trees().count(), 5); assert_eq!(test_res.trees().count(), 5);
assert_eq!(test_res.eq_unspanned(&eq_res), true); assert_eq!(eq_res.trees().count(), 5);
assert_eq!(test_res.eq_unspanned(&eq_res), true);
})
} }
#[test] #[test]
fn test_to_from_bijection() { fn test_to_from_bijection() {
let test_start = string_to_ts("foo::bar(baz)"); with_globals(|| {
let test_end = test_start.trees().collect(); let test_start = string_to_ts("foo::bar(baz)");
assert_eq!(test_start, test_end) let test_end = test_start.trees().collect();
assert_eq!(test_start, test_end)
})
} }
#[test] #[test]
fn test_eq_0() { fn test_eq_0() {
let test_res = string_to_ts("foo"); with_globals(|| {
let test_eqs = string_to_ts("foo"); let test_res = string_to_ts("foo");
assert_eq!(test_res, test_eqs) let test_eqs = string_to_ts("foo");
assert_eq!(test_res, test_eqs)
})
} }
#[test] #[test]
fn test_eq_1() { fn test_eq_1() {
let test_res = string_to_ts("::bar::baz"); with_globals(|| {
let test_eqs = string_to_ts("::bar::baz"); let test_res = string_to_ts("::bar::baz");
assert_eq!(test_res, test_eqs) let test_eqs = string_to_ts("::bar::baz");
assert_eq!(test_res, test_eqs)
})
} }
#[test] #[test]
fn test_eq_3() { fn test_eq_3() {
let test_res = string_to_ts(""); with_globals(|| {
let test_eqs = string_to_ts(""); let test_res = string_to_ts("");
assert_eq!(test_res, test_eqs) let test_eqs = string_to_ts("");
assert_eq!(test_res, test_eqs)
})
} }
#[test] #[test]
fn test_diseq_0() { fn test_diseq_0() {
let test_res = string_to_ts("::bar::baz"); with_globals(|| {
let test_eqs = string_to_ts("bar::baz"); let test_res = string_to_ts("::bar::baz");
assert_eq!(test_res == test_eqs, false) let test_eqs = string_to_ts("bar::baz");
assert_eq!(test_res == test_eqs, false)
})
} }
#[test] #[test]
fn test_diseq_1() { fn test_diseq_1() {
let test_res = string_to_ts("(bar,baz)"); with_globals(|| {
let test_eqs = string_to_ts("bar,baz"); let test_res = string_to_ts("(bar,baz)");
assert_eq!(test_res == test_eqs, false) let test_eqs = string_to_ts("bar,baz");
assert_eq!(test_res == test_eqs, false)
})
} }
#[test] #[test]
fn test_is_empty() { fn test_is_empty() {
let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect(); with_globals(|| {
let test1: TokenStream = let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"))).into(); let test1: TokenStream =
let test2 = string_to_ts("foo(bar::baz)"); TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"))).into();
let test2 = string_to_ts("foo(bar::baz)");
assert_eq!(test0.is_empty(), true); assert_eq!(test0.is_empty(), true);
assert_eq!(test1.is_empty(), false); assert_eq!(test1.is_empty(), false);
assert_eq!(test2.is_empty(), false); assert_eq!(test2.is_empty(), false);
})
} }
#[test] #[test]

View File

@ -11,4 +11,5 @@ crate-type = ["dylib"]
[dependencies] [dependencies]
serialize = { path = "../libserialize" } serialize = { path = "../libserialize" }
rustc_data_structures = { path = "../librustc_data_structures" } rustc_data_structures = { path = "../librustc_data_structures" }
scoped-tls = { version = "0.1.1", features = ["nightly"] }
unicode-width = "0.1.4" unicode-width = "0.1.4"

View File

@ -15,11 +15,11 @@
//! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216. //! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216.
//! DOI=10.1017/S0956796812000093 <http://dx.doi.org/10.1017/S0956796812000093> //! DOI=10.1017/S0956796812000093 <http://dx.doi.org/10.1017/S0956796812000093>
use GLOBALS;
use Span; use Span;
use symbol::{Ident, Symbol}; use symbol::{Ident, Symbol};
use serialize::{Encodable, Decodable, Encoder, Decoder}; use serialize::{Encodable, Decodable, Encoder, Decoder};
use std::cell::RefCell;
use std::collections::HashMap; use std::collections::HashMap;
use std::fmt; use std::fmt;
@ -119,7 +119,7 @@ impl Mark {
} }
} }
struct HygieneData { pub struct HygieneData {
marks: Vec<MarkData>, marks: Vec<MarkData>,
syntax_contexts: Vec<SyntaxContextData>, syntax_contexts: Vec<SyntaxContextData>,
markings: HashMap<(SyntaxContext, Mark), SyntaxContext>, markings: HashMap<(SyntaxContext, Mark), SyntaxContext>,
@ -127,7 +127,7 @@ struct HygieneData {
} }
impl HygieneData { impl HygieneData {
fn new() -> Self { pub fn new() -> Self {
HygieneData { HygieneData {
marks: vec![MarkData { marks: vec![MarkData {
parent: Mark::root(), parent: Mark::root(),
@ -145,10 +145,7 @@ impl HygieneData {
} }
fn with<T, F: FnOnce(&mut HygieneData) -> T>(f: F) -> T { fn with<T, F: FnOnce(&mut HygieneData) -> T>(f: F) -> T {
thread_local! { GLOBALS.with(|globals| f(&mut *globals.hygiene_data.borrow_mut()))
static HYGIENE_DATA: RefCell<HygieneData> = RefCell::new(HygieneData::new());
}
HYGIENE_DATA.with(|data| f(&mut *data.borrow_mut()))
} }
} }

View File

@ -35,10 +35,13 @@ use std::ops::{Add, Sub};
use std::path::PathBuf; use std::path::PathBuf;
use rustc_data_structures::stable_hasher::StableHasher; use rustc_data_structures::stable_hasher::StableHasher;
use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::{Lrc, Lock};
extern crate rustc_data_structures; extern crate rustc_data_structures;
#[macro_use]
extern crate scoped_tls;
use serialize::{Encodable, Decodable, Encoder, Decoder}; use serialize::{Encodable, Decodable, Encoder, Decoder};
extern crate serialize; extern crate serialize;
@ -54,6 +57,24 @@ pub use span_encoding::{Span, DUMMY_SP};
pub mod symbol; pub mod symbol;
pub struct Globals {
symbol_interner: Lock<symbol::Interner>,
span_interner: Lock<span_encoding::SpanInterner>,
hygiene_data: Lock<hygiene::HygieneData>,
}
impl Globals {
pub fn new() -> Globals {
Globals {
symbol_interner: Lock::new(symbol::Interner::fresh()),
span_interner: Lock::new(span_encoding::SpanInterner::default()),
hygiene_data: Lock::new(hygiene::HygieneData::new()),
}
}
}
scoped_thread_local!(pub static GLOBALS: Globals);
/// Differentiates between real files and common virtual files /// Differentiates between real files and common virtual files
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, RustcDecodable, RustcEncodable)] #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, RustcDecodable, RustcEncodable)]
pub enum FileName { pub enum FileName {

View File

@ -14,11 +14,11 @@
// The encoding format for inline spans were obtained by optimizing over crates in rustc/libstd. // The encoding format for inline spans were obtained by optimizing over crates in rustc/libstd.
// See https://internals.rust-lang.org/t/rfc-compiler-refactoring-spans/1357/28 // See https://internals.rust-lang.org/t/rfc-compiler-refactoring-spans/1357/28
use GLOBALS;
use {BytePos, SpanData}; use {BytePos, SpanData};
use hygiene::SyntaxContext; use hygiene::SyntaxContext;
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use std::cell::RefCell;
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
/// A compressed span. /// A compressed span.
@ -133,7 +133,7 @@ fn decode(span: Span) -> SpanData {
} }
#[derive(Default)] #[derive(Default)]
struct SpanInterner { pub struct SpanInterner {
spans: FxHashMap<SpanData, u32>, spans: FxHashMap<SpanData, u32>,
span_data: Vec<SpanData>, span_data: Vec<SpanData>,
} }
@ -156,11 +156,8 @@ impl SpanInterner {
} }
} }
// If an interner exists in TLS, return it. Otherwise, prepare a fresh one. // If an interner exists, return it. Otherwise, prepare a fresh one.
#[inline] #[inline]
fn with_span_interner<T, F: FnOnce(&mut SpanInterner) -> T>(f: F) -> T { fn with_span_interner<T, F: FnOnce(&mut SpanInterner) -> T>(f: F) -> T {
thread_local!(static INTERNER: RefCell<SpanInterner> = { GLOBALS.with(|globals| f(&mut *globals.span_interner.lock()))
RefCell::new(SpanInterner::default())
});
INTERNER.with(|interner| f(&mut *interner.borrow_mut()))
} }

View File

@ -13,9 +13,9 @@
//! type, and vice versa. //! type, and vice versa.
use hygiene::SyntaxContext; use hygiene::SyntaxContext;
use GLOBALS;
use serialize::{Decodable, Decoder, Encodable, Encoder}; use serialize::{Decodable, Decoder, Encodable, Encoder};
use std::cell::RefCell;
use std::collections::HashMap; use std::collections::HashMap;
use std::fmt; use std::fmt;
@ -247,7 +247,7 @@ macro_rules! declare_keywords {(
} }
impl Interner { impl Interner {
fn fresh() -> Self { pub fn fresh() -> Self {
Interner::prefill(&[$($string,)*]) Interner::prefill(&[$($string,)*])
} }
} }
@ -330,12 +330,10 @@ declare_keywords! {
(60, Union, "union") (60, Union, "union")
} }
// If an interner exists in TLS, return it. Otherwise, prepare a fresh one. // If an interner exists, return it. Otherwise, prepare a fresh one.
#[inline]
fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T { fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T {
thread_local!(static INTERNER: RefCell<Interner> = { GLOBALS.with(|globals| f(&mut *globals.symbol_interner.lock()))
RefCell::new(Interner::fresh())
});
INTERNER.with(|interner| f(&mut *interner.borrow_mut()))
} }
/// Represents a string stored in the thread-local interner. Because the /// Represents a string stored in the thread-local interner. Because the
@ -422,6 +420,7 @@ impl Encodable for InternedString {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use Globals;
#[test] #[test]
fn interner_tests() { fn interner_tests() {
@ -444,7 +443,9 @@ mod tests {
#[test] #[test]
fn without_first_quote_test() { fn without_first_quote_test() {
let i = Ident::from_str("'break"); GLOBALS.set(&Globals::new(), || {
assert_eq!(i.without_first_quote().name, keywords::Break.name()); let i = Ident::from_str("'break");
assert_eq!(i.without_first_quote().name, keywords::Break.name());
});
} }
} }

View File

@ -24,6 +24,10 @@ use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP; use syntax_pos::DUMMY_SP;
fn main() { fn main() {
syntax::with_globals(|| run());
}
fn run() {
let ps = syntax::parse::ParseSess::new(codemap::FilePathMapping::empty()); let ps = syntax::parse::ParseSess::new(codemap::FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver; let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new( let mut cx = syntax::ext::base::ExtCtxt::new(

View File

@ -69,18 +69,20 @@ fn basic_sess(sysroot: PathBuf) -> (Session, Rc<CStore>, Box<TransCrate>) {
} }
fn compile(code: String, output: PathBuf, sysroot: PathBuf) { fn compile(code: String, output: PathBuf, sysroot: PathBuf) {
let (sess, cstore, trans) = basic_sess(sysroot); syntax::with_globals(|| {
let control = CompileController::basic(); let (sess, cstore, trans) = basic_sess(sysroot);
let input = Input::Str { name: FileName::Anon, input: code }; let control = CompileController::basic();
let _ = compile_input( let input = Input::Str { name: FileName::Anon, input: code };
trans, let _ = compile_input(
&sess, trans,
&cstore, &sess,
&None, &cstore,
&input, &None,
&None, &input,
&Some(output), &None,
None, &Some(output),
&control None,
); &control
);
});
} }

View File

@ -115,6 +115,10 @@ fn reject_stmt_parse(es: &str) {
} }
fn main() { fn main() {
syntax::with_globals(|| run());
}
fn run() {
let both = &["#[attr]", "#![attr]"]; let both = &["#[attr]", "#![attr]"];
let outer = &["#[attr]"]; let outer = &["#[attr]"];
let none = &[]; let none = &[];

View File

@ -27,6 +27,10 @@ use syntax::ptr::P;
use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::Lrc;
fn main() { fn main() {
syntax::with_globals(|| run());
}
fn run() {
let parse_sess = ParseSess::new(FilePathMapping::empty()); let parse_sess = ParseSess::new(FilePathMapping::empty());
let exp_cfg = ExpansionConfig::default("issue_35829".to_owned()); let exp_cfg = ExpansionConfig::default("issue_35829".to_owned());
let mut resolver = DummyResolver; let mut resolver = DummyResolver;

View File

@ -221,8 +221,11 @@ impl Folder for AddParens {
} }
} }
fn main() { fn main() {
syntax::with_globals(|| run());
}
fn run() {
let ps = ParseSess::new(FilePathMapping::empty()); let ps = ParseSess::new(FilePathMapping::empty());
iter_exprs(2, &mut |e| { iter_exprs(2, &mut |e| {

View File

@ -21,6 +21,10 @@ use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP; use syntax_pos::DUMMY_SP;
fn main() { fn main() {
syntax::with_globals(|| run());
}
fn run() {
let ps = syntax::parse::ParseSess::new(FilePathMapping::empty()); let ps = syntax::parse::ParseSess::new(FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver; let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new( let mut cx = syntax::ext::base::ExtCtxt::new(

View File

@ -263,7 +263,10 @@ fn main() {
*slot.borrow_mut() = Some((None, String::from("https://play.rust-lang.org/"))); *slot.borrow_mut() = Some((None, String::from("https://play.rust-lang.org/")));
}); });
let (format, dst) = parse_args(); let (format, dst) = parse_args();
if let Err(e) = main_with_result(format, &dst) { let result = syntax::with_globals(move || {
main_with_result(format, &dst)
});
if let Err(e) = result {
panic!("{}", e.description()); panic!("{}", e.description());
} }
} }

View File

@ -93,6 +93,7 @@ static WHITELIST: &'static [Crate] = &[
Crate("regex-syntax"), Crate("regex-syntax"),
Crate("remove_dir_all"), Crate("remove_dir_all"),
Crate("rustc-demangle"), Crate("rustc-demangle"),
Crate("scoped-tls"),
Crate("smallvec"), Crate("smallvec"),
Crate("stable_deref_trait"), Crate("stable_deref_trait"),
Crate("tempdir"), Crate("tempdir"),