Remove syntax and syntax_pos thread locals

John Kåre Alsaker 2018-03-07 02:44:10 +01:00
parent fab632f975
commit cbdf4ec03e
29 changed files with 1212 additions and 998 deletions
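
In short: libsyntax and syntax_pos previously kept per-thread state in thread_local! statics (for example USED_ATTRS and KNOWN_ATTRS below). This commit moves that state into a Globals struct reached through a scoped thread local from the scoped-tls crate, exposed as syntax::with_globals; that is why every entry point and test in the files below is now wrapped in a with_globals(|| ...) closure. A minimal caller-side sketch of the new requirement (illustrative only, not part of the diff):

extern crate syntax;

fn main() {
    // Anything that touches the syntax/syntax_pos globals (attribute
    // bookkeeping, syntax_pos state) must now run inside this closure.
    syntax::with_globals(|| {
        // parse, expand, run compiler passes, run tests, ...
    });
}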

src/Cargo.lock generated
View File

@ -2432,6 +2432,7 @@ dependencies = [
"rustc_cratesio_shim 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
"scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serialize 0.0.0",
"syntax_pos 0.0.0",
]
@ -2453,6 +2454,7 @@ name = "syntax_pos"
version = "0.0.0"
dependencies = [
"rustc_data_structures 0.0.0",
"scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serialize 0.0.0",
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
]

View File

@ -2391,6 +2391,7 @@ mod tests {
use super::{Externs, OutputType, OutputTypes};
use rustc_back::{PanicStrategy, RelroLevel};
use syntax::symbol::Symbol;
use syntax;
fn optgroups() -> getopts::Options {
let mut opts = getopts::Options::new();
@ -2411,45 +2412,50 @@ mod tests {
// When the user supplies --test we should implicitly supply --cfg test
#[test]
fn test_switch_implies_cfg_test() {
let matches = &match optgroups().parse(&["--test".to_string()]) {
Ok(m) => m,
Err(f) => panic!("test_switch_implies_cfg_test: {}", f),
};
let registry = errors::registry::Registry::new(&[]);
let (sessopts, cfg) = build_session_options_and_crate_config(matches);
let sess = build_session(sessopts, None, registry);
let cfg = build_configuration(&sess, cfg);
assert!(cfg.contains(&(Symbol::intern("test"), None)));
syntax::with_globals(|| {
let matches = &match optgroups().parse(&["--test".to_string()]) {
Ok(m) => m,
Err(f) => panic!("test_switch_implies_cfg_test: {}", f),
};
let registry = errors::registry::Registry::new(&[]);
let (sessopts, cfg) = build_session_options_and_crate_config(matches);
let sess = build_session(sessopts, None, registry);
let cfg = build_configuration(&sess, cfg);
assert!(cfg.contains(&(Symbol::intern("test"), None)));
});
}
// When the user supplies --test and --cfg test, don't implicitly add
// another --cfg test
#[test]
fn test_switch_implies_cfg_test_unless_cfg_test() {
let matches = &match optgroups().parse(&["--test".to_string(), "--cfg=test".to_string()]) {
Ok(m) => m,
Err(f) => panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f),
};
let registry = errors::registry::Registry::new(&[]);
let (sessopts, cfg) = build_session_options_and_crate_config(matches);
let sess = build_session(sessopts, None, registry);
let cfg = build_configuration(&sess, cfg);
let mut test_items = cfg.iter().filter(|&&(name, _)| name == "test");
assert!(test_items.next().is_some());
assert!(test_items.next().is_none());
syntax::with_globals(|| {
let matches = &match optgroups().parse(&["--test".to_string(),
"--cfg=test".to_string()]) {
Ok(m) => m,
Err(f) => panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f),
};
let registry = errors::registry::Registry::new(&[]);
let (sessopts, cfg) = build_session_options_and_crate_config(matches);
let sess = build_session(sessopts, None, registry);
let cfg = build_configuration(&sess, cfg);
let mut test_items = cfg.iter().filter(|&&(name, _)| name == "test");
assert!(test_items.next().is_some());
assert!(test_items.next().is_none());
});
}
#[test]
fn test_can_print_warnings() {
{
syntax::with_globals(|| {
let matches = optgroups().parse(&["-Awarnings".to_string()]).unwrap();
let registry = errors::registry::Registry::new(&[]);
let (sessopts, _) = build_session_options_and_crate_config(&matches);
let sess = build_session(sessopts, None, registry);
assert!(!sess.diagnostic().flags.can_emit_warnings);
}
});
{
syntax::with_globals(|| {
let matches = optgroups()
.parse(&["-Awarnings".to_string(), "-Dwarnings".to_string()])
.unwrap();
@ -2457,15 +2463,15 @@ mod tests {
let (sessopts, _) = build_session_options_and_crate_config(&matches);
let sess = build_session(sessopts, None, registry);
assert!(sess.diagnostic().flags.can_emit_warnings);
}
});
{
syntax::with_globals(|| {
let matches = optgroups().parse(&["-Adead_code".to_string()]).unwrap();
let registry = errors::registry::Registry::new(&[]);
let (sessopts, _) = build_session_options_and_crate_config(&matches);
let sess = build_session(sessopts, None, registry);
assert!(sess.diagnostic().flags.can_emit_warnings);
}
});
}
#[test]

View File

@ -447,6 +447,17 @@ pub fn run_compiler<'a>(args: &[String],
file_loader: Option<Box<FileLoader + 'static>>,
emitter_dest: Option<Box<Write + Send>>)
-> (CompileResult, Option<Session>)
{
syntax::with_globals(|| {
run_compiler_impl(args, callbacks, file_loader, emitter_dest)
})
}
fn run_compiler_impl<'a>(args: &[String],
callbacks: &mut CompilerCalls<'a>,
file_loader: Option<Box<FileLoader + 'static>>,
emitter_dest: Option<Box<Write + Send>>)
-> (CompileResult, Option<Session>)
{
macro_rules! do_or_return {($expr: expr, $sess: expr) => {
match $expr {

View File

@ -29,6 +29,7 @@ use rustc::hir::map as hir_map;
use rustc::session::{self, config};
use rustc::session::config::{OutputFilenames, OutputTypes};
use rustc_data_structures::sync::Lrc;
use syntax;
use syntax::ast;
use syntax::abi::Abi;
use syntax::codemap::{CodeMap, FilePathMapping, FileName};
@ -93,9 +94,19 @@ fn errors(msgs: &[&str]) -> (Box<Emitter + Send>, usize) {
}
fn test_env<F>(source_string: &str,
(emitter, expected_err_count): (Box<Emitter + Send>, usize),
args: (Box<Emitter + Send>, usize),
body: F)
where F: FnOnce(Env)
{
syntax::with_globals(|| {
test_env_impl(source_string, args, body)
});
}
fn test_env_impl<F>(source_string: &str,
(emitter, expected_err_count): (Box<Emitter + Send>, usize),
body: F)
where F: FnOnce(Env)
{
let mut options = config::basic_options();
options.debugging_opts.verbose = true;

View File

@ -398,6 +398,7 @@ mod test {
use syntax::ast::*;
use syntax::codemap::dummy_spanned;
use syntax_pos::DUMMY_SP;
use syntax::with_globals;
fn word_cfg(s: &str) -> Cfg {
Cfg::Cfg(Symbol::intern(s), None)
@ -409,479 +410,494 @@ mod test {
#[test]
fn test_cfg_not() {
assert_eq!(!Cfg::False, Cfg::True);
assert_eq!(!Cfg::True, Cfg::False);
assert_eq!(!word_cfg("test"), Cfg::Not(Box::new(word_cfg("test"))));
assert_eq!(
!Cfg::All(vec![word_cfg("a"), word_cfg("b")]),
Cfg::Not(Box::new(Cfg::All(vec![word_cfg("a"), word_cfg("b")])))
);
assert_eq!(
!Cfg::Any(vec![word_cfg("a"), word_cfg("b")]),
Cfg::Not(Box::new(Cfg::Any(vec![word_cfg("a"), word_cfg("b")])))
);
assert_eq!(!Cfg::Not(Box::new(word_cfg("test"))), word_cfg("test"));
with_globals(|| {
assert_eq!(!Cfg::False, Cfg::True);
assert_eq!(!Cfg::True, Cfg::False);
assert_eq!(!word_cfg("test"), Cfg::Not(Box::new(word_cfg("test"))));
assert_eq!(
!Cfg::All(vec![word_cfg("a"), word_cfg("b")]),
Cfg::Not(Box::new(Cfg::All(vec![word_cfg("a"), word_cfg("b")])))
);
assert_eq!(
!Cfg::Any(vec![word_cfg("a"), word_cfg("b")]),
Cfg::Not(Box::new(Cfg::Any(vec![word_cfg("a"), word_cfg("b")])))
);
assert_eq!(!Cfg::Not(Box::new(word_cfg("test"))), word_cfg("test"));
})
}
#[test]
fn test_cfg_and() {
let mut x = Cfg::False;
x &= Cfg::True;
assert_eq!(x, Cfg::False);
with_globals(|| {
let mut x = Cfg::False;
x &= Cfg::True;
assert_eq!(x, Cfg::False);
x = word_cfg("test");
x &= Cfg::False;
assert_eq!(x, Cfg::False);
x = word_cfg("test");
x &= Cfg::False;
assert_eq!(x, Cfg::False);
x = word_cfg("test2");
x &= Cfg::True;
assert_eq!(x, word_cfg("test2"));
x = word_cfg("test2");
x &= Cfg::True;
assert_eq!(x, word_cfg("test2"));
x = Cfg::True;
x &= word_cfg("test3");
assert_eq!(x, word_cfg("test3"));
x = Cfg::True;
x &= word_cfg("test3");
assert_eq!(x, word_cfg("test3"));
x &= word_cfg("test4");
assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4")]));
x &= word_cfg("test4");
assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4")]));
x &= word_cfg("test5");
assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")]));
x &= word_cfg("test5");
assert_eq!(x, Cfg::All(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")]));
x &= Cfg::All(vec![word_cfg("test6"), word_cfg("test7")]);
assert_eq!(x, Cfg::All(vec![
word_cfg("test3"),
word_cfg("test4"),
word_cfg("test5"),
word_cfg("test6"),
word_cfg("test7"),
]));
x &= Cfg::All(vec![word_cfg("test6"), word_cfg("test7")]);
assert_eq!(x, Cfg::All(vec![
word_cfg("test3"),
word_cfg("test4"),
word_cfg("test5"),
word_cfg("test6"),
word_cfg("test7"),
]));
let mut y = Cfg::Any(vec![word_cfg("a"), word_cfg("b")]);
y &= x;
assert_eq!(y, Cfg::All(vec![
word_cfg("test3"),
word_cfg("test4"),
word_cfg("test5"),
word_cfg("test6"),
word_cfg("test7"),
Cfg::Any(vec![word_cfg("a"), word_cfg("b")]),
]));
let mut y = Cfg::Any(vec![word_cfg("a"), word_cfg("b")]);
y &= x;
assert_eq!(y, Cfg::All(vec![
word_cfg("test3"),
word_cfg("test4"),
word_cfg("test5"),
word_cfg("test6"),
word_cfg("test7"),
Cfg::Any(vec![word_cfg("a"), word_cfg("b")]),
]));
assert_eq!(
word_cfg("a") & word_cfg("b") & word_cfg("c"),
Cfg::All(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")])
);
assert_eq!(
word_cfg("a") & word_cfg("b") & word_cfg("c"),
Cfg::All(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")])
);
})
}
#[test]
fn test_cfg_or() {
let mut x = Cfg::True;
x |= Cfg::False;
assert_eq!(x, Cfg::True);
with_globals(|| {
let mut x = Cfg::True;
x |= Cfg::False;
assert_eq!(x, Cfg::True);
x = word_cfg("test");
x |= Cfg::True;
assert_eq!(x, Cfg::True);
x = word_cfg("test");
x |= Cfg::True;
assert_eq!(x, Cfg::True);
x = word_cfg("test2");
x |= Cfg::False;
assert_eq!(x, word_cfg("test2"));
x = word_cfg("test2");
x |= Cfg::False;
assert_eq!(x, word_cfg("test2"));
x = Cfg::False;
x |= word_cfg("test3");
assert_eq!(x, word_cfg("test3"));
x = Cfg::False;
x |= word_cfg("test3");
assert_eq!(x, word_cfg("test3"));
x |= word_cfg("test4");
assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4")]));
x |= word_cfg("test4");
assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4")]));
x |= word_cfg("test5");
assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")]));
x |= word_cfg("test5");
assert_eq!(x, Cfg::Any(vec![word_cfg("test3"), word_cfg("test4"), word_cfg("test5")]));
x |= Cfg::Any(vec![word_cfg("test6"), word_cfg("test7")]);
assert_eq!(x, Cfg::Any(vec![
word_cfg("test3"),
word_cfg("test4"),
word_cfg("test5"),
word_cfg("test6"),
word_cfg("test7"),
]));
x |= Cfg::Any(vec![word_cfg("test6"), word_cfg("test7")]);
assert_eq!(x, Cfg::Any(vec![
word_cfg("test3"),
word_cfg("test4"),
word_cfg("test5"),
word_cfg("test6"),
word_cfg("test7"),
]));
let mut y = Cfg::All(vec![word_cfg("a"), word_cfg("b")]);
y |= x;
assert_eq!(y, Cfg::Any(vec![
word_cfg("test3"),
word_cfg("test4"),
word_cfg("test5"),
word_cfg("test6"),
word_cfg("test7"),
Cfg::All(vec![word_cfg("a"), word_cfg("b")]),
]));
let mut y = Cfg::All(vec![word_cfg("a"), word_cfg("b")]);
y |= x;
assert_eq!(y, Cfg::Any(vec![
word_cfg("test3"),
word_cfg("test4"),
word_cfg("test5"),
word_cfg("test6"),
word_cfg("test7"),
Cfg::All(vec![word_cfg("a"), word_cfg("b")]),
]));
assert_eq!(
word_cfg("a") | word_cfg("b") | word_cfg("c"),
Cfg::Any(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")])
);
assert_eq!(
word_cfg("a") | word_cfg("b") | word_cfg("c"),
Cfg::Any(vec![word_cfg("a"), word_cfg("b"), word_cfg("c")])
);
})
}
#[test]
fn test_parse_ok() {
let mi = MetaItem {
name: Symbol::intern("all"),
node: MetaItemKind::Word,
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all")));
with_globals(|| {
let mi = MetaItem {
name: Symbol::intern("all"),
node: MetaItemKind::Word,
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all")));
let mi = MetaItem {
name: Symbol::intern("all"),
node: MetaItemKind::NameValue(dummy_spanned(LitKind::Str(
Symbol::intern("done"),
StrStyle::Cooked,
))),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done")));
let mi = MetaItem {
name: Symbol::intern("all"),
node: MetaItemKind::NameValue(dummy_spanned(LitKind::Str(
Symbol::intern("done"),
StrStyle::Cooked,
))),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done")));
let mi = MetaItem {
name: Symbol::intern("all"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b")));
let mi = MetaItem {
name: Symbol::intern("all"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b")));
let mi = MetaItem {
name: Symbol::intern("any"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") | word_cfg("b")));
let mi = MetaItem {
name: Symbol::intern("any"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") | word_cfg("b")));
let mi = MetaItem {
name: Symbol::intern("not"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(!word_cfg("a")));
let mi = MetaItem {
name: Symbol::intern("not"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(!word_cfg("a")));
let mi = MetaItem {
name: Symbol::intern("not"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("any"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("all"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("c"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(!(word_cfg("a") | (word_cfg("b") & word_cfg("c")))));
let mi = MetaItem {
name: Symbol::intern("not"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("any"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("all"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("c"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(!(word_cfg("a") | (word_cfg("b") & word_cfg("c")))));
let mi = MetaItem {
name: Symbol::intern("all"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("c"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b") & word_cfg("c")));
let mi = MetaItem {
name: Symbol::intern("all"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("c"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b") & word_cfg("c")));
})
}
#[test]
fn test_parse_err() {
let mi = MetaItem {
name: Symbol::intern("foo"),
node: MetaItemKind::NameValue(dummy_spanned(LitKind::Bool(false))),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
with_globals(|| {
let mi = MetaItem {
name: Symbol::intern("foo"),
node: MetaItemKind::NameValue(dummy_spanned(LitKind::Bool(false))),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
name: Symbol::intern("not"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
name: Symbol::intern("not"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
name: Symbol::intern("not"),
node: MetaItemKind::List(vec![]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
name: Symbol::intern("not"),
node: MetaItemKind::List(vec![]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
name: Symbol::intern("foo"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
name: Symbol::intern("foo"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
name: Symbol::intern("all"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("foo"),
node: MetaItemKind::List(vec![]),
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
name: Symbol::intern("all"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("foo"),
node: MetaItemKind::List(vec![]),
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
name: Symbol::intern("any"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("foo"),
node: MetaItemKind::List(vec![]),
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
name: Symbol::intern("any"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("foo"),
node: MetaItemKind::List(vec![]),
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
name: Symbol::intern("not"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("foo"),
node: MetaItemKind::List(vec![]),
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
name: Symbol::intern("not"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
name: Symbol::intern("foo"),
node: MetaItemKind::List(vec![]),
span: DUMMY_SP,
})),
]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
})
}
#[test]
fn test_render_short_html() {
assert_eq!(
word_cfg("unix").render_short_html(),
"Unix"
);
assert_eq!(
name_value_cfg("target_os", "macos").render_short_html(),
"macOS"
);
assert_eq!(
name_value_cfg("target_pointer_width", "16").render_short_html(),
"16-bit"
);
assert_eq!(
name_value_cfg("target_endian", "little").render_short_html(),
"Little-endian"
);
assert_eq!(
(!word_cfg("windows")).render_short_html(),
"Non-Windows"
);
assert_eq!(
(word_cfg("unix") & word_cfg("windows")).render_short_html(),
"Unix and Windows"
);
assert_eq!(
(word_cfg("unix") | word_cfg("windows")).render_short_html(),
"Unix or Windows"
);
assert_eq!(
(
word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions")
).render_short_html(),
"Unix and Windows and debug-assertions enabled"
);
assert_eq!(
(
word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")
).render_short_html(),
"Unix or Windows or debug-assertions enabled"
);
assert_eq!(
(
!(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions"))
).render_short_html(),
"Neither Unix nor Windows nor debug-assertions enabled"
);
assert_eq!(
(
(word_cfg("unix") & name_value_cfg("target_arch", "x86_64")) |
(word_cfg("windows") & name_value_cfg("target_pointer_width", "64"))
).render_short_html(),
"Unix and x86-64, or Windows and 64-bit"
);
assert_eq!(
(!(word_cfg("unix") & word_cfg("windows"))).render_short_html(),
"Not (Unix and Windows)"
);
assert_eq!(
(
(word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix")
).render_short_html(),
"(Debug-assertions enabled or Windows) and Unix"
);
with_globals(|| {
assert_eq!(
word_cfg("unix").render_short_html(),
"Unix"
);
assert_eq!(
name_value_cfg("target_os", "macos").render_short_html(),
"macOS"
);
assert_eq!(
name_value_cfg("target_pointer_width", "16").render_short_html(),
"16-bit"
);
assert_eq!(
name_value_cfg("target_endian", "little").render_short_html(),
"Little-endian"
);
assert_eq!(
(!word_cfg("windows")).render_short_html(),
"Non-Windows"
);
assert_eq!(
(word_cfg("unix") & word_cfg("windows")).render_short_html(),
"Unix and Windows"
);
assert_eq!(
(word_cfg("unix") | word_cfg("windows")).render_short_html(),
"Unix or Windows"
);
assert_eq!(
(
word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions")
).render_short_html(),
"Unix and Windows and debug-assertions enabled"
);
assert_eq!(
(
word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")
).render_short_html(),
"Unix or Windows or debug-assertions enabled"
);
assert_eq!(
(
!(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions"))
).render_short_html(),
"Neither Unix nor Windows nor debug-assertions enabled"
);
assert_eq!(
(
(word_cfg("unix") & name_value_cfg("target_arch", "x86_64")) |
(word_cfg("windows") & name_value_cfg("target_pointer_width", "64"))
).render_short_html(),
"Unix and x86-64, or Windows and 64-bit"
);
assert_eq!(
(!(word_cfg("unix") & word_cfg("windows"))).render_short_html(),
"Not (Unix and Windows)"
);
assert_eq!(
(
(word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix")
).render_short_html(),
"(Debug-assertions enabled or Windows) and Unix"
);
})
}
#[test]
fn test_render_long_html() {
assert_eq!(
word_cfg("unix").render_long_html(),
"This is supported on <strong>Unix</strong> only."
);
assert_eq!(
name_value_cfg("target_os", "macos").render_long_html(),
"This is supported on <strong>macOS</strong> only."
);
assert_eq!(
name_value_cfg("target_pointer_width", "16").render_long_html(),
"This is supported on <strong>16-bit</strong> only."
);
assert_eq!(
name_value_cfg("target_endian", "little").render_long_html(),
"This is supported on <strong>little-endian</strong> only."
);
assert_eq!(
(!word_cfg("windows")).render_long_html(),
"This is supported on <strong>non-Windows</strong> only."
);
assert_eq!(
(word_cfg("unix") & word_cfg("windows")).render_long_html(),
"This is supported on <strong>Unix and Windows</strong> only."
);
assert_eq!(
(word_cfg("unix") | word_cfg("windows")).render_long_html(),
"This is supported on <strong>Unix or Windows</strong> only."
);
assert_eq!(
(
word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions")
).render_long_html(),
"This is supported on <strong>Unix and Windows and debug-assertions enabled</strong> \
only."
);
assert_eq!(
(
word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")
).render_long_html(),
"This is supported on <strong>Unix or Windows or debug-assertions enabled</strong> \
only."
);
assert_eq!(
(
!(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions"))
).render_long_html(),
"This is supported on <strong>neither Unix nor Windows nor debug-assertions \
enabled</strong>."
);
assert_eq!(
(
(word_cfg("unix") & name_value_cfg("target_arch", "x86_64")) |
(word_cfg("windows") & name_value_cfg("target_pointer_width", "64"))
).render_long_html(),
"This is supported on <strong>Unix and x86-64, or Windows and 64-bit</strong> only."
);
assert_eq!(
(!(word_cfg("unix") & word_cfg("windows"))).render_long_html(),
"This is supported on <strong>not (Unix and Windows)</strong>."
);
assert_eq!(
(
(word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix")
).render_long_html(),
"This is supported on <strong>(debug-assertions enabled or Windows) and Unix</strong> \
only."
);
with_globals(|| {
assert_eq!(
word_cfg("unix").render_long_html(),
"This is supported on <strong>Unix</strong> only."
);
assert_eq!(
name_value_cfg("target_os", "macos").render_long_html(),
"This is supported on <strong>macOS</strong> only."
);
assert_eq!(
name_value_cfg("target_pointer_width", "16").render_long_html(),
"This is supported on <strong>16-bit</strong> only."
);
assert_eq!(
name_value_cfg("target_endian", "little").render_long_html(),
"This is supported on <strong>little-endian</strong> only."
);
assert_eq!(
(!word_cfg("windows")).render_long_html(),
"This is supported on <strong>non-Windows</strong> only."
);
assert_eq!(
(word_cfg("unix") & word_cfg("windows")).render_long_html(),
"This is supported on <strong>Unix and Windows</strong> only."
);
assert_eq!(
(word_cfg("unix") | word_cfg("windows")).render_long_html(),
"This is supported on <strong>Unix or Windows</strong> only."
);
assert_eq!(
(
word_cfg("unix") & word_cfg("windows") & word_cfg("debug_assertions")
).render_long_html(),
"This is supported on <strong>Unix and Windows and debug-assertions enabled\
</strong> only."
);
assert_eq!(
(
word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions")
).render_long_html(),
"This is supported on <strong>Unix or Windows or debug-assertions enabled\
</strong> only."
);
assert_eq!(
(
!(word_cfg("unix") | word_cfg("windows") | word_cfg("debug_assertions"))
).render_long_html(),
"This is supported on <strong>neither Unix nor Windows nor debug-assertions \
enabled</strong>."
);
assert_eq!(
(
(word_cfg("unix") & name_value_cfg("target_arch", "x86_64")) |
(word_cfg("windows") & name_value_cfg("target_pointer_width", "64"))
).render_long_html(),
"This is supported on <strong>Unix and x86-64, or Windows and 64-bit</strong> \
only."
);
assert_eq!(
(!(word_cfg("unix") & word_cfg("windows"))).render_long_html(),
"This is supported on <strong>not (Unix and Windows)</strong>."
);
assert_eq!(
(
(word_cfg("debug_assertions") | word_cfg("windows")) & word_cfg("unix")
).render_long_html(),
"This is supported on <strong>(debug-assertions enabled or Windows) and Unix\
</strong> only."
);
})
}
}

View File

@ -102,7 +102,9 @@ pub fn main() {
const STACK_SIZE: usize = 32_000_000; // 32MB
env_logger::init();
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
get_args().map(|args| main_args(&args)).unwrap_or(1)
syntax::with_globals(move || {
get_args().map(|args| main_args(&args)).unwrap_or(1)
})
}).unwrap().join().unwrap_or(101);
process::exit(res as i32);
}
@ -554,7 +556,8 @@ where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R {
});
let (tx, rx) = channel();
rustc_driver::monitor(move || {
rustc_driver::monitor(move || syntax::with_globals(move || {
use rustc::session::config::Input;
let (mut krate, renderinfo) =
@ -623,7 +626,7 @@ where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R {
let krate = pm.run_plugins(krate);
tx.send(f(Output { krate: krate, renderinfo: renderinfo, passes: passes })).unwrap();
});
}));
rx.recv().unwrap()
}

View File

@ -35,6 +35,7 @@ use rustc_resolve::MakeGlobMap;
use syntax::ast;
use syntax::codemap::CodeMap;
use syntax::feature_gate::UnstableFeatures;
use syntax::with_globals;
use syntax_pos::{BytePos, DUMMY_SP, Pos, Span, FileName};
use errors;
use errors::emitter::ColorConfig;
@ -518,7 +519,7 @@ impl Collector {
let panic = io::set_panic(None);
let print = io::set_print(None);
match {
rustc_driver::in_rustc_thread(move || {
rustc_driver::in_rustc_thread(move || with_globals(move || {
io::set_panic(panic);
io::set_print(print);
run_test(&test,
@ -536,7 +537,7 @@ impl Collector {
&opts,
maybe_sysroot,
linker)
})
}))
} {
Ok(()) => (),
Err(err) => panic::resume_unwind(err),

View File

@ -12,6 +12,7 @@ crate-type = ["dylib"]
bitflags = "1.0"
serialize = { path = "../libserialize" }
log = "0.4"
scoped-tls = "0.1"
syntax_pos = { path = "../libsyntax_pos" }
rustc_cratesio_shim = { path = "../librustc_cratesio_shim" }
rustc_errors = { path = "../librustc_errors" }

View File

@ -30,15 +30,10 @@ use ptr::P;
use symbol::Symbol;
use tokenstream::{TokenStream, TokenTree, Delimited};
use util::ThinVec;
use GLOBALS;
use std::cell::RefCell;
use std::iter;
thread_local! {
static USED_ATTRS: RefCell<Vec<u64>> = RefCell::new(Vec::new());
static KNOWN_ATTRS: RefCell<Vec<u64>> = RefCell::new(Vec::new());
}
enum AttrError {
MultipleItem(Name),
UnknownMetaItem(Name),
@ -65,22 +60,24 @@ fn handle_errors(diag: &Handler, span: Span, error: AttrError) {
pub fn mark_used(attr: &Attribute) {
debug!("Marking {:?} as used.", attr);
let AttrId(id) = attr.id;
USED_ATTRS.with(|slot| {
GLOBALS.with(|globals| {
let mut slot = globals.used_attrs.lock();
let idx = (id / 64) as usize;
let shift = id % 64;
if slot.borrow().len() <= idx {
slot.borrow_mut().resize(idx + 1, 0);
if slot.len() <= idx {
slot.resize(idx + 1, 0);
}
slot.borrow_mut()[idx] |= 1 << shift;
slot[idx] |= 1 << shift;
});
}
pub fn is_used(attr: &Attribute) -> bool {
let AttrId(id) = attr.id;
USED_ATTRS.with(|slot| {
GLOBALS.with(|globals| {
let slot = globals.used_attrs.lock();
let idx = (id / 64) as usize;
let shift = id % 64;
slot.borrow().get(idx).map(|bits| bits & (1 << shift) != 0)
slot.get(idx).map(|bits| bits & (1 << shift) != 0)
.unwrap_or(false)
})
}
@ -88,22 +85,24 @@ pub fn is_used(attr: &Attribute) -> bool {
pub fn mark_known(attr: &Attribute) {
debug!("Marking {:?} as known.", attr);
let AttrId(id) = attr.id;
KNOWN_ATTRS.with(|slot| {
GLOBALS.with(|globals| {
let mut slot = globals.known_attrs.lock();
let idx = (id / 64) as usize;
let shift = id % 64;
if slot.borrow().len() <= idx {
slot.borrow_mut().resize(idx + 1, 0);
if slot.len() <= idx {
slot.resize(idx + 1, 0);
}
slot.borrow_mut()[idx] |= 1 << shift;
slot[idx] |= 1 << shift;
});
}
pub fn is_known(attr: &Attribute) -> bool {
let AttrId(id) = attr.id;
KNOWN_ATTRS.with(|slot| {
GLOBALS.with(|globals| {
let slot = globals.known_attrs.lock();
let idx = (id / 64) as usize;
let shift = id % 64;
slot.borrow().get(idx).map(|bits| bits & (1 << shift) != 0)
slot.get(idx).map(|bits| bits & (1 << shift) != 0)
.unwrap_or(false)
})
}
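
The bookkeeping itself is unchanged by this hunk: attribute ids are still packed into a Vec<u64> bitset, with word index id / 64 and bit id % 64; only the container moves from a RefCell thread local into a Lock field on GLOBALS. A standalone sketch of that bitset logic (AttrBits is hypothetical, and std::sync::Mutex stands in for rustc's Lock type):

use std::sync::Mutex;

struct AttrBits(Mutex<Vec<u64>>);

impl AttrBits {
    fn mark(&self, id: u64) {
        let mut slot = self.0.lock().unwrap();
        let idx = (id / 64) as usize;
        let shift = id % 64;
        if slot.len() <= idx {
            slot.resize(idx + 1, 0); // grow lazily: one u64 word per 64 ids
        }
        slot[idx] |= 1 << shift;
    }

    fn is_marked(&self, id: u64) -> bool {
        let slot = self.0.lock().unwrap();
        let idx = (id / 64) as usize;
        let shift = id % 64;
        slot.get(idx).map(|bits| bits & (1 << shift) != 0).unwrap_or(false)
    }
}

fn main() {
    let used = AttrBits(Mutex::new(Vec::new()));
    used.mark(130); // 130 / 64 = word 2, 130 % 64 = bit 2
    assert!(used.is_marked(130));
    assert!(!used.is_marked(131));
}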

View File

@ -1386,6 +1386,7 @@ mod tests {
use util::parser_testing::{string_to_crate, matches_codepattern};
use print::pprust;
use fold;
use with_globals;
use super::*;
// this version doesn't care about getting comments or docstrings in.
@ -1423,28 +1424,32 @@ mod tests {
// make sure idents get transformed everywhere
#[test] fn ident_transformation () {
let mut zz_fold = ToZzIdentFolder;
let ast = string_to_crate(
"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string());
let folded_crate = zz_fold.fold_crate(ast);
assert_pred!(
matches_codepattern,
"matches_codepattern",
pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
"#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string());
with_globals(|| {
let mut zz_fold = ToZzIdentFolder;
let ast = string_to_crate(
"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string());
let folded_crate = zz_fold.fold_crate(ast);
assert_pred!(
matches_codepattern,
"matches_codepattern",
pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
"#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string());
})
}
// even inside macro defs....
#[test] fn ident_transformation_in_defs () {
let mut zz_fold = ToZzIdentFolder;
let ast = string_to_crate(
"macro_rules! a {(b $c:expr $(d $e:token)f+ => \
(g $(d $d $e)+))} ".to_string());
let folded_crate = zz_fold.fold_crate(ast);
assert_pred!(
matches_codepattern,
"matches_codepattern",
pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
"macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string());
with_globals(|| {
let mut zz_fold = ToZzIdentFolder;
let ast = string_to_crate(
"macro_rules! a {(b $c:expr $(d $e:token)f+ => \
(g $(d $d $e)+))} ".to_string());
let folded_crate = zz_fold.fold_crate(ast);
assert_pred!(
matches_codepattern,
"matches_codepattern",
pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
"macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string());
})
}
}

View File

@ -39,9 +39,12 @@ extern crate std_unicode;
pub extern crate rustc_errors as errors;
extern crate syntax_pos;
extern crate rustc_data_structures;
#[macro_use] extern crate scoped_tls;
extern crate serialize as rustc_serialize; // used by deriving
use rustc_data_structures::sync::Lock;
// A variant of 'try!' that panics on an Err. This is used as a crutch on the
// way towards a non-panic!-prone parser. It should be used for fatal parsing
// errors; eventually we plan to convert all code using panictry to just use
@ -72,6 +75,33 @@ macro_rules! unwrap_or {
}
}
struct Globals {
used_attrs: Lock<Vec<u64>>,
known_attrs: Lock<Vec<u64>>,
syntax_pos_globals: syntax_pos::Globals,
}
impl Globals {
fn new() -> Globals {
Globals {
used_attrs: Lock::new(Vec::new()),
known_attrs: Lock::new(Vec::new()),
syntax_pos_globals: syntax_pos::Globals::new(),
}
}
}
pub fn with_globals<F, R>(f: F) -> R
where F: FnOnce() -> R
{
let globals = Globals::new();
GLOBALS.set(&globals, || {
syntax_pos::GLOBALS.set(&globals.syntax_pos_globals, f)
})
}
scoped_thread_local!(static GLOBALS: Globals);
#[macro_use]
pub mod diagnostics {
#[macro_use]
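
This hunk is the core of the change: the mutable state becomes a plain Globals struct, a scoped_thread_local! slot points at the current instance, and with_globals installs both the syntax Globals and the nested syntax_pos::Globals for the duration of a closure. A self-contained sketch of the same pattern using scoped-tls 0.1 with a toy Globals (counter and bump are illustrative, not rustc's actual fields or functions):

#[macro_use]
extern crate scoped_tls; // scoped-tls = "0.1"

use std::sync::Mutex;

struct Globals {
    counter: Mutex<u64>, // stand-in for used_attrs / known_attrs / syntax_pos_globals
}

scoped_thread_local!(static GLOBALS: Globals);

fn with_globals<F, R>(f: F) -> R
    where F: FnOnce() -> R
{
    let globals = Globals { counter: Mutex::new(0) };
    // The reference is only visible while `f` runs; scoped_tls enforces the scope.
    GLOBALS.set(&globals, f)
}

fn bump() -> u64 {
    GLOBALS.with(|globals| {
        let mut c = globals.counter.lock().unwrap();
        *c += 1;
        *c
    })
}

fn main() {
    with_globals(|| {
        assert_eq!(bump(), 1);
        assert_eq!(bump(), 2);
    });
    // Calling bump() here, outside with_globals, would panic: no GLOBALS is set.
}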

View File

@ -1766,6 +1766,7 @@ mod tests {
use std::path::PathBuf;
use diagnostics::plugin::ErrorMap;
use rustc_data_structures::sync::Lock;
use with_globals;
fn mk_sess(cm: Lrc<CodeMap>) -> ParseSess {
let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
Some(cm.clone()),
@ -1794,33 +1795,35 @@ mod tests {
#[test]
fn t1() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut string_reader = setup(&cm,
&sh,
"/* my source file */ fn main() { println!(\"zebra\"); }\n"
.to_string());
let id = Ident::from_str("fn");
assert_eq!(string_reader.next_token().tok, token::Comment);
assert_eq!(string_reader.next_token().tok, token::Whitespace);
let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan {
tok: token::Ident(id),
sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
};
assert_eq!(tok1, tok2);
assert_eq!(string_reader.next_token().tok, token::Whitespace);
// the 'main' id is already read:
assert_eq!(string_reader.pos.clone(), BytePos(28));
// read another token:
let tok3 = string_reader.next_token();
let tok4 = TokenAndSpan {
tok: token::Ident(Ident::from_str("main")),
sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
};
assert_eq!(tok3, tok4);
// the lparen is already read:
assert_eq!(string_reader.pos.clone(), BytePos(29))
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut string_reader = setup(&cm,
&sh,
"/* my source file */ fn main() { println!(\"zebra\"); }\n"
.to_string());
let id = Ident::from_str("fn");
assert_eq!(string_reader.next_token().tok, token::Comment);
assert_eq!(string_reader.next_token().tok, token::Whitespace);
let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan {
tok: token::Ident(id),
sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
};
assert_eq!(tok1, tok2);
assert_eq!(string_reader.next_token().tok, token::Whitespace);
// the 'main' id is already read:
assert_eq!(string_reader.pos.clone(), BytePos(28));
// read another token:
let tok3 = string_reader.next_token();
let tok4 = TokenAndSpan {
tok: token::Ident(Ident::from_str("main")),
sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
};
assert_eq!(tok3, tok4);
// the lparen is already read:
assert_eq!(string_reader.pos.clone(), BytePos(29))
})
}
// check that the given reader produces the desired stream
@ -1838,113 +1841,133 @@ mod tests {
#[test]
fn doublecolonparsing() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a b".to_string()),
vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a b".to_string()),
vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
})
}
#[test]
fn dcparsing_2() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a::b".to_string()),
vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a::b".to_string()),
vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
})
}
#[test]
fn dcparsing_3() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
})
}
#[test]
fn dcparsing_4() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
})
}
#[test]
fn character_a() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("a")), None));
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("a")), None));
})
}
#[test]
fn character_space() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern(" ")), None));
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern(" ")), None));
})
}
#[test]
fn character_escaped() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("\\n")), None));
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("\\n")), None));
})
}
#[test]
fn lifetime_name() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
token::Lifetime(Ident::from_str("'abc")));
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
token::Lifetime(Ident::from_str("'abc")));
})
}
#[test]
fn raw_string() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
.next_token()
.tok,
token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
.next_token()
.tok,
token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
})
}
#[test]
fn literal_suffixes() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
macro_rules! test {
($input: expr, $tok_type: ident, $tok_contents: expr) => {{
assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok,
token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
Some(Symbol::intern("suffix"))));
// with a whitespace separator:
assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok,
token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
None));
}}
}
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
macro_rules! test {
($input: expr, $tok_type: ident, $tok_contents: expr) => {{
assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok,
token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
Some(Symbol::intern("suffix"))));
// with a whitespace separator:
assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok,
token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
None));
}}
}
test!("'a'", Char, "a");
test!("b'a'", Byte, "a");
test!("\"a\"", Str_, "a");
test!("b\"a\"", ByteStr, "a");
test!("1234", Integer, "1234");
test!("0b101", Integer, "0b101");
test!("0xABC", Integer, "0xABC");
test!("1.0", Float, "1.0");
test!("1.0e10", Float, "1.0e10");
test!("'a'", Char, "a");
test!("b'a'", Byte, "a");
test!("\"a\"", Str_, "a");
test!("b\"a\"", ByteStr, "a");
test!("1234", Integer, "1234");
test!("0b101", Integer, "0b101");
test!("0xABC", Integer, "0xABC");
test!("1.0", Float, "1.0");
test!("1.0e10", Float, "1.0e10");
assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok,
token::Literal(token::Integer(Symbol::intern("2")),
Some(Symbol::intern("us"))));
assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
Some(Symbol::intern("suffix"))));
assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
Some(Symbol::intern("suffix"))));
assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok,
token::Literal(token::Integer(Symbol::intern("2")),
Some(Symbol::intern("us"))));
assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
Some(Symbol::intern("suffix"))));
assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
Some(Symbol::intern("suffix"))));
})
}
#[test]
@ -1956,27 +1979,31 @@ mod tests {
#[test]
fn nested_block_comments() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string());
match lexer.next_token().tok {
token::Comment => {}
_ => panic!("expected a comment!"),
}
assert_eq!(lexer.next_token().tok,
token::Literal(token::Char(Symbol::intern("a")), None));
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string());
match lexer.next_token().tok {
token::Comment => {}
_ => panic!("expected a comment!"),
}
assert_eq!(lexer.next_token().tok,
token::Literal(token::Char(Symbol::intern("a")), None));
})
}
#[test]
fn crlf_comments() {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
let comment = lexer.next_token();
assert_eq!(comment.tok, token::Comment);
assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7)));
assert_eq!(lexer.next_token().tok, token::Whitespace);
assert_eq!(lexer.next_token().tok,
token::DocComment(Symbol::intern("/// test")));
with_globals(|| {
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
let comment = lexer.next_token();
assert_eq!(comment.tok, token::Comment);
assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7)));
assert_eq!(lexer.next_token().tok, token::Whitespace);
assert_eq!(lexer.next_token().tok,
token::DocComment(Symbol::intern("/// test")));
})
}
}

View File

@ -680,6 +680,7 @@ mod tests {
use util::parser_testing::{string_to_stream, string_to_parser};
use util::parser_testing::{string_to_expr, string_to_item, string_to_stmt};
use util::ThinVec;
use with_globals;
// produce a syntax_pos::span
fn sp(a: u32, b: u32) -> Span {
@ -691,156 +692,170 @@ mod tests {
}
#[test] fn path_exprs_1() {
assert!(string_to_expr("a".to_string()) ==
P(ast::Expr{
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
with_globals(|| {
assert!(string_to_expr("a".to_string()) ==
P(ast::Expr{
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
span: sp(0, 1),
segments: vec![str2seg("a", 0, 1)],
}),
span: sp(0, 1),
segments: vec![str2seg("a", 0, 1)],
}),
span: sp(0, 1),
attrs: ThinVec::new(),
}))
attrs: ThinVec::new(),
}))
})
}
#[test] fn path_exprs_2 () {
assert!(string_to_expr("::a::b".to_string()) ==
P(ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
with_globals(|| {
assert!(string_to_expr("::a::b".to_string()) ==
P(ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
span: sp(0, 6),
segments: vec![ast::PathSegment::crate_root(sp(0, 2)),
str2seg("a", 2, 3),
str2seg("b", 5, 6)]
}),
span: sp(0, 6),
segments: vec![ast::PathSegment::crate_root(sp(0, 2)),
str2seg("a", 2, 3),
str2seg("b", 5, 6)]
}),
span: sp(0, 6),
attrs: ThinVec::new(),
}))
attrs: ThinVec::new(),
}))
})
}
#[should_panic]
#[test] fn bad_path_expr_1() {
string_to_expr("::abc::def::return".to_string());
with_globals(|| {
string_to_expr("::abc::def::return".to_string());
})
}
// check the token-tree-ization of macros
#[test]
fn string_to_tts_macro () {
let tts: Vec<_> =
string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
let tts: &[TokenTree] = &tts[..];
with_globals(|| {
let tts: Vec<_> =
string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
let tts: &[TokenTree] = &tts[..];
match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
(
4,
Some(&TokenTree::Token(_, token::Ident(name_macro_rules))),
Some(&TokenTree::Token(_, token::Not)),
Some(&TokenTree::Token(_, token::Ident(name_zip))),
Some(&TokenTree::Delimited(_, ref macro_delimed)),
)
if name_macro_rules.name == "macro_rules"
&& name_zip.name == "zip" => {
let tts = &macro_delimed.stream().trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
(
3,
Some(&TokenTree::Delimited(_, ref first_delimed)),
Some(&TokenTree::Token(_, token::FatArrow)),
Some(&TokenTree::Delimited(_, ref second_delimed)),
)
if macro_delimed.delim == token::Paren => {
let tts = &first_delimed.stream().trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident))),
)
if first_delimed.delim == token::Paren && ident.name == "a" => {},
_ => panic!("value 3: {:?}", *first_delimed),
}
let tts = &second_delimed.stream().trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident))),
)
if second_delimed.delim == token::Paren
&& ident.name == "a" => {},
_ => panic!("value 4: {:?}", *second_delimed),
}
},
_ => panic!("value 2: {:?}", *macro_delimed),
}
},
_ => panic!("value: {:?}",tts),
}
match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
(
4,
Some(&TokenTree::Token(_, token::Ident(name_macro_rules))),
Some(&TokenTree::Token(_, token::Not)),
Some(&TokenTree::Token(_, token::Ident(name_zip))),
Some(&TokenTree::Delimited(_, ref macro_delimed)),
)
if name_macro_rules.name == "macro_rules"
&& name_zip.name == "zip" => {
let tts = &macro_delimed.stream().trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
(
3,
Some(&TokenTree::Delimited(_, ref first_delimed)),
Some(&TokenTree::Token(_, token::FatArrow)),
Some(&TokenTree::Delimited(_, ref second_delimed)),
)
if macro_delimed.delim == token::Paren => {
let tts = &first_delimed.stream().trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident))),
)
if first_delimed.delim == token::Paren && ident.name == "a" => {},
_ => panic!("value 3: {:?}", *first_delimed),
}
let tts = &second_delimed.stream().trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident))),
)
if second_delimed.delim == token::Paren
&& ident.name == "a" => {},
_ => panic!("value 4: {:?}", *second_delimed),
}
},
_ => panic!("value 2: {:?}", *macro_delimed),
}
},
_ => panic!("value: {:?}",tts),
}
})
}
#[test]
fn string_to_tts_1() {
let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
with_globals(|| {
let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
let expected = TokenStream::concat(vec![
TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(),
TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(),
TokenTree::Delimited(
sp(5, 14),
tokenstream::Delimited {
delim: token::DelimToken::Paren,
tts: TokenStream::concat(vec![
TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(),
TokenTree::Token(sp(8, 9), token::Colon).into(),
TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))).into(),
]).into(),
}).into(),
TokenTree::Delimited(
sp(15, 21),
tokenstream::Delimited {
delim: token::DelimToken::Brace,
tts: TokenStream::concat(vec![
TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(),
TokenTree::Token(sp(18, 19), token::Semi).into(),
]).into(),
}).into()
]);
let expected = TokenStream::concat(vec![
TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(),
TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(),
TokenTree::Delimited(
sp(5, 14),
tokenstream::Delimited {
delim: token::DelimToken::Paren,
tts: TokenStream::concat(vec![
TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(),
TokenTree::Token(sp(8, 9), token::Colon).into(),
TokenTree::Token(sp(10, 13),
token::Ident(Ident::from_str("i32"))).into(),
]).into(),
}).into(),
TokenTree::Delimited(
sp(15, 21),
tokenstream::Delimited {
delim: token::DelimToken::Brace,
tts: TokenStream::concat(vec![
TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(),
TokenTree::Token(sp(18, 19), token::Semi).into(),
]).into(),
}).into()
]);
assert_eq!(tts, expected);
assert_eq!(tts, expected);
})
}
#[test] fn ret_expr() {
assert!(string_to_expr("return d".to_string()) ==
P(ast::Expr{
id: ast::DUMMY_NODE_ID,
node:ast::ExprKind::Ret(Some(P(ast::Expr{
with_globals(|| {
assert!(string_to_expr("return d".to_string()) ==
P(ast::Expr{
id: ast::DUMMY_NODE_ID,
node:ast::ExprKind::Path(None, ast::Path{
span: sp(7, 8),
segments: vec![str2seg("d", 7, 8)],
}),
span:sp(7,8),
node:ast::ExprKind::Ret(Some(P(ast::Expr{
id: ast::DUMMY_NODE_ID,
node:ast::ExprKind::Path(None, ast::Path{
span: sp(7, 8),
segments: vec![str2seg("d", 7, 8)],
}),
span:sp(7,8),
attrs: ThinVec::new(),
}))),
span:sp(0,8),
attrs: ThinVec::new(),
}))),
span:sp(0,8),
attrs: ThinVec::new(),
}))
}))
})
}
#[test] fn parse_stmt_1 () {
assert!(string_to_stmt("b;".to_string()) ==
Some(ast::Stmt {
node: ast::StmtKind::Expr(P(ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
span:sp(0,1),
segments: vec![str2seg("b", 0, 1)],
}),
span: sp(0,1),
attrs: ThinVec::new()})),
id: ast::DUMMY_NODE_ID,
span: sp(0,1)}))
with_globals(|| {
assert!(string_to_stmt("b;".to_string()) ==
Some(ast::Stmt {
node: ast::StmtKind::Expr(P(ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
span:sp(0,1),
segments: vec![str2seg("b", 0, 1)],
}),
span: sp(0,1),
attrs: ThinVec::new()})),
id: ast::DUMMY_NODE_ID,
span: sp(0,1)}))
})
}
fn parser_done(p: Parser){
@ -848,120 +863,128 @@ mod tests {
}
#[test] fn parse_ident_pat () {
let sess = ParseSess::new(FilePathMapping::empty());
let mut parser = string_to_parser(&sess, "b".to_string());
assert!(panictry!(parser.parse_pat())
== P(ast::Pat{
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable),
Spanned{ span:sp(0, 1),
node: Ident::from_str("b")
},
None),
span: sp(0,1)}));
parser_done(parser);
with_globals(|| {
let sess = ParseSess::new(FilePathMapping::empty());
let mut parser = string_to_parser(&sess, "b".to_string());
assert!(panictry!(parser.parse_pat())
== P(ast::Pat{
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable),
Spanned{ span:sp(0, 1),
node: Ident::from_str("b")
},
None),
span: sp(0,1)}));
parser_done(parser);
})
}
// check the contents of the tt manually:
#[test] fn parse_fundecl () {
// this test depends on the intern order of "fn" and "i32"
let item = string_to_item("fn a (b : i32) { b; }".to_string()).map(|m| {
m.map(|mut m| {
m.tokens = None;
m
})
});
assert_eq!(item,
Some(
P(ast::Item{ident:Ident::from_str("a"),
attrs:Vec::new(),
id: ast::DUMMY_NODE_ID,
tokens: None,
node: ast::ItemKind::Fn(P(ast::FnDecl {
inputs: vec![ast::Arg{
ty: P(ast::Ty{id: ast::DUMMY_NODE_ID,
node: ast::TyKind::Path(None, ast::Path{
span:sp(10,13),
segments: vec![str2seg("i32", 10, 13)],
with_globals(|| {
// this test depends on the intern order of "fn" and "i32"
let item = string_to_item("fn a (b : i32) { b; }".to_string()).map(|m| {
m.map(|mut m| {
m.tokens = None;
m
})
});
assert_eq!(item,
Some(
P(ast::Item{ident:Ident::from_str("a"),
attrs:Vec::new(),
id: ast::DUMMY_NODE_ID,
tokens: None,
node: ast::ItemKind::Fn(P(ast::FnDecl {
inputs: vec![ast::Arg{
ty: P(ast::Ty{id: ast::DUMMY_NODE_ID,
node: ast::TyKind::Path(None, ast::Path{
span:sp(10,13),
segments: vec![str2seg("i32", 10, 13)],
}),
span:sp(10,13)
}),
span:sp(10,13)
}),
pat: P(ast::Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(
ast::BindingMode::ByValue(
ast::Mutability::Immutable),
Spanned{
span: sp(6,7),
node: Ident::from_str("b")},
None
),
span: sp(6,7)
}),
id: ast::DUMMY_NODE_ID
}],
output: ast::FunctionRetTy::Default(sp(15, 15)),
variadic: false
}),
ast::Unsafety::Normal,
Spanned {
span: sp(0,2),
node: ast::Constness::NotConst,
},
Abi::Rust,
ast::Generics{
params: Vec::new(),
where_clause: ast::WhereClause {
pat: P(ast::Pat {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
node: PatKind::Ident(
ast::BindingMode::ByValue(
ast::Mutability::Immutable),
Spanned{
span: sp(6,7),
node: Ident::from_str("b")},
None
),
span: sp(6,7)
}),
id: ast::DUMMY_NODE_ID
}],
output: ast::FunctionRetTy::Default(sp(15, 15)),
variadic: false
}),
ast::Unsafety::Normal,
Spanned {
span: sp(0,2),
node: ast::Constness::NotConst,
},
Abi::Rust,
ast::Generics{
params: Vec::new(),
where_clause: ast::WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
span: syntax_pos::DUMMY_SP,
},
span: syntax_pos::DUMMY_SP,
},
span: syntax_pos::DUMMY_SP,
},
P(ast::Block {
stmts: vec![ast::Stmt {
node: ast::StmtKind::Semi(P(ast::Expr{
P(ast::Block {
stmts: vec![ast::Stmt {
node: ast::StmtKind::Semi(P(ast::Expr{
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None,
ast::Path{
span:sp(17,18),
segments: vec![str2seg("b", 17, 18)],
}),
span: sp(17,18),
attrs: ThinVec::new()})),
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None,
ast::Path{
span:sp(17,18),
segments: vec![str2seg("b", 17, 18)],
}),
span: sp(17,18),
attrs: ThinVec::new()})),
span: sp(17,19)}],
id: ast::DUMMY_NODE_ID,
span: sp(17,19)}],
id: ast::DUMMY_NODE_ID,
rules: ast::BlockCheckMode::Default, // no idea
span: sp(15,21),
recovered: false,
})),
vis: respan(sp(0, 0), ast::VisibilityKind::Inherited),
span: sp(0,21)})));
rules: ast::BlockCheckMode::Default, // no idea
span: sp(15,21),
recovered: false,
})),
vis: respan(sp(0, 0), ast::VisibilityKind::Inherited),
span: sp(0,21)})));
})
}
#[test] fn parse_use() {
let use_s = "use foo::bar::baz;";
let vitem = string_to_item(use_s.to_string()).unwrap();
let vitem_s = item_to_string(&vitem);
assert_eq!(&vitem_s[..], use_s);
with_globals(|| {
let use_s = "use foo::bar::baz;";
let vitem = string_to_item(use_s.to_string()).unwrap();
let vitem_s = item_to_string(&vitem);
assert_eq!(&vitem_s[..], use_s);
let use_s = "use foo::bar as baz;";
let vitem = string_to_item(use_s.to_string()).unwrap();
let vitem_s = item_to_string(&vitem);
assert_eq!(&vitem_s[..], use_s);
let use_s = "use foo::bar as baz;";
let vitem = string_to_item(use_s.to_string()).unwrap();
let vitem_s = item_to_string(&vitem);
assert_eq!(&vitem_s[..], use_s);
})
}
#[test] fn parse_extern_crate() {
let ex_s = "extern crate foo;";
let vitem = string_to_item(ex_s.to_string()).unwrap();
let vitem_s = item_to_string(&vitem);
assert_eq!(&vitem_s[..], ex_s);
with_globals(|| {
let ex_s = "extern crate foo;";
let vitem = string_to_item(ex_s.to_string()).unwrap();
let vitem_s = item_to_string(&vitem);
assert_eq!(&vitem_s[..], ex_s);
let ex_s = "extern crate foo as bar;";
let vitem = string_to_item(ex_s.to_string()).unwrap();
let vitem_s = item_to_string(&vitem);
assert_eq!(&vitem_s[..], ex_s);
let ex_s = "extern crate foo as bar;";
let vitem = string_to_item(ex_s.to_string()).unwrap();
let vitem_s = item_to_string(&vitem);
assert_eq!(&vitem_s[..], ex_s);
})
}
fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
@ -988,31 +1011,36 @@ mod tests {
}
#[test] fn span_of_self_arg_pat_idents_are_correct() {
with_globals(|| {
let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
"impl z { fn a (&mut self, &myarg: i32) {} }",
"impl z { fn a (&'a self, &myarg: i32) {} }",
"impl z { fn a (self, &myarg: i32) {} }",
"impl z { fn a (self: Foo, &myarg: i32) {} }",
];
let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
"impl z { fn a (&mut self, &myarg: i32) {} }",
"impl z { fn a (&'a self, &myarg: i32) {} }",
"impl z { fn a (self, &myarg: i32) {} }",
"impl z { fn a (self: Foo, &myarg: i32) {} }",
];
for &src in &srcs {
let spans = get_spans_of_pat_idents(src);
let (lo, hi) = (spans[0].lo(), spans[0].hi());
assert!("self" == &src[lo.to_usize()..hi.to_usize()],
"\"{}\" != \"self\". src=\"{}\"",
&src[lo.to_usize()..hi.to_usize()], src)
}
for &src in &srcs {
let spans = get_spans_of_pat_idents(src);
let (lo, hi) = (spans[0].lo(), spans[0].hi());
assert!("self" == &src[lo.to_usize()..hi.to_usize()],
"\"{}\" != \"self\". src=\"{}\"",
&src[lo.to_usize()..hi.to_usize()], src)
}
})
}
#[test] fn parse_exprs () {
// just make sure that they parse....
string_to_expr("3 + 4".to_string());
string_to_expr("a::z.froob(b,&(987+3))".to_string());
with_globals(|| {
// just make sure that they parse....
string_to_expr("3 + 4".to_string());
string_to_expr("a::z.froob(b,&(987+3))".to_string());
})
}
#[test] fn attrs_fix_bug () {
string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
with_globals(|| {
string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
-> Result<Box<Writer>, String> {
#[cfg(windows)]
fn wb() -> c_int {
@ -1024,49 +1052,54 @@ mod tests {
let mut fflags: c_int = wb();
}".to_string());
})
}
#[test] fn crlf_doc_comments() {
let sess = ParseSess::new(FilePathMapping::empty());
with_globals(|| {
let sess = ParseSess::new(FilePathMapping::empty());
let name = FileName::Custom("source".to_string());
let source = "/// doc comment\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name.clone(), source, &sess)
.unwrap().unwrap();
let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
assert_eq!(doc, "/// doc comment");
let name = FileName::Custom("source".to_string());
let source = "/// doc comment\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name.clone(), source, &sess)
.unwrap().unwrap();
let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
assert_eq!(doc, "/// doc comment");
let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name.clone(), source, &sess)
.unwrap().unwrap();
let docs = item.attrs.iter().filter(|a| a.path == "doc")
.map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
assert_eq!(&docs[..], b);
let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name.clone(), source, &sess)
.unwrap().unwrap();
let docs = item.attrs.iter().filter(|a| a.path == "doc")
.map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
assert_eq!(&docs[..], b);
let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap();
let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
assert_eq!(doc, "/** doc comment\n * with CRLF */");
let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap();
let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
assert_eq!(doc, "/** doc comment\n * with CRLF */");
});
}
#[test]
fn ttdelim_span() {
let sess = ParseSess::new(FilePathMapping::empty());
let expr = parse::parse_expr_from_source_str(PathBuf::from("foo").into(),
"foo!( fn main() { body } )".to_string(), &sess).unwrap();
with_globals(|| {
let sess = ParseSess::new(FilePathMapping::empty());
let expr = parse::parse_expr_from_source_str(PathBuf::from("foo").into(),
"foo!( fn main() { body } )".to_string(), &sess).unwrap();
let tts: Vec<_> = match expr.node {
ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
_ => panic!("not a macro"),
};
let tts: Vec<_> = match expr.node {
ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
_ => panic!("not a macro"),
};
let span = tts.iter().rev().next().unwrap().span();
let span = tts.iter().rev().next().unwrap().span();
match sess.codemap().span_to_snippet(span) {
Ok(s) => assert_eq!(&s[..], "{ body }"),
Err(_) => panic!("could not get snippet"),
}
match sess.codemap().span_to_snippet(span) {
Ok(s) => assert_eq!(&s[..], "{ body }"),
Err(_) => panic!("could not get snippet"),
}
});
}
// This tests that when parsing a string (rather than a file) we don't try
@ -1074,17 +1107,19 @@ mod tests {
// See `recurse_into_file_modules` in the parser.
#[test]
fn out_of_line_mod() {
let sess = ParseSess::new(FilePathMapping::empty());
let item = parse_item_from_source_str(
PathBuf::from("foo").into(),
"mod foo { struct S; mod this_does_not_exist; }".to_owned(),
&sess,
).unwrap().unwrap();
with_globals(|| {
let sess = ParseSess::new(FilePathMapping::empty());
let item = parse_item_from_source_str(
PathBuf::from("foo").into(),
"mod foo { struct S; mod this_does_not_exist; }".to_owned(),
&sess,
).unwrap().unwrap();
if let ast::ItemKind::Mod(ref m) = item.node {
assert!(m.items.len() == 2);
} else {
panic!();
}
if let ast::ItemKind::Mod(ref m) = item.node {
assert!(m.items.len() == 2);
} else {
panic!();
}
});
}
}
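Every test fix in this file follows the same rule: anything that interns a symbol, allocates a span, or touches hygiene data has to run inside `with_globals`, which installs the scoped globals that used to be created lazily in thread-local storage. For an out-of-tree consumer of libsyntax the shape is the same; the sketch below is illustrative and not part of the commit, using only APIs that appear in this diff (`with_globals`, `ParseSess::new`, `parse_expr_from_source_str`).

// Hedged sketch of a standalone libsyntax user after this change; a real
// program needs nightly because libsyntax is an unstable rustc crate.
#![feature(rustc_private)]

extern crate syntax;

use std::path::PathBuf;
use syntax::codemap::FilePathMapping;
use syntax::parse::{self, ParseSess};

fn main() {
    // Without this wrapper the first symbol/span allocation would panic,
    // since the globals are no longer created lazily per thread.
    syntax::with_globals(|| {
        let sess = ParseSess::new(FilePathMapping::empty());
        let _expr = parse::parse_expr_from_source_str(
            PathBuf::from("demo").into(),
            "1 + 2".to_string(),
            &sess,
        ).unwrap();
    });
}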


@ -3178,36 +3178,41 @@ mod tests {
use ast;
use codemap;
use syntax_pos;
use with_globals;
#[test]
fn test_fun_to_string() {
let abba_ident = ast::Ident::from_str("abba");
with_globals(|| {
let abba_ident = ast::Ident::from_str("abba");
let decl = ast::FnDecl {
inputs: Vec::new(),
output: ast::FunctionRetTy::Default(syntax_pos::DUMMY_SP),
variadic: false
};
let generics = ast::Generics::default();
assert_eq!(fun_to_string(&decl, ast::Unsafety::Normal,
ast::Constness::NotConst,
abba_ident, &generics),
"fn abba()");
let decl = ast::FnDecl {
inputs: Vec::new(),
output: ast::FunctionRetTy::Default(syntax_pos::DUMMY_SP),
variadic: false
};
let generics = ast::Generics::default();
assert_eq!(fun_to_string(&decl, ast::Unsafety::Normal,
ast::Constness::NotConst,
abba_ident, &generics),
"fn abba()");
})
}
#[test]
fn test_variant_to_string() {
let ident = ast::Ident::from_str("principal_skinner");
with_globals(|| {
let ident = ast::Ident::from_str("principal_skinner");
let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ {
name: ident,
attrs: Vec::new(),
// making this up as I go.... ?
data: ast::VariantData::Unit(ast::DUMMY_NODE_ID),
disr_expr: None,
});
let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ {
name: ident,
attrs: Vec::new(),
// making this up as I go.... ?
data: ast::VariantData::Unit(ast::DUMMY_NODE_ID),
disr_expr: None,
});
let varstr = variant_to_string(&var);
assert_eq!(varstr, "principal_skinner");
let varstr = variant_to_string(&var);
assert_eq!(varstr, "principal_skinner");
})
}
}


@ -18,6 +18,7 @@ use std::str;
use std::sync::{Arc, Mutex};
use std::path::Path;
use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan};
use with_globals;
/// Identify a position in the text by the Nth occurrence of a string.
struct Position {
@ -46,37 +47,39 @@ impl<T: Write> Write for Shared<T> {
}
fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) {
let output = Arc::new(Mutex::new(Vec::new()));
with_globals(|| {
let output = Arc::new(Mutex::new(Vec::new()));
let code_map = Lrc::new(CodeMap::new(FilePathMapping::empty()));
code_map.new_filemap_and_lines(Path::new("test.rs"), &file_text);
let code_map = Lrc::new(CodeMap::new(FilePathMapping::empty()));
code_map.new_filemap_and_lines(Path::new("test.rs"), &file_text);
let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
let mut msp = MultiSpan::from_span(primary_span);
for span_label in span_labels {
let span = make_span(&file_text, &span_label.start, &span_label.end);
msp.push_span_label(span, span_label.label.to_string());
println!("span: {:?} label: {:?}", span, span_label.label);
println!("text: {:?}", code_map.span_to_snippet(span));
}
let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
let mut msp = MultiSpan::from_span(primary_span);
for span_label in span_labels {
let span = make_span(&file_text, &span_label.start, &span_label.end);
msp.push_span_label(span, span_label.label.to_string());
println!("span: {:?} label: {:?}", span, span_label.label);
println!("text: {:?}", code_map.span_to_snippet(span));
}
let emitter = EmitterWriter::new(Box::new(Shared { data: output.clone() }),
Some(code_map.clone()),
false,
false);
let handler = Handler::with_emitter(true, false, Box::new(emitter));
handler.span_err(msp, "foo");
let emitter = EmitterWriter::new(Box::new(Shared { data: output.clone() }),
Some(code_map.clone()),
false,
false);
let handler = Handler::with_emitter(true, false, Box::new(emitter));
handler.span_err(msp, "foo");
assert!(expected_output.chars().next() == Some('\n'),
"expected output should begin with newline");
let expected_output = &expected_output[1..];
assert!(expected_output.chars().next() == Some('\n'),
"expected output should begin with newline");
let expected_output = &expected_output[1..];
let bytes = output.lock().unwrap();
let actual_output = str::from_utf8(&bytes).unwrap();
println!("expected output:\n------\n{}------", expected_output);
println!("actual output:\n------\n{}------", actual_output);
let bytes = output.lock().unwrap();
let actual_output = str::from_utf8(&bytes).unwrap();
println!("expected output:\n------\n{}------", expected_output);
println!("actual output:\n------\n{}------", actual_output);
assert!(expected_output == actual_output)
assert!(expected_output == actual_output)
})
}
fn make_span(file_text: &str, start: &Position, end: &Position) -> Span {


@ -599,6 +599,7 @@ impl Hash for ThinTokenStream {
mod tests {
use super::*;
use syntax::ast::Ident;
use with_globals;
use syntax_pos::{Span, BytePos, NO_EXPANSION};
use parse::token::Token;
use util::parser_testing::string_to_stream;
@ -613,67 +614,83 @@ mod tests {
#[test]
fn test_concat() {
let test_res = string_to_ts("foo::bar::baz");
let test_fst = string_to_ts("foo::bar");
let test_snd = string_to_ts("::baz");
let eq_res = TokenStream::concat(vec![test_fst, test_snd]);
assert_eq!(test_res.trees().count(), 5);
assert_eq!(eq_res.trees().count(), 5);
assert_eq!(test_res.eq_unspanned(&eq_res), true);
with_globals(|| {
let test_res = string_to_ts("foo::bar::baz");
let test_fst = string_to_ts("foo::bar");
let test_snd = string_to_ts("::baz");
let eq_res = TokenStream::concat(vec![test_fst, test_snd]);
assert_eq!(test_res.trees().count(), 5);
assert_eq!(eq_res.trees().count(), 5);
assert_eq!(test_res.eq_unspanned(&eq_res), true);
})
}
#[test]
fn test_to_from_bijection() {
let test_start = string_to_ts("foo::bar(baz)");
let test_end = test_start.trees().collect();
assert_eq!(test_start, test_end)
with_globals(|| {
let test_start = string_to_ts("foo::bar(baz)");
let test_end = test_start.trees().collect();
assert_eq!(test_start, test_end)
})
}
#[test]
fn test_eq_0() {
let test_res = string_to_ts("foo");
let test_eqs = string_to_ts("foo");
assert_eq!(test_res, test_eqs)
with_globals(|| {
let test_res = string_to_ts("foo");
let test_eqs = string_to_ts("foo");
assert_eq!(test_res, test_eqs)
})
}
#[test]
fn test_eq_1() {
let test_res = string_to_ts("::bar::baz");
let test_eqs = string_to_ts("::bar::baz");
assert_eq!(test_res, test_eqs)
with_globals(|| {
let test_res = string_to_ts("::bar::baz");
let test_eqs = string_to_ts("::bar::baz");
assert_eq!(test_res, test_eqs)
})
}
#[test]
fn test_eq_3() {
let test_res = string_to_ts("");
let test_eqs = string_to_ts("");
assert_eq!(test_res, test_eqs)
with_globals(|| {
let test_res = string_to_ts("");
let test_eqs = string_to_ts("");
assert_eq!(test_res, test_eqs)
})
}
#[test]
fn test_diseq_0() {
let test_res = string_to_ts("::bar::baz");
let test_eqs = string_to_ts("bar::baz");
assert_eq!(test_res == test_eqs, false)
with_globals(|| {
let test_res = string_to_ts("::bar::baz");
let test_eqs = string_to_ts("bar::baz");
assert_eq!(test_res == test_eqs, false)
})
}
#[test]
fn test_diseq_1() {
let test_res = string_to_ts("(bar,baz)");
let test_eqs = string_to_ts("bar,baz");
assert_eq!(test_res == test_eqs, false)
with_globals(|| {
let test_res = string_to_ts("(bar,baz)");
let test_eqs = string_to_ts("bar,baz");
assert_eq!(test_res == test_eqs, false)
})
}
#[test]
fn test_is_empty() {
let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
let test1: TokenStream =
TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"))).into();
let test2 = string_to_ts("foo(bar::baz)");
with_globals(|| {
let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
let test1: TokenStream =
TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"))).into();
let test2 = string_to_ts("foo(bar::baz)");
assert_eq!(test0.is_empty(), true);
assert_eq!(test1.is_empty(), false);
assert_eq!(test2.is_empty(), false);
assert_eq!(test0.is_empty(), true);
assert_eq!(test1.is_empty(), false);
assert_eq!(test2.is_empty(), false);
})
}
#[test]


@ -11,4 +11,5 @@ crate-type = ["dylib"]
[dependencies]
serialize = { path = "../libserialize" }
rustc_data_structures = { path = "../librustc_data_structures" }
scoped-tls = { version = "0.1.1", features = ["nightly"] }
unicode-width = "0.1.4"
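The `scoped-tls` dependency added above supplies the `scoped_thread_local!` macro used in this crate's `lib.rs`. Unlike `thread_local!`, a scoped static holds no value until a caller installs one with `set` for the duration of a closure, and reading it outside such a scope panics. A standalone sketch of those semantics follows; the `Session`/`SESSION` names are illustrative, not from the compiler.

// Demonstrates set/with/is_set from the scoped-tls crate.
#[macro_use]
extern crate scoped_tls;

struct Session {
    verbose: bool,
}

scoped_thread_local!(static SESSION: Session);

fn log(msg: &str) {
    // `with` borrows the currently installed value; it panics if no
    // enclosing `set` is active on this thread.
    SESSION.with(|s| {
        if s.verbose {
            println!("{}", msg);
        }
    })
}

fn main() {
    assert!(!SESSION.is_set());
    let sess = Session { verbose: true };
    // `set` lends `sess` to the static for the closure's duration only;
    // nothing is initialized lazily and nothing outlives the scope.
    SESSION.set(&sess, || {
        assert!(SESSION.is_set());
        log("inside the scope");
    });
    assert!(!SESSION.is_set());
}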


@ -15,11 +15,11 @@
//! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216.
//! DOI=10.1017/S0956796812000093 <http://dx.doi.org/10.1017/S0956796812000093>
use GLOBALS;
use Span;
use symbol::{Ident, Symbol};
use serialize::{Encodable, Decodable, Encoder, Decoder};
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt;
@ -119,7 +119,7 @@ impl Mark {
}
}
struct HygieneData {
pub struct HygieneData {
marks: Vec<MarkData>,
syntax_contexts: Vec<SyntaxContextData>,
markings: HashMap<(SyntaxContext, Mark), SyntaxContext>,
@ -127,7 +127,7 @@ struct HygieneData {
}
impl HygieneData {
fn new() -> Self {
pub fn new() -> Self {
HygieneData {
marks: vec![MarkData {
parent: Mark::root(),
@ -145,10 +145,7 @@ impl HygieneData {
}
fn with<T, F: FnOnce(&mut HygieneData) -> T>(f: F) -> T {
thread_local! {
static HYGIENE_DATA: RefCell<HygieneData> = RefCell::new(HygieneData::new());
}
HYGIENE_DATA.with(|data| f(&mut *data.borrow_mut()))
GLOBALS.with(|globals| f(&mut *globals.hygiene_data.borrow_mut()))
}
}
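`HygieneData::with` above, and `with_span_interner`/`with_interner` further down, all receive the same treatment: the lazily-initialized `thread_local!(RefCell<...>)` hidden inside the accessor becomes a field of the explicitly installed `GLOBALS` (the real fields use `rustc_data_structures::sync::Lock` rather than `RefCell`). Below is a purely illustrative before/after sketch of that accessor shape, with made-up `Registry` names.

// Not compiler code: just the shape of the refactoring applied to the
// hygiene data and the two interners.
#[macro_use]
extern crate scoped_tls;

use std::cell::RefCell;

#[derive(Default)]
struct Registry {
    names: Vec<String>,
}

struct MyGlobals {
    registry: RefCell<Registry>,
}

scoped_thread_local!(static MY_GLOBALS: MyGlobals);

// Old shape: per-thread state created silently on first access.
fn with_registry_old<T, F: FnOnce(&mut Registry) -> T>(f: F) -> T {
    thread_local!(static REGISTRY: RefCell<Registry> =
        RefCell::new(Registry::default()));
    REGISTRY.with(|r| f(&mut *r.borrow_mut()))
}

// New shape: a field lookup in whatever globals the caller installed.
fn with_registry_new<T, F: FnOnce(&mut Registry) -> T>(f: F) -> T {
    MY_GLOBALS.with(|g| f(&mut *g.registry.borrow_mut()))
}

fn main() {
    // The old accessor always works, at the cost of hidden state.
    with_registry_old(|r| r.names.push("old".to_string()));

    // The new accessor requires globals to be installed first, which is
    // exactly what `with_globals` does for libsyntax callers.
    let globals = MyGlobals { registry: RefCell::new(Registry::default()) };
    MY_GLOBALS.set(&globals, || {
        with_registry_new(|r| r.names.push("new".to_string()));
    });
}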


@ -35,10 +35,13 @@ use std::ops::{Add, Sub};
use std::path::PathBuf;
use rustc_data_structures::stable_hasher::StableHasher;
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::sync::{Lrc, Lock};
extern crate rustc_data_structures;
#[macro_use]
extern crate scoped_tls;
use serialize::{Encodable, Decodable, Encoder, Decoder};
extern crate serialize;
@ -54,6 +57,24 @@ pub use span_encoding::{Span, DUMMY_SP};
pub mod symbol;
pub struct Globals {
symbol_interner: Lock<symbol::Interner>,
span_interner: Lock<span_encoding::SpanInterner>,
hygiene_data: Lock<hygiene::HygieneData>,
}
impl Globals {
pub fn new() -> Globals {
Globals {
symbol_interner: Lock::new(symbol::Interner::fresh()),
span_interner: Lock::new(span_encoding::SpanInterner::default()),
hygiene_data: Lock::new(hygiene::HygieneData::new()),
}
}
}
scoped_thread_local!(pub static GLOBALS: Globals);
/// Differentiates between real files and common virtual files
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, RustcDecodable, RustcEncodable)]
pub enum FileName {


@ -14,11 +14,11 @@
// The encoding format for inline spans were obtained by optimizing over crates in rustc/libstd.
// See https://internals.rust-lang.org/t/rfc-compiler-refactoring-spans/1357/28
use GLOBALS;
use {BytePos, SpanData};
use hygiene::SyntaxContext;
use rustc_data_structures::fx::FxHashMap;
use std::cell::RefCell;
use std::hash::{Hash, Hasher};
/// A compressed span.
@ -133,7 +133,7 @@ fn decode(span: Span) -> SpanData {
}
#[derive(Default)]
struct SpanInterner {
pub struct SpanInterner {
spans: FxHashMap<SpanData, u32>,
span_data: Vec<SpanData>,
}
@ -156,11 +156,8 @@ impl SpanInterner {
}
}
// If an interner exists in TLS, return it. Otherwise, prepare a fresh one.
// If an interner exists, return it. Otherwise, prepare a fresh one.
#[inline]
fn with_span_interner<T, F: FnOnce(&mut SpanInterner) -> T>(f: F) -> T {
thread_local!(static INTERNER: RefCell<SpanInterner> = {
RefCell::new(SpanInterner::default())
});
INTERNER.with(|interner| f(&mut *interner.borrow_mut()))
GLOBALS.with(|globals| f(&mut *globals.span_interner.lock()))
}


@ -13,9 +13,9 @@
//! type, and vice versa.
use hygiene::SyntaxContext;
use GLOBALS;
use serialize::{Decodable, Decoder, Encodable, Encoder};
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt;
@ -247,7 +247,7 @@ macro_rules! declare_keywords {(
}
impl Interner {
fn fresh() -> Self {
pub fn fresh() -> Self {
Interner::prefill(&[$($string,)*])
}
}
@ -330,12 +330,10 @@ declare_keywords! {
(60, Union, "union")
}
// If an interner exists in TLS, return it. Otherwise, prepare a fresh one.
// If an interner exists, return it. Otherwise, prepare a fresh one.
#[inline]
fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T {
thread_local!(static INTERNER: RefCell<Interner> = {
RefCell::new(Interner::fresh())
});
INTERNER.with(|interner| f(&mut *interner.borrow_mut()))
GLOBALS.with(|globals| f(&mut *globals.symbol_interner.lock()))
}
/// Represents a string stored in the thread-local interner. Because the
@ -422,6 +420,7 @@ impl Encodable for InternedString {
#[cfg(test)]
mod tests {
use super::*;
use Globals;
#[test]
fn interner_tests() {
@ -444,7 +443,9 @@ mod tests {
#[test]
fn without_first_quote_test() {
let i = Ident::from_str("'break");
assert_eq!(i.without_first_quote().name, keywords::Break.name());
GLOBALS.set(&Globals::new(), || {
let i = Ident::from_str("'break");
assert_eq!(i.without_first_quote().name, keywords::Break.name());
});
}
}
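libsyntax_pos has no `with_globals` convenience of its own (that helper lives in libsyntax), so its test installs the scoped global directly via `GLOBALS.set(&Globals::new(), ...)`, as shown above. A hedged sketch of the same pattern for any code that only needs symbol interning; it assumes nothing beyond the items made public in this diff (`Globals`, `GLOBALS`, `Symbol::intern`).

// Sketch: install fresh globals, then interning works for the scope.
#![feature(rustc_private)]

extern crate syntax_pos;

use syntax_pos::{Globals, GLOBALS};
use syntax_pos::symbol::Symbol;

fn main() {
    GLOBALS.set(&Globals::new(), || {
        let a = Symbol::intern("hello");
        let b = Symbol::intern("hello");
        // Interning is idempotent within a single Globals scope.
        assert!(a == b);
    });
}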


@ -24,6 +24,10 @@ use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP;
fn main() {
syntax::with_globals(|| run());
}
fn run() {
let ps = syntax::parse::ParseSess::new(codemap::FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new(


@ -69,18 +69,20 @@ fn basic_sess(sysroot: PathBuf) -> (Session, Rc<CStore>, Box<TransCrate>) {
}
fn compile(code: String, output: PathBuf, sysroot: PathBuf) {
let (sess, cstore, trans) = basic_sess(sysroot);
let control = CompileController::basic();
let input = Input::Str { name: FileName::Anon, input: code };
let _ = compile_input(
trans,
&sess,
&cstore,
&None,
&input,
&None,
&Some(output),
None,
&control
);
syntax::with_globals(|| {
let (sess, cstore, trans) = basic_sess(sysroot);
let control = CompileController::basic();
let input = Input::Str { name: FileName::Anon, input: code };
let _ = compile_input(
trans,
&sess,
&cstore,
&None,
&input,
&None,
&Some(output),
None,
&control
);
});
}
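Because `GLOBALS` is scoped thread-local state rather than shared state, a worker thread that parses or builds AST has to enter the globals itself; calling `with_globals` on the main thread does not cover threads spawned from it. A short hedged sketch:

// Sketch: scoped TLS does not propagate to spawned threads, so each thread
// that uses libsyntax must call with_globals (or GLOBALS.set) on its own.
#![feature(rustc_private)]

extern crate syntax;

use std::thread;

fn main() {
    let worker = thread::spawn(|| {
        syntax::with_globals(|| {
            // parsing / AST work for this thread goes here
        });
    });
    worker.join().unwrap();
}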


@ -115,6 +115,10 @@ fn reject_stmt_parse(es: &str) {
}
fn main() {
syntax::with_globals(|| run());
}
fn run() {
let both = &["#[attr]", "#![attr]"];
let outer = &["#[attr]"];
let none = &[];


@ -27,6 +27,10 @@ use syntax::ptr::P;
use rustc_data_structures::sync::Lrc;
fn main() {
syntax::with_globals(|| run());
}
fn run() {
let parse_sess = ParseSess::new(FilePathMapping::empty());
let exp_cfg = ExpansionConfig::default("issue_35829".to_owned());
let mut resolver = DummyResolver;


@ -221,8 +221,11 @@ impl Folder for AddParens {
}
}
fn main() {
syntax::with_globals(|| run());
}
fn run() {
let ps = ParseSess::new(FilePathMapping::empty());
iter_exprs(2, &mut |e| {


@ -21,6 +21,10 @@ use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP;
fn main() {
syntax::with_globals(|| run());
}
fn run() {
let ps = syntax::parse::ParseSess::new(FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new(


@ -263,7 +263,10 @@ fn main() {
*slot.borrow_mut() = Some((None, String::from("https://play.rust-lang.org/")));
});
let (format, dst) = parse_args();
if let Err(e) = main_with_result(format, &dst) {
let result = syntax::with_globals(move || {
main_with_result(format, &dst)
});
if let Err(e) = result {
panic!("{}", e.description());
}
}
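As the rustdoc change above shows, `with_globals` passes the closure's return value through, so a fallible body can run inside the globals scope while error handling stays outside it. A small sketch of the same shape (the `main_with_result` body here is illustrative):

// Mirror of the rustdoc pattern: fallible work inside the scope, result
// handled outside it.
#![feature(rustc_private)]

extern crate syntax;

fn main_with_result(input: &str) -> Result<(), String> {
    // anything that needs the syntax globals would happen here
    if input.is_empty() {
        return Err("no input".to_string());
    }
    Ok(())
}

fn main() {
    let input = String::from("fn main() {}");
    let result = syntax::with_globals(move || main_with_result(&input));
    if let Err(e) = result {
        eprintln!("error: {}", e);
        std::process::exit(1);
    }
}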


@ -93,6 +93,7 @@ static WHITELIST: &'static [Crate] = &[
Crate("regex-syntax"),
Crate("remove_dir_all"),
Crate("rustc-demangle"),
Crate("scoped-tls"),
Crate("smallvec"),
Crate("stable_deref_trait"),
Crate("tempdir"),