Rustfmting librustc_driver.

Jose Narvaez 2015-11-10 20:48:44 +00:00
parent ea422eb4de
commit 4e64645228
5 changed files with 772 additions and 718 deletions
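
For context: this commit is a purely mechanical reformat of librustc_driver with rustfmt. The dominant changes visible in the hunks below are (1) call arguments moved to one per line, aligned under the opening parenthesis, (2) closures passed to helpers such as `time` rewritten either onto the call's last line or into `|| { ... }` block form, (3) trailing commas added to match arms and struct literals, and (4) long string literals re-wrapped with `\` continuations. A minimal before/after sketch of the closure-formatting change, using a simplified stand-in for the compiler's `time` helper (names and behaviour here are illustrative, not the real rustc internals):

// Simplified stand-in for the `time` helper used throughout this diff;
// the real one lives in the compiler and prints per-pass timings.
fn time<T, F: FnOnce() -> T>(enabled: bool, what: &str, f: F) -> T {
    let start = std::time::Instant::now();
    let result = f();
    if enabled {
        println!("time: {:?}\t{}", start.elapsed(), what);
    }
    result
}

fn main() {
    // Pre-rustfmt shape, as seen on the removed lines of the diff:
    //
    //     time(true, "early lint checks", ||
    //         run_lints());
    //
    // Post-rustfmt shape, as seen on the added lines: one argument per
    // line, with the closure kept on the last line or expanded to a block.
    let sum = time(true,
                   "early lint checks",
                   || (1..=1_000u64).sum::<u64>());
    println!("sum = {}", sum);
}
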

@ -80,27 +80,17 @@ pub fn compile_input(sess: Session,
controller_entry_point!(after_parse,
sess,
CompileState::state_after_parse(input,
&sess,
outdir,
&krate));
CompileState::state_after_parse(input, &sess, outdir, &krate));
let outputs = build_output_filenames(input,
outdir,
output,
&krate.attrs,
&sess);
let id = link::find_crate_name(Some(&sess),
&krate.attrs,
input);
let expanded_crate
= match phase_2_configure_and_expand(&sess,
krate,
&id[..],
addl_plugins) {
None => return,
Some(k) => k
};
let outputs = build_output_filenames(input, outdir, output, &krate.attrs, &sess);
let id = link::find_crate_name(Some(&sess), &krate.attrs, input);
let expanded_crate = match phase_2_configure_and_expand(&sess,
krate,
&id[..],
addl_plugins) {
None => return,
Some(k) => k,
};
(outputs, expanded_crate, id)
};
@ -139,9 +129,9 @@ pub fn compile_input(sess: Session,
front::check_attr::check_crate(&sess, &expanded_crate);
});
time(sess.time_passes(), "early lint checks", || {
lint::check_ast_crate(&sess, &expanded_crate)
});
time(sess.time_passes(),
"early lint checks",
|| lint::check_ast_crate(&sess, &expanded_crate));
phase_3_run_analysis_passes(&sess,
ast_map,
@ -150,40 +140,43 @@ pub fn compile_input(sess: Session,
control.make_glob_map,
|tcx, mir_map, analysis| {
{
let state = CompileState::state_after_analysis(input,
&tcx.sess,
outdir,
&expanded_crate,
tcx.map.krate(),
&analysis,
tcx,
&lcx,
&id);
(control.after_analysis.callback)(state);
{
let state =
CompileState::state_after_analysis(input,
&tcx.sess,
outdir,
&expanded_crate,
tcx.map.krate(),
&analysis,
tcx,
&lcx,
&id);
(control.after_analysis.callback)(state);
tcx.sess.abort_if_errors();
if control.after_analysis.stop == Compilation::Stop {
return Err(());
}
}
tcx.sess.abort_if_errors();
if control.after_analysis.stop == Compilation::Stop {
return Err(());
}
}
if log_enabled!(::log::INFO) {
println!("Pre-trans");
tcx.print_debug_stats();
}
let trans = phase_4_translate_to_llvm(tcx, &mir_map, analysis);
if log_enabled!(::log::INFO) {
println!("Pre-trans");
tcx.print_debug_stats();
}
let trans = phase_4_translate_to_llvm(tcx,
&mir_map,
analysis);
if log_enabled!(::log::INFO) {
println!("Post-trans");
tcx.print_debug_stats();
}
if log_enabled!(::log::INFO) {
println!("Post-trans");
tcx.print_debug_stats();
}
// Discard interned strings as they are no longer required.
token::get_ident_interner().clear();
// Discard interned strings as they are no longer required.
token::get_ident_interner().clear();
Ok((outputs, trans))
})
Ok((outputs, trans))
})
};
let (outputs, trans) = if let Ok(out) = result {
@ -196,10 +189,7 @@ pub fn compile_input(sess: Session,
controller_entry_point!(after_llvm,
sess,
CompileState::state_after_llvm(input,
&sess,
outdir,
&trans));
CompileState::state_after_llvm(input, &sess, outdir, &trans));
phase_6_link_output(&sess, &trans, &outputs);
}
@ -214,7 +204,7 @@ pub fn source_name(input: &Input) -> String {
match *input {
// FIXME (#9639): This needs to handle non-utf8 paths
Input::File(ref ifile) => ifile.to_str().unwrap().to_string(),
Input::Str(_) => anon_src()
Input::Str(_) => anon_src(),
}
}
@ -247,7 +237,7 @@ impl<'a> CompileController<'a> {
CompileController {
after_parse: PhaseController::basic(),
after_expand: PhaseController::basic(),
after_write_deps: PhaseController::basic(),
after_write_deps: PhaseController::basic(),
after_analysis: PhaseController::basic(),
after_llvm: PhaseController::basic(),
make_glob_map: resolve::MakeGlobMap::No,
@ -317,10 +307,7 @@ impl<'a, 'ast, 'tcx> CompileState<'a, 'ast, 'tcx> {
out_dir: &'a Option<PathBuf>,
krate: &'a ast::Crate)
-> CompileState<'a, 'ast, 'tcx> {
CompileState {
krate: Some(krate),
.. CompileState::empty(input, session, out_dir)
}
CompileState { krate: Some(krate), ..CompileState::empty(input, session, out_dir) }
}
fn state_after_expand(input: &'a Input,
@ -332,7 +319,7 @@ impl<'a, 'ast, 'tcx> CompileState<'a, 'ast, 'tcx> {
CompileState {
crate_name: Some(crate_name),
expanded_crate: Some(expanded_crate),
.. CompileState::empty(input, session, out_dir)
..CompileState::empty(input, session, out_dir)
}
}
@ -351,7 +338,7 @@ impl<'a, 'ast, 'tcx> CompileState<'a, 'ast, 'tcx> {
krate: Some(krate),
hir_crate: Some(hir_crate),
lcx: Some(lcx),
.. CompileState::empty(input, session, out_dir)
..CompileState::empty(input, session, out_dir)
}
}
@ -372,7 +359,7 @@ impl<'a, 'ast, 'tcx> CompileState<'a, 'ast, 'tcx> {
hir_crate: Some(hir_crate),
lcx: Some(lcx),
crate_name: Some(crate_name),
.. CompileState::empty(input, session, out_dir)
..CompileState::empty(input, session, out_dir)
}
}
@ -382,15 +369,11 @@ impl<'a, 'ast, 'tcx> CompileState<'a, 'ast, 'tcx> {
out_dir: &'a Option<PathBuf>,
trans: &'a trans::CrateTranslation)
-> CompileState<'a, 'ast, 'tcx> {
CompileState {
trans: Some(trans),
.. CompileState::empty(input, session, out_dir)
}
CompileState { trans: Some(trans), ..CompileState::empty(input, session, out_dir) }
}
}
pub fn phase_1_parse_input(sess: &Session, cfg: ast::CrateConfig, input: &Input)
-> ast::Crate {
pub fn phase_1_parse_input(sess: &Session, cfg: ast::CrateConfig, input: &Input) -> ast::Crate {
// These may be left in an incoherent state after a previous compile.
// `clear_tables` and `get_ident_interner().clear()` can be used to free
// memory, but they do not restore the initial state.
@ -448,24 +431,21 @@ pub fn phase_2_configure_and_expand(sess: &Session,
// baz! should not use this definition unless foo is enabled.
let mut feature_gated_cfgs = vec![];
krate = time(time_passes, "configuration 1", ||
syntax::config::strip_unconfigured_items(sess.diagnostic(), krate,
&mut feature_gated_cfgs));
krate = time(time_passes, "configuration 1", || {
syntax::config::strip_unconfigured_items(sess.diagnostic(), krate, &mut feature_gated_cfgs)
});
*sess.crate_types.borrow_mut() =
collect_crate_types(sess, &krate.attrs);
*sess.crate_metadata.borrow_mut() =
collect_crate_metadata(sess, &krate.attrs);
*sess.crate_types.borrow_mut() = collect_crate_types(sess, &krate.attrs);
*sess.crate_metadata.borrow_mut() = collect_crate_metadata(sess, &krate.attrs);
time(time_passes, "recursion limit", || {
middle::recursion_limit::update_recursion_limit(sess, &krate);
});
time(time_passes, "gated macro checking", || {
let features =
syntax::feature_gate::check_crate_macros(sess.codemap(),
&sess.parse_sess.span_diagnostic,
&krate);
let features = syntax::feature_gate::check_crate_macros(sess.codemap(),
&sess.parse_sess.span_diagnostic,
&krate);
// these need to be set "early" so that expansion sees `quote` if enabled.
*sess.features.borrow_mut() = features;
@ -473,27 +453,29 @@ pub fn phase_2_configure_and_expand(sess: &Session,
});
krate = time(time_passes, "crate injection", ||
syntax::std_inject::maybe_inject_crates_ref(krate,
sess.opts.alt_std_name.clone()));
krate = time(time_passes, "crate injection", || {
syntax::std_inject::maybe_inject_crates_ref(krate, sess.opts.alt_std_name.clone())
});
let macros = time(time_passes, "macro loading", ||
metadata::macro_import::read_macro_defs(sess, &krate));
let macros = time(time_passes,
"macro loading",
|| metadata::macro_import::read_macro_defs(sess, &krate));
let mut addl_plugins = Some(addl_plugins);
let registrars = time(time_passes, "plugin loading", ||
plugin::load::load_plugins(sess, &krate, addl_plugins.take().unwrap()));
let registrars = time(time_passes, "plugin loading", || {
plugin::load::load_plugins(sess, &krate, addl_plugins.take().unwrap())
});
let mut registry = Registry::new(sess, &krate);
time(time_passes, "plugin registration", || {
if sess.features.borrow().rustc_diagnostic_macros {
registry.register_macro("__diagnostic_used",
diagnostics::plugin::expand_diagnostic_used);
diagnostics::plugin::expand_diagnostic_used);
registry.register_macro("__register_diagnostic",
diagnostics::plugin::expand_register_diagnostic);
diagnostics::plugin::expand_register_diagnostic);
registry.register_macro("__build_diagnostic_array",
diagnostics::plugin::expand_build_diagnostic_array);
diagnostics::plugin::expand_build_diagnostic_array);
}
for registrar in registrars {
@ -554,11 +536,11 @@ pub fn phase_2_configure_and_expand(sess: &Session,
trace_mac: sess.opts.debugging_opts.trace_macros,
};
let ret = syntax::ext::expand::expand_crate(&sess.parse_sess,
cfg,
macros,
syntax_exts,
&mut feature_gated_cfgs,
krate);
cfg,
macros,
syntax_exts,
&mut feature_gated_cfgs,
krate);
if cfg!(windows) {
env::set_var("PATH", &_old_path);
}
@ -570,11 +552,11 @@ pub fn phase_2_configure_and_expand(sess: &Session,
// much as possible (e.g. help the programmer avoid platform
// specific differences)
time(time_passes, "complete gated feature checking 1", || {
let features =
syntax::feature_gate::check_crate(sess.codemap(),
&sess.parse_sess.span_diagnostic,
&krate, &attributes,
sess.opts.unstable_features);
let features = syntax::feature_gate::check_crate(sess.codemap(),
&sess.parse_sess.span_diagnostic,
&krate,
&attributes,
sess.opts.unstable_features);
*sess.features.borrow_mut() = features;
sess.abort_if_errors();
});
@ -582,9 +564,9 @@ pub fn phase_2_configure_and_expand(sess: &Session,
// JBC: make CFG processing part of expansion to avoid this problem:
// strip again, in case expansion added anything with a #[cfg].
krate = time(time_passes, "configuration 2", ||
syntax::config::strip_unconfigured_items(sess.diagnostic(), krate,
&mut feature_gated_cfgs));
krate = time(time_passes, "configuration 2", || {
syntax::config::strip_unconfigured_items(sess.diagnostic(), krate, &mut feature_gated_cfgs)
});
time(time_passes, "gated configuration checking", || {
let features = sess.features.borrow();
@ -595,30 +577,31 @@ pub fn phase_2_configure_and_expand(sess: &Session,
}
});
krate = time(time_passes, "maybe building test harness", ||
syntax::test::modify_for_testing(&sess.parse_sess,
&sess.opts.cfg,
krate,
sess.diagnostic()));
krate = time(time_passes, "maybe building test harness", || {
syntax::test::modify_for_testing(&sess.parse_sess, &sess.opts.cfg, krate, sess.diagnostic())
});
krate = time(time_passes, "prelude injection", ||
syntax::std_inject::maybe_inject_prelude(&sess.parse_sess, krate));
krate = time(time_passes,
"prelude injection",
|| syntax::std_inject::maybe_inject_prelude(&sess.parse_sess, krate));
time(time_passes, "checking that all macro invocations are gone", ||
syntax::ext::expand::check_for_macros(&sess.parse_sess, &krate));
time(time_passes,
"checking that all macro invocations are gone",
|| syntax::ext::expand::check_for_macros(&sess.parse_sess, &krate));
time(time_passes, "checking for inline asm in case the target doesn't support it", ||
middle::check_no_asm::check_crate(sess, &krate));
time(time_passes,
"checking for inline asm in case the target doesn't support it",
|| middle::check_no_asm::check_crate(sess, &krate));
// One final feature gating of the true AST that gets compiled
// later, to make sure we've got everything (e.g. configuration
// can insert new attributes via `cfg_attr`)
time(time_passes, "complete gated feature checking 2", || {
let features =
syntax::feature_gate::check_crate(sess.codemap(),
&sess.parse_sess.span_diagnostic,
&krate, &attributes,
sess.opts.unstable_features);
let features = syntax::feature_gate::check_crate(sess.codemap(),
&sess.parse_sess.span_diagnostic,
&krate,
&attributes,
sess.opts.unstable_features);
*sess.features.borrow_mut() = features;
sess.abort_if_errors();
});
@ -626,10 +609,9 @@ pub fn phase_2_configure_and_expand(sess: &Session,
Some(krate)
}
pub fn assign_node_ids(sess: &Session,
krate: ast::Crate) -> ast::Crate {
pub fn assign_node_ids(sess: &Session, krate: ast::Crate) -> ast::Crate {
struct NodeIdAssigner<'a> {
sess: &'a Session
sess: &'a Session,
}
impl<'a> Folder for NodeIdAssigner<'a> {
@ -671,18 +653,18 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
make_glob_map: resolve::MakeGlobMap,
f: F)
-> R
where F: for<'a> FnOnce(&'a ty::ctxt<'tcx>,
MirMap<'tcx>,
ty::CrateAnalysis) -> R
where F: for<'a> FnOnce(&'a ty::ctxt<'tcx>, MirMap<'tcx>, ty::CrateAnalysis) -> R
{
let time_passes = sess.time_passes();
let krate = ast_map.krate();
time(time_passes, "external crate/lib resolution", ||
LocalCrateReader::new(sess, &ast_map).read_crates(krate));
time(time_passes,
"external crate/lib resolution",
|| LocalCrateReader::new(sess, &ast_map).read_crates(krate));
let lang_items = time(time_passes, "language item collection", ||
middle::lang_items::collect_language_items(&sess, &ast_map));
let lang_items = time(time_passes,
"language item collection",
|| middle::lang_items::collect_language_items(&sess, &ast_map));
let resolve::CrateMap {
def_map,
@ -691,8 +673,8 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
trait_map,
external_exports,
glob_map,
} =
time(time_passes, "resolution",
} = time(time_passes,
"resolution",
|| resolve::resolve_crate(sess, &ast_map, make_glob_map));
// Discard MTWT tables that aren't required past resolution.
@ -700,25 +682,29 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
syntax::ext::mtwt::clear_tables();
}
let named_region_map = time(time_passes, "lifetime resolution", ||
middle::resolve_lifetime::krate(sess, krate, &def_map.borrow()));
let named_region_map = time(time_passes,
"lifetime resolution",
|| middle::resolve_lifetime::krate(sess, krate, &def_map.borrow()));
time(time_passes, "looking for entry point",
time(time_passes,
"looking for entry point",
|| middle::entry::find_entry_point(sess, &ast_map));
sess.plugin_registrar_fn.set(
time(time_passes, "looking for plugin registrar", ||
plugin::build::find_plugin_registrar(
sess.diagnostic(), krate)));
sess.plugin_registrar_fn.set(time(time_passes, "looking for plugin registrar", || {
plugin::build::find_plugin_registrar(sess.diagnostic(), krate)
}));
let region_map = time(time_passes, "region resolution", ||
middle::region::resolve_crate(sess, krate));
let region_map = time(time_passes,
"region resolution",
|| middle::region::resolve_crate(sess, krate));
time(time_passes, "loop checking", ||
middle::check_loop::check_crate(sess, krate));
time(time_passes,
"loop checking",
|| middle::check_loop::check_crate(sess, krate));
time(time_passes, "static item recursion checking", ||
middle::check_static_recursion::check_crate(sess, krate, &def_map.borrow(), &ast_map));
time(time_passes,
"static item recursion checking",
|| middle::check_static_recursion::check_crate(sess, krate, &def_map.borrow(), &ast_map));
ty::ctxt::create_and_enter(sess,
arenas,
@ -731,91 +717,110 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
stability::Index::new(krate),
|tcx| {
// passes are timed inside typeck
typeck::check_crate(tcx, trait_map);
// passes are timed inside typeck
typeck::check_crate(tcx, trait_map);
time(time_passes, "const checking", ||
middle::check_const::check_crate(tcx));
time(time_passes,
"const checking",
|| middle::check_const::check_crate(tcx));
let (exported_items, public_items) =
time(time_passes, "privacy checking", ||
rustc_privacy::check_crate(tcx, &export_map, external_exports));
let (exported_items, public_items) =
time(time_passes, "privacy checking", || {
rustc_privacy::check_crate(tcx,
&export_map,
external_exports)
});
// Do not move this check past lint
time(time_passes, "stability index", ||
tcx.stability.borrow_mut().build(tcx, krate, &public_items));
// Do not move this check past lint
time(time_passes, "stability index", || {
tcx.stability.borrow_mut().build(tcx, krate, &public_items)
});
time(time_passes, "intrinsic checking", ||
middle::intrinsicck::check_crate(tcx));
time(time_passes,
"intrinsic checking",
|| middle::intrinsicck::check_crate(tcx));
time(time_passes, "effect checking", ||
middle::effect::check_crate(tcx));
time(time_passes,
"effect checking",
|| middle::effect::check_crate(tcx));
time(time_passes, "match checking", ||
middle::check_match::check_crate(tcx));
time(time_passes,
"match checking",
|| middle::check_match::check_crate(tcx));
let mir_map = match tcx.sess.opts.unstable_features {
UnstableFeatures::Disallow => {
// use this as a shorthand for beta/stable, and skip
// MIR construction there until known regressions are
// addressed
NodeMap()
}
UnstableFeatures::Allow | UnstableFeatures::Cheat => {
time(time_passes, "MIR dump", ||
mir::mir_map::build_mir_for_crate(tcx))
}
};
let mir_map = match tcx.sess.opts.unstable_features {
UnstableFeatures::Disallow => {
// use this as a shorthand for beta/stable, and skip
// MIR construction there until known regressions are
// addressed
NodeMap()
}
UnstableFeatures::Allow | UnstableFeatures::Cheat => {
time(time_passes,
"MIR dump",
|| mir::mir_map::build_mir_for_crate(tcx))
}
};
time(time_passes, "liveness checking", ||
middle::liveness::check_crate(tcx));
time(time_passes,
"liveness checking",
|| middle::liveness::check_crate(tcx));
time(time_passes, "borrow checking", ||
borrowck::check_crate(tcx));
time(time_passes,
"borrow checking",
|| borrowck::check_crate(tcx));
time(time_passes, "rvalue checking", ||
middle::check_rvalues::check_crate(tcx, krate));
time(time_passes,
"rvalue checking",
|| middle::check_rvalues::check_crate(tcx, krate));
// Avoid overwhelming user with errors if type checking failed.
// I'm not sure how helpful this is, to be honest, but it avoids a
// lot of annoying errors in the compile-fail tests (basically,
// lint warnings and so on -- kindck used to do this abort, but
// kindck is gone now). -nmatsakis
tcx.sess.abort_if_errors();
// Avoid overwhelming user with errors if type checking failed.
// I'm not sure how helpful this is, to be honest, but it avoids
// a
// lot of annoying errors in the compile-fail tests (basically,
// lint warnings and so on -- kindck used to do this abort, but
// kindck is gone now). -nmatsakis
tcx.sess.abort_if_errors();
let reachable_map =
time(time_passes, "reachability checking", ||
reachable::find_reachable(tcx, &exported_items));
let reachable_map =
time(time_passes,
"reachability checking",
|| reachable::find_reachable(tcx, &exported_items));
time(time_passes, "death checking", || {
middle::dead::check_crate(tcx,
&exported_items,
&reachable_map)
});
time(time_passes, "death checking", || {
middle::dead::check_crate(tcx,
&exported_items,
&reachable_map)
});
let ref lib_features_used =
time(time_passes, "stability checking", ||
stability::check_unstable_api_usage(tcx));
let ref lib_features_used =
time(time_passes,
"stability checking",
|| stability::check_unstable_api_usage(tcx));
time(time_passes, "unused lib feature checking", ||
stability::check_unused_or_stable_features(
&tcx.sess, lib_features_used));
time(time_passes, "unused lib feature checking", || {
stability::check_unused_or_stable_features(&tcx.sess,
lib_features_used)
});
time(time_passes, "lint checking", ||
lint::check_crate(tcx, krate, &exported_items));
time(time_passes,
"lint checking",
|| lint::check_crate(tcx, krate, &exported_items));
// The above three passes generate errors w/o aborting
tcx.sess.abort_if_errors();
// The above three passes generate errors w/o aborting
tcx.sess.abort_if_errors();
f(tcx, mir_map, ty::CrateAnalysis {
export_map: export_map,
exported_items: exported_items,
public_items: public_items,
reachable: reachable_map,
name: name,
glob_map: glob_map,
})
})
f(tcx,
mir_map,
ty::CrateAnalysis {
export_map: export_map,
exported_items: exported_items,
public_items: public_items,
reachable: reachable_map,
name: name,
glob_map: glob_map,
})
})
}
/// Run the translation phase to LLVM, after which the AST and analysis can
@ -826,12 +831,14 @@ pub fn phase_4_translate_to_llvm<'tcx>(tcx: &ty::ctxt<'tcx>,
-> trans::CrateTranslation {
let time_passes = tcx.sess.time_passes();
time(time_passes, "resolving dependency formats", ||
dependency_format::calculate(&tcx.sess));
time(time_passes,
"resolving dependency formats",
|| dependency_format::calculate(&tcx.sess));
// Option dance to work around the lack of stack once closures.
time(time_passes, "translation", move ||
trans::trans_crate(tcx, mir_map, analysis))
time(time_passes,
"translation",
move || trans::trans_crate(tcx, mir_map, analysis))
}
/// Run LLVM itself, producing a bitcode file, assembly file or object file
@ -842,8 +849,9 @@ pub fn phase_5_run_llvm_passes(sess: &Session,
if sess.opts.cg.no_integrated_as {
let mut map = HashMap::new();
map.insert(OutputType::Assembly, None);
time(sess.time_passes(), "LLVM passes", ||
write::run_passes(sess, trans, &map, outputs));
time(sess.time_passes(),
"LLVM passes",
|| write::run_passes(sess, trans, &map, outputs));
write::run_assembler(sess, outputs);
@ -852,11 +860,9 @@ pub fn phase_5_run_llvm_passes(sess: &Session,
fs::remove_file(&outputs.temp_path(OutputType::Assembly)).unwrap();
}
} else {
time(sess.time_passes(), "LLVM passes", ||
write::run_passes(sess,
trans,
&sess.opts.output_types,
outputs));
time(sess.time_passes(),
"LLVM passes",
|| write::run_passes(sess, trans, &sess.opts.output_types, outputs));
}
sess.abort_if_errors();
@ -867,17 +873,15 @@ pub fn phase_5_run_llvm_passes(sess: &Session,
pub fn phase_6_link_output(sess: &Session,
trans: &trans::CrateTranslation,
outputs: &OutputFilenames) {
time(sess.time_passes(), "linking", ||
link::link_binary(sess,
trans,
outputs,
&trans.link.crate_name));
time(sess.time_passes(),
"linking",
|| link::link_binary(sess, trans, outputs, &trans.link.crate_name));
}
fn escape_dep_filename(filename: &str) -> String {
// Apparently clang and gcc *only* escape spaces:
// http://llvm.org/klaus/clang/commit/9d50634cfc268ecc9a7250226dd5ca0e945240d4
filename.replace(" ", "\\ ")
filename.replace(" ", "\\ ")
}
fn write_out_deps(sess: &Session, outputs: &OutputFilenames, id: &str) {
@ -887,96 +891,101 @@ fn write_out_deps(sess: &Session, outputs: &OutputFilenames, id: &str) {
match *output_type {
OutputType::Exe => {
for output in sess.crate_types.borrow().iter() {
let p = link::filename_for_input(sess, *output, id,
outputs);
let p = link::filename_for_input(sess, *output, id, outputs);
out_filenames.push(p);
}
}
_ => { out_filenames.push(file); }
_ => {
out_filenames.push(file);
}
}
}
// Write out dependency rules to the dep-info file if requested
if !sess.opts.output_types.contains_key(&OutputType::DepInfo) {
return
return;
}
let deps_filename = outputs.path(OutputType::DepInfo);
let result = (|| -> io::Result<()> {
// Build a list of files used to compile the output and
// write Makefile-compatible dependency rules
let files: Vec<String> = sess.codemap().files.borrow()
.iter()
.filter(|fmap| fmap.is_real_file())
.filter(|fmap| !fmap.is_imported())
.map(|fmap| escape_dep_filename(&fmap.name))
.collect();
let mut file = try!(fs::File::create(&deps_filename));
for path in &out_filenames {
try!(write!(file,
"{}: {}\n\n", path.display(), files.join(" ")));
}
let result =
(|| -> io::Result<()> {
// Build a list of files used to compile the output and
// write Makefile-compatible dependency rules
let files: Vec<String> = sess.codemap()
.files
.borrow()
.iter()
.filter(|fmap| fmap.is_real_file())
.filter(|fmap| !fmap.is_imported())
.map(|fmap| escape_dep_filename(&fmap.name))
.collect();
let mut file = try!(fs::File::create(&deps_filename));
for path in &out_filenames {
try!(write!(file, "{}: {}\n\n", path.display(), files.join(" ")));
}
// Emit a fake target for each input file to the compilation. This
// prevents `make` from spitting out an error if a file is later
// deleted. For more info see #28735
for path in files {
try!(writeln!(file, "{}:", path));
}
Ok(())
})();
// Emit a fake target for each input file to the compilation. This
// prevents `make` from spitting out an error if a file is later
// deleted. For more info see #28735
for path in files {
try!(writeln!(file, "{}:", path));
}
Ok(())
})();
match result {
Ok(()) => {}
Err(e) => {
sess.fatal(&format!("error writing dependencies to `{}`: {}",
deps_filename.display(), e));
deps_filename.display(),
e));
}
}
}
pub fn collect_crate_types(session: &Session,
attrs: &[ast::Attribute]) -> Vec<config::CrateType> {
pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec<config::CrateType> {
// Unconditionally collect crate types from attributes to make them used
let attr_types: Vec<config::CrateType> = attrs.iter().filter_map(|a| {
if a.check_name("crate_type") {
match a.value_str() {
Some(ref n) if *n == "rlib" => {
Some(config::CrateTypeRlib)
}
Some(ref n) if *n == "dylib" => {
Some(config::CrateTypeDylib)
}
Some(ref n) if *n == "lib" => {
Some(config::default_lib_output())
}
Some(ref n) if *n == "staticlib" => {
Some(config::CrateTypeStaticlib)
}
Some(ref n) if *n == "bin" => Some(config::CrateTypeExecutable),
Some(_) => {
session.add_lint(lint::builtin::UNKNOWN_CRATE_TYPES,
ast::CRATE_NODE_ID,
a.span,
"invalid `crate_type` \
value".to_string());
None
}
_ => {
session.span_err(a.span, "`crate_type` requires a value");
session.note("for example: `#![crate_type=\"lib\"]`");
None
}
}
} else {
None
}
}).collect();
let attr_types: Vec<config::CrateType> =
attrs.iter()
.filter_map(|a| {
if a.check_name("crate_type") {
match a.value_str() {
Some(ref n) if *n == "rlib" => {
Some(config::CrateTypeRlib)
}
Some(ref n) if *n == "dylib" => {
Some(config::CrateTypeDylib)
}
Some(ref n) if *n == "lib" => {
Some(config::default_lib_output())
}
Some(ref n) if *n == "staticlib" => {
Some(config::CrateTypeStaticlib)
}
Some(ref n) if *n == "bin" => Some(config::CrateTypeExecutable),
Some(_) => {
session.add_lint(lint::builtin::UNKNOWN_CRATE_TYPES,
ast::CRATE_NODE_ID,
a.span,
"invalid `crate_type` value".to_string());
None
}
_ => {
session.span_err(a.span, "`crate_type` requires a value");
session.note("for example: `#![crate_type=\"lib\"]`");
None
}
}
} else {
None
}
})
.collect();
// If we're generating a test executable, then ignore all other output
// styles at all other locations
if session.opts.test {
return vec!(config::CrateTypeExecutable)
return vec![config::CrateTypeExecutable];
}
// Only check command line flags if present. If no types are specified by
@ -992,21 +1001,22 @@ pub fn collect_crate_types(session: &Session,
base.dedup();
}
base.into_iter().filter(|crate_type| {
let res = !link::invalid_output_for_target(session, *crate_type);
base.into_iter()
.filter(|crate_type| {
let res = !link::invalid_output_for_target(session, *crate_type);
if !res {
session.warn(&format!("dropping unsupported crate type `{}` \
for target `{}`",
*crate_type, session.opts.target_triple));
}
if !res {
session.warn(&format!("dropping unsupported crate type `{}` for target `{}`",
*crate_type,
session.opts.target_triple));
}
res
}).collect()
res
})
.collect()
}
pub fn collect_crate_metadata(session: &Session,
_attrs: &[ast::Attribute]) -> Vec<String> {
pub fn collect_crate_metadata(session: &Session, _attrs: &[ast::Attribute]) -> Vec<String> {
session.opts.cg.metadata.clone()
}
@ -1015,7 +1025,7 @@ pub fn build_output_filenames(input: &Input,
ofile: &Option<PathBuf>,
attrs: &[ast::Attribute],
sess: &Session)
-> OutputFilenames {
-> OutputFilenames {
match *ofile {
None => {
// "-" as input file will cause the parser to read from stdin so we
@ -1023,13 +1033,15 @@ pub fn build_output_filenames(input: &Input,
// We want to toss everything after the final '.'
let dirpath = match *odir {
Some(ref d) => d.clone(),
None => PathBuf::new()
None => PathBuf::new(),
};
// If a crate name is present, we use it as the link name
let stem = sess.opts.crate_name.clone().or_else(|| {
attr::find_crate_name(attrs).map(|n| n.to_string())
}).unwrap_or(input.filestem());
let stem = sess.opts
.crate_name
.clone()
.or_else(|| attr::find_crate_name(attrs).map(|n| n.to_string()))
.unwrap_or(input.filestem());
OutputFilenames {
out_directory: dirpath,
@ -1041,12 +1053,14 @@ pub fn build_output_filenames(input: &Input,
}
Some(ref out_file) => {
let unnamed_output_types = sess.opts.output_types.values()
let unnamed_output_types = sess.opts
.output_types
.values()
.filter(|a| a.is_none())
.count();
let ofile = if unnamed_output_types > 1 {
sess.warn("ignoring specified output filename because multiple \
outputs were requested");
sess.warn("ignoring specified output filename because multiple outputs were \
requested");
None
} else {
Some(out_file.clone())
@ -1059,8 +1073,11 @@ pub fn build_output_filenames(input: &Input,
OutputFilenames {
out_directory: out_file.parent().unwrap_or(cur_dir).to_path_buf(),
out_filestem: out_file.file_stem().unwrap_or(OsStr::new(""))
.to_str().unwrap().to_string(),
out_filestem: out_file.file_stem()
.unwrap_or(OsStr::new(""))
.to_str()
.unwrap()
.to_string(),
single_output_file: ofile,
extra: sess.opts.cg.extra_filename.clone(),
outputs: sess.opts.output_types.clone(),

@ -51,8 +51,10 @@ extern crate rustc_trans;
extern crate rustc_typeck;
extern crate serialize;
extern crate rustc_llvm as llvm;
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
#[macro_use]
extern crate log;
#[macro_use]
extern crate syntax;
pub use syntax::diagnostic;
@ -94,8 +96,8 @@ pub mod pretty;
pub mod target_features;
const BUG_REPORT_URL: &'static str =
"https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.md#bug-reports";
const BUG_REPORT_URL: &'static str = "https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.\
md#bug-reports";
pub fn run(args: Vec<String>) -> isize {
@ -105,8 +107,7 @@ pub fn run(args: Vec<String>) -> isize {
// Parse args and run the compiler. This is the primary entry point for rustc.
// See comments on CompilerCalls below for details about the callbacks argument.
pub fn run_compiler<'a>(args: &[String],
callbacks: &mut CompilerCalls<'a>) {
pub fn run_compiler<'a>(args: &[String], callbacks: &mut CompilerCalls<'a>) {
macro_rules! do_or_return {($expr: expr) => {
match $expr {
Compilation::Stop => return,
@ -116,7 +117,7 @@ pub fn run_compiler<'a>(args: &[String],
let matches = match handle_options(args.to_vec()) {
Some(matches) => matches,
None => return
None => return,
};
let sopts = config::build_session_options(&matches);
@ -130,8 +131,8 @@ pub fn run_compiler<'a>(args: &[String],
Some((input, input_file_path)) => callbacks.some_input(input, input_file_path),
None => match callbacks.no_input(&matches, &sopts, &odir, &ofile, &descriptions) {
Some((input, input_file_path)) => (input, input_file_path),
None => return
}
None => return,
},
};
let mut sess = build_session(sopts, input_file_path, descriptions);
@ -152,7 +153,9 @@ pub fn run_compiler<'a>(args: &[String],
pretty::pretty_print_input(sess, cfg, &input, ppm, opt_uii, ofile);
return;
}
None => {/* continue */ }
None => {
// continue
}
}
let plugins = sess.opts.debugging_opts.extra_plugins.clone();
@ -176,7 +179,8 @@ fn make_input(free_matches: &[String]) -> Option<(Input, Option<PathBuf>)> {
io::stdin().read_to_string(&mut src).unwrap();
Some((Input::Str(src), None))
} else {
Some((Input::File(PathBuf::from(ifile)), Some(PathBuf::from(ifile))))
Some((Input::File(PathBuf::from(ifile)),
Some(PathBuf::from(ifile))))
}
} else {
None
@ -194,7 +198,7 @@ impl Compilation {
pub fn and_then<F: FnOnce() -> Compilation>(self, next: F) -> Compilation {
match self {
Compilation::Stop => Compilation::Stop,
Compilation::Continue => next()
Compilation::Continue => next(),
}
}
}
@ -229,7 +233,9 @@ pub trait CompilerCalls<'a> {
// Called after we extract the input from the arguments. Gives the implementer
// an opportunity to change the inputs or to add some custom input handling.
// The default behaviour is to simply pass through the inputs.
fn some_input(&mut self, input: Input, input_path: Option<PathBuf>)
fn some_input(&mut self,
input: Input,
input_path: Option<PathBuf>)
-> (Input, Option<PathBuf>) {
(input, input_path)
}
@ -269,7 +275,8 @@ pub trait CompilerCalls<'a> {
None
}
// Create a CompilController struct for controlling the behaviour of compilation.
// Create a CompilController struct for controlling the behaviour of
// compilation.
fn build_controller(&mut self, &Session) -> CompileController<'a>;
}
@ -300,8 +307,8 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
}
}
return Compilation::Stop;
},
None => ()
}
None => (),
}
return Compilation::Continue;
@ -331,7 +338,7 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
early_error(sopts.color, "no input filename given");
}
1 => panic!("make_input should have provided valid inputs"),
_ => early_error(sopts.color, "multiple input filenames provided")
_ => early_error(sopts.color, "multiple input filenames provided"),
}
None
@ -366,15 +373,14 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
odir: &Option<PathBuf>,
ofile: &Option<PathBuf>)
-> Compilation {
RustcDefaultCalls::print_crate_info(sess, Some(input), odir, ofile).and_then(
|| RustcDefaultCalls::list_metadata(sess, matches, input))
RustcDefaultCalls::print_crate_info(sess, Some(input), odir, ofile)
.and_then(|| RustcDefaultCalls::list_metadata(sess, matches, input))
}
fn build_controller(&mut self, sess: &Session) -> CompileController<'a> {
let mut control = CompileController::basic();
if sess.opts.parse_only ||
sess.opts.show_span.is_some() ||
if sess.opts.parse_only || sess.opts.show_span.is_some() ||
sess.opts.debugging_opts.ast_json_noexpand {
control.after_parse.stop = Compilation::Stop;
}
@ -393,14 +399,14 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
if sess.opts.debugging_opts.save_analysis {
control.after_analysis.callback = box |state| {
time(state.session.time_passes(),
"save analysis",
|| save::process_crate(state.tcx.unwrap(),
state.lcx.unwrap(),
state.krate.unwrap(),
state.analysis.unwrap(),
state.crate_name.unwrap(),
state.out_dir));
time(state.session.time_passes(), "save analysis", || {
save::process_crate(state.tcx.unwrap(),
state.lcx.unwrap(),
state.krate.unwrap(),
state.analysis.unwrap(),
state.crate_name.unwrap(),
state.out_dir)
});
};
control.make_glob_map = resolve::MakeGlobMap::Yes;
}
@ -410,19 +416,15 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
}
impl RustcDefaultCalls {
pub fn list_metadata(sess: &Session,
matches: &getopts::Matches,
input: &Input)
-> Compilation {
pub fn list_metadata(sess: &Session, matches: &getopts::Matches, input: &Input) -> Compilation {
let r = matches.opt_strs("Z");
if r.contains(&("ls".to_string())) {
match input {
&Input::File(ref ifile) => {
let path = &(*ifile);
let mut v = Vec::new();
metadata::loader::list_file_metadata(&sess.target.target,
path,
&mut v).unwrap();
metadata::loader::list_file_metadata(&sess.target.target, path, &mut v)
.unwrap();
println!("{}", String::from_utf8(v).unwrap());
}
&Input::Str(_) => {
@ -456,26 +458,21 @@ impl RustcDefaultCalls {
None => early_error(sess.opts.color, "no input file provided"),
};
let attrs = attrs.as_ref().unwrap();
let t_outputs = driver::build_output_filenames(input,
odir,
ofile,
attrs,
sess);
let id = link::find_crate_name(Some(sess),
attrs,
input);
let t_outputs = driver::build_output_filenames(input, odir, ofile, attrs, sess);
let id = link::find_crate_name(Some(sess), attrs, input);
if *req == PrintRequest::CrateName {
println!("{}", id);
continue
continue;
}
let crate_types = driver::collect_crate_types(sess, attrs);
let metadata = driver::collect_crate_metadata(sess, attrs);
*sess.crate_metadata.borrow_mut() = metadata;
for &style in &crate_types {
let fname = link::filename_for_input(sess, style, &id,
&t_outputs);
println!("{}", fname.file_name().unwrap()
.to_string_lossy());
let fname = link::filename_for_input(sess, style, &id, &t_outputs);
println!("{}",
fname.file_name()
.unwrap()
.to_string_lossy());
}
}
}
@ -503,9 +500,13 @@ pub fn commit_date_str() -> Option<&'static str> {
pub fn version(binary: &str, matches: &getopts::Matches) {
let verbose = matches.opt_present("verbose");
println!("{} {}", binary, option_env!("CFG_VERSION").unwrap_or("unknown version"));
println!("{} {}",
binary,
option_env!("CFG_VERSION").unwrap_or("unknown version"));
if verbose {
fn unw(x: Option<&str>) -> &str { x.unwrap_or("unknown") }
fn unw(x: Option<&str>) -> &str {
x.unwrap_or("unknown")
}
println!("binary: {}", binary);
println!("commit-hash: {}", unw(commit_hash_str()));
println!("commit-date: {}", unw(commit_date_str()));
@ -520,32 +521,35 @@ fn usage(verbose: bool, include_unstable_options: bool) {
} else {
config::rustc_short_optgroups()
};
let groups : Vec<_> = groups.into_iter()
.filter(|x| include_unstable_options || x.is_stable())
.map(|x|x.opt_group)
.collect();
let groups: Vec<_> = groups.into_iter()
.filter(|x| include_unstable_options || x.is_stable())
.map(|x| x.opt_group)
.collect();
let message = format!("Usage: rustc [OPTIONS] INPUT");
let extra_help = if verbose {
""
} else {
"\n --help -v Print the full set of options rustc accepts"
};
println!("{}\n\
Additional help:
println!("{}\nAdditional help:
-C help Print codegen options
-W help Print 'lint' options and default settings
-Z help Print internal options for debugging rustc{}\n",
getopts::usage(&message, &groups),
extra_help);
-W help \
Print 'lint' options and default settings
-Z help Print internal \
options for debugging rustc{}\n",
getopts::usage(&message, &groups),
extra_help);
}
fn describe_lints(lint_store: &lint::LintStore, loaded_plugins: bool) {
println!("
Available lint options:
-W <foo> Warn about <foo>
-A <foo> Allow <foo>
-A <foo> \
Allow <foo>
-D <foo> Deny <foo>
-F <foo> Forbid <foo> (deny, and deny all overrides)
-F <foo> Forbid <foo> \
(deny, and deny all overrides)
");
@ -562,7 +566,7 @@ Available lint options:
}
fn sort_lint_groups(lints: Vec<(&'static str, Vec<lint::LintId>, bool)>)
-> Vec<(&'static str, Vec<lint::LintId>)> {
-> Vec<(&'static str, Vec<lint::LintId>)> {
let mut lints: Vec<_> = lints.into_iter().map(|(x, y, _)| (x, y)).collect();
lints.sort_by(|&(x, _): &(&'static str, Vec<lint::LintId>),
&(y, _): &(&'static str, Vec<lint::LintId>)| {
@ -572,21 +576,28 @@ Available lint options:
}
let (plugin, builtin): (Vec<_>, _) = lint_store.get_lints()
.iter().cloned().partition(|&(_, p)| p);
.iter()
.cloned()
.partition(|&(_, p)| p);
let plugin = sort_lints(plugin);
let builtin = sort_lints(builtin);
let (plugin_groups, builtin_groups): (Vec<_>, _) = lint_store.get_lint_groups()
.iter().cloned().partition(|&(_, _, p)| p);
.iter()
.cloned()
.partition(|&(_, _, p)| p);
let plugin_groups = sort_lint_groups(plugin_groups);
let builtin_groups = sort_lint_groups(builtin_groups);
let max_name_len = plugin.iter().chain(&builtin)
.map(|&s| s.name.chars().count())
.max().unwrap_or(0);
let max_name_len = plugin.iter()
.chain(&builtin)
.map(|&s| s.name.chars().count())
.max()
.unwrap_or(0);
let padded = |x: &str| {
let mut s = repeat(" ").take(max_name_len - x.chars().count())
.collect::<String>();
let mut s = repeat(" ")
.take(max_name_len - x.chars().count())
.collect::<String>();
s.push_str(x);
s
};
@ -599,7 +610,9 @@ Available lint options:
for lint in lints {
let name = lint.name_lower().replace("_", "-");
println!(" {} {:7.7} {}",
padded(&name[..]), lint.default_level.as_str(), lint.desc);
padded(&name[..]),
lint.default_level.as_str(),
lint.desc);
}
println!("\n");
};
@ -608,12 +621,15 @@ Available lint options:
let max_name_len = plugin_groups.iter().chain(&builtin_groups)
.map(|&(s, _)| s.chars().count())
.max().unwrap_or(0);
let max_name_len = plugin_groups.iter()
.chain(&builtin_groups)
.map(|&(s, _)| s.chars().count())
.max()
.unwrap_or(0);
let padded = |x: &str| {
let mut s = repeat(" ").take(max_name_len - x.chars().count())
.collect::<String>();
let mut s = repeat(" ")
.take(max_name_len - x.chars().count())
.collect::<String>();
s.push_str(x);
s
};
@ -625,10 +641,11 @@ Available lint options:
let print_lint_groups = |lints: Vec<(&'static str, Vec<lint::LintId>)>| {
for (name, to) in lints {
let name = name.to_lowercase().replace("_", "-");
let desc = to.into_iter().map(|x| x.as_str().replace("_", "-"))
.collect::<Vec<String>>().join(", ");
println!(" {} {}",
padded(&name[..]), desc);
let desc = to.into_iter()
.map(|x| x.as_str().replace("_", "-"))
.collect::<Vec<String>>()
.join(", ");
println!(" {} {}", padded(&name[..]), desc);
}
println!("\n");
};
@ -667,21 +684,28 @@ fn describe_codegen_flags() {
fn print_flag_list<T>(cmdline_opt: &str,
flag_list: &[(&'static str, T, Option<&'static str>, &'static str)]) {
let max_len = flag_list.iter().map(|&(name, _, opt_type_desc, _)| {
let extra_len = match opt_type_desc {
Some(..) => 4,
None => 0
};
name.chars().count() + extra_len
}).max().unwrap_or(0);
let max_len = flag_list.iter()
.map(|&(name, _, opt_type_desc, _)| {
let extra_len = match opt_type_desc {
Some(..) => 4,
None => 0,
};
name.chars().count() + extra_len
})
.max()
.unwrap_or(0);
for &(name, _, opt_type_desc, desc) in flag_list {
let (width, extra) = match opt_type_desc {
Some(..) => (max_len - 4, "=val"),
None => (max_len, "")
None => (max_len, ""),
};
println!(" {} {:>width$}{} -- {}", cmdline_opt, name.replace("_", "-"),
extra, desc, width=width);
println!(" {} {:>width$}{} -- {}",
cmdline_opt,
name.replace("_", "-"),
extra,
desc,
width = width);
}
}
@ -705,8 +729,10 @@ pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {
}
fn parse_all_options(args: &Vec<String>) -> getopts::Matches {
let all_groups : Vec<getopts::OptGroup>
= config::rustc_optgroups().into_iter().map(|x|x.opt_group).collect();
let all_groups: Vec<getopts::OptGroup> = config::rustc_optgroups()
.into_iter()
.map(|x| x.opt_group)
.collect();
match getopts::getopts(&args[..], &all_groups) {
Ok(m) => {
if !allows_unstable_options(&m) {
@ -719,15 +745,16 @@ pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {
&opt.opt_group.short_name
};
if m.opt_present(opt_name) {
early_error(diagnostic::Auto, &format!("use of unstable option '{}' \
requires -Z unstable-options",
opt_name));
early_error(diagnostic::Auto,
&format!("use of unstable option '{}' requires -Z \
unstable-options",
opt_name));
}
}
}
m
}
Err(f) => early_error(diagnostic::Auto, &f.to_string())
Err(f) => early_error(diagnostic::Auto, &f.to_string()),
}
}
@ -750,7 +777,8 @@ pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {
};
if matches.opt_present("h") || matches.opt_present("help") {
usage(matches.opt_present("verbose"), allows_unstable_options(&matches));
usage(matches.opt_present("verbose"),
allows_unstable_options(&matches));
return None;
}
@ -769,7 +797,9 @@ pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {
}
if cg_flags.contains(&"passes=list".to_string()) {
unsafe { ::llvm::LLVMRustPrintPasses(); }
unsafe {
::llvm::LLVMRustPrintPasses();
}
return None;
}
@ -781,20 +811,16 @@ pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {
Some(matches)
}
fn parse_crate_attrs(sess: &Session, input: &Input) ->
Vec<ast::Attribute> {
fn parse_crate_attrs(sess: &Session, input: &Input) -> Vec<ast::Attribute> {
let result = match *input {
Input::File(ref ifile) => {
parse::parse_crate_attrs_from_file(ifile,
Vec::new(),
&sess.parse_sess)
parse::parse_crate_attrs_from_file(ifile, Vec::new(), &sess.parse_sess)
}
Input::Str(ref src) => {
parse::parse_crate_attrs_from_source_str(
driver::anon_src().to_string(),
src.to_string(),
Vec::new(),
&sess.parse_sess)
parse::parse_crate_attrs_from_source_str(driver::anon_src().to_string(),
src.to_string(),
Vec::new(),
&sess.parse_sess)
}
};
result.into_iter().collect()
@ -805,7 +831,7 @@ fn parse_crate_attrs(sess: &Session, input: &Input) ->
///
/// The diagnostic emitter yielded to the procedure should be used for reporting
/// errors of the compiler.
pub fn monitor<F:FnOnce()+Send+'static>(f: F) {
pub fn monitor<F: FnOnce() + Send + 'static>(f: F) {
const STACK_SIZE: usize = 8 * 1024 * 1024; // 8MB
struct Sink(Arc<Mutex<Vec<u8>>>);
@ -813,7 +839,9 @@ pub fn monitor<F:FnOnce()+Send+'static>(f: F) {
fn write(&mut self, data: &[u8]) -> io::Result<usize> {
Write::write(&mut *self.0.lock().unwrap(), data)
}
fn flush(&mut self) -> io::Result<()> { Ok(()) }
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
let data = Arc::new(Mutex::new(Vec::new()));
@ -827,8 +855,15 @@ pub fn monitor<F:FnOnce()+Send+'static>(f: F) {
cfg = cfg.stack_size(STACK_SIZE);
}
match cfg.spawn(move || { io::set_panic(box err); f() }).unwrap().join() {
Ok(()) => { /* fallthrough */ }
match cfg.spawn(move || {
io::set_panic(box err);
f()
})
.unwrap()
.join() {
Ok(()) => {
// fallthrough
}
Err(value) => {
// Thread panicked without emitting a fatal diagnostic
if !value.is::<diagnostic::FatalError>() {
@ -837,24 +872,19 @@ pub fn monitor<F:FnOnce()+Send+'static>(f: F) {
// a .span_bug or .bug call has already printed what
// it wants to print.
if !value.is::<diagnostic::ExplicitBug>() {
emitter.emit(
None,
"unexpected panic",
None,
diagnostic::Bug);
emitter.emit(None, "unexpected panic", None, diagnostic::Bug);
}
let xs = [
"the compiler unexpectedly panicked. this is a bug.".to_string(),
format!("we would appreciate a bug report: {}",
BUG_REPORT_URL),
];
let xs = ["the compiler unexpectedly panicked. this is a bug.".to_string(),
format!("we would appreciate a bug report: {}", BUG_REPORT_URL)];
for note in &xs {
emitter.emit(None, &note[..], None, diagnostic::Note)
}
if let None = env::var_os("RUST_BACKTRACE") {
emitter.emit(None, "run with `RUST_BACKTRACE=1` for a backtrace",
None, diagnostic::Note);
emitter.emit(None,
"run with `RUST_BACKTRACE=1` for a backtrace",
None,
diagnostic::Note);
}
println!("{}", str::from_utf8(&data.lock().unwrap()).unwrap());

@ -79,33 +79,34 @@ pub enum PpMode {
pub fn parse_pretty(sess: &Session,
name: &str,
extended: bool) -> (PpMode, Option<UserIdentifiedItem>) {
extended: bool)
-> (PpMode, Option<UserIdentifiedItem>) {
let mut split = name.splitn(2, '=');
let first = split.next().unwrap();
let opt_second = split.next();
let first = match (first, extended) {
("normal", _) => PpmSource(PpmNormal),
("identified", _) => PpmSource(PpmIdentified),
("normal", _) => PpmSource(PpmNormal),
("identified", _) => PpmSource(PpmIdentified),
("everybody_loops", true) => PpmSource(PpmEveryBodyLoops),
("expanded", _) => PpmSource(PpmExpanded),
("expanded", _) => PpmSource(PpmExpanded),
("expanded,identified", _) => PpmSource(PpmExpandedIdentified),
("expanded,hygiene", _) => PpmSource(PpmExpandedHygiene),
("hir", true) => PpmHir(PpmNormal),
("hir", true) => PpmHir(PpmNormal),
("hir,identified", true) => PpmHir(PpmIdentified),
("hir,typed", true) => PpmHir(PpmTyped),
("flowgraph", true) => PpmFlowGraph(PpFlowGraphMode::Default),
("flowgraph,unlabelled", true) => PpmFlowGraph(PpFlowGraphMode::UnlabelledEdges),
("hir,typed", true) => PpmHir(PpmTyped),
("flowgraph", true) => PpmFlowGraph(PpFlowGraphMode::Default),
("flowgraph,unlabelled", true) => PpmFlowGraph(PpFlowGraphMode::UnlabelledEdges),
_ => {
if extended {
sess.fatal(&format!(
"argument to `unpretty` must be one of `normal`, \
`expanded`, `flowgraph[,unlabelled]=<nodeid>`, `identified`, \
`expanded,identified`, `everybody_loops`, `hir`, \
`hir,identified`, or `hir,typed`; got {}", name));
sess.fatal(&format!("argument to `unpretty` must be one of `normal`, \
`expanded`, `flowgraph[,unlabelled]=<nodeid>`, \
`identified`, `expanded,identified`, `everybody_loops`, \
`hir`, `hir,identified`, or `hir,typed`; got {}",
name));
} else {
sess.fatal(&format!(
"argument to `pretty` must be one of `normal`, `expanded`, \
`identified`, or `expanded,identified`; got {}", name));
sess.fatal(&format!("argument to `pretty` must be one of `normal`, `expanded`, \
`identified`, or `expanded,identified`; got {}",
name));
}
}
};
@ -134,21 +135,31 @@ impl PpSourceMode {
sess: &'tcx Session,
ast_map: Option<hir_map::Map<'tcx>>,
payload: B,
f: F) -> A where
F: FnOnce(&PrinterSupport, B) -> A,
f: F)
-> A
where F: FnOnce(&PrinterSupport, B) -> A
{
match *self {
PpmNormal | PpmEveryBodyLoops | PpmExpanded => {
let annotation = NoAnn { sess: sess, ast_map: ast_map };
let annotation = NoAnn {
sess: sess,
ast_map: ast_map,
};
f(&annotation, payload)
}
PpmIdentified | PpmExpandedIdentified => {
let annotation = IdentifiedAnnotation { sess: sess, ast_map: ast_map };
let annotation = IdentifiedAnnotation {
sess: sess,
ast_map: ast_map,
};
f(&annotation, payload)
}
PpmExpandedHygiene => {
let annotation = HygieneAnnotation { sess: sess, ast_map: ast_map };
let annotation = HygieneAnnotation {
sess: sess,
ast_map: ast_map,
};
f(&annotation, payload)
}
_ => panic!("Should use call_with_pp_support_hir"),
@ -160,19 +171,23 @@ impl PpSourceMode {
arenas: &'tcx ty::CtxtArenas<'tcx>,
id: &str,
payload: B,
f: F) -> A where
F: FnOnce(&HirPrinterSupport, B, &hir::Crate) -> A,
f: F)
-> A
where F: FnOnce(&HirPrinterSupport, B, &hir::Crate) -> A
{
match *self {
PpmNormal => {
let annotation = NoAnn { sess: sess, ast_map: Some(ast_map.clone()) };
let annotation = NoAnn {
sess: sess,
ast_map: Some(ast_map.clone()),
};
f(&annotation, payload, &ast_map.forest.krate)
}
PpmIdentified => {
let annotation = IdentifiedAnnotation {
sess: sess,
ast_map: Some(ast_map.clone())
ast_map: Some(ast_map.clone()),
};
f(&annotation, payload, &ast_map.forest.krate)
}
@ -183,9 +198,13 @@ impl PpSourceMode {
id,
resolve::MakeGlobMap::No,
|tcx, _, _| {
let annotation = TypedAnnotation { tcx: tcx };
f(&annotation, payload, &ast_map.forest.krate)
})
let annotation = TypedAnnotation {
tcx: tcx,
};
f(&annotation,
payload,
&ast_map.forest.krate)
})
}
_ => panic!("Should use call_with_pp_support"),
}
@ -226,27 +245,35 @@ trait HirPrinterSupport<'ast>: pprust_hir::PpAnn {
struct NoAnn<'ast> {
sess: &'ast Session,
ast_map: Option<hir_map::Map<'ast>>
ast_map: Option<hir_map::Map<'ast>>,
}
impl<'ast> PrinterSupport<'ast> for NoAnn<'ast> {
fn sess<'a>(&'a self) -> &'a Session { self.sess }
fn sess<'a>(&'a self) -> &'a Session {
self.sess
}
fn ast_map<'a>(&'a self) -> Option<&'a hir_map::Map<'ast>> {
self.ast_map.as_ref()
}
fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn { self }
fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn {
self
}
}
impl<'ast> HirPrinterSupport<'ast> for NoAnn<'ast> {
fn sess<'a>(&'a self) -> &'a Session { self.sess }
fn sess<'a>(&'a self) -> &'a Session {
self.sess
}
fn ast_map<'a>(&'a self) -> Option<&'a hir_map::Map<'ast>> {
self.ast_map.as_ref()
}
fn pp_ann<'a>(&'a self) -> &'a pprust_hir::PpAnn { self }
fn pp_ann<'a>(&'a self) -> &'a pprust_hir::PpAnn {
self
}
}
impl<'ast> pprust::PpAnn for NoAnn<'ast> {}
@ -258,27 +285,27 @@ struct IdentifiedAnnotation<'ast> {
}
impl<'ast> PrinterSupport<'ast> for IdentifiedAnnotation<'ast> {
fn sess<'a>(&'a self) -> &'a Session { self.sess }
fn sess<'a>(&'a self) -> &'a Session {
self.sess
}
fn ast_map<'a>(&'a self) -> Option<&'a hir_map::Map<'ast>> {
self.ast_map.as_ref()
}
fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn { self }
fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn {
self
}
}
impl<'ast> pprust::PpAnn for IdentifiedAnnotation<'ast> {
fn pre(&self,
s: &mut pprust::State,
node: pprust::AnnNode) -> io::Result<()> {
fn pre(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> {
match node {
pprust::NodeExpr(_) => s.popen(),
_ => Ok(())
_ => Ok(()),
}
}
fn post(&self,
s: &mut pprust::State,
node: pprust::AnnNode) -> io::Result<()> {
fn post(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> {
match node {
pprust::NodeIdent(_) | pprust::NodeName(_) => Ok(()),
@ -308,27 +335,27 @@ impl<'ast> pprust::PpAnn for IdentifiedAnnotation<'ast> {
}
impl<'ast> HirPrinterSupport<'ast> for IdentifiedAnnotation<'ast> {
fn sess<'a>(&'a self) -> &'a Session { self.sess }
fn sess<'a>(&'a self) -> &'a Session {
self.sess
}
fn ast_map<'a>(&'a self) -> Option<&'a hir_map::Map<'ast>> {
self.ast_map.as_ref()
}
fn pp_ann<'a>(&'a self) -> &'a pprust_hir::PpAnn { self }
fn pp_ann<'a>(&'a self) -> &'a pprust_hir::PpAnn {
self
}
}
impl<'ast> pprust_hir::PpAnn for IdentifiedAnnotation<'ast> {
fn pre(&self,
s: &mut pprust_hir::State,
node: pprust_hir::AnnNode) -> io::Result<()> {
fn pre(&self, s: &mut pprust_hir::State, node: pprust_hir::AnnNode) -> io::Result<()> {
match node {
pprust_hir::NodeExpr(_) => s.popen(),
_ => Ok(())
_ => Ok(()),
}
}
fn post(&self,
s: &mut pprust_hir::State,
node: pprust_hir::AnnNode) -> io::Result<()> {
fn post(&self, s: &mut pprust_hir::State, node: pprust_hir::AnnNode) -> io::Result<()> {
match node {
pprust_hir::NodeName(_) => Ok(()),
pprust_hir::NodeItem(item) => {
@ -362,19 +389,21 @@ struct HygieneAnnotation<'ast> {
}
impl<'ast> PrinterSupport<'ast> for HygieneAnnotation<'ast> {
fn sess<'a>(&'a self) -> &'a Session { self.sess }
fn sess<'a>(&'a self) -> &'a Session {
self.sess
}
fn ast_map<'a>(&'a self) -> Option<&'a hir_map::Map<'ast>> {
self.ast_map.as_ref()
}
fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn { self }
fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn {
self
}
}
impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> {
fn post(&self,
s: &mut pprust::State,
node: pprust::AnnNode) -> io::Result<()> {
fn post(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> {
match node {
pprust::NodeIdent(&ast::Ident { name: ast::Name(nm), ctxt }) => {
try!(pp::space(&mut s.s));
@ -386,7 +415,7 @@ impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> {
try!(pp::space(&mut s.s));
s.synth_comment(nm.to_string())
}
_ => Ok(())
_ => Ok(()),
}
}
}
@ -397,37 +426,36 @@ struct TypedAnnotation<'a, 'tcx: 'a> {
}
impl<'b, 'tcx> HirPrinterSupport<'tcx> for TypedAnnotation<'b, 'tcx> {
fn sess<'a>(&'a self) -> &'a Session { &self.tcx.sess }
fn sess<'a>(&'a self) -> &'a Session {
&self.tcx.sess
}
fn ast_map<'a>(&'a self) -> Option<&'a hir_map::Map<'tcx>> {
Some(&self.tcx.map)
}
fn pp_ann<'a>(&'a self) -> &'a pprust_hir::PpAnn { self }
fn pp_ann<'a>(&'a self) -> &'a pprust_hir::PpAnn {
self
}
}
impl<'a, 'tcx> pprust_hir::PpAnn for TypedAnnotation<'a, 'tcx> {
fn pre(&self,
s: &mut pprust_hir::State,
node: pprust_hir::AnnNode) -> io::Result<()> {
fn pre(&self, s: &mut pprust_hir::State, node: pprust_hir::AnnNode) -> io::Result<()> {
match node {
pprust_hir::NodeExpr(_) => s.popen(),
_ => Ok(())
_ => Ok(()),
}
}
fn post(&self,
s: &mut pprust_hir::State,
node: pprust_hir::AnnNode) -> io::Result<()> {
fn post(&self, s: &mut pprust_hir::State, node: pprust_hir::AnnNode) -> io::Result<()> {
match node {
pprust_hir::NodeExpr(expr) => {
try!(pp::space(&mut s.s));
try!(pp::word(&mut s.s, "as"));
try!(pp::space(&mut s.s));
try!(pp::word(&mut s.s,
&self.tcx.expr_ty(expr).to_string()));
try!(pp::word(&mut s.s, &self.tcx.expr_ty(expr).to_string()));
s.pclose()
}
_ => Ok(())
_ => Ok(()),
}
}
}
@ -459,9 +487,9 @@ pub enum UserIdentifiedItem {
impl FromStr for UserIdentifiedItem {
type Err = ();
fn from_str(s: &str) -> Result<UserIdentifiedItem, ()> {
Ok(s.parse().map(ItemViaNode).unwrap_or_else(|_| {
ItemViaPath(s.split("::").map(|s| s.to_string()).collect())
}))
Ok(s.parse()
.map(ItemViaNode)
.unwrap_or_else(|_| ItemViaPath(s.split("::").map(|s| s.to_string()).collect())))
}
}
@ -489,24 +517,22 @@ impl UserIdentifiedItem {
}
}
fn all_matching_node_ids<'a, 'ast>(&'a self, map: &'a hir_map::Map<'ast>)
fn all_matching_node_ids<'a, 'ast>(&'a self,
map: &'a hir_map::Map<'ast>)
-> NodesMatchingUII<'a, 'ast> {
match *self {
ItemViaNode(node_id) =>
NodesMatchingDirect(Some(node_id).into_iter()),
ItemViaPath(ref parts) =>
NodesMatchingSuffix(map.nodes_matching_suffix(&parts[..])),
ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).into_iter()),
ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts[..])),
}
}
fn to_one_node_id(self, user_option: &str, sess: &Session, map: &hir_map::Map) -> ast::NodeId {
let fail_because = |is_wrong_because| -> ast::NodeId {
let message =
format!("{} needs NodeId (int) or unique \
path suffix (b::c::d); got {}, which {}",
user_option,
self.reconstructed_input(),
is_wrong_because);
let message = format!("{} needs NodeId (int) or unique path suffix (b::c::d); got \
{}, which {}",
user_option,
self.reconstructed_input(),
is_wrong_because);
sess.fatal(&message[..])
};
@ -608,12 +634,13 @@ impl fold::Folder for ReplaceBodyWithLoop {
}
fn fold_block(&mut self, b: P<ast::Block>) -> P<ast::Block> {
fn expr_to_block(rules: ast::BlockCheckMode,
e: Option<P<ast::Expr>>) -> P<ast::Block> {
fn expr_to_block(rules: ast::BlockCheckMode, e: Option<P<ast::Expr>>) -> P<ast::Block> {
P(ast::Block {
expr: e,
stmts: vec![], rules: rules,
id: ast::DUMMY_NODE_ID, span: codemap::DUMMY_SP,
stmts: vec![],
rules: rules,
id: ast::DUMMY_NODE_ID,
span: codemap::DUMMY_SP,
})
}
@ -622,7 +649,8 @@ impl fold::Folder for ReplaceBodyWithLoop {
let empty_block = expr_to_block(ast::DefaultBlock, None);
let loop_expr = P(ast::Expr {
node: ast::ExprLoop(empty_block, None),
id: ast::DUMMY_NODE_ID, span: codemap::DUMMY_SP
id: ast::DUMMY_NODE_ID,
span: codemap::DUMMY_SP,
});
expr_to_block(b.rules, Some(loop_expr))
@ -661,7 +689,7 @@ pub fn pretty_print_input(sess: Session,
let krate = if compute_ast_map {
match driver::phase_2_configure_and_expand(&sess, krate, &id[..], None) {
None => return,
Some(k) => driver::assign_node_ids(&sess, k)
Some(k) => driver::assign_node_ids(&sess, k),
}
} else {
krate
@ -681,12 +709,13 @@ pub fn pretty_print_input(sess: Session,
};
let src_name = driver::source_name(input);
let src = sess.codemap().get_filemap(&src_name[..])
.src
.as_ref()
.unwrap()
.as_bytes()
.to_vec();
let src = sess.codemap()
.get_filemap(&src_name[..])
.src
.as_ref()
.unwrap()
.as_bytes()
.to_vec();
let mut rdr = &src[..];
let mut out = Vec::new();
@ -695,36 +724,39 @@ pub fn pretty_print_input(sess: Session,
(PpmSource(s), _) => {
// Silently ignores an identified node.
let out: &mut Write = &mut out;
s.call_with_pp_support(
&sess, ast_map, box out, |annotation, out| {
debug!("pretty printing source code {:?}", s);
let sess = annotation.sess();
pprust::print_crate(sess.codemap(),
sess.diagnostic(),
&krate,
src_name.to_string(),
&mut rdr,
out,
annotation.pp_ann(),
is_expanded)
s.call_with_pp_support(&sess, ast_map, box out, |annotation, out| {
debug!("pretty printing source code {:?}", s);
let sess = annotation.sess();
pprust::print_crate(sess.codemap(),
sess.diagnostic(),
&krate,
src_name.to_string(),
&mut rdr,
out,
annotation.pp_ann(),
is_expanded)
})
}
(PpmHir(s), None) => {
let out: &mut Write = &mut out;
s.call_with_pp_support_hir(
&sess, &ast_map.unwrap(), &arenas, &id, box out, |annotation, out, krate| {
debug!("pretty printing source code {:?}", s);
let sess = annotation.sess();
pprust_hir::print_crate(sess.codemap(),
sess.diagnostic(),
krate,
src_name.to_string(),
&mut rdr,
out,
annotation.pp_ann(),
is_expanded)
})
s.call_with_pp_support_hir(&sess,
&ast_map.unwrap(),
&arenas,
&id,
box out,
|annotation, out, krate| {
debug!("pretty printing source code {:?}", s);
let sess = annotation.sess();
pprust_hir::print_crate(sess.codemap(),
sess.diagnostic(),
krate,
src_name.to_string(),
&mut rdr,
out,
annotation.pp_ann(),
is_expanded)
})
}
(PpmHir(s), Some(uii)) => {
@ -761,6 +793,7 @@ pub fn pretty_print_input(sess: Session,
debug!("pretty printing flow graph for {:?}", opt_uii);
let uii = opt_uii.unwrap_or_else(|| {
sess.fatal(&format!("`pretty flowgraph=..` needs NodeId (int) or
\
unique path suffix (b::c::d)"))
});
@ -768,8 +801,7 @@ pub fn pretty_print_input(sess: Session,
let nodeid = uii.to_one_node_id("--pretty", &sess, &ast_map);
let node = ast_map.find(nodeid).unwrap_or_else(|| {
sess.fatal(&format!("--pretty flowgraph couldn't find id: {}",
nodeid))
sess.fatal(&format!("--pretty flowgraph couldn't find id: {}", nodeid))
});
let code = blocks::Code::from_node(node);
@ -783,32 +815,36 @@ pub fn pretty_print_input(sess: Session,
&id,
resolve::MakeGlobMap::No,
|tcx, _, _| {
print_flowgraph(variants, tcx, code, mode, out)
})
print_flowgraph(variants,
tcx,
code,
mode,
out)
})
}
None => {
let message = format!("--pretty=flowgraph needs \
block, fn, or method; got {:?}",
let message = format!("--pretty=flowgraph needs block, fn, or method; got \
{:?}",
node);
// point to what was found, if there's an
// accessible span.
match ast_map.opt_span(nodeid) {
Some(sp) => sess.span_fatal(sp, &message[..]),
None => sess.fatal(&message[..])
None => sess.fatal(&message[..]),
}
}
}
}
}.unwrap();
}
.unwrap();
match ofile {
None => print!("{}", String::from_utf8(out).unwrap()),
Some(p) => {
match File::create(&p) {
Ok(mut w) => w.write_all(&out).unwrap(),
Err(e) => panic!("print-print failed to open {} due to {}",
p.display(), e),
Err(e) => panic!("print-print failed to open {} due to {}", p.display(), e),
}
}
}
@ -818,7 +854,8 @@ fn print_flowgraph<W: Write>(variants: Vec<borrowck_dot::Variant>,
tcx: &ty::ctxt,
code: blocks::Code,
mode: PpFlowGraphMode,
mut out: W) -> io::Result<()> {
mut out: W)
-> io::Result<()> {
let cfg = match code {
blocks::BlockCode(block) => cfg::CFG::new(tcx, &*block),
blocks::FnLikeCode(fn_like) => cfg::CFG::new(tcx, &*fn_like.body()),
@ -837,14 +874,14 @@ fn print_flowgraph<W: Write>(variants: Vec<borrowck_dot::Variant>,
return expand_err_details(r);
}
blocks::BlockCode(_) => {
tcx.sess.err("--pretty flowgraph with -Z flowgraph-print \
annotations requires fn-like node id.");
return Ok(())
tcx.sess.err("--pretty flowgraph with -Z flowgraph-print annotations requires \
fn-like node id.");
return Ok(());
}
blocks::FnLikeCode(fn_like) => {
let fn_parts = borrowck::FnPartsWithCFG::from_fn_like(&fn_like, &cfg);
let (bccx, analysis_data) =
borrowck::build_borrowck_dataflow_data_for_fn(tcx, fn_parts);
let (bccx, analysis_data) = borrowck::build_borrowck_dataflow_data_for_fn(tcx,
fn_parts);
let lcfg = borrowck_dot::DataflowLabeller {
inner: lcfg,
View File
@ -48,39 +48,30 @@ pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) {
fn features_contain(sess: &Session, s: &str) -> bool {
sess.target.target.options.features.contains(s) ||
sess.opts.cg.target_feature.contains(s)
sess.target.target.options.features.contains(s) || sess.opts.cg.target_feature.contains(s)
}
pub fn has_sse(sess: &Session) -> bool {
features_contain(sess, "+sse") ||
has_sse2(sess)
features_contain(sess, "+sse") || has_sse2(sess)
}
pub fn has_sse2(sess: &Session) -> bool {
// x86-64 requires at least SSE2 support
sess.target.target.arch == "x86_64" ||
features_contain(sess, "+sse2") ||
has_sse3(sess)
sess.target.target.arch == "x86_64" || features_contain(sess, "+sse2") || has_sse3(sess)
}
pub fn has_sse3(sess: &Session) -> bool {
features_contain(sess, "+sse3") ||
has_ssse3(sess)
features_contain(sess, "+sse3") || has_ssse3(sess)
}
pub fn has_ssse3(sess: &Session) -> bool {
features_contain(sess, "+ssse3") ||
has_sse41(sess)
features_contain(sess, "+ssse3") || has_sse41(sess)
}
pub fn has_sse41(sess: &Session) -> bool {
features_contain(sess, "+sse4.1") ||
has_sse42(sess)
features_contain(sess, "+sse4.1") || has_sse42(sess)
}
pub fn has_sse42(sess: &Session) -> bool {
features_contain(sess, "+sse4.2") ||
has_avx(sess)
features_contain(sess, "+sse4.2") || has_avx(sess)
}
pub fn has_avx(sess: &Session) -> bool {
features_contain(sess, "+avx") ||
has_avx2(sess)
features_contain(sess, "+avx") || has_avx2(sess)
}
pub fn has_avx2(sess: &Session) -> bool {
features_contain(sess, "+avx2")
@ -88,11 +79,9 @@ pub fn has_avx2(sess: &Session) -> bool {
pub fn has_neon(sess: &Session) -> bool {
// AArch64 requires NEON support
sess.target.target.arch == "aarch64" ||
features_contain(sess, "+neon")
sess.target.target.arch == "aarch64" || features_contain(sess, "+neon")
}
pub fn has_vfp(sess: &Session) -> bool {
// AArch64 requires VFP support
sess.target.target.arch == "aarch64" ||
features_contain(sess, "+vfp")
sess.target.target.arch == "aarch64" || features_contain(sess, "+vfp")
}
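The checks above deliberately chain upward: each `has_*` also succeeds when any strictly newer extension is enabled, so `+avx` alone satisfies `has_sse2`. A minimal standalone sketch of the same cascade over a plain feature slice (illustrative only; rustc's real helpers take a `&Session`):

// Illustrative cascade over a plain feature list; not rustc's real API.
fn features_contain(features: &[&str], s: &str) -> bool {
    features.contains(&s)
}

fn has_sse2(features: &[&str]) -> bool {
    features_contain(features, "+sse2") || has_sse3(features)
}

fn has_sse3(features: &[&str]) -> bool {
    features_contain(features, "+sse3") || has_avx(features)
}

fn has_avx(features: &[&str]) -> bool {
    features_contain(features, "+avx")
}

fn main() {
    // Enabling AVX alone is enough for the SSE2 check to pass.
    assert!(has_sse2(&["+avx"]));
    // SSE2 alone does not imply SSE3.
    assert!(!has_sse3(&["+sse2"]));
}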
View File
@ -30,7 +30,7 @@ use rustc_typeck::middle::infer::lub::Lub;
use rustc_typeck::middle::infer::glb::Glb;
use rustc_typeck::middle::infer::sub::Sub;
use rustc::front::map as hir_map;
use rustc::session::{self,config};
use rustc::session::{self, config};
use syntax::{abi, ast};
use syntax::codemap;
use syntax::codemap::{Span, CodeMap, DUMMY_SP};
@ -47,19 +47,21 @@ struct Env<'a, 'tcx: 'a> {
struct RH<'a> {
id: ast::NodeId,
sub: &'a [RH<'a>]
sub: &'a [RH<'a>],
}
const EMPTY_SOURCE_STR: &'static str = "#![feature(no_core)] #![no_core]";
struct ExpectErrorEmitter {
messages: Vec<String>
messages: Vec<String>,
}
fn remove_message(e: &mut ExpectErrorEmitter, msg: &str, lvl: Level) {
match lvl {
Bug | Fatal | Error => { }
Warning | Note | Help => { return; }
Bug | Fatal | Error => {}
Warning | Note | Help => {
return;
}
}
debug!("Error: {}", msg);
@ -68,8 +70,7 @@ fn remove_message(e: &mut ExpectErrorEmitter, msg: &str, lvl: Level) {
e.messages.remove(i);
}
None => {
panic!("Unexpected error: {} Expected: {:?}",
msg, e.messages);
panic!("Unexpected error: {} Expected: {:?}", msg, e.messages);
}
}
}
@ -79,41 +80,32 @@ impl Emitter for ExpectErrorEmitter {
_cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
_: Option<&str>,
lvl: Level)
{
lvl: Level) {
remove_message(self, msg, lvl);
}
fn custom_emit(&mut self,
_cm: &codemap::CodeMap,
_sp: RenderSpan,
msg: &str,
lvl: Level)
{
fn custom_emit(&mut self, _cm: &codemap::CodeMap, _sp: RenderSpan, msg: &str, lvl: Level) {
remove_message(self, msg, lvl);
}
}
fn errors(msgs: &[&str]) -> (Box<Emitter+Send>, usize) {
fn errors(msgs: &[&str]) -> (Box<Emitter + Send>, usize) {
let v = msgs.iter().map(|m| m.to_string()).collect();
(box ExpectErrorEmitter { messages: v } as Box<Emitter+Send>, msgs.len())
(box ExpectErrorEmitter { messages: v } as Box<Emitter + Send>,
msgs.len())
}
fn test_env<F>(source_string: &str,
(emitter, expected_err_count): (Box<Emitter+Send>, usize),
body: F) where
F: FnOnce(Env),
(emitter, expected_err_count): (Box<Emitter + Send>, usize),
body: F)
where F: FnOnce(Env)
{
let mut options =
config::basic_options();
let mut options = config::basic_options();
options.debugging_opts.verbose = true;
options.unstable_features = UnstableFeatures::Allow;
let codemap =
CodeMap::new();
let diagnostic_handler =
diagnostic::Handler::with_emitter(true, emitter);
let span_diagnostic_handler =
diagnostic::SpanHandler::new(diagnostic_handler, codemap);
let codemap = CodeMap::new();
let diagnostic_handler = diagnostic::Handler::with_emitter(true, emitter);
let span_diagnostic_handler = diagnostic::SpanHandler::new(diagnostic_handler, codemap);
let sess = session::build_session_(options, None, span_diagnostic_handler);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
@ -146,12 +138,13 @@ fn test_env<F>(source_string: &str,
lang_items,
stability::Index::new(krate),
|tcx| {
let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, false);
body(Env { infcx: &infcx });
let free_regions = FreeRegionMap::new();
infcx.resolve_regions_and_report_errors(&free_regions, ast::CRATE_NODE_ID);
assert_eq!(tcx.sess.err_count(), expected_err_count);
});
let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, false);
body(Env { infcx: &infcx });
let free_regions = FreeRegionMap::new();
infcx.resolve_regions_and_report_errors(&free_regions,
ast::CRATE_NODE_ID);
assert_eq!(tcx.sess.err_count(), expected_err_count);
});
}
impl<'a, 'tcx> Env<'a, 'tcx> {
@ -169,15 +162,16 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
pub fn create_simple_region_hierarchy(&self) {
// creates a region hierarchy where 1 is root, 10 and 11 are
// children of 1, etc
let dscope = self.infcx.tcx.region_maps.intern_code_extent(
CodeExtentData::DestructionScope(1), region::ROOT_CODE_EXTENT);
self.create_region_hierarchy(
&RH {id: 1,
sub: &[RH {id: 10,
sub: &[]},
RH {id: 11,
sub: &[]}]},
dscope);
let dscope = self.infcx
.tcx
.region_maps
.intern_code_extent(CodeExtentData::DestructionScope(1),
region::ROOT_CODE_EXTENT);
self.create_region_hierarchy(&RH {
id: 1,
sub: &[RH { id: 10, sub: &[] }, RH { id: 11, sub: &[] }],
},
dscope);
}
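For context, `RH { id, sub }` nests child hierarchies as borrowed slices, which is what lets the literal above describe the whole 1 -> {10, 11} tree inline. A standalone sketch of the same nested-slice pattern (the `Node` type and `count` helper are illustrative, not part of this test file):

// Illustrative nested-slice tree, mirroring the RH { id, sub } shape above.
struct Node<'a> {
    id: u32,
    sub: &'a [Node<'a>],
}

fn count(n: &Node) -> usize {
    1 + n.sub.iter().map(count).sum::<usize>()
}

fn main() {
    let tree = Node {
        id: 1,
        sub: &[Node { id: 10, sub: &[] }, Node { id: 11, sub: &[] }],
    };
    assert_eq!(tree.id, 1);
    // Root plus its two children, matching the hierarchy built above.
    assert_eq!(count(&tree), 3);
}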
#[allow(dead_code)] // this seems like it could be useful, even if we don't use it now
@ -197,30 +191,32 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
assert!(idx < names.len());
for item in &m.items {
if item.name.to_string() == names[idx] {
return search(this, &**item, idx+1, names);
return search(this, &**item, idx + 1, names);
}
}
return None;
}
fn search(this: &Env,
it: &hir::Item,
idx: usize,
names: &[String])
-> Option<ast::NodeId> {
fn search(this: &Env, it: &hir::Item, idx: usize, names: &[String]) -> Option<ast::NodeId> {
if idx == names.len() {
return Some(it.id);
}
return match it.node {
hir::ItemUse(..) | hir::ItemExternCrate(..) |
hir::ItemConst(..) | hir::ItemStatic(..) | hir::ItemFn(..) |
hir::ItemForeignMod(..) | hir::ItemTy(..) => {
hir::ItemUse(..) |
hir::ItemExternCrate(..) |
hir::ItemConst(..) |
hir::ItemStatic(..) |
hir::ItemFn(..) |
hir::ItemForeignMod(..) |
hir::ItemTy(..) => {
None
}
hir::ItemEnum(..) | hir::ItemStruct(..) |
hir::ItemTrait(..) | hir::ItemImpl(..) |
hir::ItemEnum(..) |
hir::ItemStruct(..) |
hir::ItemTrait(..) |
hir::ItemImpl(..) |
hir::ItemDefaultImpl(..) => {
None
}
@ -235,14 +231,14 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
pub fn make_subtype(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
match infer::mk_subty(self.infcx, true, infer::Misc(DUMMY_SP), a, b) {
Ok(_) => true,
Err(ref e) => panic!("Encountered error: {}", e)
Err(ref e) => panic!("Encountered error: {}", e),
}
}
pub fn is_subtype(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
match infer::can_mk_subty(self.infcx, a, b) {
Ok(_) => true,
Err(_) => false
Err(_) => false,
}
}
@ -257,22 +253,18 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
self.assert_subtype(b, a);
}
pub fn t_fn(&self,
input_tys: &[Ty<'tcx>],
output_ty: Ty<'tcx>)
-> Ty<'tcx>
{
pub fn t_fn(&self, input_tys: &[Ty<'tcx>], output_ty: Ty<'tcx>) -> Ty<'tcx> {
let input_args = input_tys.iter().cloned().collect();
self.infcx.tcx.mk_fn(None,
self.infcx.tcx.mk_bare_fn(ty::BareFnTy {
unsafety: hir::Unsafety::Normal,
abi: abi::Rust,
sig: ty::Binder(ty::FnSig {
inputs: input_args,
output: ty::FnConverging(output_ty),
variadic: false
})
}))
self.infcx.tcx.mk_bare_fn(ty::BareFnTy {
unsafety: hir::Unsafety::Normal,
abi: abi::Rust,
sig: ty::Binder(ty::FnSig {
inputs: input_args,
output: ty::FnConverging(output_ty),
variadic: false,
}),
}))
}
pub fn t_nil(&self) -> Ty<'tcx> {
@ -292,14 +284,13 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
space: subst::ParamSpace,
index: u32,
name: &'static str)
-> ty::Region
{
-> ty::Region {
let name = token::intern(name);
ty::ReEarlyBound(ty::EarlyBoundRegion {
def_id: self.infcx.tcx.map.local_def_id(ast::DUMMY_NODE_ID),
space: space,
index: index,
name: name
name: name,
})
}
@ -308,14 +299,12 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
}
pub fn t_rptr(&self, r: ty::Region) -> Ty<'tcx> {
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r),
self.tcx().types.isize)
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r), self.tcx().types.isize)
}
pub fn t_rptr_late_bound(&self, id: u32) -> Ty<'tcx> {
let r = self.re_late_bound_with_debruijn(id, ty::DebruijnIndex::new(1));
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r),
self.tcx().types.isize)
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r), self.tcx().types.isize)
}
pub fn t_rptr_late_bound_with_debruijn(&self,
@ -323,37 +312,34 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
debruijn: ty::DebruijnIndex)
-> Ty<'tcx> {
let r = self.re_late_bound_with_debruijn(id, debruijn);
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r),
self.tcx().types.isize)
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r), self.tcx().types.isize)
}
pub fn t_rptr_scope(&self, id: ast::NodeId) -> Ty<'tcx> {
let r = ty::ReScope(self.tcx().region_maps.node_extent(id));
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r),
self.tcx().types.isize)
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r), self.tcx().types.isize)
}
pub fn re_free(&self, nid: ast::NodeId, id: u32) -> ty::Region {
ty::ReFree(ty::FreeRegion {
scope: self.tcx().region_maps.item_extent(nid),
bound_region: ty::BrAnon(id)
bound_region: ty::BrAnon(id),
})
}
pub fn t_rptr_free(&self, nid: ast::NodeId, id: u32) -> Ty<'tcx> {
let r = self.re_free(nid, id);
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r),
self.tcx().types.isize)
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r), self.tcx().types.isize)
}
pub fn t_rptr_static(&self) -> Ty<'tcx> {
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(ty::ReStatic),
self.tcx().types.isize)
self.tcx().types.isize)
}
pub fn t_rptr_empty(&self) -> Ty<'tcx> {
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(ty::ReEmpty),
self.tcx().types.isize)
self.tcx().types.isize)
}
pub fn dummy_type_trace(&self) -> infer::TypeTrace<'tcx> {
@ -378,7 +364,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
pub fn make_lub_ty(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> Ty<'tcx> {
match self.lub().relate(&t1, &t2) {
Ok(t) => t,
Err(ref e) => panic!("unexpected error computing LUB: {}", e)
Err(ref e) => panic!("unexpected error computing LUB: {}", e),
}
}
@ -386,12 +372,9 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
/// region checks).
pub fn check_sub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) {
match self.sub().relate(&t1, &t2) {
Ok(_) => { }
Ok(_) => {}
Err(ref e) => {
panic!("unexpected error computing sub({:?},{:?}): {}",
t1,
t2,
e);
panic!("unexpected error computing sub({:?},{:?}): {}", t1, t2, e);
}
}
}
@ -400,11 +383,9 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
/// region checks).
pub fn check_not_sub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) {
match self.sub().relate(&t1, &t2) {
Err(_) => { }
Err(_) => {}
Ok(_) => {
panic!("unexpected success computing sub({:?},{:?})",
t1,
t2);
panic!("unexpected success computing sub({:?},{:?})", t1, t2);
}
}
}
@ -453,18 +434,16 @@ fn contravariant_region_ptr_ok() {
#[test]
fn contravariant_region_ptr_err() {
test_env(EMPTY_SOURCE_STR,
errors(&["lifetime mismatch"]),
|env| {
env.create_simple_region_hierarchy();
let t_rptr1 = env.t_rptr_scope(1);
let t_rptr10 = env.t_rptr_scope(10);
env.assert_eq(t_rptr1, t_rptr1);
env.assert_eq(t_rptr10, t_rptr10);
test_env(EMPTY_SOURCE_STR, errors(&["lifetime mismatch"]), |env| {
env.create_simple_region_hierarchy();
let t_rptr1 = env.t_rptr_scope(1);
let t_rptr10 = env.t_rptr_scope(10);
env.assert_eq(t_rptr1, t_rptr1);
env.assert_eq(t_rptr10, t_rptr10);
// will cause an error when regions are resolved
env.make_subtype(t_rptr10, t_rptr1);
})
// will cause an error when regions are resolved
env.make_subtype(t_rptr10, t_rptr1);
})
}
#[test]
@ -661,8 +640,10 @@ fn glb_bound_free_infer() {
// `&'_ isize`
let t_resolve1 = env.infcx.shallow_resolve(t_infer1);
match t_resolve1.sty {
ty::TyRef(..) => { }
_ => { panic!("t_resolve1={:?}", t_resolve1); }
ty::TyRef(..) => {}
_ => {
panic!("t_resolve1={:?}", t_resolve1);
}
}
})
}
@ -819,15 +800,13 @@ fn walk_ty() {
let tcx = env.infcx.tcx;
let int_ty = tcx.types.isize;
let uint_ty = tcx.types.usize;
let tup1_ty = tcx.mk_tup(vec!(int_ty, uint_ty, int_ty, uint_ty));
let tup2_ty = tcx.mk_tup(vec!(tup1_ty, tup1_ty, uint_ty));
let tup1_ty = tcx.mk_tup(vec![int_ty, uint_ty, int_ty, uint_ty]);
let tup2_ty = tcx.mk_tup(vec![tup1_ty, tup1_ty, uint_ty]);
let uniq_ty = tcx.mk_box(tup2_ty);
let walked: Vec<_> = uniq_ty.walk().collect();
assert_eq!(walked, [uniq_ty,
tup2_ty,
tup1_ty, int_ty, uint_ty, int_ty, uint_ty,
tup1_ty, int_ty, uint_ty, int_ty, uint_ty,
uint_ty]);
assert_eq!(walked,
[uniq_ty, tup2_ty, tup1_ty, int_ty, uint_ty, int_ty, uint_ty, tup1_ty, int_ty,
uint_ty, int_ty, uint_ty, uint_ty]);
})
}
@ -837,13 +816,13 @@ fn walk_ty_skip_subtree() {
let tcx = env.infcx.tcx;
let int_ty = tcx.types.isize;
let uint_ty = tcx.types.usize;
let tup1_ty = tcx.mk_tup(vec!(int_ty, uint_ty, int_ty, uint_ty));
let tup2_ty = tcx.mk_tup(vec!(tup1_ty, tup1_ty, uint_ty));
let tup1_ty = tcx.mk_tup(vec![int_ty, uint_ty, int_ty, uint_ty]);
let tup2_ty = tcx.mk_tup(vec![tup1_ty, tup1_ty, uint_ty]);
let uniq_ty = tcx.mk_box(tup2_ty);
// types we expect to see (in order), plus a boolean saying
// whether to skip the subtree.
let mut expected = vec!((uniq_ty, false),
let mut expected = vec![(uniq_ty, false),
(tup2_ty, false),
(tup1_ty, false),
(int_ty, false),
@ -851,7 +830,7 @@ fn walk_ty_skip_subtree() {
(int_ty, false),
(uint_ty, false),
(tup1_ty, true), // skip the isize/usize/isize/usize
(uint_ty, false));
(uint_ty, false)];
expected.reverse();
let mut walker = uniq_ty.walk();
@ -859,7 +838,9 @@ fn walk_ty_skip_subtree() {
debug!("walked to {:?}", t);
let (expected_ty, skip) = expected.pop().unwrap();
assert_eq!(t, expected_ty);
if skip { walker.skip_current_subtree(); }
if skip {
walker.skip_current_subtree();
}
}
assert!(expected.is_empty());