rust/src/bootstrap/tool.rs

use std::fs;
use std::env;
use std::path::PathBuf;
use std::process::{Command, exit};
use std::collections::HashSet;
use build_helper::t;
use crate::Mode;
use crate::Compiler;
use crate::builder::{Step, RunConfig, ShouldRun, Builder};
use crate::util::{exe, add_lib_path, CiEnv};
use crate::compile;
use crate::channel::GitInfo;
use crate::channel;
use crate::cache::Interned;
use crate::toolstate::ToolState;
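/// Whether a tool's source lives in this repository (`InTree`) or is vendored
/// as a git submodule (`Submodule`). `prepare_tool_cargo` uses this to set the
/// `RUSTC_EXTERNAL_TOOL` environment variable for submodule tools.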
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub enum SourceType {
InTree,
Submodule,
}
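/// A single Cargo invocation that builds one tool in `src/tools` for `target`
/// using `compiler`. On a failed build the toolstate is recorded as
/// `BuildFail`; optional tools then return `None`, while essential tools abort
/// bootstrap entirely.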
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
struct ToolBuild {
compiler: Compiler,
target: Interned<String>,
tool: &'static str,
path: &'static str,
mode: Mode,
is_optional_tool: bool,
source_type: SourceType,
extra_features: Vec<String>,
}
impl Step for ToolBuild {
type Output = Option<PathBuf>;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.never()
}
/// Builds a tool in `src/tools`
///
/// This will build the specified tool with the specified `host` compiler in
/// `stage` into the normal cargo output directory.
fn run(self, builder: &Builder<'_>) -> Option<PathBuf> {
let compiler = self.compiler;
let target = self.target;
let tool = self.tool;
let path = self.path;
let is_optional_tool = self.is_optional_tool;
match self.mode {
Mode::ToolRustc => {
builder.ensure(compile::Rustc { compiler, target })
}
Mode::ToolStd => {
builder.ensure(compile::Std { compiler, target })
}
Mode::ToolBootstrap => {} // uses downloaded stage0 compiler libs
_ => panic!("unexpected Mode for tool build")
}
let mut cargo = prepare_tool_cargo(
builder,
compiler,
self.mode,
target,
"build",
path,
self.source_type,
&self.extra_features,
);
builder.info(&format!("Building stage{} tool {} ({})", compiler.stage, tool, target));
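// Crates that end up compiled more than once with mismatching artifacts are
// collected here while streaming Cargo's JSON messages below, then reported
// as a hard error once the build finishes.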
let mut duplicates = Vec::new();
let is_expected = compile::stream_cargo(builder, &mut cargo, vec![], &mut |msg| {
// Only care about big things like the RLS/Cargo for now
match tool {
| "rls"
| "cargo"
| "clippy-driver"
| "miri"
| "rustfmt"
=> {}
_ => return,
}
let (id, features, filenames) = match msg {
compile::CargoMessage::CompilerArtifact {
package_id,
features,
filenames,
target: _,
} => {
(package_id, features, filenames)
}
_ => return,
};
let features = features.iter().map(|s| s.to_string()).collect::<Vec<_>>();
for path in filenames {
let val = (tool, PathBuf::from(&*path), features.clone());
// we're only interested in deduplicating rlibs for now
if val.1.extension().and_then(|s| s.to_str()) != Some("rlib") {
continue
}
// Don't worry about compiles that turn out to be host
// dependencies or build scripts. To skip these we look for
// anything that goes in `.../release/deps` but *doesn't* go in
// `$target/release/deps`. This ensures that outputs in
// `$target/release` are still considered candidates for
// deduplication.
if let Some(parent) = val.1.parent() {
if parent.ends_with("release/deps") {
let maybe_target = parent
.parent()
.and_then(|p| p.parent())
.and_then(|p| p.file_name())
.and_then(|p| p.to_str())
.unwrap();
if maybe_target != &*target {
continue;
}
}
}
// Record that we've built an artifact for `id`, and if one was
// already listed then we need to see if we reused the same
// artifact or produced a duplicate.
let mut artifacts = builder.tool_artifacts.borrow_mut();
let prev_artifacts = artifacts
.entry(target)
.or_default();
let prev = match prev_artifacts.get(&*id) {
Some(prev) => prev,
None => {
prev_artifacts.insert(id.to_string(), val);
continue;
}
};
if prev.1 == val.1 {
return; // same path, same artifact
}
// If the paths are different and one of them *isn't* inside of
// `release/deps`, then it means it's probably in
// `$target/release`, or it's some final artifact like
// `libcargo.rlib`. In these situations Cargo probably just
// copied it up from `$target/release/deps/libcargo-xxxx.rlib`,
// so if the features are equal we can just skip it.
let prev_no_hash = prev.1.parent().unwrap().ends_with("release/deps");
let val_no_hash = val.1.parent().unwrap().ends_with("release/deps");
if prev.2 == val.2 || !prev_no_hash || !val_no_hash {
return;
}
// ... and otherwise this looks like we duplicated some sort of
// compilation, so record it to generate an error later.
duplicates.push((
id.to_string(),
val,
prev.clone(),
));
}
});
if is_expected && !duplicates.is_empty() {
println!("duplicate artifacts found when compiling a tool, this \
typically means that something was recompiled because \
a transitive dependency has different features activated \
than in a previous build:\n");
println!("the following dependencies are duplicated although they \
have the same features enabled:");
for (id, cur, prev) in duplicates.drain_filter(|(_, cur, prev)| cur.2 == prev.2) {
println!(" {}", id);
// same features
println!(" `{}` ({:?})\n `{}` ({:?})", cur.0, cur.1, prev.0, prev.1);
}
println!("the following dependencies have different features:");
for (id, cur, prev) in duplicates {
println!(" {}", id);
let cur_features: HashSet<_> = cur.2.into_iter().collect();
let prev_features: HashSet<_> = prev.2.into_iter().collect();
println!(" `{}` additionally enabled features {:?} at {:?}",
cur.0, &cur_features - &prev_features, cur.1);
println!(" `{}` additionally enabled features {:?} at {:?}",
prev.0, &prev_features - &cur_features, prev.1);
}
println!();
println!("to fix this you will probably want to edit the local \
src/tools/rustc-workspace-hack/Cargo.toml crate, as \
that will update the dependency graph to ensure that \
these crates all share the same feature set");
panic!("tools should not compile multiple copies of the same crate");
}
builder.save_toolstate(tool, if is_expected {
ToolState::TestFail
} else {
ToolState::BuildFail
});
if !is_expected {
if !is_optional_tool {
exit(1);
} else {
None
}
} else {
let cargo_out = builder.cargo_out(compiler, self.mode, target)
.join(exe(tool, &compiler.host));
let bin = builder.tools_dir(compiler).join(exe(tool, &compiler.host));
builder.copy(&cargo_out, &bin);
Some(bin)
}
}
}
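/// Prepares the Cargo command shared by all tool builds: it points Cargo at the
/// tool's manifest, forces the tool to link the standard library statically,
/// exports the `CFG_*` version/git metadata, and enables any extra features.
///
/// A typical invocation, mirroring what `ToolBuild::run` does above (shown only
/// for illustration):
///
/// ```ignore
/// let mut cargo = prepare_tool_cargo(
///     builder, compiler, Mode::ToolBootstrap, target,
///     "build", "src/tools/tidy", SourceType::InTree, &[],
/// );
/// builder.run(&mut cargo);
/// ```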
pub fn prepare_tool_cargo(
builder: &Builder<'_>,
compiler: Compiler,
mode: Mode,
target: Interned<String>,
command: &'static str,
path: &'static str,
source_type: SourceType,
extra_features: &[String],
) -> Command {
let mut cargo = builder.cargo(compiler, mode, target, command);
let dir = builder.src.join(path);
cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));
// We don't want to build tools dynamically as they'll be running across
// stages and such, and it's just easier if they're not dynamically linked.
cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
if source_type == SourceType::Submodule {
cargo.env("RUSTC_EXTERNAL_TOOL", "1");
}
let mut features = extra_features.iter().cloned().collect::<Vec<_>>();
if builder.build.config.cargo_native_static {
if path.ends_with("cargo") ||
path.ends_with("rls") ||
path.ends_with("clippy") ||
path.ends_with("miri") ||
path.ends_with("rustfmt")
{
cargo.env("LIBZ_SYS_STATIC", "1");
features.push("rustc-workspace-hack/all-static".to_string());
}
}
// if tools are using lzma we want to force the build script to build its
// own copy
cargo.env("LZMA_API_STATIC", "1");
cargo.env("CFG_RELEASE_CHANNEL", &builder.config.channel);
cargo.env("CFG_VERSION", builder.rust_version());
cargo.env("CFG_RELEASE_NUM", channel::CFG_RELEASE_NUM);
let info = GitInfo::new(builder.config.ignore_git, &dir);
if let Some(sha) = info.sha() {
cargo.env("CFG_COMMIT_HASH", sha);
}
if let Some(sha_short) = info.sha_short() {
cargo.env("CFG_SHORT_COMMIT_HASH", sha_short);
}
if let Some(date) = info.commit_date() {
cargo.env("CFG_COMMIT_DATE", date);
}
if !features.is_empty() {
cargo.arg("--features").arg(&features.join(", "));
}
cargo
}
fn rustbook_features() -> Vec<String> {
let mut features = Vec::new();
// Due to the CI budget and the risk of spurious failures we want to limit the jobs running this check.
// At the same time, local builds should run it regardless of the platform.
// `CiEnv::None` means it's a local build, and `CHECK_LINKS` is defined in the x86_64-gnu-tools job to
// explicitly enable the check on a single CI job.
if CiEnv::current() == CiEnv::None || env::var("CHECK_LINKS").is_ok() {
features.push("linkcheck".to_string());
}
features
}
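// Declares one `Step` per tool that is built with the stage0 (bootstrap)
// compiler. Each generated struct delegates to `ToolBuild` with
// `Mode::ToolBootstrap`, so none of these tools require the in-tree std or
// rustc to be built first.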
macro_rules! bootstrap_tool {
($(
$name:ident, $path:expr, $tool_name:expr
$(,llvm_tools = $llvm:expr)*
$(,is_external_tool = $external:expr)*
$(,features = $features:expr)*
;
)+) => {
#[derive(Copy, PartialEq, Eq, Clone)]
pub enum Tool {
$(
$name,
)+
}
impl Tool {
/// Whether this tool requires LLVM to run
pub fn uses_llvm_tools(&self) -> bool {
match self {
$(Tool::$name => false $(|| $llvm)*,)+
}
}
}
impl<'a> Builder<'a> {
pub fn tool_exe(&self, tool: Tool) -> PathBuf {
match tool {
$(Tool::$name =>
self.ensure($name {
compiler: self.compiler(0, self.config.build),
target: self.config.build,
}),
)+
}
}
}
$(
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct $name {
pub compiler: Compiler,
pub target: Interned<String>,
}
impl Step for $name {
type Output = PathBuf;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path($path)
}
fn make_run(run: RunConfig<'_>) {
run.builder.ensure($name {
// snapshot compiler
compiler: run.builder.compiler(0, run.builder.config.build),
target: run.target,
});
}
fn run(self, builder: &Builder<'_>) -> PathBuf {
builder.ensure(ToolBuild {
compiler: self.compiler,
target: self.target,
tool: $tool_name,
mode: Mode::ToolBootstrap,
path: $path,
is_optional_tool: false,
source_type: if false $(|| $external)* {
SourceType::Submodule
} else {
SourceType::InTree
},
extra_features: {
// FIXME(#60643): avoid this lint by using `_`
let mut _tmp = Vec::new();
$(_tmp.extend($features);)*
_tmp
},
}).expect("expected to build -- essential tool")
}
}
)+
}
}
bootstrap_tool!(
Rustbook, "src/tools/rustbook", "rustbook", features = rustbook_features();
UnstableBookGen, "src/tools/unstable-book-gen", "unstable-book-gen";
Tidy, "src/tools/tidy", "tidy";
Linkchecker, "src/tools/linkchecker", "linkchecker";
CargoTest, "src/tools/cargotest", "cargotest";
Compiletest, "src/tools/compiletest", "compiletest", llvm_tools = true;
BuildManifest, "src/tools/build-manifest", "build-manifest";
RemoteTestClient, "src/tools/remote-test-client", "remote-test-client";
RustInstaller, "src/tools/rust-installer", "fabricate", is_external_tool = true;
RustdocTheme, "src/tools/rustdoc-themes", "rustdoc-themes";
);
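/// Builds `error_index_generator`, the tool that renders the extended error
/// code documentation. It is a `Mode::ToolRustc` tool and therefore links
/// against the locally built compiler crates.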
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct ErrorIndex {
pub compiler: Compiler,
}
impl ErrorIndex {
pub fn command(builder: &Builder<'_>, compiler: Compiler) -> Command {
let mut cmd = Command::new(builder.ensure(ErrorIndex {
compiler
}));
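// The generated binary links against the in-tree compiler libraries, so the
// matching sysroot libdir has to be on the dynamic library search path.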
add_lib_path(
vec![PathBuf::from(&builder.sysroot_libdir(compiler, compiler.host))],
&mut cmd,
);
cmd
}
}
impl Step for ErrorIndex {
type Output = PathBuf;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/tools/error_index_generator")
}
fn make_run(run: RunConfig<'_>) {
// Compile the error-index in the same stage as rustdoc to avoid
// recompiling rustdoc twice if we can.
let stage = if run.builder.top_stage >= 2 { run.builder.top_stage } else { 0 };
run.builder.ensure(ErrorIndex {
compiler: run.builder.compiler(stage, run.builder.config.build),
});
}
fn run(self, builder: &Builder<'_>) -> PathBuf {
builder.ensure(ToolBuild {
compiler: self.compiler,
target: self.compiler.host,
tool: "error_index_generator",
mode: Mode::ToolRustc,
path: "src/tools/error_index_generator",
is_optional_tool: false,
source_type: SourceType::InTree,
extra_features: Vec::new(),
}).expect("expected to build -- essential tool")
}
}
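/// Builds `remote-test-server`, the binary that runs on the remote device
/// during cross-compiled test runs. `Mode::ToolStd` means it is compiled
/// against the freshly built standard library for `target`.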
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RemoteTestServer {
pub compiler: Compiler,
pub target: Interned<String>,
}
impl Step for RemoteTestServer {
type Output = PathBuf;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/tools/remote-test-server")
}
fn make_run(run: RunConfig<'_>) {
run.builder.ensure(RemoteTestServer {
compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
target: run.target,
});
}
fn run(self, builder: &Builder<'_>) -> PathBuf {
builder.ensure(ToolBuild {
compiler: self.compiler,
target: self.target,
tool: "remote-test-server",
mode: Mode::ToolStd,
path: "src/tools/remote-test-server",
is_optional_tool: false,
source_type: SourceType::InTree,
extra_features: Vec::new(),
}).expect("expected to build -- essential tool")
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Rustdoc {
/// This should only ever be 0 or 2.
/// We sometimes want to reference the "bootstrap" rustdoc, which is why this option is here.
pub compiler: Compiler,
}
impl Step for Rustdoc {
type Output = PathBuf;
const DEFAULT: bool = true;
const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/tools/rustdoc")
}
fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Rustdoc {
compiler: run.builder.compiler(run.builder.top_stage, run.host),
});
}
fn run(self, builder: &Builder<'_>) -> PathBuf {
let target_compiler = self.compiler;
if target_compiler.stage == 0 {
if !target_compiler.is_snapshot(builder) {
panic!("rustdoc in stage 0 must be snapshot rustdoc");
}
return builder.initial_rustc.with_file_name(exe("rustdoc", &target_compiler.host));
}
let target = target_compiler.host;
// Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise
// we'd have stageN/bin/rustc and stageN/bin/rustdoc be effectively different stage
// compilers, which isn't what we want. Rustdoc should be linked in the same way as the
// rustc compiler it's paired with, so it must be built with the previous stage compiler.
let build_compiler = builder.compiler(target_compiler.stage - 1, builder.config.build);
// The presence of `target_compiler` ensures that the necessary libraries (codegen backends,
// compiler libraries, ...) are built. Rustdoc does not require the presence of any
// libraries within sysroot_libdir (i.e., rustlib), though doctests may want it (since
// they'll be linked to those libraries). As such, don't explicitly `ensure` any additional
// libraries here. The intuition here is that if we've built a compiler, we should be able
// to build rustdoc.
let mut cargo = prepare_tool_cargo(
builder,
build_compiler,
Mode::ToolRustc,
target,
"build",
"src/tools/rustdoc",
SourceType::InTree,
&[],
);
builder.info(&format!("Building rustdoc for stage{} ({})",
target_compiler.stage, target_compiler.host));
builder.run(&mut cargo);
// Cargo adds a number of paths to the dylib search path on windows, which results in
// the wrong rustdoc being executed. To avoid the conflicting rustdocs, we give the "tool"
// rustdoc a different name.
let tool_rustdoc = builder.cargo_out(build_compiler, Mode::ToolRustc, target)
.join(exe("rustdoc_tool_binary", &target_compiler.host));
// don't create a stage0-sysroot/bin directory.
if target_compiler.stage > 0 {
let sysroot = builder.sysroot(target_compiler);
let bindir = sysroot.join("bin");
t!(fs::create_dir_all(&bindir));
let bin_rustdoc = bindir.join(exe("rustdoc", &*target_compiler.host));
let _ = fs::remove_file(&bin_rustdoc);
builder.copy(&tool_rustdoc, &bin_rustdoc);
bin_rustdoc
} else {
tool_rustdoc
}
}
}
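/// Builds Cargo from its git submodule. Because it is a `Mode::ToolRustc`
/// tool, the in-tree rustc libraries for `target` are built first; like the
/// other shipped tools it is only built by default for extended distributions.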
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Cargo {
pub compiler: Compiler,
pub target: Interned<String>,
}
impl Step for Cargo {
type Output = PathBuf;
const DEFAULT: bool = true;
const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
run.path("src/tools/cargo").default_condition(builder.config.extended)
}
fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Cargo {
compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
target: run.target,
});
}
fn run(self, builder: &Builder<'_>) -> PathBuf {
builder.ensure(ToolBuild {
compiler: self.compiler,
target: self.target,
tool: "cargo",
mode: Mode::ToolRustc,
path: "src/tools/cargo",
is_optional_tool: false,
source_type: SourceType::Submodule,
extra_features: Vec::new(),
}).expect("expected to build -- essential tool")
}
}
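// Declares `Step`s for the optional "extended" tools (cargo-fmt, clippy, miri,
// the RLS, rustfmt, ...). They are built from submodules in `Mode::ToolRustc`
// with `is_optional_tool: true`, so a failure records the toolstate instead of
// aborting the build. The `$extra_deps` block runs before the build, which
// lets the RLS pull in clippy below.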
macro_rules! tool_extended {
(($sel:ident, $builder:ident),
$($name:ident,
$toolstate:ident,
$path:expr,
$tool_name:expr,
$extra_deps:block;)+) => {
$(
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct $name {
pub compiler: Compiler,
pub target: Interned<String>,
pub extra_features: Vec<String>,
}
impl Step for $name {
type Output = Option<PathBuf>;
const DEFAULT: bool = true;
const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
run.path($path).default_condition(builder.config.extended)
}
fn make_run(run: RunConfig<'_>) {
run.builder.ensure($name {
compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
target: run.target,
extra_features: Vec::new(),
});
}
#[allow(unused_mut)]
fn run(mut $sel, $builder: &Builder<'_>) -> Option<PathBuf> {
$extra_deps
$builder.ensure(ToolBuild {
compiler: $sel.compiler,
target: $sel.target,
tool: $tool_name,
mode: Mode::ToolRustc,
path: $path,
extra_features: $sel.extra_features,
is_optional_tool: true,
source_type: SourceType::Submodule,
})
}
}
)+
}
}
tool_extended!((self, builder),
Cargofmt, rustfmt, "src/tools/rustfmt", "cargo-fmt", {};
CargoClippy, clippy, "src/tools/clippy", "cargo-clippy", {};
Clippy, clippy, "src/tools/clippy", "clippy-driver", {};
Miri, miri, "src/tools/miri", "miri", {};
CargoMiri, miri, "src/tools/miri", "cargo-miri", {};
Rls, rls, "src/tools/rls", "rls", {
let clippy = builder.ensure(Clippy {
compiler: self.compiler,
target: self.target,
extra_features: Vec::new(),
});
if clippy.is_some() {
self.extra_features.push("clippy".to_owned());
}
};
Rustfmt, rustfmt, "src/tools/rustfmt", "rustfmt", {};
);
impl<'a> Builder<'a> {
/// Gets a `Command` which is ready to run `tool` in `stage` built for
/// `host`.
pub fn tool_cmd(&self, tool: Tool) -> Command {
let mut cmd = Command::new(self.tool_exe(tool));
let compiler = self.compiler(0, self.config.build);
let host = &compiler.host;
// Prepares the `cmd` provided to be able to run the `compiler` provided.
//
// Notably this munges the dynamic library lookup path to point to the
// right location to run `compiler`.
let mut lib_paths: Vec<PathBuf> = vec![
self.build.rustc_snapshot_libdir(),
self.cargo_out(compiler, Mode::ToolBootstrap, *host).join("deps"),
];
// On MSVC a tool may invoke a C compiler (e.g., compiletest in run-make
// mode) and that C compiler may need some extra PATH modification. Do
// so here.
if compiler.host.contains("msvc") {
let curpaths = env::var_os("PATH").unwrap_or_default();
let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
for &(ref k, ref v) in self.cc[&compiler.host].env() {
if k != "PATH" {
continue
}
for path in env::split_paths(v) {
if !curpaths.contains(&path) {
lib_paths.push(path);
}
}
}
}
add_lib_path(lib_paths, &mut cmd);
cmd
}
}