Rollup merge of #38631 - alexcrichton:supafast, r=brson
rustbuild: Compile rustc twice, not thrice

This commit switches the rustbuild build system to compiling the compiler twice for a normal bootstrap rather than the historical three times.

Rust is a bootstrapped language, which means that a previous version of the compiler is used to build the next version of the compiler. Over time, however, we change many parts of compiler artifacts such as the metadata format, symbol names, etc. These changes make artifacts from one compiler incompatible with another compiler. Consequently, if a compiler wants to be able to use some artifacts, then it must have compiled those artifacts itself.

Historically the rustc build system has achieved this by compiling the compiler three times:

* An older compiler (stage0) is downloaded to kick off the chain.
* This compiler now compiles a new compiler (stage1).
* The stage1 compiler then compiles another compiler (stage2).
* Finally, the stage2 compiler needs libraries to link against, so it compiles all the libraries again.

This entire process amounts to compiling the compiler three times. Additionally, it always guarantees that the Rust source tree can compile itself, because the stage2 compiler (created by a freshly created compiler) would successfully compile itself again. This property, ensuring Rust can compile itself, is quite important!

In general, though, this third compilation is not required for general purpose development on the compiler. The third compiler (stage2) can reuse the libraries that were created during the second compile. In other words, the second compilation can produce both a compiler and the libraries that compiler will use. These artifacts *must* be compatible due to the way plugins work today anyway, and they were created by the same source code, so they *should* be compatible as well.

So given all that, this commit switches the default build process to only compile the compiler two times, avoiding the third compilation by copying artifacts from the previous one. Along the way a new entry in the Travis matrix was also added to ensure that our full bootstrap can succeed. This entry does not run tests, though, as it should not be necessary.

To restore the old behavior of a full bootstrap (three compiles) you can either pass:

    ./configure --enable-full-bootstrap

or, if you're using config.toml:

    [build]
    full-bootstrap = true

Overall this will hopefully be an easy 33% win in build times of the compiler. If we do 33% less work we should be 33% faster! This in turn should positively affect cycle times on Travis and AppVeyor, as well as making it easier to work on the compiler itself.
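To make the stage-reuse decision concrete, here is a minimal sketch of the check described above; it is not the actual rustbuild code (the names BuildConfig, artifact_stage, and the main demo are illustrative), but it mirrors the force_use_stage1 logic added later in this diff: stage2 reuses the stage1 libraries unless a full bootstrap was requested or the target is not one of the build's host triples.

    // Hypothetical illustration of the two-compile vs. full-bootstrap decision.
    struct BuildConfig {
        /// Set by `--enable-full-bootstrap` / `full-bootstrap = true`.
        full_bootstrap: bool,
        /// Host triples this build produces compilers for.
        hosts: Vec<String>,
    }

    /// Returns the stage whose libraries a `stage` compiler links against
    /// when building for `target`.
    fn artifact_stage(cfg: &BuildConfig, stage: u32, target: &str) -> u32 {
        let is_host = cfg.hosts.iter().any(|h| h == target);
        if !cfg.full_bootstrap && stage >= 2 && is_host {
            // Two-compile mode: stage2 lifts the libraries already produced
            // during the stage1 compile instead of building them again.
            1
        } else {
            // Full bootstrap (or cross-compile): build the libraries at this stage.
            stage
        }
    }

    fn main() {
        let cfg = BuildConfig {
            full_bootstrap: false,
            hosts: vec!["x86_64-unknown-linux-gnu".to_string()],
        };
        // Default configuration: stage2 links against stage1 artifacts.
        assert_eq!(artifact_stage(&cfg, 2, "x86_64-unknown-linux-gnu"), 1);

        // Full bootstrap: the libraries are compiled a third time.
        let full = BuildConfig { full_bootstrap: true, ..cfg };
        assert_eq!(artifact_stage(&full, 2, "x86_64-unknown-linux-gnu"), 2);
    }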
This commit is contained in: commit 9bb3543885
@@ -19,6 +19,7 @@ matrix:
    - env: IMAGE=i686-gnu-nopt
    - env: IMAGE=x86_64-freebsd
    - env: IMAGE=x86_64-gnu
    - env: IMAGE=x86_64-gnu-full-bootstrap
    - env: IMAGE=x86_64-gnu-cargotest
    - env: IMAGE=x86_64-gnu-debug
    - env: IMAGE=x86_64-gnu-nopt
configure
@@ -693,6 +693,7 @@ opt_nosave manage-submodules 1 "let the build manage the git submodules"
opt_nosave clang 0 "prefer clang to gcc for building the runtime"
opt_nosave jemalloc 1 "build liballoc with jemalloc"
opt elf-tls 1 "elf thread local storage on platforms where supported"
opt full-bootstrap 0 "build three compilers instead of two"

valopt_nosave prefix "/usr/local" "set installation prefix"
valopt_nosave local-rust-root "/usr/local" "set prefix for local rust binary"
@@ -341,12 +341,22 @@ pub fn krate(build: &Build,
    println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
             compiler.host, target);

    // If we're not doing a full bootstrap but we're testing a stage2 version of
    // libstd, then what we're actually testing is the libstd produced in
    // stage1. Reflect that here by updating the compiler that we're working
    // with automatically.
    let compiler = if build.force_use_stage1(compiler, target) {
        Compiler::new(1, compiler.host)
    } else {
        compiler.clone()
    };

    // Build up the base `cargo test` command.
    //
    // Pass in some standard flags then iterate over the graph we've discovered
    // in `cargo metadata` with the maps above and figure out what `-p`
    // arguments need to get passed.
    let mut cargo = build.cargo(compiler, mode, target, test_kind.subcommand());
    let mut cargo = build.cargo(&compiler, mode, target, test_kind.subcommand());
    cargo.arg("--manifest-path")
         .arg(build.src.join(path).join("Cargo.toml"))
         .arg("--features").arg(features);
@@ -380,7 +390,7 @@ pub fn krate(build: &Build,
    // Note that to run the compiler we need to run with the *host* libraries,
    // but our wrapper scripts arrange for that to be the case anyway.
    let mut dylib_path = dylib_path();
    dylib_path.insert(0, build.sysroot_libdir(compiler, target));
    dylib_path.insert(0, build.sysroot_libdir(&compiler, target));
    cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());

    if target.contains("android") {
@@ -399,10 +409,10 @@ pub fn krate(build: &Build,

    if target.contains("android") {
        build.run(&mut cargo);
        krate_android(build, compiler, target, mode);
        krate_android(build, &compiler, target, mode);
    } else if target.contains("emscripten") {
        build.run(&mut cargo);
        krate_emscripten(build, compiler, target, mode);
        krate_emscripten(build, &compiler, target, mode);
    } else {
        cargo.args(&build.flags.cmd.test_args());
        build.run(&mut cargo);
@@ -33,14 +33,14 @@ use {Build, Compiler, Mode};
/// This will build the standard library for a particular stage of the build
/// using the `compiler` targeting the `target` architecture. The artifacts
/// created will also be linked into the sysroot directory.
pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
    println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
             compiler.host, target);

pub fn std(build: &Build, target: &str, compiler: &Compiler) {
    let libdir = build.sysroot_libdir(compiler, target);
    let _ = fs::remove_dir_all(&libdir);
    t!(fs::create_dir_all(&libdir));

    println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
             compiler.host, target);

    // Some platforms have startup objects that may be required to produce the
    // libstd dynamic library, for example.
    build_startup_objects(build, target, &libdir);
@@ -65,29 +65,30 @@ pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {

    build.run(&mut cargo);
    update_mtime(&libstd_stamp(build, &compiler, target));
    std_link(build, target, compiler.stage, compiler.host);
}

/// Link all libstd rlibs/dylibs into the sysroot location.
///
/// Links those artifacts generated in the given `stage` for `target` produced
/// by `compiler` into `host`'s sysroot.
/// Links those artifacts generated by `compiler` to the `stage` compiler's
/// sysroot for the specified `host` and `target`.
///
/// Note that this assumes that `compiler` has already generated the libstd
/// libraries for `target`, and this method will find them in the relevant
/// output directory.
pub fn std_link(build: &Build,
                target: &str,
                stage: u32,
                host: &str) {
    let compiler = Compiler::new(stage, &build.config.build);
    let target_compiler = Compiler::new(compiler.stage, host);
                compiler: &Compiler,
                target_compiler: &Compiler,
                target: &str) {
    println!("Copying stage{} std from stage{} ({} -> {} / {})",
             target_compiler.stage,
             compiler.stage,
             compiler.host,
             target_compiler.host,
             target);
    let libdir = build.sysroot_libdir(&target_compiler, target);
    let out_dir = build.cargo_out(&compiler, Mode::Libstd, target);

    // If we're linking one compiler host's output into another, then we weren't
    // called from the `std` method above. In that case we clean out what's
    // already there.
    if host != compiler.host {
        let _ = fs::remove_dir_all(&libdir);
        t!(fs::create_dir_all(&libdir));
    }
    t!(fs::create_dir_all(&libdir));
    add_to_sysroot(&out_dir, &libdir);

    if target.contains("musl") && !target.contains("mips") {
@@ -137,7 +138,7 @@ fn build_startup_objects(build: &Build, target: &str, into: &Path) {
/// This will build libtest and supporting libraries for a particular stage of
/// the build using the `compiler` targeting the `target` architecture. The
/// artifacts created will also be linked into the sysroot directory.
pub fn test<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
pub fn test(build: &Build, target: &str, compiler: &Compiler) {
    println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
             compiler.host, target);
    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
@@ -147,19 +148,13 @@ pub fn test<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
    build.run(&mut cargo);
    update_mtime(&libtest_stamp(build, compiler, target));
    test_link(build, target, compiler.stage, compiler.host);
}

/// Link all libtest rlibs/dylibs into the sysroot location.
///
/// Links those artifacts generated in the given `stage` for `target` produced
/// by `compiler` into `host`'s sysroot.
/// Same as `std_link`, only for libtest
pub fn test_link(build: &Build,
                 target: &str,
                 stage: u32,
                 host: &str) {
    let compiler = Compiler::new(stage, &build.config.build);
    let target_compiler = Compiler::new(compiler.stage, host);
                 compiler: &Compiler,
                 target_compiler: &Compiler,
                 target: &str) {
    let libdir = build.sysroot_libdir(&target_compiler, target);
    let out_dir = build.cargo_out(&compiler, Mode::Libtest, target);
    add_to_sysroot(&out_dir, &libdir);
@@ -170,7 +165,7 @@ pub fn test_link(build: &Build,
/// This will build the compiler for a particular stage of the build using
/// the `compiler` targeting the `target` architecture. The artifacts
/// created will also be linked into the sysroot directory.
pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
    println!("Building stage{} compiler artifacts ({} -> {})",
             compiler.stage, compiler.host, target);

@@ -222,20 +217,13 @@ pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
        cargo.env("CFG_DEFAULT_AR", s);
    }
    build.run(&mut cargo);

    rustc_link(build, target, compiler.stage, compiler.host);
}

/// Link all librustc rlibs/dylibs into the sysroot location.
///
/// Links those artifacts generated in the given `stage` for `target` produced
/// by `compiler` into `host`'s sysroot.
/// Same as `std_link`, only for librustc
pub fn rustc_link(build: &Build,
                  target: &str,
                  stage: u32,
                  host: &str) {
    let compiler = Compiler::new(stage, &build.config.build);
    let target_compiler = Compiler::new(compiler.stage, host);
                  compiler: &Compiler,
                  target_compiler: &Compiler,
                  target: &str) {
    let libdir = build.sysroot_libdir(&target_compiler, target);
    let out_dir = build.cargo_out(&compiler, Mode::Librustc, target);
    add_to_sysroot(&out_dir, &libdir);
@@ -259,6 +247,17 @@ fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
    PathBuf::from(out.trim())
}

pub fn create_sysroot(build: &Build, compiler: &Compiler) {
    // nothing to do in stage0
    if compiler.stage == 0 {
        return
    }

    let sysroot = build.sysroot(compiler);
    let _ = fs::remove_dir_all(&sysroot);
    t!(fs::create_dir_all(&sysroot));
}

/// Prepare a new compiler from the artifacts in `stage`
///
/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
@@ -269,18 +268,17 @@ pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
    if stage == 0 {
        return
    }

    println!("Copying stage{} compiler ({})", stage, host);

    // The compiler that we're assembling
    let target_compiler = Compiler::new(stage, host);

    // The compiler that compiled the compiler we're assembling
    let build_compiler = Compiler::new(stage - 1, &build.config.build);

    // Clear out old files
    let sysroot = build.sysroot(&target_compiler);
    let _ = fs::remove_dir_all(&sysroot);
    t!(fs::create_dir_all(&sysroot));

    // Link in all dylibs to the libdir
    let sysroot = build.sysroot(&target_compiler);
    let sysroot_libdir = sysroot.join(libdir(host));
    t!(fs::create_dir_all(&sysroot_libdir));
    let src_libdir = build.sysroot_libdir(&build_compiler, host);
@@ -46,6 +46,7 @@ pub struct Config {
    pub docs: bool,
    pub vendor: bool,
    pub target_config: HashMap<String, Target>,
    pub full_bootstrap: bool,

    // llvm codegen options
    pub llvm_assertions: bool,
@@ -134,6 +135,7 @@ struct Build {
    vendor: Option<bool>,
    nodejs: Option<String>,
    python: Option<String>,
    full_bootstrap: Option<bool>,
}

/// TOML representation of various global install decisions.
@@ -264,6 +266,7 @@ impl Config {
        set(&mut config.docs, build.docs);
        set(&mut config.submodules, build.submodules);
        set(&mut config.vendor, build.vendor);
        set(&mut config.full_bootstrap, build.full_bootstrap);

        if let Some(ref install) = toml.install {
            config.prefix = install.prefix.clone();
@@ -393,6 +396,7 @@ impl Config {
            ("NINJA", self.ninja),
            ("CODEGEN_TESTS", self.codegen_tests),
            ("VENDOR", self.vendor),
            ("FULL_BOOTSTRAP", self.full_bootstrap),
        }

        match key {
@@ -100,6 +100,13 @@
# Indicate whether the vendored sources are used for Rust dependencies or not
#vendor = false

# Typically the build system will build the rust compiler twice. The second
# compiler, however, will simply use its own libraries to link against. If you
# would rather perform a full bootstrap, compiling the compiler three times,
# then you can set this option to true. You shouldn't ever need to set this
# option to true.
#full-bootstrap = false

# =============================================================================
# General install configuration options
# =============================================================================
@@ -137,6 +137,11 @@ pub fn std(build: &Build, stage: u32, target: &str) {
    let out = build.doc_out(target);
    t!(fs::create_dir_all(&out));
    let compiler = Compiler::new(stage, &build.config.build);
    let compiler = if build.force_use_stage1(&compiler, target) {
        Compiler::new(1, compiler.host)
    } else {
        compiler
    };
    let out_dir = build.stage_out(&compiler, Mode::Libstd)
                       .join(target).join("doc");
    let rustdoc = build.rustdoc(&compiler);
@@ -160,6 +165,11 @@ pub fn test(build: &Build, stage: u32, target: &str) {
    let out = build.doc_out(target);
    t!(fs::create_dir_all(&out));
    let compiler = Compiler::new(stage, &build.config.build);
    let compiler = if build.force_use_stage1(&compiler, target) {
        Compiler::new(1, compiler.host)
    } else {
        compiler
    };
    let out_dir = build.stage_out(&compiler, Mode::Libtest)
                       .join(target).join("doc");
    let rustdoc = build.rustdoc(&compiler);
@@ -182,6 +192,11 @@ pub fn rustc(build: &Build, stage: u32, target: &str) {
    let out = build.doc_out(target);
    t!(fs::create_dir_all(&out));
    let compiler = Compiler::new(stage, &build.config.build);
    let compiler = if build.force_use_stage1(&compiler, target) {
        Compiler::new(1, compiler.host)
    } else {
        compiler
    };
    let out_dir = build.stage_out(&compiler, Mode::Librustc)
                       .join(target).join("doc");
    let rustdoc = build.rustdoc(&compiler);
@@ -572,9 +572,7 @@ impl Build {
        let mut cmd = Command::new(self.tool(&compiler, tool));
        let host = compiler.host;
        let mut paths = vec![
            self.cargo_out(compiler, Mode::Libstd, host).join("deps"),
            self.cargo_out(compiler, Mode::Libtest, host).join("deps"),
            self.cargo_out(compiler, Mode::Librustc, host).join("deps"),
            self.sysroot_libdir(compiler, compiler.host),
            self.cargo_out(compiler, Mode::Tool, host).join("deps"),
        ];

@@ -880,6 +878,30 @@ impl Build {
    fn python(&self) -> &Path {
        self.config.python.as_ref().unwrap()
    }

    /// Tests whether the `compiler` compiling for `target` should be forced to
    /// use a stage1 compiler instead.
    ///
    /// Currently, by default, the build system does not perform a "full
    /// bootstrap" where we compile the compiler three times.
    /// Instead, we compile the compiler two times. The final stage (stage2)
    /// just copies the libraries from the previous stage, which is what this
    /// method detects.
    ///
    /// Here we return `true` if:
    ///
    /// * The build isn't performing a full bootstrap
    /// * The `compiler` is in the final stage, 2
    /// * We're not cross-compiling, so the artifacts are already available in
    ///   stage1
    ///
    /// When all of these conditions are met the build will lift artifacts from
    /// the previous stage forward.
    fn force_use_stage1(&self, compiler: &Compiler, target: &str) -> bool {
        !self.config.full_bootstrap &&
            compiler.stage >= 2 &&
            self.config.host.iter().any(|h| h == target)
    }
}

impl<'a> Compiler<'a> {
@@ -44,7 +44,7 @@ pub fn run(build: &Build) {
    rules.run(&steps);
}

pub fn build_rules(build: &Build) -> Rules {
pub fn build_rules<'a>(build: &'a Build) -> Rules {
    let mut rules = Rules::new(build);

    // This is the first rule that we're going to define for rustbuild, which is
@@ -117,6 +117,7 @@ pub fn build_rules(build: &Build) -> Rules {

    // the compiler with no target libraries ready to go
    rules.build("rustc", "src/rustc")
         .dep(|s| s.name("create-sysroot").target(s.host))
         .dep(move |s| {
             if s.stage == 0 {
                 Step::noop()
@@ -151,69 +152,131 @@ pub fn build_rules(build: &Build) -> Rules {
    // Crate compilations
    //
    // Tools used during the build system but not shipped
    rules.build("create-sysroot", "path/to/nowhere")
         .run(move |s| compile::create_sysroot(build, &s.compiler()));

    // These rules are "pseudo rules" that don't actually do any work
    // themselves, but represent a complete sysroot with the relevant compiler
    // linked into place.
    //
    // That is, depending on "libstd" means that when the rule is completed then
    // the `stage` sysroot for the compiler `host` will be available with a
    // standard library built for `target` linked in place. Not all rules need
    // the compiler itself to be available, just the standard library, so
    // there's a distinction between the two.
    rules.build("libstd", "src/libstd")
         .dep(|s| s.name("build-crate-std_shim"));
         .dep(|s| s.name("rustc").target(s.host))
         .dep(|s| s.name("libstd-link"));
    rules.build("libtest", "src/libtest")
         .dep(|s| s.name("build-crate-test_shim"));
         .dep(|s| s.name("libstd"))
         .dep(|s| s.name("libtest-link"))
         .default(true);
    rules.build("librustc", "src/librustc")
         .dep(|s| s.name("build-crate-rustc-main"));
         .dep(|s| s.name("libtest"))
         .dep(|s| s.name("librustc-link"))
         .host(true)
         .default(true);

    // Helper method to define the rules to link a crate into its place in the
    // sysroot.
    //
    // The logic here is a little subtle as there's a few cases to consider.
    // Not all combinations of (stage, host, target) actually require something
    // to be compiled, but rather libraries could get propagated from a
    // different location. For example:
    //
    // * Any crate with a `host` that's not the build triple will not actually
    //   compile something. A different `host` means that the build triple will
    //   actually compile the libraries, and then we'll copy them over from the
    //   build triple to the `host` directory.
    //
    // * Some crates aren't even compiled by the build triple, but may be copied
    //   from previous stages. For example if we're not doing a full bootstrap
    //   then we may just depend on the stage1 versions of libraries to be
    //   available to get linked forward.
    //
    // * Finally, there are some cases, however, which do indeed compile crates
    //   and link them into place afterwards.
    //
    // The rule definition below mirrors these three cases. The `dep` method
    // calculates the correct dependency which either comes from stage1, a
    // different compiler, or from actually building the crate itself (the `dep`
    // rule). The `run` rule then mirrors these three cases and links the cases
    // forward into the compiler sysroot specified from the correct location.
    fn crate_rule<'a, 'b>(build: &'a Build,
                          rules: &'b mut Rules<'a>,
                          krate: &'a str,
                          dep: &'a str,
                          link: fn(&Build, &Compiler, &Compiler, &str))
                          -> RuleBuilder<'a, 'b> {
        let mut rule = rules.build(&krate, "path/to/nowhere");
        rule.dep(move |s| {
                if build.force_use_stage1(&s.compiler(), s.target) {
                    s.host(&build.config.build).stage(1)
                } else if s.host == build.config.build {
                    s.name(dep)
                } else {
                    s.host(&build.config.build)
                }
            })
            .run(move |s| {
                if build.force_use_stage1(&s.compiler(), s.target) {
                    link(build,
                         &s.stage(1).host(&build.config.build).compiler(),
                         &s.compiler(),
                         s.target)
                } else if s.host == build.config.build {
                    link(build, &s.compiler(), &s.compiler(), s.target)
                } else {
                    link(build,
                         &s.host(&build.config.build).compiler(),
                         &s.compiler(),
                         s.target)
                }
            });
        return rule
    }

    // Similar to the `libstd`, `libtest`, and `librustc` rules above, except
    // these rules only represent the libraries being available in the sysroot,
    // not the compiler itself. This is done as not all rules need a compiler in
    // the sysroot, but may just need the libraries.
    //
    // All of these rules use the helper definition above.
    crate_rule(build,
               &mut rules,
               "libstd-link",
               "build-crate-std_shim",
               compile::std_link)
        .dep(|s| s.name("create-sysroot").target(s.host));
    crate_rule(build,
               &mut rules,
               "libtest-link",
               "build-crate-test_shim",
               compile::test_link)
        .dep(|s| s.name("libstd-link"));
    crate_rule(build,
               &mut rules,
               "librustc-link",
               "build-crate-rustc-main",
               compile::rustc_link)
        .dep(|s| s.name("libtest-link"));

    for (krate, path, _default) in krates("std_shim") {
        rules.build(&krate.build_step, path)
             .dep(move |s| s.name("rustc").host(&build.config.build).target(s.host))
             .dep(move |s| {
                 if s.host == build.config.build {
                     Step::noop()
                 } else {
                     s.host(&build.config.build)
                 }
             })
             .run(move |s| {
                 if s.host == build.config.build {
                     compile::std(build, s.target, &s.compiler())
                 } else {
                     compile::std_link(build, s.target, s.stage, s.host)
                 }
             });
             .run(move |s| compile::std(build, s.target, &s.compiler()));
    }
    for (krate, path, default) in krates("test_shim") {
    for (krate, path, _default) in krates("test_shim") {
        rules.build(&krate.build_step, path)
             .dep(|s| s.name("libstd"))
             .dep(move |s| {
                 if s.host == build.config.build {
                     Step::noop()
                 } else {
                     s.host(&build.config.build)
                 }
             })
             .default(default)
             .run(move |s| {
                 if s.host == build.config.build {
                     compile::test(build, s.target, &s.compiler())
                 } else {
                     compile::test_link(build, s.target, s.stage, s.host)
                 }
             });
             .dep(|s| s.name("libstd-link"))
             .run(move |s| compile::test(build, s.target, &s.compiler()));
    }
    for (krate, path, default) in krates("rustc-main") {
    for (krate, path, _default) in krates("rustc-main") {
        rules.build(&krate.build_step, path)
             .dep(|s| s.name("libtest"))
             .dep(|s| s.name("libtest-link"))
             .dep(move |s| s.name("llvm").host(&build.config.build).stage(0))
             .dep(move |s| {
                 if s.host == build.config.build {
                     Step::noop()
                 } else {
                     s.host(&build.config.build)
                 }
             })
             .host(true)
             .default(default)
             .run(move |s| {
                 if s.host == build.config.build {
                     compile::rustc(build, s.target, &s.compiler())
                 } else {
                     compile::rustc_link(build, s.target, s.stage, s.host)
                 }
             });
             .run(move |s| compile::rustc(build, s.target, &s.compiler()));
    }

    // ========================================================================
@@ -444,25 +507,25 @@ pub fn build_rules(build: &Build) -> Rules {
         .run(move |s| doc::standalone(build, s.stage, s.target));
    rules.doc("doc-error-index", "src/tools/error_index_generator")
         .dep(move |s| s.name("tool-error-index").target(&build.config.build))
         .dep(move |s| s.name("librustc"))
         .dep(move |s| s.name("librustc-link"))
         .default(build.config.docs)
         .host(true)
         .run(move |s| doc::error_index(build, s.stage, s.target));
    for (krate, path, default) in krates("std_shim") {
        rules.doc(&krate.doc_step, path)
             .dep(|s| s.name("libstd"))
             .dep(|s| s.name("libstd-link"))
             .default(default && build.config.docs)
             .run(move |s| doc::std(build, s.stage, s.target));
    }
    for (krate, path, default) in krates("test_shim") {
        rules.doc(&krate.doc_step, path)
             .dep(|s| s.name("libtest"))
             .dep(|s| s.name("libtest-link"))
             .default(default && build.config.compiler_docs)
             .run(move |s| doc::test(build, s.stage, s.target));
    }
    for (krate, path, default) in krates("rustc-main") {
        rules.doc(&krate.doc_step, path)
             .dep(|s| s.name("librustc"))
             .dep(|s| s.name("librustc-link"))
             .host(true)
             .default(default && build.config.compiler_docs)
             .run(move |s| doc::rustc(build, s.stage, s.target));
@@ -481,9 +544,9 @@ pub fn build_rules(build: &Build) -> Rules {
             // for the `rust-std` package, so if this is a host target we
             // depend on librustc and otherwise we just depend on libtest.
             if build.config.host.iter().any(|t| t == s.target) {
                 s.name("librustc")
                 s.name("librustc-link")
             } else {
                 s.name("libtest")
                 s.name("libtest-link")
             }
         })
         .default(true)
src/ci/docker/x86_64-gnu-full-bootstrap/Dockerfile (new file, 28 lines)
@@ -0,0 +1,28 @@
FROM ubuntu:16.04

RUN apt-get update && apt-get install -y --no-install-recommends \
  g++ \
  make \
  file \
  curl \
  ca-certificates \
  python2.7 \
  git \
  cmake \
  sudo \
  gdb \
  xz-utils

ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
      tar xJf - -C /usr/local/bin --strip-components=1

RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
    dpkg -i dumb-init_*.deb && \
    rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]

ENV RUST_CONFIGURE_ARGS \
      --build=x86_64-unknown-linux-gnu \
      --enable-full-bootstrap
ENV RUST_CHECK_TARGET ""