Move code into Step trait implementations.
No changes are introduced to code body. This commit will not build; it is done to permit a better diff in later commits.
This commit is contained in:
parent
0a1b5e8bc0
commit
001e9f3490
File diff suppressed because it is too large
Load Diff
|
@ -32,24 +32,6 @@ use channel::GitInfo;
|
|||
use util::{exe, libdir, is_dylib, copy};
|
||||
use {Build, Compiler, Mode};
|
||||
|
||||
// for (krate, path, _default) in krates("std") {
|
||||
// rules.build(&krate.build_step, path)
|
||||
// .dep(|s| s.name("startup-objects"))
|
||||
// .dep(move |s| s.name("rustc").host(&build.build).target(s.host))
|
||||
// .run(move |s| compile::std(build, s.target, &s.compiler()));
|
||||
// }
|
||||
// for (krate, path, _default) in krates("test") {
|
||||
// rules.build(&krate.build_step, path)
|
||||
// .dep(|s| s.name("libstd-link"))
|
||||
// .run(move |s| compile::test(build, s.target, &s.compiler()));
|
||||
// }
|
||||
// for (krate, path, _default) in krates("rustc-main") {
|
||||
// rules.build(&krate.build_step, path)
|
||||
// .dep(|s| s.name("libtest-link"))
|
||||
// .dep(move |s| s.name("llvm").host(&build.build).stage(0))
|
||||
// .dep(|s| s.name("may-run-build-script"))
|
||||
// .run(move |s| compile::rustc(build, s.target, &s.compiler()));
|
||||
// }
|
||||
//
|
||||
// // Crates which have build scripts need to rely on this rule to ensure that
|
||||
// // the necessary prerequisites for a build script are linked and located in
|
||||
|
@ -147,65 +129,84 @@ fn crate_rule<'a, 'b>(build: &'a Build,
|
|||
rule
|
||||
}
|
||||
|
||||
/// Build the standard library.
|
||||
///
|
||||
/// This will build the standard library for a particular stage of the build
|
||||
/// using the `compiler` targeting the `target` architecture. The artifacts
|
||||
/// created will also be linked into the sysroot directory.
|
||||
pub fn std(build: &Build, target: &str, compiler: &Compiler) {
|
||||
let libdir = build.sysroot_libdir(compiler, target);
|
||||
t!(fs::create_dir_all(&libdir));
|
||||
// for (krate, path, _default) in krates("std") {
|
||||
// rules.build(&krate.build_step, path)
|
||||
// .dep(|s| s.name("startup-objects"))
|
||||
// .dep(move |s| s.name("rustc").host(&build.build).target(s.host))
|
||||
// .run(move |s| compile::std(build, s.target, &s.compiler()));
|
||||
// }
|
||||
#[derive(Serialize)]
|
||||
pub struct Std<'a> {
|
||||
pub target: &'a str,
|
||||
pub compiler: &'a Compiler<'a>,
|
||||
}
|
||||
|
||||
let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
|
||||
println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
|
||||
compiler.host, target);
|
||||
impl<'a> Step<'a> for Std<'a> {
|
||||
type Output = ();
|
||||
|
||||
let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
|
||||
build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
|
||||
let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
|
||||
let mut features = build.std_features();
|
||||
/// Build the standard library.
|
||||
///
|
||||
/// This will build the standard library for a particular stage of the build
|
||||
/// using the `compiler` targeting the `target` architecture. The artifacts
|
||||
/// created will also be linked into the sysroot directory.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let target = self.target;
|
||||
let compiler = self.compiler;
|
||||
let libdir = build.sysroot_libdir(compiler, target);
|
||||
t!(fs::create_dir_all(&libdir));
|
||||
|
||||
if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
|
||||
cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
|
||||
}
|
||||
let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
|
||||
println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
|
||||
compiler.host, target);
|
||||
|
||||
// When doing a local rebuild we tell cargo that we're stage1 rather than
|
||||
// stage0. This works fine if the local rust and being-built rust have the
|
||||
// same view of what the default allocator is, but fails otherwise. Since
|
||||
// we don't have a way to express an allocator preference yet, work
|
||||
// around the issue in the case of a local rebuild with jemalloc disabled.
|
||||
if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc {
|
||||
features.push_str(" force_alloc_system");
|
||||
}
|
||||
let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
|
||||
build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
|
||||
let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
|
||||
let mut features = build.std_features();
|
||||
|
||||
if compiler.stage != 0 && build.config.sanitizers {
|
||||
// This variable is used by the sanitizer runtime crates, e.g.
|
||||
// rustc_lsan, to build the sanitizer runtime from C code
|
||||
// When this variable is missing, those crates won't compile the C code,
|
||||
// so we don't set this variable during stage0 where llvm-config is
|
||||
// missing
|
||||
// We also only build the runtimes when --enable-sanitizers (or its
|
||||
// config.toml equivalent) is used
|
||||
cargo.env("LLVM_CONFIG", build.llvm_config(target));
|
||||
}
|
||||
cargo.arg("--features").arg(features)
|
||||
.arg("--manifest-path")
|
||||
.arg(build.src.join("src/libstd/Cargo.toml"));
|
||||
|
||||
if let Some(target) = build.config.target_config.get(target) {
|
||||
if let Some(ref jemalloc) = target.jemalloc {
|
||||
cargo.env("JEMALLOC_OVERRIDE", jemalloc);
|
||||
if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
|
||||
cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
|
||||
}
|
||||
}
|
||||
if target.contains("musl") {
|
||||
if let Some(p) = build.musl_root(target) {
|
||||
cargo.env("MUSL_ROOT", p);
|
||||
}
|
||||
}
|
||||
|
||||
run_cargo(build,
|
||||
&mut cargo,
|
||||
&libstd_stamp(build, &compiler, target));
|
||||
// When doing a local rebuild we tell cargo that we're stage1 rather than
|
||||
// stage0. This works fine if the local rust and being-built rust have the
|
||||
// same view of what the default allocator is, but fails otherwise. Since
|
||||
// we don't have a way to express an allocator preference yet, work
|
||||
// around the issue in the case of a local rebuild with jemalloc disabled.
|
||||
if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc {
|
||||
features.push_str(" force_alloc_system");
|
||||
}
|
||||
|
||||
if compiler.stage != 0 && build.config.sanitizers {
|
||||
// This variable is used by the sanitizer runtime crates, e.g.
|
||||
// rustc_lsan, to build the sanitizer runtime from C code
|
||||
// When this variable is missing, those crates won't compile the C code,
|
||||
// so we don't set this variable during stage0 where llvm-config is
|
||||
// missing
|
||||
// We also only build the runtimes when --enable-sanitizers (or its
|
||||
// config.toml equivalent) is used
|
||||
cargo.env("LLVM_CONFIG", build.llvm_config(target));
|
||||
}
|
||||
cargo.arg("--features").arg(features)
|
||||
.arg("--manifest-path")
|
||||
.arg(build.src.join("src/libstd/Cargo.toml"));
|
||||
|
||||
if let Some(target) = build.config.target_config.get(target) {
|
||||
if let Some(ref jemalloc) = target.jemalloc {
|
||||
cargo.env("JEMALLOC_OVERRIDE", jemalloc);
|
||||
}
|
||||
}
|
||||
if target.contains("musl") {
|
||||
if let Some(p) = build.musl_root(target) {
|
||||
cargo.env("MUSL_ROOT", p);
|
||||
}
|
||||
}
|
||||
|
||||
run_cargo(build,
|
||||
&mut cargo,
|
||||
&libstd_stamp(build, &compiler, target));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
@ -216,36 +217,49 @@ pub fn std(build: &Build, target: &str, compiler: &Compiler) {
|
|||
// compile::std_link)
|
||||
// .dep(|s| s.name("startup-objects"))
|
||||
// .dep(|s| s.name("create-sysroot").target(s.host));
|
||||
/// Link all libstd rlibs/dylibs into the sysroot location.
|
||||
///
|
||||
/// Links those artifacts generated by `compiler` to a the `stage` compiler's
|
||||
/// sysroot for the specified `host` and `target`.
|
||||
///
|
||||
/// Note that this assumes that `compiler` has already generated the libstd
|
||||
/// libraries for `target`, and this method will find them in the relevant
|
||||
/// output directory.
|
||||
pub fn std_link(build: &Build,
|
||||
compiler: &Compiler,
|
||||
target_compiler: &Compiler,
|
||||
target: &str) {
|
||||
println!("Copying stage{} std from stage{} ({} -> {} / {})",
|
||||
target_compiler.stage,
|
||||
compiler.stage,
|
||||
compiler.host,
|
||||
target_compiler.host,
|
||||
target);
|
||||
let libdir = build.sysroot_libdir(target_compiler, target);
|
||||
add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target));
|
||||
|
||||
if target.contains("musl") && !target.contains("mips") {
|
||||
copy_musl_third_party_objects(build, target, &libdir);
|
||||
}
|
||||
#[derive(Serialize)]
|
||||
pub struct StdLink<'a> {
|
||||
pub compiler: Compiler<'a>,
|
||||
pub target_compiler: Compiler<'a>,
|
||||
pub target: &'a str,
|
||||
}
|
||||
|
||||
if build.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" {
|
||||
// The sanitizers are only built in stage1 or above, so the dylibs will
|
||||
// be missing in stage0 and causes panic. See the `std()` function above
|
||||
// for reason why the sanitizers are not built in stage0.
|
||||
copy_apple_sanitizer_dylibs(&build.native_dir(target), "osx", &libdir);
|
||||
impl<'a> Step<'a> for StdLink<'a> {
|
||||
type Output = ();
|
||||
|
||||
/// Link all libstd rlibs/dylibs into the sysroot location.
|
||||
///
|
||||
/// Links those artifacts generated by `compiler` to a the `stage` compiler's
|
||||
/// sysroot for the specified `host` and `target`.
|
||||
///
|
||||
/// Note that this assumes that `compiler` has already generated the libstd
|
||||
/// libraries for `target`, and this method will find them in the relevant
|
||||
/// output directory.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let compiler = self.compiler;
|
||||
let target_compiler = self.target_compiler;
|
||||
let target = self.target;
|
||||
println!("Copying stage{} std from stage{} ({} -> {} / {})",
|
||||
target_compiler.stage,
|
||||
compiler.stage,
|
||||
compiler.host,
|
||||
target_compiler.host,
|
||||
target);
|
||||
let libdir = build.sysroot_libdir(target_compiler, target);
|
||||
add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target));
|
||||
|
||||
if target.contains("musl") && !target.contains("mips") {
|
||||
copy_musl_third_party_objects(build, target, &libdir);
|
||||
}
|
||||
|
||||
if build.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" {
|
||||
// The sanitizers are only built in stage1 or above, so the dylibs will
|
||||
// be missing in stage0 and causes panic. See the `std()` function above
|
||||
// for reason why the sanitizers are not built in stage0.
|
||||
copy_apple_sanitizer_dylibs(&build.native_dir(target), "osx", &libdir);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -274,66 +288,97 @@ fn copy_apple_sanitizer_dylibs(native_dir: &Path, platform: &str, into: &Path) {
|
|||
// .dep(|s| s.name("create-sysroot").target(s.host))
|
||||
// .run(move |s| compile::build_startup_objects(build, &s.compiler(), s.target));
|
||||
|
||||
/// Build and prepare startup objects like rsbegin.o and rsend.o
|
||||
///
|
||||
/// These are primarily used on Windows right now for linking executables/dlls.
|
||||
/// They don't require any library support as they're just plain old object
|
||||
/// files, so we just use the nightly snapshot compiler to always build them (as
|
||||
/// no other compilers are guaranteed to be available).
|
||||
pub fn build_startup_objects(build: &Build, for_compiler: &Compiler, target: &str) {
|
||||
if !target.contains("pc-windows-gnu") {
|
||||
return
|
||||
}
|
||||
#[derive(Serialize)]
|
||||
pub struct StartupObjects<'a> {
|
||||
pub for_compiler: Compiler<'a>,
|
||||
pub target: &'a str,
|
||||
}
|
||||
|
||||
let compiler = Compiler::new(0, &build.build);
|
||||
let compiler_path = build.compiler_path(&compiler);
|
||||
let src_dir = &build.src.join("src/rtstartup");
|
||||
let dst_dir = &build.native_dir(target).join("rtstartup");
|
||||
let sysroot_dir = &build.sysroot_libdir(for_compiler, target);
|
||||
t!(fs::create_dir_all(dst_dir));
|
||||
t!(fs::create_dir_all(sysroot_dir));
|
||||
impl<'a> Step<'a> for StartupObjects<'a> {
|
||||
type Output = ();
|
||||
|
||||
for file in &["rsbegin", "rsend"] {
|
||||
let src_file = &src_dir.join(file.to_string() + ".rs");
|
||||
let dst_file = &dst_dir.join(file.to_string() + ".o");
|
||||
if !up_to_date(src_file, dst_file) {
|
||||
let mut cmd = Command::new(&compiler_path);
|
||||
build.run(cmd.env("RUSTC_BOOTSTRAP", "1")
|
||||
.arg("--cfg").arg(format!("stage{}", compiler.stage))
|
||||
.arg("--target").arg(target)
|
||||
.arg("--emit=obj")
|
||||
.arg("--out-dir").arg(dst_dir)
|
||||
.arg(src_file));
|
||||
/// Build and prepare startup objects like rsbegin.o and rsend.o
|
||||
///
|
||||
/// These are primarily used on Windows right now for linking executables/dlls.
|
||||
/// They don't require any library support as they're just plain old object
|
||||
/// files, so we just use the nightly snapshot compiler to always build them (as
|
||||
/// no other compilers are guaranteed to be available).
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let for_compiler = self.for_compiler;
|
||||
let target = self.target;
|
||||
if !target.contains("pc-windows-gnu") {
|
||||
return
|
||||
}
|
||||
|
||||
copy(dst_file, &sysroot_dir.join(file.to_string() + ".o"));
|
||||
}
|
||||
let compiler = Compiler::new(0, &build.build);
|
||||
let compiler_path = build.compiler_path(&compiler);
|
||||
let src_dir = &build.src.join("src/rtstartup");
|
||||
let dst_dir = &build.native_dir(target).join("rtstartup");
|
||||
let sysroot_dir = &build.sysroot_libdir(for_compiler, target);
|
||||
t!(fs::create_dir_all(dst_dir));
|
||||
t!(fs::create_dir_all(sysroot_dir));
|
||||
|
||||
for obj in ["crt2.o", "dllcrt2.o"].iter() {
|
||||
copy(&compiler_file(build.cc(target), obj), &sysroot_dir.join(obj));
|
||||
for file in &["rsbegin", "rsend"] {
|
||||
let src_file = &src_dir.join(file.to_string() + ".rs");
|
||||
let dst_file = &dst_dir.join(file.to_string() + ".o");
|
||||
if !up_to_date(src_file, dst_file) {
|
||||
let mut cmd = Command::new(&compiler_path);
|
||||
build.run(cmd.env("RUSTC_BOOTSTRAP", "1")
|
||||
.arg("--cfg").arg(format!("stage{}", compiler.stage))
|
||||
.arg("--target").arg(target)
|
||||
.arg("--emit=obj")
|
||||
.arg("--out-dir").arg(dst_dir)
|
||||
.arg(src_file));
|
||||
}
|
||||
|
||||
copy(dst_file, &sysroot_dir.join(file.to_string() + ".o"));
|
||||
}
|
||||
|
||||
for obj in ["crt2.o", "dllcrt2.o"].iter() {
|
||||
copy(&compiler_file(build.cc(target), obj), &sysroot_dir.join(obj));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Build libtest.
|
||||
///
|
||||
/// This will build libtest and supporting libraries for a particular stage of
|
||||
/// the build using the `compiler` targeting the `target` architecture. The
|
||||
/// artifacts created will also be linked into the sysroot directory.
|
||||
pub fn test(build: &Build, target: &str, compiler: &Compiler) {
|
||||
let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
|
||||
println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
|
||||
compiler.host, target);
|
||||
let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
|
||||
build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
|
||||
let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
|
||||
if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
|
||||
cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
|
||||
// for (krate, path, _default) in krates("test") {
|
||||
// rules.build(&krate.build_step, path)
|
||||
// .dep(|s| s.name("libstd-link"))
|
||||
// .run(move |s| compile::test(build, s.target, &s.compiler()));
|
||||
// }
|
||||
#[derive(Serialize)]
|
||||
pub struct Test<'a> {
|
||||
pub compiler: Compiler<'a>,
|
||||
pub target: &'a str,
|
||||
}
|
||||
|
||||
impl<'a> Step<'a> for Test<'a> {
|
||||
type Output = ();
|
||||
|
||||
/// Build libtest.
|
||||
///
|
||||
/// This will build libtest and supporting libraries for a particular stage of
|
||||
/// the build using the `compiler` targeting the `target` architecture. The
|
||||
/// artifacts created will also be linked into the sysroot directory.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let target = self.target;
|
||||
let compiler = self.compiler;
|
||||
let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
|
||||
println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
|
||||
compiler.host, target);
|
||||
let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
|
||||
build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
|
||||
let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
|
||||
if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
|
||||
cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
|
||||
}
|
||||
cargo.arg("--manifest-path")
|
||||
.arg(build.src.join("src/libtest/Cargo.toml"));
|
||||
run_cargo(build,
|
||||
&mut cargo,
|
||||
&libtest_stamp(build, compiler, target));
|
||||
}
|
||||
cargo.arg("--manifest-path")
|
||||
.arg(build.src.join("src/libtest/Cargo.toml"));
|
||||
run_cargo(build,
|
||||
&mut cargo,
|
||||
&libtest_stamp(build, compiler, target));
|
||||
}
|
||||
|
||||
|
||||
|
@ -344,98 +389,131 @@ pub fn test(build: &Build, target: &str, compiler: &Compiler) {
|
|||
// compile::test_link)
|
||||
// .dep(|s| s.name("libstd-link"));
|
||||
|
||||
/// Same as `std_link`, only for libtest
|
||||
pub fn test_link(build: &Build,
|
||||
compiler: &Compiler,
|
||||
target_compiler: &Compiler,
|
||||
target: &str) {
|
||||
println!("Copying stage{} test from stage{} ({} -> {} / {})",
|
||||
target_compiler.stage,
|
||||
compiler.stage,
|
||||
compiler.host,
|
||||
target_compiler.host,
|
||||
target);
|
||||
add_to_sysroot(&build.sysroot_libdir(target_compiler, target),
|
||||
&libtest_stamp(build, compiler, target));
|
||||
#[derive(Serialize)]
|
||||
pub struct TestLink<'a> {
|
||||
pub compiler: Compiler<'a>,
|
||||
pub target_compiler: Compiler<'a>,
|
||||
pub target: &'a str,
|
||||
}
|
||||
|
||||
/// Build the compiler.
|
||||
///
|
||||
/// This will build the compiler for a particular stage of the build using
|
||||
/// the `compiler` targeting the `target` architecture. The artifacts
|
||||
/// created will also be linked into the sysroot directory.
|
||||
pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
|
||||
let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
|
||||
println!("Building stage{} compiler artifacts ({} -> {})",
|
||||
compiler.stage, compiler.host, target);
|
||||
impl<'a> Step<'a> for Step<'a> {
|
||||
type Output = ();
|
||||
|
||||
let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
|
||||
build.clear_if_dirty(&out_dir, &libtest_stamp(build, compiler, target));
|
||||
/// Same as `std_link`, only for libtest
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let compiler = self.compiler;
|
||||
let target_compiler = self.target_compiler;
|
||||
let target = self.target;
|
||||
println!("Copying stage{} test from stage{} ({} -> {} / {})",
|
||||
target_compiler.stage,
|
||||
compiler.stage,
|
||||
compiler.host,
|
||||
target_compiler.host,
|
||||
target);
|
||||
add_to_sysroot(&build.sysroot_libdir(target_compiler, target),
|
||||
&libtest_stamp(build, compiler, target));
|
||||
}
|
||||
}
|
||||
|
||||
let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
|
||||
cargo.arg("--features").arg(build.rustc_features())
|
||||
.arg("--manifest-path")
|
||||
.arg(build.src.join("src/rustc/Cargo.toml"));
|
||||
// for (krate, path, _default) in krates("rustc-main") {
|
||||
// rules.build(&krate.build_step, path)
|
||||
// .dep(|s| s.name("libtest-link"))
|
||||
// .dep(move |s| s.name("llvm").host(&build.build).stage(0))
|
||||
// .dep(|s| s.name("may-run-build-script"))
|
||||
// .run(move |s| compile::rustc(build, s.target, &s.compiler()));
|
||||
// }
|
||||
|
||||
// Set some configuration variables picked up by build scripts and
|
||||
// the compiler alike
|
||||
cargo.env("CFG_RELEASE", build.rust_release())
|
||||
.env("CFG_RELEASE_CHANNEL", &build.config.channel)
|
||||
.env("CFG_VERSION", build.rust_version())
|
||||
.env("CFG_PREFIX", build.config.prefix.clone().unwrap_or_default());
|
||||
#[derive(Serialize)]
|
||||
pub struct Rustc<'a> {
|
||||
pub compiler: Compiler<'a>,
|
||||
pub target: &'a str,
|
||||
}
|
||||
|
||||
if compiler.stage == 0 {
|
||||
cargo.env("CFG_LIBDIR_RELATIVE", "lib");
|
||||
} else {
|
||||
let libdir_relative = build.config.libdir_relative.clone().unwrap_or(PathBuf::from("lib"));
|
||||
cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
|
||||
}
|
||||
impl<'a> Step<'a> for Rustc<'a> {
|
||||
type Output = ();
|
||||
|
||||
// If we're not building a compiler with debugging information then remove
|
||||
// these two env vars which would be set otherwise.
|
||||
if build.config.rust_debuginfo_only_std {
|
||||
cargo.env_remove("RUSTC_DEBUGINFO");
|
||||
cargo.env_remove("RUSTC_DEBUGINFO_LINES");
|
||||
}
|
||||
/// Build the compiler.
|
||||
///
|
||||
/// This will build the compiler for a particular stage of the build using
|
||||
/// the `compiler` targeting the `target` architecture. The artifacts
|
||||
/// created will also be linked into the sysroot directory.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let compiler = self.compiler;
|
||||
let target = self.target;
|
||||
let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
|
||||
println!("Building stage{} compiler artifacts ({} -> {})",
|
||||
compiler.stage, compiler.host, target);
|
||||
|
||||
if let Some(ref ver_date) = build.rust_info.commit_date() {
|
||||
cargo.env("CFG_VER_DATE", ver_date);
|
||||
let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
|
||||
build.clear_if_dirty(&out_dir, &libtest_stamp(build, compiler, target));
|
||||
|
||||
let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
|
||||
cargo.arg("--features").arg(build.rustc_features())
|
||||
.arg("--manifest-path")
|
||||
.arg(build.src.join("src/rustc/Cargo.toml"));
|
||||
|
||||
// Set some configuration variables picked up by build scripts and
|
||||
// the compiler alike
|
||||
cargo.env("CFG_RELEASE", build.rust_release())
|
||||
.env("CFG_RELEASE_CHANNEL", &build.config.channel)
|
||||
.env("CFG_VERSION", build.rust_version())
|
||||
.env("CFG_PREFIX", build.config.prefix.clone().unwrap_or_default());
|
||||
|
||||
if compiler.stage == 0 {
|
||||
cargo.env("CFG_LIBDIR_RELATIVE", "lib");
|
||||
} else {
|
||||
let libdir_relative = build.config.libdir_relative.clone().unwrap_or(PathBuf::from("lib"));
|
||||
cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
|
||||
}
|
||||
|
||||
// If we're not building a compiler with debugging information then remove
|
||||
// these two env vars which would be set otherwise.
|
||||
if build.config.rust_debuginfo_only_std {
|
||||
cargo.env_remove("RUSTC_DEBUGINFO");
|
||||
cargo.env_remove("RUSTC_DEBUGINFO_LINES");
|
||||
}
|
||||
|
||||
if let Some(ref ver_date) = build.rust_info.commit_date() {
|
||||
cargo.env("CFG_VER_DATE", ver_date);
|
||||
}
|
||||
if let Some(ref ver_hash) = build.rust_info.sha() {
|
||||
cargo.env("CFG_VER_HASH", ver_hash);
|
||||
}
|
||||
if !build.unstable_features() {
|
||||
cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
|
||||
}
|
||||
// Flag that rust llvm is in use
|
||||
if build.is_rust_llvm(target) {
|
||||
cargo.env("LLVM_RUSTLLVM", "1");
|
||||
}
|
||||
cargo.env("LLVM_CONFIG", build.llvm_config(target));
|
||||
let target_config = build.config.target_config.get(target);
|
||||
if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
|
||||
cargo.env("CFG_LLVM_ROOT", s);
|
||||
}
|
||||
// Building with a static libstdc++ is only supported on linux right now,
|
||||
// not for MSVC or macOS
|
||||
if build.config.llvm_static_stdcpp &&
|
||||
!target.contains("windows") &&
|
||||
!target.contains("apple") {
|
||||
cargo.env("LLVM_STATIC_STDCPP",
|
||||
compiler_file(build.cxx(target).unwrap(), "libstdc++.a"));
|
||||
}
|
||||
if build.config.llvm_link_shared {
|
||||
cargo.env("LLVM_LINK_SHARED", "1");
|
||||
}
|
||||
if let Some(ref s) = build.config.rustc_default_linker {
|
||||
cargo.env("CFG_DEFAULT_LINKER", s);
|
||||
}
|
||||
if let Some(ref s) = build.config.rustc_default_ar {
|
||||
cargo.env("CFG_DEFAULT_AR", s);
|
||||
}
|
||||
run_cargo(build,
|
||||
&mut cargo,
|
||||
&librustc_stamp(build, compiler, target));
|
||||
}
|
||||
if let Some(ref ver_hash) = build.rust_info.sha() {
|
||||
cargo.env("CFG_VER_HASH", ver_hash);
|
||||
}
|
||||
if !build.unstable_features() {
|
||||
cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
|
||||
}
|
||||
// Flag that rust llvm is in use
|
||||
if build.is_rust_llvm(target) {
|
||||
cargo.env("LLVM_RUSTLLVM", "1");
|
||||
}
|
||||
cargo.env("LLVM_CONFIG", build.llvm_config(target));
|
||||
let target_config = build.config.target_config.get(target);
|
||||
if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
|
||||
cargo.env("CFG_LLVM_ROOT", s);
|
||||
}
|
||||
// Building with a static libstdc++ is only supported on linux right now,
|
||||
// not for MSVC or macOS
|
||||
if build.config.llvm_static_stdcpp &&
|
||||
!target.contains("windows") &&
|
||||
!target.contains("apple") {
|
||||
cargo.env("LLVM_STATIC_STDCPP",
|
||||
compiler_file(build.cxx(target).unwrap(), "libstdc++.a"));
|
||||
}
|
||||
if build.config.llvm_link_shared {
|
||||
cargo.env("LLVM_LINK_SHARED", "1");
|
||||
}
|
||||
if let Some(ref s) = build.config.rustc_default_linker {
|
||||
cargo.env("CFG_DEFAULT_LINKER", s);
|
||||
}
|
||||
if let Some(ref s) = build.config.rustc_default_ar {
|
||||
cargo.env("CFG_DEFAULT_AR", s);
|
||||
}
|
||||
run_cargo(build,
|
||||
&mut cargo,
|
||||
&librustc_stamp(build, compiler, target));
|
||||
}
|
||||
|
||||
// crate_rule(build,
|
||||
|
@ -444,19 +522,31 @@ pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
|
|||
// "build-crate-rustc-main",
|
||||
// compile::rustc_link)
|
||||
// .dep(|s| s.name("libtest-link"));
|
||||
/// Same as `std_link`, only for librustc
|
||||
pub fn rustc_link(build: &Build,
|
||||
compiler: &Compiler,
|
||||
target_compiler: &Compiler,
|
||||
target: &str) {
|
||||
println!("Copying stage{} rustc from stage{} ({} -> {} / {})",
|
||||
target_compiler.stage,
|
||||
compiler.stage,
|
||||
compiler.host,
|
||||
target_compiler.host,
|
||||
target);
|
||||
add_to_sysroot(&build.sysroot_libdir(target_compiler, target),
|
||||
&librustc_stamp(build, compiler, target));
|
||||
#[derive(Serialize)]
|
||||
pub struct RustcLink<'a> {
|
||||
pub compiler: Compiler<'a>,
|
||||
pub target_compiler: Compiler<'a>,
|
||||
pub target: &'a str,
|
||||
}
|
||||
|
||||
impl<'a> Step<'a> for RustcLink<'a> {
|
||||
type Output = ();
|
||||
|
||||
/// Same as `std_link`, only for librustc
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let compiler = self.compiler;
|
||||
let target_compiler = self.target_compiler;
|
||||
let target = self.target;
|
||||
println!("Copying stage{} rustc from stage{} ({} -> {} / {})",
|
||||
target_compiler.stage,
|
||||
compiler.stage,
|
||||
compiler.host,
|
||||
target_compiler.host,
|
||||
target);
|
||||
add_to_sysroot(&build.sysroot_libdir(target_compiler, target),
|
||||
&librustc_stamp(build, compiler, target));
|
||||
}
|
||||
}
|
||||
|
||||
/// Cargo's output path for the standard library in a given stage, compiled
|
||||
|
@ -485,10 +575,28 @@ fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
|
|||
|
||||
// rules.build("create-sysroot", "path/to/nowhere")
|
||||
// .run(move |s| compile::create_sysroot(build, &s.compiler()));
|
||||
pub fn create_sysroot(build: &Build, compiler: &Compiler) {
|
||||
let sysroot = build.sysroot(compiler);
|
||||
let _ = fs::remove_dir_all(&sysroot);
|
||||
t!(fs::create_dir_all(&sysroot));
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct Sysroot<'a> {
|
||||
pub compiler: Compiler<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Step<'a> for Sysroot<'a> {
|
||||
type Output = ();
|
||||
|
||||
/// Returns the sysroot for the `compiler` specified that *this build system
|
||||
/// generates*.
|
||||
///
|
||||
/// That is, the sysroot for the stage0 compiler is not what the compiler
|
||||
/// thinks it is by default, but it's the same as the default for stages
|
||||
/// 1-3.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let compiler = self.compiler;
|
||||
let sysroot = build.sysroot(compiler);
|
||||
let _ = fs::remove_dir_all(&sysroot);
|
||||
t!(fs::create_dir_all(&sysroot));
|
||||
}
|
||||
}
|
||||
|
||||
// the compiler with no target libraries ready to go
|
||||
|
@ -504,54 +612,68 @@ pub fn create_sysroot(build: &Build, compiler: &Compiler) {
|
|||
// }
|
||||
// })
|
||||
// .run(move |s| compile::assemble_rustc(build, s.stage, s.target));
|
||||
/// Prepare a new compiler from the artifacts in `stage`
|
||||
///
|
||||
/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
|
||||
/// must have been previously produced by the `stage - 1` build.build
|
||||
/// compiler.
|
||||
pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
|
||||
// nothing to do in stage0
|
||||
if stage == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
println!("Copying stage{} compiler ({})", stage, host);
|
||||
#[derive(Serialize)]
|
||||
pub struct Assemble<'a> {
|
||||
pub stage: u32,
|
||||
pub host: &'a str,
|
||||
}
|
||||
|
||||
// The compiler that we're assembling
|
||||
let target_compiler = Compiler::new(stage, host);
|
||||
impl<'a> Step<'a> for Assemble<'a> {
|
||||
type Output = ();
|
||||
|
||||
// The compiler that compiled the compiler we're assembling
|
||||
let build_compiler = Compiler::new(stage - 1, &build.build);
|
||||
|
||||
// Link in all dylibs to the libdir
|
||||
let sysroot = build.sysroot(&target_compiler);
|
||||
let sysroot_libdir = sysroot.join(libdir(host));
|
||||
t!(fs::create_dir_all(&sysroot_libdir));
|
||||
let src_libdir = build.sysroot_libdir(&build_compiler, host);
|
||||
for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
|
||||
let filename = f.file_name().into_string().unwrap();
|
||||
if is_dylib(&filename) {
|
||||
copy(&f.path(), &sysroot_libdir.join(&filename));
|
||||
/// Prepare a new compiler from the artifacts in `stage`
|
||||
///
|
||||
/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
|
||||
/// must have been previously produced by the `stage - 1` build.build
|
||||
/// compiler.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let stage = self.stage;
|
||||
let host = self.host;
|
||||
// nothing to do in stage0
|
||||
if stage == 0 {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);
|
||||
println!("Copying stage{} compiler ({})", stage, host);
|
||||
|
||||
// Link the compiler binary itself into place
|
||||
let rustc = out_dir.join(exe("rustc", host));
|
||||
let bindir = sysroot.join("bin");
|
||||
t!(fs::create_dir_all(&bindir));
|
||||
let compiler = build.compiler_path(&target_compiler);
|
||||
let _ = fs::remove_file(&compiler);
|
||||
copy(&rustc, &compiler);
|
||||
// The compiler that we're assembling
|
||||
let target_compiler = Compiler::new(stage, host);
|
||||
|
||||
// See if rustdoc exists to link it into place
|
||||
let rustdoc = exe("rustdoc", host);
|
||||
let rustdoc_src = out_dir.join(&rustdoc);
|
||||
let rustdoc_dst = bindir.join(&rustdoc);
|
||||
if fs::metadata(&rustdoc_src).is_ok() {
|
||||
let _ = fs::remove_file(&rustdoc_dst);
|
||||
copy(&rustdoc_src, &rustdoc_dst);
|
||||
// The compiler that compiled the compiler we're assembling
|
||||
let build_compiler = Compiler::new(stage - 1, &build.build);
|
||||
|
||||
// Link in all dylibs to the libdir
|
||||
let sysroot = build.sysroot(&target_compiler);
|
||||
let sysroot_libdir = sysroot.join(libdir(host));
|
||||
t!(fs::create_dir_all(&sysroot_libdir));
|
||||
let src_libdir = build.sysroot_libdir(&build_compiler, host);
|
||||
for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
|
||||
let filename = f.file_name().into_string().unwrap();
|
||||
if is_dylib(&filename) {
|
||||
copy(&f.path(), &sysroot_libdir.join(&filename));
|
||||
}
|
||||
}
|
||||
|
||||
let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);
|
||||
|
||||
// Link the compiler binary itself into place
|
||||
let rustc = out_dir.join(exe("rustc", host));
|
||||
let bindir = sysroot.join("bin");
|
||||
t!(fs::create_dir_all(&bindir));
|
||||
let compiler = build.compiler_path(&target_compiler);
|
||||
let _ = fs::remove_file(&compiler);
|
||||
copy(&rustc, &compiler);
|
||||
|
||||
// See if rustdoc exists to link it into place
|
||||
let rustdoc = exe("rustdoc", host);
|
||||
let rustdoc_src = out_dir.join(&rustdoc);
|
||||
let rustdoc_dst = bindir.join(&rustdoc);
|
||||
if fs::metadata(&rustdoc_src).is_ok() {
|
||||
let _ = fs::remove_file(&rustdoc_dst);
|
||||
copy(&rustdoc_src, &rustdoc_dst);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -574,160 +696,6 @@ fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) {
|
|||
}
|
||||
}
|
||||
|
||||
//// ========================================================================
|
||||
//// Build tools
|
||||
////
|
||||
//// Tools used during the build system but not shipped
|
||||
//// "pseudo rule" which represents completely cleaning out the tools dir in
|
||||
//// one stage. This needs to happen whenever a dependency changes (e.g.
|
||||
//// libstd, libtest, librustc) and all of the tool compilations above will
|
||||
//// be sequenced after this rule.
|
||||
//rules.build("maybe-clean-tools", "path/to/nowhere")
|
||||
// .after("librustc-tool")
|
||||
// .after("libtest-tool")
|
||||
// .after("libstd-tool");
|
||||
//
|
||||
//rules.build("librustc-tool", "path/to/nowhere")
|
||||
// .dep(|s| s.name("librustc"))
|
||||
// .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Librustc));
|
||||
//rules.build("libtest-tool", "path/to/nowhere")
|
||||
// .dep(|s| s.name("libtest"))
|
||||
// .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libtest));
|
||||
//rules.build("libstd-tool", "path/to/nowhere")
|
||||
// .dep(|s| s.name("libstd"))
|
||||
// .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libstd));
|
||||
//
|
||||
/// Build a tool in `src/tools`
|
||||
///
|
||||
/// This will build the specified tool with the specified `host` compiler in
|
||||
/// `stage` into the normal cargo output directory.
|
||||
pub fn maybe_clean_tools(build: &Build, stage: u32, target: &str, mode: Mode) {
|
||||
let compiler = Compiler::new(stage, &build.build);
|
||||
|
||||
let stamp = match mode {
|
||||
Mode::Libstd => libstd_stamp(build, &compiler, target),
|
||||
Mode::Libtest => libtest_stamp(build, &compiler, target),
|
||||
Mode::Librustc => librustc_stamp(build, &compiler, target),
|
||||
_ => panic!(),
|
||||
};
|
||||
let out_dir = build.cargo_out(&compiler, Mode::Tool, target);
|
||||
build.clear_if_dirty(&out_dir, &stamp);
|
||||
}
|
||||
|
||||
|
||||
// rules.build("tool-rustbook", "src/tools/rustbook")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("librustc-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "rustbook"));
|
||||
// rules.build("tool-error-index", "src/tools/error_index_generator")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("librustc-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "error_index_generator"));
|
||||
// rules.build("tool-unstable-book-gen", "src/tools/unstable-book-gen")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "unstable-book-gen"));
|
||||
// rules.build("tool-tidy", "src/tools/tidy")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "tidy"));
|
||||
// rules.build("tool-linkchecker", "src/tools/linkchecker")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "linkchecker"));
|
||||
// rules.build("tool-cargotest", "src/tools/cargotest")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "cargotest"));
|
||||
// rules.build("tool-compiletest", "src/tools/compiletest")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libtest-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "compiletest"));
|
||||
// rules.build("tool-build-manifest", "src/tools/build-manifest")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "build-manifest"));
|
||||
// rules.build("tool-remote-test-server", "src/tools/remote-test-server")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-server"));
|
||||
// rules.build("tool-remote-test-client", "src/tools/remote-test-client")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client"));
|
||||
// rules.build("tool-rust-installer", "src/tools/rust-installer")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "rust-installer"));
|
||||
// rules.build("tool-cargo", "src/tools/cargo")
|
||||
// .host(true)
|
||||
// .default(build.config.extended)
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .dep(|s| s.stage(0).host(s.target).name("openssl"))
|
||||
// .dep(move |s| {
|
||||
// // Cargo depends on procedural macros, which requires a full host
|
||||
// // compiler to be available, so we need to depend on that.
|
||||
// s.name("librustc-link")
|
||||
// .target(&build.build)
|
||||
// .host(&build.build)
|
||||
// })
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "cargo"));
|
||||
// rules.build("tool-rls", "src/tools/rls")
|
||||
// .host(true)
|
||||
// .default(build.config.extended)
|
||||
// .dep(|s| s.name("librustc-tool"))
|
||||
// .dep(|s| s.stage(0).host(s.target).name("openssl"))
|
||||
// .dep(move |s| {
|
||||
// // rls, like cargo, uses procedural macros
|
||||
// s.name("librustc-link")
|
||||
// .target(&build.build)
|
||||
// .host(&build.build)
|
||||
// })
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "rls"));
|
||||
//
|
||||
|
||||
/// Build a tool in `src/tools`
|
||||
///
|
||||
/// This will build the specified tool with the specified `host` compiler in
|
||||
/// `stage` into the normal cargo output directory.
|
||||
pub fn tool(build: &Build, stage: u32, target: &str, tool: &str) {
|
||||
let _folder = build.fold_output(|| format!("stage{}-{}", stage, tool));
|
||||
println!("Building stage{} tool {} ({})", stage, tool, target);
|
||||
|
||||
let compiler = Compiler::new(stage, &build.build);
|
||||
|
||||
let mut cargo = build.cargo(&compiler, Mode::Tool, target, "build");
|
||||
let dir = build.src.join("src/tools").join(tool);
|
||||
cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));
|
||||
|
||||
// We don't want to build tools dynamically as they'll be running across
|
||||
// stages and such and it's just easier if they're not dynamically linked.
|
||||
cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
|
||||
|
||||
if let Some(dir) = build.openssl_install_dir(target) {
|
||||
cargo.env("OPENSSL_STATIC", "1");
|
||||
cargo.env("OPENSSL_DIR", dir);
|
||||
cargo.env("LIBZ_SYS_STATIC", "1");
|
||||
}
|
||||
|
||||
cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel);
|
||||
|
||||
let info = GitInfo::new(&dir);
|
||||
if let Some(sha) = info.sha() {
|
||||
cargo.env("CFG_COMMIT_HASH", sha);
|
||||
}
|
||||
if let Some(sha_short) = info.sha_short() {
|
||||
cargo.env("CFG_SHORT_COMMIT_HASH", sha_short);
|
||||
}
|
||||
if let Some(date) = info.commit_date() {
|
||||
cargo.env("CFG_COMMIT_DATE", date);
|
||||
}
|
||||
|
||||
build.run(&mut cargo);
|
||||
}
|
||||
|
||||
|
||||
// Avoiding a dependency on winapi to keep compile times down
|
||||
#[cfg(unix)]
|
||||
fn stderr_isatty() -> bool {
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -45,13 +45,27 @@ use build_helper::up_to_date;
|
|||
// })
|
||||
// .default(build.config.docs)
|
||||
// .run(move |s| doc::rustbook(build, s.target, "reference"));
|
||||
/// Invoke `rustbook` for `target` for the doc book `name`.
|
||||
///
|
||||
/// This will not actually generate any documentation if the documentation has
|
||||
/// already been generated.
|
||||
pub fn rustbook(build: &Build, target: &str, name: &str) {
|
||||
let src = build.src.join("src/doc");
|
||||
rustbook_src(build, target, name, &src);
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct Rustbook<'a> {
|
||||
target: &'a str,
|
||||
name: &'a str,
|
||||
}
|
||||
|
||||
impl<'a> Step<'a> for Rustbook<'a> {
|
||||
type Output = ();
|
||||
|
||||
/// Invoke `rustbook` for `target` for the doc book `name`.
|
||||
///
|
||||
/// This will not actually generate any documentation if the documentation has
|
||||
/// already been generated.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let target = self.target;
|
||||
let name = self.name;
|
||||
let src = build.src.join("src/doc");
|
||||
rustbook_src(build, target, name, &src);
|
||||
}
|
||||
}
|
||||
|
||||
//rules.doc("doc-unstable-book", "src/doc/unstable-book")
|
||||
|
@ -68,30 +82,44 @@ pub fn rustbook(build: &Build, target: &str, name: &str) {
|
|||
// "unstable-book",
|
||||
// &build.md_doc_out(s.target)));
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct RustbookSrc<'a> {
|
||||
target: &'a str,
|
||||
name: &'a str,
|
||||
src: &'a Path,
|
||||
}
|
||||
|
||||
/// Invoke `rustbook` for `target` for the doc book `name` from the `src` path.
|
||||
///
|
||||
/// This will not actually generate any documentation if the documentation has
|
||||
/// already been generated.
|
||||
pub fn rustbook_src(build: &Build, target: &str, name: &str, src: &Path) {
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
impl<'a> Step<'a> for RustbookSrc<'a> {
|
||||
type Output = ();
|
||||
|
||||
let out = out.join(name);
|
||||
let compiler = Compiler::new(0, &build.build);
|
||||
let src = src.join(name);
|
||||
let index = out.join("index.html");
|
||||
let rustbook = build.tool(&compiler, "rustbook");
|
||||
if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
|
||||
return
|
||||
/// Invoke `rustbook` for `target` for the doc book `name` from the `src` path.
|
||||
///
|
||||
/// This will not actually generate any documentation if the documentation has
|
||||
/// already been generated.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let target = self.target;
|
||||
let name = self.name;
|
||||
let src = self.src;
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
|
||||
let out = out.join(name);
|
||||
let compiler = Compiler::new(0, &build.build);
|
||||
let src = src.join(name);
|
||||
let index = out.join("index.html");
|
||||
let rustbook = build.tool(&compiler, "rustbook");
|
||||
if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
|
||||
return
|
||||
}
|
||||
println!("Rustbook ({}) - {}", target, name);
|
||||
let _ = fs::remove_dir_all(&out);
|
||||
build.run(build.tool_cmd(&compiler, "rustbook")
|
||||
.arg("build")
|
||||
.arg(&src)
|
||||
.arg("-d")
|
||||
.arg(out));
|
||||
}
|
||||
println!("Rustbook ({}) - {}", target, name);
|
||||
let _ = fs::remove_dir_all(&out);
|
||||
build.run(build.tool_cmd(&compiler, "rustbook")
|
||||
.arg("build")
|
||||
.arg(&src)
|
||||
.arg("-d")
|
||||
.arg(out));
|
||||
}
|
||||
|
||||
// rules.doc("doc-book", "src/doc/book")
|
||||
|
@ -103,34 +131,48 @@ pub fn rustbook_src(build: &Build, target: &str, name: &str, src: &Path) {
|
|||
// })
|
||||
// .default(build.config.docs)
|
||||
// .run(move |s| doc::book(build, s.target, "book"));
|
||||
/// Build the book and associated stuff.
|
||||
///
|
||||
/// We need to build:
|
||||
///
|
||||
/// * Book (first edition)
|
||||
/// * Book (second edition)
|
||||
/// * Index page
|
||||
/// * Redirect pages
|
||||
pub fn book(build: &Build, target: &str, name: &str) {
|
||||
// build book first edition
|
||||
rustbook(build, target, &format!("{}/first-edition", name));
|
||||
|
||||
// build book second edition
|
||||
rustbook(build, target, &format!("{}/second-edition", name));
|
||||
#[derive(Serialize)]
|
||||
pub struct TheBook<'a> {
|
||||
target: &'a str,
|
||||
name: &'a str,
|
||||
}
|
||||
|
||||
// build the index page
|
||||
let index = format!("{}/index.md", name);
|
||||
println!("Documenting book index ({})", target);
|
||||
invoke_rustdoc(build, target, &index);
|
||||
impl<'a> Step<'a> for TheBook<'a> {
|
||||
type Output = ();
|
||||
|
||||
// build the redirect pages
|
||||
println!("Documenting book redirect pages ({})", target);
|
||||
for file in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) {
|
||||
let file = t!(file);
|
||||
let path = file.path();
|
||||
let path = path.to_str().unwrap();
|
||||
/// Build the book and associated stuff.
|
||||
///
|
||||
/// We need to build:
|
||||
///
|
||||
/// * Book (first edition)
|
||||
/// * Book (second edition)
|
||||
/// * Index page
|
||||
/// * Redirect pages
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let target = self.target;
|
||||
let name = self.name;
|
||||
// build book first edition
|
||||
rustbook(build, target, &format!("{}/first-edition", name));
|
||||
|
||||
invoke_rustdoc(build, target, path);
|
||||
// build book second edition
|
||||
rustbook(build, target, &format!("{}/second-edition", name));
|
||||
|
||||
// build the index page
|
||||
let index = format!("{}/index.md", name);
|
||||
println!("Documenting book index ({})", target);
|
||||
invoke_rustdoc(build, target, &index);
|
||||
|
||||
// build the redirect pages
|
||||
println!("Documenting book redirect pages ({})", target);
|
||||
for file in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) {
|
||||
let file = t!(file);
|
||||
let path = file.path();
|
||||
let path = path.to_str().unwrap();
|
||||
|
||||
invoke_rustdoc(build, target, path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -188,75 +230,87 @@ fn invoke_rustdoc(build: &Build, target: &str, markdown: &str) {
|
|||
// })
|
||||
// .default(build.config.docs)
|
||||
// .run(move |s| doc::standalone(build, s.target));
|
||||
/// Generates all standalone documentation as compiled by the rustdoc in `stage`
|
||||
/// for the `target` into `out`.
|
||||
///
|
||||
/// This will list all of `src/doc` looking for markdown files and appropriately
|
||||
/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
|
||||
/// `STAMP` alongw ith providing the various header/footer HTML we've cutomized.
|
||||
///
|
||||
/// In the end, this is just a glorified wrapper around rustdoc!
|
||||
pub fn standalone(build: &Build, target: &str) {
|
||||
println!("Documenting standalone ({})", target);
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
|
||||
let compiler = Compiler::new(0, &build.build);
|
||||
#[derive(Serialize)]
|
||||
pub struct Standalone<'a> {
|
||||
target: &'a str,
|
||||
}
|
||||
|
||||
let favicon = build.src.join("src/doc/favicon.inc");
|
||||
let footer = build.src.join("src/doc/footer.inc");
|
||||
let full_toc = build.src.join("src/doc/full-toc.inc");
|
||||
t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
|
||||
impl<'a> Step<'a> for Standalone<'a> {
|
||||
type Output = ();
|
||||
|
||||
let version_input = build.src.join("src/doc/version_info.html.template");
|
||||
let version_info = out.join("version_info.html");
|
||||
/// Generates all standalone documentation as compiled by the rustdoc in `stage`
|
||||
/// for the `target` into `out`.
|
||||
///
|
||||
/// This will list all of `src/doc` looking for markdown files and appropriately
|
||||
/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
|
||||
/// `STAMP` alongw ith providing the various header/footer HTML we've cutomized.
|
||||
///
|
||||
/// In the end, this is just a glorified wrapper around rustdoc!
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let target = self.target;
|
||||
println!("Documenting standalone ({})", target);
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
|
||||
if !up_to_date(&version_input, &version_info) {
|
||||
let mut info = String::new();
|
||||
t!(t!(File::open(&version_input)).read_to_string(&mut info));
|
||||
let info = info.replace("VERSION", &build.rust_release())
|
||||
.replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or(""))
|
||||
.replace("STAMP", build.rust_info.sha().unwrap_or(""));
|
||||
t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
|
||||
}
|
||||
let compiler = Compiler::new(0, &build.build);
|
||||
|
||||
for file in t!(fs::read_dir(build.src.join("src/doc"))) {
|
||||
let file = t!(file);
|
||||
let path = file.path();
|
||||
let filename = path.file_name().unwrap().to_str().unwrap();
|
||||
if !filename.ends_with(".md") || filename == "README.md" {
|
||||
continue
|
||||
let favicon = build.src.join("src/doc/favicon.inc");
|
||||
let footer = build.src.join("src/doc/footer.inc");
|
||||
let full_toc = build.src.join("src/doc/full-toc.inc");
|
||||
t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
|
||||
|
||||
let version_input = build.src.join("src/doc/version_info.html.template");
|
||||
let version_info = out.join("version_info.html");
|
||||
|
||||
if !up_to_date(&version_input, &version_info) {
|
||||
let mut info = String::new();
|
||||
t!(t!(File::open(&version_input)).read_to_string(&mut info));
|
||||
let info = info.replace("VERSION", &build.rust_release())
|
||||
.replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or(""))
|
||||
.replace("STAMP", build.rust_info.sha().unwrap_or(""));
|
||||
t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
|
||||
}
|
||||
|
||||
let html = out.join(filename).with_extension("html");
|
||||
let rustdoc = build.rustdoc(&compiler);
|
||||
if up_to_date(&path, &html) &&
|
||||
up_to_date(&footer, &html) &&
|
||||
up_to_date(&favicon, &html) &&
|
||||
up_to_date(&full_toc, &html) &&
|
||||
up_to_date(&version_info, &html) &&
|
||||
up_to_date(&rustdoc, &html) {
|
||||
continue
|
||||
}
|
||||
for file in t!(fs::read_dir(build.src.join("src/doc"))) {
|
||||
let file = t!(file);
|
||||
let path = file.path();
|
||||
let filename = path.file_name().unwrap().to_str().unwrap();
|
||||
if !filename.ends_with(".md") || filename == "README.md" {
|
||||
continue
|
||||
}
|
||||
|
||||
let mut cmd = Command::new(&rustdoc);
|
||||
build.add_rustc_lib_path(&compiler, &mut cmd);
|
||||
cmd.arg("--html-after-content").arg(&footer)
|
||||
.arg("--html-before-content").arg(&version_info)
|
||||
.arg("--html-in-header").arg(&favicon)
|
||||
.arg("--markdown-playground-url")
|
||||
.arg("https://play.rust-lang.org/")
|
||||
.arg("-o").arg(&out)
|
||||
.arg(&path);
|
||||
let html = out.join(filename).with_extension("html");
|
||||
let rustdoc = build.rustdoc(&compiler);
|
||||
if up_to_date(&path, &html) &&
|
||||
up_to_date(&footer, &html) &&
|
||||
up_to_date(&favicon, &html) &&
|
||||
up_to_date(&full_toc, &html) &&
|
||||
up_to_date(&version_info, &html) &&
|
||||
up_to_date(&rustdoc, &html) {
|
||||
continue
|
||||
}
|
||||
|
||||
if filename == "not_found.md" {
|
||||
cmd.arg("--markdown-no-toc")
|
||||
.arg("--markdown-css")
|
||||
.arg("https://doc.rust-lang.org/rust.css");
|
||||
} else {
|
||||
cmd.arg("--markdown-css").arg("rust.css");
|
||||
let mut cmd = Command::new(&rustdoc);
|
||||
build.add_rustc_lib_path(&compiler, &mut cmd);
|
||||
cmd.arg("--html-after-content").arg(&footer)
|
||||
.arg("--html-before-content").arg(&version_info)
|
||||
.arg("--html-in-header").arg(&favicon)
|
||||
.arg("--markdown-playground-url")
|
||||
.arg("https://play.rust-lang.org/")
|
||||
.arg("-o").arg(&out)
|
||||
.arg(&path);
|
||||
|
||||
if filename == "not_found.md" {
|
||||
cmd.arg("--markdown-no-toc")
|
||||
.arg("--markdown-css")
|
||||
.arg("https://doc.rust-lang.org/rust.css");
|
||||
} else {
|
||||
cmd.arg("--markdown-css").arg("rust.css");
|
||||
}
|
||||
build.run(&mut cmd);
|
||||
}
|
||||
build.run(&mut cmd);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -266,63 +320,77 @@ pub fn standalone(build: &Build, target: &str) {
|
|||
// .default(default && build.config.docs)
|
||||
// .run(move |s| doc::std(build, s.stage, s.target));
|
||||
// }
|
||||
/// Compile all standard library documentation.
|
||||
///
|
||||
/// This will generate all documentation for the standard library and its
|
||||
/// dependencies. This is largely just a wrapper around `cargo doc`.
|
||||
pub fn std(build: &Build, stage: u32, target: &str) {
|
||||
println!("Documenting stage{} std ({})", stage, target);
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
let compiler = Compiler::new(stage, &build.build);
|
||||
let compiler = if build.force_use_stage1(&compiler, target) {
|
||||
Compiler::new(1, compiler.host)
|
||||
} else {
|
||||
compiler
|
||||
};
|
||||
let out_dir = build.stage_out(&compiler, Mode::Libstd)
|
||||
.join(target).join("doc");
|
||||
let rustdoc = build.rustdoc(&compiler);
|
||||
|
||||
// Here what we're doing is creating a *symlink* (directory junction on
|
||||
// Windows) to the final output location. This is not done as an
|
||||
// optimization but rather for correctness. We've got three trees of
|
||||
// documentation, one for std, one for test, and one for rustc. It's then
|
||||
// our job to merge them all together.
|
||||
//
|
||||
// Unfortunately rustbuild doesn't know nearly as well how to merge doc
|
||||
// trees as rustdoc does itself, so instead of actually having three
|
||||
// separate trees we just have rustdoc output to the same location across
|
||||
// all of them.
|
||||
//
|
||||
// This way rustdoc generates output directly into the output, and rustdoc
|
||||
// will also directly handle merging.
|
||||
let my_out = build.crate_doc_out(target);
|
||||
build.clear_if_dirty(&my_out, &rustdoc);
|
||||
t!(symlink_dir_force(&my_out, &out_dir));
|
||||
#[derive(Serialize)]
|
||||
pub struct Std<'a> {
|
||||
stage: u32,
|
||||
target: &'a str,
|
||||
}
|
||||
|
||||
let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
|
||||
cargo.arg("--manifest-path")
|
||||
.arg(build.src.join("src/libstd/Cargo.toml"))
|
||||
.arg("--features").arg(build.std_features());
|
||||
impl<'a> Step<'a> for Std<'a> {
|
||||
type Output = ();
|
||||
|
||||
// We don't want to build docs for internal std dependencies unless
|
||||
// in compiler-docs mode. When not in that mode, we whitelist the crates
|
||||
// for which docs must be built.
|
||||
if !build.config.compiler_docs {
|
||||
cargo.arg("--no-deps");
|
||||
for krate in &["alloc", "collections", "core", "std", "std_unicode"] {
|
||||
cargo.arg("-p").arg(krate);
|
||||
// Create all crate output directories first to make sure rustdoc uses
|
||||
// relative links.
|
||||
// FIXME: Cargo should probably do this itself.
|
||||
t!(fs::create_dir_all(out_dir.join(krate)));
|
||||
/// Compile all standard library documentation.
|
||||
///
|
||||
/// This will generate all documentation for the standard library and its
|
||||
/// dependencies. This is largely just a wrapper around `cargo doc`.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let stage = self.stage;
|
||||
let target = self.target;
|
||||
println!("Documenting stage{} std ({})", stage, target);
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
let compiler = Compiler::new(stage, &build.build);
|
||||
let compiler = if build.force_use_stage1(&compiler, target) {
|
||||
Compiler::new(1, compiler.host)
|
||||
} else {
|
||||
compiler
|
||||
};
|
||||
let out_dir = build.stage_out(&compiler, Mode::Libstd)
|
||||
.join(target).join("doc");
|
||||
let rustdoc = build.rustdoc(&compiler);
|
||||
|
||||
// Here what we're doing is creating a *symlink* (directory junction on
|
||||
// Windows) to the final output location. This is not done as an
|
||||
// optimization but rather for correctness. We've got three trees of
|
||||
// documentation, one for std, one for test, and one for rustc. It's then
|
||||
// our job to merge them all together.
|
||||
//
|
||||
// Unfortunately rustbuild doesn't know nearly as well how to merge doc
|
||||
// trees as rustdoc does itself, so instead of actually having three
|
||||
// separate trees we just have rustdoc output to the same location across
|
||||
// all of them.
|
||||
//
|
||||
// This way rustdoc generates output directly into the output, and rustdoc
|
||||
// will also directly handle merging.
|
||||
let my_out = build.crate_doc_out(target);
|
||||
build.clear_if_dirty(&my_out, &rustdoc);
|
||||
t!(symlink_dir_force(&my_out, &out_dir));
|
||||
|
||||
let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
|
||||
cargo.arg("--manifest-path")
|
||||
.arg(build.src.join("src/libstd/Cargo.toml"))
|
||||
.arg("--features").arg(build.std_features());
|
||||
|
||||
// We don't want to build docs for internal std dependencies unless
|
||||
// in compiler-docs mode. When not in that mode, we whitelist the crates
|
||||
// for which docs must be built.
|
||||
if !build.config.compiler_docs {
|
||||
cargo.arg("--no-deps");
|
||||
for krate in &["alloc", "collections", "core", "std", "std_unicode"] {
|
||||
cargo.arg("-p").arg(krate);
|
||||
// Create all crate output directories first to make sure rustdoc uses
|
||||
// relative links.
|
||||
// FIXME: Cargo should probably do this itself.
|
||||
t!(fs::create_dir_all(out_dir.join(krate)));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
build.run(&mut cargo);
|
||||
cp_r(&my_out, &out);
|
||||
}
|
||||
|
||||
|
||||
build.run(&mut cargo);
|
||||
cp_r(&my_out, &out);
|
||||
}
|
||||
|
||||
// for (krate, path, default) in krates("test") {
|
||||
|
@ -333,36 +401,49 @@ pub fn std(build: &Build, stage: u32, target: &str) {
|
|||
// .default(default && build.config.compiler_docs)
|
||||
// .run(move |s| doc::test(build, s.stage, s.target));
|
||||
// }
|
||||
/// Compile all libtest documentation.
|
||||
///
|
||||
/// This will generate all documentation for libtest and its dependencies. This
|
||||
/// is largely just a wrapper around `cargo doc`.
|
||||
pub fn test(build: &Build, stage: u32, target: &str) {
|
||||
println!("Documenting stage{} test ({})", stage, target);
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
let compiler = Compiler::new(stage, &build.build);
|
||||
let compiler = if build.force_use_stage1(&compiler, target) {
|
||||
Compiler::new(1, compiler.host)
|
||||
} else {
|
||||
compiler
|
||||
};
|
||||
let out_dir = build.stage_out(&compiler, Mode::Libtest)
|
||||
.join(target).join("doc");
|
||||
let rustdoc = build.rustdoc(&compiler);
|
||||
|
||||
// See docs in std above for why we symlink
|
||||
let my_out = build.crate_doc_out(target);
|
||||
build.clear_if_dirty(&my_out, &rustdoc);
|
||||
t!(symlink_dir_force(&my_out, &out_dir));
|
||||
|
||||
let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
|
||||
cargo.arg("--manifest-path")
|
||||
.arg(build.src.join("src/libtest/Cargo.toml"));
|
||||
build.run(&mut cargo);
|
||||
cp_r(&my_out, &out);
|
||||
#[derive(Serialize)]
|
||||
pub struct Test<'a> {
|
||||
stage: u32,
|
||||
test: &'a str,
|
||||
}
|
||||
|
||||
impl<'a> Step<'a> for Test<'a> {
|
||||
type Output = ();
|
||||
|
||||
/// Compile all libtest documentation.
|
||||
///
|
||||
/// This will generate all documentation for libtest and its dependencies. This
|
||||
/// is largely just a wrapper around `cargo doc`.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let stage = self.stage;
|
||||
let target = self.target;
|
||||
println!("Documenting stage{} test ({})", stage, target);
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
let compiler = Compiler::new(stage, &build.build);
|
||||
let compiler = if build.force_use_stage1(&compiler, target) {
|
||||
Compiler::new(1, compiler.host)
|
||||
} else {
|
||||
compiler
|
||||
};
|
||||
let out_dir = build.stage_out(&compiler, Mode::Libtest)
|
||||
.join(target).join("doc");
|
||||
let rustdoc = build.rustdoc(&compiler);
|
||||
|
||||
// See docs in std above for why we symlink
|
||||
let my_out = build.crate_doc_out(target);
|
||||
build.clear_if_dirty(&my_out, &rustdoc);
|
||||
t!(symlink_dir_force(&my_out, &out_dir));
|
||||
|
||||
let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
|
||||
cargo.arg("--manifest-path")
|
||||
.arg(build.src.join("src/libtest/Cargo.toml"));
|
||||
build.run(&mut cargo);
|
||||
cp_r(&my_out, &out);
|
||||
}
|
||||
}
|
||||
|
||||
// for (krate, path, default) in krates("rustc-main") {
|
||||
// rules.doc(&krate.doc_step, path)
|
||||
|
@ -374,51 +455,65 @@ pub fn test(build: &Build, stage: u32, target: &str) {
|
|||
// .run(move |s| doc::rustc(build, s.stage, s.target));
|
||||
// }
|
||||
//
|
||||
/// Generate all compiler documentation.
|
||||
///
|
||||
/// This will generate all documentation for the compiler libraries and their
|
||||
/// dependencies. This is largely just a wrapper around `cargo doc`.
|
||||
pub fn rustc(build: &Build, stage: u32, target: &str) {
|
||||
println!("Documenting stage{} compiler ({})", stage, target);
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
let compiler = Compiler::new(stage, &build.build);
|
||||
let compiler = if build.force_use_stage1(&compiler, target) {
|
||||
Compiler::new(1, compiler.host)
|
||||
} else {
|
||||
compiler
|
||||
};
|
||||
let out_dir = build.stage_out(&compiler, Mode::Librustc)
|
||||
.join(target).join("doc");
|
||||
let rustdoc = build.rustdoc(&compiler);
|
||||
|
||||
// See docs in std above for why we symlink
|
||||
let my_out = build.crate_doc_out(target);
|
||||
build.clear_if_dirty(&my_out, &rustdoc);
|
||||
t!(symlink_dir_force(&my_out, &out_dir));
|
||||
#[derive(Serialize)]
|
||||
pub struct Rustc<'a> {
|
||||
stage: u32,
|
||||
target: &'a str,
|
||||
}
|
||||
|
||||
let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
|
||||
cargo.arg("--manifest-path")
|
||||
.arg(build.src.join("src/rustc/Cargo.toml"))
|
||||
.arg("--features").arg(build.rustc_features());
|
||||
impl<'a> Step<'a> for Rustc<'a> {
|
||||
type Output = ();
|
||||
|
||||
if build.config.compiler_docs {
|
||||
// src/rustc/Cargo.toml contains bin crates called rustc and rustdoc
|
||||
// which would otherwise overwrite the docs for the real rustc and
|
||||
// rustdoc lib crates.
|
||||
cargo.arg("-p").arg("rustc_driver")
|
||||
.arg("-p").arg("rustdoc");
|
||||
} else {
|
||||
// Like with libstd above if compiler docs aren't enabled then we're not
|
||||
// documenting internal dependencies, so we have a whitelist.
|
||||
cargo.arg("--no-deps");
|
||||
for krate in &["proc_macro"] {
|
||||
cargo.arg("-p").arg(krate);
|
||||
/// Generate all compiler documentation.
|
||||
///
|
||||
/// This will generate all documentation for the compiler libraries and their
|
||||
/// dependencies. This is largely just a wrapper around `cargo doc`.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let stage = self.stage;
|
||||
let target = self.target;
|
||||
println!("Documenting stage{} compiler ({})", stage, target);
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
let compiler = Compiler::new(stage, &build.build);
|
||||
let compiler = if build.force_use_stage1(&compiler, target) {
|
||||
Compiler::new(1, compiler.host)
|
||||
} else {
|
||||
compiler
|
||||
};
|
||||
let out_dir = build.stage_out(&compiler, Mode::Librustc)
|
||||
.join(target).join("doc");
|
||||
let rustdoc = build.rustdoc(&compiler);
|
||||
|
||||
// See docs in std above for why we symlink
|
||||
let my_out = build.crate_doc_out(target);
|
||||
build.clear_if_dirty(&my_out, &rustdoc);
|
||||
t!(symlink_dir_force(&my_out, &out_dir));
|
||||
|
||||
let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
|
||||
cargo.arg("--manifest-path")
|
||||
.arg(build.src.join("src/rustc/Cargo.toml"))
|
||||
.arg("--features").arg(build.rustc_features());
|
||||
|
||||
if build.config.compiler_docs {
|
||||
// src/rustc/Cargo.toml contains bin crates called rustc and rustdoc
|
||||
// which would otherwise overwrite the docs for the real rustc and
|
||||
// rustdoc lib crates.
|
||||
cargo.arg("-p").arg("rustc_driver")
|
||||
.arg("-p").arg("rustdoc");
|
||||
} else {
|
||||
// Like with libstd above if compiler docs aren't enabled then we're not
|
||||
// documenting internal dependencies, so we have a whitelist.
|
||||
cargo.arg("--no-deps");
|
||||
for krate in &["proc_macro"] {
|
||||
cargo.arg("-p").arg(krate);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
build.run(&mut cargo);
|
||||
cp_r(&my_out, &out);
|
||||
build.run(&mut cargo);
|
||||
cp_r(&my_out, &out);
|
||||
}
|
||||
}
|
||||
|
||||
// rules.doc("doc-error-index", "src/tools/error_index_generator")
|
||||
|
@ -428,21 +523,32 @@ pub fn rustc(build: &Build, stage: u32, target: &str) {
|
|||
// .host(true)
|
||||
// .run(move |s| doc::error_index(build, s.target));
|
||||
|
||||
/// Generates the HTML rendered error-index by running the
|
||||
/// `error_index_generator` tool.
|
||||
pub fn error_index(build: &Build, target: &str) {
|
||||
println!("Documenting error index ({})", target);
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
let compiler = Compiler::new(0, &build.build);
|
||||
let mut index = build.tool_cmd(&compiler, "error_index_generator");
|
||||
index.arg("html");
|
||||
index.arg(out.join("error-index.html"));
|
||||
#[derive(Serialize)]
|
||||
pub struct ErrorIndex<'a> {
|
||||
target: &'a str,
|
||||
}
|
||||
|
||||
// FIXME: shouldn't have to pass this env var
|
||||
index.env("CFG_BUILD", &build.build);
|
||||
impl<'a> Step<'a> for ErrorIndex<'a> {
|
||||
type Output = ();
|
||||
|
||||
build.run(&mut index);
|
||||
/// Generates the HTML rendered error-index by running the
|
||||
/// `error_index_generator` tool.
|
||||
fn run(self, builder: &Builder) {
|
||||
let builder = builder.build;
|
||||
let target = self.target;
|
||||
println!("Documenting error index ({})", target);
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
let compiler = Compiler::new(0, &build.build);
|
||||
let mut index = build.tool_cmd(&compiler, "error_index_generator");
|
||||
index.arg("html");
|
||||
index.arg(out.join("error-index.html"));
|
||||
|
||||
// FIXME: shouldn't have to pass this env var
|
||||
index.env("CFG_BUILD", &build.build);
|
||||
|
||||
build.run(&mut index);
|
||||
}
|
||||
}
|
||||
|
||||
// rules.doc("doc-unstable-book-gen", "src/tools/unstable-book-gen")
|
||||
|
@ -457,17 +563,28 @@ pub fn error_index(build: &Build, target: &str) {
|
|||
// .host(true)
|
||||
// .run(move |s| doc::unstable_book_gen(build, s.target));
|
||||
|
||||
pub fn unstable_book_gen(build: &Build, target: &str) {
|
||||
println!("Generating unstable book md files ({})", target);
|
||||
let out = build.md_doc_out(target).join("unstable-book");
|
||||
t!(fs::create_dir_all(&out));
|
||||
t!(fs::remove_dir_all(&out));
|
||||
let compiler = Compiler::new(0, &build.build);
|
||||
let mut cmd = build.tool_cmd(&compiler, "unstable-book-gen");
|
||||
cmd.arg(build.src.join("src"));
|
||||
cmd.arg(out);
|
||||
#[derive(Serialize)]
|
||||
pub struct UnstableBookGen<'a> {
|
||||
target: &'a str,
|
||||
}
|
||||
|
||||
build.run(&mut cmd);
|
||||
impl<'a> Step<'a> for UnstableBookGen<'a> {
|
||||
type Output = ();
|
||||
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let target = self.target;
|
||||
println!("Generating unstable book md files ({})", target);
|
||||
let out = build.md_doc_out(target).join("unstable-book");
|
||||
t!(fs::create_dir_all(&out));
|
||||
t!(fs::remove_dir_all(&out));
|
||||
let compiler = Compiler::new(0, &build.build);
|
||||
let mut cmd = build.tool_cmd(&compiler, "unstable-book-gen");
|
||||
cmd.arg(build.src.join("src"));
|
||||
cmd.arg(out);
|
||||
|
||||
build.run(&mut cmd);
|
||||
}
|
||||
}
|
||||
|
||||
fn symlink_dir_force(src: &Path, dst: &Path) -> io::Result<()> {
|
||||
|
|
|
@ -33,7 +33,7 @@ use Build;
|
|||
use util;
|
||||
use build_helper::up_to_date;
|
||||
|
||||
/ rules.build("llvm", "src/llvm")
|
||||
/j/ rules.build("llvm", "src/llvm")
|
||||
// .host(true)
|
||||
// .dep(move |s| {
|
||||
// if s.target == build.build {
|
||||
|
@ -43,173 +43,185 @@ use build_helper::up_to_date;
|
|||
// }
|
||||
// })
|
||||
// .run(move |s| native::llvm(build, s.target));
|
||||
/// Compile LLVM for `target`.
|
||||
pub fn llvm(build: &Build, target: &str) {
|
||||
// If we're using a custom LLVM bail out here, but we can only use a
|
||||
// custom LLVM for the build triple.
|
||||
if let Some(config) = build.config.target_config.get(target) {
|
||||
if let Some(ref s) = config.llvm_config {
|
||||
return check_llvm_version(build, s);
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct Llvm<'a> {
|
||||
pub target: &'a str,
|
||||
}
|
||||
|
||||
impl<'a> Step<'a> for Llvm<'a> {
|
||||
type Output = ();
|
||||
|
||||
/// Compile LLVM for `target`.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let target = self.target;
|
||||
// If we're using a custom LLVM bail out here, but we can only use a
|
||||
// custom LLVM for the build triple.
|
||||
if let Some(config) = build.config.target_config.get(target) {
|
||||
if let Some(ref s) = config.llvm_config {
|
||||
return check_llvm_version(build, s);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger");
|
||||
let mut rebuild_trigger_contents = String::new();
|
||||
t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents));
|
||||
let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger");
|
||||
let mut rebuild_trigger_contents = String::new();
|
||||
t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents));
|
||||
|
||||
let out_dir = build.llvm_out(target);
|
||||
let done_stamp = out_dir.join("llvm-finished-building");
|
||||
if done_stamp.exists() {
|
||||
let mut done_contents = String::new();
|
||||
t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents));
|
||||
let out_dir = build.llvm_out(target);
|
||||
let done_stamp = out_dir.join("llvm-finished-building");
|
||||
if done_stamp.exists() {
|
||||
let mut done_contents = String::new();
|
||||
t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents));
|
||||
|
||||
// If LLVM was already built previously and contents of the rebuild-trigger file
|
||||
// didn't change from the previous build, then no action is required.
|
||||
if done_contents == rebuild_trigger_contents {
|
||||
return
|
||||
// If LLVM was already built previously and contents of the rebuild-trigger file
|
||||
// didn't change from the previous build, then no action is required.
|
||||
if done_contents == rebuild_trigger_contents {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
if build.config.llvm_clean_rebuild {
|
||||
drop(fs::remove_dir_all(&out_dir));
|
||||
}
|
||||
|
||||
let _folder = build.fold_output(|| "llvm");
|
||||
println!("Building LLVM for {}", target);
|
||||
let _time = util::timeit();
|
||||
t!(fs::create_dir_all(&out_dir));
|
||||
|
||||
// http://llvm.org/docs/CMake.html
|
||||
let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
|
||||
if build.config.ninja {
|
||||
cfg.generator("Ninja");
|
||||
}
|
||||
|
||||
let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) {
|
||||
(false, _) => "Debug",
|
||||
(true, false) => "Release",
|
||||
(true, true) => "RelWithDebInfo",
|
||||
};
|
||||
|
||||
// NOTE: remember to also update `config.toml.example` when changing the defaults!
|
||||
let llvm_targets = match build.config.llvm_targets {
|
||||
Some(ref s) => s,
|
||||
None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX;Hexagon",
|
||||
};
|
||||
|
||||
let llvm_exp_targets = match build.config.llvm_experimental_targets {
|
||||
Some(ref s) => s,
|
||||
None => "",
|
||||
};
|
||||
|
||||
let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
|
||||
|
||||
cfg.target(target)
|
||||
.host(&build.build)
|
||||
.out_dir(&out_dir)
|
||||
.profile(profile)
|
||||
.define("LLVM_ENABLE_ASSERTIONS", assertions)
|
||||
.define("LLVM_TARGETS_TO_BUILD", llvm_targets)
|
||||
.define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets)
|
||||
.define("LLVM_INCLUDE_EXAMPLES", "OFF")
|
||||
.define("LLVM_INCLUDE_TESTS", "OFF")
|
||||
.define("LLVM_INCLUDE_DOCS", "OFF")
|
||||
.define("LLVM_ENABLE_ZLIB", "OFF")
|
||||
.define("WITH_POLLY", "OFF")
|
||||
.define("LLVM_ENABLE_TERMINFO", "OFF")
|
||||
.define("LLVM_ENABLE_LIBEDIT", "OFF")
|
||||
.define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string())
|
||||
.define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
|
||||
.define("LLVM_DEFAULT_TARGET_TRIPLE", target);
|
||||
|
||||
if target.contains("msvc") {
|
||||
cfg.define("LLVM_USE_CRT_DEBUG", "MT");
|
||||
cfg.define("LLVM_USE_CRT_RELEASE", "MT");
|
||||
cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT");
|
||||
cfg.static_crt(true);
|
||||
}
|
||||
|
||||
if target.starts_with("i686") {
|
||||
cfg.define("LLVM_BUILD_32_BITS", "ON");
|
||||
}
|
||||
|
||||
if let Some(num_linkers) = build.config.llvm_link_jobs {
|
||||
if num_linkers > 0 {
|
||||
cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
|
||||
if build.config.llvm_clean_rebuild {
|
||||
drop(fs::remove_dir_all(&out_dir));
|
||||
}
|
||||
}
|
||||
|
||||
// http://llvm.org/docs/HowToCrossCompileLLVM.html
|
||||
if target != build.build {
|
||||
// FIXME: if the llvm root for the build triple is overridden then we
|
||||
// should use llvm-tblgen from there, also should verify that it
|
||||
// actually exists most of the time in normal installs of LLVM.
|
||||
let host = build.llvm_out(&build.build).join("bin/llvm-tblgen");
|
||||
cfg.define("CMAKE_CROSSCOMPILING", "True")
|
||||
.define("LLVM_TABLEGEN", &host);
|
||||
}
|
||||
let _folder = build.fold_output(|| "llvm");
|
||||
println!("Building LLVM for {}", target);
|
||||
let _time = util::timeit();
|
||||
t!(fs::create_dir_all(&out_dir));
|
||||
|
||||
// http://llvm.org/docs/CMake.html
|
||||
let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
|
||||
if build.config.ninja {
|
||||
cfg.generator("Ninja");
|
||||
}
|
||||
|
||||
let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) {
|
||||
(false, _) => "Debug",
|
||||
(true, false) => "Release",
|
||||
(true, true) => "RelWithDebInfo",
|
||||
};
|
||||
|
||||
// NOTE: remember to also update `config.toml.example` when changing the defaults!
|
||||
let llvm_targets = match build.config.llvm_targets {
|
||||
Some(ref s) => s,
|
||||
None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX;Hexagon",
|
||||
};
|
||||
|
||||
let llvm_exp_targets = match build.config.llvm_experimental_targets {
|
||||
Some(ref s) => s,
|
||||
None => "",
|
||||
};
|
||||
|
||||
let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
|
||||
|
||||
cfg.target(target)
|
||||
.host(&build.build)
|
||||
.out_dir(&out_dir)
|
||||
.profile(profile)
|
||||
.define("LLVM_ENABLE_ASSERTIONS", assertions)
|
||||
.define("LLVM_TARGETS_TO_BUILD", llvm_targets)
|
||||
.define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets)
|
||||
.define("LLVM_INCLUDE_EXAMPLES", "OFF")
|
||||
.define("LLVM_INCLUDE_TESTS", "OFF")
|
||||
.define("LLVM_INCLUDE_DOCS", "OFF")
|
||||
.define("LLVM_ENABLE_ZLIB", "OFF")
|
||||
.define("WITH_POLLY", "OFF")
|
||||
.define("LLVM_ENABLE_TERMINFO", "OFF")
|
||||
.define("LLVM_ENABLE_LIBEDIT", "OFF")
|
||||
.define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string())
|
||||
.define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
|
||||
.define("LLVM_DEFAULT_TARGET_TRIPLE", target);
|
||||
|
||||
let sanitize_cc = |cc: &Path| {
|
||||
if target.contains("msvc") {
|
||||
OsString::from(cc.to_str().unwrap().replace("\\", "/"))
|
||||
} else {
|
||||
cc.as_os_str().to_owned()
|
||||
}
|
||||
};
|
||||
|
||||
let configure_compilers = |cfg: &mut cmake::Config| {
|
||||
// MSVC with CMake uses msbuild by default which doesn't respect these
|
||||
// vars that we'd otherwise configure. In that case we just skip this
|
||||
// entirely.
|
||||
if target.contains("msvc") && !build.config.ninja {
|
||||
return
|
||||
cfg.define("LLVM_USE_CRT_DEBUG", "MT");
|
||||
cfg.define("LLVM_USE_CRT_RELEASE", "MT");
|
||||
cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT");
|
||||
cfg.static_crt(true);
|
||||
}
|
||||
|
||||
let cc = build.cc(target);
|
||||
let cxx = build.cxx(target).unwrap();
|
||||
|
||||
// Handle msvc + ninja + ccache specially (this is what the bots use)
|
||||
if target.contains("msvc") &&
|
||||
build.config.ninja &&
|
||||
build.config.ccache.is_some() {
|
||||
let mut cc = env::current_exe().expect("failed to get cwd");
|
||||
cc.set_file_name("sccache-plus-cl.exe");
|
||||
|
||||
cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
|
||||
.define("CMAKE_CXX_COMPILER", sanitize_cc(&cc));
|
||||
cfg.env("SCCACHE_PATH",
|
||||
build.config.ccache.as_ref().unwrap())
|
||||
.env("SCCACHE_TARGET", target);
|
||||
|
||||
// If ccache is configured we inform the build a little differently hwo
|
||||
// to invoke ccache while also invoking our compilers.
|
||||
} else if let Some(ref ccache) = build.config.ccache {
|
||||
cfg.define("CMAKE_C_COMPILER", ccache)
|
||||
.define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc))
|
||||
.define("CMAKE_CXX_COMPILER", ccache)
|
||||
.define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx));
|
||||
} else {
|
||||
cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
|
||||
.define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
|
||||
if target.starts_with("i686") {
|
||||
cfg.define("LLVM_BUILD_32_BITS", "ON");
|
||||
}
|
||||
|
||||
cfg.build_arg("-j").build_arg(build.jobs().to_string());
|
||||
cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
|
||||
cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
|
||||
};
|
||||
if let Some(num_linkers) = build.config.llvm_link_jobs {
|
||||
if num_linkers > 0 {
|
||||
cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
configure_compilers(&mut cfg);
|
||||
// http://llvm.org/docs/HowToCrossCompileLLVM.html
|
||||
if target != build.build {
|
||||
// FIXME: if the llvm root for the build triple is overridden then we
|
||||
// should use llvm-tblgen from there, also should verify that it
|
||||
// actually exists most of the time in normal installs of LLVM.
|
||||
let host = build.llvm_out(&build.build).join("bin/llvm-tblgen");
|
||||
cfg.define("CMAKE_CROSSCOMPILING", "True")
|
||||
.define("LLVM_TABLEGEN", &host);
|
||||
}
|
||||
|
||||
if env::var_os("SCCACHE_ERROR_LOG").is_some() {
|
||||
cfg.env("RUST_LOG", "sccache=warn");
|
||||
let sanitize_cc = |cc: &Path| {
|
||||
if target.contains("msvc") {
|
||||
OsString::from(cc.to_str().unwrap().replace("\\", "/"))
|
||||
} else {
|
||||
cc.as_os_str().to_owned()
|
||||
}
|
||||
};
|
||||
|
||||
let configure_compilers = |cfg: &mut cmake::Config| {
|
||||
// MSVC with CMake uses msbuild by default which doesn't respect these
|
||||
// vars that we'd otherwise configure. In that case we just skip this
|
||||
// entirely.
|
||||
if target.contains("msvc") && !build.config.ninja {
|
||||
return
|
||||
}
|
||||
|
||||
let cc = build.cc(target);
|
||||
let cxx = build.cxx(target).unwrap();
|
||||
|
||||
// Handle msvc + ninja + ccache specially (this is what the bots use)
|
||||
if target.contains("msvc") &&
|
||||
build.config.ninja &&
|
||||
build.config.ccache.is_some() {
|
||||
let mut cc = env::current_exe().expect("failed to get cwd");
|
||||
cc.set_file_name("sccache-plus-cl.exe");
|
||||
|
||||
cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
|
||||
.define("CMAKE_CXX_COMPILER", sanitize_cc(&cc));
|
||||
cfg.env("SCCACHE_PATH",
|
||||
build.config.ccache.as_ref().unwrap())
|
||||
.env("SCCACHE_TARGET", target);
|
||||
|
||||
// If ccache is configured we inform the build a little differently hwo
|
||||
// to invoke ccache while also invoking our compilers.
|
||||
} else if let Some(ref ccache) = build.config.ccache {
|
||||
cfg.define("CMAKE_C_COMPILER", ccache)
|
||||
.define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc))
|
||||
.define("CMAKE_CXX_COMPILER", ccache)
|
||||
.define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx));
|
||||
} else {
|
||||
cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
|
||||
.define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
|
||||
}
|
||||
|
||||
cfg.build_arg("-j").build_arg(build.jobs().to_string());
|
||||
cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
|
||||
cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
|
||||
};
|
||||
|
||||
configure_compilers(&mut cfg);
|
||||
|
||||
if env::var_os("SCCACHE_ERROR_LOG").is_some() {
|
||||
cfg.env("RUST_LOG", "sccache=warn");
|
||||
}
|
||||
|
||||
// FIXME: we don't actually need to build all LLVM tools and all LLVM
|
||||
// libraries here, e.g. we just want a few components and a few
|
||||
// tools. Figure out how to filter them down and only build the right
|
||||
// tools and libs on all platforms.
|
||||
cfg.build();
|
||||
|
||||
t!(t!(File::create(&done_stamp)).write_all(rebuild_trigger_contents.as_bytes()));
|
||||
}
|
||||
|
||||
// FIXME: we don't actually need to build all LLVM tools and all LLVM
|
||||
// libraries here, e.g. we just want a few components and a few
|
||||
// tools. Figure out how to filter them down and only build the right
|
||||
// tools and libs on all platforms.
|
||||
cfg.build();
|
||||
|
||||
t!(t!(File::create(&done_stamp)).write_all(rebuild_trigger_contents.as_bytes()));
|
||||
}
|
||||
|
||||
fn check_llvm_version(build: &Build, llvm_config: &Path) {
|
||||
|
@ -228,39 +240,52 @@ fn check_llvm_version(build: &Build, llvm_config: &Path) {
|
|||
|
||||
//rules.build("test-helpers", "src/rt/rust_test_helpers.c")
|
||||
// .run(move |s| native::test_helpers(build, s.target));
|
||||
/// Compiles the `rust_test_helpers.c` library which we used in various
|
||||
/// `run-pass` test suites for ABI testing.
|
||||
pub fn test_helpers(build: &Build, target: &str) {
|
||||
let dst = build.test_helpers_out(target);
|
||||
let src = build.src.join("src/rt/rust_test_helpers.c");
|
||||
if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
|
||||
return
|
||||
}
|
||||
|
||||
let _folder = build.fold_output(|| "build_test_helpers");
|
||||
println!("Building test helpers");
|
||||
t!(fs::create_dir_all(&dst));
|
||||
let mut cfg = gcc::Config::new();
|
||||
|
||||
// We may have found various cross-compilers a little differently due to our
|
||||
// extra configuration, so inform gcc of these compilers. Note, though, that
|
||||
// on MSVC we still need gcc's detection of env vars (ugh).
|
||||
if !target.contains("msvc") {
|
||||
if let Some(ar) = build.ar(target) {
|
||||
cfg.archiver(ar);
|
||||
}
|
||||
cfg.compiler(build.cc(target));
|
||||
}
|
||||
|
||||
cfg.cargo_metadata(false)
|
||||
.out_dir(&dst)
|
||||
.target(target)
|
||||
.host(&build.build)
|
||||
.opt_level(0)
|
||||
.debug(false)
|
||||
.file(build.src.join("src/rt/rust_test_helpers.c"))
|
||||
.compile("librust_test_helpers.a");
|
||||
#[derive(Serialize)]
|
||||
pub struct TestHelpers<'a> {
|
||||
target: &'a str,
|
||||
}
|
||||
|
||||
impl<'a> Step<'a> for TestHelpers<'a> {
|
||||
type Output = ();
|
||||
|
||||
/// Compiles the `rust_test_helpers.c` library which we used in various
|
||||
/// `run-pass` test suites for ABI testing.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let target = self.target;
|
||||
let dst = build.test_helpers_out(target);
|
||||
let src = build.src.join("src/rt/rust_test_helpers.c");
|
||||
if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
|
||||
return
|
||||
}
|
||||
|
||||
let _folder = build.fold_output(|| "build_test_helpers");
|
||||
println!("Building test helpers");
|
||||
t!(fs::create_dir_all(&dst));
|
||||
let mut cfg = gcc::Config::new();
|
||||
|
||||
// We may have found various cross-compilers a little differently due to our
|
||||
// extra configuration, so inform gcc of these compilers. Note, though, that
|
||||
// on MSVC we still need gcc's detection of env vars (ugh).
|
||||
if !target.contains("msvc") {
|
||||
if let Some(ar) = build.ar(target) {
|
||||
cfg.archiver(ar);
|
||||
}
|
||||
cfg.compiler(build.cc(target));
|
||||
}
|
||||
|
||||
cfg.cargo_metadata(false)
|
||||
.out_dir(&dst)
|
||||
.target(target)
|
||||
.host(&build.build)
|
||||
.opt_level(0)
|
||||
.debug(false)
|
||||
.file(build.src.join("src/rt/rust_test_helpers.c"))
|
||||
.compile("librust_test_helpers.a");
|
||||
}
|
||||
}
|
||||
|
||||
const OPENSSL_VERS: &'static str = "1.0.2k";
|
||||
const OPENSSL_SHA256: &'static str =
|
||||
"6b3977c61f2aedf0f96367dcfb5c6e578cf37e7b8d913b4ecb6643c3cb88d8c0";
|
||||
|
@ -268,124 +293,135 @@ const OPENSSL_SHA256: &'static str =
|
|||
//rules.build("openssl", "path/to/nowhere")
|
||||
// .run(move |s| native::openssl(build, s.target));
|
||||
|
||||
pub fn openssl(build: &Build, target: &str) {
|
||||
let out = match build.openssl_dir(target) {
|
||||
Some(dir) => dir,
|
||||
None => return,
|
||||
};
|
||||
|
||||
let stamp = out.join(".stamp");
|
||||
let mut contents = String::new();
|
||||
drop(File::open(&stamp).and_then(|mut f| f.read_to_string(&mut contents)));
|
||||
if contents == OPENSSL_VERS {
|
||||
return
|
||||
}
|
||||
t!(fs::create_dir_all(&out));
|
||||
|
||||
let name = format!("openssl-{}.tar.gz", OPENSSL_VERS);
|
||||
let tarball = out.join(&name);
|
||||
if !tarball.exists() {
|
||||
let tmp = tarball.with_extension("tmp");
|
||||
// originally from https://www.openssl.org/source/...
|
||||
let url = format!("https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/{}",
|
||||
name);
|
||||
let mut ok = false;
|
||||
for _ in 0..3 {
|
||||
let status = Command::new("curl")
|
||||
.arg("-o").arg(&tmp)
|
||||
.arg(&url)
|
||||
.status()
|
||||
.expect("failed to spawn curl");
|
||||
if status.success() {
|
||||
ok = true;
|
||||
break
|
||||
}
|
||||
}
|
||||
if !ok {
|
||||
panic!("failed to download openssl source")
|
||||
}
|
||||
let mut shasum = if target.contains("apple") {
|
||||
let mut cmd = Command::new("shasum");
|
||||
cmd.arg("-a").arg("256");
|
||||
cmd
|
||||
} else {
|
||||
Command::new("sha256sum")
|
||||
};
|
||||
let output = output(&mut shasum.arg(&tmp));
|
||||
let found = output.split_whitespace().next().unwrap();
|
||||
if found != OPENSSL_SHA256 {
|
||||
panic!("downloaded openssl sha256 different\n\
|
||||
expected: {}\n\
|
||||
found: {}\n", OPENSSL_SHA256, found);
|
||||
}
|
||||
t!(fs::rename(&tmp, &tarball));
|
||||
}
|
||||
let obj = out.join(format!("openssl-{}", OPENSSL_VERS));
|
||||
let dst = build.openssl_install_dir(target).unwrap();
|
||||
drop(fs::remove_dir_all(&obj));
|
||||
drop(fs::remove_dir_all(&dst));
|
||||
build.run(Command::new("tar").arg("xf").arg(&tarball).current_dir(&out));
|
||||
|
||||
let mut configure = Command::new(obj.join("Configure"));
|
||||
configure.arg(format!("--prefix={}", dst.display()));
|
||||
configure.arg("no-dso");
|
||||
configure.arg("no-ssl2");
|
||||
configure.arg("no-ssl3");
|
||||
|
||||
let os = match target {
|
||||
"aarch64-linux-android" => "linux-aarch64",
|
||||
"aarch64-unknown-linux-gnu" => "linux-aarch64",
|
||||
"arm-linux-androideabi" => "android",
|
||||
"arm-unknown-linux-gnueabi" => "linux-armv4",
|
||||
"arm-unknown-linux-gnueabihf" => "linux-armv4",
|
||||
"armv7-linux-androideabi" => "android-armv7",
|
||||
"armv7-unknown-linux-gnueabihf" => "linux-armv4",
|
||||
"i686-apple-darwin" => "darwin-i386-cc",
|
||||
"i686-linux-android" => "android-x86",
|
||||
"i686-unknown-freebsd" => "BSD-x86-elf",
|
||||
"i686-unknown-linux-gnu" => "linux-elf",
|
||||
"i686-unknown-linux-musl" => "linux-elf",
|
||||
"mips-unknown-linux-gnu" => "linux-mips32",
|
||||
"mips64-unknown-linux-gnuabi64" => "linux64-mips64",
|
||||
"mips64el-unknown-linux-gnuabi64" => "linux64-mips64",
|
||||
"mipsel-unknown-linux-gnu" => "linux-mips32",
|
||||
"powerpc-unknown-linux-gnu" => "linux-ppc",
|
||||
"powerpc64-unknown-linux-gnu" => "linux-ppc64",
|
||||
"powerpc64le-unknown-linux-gnu" => "linux-ppc64le",
|
||||
"s390x-unknown-linux-gnu" => "linux64-s390x",
|
||||
"x86_64-apple-darwin" => "darwin64-x86_64-cc",
|
||||
"x86_64-linux-android" => "linux-x86_64",
|
||||
"x86_64-unknown-freebsd" => "BSD-x86_64",
|
||||
"x86_64-unknown-linux-gnu" => "linux-x86_64",
|
||||
"x86_64-unknown-linux-musl" => "linux-x86_64",
|
||||
"x86_64-unknown-netbsd" => "BSD-x86_64",
|
||||
_ => panic!("don't know how to configure OpenSSL for {}", target),
|
||||
};
|
||||
configure.arg(os);
|
||||
configure.env("CC", build.cc(target));
|
||||
for flag in build.cflags(target) {
|
||||
configure.arg(flag);
|
||||
}
|
||||
// There is no specific os target for android aarch64 or x86_64,
|
||||
// so we need to pass some extra cflags
|
||||
if target == "aarch64-linux-android" || target == "x86_64-linux-android" {
|
||||
configure.arg("-mandroid");
|
||||
configure.arg("-fomit-frame-pointer");
|
||||
}
|
||||
// Make PIE binaries
|
||||
// Non-PIE linker support was removed in Lollipop
|
||||
// https://source.android.com/security/enhancements/enhancements50
|
||||
if target == "i686-linux-android" {
|
||||
configure.arg("no-asm");
|
||||
}
|
||||
configure.current_dir(&obj);
|
||||
println!("Configuring openssl for {}", target);
|
||||
build.run_quiet(&mut configure);
|
||||
println!("Building openssl for {}", target);
|
||||
build.run_quiet(Command::new("make").arg("-j1").current_dir(&obj));
|
||||
println!("Installing openssl for {}", target);
|
||||
build.run_quiet(Command::new("make").arg("install").current_dir(&obj));
|
||||
|
||||
let mut f = t!(File::create(&stamp));
|
||||
t!(f.write_all(OPENSSL_VERS.as_bytes()));
|
||||
#[derive(Serialize)]
|
||||
pub struct Openssl<'a> {
|
||||
target: &'a str,
|
||||
}
|
||||
|
||||
impl<'a> Step<'a> for Openssl<'a> {
|
||||
type Output = ();
|
||||
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = bulder.build;
|
||||
let target = self.target;
|
||||
let out = match build.openssl_dir(target) {
|
||||
Some(dir) => dir,
|
||||
None => return,
|
||||
};
|
||||
|
||||
let stamp = out.join(".stamp");
|
||||
let mut contents = String::new();
|
||||
drop(File::open(&stamp).and_then(|mut f| f.read_to_string(&mut contents)));
|
||||
if contents == OPENSSL_VERS {
|
||||
return
|
||||
}
|
||||
t!(fs::create_dir_all(&out));
|
||||
|
||||
let name = format!("openssl-{}.tar.gz", OPENSSL_VERS);
|
||||
let tarball = out.join(&name);
|
||||
if !tarball.exists() {
|
||||
let tmp = tarball.with_extension("tmp");
|
||||
// originally from https://www.openssl.org/source/...
|
||||
let url = format!("https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/{}",
|
||||
name);
|
||||
let mut ok = false;
|
||||
for _ in 0..3 {
|
||||
let status = Command::new("curl")
|
||||
.arg("-o").arg(&tmp)
|
||||
.arg(&url)
|
||||
.status()
|
||||
.expect("failed to spawn curl");
|
||||
if status.success() {
|
||||
ok = true;
|
||||
break
|
||||
}
|
||||
}
|
||||
if !ok {
|
||||
panic!("failed to download openssl source")
|
||||
}
|
||||
let mut shasum = if target.contains("apple") {
|
||||
let mut cmd = Command::new("shasum");
|
||||
cmd.arg("-a").arg("256");
|
||||
cmd
|
||||
} else {
|
||||
Command::new("sha256sum")
|
||||
};
|
||||
let output = output(&mut shasum.arg(&tmp));
|
||||
let found = output.split_whitespace().next().unwrap();
|
||||
if found != OPENSSL_SHA256 {
|
||||
panic!("downloaded openssl sha256 different\n\
|
||||
expected: {}\n\
|
||||
found: {}\n", OPENSSL_SHA256, found);
|
||||
}
|
||||
t!(fs::rename(&tmp, &tarball));
|
||||
}
|
||||
let obj = out.join(format!("openssl-{}", OPENSSL_VERS));
|
||||
let dst = build.openssl_install_dir(target).unwrap();
|
||||
drop(fs::remove_dir_all(&obj));
|
||||
drop(fs::remove_dir_all(&dst));
|
||||
build.run(Command::new("tar").arg("xf").arg(&tarball).current_dir(&out));
|
||||
|
||||
let mut configure = Command::new(obj.join("Configure"));
|
||||
configure.arg(format!("--prefix={}", dst.display()));
|
||||
configure.arg("no-dso");
|
||||
configure.arg("no-ssl2");
|
||||
configure.arg("no-ssl3");
|
||||
|
||||
let os = match target {
|
||||
"aarch64-linux-android" => "linux-aarch64",
|
||||
"aarch64-unknown-linux-gnu" => "linux-aarch64",
|
||||
"arm-linux-androideabi" => "android",
|
||||
"arm-unknown-linux-gnueabi" => "linux-armv4",
|
||||
"arm-unknown-linux-gnueabihf" => "linux-armv4",
|
||||
"armv7-linux-androideabi" => "android-armv7",
|
||||
"armv7-unknown-linux-gnueabihf" => "linux-armv4",
|
||||
"i686-apple-darwin" => "darwin-i386-cc",
|
||||
"i686-linux-android" => "android-x86",
|
||||
"i686-unknown-freebsd" => "BSD-x86-elf",
|
||||
"i686-unknown-linux-gnu" => "linux-elf",
|
||||
"i686-unknown-linux-musl" => "linux-elf",
|
||||
"mips-unknown-linux-gnu" => "linux-mips32",
|
||||
"mips64-unknown-linux-gnuabi64" => "linux64-mips64",
|
||||
"mips64el-unknown-linux-gnuabi64" => "linux64-mips64",
|
||||
"mipsel-unknown-linux-gnu" => "linux-mips32",
|
||||
"powerpc-unknown-linux-gnu" => "linux-ppc",
|
||||
"powerpc64-unknown-linux-gnu" => "linux-ppc64",
|
||||
"powerpc64le-unknown-linux-gnu" => "linux-ppc64le",
|
||||
"s390x-unknown-linux-gnu" => "linux64-s390x",
|
||||
"x86_64-apple-darwin" => "darwin64-x86_64-cc",
|
||||
"x86_64-linux-android" => "linux-x86_64",
|
||||
"x86_64-unknown-freebsd" => "BSD-x86_64",
|
||||
"x86_64-unknown-linux-gnu" => "linux-x86_64",
|
||||
"x86_64-unknown-linux-musl" => "linux-x86_64",
|
||||
"x86_64-unknown-netbsd" => "BSD-x86_64",
|
||||
_ => panic!("don't know how to configure OpenSSL for {}", target),
|
||||
};
|
||||
configure.arg(os);
|
||||
configure.env("CC", build.cc(target));
|
||||
for flag in build.cflags(target) {
|
||||
configure.arg(flag);
|
||||
}
|
||||
// There is no specific os target for android aarch64 or x86_64,
|
||||
// so we need to pass some extra cflags
|
||||
if target == "aarch64-linux-android" || target == "x86_64-linux-android" {
|
||||
configure.arg("-mandroid");
|
||||
configure.arg("-fomit-frame-pointer");
|
||||
}
|
||||
// Make PIE binaries
|
||||
// Non-PIE linker support was removed in Lollipop
|
||||
// https://source.android.com/security/enhancements/enhancements50
|
||||
if target == "i686-linux-android" {
|
||||
configure.arg("no-asm");
|
||||
}
|
||||
configure.current_dir(&obj);
|
||||
println!("Configuring openssl for {}", target);
|
||||
build.run_quiet(&mut configure);
|
||||
println!("Building openssl for {}", target);
|
||||
build.run_quiet(Command::new("make").arg("-j1").current_dir(&obj));
|
||||
println!("Installing openssl for {}", target);
|
||||
build.run_quiet(Command::new("make").arg("install").current_dir(&obj));
|
||||
|
||||
let mut f = t!(File::create(&stamp));
|
||||
t!(f.write_all(OPENSSL_VERS.as_bytes()));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,205 @@
|
|||
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use std::env;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
|
||||
use Mode;
|
||||
use builder::{Step, Builder};
|
||||
use util::{exe, add_lib_path};
|
||||
use compile::{self, stamp, Rustc};
|
||||
use native;
|
||||
use channel::GitInfo;
|
||||
|
||||
//// ========================================================================
|
||||
//// Build tools
|
||||
////
|
||||
//// Tools used during the build system but not shipped
|
||||
//// "pseudo rule" which represents completely cleaning out the tools dir in
|
||||
//// one stage. This needs to happen whenever a dependency changes (e.g.
|
||||
//// libstd, libtest, librustc) and all of the tool compilations above will
|
||||
//// be sequenced after this rule.
|
||||
//rules.build("maybe-clean-tools", "path/to/nowhere")
|
||||
// .after("librustc-tool")
|
||||
// .after("libtest-tool")
|
||||
// .after("libstd-tool");
|
||||
//
|
||||
//rules.build("librustc-tool", "path/to/nowhere")
|
||||
// .dep(|s| s.name("librustc"))
|
||||
// .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Librustc));
|
||||
//rules.build("libtest-tool", "path/to/nowhere")
|
||||
// .dep(|s| s.name("libtest"))
|
||||
// .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libtest));
|
||||
//rules.build("libstd-tool", "path/to/nowhere")
|
||||
// .dep(|s| s.name("libstd"))
|
||||
// .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libstd));
|
||||
//
|
||||
|
||||
/// Parameters for the step that clears stale tool build artifacts.
///
/// Mirrors the old `maybe-clean-tools` rule: one instance per
/// (stage, target, dependency-library) combination.
#[derive(Serialize)]
pub struct CleanTools<'a> {
    // Compiler stage whose tool output directory is checked.
    pub stage: u32,
    // Target triple the tools are built for.
    pub target: &'a str,
    // Which library (libstd/libtest/librustc) the tools depend on; its
    // stamp decides whether the tool dir is dirty.
    pub mode: Mode,
}
|
||||
|
||||
impl<'a> Step<'a> for CleanTools<'a> {
    type Output = ();

    /// Clean out a stage's tool build directory when its dependencies change.
    ///
    /// Looks up the stamp file of the library this tool set depends on
    /// (libstd, libtest or librustc, selected by `mode`) and clears the
    /// stage's `Mode::Tool` cargo output directory if it is out of date
    /// relative to that stamp.
    ///
    /// NOTE(review): the previous doc comment ("Build a tool in `src/tools`")
    /// was copy-pasted from the tool build step and did not describe this
    /// cleaning step; only the comment is corrected here — the body is
    /// intentionally unchanged by this commit.
    fn run(self, builder: &Builder) {
        let build = builder.build;
        let stage = self.stage;
        let target = self.target;
        let mode = self.mode;

        // Tools are always compiled by the stage-N compiler for the build host.
        let compiler = Compiler::new(stage, &build.build);

        let stamp = match mode {
            Mode::Libstd => libstd_stamp(build, &compiler, target),
            Mode::Libtest => libtest_stamp(build, &compiler, target),
            Mode::Librustc => librustc_stamp(build, &compiler, target),
            // Only library modes carry a dependency stamp; reaching here
            // (e.g. with Mode::Tool) is a caller bug.
            _ => panic!(),
        };
        let out_dir = build.cargo_out(&compiler, Mode::Tool, target);
        // Wipes the tool output dir if the stamp is newer than its contents.
        build.clear_if_dirty(&out_dir, &stamp);
    }
}
|
||||
|
||||
// rules.build("tool-rustbook", "src/tools/rustbook")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("librustc-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "rustbook"));
|
||||
// rules.build("tool-error-index", "src/tools/error_index_generator")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("librustc-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "error_index_generator"));
|
||||
// rules.build("tool-unstable-book-gen", "src/tools/unstable-book-gen")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "unstable-book-gen"));
|
||||
// rules.build("tool-tidy", "src/tools/tidy")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "tidy"));
|
||||
// rules.build("tool-linkchecker", "src/tools/linkchecker")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "linkchecker"));
|
||||
// rules.build("tool-cargotest", "src/tools/cargotest")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "cargotest"));
|
||||
// rules.build("tool-compiletest", "src/tools/compiletest")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libtest-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "compiletest"));
|
||||
// rules.build("tool-build-manifest", "src/tools/build-manifest")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "build-manifest"));
|
||||
// rules.build("tool-remote-test-server", "src/tools/remote-test-server")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-server"));
|
||||
// rules.build("tool-remote-test-client", "src/tools/remote-test-client")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client"));
|
||||
// rules.build("tool-rust-installer", "src/tools/rust-installer")
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "rust-installer"));
|
||||
// rules.build("tool-cargo", "src/tools/cargo")
|
||||
// .host(true)
|
||||
// .default(build.config.extended)
|
||||
// .dep(|s| s.name("maybe-clean-tools"))
|
||||
// .dep(|s| s.name("libstd-tool"))
|
||||
// .dep(|s| s.stage(0).host(s.target).name("openssl"))
|
||||
// .dep(move |s| {
|
||||
// // Cargo depends on procedural macros, which requires a full host
|
||||
// // compiler to be available, so we need to depend on that.
|
||||
// s.name("librustc-link")
|
||||
// .target(&build.build)
|
||||
// .host(&build.build)
|
||||
// })
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "cargo"));
|
||||
// rules.build("tool-rls", "src/tools/rls")
|
||||
// .host(true)
|
||||
// .default(build.config.extended)
|
||||
// .dep(|s| s.name("librustc-tool"))
|
||||
// .dep(|s| s.stage(0).host(s.target).name("openssl"))
|
||||
// .dep(move |s| {
|
||||
// // rls, like cargo, uses procedural macros
|
||||
// s.name("librustc-link")
|
||||
// .target(&build.build)
|
||||
// .host(&build.build)
|
||||
// })
|
||||
// .run(move |s| compile::tool(build, s.stage, s.target, "rls"));
|
||||
//
|
||||
|
||||
/// Parameters for building one in-tree tool from `src/tools`.
#[derive(Serialize)]
pub struct Tool<'a> {
    // Stage of the compiler used to build the tool.
    pub stage: u32,
    // Target triple the tool is built for.
    pub target: &'a str,
    // Directory name of the tool under `src/tools` (also used in logs).
    pub tool: &'a str,
}
|
||||
|
||||
impl<'a> Step<'a> for Tool<'a> {
|
||||
type Output = ();
|
||||
|
||||
/// Build a tool in `src/tools`
|
||||
///
|
||||
/// This will build the specified tool with the specified `host` compiler in
|
||||
/// `stage` into the normal cargo output directory.
|
||||
fn run(self, builder: &Builder) {
|
||||
let build = builder.build;
|
||||
let stage = self.stage;
|
||||
let target = self.target;
|
||||
let tool = self.tool;
|
||||
|
||||
let _folder = build.fold_output(|| format!("stage{}-{}", stage, tool));
|
||||
println!("Building stage{} tool {} ({})", stage, tool, target);
|
||||
|
||||
let compiler = Compiler::new(stage, &build.build);
|
||||
|
||||
let mut cargo = build.cargo(&compiler, Mode::Tool, target, "build");
|
||||
let dir = build.src.join("src/tools").join(tool);
|
||||
cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));
|
||||
|
||||
// We don't want to build tools dynamically as they'll be running across
|
||||
// stages and such and it's just easier if they're not dynamically linked.
|
||||
cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
|
||||
|
||||
if let Some(dir) = build.openssl_install_dir(target) {
|
||||
cargo.env("OPENSSL_STATIC", "1");
|
||||
cargo.env("OPENSSL_DIR", dir);
|
||||
cargo.env("LIBZ_SYS_STATIC", "1");
|
||||
}
|
||||
|
||||
cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel);
|
||||
|
||||
let info = GitInfo::new(&dir);
|
||||
if let Some(sha) = info.sha() {
|
||||
cargo.env("CFG_COMMIT_HASH", sha);
|
||||
}
|
||||
if let Some(sha_short) = info.sha_short() {
|
||||
cargo.env("CFG_SHORT_COMMIT_HASH", sha_short);
|
||||
}
|
||||
if let Some(date) = info.commit_date() {
|
||||
cargo.env("CFG_COMMIT_DATE", date);
|
||||
}
|
||||
|
||||
build.run(&mut cargo);
|
||||
}
|
||||
}
|
Loading…
Reference in New Issue