diff --git a/.travis.yml b/.travis.yml index 3a9635a8618..b19877e3843 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,7 +19,8 @@ matrix: - env: IMAGE=i686-gnu-nopt - env: IMAGE=x86_64-freebsd - env: IMAGE=x86_64-gnu - - env: IMAGE=x86_64-gnu-cargotest + - env: IMAGE=x86_64-gnu-full-bootstrap + - env: IMAGE=x86_64-gnu-aux - env: IMAGE=x86_64-gnu-debug - env: IMAGE=x86_64-gnu-nopt - env: IMAGE=x86_64-gnu-make @@ -66,10 +67,10 @@ script: if [ "$ALLOW_PR" = "" ] && [ "$TRAVIS_BRANCH" != "auto" ]; then echo skipping, not a full build; elif [ "$TRAVIS_OS_NAME" = "osx" ]; then - git submodule update --init; + git submodule update --init && src/ci/run.sh; else - git submodule update --init; + git submodule update --init && src/ci/docker/run.sh $IMAGE; fi @@ -78,7 +79,7 @@ before_cache: - docker history -q rust-ci | grep -v missing | xargs docker save | - gzip -9 > $HOME/docker/rust-ci.tar.gz + gzip > $HOME/docker/rust-ci.tar.gz before_install: - zcat $HOME/docker/rust-ci.tar.gz | docker load || true diff --git a/README.md b/README.md index 2133b17de0f..c1218e9c600 100644 --- a/README.md +++ b/README.md @@ -82,8 +82,9 @@ build. # Install build tools needed for Rust. If you're building a 32-bit compiler, # then replace "x86_64" below with "i686". If you've already got git, python, # or CMake installed and in PATH you can remove them from this list. Note - # that it is important that the `python2` and `cmake` packages **not** used. - # The build has historically been known to fail with these packages. + # that it is important that you do **not** use the 'python2' and 'cmake' + # packages from the 'msys2' subsystem. The build has historically been known + # to fail with these packages. $ pacman -S git \ make \ diffutils \ diff --git a/appveyor.yml b/appveyor.yml index 521ab00d0bf..a4b5cbc1152 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -21,7 +21,7 @@ environment: # MSVC cargotest - MSYS_BITS: 64 NO_VENDOR: 1 - RUST_CHECK_TARGET: check-cargotest + RUST_CHECK_TARGET: check-aux RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc # 32/64-bit MinGW builds. diff --git a/configure b/configure index 4f1e8f656ae..5094e5a764b 100755 --- a/configure +++ b/configure @@ -693,6 +693,7 @@ opt_nosave manage-submodules 1 "let the build manage the git submodules" opt_nosave clang 0 "prefer clang to gcc for building the runtime" opt_nosave jemalloc 1 "build liballoc with jemalloc" opt elf-tls 1 "elf thread local storage on platforms where supported" +opt full-bootstrap 0 "build three compilers instead of two" valopt_nosave prefix "/usr/local" "set installation prefix" valopt_nosave local-rust-root "/usr/local" "set prefix for local rust binary" diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index e7b0afeb8ce..ec0243908ed 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -341,12 +341,22 @@ pub fn krate(build: &Build, println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage, compiler.host, target); + // If we're not doing a full bootstrap but we're testing a stage2 version of + // libstd, then what we're actually testing is the libstd produced in + // stage1. Reflect that here by updating the compiler that we're working + // with automatically. + let compiler = if build.force_use_stage1(compiler, target) { + Compiler::new(1, compiler.host) + } else { + compiler.clone() + }; + // Build up the base `cargo test` command. 
// // Pass in some standard flags then iterate over the graph we've discovered // in `cargo metadata` with the maps above and figure out what `-p` // arguments need to get passed. - let mut cargo = build.cargo(compiler, mode, target, test_kind.subcommand()); + let mut cargo = build.cargo(&compiler, mode, target, test_kind.subcommand()); cargo.arg("--manifest-path") .arg(build.src.join(path).join("Cargo.toml")) .arg("--features").arg(features); @@ -380,7 +390,7 @@ pub fn krate(build: &Build, // Note that to run the compiler we need to run with the *host* libraries, // but our wrapper scripts arrange for that to be the case anyway. let mut dylib_path = dylib_path(); - dylib_path.insert(0, build.sysroot_libdir(compiler, target)); + dylib_path.insert(0, build.sysroot_libdir(&compiler, target)); cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); if target.contains("android") { @@ -399,10 +409,10 @@ pub fn krate(build: &Build, if target.contains("android") { build.run(&mut cargo); - krate_android(build, compiler, target, mode); + krate_android(build, &compiler, target, mode); } else if target.contains("emscripten") { build.run(&mut cargo); - krate_emscripten(build, compiler, target, mode); + krate_emscripten(build, &compiler, target, mode); } else { cargo.args(&build.flags.cmd.test_args()); build.run(&mut cargo); diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index b268686ca6c..dcccf788935 100644 --- a/src/bootstrap/compile.rs +++ b/src/bootstrap/compile.rs @@ -33,17 +33,12 @@ use {Build, Compiler, Mode}; /// This will build the standard library for a particular stage of the build /// using the `compiler` targeting the `target` architecture. The artifacts /// created will also be linked into the sysroot directory. -pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) { - println!("Building stage{} std artifacts ({} -> {})", compiler.stage, - compiler.host, target); - +pub fn std(build: &Build, target: &str, compiler: &Compiler) { let libdir = build.sysroot_libdir(compiler, target); - let _ = fs::remove_dir_all(&libdir); t!(fs::create_dir_all(&libdir)); - // Some platforms have startup objects that may be required to produce the - // libstd dynamic library, for example. - build_startup_objects(build, target, &libdir); + println!("Building stage{} std artifacts ({} -> {})", compiler.stage, + compiler.host, target); let out_dir = build.cargo_out(compiler, Mode::Libstd, target); build.clear_if_dirty(&out_dir, &build.compiler_path(compiler)); @@ -65,29 +60,30 @@ pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) { build.run(&mut cargo); update_mtime(&libstd_stamp(build, &compiler, target)); - std_link(build, target, compiler.stage, compiler.host); } /// Link all libstd rlibs/dylibs into the sysroot location. /// -/// Links those artifacts generated in the given `stage` for `target` produced -/// by `compiler` into `host`'s sysroot. +/// Links those artifacts generated by `compiler` to the `stage` compiler's +/// sysroot for the specified `host` and `target`. +/// +/// Note that this assumes that `compiler` has already generated the libstd +/// libraries for `target`, and this method will find them in the relevant +/// output directory.
pub fn std_link(build: &Build, - target: &str, - stage: u32, - host: &str) { - let compiler = Compiler::new(stage, &build.config.build); - let target_compiler = Compiler::new(compiler.stage, host); + compiler: &Compiler, + target_compiler: &Compiler, + target: &str) { + println!("Copying stage{} std from stage{} ({} -> {} / {})", + target_compiler.stage, + compiler.stage, + compiler.host, + target_compiler.host, + target); let libdir = build.sysroot_libdir(&target_compiler, target); let out_dir = build.cargo_out(&compiler, Mode::Libstd, target); - // If we're linking one compiler host's output into another, then we weren't - // called from the `std` method above. In that case we clean out what's - // already there. - if host != compiler.host { - let _ = fs::remove_dir_all(&libdir); - t!(fs::create_dir_all(&libdir)); - } + t!(fs::create_dir_all(&libdir)); add_to_sysroot(&out_dir, &libdir); if target.contains("musl") && !target.contains("mips") { @@ -110,12 +106,15 @@ fn copy_musl_third_party_objects(build: &Build, target: &str, into: &Path) { /// They don't require any library support as they're just plain old object /// files, so we just use the nightly snapshot compiler to always build them (as /// no other compilers are guaranteed to be available). -fn build_startup_objects(build: &Build, target: &str, into: &Path) { +pub fn build_startup_objects(build: &Build, for_compiler: &Compiler, target: &str) { if !target.contains("pc-windows-gnu") { return } + let compiler = Compiler::new(0, &build.config.build); let compiler_path = build.compiler_path(&compiler); + let into = build.sysroot_libdir(for_compiler, target); + t!(fs::create_dir_all(&into)); for file in t!(fs::read_dir(build.src.join("src/rtstartup"))) { let file = t!(file); @@ -123,7 +122,7 @@ fn build_startup_objects(build: &Build, target: &str, into: &Path) { build.run(cmd.env("RUSTC_BOOTSTRAP", "1") .arg("--target").arg(target) .arg("--emit=obj") - .arg("--out-dir").arg(into) + .arg("--out-dir").arg(&into) .arg(file.path())); } @@ -137,7 +136,7 @@ fn build_startup_objects(build: &Build, target: &str, into: &Path) { /// This will build libtest and supporting libraries for a particular stage of /// the build using the `compiler` targeting the `target` architecture. The /// artifacts created will also be linked into the sysroot directory. -pub fn test<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) { +pub fn test(build: &Build, target: &str, compiler: &Compiler) { println!("Building stage{} test artifacts ({} -> {})", compiler.stage, compiler.host, target); let out_dir = build.cargo_out(compiler, Mode::Libtest, target); @@ -147,19 +146,19 @@ pub fn test<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) { .arg(build.src.join("src/rustc/test_shim/Cargo.toml")); build.run(&mut cargo); update_mtime(&libtest_stamp(build, compiler, target)); - test_link(build, target, compiler.stage, compiler.host); } -/// Link all libtest rlibs/dylibs into the sysroot location. -/// -/// Links those artifacts generated in the given `stage` for `target` produced -/// by `compiler` into `host`'s sysroot. 
+/// Same as `std_link`, only for libtest pub fn test_link(build: &Build, - target: &str, - stage: u32, - host: &str) { - let compiler = Compiler::new(stage, &build.config.build); - let target_compiler = Compiler::new(compiler.stage, host); + compiler: &Compiler, + target_compiler: &Compiler, + target: &str) { + println!("Copying stage{} test from stage{} ({} -> {} / {})", + target_compiler.stage, + compiler.stage, + compiler.host, + target_compiler.host, + target); let libdir = build.sysroot_libdir(&target_compiler, target); let out_dir = build.cargo_out(&compiler, Mode::Libtest, target); add_to_sysroot(&out_dir, &libdir); @@ -170,7 +169,7 @@ pub fn test_link(build: &Build, /// This will build the compiler for a particular stage of the build using /// the `compiler` targeting the `target` architecture. The artifacts /// created will also be linked into the sysroot directory. -pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) { +pub fn rustc(build: &Build, target: &str, compiler: &Compiler) { println!("Building stage{} compiler artifacts ({} -> {})", compiler.stage, compiler.host, target); @@ -222,20 +221,19 @@ pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) { cargo.env("CFG_DEFAULT_AR", s); } build.run(&mut cargo); - - rustc_link(build, target, compiler.stage, compiler.host); } -/// Link all librustc rlibs/dylibs into the sysroot location. -/// -/// Links those artifacts generated in the given `stage` for `target` produced -/// by `compiler` into `host`'s sysroot. +/// Same as `std_link`, only for librustc pub fn rustc_link(build: &Build, - target: &str, - stage: u32, - host: &str) { - let compiler = Compiler::new(stage, &build.config.build); - let target_compiler = Compiler::new(compiler.stage, host); + compiler: &Compiler, + target_compiler: &Compiler, + target: &str) { + println!("Copying stage{} rustc from stage{} ({} -> {} / {})", + target_compiler.stage, + compiler.stage, + compiler.host, + target_compiler.host, + target); let libdir = build.sysroot_libdir(&target_compiler, target); let out_dir = build.cargo_out(&compiler, Mode::Librustc, target); add_to_sysroot(&out_dir, &libdir); @@ -259,6 +257,17 @@ fn compiler_file(compiler: &Path, file: &str) -> PathBuf { PathBuf::from(out.trim()) } +pub fn create_sysroot(build: &Build, compiler: &Compiler) { + // nothing to do in stage0 + if compiler.stage == 0 { + return + } + + let sysroot = build.sysroot(compiler); + let _ = fs::remove_dir_all(&sysroot); + t!(fs::create_dir_all(&sysroot)); +} + /// Prepare a new compiler from the artifacts in `stage` /// /// This will assemble a compiler in `build/$host/stage$stage`. 
The compiler @@ -269,18 +278,17 @@ pub fn assemble_rustc(build: &Build, stage: u32, host: &str) { if stage == 0 { return } + + println!("Copying stage{} compiler ({})", stage, host); + // The compiler that we're assembling let target_compiler = Compiler::new(stage, host); // The compiler that compiled the compiler we're assembling let build_compiler = Compiler::new(stage - 1, &build.config.build); - // Clear out old files - let sysroot = build.sysroot(&target_compiler); - let _ = fs::remove_dir_all(&sysroot); - t!(fs::create_dir_all(&sysroot)); - // Link in all dylibs to the libdir + let sysroot = build.sysroot(&target_compiler); let sysroot_libdir = sysroot.join(libdir(host)); t!(fs::create_dir_all(&sysroot_libdir)); let src_libdir = build.sysroot_libdir(&build_compiler, host); diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index 6b86e537b7d..9767afd73ca 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -46,6 +46,7 @@ pub struct Config { pub docs: bool, pub vendor: bool, pub target_config: HashMap, + pub full_bootstrap: bool, // llvm codegen options pub llvm_assertions: bool, @@ -54,6 +55,7 @@ pub struct Config { pub llvm_version_check: bool, pub llvm_static_stdcpp: bool, pub llvm_link_shared: bool, + pub llvm_targets: Option, // rust codegen options pub rust_optimize: bool, @@ -134,6 +136,7 @@ struct Build { vendor: Option, nodejs: Option, python: Option, + full_bootstrap: Option, } /// TOML representation of various global install decisions. @@ -152,6 +155,7 @@ struct Llvm { release_debuginfo: Option, version_check: Option, static_libstdcpp: Option, + targets: Option, } #[derive(RustcDecodable)] @@ -264,6 +268,7 @@ impl Config { set(&mut config.docs, build.docs); set(&mut config.submodules, build.submodules); set(&mut config.vendor, build.vendor); + set(&mut config.full_bootstrap, build.full_bootstrap); if let Some(ref install) = toml.install { config.prefix = install.prefix.clone(); @@ -285,6 +290,7 @@ impl Config { set(&mut config.llvm_release_debuginfo, llvm.release_debuginfo); set(&mut config.llvm_version_check, llvm.version_check); set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp); + config.llvm_targets = llvm.targets.clone(); } if let Some(ref rust) = toml.rust { @@ -393,6 +399,7 @@ impl Config { ("NINJA", self.ninja), ("CODEGEN_TESTS", self.codegen_tests), ("VENDOR", self.vendor), + ("FULL_BOOTSTRAP", self.full_bootstrap), } match key { diff --git a/src/bootstrap/config.toml.example b/src/bootstrap/config.toml.example index 5fc095137c7..69210c4959b 100644 --- a/src/bootstrap/config.toml.example +++ b/src/bootstrap/config.toml.example @@ -42,6 +42,17 @@ # example. #ninja = false +# LLVM targets to build support for. +# Note: this is NOT related to Rust compilation targets. However, as Rust is +# dependent on LLVM for code generation, turning targets off here WILL lead to +# the resulting rustc being unable to compile for the disabled architectures. +# Also worth pointing out is that, in case support for new targets is added to +# LLVM, enabling them here doesn't mean Rust is automatically gaining said +# support. You'll need to write a target specification at least, and most +# likely, teach rustc about the C ABI of the target. Get in touch with the +# Rust team and file an issue if you need assistance in porting!
+#targets = "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX" + # ============================================================================= # General build configuration options # ============================================================================= @@ -100,6 +111,13 @@ # Indicate whether the vendored sources are used for Rust dependencies or not #vendor = false +# Typically the build system will build the rust compiler twice. The second +# compiler, however, will simply use its own libraries to link against. If you +# would rather to perform a full bootstrap, compiling the compiler three times, +# then you can set this option to true. You shouldn't ever need to set this +# option to true. +#full-bootstrap = false + # ============================================================================= # General install configuration options # ============================================================================= diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs index 30c7fefad87..4c4462bf122 100644 --- a/src/bootstrap/doc.rs +++ b/src/bootstrap/doc.rs @@ -137,6 +137,11 @@ pub fn std(build: &Build, stage: u32, target: &str) { let out = build.doc_out(target); t!(fs::create_dir_all(&out)); let compiler = Compiler::new(stage, &build.config.build); + let compiler = if build.force_use_stage1(&compiler, target) { + Compiler::new(1, compiler.host) + } else { + compiler + }; let out_dir = build.stage_out(&compiler, Mode::Libstd) .join(target).join("doc"); let rustdoc = build.rustdoc(&compiler); @@ -160,6 +165,11 @@ pub fn test(build: &Build, stage: u32, target: &str) { let out = build.doc_out(target); t!(fs::create_dir_all(&out)); let compiler = Compiler::new(stage, &build.config.build); + let compiler = if build.force_use_stage1(&compiler, target) { + Compiler::new(1, compiler.host) + } else { + compiler + }; let out_dir = build.stage_out(&compiler, Mode::Libtest) .join(target).join("doc"); let rustdoc = build.rustdoc(&compiler); @@ -182,6 +192,11 @@ pub fn rustc(build: &Build, stage: u32, target: &str) { let out = build.doc_out(target); t!(fs::create_dir_all(&out)); let compiler = Compiler::new(stage, &build.config.build); + let compiler = if build.force_use_stage1(&compiler, target) { + Compiler::new(1, compiler.host) + } else { + compiler + }; let out_dir = build.stage_out(&compiler, Mode::Librustc) .join(target).join("doc"); let rustdoc = build.rustdoc(&compiler); diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 665e0c67b7f..49eaed4c67a 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -572,9 +572,7 @@ impl Build { let mut cmd = Command::new(self.tool(&compiler, tool)); let host = compiler.host; let mut paths = vec![ - self.cargo_out(compiler, Mode::Libstd, host).join("deps"), - self.cargo_out(compiler, Mode::Libtest, host).join("deps"), - self.cargo_out(compiler, Mode::Librustc, host).join("deps"), + self.sysroot_libdir(compiler, compiler.host), self.cargo_out(compiler, Mode::Tool, host).join("deps"), ]; @@ -880,6 +878,30 @@ impl Build { fn python(&self) -> &Path { self.config.python.as_ref().unwrap() } + + /// Tests whether the `compiler` compiling for `target` should be forced to + /// use a stage1 compiler instead. + /// + /// Currently, by default, the build system does not perform a "full + /// bootstrap" by default where we compile the compiler three times. + /// Instead, we compile the compiler two times. 
The final stage (stage2) + /// just copies the libraries from the previous stage, which is what this + /// method detects. + /// + /// Here we return `true` if: + /// + /// * The build isn't performing a full bootstrap + /// * The `compiler` is in the final stage, 2 + /// * We're not cross-compiling, so the artifacts are already available in + /// stage1 + /// + /// When all of these conditions are met the build will lift artifacts from + /// the previous stage forward. + fn force_use_stage1(&self, compiler: &Compiler, target: &str) -> bool { + !self.config.full_bootstrap && + compiler.stage >= 2 && + self.config.host.iter().any(|h| h == target) + } } impl<'a> Compiler<'a> { diff --git a/src/bootstrap/mk/Makefile.in b/src/bootstrap/mk/Makefile.in index 0d83a79cf32..a3333369930 100644 --- a/src/bootstrap/mk/Makefile.in +++ b/src/bootstrap/mk/Makefile.in @@ -51,8 +51,16 @@ standalone-docs: $(Q)$(BOOTSTRAP) doc src/doc $(BOOTSTRAP_ARGS) check: $(Q)$(BOOTSTRAP) test $(BOOTSTRAP_ARGS) -check-cargotest: - $(Q)$(BOOTSTRAP) test src/tools/cargotest $(BOOTSTRAP_ARGS) +check-aux: + $(Q)$(BOOTSTRAP) test \ + src/tools/cargotest \ + src/test/pretty \ + src/test/run-pass/pretty \ + src/test/run-fail/pretty \ + src/test/run-pass-valgrind/pretty \ + src/test/run-pass-fulldeps/pretty \ + src/test/run-fail-fulldeps/pretty \ + $(BOOTSTRAP_ARGS) dist: $(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS) distcheck: diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index 09dbd9f8220..4b6fef8edc1 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -75,13 +75,18 @@ pub fn llvm(build: &Build, target: &str) { (true, true) => "RelWithDebInfo", }; + // NOTE: remember to also update `config.toml.example` when changing the defaults! + let llvm_targets = match build.config.llvm_targets { + Some(ref s) => s, + None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX", + }; + cfg.target(target) .host(&build.config.build) .out_dir(&dst) .profile(profile) .define("LLVM_ENABLE_ASSERTIONS", assertions) - .define("LLVM_TARGETS_TO_BUILD", - "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc") + .define("LLVM_TARGETS_TO_BUILD", llvm_targets) .define("LLVM_INCLUDE_EXAMPLES", "OFF") .define("LLVM_INCLUDE_TESTS", "OFF") .define("LLVM_INCLUDE_DOCS", "OFF") diff --git a/src/bootstrap/step.rs b/src/bootstrap/step.rs index 52caa3f0958..7bcfb06f210 100644 --- a/src/bootstrap/step.rs +++ b/src/bootstrap/step.rs @@ -44,7 +44,7 @@ pub fn run(build: &Build) { rules.run(&steps); } -pub fn build_rules(build: &Build) -> Rules { +pub fn build_rules<'a>(build: &'a Build) -> Rules { let mut rules = Rules::new(build); // This is the first rule that we're going to define for rustbuild, which is @@ -117,6 +117,7 @@ pub fn build_rules(build: &Build) -> Rules { // the compiler with no target libraries ready to go rules.build("rustc", "src/rustc") + .dep(|s| s.name("create-sysroot").target(s.host)) .dep(move |s| { if s.stage == 0 { Step::noop() @@ -151,77 +152,145 @@ pub fn build_rules(build: &Build) -> Rules { // Crate compilations // // Tools used during the build system but not shipped + rules.build("create-sysroot", "path/to/nowhere") + .run(move |s| compile::create_sysroot(build, &s.compiler())); + + // These rules are "pseudo rules" that don't actually do any work + // themselves, but represent a complete sysroot with the relevant compiler + // linked into place. 
+ // + // That is, depending on "libstd" means that when the rule is completed then + // the `stage` sysroot for the compiler `host` will be available with a + // standard library built for `target` linked in place. Not all rules need + // the compiler itself to be available, just the standard library, so + // there's a distinction between the two. rules.build("libstd", "src/libstd") - .dep(|s| s.name("build-crate-std_shim")); + .dep(|s| s.name("rustc").target(s.host)) + .dep(|s| s.name("libstd-link")); rules.build("libtest", "src/libtest") - .dep(|s| s.name("build-crate-test_shim")); + .dep(|s| s.name("libstd")) + .dep(|s| s.name("libtest-link")) + .default(true); rules.build("librustc", "src/librustc") - .dep(|s| s.name("build-crate-rustc-main")); + .dep(|s| s.name("libtest")) + .dep(|s| s.name("librustc-link")) + .host(true) + .default(true); + + // Helper method to define the rules to link a crate into its place in the + // sysroot. + // + // The logic here is a little subtle as there are a few cases to consider. + // Not all combinations of (stage, host, target) actually require something + // to be compiled, but rather libraries could get propagated from a + // different location. For example: + // + // * Any crate with a `host` that's not the build triple will not actually + // compile something. A different `host` means that the build triple will + // actually compile the libraries, and then we'll copy them over from the + // build triple to the `host` directory. + // + // * Some crates aren't even compiled by the build triple, but may be copied + // from previous stages. For example if we're not doing a full bootstrap + // then we may just depend on the stage1 versions of libraries to be + // available to get linked forward. + // + // * Finally, there are some cases, however, which do indeed compile crates + // and link them into place afterwards. + // + // The rule definition below mirrors these three cases. The `dep` method + // calculates the correct dependency which either comes from stage1, a + // different compiler, or from actually building the crate itself (the `dep` + // rule). The `run` rule then mirrors these three cases and links the cases + // forward into the compiler sysroot specified from the correct location. + fn crate_rule<'a, 'b>(build: &'a Build, + rules: &'b mut Rules<'a>, + krate: &'a str, + dep: &'a str, + link: fn(&Build, &Compiler, &Compiler, &str)) + -> RuleBuilder<'a, 'b> { + let mut rule = rules.build(&krate, "path/to/nowhere"); + rule.dep(move |s| { + if build.force_use_stage1(&s.compiler(), s.target) { + s.host(&build.config.build).stage(1) + } else if s.host == build.config.build { + s.name(dep) + } else { + s.host(&build.config.build) + } + }) + .run(move |s| { + if build.force_use_stage1(&s.compiler(), s.target) { + link(build, + &s.stage(1).host(&build.config.build).compiler(), + &s.compiler(), + s.target) + } else if s.host == build.config.build { + link(build, &s.compiler(), &s.compiler(), s.target) + } else { + link(build, + &s.host(&build.config.build).compiler(), + &s.compiler(), + s.target) + } + }); + return rule + } + + // Similar to the `libstd`, `libtest`, and `librustc` rules above, except + // these rules only represent the libraries being available in the sysroot, + // not the compiler itself. This is done as not all rules need a compiler in + // the sysroot, but may just need the libraries. + // + // All of these rules use the helper definition above.
+ crate_rule(build, + &mut rules, + "libstd-link", + "build-crate-std_shim", + compile::std_link) + .dep(|s| s.name("startup-objects")) + .dep(|s| s.name("create-sysroot").target(s.host)); + crate_rule(build, + &mut rules, + "libtest-link", + "build-crate-test_shim", + compile::test_link) + .dep(|s| s.name("libstd-link")); + crate_rule(build, + &mut rules, + "librustc-link", + "build-crate-rustc-main", + compile::rustc_link) + .dep(|s| s.name("libtest-link")); + for (krate, path, _default) in krates("std_shim") { rules.build(&krate.build_step, path) + .dep(|s| s.name("startup-objects")) .dep(move |s| s.name("rustc").host(&build.config.build).target(s.host)) - .dep(move |s| { - if s.host == build.config.build { - Step::noop() - } else { - s.host(&build.config.build) - } - }) - .run(move |s| { - if s.host == build.config.build { - compile::std(build, s.target, &s.compiler()) - } else { - compile::std_link(build, s.target, s.stage, s.host) - } - }); + .run(move |s| compile::std(build, s.target, &s.compiler())); } - for (krate, path, default) in krates("test_shim") { + for (krate, path, _default) in krates("test_shim") { rules.build(&krate.build_step, path) - .dep(|s| s.name("libstd")) - .dep(move |s| { - if s.host == build.config.build { - Step::noop() - } else { - s.host(&build.config.build) - } - }) - .default(default) - .run(move |s| { - if s.host == build.config.build { - compile::test(build, s.target, &s.compiler()) - } else { - compile::test_link(build, s.target, s.stage, s.host) - } - }); + .dep(|s| s.name("libstd-link")) + .run(move |s| compile::test(build, s.target, &s.compiler())); } - for (krate, path, default) in krates("rustc-main") { + for (krate, path, _default) in krates("rustc-main") { rules.build(&krate.build_step, path) - .dep(|s| s.name("libtest")) + .dep(|s| s.name("libtest-link")) .dep(move |s| s.name("llvm").host(&build.config.build).stage(0)) - .dep(move |s| { - if s.host == build.config.build { - Step::noop() - } else { - s.host(&build.config.build) - } - }) - .host(true) - .default(default) - .run(move |s| { - if s.host == build.config.build { - compile::rustc(build, s.target, &s.compiler()) - } else { - compile::rustc_link(build, s.target, s.stage, s.host) - } - }); + .run(move |s| compile::rustc(build, s.target, &s.compiler())); } + rules.build("startup-objects", "src/rtstartup") + .dep(|s| s.name("create-sysroot").target(s.host)) + .run(move |s| compile::build_startup_objects(build, &s.compiler(), s.target)); + // ======================================================================== // Test targets // // Various unit tests and tests suites we can run { - let mut suite = |name, path, dir, mode| { + let mut suite = |name, path, mode, dir| { rules.test(name, path) .dep(|s| s.name("libtest")) .dep(|s| s.name("tool-compiletest").target(s.host)) @@ -233,9 +302,9 @@ pub fn build_rules(build: &Build) -> Rules { Step::noop() } }) - .default(true) + .default(mode != "pretty") // pretty tests don't run everywhere .run(move |s| { - check::compiletest(build, &s.compiler(), s.target, dir, mode) + check::compiletest(build, &s.compiler(), s.target, mode, dir) }); }; @@ -254,13 +323,6 @@ pub fn build_rules(build: &Build) -> Rules { suite("check-incremental", "src/test/incremental", "incremental", "incremental"); suite("check-ui", "src/test/ui", "ui", "ui"); - suite("check-pretty", "src/test/pretty", "pretty", "pretty"); - suite("check-pretty-rpass", "src/test/run-pass/pretty", "pretty", - "run-pass"); - suite("check-pretty-rfail", "src/test/run-pass/pretty", "pretty", - 
"run-fail"); - suite("check-pretty-valgrind", "src/test/run-pass-valgrind", "pretty", - "run-pass-valgrind"); } if build.config.build.contains("msvc") { @@ -290,14 +352,15 @@ pub fn build_rules(build: &Build) -> Rules { s.target)); { - let mut suite = |name, path, dir, mode| { + let mut suite = |name, path, mode, dir| { rules.test(name, path) .dep(|s| s.name("librustc")) + .dep(|s| s.name("test-helpers")) .dep(|s| s.name("tool-compiletest").target(s.host)) - .default(true) + .default(mode != "pretty") .host(true) .run(move |s| { - check::compiletest(build, &s.compiler(), s.target, dir, mode) + check::compiletest(build, &s.compiler(), s.target, mode, dir) }); }; @@ -307,9 +370,16 @@ pub fn build_rules(build: &Build) -> Rules { "compile-fail", "compile-fail-fulldeps"); suite("check-rmake", "src/test/run-make", "run-make", "run-make"); suite("check-rustdoc", "src/test/rustdoc", "rustdoc", "rustdoc"); - suite("check-pretty-rpass-full", "src/test/run-pass-fulldeps", + suite("check-pretty", "src/test/pretty", "pretty", "pretty"); + suite("check-pretty-rpass", "src/test/run-pass/pretty", "pretty", + "run-pass"); + suite("check-pretty-rfail", "src/test/run-fail/pretty", "pretty", + "run-fail"); + suite("check-pretty-valgrind", "src/test/run-pass-valgrind/pretty", "pretty", + "run-pass-valgrind"); + suite("check-pretty-rpass-full", "src/test/run-pass-fulldeps/pretty", "pretty", "run-pass-fulldeps"); - suite("check-pretty-rfail-full", "src/test/run-fail-fulldeps", + suite("check-pretty-rfail-full", "src/test/run-fail-fulldeps/pretty", "pretty", "run-fail-fulldeps"); } @@ -444,25 +514,25 @@ pub fn build_rules(build: &Build) -> Rules { .run(move |s| doc::standalone(build, s.stage, s.target)); rules.doc("doc-error-index", "src/tools/error_index_generator") .dep(move |s| s.name("tool-error-index").target(&build.config.build)) - .dep(move |s| s.name("librustc")) + .dep(move |s| s.name("librustc-link")) .default(build.config.docs) .host(true) .run(move |s| doc::error_index(build, s.stage, s.target)); for (krate, path, default) in krates("std_shim") { rules.doc(&krate.doc_step, path) - .dep(|s| s.name("libstd")) + .dep(|s| s.name("libstd-link")) .default(default && build.config.docs) .run(move |s| doc::std(build, s.stage, s.target)); } for (krate, path, default) in krates("test_shim") { rules.doc(&krate.doc_step, path) - .dep(|s| s.name("libtest")) + .dep(|s| s.name("libtest-link")) .default(default && build.config.compiler_docs) .run(move |s| doc::test(build, s.stage, s.target)); } for (krate, path, default) in krates("rustc-main") { rules.doc(&krate.doc_step, path) - .dep(|s| s.name("librustc")) + .dep(|s| s.name("librustc-link")) .host(true) .default(default && build.config.compiler_docs) .run(move |s| doc::rustc(build, s.stage, s.target)); @@ -481,9 +551,9 @@ pub fn build_rules(build: &Build) -> Rules { // for the `rust-std` package, so if this is a host target we // depend on librustc and otherwise we just depend on libtest. 
if build.config.host.iter().any(|t| t == s.target) { - s.name("librustc") + s.name("librustc-link") } else { - s.name("libtest") + s.name("libtest-link") } }) .default(true) diff --git a/src/ci/docker/arm-android/Dockerfile b/src/ci/docker/arm-android/Dockerfile index 8911b4ff0cb..a40e76839ec 100644 --- a/src/ci/docker/arm-android/Dockerfile +++ b/src/ci/docker/arm-android/Dockerfile @@ -11,7 +11,6 @@ RUN dpkg --add-architecture i386 && \ python2.7 \ git \ cmake \ - ccache \ unzip \ expect \ openjdk-9-jre \ @@ -50,5 +49,3 @@ ENV RUST_CONFIGURE_ARGS \ --i686-linux-android-ndk=/android/ndk-x86-9 \ --aarch64-linux-android-ndk=/android/ndk-aarch64 ENV XPY_CHECK test --target arm-linux-androideabi -RUN mkdir /tmp/obj -RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/cross/Dockerfile b/src/ci/docker/cross/Dockerfile index 08b436313f6..c5aa323f50c 100644 --- a/src/ci/docker/cross/Dockerfile +++ b/src/ci/docker/cross/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ python2.7 \ git \ cmake \ - ccache \ sudo \ gcc-aarch64-linux-gnu libc6-dev-arm64-cross \ gcc-arm-linux-gnueabi libc6-dev-armel-cross \ @@ -70,6 +69,3 @@ ENV AR_s390x_unknown_linux_gnu=s390x-linux-gnu-ar \ # FIXME(rust-lang/rust#36150): powerpc unfortunately aborts right now ENV NO_LLVM_ASSERTIONS=1 - -RUN mkdir /tmp/obj -RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/i686-gnu-nopt/Dockerfile b/src/ci/docker/i686-gnu-nopt/Dockerfile index 1da33c94c7b..1ffb24981c5 100644 --- a/src/ci/docker/i686-gnu-nopt/Dockerfile +++ b/src/ci/docker/i686-gnu-nopt/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ python2.7 \ git \ cmake \ - ccache \ sudo \ gdb \ xz-utils @@ -25,5 +24,3 @@ ENTRYPOINT ["/usr/bin/dumb-init", "--"] ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu --disable-optimize-tests ENV RUST_CHECK_TARGET check -RUN mkdir /tmp/obj -RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/i686-gnu/Dockerfile b/src/ci/docker/i686-gnu/Dockerfile index 9e5b0e0435e..e4310232d78 100644 --- a/src/ci/docker/i686-gnu/Dockerfile +++ b/src/ci/docker/i686-gnu/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ python2.7 \ git \ cmake \ - ccache \ sudo \ gdb \ xz-utils @@ -25,5 +24,3 @@ ENTRYPOINT ["/usr/bin/dumb-init", "--"] ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu ENV RUST_CHECK_TARGET check -RUN mkdir /tmp/obj -RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/x86_64-freebsd/Dockerfile b/src/ci/docker/x86_64-freebsd/Dockerfile index 75f3174e2c0..86efa74ba3b 100644 --- a/src/ci/docker/x86_64-freebsd/Dockerfile +++ b/src/ci/docker/x86_64-freebsd/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ python2.7 \ git \ cmake \ - ccache \ sudo \ bzip2 \ xz-utils \ @@ -33,5 +32,3 @@ ENV \ ENV RUST_CONFIGURE_ARGS --target=x86_64-unknown-freebsd ENV RUST_CHECK_TARGET "" -RUN mkdir /tmp/obj -RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/x86_64-gnu-cargotest/Dockerfile b/src/ci/docker/x86_64-gnu-aux/Dockerfile similarity index 89% rename from src/ci/docker/x86_64-gnu-cargotest/Dockerfile rename to src/ci/docker/x86_64-gnu-aux/Dockerfile index 2c3db87d9fb..0ec0bfd1897 100644 --- a/src/ci/docker/x86_64-gnu-cargotest/Dockerfile +++ b/src/ci/docker/x86_64-gnu-aux/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ python2.7 \ git \ cmake \ - ccache \ libssl-dev \ sudo \ xz-utils \ @@ -25,7 +24,5 @@ RUN curl -OL 
https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini ENTRYPOINT ["/usr/bin/dumb-init", "--"] ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu -ENV RUST_CHECK_TARGET check-cargotest +ENV RUST_CHECK_TARGET check-aux ENV NO_VENDOR 1 -RUN mkdir /tmp/obj -RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/x86_64-gnu-debug/Dockerfile b/src/ci/docker/x86_64-gnu-debug/Dockerfile index eec88442293..9ec8c6059ec 100644 --- a/src/ci/docker/x86_64-gnu-debug/Dockerfile +++ b/src/ci/docker/x86_64-gnu-debug/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ python2.7 \ git \ cmake \ - ccache \ sudo \ gdb \ xz-utils @@ -28,5 +27,3 @@ ENV RUST_CONFIGURE_ARGS \ --enable-debug \ --enable-optimize ENV RUST_CHECK_TARGET "" -RUN mkdir /tmp/obj -RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/x86_64-gnu-full-bootstrap/Dockerfile b/src/ci/docker/x86_64-gnu-full-bootstrap/Dockerfile new file mode 100644 index 00000000000..7c079e45751 --- /dev/null +++ b/src/ci/docker/x86_64-gnu-full-bootstrap/Dockerfile @@ -0,0 +1,28 @@ +FROM ubuntu:16.04 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + g++ \ + make \ + file \ + curl \ + ca-certificates \ + python2.7 \ + git \ + cmake \ + sudo \ + gdb \ + xz-utils + +ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783 +RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \ + tar xJf - -C /usr/local/bin --strip-components=1 + +RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \ + dpkg -i dumb-init_*.deb && \ + rm dumb-init_*.deb +ENTRYPOINT ["/usr/bin/dumb-init", "--"] + +ENV RUST_CONFIGURE_ARGS \ + --build=x86_64-unknown-linux-gnu \ + --enable-full-bootstrap +ENV RUST_CHECK_TARGET "" diff --git a/src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile b/src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile index 4c9198d88eb..aabfc0cd1bd 100644 --- a/src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile +++ b/src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ python2.7 \ git \ cmake \ - ccache \ sudo \ gdb \ llvm-3.7-tools \ @@ -30,5 +29,3 @@ ENV RUST_CONFIGURE_ARGS \ --build=x86_64-unknown-linux-gnu \ --llvm-root=/usr/lib/llvm-3.7 ENV RUST_CHECK_TARGET check -RUN mkdir /tmp/obj -RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/x86_64-gnu-make/Dockerfile b/src/ci/docker/x86_64-gnu-make/Dockerfile index 1c503aea13d..c6071d704f5 100644 --- a/src/ci/docker/x86_64-gnu-make/Dockerfile +++ b/src/ci/docker/x86_64-gnu-make/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ python2.7 \ git \ cmake \ - ccache \ sudo \ gdb \ xz-utils @@ -25,5 +24,3 @@ ENTRYPOINT ["/usr/bin/dumb-init", "--"] ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --disable-rustbuild ENV RUST_CHECK_TARGET check -RUN mkdir /tmp/obj -RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/x86_64-gnu-nopt/Dockerfile b/src/ci/docker/x86_64-gnu-nopt/Dockerfile index 66de6ea13ac..d28dc3de1b4 100644 --- a/src/ci/docker/x86_64-gnu-nopt/Dockerfile +++ b/src/ci/docker/x86_64-gnu-nopt/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ python2.7 \ git \ cmake \ - ccache \ sudo \ gdb \ xz-utils @@ -25,5 +24,3 @@ ENTRYPOINT ["/usr/bin/dumb-init", "--"] ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --disable-optimize-tests ENV 
RUST_CHECK_TARGET check -RUN mkdir /tmp/obj -RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/x86_64-gnu/Dockerfile b/src/ci/docker/x86_64-gnu/Dockerfile index 3d71b7ffb9a..9d8b75c80c7 100644 --- a/src/ci/docker/x86_64-gnu/Dockerfile +++ b/src/ci/docker/x86_64-gnu/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ python2.7 \ git \ cmake \ - ccache \ sudo \ gdb \ xz-utils @@ -25,5 +24,3 @@ ENTRYPOINT ["/usr/bin/dumb-init", "--"] ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu ENV RUST_CHECK_TARGET check -RUN mkdir /tmp/obj -RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/x86_64-musl/Dockerfile b/src/ci/docker/x86_64-musl/Dockerfile index 96b38067cbb..49954157ec0 100644 --- a/src/ci/docker/x86_64-musl/Dockerfile +++ b/src/ci/docker/x86_64-musl/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ python2.7 \ git \ cmake \ - ccache \ xz-utils \ sudo \ gdb @@ -33,6 +32,3 @@ ENV RUST_CONFIGURE_ARGS \ ENV RUST_CHECK_TARGET check-stage2-T-x86_64-unknown-linux-musl-H-x86_64-unknown-linux-gnu ENV PATH=$PATH:/musl-x86_64/bin ENV XPY_CHECK test --target x86_64-unknown-linux-musl - -RUN mkdir /tmp/obj -RUN chmod 777 /tmp/obj diff --git a/src/doc/book/closures.md b/src/doc/book/closures.md index a3c7333c6be..115fe1f7662 100644 --- a/src/doc/book/closures.md +++ b/src/doc/book/closures.md @@ -283,7 +283,7 @@ fn call_with_one(some_closure: F) -> i32 # some_closure(1) } ``` -We take one parameter, and it has the type `F`. We also return a `i32`. This part +We take one parameter, and it has the type `F`. We also return an `i32`. This part isn’t interesting. The next part is: ```rust @@ -293,7 +293,7 @@ isn’t interesting. The next part is: ``` Because `Fn` is a trait, we can use it as a bound for our generic type. In -this case, our closure takes a `i32` as an argument and returns an `i32`, and +this case, our closure takes an `i32` as an argument and returns an `i32`, and so the generic bound we use is `Fn(i32) -> i32`. There’s one other key point here: because we’re bounding a generic with a diff --git a/src/doc/book/ffi.md b/src/doc/book/ffi.md index b53af694428..41457ee67a5 100644 --- a/src/doc/book/ffi.md +++ b/src/doc/book/ffi.md @@ -574,6 +574,31 @@ The [`libc` crate on crates.io][libc] includes type aliases and function definitions for the C standard library in the `libc` module, and Rust links against `libc` and `libm` by default. +# Variadic functions + +In C, functions can be 'variadic', meaning they accept a variable number of arguments. This can +be achieved in Rust by specifying `...` within the argument list of a foreign function declaration: + +```no_run +extern { + fn foo(x: i32, ...); +} + +fn main() { + unsafe { + foo(10, 20, 30, 40, 50); + } +} +``` + +Normal Rust functions can *not* be variadic: + +```ignore +// This will not compile + +fn foo(x: i32, ...) { } +``` + # The "nullable pointer optimization" Certain Rust types are defined to never be `null`. This includes references (`&T`, diff --git a/src/doc/book/lifetimes.md b/src/doc/book/lifetimes.md index 140e27d1924..546e66fc8ac 100644 --- a/src/doc/book/lifetimes.md +++ b/src/doc/book/lifetimes.md @@ -128,7 +128,7 @@ _descriptive_, not _prescriptive_. This means that how long a reference is valid is determined by the code, not by the annotations. The annotations, however, give information about lifetimes to the compiler that uses them to check the validity of references. 
The compiler can do so without annotations in simple -cases, but needs the programmers support in complex scenarios. +cases, but needs the programmer's support in complex scenarios. [traits]: traits.html diff --git a/src/doc/book/variable-bindings.md b/src/doc/book/variable-bindings.md index 37b6c0513fc..e1947310a9a 100644 --- a/src/doc/book/variable-bindings.md +++ b/src/doc/book/variable-bindings.md @@ -102,7 +102,7 @@ mutation, then the solution is quite easy: add `mut`. There are other good reasons to avoid mutable state when possible, but they’re out of the scope of this guide. In general, you can often avoid explicit mutation, and so it is preferable in Rust. That said, sometimes, mutation is -what you need, so it’s not verboten. +what you need, so it’s not forbidden. # Initializing bindings diff --git a/src/doc/reference.md b/src/doc/reference.md index b5a91a170d8..9898c31282c 100644 --- a/src/doc/reference.md +++ b/src/doc/reference.md @@ -1657,6 +1657,15 @@ Functions within external blocks may be called by Rust code, just like functions defined in Rust. The Rust compiler automatically translates between the Rust ABI and the foreign ABI. +Functions within external blocks may be variadic by specifying `...` after one +or more named arguments in the argument list: + +```ignore +extern { + fn foo(x: i32, ...); +} +``` + A number of [attributes](#ffi-attributes) control the behavior of external blocks. By default external blocks assume that the library they are calling uses the diff --git a/src/etc/platform-intrinsics/nvptx/cuda.json b/src/etc/platform-intrinsics/nvptx/cuda.json new file mode 100644 index 00000000000..1beaaeb5d87 --- /dev/null +++ b/src/etc/platform-intrinsics/nvptx/cuda.json @@ -0,0 +1,13 @@ +{ + "intrinsic_prefix": "_", + "llvm_prefix": "llvm.cuda.", + "intrinsics": [ + { + "intrinsic": "syncthreads", + "width": ["0"], + "llvm": "syncthreads", + "ret": "V", + "args": [] + } + ] +} diff --git a/src/etc/platform-intrinsics/nvptx/info.json b/src/etc/platform-intrinsics/nvptx/info.json new file mode 100644 index 00000000000..80332c54e04 --- /dev/null +++ b/src/etc/platform-intrinsics/nvptx/info.json @@ -0,0 +1,7 @@ +{ + "platform": "nvptx", + "number_info": { + "signed": {} + }, + "width_info": {} +} diff --git a/src/etc/platform-intrinsics/nvptx/sreg.json b/src/etc/platform-intrinsics/nvptx/sreg.json new file mode 100644 index 00000000000..33d97f26946 --- /dev/null +++ b/src/etc/platform-intrinsics/nvptx/sreg.json @@ -0,0 +1,90 @@ +{ + "intrinsic_prefix": "_", + "llvm_prefix": "llvm.nvvm.read.ptx.sreg.", + "intrinsics": [ + { + "intrinsic": "block_dim_x", + "width": ["0"], + "llvm": "ntid.x", + "ret": "S32", + "args": [] + }, + { + "intrinsic": "block_dim_y", + "width": ["0"], + "llvm": "ntid.y", + "ret": "S32", + "args": [] + }, + { + "intrinsic": "block_dim_z", + "width": ["0"], + "llvm": "ntid.z", + "ret": "S32", + "args": [] + }, + { + "intrinsic": "block_idx_x", + "width": ["0"], + "llvm": "ctaid.x", + "ret": "S32", + "args": [] + }, + { + "intrinsic": "block_idx_y", + "width": ["0"], + "llvm": "ctaid.y", + "ret": "S32", + "args": [] + }, + { + "intrinsic": "block_idx_z", + "width": ["0"], + "llvm": "ctaid.z", + "ret": "S32", + "args": [] + }, + { + "intrinsic": "grid_dim_x", + "width": ["0"], + "llvm": "nctaid.x", + "ret": "S32", + "args": [] + }, + { + "intrinsic": "grid_dim_y", + "width": ["0"], + "llvm": "nctaid.y", + "ret": "S32", + "args": [] + }, + { + "intrinsic": "grid_dim_z", + "width": ["0"], + "llvm": "nctaid.z", + "ret": "S32", + "args": [] + }, + { + 
"intrinsic": "thread_idx_x", + "width": ["0"], + "llvm": "tid.x", + "ret": "S32", + "args": [] + }, + { + "intrinsic": "thread_idx_y", + "width": ["0"], + "llvm": "tid.y", + "ret": "S32", + "args": [] + }, + { + "intrinsic": "thread_idx_z", + "width": ["0"], + "llvm": "tid.z", + "ret": "S32", + "args": [] + } + ] +} diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index 1cad8f7f407..e1a240a0d2e 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -55,24 +55,24 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// [`RwLock`][rwlock], or one of the [`Atomic`][atomic] types. /// /// `Arc` uses atomic operations for reference counting, so `Arc`s can be -/// sent between threads. In other words, `Arc` implements [`Send`][send] -/// as long as `T` implements `Send` and [`Sync`][sync]. The disadvantage is +/// sent between threads. In other words, `Arc` implements [`Send`] +/// as long as `T` implements [`Send`] and [`Sync`][sync]. The disadvantage is /// that atomic operations are more expensive than ordinary memory accesses. /// If you are not sharing reference-counted values between threads, consider -/// using [`rc::Rc`][rc] for lower overhead. `Rc` is a safe default, because -/// the compiler will catch any attempt to send an `Rc` between threads. +/// using [`rc::Rc`] for lower overhead. [`Rc`] is a safe default, because +/// the compiler will catch any attempt to send an [`Rc`] between threads. /// However, a library might choose `Arc` in order to give library consumers /// more flexibility. /// /// The [`downgrade`][downgrade] method can be used to create a non-owning -/// [`Weak`][weak] pointer. A `Weak` pointer can be [`upgrade`][upgrade]d -/// to an `Arc`, but this will return [`None`][option] if the value has -/// already been dropped. +/// [`Weak`][weak] pointer. A [`Weak`][weak] pointer can be [`upgrade`][upgrade]d +/// to an `Arc`, but this will return [`None`] if the value has already been +/// dropped. /// /// A cycle between `Arc` pointers will never be deallocated. For this reason, -/// `Weak` is used to break cycles. For example, a tree could have strong -/// `Arc` pointers from parent nodes to children, and `Weak` pointers from -/// children back to their parents. +/// [`Weak`][weak] is used to break cycles. For example, a tree could have +/// strong `Arc` pointers from parent nodes to children, and [`Weak`][weak] +/// pointers from children back to their parents. /// /// `Arc` automatically dereferences to `T` (via the [`Deref`][deref] trait), /// so you can call `T`'s methods on a value of type `Arc`. To avoid name @@ -86,22 +86,22 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// Arc::downgrade(&my_arc); /// ``` /// -/// `Weak` does not auto-dereference to `T`, because the value may have +/// [`Weak`][weak] does not auto-dereference to `T`, because the value may have /// already been destroyed. 
/// /// [arc]: struct.Arc.html /// [weak]: struct.Weak.html -/// [rc]: ../../std/rc/struct.Rc.html +/// [`Rc`]: ../../std/rc/struct.Rc.html /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone /// [mutex]: ../../std/sync/struct.Mutex.html /// [rwlock]: ../../std/sync/struct.RwLock.html /// [atomic]: ../../std/sync/atomic/index.html -/// [send]: ../../std/marker/trait.Send.html +/// [`Send`]: ../../std/marker/trait.Send.html /// [sync]: ../../std/marker/trait.Sync.html /// [deref]: ../../std/ops/trait.Deref.html /// [downgrade]: struct.Arc.html#method.downgrade /// [upgrade]: struct.Weak.html#method.upgrade -/// [option]: ../../std/option/enum.Option.html +/// [`None`]: ../../std/option/enum.Option.html#variant.None /// [assoc]: ../../book/method-syntax.html#associated-functions /// /// # Examples @@ -127,7 +127,9 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// } /// ``` /// -/// Sharing a mutable `AtomicUsize`: +/// Sharing a mutable [`AtomicUsize`]: +/// +/// [`AtomicUsize`]: ../../std/sync/atomic/struct.AtomicUsize.html /// /// ```no_run /// use std::sync::Arc; diff --git a/src/libcore/cmp.rs b/src/libcore/cmp.rs index 0daf658a0f4..54fb70278ca 100644 --- a/src/libcore/cmp.rs +++ b/src/libcore/cmp.rs @@ -429,7 +429,7 @@ impl PartialOrd for Ordering { /// This trait can be used with `#[derive]`. When `derive`d, it will produce a lexicographic /// ordering based on the top-to-bottom declaration order of the struct's members. /// -/// ## How can I implement `Ord`? +/// ## How can I implement `PartialOrd`? /// /// PartialOrd only requires implementation of the `partial_cmp` method, with the others generated /// from default implementations. diff --git a/src/libcore/sync/atomic.rs b/src/libcore/sync/atomic.rs index 198db0e7c0a..75205794471 100644 --- a/src/libcore/sync/atomic.rs +++ b/src/libcore/sync/atomic.rs @@ -237,11 +237,16 @@ impl AtomicBool { /// Loads a value from the bool. /// - /// `load` takes an `Ordering` argument which describes the memory ordering of this operation. + /// `load` takes an [`Ordering`] argument which describes the memory ordering + /// of this operation. /// /// # Panics /// - /// Panics if `order` is `Release` or `AcqRel`. + /// Panics if `order` is [`Release`] or [`AcqRel`]. + /// + /// [`Ordering`]: enum.Ordering.html + /// [`Release`]: enum.Ordering.html#variant.Release + /// [`AcqRel`]: enum.Ordering.html#variant.AcqRel /// /// # Examples /// @@ -260,7 +265,10 @@ impl AtomicBool { /// Stores a value into the bool. /// - /// `store` takes an `Ordering` argument which describes the memory ordering of this operation. + /// `store` takes an [`Ordering`] argument which describes the memory ordering + /// of this operation. + /// + /// [`Ordering`]: enum.Ordering.html /// /// # Examples /// @@ -286,7 +294,10 @@ impl AtomicBool { /// Stores a value into the bool, returning the old value. /// - /// `swap` takes an `Ordering` argument which describes the memory ordering of this operation. + /// `swap` takes an [`Ordering`] argument which describes the memory ordering + /// of this operation. + /// + /// [`Ordering`]: enum.Ordering.html /// /// # Examples /// @@ -309,8 +320,10 @@ impl AtomicBool { /// The return value is always the previous value. If it is equal to `current`, then the value /// was updated. /// - /// `compare_and_swap` also takes an `Ordering` argument which describes the memory ordering of - /// this operation. + /// `compare_and_swap` also takes an [`Ordering`] argument which describes the memory + /// ordering of this operation.
+ /// + /// [`Ordering`]: enum.Ordering.html /// /// # Examples /// @@ -339,10 +352,15 @@ impl AtomicBool { /// The return value is a result indicating whether the new value was written and containing /// the previous value. On success this value is guaranteed to be equal to `current`. /// - /// `compare_exchange` takes two `Ordering` arguments to describe the memory ordering of this - /// operation. The first describes the required ordering if the operation succeeds while the - /// second describes the required ordering when the operation fails. The failure ordering can't - /// be `Release` or `AcqRel` and must be equivalent or weaker than the success ordering. + /// `compare_exchange` takes two [`Ordering`] arguments to describe the memory + /// ordering of this operation. The first describes the required ordering if the + /// operation succeeds while the second describes the required ordering when the + /// operation fails. The failure ordering can't be [`Release`] or [`AcqRel`] and must + /// be equivalent or weaker than the success ordering. + /// + /// [`Ordering`]: enum.Ordering.html + /// [`Release`]: enum.Ordering.html#variant.Release + /// [`AcqRel`]: enum.Ordering.html#variant.AcqRel /// /// # Examples /// @@ -387,11 +405,15 @@ impl AtomicBool { /// return value is a result indicating whether the new value was written and containing the /// previous value. /// - /// `compare_exchange_weak` takes two `Ordering` arguments to describe the memory + /// `compare_exchange_weak` takes two [`Ordering`] arguments to describe the memory /// ordering of this operation. The first describes the required ordering if the operation /// succeeds while the second describes the required ordering when the operation fails. The - /// failure ordering can't be `Release` or `AcqRel` and must be equivalent or weaker than the - /// success ordering. + /// failure ordering can't be [`Release`] or [`AcqRel`] and must be equivalent or + /// weaker than the success ordering. + /// + /// [`Ordering`]: enum.Ordering.html + /// [`Release`]: enum.Ordering.html#variant.Release + /// [`AcqRel`]: enum.Ordering.html#variant.AcqRel /// /// # Examples /// @@ -619,11 +641,16 @@ impl AtomicPtr { /// Loads a value from the pointer. /// - /// `load` takes an `Ordering` argument which describes the memory ordering of this operation. + /// `load` takes an [`Ordering`] argument which describes the memory ordering + /// of this operation. /// /// # Panics /// - /// Panics if `order` is `Release` or `AcqRel`. + /// Panics if `order` is [`Release`] or [`AcqRel`]. + /// + /// [`Ordering`]: enum.Ordering.html + /// [`Release`]: enum.Ordering.html#variant.Release + /// [`AcqRel`]: enum.Ordering.html#variant.AcqRel /// /// # Examples /// @@ -643,7 +670,10 @@ impl AtomicPtr { /// Stores a value into the pointer. /// - /// `store` takes an `Ordering` argument which describes the memory ordering of this operation. + /// `store` takes an [`Ordering`] argument which describes the memory ordering + /// of this operation. + /// + /// [`Ordering`]: enum.Ordering.html /// /// # Examples /// @@ -671,7 +701,10 @@ impl AtomicPtr { /// Stores a value into the pointer, returning the old value. /// - /// `swap` takes an `Ordering` argument which describes the memory ordering of this operation. + /// `swap` takes an [`Ordering`] argument which describes the memory ordering + /// of this operation.
+ /// + /// [`Ordering`]: enum.Ordering.html /// /// # Examples /// @@ -696,8 +729,10 @@ impl AtomicPtr { /// The return value is always the previous value. If it is equal to `current`, then the value /// was updated. /// - /// `compare_and_swap` also takes an `Ordering` argument which describes the memory ordering of - /// this operation. + /// `compare_and_swap` also takes an [`Ordering`] argument which describes the memory + /// ordering of this operation. + /// + /// [`Ordering`]: enum.Ordering.html /// /// # Examples /// @@ -726,10 +761,15 @@ impl AtomicPtr { /// The return value is a result indicating whether the new value was written and containing /// the previous value. On success this value is guaranteed to be equal to `current`. /// - /// `compare_exchange` takes two `Ordering` arguments to describe the memory ordering of this - /// operation. The first describes the required ordering if the operation succeeds while the - /// second describes the required ordering when the operation fails. The failure ordering can't - /// be `Release` or `AcqRel` and must be equivalent or weaker than the success ordering. + /// `compare_exchange` takes two [`Ordering`] arguments to describe the memory + /// ordering of this operation. The first describes the required ordering if + /// the operation succeeds while the second describes the required ordering when + /// the operation fails. The failure ordering can't be [`Release`] or [`AcqRel`] + /// and must be equivalent or weaker than the success ordering. + /// + /// [`Ordering`]: enum.Ordering.html + /// [`Release`]: enum.Ordering.html#variant.Release + /// [`AcqRel`]: enum.Ordering.html#variant.AcqRel /// /// # Examples /// @@ -768,16 +808,21 @@ impl AtomicPtr { /// Stores a value into the pointer if the current value is the same as the `current` value. /// - /// Unlike `compare_exchange`, this function is allowed to spuriously fail even when the + /// Unlike [`compare_exchange`], this function is allowed to spuriously fail even when the /// comparison succeeds, which can result in more efficient code on some platforms. The /// return value is a result indicating whether the new value was written and containing the /// previous value. /// - /// `compare_exchange_weak` takes two `Ordering` arguments to describe the memory + /// `compare_exchange_weak` takes two [`Ordering`] arguments to describe the memory /// ordering of this operation. The first describes the required ordering if the operation /// succeeds while the second describes the required ordering when the operation fails. The - /// failure ordering can't be `Release` or `AcqRel` and must be equivalent or weaker than the - /// success ordering. + /// failure ordering can't be [`Release`] or [`AcqRel`] and must be equivalent or + /// weaker than the success ordering. + /// + /// [`compare_exchange`]: #method.compare_exchange + /// [`Ordering`]: enum.Ordering.html + /// [`Release`]: enum.Ordering.html#variant.Release + /// [`AcqRel`]: enum.Ordering.html#variant.AcqRel /// /// # Examples /// @@ -913,12 +958,16 @@ macro_rules! atomic_int { /// Loads a value from the atomic integer. /// - /// `load` takes an `Ordering` argument which describes the memory ordering of this + /// `load` takes an [`Ordering`] argument which describes the memory ordering of this /// operation. /// /// # Panics /// - /// Panics if `order` is `Release` or `AcqRel`. + /// Panics if `order` is [`Release`] or [`AcqRel`]. 
+ /// + /// [`Ordering`]: enum.Ordering.html + /// [`Release`]: enum.Ordering.html#variant.Release + /// [`AcqRel`]: enum.Ordering.html#variant.AcqRel /// /// # Examples /// @@ -937,9 +986,11 @@ macro_rules! atomic_int { /// Stores a value into the atomic integer. /// - /// `store` takes an `Ordering` argument which describes the memory ordering of this + /// `store` takes an [`Ordering`] argument which describes the memory ordering of this /// operation. /// + /// [`Ordering`]: enum.Ordering.html + /// /// # Examples /// /// ``` @@ -962,9 +1013,11 @@ macro_rules! atomic_int { /// Stores a value into the atomic integer, returning the old value. /// - /// `swap` takes an `Ordering` argument which describes the memory ordering of this + /// `swap` takes an [`Ordering`] argument which describes the memory ordering of this /// operation. /// + /// [`Ordering`]: enum.Ordering.html + /// /// # Examples /// /// ``` @@ -986,9 +1039,11 @@ macro_rules! atomic_int { /// The return value is always the previous value. If it is equal to `current`, then the /// value was updated. /// - /// `compare_and_swap` also takes an `Ordering` argument which describes the memory + /// `compare_and_swap` also takes an [`Ordering`] argument which describes the memory /// ordering of this operation. /// + /// [`Ordering`]: enum.Ordering.html + /// /// # Examples /// /// ``` @@ -1024,11 +1079,15 @@ macro_rules! atomic_int { /// containing the previous value. On success this value is guaranteed to be equal to /// `current`. /// - /// `compare_exchange` takes two `Ordering` arguments to describe the memory ordering of - /// this operation. The first describes the required ordering if the operation succeeds - /// while the second describes the required ordering when the operation fails. The - /// failure ordering can't be `Release` or `AcqRel` and must be equivalent or weaker - /// than the success ordering. + /// `compare_exchange` takes two [`Ordering`] arguments to describe the memory + /// ordering of this operation. The first describes the required ordering if + /// the operation succeeds while the second describes the required ordering when + /// the operation fails. The failure ordering can't be [`Release`] or [`AcqRel`] and + /// must be equivalent or weaker than the success ordering. + /// + /// [`Ordering`]: enum.Ordering.html + /// [`Release`]: enum.Ordering.html#variant.Release + /// [`AcqRel`]: enum.Ordering.html#variant.AcqRel /// /// # Examples /// @@ -1062,16 +1121,21 @@ macro_rules! atomic_int { /// Stores a value into the atomic integer if the current value is the same as the /// `current` value. /// - /// Unlike `compare_exchange`, this function is allowed to spuriously fail even when the - /// comparison succeeds, which can result in more efficient code on some platforms. The - /// return value is a result indicating whether the new value was written and containing - /// the previous value. + /// Unlike [`compare_exchange`], this function is allowed to spuriously fail even + /// when the comparison succeeds, which can result in more efficient code on some + /// platforms. The return value is a result indicating whether the new value was + /// written and containing the previous value. /// - /// `compare_exchange_weak` takes two `Ordering` arguments to describe the memory + /// `compare_exchange_weak` takes two [`Ordering`] arguments to describe the memory /// ordering of this operation. 
The first describes the required ordering if the /// operation succeeds while the second describes the required ordering when the - /// operation fails. The failure ordering can't be `Release` or `AcqRel` and must be - /// equivalent or weaker than the success ordering. + /// operation fails. The failure ordering can't be [`Release`] or [`AcqRel`] and + /// must be equivalent or weaker than the success ordering. + /// + /// [`compare_exchange`]: #method.compare_exchange + /// [`Ordering`]: enum.Ordering.html + /// [`Release`]: enum.Ordering.html#variant.Release + /// [`AcqRel`]: enum.Ordering.html#variant.AcqRel /// /// # Examples /// @@ -1431,24 +1495,31 @@ unsafe fn atomic_xor<T>(dst: *mut T, val: T, order: Ordering) -> T { /// An atomic fence. /// -/// A fence 'A' which has `Release` ordering semantics, synchronizes with a -/// fence 'B' with (at least) `Acquire` semantics, if and only if there exists +/// A fence 'A' which has [`Release`] ordering semantics, synchronizes with a +/// fence 'B' with (at least) [`Acquire`] semantics, if and only if there exists /// atomic operations X and Y, both operating on some atomic object 'M' such /// that A is sequenced before X, Y is synchronized before B and Y observes /// the change to M. This provides a happens-before dependence between A and B. /// -/// Atomic operations with `Release` or `Acquire` semantics can also synchronize +/// Atomic operations with [`Release`] or [`Acquire`] semantics can also synchronize /// with a fence. /// -/// A fence which has `SeqCst` ordering, in addition to having both `Acquire` -/// and `Release` semantics, participates in the global program order of the -/// other `SeqCst` operations and/or fences. +/// A fence which has [`SeqCst`] ordering, in addition to having both [`Acquire`] +/// and [`Release`] semantics, participates in the global program order of the +/// other [`SeqCst`] operations and/or fences. /// -/// Accepts `Acquire`, `Release`, `AcqRel` and `SeqCst` orderings. +/// Accepts [`Acquire`], [`Release`], [`AcqRel`] and [`SeqCst`] orderings. /// /// # Panics /// -/// Panics if `order` is `Relaxed`. +/// Panics if `order` is [`Relaxed`]. +/// +/// [`Ordering`]: enum.Ordering.html +/// [`Acquire`]: enum.Ordering.html#variant.Acquire +/// [`SeqCst`]: enum.Ordering.html#variant.SeqCst +/// [`Release`]: enum.Ordering.html#variant.Release +/// [`AcqRel`]: enum.Ordering.html#variant.AcqRel +/// [`Relaxed`]: enum.Ordering.html#variant.Relaxed #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn fence(order: Ordering) { diff --git a/src/librustc_llvm/build.rs b/src/librustc_llvm/build.rs index e681a81cf0c..2ee4cc49435 100644 --- a/src/librustc_llvm/build.rs +++ b/src/librustc_llvm/build.rs @@ -96,7 +96,7 @@ fn main() { let optional_components = ["x86", "arm", "aarch64", "mips", "powerpc", "pnacl", "systemz", "jsbackend", "msp430", - "sparc"]; + "sparc", "nvptx"]; // FIXME: surely we don't need all these components, right? Stuff like mcjit // or interpreter the compiler itself never uses.
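For readers skimming the documentation changes above: the two-`Ordering` form of `compare_exchange` that these doc comments describe can be exercised with a short, self-contained snippet. This sketch is not part of the patch; it only uses the stable `std::sync::atomic` API and illustrates the success/failure-ordering rule the docs state.

```rust
use std::sync::atomic::{AtomicBool, Ordering};

fn main() {
    let flag = AtomicBool::new(false);

    // Success ordering is Acquire, failure ordering is Relaxed. Per the docs
    // above, the failure ordering may not be Release or AcqRel and must be
    // equivalent to or weaker than the success ordering.
    match flag.compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed) {
        Ok(previous) => assert!(!previous),                // swap happened
        Err(previous) => println!("already set to {}", previous), // swap did not happen
    }
}
```

The same pattern applies to the `AtomicPtr` and integer-atomic methods whose documentation is updated later in this patch.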
diff --git a/src/librustc_llvm/ffi.rs b/src/librustc_llvm/ffi.rs index 5fd85023e41..c1705815165 100644 --- a/src/librustc_llvm/ffi.rs +++ b/src/librustc_llvm/ffi.rs @@ -42,6 +42,7 @@ pub enum CallConv { X86StdcallCallConv = 64, X86FastcallCallConv = 65, ArmAapcsCallConv = 67, + PtxKernel = 71, X86_64_SysV = 78, X86_64_Win64 = 79, X86_VectorCall = 80, diff --git a/src/librustc_llvm/lib.rs b/src/librustc_llvm/lib.rs index c8b1ea50f97..1e45ea083c9 100644 --- a/src/librustc_llvm/lib.rs +++ b/src/librustc_llvm/lib.rs @@ -376,6 +376,11 @@ pub fn initialize_available_targets() { LLVMInitializeSparcTargetMC, LLVMInitializeSparcAsmPrinter, LLVMInitializeSparcAsmParser); + init_target!(llvm_component = "nvptx", + LLVMInitializeNVPTXTargetInfo, + LLVMInitializeNVPTXTarget, + LLVMInitializeNVPTXTargetMC, + LLVMInitializeNVPTXAsmPrinter); } pub fn last_error() -> Option<String> { diff --git a/src/librustc_platform_intrinsics/lib.rs b/src/librustc_platform_intrinsics/lib.rs index 6fe1f0c2b9c..e814050e960 100644 --- a/src/librustc_platform_intrinsics/lib.rs +++ b/src/librustc_platform_intrinsics/lib.rs @@ -95,6 +95,7 @@ static VOID: Type = Type::Void; mod x86; mod arm; mod aarch64; +mod nvptx; impl Intrinsic { pub fn find(name: &str) -> Option<Intrinsic> { @@ -104,6 +105,8 @@ impl Intrinsic { arm::find(name) } else if name.starts_with("aarch64_") { aarch64::find(name) + } else if name.starts_with("nvptx_") { + nvptx::find(name) } else { None } diff --git a/src/librustc_platform_intrinsics/nvptx.rs b/src/librustc_platform_intrinsics/nvptx.rs new file mode 100644 index 00000000000..82408723ebe --- /dev/null +++ b/src/librustc_platform_intrinsics/nvptx.rs @@ -0,0 +1,92 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// DO NOT EDIT: autogenerated by etc/platform-intrinsics/generator.py +// ignore-tidy-linelength + +#![allow(unused_imports)] + +use {Intrinsic, Type}; +use IntrinsicDef::Named; + +// The default inlining settings trigger a pathological behaviour in +// LLVM, which makes compilation very slow. See #28273. +#[inline(never)] +pub fn find(name: &str) -> Option<Intrinsic> { + if !name.starts_with("nvptx") { return None } + Some(match &name["nvptx".len()..]
{ + "_syncthreads" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::VOID, + definition: Named("llvm.cuda.syncthreads") + }, + "_block_dim_x" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::I32, + definition: Named("llvm.nvvm.read.ptx.sreg.ntid.x") + }, + "_block_dim_y" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::I32, + definition: Named("llvm.nvvm.read.ptx.sreg.ntid.y") + }, + "_block_dim_z" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::I32, + definition: Named("llvm.nvvm.read.ptx.sreg.ntid.z") + }, + "_block_idx_x" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::I32, + definition: Named("llvm.nvvm.read.ptx.sreg.ctaid.x") + }, + "_block_idx_y" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::I32, + definition: Named("llvm.nvvm.read.ptx.sreg.ctaid.y") + }, + "_block_idx_z" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::I32, + definition: Named("llvm.nvvm.read.ptx.sreg.ctaid.z") + }, + "_grid_dim_x" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::I32, + definition: Named("llvm.nvvm.read.ptx.sreg.nctaid.x") + }, + "_grid_dim_y" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::I32, + definition: Named("llvm.nvvm.read.ptx.sreg.nctaid.y") + }, + "_grid_dim_z" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::I32, + definition: Named("llvm.nvvm.read.ptx.sreg.nctaid.z") + }, + "_thread_idx_x" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::I32, + definition: Named("llvm.nvvm.read.ptx.sreg.tid.x") + }, + "_thread_idx_y" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::I32, + definition: Named("llvm.nvvm.read.ptx.sreg.tid.y") + }, + "_thread_idx_z" => Intrinsic { + inputs: { static INPUTS: [&'static Type; 0] = []; &INPUTS }, + output: &::I32, + definition: Named("llvm.nvvm.read.ptx.sreg.tid.z") + }, + _ => return None, + }) +} diff --git a/src/librustc_trans/abi.rs b/src/librustc_trans/abi.rs index 9c4246e079b..81e4b4d1f21 100644 --- a/src/librustc_trans/abi.rs +++ b/src/librustc_trans/abi.rs @@ -25,6 +25,8 @@ use cabi_mips64; use cabi_asmjs; use cabi_msp430; use cabi_sparc; +use cabi_nvptx; +use cabi_nvptx64; use machine::{llalign_of_min, llsize_of, llsize_of_alloc}; use type_::Type; use type_of; @@ -353,6 +355,7 @@ impl FnType { Win64 => llvm::X86_64_Win64, SysV64 => llvm::X86_64_SysV, Aapcs => llvm::ArmAapcsCallConv, + PtxKernel => llvm::PtxKernel, // These API constants ought to be more specific... Cdecl => llvm::CCallConv, @@ -608,6 +611,8 @@ impl FnType { "wasm32" => cabi_asmjs::compute_abi_info(ccx, self), "msp430" => cabi_msp430::compute_abi_info(ccx, self), "sparc" => cabi_sparc::compute_abi_info(ccx, self), + "nvptx" => cabi_nvptx::compute_abi_info(ccx, self), + "nvptx64" => cabi_nvptx64::compute_abi_info(ccx, self), a => ccx.sess().fatal(&format!("unrecognized arch \"{}\" in target specification", a)) } diff --git a/src/librustc_trans/cabi_nvptx.rs b/src/librustc_trans/cabi_nvptx.rs new file mode 100644 index 00000000000..5ece19f764a --- /dev/null +++ b/src/librustc_trans/cabi_nvptx.rs @@ -0,0 +1,53 @@ +// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Reference: PTX Writer's Guide to Interoperability +// http://docs.nvidia.com/cuda/ptx-writers-guide-to-interoperability + +#![allow(non_upper_case_globals)] + +use llvm::Struct; + +use abi::{self, ArgType, FnType}; +use context::CrateContext; +use type_::Type; + +fn ty_size(ty: Type) -> usize { + abi::ty_size(ty, 4) +} + +fn classify_ret_ty(ccx: &CrateContext, ret: &mut ArgType) { + if ret.ty.kind() == Struct && ty_size(ret.ty) > 32 { + ret.make_indirect(ccx); + } else { + ret.extend_integer_width_to(32); + } +} + +fn classify_arg_ty(ccx: &CrateContext, arg: &mut ArgType) { + if arg.ty.kind() == Struct && ty_size(arg.ty) > 32 { + arg.make_indirect(ccx); + } else { + arg.extend_integer_width_to(32); + } +} + +pub fn compute_abi_info(ccx: &CrateContext, fty: &mut FnType) { + if !fty.ret.is_ignore() { + classify_ret_ty(ccx, &mut fty.ret); + } + + for arg in &mut fty.args { + if arg.is_ignore() { + continue; + } + classify_arg_ty(ccx, arg); + } +} diff --git a/src/librustc_trans/cabi_nvptx64.rs b/src/librustc_trans/cabi_nvptx64.rs new file mode 100644 index 00000000000..880c6cfd7a8 --- /dev/null +++ b/src/librustc_trans/cabi_nvptx64.rs @@ -0,0 +1,53 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms.
+ +// Reference: PTX Writer's Guide to Interoperability +// http://docs.nvidia.com/cuda/ptx-writers-guide-to-interoperability + +#![allow(non_upper_case_globals)] + +use llvm::Struct; + +use abi::{self, ArgType, FnType}; +use context::CrateContext; +use type_::Type; + +fn ty_size(ty: Type) -> usize { + abi::ty_size(ty, 8) +} + +fn classify_ret_ty(ccx: &CrateContext, ret: &mut ArgType) { + if ret.ty.kind() == Struct && ty_size(ret.ty) > 64 { + ret.make_indirect(ccx); + } else { + ret.extend_integer_width_to(64); + } +} + +fn classify_arg_ty(ccx: &CrateContext, arg: &mut ArgType) { + if arg.ty.kind() == Struct && ty_size(arg.ty) > 64 { + arg.make_indirect(ccx); + } else { + arg.extend_integer_width_to(64); + } +} + +pub fn compute_abi_info(ccx: &CrateContext, fty: &mut FnType) { + if !fty.ret.is_ignore() { + classify_ret_ty(ccx, &mut fty.ret); + } + + for arg in &mut fty.args { + if arg.is_ignore() { + continue; + } + classify_arg_ty(ccx, arg); + } +} diff --git a/src/librustc_trans/lib.rs b/src/librustc_trans/lib.rs index 3a8eef131a2..0d3e1853f01 100644 --- a/src/librustc_trans/lib.rs +++ b/src/librustc_trans/lib.rs @@ -103,6 +103,8 @@ mod cabi_asmjs; mod cabi_mips; mod cabi_mips64; mod cabi_msp430; +mod cabi_nvptx; +mod cabi_nvptx64; mod cabi_powerpc; mod cabi_powerpc64; mod cabi_s390x; diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index d66adb5ecfa..ff7133f5d0c 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -2057,10 +2057,9 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, let item_type = m.type_(); let id = derive_id(format!("{}.{}", item_type, name)); let ns_id = derive_id(format!("{}.{}", name, item_type.name_space())); - write!(w, "

\ + write!(w, "

\