Merge branch 'master' into maccoda/env_docs

commit 55d75c42ef
Author: Dylan Maccora, 2017-05-20 09:42:37 +10:00 (committed by GitHub)
241 changed files with 5084 additions and 2478 deletions

.gitmodules (vendored)

@ -13,7 +13,7 @@
path = src/jemalloc
url = https://github.com/rust-lang/jemalloc.git
[submodule "src/rust-installer"]
path = src/rust-installer
path = src/tools/rust-installer
url = https://github.com/rust-lang/rust-installer.git
[submodule "src/liblibc"]
path = src/liblibc
@ -23,7 +23,7 @@
url = https://github.com/rust-lang-nursery/nomicon.git
[submodule "src/tools/cargo"]
path = src/tools/cargo
url = https://github.com/rust-lang/cargo
url = https://github.com/rust-lang/cargo.git
[submodule "reference"]
path = src/doc/reference
url = https://github.com/rust-lang-nursery/reference.git
@ -32,4 +32,4 @@
url = https://github.com/rust-lang/book.git
[submodule "src/tools/rls"]
path = src/tools/rls
url = https://github.com/rust-lang-nursery/rls
url = https://github.com/rust-lang-nursery/rls.git


@ -177,7 +177,7 @@ python x.py test src/test/rustdoc
python x.py build src/libcore --stage 0
```
You can explore the build system throught the various `--help` pages for each
You can explore the build system through the various `--help` pages for each
subcommand. For example to learn more about a command you can run:
```


@ -141,9 +141,9 @@ install:
- set PATH="C:\Program Files (x86)\Inno Setup 5";%PATH%
# Help debug some handle issues on AppVeyor
- ps: Invoke-WebRequest -Uri https://download.sysinternals.com/files/Handle.zip -OutFile handle.zip
- appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-15-Handle.zip
- mkdir handle
- ps: Expand-Archive handle.zip -dest handle
- 7z x -ohandle 2017-05-15-Handle.zip
- set PATH=%PATH%;%CD%\handle
- handle.exe -accepteula -help

configure (vendored)

@ -519,6 +519,7 @@ valopt_nosave host "${CFG_BUILD}" "GNUs ./configure syntax LLVM host triples"
valopt_nosave target "${CFG_HOST}" "GNUs ./configure syntax LLVM target triples"
valopt_nosave mandir "${CFG_PREFIX}/share/man" "install man pages in PATH"
valopt_nosave docdir "${CFG_PREFIX}/share/doc/rust" "install documentation in PATH"
valopt_nosave bindir "${CFG_PREFIX}/bin" "install binaries"
# On Windows this determines root of the subtree for target libraries.
# Host runtime libs always go to 'bin'.
@ -710,6 +711,7 @@ envopt LDFLAGS
CFG_PREFIX=${CFG_PREFIX%/}
CFG_MANDIR=${CFG_MANDIR%/}
CFG_DOCDIR=${CFG_DOCDIR%/}
CFG_BINDIR=${CFG_BINDIR%/}
CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')"
CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')"
@ -750,6 +752,7 @@ putvar CFG_X86_64_LINUX_ANDROID_NDK
putvar CFG_NACL_CROSS_PATH
putvar CFG_MANDIR
putvar CFG_DOCDIR
putvar CFG_BINDIR
putvar CFG_USING_LIBCPP
msg

src/Cargo.lock (generated): diff suppressed because it is too large


@ -13,10 +13,7 @@ members = [
"tools/build-manifest",
"tools/remote-test-client",
"tools/remote-test-server",
]
# These projects have their own Cargo.lock
exclude = [
"tools/rust-installer",
"tools/cargo",
"tools/rls",
]
@ -37,3 +34,6 @@ debug-assertions = false
[profile.test]
debug = false
debug-assertions = false
[replace]
"https://github.com/rust-lang/cargo#0.20.0" = { path = "tools/cargo" }


@ -38,7 +38,24 @@ use std::path::PathBuf;
use std::process::{Command, ExitStatus};
fn main() {
let args = env::args_os().skip(1).collect::<Vec<_>>();
let mut args = env::args_os().skip(1).collect::<Vec<_>>();
// Append metadata suffix for internal crates. See the corresponding entry
// in bootstrap/lib.rs for details.
if let Ok(s) = env::var("RUSTC_METADATA_SUFFIX") {
for i in 1..args.len() {
// Dirty code for borrowing issues
let mut new = None;
if let Some(current_as_str) = args[i].to_str() {
if (&*args[i - 1] == "-C" && current_as_str.starts_with("metadata")) ||
current_as_str.starts_with("-Cmetadata") {
new = Some(format!("{}-{}", current_as_str, s));
}
}
if let Some(new) = new { args[i] = new.into(); }
}
}
// Detect whether or not we're a build script depending on whether --target
// is passed (a bit janky...)
let target = args.windows(2)

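The wrapper rewrites `-C metadata` values in place; note that it must catch both the two-argument form (`-C metadata=...`) and the fused form (`-Cmetadata=...`). Below is a minimal Python sketch of the same scan (the function name and sample values are illustrative, not from this diff):

```python
def append_metadata_suffix(args, suffix):
    """Append `-<suffix>` to every -C metadata=... / -Cmetadata=... value."""
    out = list(args)
    for i in range(1, len(out)):
        prev, cur = out[i - 1], out[i]
        # Two-argument form: ["-C", "metadata=abc"]; fused form: ["-Cmetadata=abc"]
        if (prev == "-C" and cur.startswith("metadata")) or cur.startswith("-Cmetadata"):
            out[i] = "{}-{}".format(cur, suffix)
    return out

assert append_metadata_suffix(["-C", "metadata=abc"], "rustc") == ["-C", "metadata=abc-rustc"]
```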

@ -14,6 +14,7 @@ import contextlib
import datetime
import hashlib
import os
import re
import shutil
import subprocess
import sys
@ -126,13 +127,13 @@ def unpack(tarball, dst, verbose=False, match=None):
shutil.move(tp, fp)
shutil.rmtree(os.path.join(dst, fname))
def run(args, verbose=False, exception=False):
def run(args, verbose=False, exception=False, cwd=None):
if verbose:
print("running: " + ' '.join(args))
sys.stdout.flush()
# Use Popen here instead of call() as it apparently allows powershell on
# Windows to not lock up waiting for input presumably.
ret = subprocess.Popen(args)
ret = subprocess.Popen(args, cwd=cwd)
code = ret.wait()
if code != 0:
err = "failed to run: " + ' '.join(args)
@ -297,8 +298,10 @@ class RustBuild(object):
def get_toml(self, key):
for line in self.config_toml.splitlines():
if line.startswith(key + ' ='):
return self.get_string(line)
match = re.match(r'^{}\s*=(.*)$'.format(key), line)
if match is not None:
value = match.group(1)
return self.get_string(value) or value.strip()
return None
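`get_toml` now tolerates whitespace around `=` and values that are not quoted strings: it falls back to the stripped raw token when `get_string` finds no quotes. A self-contained sketch of that lookup, combining the two helpers shown in this file:

```python
import re

def get_toml(config_toml, key):
    """Look up `key`: unquote string values, else return the bare token."""
    for line in config_toml.splitlines():
        match = re.match(r'^{}\s*=(.*)$'.format(key), line)
        if match is not None:
            value = match.group(1)
            start = value.find('"')
            if start != -1:
                end = start + 1 + value[start + 1:].find('"')
                return value[start + 1:end]  # quoted string value
            return value.strip()             # bare value, e.g. `false`
    return None

assert get_toml('build = "x86_64-unknown-linux-gnu"', 'build') == "x86_64-unknown-linux-gnu"
assert get_toml('submodules = false', 'submodules') == "false"
```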
def get_mk(self, key):
@ -329,6 +332,8 @@ class RustBuild(object):
def get_string(self, line):
start = line.find('"')
if start == -1:
return None
end = start + 1 + line[start + 1:].find('"')
return line[start + 1:end]
@ -386,12 +391,22 @@ class RustBuild(object):
args.append("--frozen")
self.run(args, env)
def run(self, args, env):
proc = subprocess.Popen(args, env=env)
def run(self, args, env=None, cwd=None):
proc = subprocess.Popen(args, env=env, cwd=cwd)
ret = proc.wait()
if ret != 0:
sys.exit(ret)
def output(self, args, env=None, cwd=None):
default_encoding = sys.getdefaultencoding()
proc = subprocess.Popen(args, stdout=subprocess.PIPE, env=env, cwd=cwd)
(out, err) = proc.communicate()
ret = proc.wait()
if ret != 0:
print(out)
sys.exit(ret)
return out.decode(default_encoding)
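`output` complements `run`: it captures stdout rather than inheriting it, and still aborts the bootstrap on a non-zero exit code. A hypothetical call (this exact command is illustrative, not from the diff):

```python
# Illustrative only: capture a command's stdout as decoded text.
sha = rb.output(["git", "rev-parse", "HEAD"], cwd=rb.rust_root).strip()
print("HEAD is at " + sha)
```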
def build_triple(self):
default_encoding = sys.getdefaultencoding()
config = self.get_toml('build')
@ -529,6 +544,54 @@ class RustBuild(object):
return "{}-{}".format(cputype, ostype)
def update_submodules(self):
if (not os.path.exists(os.path.join(self.rust_root, ".git"))) or \
self.get_toml('submodules') == "false" or \
self.get_mk('CFG_DISABLE_MANAGE_SUBMODULES') == "1":
return
print('Updating submodules')
output = self.output(["git", "submodule", "status"], cwd=self.rust_root)
submodules = []
for line in output.splitlines():
# NOTE `git submodule status` output looks like this:
#
# -5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc
# +b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..)
# e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6)
#
# The first character can be '-', '+' or ' ' and denotes the
# `State` of the submodule Right next to this character is the
# SHA-1 of the submodule HEAD And after that comes the path to the
# submodule
path = line[1:].split(' ')[1]
submodules.append([path, line[0]])
self.run(["git", "submodule", "sync"], cwd=self.rust_root)
for submod in submodules:
path, status = submod
if path.endswith('llvm') and \
(self.get_toml('llvm-config') or self.get_mk('CFG_LLVM_ROOT')):
continue
if path.endswith('jemalloc') and \
(self.get_toml('jemalloc') or self.get_mk('CFG_JEMALLOC_ROOT')):
continue
submod_path = os.path.join(self.rust_root, path)
if status == ' ':
self.run(["git", "reset", "--hard"], cwd=submod_path)
self.run(["git", "clean", "-fdx"], cwd=submod_path)
elif status == '+':
self.run(["git", "submodule", "update", path], cwd=self.rust_root)
self.run(["git", "reset", "--hard"], cwd=submod_path)
self.run(["git", "clean", "-fdx"], cwd=submod_path)
elif status == '-':
self.run(["git", "submodule", "init", path], cwd=self.rust_root)
self.run(["git", "submodule", "update", path], cwd=self.rust_root)
else:
raise ValueError('unknown submodule status: ' + status)
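Because the leading status character selects one of three recovery paths, it helps to see the parse run on the sample output quoted in the comment above. A minimal sketch using the same slicing:

```python
status_output = (
    "-5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc\n"
    "+b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..)\n"
    " e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6)")

for line in status_output.splitlines():
    state = line[0]                # '-' uninitialized, '+' out of sync, ' ' maybe dirty
    path = line[1:].split(' ')[1]  # second field after the state char is the path
    print(state + ' ' + path)
# - src/liblibc
# + src/compiler-rt
#   src/jemalloc
```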
def bootstrap():
parser = argparse.ArgumentParser(description='Build rust')
parser.add_argument('--config')
@ -597,6 +660,8 @@ def bootstrap():
else:
rb._download_url = 'https://static.rust-lang.org'
rb.update_submodules()
# Fetch/build the bootstrap
rb.build = rb.build_triple()
rb.download_stage0()


@ -99,7 +99,9 @@ pub struct Config {
// Fallback musl-root for all targets
pub musl_root: Option<PathBuf>,
pub prefix: Option<PathBuf>,
pub sysconfdir: Option<PathBuf>,
pub docdir: Option<PathBuf>,
pub bindir: Option<PathBuf>,
pub libdir: Option<PathBuf>,
pub libdir_relative: Option<PathBuf>,
pub mandir: Option<PathBuf>,
@ -165,9 +167,11 @@ struct Build {
#[derive(RustcDecodable, Default, Clone)]
struct Install {
prefix: Option<String>,
mandir: Option<String>,
sysconfdir: Option<String>,
docdir: Option<String>,
bindir: Option<String>,
libdir: Option<String>,
mandir: Option<String>,
}
/// TOML representation of how the LLVM build is configured.
@ -315,9 +319,11 @@ impl Config {
if let Some(ref install) = toml.install {
config.prefix = install.prefix.clone().map(PathBuf::from);
config.mandir = install.mandir.clone().map(PathBuf::from);
config.sysconfdir = install.sysconfdir.clone().map(PathBuf::from);
config.docdir = install.docdir.clone().map(PathBuf::from);
config.bindir = install.bindir.clone().map(PathBuf::from);
config.libdir = install.libdir.clone().map(PathBuf::from);
config.mandir = install.mandir.clone().map(PathBuf::from);
}
if let Some(ref llvm) = toml.llvm {
@ -523,9 +529,15 @@ impl Config {
"CFG_PREFIX" => {
self.prefix = Some(PathBuf::from(value));
}
"CFG_SYSCONFDIR" => {
self.sysconfdir = Some(PathBuf::from(value));
}
"CFG_DOCDIR" => {
self.docdir = Some(PathBuf::from(value));
}
"CFG_BINDIR" => {
self.bindir = Some(PathBuf::from(value));
}
"CFG_LIBDIR" => {
self.libdir = Some(PathBuf::from(value));
}


@ -160,15 +160,22 @@
# Instead of installing to /usr/local, install to this path instead.
#prefix = "/usr/local"
# Where to install system configuration files
# If this is a relative path, it will get installed in `prefix` above
#sysconfdir = "/etc"
# Where to install documentation in `prefix` above
#docdir = "share/doc/rust"
# Where to install binaries in `prefix` above
#bindir = "bin"
# Where to install libraries in `prefix` above
#libdir = "lib"
# Where to install man pages in `prefix` above
#mandir = "share/man"
# Where to install documentation in `prefix` above
#docdir = "share/doc/rust"
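All of these entries resolve against `prefix` when relative, while absolute values (like the `/etc` default above) stand alone. That is plain path-join semantics, as a quick Python check illustrates (both Rust's `Path::join` and `os.path.join` behave this way):

```python
import os

prefix = "/usr/local"
# Relative entries land under the prefix...
assert os.path.join(prefix, "share/doc/rust") == "/usr/local/share/doc/rust"
# ...while an absolute entry, such as sysconfdir's default, replaces it.
assert os.path.join(prefix, "/etc") == "/etc"
```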
# =============================================================================
# Options for compiling Rust code itself
# =============================================================================


@ -26,12 +26,6 @@ use std::process::{Command, Stdio};
use build_helper::output;
#[cfg(not(target_os = "solaris"))]
const SH_CMD: &'static str = "sh";
// On Solaris, sh is the historical bourne shell, not a POSIX shell, or bash.
#[cfg(target_os = "solaris")]
const SH_CMD: &'static str = "bash";
use {Build, Compiler, Mode};
use channel;
use util::{cp_r, libdir, is_dylib, cp_filtered, copy, exe};
@ -55,6 +49,10 @@ pub fn tmpdir(build: &Build) -> PathBuf {
build.out.join("tmp/dist")
}
fn rust_installer(build: &Build) -> Command {
build.tool_cmd(&Compiler::new(0, &build.config.build), "rust-installer")
}
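Every `gen-installer.sh`/`combine-installers.sh` shell invocation below becomes a call to this compiled tool, which is why the `sanitize_sh` wrapping disappears from the path arguments. The resulting command shape, sketched in Python with illustrative paths:

```python
import subprocess

# Paths and package name are illustrative; the build computes the real ones.
subprocess.check_call([
    "rust-installer", "generate",
    "--product-name=Rust-Documentation",
    "--rel-manifest-dir=rustlib",
    "--image-dir", "build/tmp/dist/rust-docs-image",
    "--work-dir", "build/tmp/dist",
    "--output-dir", "build/dist",
    "--package-name=rust-docs-x86_64-unknown-linux-gnu",
    "--component-name=rust-docs",
    "--legacy-manifest-dirs=rustlib,cargo",
])
```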
/// Builds the `rust-docs` installer component.
///
/// Slurps up documentation from the `stage`'s `host`.
@ -74,14 +72,14 @@ pub fn docs(build: &Build, stage: u32, host: &str) {
let src = build.out.join(host).join("doc");
cp_r(&src, &dst);
let mut cmd = Command::new(SH_CMD);
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
let mut cmd = rust_installer(build);
cmd.arg("generate")
.arg("--product-name=Rust-Documentation")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-documentation-is-installed.")
.arg(format!("--image-dir={}", sanitize_sh(&image)))
.arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
.arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
.arg("--image-dir").arg(&image)
.arg("--work-dir").arg(&tmpdir(build))
.arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rust-docs")
.arg("--legacy-manifest-dirs=rustlib,cargo")
@ -124,14 +122,14 @@ pub fn mingw(build: &Build, host: &str) {
.arg(host);
build.run(&mut cmd);
let mut cmd = Command::new(SH_CMD);
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
let mut cmd = rust_installer(build);
cmd.arg("generate")
.arg("--product-name=Rust-MinGW")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-MinGW-is-installed.")
.arg(format!("--image-dir={}", sanitize_sh(&image)))
.arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
.arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
.arg("--image-dir").arg(&image)
.arg("--work-dir").arg(&tmpdir(build))
.arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rust-mingw")
.arg("--legacy-manifest-dirs=rustlib,cargo");
@ -190,15 +188,15 @@ pub fn rustc(build: &Build, stage: u32, host: &str) {
}
// Finally, wrap everything up in a nice tarball!
let mut cmd = Command::new(SH_CMD);
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
let mut cmd = rust_installer(build);
cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-is-ready-to-roll.")
.arg(format!("--image-dir={}", sanitize_sh(&image)))
.arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
.arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
.arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
.arg("--image-dir").arg(&image)
.arg("--work-dir").arg(&tmpdir(build))
.arg("--output-dir").arg(&distdir(build))
.arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rustc")
.arg("--legacy-manifest-dirs=rustlib,cargo");
@ -300,14 +298,14 @@ pub fn std(build: &Build, compiler: &Compiler, target: &str) {
let src = build.sysroot(compiler).join("lib/rustlib");
cp_r(&src.join(target), &dst);
let mut cmd = Command::new(SH_CMD);
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
let mut cmd = rust_installer(build);
cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=std-is-standing-at-the-ready.")
.arg(format!("--image-dir={}", sanitize_sh(&image)))
.arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
.arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
.arg("--image-dir").arg(&image)
.arg("--work-dir").arg(&tmpdir(build))
.arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", name, target))
.arg(format!("--component-name=rust-std-{}", target))
.arg("--legacy-manifest-dirs=rustlib,cargo");
@ -356,14 +354,14 @@ pub fn analysis(build: &Build, compiler: &Compiler, target: &str) {
println!("image_src: {:?}, dst: {:?}", image_src, dst);
cp_r(&image_src, &dst);
let mut cmd = Command::new(SH_CMD);
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
let mut cmd = rust_installer(build);
cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=save-analysis-saved.")
.arg(format!("--image-dir={}", sanitize_sh(&image)))
.arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
.arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
.arg("--image-dir").arg(&image)
.arg("--work-dir").arg(&tmpdir(build))
.arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", name, target))
.arg(format!("--component-name=rust-analysis-{}", target))
.arg("--legacy-manifest-dirs=rustlib,cargo");
@ -471,13 +469,17 @@ pub fn rust_src(build: &Build) {
write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
// Create plain source tarball
let tarball = rust_src_location(build);
let mut tarball = rust_src_location(build);
tarball.set_extension(""); // strip .gz
tarball.set_extension(""); // strip .tar
if let Some(dir) = tarball.parent() {
t!(fs::create_dir_all(dir));
}
let mut cmd = Command::new("tar");
cmd.arg("-czf").arg(sanitize_sh(&tarball))
.arg(&plain_name)
let mut cmd = rust_installer(build);
cmd.arg("tarball")
.arg("--input").arg(&plain_name)
.arg("--output").arg(&tarball)
.arg("--work-dir=.")
.current_dir(tmpdir(build));
build.run(&mut cmd);
@ -521,14 +523,14 @@ pub fn rust_src(build: &Build) {
}
// Create source tarball in rust-installer format
let mut cmd = Command::new(SH_CMD);
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
let mut cmd = rust_installer(build);
cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Awesome-Source.")
.arg(format!("--image-dir={}", sanitize_sh(&image)))
.arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
.arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
.arg("--image-dir").arg(&image)
.arg("--work-dir").arg(&tmpdir(build))
.arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}", name))
.arg("--component-name=rust-src")
.arg("--legacy-manifest-dirs=rustlib,cargo");
@ -594,7 +596,7 @@ pub fn cargo(build: &Build, stage: u32, target: &str) {
// Prepare the image directory
t!(fs::create_dir_all(image.join("share/zsh/site-functions")));
t!(fs::create_dir_all(image.join("etc/bash_completions.d")));
t!(fs::create_dir_all(image.join("etc/bash_completion.d")));
let cargo = build.cargo_out(&compiler, Mode::Tool, target)
.join(exe("cargo", target));
install(&cargo, &image.join("bin"), 0o755);
@ -604,7 +606,7 @@ pub fn cargo(build: &Build, stage: u32, target: &str) {
}
install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
copy(&etc.join("cargo.bashcomp.sh"),
&image.join("etc/bash_completions.d/cargo"));
&image.join("etc/bash_completion.d/cargo"));
let doc = image.join("share/doc/cargo");
install(&src.join("README.md"), &doc, 0o644);
install(&src.join("LICENSE-MIT"), &doc, 0o644);
@ -622,15 +624,15 @@ pub fn cargo(build: &Build, stage: u32, target: &str) {
t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
// Generate the installer tarball
let mut cmd = Command::new("sh");
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
let mut cmd = rust_installer(build);
cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-is-ready-to-roll.")
.arg(format!("--image-dir={}", sanitize_sh(&image)))
.arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
.arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
.arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
.arg("--image-dir").arg(&image)
.arg("--work-dir").arg(&tmpdir(build))
.arg("--output-dir").arg(&distdir(build))
.arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, target))
.arg("--component-name=cargo")
.arg("--legacy-manifest-dirs=rustlib,cargo");
@ -671,15 +673,15 @@ pub fn rls(build: &Build, stage: u32, target: &str) {
t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
// Generate the installer tarball
let mut cmd = Command::new("sh");
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
let mut cmd = rust_installer(build);
cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=RLS-ready-to-serve.")
.arg(format!("--image-dir={}", sanitize_sh(&image)))
.arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
.arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
.arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
.arg("--image-dir").arg(&image)
.arg("--work-dir").arg(&tmpdir(build))
.arg("--output-dir").arg(&distdir(build))
.arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, target))
.arg("--component-name=rls")
.arg("--legacy-manifest-dirs=rustlib,cargo");
@ -730,29 +732,28 @@ pub fn extended(build: &Build, stage: u32, target: &str) {
// upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
// the std files during uninstall. To do this ensure that rustc comes
// before rust-std in the list below.
let mut input_tarballs = format!("{},{},{},{},{},{}",
sanitize_sh(&rustc_installer),
sanitize_sh(&cargo_installer),
sanitize_sh(&rls_installer),
sanitize_sh(&analysis_installer),
sanitize_sh(&docs_installer),
sanitize_sh(&std_installer));
let mut tarballs = vec![rustc_installer, cargo_installer, rls_installer,
analysis_installer, docs_installer, std_installer];
if target.contains("pc-windows-gnu") {
input_tarballs.push_str(",");
input_tarballs.push_str(&sanitize_sh(&mingw_installer));
tarballs.push(mingw_installer);
}
let mut input_tarballs = tarballs[0].as_os_str().to_owned();
for tarball in &tarballs[1..] {
input_tarballs.push(",");
input_tarballs.push(tarball);
}
let mut cmd = Command::new(SH_CMD);
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/combine-installers.sh")))
let mut cmd = rust_installer(build);
cmd.arg("combine")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-is-ready-to-roll.")
.arg(format!("--work-dir={}", sanitize_sh(&work)))
.arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
.arg("--work-dir").arg(&work)
.arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", pkgname(build, "rust"), target))
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg(format!("--input-tarballs={}", input_tarballs))
.arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)));
.arg("--input-tarballs").arg(input_tarballs)
.arg("--non-installed-overlay").arg(&overlay);
build.run(&mut cmd);
let mut license = String::new();


@ -21,67 +21,110 @@ use std::process::Command;
use Build;
use dist::{sanitize_sh, tmpdir};
/// Installs everything.
pub fn install(build: &Build, stage: u32, host: &str) {
let prefix_default = PathBuf::from("/usr/local");
let docdir_default = PathBuf::from("share/doc/rust");
let mandir_default = PathBuf::from("share/man");
let libdir_default = PathBuf::from("lib");
let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);
let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);
let docdir = prefix.join(docdir);
let libdir = prefix.join(libdir);
let mandir = prefix.join(mandir);
let destdir = env::var_os("DESTDIR").map(PathBuf::from);
let prefix = add_destdir(&prefix, &destdir);
let docdir = add_destdir(&docdir, &destdir);
let libdir = add_destdir(&libdir, &destdir);
let mandir = add_destdir(&mandir, &destdir);
let empty_dir = build.out.join("tmp/empty_dir");
t!(fs::create_dir_all(&empty_dir));
if build.config.docs {
install_sh(&build, "docs", "rust-docs", &build.rust_package_vers(),
stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
}
for target in build.config.target.iter() {
install_sh(&build, "std", "rust-std", &build.rust_package_vers(),
stage, target, &prefix, &docdir, &libdir, &mandir, &empty_dir);
}
if build.config.extended {
install_sh(&build, "cargo", "cargo", &build.cargo_package_vers(),
stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
install_sh(&build, "rls", "rls", &build.rls_package_vers(),
stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
}
install_sh(&build, "rustc", "rustc", &build.rust_package_vers(),
stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
t!(fs::remove_dir_all(&empty_dir));
pub struct Installer<'a> {
build: &'a Build,
prefix: PathBuf,
sysconfdir: PathBuf,
docdir: PathBuf,
bindir: PathBuf,
libdir: PathBuf,
mandir: PathBuf,
}
fn install_sh(build: &Build, package: &str, name: &str, version: &str, stage: u32, host: &str,
prefix: &Path, docdir: &Path, libdir: &Path, mandir: &Path, empty_dir: &Path) {
println!("Install {} stage{} ({})", package, stage, host);
let package_name = format!("{}-{}-{}", name, version, host);
impl<'a> Installer<'a> {
pub fn new(build: &'a Build) -> Installer<'a> {
let prefix_default = PathBuf::from("/usr/local");
let sysconfdir_default = PathBuf::from("/etc");
let docdir_default = PathBuf::from("share/doc/rust");
let bindir_default = PathBuf::from("bin");
let libdir_default = PathBuf::from("lib");
let mandir_default = PathBuf::from("share/man");
let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);
let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);
let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);
let mut cmd = Command::new("sh");
cmd.current_dir(empty_dir)
.arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh")))
.arg(format!("--prefix={}", sanitize_sh(prefix)))
.arg(format!("--docdir={}", sanitize_sh(docdir)))
.arg(format!("--libdir={}", sanitize_sh(libdir)))
.arg(format!("--mandir={}", sanitize_sh(mandir)))
.arg("--disable-ldconfig");
build.run(&mut cmd);
let sysconfdir = prefix.join(sysconfdir);
let docdir = prefix.join(docdir);
let bindir = prefix.join(bindir);
let libdir = prefix.join(libdir);
let mandir = prefix.join(mandir);
let destdir = env::var_os("DESTDIR").map(PathBuf::from);
let prefix = add_destdir(&prefix, &destdir);
let sysconfdir = add_destdir(&sysconfdir, &destdir);
let docdir = add_destdir(&docdir, &destdir);
let bindir = add_destdir(&bindir, &destdir);
let libdir = add_destdir(&libdir, &destdir);
let mandir = add_destdir(&mandir, &destdir);
Installer {
build,
prefix,
sysconfdir,
docdir,
bindir,
libdir,
mandir,
}
}
/// Installs everything.
pub fn install(&self, stage: u32, host: &str) {
let empty_dir = self.build.out.join("tmp/empty_dir");
t!(fs::create_dir_all(&empty_dir));
if self.build.config.docs {
self.install_sh("docs", "rust-docs", &self.build.rust_package_vers(),
stage, Some(host), &empty_dir);
}
for target in self.build.config.target.iter() {
self.install_sh("std", "rust-std", &self.build.rust_package_vers(),
stage, Some(target), &empty_dir);
}
if self.build.config.extended {
self.install_sh("cargo", "cargo", &self.build.cargo_package_vers(),
stage, Some(host), &empty_dir);
self.install_sh("rls", "rls", &self.build.rls_package_vers(),
stage, Some(host), &empty_dir);
self.install_sh("analysis", "rust-analysis", &self.build.rust_package_vers(),
stage, Some(host), &empty_dir);
self.install_sh("src", "rust-src", &self.build.rust_package_vers(),
stage, None, &empty_dir);
}
self.install_sh("rustc", "rustc", &self.build.rust_package_vers(),
stage, Some(host), &empty_dir);
t!(fs::remove_dir_all(&empty_dir));
}
fn install_sh(&self, package: &str, name: &str, version: &str,
stage: u32, host: Option<&str>, empty_dir: &Path) {
println!("Install {} stage{} ({:?})", package, stage, host);
let package_name = if let Some(host) = host {
format!("{}-{}-{}", name, version, host)
} else {
format!("{}-{}", name, version)
};
let mut cmd = Command::new("sh");
cmd.current_dir(empty_dir)
.arg(sanitize_sh(&tmpdir(self.build).join(&package_name).join("install.sh")))
.arg(format!("--prefix={}", sanitize_sh(&self.prefix)))
.arg(format!("--sysconfdir={}", sanitize_sh(&self.sysconfdir)))
.arg(format!("--docdir={}", sanitize_sh(&self.docdir)))
.arg(format!("--bindir={}", sanitize_sh(&self.bindir)))
.arg(format!("--libdir={}", sanitize_sh(&self.libdir)))
.arg(format!("--mandir={}", sanitize_sh(&self.mandir)))
.arg("--disable-ldconfig");
self.build.run(&mut cmd);
}
}
fn add_destdir(path: &Path, destdir: &Option<PathBuf>) -> PathBuf {

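`add_destdir` (its body is truncated here) reparents each resolved path under `$DESTDIR` so packagers can stage installs. A Python sketch of the same idea, assuming the conventional DESTDIR behavior:

```python
import os

def add_destdir(path, destdir):
    """Nest an absolute install path under DESTDIR, if one is set."""
    if destdir is None:
        return path
    return os.path.join(destdir, path.lstrip('/'))

assert add_destdir("/usr/local/bin", "/tmp/stage") == "/tmp/stage/usr/local/bin"
assert add_destdir("/usr/local/bin", None) == "/usr/local/bin"
```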

@ -82,7 +82,7 @@ use std::env;
use std::ffi::OsString;
use std::fs::{self, File};
use std::io::Read;
use std::path::{Component, PathBuf, Path};
use std::path::{PathBuf, Path};
use std::process::Command;
use build_helper::{run_silent, run_suppressed, output, mtime};
@ -285,129 +285,12 @@ impl Build {
self.verbose(&format!("auto-detected local-rebuild {}", local_release));
self.local_rebuild = true;
}
self.verbose("updating submodules");
self.update_submodules();
self.verbose("learning about cargo");
metadata::build(self);
step::run(self);
}
/// Updates all git submodules that we have.
///
/// This will detect if any submodules are out of date and run the necessary
/// commands to sync them all with upstream.
fn update_submodules(&self) {
struct Submodule<'a> {
path: &'a Path,
state: State,
}
enum State {
// The submodule may have staged/unstaged changes
MaybeDirty,
// Or could be initialized but never updated
NotInitialized,
// The submodule, itself, has extra commits but those changes haven't been committed to
// the (outer) git repository
OutOfSync,
}
if !self.src_is_git || !self.config.submodules {
return
}
let git = || {
let mut cmd = Command::new("git");
cmd.current_dir(&self.src);
return cmd
};
let git_submodule = || {
let mut cmd = Command::new("git");
cmd.current_dir(&self.src).arg("submodule");
return cmd
};
// FIXME: this takes a seriously long time to execute on Windows and a
// nontrivial amount of time on Unix, we should have a better way
// of detecting whether we need to run all the submodule commands
// below.
let out = output(git_submodule().arg("status"));
let mut submodules = vec![];
for line in out.lines() {
// NOTE `git submodule status` output looks like this:
//
// -5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc
// +b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..)
// e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6)
//
// The first character can be '-', '+' or ' ' and denotes the `State` of the submodule
// Right next to this character is the SHA-1 of the submodule HEAD
// And after that comes the path to the submodule
let path = Path::new(line[1..].split(' ').skip(1).next().unwrap());
let state = if line.starts_with('-') {
State::NotInitialized
} else if line.starts_with('+') {
State::OutOfSync
} else if line.starts_with(' ') {
State::MaybeDirty
} else {
panic!("unexpected git submodule state: {:?}", line.chars().next());
};
submodules.push(Submodule { path: path, state: state })
}
self.run(git_submodule().arg("sync"));
for submodule in submodules {
// If using llvm-root then don't touch the llvm submodule.
if submodule.path.components().any(|c| c == Component::Normal("llvm".as_ref())) &&
self.config.target_config.get(&self.config.build)
.and_then(|c| c.llvm_config.as_ref()).is_some()
{
continue
}
if submodule.path.components().any(|c| c == Component::Normal("jemalloc".as_ref())) &&
!self.config.use_jemalloc
{
continue
}
// `submodule.path` is the relative path to a submodule (from the repository root)
// `submodule_path` is the path to a submodule from the cwd
// use `submodule.path` when e.g. executing a submodule specific command from the
// repository root
// use `submodule_path` when e.g. executing a normal git command for the submodule
// (set via `current_dir`)
let submodule_path = self.src.join(submodule.path);
match submodule.state {
State::MaybeDirty => {
// drop staged changes
self.run(git().current_dir(&submodule_path)
.args(&["reset", "--hard"]));
// drops unstaged changes
self.run(git().current_dir(&submodule_path)
.args(&["clean", "-fdx"]));
},
State::NotInitialized => {
self.run(git_submodule().arg("init").arg(submodule.path));
self.run(git_submodule().arg("update").arg(submodule.path));
},
State::OutOfSync => {
// drops submodule commits that weren't reported to the (outer) git repository
self.run(git_submodule().arg("update").arg(submodule.path));
self.run(git().current_dir(&submodule_path)
.args(&["reset", "--hard"]));
self.run(git().current_dir(&submodule_path)
.args(&["clean", "-fdx"]));
},
}
}
}
/// Clear out `dir` if `input` is newer.
///
/// After this executes, it will also ensure that `dir` exists.
@ -475,12 +358,30 @@ impl Build {
.env("RUSTDOC_REAL", self.rustdoc(compiler))
.env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
// Tools don't get debuginfo right now, e.g. cargo and rls don't get
// compiled with debuginfo.
if mode != Mode::Tool {
cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
.env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string())
.env("RUSTC_FORCE_UNSTABLE", "1");
// Tools don't get debuginfo right now, e.g. cargo and rls don't
// get compiled with debuginfo.
cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
.env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string())
.env("RUSTC_FORCE_UNSTABLE", "1");
// Currently the compiler depends on crates from crates.io, and
// then other crates can depend on the compiler (e.g. proc-macro
// crates). Let's say, for example that rustc itself depends on the
// bitflags crate. If an external crate then depends on the
// bitflags crate as well, we need to make sure they don't
// conflict, even if they pick the same version of bitflags. We'll
// want to make sure that e.g. a plugin and rustc each get their
// own copy of bitflags.
// Cargo ensures that this works in general through the -C metadata
// flag. This flag will frob the symbols in the binary to make sure
// they're different, even though the source code is the exact
// same. To solve this problem for the compiler we extend Cargo's
// already-passed -C metadata flag with our own. Our rustc.rs
// wrapper around the actual rustc will detect -C metadata being
// passed and frob it with this extra string we're passing in.
cargo.env("RUSTC_METADATA_SUFFIX", "rustc");
}
// Enable usage of unstable features


@ -574,6 +574,10 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client"));
rules.build("tool-rust-installer", "src/tools/rust-installer")
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "rust-installer"));
rules.build("tool-cargo", "src/tools/cargo")
.host(true)
.default(build.config.extended)
@ -704,6 +708,7 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
.host(true)
.only_host_build(true)
.default(true)
.dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::rustc(build, s.stage, s.target));
rules.dist("dist-std", "src/libstd")
.dep(move |s| {
@ -718,10 +723,12 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
})
.default(true)
.only_host_build(true)
.dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::std(build, &s.compiler(), s.target));
rules.dist("dist-mingw", "path/to/nowhere")
.default(true)
.only_host_build(true)
.dep(move |s| tool_rust_installer(build, s))
.run(move |s| {
if s.target.contains("pc-windows-gnu") {
dist::mingw(build, s.target)
@ -732,29 +739,34 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
.host(true)
.only_build(true)
.only_host_build(true)
.dep(move |s| tool_rust_installer(build, s))
.run(move |_| dist::rust_src(build));
rules.dist("dist-docs", "src/doc")
.default(true)
.only_host_build(true)
.dep(|s| s.name("default:doc"))
.dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::docs(build, s.stage, s.target));
rules.dist("dist-analysis", "analysis")
.default(build.config.extended)
.dep(|s| s.name("dist-std"))
.only_host_build(true)
.dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::analysis(build, &s.compiler(), s.target));
rules.dist("dist-rls", "rls")
.host(true)
.only_host_build(true)
.dep(|s| s.name("tool-rls"))
.dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::rls(build, s.stage, s.target));
rules.dist("install", "path/to/nowhere")
.dep(|s| s.name("default:dist"))
.run(move |s| install::install(build, s.stage, s.target));
.run(move |s| install::Installer::new(build).install(s.stage, s.target));
rules.dist("dist-cargo", "cargo")
.host(true)
.only_host_build(true)
.dep(|s| s.name("tool-cargo"))
.dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::cargo(build, s.stage, s.target));
rules.dist("dist-extended", "extended")
.default(build.config.extended)
@ -767,6 +779,7 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
.dep(|d| d.name("dist-cargo"))
.dep(|d| d.name("dist-rls"))
.dep(|d| d.name("dist-analysis"))
.dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::extended(build, s.stage, s.target));
rules.dist("dist-sign", "hash-and-sign")
@ -778,6 +791,14 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
rules.verify();
return rules;
/// Helper to depend on a stage0 build-only rust-installer tool.
fn tool_rust_installer<'a>(build: &'a Build, step: &Step<'a>) -> Step<'a> {
step.name("tool-rust-installer")
.host(&build.config.build)
.target(&build.config.build)
.stage(0)
}
}
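The `tool_rust_installer` helper pins the dependency to a stage0, build-host step, so every dist rule shares a single build of the tool. A toy sketch of that dependency shape (the names and scheduler are hypothetical, for illustration only):

```python
# Toy dependency graph: each dist step depends on the stage0 tool step.
deps = {
    "dist-rustc": ["tool-rust-installer"],
    "dist-std":   ["tool-rust-installer"],
    "dist-docs":  ["default:doc", "tool-rust-installer"],
}

def schedule(step, order=None, seen=None):
    """Post-order walk: dependencies are scheduled before the step itself."""
    order = [] if order is None else order
    seen = set() if seen is None else seen
    if step not in seen:
        seen.add(step)
        for dep in deps.get(step, []):
            schedule(dep, order, seen)
        order.append(step)
    return order

print(schedule("dist-docs"))  # ['default:doc', 'tool-rust-installer', 'dist-docs']
```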
#[derive(PartialEq, Eq, Hash, Clone, Debug)]


@ -16,6 +16,12 @@ for example:
Images will output artifacts in an `obj` dir at the root of a repository.
## Filesystem layout
- Each directory, excluding `scripts` and `disabled`, corresponds to a docker image
- `scripts` contains files shared by docker images
- `disabled` contains images that are not built on travis
## Cross toolchains
A number of these images take quite a long time to compile as they're building


@ -2,52 +2,44 @@ FROM ubuntu:16.04
RUN apt-get update && \
apt-get install -y --no-install-recommends \
g++ \
make \
file \
curl \
ca-certificates \
python2.7 \
git \
cmake \
unzip \
sudo \
xz-utils \
curl \
file \
g++ \
git \
libssl-dev \
pkg-config
make \
pkg-config \
python2.7 \
sudo \
unzip \
xz-utils
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
# dumb-init
COPY scripts/dumb-init.sh /scripts/
RUN sh /scripts/dumb-init.sh
RUN curl -o /usr/local/bin/sccache \
https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
# ndk
COPY scripts/android-ndk.sh /scripts/
RUN . /scripts/android-ndk.sh && \
download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip arm 9
# Install NDK
COPY install-ndk.sh /tmp
RUN . /tmp/install-ndk.sh && \
download_ndk android-ndk-r13b-linux-x86_64.zip && \
make_standalone_toolchain arm 9 && \
remove_ndk
# Install SDK
# sdk
RUN dpkg --add-architecture i386 && \
apt-get update && \
apt-get install -y --no-install-recommends \
openjdk-9-jre-headless \
tzdata \
libstdc++6:i386 \
libgl1-mesa-glx \
libpulse0
libpulse0 \
libstdc++6:i386 \
openjdk-9-jre-headless \
tzdata
COPY install-sdk.sh /tmp
RUN . /tmp/install-sdk.sh && \
download_sdk tools_r25.2.5-linux.zip && \
download_sysimage armeabi-v7a 18 && \
create_avd armeabi-v7a 18
COPY scripts/android-sdk.sh /scripts/
RUN . /scripts/android-sdk.sh && \
download_and_create_avd tools_r25.2.5-linux.zip armeabi-v7a 18
# Setup env
# env
ENV PATH=$PATH:/android/sdk/tools
ENV PATH=$PATH:/android/sdk/platform-tools
@ -57,8 +49,12 @@ ENV RUST_CONFIGURE_ARGS \
--target=$TARGETS \
--arm-linux-androideabi-ndk=/android/ndk/arm-9
ENV SCRIPT python2.7 ../x.py test --target $TARGETS --verbose
ENV SCRIPT python2.7 ../x.py test --target $TARGETS
# Entrypoint
COPY start-emulator.sh /android/
ENTRYPOINT ["/usr/bin/dumb-init", "--", "/android/start-emulator.sh"]
# sccache
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
# init
COPY scripts/android-start-emulator.sh /scripts/
ENTRYPOINT ["/usr/bin/dumb-init", "--", "/scripts/android-start-emulator.sh"]


@ -1,35 +0,0 @@
#!/bin/sh
# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
URL=https://dl.google.com/android/repository
download_ndk() {
mkdir -p /android/ndk
cd /android/ndk
curl -O $URL/$1
unzip -q $1
rm $1
mv android-ndk-* ndk
}
make_standalone_toolchain() {
# See https://developer.android.com/ndk/guides/standalone_toolchain.html
python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
--install-dir /android/ndk/$1-$2 \
--arch $1 \
--api $2
}
remove_ndk() {
rm -rf /android/ndk/ndk
}


@ -31,7 +31,7 @@ WORKDIR /build
# The `vexpress_config` config file was a previously generated config file for
# the kernel. This file was generated by running `make vexpress_defconfig`
# followed by `make menuconfig` and then enabling the IPv6 protocol page.
COPY vexpress_config /build/.config
COPY armhf-gnu/vexpress_config /build/.config
RUN curl https://cdn.kernel.org/pub/linux/kernel/v4.x/linux-4.4.42.tar.xz | \
tar xJf - && \
cd /build/linux-4.4.42 && \
@ -63,11 +63,11 @@ RUN curl http://cdimage.ubuntu.com/ubuntu-base/releases/16.04/release/ubuntu-bas
# Copy over our init script, which starts up our test server and also a few
# other misc tasks.
COPY rcS rootfs/etc/init.d/rcS
COPY armhf-gnu/rcS rootfs/etc/init.d/rcS
RUN chmod +x rootfs/etc/init.d/rcS
# Helper to quickly fill the entropy pool in the kernel.
COPY addentropy.c /tmp/
COPY armhf-gnu/addentropy.c /tmp/
RUN arm-linux-gnueabihf-gcc addentropy.c -o rootfs/addentropy -static
# TODO: What is this?!


@ -32,10 +32,10 @@ ENTRYPOINT ["/usr/bin/dumb-init", "--"]
WORKDIR /tmp
COPY build-rumprun.sh /tmp/
COPY cross/build-rumprun.sh /tmp/
RUN ./build-rumprun.sh
COPY build-arm-musl.sh /tmp/
COPY cross/build-arm-musl.sh /tmp/
RUN ./build-arm-musl.sh
# originally from


@ -2,36 +2,30 @@ FROM ubuntu:16.04
RUN apt-get update && \
apt-get install -y --no-install-recommends \
g++ \
make \
file \
curl \
ca-certificates \
python2.7 \
git \
cmake \
unzip \
sudo \
xz-utils \
curl \
file \
g++ \
git \
libssl-dev \
pkg-config
make \
pkg-config \
python2.7 \
sudo \
unzip \
xz-utils
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
# dumb-init
COPY scripts/dumb-init.sh /scripts/
RUN sh /scripts/dumb-init.sh
RUN curl -o /usr/local/bin/sccache \
https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
COPY android-ndk.sh /
RUN . /android-ndk.sh && \
download_ndk android-ndk-r13b-linux-x86_64.zip && \
make_standalone_toolchain arm64 21 && \
remove_ndk
# ndk
COPY scripts/android-ndk.sh /scripts/
RUN . /scripts/android-ndk.sh && \
download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip arm64 21
# env
ENV PATH=$PATH:/android/ndk/arm64-21/bin
ENV DEP_Z_ROOT=/android/ndk/arm64-21/sysroot/usr/
@ -47,3 +41,10 @@ ENV RUST_CONFIGURE_ARGS \
--enable-cargo-openssl-static
ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS
# sccache
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
# init
ENTRYPOINT ["/usr/bin/dumb-init", "--"]


@ -2,37 +2,36 @@ FROM ubuntu:16.04
RUN apt-get update && \
apt-get install -y --no-install-recommends \
g++ \
make \
file \
curl \
ca-certificates \
python2.7 \
git \
cmake \
unzip \
sudo \
xz-utils \
curl \
file \
g++ \
git \
libssl-dev \
pkg-config
make \
pkg-config \
python2.7 \
sudo \
unzip \
xz-utils
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
# dumb-init
COPY scripts/dumb-init.sh /scripts/
RUN sh /scripts/dumb-init.sh
RUN curl -o /usr/local/bin/sccache \
https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
COPY android-ndk.sh /
RUN . /android-ndk.sh && \
# ndk
COPY scripts/android-ndk.sh /scripts/
RUN . /scripts/android-ndk.sh && \
download_ndk android-ndk-r13b-linux-x86_64.zip && \
make_standalone_toolchain arm 9 && \
make_standalone_toolchain arm 21 && \
remove_ndk
RUN chmod 777 /android/ndk && \
ln -s /android/ndk/arm-21 /android/ndk/arm
# env
ENV PATH=$PATH:/android/ndk/arm-9/bin
ENV DEP_Z_ROOT=/android/ndk/arm-9/sysroot/usr/
@ -54,12 +53,16 @@ ENV RUST_CONFIGURE_ARGS \
# level 9), the default linker behavior is to generate an error, to allow the
# build to finish we use --warn-unresolved-symbols. Note that the missing
# symbols does not affect std, only the compiler (llvm) and cargo (openssl).
RUN chmod 777 /android/ndk && \
ln -s /android/ndk/arm-21 /android/ndk/arm
ENV SCRIPT \
python2.7 ../x.py build src/llvm --host $HOSTS --target $HOSTS && \
(export RUSTFLAGS="\"-C link-arg=-Wl,--warn-unresolved-symbols\""; \
rm /android/ndk/arm && \
ln -s /android/ndk/arm-9 /android/ndk/arm && \
python2.7 ../x.py dist --host $HOSTS --target $HOSTS)
# sccache
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
# init
ENTRYPOINT ["/usr/bin/dumb-init", "--"]


@ -2,37 +2,36 @@ FROM ubuntu:16.04
RUN apt-get update && \
apt-get install -y --no-install-recommends \
g++ \
make \
file \
curl \
ca-certificates \
python2.7 \
git \
cmake \
unzip \
sudo \
xz-utils \
curl \
file \
g++ \
git \
libssl-dev \
pkg-config
make \
pkg-config \
python2.7 \
sudo \
unzip \
xz-utils
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
# dumb-init
COPY scripts/dumb-init.sh /scripts/
RUN sh /scripts/dumb-init.sh
RUN curl -o /usr/local/bin/sccache \
https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
COPY android-ndk.sh /
RUN . /android-ndk.sh && \
# ndk
COPY scripts/android-ndk.sh /scripts/
RUN . /scripts/android-ndk.sh && \
download_ndk android-ndk-r13b-linux-x86_64.zip && \
make_standalone_toolchain x86 9 && \
make_standalone_toolchain x86 21 && \
remove_ndk
RUN chmod 777 /android/ndk && \
ln -s /android/ndk/x86-21 /android/ndk/x86
# env
ENV PATH=$PATH:/android/ndk/x86-9/bin
ENV DEP_Z_ROOT=/android/ndk/x86-9/sysroot/usr/
@ -54,12 +53,16 @@ ENV RUST_CONFIGURE_ARGS \
# level 9), the default linker behavior is to generate an error, to allow the
# build to finish we use --warn-unresolved-symbols. Note that the missing
# symbols does not affect std, only the compiler (llvm) and cargo (openssl).
RUN chmod 777 /android/ndk && \
ln -s /android/ndk/x86-21 /android/ndk/x86
ENV SCRIPT \
python2.7 ../x.py build src/llvm --host $HOSTS --target $HOSTS && \
(export RUSTFLAGS="\"-C link-arg=-Wl,--warn-unresolved-symbols\""; \
rm /android/ndk/x86 && \
ln -s /android/ndk/x86-9 /android/ndk/x86 && \
python2.7 ../x.py dist --host $HOSTS --target $HOSTS)
# sccache
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
# init
ENTRYPOINT ["/usr/bin/dumb-init", "--"]


@ -2,36 +2,30 @@ FROM ubuntu:16.04
RUN apt-get update && \
apt-get install -y --no-install-recommends \
g++ \
make \
file \
curl \
ca-certificates \
python2.7 \
git \
cmake \
unzip \
sudo \
xz-utils \
curl \
file \
g++ \
git \
libssl-dev \
pkg-config
make \
pkg-config \
python2.7 \
sudo \
unzip \
xz-utils
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
# dumb-init
COPY scripts/dumb-init.sh /scripts/
RUN sh /scripts/dumb-init.sh
RUN curl -o /usr/local/bin/sccache \
https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
COPY android-ndk.sh /
RUN . /android-ndk.sh && \
download_ndk android-ndk-r13b-linux-x86_64.zip && \
make_standalone_toolchain x86_64 21 && \
remove_ndk
# ndk
COPY scripts/android-ndk.sh /scripts/
RUN . /scripts/android-ndk.sh && \
download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip x86_64 21
# env
ENV PATH=$PATH:/android/ndk/x86_64-21/bin
ENV DEP_Z_ROOT=/android/ndk/x86_64-21/sysroot/usr/
@ -47,3 +41,10 @@ ENV RUST_CONFIGURE_ARGS \
--enable-cargo-openssl-static
ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS
# sccache
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
# init
ENTRYPOINT ["/usr/bin/dumb-init", "--"]


@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
USER rustbuild
WORKDIR /tmp
COPY aarch64-linux-gnu.config build-toolchains.sh /tmp/
COPY dist-aarch64-linux/aarch64-linux-gnu.config dist-aarch64-linux/build-toolchains.sh /tmp/
RUN ./build-toolchains.sh
USER root


@ -2,33 +2,27 @@ FROM ubuntu:16.04
RUN apt-get update && \
apt-get install -y --no-install-recommends \
g++ \
make \
file \
curl \
ca-certificates \
python2.7 \
git \
cmake \
unzip \
sudo \
xz-utils \
curl \
file \
g++ \
git \
libssl-dev \
pkg-config
make \
pkg-config \
python2.7 \
sudo \
unzip \
xz-utils
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
# dumb-init
COPY scripts/dumb-init.sh /scripts/
RUN sh /scripts/dumb-init.sh
RUN curl -o /usr/local/bin/sccache \
https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
# Install NDK
COPY install-ndk.sh /tmp
RUN . /tmp/install-ndk.sh && \
# ndk
COPY scripts/android-ndk.sh /scripts/
RUN . /scripts/android-ndk.sh && \
download_ndk android-ndk-r13b-linux-x86_64.zip && \
make_standalone_toolchain arm 9 && \
make_standalone_toolchain x86 9 && \
@ -36,6 +30,7 @@ RUN . /tmp/install-ndk.sh && \
make_standalone_toolchain x86_64 21 && \
remove_ndk
# env
ENV TARGETS=arm-linux-androideabi
ENV TARGETS=$TARGETS,armv7-linux-androideabi
ENV TARGETS=$TARGETS,i686-linux-android
@ -52,3 +47,10 @@ ENV RUST_CONFIGURE_ARGS \
--x86_64-linux-android-ndk=/android/ndk/x86_64-21
ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
# cache
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
# init
ENTRYPOINT ["/usr/bin/dumb-init", "--"]


@ -1,35 +0,0 @@
#!/bin/sh
# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
URL=https://dl.google.com/android/repository
download_ndk() {
mkdir -p /android/ndk
cd /android/ndk
curl -O $URL/$1
unzip -q $1
rm $1
mv android-ndk-* ndk
}
make_standalone_toolchain() {
# See https://developer.android.com/ndk/guides/standalone_toolchain.html
python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
--install-dir /android/ndk/$1-$2 \
--arch $1 \
--api $2
}
remove_ndk() {
rm -rf /android/ndk/ndk
}


@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
USER rustbuild
WORKDIR /tmp
COPY arm-linux-gnueabi.config build-toolchains.sh /tmp/
COPY dist-arm-linux/arm-linux-gnueabi.config dist-arm-linux/build-toolchains.sh /tmp/
RUN ./build-toolchains.sh
USER root


@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
USER rustbuild
WORKDIR /tmp
COPY arm-linux-gnueabihf.config build-toolchains.sh /tmp/
COPY dist-armhf-linux/arm-linux-gnueabihf.config dist-armhf-linux/build-toolchains.sh /tmp/
RUN ./build-toolchains.sh
USER root


@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
USER rustbuild
WORKDIR /tmp
COPY build-toolchains.sh armv7-linux-gnueabihf.config /tmp/
COPY dist-armv7-linux/build-toolchains.sh dist-armv7-linux/armv7-linux-gnueabihf.config /tmp/
RUN ./build-toolchains.sh
USER root


@ -21,7 +21,7 @@ RUN curl -L https://cmake.org/files/v3.8/cmake-3.8.0-rc1-Linux-x86_64.tar.gz | \
tar xzf - -C /usr/local --strip-components=1
WORKDIR /tmp
COPY shared.sh build-toolchain.sh compiler-rt-dso-handle.patch /tmp/
COPY dist-fuchsia/shared.sh dist-fuchsia/build-toolchain.sh dist-fuchsia/compiler-rt-dso-handle.patch /tmp/
RUN /tmp/build-toolchain.sh
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \


@ -17,7 +17,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
pkg-config
WORKDIR /build/
COPY musl-libunwind-patch.patch build-musl.sh /build/
COPY dist-i586-gnu-i686-musl/musl-libunwind-patch.patch dist-i586-gnu-i686-musl/build-musl.sh /build/
RUN sh /build/build-musl.sh && rm -rf /build
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \


@ -16,7 +16,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libssl-dev \
pkg-config
COPY build-toolchain.sh /tmp/
COPY dist-i686-freebsd/build-toolchain.sh /tmp/
RUN /tmp/build-toolchain.sh i686
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \


@ -29,13 +29,13 @@ ENV PATH=/rustroot/bin:$PATH
ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib
ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
WORKDIR /tmp
COPY shared.sh build-binutils.sh /tmp/
COPY dist-i686-linux/shared.sh dist-i686-linux/build-binutils.sh /tmp/
# We need a build of openssl which supports SNI to download artifacts from
# static.rust-lang.org. This'll be used to link into libcurl below (and used
# later as well), so build a copy of OpenSSL with dynamic libraries into our
# generic root.
COPY build-openssl.sh /tmp/
COPY dist-i686-linux/build-openssl.sh /tmp/
RUN ./build-openssl.sh
# The `curl` binary on CentOS doesn't support SNI which is needed for fetching
@ -44,7 +44,7 @@ RUN ./build-openssl.sh
#
# Note that we also disable a bunch of optional features of curl that we don't
# really need.
COPY build-curl.sh /tmp/
COPY dist-i686-linux/build-curl.sh /tmp/
RUN ./build-curl.sh
# binutils < 2.22 has a bug where the 32-bit executables it generates
@ -54,26 +54,26 @@ RUN ./build-curl.sh
RUN ./build-binutils.sh
# Need a newer version of gcc than centos has to compile LLVM nowadays
COPY build-gcc.sh /tmp/
COPY dist-i686-linux/build-gcc.sh /tmp/
RUN ./build-gcc.sh
# CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+
COPY build-python.sh /tmp/
COPY dist-i686-linux/build-python.sh /tmp/
RUN ./build-python.sh
# Apparently CentOS 5.5 doesn't have `git` in yum, but we're gonna need it for
# cloning, so download and build it here.
COPY build-git.sh /tmp/
COPY dist-i686-linux/build-git.sh /tmp/
RUN ./build-git.sh
# libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS
# only has 2.6.4, so build our own
COPY build-cmake.sh /tmp/
COPY dist-i686-linux/build-cmake.sh /tmp/
RUN ./build-cmake.sh
# for sanitizers, we need kernel header files newer than the ones CentOS ships
# with, so we install newer ones here
COPY build-headers.sh /tmp/
COPY dist-i686-linux/build-headers.sh /tmp/
RUN ./build-headers.sh
RUN curl -Lo /rustroot/dumb-init \


@ -56,8 +56,8 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
USER rustbuild
WORKDIR /tmp
COPY patches/ /tmp/patches/
COPY powerpc-linux-gnu.config build-powerpc-toolchain.sh /tmp/
COPY dist-powerpc-linux/patches/ /tmp/patches/
COPY dist-powerpc-linux/powerpc-linux-gnu.config dist-powerpc-linux/build-powerpc-toolchain.sh /tmp/
RUN ./build-powerpc-toolchain.sh
USER root


@ -56,8 +56,8 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
USER rustbuild
WORKDIR /tmp
COPY patches/ /tmp/patches/
COPY shared.sh powerpc64-linux-gnu.config build-powerpc64-toolchain.sh /tmp/
COPY dist-powerpc64-linux/patches/ /tmp/patches/
COPY dist-powerpc64-linux/shared.sh dist-powerpc64-linux/powerpc64-linux-gnu.config dist-powerpc64-linux/build-powerpc64-toolchain.sh /tmp/
RUN ./build-powerpc64-toolchain.sh
USER root


@ -59,7 +59,7 @@ WORKDIR /tmp
USER root
RUN apt-get install -y --no-install-recommends rpm2cpio cpio
COPY shared.sh build-powerpc64le-toolchain.sh /tmp/
COPY dist-powerpc64le-linux/shared.sh dist-powerpc64le-linux/build-powerpc64le-toolchain.sh /tmp/
RUN ./build-powerpc64le-toolchain.sh
RUN curl -o /usr/local/bin/sccache \


@ -56,8 +56,8 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
USER rustbuild
WORKDIR /tmp
COPY patches/ /tmp/patches/
COPY s390x-linux-gnu.config build-s390x-toolchain.sh /tmp/
COPY dist-s390x-linux/patches/ /tmp/patches/
COPY dist-s390x-linux/s390x-linux-gnu.config dist-s390x-linux/build-s390x-toolchain.sh /tmp/
RUN ./build-s390x-toolchain.sh
USER root

View File

@ -16,7 +16,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libssl-dev \
pkg-config
COPY build-toolchain.sh /tmp/
COPY dist-x86_64-freebsd/build-toolchain.sh /tmp/
RUN /tmp/build-toolchain.sh x86_64
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \

View File

@ -29,13 +29,13 @@ ENV PATH=/rustroot/bin:$PATH
ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib
ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
WORKDIR /tmp
COPY shared.sh build-binutils.sh /tmp/
COPY dist-x86_64-linux/shared.sh dist-x86_64-linux/build-binutils.sh /tmp/
# We need a build of openssl which supports SNI to download artifacts from
# static.rust-lang.org. This'll be used to link into libcurl below (and used
# later as well), so build a copy of OpenSSL with dynamic libraries into our
# generic root.
COPY build-openssl.sh /tmp/
COPY dist-x86_64-linux/build-openssl.sh /tmp/
RUN ./build-openssl.sh
# The `curl` binary on CentOS doesn't support SNI which is needed for fetching
@ -44,7 +44,7 @@ RUN ./build-openssl.sh
#
# Note that we also disable a bunch of optional features of curl that we don't
# really need.
COPY build-curl.sh /tmp/
COPY dist-x86_64-linux/build-curl.sh /tmp/
RUN ./build-curl.sh
# binutils < 2.22 has a bug where the 32-bit executables it generates
@ -54,26 +54,26 @@ RUN ./build-curl.sh
RUN ./build-binutils.sh
# Need a newer version of gcc than centos has to compile LLVM nowadays
COPY build-gcc.sh /tmp/
COPY dist-x86_64-linux/build-gcc.sh /tmp/
RUN ./build-gcc.sh
# CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+
COPY build-python.sh /tmp/
COPY dist-x86_64-linux/build-python.sh /tmp/
RUN ./build-python.sh
# Apparently CentOS 5.5 doesn't have `git` in yum, but we're gonna need it for
# cloning, so download and build it here.
COPY build-git.sh /tmp/
COPY dist-x86_64-linux/build-git.sh /tmp/
RUN ./build-git.sh
# libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS
# only has 2.6.4, so build our own
COPY build-cmake.sh /tmp/
COPY dist-x86_64-linux/build-cmake.sh /tmp/
RUN ./build-cmake.sh
# for sanitizers, we need kernel header files newer than the ones CentOS ships
# with so we install newer ones here
COPY build-headers.sh /tmp/
COPY dist-x86_64-linux/build-headers.sh /tmp/
RUN ./build-headers.sh
RUN curl -Lo /rustroot/dumb-init \

View File

@ -17,7 +17,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
pkg-config
WORKDIR /build/
COPY build-musl.sh /build/
COPY dist-x86_64-musl/build-musl.sh /build/
RUN sh /build/build-musl.sh && rm -rf /build
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \

View File

@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
USER rustbuild
WORKDIR /tmp
COPY build-netbsd-toolchain.sh /tmp/
COPY dist-x86_64-netbsd/build-netbsd-toolchain.sh /tmp/
RUN ./build-netbsd-toolchain.sh
USER root

View File

@ -24,7 +24,7 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
WORKDIR /tmp
COPY build-emscripten.sh /tmp/
COPY emscripten/build-emscripten.sh /tmp/
RUN ./build-emscripten.sh
ENV PATH=$PATH:/tmp/emsdk_portable
ENV PATH=$PATH:/tmp/emsdk_portable/clang/tag-e1.37.10/build_tag-e1.37.10_32/bin

View File

@ -26,7 +26,8 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then
build \
--rm \
-t rust-ci \
"$docker_dir/$image"
-f "$docker_dir/$image/Dockerfile" \
"$docker_dir"
elif [ -f "$docker_dir/disabled/$image/Dockerfile" ]; then
if [ -n "$TRAVIS_OS_NAME" ]; then
echo Cannot run disabled images on travis!
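This widens the build context from the per-image directory to the shared docker directory, so each Dockerfile can `COPY` scripts from sibling directories; hence the `dist-*/` prefixes added to the `COPY` paths above. A sketch of the effective invocation (image name and paths assumed for illustration):

```sh
docker build --rm -t rust-ci \
    -f "src/ci/docker/dist-x86_64-linux/Dockerfile" \
    "src/ci/docker"
```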

View File

@ -1,4 +1,3 @@
#!/bin/sh
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
@ -33,3 +32,9 @@ make_standalone_toolchain() {
remove_ndk() {
rm -rf /android/ndk/ndk
}
download_and_make_toolchain() {
download_ndk $1 && \
make_standalone_toolchain $2 $3 && \
remove_ndk
}
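# Hypothetical usage (arguments assumed: NDK archive name, arch, API level):
#   download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip arm 9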

View File

@ -1,4 +1,3 @@
#!/bin/sh
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
@ -47,3 +46,8 @@ create_avd() {
--abi $abi
}
download_and_create_avd() {
download_sdk $1
download_sysimage $2 $3
create_avd $2 $3
}

View File

@ -0,0 +1,15 @@
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb
dpkg -i dumb-init_*.deb
rm dumb-init_*.deb

View File

@ -0,0 +1,16 @@
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
curl -o /usr/local/bin/sccache \
https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl
chmod +x /usr/local/bin/sccache

@ -1 +1 @@
Subproject commit ad7de198561b3a12217ea2da76d796d9c7fc0ed3
Subproject commit 97422981c53a00f7c3d6584d363443117f179fff

@ -1 +1 @@
Subproject commit 6b0de90d87dda15e323ef24cdf7ed873ac5cf4d3
Subproject commit f7a108dfa9e90b07821700c55d01f08a9adf005c

View File

@ -217,6 +217,7 @@
- [unique](library-features/unique.md)
- [unsize](library-features/unsize.md)
- [utf8_error_error_len](library-features/utf8-error-error-len.md)
- [vec_resize_default](library-features/vec-resize-default.md)
- [vec_remove_item](library-features/vec-remove-item.md)
- [windows_c](library-features/windows-c.md)
- [windows_handle](library-features/windows-handle.md)

View File

@ -4,7 +4,7 @@ The tracking issue for this feature is: [#23121]
[#23121]: https://github.com/rust-lang/rust/issues/23121
See also [`slice_patterns`](slice-patterns.html).
See also [`slice_patterns`](language-features/slice-patterns.html).
------------------------

View File

@ -190,4 +190,4 @@ constraints, etc.
[llvm-docs]: http://llvm.org/docs/LangRef.html#inline-assembler-expressions
If you need more power and don't mind losing some of the niceties of
`asm!`, check out [global_asm](global_asm.html).
`asm!`, check out [global_asm](language-features/global_asm.html).

View File

@ -4,7 +4,7 @@ The tracking issue for this feature is: [#29641]
[#29641]: https://github.com/rust-lang/rust/issues/29641
See also [`box_syntax`](box-syntax.html)
See also [`box_syntax`](language-features/box-syntax.html)
------------------------

View File

@ -4,7 +4,7 @@ The tracking issue for this feature is: [#27779]
[#27779]: https://github.com/rust-lang/rust/issues/27779
See also [`box_patterns`](box-patterns.html)
See also [`box_patterns`](language-features/box-patterns.html)
------------------------

View File

@ -74,5 +74,5 @@ usages and placed the larger, single usage in the crate root.
If you don't need quite as much power and flexibility as
`global_asm!` provides, and you don't mind restricting your inline
assembly to `fn` bodies only, you might try the [asm](asm.html)
feature instead.
assembly to `fn` bodies only, you might try the
[asm](language-features/asm.html) feature instead.

View File

@ -4,7 +4,80 @@ The tracking issue for this feature is: [#37339]
[#37339]: https://github.com/rust-lang/rust/issues/37339
Documentation to be appended to section G of the book.
------------------------
### Loops as expressions
Like most things in Rust, loops are expressions, and have a value; normally `()` unless the loop
never exits.
A `loop` can instead evaluate to a useful value via *break with value*:
```rust
#![feature(loop_break_value)]
// Find the first square number over 1000:
let mut n = 1;
let square = loop {
if n * n > 1000 {
break n * n;
}
n += 1;
};
```
The evaluation type may be specified externally:
```rust
#![feature(loop_break_value)]
// Declare that value returned is unsigned 64-bit:
let n: u64 = loop {
break 1;
};
```
It is an error if types do not agree, either between a "break" value and an external requirement,
or between multiple "break" values:
```compile_fail
#![feature(loop_break_value)]
loop {
if true {
break 1u32;
} else {
break 0u8; // error: types do not agree
}
};
let n: i32 = loop {
break 0u32; // error: type does not agree with external requirement
};
```
#### Break: label, value
Four forms of `break` are available, where EXPR is some expression which evaluates to a value:
1. `break;`
2. `break 'label;`
3. `break EXPR;`
4. `break 'label EXPR;`
When no value is given, the value `()` is assumed, thus `break;` is equivalent to `break ();`.
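For example (a minimal sketch), form 2 exits a labeled loop and yields the implied `()`:
```rust
'outer: loop {
    loop {
        break 'outer; // same as `break 'outer ();`
    }
}
```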
Using a label allows returning a value from an inner loop:
```rust
#![feature(loop_break_value)]
let result = 'outer: loop {
for n in 1..10 {
if n > 4 {
break 'outer n;
}
}
};
```

View File

@ -8,6 +8,6 @@ This feature is part of "compiler plugins." It will often be used with the
[`plugin`] and `rustc_private` features as well. For more details, see
their docs.
[`plugin`]: plugin.html
[`plugin`]: language-features/plugin.html
------------------------

View File

@ -8,7 +8,7 @@ The tracking issue for this feature is: [#29597]
This feature is part of "compiler plugins." It will often be used with the
[`plugin_registrar`] and `rustc_private` features.
[`plugin_registrar`]: plugin-registrar.html
[`plugin_registrar`]: language-features/plugin-registrar.html
------------------------

View File

@ -6,5 +6,236 @@ The tracking issue for this feature is: [#38356]
------------------------
This feature flag guards the new procedural macro features laid out by [RFC 1566]. Alongside the now-stable
[custom derives], these provide stabilizable alternatives to the compiler plugin API (which requires the use of
perma-unstable internal APIs) for programmatically modifying Rust code at compile time.
The two new procedural macro kinds are:
* Function-like procedural macros, which are invoked like regular declarative macros, and
* Attribute-like procedural macros, which can be applied to any item that built-in attributes can
be applied to, and which can take arguments in their invocation as well.
Additionally, this feature flag implicitly enables the [`use_extern_macros`](language-features/use-extern-macros.html) feature,
which allows macros to be imported like any other item with `use` statements, as compared to
applying `#[macro_use]` to an `extern crate` declaration. It is important to note that procedural macros may
**only** be imported in this manner, and will throw an error otherwise.
You **must** declare the `proc_macro` feature both in the crate declaring these new procedural macro kinds and in
any crates that use them.
### Common Concepts
As with custom derives, procedural macros may only be declared in crates of the `proc-macro` type, and must be public
functions. No other public items may be declared in `proc-macro` crates, but private items are fine.
To declare your crate as a `proc-macro` crate, simply add:
```toml
[lib]
proc-macro = true
```
to your `Cargo.toml`.
Unlike custom derives, however, the name of the function implementing the procedural macro is used directly as the
procedural macro's name, so choose carefully.
Additionally, both new kinds of procedural macros return a `TokenStream` which *wholly* replaces the original
invocation and its input.
#### Importing
As referenced above, the new procedural macros are not meant to be imported via `#[macro_use]` and will throw an
error if they are. Instead, they are meant to be imported like any other item in Rust, with `use` statements:
```rust,ignore
#![feature(proc_macro)]
// Where `my_proc_macros` is some crate of type `proc-macro`
extern crate my_proc_macros;
// And declares a `#[proc_macro] pub fn my_bang_macro()` at its root.
use my_proc_macros::my_bang_macro;
fn main() {
println!("{}", my_bang_macro!());
}
```
#### Error Reporting
Any panics in a procedural macro implementation will be caught by the compiler and turned into an error message pointing
to the problematic invocation. Thus, it is important to make your panic messages as informative as possible: use
`Option::expect` instead of `Option::unwrap` and `Result::expect` instead of `Result::unwrap`, and inform the user of
the error condition as unambiguously as you can.
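A hypothetical sketch (macro name assumed, not from the diff) of how a panic surfaces as a compile error:

```rust,ignore
#[proc_macro]
pub fn expect_nonempty(input: TokenStream) -> TokenStream {
    // This message is what the compiler reports at the invocation site
    // if the macro is called with no tokens.
    assert!(!input.to_string().is_empty(),
            "`expect_nonempty!` requires at least one token");
    input
}
```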
#### `TokenStream`
The `proc_macro::TokenStream` type is hardcoded into the signatures of procedural macro functions for both input and
output. It is a wrapper around the compiler's internal representation for a given chunk of Rust code.
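As a minimal sketch (not from the diff), a `TokenStream` can be parsed from and rendered back to source text via its `FromStr` and `Display` impls, which is what the examples below rely on with `.parse()` and `.to_string()`:

```rust,ignore
let stream: TokenStream = "fn answer() -> u32 { 42 }".parse().unwrap();
assert!(stream.to_string().contains("answer"));
```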
### Function-like Procedural Macros
These are procedural macros that are invoked like regular declarative macros. They are declared as public functions in
crates of the `proc-macro` type, using the `#[proc_macro]` attribute. The name of the declared function becomes the
name of the macro as it is to be imported and used. The function must be of the kind `fn(TokenStream) -> TokenStream`
where the sole argument is the input to the macro and the return type is the macro's output.
This kind of macro can expand to anything that is valid for the context it is invoked in, including expressions and
statements, as well as items.
**Note**: invocations of this kind of macro require a wrapping `[]`, `{}` or `()` like regular macros, but these do not
appear in the input, only the tokens between them. The tokens between the braces do not need to be valid Rust syntax.
<span class="filename">my_macro_crate/src/lib.rs</span>
```rust,ignore
#![feature(proc_macro)]
// This is always necessary to get the `TokenStream` typedef.
extern crate proc_macro;
use proc_macro::TokenStream;
#[proc_macro]
pub fn say_hello(_input: TokenStream) -> TokenStream {
// This macro will accept any input because it ignores it.
// To enforce correctness in macros which don't take input,
// you may want to add `assert!(_input.to_string().is_empty());`.
"println!(\"Hello, world!\")".parse().unwrap()
}
```
<span class="filename">my_macro_user/Cargo.toml</span>
```toml
[dependencies]
my_macro_crate = { path = "<relative path to my_macro_crate>" }
```
<span class="filename">my_macro_user/src/lib.rs</span>
```rust,ignore
#![feature(proc_macro)]
extern crate my_macro_crate;
use my_macro_crate::say_hello;
fn main() {
say_hello!();
}
```
As expected, this prints `Hello, world!`.
### Attribute-like Procedural Macros
These are arguably the most powerful flavor of procedural macro as they can be applied anywhere attributes are allowed.
They are declared as public functions in crates of the `proc-macro` type, using the `#[proc_macro_attribute]` attribute.
The name of the function becomes the name of the attribute as it is to be imported and used. The function must be of the
kind `fn(TokenStream, TokenStream) -> TokenStream` where:
The first argument represents any metadata for the attribute (see [the reference chapter on attributes][refr-attr]).
Only the metadata itself will appear in this argument, for example:
* `#[my_macro]` will get an empty string.
* `#[my_macro = "string"]` will get `= "string"`.
* `#[my_macro(ident)]` will get `(ident)`.
* etc.
The second argument is the item that the attribute is applied to. It can be a function, a type definition,
an impl block, an `extern` block, or a module—attribute invocations can take the inner form (`#![my_attr]`)
or outer form (`#[my_attr]`).
The return type is the output of the macro which *wholly* replaces the item it was applied to. Thus, if your intention
is to merely modify an item, it *must* be copied to the output. The output must be an item; expressions, statements
and bare blocks are not allowed.
There is no restriction on how many items an attribute-like procedural macro can emit as long as they are valid in
the given context.
<span class="filename">my_macro_crate/src/lib.rs</span>
```rust,ignore
#![feature(proc_macro)]
extern crate proc_macro;
use proc_macro::TokenStream;
/// Adds a `/// # Panics` docstring to the end of the input's documentation
///
/// Does not assert that its receiver is a function or method.
#[proc_macro_attribute]
pub fn panics_note(args: TokenStream, input: TokenStream) -> TokenStream {
let args = args.to_string();
let mut input = input.to_string();
assert!(args.starts_with("= \""), "`#[panics_note]` requires an argument of the form \
`#[panics_note = \"panic note here\"]`");
// Get just the bare note string
let panics_note = args.trim_matches(&['=', ' ', '"'][..]);
// The input will include all docstrings regardless of where the attribute is placed,
// so we need to find the last index before the start of the item
let insert_idx = idx_after_last_docstring(&input);
// And insert our `# Panics` note there so it always appears at the end of an item's docs
input.insert_str(insert_idx, &format!("/// # Panics \n/// {}\n", panics_note));
input.parse().unwrap()
}
// `proc-macro` crates can contain any kind of private item still
fn idx_after_last_docstring(input: &str) -> usize {
// Skip docstring lines to find the start of the item proper
input.lines().skip_while(|line| line.trim_left().starts_with("///")).next()
// Find the index of the first non-docstring line in the input
// Note: assumes this exact line is unique in the input
.and_then(|line_after| input.find(line_after))
// No docstrings in the input
.unwrap_or(0)
}
```
<span class="filename">my_macro_user/Cargo.toml</span>
```toml
[dependencies]
my_macro_crate = { path = "<relative path to my_macro_crate>" }
```
<span class="filename">my_macro_user/src/lib.rs</span>
```rust,ignore
#![feature(proc_macro)]
extern crate my_macro_crate;
use my_macro_crate::panics_note;
/// Do the `foo` thing.
#[panics_note = "Always."]
pub fn foo() {
panic!()
}
```
Then the rendered documentation for `pub fn foo` will look like this:
> `pub fn foo()`
>
> ----
> Do the `foo` thing.
> # Panics
> Always.
[RFC 1566]: https://github.com/rust-lang/rfcs/blob/master/text/1566-proc-macros.md
[custom derives]: https://doc.rust-lang.org/book/procedural-macros.html
[rust-lang/rust#41430]: https://github.com/rust-lang/rust/issues/41430
[refr-attr]: https://doc.rust-lang.org/reference/attributes.html

View File

@ -4,7 +4,8 @@ The tracking issue for this feature is: [#23121]
[#23121]: https://github.com/rust-lang/rust/issues/23121
See also [`advanced_slice_patterns`](advanced-slice-patterns.html).
See also
[`advanced_slice_patterns`](language-features/advanced-slice-patterns.html).
------------------------

View File

@ -4,7 +4,7 @@ The tracking issue for this feature is: [#33082]
[#33082]: https://github.com/rust-lang/rust/issues/33082
See also [`alloc_system`](alloc-system.html).
See also [`alloc_system`](library-features/alloc-system.html).
------------------------

View File

@ -4,7 +4,7 @@ The tracking issue for this feature is: [#33082]
[#33082]: https://github.com/rust-lang/rust/issues/33082
See also [`alloc_jemalloc`](alloc-jemalloc.html).
See also [`alloc_jemalloc`](library-features/alloc-jemalloc.html).
------------------------

View File

@ -0,0 +1,7 @@
# `iterator_step_by`
The tracking issue for this feature is: [#27741]
[#27741]: https://github.com/rust-lang/rust/issues/27741
------------------------
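A brief example, mirroring the doctest added to `Iterator::step_by` elsewhere in this diff:
```rust
#![feature(iterator_step_by)]
let a = [0, 1, 2, 3, 4, 5];
let mut iter = a.into_iter().step_by(2);

assert_eq!(iter.next(), Some(&0));
assert_eq!(iter.next(), Some(&2));
assert_eq!(iter.next(), Some(&4));
assert_eq!(iter.next(), None);
```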

View File

@ -0,0 +1,7 @@
# `vec_resize_default`
The tracking issue for this feature is: [#41758]
[#41758]: https://github.com/rust-lang/rust/issues/41758
------------------------
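A brief example, mirroring the doctest added to `Vec::resize_default` elsewhere in this diff:
```rust
#![feature(vec_resize_default)]

let mut vec = vec![1, 2, 3];
vec.resize_default(5);
assert_eq!(vec, [1, 2, 3, 0, 0]);

let mut vec = vec![1, 2, 3, 4];
vec.resize_default(2);
assert_eq!(vec, [1, 2]);
```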

View File

@ -195,30 +195,34 @@ fn bench_contains_equal(b: &mut Bencher) {
})
}
macro_rules! make_test_inner {
($s:ident, $code:expr, $name:ident, $str:expr) => {
($s:ident, $code:expr, $name:ident, $str:expr, $iters:expr) => {
#[bench]
fn $name(bencher: &mut Bencher) {
let mut $s = $str;
black_box(&mut $s);
bencher.iter(|| $code);
bencher.iter(|| for _ in 0..$iters { black_box($code); });
}
}
}
macro_rules! make_test {
($name:ident, $s:ident, $code:expr) => {
make_test!($name, $s, $code, 1);
};
($name:ident, $s:ident, $code:expr, $iters:expr) => {
mod $name {
use test::Bencher;
use test::black_box;
// Short strings: 65 bytes each
make_test_inner!($s, $code, short_ascii,
"Mary had a little lamb, Little lamb Mary had a littl lamb, lamb!");
"Mary had a little lamb, Little lamb Mary had a littl lamb, lamb!", $iters);
make_test_inner!($s, $code, short_mixed,
"ศไทย中华Việt Nam; Mary had a little lamb, Little lam!");
"ศไทย中华Việt Nam; Mary had a little lamb, Little lam!", $iters);
make_test_inner!($s, $code, short_pile_of_poo,
"💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩!");
"💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩!", $iters);
make_test_inner!($s, $code, long_lorem_ipsum,"\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \
ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \
@ -253,7 +257,7 @@ Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas
feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \
vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \
leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \
malesuada sollicitudin quam eu fermentum!");
malesuada sollicitudin quam eu fermentum!", $iters);
}
}
}
@ -288,6 +292,13 @@ make_test!(find_zzz_char, s, s.find('\u{1F4A4}'));
make_test!(rfind_zzz_char, s, s.rfind('\u{1F4A4}'));
make_test!(find_zzz_str, s, s.find("\u{1F4A4}"));
make_test!(starts_with_ascii_char, s, s.starts_with('/'), 1024);
make_test!(ends_with_ascii_char, s, s.ends_with('/'), 1024);
make_test!(starts_with_unichar, s, s.starts_with('\u{1F4A4}'), 1024);
make_test!(ends_with_unichar, s, s.ends_with('\u{1F4A4}'), 1024);
make_test!(starts_with_str, s, s.starts_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"), 1024);
make_test!(ends_with_str, s, s.ends_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"), 1024);
make_test!(split_space_char, s, s.split(' ').count());
make_test!(split_terminator_space_char, s, s.split_terminator(' ').count());

View File

@ -813,6 +813,7 @@ impl str {
/// assert!(!bananas.contains("apples"));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn contains<'a, P: Pattern<'a>>(&'a self, pat: P) -> bool {
core_str::StrExt::contains(self, pat)
}
@ -900,6 +901,7 @@ impl str {
/// assert_eq!(s.find(x), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn find<'a, P: Pattern<'a>>(&'a self, pat: P) -> Option<usize> {
core_str::StrExt::find(self, pat)
}
@ -944,6 +946,7 @@ impl str {
/// assert_eq!(s.rfind(x), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn rfind<'a, P: Pattern<'a>>(&'a self, pat: P) -> Option<usize>
where P::Searcher: ReverseSearcher<'a>
{
@ -1057,6 +1060,7 @@ impl str {
///
/// [`split_whitespace`]: #method.split_whitespace
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn split<'a, P: Pattern<'a>>(&'a self, pat: P) -> Split<'a, P> {
core_str::StrExt::split(self, pat)
}
@ -1106,6 +1110,7 @@ impl str {
/// assert_eq!(v, ["ghi", "def", "abc"]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn rsplit<'a, P: Pattern<'a>>(&'a self, pat: P) -> RSplit<'a, P>
where P::Searcher: ReverseSearcher<'a>
{
@ -1152,6 +1157,7 @@ impl str {
/// assert_eq!(v, ["A", "", "B", ""]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn split_terminator<'a, P: Pattern<'a>>(&'a self, pat: P) -> SplitTerminator<'a, P> {
core_str::StrExt::split_terminator(self, pat)
}
@ -1195,6 +1201,7 @@ impl str {
/// assert_eq!(v, ["", "B", "", "A"]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn rsplit_terminator<'a, P: Pattern<'a>>(&'a self, pat: P) -> RSplitTerminator<'a, P>
where P::Searcher: ReverseSearcher<'a>
{
@ -1247,6 +1254,7 @@ impl str {
/// assert_eq!(v, ["abc", "defXghi"]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn splitn<'a, P: Pattern<'a>>(&'a self, n: usize, pat: P) -> SplitN<'a, P> {
core_str::StrExt::splitn(self, n, pat)
}
@ -1294,6 +1302,7 @@ impl str {
/// assert_eq!(v, ["ghi", "abc1def"]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn rsplitn<'a, P: Pattern<'a>>(&'a self, n: usize, pat: P) -> RSplitN<'a, P>
where P::Searcher: ReverseSearcher<'a>
{
@ -1334,6 +1343,7 @@ impl str {
/// assert_eq!(v, ["1", "2", "3"]);
/// ```
#[stable(feature = "str_matches", since = "1.2.0")]
#[inline]
pub fn matches<'a, P: Pattern<'a>>(&'a self, pat: P) -> Matches<'a, P> {
core_str::StrExt::matches(self, pat)
}
@ -1370,6 +1380,7 @@ impl str {
/// assert_eq!(v, ["3", "2", "1"]);
/// ```
#[stable(feature = "str_matches", since = "1.2.0")]
#[inline]
pub fn rmatches<'a, P: Pattern<'a>>(&'a self, pat: P) -> RMatches<'a, P>
where P::Searcher: ReverseSearcher<'a>
{
@ -1415,6 +1426,7 @@ impl str {
/// assert_eq!(v, [(0, "aba")]); // only the first `aba`
/// ```
#[stable(feature = "str_match_indices", since = "1.5.0")]
#[inline]
pub fn match_indices<'a, P: Pattern<'a>>(&'a self, pat: P) -> MatchIndices<'a, P> {
core_str::StrExt::match_indices(self, pat)
}
@ -1457,6 +1469,7 @@ impl str {
/// assert_eq!(v, [(2, "aba")]); // only the last `aba`
/// ```
#[stable(feature = "str_match_indices", since = "1.5.0")]
#[inline]
pub fn rmatch_indices<'a, P: Pattern<'a>>(&'a self, pat: P) -> RMatchIndices<'a, P>
where P::Searcher: ReverseSearcher<'a>
{
@ -1737,6 +1750,7 @@ impl str {
/// assert_eq!(s, s.replace("cookie monster", "little lamb"));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn replace<'a, P: Pattern<'a>>(&'a self, from: P, to: &str) -> String {
let mut result = String::new();
let mut last_end = 0;

View File

@ -1220,11 +1220,14 @@ impl<T> Vec<T> {
}
impl<T: Clone> Vec<T> {
/// Resizes the `Vec` in-place so that `len()` is equal to `new_len`.
/// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
///
/// If `new_len` is greater than `len()`, the `Vec` is extended by the
/// If `new_len` is greater than `len`, the `Vec` is extended by the
/// difference, with each additional slot filled with `value`.
/// If `new_len` is less than `len()`, the `Vec` is simply truncated.
/// If `new_len` is less than `len`, the `Vec` is simply truncated.
///
/// This method requires `Clone` to clone the passed value. If you'd
/// rather create a value with `Default` instead, see [`resize_default`].
///
/// # Examples
///
@ -1237,46 +1240,19 @@ impl<T: Clone> Vec<T> {
/// vec.resize(2, 0);
/// assert_eq!(vec, [1, 2]);
/// ```
///
/// [`resize_default`]: #method.resize_default
#[stable(feature = "vec_resize", since = "1.5.0")]
pub fn resize(&mut self, new_len: usize, value: T) {
let len = self.len();
if new_len > len {
self.extend_with_element(new_len - len, value);
self.extend_with(new_len - len, ExtendElement(value))
} else {
self.truncate(new_len);
}
}
/// Extend the vector by `n` additional clones of `value`.
fn extend_with_element(&mut self, n: usize, value: T) {
self.reserve(n);
unsafe {
let mut ptr = self.as_mut_ptr().offset(self.len() as isize);
// Use SetLenOnDrop to work around bug where compiler
// may not realize the store through `ptr` through self.set_len()
// don't alias.
let mut local_len = SetLenOnDrop::new(&mut self.len);
// Write all elements except the last one
for _ in 1..n {
ptr::write(ptr, value.clone());
ptr = ptr.offset(1);
// Increment the length in every step in case clone() panics
local_len.increment_len(1);
}
if n > 0 {
// We can write the last element directly without cloning needlessly
ptr::write(ptr, value);
local_len.increment_len(1);
}
// len set by scope guard
}
}
/// Clones and appends all elements in a slice to the `Vec`.
///
/// Iterates over the slice `other`, clones each element, and then appends
@ -1300,6 +1276,92 @@ impl<T: Clone> Vec<T> {
}
}
impl<T: Default> Vec<T> {
/// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
///
/// If `new_len` is greater than `len`, the `Vec` is extended by the
/// difference, with each additional slot filled with `Default::default()`.
/// If `new_len` is less than `len`, the `Vec` is simply truncated.
///
/// This method uses `Default` to create new values on every push. If
/// you'd rather `Clone` a given value, use [`resize`].
///
/// # Examples
///
/// ```
/// #![feature(vec_resize_default)]
///
/// let mut vec = vec![1, 2, 3];
/// vec.resize_default(5);
/// assert_eq!(vec, [1, 2, 3, 0, 0]);
///
/// let mut vec = vec![1, 2, 3, 4];
/// vec.resize_default(2);
/// assert_eq!(vec, [1, 2]);
/// ```
///
/// [`resize`]: #method.resize
#[unstable(feature = "vec_resize_default", issue = "41758")]
pub fn resize_default(&mut self, new_len: usize) {
let len = self.len();
if new_len > len {
self.extend_with(new_len - len, ExtendDefault);
} else {
self.truncate(new_len);
}
}
}
// This code generalises `extend_with_{element,default}`.
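// `next` yields a value for each intermediate slot, while `last` consumes the
// generator so the final slot can take ownership instead of cloning.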
trait ExtendWith<T> {
fn next(&self) -> T;
fn last(self) -> T;
}
struct ExtendElement<T>(T);
impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
fn next(&self) -> T { self.0.clone() }
fn last(self) -> T { self.0 }
}
struct ExtendDefault;
impl<T: Default> ExtendWith<T> for ExtendDefault {
fn next(&self) -> T { Default::default() }
fn last(self) -> T { Default::default() }
}
impl<T> Vec<T> {
/// Extend the vector by `n` values, using the given generator.
fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, value: E) {
self.reserve(n);
unsafe {
let mut ptr = self.as_mut_ptr().offset(self.len() as isize);
// Use SetLenOnDrop to work around a bug where the compiler
// may not realize that the stores through `ptr` and through
// `self.set_len()` don't alias.
let mut local_len = SetLenOnDrop::new(&mut self.len);
// Write all elements except the last one
for _ in 1..n {
ptr::write(ptr, value.next());
ptr = ptr.offset(1);
// Increment the length in every step in case next() panics
local_len.increment_len(1);
}
if n > 0 {
// We can write the last element directly without cloning needlessly
ptr::write(ptr, value.last());
local_len.increment_len(1);
}
// len set by scope guard
}
}
}
// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
//
// The idea is: The length field in SetLenOnDrop is a local variable
@ -1389,7 +1451,7 @@ trait SpecFromElem: Sized {
impl<T: Clone> SpecFromElem for T {
default fn from_elem(elem: Self, n: usize) -> Vec<Self> {
let mut v = Vec::with_capacity(n);
v.extend_with_element(n, elem);
v.extend_with(n, ExtendElement(elem));
v
}
}
@ -1424,7 +1486,7 @@ macro_rules! impl_spec_from_elem {
}
}
let mut v = Vec::with_capacity(n);
v.extend_with_element(n, elem);
v.extend_with(n, ExtendElement(elem));
v
}
}

View File

@ -21,7 +21,7 @@ fn float_to_decimal_common_exact<T>(fmt: &mut Formatter, num: &T,
{
unsafe {
let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64
let mut parts: [flt2dec::Part; 5] = mem::uninitialized();
let mut parts: [flt2dec::Part; 4] = mem::uninitialized();
let formatted = flt2dec::to_exact_fixed_str(flt2dec::strategy::grisu::format_exact,
*num, sign, precision,
false, &mut buf, &mut parts);
@ -39,7 +39,7 @@ fn float_to_decimal_common_shortest<T>(fmt: &mut Formatter,
unsafe {
// enough for f32 and f64
let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized();
let mut parts: [flt2dec::Part; 5] = mem::uninitialized();
let mut parts: [flt2dec::Part; 4] = mem::uninitialized();
let formatted = flt2dec::to_shortest_str(flt2dec::strategy::grisu::format_shortest,
*num, sign, 0, false, &mut buf, &mut parts);
fmt.pad_formatted_parts(&formatted)
@ -75,7 +75,7 @@ fn float_to_exponential_common_exact<T>(fmt: &mut Formatter, num: &T,
{
unsafe {
let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64
let mut parts: [flt2dec::Part; 7] = mem::uninitialized();
let mut parts: [flt2dec::Part; 6] = mem::uninitialized();
let formatted = flt2dec::to_exact_exp_str(flt2dec::strategy::grisu::format_exact,
*num, sign, precision,
upper, &mut buf, &mut parts);
@ -94,7 +94,7 @@ fn float_to_exponential_common_shortest<T>(fmt: &mut Formatter,
unsafe {
// enough for f32 and f64
let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized();
let mut parts: [flt2dec::Part; 7] = mem::uninitialized();
let mut parts: [flt2dec::Part; 6] = mem::uninitialized();
let formatted = flt2dec::to_shortest_exp_str(flt2dec::strategy::grisu::format_shortest,
*num, sign, (0, 0), upper,
&mut buf, &mut parts);

View File

@ -11,7 +11,7 @@
use cmp::Ordering;
use super::{Chain, Cycle, Cloned, Enumerate, Filter, FilterMap, FlatMap, Fuse};
use super::{Inspect, Map, Peekable, Scan, Skip, SkipWhile, Take, TakeWhile, Rev};
use super::{Inspect, Map, Peekable, Scan, Skip, SkipWhile, StepBy, Take, TakeWhile, Rev};
use super::{Zip, Sum, Product};
use super::{ChainState, FromIterator, ZipImpl};
@ -258,6 +258,39 @@ pub trait Iterator {
None
}
/// Creates an iterator starting at the same point, but stepping by
/// the given amount at each iteration.
///
/// Note that it will always return the first element of the range,
/// regardless of the step given.
///
/// # Panics
///
/// The method will panic if the given step is `0`.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// #![feature(iterator_step_by)]
/// let a = [0, 1, 2, 3, 4, 5];
/// let mut iter = a.into_iter().step_by(2);
///
/// assert_eq!(iter.next(), Some(&0));
/// assert_eq!(iter.next(), Some(&2));
/// assert_eq!(iter.next(), Some(&4));
/// assert_eq!(iter.next(), None);
/// ```
#[inline]
#[unstable(feature = "iterator_step_by",
reason = "unstable replacement of Range::step_by",
issue = "27741")]
fn step_by(self, step: usize) -> StepBy<Self> where Self: Sized {
assert!(step != 0);
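// Store `step - 1`: `next` advances with `nth(self.step)`,
// and `nth(n)` consumes n + 1 elements.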
StepBy{iter: self, step: step - 1, first_take: true}
}
/// Takes two iterators and creates a new iterator over both in sequence.
///
/// `chain()` will return a new iterator which will first iterate over

View File

@ -313,7 +313,7 @@ pub use self::iterator::Iterator;
pub use self::range::Step;
#[unstable(feature = "step_by", reason = "recent addition",
issue = "27741")]
pub use self::range::StepBy;
pub use self::range::StepBy as DeprecatedStepBy;
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::sources::{Repeat, repeat};
@ -520,6 +520,41 @@ impl<I> Iterator for Cycle<I> where I: Clone + Iterator {
#[unstable(feature = "fused", issue = "35602")]
impl<I> FusedIterator for Cycle<I> where I: Clone + Iterator {}
/// An iterator that steps by n elements every iteration.
///
/// This `struct` is created by the [`step_by`] method on [`Iterator`]. See
/// its documentation for more.
///
/// [`step_by`]: trait.Iterator.html#method.step_by
/// [`Iterator`]: trait.Iterator.html
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[unstable(feature = "iterator_step_by",
reason = "unstable replacement of Range::step_by",
issue = "27741")]
#[derive(Clone, Debug)]
pub struct StepBy<I> {
iter: I,
step: usize,
first_take: bool,
}
#[unstable(feature = "iterator_step_by",
reason = "unstable replacement of Range::step_by",
issue = "27741")]
impl<I> Iterator for StepBy<I> where I: Iterator {
type Item = I::Item;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.first_take {
self.first_take = false;
self.iter.next()
} else {
self.iter.nth(self.step)
}
}
}
/// An iterator that strings two iterators together.
///
/// This `struct` is created by the [`chain`] method on [`Iterator`]. See its

View File

@ -410,8 +410,8 @@ fn determine_sign(sign: Sign, decoded: &FullDecoded, negative: bool) -> &'static
/// it will only print given digits and nothing else.
///
/// The byte buffer should be at least `MAX_SIG_DIGITS` bytes long.
/// There should be at least 5 parts available, due to the worst case like
/// `[+][0.][0000][45][0000]` with `frac_digits = 10`.
/// There should be at least 4 parts available, due to the worst case like
/// `[+][0.][0000][2][0000]` with `frac_digits = 10`.
pub fn to_shortest_str<'a, T, F>(mut format_shortest: F, v: T,
sign: Sign, frac_digits: usize, _upper: bool,
buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
@ -465,8 +465,8 @@ pub fn to_shortest_str<'a, T, F>(mut format_shortest: F, v: T,
/// cannot be in this range, avoiding any confusion.
///
/// The byte buffer should be at least `MAX_SIG_DIGITS` bytes long.
/// There should be at least 7 parts available, due to the worst case like
/// `[+][1][.][2345][e][-][67]`.
/// There should be at least 6 parts available, due to the worst case like
/// `[+][1][.][2345][e][-][6]`.
pub fn to_shortest_exp_str<'a, T, F>(mut format_shortest: F, v: T,
sign: Sign, dec_bounds: (i16, i16), upper: bool,
buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
@ -544,8 +544,8 @@ fn estimate_max_buf_len(exp: i16) -> usize {
/// The byte buffer should be at least `ndigits` bytes long unless `ndigits` is
/// so large that only the fixed number of digits will be ever written.
/// (The tipping point for `f64` is about 800, so 1000 bytes should be enough.)
/// There should be at least 7 parts available, due to the worst case like
/// `[+][1][.][2345][e][-][67]`.
/// There should be at least 6 parts available, due to the worst case like
/// `[+][1][.][2345][e][-][6]`.
pub fn to_exact_exp_str<'a, T, F>(mut format_exact: F, v: T,
sign: Sign, ndigits: usize, upper: bool,
buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
@ -600,8 +600,8 @@ pub fn to_exact_exp_str<'a, T, F>(mut format_exact: F, v: T,
/// The byte buffer should be enough for the output unless `frac_digits` is
/// so large that only the fixed number of digits will be ever written.
/// (The tipping point for `f64` is about 800, and 1000 bytes should be enough.)
/// There should be at least 5 parts available, due to the worst case like
/// `[+][0.][0000][45][0000]` with `frac_digits = 10`.
/// There should be at least 4 parts available, due to the worst case like
/// `[+][0.][0000][2][0000]` with `frac_digits = 10`.
pub fn to_exact_fixed_str<'a, T, F>(mut format_exact: F, v: T,
sign: Sign, frac_digits: usize, _upper: bool,
buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>

View File

@ -12,6 +12,7 @@ use super::Wrapping;
use ops::*;
#[allow(unused_macros)]
macro_rules! sh_impl_signed {
($t:ident, $f:ident) => (
#[stable(feature = "rust1", since = "1.0.0")]

View File

@ -799,6 +799,7 @@ macro_rules! neg_impl_numeric {
($($t:ty)*) => { neg_impl_core!{ x => -x, $($t)*} }
}
#[allow(unused_macros)]
macro_rules! neg_impl_unsigned {
($($t:ty)*) => {
neg_impl_core!{ x => {

View File

@ -429,7 +429,33 @@ impl<'a> DoubleEndedSearcher<'a> for CharSearcher<'a> {}
/// Searches for chars that are equal to a given char
impl<'a> Pattern<'a> for char {
pattern_methods!(CharSearcher<'a>, CharEqPattern, CharSearcher);
type Searcher = CharSearcher<'a>;
#[inline]
fn into_searcher(self, haystack: &'a str) -> Self::Searcher {
CharSearcher(CharEqPattern(self).into_searcher(haystack))
}
#[inline]
fn is_contained_in(self, haystack: &'a str) -> bool {
if (self as u32) < 128 {
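// ASCII fast path: a char below 128 is a single UTF-8 byte, so we can
// search the byte slice directly.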
haystack.as_bytes().contains(&(self as u8))
} else {
let mut buffer = [0u8; 4];
self.encode_utf8(&mut buffer).is_contained_in(haystack)
}
}
#[inline]
fn is_prefix_of(self, haystack: &'a str) -> bool {
CharEqPattern(self).is_prefix_of(haystack)
}
#[inline]
fn is_suffix_of(self, haystack: &'a str) -> bool where Self::Searcher: ReverseSearcher<'a>
{
CharEqPattern(self).is_suffix_of(haystack)
}
}
/////////////////////////////////////////////////////////////////////////////

View File

@ -144,6 +144,33 @@ fn test_iterator_chain_find() {
assert_eq!(iter.next(), None);
}
#[test]
fn test_iterator_step_by() {
// Identity
// Replace with (0..).step_by(1) after Range::step_by gets removed
let mut it = Iterator::step_by((0..), 1).take(3);
assert_eq!(it.next(), Some(0));
assert_eq!(it.next(), Some(1));
assert_eq!(it.next(), Some(2));
assert_eq!(it.next(), None);
// Replace with (0..).step_by(3) after Range::step_by gets removed
let mut it = Iterator::step_by((0..), 3).take(4);
assert_eq!(it.next(), Some(0));
assert_eq!(it.next(), Some(3));
assert_eq!(it.next(), Some(6));
assert_eq!(it.next(), Some(9));
assert_eq!(it.next(), None);
}
#[test]
#[should_panic]
fn test_iterator_step_by_zero() {
// Replace with (0..).step_by(0) after Range::step_by gets removed
let mut it = Iterator::step_by((0..), 0);
it.next();
}
#[test]
fn test_filter_map() {
let it = (0..).step_by(1).take(10)
@ -1119,4 +1146,4 @@ fn test_step_replace_no_between() {
let y = x.replace_one();
assert_eq!(x, 1);
assert_eq!(y, 5);
}
}

View File

@ -20,6 +20,7 @@
#![feature(fixed_size_array)]
#![feature(flt2dec)]
#![feature(fmt_internals)]
#![feature(iterator_step_by)]
#![feature(i128_type)]
#![feature(iter_rfind)]
#![feature(libc)]

View File

@ -133,6 +133,14 @@ impl<'a> Quote for &'a str {
}
}
impl Quote for usize {
fn quote(&self) -> TokenStream {
let integer_symbol = Symbol::intern(&self.to_string());
TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None))
.into()
}
}
impl Quote for Ident {
fn quote(&self) -> TokenStream {
// FIXME(jseyfried) quote hygiene
@ -193,15 +201,17 @@ impl Quote for token::BinOpToken {
impl Quote for Lit {
fn quote(&self) -> TokenStream {
macro_rules! gen_match {
($($i:ident),*) => {
($($i:ident),*; $($raw:ident),*) => {
match *self {
$( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )*
_ => panic!("Unsupported literal"),
$( Lit::$raw(lit, n) => {
quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n)))
})*
}
}
}
gen_match!(Byte, Char, Float, Str_, Integer, ByteStr)
gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw)
}
}

View File

@ -13,12 +13,12 @@ arena = { path = "../libarena" }
fmt_macros = { path = "../libfmt_macros" }
graphviz = { path = "../libgraphviz" }
log = "0.3"
owning_ref = "0.3.3"
rustc_back = { path = "../librustc_back" }
rustc_bitflags = { path = "../librustc_bitflags" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
rustc_llvm = { path = "../librustc_llvm" }
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }

View File

@ -106,6 +106,8 @@ pub enum DepNode<D: Clone + Debug> {
UsedTraitImports(D),
ConstEval(D),
SymbolName(D),
SpecializationGraph(D),
ObjectSafety(D),
// The set of impls for a given trait. Ultimately, it would be
// nice to get more fine-grained here (e.g., to include a
@ -116,6 +118,8 @@ pub enum DepNode<D: Clone + Debug> {
// than changes in the impl body.
TraitImpls(D),
AllLocalTraitImpls,
// Nodes representing caches. To properly handle a true cache, we
// don't use a DepTrackingMap, but rather we push a task node.
// Otherwise the write into the map would be incorrectly
@ -262,7 +266,10 @@ impl<D: Clone + Debug> DepNode<D> {
UsedTraitImports(ref d) => op(d).map(UsedTraitImports),
ConstEval(ref d) => op(d).map(ConstEval),
SymbolName(ref d) => op(d).map(SymbolName),
SpecializationGraph(ref d) => op(d).map(SpecializationGraph),
ObjectSafety(ref d) => op(d).map(ObjectSafety),
TraitImpls(ref d) => op(d).map(TraitImpls),
AllLocalTraitImpls => Some(AllLocalTraitImpls),
TraitItems(ref d) => op(d).map(TraitItems),
ReprHints(ref d) => op(d).map(ReprHints),
TraitSelect { ref trait_def_id, ref input_def_id } => {

View File

@ -409,6 +409,67 @@ RFC. It is, however, [currently unimplemented][iss15872].
[iss15872]: https://github.com/rust-lang/rust/issues/15872
"##,
E0119: r##"
There are conflicting trait implementations for the same type.
Example of erroneous code:
```compile_fail,E0119
trait MyTrait {
fn get(&self) -> usize;
}
impl<T> MyTrait for T {
fn get(&self) -> usize { 0 }
}
struct Foo {
value: usize
}
impl MyTrait for Foo { // error: conflicting implementations of trait
// `MyTrait` for type `Foo`
fn get(&self) -> usize { self.value }
}
```
When looking for an implementation of the trait, the compiler finds
both the blanket `impl<T> MyTrait for T`, which covers all types, and the
`impl MyTrait for Foo`. Since a trait cannot be implemented for a type
multiple times, this is an error. So, when you write:
```
trait MyTrait {
fn get(&self) -> usize;
}
impl<T> MyTrait for T {
fn get(&self) -> usize { 0 }
}
```
This implements the trait for all types in scope. If you then try to
implement it for a specific type as well, the two implementations will
conflict. Example:
```
trait MyTrait {
fn get(&self) -> usize;
}
impl<T> MyTrait for T {
fn get(&self) -> usize { 0 }
}
struct Foo;
fn main() {
let f = Foo;
f.get(); // the trait is implemented so we can use it
}
```
"##,
E0133: r##"
Unsafe code was used outside of an unsafe function or block.

View File

@ -16,6 +16,7 @@
use hir;
use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE, DefIndexAddressSpace};
use ich::Fingerprint;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::stable_hasher::StableHasher;
@ -34,7 +35,7 @@ use util::nodemap::NodeMap;
pub struct DefPathTable {
index_to_key: [Vec<DefKey>; 2],
key_to_index: FxHashMap<DefKey, DefIndex>,
def_path_hashes: [Vec<u64>; 2],
def_path_hashes: [Vec<Fingerprint>; 2],
}
// Unfortunately we have to provide a manual impl of Clone because of the
@ -55,7 +56,7 @@ impl DefPathTable {
fn allocate(&mut self,
key: DefKey,
def_path_hash: u64,
def_path_hash: Fingerprint,
address_space: DefIndexAddressSpace)
-> DefIndex {
let index = {
@ -79,7 +80,7 @@ impl DefPathTable {
}
#[inline(always)]
pub fn def_path_hash(&self, index: DefIndex) -> u64 {
pub fn def_path_hash(&self, index: DefIndex) -> Fingerprint {
self.def_path_hashes[index.address_space().index()]
[index.as_array_index()]
}
@ -146,8 +147,8 @@ impl Decodable for DefPathTable {
let index_to_key_lo: Vec<DefKey> = Decodable::decode(d)?;
let index_to_key_hi: Vec<DefKey> = Decodable::decode(d)?;
let def_path_hashes_lo: Vec<u64> = Decodable::decode(d)?;
let def_path_hashes_hi: Vec<u64> = Decodable::decode(d)?;
let def_path_hashes_lo: Vec<Fingerprint> = Decodable::decode(d)?;
let def_path_hashes_hi: Vec<Fingerprint> = Decodable::decode(d)?;
let index_to_key = [index_to_key_lo, index_to_key_hi];
let def_path_hashes = [def_path_hashes_lo, def_path_hashes_hi];
@ -210,7 +211,7 @@ pub struct DefKey {
}
impl DefKey {
fn compute_stable_hash(&self, parent_hash: u64) -> u64 {
fn compute_stable_hash(&self, parent_hash: Fingerprint) -> Fingerprint {
let mut hasher = StableHasher::new();
// We hash a 0u8 here to disambiguate between regular DefPath hashes,
@ -221,7 +222,7 @@ impl DefKey {
hasher.finish()
}
fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> u64 {
fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> Fingerprint {
let mut hasher = StableHasher::new();
// Disambiguate this from a regular DefPath hash,
// see compute_stable_hash() above.
@ -396,7 +397,7 @@ impl Definitions {
}
#[inline(always)]
pub fn def_path_hash(&self, index: DefIndex) -> u64 {
pub fn def_path_hash(&self, index: DefIndex) -> Fingerprint {
self.table.def_path_hash(index)
}

View File

@ -497,7 +497,7 @@ impl<'hir> Map<'hir> {
}
pub fn trait_impls(&self, trait_did: DefId) -> &'hir [NodeId] {
self.dep_graph.read(DepNode::TraitImpls(trait_did));
self.dep_graph.read(DepNode::AllLocalTraitImpls);
// NB: intentionally bypass `self.forest.krate()` so that we
// do not trigger a read of the whole krate here
@ -505,7 +505,7 @@ impl<'hir> Map<'hir> {
}
pub fn trait_default_impl(&self, trait_did: DefId) -> Option<NodeId> {
self.dep_graph.read(DepNode::TraitImpls(trait_did));
self.dep_graph.read(DepNode::AllLocalTraitImpls);
// NB: intentionally bypass `self.forest.krate()` so that we
// do not trigger a read of the whole krate here

View File

@ -10,87 +10,75 @@
use rustc_serialize::{Encodable, Decodable, Encoder, Decoder};
use rustc_data_structures::stable_hasher;
use rustc_data_structures::ToHex;
const FINGERPRINT_LENGTH: usize = 16;
use std::mem;
use std::slice;
#[derive(Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Clone, Copy)]
pub struct Fingerprint(pub [u8; FINGERPRINT_LENGTH]);
pub struct Fingerprint(u64, u64);
impl Fingerprint {
#[inline]
pub fn zero() -> Fingerprint {
Fingerprint([0; FINGERPRINT_LENGTH])
Fingerprint(0, 0)
}
#[inline]
pub fn from_smaller_hash(hash: u64) -> Fingerprint {
let mut result = Fingerprint::zero();
result.0[0] = (hash >> 0) as u8;
result.0[1] = (hash >> 8) as u8;
result.0[2] = (hash >> 16) as u8;
result.0[3] = (hash >> 24) as u8;
result.0[4] = (hash >> 32) as u8;
result.0[5] = (hash >> 40) as u8;
result.0[6] = (hash >> 48) as u8;
result.0[7] = (hash >> 56) as u8;
result
Fingerprint(hash, hash)
}
#[inline]
pub fn to_smaller_hash(&self) -> u64 {
((self.0[0] as u64) << 0) |
((self.0[1] as u64) << 8) |
((self.0[2] as u64) << 16) |
((self.0[3] as u64) << 24) |
((self.0[4] as u64) << 32) |
((self.0[5] as u64) << 40) |
((self.0[6] as u64) << 48) |
((self.0[7] as u64) << 56)
self.0
}
pub fn to_hex(&self) -> String {
self.0.to_hex()
format!("{:x}{:x}", self.0, self.1)
}
}
impl Encodable for Fingerprint {
#[inline]
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
for &byte in &self.0 {
s.emit_u8(byte)?;
}
Ok(())
s.emit_u64(self.0.to_le())?;
s.emit_u64(self.1.to_le())
}
}
impl Decodable for Fingerprint {
#[inline]
fn decode<D: Decoder>(d: &mut D) -> Result<Fingerprint, D::Error> {
let mut result = Fingerprint([0u8; FINGERPRINT_LENGTH]);
for byte in &mut result.0 {
*byte = d.read_u8()?;
}
Ok(result)
let _0 = u64::from_le(d.read_u64()?);
let _1 = u64::from_le(d.read_u64()?);
Ok(Fingerprint(_0, _1))
}
}
impl ::std::fmt::Display for Fingerprint {
fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
for i in 0 .. self.0.len() {
if i > 0 {
write!(formatter, "::")?;
}
write!(formatter, "{}", self.0[i])?;
}
Ok(())
write!(formatter, "{:x}-{:x}", self.0, self.1)
}
}
impl stable_hasher::StableHasherResult for Fingerprint {
fn finish(mut hasher: stable_hasher::StableHasher<Self>) -> Self {
let mut fingerprint = Fingerprint::zero();
fingerprint.0.copy_from_slice(hasher.finalize());
fingerprint
let hash_bytes: &[u8] = hasher.finalize();
assert!(hash_bytes.len() >= mem::size_of::<u64>() * 2);
let hash_bytes: &[u64] = unsafe {
slice::from_raw_parts(hash_bytes.as_ptr() as *const u64, 2)
};
// The bytes returned by the Blake2B hasher are always little-endian.
Fingerprint(u64::from_le(hash_bytes[0]), u64::from_le(hash_bytes[1]))
}
}
impl<CTX> stable_hasher::HashStable<CTX> for Fingerprint {
#[inline]
fn hash_stable<W: stable_hasher::StableHasherResult>(&self,
_: &mut CTX,
hasher: &mut stable_hasher::StableHasher<W>) {
::std::hash::Hash::hash(self, hasher);
}
}

View File

@ -16,7 +16,7 @@ use ty;
use util::nodemap::NodeMap;
use std::hash as std_hash;
use std::collections::{HashMap, HashSet};
use std::collections::{HashMap, HashSet, BTreeMap};
use syntax::ast;
use syntax::attr;
@ -110,7 +110,7 @@ impl<'a, 'tcx: 'a> StableHashingContext<'a, 'tcx> {
}
#[inline]
pub fn def_path_hash(&mut self, def_id: DefId) -> u64 {
pub fn def_path_hash(&mut self, def_id: DefId) -> ich::Fingerprint {
self.tcx.def_path_hash(def_id)
}
@ -348,3 +348,25 @@ pub fn hash_stable_nodemap<'a, 'tcx, V, W>(hcx: &mut StableHashingContext<'a, 't
hcx.tcx.hir.definitions().node_to_hir_id(*node_id).local_id
});
}
pub fn hash_stable_btreemap<'a, 'tcx, K, V, SK, F, W>(hcx: &mut StableHashingContext<'a, 'tcx>,
hasher: &mut StableHasher<W>,
map: &BTreeMap<K, V>,
extract_stable_key: F)
where K: Eq + Ord,
V: HashStable<StableHashingContext<'a, 'tcx>>,
SK: HashStable<StableHashingContext<'a, 'tcx>> + Ord + Clone,
F: Fn(&mut StableHashingContext<'a, 'tcx>, &K) -> SK,
W: StableHasherResult,
{
let mut keys: Vec<_> = map.keys()
.map(|k| (extract_stable_key(hcx, k), k))
.collect();
keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
keys.len().hash_stable(hcx, hasher);
for (stable_key, key) in keys {
stable_key.hash_stable(hcx, hasher);
map[key].hash_stable(hcx, hasher);
}
}

View File

@ -13,7 +13,8 @@
pub use self::fingerprint::Fingerprint;
pub use self::caching_codemap_view::CachingCodemapView;
pub use self::hcx::{StableHashingContext, NodeIdHashingMode, hash_stable_hashmap,
hash_stable_hashset, hash_stable_nodemap};
hash_stable_hashset, hash_stable_nodemap,
hash_stable_btreemap};
mod fingerprint;
mod caching_codemap_view;
mod hcx;

View File

@ -39,10 +39,12 @@ use super::sub::Sub;
use super::InferCtxt;
use super::{MiscVariable, TypeTrace};
use hir::def_id::DefId;
use ty::{IntType, UintType};
use ty::{self, Ty, TyCtxt};
use ty::error::TypeError;
use ty::relate::{self, Relate, RelateResult, TypeRelation};
use ty::subst::Substs;
use traits::{Obligation, PredicateObligations};
use syntax::ast;
@ -336,6 +338,23 @@ impl<'cx, 'gcx, 'tcx> TypeRelation<'cx, 'gcx, 'tcx> for Generalizer<'cx, 'gcx, '
Ok(ty::Binder(self.relate(a.skip_binder(), b.skip_binder())?))
}
fn relate_item_substs(&mut self,
item_def_id: DefId,
a_subst: &'tcx Substs<'tcx>,
b_subst: &'tcx Substs<'tcx>)
-> RelateResult<'tcx, &'tcx Substs<'tcx>>
{
if self.ambient_variance == ty::Variance::Invariant {
// Avoid fetching the variance if we are in an invariant
// context; no need, and it can induce dependency cycles
// (e.g. #41849).
relate::relate_substs(self, None, a_subst, b_subst)
} else {
let opt_variances = self.tcx().variances_of(item_def_id);
relate::relate_substs(self, Some(&opt_variances), a_subst, b_subst)
}
}
fn relate_with_variance<T: Relate<'tcx>>(&mut self,
variance: ty::Variance,
a: &T,

View File

@ -54,7 +54,7 @@ extern crate fmt_macros;
extern crate getopts;
extern crate graphviz;
extern crate libc;
extern crate rustc_llvm as llvm;
extern crate owning_ref;
extern crate rustc_back;
extern crate rustc_data_structures;
extern crate serialize;

View File

@ -76,6 +76,12 @@ declare_lint! {
"detects unreachable patterns"
}
declare_lint! {
pub UNUSED_MACROS,
Warn,
"detects macros that were not used"
}
declare_lint! {
pub WARNINGS,
Warn,
@ -259,6 +265,7 @@ impl LintPass for HardwiredLints {
DEAD_CODE,
UNREACHABLE_CODE,
UNREACHABLE_PATTERNS,
UNUSED_MACROS,
WARNINGS,
UNUSED_FEATURES,
STABLE_FEATURES,

View File

@ -49,6 +49,7 @@ use hir;
use hir::def_id::LOCAL_CRATE;
use hir::intravisit as hir_visit;
use syntax::visit as ast_visit;
use syntax::tokenstream::ThinTokenStream;
/// Information about the registered lints.
///
@ -1125,6 +1126,13 @@ impl<'a> ast_visit::Visitor<'a> for EarlyContext<'a> {
fn visit_attribute(&mut self, attr: &'a ast::Attribute) {
run_lints!(self, check_attribute, early_passes, attr);
}
fn visit_mac_def(&mut self, _mac: &'a ThinTokenStream, id: ast::NodeId) {
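// Emit any lints that were buffered against this macro definition's `NodeId`.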
let lints = self.sess.lints.borrow_mut().take(id);
for early_lint in lints {
self.early_lint(&early_lint);
}
}
}
enum CheckLintNameResult {

View File

@ -36,8 +36,9 @@ use session::search_paths::PathKind;
use util::nodemap::{NodeSet, DefIdMap};
use std::any::Any;
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use owning_ref::ErasedBoxRef;
use syntax::ast;
use syntax::ext::base::SyntaxExtension;
use syntax::symbol::Symbol;
@ -201,11 +202,33 @@ impl EncodedMetadataHashes {
}
}
/// The backend's way to give the crate store access to the metadata in a library.
/// Note that it returns the raw metadata bytes stored in the library file, whether
/// it is compressed, uncompressed, some weird mix, etc.
/// rmeta files are backend independent and not handled here.
///
/// At the time of this writing, there is only one backend and one way to store
/// metadata in a library -- this trait just serves to decouple rustc_metadata from
/// the archive reader, which depends on LLVM.
pub trait MetadataLoader {
fn get_rlib_metadata(&self,
target: &Target,
filename: &Path)
-> Result<ErasedBoxRef<[u8]>, String>;
fn get_dylib_metadata(&self,
target: &Target,
filename: &Path)
-> Result<ErasedBoxRef<[u8]>, String>;
}
/// A store of Rust crates, through which their metadata
/// can be accessed.
pub trait CrateStore {
fn crate_data_as_rc_any(&self, krate: CrateNum) -> Rc<Any>;
// access to the metadata loader
fn metadata_loader(&self) -> &MetadataLoader;
// item info
fn visibility(&self, def: DefId) -> ty::Visibility;
fn visible_parent_map<'a>(&'a self) -> ::std::cell::Ref<'a, DefIdMap<DefId>>;
@ -259,7 +282,7 @@ pub trait CrateStore {
-> Option<DefId>;
fn def_key(&self, def: DefId) -> DefKey;
fn def_path(&self, def: DefId) -> hir_map::DefPath;
fn def_path_hash(&self, def: DefId) -> u64;
fn def_path_hash(&self, def: DefId) -> ich::Fingerprint;
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name>;
fn item_children(&self, did: DefId) -> Vec<def::Export>;
fn load_macro(&self, did: DefId, sess: &Session) -> LoadedMacro;
@ -275,8 +298,6 @@ pub trait CrateStore {
fn used_link_args(&self) -> Vec<String>;
// utility functions
fn metadata_filename(&self) -> &str;
fn metadata_section_name(&self, target: &Target) -> &str;
fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>;
fn used_crate_source(&self, cnum: CrateNum) -> CrateSource;
fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum>;
@ -393,7 +414,7 @@ impl CrateStore for DummyCrateStore {
fn def_path(&self, def: DefId) -> hir_map::DefPath {
bug!("relative_def_path")
}
fn def_path_hash(&self, def: DefId) -> u64 {
fn def_path_hash(&self, def: DefId) -> ich::Fingerprint {
bug!("wa")
}
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name> { bug!("struct_field_names") }
@ -413,8 +434,6 @@ impl CrateStore for DummyCrateStore {
fn used_link_args(&self) -> Vec<String> { vec![] }
// utility functions
fn metadata_filename(&self) -> &str { bug!("metadata_filename") }
fn metadata_section_name(&self, target: &Target) -> &str { bug!("metadata_section_name") }
fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>
{ vec![] }
fn used_crate_source(&self, cnum: CrateNum) -> CrateSource { bug!("used_crate_source") }
@ -427,6 +446,9 @@ impl CrateStore for DummyCrateStore {
bug!("encode_metadata")
}
fn metadata_encoding_version(&self) -> &[u8] { bug!("metadata_encoding_version") }
// access to the metadata loader
fn metadata_loader(&self) -> &MetadataLoader { bug!("metadata_loader") }
}
pub trait CrateLoader {

View File

@ -328,7 +328,7 @@ top_level_options!(
}
);
#[derive(Clone, PartialEq, Eq)]
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PrintRequest {
FileNames,
Sysroot,
@ -824,9 +824,9 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options,
linker: Option<String> = (None, parse_opt_string, [UNTRACKED],
"system linker to link outputs with"),
link_arg: Vec<String> = (vec![], parse_string_push, [UNTRACKED],
"a single extra argument to pass to the linker (can be used several times)"),
"a single extra argument to append to the linker invocation (can be used several times)"),
link_args: Option<Vec<String>> = (None, parse_opt_list, [UNTRACKED],
"extra arguments to pass to the linker (space separated)"),
"extra arguments to append to the linker invocation (space separated)"),
link_dead_code: bool = (false, parse_bool, [UNTRACKED],
"don't let linker strip dead code (turning it on can be used for code coverage)"),
lto: bool = (false, parse_bool, [TRACKED],
@ -963,7 +963,7 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
"attempt to recover from parse errors (experimental)"),
incremental: Option<String> = (None, parse_opt_string, [UNTRACKED],
"enable incremental compilation (experimental)"),
incremental_cc: bool = (false, parse_bool, [UNTRACKED],
incremental_cc: bool = (true, parse_bool, [UNTRACKED],
"enable cross-crate incremental compilation (even more experimental)"),
incremental_info: bool = (false, parse_bool, [UNTRACKED],
"print high-level information about incremental reuse (or the lack thereof)"),
@ -1029,6 +1029,10 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
"add a mapping target to the file path remapping config"),
force_unstable_if_unmarked: bool = (false, parse_bool, [TRACKED],
"force all crates to be `rustc_private` unstable"),
pre_link_arg: Vec<String> = (vec![], parse_string_push, [UNTRACKED],
"a single extra argument to prepend the linker invocation (can be used several times)"),
pre_link_args: Option<Vec<String>> = (None, parse_opt_list, [UNTRACKED],
"extra arguments to prepend to the linker invocation (space separated)"),
}
pub fn default_lib_output() -> CrateType {
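
Assuming the flag names that `options!` derives from these fields (hyphenated, `-C` for codegen options, `-Z` for debugging options), usage would look roughly like:

```
rustc main.rs -Z pre-link-arg=/tmp/extra_crt.o -C link-arg=-lfoo
```

The split mirrors the descriptions above: `pre-link-arg` prepends to the linker invocation, while `link-arg` appends.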

View File

@ -159,9 +159,14 @@ pub fn get_or_default_sysroot() -> PathBuf {
})
}
match canonicalize(env::current_exe().ok()) {
Some(mut p) => { p.pop(); p.pop(); p }
None => bug!("can't determine value for sysroot")
match env::current_exe() {
Ok(exe) => {
match canonicalize(Some(exe)) {
Some(mut p) => { p.pop(); p.pop(); return p; },
None => bug!("can't determine value for sysroot")
}
}
Err(ref e) => panic!(format!("failed to get current_exe: {}", e))
}
}
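
Extracted into a standalone sketch, the new control flow reads as follows (assuming, as rustc does, that the executable lives in `<sysroot>/bin/`):

```rust
use std::env;
use std::fs;
use std::path::PathBuf;

fn sysroot_from_current_exe() -> PathBuf {
    match env::current_exe() {
        Ok(exe) => {
            let mut p = fs::canonicalize(exe).expect("failed to canonicalize");
            p.pop(); // drop the executable name
            p.pop(); // drop `bin`, leaving the sysroot
            p
        }
        Err(e) => panic!("failed to get current_exe: {}", e),
    }
}

fn main() {
    println!("{}", sysroot_from_current_exe().display());
}
```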

View File

@ -37,19 +37,16 @@ use syntax_pos::{Span, MultiSpan, FileMap};
use rustc_back::{LinkerFlavor, PanicStrategy};
use rustc_back::target::Target;
use rustc_data_structures::flock;
use llvm;
use std::path::{Path, PathBuf};
use std::cell::{self, Cell, RefCell};
use std::collections::HashMap;
use std::env;
use std::ffi::CString;
use std::io::Write;
use std::rc::Rc;
use std::fmt;
use std::time::Duration;
use std::sync::Arc;
use libc::c_int;
mod code_stats;
pub mod config;
@ -713,8 +710,6 @@ pub fn build_session_(sopts: config::Options,
out_of_fuel: Cell::new(false),
};
init_llvm(&sess);
sess
}
@ -743,55 +738,6 @@ pub enum IncrCompSession {
}
}
fn init_llvm(sess: &Session) {
unsafe {
// Before we touch LLVM, make sure that multithreading is enabled.
use std::sync::Once;
static INIT: Once = Once::new();
static mut POISONED: bool = false;
INIT.call_once(|| {
if llvm::LLVMStartMultithreaded() != 1 {
// use an extra bool to make sure that all future usage of LLVM
// cannot proceed despite the Once not running more than once.
POISONED = true;
}
configure_llvm(sess);
});
if POISONED {
bug!("couldn't enable multi-threaded LLVM");
}
}
}
unsafe fn configure_llvm(sess: &Session) {
let mut llvm_c_strs = Vec::new();
let mut llvm_args = Vec::new();
{
let mut add = |arg: &str| {
let s = CString::new(arg).unwrap();
llvm_args.push(s.as_ptr());
llvm_c_strs.push(s);
};
add("rustc"); // fake program name
if sess.time_llvm_passes() { add("-time-passes"); }
if sess.print_llvm_passes() { add("-debug-pass=Structure"); }
for arg in &sess.opts.cg.llvm_args {
add(&(*arg));
}
}
llvm::LLVMInitializePasses();
llvm::initialize_available_targets();
llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,
llvm_args.as_ptr());
}
pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! {
let emitter: Box<Emitter> = match output {
config::ErrorOutputType::HumanReadable(color_config) => {

View File

@ -619,8 +619,6 @@ pub fn get_vtable_methods<'a, 'tcx>(
debug!("get_vtable_methods({:?})", trait_ref);
supertraits(tcx, trait_ref).flat_map(move |trait_ref| {
tcx.populate_implementations_for_trait_if_necessary(trait_ref.def_id());
let trait_methods = tcx.associated_items(trait_ref.def_id())
.filter(|item| item.kind == ty::AssociatedKind::Method);
@ -782,3 +780,19 @@ impl<'tcx> TraitObligation<'tcx> {
ty::Binder(self.predicate.skip_binder().self_ty())
}
}
pub fn provide(providers: &mut ty::maps::Providers) {
*providers = ty::maps::Providers {
is_object_safe: object_safety::is_object_safe_provider,
specialization_graph_of: specialize::specialization_graph_provider,
..*providers
};
}
pub fn provide_extern(providers: &mut ty::maps::Providers) {
*providers = ty::maps::Providers {
is_object_safe: object_safety::is_object_safe_provider,
specialization_graph_of: specialize::specialization_graph_provider,
..*providers
};
}

View File

@ -77,25 +77,6 @@ pub enum MethodViolationCode {
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn is_object_safe(self, trait_def_id: DefId) -> bool {
// Because we query yes/no results frequently, we keep a cache:
let def = self.trait_def(trait_def_id);
let result = def.object_safety().unwrap_or_else(|| {
let result = self.object_safety_violations(trait_def_id).is_empty();
// Record just a yes/no result in the cache; this is what is
// queried most frequently. Note that this may overwrite a
// previous result, but always with the same thing.
def.set_object_safety(result);
result
});
debug!("is_object_safe({:?}) = {}", trait_def_id, result);
result
}
/// Returns the object safety violations that affect
/// astconv - currently, Self in supertraits. This is needed
@ -391,3 +372,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
error
}
}
pub(super) fn is_object_safe_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
trait_def_id: DefId)
-> bool {
tcx.object_safety_violations(trait_def_id).is_empty()
}
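
With the provider registered, callers no longer consult the hand-rolled cache on `TraitDef`; the query engine memoizes the answer per `DefId`. A hedged sketch of a call site:

```rust
// Hypothetical caller: `is_object_safe` is now a query on `tcx`
// (generated by `define_maps!`); the first call runs the provider
// above, subsequent calls hit the query cache.
fn check_dyn_compatible<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                  trait_def_id: DefId)
                                  -> bool {
    tcx.is_object_safe(trait_def_id)
}
```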

View File

@ -900,96 +900,50 @@ fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>(
// In either case, we handle this by not adding a
// candidate for an impl if it contains a `default`
// type.
let opt_node_item = assoc_ty_def(selcx,
impl_data.impl_def_id,
obligation.predicate.item_name);
let new_candidate = if let Some(node_item) = opt_node_item {
let is_default = if node_item.node.is_from_trait() {
// If true, the impl inherited a `type Foo = Bar`
// given in the trait, which is implicitly default.
// Otherwise, the impl did not specify `type` and
// neither did the trait:
//
// ```rust
// trait Foo { type T; }
// impl Foo for Bar { }
// ```
//
// This is an error, but it will be
// reported in `check_impl_items_against_trait`.
// We accept it here but will flag it as
// an error when we confirm the candidate
// (which will ultimately lead to `normalize_to_error`
// being invoked).
node_item.item.defaultness.has_value()
} else {
node_item.item.defaultness.is_default() ||
selcx.tcx().impl_is_default(node_item.node.def_id())
};
let node_item = assoc_ty_def(selcx,
impl_data.impl_def_id,
obligation.predicate.item_name);
// Only reveal a specializable default if we're past type-checking
// and the obligation is monomorphic, otherwise passes such as
// transmute checking and polymorphic MIR optimizations could
// get a result which isn't correct for all monomorphizations.
if !is_default {
let is_default = if node_item.node.is_from_trait() {
// If true, the impl inherited a `type Foo = Bar`
// given in the trait, which is implicitly default.
// Otherwise, the impl did not specify `type` and
// neither did the trait:
//
// ```rust
// trait Foo { type T; }
// impl Foo for Bar { }
// ```
//
// This is an error, but it will be
// reported in `check_impl_items_against_trait`.
// We accept it here but will flag it as
// an error when we confirm the candidate
// (which will ultimately lead to `normalize_to_error`
// being invoked).
node_item.item.defaultness.has_value()
} else {
node_item.item.defaultness.is_default() ||
selcx.tcx().impl_is_default(node_item.node.def_id())
};
// Only reveal a specializable default if we're past type-checking
// and the obligation is monomorphic, otherwise passes such as
// transmute checking and polymorphic MIR optimizations could
// get a result which isn't correct for all monomorphizations.
let new_candidate = if !is_default {
Some(ProjectionTyCandidate::Select)
} else if selcx.projection_mode() == Reveal::All {
assert!(!poly_trait_ref.needs_infer());
if !poly_trait_ref.needs_subst() {
Some(ProjectionTyCandidate::Select)
} else if selcx.projection_mode() == Reveal::All {
assert!(!poly_trait_ref.needs_infer());
if !poly_trait_ref.needs_subst() {
Some(ProjectionTyCandidate::Select)
} else {
None
}
} else {
None
}
} else {
// This is saying that neither the trait nor
// the impl contain a definition for this
// associated type. Normally this situation
// could only arise through a compiler bug --
// if the user wrote a bad item name, it
// should have failed in astconv. **However**,
// at coherence-checking time, we only look at
// the topmost impl (we don't even consider
// the trait itself) for the definition -- and
// so in that case it may be that the trait
// *DOES* have a declaration, but we don't see
// it, and we end up in this branch.
//
// This is kind of tricky to handle actually.
// For now, we just unconditionally ICE,
// because otherwise, examples like the
// following will succeed:
//
// ```
// trait Assoc {
// type Output;
// }
//
// impl<T> Assoc for T {
// default type Output = bool;
// }
//
// impl Assoc for u8 {}
// impl Assoc for u16 {}
//
// trait Foo {}
// impl Foo for <u8 as Assoc>::Output {}
// impl Foo for <u16 as Assoc>::Output {}
// return None;
// }
// ```
//
// The essential problem here is that the
// projection fails, leaving two unnormalized
// types, which appear not to unify -- so the
// overlap check succeeds, when it should
// fail.
span_bug!(obligation.cause.span,
"Tried to project an inherited associated type during \
coherence checking, which is currently not supported.");
None
};
candidate_set.vec.extend(new_candidate);
}
super::VtableParam(..) => {
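
The "specializable default" the comments describe can be reproduced with a small nightly-only program, assembled from the example already quoted in the comments above:

```rust
#![feature(specialization)]

trait Assoc {
    type Output;
}

impl<T> Assoc for T {
    // A `default` item is specializable, so `<T as Assoc>::Output`
    // must stay opaque for generic `T` during type-checking; only
    // monomorphic, post-typeck contexts (`Reveal::All`) may see `bool`.
    default type Output = bool;
}

impl Assoc for u8 {
    type Output = u16; // final (non-default): normalizes even in typeck
}

fn main() {
    // Fully monomorphic, so the projection resolves to `u16`.
    let _x: <u8 as Assoc>::Output = 0u16;
}
```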
@ -1274,35 +1228,25 @@ fn confirm_impl_candidate<'cx, 'gcx, 'tcx>(
let VtableImplData { substs, nested, impl_def_id } = impl_vtable;
let tcx = selcx.tcx();
let trait_ref = obligation.predicate.trait_ref;
let assoc_ty = assoc_ty_def(selcx, impl_def_id, obligation.predicate.item_name);
match assoc_ty {
Some(node_item) => {
let ty = if !node_item.item.defaultness.has_value() {
// This means that the impl is missing a definition for the
// associated type. This error will be reported by the type
// checker method `check_impl_items_against_trait`, so here we
// just return TyError.
debug!("confirm_impl_candidate: no associated type {:?} for {:?}",
node_item.item.name,
obligation.predicate.trait_ref);
tcx.types.err
} else {
tcx.type_of(node_item.item.def_id)
};
let substs = translate_substs(selcx.infcx(), impl_def_id, substs, node_item.node);
Progress {
ty: ty.subst(tcx, substs),
obligations: nested,
cacheable: true
}
}
None => {
span_bug!(obligation.cause.span,
"No associated type for {:?}",
trait_ref);
}
let ty = if !assoc_ty.item.defaultness.has_value() {
// This means that the impl is missing a definition for the
// associated type. This error will be reported by the type
// checker method `check_impl_items_against_trait`, so here we
// just return TyError.
debug!("confirm_impl_candidate: no associated type {:?} for {:?}",
assoc_ty.item.name,
obligation.predicate.trait_ref);
tcx.types.err
} else {
tcx.type_of(assoc_ty.item.def_id)
};
let substs = translate_substs(selcx.infcx(), impl_def_id, substs, assoc_ty.node);
Progress {
ty: ty.subst(tcx, substs),
obligations: nested,
cacheable: true
}
}
@ -1315,27 +1259,43 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>(
selcx: &SelectionContext<'cx, 'gcx, 'tcx>,
impl_def_id: DefId,
assoc_ty_name: ast::Name)
-> Option<specialization_graph::NodeItem<ty::AssociatedItem>>
-> specialization_graph::NodeItem<ty::AssociatedItem>
{
let trait_def_id = selcx.tcx().impl_trait_ref(impl_def_id).unwrap().def_id;
let trait_def = selcx.tcx().trait_def(trait_def_id);
let tcx = selcx.tcx();
let trait_def_id = tcx.impl_trait_ref(impl_def_id).unwrap().def_id;
let trait_def = tcx.trait_def(trait_def_id);
if !trait_def.is_complete(selcx.tcx()) {
let impl_node = specialization_graph::Node::Impl(impl_def_id);
for item in impl_node.items(selcx.tcx()) {
if item.kind == ty::AssociatedKind::Type && item.name == assoc_ty_name {
return Some(specialization_graph::NodeItem {
node: specialization_graph::Node::Impl(impl_def_id),
item: item,
});
}
// This function may be called while we are still building the
// specialization graph that is queried below (via TraitDef::ancestors()),
// so, in order to avoid unnecessary infinite recursion, we manually look
// for the associated item at the given impl.
// If there is no such item in that impl, this function will fail with a
// cycle error if the specialization graph is currently being built.
let impl_node = specialization_graph::Node::Impl(impl_def_id);
for item in impl_node.items(tcx) {
if item.kind == ty::AssociatedKind::Type && item.name == assoc_ty_name {
return specialization_graph::NodeItem {
node: specialization_graph::Node::Impl(impl_def_id),
item: item,
};
}
None
}
if let Some(assoc_item) = trait_def
.ancestors(tcx, impl_def_id)
.defs(tcx, assoc_ty_name, ty::AssociatedKind::Type)
.next() {
assoc_item
} else {
trait_def
.ancestors(impl_def_id)
.defs(selcx.tcx(), assoc_ty_name, ty::AssociatedKind::Type)
.next()
// This is saying that neither the trait nor
// the impl contain a definition for this
// associated type. Normally this situation
// could only arise through a compiler bug --
// if the user wrote a bad item name, it
// should have failed in astconv.
bug!("No associated type `{}` for {}",
assoc_ty_name,
tcx.item_path_str(impl_def_id))
}
}

View File

@ -27,6 +27,7 @@ use ty::subst::{Subst, Substs};
use traits::{self, Reveal, ObligationCause};
use ty::{self, TyCtxt, TypeFoldable};
use syntax_pos::DUMMY_SP;
use std::rc::Rc;
pub mod specialization_graph;
@ -118,7 +119,7 @@ pub fn find_associated_item<'a, 'tcx>(
let trait_def_id = tcx.trait_id_of_impl(impl_data.impl_def_id).unwrap();
let trait_def = tcx.trait_def(trait_def_id);
let ancestors = trait_def.ancestors(impl_data.impl_def_id);
let ancestors = trait_def.ancestors(tcx, impl_data.impl_def_id);
match ancestors.defs(tcx, item.name, item.kind).next() {
Some(node_item) => {
let substs = tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
@ -285,3 +286,62 @@ impl SpecializesCache {
self.map.insert((a, b), result);
}
}
// Query provider for `specialization_graph_of`.
pub(super) fn specialization_graph_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
trait_id: DefId)
-> Rc<specialization_graph::Graph> {
let mut sg = specialization_graph::Graph::new();
let mut trait_impls: Vec<DefId> = tcx.trait_impls_of(trait_id).iter().collect();
// The coherence checking implementation seems to rely on impls being
// iterated over (roughly) in definition order, so we are sorting by
// negated CrateNum (so remote definitions are visited first) and then
// by a flattened version of the DefIndex.
trait_impls.sort_unstable_by_key(|def_id| {
(-(def_id.krate.as_u32() as i64),
def_id.index.address_space().index(),
def_id.index.as_array_index())
});
for impl_def_id in trait_impls {
if impl_def_id.is_local() {
// This is where impl overlap checking happens:
let insert_result = sg.insert(tcx, impl_def_id);
// Report error if there was one.
if let Err(overlap) = insert_result {
let mut err = struct_span_err!(tcx.sess,
tcx.span_of_impl(impl_def_id).unwrap(),
E0119,
"conflicting implementations of trait `{}`{}:",
overlap.trait_desc,
overlap.self_desc.clone().map_or(String::new(),
|ty| {
format!(" for type `{}`", ty)
}));
match tcx.span_of_impl(overlap.with_impl) {
Ok(span) => {
err.span_label(span, format!("first implementation here"));
err.span_label(tcx.span_of_impl(impl_def_id).unwrap(),
format!("conflicting implementation{}",
overlap.self_desc
.map_or(String::new(),
|ty| format!(" for `{}`", ty))));
}
Err(cname) => {
err.note(&format!("conflicting implementation in crate `{}`", cname));
}
}
err.emit();
}
} else {
let parent = tcx.impl_parent(impl_def_id).unwrap_or(trait_id);
sg.record_impl_from_cstore(tcx, parent, impl_def_id)
}
}
Rc::new(sg)
}
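
For reference, the overlap reported through `sg.insert` above is the familiar E0119; a minimal trigger:

```rust
trait Greet {}

// The blanket impl already covers `u8`...
impl<T> Greet for T {}

// ...so this impl conflicts:
// error[E0119]: conflicting implementations of trait `Greet` for type `u8`
impl Greet for u8 {}

fn main() {}
```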

View File

@ -12,8 +12,9 @@ use super::{OverlapError, specializes};
use hir::def_id::DefId;
use traits::{self, Reveal};
use ty::{self, TyCtxt, TraitDef, TypeFoldable};
use ty::{self, TyCtxt, TypeFoldable};
use ty::fast_reject::{self, SimplifiedType};
use std::rc::Rc;
use syntax::ast::Name;
use util::nodemap::{DefIdMap, FxHashMap};
@ -301,18 +302,19 @@ impl<'a, 'gcx, 'tcx> Node {
}
}
pub struct Ancestors<'a> {
trait_def: &'a TraitDef,
pub struct Ancestors {
trait_def_id: DefId,
specialization_graph: Rc<Graph>,
current_source: Option<Node>,
}
impl<'a> Iterator for Ancestors<'a> {
impl Iterator for Ancestors {
type Item = Node;
fn next(&mut self) -> Option<Node> {
let cur = self.current_source.take();
if let Some(Node::Impl(cur_impl)) = cur {
let parent = self.trait_def.specialization_graph.borrow().parent(cur_impl);
if parent == self.trait_def.def_id {
let parent = self.specialization_graph.parent(cur_impl);
if parent == self.trait_def_id {
self.current_source = Some(Node::Trait(parent));
} else {
self.current_source = Some(Node::Impl(parent));
@ -336,7 +338,7 @@ impl<T> NodeItem<T> {
}
}
impl<'a, 'gcx, 'tcx> Ancestors<'a> {
impl<'a, 'gcx, 'tcx> Ancestors {
/// Search the items from the given ancestors, returning each definition
/// with the given name and the given kind.
#[inline] // FIXME(#35870) Avoid closures being unexported due to impl Trait.
@ -351,9 +353,14 @@ impl<'a, 'gcx, 'tcx> Ancestors<'a> {
/// Walk up the specialization ancestors of a given impl, starting with that
/// impl itself.
pub fn ancestors<'a>(trait_def: &'a TraitDef, start_from_impl: DefId) -> Ancestors<'a> {
pub fn ancestors(tcx: TyCtxt,
trait_def_id: DefId,
start_from_impl: DefId)
-> Ancestors {
let specialization_graph = tcx.specialization_graph_of(trait_def_id);
Ancestors {
trait_def: trait_def,
trait_def_id,
specialization_graph,
current_source: Some(Node::Impl(start_from_impl)),
}
}
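
Call sites change only in that the graph now arrives via `tcx`; a hedged sketch mirroring `find_associated_item` earlier in this diff:

```rust
// Walk from the impl towards the trait, taking the nearest definition
// of the named associated type.
fn nearest_assoc_type_def<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                    trait_def_id: DefId,
                                    impl_def_id: DefId,
                                    item_name: ast::Name)
                                    -> Option<NodeItem<ty::AssociatedItem>> {
    tcx.trait_def(trait_def_id)
       .ancestors(tcx, impl_def_id)
       .defs(tcx, item_name, ty::AssociatedKind::Type)
       .next()
}
```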

View File

@ -18,10 +18,12 @@ use middle::region::RegionMaps;
use mir;
use mir::transform::{MirSuite, MirPassIndex};
use session::CompileResult;
use traits::specialization_graph;
use ty::{self, CrateInherentImpls, Ty, TyCtxt};
use ty::item_path;
use ty::steal::Steal;
use ty::subst::Substs;
use ty::fast_reject::SimplifiedType;
use util::nodemap::{DefIdSet, NodeSet};
use rustc_data_structures::indexed_vec::IndexVec;
@ -98,6 +100,15 @@ impl Key for (CrateNum, DefId) {
}
}
impl Key for (DefId, SimplifiedType) {
fn map_crate(&self) -> CrateNum {
self.0.krate
}
fn default_span(&self, tcx: TyCtxt) -> Span {
self.0.default_span(tcx)
}
}
impl<'tcx> Key for (DefId, &'tcx Substs<'tcx>) {
fn map_crate(&self) -> CrateNum {
self.0.krate
@ -391,6 +402,24 @@ impl<'tcx> QueryDescription for queries::is_mir_available<'tcx> {
}
}
impl<'tcx> QueryDescription for queries::trait_impls_of<'tcx> {
fn describe(tcx: TyCtxt, def_id: DefId) -> String {
format!("trait impls of `{}`", tcx.item_path_str(def_id))
}
}
impl<'tcx> QueryDescription for queries::relevant_trait_impls_for<'tcx> {
fn describe(tcx: TyCtxt, (def_id, ty): (DefId, SimplifiedType)) -> String {
format!("relevant impls for: `({}, {:?})`", tcx.item_path_str(def_id), ty)
}
}
impl<'tcx> QueryDescription for queries::is_object_safe<'tcx> {
fn describe(tcx: TyCtxt, def_id: DefId) -> String {
format!("determine object safety of trait `{}`", tcx.item_path_str(def_id))
}
}
macro_rules! define_maps {
(<$tcx:tt>
$($(#[$attr:meta])*
@ -592,7 +621,7 @@ macro_rules! define_map_struct {
output: $output:tt) => {
define_map_struct! {
tcx: $tcx,
ready: ([pub] $attrs $name),
ready: ([] $attrs $name),
input: ($($input)*),
output: $output
}
@ -820,6 +849,13 @@ define_maps! { <'tcx>
[] item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> Rc<BTreeMap<hir::BodyId, hir::Body>>,
[] const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool,
[] is_mir_available: IsMirAvailable(DefId) -> bool,
[] trait_impls_of: TraitImpls(DefId) -> ty::trait_def::TraitImpls,
// Note that TraitDef::for_each_relevant_impl() will do type simplification for you.
[] relevant_trait_impls_for: relevant_trait_impls_for((DefId, SimplifiedType))
-> ty::trait_def::TraitImpls,
[] specialization_graph_of: SpecializationGraph(DefId) -> Rc<specialization_graph::Graph>,
[] is_object_safe: ObjectSafety(DefId) -> bool,
}
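
All four additions follow the calling convention `define_maps!` generates: each query is a method on `tcx`, memoized by its key. A hedged sketch:

```rust
// Hypothetical inspection of the new queries for one trait.
fn inspect_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_def_id: DefId) {
    let impls = tcx.trait_impls_of(trait_def_id);           // keyed by DefId
    let _graph = tcx.specialization_graph_of(trait_def_id); // Rc<Graph>, built once
    let safe = tcx.is_object_safe(trait_def_id);            // replaces the TraitDef cache
    debug!("{} impls, object safe: {}", impls.iter().count(), safe);
}
```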
fn coherent_trait_dep_node((_, def_id): (CrateNum, DefId)) -> DepNode<DefId> {
@ -859,3 +895,7 @@ fn mir_keys(_: CrateNum) -> DepNode<DefId> {
fn crate_variances(_: CrateNum) -> DepNode<DefId> {
DepNode::CrateVariances
}
fn relevant_trait_impls_for((def_id, _): (DefId, SimplifiedType)) -> DepNode<DefId> {
DepNode::TraitImpls(def_id)
}

Some files were not shown because too many files have changed in this diff