Auto merge of #39199 - alexcrichton:rollup, r=alexcrichton

Rollup of 28 pull requests

- Successful merges: #38603, #38761, #38842, #38847, #38955, #38966, #39062, #39068, #39077, #39111, #39112, #39114, #39118, #39120, #39132, #39135, #39138, #39142, #39143, #39146, #39157, #39166, #39167, #39168, #39179, #39184, #39195, #39197
- Failed merges: #39060, #39145
Author: bors
Date: 2017-01-21 00:56:18 +00:00
Commit: aedb49cbc9
151 changed files with 4975 additions and 928 deletions

View File

@ -8,13 +8,13 @@ git:
depth: 1
submodules: false
osx_image: xcode8.2
matrix:
fast_finish: true
include:
# Linux builders, all docker images
- env: IMAGE=arm-android DEPLOY=1
- env: IMAGE=android DEPLOY=1
- env: IMAGE=cross DEPLOY=1
- env: IMAGE=linux-tested-targets DEPLOY=1
- env: IMAGE=dist-arm-linux DEPLOY=1
- env: IMAGE=dist-armv7-aarch64-linux DEPLOY=1
- env: IMAGE=dist-freebsd DEPLOY=1
@ -23,16 +23,17 @@ matrix:
- env: IMAGE=dist-powerpc-linux DEPLOY=1
- env: IMAGE=dist-powerpc64-linux DEPLOY=1
- env: IMAGE=dist-s390x-linux-netbsd DEPLOY=1
- env: IMAGE=i686-gnu DEPLOY=1
- env: IMAGE=dist-x86-linux DEPLOY=1
- env: IMAGE=emscripten
- env: IMAGE=i686-gnu
- env: IMAGE=i686-gnu-nopt
- env: IMAGE=x86_64-gnu DEPLOY=1
- env: IMAGE=x86_64-gnu
- env: IMAGE=x86_64-gnu-full-bootstrap
- env: IMAGE=x86_64-gnu-aux
- env: IMAGE=x86_64-gnu-debug
- env: IMAGE=x86_64-gnu-nopt
- env: IMAGE=x86_64-gnu-make
- env: IMAGE=x86_64-gnu-llvm-3.7 ALLOW_PR=1 RUST_BACKTRACE=1
- env: IMAGE=x86_64-musl DEPLOY=1
- env: IMAGE=x86_64-gnu-distcheck
# OSX builders
@ -41,6 +42,7 @@ matrix:
RUST_CONFIGURE_ARGS=--build=x86_64-apple-darwin
SRC=.
os: osx
osx_image: xcode8.2
before_script: &osx_before_script >
ulimit -c unlimited
install: &osx_install_sccache >
@ -50,7 +52,7 @@ matrix:
echo 'bt all' > cmds;
for file in $(ls /cores); do
echo core file $file;
lldb -c $file `which ld` -b -s cmds;
lldb -c /cores/$file `which ld` -b -s cmds;
done
- env: >
@ -59,6 +61,7 @@ matrix:
SRC=.
DEPLOY=1
os: osx
osx_image: xcode8.2
before_script: *osx_before_script
install: *osx_install_sccache
after_failure: *osx_after_failure
@ -67,6 +70,7 @@ matrix:
RUST_CONFIGURE_ARGS=--build=x86_64-apple-darwin --disable-rustbuild
SRC=.
os: osx
osx_image: xcode8.2
before_script: *osx_before_script
install: *osx_install_sccache
after_failure: *osx_after_failure
@ -76,6 +80,7 @@ matrix:
SRC=.
DEPLOY=1
os: osx
osx_image: xcode8.2
before_script: *osx_before_script
install: *osx_install_sccache
after_failure: *osx_after_failure
@ -119,11 +124,9 @@ before_deploy:
- mkdir -p deploy/$TRAVIS_COMMIT
- >
if [ "$TRAVIS_OS_NAME" == "osx" ]; then
cp build/dist/*.tar.gz deploy/$TRAVIS_COMMIT &&
find "deploy/$TRAVIS_COMMIT" -maxdepth 1 -type f -exec sh -c 'shasum -a 256 -b "{}" > "{}.sha256"' \;;
cp build/dist/*.tar.gz deploy/$TRAVIS_COMMIT;
else
cp obj/build/dist/*.tar.gz deploy/$TRAVIS_COMMIT &&
find "deploy/$TRAVIS_COMMIT" -maxdepth 1 -type f -exec sh -c 'sha256sum -b "{}" > "{}.sha256"' \;;
cp obj/build/dist/*.tar.gz deploy/$TRAVIS_COMMIT;
fi
deploy:

View File

@ -1,3 +1,244 @@
Version 1.15.0 (2017-02-02)
===========================
Language
--------
* Basic procedural macros allowing custom `#[derive]`, aka "macros 1.1", are
stable. This allows popular code-generating crates like Serde and Diesel to
work ergonomically. [RFC 1681].
* [Tuple structs may be empty. Unary and empty tuple structs may be instantiated
with curly braces][36868]. Part of [RFC 1506].
* [A number of minor changes to name resolution have been activated][37127].
They add up to more consistent semantics, allowing for future evolution of
Rust macros. Specified in [RFC 1560], see its section on ["changes"] for
details of what is different. The breaking changes here have been transitioned
through the [`legacy_imports`] lint since 1.14, with no known regressions.
* [In `macro_rules`, `path` fragments can now be parsed as type parameter
bounds][38279]
* [`?Sized` can be used in `where` clauses][37791]
* [There is now a limit on the size of monomorphized types and it can be
modified with the `#![type_size_limit]` crate attribute, similarly to
the `#![recursion_limit]` attribute][37789]
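A minimal sketch (not taken from the release itself) of two of the language changes above: brace syntax for empty and unary tuple structs, and `?Sized` written in a `where` clause:

```rust
struct Empty();
struct Wrapper(u32);

fn print_it<T>(value: &T)
where
    T: std::fmt::Display + ?Sized, // `?Sized` is now accepted in `where` clauses
{
    println!("{}", value);
}

fn main() {
    let _empty = Empty {};            // empty tuple struct, brace syntax
    let wrapped = Wrapper { 0: 42 };  // unary tuple struct, brace syntax
    print_it("hello");                // `str` is unsized, hence `?Sized`
    print_it(&wrapped.0);
}
```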
Compiler
--------
* [On Windows, the compiler will apply dllimport attributes when linking to
extern functions][37973]. Additional attributes and flags can control which
library kind is linked and its name. [RFC 1717].
* [Rust-ABI symbols are no longer exported from cdylibs][38117]
* [The `--test` flag works with procedural macro crates][38107]
* [Fix `extern "aapcs" fn` ABI][37814]
* [The `-C no-stack-check` flag is deprecated][37636]. It does nothing.
* [The `format!` expander recognizes incorrect `printf` and shell-style
formatting directives and suggests the correct format][37613].
* [Only report one error for all unused imports in an import list][37456]
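For context, a hedged sketch of the `#[link]` attribute that controls which library kind is linked and under what name; the new dllimport handling applies when such a declaration is linked on Windows, while this example merely links the system math library on a Unix host:

```rust
// Declare an external library so the compiler knows which kind of library
// the symbols come from (and can emit dllimport where appropriate).
#[link(name = "m", kind = "dylib")]
extern "C" {
    fn cos(x: f64) -> f64;
}

fn main() {
    let y = unsafe { cos(0.0) };
    assert!((y - 1.0).abs() < 1e-10);
}
```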
Compiler Performance
--------------------
* [Avoid unnecessary `mk_ty` calls in `Ty::super_fold_with`][37705]
* [Avoid more unnecessary `mk_ty` calls in `Ty::super_fold_with`][37979]
* [Don't clone in `UnificationTable::probe`][37848]
* [Remove `scope_auxiliary` to cut RSS by 10%][37764]
* [Use small vectors in type walker][37760]
* [Macro expansion performance was improved][37701]
* [Change `HirVec<P<T>>` to `HirVec<T>` in `hir::Expr`][37642]
* [Replace FNV with a faster hash function][37229]
Stabilized APIs
---------------
* [`std::iter::Iterator::min_by`]
* [`std::iter::Iterator::max_by`]
* [`std::os::*::fs::FileExt`]
* [`std::sync::atomic::Atomic*::get_mut`]
* [`std::sync::atomic::Atomic*::into_inner`]
* [`std::vec::IntoIter::as_slice`]
* [`std::vec::IntoIter::as_mut_slice`]
* [`std::sync::mpsc::Receiver::try_iter`]
* [`std::os::unix::process::CommandExt::before_exec`]
* [`std::rc::Rc::strong_count`]
* [`std::rc::Rc::weak_count`]
* [`std::sync::Arc::strong_count`]
* [`std::sync::Arc::weak_count`]
* [`std::char::encode_utf8`]
* [`std::char::encode_utf16`]
* [`std::cell::Ref::clone`]
* [`std::io::Take::into_inner`]
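A small sketch exercising a few of the newly stabilized APIs:

```rust
use std::rc::Rc;
use std::sync::mpsc;

fn main() {
    // Iterator::min_by / max_by take a comparator closure.
    let words = ["cat", "elephant", "fox"];
    let longest = words.iter().max_by(|a, b| a.len().cmp(&b.len()));
    let shortest = words.iter().min_by(|a, b| a.len().cmp(&b.len()));
    assert_eq!(longest, Some(&"elephant"));
    assert_eq!(shortest, Some(&"cat"));

    // Rc::strong_count / Rc::weak_count
    let rc = Rc::new(5);
    let _rc2 = rc.clone();
    assert_eq!(Rc::strong_count(&rc), 2);
    assert_eq!(Rc::weak_count(&rc), 0);

    // char::encode_utf8 writes into a caller-provided buffer.
    let mut buf = [0u8; 4];
    let encoded: &str = 'ß'.encode_utf8(&mut buf);
    assert_eq!(encoded, "ß");

    // vec::IntoIter::as_slice gives a view of the remaining elements.
    let iter = vec![1, 2, 3].into_iter();
    assert_eq!(iter.as_slice(), &[1, 2, 3]);

    // mpsc::Receiver::try_iter drains queued messages without blocking.
    let (tx, rx) = mpsc::channel();
    tx.send(1).unwrap();
    tx.send(2).unwrap();
    assert_eq!(rx.try_iter().collect::<Vec<_>>(), vec![1, 2]);
}
```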
Libraries
---------
* [The standard sorting algorithm has been rewritten for dramatic performance
improvements][38192]. It is a hybrid merge sort, drawing influences from
Timsort. Previously it was a naive merge sort.
* [`Iterator::nth` no longer has a `Sized` bound][38134]
* [`Extend<&T>` is specialized for `Vec` where `T: Copy`][38182] to improve
performance.
* [`chars().count()` is much faster][37888] and so are [`chars().last()`
and `char_indices().last()`][37882]
* [Fix ARM Objective-C ABI in `std::env::args`][38146]
* [Chinese characters display correctly in `fmt::Debug`][37855]
* [Derive `Default` for `Duration`][37699]
* [Support creation of anonymous pipes on WinXP/2k][37677]
* [`mpsc::RecvTimeoutError` implements `Error`][37527]
* [Don't pass overlapped handles to processes][38835]
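A short sketch touching two of the library items above; the calling code is unchanged from before, it simply runs faster:

```rust
fn main() {
    // Extending a Vec<T: Copy> from a slice now hits the specialized fast path.
    let mut v = vec![1u32];
    v.extend(&[2, 3, 4]);
    assert_eq!(v, [1, 2, 3, 4]);

    // chars().count() and chars().last() are faster on multi-byte strings.
    let s = "héllo wörld";
    assert_eq!(s.chars().count(), 11);
    assert_eq!(s.chars().last(), Some('d'));
}
```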
Cargo
-----
* [In this release, Cargo build scripts no longer have access to the `OUT_DIR`
environment variable at build time via `env!("OUT_DIR")`][cargo/3368]. They
should instead check the variable at runtime with `std::env`. That the value
was set at build time was a bug, and incorrect when cross-compiling. This
change is known to cause breakage.
* [Add `--all` flag to `cargo test`][cargo/3221]
* [Compile statically against the MSVC CRT][cargo/3363]
* [Mix feature flags into fingerprint/metadata shorthash][cargo/3102]
* [Link OpenSSL statically on OSX][cargo/3311]
* [Apply new fingerprinting to build dir outputs][cargo/3310]
* [Test for bad path overrides with summaries][cargo/3336]
* [Require `cargo install --vers` to take a semver version][cargo/3338]
* [Fix retrying crate downloads for network errors][cargo/3348]
* [Implement string lookup for `build.rustflags` config key][cargo/3356]
* [Emit more info on --message-format=json][cargo/3319]
* [Assume `build.rs` in the same directory as `Cargo.toml` is a build script][cargo/3361]
* [Don't ignore errors in workspace manifest][cargo/3409]
* [Fix `--message-format JSON` when rustc emits non-JSON warnings][cargo/3410]
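As a rough sketch of the `OUT_DIR` change above (the generated file name and constant are made up for illustration), a build script should read the variable when it runs rather than capturing it with `env!`:

```rust
// build.rs — read OUT_DIR at runtime; `env!("OUT_DIR")` is no longer
// available to build scripts at their own compile time.
use std::env;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;

fn main() {
    let out_dir = PathBuf::from(env::var("OUT_DIR").expect("Cargo sets OUT_DIR"));
    // Write a generated file into OUT_DIR (hypothetical example content).
    let mut f = File::create(out_dir.join("generated.rs")).unwrap();
    f.write_all(b"pub const GENERATED: u32 = 42;\n").unwrap();
}
```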
Tooling
-------
* [Test runners (binaries built with `--test`) now support a `--list` argument
that lists the tests it contains][38185]
* [Test runners now support a `--exact` argument that makes the test filter
match exactly, instead of matching only a substring of the test name][38181]
* [rustdoc supports a `--playground-url` flag][37763]
* [rustdoc provides more details about `#[should_panic]` errors][37749]
Misc
----
* [The Rust build system is now written in Rust][37817]. The Makefiles may
continue to be used in this release by passing `--disable-rustbuild` to the
configure script, but they will be deleted soon. Note that the new build
system uses a different on-disk layout that will likely affect any scripts
building Rust.
* [Rust supports i686-unknown-openbsd][38086]. Tier 3 support. No testing or
releases.
* [Rust supports the MSP430][37627]. Tier 3 support. No testing or releases.
* [Rust supports the ARMv5TE architecture][37615]. Tier 3 support. No testing or
releases.
Compatibility Notes
-------------------
* [A number of minor changes to name resolution have been activated][37127].
They add up to more consistent semantics, allowing for future evolution of
Rust macros. Specified in [RFC 1560], see its section on ["changes"] for
details of what is different. The breaking changes here have been transitioned
through the [`legacy_imports`] lint since 1.14, with no known regressions.
* [In this release, Cargo build scripts no longer have access to the `OUT_DIR`
environment variable at build time via `env!("OUT_DIR")`][cargo/3368]. They
should instead check the variable at runtime with `std::env`. That the value
was set at build time was a bug, and incorrect when cross-compiling. This
change is known to cause breakage.
* [Higher-ranked lifetimes are no longer allowed to appear _only_ in associated
types][33685]. The [`hr_lifetime_in_assoc_type` lint] has been a warning since
1.10 and is now an error by default. It will become a hard error in the near
future.
* [The semantics relating modules to file system directories are changing in
minor ways][37602]. This is captured in the new `legacy_directory_ownership`
lint, which is a warning in this release, and will become a hard error in the
future.
* [Rust-ABI symbols are no longer exported from cdylibs][38117]
* [Once `Peekable` peeks a `None` it will return that `None` without re-querying
the underlying iterator][37834]
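A minimal sketch of the `Peekable` behavior described above:

```rust
fn main() {
    let mut iter = vec![1].into_iter().peekable();
    assert_eq!(iter.next(), Some(1));
    // Peeking past the end observes `None`...
    assert_eq!(iter.peek(), None);
    // ...and later calls return `None` without querying the inner iterator again.
    assert_eq!(iter.next(), None);
}
```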
["changes"]: https://github.com/rust-lang/rfcs/blob/master/text/1560-name-resolution.md#changes-to-name-resolution-rules
[33685]: https://github.com/rust-lang/rust/issues/33685
[36868]: https://github.com/rust-lang/rust/pull/36868
[37127]: https://github.com/rust-lang/rust/pull/37127
[37229]: https://github.com/rust-lang/rust/pull/37229
[37456]: https://github.com/rust-lang/rust/pull/37456
[37527]: https://github.com/rust-lang/rust/pull/37527
[37602]: https://github.com/rust-lang/rust/pull/37602
[37613]: https://github.com/rust-lang/rust/pull/37613
[37615]: https://github.com/rust-lang/rust/pull/37615
[37636]: https://github.com/rust-lang/rust/pull/37636
[37642]: https://github.com/rust-lang/rust/pull/37642
[37677]: https://github.com/rust-lang/rust/pull/37677
[37699]: https://github.com/rust-lang/rust/pull/37699
[37701]: https://github.com/rust-lang/rust/pull/37701
[37705]: https://github.com/rust-lang/rust/pull/37705
[37749]: https://github.com/rust-lang/rust/pull/37749
[37760]: https://github.com/rust-lang/rust/pull/37760
[37763]: https://github.com/rust-lang/rust/pull/37763
[37764]: https://github.com/rust-lang/rust/pull/37764
[37789]: https://github.com/rust-lang/rust/pull/37789
[37791]: https://github.com/rust-lang/rust/pull/37791
[37814]: https://github.com/rust-lang/rust/pull/37814
[37817]: https://github.com/rust-lang/rust/pull/37817
[37834]: https://github.com/rust-lang/rust/pull/37834
[37848]: https://github.com/rust-lang/rust/pull/37848
[37855]: https://github.com/rust-lang/rust/pull/37855
[37882]: https://github.com/rust-lang/rust/pull/37882
[37888]: https://github.com/rust-lang/rust/pull/37888
[37973]: https://github.com/rust-lang/rust/pull/37973
[37979]: https://github.com/rust-lang/rust/pull/37979
[38086]: https://github.com/rust-lang/rust/pull/38086
[38107]: https://github.com/rust-lang/rust/pull/38107
[38117]: https://github.com/rust-lang/rust/pull/38117
[38134]: https://github.com/rust-lang/rust/pull/38134
[38146]: https://github.com/rust-lang/rust/pull/38146
[38181]: https://github.com/rust-lang/rust/pull/38181
[38182]: https://github.com/rust-lang/rust/pull/38182
[38185]: https://github.com/rust-lang/rust/pull/38185
[38192]: https://github.com/rust-lang/rust/pull/38192
[38279]: https://github.com/rust-lang/rust/pull/38279
[38835]: https://github.com/rust-lang/rust/pull/38835
[RFC 1492]: https://github.com/rust-lang/rfcs/blob/master/text/1492-dotdot-in-patterns.md
[RFC 1506]: https://github.com/rust-lang/rfcs/blob/master/text/1506-adt-kinds.md
[RFC 1560]: https://github.com/rust-lang/rfcs/blob/master/text/1560-name-resolution.md
[RFC 1681]: https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md
[RFC 1717]: https://github.com/rust-lang/rfcs/blob/master/text/1717-dllimport.md
[`hr_lifetime_in_assoc_type` lint]: https://github.com/rust-lang/rust/issues/33685
[`legacy_imports`]: https://github.com/rust-lang/rust/pull/38271
[cargo/3102]: https://github.com/rust-lang/cargo/pull/3102
[cargo/3221]: https://github.com/rust-lang/cargo/pull/3221
[cargo/3310]: https://github.com/rust-lang/cargo/pull/3310
[cargo/3311]: https://github.com/rust-lang/cargo/pull/3311
[cargo/3319]: https://github.com/rust-lang/cargo/pull/3319
[cargo/3336]: https://github.com/rust-lang/cargo/pull/3336
[cargo/3338]: https://github.com/rust-lang/cargo/pull/3338
[cargo/3348]: https://github.com/rust-lang/cargo/pull/3348
[cargo/3356]: https://github.com/rust-lang/cargo/pull/3356
[cargo/3361]: https://github.com/rust-lang/cargo/pull/3361
[cargo/3363]: https://github.com/rust-lang/cargo/pull/3363
[cargo/3368]: https://github.com/rust-lang/cargo/issues/3368
[cargo/3409]: https://github.com/rust-lang/cargo/pull/3409
[cargo/3410]: https://github.com/rust-lang/cargo/pull/3410
[`std::iter::Iterator::min_by`]: https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.min_by
[`std::iter::Iterator::max_by`]: https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.max_by
[`std::os::*::fs::FileExt`]: https://doc.rust-lang.org/std/os/unix/fs/trait.FileExt.html
[`std::sync::atomic::Atomic*::get_mut`]: https://doc.rust-lang.org/std/sync/atomic/struct.AtomicU8.html#method.get_mut
[`std::sync::atomic::Atomic*::into_inner`]: https://doc.rust-lang.org/std/sync/atomic/struct.AtomicU8.html#method.into_inner
[`std::vec::IntoIter::as_slice`]: https://doc.rust-lang.org/std/vec/struct.IntoIter.html#method.as_slice
[`std::vec::IntoIter::as_mut_slice`]: https://doc.rust-lang.org/std/vec/struct.IntoIter.html#method.as_mut_slice
[`std::sync::mpsc::Receiver::try_iter`]: https://doc.rust-lang.org/std/sync/mpsc/struct.Receiver.html#method.try_iter
[`std::os::unix::process::CommandExt::before_exec`]: https://doc.rust-lang.org/std/os/unix/process/trait.CommandExt.html#tymethod.before_exec
[`std::rc::Rc::strong_count`]: https://doc.rust-lang.org/std/rc/struct.Rc.html#method.strong_count
[`std::rc::Rc::weak_count`]: https://doc.rust-lang.org/std/rc/struct.Rc.html#method.weak_count
[`std::sync::Arc::strong_count`]: https://doc.rust-lang.org/std/sync/struct.Arc.html#method.strong_count
[`std::sync::Arc::weak_count`]: https://doc.rust-lang.org/std/sync/struct.Arc.html#method.weak_count
[`std::char::encode_utf8`]: https://doc.rust-lang.org/std/primitive.char.html#method.encode_utf8
[`std::char::encode_utf16`]: https://doc.rust-lang.org/std/primitive.char.html#method.encode_utf16
[`std::cell::Ref::clone`]: https://doc.rust-lang.org/std/cell/struct.Ref.html#method.clone
[`std::io::Take::into_inner`]: https://doc.rust-lang.org/std/io/struct.Take.html#method.into_inner
Version 1.14.0 (2016-12-22)
===========================

View File

@ -11,8 +11,8 @@ environment:
SCRIPT: python x.py test && python x.py dist
DEPLOY: 1
- MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc
SCRIPT: python x.py test && python x.py dist
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc --target=i586-pc-windows-msvc
SCRIPT: python x.py test --host i686-pc-windows-msvc --target i686-pc-windows-msvc && python x.py dist
DEPLOY: 1
# MSVC makefiles
@ -73,6 +73,9 @@ environment:
MINGW_DIR: mingw64
DEPLOY: 1
matrix:
fast_finish: true
clone_depth: 1
build: false
@ -131,9 +134,6 @@ before_deploy:
- ps: |
New-Item -Path deploy -ItemType directory
Get-ChildItem -Path build\dist -Filter '*.tar.gz' | Move-Item -Destination deploy
Get-FileHash .\deploy\* | ForEach-Object {
[io.file]::WriteAllText($_.Path + ".sha256", $_.Hash.ToLower() + "`n")
}
Get-ChildItem -Path deploy | Foreach-Object {
Push-AppveyorArtifact $_.FullName -FileName ${env:APPVEYOR_REPO_COMMIT}/$_
}
@ -147,7 +147,7 @@ deploy:
bucket: rust-lang-ci
set_public: true
region: us-east-1
artifact: /.*\.(tar.gz|sha256)/
artifact: /.*\.tar.gz/
folder: rustc-builds
on:
branch: auto

View File

@ -121,10 +121,14 @@ fn set_compiler(cfg: &mut gcc::Config,
}
"mips-unknown-linux-musl" => {
cfg.compiler("mips-linux-musl-gcc");
if cfg.get_compiler().path().to_str() == Some("gcc") {
cfg.compiler("mips-linux-musl-gcc");
}
}
"mipsel-unknown-linux-musl" => {
cfg.compiler("mipsel-linux-musl-gcc");
if cfg.get_compiler().path().to_str() == Some("gcc") {
cfg.compiler("mipsel-linux-musl-gcc");
}
}
t if t.contains("musl") => {

View File

@ -213,7 +213,11 @@ pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
cargo.env("CFG_LLVM_ROOT", s);
}
if build.config.llvm_static_stdcpp {
// Building with a static libstdc++ is only supported on linux right now,
// not for MSVC or OSX
if build.config.llvm_static_stdcpp &&
!target.contains("windows") &&
!target.contains("apple") {
cargo.env("LLVM_STATIC_STDCPP",
compiler_file(build.cxx(target), "libstdc++.a"));
}

View File

@ -287,8 +287,8 @@ To learn more about a subcommand, run `./x.py <command> -h`
build: m.opt_str("build").unwrap_or_else(|| {
env::var("BUILD").unwrap()
}),
host: m.opt_strs("host"),
target: m.opt_strs("target"),
host: split(m.opt_strs("host")),
target: split(m.opt_strs("target")),
config: cfg_file,
src: m.opt_str("src").map(PathBuf::from),
jobs: m.opt_str("jobs").map(|j| j.parse().unwrap()),
@ -309,3 +309,7 @@ impl Subcommand {
}
}
}
fn split(s: Vec<String>) -> Vec<String> {
s.iter().flat_map(|s| s.split(',')).map(|s| s.to_string()).collect()
}

View File

@ -45,7 +45,7 @@ pub fn check(build: &mut Build) {
let target = path.join(cmd);
let mut cmd_alt = cmd.to_os_string();
cmd_alt.push(".exe");
if target.exists() ||
if target.is_file() ||
target.with_extension("exe").exists() ||
target.join(cmd_alt).exists() {
return Some(target);

src/ci/docker/README.md (new file, 145 lines)
View File

@ -0,0 +1,145 @@
# Docker images for CI
This folder contains a bunch of docker images used by the continuous integration
(CI) of Rust. A script, `run.sh`, accompanies these images to actually
execute them. To test out an image, execute:
```
./src/ci/docker/run.sh $image_name
```
for example:
```
./src/ci/docker/run.sh x86_64-gnu
```
Images will output artifacts in an `obj` dir at the root of the repository.
## Cross toolchains
A number of these images take quite a long time to compile as they're building
whole gcc toolchains to do cross builds with. Much of this is relatively
self-explanatory but some images use [crosstool-ng] which isn't quite as
self-explanatory. Below is a description of where these `*.config` files come
from, how to generate them, and how the existing ones were generated.
[crosstool-ng]: https://github.com/crosstool-ng/crosstool-ng
### Generating a `.config` file
If you have a `linux-cross` image lying around you can use that and skip the
next two steps.
- First we spin up a container and copy `build_toolchain_root.sh` into it. All
these steps are outside the container:
```
# Note: we use ubuntu:15.10 because that's the "base" of the linux-cross
# Docker image
$ docker run -it ubuntu:15.10 bash
$ docker ps
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
cfbec05ed730 ubuntu:15.10 "bash" 16 seconds ago Up 15 seconds drunk_murdock
$ docker cp build_toolchain_root.sh drunk_murdock:/
```
- Then inside the container we build crosstool-ng by simply calling the bash
script we copied in the previous step:
```
$ bash build_toolchain_root.sh
```
- Now, inside the container, run the following command to configure the
toolchain. To get an idea of which options need to be changed, check the next
section and come back.
```
$ ct-ng menuconfig
```
- Finally, we retrieve the `.config` file from the container and give it a
meaningful name. This is done outside the container.
```
$ docker cp drunk_murdock:/.config arm-linux-gnueabi.config
```
- Now you can shut down the container or repeat the last two steps to generate a
new `.config` file.
### Toolchain configuration
These are the changes made on top of the default toolchain configuration to
generate the `.config` files in this directory. The changes are formatted as follows:
```
$category > $option = $value -- $comment
```
### `arm-linux-gnueabi.config`
For targets: `arm-unknown-linux-gnueabi`
- Path and misc options > Prefix directory = /x-tools/${CT\_TARGET}
- Target options > Target Architecture = arm
- Target options > Architecture level = armv6 -- (+)
- Target options > Floating point = software (no FPU) -- (\*)
- Operating System > Target OS = linux
- Operating System > Linux kernel version = 3.2.72 -- Precise kernel
- C-library > glibc version = 2.14.1
- C compiler > gcc version = 4.9.3
- C compiler > C++ = ENABLE -- to cross compile LLVM
### `arm-linux-gnueabihf.config`
For targets: `arm-unknown-linux-gnueabihf`
- Path and misc options > Prefix directory = /x-tools/${CT\_TARGET}
- Target options > Target Architecture = arm
- Target options > Architecture level = armv6 -- (+)
- Target options > Use specific FPU = vfp -- (+)
- Target options > Floating point = hardware (FPU) -- (\*)
- Target options > Default instruction set mode = arm -- (+)
- Operating System > Target OS = linux
- Operating System > Linux kernel version = 3.2.72 -- Precise kernel
- C-library > glibc version = 2.14.1
- C compiler > gcc version = 4.9.3
- C compiler > C++ = ENABLE -- to cross compile LLVM
### `armv7-linux-gnueabihf.config`
For targets: `armv7-unknown-linux-gnueabihf`
- Path and misc options > Prefix directory = /x-tools/${CT\_TARGET}
- Target options > Target Architecture = arm
- Target options > Suffix to the arch-part = v7
- Target options > Architecture level = armv7-a -- (+)
- Target options > Use specific FPU = vfpv3-d16 -- (\*)
- Target options > Floating point = hardware (FPU) -- (\*)
- Target options > Default instruction set mode = thumb -- (\*)
- Operating System > Target OS = linux
- Operating System > Linux kernel version = 3.2.72 -- Precise kernel
- C-library > glibc version = 2.14.1
- C compiler > gcc version = 4.9.3
- C compiler > C++ = ENABLE -- to cross compile LLVM
(\*) These options have been selected to match the configuration of the arm
toolchains shipped with Ubuntu 15.10
(+) These options have been selected to match the gcc flags we use to compile C
libraries like jemalloc. See the mk/cfg/arm(v7)-unknown-linux-gnueabi{,hf}.mk
file in Rust's source code.
### `aarch64-linux-gnu.config`
For targets: `aarch64-unknown-linux-gnu`
- Path and misc options > Prefix directory = /x-tools/${CT\_TARGET}
- Target options > Target Architecture = arm
- Target options > Bitness = 64-bit
- Operating System > Target OS = linux
- Operating System > Linux kernel version = 4.2.6
- C-library > glibc version = 2.17 -- aarch64 support was introduced in this version
- C compiler > gcc version = 5.2.0
- C compiler > C++ = ENABLE -- to cross compile LLVM

View File

@ -53,8 +53,4 @@ ENV RUST_CONFIGURE_ARGS \
# to all the targets above eventually.
ENV SCRIPT \
python2.7 ../x.py test --target arm-linux-androideabi && \
python2.7 ../x.py dist \
--target arm-linux-androideabi \
--target armv7-linux-androideabi \
--target i686-linux-android \
--target aarch64-linux-android
python2.7 ../x.py dist --target $TARGETS

View File

@ -10,7 +10,12 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
git \
cmake \
sudo \
xz-utils
xz-utils \
zlib1g-dev \
g++-arm-linux-gnueabi \
g++-arm-linux-gnueabihf \
bzip2 \
patch
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
@ -21,21 +26,50 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
WORKDIR /tmp
COPY build-rumprun.sh /tmp/
RUN ./build-rumprun.sh
COPY build-arm-musl.sh /tmp/
RUN ./build-arm-musl.sh
# originally from
# https://downloads.openwrt.org/snapshots/trunk/ar71xx/generic/OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2
RUN mkdir /usr/local/mips-linux-musl
RUN curl -L https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2 | \
tar xjf - -C /usr/local/mips-linux-musl --strip-components=2
RUN for file in /usr/local/mips-linux-musl/bin/mips-openwrt-linux-*; do \
ln -s $file /usr/local/bin/`basename $file`; \
done
# Note that this originally came from:
# https://downloads.openwrt.org/snapshots/trunk/malta/generic/OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2
RUN mkdir /usr/local/mipsel-linux-musl
RUN curl -L https://s3.amazonaws.com/rust-lang-ci/libc/OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 | \
tar xjf - -C /usr/local/mipsel-linux-musl --strip-components=2
RUN for file in /usr/local/mipsel-linux-musl/bin/mipsel-openwrt-linux-*; do \
ln -s $file /usr/local/bin/`basename $file`; \
done
ENV TARGETS=asmjs-unknown-emscripten
ENV TARGETS=$TARGETS,wasm32-unknown-emscripten
ENV TARGETS=$TARGETS,x86_64-rumprun-netbsd
ENV TARGETS=$TARGETS,mips-unknown-linux-musl
ENV TARGETS=$TARGETS,mipsel-unknown-linux-musl
ENV TARGETS=$TARGETS,arm-unknown-linux-musleabi
ENV TARGETS=$TARGETS,arm-unknown-linux-musleabihf
ENV TARGETS=$TARGETS,armv7-unknown-linux-musleabihf
#ENV TARGETS=$TARGETS,mips-unknown-linux-musl
#ENV TARGETS=$TARGETS,arm-unknown-linux-musleabi
#ENV TARGETS=$TARGETS,arm-unknown-linux-musleabihf
#ENV TARGETS=$TARGETS,armv7-unknown-linux-musleabihf
#ENV TARGETS=$TARGETS,x86_64-rumprun-netbsd
ENV CC_mipsel_unknown_linux_musl=mipsel-openwrt-linux-gcc \
CC_mips_unknown_linux_musl=mips-openwrt-linux-gcc
# Suppress some warnings in the openwrt toolchains we downloaded
ENV STAGING_DIR=/tmp
ENV RUST_CONFIGURE_ARGS \
--target=$TARGETS \
--enable-rustbuild
# Just a smoke test in dist to see if this works for now, we should expand this
# to all the targets above eventually.
ENV SCRIPT \
python2.7 ../x.py build && \
python2.7 ../x.py dist --target wasm32-unknown-emscripten
--musl-root-arm=/usr/local/arm-linux-musleabi \
--musl-root-armhf=/usr/local/arm-linux-musleabihf \
--musl-root-armv7=/usr/local/armv7-linux-musleabihf
ENV SCRIPT python2.7 ../x.py dist --target $TARGETS

View File

@ -0,0 +1,120 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
MUSL=1.1.16
hide_output() {
set +x
on_err="
echo ERROR: An error was encountered with the build.
cat /tmp/build.log
exit 1
"
trap "$on_err" ERR
bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
PING_LOOP_PID=$!
$@ &> /tmp/build.log
trap - ERR
kill $PING_LOOP_PID
rm /tmp/build.log
set -x
}
curl -O https://www.musl-libc.org/releases/musl-$MUSL.tar.gz
tar xf musl-$MUSL.tar.gz
cd musl-$MUSL
CC=arm-linux-gnueabi-gcc \
CFLAGS="-march=armv6 -marm" \
hide_output ./configure \
--prefix=/usr/local/arm-linux-musleabi \
--enable-wrapper=gcc
hide_output make -j$(nproc)
hide_output make install
cd ..
rm -rf musl-$MUSL
tar xf musl-$MUSL.tar.gz
cd musl-$MUSL
CC=arm-linux-gnueabihf-gcc \
CFLAGS="-march=armv6 -marm" \
hide_output ./configure \
--prefix=/usr/local/arm-linux-musleabihf \
--enable-wrapper=gcc
hide_output make -j$(nproc)
hide_output make install
cd ..
rm -rf musl-$MUSL
tar xf musl-$MUSL.tar.gz
cd musl-$MUSL
CC=arm-linux-gnueabihf-gcc \
CFLAGS="-march=armv7-a" \
hide_output ./configure \
--prefix=/usr/local/armv7-linux-musleabihf \
--enable-wrapper=gcc
hide_output make -j$(nproc)
hide_output make install
cd ..
rm -rf musl-$MUSL*
ln -nsf ../arm-linux-musleabi/bin/musl-gcc /usr/local/bin/arm-linux-musleabi-gcc
ln -nsf ../arm-linux-musleabihf/bin/musl-gcc /usr/local/bin/arm-linux-musleabihf-gcc
ln -nsf ../armv7-linux-musleabihf/bin/musl-gcc /usr/local/bin/armv7-linux-musleabihf-gcc
curl -L https://github.com/llvm-mirror/llvm/archive/release_39.tar.gz | tar xzf -
curl -L https://github.com/llvm-mirror/libunwind/archive/release_39.tar.gz | tar xzf -
mkdir libunwind-build
cd libunwind-build
cmake ../libunwind-release_39 \
-DLLVM_PATH=/tmp/llvm-release_39 \
-DLIBUNWIND_ENABLE_SHARED=0 \
-DCMAKE_C_COMPILER=arm-linux-gnueabi-gcc \
-DCMAKE_CXX_COMPILER=arm-linux-gnueabi-g++ \
-DCMAKE_C_FLAGS="-march=armv6 -marm" \
-DCMAKE_CXX_FLAGS="-march=armv6 -marm"
make -j$(nproc)
cp lib/libunwind.a /usr/local/arm-linux-musleabi/lib
cd ..
rm -rf libunwind-build
mkdir libunwind-build
cd libunwind-build
cmake ../libunwind-release_39 \
-DLLVM_PATH=/tmp/llvm-release_39 \
-DLIBUNWIND_ENABLE_SHARED=0 \
-DCMAKE_C_COMPILER=arm-linux-gnueabihf-gcc \
-DCMAKE_CXX_COMPILER=arm-linux-gnueabihf-g++ \
-DCMAKE_C_FLAGS="-march=armv6 -marm" \
-DCMAKE_CXX_FLAGS="-march=armv6 -marm"
make -j$(nproc)
cp lib/libunwind.a /usr/local/arm-linux-musleabihf/lib
cd ..
rm -rf libunwind-build
mkdir libunwind-build
cd libunwind-build
cmake ../libunwind-release_39 \
-DLLVM_PATH=/tmp/llvm-release_39 \
-DLIBUNWIND_ENABLE_SHARED=0 \
-DCMAKE_C_COMPILER=arm-linux-gnueabihf-gcc \
-DCMAKE_CXX_COMPILER=arm-linux-gnueabihf-g++ \
-DCMAKE_C_FLAGS="-march=armv7-a" \
-DCMAKE_CXX_FLAGS="-march=armv7-a"
make -j$(nproc)
cp lib/libunwind.a /usr/local/armv7-linux-musleabihf/lib
cd ..
rm -rf libunwind-build
rm -rf libunwind-release_39
rm -rf llvm-release_39

View File

@ -0,0 +1,38 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
hide_output() {
set +x
on_err="
echo ERROR: An error was encountered with the build.
cat /tmp/build.log
exit 1
"
trap "$on_err" ERR
bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
PING_LOOP_PID=$!
$@ &> /tmp/build.log
trap - ERR
kill $PING_LOOP_PID
rm /tmp/build.log
set -x
}
git clone https://github.com/rumpkernel/rumprun
cd rumprun
git reset --hard 39a97f37a85e44c69b662f6b97b688fbe892603b
git submodule update --init
CC=cc hide_output ./build-rr.sh -d /usr/local hw
cd ..
rm -rf rumprun

View File

@ -1,19 +1,29 @@
FROM ubuntu:16.04
RUN apt-get update && apt-get install -y --no-install-recommends \
g++ \
make \
file \
curl \
automake \
bison \
bzip2 \
ca-certificates \
python2.7 \
git \
cmake \
sudo \
curl \
file \
flex \
g++ \
gawk \
gdb \
xz-utils \
g++-arm-linux-gnueabi \
g++-arm-linux-gnueabihf
git \
gperf \
help2man \
libncurses-dev \
libtool-bin \
make \
patch \
python2.7 \
sudo \
texinfo \
wget \
xz-utils
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
@ -24,11 +34,47 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS \
--host=arm-unknown-linux-gnueabi,arm-unknown-linux-gnueabihf
ENV SCRIPT \
python2.7 ../x.py dist \
--host arm-unknown-linux-gnueabi \
--target arm-unknown-linux-gnueabi \
--host arm-unknown-linux-gnueabihf \
--target arm-unknown-linux-gnueabihf
# Ubuntu 16.04 (this container) ships with make 4, but something in the
# toolchains we build below chokes on that, so go back to make 3
RUN curl https://ftp.gnu.org/gnu/make/make-3.81.tar.gz | tar xzf - && \
cd make-3.81 && \
./configure --prefix=/usr && \
make && \
make install && \
cd .. && \
rm -rf make-3.81
RUN curl http://crosstool-ng.org/download/crosstool-ng/crosstool-ng-1.22.0.tar.bz2 | \
tar xjf - && \
cd crosstool-ng && \
./configure --prefix=/usr/local && \
make -j$(nproc) && \
make install && \
cd .. && \
rm -rf crosstool-ng
RUN groupadd -r rustbuild && useradd -m -r -g rustbuild rustbuild
RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
USER rustbuild
WORKDIR /tmp
COPY arm-linux-gnueabihf.config arm-linux-gnueabi.config build-toolchains.sh /tmp/
RUN ./build-toolchains.sh
USER root
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabi/bin
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabihf/bin
ENV CC_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-gcc \
AR_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-ar \
CXX_arm_unknown_linux_gnueabi=arm-unknown-linux-gnueabi-g++ \
CC_arm_unknown_linux_gnueabihf=arm-unknown-linux-gnueabihf-gcc \
AR_arm_unknown_linux_gnueabihf=arm-unknown-linux-gnueabihf-ar \
CXX_arm_unknown_linux_gnueabihf=arm-unknown-linux-gnueabihf-g++
ENV HOSTS=arm-unknown-linux-gnueabi
ENV HOSTS=$HOSTS,arm-unknown-linux-gnueabihf
ENV RUST_CONFIGURE_ARGS --host=$HOSTS
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS

View File

@ -0,0 +1,560 @@
#
# Automatically generated file; DO NOT EDIT.
# Crosstool-NG Configuration
#
CT_CONFIGURE_has_make381=y
CT_MODULES=y
#
# Paths and misc options
#
#
# crosstool-NG behavior
#
# CT_OBSOLETE is not set
# CT_EXPERIMENTAL is not set
# CT_DEBUG_CT is not set
#
# Paths
#
CT_LOCAL_TARBALLS_DIR=""
CT_WORK_DIR="${CT_TOP_DIR}/.build"
CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
CT_INSTALL_DIR="${CT_PREFIX_DIR}"
CT_RM_RF_PREFIX_DIR=y
CT_REMOVE_DOCS=y
CT_INSTALL_DIR_RO=y
CT_STRIP_HOST_TOOLCHAIN_EXECUTABLES=y
# CT_STRIP_TARGET_TOOLCHAIN_EXECUTABLES is not set
#
# Downloading
#
# CT_FORBID_DOWNLOAD is not set
# CT_FORCE_DOWNLOAD is not set
CT_CONNECT_TIMEOUT=10
# CT_ONLY_DOWNLOAD is not set
# CT_USE_MIRROR is not set
#
# Extracting
#
# CT_FORCE_EXTRACT is not set
CT_OVERIDE_CONFIG_GUESS_SUB=y
# CT_ONLY_EXTRACT is not set
CT_PATCH_BUNDLED=y
# CT_PATCH_LOCAL is not set
# CT_PATCH_BUNDLED_LOCAL is not set
# CT_PATCH_LOCAL_BUNDLED is not set
# CT_PATCH_BUNDLED_FALLBACK_LOCAL is not set
# CT_PATCH_LOCAL_FALLBACK_BUNDLED is not set
# CT_PATCH_NONE is not set
CT_PATCH_ORDER="bundled"
#
# Build behavior
#
CT_PARALLEL_JOBS=0
CT_LOAD=""
CT_USE_PIPES=y
CT_EXTRA_CFLAGS_FOR_BUILD=""
CT_EXTRA_LDFLAGS_FOR_BUILD=""
CT_EXTRA_CFLAGS_FOR_HOST=""
CT_EXTRA_LDFLAGS_FOR_HOST=""
# CT_CONFIG_SHELL_SH is not set
# CT_CONFIG_SHELL_ASH is not set
CT_CONFIG_SHELL_BASH=y
# CT_CONFIG_SHELL_CUSTOM is not set
CT_CONFIG_SHELL="${bash}"
#
# Logging
#
# CT_LOG_ERROR is not set
# CT_LOG_WARN is not set
CT_LOG_INFO=y
# CT_LOG_EXTRA is not set
# CT_LOG_ALL is not set
# CT_LOG_DEBUG is not set
CT_LOG_LEVEL_MAX="INFO"
# CT_LOG_SEE_TOOLS_WARN is not set
CT_LOG_PROGRESS_BAR=y
CT_LOG_TO_FILE=y
CT_LOG_FILE_COMPRESS=y
#
# Target options
#
CT_ARCH="arm"
CT_ARCH_SUPPORTS_BOTH_MMU=y
CT_ARCH_SUPPORTS_BOTH_ENDIAN=y
CT_ARCH_SUPPORTS_32=y
CT_ARCH_SUPPORTS_64=y
CT_ARCH_SUPPORTS_WITH_ARCH=y
CT_ARCH_SUPPORTS_WITH_CPU=y
CT_ARCH_SUPPORTS_WITH_TUNE=y
CT_ARCH_SUPPORTS_WITH_FLOAT=y
CT_ARCH_SUPPORTS_WITH_FPU=y
CT_ARCH_SUPPORTS_SOFTFP=y
CT_ARCH_DEFAULT_HAS_MMU=y
CT_ARCH_DEFAULT_LE=y
CT_ARCH_DEFAULT_32=y
CT_ARCH_ARCH="armv6"
CT_ARCH_CPU=""
CT_ARCH_TUNE=""
CT_ARCH_FPU=""
# CT_ARCH_BE is not set
CT_ARCH_LE=y
CT_ARCH_32=y
# CT_ARCH_64 is not set
CT_ARCH_BITNESS=32
# CT_ARCH_FLOAT_HW is not set
CT_ARCH_FLOAT_SW=y
CT_TARGET_CFLAGS=""
CT_TARGET_LDFLAGS=""
# CT_ARCH_alpha is not set
CT_ARCH_arm=y
# CT_ARCH_avr is not set
# CT_ARCH_m68k is not set
# CT_ARCH_mips is not set
# CT_ARCH_nios2 is not set
# CT_ARCH_powerpc is not set
# CT_ARCH_s390 is not set
# CT_ARCH_sh is not set
# CT_ARCH_sparc is not set
# CT_ARCH_x86 is not set
# CT_ARCH_xtensa is not set
CT_ARCH_alpha_AVAILABLE=y
CT_ARCH_arm_AVAILABLE=y
CT_ARCH_avr_AVAILABLE=y
CT_ARCH_m68k_AVAILABLE=y
CT_ARCH_microblaze_AVAILABLE=y
CT_ARCH_mips_AVAILABLE=y
CT_ARCH_nios2_AVAILABLE=y
CT_ARCH_powerpc_AVAILABLE=y
CT_ARCH_s390_AVAILABLE=y
CT_ARCH_sh_AVAILABLE=y
CT_ARCH_sparc_AVAILABLE=y
CT_ARCH_x86_AVAILABLE=y
CT_ARCH_xtensa_AVAILABLE=y
CT_ARCH_SUFFIX=""
#
# Generic target options
#
# CT_MULTILIB is not set
CT_ARCH_USE_MMU=y
CT_ARCH_ENDIAN="little"
#
# Target optimisations
#
CT_ARCH_EXCLUSIVE_WITH_CPU=y
# CT_ARCH_FLOAT_AUTO is not set
# CT_ARCH_FLOAT_SOFTFP is not set
CT_ARCH_FLOAT="soft"
#
# arm other options
#
CT_ARCH_ARM_MODE="arm"
CT_ARCH_ARM_MODE_ARM=y
# CT_ARCH_ARM_MODE_THUMB is not set
# CT_ARCH_ARM_INTERWORKING is not set
CT_ARCH_ARM_EABI_FORCE=y
CT_ARCH_ARM_EABI=y
#
# Toolchain options
#
#
# General toolchain options
#
CT_FORCE_SYSROOT=y
CT_USE_SYSROOT=y
CT_SYSROOT_NAME="sysroot"
CT_SYSROOT_DIR_PREFIX=""
CT_WANTS_STATIC_LINK=y
# CT_STATIC_TOOLCHAIN is not set
CT_TOOLCHAIN_PKGVERSION=""
CT_TOOLCHAIN_BUGURL=""
#
# Tuple completion and aliasing
#
CT_TARGET_VENDOR="unknown"
CT_TARGET_ALIAS_SED_EXPR=""
CT_TARGET_ALIAS=""
#
# Toolchain type
#
CT_CROSS=y
# CT_CANADIAN is not set
CT_TOOLCHAIN_TYPE="cross"
#
# Build system
#
CT_BUILD=""
CT_BUILD_PREFIX=""
CT_BUILD_SUFFIX=""
#
# Misc options
#
# CT_TOOLCHAIN_ENABLE_NLS is not set
#
# Operating System
#
CT_KERNEL_SUPPORTS_SHARED_LIBS=y
CT_KERNEL="linux"
CT_KERNEL_VERSION="3.2.72"
# CT_KERNEL_bare_metal is not set
CT_KERNEL_linux=y
CT_KERNEL_bare_metal_AVAILABLE=y
CT_KERNEL_linux_AVAILABLE=y
# CT_KERNEL_V_4_3 is not set
# CT_KERNEL_V_4_2 is not set
# CT_KERNEL_V_4_1 is not set
# CT_KERNEL_V_3_18 is not set
# CT_KERNEL_V_3_14 is not set
# CT_KERNEL_V_3_12 is not set
# CT_KERNEL_V_3_10 is not set
# CT_KERNEL_V_3_4 is not set
CT_KERNEL_V_3_2=y
# CT_KERNEL_V_2_6_32 is not set
# CT_KERNEL_LINUX_CUSTOM is not set
CT_KERNEL_windows_AVAILABLE=y
#
# Common kernel options
#
CT_SHARED_LIBS=y
#
# linux other options
#
CT_KERNEL_LINUX_VERBOSITY_0=y
# CT_KERNEL_LINUX_VERBOSITY_1 is not set
# CT_KERNEL_LINUX_VERBOSITY_2 is not set
CT_KERNEL_LINUX_VERBOSE_LEVEL=0
CT_KERNEL_LINUX_INSTALL_CHECK=y
#
# Binary utilities
#
CT_ARCH_BINFMT_ELF=y
CT_BINUTILS="binutils"
CT_BINUTILS_binutils=y
#
# GNU binutils
#
# CT_CC_BINUTILS_SHOW_LINARO is not set
CT_BINUTILS_V_2_25_1=y
# CT_BINUTILS_V_2_25 is not set
# CT_BINUTILS_V_2_24 is not set
# CT_BINUTILS_V_2_23_2 is not set
# CT_BINUTILS_V_2_23_1 is not set
# CT_BINUTILS_V_2_22 is not set
# CT_BINUTILS_V_2_21_53 is not set
# CT_BINUTILS_V_2_21_1a is not set
# CT_BINUTILS_V_2_20_1a is not set
# CT_BINUTILS_V_2_19_1a is not set
# CT_BINUTILS_V_2_18a is not set
CT_BINUTILS_VERSION="2.25.1"
CT_BINUTILS_2_25_1_or_later=y
CT_BINUTILS_2_25_or_later=y
CT_BINUTILS_2_24_or_later=y
CT_BINUTILS_2_23_or_later=y
CT_BINUTILS_2_22_or_later=y
CT_BINUTILS_2_21_or_later=y
CT_BINUTILS_2_20_or_later=y
CT_BINUTILS_2_19_or_later=y
CT_BINUTILS_2_18_or_later=y
CT_BINUTILS_HAS_HASH_STYLE=y
CT_BINUTILS_HAS_GOLD=y
CT_BINUTILS_GOLD_SUPPORTS_ARCH=y
CT_BINUTILS_GOLD_SUPPORT=y
CT_BINUTILS_HAS_PLUGINS=y
CT_BINUTILS_HAS_PKGVERSION_BUGURL=y
CT_BINUTILS_FORCE_LD_BFD=y
CT_BINUTILS_LINKER_LD=y
# CT_BINUTILS_LINKER_LD_GOLD is not set
# CT_BINUTILS_LINKER_GOLD_LD is not set
CT_BINUTILS_LINKERS_LIST="ld"
CT_BINUTILS_LINKER_DEFAULT="bfd"
# CT_BINUTILS_PLUGINS is not set
CT_BINUTILS_EXTRA_CONFIG_ARRAY=""
# CT_BINUTILS_FOR_TARGET is not set
#
# binutils other options
#
#
# C-library
#
CT_LIBC="glibc"
CT_LIBC_VERSION="2.16.0"
CT_LIBC_glibc=y
# CT_LIBC_musl is not set
# CT_LIBC_uClibc is not set
CT_LIBC_avr_libc_AVAILABLE=y
CT_LIBC_glibc_AVAILABLE=y
CT_THREADS="nptl"
# CT_CC_GLIBC_SHOW_LINARO is not set
# CT_LIBC_GLIBC_V_2_22 is not set
# CT_LIBC_GLIBC_V_2_21 is not set
# CT_LIBC_GLIBC_V_2_20 is not set
# CT_LIBC_GLIBC_V_2_19 is not set
# CT_LIBC_GLIBC_V_2_18 is not set
# CT_LIBC_GLIBC_V_2_17 is not set
CT_LIBC_GLIBC_V_2_16_0=y
# CT_LIBC_GLIBC_V_2_15 is not set
# CT_LIBC_GLIBC_V_2_14_1 is not set
# CT_LIBC_GLIBC_V_2_14 is not set
# CT_LIBC_GLIBC_V_2_13 is not set
# CT_LIBC_GLIBC_V_2_12_2 is not set
# CT_LIBC_GLIBC_V_2_12_1 is not set
# CT_LIBC_GLIBC_V_2_11_1 is not set
# CT_LIBC_GLIBC_V_2_11 is not set
# CT_LIBC_GLIBC_V_2_10_1 is not set
# CT_LIBC_GLIBC_V_2_9 is not set
# CT_LIBC_GLIBC_V_2_8 is not set
CT_LIBC_mingw_AVAILABLE=y
CT_LIBC_musl_AVAILABLE=y
CT_LIBC_newlib_AVAILABLE=y
CT_LIBC_none_AVAILABLE=y
CT_LIBC_uClibc_AVAILABLE=y
CT_LIBC_SUPPORT_THREADS_ANY=y
CT_LIBC_SUPPORT_THREADS_NATIVE=y
#
# Common C library options
#
CT_THREADS_NATIVE=y
CT_LIBC_XLDD=y
#
# glibc other options
#
CT_LIBC_GLIBC_PORTS_EXTERNAL=y
CT_LIBC_GLIBC_MAY_FORCE_PORTS=y
CT_LIBC_glibc_familly=y
CT_LIBC_GLIBC_EXTRA_CONFIG_ARRAY=""
CT_LIBC_GLIBC_CONFIGPARMS=""
CT_LIBC_GLIBC_EXTRA_CFLAGS=""
CT_LIBC_EXTRA_CC_ARGS=""
# CT_LIBC_DISABLE_VERSIONING is not set
CT_LIBC_OLDEST_ABI=""
CT_LIBC_GLIBC_FORCE_UNWIND=y
CT_LIBC_GLIBC_USE_PORTS=y
CT_LIBC_ADDONS_LIST=""
#
# WARNING !!!
#
#
# For glibc >= 2.8, it can happen that the tarballs
#
#
# for the addons are not available for download.
#
#
# If that happens, bad luck... Try a previous version
#
#
# or try again later... :-(
#
# CT_LIBC_LOCALES is not set
# CT_LIBC_GLIBC_KERNEL_VERSION_NONE is not set
CT_LIBC_GLIBC_KERNEL_VERSION_AS_HEADERS=y
# CT_LIBC_GLIBC_KERNEL_VERSION_CHOSEN is not set
CT_LIBC_GLIBC_MIN_KERNEL="3.2.72"
#
# C compiler
#
CT_CC="gcc"
CT_CC_CORE_PASSES_NEEDED=y
CT_CC_CORE_PASS_1_NEEDED=y
CT_CC_CORE_PASS_2_NEEDED=y
CT_CC_gcc=y
# CT_CC_GCC_SHOW_LINARO is not set
# CT_CC_GCC_V_5_2_0 is not set
CT_CC_GCC_V_4_9_3=y
# CT_CC_GCC_V_4_8_5 is not set
# CT_CC_GCC_V_4_7_4 is not set
# CT_CC_GCC_V_4_6_4 is not set
# CT_CC_GCC_V_4_5_4 is not set
# CT_CC_GCC_V_4_4_7 is not set
# CT_CC_GCC_V_4_3_6 is not set
# CT_CC_GCC_V_4_2_4 is not set
CT_CC_GCC_4_2_or_later=y
CT_CC_GCC_4_3_or_later=y
CT_CC_GCC_4_4_or_later=y
CT_CC_GCC_4_5_or_later=y
CT_CC_GCC_4_6_or_later=y
CT_CC_GCC_4_7_or_later=y
CT_CC_GCC_4_8_or_later=y
CT_CC_GCC_4_9=y
CT_CC_GCC_4_9_or_later=y
CT_CC_GCC_HAS_GRAPHITE=y
CT_CC_GCC_USE_GRAPHITE=y
CT_CC_GCC_HAS_LTO=y
CT_CC_GCC_USE_LTO=y
CT_CC_GCC_HAS_PKGVERSION_BUGURL=y
CT_CC_GCC_HAS_BUILD_ID=y
CT_CC_GCC_HAS_LNK_HASH_STYLE=y
CT_CC_GCC_USE_GMP_MPFR=y
CT_CC_GCC_USE_MPC=y
CT_CC_GCC_HAS_LIBQUADMATH=y
CT_CC_GCC_HAS_LIBSANITIZER=y
CT_CC_GCC_VERSION="4.9.3"
# CT_CC_LANG_FORTRAN is not set
CT_CC_GCC_ENABLE_CXX_FLAGS=""
CT_CC_GCC_CORE_EXTRA_CONFIG_ARRAY=""
CT_CC_GCC_EXTRA_CONFIG_ARRAY=""
CT_CC_GCC_EXTRA_ENV_ARRAY=""
CT_CC_GCC_STATIC_LIBSTDCXX=y
# CT_CC_GCC_SYSTEM_ZLIB is not set
#
# Optimisation features
#
#
# Settings for libraries running on target
#
CT_CC_GCC_ENABLE_TARGET_OPTSPACE=y
# CT_CC_GCC_LIBMUDFLAP is not set
# CT_CC_GCC_LIBGOMP is not set
# CT_CC_GCC_LIBSSP is not set
# CT_CC_GCC_LIBQUADMATH is not set
# CT_CC_GCC_LIBSANITIZER is not set
#
# Misc. obscure options.
#
CT_CC_CXA_ATEXIT=y
# CT_CC_GCC_DISABLE_PCH is not set
CT_CC_GCC_SJLJ_EXCEPTIONS=m
CT_CC_GCC_LDBL_128=m
# CT_CC_GCC_BUILD_ID is not set
CT_CC_GCC_LNK_HASH_STYLE_DEFAULT=y
# CT_CC_GCC_LNK_HASH_STYLE_SYSV is not set
# CT_CC_GCC_LNK_HASH_STYLE_GNU is not set
# CT_CC_GCC_LNK_HASH_STYLE_BOTH is not set
CT_CC_GCC_LNK_HASH_STYLE=""
CT_CC_GCC_DEC_FLOAT_AUTO=y
# CT_CC_GCC_DEC_FLOAT_BID is not set
# CT_CC_GCC_DEC_FLOAT_DPD is not set
# CT_CC_GCC_DEC_FLOATS_NO is not set
CT_CC_SUPPORT_CXX=y
CT_CC_SUPPORT_FORTRAN=y
CT_CC_SUPPORT_JAVA=y
CT_CC_SUPPORT_ADA=y
CT_CC_SUPPORT_OBJC=y
CT_CC_SUPPORT_OBJCXX=y
CT_CC_SUPPORT_GOLANG=y
#
# Additional supported languages:
#
CT_CC_LANG_CXX=y
# CT_CC_LANG_JAVA is not set
#
# Debug facilities
#
# CT_DEBUG_dmalloc is not set
# CT_DEBUG_duma is not set
# CT_DEBUG_gdb is not set
# CT_DEBUG_ltrace is not set
# CT_DEBUG_strace is not set
#
# Companion libraries
#
CT_COMPLIBS_NEEDED=y
CT_LIBICONV_NEEDED=y
CT_GETTEXT_NEEDED=y
CT_GMP_NEEDED=y
CT_MPFR_NEEDED=y
CT_ISL_NEEDED=y
CT_CLOOG_NEEDED=y
CT_MPC_NEEDED=y
CT_COMPLIBS=y
CT_LIBICONV=y
CT_GETTEXT=y
CT_GMP=y
CT_MPFR=y
CT_ISL=y
CT_CLOOG=y
CT_MPC=y
CT_LIBICONV_V_1_14=y
CT_LIBICONV_VERSION="1.14"
CT_GETTEXT_V_0_19_6=y
CT_GETTEXT_VERSION="0.19.6"
CT_GMP_V_6_0_0=y
# CT_GMP_V_5_1_3 is not set
# CT_GMP_V_5_1_1 is not set
# CT_GMP_V_5_0_2 is not set
# CT_GMP_V_5_0_1 is not set
# CT_GMP_V_4_3_2 is not set
# CT_GMP_V_4_3_1 is not set
# CT_GMP_V_4_3_0 is not set
CT_GMP_5_0_2_or_later=y
CT_GMP_VERSION="6.0.0a"
CT_MPFR_V_3_1_3=y
# CT_MPFR_V_3_1_2 is not set
# CT_MPFR_V_3_1_0 is not set
# CT_MPFR_V_3_0_1 is not set
# CT_MPFR_V_3_0_0 is not set
# CT_MPFR_V_2_4_2 is not set
# CT_MPFR_V_2_4_1 is not set
# CT_MPFR_V_2_4_0 is not set
CT_MPFR_VERSION="3.1.3"
CT_ISL_V_0_14=y
CT_ISL_V_0_14_or_later=y
CT_ISL_V_0_12_or_later=y
CT_ISL_VERSION="0.14"
CT_CLOOG_V_0_18_4=y
# CT_CLOOG_V_0_18_1 is not set
# CT_CLOOG_V_0_18_0 is not set
CT_CLOOG_VERSION="0.18.4"
CT_CLOOG_0_18_4_or_later=y
CT_CLOOG_0_18_or_later=y
CT_MPC_V_1_0_3=y
# CT_MPC_V_1_0_2 is not set
# CT_MPC_V_1_0_1 is not set
# CT_MPC_V_1_0 is not set
# CT_MPC_V_0_9 is not set
# CT_MPC_V_0_8_2 is not set
# CT_MPC_V_0_8_1 is not set
# CT_MPC_V_0_7 is not set
CT_MPC_VERSION="1.0.3"
#
# Companion libraries common options
#
# CT_COMPLIBS_CHECK is not set
#
# Companion tools
#
#
# READ HELP before you say 'Y' below !!!
#
# CT_COMP_TOOLS is not set

View File

@ -0,0 +1,561 @@
#
# Automatically generated file; DO NOT EDIT.
# Crosstool-NG Configuration
#
CT_CONFIGURE_has_make381=y
CT_MODULES=y
#
# Paths and misc options
#
#
# crosstool-NG behavior
#
# CT_OBSOLETE is not set
# CT_EXPERIMENTAL is not set
# CT_DEBUG_CT is not set
#
# Paths
#
CT_LOCAL_TARBALLS_DIR=""
CT_WORK_DIR="${CT_TOP_DIR}/.build"
CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
CT_INSTALL_DIR="${CT_PREFIX_DIR}"
CT_RM_RF_PREFIX_DIR=y
CT_REMOVE_DOCS=y
CT_INSTALL_DIR_RO=y
CT_STRIP_HOST_TOOLCHAIN_EXECUTABLES=y
# CT_STRIP_TARGET_TOOLCHAIN_EXECUTABLES is not set
#
# Downloading
#
# CT_FORBID_DOWNLOAD is not set
# CT_FORCE_DOWNLOAD is not set
CT_CONNECT_TIMEOUT=10
# CT_ONLY_DOWNLOAD is not set
# CT_USE_MIRROR is not set
#
# Extracting
#
# CT_FORCE_EXTRACT is not set
CT_OVERIDE_CONFIG_GUESS_SUB=y
# CT_ONLY_EXTRACT is not set
CT_PATCH_BUNDLED=y
# CT_PATCH_LOCAL is not set
# CT_PATCH_BUNDLED_LOCAL is not set
# CT_PATCH_LOCAL_BUNDLED is not set
# CT_PATCH_BUNDLED_FALLBACK_LOCAL is not set
# CT_PATCH_LOCAL_FALLBACK_BUNDLED is not set
# CT_PATCH_NONE is not set
CT_PATCH_ORDER="bundled"
#
# Build behavior
#
CT_PARALLEL_JOBS=0
CT_LOAD=""
CT_USE_PIPES=y
CT_EXTRA_CFLAGS_FOR_BUILD=""
CT_EXTRA_LDFLAGS_FOR_BUILD=""
CT_EXTRA_CFLAGS_FOR_HOST=""
CT_EXTRA_LDFLAGS_FOR_HOST=""
# CT_CONFIG_SHELL_SH is not set
# CT_CONFIG_SHELL_ASH is not set
CT_CONFIG_SHELL_BASH=y
# CT_CONFIG_SHELL_CUSTOM is not set
CT_CONFIG_SHELL="${bash}"
#
# Logging
#
# CT_LOG_ERROR is not set
# CT_LOG_WARN is not set
CT_LOG_INFO=y
# CT_LOG_EXTRA is not set
# CT_LOG_ALL is not set
# CT_LOG_DEBUG is not set
CT_LOG_LEVEL_MAX="INFO"
# CT_LOG_SEE_TOOLS_WARN is not set
CT_LOG_PROGRESS_BAR=y
CT_LOG_TO_FILE=y
CT_LOG_FILE_COMPRESS=y
#
# Target options
#
CT_ARCH="arm"
CT_ARCH_SUPPORTS_BOTH_MMU=y
CT_ARCH_SUPPORTS_BOTH_ENDIAN=y
CT_ARCH_SUPPORTS_32=y
CT_ARCH_SUPPORTS_64=y
CT_ARCH_SUPPORTS_WITH_ARCH=y
CT_ARCH_SUPPORTS_WITH_CPU=y
CT_ARCH_SUPPORTS_WITH_TUNE=y
CT_ARCH_SUPPORTS_WITH_FLOAT=y
CT_ARCH_SUPPORTS_WITH_FPU=y
CT_ARCH_SUPPORTS_SOFTFP=y
CT_ARCH_DEFAULT_HAS_MMU=y
CT_ARCH_DEFAULT_LE=y
CT_ARCH_DEFAULT_32=y
CT_ARCH_ARCH="armv6"
CT_ARCH_CPU=""
CT_ARCH_TUNE=""
CT_ARCH_FPU="vfp"
# CT_ARCH_BE is not set
CT_ARCH_LE=y
CT_ARCH_32=y
# CT_ARCH_64 is not set
CT_ARCH_BITNESS=32
CT_ARCH_FLOAT_HW=y
# CT_ARCH_FLOAT_SW is not set
CT_TARGET_CFLAGS=""
CT_TARGET_LDFLAGS=""
# CT_ARCH_alpha is not set
CT_ARCH_arm=y
# CT_ARCH_avr is not set
# CT_ARCH_m68k is not set
# CT_ARCH_mips is not set
# CT_ARCH_nios2 is not set
# CT_ARCH_powerpc is not set
# CT_ARCH_s390 is not set
# CT_ARCH_sh is not set
# CT_ARCH_sparc is not set
# CT_ARCH_x86 is not set
# CT_ARCH_xtensa is not set
CT_ARCH_alpha_AVAILABLE=y
CT_ARCH_arm_AVAILABLE=y
CT_ARCH_avr_AVAILABLE=y
CT_ARCH_m68k_AVAILABLE=y
CT_ARCH_microblaze_AVAILABLE=y
CT_ARCH_mips_AVAILABLE=y
CT_ARCH_nios2_AVAILABLE=y
CT_ARCH_powerpc_AVAILABLE=y
CT_ARCH_s390_AVAILABLE=y
CT_ARCH_sh_AVAILABLE=y
CT_ARCH_sparc_AVAILABLE=y
CT_ARCH_x86_AVAILABLE=y
CT_ARCH_xtensa_AVAILABLE=y
CT_ARCH_SUFFIX=""
#
# Generic target options
#
# CT_MULTILIB is not set
CT_ARCH_USE_MMU=y
CT_ARCH_ENDIAN="little"
#
# Target optimisations
#
CT_ARCH_EXCLUSIVE_WITH_CPU=y
# CT_ARCH_FLOAT_AUTO is not set
# CT_ARCH_FLOAT_SOFTFP is not set
CT_ARCH_FLOAT="hard"
#
# arm other options
#
CT_ARCH_ARM_MODE="arm"
CT_ARCH_ARM_MODE_ARM=y
# CT_ARCH_ARM_MODE_THUMB is not set
# CT_ARCH_ARM_INTERWORKING is not set
CT_ARCH_ARM_EABI_FORCE=y
CT_ARCH_ARM_EABI=y
CT_ARCH_ARM_TUPLE_USE_EABIHF=y
#
# Toolchain options
#
#
# General toolchain options
#
CT_FORCE_SYSROOT=y
CT_USE_SYSROOT=y
CT_SYSROOT_NAME="sysroot"
CT_SYSROOT_DIR_PREFIX=""
CT_WANTS_STATIC_LINK=y
# CT_STATIC_TOOLCHAIN is not set
CT_TOOLCHAIN_PKGVERSION=""
CT_TOOLCHAIN_BUGURL=""
#
# Tuple completion and aliasing
#
CT_TARGET_VENDOR="unknown"
CT_TARGET_ALIAS_SED_EXPR=""
CT_TARGET_ALIAS=""
#
# Toolchain type
#
CT_CROSS=y
# CT_CANADIAN is not set
CT_TOOLCHAIN_TYPE="cross"
#
# Build system
#
CT_BUILD=""
CT_BUILD_PREFIX=""
CT_BUILD_SUFFIX=""
#
# Misc options
#
# CT_TOOLCHAIN_ENABLE_NLS is not set
#
# Operating System
#
CT_KERNEL_SUPPORTS_SHARED_LIBS=y
CT_KERNEL="linux"
CT_KERNEL_VERSION="3.2.72"
# CT_KERNEL_bare_metal is not set
CT_KERNEL_linux=y
CT_KERNEL_bare_metal_AVAILABLE=y
CT_KERNEL_linux_AVAILABLE=y
# CT_KERNEL_V_4_3 is not set
# CT_KERNEL_V_4_2 is not set
# CT_KERNEL_V_4_1 is not set
# CT_KERNEL_V_3_18 is not set
# CT_KERNEL_V_3_14 is not set
# CT_KERNEL_V_3_12 is not set
# CT_KERNEL_V_3_10 is not set
# CT_KERNEL_V_3_4 is not set
CT_KERNEL_V_3_2=y
# CT_KERNEL_V_2_6_32 is not set
# CT_KERNEL_LINUX_CUSTOM is not set
CT_KERNEL_windows_AVAILABLE=y
#
# Common kernel options
#
CT_SHARED_LIBS=y
#
# linux other options
#
CT_KERNEL_LINUX_VERBOSITY_0=y
# CT_KERNEL_LINUX_VERBOSITY_1 is not set
# CT_KERNEL_LINUX_VERBOSITY_2 is not set
CT_KERNEL_LINUX_VERBOSE_LEVEL=0
CT_KERNEL_LINUX_INSTALL_CHECK=y
#
# Binary utilities
#
CT_ARCH_BINFMT_ELF=y
CT_BINUTILS="binutils"
CT_BINUTILS_binutils=y
#
# GNU binutils
#
# CT_CC_BINUTILS_SHOW_LINARO is not set
CT_BINUTILS_V_2_25_1=y
# CT_BINUTILS_V_2_25 is not set
# CT_BINUTILS_V_2_24 is not set
# CT_BINUTILS_V_2_23_2 is not set
# CT_BINUTILS_V_2_23_1 is not set
# CT_BINUTILS_V_2_22 is not set
# CT_BINUTILS_V_2_21_53 is not set
# CT_BINUTILS_V_2_21_1a is not set
# CT_BINUTILS_V_2_20_1a is not set
# CT_BINUTILS_V_2_19_1a is not set
# CT_BINUTILS_V_2_18a is not set
CT_BINUTILS_VERSION="2.25.1"
CT_BINUTILS_2_25_1_or_later=y
CT_BINUTILS_2_25_or_later=y
CT_BINUTILS_2_24_or_later=y
CT_BINUTILS_2_23_or_later=y
CT_BINUTILS_2_22_or_later=y
CT_BINUTILS_2_21_or_later=y
CT_BINUTILS_2_20_or_later=y
CT_BINUTILS_2_19_or_later=y
CT_BINUTILS_2_18_or_later=y
CT_BINUTILS_HAS_HASH_STYLE=y
CT_BINUTILS_HAS_GOLD=y
CT_BINUTILS_GOLD_SUPPORTS_ARCH=y
CT_BINUTILS_GOLD_SUPPORT=y
CT_BINUTILS_HAS_PLUGINS=y
CT_BINUTILS_HAS_PKGVERSION_BUGURL=y
CT_BINUTILS_FORCE_LD_BFD=y
CT_BINUTILS_LINKER_LD=y
# CT_BINUTILS_LINKER_LD_GOLD is not set
# CT_BINUTILS_LINKER_GOLD_LD is not set
CT_BINUTILS_LINKERS_LIST="ld"
CT_BINUTILS_LINKER_DEFAULT="bfd"
# CT_BINUTILS_PLUGINS is not set
CT_BINUTILS_EXTRA_CONFIG_ARRAY=""
# CT_BINUTILS_FOR_TARGET is not set
#
# binutils other options
#
#
# C-library
#
CT_LIBC="glibc"
CT_LIBC_VERSION="2.16.0"
CT_LIBC_glibc=y
# CT_LIBC_musl is not set
# CT_LIBC_uClibc is not set
CT_LIBC_avr_libc_AVAILABLE=y
CT_LIBC_glibc_AVAILABLE=y
CT_THREADS="nptl"
# CT_CC_GLIBC_SHOW_LINARO is not set
# CT_LIBC_GLIBC_V_2_22 is not set
# CT_LIBC_GLIBC_V_2_21 is not set
# CT_LIBC_GLIBC_V_2_20 is not set
# CT_LIBC_GLIBC_V_2_19 is not set
# CT_LIBC_GLIBC_V_2_18 is not set
# CT_LIBC_GLIBC_V_2_17 is not set
CT_LIBC_GLIBC_V_2_16_0=y
# CT_LIBC_GLIBC_V_2_15 is not set
# CT_LIBC_GLIBC_V_2_14_1 is not set
# CT_LIBC_GLIBC_V_2_14 is not set
# CT_LIBC_GLIBC_V_2_13 is not set
# CT_LIBC_GLIBC_V_2_12_2 is not set
# CT_LIBC_GLIBC_V_2_12_1 is not set
# CT_LIBC_GLIBC_V_2_11_1 is not set
# CT_LIBC_GLIBC_V_2_11 is not set
# CT_LIBC_GLIBC_V_2_10_1 is not set
# CT_LIBC_GLIBC_V_2_9 is not set
# CT_LIBC_GLIBC_V_2_8 is not set
CT_LIBC_mingw_AVAILABLE=y
CT_LIBC_musl_AVAILABLE=y
CT_LIBC_newlib_AVAILABLE=y
CT_LIBC_none_AVAILABLE=y
CT_LIBC_uClibc_AVAILABLE=y
CT_LIBC_SUPPORT_THREADS_ANY=y
CT_LIBC_SUPPORT_THREADS_NATIVE=y
#
# Common C library options
#
CT_THREADS_NATIVE=y
CT_LIBC_XLDD=y
#
# glibc other options
#
CT_LIBC_GLIBC_PORTS_EXTERNAL=y
CT_LIBC_GLIBC_MAY_FORCE_PORTS=y
CT_LIBC_glibc_familly=y
CT_LIBC_GLIBC_EXTRA_CONFIG_ARRAY=""
CT_LIBC_GLIBC_CONFIGPARMS=""
CT_LIBC_GLIBC_EXTRA_CFLAGS=""
CT_LIBC_EXTRA_CC_ARGS=""
# CT_LIBC_DISABLE_VERSIONING is not set
CT_LIBC_OLDEST_ABI=""
CT_LIBC_GLIBC_FORCE_UNWIND=y
CT_LIBC_GLIBC_USE_PORTS=y
CT_LIBC_ADDONS_LIST=""
#
# WARNING !!!
#
#
# For glibc >= 2.8, it can happen that the tarballs
#
#
# for the addons are not available for download.
#
#
# If that happens, bad luck... Try a previous version
#
#
# or try again later... :-(
#
# CT_LIBC_LOCALES is not set
# CT_LIBC_GLIBC_KERNEL_VERSION_NONE is not set
CT_LIBC_GLIBC_KERNEL_VERSION_AS_HEADERS=y
# CT_LIBC_GLIBC_KERNEL_VERSION_CHOSEN is not set
CT_LIBC_GLIBC_MIN_KERNEL="3.2.72"
#
# C compiler
#
CT_CC="gcc"
CT_CC_CORE_PASSES_NEEDED=y
CT_CC_CORE_PASS_1_NEEDED=y
CT_CC_CORE_PASS_2_NEEDED=y
CT_CC_gcc=y
# CT_CC_GCC_SHOW_LINARO is not set
# CT_CC_GCC_V_5_2_0 is not set
CT_CC_GCC_V_4_9_3=y
# CT_CC_GCC_V_4_8_5 is not set
# CT_CC_GCC_V_4_7_4 is not set
# CT_CC_GCC_V_4_6_4 is not set
# CT_CC_GCC_V_4_5_4 is not set
# CT_CC_GCC_V_4_4_7 is not set
# CT_CC_GCC_V_4_3_6 is not set
# CT_CC_GCC_V_4_2_4 is not set
CT_CC_GCC_4_2_or_later=y
CT_CC_GCC_4_3_or_later=y
CT_CC_GCC_4_4_or_later=y
CT_CC_GCC_4_5_or_later=y
CT_CC_GCC_4_6_or_later=y
CT_CC_GCC_4_7_or_later=y
CT_CC_GCC_4_8_or_later=y
CT_CC_GCC_4_9=y
CT_CC_GCC_4_9_or_later=y
CT_CC_GCC_HAS_GRAPHITE=y
CT_CC_GCC_USE_GRAPHITE=y
CT_CC_GCC_HAS_LTO=y
CT_CC_GCC_USE_LTO=y
CT_CC_GCC_HAS_PKGVERSION_BUGURL=y
CT_CC_GCC_HAS_BUILD_ID=y
CT_CC_GCC_HAS_LNK_HASH_STYLE=y
CT_CC_GCC_USE_GMP_MPFR=y
CT_CC_GCC_USE_MPC=y
CT_CC_GCC_HAS_LIBQUADMATH=y
CT_CC_GCC_HAS_LIBSANITIZER=y
CT_CC_GCC_VERSION="4.9.3"
# CT_CC_LANG_FORTRAN is not set
CT_CC_GCC_ENABLE_CXX_FLAGS=""
CT_CC_GCC_CORE_EXTRA_CONFIG_ARRAY=""
CT_CC_GCC_EXTRA_CONFIG_ARRAY=""
CT_CC_GCC_EXTRA_ENV_ARRAY=""
CT_CC_GCC_STATIC_LIBSTDCXX=y
# CT_CC_GCC_SYSTEM_ZLIB is not set
#
# Optimisation features
#
#
# Settings for libraries running on target
#
CT_CC_GCC_ENABLE_TARGET_OPTSPACE=y
# CT_CC_GCC_LIBMUDFLAP is not set
# CT_CC_GCC_LIBGOMP is not set
# CT_CC_GCC_LIBSSP is not set
# CT_CC_GCC_LIBQUADMATH is not set
# CT_CC_GCC_LIBSANITIZER is not set
#
# Misc. obscure options.
#
CT_CC_CXA_ATEXIT=y
# CT_CC_GCC_DISABLE_PCH is not set
CT_CC_GCC_SJLJ_EXCEPTIONS=m
CT_CC_GCC_LDBL_128=m
# CT_CC_GCC_BUILD_ID is not set
CT_CC_GCC_LNK_HASH_STYLE_DEFAULT=y
# CT_CC_GCC_LNK_HASH_STYLE_SYSV is not set
# CT_CC_GCC_LNK_HASH_STYLE_GNU is not set
# CT_CC_GCC_LNK_HASH_STYLE_BOTH is not set
CT_CC_GCC_LNK_HASH_STYLE=""
CT_CC_GCC_DEC_FLOAT_AUTO=y
# CT_CC_GCC_DEC_FLOAT_BID is not set
# CT_CC_GCC_DEC_FLOAT_DPD is not set
# CT_CC_GCC_DEC_FLOATS_NO is not set
CT_CC_SUPPORT_CXX=y
CT_CC_SUPPORT_FORTRAN=y
CT_CC_SUPPORT_JAVA=y
CT_CC_SUPPORT_ADA=y
CT_CC_SUPPORT_OBJC=y
CT_CC_SUPPORT_OBJCXX=y
CT_CC_SUPPORT_GOLANG=y
#
# Additional supported languages:
#
CT_CC_LANG_CXX=y
# CT_CC_LANG_JAVA is not set
#
# Debug facilities
#
# CT_DEBUG_dmalloc is not set
# CT_DEBUG_duma is not set
# CT_DEBUG_gdb is not set
# CT_DEBUG_ltrace is not set
# CT_DEBUG_strace is not set
#
# Companion libraries
#
CT_COMPLIBS_NEEDED=y
CT_LIBICONV_NEEDED=y
CT_GETTEXT_NEEDED=y
CT_GMP_NEEDED=y
CT_MPFR_NEEDED=y
CT_ISL_NEEDED=y
CT_CLOOG_NEEDED=y
CT_MPC_NEEDED=y
CT_COMPLIBS=y
CT_LIBICONV=y
CT_GETTEXT=y
CT_GMP=y
CT_MPFR=y
CT_ISL=y
CT_CLOOG=y
CT_MPC=y
CT_LIBICONV_V_1_14=y
CT_LIBICONV_VERSION="1.14"
CT_GETTEXT_V_0_19_6=y
CT_GETTEXT_VERSION="0.19.6"
CT_GMP_V_6_0_0=y
# CT_GMP_V_5_1_3 is not set
# CT_GMP_V_5_1_1 is not set
# CT_GMP_V_5_0_2 is not set
# CT_GMP_V_5_0_1 is not set
# CT_GMP_V_4_3_2 is not set
# CT_GMP_V_4_3_1 is not set
# CT_GMP_V_4_3_0 is not set
CT_GMP_5_0_2_or_later=y
CT_GMP_VERSION="6.0.0a"
CT_MPFR_V_3_1_3=y
# CT_MPFR_V_3_1_2 is not set
# CT_MPFR_V_3_1_0 is not set
# CT_MPFR_V_3_0_1 is not set
# CT_MPFR_V_3_0_0 is not set
# CT_MPFR_V_2_4_2 is not set
# CT_MPFR_V_2_4_1 is not set
# CT_MPFR_V_2_4_0 is not set
CT_MPFR_VERSION="3.1.3"
CT_ISL_V_0_14=y
CT_ISL_V_0_14_or_later=y
CT_ISL_V_0_12_or_later=y
CT_ISL_VERSION="0.14"
CT_CLOOG_V_0_18_4=y
# CT_CLOOG_V_0_18_1 is not set
# CT_CLOOG_V_0_18_0 is not set
CT_CLOOG_VERSION="0.18.4"
CT_CLOOG_0_18_4_or_later=y
CT_CLOOG_0_18_or_later=y
CT_MPC_V_1_0_3=y
# CT_MPC_V_1_0_2 is not set
# CT_MPC_V_1_0_1 is not set
# CT_MPC_V_1_0 is not set
# CT_MPC_V_0_9 is not set
# CT_MPC_V_0_8_2 is not set
# CT_MPC_V_0_8_1 is not set
# CT_MPC_V_0_7 is not set
CT_MPC_VERSION="1.0.3"
#
# Companion libraries common options
#
# CT_COMPLIBS_CHECK is not set
#
# Companion tools
#
#
# READ HELP before you say 'Y' below !!!
#
# CT_COMP_TOOLS is not set

View File

@ -0,0 +1,45 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
hide_output() {
set +x
on_err="
echo ERROR: An error was encountered with the build.
cat /tmp/build.log
exit 1
"
trap "$on_err" ERR
bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
PING_LOOP_PID=$!
$@ &> /tmp/build.log
rm /tmp/build.log
trap - ERR
kill $PING_LOOP_PID
set -x
}
mkdir build
cd build
cp ../arm-linux-gnueabi.config .config
ct-ng oldconfig
hide_output ct-ng build
cd ..
rm -rf build
mkdir build
cd build
cp ../arm-linux-gnueabihf.config .config
ct-ng oldconfig
hide_output ct-ng build
cd ..
rm -rf build

View File

@ -1,19 +1,29 @@
FROM ubuntu:16.04
RUN apt-get update && apt-get install -y --no-install-recommends \
g++ \
make \
file \
curl \
automake \
bison \
bzip2 \
ca-certificates \
python2.7 \
git \
cmake \
sudo \
curl \
file \
flex \
g++ \
gawk \
gdb \
xz-utils \
g++-arm-linux-gnueabihf \
g++-aarch64-linux-gnu
git \
gperf \
help2man \
libncurses-dev \
libtool-bin \
make \
patch \
python2.7 \
sudo \
texinfo \
wget \
xz-utils
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
@ -24,11 +34,48 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS \
--host=armv7-unknown-linux-gnueabihf,aarch64-unknown-linux-gnu
ENV SCRIPT \
python2.7 ../x.py dist \
--host armv7-unknown-linux-gnueabihf \
--target armv7-unknown-linux-gnueabihf \
--host aarch64-unknown-linux-gnu \
--target aarch64-unknown-linux-gnu
# Ubuntu 16.04 (this container) ships with make 4, but something in the
# toolchains we build below chokes on that, so go back to make 3
RUN curl https://ftp.gnu.org/gnu/make/make-3.81.tar.gz | tar xzf - && \
cd make-3.81 && \
./configure --prefix=/usr && \
make && \
make install && \
cd .. && \
rm -rf make-3.81
RUN curl http://crosstool-ng.org/download/crosstool-ng/crosstool-ng-1.22.0.tar.bz2 | \
tar xjf - && \
cd crosstool-ng && \
./configure --prefix=/usr/local && \
make -j$(nproc) && \
make install && \
cd .. && \
rm -rf crosstool-ng
RUN groupadd -r rustbuild && useradd -m -r -g rustbuild rustbuild
RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
USER rustbuild
WORKDIR /tmp
COPY armv7-linux-gnueabihf.config /tmp/
COPY armv7-linux-gnueabihf.config aarch64-linux-gnu.config build-toolchains.sh /tmp/
RUN ./build-toolchains.sh
USER root
ENV PATH=$PATH:/x-tools/aarch64-unknown-linux-gnueabi/bin
ENV PATH=$PATH:/x-tools/armv7-unknown-linux-gnueabihf/bin
ENV CC_aarch64_unknown_linux_gnu=aarch64-unknown-linux-gnueabi-gcc \
AR_aarch64_unknown_linux_gnu=aarch64-unknown-linux-gnueabi-ar \
CXX_aarch64_unknown_linux_gnu=aarch64-unknown-linux-gnueabi-g++ \
CC_armv7_unknown_linux_gnueabihf=armv7-unknown-linux-gnueabihf-gcc \
AR_armv7_unknown_linux_gnueabihf=armv7-unknown-linux-gnueabihf-ar \
CXX_armv7_unknown_linux_gnueabihf=armv7-unknown-linux-gnueabihf-g++
ENV HOSTS=armv7-unknown-linux-gnueabihf
ENV HOSTS=$HOSTS,aarch64-unknown-linux-gnu
ENV RUST_CONFIGURE_ARGS --host=$HOSTS
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS

View File

@ -0,0 +1,551 @@
#
# Automatically generated file; DO NOT EDIT.
# Crosstool-NG Configuration
#
CT_CONFIGURE_has_make381=y
CT_CONFIGURE_has_xz=y
CT_MODULES=y
#
# Paths and misc options
#
#
# crosstool-NG behavior
#
# CT_OBSOLETE is not set
# CT_EXPERIMENTAL is not set
# CT_DEBUG_CT is not set
#
# Paths
#
CT_LOCAL_TARBALLS_DIR=""
CT_WORK_DIR="${CT_TOP_DIR}/.build"
CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
CT_INSTALL_DIR="${CT_PREFIX_DIR}"
CT_RM_RF_PREFIX_DIR=y
CT_REMOVE_DOCS=y
CT_INSTALL_DIR_RO=y
CT_STRIP_HOST_TOOLCHAIN_EXECUTABLES=y
# CT_STRIP_TARGET_TOOLCHAIN_EXECUTABLES is not set
#
# Downloading
#
# CT_FORBID_DOWNLOAD is not set
# CT_FORCE_DOWNLOAD is not set
CT_CONNECT_TIMEOUT=10
# CT_ONLY_DOWNLOAD is not set
# CT_USE_MIRROR is not set
#
# Extracting
#
# CT_FORCE_EXTRACT is not set
CT_OVERIDE_CONFIG_GUESS_SUB=y
# CT_ONLY_EXTRACT is not set
CT_PATCH_BUNDLED=y
# CT_PATCH_LOCAL is not set
# CT_PATCH_BUNDLED_LOCAL is not set
# CT_PATCH_LOCAL_BUNDLED is not set
# CT_PATCH_BUNDLED_FALLBACK_LOCAL is not set
# CT_PATCH_LOCAL_FALLBACK_BUNDLED is not set
# CT_PATCH_NONE is not set
CT_PATCH_ORDER="bundled"
#
# Build behavior
#
CT_PARALLEL_JOBS=0
CT_LOAD=""
CT_USE_PIPES=y
CT_EXTRA_CFLAGS_FOR_BUILD=""
CT_EXTRA_LDFLAGS_FOR_BUILD=""
CT_EXTRA_CFLAGS_FOR_HOST=""
CT_EXTRA_LDFLAGS_FOR_HOST=""
# CT_CONFIG_SHELL_SH is not set
# CT_CONFIG_SHELL_ASH is not set
CT_CONFIG_SHELL_BASH=y
# CT_CONFIG_SHELL_CUSTOM is not set
CT_CONFIG_SHELL="${bash}"
#
# Logging
#
# CT_LOG_ERROR is not set
# CT_LOG_WARN is not set
CT_LOG_INFO=y
# CT_LOG_EXTRA is not set
# CT_LOG_ALL is not set
# CT_LOG_DEBUG is not set
CT_LOG_LEVEL_MAX="INFO"
# CT_LOG_SEE_TOOLS_WARN is not set
CT_LOG_PROGRESS_BAR=y
CT_LOG_TO_FILE=y
CT_LOG_FILE_COMPRESS=y
#
# Target options
#
CT_ARCH="arm"
CT_ARCH_SUPPORTS_BOTH_MMU=y
CT_ARCH_SUPPORTS_BOTH_ENDIAN=y
CT_ARCH_SUPPORTS_32=y
CT_ARCH_SUPPORTS_64=y
CT_ARCH_SUPPORTS_WITH_ARCH=y
CT_ARCH_SUPPORTS_WITH_CPU=y
CT_ARCH_SUPPORTS_WITH_TUNE=y
CT_ARCH_DEFAULT_HAS_MMU=y
CT_ARCH_DEFAULT_LE=y
CT_ARCH_DEFAULT_32=y
CT_ARCH_ARCH=""
CT_ARCH_CPU=""
CT_ARCH_TUNE=""
# CT_ARCH_BE is not set
CT_ARCH_LE=y
# CT_ARCH_32 is not set
CT_ARCH_64=y
CT_ARCH_BITNESS=64
CT_TARGET_CFLAGS=""
CT_TARGET_LDFLAGS=""
# CT_ARCH_alpha is not set
CT_ARCH_arm=y
# CT_ARCH_avr is not set
# CT_ARCH_m68k is not set
# CT_ARCH_mips is not set
# CT_ARCH_nios2 is not set
# CT_ARCH_powerpc is not set
# CT_ARCH_s390 is not set
# CT_ARCH_sh is not set
# CT_ARCH_sparc is not set
# CT_ARCH_x86 is not set
# CT_ARCH_xtensa is not set
CT_ARCH_alpha_AVAILABLE=y
CT_ARCH_arm_AVAILABLE=y
CT_ARCH_avr_AVAILABLE=y
CT_ARCH_m68k_AVAILABLE=y
CT_ARCH_microblaze_AVAILABLE=y
CT_ARCH_mips_AVAILABLE=y
CT_ARCH_nios2_AVAILABLE=y
CT_ARCH_powerpc_AVAILABLE=y
CT_ARCH_s390_AVAILABLE=y
CT_ARCH_sh_AVAILABLE=y
CT_ARCH_sparc_AVAILABLE=y
CT_ARCH_x86_AVAILABLE=y
CT_ARCH_xtensa_AVAILABLE=y
CT_ARCH_SUFFIX=""
#
# Generic target options
#
# CT_MULTILIB is not set
CT_ARCH_USE_MMU=y
CT_ARCH_ENDIAN="little"
#
# Target optimisations
#
CT_ARCH_EXCLUSIVE_WITH_CPU=y
CT_ARCH_FLOAT=""
#
# arm other options
#
CT_ARCH_ARM_MODE="arm"
CT_ARCH_ARM_MODE_ARM=y
# CT_ARCH_ARM_MODE_THUMB is not set
# CT_ARCH_ARM_INTERWORKING is not set
CT_ARCH_ARM_EABI_FORCE=y
CT_ARCH_ARM_EABI=y
#
# Toolchain options
#
#
# General toolchain options
#
CT_FORCE_SYSROOT=y
CT_USE_SYSROOT=y
CT_SYSROOT_NAME="sysroot"
CT_SYSROOT_DIR_PREFIX=""
CT_WANTS_STATIC_LINK=y
# CT_STATIC_TOOLCHAIN is not set
CT_TOOLCHAIN_PKGVERSION=""
CT_TOOLCHAIN_BUGURL=""
#
# Tuple completion and aliasing
#
CT_TARGET_VENDOR="unknown"
CT_TARGET_ALIAS_SED_EXPR=""
CT_TARGET_ALIAS=""
#
# Toolchain type
#
CT_CROSS=y
# CT_CANADIAN is not set
CT_TOOLCHAIN_TYPE="cross"
#
# Build system
#
CT_BUILD=""
CT_BUILD_PREFIX=""
CT_BUILD_SUFFIX=""
#
# Misc options
#
# CT_TOOLCHAIN_ENABLE_NLS is not set
#
# Operating System
#
CT_KERNEL_SUPPORTS_SHARED_LIBS=y
CT_KERNEL="linux"
CT_KERNEL_VERSION="4.2.6"
# CT_KERNEL_bare_metal is not set
CT_KERNEL_linux=y
CT_KERNEL_bare_metal_AVAILABLE=y
CT_KERNEL_linux_AVAILABLE=y
# CT_KERNEL_V_4_3 is not set
CT_KERNEL_V_4_2=y
# CT_KERNEL_V_4_1 is not set
# CT_KERNEL_V_3_18 is not set
# CT_KERNEL_V_3_14 is not set
# CT_KERNEL_V_3_12 is not set
# CT_KERNEL_V_3_10 is not set
# CT_KERNEL_V_3_4 is not set
# CT_KERNEL_V_3_2 is not set
# CT_KERNEL_V_2_6_32 is not set
# CT_KERNEL_LINUX_CUSTOM is not set
CT_KERNEL_windows_AVAILABLE=y
#
# Common kernel options
#
CT_SHARED_LIBS=y
#
# linux other options
#
CT_KERNEL_LINUX_VERBOSITY_0=y
# CT_KERNEL_LINUX_VERBOSITY_1 is not set
# CT_KERNEL_LINUX_VERBOSITY_2 is not set
CT_KERNEL_LINUX_VERBOSE_LEVEL=0
CT_KERNEL_LINUX_INSTALL_CHECK=y
#
# Binary utilities
#
CT_ARCH_BINFMT_ELF=y
CT_BINUTILS="binutils"
CT_BINUTILS_binutils=y
#
# GNU binutils
#
# CT_CC_BINUTILS_SHOW_LINARO is not set
CT_BINUTILS_V_2_25_1=y
# CT_BINUTILS_V_2_25 is not set
# CT_BINUTILS_V_2_24 is not set
# CT_BINUTILS_V_2_23_2 is not set
# CT_BINUTILS_V_2_23_1 is not set
# CT_BINUTILS_V_2_22 is not set
# CT_BINUTILS_V_2_21_53 is not set
# CT_BINUTILS_V_2_21_1a is not set
# CT_BINUTILS_V_2_20_1a is not set
# CT_BINUTILS_V_2_19_1a is not set
# CT_BINUTILS_V_2_18a is not set
CT_BINUTILS_VERSION="2.25.1"
CT_BINUTILS_2_25_1_or_later=y
CT_BINUTILS_2_25_or_later=y
CT_BINUTILS_2_24_or_later=y
CT_BINUTILS_2_23_or_later=y
CT_BINUTILS_2_22_or_later=y
CT_BINUTILS_2_21_or_later=y
CT_BINUTILS_2_20_or_later=y
CT_BINUTILS_2_19_or_later=y
CT_BINUTILS_2_18_or_later=y
CT_BINUTILS_HAS_HASH_STYLE=y
CT_BINUTILS_HAS_GOLD=y
CT_BINUTILS_GOLD_SUPPORTS_ARCH=y
CT_BINUTILS_GOLD_SUPPORT=y
CT_BINUTILS_HAS_PLUGINS=y
CT_BINUTILS_HAS_PKGVERSION_BUGURL=y
CT_BINUTILS_FORCE_LD_BFD=y
CT_BINUTILS_LINKER_LD=y
# CT_BINUTILS_LINKER_LD_GOLD is not set
# CT_BINUTILS_LINKER_GOLD_LD is not set
CT_BINUTILS_LINKERS_LIST="ld"
CT_BINUTILS_LINKER_DEFAULT="bfd"
# CT_BINUTILS_PLUGINS is not set
CT_BINUTILS_EXTRA_CONFIG_ARRAY=""
# CT_BINUTILS_FOR_TARGET is not set
#
# binutils other options
#
#
# C-library
#
CT_LIBC="glibc"
CT_LIBC_VERSION="2.17"
CT_LIBC_glibc=y
# CT_LIBC_musl is not set
# CT_LIBC_uClibc is not set
CT_LIBC_avr_libc_AVAILABLE=y
CT_LIBC_glibc_AVAILABLE=y
CT_THREADS="nptl"
# CT_CC_GLIBC_SHOW_LINARO is not set
# CT_LIBC_GLIBC_V_2_22 is not set
# CT_LIBC_GLIBC_V_2_21 is not set
# CT_LIBC_GLIBC_V_2_20 is not set
# CT_LIBC_GLIBC_V_2_19 is not set
# CT_LIBC_GLIBC_V_2_18 is not set
CT_LIBC_GLIBC_V_2_17=y
# CT_LIBC_GLIBC_V_2_16_0 is not set
# CT_LIBC_GLIBC_V_2_15 is not set
# CT_LIBC_GLIBC_V_2_14_1 is not set
# CT_LIBC_GLIBC_V_2_14 is not set
# CT_LIBC_GLIBC_V_2_13 is not set
# CT_LIBC_GLIBC_V_2_12_2 is not set
# CT_LIBC_GLIBC_V_2_12_1 is not set
# CT_LIBC_GLIBC_V_2_11_1 is not set
# CT_LIBC_GLIBC_V_2_11 is not set
# CT_LIBC_GLIBC_V_2_10_1 is not set
# CT_LIBC_GLIBC_V_2_9 is not set
# CT_LIBC_GLIBC_V_2_8 is not set
CT_LIBC_GLIBC_2_17_or_later=y
CT_LIBC_mingw_AVAILABLE=y
CT_LIBC_musl_AVAILABLE=y
CT_LIBC_newlib_AVAILABLE=y
CT_LIBC_none_AVAILABLE=y
CT_LIBC_uClibc_AVAILABLE=y
CT_LIBC_SUPPORT_THREADS_ANY=y
CT_LIBC_SUPPORT_THREADS_NATIVE=y
#
# Common C library options
#
CT_THREADS_NATIVE=y
CT_LIBC_XLDD=y
#
# glibc other options
#
# CT_LIBC_GLIBC_PORTS_EXTERNAL is not set
CT_LIBC_GLIBC_MAY_FORCE_PORTS=y
CT_LIBC_glibc_familly=y
CT_LIBC_GLIBC_EXTRA_CONFIG_ARRAY=""
CT_LIBC_GLIBC_CONFIGPARMS=""
CT_LIBC_GLIBC_EXTRA_CFLAGS=""
CT_LIBC_EXTRA_CC_ARGS=""
# CT_LIBC_DISABLE_VERSIONING is not set
CT_LIBC_OLDEST_ABI=""
CT_LIBC_GLIBC_FORCE_UNWIND=y
CT_LIBC_GLIBC_USE_PORTS=y
CT_LIBC_ADDONS_LIST=""
#
# WARNING !!!
#
#
# For glibc >= 2.8, it can happen that the tarballs
#
#
# for the addons are not available for download.
#
#
# If that happens, bad luck... Try a previous version
#
#
# or try again later... :-(
#
# CT_LIBC_LOCALES is not set
# CT_LIBC_GLIBC_KERNEL_VERSION_NONE is not set
CT_LIBC_GLIBC_KERNEL_VERSION_AS_HEADERS=y
# CT_LIBC_GLIBC_KERNEL_VERSION_CHOSEN is not set
CT_LIBC_GLIBC_MIN_KERNEL="4.2.6"
#
# C compiler
#
CT_CC="gcc"
CT_CC_CORE_PASSES_NEEDED=y
CT_CC_CORE_PASS_1_NEEDED=y
CT_CC_CORE_PASS_2_NEEDED=y
CT_CC_gcc=y
# CT_CC_GCC_SHOW_LINARO is not set
CT_CC_GCC_V_5_2_0=y
# CT_CC_GCC_V_4_9_3 is not set
# CT_CC_GCC_V_4_8_5 is not set
# CT_CC_GCC_V_4_7_4 is not set
# CT_CC_GCC_V_4_6_4 is not set
# CT_CC_GCC_V_4_5_4 is not set
# CT_CC_GCC_V_4_4_7 is not set
# CT_CC_GCC_V_4_3_6 is not set
# CT_CC_GCC_V_4_2_4 is not set
CT_CC_GCC_4_2_or_later=y
CT_CC_GCC_4_3_or_later=y
CT_CC_GCC_4_4_or_later=y
CT_CC_GCC_4_5_or_later=y
CT_CC_GCC_4_6_or_later=y
CT_CC_GCC_4_7_or_later=y
CT_CC_GCC_4_8_or_later=y
CT_CC_GCC_4_9_or_later=y
CT_CC_GCC_5=y
CT_CC_GCC_5_or_later=y
CT_CC_GCC_HAS_GRAPHITE=y
CT_CC_GCC_USE_GRAPHITE=y
CT_CC_GCC_HAS_LTO=y
CT_CC_GCC_USE_LTO=y
CT_CC_GCC_HAS_PKGVERSION_BUGURL=y
CT_CC_GCC_HAS_BUILD_ID=y
CT_CC_GCC_HAS_LNK_HASH_STYLE=y
CT_CC_GCC_USE_GMP_MPFR=y
CT_CC_GCC_USE_MPC=y
CT_CC_GCC_HAS_LIBQUADMATH=y
CT_CC_GCC_HAS_LIBSANITIZER=y
CT_CC_GCC_VERSION="5.2.0"
# CT_CC_LANG_FORTRAN is not set
CT_CC_GCC_ENABLE_CXX_FLAGS=""
CT_CC_GCC_CORE_EXTRA_CONFIG_ARRAY=""
CT_CC_GCC_EXTRA_CONFIG_ARRAY=""
CT_CC_GCC_EXTRA_ENV_ARRAY=""
CT_CC_GCC_STATIC_LIBSTDCXX=y
# CT_CC_GCC_SYSTEM_ZLIB is not set
#
# Optimisation features
#
#
# Settings for libraries running on target
#
CT_CC_GCC_ENABLE_TARGET_OPTSPACE=y
# CT_CC_GCC_LIBMUDFLAP is not set
# CT_CC_GCC_LIBGOMP is not set
# CT_CC_GCC_LIBSSP is not set
# CT_CC_GCC_LIBQUADMATH is not set
# CT_CC_GCC_LIBSANITIZER is not set
#
# Misc. obscure options.
#
CT_CC_CXA_ATEXIT=y
# CT_CC_GCC_DISABLE_PCH is not set
CT_CC_GCC_SJLJ_EXCEPTIONS=m
CT_CC_GCC_LDBL_128=m
# CT_CC_GCC_BUILD_ID is not set
CT_CC_GCC_LNK_HASH_STYLE_DEFAULT=y
# CT_CC_GCC_LNK_HASH_STYLE_SYSV is not set
# CT_CC_GCC_LNK_HASH_STYLE_GNU is not set
# CT_CC_GCC_LNK_HASH_STYLE_BOTH is not set
CT_CC_GCC_LNK_HASH_STYLE=""
CT_CC_GCC_DEC_FLOAT_AUTO=y
# CT_CC_GCC_DEC_FLOAT_BID is not set
# CT_CC_GCC_DEC_FLOAT_DPD is not set
# CT_CC_GCC_DEC_FLOATS_NO is not set
CT_CC_SUPPORT_CXX=y
CT_CC_SUPPORT_FORTRAN=y
CT_CC_SUPPORT_JAVA=y
CT_CC_SUPPORT_ADA=y
CT_CC_SUPPORT_OBJC=y
CT_CC_SUPPORT_OBJCXX=y
CT_CC_SUPPORT_GOLANG=y
#
# Additional supported languages:
#
CT_CC_LANG_CXX=y
# CT_CC_LANG_JAVA is not set
#
# Debug facilities
#
# CT_DEBUG_dmalloc is not set
# CT_DEBUG_duma is not set
# CT_DEBUG_gdb is not set
# CT_DEBUG_ltrace is not set
# CT_DEBUG_strace is not set
#
# Companion libraries
#
CT_COMPLIBS_NEEDED=y
CT_LIBICONV_NEEDED=y
CT_GETTEXT_NEEDED=y
CT_GMP_NEEDED=y
CT_MPFR_NEEDED=y
CT_ISL_NEEDED=y
CT_MPC_NEEDED=y
CT_COMPLIBS=y
CT_LIBICONV=y
CT_GETTEXT=y
CT_GMP=y
CT_MPFR=y
CT_ISL=y
CT_MPC=y
CT_LIBICONV_V_1_14=y
CT_LIBICONV_VERSION="1.14"
CT_GETTEXT_V_0_19_6=y
CT_GETTEXT_VERSION="0.19.6"
CT_GMP_V_6_0_0=y
# CT_GMP_V_5_1_3 is not set
# CT_GMP_V_5_1_1 is not set
# CT_GMP_V_5_0_2 is not set
# CT_GMP_V_5_0_1 is not set
# CT_GMP_V_4_3_2 is not set
# CT_GMP_V_4_3_1 is not set
# CT_GMP_V_4_3_0 is not set
CT_GMP_5_0_2_or_later=y
CT_GMP_VERSION="6.0.0a"
CT_MPFR_V_3_1_3=y
# CT_MPFR_V_3_1_2 is not set
# CT_MPFR_V_3_1_0 is not set
# CT_MPFR_V_3_0_1 is not set
# CT_MPFR_V_3_0_0 is not set
# CT_MPFR_V_2_4_2 is not set
# CT_MPFR_V_2_4_1 is not set
# CT_MPFR_V_2_4_0 is not set
CT_MPFR_VERSION="3.1.3"
CT_ISL_V_0_14=y
# CT_ISL_V_0_12_2 is not set
CT_ISL_V_0_14_or_later=y
CT_ISL_V_0_12_or_later=y
CT_ISL_VERSION="0.14"
# CT_CLOOG_V_0_18_4 is not set
# CT_CLOOG_V_0_18_1 is not set
# CT_CLOOG_V_0_18_0 is not set
CT_MPC_V_1_0_3=y
# CT_MPC_V_1_0_2 is not set
# CT_MPC_V_1_0_1 is not set
# CT_MPC_V_1_0 is not set
# CT_MPC_V_0_9 is not set
# CT_MPC_V_0_8_2 is not set
# CT_MPC_V_0_8_1 is not set
# CT_MPC_V_0_7 is not set
CT_MPC_VERSION="1.0.3"
#
# Companion libraries common options
#
# CT_COMPLIBS_CHECK is not set
#
# Companion tools
#
#
# READ HELP before you say 'Y' below !!!
#
# CT_COMP_TOOLS is not set

View File

@ -0,0 +1,569 @@
#
# Automatically generated file; DO NOT EDIT.
# Crosstool-NG Configuration
#
CT_CONFIGURE_has_make381=y
CT_MODULES=y
#
# Paths and misc options
#
#
# crosstool-NG behavior
#
# CT_OBSOLETE is not set
# CT_EXPERIMENTAL is not set
# CT_DEBUG_CT is not set
#
# Paths
#
CT_LOCAL_TARBALLS_DIR=""
CT_WORK_DIR="${CT_TOP_DIR}/.build"
CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
CT_INSTALL_DIR="${CT_PREFIX_DIR}"
CT_RM_RF_PREFIX_DIR=y
CT_REMOVE_DOCS=y
CT_INSTALL_DIR_RO=y
CT_STRIP_HOST_TOOLCHAIN_EXECUTABLES=y
# CT_STRIP_TARGET_TOOLCHAIN_EXECUTABLES is not set
#
# Downloading
#
# CT_FORBID_DOWNLOAD is not set
# CT_FORCE_DOWNLOAD is not set
CT_CONNECT_TIMEOUT=10
# CT_ONLY_DOWNLOAD is not set
# CT_USE_MIRROR is not set
#
# Extracting
#
# CT_FORCE_EXTRACT is not set
CT_OVERIDE_CONFIG_GUESS_SUB=y
# CT_ONLY_EXTRACT is not set
CT_PATCH_BUNDLED=y
# CT_PATCH_LOCAL is not set
# CT_PATCH_BUNDLED_LOCAL is not set
# CT_PATCH_LOCAL_BUNDLED is not set
# CT_PATCH_BUNDLED_FALLBACK_LOCAL is not set
# CT_PATCH_LOCAL_FALLBACK_BUNDLED is not set
# CT_PATCH_NONE is not set
CT_PATCH_ORDER="bundled"
#
# Build behavior
#
CT_PARALLEL_JOBS=0
CT_LOAD=""
CT_USE_PIPES=y
CT_EXTRA_CFLAGS_FOR_BUILD=""
CT_EXTRA_LDFLAGS_FOR_BUILD=""
CT_EXTRA_CFLAGS_FOR_HOST=""
CT_EXTRA_LDFLAGS_FOR_HOST=""
# CT_CONFIG_SHELL_SH is not set
# CT_CONFIG_SHELL_ASH is not set
CT_CONFIG_SHELL_BASH=y
# CT_CONFIG_SHELL_CUSTOM is not set
CT_CONFIG_SHELL="${bash}"
#
# Logging
#
# CT_LOG_ERROR is not set
# CT_LOG_WARN is not set
CT_LOG_INFO=y
# CT_LOG_EXTRA is not set
# CT_LOG_ALL is not set
# CT_LOG_DEBUG is not set
CT_LOG_LEVEL_MAX="INFO"
# CT_LOG_SEE_TOOLS_WARN is not set
CT_LOG_PROGRESS_BAR=y
CT_LOG_TO_FILE=y
CT_LOG_FILE_COMPRESS=y
#
# Target options
#
CT_ARCH="arm"
CT_ARCH_SUPPORTS_BOTH_MMU=y
CT_ARCH_SUPPORTS_BOTH_ENDIAN=y
CT_ARCH_SUPPORTS_32=y
CT_ARCH_SUPPORTS_64=y
CT_ARCH_SUPPORTS_WITH_ARCH=y
CT_ARCH_SUPPORTS_WITH_CPU=y
CT_ARCH_SUPPORTS_WITH_TUNE=y
CT_ARCH_SUPPORTS_WITH_FLOAT=y
CT_ARCH_SUPPORTS_WITH_FPU=y
CT_ARCH_SUPPORTS_SOFTFP=y
CT_ARCH_DEFAULT_HAS_MMU=y
CT_ARCH_DEFAULT_LE=y
CT_ARCH_DEFAULT_32=y
CT_ARCH_ARCH="armv7-a"
CT_ARCH_CPU=""
CT_ARCH_TUNE=""
CT_ARCH_FPU="vfpv3-d16"
# CT_ARCH_BE is not set
CT_ARCH_LE=y
CT_ARCH_32=y
# CT_ARCH_64 is not set
CT_ARCH_BITNESS=32
CT_ARCH_FLOAT_HW=y
# CT_ARCH_FLOAT_SW is not set
CT_TARGET_CFLAGS=""
CT_TARGET_LDFLAGS=""
# CT_ARCH_alpha is not set
CT_ARCH_arm=y
# CT_ARCH_avr is not set
# CT_ARCH_m68k is not set
# CT_ARCH_mips is not set
# CT_ARCH_nios2 is not set
# CT_ARCH_powerpc is not set
# CT_ARCH_s390 is not set
# CT_ARCH_sh is not set
# CT_ARCH_sparc is not set
# CT_ARCH_x86 is not set
# CT_ARCH_xtensa is not set
CT_ARCH_alpha_AVAILABLE=y
CT_ARCH_arm_AVAILABLE=y
CT_ARCH_avr_AVAILABLE=y
CT_ARCH_m68k_AVAILABLE=y
CT_ARCH_microblaze_AVAILABLE=y
CT_ARCH_mips_AVAILABLE=y
CT_ARCH_nios2_AVAILABLE=y
CT_ARCH_powerpc_AVAILABLE=y
CT_ARCH_s390_AVAILABLE=y
CT_ARCH_sh_AVAILABLE=y
CT_ARCH_sparc_AVAILABLE=y
CT_ARCH_x86_AVAILABLE=y
CT_ARCH_xtensa_AVAILABLE=y
CT_ARCH_SUFFIX="v7"
#
# Generic target options
#
# CT_MULTILIB is not set
CT_ARCH_USE_MMU=y
CT_ARCH_ENDIAN="little"
#
# Target optimisations
#
CT_ARCH_EXCLUSIVE_WITH_CPU=y
# CT_ARCH_FLOAT_AUTO is not set
# CT_ARCH_FLOAT_SOFTFP is not set
CT_ARCH_FLOAT="hard"
# CT_ARCH_ALPHA_EV4 is not set
# CT_ARCH_ALPHA_EV45 is not set
# CT_ARCH_ALPHA_EV5 is not set
# CT_ARCH_ALPHA_EV56 is not set
# CT_ARCH_ALPHA_EV6 is not set
# CT_ARCH_ALPHA_EV67 is not set
#
# arm other options
#
CT_ARCH_ARM_MODE="thumb"
# CT_ARCH_ARM_MODE_ARM is not set
CT_ARCH_ARM_MODE_THUMB=y
# CT_ARCH_ARM_INTERWORKING is not set
CT_ARCH_ARM_EABI_FORCE=y
CT_ARCH_ARM_EABI=y
CT_ARCH_ARM_TUPLE_USE_EABIHF=y
#
# Toolchain options
#
#
# General toolchain options
#
CT_FORCE_SYSROOT=y
CT_USE_SYSROOT=y
CT_SYSROOT_NAME="sysroot"
CT_SYSROOT_DIR_PREFIX=""
CT_WANTS_STATIC_LINK=y
# CT_STATIC_TOOLCHAIN is not set
CT_TOOLCHAIN_PKGVERSION=""
CT_TOOLCHAIN_BUGURL=""
#
# Tuple completion and aliasing
#
CT_TARGET_VENDOR="unknown"
CT_TARGET_ALIAS_SED_EXPR=""
CT_TARGET_ALIAS=""
#
# Toolchain type
#
CT_CROSS=y
# CT_CANADIAN is not set
CT_TOOLCHAIN_TYPE="cross"
#
# Build system
#
CT_BUILD=""
CT_BUILD_PREFIX=""
CT_BUILD_SUFFIX=""
#
# Misc options
#
# CT_TOOLCHAIN_ENABLE_NLS is not set
#
# Operating System
#
CT_KERNEL_SUPPORTS_SHARED_LIBS=y
CT_KERNEL="linux"
CT_KERNEL_VERSION="3.2.72"
# CT_KERNEL_bare_metal is not set
CT_KERNEL_linux=y
CT_KERNEL_bare_metal_AVAILABLE=y
CT_KERNEL_linux_AVAILABLE=y
# CT_KERNEL_V_4_3 is not set
# CT_KERNEL_V_4_2 is not set
# CT_KERNEL_V_4_1 is not set
# CT_KERNEL_V_3_18 is not set
# CT_KERNEL_V_3_14 is not set
# CT_KERNEL_V_3_12 is not set
# CT_KERNEL_V_3_10 is not set
# CT_KERNEL_V_3_4 is not set
CT_KERNEL_V_3_2=y
# CT_KERNEL_V_2_6_32 is not set
# CT_KERNEL_LINUX_CUSTOM is not set
CT_KERNEL_windows_AVAILABLE=y
#
# Common kernel options
#
CT_SHARED_LIBS=y
#
# linux other options
#
CT_KERNEL_LINUX_VERBOSITY_0=y
# CT_KERNEL_LINUX_VERBOSITY_1 is not set
# CT_KERNEL_LINUX_VERBOSITY_2 is not set
CT_KERNEL_LINUX_VERBOSE_LEVEL=0
CT_KERNEL_LINUX_INSTALL_CHECK=y
#
# Binary utilities
#
CT_ARCH_BINFMT_ELF=y
CT_BINUTILS="binutils"
CT_BINUTILS_binutils=y
#
# GNU binutils
#
# CT_CC_BINUTILS_SHOW_LINARO is not set
CT_BINUTILS_V_2_25_1=y
# CT_BINUTILS_V_2_25 is not set
# CT_BINUTILS_V_2_24 is not set
# CT_BINUTILS_V_2_23_2 is not set
# CT_BINUTILS_V_2_23_1 is not set
# CT_BINUTILS_V_2_22 is not set
# CT_BINUTILS_V_2_21_53 is not set
# CT_BINUTILS_V_2_21_1a is not set
# CT_BINUTILS_V_2_20_1a is not set
# CT_BINUTILS_V_2_19_1a is not set
# CT_BINUTILS_V_2_18a is not set
CT_BINUTILS_VERSION="2.25.1"
CT_BINUTILS_2_25_1_or_later=y
CT_BINUTILS_2_25_or_later=y
CT_BINUTILS_2_24_or_later=y
CT_BINUTILS_2_23_or_later=y
CT_BINUTILS_2_22_or_later=y
CT_BINUTILS_2_21_or_later=y
CT_BINUTILS_2_20_or_later=y
CT_BINUTILS_2_19_or_later=y
CT_BINUTILS_2_18_or_later=y
CT_BINUTILS_HAS_HASH_STYLE=y
CT_BINUTILS_HAS_GOLD=y
CT_BINUTILS_GOLD_SUPPORTS_ARCH=y
CT_BINUTILS_GOLD_SUPPORT=y
CT_BINUTILS_HAS_PLUGINS=y
CT_BINUTILS_HAS_PKGVERSION_BUGURL=y
CT_BINUTILS_FORCE_LD_BFD=y
CT_BINUTILS_LINKER_LD=y
# CT_BINUTILS_LINKER_LD_GOLD is not set
# CT_BINUTILS_LINKER_GOLD_LD is not set
CT_BINUTILS_LINKERS_LIST="ld"
CT_BINUTILS_LINKER_DEFAULT="bfd"
# CT_BINUTILS_PLUGINS is not set
CT_BINUTILS_EXTRA_CONFIG_ARRAY=""
# CT_BINUTILS_FOR_TARGET is not set
#
# binutils other options
#
#
# C-library
#
CT_LIBC="glibc"
CT_LIBC_VERSION="2.16.0"
CT_LIBC_glibc=y
# CT_LIBC_musl is not set
# CT_LIBC_newlib is not set
# CT_LIBC_none is not set
# CT_LIBC_uClibc is not set
CT_LIBC_avr_libc_AVAILABLE=y
CT_LIBC_glibc_AVAILABLE=y
CT_THREADS="nptl"
# CT_CC_GLIBC_SHOW_LINARO is not set
# CT_LIBC_GLIBC_V_2_22 is not set
# CT_LIBC_GLIBC_V_2_21 is not set
# CT_LIBC_GLIBC_V_2_20 is not set
# CT_LIBC_GLIBC_V_2_19 is not set
# CT_LIBC_GLIBC_V_2_18 is not set
# CT_LIBC_GLIBC_V_2_17 is not set
CT_LIBC_GLIBC_V_2_16_0=y
# CT_LIBC_GLIBC_V_2_15 is not set
# CT_LIBC_GLIBC_V_2_14_1 is not set
# CT_LIBC_GLIBC_V_2_14 is not set
# CT_LIBC_GLIBC_V_2_13 is not set
# CT_LIBC_GLIBC_V_2_12_2 is not set
# CT_LIBC_GLIBC_V_2_12_1 is not set
# CT_LIBC_GLIBC_V_2_11_1 is not set
# CT_LIBC_GLIBC_V_2_11 is not set
# CT_LIBC_GLIBC_V_2_10_1 is not set
# CT_LIBC_GLIBC_V_2_9 is not set
# CT_LIBC_GLIBC_V_2_8 is not set
CT_LIBC_mingw_AVAILABLE=y
CT_LIBC_musl_AVAILABLE=y
CT_LIBC_newlib_AVAILABLE=y
CT_LIBC_none_AVAILABLE=y
CT_LIBC_uClibc_AVAILABLE=y
CT_LIBC_SUPPORT_THREADS_ANY=y
CT_LIBC_SUPPORT_THREADS_NATIVE=y
#
# Common C library options
#
CT_THREADS_NATIVE=y
CT_LIBC_XLDD=y
#
# glibc other options
#
CT_LIBC_GLIBC_PORTS_EXTERNAL=y
CT_LIBC_GLIBC_MAY_FORCE_PORTS=y
CT_LIBC_glibc_familly=y
CT_LIBC_GLIBC_EXTRA_CONFIG_ARRAY=""
CT_LIBC_GLIBC_CONFIGPARMS=""
CT_LIBC_GLIBC_EXTRA_CFLAGS=""
CT_LIBC_EXTRA_CC_ARGS=""
# CT_LIBC_DISABLE_VERSIONING is not set
CT_LIBC_OLDEST_ABI=""
CT_LIBC_GLIBC_FORCE_UNWIND=y
CT_LIBC_GLIBC_USE_PORTS=y
CT_LIBC_ADDONS_LIST=""
#
# WARNING !!!
#
#
# For glibc >= 2.8, it can happen that the tarballs
#
#
# for the addons are not available for download.
#
#
# If that happens, bad luck... Try a previous version
#
#
# or try again later... :-(
#
# CT_LIBC_LOCALES is not set
# CT_LIBC_GLIBC_KERNEL_VERSION_NONE is not set
CT_LIBC_GLIBC_KERNEL_VERSION_AS_HEADERS=y
# CT_LIBC_GLIBC_KERNEL_VERSION_CHOSEN is not set
CT_LIBC_GLIBC_MIN_KERNEL="3.2.72"
#
# C compiler
#
CT_CC="gcc"
CT_CC_CORE_PASSES_NEEDED=y
CT_CC_CORE_PASS_1_NEEDED=y
CT_CC_CORE_PASS_2_NEEDED=y
CT_CC_gcc=y
# CT_CC_GCC_SHOW_LINARO is not set
# CT_CC_GCC_V_5_2_0 is not set
CT_CC_GCC_V_4_9_3=y
# CT_CC_GCC_V_4_8_5 is not set
# CT_CC_GCC_V_4_7_4 is not set
# CT_CC_GCC_V_4_6_4 is not set
# CT_CC_GCC_V_4_5_4 is not set
# CT_CC_GCC_V_4_4_7 is not set
# CT_CC_GCC_V_4_3_6 is not set
# CT_CC_GCC_V_4_2_4 is not set
CT_CC_GCC_4_2_or_later=y
CT_CC_GCC_4_3_or_later=y
CT_CC_GCC_4_4_or_later=y
CT_CC_GCC_4_5_or_later=y
CT_CC_GCC_4_6_or_later=y
CT_CC_GCC_4_7_or_later=y
CT_CC_GCC_4_8_or_later=y
CT_CC_GCC_4_9=y
CT_CC_GCC_4_9_or_later=y
CT_CC_GCC_HAS_GRAPHITE=y
CT_CC_GCC_USE_GRAPHITE=y
CT_CC_GCC_HAS_LTO=y
CT_CC_GCC_USE_LTO=y
CT_CC_GCC_HAS_PKGVERSION_BUGURL=y
CT_CC_GCC_HAS_BUILD_ID=y
CT_CC_GCC_HAS_LNK_HASH_STYLE=y
CT_CC_GCC_USE_GMP_MPFR=y
CT_CC_GCC_USE_MPC=y
CT_CC_GCC_HAS_LIBQUADMATH=y
CT_CC_GCC_HAS_LIBSANITIZER=y
CT_CC_GCC_VERSION="4.9.3"
# CT_CC_LANG_FORTRAN is not set
CT_CC_GCC_ENABLE_CXX_FLAGS=""
CT_CC_GCC_CORE_EXTRA_CONFIG_ARRAY=""
CT_CC_GCC_EXTRA_CONFIG_ARRAY=""
CT_CC_GCC_EXTRA_ENV_ARRAY=""
CT_CC_GCC_STATIC_LIBSTDCXX=y
# CT_CC_GCC_SYSTEM_ZLIB is not set
#
# Optimisation features
#
#
# Settings for libraries running on target
#
CT_CC_GCC_ENABLE_TARGET_OPTSPACE=y
# CT_CC_GCC_LIBMUDFLAP is not set
# CT_CC_GCC_LIBGOMP is not set
# CT_CC_GCC_LIBSSP is not set
# CT_CC_GCC_LIBQUADMATH is not set
# CT_CC_GCC_LIBSANITIZER is not set
#
# Misc. obscure options.
#
CT_CC_CXA_ATEXIT=y
# CT_CC_GCC_DISABLE_PCH is not set
CT_CC_GCC_SJLJ_EXCEPTIONS=m
CT_CC_GCC_LDBL_128=m
# CT_CC_GCC_BUILD_ID is not set
CT_CC_GCC_LNK_HASH_STYLE_DEFAULT=y
# CT_CC_GCC_LNK_HASH_STYLE_SYSV is not set
# CT_CC_GCC_LNK_HASH_STYLE_GNU is not set
# CT_CC_GCC_LNK_HASH_STYLE_BOTH is not set
CT_CC_GCC_LNK_HASH_STYLE=""
CT_CC_GCC_DEC_FLOAT_AUTO=y
# CT_CC_GCC_DEC_FLOAT_BID is not set
# CT_CC_GCC_DEC_FLOAT_DPD is not set
# CT_CC_GCC_DEC_FLOATS_NO is not set
CT_CC_SUPPORT_CXX=y
CT_CC_SUPPORT_FORTRAN=y
CT_CC_SUPPORT_JAVA=y
CT_CC_SUPPORT_ADA=y
CT_CC_SUPPORT_OBJC=y
CT_CC_SUPPORT_OBJCXX=y
CT_CC_SUPPORT_GOLANG=y
#
# Additional supported languages:
#
CT_CC_LANG_CXX=y
# CT_CC_LANG_JAVA is not set
#
# Debug facilities
#
# CT_DEBUG_dmalloc is not set
# CT_DEBUG_duma is not set
# CT_DEBUG_gdb is not set
# CT_DEBUG_ltrace is not set
# CT_DEBUG_strace is not set
#
# Companion libraries
#
CT_COMPLIBS_NEEDED=y
CT_LIBICONV_NEEDED=y
CT_GETTEXT_NEEDED=y
CT_GMP_NEEDED=y
CT_MPFR_NEEDED=y
CT_ISL_NEEDED=y
CT_CLOOG_NEEDED=y
CT_MPC_NEEDED=y
CT_COMPLIBS=y
CT_LIBICONV=y
CT_GETTEXT=y
CT_GMP=y
CT_MPFR=y
CT_ISL=y
CT_CLOOG=y
CT_MPC=y
CT_LIBICONV_V_1_14=y
CT_LIBICONV_VERSION="1.14"
CT_GETTEXT_V_0_19_6=y
CT_GETTEXT_VERSION="0.19.6"
CT_GMP_V_6_0_0=y
# CT_GMP_V_5_1_3 is not set
# CT_GMP_V_5_1_1 is not set
# CT_GMP_V_5_0_2 is not set
# CT_GMP_V_5_0_1 is not set
# CT_GMP_V_4_3_2 is not set
# CT_GMP_V_4_3_1 is not set
# CT_GMP_V_4_3_0 is not set
CT_GMP_5_0_2_or_later=y
CT_GMP_VERSION="6.0.0a"
CT_MPFR_V_3_1_3=y
# CT_MPFR_V_3_1_2 is not set
# CT_MPFR_V_3_1_0 is not set
# CT_MPFR_V_3_0_1 is not set
# CT_MPFR_V_3_0_0 is not set
# CT_MPFR_V_2_4_2 is not set
# CT_MPFR_V_2_4_1 is not set
# CT_MPFR_V_2_4_0 is not set
CT_MPFR_VERSION="3.1.3"
CT_ISL_V_0_14=y
CT_ISL_V_0_14_or_later=y
CT_ISL_V_0_12_or_later=y
CT_ISL_VERSION="0.14"
CT_CLOOG_V_0_18_4=y
# CT_CLOOG_V_0_18_1 is not set
# CT_CLOOG_V_0_18_0 is not set
CT_CLOOG_VERSION="0.18.4"
CT_CLOOG_0_18_4_or_later=y
CT_CLOOG_0_18_or_later=y
CT_MPC_V_1_0_3=y
# CT_MPC_V_1_0_2 is not set
# CT_MPC_V_1_0_1 is not set
# CT_MPC_V_1_0 is not set
# CT_MPC_V_0_9 is not set
# CT_MPC_V_0_8_2 is not set
# CT_MPC_V_0_8_1 is not set
# CT_MPC_V_0_7 is not set
CT_MPC_VERSION="1.0.3"
#
# Companion libraries common options
#
# CT_COMPLIBS_CHECK is not set
#
# Companion tools
#
#
# READ HELP before you say 'Y' below !!!
#
# CT_COMP_TOOLS is not set

View File

@ -0,0 +1,45 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
hide_output() {
set +x
on_err="
echo ERROR: An error was encountered with the build.
cat /tmp/build.log
exit 1
"
trap "$on_err" ERR
bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
PING_LOOP_PID=$!
$@ &> /tmp/build.log
rm /tmp/build.log
trap - ERR
kill $PING_LOOP_PID
set -x
}
mkdir build
cd build
cp ../armv7-linux-gnueabihf.config .config
ct-ng oldconfig
hide_output ct-ng build
cd ..
rm -rf build
mkdir build
cd build
cp ../aarch64-linux-gnu.config .config
ct-ng oldconfig
hide_output ct-ng build
cd ..
rm -rf build

View File

@ -35,11 +35,8 @@ ENV \
CC_i686_unknown_freebsd=i686-unknown-freebsd10-gcc \
CXX_i686_unknown_freebsd=i686-unknown-freebsd10-g++
ENV RUST_CONFIGURE_ARGS \
--host=x86_64-unknown-freebsd,i686-unknown-freebsd
ENV SCRIPT \
python2.7 ../x.py dist \
--host x86_64-unknown-freebsd \
--target x86_64-unknown-freebsd \
--host i686-unknown-freebsd \
--target i686-unknown-freebsd
ENV HOSTS=x86_64-unknown-freebsd
ENV HOSTS=$HOSTS,i686-unknown-freebsd
ENV RUST_CONFIGURE_ARGS --host=$HOSTS
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS

View File

@ -24,11 +24,8 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS \
--host=mips-unknown-linux-gnu,mipsel-unknown-linux-gnu
ENV SCRIPT \
python2.7 ../x.py dist \
--host mips-unknown-linux-gnu \
--target mips-unknown-linux-gnu \
--host mipsel-unknown-linux-gnu \
--target mipsel-unknown-linux-gnu
ENV HOSTS=mips-unknown-linux-gnu
ENV HOSTS=$HOSTS,mipsel-unknown-linux-gnu
ENV RUST_CONFIGURE_ARGS --host=$HOSTS
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS

View File

@ -24,11 +24,8 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS \
--host=mips64-unknown-linux-gnuabi64,mips64el-unknown-linux-gnuabi64
ENV SCRIPT \
python2.7 ../x.py dist \
--host mips64-unknown-linux-gnuabi64 \
--target mips64-unknown-linux-gnuabi64 \
--host mips64el-unknown-linux-gnuabi64 \
--target mips64el-unknown-linux-gnuabi64
ENV HOSTS=mips64-unknown-linux-gnuabi64
ENV HOSTS=$HOSTS,mips64el-unknown-linux-gnuabi64
ENV RUST_CONFIGURE_ARGS --host=$HOSTS
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS

View File

@ -23,12 +23,10 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS \
--host=powerpc-unknown-linux-gnu
ENV SCRIPT \
python2.7 ../x.py dist \
--host powerpc-unknown-linux-gnu \
--target powerpc-unknown-linux-gnu
ENV HOSTS=powerpc-unknown-linux-gnu
ENV RUST_CONFIGURE_ARGS --host=$HOSTS
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
# FIXME(#36150) this will fail the bootstrap. Probably means something bad is
# happening!

View File

@ -29,11 +29,8 @@ ENV \
CC_powerpc64_unknown_linux_gnu=powerpc64-linux-gnu-gcc \
CXX_powerpc64_unknown_linux_gnu=powerpc64-linux-gnu-g++
ENV RUST_CONFIGURE_ARGS \
--host=powerpc64-unknown-linux-gnu,powerpc64le-unknown-linux-gnu
ENV SCRIPT \
python2.7 ../x.py dist \
--host powerpc64-unknown-linux-gnu \
--target powerpc64-unknown-linux-gnu \
--host powerpc64le-unknown-linux-gnu \
--target powerpc64le-unknown-linux-gnu
ENV HOSTS=powerpc64-unknown-linux-gnu
ENV HOSTS=$HOSTS,powerpc64le-unknown-linux-gnu
ENV RUST_CONFIGURE_ARGS --host=$HOSTS
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS

View File

@ -33,11 +33,8 @@ ENV \
CC_x86_64_unknown_netbsd=x86_64-unknown-netbsd-gcc \
CXX_x86_64_unknown_netbsd=x86_64-unknown-netbsd-g++
ENV RUST_CONFIGURE_ARGS \
--host=x86_64-unknown-netbsd,s390x-unknown-linux-gnu
ENV SCRIPT \
python2.7 ../x.py dist \
--host x86_64-unknown-netbsd \
--target x86_64-unknown-netbsd \
--host s390x-unknown-linux-gnu \
--target s390x-unknown-linux-gnu
ENV HOSTS=x86_64-unknown-netbsd
ENV HOSTS=$HOSTS,s390x-unknown-linux-gnu
ENV RUST_CONFIGURE_ARGS --host=$HOSTS
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS

View File

@ -0,0 +1,80 @@
FROM centos:5
WORKDIR /build
RUN yum upgrade -y && yum install -y \
curl \
bzip2 \
gcc \
make \
glibc-devel \
perl \
zlib-devel \
file \
xz \
which \
pkg-config \
wget \
autoconf \
gettext
ENV PATH=/rustroot/bin:$PATH
ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib
WORKDIR /tmp
# binutils < 2.22 has a bug where the 32-bit executables it generates
# immediately segfault in Rust, so we need to install our own binutils.
#
# See https://github.com/rust-lang/rust/issues/20440 for more info
COPY shared.sh build-binutils.sh /tmp/
RUN ./build-binutils.sh
# Need a newer version of gcc than centos has to compile LLVM nowadays
COPY build-gcc.sh /tmp/
RUN ./build-gcc.sh
# We need a build of openssl which supports SNI to download artifacts from
# static.rust-lang.org. This'll be used to link into libcurl below (and used
# later as well), so build a copy of OpenSSL with dynamic libraries into our
# generic root.
COPY build-openssl.sh /tmp/
RUN ./build-openssl.sh
# The `curl` binary on CentOS doesn't support SNI which is needed for fetching
# some https urls we have, so install a new version of libcurl + curl which is
# using the openssl we just built previously.
#
# Note that we also disable a bunch of optional features of curl that we don't
# really need.
COPY build-curl.sh /tmp/
RUN ./build-curl.sh
# CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+
COPY build-python.sh /tmp/
RUN ./build-python.sh
# Apparently CentOS 5.5 doesn't have `git` in yum, but we're gonna need it for
# cloning, so download and build it here.
COPY build-git.sh /tmp/
RUN ./build-git.sh
# libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS
# only has 2.6.4, so build our own
COPY build-cmake.sh /tmp/
RUN ./build-cmake.sh
RUN curl -Lo /rustroot/dumb-init \
https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64 && \
chmod +x /rustroot/dumb-init
ENTRYPOINT ["/rustroot/dumb-init", "--"]
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
xz --decompress | \
tar xf - -C /usr/local/bin --strip-components=1
ENV HOSTS=i686-unknown-linux-gnu
ENV HOSTS=$HOSTS,x86_64-unknown-linux-gnu
ENV RUST_CONFIGURE_ARGS --host=$HOSTS
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS

View File

@ -0,0 +1,26 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
source shared.sh
curl https://ftp.gnu.org/gnu/binutils/binutils-2.25.1.tar.bz2 | tar xfj -
mkdir binutils-build
cd binutils-build
hide_output ../binutils-2.25.1/configure --prefix=/rustroot
hide_output make -j10
hide_output make install
cd ..
rm -rf binutils-build
rm -rf binutils-2.25.1

View File

@ -0,0 +1,25 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
source shared.sh
curl https://cmake.org/files/v3.6/cmake-3.6.3.tar.gz | tar xzf -
mkdir cmake-build
cd cmake-build
hide_output ../cmake-3.6.3/configure --prefix=/rustroot
hide_output make -j10
hide_output make install
cd ..
rm -rf cmake-build
rm -rf cmake-3.6.3

View File

@ -0,0 +1,43 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
source shared.sh
VERSION=7.51.0
curl http://cool.haxx.se/download/curl-$VERSION.tar.bz2 | tar xjf -
mkdir curl-build
cd curl-build
hide_output ../curl-$VERSION/configure \
--prefix=/rustroot \
--with-ssl=/rustroot \
--disable-sspi \
--disable-gopher \
--disable-smtp \
--disable-smb \
--disable-imap \
--disable-pop3 \
--disable-tftp \
--disable-telnet \
--disable-manual \
--disable-dict \
--disable-rtsp \
--disable-ldaps \
--disable-ldap
hide_output make -j10
hide_output make install
cd ..
rm -rf curl-build
rm -rf curl-$VERSION
yum erase -y curl

View File

@ -0,0 +1,31 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
source shared.sh
curl https://ftp.gnu.org/gnu/gcc/gcc-4.7.4/gcc-4.7.4.tar.bz2 | tar xjf -
cd gcc-4.7.4
./contrib/download_prerequisites
mkdir ../gcc-build
cd ../gcc-build
hide_output ../gcc-4.7.4/configure \
--prefix=/rustroot \
--enable-languages=c,c++
hide_output make -j10
hide_output make install
ln -nsf gcc /rustroot/bin/cc
cd ..
rm -rf gcc-build
rm -rf gcc-4.7.4
yum erase -y gcc binutils

View File

@ -0,0 +1,24 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
source shared.sh
curl https://www.kernel.org/pub/software/scm/git/git-2.10.0.tar.gz | tar xzf -
cd git-2.10.0
make configure
hide_output ./configure --prefix=/rustroot
hide_output make -j10
hide_output make install
cd ..
rm -rf git-2.10.0

View File

@ -0,0 +1,27 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
source shared.sh
VERSION=1.0.2j
curl https://www.openssl.org/source/openssl-$VERSION.tar.gz | tar xzf -
cd openssl-$VERSION
hide_output ./config --prefix=/rustroot shared -fPIC
hide_output make -j10
hide_output make install
cd ..
rm -rf openssl-$VERSION
# Make the system cert collection available to the new install.
ln -nsf /etc/pki/tls/cert.pem /rustroot/ssl/

View File

@ -0,0 +1,30 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
source shared.sh
curl https://www.python.org/ftp/python/2.7.12/Python-2.7.12.tgz | \
tar xzf -
mkdir python-build
cd python-build
# We have to do some hackery to tell Python about our custom OpenSSL build, but
# other than that this is a fairly normal build.
CFLAGS='-I /rustroot/include' LDFLAGS='-L /rustroot/lib -L /rustroot/lib64' \
hide_output ../Python-2.7.12/configure --prefix=/rustroot
hide_output make -j10
hide_output make install
cd ..
rm -rf python-build
rm -rf Python-2.7.12

View File

@ -0,0 +1,25 @@
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
hide_output() {
set +x
on_err="
echo ERROR: An error was encountered with the build.
cat /tmp/build.log
exit 1
"
trap "$on_err" ERR
bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
PING_LOOP_PID=$!
$@ &> /tmp/build.log
trap - ERR
kill $PING_LOOP_PID
set -x
}

View File

@ -0,0 +1,41 @@
FROM ubuntu:16.04
RUN apt-get update && apt-get install -y --no-install-recommends \
g++ \
make \
file \
curl \
ca-certificates \
python \
git \
cmake \
sudo \
gdb \
xz-utils \
lib32stdc++6
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
WORKDIR /tmp
COPY build-emscripten.sh /tmp/
RUN ./build-emscripten.sh
ENV PATH=$PATH:/tmp/emsdk_portable
ENV PATH=$PATH:/tmp/emsdk_portable/clang/tag-e1.37.1/build_tag-e1.37.1_32/bin
ENV PATH=$PATH:/tmp/emsdk_portable/node/4.1.1_32bit/bin
ENV PATH=$PATH:/tmp/emsdk_portable/emscripten/tag-1.37.1
ENV EMSCRIPTEN=/tmp/emsdk_portable/emscripten/tag-1.37.1
ENV RUST_CONFIGURE_ARGS --target=asmjs-unknown-emscripten
# Run `emcc` first as it's got a prompt and doesn't actually do anything; after
# that's done, do the real build.
ENV SCRIPT emcc && \
python2.7 ../x.py test --target asmjs-unknown-emscripten

View File

@ -0,0 +1,36 @@
#!/bin/bash
# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
hide_output() {
set +x
on_err="
echo ERROR: An error was encountered with the build.
cat /tmp/build.log
exit 1
"
trap "$on_err" ERR
bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
PING_LOOP_PID=$!
$@ &> /tmp/build.log
trap - ERR
kill $PING_LOOP_PID
rm /tmp/build.log
set -x
}
curl https://s3.amazonaws.com/mozilla-games/emscripten/releases/emsdk-portable.tar.gz | \
tar xzf -
source emsdk_portable/emsdk_env.sh
hide_output emsdk update
hide_output emsdk install --build=Release sdk-tag-1.37.1-32bit
hide_output emsdk activate --build=Release sdk-tag-1.37.1-32bit

View File

@ -1,7 +1,7 @@
FROM ubuntu:16.04
RUN apt-get update && apt-get install -y --no-install-recommends \
g++ \
g++-multilib \
make \
file \
curl \
@ -11,10 +11,11 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
cmake \
xz-utils \
sudo \
gdb
gdb \
patch
WORKDIR /build/
COPY build-musl.sh /build/
COPY musl-libunwind-patch.patch build-musl.sh /build/
RUN sh /build/build-musl.sh && rm -rf /build
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
@ -27,9 +28,17 @@ RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST |
tar xJf - -C /usr/local/bin --strip-components=1
ENV RUST_CONFIGURE_ARGS \
--target=x86_64-unknown-linux-musl \
--musl-root-x86_64=/musl-x86_64
ENV PATH=$PATH:/musl-x86_64/bin
--target=x86_64-unknown-linux-musl,i686-unknown-linux-musl,i586-unknown-linux-gnu \
--musl-root-x86_64=/musl-x86_64 \
--musl-root-i686=/musl-i686
# FIXME should also test i686-unknown-linux-musl
ENV SCRIPT \
python2.7 ../x.py test --target x86_64-unknown-linux-musl && \
python2.7 ../x.py dist --target x86_64-unknown-linux-musl
python2.7 ../x.py test \
--target x86_64-unknown-linux-musl \
--target i586-unknown-linux-gnu \
&& \
python2.7 ../x.py dist \
--target x86_64-unknown-linux-musl \
--target i686-unknown-linux-musl \
--target i586-unknown-linux-gnu

View File

@ -0,0 +1,64 @@
#!/bin/sh
# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
export CFLAGS="-fPIC"
MUSL=musl-1.1.14
curl https://www.musl-libc.org/releases/$MUSL.tar.gz | tar xzf -
cd $MUSL
./configure --prefix=/musl-x86_64 --disable-shared
make -j10
make install
make clean
# for i686
CFLAGS="$CFLAGS -m32" ./configure --prefix=/musl-i686 --disable-shared --target=i686
make -j10
make install
cd ..
# To build MUSL we're going to need a libunwind lying around, so acquire that
# here and build it.
curl -L https://github.com/llvm-mirror/llvm/archive/release_37.tar.gz | tar xzf -
curl -L https://github.com/llvm-mirror/libunwind/archive/release_37.tar.gz | tar xzf -
# Whoa what's this mysterious patch we're applying to libunwind! Why are we
# swapping the values of ESP/EBP in libunwind?!
#
# Discovered in #35599 it turns out that the vanilla build of libunwind is not
# suitable for unwinding 32-bit musl. After some investigation it ended up
# looking like the register values for ESP/EBP were indeed incorrect (swapped)
# in the source. Similar commits in libunwind (r280099 and r282589) have noticed
# this for other platforms, and we just need to realize it for musl linux as
# well.
#
# More technical info can be found at #35599
cd libunwind-release_37
patch -Np1 < /build/musl-libunwind-patch.patch
cd ..
mkdir libunwind-build
cd libunwind-build
cmake ../libunwind-release_37 -DLLVM_PATH=/build/llvm-release_37 \
-DLIBUNWIND_ENABLE_SHARED=0
make -j10
cp lib/libunwind.a /musl-x86_64/lib
# (Note: the next cmake call doesn't fully override the previous cached one, so remove the cached
# configuration manually. IOW, if we neither do this nor call make clean, we'll end up building libunwind
# for x86_64 again)
rm -rf *
# for i686
CFLAGS="$CFLAGS -m32 -g" CXXFLAGS="$CXXFLAGS -m32 -g" cmake ../libunwind-release_37 \
-DLLVM_PATH=/build/llvm-release_37 \
-DLIBUNWIND_ENABLE_SHARED=0
make -j10
cp lib/libunwind.a /musl-i686/lib

View File

@ -0,0 +1,15 @@
diff --git a/include/libunwind.h b/include/libunwind.h
index c5b9633..1360eb2 100644
--- a/include/libunwind.h
+++ b/include/libunwind.h
@@ -151,8 +151,8 @@ enum {
UNW_X86_ECX = 1,
UNW_X86_EDX = 2,
UNW_X86_EBX = 3,
- UNW_X86_EBP = 4,
- UNW_X86_ESP = 5,
+ UNW_X86_ESP = 4,
+ UNW_X86_EBP = 5,
UNW_X86_ESI = 6,
UNW_X86_EDI = 7
};

View File

@ -51,5 +51,6 @@ exec docker \
--env DEPLOY=$DEPLOY \
--env LOCAL_USER_ID=`id -u` \
--volume "$HOME/.cargo:/cargo" \
--rm \
rust-ci \
/checkout/src/ci/run.sh

View File

@ -1,33 +0,0 @@
#!/bin/sh
# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ex
export CFLAGS="-fPIC"
MUSL=musl-1.1.14
curl https://www.musl-libc.org/releases/$MUSL.tar.gz | tar xzf -
cd $MUSL
./configure --prefix=/musl-x86_64 --disable-shared
make -j10
make install
make clean
cd ..
# To build MUSL we're going to need a libunwind lying around, so acquire that
# here and build it.
curl -L https://github.com/llvm-mirror/llvm/archive/release_37.tar.gz | tar xzf -
curl -L https://github.com/llvm-mirror/libunwind/archive/release_37.tar.gz | tar xzf -
mkdir libunwind-build
cd libunwind-build
cmake ../libunwind-release_37 -DLLVM_PATH=/build/llvm-release_37 \
-DLIBUNWIND_ENABLE_SHARED=0
make -j10
cp lib/libunwind.a /musl-x86_64/lib

View File

@ -22,19 +22,6 @@ RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-sccache"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-quiet-tests"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-manage-submodules"
# FIXME: we shouldn't forcibly enable debug assertions and llvm assertions when
# `DEPLOY` is set because then we'll be shipping slower binaries. We
# should only set these for auto branches, but we need to make sure that
# if we disable this all the relevant platforms are still tested
# somewhere with debug and llvm assertions.
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-debug-assertions"
# In general we always want to run tests with LLVM assertions enabled, but not
# all platforms currently support that, so we have an option to disable.
if [ "$NO_LLVM_ASSERTIONS" = "" ]; then
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-llvm-assertions"
fi
# If we're deploying artifacts then we set the release channel, otherwise if
# we're not deploying then we want to be sure to enable all assertions because
# we'll be running tests
@ -43,10 +30,19 @@ fi
# either automatically or manually.
if [ "$DEPLOY" != "" ]; then
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --release-channel=nightly"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-llvm-static-stdcpp"
if [ "$NO_LLVM_ASSERTIONS" = "1" ]; then
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-llvm-assertions"
fi
else
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-debug-assertions"
# In general we always want to run tests with LLVM assertions enabled, but not
# all platforms currently support that, so we have an option to disable.
if [ "$NO_LLVM_ASSERTIONS" = "" ]; then
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-llvm-assertions"
fi
fi
# We want to enable usage of the `src/vendor` dir as much as possible, but not
@ -63,7 +59,7 @@ $SRC/configure $RUST_CONFIGURE_ARGS
if [ "$TRAVIS_OS_NAME" = "osx" ]; then
ncpus=$(sysctl -n hw.ncpu)
else
ncpus=$(nproc)
ncpus=$(grep processor /proc/cpuinfo | wc -l)
fi
if [ ! -z "$SCRIPT" ]; then

@ -1 +1 @@
Subproject commit e058ca661692a8d01f8cf9d35939dfe3105ce968
Subproject commit 11bfb0dcf85f7aa92abd30524bb1e42e18d108c6

View File

@ -1023,8 +1023,8 @@ impl<T> Vec<T> {
/// Create a draining iterator that removes the specified range in the vector
/// and yields the removed items.
///
/// Note 1: The element range is removed even if the iterator is not
/// consumed until the end.
/// Note 1: The element range is removed even if the iterator is only
/// partially consumed or not consumed at all.
///
/// Note 2: It is unspecified how many elements are removed from the vector,
/// if the `Drain` value is leaked.
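// (Illustrative sketch, not part of this diff: what the reworded note means in
// practice. Dropping the `Drain` iterator early still removes the whole range.)
fn drain_example() {
    let mut v = vec![1, 2, 3, 4, 5];
    {
        let mut d = v.drain(1..4);     // would yield 2, 3, 4
        assert_eq!(d.next(), Some(2)); // only partially consumed
    }                                  // `d` is dropped here
    assert_eq!(v, vec![1, 5]);         // the rest of the range is removed anyway
}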

View File

@ -107,7 +107,6 @@ fn main() {
"apple_versioning.c",
"ashldi3.c",
"ashrdi3.c",
"clear_cache.c",
"clzdi2.c",
"clzsi2.c",
"cmpdi2.c",

View File

@ -39,9 +39,9 @@ extern crate syntax;
use std::fmt;
use std::str::FromStr;
use syntax::ast;
use syntax::errors::DiagnosticBuilder;
use syntax::parse;
use syntax::ptr::P;
use syntax::tokenstream::TokenStream as TokenStream_;
/// The main type provided by this crate, representing an abstract stream of
/// tokens.
@ -54,7 +54,7 @@ use syntax::ptr::P;
/// time!
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
pub struct TokenStream {
inner: Vec<P<ast::Item>>,
inner: TokenStream_,
}
/// Error returned from `TokenStream::from_str`.
@ -77,17 +77,41 @@ pub struct LexError {
#[doc(hidden)]
pub mod __internal {
use std::cell::Cell;
use std::rc::Rc;
use syntax::ast;
use syntax::ptr::P;
use syntax::parse::ParseSess;
use super::TokenStream;
use syntax::parse::{self, token, ParseSess};
use syntax::tokenstream::TokenStream as TokenStream_;
use super::{TokenStream, LexError};
pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
TokenStream { inner: vec![item] }
TokenStream { inner: TokenStream_::from_tokens(vec![
token::Interpolated(Rc::new(token::NtItem(item)))
])}
}
pub fn token_stream_items(stream: TokenStream) -> Vec<P<ast::Item>> {
pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream {
TokenStream {
inner: inner
}
}
pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
with_parse_sess(move |sess| {
let mut parser = parse::new_parser_from_ts(sess, stream.inner);
let mut items = Vec::new();
while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
items.push(item)
}
Ok(items)
})
}
pub fn token_stream_inner(stream: TokenStream) -> TokenStream_ {
stream.inner
}
@ -96,6 +120,10 @@ pub mod __internal {
trait_name: &str,
expand: fn(TokenStream) -> TokenStream,
attributes: &[&'static str]);
fn register_attr_proc_macro(&mut self,
name: &str,
expand: fn(TokenStream, TokenStream) -> TokenStream);
}
// Emulate scoped_thread_local!() here essentially
@ -125,11 +153,17 @@ pub mod __internal {
where F: FnOnce(&ParseSess) -> R
{
let p = CURRENT_SESS.with(|p| p.get());
assert!(!p.is_null());
assert!(!p.is_null(), "proc_macro::__internal::with_parse_sess() called \
before set_parse_sess()!");
f(unsafe { &*p })
}
}
fn parse_to_lex_err(mut err: DiagnosticBuilder) -> LexError {
err.cancel();
LexError { _inner: () }
}
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl FromStr for TokenStream {
type Err = LexError;
@ -138,18 +172,10 @@ impl FromStr for TokenStream {
__internal::with_parse_sess(|sess| {
let src = src.to_string();
let name = "<proc-macro source code>".to_string();
let mut parser = parse::new_parser_from_source_str(sess, name, src);
let mut ret = TokenStream { inner: Vec::new() };
loop {
match parser.parse_item() {
Ok(Some(item)) => ret.inner.push(item),
Ok(None) => return Ok(ret),
Err(mut err) => {
err.cancel();
return Err(LexError { _inner: () })
}
}
}
let tts = try!(parse::parse_tts_from_source_str(name, src, sess)
.map_err(parse_to_lex_err));
Ok(__internal::token_stream_wrap(TokenStream_::from_tts(tts)))
})
}
}
@ -157,11 +183,6 @@ impl FromStr for TokenStream {
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for item in self.inner.iter() {
let item = syntax::print::pprust::item_to_string(item);
try!(f.write_str(&item));
try!(f.write_str("\n"));
}
Ok(())
self.inner.fmt(f)
}
}
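// (Illustrative sketch, not part of this diff: a minimal custom derive built on
// the public "macros 1.1" surface shown above. The derive name `Double`, the
// generated function, and the `proc-macro = true` Cargo setting are assumptions
// made for this example, not anything defined by this change.)
extern crate proc_macro;
use proc_macro::TokenStream;

#[proc_macro_derive(Double)]
pub fn double(input: TokenStream) -> TokenStream {
    // Macros 1.1 round-trips through strings: render the annotated item, build
    // new source text, and parse it back into a TokenStream via `FromStr`.
    let _item_source = input.to_string();
    match "fn generated_by_derive() {}".parse() {
        Ok(tokens) => tokens,
        Err(_) => panic!("generated source failed to lex"),
    }
}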

View File

@ -13,6 +13,7 @@ pub use self::AnnNode::*;
use syntax::abi::Abi;
use syntax::ast;
use syntax::codemap::{CodeMap, Spanned};
use syntax::parse::ParseSess;
use syntax::parse::lexer::comments;
use syntax::print::pp::{self, break_offset, word, space, hardbreak};
use syntax::print::pp::{Breaks, eof};
@ -21,7 +22,6 @@ use syntax::print::pprust::{self as ast_pp, PrintState};
use syntax::ptr::P;
use syntax::symbol::keywords;
use syntax_pos::{self, BytePos};
use errors;
use hir;
use hir::{PatKind, RegionTyParamBound, TraitTyParamBound, TraitBoundModifier};
@ -116,7 +116,7 @@ pub const default_columns: usize = 78;
/// it can scan the input text for comments and literals to
/// copy forward.
pub fn print_crate<'a>(cm: &'a CodeMap,
span_diagnostic: &errors::Handler,
sess: &ParseSess,
krate: &hir::Crate,
filename: String,
input: &mut Read,
@ -124,8 +124,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap,
ann: &'a PpAnn,
is_expanded: bool)
-> io::Result<()> {
let mut s = State::new_from_input(cm, span_diagnostic, filename, input,
out, ann, is_expanded);
let mut s = State::new_from_input(cm, sess, filename, input, out, ann, is_expanded);
// When printing the AST, we sometimes need to inject `#[no_std]` here.
// Since you can't compile the HIR, it's not necessary.
@ -137,16 +136,14 @@ pub fn print_crate<'a>(cm: &'a CodeMap,
impl<'a> State<'a> {
pub fn new_from_input(cm: &'a CodeMap,
span_diagnostic: &errors::Handler,
sess: &ParseSess,
filename: String,
input: &mut Read,
out: Box<Write + 'a>,
ann: &'a PpAnn,
is_expanded: bool)
-> State<'a> {
let (cmnts, lits) = comments::gather_comments_and_literals(span_diagnostic,
filename,
input);
let (cmnts, lits) = comments::gather_comments_and_literals(sess, filename, input);
State::new(cm,
out,

View File

@ -206,7 +206,7 @@ declare_lint! {
declare_lint! {
pub EXTRA_REQUIREMENT_IN_IMPL,
Warn,
Deny,
"detects extra requirements in impls that were erroneously allowed"
}

View File

@ -25,6 +25,7 @@ use lint;
use middle::cstore;
use syntax::ast::{self, IntTy, UintTy};
use syntax::parse::token;
use syntax::parse;
use syntax::symbol::Symbol;
use syntax::feature_gate::UnstableFeatures;
@ -1259,7 +1260,7 @@ pub fn parse_cfgspecs(cfgspecs: Vec<String> ) -> ast::CrateConfig {
let meta_item = panictry!(parser.parse_meta_item());
if !parser.reader.is_eof() {
if parser.token != token::Eof {
early_error(ErrorOutputType::default(), &format!("invalid --cfg argument: {}", s))
} else if meta_item.is_meta_item_list() {
let msg =

View File

@ -681,6 +681,7 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
should_test: sess.opts.test,
..syntax::ext::expand::ExpansionConfig::default(crate_name.to_string())
};
let mut ecx = ExtCtxt::new(&sess.parse_sess, cfg, &mut resolver);
let err_count = ecx.parse_sess.span_diagnostic.err_count();
@ -740,17 +741,6 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
"checking for inline asm in case the target doesn't support it",
|| no_asm::check_crate(sess, &krate));
// Needs to go *after* expansion to be able to check the results of macro expansion.
time(time_passes, "complete gated feature checking", || {
sess.track_errors(|| {
syntax::feature_gate::check_crate(&krate,
&sess.parse_sess,
&sess.features.borrow(),
&attributes,
sess.opts.unstable_features);
})
})?;
time(sess.time_passes(),
"early lint checks",
|| lint::check_ast_crate(sess, &krate));
@ -768,6 +758,17 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
Ok(())
})?;
// Needs to go *after* expansion to be able to check the results of macro expansion.
time(time_passes, "complete gated feature checking", || {
sess.track_errors(|| {
syntax::feature_gate::check_crate(&krate,
&sess.parse_sess,
&sess.features.borrow(),
&attributes,
sess.opts.unstable_features);
})
})?;
// Lower ast -> hir.
let hir_forest = time(sess.time_passes(), "lowering ast -> hir", || {
let hir_crate = lower_crate(sess, &krate, &mut resolver);

View File

@ -82,6 +82,7 @@ use rustc::util::common::time;
use serialize::json::ToJson;
use std::any::Any;
use std::cmp::max;
use std::cmp::Ordering::Equal;
use std::default::Default;
@ -1018,15 +1019,34 @@ fn parse_crate_attrs<'a>(sess: &'a Session, input: &Input) -> PResult<'a, Vec<as
}
}
/// Runs `f` in a suitable thread for running `rustc`; returns a
/// `Result` with either the return value of `f` or -- if a panic
/// occurs -- the panic value.
pub fn in_rustc_thread<F, R>(f: F) -> Result<R, Box<Any + Send>>
where F: FnOnce() -> R + Send + 'static,
R: Send + 'static,
{
// Temporarily have stack size set to 16MB to deal with nom-using crates failing
const STACK_SIZE: usize = 16 * 1024 * 1024; // 16MB
let mut cfg = thread::Builder::new().name("rustc".to_string());
// FIXME: Hacks on hacks. If the env is trying to override the stack size
// then *don't* set it explicitly.
if env::var_os("RUST_MIN_STACK").is_none() {
cfg = cfg.stack_size(STACK_SIZE);
}
let thread = cfg.spawn(f);
thread.unwrap().join()
}
/// Run a procedure which will detect panics in the compiler and print nicer
/// error messages rather than just failing the test.
///
/// The diagnostic emitter yielded to the procedure should be used for reporting
/// errors of the compiler.
pub fn monitor<F: FnOnce() + Send + 'static>(f: F) {
// Temporarily have stack size set to 16MB to deal with nom-using crates failing
const STACK_SIZE: usize = 16 * 1024 * 1024; // 16MB
struct Sink(Arc<Mutex<Vec<u8>>>);
impl Write for Sink {
fn write(&mut self, data: &[u8]) -> io::Result<usize> {
@ -1040,20 +1060,12 @@ pub fn monitor<F: FnOnce() + Send + 'static>(f: F) {
let data = Arc::new(Mutex::new(Vec::new()));
let err = Sink(data.clone());
let mut cfg = thread::Builder::new().name("rustc".to_string());
let result = in_rustc_thread(move || {
io::set_panic(Some(box err));
f()
});
// FIXME: Hacks on hacks. If the env is trying to override the stack size
// then *don't* set it explicitly.
if env::var_os("RUST_MIN_STACK").is_none() {
cfg = cfg.stack_size(STACK_SIZE);
}
let thread = cfg.spawn(move || {
io::set_panic(Some(box err));
f()
});
if let Err(value) = thread.unwrap().join() {
if let Err(value) = result {
// Thread panicked without emitting a fatal diagnostic
if !value.is::<errors::FatalError>() {
let emitter =

View File

@ -838,7 +838,7 @@ pub fn print_after_parsing(sess: &Session,
debug!("pretty printing source code {:?}", s);
let sess = annotation.sess();
pprust::print_crate(sess.codemap(),
sess.diagnostic(),
&sess.parse_sess,
krate,
src_name.to_string(),
&mut rdr,
@ -896,7 +896,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
debug!("pretty printing source code {:?}", s);
let sess = annotation.sess();
pprust::print_crate(sess.codemap(),
sess.diagnostic(),
&sess.parse_sess,
krate,
src_name.to_string(),
&mut rdr,
@ -920,7 +920,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
debug!("pretty printing source code {:?}", s);
let sess = annotation.sess();
pprust_hir::print_crate(sess.codemap(),
sess.diagnostic(),
&sess.parse_sess,
krate,
src_name.to_string(),
&mut rdr,
@ -945,7 +945,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
let sess = annotation.sess();
let ast_map = annotation.ast_map().expect("--unpretty missing HIR map");
let mut pp_state = pprust_hir::State::new_from_input(sess.codemap(),
sess.diagnostic(),
&sess.parse_sess,
src_name.to_string(),
&mut rdr,
box out,

View File

@ -578,6 +578,7 @@ impl<'a> CrateLoader<'a> {
use proc_macro::__internal::Registry;
use rustc_back::dynamic_lib::DynamicLibrary;
use syntax_ext::deriving::custom::CustomDerive;
use syntax_ext::proc_macro_impl::AttrProcMacro;
let path = match dylib {
Some(dylib) => dylib,
@ -613,6 +614,15 @@ impl<'a> CrateLoader<'a> {
);
self.0.push((Symbol::intern(trait_name), Rc::new(derive)));
}
fn register_attr_proc_macro(&mut self,
name: &str,
expand: fn(TokenStream, TokenStream) -> TokenStream) {
let expand = SyntaxExtension::AttrProcMacro(
Box::new(AttrProcMacro { inner: expand })
);
self.0.push((Symbol::intern(name), Rc::new(expand)));
}
}
let mut my_registrar = MyRegistrar(Vec::new());
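// Illustrative sketch, not part of this change: the user-side shape of an
// attribute proc macro that the new `register_attr_proc_macro` hook would load
// from a proc-macro crate. Requires `#![feature(proc_macro)]`; the `trace`
// name is hypothetical.
//
//     use proc_macro::TokenStream;
//
//     #[proc_macro_attribute]
//     pub fn trace(args: TokenStream, item: TokenStream) -> TokenStream {
//         // A real macro would inspect `args` and rewrite `item`; this is a no-op.
//         let _ = args;
//         item
//     }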

View File

@ -29,7 +29,7 @@ use rustc_back::PanicStrategy;
use syntax::ast;
use syntax::attr;
use syntax::parse::new_parser_from_source_str;
use syntax::parse::filemap_to_tts;
use syntax::symbol::Symbol;
use syntax_pos::{mk_sp, Span};
use rustc::hir::svh::Svh;
@ -399,19 +399,9 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {
let (name, def) = data.get_macro(id.index);
let source_name = format!("<{} macros>", name);
// NB: Don't use parse_tts_from_source_str because it parses with quote_depth > 0.
let mut parser = new_parser_from_source_str(&sess.parse_sess, source_name, def.body);
let lo = parser.span.lo;
let body = match parser.parse_all_token_trees() {
Ok(body) => body,
Err(mut err) => {
err.emit();
sess.abort_if_errors();
unreachable!();
}
};
let local_span = mk_sp(lo, parser.prev_span.hi);
let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body);
let local_span = mk_sp(filemap.start_pos, filemap.end_pos);
let body = filemap_to_tts(&sess.parse_sess, filemap);
// Mark the attrs as used
let attrs = data.get_item_attrs(id.index);

View File

@ -143,7 +143,7 @@ impl<'a> Resolver<'a> {
let is_prelude = attr::contains_name(&item.attrs, "prelude_import");
match view_path.node {
ViewPathSimple(binding, ref full_path) => {
ViewPathSimple(mut binding, ref full_path) => {
let mut source = full_path.segments.last().unwrap().identifier;
let source_name = source.name;
if source_name == "mod" || source_name == "self" {
@ -157,6 +157,9 @@ impl<'a> Resolver<'a> {
ModuleKind::Block(..) => unreachable!(),
};
source.name = crate_name;
if binding.name == "$crate" {
binding.name = crate_name;
}
self.session.struct_span_warn(item.span, "`$crate` may not be imported")
.note("`use $crate;` was erroneously allowed and \

View File

@ -61,12 +61,13 @@ use syntax::ast::{FnDecl, ForeignItem, ForeignItemKind, Generics};
use syntax::ast::{Item, ItemKind, ImplItem, ImplItemKind};
use syntax::ast::{Local, Mutability, Pat, PatKind, Path};
use syntax::ast::{QSelf, TraitItemKind, TraitRef, Ty, TyKind};
use syntax::feature_gate::{emit_feature_err, GateIssue};
use syntax::feature_gate::{feature_err, emit_feature_err, GateIssue};
use syntax_pos::{Span, DUMMY_SP, MultiSpan};
use errors::DiagnosticBuilder;
use std::cell::{Cell, RefCell};
use std::cmp;
use std::fmt;
use std::mem::replace;
use std::rc::Rc;
@ -1123,6 +1124,12 @@ pub struct Resolver<'a> {
// Avoid duplicated errors for "name already defined".
name_already_seen: FxHashMap<Name, Span>,
// If `#![feature(proc_macro)]` is set
proc_macro_enabled: bool,
// A set of procedural macros imported by `#[macro_use]` that have already been warned about
warned_proc_macros: FxHashSet<Name>,
}
pub struct ResolverArenas<'a> {
@ -1227,6 +1234,8 @@ impl<'a> Resolver<'a> {
invocations.insert(Mark::root(),
arenas.alloc_invocation_data(InvocationData::root(graph_root)));
let features = session.features.borrow();
Resolver {
session: session,
@ -1284,7 +1293,9 @@ impl<'a> Resolver<'a> {
span: DUMMY_SP,
vis: ty::Visibility::Public,
}),
use_extern_macros: session.features.borrow().use_extern_macros,
// `#![feature(proc_macro)]` implies `#![feature(use_extern_macros)]`
use_extern_macros: features.use_extern_macros || features.proc_macro,
exported_macros: Vec::new(),
crate_loader: crate_loader,
@ -1296,6 +1307,8 @@ impl<'a> Resolver<'a> {
invocations: invocations,
name_already_seen: FxHashMap(),
whitelisted_legacy_custom_derives: Vec::new(),
proc_macro_enabled: features.proc_macro,
warned_proc_macros: FxHashSet(),
}
}
@ -1525,6 +1538,8 @@ impl<'a> Resolver<'a> {
debug!("(resolving item) resolving {}", name);
self.check_proc_macro_attrs(&item.attrs);
match item.node {
ItemKind::Enum(_, ref generics) |
ItemKind::Ty(_, ref generics) |
@ -1554,6 +1569,8 @@ impl<'a> Resolver<'a> {
walk_list!(this, visit_ty_param_bound, bounds);
for trait_item in trait_items {
this.check_proc_macro_attrs(&trait_item.attrs);
match trait_item.node {
TraitItemKind::Const(_, ref default) => {
// Only impose the restrictions of
@ -1738,6 +1755,7 @@ impl<'a> Resolver<'a> {
this.with_self_rib(Def::SelfTy(trait_id, Some(item_def_id)), |this| {
this.with_current_self_type(self_type, |this| {
for impl_item in impl_items {
this.check_proc_macro_attrs(&impl_item.attrs);
this.resolve_visibility(&impl_item.vis);
match impl_item.node {
ImplItemKind::Const(..) => {
@ -3184,6 +3202,31 @@ impl<'a> Resolver<'a> {
let msg = "`self` no longer imports values".to_string();
self.session.add_lint(lint::builtin::LEGACY_IMPORTS, id, span, msg);
}
fn check_proc_macro_attrs(&mut self, attrs: &[ast::Attribute]) {
if self.proc_macro_enabled { return; }
for attr in attrs {
let maybe_binding = self.builtin_macros.get(&attr.name()).cloned().or_else(|| {
let ident = Ident::with_empty_ctxt(attr.name());
self.resolve_lexical_macro_path_segment(ident, MacroNS, None).ok()
});
if let Some(binding) = maybe_binding {
if let SyntaxExtension::AttrProcMacro(..) = *binding.get_macro(self) {
attr::mark_known(attr);
let msg = "attribute procedural macros are experimental";
let feature = "proc_macro";
feature_err(&self.session.parse_sess, feature,
attr.span, GateIssue::Language, msg)
.span_note(binding.span, "procedural macro imported here")
.emit();
}
}
}
}
}
fn is_struct_like(def: Def) -> bool {
@ -3224,7 +3267,7 @@ fn show_candidates(session: &mut DiagnosticBuilder,
better: bool) {
// don't show more than MAX_CANDIDATES results, so
// we're consistent with the trait suggestions
const MAX_CANDIDATES: usize = 5;
const MAX_CANDIDATES: usize = 4;
// we want consistent results across executions, but candidates are produced
// by iterating through a hash map, so make sure they are ordered:
@ -3237,21 +3280,21 @@ fn show_candidates(session: &mut DiagnosticBuilder,
1 => " is found in another module, you can import it",
_ => "s are found in other modules, you can import them",
};
session.help(&format!("possible {}candidate{} into scope:", better, msg_diff));
let count = path_strings.len() as isize - MAX_CANDIDATES as isize + 1;
for (idx, path_string) in path_strings.iter().enumerate() {
if idx == MAX_CANDIDATES - 1 && count > 1 {
session.help(
&format!(" and {} other candidates", count).to_string(),
);
break;
} else {
session.help(
&format!(" `use {};`", path_string).to_string(),
);
}
}
let end = cmp::min(MAX_CANDIDATES, path_strings.len());
session.help(&format!("possible {}candidate{} into scope:{}{}",
better,
msg_diff,
&path_strings[0..end].iter().map(|candidate| {
format!("\n `use {};`", candidate)
}).collect::<String>(),
if path_strings.len() > MAX_CANDIDATES {
format!("\nand {} other candidates",
path_strings.len() - MAX_CANDIDATES)
} else {
"".to_owned()
}
));
}
/// A somewhat inefficient routine to obtain the name of a module.

View File

@ -27,7 +27,7 @@ use syntax::ext::base::{NormalTT, Resolver as SyntaxResolver, SyntaxExtension};
use syntax::ext::expand::{Expansion, mark_tts};
use syntax::ext::hygiene::Mark;
use syntax::ext::tt::macro_rules;
use syntax::feature_gate::{emit_feature_err, GateIssue};
use syntax::feature_gate::{emit_feature_err, GateIssue, is_builtin_attr};
use syntax::fold::{self, Folder};
use syntax::ptr::P;
use syntax::symbol::keywords;
@ -183,6 +183,10 @@ impl<'a> base::Resolver for Resolver<'a> {
},
None => {}
}
if self.proc_macro_enabled && !is_builtin_attr(&attrs[i]) {
return Some(attrs.remove(i));
}
}
None
}
@ -373,6 +377,10 @@ impl<'a> Resolver<'a> {
let resolution = self.resolve_lexical_macro_path_segment(ident, MacroNS, Some(span));
let (legacy_resolution, resolution) = match (legacy_resolution, resolution) {
(Some(legacy_resolution), Ok(resolution)) => (legacy_resolution, resolution),
(Some(MacroBinding::Modern(binding)), Err(_)) => {
self.err_if_macro_use_proc_macro(ident.name, span, binding);
continue
},
_ => continue,
};
let (legacy_span, participle) = match legacy_resolution {
@ -469,4 +477,37 @@ impl<'a> Resolver<'a> {
self.exported_macros.push(def);
}
}
/// Error if `ext` is a Macros 1.1 procedural macro being imported by `#[macro_use]`
fn err_if_macro_use_proc_macro(&mut self, name: Name, use_span: Span,
binding: &NameBinding<'a>) {
use self::SyntaxExtension::*;
let krate = binding.def().def_id().krate;
// Plugin-based syntax extensions are exempt from this check
if krate == BUILTIN_MACROS_CRATE { return; }
let ext = binding.get_macro(self);
match *ext {
// If `ext` is a procedural macro, check if we've already warned about it
AttrProcMacro(_) | ProcMacro(_) => if !self.warned_proc_macros.insert(name) { return; },
_ => return,
}
let warn_msg = match *ext {
AttrProcMacro(_) => "attribute procedural macros cannot be \
imported with `#[macro_use]`",
ProcMacro(_) => "procedural macros cannot be imported with `#[macro_use]`",
_ => return,
};
let crate_name = self.session.cstore.crate_name(krate);
self.session.struct_span_err(use_span, warn_msg)
.help(&format!("instead, import the procedural macro like any other item: \
`use {}::{};`", crate_name, name))
.emit();
}
}

View File

@ -17,9 +17,9 @@ use std::env;
use std::path::Path;
use syntax::ast;
use syntax::parse::lexer::{self, Reader, StringReader};
use syntax::parse::filemap_to_tts;
use syntax::parse::lexer::{self, StringReader};
use syntax::parse::token::{self, Token};
use syntax::parse::parser::Parser;
use syntax::symbol::keywords;
use syntax::tokenstream::TokenTree;
use syntax_pos::*;
@ -85,14 +85,13 @@ impl<'a> SpanUtils<'a> {
let filemap = self.sess
.codemap()
.new_filemap(String::from("<anon-dxr>"), None, self.snippet(span));
let s = self.sess;
lexer::StringReader::new(s.diagnostic(), filemap)
lexer::StringReader::new(&self.sess.parse_sess, filemap)
}
fn span_to_tts(&self, span: Span) -> Vec<TokenTree> {
let srdr = self.retokenise_span(span);
let mut p = Parser::new(&self.sess.parse_sess, Box::new(srdr), None, false);
p.parse_all_token_trees().expect("Couldn't re-parse span")
let filename = String::from("<anon-dxr>");
let filemap = self.sess.codemap().new_filemap(filename, None, self.snippet(span));
filemap_to_tts(&self.sess.parse_sess, filemap)
}
// Re-parses a path and returns the span for the last identifier in the path

View File

@ -667,7 +667,9 @@ pub fn run_passes(sess: &Session,
// Sanity check
assert!(trans.modules.len() == sess.opts.cg.codegen_units ||
sess.opts.debugging_opts.incremental.is_some());
sess.opts.debugging_opts.incremental.is_some() ||
!sess.opts.output_types.should_trans() ||
sess.opts.debugging_opts.no_trans);
let tm = create_target_machine(sess);
@ -756,7 +758,7 @@ pub fn run_passes(sess: &Session,
// the compiler decides the number of codegen units (and will
// potentially create hundreds of them).
let num_workers = work_items.len() - 1;
if num_workers == 1 {
if num_workers <= 1 {
run_work_singlethreaded(sess, &trans.exported_symbols, work_items);
} else {
run_work_multithreaded(sess, work_items, num_workers);

View File

@ -1145,6 +1145,23 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
};
let no_builtins = attr::contains_name(&krate.attrs, "no_builtins");
// Skip crate items and just output metadata in -Z no-trans mode.
if tcx.sess.opts.debugging_opts.no_trans ||
!tcx.sess.opts.output_types.should_trans() {
let empty_exported_symbols = ExportedSymbols::empty();
let linker_info = LinkerInfo::new(&shared_ccx, &empty_exported_symbols);
return CrateTranslation {
modules: vec![],
metadata_module: metadata_module,
link: link_meta,
metadata: metadata,
exported_symbols: empty_exported_symbols,
no_builtins: no_builtins,
linker_info: linker_info,
windows_subsystem: None,
};
}
// Run the translation item collector and partition the collected items into
// codegen units.
let (codegen_units, symbol_map) = collect_and_partition_translation_items(&shared_ccx);
@ -1181,22 +1198,6 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
assert_module_sources::assert_module_sources(tcx, &modules);
// Skip crate items and just output metadata in -Z no-trans mode.
if tcx.sess.opts.debugging_opts.no_trans ||
tcx.sess.opts.output_types.contains_key(&config::OutputType::Metadata) {
let linker_info = LinkerInfo::new(&shared_ccx, &ExportedSymbols::empty());
return CrateTranslation {
modules: modules,
metadata_module: metadata_module,
link: link_meta,
metadata: metadata,
exported_symbols: ExportedSymbols::empty(),
no_builtins: no_builtins,
linker_info: linker_info,
windows_subsystem: None,
};
}
// Instantiate translation items without filling out definitions yet...
for ccx in crate_context_list.iter_need_trans() {
let cgu = ccx.codegen_unit();

View File

@ -102,6 +102,7 @@ impl<'cx, 'tcx, 'v> ItemLikeVisitor<'v> for OverlapChecker<'cx, 'tcx> {
match item.node {
hir::ItemEnum(..) |
hir::ItemStruct(..) |
hir::ItemTrait(..) |
hir::ItemUnion(..) => {
let type_def_id = self.tcx.map.local_def_id(item.id);
self.check_for_overlapping_inherent_impls(type_def_id);

View File

@ -1675,7 +1675,6 @@ fn is_unsized<'gcx: 'tcx, 'tcx>(astconv: &AstConv<'gcx, 'tcx>,
for ab in ast_bounds {
if let &hir::TraitTyParamBound(ref ptr, hir::TraitBoundModifier::Maybe) = ab {
if unbound.is_none() {
assert!(ptr.bound_lifetimes.is_empty());
unbound = Some(ptr.trait_ref.clone());
} else {
span_err!(tcx.sess, span, E0203,

View File

@ -27,7 +27,7 @@ use std::io;
use std::io::prelude::*;
use syntax::codemap::CodeMap;
use syntax::parse::lexer::{self, Reader, TokenAndSpan};
use syntax::parse::lexer::{self, TokenAndSpan};
use syntax::parse::token;
use syntax::parse;
use syntax_pos::Span;
@ -42,8 +42,7 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>
let mut out = Vec::new();
write_header(class, id, &mut out).unwrap();
let mut classifier = Classifier::new(lexer::StringReader::new(&sess.span_diagnostic, fm),
sess.codemap());
let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm), sess.codemap());
if let Err(_) = classifier.write_source(&mut out) {
return format!("<pre>{}</pre>", src);
}
@ -63,8 +62,7 @@ pub fn render_inner_with_highlighting(src: &str) -> io::Result<String> {
let fm = sess.codemap().new_filemap("<stdin>".to_string(), None, src.to_string());
let mut out = Vec::new();
let mut classifier = Classifier::new(lexer::StringReader::new(&sess.span_diagnostic, fm),
sess.codemap());
let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm), sess.codemap());
classifier.write_source(&mut out)?;
Ok(String::from_utf8_lossy(&out).into_owned())
@ -185,10 +183,10 @@ impl<'a> Classifier<'a> {
Ok(tas) => tas,
Err(_) => {
self.lexer.emit_fatal_errors();
self.lexer.span_diagnostic.struct_warn("Backing out of syntax highlighting")
.note("You probably did not intend to render this \
as a rust code-block")
.emit();
self.lexer.sess.span_diagnostic
.struct_warn("Backing out of syntax highlighting")
.note("You probably did not intend to render this as a rust code-block")
.emit();
return Err(io::Error::new(io::ErrorKind::Other, ""));
}
};

View File

@ -29,7 +29,7 @@ use rustc::session::config::{OutputType, OutputTypes, Externs};
use rustc::session::search_paths::{SearchPaths, PathKind};
use rustc_back::dynamic_lib::DynamicLibrary;
use rustc_back::tempdir::TempDir;
use rustc_driver::{driver, Compilation};
use rustc_driver::{self, driver, Compilation};
use rustc_driver::driver::phase_2_configure_and_expand;
use rustc_metadata::cstore::CStore;
use rustc_resolve::MakeGlobMap;
@ -429,19 +429,26 @@ impl Collector {
should_panic: testing::ShouldPanic::No,
},
testfn: testing::DynTestFn(box move |()| {
runtest(&test,
&cratename,
cfgs,
libs,
externs,
should_panic,
no_run,
as_test_harness,
compile_fail,
error_codes,
&opts,
maybe_sysroot);
})
match {
rustc_driver::in_rustc_thread(move || {
runtest(&test,
&cratename,
cfgs,
libs,
externs,
should_panic,
no_run,
as_test_harness,
compile_fail,
error_codes,
&opts,
maybe_sysroot)
})
} {
Ok(()) => (),
Err(err) => panic::resume_unwind(err),
}
}),
});
}

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[allow(dead_code)] // not used on emscripten
#![allow(warnings)] // not used on emscripten
use env;
use net::{SocketAddr, SocketAddrV4, SocketAddrV6, Ipv4Addr, Ipv6Addr, ToSocketAddrs};

View File

@ -172,6 +172,7 @@ mod tests {
macro_rules! test_checked_next_power_of_two {
($test_name:ident, $T:ident) => (
#[cfg_attr(target_os = "emscripten", ignore)] // FIXME(#39119)
fn $test_name() {
#![test]
assert_eq!((0 as $T).checked_next_power_of_two(), Some(1));

View File

@ -491,9 +491,12 @@ pub fn panicking() -> bool {
/// Puts the current thread to sleep for the specified amount of time.
///
/// The thread may sleep longer than the duration specified due to scheduling
/// specifics or platform-dependent functionality. Note that on unix platforms
/// this function will not return early due to a signal being received or a
/// spurious wakeup.
/// specifics or platform-dependent functionality.
///
/// # Platform behavior
///
/// On Unix platforms this function will not return early due to a
/// signal being received or a spurious wakeup.
///
/// # Examples
///
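/// A minimal sketch of a call (illustrative; the full example lives outside
/// this hunk):
///
/// ```
/// use std::{thread, time};
///
/// thread::sleep(time::Duration::from_millis(10));
/// ```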

View File

@ -615,9 +615,7 @@ impl<'a> ExtCtxt<'a> {
pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
-> parser::Parser<'a> {
let mut parser = parse::tts_to_parser(self.parse_sess, tts.to_vec());
parser.allow_interpolated_tts = false; // FIXME(jseyfried) `quote!` can't handle these yet
parser
parse::tts_to_parser(self.parse_sess, tts.to_vec())
}
pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() }
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }

View File

@ -21,7 +21,7 @@ use ext::base::*;
use feature_gate::{self, Features};
use fold;
use fold::*;
use parse::{ParseSess, DirectoryOwnership, PResult, lexer};
use parse::{ParseSess, DirectoryOwnership, PResult, filemap_to_tts};
use parse::parser::Parser;
use parse::token;
use print::pprust;
@ -364,7 +364,9 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
kind.expect_from_annotatables(items)
}
SyntaxExtension::AttrProcMacro(ref mac) => {
let attr_toks = TokenStream::from_tts(tts_for_attr(&attr, &self.cx.parse_sess));
let attr_toks = TokenStream::from_tts(tts_for_attr_args(&attr,
&self.cx.parse_sess));
let item_toks = TokenStream::from_tts(tts_for_item(&item, &self.cx.parse_sess));
let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
@ -640,17 +642,35 @@ fn tts_for_item(item: &Annotatable, parse_sess: &ParseSess) -> Vec<TokenTree> {
string_to_tts(text, parse_sess)
}
fn tts_for_attr(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<TokenTree> {
string_to_tts(pprust::attr_to_string(attr), parse_sess)
fn tts_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<TokenTree> {
use ast::MetaItemKind::*;
use print::pp::Breaks;
use print::pprust::PrintState;
let token_string = match attr.value.node {
// For `#[foo]`, returns an empty token stream
Word => return vec![],
// For `#[foo(bar, baz)]`, returns `(bar, baz)`
List(ref items) => pprust::to_string(|s| {
s.popen()?;
s.commasep(Breaks::Consistent,
&items[..],
|s, i| s.print_meta_list_item(&i))?;
s.pclose()
}),
// For `#[foo = "bar"]`, returns `= "bar"`
NameValue(ref lit) => pprust::to_string(|s| {
s.word_space("=")?;
s.print_literal(lit)
}),
};
string_to_tts(token_string, parse_sess)
}
fn string_to_tts(text: String, parse_sess: &ParseSess) -> Vec<TokenTree> {
let filemap = parse_sess.codemap()
.new_filemap(String::from("<macro expansion>"), None, text);
let lexer = lexer::StringReader::new(&parse_sess.span_diagnostic, filemap);
let mut parser = Parser::new(parse_sess, Box::new(lexer), None, false);
panictry!(parser.parse_all_token_trees())
let filename = String::from("<macro expansion>");
filemap_to_tts(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
}
impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {

View File

@ -82,7 +82,6 @@ use ast::Ident;
use syntax_pos::{self, BytePos, mk_sp, Span};
use codemap::Spanned;
use errors::FatalError;
use parse::lexer::*; //resolve bug?
use parse::{Directory, ParseSess};
use parse::parser::{PathStyle, Parser};
use parse::token::{DocComment, MatchNt, SubstNt};
@ -407,9 +406,9 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
Success(())
}
pub fn parse(sess: &ParseSess, rdr: TtReader, ms: &[TokenTree], directory: Option<Directory>)
pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: Option<Directory>)
-> NamedParseResult {
let mut parser = Parser::new(sess, Box::new(rdr), directory, true);
let mut parser = Parser::new(sess, tts, directory, true);
let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
let mut next_eis = Vec::new(); // or proceed normally
@ -481,23 +480,8 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
match name {
"tt" => {
p.quote_depth += 1; //but in theory, non-quoted tts might be useful
let mut tt = panictry!(p.parse_token_tree());
let tt = panictry!(p.parse_token_tree());
p.quote_depth -= 1;
while let TokenTree::Token(sp, token::Interpolated(nt)) = tt {
if let token::NtTT(..) = *nt {
match Rc::try_unwrap(nt) {
Ok(token::NtTT(sub_tt)) => tt = sub_tt,
Ok(_) => unreachable!(),
Err(nt_rc) => match *nt_rc {
token::NtTT(ref sub_tt) => tt = sub_tt.clone(),
_ => unreachable!(),
},
}
} else {
tt = TokenTree::Token(sp, token::Interpolated(nt.clone()));
break
}
}
return token::NtTT(tt);
}
_ => {}
@ -527,7 +511,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
"ident" => match p.token {
token::Ident(sn) => {
p.bump();
token::NtIdent(Spanned::<Ident>{node: sn, span: p.span})
token::NtIdent(Spanned::<Ident>{node: sn, span: p.prev_span})
}
_ => {
let token_str = pprust::token_to_string(&p.token);

View File

@ -16,8 +16,8 @@ use ext::expand::{Expansion, ExpansionKind};
use ext::tt::macro_parser::{Success, Error, Failure};
use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
use ext::tt::macro_parser::{parse, parse_failure_msg};
use ext::tt::transcribe::transcribe;
use parse::{Directory, ParseSess};
use parse::lexer::new_tt_reader;
use parse::parser::Parser;
use parse::token::{self, NtTT, Token};
use parse::token::Token::*;
@ -113,13 +113,12 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
_ => cx.span_bug(sp, "malformed macro rhs"),
};
// rhs has holes (`$id` and `$(...)`) that need to be filled
let trncbr =
new_tt_reader(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
let tts = transcribe(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
let directory = Directory {
path: cx.current_expansion.module.directory.clone(),
ownership: cx.current_expansion.directory_ownership,
};
let mut p = Parser::new(cx.parse_sess(), Box::new(trncbr), Some(directory), false);
let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), false);
p.root_module_name = cx.current_expansion.module.mod_path.last()
.map(|id| (*id.name.as_str()).to_owned());
@ -187,10 +186,8 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
})),
];
// Parse the macro_rules! invocation (`none` is for no interpolations):
let arg_reader = new_tt_reader(&sess.span_diagnostic, None, def.body.clone());
let argument_map = match parse(sess, arg_reader, &argument_gram, None) {
// Parse the macro_rules! invocation
let argument_map = match parse(sess, def.body.clone(), &argument_gram, None) {
Success(m) => m,
Failure(sp, tok) => {
let s = parse_failure_msg(tok);

View File

@ -10,10 +10,9 @@
use self::LockstepIterSize::*;
use ast::Ident;
use errors::{Handler, DiagnosticBuilder};
use errors::Handler;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use parse::token::{self, MatchNt, SubstNt, Token, NtIdent};
use parse::lexer::TokenAndSpan;
use parse::token::{self, MatchNt, SubstNt, Token, NtIdent, NtTT};
use syntax_pos::{Span, DUMMY_SP};
use tokenstream::{self, TokenTree};
use util::small_vector::SmallVector;
@ -32,8 +31,8 @@ struct TtFrame {
}
#[derive(Clone)]
pub struct TtReader<'a> {
pub sp_diag: &'a Handler,
struct TtReader<'a> {
sp_diag: &'a Handler,
/// the unzipped tree:
stack: SmallVector<TtFrame>,
/* for MBE-style macro transcription */
@ -41,20 +40,15 @@ pub struct TtReader<'a> {
repeat_idx: Vec<usize>,
repeat_len: Vec<usize>,
/* cached: */
pub cur_tok: Token,
pub cur_span: Span,
/// Transform doc comments. Only useful in macro invocations
pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
}
/// This can do Macro-By-Example transcription. On the other hand, if
/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
/// (and should) be None.
pub fn new_tt_reader(sp_diag: &Handler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
src: Vec<tokenstream::TokenTree>)
-> TtReader {
pub fn transcribe(sp_diag: &Handler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
src: Vec<tokenstream::TokenTree>)
-> Vec<TokenTree> {
let mut r = TtReader {
sp_diag: sp_diag,
stack: SmallVector::one(TtFrame {
@ -73,13 +67,15 @@ pub fn new_tt_reader(sp_diag: &Handler,
},
repeat_idx: Vec::new(),
repeat_len: Vec::new(),
/* dummy values, never read: */
cur_tok: token::Eof,
cur_span: DUMMY_SP,
fatal_errs: Vec::new(),
};
tt_next_token(&mut r); /* get cur_tok and cur_span set up */
r
let mut tts = Vec::new();
let mut prev_span = DUMMY_SP;
while let Some(tt) = tt_next_token(&mut r, prev_span) {
prev_span = tt.span();
tts.push(tt);
}
tts
}
fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> {
@ -153,38 +149,24 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
/// Return the next token from the TtReader.
/// EFFECT: advances the reader's token field
pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
// FIXME(pcwalton): Bad copy?
let ret_val = TokenAndSpan {
tok: r.cur_tok.clone(),
sp: r.cur_span.clone(),
};
fn tt_next_token(r: &mut TtReader, prev_span: Span) -> Option<TokenTree> {
loop {
let should_pop = match r.stack.last() {
None => {
assert_eq!(ret_val.tok, token::Eof);
return ret_val;
}
Some(frame) => {
if frame.idx < frame.forest.len() {
break;
}
!frame.dotdotdoted ||
*r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1
let should_pop = if let Some(frame) = r.stack.last() {
if frame.idx < frame.forest.len() {
break;
}
!frame.dotdotdoted || *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1
} else {
return None;
};
/* done with this set; pop or repeat? */
if should_pop {
let prev = r.stack.pop().unwrap();
match r.stack.last_mut() {
None => {
r.cur_tok = token::Eof;
return ret_val;
}
Some(frame) => {
frame.idx += 1;
}
if let Some(frame) = r.stack.last_mut() {
frame.idx += 1;
} else {
return None;
}
if prev.dotdotdoted {
r.repeat_idx.pop();
@ -194,8 +176,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
*r.repeat_idx.last_mut().unwrap() += 1;
r.stack.last_mut().unwrap().idx = 0;
if let Some(tk) = r.stack.last().unwrap().sep.clone() {
r.cur_tok = tk; // repeat same span, I guess
return ret_val;
return Some(TokenTree::Token(prev_span, tk)); // repeat same span, I guess
}
}
}
@ -231,7 +212,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
}
r.stack.last_mut().unwrap().idx += 1;
return tt_next_token(r);
return tt_next_token(r, prev_span);
}
r.repeat_len.push(len);
r.repeat_idx.push(0);
@ -249,9 +230,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
r.stack.last_mut().unwrap().idx += 1;
match lookup_cur_matched(r, ident) {
None => {
r.cur_span = sp;
r.cur_tok = SubstNt(ident);
return ret_val;
return Some(TokenTree::Token(sp, SubstNt(ident)));
// this can't be 0 length, just like TokenTree::Delimited
}
Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
@ -260,15 +239,12 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
// (a) idents can be in lots of places, so it'd be a pain
// (b) we actually can, since it's a token.
NtIdent(ref sn) => {
r.cur_span = sn.span;
r.cur_tok = token::Ident(sn.node);
return ret_val;
return Some(TokenTree::Token(sn.span, token::Ident(sn.node)));
}
NtTT(ref tt) => return Some(tt.clone()),
_ => {
// FIXME(pcwalton): Bad copy.
r.cur_span = sp;
r.cur_tok = token::Interpolated(nt.clone());
return ret_val;
// FIXME(pcwalton): Bad copy
return Some(TokenTree::Token(sp, token::Interpolated(nt.clone())));
}
}
} else {
@ -289,11 +265,9 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
});
// if this could be 0-length, we'd need to potentially recur here
}
TokenTree::Token(sp, tok) => {
r.cur_span = sp;
r.cur_tok = tok;
tt @ TokenTree::Token(..) => {
r.stack.last_mut().unwrap().idx += 1;
return ret_val;
return Some(tt);
}
}
}

View File

@ -30,7 +30,7 @@ use ast::{self, NodeId, PatKind};
use attr;
use codemap::{CodeMap, Spanned};
use syntax_pos::Span;
use errors::{DiagnosticBuilder, Handler};
use errors::{DiagnosticBuilder, Handler, FatalError};
use visit::{self, FnKind, Visitor};
use parse::ParseSess;
use symbol::Symbol;
@ -319,6 +319,9 @@ declare_features! (
// The `unadjusted` ABI. Perma unstable.
(active, abi_unadjusted, "1.16.0", None),
// Macros 1.1
(active, proc_macro, "1.16.0", Some(35900)),
// Allows attributes on struct literal fields.
(active, struct_field_attributes, "1.16.0", Some(38814)),
@ -375,8 +378,6 @@ declare_features! (
// Allows `..` in tuple (struct) patterns
(accepted, dotdot_in_tuple_patterns, "1.14.0", Some(33627)),
(accepted, item_like_imports, "1.14.0", Some(35120)),
// Macros 1.1
(accepted, proc_macro, "1.15.0", Some(35900)),
);
// (changing above list without updating src/doc/reference.md makes @cmr sad)
@ -444,6 +445,10 @@ pub fn deprecated_attributes() -> Vec<&'static (&'static str, AttributeType, Att
BUILTIN_ATTRIBUTES.iter().filter(|a| a.2.is_deprecated()).collect()
}
pub fn is_builtin_attr(attr: &ast::Attribute) -> bool {
BUILTIN_ATTRIBUTES.iter().any(|&(builtin_name, _, _)| attr.check_name(builtin_name))
}
// Attributes that have a special meaning to rustc or rustdoc
pub const BUILTIN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGate)] = &[
// Normal attributes
@ -737,6 +742,16 @@ pub const BUILTIN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeG
is currently unstable",
cfg_fn!(windows_subsystem))),
("proc_macro_attribute", Normal, Gated(Stability::Unstable,
"proc_macro",
"attribute proc macros are currently unstable",
cfg_fn!(proc_macro))),
("rustc_derive_registrar", Normal, Gated(Stability::Unstable,
"rustc_derive_registrar",
"used internally by rustc",
cfg_fn!(rustc_attrs))),
// Crate level attributes
("crate_name", CrateLevel, Ungated),
("crate_type", CrateLevel, Ungated),
@ -879,9 +894,10 @@ fn find_lang_feature_issue(feature: &str) -> Option<u32> {
issue
} else {
// search in Accepted or Removed features
ACCEPTED_FEATURES.iter().chain(REMOVED_FEATURES.iter())
.find(|t| t.0 == feature)
.unwrap().2
match ACCEPTED_FEATURES.iter().chain(REMOVED_FEATURES).find(|t| t.0 == feature) {
Some(&(_, _, issue)) => issue,
None => panic!("Feature `{}` is not declared anywhere", feature),
}
}
}
@ -1382,6 +1398,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute]) -> Features {
let mut features = Features::new();
let mut feature_checker = MutexFeatureChecker::default();
for attr in krate_attrs {
if !attr.check_name("feature") {
continue
@ -1405,6 +1423,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute]) -> F
if let Some(&(_, _, _, setter)) = ACTIVE_FEATURES.iter()
.find(|& &(n, _, _, _)| name == n) {
*(setter(&mut features)) = true;
feature_checker.collect(&features, mi.span);
}
else if let Some(&(_, _, _)) = REMOVED_FEATURES.iter()
.find(|& &(n, _, _)| name == n) {
@ -1421,9 +1440,45 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute]) -> F
}
}
feature_checker.check(span_handler);
features
}
// A collector for mutually-exclusive features and their flag spans
#[derive(Default)]
struct MutexFeatureChecker {
proc_macro: Option<Span>,
custom_attribute: Option<Span>,
}
impl MutexFeatureChecker {
// If this method turns out to be a hotspot due to branching,
// the branching can be eliminated by modifying `setter!()` to set these spans
// only for the features that need to be checked for mutual exclusion.
fn collect(&mut self, features: &Features, span: Span) {
if features.proc_macro {
// If self.proc_macro is None, set to Some(span)
self.proc_macro = self.proc_macro.or(Some(span));
}
if features.custom_attribute {
self.custom_attribute = self.custom_attribute.or(Some(span));
}
}
fn check(self, handler: &Handler) {
if let (Some(pm_span), Some(ca_span)) = (self.proc_macro, self.custom_attribute) {
handler.struct_span_err(pm_span, "Cannot use `#![feature(proc_macro)]` and \
`#![feature(custom_attribute)]` at the same time")
.span_note(ca_span, "`#![feature(custom_attribute)]` declared here")
.emit();
panic!(FatalError);
}
}
}
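// Illustration, not part of this change: a crate root that enables both
// features now trips the check above, with the error on the `proc_macro` span
// and a note on the `custom_attribute` span.
//
//     #![feature(proc_macro)]        // error: Cannot use ... at the same time
//     #![feature(custom_attribute)]  // note: `#![feature(custom_attribute)]` declared here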
pub fn check_crate(krate: &ast::Crate,
sess: &ParseSess,
features: &Features,

View File

@ -13,11 +13,8 @@ pub use self::CommentStyle::*;
use ast;
use codemap::CodeMap;
use syntax_pos::{BytePos, CharPos, Pos};
use errors;
use parse::lexer::is_block_doc_comment;
use parse::lexer::{StringReader, TokenAndSpan};
use parse::lexer::{is_pattern_whitespace, Reader};
use parse::lexer;
use parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
use parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
use print::pprust;
use str::char_at;
@ -346,16 +343,14 @@ pub struct Literal {
// it appears this function is called only from pprust... that's
// probably not a good thing.
pub fn gather_comments_and_literals(span_diagnostic: &errors::Handler,
path: String,
srdr: &mut Read)
pub fn gather_comments_and_literals(sess: &ParseSess, path: String, srdr: &mut Read)
-> (Vec<Comment>, Vec<Literal>) {
let mut src = Vec::new();
srdr.read_to_end(&mut src).unwrap();
let src = String::from_utf8(src).unwrap();
let cm = CodeMap::new();
let filemap = cm.new_filemap(path, None, src);
let mut rdr = lexer::StringReader::new_raw(span_diagnostic, filemap);
let mut rdr = lexer::StringReader::new_raw(sess, filemap);
let mut comments: Vec<Comment> = Vec::new();
let mut literals: Vec<Literal> = Vec::new();

View File

@ -11,9 +11,8 @@
use ast::{self, Ident};
use syntax_pos::{self, BytePos, CharPos, Pos, Span};
use codemap::CodeMap;
use errors::{FatalError, Handler, DiagnosticBuilder};
use ext::tt::transcribe::tt_next_token;
use parse::token;
use errors::{FatalError, DiagnosticBuilder};
use parse::{token, ParseSess};
use str::char_at;
use symbol::{Symbol, keywords};
use std_unicode::property::Pattern_White_Space;
@ -23,52 +22,10 @@ use std::char;
use std::mem::replace;
use std::rc::Rc;
pub use ext::tt::transcribe::{TtReader, new_tt_reader};
pub mod comments;
mod tokentrees;
mod unicode_chars;
pub trait Reader {
fn is_eof(&self) -> bool;
fn try_next_token(&mut self) -> Result<TokenAndSpan, ()>;
fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
let res = self.try_next_token();
self.unwrap_or_abort(res)
}
/// Report a fatal error with the current span.
fn fatal(&self, &str) -> FatalError;
/// Report a non-fatal error with the current span.
fn err(&self, &str);
fn emit_fatal_errors(&mut self);
fn unwrap_or_abort(&mut self, res: Result<TokenAndSpan, ()>) -> TokenAndSpan {
match res {
Ok(tok) => tok,
Err(_) => {
self.emit_fatal_errors();
panic!(FatalError);
}
}
}
fn peek(&self) -> TokenAndSpan;
/// Get a token the parser cares about.
fn try_real_token(&mut self) -> Result<TokenAndSpan, ()> {
let mut t = self.try_next_token()?;
loop {
match t.tok {
token::Whitespace | token::Comment | token::Shebang(_) => {
t = self.try_next_token()?;
}
_ => break,
}
}
Ok(t)
}
fn real_token(&mut self) -> TokenAndSpan {
let res = self.try_real_token();
self.unwrap_or_abort(res)
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct TokenAndSpan {
pub tok: token::Token,
@ -82,7 +39,7 @@ impl Default for TokenAndSpan {
}
pub struct StringReader<'a> {
pub span_diagnostic: &'a Handler,
pub sess: &'a ParseSess,
/// The absolute offset within the codemap of the next character to read
pub next_pos: BytePos,
/// The absolute offset within the codemap of the current character
@ -105,9 +62,44 @@ pub struct StringReader<'a> {
// cache a direct reference to the source text, so that we don't have to
// retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
source_text: Rc<String>,
/// Stack of open delimiters and their spans. Used for error message.
token: token::Token,
span: Span,
open_braces: Vec<(token::DelimToken, Span)>,
}
impl<'a> Reader for StringReader<'a> {
impl<'a> StringReader<'a> {
fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
let res = self.try_next_token();
self.unwrap_or_abort(res)
}
fn unwrap_or_abort(&mut self, res: Result<TokenAndSpan, ()>) -> TokenAndSpan {
match res {
Ok(tok) => tok,
Err(_) => {
self.emit_fatal_errors();
panic!(FatalError);
}
}
}
fn try_real_token(&mut self) -> Result<TokenAndSpan, ()> {
let mut t = self.try_next_token()?;
loop {
match t.tok {
token::Whitespace | token::Comment | token::Shebang(_) => {
t = self.try_next_token()?;
}
_ => break,
}
}
self.token = t.tok.clone();
self.span = t.sp;
Ok(t)
}
pub fn real_token(&mut self) -> TokenAndSpan {
let res = self.try_real_token();
self.unwrap_or_abort(res)
}
fn is_eof(&self) -> bool {
if self.ch.is_none() {
return true;
@ -119,7 +111,7 @@ impl<'a> Reader for StringReader<'a> {
}
}
/// Return the next token. EFFECT: advances the string_reader.
fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
assert!(self.fatal_errs.is_empty());
let ret_val = TokenAndSpan {
tok: replace(&mut self.peek_tok, token::Underscore),
@ -131,16 +123,13 @@ impl<'a> Reader for StringReader<'a> {
fn fatal(&self, m: &str) -> FatalError {
self.fatal_span(self.peek_span, m)
}
fn err(&self, m: &str) {
self.err_span(self.peek_span, m)
}
fn emit_fatal_errors(&mut self) {
pub fn emit_fatal_errors(&mut self) {
for err in &mut self.fatal_errs {
err.emit();
}
self.fatal_errs.clear();
}
fn peek(&self) -> TokenAndSpan {
pub fn peek(&self) -> TokenAndSpan {
// FIXME(pcwalton): Bad copy!
TokenAndSpan {
tok: self.peek_tok.clone(),
@ -149,59 +138,24 @@ impl<'a> Reader for StringReader<'a> {
}
}
impl<'a> Reader for TtReader<'a> {
fn is_eof(&self) -> bool {
self.peek().tok == token::Eof
}
fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
assert!(self.fatal_errs.is_empty());
let r = tt_next_token(self);
debug!("TtReader: r={:?}", r);
Ok(r)
}
fn fatal(&self, m: &str) -> FatalError {
self.sp_diag.span_fatal(self.cur_span, m)
}
fn err(&self, m: &str) {
self.sp_diag.span_err(self.cur_span, m);
}
fn emit_fatal_errors(&mut self) {
for err in &mut self.fatal_errs {
err.emit();
}
self.fatal_errs.clear();
}
fn peek(&self) -> TokenAndSpan {
TokenAndSpan {
tok: self.cur_tok.clone(),
sp: self.cur_span,
}
}
}
impl<'a> StringReader<'a> {
/// For comments.rs, which hackily pokes into next_pos and ch
pub fn new_raw<'b>(span_diagnostic: &'b Handler,
filemap: Rc<syntax_pos::FileMap>)
-> StringReader<'b> {
let mut sr = StringReader::new_raw_internal(span_diagnostic, filemap);
pub fn new_raw<'b>(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
let mut sr = StringReader::new_raw_internal(sess, filemap);
sr.bump();
sr
}
fn new_raw_internal<'b>(span_diagnostic: &'b Handler,
filemap: Rc<syntax_pos::FileMap>)
-> StringReader<'b> {
fn new_raw_internal(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
if filemap.src.is_none() {
span_diagnostic.bug(&format!("Cannot lex filemap \
without source: {}",
filemap.name)[..]);
sess.span_diagnostic.bug(&format!("Cannot lex filemap without source: {}",
filemap.name));
}
let source_text = (*filemap.src.as_ref().unwrap()).clone();
StringReader {
span_diagnostic: span_diagnostic,
sess: sess,
next_pos: filemap.start_pos,
pos: filemap.start_pos,
col: CharPos(0),
@ -214,13 +168,14 @@ impl<'a> StringReader<'a> {
peek_span: syntax_pos::DUMMY_SP,
source_text: source_text,
fatal_errs: Vec::new(),
token: token::Eof,
span: syntax_pos::DUMMY_SP,
open_braces: Vec::new(),
}
}
pub fn new<'b>(span_diagnostic: &'b Handler,
filemap: Rc<syntax_pos::FileMap>)
-> StringReader<'b> {
let mut sr = StringReader::new_raw(span_diagnostic, filemap);
pub fn new(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
let mut sr = StringReader::new_raw(sess, filemap);
if let Err(_) = sr.advance_token() {
sr.emit_fatal_errors();
panic!(FatalError);
@ -234,12 +189,12 @@ impl<'a> StringReader<'a> {
/// Report a fatal lexical error with a given span.
pub fn fatal_span(&self, sp: Span, m: &str) -> FatalError {
self.span_diagnostic.span_fatal(sp, m)
self.sess.span_diagnostic.span_fatal(sp, m)
}
/// Report a lexical error with a given span.
pub fn err_span(&self, sp: Span, m: &str) {
self.span_diagnostic.span_err(sp, m)
self.sess.span_diagnostic.span_err(sp, m)
}
@ -274,7 +229,7 @@ impl<'a> StringReader<'a> {
for c in c.escape_default() {
m.push(c)
}
self.span_diagnostic.struct_span_fatal(syntax_pos::mk_sp(from_pos, to_pos), &m[..])
self.sess.span_diagnostic.struct_span_fatal(syntax_pos::mk_sp(from_pos, to_pos), &m[..])
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@ -298,7 +253,7 @@ impl<'a> StringReader<'a> {
for c in c.escape_default() {
m.push(c)
}
self.span_diagnostic.struct_span_err(syntax_pos::mk_sp(from_pos, to_pos), &m[..])
self.sess.span_diagnostic.struct_span_err(syntax_pos::mk_sp(from_pos, to_pos), &m[..])
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
@ -503,9 +458,8 @@ impl<'a> StringReader<'a> {
fn scan_comment(&mut self) -> Option<TokenAndSpan> {
if let Some(c) = self.ch {
if c.is_whitespace() {
self.span_diagnostic.span_err(syntax_pos::mk_sp(self.pos, self.pos),
"called consume_any_line_comment, but there \
was whitespace");
let msg = "called consume_any_line_comment, but there was whitespace";
self.sess.span_diagnostic.span_err(syntax_pos::mk_sp(self.pos, self.pos), msg);
}
}
@ -875,7 +829,7 @@ impl<'a> StringReader<'a> {
self.scan_unicode_escape(delim) && !ascii_only
} else {
let span = syntax_pos::mk_sp(start, self.pos);
self.span_diagnostic
self.sess.span_diagnostic
.struct_span_err(span, "incorrect unicode escape sequence")
.span_help(span,
"format of unicode escape sequences is \
@ -1701,35 +1655,41 @@ fn ident_continue(c: Option<char>) -> bool {
mod tests {
use super::*;
use ast::Ident;
use ast::{Ident, CrateConfig};
use symbol::Symbol;
use syntax_pos::{BytePos, Span, NO_EXPANSION};
use codemap::CodeMap;
use errors;
use feature_gate::UnstableFeatures;
use parse::token;
use std::cell::RefCell;
use std::io;
use std::rc::Rc;
fn mk_sh(cm: Rc<CodeMap>) -> errors::Handler {
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
Some(cm));
errors::Handler::with_emitter(true, false, Box::new(emitter))
fn mk_sess(cm: Rc<CodeMap>) -> ParseSess {
let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), Some(cm.clone()));
ParseSess {
span_diagnostic: errors::Handler::with_emitter(true, false, Box::new(emitter)),
unstable_features: UnstableFeatures::from_environment(),
config: CrateConfig::new(),
included_mod_stack: RefCell::new(Vec::new()),
code_map: cm,
}
}
// open a string reader for the given string
fn setup<'a>(cm: &CodeMap,
span_handler: &'a errors::Handler,
sess: &'a ParseSess,
teststr: String)
-> StringReader<'a> {
let fm = cm.new_filemap("zebra.rs".to_string(), None, teststr);
StringReader::new(span_handler, fm)
StringReader::new(sess, fm)
}
#[test]
fn t1() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
let mut string_reader = setup(&cm,
&sh,
"/* my source file */ fn main() { println!(\"zebra\"); }\n"
@ -1781,7 +1741,7 @@ mod tests {
#[test]
fn doublecolonparsing() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a b".to_string()),
vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
}
@ -1789,7 +1749,7 @@ mod tests {
#[test]
fn dcparsing_2() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a::b".to_string()),
vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
}
@ -1797,7 +1757,7 @@ mod tests {
#[test]
fn dcparsing_3() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
}
@ -1805,7 +1765,7 @@ mod tests {
#[test]
fn dcparsing_4() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
}
@ -1813,7 +1773,7 @@ mod tests {
#[test]
fn character_a() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("a")), None));
}
@ -1821,7 +1781,7 @@ mod tests {
#[test]
fn character_space() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern(" ")), None));
}
@ -1829,7 +1789,7 @@ mod tests {
#[test]
fn character_escaped() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("\\n")), None));
}
@ -1837,7 +1797,7 @@ mod tests {
#[test]
fn lifetime_name() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
token::Lifetime(Ident::from_str("'abc")));
}
@ -1845,7 +1805,7 @@ mod tests {
#[test]
fn raw_string() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
.next_token()
.tok,
@ -1855,7 +1815,7 @@ mod tests {
#[test]
fn literal_suffixes() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
macro_rules! test {
($input: expr, $tok_type: ident, $tok_contents: expr) => {{
assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok,
@ -1899,7 +1859,7 @@ mod tests {
#[test]
fn nested_block_comments() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string());
match lexer.next_token().tok {
token::Comment => {}
@ -1912,7 +1872,7 @@ mod tests {
#[test]
fn crlf_comments() {
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
let comment = lexer.next_token();
assert_eq!(comment.tok, token::Comment);

View File

@ -0,0 +1,138 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use print::pprust::token_to_string;
use parse::lexer::StringReader;
use parse::{token, PResult};
use syntax_pos::Span;
use tokenstream::{Delimited, TokenTree};
use std::rc::Rc;
impl<'a> StringReader<'a> {
// Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
let mut tts = Vec::new();
while self.token != token::Eof {
tts.push(self.parse_token_tree()?);
}
Ok(tts)
}
// Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
fn parse_token_trees_until_close_delim(&mut self) -> Vec<TokenTree> {
let mut tts = vec![];
loop {
if let token::CloseDelim(..) = self.token {
return tts;
}
match self.parse_token_tree() {
Ok(tt) => tts.push(tt),
Err(mut e) => {
e.emit();
return tts;
}
}
}
}
fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
match self.token {
token::Eof => {
let msg = "this file contains an un-closed delimiter";
let mut err = self.sess.span_diagnostic.struct_span_err(self.span, msg);
for &(_, sp) in &self.open_braces {
err.span_help(sp, "did you mean to close this delimiter?");
}
Err(err)
},
token::OpenDelim(delim) => {
// The span for the beginning of the delimited section
let pre_span = self.span;
// Parse the open delimiter.
self.open_braces.push((delim, self.span));
let open_span = self.span;
self.real_token();
// Parse the token trees within the delimiters.
// We stop at any delimiter so we can try to recover if the user
// uses an incorrect delimiter.
let tts = self.parse_token_trees_until_close_delim();
let close_span = self.span;
// Expand to cover the entire delimited token tree
let span = Span { hi: close_span.hi, ..pre_span };
match self.token {
// Correct delimiter.
token::CloseDelim(d) if d == delim => {
self.open_braces.pop().unwrap();
// Parse the close delimiter.
self.real_token();
}
// Incorrect delimiter.
token::CloseDelim(other) => {
let token_str = token_to_string(&self.token);
let msg = format!("incorrect close delimiter: `{}`", token_str);
let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
// This is a conservative error: only report the last unclosed delimiter.
// The previous unclosed delimiters could actually be closed! The parser
// just hasn't gotten to them yet.
if let Some(&(_, sp)) = self.open_braces.last() {
err.span_note(sp, "unclosed delimiter");
};
err.emit();
self.open_braces.pop().unwrap();
// If the incorrect delimiter matches an earlier opening
// delimiter, then don't consume it (it can be used to
// close the earlier one). Otherwise, consume it.
// E.g., we try to recover from:
// fn foo() {
// bar(baz(
// } // Incorrect delimiter but matches the earlier `{`
if !self.open_braces.iter().any(|&(b, _)| b == other) {
self.real_token();
}
}
token::Eof => {
// Silently recover, the EOF token will be seen again
// and an error emitted then. Thus we don't pop from
// self.open_braces here.
},
_ => {}
}
Ok(TokenTree::Delimited(span, Rc::new(Delimited {
delim: delim,
open_span: open_span,
tts: tts,
close_span: close_span,
})))
},
token::CloseDelim(_) => {
// An unexpected closing delimiter (i.e., there is no
// matching opening delimiter).
let token_str = token_to_string(&self.token);
let msg = format!("unexpected close delimiter: `{}`", token_str);
let err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
Err(err)
},
_ => {
let tt = TokenTree::Token(self.span, self.token.clone());
self.real_token();
Ok(tt)
}
}
}
}
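
For orientation, the trees built here nest each delimited group into a single `TokenTree::Delimited` node. A rough sketch of what `parse_all_token_trees` would yield for the source `foo()` (illustrative fragment only, not a test from this change; real spans are used in practice, `DUMMY_SP` stands in for them here, and the constructors are the ones named elsewhere in this diff):

    let trees = vec![
        TokenTree::Token(DUMMY_SP, token::Ident(Ident::from_str("foo"))),
        TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
            delim: token::Paren,
            open_span: DUMMY_SP,
            tts: Vec::new(),      // nothing between the parentheses
            close_span: DUMMY_SP,
        })),
    ];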

View File

@ -243,10 +243,8 @@ pub fn check_for_substitution<'a>(reader: &StringReader<'a>,
err.span_help(span, &msg);
},
None => {
reader
.span_diagnostic
.span_bug_no_panic(span,
&format!("substitution character not found for '{}'", ch));
let msg = format!("substitution character not found for '{}'", ch);
reader.sess.span_diagnostic.span_bug_no_panic(span, &msg);
}
}
});

View File

@ -45,7 +45,7 @@ pub mod obsolete;
/// Info about a parsing session.
pub struct ParseSess {
pub span_diagnostic: Handler, // better be the same as the one in the reader!
pub span_diagnostic: Handler,
pub unstable_features: UnstableFeatures,
pub config: CrateConfig,
/// Used to determine and report recursive mod inclusions
@ -219,19 +219,15 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
}
/// Given a filemap, produce a sequence of token-trees
pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
-> Vec<tokenstream::TokenTree> {
// it appears to me that the cfg doesn't matter here... indeed,
// parsing tt's probably shouldn't require a parser at all.
let srdr = lexer::StringReader::new(&sess.span_diagnostic, filemap);
let mut p1 = Parser::new(sess, Box::new(srdr), None, false);
panictry!(p1.parse_all_token_trees())
pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<tokenstream::TokenTree> {
let mut srdr = lexer::StringReader::new(sess, filemap);
srdr.real_token();
panictry!(srdr.parse_all_token_trees())
}
/// Given tts and the ParseSess, produce a parser
pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>) -> Parser<'a> {
let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts);
let mut p = Parser::new(sess, Box::new(trdr), None, false);
let mut p = Parser::new(sess, tts, None, false);
p.check_unknown_macro_variable();
p
}

View File

@ -46,7 +46,7 @@ use ext::tt::macro_parser;
use parse;
use parse::classify;
use parse::common::SeqSep;
use parse::lexer::{Reader, TokenAndSpan};
use parse::lexer::TokenAndSpan;
use parse::obsolete::ObsoleteSyntax;
use parse::token::{self, MatchNt, SubstNt};
use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
@ -156,22 +156,6 @@ enum PrevTokenKind {
Other,
}
// Simple circular buffer used for keeping few next tokens.
#[derive(Default)]
struct LookaheadBuffer {
buffer: [TokenAndSpan; LOOKAHEAD_BUFFER_CAPACITY],
start: usize,
end: usize,
}
const LOOKAHEAD_BUFFER_CAPACITY: usize = 8;
impl LookaheadBuffer {
fn len(&self) -> usize {
(LOOKAHEAD_BUFFER_CAPACITY + self.end - self.start) % LOOKAHEAD_BUFFER_CAPACITY
}
}
/* ident is handled by common.rs */
pub struct Parser<'a> {
@ -184,19 +168,14 @@ pub struct Parser<'a> {
pub prev_span: Span,
/// the previous token kind
prev_token_kind: PrevTokenKind,
lookahead_buffer: LookaheadBuffer,
pub tokens_consumed: usize,
pub restrictions: Restrictions,
pub quote_depth: usize, // not (yet) related to the quasiquoter
parsing_token_tree: bool,
pub reader: Box<Reader+'a>,
/// The set of seen errors about obsolete syntax. Used to suppress
/// extra detail when the same error is seen twice
pub obsolete_set: HashSet<ObsoleteSyntax>,
/// Used to determine the path to externally loaded source files
pub directory: Directory,
/// Stack of open delimiters and their spans. Used for error message.
pub open_braces: Vec<(token::DelimToken, Span)>,
/// Name of the root module this parser originated from. If `None`, then the
/// name is not known. This does not change while the parser is descending
/// into modules, and sub-parsers have new values for this name.
@ -204,7 +183,6 @@ pub struct Parser<'a> {
pub expected_tokens: Vec<TokenType>,
pub tts: Vec<(TokenTree, usize)>,
pub desugar_doc_comments: bool,
pub allow_interpolated_tts: bool,
}
#[derive(PartialEq, Eq, Clone)]
@ -270,30 +248,31 @@ impl From<P<Expr>> for LhsExpr {
impl<'a> Parser<'a> {
pub fn new(sess: &'a ParseSess,
rdr: Box<Reader+'a>,
tokens: Vec<TokenTree>,
directory: Option<Directory>,
desugar_doc_comments: bool)
-> Self {
let tt = TokenTree::Delimited(syntax_pos::DUMMY_SP, Rc::new(Delimited {
delim: token::NoDelim,
open_span: syntax_pos::DUMMY_SP,
tts: tokens,
close_span: syntax_pos::DUMMY_SP,
}));
let mut parser = Parser {
reader: rdr,
sess: sess,
token: token::Underscore,
span: syntax_pos::DUMMY_SP,
prev_span: syntax_pos::DUMMY_SP,
prev_token_kind: PrevTokenKind::Other,
lookahead_buffer: Default::default(),
tokens_consumed: 0,
restrictions: Restrictions::empty(),
quote_depth: 0,
parsing_token_tree: false,
obsolete_set: HashSet::new(),
directory: Directory { path: PathBuf::new(), ownership: DirectoryOwnership::Owned },
open_braces: Vec::new(),
root_module_name: None,
expected_tokens: Vec::new(),
tts: Vec::new(),
tts: if tt.len() > 0 { vec![(tt, 0)] } else { Vec::new() },
desugar_doc_comments: desugar_doc_comments,
allow_interpolated_tts: true,
};
let tok = parser.next_tok();
@ -309,8 +288,8 @@ impl<'a> Parser<'a> {
}
fn next_tok(&mut self) -> TokenAndSpan {
'outer: loop {
let mut tok = if let Some((tts, i)) = self.tts.pop() {
loop {
let tok = if let Some((tts, i)) = self.tts.pop() {
let tt = tts.get_tt(i);
if i + 1 < tts.len() {
self.tts.push((tts, i + 1));
@ -322,28 +301,14 @@ impl<'a> Parser<'a> {
continue
}
} else {
self.reader.real_token()
TokenAndSpan { tok: token::Eof, sp: self.span }
};
loop {
let nt = match tok.tok {
token::Interpolated(ref nt) => nt.clone(),
token::DocComment(name) if self.desugar_doc_comments => {
self.tts.push((TokenTree::Token(tok.sp, token::DocComment(name)), 0));
continue 'outer
}
_ => return tok,
};
match *nt {
token::NtTT(TokenTree::Token(sp, ref t)) => {
tok = TokenAndSpan { tok: t.clone(), sp: sp };
}
token::NtTT(ref tt) => {
self.tts.push((tt.clone(), 0));
continue 'outer
}
_ => return tok,
match tok.tok {
token::DocComment(name) if self.desugar_doc_comments => {
self.tts.push((TokenTree::Token(tok.sp, token::DocComment(name)), 0));
}
_ => return tok,
}
}
}
@ -892,17 +857,9 @@ impl<'a> Parser<'a> {
_ => PrevTokenKind::Other,
};
let next = if self.lookahead_buffer.start == self.lookahead_buffer.end {
self.next_tok()
} else {
// Avoid token copies with `replace`.
let old_start = self.lookahead_buffer.start;
self.lookahead_buffer.start = (old_start + 1) % LOOKAHEAD_BUFFER_CAPACITY;
mem::replace(&mut self.lookahead_buffer.buffer[old_start], Default::default())
};
let next = self.next_tok();
self.span = next.sp;
self.token = next.tok;
self.tokens_consumed += 1;
self.expected_tokens.clear();
// check after each token
self.check_unknown_macro_variable();
@ -935,18 +892,20 @@ impl<'a> Parser<'a> {
F: FnOnce(&token::Token) -> R,
{
if dist == 0 {
f(&self.token)
} else if dist < LOOKAHEAD_BUFFER_CAPACITY {
while self.lookahead_buffer.len() < dist {
self.lookahead_buffer.buffer[self.lookahead_buffer.end] = self.next_tok();
self.lookahead_buffer.end =
(self.lookahead_buffer.end + 1) % LOOKAHEAD_BUFFER_CAPACITY;
}
let index = (self.lookahead_buffer.start + dist - 1) % LOOKAHEAD_BUFFER_CAPACITY;
f(&self.lookahead_buffer.buffer[index].tok)
} else {
self.bug("lookahead distance is too large");
return f(&self.token);
}
let mut tok = token::Eof;
if let Some(&(ref tts, mut i)) = self.tts.last() {
i += dist - 1;
if i < tts.len() {
tok = match tts.get_tt(i) {
TokenTree::Token(_, tok) => tok,
TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
TokenTree::Sequence(..) => token::Dollar,
};
}
}
f(&tok)
}
pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
self.sess.span_diagnostic.struct_span_fatal(self.span, m)
@ -1277,10 +1236,17 @@ impl<'a> Parser<'a> {
"at least one type parameter bound \
must be specified");
}
if let TyKind::Path(None, ref path) = lhs.node {
let mut lhs = lhs.unwrap();
if let TyKind::Paren(ty) = lhs.node {
// We have to accept the first bound in parens for backward compatibility.
// Example: `(Bound) + Bound + Bound`
lhs = ty.unwrap();
}
if let TyKind::Path(None, path) = lhs.node {
let poly_trait_ref = PolyTraitRef {
bound_lifetimes: Vec::new(),
trait_ref: TraitRef { path: path.clone(), ref_id: lhs.id },
trait_ref: TraitRef { path: path, ref_id: lhs.id },
span: lhs.span,
};
let poly_trait_ref = TraitTyParamBound(poly_trait_ref, TraitBoundModifier::None);
@ -2743,94 +2709,28 @@ impl<'a> Parser<'a> {
// whether something will be a nonterminal or a seq
// yet.
match self.token {
token::Eof => {
let mut err: DiagnosticBuilder<'a> =
self.diagnostic().struct_span_err(self.span,
"this file contains an un-closed delimiter");
for &(_, sp) in &self.open_braces {
err.span_help(sp, "did you mean to close this delimiter?");
}
Err(err)
},
token::OpenDelim(delim) => {
if self.tts.last().map(|&(_, i)| i == 1).unwrap_or(false) {
if self.quote_depth == 0 && self.tts.last().map(|&(_, i)| i == 1).unwrap_or(false) {
let tt = self.tts.pop().unwrap().0;
self.bump();
return Ok(if self.allow_interpolated_tts {
// avoid needlessly reparsing token trees in recursive macro expansions
TokenTree::Token(tt.span(), token::Interpolated(Rc::new(token::NtTT(tt))))
} else {
tt
});
return Ok(tt);
}
let parsing_token_tree = ::std::mem::replace(&mut self.parsing_token_tree, true);
// The span for beginning of the delimited section
let pre_span = self.span;
// Parse the open delimiter.
self.open_braces.push((delim, self.span));
let open_span = self.span;
self.bump();
// Parse the token trees within the delimiters.
// We stop at any delimiter so we can try to recover if the user
// uses an incorrect delimiter.
let tts = self.parse_seq_to_before_tokens(&[&token::CloseDelim(token::Brace),
&token::CloseDelim(token::Paren),
&token::CloseDelim(token::Bracket)],
SeqSep::none(),
|p| p.parse_token_tree(),
|mut e| e.emit());
self.parsing_token_tree = parsing_token_tree;
let close_span = self.span;
// Expand to cover the entire delimited token tree
let span = Span { hi: close_span.hi, ..pre_span };
self.bump();
match self.token {
// Correct delimiter.
token::CloseDelim(d) if d == delim => {
self.open_braces.pop().unwrap();
// Parse the close delimiter.
self.bump();
}
// Incorrect delimiter.
token::CloseDelim(other) => {
let token_str = self.this_token_to_string();
let mut err = self.diagnostic().struct_span_err(self.span,
&format!("incorrect close delimiter: `{}`", token_str));
// This is a conservative error: only report the last unclosed delimiter.
// The previous unclosed delimiters could actually be closed! The parser
// just hasn't gotten to them yet.
if let Some(&(_, sp)) = self.open_braces.last() {
err.span_note(sp, "unclosed delimiter");
};
err.emit();
self.open_braces.pop().unwrap();
// If the incorrect delimiter matches an earlier opening
// delimiter, then don't consume it (it can be used to
// close the earlier one). Otherwise, consume it.
// E.g., we try to recover from:
// fn foo() {
// bar(baz(
// } // Incorrect delimiter but matches the earlier `{`
if !self.open_braces.iter().any(|&(b, _)| b == other) {
self.bump();
}
}
token::Eof => {
// Silently recover, the EOF token will be seen again
// and an error emitted then. Thus we don't pop from
// self.open_braces here.
},
_ => {}
}
self.parsing_token_tree = parsing_token_tree;
let span = Span { lo: open_span.lo, ..close_span };
Ok(TokenTree::Delimited(span, Rc::new(Delimited {
delim: delim,
open_span: open_span,
@ -2838,21 +2738,9 @@ impl<'a> Parser<'a> {
close_span: close_span,
})))
},
token::CloseDelim(_) => {
// An unexpected closing delimiter (i.e., there is no
// matching opening delimiter).
let token_str = self.this_token_to_string();
let err = self.diagnostic().struct_span_err(self.span,
&format!("unexpected close delimiter: `{}`", token_str));
Err(err)
},
/* we ought to allow different depths of unquotation */
token::Dollar | token::SubstNt(..) if self.quote_depth > 0 => {
self.parse_unquoted()
}
_ => {
Ok(TokenTree::Token(self.span, self.bump_and_get()))
}
token::CloseDelim(_) | token::Eof => unreachable!(),
token::Dollar | token::SubstNt(..) if self.quote_depth > 0 => self.parse_unquoted(),
_ => Ok(TokenTree::Token(self.span, self.bump_and_get())),
}
}

View File

@ -18,10 +18,9 @@ use util::parser::AssocOp;
use attr;
use codemap::{self, CodeMap};
use syntax_pos::{self, BytePos};
use errors;
use parse::token::{self, BinOpToken, Token};
use parse::lexer::comments;
use parse;
use parse::{self, ParseSess};
use print::pp::{self, break_offset, word, space, zerobreak, hardbreak};
use print::pp::{Breaks, eof};
use print::pp::Breaks::{Consistent, Inconsistent};
@ -101,20 +100,15 @@ pub const DEFAULT_COLUMNS: usize = 78;
/// it can scan the input text for comments and literals to
/// copy forward.
pub fn print_crate<'a>(cm: &'a CodeMap,
span_diagnostic: &errors::Handler,
sess: &ParseSess,
krate: &ast::Crate,
filename: String,
input: &mut Read,
out: Box<Write+'a>,
ann: &'a PpAnn,
is_expanded: bool) -> io::Result<()> {
let mut s = State::new_from_input(cm,
span_diagnostic,
filename,
input,
out,
ann,
is_expanded);
let mut s = State::new_from_input(cm, sess, filename, input, out, ann, is_expanded);
if is_expanded && !std_inject::injected_crate_name(krate).is_none() {
// We need to print `#![no_std]` (and its feature gate) so that
// compiling pretty-printed source won't inject libstd again.
@ -140,16 +134,13 @@ pub fn print_crate<'a>(cm: &'a CodeMap,
impl<'a> State<'a> {
pub fn new_from_input(cm: &'a CodeMap,
span_diagnostic: &errors::Handler,
sess: &ParseSess,
filename: String,
input: &mut Read,
out: Box<Write+'a>,
ann: &'a PpAnn,
is_expanded: bool) -> State<'a> {
let (cmnts, lits) = comments::gather_comments_and_literals(
span_diagnostic,
filename,
input);
let (cmnts, lits) = comments::gather_comments_and_literals(sess, filename, input);
State::new(
cm,

View File

@ -30,7 +30,6 @@ use codemap::{Spanned, combine_spans};
use ext::base;
use ext::tt::macro_parser;
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use parse::lexer;
use parse::{self, Directory};
use parse::token::{self, Token, Lit, Nonterminal};
use print::pprust;
@ -139,7 +138,10 @@ impl TokenTree {
if let Nonterminal::NtTT(..) = **nt { 1 } else { 0 }
},
TokenTree::Token(_, token::MatchNt(..)) => 3,
TokenTree::Delimited(_, ref delimed) => delimed.tts.len() + 2,
TokenTree::Delimited(_, ref delimed) => match delimed.delim {
token::NoDelim => delimed.tts.len(),
_ => delimed.tts.len() + 2,
},
TokenTree::Sequence(_, ref seq) => seq.tts.len(),
TokenTree::Token(..) => 0,
}
@ -181,6 +183,9 @@ impl TokenTree {
close_span: sp,
}))
}
(&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
delimed.tts[index].clone()
}
(&TokenTree::Delimited(_, ref delimed), _) => {
if index == 0 {
return delimed.open_tt();
@ -215,14 +220,12 @@ impl TokenTree {
mtch: &[TokenTree],
tts: &[TokenTree])
-> macro_parser::NamedParseResult {
let diag = &cx.parse_sess().span_diagnostic;
// `None` is because we're not interpolating
let arg_rdr = lexer::new_tt_reader(diag, None, tts.iter().cloned().collect());
let directory = Directory {
path: cx.current_expansion.module.directory.clone(),
ownership: cx.current_expansion.directory_ownership,
};
macro_parser::parse(cx.parse_sess(), arg_rdr, mtch, Some(directory))
macro_parser::parse(cx.parse_sess(), tts.iter().cloned().collect(), mtch, Some(directory))
}
/// Check if this TokenTree is equal to the other, regardless of span information.
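
The `NoDelim` arms added above make a delimiter-less `Delimited` group transparent: it contributes no synthetic open/close tokens to `len`, and `get_tt` indexes straight into its contents, which is what lets `Parser::new` wrap a whole token vector in one `NoDelim` tree as shown earlier. A minimal sketch of that behaviour, assuming the imports of the surrounding module:

    let inner = vec![
        TokenTree::Token(DUMMY_SP, token::Comma),
        TokenTree::Token(DUMMY_SP, token::Comma),
    ];
    let transparent = TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
        delim: token::NoDelim,
        open_span: DUMMY_SP,
        tts: inner.clone(),
        close_span: DUMMY_SP,
    }));
    assert_eq!(transparent.len(), 2);    // no open/close delimiters counted
    let _first = transparent.get_tt(0);  // indexes the inner trees directly
    // With a real delimiter such as `token::Paren`, `len()` would be 2 + 2 and
    // `get_tt(0)` would return the open-paren token tree instead.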

View File

@ -77,8 +77,9 @@ impl MultiItemModifier for CustomDerive {
let inner = self.inner;
panic::catch_unwind(panic::AssertUnwindSafe(|| inner(input)))
});
let new_items = match res {
Ok(stream) => __internal::token_stream_items(stream),
let stream = match res {
Ok(stream) => stream,
Err(e) => {
let msg = "custom derive attribute panicked";
let mut err = ecx.struct_span_fatal(span, msg);
@ -94,6 +95,18 @@ impl MultiItemModifier for CustomDerive {
}
};
let new_items = __internal::set_parse_sess(&ecx.parse_sess, || {
match __internal::token_stream_parse_items(stream) {
Ok(new_items) => new_items,
Err(_) => {
// FIXME: handle this better
let msg = "custom derive produced unparseable tokens";
ecx.struct_span_fatal(span, msg).emit();
panic!(FatalError);
}
}
});
let mut res = vec![Annotatable::Item(item)];
// Reassign spans of all expanded items to the input `item`
// for better errors here.
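
The stream being parsed back into items here is whatever the user's derive function returned. A minimal well-formed derive of the macros-1.1 shape, for contrast with the error path above (sketch; the crate and derive names are hypothetical, and the string round-trip uses the `FromStr`/`Display` surface that `proc_macro::TokenStream` exposes at this point):

    #![crate_type = "proc-macro"]

    extern crate proc_macro;
    use proc_macro::TokenStream;

    #[proc_macro_derive(Hello)]
    pub fn derive_hello(_input: TokenStream) -> TokenStream {
        // The returned stream must parse as zero or more items; if it does not,
        // expansion now fails with "custom derive produced unparseable tokens".
        "fn hello() {}".parse().unwrap()
    }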

View File

@ -47,6 +47,8 @@ pub mod proc_macro_registrar;
// for custom_derive
pub mod deriving;
pub mod proc_macro_impl;
use std::rc::Rc;
use syntax::ast;
use syntax::ext::base::{MacroExpanderFn, NormalTT, MultiModifier, NamedSyntaxExtension};

View File

@ -0,0 +1,58 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::panic;
use errors::FatalError;
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::tokenstream::TokenStream;
use syntax::ext::base;
use proc_macro::TokenStream as TsShim;
use proc_macro::__internal;
pub struct AttrProcMacro {
pub inner: fn(TsShim, TsShim) -> TsShim,
}
impl base::AttrProcMacro for AttrProcMacro {
fn expand<'cx>(&self,
ecx: &'cx mut ExtCtxt,
span: Span,
annotation: TokenStream,
annotated: TokenStream)
-> TokenStream {
let annotation = __internal::token_stream_wrap(annotation);
let annotated = __internal::token_stream_wrap(annotated);
let res = __internal::set_parse_sess(&ecx.parse_sess, || {
panic::catch_unwind(panic::AssertUnwindSafe(|| (self.inner)(annotation, annotated)))
});
match res {
Ok(stream) => __internal::token_stream_inner(stream),
Err(e) => {
let msg = "custom attribute panicked";
let mut err = ecx.struct_span_fatal(span, msg);
if let Some(s) = e.downcast_ref::<String>() {
err.help(&format!("message: {}", s));
}
if let Some(s) = e.downcast_ref::<&'static str>() {
err.help(&format!("message: {}", s));
}
err.emit();
panic!(FatalError);
}
}
}
}
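
The `catch_unwind` above is what turns a panic inside a user-written attribute macro into an ordinary error. A sketch of a macro that exercises that path (hypothetical crate; `#![feature(proc_macro)]` is needed because `#[proc_macro_attribute]` is still feature-gated in this change):

    #![feature(proc_macro)]
    #![crate_type = "proc-macro"]

    extern crate proc_macro;
    use proc_macro::TokenStream;

    #[proc_macro_attribute]
    pub fn always_fails(_attr: TokenStream, _item: TokenStream) -> TokenStream {
        // Caught by the expansion glue above and reported as
        // "custom attribute panicked", with this text attached as a `help:` note.
        panic!("something went wrong in the macro");
    }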

View File

@ -11,18 +11,20 @@
use std::mem;
use errors;
use syntax::ast::{self, Ident, NodeId};
use syntax::codemap::{ExpnInfo, NameAndSpan, MacroAttribute};
use syntax::ext::base::ExtCtxt;
use syntax::ext::build::AstBuilder;
use syntax::ext::expand::ExpansionConfig;
use syntax::parse::ParseSess;
use syntax::fold::Folder;
use syntax::parse::ParseSess;
use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax_pos::{Span, DUMMY_SP};
use syntax::visit::{self, Visitor};
use syntax_pos::{Span, DUMMY_SP};
use deriving;
struct CustomDerive {
@ -32,8 +34,14 @@ struct CustomDerive {
attrs: Vec<ast::Name>,
}
struct CollectCustomDerives<'a> {
struct AttrProcMacro {
function_name: Ident,
span: Span,
}
struct CollectProcMacros<'a> {
derives: Vec<CustomDerive>,
attr_macros: Vec<AttrProcMacro>,
in_root: bool,
handler: &'a errors::Handler,
is_proc_macro_crate: bool,
@ -50,16 +58,17 @@ pub fn modify(sess: &ParseSess,
let ecfg = ExpansionConfig::default("proc_macro".to_string());
let mut cx = ExtCtxt::new(sess, ecfg, resolver);
let derives = {
let mut collect = CollectCustomDerives {
let (derives, attr_macros) = {
let mut collect = CollectProcMacros {
derives: Vec::new(),
attr_macros: Vec::new(),
in_root: true,
handler: handler,
is_proc_macro_crate: is_proc_macro_crate,
is_test_crate: is_test_crate,
};
visit::walk_crate(&mut collect, &krate);
collect.derives
(collect.derives, collect.attr_macros)
};
if !is_proc_macro_crate {
@ -74,7 +83,7 @@ pub fn modify(sess: &ParseSess,
return krate;
}
krate.module.items.push(mk_registrar(&mut cx, &derives));
krate.module.items.push(mk_registrar(&mut cx, &derives, &attr_macros));
if krate.exported_macros.len() > 0 {
handler.err("cannot export macro_rules! macros from a `proc-macro` \
@ -84,7 +93,7 @@ pub fn modify(sess: &ParseSess,
return krate
}
impl<'a> CollectCustomDerives<'a> {
impl<'a> CollectProcMacros<'a> {
fn check_not_pub_in_root(&self, vis: &ast::Visibility, sp: Span) {
if self.is_proc_macro_crate &&
self.in_root &&
@ -92,61 +101,11 @@ impl<'a> CollectCustomDerives<'a> {
self.handler.span_err(sp,
"`proc-macro` crate types cannot \
export any items other than functions \
tagged with `#[proc_macro_derive]` \
currently");
tagged with `#[proc_macro_derive]` currently");
}
}
}
impl<'a> Visitor<'a> for CollectCustomDerives<'a> {
fn visit_item(&mut self, item: &'a ast::Item) {
let mut attrs = item.attrs.iter().filter(|a| a.check_name("proc_macro_derive"));
// First up, make sure we're checking a bare function. If we're not then
// we're just not interested in this item.
//
// If we find one, try to locate a `#[proc_macro_derive]` attribute on
// it.
match item.node {
ast::ItemKind::Fn(..) => {}
_ => {
// Check for invalid use of proc_macro_derive
if let Some(attr) = attrs.next() {
self.handler.span_err(attr.span(),
"the `#[proc_macro_derive]` \
attribute may only be used \
on bare functions");
return;
}
self.check_not_pub_in_root(&item.vis, item.span);
return visit::walk_item(self, item)
}
}
let attr = match attrs.next() {
Some(attr) => attr,
None => {
self.check_not_pub_in_root(&item.vis, item.span);
return visit::walk_item(self, item)
}
};
if let Some(a) = attrs.next() {
self.handler.span_err(a.span(), "multiple `#[proc_macro_derive]` \
attributes found");
}
if self.is_test_crate {
return;
}
if !self.is_proc_macro_crate {
self.handler.span_err(attr.span(),
"the `#[proc_macro_derive]` attribute is \
only usable with crates of the `proc-macro` \
crate type");
}
fn collect_custom_derive(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) {
// Once we've located the `#[proc_macro_derive]` attribute, verify
// that it's of the form `#[proc_macro_derive(Foo)]` or
// `#[proc_macro_derive(Foo, attributes(A, ..))]`
@ -232,6 +191,101 @@ impl<'a> Visitor<'a> for CollectCustomDerives<'a> {
};
self.handler.span_err(item.span, msg);
}
}
fn collect_attr_proc_macro(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) {
if let Some(_) = attr.meta_item_list() {
self.handler.span_err(attr.span, "`#[proc_macro_attribute]` attribute
cannot contain any meta items");
return;
}
if self.in_root && item.vis == ast::Visibility::Public {
self.attr_macros.push(AttrProcMacro {
span: item.span,
function_name: item.ident,
});
} else {
let msg = if !self.in_root {
"functions tagged with `#[proc_macro_attribute]` must \
currently reside in the root of the crate"
} else {
"functions tagged with `#[proc_macro_attribute]` must be `pub`"
};
self.handler.span_err(item.span, msg);
}
}
}
impl<'a> Visitor<'a> for CollectProcMacros<'a> {
fn visit_item(&mut self, item: &'a ast::Item) {
// First up, make sure we're checking a bare function. If we're not then
// we're just not interested in this item.
//
// If we find one, try to locate a `#[proc_macro_derive]` attribute on
// it.
let is_fn = match item.node {
ast::ItemKind::Fn(..) => true,
_ => false,
};
let mut found_attr: Option<&'a ast::Attribute> = None;
for attr in &item.attrs {
if attr.check_name("proc_macro_derive") || attr.check_name("proc_macro_attribute") {
if let Some(prev_attr) = found_attr {
let msg = if attr.name() == prev_attr.name() {
format!("Only one `#[{}]` attribute is allowed on any given function",
attr.name())
} else {
format!("`#[{}]` and `#[{}]` attributes cannot both be applied \
to the same function", attr.name(), prev_attr.name())
};
self.handler.struct_span_err(attr.span(), &msg)
.span_note(prev_attr.span(), "Previous attribute here")
.emit();
return;
}
found_attr = Some(attr);
}
}
let attr = match found_attr {
None => {
self.check_not_pub_in_root(&item.vis, item.span);
return visit::walk_item(self, item);
},
Some(attr) => attr,
};
if !is_fn {
let msg = format!("the `#[{}]` attribute may only be used on bare functions",
attr.name());
self.handler.span_err(attr.span(), &msg);
return;
}
if self.is_test_crate {
return;
}
if !self.is_proc_macro_crate {
let msg = format!("the `#[{}]` attribute is only usable with crates of the \
`proc-macro` crate type", attr.name());
self.handler.span_err(attr.span(), &msg);
return;
}
if attr.check_name("proc_macro_derive") {
self.collect_custom_derive(item, attr);
} else if attr.check_name("proc_macro_attribute") {
self.collect_attr_proc_macro(item, attr);
};
visit::walk_item(self, item);
}
@ -265,7 +319,8 @@ impl<'a> Visitor<'a> for CollectCustomDerives<'a> {
// }
// }
fn mk_registrar(cx: &mut ExtCtxt,
custom_derives: &[CustomDerive]) -> P<ast::Item> {
custom_derives: &[CustomDerive],
custom_attrs: &[AttrProcMacro]) -> P<ast::Item> {
let eid = cx.codemap().record_expansion(ExpnInfo {
call_site: DUMMY_SP,
callee: NameAndSpan {
@ -286,25 +341,36 @@ fn mk_registrar(cx: &mut ExtCtxt,
let registry = Ident::from_str("Registry");
let registrar = Ident::from_str("registrar");
let register_custom_derive = Ident::from_str("register_custom_derive");
let stmts = custom_derives.iter().map(|cd| {
let register_attr_proc_macro = Ident::from_str("register_attr_proc_macro");
let mut stmts = custom_derives.iter().map(|cd| {
let path = cx.path_global(cd.span, vec![cd.function_name]);
let trait_name = cx.expr_str(cd.span, cd.trait_name);
let attrs = cx.expr_vec_slice(
span,
cd.attrs.iter().map(|&s| cx.expr_str(cd.span, s)).collect::<Vec<_>>()
);
(path, trait_name, attrs)
}).map(|(path, trait_name, attrs)| {
let registrar = cx.expr_ident(span, registrar);
let ufcs_path = cx.path(span, vec![proc_macro, __internal, registry,
register_custom_derive]);
cx.expr_call(span,
cx.expr_path(ufcs_path),
vec![registrar, trait_name, cx.expr_path(path), attrs])
}).map(|expr| {
cx.stmt_expr(expr)
cx.stmt_expr(cx.expr_call(span, cx.expr_path(ufcs_path),
vec![registrar, trait_name, cx.expr_path(path), attrs]))
}).collect::<Vec<_>>();
stmts.extend(custom_attrs.iter().map(|ca| {
let name = cx.expr_str(ca.span, ca.function_name.name);
let path = cx.path_global(ca.span, vec![ca.function_name]);
let registrar = cx.expr_ident(ca.span, registrar);
let ufcs_path = cx.path(span,
vec![proc_macro, __internal, registry, register_attr_proc_macro]);
cx.stmt_expr(cx.expr_call(span, cx.expr_path(ufcs_path),
vec![registrar, name, cx.expr_path(path)]))
}));
let path = cx.path(span, vec![proc_macro, __internal, registry]);
let registrar_path = cx.ty_path(path);
let arg_ty = cx.ty_rptr(span, registrar_path, None, ast::Mutability::Mutable);
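
From a macro author's point of view, the generated registrar simply wires up every `#[proc_macro_derive]` and `#[proc_macro_attribute]` function the collector found. A sketch of a crate exporting one of each (names hypothetical); tagging a single function with both attributes is rejected with the "cannot both be applied to the same function" error shown earlier:

    #![feature(proc_macro)]
    #![crate_type = "proc-macro"]

    extern crate proc_macro;
    use proc_macro::TokenStream;

    #[proc_macro_derive(Nothing)]
    pub fn derive_nothing(_input: TokenStream) -> TokenStream {
        // A derive is allowed to expand to no items at all.
        "".parse().unwrap()
    }

    #[proc_macro_attribute]
    pub fn passthrough(_attr: TokenStream, item: TokenStream) -> TokenStream {
        // Leaves the annotated item unchanged.
        item
    }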

View File

@ -53,6 +53,7 @@
// ignore-shave
// ignore-wasm32
// ignore-wasm64
// ignore-emscripten
// compile-flags: -C no-prepopulate-passes

View File

@ -0,0 +1,23 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// force-host
// no-prefer-dynamic
#![feature(proc_macro)]
#![crate_type = "proc-macro"]
extern crate proc_macro;
use proc_macro::TokenStream;
#[proc_macro_attribute]
pub fn attr_proc_macro(_: TokenStream, input: TokenStream) -> TokenStream {
input
}
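
For reference, this is how the auxiliary macro above is intended to be consumed once the `proc_macro` feature gate is enabled (sketch; compare with the feature-gate and `#[macro_use]` error tests that follow):

    #![feature(proc_macro)]

    extern crate attr_proc_macro;
    use attr_proc_macro::attr_proc_macro;

    // The attribute returns its input unchanged, so `Foo` compiles as written.
    #[attr_proc_macro]
    struct Foo;

    fn main() {
        let _ = Foo;
    }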

View File

@ -16,8 +16,7 @@ extern crate derive_bad;
#[derive(
A
)]
//~^^ ERROR: custom derive attribute panicked
//~| HELP: called `Result::unwrap()` on an `Err` value: LexError
//~^^ ERROR: custom derive produced unparseable tokens
struct A;
fn main() {}
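
The auxiliary `derive_bad` crate is not shown in this diff, but the kind of derive that now reaches this error is one whose output lexes as tokens yet does not parse as items, for example (hypothetical sketch, in a proc-macro crate like the ones sketched above):

    extern crate proc_macro;
    use proc_macro::TokenStream;

    #[proc_macro_derive(A)]
    pub fn derive_a(_input: TokenStream) -> TokenStream {
        // `struct` alone is a valid token stream but not a complete item, so
        // expansion reports "custom derive produced unparseable tokens".
        "struct".parse().unwrap()
    }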

View File

@ -0,0 +1,24 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:attr_proc_macro.rs
// gate-test-proc_macro
#![feature(use_extern_macros)]
extern crate attr_proc_macro;
use attr_proc_macro::attr_proc_macro;
#[attr_proc_macro]
//~^ ERROR: attribute procedural macros are experimental
struct Foo;
fn main() {
let _ = Foo;
}

View File

@ -0,0 +1,22 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:attr_proc_macro.rs
#![feature(proc_macro)]
#[macro_use] extern crate attr_proc_macro;
#[attr_proc_macro]
//~^ ERROR: attribute procedural macros cannot be imported with `#[macro_use]`
struct Foo;
fn main() {
let _ = Foo;
}

View File

@ -0,0 +1,24 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:attr_proc_macro.rs
#![feature(proc_macro, custom_attribute)]
//~^ ERROR Cannot use `#![feature(proc_macro)]` and `#![feature(custom_attribute)] at the same time
extern crate attr_proc_macro;
use attr_proc_macro::attr_proc_macro;
#[attr_proc_macro]
fn foo() {}
fn main() {
foo();
}

View File

@ -12,6 +12,7 @@
// ignore-arm
// ignore-aarch64
// ignore-s390x
// ignore-emscripten
#![feature(asm, rustc_attrs)]

View File

@ -9,6 +9,7 @@
// except according to those terms.
// ignore-s390x
// ignore-emscripten
#![feature(asm)]

View File

@ -12,6 +12,7 @@
// ignore-arm
// ignore-aarch64
// ignore-s390x
// ignore-emscripten
#![feature(asm, rustc_attrs)]

View File

@ -9,6 +9,7 @@
// except according to those terms.
// ignore-s390x
// ignore-emscripten
#![feature(asm)]

Some files were not shown because too many files have changed in this diff.