Merge branch 'master' into redox
This commit is contained in:
commit
7e7775ce7b
11
.travis.yml
11
.travis.yml
|
@ -1,4 +1,4 @@
|
||||||
language: rust
|
language: minimal
|
||||||
sudo: required
|
sudo: required
|
||||||
dist: trusty
|
dist: trusty
|
||||||
services:
|
services:
|
||||||
|
@ -20,7 +20,7 @@ matrix:
|
||||||
- env: IMAGE=x86_64-gnu-cargotest
|
- env: IMAGE=x86_64-gnu-cargotest
|
||||||
- env: IMAGE=x86_64-gnu-debug
|
- env: IMAGE=x86_64-gnu-debug
|
||||||
- env: IMAGE=x86_64-gnu-nopt
|
- env: IMAGE=x86_64-gnu-nopt
|
||||||
- env: IMAGE=x86_64-gnu-rustbuild
|
- env: IMAGE=x86_64-gnu-make
|
||||||
- env: IMAGE=x86_64-gnu-llvm-3.7 ALLOW_PR=1 RUST_BACKTRACE=1
|
- env: IMAGE=x86_64-gnu-llvm-3.7 ALLOW_PR=1 RUST_BACKTRACE=1
|
||||||
- env: IMAGE=x86_64-musl
|
- env: IMAGE=x86_64-musl
|
||||||
|
|
||||||
|
@ -39,7 +39,7 @@ matrix:
|
||||||
install: brew install ccache
|
install: brew install ccache
|
||||||
- env: >
|
- env: >
|
||||||
RUST_CHECK_TARGET=check
|
RUST_CHECK_TARGET=check
|
||||||
RUST_CONFIGURE_ARGS=--target=x86_64-apple-darwin --enable-rustbuild
|
RUST_CONFIGURE_ARGS=--target=x86_64-apple-darwin --disable-rustbuild
|
||||||
SRC=.
|
SRC=.
|
||||||
os: osx
|
os: osx
|
||||||
install: brew install ccache
|
install: brew install ccache
|
||||||
|
@ -51,10 +51,9 @@ matrix:
|
||||||
install: brew install ccache
|
install: brew install ccache
|
||||||
|
|
||||||
script:
|
script:
|
||||||
- if [ -z "$ALLOW_PR" ] && [ "$TRAVIS_BRANCH" != "auto" ]; then
|
- >
|
||||||
|
if [ "$ALLOW_PR" = "" ] && [ "$TRAVIS_BRANCH" != "auto" ]; then
|
||||||
echo skipping, not a full build;
|
echo skipping, not a full build;
|
||||||
elif [ -z "$ENABLE_AUTO" ]; then
|
|
||||||
echo skipping, not quite ready yet
|
|
||||||
elif [ "$TRAVIS_OS_NAME" = "osx" ]; then
|
elif [ "$TRAVIS_OS_NAME" = "osx" ]; then
|
||||||
git submodule update --init;
|
git submodule update --init;
|
||||||
src/ci/run.sh;
|
src/ci/run.sh;
|
||||||
|
|
147
CONTRIBUTING.md
147
CONTRIBUTING.md
|
@ -86,13 +86,17 @@ benchmarks, generate documentation, install a fresh build of Rust, and more.
|
||||||
It's your best friend when working on Rust, allowing you to compile & test
|
It's your best friend when working on Rust, allowing you to compile & test
|
||||||
your contributions before submission.
|
your contributions before submission.
|
||||||
|
|
||||||
All the configuration for the build system lives in [the `mk` directory][mkdir]
|
The build system lives in [the `src/bootstrap` directory][bootstrap] in the
|
||||||
in the project root. It can be hard to follow in places, as it uses some
|
project root. Our build system is itself written in Rust and is based on Cargo
|
||||||
advanced Make features which make for some challenging reading. If you have
|
to actually build all the compiler's crates. If you have questions on the build
|
||||||
questions on the build system internals, try asking in
|
system internals, try asking in [`#rust-internals`][pound-rust-internals].
|
||||||
[`#rust-internals`][pound-rust-internals].
|
|
||||||
|
|
||||||
[mkdir]: https://github.com/rust-lang/rust/tree/master/mk/
|
[bootstrap]: https://github.com/rust-lang/rust/tree/master/src/bootstrap/
|
||||||
|
|
||||||
|
> **Note**: the build system was recently rewritten from a jungle of makefiles
|
||||||
|
> to the current incarnation you'll see in `src/bootstrap`. If you experience
|
||||||
|
> bugs you can temporarily revert back to the makefiles with
|
||||||
|
> `--disable-rustbuild` passed to `./configure`.
|
||||||
|
|
||||||
### Configuration
|
### Configuration
|
||||||
|
|
||||||
|
@ -119,42 +123,111 @@ configuration used later in the build process. Some options to note:
|
||||||
|
|
||||||
To see a full list of options, run `./configure --help`.
|
To see a full list of options, run `./configure --help`.
|
||||||
|
|
||||||
### Useful Targets
|
### Building
|
||||||
|
|
||||||
Some common make targets are:
|
Although the `./configure` script will generate a `Makefile`, this is actually
|
||||||
|
just a thin veneer over the actual build system driver, `x.py`. This file, at
|
||||||
|
the root of the repository, is used to build, test, and document various parts
|
||||||
|
of the compiler. You can execute it as:
|
||||||
|
|
||||||
- `make tips` - show useful targets, variables and other tips for working with
|
```sh
|
||||||
the build system.
|
python x.py build
|
||||||
- `make rustc-stage1` - build up to (and including) the first stage. For most
|
```
|
||||||
cases we don't need to build the stage2 compiler, so we can save time by not
|
|
||||||
building it. The stage1 compiler is a fully functioning compiler and
|
On some systems you can also use the shorter version:
|
||||||
(probably) will be enough to determine if your change works as expected.
|
|
||||||
- `make $host/stage1/bin/rustc` - Where $host is a target triple like x86_64-unknown-linux-gnu.
|
```sh
|
||||||
This will build just rustc, without libstd. This is the fastest way to recompile after
|
./x.py build
|
||||||
you changed only rustc source code. Note however that the resulting rustc binary
|
```
|
||||||
won't have a stdlib to link against by default. You can build libstd once with
|
|
||||||
`make rustc-stage1`, rustc will pick it up afterwards. libstd is only guaranteed to
|
To learn more about the driver and top-level targets, you can execute:
|
||||||
work if recompiled, so if there are any issues recompile it.
|
|
||||||
- `make check` - build the full compiler & run all tests (takes a while). This
|
```sh
|
||||||
|
python x.py --help
|
||||||
|
```
|
||||||
|
|
||||||
|
The general format for the driver script is:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
python x.py <command> [<directory>]
|
||||||
|
```
|
||||||
|
|
||||||
|
Some example commands are `build`, `test`, and `doc`. These will build, test,
|
||||||
|
and document the specified directory. The second argument, `<directory>`, is
|
||||||
|
optional and defaults to working over the entire compiler. If specified,
|
||||||
|
however, only that specific directory will be built. For example:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
# build the entire compiler
|
||||||
|
python x.py build
|
||||||
|
|
||||||
|
# build all documentation
|
||||||
|
python x.py doc
|
||||||
|
|
||||||
|
# run all test suites
|
||||||
|
python x.py test
|
||||||
|
|
||||||
|
# build only the standard library
|
||||||
|
python x.py build src/libstd
|
||||||
|
|
||||||
|
# test only one particular test suite
|
||||||
|
python x.py test src/test/rustdoc
|
||||||
|
|
||||||
|
# build only the stage0 libcore library
|
||||||
|
python x.py build src/libcore --stage 0
|
||||||
|
```
|
||||||
|
|
||||||
|
You can explore the build system through the various `--help` pages for each
|
||||||
|
subcommand. For example to learn more about a command you can run:
|
||||||
|
|
||||||
|
```
|
||||||
|
python x.py build --help
|
||||||
|
```
|
||||||
|
|
||||||
|
To learn about all possible rules you can execute, run:
|
||||||
|
|
||||||
|
```
|
||||||
|
python x.py build --help --verbose
|
||||||
|
```
|
||||||
|
|
||||||
|
### Useful commands
|
||||||
|
|
||||||
|
Some common invocations of `x.py` are:
|
||||||
|
|
||||||
|
- `x.py build --help` - show the help message and explain the subcommand
|
||||||
|
- `x.py build src/libtest --stage 1` - build up to (and including) the first
|
||||||
|
stage. For most cases we don't need to build the stage2 compiler, so we can
|
||||||
|
save time by not building it. The stage1 compiler is a fully functioning
|
||||||
|
compiler and (probably) will be enough to determine if your change works as
|
||||||
|
expected.
|
||||||
|
- `x.py build src/rustc --stage 1` - This will build just rustc, without libstd.
|
||||||
|
This is the fastest way to recompile after you changed only rustc source code.
|
||||||
|
Note however that the resulting rustc binary won't have a stdlib to link
|
||||||
|
against by default. You can build libstd once with `x.py build src/libstd`,
|
||||||
|
but it is only guaranteed to work if recompiled, so if there are any issues
|
||||||
|
recompile it.
|
||||||
|
- `x.py test` - build the full compiler & run all tests (takes a while). This
|
||||||
is what gets run by the continuous integration system against your pull
|
is what gets run by the continuous integration system against your pull
|
||||||
request. You should run this before submitting to make sure your tests pass
|
request. You should run this before submitting to make sure your tests pass
|
||||||
& everything builds in the correct manner.
|
& everything builds in the correct manner.
|
||||||
- `make check-stage1-std NO_REBUILD=1` - test the standard library without
|
- `x.py test src/libstd --stage 1` - test the standard library without
|
||||||
rebuilding the entire compiler
|
recompiling stage 2.
|
||||||
- `make check TESTNAME=<substring-of-test-name>` - Run a matching set of tests.
|
- `x.py test src/test/run-pass --filter TESTNAME` - Run a matching set of tests.
|
||||||
- `TESTNAME` should be a substring of the tests to match against e.g. it could
|
- `TESTNAME` should be a substring of the tests to match against e.g. it could
|
||||||
be the fully qualified test name, or just a part of it.
|
be the fully qualified test name, or just a part of it.
|
||||||
`TESTNAME=collections::hash::map::test_map::test_capacity_not_less_than_len`
|
`TESTNAME=collections::hash::map::test_map::test_capacity_not_less_than_len`
|
||||||
or `TESTNAME=test_capacity_not_less_than_len`.
|
or `TESTNAME=test_capacity_not_less_than_len`.
|
||||||
- `make check-stage1-rpass TESTNAME=<substring-of-test-name>` - Run a single
|
- `x.py test src/test/run-pass --stage 1 --filter <substring-of-test-name>` -
|
||||||
rpass test with the stage1 compiler (this will be quicker than running the
|
Run a single rpass test with the stage1 compiler (this will be quicker than
|
||||||
command above as we only build the stage1 compiler, not the entire thing).
|
running the command above as we only build the stage1 compiler, not the entire
|
||||||
You can also leave off the `-rpass` to run all stage1 test types.
|
thing). You can also leave off the directory argument to run all stage1 test
|
||||||
- `make check-stage1-coretest` - Run stage1 tests in `libcore`.
|
types.
|
||||||
- `make tidy` - Check that the source code is in compliance with Rust's style
|
- `x.py test src/libcore --stage 1` - Run stage1 tests in `libcore`.
|
||||||
guidelines. There is no official document describing Rust's full guidelines
|
- `x.py test src/tools/tidy` - Check that the source code is in compliance with
|
||||||
as of yet, but basic rules like 4 spaces for indentation and no more than 99
|
Rust's style guidelines. There is no official document describing Rust's full
|
||||||
characters in a single line should be kept in mind when writing code.
|
guidelines as of yet, but basic rules like 4 spaces for indentation and no
|
||||||
|
more than 99 characters in a single line should be kept in mind when writing
|
||||||
|
code.
|
||||||
|
|
||||||
## Pull Requests
|
## Pull Requests
|
||||||
|
|
||||||
|
@ -172,19 +245,17 @@ amount of time you have to wait. You need to have built the compiler at least
|
||||||
once before running these will work, but that’s only one full build rather than
|
once before running these will work, but that’s only one full build rather than
|
||||||
one each time.
|
one each time.
|
||||||
|
|
||||||
$ make -j8 rustc-stage1 && make check-stage1
|
$ python x.py test --stage 1
|
||||||
|
|
||||||
is one such example, which builds just `rustc`, and then runs the tests. If
|
is one such example, which builds just `rustc`, and then runs the tests. If
|
||||||
you’re adding something to the standard library, try
|
you’re adding something to the standard library, try
|
||||||
|
|
||||||
$ make -j8 check-stage1-std NO_REBUILD=1
|
$ python x.py test src/libstd --stage 1
|
||||||
|
|
||||||
This will not rebuild the compiler, but will run the tests.
|
|
||||||
|
|
||||||
Please make sure your pull request is in compliance with Rust's style
|
Please make sure your pull request is in compliance with Rust's style
|
||||||
guidelines by running
|
guidelines by running
|
||||||
|
|
||||||
$ make tidy
|
$ python x.py test src/tools/tidy
|
||||||
|
|
||||||
Make this check before every pull request (and every new commit in a pull
|
Make this check before every pull request (and every new commit in a pull
|
||||||
request); you can add [git hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks)
|
request); you can add [git hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks)
|
||||||
|
|
50
README.md
50
README.md
|
@ -36,16 +36,14 @@ Read ["Installing Rust"] from [The Book].
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
$ ./configure
|
$ ./configure
|
||||||
$ make && make install
|
$ make && sudo make install
|
||||||
```
|
```
|
||||||
|
|
||||||
> ***Note:*** You may need to use `sudo make install` if you do not
|
> ***Note:*** Install locations can be adjusted by passing a `--prefix`
|
||||||
> normally have permission to modify the destination directory. The
|
> argument to `configure`. Various other options are also supported – pass
|
||||||
> install locations can be adjusted by passing a `--prefix` argument
|
|
||||||
> to `configure`. Various other options are also supported – pass
|
|
||||||
> `--help` for more information on them.
|
> `--help` for more information on them.
|
||||||
|
|
||||||
When complete, `make install` will place several programs into
|
When complete, `sudo make install` will place several programs into
|
||||||
`/usr/local/bin`: `rustc`, the Rust compiler, and `rustdoc`, the
|
`/usr/local/bin`: `rustc`, the Rust compiler, and `rustdoc`, the
|
||||||
API-documentation tool. This install does not include [Cargo],
|
API-documentation tool. This install does not include [Cargo],
|
||||||
Rust's package manager, which you may also want to build.
|
Rust's package manager, which you may also want to build.
|
||||||
|
@ -108,30 +106,22 @@ MSVC builds of Rust additionally require an installation of Visual Studio 2013
|
||||||
(or later) so `rustc` can use its linker. Make sure to check the “C++ tools”
|
(or later) so `rustc` can use its linker. Make sure to check the “C++ tools”
|
||||||
option.
|
option.
|
||||||
|
|
||||||
With these dependencies installed, the build takes two steps:
|
With these dependencies installed, you can build the compiler in a `cmd.exe`
|
||||||
|
shell with:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
$ ./configure
|
> python x.py build
|
||||||
|
```
|
||||||
|
|
||||||
|
If you're running inside of an msys shell, however, you can run:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ ./configure --build=x86_64-pc-windows-msvc
|
||||||
$ make && make install
|
$ make && make install
|
||||||
```
|
```
|
||||||
|
|
||||||
#### MSVC with rustbuild
|
Currently building Rust only works with some known versions of Visual Studio. If
|
||||||
|
you have a more recent version installed the build system doesn't understand
|
||||||
The old build system, based on makefiles, is currently being rewritten into a
|
|
||||||
Rust-based build system called rustbuild. This can be used to bootstrap the
|
|
||||||
compiler on MSVC without needing to install MSYS or MinGW. All you need are
|
|
||||||
[Python 2](https://www.python.org/downloads/),
|
|
||||||
[CMake](https://cmake.org/download/), and
|
|
||||||
[Git](https://git-scm.com/downloads) in your PATH (make sure you do not use the
|
|
||||||
ones from MSYS if you have it installed). You'll also need Visual Studio 2013 or
|
|
||||||
newer with the C++ tools. Then all you need to do is to kick off rustbuild.
|
|
||||||
|
|
||||||
```
|
|
||||||
python x.py build
|
|
||||||
```
|
|
||||||
|
|
||||||
Currently rustbuild only works with some known versions of Visual Studio. If you
|
|
||||||
have a more recent version installed that a part of rustbuild doesn't understand
|
|
||||||
then you may need to force rustbuild to use an older version. This can be done
|
then you may need to force rustbuild to use an older version. This can be done
|
||||||
by manually calling the appropriate vcvars file before running the bootstrap.
|
by manually calling the appropriate vcvars file before running the bootstrap.
|
||||||
|
|
||||||
|
@ -149,16 +139,6 @@ $ ./configure
|
||||||
$ make docs
|
$ make docs
|
||||||
```
|
```
|
||||||
|
|
||||||
Building the documentation requires building the compiler, so the above
|
|
||||||
details will apply. Once you have the compiler built, you can
|
|
||||||
|
|
||||||
```sh
|
|
||||||
$ make docs NO_REBUILD=1
|
|
||||||
```
|
|
||||||
|
|
||||||
To make sure you don’t re-build the compiler because you made a change
|
|
||||||
to some documentation.
|
|
||||||
|
|
||||||
The generated documentation will appear in a top-level `doc` directory,
|
The generated documentation will appear in a top-level `doc` directory,
|
||||||
created by the `make` rule.
|
created by the `make` rule.
|
||||||
|
|
||||||
|
|
56
appveyor.yml
56
appveyor.yml
|
@ -2,25 +2,22 @@ environment:
|
||||||
matrix:
|
matrix:
|
||||||
# 32/64 bit MSVC
|
# 32/64 bit MSVC
|
||||||
- MSYS_BITS: 64
|
- MSYS_BITS: 64
|
||||||
TARGET: x86_64-pc-windows-msvc
|
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc
|
||||||
CHECK: check
|
RUST_CHECK_TARGET: check
|
||||||
CONFIGURE_ARGS: --enable-llvm-assertions --enable-debug-assertions
|
|
||||||
- MSYS_BITS: 32
|
- MSYS_BITS: 32
|
||||||
TARGET: i686-pc-windows-msvc
|
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc
|
||||||
CHECK: check
|
RUST_CHECK_TARGET: check
|
||||||
CONFIGURE_ARGS: --enable-llvm-assertions --enable-debug-assertions
|
|
||||||
|
|
||||||
# MSVC rustbuild
|
# MSVC makefiles
|
||||||
- MSYS_BITS: 64
|
- MSYS_BITS: 64
|
||||||
CONFIGURE_ARGS: --enable-rustbuild --enable-llvm-assertions --enable-debug-assertions
|
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --disable-rustbuild
|
||||||
TARGET: x86_64-pc-windows-msvc
|
RUST_CHECK_TARGET: check
|
||||||
CHECK: check
|
|
||||||
|
|
||||||
# MSVC cargotest
|
# MSVC cargotest
|
||||||
- MSYS_BITS: 64
|
- MSYS_BITS: 64
|
||||||
CONFIGURE_ARGS: --enable-rustbuild --enable-llvm-assertions --enable-debug-assertions
|
NO_VENDOR: 1
|
||||||
TARGET: x86_64-pc-windows-msvc
|
RUST_CHECK_TARGET: check-cargotest
|
||||||
CHECK: check-cargotest
|
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc
|
||||||
|
|
||||||
# 32/64-bit MinGW builds.
|
# 32/64-bit MinGW builds.
|
||||||
#
|
#
|
||||||
|
@ -47,24 +44,22 @@ environment:
|
||||||
# *not* use debug assertions and llvm assertions. This is because they take
|
# *not* use debug assertions and llvm assertions. This is because they take
|
||||||
# too long on appveyor and this is tested by rustbuild below.
|
# too long on appveyor and this is tested by rustbuild below.
|
||||||
- MSYS_BITS: 32
|
- MSYS_BITS: 32
|
||||||
TARGET: i686-pc-windows-gnu
|
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
|
||||||
CHECK: check
|
RUST_CHECK_TARGET: check
|
||||||
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci
|
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci
|
||||||
MINGW_ARCHIVE: i686-4.9.2-release-win32-dwarf-rt_v4-rev4.7z
|
MINGW_ARCHIVE: i686-4.9.2-release-win32-dwarf-rt_v4-rev4.7z
|
||||||
MINGW_DIR: mingw32
|
MINGW_DIR: mingw32
|
||||||
|
|
||||||
- MSYS_BITS: 32
|
- MSYS_BITS: 32
|
||||||
CONFIGURE_ARGS: --enable-rustbuild --enable-llvm-assertions --enable-debug-assertions
|
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --disable-rustbuild
|
||||||
TARGET: i686-pc-windows-gnu
|
RUST_CHECK_TARGET: check
|
||||||
CHECK: check
|
|
||||||
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci
|
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci
|
||||||
MINGW_ARCHIVE: i686-4.9.2-release-win32-dwarf-rt_v4-rev4.7z
|
MINGW_ARCHIVE: i686-4.9.2-release-win32-dwarf-rt_v4-rev4.7z
|
||||||
MINGW_DIR: mingw32
|
MINGW_DIR: mingw32
|
||||||
|
|
||||||
- MSYS_BITS: 64
|
- MSYS_BITS: 64
|
||||||
CONFIGURE_ARGS: --enable-llvm-assertions --enable-debug-assertions
|
RUST_CHECK_TARGET: check
|
||||||
TARGET: x86_64-pc-windows-gnu
|
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
|
||||||
CHECK: check
|
|
||||||
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci
|
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci
|
||||||
MINGW_ARCHIVE: x86_64-4.9.2-release-win32-seh-rt_v4-rev4.7z
|
MINGW_ARCHIVE: x86_64-4.9.2-release-win32-seh-rt_v4-rev4.7z
|
||||||
MINGW_DIR: mingw64
|
MINGW_DIR: mingw64
|
||||||
|
@ -90,15 +85,20 @@ install:
|
||||||
- if NOT defined MINGW_URL set PATH=C:\msys64\mingw%MSYS_BITS%\bin;C:\msys64\usr\bin;%PATH%
|
- if NOT defined MINGW_URL set PATH=C:\msys64\mingw%MSYS_BITS%\bin;C:\msys64\usr\bin;%PATH%
|
||||||
|
|
||||||
test_script:
|
test_script:
|
||||||
- sh ./configure
|
- git submodule update --init
|
||||||
%CONFIGURE_ARGS%
|
- set SRC=.
|
||||||
--build=%TARGET%
|
- set NO_CCACHE=1
|
||||||
- bash -c "make -j$(nproc)"
|
- sh src/ci/run.sh
|
||||||
- bash -c "make %CHECK% -j$(nproc)"
|
|
||||||
|
|
||||||
cache:
|
cache:
|
||||||
- build/%TARGET%/llvm -> src/rustllvm/llvm-auto-clean-trigger
|
- "build/i686-pc-windows-gnu/llvm -> src/rustllvm/llvm-auto-clean-trigger"
|
||||||
- "%TARGET%/llvm -> src/rustllvm/llvm-auto-clean-trigger"
|
- "build/x86_64-pc-windows-gnu/llvm -> src/rustllvm/llvm-auto-clean-trigger"
|
||||||
|
- "build/i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
|
||||||
|
- "build/x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
|
||||||
|
- "i686-pc-windows-gnu/llvm -> src/rustllvm/llvm-auto-clean-trigger"
|
||||||
|
- "x86_64-pc-windows-gnu/llvm -> src/rustllvm/llvm-auto-clean-trigger"
|
||||||
|
- "i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
|
||||||
|
- "x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
|
||||||
|
|
||||||
branches:
|
branches:
|
||||||
only:
|
only:
|
||||||
|
|
|
@ -631,7 +631,7 @@ opt stage0-landing-pads 1 "enable landing pads during bootstrap with stage0"
|
||||||
opt dist-host-only 0 "only install bins for the host architecture"
|
opt dist-host-only 0 "only install bins for the host architecture"
|
||||||
opt inject-std-version 1 "inject the current compiler version of libstd into programs"
|
opt inject-std-version 1 "inject the current compiler version of libstd into programs"
|
||||||
opt llvm-version-check 1 "check if the LLVM version is supported, build anyway"
|
opt llvm-version-check 1 "check if the LLVM version is supported, build anyway"
|
||||||
opt rustbuild 0 "use the rust and cargo based build system"
|
opt rustbuild 1 "use the rust and cargo based build system"
|
||||||
opt codegen-tests 1 "run the src/test/codegen tests"
|
opt codegen-tests 1 "run the src/test/codegen tests"
|
||||||
opt option-checking 1 "complain about unrecognized options in this configure script"
|
opt option-checking 1 "complain about unrecognized options in this configure script"
|
||||||
opt ninja 0 "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)"
|
opt ninja 0 "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)"
|
||||||
|
@ -664,11 +664,11 @@ valopt armv7-linux-androideabi-ndk "" "armv7-linux-androideabi NDK standalone pa
|
||||||
valopt aarch64-linux-android-ndk "" "aarch64-linux-android NDK standalone path"
|
valopt aarch64-linux-android-ndk "" "aarch64-linux-android NDK standalone path"
|
||||||
valopt nacl-cross-path "" "NaCl SDK path (Pepper Canary is recommended). Must be absolute!"
|
valopt nacl-cross-path "" "NaCl SDK path (Pepper Canary is recommended). Must be absolute!"
|
||||||
valopt musl-root "/usr/local" "MUSL root installation directory (deprecated)"
|
valopt musl-root "/usr/local" "MUSL root installation directory (deprecated)"
|
||||||
valopt musl-root-x86_64 "/usr/local" "x86_64-unknown-linux-musl install directory"
|
valopt musl-root-x86_64 "" "x86_64-unknown-linux-musl install directory"
|
||||||
valopt musl-root-i686 "/usr/local" "i686-unknown-linux-musl install directory"
|
valopt musl-root-i686 "" "i686-unknown-linux-musl install directory"
|
||||||
valopt musl-root-arm "/usr/local" "arm-unknown-linux-musleabi install directory"
|
valopt musl-root-arm "" "arm-unknown-linux-musleabi install directory"
|
||||||
valopt musl-root-armhf "/usr/local" "arm-unknown-linux-musleabihf install directory"
|
valopt musl-root-armhf "" "arm-unknown-linux-musleabihf install directory"
|
||||||
valopt musl-root-armv7 "/usr/local" "armv7-unknown-linux-musleabihf install directory"
|
valopt musl-root-armv7 "" "armv7-unknown-linux-musleabihf install directory"
|
||||||
valopt extra-filename "" "Additional data that is hashed and passed to the -C extra-filename flag"
|
valopt extra-filename "" "Additional data that is hashed and passed to the -C extra-filename flag"
|
||||||
|
|
||||||
if [ -e ${CFG_SRC_DIR}.git ]
|
if [ -e ${CFG_SRC_DIR}.git ]
|
||||||
|
@ -848,7 +848,10 @@ then
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# For building LLVM
|
# For building LLVM
|
||||||
|
if [ -z "$CFG_LLVM_ROOT" ]
|
||||||
|
then
|
||||||
probe_need CFG_CMAKE cmake
|
probe_need CFG_CMAKE cmake
|
||||||
|
fi
|
||||||
|
|
||||||
# On MacOS X, invoking `javac` pops up a dialog if the JDK is not
|
# On MacOS X, invoking `javac` pops up a dialog if the JDK is not
|
||||||
# installed. Since `javac` is only used if `antlr4` is available,
|
# installed. Since `javac` is only used if `antlr4` is available,
|
||||||
|
@ -1371,7 +1374,7 @@ then
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ -z "$CFG_ENABLE_RUSTBUILD" ]; then
|
if [ -n "$CFG_DISABLE_RUSTBUILD" ]; then
|
||||||
|
|
||||||
step_msg "making directories"
|
step_msg "making directories"
|
||||||
|
|
||||||
|
@ -1471,7 +1474,7 @@ fi
|
||||||
step_msg "configuring submodules"
|
step_msg "configuring submodules"
|
||||||
|
|
||||||
# Have to be in the top of src directory for this
|
# Have to be in the top of src directory for this
|
||||||
if [ -z $CFG_DISABLE_MANAGE_SUBMODULES ] && [ -z $CFG_ENABLE_RUSTBUILD ]
|
if [ -z "$CFG_DISABLE_MANAGE_SUBMODULES" ] && [ -n "$CFG_DISABLE_RUSTBUILD" ]
|
||||||
then
|
then
|
||||||
cd ${CFG_SRC_DIR}
|
cd ${CFG_SRC_DIR}
|
||||||
|
|
||||||
|
@ -1543,11 +1546,11 @@ do
|
||||||
;;
|
;;
|
||||||
esac
|
esac
|
||||||
|
|
||||||
if [ -n "$CFG_ENABLE_RUSTBUILD" ]
|
if [ -z "$CFG_DISABLE_RUSTBUILD" ]
|
||||||
then
|
then
|
||||||
msg "not configuring LLVM, rustbuild in use"
|
msg "not configuring LLVM, rustbuild in use"
|
||||||
do_reconfigure=0
|
do_reconfigure=0
|
||||||
elif [ -z $CFG_LLVM_ROOT ]
|
elif [ -z "$CFG_LLVM_ROOT" ]
|
||||||
then
|
then
|
||||||
LLVM_BUILD_DIR=${CFG_BUILD_DIR}$t/llvm
|
LLVM_BUILD_DIR=${CFG_BUILD_DIR}$t/llvm
|
||||||
LLVM_INST_DIR=$LLVM_BUILD_DIR
|
LLVM_INST_DIR=$LLVM_BUILD_DIR
|
||||||
|
@ -1868,7 +1871,7 @@ do
|
||||||
putvar $CFG_LLVM_INST_DIR
|
putvar $CFG_LLVM_INST_DIR
|
||||||
done
|
done
|
||||||
|
|
||||||
if [ -n "$CFG_ENABLE_RUSTBUILD" ]
|
if [ -z "$CFG_DISABLE_RUSTBUILD" ]
|
||||||
then
|
then
|
||||||
INPUT_MAKEFILE=src/bootstrap/mk/Makefile.in
|
INPUT_MAKEFILE=src/bootstrap/mk/Makefile.in
|
||||||
else
|
else
|
||||||
|
@ -1887,5 +1890,28 @@ else
|
||||||
step_msg "complete"
|
step_msg "complete"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [ "$CFG_SRC_DIR" = `pwd` ]; then
|
||||||
|
X_PY=x.py
|
||||||
|
else
|
||||||
|
X_PY=${CFG_SRC_DIR_RELATIVE}x.py
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -z "$CFG_DISABLE_RUSTBUILD" ]; then
|
||||||
|
msg "NOTE you have now configured rust to use a rewritten build system"
|
||||||
|
msg " called rustbuild, and as a result this may have bugs that "
|
||||||
|
msg " you did not see before. If you experience any issues you can"
|
||||||
|
msg " go back to the old build system with --disable-rustbuild and"
|
||||||
|
msg " please feel free to report any bugs!"
|
||||||
|
msg ""
|
||||||
|
msg "run \`python ${X_PY} --help\`"
|
||||||
|
else
|
||||||
|
warn "the makefile-based build system is deprecated in favor of rustbuild"
|
||||||
|
msg ""
|
||||||
|
msg "It is recommended you avoid passing --disable-rustbuild to get your"
|
||||||
|
msg "build working as the makefiles will be deleted on 2017-02-02. If you"
|
||||||
|
msg "encounter bugs with rustbuild please file issues against rust-lang/rust"
|
||||||
|
msg ""
|
||||||
msg "run \`make help\`"
|
msg "run \`make help\`"
|
||||||
|
fi
|
||||||
|
|
||||||
msg
|
msg
|
||||||
|
|
|
@ -0,0 +1,24 @@
|
||||||
|
# i686-unknown-openbsd configuration
|
||||||
|
CC_i686-unknown-openbsd=$(CC)
|
||||||
|
CXX_i686-unknown-openbsd=$(CXX)
|
||||||
|
CPP_i686-unknown-openbsd=$(CPP)
|
||||||
|
AR_i686-unknown-openbsd=$(AR)
|
||||||
|
CFG_LIB_NAME_i686-unknown-openbsd=lib$(1).so
|
||||||
|
CFG_STATIC_LIB_NAME_i686-unknown-openbsd=lib$(1).a
|
||||||
|
CFG_LIB_GLOB_i686-unknown-openbsd=lib$(1)-*.so
|
||||||
|
CFG_LIB_DSYM_GLOB_i686-unknown-openbsd=$(1)-*.dylib.dSYM
|
||||||
|
CFG_JEMALLOC_CFLAGS_i686-unknown-openbsd := -m32 -I/usr/include $(CFLAGS)
|
||||||
|
CFG_GCCISH_CFLAGS_i686-unknown-openbsd := -g -fPIC -m32 -I/usr/include $(CFLAGS)
|
||||||
|
CFG_GCCISH_LINK_FLAGS_i686-unknown-openbsd := -shared -fPIC -g -pthread -m32
|
||||||
|
CFG_GCCISH_DEF_FLAG_i686-unknown-openbsd := -Wl,--export-dynamic,--dynamic-list=
|
||||||
|
CFG_LLC_FLAGS_i686-unknown-openbsd :=
|
||||||
|
CFG_INSTALL_NAME_i686-unknown-openbsd =
|
||||||
|
CFG_EXE_SUFFIX_i686-unknown-openbsd :=
|
||||||
|
CFG_WINDOWSY_i686-unknown-openbsd :=
|
||||||
|
CFG_UNIXY_i686-unknown-openbsd := 1
|
||||||
|
CFG_LDPATH_i686-unknown-openbsd :=
|
||||||
|
CFG_RUN_i686-unknown-openbsd=$(2)
|
||||||
|
CFG_RUN_TARG_i686-unknown-openbsd=$(call CFG_RUN_i686-unknown-openbsd,,$(2))
|
||||||
|
CFG_GNU_TRIPLE_i686-unknown-openbsd := i686-unknown-openbsd
|
||||||
|
RUSTC_FLAGS_i686-unknown-openbsd=-C linker=$(call FIND_COMPILER,$(CC))
|
||||||
|
CFG_DISABLE_JEMALLOC_i686-unknown-openbsd := 1
|
24
mk/crates.mk
24
mk/crates.mk
|
@ -52,7 +52,7 @@
|
||||||
TARGET_CRATES := libc std term \
|
TARGET_CRATES := libc std term \
|
||||||
getopts collections test rand \
|
getopts collections test rand \
|
||||||
compiler_builtins core alloc \
|
compiler_builtins core alloc \
|
||||||
rustc_unicode rustc_bitflags \
|
std_unicode rustc_bitflags \
|
||||||
alloc_system alloc_jemalloc \
|
alloc_system alloc_jemalloc \
|
||||||
panic_abort panic_unwind unwind
|
panic_abort panic_unwind unwind
|
||||||
RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_driver \
|
RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_driver \
|
||||||
|
@ -65,27 +65,23 @@ HOST_CRATES := syntax syntax_ext proc_macro_tokens proc_macro_plugin syntax_pos
|
||||||
TOOLS := compiletest rustdoc rustc rustbook error_index_generator
|
TOOLS := compiletest rustdoc rustc rustbook error_index_generator
|
||||||
|
|
||||||
DEPS_core :=
|
DEPS_core :=
|
||||||
DEPS_compiler_builtins := core
|
DEPS_compiler_builtins := core native:compiler-rt
|
||||||
DEPS_alloc := core libc alloc_system
|
DEPS_alloc := core libc alloc_system
|
||||||
DEPS_alloc_system := core libc
|
DEPS_alloc_system := core libc
|
||||||
DEPS_alloc_jemalloc := core libc native:jemalloc
|
DEPS_alloc_jemalloc := core libc native:jemalloc
|
||||||
DEPS_collections := core alloc rustc_unicode
|
DEPS_collections := core alloc std_unicode
|
||||||
DEPS_libc := core
|
DEPS_libc := core
|
||||||
DEPS_rand := core
|
DEPS_rand := core
|
||||||
DEPS_rustc_bitflags := core
|
DEPS_rustc_bitflags := core
|
||||||
DEPS_rustc_unicode := core
|
DEPS_std_unicode := core
|
||||||
DEPS_panic_abort := libc alloc
|
DEPS_panic_abort := libc alloc
|
||||||
DEPS_panic_unwind := libc alloc unwind
|
DEPS_panic_unwind := libc alloc unwind
|
||||||
DEPS_unwind := libc
|
DEPS_unwind := libc
|
||||||
|
|
||||||
RUSTFLAGS_compiler_builtins := -lstatic=compiler-rt
|
RUSTFLAGS_compiler_builtins := -lstatic=compiler-rt
|
||||||
|
RUSTFLAGS_panic_abort := -C panic=abort
|
||||||
|
|
||||||
# FIXME(stage0): change this to just `RUSTFLAGS_panic_abort := ...`
|
DEPS_std := core libc rand alloc collections compiler_builtins std_unicode \
|
||||||
RUSTFLAGS1_panic_abort := -C panic=abort
|
|
||||||
RUSTFLAGS2_panic_abort := -C panic=abort
|
|
||||||
RUSTFLAGS3_panic_abort := -C panic=abort
|
|
||||||
|
|
||||||
DEPS_std := core libc rand alloc collections compiler_builtins rustc_unicode \
|
|
||||||
native:backtrace \
|
native:backtrace \
|
||||||
alloc_system panic_abort panic_unwind unwind
|
alloc_system panic_abort panic_unwind unwind
|
||||||
DEPS_arena := std
|
DEPS_arena := std
|
||||||
|
@ -100,7 +96,7 @@ DEPS_serialize := std log
|
||||||
DEPS_term := std
|
DEPS_term := std
|
||||||
DEPS_test := std getopts term native:rust_test_helpers
|
DEPS_test := std getopts term native:rust_test_helpers
|
||||||
|
|
||||||
DEPS_syntax := std term serialize log arena libc rustc_bitflags rustc_unicode rustc_errors syntax_pos rustc_data_structures
|
DEPS_syntax := std term serialize log arena libc rustc_bitflags std_unicode rustc_errors syntax_pos rustc_data_structures
|
||||||
DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros proc_macro
|
DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros proc_macro
|
||||||
DEPS_syntax_pos := serialize
|
DEPS_syntax_pos := serialize
|
||||||
DEPS_proc_macro_tokens := syntax syntax_pos log
|
DEPS_proc_macro_tokens := syntax syntax_pos log
|
||||||
|
@ -140,7 +136,7 @@ DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back \
|
||||||
DEPS_rustc_incremental := rustc syntax_pos serialize rustc_data_structures
|
DEPS_rustc_incremental := rustc syntax_pos serialize rustc_data_structures
|
||||||
DEPS_rustc_save_analysis := rustc log syntax syntax_pos serialize
|
DEPS_rustc_save_analysis := rustc log syntax syntax_pos serialize
|
||||||
DEPS_rustc_typeck := rustc syntax syntax_pos rustc_platform_intrinsics rustc_const_math \
|
DEPS_rustc_typeck := rustc syntax syntax_pos rustc_platform_intrinsics rustc_const_math \
|
||||||
rustc_const_eval rustc_errors
|
rustc_const_eval rustc_errors rustc_data_structures
|
||||||
|
|
||||||
DEPS_rustdoc := rustc rustc_driver native:hoedown serialize getopts test \
|
DEPS_rustdoc := rustc rustc_driver native:hoedown serialize getopts test \
|
||||||
rustc_lint rustc_const_eval syntax_pos rustc_data_structures
|
rustc_lint rustc_const_eval syntax_pos rustc_data_structures
|
||||||
|
@ -162,7 +158,7 @@ ONLY_RLIB_libc := 1
|
||||||
ONLY_RLIB_alloc := 1
|
ONLY_RLIB_alloc := 1
|
||||||
ONLY_RLIB_rand := 1
|
ONLY_RLIB_rand := 1
|
||||||
ONLY_RLIB_collections := 1
|
ONLY_RLIB_collections := 1
|
||||||
ONLY_RLIB_rustc_unicode := 1
|
ONLY_RLIB_std_unicode := 1
|
||||||
ONLY_RLIB_rustc_bitflags := 1
|
ONLY_RLIB_rustc_bitflags := 1
|
||||||
ONLY_RLIB_alloc_system := 1
|
ONLY_RLIB_alloc_system := 1
|
||||||
ONLY_RLIB_alloc_jemalloc := 1
|
ONLY_RLIB_alloc_jemalloc := 1
|
||||||
|
@ -173,7 +169,7 @@ ONLY_RLIB_unwind := 1
|
||||||
TARGET_SPECIFIC_alloc_jemalloc := 1
|
TARGET_SPECIFIC_alloc_jemalloc := 1
|
||||||
|
|
||||||
# Documented-by-default crates
|
# Documented-by-default crates
|
||||||
DOC_CRATES := std alloc collections core libc rustc_unicode
|
DOC_CRATES := std alloc collections core libc std_unicode
|
||||||
|
|
||||||
ifeq ($(CFG_DISABLE_JEMALLOC),)
|
ifeq ($(CFG_DISABLE_JEMALLOC),)
|
||||||
RUSTFLAGS_rustc_back := --cfg 'feature="jemalloc"'
|
RUSTFLAGS_rustc_back := --cfg 'feature="jemalloc"'
|
||||||
|
|
10
mk/main.mk
10
mk/main.mk
|
@ -372,15 +372,12 @@ CFG_INFO := $(info cfg: disabling unstable features (CFG_DISABLE_UNSTABLE_FEATUR
|
||||||
# Turn on feature-staging
|
# Turn on feature-staging
|
||||||
export CFG_DISABLE_UNSTABLE_FEATURES
|
export CFG_DISABLE_UNSTABLE_FEATURES
|
||||||
# Subvert unstable feature lints to do the self-build
|
# Subvert unstable feature lints to do the self-build
|
||||||
export RUSTC_BOOTSTRAP=1
|
|
||||||
endif
|
endif
|
||||||
ifdef CFG_MUSL_ROOT
|
ifdef CFG_MUSL_ROOT
|
||||||
export CFG_MUSL_ROOT
|
export CFG_MUSL_ROOT
|
||||||
endif
|
endif
|
||||||
|
|
||||||
# FIXME: Transitionary measure to bootstrap using the old bootstrap logic.
|
export RUSTC_BOOTSTRAP := 1
|
||||||
# Remove this once the bootstrap compiler uses the new login in Issue #36548.
|
|
||||||
export RUSTC_BOOTSTRAP_KEY=62b3e239
|
|
||||||
|
|
||||||
######################################################################
|
######################################################################
|
||||||
# Per-stage targets and runner
|
# Per-stage targets and runner
|
||||||
|
@ -443,10 +440,7 @@ endif
|
||||||
TSREQ$(1)_T_$(2)_H_$(3) = \
|
TSREQ$(1)_T_$(2)_H_$(3) = \
|
||||||
$$(HSREQ$(1)_H_$(3)) \
|
$$(HSREQ$(1)_H_$(3)) \
|
||||||
$$(foreach obj,$$(REQUIRED_OBJECTS_$(2)),\
|
$$(foreach obj,$$(REQUIRED_OBJECTS_$(2)),\
|
||||||
$$(TLIB$(1)_T_$(2)_H_$(3))/$$(obj)) \
|
$$(TLIB$(1)_T_$(2)_H_$(3))/$$(obj))
|
||||||
$$(TLIB0_T_$(2)_H_$(3))/$$(call CFG_STATIC_LIB_NAME_$(2),compiler-rt)
|
|
||||||
# ^ This copies `libcompiler-rt.a` to the stage0 sysroot
|
|
||||||
# ^ TODO(stage0) update this to not copy `libcompiler-rt.a` to stage0
|
|
||||||
|
|
||||||
# Prerequisites for a working stageN compiler and libraries, for a specific
|
# Prerequisites for a working stageN compiler and libraries, for a specific
|
||||||
# target
|
# target
|
||||||
|
|
|
@ -15,7 +15,7 @@
|
||||||
|
|
||||||
# The names of crates that must be tested
|
# The names of crates that must be tested
|
||||||
|
|
||||||
# libcore/librustc_unicode tests are in a separate crate
|
# libcore/libstd_unicode tests are in a separate crate
|
||||||
DEPS_coretest :=
|
DEPS_coretest :=
|
||||||
$(eval $(call RUST_CRATE,coretest))
|
$(eval $(call RUST_CRATE,coretest))
|
||||||
|
|
||||||
|
|
|
@ -45,7 +45,6 @@ dependencies = [
|
||||||
"gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
|
"gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
"getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
|
"libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"md5 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
"num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
|
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
|
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
@ -73,7 +72,7 @@ version = "0.0.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"alloc 0.0.0",
|
"alloc 0.0.0",
|
||||||
"core 0.0.0",
|
"core 0.0.0",
|
||||||
"rustc_unicode 0.0.0",
|
"std_unicode 0.0.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -172,11 +171,6 @@ name = "log"
|
||||||
version = "0.3.6"
|
version = "0.3.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "md5"
|
|
||||||
version = "0.1.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "num_cpus"
|
name = "num_cpus"
|
||||||
version = "0.2.13"
|
version = "0.2.13"
|
||||||
|
@ -409,6 +403,7 @@ version = "0.0.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"build_helper 0.1.0",
|
"build_helper 0.1.0",
|
||||||
"gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
|
"gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"rustc_bitflags 0.0.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -550,13 +545,6 @@ dependencies = [
|
||||||
"syntax_pos 0.0.0",
|
"syntax_pos 0.0.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rustc_unicode"
|
|
||||||
version = "0.0.0"
|
|
||||||
dependencies = [
|
|
||||||
"core 0.0.0",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rustdoc"
|
name = "rustdoc"
|
||||||
version = "0.0.0"
|
version = "0.0.0"
|
||||||
|
@ -604,7 +592,7 @@ dependencies = [
|
||||||
"panic_abort 0.0.0",
|
"panic_abort 0.0.0",
|
||||||
"panic_unwind 0.0.0",
|
"panic_unwind 0.0.0",
|
||||||
"rand 0.0.0",
|
"rand 0.0.0",
|
||||||
"rustc_unicode 0.0.0",
|
"std_unicode 0.0.0",
|
||||||
"unwind 0.0.0",
|
"unwind 0.0.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@ -616,6 +604,13 @@ dependencies = [
|
||||||
"std 0.0.0",
|
"std 0.0.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "std_unicode"
|
||||||
|
version = "0.0.0"
|
||||||
|
dependencies = [
|
||||||
|
"core 0.0.0",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syntax"
|
name = "syntax"
|
||||||
version = "0.0.0"
|
version = "0.0.0"
|
||||||
|
@ -686,7 +681,6 @@ dependencies = [
|
||||||
"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
|
"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
|
||||||
"checksum libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "044d1360593a78f5c8e5e710beccdc24ab71d1f01bc19a29bcacdba22e8475d8"
|
"checksum libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "044d1360593a78f5c8e5e710beccdc24ab71d1f01bc19a29bcacdba22e8475d8"
|
||||||
"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
|
"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
|
||||||
"checksum md5 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a5539a8dee9b4ae308c9c406a379838b435a8f2c84cf9fedc6d5a576be9888db"
|
|
||||||
"checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3"
|
"checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3"
|
||||||
"checksum rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "6159e4e6e559c81bd706afe9c8fd68f547d3e851ce12e76b1de7914bab61691b"
|
"checksum rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "6159e4e6e559c81bd706afe9c8fd68f547d3e851ce12e76b1de7914bab61691b"
|
||||||
"checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796"
|
"checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796"
|
||||||
|
|
|
@ -11,3 +11,20 @@ members = [
|
||||||
"tools/rustbook",
|
"tools/rustbook",
|
||||||
"tools/tidy",
|
"tools/tidy",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
# Curiously, compiletest will segfault if compiled with opt-level=3 on 64-bit
|
||||||
|
# MSVC when running the compile-fail test suite when a should-fail test panics.
|
||||||
|
# But hey if this is removed and it gets past the bots, sounds good to me.
|
||||||
|
[profile.release]
|
||||||
|
opt-level = 2
|
||||||
|
[profile.bench]
|
||||||
|
opt-level = 2
|
||||||
|
|
||||||
|
# These options are controlled from our rustc wrapper script, so turn them off
|
||||||
|
# here and have them controlled elsewhere.
|
||||||
|
[profile.dev]
|
||||||
|
debug = false
|
||||||
|
debug-assertions = false
|
||||||
|
[profile.test]
|
||||||
|
debug = false
|
||||||
|
debug-assertions = false
|
||||||
|
|
|
@ -29,4 +29,3 @@ getopts = "0.2"
|
||||||
rustc-serialize = "0.3"
|
rustc-serialize = "0.3"
|
||||||
gcc = "0.3.38"
|
gcc = "0.3.38"
|
||||||
libc = "0.2"
|
libc = "0.2"
|
||||||
md5 = "0.1"
|
|
||||||
|
|
|
@ -32,7 +32,7 @@ The script accepts commands, flags, and filters to determine what to do:
|
||||||
# build the whole compiler
|
# build the whole compiler
|
||||||
./x.py build
|
./x.py build
|
||||||
|
|
||||||
# build the stage1 compier
|
# build the stage1 compiler
|
||||||
./x.py build --stage 1
|
./x.py build --stage 1
|
||||||
|
|
||||||
# build stage0 libstd
|
# build stage0 libstd
|
||||||
|
@ -66,17 +66,6 @@ The script accepts commands, flags, and filters to determine what to do:
|
||||||
* `doc` - a command for building documentation. Like above can take arguments
|
* `doc` - a command for building documentation. Like above can take arguments
|
||||||
for what to document.
|
for what to document.
|
||||||
|
|
||||||
If you're more used to `./configure` and `make`, however, then you can also
|
|
||||||
configure the build system to use rustbuild instead of the old makefiles:
|
|
||||||
|
|
||||||
```
|
|
||||||
./configure --enable-rustbuild
|
|
||||||
make
|
|
||||||
```
|
|
||||||
|
|
||||||
Afterwards the `Makefile` which is generated will have a few commands like
|
|
||||||
`make check`, `make tidy`, etc.
|
|
||||||
|
|
||||||
## Configuring rustbuild
|
## Configuring rustbuild
|
||||||
|
|
||||||
There are currently two primary methods for configuring the rustbuild build
|
There are currently two primary methods for configuring the rustbuild build
|
||||||
|
@ -90,6 +79,13 @@ be found at `src/bootstrap/config.toml.example`, and the configuration file
|
||||||
can also be passed as `--config path/to/config.toml` if the build system is
|
can also be passed as `--config path/to/config.toml` if the build system is
|
||||||
being invoked manually (via the python script).
|
being invoked manually (via the python script).
|
||||||
|
|
||||||
|
Finally, rustbuild makes use of the [gcc-rs crate] which has [its own
|
||||||
|
method][env-vars] of configuring C compilers and C flags via environment
|
||||||
|
variables.
|
||||||
|
|
||||||
|
[gcc-rs crate]: https://github.com/alexcrichton/gcc-rs
|
||||||
|
[env-vars]: https://github.com/alexcrichton/gcc-rs#external-configuration-via-environment-variables
|
||||||
|
|
||||||
## Build stages
|
## Build stages
|
||||||
|
|
||||||
The rustbuild build system goes through a few phases to actually build the
|
The rustbuild build system goes through a few phases to actually build the
|
||||||
|
@ -273,16 +269,17 @@ After that, each module in rustbuild should have enough documentation to keep
|
||||||
you up and running. Some general areas that you may be interested in modifying
|
you up and running. Some general areas that you may be interested in modifying
|
||||||
are:
|
are:
|
||||||
|
|
||||||
* Adding a new build tool? Take a look at `build/step.rs` for examples of other
|
* Adding a new build tool? Take a look at `bootstrap/step.rs` for examples of
|
||||||
tools, as well as `build/mod.rs`.
|
other tools.
|
||||||
* Adding a new compiler crate? Look no further! Adding crates can be done by
|
* Adding a new compiler crate? Look no further! Adding crates can be done by
|
||||||
adding a new directory with `Cargo.toml` followed by configuring all
|
adding a new directory with `Cargo.toml` followed by configuring all
|
||||||
`Cargo.toml` files accordingly.
|
`Cargo.toml` files accordingly.
|
||||||
* Adding a new dependency from crates.io? We're still working on that, so hold
|
* Adding a new dependency from crates.io? We're still working on that, so hold
|
||||||
off on that for now.
|
off on that for now.
|
||||||
* Adding a new configuration option? Take a look at `build/config.rs` or perhaps
|
* Adding a new configuration option? Take a look at `bootstrap/config.rs` or
|
||||||
`build/flags.rs` and then modify the build elsewhere to read that option.
|
perhaps `bootstrap/flags.rs` and then modify the build elsewhere to read that
|
||||||
* Adding a sanity check? Take a look at `build/sanity.rs`.
|
option.
|
||||||
|
* Adding a sanity check? Take a look at `bootstrap/sanity.rs`.
|
||||||
|
|
||||||
If you have any questions feel free to reach out on `#rust-internals` on IRC or
|
If you have any questions feel free to reach out on `#rust-internals` on IRC or
|
||||||
open an issue in the bug tracker!
|
open an issue in the bug tracker!
|
||||||
|
|
|
@ -125,6 +125,11 @@ fn main() {
|
||||||
cmd.arg("-C").arg(format!("codegen-units={}", s));
|
cmd.arg("-C").arg(format!("codegen-units={}", s));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Emit save-analysis info.
|
||||||
|
if env::var("RUSTC_SAVE_ANALYSIS") == Ok("api".to_string()) {
|
||||||
|
cmd.arg("-Zsave-analysis-api");
|
||||||
|
}
|
||||||
|
|
||||||
// Dealing with rpath here is a little special, so let's go into some
|
// Dealing with rpath here is a little special, so let's go into some
|
||||||
// detail. First off, `-rpath` is a linker option on Unix platforms
|
// detail. First off, `-rpath` is a linker option on Unix platforms
|
||||||
// which adds to the runtime dynamic loader path when looking for
|
// which adds to the runtime dynamic loader path when looking for
|
||||||
|
|
|
@ -30,32 +30,37 @@ def get(url, path, verbose=False):
|
||||||
sha_path = sha_file.name
|
sha_path = sha_file.name
|
||||||
|
|
||||||
try:
|
try:
|
||||||
download(sha_path, sha_url, verbose)
|
download(sha_path, sha_url, False, verbose)
|
||||||
if os.path.exists(path):
|
if os.path.exists(path):
|
||||||
if verify(path, sha_path, False):
|
if verify(path, sha_path, False):
|
||||||
|
if verbose:
|
||||||
print("using already-download file " + path)
|
print("using already-download file " + path)
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
|
if verbose:
|
||||||
print("ignoring already-download file " + path + " due to failed verification")
|
print("ignoring already-download file " + path + " due to failed verification")
|
||||||
os.unlink(path)
|
os.unlink(path)
|
||||||
download(temp_path, url, verbose)
|
download(temp_path, url, True, verbose)
|
||||||
if not verify(temp_path, sha_path, True):
|
if not verify(temp_path, sha_path, verbose):
|
||||||
raise RuntimeError("failed verification")
|
raise RuntimeError("failed verification")
|
||||||
|
if verbose:
|
||||||
print("moving {} to {}".format(temp_path, path))
|
print("moving {} to {}".format(temp_path, path))
|
||||||
shutil.move(temp_path, path)
|
shutil.move(temp_path, path)
|
||||||
finally:
|
finally:
|
||||||
delete_if_present(sha_path)
|
delete_if_present(sha_path, verbose)
|
||||||
delete_if_present(temp_path)
|
delete_if_present(temp_path, verbose)
|
||||||
|
|
||||||
|
|
||||||
def delete_if_present(path):
|
def delete_if_present(path, verbose):
|
||||||
if os.path.isfile(path):
|
if os.path.isfile(path):
|
||||||
|
if verbose:
|
||||||
print("removing " + path)
|
print("removing " + path)
|
||||||
os.unlink(path)
|
os.unlink(path)
|
||||||
|
|
||||||
|
|
||||||
def download(path, url, verbose):
|
def download(path, url, probably_big, verbose):
|
||||||
print("downloading {} to {}".format(url, path))
|
if probably_big or verbose:
|
||||||
|
print("downloading {}".format(url))
|
||||||
# see http://serverfault.com/questions/301128/how-to-download
|
# see http://serverfault.com/questions/301128/how-to-download
|
||||||
if sys.platform == 'win32':
|
if sys.platform == 'win32':
|
||||||
run(["PowerShell.exe", "/nologo", "-Command",
|
run(["PowerShell.exe", "/nologo", "-Command",
|
||||||
|
@ -63,17 +68,22 @@ def download(path, url, verbose):
|
||||||
".DownloadFile('{}', '{}')".format(url, path)],
|
".DownloadFile('{}', '{}')".format(url, path)],
|
||||||
verbose=verbose)
|
verbose=verbose)
|
||||||
else:
|
else:
|
||||||
run(["curl", "-o", path, url], verbose=verbose)
|
if probably_big or verbose:
|
||||||
|
option = "-#"
|
||||||
|
else:
|
||||||
|
option = "-s"
|
||||||
|
run(["curl", option, "-Sf", "-o", path, url], verbose=verbose)
|
||||||
|
|
||||||
|
|
||||||
def verify(path, sha_path, verbose):
|
def verify(path, sha_path, verbose):
|
||||||
|
if verbose:
|
||||||
print("verifying " + path)
|
print("verifying " + path)
|
||||||
with open(path, "rb") as f:
|
with open(path, "rb") as f:
|
||||||
found = hashlib.sha256(f.read()).hexdigest()
|
found = hashlib.sha256(f.read()).hexdigest()
|
||||||
with open(sha_path, "r") as f:
|
with open(sha_path, "r") as f:
|
||||||
expected, _ = f.readline().split()
|
expected, _ = f.readline().split()
|
||||||
verified = found == expected
|
verified = found == expected
|
||||||
if not verified and verbose:
|
if not verified:
|
||||||
print("invalid checksum:\n"
|
print("invalid checksum:\n"
|
||||||
" found: {}\n"
|
" found: {}\n"
|
||||||
" expected: {}".format(found, expected))
|
" expected: {}".format(found, expected))
|
||||||
|
@ -144,6 +154,7 @@ class RustBuild(object):
|
||||||
|
|
||||||
if self.rustc().startswith(self.bin_root()) and \
|
if self.rustc().startswith(self.bin_root()) and \
|
||||||
(not os.path.exists(self.rustc()) or self.rustc_out_of_date()):
|
(not os.path.exists(self.rustc()) or self.rustc_out_of_date()):
|
||||||
|
self.print_what_it_means_to_bootstrap()
|
||||||
if os.path.exists(self.bin_root()):
|
if os.path.exists(self.bin_root()):
|
||||||
shutil.rmtree(self.bin_root())
|
shutil.rmtree(self.bin_root())
|
||||||
channel = self.stage0_rustc_channel()
|
channel = self.stage0_rustc_channel()
|
||||||
|
@ -167,6 +178,7 @@ class RustBuild(object):
|
||||||
|
|
||||||
if self.cargo().startswith(self.bin_root()) and \
|
if self.cargo().startswith(self.bin_root()) and \
|
||||||
(not os.path.exists(self.cargo()) or self.cargo_out_of_date()):
|
(not os.path.exists(self.cargo()) or self.cargo_out_of_date()):
|
||||||
|
self.print_what_it_means_to_bootstrap()
|
||||||
channel = self.stage0_cargo_channel()
|
channel = self.stage0_cargo_channel()
|
||||||
filename = "cargo-{}-{}.tar.gz".format(channel, self.build)
|
filename = "cargo-{}-{}.tar.gz".format(channel, self.build)
|
||||||
url = "https://static.rust-lang.org/cargo-dist/" + self.stage0_cargo_date()
|
url = "https://static.rust-lang.org/cargo-dist/" + self.stage0_cargo_date()
|
||||||
|
@ -251,7 +263,27 @@ class RustBuild(object):
|
||||||
else:
|
else:
|
||||||
return ''
|
return ''
|
||||||
|
|
||||||
|
def print_what_it_means_to_bootstrap(self):
|
||||||
|
if hasattr(self, 'printed'):
|
||||||
|
return
|
||||||
|
self.printed = True
|
||||||
|
if os.path.exists(self.bootstrap_binary()):
|
||||||
|
return
|
||||||
|
if not '--help' in sys.argv or len(sys.argv) == 1:
|
||||||
|
return
|
||||||
|
|
||||||
|
print('info: the build system for Rust is written in Rust, so this')
|
||||||
|
print(' script is now going to download a stage0 rust compiler')
|
||||||
|
print(' and then compile the build system itself')
|
||||||
|
print('')
|
||||||
|
print('info: in the meantime you can read more about rustbuild at')
|
||||||
|
print(' src/bootstrap/README.md before the download finishes')
|
||||||
|
|
||||||
|
def bootstrap_binary(self):
|
||||||
|
return os.path.join(self.build_dir, "bootstrap/debug/bootstrap")
|
||||||
|
|
||||||
def build_bootstrap(self):
|
def build_bootstrap(self):
|
||||||
|
self.print_what_it_means_to_bootstrap()
|
||||||
build_dir = os.path.join(self.build_dir, "bootstrap")
|
build_dir = os.path.join(self.build_dir, "bootstrap")
|
||||||
if self.clean and os.path.exists(build_dir):
|
if self.clean and os.path.exists(build_dir):
|
||||||
shutil.rmtree(build_dir)
|
shutil.rmtree(build_dir)
|
||||||
|
@ -408,10 +440,19 @@ def main():
|
||||||
rb.use_vendored_sources = '\nvendor = true' in rb.config_toml or \
|
rb.use_vendored_sources = '\nvendor = true' in rb.config_toml or \
|
||||||
'CFG_ENABLE_VENDOR' in rb.config_mk
|
'CFG_ENABLE_VENDOR' in rb.config_mk
|
||||||
|
|
||||||
|
if 'SUDO_USER' in os.environ:
|
||||||
|
if os.environ['USER'] != os.environ['SUDO_USER']:
|
||||||
|
rb.use_vendored_sources = True
|
||||||
|
print('info: looks like you are running this command under `sudo`')
|
||||||
|
print(' and so in order to preserve your $HOME this will now')
|
||||||
|
print(' use vendored sources by default. Note that if this')
|
||||||
|
print(' does not work you should run a normal build first')
|
||||||
|
print(' before running a command like `sudo make intall`')
|
||||||
|
|
||||||
if rb.use_vendored_sources:
|
if rb.use_vendored_sources:
|
||||||
if not os.path.exists('.cargo'):
|
if not os.path.exists('.cargo'):
|
||||||
os.makedirs('.cargo')
|
os.makedirs('.cargo')
|
||||||
f = open('.cargo/config','w')
|
with open('.cargo/config','w') as f:
|
||||||
f.write("""
|
f.write("""
|
||||||
[source.crates-io]
|
[source.crates-io]
|
||||||
replace-with = 'vendored-sources'
|
replace-with = 'vendored-sources'
|
||||||
|
@ -420,10 +461,10 @@ def main():
|
||||||
[source.vendored-sources]
|
[source.vendored-sources]
|
||||||
directory = '{}/src/vendor'
|
directory = '{}/src/vendor'
|
||||||
""".format(rb.rust_root))
|
""".format(rb.rust_root))
|
||||||
f.close()
|
|
||||||
else:
|
else:
|
||||||
if os.path.exists('.cargo'):
|
if os.path.exists('.cargo'):
|
||||||
shutil.rmtree('.cargo')
|
shutil.rmtree('.cargo')
|
||||||
|
|
||||||
data = stage0_data(rb.rust_root)
|
data = stage0_data(rb.rust_root)
|
||||||
rb._rustc_channel, rb._rustc_date = data['rustc'].split('-', 1)
|
rb._rustc_channel, rb._rustc_date = data['rustc'].split('-', 1)
|
||||||
rb._cargo_channel, rb._cargo_date = data['cargo'].split('-', 1)
|
rb._cargo_channel, rb._cargo_date = data['cargo'].split('-', 1)
|
||||||
|
@ -438,7 +479,7 @@ def main():
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
# Run the bootstrap
|
# Run the bootstrap
|
||||||
args = [os.path.join(rb.build_dir, "bootstrap/debug/bootstrap")]
|
args = [rb.bootstrap_binary()]
|
||||||
args.extend(sys.argv[1:])
|
args.extend(sys.argv[1:])
|
||||||
env = os.environ.copy()
|
env = os.environ.copy()
|
||||||
env["BUILD"] = rb.build
|
env["BUILD"] = rb.build
|
||||||
|
|
|
@ -51,7 +51,7 @@ pub fn find(build: &mut Build) {
|
||||||
if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
|
if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
|
||||||
cfg.compiler(cc);
|
cfg.compiler(cc);
|
||||||
} else {
|
} else {
|
||||||
set_compiler(&mut cfg, "gcc", target, config);
|
set_compiler(&mut cfg, "gcc", target, config, build);
|
||||||
}
|
}
|
||||||
|
|
||||||
let compiler = cfg.get_compiler();
|
let compiler = cfg.get_compiler();
|
||||||
|
@ -72,7 +72,7 @@ pub fn find(build: &mut Build) {
|
||||||
if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
|
if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
|
||||||
cfg.compiler(cxx);
|
cfg.compiler(cxx);
|
||||||
} else {
|
} else {
|
||||||
set_compiler(&mut cfg, "g++", host, config);
|
set_compiler(&mut cfg, "g++", host, config, build);
|
||||||
}
|
}
|
||||||
let compiler = cfg.get_compiler();
|
let compiler = cfg.get_compiler();
|
||||||
build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
|
build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
|
||||||
|
@ -83,7 +83,8 @@ pub fn find(build: &mut Build) {
|
||||||
fn set_compiler(cfg: &mut gcc::Config,
|
fn set_compiler(cfg: &mut gcc::Config,
|
||||||
gnu_compiler: &str,
|
gnu_compiler: &str,
|
||||||
target: &str,
|
target: &str,
|
||||||
config: Option<&Target>) {
|
config: Option<&Target>,
|
||||||
|
build: &Build) {
|
||||||
match target {
|
match target {
|
||||||
// When compiling for android we may have the NDK configured in the
|
// When compiling for android we may have the NDK configured in the
|
||||||
// config.toml in which case we look there. Otherwise the default
|
// config.toml in which case we look there. Otherwise the default
|
||||||
|
@ -119,6 +120,22 @@ fn set_compiler(cfg: &mut gcc::Config,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
"mips-unknown-linux-musl" => {
|
||||||
|
cfg.compiler("mips-linux-musl-gcc");
|
||||||
|
}
|
||||||
|
"mipsel-unknown-linux-musl" => {
|
||||||
|
cfg.compiler("mipsel-linux-musl-gcc");
|
||||||
|
}
|
||||||
|
|
||||||
|
t if t.contains("musl") => {
|
||||||
|
if let Some(root) = build.musl_root(target) {
|
||||||
|
let guess = root.join("bin/musl-gcc");
|
||||||
|
if guess.exists() {
|
||||||
|
cfg.compiler(guess);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -20,7 +20,6 @@ use std::io::prelude::*;
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
|
||||||
use build_helper::output;
|
use build_helper::output;
|
||||||
use md5;
|
|
||||||
|
|
||||||
use Build;
|
use Build;
|
||||||
|
|
||||||
|
@ -91,20 +90,4 @@ pub fn collect(build: &mut Build) {
|
||||||
build.ver_hash = Some(ver_hash);
|
build.ver_hash = Some(ver_hash);
|
||||||
build.short_ver_hash = Some(short_ver_hash);
|
build.short_ver_hash = Some(short_ver_hash);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Calculate this compiler's bootstrap key, which is currently defined as
|
|
||||||
// the first 8 characters of the md5 of the release string.
|
|
||||||
let key = md5::compute(build.release.as_bytes());
|
|
||||||
build.bootstrap_key = format!("{:02x}{:02x}{:02x}{:02x}",
|
|
||||||
key[0], key[1], key[2], key[3]);
|
|
||||||
|
|
||||||
// Slurp up the stage0 bootstrap key as we're bootstrapping from an
|
|
||||||
// otherwise stable compiler.
|
|
||||||
let mut s = String::new();
|
|
||||||
t!(t!(File::open(build.src.join("src/stage0.txt"))).read_to_string(&mut s));
|
|
||||||
if let Some(line) = s.lines().find(|l| l.starts_with("rustc_key")) {
|
|
||||||
if let Some(key) = line.split(": ").nth(1) {
|
|
||||||
build.bootstrap_key_stage0 = key.to_string();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -8,13 +8,14 @@
|
||||||
// option. This file may not be copied, modified, or distributed
|
// option. This file may not be copied, modified, or distributed
|
||||||
// except according to those terms.
|
// except according to those terms.
|
||||||
|
|
||||||
//! Implementation of the various `check-*` targets of the build system.
|
//! Implementation of the test-related targets of the build system.
|
||||||
//!
|
//!
|
||||||
//! This file implements the various regression test suites that we execute on
|
//! This file implements the various regression test suites that we execute on
|
||||||
//! our CI.
|
//! our CI.
|
||||||
|
|
||||||
use std::collections::HashSet;
|
use std::collections::HashSet;
|
||||||
use std::env;
|
use std::env;
|
||||||
|
use std::fmt;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::path::{PathBuf, Path};
|
use std::path::{PathBuf, Path};
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
@ -22,10 +23,39 @@ use std::process::Command;
|
||||||
use build_helper::output;
|
use build_helper::output;
|
||||||
|
|
||||||
use {Build, Compiler, Mode};
|
use {Build, Compiler, Mode};
|
||||||
|
use dist;
|
||||||
use util::{self, dylib_path, dylib_path_var};
|
use util::{self, dylib_path, dylib_path_var};
|
||||||
|
|
||||||
const ADB_TEST_DIR: &'static str = "/data/tmp";
|
const ADB_TEST_DIR: &'static str = "/data/tmp";
|
||||||
|
|
||||||
|
/// The two modes of the test runner; tests or benchmarks.
|
||||||
|
#[derive(Copy, Clone)]
|
||||||
|
pub enum TestKind {
|
||||||
|
/// Run `cargo test`
|
||||||
|
Test,
|
||||||
|
/// Run `cargo bench`
|
||||||
|
Bench,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TestKind {
|
||||||
|
// Return the cargo subcommand for this test kind
|
||||||
|
fn subcommand(self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
TestKind::Test => "test",
|
||||||
|
TestKind::Bench => "bench",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for TestKind {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
f.write_str(match *self {
|
||||||
|
TestKind::Test => "Testing",
|
||||||
|
TestKind::Bench => "Benchmarking",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
|
/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
|
||||||
///
|
///
|
||||||
/// This tool in `src/tools` will verify the validity of all our links in the
|
/// This tool in `src/tools` will verify the validity of all our links in the
|
||||||
|
@ -33,6 +63,8 @@ const ADB_TEST_DIR: &'static str = "/data/tmp";
|
||||||
pub fn linkcheck(build: &Build, stage: u32, host: &str) {
|
pub fn linkcheck(build: &Build, stage: u32, host: &str) {
|
||||||
println!("Linkcheck stage{} ({})", stage, host);
|
println!("Linkcheck stage{} ({})", stage, host);
|
||||||
let compiler = Compiler::new(stage, host);
|
let compiler = Compiler::new(stage, host);
|
||||||
|
|
||||||
|
let _time = util::timeit();
|
||||||
build.run(build.tool_cmd(&compiler, "linkchecker")
|
build.run(build.tool_cmd(&compiler, "linkchecker")
|
||||||
.arg(build.out.join(host).join("doc")));
|
.arg(build.out.join(host).join("doc")));
|
||||||
}
|
}
|
||||||
|
@ -58,6 +90,7 @@ pub fn cargotest(build: &Build, stage: u32, host: &str) {
|
||||||
let out_dir = build.out.join("ct");
|
let out_dir = build.out.join("ct");
|
||||||
t!(fs::create_dir_all(&out_dir));
|
t!(fs::create_dir_all(&out_dir));
|
||||||
|
|
||||||
|
let _time = util::timeit();
|
||||||
build.run(build.tool_cmd(compiler, "cargotest")
|
build.run(build.tool_cmd(compiler, "cargotest")
|
||||||
.env("PATH", newpath)
|
.env("PATH", newpath)
|
||||||
.arg(&build.cargo)
|
.arg(&build.cargo)
|
||||||
|
@ -90,7 +123,8 @@ pub fn compiletest(build: &Build,
|
||||||
target: &str,
|
target: &str,
|
||||||
mode: &str,
|
mode: &str,
|
||||||
suite: &str) {
|
suite: &str) {
|
||||||
println!("Check compiletest {} ({} -> {})", suite, compiler.host, target);
|
println!("Check compiletest suite={} mode={} ({} -> {})",
|
||||||
|
suite, mode, compiler.host, target);
|
||||||
let mut cmd = build.tool_cmd(compiler, "compiletest");
|
let mut cmd = build.tool_cmd(compiler, "compiletest");
|
||||||
|
|
||||||
// compiletest currently has... a lot of arguments, so let's just pass all
|
// compiletest currently has... a lot of arguments, so let's just pass all
|
||||||
|
@ -184,6 +218,9 @@ pub fn compiletest(build: &Build,
|
||||||
|
|
||||||
// Running a C compiler on MSVC requires a few env vars to be set, to be
|
// Running a C compiler on MSVC requires a few env vars to be set, to be
|
||||||
// sure to set them here.
|
// sure to set them here.
|
||||||
|
//
|
||||||
|
// Note that if we encounter `PATH` we make sure to append to our own `PATH`
|
||||||
|
// rather than stomp over it.
|
||||||
if target.contains("msvc") {
|
if target.contains("msvc") {
|
||||||
for &(ref k, ref v) in build.cc[target].0.env() {
|
for &(ref k, ref v) in build.cc[target].0.env() {
|
||||||
if k != "PATH" {
|
if k != "PATH" {
|
||||||
|
@ -191,7 +228,8 @@ pub fn compiletest(build: &Build,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
build.add_bootstrap_key(&mut cmd);
|
cmd.env("RUSTC_BOOTSTRAP", "1");
|
||||||
|
build.add_rust_test_threads(&mut cmd);
|
||||||
|
|
||||||
cmd.arg("--adb-path").arg("adb");
|
cmd.arg("--adb-path").arg("adb");
|
||||||
cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
|
cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
|
||||||
|
@ -203,6 +241,7 @@ pub fn compiletest(build: &Build,
|
||||||
cmd.arg("--android-cross-path").arg("");
|
cmd.arg("--android-cross-path").arg("");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let _time = util::timeit();
|
||||||
build.run(&mut cmd);
|
build.run(&mut cmd);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -215,6 +254,7 @@ pub fn docs(build: &Build, compiler: &Compiler) {
|
||||||
// Do a breadth-first traversal of the `src/doc` directory and just run
|
// Do a breadth-first traversal of the `src/doc` directory and just run
|
||||||
// tests for all files that end in `*.md`
|
// tests for all files that end in `*.md`
|
||||||
let mut stack = vec![build.src.join("src/doc")];
|
let mut stack = vec![build.src.join("src/doc")];
|
||||||
|
let _time = util::timeit();
|
||||||
|
|
||||||
while let Some(p) = stack.pop() {
|
while let Some(p) = stack.pop() {
|
||||||
if p.is_dir() {
|
if p.is_dir() {
|
||||||
|
@ -243,6 +283,8 @@ pub fn error_index(build: &Build, compiler: &Compiler) {
|
||||||
let dir = testdir(build, compiler.host);
|
let dir = testdir(build, compiler.host);
|
||||||
t!(fs::create_dir_all(&dir));
|
t!(fs::create_dir_all(&dir));
|
||||||
let output = dir.join("error-index.md");
|
let output = dir.join("error-index.md");
|
||||||
|
|
||||||
|
let _time = util::timeit();
|
||||||
build.run(build.tool_cmd(compiler, "error_index_generator")
|
build.run(build.tool_cmd(compiler, "error_index_generator")
|
||||||
.arg("markdown")
|
.arg("markdown")
|
||||||
.arg(&output)
|
.arg(&output)
|
||||||
|
@ -254,6 +296,7 @@ pub fn error_index(build: &Build, compiler: &Compiler) {
|
||||||
fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
|
fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
|
||||||
let mut cmd = Command::new(build.rustdoc(compiler));
|
let mut cmd = Command::new(build.rustdoc(compiler));
|
||||||
build.add_rustc_lib_path(compiler, &mut cmd);
|
build.add_rustc_lib_path(compiler, &mut cmd);
|
||||||
|
build.add_rust_test_threads(&mut cmd);
|
||||||
cmd.arg("--test");
|
cmd.arg("--test");
|
||||||
cmd.arg(markdown);
|
cmd.arg(markdown);
|
||||||
|
|
||||||
|
@ -278,6 +321,7 @@ pub fn krate(build: &Build,
|
||||||
compiler: &Compiler,
|
compiler: &Compiler,
|
||||||
target: &str,
|
target: &str,
|
||||||
mode: Mode,
|
mode: Mode,
|
||||||
|
test_kind: TestKind,
|
||||||
krate: Option<&str>) {
|
krate: Option<&str>) {
|
||||||
let (name, path, features, root) = match mode {
|
let (name, path, features, root) = match mode {
|
||||||
Mode::Libstd => {
|
Mode::Libstd => {
|
||||||
|
@ -291,7 +335,7 @@ pub fn krate(build: &Build,
|
||||||
}
|
}
|
||||||
_ => panic!("can only test libraries"),
|
_ => panic!("can only test libraries"),
|
||||||
};
|
};
|
||||||
println!("Testing {} stage{} ({} -> {})", name, compiler.stage,
|
println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
|
||||||
compiler.host, target);
|
compiler.host, target);
|
||||||
|
|
||||||
// Build up the base `cargo test` command.
|
// Build up the base `cargo test` command.
|
||||||
|
@ -299,7 +343,7 @@ pub fn krate(build: &Build,
|
||||||
// Pass in some standard flags then iterate over the graph we've discovered
|
// Pass in some standard flags then iterate over the graph we've discovered
|
||||||
// in `cargo metadata` with the maps above and figure out what `-p`
|
// in `cargo metadata` with the maps above and figure out what `-p`
|
||||||
// arguments need to get passed.
|
// arguments need to get passed.
|
||||||
let mut cargo = build.cargo(compiler, mode, target, "test");
|
let mut cargo = build.cargo(compiler, mode, target, test_kind.subcommand());
|
||||||
cargo.arg("--manifest-path")
|
cargo.arg("--manifest-path")
|
||||||
.arg(build.src.join(path).join("Cargo.toml"))
|
.arg(build.src.join(path).join("Cargo.toml"))
|
||||||
.arg("--features").arg(features);
|
.arg("--features").arg(features);
|
||||||
|
@ -336,16 +380,25 @@ pub fn krate(build: &Build,
|
||||||
dylib_path.insert(0, build.sysroot_libdir(compiler, target));
|
dylib_path.insert(0, build.sysroot_libdir(compiler, target));
|
||||||
cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
|
cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
|
||||||
|
|
||||||
if build.config.quiet_tests {
|
if target.contains("android") {
|
||||||
|
cargo.arg("--no-run");
|
||||||
|
} else if target.contains("emscripten") {
|
||||||
|
cargo.arg("--no-run");
|
||||||
|
}
|
||||||
|
|
||||||
cargo.arg("--");
|
cargo.arg("--");
|
||||||
|
|
||||||
|
if build.config.quiet_tests {
|
||||||
cargo.arg("--quiet");
|
cargo.arg("--quiet");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let _time = util::timeit();
|
||||||
|
|
||||||
if target.contains("android") {
|
if target.contains("android") {
|
||||||
build.run(cargo.arg("--no-run"));
|
build.run(&mut cargo);
|
||||||
krate_android(build, compiler, target, mode);
|
krate_android(build, compiler, target, mode);
|
||||||
} else if target.contains("emscripten") {
|
} else if target.contains("emscripten") {
|
||||||
build.run(cargo.arg("--no-run"));
|
build.run(&mut cargo);
|
||||||
krate_emscripten(build, compiler, target, mode);
|
krate_emscripten(build, compiler, target, mode);
|
||||||
} else {
|
} else {
|
||||||
cargo.args(&build.flags.cmd.test_args());
|
cargo.args(&build.flags.cmd.test_args());
|
||||||
|
@ -372,14 +425,17 @@ fn krate_android(build: &Build,
|
||||||
target,
|
target,
|
||||||
compiler.host,
|
compiler.host,
|
||||||
test_file_name);
|
test_file_name);
|
||||||
|
let quiet = if build.config.quiet_tests { "--quiet" } else { "" };
|
||||||
let program = format!("(cd {dir}; \
|
let program = format!("(cd {dir}; \
|
||||||
LD_LIBRARY_PATH=./{target} ./{test} \
|
LD_LIBRARY_PATH=./{target} ./{test} \
|
||||||
--logfile {log} \
|
--logfile {log} \
|
||||||
|
{quiet} \
|
||||||
{args})",
|
{args})",
|
||||||
dir = ADB_TEST_DIR,
|
dir = ADB_TEST_DIR,
|
||||||
target = target,
|
target = target,
|
||||||
test = test_file_name,
|
test = test_file_name,
|
||||||
log = log,
|
log = log,
|
||||||
|
quiet = quiet,
|
||||||
args = build.flags.cmd.test_args().join(" "));
|
args = build.flags.cmd.test_args().join(" "));
|
||||||
|
|
||||||
let output = output(Command::new("adb").arg("shell").arg(&program));
|
let output = output(Command::new("adb").arg("shell").arg(&program));
|
||||||
|
@ -408,18 +464,12 @@ fn krate_emscripten(build: &Build,
|
||||||
let test_file_name = test.to_string_lossy().into_owned();
|
let test_file_name = test.to_string_lossy().into_owned();
|
||||||
println!("running {}", test_file_name);
|
println!("running {}", test_file_name);
|
||||||
let nodejs = build.config.nodejs.as_ref().expect("nodejs not configured");
|
let nodejs = build.config.nodejs.as_ref().expect("nodejs not configured");
|
||||||
let status = Command::new(nodejs)
|
let mut cmd = Command::new(nodejs);
|
||||||
.arg(&test_file_name)
|
cmd.arg(&test_file_name);
|
||||||
.stderr(::std::process::Stdio::inherit())
|
if build.config.quiet_tests {
|
||||||
.status();
|
cmd.arg("--quiet");
|
||||||
match status {
|
|
||||||
Ok(status) => {
|
|
||||||
if !status.success() {
|
|
||||||
panic!("some tests failed");
|
|
||||||
}
|
}
|
||||||
}
|
build.run(&mut cmd);
|
||||||
Err(e) => panic!(format!("failed to execute command: {}", e)),
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -467,3 +517,32 @@ pub fn android_copy_libs(build: &Build,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Run "distcheck", a 'make check' from a tarball
|
||||||
|
pub fn distcheck(build: &Build) {
|
||||||
|
if build.config.build != "x86_64-unknown-linux-gnu" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if !build.config.host.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if !build.config.target.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
let dir = build.out.join("tmp").join("distcheck");
|
||||||
|
let _ = fs::remove_dir_all(&dir);
|
||||||
|
t!(fs::create_dir_all(&dir));
|
||||||
|
|
||||||
|
let mut cmd = Command::new("tar");
|
||||||
|
cmd.arg("-xzf")
|
||||||
|
.arg(dist::rust_src_location(build))
|
||||||
|
.arg("--strip-components=1")
|
||||||
|
.current_dir(&dir);
|
||||||
|
build.run(&mut cmd);
|
||||||
|
build.run(Command::new("./configure")
|
||||||
|
.current_dir(&dir));
|
||||||
|
build.run(Command::new("make")
|
||||||
|
.arg("check")
|
||||||
|
.current_dir(&dir));
|
||||||
|
}
|
||||||
|
|
|
@ -46,6 +46,9 @@ fn rm_rf(build: &Build, path: &Path) {
|
||||||
if !path.exists() {
|
if !path.exists() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
if path.is_file() {
|
||||||
|
return do_op(path, "remove file", |p| fs::remove_file(p));
|
||||||
|
}
|
||||||
|
|
||||||
for file in t!(fs::read_dir(path)) {
|
for file in t!(fs::read_dir(path)) {
|
||||||
let file = t!(file).path();
|
let file = t!(file).path();
|
||||||
|
|
|
@ -120,8 +120,8 @@ fn build_startup_objects(build: &Build, target: &str, into: &Path) {
|
||||||
for file in t!(fs::read_dir(build.src.join("src/rtstartup"))) {
|
for file in t!(fs::read_dir(build.src.join("src/rtstartup"))) {
|
||||||
let file = t!(file);
|
let file = t!(file);
|
||||||
let mut cmd = Command::new(&compiler_path);
|
let mut cmd = Command::new(&compiler_path);
|
||||||
build.add_bootstrap_key(&mut cmd);
|
build.run(cmd.env("RUSTC_BOOTSTRAP", "1")
|
||||||
build.run(cmd.arg("--target").arg(target)
|
.arg("--target").arg(target)
|
||||||
.arg("--emit=obj")
|
.arg("--emit=obj")
|
||||||
.arg("--out-dir").arg(into)
|
.arg("--out-dir").arg(into)
|
||||||
.arg(file.path()));
|
.arg(file.path()));
|
||||||
|
|
|
@ -23,7 +23,7 @@ use std::io::Write;
|
||||||
use std::path::{PathBuf, Path};
|
use std::path::{PathBuf, Path};
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
|
||||||
use {Build, Compiler};
|
use {Build, Compiler, Mode};
|
||||||
use util::{cp_r, libdir, is_dylib, cp_filtered, copy};
|
use util::{cp_r, libdir, is_dylib, cp_filtered, copy};
|
||||||
|
|
||||||
pub fn package_vers(build: &Build) -> &str {
|
pub fn package_vers(build: &Build) -> &str {
|
||||||
|
@ -284,6 +284,55 @@ pub fn std(build: &Build, compiler: &Compiler, target: &str) {
|
||||||
t!(fs::remove_dir_all(&image));
|
t!(fs::remove_dir_all(&image));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn rust_src_location(build: &Build) -> PathBuf {
|
||||||
|
let plain_name = format!("rustc-{}-src", package_vers(build));
|
||||||
|
distdir(build).join(&format!("{}.tar.gz", plain_name))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a tarball of save-analysis metadata, if available.
|
||||||
|
pub fn analysis(build: &Build, compiler: &Compiler, target: &str) {
|
||||||
|
println!("Dist analysis");
|
||||||
|
|
||||||
|
if build.config.channel != "nightly" {
|
||||||
|
println!("Skipping dist-analysis - not on nightly channel");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if compiler.stage != 2 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
let name = format!("rust-analysis-{}", package_vers(build));
|
||||||
|
let image = tmpdir(build).join(format!("{}-{}-image", name, target));
|
||||||
|
|
||||||
|
let src = build.stage_out(compiler, Mode::Libstd).join(target).join("release").join("deps");
|
||||||
|
|
||||||
|
let image_src = src.join("save-analysis");
|
||||||
|
let dst = image.join("lib/rustlib").join(target).join("analysis");
|
||||||
|
t!(fs::create_dir_all(&dst));
|
||||||
|
cp_r(&image_src, &dst);
|
||||||
|
|
||||||
|
let mut cmd = Command::new("sh");
|
||||||
|
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
|
||||||
|
.arg("--product-name=Rust")
|
||||||
|
.arg("--rel-manifest-dir=rustlib")
|
||||||
|
.arg("--success-message=save-analysis-saved.")
|
||||||
|
.arg(format!("--image-dir={}", sanitize_sh(&image)))
|
||||||
|
.arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
|
||||||
|
.arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
|
||||||
|
.arg(format!("--package-name={}-{}", name, target))
|
||||||
|
.arg(format!("--component-name=rust-analysis-{}", target))
|
||||||
|
.arg("--legacy-manifest-dirs=rustlib,cargo");
|
||||||
|
build.run(&mut cmd);
|
||||||
|
t!(fs::remove_dir_all(&image));
|
||||||
|
|
||||||
|
// Create plain source tarball
|
||||||
|
let mut cmd = Command::new("tar");
|
||||||
|
cmd.arg("-czf").arg(sanitize_sh(&distdir(build).join(&format!("{}.tar.gz", name))))
|
||||||
|
.arg("analysis")
|
||||||
|
.current_dir(&src);
|
||||||
|
build.run(&mut cmd);
|
||||||
|
}
|
||||||
|
|
||||||
/// Creates the `rust-src` installer component and the plain source tarball
|
/// Creates the `rust-src` installer component and the plain source tarball
|
||||||
pub fn rust_src(build: &Build) {
|
pub fn rust_src(build: &Build) {
|
||||||
println!("Dist src");
|
println!("Dist src");
|
||||||
|
@ -374,7 +423,7 @@ pub fn rust_src(build: &Build) {
|
||||||
|
|
||||||
// Create plain source tarball
|
// Create plain source tarball
|
||||||
let mut cmd = Command::new("tar");
|
let mut cmd = Command::new("tar");
|
||||||
cmd.arg("-czf").arg(sanitize_sh(&distdir(build).join(&format!("{}.tar.gz", plain_name))))
|
cmd.arg("-czf").arg(sanitize_sh(&rust_src_location(build)))
|
||||||
.arg(&plain_name)
|
.arg(&plain_name)
|
||||||
.current_dir(&dst);
|
.current_dir(&dst);
|
||||||
build.run(&mut cmd);
|
build.run(&mut cmd);
|
||||||
|
|
|
@ -49,6 +49,10 @@ pub enum Subcommand {
|
||||||
paths: Vec<PathBuf>,
|
paths: Vec<PathBuf>,
|
||||||
test_args: Vec<String>,
|
test_args: Vec<String>,
|
||||||
},
|
},
|
||||||
|
Bench {
|
||||||
|
paths: Vec<PathBuf>,
|
||||||
|
test_args: Vec<String>,
|
||||||
|
},
|
||||||
Clean,
|
Clean,
|
||||||
Dist {
|
Dist {
|
||||||
install: bool,
|
install: bool,
|
||||||
|
@ -141,6 +145,7 @@ Arguments:
|
||||||
command == "dist" ||
|
command == "dist" ||
|
||||||
command == "doc" ||
|
command == "doc" ||
|
||||||
command == "test" ||
|
command == "test" ||
|
||||||
|
command == "bench" ||
|
||||||
command == "clean" {
|
command == "clean" {
|
||||||
println!("Available invocations:");
|
println!("Available invocations:");
|
||||||
if args.iter().any(|a| a == "-v") {
|
if args.iter().any(|a| a == "-v") {
|
||||||
|
@ -163,6 +168,7 @@ println!("\
|
||||||
Subcommands:
|
Subcommands:
|
||||||
build Compile either the compiler or libraries
|
build Compile either the compiler or libraries
|
||||||
test Build and run some test suites
|
test Build and run some test suites
|
||||||
|
bench Build and run some benchmarks
|
||||||
doc Build documentation
|
doc Build documentation
|
||||||
clean Clean out build directories
|
clean Clean out build directories
|
||||||
dist Build and/or install distribution artifacts
|
dist Build and/or install distribution artifacts
|
||||||
|
@ -210,6 +216,14 @@ To learn more about a subcommand, run `./x.py <command> -h`
|
||||||
test_args: m.opt_strs("test-args"),
|
test_args: m.opt_strs("test-args"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
"bench" => {
|
||||||
|
opts.optmulti("", "test-args", "extra arguments", "ARGS");
|
||||||
|
m = parse(&opts);
|
||||||
|
Subcommand::Bench {
|
||||||
|
paths: remaining_as_path(&m),
|
||||||
|
test_args: m.opt_strs("test-args"),
|
||||||
|
}
|
||||||
|
}
|
||||||
"clean" => {
|
"clean" => {
|
||||||
m = parse(&opts);
|
m = parse(&opts);
|
||||||
if m.free.len() > 0 {
|
if m.free.len() > 0 {
|
||||||
|
@ -225,6 +239,7 @@ To learn more about a subcommand, run `./x.py <command> -h`
|
||||||
install: m.opt_present("install"),
|
install: m.opt_present("install"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
"--help" => usage(0, &opts),
|
||||||
cmd => {
|
cmd => {
|
||||||
println!("unknown command: {}", cmd);
|
println!("unknown command: {}", cmd);
|
||||||
usage(1, &opts);
|
usage(1, &opts);
|
||||||
|
@ -259,7 +274,8 @@ To learn more about a subcommand, run `./x.py <command> -h`
|
||||||
impl Subcommand {
|
impl Subcommand {
|
||||||
pub fn test_args(&self) -> Vec<&str> {
|
pub fn test_args(&self) -> Vec<&str> {
|
||||||
match *self {
|
match *self {
|
||||||
Subcommand::Test { ref test_args, .. } => {
|
Subcommand::Test { ref test_args, .. } |
|
||||||
|
Subcommand::Bench { ref test_args, .. } => {
|
||||||
test_args.iter().flat_map(|s| s.split_whitespace()).collect()
|
test_args.iter().flat_map(|s| s.split_whitespace()).collect()
|
||||||
}
|
}
|
||||||
_ => Vec::new(),
|
_ => Vec::new(),
|
||||||
|
|
|
@ -51,6 +51,7 @@ type LPVOID = *mut u8;
|
||||||
type JOBOBJECTINFOCLASS = i32;
|
type JOBOBJECTINFOCLASS = i32;
|
||||||
type SIZE_T = usize;
|
type SIZE_T = usize;
|
||||||
type LARGE_INTEGER = i64;
|
type LARGE_INTEGER = i64;
|
||||||
|
type UINT = u32;
|
||||||
type ULONG_PTR = usize;
|
type ULONG_PTR = usize;
|
||||||
type ULONGLONG = u64;
|
type ULONGLONG = u64;
|
||||||
|
|
||||||
|
@ -59,6 +60,8 @@ const DUPLICATE_SAME_ACCESS: DWORD = 0x2;
|
||||||
const PROCESS_DUP_HANDLE: DWORD = 0x40;
|
const PROCESS_DUP_HANDLE: DWORD = 0x40;
|
||||||
const JobObjectExtendedLimitInformation: JOBOBJECTINFOCLASS = 9;
|
const JobObjectExtendedLimitInformation: JOBOBJECTINFOCLASS = 9;
|
||||||
const JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE: DWORD = 0x2000;
|
const JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE: DWORD = 0x2000;
|
||||||
|
const SEM_FAILCRITICALERRORS: UINT = 0x0001;
|
||||||
|
const SEM_NOGPFAULTERRORBOX: UINT = 0x0002;
|
||||||
|
|
||||||
extern "system" {
|
extern "system" {
|
||||||
fn CreateJobObjectW(lpJobAttributes: *mut u8, lpName: *const u8) -> HANDLE;
|
fn CreateJobObjectW(lpJobAttributes: *mut u8, lpName: *const u8) -> HANDLE;
|
||||||
|
@ -79,6 +82,7 @@ extern "system" {
|
||||||
JobObjectInformationClass: JOBOBJECTINFOCLASS,
|
JobObjectInformationClass: JOBOBJECTINFOCLASS,
|
||||||
lpJobObjectInformation: LPVOID,
|
lpJobObjectInformation: LPVOID,
|
||||||
cbJobObjectInformationLength: DWORD) -> BOOL;
|
cbJobObjectInformationLength: DWORD) -> BOOL;
|
||||||
|
fn SetErrorMode(mode: UINT) -> UINT;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[repr(C)]
|
#[repr(C)]
|
||||||
|
@ -115,6 +119,13 @@ struct JOBOBJECT_BASIC_LIMIT_INFORMATION {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub unsafe fn setup() {
|
pub unsafe fn setup() {
|
||||||
|
// Tell Windows to not show any UI on errors (such as not finding a required dll
|
||||||
|
// during startup or terminating abnormally). This is important for running tests,
|
||||||
|
// since some of them use abnormal termination by design.
|
||||||
|
// This mode is inherited by all child processes.
|
||||||
|
let mode = SetErrorMode(SEM_NOGPFAULTERRORBOX); // read inherited flags
|
||||||
|
SetErrorMode(mode | SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX);
|
||||||
|
|
||||||
// Create a new job object for us to use
|
// Create a new job object for us to use
|
||||||
let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
|
let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
|
||||||
assert!(job != 0 as *mut _, "{}", io::Error::last_os_error());
|
assert!(job != 0 as *mut _, "{}", io::Error::last_os_error());
|
||||||
|
|
|
@ -13,22 +13,69 @@
|
||||||
//! This module, and its descendants, are the implementation of the Rust build
|
//! This module, and its descendants, are the implementation of the Rust build
|
||||||
//! system. Most of this build system is backed by Cargo but the outer layer
|
//! system. Most of this build system is backed by Cargo but the outer layer
|
||||||
//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
|
//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
|
||||||
//! builds, building artifacts like LLVM, etc.
|
//! builds, building artifacts like LLVM, etc. The goals of rustbuild are:
|
||||||
//!
|
//!
|
||||||
//! More documentation can be found in each respective module below.
|
//! * To be an easily understandable, easily extensible, and maintainable build
|
||||||
|
//! system.
|
||||||
|
//! * Leverage standard tools in the Rust ecosystem to build the compiler, aka
|
||||||
|
//! crates.io and Cargo.
|
||||||
|
//! * A standard interface to build across all platforms, including MSVC
|
||||||
|
//!
|
||||||
|
//! ## Architecture
|
||||||
|
//!
|
||||||
|
//! Although this build system defers most of the complicated logic to Cargo
|
||||||
|
//! itself, it still needs to maintain a list of targets and dependencies which
|
||||||
|
//! it can itself perform. Rustbuild is made up of a list of rules with
|
||||||
|
//! dependencies amongst them (created in the `step` module) and then knows how
|
||||||
|
//! to execute each in sequence. Each time rustbuild is invoked, it will simply
|
||||||
|
//! iterate through this list of steps and execute each serially in turn. For
|
||||||
|
//! each step rustbuild relies on the step internally being incremental and
|
||||||
|
//! parallel. Note, though, that the `-j` parameter to rustbuild gets forwarded
|
||||||
|
//! to appropriate test harnesses and such.
|
||||||
|
//!
|
||||||
|
//! Most of the "meaty" steps that matter are backed by Cargo, which does indeed
|
||||||
|
//! have its own parallelism and incremental management. Later steps, like
|
||||||
|
//! tests, aren't incremental and simply run the entire suite currently.
|
||||||
|
//!
|
||||||
|
//! When you execute `x.py build`, the steps which are executed are:
|
||||||
|
//!
|
||||||
|
//! * First, the python script is run. This will automatically download the
|
||||||
|
//! stage0 rustc and cargo according to `src/stage0.txt`, or using the cached
|
||||||
|
//! versions if they're available. These are then used to compile rustbuild
|
||||||
|
//! itself (using Cargo). Finally, control is then transferred to rustbuild.
|
||||||
|
//!
|
||||||
|
//! * Rustbuild takes over, performs sanity checks, probes the environment,
|
||||||
|
//! reads configuration, builds up a list of steps, and then starts executing
|
||||||
|
//! them.
|
||||||
|
//!
|
||||||
|
//! * The stage0 libstd is compiled
|
||||||
|
//! * The stage0 libtest is compiled
|
||||||
|
//! * The stage0 librustc is compiled
|
||||||
|
//! * The stage1 compiler is assembled
|
||||||
|
//! * The stage1 libstd, libtest, librustc are compiled
|
||||||
|
//! * The stage2 compiler is assembled
|
||||||
|
//! * The stage2 libstd, libtest, librustc are compiled
|
||||||
|
//!
|
||||||
|
//! Each step is driven by a separate Cargo project and rustbuild orchestrates
|
||||||
|
//! copying files between steps and otherwise preparing for Cargo to run.
|
||||||
|
//!
|
||||||
|
//! ## Further information
|
||||||
|
//!
|
||||||
|
//! More documentation can be found in each respective module below, and you can
|
||||||
|
//! also check out the `src/bootstrap/README.md` file for more information.
|
||||||
|
|
||||||
extern crate build_helper;
|
extern crate build_helper;
|
||||||
extern crate cmake;
|
extern crate cmake;
|
||||||
extern crate filetime;
|
extern crate filetime;
|
||||||
extern crate gcc;
|
extern crate gcc;
|
||||||
extern crate getopts;
|
extern crate getopts;
|
||||||
extern crate md5;
|
|
||||||
extern crate num_cpus;
|
extern crate num_cpus;
|
||||||
extern crate rustc_serialize;
|
extern crate rustc_serialize;
|
||||||
extern crate toml;
|
extern crate toml;
|
||||||
|
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::env;
|
use std::env;
|
||||||
|
use std::ffi::OsString;
|
||||||
use std::fs::{self, File};
|
use std::fs::{self, File};
|
||||||
use std::path::{Component, PathBuf, Path};
|
use std::path::{Component, PathBuf, Path};
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
@ -120,8 +167,6 @@ pub struct Build {
|
||||||
version: String,
|
version: String,
|
||||||
package_vers: String,
|
package_vers: String,
|
||||||
local_rebuild: bool,
|
local_rebuild: bool,
|
||||||
bootstrap_key: String,
|
|
||||||
bootstrap_key_stage0: String,
|
|
||||||
|
|
||||||
// Probed tools at runtime
|
// Probed tools at runtime
|
||||||
lldb_version: Option<String>,
|
lldb_version: Option<String>,
|
||||||
|
@ -131,6 +176,7 @@ pub struct Build {
|
||||||
cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
|
cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
|
||||||
cxx: HashMap<String, gcc::Tool>,
|
cxx: HashMap<String, gcc::Tool>,
|
||||||
crates: HashMap<String, Crate>,
|
crates: HashMap<String, Crate>,
|
||||||
|
is_sudo: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
@ -141,6 +187,7 @@ struct Crate {
|
||||||
doc_step: String,
|
doc_step: String,
|
||||||
build_step: String,
|
build_step: String,
|
||||||
test_step: String,
|
test_step: String,
|
||||||
|
bench_step: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The various "modes" of invoking Cargo.
|
/// The various "modes" of invoking Cargo.
|
||||||
|
@ -189,6 +236,16 @@ impl Build {
|
||||||
};
|
};
|
||||||
let local_rebuild = config.local_rebuild;
|
let local_rebuild = config.local_rebuild;
|
||||||
|
|
||||||
|
let is_sudo = match env::var_os("SUDO_USER") {
|
||||||
|
Some(sudo_user) => {
|
||||||
|
match env::var_os("USER") {
|
||||||
|
Some(user) => user != sudo_user,
|
||||||
|
None => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => false,
|
||||||
|
};
|
||||||
|
|
||||||
Build {
|
Build {
|
||||||
flags: flags,
|
flags: flags,
|
||||||
config: config,
|
config: config,
|
||||||
|
@ -204,14 +261,13 @@ impl Build {
|
||||||
ver_date: None,
|
ver_date: None,
|
||||||
version: String::new(),
|
version: String::new(),
|
||||||
local_rebuild: local_rebuild,
|
local_rebuild: local_rebuild,
|
||||||
bootstrap_key: String::new(),
|
|
||||||
bootstrap_key_stage0: String::new(),
|
|
||||||
package_vers: String::new(),
|
package_vers: String::new(),
|
||||||
cc: HashMap::new(),
|
cc: HashMap::new(),
|
||||||
cxx: HashMap::new(),
|
cxx: HashMap::new(),
|
||||||
crates: HashMap::new(),
|
crates: HashMap::new(),
|
||||||
lldb_version: None,
|
lldb_version: None,
|
||||||
lldb_python_dir: None,
|
lldb_python_dir: None,
|
||||||
|
is_sudo: is_sudo,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -418,7 +474,7 @@ impl Build {
|
||||||
// how the actual compiler itself is called.
|
// how the actual compiler itself is called.
|
||||||
//
|
//
|
||||||
// These variables are primarily all read by
|
// These variables are primarily all read by
|
||||||
// src/bootstrap/{rustc,rustdoc.rs}
|
// src/bootstrap/bin/{rustc.rs,rustdoc.rs}
|
||||||
cargo.env("RUSTC", self.out.join("bootstrap/debug/rustc"))
|
cargo.env("RUSTC", self.out.join("bootstrap/debug/rustc"))
|
||||||
.env("RUSTC_REAL", self.compiler_path(compiler))
|
.env("RUSTC_REAL", self.compiler_path(compiler))
|
||||||
.env("RUSTC_STAGE", stage.to_string())
|
.env("RUSTC_STAGE", stage.to_string())
|
||||||
|
@ -437,7 +493,9 @@ impl Build {
|
||||||
.env("RUSTDOC_REAL", self.rustdoc(compiler))
|
.env("RUSTDOC_REAL", self.rustdoc(compiler))
|
||||||
.env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
|
.env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
|
||||||
|
|
||||||
self.add_bootstrap_key(&mut cargo);
|
// Enable usage of unstable features
|
||||||
|
cargo.env("RUSTC_BOOTSTRAP", "1");
|
||||||
|
self.add_rust_test_threads(&mut cargo);
|
||||||
|
|
||||||
// Specify some various options for build scripts used throughout
|
// Specify some various options for build scripts used throughout
|
||||||
// the build.
|
// the build.
|
||||||
|
@ -449,6 +507,10 @@ impl Build {
|
||||||
.env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
|
.env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if self.config.channel == "nightly" && compiler.stage == 2 {
|
||||||
|
cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
// Environment variables *required* needed throughout the build
|
// Environment variables *required* needed throughout the build
|
||||||
//
|
//
|
||||||
// FIXME: should update code to not require this env var
|
// FIXME: should update code to not require this env var
|
||||||
|
@ -457,10 +519,11 @@ impl Build {
|
||||||
if self.config.verbose || self.flags.verbose {
|
if self.config.verbose || self.flags.verbose {
|
||||||
cargo.arg("-v");
|
cargo.arg("-v");
|
||||||
}
|
}
|
||||||
if self.config.rust_optimize {
|
// FIXME: cargo bench does not accept `--release`
|
||||||
|
if self.config.rust_optimize && cmd != "bench" {
|
||||||
cargo.arg("--release");
|
cargo.arg("--release");
|
||||||
}
|
}
|
||||||
if self.config.vendor {
|
if self.config.vendor || self.is_sudo {
|
||||||
cargo.arg("--frozen");
|
cargo.arg("--frozen");
|
||||||
}
|
}
|
||||||
return cargo
|
return cargo
|
||||||
|
@ -494,12 +557,30 @@ impl Build {
|
||||||
fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
|
fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
|
||||||
let mut cmd = Command::new(self.tool(&compiler, tool));
|
let mut cmd = Command::new(self.tool(&compiler, tool));
|
||||||
let host = compiler.host;
|
let host = compiler.host;
|
||||||
let paths = vec![
|
let mut paths = vec![
|
||||||
self.cargo_out(compiler, Mode::Libstd, host).join("deps"),
|
self.cargo_out(compiler, Mode::Libstd, host).join("deps"),
|
||||||
self.cargo_out(compiler, Mode::Libtest, host).join("deps"),
|
self.cargo_out(compiler, Mode::Libtest, host).join("deps"),
|
||||||
self.cargo_out(compiler, Mode::Librustc, host).join("deps"),
|
self.cargo_out(compiler, Mode::Librustc, host).join("deps"),
|
||||||
self.cargo_out(compiler, Mode::Tool, host).join("deps"),
|
self.cargo_out(compiler, Mode::Tool, host).join("deps"),
|
||||||
];
|
];
|
||||||
|
|
||||||
|
// On MSVC a tool may invoke a C compiler (e.g. compiletest in run-make
|
||||||
|
// mode) and that C compiler may need some extra PATH modification. Do
|
||||||
|
// so here.
|
||||||
|
if compiler.host.contains("msvc") {
|
||||||
|
let curpaths = env::var_os("PATH").unwrap_or(OsString::new());
|
||||||
|
let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
|
||||||
|
for &(ref k, ref v) in self.cc[compiler.host].0.env() {
|
||||||
|
if k != "PATH" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for path in env::split_paths(v) {
|
||||||
|
if !curpaths.contains(&path) {
|
||||||
|
paths.push(path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
add_lib_path(paths, &mut cmd);
|
add_lib_path(paths, &mut cmd);
|
||||||
return cmd
|
return cmd
|
||||||
}
|
}
|
||||||
|
@ -507,7 +588,7 @@ impl Build {
|
||||||
/// Get the space-separated set of activated features for the standard
|
/// Get the space-separated set of activated features for the standard
|
||||||
/// library.
|
/// library.
|
||||||
fn std_features(&self) -> String {
|
fn std_features(&self) -> String {
|
||||||
let mut features = String::new();
|
let mut features = "panic-unwind".to_string();
|
||||||
if self.config.debug_jemalloc {
|
if self.config.debug_jemalloc {
|
||||||
features.push_str(" debug-jemalloc");
|
features.push_str(" debug-jemalloc");
|
||||||
}
|
}
|
||||||
|
@ -653,12 +734,11 @@ impl Build {
|
||||||
add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
|
add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Adds the compiler's bootstrap key to the environment of `cmd`.
|
/// Adds the `RUST_TEST_THREADS` env var if necessary
|
||||||
fn add_bootstrap_key(&self, cmd: &mut Command) {
|
fn add_rust_test_threads(&self, cmd: &mut Command) {
|
||||||
cmd.env("RUSTC_BOOTSTRAP", "1");
|
if env::var_os("RUST_TEST_THREADS").is_none() {
|
||||||
// FIXME: Transitionary measure to bootstrap using the old bootstrap logic.
|
cmd.env("RUST_TEST_THREADS", self.jobs().to_string());
|
||||||
// Remove this once the bootstrap compiler uses the new login in Issue #36548.
|
}
|
||||||
cmd.env("RUSTC_BOOTSTRAP_KEY", "62b3e239");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the compiler's libdir where it stores the dynamic libraries that
|
/// Returns the compiler's libdir where it stores the dynamic libraries that
|
||||||
|
|
|
@ -70,6 +70,7 @@ fn build_krate(build: &mut Build, krate: &str) {
|
||||||
build_step: format!("build-crate-{}", package.name),
|
build_step: format!("build-crate-{}", package.name),
|
||||||
doc_step: format!("doc-crate-{}", package.name),
|
doc_step: format!("doc-crate-{}", package.name),
|
||||||
test_step: format!("test-crate-{}", package.name),
|
test_step: format!("test-crate-{}", package.name),
|
||||||
|
bench_step: format!("bench-crate-{}", package.name),
|
||||||
name: package.name,
|
name: package.name,
|
||||||
deps: Vec::new(),
|
deps: Vec::new(),
|
||||||
path: path,
|
path: path,
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
# Copyright 20126 The Rust Project Developers. See the COPYRIGHT
|
# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
||||||
# file at the top-level directory of this distribution and at
|
# file at the top-level directory of this distribution and at
|
||||||
# http://rust-lang.org/COPYRIGHT.
|
# http://rust-lang.org/COPYRIGHT.
|
||||||
#
|
#
|
||||||
|
@ -23,9 +23,14 @@ all:
|
||||||
$(Q)$(BOOTSTRAP) build $(BOOTSTRAP_ARGS)
|
$(Q)$(BOOTSTRAP) build $(BOOTSTRAP_ARGS)
|
||||||
$(Q)$(BOOTSTRAP) doc $(BOOTSTRAP_ARGS)
|
$(Q)$(BOOTSTRAP) doc $(BOOTSTRAP_ARGS)
|
||||||
|
|
||||||
# Don’t use $(Q) here, always show how to invoke the bootstrap script directly
|
|
||||||
help:
|
help:
|
||||||
$(BOOTSTRAP) --help
|
$(Q)echo 'Welcome to the rustbuild build system!'
|
||||||
|
$(Q)echo
|
||||||
|
$(Q)echo This makefile is a thin veneer over the ./x.py script located
|
||||||
|
$(Q)echo in this directory. To get the full power of the build system
|
||||||
|
$(Q)echo you can run x.py directly.
|
||||||
|
$(Q)echo
|
||||||
|
$(Q)echo To learn more run \`./x.py --help\`
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
$(Q)$(BOOTSTRAP) clean $(BOOTSTRAP_ARGS)
|
$(Q)$(BOOTSTRAP) clean $(BOOTSTRAP_ARGS)
|
||||||
|
@ -50,16 +55,17 @@ check-cargotest:
|
||||||
$(Q)$(BOOTSTRAP) test src/tools/cargotest $(BOOTSTRAP_ARGS)
|
$(Q)$(BOOTSTRAP) test src/tools/cargotest $(BOOTSTRAP_ARGS)
|
||||||
dist:
|
dist:
|
||||||
$(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS)
|
$(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS)
|
||||||
|
distcheck:
|
||||||
|
$(Q)$(BOOTSTRAP) test distcheck
|
||||||
install:
|
install:
|
||||||
ifeq (root user, $(USER) $(patsubst %,user,$(SUDO_USER)))
|
|
||||||
$(Q)echo "'sudo make install' is not supported currently."
|
|
||||||
else
|
|
||||||
$(Q)$(BOOTSTRAP) dist --install $(BOOTSTRAP_ARGS)
|
$(Q)$(BOOTSTRAP) dist --install $(BOOTSTRAP_ARGS)
|
||||||
endif
|
|
||||||
tidy:
|
tidy:
|
||||||
$(Q)$(BOOTSTRAP) test src/tools/tidy $(BOOTSTRAP_ARGS) --stage 0
|
$(Q)$(BOOTSTRAP) test src/tools/tidy $(BOOTSTRAP_ARGS) --stage 0
|
||||||
|
|
||||||
check-stage2-android:
|
check-stage2-T-arm-linux-androideabi-H-x86_64-unknown-linux-gnu:
|
||||||
$(Q)$(BOOTSTRAP) --step check-target --target arm-linux-androideabi
|
$(Q)$(BOOTSTRAP) test --target arm-linux-androideabi
|
||||||
|
check-stage2-T-x86_64-unknown-linux-musl-H-x86_64-unknown-linux-gnu:
|
||||||
|
$(Q)$(BOOTSTRAP) test --target x86_64-unknown-linux-gnu
|
||||||
|
|
||||||
|
|
||||||
.PHONY: dist
|
.PHONY: dist
|
||||||
|
|
|
@ -28,7 +28,7 @@ use cmake;
|
||||||
use gcc;
|
use gcc;
|
||||||
|
|
||||||
use Build;
|
use Build;
|
||||||
use util::up_to_date;
|
use util::{self, up_to_date};
|
||||||
|
|
||||||
/// Compile LLVM for `target`.
|
/// Compile LLVM for `target`.
|
||||||
pub fn llvm(build: &Build, target: &str) {
|
pub fn llvm(build: &Build, target: &str) {
|
||||||
|
@ -58,6 +58,7 @@ pub fn llvm(build: &Build, target: &str) {
|
||||||
|
|
||||||
println!("Building LLVM for {}", target);
|
println!("Building LLVM for {}", target);
|
||||||
|
|
||||||
|
let _time = util::timeit();
|
||||||
let _ = fs::remove_dir_all(&dst.join("build"));
|
let _ = fs::remove_dir_all(&dst.join("build"));
|
||||||
t!(fs::create_dir_all(&dst.join("build")));
|
t!(fs::create_dir_all(&dst.join("build")));
|
||||||
let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
|
let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
|
||||||
|
@ -158,6 +159,17 @@ pub fn test_helpers(build: &Build, target: &str) {
|
||||||
println!("Building test helpers");
|
println!("Building test helpers");
|
||||||
t!(fs::create_dir_all(&dst));
|
t!(fs::create_dir_all(&dst));
|
||||||
let mut cfg = gcc::Config::new();
|
let mut cfg = gcc::Config::new();
|
||||||
|
|
||||||
|
// We may have found various cross-compilers a little differently due to our
|
||||||
|
// extra configuration, so inform gcc of these compilers. Note, though, that
|
||||||
|
// on MSVC we still need gcc's detection of env vars (ugh).
|
||||||
|
if !target.contains("msvc") {
|
||||||
|
if let Some(ar) = build.ar(target) {
|
||||||
|
cfg.archiver(ar);
|
||||||
|
}
|
||||||
|
cfg.compiler(build.cc(target));
|
||||||
|
}
|
||||||
|
|
||||||
cfg.cargo_metadata(false)
|
cfg.cargo_metadata(false)
|
||||||
.out_dir(&dst)
|
.out_dir(&dst)
|
||||||
.target(target)
|
.target(target)
|
||||||
|
|
|
@ -41,10 +41,14 @@ pub fn check(build: &mut Build) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let have_cmd = |cmd: &OsStr| {
|
let have_cmd = |cmd: &OsStr| {
|
||||||
for path in env::split_paths(&path).map(|p| p.join(cmd)) {
|
for path in env::split_paths(&path) {
|
||||||
if fs::metadata(&path).is_ok() ||
|
let target = path.join(cmd);
|
||||||
fs::metadata(path.with_extension("exe")).is_ok() {
|
let mut cmd_alt = cmd.to_os_string();
|
||||||
return Some(path);
|
cmd_alt.push(".exe");
|
||||||
|
if target.exists() ||
|
||||||
|
target.with_extension("exe").exists() ||
|
||||||
|
target.join(cmd_alt).exists() {
|
||||||
|
return Some(target);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return None;
|
return None;
|
||||||
|
|
|
@ -8,10 +8,28 @@
|
||||||
// option. This file may not be copied, modified, or distributed
|
// option. This file may not be copied, modified, or distributed
|
||||||
// except according to those terms.
|
// except according to those terms.
|
||||||
|
|
||||||
|
//! Definition of steps of the build system.
|
||||||
|
//!
|
||||||
|
//! This is where some of the real meat of rustbuild is located, in how we
|
||||||
|
//! define targets and the dependencies amongst them. This file can sort of be
|
||||||
|
//! viewed as just defining targets in a makefile which shell out to predefined
|
||||||
|
//! functions elsewhere about how to execute the target.
|
||||||
|
//!
|
||||||
|
//! The primary function here you're likely interested in is the `build_rules`
|
||||||
|
//! function. This will create a `Rules` structure which basically just lists
|
||||||
|
//! everything that rustbuild can do. Each rule has a human-readable name, a
|
||||||
|
//! path associated with it, some dependencies, and then a closure of how to
|
||||||
|
//! actually perform the rule.
|
||||||
|
//!
|
||||||
|
//! All steps below are defined in self-contained units, so adding a new target
|
||||||
|
//! to the build system should just involve adding the meta information here
|
||||||
|
//! along with the actual implementation elsewhere. You can find more comments
|
||||||
|
//! about how to define rules themselves below.
|
||||||
|
|
||||||
use std::collections::{HashMap, HashSet};
|
use std::collections::{HashMap, HashSet};
|
||||||
use std::mem;
|
use std::mem;
|
||||||
|
|
||||||
use check;
|
use check::{self, TestKind};
|
||||||
use compile;
|
use compile;
|
||||||
use dist;
|
use dist;
|
||||||
use doc;
|
use doc;
|
||||||
|
@ -20,36 +38,6 @@ use install;
|
||||||
use native;
|
use native;
|
||||||
use {Compiler, Build, Mode};
|
use {Compiler, Build, Mode};
|
||||||
|
|
||||||
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
|
||||||
struct Step<'a> {
|
|
||||||
name: &'a str,
|
|
||||||
stage: u32,
|
|
||||||
host: &'a str,
|
|
||||||
target: &'a str,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Step<'a> {
|
|
||||||
fn name(&self, name: &'a str) -> Step<'a> {
|
|
||||||
Step { name: name, ..*self }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn stage(&self, stage: u32) -> Step<'a> {
|
|
||||||
Step { stage: stage, ..*self }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn host(&self, host: &'a str) -> Step<'a> {
|
|
||||||
Step { host: host, ..*self }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn target(&self, target: &'a str) -> Step<'a> {
|
|
||||||
Step { target: target, ..*self }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn compiler(&self) -> Compiler<'a> {
|
|
||||||
Compiler::new(self.stage, self.host)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn run(build: &Build) {
|
pub fn run(build: &Build) {
|
||||||
let rules = build_rules(build);
|
let rules = build_rules(build);
|
||||||
let steps = rules.plan();
|
let steps = rules.plan();
|
||||||
|
@ -57,14 +45,91 @@ pub fn run(build: &Build) {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn build_rules(build: &Build) -> Rules {
|
pub fn build_rules(build: &Build) -> Rules {
|
||||||
let mut rules: Rules = Rules::new(build);
|
let mut rules = Rules::new(build);
|
||||||
|
|
||||||
|
// This is the first rule that we're going to define for rustbuild, which is
|
||||||
|
// used to compile LLVM itself. All rules are added through the `rules`
|
||||||
|
// structure created above and are configured through a builder-style
|
||||||
|
// interface.
|
||||||
|
//
|
||||||
|
// First up we see the `build` method. This represents a rule that's part of
|
||||||
|
// the top-level `build` subcommand. For example `./x.py build` is what this
|
||||||
|
// is associating with. Note that this is normally only relevant if you flag
|
||||||
|
// a rule as `default`, which we'll talk about later.
|
||||||
|
//
|
||||||
|
// Next up we'll see two arguments to this method:
|
||||||
|
//
|
||||||
|
// * `llvm` - this is the "human readable" name of this target. This name is
|
||||||
|
// not accessed anywhere outside this file itself (e.g. not in
|
||||||
|
// the CLI nor elsewhere in rustbuild). The purpose of this is to
|
||||||
|
// easily define dependencies between rules. That is, other rules
|
||||||
|
// will depend on this with the name "llvm".
|
||||||
|
// * `src/llvm` - this is the relevant path to the rule that we're working
|
||||||
|
// with. This path is the engine behind how commands like
|
||||||
|
// `./x.py build src/llvm` work. This should typically point
|
||||||
|
// to the relevant component, but if there's not really a
|
||||||
|
// path to be assigned here you can pass something like
|
||||||
|
// `path/to/nowhere` to ignore it.
|
||||||
|
//
|
||||||
|
// After we create the rule with the `build` method we can then configure
|
||||||
|
// various aspects of it. For example this LLVM rule uses `.host(true)` to
|
||||||
|
// flag that it's a rule only for host targets. In other words, LLVM isn't
|
||||||
|
// compiled for targets configured through `--target` (e.g. those we're just
|
||||||
|
// building a standard library for).
|
||||||
|
//
|
||||||
|
// Next up the `dep` method will add a dependency to this rule. The closure
|
||||||
|
// is yielded the step that represents executing the `llvm` rule itself
|
||||||
|
// (containing information like stage, host, target, ...) and then it must
|
||||||
|
// return a target that the step depends on. Here LLVM is actually
|
||||||
|
// interesting where a cross-compiled LLVM depends on the host LLVM, but
|
||||||
|
// otherwise it has no dependencies.
|
||||||
|
//
|
||||||
|
// To handle this we do a bit of dynamic dispatch to see what the dependency
|
||||||
|
// is. If we're building a LLVM for the build triple, then we don't actually
|
||||||
|
// have any dependencies! To do that we return a dependency on the "dummy"
|
||||||
|
// target which does nothing.
|
||||||
|
//
|
||||||
|
// If we're build a cross-compiled LLVM, however, we need to assemble the
|
||||||
|
// libraries from the previous compiler. This step has the same name as
|
||||||
|
// ours (llvm) but we want it for a different target, so we use the
|
||||||
|
// builder-style methods on `Step` to configure this target to the build
|
||||||
|
// triple.
|
||||||
|
//
|
||||||
|
// Finally, to finish off this rule, we define how to actually execute it.
|
||||||
|
// That logic is all defined in the `native` module so we just delegate to
|
||||||
|
// the relevant function there. The argument to the closure passed to `run`
|
||||||
|
// is a `Step` (defined below) which encapsulates information like the
|
||||||
|
// stage, target, host, etc.
|
||||||
|
rules.build("llvm", "src/llvm")
|
||||||
|
.host(true)
|
||||||
|
.dep(move |s| {
|
||||||
|
if s.target == build.config.build {
|
||||||
|
dummy(s, build)
|
||||||
|
} else {
|
||||||
|
s.target(&build.config.build)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.run(move |s| native::llvm(build, s.target));
|
||||||
|
|
||||||
|
// Ok! After that example rule that's hopefully enough to explain what's
|
||||||
|
// going on here. You can check out the API docs below and also see a bunch
|
||||||
|
// more examples of rules directly below as well.
|
||||||
|
|
||||||
// dummy rule to do nothing, useful when a dep maps to no deps
|
// dummy rule to do nothing, useful when a dep maps to no deps
|
||||||
rules.build("dummy", "path/to/nowhere");
|
rules.build("dummy", "path/to/nowhere");
|
||||||
fn dummy<'a>(s: &Step<'a>, build: &'a Build) -> Step<'a> {
|
|
||||||
s.name("dummy").stage(0)
|
// the compiler with no target libraries ready to go
|
||||||
.target(&build.config.build)
|
rules.build("rustc", "src/rustc")
|
||||||
|
.dep(move |s| {
|
||||||
|
if s.stage == 0 {
|
||||||
|
dummy(s, build)
|
||||||
|
} else {
|
||||||
|
s.name("librustc")
|
||||||
.host(&build.config.build)
|
.host(&build.config.build)
|
||||||
|
.stage(s.stage - 1)
|
||||||
}
|
}
|
||||||
|
})
|
||||||
|
.run(move |s| compile::assemble_rustc(build, s.stage, s.target));
|
||||||
|
|
||||||
// Helper for loading an entire DAG of crates, rooted at `name`
|
// Helper for loading an entire DAG of crates, rooted at `name`
|
||||||
let krates = |name: &str| {
|
let krates = |name: &str| {
|
||||||
|
@ -85,21 +150,6 @@ pub fn build_rules(build: &Build) -> Rules {
|
||||||
return ret
|
return ret
|
||||||
};
|
};
|
||||||
|
|
||||||
rules.build("rustc", "path/to/nowhere")
|
|
||||||
.dep(move |s| {
|
|
||||||
if s.stage == 0 {
|
|
||||||
dummy(s, build)
|
|
||||||
} else {
|
|
||||||
s.name("librustc")
|
|
||||||
.host(&build.config.build)
|
|
||||||
.stage(s.stage - 1)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.run(move |s| compile::assemble_rustc(build, s.stage, s.target));
|
|
||||||
rules.build("llvm", "src/llvm")
|
|
||||||
.host(true)
|
|
||||||
.run(move |s| native::llvm(build, s.target));
|
|
||||||
|
|
||||||
// ========================================================================
|
// ========================================================================
|
||||||
// Crate compilations
|
// Crate compilations
|
||||||
//
|
//
|
||||||
|
@ -268,37 +318,55 @@ pub fn build_rules(build: &Build) -> Rules {
|
||||||
rules.test(&krate.test_step, path)
|
rules.test(&krate.test_step, path)
|
||||||
.dep(|s| s.name("libtest"))
|
.dep(|s| s.name("libtest"))
|
||||||
.run(move |s| check::krate(build, &s.compiler(), s.target,
|
.run(move |s| check::krate(build, &s.compiler(), s.target,
|
||||||
Mode::Libstd, Some(&krate.name)));
|
Mode::Libstd, TestKind::Test,
|
||||||
|
Some(&krate.name)));
|
||||||
}
|
}
|
||||||
rules.test("check-std-all", "path/to/nowhere")
|
rules.test("check-std-all", "path/to/nowhere")
|
||||||
.dep(|s| s.name("libtest"))
|
.dep(|s| s.name("libtest"))
|
||||||
.default(true)
|
.default(true)
|
||||||
.run(move |s| check::krate(build, &s.compiler(), s.target, Mode::Libstd,
|
.run(move |s| check::krate(build, &s.compiler(), s.target,
|
||||||
None));
|
Mode::Libstd, TestKind::Test, None));
|
||||||
|
|
||||||
|
// std benchmarks
|
||||||
|
for (krate, path, _default) in krates("std_shim") {
|
||||||
|
rules.bench(&krate.bench_step, path)
|
||||||
|
.dep(|s| s.name("libtest"))
|
||||||
|
.run(move |s| check::krate(build, &s.compiler(), s.target,
|
||||||
|
Mode::Libstd, TestKind::Bench,
|
||||||
|
Some(&krate.name)));
|
||||||
|
}
|
||||||
|
rules.bench("bench-std-all", "path/to/nowhere")
|
||||||
|
.dep(|s| s.name("libtest"))
|
||||||
|
.default(true)
|
||||||
|
.run(move |s| check::krate(build, &s.compiler(), s.target,
|
||||||
|
Mode::Libstd, TestKind::Bench, None));
|
||||||
|
|
||||||
for (krate, path, _default) in krates("test_shim") {
|
for (krate, path, _default) in krates("test_shim") {
|
||||||
rules.test(&krate.test_step, path)
|
rules.test(&krate.test_step, path)
|
||||||
.dep(|s| s.name("libtest"))
|
.dep(|s| s.name("libtest"))
|
||||||
.run(move |s| check::krate(build, &s.compiler(), s.target,
|
.run(move |s| check::krate(build, &s.compiler(), s.target,
|
||||||
Mode::Libtest, Some(&krate.name)));
|
Mode::Libtest, TestKind::Test,
|
||||||
|
Some(&krate.name)));
|
||||||
}
|
}
|
||||||
rules.test("check-test-all", "path/to/nowhere")
|
rules.test("check-test-all", "path/to/nowhere")
|
||||||
.dep(|s| s.name("libtest"))
|
.dep(|s| s.name("libtest"))
|
||||||
.default(true)
|
.default(true)
|
||||||
.run(move |s| check::krate(build, &s.compiler(), s.target, Mode::Libtest,
|
.run(move |s| check::krate(build, &s.compiler(), s.target,
|
||||||
None));
|
Mode::Libtest, TestKind::Test, None));
|
||||||
for (krate, path, _default) in krates("rustc-main") {
|
for (krate, path, _default) in krates("rustc-main") {
|
||||||
rules.test(&krate.test_step, path)
|
rules.test(&krate.test_step, path)
|
||||||
.dep(|s| s.name("librustc"))
|
.dep(|s| s.name("librustc"))
|
||||||
.host(true)
|
.host(true)
|
||||||
.run(move |s| check::krate(build, &s.compiler(), s.target,
|
.run(move |s| check::krate(build, &s.compiler(), s.target,
|
||||||
Mode::Librustc, Some(&krate.name)));
|
Mode::Librustc, TestKind::Test,
|
||||||
|
Some(&krate.name)));
|
||||||
}
|
}
|
||||||
rules.test("check-rustc-all", "path/to/nowhere")
|
rules.test("check-rustc-all", "path/to/nowhere")
|
||||||
.dep(|s| s.name("librustc"))
|
.dep(|s| s.name("librustc"))
|
||||||
.default(true)
|
.default(true)
|
||||||
.host(true)
|
.host(true)
|
||||||
.run(move |s| check::krate(build, &s.compiler(), s.target, Mode::Librustc,
|
.run(move |s| check::krate(build, &s.compiler(), s.target,
|
||||||
None));
|
Mode::Librustc, TestKind::Test, None));
|
||||||
|
|
||||||
rules.test("check-linkchecker", "src/tools/linkchecker")
|
rules.test("check-linkchecker", "src/tools/linkchecker")
|
||||||
.dep(|s| s.name("tool-linkchecker"))
|
.dep(|s| s.name("tool-linkchecker"))
|
||||||
|
@ -312,10 +380,10 @@ pub fn build_rules(build: &Build) -> Rules {
|
||||||
.host(true)
|
.host(true)
|
||||||
.run(move |s| check::cargotest(build, s.stage, s.target));
|
.run(move |s| check::cargotest(build, s.stage, s.target));
|
||||||
rules.test("check-tidy", "src/tools/tidy")
|
rules.test("check-tidy", "src/tools/tidy")
|
||||||
.dep(|s| s.name("tool-tidy"))
|
.dep(|s| s.name("tool-tidy").stage(0))
|
||||||
.default(true)
|
.default(true)
|
||||||
.host(true)
|
.host(true)
|
||||||
.run(move |s| check::tidy(build, s.stage, s.target));
|
.run(move |s| check::tidy(build, 0, s.target));
|
||||||
rules.test("check-error-index", "src/tools/error_index_generator")
|
rules.test("check-error-index", "src/tools/error_index_generator")
|
||||||
.dep(|s| s.name("libstd"))
|
.dep(|s| s.name("libstd"))
|
||||||
.dep(|s| s.name("tool-error-index").host(s.host))
|
.dep(|s| s.name("tool-error-index").host(s.host))
|
||||||
|
@ -327,6 +395,10 @@ pub fn build_rules(build: &Build) -> Rules {
|
||||||
.default(true)
|
.default(true)
|
||||||
.host(true)
|
.host(true)
|
||||||
.run(move |s| check::docs(build, &s.compiler()));
|
.run(move |s| check::docs(build, &s.compiler()));
|
||||||
|
rules.test("check-distcheck", "distcheck")
|
||||||
|
.dep(|s| s.name("dist-src"))
|
||||||
|
.run(move |_| check::distcheck(build));
|
||||||
|
|
||||||
|
|
||||||
rules.build("test-helpers", "src/rt/rust_test_helpers.c")
|
rules.build("test-helpers", "src/rt/rust_test_helpers.c")
|
||||||
.run(move |s| native::test_helpers(build, s.target));
|
.run(move |s| native::test_helpers(build, s.target));
|
||||||
|
@ -427,21 +499,98 @@ pub fn build_rules(build: &Build) -> Rules {
|
||||||
.default(true)
|
.default(true)
|
||||||
.dep(|s| s.name("default:doc"))
|
.dep(|s| s.name("default:doc"))
|
||||||
.run(move |s| dist::docs(build, s.stage, s.target));
|
.run(move |s| dist::docs(build, s.stage, s.target));
|
||||||
|
rules.dist("dist-analysis", "src/libstd")
|
||||||
|
.dep(|s| s.name("dist-std"))
|
||||||
|
.default(true)
|
||||||
|
.run(move |s| dist::analysis(build, &s.compiler(), s.target));
|
||||||
rules.dist("install", "src")
|
rules.dist("install", "src")
|
||||||
.dep(|s| s.name("default:dist"))
|
.dep(|s| s.name("default:dist"))
|
||||||
.run(move |s| install::install(build, s.stage, s.target));
|
.run(move |s| install::install(build, s.stage, s.target));
|
||||||
|
|
||||||
rules.verify();
|
rules.verify();
|
||||||
return rules
|
return rules;
|
||||||
|
|
||||||
|
fn dummy<'a>(s: &Step<'a>, build: &'a Build) -> Step<'a> {
|
||||||
|
s.name("dummy").stage(0)
|
||||||
|
.target(&build.config.build)
|
||||||
|
.host(&build.config.build)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
|
||||||
|
struct Step<'a> {
|
||||||
|
/// Human readable name of the rule this step is executing. Possible names
|
||||||
|
/// are all defined above in `build_rules`.
|
||||||
|
name: &'a str,
|
||||||
|
|
||||||
|
/// The stage this step is executing in. This is typically 0, 1, or 2.
|
||||||
|
stage: u32,
|
||||||
|
|
||||||
|
/// This step will likely involve a compiler, and the target that compiler
|
||||||
|
/// itself is built for is called the host, this variable. Typically this is
|
||||||
|
/// the target of the build machine itself.
|
||||||
|
host: &'a str,
|
||||||
|
|
||||||
|
/// The target that this step represents generating. If you're building a
|
||||||
|
/// standard library for a new suite of targets, for example, this'll be set
|
||||||
|
/// to those targets.
|
||||||
|
target: &'a str,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Step<'a> {
|
||||||
|
/// Creates a new step which is the same as this, except has a new name.
|
||||||
|
fn name(&self, name: &'a str) -> Step<'a> {
|
||||||
|
Step { name: name, ..*self }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new step which is the same as this, except has a new stage.
|
||||||
|
fn stage(&self, stage: u32) -> Step<'a> {
|
||||||
|
Step { stage: stage, ..*self }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new step which is the same as this, except has a new host.
|
||||||
|
fn host(&self, host: &'a str) -> Step<'a> {
|
||||||
|
Step { host: host, ..*self }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new step which is the same as this, except has a new target.
|
||||||
|
fn target(&self, target: &'a str) -> Step<'a> {
|
||||||
|
Step { target: target, ..*self }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the `Compiler` structure that this step corresponds to.
|
||||||
|
fn compiler(&self) -> Compiler<'a> {
|
||||||
|
Compiler::new(self.stage, self.host)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
struct Rule<'a> {
|
struct Rule<'a> {
|
||||||
|
/// The human readable name of this target, defined in `build_rules`.
|
||||||
name: &'a str,
|
name: &'a str,
|
||||||
|
|
||||||
|
/// The path associated with this target, used in the `./x.py` driver for
|
||||||
|
/// easy and ergonomic specification of what to do.
|
||||||
path: &'a str,
|
path: &'a str,
|
||||||
|
|
||||||
|
/// The "kind" of top-level command that this rule is associated with, only
|
||||||
|
/// relevant if this is a default rule.
|
||||||
kind: Kind,
|
kind: Kind,
|
||||||
|
|
||||||
|
/// List of dependencies this rule has. Each dependency is a function from a
|
||||||
|
/// step that's being executed to another step that should be executed.
|
||||||
deps: Vec<Box<Fn(&Step<'a>) -> Step<'a> + 'a>>,
|
deps: Vec<Box<Fn(&Step<'a>) -> Step<'a> + 'a>>,
|
||||||
|
|
||||||
|
/// How to actually execute this rule. Takes a step with contextual
|
||||||
|
/// information and then executes it.
|
||||||
run: Box<Fn(&Step<'a>) + 'a>,
|
run: Box<Fn(&Step<'a>) + 'a>,
|
||||||
|
|
||||||
|
/// Whether or not this is a "default" rule. That basically means that if
|
||||||
|
/// you run, for example, `./x.py test` whether it's included or not.
|
||||||
default: bool,
|
default: bool,
|
||||||
|
|
||||||
|
/// Whether or not this is a "host" rule, or in other words whether this is
|
||||||
|
/// only intended for compiler hosts and not for targets that are being
|
||||||
|
/// generated.
|
||||||
host: bool,
|
host: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -449,6 +598,7 @@ struct Rule<'a> {
|
||||||
enum Kind {
|
enum Kind {
|
||||||
Build,
|
Build,
|
||||||
Test,
|
Test,
|
||||||
|
Bench,
|
||||||
Dist,
|
Dist,
|
||||||
Doc,
|
Doc,
|
||||||
}
|
}
|
||||||
|
@ -467,6 +617,8 @@ impl<'a> Rule<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Builder pattern returned from the various methods on `Rules` which will add
|
||||||
|
/// the rule to the internal list on `Drop`.
|
||||||
struct RuleBuilder<'a: 'b, 'b> {
|
struct RuleBuilder<'a: 'b, 'b> {
|
||||||
rules: &'b mut Rules<'a>,
|
rules: &'b mut Rules<'a>,
|
||||||
rule: Rule<'a>,
|
rule: Rule<'a>,
|
||||||
|
@ -528,21 +680,35 @@ impl<'a> Rules<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Creates a new rule of `Kind::Build` with the specified human readable
|
||||||
|
/// name and path associated with it.
|
||||||
|
///
|
||||||
|
/// The builder returned should be configured further with information such
|
||||||
|
/// as how to actually run this rule.
|
||||||
fn build<'b>(&'b mut self, name: &'a str, path: &'a str)
|
fn build<'b>(&'b mut self, name: &'a str, path: &'a str)
|
||||||
-> RuleBuilder<'a, 'b> {
|
-> RuleBuilder<'a, 'b> {
|
||||||
self.rule(name, path, Kind::Build)
|
self.rule(name, path, Kind::Build)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Same as `build`, but for `Kind::Test`.
|
||||||
fn test<'b>(&'b mut self, name: &'a str, path: &'a str)
|
fn test<'b>(&'b mut self, name: &'a str, path: &'a str)
|
||||||
-> RuleBuilder<'a, 'b> {
|
-> RuleBuilder<'a, 'b> {
|
||||||
self.rule(name, path, Kind::Test)
|
self.rule(name, path, Kind::Test)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Same as `build`, but for `Kind::Bench`.
|
||||||
|
fn bench<'b>(&'b mut self, name: &'a str, path: &'a str)
|
||||||
|
-> RuleBuilder<'a, 'b> {
|
||||||
|
self.rule(name, path, Kind::Bench)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Same as `build`, but for `Kind::Doc`.
|
||||||
fn doc<'b>(&'b mut self, name: &'a str, path: &'a str)
|
fn doc<'b>(&'b mut self, name: &'a str, path: &'a str)
|
||||||
-> RuleBuilder<'a, 'b> {
|
-> RuleBuilder<'a, 'b> {
|
||||||
self.rule(name, path, Kind::Doc)
|
self.rule(name, path, Kind::Doc)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Same as `build`, but for `Kind::Dist`.
|
||||||
fn dist<'b>(&'b mut self, name: &'a str, path: &'a str)
|
fn dist<'b>(&'b mut self, name: &'a str, path: &'a str)
|
||||||
-> RuleBuilder<'a, 'b> {
|
-> RuleBuilder<'a, 'b> {
|
||||||
self.rule(name, path, Kind::Dist)
|
self.rule(name, path, Kind::Dist)
|
||||||
|
@ -583,6 +749,7 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
|
||||||
"build" => Kind::Build,
|
"build" => Kind::Build,
|
||||||
"doc" => Kind::Doc,
|
"doc" => Kind::Doc,
|
||||||
"test" => Kind::Test,
|
"test" => Kind::Test,
|
||||||
|
"bench" => Kind::Bench,
|
||||||
"dist" => Kind::Dist,
|
"dist" => Kind::Dist,
|
||||||
_ => return,
|
_ => return,
|
||||||
};
|
};
|
||||||
|
@ -602,10 +769,36 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
|
||||||
/// Construct the top-level build steps that we're going to be executing,
|
/// Construct the top-level build steps that we're going to be executing,
|
||||||
/// given the subcommand that our build is performing.
|
/// given the subcommand that our build is performing.
|
||||||
fn plan(&self) -> Vec<Step<'a>> {
|
fn plan(&self) -> Vec<Step<'a>> {
|
||||||
|
// Ok, the logic here is pretty subtle, and involves quite a few
|
||||||
|
// conditionals. The basic idea here is to:
|
||||||
|
//
|
||||||
|
// 1. First, filter all our rules to the relevant ones. This means that
|
||||||
|
// the command specified corresponds to one of our `Kind` variants,
|
||||||
|
// and we filter all rules based on that.
|
||||||
|
//
|
||||||
|
// 2. Next, we determine which rules we're actually executing. If a
|
||||||
|
// number of path filters were specified on the command line we look
|
||||||
|
// for those, otherwise we look for anything tagged `default`.
|
||||||
|
//
|
||||||
|
// 3. Finally, we generate some steps with host and target information.
|
||||||
|
//
|
||||||
|
// The last step is by far the most complicated and subtle. The basic
|
||||||
|
// thinking here is that we want to take the cartesian product of
|
||||||
|
// specified hosts and targets and build rules with that. The list of
|
||||||
|
// hosts and targets, if not specified, come from the how this build was
|
||||||
|
// configured. If the rule we're looking at is a host-only rule the we
|
||||||
|
// ignore the list of targets and instead consider the list of hosts
|
||||||
|
// also the list of targets.
|
||||||
|
//
|
||||||
|
// Once the host and target lists are generated we take the cartesian
|
||||||
|
// product of the two and then create a step based off them. Note that
|
||||||
|
// the stage each step is associated was specified with the `--step`
|
||||||
|
// flag on the command line.
|
||||||
let (kind, paths) = match self.build.flags.cmd {
|
let (kind, paths) = match self.build.flags.cmd {
|
||||||
Subcommand::Build { ref paths } => (Kind::Build, &paths[..]),
|
Subcommand::Build { ref paths } => (Kind::Build, &paths[..]),
|
||||||
Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
|
Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
|
||||||
Subcommand::Test { ref paths, test_args: _ } => (Kind::Test, &paths[..]),
|
Subcommand::Test { ref paths, test_args: _ } => (Kind::Test, &paths[..]),
|
||||||
|
Subcommand::Bench { ref paths, test_args: _ } => (Kind::Bench, &paths[..]),
|
||||||
Subcommand::Dist { install } => {
|
Subcommand::Dist { install } => {
|
||||||
if install {
|
if install {
|
||||||
return vec![self.sbuild.name("install")]
|
return vec![self.sbuild.name("install")]
|
||||||
|
@ -631,7 +824,18 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
|
||||||
} else {
|
} else {
|
||||||
&self.build.config.target
|
&self.build.config.target
|
||||||
};
|
};
|
||||||
let arr = if rule.host {hosts} else {targets};
|
// If --target was specified but --host wasn't specified, don't run
|
||||||
|
// any host-only tests
|
||||||
|
let arr = if rule.host {
|
||||||
|
if self.build.flags.target.len() > 0 &&
|
||||||
|
self.build.flags.host.len() == 0 {
|
||||||
|
&hosts[..0]
|
||||||
|
} else {
|
||||||
|
hosts
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
targets
|
||||||
|
};
|
||||||
|
|
||||||
hosts.iter().flat_map(move |host| {
|
hosts.iter().flat_map(move |host| {
|
||||||
arr.iter().map(move |target| {
|
arr.iter().map(move |target| {
|
||||||
|
@ -672,6 +876,15 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Performs topological sort of dependencies rooted at the `step`
|
||||||
|
/// specified, pushing all results onto the `order` vector provided.
|
||||||
|
///
|
||||||
|
/// In other words, when this method returns, the `order` vector will
|
||||||
|
/// contain a list of steps which if executed in order will eventually
|
||||||
|
/// complete the `step` specified as well.
|
||||||
|
///
|
||||||
|
/// The `added` set specified here is the set of steps that are already
|
||||||
|
/// present in `order` (and hence don't need to be added again).
|
||||||
fn fill(&self,
|
fn fill(&self,
|
||||||
step: Step<'a>,
|
step: Step<'a>,
|
||||||
order: &mut Vec<Step<'a>>,
|
order: &mut Vec<Step<'a>>,
|
||||||
|
|
|
@ -18,6 +18,7 @@ use std::ffi::OsString;
|
||||||
use std::fs;
|
use std::fs;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
use std::time::Instant;
|
||||||
|
|
||||||
use filetime::FileTime;
|
use filetime::FileTime;
|
||||||
|
|
||||||
|
@ -189,3 +190,19 @@ pub fn push_exe_path(mut buf: PathBuf, components: &[&str]) -> PathBuf {
|
||||||
|
|
||||||
buf
|
buf
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct TimeIt(Instant);
|
||||||
|
|
||||||
|
/// Returns an RAII structure that prints out how long it took to drop.
|
||||||
|
pub fn timeit() -> TimeIt {
|
||||||
|
TimeIt(Instant::now())
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for TimeIt {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
let time = self.0.elapsed();
|
||||||
|
println!("\tfinished in {}.{:03}",
|
||||||
|
time.as_secs(),
|
||||||
|
time.subsec_nanos() / 1_000_000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -21,7 +21,8 @@ pub fn run(cmd: &mut Command) {
|
||||||
pub fn run_silent(cmd: &mut Command) {
|
pub fn run_silent(cmd: &mut Command) {
|
||||||
let status = match cmd.status() {
|
let status = match cmd.status() {
|
||||||
Ok(status) => status,
|
Ok(status) => status,
|
||||||
Err(e) => fail(&format!("failed to execute command: {}", e)),
|
Err(e) => fail(&format!("failed to execute command: {:?}\nerror: {}",
|
||||||
|
cmd, e)),
|
||||||
};
|
};
|
||||||
if !status.success() {
|
if !status.success() {
|
||||||
fail(&format!("command did not execute successfully: {:?}\n\
|
fail(&format!("command did not execute successfully: {:?}\n\
|
||||||
|
@ -63,7 +64,8 @@ pub fn cc2ar(cc: &Path, target: &str) -> Option<PathBuf> {
|
||||||
pub fn output(cmd: &mut Command) -> String {
|
pub fn output(cmd: &mut Command) -> String {
|
||||||
let output = match cmd.stderr(Stdio::inherit()).output() {
|
let output = match cmd.stderr(Stdio::inherit()).output() {
|
||||||
Ok(status) => status,
|
Ok(status) => status,
|
||||||
Err(e) => fail(&format!("failed to execute command: {}", e)),
|
Err(e) => fail(&format!("failed to execute command: {:?}\nerror: {}",
|
||||||
|
cmd, e)),
|
||||||
};
|
};
|
||||||
if !output.status.success() {
|
if !output.status.success() {
|
||||||
panic!("command did not execute successfully: {:?}\n\
|
panic!("command did not execute successfully: {:?}\n\
|
||||||
|
|
|
@ -9,7 +9,6 @@ RUN dpkg --add-architecture i386 && \
|
||||||
curl \
|
curl \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
python2.7 \
|
python2.7 \
|
||||||
python-minimal \
|
|
||||||
git \
|
git \
|
||||||
cmake \
|
cmake \
|
||||||
ccache \
|
ccache \
|
||||||
|
@ -39,8 +38,7 @@ ENV RUST_CONFIGURE_ARGS \
|
||||||
--arm-linux-androideabi-ndk=/android/ndk-arm-9 \
|
--arm-linux-androideabi-ndk=/android/ndk-arm-9 \
|
||||||
--armv7-linux-androideabi-ndk=/android/ndk-arm-9 \
|
--armv7-linux-androideabi-ndk=/android/ndk-arm-9 \
|
||||||
--i686-linux-android-ndk=/android/ndk-x86-9 \
|
--i686-linux-android-ndk=/android/ndk-x86-9 \
|
||||||
--aarch64-linux-android-ndk=/android/ndk-aarch64 \
|
--aarch64-linux-android-ndk=/android/ndk-aarch64
|
||||||
--enable-rustbuild
|
ENV XPY_CHECK test --target arm-linux-androideabi
|
||||||
ENV RUST_CHECK_TARGET check-stage2-android
|
|
||||||
RUN mkdir /tmp/obj
|
RUN mkdir /tmp/obj
|
||||||
RUN chmod 777 /tmp/obj
|
RUN chmod 777 /tmp/obj
|
||||||
|
|
|
@ -7,7 +7,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
curl \
|
curl \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
python2.7 \
|
python2.7 \
|
||||||
python-minimal \
|
|
||||||
git \
|
git \
|
||||||
cmake \
|
cmake \
|
||||||
ccache \
|
ccache \
|
||||||
|
|
|
@ -19,17 +19,21 @@ ci_dir="`dirname $docker_dir`"
|
||||||
src_dir="`dirname $ci_dir`"
|
src_dir="`dirname $ci_dir`"
|
||||||
root_dir="`dirname $src_dir`"
|
root_dir="`dirname $src_dir`"
|
||||||
|
|
||||||
docker build \
|
docker \
|
||||||
|
build \
|
||||||
--rm \
|
--rm \
|
||||||
-t rust-ci \
|
-t rust-ci \
|
||||||
"`dirname "$script"`/$image"
|
"`dirname "$script"`/$image"
|
||||||
|
|
||||||
mkdir -p $HOME/.ccache
|
mkdir -p $HOME/.ccache
|
||||||
mkdir -p $HOME/.cargo
|
mkdir -p $HOME/.cargo
|
||||||
|
mkdir -p $root_dir/obj
|
||||||
|
|
||||||
exec docker run \
|
exec docker \
|
||||||
|
run \
|
||||||
--volume "$root_dir:/checkout:ro" \
|
--volume "$root_dir:/checkout:ro" \
|
||||||
--workdir /tmp/obj \
|
--volume "$root_dir/obj:/checkout/obj" \
|
||||||
|
--workdir /checkout/obj \
|
||||||
--env SRC=/checkout \
|
--env SRC=/checkout \
|
||||||
--env CCACHE_DIR=/ccache \
|
--env CCACHE_DIR=/ccache \
|
||||||
--volume "$HOME/.ccache:/ccache" \
|
--volume "$HOME/.ccache:/ccache" \
|
||||||
|
|
|
@ -7,7 +7,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
curl \
|
curl \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
python2.7 \
|
python2.7 \
|
||||||
python-minimal \
|
|
||||||
git \
|
git \
|
||||||
cmake \
|
cmake \
|
||||||
ccache \
|
ccache \
|
||||||
|
@ -23,7 +22,7 @@ ENV \
|
||||||
AR_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-ar \
|
AR_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-ar \
|
||||||
CC_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-gcc
|
CC_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-gcc
|
||||||
|
|
||||||
ENV RUST_CONFIGURE_ARGS --target=x86_64-unknown-freebsd --enable-rustbuild
|
ENV RUST_CONFIGURE_ARGS --target=x86_64-unknown-freebsd
|
||||||
ENV RUST_CHECK_TARGET ""
|
ENV RUST_CHECK_TARGET ""
|
||||||
RUN mkdir /tmp/obj
|
RUN mkdir /tmp/obj
|
||||||
RUN chmod 777 /tmp/obj
|
RUN chmod 777 /tmp/obj
|
||||||
|
|
|
@ -7,14 +7,14 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
curl \
|
curl \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
python2.7 \
|
python2.7 \
|
||||||
python-minimal \
|
|
||||||
git \
|
git \
|
||||||
cmake \
|
cmake \
|
||||||
ccache \
|
ccache \
|
||||||
libssl-dev \
|
libssl-dev \
|
||||||
sudo
|
sudo
|
||||||
|
|
||||||
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --enable-rustbuild
|
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu
|
||||||
ENV RUST_CHECK_TARGET check-cargotest
|
ENV RUST_CHECK_TARGET check-cargotest
|
||||||
|
ENV NO_VENDOR 1
|
||||||
RUN mkdir /tmp/obj
|
RUN mkdir /tmp/obj
|
||||||
RUN chmod 777 /tmp/obj
|
RUN chmod 777 /tmp/obj
|
||||||
|
|
|
@ -7,7 +7,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
curl \
|
curl \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
python2.7 \
|
python2.7 \
|
||||||
python2.7-minimal \
|
|
||||||
git \
|
git \
|
||||||
cmake \
|
cmake \
|
||||||
ccache \
|
ccache \
|
||||||
|
@ -19,7 +18,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
|
||||||
ENV RUST_CONFIGURE_ARGS \
|
ENV RUST_CONFIGURE_ARGS \
|
||||||
--build=x86_64-unknown-linux-gnu \
|
--build=x86_64-unknown-linux-gnu \
|
||||||
--enable-rustbuild \
|
|
||||||
--llvm-root=/usr/lib/llvm-3.7
|
--llvm-root=/usr/lib/llvm-3.7
|
||||||
ENV RUST_CHECK_TARGET check
|
ENV RUST_CHECK_TARGET check
|
||||||
RUN mkdir /tmp/obj
|
RUN mkdir /tmp/obj
|
||||||
|
|
|
@ -7,14 +7,13 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
curl \
|
curl \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
python2.7 \
|
python2.7 \
|
||||||
python-minimal \
|
|
||||||
git \
|
git \
|
||||||
cmake \
|
cmake \
|
||||||
ccache \
|
ccache \
|
||||||
sudo \
|
sudo \
|
||||||
gdb
|
gdb
|
||||||
|
|
||||||
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --enable-rustbuild
|
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --disable-rustbuild
|
||||||
ENV RUST_CHECK_TARGET check
|
ENV RUST_CHECK_TARGET check
|
||||||
RUN mkdir /tmp/obj
|
RUN mkdir /tmp/obj
|
||||||
RUN chmod 777 /tmp/obj
|
RUN chmod 777 /tmp/obj
|
|
@ -20,8 +20,10 @@ RUN sh /build/build-musl.sh && rm -rf /build
|
||||||
|
|
||||||
ENV RUST_CONFIGURE_ARGS \
|
ENV RUST_CONFIGURE_ARGS \
|
||||||
--target=x86_64-unknown-linux-musl \
|
--target=x86_64-unknown-linux-musl \
|
||||||
--musl-root=/musl-x86_64
|
--musl-root-x86_64=/musl-x86_64
|
||||||
ENV RUST_CHECK_TARGET check-stage2-T-x86_64-unknown-linux-musl-H-x86_64-unknown-linux-gnu
|
ENV RUST_CHECK_TARGET check-stage2-T-x86_64-unknown-linux-musl-H-x86_64-unknown-linux-gnu
|
||||||
|
ENV PATH=$PATH:/musl-x86_64/bin
|
||||||
|
ENV XPY_CHECK test --target x86_64-unknown-linux-musl
|
||||||
|
|
||||||
RUN mkdir /tmp/obj
|
RUN mkdir /tmp/obj
|
||||||
RUN chmod 777 /tmp/obj
|
RUN chmod 777 /tmp/obj
|
||||||
|
|
|
@ -14,12 +14,20 @@ set -e
|
||||||
if [ "$LOCAL_USER_ID" != "" ]; then
|
if [ "$LOCAL_USER_ID" != "" ]; then
|
||||||
useradd --shell /bin/bash -u $LOCAL_USER_ID -o -c "" -m user
|
useradd --shell /bin/bash -u $LOCAL_USER_ID -o -c "" -m user
|
||||||
export HOME=/home/user
|
export HOME=/home/user
|
||||||
export LOCAL_USER_ID=
|
unset LOCAL_USER_ID
|
||||||
exec sudo -E -u user env PATH=$PATH "$0"
|
exec su --preserve-environment -c "env PATH=$PATH \"$0\"" user
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ "$NO_LLVM_ASSERTIONS" = "" ]; then
|
if [ "$NO_LLVM_ASSERTIONS" = "" ]; then
|
||||||
LLVM_ASSERTIONS=--enable-llvm-assertions
|
ENABLE_LLVM_ASSERTIONS=--enable-llvm-assertions
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$NO_VENDOR" = "" ]; then
|
||||||
|
ENABLE_VENDOR=--enable-vendor
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$NO_CCACHE" = "" ]; then
|
||||||
|
ENABLE_CCACHE=--enable-ccache
|
||||||
fi
|
fi
|
||||||
|
|
||||||
set -ex
|
set -ex
|
||||||
|
@ -28,9 +36,9 @@ $SRC/configure \
|
||||||
--disable-manage-submodules \
|
--disable-manage-submodules \
|
||||||
--enable-debug-assertions \
|
--enable-debug-assertions \
|
||||||
--enable-quiet-tests \
|
--enable-quiet-tests \
|
||||||
--enable-ccache \
|
$ENABLE_CCACHE \
|
||||||
--enable-vendor \
|
$ENABLE_VENDOR \
|
||||||
$LLVM_ASSERTIONS \
|
$ENABLE_LLVM_ASSERTIONS \
|
||||||
$RUST_CONFIGURE_ARGS
|
$RUST_CONFIGURE_ARGS
|
||||||
|
|
||||||
if [ "$TRAVIS_OS_NAME" = "osx" ]; then
|
if [ "$TRAVIS_OS_NAME" = "osx" ]; then
|
||||||
|
@ -41,4 +49,8 @@ fi
|
||||||
|
|
||||||
make -j $ncpus tidy
|
make -j $ncpus tidy
|
||||||
make -j $ncpus
|
make -j $ncpus
|
||||||
|
if [ ! -z "$XPY_CHECK" ]; then
|
||||||
|
exec python2.7 $SRC/x.py $XPY_CHECK
|
||||||
|
else
|
||||||
exec make $RUST_CHECK_TARGET -j $ncpus
|
exec make $RUST_CHECK_TARGET -j $ncpus
|
||||||
|
fi
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
Subproject commit 3bc0272cab9fdcfc2ef4df9625ec3c9d5909db79
|
Subproject commit a8fc4c169fac43a5dc204d4fd56ddb1739f8c178
|
|
@ -662,26 +662,31 @@ attribute turns off Rust's name mangling, so that it is easier to link to.
|
||||||
|
|
||||||
It’s important to be mindful of `panic!`s when working with FFI. A `panic!`
|
It’s important to be mindful of `panic!`s when working with FFI. A `panic!`
|
||||||
across an FFI boundary is undefined behavior. If you’re writing code that may
|
across an FFI boundary is undefined behavior. If you’re writing code that may
|
||||||
panic, you should run it in another thread, so that the panic doesn’t bubble up
|
panic, you should run it in a closure with [`catch_unwind()`]:
|
||||||
to C:
|
|
||||||
|
|
||||||
```rust
|
```rust
|
||||||
use std::thread;
|
use std::panic::catch_unwind;
|
||||||
|
|
||||||
#[no_mangle]
|
#[no_mangle]
|
||||||
pub extern fn oh_no() -> i32 {
|
pub extern fn oh_no() -> i32 {
|
||||||
let h = thread::spawn(|| {
|
let result = catch_unwind(|| {
|
||||||
panic!("Oops!");
|
panic!("Oops!");
|
||||||
});
|
});
|
||||||
|
match result {
|
||||||
|
Ok(_) => 0,
|
||||||
|
Err(_) => 1,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
match h.join() {
|
fn main() {}
|
||||||
Ok(_) => 1,
|
|
||||||
Err(_) => 0,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
# fn main() {}
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Please note that [`catch_unwind()`] will only catch unwinding panics, not
|
||||||
|
those who abort the process. See the documentation of [`catch_unwind()`]
|
||||||
|
for more information.
|
||||||
|
|
||||||
|
[`catch_unwind()`]: https://doc.rust-lang.org/std/panic/fn.catch_unwind.html
|
||||||
|
|
||||||
# Representing opaque structs
|
# Representing opaque structs
|
||||||
|
|
||||||
Sometimes, a C library wants to provide a pointer to something, but not let you
|
Sometimes, a C library wants to provide a pointer to something, but not let you
|
||||||
|
|
|
@ -589,11 +589,11 @@ please see the [Documentation chapter](documentation.html).
|
||||||
|
|
||||||
# Testing and concurrency
|
# Testing and concurrency
|
||||||
|
|
||||||
One thing that is important to note when writing tests are run concurrently
|
One thing that is important to note when writing tests is that they may be run
|
||||||
using threads. For this reason you should take care that your tests are written
|
concurrently using threads. For this reason you should take care that your tests
|
||||||
in such a way as to not depend on each-other, or on any shared state. "Shared
|
are written in such a way as to not depend on each-other, or on any shared
|
||||||
state" can also include the environment, such as the current working directory,
|
state. "Shared state" can also include the environment, such as the current
|
||||||
or environment variables.
|
working directory, or environment variables.
|
||||||
|
|
||||||
If this is an issue it is possible to control this concurrency, either by
|
If this is an issue it is possible to control this concurrency, either by
|
||||||
setting the environment variable `RUST_TEST_THREADS`, or by passing the argument
|
setting the environment variable `RUST_TEST_THREADS`, or by passing the argument
|
||||||
|
|
|
@ -47,7 +47,7 @@ let x: i32 = 5;
|
||||||
```
|
```
|
||||||
|
|
||||||
If I asked you to read this out loud to the rest of the class, you’d say “`x`
|
If I asked you to read this out loud to the rest of the class, you’d say “`x`
|
||||||
is a binding with the type `i32` and the value `five`.”
|
is a binding with the type `i32` and the value `5`.”
|
||||||
|
|
||||||
In this case we chose to represent `x` as a 32-bit signed integer. Rust has
|
In this case we chose to represent `x` as a 32-bit signed integer. Rust has
|
||||||
many different primitive integer types. They begin with `i` for signed integers
|
many different primitive integer types. They begin with `i` for signed integers
|
||||||
|
|
|
@ -17,7 +17,7 @@ the language.
|
||||||
|
|
||||||
[**The Rust Reference**][ref]. While Rust does not have a
|
[**The Rust Reference**][ref]. While Rust does not have a
|
||||||
specification, the reference tries to describe its working in
|
specification, the reference tries to describe its working in
|
||||||
detail. It tends to be out of date.
|
detail. It is accurate, but not necessarily complete.
|
||||||
|
|
||||||
[**Standard Library API Reference**][api]. Documentation for the
|
[**Standard Library API Reference**][api]. Documentation for the
|
||||||
standard library.
|
standard library.
|
||||||
|
|
|
@ -603,7 +603,8 @@ syntax named by _designator_. Valid designators are:
|
||||||
* `ty`: a [type](#types)
|
* `ty`: a [type](#types)
|
||||||
* `ident`: an [identifier](#identifiers)
|
* `ident`: an [identifier](#identifiers)
|
||||||
* `path`: a [path](#paths)
|
* `path`: a [path](#paths)
|
||||||
* `tt`: either side of the `=>` in macro rules
|
* `tt`: a token tree (a single [token](#tokens) or a sequence of token trees surrounded
|
||||||
|
by matching `()`, `[]`, or `{}`)
|
||||||
* `meta`: the contents of an [attribute](#attributes)
|
* `meta`: the contents of an [attribute](#attributes)
|
||||||
|
|
||||||
In the transcriber, the
|
In the transcriber, the
|
||||||
|
@ -740,13 +741,14 @@ There are several kinds of item:
|
||||||
* [`extern crate` declarations](#extern-crate-declarations)
|
* [`extern crate` declarations](#extern-crate-declarations)
|
||||||
* [`use` declarations](#use-declarations)
|
* [`use` declarations](#use-declarations)
|
||||||
* [modules](#modules)
|
* [modules](#modules)
|
||||||
* [functions](#functions)
|
* [function definitions](#functions)
|
||||||
|
* [`extern` blocks](#external-blocks)
|
||||||
* [type definitions](grammar.html#type-definitions)
|
* [type definitions](grammar.html#type-definitions)
|
||||||
* [structs](#structs)
|
* [struct definitions](#structs)
|
||||||
* [enumerations](#enumerations)
|
* [enumeration definitions](#enumerations)
|
||||||
* [constant items](#constant-items)
|
* [constant items](#constant-items)
|
||||||
* [static items](#static-items)
|
* [static items](#static-items)
|
||||||
* [traits](#traits)
|
* [trait definitions](#traits)
|
||||||
* [implementations](#implementations)
|
* [implementations](#implementations)
|
||||||
|
|
||||||
Some items form an implicit scope for the declaration of sub-items. In other
|
Some items form an implicit scope for the declaration of sub-items. In other
|
||||||
|
@ -2462,11 +2464,6 @@ The currently implemented features of the reference compiler are:
|
||||||
* `unboxed_closures` - Rust's new closure design, which is currently a work in
|
* `unboxed_closures` - Rust's new closure design, which is currently a work in
|
||||||
progress feature with many known bugs.
|
progress feature with many known bugs.
|
||||||
|
|
||||||
* `unmarked_api` - Allows use of items within a `#![staged_api]` crate
|
|
||||||
which have not been marked with a stability marker.
|
|
||||||
Such items should not be allowed by the compiler to exist,
|
|
||||||
so if you need this there probably is a compiler bug.
|
|
||||||
|
|
||||||
* `allow_internal_unstable` - Allows `macro_rules!` macros to be tagged with the
|
* `allow_internal_unstable` - Allows `macro_rules!` macros to be tagged with the
|
||||||
`#[allow_internal_unstable]` attribute, designed
|
`#[allow_internal_unstable]` attribute, designed
|
||||||
to allow `std` macros to call
|
to allow `std` macros to call
|
||||||
|
|
|
@ -37,8 +37,6 @@ TEMPLATE = """// Copyright {year} The Rust Project Developers. See the COPYRIGHT
|
||||||
|
|
||||||
// This file was auto-generated using 'src/etc/generate-deriving-span-tests.py'
|
// This file was auto-generated using 'src/etc/generate-deriving-span-tests.py'
|
||||||
|
|
||||||
extern crate rand;
|
|
||||||
|
|
||||||
{error_deriving}
|
{error_deriving}
|
||||||
struct Error;
|
struct Error;
|
||||||
{code}
|
{code}
|
||||||
|
@ -106,7 +104,6 @@ STRUCT = 2
|
||||||
ALL = STRUCT | ENUM
|
ALL = STRUCT | ENUM
|
||||||
|
|
||||||
traits = {
|
traits = {
|
||||||
'Zero': (STRUCT, [], 1),
|
|
||||||
'Default': (STRUCT, [], 1),
|
'Default': (STRUCT, [], 1),
|
||||||
'FromPrimitive': (0, [], 0), # only works for C-like enums
|
'FromPrimitive': (0, [], 0), # only works for C-like enums
|
||||||
|
|
||||||
|
@ -116,7 +113,7 @@ traits = {
|
||||||
|
|
||||||
for (trait, supers, errs) in [('Clone', [], 1),
|
for (trait, supers, errs) in [('Clone', [], 1),
|
||||||
('PartialEq', [], 2),
|
('PartialEq', [], 2),
|
||||||
('PartialOrd', ['PartialEq'], 8),
|
('PartialOrd', ['PartialEq'], 9),
|
||||||
('Eq', ['PartialEq'], 1),
|
('Eq', ['PartialEq'], 1),
|
||||||
('Ord', ['Eq', 'PartialOrd', 'PartialEq'], 1),
|
('Ord', ['Eq', 'PartialOrd', 'PartialEq'], 1),
|
||||||
('Debug', [], 1),
|
('Debug', [], 1),
|
||||||
|
|
|
@ -23,7 +23,6 @@ use std::fs::File;
|
||||||
use std::io::{BufRead, Read};
|
use std::io::{BufRead, Read};
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
use syntax::parse;
|
|
||||||
use syntax::parse::lexer;
|
use syntax::parse::lexer;
|
||||||
use rustc::dep_graph::DepGraph;
|
use rustc::dep_graph::DepGraph;
|
||||||
use rustc::session::{self, config};
|
use rustc::session::{self, config};
|
||||||
|
@ -31,15 +30,16 @@ use rustc::middle::cstore::DummyCrateStore;
|
||||||
|
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
use syntax::ast::Name;
|
|
||||||
use syntax::codemap;
|
use syntax::codemap;
|
||||||
use syntax::parse::token::{self, BinOpToken, DelimToken, Lit, Token};
|
use syntax::parse::token::{self, BinOpToken, DelimToken, Lit, Token};
|
||||||
use syntax::parse::lexer::TokenAndSpan;
|
use syntax::parse::lexer::TokenAndSpan;
|
||||||
use syntax_pos::Pos;
|
use syntax_pos::Pos;
|
||||||
|
|
||||||
|
use syntax::symbol::{Symbol, keywords};
|
||||||
|
|
||||||
fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
|
fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
|
||||||
fn id() -> token::Token {
|
fn id() -> token::Token {
|
||||||
Token::Ident(ast::Ident::with_empty_ctxt(Name(0)))
|
Token::Ident(ast::Ident::with_empty_ctxt(keywords::Invalid.name()))
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut res = HashMap::new();
|
let mut res = HashMap::new();
|
||||||
|
@ -65,7 +65,7 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
|
||||||
"SHL" => Token::BinOp(BinOpToken::Shl),
|
"SHL" => Token::BinOp(BinOpToken::Shl),
|
||||||
"LBRACE" => Token::OpenDelim(DelimToken::Brace),
|
"LBRACE" => Token::OpenDelim(DelimToken::Brace),
|
||||||
"RARROW" => Token::RArrow,
|
"RARROW" => Token::RArrow,
|
||||||
"LIT_STR" => Token::Literal(Lit::Str_(Name(0)), None),
|
"LIT_STR" => Token::Literal(Lit::Str_(keywords::Invalid.name()), None),
|
||||||
"DOTDOT" => Token::DotDot,
|
"DOTDOT" => Token::DotDot,
|
||||||
"MOD_SEP" => Token::ModSep,
|
"MOD_SEP" => Token::ModSep,
|
||||||
"DOTDOTDOT" => Token::DotDotDot,
|
"DOTDOTDOT" => Token::DotDotDot,
|
||||||
|
@ -75,21 +75,22 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
|
||||||
"ANDAND" => Token::AndAnd,
|
"ANDAND" => Token::AndAnd,
|
||||||
"AT" => Token::At,
|
"AT" => Token::At,
|
||||||
"LBRACKET" => Token::OpenDelim(DelimToken::Bracket),
|
"LBRACKET" => Token::OpenDelim(DelimToken::Bracket),
|
||||||
"LIT_STR_RAW" => Token::Literal(Lit::StrRaw(Name(0), 0), None),
|
"LIT_STR_RAW" => Token::Literal(Lit::StrRaw(keywords::Invalid.name(), 0), None),
|
||||||
"RPAREN" => Token::CloseDelim(DelimToken::Paren),
|
"RPAREN" => Token::CloseDelim(DelimToken::Paren),
|
||||||
"SLASH" => Token::BinOp(BinOpToken::Slash),
|
"SLASH" => Token::BinOp(BinOpToken::Slash),
|
||||||
"COMMA" => Token::Comma,
|
"COMMA" => Token::Comma,
|
||||||
"LIFETIME" => Token::Lifetime(ast::Ident::with_empty_ctxt(Name(0))),
|
"LIFETIME" => Token::Lifetime(
|
||||||
|
ast::Ident::with_empty_ctxt(keywords::Invalid.name())),
|
||||||
"CARET" => Token::BinOp(BinOpToken::Caret),
|
"CARET" => Token::BinOp(BinOpToken::Caret),
|
||||||
"TILDE" => Token::Tilde,
|
"TILDE" => Token::Tilde,
|
||||||
"IDENT" => id(),
|
"IDENT" => id(),
|
||||||
"PLUS" => Token::BinOp(BinOpToken::Plus),
|
"PLUS" => Token::BinOp(BinOpToken::Plus),
|
||||||
"LIT_CHAR" => Token::Literal(Lit::Char(Name(0)), None),
|
"LIT_CHAR" => Token::Literal(Lit::Char(keywords::Invalid.name()), None),
|
||||||
"LIT_BYTE" => Token::Literal(Lit::Byte(Name(0)), None),
|
"LIT_BYTE" => Token::Literal(Lit::Byte(keywords::Invalid.name()), None),
|
||||||
"EQ" => Token::Eq,
|
"EQ" => Token::Eq,
|
||||||
"RBRACKET" => Token::CloseDelim(DelimToken::Bracket),
|
"RBRACKET" => Token::CloseDelim(DelimToken::Bracket),
|
||||||
"COMMENT" => Token::Comment,
|
"COMMENT" => Token::Comment,
|
||||||
"DOC_COMMENT" => Token::DocComment(Name(0)),
|
"DOC_COMMENT" => Token::DocComment(keywords::Invalid.name()),
|
||||||
"DOT" => Token::Dot,
|
"DOT" => Token::Dot,
|
||||||
"EQEQ" => Token::EqEq,
|
"EQEQ" => Token::EqEq,
|
||||||
"NE" => Token::Ne,
|
"NE" => Token::Ne,
|
||||||
|
@ -99,9 +100,9 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
|
||||||
"BINOP" => Token::BinOp(BinOpToken::Plus),
|
"BINOP" => Token::BinOp(BinOpToken::Plus),
|
||||||
"POUND" => Token::Pound,
|
"POUND" => Token::Pound,
|
||||||
"OROR" => Token::OrOr,
|
"OROR" => Token::OrOr,
|
||||||
"LIT_INTEGER" => Token::Literal(Lit::Integer(Name(0)), None),
|
"LIT_INTEGER" => Token::Literal(Lit::Integer(keywords::Invalid.name()), None),
|
||||||
"BINOPEQ" => Token::BinOpEq(BinOpToken::Plus),
|
"BINOPEQ" => Token::BinOpEq(BinOpToken::Plus),
|
||||||
"LIT_FLOAT" => Token::Literal(Lit::Float(Name(0)), None),
|
"LIT_FLOAT" => Token::Literal(Lit::Float(keywords::Invalid.name()), None),
|
||||||
"WHITESPACE" => Token::Whitespace,
|
"WHITESPACE" => Token::Whitespace,
|
||||||
"UNDERSCORE" => Token::Underscore,
|
"UNDERSCORE" => Token::Underscore,
|
||||||
"MINUS" => Token::BinOp(BinOpToken::Minus),
|
"MINUS" => Token::BinOp(BinOpToken::Minus),
|
||||||
|
@ -111,10 +112,11 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
|
||||||
"OR" => Token::BinOp(BinOpToken::Or),
|
"OR" => Token::BinOp(BinOpToken::Or),
|
||||||
"GT" => Token::Gt,
|
"GT" => Token::Gt,
|
||||||
"LE" => Token::Le,
|
"LE" => Token::Le,
|
||||||
"LIT_BINARY" => Token::Literal(Lit::ByteStr(Name(0)), None),
|
"LIT_BINARY" => Token::Literal(Lit::ByteStr(keywords::Invalid.name()), None),
|
||||||
"LIT_BINARY_RAW" => Token::Literal(Lit::ByteStrRaw(Name(0), 0), None),
|
"LIT_BINARY_RAW" => Token::Literal(
|
||||||
|
Lit::ByteStrRaw(keywords::Invalid.name(), 0), None),
|
||||||
"QUESTION" => Token::Question,
|
"QUESTION" => Token::Question,
|
||||||
"SHEBANG" => Token::Shebang(Name(0)),
|
"SHEBANG" => Token::Shebang(keywords::Invalid.name()),
|
||||||
_ => continue,
|
_ => continue,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -158,7 +160,7 @@ fn fix(mut lit: &str) -> ast::Name {
|
||||||
let leading_hashes = count(lit);
|
let leading_hashes = count(lit);
|
||||||
|
|
||||||
// +1/-1 to adjust for single quotes
|
// +1/-1 to adjust for single quotes
|
||||||
parse::token::intern(&lit[leading_hashes + 1..lit.len() - leading_hashes - 1])
|
Symbol::intern(&lit[leading_hashes + 1..lit.len() - leading_hashes - 1])
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Assuming a char/byte literal, strip the 'b' prefix and the single quotes.
|
/// Assuming a char/byte literal, strip the 'b' prefix and the single quotes.
|
||||||
|
@ -168,7 +170,7 @@ fn fixchar(mut lit: &str) -> ast::Name {
|
||||||
lit = &lit[1..];
|
lit = &lit[1..];
|
||||||
}
|
}
|
||||||
|
|
||||||
parse::token::intern(&lit[1..lit.len() - 1])
|
Symbol::intern(&lit[1..lit.len() - 1])
|
||||||
}
|
}
|
||||||
|
|
||||||
fn count(lit: &str) -> usize {
|
fn count(lit: &str) -> usize {
|
||||||
|
@ -196,7 +198,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>, surrogate_
|
||||||
let not_found = format!("didn't find token {:?} in the map", toknum);
|
let not_found = format!("didn't find token {:?} in the map", toknum);
|
||||||
let proto_tok = tokens.get(toknum).expect(¬_found[..]);
|
let proto_tok = tokens.get(toknum).expect(¬_found[..]);
|
||||||
|
|
||||||
let nm = parse::token::intern(content);
|
let nm = Symbol::intern(content);
|
||||||
|
|
||||||
debug!("What we got: content (`{}`), proto: {:?}", content, proto_tok);
|
debug!("What we got: content (`{}`), proto: {:?}", content, proto_tok);
|
||||||
|
|
||||||
|
|
|
@ -524,6 +524,9 @@ impl<I: Iterator + ?Sized> Iterator for Box<I> {
|
||||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
(**self).size_hint()
|
(**self).size_hint()
|
||||||
}
|
}
|
||||||
|
fn nth(&mut self, n: usize) -> Option<I::Item> {
|
||||||
|
(**self).nth(n)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
|
impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
|
||||||
|
@ -532,7 +535,14 @@ impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {}
|
impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {
|
||||||
|
fn len(&self) -> usize {
|
||||||
|
(**self).len()
|
||||||
|
}
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
(**self).is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<I: FusedIterator + ?Sized> FusedIterator for Box<I> {}
|
impl<I: FusedIterator + ?Sized> FusedIterator for Box<I> {}
|
||||||
|
|
|
@ -74,11 +74,13 @@
|
||||||
|
|
||||||
#![feature(allocator)]
|
#![feature(allocator)]
|
||||||
#![feature(box_syntax)]
|
#![feature(box_syntax)]
|
||||||
|
#![feature(cfg_target_has_atomic)]
|
||||||
#![feature(coerce_unsized)]
|
#![feature(coerce_unsized)]
|
||||||
#![feature(const_fn)]
|
#![feature(const_fn)]
|
||||||
#![feature(core_intrinsics)]
|
#![feature(core_intrinsics)]
|
||||||
#![feature(custom_attribute)]
|
#![feature(custom_attribute)]
|
||||||
#![feature(dropck_parametricity)]
|
#![feature(dropck_parametricity)]
|
||||||
|
#![cfg_attr(not(test), feature(exact_size_is_empty))]
|
||||||
#![feature(fundamental)]
|
#![feature(fundamental)]
|
||||||
#![feature(lang_items)]
|
#![feature(lang_items)]
|
||||||
#![feature(needs_allocator)]
|
#![feature(needs_allocator)]
|
||||||
|
@ -121,6 +123,7 @@ mod boxed {
|
||||||
}
|
}
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod boxed_test;
|
mod boxed_test;
|
||||||
|
#[cfg(target_has_atomic = "ptr")]
|
||||||
pub mod arc;
|
pub mod arc;
|
||||||
pub mod rc;
|
pub mod rc;
|
||||||
pub mod raw_vec;
|
pub mod raw_vec;
|
||||||
|
|
|
@ -8,12 +8,10 @@
|
||||||
// option. This file may not be copied, modified, or distributed
|
// option. This file may not be copied, modified, or distributed
|
||||||
// except according to those terms.
|
// except according to those terms.
|
||||||
|
|
||||||
use core::sync::atomic::{AtomicPtr, Ordering};
|
#[cfg(target_has_atomic = "ptr")]
|
||||||
use core::mem;
|
pub use self::imp::set_oom_handler;
|
||||||
use core::intrinsics;
|
use core::intrinsics;
|
||||||
|
|
||||||
static OOM_HANDLER: AtomicPtr<()> = AtomicPtr::new(default_oom_handler as *mut ());
|
|
||||||
|
|
||||||
fn default_oom_handler() -> ! {
|
fn default_oom_handler() -> ! {
|
||||||
// The default handler can't do much more since we can't assume the presence
|
// The default handler can't do much more since we can't assume the presence
|
||||||
// of libc or any way of printing an error message.
|
// of libc or any way of printing an error message.
|
||||||
|
@ -25,6 +23,18 @@ fn default_oom_handler() -> ! {
|
||||||
#[inline(never)]
|
#[inline(never)]
|
||||||
#[unstable(feature = "oom", reason = "not a scrutinized interface",
|
#[unstable(feature = "oom", reason = "not a scrutinized interface",
|
||||||
issue = "27700")]
|
issue = "27700")]
|
||||||
|
pub fn oom() -> ! {
|
||||||
|
self::imp::oom()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(target_has_atomic = "ptr")]
|
||||||
|
mod imp {
|
||||||
|
use core::mem;
|
||||||
|
use core::sync::atomic::{AtomicPtr, Ordering};
|
||||||
|
|
||||||
|
static OOM_HANDLER: AtomicPtr<()> = AtomicPtr::new(super::default_oom_handler as *mut ());
|
||||||
|
|
||||||
|
#[inline(always)]
|
||||||
pub fn oom() -> ! {
|
pub fn oom() -> ! {
|
||||||
let value = OOM_HANDLER.load(Ordering::SeqCst);
|
let value = OOM_HANDLER.load(Ordering::SeqCst);
|
||||||
let handler: fn() -> ! = unsafe { mem::transmute(value) };
|
let handler: fn() -> ! = unsafe { mem::transmute(value) };
|
||||||
|
@ -40,3 +50,12 @@ pub fn oom() -> ! {
|
||||||
pub fn set_oom_handler(handler: fn() -> !) {
|
pub fn set_oom_handler(handler: fn() -> !) {
|
||||||
OOM_HANDLER.store(handler as *mut (), Ordering::SeqCst);
|
OOM_HANDLER.store(handler as *mut (), Ordering::SeqCst);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(target_has_atomic = "ptr"))]
|
||||||
|
mod imp {
|
||||||
|
#[inline(always)]
|
||||||
|
pub fn oom() -> ! {
|
||||||
|
super::default_oom_handler()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -12,35 +12,35 @@
|
||||||
|
|
||||||
//! Single-threaded reference-counting pointers.
|
//! Single-threaded reference-counting pointers.
|
||||||
//!
|
//!
|
||||||
//! The type [`Rc<T>`][rc] provides shared ownership of a value of type `T`,
|
//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
|
||||||
//! allocated in the heap. Invoking [`clone`][clone] on `Rc` produces a new
|
//! allocated in the heap. Invoking [`clone()`][clone] on [`Rc`] produces a new
|
||||||
//! pointer to the same value in the heap. When the last `Rc` pointer to a
|
//! pointer to the same value in the heap. When the last [`Rc`] pointer to a
|
||||||
//! given value is destroyed, the pointed-to value is also destroyed.
|
//! given value is destroyed, the pointed-to value is also destroyed.
|
||||||
//!
|
//!
|
||||||
//! Shared references in Rust disallow mutation by default, and `Rc` is no
|
//! Shared references in Rust disallow mutation by default, and `Rc` is no
|
||||||
//! exception. If you need to mutate through an `Rc`, use [`Cell`][cell] or
|
//! exception. If you need to mutate through an [`Rc`], use [`Cell`] or
|
||||||
//! [`RefCell`][refcell].
|
//! [`RefCell`].
|
||||||
//!
|
//!
|
||||||
//! `Rc` uses non-atomic reference counting. This means that overhead is very
|
//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
|
||||||
//! low, but an `Rc` cannot be sent between threads, and consequently `Rc`
|
//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
|
||||||
//! does not implement [`Send`][send]. As a result, the Rust compiler
|
//! does not implement [`Send`][send]. As a result, the Rust compiler
|
||||||
//! will check *at compile time* that you are not sending `Rc`s between
|
//! will check *at compile time* that you are not sending [`Rc`]s between
|
||||||
//! threads. If you need multi-threaded, atomic reference counting, use
|
//! threads. If you need multi-threaded, atomic reference counting, use
|
||||||
//! [`sync::Arc`][arc].
|
//! [`sync::Arc`][arc].
|
||||||
//!
|
//!
|
||||||
//! The [`downgrade`][downgrade] method can be used to create a non-owning
|
//! The [`downgrade()`][downgrade] method can be used to create a non-owning
|
||||||
//! [`Weak`][weak] pointer. A `Weak` pointer can be [`upgrade`][upgrade]d
|
//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
|
||||||
//! to an `Rc`, but this will return [`None`][option] if the value has
|
//! to an [`Rc`], but this will return [`None`] if the value has
|
||||||
//! already been dropped.
|
//! already been dropped.
|
||||||
//!
|
//!
|
||||||
//! A cycle between `Rc` pointers will never be deallocated. For this reason,
|
//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
|
||||||
//! `Weak` is used to break cycles. For example, a tree could have strong
|
//! [`Weak`] is used to break cycles. For example, a tree could have strong
|
||||||
//! `Rc` pointers from parent nodes to children, and `Weak` pointers from
|
//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
|
||||||
//! children back to their parents.
|
//! children back to their parents.
|
||||||
//!
|
//!
|
||||||
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
|
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
|
||||||
//! so you can call `T`'s methods on a value of type `Rc<T>`. To avoid name
|
//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
|
||||||
//! clashes with `T`'s methods, the methods of `Rc<T>` itself are [associated
|
//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are [associated
|
||||||
//! functions][assoc], called using function-like syntax:
|
//! functions][assoc], called using function-like syntax:
|
||||||
//!
|
//!
|
||||||
//! ```
|
//! ```
|
||||||
|
@ -50,28 +50,15 @@
|
||||||
//! Rc::downgrade(&my_rc);
|
//! Rc::downgrade(&my_rc);
|
||||||
//! ```
|
//! ```
|
||||||
//!
|
//!
|
||||||
//! `Weak<T>` does not auto-dereference to `T`, because the value may have
|
//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the value may have
|
||||||
//! already been destroyed.
|
//! already been destroyed.
|
||||||
//!
|
//!
|
||||||
//! [rc]: struct.Rc.html
|
|
||||||
//! [weak]: struct.Weak.html
|
|
||||||
//! [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
|
|
||||||
//! [cell]: ../../std/cell/struct.Cell.html
|
|
||||||
//! [refcell]: ../../std/cell/struct.RefCell.html
|
|
||||||
//! [send]: ../../std/marker/trait.Send.html
|
|
||||||
//! [arc]: ../../std/sync/struct.Arc.html
|
|
||||||
//! [deref]: ../../std/ops/trait.Deref.html
|
|
||||||
//! [downgrade]: struct.Rc.html#method.downgrade
|
|
||||||
//! [upgrade]: struct.Weak.html#method.upgrade
|
|
||||||
//! [option]: ../../std/option/enum.Option.html
|
|
||||||
//! [assoc]: ../../book/method-syntax.html#associated-functions
|
|
||||||
//!
|
|
||||||
//! # Examples
|
//! # Examples
|
||||||
//!
|
//!
|
||||||
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
|
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
|
||||||
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
|
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
|
||||||
//! unique ownership, because more than one gadget may belong to the same
|
//! unique ownership, because more than one gadget may belong to the same
|
||||||
//! `Owner`. `Rc` allows us to share an `Owner` between multiple `Gadget`s,
|
//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
|
||||||
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
|
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
|
||||||
//!
|
//!
|
||||||
//! ```
|
//! ```
|
||||||
|
@ -127,20 +114,20 @@
|
||||||
//! ```
|
//! ```
|
||||||
//!
|
//!
|
||||||
//! If our requirements change, and we also need to be able to traverse from
|
//! If our requirements change, and we also need to be able to traverse from
|
||||||
//! `Owner` to `Gadget`, we will run into problems. An `Rc` pointer from `Owner`
|
//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
|
||||||
//! to `Gadget` introduces a cycle between the values. This means that their
|
//! to `Gadget` introduces a cycle between the values. This means that their
|
||||||
//! reference counts can never reach 0, and the values will remain allocated
|
//! reference counts can never reach 0, and the values will remain allocated
|
||||||
//! forever: a memory leak. In order to get around this, we can use `Weak`
|
//! forever: a memory leak. In order to get around this, we can use [`Weak`]
|
||||||
//! pointers.
|
//! pointers.
|
||||||
//!
|
//!
|
||||||
//! Rust actually makes it somewhat difficult to produce this loop in the first
|
//! Rust actually makes it somewhat difficult to produce this loop in the first
|
||||||
//! place. In order to end up with two values that point at each other, one of
|
//! place. In order to end up with two values that point at each other, one of
|
||||||
//! them needs to be mutable. This is difficult because `Rc` enforces
|
//! them needs to be mutable. This is difficult because [`Rc`] enforces
|
||||||
//! memory safety by only giving out shared references to the value it wraps,
|
//! memory safety by only giving out shared references to the value it wraps,
|
||||||
//! and these don't allow direct mutation. We need to wrap the part of the
|
//! and these don't allow direct mutation. We need to wrap the part of the
|
||||||
//! value we wish to mutate in a [`RefCell`][refcell], which provides *interior
|
//! value we wish to mutate in a [`RefCell`], which provides *interior
|
||||||
//! mutability*: a method to achieve mutability through a shared reference.
|
//! mutability*: a method to achieve mutability through a shared reference.
|
||||||
//! `RefCell` enforces Rust's borrowing rules at runtime.
|
//! [`RefCell`] enforces Rust's borrowing rules at runtime.
|
||||||
//!
|
//!
|
||||||
//! ```
|
//! ```
|
||||||
//! use std::rc::Rc;
|
//! use std::rc::Rc;
|
||||||
|
@ -214,6 +201,19 @@
|
||||||
//! // Gadget Man, so he gets destroyed as well.
|
//! // Gadget Man, so he gets destroyed as well.
|
||||||
//! }
|
//! }
|
||||||
//! ```
|
//! ```
|
||||||
|
//!
|
||||||
|
//! [`Rc`]: struct.Rc.html
|
||||||
|
//! [`Weak`]: struct.Weak.html
|
||||||
|
//! [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
|
||||||
|
//! [`Cell`]: ../../std/cell/struct.Cell.html
|
||||||
|
//! [`RefCell`]: ../../std/cell/struct.RefCell.html
|
||||||
|
//! [send]: ../../std/marker/trait.Send.html
|
||||||
|
//! [arc]: ../../std/sync/struct.Arc.html
|
||||||
|
//! [`Deref`]: ../../std/ops/trait.Deref.html
|
||||||
|
//! [downgrade]: struct.Rc.html#method.downgrade
|
||||||
|
//! [upgrade]: struct.Weak.html#method.upgrade
|
||||||
|
//! [`None`]: ../../std/option/enum.Option.html#variant.None
|
||||||
|
//! [assoc]: ../../book/method-syntax.html#associated-functions
|
||||||
|
|
||||||
#![stable(feature = "rust1", since = "1.0.0")]
|
#![stable(feature = "rust1", since = "1.0.0")]
|
||||||
|
|
||||||
|
@ -251,9 +251,11 @@ struct RcBox<T: ?Sized> {
|
||||||
/// See the [module-level documentation](./index.html) for more details.
|
/// See the [module-level documentation](./index.html) for more details.
|
||||||
///
|
///
|
||||||
/// The inherent methods of `Rc` are all associated functions, which means
|
/// The inherent methods of `Rc` are all associated functions, which means
|
||||||
/// that you have to call them as e.g. `Rc::get_mut(&value)` instead of
|
/// that you have to call them as e.g. [`Rc::get_mut(&value)`][get_mut] instead of
|
||||||
/// `value.get_mut()`. This avoids conflicts with methods of the inner
|
/// `value.get_mut()`. This avoids conflicts with methods of the inner
|
||||||
/// type `T`.
|
/// type `T`.
|
||||||
|
///
|
||||||
|
/// [get_mut]: #method.get_mut
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
pub struct Rc<T: ?Sized> {
|
pub struct Rc<T: ?Sized> {
|
||||||
ptr: Shared<RcBox<T>>,
|
ptr: Shared<RcBox<T>>,
|
||||||
|
@ -337,10 +339,10 @@ impl<T> Rc<T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Checks whether [`Rc::try_unwrap`][try_unwrap] would return
|
/// Checks whether [`Rc::try_unwrap`][try_unwrap] would return
|
||||||
/// [`Ok`][result].
|
/// [`Ok`].
|
||||||
///
|
///
|
||||||
/// [try_unwrap]: struct.Rc.html#method.try_unwrap
|
/// [try_unwrap]: struct.Rc.html#method.try_unwrap
|
||||||
/// [result]: ../../std/result/enum.Result.html
|
/// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
///
|
///
|
||||||
|
@ -543,14 +545,14 @@ impl<T: ?Sized> Rc<T> {
|
||||||
/// Returns a mutable reference to the inner value, if there are
|
/// Returns a mutable reference to the inner value, if there are
|
||||||
/// no other `Rc` or [`Weak`][weak] pointers to the same value.
|
/// no other `Rc` or [`Weak`][weak] pointers to the same value.
|
||||||
///
|
///
|
||||||
/// Returns [`None`][option] otherwise, because it is not safe to
|
/// Returns [`None`] otherwise, because it is not safe to
|
||||||
/// mutate a shared value.
|
/// mutate a shared value.
|
||||||
///
|
///
|
||||||
/// See also [`make_mut`][make_mut], which will [`clone`][clone]
|
/// See also [`make_mut`][make_mut], which will [`clone`][clone]
|
||||||
/// the inner value when it's shared.
|
/// the inner value when it's shared.
|
||||||
///
|
///
|
||||||
/// [weak]: struct.Weak.html
|
/// [weak]: struct.Weak.html
|
||||||
/// [option]: ../../std/option/enum.Option.html
|
/// [`None`]: ../../std/option/enum.Option.html#variant.None
|
||||||
/// [make_mut]: struct.Rc.html#method.make_mut
|
/// [make_mut]: struct.Rc.html#method.make_mut
|
||||||
/// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
|
/// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
|
||||||
///
|
///
|
||||||
|
|
|
@ -69,6 +69,7 @@ fn main() {
|
||||||
.read_dir()
|
.read_dir()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.map(|e| e.unwrap())
|
.map(|e| e.unwrap())
|
||||||
|
.filter(|e| &*e.file_name() != ".git")
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
while let Some(entry) = stack.pop() {
|
while let Some(entry) = stack.pop() {
|
||||||
let path = entry.path();
|
let path = entry.path();
|
||||||
|
@ -150,11 +151,17 @@ fn main() {
|
||||||
cmd.arg(format!("--build={}", build_helper::gnu_target(&host)));
|
cmd.arg(format!("--build={}", build_helper::gnu_target(&host)));
|
||||||
|
|
||||||
run(&mut cmd);
|
run(&mut cmd);
|
||||||
run(Command::new("make")
|
let mut make = Command::new("make");
|
||||||
.current_dir(&build_dir)
|
make.current_dir(&build_dir)
|
||||||
.arg("build_lib_static")
|
.arg("build_lib_static");
|
||||||
.arg("-j")
|
|
||||||
.arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set")));
|
// mingw make seems... buggy? unclear...
|
||||||
|
if !host.contains("windows") {
|
||||||
|
make.arg("-j")
|
||||||
|
.arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set"));
|
||||||
|
}
|
||||||
|
|
||||||
|
run(&mut make);
|
||||||
|
|
||||||
if target.contains("windows") {
|
if target.contains("windows") {
|
||||||
println!("cargo:rustc-link-lib=static=jemalloc");
|
println!("cargo:rustc-link-lib=static=jemalloc");
|
||||||
|
|
|
@ -10,8 +10,12 @@ path = "lib.rs"
|
||||||
[dependencies]
|
[dependencies]
|
||||||
alloc = { path = "../liballoc" }
|
alloc = { path = "../liballoc" }
|
||||||
core = { path = "../libcore" }
|
core = { path = "../libcore" }
|
||||||
rustc_unicode = { path = "../librustc_unicode" }
|
std_unicode = { path = "../libstd_unicode" }
|
||||||
|
|
||||||
[[test]]
|
[[test]]
|
||||||
name = "collectionstest"
|
name = "collectionstest"
|
||||||
path = "../libcollectionstest/lib.rs"
|
path = "../libcollectionstest/lib.rs"
|
||||||
|
|
||||||
|
[[bench]]
|
||||||
|
name = "collectionstest"
|
||||||
|
path = "../libcollectionstest/lib.rs"
|
||||||
|
|
|
@ -986,7 +986,11 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
|
impl<'a, T> ExactSizeIterator for Iter<'a, T> {
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.iter.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<'a, T> FusedIterator for Iter<'a, T> {}
|
impl<'a, T> FusedIterator for Iter<'a, T> {}
|
||||||
|
@ -1022,7 +1026,11 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<T> ExactSizeIterator for IntoIter<T> {}
|
impl<T> ExactSizeIterator for IntoIter<T> {
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.iter.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<T> FusedIterator for IntoIter<T> {}
|
impl<T> FusedIterator for IntoIter<T> {}
|
||||||
|
@ -1057,7 +1065,11 @@ impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "drain", since = "1.6.0")]
|
#[stable(feature = "drain", since = "1.6.0")]
|
||||||
impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {}
|
impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.iter.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<'a, T: 'a> FusedIterator for Drain<'a, T> {}
|
impl<'a, T: 'a> FusedIterator for Drain<'a, T> {}
|
||||||
|
|
|
@ -16,7 +16,7 @@
|
||||||
#![unstable(feature = "enumset",
|
#![unstable(feature = "enumset",
|
||||||
reason = "matches collection reform specification, \
|
reason = "matches collection reform specification, \
|
||||||
waiting for dust to settle",
|
waiting for dust to settle",
|
||||||
issue = "0")]
|
issue = "37966")]
|
||||||
|
|
||||||
use core::marker;
|
use core::marker;
|
||||||
use core::fmt;
|
use core::fmt;
|
||||||
|
|
|
@ -36,6 +36,7 @@
|
||||||
#![cfg_attr(not(test), feature(char_escape_debug))]
|
#![cfg_attr(not(test), feature(char_escape_debug))]
|
||||||
#![feature(core_intrinsics)]
|
#![feature(core_intrinsics)]
|
||||||
#![feature(dropck_parametricity)]
|
#![feature(dropck_parametricity)]
|
||||||
|
#![feature(exact_size_is_empty)]
|
||||||
#![feature(fmt_internals)]
|
#![feature(fmt_internals)]
|
||||||
#![feature(fused)]
|
#![feature(fused)]
|
||||||
#![feature(heap_api)]
|
#![feature(heap_api)]
|
||||||
|
@ -46,18 +47,19 @@
|
||||||
#![feature(placement_in)]
|
#![feature(placement_in)]
|
||||||
#![feature(placement_new_protocol)]
|
#![feature(placement_new_protocol)]
|
||||||
#![feature(shared)]
|
#![feature(shared)]
|
||||||
|
#![feature(slice_get_slice)]
|
||||||
#![feature(slice_patterns)]
|
#![feature(slice_patterns)]
|
||||||
#![feature(specialization)]
|
#![feature(specialization)]
|
||||||
#![feature(staged_api)]
|
#![feature(staged_api)]
|
||||||
#![feature(step_by)]
|
|
||||||
#![feature(trusted_len)]
|
#![feature(trusted_len)]
|
||||||
#![feature(unicode)]
|
#![feature(unicode)]
|
||||||
#![feature(unique)]
|
#![feature(unique)]
|
||||||
|
#![feature(untagged_unions)]
|
||||||
#![cfg_attr(test, feature(rand, test))]
|
#![cfg_attr(test, feature(rand, test))]
|
||||||
|
|
||||||
#![no_std]
|
#![no_std]
|
||||||
|
|
||||||
extern crate rustc_unicode;
|
extern crate std_unicode;
|
||||||
extern crate alloc;
|
extern crate alloc;
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|
|
@ -98,8 +98,7 @@
|
||||||
#![cfg_attr(test, allow(unused_imports, dead_code))]
|
#![cfg_attr(test, allow(unused_imports, dead_code))]
|
||||||
|
|
||||||
use alloc::boxed::Box;
|
use alloc::boxed::Box;
|
||||||
use core::cmp::Ordering::{self, Greater, Less};
|
use core::cmp::Ordering::{self, Greater};
|
||||||
use core::cmp;
|
|
||||||
use core::mem::size_of;
|
use core::mem::size_of;
|
||||||
use core::mem;
|
use core::mem;
|
||||||
use core::ptr;
|
use core::ptr;
|
||||||
|
@ -118,6 +117,8 @@ pub use core::slice::{SplitMut, ChunksMut, Split};
|
||||||
pub use core::slice::{SplitN, RSplitN, SplitNMut, RSplitNMut};
|
pub use core::slice::{SplitN, RSplitN, SplitNMut, RSplitNMut};
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
pub use core::slice::{from_raw_parts, from_raw_parts_mut};
|
pub use core::slice::{from_raw_parts, from_raw_parts_mut};
|
||||||
|
#[unstable(feature = "slice_get_slice", issue = "35729")]
|
||||||
|
pub use core::slice::SliceIndex;
|
||||||
|
|
||||||
////////////////////////////////////////////////////////////////////////////////
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
// Basic slice extension methods
|
// Basic slice extension methods
|
||||||
|
@ -353,7 +354,9 @@ impl<T> [T] {
|
||||||
/// ```
|
/// ```
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn get(&self, index: usize) -> Option<&T> {
|
pub fn get<I>(&self, index: I) -> Option<&I::Output>
|
||||||
|
where I: SliceIndex<T>
|
||||||
|
{
|
||||||
core_slice::SliceExt::get(self, index)
|
core_slice::SliceExt::get(self, index)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -372,7 +375,9 @@ impl<T> [T] {
|
||||||
/// or `None` if the index is out of bounds
|
/// or `None` if the index is out of bounds
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
#[inline]
|
#[inline]
|
||||||
pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
|
pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
|
||||||
|
where I: SliceIndex<T>
|
||||||
|
{
|
||||||
core_slice::SliceExt::get_mut(self, index)
|
core_slice::SliceExt::get_mut(self, index)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -390,7 +395,9 @@ impl<T> [T] {
|
||||||
/// ```
|
/// ```
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
#[inline]
|
#[inline]
|
||||||
pub unsafe fn get_unchecked(&self, index: usize) -> &T {
|
pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
|
||||||
|
where I: SliceIndex<T>
|
||||||
|
{
|
||||||
core_slice::SliceExt::get_unchecked(self, index)
|
core_slice::SliceExt::get_unchecked(self, index)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -410,7 +417,9 @@ impl<T> [T] {
|
||||||
/// ```
|
/// ```
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
#[inline]
|
#[inline]
|
||||||
pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T {
|
pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
|
||||||
|
where I: SliceIndex<T>
|
||||||
|
{
|
||||||
core_slice::SliceExt::get_unchecked_mut(self, index)
|
core_slice::SliceExt::get_unchecked_mut(self, index)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1032,8 +1041,8 @@ impl<T> [T] {
|
||||||
|
|
||||||
/// This is equivalent to `self.sort_by(|a, b| a.cmp(b))`.
|
/// This is equivalent to `self.sort_by(|a, b| a.cmp(b))`.
|
||||||
///
|
///
|
||||||
/// This sort is stable and `O(n log n)` worst-case but allocates
|
/// This sort is stable and `O(n log n)` worst-case, but allocates
|
||||||
/// approximately `2 * n` where `n` is the length of `self`.
|
/// temporary storage half the size of `self`.
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
///
|
///
|
||||||
|
@ -1054,8 +1063,8 @@ impl<T> [T] {
|
||||||
/// Sorts the slice, in place, using `f` to extract a key by which to
|
/// Sorts the slice, in place, using `f` to extract a key by which to
|
||||||
/// order the sort by.
|
/// order the sort by.
|
||||||
///
|
///
|
||||||
/// This sort is stable and `O(n log n)` worst-case but allocates
|
/// This sort is stable and `O(n log n)` worst-case, but allocates
|
||||||
/// approximately `2 * n`, where `n` is the length of `self`.
|
/// temporary storage half the size of `self`.
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
///
|
///
|
||||||
|
@ -1076,8 +1085,8 @@ impl<T> [T] {
|
||||||
/// Sorts the slice, in place, using `compare` to compare
|
/// Sorts the slice, in place, using `compare` to compare
|
||||||
/// elements.
|
/// elements.
|
||||||
///
|
///
|
||||||
/// This sort is stable and `O(n log n)` worst-case but allocates
|
/// This sort is stable and `O(n log n)` worst-case, but allocates
|
||||||
/// approximately `2 * n`, where `n` is the length of `self`.
|
/// temporary storage half the size of `self`.
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
///
|
///
|
||||||
|
@ -1295,213 +1304,333 @@ impl<T: Clone> ToOwned for [T] {
|
||||||
// Sorting
|
// Sorting
|
||||||
////////////////////////////////////////////////////////////////////////////////
|
////////////////////////////////////////////////////////////////////////////////
|
||||||
|
|
||||||
fn insertion_sort<T, F>(v: &mut [T], mut compare: F)
|
/// Inserts `v[0]` into pre-sorted sequence `v[1..]` so that whole `v[..]` becomes sorted.
|
||||||
|
///
|
||||||
|
/// This is the integral subroutine of insertion sort.
|
||||||
|
fn insert_head<T, F>(v: &mut [T], compare: &mut F)
|
||||||
where F: FnMut(&T, &T) -> Ordering
|
where F: FnMut(&T, &T) -> Ordering
|
||||||
{
|
{
|
||||||
let len = v.len() as isize;
|
if v.len() >= 2 && compare(&v[0], &v[1]) == Greater {
|
||||||
let buf_v = v.as_mut_ptr();
|
|
||||||
|
|
||||||
// 1 <= i < len;
|
|
||||||
for i in 1..len {
|
|
||||||
// j satisfies: 0 <= j <= i;
|
|
||||||
let mut j = i;
|
|
||||||
unsafe {
|
unsafe {
|
||||||
// `i` is in bounds.
|
// There are three ways to implement insertion here:
|
||||||
let read_ptr = buf_v.offset(i) as *const T;
|
//
|
||||||
|
// 1. Swap adjacent elements until the first one gets to its final destination.
|
||||||
|
// However, this way we copy data around more than is necessary. If elements are big
|
||||||
|
// structures (costly to copy), this method will be slow.
|
||||||
|
//
|
||||||
|
// 2. Iterate until the right place for the first element is found. Then shift the
|
||||||
|
// elements succeeding it to make room for it and finally place it into the
|
||||||
|
// remaining hole. This is a good method.
|
||||||
|
//
|
||||||
|
// 3. Copy the first element into a temporary variable. Iterate until the right place
|
||||||
|
// for it is found. As we go along, copy every traversed element into the slot
|
||||||
|
// preceding it. Finally, copy data from the temporary variable into the remaining
|
||||||
|
// hole. This method is very good. Benchmarks demonstrated slightly better
|
||||||
|
// performance than with the 2nd method.
|
||||||
|
//
|
||||||
|
// All methods were benchmarked, and the 3rd showed best results. So we chose that one.
|
||||||
|
let mut tmp = NoDrop { value: ptr::read(&v[0]) };
|
||||||
|
|
||||||
// find where to insert, we need to do strict <,
|
// Intermediate state of the insertion process is always tracked by `hole`, which
|
||||||
// rather than <=, to maintain stability.
|
// serves two purposes:
|
||||||
|
// 1. Protects integrity of `v` from panics in `compare`.
|
||||||
|
// 2. Fills the remaining hole in `v` in the end.
|
||||||
|
//
|
||||||
|
// Panic safety:
|
||||||
|
//
|
||||||
|
// If `compare` panics at any point during the process, `hole` will get dropped and
|
||||||
|
// fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it
|
||||||
|
// initially held exactly once.
|
||||||
|
let mut hole = InsertionHole {
|
||||||
|
src: &mut tmp.value,
|
||||||
|
dest: &mut v[1],
|
||||||
|
};
|
||||||
|
ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);
|
||||||
|
|
||||||
// 0 <= j - 1 < len, so .offset(j - 1) is in bounds.
|
for i in 2..v.len() {
|
||||||
while j > 0 && compare(&*read_ptr, &*buf_v.offset(j - 1)) == Less {
|
if compare(&tmp.value, &v[i]) != Greater {
|
||||||
j -= 1;
|
break;
|
||||||
}
|
|
||||||
|
|
||||||
// shift everything to the right, to make space to
|
|
||||||
// insert this value.
|
|
||||||
|
|
||||||
// j + 1 could be `len` (for the last `i`), but in
|
|
||||||
// that case, `i == j` so we don't copy. The
|
|
||||||
// `.offset(j)` is always in bounds.
|
|
||||||
|
|
||||||
if i != j {
|
|
||||||
let tmp = ptr::read(read_ptr);
|
|
||||||
ptr::copy(&*buf_v.offset(j), buf_v.offset(j + 1), (i - j) as usize);
|
|
||||||
ptr::copy_nonoverlapping(&tmp, buf_v.offset(j), 1);
|
|
||||||
mem::forget(tmp);
|
|
||||||
}
|
}
|
||||||
|
ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1);
|
||||||
|
hole.dest = &mut v[i];
|
||||||
}
|
}
|
||||||
|
// `hole` gets dropped and thus copies `tmp` into the remaining hole in `v`.
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn merge_sort<T, F>(v: &mut [T], mut compare: F)
|
// Holds a value, but never drops it.
|
||||||
|
#[allow(unions_with_drop_fields)]
|
||||||
|
union NoDrop<T> {
|
||||||
|
value: T
|
||||||
|
}
|
||||||
|
|
||||||
|
// When dropped, copies from `src` into `dest`.
|
||||||
|
struct InsertionHole<T> {
|
||||||
|
src: *mut T,
|
||||||
|
dest: *mut T,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> Drop for InsertionHole<T> {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1); }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Merges non-decreasing runs `v[..mid]` and `v[mid..]` using `buf` as temporary storage, and
|
||||||
|
/// stores the result into `v[..]`.
|
||||||
|
///
|
||||||
|
/// # Safety
|
||||||
|
///
|
||||||
|
/// The two slices must be non-empty and `mid` must be in bounds. Buffer `buf` must be long enough
|
||||||
|
/// to hold a copy of the shorter slice. Also, `T` must not be a zero-sized type.
|
||||||
|
unsafe fn merge<T, F>(v: &mut [T], mid: usize, buf: *mut T, compare: &mut F)
|
||||||
where F: FnMut(&T, &T) -> Ordering
|
where F: FnMut(&T, &T) -> Ordering
|
||||||
{
|
{
|
||||||
// warning: this wildly uses unsafe.
|
|
||||||
const BASE_INSERTION: usize = 32;
|
|
||||||
const LARGE_INSERTION: usize = 16;
|
|
||||||
|
|
||||||
// FIXME #12092: smaller insertion runs seems to make sorting
|
|
||||||
// vectors of large elements a little faster on some platforms,
|
|
||||||
// but hasn't been tested/tuned extensively
|
|
||||||
let insertion = if size_of::<T>() <= 16 {
|
|
||||||
BASE_INSERTION
|
|
||||||
} else {
|
|
||||||
LARGE_INSERTION
|
|
||||||
};
|
|
||||||
|
|
||||||
let len = v.len();
|
let len = v.len();
|
||||||
|
let v = v.as_mut_ptr();
|
||||||
|
let v_mid = v.offset(mid as isize);
|
||||||
|
let v_end = v.offset(len as isize);
|
||||||
|
|
||||||
// short vectors get sorted in-place via insertion sort to avoid allocations
|
// The merge process first copies the shorter run into `buf`. Then it traces the newly copied
|
||||||
if len <= insertion {
|
// run and the longer run forwards (or backwards), comparing their next unconsumed elements and
|
||||||
insertion_sort(v, compare);
|
// copying the lesser (or greater) one into `v`.
|
||||||
return;
|
//
|
||||||
}
|
// As soon as the shorter run is fully consumed, the process is done. If the longer run gets
|
||||||
|
// consumed first, then we must copy whatever is left of the shorter run into the remaining
|
||||||
|
// hole in `v`.
|
||||||
|
//
|
||||||
|
// Intermediate state of the process is always tracked by `hole`, which serves two purposes:
|
||||||
|
// 1. Protects integrity of `v` from panics in `compare`.
|
||||||
|
// 2. Fills the remaining hole in `v` if the longer run gets consumed first.
|
||||||
|
//
|
||||||
|
// Panic safety:
|
||||||
|
//
|
||||||
|
// If `compare` panics at any point during the process, `hole` will get dropped and fill the
|
||||||
|
// hole in `v` with the unconsumed range in `buf`, thus ensuring that `v` still holds every
|
||||||
|
// object it initially held exactly once.
|
||||||
|
let mut hole;
|
||||||
|
|
||||||
// allocate some memory to use as scratch memory, we keep the
|
if mid <= len - mid {
|
||||||
// length 0 so we can keep shallow copies of the contents of `v`
|
// The left run is shorter.
|
||||||
// without risking the dtors running on an object twice if
|
ptr::copy_nonoverlapping(v, buf, mid);
|
||||||
// `compare` panics.
|
hole = MergeHole {
|
||||||
let mut working_space = Vec::with_capacity(2 * len);
|
start: buf,
|
||||||
// these both are buffers of length `len`.
|
end: buf.offset(mid as isize),
|
||||||
let mut buf_dat = working_space.as_mut_ptr();
|
dest: v,
|
||||||
let mut buf_tmp = unsafe { buf_dat.offset(len as isize) };
|
|
||||||
|
|
||||||
// length `len`.
|
|
||||||
let buf_v = v.as_ptr();
|
|
||||||
|
|
||||||
// step 1. sort short runs with insertion sort. This takes the
|
|
||||||
// values from `v` and sorts them into `buf_dat`, leaving that
|
|
||||||
// with sorted runs of length INSERTION.
|
|
||||||
|
|
||||||
// We could hardcode the sorting comparisons here, and we could
|
|
||||||
// manipulate/step the pointers themselves, rather than repeatedly
|
|
||||||
// .offset-ing.
|
|
||||||
for start in (0..len).step_by(insertion) {
|
|
||||||
// start <= i < len;
|
|
||||||
for i in start..cmp::min(start + insertion, len) {
|
|
||||||
// j satisfies: start <= j <= i;
|
|
||||||
let mut j = i as isize;
|
|
||||||
unsafe {
|
|
||||||
// `i` is in bounds.
|
|
||||||
let read_ptr = buf_v.offset(i as isize);
|
|
||||||
|
|
||||||
// find where to insert, we need to do strict <,
|
|
||||||
// rather than <=, to maintain stability.
|
|
||||||
|
|
||||||
// start <= j - 1 < len, so .offset(j - 1) is in
|
|
||||||
// bounds.
|
|
||||||
while j > start as isize && compare(&*read_ptr, &*buf_dat.offset(j - 1)) == Less {
|
|
||||||
j -= 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
// shift everything to the right, to make space to
|
|
||||||
// insert this value.
|
|
||||||
|
|
||||||
// j + 1 could be `len` (for the last `i`), but in
|
|
||||||
// that case, `i == j` so we don't copy. The
|
|
||||||
// `.offset(j)` is always in bounds.
|
|
||||||
ptr::copy(&*buf_dat.offset(j), buf_dat.offset(j + 1), i - j as usize);
|
|
||||||
ptr::copy_nonoverlapping(read_ptr, buf_dat.offset(j), 1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// step 2. merge the sorted runs.
|
|
||||||
let mut width = insertion;
|
|
||||||
while width < len {
|
|
||||||
// merge the sorted runs of length `width` in `buf_dat` two at
|
|
||||||
// a time, placing the result in `buf_tmp`.
|
|
||||||
|
|
||||||
// 0 <= start <= len.
|
|
||||||
for start in (0..len).step_by(2 * width) {
|
|
||||||
// manipulate pointers directly for speed (rather than
|
|
||||||
// using a `for` loop with `range` and `.offset` inside
|
|
||||||
// that loop).
|
|
||||||
unsafe {
|
|
||||||
// the end of the first run & start of the
|
|
||||||
// second. Offset of `len` is defined, since this is
|
|
||||||
// precisely one byte past the end of the object.
|
|
||||||
let right_start = buf_dat.offset(cmp::min(start + width, len) as isize);
|
|
||||||
// end of the second. Similar reasoning to the above re safety.
|
|
||||||
let right_end_idx = cmp::min(start + 2 * width, len);
|
|
||||||
let right_end = buf_dat.offset(right_end_idx as isize);
|
|
||||||
|
|
||||||
// the pointers to the elements under consideration
|
|
||||||
// from the two runs.
|
|
||||||
|
|
||||||
// both of these are in bounds.
|
|
||||||
let mut left = buf_dat.offset(start as isize);
|
|
||||||
let mut right = right_start;
|
|
||||||
|
|
||||||
// where we're putting the results, it is a run of
|
|
||||||
// length `2*width`, so we step it once for each step
|
|
||||||
// of either `left` or `right`. `buf_tmp` has length
|
|
||||||
// `len`, so these are in bounds.
|
|
||||||
let mut out = buf_tmp.offset(start as isize);
|
|
||||||
let out_end = buf_tmp.offset(right_end_idx as isize);
|
|
||||||
|
|
||||||
// If left[last] <= right[0], they are already in order:
|
|
||||||
// fast-forward the left side (the right side is handled
|
|
||||||
// in the loop).
|
|
||||||
// If `right` is not empty then left is not empty, and
|
|
||||||
// the offsets are in bounds.
|
|
||||||
if right != right_end && compare(&*right.offset(-1), &*right) != Greater {
|
|
||||||
let elems = (right_start as usize - left as usize) / mem::size_of::<T>();
|
|
||||||
ptr::copy_nonoverlapping(&*left, out, elems);
|
|
||||||
out = out.offset(elems as isize);
|
|
||||||
left = right_start;
|
|
||||||
}
|
|
||||||
|
|
||||||
while out < out_end {
|
|
||||||
// Either the left or the right run are exhausted,
|
|
||||||
// so just copy the remainder from the other run
|
|
||||||
// and move on; this gives a huge speed-up (order
|
|
||||||
// of 25%) for mostly sorted vectors (the best
|
|
||||||
// case).
|
|
||||||
if left == right_start {
|
|
||||||
// the number remaining in this run.
|
|
||||||
let elems = (right_end as usize - right as usize) / mem::size_of::<T>();
|
|
||||||
ptr::copy_nonoverlapping(&*right, out, elems);
|
|
||||||
break;
|
|
||||||
} else if right == right_end {
|
|
||||||
let elems = (right_start as usize - left as usize) / mem::size_of::<T>();
|
|
||||||
ptr::copy_nonoverlapping(&*left, out, elems);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
// check which side is smaller, and that's the
|
|
||||||
// next element for the new run.
|
|
||||||
|
|
||||||
// `left < right_start` and `right < right_end`,
|
|
||||||
// so these are valid.
|
|
||||||
let to_copy = if compare(&*left, &*right) == Greater {
|
|
||||||
step(&mut right)
|
|
||||||
} else {
|
|
||||||
step(&mut left)
|
|
||||||
};
|
};
|
||||||
ptr::copy_nonoverlapping(&*to_copy, out, 1);
|
|
||||||
step(&mut out);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mem::swap(&mut buf_dat, &mut buf_tmp);
|
// Initially, these pointers point to the beginnings of their arrays.
|
||||||
|
let left = &mut hole.start;
|
||||||
|
let mut right = v_mid;
|
||||||
|
let out = &mut hole.dest;
|
||||||
|
|
||||||
width *= 2;
|
while *left < hole.end && right < v_end {
|
||||||
|
// Consume the lesser side.
|
||||||
|
// If equal, prefer the left run to maintain stability.
|
||||||
|
let to_copy = if compare(&**left, &*right) == Greater {
|
||||||
|
get_and_increment(&mut right)
|
||||||
|
} else {
|
||||||
|
get_and_increment(left)
|
||||||
|
};
|
||||||
|
ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
// The right run is shorter.
|
||||||
|
ptr::copy_nonoverlapping(v_mid, buf, len - mid);
|
||||||
|
hole = MergeHole {
|
||||||
|
start: buf,
|
||||||
|
end: buf.offset((len - mid) as isize),
|
||||||
|
dest: v_mid,
|
||||||
|
};
|
||||||
|
|
||||||
// write the result to `v` in one go, so that there are never two copies
|
// Initially, these pointers point past the ends of their arrays.
|
||||||
// of the same object in `v`.
|
let left = &mut hole.dest;
|
||||||
unsafe {
|
let right = &mut hole.end;
|
||||||
ptr::copy_nonoverlapping(&*buf_dat, v.as_mut_ptr(), len);
|
let mut out = v_end;
|
||||||
|
|
||||||
|
while v < *left && buf < *right {
|
||||||
|
// Consume the greater side.
|
||||||
|
// If equal, prefer the right run to maintain stability.
|
||||||
|
let to_copy = if compare(&*left.offset(-1), &*right.offset(-1)) == Greater {
|
||||||
|
decrement_and_get(left)
|
||||||
|
} else {
|
||||||
|
decrement_and_get(right)
|
||||||
|
};
|
||||||
|
ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
// Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of
|
||||||
|
// it will now be copied into the hole in `v`.
|
||||||
|
|
||||||
// increment the pointer, returning the old pointer.
|
unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
|
||||||
#[inline(always)]
|
|
||||||
unsafe fn step<T>(ptr: &mut *mut T) -> *mut T {
|
|
||||||
let old = *ptr;
|
let old = *ptr;
|
||||||
*ptr = ptr.offset(1);
|
*ptr = ptr.offset(1);
|
||||||
old
|
old
|
||||||
}
|
}
|
||||||
|
|
||||||
|
unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
|
||||||
|
*ptr = ptr.offset(-1);
|
||||||
|
*ptr
|
||||||
|
}
|
||||||
|
|
||||||
|
// When dropped, copies the range `start..end` into `dest..`.
|
||||||
|
struct MergeHole<T> {
|
||||||
|
start: *mut T,
|
||||||
|
end: *mut T,
|
||||||
|
dest: *mut T,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> Drop for MergeHole<T> {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
// `T` is not a zero-sized type, so it's okay to divide by it's size.
|
||||||
|
let len = (self.end as usize - self.start as usize) / mem::size_of::<T>();
|
||||||
|
unsafe { ptr::copy_nonoverlapping(self.start, self.dest, len); }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This merge sort borrows some (but not all) ideas from TimSort, which is described in detail
|
||||||
|
/// [here](http://svn.python.org/projects/python/trunk/Objects/listsort.txt).
|
||||||
|
///
|
||||||
|
/// The algorithm identifies strictly descending and non-descending subsequences, which are called
|
||||||
|
/// natural runs. There is a stack of pending runs yet to be merged. Each newly found run is pushed
|
||||||
|
/// onto the stack, and then some pairs of adjacent runs are merged until these two invariants are
|
||||||
|
/// satisfied, for every `i` in `0 .. runs.len() - 2`:
|
||||||
|
///
|
||||||
|
/// 1. `runs[i].len > runs[i + 1].len`
|
||||||
|
/// 2. `runs[i].len > runs[i + 1].len + runs[i + 2].len`
|
||||||
|
///
|
||||||
|
/// The invariants ensure that the total running time is `O(n log n)` worst-case.
|
||||||
|
fn merge_sort<T, F>(v: &mut [T], mut compare: F)
|
||||||
|
where F: FnMut(&T, &T) -> Ordering
|
||||||
|
{
|
||||||
|
// Sorting has no meaningful behavior on zero-sized types.
|
||||||
|
if size_of::<T>() == 0 {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// FIXME #12092: These numbers are platform-specific and need more extensive testing/tuning.
|
||||||
|
//
|
||||||
|
// If `v` has length up to `insertion_len`, simply switch to insertion sort because it is going
|
||||||
|
// to perform better than merge sort. For bigger types `T`, the threshold is smaller.
|
||||||
|
//
|
||||||
|
// Short runs are extended using insertion sort to span at least `min_run` elements, in order
|
||||||
|
// to improve performance.
|
||||||
|
let (max_insertion, min_run) = if size_of::<T>() <= 16 {
|
||||||
|
(64, 32)
|
||||||
|
} else {
|
||||||
|
(32, 16)
|
||||||
|
};
|
||||||
|
|
||||||
|
let len = v.len();
|
||||||
|
|
||||||
|
// Short arrays get sorted in-place via insertion sort to avoid allocations.
|
||||||
|
if len <= max_insertion {
|
||||||
|
if len >= 2 {
|
||||||
|
for i in (0..len-1).rev() {
|
||||||
|
insert_head(&mut v[i..], &mut compare);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Allocate a buffer to use as scratch memory. We keep the length 0 so we can keep in it
|
||||||
|
// shallow copies of the contents of `v` without risking the dtors running on copies if
|
||||||
|
// `compare` panics. When merging two sorted runs, this buffer holds a copy of the shorter run,
|
||||||
|
// which will always have length at most `len / 2`.
|
||||||
|
let mut buf = Vec::with_capacity(len / 2);
|
||||||
|
|
||||||
|
// In order to identify natural runs in `v`, we traverse it backwards. That might seem like a
|
||||||
|
// strange decision, but consider the fact that merges more often go in the opposite direction
|
||||||
|
// (forwards). According to benchmarks, merging forwards is slightly faster than merging
|
||||||
|
// backwards. To conclude, identifying runs by traversing backwards improves performance.
|
||||||
|
let mut runs = vec![];
|
||||||
|
let mut end = len;
|
||||||
|
while end > 0 {
|
||||||
|
// Find the next natural run, and reverse it if it's strictly descending.
|
||||||
|
let mut start = end - 1;
|
||||||
|
if start > 0 {
|
||||||
|
start -= 1;
|
||||||
|
if compare(&v[start], &v[start + 1]) == Greater {
|
||||||
|
while start > 0 && compare(&v[start - 1], &v[start]) == Greater {
|
||||||
|
start -= 1;
|
||||||
|
}
|
||||||
|
v[start..end].reverse();
|
||||||
|
} else {
|
||||||
|
while start > 0 && compare(&v[start - 1], &v[start]) != Greater {
|
||||||
|
start -= 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Insert some more elements into the run if it's too short. Insertion sort is faster than
|
||||||
|
// merge sort on short sequences, so this significantly improves performance.
|
||||||
|
while start > 0 && end - start < min_run {
|
||||||
|
start -= 1;
|
||||||
|
insert_head(&mut v[start..end], &mut compare);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Push this run onto the stack.
|
||||||
|
runs.push(Run {
|
||||||
|
start: start,
|
||||||
|
len: end - start,
|
||||||
|
});
|
||||||
|
end = start;
|
||||||
|
|
||||||
|
// Merge some pairs of adjacent runs to satisfy the invariants.
|
||||||
|
while let Some(r) = collapse(&runs) {
|
||||||
|
let left = runs[r + 1];
|
||||||
|
let right = runs[r];
|
||||||
|
unsafe {
|
||||||
|
merge(&mut v[left.start .. right.start + right.len], left.len, buf.as_mut_ptr(),
|
||||||
|
&mut compare);
|
||||||
|
}
|
||||||
|
runs[r] = Run {
|
||||||
|
start: left.start,
|
||||||
|
len: left.len + right.len,
|
||||||
|
};
|
||||||
|
runs.remove(r + 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Finally, exactly one run must remain in the stack.
|
||||||
|
debug_assert!(runs.len() == 1 && runs[0].start == 0 && runs[0].len == len);
|
||||||
|
|
||||||
|
// Examines the stack of runs and identifies the next pair of runs to merge. More specifically,
|
||||||
|
// if `Some(r)` is returned, that means `runs[r]` and `runs[r + 1]` must be merged next. If the
|
||||||
|
// algorithm should continue building a new run instead, `None` is returned.
|
||||||
|
//
|
||||||
|
// TimSort is infamous for it's buggy implementations, as described here:
|
||||||
|
// http://envisage-project.eu/timsort-specification-and-verification/
|
||||||
|
//
|
||||||
|
// The gist of the story is: we must enforce the invariants on the top four runs on the stack.
|
||||||
|
// Enforcing them on just top three is not sufficient to ensure that the invariants will still
|
||||||
|
// hold for *all* runs in the stack.
|
||||||
|
//
|
||||||
|
// This function correctly checks invariants for the top four runs. Additionally, if the top
|
||||||
|
// run starts at index 0, it will always demand a merge operation until the stack is fully
|
||||||
|
// collapsed, in order to complete the sort.
|
||||||
|
#[inline]
|
||||||
|
fn collapse(runs: &[Run]) -> Option<usize> {
|
||||||
|
let n = runs.len();
|
||||||
|
if n >= 2 && (runs[n - 1].start == 0 ||
|
||||||
|
runs[n - 2].len <= runs[n - 1].len ||
|
||||||
|
(n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len) ||
|
||||||
|
(n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len)) {
|
||||||
|
if n >= 3 && runs[n - 3].len < runs[n - 1].len {
|
||||||
|
Some(n - 3)
|
||||||
|
} else {
|
||||||
|
Some(n - 2)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
struct Run {
|
||||||
|
start: usize,
|
||||||
|
len: usize,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -24,12 +24,12 @@ use core::str::pattern::Pattern;
|
||||||
use core::str::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher};
|
use core::str::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher};
|
||||||
use core::mem;
|
use core::mem;
|
||||||
use core::iter::FusedIterator;
|
use core::iter::FusedIterator;
|
||||||
use rustc_unicode::str::{UnicodeStr, Utf16Encoder};
|
use std_unicode::str::{UnicodeStr, Utf16Encoder};
|
||||||
|
|
||||||
use vec_deque::VecDeque;
|
use vec_deque::VecDeque;
|
||||||
use borrow::{Borrow, ToOwned};
|
use borrow::{Borrow, ToOwned};
|
||||||
use string::String;
|
use string::String;
|
||||||
use rustc_unicode;
|
use std_unicode;
|
||||||
use vec::Vec;
|
use vec::Vec;
|
||||||
use slice::SliceConcatExt;
|
use slice::SliceConcatExt;
|
||||||
use boxed::Box;
|
use boxed::Box;
|
||||||
|
@ -54,7 +54,7 @@ pub use core::str::{from_utf8, Chars, CharIndices, Bytes};
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
pub use core::str::{from_utf8_unchecked, ParseBoolError};
|
pub use core::str::{from_utf8_unchecked, ParseBoolError};
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
pub use rustc_unicode::str::SplitWhitespace;
|
pub use std_unicode::str::SplitWhitespace;
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
pub use core::str::pattern;
|
pub use core::str::pattern;
|
||||||
|
|
||||||
|
@ -1705,7 +1705,7 @@ impl str {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn case_ignoreable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool {
|
fn case_ignoreable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool {
|
||||||
use rustc_unicode::derived_property::{Cased, Case_Ignorable};
|
use std_unicode::derived_property::{Cased, Case_Ignorable};
|
||||||
match iter.skip_while(|&c| Case_Ignorable(c)).next() {
|
match iter.skip_while(|&c| Case_Ignorable(c)).next() {
|
||||||
Some(c) => Cased(c),
|
Some(c) => Cased(c),
|
||||||
None => false,
|
None => false,
|
||||||
|
|
|
@ -63,8 +63,8 @@ use core::mem;
|
||||||
use core::ops::{self, Add, AddAssign, Index, IndexMut};
|
use core::ops::{self, Add, AddAssign, Index, IndexMut};
|
||||||
use core::ptr;
|
use core::ptr;
|
||||||
use core::str::pattern::Pattern;
|
use core::str::pattern::Pattern;
|
||||||
use rustc_unicode::char::{decode_utf16, REPLACEMENT_CHARACTER};
|
use std_unicode::char::{decode_utf16, REPLACEMENT_CHARACTER};
|
||||||
use rustc_unicode::str as unicode_str;
|
use std_unicode::str as unicode_str;
|
||||||
|
|
||||||
use borrow::{Cow, ToOwned};
|
use borrow::{Cow, ToOwned};
|
||||||
use range::RangeArgument;
|
use range::RangeArgument;
|
||||||
|
@ -1129,8 +1129,6 @@ impl String {
|
||||||
#[inline]
|
#[inline]
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
pub fn insert(&mut self, idx: usize, ch: char) {
|
pub fn insert(&mut self, idx: usize, ch: char) {
|
||||||
let len = self.len();
|
|
||||||
assert!(idx <= len);
|
|
||||||
assert!(self.is_char_boundary(idx));
|
assert!(self.is_char_boundary(idx));
|
||||||
let mut bits = [0; 4];
|
let mut bits = [0; 4];
|
||||||
let bits = ch.encode_utf8(&mut bits).as_bytes();
|
let bits = ch.encode_utf8(&mut bits).as_bytes();
|
||||||
|
@ -1184,7 +1182,6 @@ impl String {
|
||||||
reason = "recent addition",
|
reason = "recent addition",
|
||||||
issue = "35553")]
|
issue = "35553")]
|
||||||
pub fn insert_str(&mut self, idx: usize, string: &str) {
|
pub fn insert_str(&mut self, idx: usize, string: &str) {
|
||||||
assert!(idx <= self.len());
|
|
||||||
assert!(self.is_char_boundary(idx));
|
assert!(self.is_char_boundary(idx));
|
||||||
|
|
||||||
unsafe {
|
unsafe {
|
||||||
|
@ -1260,6 +1257,38 @@ impl String {
|
||||||
self.len() == 0
|
self.len() == 0
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Divide one string into two at an index.
|
||||||
|
///
|
||||||
|
/// The argument, `mid`, should be a byte offset from the start of the string. It must also
|
||||||
|
/// be on the boundary of a UTF-8 code point.
|
||||||
|
///
|
||||||
|
/// The two strings returned go from the start of the string to `mid`, and from `mid` to the end
|
||||||
|
/// of the string.
|
||||||
|
///
|
||||||
|
/// # Panics
|
||||||
|
///
|
||||||
|
/// Panics if `mid` is not on a `UTF-8` code point boundary, or if it is beyond the last
|
||||||
|
/// code point of the string.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # #![feature(string_split_off)]
|
||||||
|
/// # fn main() {
|
||||||
|
/// let mut hello = String::from("Hello, World!");
|
||||||
|
/// let world = hello.split_off(7);
|
||||||
|
/// assert_eq!(hello, "Hello, ");
|
||||||
|
/// assert_eq!(world, "World!");
|
||||||
|
/// # }
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
#[unstable(feature = "string_split_off", issue = "38080")]
|
||||||
|
pub fn split_off(&mut self, mid: usize) -> String {
|
||||||
|
assert!(self.is_char_boundary(mid));
|
||||||
|
let other = self.vec.split_off(mid);
|
||||||
|
unsafe { String::from_utf8_unchecked(other) }
|
||||||
|
}
|
||||||
|
|
||||||
/// Truncates this `String`, removing all contents.
|
/// Truncates this `String`, removing all contents.
|
||||||
///
|
///
|
||||||
/// While this means the `String` will have a length of zero, it does not
|
/// While this means the `String` will have a length of zero, it does not
|
||||||
|
|
|
@ -1244,7 +1244,7 @@ impl<T: Clone> Vec<T> {
|
||||||
/// ```
|
/// ```
|
||||||
#[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
|
#[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
|
||||||
pub fn extend_from_slice(&mut self, other: &[T]) {
|
pub fn extend_from_slice(&mut self, other: &[T]) {
|
||||||
self.extend(other.iter().cloned())
|
self.spec_extend(other.iter())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1499,7 +1499,7 @@ impl<T> ops::DerefMut for Vec<T> {
|
||||||
impl<T> FromIterator<T> for Vec<T> {
|
impl<T> FromIterator<T> for Vec<T> {
|
||||||
#[inline]
|
#[inline]
|
||||||
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> {
|
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> {
|
||||||
<Self as SpecExtend<_>>::from_iter(iter.into_iter())
|
<Self as SpecExtend<_, _>>::from_iter(iter.into_iter())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1572,12 +1572,12 @@ impl<T> Extend<T> for Vec<T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Specialization trait used for Vec::from_iter and Vec::extend
|
// Specialization trait used for Vec::from_iter and Vec::extend
|
||||||
trait SpecExtend<I> {
|
trait SpecExtend<T, I> {
|
||||||
fn from_iter(iter: I) -> Self;
|
fn from_iter(iter: I) -> Self;
|
||||||
fn spec_extend(&mut self, iter: I);
|
fn spec_extend(&mut self, iter: I);
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<I, T> SpecExtend<I> for Vec<T>
|
impl<T, I> SpecExtend<T, I> for Vec<T>
|
||||||
where I: Iterator<Item=T>,
|
where I: Iterator<Item=T>,
|
||||||
{
|
{
|
||||||
default fn from_iter(mut iterator: I) -> Self {
|
default fn from_iter(mut iterator: I) -> Self {
|
||||||
|
@ -1607,7 +1607,7 @@ impl<I, T> SpecExtend<I> for Vec<T>
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<I, T> SpecExtend<I> for Vec<T>
|
impl<T, I> SpecExtend<T, I> for Vec<T>
|
||||||
where I: TrustedLen<Item=T>,
|
where I: TrustedLen<Item=T>,
|
||||||
{
|
{
|
||||||
fn from_iter(iterator: I) -> Self {
|
fn from_iter(iterator: I) -> Self {
|
||||||
|
@ -1642,6 +1642,33 @@ impl<I, T> SpecExtend<I> for Vec<T>
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<'a, T: 'a, I> SpecExtend<&'a T, I> for Vec<T>
|
||||||
|
where I: Iterator<Item=&'a T>,
|
||||||
|
T: Clone,
|
||||||
|
{
|
||||||
|
default fn from_iter(iterator: I) -> Self {
|
||||||
|
SpecExtend::from_iter(iterator.cloned())
|
||||||
|
}
|
||||||
|
|
||||||
|
default fn spec_extend(&mut self, iterator: I) {
|
||||||
|
self.spec_extend(iterator.cloned())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, T: 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T>
|
||||||
|
where T: Copy,
|
||||||
|
{
|
||||||
|
fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
|
||||||
|
let slice = iterator.as_slice();
|
||||||
|
self.reserve(slice.len());
|
||||||
|
unsafe {
|
||||||
|
let len = self.len();
|
||||||
|
self.set_len(len + slice.len());
|
||||||
|
self.get_unchecked_mut(len..).copy_from_slice(slice);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<T> Vec<T> {
|
impl<T> Vec<T> {
|
||||||
fn extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) {
|
fn extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) {
|
||||||
// This is the case for a general iterator.
|
// This is the case for a general iterator.
|
||||||
|
@ -1669,7 +1696,7 @@ impl<T> Vec<T> {
|
||||||
#[stable(feature = "extend_ref", since = "1.2.0")]
|
#[stable(feature = "extend_ref", since = "1.2.0")]
|
||||||
impl<'a, T: 'a + Copy> Extend<&'a T> for Vec<T> {
|
impl<'a, T: 'a + Copy> Extend<&'a T> for Vec<T> {
|
||||||
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
|
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
|
||||||
self.extend(iter.into_iter().map(|&x| x))
|
self.spec_extend(iter.into_iter())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1988,7 +2015,11 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<T> ExactSizeIterator for IntoIter<T> {}
|
impl<T> ExactSizeIterator for IntoIter<T> {
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.ptr == self.end
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<T> FusedIterator for IntoIter<T> {}
|
impl<T> FusedIterator for IntoIter<T> {}
|
||||||
|
@ -2082,7 +2113,11 @@ impl<'a, T> Drop for Drain<'a, T> {
|
||||||
|
|
||||||
|
|
||||||
#[stable(feature = "drain", since = "1.6.0")]
|
#[stable(feature = "drain", since = "1.6.0")]
|
||||||
impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
|
impl<'a, T> ExactSizeIterator for Drain<'a, T> {
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.iter.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<'a, T> FusedIterator for Drain<'a, T> {}
|
impl<'a, T> FusedIterator for Drain<'a, T> {}
|
||||||
|
|
|
@ -810,7 +810,7 @@ impl<T> VecDeque<T> {
|
||||||
/// ```
|
/// ```
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
pub fn is_empty(&self) -> bool {
|
pub fn is_empty(&self) -> bool {
|
||||||
self.len() == 0
|
self.tail == self.head
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a draining iterator that removes the specified range in the
|
/// Create a draining iterator that removes the specified range in the
|
||||||
|
@ -1916,7 +1916,11 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
|
impl<'a, T> ExactSizeIterator for Iter<'a, T> {
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.head == self.tail
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<'a, T> FusedIterator for Iter<'a, T> {}
|
impl<'a, T> FusedIterator for Iter<'a, T> {}
|
||||||
|
@ -1980,7 +1984,11 @@ impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
|
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.head == self.tail
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<'a, T> FusedIterator for IterMut<'a, T> {}
|
impl<'a, T> FusedIterator for IterMut<'a, T> {}
|
||||||
|
@ -2017,7 +2025,11 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<T> ExactSizeIterator for IntoIter<T> {}
|
impl<T> ExactSizeIterator for IntoIter<T> {
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.inner.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<T> FusedIterator for IntoIter<T> {}
|
impl<T> FusedIterator for IntoIter<T> {}
|
||||||
|
|
|
@ -18,12 +18,14 @@
|
||||||
#![feature(const_fn)]
|
#![feature(const_fn)]
|
||||||
#![feature(dedup_by)]
|
#![feature(dedup_by)]
|
||||||
#![feature(enumset)]
|
#![feature(enumset)]
|
||||||
|
#![feature(exact_size_is_empty)]
|
||||||
#![feature(pattern)]
|
#![feature(pattern)]
|
||||||
#![feature(rand)]
|
#![feature(rand)]
|
||||||
#![feature(repeat_str)]
|
#![feature(repeat_str)]
|
||||||
#![feature(step_by)]
|
#![feature(step_by)]
|
||||||
#![feature(str_escape)]
|
#![feature(str_escape)]
|
||||||
#![feature(str_replacen)]
|
#![feature(str_replacen)]
|
||||||
|
#![feature(string_split_off)]
|
||||||
#![feature(test)]
|
#![feature(test)]
|
||||||
#![feature(unboxed_closures)]
|
#![feature(unboxed_closures)]
|
||||||
#![feature(unicode)]
|
#![feature(unicode)]
|
||||||
|
@ -31,7 +33,7 @@
|
||||||
|
|
||||||
extern crate collections;
|
extern crate collections;
|
||||||
extern crate test;
|
extern crate test;
|
||||||
extern crate rustc_unicode;
|
extern crate std_unicode;
|
||||||
|
|
||||||
use std::hash::{Hash, Hasher};
|
use std::hash::{Hash, Hasher};
|
||||||
use std::collections::hash_map::DefaultHasher;
|
use std::collections::hash_map::DefaultHasher;
|
||||||
|
|
|
@ -383,7 +383,7 @@ fn test_reverse() {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_sort() {
|
fn test_sort() {
|
||||||
for len in 4..25 {
|
for len in (2..25).chain(500..510) {
|
||||||
for _ in 0..100 {
|
for _ in 0..100 {
|
||||||
let mut v: Vec<_> = thread_rng().gen_iter::<i32>().take(len).collect();
|
let mut v: Vec<_> = thread_rng().gen_iter::<i32>().take(len).collect();
|
||||||
let mut v1 = v.clone();
|
let mut v1 = v.clone();
|
||||||
|
@ -410,7 +410,7 @@ fn test_sort() {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_sort_stability() {
|
fn test_sort_stability() {
|
||||||
for len in 4..25 {
|
for len in (2..25).chain(500..510) {
|
||||||
for _ in 0..10 {
|
for _ in 0..10 {
|
||||||
let mut counts = [0; 10];
|
let mut counts = [0; 10];
|
||||||
|
|
||||||
|
@ -441,6 +441,13 @@ fn test_sort_stability() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_sort_zero_sized_type() {
|
||||||
|
// Should not panic.
|
||||||
|
[(); 10].sort();
|
||||||
|
[(); 100].sort();
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_concat() {
|
fn test_concat() {
|
||||||
let v: [Vec<i32>; 0] = [];
|
let v: [Vec<i32>; 0] = [];
|
||||||
|
@ -633,6 +640,16 @@ fn test_iter_clone() {
|
||||||
assert_eq!(it.next(), jt.next());
|
assert_eq!(it.next(), jt.next());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_iter_is_empty() {
|
||||||
|
let xs = [1, 2, 5, 10, 11];
|
||||||
|
for i in 0..xs.len() {
|
||||||
|
for j in i..xs.len() {
|
||||||
|
assert_eq!(xs[i..j].iter().is_empty(), xs[i..j].is_empty());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_mut_iterator() {
|
fn test_mut_iterator() {
|
||||||
let mut xs = [1, 2, 3, 4, 5];
|
let mut xs = [1, 2, 3, 4, 5];
|
||||||
|
@ -1328,89 +1345,104 @@ mod bench {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[bench]
|
fn gen_ascending(len: usize) -> Vec<u64> {
|
||||||
fn sort_random_small(b: &mut Bencher) {
|
(0..len as u64).collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn gen_descending(len: usize) -> Vec<u64> {
|
||||||
|
(0..len as u64).rev().collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn gen_random(len: usize) -> Vec<u64> {
|
||||||
let mut rng = thread_rng();
|
let mut rng = thread_rng();
|
||||||
b.iter(|| {
|
rng.gen_iter::<u64>().take(len).collect()
|
||||||
let mut v: Vec<_> = rng.gen_iter::<u64>().take(5).collect();
|
|
||||||
v.sort();
|
|
||||||
});
|
|
||||||
b.bytes = 5 * mem::size_of::<u64>() as u64;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[bench]
|
fn gen_mostly_ascending(len: usize) -> Vec<u64> {
|
||||||
fn sort_random_medium(b: &mut Bencher) {
|
|
||||||
let mut rng = thread_rng();
|
let mut rng = thread_rng();
|
||||||
b.iter(|| {
|
let mut v = gen_ascending(len);
|
||||||
let mut v: Vec<_> = rng.gen_iter::<u64>().take(100).collect();
|
for _ in (0usize..).take_while(|x| x * x <= len) {
|
||||||
v.sort();
|
let x = rng.gen::<usize>() % len;
|
||||||
});
|
let y = rng.gen::<usize>() % len;
|
||||||
b.bytes = 100 * mem::size_of::<u64>() as u64;
|
v.swap(x, y);
|
||||||
|
}
|
||||||
|
v
|
||||||
}
|
}
|
||||||
|
|
||||||
#[bench]
|
fn gen_mostly_descending(len: usize) -> Vec<u64> {
|
||||||
fn sort_random_large(b: &mut Bencher) {
|
|
||||||
let mut rng = thread_rng();
|
let mut rng = thread_rng();
|
||||||
b.iter(|| {
|
let mut v = gen_descending(len);
|
||||||
let mut v: Vec<_> = rng.gen_iter::<u64>().take(10000).collect();
|
for _ in (0usize..).take_while(|x| x * x <= len) {
|
||||||
v.sort();
|
let x = rng.gen::<usize>() % len;
|
||||||
});
|
let y = rng.gen::<usize>() % len;
|
||||||
b.bytes = 10000 * mem::size_of::<u64>() as u64;
|
v.swap(x, y);
|
||||||
|
}
|
||||||
|
v
|
||||||
}
|
}
|
||||||
|
|
||||||
#[bench]
|
fn gen_big_random(len: usize) -> Vec<[u64; 16]> {
|
||||||
fn sort_sorted(b: &mut Bencher) {
|
|
||||||
let mut v: Vec<_> = (0..10000).collect();
|
|
||||||
b.iter(|| {
|
|
||||||
v.sort();
|
|
||||||
});
|
|
||||||
b.bytes = (v.len() * mem::size_of_val(&v[0])) as u64;
|
|
||||||
}
|
|
||||||
|
|
||||||
type BigSortable = (u64, u64, u64, u64);
|
|
||||||
|
|
||||||
#[bench]
|
|
||||||
fn sort_big_random_small(b: &mut Bencher) {
|
|
||||||
let mut rng = thread_rng();
|
let mut rng = thread_rng();
|
||||||
b.iter(|| {
|
rng.gen_iter().map(|x| [x; 16]).take(len).collect()
|
||||||
let mut v = rng.gen_iter::<BigSortable>()
|
|
||||||
.take(5)
|
|
||||||
.collect::<Vec<BigSortable>>();
|
|
||||||
v.sort();
|
|
||||||
});
|
|
||||||
b.bytes = 5 * mem::size_of::<BigSortable>() as u64;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[bench]
|
fn gen_big_ascending(len: usize) -> Vec<[u64; 16]> {
|
||||||
fn sort_big_random_medium(b: &mut Bencher) {
|
(0..len as u64).map(|x| [x; 16]).take(len).collect()
|
||||||
let mut rng = thread_rng();
|
|
||||||
b.iter(|| {
|
|
||||||
let mut v = rng.gen_iter::<BigSortable>()
|
|
||||||
.take(100)
|
|
||||||
.collect::<Vec<BigSortable>>();
|
|
||||||
v.sort();
|
|
||||||
});
|
|
||||||
b.bytes = 100 * mem::size_of::<BigSortable>() as u64;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[bench]
|
fn gen_big_descending(len: usize) -> Vec<[u64; 16]> {
|
||||||
fn sort_big_random_large(b: &mut Bencher) {
|
(0..len as u64).rev().map(|x| [x; 16]).take(len).collect()
|
||||||
let mut rng = thread_rng();
|
|
||||||
b.iter(|| {
|
|
||||||
let mut v = rng.gen_iter::<BigSortable>()
|
|
||||||
.take(10000)
|
|
||||||
.collect::<Vec<BigSortable>>();
|
|
||||||
v.sort();
|
|
||||||
});
|
|
||||||
b.bytes = 10000 * mem::size_of::<BigSortable>() as u64;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
macro_rules! sort_bench {
|
||||||
|
($name:ident, $gen:expr, $len:expr) => {
|
||||||
#[bench]
|
#[bench]
|
||||||
fn sort_big_sorted(b: &mut Bencher) {
|
fn $name(b: &mut Bencher) {
|
||||||
let mut v: Vec<BigSortable> = (0..10000).map(|i| (i, i, i, i)).collect();
|
b.iter(|| $gen($len).sort());
|
||||||
|
b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sort_bench!(sort_small_random, gen_random, 10);
|
||||||
|
sort_bench!(sort_small_ascending, gen_ascending, 10);
|
||||||
|
sort_bench!(sort_small_descending, gen_descending, 10);
|
||||||
|
|
||||||
|
sort_bench!(sort_small_big_random, gen_big_random, 10);
|
||||||
|
sort_bench!(sort_small_big_ascending, gen_big_ascending, 10);
|
||||||
|
sort_bench!(sort_small_big_descending, gen_big_descending, 10);
|
||||||
|
|
||||||
|
sort_bench!(sort_medium_random, gen_random, 100);
|
||||||
|
sort_bench!(sort_medium_ascending, gen_ascending, 100);
|
||||||
|
sort_bench!(sort_medium_descending, gen_descending, 100);
|
||||||
|
|
||||||
|
sort_bench!(sort_large_random, gen_random, 10000);
|
||||||
|
sort_bench!(sort_large_ascending, gen_ascending, 10000);
|
||||||
|
sort_bench!(sort_large_descending, gen_descending, 10000);
|
||||||
|
sort_bench!(sort_large_mostly_ascending, gen_mostly_ascending, 10000);
|
||||||
|
sort_bench!(sort_large_mostly_descending, gen_mostly_descending, 10000);
|
||||||
|
|
||||||
|
sort_bench!(sort_large_big_random, gen_big_random, 10000);
|
||||||
|
sort_bench!(sort_large_big_ascending, gen_big_ascending, 10000);
|
||||||
|
sort_bench!(sort_large_big_descending, gen_big_descending, 10000);
|
||||||
|
|
||||||
|
#[bench]
|
||||||
|
fn sort_large_random_expensive(b: &mut Bencher) {
|
||||||
|
let len = 10000;
|
||||||
b.iter(|| {
|
b.iter(|| {
|
||||||
v.sort();
|
let mut count = 0;
|
||||||
|
let cmp = move |a: &u64, b: &u64| {
|
||||||
|
count += 1;
|
||||||
|
if count % 1_000_000_000 == 0 {
|
||||||
|
panic!("should not happen");
|
||||||
|
}
|
||||||
|
(*a as f64).cos().partial_cmp(&(*b as f64).cos()).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut v = gen_random(len);
|
||||||
|
v.sort_by(cmp);
|
||||||
|
|
||||||
|
black_box(count);
|
||||||
});
|
});
|
||||||
b.bytes = (v.len() * mem::size_of_val(&v[0])) as u64;
|
b.bytes = len as u64 * mem::size_of::<u64>() as u64;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -530,7 +530,7 @@ fn from_utf8_mostly_ascii() {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_is_utf16() {
|
fn test_is_utf16() {
|
||||||
use rustc_unicode::str::is_utf16;
|
use std_unicode::str::is_utf16;
|
||||||
|
|
||||||
macro_rules! pos {
|
macro_rules! pos {
|
||||||
($($e:expr),*) => { { $(assert!(is_utf16($e));)* } }
|
($($e:expr),*) => { { $(assert!(is_utf16($e));)* } }
|
||||||
|
@ -1186,7 +1186,7 @@ fn test_rev_split_char_iterator_no_trailing() {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_utf16_code_units() {
|
fn test_utf16_code_units() {
|
||||||
use rustc_unicode::str::Utf16Encoder;
|
use std_unicode::str::Utf16Encoder;
|
||||||
assert_eq!(Utf16Encoder::new(vec!['é', '\u{1F4A9}'].into_iter()).collect::<Vec<u16>>(),
|
assert_eq!(Utf16Encoder::new(vec!['é', '\u{1F4A9}'].into_iter()).collect::<Vec<u16>>(),
|
||||||
[0xE9, 0xD83D, 0xDCA9])
|
[0xE9, 0xD83D, 0xDCA9])
|
||||||
}
|
}
|
||||||
|
|
|
@ -132,7 +132,7 @@ fn test_from_utf16() {
|
||||||
let s_as_utf16 = s.encode_utf16().collect::<Vec<u16>>();
|
let s_as_utf16 = s.encode_utf16().collect::<Vec<u16>>();
|
||||||
let u_as_string = String::from_utf16(&u).unwrap();
|
let u_as_string = String::from_utf16(&u).unwrap();
|
||||||
|
|
||||||
assert!(::rustc_unicode::str::is_utf16(&u));
|
assert!(::std_unicode::str::is_utf16(&u));
|
||||||
assert_eq!(s_as_utf16, u);
|
assert_eq!(s_as_utf16, u);
|
||||||
|
|
||||||
assert_eq!(u_as_string, s);
|
assert_eq!(u_as_string, s);
|
||||||
|
@ -231,6 +231,45 @@ fn test_pop() {
|
||||||
assert_eq!(data, "ประเทศไทย中");
|
assert_eq!(data, "ประเทศไทย中");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_split_off_empty() {
|
||||||
|
let orig = "Hello, world!";
|
||||||
|
let mut split = String::from(orig);
|
||||||
|
let empty: String = split.split_off(orig.len());
|
||||||
|
assert!(empty.is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
#[should_panic]
|
||||||
|
fn test_split_off_past_end() {
|
||||||
|
let orig = "Hello, world!";
|
||||||
|
let mut split = String::from(orig);
|
||||||
|
split.split_off(orig.len() + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
#[should_panic]
|
||||||
|
fn test_split_off_mid_char() {
|
||||||
|
let mut orig = String::from("山");
|
||||||
|
orig.split_off(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_split_off_ascii() {
|
||||||
|
let mut ab = String::from("ABCD");
|
||||||
|
let cd = ab.split_off(2);
|
||||||
|
assert_eq!(ab, "AB");
|
||||||
|
assert_eq!(cd, "CD");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_split_off_unicode() {
|
||||||
|
let mut nihon = String::from("日本語");
|
||||||
|
let go = nihon.split_off("日本".len());
|
||||||
|
assert_eq!(nihon, "日本");
|
||||||
|
assert_eq!(go, "語");
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_str_truncate() {
|
fn test_str_truncate() {
|
||||||
let mut s = String::from("12345");
|
let mut s = String::from("12345");
|
||||||
|
|
|
@ -1007,3 +1007,24 @@ fn assert_covariance() {
|
||||||
d
|
d
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_is_empty() {
|
||||||
|
let mut v = VecDeque::<i32>::new();
|
||||||
|
assert!(v.is_empty());
|
||||||
|
assert!(v.iter().is_empty());
|
||||||
|
assert!(v.iter_mut().is_empty());
|
||||||
|
v.extend(&[2, 3, 4]);
|
||||||
|
assert!(!v.is_empty());
|
||||||
|
assert!(!v.iter().is_empty());
|
||||||
|
assert!(!v.iter_mut().is_empty());
|
||||||
|
while let Some(_) = v.pop_front() {
|
||||||
|
assert_eq!(v.is_empty(), v.len() == 0);
|
||||||
|
assert_eq!(v.iter().is_empty(), v.iter().len() == 0);
|
||||||
|
assert_eq!(v.iter_mut().is_empty(), v.iter_mut().len() == 0);
|
||||||
|
}
|
||||||
|
assert!(v.is_empty());
|
||||||
|
assert!(v.iter().is_empty());
|
||||||
|
assert!(v.iter_mut().is_empty());
|
||||||
|
assert!(v.into_iter().is_empty());
|
||||||
|
}
|
||||||
|
|
|
@ -8,6 +8,7 @@ version = "0.0.0"
|
||||||
name = "compiler_builtins"
|
name = "compiler_builtins"
|
||||||
path = "lib.rs"
|
path = "lib.rs"
|
||||||
test = false
|
test = false
|
||||||
|
bench = false
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
core = { path = "../libcore" }
|
core = { path = "../libcore" }
|
||||||
|
|
|
@ -94,6 +94,7 @@ fn main() {
|
||||||
cfg.flag("-fvisibility=hidden");
|
cfg.flag("-fvisibility=hidden");
|
||||||
cfg.flag("-fomit-frame-pointer");
|
cfg.flag("-fomit-frame-pointer");
|
||||||
cfg.flag("-ffreestanding");
|
cfg.flag("-ffreestanding");
|
||||||
|
cfg.define("VISIBILITY_HIDDEN", None);
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut sources = Sources::new();
|
let mut sources = Sources::new();
|
||||||
|
|
|
@ -7,7 +7,12 @@ version = "0.0.0"
|
||||||
name = "core"
|
name = "core"
|
||||||
path = "lib.rs"
|
path = "lib.rs"
|
||||||
test = false
|
test = false
|
||||||
|
bench = false
|
||||||
|
|
||||||
[[test]]
|
[[test]]
|
||||||
name = "coretest"
|
name = "coretest"
|
||||||
path = "../libcoretest/lib.rs"
|
path = "../libcoretest/lib.rs"
|
||||||
|
|
||||||
|
[[bench]]
|
||||||
|
name = "coretest"
|
||||||
|
path = "../libcoretest/lib.rs"
|
||||||
|
|
|
@ -10,7 +10,7 @@
|
||||||
|
|
||||||
//! Character manipulation.
|
//! Character manipulation.
|
||||||
//!
|
//!
|
||||||
//! For more details, see ::rustc_unicode::char (a.k.a. std::char)
|
//! For more details, see ::std_unicode::char (a.k.a. std::char)
|
||||||
|
|
||||||
#![allow(non_snake_case)]
|
#![allow(non_snake_case)]
|
||||||
#![stable(feature = "core_char", since = "1.2.0")]
|
#![stable(feature = "core_char", since = "1.2.0")]
|
||||||
|
@ -238,7 +238,7 @@ impl fmt::Display for CharTryFromError {
|
||||||
/// A 'radix' here is sometimes also called a 'base'. A radix of two
|
/// A 'radix' here is sometimes also called a 'base'. A radix of two
|
||||||
/// indicates a binary number, a radix of ten, decimal, and a radix of
|
/// indicates a binary number, a radix of ten, decimal, and a radix of
|
||||||
/// sixteen, hexadecimal, to give some common values. Arbitrary
|
/// sixteen, hexadecimal, to give some common values. Arbitrary
|
||||||
/// radicum are supported.
|
/// radices are supported.
|
||||||
///
|
///
|
||||||
/// `from_digit()` will return `None` if the input is not a digit in
|
/// `from_digit()` will return `None` if the input is not a digit in
|
||||||
/// the given radix.
|
/// the given radix.
|
||||||
|
|
|
@ -166,7 +166,9 @@ pub struct Formatter<'a> {
|
||||||
// NB. Argument is essentially an optimized partially applied formatting function,
|
// NB. Argument is essentially an optimized partially applied formatting function,
|
||||||
// equivalent to `exists T.(&T, fn(&T, &mut Formatter) -> Result`.
|
// equivalent to `exists T.(&T, fn(&T, &mut Formatter) -> Result`.
|
||||||
|
|
||||||
enum Void {}
|
struct Void {
|
||||||
|
_priv: (),
|
||||||
|
}
|
||||||
|
|
||||||
/// This struct represents the generic "argument" which is taken by the Xprintf
|
/// This struct represents the generic "argument" which is taken by the Xprintf
|
||||||
/// family of functions. It contains a function to format the given value. At
|
/// family of functions. It contains a function to format the given value. At
|
||||||
|
|
|
@ -247,7 +247,7 @@ pub trait Iterator {
|
||||||
/// ```
|
/// ```
|
||||||
#[inline]
|
#[inline]
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
fn nth(&mut self, mut n: usize) -> Option<Self::Item> where Self: Sized {
|
fn nth(&mut self, mut n: usize) -> Option<Self::Item> {
|
||||||
for x in self {
|
for x in self {
|
||||||
if n == 0 { return Some(x) }
|
if n == 0 { return Some(x) }
|
||||||
n -= 1;
|
n -= 1;
|
||||||
|
@ -2179,4 +2179,7 @@ impl<'a, I: Iterator + ?Sized> Iterator for &'a mut I {
|
||||||
type Item = I::Item;
|
type Item = I::Item;
|
||||||
fn next(&mut self) -> Option<I::Item> { (**self).next() }
|
fn next(&mut self) -> Option<I::Item> { (**self).next() }
|
||||||
fn size_hint(&self) -> (usize, Option<usize>) { (**self).size_hint() }
|
fn size_hint(&self) -> (usize, Option<usize>) { (**self).size_hint() }
|
||||||
|
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||||
|
(**self).nth(n)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -225,12 +225,12 @@
|
||||||
//! often called 'iterator adapters', as they're a form of the 'adapter
|
//! often called 'iterator adapters', as they're a form of the 'adapter
|
||||||
//! pattern'.
|
//! pattern'.
|
||||||
//!
|
//!
|
||||||
//! Common iterator adapters include [`map()`], [`take()`], and [`collect()`].
|
//! Common iterator adapters include [`map()`], [`take()`], and [`filter()`].
|
||||||
//! For more, see their documentation.
|
//! For more, see their documentation.
|
||||||
//!
|
//!
|
||||||
//! [`map()`]: trait.Iterator.html#method.map
|
//! [`map()`]: trait.Iterator.html#method.map
|
||||||
//! [`take()`]: trait.Iterator.html#method.take
|
//! [`take()`]: trait.Iterator.html#method.take
|
||||||
//! [`collect()`]: trait.Iterator.html#method.collect
|
//! [`filter()`]: trait.Iterator.html#method.filter
|
||||||
//!
|
//!
|
||||||
//! # Laziness
|
//! # Laziness
|
||||||
//!
|
//!
|
||||||
|
@ -268,7 +268,7 @@
|
||||||
//! [`map()`]: trait.Iterator.html#method.map
|
//! [`map()`]: trait.Iterator.html#method.map
|
||||||
//!
|
//!
|
||||||
//! The two most common ways to evaluate an iterator are to use a `for` loop
|
//! The two most common ways to evaluate an iterator are to use a `for` loop
|
||||||
//! like this, or using the [`collect()`] adapter to produce a new collection.
|
//! like this, or using the [`collect()`] method to produce a new collection.
|
||||||
//!
|
//!
|
||||||
//! [`collect()`]: trait.Iterator.html#method.collect
|
//! [`collect()`]: trait.Iterator.html#method.collect
|
||||||
//!
|
//!
|
||||||
|
@ -368,7 +368,16 @@ impl<I> DoubleEndedIterator for Rev<I> where I: DoubleEndedIterator {
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<I> ExactSizeIterator for Rev<I>
|
impl<I> ExactSizeIterator for Rev<I>
|
||||||
where I: ExactSizeIterator + DoubleEndedIterator {}
|
where I: ExactSizeIterator + DoubleEndedIterator
|
||||||
|
{
|
||||||
|
fn len(&self) -> usize {
|
||||||
|
self.iter.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.iter.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<I> FusedIterator for Rev<I>
|
impl<I> FusedIterator for Rev<I>
|
||||||
|
@ -425,7 +434,15 @@ impl<'a, I, T: 'a> DoubleEndedIterator for Cloned<I>
|
||||||
#[stable(feature = "iter_cloned", since = "1.1.0")]
|
#[stable(feature = "iter_cloned", since = "1.1.0")]
|
||||||
impl<'a, I, T: 'a> ExactSizeIterator for Cloned<I>
|
impl<'a, I, T: 'a> ExactSizeIterator for Cloned<I>
|
||||||
where I: ExactSizeIterator<Item=&'a T>, T: Clone
|
where I: ExactSizeIterator<Item=&'a T>, T: Clone
|
||||||
{}
|
{
|
||||||
|
fn len(&self) -> usize {
|
||||||
|
self.it.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.it.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<'a, I, T: 'a> FusedIterator for Cloned<I>
|
impl<'a, I, T: 'a> FusedIterator for Cloned<I>
|
||||||
|
@ -920,7 +937,7 @@ unsafe impl<A, B> TrustedLen for Zip<A, B>
|
||||||
/// you can also [`map()`] backwards:
|
/// you can also [`map()`] backwards:
|
||||||
///
|
///
|
||||||
/// ```rust
|
/// ```rust
|
||||||
/// let v: Vec<i32> = vec![1, 2, 3].into_iter().rev().map(|x| x + 1).collect();
|
/// let v: Vec<i32> = vec![1, 2, 3].into_iter().map(|x| x + 1).rev().collect();
|
||||||
///
|
///
|
||||||
/// assert_eq!(v, [4, 3, 2]);
|
/// assert_eq!(v, [4, 3, 2]);
|
||||||
/// ```
|
/// ```
|
||||||
|
@ -1007,7 +1024,16 @@ impl<B, I: DoubleEndedIterator, F> DoubleEndedIterator for Map<I, F> where
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<B, I: ExactSizeIterator, F> ExactSizeIterator for Map<I, F>
|
impl<B, I: ExactSizeIterator, F> ExactSizeIterator for Map<I, F>
|
||||||
where F: FnMut(I::Item) -> B {}
|
where F: FnMut(I::Item) -> B
|
||||||
|
{
|
||||||
|
fn len(&self) -> usize {
|
||||||
|
self.iter.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.iter.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<B, I: FusedIterator, F> FusedIterator for Map<I, F>
|
impl<B, I: FusedIterator, F> FusedIterator for Map<I, F>
|
||||||
|
@ -1236,7 +1262,15 @@ impl<I> DoubleEndedIterator for Enumerate<I> where
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<I> ExactSizeIterator for Enumerate<I> where I: ExactSizeIterator {}
|
impl<I> ExactSizeIterator for Enumerate<I> where I: ExactSizeIterator {
|
||||||
|
fn len(&self) -> usize {
|
||||||
|
self.iter.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.iter.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
unsafe impl<I> TrustedRandomAccess for Enumerate<I>
|
unsafe impl<I> TrustedRandomAccess for Enumerate<I>
|
||||||
|
@ -1945,7 +1979,15 @@ impl<I> DoubleEndedIterator for Fuse<I>
|
||||||
|
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<I> ExactSizeIterator for Fuse<I> where I: ExactSizeIterator {}
|
impl<I> ExactSizeIterator for Fuse<I> where I: ExactSizeIterator {
|
||||||
|
fn len(&self) -> usize {
|
||||||
|
self.iter.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.iter.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// An iterator that calls a function with a reference to each element before
|
/// An iterator that calls a function with a reference to each element before
|
||||||
/// yielding it.
|
/// yielding it.
|
||||||
|
@ -2012,7 +2054,16 @@ impl<I: DoubleEndedIterator, F> DoubleEndedIterator for Inspect<I, F>
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<I: ExactSizeIterator, F> ExactSizeIterator for Inspect<I, F>
|
impl<I: ExactSizeIterator, F> ExactSizeIterator for Inspect<I, F>
|
||||||
where F: FnMut(&I::Item) {}
|
where F: FnMut(&I::Item)
|
||||||
|
{
|
||||||
|
fn len(&self) -> usize {
|
||||||
|
self.iter.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.iter.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<I: FusedIterator, F> FusedIterator for Inspect<I, F>
|
impl<I: FusedIterator, F> FusedIterator for Inspect<I, F>
|
||||||
|
|
|
@ -552,7 +552,14 @@ pub trait ExactSizeIterator: Iterator {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for &'a mut I {}
|
impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for &'a mut I {
|
||||||
|
fn len(&self) -> usize {
|
||||||
|
(**self).len()
|
||||||
|
}
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
(**self).is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Trait to represent types that can be created by summing up an iterator.
|
/// Trait to represent types that can be created by summing up an iterator.
|
||||||
///
|
///
|
||||||
|
|
|
@ -89,7 +89,6 @@
|
||||||
#![feature(specialization)]
|
#![feature(specialization)]
|
||||||
#![feature(staged_api)]
|
#![feature(staged_api)]
|
||||||
#![feature(unboxed_closures)]
|
#![feature(unboxed_closures)]
|
||||||
#![cfg_attr(stage0, feature(question_mark))]
|
|
||||||
#![feature(never_type)]
|
#![feature(never_type)]
|
||||||
#![feature(prelude_import)]
|
#![feature(prelude_import)]
|
||||||
|
|
||||||
|
|
|
@ -659,6 +659,16 @@ impl<T> Option<T> {
|
||||||
impl<'a, T: Clone> Option<&'a T> {
|
impl<'a, T: Clone> Option<&'a T> {
|
||||||
/// Maps an `Option<&T>` to an `Option<T>` by cloning the contents of the
|
/// Maps an `Option<&T>` to an `Option<T>` by cloning the contents of the
|
||||||
/// option.
|
/// option.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// let x = 12;
|
||||||
|
/// let opt_x = Some(&x);
|
||||||
|
/// assert_eq!(opt_x, Some(&12));
|
||||||
|
/// let cloned = opt_x.cloned();
|
||||||
|
/// assert_eq!(cloned, Some(12));
|
||||||
|
/// ```
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
pub fn cloned(self) -> Option<T> {
|
pub fn cloned(self) -> Option<T> {
|
||||||
self.map(|t| t.clone())
|
self.map(|t| t.clone())
|
||||||
|
|
|
@ -38,10 +38,14 @@ use cmp;
|
||||||
use fmt;
|
use fmt;
|
||||||
use intrinsics::assume;
|
use intrinsics::assume;
|
||||||
use iter::*;
|
use iter::*;
|
||||||
use ops::{self, RangeFull};
|
use ops::{FnMut, self};
|
||||||
|
use option::Option;
|
||||||
|
use option::Option::{None, Some};
|
||||||
|
use result::Result;
|
||||||
|
use result::Result::{Ok, Err};
|
||||||
use ptr;
|
use ptr;
|
||||||
use mem;
|
use mem;
|
||||||
use marker;
|
use marker::{Copy, Send, Sync, Sized, self};
|
||||||
use iter_private::TrustedRandomAccess;
|
use iter_private::TrustedRandomAccess;
|
||||||
|
|
||||||
#[repr(C)]
|
#[repr(C)]
|
||||||
|
@ -80,7 +84,8 @@ pub trait SliceExt {
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
fn chunks(&self, size: usize) -> Chunks<Self::Item>;
|
fn chunks(&self, size: usize) -> Chunks<Self::Item>;
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
fn get(&self, index: usize) -> Option<&Self::Item>;
|
fn get<I>(&self, index: I) -> Option<&I::Output>
|
||||||
|
where I: SliceIndex<Self::Item>;
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
fn first(&self) -> Option<&Self::Item>;
|
fn first(&self) -> Option<&Self::Item>;
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
|
@ -90,7 +95,8 @@ pub trait SliceExt {
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
fn last(&self) -> Option<&Self::Item>;
|
fn last(&self) -> Option<&Self::Item>;
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
unsafe fn get_unchecked(&self, index: usize) -> &Self::Item;
|
unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
|
||||||
|
where I: SliceIndex<Self::Item>;
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
fn as_ptr(&self) -> *const Self::Item;
|
fn as_ptr(&self) -> *const Self::Item;
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
|
@ -108,7 +114,8 @@ pub trait SliceExt {
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
fn is_empty(&self) -> bool { self.len() == 0 }
|
fn is_empty(&self) -> bool { self.len() == 0 }
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
fn get_mut(&mut self, index: usize) -> Option<&mut Self::Item>;
|
fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
|
||||||
|
where I: SliceIndex<Self::Item>;
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
fn iter_mut(&mut self) -> IterMut<Self::Item>;
|
fn iter_mut(&mut self) -> IterMut<Self::Item>;
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
|
@ -137,7 +144,8 @@ pub trait SliceExt {
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
fn reverse(&mut self);
|
fn reverse(&mut self);
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut Self::Item;
|
unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
|
||||||
|
where I: SliceIndex<Self::Item>;
|
||||||
#[stable(feature = "core", since = "1.6.0")]
|
#[stable(feature = "core", since = "1.6.0")]
|
||||||
fn as_mut_ptr(&mut self) -> *mut Self::Item;
|
fn as_mut_ptr(&mut self) -> *mut Self::Item;
|
||||||
|
|
||||||
|
@ -258,8 +266,10 @@ impl<T> SliceExt for [T] {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn get(&self, index: usize) -> Option<&T> {
|
fn get<I>(&self, index: I) -> Option<&I::Output>
|
||||||
if index < self.len() { Some(&self[index]) } else { None }
|
where I: SliceIndex<T>
|
||||||
|
{
|
||||||
|
index.get(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
|
@ -284,8 +294,10 @@ impl<T> SliceExt for [T] {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
unsafe fn get_unchecked(&self, index: usize) -> &T {
|
unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
|
||||||
&*(self.as_ptr().offset(index as isize))
|
where I: SliceIndex<T>
|
||||||
|
{
|
||||||
|
index.get_unchecked(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
|
@ -323,8 +335,10 @@ impl<T> SliceExt for [T] {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn get_mut(&mut self, index: usize) -> Option<&mut T> {
|
fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
|
||||||
if index < self.len() { Some(&mut self[index]) } else { None }
|
where I: SliceIndex<T>
|
||||||
|
{
|
||||||
|
index.get_mut(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
|
@ -451,8 +465,10 @@ impl<T> SliceExt for [T] {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T {
|
unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
|
||||||
&mut *self.as_mut_ptr().offset(index as isize)
|
where I: SliceIndex<T>
|
||||||
|
{
|
||||||
|
index.get_unchecked_mut(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
|
@ -515,23 +531,26 @@ impl<T> SliceExt for [T] {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
|
#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
|
||||||
impl<T> ops::Index<usize> for [T] {
|
impl<T, I> ops::Index<I> for [T]
|
||||||
type Output = T;
|
where I: SliceIndex<T>
|
||||||
|
{
|
||||||
|
type Output = I::Output;
|
||||||
|
|
||||||
fn index(&self, index: usize) -> &T {
|
#[inline]
|
||||||
// NB built-in indexing
|
fn index(&self, index: I) -> &I::Output {
|
||||||
&(*self)[index]
|
index.index(self)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
|
#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
|
||||||
impl<T> ops::IndexMut<usize> for [T] {
|
impl<T, I> ops::IndexMut<I> for [T]
|
||||||
|
where I: SliceIndex<T>
|
||||||
|
{
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index_mut(&mut self, index: usize) -> &mut T {
|
fn index_mut(&mut self, index: I) -> &mut I::Output {
|
||||||
// NB built-in indexing
|
index.index_mut(self)
|
||||||
&mut (*self)[index]
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -547,205 +566,349 @@ fn slice_index_order_fail(index: usize, end: usize) -> ! {
|
||||||
panic!("slice index starts at {} but ends at {}", index, end);
|
panic!("slice index starts at {} but ends at {}", index, end);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// A helper trait used for indexing operations.
|
||||||
|
#[unstable(feature = "slice_get_slice", issue = "35729")]
|
||||||
|
#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
|
||||||
|
pub trait SliceIndex<T> {
|
||||||
|
/// The output type returned by methods.
|
||||||
|
type Output: ?Sized;
|
||||||
|
|
||||||
/// Implements slicing with syntax `&self[begin .. end]`.
|
/// Returns a shared reference to the output at this location, if in
|
||||||
///
|
/// bounds.
|
||||||
/// Returns a slice of self for the index range [`begin`..`end`).
|
fn get(self, slice: &[T]) -> Option<&Self::Output>;
|
||||||
///
|
|
||||||
/// This operation is `O(1)`.
|
/// Returns a mutable reference to the output at this location, if in
|
||||||
///
|
/// bounds.
|
||||||
/// # Panics
|
fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output>;
|
||||||
///
|
|
||||||
/// Requires that `begin <= end` and `end <= self.len()`,
|
/// Returns a shared reference to the output at this location, without
|
||||||
/// otherwise slicing will panic.
|
/// performing any bounds checking.
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
unsafe fn get_unchecked(self, slice: &[T]) -> &Self::Output;
|
||||||
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
|
|
||||||
impl<T> ops::Index<ops::Range<usize>> for [T] {
|
/// Returns a mutable reference to the output at this location, without
|
||||||
|
/// performing any bounds checking.
|
||||||
|
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut Self::Output;
|
||||||
|
|
||||||
|
/// Returns a shared reference to the output at this location, panicking
|
||||||
|
/// if out of bounds.
|
||||||
|
fn index(self, slice: &[T]) -> &Self::Output;
|
||||||
|
|
||||||
|
/// Returns a mutable reference to the output at this location, panicking
|
||||||
|
/// if out of bounds.
|
||||||
|
fn index_mut(self, slice: &mut [T]) -> &mut Self::Output;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
|
||||||
|
impl<T> SliceIndex<T> for usize {
|
||||||
|
type Output = T;
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn get(self, slice: &[T]) -> Option<&T> {
|
||||||
|
if self < slice.len() {
|
||||||
|
unsafe {
|
||||||
|
Some(self.get_unchecked(slice))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
|
||||||
|
if self < slice.len() {
|
||||||
|
unsafe {
|
||||||
|
Some(self.get_unchecked_mut(slice))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn get_unchecked(self, slice: &[T]) -> &T {
|
||||||
|
&*slice.as_ptr().offset(self as isize)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
|
||||||
|
&mut *slice.as_mut_ptr().offset(self as isize)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn index(self, slice: &[T]) -> &T {
|
||||||
|
// NB: use intrinsic indexing
|
||||||
|
&(*slice)[self]
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn index_mut(self, slice: &mut [T]) -> &mut T {
|
||||||
|
// NB: use intrinsic indexing
|
||||||
|
&mut (*slice)[self]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
|
||||||
|
impl<T> SliceIndex<T> for ops::Range<usize> {
|
||||||
type Output = [T];
|
type Output = [T];
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index(&self, index: ops::Range<usize>) -> &[T] {
|
fn get(self, slice: &[T]) -> Option<&[T]> {
|
||||||
if index.start > index.end {
|
if self.start > self.end || self.end > slice.len() {
|
||||||
slice_index_order_fail(index.start, index.end);
|
None
|
||||||
} else if index.end > self.len() {
|
} else {
|
||||||
slice_index_len_fail(index.end, self.len());
|
unsafe {
|
||||||
|
Some(self.get_unchecked(slice))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
|
||||||
|
if self.start > self.end || self.end > slice.len() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
unsafe {
|
||||||
|
Some(self.get_unchecked_mut(slice))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
|
||||||
|
from_raw_parts(slice.as_ptr().offset(self.start as isize), self.end - self.start)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||||
|
from_raw_parts_mut(slice.as_mut_ptr().offset(self.start as isize), self.end - self.start)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn index(self, slice: &[T]) -> &[T] {
|
||||||
|
if self.start > self.end {
|
||||||
|
slice_index_order_fail(self.start, self.end);
|
||||||
|
} else if self.end > slice.len() {
|
||||||
|
slice_index_len_fail(self.end, slice.len());
|
||||||
}
|
}
|
||||||
unsafe {
|
unsafe {
|
||||||
from_raw_parts (
|
self.get_unchecked(slice)
|
||||||
self.as_ptr().offset(index.start as isize),
|
}
|
||||||
index.end - index.start
|
}
|
||||||
)
|
|
||||||
|
#[inline]
|
||||||
|
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||||
|
if self.start > self.end {
|
||||||
|
slice_index_order_fail(self.start, self.end);
|
||||||
|
} else if self.end > slice.len() {
|
||||||
|
slice_index_len_fail(self.end, slice.len());
|
||||||
|
}
|
||||||
|
unsafe {
|
||||||
|
self.get_unchecked_mut(slice)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Implements slicing with syntax `&self[.. end]`.
|
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
|
||||||
///
|
impl<T> SliceIndex<T> for ops::RangeTo<usize> {
|
||||||
/// Returns a slice of self from the beginning until but not including
|
|
||||||
/// the index `end`.
|
|
||||||
///
|
|
||||||
/// Equivalent to `&self[0 .. end]`
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
|
||||||
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
|
|
||||||
impl<T> ops::Index<ops::RangeTo<usize>> for [T] {
|
|
||||||
type Output = [T];
|
type Output = [T];
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index(&self, index: ops::RangeTo<usize>) -> &[T] {
|
fn get(self, slice: &[T]) -> Option<&[T]> {
|
||||||
self.index(0 .. index.end)
|
(0..self.end).get(slice)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
|
||||||
|
(0..self.end).get_mut(slice)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
|
||||||
|
(0..self.end).get_unchecked(slice)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||||
|
(0..self.end).get_unchecked_mut(slice)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn index(self, slice: &[T]) -> &[T] {
|
||||||
|
(0..self.end).index(slice)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||||
|
(0..self.end).index_mut(slice)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Implements slicing with syntax `&self[begin ..]`.
|
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
|
||||||
///
|
impl<T> SliceIndex<T> for ops::RangeFrom<usize> {
|
||||||
/// Returns a slice of self from and including the index `begin` until the end.
|
|
||||||
///
|
|
||||||
/// Equivalent to `&self[begin .. self.len()]`
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
|
||||||
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
|
|
||||||
impl<T> ops::Index<ops::RangeFrom<usize>> for [T] {
|
|
||||||
type Output = [T];
|
type Output = [T];
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index(&self, index: ops::RangeFrom<usize>) -> &[T] {
|
fn get(self, slice: &[T]) -> Option<&[T]> {
|
||||||
self.index(index.start .. self.len())
|
(self.start..slice.len()).get(slice)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
|
||||||
|
(self.start..slice.len()).get_mut(slice)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
|
||||||
|
(self.start..slice.len()).get_unchecked(slice)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||||
|
(self.start..slice.len()).get_unchecked_mut(slice)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn index(self, slice: &[T]) -> &[T] {
|
||||||
|
(self.start..slice.len()).index(slice)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||||
|
(self.start..slice.len()).index_mut(slice)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Implements slicing with syntax `&self[..]`.
|
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
|
||||||
///
|
impl<T> SliceIndex<T> for ops::RangeFull {
|
||||||
/// Returns a slice of the whole slice. This operation cannot panic.
|
|
||||||
///
|
|
||||||
/// Equivalent to `&self[0 .. self.len()]`
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
|
||||||
impl<T> ops::Index<RangeFull> for [T] {
|
|
||||||
type Output = [T];
|
type Output = [T];
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index(&self, _index: RangeFull) -> &[T] {
|
fn get(self, slice: &[T]) -> Option<&[T]> {
|
||||||
self
|
Some(slice)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
|
||||||
|
Some(slice)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
|
||||||
|
slice
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||||
|
slice
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn index(self, slice: &[T]) -> &[T] {
|
||||||
|
slice
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||||
|
slice
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
|
|
||||||
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
|
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
|
||||||
impl<T> ops::Index<ops::RangeInclusive<usize>> for [T] {
|
impl<T> SliceIndex<T> for ops::RangeInclusive<usize> {
|
||||||
type Output = [T];
|
type Output = [T];
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index(&self, index: ops::RangeInclusive<usize>) -> &[T] {
|
fn get(self, slice: &[T]) -> Option<&[T]> {
|
||||||
match index {
|
match self {
|
||||||
|
ops::RangeInclusive::Empty { .. } => Some(&[]),
|
||||||
|
ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => None,
|
||||||
|
ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).get(slice),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
|
||||||
|
match self {
|
||||||
|
ops::RangeInclusive::Empty { .. } => Some(&mut []),
|
||||||
|
ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => None,
|
||||||
|
ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).get_mut(slice),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
|
||||||
|
match self {
|
||||||
ops::RangeInclusive::Empty { .. } => &[],
|
ops::RangeInclusive::Empty { .. } => &[],
|
||||||
ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() =>
|
ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).get_unchecked(slice),
|
||||||
panic!("attempted to index slice up to maximum usize"),
|
}
|
||||||
ops::RangeInclusive::NonEmpty { start, end } =>
|
}
|
||||||
self.index(start .. end+1)
|
|
||||||
|
#[inline]
|
||||||
|
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||||
|
match self {
|
||||||
|
ops::RangeInclusive::Empty { .. } => &mut [],
|
||||||
|
ops::RangeInclusive::NonEmpty { start, end } => {
|
||||||
|
(start..end + 1).get_unchecked_mut(slice)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
|
|
||||||
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
|
#[inline]
|
||||||
impl<T> ops::Index<ops::RangeToInclusive<usize>> for [T] {
|
fn index(self, slice: &[T]) -> &[T] {
|
||||||
|
match self {
|
||||||
|
ops::RangeInclusive::Empty { .. } => &[],
|
||||||
|
ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => {
|
||||||
|
panic!("attempted to index slice up to maximum usize");
|
||||||
|
},
|
||||||
|
ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).index(slice),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||||
|
match self {
|
||||||
|
ops::RangeInclusive::Empty { .. } => &mut [],
|
||||||
|
ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => {
|
||||||
|
panic!("attempted to index slice up to maximum usize");
|
||||||
|
},
|
||||||
|
ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).index_mut(slice),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
|
||||||
|
impl<T> SliceIndex<T> for ops::RangeToInclusive<usize> {
|
||||||
type Output = [T];
|
type Output = [T];
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index(&self, index: ops::RangeToInclusive<usize>) -> &[T] {
|
fn get(self, slice: &[T]) -> Option<&[T]> {
|
||||||
self.index(0...index.end)
|
(0...self.end).get(slice)
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Implements mutable slicing with syntax `&mut self[begin .. end]`.
|
|
||||||
///
|
|
||||||
/// Returns a slice of self for the index range [`begin`..`end`).
|
|
||||||
///
|
|
||||||
/// This operation is `O(1)`.
|
|
||||||
///
|
|
||||||
/// # Panics
|
|
||||||
///
|
|
||||||
/// Requires that `begin <= end` and `end <= self.len()`,
|
|
||||||
/// otherwise slicing will panic.
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
|
||||||
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
|
|
||||||
impl<T> ops::IndexMut<ops::Range<usize>> for [T] {
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index_mut(&mut self, index: ops::Range<usize>) -> &mut [T] {
|
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
|
||||||
if index.start > index.end {
|
(0...self.end).get_mut(slice)
|
||||||
slice_index_order_fail(index.start, index.end);
|
|
||||||
} else if index.end > self.len() {
|
|
||||||
slice_index_len_fail(index.end, self.len());
|
|
||||||
}
|
|
||||||
unsafe {
|
|
||||||
from_raw_parts_mut(
|
|
||||||
self.as_mut_ptr().offset(index.start as isize),
|
|
||||||
index.end - index.start
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Implements mutable slicing with syntax `&mut self[.. end]`.
|
|
||||||
///
|
|
||||||
/// Returns a slice of self from the beginning until but not including
|
|
||||||
/// the index `end`.
|
|
||||||
///
|
|
||||||
/// Equivalent to `&mut self[0 .. end]`
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
|
||||||
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
|
|
||||||
impl<T> ops::IndexMut<ops::RangeTo<usize>> for [T] {
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut [T] {
|
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
|
||||||
self.index_mut(0 .. index.end)
|
(0...self.end).get_unchecked(slice)
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Implements mutable slicing with syntax `&mut self[begin ..]`.
|
|
||||||
///
|
|
||||||
/// Returns a slice of self from and including the index `begin` until the end.
|
|
||||||
///
|
|
||||||
/// Equivalent to `&mut self[begin .. self.len()]`
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
|
||||||
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
|
|
||||||
impl<T> ops::IndexMut<ops::RangeFrom<usize>> for [T] {
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut [T] {
|
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||||
let len = self.len();
|
(0...self.end).get_unchecked_mut(slice)
|
||||||
self.index_mut(index.start .. len)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Implements mutable slicing with syntax `&mut self[..]`.
|
|
||||||
///
|
|
||||||
/// Returns a slice of the whole slice. This operation can not panic.
|
|
||||||
///
|
|
||||||
/// Equivalent to `&mut self[0 .. self.len()]`
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
|
||||||
impl<T> ops::IndexMut<RangeFull> for [T] {
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index_mut(&mut self, _index: RangeFull) -> &mut [T] {
|
fn index(self, slice: &[T]) -> &[T] {
|
||||||
self
|
(0...self.end).index(slice)
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
|
|
||||||
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
|
|
||||||
impl<T> ops::IndexMut<ops::RangeInclusive<usize>> for [T] {
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn index_mut(&mut self, index: ops::RangeInclusive<usize>) -> &mut [T] {
|
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||||
match index {
|
(0...self.end).index_mut(slice)
|
||||||
ops::RangeInclusive::Empty { .. } => &mut [],
|
|
||||||
ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() =>
|
|
||||||
panic!("attempted to index slice up to maximum usize"),
|
|
||||||
ops::RangeInclusive::NonEmpty { start, end } =>
|
|
||||||
self.index_mut(start .. end+1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
|
|
||||||
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
|
|
||||||
impl<T> ops::IndexMut<ops::RangeToInclusive<usize>> for [T] {
|
|
||||||
#[inline]
|
|
||||||
fn index_mut(&mut self, index: ops::RangeToInclusive<usize>) -> &mut [T] {
|
|
||||||
self.index_mut(0...index.end)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -983,7 +1146,11 @@ impl<'a, T> Iter<'a, T> {
|
||||||
iterator!{struct Iter -> *const T, &'a T}
|
iterator!{struct Iter -> *const T, &'a T}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
|
impl<'a, T> ExactSizeIterator for Iter<'a, T> {
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.ptr == self.end
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<'a, T> FusedIterator for Iter<'a, T> {}
|
impl<'a, T> FusedIterator for Iter<'a, T> {}
|
||||||
|
@ -1107,7 +1274,11 @@ impl<'a, T> IterMut<'a, T> {
|
||||||
iterator!{struct IterMut -> *mut T, &'a mut T}
|
iterator!{struct IterMut -> *mut T, &'a mut T}
|
||||||
|
|
||||||
#[stable(feature = "rust1", since = "1.0.0")]
|
#[stable(feature = "rust1", since = "1.0.0")]
|
||||||
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
|
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.ptr == self.end
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
impl<'a, T> FusedIterator for IterMut<'a, T> {}
|
impl<'a, T> FusedIterator for IterMut<'a, T> {}
|
||||||
|
|
|
@ -618,6 +618,11 @@ impl<'a> ExactSizeIterator for Bytes<'a> {
|
||||||
fn len(&self) -> usize {
|
fn len(&self) -> usize {
|
||||||
self.0.len()
|
self.0.len()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn is_empty(&self) -> bool {
|
||||||
|
self.0.is_empty()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[unstable(feature = "fused", issue = "35602")]
|
#[unstable(feature = "fused", issue = "35602")]
|
||||||
|
|
|
@ -40,7 +40,7 @@
|
||||||
extern crate core;
|
extern crate core;
|
||||||
extern crate test;
|
extern crate test;
|
||||||
extern crate libc;
|
extern crate libc;
|
||||||
extern crate rustc_unicode;
|
extern crate std_unicode;
|
||||||
extern crate rand;
|
extern crate rand;
|
||||||
|
|
||||||
mod any;
|
mod any;
|
||||||
|
|
|
@ -180,3 +180,47 @@ fn test_windows_last() {
|
||||||
let c2 = v2.windows(2);
|
let c2 = v2.windows(2);
|
||||||
assert_eq!(c2.last().unwrap()[0], 3);
|
assert_eq!(c2.last().unwrap()[0], 3);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn get_range() {
|
||||||
|
let v: &[i32] = &[0, 1, 2, 3, 4, 5];
|
||||||
|
assert_eq!(v.get(..), Some(&[0, 1, 2, 3, 4, 5][..]));
|
||||||
|
assert_eq!(v.get(..2), Some(&[0, 1][..]));
|
||||||
|
assert_eq!(v.get(2..), Some(&[2, 3, 4, 5][..]));
|
||||||
|
assert_eq!(v.get(1..4), Some(&[1, 2, 3][..]));
|
||||||
|
assert_eq!(v.get(7..), None);
|
||||||
|
assert_eq!(v.get(7..10), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn get_mut_range() {
|
||||||
|
let mut v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5];
|
||||||
|
assert_eq!(v.get_mut(..), Some(&mut [0, 1, 2, 3, 4, 5][..]));
|
||||||
|
assert_eq!(v.get_mut(..2), Some(&mut [0, 1][..]));
|
||||||
|
assert_eq!(v.get_mut(2..), Some(&mut [2, 3, 4, 5][..]));
|
||||||
|
assert_eq!(v.get_mut(1..4), Some(&mut [1, 2, 3][..]));
|
||||||
|
assert_eq!(v.get_mut(7..), None);
|
||||||
|
assert_eq!(v.get_mut(7..10), None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn get_unchecked_range() {
|
||||||
|
unsafe {
|
||||||
|
let v: &[i32] = &[0, 1, 2, 3, 4, 5];
|
||||||
|
assert_eq!(v.get_unchecked(..), &[0, 1, 2, 3, 4, 5][..]);
|
||||||
|
assert_eq!(v.get_unchecked(..2), &[0, 1][..]);
|
||||||
|
assert_eq!(v.get_unchecked(2..), &[2, 3, 4, 5][..]);
|
||||||
|
assert_eq!(v.get_unchecked(1..4), &[1, 2, 3][..]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn get_unchecked_mut_range() {
|
||||||
|
unsafe {
|
||||||
|
let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5];
|
||||||
|
assert_eq!(v.get_unchecked_mut(..), &mut [0, 1, 2, 3, 4, 5][..]);
|
||||||
|
assert_eq!(v.get_unchecked_mut(..2), &mut [0, 1][..]);
|
||||||
|
assert_eq!(v.get_unchecked_mut(2..), &mut[2, 3, 4, 5][..]);
|
||||||
|
assert_eq!(v.get_unchecked_mut(1..4), &mut [1, 2, 3][..]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -295,7 +295,6 @@
|
||||||
#![cfg_attr(not(stage0), deny(warnings))]
|
#![cfg_attr(not(stage0), deny(warnings))]
|
||||||
|
|
||||||
#![feature(str_escape)]
|
#![feature(str_escape)]
|
||||||
#![cfg_attr(stage0, feature(question_mark))]
|
|
||||||
|
|
||||||
use self::LabelText::*;
|
use self::LabelText::*;
|
||||||
|
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
Subproject commit 6e8c1b490ccbe5e84d248bab883515bc85394b5f
|
Subproject commit 0ac39c5ccf6a04395b7c40dd62321cb91f63f160
|
|
@ -6,6 +6,7 @@ version = "0.0.0"
|
||||||
[lib]
|
[lib]
|
||||||
path = "lib.rs"
|
path = "lib.rs"
|
||||||
test = false
|
test = false
|
||||||
|
bench = false
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
core = { path = "../libcore" }
|
core = { path = "../libcore" }
|
||||||
|
|
|
@ -6,6 +6,7 @@ version = "0.0.0"
|
||||||
[lib]
|
[lib]
|
||||||
path = "lib.rs"
|
path = "lib.rs"
|
||||||
test = false
|
test = false
|
||||||
|
bench = false
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
alloc = { path = "../liballoc" }
|
alloc = { path = "../liballoc" }
|
||||||
|
|
|
@ -10,8 +10,6 @@
|
||||||
|
|
||||||
use rustc_data_structures::graph;
|
use rustc_data_structures::graph;
|
||||||
use cfg::*;
|
use cfg::*;
|
||||||
use hir::def::Def;
|
|
||||||
use hir::pat_util;
|
|
||||||
use ty::{self, TyCtxt};
|
use ty::{self, TyCtxt};
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
use syntax::ptr::P;
|
use syntax::ptr::P;
|
||||||
|
@ -100,7 +98,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||||
fn pat(&mut self, pat: &hir::Pat, pred: CFGIndex) -> CFGIndex {
|
fn pat(&mut self, pat: &hir::Pat, pred: CFGIndex) -> CFGIndex {
|
||||||
match pat.node {
|
match pat.node {
|
||||||
PatKind::Binding(.., None) |
|
PatKind::Binding(.., None) |
|
||||||
PatKind::Path(..) |
|
PatKind::Path(_) |
|
||||||
PatKind::Lit(..) |
|
PatKind::Lit(..) |
|
||||||
PatKind::Range(..) |
|
PatKind::Range(..) |
|
||||||
PatKind::Wild => {
|
PatKind::Wild => {
|
||||||
|
@ -284,7 +282,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||||
|
|
||||||
hir::ExprBreak(label, ref opt_expr) => {
|
hir::ExprBreak(label, ref opt_expr) => {
|
||||||
let v = self.opt_expr(opt_expr, pred);
|
let v = self.opt_expr(opt_expr, pred);
|
||||||
let loop_scope = self.find_scope(expr, label.map(|l| l.node));
|
let loop_scope = self.find_scope(expr, label);
|
||||||
let b = self.add_ast_node(expr.id, &[v]);
|
let b = self.add_ast_node(expr.id, &[v]);
|
||||||
self.add_exiting_edge(expr, b,
|
self.add_exiting_edge(expr, b,
|
||||||
loop_scope, loop_scope.break_index);
|
loop_scope, loop_scope.break_index);
|
||||||
|
@ -292,7 +290,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
hir::ExprAgain(label) => {
|
hir::ExprAgain(label) => {
|
||||||
let loop_scope = self.find_scope(expr, label.map(|l| l.node));
|
let loop_scope = self.find_scope(expr, label);
|
||||||
let a = self.add_ast_node(expr.id, &[pred]);
|
let a = self.add_ast_node(expr.id, &[pred]);
|
||||||
self.add_exiting_edge(expr, a,
|
self.add_exiting_edge(expr, a,
|
||||||
loop_scope, loop_scope.continue_index);
|
loop_scope, loop_scope.continue_index);
|
||||||
|
@ -361,7 +359,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||||
|
|
||||||
hir::ExprClosure(..) |
|
hir::ExprClosure(..) |
|
||||||
hir::ExprLit(..) |
|
hir::ExprLit(..) |
|
||||||
hir::ExprPath(..) => {
|
hir::ExprPath(_) => {
|
||||||
self.straightline(expr, pred, None::<hir::Expr>.iter())
|
self.straightline(expr, pred, None::<hir::Expr>.iter())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -457,7 +455,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||||
// Visit the guard expression
|
// Visit the guard expression
|
||||||
let guard_exit = self.expr(&guard, guard_start);
|
let guard_exit = self.expr(&guard, guard_start);
|
||||||
|
|
||||||
let this_has_bindings = pat_util::pat_contains_bindings_or_wild(&pat);
|
let this_has_bindings = pat.contains_bindings_or_wild();
|
||||||
|
|
||||||
// If both this pattern and the previous pattern
|
// If both this pattern and the previous pattern
|
||||||
// were free of bindings, they must consist only
|
// were free of bindings, they must consist only
|
||||||
|
@ -570,23 +568,16 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||||
|
|
||||||
fn find_scope(&self,
|
fn find_scope(&self,
|
||||||
expr: &hir::Expr,
|
expr: &hir::Expr,
|
||||||
label: Option<ast::Name>) -> LoopScope {
|
label: Option<hir::Label>) -> LoopScope {
|
||||||
if label.is_none() {
|
match label {
|
||||||
return *self.loop_scopes.last().unwrap();
|
None => *self.loop_scopes.last().unwrap(),
|
||||||
}
|
Some(label) => {
|
||||||
|
|
||||||
match self.tcx.expect_def(expr.id) {
|
|
||||||
Def::Label(loop_id) => {
|
|
||||||
for l in &self.loop_scopes {
|
for l in &self.loop_scopes {
|
||||||
if l.loop_id == loop_id {
|
if l.loop_id == label.loop_id {
|
||||||
return *l;
|
return *l;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
span_bug!(expr.span, "no loop scope for id {}", loop_id);
|
span_bug!(expr.span, "no loop scope for id {}", label.loop_id);
|
||||||
}
|
|
||||||
|
|
||||||
r => {
|
|
||||||
span_bug!(expr.span, "bad entry `{:?}` in def_map for label", r);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -42,6 +42,10 @@ pub enum DepNode<D: Clone + Debug> {
|
||||||
// Represents the HIR node with the given node-id
|
// Represents the HIR node with the given node-id
|
||||||
Hir(D),
|
Hir(D),
|
||||||
|
|
||||||
|
// Represents the body of a function or method. The def-id is that of the
|
||||||
|
// function/method.
|
||||||
|
HirBody(D),
|
||||||
|
|
||||||
// Represents the metadata for a given HIR node, typically found
|
// Represents the metadata for a given HIR node, typically found
|
||||||
// in an extern crate.
|
// in an extern crate.
|
||||||
MetaData(D),
|
MetaData(D),
|
||||||
|
@ -59,6 +63,7 @@ pub enum DepNode<D: Clone + Debug> {
|
||||||
PluginRegistrar,
|
PluginRegistrar,
|
||||||
StabilityIndex,
|
StabilityIndex,
|
||||||
CollectItem(D),
|
CollectItem(D),
|
||||||
|
CollectItemSig(D),
|
||||||
Coherence,
|
Coherence,
|
||||||
EffectCheck,
|
EffectCheck,
|
||||||
Liveness,
|
Liveness,
|
||||||
|
@ -90,7 +95,7 @@ pub enum DepNode<D: Clone + Debug> {
|
||||||
RvalueCheck(D),
|
RvalueCheck(D),
|
||||||
Reachability,
|
Reachability,
|
||||||
DeadCheck,
|
DeadCheck,
|
||||||
StabilityCheck,
|
StabilityCheck(D),
|
||||||
LateLintCheck,
|
LateLintCheck,
|
||||||
TransCrate,
|
TransCrate,
|
||||||
TransCrateItem(D),
|
TransCrateItem(D),
|
||||||
|
@ -105,7 +110,6 @@ pub enum DepNode<D: Clone + Debug> {
|
||||||
// predicates for an item wind up in `ItemSignature`).
|
// predicates for an item wind up in `ItemSignature`).
|
||||||
AssociatedItems(D),
|
AssociatedItems(D),
|
||||||
ItemSignature(D),
|
ItemSignature(D),
|
||||||
FieldTy(D),
|
|
||||||
SizedConstraint(D),
|
SizedConstraint(D),
|
||||||
AssociatedItemDefIds(D),
|
AssociatedItemDefIds(D),
|
||||||
InherentImpls(D),
|
InherentImpls(D),
|
||||||
|
@ -150,12 +154,12 @@ impl<D: Clone + Debug> DepNode<D> {
|
||||||
CollectItem,
|
CollectItem,
|
||||||
BorrowCheck,
|
BorrowCheck,
|
||||||
Hir,
|
Hir,
|
||||||
|
HirBody,
|
||||||
TransCrateItem,
|
TransCrateItem,
|
||||||
TypeckItemType,
|
TypeckItemType,
|
||||||
TypeckItemBody,
|
TypeckItemBody,
|
||||||
AssociatedItems,
|
AssociatedItems,
|
||||||
ItemSignature,
|
ItemSignature,
|
||||||
FieldTy,
|
|
||||||
AssociatedItemDefIds,
|
AssociatedItemDefIds,
|
||||||
InherentImpls,
|
InherentImpls,
|
||||||
TraitImpls,
|
TraitImpls,
|
||||||
|
@ -189,7 +193,6 @@ impl<D: Clone + Debug> DepNode<D> {
|
||||||
Privacy => Some(Privacy),
|
Privacy => Some(Privacy),
|
||||||
Reachability => Some(Reachability),
|
Reachability => Some(Reachability),
|
||||||
DeadCheck => Some(DeadCheck),
|
DeadCheck => Some(DeadCheck),
|
||||||
StabilityCheck => Some(StabilityCheck),
|
|
||||||
LateLintCheck => Some(LateLintCheck),
|
LateLintCheck => Some(LateLintCheck),
|
||||||
TransCrate => Some(TransCrate),
|
TransCrate => Some(TransCrate),
|
||||||
TransWriteMetadata => Some(TransWriteMetadata),
|
TransWriteMetadata => Some(TransWriteMetadata),
|
||||||
|
@ -200,8 +203,10 @@ impl<D: Clone + Debug> DepNode<D> {
|
||||||
WorkProduct(ref id) => Some(WorkProduct(id.clone())),
|
WorkProduct(ref id) => Some(WorkProduct(id.clone())),
|
||||||
|
|
||||||
Hir(ref d) => op(d).map(Hir),
|
Hir(ref d) => op(d).map(Hir),
|
||||||
|
HirBody(ref d) => op(d).map(HirBody),
|
||||||
MetaData(ref d) => op(d).map(MetaData),
|
MetaData(ref d) => op(d).map(MetaData),
|
||||||
CollectItem(ref d) => op(d).map(CollectItem),
|
CollectItem(ref d) => op(d).map(CollectItem),
|
||||||
|
CollectItemSig(ref d) => op(d).map(CollectItemSig),
|
||||||
CoherenceCheckImpl(ref d) => op(d).map(CoherenceCheckImpl),
|
CoherenceCheckImpl(ref d) => op(d).map(CoherenceCheckImpl),
|
||||||
CoherenceOverlapCheck(ref d) => op(d).map(CoherenceOverlapCheck),
|
CoherenceOverlapCheck(ref d) => op(d).map(CoherenceOverlapCheck),
|
||||||
CoherenceOverlapCheckSpecial(ref d) => op(d).map(CoherenceOverlapCheckSpecial),
|
CoherenceOverlapCheckSpecial(ref d) => op(d).map(CoherenceOverlapCheckSpecial),
|
||||||
|
@ -217,11 +222,11 @@ impl<D: Clone + Debug> DepNode<D> {
|
||||||
Mir(ref d) => op(d).map(Mir),
|
Mir(ref d) => op(d).map(Mir),
|
||||||
BorrowCheck(ref d) => op(d).map(BorrowCheck),
|
BorrowCheck(ref d) => op(d).map(BorrowCheck),
|
||||||
RvalueCheck(ref d) => op(d).map(RvalueCheck),
|
RvalueCheck(ref d) => op(d).map(RvalueCheck),
|
||||||
|
StabilityCheck(ref d) => op(d).map(StabilityCheck),
|
||||||
TransCrateItem(ref d) => op(d).map(TransCrateItem),
|
TransCrateItem(ref d) => op(d).map(TransCrateItem),
|
||||||
TransInlinedItem(ref d) => op(d).map(TransInlinedItem),
|
TransInlinedItem(ref d) => op(d).map(TransInlinedItem),
|
||||||
AssociatedItems(ref d) => op(d).map(AssociatedItems),
|
AssociatedItems(ref d) => op(d).map(AssociatedItems),
|
||||||
ItemSignature(ref d) => op(d).map(ItemSignature),
|
ItemSignature(ref d) => op(d).map(ItemSignature),
|
||||||
FieldTy(ref d) => op(d).map(FieldTy),
|
|
||||||
SizedConstraint(ref d) => op(d).map(SizedConstraint),
|
SizedConstraint(ref d) => op(d).map(SizedConstraint),
|
||||||
AssociatedItemDefIds(ref d) => op(d).map(AssociatedItemDefIds),
|
AssociatedItemDefIds(ref d) => op(d).map(AssociatedItemDefIds),
|
||||||
InherentImpls(ref d) => op(d).map(InherentImpls),
|
InherentImpls(ref d) => op(d).map(InherentImpls),
|
||||||
|
|
|
@ -129,8 +129,8 @@ impl<'a> CheckAttrVisitor<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> Visitor for CheckAttrVisitor<'a> {
|
impl<'a> Visitor<'a> for CheckAttrVisitor<'a> {
|
||||||
fn visit_item(&mut self, item: &ast::Item) {
|
fn visit_item(&mut self, item: &'a ast::Item) {
|
||||||
let target = Target::from_item(item);
|
let target = Target::from_item(item);
|
||||||
for attr in &item.attrs {
|
for attr in &item.attrs {
|
||||||
self.check_attribute(attr, target);
|
self.check_attribute(attr, target);
|
||||||
|
|
|
@ -83,14 +83,6 @@ impl PathResolution {
|
||||||
PathResolution { base_def: def, depth: 0 }
|
PathResolution { base_def: def, depth: 0 }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the definition, if fully resolved, otherwise panic.
|
|
||||||
pub fn full_def(&self) -> Def {
|
|
||||||
if self.depth != 0 {
|
|
||||||
bug!("path not fully resolved: {:?}", self);
|
|
||||||
}
|
|
||||||
self.base_def
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn kind_name(&self) -> &'static str {
|
pub fn kind_name(&self) -> &'static str {
|
||||||
if self.depth != 0 {
|
if self.depth != 0 {
|
||||||
"associated item"
|
"associated item"
|
||||||
|
|
|
@ -38,6 +38,7 @@ use syntax::ast::{NodeId, CRATE_NODE_ID, Name, Attribute};
|
||||||
use syntax::codemap::Spanned;
|
use syntax::codemap::Spanned;
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
use hir::*;
|
use hir::*;
|
||||||
|
use hir::def::Def;
|
||||||
use hir::map::Map;
|
use hir::map::Map;
|
||||||
use super::itemlikevisit::DeepVisitor;
|
use super::itemlikevisit::DeepVisitor;
|
||||||
|
|
||||||
|
@ -66,6 +67,62 @@ impl<'a> FnKind<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Specifies what nested things a visitor wants to visit. The most
|
||||||
|
/// common choice is `OnlyBodies`, which will cause the visitor to
|
||||||
|
/// visit fn bodies for fns that it encounters, but skip over nested
|
||||||
|
/// item-like things.
|
||||||
|
///
|
||||||
|
/// See the comments on `ItemLikeVisitor` for more details on the overall
|
||||||
|
/// visit strategy.
|
||||||
|
pub enum NestedVisitorMap<'this, 'tcx: 'this> {
|
||||||
|
/// Do not visit any nested things. When you add a new
|
||||||
|
/// "non-nested" thing, you will want to audit such uses to see if
|
||||||
|
/// they remain valid.
|
||||||
|
///
|
||||||
|
/// Use this if you are only walking some particular kind of tree
|
||||||
|
/// (i.e., a type, or fn signature) and you don't want to thread a
|
||||||
|
/// HIR map around.
|
||||||
|
None,
|
||||||
|
|
||||||
|
/// Do not visit nested item-like things, but visit nested things
|
||||||
|
/// that are inside of an item-like.
|
||||||
|
///
|
||||||
|
/// **This is the most common choice.** A very commmon pattern is
|
||||||
|
/// to use `tcx.visit_all_item_likes_in_krate()` as an outer loop,
|
||||||
|
/// and to have the visitor that visits the contents of each item
|
||||||
|
/// using this setting.
|
||||||
|
OnlyBodies(&'this Map<'tcx>),
|
||||||
|
|
||||||
|
/// Visit all nested things, including item-likes.
|
||||||
|
///
|
||||||
|
/// **This is an unusual choice.** It is used when you want to
|
||||||
|
/// process everything within their lexical context. Typically you
|
||||||
|
/// kick off the visit by doing `walk_krate()`.
|
||||||
|
All(&'this Map<'tcx>),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> {
|
||||||
|
/// Returns the map to use for an "intra item-like" thing (if any).
|
||||||
|
/// e.g., function body.
|
||||||
|
pub fn intra(self) -> Option<&'this Map<'tcx>> {
|
||||||
|
match self {
|
||||||
|
NestedVisitorMap::None => None,
|
||||||
|
NestedVisitorMap::OnlyBodies(map) => Some(map),
|
||||||
|
NestedVisitorMap::All(map) => Some(map),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the map to use for an "item-like" thing (if any).
|
||||||
|
/// e.g., item, impl-item.
|
||||||
|
pub fn inter(self) -> Option<&'this Map<'tcx>> {
|
||||||
|
match self {
|
||||||
|
NestedVisitorMap::None => None,
|
||||||
|
NestedVisitorMap::OnlyBodies(_) => None,
|
||||||
|
NestedVisitorMap::All(map) => Some(map),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Each method of the Visitor trait is a hook to be potentially
|
/// Each method of the Visitor trait is a hook to be potentially
|
||||||
/// overridden. Each method's default implementation recursively visits
|
/// overridden. Each method's default implementation recursively visits
|
||||||
/// the substructure of the input via the corresponding `walk` method;
|
/// the substructure of the input via the corresponding `walk` method;
|
||||||
|
@ -87,13 +144,14 @@ pub trait Visitor<'v> : Sized {
|
||||||
// Nested items.
|
// Nested items.
|
||||||
|
|
||||||
/// The default versions of the `visit_nested_XXX` routines invoke
|
/// The default versions of the `visit_nested_XXX` routines invoke
|
||||||
/// this method to get a map to use; if they get back `None`, they
|
/// this method to get a map to use. By selecting an enum variant,
|
||||||
/// just skip nested things. Otherwise, they will lookup the
|
/// you control which kinds of nested HIR are visited; see
|
||||||
/// nested item-like things in the map and visit it. So the best
|
/// `NestedVisitorMap` for details. By "nested HIR", we are
|
||||||
/// way to implement a nested visitor is to override this method
|
/// referring to bits of HIR that are not directly embedded within
|
||||||
/// to return a `Map`; one advantage of this is that if we add
|
/// one another but rather indirectly, through a table in the
|
||||||
/// more types of nested things in the future, they will
|
/// crate. This is done to control dependencies during incremental
|
||||||
/// automatically work.
|
/// compilation: the non-inline bits of HIR can be tracked and
|
||||||
|
/// hashed separately.
|
||||||
///
|
///
|
||||||
/// **If for some reason you want the nested behavior, but don't
|
/// **If for some reason you want the nested behavior, but don't
|
||||||
/// have a `Map` are your disposal:** then you should override the
|
/// have a `Map` are your disposal:** then you should override the
|
||||||
|
@ -101,9 +159,7 @@ pub trait Visitor<'v> : Sized {
|
||||||
/// `panic!()`. This way, if a new `visit_nested_XXX` variant is
|
/// `panic!()`. This way, if a new `visit_nested_XXX` variant is
|
||||||
/// added in the future, we will see the panic in your code and
|
/// added in the future, we will see the panic in your code and
|
||||||
/// fix it appropriately.
|
/// fix it appropriately.
|
||||||
fn nested_visit_map(&mut self) -> Option<&Map<'v>> {
|
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'v>;
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Invoked when a nested item is encountered. By default does
|
/// Invoked when a nested item is encountered. By default does
|
||||||
/// nothing unless you override `nested_visit_map` to return
|
/// nothing unless you override `nested_visit_map` to return
|
||||||
|
@ -115,8 +171,7 @@ pub trait Visitor<'v> : Sized {
|
||||||
/// but cannot supply a `Map`; see `nested_visit_map` for advice.
|
/// but cannot supply a `Map`; see `nested_visit_map` for advice.
|
||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
fn visit_nested_item(&mut self, id: ItemId) {
|
fn visit_nested_item(&mut self, id: ItemId) {
|
||||||
let opt_item = self.nested_visit_map()
|
let opt_item = self.nested_visit_map().inter().map(|map| map.expect_item(id.id));
|
||||||
.map(|map| map.expect_item(id.id));
|
|
||||||
if let Some(item) = opt_item {
|
if let Some(item) = opt_item {
|
||||||
self.visit_item(item);
|
self.visit_item(item);
|
||||||
}
|
}
|
||||||
|
@ -127,13 +182,23 @@ pub trait Visitor<'v> : Sized {
|
||||||
/// method.
|
/// method.
|
||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
fn visit_nested_impl_item(&mut self, id: ImplItemId) {
|
fn visit_nested_impl_item(&mut self, id: ImplItemId) {
|
||||||
let opt_item = self.nested_visit_map()
|
let opt_item = self.nested_visit_map().inter().map(|map| map.impl_item(id));
|
||||||
.map(|map| map.impl_item(id));
|
|
||||||
if let Some(item) = opt_item {
|
if let Some(item) = opt_item {
|
||||||
self.visit_impl_item(item);
|
self.visit_impl_item(item);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Invoked to visit the body of a function, method or closure. Like
|
||||||
|
/// visit_nested_item, does nothing by default unless you override
|
||||||
|
/// `nested_visit_map` to return `Some(_)`, in which case it will walk the
|
||||||
|
/// body.
|
||||||
|
fn visit_body(&mut self, id: ExprId) {
|
||||||
|
let opt_expr = self.nested_visit_map().intra().map(|map| map.expr(id));
|
||||||
|
if let Some(expr) = opt_expr {
|
||||||
|
self.visit_expr(expr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Visit the top-level item and (optionally) nested items / impl items. See
|
/// Visit the top-level item and (optionally) nested items / impl items. See
|
||||||
/// `visit_nested_item` for details.
|
/// `visit_nested_item` for details.
|
||||||
fn visit_item(&mut self, i: &'v Item) {
|
fn visit_item(&mut self, i: &'v Item) {
|
||||||
|
@ -155,6 +220,9 @@ pub trait Visitor<'v> : Sized {
|
||||||
fn visit_id(&mut self, _node_id: NodeId) {
|
fn visit_id(&mut self, _node_id: NodeId) {
|
||||||
// Nothing to do.
|
// Nothing to do.
|
||||||
}
|
}
|
||||||
|
fn visit_def_mention(&mut self, _def: Def) {
|
||||||
|
// Nothing to do.
|
||||||
|
}
|
||||||
fn visit_name(&mut self, _span: Span, _name: Name) {
|
fn visit_name(&mut self, _span: Span, _name: Name) {
|
||||||
// Nothing to do.
|
// Nothing to do.
|
||||||
}
|
}
|
||||||
|
@ -196,7 +264,7 @@ pub trait Visitor<'v> : Sized {
|
||||||
fn visit_where_predicate(&mut self, predicate: &'v WherePredicate) {
|
fn visit_where_predicate(&mut self, predicate: &'v WherePredicate) {
|
||||||
walk_where_predicate(self, predicate)
|
walk_where_predicate(self, predicate)
|
||||||
}
|
}
|
||||||
fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: &'v Expr, s: Span, id: NodeId) {
|
fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: ExprId, s: Span, id: NodeId) {
|
||||||
walk_fn(self, fk, fd, b, s, id)
|
walk_fn(self, fk, fd, b, s, id)
|
||||||
}
|
}
|
||||||
fn visit_trait_item(&mut self, ti: &'v TraitItem) {
|
fn visit_trait_item(&mut self, ti: &'v TraitItem) {
|
||||||
|
@ -244,12 +312,12 @@ pub trait Visitor<'v> : Sized {
|
||||||
fn visit_lifetime_def(&mut self, lifetime: &'v LifetimeDef) {
|
fn visit_lifetime_def(&mut self, lifetime: &'v LifetimeDef) {
|
||||||
walk_lifetime_def(self, lifetime)
|
walk_lifetime_def(self, lifetime)
|
||||||
}
|
}
|
||||||
|
fn visit_qpath(&mut self, qpath: &'v QPath, id: NodeId, span: Span) {
|
||||||
|
walk_qpath(self, qpath, id, span)
|
||||||
|
}
|
||||||
fn visit_path(&mut self, path: &'v Path, _id: NodeId) {
|
fn visit_path(&mut self, path: &'v Path, _id: NodeId) {
|
||||||
walk_path(self, path)
|
walk_path(self, path)
|
||||||
}
|
}
|
||||||
fn visit_path_list_item(&mut self, prefix: &'v Path, item: &'v PathListItem) {
|
|
||||||
walk_path_list_item(self, prefix, item)
|
|
||||||
}
|
|
||||||
fn visit_path_segment(&mut self, path_span: Span, path_segment: &'v PathSegment) {
|
fn visit_path_segment(&mut self, path_span: Span, path_segment: &'v PathSegment) {
|
||||||
walk_path_segment(self, path_span, path_segment)
|
walk_path_segment(self, path_span, path_segment)
|
||||||
}
|
}
|
||||||
|
@ -349,31 +417,17 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) {
|
||||||
visitor.visit_id(item.id);
|
visitor.visit_id(item.id);
|
||||||
walk_opt_name(visitor, item.span, opt_name)
|
walk_opt_name(visitor, item.span, opt_name)
|
||||||
}
|
}
|
||||||
ItemUse(ref vp) => {
|
ItemUse(ref path, _) => {
|
||||||
visitor.visit_id(item.id);
|
visitor.visit_id(item.id);
|
||||||
match vp.node {
|
|
||||||
ViewPathSimple(name, ref path) => {
|
|
||||||
visitor.visit_name(vp.span, name);
|
|
||||||
visitor.visit_path(path, item.id);
|
visitor.visit_path(path, item.id);
|
||||||
}
|
}
|
||||||
ViewPathGlob(ref path) => {
|
|
||||||
visitor.visit_path(path, item.id);
|
|
||||||
}
|
|
||||||
ViewPathList(ref prefix, ref list) => {
|
|
||||||
visitor.visit_path(prefix, item.id);
|
|
||||||
for item in list {
|
|
||||||
visitor.visit_path_list_item(prefix, item)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ItemStatic(ref typ, _, ref expr) |
|
ItemStatic(ref typ, _, ref expr) |
|
||||||
ItemConst(ref typ, ref expr) => {
|
ItemConst(ref typ, ref expr) => {
|
||||||
visitor.visit_id(item.id);
|
visitor.visit_id(item.id);
|
||||||
visitor.visit_ty(typ);
|
visitor.visit_ty(typ);
|
||||||
visitor.visit_expr(expr);
|
visitor.visit_expr(expr);
|
||||||
}
|
}
|
||||||
ItemFn(ref declaration, unsafety, constness, abi, ref generics, ref body) => {
|
ItemFn(ref declaration, unsafety, constness, abi, ref generics, body_id) => {
|
||||||
visitor.visit_fn(FnKind::ItemFn(item.name,
|
visitor.visit_fn(FnKind::ItemFn(item.name,
|
||||||
generics,
|
generics,
|
||||||
unsafety,
|
unsafety,
|
||||||
|
@ -382,7 +436,7 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) {
|
||||||
&item.vis,
|
&item.vis,
|
||||||
&item.attrs),
|
&item.attrs),
|
||||||
declaration,
|
declaration,
|
||||||
body,
|
body_id,
|
||||||
item.span,
|
item.span,
|
||||||
item.id)
|
item.id)
|
||||||
}
|
}
|
||||||
|
@ -481,11 +535,8 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) {
|
||||||
walk_fn_decl(visitor, &function_declaration.decl);
|
walk_fn_decl(visitor, &function_declaration.decl);
|
||||||
walk_list!(visitor, visit_lifetime_def, &function_declaration.lifetimes);
|
walk_list!(visitor, visit_lifetime_def, &function_declaration.lifetimes);
|
||||||
}
|
}
|
||||||
TyPath(ref maybe_qself, ref path) => {
|
TyPath(ref qpath) => {
|
||||||
if let Some(ref qself) = *maybe_qself {
|
visitor.visit_qpath(qpath, typ.id, typ.span);
|
||||||
visitor.visit_ty(&qself.ty);
|
|
||||||
}
|
|
||||||
visitor.visit_path(path, typ.id);
|
|
||||||
}
|
}
|
||||||
TyObjectSum(ref ty, ref bounds) => {
|
TyObjectSum(ref ty, ref bounds) => {
|
||||||
visitor.visit_ty(ty);
|
visitor.visit_ty(ty);
|
||||||
|
@ -508,18 +559,26 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) {
|
pub fn walk_qpath<'v, V: Visitor<'v>>(visitor: &mut V, qpath: &'v QPath, id: NodeId, span: Span) {
|
||||||
for segment in &path.segments {
|
match *qpath {
|
||||||
visitor.visit_path_segment(path.span, segment);
|
QPath::Resolved(ref maybe_qself, ref path) => {
|
||||||
|
if let Some(ref qself) = *maybe_qself {
|
||||||
|
visitor.visit_ty(qself);
|
||||||
|
}
|
||||||
|
visitor.visit_path(path, id)
|
||||||
|
}
|
||||||
|
QPath::TypeRelative(ref qself, ref segment) => {
|
||||||
|
visitor.visit_ty(qself);
|
||||||
|
visitor.visit_path_segment(span, segment);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn walk_path_list_item<'v, V>(visitor: &mut V, _prefix: &'v Path, item: &'v PathListItem)
|
pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) {
|
||||||
where V: Visitor<'v>,
|
visitor.visit_def_mention(path.def);
|
||||||
{
|
for segment in &path.segments {
|
||||||
visitor.visit_id(item.node.id);
|
visitor.visit_path_segment(path.span, segment);
|
||||||
visitor.visit_name(item.span, item.node.name);
|
}
|
||||||
walk_opt_name(visitor, item.span, item.node.rename);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V,
|
pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||||
|
@ -555,18 +614,15 @@ pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||||
pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) {
|
pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) {
|
||||||
visitor.visit_id(pattern.id);
|
visitor.visit_id(pattern.id);
|
||||||
match pattern.node {
|
match pattern.node {
|
||||||
PatKind::TupleStruct(ref path, ref children, _) => {
|
PatKind::TupleStruct(ref qpath, ref children, _) => {
|
||||||
visitor.visit_path(path, pattern.id);
|
visitor.visit_qpath(qpath, pattern.id, pattern.span);
|
||||||
walk_list!(visitor, visit_pat, children);
|
walk_list!(visitor, visit_pat, children);
|
||||||
}
|
}
|
||||||
PatKind::Path(ref opt_qself, ref path) => {
|
PatKind::Path(ref qpath) => {
|
||||||
if let Some(ref qself) = *opt_qself {
|
visitor.visit_qpath(qpath, pattern.id, pattern.span);
|
||||||
visitor.visit_ty(&qself.ty);
|
|
||||||
}
|
}
|
||||||
visitor.visit_path(path, pattern.id)
|
PatKind::Struct(ref qpath, ref fields, _) => {
|
||||||
}
|
visitor.visit_qpath(qpath, pattern.id, pattern.span);
|
||||||
PatKind::Struct(ref path, ref fields, _) => {
|
|
||||||
visitor.visit_path(path, pattern.id);
|
|
||||||
for field in fields {
|
for field in fields {
|
||||||
visitor.visit_name(field.span, field.node.name);
|
visitor.visit_name(field.span, field.node.name);
|
||||||
visitor.visit_pat(&field.node.pat)
|
visitor.visit_pat(&field.node.pat)
|
||||||
|
@ -579,7 +635,8 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) {
|
||||||
PatKind::Ref(ref subpattern, _) => {
|
PatKind::Ref(ref subpattern, _) => {
|
||||||
visitor.visit_pat(subpattern)
|
visitor.visit_pat(subpattern)
|
||||||
}
|
}
|
||||||
PatKind::Binding(_, ref pth1, ref optional_subpattern) => {
|
PatKind::Binding(_, def_id, ref pth1, ref optional_subpattern) => {
|
||||||
|
visitor.visit_def_mention(Def::Local(def_id));
|
||||||
visitor.visit_name(pth1.span, pth1.node);
|
visitor.visit_name(pth1.span, pth1.node);
|
||||||
walk_list!(visitor, visit_pat, optional_subpattern);
|
walk_list!(visitor, visit_pat, optional_subpattern);
|
||||||
}
|
}
|
||||||
|
@ -704,13 +761,25 @@ pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, function_kind: FnKind<'
|
||||||
pub fn walk_fn<'v, V: Visitor<'v>>(visitor: &mut V,
|
pub fn walk_fn<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||||
function_kind: FnKind<'v>,
|
function_kind: FnKind<'v>,
|
||||||
function_declaration: &'v FnDecl,
|
function_declaration: &'v FnDecl,
|
||||||
function_body: &'v Expr,
|
body_id: ExprId,
|
||||||
_span: Span,
|
_span: Span,
|
||||||
id: NodeId) {
|
id: NodeId) {
|
||||||
visitor.visit_id(id);
|
visitor.visit_id(id);
|
||||||
walk_fn_decl(visitor, function_declaration);
|
walk_fn_decl(visitor, function_declaration);
|
||||||
walk_fn_kind(visitor, function_kind);
|
walk_fn_kind(visitor, function_kind);
|
||||||
visitor.visit_expr(function_body)
|
visitor.visit_body(body_id)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn walk_fn_with_body<'v, V: Visitor<'v>>(visitor: &mut V,
|
||||||
|
function_kind: FnKind<'v>,
|
||||||
|
function_declaration: &'v FnDecl,
|
||||||
|
body: &'v Expr,
|
||||||
|
_span: Span,
|
||||||
|
id: NodeId) {
|
||||||
|
visitor.visit_id(id);
|
||||||
|
walk_fn_decl(visitor, function_declaration);
|
||||||
|
walk_fn_kind(visitor, function_kind);
|
||||||
|
visitor.visit_expr(body)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem) {
|
pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem) {
|
||||||
|
@ -727,13 +796,13 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai
|
||||||
visitor.visit_generics(&sig.generics);
|
visitor.visit_generics(&sig.generics);
|
||||||
walk_fn_decl(visitor, &sig.decl);
|
walk_fn_decl(visitor, &sig.decl);
|
||||||
}
|
}
|
||||||
MethodTraitItem(ref sig, Some(ref body)) => {
|
MethodTraitItem(ref sig, Some(body_id)) => {
|
||||||
visitor.visit_fn(FnKind::Method(trait_item.name,
|
visitor.visit_fn(FnKind::Method(trait_item.name,
|
||||||
sig,
|
sig,
|
||||||
None,
|
None,
|
||||||
&trait_item.attrs),
|
&trait_item.attrs),
|
||||||
&sig.decl,
|
&sig.decl,
|
||||||
body,
|
body_id,
|
||||||
trait_item.span,
|
trait_item.span,
|
||||||
trait_item.id);
|
trait_item.id);
|
||||||
}
|
}
|
||||||
|
@ -759,13 +828,13 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplIt
|
||||||
visitor.visit_ty(ty);
|
visitor.visit_ty(ty);
|
||||||
visitor.visit_expr(expr);
|
visitor.visit_expr(expr);
|
||||||
}
|
}
|
||||||
ImplItemKind::Method(ref sig, ref body) => {
|
ImplItemKind::Method(ref sig, body_id) => {
|
||||||
visitor.visit_fn(FnKind::Method(impl_item.name,
|
visitor.visit_fn(FnKind::Method(impl_item.name,
|
||||||
sig,
|
sig,
|
||||||
Some(&impl_item.vis),
|
Some(&impl_item.vis),
|
||||||
&impl_item.attrs),
|
&impl_item.attrs),
|
||||||
&sig.decl,
|
&sig.decl,
|
||||||
body,
|
body_id,
|
||||||
impl_item.span,
|
impl_item.span,
|
||||||
impl_item.id);
|
impl_item.id);
|
||||||
}
|
}
|
||||||
|
@ -840,8 +909,8 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
|
||||||
visitor.visit_expr(element);
|
visitor.visit_expr(element);
|
||||||
visitor.visit_expr(count)
|
visitor.visit_expr(count)
|
||||||
}
|
}
|
||||||
ExprStruct(ref path, ref fields, ref optional_base) => {
|
ExprStruct(ref qpath, ref fields, ref optional_base) => {
|
||||||
visitor.visit_path(path, expression.id);
|
visitor.visit_qpath(qpath, expression.id, expression.span);
|
||||||
for field in fields {
|
for field in fields {
|
||||||
visitor.visit_name(field.name.span, field.name.node);
|
visitor.visit_name(field.name.span, field.name.node);
|
||||||
visitor.visit_expr(&field.expr)
|
visitor.visit_expr(&field.expr)
|
||||||
|
@ -890,7 +959,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
|
||||||
visitor.visit_expr(subexpression);
|
visitor.visit_expr(subexpression);
|
||||||
walk_list!(visitor, visit_arm, arms);
|
walk_list!(visitor, visit_arm, arms);
|
||||||
}
|
}
|
||||||
ExprClosure(_, ref function_declaration, ref body, _fn_decl_span) => {
|
ExprClosure(_, ref function_declaration, body, _fn_decl_span) => {
|
||||||
visitor.visit_fn(FnKind::Closure(&expression.attrs),
|
visitor.visit_fn(FnKind::Closure(&expression.attrs),
|
||||||
function_declaration,
|
function_declaration,
|
||||||
body,
|
body,
|
||||||
|
@ -917,18 +986,21 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
|
||||||
visitor.visit_expr(main_expression);
|
visitor.visit_expr(main_expression);
|
||||||
visitor.visit_expr(index_expression)
|
visitor.visit_expr(index_expression)
|
||||||
}
|
}
|
||||||
ExprPath(ref maybe_qself, ref path) => {
|
ExprPath(ref qpath) => {
|
||||||
if let Some(ref qself) = *maybe_qself {
|
visitor.visit_qpath(qpath, expression.id, expression.span);
|
||||||
visitor.visit_ty(&qself.ty);
|
|
||||||
}
|
}
|
||||||
visitor.visit_path(path, expression.id)
|
ExprBreak(None, ref opt_expr) => {
|
||||||
}
|
|
||||||
ExprBreak(ref opt_sp_name, ref opt_expr) => {
|
|
||||||
walk_opt_sp_name(visitor, opt_sp_name);
|
|
||||||
walk_list!(visitor, visit_expr, opt_expr);
|
walk_list!(visitor, visit_expr, opt_expr);
|
||||||
}
|
}
|
||||||
ExprAgain(ref opt_sp_name) => {
|
ExprBreak(Some(label), ref opt_expr) => {
|
||||||
walk_opt_sp_name(visitor, opt_sp_name);
|
visitor.visit_def_mention(Def::Label(label.loop_id));
|
||||||
|
visitor.visit_name(label.span, label.name);
|
||||||
|
walk_list!(visitor, visit_expr, opt_expr);
|
||||||
|
}
|
||||||
|
ExprAgain(None) => {}
|
||||||
|
ExprAgain(Some(label)) => {
|
||||||
|
visitor.visit_def_mention(Def::Label(label.loop_id));
|
||||||
|
visitor.visit_name(label.span, label.name);
|
||||||
}
|
}
|
||||||
ExprRet(ref optional_expression) => {
|
ExprRet(ref optional_expression) => {
|
||||||
walk_list!(visitor, visit_expr, optional_expression);
|
walk_list!(visitor, visit_expr, optional_expression);
|
||||||
|
@ -1002,13 +1074,14 @@ impl IdRange {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
pub struct IdRangeComputingVisitor {
|
pub struct IdRangeComputingVisitor<'a, 'ast: 'a> {
|
||||||
pub result: IdRange,
|
result: IdRange,
|
||||||
|
map: &'a map::Map<'ast>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl IdRangeComputingVisitor {
|
impl<'a, 'ast> IdRangeComputingVisitor<'a, 'ast> {
|
||||||
pub fn new() -> IdRangeComputingVisitor {
|
pub fn new(map: &'a map::Map<'ast>) -> IdRangeComputingVisitor<'a, 'ast> {
|
||||||
IdRangeComputingVisitor { result: IdRange::max() }
|
IdRangeComputingVisitor { result: IdRange::max(), map: map }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn result(&self) -> IdRange {
|
pub fn result(&self) -> IdRange {
|
||||||
|
@ -1016,20 +1089,25 @@ impl IdRangeComputingVisitor {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'v> Visitor<'v> for IdRangeComputingVisitor {
|
impl<'a, 'ast> Visitor<'ast> for IdRangeComputingVisitor<'a, 'ast> {
|
||||||
|
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'ast> {
|
||||||
|
NestedVisitorMap::OnlyBodies(&self.map)
|
||||||
|
}
|
||||||
|
|
||||||
fn visit_id(&mut self, id: NodeId) {
|
fn visit_id(&mut self, id: NodeId) {
|
||||||
self.result.add(id);
|
self.result.add(id);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Computes the id range for a single fn body, ignoring nested items.
|
/// Computes the id range for a single fn body, ignoring nested items.
|
||||||
pub fn compute_id_range_for_fn_body(fk: FnKind,
|
pub fn compute_id_range_for_fn_body<'v>(fk: FnKind<'v>,
|
||||||
decl: &FnDecl,
|
decl: &'v FnDecl,
|
||||||
body: &Expr,
|
body: &'v Expr,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
id: NodeId)
|
id: NodeId,
|
||||||
|
map: &map::Map<'v>)
|
||||||
-> IdRange {
|
-> IdRange {
|
||||||
let mut visitor = IdRangeComputingVisitor::new();
|
let mut visitor = IdRangeComputingVisitor::new(map);
|
||||||
visitor.visit_fn(fk, decl, body, sp, id);
|
walk_fn_with_body(&mut visitor, fk, decl, body, sp, id);
|
||||||
visitor.result()
|
visitor.result()
|
||||||
}
|
}
|
||||||
|
|
|
@ -41,8 +41,10 @@ use super::intravisit::Visitor;
|
||||||
/// item-like things.
|
/// item-like things.
|
||||||
/// - Example: Lifetime resolution, which wants to bring lifetimes declared on the
|
/// - Example: Lifetime resolution, which wants to bring lifetimes declared on the
|
||||||
/// impl into scope while visiting the impl-items, and then back out again.
|
/// impl into scope while visiting the impl-items, and then back out again.
|
||||||
/// - How: Implement `intravisit::Visitor` and override the `visit_nested_foo()` foo methods
|
/// - How: Implement `intravisit::Visitor` and override the
|
||||||
/// as needed. Walk your crate with `intravisit::walk_crate()` invoked on `tcx.map.krate()`.
|
/// `visit_nested_map()` methods to return
|
||||||
|
/// `NestedVisitorMap::All`. Walk your crate with
|
||||||
|
/// `intravisit::walk_crate()` invoked on `tcx.map.krate()`.
|
||||||
/// - Pro: Visitor methods for any kind of HIR node, not just item-like things.
|
/// - Pro: Visitor methods for any kind of HIR node, not just item-like things.
|
||||||
/// - Pro: Preserves nesting information
|
/// - Pro: Preserves nesting information
|
||||||
/// - Con: Does not integrate well into dependency tracking.
|
/// - Con: Does not integrate well into dependency tracking.
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -48,7 +48,7 @@ pub trait MaybeFnLike { fn is_fn_like(&self) -> bool; }
|
||||||
/// Components shared by fn-like things (fn items, methods, closures).
|
/// Components shared by fn-like things (fn items, methods, closures).
|
||||||
pub struct FnParts<'a> {
|
pub struct FnParts<'a> {
|
||||||
pub decl: &'a FnDecl,
|
pub decl: &'a FnDecl,
|
||||||
pub body: &'a Expr,
|
pub body: ast::ExprId,
|
||||||
pub kind: FnKind<'a>,
|
pub kind: FnKind<'a>,
|
||||||
pub span: Span,
|
pub span: Span,
|
||||||
pub id: NodeId,
|
pub id: NodeId,
|
||||||
|
@ -115,7 +115,7 @@ struct ItemFnParts<'a> {
|
||||||
abi: abi::Abi,
|
abi: abi::Abi,
|
||||||
vis: &'a ast::Visibility,
|
vis: &'a ast::Visibility,
|
||||||
generics: &'a ast::Generics,
|
generics: &'a ast::Generics,
|
||||||
body: &'a Expr,
|
body: ast::ExprId,
|
||||||
id: NodeId,
|
id: NodeId,
|
||||||
span: Span,
|
span: Span,
|
||||||
attrs: &'a [Attribute],
|
attrs: &'a [Attribute],
|
||||||
|
@ -125,14 +125,14 @@ struct ItemFnParts<'a> {
|
||||||
/// for use when implementing FnLikeNode operations.
|
/// for use when implementing FnLikeNode operations.
|
||||||
struct ClosureParts<'a> {
|
struct ClosureParts<'a> {
|
||||||
decl: &'a FnDecl,
|
decl: &'a FnDecl,
|
||||||
body: &'a Expr,
|
body: ast::ExprId,
|
||||||
id: NodeId,
|
id: NodeId,
|
||||||
span: Span,
|
span: Span,
|
||||||
attrs: &'a [Attribute],
|
attrs: &'a [Attribute],
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> ClosureParts<'a> {
|
impl<'a> ClosureParts<'a> {
|
||||||
fn new(d: &'a FnDecl, b: &'a Expr, id: NodeId, s: Span, attrs: &'a [Attribute]) -> Self {
|
fn new(d: &'a FnDecl, b: ast::ExprId, id: NodeId, s: Span, attrs: &'a [Attribute]) -> Self {
|
||||||
ClosureParts {
|
ClosureParts {
|
||||||
decl: d,
|
decl: d,
|
||||||
body: b,
|
body: b,
|
||||||
|
@ -172,9 +172,9 @@ impl<'a> FnLikeNode<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn body(self) -> &'a Expr {
|
pub fn body(self) -> ast::ExprId {
|
||||||
self.handle(|i: ItemFnParts<'a>| &*i.body,
|
self.handle(|i: ItemFnParts<'a>| i.body,
|
||||||
|_, _, _: &'a ast::MethodSig, _, body: &'a ast::Expr, _, _| body,
|
|_, _, _: &'a ast::MethodSig, _, body: ast::ExprId, _, _| body,
|
||||||
|c: ClosureParts<'a>| c.body)
|
|c: ClosureParts<'a>| c.body)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -196,6 +196,18 @@ impl<'a> FnLikeNode<'a> {
|
||||||
|c: ClosureParts| c.id)
|
|c: ClosureParts| c.id)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn constness(self) -> ast::Constness {
|
||||||
|
match self.kind() {
|
||||||
|
FnKind::ItemFn(_, _, _, constness, ..) => {
|
||||||
|
constness
|
||||||
|
}
|
||||||
|
FnKind::Method(_, m, ..) => {
|
||||||
|
m.constness
|
||||||
|
}
|
||||||
|
_ => ast::Constness::NotConst
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn kind(self) -> FnKind<'a> {
|
pub fn kind(self) -> FnKind<'a> {
|
||||||
let item = |p: ItemFnParts<'a>| -> FnKind<'a> {
|
let item = |p: ItemFnParts<'a>| -> FnKind<'a> {
|
||||||
FnKind::ItemFn(p.name, p.generics, p.unsafety, p.constness, p.abi, p.vis, p.attrs)
|
FnKind::ItemFn(p.name, p.generics, p.unsafety, p.constness, p.abi, p.vis, p.attrs)
|
||||||
|
@ -215,7 +227,7 @@ impl<'a> FnLikeNode<'a> {
|
||||||
Name,
|
Name,
|
||||||
&'a ast::MethodSig,
|
&'a ast::MethodSig,
|
||||||
Option<&'a ast::Visibility>,
|
Option<&'a ast::Visibility>,
|
||||||
&'a ast::Expr,
|
ast::ExprId,
|
||||||
Span,
|
Span,
|
||||||
&'a [Attribute])
|
&'a [Attribute])
|
||||||
-> A,
|
-> A,
|
||||||
|
@ -223,13 +235,13 @@ impl<'a> FnLikeNode<'a> {
|
||||||
{
|
{
|
||||||
match self.node {
|
match self.node {
|
||||||
map::NodeItem(i) => match i.node {
|
map::NodeItem(i) => match i.node {
|
||||||
ast::ItemFn(ref decl, unsafety, constness, abi, ref generics, ref block) =>
|
ast::ItemFn(ref decl, unsafety, constness, abi, ref generics, block) =>
|
||||||
item_fn(ItemFnParts {
|
item_fn(ItemFnParts {
|
||||||
id: i.id,
|
id: i.id,
|
||||||
name: i.name,
|
name: i.name,
|
||||||
decl: &decl,
|
decl: &decl,
|
||||||
unsafety: unsafety,
|
unsafety: unsafety,
|
||||||
body: &block,
|
body: block,
|
||||||
generics: generics,
|
generics: generics,
|
||||||
abi: abi,
|
abi: abi,
|
||||||
vis: &i.vis,
|
vis: &i.vis,
|
||||||
|
@ -240,24 +252,24 @@ impl<'a> FnLikeNode<'a> {
|
||||||
_ => bug!("item FnLikeNode that is not fn-like"),
|
_ => bug!("item FnLikeNode that is not fn-like"),
|
||||||
},
|
},
|
||||||
map::NodeTraitItem(ti) => match ti.node {
|
map::NodeTraitItem(ti) => match ti.node {
|
||||||
ast::MethodTraitItem(ref sig, Some(ref body)) => {
|
ast::MethodTraitItem(ref sig, Some(body)) => {
|
||||||
method(ti.id, ti.name, sig, None, body, ti.span, &ti.attrs)
|
method(ti.id, ti.name, sig, None, body, ti.span, &ti.attrs)
|
||||||
}
|
}
|
||||||
_ => bug!("trait method FnLikeNode that is not fn-like"),
|
_ => bug!("trait method FnLikeNode that is not fn-like"),
|
||||||
},
|
},
|
||||||
map::NodeImplItem(ii) => {
|
map::NodeImplItem(ii) => {
|
||||||
match ii.node {
|
match ii.node {
|
||||||
ast::ImplItemKind::Method(ref sig, ref body) => {
|
ast::ImplItemKind::Method(ref sig, body) => {
|
||||||
method(ii.id, ii.name, sig, Some(&ii.vis), body, ii.span, &ii.attrs)
|
method(ii.id, ii.name, sig, Some(&ii.vis), body, ii.span, &ii.attrs)
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
bug!("impl method FnLikeNode that is not fn-like")
|
bug!("impl method FnLikeNode that is not fn-like")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
},
|
||||||
map::NodeExpr(e) => match e.node {
|
map::NodeExpr(e) => match e.node {
|
||||||
ast::ExprClosure(_, ref decl, ref block, _fn_decl_span) =>
|
ast::ExprClosure(_, ref decl, block, _fn_decl_span) =>
|
||||||
closure(ClosureParts::new(&decl, &block, e.id, e.span, &e.attrs)),
|
closure(ClosureParts::new(&decl, block, e.id, e.span, &e.attrs)),
|
||||||
_ => bug!("expr FnLikeNode that is not fn-like"),
|
_ => bug!("expr FnLikeNode that is not fn-like"),
|
||||||
},
|
},
|
||||||
_ => bug!("other FnLikeNode that is not fn-like"),
|
_ => bug!("other FnLikeNode that is not fn-like"),
|
||||||
|
|
|
@ -10,7 +10,7 @@
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
use hir::intravisit::Visitor;
|
use hir::intravisit::{Visitor, NestedVisitorMap};
|
||||||
use hir::def_id::DefId;
|
use hir::def_id::DefId;
|
||||||
use middle::cstore::InlinedItem;
|
use middle::cstore::InlinedItem;
|
||||||
use std::iter::repeat;
|
use std::iter::repeat;
|
||||||
|
@ -91,7 +91,7 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> {
|
||||||
/// deep walking so that we walk nested items in the context of
|
/// deep walking so that we walk nested items in the context of
|
||||||
/// their outer items.
|
/// their outer items.
|
||||||
|
|
||||||
fn nested_visit_map(&mut self) -> Option<&map::Map<'ast>> {
|
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'ast> {
|
||||||
panic!("visit_nested_xxx must be manually implemented in this visitor")
|
panic!("visit_nested_xxx must be manually implemented in this visitor")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -106,6 +106,10 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> {
|
||||||
self.visit_impl_item(self.krate.impl_item(item_id))
|
self.visit_impl_item(self.krate.impl_item(item_id))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn visit_body(&mut self, id: ExprId) {
|
||||||
|
self.visit_expr(self.krate.expr(id))
|
||||||
|
}
|
||||||
|
|
||||||
fn visit_item(&mut self, i: &'ast Item) {
|
fn visit_item(&mut self, i: &'ast Item) {
|
||||||
debug!("visit_item: {:?}", i);
|
debug!("visit_item: {:?}", i);
|
||||||
|
|
||||||
|
@ -124,23 +128,6 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> {
|
||||||
this.insert(struct_def.id(), NodeStructCtor(struct_def));
|
this.insert(struct_def.id(), NodeStructCtor(struct_def));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ItemTrait(.., ref bounds, _) => {
|
|
||||||
for b in bounds.iter() {
|
|
||||||
if let TraitTyParamBound(ref t, TraitBoundModifier::None) = *b {
|
|
||||||
this.insert(t.trait_ref.ref_id, NodeItem(i));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ItemUse(ref view_path) => {
|
|
||||||
match view_path.node {
|
|
||||||
ViewPathList(_, ref paths) => {
|
|
||||||
for path in paths {
|
|
||||||
this.insert(path.node.id, NodeItem(i));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => ()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
intravisit::walk_item(this, i);
|
intravisit::walk_item(this, i);
|
||||||
|
@ -217,8 +204,16 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn visit_trait_ref(&mut self, tr: &'ast TraitRef) {
|
||||||
|
self.insert(tr.ref_id, NodeTraitRef(tr));
|
||||||
|
|
||||||
|
self.with_parent(tr.ref_id, |this| {
|
||||||
|
intravisit::walk_trait_ref(this, tr);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
fn visit_fn(&mut self, fk: intravisit::FnKind<'ast>, fd: &'ast FnDecl,
|
fn visit_fn(&mut self, fk: intravisit::FnKind<'ast>, fd: &'ast FnDecl,
|
||||||
b: &'ast Expr, s: Span, id: NodeId) {
|
b: ExprId, s: Span, id: NodeId) {
|
||||||
assert_eq!(self.parent_node, id);
|
assert_eq!(self.parent_node, id);
|
||||||
intravisit::walk_fn(self, fk, fd, b, s, id);
|
intravisit::walk_fn(self, fk, fd, b, s, id);
|
||||||
}
|
}
|
||||||
|
@ -234,7 +229,28 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> {
|
||||||
self.insert(lifetime.id, NodeLifetime(lifetime));
|
self.insert(lifetime.id, NodeLifetime(lifetime));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn visit_vis(&mut self, visibility: &'ast Visibility) {
|
||||||
|
match *visibility {
|
||||||
|
Visibility::Public |
|
||||||
|
Visibility::Crate |
|
||||||
|
Visibility::Inherited => {}
|
||||||
|
Visibility::Restricted { id, .. } => {
|
||||||
|
self.insert(id, NodeVisibility(visibility));
|
||||||
|
self.with_parent(id, |this| {
|
||||||
|
intravisit::walk_vis(this, visibility);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn visit_macro_def(&mut self, macro_def: &'ast MacroDef) {
|
fn visit_macro_def(&mut self, macro_def: &'ast MacroDef) {
|
||||||
self.insert_entry(macro_def.id, NotPresent);
|
self.insert_entry(macro_def.id, NotPresent);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn visit_struct_field(&mut self, field: &'ast StructField) {
|
||||||
|
self.insert(field.id, NodeField(field));
|
||||||
|
self.with_parent(field.id, |this| {
|
||||||
|
intravisit::walk_struct_field(this, field);
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -11,7 +11,7 @@
|
||||||
use hir::map::definitions::*;
|
use hir::map::definitions::*;
|
||||||
|
|
||||||
use hir;
|
use hir;
|
||||||
use hir::intravisit;
|
use hir::intravisit::{self, Visitor, NestedVisitorMap};
|
||||||
use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
|
use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
|
||||||
|
|
||||||
use middle::cstore::InlinedItem;
|
use middle::cstore::InlinedItem;
|
||||||
|
@ -135,8 +135,8 @@ impl<'a> DefCollector<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> visit::Visitor for DefCollector<'a> {
|
impl<'a> visit::Visitor<'a> for DefCollector<'a> {
|
||||||
fn visit_item(&mut self, i: &Item) {
|
fn visit_item(&mut self, i: &'a Item) {
|
||||||
debug!("visit_item: {:?}", i);
|
debug!("visit_item: {:?}", i);
|
||||||
|
|
||||||
// Pick the def data. This need not be unique, but the more
|
// Pick the def data. This need not be unique, but the more
|
||||||
|
@ -155,7 +155,20 @@ impl<'a> visit::Visitor for DefCollector<'a> {
|
||||||
DefPathData::ValueNs(i.ident.name.as_str()),
|
DefPathData::ValueNs(i.ident.name.as_str()),
|
||||||
ItemKind::Mac(..) if i.id == DUMMY_NODE_ID => return, // Scope placeholder
|
ItemKind::Mac(..) if i.id == DUMMY_NODE_ID => return, // Scope placeholder
|
||||||
ItemKind::Mac(..) => return self.visit_macro_invoc(i.id, false),
|
ItemKind::Mac(..) => return self.visit_macro_invoc(i.id, false),
|
||||||
ItemKind::Use(..) => DefPathData::Misc,
|
ItemKind::Use(ref view_path) => {
|
||||||
|
match view_path.node {
|
||||||
|
ViewPathGlob(..) => {}
|
||||||
|
|
||||||
|
// FIXME(eddyb) Should use the real name. Which namespace?
|
||||||
|
ViewPathSimple(..) => {}
|
||||||
|
ViewPathList(_, ref imports) => {
|
||||||
|
for import in imports {
|
||||||
|
self.create_def(import.node.id, DefPathData::Misc);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
DefPathData::Misc
|
||||||
|
}
|
||||||
};
|
};
|
||||||
let def = self.create_def(i.id, def_data);
|
let def = self.create_def(i.id, def_data);
|
||||||
|
|
||||||
|
@ -198,7 +211,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
|
fn visit_foreign_item(&mut self, foreign_item: &'a ForeignItem) {
|
||||||
let def = self.create_def(foreign_item.id,
|
let def = self.create_def(foreign_item.id,
|
||||||
DefPathData::ValueNs(foreign_item.ident.name.as_str()));
|
DefPathData::ValueNs(foreign_item.ident.name.as_str()));
|
||||||
|
|
||||||
|
@ -207,7 +220,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_generics(&mut self, generics: &Generics) {
|
fn visit_generics(&mut self, generics: &'a Generics) {
|
||||||
for ty_param in generics.ty_params.iter() {
|
for ty_param in generics.ty_params.iter() {
|
||||||
self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.ident.name.as_str()));
|
self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.ident.name.as_str()));
|
||||||
}
|
}
|
||||||
|
@ -215,7 +228,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
|
||||||
visit::walk_generics(self, generics);
|
visit::walk_generics(self, generics);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_trait_item(&mut self, ti: &TraitItem) {
|
fn visit_trait_item(&mut self, ti: &'a TraitItem) {
|
||||||
let def_data = match ti.node {
|
let def_data = match ti.node {
|
||||||
TraitItemKind::Method(..) | TraitItemKind::Const(..) =>
|
TraitItemKind::Method(..) | TraitItemKind::Const(..) =>
|
||||||
DefPathData::ValueNs(ti.ident.name.as_str()),
|
DefPathData::ValueNs(ti.ident.name.as_str()),
|
||||||
|
@ -233,7 +246,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_impl_item(&mut self, ii: &ImplItem) {
|
fn visit_impl_item(&mut self, ii: &'a ImplItem) {
|
||||||
let def_data = match ii.node {
|
let def_data = match ii.node {
|
||||||
ImplItemKind::Method(..) | ImplItemKind::Const(..) =>
|
ImplItemKind::Method(..) | ImplItemKind::Const(..) =>
|
||||||
DefPathData::ValueNs(ii.ident.name.as_str()),
|
DefPathData::ValueNs(ii.ident.name.as_str()),
|
||||||
|
@ -251,7 +264,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_pat(&mut self, pat: &Pat) {
|
fn visit_pat(&mut self, pat: &'a Pat) {
|
||||||
let parent_def = self.parent_def;
|
let parent_def = self.parent_def;
|
||||||
|
|
||||||
match pat.node {
|
match pat.node {
|
||||||
|
@ -267,7 +280,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
|
||||||
self.parent_def = parent_def;
|
self.parent_def = parent_def;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_expr(&mut self, expr: &Expr) {
|
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||||
let parent_def = self.parent_def;
|
let parent_def = self.parent_def;
|
||||||
|
|
||||||
match expr.node {
|
match expr.node {
|
||||||
|
@ -284,7 +297,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
|
||||||
self.parent_def = parent_def;
|
self.parent_def = parent_def;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_ty(&mut self, ty: &Ty) {
|
fn visit_ty(&mut self, ty: &'a Ty) {
|
||||||
match ty.node {
|
match ty.node {
|
||||||
TyKind::Mac(..) => return self.visit_macro_invoc(ty.id, false),
|
TyKind::Mac(..) => return self.visit_macro_invoc(ty.id, false),
|
||||||
TyKind::Array(_, ref length) => self.visit_ast_const_integer(length),
|
TyKind::Array(_, ref length) => self.visit_ast_const_integer(length),
|
||||||
|
@ -296,15 +309,15 @@ impl<'a> visit::Visitor for DefCollector<'a> {
|
||||||
visit::walk_ty(self, ty);
|
visit::walk_ty(self, ty);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_lifetime_def(&mut self, def: &LifetimeDef) {
|
fn visit_lifetime_def(&mut self, def: &'a LifetimeDef) {
|
||||||
self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name.as_str()));
|
self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name.as_str()));
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_macro_def(&mut self, macro_def: &MacroDef) {
|
fn visit_macro_def(&mut self, macro_def: &'a MacroDef) {
|
||||||
self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name.as_str()));
|
self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name.as_str()));
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_stmt(&mut self, stmt: &Stmt) {
|
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||||
match stmt.node {
|
match stmt.node {
|
||||||
StmtKind::Mac(..) => self.visit_macro_invoc(stmt.id, false),
|
StmtKind::Mac(..) => self.visit_macro_invoc(stmt.id, false),
|
||||||
_ => visit::walk_stmt(self, stmt),
|
_ => visit::walk_stmt(self, stmt),
|
||||||
|
@ -313,7 +326,18 @@ impl<'a> visit::Visitor for DefCollector<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// We walk the HIR rather than the AST when reading items from metadata.
|
// We walk the HIR rather than the AST when reading items from metadata.
|
||||||
impl<'ast> intravisit::Visitor<'ast> for DefCollector<'ast> {
|
impl<'ast> Visitor<'ast> for DefCollector<'ast> {
|
||||||
|
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'ast> {
|
||||||
|
// note however that we override `visit_body` below
|
||||||
|
NestedVisitorMap::None
|
||||||
|
}
|
||||||
|
|
||||||
|
fn visit_body(&mut self, id: hir::ExprId) {
|
||||||
|
if let Some(krate) = self.hir_crate {
|
||||||
|
self.visit_expr(krate.expr(id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn visit_item(&mut self, i: &'ast hir::Item) {
|
fn visit_item(&mut self, i: &'ast hir::Item) {
|
||||||
debug!("visit_item: {:?}", i);
|
debug!("visit_item: {:?}", i);
|
||||||
|
|
||||||
|
@ -423,7 +447,7 @@ impl<'ast> intravisit::Visitor<'ast> for DefCollector<'ast> {
|
||||||
fn visit_pat(&mut self, pat: &'ast hir::Pat) {
|
fn visit_pat(&mut self, pat: &'ast hir::Pat) {
|
||||||
let parent_def = self.parent_def;
|
let parent_def = self.parent_def;
|
||||||
|
|
||||||
if let hir::PatKind::Binding(_, name, _) = pat.node {
|
if let hir::PatKind::Binding(_, _, name, _) = pat.node {
|
||||||
let def = self.create_def(pat.id, DefPathData::Binding(name.node.as_str()));
|
let def = self.create_def(pat.id, DefPathData::Binding(name.node.as_str()));
|
||||||
self.parent_def = Some(def);
|
self.parent_def = Some(def);
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,7 +18,6 @@ pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData,
|
||||||
use dep_graph::{DepGraph, DepNode};
|
use dep_graph::{DepGraph, DepNode};
|
||||||
|
|
||||||
use middle::cstore::InlinedItem;
|
use middle::cstore::InlinedItem;
|
||||||
use middle::cstore::InlinedItem as II;
|
|
||||||
use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
|
use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
|
||||||
|
|
||||||
use syntax::abi::Abi;
|
use syntax::abi::Abi;
|
||||||
|
@ -46,9 +45,11 @@ pub enum Node<'ast> {
|
||||||
NodeTraitItem(&'ast TraitItem),
|
NodeTraitItem(&'ast TraitItem),
|
||||||
NodeImplItem(&'ast ImplItem),
|
NodeImplItem(&'ast ImplItem),
|
||||||
NodeVariant(&'ast Variant),
|
NodeVariant(&'ast Variant),
|
||||||
|
NodeField(&'ast StructField),
|
||||||
NodeExpr(&'ast Expr),
|
NodeExpr(&'ast Expr),
|
||||||
NodeStmt(&'ast Stmt),
|
NodeStmt(&'ast Stmt),
|
||||||
NodeTy(&'ast Ty),
|
NodeTy(&'ast Ty),
|
||||||
|
NodeTraitRef(&'ast TraitRef),
|
||||||
NodeLocal(&'ast Pat),
|
NodeLocal(&'ast Pat),
|
||||||
NodePat(&'ast Pat),
|
NodePat(&'ast Pat),
|
||||||
NodeBlock(&'ast Block),
|
NodeBlock(&'ast Block),
|
||||||
|
@ -57,7 +58,10 @@ pub enum Node<'ast> {
|
||||||
NodeStructCtor(&'ast VariantData),
|
NodeStructCtor(&'ast VariantData),
|
||||||
|
|
||||||
NodeLifetime(&'ast Lifetime),
|
NodeLifetime(&'ast Lifetime),
|
||||||
NodeTyParam(&'ast TyParam)
|
NodeTyParam(&'ast TyParam),
|
||||||
|
NodeVisibility(&'ast Visibility),
|
||||||
|
|
||||||
|
NodeInlinedItem(&'ast InlinedItem),
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Represents an entry and its parent NodeID.
|
/// Represents an entry and its parent NodeID.
|
||||||
|
@ -73,15 +77,18 @@ pub enum MapEntry<'ast> {
|
||||||
EntryTraitItem(NodeId, &'ast TraitItem),
|
EntryTraitItem(NodeId, &'ast TraitItem),
|
||||||
EntryImplItem(NodeId, &'ast ImplItem),
|
EntryImplItem(NodeId, &'ast ImplItem),
|
||||||
EntryVariant(NodeId, &'ast Variant),
|
EntryVariant(NodeId, &'ast Variant),
|
||||||
|
EntryField(NodeId, &'ast StructField),
|
||||||
EntryExpr(NodeId, &'ast Expr),
|
EntryExpr(NodeId, &'ast Expr),
|
||||||
EntryStmt(NodeId, &'ast Stmt),
|
EntryStmt(NodeId, &'ast Stmt),
|
||||||
EntryTy(NodeId, &'ast Ty),
|
EntryTy(NodeId, &'ast Ty),
|
||||||
|
EntryTraitRef(NodeId, &'ast TraitRef),
|
||||||
EntryLocal(NodeId, &'ast Pat),
|
EntryLocal(NodeId, &'ast Pat),
|
||||||
EntryPat(NodeId, &'ast Pat),
|
EntryPat(NodeId, &'ast Pat),
|
||||||
EntryBlock(NodeId, &'ast Block),
|
EntryBlock(NodeId, &'ast Block),
|
||||||
EntryStructCtor(NodeId, &'ast VariantData),
|
EntryStructCtor(NodeId, &'ast VariantData),
|
||||||
EntryLifetime(NodeId, &'ast Lifetime),
|
EntryLifetime(NodeId, &'ast Lifetime),
|
||||||
EntryTyParam(NodeId, &'ast TyParam),
|
EntryTyParam(NodeId, &'ast TyParam),
|
||||||
|
EntryVisibility(NodeId, &'ast Visibility),
|
||||||
|
|
||||||
/// Roots for node trees.
|
/// Roots for node trees.
|
||||||
RootCrate,
|
RootCrate,
|
||||||
|
@ -102,15 +109,20 @@ impl<'ast> MapEntry<'ast> {
|
||||||
NodeTraitItem(n) => EntryTraitItem(p, n),
|
NodeTraitItem(n) => EntryTraitItem(p, n),
|
||||||
NodeImplItem(n) => EntryImplItem(p, n),
|
NodeImplItem(n) => EntryImplItem(p, n),
|
||||||
NodeVariant(n) => EntryVariant(p, n),
|
NodeVariant(n) => EntryVariant(p, n),
|
||||||
|
NodeField(n) => EntryField(p, n),
|
||||||
NodeExpr(n) => EntryExpr(p, n),
|
NodeExpr(n) => EntryExpr(p, n),
|
||||||
NodeStmt(n) => EntryStmt(p, n),
|
NodeStmt(n) => EntryStmt(p, n),
|
||||||
NodeTy(n) => EntryTy(p, n),
|
NodeTy(n) => EntryTy(p, n),
|
||||||
|
NodeTraitRef(n) => EntryTraitRef(p, n),
|
||||||
NodeLocal(n) => EntryLocal(p, n),
|
NodeLocal(n) => EntryLocal(p, n),
|
||||||
NodePat(n) => EntryPat(p, n),
|
NodePat(n) => EntryPat(p, n),
|
||||||
NodeBlock(n) => EntryBlock(p, n),
|
NodeBlock(n) => EntryBlock(p, n),
|
||||||
NodeStructCtor(n) => EntryStructCtor(p, n),
|
NodeStructCtor(n) => EntryStructCtor(p, n),
|
||||||
NodeLifetime(n) => EntryLifetime(p, n),
|
NodeLifetime(n) => EntryLifetime(p, n),
|
||||||
NodeTyParam(n) => EntryTyParam(p, n),
|
NodeTyParam(n) => EntryTyParam(p, n),
|
||||||
|
NodeVisibility(n) => EntryVisibility(p, n),
|
||||||
|
|
||||||
|
NodeInlinedItem(n) => RootInlinedParent(n),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -121,15 +133,18 @@ impl<'ast> MapEntry<'ast> {
|
||||||
EntryTraitItem(id, _) => id,
|
EntryTraitItem(id, _) => id,
|
||||||
EntryImplItem(id, _) => id,
|
EntryImplItem(id, _) => id,
|
||||||
EntryVariant(id, _) => id,
|
EntryVariant(id, _) => id,
|
||||||
|
EntryField(id, _) => id,
|
||||||
EntryExpr(id, _) => id,
|
EntryExpr(id, _) => id,
|
||||||
EntryStmt(id, _) => id,
|
EntryStmt(id, _) => id,
|
||||||
EntryTy(id, _) => id,
|
EntryTy(id, _) => id,
|
||||||
|
EntryTraitRef(id, _) => id,
|
||||||
EntryLocal(id, _) => id,
|
EntryLocal(id, _) => id,
|
||||||
EntryPat(id, _) => id,
|
EntryPat(id, _) => id,
|
||||||
EntryBlock(id, _) => id,
|
EntryBlock(id, _) => id,
|
||||||
EntryStructCtor(id, _) => id,
|
EntryStructCtor(id, _) => id,
|
||||||
EntryLifetime(id, _) => id,
|
EntryLifetime(id, _) => id,
|
||||||
EntryTyParam(id, _) => id,
|
EntryTyParam(id, _) => id,
|
||||||
|
EntryVisibility(id, _) => id,
|
||||||
|
|
||||||
NotPresent |
|
NotPresent |
|
||||||
RootCrate |
|
RootCrate |
|
||||||
|
@ -144,15 +159,19 @@ impl<'ast> MapEntry<'ast> {
|
||||||
EntryTraitItem(_, n) => NodeTraitItem(n),
|
EntryTraitItem(_, n) => NodeTraitItem(n),
|
||||||
EntryImplItem(_, n) => NodeImplItem(n),
|
EntryImplItem(_, n) => NodeImplItem(n),
|
||||||
EntryVariant(_, n) => NodeVariant(n),
|
EntryVariant(_, n) => NodeVariant(n),
|
||||||
|
EntryField(_, n) => NodeField(n),
|
||||||
EntryExpr(_, n) => NodeExpr(n),
|
EntryExpr(_, n) => NodeExpr(n),
|
||||||
EntryStmt(_, n) => NodeStmt(n),
|
EntryStmt(_, n) => NodeStmt(n),
|
||||||
EntryTy(_, n) => NodeTy(n),
|
EntryTy(_, n) => NodeTy(n),
|
||||||
|
EntryTraitRef(_, n) => NodeTraitRef(n),
|
||||||
EntryLocal(_, n) => NodeLocal(n),
|
EntryLocal(_, n) => NodeLocal(n),
|
||||||
EntryPat(_, n) => NodePat(n),
|
EntryPat(_, n) => NodePat(n),
|
||||||
EntryBlock(_, n) => NodeBlock(n),
|
EntryBlock(_, n) => NodeBlock(n),
|
||||||
EntryStructCtor(_, n) => NodeStructCtor(n),
|
EntryStructCtor(_, n) => NodeStructCtor(n),
|
||||||
EntryLifetime(_, n) => NodeLifetime(n),
|
EntryLifetime(_, n) => NodeLifetime(n),
|
||||||
EntryTyParam(_, n) => NodeTyParam(n),
|
EntryTyParam(_, n) => NodeTyParam(n),
|
||||||
|
EntryVisibility(_, n) => NodeVisibility(n),
|
||||||
|
RootInlinedParent(n) => NodeInlinedItem(n),
|
||||||
_ => return None
|
_ => return None
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -237,45 +256,63 @@ impl<'ast> Map<'ast> {
|
||||||
let map = self.map.borrow();
|
let map = self.map.borrow();
|
||||||
let mut id = id0;
|
let mut id = id0;
|
||||||
if !self.is_inlined_node_id(id) {
|
if !self.is_inlined_node_id(id) {
|
||||||
|
let mut last_expr = None;
|
||||||
loop {
|
loop {
|
||||||
match map[id.as_usize()] {
|
match map[id.as_usize()] {
|
||||||
EntryItem(_, item) => {
|
EntryItem(_, item) => {
|
||||||
let def_id = self.local_def_id(item.id);
|
assert_eq!(id, item.id);
|
||||||
// NB ^~~~~~~
|
let def_id = self.local_def_id(id);
|
||||||
//
|
|
||||||
// You would expect that `item.id == id`, but this
|
|
||||||
// is not always the case. In particular, for a
|
|
||||||
// ViewPath item like `use self::{mem, foo}`, we
|
|
||||||
// map the ids for `mem` and `foo` to the
|
|
||||||
// enclosing view path item. This seems mega super
|
|
||||||
// ultra wrong, but then who am I to judge?
|
|
||||||
// -nmatsakis
|
|
||||||
assert!(!self.is_inlined_def_id(def_id));
|
assert!(!self.is_inlined_def_id(def_id));
|
||||||
|
|
||||||
|
if let Some(last_id) = last_expr {
|
||||||
|
// The body of the item may have a separate dep node
|
||||||
|
// (Note that trait items don't currently have
|
||||||
|
// their own dep node, so there's also just one
|
||||||
|
// HirBody node for all the items)
|
||||||
|
if self.is_body(last_id, item) {
|
||||||
|
return DepNode::HirBody(def_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
return DepNode::Hir(def_id);
|
return DepNode::Hir(def_id);
|
||||||
}
|
}
|
||||||
|
|
||||||
EntryImplItem(..) => {
|
EntryImplItem(_, item) => {
|
||||||
let def_id = self.local_def_id(id);
|
let def_id = self.local_def_id(id);
|
||||||
assert!(!self.is_inlined_def_id(def_id));
|
assert!(!self.is_inlined_def_id(def_id));
|
||||||
|
|
||||||
|
if let Some(last_id) = last_expr {
|
||||||
|
// The body of the item may have a separate dep node
|
||||||
|
if self.is_impl_item_body(last_id, item) {
|
||||||
|
return DepNode::HirBody(def_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
return DepNode::Hir(def_id);
|
return DepNode::Hir(def_id);
|
||||||
}
|
}
|
||||||
|
|
||||||
EntryForeignItem(p, _) |
|
EntryForeignItem(p, _) |
|
||||||
EntryTraitItem(p, _) |
|
EntryTraitItem(p, _) |
|
||||||
EntryVariant(p, _) |
|
EntryVariant(p, _) |
|
||||||
EntryExpr(p, _) |
|
EntryField(p, _) |
|
||||||
EntryStmt(p, _) |
|
EntryStmt(p, _) |
|
||||||
EntryTy(p, _) |
|
EntryTy(p, _) |
|
||||||
|
EntryTraitRef(p, _) |
|
||||||
EntryLocal(p, _) |
|
EntryLocal(p, _) |
|
||||||
EntryPat(p, _) |
|
EntryPat(p, _) |
|
||||||
EntryBlock(p, _) |
|
EntryBlock(p, _) |
|
||||||
EntryStructCtor(p, _) |
|
EntryStructCtor(p, _) |
|
||||||
EntryLifetime(p, _) |
|
EntryLifetime(p, _) |
|
||||||
EntryTyParam(p, _) =>
|
EntryTyParam(p, _) |
|
||||||
|
EntryVisibility(p, _) =>
|
||||||
id = p,
|
id = p,
|
||||||
|
|
||||||
RootCrate =>
|
EntryExpr(p, _) => {
|
||||||
return DepNode::Krate,
|
last_expr = Some(id);
|
||||||
|
id = p;
|
||||||
|
}
|
||||||
|
|
||||||
|
RootCrate => {
|
||||||
|
return DepNode::Hir(DefId::local(CRATE_DEF_INDEX));
|
||||||
|
}
|
||||||
|
|
||||||
RootInlinedParent(_) =>
|
RootInlinedParent(_) =>
|
||||||
bug!("node {} has inlined ancestor but is not inlined", id0),
|
bug!("node {} has inlined ancestor but is not inlined", id0),
|
||||||
|
@ -304,23 +341,22 @@ impl<'ast> Map<'ast> {
|
||||||
EntryTraitItem(p, _) |
|
EntryTraitItem(p, _) |
|
||||||
EntryImplItem(p, _) |
|
EntryImplItem(p, _) |
|
||||||
EntryVariant(p, _) |
|
EntryVariant(p, _) |
|
||||||
|
EntryField(p, _) |
|
||||||
EntryExpr(p, _) |
|
EntryExpr(p, _) |
|
||||||
EntryStmt(p, _) |
|
EntryStmt(p, _) |
|
||||||
EntryTy(p, _) |
|
EntryTy(p, _) |
|
||||||
|
EntryTraitRef(p, _) |
|
||||||
EntryLocal(p, _) |
|
EntryLocal(p, _) |
|
||||||
EntryPat(p, _) |
|
EntryPat(p, _) |
|
||||||
EntryBlock(p, _) |
|
EntryBlock(p, _) |
|
||||||
EntryStructCtor(p, _) |
|
EntryStructCtor(p, _) |
|
||||||
EntryLifetime(p, _) |
|
EntryLifetime(p, _) |
|
||||||
EntryTyParam(p, _) =>
|
EntryTyParam(p, _) |
|
||||||
|
EntryVisibility(p, _) =>
|
||||||
id = p,
|
id = p,
|
||||||
|
|
||||||
RootInlinedParent(parent) => match *parent {
|
RootInlinedParent(parent) =>
|
||||||
InlinedItem::Item(def_id, _) |
|
return DepNode::MetaData(parent.def_id),
|
||||||
InlinedItem::TraitItem(def_id, _) |
|
|
||||||
InlinedItem::ImplItem(def_id, _) =>
|
|
||||||
return DepNode::MetaData(def_id)
|
|
||||||
},
|
|
||||||
|
|
||||||
RootCrate =>
|
RootCrate =>
|
||||||
bug!("node {} has crate ancestor but is inlined", id0),
|
bug!("node {} has crate ancestor but is inlined", id0),
|
||||||
|
@ -332,6 +368,29 @@ impl<'ast> Map<'ast> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn is_body(&self, node_id: NodeId, item: &Item) -> bool {
|
||||||
|
match item.node {
|
||||||
|
ItemFn(_, _, _, _, _, body) => body.node_id() == node_id,
|
||||||
|
// Since trait items currently don't get their own dep nodes,
|
||||||
|
// we check here whether node_id is the body of any of the items.
|
||||||
|
// If they get their own dep nodes, this can go away
|
||||||
|
ItemTrait(_, _, _, ref trait_items) => {
|
||||||
|
trait_items.iter().any(|trait_item| { match trait_item.node {
|
||||||
|
MethodTraitItem(_, Some(body)) => body.node_id() == node_id,
|
||||||
|
_ => false
|
||||||
|
}})
|
||||||
|
}
|
||||||
|
_ => false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_impl_item_body(&self, node_id: NodeId, item: &ImplItem) -> bool {
|
||||||
|
match item.node {
|
||||||
|
ImplItemKind::Method(_, body) => body.node_id() == node_id,
|
||||||
|
_ => false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn num_local_def_ids(&self) -> usize {
|
pub fn num_local_def_ids(&self) -> usize {
|
||||||
self.definitions.borrow().len()
|
self.definitions.borrow().len()
|
||||||
}
|
}
|
||||||
|
@ -543,8 +602,7 @@ impl<'ast> Map<'ast> {
|
||||||
pub fn get_parent_did(&self, id: NodeId) -> DefId {
|
pub fn get_parent_did(&self, id: NodeId) -> DefId {
|
||||||
let parent = self.get_parent(id);
|
let parent = self.get_parent(id);
|
||||||
match self.find_entry(parent) {
|
match self.find_entry(parent) {
|
||||||
Some(RootInlinedParent(&II::TraitItem(did, _))) |
|
Some(RootInlinedParent(ii)) => ii.def_id,
|
||||||
Some(RootInlinedParent(&II::ImplItem(did, _))) => did,
|
|
||||||
_ => self.local_def_id(parent)
|
_ => self.local_def_id(parent)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -642,6 +700,10 @@ impl<'ast> Map<'ast> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn expr(&self, id: ExprId) -> &'ast Expr {
|
||||||
|
self.expect_expr(id.node_id())
|
||||||
|
}
|
||||||
|
|
||||||
/// Returns the name associated with the given NodeId's AST.
|
/// Returns the name associated with the given NodeId's AST.
|
||||||
pub fn name(&self, id: NodeId) -> Name {
|
pub fn name(&self, id: NodeId) -> Name {
|
||||||
match self.get(id) {
|
match self.get(id) {
|
||||||
|
@ -650,9 +712,10 @@ impl<'ast> Map<'ast> {
|
||||||
NodeImplItem(ii) => ii.name,
|
NodeImplItem(ii) => ii.name,
|
||||||
NodeTraitItem(ti) => ti.name,
|
NodeTraitItem(ti) => ti.name,
|
||||||
NodeVariant(v) => v.node.name,
|
NodeVariant(v) => v.node.name,
|
||||||
|
NodeField(f) => f.name,
|
||||||
NodeLifetime(lt) => lt.name,
|
NodeLifetime(lt) => lt.name,
|
||||||
NodeTyParam(tp) => tp.name,
|
NodeTyParam(tp) => tp.name,
|
||||||
NodeLocal(&Pat { node: PatKind::Binding(_,l,_), .. }) => l.node,
|
NodeLocal(&Pat { node: PatKind::Binding(_,_,l,_), .. }) => l.node,
|
||||||
NodeStructCtor(_) => self.name(self.get_parent(id)),
|
NodeStructCtor(_) => self.name(self.get_parent(id)),
|
||||||
_ => bug!("no name for {}", self.node_to_string(id))
|
_ => bug!("no name for {}", self.node_to_string(id))
|
||||||
}
|
}
|
||||||
|
@ -668,6 +731,7 @@ impl<'ast> Map<'ast> {
|
||||||
Some(NodeTraitItem(ref ti)) => Some(&ti.attrs[..]),
|
Some(NodeTraitItem(ref ti)) => Some(&ti.attrs[..]),
|
||||||
Some(NodeImplItem(ref ii)) => Some(&ii.attrs[..]),
|
Some(NodeImplItem(ref ii)) => Some(&ii.attrs[..]),
|
||||||
Some(NodeVariant(ref v)) => Some(&v.node.attrs[..]),
|
Some(NodeVariant(ref v)) => Some(&v.node.attrs[..]),
|
||||||
|
Some(NodeField(ref f)) => Some(&f.attrs[..]),
|
||||||
Some(NodeExpr(ref e)) => Some(&*e.attrs),
|
Some(NodeExpr(ref e)) => Some(&*e.attrs),
|
||||||
Some(NodeStmt(ref s)) => Some(s.node.attrs()),
|
Some(NodeStmt(ref s)) => Some(s.node.attrs()),
|
||||||
// unit/tuple structs take the attributes straight from
|
// unit/tuple structs take the attributes straight from
|
||||||
|
@ -697,44 +761,40 @@ impl<'ast> Map<'ast> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn opt_span(&self, id: NodeId) -> Option<Span> {
|
|
||||||
let sp = match self.find(id) {
|
|
||||||
Some(NodeItem(item)) => item.span,
|
|
||||||
Some(NodeForeignItem(foreign_item)) => foreign_item.span,
|
|
||||||
Some(NodeTraitItem(trait_method)) => trait_method.span,
|
|
||||||
Some(NodeImplItem(ref impl_item)) => impl_item.span,
|
|
||||||
Some(NodeVariant(variant)) => variant.span,
|
|
||||||
Some(NodeExpr(expr)) => expr.span,
|
|
||||||
Some(NodeStmt(stmt)) => stmt.span,
|
|
||||||
Some(NodeTy(ty)) => ty.span,
|
|
||||||
Some(NodeLocal(pat)) => pat.span,
|
|
||||||
Some(NodePat(pat)) => pat.span,
|
|
||||||
Some(NodeBlock(block)) => block.span,
|
|
||||||
Some(NodeStructCtor(_)) => self.expect_item(self.get_parent(id)).span,
|
|
||||||
Some(NodeTyParam(ty_param)) => ty_param.span,
|
|
||||||
_ => return None,
|
|
||||||
};
|
|
||||||
Some(sp)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn span(&self, id: NodeId) -> Span {
|
pub fn span(&self, id: NodeId) -> Span {
|
||||||
self.read(id); // reveals span from node
|
self.read(id); // reveals span from node
|
||||||
self.opt_span(id)
|
match self.find_entry(id) {
|
||||||
.unwrap_or_else(|| bug!("AstMap.span: could not find span for id {:?}", id))
|
Some(EntryItem(_, item)) => item.span,
|
||||||
|
Some(EntryForeignItem(_, foreign_item)) => foreign_item.span,
|
||||||
|
Some(EntryTraitItem(_, trait_method)) => trait_method.span,
|
||||||
|
Some(EntryImplItem(_, impl_item)) => impl_item.span,
|
||||||
|
Some(EntryVariant(_, variant)) => variant.span,
|
||||||
|
Some(EntryField(_, field)) => field.span,
|
||||||
|
Some(EntryExpr(_, expr)) => expr.span,
|
||||||
|
Some(EntryStmt(_, stmt)) => stmt.span,
|
||||||
|
Some(EntryTy(_, ty)) => ty.span,
|
||||||
|
Some(EntryTraitRef(_, tr)) => tr.path.span,
|
||||||
|
Some(EntryLocal(_, pat)) => pat.span,
|
||||||
|
Some(EntryPat(_, pat)) => pat.span,
|
||||||
|
Some(EntryBlock(_, block)) => block.span,
|
||||||
|
Some(EntryStructCtor(_, _)) => self.expect_item(self.get_parent(id)).span,
|
||||||
|
Some(EntryLifetime(_, lifetime)) => lifetime.span,
|
||||||
|
Some(EntryTyParam(_, ty_param)) => ty_param.span,
|
||||||
|
Some(EntryVisibility(_, &Visibility::Restricted { ref path, .. })) => path.span,
|
||||||
|
Some(EntryVisibility(_, v)) => bug!("unexpected Visibility {:?}", v),
|
||||||
|
|
||||||
|
Some(RootCrate) => self.forest.krate.span,
|
||||||
|
Some(RootInlinedParent(parent)) => parent.body.span,
|
||||||
|
Some(NotPresent) | None => {
|
||||||
|
bug!("hir::map::Map::span: id not in map: {:?}", id)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn span_if_local(&self, id: DefId) -> Option<Span> {
|
pub fn span_if_local(&self, id: DefId) -> Option<Span> {
|
||||||
self.as_local_node_id(id).map(|id| self.span(id))
|
self.as_local_node_id(id).map(|id| self.span(id))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn def_id_span(&self, def_id: DefId, fallback: Span) -> Span {
|
|
||||||
if let Some(node_id) = self.as_local_node_id(def_id) {
|
|
||||||
self.opt_span(node_id).unwrap_or(fallback)
|
|
||||||
} else {
|
|
||||||
fallback
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn node_to_string(&self, id: NodeId) -> String {
|
pub fn node_to_string(&self, id: NodeId) -> String {
|
||||||
node_id_to_string(self, id, true)
|
node_id_to_string(self, id, true)
|
||||||
}
|
}
|
||||||
|
@ -823,6 +883,7 @@ impl<'a, 'ast> Iterator for NodesMatchingSuffix<'a, 'ast> {
|
||||||
Some(EntryTraitItem(_, n)) => n.name(),
|
Some(EntryTraitItem(_, n)) => n.name(),
|
||||||
Some(EntryImplItem(_, n)) => n.name(),
|
Some(EntryImplItem(_, n)) => n.name(),
|
||||||
Some(EntryVariant(_, n)) => n.name(),
|
Some(EntryVariant(_, n)) => n.name(),
|
||||||
|
Some(EntryField(_, n)) => n.name(),
|
||||||
_ => continue,
|
_ => continue,
|
||||||
};
|
};
|
||||||
if self.matches_names(self.map.get_parent(idx), name) {
|
if self.matches_names(self.map.get_parent(idx), name) {
|
||||||
|
@ -841,6 +902,7 @@ impl<T:Named> Named for Spanned<T> { fn name(&self) -> Name { self.node.name() }
|
||||||
impl Named for Item { fn name(&self) -> Name { self.name } }
|
impl Named for Item { fn name(&self) -> Name { self.name } }
|
||||||
impl Named for ForeignItem { fn name(&self) -> Name { self.name } }
|
impl Named for ForeignItem { fn name(&self) -> Name { self.name } }
|
||||||
impl Named for Variant_ { fn name(&self) -> Name { self.name } }
|
impl Named for Variant_ { fn name(&self) -> Name { self.name } }
|
||||||
|
impl Named for StructField { fn name(&self) -> Name { self.name } }
|
||||||
impl Named for TraitItem { fn name(&self) -> Name { self.name } }
|
impl Named for TraitItem { fn name(&self) -> Name { self.name } }
|
||||||
impl Named for ImplItem { fn name(&self) -> Name { self.name } }
|
impl Named for ImplItem { fn name(&self) -> Name { self.name } }
|
||||||
|
|
||||||
|
@ -926,15 +988,20 @@ impl<'a> NodePrinter for pprust::State<'a> {
|
||||||
NodeExpr(a) => self.print_expr(&a),
|
NodeExpr(a) => self.print_expr(&a),
|
||||||
NodeStmt(a) => self.print_stmt(&a),
|
NodeStmt(a) => self.print_stmt(&a),
|
||||||
NodeTy(a) => self.print_type(&a),
|
NodeTy(a) => self.print_type(&a),
|
||||||
|
NodeTraitRef(a) => self.print_trait_ref(&a),
|
||||||
NodePat(a) => self.print_pat(&a),
|
NodePat(a) => self.print_pat(&a),
|
||||||
NodeBlock(a) => self.print_block(&a),
|
NodeBlock(a) => self.print_block(&a),
|
||||||
NodeLifetime(a) => self.print_lifetime(&a),
|
NodeLifetime(a) => self.print_lifetime(&a),
|
||||||
|
NodeVisibility(a) => self.print_visibility(&a),
|
||||||
NodeTyParam(_) => bug!("cannot print TyParam"),
|
NodeTyParam(_) => bug!("cannot print TyParam"),
|
||||||
|
NodeField(_) => bug!("cannot print StructField"),
|
||||||
// these cases do not carry enough information in the
|
// these cases do not carry enough information in the
|
||||||
// ast_map to reconstruct their full structure for pretty
|
// ast_map to reconstruct their full structure for pretty
|
||||||
// printing.
|
// printing.
|
||||||
NodeLocal(_) => bug!("cannot print isolated Local"),
|
NodeLocal(_) => bug!("cannot print isolated Local"),
|
||||||
NodeStructCtor(_) => bug!("cannot print isolated StructCtor"),
|
NodeStructCtor(_) => bug!("cannot print isolated StructCtor"),
|
||||||
|
|
||||||
|
NodeInlinedItem(_) => bug!("cannot print inlined item"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1009,6 +1076,11 @@ fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
|
||||||
variant.node.name,
|
variant.node.name,
|
||||||
path_str(), id_str)
|
path_str(), id_str)
|
||||||
}
|
}
|
||||||
|
Some(NodeField(ref field)) => {
|
||||||
|
format!("field {} in {}{}",
|
||||||
|
field.name,
|
||||||
|
path_str(), id_str)
|
||||||
|
}
|
||||||
Some(NodeExpr(ref expr)) => {
|
Some(NodeExpr(ref expr)) => {
|
||||||
format!("expr {}{}", pprust::expr_to_string(&expr), id_str)
|
format!("expr {}{}", pprust::expr_to_string(&expr), id_str)
|
||||||
}
|
}
|
||||||
|
@ -1018,6 +1090,9 @@ fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
|
||||||
Some(NodeTy(ref ty)) => {
|
Some(NodeTy(ref ty)) => {
|
||||||
format!("type {}{}", pprust::ty_to_string(&ty), id_str)
|
format!("type {}{}", pprust::ty_to_string(&ty), id_str)
|
||||||
}
|
}
|
||||||
|
Some(NodeTraitRef(ref tr)) => {
|
||||||
|
format!("trait_ref {}{}", pprust::path_to_string(&tr.path), id_str)
|
||||||
|
}
|
||||||
Some(NodeLocal(ref pat)) => {
|
Some(NodeLocal(ref pat)) => {
|
||||||
format!("local {}{}", pprust::pat_to_string(&pat), id_str)
|
format!("local {}{}", pprust::pat_to_string(&pat), id_str)
|
||||||
}
|
}
|
||||||
|
@ -1037,6 +1112,12 @@ fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
|
||||||
Some(NodeTyParam(ref ty_param)) => {
|
Some(NodeTyParam(ref ty_param)) => {
|
||||||
format!("typaram {:?}{}", ty_param, id_str)
|
format!("typaram {:?}{}", ty_param, id_str)
|
||||||
}
|
}
|
||||||
|
Some(NodeVisibility(ref vis)) => {
|
||||||
|
format!("visibility {:?}{}", vis, id_str)
|
||||||
|
}
|
||||||
|
Some(NodeInlinedItem(_)) => {
|
||||||
|
format!("inlined item {}", id_str)
|
||||||
|
}
|
||||||
None => {
|
None => {
|
||||||
format!("unknown node{}", id_str)
|
format!("unknown node{}", id_str)
|
||||||
}
|
}
|
||||||
|
|
|
@ -27,13 +27,13 @@ pub use self::Ty_::*;
|
||||||
pub use self::TyParamBound::*;
|
pub use self::TyParamBound::*;
|
||||||
pub use self::UnOp::*;
|
pub use self::UnOp::*;
|
||||||
pub use self::UnsafeSource::*;
|
pub use self::UnsafeSource::*;
|
||||||
pub use self::ViewPath_::*;
|
|
||||||
pub use self::Visibility::{Public, Inherited};
|
pub use self::Visibility::{Public, Inherited};
|
||||||
pub use self::PathParameters::*;
|
pub use self::PathParameters::*;
|
||||||
|
|
||||||
use hir::def::Def;
|
use hir::def::Def;
|
||||||
use hir::def_id::DefId;
|
use hir::def_id::DefId;
|
||||||
use util::nodemap::{NodeMap, FxHashSet};
|
use util::nodemap::{NodeMap, FxHashSet};
|
||||||
|
use rustc_data_structures::fnv::FnvHashMap;
|
||||||
|
|
||||||
use syntax_pos::{mk_sp, Span, ExpnId, DUMMY_SP};
|
use syntax_pos::{mk_sp, Span, ExpnId, DUMMY_SP};
|
||||||
use syntax::codemap::{self, respan, Spanned};
|
use syntax::codemap::{self, respan, Spanned};
|
||||||
|
@ -108,6 +108,8 @@ pub struct Path {
|
||||||
/// A `::foo` path, is relative to the crate root rather than current
|
/// A `::foo` path, is relative to the crate root rather than current
|
||||||
/// module (like paths in an import).
|
/// module (like paths in an import).
|
||||||
pub global: bool,
|
pub global: bool,
|
||||||
|
/// The definition that the path resolved to.
|
||||||
|
pub def: Def,
|
||||||
/// The segments in the path: the things separated by `::`.
|
/// The segments in the path: the things separated by `::`.
|
||||||
pub segments: HirVec<PathSegment>,
|
pub segments: HirVec<PathSegment>,
|
||||||
}
|
}
|
||||||
|
@ -124,21 +126,6 @@ impl fmt::Display for Path {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Path {
|
|
||||||
/// Convert a span and an identifier to the corresponding
|
|
||||||
/// 1-segment path.
|
|
||||||
pub fn from_name(s: Span, name: Name) -> Path {
|
|
||||||
Path {
|
|
||||||
span: s,
|
|
||||||
global: false,
|
|
||||||
segments: hir_vec![PathSegment {
|
|
||||||
name: name,
|
|
||||||
parameters: PathParameters::none()
|
|
||||||
}],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A segment of a path: an identifier, an optional lifetime, and a set of
|
/// A segment of a path: an identifier, an optional lifetime, and a set of
|
||||||
/// types.
|
/// types.
|
||||||
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
|
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
|
||||||
|
@ -154,6 +141,16 @@ pub struct PathSegment {
|
||||||
pub parameters: PathParameters,
|
pub parameters: PathParameters,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl PathSegment {
|
||||||
|
/// Convert an identifier to the corresponding segment.
|
||||||
|
pub fn from_name(name: Name) -> PathSegment {
|
||||||
|
PathSegment {
|
||||||
|
name: name,
|
||||||
|
parameters: PathParameters::none()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
|
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
|
||||||
pub enum PathParameters {
|
pub enum PathParameters {
|
||||||
/// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>`
|
/// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>`
|
||||||
|
@ -167,6 +164,7 @@ impl PathParameters {
|
||||||
AngleBracketedParameters(AngleBracketedParameterData {
|
AngleBracketedParameters(AngleBracketedParameterData {
|
||||||
lifetimes: HirVec::new(),
|
lifetimes: HirVec::new(),
|
||||||
types: HirVec::new(),
|
types: HirVec::new(),
|
||||||
|
infer_types: true,
|
||||||
bindings: HirVec::new(),
|
bindings: HirVec::new(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -241,6 +239,11 @@ pub struct AngleBracketedParameterData {
|
||||||
pub lifetimes: HirVec<Lifetime>,
|
pub lifetimes: HirVec<Lifetime>,
|
||||||
/// The type parameters for this path segment, if present.
|
/// The type parameters for this path segment, if present.
|
||||||
pub types: HirVec<P<Ty>>,
|
pub types: HirVec<P<Ty>>,
|
||||||
|
/// Whether to infer remaining type parameters, if any.
|
||||||
|
/// This only applies to expression and pattern paths, and
|
||||||
|
/// out of those only the segments with no type parameters
|
||||||
|
/// to begin with, e.g. `Vec::new` is `<Vec<..>>::new::<..>`.
|
||||||
|
pub infer_types: bool,
|
||||||
/// Bindings (equality constraints) on associated types, if present.
|
/// Bindings (equality constraints) on associated types, if present.
|
||||||
/// E.g., `Foo<A=Bar>`.
|
/// E.g., `Foo<A=Bar>`.
|
||||||
pub bindings: HirVec<TypeBinding>,
|
pub bindings: HirVec<TypeBinding>,
|
||||||
|
@ -426,6 +429,7 @@ pub struct Crate {
|
||||||
pub items: BTreeMap<NodeId, Item>,
|
pub items: BTreeMap<NodeId, Item>,
|
||||||
|
|
||||||
pub impl_items: BTreeMap<ImplItemId, ImplItem>,
|
pub impl_items: BTreeMap<ImplItemId, ImplItem>,
|
||||||
|
pub exprs: FnvHashMap<ExprId, Expr>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Crate {
|
impl Crate {
|
||||||
|
@ -456,6 +460,10 @@ impl Crate {
|
||||||
visitor.visit_impl_item(impl_item);
|
visitor.visit_impl_item(impl_item);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn expr(&self, id: ExprId) -> &Expr {
|
||||||
|
&self.exprs[&id]
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A macro definition, in this crate or imported from another.
|
/// A macro definition, in this crate or imported from another.
|
||||||
|
@ -527,7 +535,7 @@ impl Pat {
|
||||||
PatKind::Lit(_) |
|
PatKind::Lit(_) |
|
||||||
PatKind::Range(..) |
|
PatKind::Range(..) |
|
||||||
PatKind::Binding(..) |
|
PatKind::Binding(..) |
|
||||||
PatKind::Path(..) => {
|
PatKind::Path(_) => {
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -566,20 +574,20 @@ pub enum PatKind {
|
||||||
Wild,
|
Wild,
|
||||||
|
|
||||||
/// A fresh binding `ref mut binding @ OPT_SUBPATTERN`.
|
/// A fresh binding `ref mut binding @ OPT_SUBPATTERN`.
|
||||||
Binding(BindingMode, Spanned<Name>, Option<P<Pat>>),
|
/// The `DefId` is for the definition of the variable being bound.
|
||||||
|
Binding(BindingMode, DefId, Spanned<Name>, Option<P<Pat>>),
|
||||||
|
|
||||||
/// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`.
|
/// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`.
|
||||||
/// The `bool` is `true` in the presence of a `..`.
|
/// The `bool` is `true` in the presence of a `..`.
|
||||||
Struct(Path, HirVec<Spanned<FieldPat>>, bool),
|
Struct(QPath, HirVec<Spanned<FieldPat>>, bool),
|
||||||
|
|
||||||
/// A tuple struct/variant pattern `Variant(x, y, .., z)`.
|
/// A tuple struct/variant pattern `Variant(x, y, .., z)`.
|
||||||
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
|
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
|
||||||
/// 0 <= position <= subpats.len()
|
/// 0 <= position <= subpats.len()
|
||||||
TupleStruct(Path, HirVec<P<Pat>>, Option<usize>),
|
TupleStruct(QPath, HirVec<P<Pat>>, Option<usize>),
|
||||||
|
|
||||||
/// A possibly qualified path pattern.
|
/// A path pattern for an unit struct/variant or a (maybe-associated) constant.
|
||||||
/// Such pattern can be resolved to a unit struct/variant or a constant.
|
Path(QPath),
|
||||||
Path(Option<QSelf>, Path),
|
|
||||||
|
|
||||||
/// A tuple pattern `(a, b)`.
|
/// A tuple pattern `(a, b)`.
|
||||||
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
|
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
|
||||||
|
@ -836,9 +844,6 @@ pub enum BlockCheckMode {
|
||||||
UnsafeBlock(UnsafeSource),
|
UnsafeBlock(UnsafeSource),
|
||||||
PushUnsafeBlock(UnsafeSource),
|
PushUnsafeBlock(UnsafeSource),
|
||||||
PopUnsafeBlock(UnsafeSource),
|
PopUnsafeBlock(UnsafeSource),
|
||||||
// Within this block (but outside a PopUnstableBlock), we suspend checking of stability.
|
|
||||||
PushUnstableBlock,
|
|
||||||
PopUnstableBlock,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
||||||
|
@ -847,6 +852,15 @@ pub enum UnsafeSource {
|
||||||
UserProvided,
|
UserProvided,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
|
||||||
|
pub struct ExprId(NodeId);
|
||||||
|
|
||||||
|
impl ExprId {
|
||||||
|
pub fn node_id(self) -> NodeId {
|
||||||
|
self.0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// An expression
|
/// An expression
|
||||||
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
|
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
|
||||||
pub struct Expr {
|
pub struct Expr {
|
||||||
|
@ -856,6 +870,12 @@ pub struct Expr {
|
||||||
pub attrs: ThinVec<Attribute>,
|
pub attrs: ThinVec<Attribute>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl Expr {
|
||||||
|
pub fn expr_id(&self) -> ExprId {
|
||||||
|
ExprId(self.id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl fmt::Debug for Expr {
|
impl fmt::Debug for Expr {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
write!(f, "expr({}: {})", self.id, print::expr_to_string(self))
|
write!(f, "expr({}: {})", self.id, print::expr_to_string(self))
|
||||||
|
@ -915,7 +935,7 @@ pub enum Expr_ {
|
||||||
/// A closure (for example, `move |a, b, c| {a + b + c}`).
|
/// A closure (for example, `move |a, b, c| {a + b + c}`).
|
||||||
///
|
///
|
||||||
/// The final span is the span of the argument block `|...|`
|
/// The final span is the span of the argument block `|...|`
|
||||||
ExprClosure(CaptureClause, P<FnDecl>, P<Expr>, Span),
|
ExprClosure(CaptureClause, P<FnDecl>, ExprId, Span),
|
||||||
/// A block (`{ ... }`)
|
/// A block (`{ ... }`)
|
||||||
ExprBlock(P<Block>),
|
ExprBlock(P<Block>),
|
||||||
|
|
||||||
|
@ -934,19 +954,15 @@ pub enum Expr_ {
|
||||||
/// An indexing operation (`foo[2]`)
|
/// An indexing operation (`foo[2]`)
|
||||||
ExprIndex(P<Expr>, P<Expr>),
|
ExprIndex(P<Expr>, P<Expr>),
|
||||||
|
|
||||||
/// Variable reference, possibly containing `::` and/or type
|
/// Path to a definition, possibly containing lifetime or type parameters.
|
||||||
/// parameters, e.g. foo::bar::<baz>.
|
ExprPath(QPath),
|
||||||
///
|
|
||||||
/// Optionally "qualified",
|
|
||||||
/// e.g. `<HirVec<T> as SomeTrait>::SomeType`.
|
|
||||||
ExprPath(Option<QSelf>, Path),
|
|
||||||
|
|
||||||
/// A referencing operation (`&a` or `&mut a`)
|
/// A referencing operation (`&a` or `&mut a`)
|
||||||
ExprAddrOf(Mutability, P<Expr>),
|
ExprAddrOf(Mutability, P<Expr>),
|
||||||
/// A `break`, with an optional label to break
|
/// A `break`, with an optional label to break
|
||||||
ExprBreak(Option<Spanned<Name>>, Option<P<Expr>>),
|
ExprBreak(Option<Label>, Option<P<Expr>>),
|
||||||
/// A `continue`, with an optional label
|
/// A `continue`, with an optional label
|
||||||
ExprAgain(Option<Spanned<Name>>),
|
ExprAgain(Option<Label>),
|
||||||
/// A `return`, with an optional value to be returned
|
/// A `return`, with an optional value to be returned
|
||||||
ExprRet(Option<P<Expr>>),
|
ExprRet(Option<P<Expr>>),
|
||||||
|
|
||||||
|
@ -957,7 +973,7 @@ pub enum Expr_ {
|
||||||
///
|
///
|
||||||
/// For example, `Foo {x: 1, y: 2}`, or
|
/// For example, `Foo {x: 1, y: 2}`, or
|
||||||
/// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
|
/// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
|
||||||
ExprStruct(P<Path>, HirVec<Field>, Option<P<Expr>>),
|
ExprStruct(QPath, HirVec<Field>, Option<P<Expr>>),
|
||||||
|
|
||||||
/// An array literal constructed from one repeated element.
|
/// An array literal constructed from one repeated element.
|
||||||
///
|
///
|
||||||
|
@ -966,22 +982,30 @@ pub enum Expr_ {
|
||||||
ExprRepeat(P<Expr>, P<Expr>),
|
ExprRepeat(P<Expr>, P<Expr>),
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The explicit Self type in a "qualified path". The actual
|
/// Optionally `Self`-qualified value/type path or associated extension.
|
||||||
/// path, including the trait and the associated item, is stored
|
|
||||||
/// separately. `position` represents the index of the associated
|
|
||||||
/// item qualified with this Self type.
|
|
||||||
///
|
|
||||||
/// <HirVec<T> as a::b::Trait>::AssociatedItem
|
|
||||||
/// ^~~~~ ~~~~~~~~~~~~~~^
|
|
||||||
/// ty position = 3
|
|
||||||
///
|
|
||||||
/// <HirVec<T>>::AssociatedItem
|
|
||||||
/// ^~~~~ ^
|
|
||||||
/// ty position = 0
|
|
||||||
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
|
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
|
||||||
pub struct QSelf {
|
pub enum QPath {
|
||||||
pub ty: P<Ty>,
|
/// Path to a definition, optionally "fully-qualified" with a `Self`
|
||||||
pub position: usize,
|
/// type, if the path points to an associated item in a trait.
|
||||||
|
///
|
||||||
|
/// E.g. an unqualified path like `Clone::clone` has `None` for `Self`,
|
||||||
|
/// while `<Vec<T> as Clone>::clone` has `Some(Vec<T>)` for `Self`,
|
||||||
|
/// even though they both have the same two-segment `Clone::clone` `Path`.
|
||||||
|
Resolved(Option<P<Ty>>, P<Path>),
|
||||||
|
|
||||||
|
/// Type-related paths, e.g. `<T>::default` or `<T>::Output`.
|
||||||
|
/// Will be resolved by type-checking to an associated item.
|
||||||
|
///
|
||||||
|
/// UFCS source paths can desugar into this, with `Vec::new` turning into
|
||||||
|
/// `<Vec>::new`, and `T::X::Y::method` into `<<<T>::X>::Y>::method`,
|
||||||
|
/// the `X` and `Y` nodes being each a `TyPath(QPath::TypeRelative(..))`.
|
||||||
|
TypeRelative(P<Ty>, P<PathSegment>)
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for QPath {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
write!(f, "{}", print::qpath_to_string(self))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Hints at the original code for a `match _ { .. }`
|
/// Hints at the original code for a `match _ { .. }`
|
||||||
|
@ -1014,6 +1038,13 @@ pub enum LoopSource {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
||||||
|
pub struct Label {
|
||||||
|
pub span: Span,
|
||||||
|
pub name: Name,
|
||||||
|
pub loop_id: NodeId
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
||||||
pub enum CaptureClause {
|
pub enum CaptureClause {
|
||||||
CaptureByValue,
|
CaptureByValue,
|
||||||
|
@ -1058,7 +1089,7 @@ pub enum TraitItem_ {
|
||||||
/// must contain a value)
|
/// must contain a value)
|
||||||
ConstTraitItem(P<Ty>, Option<P<Expr>>),
|
ConstTraitItem(P<Ty>, Option<P<Expr>>),
|
||||||
/// A method with an optional body
|
/// A method with an optional body
|
||||||
MethodTraitItem(MethodSig, Option<P<Expr>>),
|
MethodTraitItem(MethodSig, Option<ExprId>),
|
||||||
/// An associated type with (possibly empty) bounds and optional concrete
|
/// An associated type with (possibly empty) bounds and optional concrete
|
||||||
/// type
|
/// type
|
||||||
TypeTraitItem(TyParamBounds, Option<P<Ty>>),
|
TypeTraitItem(TyParamBounds, Option<P<Ty>>),
|
||||||
|
@ -1091,7 +1122,7 @@ pub enum ImplItemKind {
|
||||||
/// of the expression
|
/// of the expression
|
||||||
Const(P<Ty>, P<Expr>),
|
Const(P<Ty>, P<Expr>),
|
||||||
/// A method implementation with the given signature and body
|
/// A method implementation with the given signature and body
|
||||||
Method(MethodSig, P<Expr>),
|
Method(MethodSig, ExprId),
|
||||||
/// An associated type
|
/// An associated type
|
||||||
Type(P<Ty>),
|
Type(P<Ty>),
|
||||||
}
|
}
|
||||||
|
@ -1155,11 +1186,12 @@ pub enum Ty_ {
|
||||||
TyNever,
|
TyNever,
|
||||||
/// A tuple (`(A, B, C, D,...)`)
|
/// A tuple (`(A, B, C, D,...)`)
|
||||||
TyTup(HirVec<P<Ty>>),
|
TyTup(HirVec<P<Ty>>),
|
||||||
/// A path (`module::module::...::Type`), optionally
|
/// A path to a type definition (`module::module::...::Type`), or an
|
||||||
/// "qualified", e.g. `<HirVec<T> as SomeTrait>::SomeType`.
|
/// associated type, e.g. `<Vec<T> as Trait>::Type` or `<T>::Target`.
|
||||||
///
|
///
|
||||||
/// Type parameters are stored in the Path itself
|
/// Type parameters may be stored in each `PathSegment`.
|
||||||
TyPath(Option<QSelf>, Path),
|
TyPath(QPath),
|
||||||
|
|
||||||
/// Something like `A+B`. Note that `B` must always be a path.
|
/// Something like `A+B`. Note that `B` must always be a path.
|
||||||
TyObjectSum(P<Ty>, TyParamBounds),
|
TyObjectSum(P<Ty>, TyParamBounds),
|
||||||
/// A type like `for<'a> Foo<&'a Bar>`
|
/// A type like `for<'a> Foo<&'a Bar>`
|
||||||
|
@ -1216,7 +1248,7 @@ pub type ExplicitSelf = Spanned<SelfKind>;
|
||||||
|
|
||||||
impl Arg {
|
impl Arg {
|
||||||
pub fn to_self(&self) -> Option<ExplicitSelf> {
|
pub fn to_self(&self) -> Option<ExplicitSelf> {
|
||||||
if let PatKind::Binding(BindByValue(mutbl), name, _) = self.pat.node {
|
if let PatKind::Binding(BindByValue(mutbl), _, name, _) = self.pat.node {
|
||||||
if name.node == keywords::SelfValue.name() {
|
if name.node == keywords::SelfValue.name() {
|
||||||
return match self.ty.node {
|
return match self.ty.node {
|
||||||
TyInfer => Some(respan(self.pat.span, SelfKind::Value(mutbl))),
|
TyInfer => Some(respan(self.pat.span, SelfKind::Value(mutbl))),
|
||||||
|
@ -1232,7 +1264,7 @@ impl Arg {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_self(&self) -> bool {
|
pub fn is_self(&self) -> bool {
|
||||||
if let PatKind::Binding(_, name, _) = self.pat.node {
|
if let PatKind::Binding(_, _, name, _) = self.pat.node {
|
||||||
name.node == keywords::SelfValue.name()
|
name.node == keywords::SelfValue.name()
|
||||||
} else {
|
} else {
|
||||||
false
|
false
|
||||||
|
@ -1375,32 +1407,20 @@ pub struct Variant_ {
|
||||||
|
|
||||||
pub type Variant = Spanned<Variant_>;
|
pub type Variant = Spanned<Variant_>;
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
|
||||||
pub struct PathListItem_ {
|
pub enum UseKind {
|
||||||
pub name: Name,
|
/// One import, e.g. `use foo::bar` or `use foo::bar as baz`.
|
||||||
/// renamed in list, eg `use foo::{bar as baz};`
|
/// Also produced for each element of a list `use`, e.g.
|
||||||
pub rename: Option<Name>,
|
// `use foo::{a, b}` lowers to `use foo::a; use foo::b;`.
|
||||||
pub id: NodeId,
|
Single,
|
||||||
}
|
|
||||||
|
|
||||||
pub type PathListItem = Spanned<PathListItem_>;
|
/// Glob import, e.g. `use foo::*`.
|
||||||
|
Glob,
|
||||||
|
|
||||||
pub type ViewPath = Spanned<ViewPath_>;
|
/// Degenerate list import, e.g. `use foo::{a, b}` produces
|
||||||
|
/// an additional `use foo::{}` for performing checks such as
|
||||||
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
|
/// unstable feature gating. May be removed in the future.
|
||||||
pub enum ViewPath_ {
|
ListStem,
|
||||||
/// `foo::bar::baz as quux`
|
|
||||||
///
|
|
||||||
/// or just
|
|
||||||
///
|
|
||||||
/// `foo::bar::baz` (with `as baz` implicitly on the right)
|
|
||||||
ViewPathSimple(Name, Path),
|
|
||||||
|
|
||||||
/// `foo::bar::*`
|
|
||||||
ViewPathGlob(Path),
|
|
||||||
|
|
||||||
/// `foo::bar::{a,b,c}`
|
|
||||||
ViewPathList(Path, HirVec<PathListItem>),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// TraitRef's appear in impls.
|
/// TraitRef's appear in impls.
|
||||||
|
@ -1534,15 +1554,20 @@ pub enum Item_ {
|
||||||
///
|
///
|
||||||
/// e.g. `extern crate foo` or `extern crate foo_bar as foo`
|
/// e.g. `extern crate foo` or `extern crate foo_bar as foo`
|
||||||
ItemExternCrate(Option<Name>),
|
ItemExternCrate(Option<Name>),
|
||||||
/// A `use` or `pub use` item
|
|
||||||
ItemUse(P<ViewPath>),
|
/// `use foo::bar::*;` or `use foo::bar::baz as quux;`
|
||||||
|
///
|
||||||
|
/// or just
|
||||||
|
///
|
||||||
|
/// `use foo::bar::baz;` (with `as baz` implicitly on the right)
|
||||||
|
ItemUse(P<Path>, UseKind),
|
||||||
|
|
||||||
/// A `static` item
|
/// A `static` item
|
||||||
ItemStatic(P<Ty>, Mutability, P<Expr>),
|
ItemStatic(P<Ty>, Mutability, P<Expr>),
|
||||||
/// A `const` item
|
/// A `const` item
|
||||||
ItemConst(P<Ty>, P<Expr>),
|
ItemConst(P<Ty>, P<Expr>),
|
||||||
/// A function declaration
|
/// A function declaration
|
||||||
ItemFn(P<FnDecl>, Unsafety, Constness, Abi, Generics, P<Expr>),
|
ItemFn(P<FnDecl>, Unsafety, Constness, Abi, Generics, ExprId),
|
||||||
/// A module
|
/// A module
|
||||||
ItemMod(Mod),
|
ItemMod(Mod),
|
||||||
/// An external module
|
/// An external module
|
||||||
|
|
|
@ -8,13 +8,12 @@
|
||||||
// option. This file may not be copied, modified, or distributed
|
// option. This file may not be copied, modified, or distributed
|
||||||
// except according to those terms.
|
// except according to those terms.
|
||||||
|
|
||||||
use hir::def::*;
|
use hir::def::Def;
|
||||||
use hir::def_id::DefId;
|
use hir::def_id::DefId;
|
||||||
use hir::{self, PatKind};
|
use hir::{self, PatKind};
|
||||||
use ty::TyCtxt;
|
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
use syntax::codemap::Spanned;
|
use syntax::codemap::Spanned;
|
||||||
use syntax_pos::{Span, DUMMY_SP};
|
use syntax_pos::Span;
|
||||||
|
|
||||||
use std::iter::{Enumerate, ExactSizeIterator};
|
use std::iter::{Enumerate, ExactSizeIterator};
|
||||||
|
|
||||||
|
@ -51,14 +50,19 @@ impl<T: ExactSizeIterator> EnumerateAndAdjustIterator for T {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn pat_is_refutable(dm: &DefMap, pat: &hir::Pat) -> bool {
|
impl hir::Pat {
|
||||||
match pat.node {
|
pub fn is_refutable(&self) -> bool {
|
||||||
PatKind::Lit(_) | PatKind::Range(..) | PatKind::Path(Some(..), _) => true,
|
match self.node {
|
||||||
PatKind::TupleStruct(..) |
|
PatKind::Lit(_) |
|
||||||
PatKind::Path(..) |
|
PatKind::Range(..) |
|
||||||
PatKind::Struct(..) => {
|
PatKind::Path(hir::QPath::Resolved(Some(..), _)) |
|
||||||
match dm.get(&pat.id).map(|d| d.full_def()) {
|
PatKind::Path(hir::QPath::TypeRelative(..)) => true,
|
||||||
Some(Def::Variant(..)) | Some(Def::VariantCtor(..)) => true,
|
|
||||||
|
PatKind::Path(hir::QPath::Resolved(_, ref path)) |
|
||||||
|
PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..) |
|
||||||
|
PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => {
|
||||||
|
match path.def {
|
||||||
|
Def::Variant(..) | Def::VariantCtor(..) => true,
|
||||||
_ => false
|
_ => false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -67,11 +71,12 @@ pub fn pat_is_refutable(dm: &DefMap, pat: &hir::Pat) -> bool {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn pat_is_const(dm: &DefMap, pat: &hir::Pat) -> bool {
|
pub fn is_const(&self) -> bool {
|
||||||
match pat.node {
|
match self.node {
|
||||||
PatKind::Path(..) => {
|
PatKind::Path(hir::QPath::TypeRelative(..)) => true,
|
||||||
match dm.get(&pat.id).map(|d| d.full_def()) {
|
PatKind::Path(hir::QPath::Resolved(_, ref path)) => {
|
||||||
Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => true,
|
match path.def {
|
||||||
|
Def::Const(..) | Def::AssociatedConst(..) => true,
|
||||||
_ => false
|
_ => false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -81,11 +86,11 @@ pub fn pat_is_const(dm: &DefMap, pat: &hir::Pat) -> bool {
|
||||||
|
|
||||||
/// Call `f` on every "binding" in a pattern, e.g., on `a` in
|
/// Call `f` on every "binding" in a pattern, e.g., on `a` in
|
||||||
/// `match foo() { Some(a) => (), None => () }`
|
/// `match foo() { Some(a) => (), None => () }`
|
||||||
pub fn pat_bindings<F>(pat: &hir::Pat, mut f: F)
|
pub fn each_binding<F>(&self, mut f: F)
|
||||||
where F: FnMut(hir::BindingMode, ast::NodeId, Span, &Spanned<ast::Name>),
|
where F: FnMut(hir::BindingMode, ast::NodeId, Span, &Spanned<ast::Name>),
|
||||||
{
|
{
|
||||||
pat.walk(|p| {
|
self.walk(|p| {
|
||||||
if let PatKind::Binding(binding_mode, ref pth, _) = p.node {
|
if let PatKind::Binding(binding_mode, _, ref pth, _) = p.node {
|
||||||
f(binding_mode, p.id, p.span, pth);
|
f(binding_mode, p.id, p.span, pth);
|
||||||
}
|
}
|
||||||
true
|
true
|
||||||
|
@ -94,9 +99,9 @@ pub fn pat_bindings<F>(pat: &hir::Pat, mut f: F)
|
||||||
|
|
||||||
/// Checks if the pattern contains any patterns that bind something to
|
/// Checks if the pattern contains any patterns that bind something to
|
||||||
/// an ident, e.g. `foo`, or `Foo(foo)` or `foo @ Bar(..)`.
|
/// an ident, e.g. `foo`, or `Foo(foo)` or `foo @ Bar(..)`.
|
||||||
pub fn pat_contains_bindings(pat: &hir::Pat) -> bool {
|
pub fn contains_bindings(&self) -> bool {
|
||||||
let mut contains_bindings = false;
|
let mut contains_bindings = false;
|
||||||
pat.walk(|p| {
|
self.walk(|p| {
|
||||||
if let PatKind::Binding(..) = p.node {
|
if let PatKind::Binding(..) = p.node {
|
||||||
contains_bindings = true;
|
contains_bindings = true;
|
||||||
false // there's at least one binding, can short circuit now.
|
false // there's at least one binding, can short circuit now.
|
||||||
|
@ -107,38 +112,11 @@ pub fn pat_contains_bindings(pat: &hir::Pat) -> bool {
|
||||||
contains_bindings
|
contains_bindings
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Checks if the pattern contains any `ref` or `ref mut` bindings,
|
|
||||||
/// and if yes whether its containing mutable ones or just immutables ones.
|
|
||||||
pub fn pat_contains_ref_binding(pat: &hir::Pat) -> Option<hir::Mutability> {
|
|
||||||
let mut result = None;
|
|
||||||
pat_bindings(pat, |mode, _, _, _| {
|
|
||||||
if let hir::BindingMode::BindByRef(m) = mode {
|
|
||||||
// Pick Mutable as maximum
|
|
||||||
match result {
|
|
||||||
None | Some(hir::MutImmutable) => result = Some(m),
|
|
||||||
_ => (),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Checks if the patterns for this arm contain any `ref` or `ref mut`
|
|
||||||
/// bindings, and if yes whether its containing mutable ones or just immutables ones.
|
|
||||||
pub fn arm_contains_ref_binding(arm: &hir::Arm) -> Option<hir::Mutability> {
|
|
||||||
arm.pats.iter()
|
|
||||||
.filter_map(|pat| pat_contains_ref_binding(pat))
|
|
||||||
.max_by_key(|m| match *m {
|
|
||||||
hir::MutMutable => 1,
|
|
||||||
hir::MutImmutable => 0,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Checks if the pattern contains any patterns that bind something to
|
/// Checks if the pattern contains any patterns that bind something to
|
||||||
/// an ident or wildcard, e.g. `foo`, or `Foo(_)`, `foo @ Bar(..)`,
|
/// an ident or wildcard, e.g. `foo`, or `Foo(_)`, `foo @ Bar(..)`,
|
||||||
pub fn pat_contains_bindings_or_wild(pat: &hir::Pat) -> bool {
|
pub fn contains_bindings_or_wild(&self) -> bool {
|
||||||
let mut contains_bindings = false;
|
let mut contains_bindings = false;
|
||||||
pat.walk(|p| {
|
self.walk(|p| {
|
||||||
match p.node {
|
match p.node {
|
||||||
PatKind::Binding(..) | PatKind::Wild => {
|
PatKind::Binding(..) | PatKind::Wild => {
|
||||||
contains_bindings = true;
|
contains_bindings = true;
|
||||||
|
@ -150,9 +128,9 @@ pub fn pat_contains_bindings_or_wild(pat: &hir::Pat) -> bool {
|
||||||
contains_bindings
|
contains_bindings
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn simple_name<'a>(pat: &'a hir::Pat) -> Option<ast::Name> {
|
pub fn simple_name(&self) -> Option<ast::Name> {
|
||||||
match pat.node {
|
match self.node {
|
||||||
PatKind::Binding(hir::BindByValue(..), ref path1, None) => {
|
PatKind::Binding(hir::BindByValue(..), _, ref path1, None) => {
|
||||||
Some(path1.node)
|
Some(path1.node)
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
|
@ -161,21 +139,17 @@ pub fn simple_name<'a>(pat: &'a hir::Pat) -> Option<ast::Name> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn def_to_path<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefId) -> hir::Path {
|
|
||||||
hir::Path::from_name(DUMMY_SP, tcx.item_name(id))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return variants that are necessary to exist for the pattern to match.
|
/// Return variants that are necessary to exist for the pattern to match.
|
||||||
pub fn necessary_variants(dm: &DefMap, pat: &hir::Pat) -> Vec<DefId> {
|
pub fn necessary_variants(&self) -> Vec<DefId> {
|
||||||
let mut variants = vec![];
|
let mut variants = vec![];
|
||||||
pat.walk(|p| {
|
self.walk(|p| {
|
||||||
match p.node {
|
match p.node {
|
||||||
PatKind::TupleStruct(..) |
|
PatKind::Path(hir::QPath::Resolved(_, ref path)) |
|
||||||
PatKind::Path(..) |
|
PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..) |
|
||||||
PatKind::Struct(..) => {
|
PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => {
|
||||||
match dm.get(&p.id).map(|d| d.full_def()) {
|
match path.def {
|
||||||
Some(Def::Variant(id)) |
|
Def::Variant(id) |
|
||||||
Some(Def::VariantCtor(id, ..)) => variants.push(id),
|
Def::VariantCtor(id, ..) => variants.push(id),
|
||||||
_ => ()
|
_ => ()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -187,3 +161,33 @@ pub fn necessary_variants(dm: &DefMap, pat: &hir::Pat) -> Vec<DefId> {
|
||||||
variants.dedup();
|
variants.dedup();
|
||||||
variants
|
variants
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Checks if the pattern contains any `ref` or `ref mut` bindings,
|
||||||
|
/// and if yes whether its containing mutable ones or just immutables ones.
|
||||||
|
pub fn contains_ref_binding(&self) -> Option<hir::Mutability> {
|
||||||
|
let mut result = None;
|
||||||
|
self.each_binding(|mode, _, _, _| {
|
||||||
|
if let hir::BindingMode::BindByRef(m) = mode {
|
||||||
|
// Pick Mutable as maximum
|
||||||
|
match result {
|
||||||
|
None | Some(hir::MutImmutable) => result = Some(m),
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
result
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl hir::Arm {
|
||||||
|
/// Checks if the patterns for this arm contain any `ref` or `ref mut`
|
||||||
|
/// bindings, and if yes whether its containing mutable ones or just immutables ones.
|
||||||
|
pub fn contains_ref_binding(&self) -> Option<hir::Mutability> {
|
||||||
|
self.pats.iter()
|
||||||
|
.filter_map(|pat| pat.contains_ref_binding())
|
||||||
|
.max_by_key(|m| match *m {
|
||||||
|
hir::MutMutable => 1,
|
||||||
|
hir::MutImmutable => 0,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -272,7 +272,11 @@ pub fn fn_block_to_string(p: &hir::FnDecl) -> String {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn path_to_string(p: &hir::Path) -> String {
|
pub fn path_to_string(p: &hir::Path) -> String {
|
||||||
to_string(|s| s.print_path(p, false, 0))
|
to_string(|s| s.print_path(p, false))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn qpath_to_string(p: &hir::QPath) -> String {
|
||||||
|
to_string(|s| s.print_qpath(p, false))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn name_to_string(name: ast::Name) -> String {
|
pub fn name_to_string(name: ast::Name) -> String {
|
||||||
|
@ -528,11 +532,8 @@ impl<'a> State<'a> {
|
||||||
};
|
};
|
||||||
self.print_ty_fn(f.abi, f.unsafety, &f.decl, None, &generics)?;
|
self.print_ty_fn(f.abi, f.unsafety, &f.decl, None, &generics)?;
|
||||||
}
|
}
|
||||||
hir::TyPath(None, ref path) => {
|
hir::TyPath(ref qpath) => {
|
||||||
self.print_path(path, false, 0)?;
|
self.print_qpath(qpath, false)?
|
||||||
}
|
|
||||||
hir::TyPath(Some(ref qself), ref path) => {
|
|
||||||
self.print_qpath(path, qself, false)?
|
|
||||||
}
|
}
|
||||||
hir::TyObjectSum(ref ty, ref bounds) => {
|
hir::TyObjectSum(ref ty, ref bounds) => {
|
||||||
self.print_type(&ty)?;
|
self.print_type(&ty)?;
|
||||||
|
@ -643,6 +644,15 @@ impl<'a> State<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn print_expr_id(&mut self, expr_id: &hir::ExprId) -> io::Result<()> {
|
||||||
|
if let Some(krate) = self.krate {
|
||||||
|
let expr = &krate.exprs[expr_id];
|
||||||
|
self.print_expr(expr)
|
||||||
|
} else {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Pretty-print an item
|
/// Pretty-print an item
|
||||||
pub fn print_item(&mut self, item: &hir::Item) -> io::Result<()> {
|
pub fn print_item(&mut self, item: &hir::Item) -> io::Result<()> {
|
||||||
self.hardbreak_if_not_bol()?;
|
self.hardbreak_if_not_bol()?;
|
||||||
|
@ -668,10 +678,22 @@ impl<'a> State<'a> {
|
||||||
self.end()?; // end inner head-block
|
self.end()?; // end inner head-block
|
||||||
self.end()?; // end outer head-block
|
self.end()?; // end outer head-block
|
||||||
}
|
}
|
||||||
hir::ItemUse(ref vp) => {
|
hir::ItemUse(ref path, kind) => {
|
||||||
self.head(&visibility_qualified(&item.vis, "use"))?;
|
self.head(&visibility_qualified(&item.vis, "use"))?;
|
||||||
self.print_view_path(&vp)?;
|
self.print_path(path, false)?;
|
||||||
|
|
||||||
|
match kind {
|
||||||
|
hir::UseKind::Single => {
|
||||||
|
if path.segments.last().unwrap().name != item.name {
|
||||||
|
space(&mut self.s)?;
|
||||||
|
self.word_space("as")?;
|
||||||
|
self.print_name(item.name)?;
|
||||||
|
}
|
||||||
word(&mut self.s, ";")?;
|
word(&mut self.s, ";")?;
|
||||||
|
}
|
||||||
|
hir::UseKind::Glob => word(&mut self.s, "::*;")?,
|
||||||
|
hir::UseKind::ListStem => word(&mut self.s, "::{};")?
|
||||||
|
}
|
||||||
self.end()?; // end inner head-block
|
self.end()?; // end inner head-block
|
||||||
self.end()?; // end outer head-block
|
self.end()?; // end outer head-block
|
||||||
}
|
}
|
||||||
|
@ -716,7 +738,7 @@ impl<'a> State<'a> {
|
||||||
word(&mut self.s, " ")?;
|
word(&mut self.s, " ")?;
|
||||||
self.end()?; // need to close a box
|
self.end()?; // need to close a box
|
||||||
self.end()?; // need to close a box
|
self.end()?; // need to close a box
|
||||||
self.print_expr(&body)?;
|
self.print_expr_id(body)?;
|
||||||
}
|
}
|
||||||
hir::ItemMod(ref _mod) => {
|
hir::ItemMod(ref _mod) => {
|
||||||
self.head(&visibility_qualified(&item.vis, "mod"))?;
|
self.head(&visibility_qualified(&item.vis, "mod"))?;
|
||||||
|
@ -844,8 +866,8 @@ impl<'a> State<'a> {
|
||||||
self.ann.post(self, NodeItem(item))
|
self.ann.post(self, NodeItem(item))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn print_trait_ref(&mut self, t: &hir::TraitRef) -> io::Result<()> {
|
pub fn print_trait_ref(&mut self, t: &hir::TraitRef) -> io::Result<()> {
|
||||||
self.print_path(&t.path, false, 0)
|
self.print_path(&t.path, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn print_formal_lifetime_list(&mut self, lifetimes: &[hir::LifetimeDef]) -> io::Result<()> {
|
fn print_formal_lifetime_list(&mut self, lifetimes: &[hir::LifetimeDef]) -> io::Result<()> {
|
||||||
|
@ -1007,7 +1029,7 @@ impl<'a> State<'a> {
|
||||||
self.nbsp()?;
|
self.nbsp()?;
|
||||||
self.end()?; // need to close a box
|
self.end()?; // need to close a box
|
||||||
self.end()?; // need to close a box
|
self.end()?; // need to close a box
|
||||||
self.print_expr(body)?;
|
self.print_expr_id(body)?;
|
||||||
} else {
|
} else {
|
||||||
word(&mut self.s, ";")?;
|
word(&mut self.s, ";")?;
|
||||||
}
|
}
|
||||||
|
@ -1052,7 +1074,7 @@ impl<'a> State<'a> {
|
||||||
self.nbsp()?;
|
self.nbsp()?;
|
||||||
self.end()?; // need to close a box
|
self.end()?; // need to close a box
|
||||||
self.end()?; // need to close a box
|
self.end()?; // need to close a box
|
||||||
self.print_expr(body)?;
|
self.print_expr_id(body)?;
|
||||||
}
|
}
|
||||||
hir::ImplItemKind::Type(ref ty) => {
|
hir::ImplItemKind::Type(ref ty) => {
|
||||||
self.print_associated_type(ii.name, None, Some(ty))?;
|
self.print_associated_type(ii.name, None, Some(ty))?;
|
||||||
|
@ -1115,8 +1137,6 @@ impl<'a> State<'a> {
|
||||||
hir::UnsafeBlock(..) => self.word_space("unsafe")?,
|
hir::UnsafeBlock(..) => self.word_space("unsafe")?,
|
||||||
hir::PushUnsafeBlock(..) => self.word_space("push_unsafe")?,
|
hir::PushUnsafeBlock(..) => self.word_space("push_unsafe")?,
|
||||||
hir::PopUnsafeBlock(..) => self.word_space("pop_unsafe")?,
|
hir::PopUnsafeBlock(..) => self.word_space("pop_unsafe")?,
|
||||||
hir::PushUnstableBlock => self.word_space("push_unstable")?,
|
|
||||||
hir::PopUnstableBlock => self.word_space("pop_unstable")?,
|
|
||||||
hir::DefaultBlock => (),
|
hir::DefaultBlock => (),
|
||||||
}
|
}
|
||||||
self.maybe_print_comment(blk.span.lo)?;
|
self.maybe_print_comment(blk.span.lo)?;
|
||||||
|
@ -1237,11 +1257,11 @@ impl<'a> State<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn print_expr_struct(&mut self,
|
fn print_expr_struct(&mut self,
|
||||||
path: &hir::Path,
|
qpath: &hir::QPath,
|
||||||
fields: &[hir::Field],
|
fields: &[hir::Field],
|
||||||
wth: &Option<P<hir::Expr>>)
|
wth: &Option<P<hir::Expr>>)
|
||||||
-> io::Result<()> {
|
-> io::Result<()> {
|
||||||
self.print_path(path, true, 0)?;
|
self.print_qpath(qpath, true)?;
|
||||||
word(&mut self.s, "{")?;
|
word(&mut self.s, "{")?;
|
||||||
self.commasep_cmnt(Consistent,
|
self.commasep_cmnt(Consistent,
|
||||||
&fields[..],
|
&fields[..],
|
||||||
|
@ -1345,8 +1365,8 @@ impl<'a> State<'a> {
|
||||||
hir::ExprRepeat(ref element, ref count) => {
|
hir::ExprRepeat(ref element, ref count) => {
|
||||||
self.print_expr_repeat(&element, &count)?;
|
self.print_expr_repeat(&element, &count)?;
|
||||||
}
|
}
|
||||||
hir::ExprStruct(ref path, ref fields, ref wth) => {
|
hir::ExprStruct(ref qpath, ref fields, ref wth) => {
|
||||||
self.print_expr_struct(path, &fields[..], wth)?;
|
self.print_expr_struct(qpath, &fields[..], wth)?;
|
||||||
}
|
}
|
||||||
hir::ExprTup(ref exprs) => {
|
hir::ExprTup(ref exprs) => {
|
||||||
self.print_expr_tup(exprs)?;
|
self.print_expr_tup(exprs)?;
|
||||||
|
@ -1421,7 +1441,7 @@ impl<'a> State<'a> {
|
||||||
space(&mut self.s)?;
|
space(&mut self.s)?;
|
||||||
|
|
||||||
// this is a bare expression
|
// this is a bare expression
|
||||||
self.print_expr(body)?;
|
self.print_expr_id(body)?;
|
||||||
self.end()?; // need to close a box
|
self.end()?; // need to close a box
|
||||||
|
|
||||||
// a box will be closed by print_expr, but we didn't want an overall
|
// a box will be closed by print_expr, but we didn't want an overall
|
||||||
|
@ -1465,17 +1485,14 @@ impl<'a> State<'a> {
|
||||||
self.print_expr(&index)?;
|
self.print_expr(&index)?;
|
||||||
word(&mut self.s, "]")?;
|
word(&mut self.s, "]")?;
|
||||||
}
|
}
|
||||||
hir::ExprPath(None, ref path) => {
|
hir::ExprPath(ref qpath) => {
|
||||||
self.print_path(path, true, 0)?
|
self.print_qpath(qpath, true)?
|
||||||
}
|
}
|
||||||
hir::ExprPath(Some(ref qself), ref path) => {
|
hir::ExprBreak(opt_label, ref opt_expr) => {
|
||||||
self.print_qpath(path, qself, true)?
|
|
||||||
}
|
|
||||||
hir::ExprBreak(opt_name, ref opt_expr) => {
|
|
||||||
word(&mut self.s, "break")?;
|
word(&mut self.s, "break")?;
|
||||||
space(&mut self.s)?;
|
space(&mut self.s)?;
|
||||||
if let Some(name) = opt_name {
|
if let Some(label) = opt_label {
|
||||||
self.print_name(name.node)?;
|
self.print_name(label.name)?;
|
||||||
space(&mut self.s)?;
|
space(&mut self.s)?;
|
||||||
}
|
}
|
||||||
if let Some(ref expr) = *opt_expr {
|
if let Some(ref expr) = *opt_expr {
|
||||||
|
@ -1483,11 +1500,11 @@ impl<'a> State<'a> {
|
||||||
space(&mut self.s)?;
|
space(&mut self.s)?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
hir::ExprAgain(opt_name) => {
|
hir::ExprAgain(opt_label) => {
|
||||||
word(&mut self.s, "continue")?;
|
word(&mut self.s, "continue")?;
|
||||||
space(&mut self.s)?;
|
space(&mut self.s)?;
|
||||||
if let Some(name) = opt_name {
|
if let Some(label) = opt_label {
|
||||||
self.print_name(name.node)?;
|
self.print_name(label.name)?;
|
||||||
space(&mut self.s)?
|
space(&mut self.s)?
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1622,13 +1639,12 @@ impl<'a> State<'a> {
|
||||||
|
|
||||||
fn print_path(&mut self,
|
fn print_path(&mut self,
|
||||||
path: &hir::Path,
|
path: &hir::Path,
|
||||||
colons_before_params: bool,
|
colons_before_params: bool)
|
||||||
depth: usize)
|
|
||||||
-> io::Result<()> {
|
-> io::Result<()> {
|
||||||
self.maybe_print_comment(path.span.lo)?;
|
self.maybe_print_comment(path.span.lo)?;
|
||||||
|
|
||||||
let mut first = !path.global;
|
let mut first = !path.global;
|
||||||
for segment in &path.segments[..path.segments.len() - depth] {
|
for segment in &path.segments {
|
||||||
if first {
|
if first {
|
||||||
first = false
|
first = false
|
||||||
} else {
|
} else {
|
||||||
|
@ -1644,32 +1660,62 @@ impl<'a> State<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn print_qpath(&mut self,
|
fn print_qpath(&mut self,
|
||||||
path: &hir::Path,
|
qpath: &hir::QPath,
|
||||||
qself: &hir::QSelf,
|
|
||||||
colons_before_params: bool)
|
colons_before_params: bool)
|
||||||
-> io::Result<()> {
|
-> io::Result<()> {
|
||||||
|
match *qpath {
|
||||||
|
hir::QPath::Resolved(None, ref path) => {
|
||||||
|
self.print_path(path, colons_before_params)
|
||||||
|
}
|
||||||
|
hir::QPath::Resolved(Some(ref qself), ref path) => {
|
||||||
word(&mut self.s, "<")?;
|
word(&mut self.s, "<")?;
|
||||||
self.print_type(&qself.ty)?;
|
self.print_type(qself)?;
|
||||||
if qself.position > 0 {
|
|
||||||
space(&mut self.s)?;
|
space(&mut self.s)?;
|
||||||
self.word_space("as")?;
|
self.word_space("as")?;
|
||||||
let depth = path.segments.len() - qself.position;
|
|
||||||
self.print_path(&path, false, depth)?;
|
let mut first = !path.global;
|
||||||
|
for segment in &path.segments[..path.segments.len() - 1] {
|
||||||
|
if first {
|
||||||
|
first = false
|
||||||
|
} else {
|
||||||
|
word(&mut self.s, "::")?
|
||||||
}
|
}
|
||||||
|
self.print_name(segment.name)?;
|
||||||
|
self.print_path_parameters(&segment.parameters, colons_before_params)?;
|
||||||
|
}
|
||||||
|
|
||||||
word(&mut self.s, ">")?;
|
word(&mut self.s, ">")?;
|
||||||
word(&mut self.s, "::")?;
|
word(&mut self.s, "::")?;
|
||||||
let item_segment = path.segments.last().unwrap();
|
let item_segment = path.segments.last().unwrap();
|
||||||
self.print_name(item_segment.name)?;
|
self.print_name(item_segment.name)?;
|
||||||
self.print_path_parameters(&item_segment.parameters, colons_before_params)
|
self.print_path_parameters(&item_segment.parameters, colons_before_params)
|
||||||
}
|
}
|
||||||
|
hir::QPath::TypeRelative(ref qself, ref item_segment) => {
|
||||||
|
word(&mut self.s, "<")?;
|
||||||
|
self.print_type(qself)?;
|
||||||
|
word(&mut self.s, ">")?;
|
||||||
|
word(&mut self.s, "::")?;
|
||||||
|
self.print_name(item_segment.name)?;
|
||||||
|
self.print_path_parameters(&item_segment.parameters, colons_before_params)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn print_path_parameters(&mut self,
|
fn print_path_parameters(&mut self,
|
||||||
parameters: &hir::PathParameters,
|
parameters: &hir::PathParameters,
|
||||||
colons_before_params: bool)
|
colons_before_params: bool)
|
||||||
-> io::Result<()> {
|
-> io::Result<()> {
|
||||||
if parameters.is_empty() {
|
if parameters.is_empty() {
|
||||||
|
let infer_types = match *parameters {
|
||||||
|
hir::AngleBracketedParameters(ref data) => data.infer_types,
|
||||||
|
hir::ParenthesizedParameters(_) => false
|
||||||
|
};
|
||||||
|
|
||||||
|
// FIXME(eddyb) See the comment below about infer_types.
|
||||||
|
if !(infer_types && false) {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if colons_before_params {
|
if colons_before_params {
|
||||||
word(&mut self.s, "::")?
|
word(&mut self.s, "::")?
|
||||||
|
@ -1696,6 +1742,16 @@ impl<'a> State<'a> {
|
||||||
comma = true;
|
comma = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// FIXME(eddyb) This would leak into error messages, e.g.:
|
||||||
|
// "non-exhaustive patterns: `Some::<..>(_)` not covered".
|
||||||
|
if data.infer_types && false {
|
||||||
|
if comma {
|
||||||
|
self.word_space(",")?
|
||||||
|
}
|
||||||
|
word(&mut self.s, "..")?;
|
||||||
|
comma = true;
|
||||||
|
}
|
||||||
|
|
||||||
for binding in data.bindings.iter() {
|
for binding in data.bindings.iter() {
|
||||||
if comma {
|
if comma {
|
||||||
self.word_space(",")?
|
self.word_space(",")?
|
||||||
|
@ -1733,7 +1789,7 @@ impl<'a> State<'a> {
|
||||||
// is that it doesn't matter
|
// is that it doesn't matter
|
||||||
match pat.node {
|
match pat.node {
|
||||||
PatKind::Wild => word(&mut self.s, "_")?,
|
PatKind::Wild => word(&mut self.s, "_")?,
|
||||||
PatKind::Binding(binding_mode, ref path1, ref sub) => {
|
PatKind::Binding(binding_mode, _, ref path1, ref sub) => {
|
||||||
match binding_mode {
|
match binding_mode {
|
||||||
hir::BindByRef(mutbl) => {
|
hir::BindByRef(mutbl) => {
|
||||||
self.word_nbsp("ref")?;
|
self.word_nbsp("ref")?;
|
||||||
|
@ -1750,8 +1806,8 @@ impl<'a> State<'a> {
|
||||||
self.print_pat(&p)?;
|
self.print_pat(&p)?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
PatKind::TupleStruct(ref path, ref elts, ddpos) => {
|
PatKind::TupleStruct(ref qpath, ref elts, ddpos) => {
|
||||||
self.print_path(path, true, 0)?;
|
self.print_qpath(qpath, true)?;
|
||||||
self.popen()?;
|
self.popen()?;
|
||||||
if let Some(ddpos) = ddpos {
|
if let Some(ddpos) = ddpos {
|
||||||
self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
|
self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
|
||||||
|
@ -1768,14 +1824,11 @@ impl<'a> State<'a> {
|
||||||
}
|
}
|
||||||
self.pclose()?;
|
self.pclose()?;
|
||||||
}
|
}
|
||||||
PatKind::Path(None, ref path) => {
|
PatKind::Path(ref qpath) => {
|
||||||
self.print_path(path, true, 0)?;
|
self.print_qpath(qpath, true)?;
|
||||||
}
|
}
|
||||||
PatKind::Path(Some(ref qself), ref path) => {
|
PatKind::Struct(ref qpath, ref fields, etc) => {
|
||||||
self.print_qpath(path, qself, false)?;
|
self.print_qpath(qpath, true)?;
|
||||||
}
|
|
||||||
PatKind::Struct(ref path, ref fields, etc) => {
|
|
||||||
self.print_path(path, true, 0)?;
|
|
||||||
self.nbsp()?;
|
self.nbsp()?;
|
||||||
self.word_space("{")?;
|
self.word_space("{")?;
|
||||||
self.commasep_cmnt(Consistent,
|
self.commasep_cmnt(Consistent,
|
||||||
|
@ -2108,7 +2161,7 @@ impl<'a> State<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
&hir::WherePredicate::EqPredicate(hir::WhereEqPredicate{ref path, ref ty, ..}) => {
|
&hir::WherePredicate::EqPredicate(hir::WhereEqPredicate{ref path, ref ty, ..}) => {
|
||||||
self.print_path(path, false, 0)?;
|
self.print_path(path, false)?;
|
||||||
space(&mut self.s)?;
|
space(&mut self.s)?;
|
||||||
self.word_space("=")?;
|
self.word_space("=")?;
|
||||||
self.print_type(&ty)?;
|
self.print_type(&ty)?;
|
||||||
|
@ -2119,38 +2172,6 @@ impl<'a> State<'a> {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_view_path(&mut self, vp: &hir::ViewPath) -> io::Result<()> {
|
|
||||||
match vp.node {
|
|
||||||
hir::ViewPathSimple(name, ref path) => {
|
|
||||||
self.print_path(path, false, 0)?;
|
|
||||||
|
|
||||||
if path.segments.last().unwrap().name != name {
|
|
||||||
space(&mut self.s)?;
|
|
||||||
self.word_space("as")?;
|
|
||||||
self.print_name(name)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
hir::ViewPathGlob(ref path) => {
|
|
||||||
self.print_path(path, false, 0)?;
|
|
||||||
word(&mut self.s, "::*")
|
|
||||||
}
|
|
||||||
|
|
||||||
hir::ViewPathList(ref path, ref segments) => {
|
|
||||||
if path.segments.is_empty() {
|
|
||||||
word(&mut self.s, "{")?;
|
|
||||||
} else {
|
|
||||||
self.print_path(path, false, 0)?;
|
|
||||||
word(&mut self.s, "::{")?;
|
|
||||||
}
|
|
||||||
self.commasep(Inconsistent, &segments[..], |s, w| s.print_name(w.node.name))?;
|
|
||||||
word(&mut self.s, "}")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn print_mutability(&mut self, mutbl: hir::Mutability) -> io::Result<()> {
|
pub fn print_mutability(&mut self, mutbl: hir::Mutability) -> io::Result<()> {
|
||||||
match mutbl {
|
match mutbl {
|
||||||
hir::MutMutable => self.word_nbsp("mut"),
|
hir::MutMutable => self.word_nbsp("mut"),
|
||||||
|
@ -2171,7 +2192,7 @@ impl<'a> State<'a> {
|
||||||
if let Some(eself) = input.to_self() {
|
if let Some(eself) = input.to_self() {
|
||||||
self.print_explicit_self(&eself)?;
|
self.print_explicit_self(&eself)?;
|
||||||
} else {
|
} else {
|
||||||
let invalid = if let PatKind::Binding(_, name, _) = input.pat.node {
|
let invalid = if let PatKind::Binding(_, _, name, _) = input.pat.node {
|
||||||
name.node == keywords::Invalid.name()
|
name.node == keywords::Invalid.name()
|
||||||
} else {
|
} else {
|
||||||
false
|
false
|
||||||
|
|
|
@ -1440,8 +1440,8 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> {
|
||||||
}
|
}
|
||||||
ty_queue.push(&mut_ty.ty);
|
ty_queue.push(&mut_ty.ty);
|
||||||
}
|
}
|
||||||
hir::TyPath(ref maybe_qself, ref path) => {
|
hir::TyPath(hir::QPath::Resolved(ref maybe_qself, ref path)) => {
|
||||||
match self.tcx.expect_def(cur_ty.id) {
|
match path.def {
|
||||||
Def::Enum(did) | Def::TyAlias(did) |
|
Def::Enum(did) | Def::TyAlias(did) |
|
||||||
Def::Struct(did) | Def::Union(did) => {
|
Def::Struct(did) | Def::Union(did) => {
|
||||||
let generics = self.tcx.item_generics(did);
|
let generics = self.tcx.item_generics(did);
|
||||||
|
@ -1476,15 +1476,12 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> {
|
||||||
};
|
};
|
||||||
let new_path = self.rebuild_path(rebuild_info, lifetime);
|
let new_path = self.rebuild_path(rebuild_info, lifetime);
|
||||||
let qself = maybe_qself.as_ref().map(|qself| {
|
let qself = maybe_qself.as_ref().map(|qself| {
|
||||||
hir::QSelf {
|
self.rebuild_arg_ty_or_output(qself, lifetime,
|
||||||
ty: self.rebuild_arg_ty_or_output(&qself.ty, lifetime,
|
anon_nums, region_names)
|
||||||
anon_nums, region_names),
|
|
||||||
position: qself.position
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
let to = hir::Ty {
|
let to = hir::Ty {
|
||||||
id: cur_ty.id,
|
id: cur_ty.id,
|
||||||
node: hir::TyPath(qself, new_path),
|
node: hir::TyPath(hir::QPath::Resolved(qself, P(new_path))),
|
||||||
span: cur_ty.span
|
span: cur_ty.span
|
||||||
};
|
};
|
||||||
new_ty = self.rebuild_ty(new_ty, P(to));
|
new_ty = self.rebuild_ty(new_ty, P(to));
|
||||||
|
@ -1609,6 +1606,7 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> {
|
||||||
hir::AngleBracketedParameters(hir::AngleBracketedParameterData {
|
hir::AngleBracketedParameters(hir::AngleBracketedParameterData {
|
||||||
lifetimes: new_lts.into(),
|
lifetimes: new_lts.into(),
|
||||||
types: new_types,
|
types: new_types,
|
||||||
|
infer_types: data.infer_types,
|
||||||
bindings: new_bindings,
|
bindings: new_bindings,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -1623,6 +1621,7 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> {
|
||||||
hir::Path {
|
hir::Path {
|
||||||
span: path.span,
|
span: path.span,
|
||||||
global: path.global,
|
global: path.global,
|
||||||
|
def: path.def,
|
||||||
segments: new_segs.into()
|
segments: new_segs.into()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue