Merge branch 'master' into redox

This commit is contained in:
Jeremy Soller 2016-12-12 14:55:09 -07:00
commit 7e7775ce7b
718 changed files with 20810 additions and 11449 deletions

View File

@ -1,4 +1,4 @@
language: rust
language: minimal
sudo: required
dist: trusty
services:
@ -20,7 +20,7 @@ matrix:
- env: IMAGE=x86_64-gnu-cargotest
- env: IMAGE=x86_64-gnu-debug
- env: IMAGE=x86_64-gnu-nopt
- env: IMAGE=x86_64-gnu-rustbuild
- env: IMAGE=x86_64-gnu-make
- env: IMAGE=x86_64-gnu-llvm-3.7 ALLOW_PR=1 RUST_BACKTRACE=1
- env: IMAGE=x86_64-musl
@ -39,7 +39,7 @@ matrix:
install: brew install ccache
- env: >
RUST_CHECK_TARGET=check
RUST_CONFIGURE_ARGS=--target=x86_64-apple-darwin --enable-rustbuild
RUST_CONFIGURE_ARGS=--target=x86_64-apple-darwin --disable-rustbuild
SRC=.
os: osx
install: brew install ccache
@ -51,17 +51,16 @@ matrix:
install: brew install ccache
script:
- if [ -z "$ALLOW_PR" ] && [ "$TRAVIS_BRANCH" != "auto" ]; then
echo skipping, not a full build;
elif [ -z "$ENABLE_AUTO" ] then
echo skipping, not quite ready yet
elif [ "$TRAVIS_OS_NAME" = "osx" ]; then
git submodule update --init;
src/ci/run.sh;
else
git submodule update --init;
src/ci/docker/run.sh $IMAGE;
fi
- >
if [ "$ALLOW_PR" = "" ] && [ "$TRAVIS_BRANCH" != "auto" ]; then
echo skipping, not a full build;
elif [ "$TRAVIS_OS_NAME" = "osx" ]; then
git submodule update --init;
src/ci/run.sh;
else
git submodule update --init;
src/ci/docker/run.sh $IMAGE;
fi
# Save tagged docker images we created and load them if they're available
before_cache:

View File

@ -86,13 +86,17 @@ benchmarks, generate documentation, install a fresh build of Rust, and more.
It's your best friend when working on Rust, allowing you to compile & test
your contributions before submission.
All the configuration for the build system lives in [the `mk` directory][mkdir]
in the project root. It can be hard to follow in places, as it uses some
advanced Make features which make for some challenging reading. If you have
questions on the build system internals, try asking in
[`#rust-internals`][pound-rust-internals].
The build system lives in [the `src/bootstrap` directory][bootstrap] in the
project root. Our build system is itself written in Rust and is based on Cargo
to actually build all the compiler's crates. If you have questions on the build
system internals, try asking in [`#rust-internals`][pound-rust-internals].
[mkdir]: https://github.com/rust-lang/rust/tree/master/mk/
[bootstrap]: https://github.com/rust-lang/rust/tree/master/src/bootstrap/
> **Note**: the build system was recently rewritten from a jungle of makefiles
> to the current incarnation you'll see in `src/bootstrap`. If you experience
> bugs you can temporarily revert back to the makefiles with
> `--disable-rustbuild` passed to `./configure`.
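For example, a minimal sketch of falling back to the deprecated makefile-based build (any extra `./configure` flags are up to you):

```sh
# temporarily use the old makefile-based build system instead of rustbuild
./configure --disable-rustbuild
make
```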
### Configuration
@ -119,42 +123,111 @@ configuration used later in the build process. Some options to note:
To see a full list of options, run `./configure --help`.
### Useful Targets
### Building
Some common make targets are:
Although the `./configure` script will generate a `Makefile`, this is actually
just a thin veneer over the actual build system driver, `x.py`. This file, at
the root of the repository, is used to build, test, and document various parts
of the compiler. You can execute it as:
- `make tips` - show useful targets, variables and other tips for working with
the build system.
- `make rustc-stage1` - build up to (and including) the first stage. For most
cases we don't need to build the stage2 compiler, so we can save time by not
building it. The stage1 compiler is a fully functioning compiler and
(probably) will be enough to determine if your change works as expected.
- `make $host/stage1/bin/rustc` - Where $host is a target triple like x86_64-unknown-linux-gnu.
This will build just rustc, without libstd. This is the fastest way to recompile after
you changed only rustc source code. Note however that the resulting rustc binary
won't have a stdlib to link against by default. You can build libstd once with
`make rustc-stage1`, rustc will pick it up afterwards. libstd is only guaranteed to
work if recompiled, so if there are any issues recompile it.
- `make check` - build the full compiler & run all tests (takes a while). This
```sh
python x.py build
```
On some systems you can also use the shorter version:
```sh
./x.py build
```
To learn more about the driver and top-level targets, you can execute:
```sh
python x.py --help
```
The general format for the driver script is:
```sh
python x.py <command> [<directory>]
```
Some example commands are `build`, `test`, and `doc`. These will build, test,
and document the specified directory. The second argument, `<directory>`, is
optional and defaults to working over the entire compiler. If specified,
however, only that specific directory will be built. For example:
```sh
# build the entire compiler
python x.py build
# build all documentation
python x.py doc
# run all test suites
python x.py test
# build only the standard library
python x.py build src/libstd
# test only one particular test suite
python x.py test src/test/rustdoc
# build only the stage0 libcore library
python x.py build src/libcore --stage 0
```
You can explore the build system through the various `--help` pages for each
subcommand. For example, to learn more about a command you can run:
```
python x.py build --help
```
To learn about all possible rules you can execute, run:
```
python x.py build --help --verbose
```
### Useful commands
Some common invocations of `x.py` are:
- `x.py build --help` - show the help message and explain the subcommand
- `x.py build src/libtest --stage 1` - build up to (and including) the first
stage. For most cases we don't need to build the stage2 compiler, so we can
save time by not building it. The stage1 compiler is a fully functioning
compiler and (probably) will be enough to determine if your change works as
expected.
- `x.py build src/rustc --stage 1` - This will build just rustc, without libstd.
This is the fastest way to recompile after you changed only rustc source code.
Note however that the resulting rustc binary won't have a stdlib to link
against by default. You can build libstd once with `x.py build src/libstd`,
but it is only guaranteed to work if recompiled, so if there are any issues
recompile it.
- `x.py test` - build the full compiler & run all tests (takes a while). This
is what gets run by the continuous integration system against your pull
request. You should run this before submitting to make sure your tests pass
& everything builds in the correct manner.
- `make check-stage1-std NO_REBUILD=1` - test the standard library without
rebuilding the entire compiler
- `make check TESTNAME=<substring-of-test-name>` - Run a matching set of tests.
- `x.py test src/libstd --stage 1` - test the standard library without
recompiling stage 2.
- `x.py test src/test/run-pass --filter TESTNAME` - Run a matching set of tests.
- `TESTNAME` should be a substring of the tests to match against, e.g. it could
be the fully qualified test name, or just a part of it.
`TESTNAME=collections::hash::map::test_map::test_capacity_not_less_than_len`
or `TESTNAME=test_capacity_not_less_than_len`.
- `make check-stage1-rpass TESTNAME=<substring-of-test-name>` - Run a single
rpass test with the stage1 compiler (this will be quicker than running the
command above as we only build the stage1 compiler, not the entire thing).
You can also leave off the `-rpass` to run all stage1 test types.
- `make check-stage1-coretest` - Run stage1 tests in `libcore`.
- `make tidy` - Check that the source code is in compliance with Rust's style
guidelines. There is no official document describing Rust's full guidelines
as of yet, but basic rules like 4 spaces for indentation and no more than 99
characters in a single line should be kept in mind when writing code.
- `x.py test src/test/run-pass --stage 1 --filter <substring-of-test-name>` -
Run a single rpass test with the stage1 compiler (this will be quicker than
running the command above as we only build the stage1 compiler, not the entire
thing). You can also leave off the directory argument to run all stage1 test
types.
- `x.py test src/libcore --stage 1` - Run stage1 tests in `libcore`.
- `x.py test src/tools/tidy` - Check that the source code is in compliance with
Rust's style guidelines. There is no official document describing Rust's full
guidelines as of yet, but basic rules like 4 spaces for indentation and no
more than 99 characters in a single line should be kept in mind when writing
code.
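
Putting a few of these together, a reasonable pre-submission sanity pass might look like the sketch below (pick the suites relevant to your change):

```sh
# style check, then a quick stage1 standard library test run
python x.py test src/tools/tidy
python x.py test src/libstd --stage 1
```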
## Pull Requests
@ -172,19 +245,17 @@ amount of time you have to wait. You need to have built the compiler at least
once before these will work, but that's only one full build rather than
one each time.
$ make -j8 rustc-stage1 && make check-stage1
$ python x.py test --stage 1
is one such example, which builds just `rustc`, and then runs the tests. If
you're adding something to the standard library, try
$ make -j8 check-stage1-std NO_REBUILD=1
This will not rebuild the compiler, but will run the tests.
$ python x.py test src/libstd --stage 1
Please make sure your pull request is in compliance with Rust's style
guidelines by running
$ make tidy
$ python x.py test src/tools/tidy
Make this check before every pull request (and every new commit in a pull
request); you can add [git hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks)
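
As a sketch, a hypothetical `pre-push` hook that runs the tidy check before every push (the hook path and shebang are assumptions; adapt to your setup):

```sh
#!/bin/sh
# save as .git/hooks/pre-push and make it executable;
# a non-zero exit status from tidy will abort the push
python x.py test src/tools/tidy
```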

View File

@ -36,16 +36,14 @@ Read ["Installing Rust"] from [The Book].
```sh
$ ./configure
$ make && make install
$ make && sudo make install
```
> ***Note:*** You may need to use `sudo make install` if you do not
> normally have permission to modify the destination directory. The
> install locations can be adjusted by passing a `--prefix` argument
> to `configure`. Various other options are also supported; pass
> ***Note:*** Install locations can be adjusted by passing a `--prefix`
> argument to `configure`. Various other options are also supported; pass
> `--help` for more information on them.
When complete, `make install` will place several programs into
When complete, `sudo make install` will place several programs into
`/usr/local/bin`: `rustc`, the Rust compiler, and `rustdoc`, the
API-documentation tool. This install does not include [Cargo],
Rust's package manager, which you may also want to build.
@ -108,30 +106,22 @@ MSVC builds of Rust additionally require an installation of Visual Studio 2013
(or later) so `rustc` can use its linker. Make sure to check the “C++ tools”
option.
With these dependencies installed, the build takes two steps:
With these dependencies installed, you can build the compiler in a `cmd.exe`
shell with:
```sh
$ ./configure
> python x.py build
```
If you're running inside of an msys shell, however, you can run:
```sh
$ ./configure --build=x86_64-pc-windows-msvc
$ make && make install
```
#### MSVC with rustbuild
The old build system, based on makefiles, is currently being rewritten into a
Rust-based build system called rustbuild. This can be used to bootstrap the
compiler on MSVC without needing to install MSYS or MinGW. All you need are
[Python 2](https://www.python.org/downloads/),
[CMake](https://cmake.org/download/), and
[Git](https://git-scm.com/downloads) in your PATH (make sure you do not use the
ones from MSYS if you have it installed). You'll also need Visual Studio 2013 or
newer with the C++ tools. Then all you need to do is to kick off rustbuild.
```
python x.py build
```
Currently rustbuild only works with some known versions of Visual Studio. If you
have a more recent version installed that a part of rustbuild doesn't understand
Currently building Rust only works with some known versions of Visual Studio. If
you have a more recent version installed that the build system doesn't understand,
then you may need to force rustbuild to use an older version. This can be done
by manually calling the appropriate vcvars file before running the bootstrap.
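For example, a sketch of forcing the Visual Studio 2013 toolchain from a `cmd.exe` shell (the exact `vcvarsall.bat` path depends on your installation):

```
CALL "C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\vcvarsall.bat"
python x.py build
```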
@ -149,16 +139,6 @@ $ ./configure
$ make docs
```
Building the documentation requires building the compiler, so the above
details will apply. Once you have the compiler built, you can
```sh
$ make docs NO_REBUILD=1
```
To make sure you don't re-build the compiler because you made a change
to some documentation.
The generated documentation will appear in a top-level `doc` directory,
created by the `make` rule.
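
Under rustbuild, the corresponding documentation build (a sketch using the `x.py` driver described above) is:

```sh
# build all documentation via the new build system
python x.py doc
```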

View File

@ -2,25 +2,22 @@ environment:
matrix:
# 32/64 bit MSVC
- MSYS_BITS: 64
TARGET: x86_64-pc-windows-msvc
CHECK: check
CONFIGURE_ARGS: --enable-llvm-assertions --enable-debug-assertions
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc
RUST_CHECK_TARGET: check
- MSYS_BITS: 32
TARGET: i686-pc-windows-msvc
CHECK: check
CONFIGURE_ARGS: --enable-llvm-assertions --enable-debug-assertions
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc
RUST_CHECK_TARGET: check
# MSVC rustbuild
# MSVC makefiles
- MSYS_BITS: 64
CONFIGURE_ARGS: --enable-rustbuild --enable-llvm-assertions --enable-debug-assertions
TARGET: x86_64-pc-windows-msvc
CHECK: check
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --disable-rustbuild
RUST_CHECK_TARGET: check
# MSVC cargotest
- MSYS_BITS: 64
CONFIGURE_ARGS: --enable-rustbuild --enable-llvm-assertions --enable-debug-assertions
TARGET: x86_64-pc-windows-msvc
CHECK: check-cargotest
NO_VENDOR: 1
RUST_CHECK_TARGET: check-cargotest
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc
# 32/64-bit MinGW builds.
#
@ -47,24 +44,22 @@ environment:
# *not* use debug assertions and llvm assertions. This is because they take
# too long on appveyor and this is tested by rustbuild below.
- MSYS_BITS: 32
TARGET: i686-pc-windows-gnu
CHECK: check
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
RUST_CHECK_TARGET: check
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci
MINGW_ARCHIVE: i686-4.9.2-release-win32-dwarf-rt_v4-rev4.7z
MINGW_DIR: mingw32
- MSYS_BITS: 32
CONFIGURE_ARGS: --enable-rustbuild --enable-llvm-assertions --enable-debug-assertions
TARGET: i686-pc-windows-gnu
CHECK: check
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --disable-rustbuild
RUST_CHECK_TARGET: check
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci
MINGW_ARCHIVE: i686-4.9.2-release-win32-dwarf-rt_v4-rev4.7z
MINGW_DIR: mingw32
- MSYS_BITS: 64
CONFIGURE_ARGS: --enable-llvm-assertions --enable-debug-assertions
TARGET: x86_64-pc-windows-gnu
CHECK: check
RUST_CHECK_TARGET: check
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
MINGW_URL: https://s3.amazonaws.com/rust-lang-ci
MINGW_ARCHIVE: x86_64-4.9.2-release-win32-seh-rt_v4-rev4.7z
MINGW_DIR: mingw64
@ -90,15 +85,20 @@ install:
- if NOT defined MINGW_URL set PATH=C:\msys64\mingw%MSYS_BITS%\bin;C:\msys64\usr\bin;%PATH%
test_script:
- sh ./configure
%CONFIGURE_ARGS%
--build=%TARGET%
- bash -c "make -j$(nproc)"
- bash -c "make %CHECK% -j$(nproc)"
- git submodule update --init
- set SRC=.
- set NO_CCACHE=1
- sh src/ci/run.sh
cache:
- build/%TARGET%/llvm -> src/rustllvm/llvm-auto-clean-trigger
- "%TARGET%/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- "build/i686-pc-windows-gnu/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- "build/x86_64-pc-windows-gnu/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- "build/i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- "build/x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- "i686-pc-windows-gnu/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- "x86_64-pc-windows-gnu/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- "i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- "x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
branches:
only:

configure vendored
View File

@ -631,7 +631,7 @@ opt stage0-landing-pads 1 "enable landing pads during bootstrap with stage0"
opt dist-host-only 0 "only install bins for the host architecture"
opt inject-std-version 1 "inject the current compiler version of libstd into programs"
opt llvm-version-check 1 "check if the LLVM version is supported, build anyway"
opt rustbuild 0 "use the rust and cargo based build system"
opt rustbuild 1 "use the rust and cargo based build system"
opt codegen-tests 1 "run the src/test/codegen tests"
opt option-checking 1 "complain about unrecognized options in this configure script"
opt ninja 0 "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)"
@ -664,11 +664,11 @@ valopt armv7-linux-androideabi-ndk "" "armv7-linux-androideabi NDK standalone pa
valopt aarch64-linux-android-ndk "" "aarch64-linux-android NDK standalone path"
valopt nacl-cross-path "" "NaCl SDK path (Pepper Canary is recommended). Must be absolute!"
valopt musl-root "/usr/local" "MUSL root installation directory (deprecated)"
valopt musl-root-x86_64 "/usr/local" "x86_64-unknown-linux-musl install directory"
valopt musl-root-i686 "/usr/local" "i686-unknown-linux-musl install directory"
valopt musl-root-arm "/usr/local" "arm-unknown-linux-musleabi install directory"
valopt musl-root-armhf "/usr/local" "arm-unknown-linux-musleabihf install directory"
valopt musl-root-armv7 "/usr/local" "armv7-unknown-linux-musleabihf install directory"
valopt musl-root-x86_64 "" "x86_64-unknown-linux-musl install directory"
valopt musl-root-i686 "" "i686-unknown-linux-musl install directory"
valopt musl-root-arm "" "arm-unknown-linux-musleabi install directory"
valopt musl-root-armhf "" "arm-unknown-linux-musleabihf install directory"
valopt musl-root-armv7 "" "armv7-unknown-linux-musleabihf install directory"
valopt extra-filename "" "Additional data that is hashed and passed to the -C extra-filename flag"
if [ -e ${CFG_SRC_DIR}.git ]
@ -848,7 +848,10 @@ then
fi
# For building LLVM
probe_need CFG_CMAKE cmake
if [ -z "$CFG_LLVM_ROOT" ]
then
probe_need CFG_CMAKE cmake
fi
# On MacOS X, invoking `javac` pops up a dialog if the JDK is not
# installed. Since `javac` is only used if `antlr4` is available,
@ -1371,7 +1374,7 @@ then
fi
fi
if [ -z "$CFG_ENABLE_RUSTBUILD" ]; then
if [ -n "$CFG_DISABLE_RUSTBUILD" ]; then
step_msg "making directories"
@ -1471,7 +1474,7 @@ fi
step_msg "configuring submodules"
# Have to be in the top of src directory for this
if [ -z $CFG_DISABLE_MANAGE_SUBMODULES ] && [ -z $CFG_ENABLE_RUSTBUILD ]
if [ -z "$CFG_DISABLE_MANAGE_SUBMODULES" ] && [ -n "$CFG_DISABLE_RUSTBUILD" ]
then
cd ${CFG_SRC_DIR}
@ -1543,11 +1546,11 @@ do
;;
esac
if [ -n "$CFG_ENABLE_RUSTBUILD" ]
if [ -z "$CFG_DISABLE_RUSTBUILD" ]
then
msg "not configuring LLVM, rustbuild in use"
do_reconfigure=0
elif [ -z $CFG_LLVM_ROOT ]
elif [ -z "$CFG_LLVM_ROOT" ]
then
LLVM_BUILD_DIR=${CFG_BUILD_DIR}$t/llvm
LLVM_INST_DIR=$LLVM_BUILD_DIR
@ -1868,7 +1871,7 @@ do
putvar $CFG_LLVM_INST_DIR
done
if [ -n "$CFG_ENABLE_RUSTBUILD" ]
if [ -z "$CFG_DISABLE_RUSTBUILD" ]
then
INPUT_MAKEFILE=src/bootstrap/mk/Makefile.in
else
@ -1887,5 +1890,28 @@ else
step_msg "complete"
fi
msg "run \`make help\`"
if [ "$CFG_SRC_DIR" = `pwd` ]; then
X_PY=x.py
else
X_PY=${CFG_SRC_DIR_RELATIVE}x.py
fi
if [ -z "$CFG_DISABLE_RUSTBUILD" ]; then
msg "NOTE you have now configured rust to use a rewritten build system"
msg " called rustbuild, and as a result this may have bugs that "
msg " you did not see before. If you experience any issues you can"
msg " go back to the old build system with --disable-rustbuild and"
msg " please feel free to report any bugs!"
msg ""
msg "run \`python ${X_PY} --help\`"
else
warn "the makefile-based build system is deprecated in favor of rustbuild"
msg ""
msg "It is recommended you avoid passing --disable-rustbuild to get your"
msg "build working as the makefiles will be deleted on 2017-02-02. If you"
msg "encounter bugs with rustbuild please file issues against rust-lang/rust"
msg ""
msg "run \`make help\`"
fi
msg

View File

@ -0,0 +1,24 @@
# i686-unknown-openbsd configuration
CC_i686-unknown-openbsd=$(CC)
CXX_i686-unknown-openbsd=$(CXX)
CPP_i686-unknown-openbsd=$(CPP)
AR_i686-unknown-openbsd=$(AR)
CFG_LIB_NAME_i686-unknown-openbsd=lib$(1).so
CFG_STATIC_LIB_NAME_i686-unknown-openbsd=lib$(1).a
CFG_LIB_GLOB_i686-unknown-openbsd=lib$(1)-*.so
CFG_LIB_DSYM_GLOB_i686-unknown-openbsd=$(1)-*.dylib.dSYM
CFG_JEMALLOC_CFLAGS_i686-unknown-openbsd := -m32 -I/usr/include $(CFLAGS)
CFG_GCCISH_CFLAGS_i686-unknown-openbsd := -g -fPIC -m32 -I/usr/include $(CFLAGS)
CFG_GCCISH_LINK_FLAGS_i686-unknown-openbsd := -shared -fPIC -g -pthread -m32
CFG_GCCISH_DEF_FLAG_i686-unknown-openbsd := -Wl,--export-dynamic,--dynamic-list=
CFG_LLC_FLAGS_i686-unknown-openbsd :=
CFG_INSTALL_NAME_i686-unknown-openbsd =
CFG_EXE_SUFFIX_i686-unknown-openbsd :=
CFG_WINDOWSY_i686-unknown-openbsd :=
CFG_UNIXY_i686-unknown-openbsd := 1
CFG_LDPATH_i686-unknown-openbsd :=
CFG_RUN_i686-unknown-openbsd=$(2)
CFG_RUN_TARG_i686-unknown-openbsd=$(call CFG_RUN_i686-unknown-openbsd,,$(2))
CFG_GNU_TRIPLE_i686-unknown-openbsd := i686-unknown-openbsd
RUSTC_FLAGS_i686-unknown-openbsd=-C linker=$(call FIND_COMPILER,$(CC))
CFG_DISABLE_JEMALLOC_i686-unknown-openbsd := 1

View File

@ -52,7 +52,7 @@
TARGET_CRATES := libc std term \
getopts collections test rand \
compiler_builtins core alloc \
rustc_unicode rustc_bitflags \
std_unicode rustc_bitflags \
alloc_system alloc_jemalloc \
panic_abort panic_unwind unwind
RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_driver \
@ -65,27 +65,23 @@ HOST_CRATES := syntax syntax_ext proc_macro_tokens proc_macro_plugin syntax_pos
TOOLS := compiletest rustdoc rustc rustbook error_index_generator
DEPS_core :=
DEPS_compiler_builtins := core
DEPS_compiler_builtins := core native:compiler-rt
DEPS_alloc := core libc alloc_system
DEPS_alloc_system := core libc
DEPS_alloc_jemalloc := core libc native:jemalloc
DEPS_collections := core alloc rustc_unicode
DEPS_collections := core alloc std_unicode
DEPS_libc := core
DEPS_rand := core
DEPS_rustc_bitflags := core
DEPS_rustc_unicode := core
DEPS_std_unicode := core
DEPS_panic_abort := libc alloc
DEPS_panic_unwind := libc alloc unwind
DEPS_unwind := libc
RUSTFLAGS_compiler_builtins := -lstatic=compiler-rt
RUSTFLAGS_panic_abort := -C panic=abort
# FIXME(stage0): change this to just `RUSTFLAGS_panic_abort := ...`
RUSTFLAGS1_panic_abort := -C panic=abort
RUSTFLAGS2_panic_abort := -C panic=abort
RUSTFLAGS3_panic_abort := -C panic=abort
DEPS_std := core libc rand alloc collections compiler_builtins rustc_unicode \
DEPS_std := core libc rand alloc collections compiler_builtins std_unicode \
native:backtrace \
alloc_system panic_abort panic_unwind unwind
DEPS_arena := std
@ -100,7 +96,7 @@ DEPS_serialize := std log
DEPS_term := std
DEPS_test := std getopts term native:rust_test_helpers
DEPS_syntax := std term serialize log arena libc rustc_bitflags rustc_unicode rustc_errors syntax_pos rustc_data_structures
DEPS_syntax := std term serialize log arena libc rustc_bitflags std_unicode rustc_errors syntax_pos rustc_data_structures
DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros proc_macro
DEPS_syntax_pos := serialize
DEPS_proc_macro_tokens := syntax syntax_pos log
@ -140,7 +136,7 @@ DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back \
DEPS_rustc_incremental := rustc syntax_pos serialize rustc_data_structures
DEPS_rustc_save_analysis := rustc log syntax syntax_pos serialize
DEPS_rustc_typeck := rustc syntax syntax_pos rustc_platform_intrinsics rustc_const_math \
rustc_const_eval rustc_errors
rustc_const_eval rustc_errors rustc_data_structures
DEPS_rustdoc := rustc rustc_driver native:hoedown serialize getopts test \
rustc_lint rustc_const_eval syntax_pos rustc_data_structures
@ -162,7 +158,7 @@ ONLY_RLIB_libc := 1
ONLY_RLIB_alloc := 1
ONLY_RLIB_rand := 1
ONLY_RLIB_collections := 1
ONLY_RLIB_rustc_unicode := 1
ONLY_RLIB_std_unicode := 1
ONLY_RLIB_rustc_bitflags := 1
ONLY_RLIB_alloc_system := 1
ONLY_RLIB_alloc_jemalloc := 1
@ -173,7 +169,7 @@ ONLY_RLIB_unwind := 1
TARGET_SPECIFIC_alloc_jemalloc := 1
# Documented-by-default crates
DOC_CRATES := std alloc collections core libc rustc_unicode
DOC_CRATES := std alloc collections core libc std_unicode
ifeq ($(CFG_DISABLE_JEMALLOC),)
RUSTFLAGS_rustc_back := --cfg 'feature="jemalloc"'

View File

@ -372,15 +372,12 @@ CFG_INFO := $(info cfg: disabling unstable features (CFG_DISABLE_UNSTABLE_FEATUR
# Turn on feature-staging
export CFG_DISABLE_UNSTABLE_FEATURES
# Subvert unstable feature lints to do the self-build
export RUSTC_BOOTSTRAP=1
endif
ifdef CFG_MUSL_ROOT
export CFG_MUSL_ROOT
endif
# FIXME: Transitionary measure to bootstrap using the old bootstrap logic.
# Remove this once the bootstrap compiler uses the new logic in Issue #36548.
export RUSTC_BOOTSTRAP_KEY=62b3e239
export RUSTC_BOOTSTRAP := 1
######################################################################
# Per-stage targets and runner
@ -443,10 +440,7 @@ endif
TSREQ$(1)_T_$(2)_H_$(3) = \
$$(HSREQ$(1)_H_$(3)) \
$$(foreach obj,$$(REQUIRED_OBJECTS_$(2)),\
$$(TLIB$(1)_T_$(2)_H_$(3))/$$(obj)) \
$$(TLIB0_T_$(2)_H_$(3))/$$(call CFG_STATIC_LIB_NAME_$(2),compiler-rt)
# ^ This copies `libcompiler-rt.a` to the stage0 sysroot
# ^ TODO(stage0) update this to not copy `libcompiler-rt.a` to stage0
$$(TLIB$(1)_T_$(2)_H_$(3))/$$(obj))
# Prerequisites for a working stageN compiler and libraries, for a specific
# target

View File

@ -15,7 +15,7 @@
# The names of crates that must be tested
# libcore/librustc_unicode tests are in a separate crate
# libcore/libstd_unicode tests are in a separate crate
DEPS_coretest :=
$(eval $(call RUST_CRATE,coretest))

src/Cargo.lock generated
View File

@ -45,7 +45,6 @@ dependencies = [
"gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
"md5 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
@ -73,7 +72,7 @@ version = "0.0.0"
dependencies = [
"alloc 0.0.0",
"core 0.0.0",
"rustc_unicode 0.0.0",
"std_unicode 0.0.0",
]
[[package]]
@ -172,11 +171,6 @@ name = "log"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "md5"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "num_cpus"
version = "0.2.13"
@ -409,6 +403,7 @@ version = "0.0.0"
dependencies = [
"build_helper 0.1.0",
"gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_bitflags 0.0.0",
]
[[package]]
@ -550,13 +545,6 @@ dependencies = [
"syntax_pos 0.0.0",
]
[[package]]
name = "rustc_unicode"
version = "0.0.0"
dependencies = [
"core 0.0.0",
]
[[package]]
name = "rustdoc"
version = "0.0.0"
@ -604,7 +592,7 @@ dependencies = [
"panic_abort 0.0.0",
"panic_unwind 0.0.0",
"rand 0.0.0",
"rustc_unicode 0.0.0",
"std_unicode 0.0.0",
"unwind 0.0.0",
]
@ -616,6 +604,13 @@ dependencies = [
"std 0.0.0",
]
[[package]]
name = "std_unicode"
version = "0.0.0"
dependencies = [
"core 0.0.0",
]
[[package]]
name = "syntax"
version = "0.0.0"
@ -686,7 +681,6 @@ dependencies = [
"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
"checksum libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "044d1360593a78f5c8e5e710beccdc24ab71d1f01bc19a29bcacdba22e8475d8"
"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
"checksum md5 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a5539a8dee9b4ae308c9c406a379838b435a8f2c84cf9fedc6d5a576be9888db"
"checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3"
"checksum rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "6159e4e6e559c81bd706afe9c8fd68f547d3e851ce12e76b1de7914bab61691b"
"checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796"

View File

@ -11,3 +11,20 @@ members = [
"tools/rustbook",
"tools/tidy",
]
# Curiously, compiletest will segfault if compiled with opt-level=3 on 64-bit
# MSVC when running the compile-fail test suite when a should-fail test panics.
# But hey if this is removed and it gets past the bots, sounds good to me.
[profile.release]
opt-level = 2
[profile.bench]
opt-level = 2
# These options are controlled from our rustc wrapper script, so turn them off
# here and have them controlled elsewhere.
[profile.dev]
debug = false
debug-assertions = false
[profile.test]
debug = false
debug-assertions = false

View File

@ -29,4 +29,3 @@ getopts = "0.2"
rustc-serialize = "0.3"
gcc = "0.3.38"
libc = "0.2"
md5 = "0.1"

View File

@ -32,7 +32,7 @@ The script accepts commands, flags, and filters to determine what to do:
# build the whole compiler
./x.py build
# build the stage1 compier
# build the stage1 compiler
./x.py build --stage 1
# build stage0 libstd
@ -66,17 +66,6 @@ The script accepts commands, flags, and filters to determine what to do:
* `doc` - a command for building documentation. Like above can take arguments
for what to document.
If you're more used to `./configure` and `make`, however, then you can also
configure the build system to use rustbuild instead of the old makefiles:
```
./configure --enable-rustbuild
make
```
Afterwards the `Makefile` which is generated will have a few commands like
`make check`, `make tidy`, etc.
## Configuring rustbuild
There are currently two primary methods for configuring the rustbuild build
@ -90,6 +79,13 @@ be found at `src/bootstrap/config.toml.example`, and the configuration file
can also be passed as `--config path/to/config.toml` if the build system is
being invoked manually (via the python script).
Finally, rustbuild makes use of the [gcc-rs crate] which has [its own
method][env-vars] of configuring C compilers and C flags via environment
variables.
[gcc-rs crate]: https://github.com/alexcrichton/gcc-rs
[env-vars]: https://github.com/alexcrichton/gcc-rs#external-configuration-via-environment-variables
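For instance, a sketch of overriding the C compiler via those environment variables (`CC` and `CFLAGS` are the generic variables gcc-rs reads; the values here are placeholders):

```sh
# use a specific C compiler and extra flags when native C code is built
CC=clang CFLAGS="-fPIC -O2" ./x.py build
```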
## Build stages
The rustbuild build system goes through a few phases to actually build the
@ -273,16 +269,17 @@ After that, each module in rustbuild should have enough documentation to keep
you up and running. Some general areas that you may be interested in modifying
are:
* Adding a new build tool? Take a look at `build/step.rs` for examples of other
tools, as well as `build/mod.rs`.
* Adding a new build tool? Take a look at `bootstrap/step.rs` for examples of
other tools.
* Adding a new compiler crate? Look no further! Adding crates can be done by
adding a new directory with `Cargo.toml` followed by configuring all
`Cargo.toml` files accordingly.
* Adding a new dependency from crates.io? We're still working on that, so hold
off on that for now.
* Adding a new configuration option? Take a look at `build/config.rs` or perhaps
`build/flags.rs` and then modify the build elsewhere to read that option.
* Adding a sanity check? Take a look at `build/sanity.rs`.
* Adding a new configuration option? Take a look at `bootstrap/config.rs` or
perhaps `bootstrap/flags.rs` and then modify the build elsewhere to read that
option.
* Adding a sanity check? Take a look at `bootstrap/sanity.rs`.
If you have any questions feel free to reach out on `#rust-internals` on IRC or
open an issue in the bug tracker!

View File

@ -125,6 +125,11 @@ fn main() {
cmd.arg("-C").arg(format!("codegen-units={}", s));
}
// Emit save-analysis info.
if env::var("RUSTC_SAVE_ANALYSIS") == Ok("api".to_string()) {
cmd.arg("-Zsave-analysis-api");
}
// Dealing with rpath here is a little special, so let's go into some
// detail. First off, `-rpath` is a linker option on Unix platforms
// which adds to the runtime dynamic loader path when looking for

View File

@ -30,32 +30,37 @@ def get(url, path, verbose=False):
sha_path = sha_file.name
try:
download(sha_path, sha_url, verbose)
download(sha_path, sha_url, False, verbose)
if os.path.exists(path):
if verify(path, sha_path, False):
print("using already-download file " + path)
if verbose:
print("using already-download file " + path)
return
else:
print("ignoring already-download file " + path + " due to failed verification")
if verbose:
print("ignoring already-download file " + path + " due to failed verification")
os.unlink(path)
download(temp_path, url, verbose)
if not verify(temp_path, sha_path, True):
download(temp_path, url, True, verbose)
if not verify(temp_path, sha_path, verbose):
raise RuntimeError("failed verification")
print("moving {} to {}".format(temp_path, path))
if verbose:
print("moving {} to {}".format(temp_path, path))
shutil.move(temp_path, path)
finally:
delete_if_present(sha_path)
delete_if_present(temp_path)
delete_if_present(sha_path, verbose)
delete_if_present(temp_path, verbose)
def delete_if_present(path):
def delete_if_present(path, verbose):
if os.path.isfile(path):
print("removing " + path)
if verbose:
print("removing " + path)
os.unlink(path)
def download(path, url, verbose):
print("downloading {} to {}".format(url, path))
def download(path, url, probably_big, verbose):
if probably_big or verbose:
print("downloading {}".format(url))
# see http://serverfault.com/questions/301128/how-to-download
if sys.platform == 'win32':
run(["PowerShell.exe", "/nologo", "-Command",
@ -63,17 +68,22 @@ def download(path, url, verbose):
".DownloadFile('{}', '{}')".format(url, path)],
verbose=verbose)
else:
run(["curl", "-o", path, url], verbose=verbose)
if probably_big or verbose:
option = "-#"
else:
option = "-s"
run(["curl", option, "-Sf", "-o", path, url], verbose=verbose)
def verify(path, sha_path, verbose):
print("verifying " + path)
if verbose:
print("verifying " + path)
with open(path, "rb") as f:
found = hashlib.sha256(f.read()).hexdigest()
with open(sha_path, "r") as f:
expected, _ = f.readline().split()
verified = found == expected
if not verified and verbose:
if not verified:
print("invalid checksum:\n"
" found: {}\n"
" expected: {}".format(found, expected))
@ -144,6 +154,7 @@ class RustBuild(object):
if self.rustc().startswith(self.bin_root()) and \
(not os.path.exists(self.rustc()) or self.rustc_out_of_date()):
self.print_what_it_means_to_bootstrap()
if os.path.exists(self.bin_root()):
shutil.rmtree(self.bin_root())
channel = self.stage0_rustc_channel()
@ -167,6 +178,7 @@ class RustBuild(object):
if self.cargo().startswith(self.bin_root()) and \
(not os.path.exists(self.cargo()) or self.cargo_out_of_date()):
self.print_what_it_means_to_bootstrap()
channel = self.stage0_cargo_channel()
filename = "cargo-{}-{}.tar.gz".format(channel, self.build)
url = "https://static.rust-lang.org/cargo-dist/" + self.stage0_cargo_date()
@ -251,7 +263,27 @@ class RustBuild(object):
else:
return ''
def print_what_it_means_to_bootstrap(self):
if hasattr(self, 'printed'):
return
self.printed = True
if os.path.exists(self.bootstrap_binary()):
return
if not '--help' in sys.argv or len(sys.argv) == 1:
return
print('info: the build system for Rust is written in Rust, so this')
print(' script is now going to download a stage0 rust compiler')
print(' and then compile the build system itself')
print('')
print('info: in the meantime you can read more about rustbuild at')
print(' src/bootstrap/README.md before the download finishes')
def bootstrap_binary(self):
return os.path.join(self.build_dir, "bootstrap/debug/bootstrap")
def build_bootstrap(self):
self.print_what_it_means_to_bootstrap()
build_dir = os.path.join(self.build_dir, "bootstrap")
if self.clean and os.path.exists(build_dir):
shutil.rmtree(build_dir)
@ -408,22 +440,31 @@ def main():
rb.use_vendored_sources = '\nvendor = true' in rb.config_toml or \
'CFG_ENABLE_VENDOR' in rb.config_mk
if 'SUDO_USER' in os.environ:
if os.environ['USER'] != os.environ['SUDO_USER']:
rb.use_vendored_sources = True
print('info: looks like you are running this command under `sudo`')
print(' and so in order to preserve your $HOME this will now')
print(' use vendored sources by default. Note that if this')
print(' does not work you should run a normal build first')
print('          before running a command like `sudo make install`')
if rb.use_vendored_sources:
if not os.path.exists('.cargo'):
os.makedirs('.cargo')
f = open('.cargo/config','w')
f.write("""
[source.crates-io]
replace-with = 'vendored-sources'
registry = 'https://example.com'
with open('.cargo/config','w') as f:
f.write("""
[source.crates-io]
replace-with = 'vendored-sources'
registry = 'https://example.com'
[source.vendored-sources]
directory = '{}/src/vendor'
""".format(rb.rust_root))
f.close()
[source.vendored-sources]
directory = '{}/src/vendor'
""".format(rb.rust_root))
else:
if os.path.exists('.cargo'):
shutil.rmtree('.cargo')
data = stage0_data(rb.rust_root)
rb._rustc_channel, rb._rustc_date = data['rustc'].split('-', 1)
rb._cargo_channel, rb._cargo_date = data['cargo'].split('-', 1)
@ -438,7 +479,7 @@ def main():
sys.stdout.flush()
# Run the bootstrap
args = [os.path.join(rb.build_dir, "bootstrap/debug/bootstrap")]
args = [rb.bootstrap_binary()]
args.extend(sys.argv[1:])
env = os.environ.copy()
env["BUILD"] = rb.build

View File

@ -51,7 +51,7 @@ pub fn find(build: &mut Build) {
if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
cfg.compiler(cc);
} else {
set_compiler(&mut cfg, "gcc", target, config);
set_compiler(&mut cfg, "gcc", target, config, build);
}
let compiler = cfg.get_compiler();
@ -72,7 +72,7 @@ pub fn find(build: &mut Build) {
if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
cfg.compiler(cxx);
} else {
set_compiler(&mut cfg, "g++", host, config);
set_compiler(&mut cfg, "g++", host, config, build);
}
let compiler = cfg.get_compiler();
build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
@ -83,7 +83,8 @@ pub fn find(build: &mut Build) {
fn set_compiler(cfg: &mut gcc::Config,
gnu_compiler: &str,
target: &str,
config: Option<&Target>) {
config: Option<&Target>,
build: &Build) {
match target {
// When compiling for android we may have the NDK configured in the
// config.toml in which case we look there. Otherwise the default
@ -119,6 +120,22 @@ fn set_compiler(cfg: &mut gcc::Config,
}
}
"mips-unknown-linux-musl" => {
cfg.compiler("mips-linux-musl-gcc");
}
"mipsel-unknown-linux-musl" => {
cfg.compiler("mipsel-linux-musl-gcc");
}
t if t.contains("musl") => {
if let Some(root) = build.musl_root(target) {
let guess = root.join("bin/musl-gcc");
if guess.exists() {
cfg.compiler(guess);
}
}
}
_ => {}
}
}

View File

@ -20,7 +20,6 @@ use std::io::prelude::*;
use std::process::Command;
use build_helper::output;
use md5;
use Build;
@ -91,20 +90,4 @@ pub fn collect(build: &mut Build) {
build.ver_hash = Some(ver_hash);
build.short_ver_hash = Some(short_ver_hash);
}
// Calculate this compiler's bootstrap key, which is currently defined as
// the first 8 characters of the md5 of the release string.
let key = md5::compute(build.release.as_bytes());
build.bootstrap_key = format!("{:02x}{:02x}{:02x}{:02x}",
key[0], key[1], key[2], key[3]);
// Slurp up the stage0 bootstrap key as we're bootstrapping from an
// otherwise stable compiler.
let mut s = String::new();
t!(t!(File::open(build.src.join("src/stage0.txt"))).read_to_string(&mut s));
if let Some(line) = s.lines().find(|l| l.starts_with("rustc_key")) {
if let Some(key) = line.split(": ").nth(1) {
build.bootstrap_key_stage0 = key.to_string();
}
}
}

View File

@ -8,13 +8,14 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Implementation of the various `check-*` targets of the build system.
//! Implementation of the test-related targets of the build system.
//!
//! This file implements the various regression test suites that we execute on
//! our CI.
use std::collections::HashSet;
use std::env;
use std::fmt;
use std::fs;
use std::path::{PathBuf, Path};
use std::process::Command;
@ -22,10 +23,39 @@ use std::process::Command;
use build_helper::output;
use {Build, Compiler, Mode};
use dist;
use util::{self, dylib_path, dylib_path_var};
const ADB_TEST_DIR: &'static str = "/data/tmp";
/// The two modes of the test runner; tests or benchmarks.
#[derive(Copy, Clone)]
pub enum TestKind {
/// Run `cargo test`
Test,
/// Run `cargo bench`
Bench,
}
impl TestKind {
// Return the cargo subcommand for this test kind
fn subcommand(self) -> &'static str {
match self {
TestKind::Test => "test",
TestKind::Bench => "bench",
}
}
}
impl fmt::Display for TestKind {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(match *self {
TestKind::Test => "Testing",
TestKind::Bench => "Benchmarking",
})
}
}
/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
///
/// This tool in `src/tools` will verify the validity of all our links in the
@ -33,6 +63,8 @@ const ADB_TEST_DIR: &'static str = "/data/tmp";
pub fn linkcheck(build: &Build, stage: u32, host: &str) {
println!("Linkcheck stage{} ({})", stage, host);
let compiler = Compiler::new(stage, host);
let _time = util::timeit();
build.run(build.tool_cmd(&compiler, "linkchecker")
.arg(build.out.join(host).join("doc")));
}
@ -58,6 +90,7 @@ pub fn cargotest(build: &Build, stage: u32, host: &str) {
let out_dir = build.out.join("ct");
t!(fs::create_dir_all(&out_dir));
let _time = util::timeit();
build.run(build.tool_cmd(compiler, "cargotest")
.env("PATH", newpath)
.arg(&build.cargo)
@ -90,7 +123,8 @@ pub fn compiletest(build: &Build,
target: &str,
mode: &str,
suite: &str) {
println!("Check compiletest {} ({} -> {})", suite, compiler.host, target);
println!("Check compiletest suite={} mode={} ({} -> {})",
suite, mode, compiler.host, target);
let mut cmd = build.tool_cmd(compiler, "compiletest");
// compiletest currently has... a lot of arguments, so let's just pass all
@ -184,6 +218,9 @@ pub fn compiletest(build: &Build,
// Running a C compiler on MSVC requires a few env vars to be set, to be
// sure to set them here.
//
// Note that if we encounter `PATH` we make sure to append to our own `PATH`
// rather than stomp over it.
if target.contains("msvc") {
for &(ref k, ref v) in build.cc[target].0.env() {
if k != "PATH" {
@ -191,7 +228,8 @@ pub fn compiletest(build: &Build,
}
}
}
build.add_bootstrap_key(&mut cmd);
cmd.env("RUSTC_BOOTSTRAP", "1");
build.add_rust_test_threads(&mut cmd);
cmd.arg("--adb-path").arg("adb");
cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
@ -203,6 +241,7 @@ pub fn compiletest(build: &Build,
cmd.arg("--android-cross-path").arg("");
}
let _time = util::timeit();
build.run(&mut cmd);
}
@ -215,6 +254,7 @@ pub fn docs(build: &Build, compiler: &Compiler) {
// Do a breadth-first traversal of the `src/doc` directory and just run
// tests for all files that end in `*.md`
let mut stack = vec![build.src.join("src/doc")];
let _time = util::timeit();
while let Some(p) = stack.pop() {
if p.is_dir() {
@ -243,6 +283,8 @@ pub fn error_index(build: &Build, compiler: &Compiler) {
let dir = testdir(build, compiler.host);
t!(fs::create_dir_all(&dir));
let output = dir.join("error-index.md");
let _time = util::timeit();
build.run(build.tool_cmd(compiler, "error_index_generator")
.arg("markdown")
.arg(&output)
@ -254,6 +296,7 @@ pub fn error_index(build: &Build, compiler: &Compiler) {
fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
let mut cmd = Command::new(build.rustdoc(compiler));
build.add_rustc_lib_path(compiler, &mut cmd);
build.add_rust_test_threads(&mut cmd);
cmd.arg("--test");
cmd.arg(markdown);
@ -278,6 +321,7 @@ pub fn krate(build: &Build,
compiler: &Compiler,
target: &str,
mode: Mode,
test_kind: TestKind,
krate: Option<&str>) {
let (name, path, features, root) = match mode {
Mode::Libstd => {
@ -291,7 +335,7 @@ pub fn krate(build: &Build,
}
_ => panic!("can only test libraries"),
};
println!("Testing {} stage{} ({} -> {})", name, compiler.stage,
println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
compiler.host, target);
// Build up the base `cargo test` command.
@ -299,7 +343,7 @@ pub fn krate(build: &Build,
// Pass in some standard flags then iterate over the graph we've discovered
// in `cargo metadata` with the maps above and figure out what `-p`
// arguments need to get passed.
let mut cargo = build.cargo(compiler, mode, target, "test");
let mut cargo = build.cargo(compiler, mode, target, test_kind.subcommand());
cargo.arg("--manifest-path")
.arg(build.src.join(path).join("Cargo.toml"))
.arg("--features").arg(features);
@ -336,16 +380,25 @@ pub fn krate(build: &Build,
dylib_path.insert(0, build.sysroot_libdir(compiler, target));
cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
if target.contains("android") {
cargo.arg("--no-run");
} else if target.contains("emscripten") {
cargo.arg("--no-run");
}
cargo.arg("--");
if build.config.quiet_tests {
cargo.arg("--");
cargo.arg("--quiet");
}
let _time = util::timeit();
if target.contains("android") {
build.run(cargo.arg("--no-run"));
build.run(&mut cargo);
krate_android(build, compiler, target, mode);
} else if target.contains("emscripten") {
build.run(cargo.arg("--no-run"));
build.run(&mut cargo);
krate_emscripten(build, compiler, target, mode);
} else {
cargo.args(&build.flags.cmd.test_args());
@ -372,14 +425,17 @@ fn krate_android(build: &Build,
target,
compiler.host,
test_file_name);
let quiet = if build.config.quiet_tests { "--quiet" } else { "" };
let program = format!("(cd {dir}; \
LD_LIBRARY_PATH=./{target} ./{test} \
--logfile {log} \
{quiet} \
{args})",
dir = ADB_TEST_DIR,
target = target,
test = test_file_name,
log = log,
quiet = quiet,
args = build.flags.cmd.test_args().join(" "));
let output = output(Command::new("adb").arg("shell").arg(&program));
@ -408,18 +464,12 @@ fn krate_emscripten(build: &Build,
let test_file_name = test.to_string_lossy().into_owned();
println!("running {}", test_file_name);
let nodejs = build.config.nodejs.as_ref().expect("nodejs not configured");
let status = Command::new(nodejs)
.arg(&test_file_name)
.stderr(::std::process::Stdio::inherit())
.status();
match status {
Ok(status) => {
if !status.success() {
panic!("some tests failed");
}
}
Err(e) => panic!(format!("failed to execute command: {}", e)),
};
let mut cmd = Command::new(nodejs);
cmd.arg(&test_file_name);
if build.config.quiet_tests {
cmd.arg("--quiet");
}
build.run(&mut cmd);
}
}
@ -467,3 +517,32 @@ pub fn android_copy_libs(build: &Build,
}
}
}
/// Run "distcheck", a 'make check' from a tarball
pub fn distcheck(build: &Build) {
if build.config.build != "x86_64-unknown-linux-gnu" {
return
}
if !build.config.host.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
return
}
if !build.config.target.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
return
}
let dir = build.out.join("tmp").join("distcheck");
let _ = fs::remove_dir_all(&dir);
t!(fs::create_dir_all(&dir));
let mut cmd = Command::new("tar");
cmd.arg("-xzf")
.arg(dist::rust_src_location(build))
.arg("--strip-components=1")
.current_dir(&dir);
build.run(&mut cmd);
build.run(Command::new("./configure")
.current_dir(&dir));
build.run(Command::new("make")
.arg("check")
.current_dir(&dir));
}

View File

@ -46,6 +46,9 @@ fn rm_rf(build: &Build, path: &Path) {
if !path.exists() {
return
}
if path.is_file() {
return do_op(path, "remove file", |p| fs::remove_file(p));
}
for file in t!(fs::read_dir(path)) {
let file = t!(file).path();

View File

@ -120,8 +120,8 @@ fn build_startup_objects(build: &Build, target: &str, into: &Path) {
for file in t!(fs::read_dir(build.src.join("src/rtstartup"))) {
let file = t!(file);
let mut cmd = Command::new(&compiler_path);
build.add_bootstrap_key(&mut cmd);
build.run(cmd.arg("--target").arg(target)
build.run(cmd.env("RUSTC_BOOTSTRAP", "1")
.arg("--target").arg(target)
.arg("--emit=obj")
.arg("--out-dir").arg(into)
.arg(file.path()));

View File

@ -23,7 +23,7 @@ use std::io::Write;
use std::path::{PathBuf, Path};
use std::process::Command;
use {Build, Compiler};
use {Build, Compiler, Mode};
use util::{cp_r, libdir, is_dylib, cp_filtered, copy};
pub fn package_vers(build: &Build) -> &str {
@ -284,6 +284,55 @@ pub fn std(build: &Build, compiler: &Compiler, target: &str) {
t!(fs::remove_dir_all(&image));
}
pub fn rust_src_location(build: &Build) -> PathBuf {
let plain_name = format!("rustc-{}-src", package_vers(build));
distdir(build).join(&format!("{}.tar.gz", plain_name))
}
/// Creates a tarball of save-analysis metadata, if available.
pub fn analysis(build: &Build, compiler: &Compiler, target: &str) {
println!("Dist analysis");
if build.config.channel != "nightly" {
println!("Skipping dist-analysis - not on nightly channel");
return;
}
if compiler.stage != 2 {
return
}
let name = format!("rust-analysis-{}", package_vers(build));
let image = tmpdir(build).join(format!("{}-{}-image", name, target));
let src = build.stage_out(compiler, Mode::Libstd).join(target).join("release").join("deps");
let image_src = src.join("save-analysis");
let dst = image.join("lib/rustlib").join(target).join("analysis");
t!(fs::create_dir_all(&dst));
cp_r(&image_src, &dst);
let mut cmd = Command::new("sh");
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=save-analysis-saved.")
.arg(format!("--image-dir={}", sanitize_sh(&image)))
.arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
.arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
.arg(format!("--package-name={}-{}", name, target))
.arg(format!("--component-name=rust-analysis-{}", target))
.arg("--legacy-manifest-dirs=rustlib,cargo");
build.run(&mut cmd);
t!(fs::remove_dir_all(&image));
// Create plain source tarball
let mut cmd = Command::new("tar");
cmd.arg("-czf").arg(sanitize_sh(&distdir(build).join(&format!("{}.tar.gz", name))))
.arg("analysis")
.current_dir(&src);
build.run(&mut cmd);
}
/// Creates the `rust-src` installer component and the plain source tarball
pub fn rust_src(build: &Build) {
println!("Dist src");
@ -374,7 +423,7 @@ pub fn rust_src(build: &Build) {
// Create plain source tarball
let mut cmd = Command::new("tar");
cmd.arg("-czf").arg(sanitize_sh(&distdir(build).join(&format!("{}.tar.gz", plain_name))))
cmd.arg("-czf").arg(sanitize_sh(&rust_src_location(build)))
.arg(&plain_name)
.current_dir(&dst);
build.run(&mut cmd);

View File

@ -49,6 +49,10 @@ pub enum Subcommand {
paths: Vec<PathBuf>,
test_args: Vec<String>,
},
Bench {
paths: Vec<PathBuf>,
test_args: Vec<String>,
},
Clean,
Dist {
install: bool,
@ -141,6 +145,7 @@ Arguments:
command == "dist" ||
command == "doc" ||
command == "test" ||
command == "bench" ||
command == "clean" {
println!("Available invocations:");
if args.iter().any(|a| a == "-v") {
@ -163,6 +168,7 @@ println!("\
Subcommands:
build Compile either the compiler or libraries
test Build and run some test suites
bench Build and run some benchmarks
doc Build documentation
clean Clean out build directories
dist Build and/or install distribution artifacts
@ -210,6 +216,14 @@ To learn more about a subcommand, run `./x.py <command> -h`
test_args: m.opt_strs("test-args"),
}
}
"bench" => {
opts.optmulti("", "test-args", "extra arguments", "ARGS");
m = parse(&opts);
Subcommand::Bench {
paths: remaining_as_path(&m),
test_args: m.opt_strs("test-args"),
}
}
"clean" => {
m = parse(&opts);
if m.free.len() > 0 {
@ -225,6 +239,7 @@ To learn more about a subcommand, run `./x.py <command> -h`
install: m.opt_present("install"),
}
}
"--help" => usage(0, &opts),
cmd => {
println!("unknown command: {}", cmd);
usage(1, &opts);
@ -259,7 +274,8 @@ To learn more about a subcommand, run `./x.py <command> -h`
impl Subcommand {
pub fn test_args(&self) -> Vec<&str> {
match *self {
Subcommand::Test { ref test_args, .. } => {
Subcommand::Test { ref test_args, .. } |
Subcommand::Bench { ref test_args, .. } => {
test_args.iter().flat_map(|s| s.split_whitespace()).collect()
}
_ => Vec::new(),

View File

@ -51,6 +51,7 @@ type LPVOID = *mut u8;
type JOBOBJECTINFOCLASS = i32;
type SIZE_T = usize;
type LARGE_INTEGER = i64;
type UINT = u32;
type ULONG_PTR = usize;
type ULONGLONG = u64;
@ -59,6 +60,8 @@ const DUPLICATE_SAME_ACCESS: DWORD = 0x2;
const PROCESS_DUP_HANDLE: DWORD = 0x40;
const JobObjectExtendedLimitInformation: JOBOBJECTINFOCLASS = 9;
const JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE: DWORD = 0x2000;
const SEM_FAILCRITICALERRORS: UINT = 0x0001;
const SEM_NOGPFAULTERRORBOX: UINT = 0x0002;
extern "system" {
fn CreateJobObjectW(lpJobAttributes: *mut u8, lpName: *const u8) -> HANDLE;
@ -79,6 +82,7 @@ extern "system" {
JobObjectInformationClass: JOBOBJECTINFOCLASS,
lpJobObjectInformation: LPVOID,
cbJobObjectInformationLength: DWORD) -> BOOL;
fn SetErrorMode(mode: UINT) -> UINT;
}
#[repr(C)]
@ -115,6 +119,13 @@ struct JOBOBJECT_BASIC_LIMIT_INFORMATION {
}
pub unsafe fn setup() {
// Tell Windows to not show any UI on errors (such as not finding a required dll
// during startup or terminating abnormally). This is important for running tests,
// since some of them use abnormal termination by design.
// This mode is inherited by all child processes.
let mode = SetErrorMode(SEM_NOGPFAULTERRORBOX); // read inherited flags
SetErrorMode(mode | SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX);
// Create a new job object for us to use
let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
assert!(job != 0 as *mut _, "{}", io::Error::last_os_error());

View File

@ -13,22 +13,69 @@
//! This module, and its descendants, are the implementation of the Rust build
//! system. Most of this build system is backed by Cargo but the outer layer
//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
//! builds, building artifacts like LLVM, etc.
//! builds, building artifacts like LLVM, etc. The goals of rustbuild are:
//!
//! More documentation can be found in each respective module below.
//! * To be an easily understandable, easily extensible, and maintainable build
//! system.
//! * Leverage standard tools in the Rust ecosystem to build the compiler, aka
//! crates.io and Cargo.
//! * A standard interface to build across all platforms, including MSVC
//!
//! ## Architecture
//!
//! Although this build system defers most of the complicated logic to Cargo
//! itself, it still needs to maintain a list of targets and dependencies which
//! it can itself perform. Rustbuild is made up of a list of rules with
//! dependencies amongst them (created in the `step` module) and then knows how
//! to execute each in sequence. Each time rustbuild is invoked, it will simply
//! iterate through this list of steps and execute each serially in turn. For
//! each step rustbuild relies on the step internally being incremental and
//! parallel. Note, though, that the `-j` parameter to rustbuild gets forwarded
//! to appropriate test harnesses and such.
//!
//! Most of the "meaty" steps that matter are backed by Cargo, which does indeed
//! have its own parallelism and incremental management. Later steps, like
//! tests, aren't incremental and simply run the entire suite currently.
//!
//! When you execute `x.py build`, the steps which are executed are:
//!
//! * First, the python script is run. This will automatically download the
//! stage0 rustc and cargo according to `src/stage0.txt`, or using the cached
//! versions if they're available. These are then used to compile rustbuild
//! itself (using Cargo). Finally, control is then transferred to rustbuild.
//!
//! * Rustbuild takes over, performs sanity checks, probes the environment,
//! reads configuration, builds up a list of steps, and then starts executing
//! them.
//!
//! * The stage0 libstd is compiled
//! * The stage0 libtest is compiled
//! * The stage0 librustc is compiled
//! * The stage1 compiler is assembled
//! * The stage1 libstd, libtest, librustc are compiled
//! * The stage2 compiler is assembled
//! * The stage2 libstd, libtest, librustc are compiled
//!
//! Each step is driven by a separate Cargo project and rustbuild orchestrates
//! copying files between steps and otherwise preparing for Cargo to run.
//!
//! ## Further information
//!
//! More documentation can be found in each respective module below, and you can
//! also check out the `src/bootstrap/README.md` file for more information.
extern crate build_helper;
extern crate cmake;
extern crate filetime;
extern crate gcc;
extern crate getopts;
extern crate md5;
extern crate num_cpus;
extern crate rustc_serialize;
extern crate toml;
use std::collections::HashMap;
use std::env;
use std::ffi::OsString;
use std::fs::{self, File};
use std::path::{Component, PathBuf, Path};
use std::process::Command;
@ -120,8 +167,6 @@ pub struct Build {
version: String,
package_vers: String,
local_rebuild: bool,
bootstrap_key: String,
bootstrap_key_stage0: String,
// Probed tools at runtime
lldb_version: Option<String>,
@ -131,6 +176,7 @@ pub struct Build {
cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
cxx: HashMap<String, gcc::Tool>,
crates: HashMap<String, Crate>,
is_sudo: bool,
}
#[derive(Debug)]
@ -141,6 +187,7 @@ struct Crate {
doc_step: String,
build_step: String,
test_step: String,
bench_step: String,
}
/// The various "modes" of invoking Cargo.
@ -189,6 +236,16 @@ impl Build {
};
let local_rebuild = config.local_rebuild;
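// Detect whether we're running under `sudo` (`SUDO_USER` set and different
// from `USER`). If so, Cargo is later invoked with `--frozen` so it won't
// touch the network or update the lockfile while running as root.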
let is_sudo = match env::var_os("SUDO_USER") {
Some(sudo_user) => {
match env::var_os("USER") {
Some(user) => user != sudo_user,
None => false,
}
}
None => false,
};
Build {
flags: flags,
config: config,
@ -204,14 +261,13 @@ impl Build {
ver_date: None,
version: String::new(),
local_rebuild: local_rebuild,
bootstrap_key: String::new(),
bootstrap_key_stage0: String::new(),
package_vers: String::new(),
cc: HashMap::new(),
cxx: HashMap::new(),
crates: HashMap::new(),
lldb_version: None,
lldb_python_dir: None,
is_sudo: is_sudo,
}
}
@ -418,7 +474,7 @@ impl Build {
// how the actual compiler itself is called.
//
// These variables are primarily all read by
// src/bootstrap/{rustc,rustdoc.rs}
// src/bootstrap/bin/{rustc.rs,rustdoc.rs}
cargo.env("RUSTC", self.out.join("bootstrap/debug/rustc"))
.env("RUSTC_REAL", self.compiler_path(compiler))
.env("RUSTC_STAGE", stage.to_string())
@ -437,7 +493,9 @@ impl Build {
.env("RUSTDOC_REAL", self.rustdoc(compiler))
.env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
self.add_bootstrap_key(&mut cargo);
// Enable usage of unstable features
cargo.env("RUSTC_BOOTSTRAP", "1");
self.add_rust_test_threads(&mut cargo);
// Specify some various options for build scripts used throughout
// the build.
@ -449,6 +507,10 @@ impl Build {
.env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
}
if self.config.channel == "nightly" && compiler.stage == 2 {
cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string());
}
// Environment variables *required* throughout the build
//
// FIXME: should update code to not require this env var
@ -457,10 +519,11 @@ impl Build {
if self.config.verbose || self.flags.verbose {
cargo.arg("-v");
}
if self.config.rust_optimize {
// FIXME: cargo bench does not accept `--release`
if self.config.rust_optimize && cmd != "bench" {
cargo.arg("--release");
}
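// With vendored sources, or when running under sudo, require Cargo to use
// the existing lockfile and local sources only (no network access).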
if self.config.vendor {
if self.config.vendor || self.is_sudo {
cargo.arg("--frozen");
}
return cargo
@ -494,12 +557,30 @@ impl Build {
fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
let mut cmd = Command::new(self.tool(&compiler, tool));
let host = compiler.host;
let paths = vec![
let mut paths = vec![
self.cargo_out(compiler, Mode::Libstd, host).join("deps"),
self.cargo_out(compiler, Mode::Libtest, host).join("deps"),
self.cargo_out(compiler, Mode::Librustc, host).join("deps"),
self.cargo_out(compiler, Mode::Tool, host).join("deps"),
];
// On MSVC a tool may invoke a C compiler (e.g. compiletest in run-make
// mode) and that C compiler may need some extra PATH modification. Do
// so here.
if compiler.host.contains("msvc") {
let curpaths = env::var_os("PATH").unwrap_or(OsString::new());
let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
for &(ref k, ref v) in self.cc[compiler.host].0.env() {
if k != "PATH" {
continue
}
for path in env::split_paths(v) {
if !curpaths.contains(&path) {
paths.push(path);
}
}
}
}
add_lib_path(paths, &mut cmd);
return cmd
}
@ -507,7 +588,7 @@ impl Build {
/// Get the space-separated set of activated features for the standard
/// library.
fn std_features(&self) -> String {
let mut features = String::new();
let mut features = "panic-unwind".to_string();
if self.config.debug_jemalloc {
features.push_str(" debug-jemalloc");
}
@ -653,12 +734,11 @@ impl Build {
add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
}
/// Adds the compiler's bootstrap key to the environment of `cmd`.
fn add_bootstrap_key(&self, cmd: &mut Command) {
cmd.env("RUSTC_BOOTSTRAP", "1");
// FIXME: Transitionary measure to bootstrap using the old bootstrap logic.
// Remove this once the bootstrap compiler uses the new logic in Issue #36548.
cmd.env("RUSTC_BOOTSTRAP_KEY", "62b3e239");
/// Adds the `RUST_TEST_THREADS` env var if necessary
fn add_rust_test_threads(&self, cmd: &mut Command) {
if env::var_os("RUST_TEST_THREADS").is_none() {
cmd.env("RUST_TEST_THREADS", self.jobs().to_string());
}
}
/// Returns the compiler's libdir where it stores the dynamic libraries that

View File

@ -70,6 +70,7 @@ fn build_krate(build: &mut Build, krate: &str) {
build_step: format!("build-crate-{}", package.name),
doc_step: format!("doc-crate-{}", package.name),
test_step: format!("test-crate-{}", package.name),
bench_step: format!("bench-crate-{}", package.name),
name: package.name,
deps: Vec::new(),
path: path,

View File

@ -1,4 +1,4 @@
# Copyright 20126 The Rust Project Developers. See the COPYRIGHT
# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
@ -23,9 +23,14 @@ all:
$(Q)$(BOOTSTRAP) build $(BOOTSTRAP_ARGS)
$(Q)$(BOOTSTRAP) doc $(BOOTSTRAP_ARGS)
# Don't use $(Q) here, always show how to invoke the bootstrap script directly
help:
$(BOOTSTRAP) --help
$(Q)echo 'Welcome to the rustbuild build system!'
$(Q)echo
$(Q)echo This makefile is a thin veneer over the ./x.py script located
$(Q)echo in this directory. To get the full power of the build system
$(Q)echo you can run x.py directly.
$(Q)echo
$(Q)echo To learn more run \`./x.py --help\`
clean:
$(Q)$(BOOTSTRAP) clean $(BOOTSTRAP_ARGS)
@ -50,16 +55,17 @@ check-cargotest:
$(Q)$(BOOTSTRAP) test src/tools/cargotest $(BOOTSTRAP_ARGS)
dist:
$(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS)
distcheck:
$(Q)$(BOOTSTRAP) test distcheck
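# The ifeq below matches only when we are root via sudo (USER is root and
# SUDO_USER is set), in which case installation is refused with the message
# below.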
install:
ifeq (root user, $(USER) $(patsubst %,user,$(SUDO_USER)))
$(Q)echo "'sudo make install' is not supported currently."
else
$(Q)$(BOOTSTRAP) dist --install $(BOOTSTRAP_ARGS)
endif
tidy:
$(Q)$(BOOTSTRAP) test src/tools/tidy $(BOOTSTRAP_ARGS) --stage 0
check-stage2-android:
$(Q)$(BOOTSTRAP) --step check-target --target arm-linux-androideabi
check-stage2-T-arm-linux-androideabi-H-x86_64-unknown-linux-gnu:
$(Q)$(BOOTSTRAP) test --target arm-linux-androideabi
check-stage2-T-x86_64-unknown-linux-musl-H-x86_64-unknown-linux-gnu:
$(Q)$(BOOTSTRAP) test --target x86_64-unknown-linux-musl
.PHONY: dist

View File

@ -28,7 +28,7 @@ use cmake;
use gcc;
use Build;
use util::up_to_date;
use util::{self, up_to_date};
/// Compile LLVM for `target`.
pub fn llvm(build: &Build, target: &str) {
@ -58,6 +58,7 @@ pub fn llvm(build: &Build, target: &str) {
println!("Building LLVM for {}", target);
let _time = util::timeit();
let _ = fs::remove_dir_all(&dst.join("build"));
t!(fs::create_dir_all(&dst.join("build")));
let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
@ -158,6 +159,17 @@ pub fn test_helpers(build: &Build, target: &str) {
println!("Building test helpers");
t!(fs::create_dir_all(&dst));
let mut cfg = gcc::Config::new();
// We may have found various cross-compilers a little differently due to our
// extra configuration, so inform gcc of these compilers. Note, though, that
// on MSVC we still need gcc's detection of env vars (ugh).
if !target.contains("msvc") {
if let Some(ar) = build.ar(target) {
cfg.archiver(ar);
}
cfg.compiler(build.cc(target));
}
cfg.cargo_metadata(false)
.out_dir(&dst)
.target(target)

View File

@ -41,10 +41,14 @@ pub fn check(build: &mut Build) {
}
}
let have_cmd = |cmd: &OsStr| {
for path in env::split_paths(&path).map(|p| p.join(cmd)) {
if fs::metadata(&path).is_ok() ||
fs::metadata(path.with_extension("exe")).is_ok() {
return Some(path);
for path in env::split_paths(&path) {
let target = path.join(cmd);
let mut cmd_alt = cmd.to_os_string();
cmd_alt.push(".exe");
if target.exists() ||
target.with_extension("exe").exists() ||
target.join(cmd_alt).exists() {
return Some(target);
}
}
return None;

View File

@ -8,10 +8,28 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Definition of steps of the build system.
//!
//! This is where some of the real meat of rustbuild is located, in how we
//! define targets and the dependencies amongst them. This file can sort of be
//! viewed as just defining targets in a makefile which shell out to predefined
//! functions elsewhere about how to execute the target.
//!
//! The primary function here you're likely interested in is the `build_rules`
//! function. This will create a `Rules` structure which basically just lists
//! everything that rustbuild can do. Each rule has a human-readable name, a
//! path associated with it, some dependencies, and then a closure of how to
//! actually perform the rule.
//!
//! All steps below are defined in self-contained units, so adding a new target
//! to the build system should just involve adding the meta information here
//! along with the actual implementation elsewhere. You can find more comments
//! about how to define rules themselves below.
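//!
//! For a flavor of what a rule definition looks like, here is the LLVM rule
//! that `build_rules` below walks through in detail:
//!
//! ```ignore
//! rules.build("llvm", "src/llvm")
//!      .host(true)
//!      .run(move |s| native::llvm(build, s.target));
//! ```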
use std::collections::{HashMap, HashSet};
use std::mem;
use check;
use check::{self, TestKind};
use compile;
use dist;
use doc;
@ -20,36 +38,6 @@ use install;
use native;
use {Compiler, Build, Mode};
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
struct Step<'a> {
name: &'a str,
stage: u32,
host: &'a str,
target: &'a str,
}
impl<'a> Step<'a> {
fn name(&self, name: &'a str) -> Step<'a> {
Step { name: name, ..*self }
}
fn stage(&self, stage: u32) -> Step<'a> {
Step { stage: stage, ..*self }
}
fn host(&self, host: &'a str) -> Step<'a> {
Step { host: host, ..*self }
}
fn target(&self, target: &'a str) -> Step<'a> {
Step { target: target, ..*self }
}
fn compiler(&self) -> Compiler<'a> {
Compiler::new(self.stage, self.host)
}
}
pub fn run(build: &Build) {
let rules = build_rules(build);
let steps = rules.plan();
@ -57,14 +45,91 @@ pub fn run(build: &Build) {
}
pub fn build_rules(build: &Build) -> Rules {
let mut rules: Rules = Rules::new(build);
let mut rules = Rules::new(build);
// This is the first rule that we're going to define for rustbuild, which is
// used to compile LLVM itself. All rules are added through the `rules`
// structure created above and are configured through a builder-style
// interface.
//
// First up we see the `build` method. This represents a rule that's part of
// the top-level `build` subcommand. For example `./x.py build` is what this
// is associated with. Note that this is normally only relevant if you flag
// a rule as `default`, which we'll talk about later.
//
// Next up we'll see two arguments to this method:
//
// * `llvm` - this is the "human readable" name of this target. This name is
// not accessed anywhere outside this file itself (e.g. not in
// the CLI nor elsewhere in rustbuild). The purpose of this is to
// easily define dependencies between rules. That is, other rules
// will depend on this with the name "llvm".
// * `src/llvm` - this is the relevant path to the rule that we're working
// with. This path is the engine behind how commands like
// `./x.py build src/llvm` work. This should typically point
// to the relevant component, but if there's not really a
// path to be assigned here you can pass something like
// `path/to/nowhere` to ignore it.
//
// After we create the rule with the `build` method we can then configure
// various aspects of it. For example this LLVM rule uses `.host(true)` to
// flag that it's a rule only for host targets. In other words, LLVM isn't
// compiled for targets configured through `--target` (e.g. those we're just
// building a standard library for).
//
// Next up the `dep` method will add a dependency to this rule. The closure
// is yielded the step that represents executing the `llvm` rule itself
// (containing information like stage, host, target, ...) and then it must
// return a target that the step depends on. Here LLVM is actually
// interesting where a cross-compiled LLVM depends on the host LLVM, but
// otherwise it has no dependencies.
//
// To handle this we do a bit of dynamic dispatch to see what the dependency
// is. If we're building LLVM for the build triple, then we don't actually
// have any dependencies! To do that we return a dependency on the "dummy"
// target which does nothing.
//
// If we're building a cross-compiled LLVM, however, we need to assemble the
// libraries from the previous compiler. This step has the same name as
// ours (llvm) but we want it for a different target, so we use the
// builder-style methods on `Step` to configure this target to the build
// triple.
//
// Finally, to finish off this rule, we define how to actually execute it.
// That logic is all defined in the `native` module so we just delegate to
// the relevant function there. The argument to the closure passed to `run`
// is a `Step` (defined below) which encapsulates information like the
// stage, target, host, etc.
rules.build("llvm", "src/llvm")
.host(true)
.dep(move |s| {
if s.target == build.config.build {
dummy(s, build)
} else {
s.target(&build.config.build)
}
})
.run(move |s| native::llvm(build, s.target));
// Ok! After that example rule that's hopefully enough to explain what's
// going on here. You can check out the API docs below and also see a bunch
// more examples of rules directly below as well.
// dummy rule to do nothing, useful when a dep maps to no deps
rules.build("dummy", "path/to/nowhere");
fn dummy<'a>(s: &Step<'a>, build: &'a Build) -> Step<'a> {
s.name("dummy").stage(0)
.target(&build.config.build)
.host(&build.config.build)
}
// the compiler with no target libraries ready to go
rules.build("rustc", "src/rustc")
.dep(move |s| {
if s.stage == 0 {
dummy(s, build)
} else {
s.name("librustc")
.host(&build.config.build)
.stage(s.stage - 1)
}
})
.run(move |s| compile::assemble_rustc(build, s.stage, s.target));
// Helper for loading an entire DAG of crates, rooted at `name`
let krates = |name: &str| {
@ -85,21 +150,6 @@ pub fn build_rules(build: &Build) -> Rules {
return ret
};
rules.build("rustc", "path/to/nowhere")
.dep(move |s| {
if s.stage == 0 {
dummy(s, build)
} else {
s.name("librustc")
.host(&build.config.build)
.stage(s.stage - 1)
}
})
.run(move |s| compile::assemble_rustc(build, s.stage, s.target));
rules.build("llvm", "src/llvm")
.host(true)
.run(move |s| native::llvm(build, s.target));
// ========================================================================
// Crate compilations
//
@ -268,37 +318,55 @@ pub fn build_rules(build: &Build) -> Rules {
rules.test(&krate.test_step, path)
.dep(|s| s.name("libtest"))
.run(move |s| check::krate(build, &s.compiler(), s.target,
Mode::Libstd, Some(&krate.name)));
Mode::Libstd, TestKind::Test,
Some(&krate.name)));
}
rules.test("check-std-all", "path/to/nowhere")
.dep(|s| s.name("libtest"))
.default(true)
.run(move |s| check::krate(build, &s.compiler(), s.target, Mode::Libstd,
None));
.run(move |s| check::krate(build, &s.compiler(), s.target,
Mode::Libstd, TestKind::Test, None));
// std benchmarks
for (krate, path, _default) in krates("std_shim") {
rules.bench(&krate.bench_step, path)
.dep(|s| s.name("libtest"))
.run(move |s| check::krate(build, &s.compiler(), s.target,
Mode::Libstd, TestKind::Bench,
Some(&krate.name)));
}
rules.bench("bench-std-all", "path/to/nowhere")
.dep(|s| s.name("libtest"))
.default(true)
.run(move |s| check::krate(build, &s.compiler(), s.target,
Mode::Libstd, TestKind::Bench, None));
for (krate, path, _default) in krates("test_shim") {
rules.test(&krate.test_step, path)
.dep(|s| s.name("libtest"))
.run(move |s| check::krate(build, &s.compiler(), s.target,
Mode::Libtest, Some(&krate.name)));
Mode::Libtest, TestKind::Test,
Some(&krate.name)));
}
rules.test("check-test-all", "path/to/nowhere")
.dep(|s| s.name("libtest"))
.default(true)
.run(move |s| check::krate(build, &s.compiler(), s.target, Mode::Libtest,
None));
.run(move |s| check::krate(build, &s.compiler(), s.target,
Mode::Libtest, TestKind::Test, None));
for (krate, path, _default) in krates("rustc-main") {
rules.test(&krate.test_step, path)
.dep(|s| s.name("librustc"))
.host(true)
.run(move |s| check::krate(build, &s.compiler(), s.target,
Mode::Librustc, Some(&krate.name)));
Mode::Librustc, TestKind::Test,
Some(&krate.name)));
}
rules.test("check-rustc-all", "path/to/nowhere")
.dep(|s| s.name("librustc"))
.default(true)
.host(true)
.run(move |s| check::krate(build, &s.compiler(), s.target, Mode::Librustc,
None));
.run(move |s| check::krate(build, &s.compiler(), s.target,
Mode::Librustc, TestKind::Test, None));
rules.test("check-linkchecker", "src/tools/linkchecker")
.dep(|s| s.name("tool-linkchecker"))
@ -312,10 +380,10 @@ pub fn build_rules(build: &Build) -> Rules {
.host(true)
.run(move |s| check::cargotest(build, s.stage, s.target));
rules.test("check-tidy", "src/tools/tidy")
.dep(|s| s.name("tool-tidy"))
.dep(|s| s.name("tool-tidy").stage(0))
.default(true)
.host(true)
.run(move |s| check::tidy(build, s.stage, s.target));
.run(move |s| check::tidy(build, 0, s.target));
rules.test("check-error-index", "src/tools/error_index_generator")
.dep(|s| s.name("libstd"))
.dep(|s| s.name("tool-error-index").host(s.host))
@ -327,6 +395,10 @@ pub fn build_rules(build: &Build) -> Rules {
.default(true)
.host(true)
.run(move |s| check::docs(build, &s.compiler()));
rules.test("check-distcheck", "distcheck")
.dep(|s| s.name("dist-src"))
.run(move |_| check::distcheck(build));
rules.build("test-helpers", "src/rt/rust_test_helpers.c")
.run(move |s| native::test_helpers(build, s.target));
@ -427,21 +499,98 @@ pub fn build_rules(build: &Build) -> Rules {
.default(true)
.dep(|s| s.name("default:doc"))
.run(move |s| dist::docs(build, s.stage, s.target));
rules.dist("dist-analysis", "src/libstd")
.dep(|s| s.name("dist-std"))
.default(true)
.run(move |s| dist::analysis(build, &s.compiler(), s.target));
rules.dist("install", "src")
.dep(|s| s.name("default:dist"))
.run(move |s| install::install(build, s.stage, s.target));
rules.verify();
return rules
return rules;
fn dummy<'a>(s: &Step<'a>, build: &'a Build) -> Step<'a> {
s.name("dummy").stage(0)
.target(&build.config.build)
.host(&build.config.build)
}
}
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
struct Step<'a> {
/// Human readable name of the rule this step is executing. Possible names
/// are all defined above in `build_rules`.
name: &'a str,
/// The stage this step is executing in. This is typically 0, 1, or 2.
stage: u32,
/// This step will likely involve a compiler, and the target that the
/// compiler itself is built for is called the host; that is what this
/// field holds. Typically this is the target of the build machine itself.
host: &'a str,
/// The target that this step represents generating. If you're building a
/// standard library for a new suite of targets, for example, this'll be set
/// to those targets.
target: &'a str,
}
impl<'a> Step<'a> {
/// Creates a new step which is the same as this, except has a new name.
fn name(&self, name: &'a str) -> Step<'a> {
Step { name: name, ..*self }
}
/// Creates a new step which is the same as this, except has a new stage.
fn stage(&self, stage: u32) -> Step<'a> {
Step { stage: stage, ..*self }
}
/// Creates a new step which is the same as this, except has a new host.
fn host(&self, host: &'a str) -> Step<'a> {
Step { host: host, ..*self }
}
/// Creates a new step which is the same as this, except has a new target.
fn target(&self, target: &'a str) -> Step<'a> {
Step { target: target, ..*self }
}
/// Returns the `Compiler` structure that this step corresponds to.
fn compiler(&self) -> Compiler<'a> {
Compiler::new(self.stage, self.host)
}
}
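// These builder methods are chained in the rule definitions above; for
// example `s.name("librustc").host(&build.config.build).stage(s.stage - 1)`
// refines a step into "the previous stage's librustc for the build triple".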
struct Rule<'a> {
/// The human readable name of this target, defined in `build_rules`.
name: &'a str,
/// The path associated with this target, used in the `./x.py` driver for
/// easy and ergonomic specification of what to do.
path: &'a str,
/// The "kind" of top-level command that this rule is associated with, only
/// relevant if this is a default rule.
kind: Kind,
/// List of dependencies this rule has. Each dependency is a function from a
/// step that's being executed to another step that should be executed.
deps: Vec<Box<Fn(&Step<'a>) -> Step<'a> + 'a>>,
/// How to actually execute this rule. Takes a step with contextual
/// information and then executes it.
run: Box<Fn(&Step<'a>) + 'a>,
/// Whether or not this is a "default" rule. That basically means that if
/// you run, for example, `./x.py test` whether it's included or not.
default: bool,
/// Whether or not this is a "host" rule, or in other words whether this is
/// only intended for compiler hosts and not for targets that are being
/// generated.
host: bool,
}
@ -449,6 +598,7 @@ struct Rule<'a> {
enum Kind {
Build,
Test,
Bench,
Dist,
Doc,
}
@ -467,6 +617,8 @@ impl<'a> Rule<'a> {
}
}
/// Builder pattern returned from the various methods on `Rules` which will add
/// the rule to the internal list on `Drop`.
struct RuleBuilder<'a: 'b, 'b> {
rules: &'b mut Rules<'a>,
rule: Rule<'a>,
@ -528,21 +680,35 @@ impl<'a> Rules<'a> {
}
}
/// Creates a new rule of `Kind::Build` with the specified human readable
/// name and path associated with it.
///
/// The builder returned should be configured further with information such
/// as how to actually run this rule.
fn build<'b>(&'b mut self, name: &'a str, path: &'a str)
-> RuleBuilder<'a, 'b> {
self.rule(name, path, Kind::Build)
}
/// Same as `build`, but for `Kind::Test`.
fn test<'b>(&'b mut self, name: &'a str, path: &'a str)
-> RuleBuilder<'a, 'b> {
self.rule(name, path, Kind::Test)
}
/// Same as `build`, but for `Kind::Bench`.
fn bench<'b>(&'b mut self, name: &'a str, path: &'a str)
-> RuleBuilder<'a, 'b> {
self.rule(name, path, Kind::Bench)
}
/// Same as `build`, but for `Kind::Doc`.
fn doc<'b>(&'b mut self, name: &'a str, path: &'a str)
-> RuleBuilder<'a, 'b> {
self.rule(name, path, Kind::Doc)
}
/// Same as `build`, but for `Kind::Dist`.
fn dist<'b>(&'b mut self, name: &'a str, path: &'a str)
-> RuleBuilder<'a, 'b> {
self.rule(name, path, Kind::Dist)
@ -583,6 +749,7 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
"build" => Kind::Build,
"doc" => Kind::Doc,
"test" => Kind::Test,
"bench" => Kind::Bench,
"dist" => Kind::Dist,
_ => return,
};
@ -602,10 +769,36 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
/// Construct the top-level build steps that we're going to be executing,
/// given the subcommand that our build is performing.
fn plan(&self) -> Vec<Step<'a>> {
// Ok, the logic here is pretty subtle, and involves quite a few
// conditionals. The basic idea here is to:
//
// 1. First, filter all our rules to the relevant ones. This means that
// the command specified corresponds to one of our `Kind` variants,
// and we filter all rules based on that.
//
// 2. Next, we determine which rules we're actually executing. If a
// number of path filters were specified on the command line we look
// for those, otherwise we look for anything tagged `default`.
//
// 3. Finally, we generate some steps with host and target information.
//
// The last step is by far the most complicated and subtle. The basic
// thinking here is that we want to take the cartesian product of
// specified hosts and targets and build rules with that. The list of
// hosts and targets, if not specified, comes from how this build was
// configured. If the rule we're looking at is a host-only rule then we
// ignore the list of targets and instead consider the list of hosts to
// also be the list of targets.
//
// Once the host and target lists are generated we take the cartesian
// product of the two and then create a step based off them. Note that
// the stage each step is associated with was specified with the `--stage`
// flag on the command line.
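//
// For example, an invocation like `test --target arm-linux-androideabi`
// filters to `Kind::Test` rules, picks the `default` ones (no paths were
// given), skips host-only rules because `--target` was passed without
// `--host` (see below), and then yields one step per remaining
// (host, target) pair.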
let (kind, paths) = match self.build.flags.cmd {
Subcommand::Build { ref paths } => (Kind::Build, &paths[..]),
Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
Subcommand::Test { ref paths, test_args: _ } => (Kind::Test, &paths[..]),
Subcommand::Bench { ref paths, test_args: _ } => (Kind::Bench, &paths[..]),
Subcommand::Dist { install } => {
if install {
return vec![self.sbuild.name("install")]
@ -631,7 +824,18 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
} else {
&self.build.config.target
};
let arr = if rule.host {hosts} else {targets};
// If --target was specified but --host wasn't specified, don't run
// any host-only tests
let arr = if rule.host {
if self.build.flags.target.len() > 0 &&
self.build.flags.host.len() == 0 {
&hosts[..0]
} else {
hosts
}
} else {
targets
};
hosts.iter().flat_map(move |host| {
arr.iter().map(move |target| {
@ -672,6 +876,15 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
}
}
/// Performs topological sort of dependencies rooted at the `step`
/// specified, pushing all results onto the `order` vector provided.
///
/// In other words, when this method returns, the `order` vector will
/// contain a list of steps which if executed in order will eventually
/// complete the `step` specified as well.
///
/// The `added` set specified here is the set of steps that are already
/// present in `order` (and hence don't need to be added again).
fn fill(&self,
step: Step<'a>,
order: &mut Vec<Step<'a>>,

View File

@ -18,6 +18,7 @@ use std::ffi::OsString;
use std::fs;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::time::Instant;
use filetime::FileTime;
@ -189,3 +190,19 @@ pub fn push_exe_path(mut buf: PathBuf, components: &[&str]) -> PathBuf {
buf
}
pub struct TimeIt(Instant);
/// Returns an RAII structure that prints out how long it took to drop.
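///
/// Typical usage is `let _time = timeit();` at the top of the scope being
/// timed; the elapsed time is printed when the guard is dropped.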
pub fn timeit() -> TimeIt {
TimeIt(Instant::now())
}
impl Drop for TimeIt {
fn drop(&mut self) {
let time = self.0.elapsed();
println!("\tfinished in {}.{:03}",
time.as_secs(),
time.subsec_nanos() / 1_000_000);
}
}

View File

@ -21,7 +21,8 @@ pub fn run(cmd: &mut Command) {
pub fn run_silent(cmd: &mut Command) {
let status = match cmd.status() {
Ok(status) => status,
Err(e) => fail(&format!("failed to execute command: {}", e)),
Err(e) => fail(&format!("failed to execute command: {:?}\nerror: {}",
cmd, e)),
};
if !status.success() {
fail(&format!("command did not execute successfully: {:?}\n\
@ -63,7 +64,8 @@ pub fn cc2ar(cc: &Path, target: &str) -> Option<PathBuf> {
pub fn output(cmd: &mut Command) -> String {
let output = match cmd.stderr(Stdio::inherit()).output() {
Ok(status) => status,
Err(e) => fail(&format!("failed to execute command: {}", e)),
Err(e) => fail(&format!("failed to execute command: {:?}\nerror: {}",
cmd, e)),
};
if !output.status.success() {
panic!("command did not execute successfully: {:?}\n\

View File

@ -9,7 +9,6 @@ RUN dpkg --add-architecture i386 && \
curl \
ca-certificates \
python2.7 \
python-minimal \
git \
cmake \
ccache \
@ -39,8 +38,7 @@ ENV RUST_CONFIGURE_ARGS \
--arm-linux-androideabi-ndk=/android/ndk-arm-9 \
--armv7-linux-androideabi-ndk=/android/ndk-arm-9 \
--i686-linux-android-ndk=/android/ndk-x86-9 \
--aarch64-linux-android-ndk=/android/ndk-aarch64 \
--enable-rustbuild
ENV RUST_CHECK_TARGET check-stage2-android
--aarch64-linux-android-ndk=/android/ndk-aarch64
ENV XPY_CHECK test --target arm-linux-androideabi
RUN mkdir /tmp/obj
RUN chmod 777 /tmp/obj

View File

@ -7,7 +7,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
ca-certificates \
python2.7 \
python-minimal \
git \
cmake \
ccache \

View File

@ -19,17 +19,21 @@ ci_dir="`dirname $docker_dir`"
src_dir="`dirname $ci_dir`"
root_dir="`dirname $src_dir`"
docker build \
docker \
build \
--rm \
-t rust-ci \
"`dirname "$script"`/$image"
mkdir -p $HOME/.ccache
mkdir -p $HOME/.cargo
mkdir -p $root_dir/obj
exec docker run \
exec docker \
run \
--volume "$root_dir:/checkout:ro" \
--workdir /tmp/obj \
--volume "$root_dir/obj:/checkout/obj" \
--workdir /checkout/obj \
--env SRC=/checkout \
--env CCACHE_DIR=/ccache \
--volume "$HOME/.ccache:/ccache" \

View File

@ -7,7 +7,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
ca-certificates \
python2.7 \
python-minimal \
git \
cmake \
ccache \
@ -23,7 +22,7 @@ ENV \
AR_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-ar \
CC_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-gcc
ENV RUST_CONFIGURE_ARGS --target=x86_64-unknown-freebsd --enable-rustbuild
ENV RUST_CONFIGURE_ARGS --target=x86_64-unknown-freebsd
ENV RUST_CHECK_TARGET ""
RUN mkdir /tmp/obj
RUN chmod 777 /tmp/obj

View File

@ -7,14 +7,14 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
ca-certificates \
python2.7 \
python-minimal \
git \
cmake \
ccache \
libssl-dev \
sudo
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --enable-rustbuild
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu
ENV RUST_CHECK_TARGET check-cargotest
ENV NO_VENDOR 1
RUN mkdir /tmp/obj
RUN chmod 777 /tmp/obj

View File

@ -7,7 +7,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
ca-certificates \
python2.7 \
python2.7-minimal \
git \
cmake \
ccache \
@ -19,7 +18,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
ENV RUST_CONFIGURE_ARGS \
--build=x86_64-unknown-linux-gnu \
--enable-rustbuild \
--llvm-root=/usr/lib/llvm-3.7
ENV RUST_CHECK_TARGET check
RUN mkdir /tmp/obj

View File

@ -7,14 +7,13 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
ca-certificates \
python2.7 \
python-minimal \
git \
cmake \
ccache \
sudo \
gdb
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --enable-rustbuild
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --disable-rustbuild
ENV RUST_CHECK_TARGET check
RUN mkdir /tmp/obj
RUN chmod 777 /tmp/obj

View File

@ -20,8 +20,10 @@ RUN sh /build/build-musl.sh && rm -rf /build
ENV RUST_CONFIGURE_ARGS \
--target=x86_64-unknown-linux-musl \
--musl-root=/musl-x86_64
--musl-root-x86_64=/musl-x86_64
ENV RUST_CHECK_TARGET check-stage2-T-x86_64-unknown-linux-musl-H-x86_64-unknown-linux-gnu
ENV PATH=$PATH:/musl-x86_64/bin
ENV XPY_CHECK test --target x86_64-unknown-linux-musl
RUN mkdir /tmp/obj
RUN chmod 777 /tmp/obj

View File

@ -14,12 +14,20 @@ set -e
if [ "$LOCAL_USER_ID" != "" ]; then
useradd --shell /bin/bash -u $LOCAL_USER_ID -o -c "" -m user
export HOME=/home/user
export LOCAL_USER_ID=
exec sudo -E -u user env PATH=$PATH "$0"
unset LOCAL_USER_ID
exec su --preserve-environment -c "env PATH=$PATH \"$0\"" user
fi
if [ "$NO_LLVM_ASSERTIONS" = "" ]; then
LLVM_ASSERTIONS=--enable-llvm-assertions
ENABLE_LLVM_ASSERTIONS=--enable-llvm-assertions
fi
if [ "$NO_VENDOR" = "" ]; then
ENABLE_VENDOR=--enable-vendor
fi
if [ "$NO_CCACHE" = "" ]; then
ENABLE_CCACHE=--enable-ccache
fi
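# Each NO_* variable above is an opt-out: leaving it unset adds the matching
# --enable-* flag to the ./configure invocation below.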
set -ex
@ -28,9 +36,9 @@ $SRC/configure \
--disable-manage-submodules \
--enable-debug-assertions \
--enable-quiet-tests \
--enable-ccache \
--enable-vendor \
$LLVM_ASSERTIONS \
$ENABLE_CCACHE \
$ENABLE_VENDOR \
$ENABLE_LLVM_ASSERTIONS \
$RUST_CONFIGURE_ARGS
if [ "$TRAVIS_OS_NAME" = "osx" ]; then
@ -41,4 +49,8 @@ fi
make -j $ncpus tidy
make -j $ncpus
exec make $RUST_CHECK_TARGET -j $ncpus
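# Images converted to rustbuild set XPY_CHECK (see the Dockerfiles above) and
# are checked through x.py directly; everything else still goes through make.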
if [ ! -z "$XPY_CHECK" ]; then
exec python2.7 $SRC/x.py $XPY_CHECK
else
exec make $RUST_CHECK_TARGET -j $ncpus
fi

@ -1 +1 @@
Subproject commit 3bc0272cab9fdcfc2ef4df9625ec3c9d5909db79
Subproject commit a8fc4c169fac43a5dc204d4fd56ddb1739f8c178

View File

@ -662,26 +662,31 @@ attribute turns off Rust's name mangling, so that it is easier to link to.
It's important to be mindful of `panic!`s when working with FFI. A `panic!`
across an FFI boundary is undefined behavior. If you're writing code that may
panic, you should run it in another thread, so that the panic doesn't bubble up
to C:
panic, you should run it in a closure with [`catch_unwind()`]:
```rust
use std::thread;
use std::panic::catch_unwind;
#[no_mangle]
pub extern fn oh_no() -> i32 {
let h = thread::spawn(|| {
let result = catch_unwind(|| {
panic!("Oops!");
});
match h.join() {
Ok(_) => 1,
Err(_) => 0,
match result {
Ok(_) => 0,
Err(_) => 1,
}
}
# fn main() {}
fn main() {}
```
Please note that [`catch_unwind()`] will only catch unwinding panics, not
those that abort the process. See the documentation of [`catch_unwind()`]
for more information.
[`catch_unwind()`]: https://doc.rust-lang.org/std/panic/fn.catch_unwind.html
# Representing opaque structs
Sometimes, a C library wants to provide a pointer to something, but not let you

View File

@ -589,11 +589,11 @@ please see the [Documentation chapter](documentation.html).
# Testing and concurrency
One thing that is important to note when writing tests are run concurrently
using threads. For this reason you should take care that your tests are written
in such a way as to not depend on each-other, or on any shared state. "Shared
state" can also include the environment, such as the current working directory,
or environment variables.
One thing that is important to note when writing tests is that they may be run
concurrently using threads. For this reason you should take care that your tests
are written in such a way as to not depend on each-other, or on any shared
state. "Shared state" can also include the environment, such as the current
working directory, or environment variables.
If this is an issue it is possible to control this concurrency, either by
setting the environment variable `RUST_TEST_THREADS`, or by passing the argument

View File

@ -47,7 +47,7 @@ let x: i32 = 5;
```
If I asked you to read this out loud to the rest of the class, you'd say “`x`
is a binding with the type `i32` and the value `five`.”
is a binding with the type `i32` and the value `5`.”
In this case we chose to represent `x` as a 32-bit signed integer. Rust has
many different primitive integer types. They begin with `i` for signed integers

View File

@ -17,7 +17,7 @@ the language.
[**The Rust Reference**][ref]. While Rust does not have a
specification, the reference tries to describe its working in
detail. It tends to be out of date.
detail. It is accurate, but not necessarily complete.
[**Standard Library API Reference**][api]. Documentation for the
standard library.

View File

@ -603,7 +603,8 @@ syntax named by _designator_. Valid designators are:
* `ty`: a [type](#types)
* `ident`: an [identifier](#identifiers)
* `path`: a [path](#paths)
* `tt`: either side of the `=>` in macro rules
* `tt`: a token tree (a single [token](#tokens) or a sequence of token trees surrounded
by matching `()`, `[]`, or `{}`)
* `meta`: the contents of an [attribute](#attributes)
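A minimal illustration (not an example from the reference itself): a single
`tt` fragment can capture either one token or one bracketed group.
```rust
macro_rules! capture_tt {
    ($body:tt) => { stringify!($body) };
}
fn main() {
    // a lone token is a token tree...
    assert_eq!(capture_tt!(42), "42");
    // ...and so is an entire bracketed group
    println!("{}", capture_tt!([1, 2, 3]));
}
```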
In the transcriber, the
@ -740,13 +741,14 @@ There are several kinds of item:
* [`extern crate` declarations](#extern-crate-declarations)
* [`use` declarations](#use-declarations)
* [modules](#modules)
* [functions](#functions)
* [function definitions](#functions)
* [`extern` blocks](#external-blocks)
* [type definitions](grammar.html#type-definitions)
* [structs](#structs)
* [enumerations](#enumerations)
* [struct definitions](#structs)
* [enumeration definitions](#enumerations)
* [constant items](#constant-items)
* [static items](#static-items)
* [traits](#traits)
* [trait definitions](#traits)
* [implementations](#implementations)
Some items form an implicit scope for the declaration of sub-items. In other
@ -2462,11 +2464,6 @@ The currently implemented features of the reference compiler are:
* `unboxed_closures` - Rust's new closure design, which is currently a work in
progress feature with many known bugs.
* `unmarked_api` - Allows use of items within a `#![staged_api]` crate
which have not been marked with a stability marker.
Such items should not be allowed by the compiler to exist,
so if you need this there probably is a compiler bug.
* `allow_internal_unstable` - Allows `macro_rules!` macros to be tagged with the
`#[allow_internal_unstable]` attribute, designed
to allow `std` macros to call

View File

@ -37,8 +37,6 @@ TEMPLATE = """// Copyright {year} The Rust Project Developers. See the COPYRIGHT
// This file was auto-generated using 'src/etc/generate-deriving-span-tests.py'
extern crate rand;
{error_deriving}
struct Error;
{code}
@ -106,7 +104,6 @@ STRUCT = 2
ALL = STRUCT | ENUM
traits = {
'Zero': (STRUCT, [], 1),
'Default': (STRUCT, [], 1),
'FromPrimitive': (0, [], 0), # only works for C-like enums
@ -116,7 +113,7 @@ traits = {
for (trait, supers, errs) in [('Clone', [], 1),
('PartialEq', [], 2),
('PartialOrd', ['PartialEq'], 8),
('PartialOrd', ['PartialEq'], 9),
('Eq', ['PartialEq'], 1),
('Ord', ['Eq', 'PartialOrd', 'PartialEq'], 1),
('Debug', [], 1),

View File

@ -23,7 +23,6 @@ use std::fs::File;
use std::io::{BufRead, Read};
use std::path::Path;
use syntax::parse;
use syntax::parse::lexer;
use rustc::dep_graph::DepGraph;
use rustc::session::{self, config};
@ -31,15 +30,16 @@ use rustc::middle::cstore::DummyCrateStore;
use std::rc::Rc;
use syntax::ast;
use syntax::ast::Name;
use syntax::codemap;
use syntax::parse::token::{self, BinOpToken, DelimToken, Lit, Token};
use syntax::parse::lexer::TokenAndSpan;
use syntax_pos::Pos;
use syntax::symbol::{Symbol, keywords};
fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
fn id() -> token::Token {
Token::Ident(ast::Ident::with_empty_ctxt(Name(0)))
Token::Ident(ast::Ident::with_empty_ctxt(keywords::Invalid.name()))
}
let mut res = HashMap::new();
@ -65,7 +65,7 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
"SHL" => Token::BinOp(BinOpToken::Shl),
"LBRACE" => Token::OpenDelim(DelimToken::Brace),
"RARROW" => Token::RArrow,
"LIT_STR" => Token::Literal(Lit::Str_(Name(0)), None),
"LIT_STR" => Token::Literal(Lit::Str_(keywords::Invalid.name()), None),
"DOTDOT" => Token::DotDot,
"MOD_SEP" => Token::ModSep,
"DOTDOTDOT" => Token::DotDotDot,
@ -75,21 +75,22 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
"ANDAND" => Token::AndAnd,
"AT" => Token::At,
"LBRACKET" => Token::OpenDelim(DelimToken::Bracket),
"LIT_STR_RAW" => Token::Literal(Lit::StrRaw(Name(0), 0), None),
"LIT_STR_RAW" => Token::Literal(Lit::StrRaw(keywords::Invalid.name(), 0), None),
"RPAREN" => Token::CloseDelim(DelimToken::Paren),
"SLASH" => Token::BinOp(BinOpToken::Slash),
"COMMA" => Token::Comma,
"LIFETIME" => Token::Lifetime(ast::Ident::with_empty_ctxt(Name(0))),
"LIFETIME" => Token::Lifetime(
ast::Ident::with_empty_ctxt(keywords::Invalid.name())),
"CARET" => Token::BinOp(BinOpToken::Caret),
"TILDE" => Token::Tilde,
"IDENT" => id(),
"PLUS" => Token::BinOp(BinOpToken::Plus),
"LIT_CHAR" => Token::Literal(Lit::Char(Name(0)), None),
"LIT_BYTE" => Token::Literal(Lit::Byte(Name(0)), None),
"LIT_CHAR" => Token::Literal(Lit::Char(keywords::Invalid.name()), None),
"LIT_BYTE" => Token::Literal(Lit::Byte(keywords::Invalid.name()), None),
"EQ" => Token::Eq,
"RBRACKET" => Token::CloseDelim(DelimToken::Bracket),
"COMMENT" => Token::Comment,
"DOC_COMMENT" => Token::DocComment(Name(0)),
"DOC_COMMENT" => Token::DocComment(keywords::Invalid.name()),
"DOT" => Token::Dot,
"EQEQ" => Token::EqEq,
"NE" => Token::Ne,
@ -99,9 +100,9 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
"BINOP" => Token::BinOp(BinOpToken::Plus),
"POUND" => Token::Pound,
"OROR" => Token::OrOr,
"LIT_INTEGER" => Token::Literal(Lit::Integer(Name(0)), None),
"LIT_INTEGER" => Token::Literal(Lit::Integer(keywords::Invalid.name()), None),
"BINOPEQ" => Token::BinOpEq(BinOpToken::Plus),
"LIT_FLOAT" => Token::Literal(Lit::Float(Name(0)), None),
"LIT_FLOAT" => Token::Literal(Lit::Float(keywords::Invalid.name()), None),
"WHITESPACE" => Token::Whitespace,
"UNDERSCORE" => Token::Underscore,
"MINUS" => Token::BinOp(BinOpToken::Minus),
@ -111,10 +112,11 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
"OR" => Token::BinOp(BinOpToken::Or),
"GT" => Token::Gt,
"LE" => Token::Le,
"LIT_BINARY" => Token::Literal(Lit::ByteStr(Name(0)), None),
"LIT_BINARY_RAW" => Token::Literal(Lit::ByteStrRaw(Name(0), 0), None),
"LIT_BINARY" => Token::Literal(Lit::ByteStr(keywords::Invalid.name()), None),
"LIT_BINARY_RAW" => Token::Literal(
Lit::ByteStrRaw(keywords::Invalid.name(), 0), None),
"QUESTION" => Token::Question,
"SHEBANG" => Token::Shebang(Name(0)),
"SHEBANG" => Token::Shebang(keywords::Invalid.name()),
_ => continue,
};
@ -158,7 +160,7 @@ fn fix(mut lit: &str) -> ast::Name {
let leading_hashes = count(lit);
// +1/-1 to adjust for single quotes
parse::token::intern(&lit[leading_hashes + 1..lit.len() - leading_hashes - 1])
Symbol::intern(&lit[leading_hashes + 1..lit.len() - leading_hashes - 1])
}
/// Assuming a char/byte literal, strip the 'b' prefix and the single quotes.
@ -168,7 +170,7 @@ fn fixchar(mut lit: &str) -> ast::Name {
lit = &lit[1..];
}
parse::token::intern(&lit[1..lit.len() - 1])
Symbol::intern(&lit[1..lit.len() - 1])
}
fn count(lit: &str) -> usize {
@ -196,7 +198,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>, surrogate_
let not_found = format!("didn't find token {:?} in the map", toknum);
let proto_tok = tokens.get(toknum).expect(&not_found[..]);
let nm = parse::token::intern(content);
let nm = Symbol::intern(content);
debug!("What we got: content (`{}`), proto: {:?}", content, proto_tok);

View File

@ -524,6 +524,9 @@ impl<I: Iterator + ?Sized> Iterator for Box<I> {
fn size_hint(&self) -> (usize, Option<usize>) {
(**self).size_hint()
}
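// Forward `nth` so a specialized implementation on the underlying iterator
// is used rather than the default, which would advance by calling `next`
// repeatedly.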
fn nth(&mut self, n: usize) -> Option<I::Item> {
(**self).nth(n)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
@ -532,7 +535,14 @@ impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {}
impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {
fn len(&self) -> usize {
(**self).len()
}
fn is_empty(&self) -> bool {
(**self).is_empty()
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<I: FusedIterator + ?Sized> FusedIterator for Box<I> {}

View File

@ -74,11 +74,13 @@
#![feature(allocator)]
#![feature(box_syntax)]
#![feature(cfg_target_has_atomic)]
#![feature(coerce_unsized)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(custom_attribute)]
#![feature(dropck_parametricity)]
#![cfg_attr(not(test), feature(exact_size_is_empty))]
#![feature(fundamental)]
#![feature(lang_items)]
#![feature(needs_allocator)]
@ -121,6 +123,7 @@ mod boxed {
}
#[cfg(test)]
mod boxed_test;
#[cfg(target_has_atomic = "ptr")]
pub mod arc;
pub mod rc;
pub mod raw_vec;

View File

@ -8,12 +8,10 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::sync::atomic::{AtomicPtr, Ordering};
use core::mem;
#[cfg(target_has_atomic = "ptr")]
pub use self::imp::set_oom_handler;
use core::intrinsics;
static OOM_HANDLER: AtomicPtr<()> = AtomicPtr::new(default_oom_handler as *mut ());
fn default_oom_handler() -> ! {
// The default handler can't do much more since we can't assume the presence
// of libc or any way of printing an error message.
@ -26,17 +24,38 @@ fn default_oom_handler() -> ! {
#[unstable(feature = "oom", reason = "not a scrutinized interface",
issue = "27700")]
pub fn oom() -> ! {
let value = OOM_HANDLER.load(Ordering::SeqCst);
let handler: fn() -> ! = unsafe { mem::transmute(value) };
handler();
self::imp::oom()
}
/// Set a custom handler for out-of-memory conditions
///
/// To avoid recursive OOM failures, it is critical that the OOM handler does
/// not allocate any memory itself.
#[unstable(feature = "oom", reason = "not a scrutinized interface",
issue = "27700")]
pub fn set_oom_handler(handler: fn() -> !) {
OOM_HANDLER.store(handler as *mut (), Ordering::SeqCst);
#[cfg(target_has_atomic = "ptr")]
mod imp {
use core::mem;
use core::sync::atomic::{AtomicPtr, Ordering};
static OOM_HANDLER: AtomicPtr<()> = AtomicPtr::new(super::default_oom_handler as *mut ());
#[inline(always)]
pub fn oom() -> ! {
let value = OOM_HANDLER.load(Ordering::SeqCst);
let handler: fn() -> ! = unsafe { mem::transmute(value) };
handler();
}
/// Set a custom handler for out-of-memory conditions
///
/// To avoid recursive OOM failures, it is critical that the OOM handler does
/// not allocate any memory itself.
#[unstable(feature = "oom", reason = "not a scrutinized interface",
issue = "27700")]
pub fn set_oom_handler(handler: fn() -> !) {
OOM_HANDLER.store(handler as *mut (), Ordering::SeqCst);
}
}
#[cfg(not(target_has_atomic = "ptr"))]
mod imp {
#[inline(always)]
pub fn oom() -> ! {
super::default_oom_handler()
}
}

View File

@ -12,35 +12,35 @@
//! Single-threaded reference-counting pointers.
//!
//! The type [`Rc<T>`][rc] provides shared ownership of a value of type `T`,
//! allocated in the heap. Invoking [`clone`][clone] on `Rc` produces a new
//! pointer to the same value in the heap. When the last `Rc` pointer to a
//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
//! allocated in the heap. Invoking [`clone()`][clone] on [`Rc`] produces a new
//! pointer to the same value in the heap. When the last [`Rc`] pointer to a
//! given value is destroyed, the pointed-to value is also destroyed.
//!
//! Shared references in Rust disallow mutation by default, and `Rc` is no
//! exception. If you need to mutate through an `Rc`, use [`Cell`][cell] or
//! [`RefCell`][refcell].
//! exception. If you need to mutate through an [`Rc`], use [`Cell`] or
//! [`RefCell`].
//!
//! `Rc` uses non-atomic reference counting. This means that overhead is very
//! low, but an `Rc` cannot be sent between threads, and consequently `Rc`
//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
//! does not implement [`Send`][send]. As a result, the Rust compiler
//! will check *at compile time* that you are not sending `Rc`s between
//! will check *at compile time* that you are not sending [`Rc`]s between
//! threads. If you need multi-threaded, atomic reference counting, use
//! [`sync::Arc`][arc].
//!
//! The [`downgrade`][downgrade] method can be used to create a non-owning
//! [`Weak`][weak] pointer. A `Weak` pointer can be [`upgrade`][upgrade]d
//! to an `Rc`, but this will return [`None`][option] if the value has
//! The [`downgrade()`][downgrade] method can be used to create a non-owning
//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
//! to an [`Rc`], but this will return [`None`] if the value has
//! already been dropped.
//!
//! A cycle between `Rc` pointers will never be deallocated. For this reason,
//! `Weak` is used to break cycles. For example, a tree could have strong
//! `Rc` pointers from parent nodes to children, and `Weak` pointers from
//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
//! [`Weak`] is used to break cycles. For example, a tree could have strong
//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
//! children back to their parents.
//!
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
//! so you can call `T`'s methods on a value of type `Rc<T>`. To avoid name
//! clashes with `T`'s methods, the methods of `Rc<T>` itself are [associated
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are [associated
//! functions][assoc], called using function-like syntax:
//!
//! ```
@ -50,28 +50,15 @@
//! Rc::downgrade(&my_rc);
//! ```
//!
//! `Weak<T>` does not auto-dereference to `T`, because the value may have
//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the value may have
//! already been destroyed.
//!
//! [rc]: struct.Rc.html
//! [weak]: struct.Weak.html
//! [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
//! [cell]: ../../std/cell/struct.Cell.html
//! [refcell]: ../../std/cell/struct.RefCell.html
//! [send]: ../../std/marker/trait.Send.html
//! [arc]: ../../std/sync/struct.Arc.html
//! [deref]: ../../std/ops/trait.Deref.html
//! [downgrade]: struct.Rc.html#method.downgrade
//! [upgrade]: struct.Weak.html#method.upgrade
//! [option]: ../../std/option/enum.Option.html
//! [assoc]: ../../book/method-syntax.html#associated-functions
//!
//! # Examples
//!
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
//! unique ownership, because more than one gadget may belong to the same
//! `Owner`. `Rc` allows us to share an `Owner` between multiple `Gadget`s,
//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
//!
//! ```
@ -127,20 +114,20 @@
//! ```
//!
//! If our requirements change, and we also need to be able to traverse from
//! `Owner` to `Gadget`, we will run into problems. An `Rc` pointer from `Owner`
//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
//! to `Gadget` introduces a cycle between the values. This means that their
//! reference counts can never reach 0, and the values will remain allocated
//! forever: a memory leak. In order to get around this, we can use `Weak`
//! forever: a memory leak. In order to get around this, we can use [`Weak`]
//! pointers.
//!
//! Rust actually makes it somewhat difficult to produce this loop in the first
//! place. In order to end up with two values that point at each other, one of
//! them needs to be mutable. This is difficult because `Rc` enforces
//! them needs to be mutable. This is difficult because [`Rc`] enforces
//! memory safety by only giving out shared references to the value it wraps,
//! and these don't allow direct mutation. We need to wrap the part of the
//! value we wish to mutate in a [`RefCell`][refcell], which provides *interior
//! value we wish to mutate in a [`RefCell`], which provides *interior
//! mutability*: a method to achieve mutability through a shared reference.
//! `RefCell` enforces Rust's borrowing rules at runtime.
//! [`RefCell`] enforces Rust's borrowing rules at runtime.
//!
//! ```
//! use std::rc::Rc;
@ -214,6 +201,19 @@
//! // Gadget Man, so he gets destroyed as well.
//! }
//! ```
//!
//! [`Rc`]: struct.Rc.html
//! [`Weak`]: struct.Weak.html
//! [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
//! [`Cell`]: ../../std/cell/struct.Cell.html
//! [`RefCell`]: ../../std/cell/struct.RefCell.html
//! [send]: ../../std/marker/trait.Send.html
//! [arc]: ../../std/sync/struct.Arc.html
//! [`Deref`]: ../../std/ops/trait.Deref.html
//! [downgrade]: struct.Rc.html#method.downgrade
//! [upgrade]: struct.Weak.html#method.upgrade
//! [`None`]: ../../std/option/enum.Option.html#variant.None
//! [assoc]: ../../book/method-syntax.html#associated-functions
#![stable(feature = "rust1", since = "1.0.0")]
@ -251,9 +251,11 @@ struct RcBox<T: ?Sized> {
/// See the [module-level documentation](./index.html) for more details.
///
/// The inherent methods of `Rc` are all associated functions, which means
/// that you have to call them as e.g. `Rc::get_mut(&value)` instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner
/// that you have to call them as e.g. [`Rc::get_mut(&value)`][get_mut] instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner
/// type `T`.
///
/// [get_mut]: #method.get_mut
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Rc<T: ?Sized> {
ptr: Shared<RcBox<T>>,
@ -337,10 +339,10 @@ impl<T> Rc<T> {
}
/// Checks whether [`Rc::try_unwrap`][try_unwrap] would return
/// [`Ok`][result].
/// [`Ok`].
///
/// [try_unwrap]: struct.Rc.html#method.try_unwrap
/// [result]: ../../std/result/enum.Result.html
/// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok
///
/// # Examples
///
@ -543,14 +545,14 @@ impl<T: ?Sized> Rc<T> {
/// Returns a mutable reference to the inner value, if there are
/// no other `Rc` or [`Weak`][weak] pointers to the same value.
///
/// Returns [`None`][option] otherwise, because it is not safe to
/// Returns [`None`] otherwise, because it is not safe to
/// mutate a shared value.
///
/// See also [`make_mut`][make_mut], which will [`clone`][clone]
/// the inner value when it's shared.
///
/// [weak]: struct.Weak.html
/// [option]: ../../std/option/enum.Option.html
/// [`None`]: ../../std/option/enum.Option.html#variant.None
/// [make_mut]: struct.Rc.html#method.make_mut
/// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
///

View File

@ -69,6 +69,7 @@ fn main() {
.read_dir()
.unwrap()
.map(|e| e.unwrap())
.filter(|e| &*e.file_name() != ".git")
.collect::<Vec<_>>();
while let Some(entry) = stack.pop() {
let path = entry.path();
@ -150,11 +151,17 @@ fn main() {
cmd.arg(format!("--build={}", build_helper::gnu_target(&host)));
run(&mut cmd);
run(Command::new("make")
.current_dir(&build_dir)
.arg("build_lib_static")
.arg("-j")
.arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set")));
let mut make = Command::new("make");
make.current_dir(&build_dir)
.arg("build_lib_static");
// mingw make seems... buggy? unclear...
if !host.contains("windows") {
make.arg("-j")
.arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set"));
}
run(&mut make);
if target.contains("windows") {
println!("cargo:rustc-link-lib=static=jemalloc");

View File

@ -10,8 +10,12 @@ path = "lib.rs"
[dependencies]
alloc = { path = "../liballoc" }
core = { path = "../libcore" }
rustc_unicode = { path = "../librustc_unicode" }
std_unicode = { path = "../libstd_unicode" }
[[test]]
name = "collectionstest"
path = "../libcollectionstest/lib.rs"
[[bench]]
name = "collectionstest"
path = "../libcollectionstest/lib.rs"

View File

@ -986,7 +986,11 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
impl<'a, T> ExactSizeIterator for Iter<'a, T> {
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T> FusedIterator for Iter<'a, T> {}
@ -1022,7 +1026,11 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {}
impl<T> ExactSizeIterator for IntoIter<T> {
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<T> FusedIterator for IntoIter<T> {}
@ -1057,7 +1065,11 @@ impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> {
}
#[stable(feature = "drain", since = "1.6.0")]
impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {}
impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T: 'a> FusedIterator for Drain<'a, T> {}

View File

@ -16,7 +16,7 @@
#![unstable(feature = "enumset",
reason = "matches collection reform specification, \
waiting for dust to settle",
issue = "0")]
issue = "37966")]
use core::marker;
use core::fmt;

View File

@ -36,6 +36,7 @@
#![cfg_attr(not(test), feature(char_escape_debug))]
#![feature(core_intrinsics)]
#![feature(dropck_parametricity)]
#![feature(exact_size_is_empty)]
#![feature(fmt_internals)]
#![feature(fused)]
#![feature(heap_api)]
@ -46,18 +47,19 @@
#![feature(placement_in)]
#![feature(placement_new_protocol)]
#![feature(shared)]
#![feature(slice_get_slice)]
#![feature(slice_patterns)]
#![feature(specialization)]
#![feature(staged_api)]
#![feature(step_by)]
#![feature(trusted_len)]
#![feature(unicode)]
#![feature(unique)]
#![feature(untagged_unions)]
#![cfg_attr(test, feature(rand, test))]
#![no_std]
extern crate rustc_unicode;
extern crate std_unicode;
extern crate alloc;
#[cfg(test)]

View File

@ -98,8 +98,7 @@
#![cfg_attr(test, allow(unused_imports, dead_code))]
use alloc::boxed::Box;
use core::cmp::Ordering::{self, Greater, Less};
use core::cmp;
use core::cmp::Ordering::{self, Greater};
use core::mem::size_of;
use core::mem;
use core::ptr;
@ -118,6 +117,8 @@ pub use core::slice::{SplitMut, ChunksMut, Split};
pub use core::slice::{SplitN, RSplitN, SplitNMut, RSplitNMut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{from_raw_parts, from_raw_parts_mut};
#[unstable(feature = "slice_get_slice", issue = "35729")]
pub use core::slice::SliceIndex;
////////////////////////////////////////////////////////////////////////////////
// Basic slice extension methods
@ -353,7 +354,9 @@ impl<T> [T] {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn get(&self, index: usize) -> Option<&T> {
pub fn get<I>(&self, index: I) -> Option<&I::Output>
where I: SliceIndex<T>
{
core_slice::SliceExt::get(self, index)
}
@ -372,7 +375,9 @@ impl<T> [T] {
/// or `None` if the index is out of bounds
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
where I: SliceIndex<T>
{
core_slice::SliceExt::get_mut(self, index)
}
@ -390,7 +395,9 @@ impl<T> [T] {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub unsafe fn get_unchecked(&self, index: usize) -> &T {
pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
where I: SliceIndex<T>
{
core_slice::SliceExt::get_unchecked(self, index)
}
@ -410,7 +417,9 @@ impl<T> [T] {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T {
pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
where I: SliceIndex<T>
{
core_slice::SliceExt::get_unchecked_mut(self, index)
}
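As a minimal sketch of what the generalized `get` family enables (assuming a toolchain that includes this change), a `usize` index and a range index now go through the same method:

```rust
fn main() {
    let v = [10, 40, 30];

    // With a `usize` index, `get` still returns `Option<&T>`.
    assert_eq!(v.get(1), Some(&40));
    assert_eq!(v.get(3), None);

    // With a range, `get` returns `Option<&[T]>`, and an out-of-bounds range
    // yields `None` instead of panicking.
    assert_eq!(v.get(0..2), Some(&[10, 40][..]));
    assert_eq!(v.get(1..4), None);
}
```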
@ -1032,8 +1041,8 @@ impl<T> [T] {
/// This is equivalent to `self.sort_by(|a, b| a.cmp(b))`.
///
/// This sort is stable and `O(n log n)` worst-case but allocates
/// approximately `2 * n` where `n` is the length of `self`.
/// This sort is stable and `O(n log n)` worst-case, but allocates
/// temporary storage half the size of `self`.
///
/// # Examples
///
@ -1054,8 +1063,8 @@ impl<T> [T] {
/// Sorts the slice, in place, using `f` to extract a key by which to
/// order the sort by.
///
/// This sort is stable and `O(n log n)` worst-case but allocates
/// approximately `2 * n`, where `n` is the length of `self`.
/// This sort is stable and `O(n log n)` worst-case, but allocates
/// temporary storage half the size of `self`.
///
/// # Examples
///
@ -1076,8 +1085,8 @@ impl<T> [T] {
/// Sorts the slice, in place, using `compare` to compare
/// elements.
///
/// This sort is stable and `O(n log n)` worst-case but allocates
/// approximately `2 * n`, where `n` is the length of `self`.
/// This sort is stable and `O(n log n)` worst-case, but allocates
/// temporary storage half the size of `self`.
///
/// # Examples
///
@ -1295,213 +1304,333 @@ impl<T: Clone> ToOwned for [T] {
// Sorting
////////////////////////////////////////////////////////////////////////////////
fn insertion_sort<T, F>(v: &mut [T], mut compare: F)
/// Inserts `v[0]` into the pre-sorted sequence `v[1..]` so that the whole `v[..]` becomes sorted.
///
/// This is the core subroutine of insertion sort.
fn insert_head<T, F>(v: &mut [T], compare: &mut F)
where F: FnMut(&T, &T) -> Ordering
{
let len = v.len() as isize;
let buf_v = v.as_mut_ptr();
// 1 <= i < len;
for i in 1..len {
// j satisfies: 0 <= j <= i;
let mut j = i;
if v.len() >= 2 && compare(&v[0], &v[1]) == Greater {
unsafe {
// `i` is in bounds.
let read_ptr = buf_v.offset(i) as *const T;
// There are three ways to implement insertion here:
//
// 1. Swap adjacent elements until the first one gets to its final destination.
// However, this way we copy data around more than is necessary. If elements are big
// structures (costly to copy), this method will be slow.
//
// 2. Iterate until the right place for the first element is found. Then shift the
// elements succeeding it to make room for it and finally place it into the
// remaining hole. This is a good method.
//
// 3. Copy the first element into a temporary variable. Iterate until the right place
// for it is found. As we go along, copy every traversed element into the slot
// preceding it. Finally, copy data from the temporary variable into the remaining
// hole. This method is very good. Benchmarks demonstrated slightly better
// performance than with the 2nd method.
//
// All methods were benchmarked, and the 3rd showed best results. So we chose that one.
let mut tmp = NoDrop { value: ptr::read(&v[0]) };
// find where to insert, we need to do strict <,
// rather than <=, to maintain stability.
// Intermediate state of the insertion process is always tracked by `hole`, which
// serves two purposes:
// 1. Protects integrity of `v` from panics in `compare`.
// 2. Fills the remaining hole in `v` in the end.
//
// Panic safety:
//
// If `compare` panics at any point during the process, `hole` will get dropped and
// fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it
// initially held exactly once.
let mut hole = InsertionHole {
src: &mut tmp.value,
dest: &mut v[1],
};
ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);
// 0 <= j - 1 < len, so .offset(j - 1) is in bounds.
while j > 0 && compare(&*read_ptr, &*buf_v.offset(j - 1)) == Less {
j -= 1;
for i in 2..v.len() {
if compare(&tmp.value, &v[i]) != Greater {
break;
}
ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1);
hole.dest = &mut v[i];
}
// `hole` gets dropped and thus copies `tmp` into the remaining hole in `v`.
}
}
// shift everything to the right, to make space to
// insert this value.
// Holds a value, but never drops it.
#[allow(unions_with_drop_fields)]
union NoDrop<T> {
value: T
}
// j + 1 could be `len` (for the last `i`), but in
// that case, `i == j` so we don't copy. The
// `.offset(j)` is always in bounds.
// When dropped, copies from `src` into `dest`.
struct InsertionHole<T> {
src: *mut T,
dest: *mut T,
}
if i != j {
let tmp = ptr::read(read_ptr);
ptr::copy(&*buf_v.offset(j), buf_v.offset(j + 1), (i - j) as usize);
ptr::copy_nonoverlapping(&tmp, buf_v.offset(j), 1);
mem::forget(tmp);
}
impl<T> Drop for InsertionHole<T> {
fn drop(&mut self) {
unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1); }
}
}
}
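For reference, a hypothetical safe sketch of the `insert_head` contract (this deliberately uses plain adjacent swaps, i.e. method 1 from the comment above, not the optimized hole-based copying the real code chose):

```rust
// `v[1..]` is assumed sorted; afterwards the whole of `v` is sorted.
fn insert_head_simple<T: Ord>(v: &mut [T]) {
    let mut i = 0;
    while i + 1 < v.len() && v[i] > v[i + 1] {
        v.swap(i, i + 1);
        i += 1;
    }
}

fn main() {
    let mut v = [5, 1, 2, 3, 4];
    insert_head_simple(&mut v);
    assert_eq!(v, [1, 2, 3, 4, 5]);
}
```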
fn merge_sort<T, F>(v: &mut [T], mut compare: F)
/// Merges non-decreasing runs `v[..mid]` and `v[mid..]` using `buf` as temporary storage, and
/// stores the result into `v[..]`.
///
/// # Safety
///
/// The two slices must be non-empty and `mid` must be in bounds. Buffer `buf` must be long enough
/// to hold a copy of the shorter slice. Also, `T` must not be a zero-sized type.
unsafe fn merge<T, F>(v: &mut [T], mid: usize, buf: *mut T, compare: &mut F)
where F: FnMut(&T, &T) -> Ordering
{
// warning: this wildly uses unsafe.
const BASE_INSERTION: usize = 32;
const LARGE_INSERTION: usize = 16;
// FIXME #12092: smaller insertion runs seem to make sorting
// vectors of large elements a little faster on some platforms,
// but this hasn't been tested/tuned extensively
let insertion = if size_of::<T>() <= 16 {
BASE_INSERTION
} else {
LARGE_INSERTION
};
let len = v.len();
let v = v.as_mut_ptr();
let v_mid = v.offset(mid as isize);
let v_end = v.offset(len as isize);
// short vectors get sorted in-place via insertion sort to avoid allocations
if len <= insertion {
insertion_sort(v, compare);
return;
}
// The merge process first copies the shorter run into `buf`. Then it traces the newly copied
// run and the longer run forwards (or backwards), comparing their next unconsumed elements and
// copying the lesser (or greater) one into `v`.
//
// As soon as the shorter run is fully consumed, the process is done. If the longer run gets
// consumed first, then we must copy whatever is left of the shorter run into the remaining
// hole in `v`.
//
// Intermediate state of the process is always tracked by `hole`, which serves two purposes:
// 1. Protects integrity of `v` from panics in `compare`.
// 2. Fills the remaining hole in `v` if the longer run gets consumed first.
//
// Panic safety:
//
// If `compare` panics at any point during the process, `hole` will get dropped and fill the
// hole in `v` with the unconsumed range in `buf`, thus ensuring that `v` still holds every
// object it initially held exactly once.
let mut hole;
// allocate some memory to use as scratch memory, we keep the
// length 0 so we can keep shallow copies of the contents of `v`
// without risking the dtors running on an object twice if
// `compare` panics.
let mut working_space = Vec::with_capacity(2 * len);
// these both are buffers of length `len`.
let mut buf_dat = working_space.as_mut_ptr();
let mut buf_tmp = unsafe { buf_dat.offset(len as isize) };
if mid <= len - mid {
// The left run is shorter.
ptr::copy_nonoverlapping(v, buf, mid);
hole = MergeHole {
start: buf,
end: buf.offset(mid as isize),
dest: v,
};
// length `len`.
let buf_v = v.as_ptr();
// Initially, these pointers point to the beginnings of their arrays.
let left = &mut hole.start;
let mut right = v_mid;
let out = &mut hole.dest;
// step 1. sort short runs with insertion sort. This takes the
// values from `v` and sorts them into `buf_dat`, leaving that
// with sorted runs of length INSERTION.
while *left < hole.end && right < v_end {
// Consume the lesser side.
// If equal, prefer the left run to maintain stability.
let to_copy = if compare(&**left, &*right) == Greater {
get_and_increment(&mut right)
} else {
get_and_increment(left)
};
ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
}
} else {
// The right run is shorter.
ptr::copy_nonoverlapping(v_mid, buf, len - mid);
hole = MergeHole {
start: buf,
end: buf.offset((len - mid) as isize),
dest: v_mid,
};
// We could hardcode the sorting comparisons here, and we could
// manipulate/step the pointers themselves, rather than repeatedly
// .offset-ing.
for start in (0..len).step_by(insertion) {
// start <= i < len;
for i in start..cmp::min(start + insertion, len) {
// j satisfies: start <= j <= i;
let mut j = i as isize;
unsafe {
// `i` is in bounds.
let read_ptr = buf_v.offset(i as isize);
// Initially, these pointers point past the ends of their arrays.
let left = &mut hole.dest;
let right = &mut hole.end;
let mut out = v_end;
// find where to insert, we need to do strict <,
// rather than <=, to maintain stability.
// start <= j - 1 < len, so .offset(j - 1) is in
// bounds.
while j > start as isize && compare(&*read_ptr, &*buf_dat.offset(j - 1)) == Less {
j -= 1;
}
// shift everything to the right, to make space to
// insert this value.
// j + 1 could be `len` (for the last `i`), but in
// that case, `i == j` so we don't copy. The
// `.offset(j)` is always in bounds.
ptr::copy(&*buf_dat.offset(j), buf_dat.offset(j + 1), i - j as usize);
ptr::copy_nonoverlapping(read_ptr, buf_dat.offset(j), 1);
}
while v < *left && buf < *right {
// Consume the greater side.
// If equal, prefer the right run to maintain stability.
let to_copy = if compare(&*left.offset(-1), &*right.offset(-1)) == Greater {
decrement_and_get(left)
} else {
decrement_and_get(right)
};
ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
}
}
// Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of
// it will now be copied into the hole in `v`.
// step 2. merge the sorted runs.
let mut width = insertion;
while width < len {
// merge the sorted runs of length `width` in `buf_dat` two at
// a time, placing the result in `buf_tmp`.
// 0 <= start <= len.
for start in (0..len).step_by(2 * width) {
// manipulate pointers directly for speed (rather than
// using a `for` loop with `range` and `.offset` inside
// that loop).
unsafe {
// the end of the first run & start of the
// second. Offset of `len` is defined, since this is
// precisely one byte past the end of the object.
let right_start = buf_dat.offset(cmp::min(start + width, len) as isize);
// end of the second. Similar reasoning to the above re safety.
let right_end_idx = cmp::min(start + 2 * width, len);
let right_end = buf_dat.offset(right_end_idx as isize);
// the pointers to the elements under consideration
// from the two runs.
// both of these are in bounds.
let mut left = buf_dat.offset(start as isize);
let mut right = right_start;
// where we're putting the results, it is a run of
// length `2*width`, so we step it once for each step
// of either `left` or `right`. `buf_tmp` has length
// `len`, so these are in bounds.
let mut out = buf_tmp.offset(start as isize);
let out_end = buf_tmp.offset(right_end_idx as isize);
// If left[last] <= right[0], they are already in order:
// fast-forward the left side (the right side is handled
// in the loop).
// If `right` is not empty then left is not empty, and
// the offsets are in bounds.
if right != right_end && compare(&*right.offset(-1), &*right) != Greater {
let elems = (right_start as usize - left as usize) / mem::size_of::<T>();
ptr::copy_nonoverlapping(&*left, out, elems);
out = out.offset(elems as isize);
left = right_start;
}
while out < out_end {
// Either the left or the right run are exhausted,
// so just copy the remainder from the other run
// and move on; this gives a huge speed-up (order
// of 25%) for mostly sorted vectors (the best
// case).
if left == right_start {
// the number remaining in this run.
let elems = (right_end as usize - right as usize) / mem::size_of::<T>();
ptr::copy_nonoverlapping(&*right, out, elems);
break;
} else if right == right_end {
let elems = (right_start as usize - left as usize) / mem::size_of::<T>();
ptr::copy_nonoverlapping(&*left, out, elems);
break;
}
// check which side is smaller, and that's the
// next element for the new run.
// `left < right_start` and `right < right_end`,
// so these are valid.
let to_copy = if compare(&*left, &*right) == Greater {
step(&mut right)
} else {
step(&mut left)
};
ptr::copy_nonoverlapping(&*to_copy, out, 1);
step(&mut out);
}
}
}
mem::swap(&mut buf_dat, &mut buf_tmp);
width *= 2;
}
// write the result to `v` in one go, so that there are never two copies
// of the same object in `v`.
unsafe {
ptr::copy_nonoverlapping(&*buf_dat, v.as_mut_ptr(), len);
}
// increment the pointer, returning the old pointer.
#[inline(always)]
unsafe fn step<T>(ptr: &mut *mut T) -> *mut T {
unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
let old = *ptr;
*ptr = ptr.offset(1);
old
}
unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
*ptr = ptr.offset(-1);
*ptr
}
// When dropped, copies the range `start..end` into `dest..`.
struct MergeHole<T> {
start: *mut T,
end: *mut T,
dest: *mut T,
}
impl<T> Drop for MergeHole<T> {
fn drop(&mut self) {
// `T` is not a zero-sized type, so it's okay to divide by its size.
let len = (self.end as usize - self.start as usize) / mem::size_of::<T>();
unsafe { ptr::copy_nonoverlapping(self.start, self.dest, len); }
}
}
}
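A hypothetical safe illustration of the `merge` contract (it copies the whole left run instead of using the half-sized buffer, and relies on `Clone`, so it models the behavior rather than the implementation): `v[..mid]` and `v[mid..]` are each sorted on entry, the whole of `v` is sorted on exit, and ties prefer the left run, which is what makes the sort stable.

```rust
fn merge_simple<T: Ord + Clone>(v: &mut [T], mid: usize) {
    let left = v[..mid].to_vec();
    let right = v[mid..].to_vec();
    let (mut i, mut j) = (0, 0);
    for slot in v.iter_mut() {
        if j >= right.len() || (i < left.len() && left[i] <= right[j]) {
            *slot = left[i].clone();  // take from the left run (also on ties)
            i += 1;
        } else {
            *slot = right[j].clone(); // take from the right run
            j += 1;
        }
    }
}

fn main() {
    let mut v = [1, 4, 7, 2, 3, 9];
    merge_simple(&mut v, 3);
    assert_eq!(v, [1, 2, 3, 4, 7, 9]);
}
```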
/// This merge sort borrows some (but not all) ideas from TimSort, which is described in detail
/// [here](http://svn.python.org/projects/python/trunk/Objects/listsort.txt).
///
/// The algorithm identifies strictly descending and non-descending subsequences, which are called
/// natural runs. There is a stack of pending runs yet to be merged. Each newly found run is pushed
/// onto the stack, and then some pairs of adjacent runs are merged until these two invariants are
/// satisfied, for every `i` in `0 .. runs.len() - 2`:
///
/// 1. `runs[i].len > runs[i + 1].len`
/// 2. `runs[i].len > runs[i + 1].len + runs[i + 2].len`
///
/// The invariants ensure that the total running time is `O(n log n)` worst-case.
fn merge_sort<T, F>(v: &mut [T], mut compare: F)
where F: FnMut(&T, &T) -> Ordering
{
// Sorting has no meaningful behavior on zero-sized types.
if size_of::<T>() == 0 {
return;
}
// FIXME #12092: These numbers are platform-specific and need more extensive testing/tuning.
//
// If `v` has length up to `max_insertion`, simply switch to insertion sort because it is going
// to perform better than merge sort. For bigger types `T`, the threshold is smaller.
//
// Short runs are extended using insertion sort to span at least `min_run` elements, in order
// to improve performance.
let (max_insertion, min_run) = if size_of::<T>() <= 16 {
(64, 32)
} else {
(32, 16)
};
let len = v.len();
// Short arrays get sorted in-place via insertion sort to avoid allocations.
if len <= max_insertion {
if len >= 2 {
for i in (0..len-1).rev() {
insert_head(&mut v[i..], &mut compare);
}
}
return;
}
// Allocate a buffer to use as scratch memory. We keep the length 0 so we can keep in it
// shallow copies of the contents of `v` without risking the dtors running on copies if
// `compare` panics. When merging two sorted runs, this buffer holds a copy of the shorter run,
// which will always have length at most `len / 2`.
let mut buf = Vec::with_capacity(len / 2);
// In order to identify natural runs in `v`, we traverse it backwards. That might seem like a
// strange decision, but consider the fact that merges more often go in the opposite direction
// (forwards). According to benchmarks, merging forwards is slightly faster than merging
// backwards. To conclude, identifying runs by traversing backwards improves performance.
let mut runs = vec![];
let mut end = len;
while end > 0 {
// Find the next natural run, and reverse it if it's strictly descending.
let mut start = end - 1;
if start > 0 {
start -= 1;
if compare(&v[start], &v[start + 1]) == Greater {
while start > 0 && compare(&v[start - 1], &v[start]) == Greater {
start -= 1;
}
v[start..end].reverse();
} else {
while start > 0 && compare(&v[start - 1], &v[start]) != Greater {
start -= 1;
}
}
}
// Insert some more elements into the run if it's too short. Insertion sort is faster than
// merge sort on short sequences, so this significantly improves performance.
while start > 0 && end - start < min_run {
start -= 1;
insert_head(&mut v[start..end], &mut compare);
}
// Push this run onto the stack.
runs.push(Run {
start: start,
len: end - start,
});
end = start;
// Merge some pairs of adjacent runs to satisfy the invariants.
while let Some(r) = collapse(&runs) {
let left = runs[r + 1];
let right = runs[r];
unsafe {
merge(&mut v[left.start .. right.start + right.len], left.len, buf.as_mut_ptr(),
&mut compare);
}
runs[r] = Run {
start: left.start,
len: left.len + right.len,
};
runs.remove(r + 1);
}
}
// Finally, exactly one run must remain in the stack.
debug_assert!(runs.len() == 1 && runs[0].start == 0 && runs[0].len == len);
// Examines the stack of runs and identifies the next pair of runs to merge. More specifically,
// if `Some(r)` is returned, that means `runs[r]` and `runs[r + 1]` must be merged next. If the
// algorithm should continue building a new run instead, `None` is returned.
//
// TimSort is infamous for its buggy implementations, as described here:
// http://envisage-project.eu/timsort-specification-and-verification/
//
// The gist of the story is: we must enforce the invariants on the top four runs on the stack.
// Enforcing them on just the top three is not sufficient to ensure that the invariants will still
// hold for *all* runs in the stack.
//
// This function correctly checks invariants for the top four runs. Additionally, if the top
// run starts at index 0, it will always demand a merge operation until the stack is fully
// collapsed, in order to complete the sort.
#[inline]
fn collapse(runs: &[Run]) -> Option<usize> {
let n = runs.len();
if n >= 2 && (runs[n - 1].start == 0 ||
runs[n - 2].len <= runs[n - 1].len ||
(n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len) ||
(n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len)) {
if n >= 3 && runs[n - 3].len < runs[n - 1].len {
Some(n - 3)
} else {
Some(n - 2)
}
} else {
None
}
}
#[derive(Clone, Copy)]
struct Run {
start: usize,
len: usize,
}
}
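A quick check of two properties the rewrite documents above: stability (equal keys keep their original order) and well-defined behavior on zero-sized types. This only exercises the public `sort`/`sort_by` API, so it holds on any toolchain:

```rust
fn main() {
    // Equal keys keep their original relative order after a stable sort.
    let mut v = vec![(2, 'a'), (1, 'b'), (2, 'c'), (1, 'd'), (0, 'e')];
    v.sort_by(|x, y| x.0.cmp(&y.0));
    assert_eq!(v, vec![(0, 'e'), (1, 'b'), (1, 'd'), (2, 'a'), (2, 'c')]);

    // Zero-sized element types are handled (the new code returns immediately).
    let mut units = [(); 16];
    units.sort();
}
```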

View File

@ -24,12 +24,12 @@ use core::str::pattern::Pattern;
use core::str::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher};
use core::mem;
use core::iter::FusedIterator;
use rustc_unicode::str::{UnicodeStr, Utf16Encoder};
use std_unicode::str::{UnicodeStr, Utf16Encoder};
use vec_deque::VecDeque;
use borrow::{Borrow, ToOwned};
use string::String;
use rustc_unicode;
use std_unicode;
use vec::Vec;
use slice::SliceConcatExt;
use boxed::Box;
@ -54,7 +54,7 @@ pub use core::str::{from_utf8, Chars, CharIndices, Bytes};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{from_utf8_unchecked, ParseBoolError};
#[stable(feature = "rust1", since = "1.0.0")]
pub use rustc_unicode::str::SplitWhitespace;
pub use std_unicode::str::SplitWhitespace;
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::pattern;
@ -1705,7 +1705,7 @@ impl str {
}
fn case_ignoreable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool {
use rustc_unicode::derived_property::{Cased, Case_Ignorable};
use std_unicode::derived_property::{Cased, Case_Ignorable};
match iter.skip_while(|&c| Case_Ignorable(c)).next() {
Some(c) => Cased(c),
None => false,

View File

@ -63,8 +63,8 @@ use core::mem;
use core::ops::{self, Add, AddAssign, Index, IndexMut};
use core::ptr;
use core::str::pattern::Pattern;
use rustc_unicode::char::{decode_utf16, REPLACEMENT_CHARACTER};
use rustc_unicode::str as unicode_str;
use std_unicode::char::{decode_utf16, REPLACEMENT_CHARACTER};
use std_unicode::str as unicode_str;
use borrow::{Cow, ToOwned};
use range::RangeArgument;
@ -1129,8 +1129,6 @@ impl String {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, idx: usize, ch: char) {
let len = self.len();
assert!(idx <= len);
assert!(self.is_char_boundary(idx));
let mut bits = [0; 4];
let bits = ch.encode_utf8(&mut bits).as_bytes();
@ -1184,7 +1182,6 @@ impl String {
reason = "recent addition",
issue = "35553")]
pub fn insert_str(&mut self, idx: usize, string: &str) {
assert!(idx <= self.len());
assert!(self.is_char_boundary(idx));
unsafe {
@ -1260,6 +1257,38 @@ impl String {
self.len() == 0
}
/// Divide one string into two at an index.
///
/// The argument, `mid`, should be a byte offset from the start of the string. It must also
/// be on the boundary of a UTF-8 code point.
///
/// The two strings returned go from the start of the string to `mid`, and from `mid` to the end
/// of the string.
///
/// # Panics
///
/// Panics if `mid` is not on a `UTF-8` code point boundary, or if it is beyond the last
/// code point of the string.
///
/// # Examples
///
/// ```
/// # #![feature(string_split_off)]
/// # fn main() {
/// let mut hello = String::from("Hello, World!");
/// let world = hello.split_off(7);
/// assert_eq!(hello, "Hello, ");
/// assert_eq!(world, "World!");
/// # }
/// ```
#[inline]
#[unstable(feature = "string_split_off", issue = "38080")]
pub fn split_off(&mut self, mid: usize) -> String {
assert!(self.is_char_boundary(mid));
let other = self.vec.split_off(mid);
unsafe { String::from_utf8_unchecked(other) }
}
/// Truncates this `String`, removing all contents.
///
/// While this means the `String` will have a length of zero, it does not

View File

@ -1244,7 +1244,7 @@ impl<T: Clone> Vec<T> {
/// ```
#[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
pub fn extend_from_slice(&mut self, other: &[T]) {
self.extend(other.iter().cloned())
self.spec_extend(other.iter())
}
}
@ -1499,7 +1499,7 @@ impl<T> ops::DerefMut for Vec<T> {
impl<T> FromIterator<T> for Vec<T> {
#[inline]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> {
<Self as SpecExtend<_>>::from_iter(iter.into_iter())
<Self as SpecExtend<_, _>>::from_iter(iter.into_iter())
}
}
@ -1572,12 +1572,12 @@ impl<T> Extend<T> for Vec<T> {
}
// Specialization trait used for Vec::from_iter and Vec::extend
trait SpecExtend<I> {
trait SpecExtend<T, I> {
fn from_iter(iter: I) -> Self;
fn spec_extend(&mut self, iter: I);
}
impl<I, T> SpecExtend<I> for Vec<T>
impl<T, I> SpecExtend<T, I> for Vec<T>
where I: Iterator<Item=T>,
{
default fn from_iter(mut iterator: I) -> Self {
@ -1607,7 +1607,7 @@ impl<I, T> SpecExtend<I> for Vec<T>
}
}
impl<I, T> SpecExtend<I> for Vec<T>
impl<T, I> SpecExtend<T, I> for Vec<T>
where I: TrustedLen<Item=T>,
{
fn from_iter(iterator: I) -> Self {
@ -1642,6 +1642,33 @@ impl<I, T> SpecExtend<I> for Vec<T>
}
}
impl<'a, T: 'a, I> SpecExtend<&'a T, I> for Vec<T>
where I: Iterator<Item=&'a T>,
T: Clone,
{
default fn from_iter(iterator: I) -> Self {
SpecExtend::from_iter(iterator.cloned())
}
default fn spec_extend(&mut self, iterator: I) {
self.spec_extend(iterator.cloned())
}
}
impl<'a, T: 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T>
where T: Copy,
{
fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
let slice = iterator.as_slice();
self.reserve(slice.len());
unsafe {
let len = self.len();
self.set_len(len + slice.len());
self.get_unchecked_mut(len..).copy_from_slice(slice);
}
}
}
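From the caller's perspective nothing changes here; the specialization only affects how the bytes are moved for `Copy` element types (a single `copy_from_slice` on toolchains with this change, a per-element loop otherwise). A quick sketch:

```rust
fn main() {
    let mut v = vec![1u32, 2, 3];

    // Both of these now hit the specialized slice path for `Copy` types.
    v.extend_from_slice(&[4, 5, 6]);
    v.extend(&[7, 8]);

    assert_eq!(v, [1, 2, 3, 4, 5, 6, 7, 8]);
}
```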
impl<T> Vec<T> {
fn extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) {
// This is the case for a general iterator.
@ -1669,7 +1696,7 @@ impl<T> Vec<T> {
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Copy> Extend<&'a T> for Vec<T> {
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
self.extend(iter.into_iter().map(|&x| x))
self.spec_extend(iter.into_iter())
}
}
@ -1988,7 +2015,11 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {}
impl<T> ExactSizeIterator for IntoIter<T> {
fn is_empty(&self) -> bool {
self.ptr == self.end
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<T> FusedIterator for IntoIter<T> {}
@ -2082,7 +2113,11 @@ impl<'a, T> Drop for Drain<'a, T> {
#[stable(feature = "drain", since = "1.6.0")]
impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
impl<'a, T> ExactSizeIterator for Drain<'a, T> {
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T> FusedIterator for Drain<'a, T> {}

View File

@ -810,7 +810,7 @@ impl<T> VecDeque<T> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_empty(&self) -> bool {
self.len() == 0
self.tail == self.head
}
/// Create a draining iterator that removes the specified range in the
@ -1916,7 +1916,11 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
impl<'a, T> ExactSizeIterator for Iter<'a, T> {
fn is_empty(&self) -> bool {
self.head == self.tail
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T> FusedIterator for Iter<'a, T> {}
@ -1980,7 +1984,11 @@ impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {
fn is_empty(&self) -> bool {
self.head == self.tail
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T> FusedIterator for IterMut<'a, T> {}
@ -2017,7 +2025,11 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {}
impl<T> ExactSizeIterator for IntoIter<T> {
fn is_empty(&self) -> bool {
self.inner.is_empty()
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<T> FusedIterator for IntoIter<T> {}

View File

@ -18,12 +18,14 @@
#![feature(const_fn)]
#![feature(dedup_by)]
#![feature(enumset)]
#![feature(exact_size_is_empty)]
#![feature(pattern)]
#![feature(rand)]
#![feature(repeat_str)]
#![feature(step_by)]
#![feature(str_escape)]
#![feature(str_replacen)]
#![feature(string_split_off)]
#![feature(test)]
#![feature(unboxed_closures)]
#![feature(unicode)]
@ -31,7 +33,7 @@
extern crate collections;
extern crate test;
extern crate rustc_unicode;
extern crate std_unicode;
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;

View File

@ -383,7 +383,7 @@ fn test_reverse() {
#[test]
fn test_sort() {
for len in 4..25 {
for len in (2..25).chain(500..510) {
for _ in 0..100 {
let mut v: Vec<_> = thread_rng().gen_iter::<i32>().take(len).collect();
let mut v1 = v.clone();
@ -410,7 +410,7 @@ fn test_sort() {
#[test]
fn test_sort_stability() {
for len in 4..25 {
for len in (2..25).chain(500..510) {
for _ in 0..10 {
let mut counts = [0; 10];
@ -441,6 +441,13 @@ fn test_sort_stability() {
}
}
#[test]
fn test_sort_zero_sized_type() {
// Should not panic.
[(); 10].sort();
[(); 100].sort();
}
#[test]
fn test_concat() {
let v: [Vec<i32>; 0] = [];
@ -633,6 +640,16 @@ fn test_iter_clone() {
assert_eq!(it.next(), jt.next());
}
#[test]
fn test_iter_is_empty() {
let xs = [1, 2, 5, 10, 11];
for i in 0..xs.len() {
for j in i..xs.len() {
assert_eq!(xs[i..j].iter().is_empty(), xs[i..j].is_empty());
}
}
}
#[test]
fn test_mut_iterator() {
let mut xs = [1, 2, 3, 4, 5];
@ -1328,89 +1345,104 @@ mod bench {
})
}
#[bench]
fn sort_random_small(b: &mut Bencher) {
fn gen_ascending(len: usize) -> Vec<u64> {
(0..len as u64).collect()
}
fn gen_descending(len: usize) -> Vec<u64> {
(0..len as u64).rev().collect()
}
fn gen_random(len: usize) -> Vec<u64> {
let mut rng = thread_rng();
b.iter(|| {
let mut v: Vec<_> = rng.gen_iter::<u64>().take(5).collect();
v.sort();
});
b.bytes = 5 * mem::size_of::<u64>() as u64;
rng.gen_iter::<u64>().take(len).collect()
}
#[bench]
fn sort_random_medium(b: &mut Bencher) {
fn gen_mostly_ascending(len: usize) -> Vec<u64> {
let mut rng = thread_rng();
b.iter(|| {
let mut v: Vec<_> = rng.gen_iter::<u64>().take(100).collect();
v.sort();
});
b.bytes = 100 * mem::size_of::<u64>() as u64;
let mut v = gen_ascending(len);
for _ in (0usize..).take_while(|x| x * x <= len) {
let x = rng.gen::<usize>() % len;
let y = rng.gen::<usize>() % len;
v.swap(x, y);
}
v
}
#[bench]
fn sort_random_large(b: &mut Bencher) {
fn gen_mostly_descending(len: usize) -> Vec<u64> {
let mut rng = thread_rng();
b.iter(|| {
let mut v: Vec<_> = rng.gen_iter::<u64>().take(10000).collect();
v.sort();
});
b.bytes = 10000 * mem::size_of::<u64>() as u64;
let mut v = gen_descending(len);
for _ in (0usize..).take_while(|x| x * x <= len) {
let x = rng.gen::<usize>() % len;
let y = rng.gen::<usize>() % len;
v.swap(x, y);
}
v
}
#[bench]
fn sort_sorted(b: &mut Bencher) {
let mut v: Vec<_> = (0..10000).collect();
b.iter(|| {
v.sort();
});
b.bytes = (v.len() * mem::size_of_val(&v[0])) as u64;
}
type BigSortable = (u64, u64, u64, u64);
#[bench]
fn sort_big_random_small(b: &mut Bencher) {
fn gen_big_random(len: usize) -> Vec<[u64; 16]> {
let mut rng = thread_rng();
b.iter(|| {
let mut v = rng.gen_iter::<BigSortable>()
.take(5)
.collect::<Vec<BigSortable>>();
v.sort();
});
b.bytes = 5 * mem::size_of::<BigSortable>() as u64;
rng.gen_iter().map(|x| [x; 16]).take(len).collect()
}
#[bench]
fn sort_big_random_medium(b: &mut Bencher) {
let mut rng = thread_rng();
b.iter(|| {
let mut v = rng.gen_iter::<BigSortable>()
.take(100)
.collect::<Vec<BigSortable>>();
v.sort();
});
b.bytes = 100 * mem::size_of::<BigSortable>() as u64;
fn gen_big_ascending(len: usize) -> Vec<[u64; 16]> {
(0..len as u64).map(|x| [x; 16]).take(len).collect()
}
#[bench]
fn sort_big_random_large(b: &mut Bencher) {
let mut rng = thread_rng();
b.iter(|| {
let mut v = rng.gen_iter::<BigSortable>()
.take(10000)
.collect::<Vec<BigSortable>>();
v.sort();
});
b.bytes = 10000 * mem::size_of::<BigSortable>() as u64;
fn gen_big_descending(len: usize) -> Vec<[u64; 16]> {
(0..len as u64).rev().map(|x| [x; 16]).take(len).collect()
}
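The two simplest generators can be exercised on their own (the `rand`-based ones need the unstable `rand` feature); this copies them verbatim so the snippet runs standalone and shows the input shapes the `*_ascending`/`*_descending` benches measure:

```rust
fn gen_ascending(len: usize) -> Vec<u64> {
    (0..len as u64).collect()
}

fn gen_descending(len: usize) -> Vec<u64> {
    (0..len as u64).rev().collect()
}

fn main() {
    // Reversing the descending input gives back the ascending one...
    let mut desc = gen_descending(10);
    desc.reverse();
    assert_eq!(desc, gen_ascending(10));

    // ...and so does sorting it.
    let mut v = gen_descending(10);
    v.sort();
    assert_eq!(v, gen_ascending(10));
}
```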
macro_rules! sort_bench {
($name:ident, $gen:expr, $len:expr) => {
#[bench]
fn $name(b: &mut Bencher) {
b.iter(|| $gen($len).sort());
b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
}
}
}
sort_bench!(sort_small_random, gen_random, 10);
sort_bench!(sort_small_ascending, gen_ascending, 10);
sort_bench!(sort_small_descending, gen_descending, 10);
sort_bench!(sort_small_big_random, gen_big_random, 10);
sort_bench!(sort_small_big_ascending, gen_big_ascending, 10);
sort_bench!(sort_small_big_descending, gen_big_descending, 10);
sort_bench!(sort_medium_random, gen_random, 100);
sort_bench!(sort_medium_ascending, gen_ascending, 100);
sort_bench!(sort_medium_descending, gen_descending, 100);
sort_bench!(sort_large_random, gen_random, 10000);
sort_bench!(sort_large_ascending, gen_ascending, 10000);
sort_bench!(sort_large_descending, gen_descending, 10000);
sort_bench!(sort_large_mostly_ascending, gen_mostly_ascending, 10000);
sort_bench!(sort_large_mostly_descending, gen_mostly_descending, 10000);
sort_bench!(sort_large_big_random, gen_big_random, 10000);
sort_bench!(sort_large_big_ascending, gen_big_ascending, 10000);
sort_bench!(sort_large_big_descending, gen_big_descending, 10000);
#[bench]
fn sort_big_sorted(b: &mut Bencher) {
let mut v: Vec<BigSortable> = (0..10000).map(|i| (i, i, i, i)).collect();
fn sort_large_random_expensive(b: &mut Bencher) {
let len = 10000;
b.iter(|| {
v.sort();
let mut count = 0;
let cmp = move |a: &u64, b: &u64| {
count += 1;
if count % 1_000_000_000 == 0 {
panic!("should not happen");
}
(*a as f64).cos().partial_cmp(&(*b as f64).cos()).unwrap()
};
let mut v = gen_random(len);
v.sort_by(cmp);
black_box(count);
});
b.bytes = (v.len() * mem::size_of_val(&v[0])) as u64;
b.bytes = len as u64 * mem::size_of::<u64>() as u64;
}
}

View File

@ -530,7 +530,7 @@ fn from_utf8_mostly_ascii() {
#[test]
fn test_is_utf16() {
use rustc_unicode::str::is_utf16;
use std_unicode::str::is_utf16;
macro_rules! pos {
($($e:expr),*) => { { $(assert!(is_utf16($e));)* } }
@ -1186,7 +1186,7 @@ fn test_rev_split_char_iterator_no_trailing() {
#[test]
fn test_utf16_code_units() {
use rustc_unicode::str::Utf16Encoder;
use std_unicode::str::Utf16Encoder;
assert_eq!(Utf16Encoder::new(vec!['é', '\u{1F4A9}'].into_iter()).collect::<Vec<u16>>(),
[0xE9, 0xD83D, 0xDCA9])
}

View File

@ -132,7 +132,7 @@ fn test_from_utf16() {
let s_as_utf16 = s.encode_utf16().collect::<Vec<u16>>();
let u_as_string = String::from_utf16(&u).unwrap();
assert!(::rustc_unicode::str::is_utf16(&u));
assert!(::std_unicode::str::is_utf16(&u));
assert_eq!(s_as_utf16, u);
assert_eq!(u_as_string, s);
@ -231,6 +231,45 @@ fn test_pop() {
assert_eq!(data, "ประเทศไทย中");
}
#[test]
fn test_split_off_empty() {
let orig = "Hello, world!";
let mut split = String::from(orig);
let empty: String = split.split_off(orig.len());
assert!(empty.is_empty());
}
#[test]
#[should_panic]
fn test_split_off_past_end() {
let orig = "Hello, world!";
let mut split = String::from(orig);
split.split_off(orig.len() + 1);
}
#[test]
#[should_panic]
fn test_split_off_mid_char() {
let mut orig = String::from("");
orig.split_off(1);
}
#[test]
fn test_split_off_ascii() {
let mut ab = String::from("ABCD");
let cd = ab.split_off(2);
assert_eq!(ab, "AB");
assert_eq!(cd, "CD");
}
#[test]
fn test_split_off_unicode() {
let mut nihon = String::from("日本語");
let go = nihon.split_off("日本".len());
assert_eq!(nihon, "日本");
assert_eq!(go, "");
}
#[test]
fn test_str_truncate() {
let mut s = String::from("12345");

View File

@ -1007,3 +1007,24 @@ fn assert_covariance() {
d
}
}
#[test]
fn test_is_empty() {
let mut v = VecDeque::<i32>::new();
assert!(v.is_empty());
assert!(v.iter().is_empty());
assert!(v.iter_mut().is_empty());
v.extend(&[2, 3, 4]);
assert!(!v.is_empty());
assert!(!v.iter().is_empty());
assert!(!v.iter_mut().is_empty());
while let Some(_) = v.pop_front() {
assert_eq!(v.is_empty(), v.len() == 0);
assert_eq!(v.iter().is_empty(), v.iter().len() == 0);
assert_eq!(v.iter_mut().is_empty(), v.iter_mut().len() == 0);
}
assert!(v.is_empty());
assert!(v.iter().is_empty());
assert!(v.iter_mut().is_empty());
assert!(v.into_iter().is_empty());
}

View File

@ -8,6 +8,7 @@ version = "0.0.0"
name = "compiler_builtins"
path = "lib.rs"
test = false
bench = false
[dependencies]
core = { path = "../libcore" }

View File

@ -94,6 +94,7 @@ fn main() {
cfg.flag("-fvisibility=hidden");
cfg.flag("-fomit-frame-pointer");
cfg.flag("-ffreestanding");
cfg.define("VISIBILITY_HIDDEN", None);
}
let mut sources = Sources::new();

View File

@ -7,7 +7,12 @@ version = "0.0.0"
name = "core"
path = "lib.rs"
test = false
bench = false
[[test]]
name = "coretest"
path = "../libcoretest/lib.rs"
[[bench]]
name = "coretest"
path = "../libcoretest/lib.rs"

View File

@ -10,7 +10,7 @@
//! Character manipulation.
//!
//! For more details, see ::rustc_unicode::char (a.k.a. std::char)
//! For more details, see ::std_unicode::char (a.k.a. std::char)
#![allow(non_snake_case)]
#![stable(feature = "core_char", since = "1.2.0")]
@ -238,7 +238,7 @@ impl fmt::Display for CharTryFromError {
/// A 'radix' here is sometimes also called a 'base'. A radix of two
/// indicates a binary number, a radix of ten, decimal, and a radix of
/// sixteen, hexadecimal, to give some common values. Arbitrary
/// radicum are supported.
/// radices are supported.
///
/// `from_digit()` will return `None` if the input is not a digit in
/// the given radix.

View File

@ -166,7 +166,9 @@ pub struct Formatter<'a> {
// NB. Argument is essentially an optimized partially applied formatting function,
// equivalent to `exists T.(&T, fn(&T, &mut Formatter) -> Result`.
enum Void {}
struct Void {
_priv: (),
}
/// This struct represents the generic "argument" which is taken by the Xprintf
/// family of functions. It contains a function to format the given value. At

View File

@ -247,7 +247,7 @@ pub trait Iterator {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn nth(&mut self, mut n: usize) -> Option<Self::Item> where Self: Sized {
fn nth(&mut self, mut n: usize) -> Option<Self::Item> {
for x in self {
if n == 0 { return Some(x) }
n -= 1;
@ -2179,4 +2179,7 @@ impl<'a, I: Iterator + ?Sized> Iterator for &'a mut I {
type Item = I::Item;
fn next(&mut self) -> Option<I::Item> { (**self).next() }
fn size_hint(&self) -> (usize, Option<usize>) { (**self).size_hint() }
fn nth(&mut self, n: usize) -> Option<Self::Item> {
(**self).nth(n)
}
}
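A small illustration of the forwarding added here; observable behavior matches the default `nth`, the override merely skips the extra indirection through the `&mut` wrapper:

```rust
fn main() {
    let mut it = 0..10;
    {
        let r = &mut it;
        // `nth(3)` consumes 0, 1, 2 and yields 3, exactly as on the iterator itself.
        assert_eq!(r.nth(3), Some(3));
    }
    // The underlying iterator has been advanced past the consumed elements.
    assert_eq!(it.next(), Some(4));
}
```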

View File

@ -225,12 +225,12 @@
//! often called 'iterator adapters', as they're a form of the 'adapter
//! pattern'.
//!
//! Common iterator adapters include [`map()`], [`take()`], and [`collect()`].
//! Common iterator adapters include [`map()`], [`take()`], and [`filter()`].
//! For more, see their documentation.
//!
//! [`map()`]: trait.Iterator.html#method.map
//! [`take()`]: trait.Iterator.html#method.take
//! [`collect()`]: trait.Iterator.html#method.collect
//! [`filter()`]: trait.Iterator.html#method.filter
//!
//! # Laziness
//!
@ -268,7 +268,7 @@
//! [`map()`]: trait.Iterator.html#method.map
//!
//! The two most common ways to evaluate an iterator are to use a `for` loop
//! like this, or using the [`collect()`] adapter to produce a new collection.
//! like this, or using the [`collect()`] method to produce a new collection.
//!
//! [`collect()`]: trait.Iterator.html#method.collect
//!
@ -368,7 +368,16 @@ impl<I> DoubleEndedIterator for Rev<I> where I: DoubleEndedIterator {
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> ExactSizeIterator for Rev<I>
where I: ExactSizeIterator + DoubleEndedIterator {}
where I: ExactSizeIterator + DoubleEndedIterator
{
fn len(&self) -> usize {
self.iter.len()
}
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<I> FusedIterator for Rev<I>
@ -425,7 +434,15 @@ impl<'a, I, T: 'a> DoubleEndedIterator for Cloned<I>
#[stable(feature = "iter_cloned", since = "1.1.0")]
impl<'a, I, T: 'a> ExactSizeIterator for Cloned<I>
where I: ExactSizeIterator<Item=&'a T>, T: Clone
{}
{
fn len(&self) -> usize {
self.it.len()
}
fn is_empty(&self) -> bool {
self.it.is_empty()
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, I, T: 'a> FusedIterator for Cloned<I>
@ -920,7 +937,7 @@ unsafe impl<A, B> TrustedLen for Zip<A, B>
/// you can also [`map()`] backwards:
///
/// ```rust
/// let v: Vec<i32> = vec![1, 2, 3].into_iter().rev().map(|x| x + 1).collect();
/// let v: Vec<i32> = vec![1, 2, 3].into_iter().map(|x| x + 1).rev().collect();
///
/// assert_eq!(v, [4, 3, 2]);
/// ```
@ -1007,7 +1024,16 @@ impl<B, I: DoubleEndedIterator, F> DoubleEndedIterator for Map<I, F> where
#[stable(feature = "rust1", since = "1.0.0")]
impl<B, I: ExactSizeIterator, F> ExactSizeIterator for Map<I, F>
where F: FnMut(I::Item) -> B {}
where F: FnMut(I::Item) -> B
{
fn len(&self) -> usize {
self.iter.len()
}
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<B, I: FusedIterator, F> FusedIterator for Map<I, F>
@ -1236,7 +1262,15 @@ impl<I> DoubleEndedIterator for Enumerate<I> where
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> ExactSizeIterator for Enumerate<I> where I: ExactSizeIterator {}
impl<I> ExactSizeIterator for Enumerate<I> where I: ExactSizeIterator {
fn len(&self) -> usize {
self.iter.len()
}
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[doc(hidden)]
unsafe impl<I> TrustedRandomAccess for Enumerate<I>
@ -1945,7 +1979,15 @@ impl<I> DoubleEndedIterator for Fuse<I>
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> ExactSizeIterator for Fuse<I> where I: ExactSizeIterator {}
impl<I> ExactSizeIterator for Fuse<I> where I: ExactSizeIterator {
fn len(&self) -> usize {
self.iter.len()
}
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
/// An iterator that calls a function with a reference to each element before
/// yielding it.
@ -2012,7 +2054,16 @@ impl<I: DoubleEndedIterator, F> DoubleEndedIterator for Inspect<I, F>
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: ExactSizeIterator, F> ExactSizeIterator for Inspect<I, F>
where F: FnMut(&I::Item) {}
where F: FnMut(&I::Item)
{
fn len(&self) -> usize {
self.iter.len()
}
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<I: FusedIterator, F> FusedIterator for Inspect<I, F>

View File

@ -552,7 +552,14 @@ pub trait ExactSizeIterator: Iterator {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for &'a mut I {}
impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for &'a mut I {
fn len(&self) -> usize {
(**self).len()
}
fn is_empty(&self) -> bool {
(**self).is_empty()
}
}
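A minimal sketch of the `is_empty` additions, assuming a nightly toolchain with the unstable `exact_size_is_empty` feature enabled (the method is still feature-gated at this point):

```rust
#![feature(exact_size_is_empty)]

fn main() {
    let xs = [1, 2, 3];
    assert!(!xs.iter().is_empty());
    assert!(xs[..0].iter().is_empty());

    // Adapters such as `rev`, `map`, and `enumerate` now forward
    // `len`/`is_empty` to their inner iterator.
    assert!(!xs.iter().rev().map(|x| x + 1).enumerate().is_empty());
}
```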
/// Trait to represent types that can be created by summing up an iterator.
///

View File

@ -89,7 +89,6 @@
#![feature(specialization)]
#![feature(staged_api)]
#![feature(unboxed_closures)]
#![cfg_attr(stage0, feature(question_mark))]
#![feature(never_type)]
#![feature(prelude_import)]

View File

@ -659,6 +659,16 @@ impl<T> Option<T> {
impl<'a, T: Clone> Option<&'a T> {
/// Maps an `Option<&T>` to an `Option<T>` by cloning the contents of the
/// option.
///
/// # Examples
///
/// ```
/// let x = 12;
/// let opt_x = Some(&x);
/// assert_eq!(opt_x, Some(&12));
/// let cloned = opt_x.cloned();
/// assert_eq!(cloned, Some(12));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn cloned(self) -> Option<T> {
self.map(|t| t.clone())

View File

@ -38,10 +38,14 @@ use cmp;
use fmt;
use intrinsics::assume;
use iter::*;
use ops::{self, RangeFull};
use ops::{FnMut, self};
use option::Option;
use option::Option::{None, Some};
use result::Result;
use result::Result::{Ok, Err};
use ptr;
use mem;
use marker;
use marker::{Copy, Send, Sync, Sized, self};
use iter_private::TrustedRandomAccess;
#[repr(C)]
@ -80,7 +84,8 @@ pub trait SliceExt {
#[stable(feature = "core", since = "1.6.0")]
fn chunks(&self, size: usize) -> Chunks<Self::Item>;
#[stable(feature = "core", since = "1.6.0")]
fn get(&self, index: usize) -> Option<&Self::Item>;
fn get<I>(&self, index: I) -> Option<&I::Output>
where I: SliceIndex<Self::Item>;
#[stable(feature = "core", since = "1.6.0")]
fn first(&self) -> Option<&Self::Item>;
#[stable(feature = "core", since = "1.6.0")]
@ -90,7 +95,8 @@ pub trait SliceExt {
#[stable(feature = "core", since = "1.6.0")]
fn last(&self) -> Option<&Self::Item>;
#[stable(feature = "core", since = "1.6.0")]
unsafe fn get_unchecked(&self, index: usize) -> &Self::Item;
unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
where I: SliceIndex<Self::Item>;
#[stable(feature = "core", since = "1.6.0")]
fn as_ptr(&self) -> *const Self::Item;
#[stable(feature = "core", since = "1.6.0")]
@ -108,7 +114,8 @@ pub trait SliceExt {
#[stable(feature = "core", since = "1.6.0")]
fn is_empty(&self) -> bool { self.len() == 0 }
#[stable(feature = "core", since = "1.6.0")]
fn get_mut(&mut self, index: usize) -> Option<&mut Self::Item>;
fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
where I: SliceIndex<Self::Item>;
#[stable(feature = "core", since = "1.6.0")]
fn iter_mut(&mut self) -> IterMut<Self::Item>;
#[stable(feature = "core", since = "1.6.0")]
@ -137,7 +144,8 @@ pub trait SliceExt {
#[stable(feature = "core", since = "1.6.0")]
fn reverse(&mut self);
#[stable(feature = "core", since = "1.6.0")]
unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut Self::Item;
unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
where I: SliceIndex<Self::Item>;
#[stable(feature = "core", since = "1.6.0")]
fn as_mut_ptr(&mut self) -> *mut Self::Item;
@ -258,8 +266,10 @@ impl<T> SliceExt for [T] {
}
#[inline]
fn get(&self, index: usize) -> Option<&T> {
if index < self.len() { Some(&self[index]) } else { None }
fn get<I>(&self, index: I) -> Option<&I::Output>
where I: SliceIndex<T>
{
index.get(self)
}
#[inline]
@ -284,8 +294,10 @@ impl<T> SliceExt for [T] {
}
#[inline]
unsafe fn get_unchecked(&self, index: usize) -> &T {
&*(self.as_ptr().offset(index as isize))
unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
where I: SliceIndex<T>
{
index.get_unchecked(self)
}
#[inline]
@ -323,8 +335,10 @@ impl<T> SliceExt for [T] {
}
#[inline]
fn get_mut(&mut self, index: usize) -> Option<&mut T> {
if index < self.len() { Some(&mut self[index]) } else { None }
fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
where I: SliceIndex<T>
{
index.get_mut(self)
}
#[inline]
@ -451,8 +465,10 @@ impl<T> SliceExt for [T] {
}
#[inline]
unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T {
&mut *self.as_mut_ptr().offset(index as isize)
unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
where I: SliceIndex<T>
{
index.get_unchecked_mut(self)
}
#[inline]
@ -515,23 +531,26 @@ impl<T> SliceExt for [T] {
}
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
impl<T> ops::Index<usize> for [T] {
type Output = T;
#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
impl<T, I> ops::Index<I> for [T]
where I: SliceIndex<T>
{
type Output = I::Output;
fn index(&self, index: usize) -> &T {
// NB built-in indexing
&(*self)[index]
#[inline]
fn index(&self, index: I) -> &I::Output {
index.index(self)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
impl<T> ops::IndexMut<usize> for [T] {
#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
impl<T, I> ops::IndexMut<I> for [T]
where I: SliceIndex<T>
{
#[inline]
fn index_mut(&mut self, index: usize) -> &mut T {
// NB built-in indexing
&mut (*self)[index]
fn index_mut(&mut self, index: I) -> &mut I::Output {
index.index_mut(self)
}
}
@ -547,205 +566,349 @@ fn slice_index_order_fail(index: usize, end: usize) -> ! {
panic!("slice index starts at {} but ends at {}", index, end);
}
/// A helper trait used for indexing operations.
#[unstable(feature = "slice_get_slice", issue = "35729")]
#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"]
pub trait SliceIndex<T> {
/// The output type returned by methods.
type Output: ?Sized;
/// Implements slicing with syntax `&self[begin .. end]`.
///
/// Returns a slice of self for the index range [`begin`..`end`).
///
/// This operation is `O(1)`.
///
/// # Panics
///
/// Requires that `begin <= end` and `end <= self.len()`,
/// otherwise slicing will panic.
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
impl<T> ops::Index<ops::Range<usize>> for [T] {
/// Returns a shared reference to the output at this location, if in
/// bounds.
fn get(self, slice: &[T]) -> Option<&Self::Output>;
/// Returns a mutable reference to the output at this location, if in
/// bounds.
fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output>;
/// Returns a shared reference to the output at this location, without
/// performing any bounds checking.
unsafe fn get_unchecked(self, slice: &[T]) -> &Self::Output;
/// Returns a mutable reference to the output at this location, without
/// performing any bounds checking.
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut Self::Output;
/// Returns a shared reference to the output at this location, panicking
/// if out of bounds.
fn index(self, slice: &[T]) -> &Self::Output;
/// Returns a mutable reference to the output at this location, panicking
/// if out of bounds.
fn index_mut(self, slice: &mut [T]) -> &mut Self::Output;
}
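A quick sketch of the index shapes that all route through this one trait on a toolchain with these impls (the inclusive-range forms additionally need unstable range syntax, so they are left out here):

```rust
fn main() {
    let v: &[i32] = &[1, 2, 3, 4, 5];

    // usize: `Output = T`
    assert_eq!(v[2], 3);
    assert_eq!(v.get(2), Some(&3));

    // Range, RangeTo, RangeFrom, RangeFull: `Output = [T]`
    assert_eq!(&v[1..3], &[2, 3]);
    assert_eq!(&v[..2], &[1, 2]);
    assert_eq!(&v[3..], &[4, 5]);
    assert_eq!(&v[..], &[1, 2, 3, 4, 5]);

    // The fallible counterparts go through the same impls.
    assert_eq!(v.get(4..10), None);
}
```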
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
impl<T> SliceIndex<T> for usize {
type Output = T;
#[inline]
fn get(self, slice: &[T]) -> Option<&T> {
if self < slice.len() {
unsafe {
Some(self.get_unchecked(slice))
}
} else {
None
}
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
if self < slice.len() {
unsafe {
Some(self.get_unchecked_mut(slice))
}
} else {
None
}
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &T {
&*slice.as_ptr().offset(self as isize)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T {
&mut *slice.as_mut_ptr().offset(self as isize)
}
#[inline]
fn index(self, slice: &[T]) -> &T {
// NB: use intrinsic indexing
&(*slice)[self]
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut T {
// NB: use intrinsic indexing
&mut (*slice)[self]
}
}
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
impl<T> SliceIndex<T> for ops::Range<usize> {
type Output = [T];
#[inline]
fn index(&self, index: ops::Range<usize>) -> &[T] {
if index.start > index.end {
slice_index_order_fail(index.start, index.end);
} else if index.end > self.len() {
slice_index_len_fail(index.end, self.len());
fn get(self, slice: &[T]) -> Option<&[T]> {
if self.start > self.end || self.end > slice.len() {
None
} else {
unsafe {
Some(self.get_unchecked(slice))
}
}
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
if self.start > self.end || self.end > slice.len() {
None
} else {
unsafe {
Some(self.get_unchecked_mut(slice))
}
}
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
from_raw_parts(slice.as_ptr().offset(self.start as isize), self.end - self.start)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
from_raw_parts_mut(slice.as_mut_ptr().offset(self.start as isize), self.end - self.start)
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
if self.start > self.end {
slice_index_order_fail(self.start, self.end);
} else if self.end > slice.len() {
slice_index_len_fail(self.end, slice.len());
}
unsafe {
from_raw_parts (
self.as_ptr().offset(index.start as isize),
index.end - index.start
)
self.get_unchecked(slice)
}
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
if self.start > self.end {
slice_index_order_fail(self.start, self.end);
} else if self.end > slice.len() {
slice_index_len_fail(self.end, slice.len());
}
unsafe {
self.get_unchecked_mut(slice)
}
}
}
/// Implements slicing with syntax `&self[.. end]`.
///
/// Returns a slice of self from the beginning until but not including
/// the index `end`.
///
/// Equivalent to `&self[0 .. end]`
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
impl<T> ops::Index<ops::RangeTo<usize>> for [T] {
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
impl<T> SliceIndex<T> for ops::RangeTo<usize> {
type Output = [T];
#[inline]
fn index(&self, index: ops::RangeTo<usize>) -> &[T] {
self.index(0 .. index.end)
fn get(self, slice: &[T]) -> Option<&[T]> {
(0..self.end).get(slice)
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
(0..self.end).get_mut(slice)
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
(0..self.end).get_unchecked(slice)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
(0..self.end).get_unchecked_mut(slice)
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
(0..self.end).index(slice)
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
(0..self.end).index_mut(slice)
}
}
/// Implements slicing with syntax `&self[begin ..]`.
///
/// Returns a slice of self from and including the index `begin` until the end.
///
/// Equivalent to `&self[begin .. self.len()]`
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
impl<T> ops::Index<ops::RangeFrom<usize>> for [T] {
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
impl<T> SliceIndex<T> for ops::RangeFrom<usize> {
type Output = [T];
#[inline]
fn index(&self, index: ops::RangeFrom<usize>) -> &[T] {
self.index(index.start .. self.len())
fn get(self, slice: &[T]) -> Option<&[T]> {
(self.start..slice.len()).get(slice)
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
(self.start..slice.len()).get_mut(slice)
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
(self.start..slice.len()).get_unchecked(slice)
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
(self.start..slice.len()).get_unchecked_mut(slice)
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
(self.start..slice.len()).index(slice)
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
(self.start..slice.len()).index_mut(slice)
}
}
/// Implements slicing with syntax `&self[..]`.
///
/// Returns a slice of the whole slice. This operation cannot panic.
///
/// Equivalent to `&self[0 .. self.len()]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ops::Index<RangeFull> for [T] {
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
impl<T> SliceIndex<T> for ops::RangeFull {
type Output = [T];
#[inline]
fn index(&self, _index: RangeFull) -> &[T] {
self
fn get(self, slice: &[T]) -> Option<&[T]> {
Some(slice)
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
Some(slice)
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
slice
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
slice
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
slice
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
slice
}
}
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
impl<T> ops::Index<ops::RangeInclusive<usize>> for [T] {
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
impl<T> SliceIndex<T> for ops::RangeInclusive<usize> {
type Output = [T];
#[inline]
fn index(&self, index: ops::RangeInclusive<usize>) -> &[T] {
match index {
fn get(self, slice: &[T]) -> Option<&[T]> {
match self {
ops::RangeInclusive::Empty { .. } => Some(&[]),
ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => None,
ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).get(slice),
}
}
#[inline]
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
match self {
ops::RangeInclusive::Empty { .. } => Some(&mut []),
ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => None,
ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).get_mut(slice),
}
}
#[inline]
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
match self {
ops::RangeInclusive::Empty { .. } => &[],
ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() =>
panic!("attempted to index slice up to maximum usize"),
ops::RangeInclusive::NonEmpty { start, end } =>
self.index(start .. end+1)
ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).get_unchecked(slice),
}
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
match self {
ops::RangeInclusive::Empty { .. } => &mut [],
ops::RangeInclusive::NonEmpty { start, end } => {
(start..end + 1).get_unchecked_mut(slice)
}
}
}
#[inline]
fn index(self, slice: &[T]) -> &[T] {
match self {
ops::RangeInclusive::Empty { .. } => &[],
ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => {
panic!("attempted to index slice up to maximum usize");
},
ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).index(slice),
}
}
#[inline]
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
match self {
ops::RangeInclusive::Empty { .. } => &mut [],
ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => {
panic!("attempted to index slice up to maximum usize");
},
ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).index_mut(slice),
}
}
}
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
impl<T> ops::Index<ops::RangeToInclusive<usize>> for [T] {
#[stable(feature = "slice-get-slice-impls", since = "1.13.0")]
impl<T> SliceIndex<T> for ops::RangeToInclusive<usize> {
type Output = [T];
#[inline]
fn index(&self, index: ops::RangeToInclusive<usize>) -> &[T] {
self.index(0...index.end)
fn get(self, slice: &[T]) -> Option<&[T]> {
(0...self.end).get(slice)
}
}
/// Implements mutable slicing with syntax `&mut self[begin .. end]`.
///
/// Returns a slice of self for the index range [`begin`..`end`).
///
/// This operation is `O(1)`.
///
/// # Panics
///
/// Requires that `begin <= end` and `end <= self.len()`,
/// otherwise slicing will panic.
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
impl<T> ops::IndexMut<ops::Range<usize>> for [T] {
#[inline]
fn index_mut(&mut self, index: ops::Range<usize>) -> &mut [T] {
if index.start > index.end {
slice_index_order_fail(index.start, index.end);
} else if index.end > self.len() {
slice_index_len_fail(index.end, self.len());
}
unsafe {
from_raw_parts_mut(
self.as_mut_ptr().offset(index.start as isize),
index.end - index.start
)
}
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
(0...self.end).get_mut(slice)
}
}
/// Implements mutable slicing with syntax `&mut self[.. end]`.
///
/// Returns a slice of self from the beginning until but not including
/// the index `end`.
///
/// Equivalent to `&mut self[0 .. end]`
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
impl<T> ops::IndexMut<ops::RangeTo<usize>> for [T] {
#[inline]
fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut [T] {
self.index_mut(0 .. index.end)
unsafe fn get_unchecked(self, slice: &[T]) -> &[T] {
(0...self.end).get_unchecked(slice)
}
}
/// Implements mutable slicing with syntax `&mut self[begin ..]`.
///
/// Returns a slice of self from and including the index `begin` until the end.
///
/// Equivalent to `&mut self[begin .. self.len()]`
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
impl<T> ops::IndexMut<ops::RangeFrom<usize>> for [T] {
#[inline]
fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut [T] {
let len = self.len();
self.index_mut(index.start .. len)
unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] {
(0...self.end).get_unchecked_mut(slice)
}
}
/// Implements mutable slicing with syntax `&mut self[..]`.
///
/// Returns a slice of the whole slice. This operation can not panic.
///
/// Equivalent to `&mut self[0 .. self.len()]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ops::IndexMut<RangeFull> for [T] {
#[inline]
fn index_mut(&mut self, _index: RangeFull) -> &mut [T] {
self
fn index(self, slice: &[T]) -> &[T] {
(0...self.end).index(slice)
}
}
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
impl<T> ops::IndexMut<ops::RangeInclusive<usize>> for [T] {
#[inline]
fn index_mut(&mut self, index: ops::RangeInclusive<usize>) -> &mut [T] {
match index {
ops::RangeInclusive::Empty { .. } => &mut [],
ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() =>
panic!("attempted to index slice up to maximum usize"),
ops::RangeInclusive::NonEmpty { start, end } =>
self.index_mut(start .. end+1)
}
}
}
#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
#[rustc_on_unimplemented = "slice indices are of type `usize`"]
impl<T> ops::IndexMut<ops::RangeToInclusive<usize>> for [T] {
#[inline]
fn index_mut(&mut self, index: ops::RangeToInclusive<usize>) -> &mut [T] {
self.index_mut(0...index.end)
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
(0...self.end).index_mut(slice)
}
}
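A rough usage sketch of the new `SliceIndex`-based range indexing above (not part of this commit; assumes a nightly toolchain with the unstable `inclusive_range_syntax` feature, and possibly `inclusive_range`, enabled):

#![feature(inclusive_range_syntax)]

fn main() {
    let v = [10, 20, 30, 40];
    // Checked access goes through SliceIndex::get and returns None when out of bounds.
    assert_eq!(v.get(1...2), Some(&[20, 30][..]));
    assert_eq!(v.get(2...7), None);
    // Panicking access goes through SliceIndex::index, as used by the `v[..]` sugar.
    assert_eq!(&v[1...2], &[20, 30][..]);
    // RangeToInclusive delegates to `0...end`.
    assert_eq!(v.get(...1), Some(&[10, 20][..]));
}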
@ -983,7 +1146,11 @@ impl<'a, T> Iter<'a, T> {
iterator!{struct Iter -> *const T, &'a T}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
impl<'a, T> ExactSizeIterator for Iter<'a, T> {
fn is_empty(&self) -> bool {
self.ptr == self.end
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T> FusedIterator for Iter<'a, T> {}
@ -1107,7 +1274,11 @@ impl<'a, T> IterMut<'a, T> {
iterator!{struct IterMut -> *mut T, &'a mut T}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {
fn is_empty(&self) -> bool {
self.ptr == self.end
}
}
#[unstable(feature = "fused", issue = "35602")]
impl<'a, T> FusedIterator for IterMut<'a, T> {}
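A quick, nightly-only sketch of what the specialized `is_empty` above gives callers; `ExactSizeIterator::is_empty` is still unstable at this point (gated, as far as I know, by `exact_size_is_empty`), and the same override is applied to `str::Bytes` in the next hunk. Not part of the commit:

#![feature(exact_size_is_empty)]

fn main() {
    let v = [1, 2, 3];
    assert!(!v.iter().is_empty());      // ptr != end
    assert!(v[3..].iter().is_empty());  // empty tail: ptr == end
}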

View File

@ -618,6 +618,11 @@ impl<'a> ExactSizeIterator for Bytes<'a> {
fn len(&self) -> usize {
self.0.len()
}
#[inline]
fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
#[unstable(feature = "fused", issue = "35602")]

View File

@ -40,7 +40,7 @@
extern crate core;
extern crate test;
extern crate libc;
extern crate rustc_unicode;
extern crate std_unicode;
extern crate rand;
mod any;

View File

@ -180,3 +180,47 @@ fn test_windows_last() {
let c2 = v2.windows(2);
assert_eq!(c2.last().unwrap()[0], 3);
}
#[test]
fn get_range() {
let v: &[i32] = &[0, 1, 2, 3, 4, 5];
assert_eq!(v.get(..), Some(&[0, 1, 2, 3, 4, 5][..]));
assert_eq!(v.get(..2), Some(&[0, 1][..]));
assert_eq!(v.get(2..), Some(&[2, 3, 4, 5][..]));
assert_eq!(v.get(1..4), Some(&[1, 2, 3][..]));
assert_eq!(v.get(7..), None);
assert_eq!(v.get(7..10), None);
}
#[test]
fn get_mut_range() {
let mut v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5];
assert_eq!(v.get_mut(..), Some(&mut [0, 1, 2, 3, 4, 5][..]));
assert_eq!(v.get_mut(..2), Some(&mut [0, 1][..]));
assert_eq!(v.get_mut(2..), Some(&mut [2, 3, 4, 5][..]));
assert_eq!(v.get_mut(1..4), Some(&mut [1, 2, 3][..]));
assert_eq!(v.get_mut(7..), None);
assert_eq!(v.get_mut(7..10), None);
}
#[test]
fn get_unchecked_range() {
unsafe {
let v: &[i32] = &[0, 1, 2, 3, 4, 5];
assert_eq!(v.get_unchecked(..), &[0, 1, 2, 3, 4, 5][..]);
assert_eq!(v.get_unchecked(..2), &[0, 1][..]);
assert_eq!(v.get_unchecked(2..), &[2, 3, 4, 5][..]);
assert_eq!(v.get_unchecked(1..4), &[1, 2, 3][..]);
}
}
#[test]
fn get_unchecked_mut_range() {
unsafe {
let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5];
assert_eq!(v.get_unchecked_mut(..), &mut [0, 1, 2, 3, 4, 5][..]);
assert_eq!(v.get_unchecked_mut(..2), &mut [0, 1][..]);
assert_eq!(v.get_unchecked_mut(2..), &mut[2, 3, 4, 5][..]);
assert_eq!(v.get_unchecked_mut(1..4), &mut [1, 2, 3][..]);
}
}

View File

@ -295,7 +295,6 @@
#![cfg_attr(not(stage0), deny(warnings))]
#![feature(str_escape)]
#![cfg_attr(stage0, feature(question_mark))]
use self::LabelText::*;

@ -1 +1 @@
Subproject commit 6e8c1b490ccbe5e84d248bab883515bc85394b5f
Subproject commit 0ac39c5ccf6a04395b7c40dd62321cb91f63f160

View File

@ -6,6 +6,7 @@ version = "0.0.0"
[lib]
path = "lib.rs"
test = false
bench = false
[dependencies]
core = { path = "../libcore" }

View File

@ -6,6 +6,7 @@ version = "0.0.0"
[lib]
path = "lib.rs"
test = false
bench = false
[dependencies]
alloc = { path = "../liballoc" }

View File

@ -10,8 +10,6 @@
use rustc_data_structures::graph;
use cfg::*;
use hir::def::Def;
use hir::pat_util;
use ty::{self, TyCtxt};
use syntax::ast;
use syntax::ptr::P;
@ -100,7 +98,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
fn pat(&mut self, pat: &hir::Pat, pred: CFGIndex) -> CFGIndex {
match pat.node {
PatKind::Binding(.., None) |
PatKind::Path(..) |
PatKind::Path(_) |
PatKind::Lit(..) |
PatKind::Range(..) |
PatKind::Wild => {
@ -284,7 +282,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
hir::ExprBreak(label, ref opt_expr) => {
let v = self.opt_expr(opt_expr, pred);
let loop_scope = self.find_scope(expr, label.map(|l| l.node));
let loop_scope = self.find_scope(expr, label);
let b = self.add_ast_node(expr.id, &[v]);
self.add_exiting_edge(expr, b,
loop_scope, loop_scope.break_index);
@ -292,7 +290,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
}
hir::ExprAgain(label) => {
let loop_scope = self.find_scope(expr, label.map(|l| l.node));
let loop_scope = self.find_scope(expr, label);
let a = self.add_ast_node(expr.id, &[pred]);
self.add_exiting_edge(expr, a,
loop_scope, loop_scope.continue_index);
@ -361,7 +359,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
hir::ExprClosure(..) |
hir::ExprLit(..) |
hir::ExprPath(..) => {
hir::ExprPath(_) => {
self.straightline(expr, pred, None::<hir::Expr>.iter())
}
}
@ -457,7 +455,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
// Visit the guard expression
let guard_exit = self.expr(&guard, guard_start);
let this_has_bindings = pat_util::pat_contains_bindings_or_wild(&pat);
let this_has_bindings = pat.contains_bindings_or_wild();
// If both this pattern and the previous pattern
// were free of bindings, they must consist only
@ -570,23 +568,16 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
fn find_scope(&self,
expr: &hir::Expr,
label: Option<ast::Name>) -> LoopScope {
if label.is_none() {
return *self.loop_scopes.last().unwrap();
}
match self.tcx.expect_def(expr.id) {
Def::Label(loop_id) => {
label: Option<hir::Label>) -> LoopScope {
match label {
None => *self.loop_scopes.last().unwrap(),
Some(label) => {
for l in &self.loop_scopes {
if l.loop_id == loop_id {
if l.loop_id == label.loop_id {
return *l;
}
}
span_bug!(expr.span, "no loop scope for id {}", loop_id);
}
r => {
span_bug!(expr.span, "bad entry `{:?}` in def_map for label", r);
span_bug!(expr.span, "no loop scope for id {}", label.loop_id);
}
}
}

View File

@ -42,6 +42,10 @@ pub enum DepNode<D: Clone + Debug> {
// Represents the HIR node with the given node-id
Hir(D),
// Represents the body of a function or method. The def-id is that of the
// function/method.
HirBody(D),
// Represents the metadata for a given HIR node, typically found
// in an extern crate.
MetaData(D),
@ -59,6 +63,7 @@ pub enum DepNode<D: Clone + Debug> {
PluginRegistrar,
StabilityIndex,
CollectItem(D),
CollectItemSig(D),
Coherence,
EffectCheck,
Liveness,
@ -90,7 +95,7 @@ pub enum DepNode<D: Clone + Debug> {
RvalueCheck(D),
Reachability,
DeadCheck,
StabilityCheck,
StabilityCheck(D),
LateLintCheck,
TransCrate,
TransCrateItem(D),
@ -105,7 +110,6 @@ pub enum DepNode<D: Clone + Debug> {
// predicates for an item wind up in `ItemSignature`).
AssociatedItems(D),
ItemSignature(D),
FieldTy(D),
SizedConstraint(D),
AssociatedItemDefIds(D),
InherentImpls(D),
@ -150,12 +154,12 @@ impl<D: Clone + Debug> DepNode<D> {
CollectItem,
BorrowCheck,
Hir,
HirBody,
TransCrateItem,
TypeckItemType,
TypeckItemBody,
AssociatedItems,
ItemSignature,
FieldTy,
AssociatedItemDefIds,
InherentImpls,
TraitImpls,
@ -189,7 +193,6 @@ impl<D: Clone + Debug> DepNode<D> {
Privacy => Some(Privacy),
Reachability => Some(Reachability),
DeadCheck => Some(DeadCheck),
StabilityCheck => Some(StabilityCheck),
LateLintCheck => Some(LateLintCheck),
TransCrate => Some(TransCrate),
TransWriteMetadata => Some(TransWriteMetadata),
@ -200,8 +203,10 @@ impl<D: Clone + Debug> DepNode<D> {
WorkProduct(ref id) => Some(WorkProduct(id.clone())),
Hir(ref d) => op(d).map(Hir),
HirBody(ref d) => op(d).map(HirBody),
MetaData(ref d) => op(d).map(MetaData),
CollectItem(ref d) => op(d).map(CollectItem),
CollectItemSig(ref d) => op(d).map(CollectItemSig),
CoherenceCheckImpl(ref d) => op(d).map(CoherenceCheckImpl),
CoherenceOverlapCheck(ref d) => op(d).map(CoherenceOverlapCheck),
CoherenceOverlapCheckSpecial(ref d) => op(d).map(CoherenceOverlapCheckSpecial),
@ -217,11 +222,11 @@ impl<D: Clone + Debug> DepNode<D> {
Mir(ref d) => op(d).map(Mir),
BorrowCheck(ref d) => op(d).map(BorrowCheck),
RvalueCheck(ref d) => op(d).map(RvalueCheck),
StabilityCheck(ref d) => op(d).map(StabilityCheck),
TransCrateItem(ref d) => op(d).map(TransCrateItem),
TransInlinedItem(ref d) => op(d).map(TransInlinedItem),
AssociatedItems(ref d) => op(d).map(AssociatedItems),
ItemSignature(ref d) => op(d).map(ItemSignature),
FieldTy(ref d) => op(d).map(FieldTy),
SizedConstraint(ref d) => op(d).map(SizedConstraint),
AssociatedItemDefIds(ref d) => op(d).map(AssociatedItemDefIds),
InherentImpls(ref d) => op(d).map(InherentImpls),

View File

@ -129,8 +129,8 @@ impl<'a> CheckAttrVisitor<'a> {
}
}
impl<'a> Visitor for CheckAttrVisitor<'a> {
fn visit_item(&mut self, item: &ast::Item) {
impl<'a> Visitor<'a> for CheckAttrVisitor<'a> {
fn visit_item(&mut self, item: &'a ast::Item) {
let target = Target::from_item(item);
for attr in &item.attrs {
self.check_attribute(attr, target);

View File

@ -83,14 +83,6 @@ impl PathResolution {
PathResolution { base_def: def, depth: 0 }
}
/// Get the definition, if fully resolved, otherwise panic.
pub fn full_def(&self) -> Def {
if self.depth != 0 {
bug!("path not fully resolved: {:?}", self);
}
self.base_def
}
pub fn kind_name(&self) -> &'static str {
if self.depth != 0 {
"associated item"

View File

@ -38,6 +38,7 @@ use syntax::ast::{NodeId, CRATE_NODE_ID, Name, Attribute};
use syntax::codemap::Spanned;
use syntax_pos::Span;
use hir::*;
use hir::def::Def;
use hir::map::Map;
use super::itemlikevisit::DeepVisitor;
@ -66,6 +67,62 @@ impl<'a> FnKind<'a> {
}
}
/// Specifies what nested things a visitor wants to visit. The most
/// common choice is `OnlyBodies`, which will cause the visitor to
/// visit fn bodies for fns that it encounters, but skip over nested
/// item-like things.
///
/// See the comments on `ItemLikeVisitor` for more details on the overall
/// visit strategy.
pub enum NestedVisitorMap<'this, 'tcx: 'this> {
/// Do not visit any nested things. When you add a new
/// "non-nested" thing, you will want to audit such uses to see if
/// they remain valid.
///
/// Use this if you are only walking some particular kind of tree
/// (i.e., a type, or fn signature) and you don't want to thread a
/// HIR map around.
None,
/// Do not visit nested item-like things, but visit nested things
/// that are inside of an item-like.
///
/// **This is the most common choice.** A very common pattern is
/// to use `tcx.visit_all_item_likes_in_krate()` as an outer loop,
/// and to have the visitor that visits the contents of each item
/// using this setting.
OnlyBodies(&'this Map<'tcx>),
/// Visit all nested things, including item-likes.
///
/// **This is an unusual choice.** It is used when you want to
/// process everything within their lexical context. Typically you
/// kick off the visit by doing `walk_krate()`.
All(&'this Map<'tcx>),
}
impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> {
/// Returns the map to use for an "intra item-like" thing (if any).
/// e.g., function body.
pub fn intra(self) -> Option<&'this Map<'tcx>> {
match self {
NestedVisitorMap::None => None,
NestedVisitorMap::OnlyBodies(map) => Some(map),
NestedVisitorMap::All(map) => Some(map),
}
}
/// Returns the map to use for an "item-like" thing (if any).
/// e.g., item, impl-item.
pub fn inter(self) -> Option<&'this Map<'tcx>> {
match self {
NestedVisitorMap::None => None,
NestedVisitorMap::OnlyBodies(_) => None,
NestedVisitorMap::All(map) => Some(map),
}
}
}
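As a hedged illustration of the `OnlyBodies` mode documented above (a sketch using the names from this diff, not code taken from the commit): a visitor that counts expressions, walking into fn bodies through the HIR map but leaving nested item-like things to an outer loop.

struct ExprCounter<'a, 'tcx: 'a> {
    map: &'a Map<'tcx>,
    count: usize,
}

impl<'a, 'tcx> Visitor<'tcx> for ExprCounter<'a, 'tcx> {
    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
        // Bodies referenced by ExprId are looked up in the map and visited;
        // nested item-like things are skipped here.
        NestedVisitorMap::OnlyBodies(&self.map)
    }

    fn visit_expr(&mut self, expr: &'tcx Expr) {
        self.count += 1;
        intravisit::walk_expr(self, expr);
    }
}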
/// Each method of the Visitor trait is a hook to be potentially
/// overridden. Each method's default implementation recursively visits
/// the substructure of the input via the corresponding `walk` method;
@ -87,13 +144,14 @@ pub trait Visitor<'v> : Sized {
// Nested items.
/// The default versions of the `visit_nested_XXX` routines invoke
/// this method to get a map to use; if they get back `None`, they
/// just skip nested things. Otherwise, they will lookup the
/// nested item-like things in the map and visit it. So the best
/// way to implement a nested visitor is to override this method
/// to return a `Map`; one advantage of this is that if we add
/// more types of nested things in the future, they will
/// automatically work.
/// this method to get a map to use. By selecting an enum variant,
/// you control which kinds of nested HIR are visited; see
/// `NestedVisitorMap` for details. By "nested HIR", we are
/// referring to bits of HIR that are not directly embedded within
/// one another but rather indirectly, through a table in the
/// crate. This is done to control dependencies during incremental
/// compilation: the non-inline bits of HIR can be tracked and
/// hashed separately.
///
/// **If for some reason you want the nested behavior, but don't
/// have a `Map` at your disposal:** then you should override the
@ -101,9 +159,7 @@ pub trait Visitor<'v> : Sized {
/// `panic!()`. This way, if a new `visit_nested_XXX` variant is
/// added in the future, we will see the panic in your code and
/// fix it appropriately.
fn nested_visit_map(&mut self) -> Option<&Map<'v>> {
None
}
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'v>;
/// Invoked when a nested item is encountered. By default does
/// nothing unless you override `nested_visit_map` to return
@ -115,8 +171,7 @@ pub trait Visitor<'v> : Sized {
/// but cannot supply a `Map`; see `nested_visit_map` for advice.
#[allow(unused_variables)]
fn visit_nested_item(&mut self, id: ItemId) {
let opt_item = self.nested_visit_map()
.map(|map| map.expect_item(id.id));
let opt_item = self.nested_visit_map().inter().map(|map| map.expect_item(id.id));
if let Some(item) = opt_item {
self.visit_item(item);
}
@ -127,13 +182,23 @@ pub trait Visitor<'v> : Sized {
/// method.
#[allow(unused_variables)]
fn visit_nested_impl_item(&mut self, id: ImplItemId) {
let opt_item = self.nested_visit_map()
.map(|map| map.impl_item(id));
let opt_item = self.nested_visit_map().inter().map(|map| map.impl_item(id));
if let Some(item) = opt_item {
self.visit_impl_item(item);
}
}
/// Invoked to visit the body of a function, method or closure. Like
/// visit_nested_item, does nothing by default unless you override
/// `nested_visit_map` to return `Some(_)`, in which case it will walk the
/// body.
fn visit_body(&mut self, id: ExprId) {
let opt_expr = self.nested_visit_map().intra().map(|map| map.expr(id));
if let Some(expr) = opt_expr {
self.visit_expr(expr);
}
}
/// Visit the top-level item and (optionally) nested items / impl items. See
/// `visit_nested_item` for details.
fn visit_item(&mut self, i: &'v Item) {
@ -155,6 +220,9 @@ pub trait Visitor<'v> : Sized {
fn visit_id(&mut self, _node_id: NodeId) {
// Nothing to do.
}
fn visit_def_mention(&mut self, _def: Def) {
// Nothing to do.
}
fn visit_name(&mut self, _span: Span, _name: Name) {
// Nothing to do.
}
@ -196,7 +264,7 @@ pub trait Visitor<'v> : Sized {
fn visit_where_predicate(&mut self, predicate: &'v WherePredicate) {
walk_where_predicate(self, predicate)
}
fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: &'v Expr, s: Span, id: NodeId) {
fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: ExprId, s: Span, id: NodeId) {
walk_fn(self, fk, fd, b, s, id)
}
fn visit_trait_item(&mut self, ti: &'v TraitItem) {
@ -244,12 +312,12 @@ pub trait Visitor<'v> : Sized {
fn visit_lifetime_def(&mut self, lifetime: &'v LifetimeDef) {
walk_lifetime_def(self, lifetime)
}
fn visit_qpath(&mut self, qpath: &'v QPath, id: NodeId, span: Span) {
walk_qpath(self, qpath, id, span)
}
fn visit_path(&mut self, path: &'v Path, _id: NodeId) {
walk_path(self, path)
}
fn visit_path_list_item(&mut self, prefix: &'v Path, item: &'v PathListItem) {
walk_path_list_item(self, prefix, item)
}
fn visit_path_segment(&mut self, path_span: Span, path_segment: &'v PathSegment) {
walk_path_segment(self, path_span, path_segment)
}
@ -349,23 +417,9 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) {
visitor.visit_id(item.id);
walk_opt_name(visitor, item.span, opt_name)
}
ItemUse(ref vp) => {
ItemUse(ref path, _) => {
visitor.visit_id(item.id);
match vp.node {
ViewPathSimple(name, ref path) => {
visitor.visit_name(vp.span, name);
visitor.visit_path(path, item.id);
}
ViewPathGlob(ref path) => {
visitor.visit_path(path, item.id);
}
ViewPathList(ref prefix, ref list) => {
visitor.visit_path(prefix, item.id);
for item in list {
visitor.visit_path_list_item(prefix, item)
}
}
}
visitor.visit_path(path, item.id);
}
ItemStatic(ref typ, _, ref expr) |
ItemConst(ref typ, ref expr) => {
@ -373,7 +427,7 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) {
visitor.visit_ty(typ);
visitor.visit_expr(expr);
}
ItemFn(ref declaration, unsafety, constness, abi, ref generics, ref body) => {
ItemFn(ref declaration, unsafety, constness, abi, ref generics, body_id) => {
visitor.visit_fn(FnKind::ItemFn(item.name,
generics,
unsafety,
@ -382,7 +436,7 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) {
&item.vis,
&item.attrs),
declaration,
body,
body_id,
item.span,
item.id)
}
@ -481,11 +535,8 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) {
walk_fn_decl(visitor, &function_declaration.decl);
walk_list!(visitor, visit_lifetime_def, &function_declaration.lifetimes);
}
TyPath(ref maybe_qself, ref path) => {
if let Some(ref qself) = *maybe_qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(path, typ.id);
TyPath(ref qpath) => {
visitor.visit_qpath(qpath, typ.id, typ.span);
}
TyObjectSum(ref ty, ref bounds) => {
visitor.visit_ty(ty);
@ -508,18 +559,26 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) {
}
}
pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) {
for segment in &path.segments {
visitor.visit_path_segment(path.span, segment);
pub fn walk_qpath<'v, V: Visitor<'v>>(visitor: &mut V, qpath: &'v QPath, id: NodeId, span: Span) {
match *qpath {
QPath::Resolved(ref maybe_qself, ref path) => {
if let Some(ref qself) = *maybe_qself {
visitor.visit_ty(qself);
}
visitor.visit_path(path, id)
}
QPath::TypeRelative(ref qself, ref segment) => {
visitor.visit_ty(qself);
visitor.visit_path_segment(span, segment);
}
}
}
pub fn walk_path_list_item<'v, V>(visitor: &mut V, _prefix: &'v Path, item: &'v PathListItem)
where V: Visitor<'v>,
{
visitor.visit_id(item.node.id);
visitor.visit_name(item.span, item.node.name);
walk_opt_name(visitor, item.span, item.node.rename);
pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) {
visitor.visit_def_mention(path.def);
for segment in &path.segments {
visitor.visit_path_segment(path.span, segment);
}
}
pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V,
@ -555,18 +614,15 @@ pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V,
pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) {
visitor.visit_id(pattern.id);
match pattern.node {
PatKind::TupleStruct(ref path, ref children, _) => {
visitor.visit_path(path, pattern.id);
PatKind::TupleStruct(ref qpath, ref children, _) => {
visitor.visit_qpath(qpath, pattern.id, pattern.span);
walk_list!(visitor, visit_pat, children);
}
PatKind::Path(ref opt_qself, ref path) => {
if let Some(ref qself) = *opt_qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(path, pattern.id)
PatKind::Path(ref qpath) => {
visitor.visit_qpath(qpath, pattern.id, pattern.span);
}
PatKind::Struct(ref path, ref fields, _) => {
visitor.visit_path(path, pattern.id);
PatKind::Struct(ref qpath, ref fields, _) => {
visitor.visit_qpath(qpath, pattern.id, pattern.span);
for field in fields {
visitor.visit_name(field.span, field.node.name);
visitor.visit_pat(&field.node.pat)
@ -579,7 +635,8 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) {
PatKind::Ref(ref subpattern, _) => {
visitor.visit_pat(subpattern)
}
PatKind::Binding(_, ref pth1, ref optional_subpattern) => {
PatKind::Binding(_, def_id, ref pth1, ref optional_subpattern) => {
visitor.visit_def_mention(Def::Local(def_id));
visitor.visit_name(pth1.span, pth1.node);
walk_list!(visitor, visit_pat, optional_subpattern);
}
@ -704,13 +761,25 @@ pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, function_kind: FnKind<'
pub fn walk_fn<'v, V: Visitor<'v>>(visitor: &mut V,
function_kind: FnKind<'v>,
function_declaration: &'v FnDecl,
function_body: &'v Expr,
body_id: ExprId,
_span: Span,
id: NodeId) {
visitor.visit_id(id);
walk_fn_decl(visitor, function_declaration);
walk_fn_kind(visitor, function_kind);
visitor.visit_expr(function_body)
visitor.visit_body(body_id)
}
pub fn walk_fn_with_body<'v, V: Visitor<'v>>(visitor: &mut V,
function_kind: FnKind<'v>,
function_declaration: &'v FnDecl,
body: &'v Expr,
_span: Span,
id: NodeId) {
visitor.visit_id(id);
walk_fn_decl(visitor, function_declaration);
walk_fn_kind(visitor, function_kind);
visitor.visit_expr(body)
}
pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem) {
@ -727,13 +796,13 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai
visitor.visit_generics(&sig.generics);
walk_fn_decl(visitor, &sig.decl);
}
MethodTraitItem(ref sig, Some(ref body)) => {
MethodTraitItem(ref sig, Some(body_id)) => {
visitor.visit_fn(FnKind::Method(trait_item.name,
sig,
None,
&trait_item.attrs),
&sig.decl,
body,
body_id,
trait_item.span,
trait_item.id);
}
@ -759,13 +828,13 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplIt
visitor.visit_ty(ty);
visitor.visit_expr(expr);
}
ImplItemKind::Method(ref sig, ref body) => {
ImplItemKind::Method(ref sig, body_id) => {
visitor.visit_fn(FnKind::Method(impl_item.name,
sig,
Some(&impl_item.vis),
&impl_item.attrs),
&sig.decl,
body,
body_id,
impl_item.span,
impl_item.id);
}
@ -840,8 +909,8 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
visitor.visit_expr(element);
visitor.visit_expr(count)
}
ExprStruct(ref path, ref fields, ref optional_base) => {
visitor.visit_path(path, expression.id);
ExprStruct(ref qpath, ref fields, ref optional_base) => {
visitor.visit_qpath(qpath, expression.id, expression.span);
for field in fields {
visitor.visit_name(field.name.span, field.name.node);
visitor.visit_expr(&field.expr)
@ -890,7 +959,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
visitor.visit_expr(subexpression);
walk_list!(visitor, visit_arm, arms);
}
ExprClosure(_, ref function_declaration, ref body, _fn_decl_span) => {
ExprClosure(_, ref function_declaration, body, _fn_decl_span) => {
visitor.visit_fn(FnKind::Closure(&expression.attrs),
function_declaration,
body,
@ -917,18 +986,21 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
visitor.visit_expr(main_expression);
visitor.visit_expr(index_expression)
}
ExprPath(ref maybe_qself, ref path) => {
if let Some(ref qself) = *maybe_qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(path, expression.id)
ExprPath(ref qpath) => {
visitor.visit_qpath(qpath, expression.id, expression.span);
}
ExprBreak(ref opt_sp_name, ref opt_expr) => {
walk_opt_sp_name(visitor, opt_sp_name);
ExprBreak(None, ref opt_expr) => {
walk_list!(visitor, visit_expr, opt_expr);
}
ExprAgain(ref opt_sp_name) => {
walk_opt_sp_name(visitor, opt_sp_name);
ExprBreak(Some(label), ref opt_expr) => {
visitor.visit_def_mention(Def::Label(label.loop_id));
visitor.visit_name(label.span, label.name);
walk_list!(visitor, visit_expr, opt_expr);
}
ExprAgain(None) => {}
ExprAgain(Some(label)) => {
visitor.visit_def_mention(Def::Label(label.loop_id));
visitor.visit_name(label.span, label.name);
}
ExprRet(ref optional_expression) => {
walk_list!(visitor, visit_expr, optional_expression);
@ -1002,13 +1074,14 @@ impl IdRange {
}
pub struct IdRangeComputingVisitor {
pub result: IdRange,
pub struct IdRangeComputingVisitor<'a, 'ast: 'a> {
result: IdRange,
map: &'a map::Map<'ast>,
}
impl IdRangeComputingVisitor {
pub fn new() -> IdRangeComputingVisitor {
IdRangeComputingVisitor { result: IdRange::max() }
impl<'a, 'ast> IdRangeComputingVisitor<'a, 'ast> {
pub fn new(map: &'a map::Map<'ast>) -> IdRangeComputingVisitor<'a, 'ast> {
IdRangeComputingVisitor { result: IdRange::max(), map: map }
}
pub fn result(&self) -> IdRange {
@ -1016,20 +1089,25 @@ impl IdRangeComputingVisitor {
}
}
impl<'v> Visitor<'v> for IdRangeComputingVisitor {
impl<'a, 'ast> Visitor<'ast> for IdRangeComputingVisitor<'a, 'ast> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'ast> {
NestedVisitorMap::OnlyBodies(&self.map)
}
fn visit_id(&mut self, id: NodeId) {
self.result.add(id);
}
}
/// Computes the id range for a single fn body, ignoring nested items.
pub fn compute_id_range_for_fn_body(fk: FnKind,
decl: &FnDecl,
body: &Expr,
sp: Span,
id: NodeId)
-> IdRange {
let mut visitor = IdRangeComputingVisitor::new();
visitor.visit_fn(fk, decl, body, sp, id);
pub fn compute_id_range_for_fn_body<'v>(fk: FnKind<'v>,
decl: &'v FnDecl,
body: &'v Expr,
sp: Span,
id: NodeId,
map: &map::Map<'v>)
-> IdRange {
let mut visitor = IdRangeComputingVisitor::new(map);
walk_fn_with_body(&mut visitor, fk, decl, body, sp, id);
visitor.result()
}

View File

@ -41,8 +41,10 @@ use super::intravisit::Visitor;
/// item-like things.
/// - Example: Lifetime resolution, which wants to bring lifetimes declared on the
/// impl into scope while visiting the impl-items, and then back out again.
/// - How: Implement `intravisit::Visitor` and override the `visit_nested_foo()` foo methods
/// as needed. Walk your crate with `intravisit::walk_crate()` invoked on `tcx.map.krate()`.
/// - How: Implement `intravisit::Visitor` and override the
/// `nested_visit_map()` method to return
/// `NestedVisitorMap::All`. Walk your crate with
/// `intravisit::walk_crate()` invoked on `tcx.map.krate()`.
/// - Pro: Visitor methods for any kind of HIR node, not just item-like things.
/// - Pro: Preserves nesting information
/// - Con: Does not integrate well into dependency tracking.
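A minimal sketch of the deep-visit setup described in the bullet above (same `NestedVisitorMap` API as in this diff, using `All` instead of the `OnlyBodies` mode sketched earlier; illustrative only, not part of the commit):

struct DeepWalker<'a, 'tcx: 'a> {
    map: &'a Map<'tcx>,
}

impl<'a, 'tcx> Visitor<'tcx> for DeepWalker<'a, 'tcx> {
    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
        NestedVisitorMap::All(&self.map)
    }
}

// Kicked off with something like:
//     intravisit::walk_crate(&mut DeepWalker { map: &tcx.map }, tcx.map.krate());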

File diff suppressed because it is too large

View File

@ -48,7 +48,7 @@ pub trait MaybeFnLike { fn is_fn_like(&self) -> bool; }
/// Components shared by fn-like things (fn items, methods, closures).
pub struct FnParts<'a> {
pub decl: &'a FnDecl,
pub body: &'a Expr,
pub body: ast::ExprId,
pub kind: FnKind<'a>,
pub span: Span,
pub id: NodeId,
@ -115,7 +115,7 @@ struct ItemFnParts<'a> {
abi: abi::Abi,
vis: &'a ast::Visibility,
generics: &'a ast::Generics,
body: &'a Expr,
body: ast::ExprId,
id: NodeId,
span: Span,
attrs: &'a [Attribute],
@ -125,14 +125,14 @@ struct ItemFnParts<'a> {
/// for use when implementing FnLikeNode operations.
struct ClosureParts<'a> {
decl: &'a FnDecl,
body: &'a Expr,
body: ast::ExprId,
id: NodeId,
span: Span,
attrs: &'a [Attribute],
}
impl<'a> ClosureParts<'a> {
fn new(d: &'a FnDecl, b: &'a Expr, id: NodeId, s: Span, attrs: &'a [Attribute]) -> Self {
fn new(d: &'a FnDecl, b: ast::ExprId, id: NodeId, s: Span, attrs: &'a [Attribute]) -> Self {
ClosureParts {
decl: d,
body: b,
@ -172,9 +172,9 @@ impl<'a> FnLikeNode<'a> {
}
}
pub fn body(self) -> &'a Expr {
self.handle(|i: ItemFnParts<'a>| &*i.body,
|_, _, _: &'a ast::MethodSig, _, body: &'a ast::Expr, _, _| body,
pub fn body(self) -> ast::ExprId {
self.handle(|i: ItemFnParts<'a>| i.body,
|_, _, _: &'a ast::MethodSig, _, body: ast::ExprId, _, _| body,
|c: ClosureParts<'a>| c.body)
}
@ -196,6 +196,18 @@ impl<'a> FnLikeNode<'a> {
|c: ClosureParts| c.id)
}
pub fn constness(self) -> ast::Constness {
match self.kind() {
FnKind::ItemFn(_, _, _, constness, ..) => {
constness
}
FnKind::Method(_, m, ..) => {
m.constness
}
_ => ast::Constness::NotConst
}
}
pub fn kind(self) -> FnKind<'a> {
let item = |p: ItemFnParts<'a>| -> FnKind<'a> {
FnKind::ItemFn(p.name, p.generics, p.unsafety, p.constness, p.abi, p.vis, p.attrs)
@ -215,7 +227,7 @@ impl<'a> FnLikeNode<'a> {
Name,
&'a ast::MethodSig,
Option<&'a ast::Visibility>,
&'a ast::Expr,
ast::ExprId,
Span,
&'a [Attribute])
-> A,
@ -223,13 +235,13 @@ impl<'a> FnLikeNode<'a> {
{
match self.node {
map::NodeItem(i) => match i.node {
ast::ItemFn(ref decl, unsafety, constness, abi, ref generics, ref block) =>
ast::ItemFn(ref decl, unsafety, constness, abi, ref generics, block) =>
item_fn(ItemFnParts {
id: i.id,
name: i.name,
decl: &decl,
unsafety: unsafety,
body: &block,
body: block,
generics: generics,
abi: abi,
vis: &i.vis,
@ -240,24 +252,24 @@ impl<'a> FnLikeNode<'a> {
_ => bug!("item FnLikeNode that is not fn-like"),
},
map::NodeTraitItem(ti) => match ti.node {
ast::MethodTraitItem(ref sig, Some(ref body)) => {
ast::MethodTraitItem(ref sig, Some(body)) => {
method(ti.id, ti.name, sig, None, body, ti.span, &ti.attrs)
}
_ => bug!("trait method FnLikeNode that is not fn-like"),
},
map::NodeImplItem(ii) => {
match ii.node {
ast::ImplItemKind::Method(ref sig, ref body) => {
ast::ImplItemKind::Method(ref sig, body) => {
method(ii.id, ii.name, sig, Some(&ii.vis), body, ii.span, &ii.attrs)
}
_ => {
bug!("impl method FnLikeNode that is not fn-like")
}
}
}
},
map::NodeExpr(e) => match e.node {
ast::ExprClosure(_, ref decl, ref block, _fn_decl_span) =>
closure(ClosureParts::new(&decl, &block, e.id, e.span, &e.attrs)),
ast::ExprClosure(_, ref decl, block, _fn_decl_span) =>
closure(ClosureParts::new(&decl, block, e.id, e.span, &e.attrs)),
_ => bug!("expr FnLikeNode that is not fn-like"),
},
_ => bug!("other FnLikeNode that is not fn-like"),

View File

@ -10,7 +10,7 @@
use super::*;
use hir::intravisit::Visitor;
use hir::intravisit::{Visitor, NestedVisitorMap};
use hir::def_id::DefId;
use middle::cstore::InlinedItem;
use std::iter::repeat;
@ -91,7 +91,7 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> {
/// deep walking so that we walk nested items in the context of
/// their outer items.
fn nested_visit_map(&mut self) -> Option<&map::Map<'ast>> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'ast> {
panic!("visit_nested_xxx must be manually implemented in this visitor")
}
@ -106,6 +106,10 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> {
self.visit_impl_item(self.krate.impl_item(item_id))
}
fn visit_body(&mut self, id: ExprId) {
self.visit_expr(self.krate.expr(id))
}
fn visit_item(&mut self, i: &'ast Item) {
debug!("visit_item: {:?}", i);
@ -124,23 +128,6 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> {
this.insert(struct_def.id(), NodeStructCtor(struct_def));
}
}
ItemTrait(.., ref bounds, _) => {
for b in bounds.iter() {
if let TraitTyParamBound(ref t, TraitBoundModifier::None) = *b {
this.insert(t.trait_ref.ref_id, NodeItem(i));
}
}
}
ItemUse(ref view_path) => {
match view_path.node {
ViewPathList(_, ref paths) => {
for path in paths {
this.insert(path.node.id, NodeItem(i));
}
}
_ => ()
}
}
_ => {}
}
intravisit::walk_item(this, i);
@ -217,8 +204,16 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> {
});
}
fn visit_trait_ref(&mut self, tr: &'ast TraitRef) {
self.insert(tr.ref_id, NodeTraitRef(tr));
self.with_parent(tr.ref_id, |this| {
intravisit::walk_trait_ref(this, tr);
});
}
fn visit_fn(&mut self, fk: intravisit::FnKind<'ast>, fd: &'ast FnDecl,
b: &'ast Expr, s: Span, id: NodeId) {
b: ExprId, s: Span, id: NodeId) {
assert_eq!(self.parent_node, id);
intravisit::walk_fn(self, fk, fd, b, s, id);
}
@ -234,7 +229,28 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> {
self.insert(lifetime.id, NodeLifetime(lifetime));
}
fn visit_vis(&mut self, visibility: &'ast Visibility) {
match *visibility {
Visibility::Public |
Visibility::Crate |
Visibility::Inherited => {}
Visibility::Restricted { id, .. } => {
self.insert(id, NodeVisibility(visibility));
self.with_parent(id, |this| {
intravisit::walk_vis(this, visibility);
});
}
}
}
fn visit_macro_def(&mut self, macro_def: &'ast MacroDef) {
self.insert_entry(macro_def.id, NotPresent);
}
fn visit_struct_field(&mut self, field: &'ast StructField) {
self.insert(field.id, NodeField(field));
self.with_parent(field.id, |this| {
intravisit::walk_struct_field(this, field);
});
}
}

View File

@ -11,7 +11,7 @@
use hir::map::definitions::*;
use hir;
use hir::intravisit;
use hir::intravisit::{self, Visitor, NestedVisitorMap};
use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
use middle::cstore::InlinedItem;
@ -135,8 +135,8 @@ impl<'a> DefCollector<'a> {
}
}
impl<'a> visit::Visitor for DefCollector<'a> {
fn visit_item(&mut self, i: &Item) {
impl<'a> visit::Visitor<'a> for DefCollector<'a> {
fn visit_item(&mut self, i: &'a Item) {
debug!("visit_item: {:?}", i);
// Pick the def data. This need not be unique, but the more
@ -155,7 +155,20 @@ impl<'a> visit::Visitor for DefCollector<'a> {
DefPathData::ValueNs(i.ident.name.as_str()),
ItemKind::Mac(..) if i.id == DUMMY_NODE_ID => return, // Scope placeholder
ItemKind::Mac(..) => return self.visit_macro_invoc(i.id, false),
ItemKind::Use(..) => DefPathData::Misc,
ItemKind::Use(ref view_path) => {
match view_path.node {
ViewPathGlob(..) => {}
// FIXME(eddyb) Should use the real name. Which namespace?
ViewPathSimple(..) => {}
ViewPathList(_, ref imports) => {
for import in imports {
self.create_def(import.node.id, DefPathData::Misc);
}
}
}
DefPathData::Misc
}
};
let def = self.create_def(i.id, def_data);
@ -198,7 +211,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
});
}
fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
fn visit_foreign_item(&mut self, foreign_item: &'a ForeignItem) {
let def = self.create_def(foreign_item.id,
DefPathData::ValueNs(foreign_item.ident.name.as_str()));
@ -207,7 +220,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
});
}
fn visit_generics(&mut self, generics: &Generics) {
fn visit_generics(&mut self, generics: &'a Generics) {
for ty_param in generics.ty_params.iter() {
self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.ident.name.as_str()));
}
@ -215,7 +228,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
visit::walk_generics(self, generics);
}
fn visit_trait_item(&mut self, ti: &TraitItem) {
fn visit_trait_item(&mut self, ti: &'a TraitItem) {
let def_data = match ti.node {
TraitItemKind::Method(..) | TraitItemKind::Const(..) =>
DefPathData::ValueNs(ti.ident.name.as_str()),
@ -233,7 +246,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
});
}
fn visit_impl_item(&mut self, ii: &ImplItem) {
fn visit_impl_item(&mut self, ii: &'a ImplItem) {
let def_data = match ii.node {
ImplItemKind::Method(..) | ImplItemKind::Const(..) =>
DefPathData::ValueNs(ii.ident.name.as_str()),
@ -251,7 +264,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
});
}
fn visit_pat(&mut self, pat: &Pat) {
fn visit_pat(&mut self, pat: &'a Pat) {
let parent_def = self.parent_def;
match pat.node {
@ -267,7 +280,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
self.parent_def = parent_def;
}
fn visit_expr(&mut self, expr: &Expr) {
fn visit_expr(&mut self, expr: &'a Expr) {
let parent_def = self.parent_def;
match expr.node {
@ -284,7 +297,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
self.parent_def = parent_def;
}
fn visit_ty(&mut self, ty: &Ty) {
fn visit_ty(&mut self, ty: &'a Ty) {
match ty.node {
TyKind::Mac(..) => return self.visit_macro_invoc(ty.id, false),
TyKind::Array(_, ref length) => self.visit_ast_const_integer(length),
@ -296,15 +309,15 @@ impl<'a> visit::Visitor for DefCollector<'a> {
visit::walk_ty(self, ty);
}
fn visit_lifetime_def(&mut self, def: &LifetimeDef) {
fn visit_lifetime_def(&mut self, def: &'a LifetimeDef) {
self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name.as_str()));
}
fn visit_macro_def(&mut self, macro_def: &MacroDef) {
fn visit_macro_def(&mut self, macro_def: &'a MacroDef) {
self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name.as_str()));
}
fn visit_stmt(&mut self, stmt: &Stmt) {
fn visit_stmt(&mut self, stmt: &'a Stmt) {
match stmt.node {
StmtKind::Mac(..) => self.visit_macro_invoc(stmt.id, false),
_ => visit::walk_stmt(self, stmt),
@ -313,7 +326,18 @@ impl<'a> visit::Visitor for DefCollector<'a> {
}
// We walk the HIR rather than the AST when reading items from metadata.
impl<'ast> intravisit::Visitor<'ast> for DefCollector<'ast> {
impl<'ast> Visitor<'ast> for DefCollector<'ast> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'ast> {
// note however that we override `visit_body` below
NestedVisitorMap::None
}
fn visit_body(&mut self, id: hir::ExprId) {
if let Some(krate) = self.hir_crate {
self.visit_expr(krate.expr(id));
}
}
fn visit_item(&mut self, i: &'ast hir::Item) {
debug!("visit_item: {:?}", i);
@ -423,7 +447,7 @@ impl<'ast> intravisit::Visitor<'ast> for DefCollector<'ast> {
fn visit_pat(&mut self, pat: &'ast hir::Pat) {
let parent_def = self.parent_def;
if let hir::PatKind::Binding(_, name, _) = pat.node {
if let hir::PatKind::Binding(_, _, name, _) = pat.node {
let def = self.create_def(pat.id, DefPathData::Binding(name.node.as_str()));
self.parent_def = Some(def);
}

View File

@ -18,7 +18,6 @@ pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData,
use dep_graph::{DepGraph, DepNode};
use middle::cstore::InlinedItem;
use middle::cstore::InlinedItem as II;
use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
use syntax::abi::Abi;
@ -46,9 +45,11 @@ pub enum Node<'ast> {
NodeTraitItem(&'ast TraitItem),
NodeImplItem(&'ast ImplItem),
NodeVariant(&'ast Variant),
NodeField(&'ast StructField),
NodeExpr(&'ast Expr),
NodeStmt(&'ast Stmt),
NodeTy(&'ast Ty),
NodeTraitRef(&'ast TraitRef),
NodeLocal(&'ast Pat),
NodePat(&'ast Pat),
NodeBlock(&'ast Block),
@ -57,7 +58,10 @@ pub enum Node<'ast> {
NodeStructCtor(&'ast VariantData),
NodeLifetime(&'ast Lifetime),
NodeTyParam(&'ast TyParam)
NodeTyParam(&'ast TyParam),
NodeVisibility(&'ast Visibility),
NodeInlinedItem(&'ast InlinedItem),
}
/// Represents an entry and its parent NodeID.
@ -73,15 +77,18 @@ pub enum MapEntry<'ast> {
EntryTraitItem(NodeId, &'ast TraitItem),
EntryImplItem(NodeId, &'ast ImplItem),
EntryVariant(NodeId, &'ast Variant),
EntryField(NodeId, &'ast StructField),
EntryExpr(NodeId, &'ast Expr),
EntryStmt(NodeId, &'ast Stmt),
EntryTy(NodeId, &'ast Ty),
EntryTraitRef(NodeId, &'ast TraitRef),
EntryLocal(NodeId, &'ast Pat),
EntryPat(NodeId, &'ast Pat),
EntryBlock(NodeId, &'ast Block),
EntryStructCtor(NodeId, &'ast VariantData),
EntryLifetime(NodeId, &'ast Lifetime),
EntryTyParam(NodeId, &'ast TyParam),
EntryVisibility(NodeId, &'ast Visibility),
/// Roots for node trees.
RootCrate,
@ -102,15 +109,20 @@ impl<'ast> MapEntry<'ast> {
NodeTraitItem(n) => EntryTraitItem(p, n),
NodeImplItem(n) => EntryImplItem(p, n),
NodeVariant(n) => EntryVariant(p, n),
NodeField(n) => EntryField(p, n),
NodeExpr(n) => EntryExpr(p, n),
NodeStmt(n) => EntryStmt(p, n),
NodeTy(n) => EntryTy(p, n),
NodeTraitRef(n) => EntryTraitRef(p, n),
NodeLocal(n) => EntryLocal(p, n),
NodePat(n) => EntryPat(p, n),
NodeBlock(n) => EntryBlock(p, n),
NodeStructCtor(n) => EntryStructCtor(p, n),
NodeLifetime(n) => EntryLifetime(p, n),
NodeTyParam(n) => EntryTyParam(p, n),
NodeVisibility(n) => EntryVisibility(p, n),
NodeInlinedItem(n) => RootInlinedParent(n),
}
}
@ -121,15 +133,18 @@ impl<'ast> MapEntry<'ast> {
EntryTraitItem(id, _) => id,
EntryImplItem(id, _) => id,
EntryVariant(id, _) => id,
EntryField(id, _) => id,
EntryExpr(id, _) => id,
EntryStmt(id, _) => id,
EntryTy(id, _) => id,
EntryTraitRef(id, _) => id,
EntryLocal(id, _) => id,
EntryPat(id, _) => id,
EntryBlock(id, _) => id,
EntryStructCtor(id, _) => id,
EntryLifetime(id, _) => id,
EntryTyParam(id, _) => id,
EntryVisibility(id, _) => id,
NotPresent |
RootCrate |
@ -144,15 +159,19 @@ impl<'ast> MapEntry<'ast> {
EntryTraitItem(_, n) => NodeTraitItem(n),
EntryImplItem(_, n) => NodeImplItem(n),
EntryVariant(_, n) => NodeVariant(n),
EntryField(_, n) => NodeField(n),
EntryExpr(_, n) => NodeExpr(n),
EntryStmt(_, n) => NodeStmt(n),
EntryTy(_, n) => NodeTy(n),
EntryTraitRef(_, n) => NodeTraitRef(n),
EntryLocal(_, n) => NodeLocal(n),
EntryPat(_, n) => NodePat(n),
EntryBlock(_, n) => NodeBlock(n),
EntryStructCtor(_, n) => NodeStructCtor(n),
EntryLifetime(_, n) => NodeLifetime(n),
EntryTyParam(_, n) => NodeTyParam(n),
EntryVisibility(_, n) => NodeVisibility(n),
RootInlinedParent(n) => NodeInlinedItem(n),
_ => return None
})
}
@ -237,45 +256,63 @@ impl<'ast> Map<'ast> {
let map = self.map.borrow();
let mut id = id0;
if !self.is_inlined_node_id(id) {
let mut last_expr = None;
loop {
match map[id.as_usize()] {
EntryItem(_, item) => {
let def_id = self.local_def_id(item.id);
// NB ^~~~~~~
//
// You would expect that `item.id == id`, but this
// is not always the case. In particular, for a
// ViewPath item like `use self::{mem, foo}`, we
// map the ids for `mem` and `foo` to the
// enclosing view path item. This seems mega super
// ultra wrong, but then who am I to judge?
// -nmatsakis
assert_eq!(id, item.id);
let def_id = self.local_def_id(id);
assert!(!self.is_inlined_def_id(def_id));
if let Some(last_id) = last_expr {
// The body of the item may have a separate dep node
// (Note that trait items don't currently have
// their own dep node, so there's also just one
// HirBody node for all the items)
if self.is_body(last_id, item) {
return DepNode::HirBody(def_id);
}
}
return DepNode::Hir(def_id);
}
EntryImplItem(..) => {
EntryImplItem(_, item) => {
let def_id = self.local_def_id(id);
assert!(!self.is_inlined_def_id(def_id));
if let Some(last_id) = last_expr {
// The body of the item may have a separate dep node
if self.is_impl_item_body(last_id, item) {
return DepNode::HirBody(def_id);
}
}
return DepNode::Hir(def_id);
}
EntryForeignItem(p, _) |
EntryTraitItem(p, _) |
EntryVariant(p, _) |
EntryExpr(p, _) |
EntryField(p, _) |
EntryStmt(p, _) |
EntryTy(p, _) |
EntryTraitRef(p, _) |
EntryLocal(p, _) |
EntryPat(p, _) |
EntryBlock(p, _) |
EntryStructCtor(p, _) |
EntryLifetime(p, _) |
EntryTyParam(p, _) =>
EntryTyParam(p, _) |
EntryVisibility(p, _) =>
id = p,
RootCrate =>
return DepNode::Krate,
EntryExpr(p, _) => {
last_expr = Some(id);
id = p;
}
RootCrate => {
return DepNode::Hir(DefId::local(CRATE_DEF_INDEX));
}
RootInlinedParent(_) =>
bug!("node {} has inlined ancestor but is not inlined", id0),
@ -304,23 +341,22 @@ impl<'ast> Map<'ast> {
EntryTraitItem(p, _) |
EntryImplItem(p, _) |
EntryVariant(p, _) |
EntryField(p, _) |
EntryExpr(p, _) |
EntryStmt(p, _) |
EntryTy(p, _) |
EntryTraitRef(p, _) |
EntryLocal(p, _) |
EntryPat(p, _) |
EntryBlock(p, _) |
EntryStructCtor(p, _) |
EntryLifetime(p, _) |
EntryTyParam(p, _) =>
EntryTyParam(p, _) |
EntryVisibility(p, _) =>
id = p,
RootInlinedParent(parent) => match *parent {
InlinedItem::Item(def_id, _) |
InlinedItem::TraitItem(def_id, _) |
InlinedItem::ImplItem(def_id, _) =>
return DepNode::MetaData(def_id)
},
RootInlinedParent(parent) =>
return DepNode::MetaData(parent.def_id),
RootCrate =>
bug!("node {} has crate ancestor but is inlined", id0),
@ -332,6 +368,29 @@ impl<'ast> Map<'ast> {
}
}
fn is_body(&self, node_id: NodeId, item: &Item) -> bool {
match item.node {
ItemFn(_, _, _, _, _, body) => body.node_id() == node_id,
// Since trait items currently don't get their own dep nodes,
// we check here whether node_id is the body of any of the items.
// If they get their own dep nodes, this can go away
ItemTrait(_, _, _, ref trait_items) => {
trait_items.iter().any(|trait_item| { match trait_item.node {
MethodTraitItem(_, Some(body)) => body.node_id() == node_id,
_ => false
}})
}
_ => false
}
}
fn is_impl_item_body(&self, node_id: NodeId, item: &ImplItem) -> bool {
match item.node {
ImplItemKind::Method(_, body) => body.node_id() == node_id,
_ => false
}
}
pub fn num_local_def_ids(&self) -> usize {
self.definitions.borrow().len()
}
@ -543,8 +602,7 @@ impl<'ast> Map<'ast> {
pub fn get_parent_did(&self, id: NodeId) -> DefId {
let parent = self.get_parent(id);
match self.find_entry(parent) {
Some(RootInlinedParent(&II::TraitItem(did, _))) |
Some(RootInlinedParent(&II::ImplItem(did, _))) => did,
Some(RootInlinedParent(ii)) => ii.def_id,
_ => self.local_def_id(parent)
}
}
@ -642,6 +700,10 @@ impl<'ast> Map<'ast> {
}
}
pub fn expr(&self, id: ExprId) -> &'ast Expr {
self.expect_expr(id.node_id())
}
/// Returns the name associated with the given NodeId's AST.
pub fn name(&self, id: NodeId) -> Name {
match self.get(id) {
@ -650,9 +712,10 @@ impl<'ast> Map<'ast> {
NodeImplItem(ii) => ii.name,
NodeTraitItem(ti) => ti.name,
NodeVariant(v) => v.node.name,
NodeField(f) => f.name,
NodeLifetime(lt) => lt.name,
NodeTyParam(tp) => tp.name,
NodeLocal(&Pat { node: PatKind::Binding(_,l,_), .. }) => l.node,
NodeLocal(&Pat { node: PatKind::Binding(_,_,l,_), .. }) => l.node,
NodeStructCtor(_) => self.name(self.get_parent(id)),
_ => bug!("no name for {}", self.node_to_string(id))
}
@ -668,6 +731,7 @@ impl<'ast> Map<'ast> {
Some(NodeTraitItem(ref ti)) => Some(&ti.attrs[..]),
Some(NodeImplItem(ref ii)) => Some(&ii.attrs[..]),
Some(NodeVariant(ref v)) => Some(&v.node.attrs[..]),
Some(NodeField(ref f)) => Some(&f.attrs[..]),
Some(NodeExpr(ref e)) => Some(&*e.attrs),
Some(NodeStmt(ref s)) => Some(s.node.attrs()),
// unit/tuple structs take the attributes straight from
@ -697,44 +761,40 @@ impl<'ast> Map<'ast> {
}
}
pub fn opt_span(&self, id: NodeId) -> Option<Span> {
let sp = match self.find(id) {
Some(NodeItem(item)) => item.span,
Some(NodeForeignItem(foreign_item)) => foreign_item.span,
Some(NodeTraitItem(trait_method)) => trait_method.span,
Some(NodeImplItem(ref impl_item)) => impl_item.span,
Some(NodeVariant(variant)) => variant.span,
Some(NodeExpr(expr)) => expr.span,
Some(NodeStmt(stmt)) => stmt.span,
Some(NodeTy(ty)) => ty.span,
Some(NodeLocal(pat)) => pat.span,
Some(NodePat(pat)) => pat.span,
Some(NodeBlock(block)) => block.span,
Some(NodeStructCtor(_)) => self.expect_item(self.get_parent(id)).span,
Some(NodeTyParam(ty_param)) => ty_param.span,
_ => return None,
};
Some(sp)
}
pub fn span(&self, id: NodeId) -> Span {
self.read(id); // reveals span from node
self.opt_span(id)
.unwrap_or_else(|| bug!("AstMap.span: could not find span for id {:?}", id))
match self.find_entry(id) {
Some(EntryItem(_, item)) => item.span,
Some(EntryForeignItem(_, foreign_item)) => foreign_item.span,
Some(EntryTraitItem(_, trait_method)) => trait_method.span,
Some(EntryImplItem(_, impl_item)) => impl_item.span,
Some(EntryVariant(_, variant)) => variant.span,
Some(EntryField(_, field)) => field.span,
Some(EntryExpr(_, expr)) => expr.span,
Some(EntryStmt(_, stmt)) => stmt.span,
Some(EntryTy(_, ty)) => ty.span,
Some(EntryTraitRef(_, tr)) => tr.path.span,
Some(EntryLocal(_, pat)) => pat.span,
Some(EntryPat(_, pat)) => pat.span,
Some(EntryBlock(_, block)) => block.span,
Some(EntryStructCtor(_, _)) => self.expect_item(self.get_parent(id)).span,
Some(EntryLifetime(_, lifetime)) => lifetime.span,
Some(EntryTyParam(_, ty_param)) => ty_param.span,
Some(EntryVisibility(_, &Visibility::Restricted { ref path, .. })) => path.span,
Some(EntryVisibility(_, v)) => bug!("unexpected Visibility {:?}", v),
Some(RootCrate) => self.forest.krate.span,
Some(RootInlinedParent(parent)) => parent.body.span,
Some(NotPresent) | None => {
bug!("hir::map::Map::span: id not in map: {:?}", id)
}
}
}
pub fn span_if_local(&self, id: DefId) -> Option<Span> {
self.as_local_node_id(id).map(|id| self.span(id))
}
pub fn def_id_span(&self, def_id: DefId, fallback: Span) -> Span {
if let Some(node_id) = self.as_local_node_id(def_id) {
self.opt_span(node_id).unwrap_or(fallback)
} else {
fallback
}
}
pub fn node_to_string(&self, id: NodeId) -> String {
node_id_to_string(self, id, true)
}
@ -823,6 +883,7 @@ impl<'a, 'ast> Iterator for NodesMatchingSuffix<'a, 'ast> {
Some(EntryTraitItem(_, n)) => n.name(),
Some(EntryImplItem(_, n)) => n.name(),
Some(EntryVariant(_, n)) => n.name(),
Some(EntryField(_, n)) => n.name(),
_ => continue,
};
if self.matches_names(self.map.get_parent(idx), name) {
@ -841,6 +902,7 @@ impl<T:Named> Named for Spanned<T> { fn name(&self) -> Name { self.node.name() }
impl Named for Item { fn name(&self) -> Name { self.name } }
impl Named for ForeignItem { fn name(&self) -> Name { self.name } }
impl Named for Variant_ { fn name(&self) -> Name { self.name } }
impl Named for StructField { fn name(&self) -> Name { self.name } }
impl Named for TraitItem { fn name(&self) -> Name { self.name } }
impl Named for ImplItem { fn name(&self) -> Name { self.name } }
@ -926,15 +988,20 @@ impl<'a> NodePrinter for pprust::State<'a> {
NodeExpr(a) => self.print_expr(&a),
NodeStmt(a) => self.print_stmt(&a),
NodeTy(a) => self.print_type(&a),
NodeTraitRef(a) => self.print_trait_ref(&a),
NodePat(a) => self.print_pat(&a),
NodeBlock(a) => self.print_block(&a),
NodeLifetime(a) => self.print_lifetime(&a),
NodeVisibility(a) => self.print_visibility(&a),
NodeTyParam(_) => bug!("cannot print TyParam"),
NodeField(_) => bug!("cannot print StructField"),
// these cases do not carry enough information in the
// ast_map to reconstruct their full structure for pretty
// printing.
NodeLocal(_) => bug!("cannot print isolated Local"),
NodeStructCtor(_) => bug!("cannot print isolated StructCtor"),
NodeInlinedItem(_) => bug!("cannot print inlined item"),
}
}
}
@ -1009,6 +1076,11 @@ fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
variant.node.name,
path_str(), id_str)
}
Some(NodeField(ref field)) => {
format!("field {} in {}{}",
field.name,
path_str(), id_str)
}
Some(NodeExpr(ref expr)) => {
format!("expr {}{}", pprust::expr_to_string(&expr), id_str)
}
@ -1018,6 +1090,9 @@ fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
Some(NodeTy(ref ty)) => {
format!("type {}{}", pprust::ty_to_string(&ty), id_str)
}
Some(NodeTraitRef(ref tr)) => {
format!("trait_ref {}{}", pprust::path_to_string(&tr.path), id_str)
}
Some(NodeLocal(ref pat)) => {
format!("local {}{}", pprust::pat_to_string(&pat), id_str)
}
@ -1037,6 +1112,12 @@ fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
Some(NodeTyParam(ref ty_param)) => {
format!("typaram {:?}{}", ty_param, id_str)
}
Some(NodeVisibility(ref vis)) => {
format!("visibility {:?}{}", vis, id_str)
}
Some(NodeInlinedItem(_)) => {
format!("inlined item {}", id_str)
}
None => {
format!("unknown node{}", id_str)
}

View File

@ -27,13 +27,13 @@ pub use self::Ty_::*;
pub use self::TyParamBound::*;
pub use self::UnOp::*;
pub use self::UnsafeSource::*;
pub use self::ViewPath_::*;
pub use self::Visibility::{Public, Inherited};
pub use self::PathParameters::*;
use hir::def::Def;
use hir::def_id::DefId;
use util::nodemap::{NodeMap, FxHashSet};
use rustc_data_structures::fnv::FnvHashMap;
use syntax_pos::{mk_sp, Span, ExpnId, DUMMY_SP};
use syntax::codemap::{self, respan, Spanned};
@ -108,6 +108,8 @@ pub struct Path {
/// A `::foo` path, is relative to the crate root rather than current
/// module (like paths in an import).
pub global: bool,
/// The definition that the path resolved to.
pub def: Def,
/// The segments in the path: the things separated by `::`.
pub segments: HirVec<PathSegment>,
}
@ -124,21 +126,6 @@ impl fmt::Display for Path {
}
}
impl Path {
/// Convert a span and an identifier to the corresponding
/// 1-segment path.
pub fn from_name(s: Span, name: Name) -> Path {
Path {
span: s,
global: false,
segments: hir_vec![PathSegment {
name: name,
parameters: PathParameters::none()
}],
}
}
}
/// A segment of a path: an identifier, an optional lifetime, and a set of
/// types.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
@ -154,6 +141,16 @@ pub struct PathSegment {
pub parameters: PathParameters,
}
impl PathSegment {
/// Convert an identifier to the corresponding segment.
pub fn from_name(name: Name) -> PathSegment {
PathSegment {
name: name,
parameters: PathParameters::none()
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum PathParameters {
/// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>`
@ -167,6 +164,7 @@ impl PathParameters {
AngleBracketedParameters(AngleBracketedParameterData {
lifetimes: HirVec::new(),
types: HirVec::new(),
infer_types: true,
bindings: HirVec::new(),
})
}
@ -241,6 +239,11 @@ pub struct AngleBracketedParameterData {
pub lifetimes: HirVec<Lifetime>,
/// The type parameters for this path segment, if present.
pub types: HirVec<P<Ty>>,
/// Whether to infer remaining type parameters, if any.
/// This only applies to expression and pattern paths, and
/// out of those only the segments with no type parameters
/// to begin with, e.g. `Vec::new` is `<Vec<..>>::new::<..>`.
pub infer_types: bool,
/// Bindings (equality constraints) on associated types, if present.
/// E.g., `Foo<A=Bar>`.
pub bindings: HirVec<TypeBinding>,
@ -426,6 +429,7 @@ pub struct Crate {
pub items: BTreeMap<NodeId, Item>,
pub impl_items: BTreeMap<ImplItemId, ImplItem>,
pub exprs: FnvHashMap<ExprId, Expr>,
}
impl Crate {
@ -456,6 +460,10 @@ impl Crate {
visitor.visit_impl_item(impl_item);
}
}
pub fn expr(&self, id: ExprId) -> &Expr {
&self.exprs[&id]
}
}
/// A macro definition, in this crate or imported from another.
@ -527,7 +535,7 @@ impl Pat {
PatKind::Lit(_) |
PatKind::Range(..) |
PatKind::Binding(..) |
PatKind::Path(..) => {
PatKind::Path(_) => {
true
}
}
@ -566,20 +574,20 @@ pub enum PatKind {
Wild,
/// A fresh binding `ref mut binding @ OPT_SUBPATTERN`.
Binding(BindingMode, Spanned<Name>, Option<P<Pat>>),
/// The `DefId` is for the definition of the variable being bound.
Binding(BindingMode, DefId, Spanned<Name>, Option<P<Pat>>),
/// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`.
/// The `bool` is `true` in the presence of a `..`.
Struct(Path, HirVec<Spanned<FieldPat>>, bool),
Struct(QPath, HirVec<Spanned<FieldPat>>, bool),
/// A tuple struct/variant pattern `Variant(x, y, .., z)`.
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
/// 0 <= position <= subpats.len()
TupleStruct(Path, HirVec<P<Pat>>, Option<usize>),
TupleStruct(QPath, HirVec<P<Pat>>, Option<usize>),
/// A possibly qualified path pattern.
/// Such pattern can be resolved to a unit struct/variant or a constant.
Path(Option<QSelf>, Path),
/// A path pattern for a unit struct/variant or a (maybe-associated) constant.
Path(QPath),
/// A tuple pattern `(a, b)`.
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
@ -836,9 +844,6 @@ pub enum BlockCheckMode {
UnsafeBlock(UnsafeSource),
PushUnsafeBlock(UnsafeSource),
PopUnsafeBlock(UnsafeSource),
// Within this block (but outside a PopUnstableBlock), we suspend checking of stability.
PushUnstableBlock,
PopUnstableBlock,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
@ -847,6 +852,15 @@ pub enum UnsafeSource {
UserProvided,
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ExprId(NodeId);
impl ExprId {
pub fn node_id(self) -> NodeId {
self.0
}
}
/// An expression
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Expr {
@ -856,6 +870,12 @@ pub struct Expr {
pub attrs: ThinVec<Attribute>,
}
impl Expr {
pub fn expr_id(&self) -> ExprId {
ExprId(self.id)
}
}
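With this change, bodies are no longer owned inline by the items that use them: the `Crate` keeps an `exprs` side table keyed by `ExprId`, and items store only the id. Below is a minimal standalone sketch of that newtype-id-plus-side-table pattern; `NodeId`, `Expr`, and `Crate` here are simplified stand-ins, not the rustc types.

```rust
use std::collections::HashMap;

type NodeId = u32;

#[derive(Debug)]
struct Expr {
    id: NodeId,
    text: String,
}

// Newtype wrapper, mirroring `ExprId(NodeId)` above.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct ExprId(NodeId);

impl ExprId {
    fn node_id(self) -> NodeId {
        self.0
    }
}

struct Crate {
    // Bodies live in a side table keyed by ExprId instead of being
    // owned by the items that reference them.
    exprs: HashMap<ExprId, Expr>,
}

impl Crate {
    fn expr(&self, id: ExprId) -> &Expr {
        &self.exprs[&id]
    }
}

fn main() {
    let body = Expr { id: 7, text: "a + b".to_owned() };
    let id = ExprId(body.id);
    let mut exprs = HashMap::new();
    exprs.insert(id, body);
    let krate = Crate { exprs };
    // An item would store only the ExprId and look the body up on demand.
    println!("body of node {:?}: {}", id.node_id(), krate.expr(id).text);
}
```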
impl fmt::Debug for Expr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "expr({}: {})", self.id, print::expr_to_string(self))
@ -915,7 +935,7 @@ pub enum Expr_ {
/// A closure (for example, `move |a, b, c| {a + b + c}`).
///
/// The final span is the span of the argument block `|...|`
ExprClosure(CaptureClause, P<FnDecl>, P<Expr>, Span),
ExprClosure(CaptureClause, P<FnDecl>, ExprId, Span),
/// A block (`{ ... }`)
ExprBlock(P<Block>),
@ -934,19 +954,15 @@ pub enum Expr_ {
/// An indexing operation (`foo[2]`)
ExprIndex(P<Expr>, P<Expr>),
/// Variable reference, possibly containing `::` and/or type
/// parameters, e.g. foo::bar::<baz>.
///
/// Optionally "qualified",
/// e.g. `<HirVec<T> as SomeTrait>::SomeType`.
ExprPath(Option<QSelf>, Path),
/// Path to a definition, possibly containing lifetime or type parameters.
ExprPath(QPath),
/// A referencing operation (`&a` or `&mut a`)
ExprAddrOf(Mutability, P<Expr>),
/// A `break`, with an optional label to break
ExprBreak(Option<Spanned<Name>>, Option<P<Expr>>),
ExprBreak(Option<Label>, Option<P<Expr>>),
/// A `continue`, with an optional label
ExprAgain(Option<Spanned<Name>>),
ExprAgain(Option<Label>),
/// A `return`, with an optional value to be returned
ExprRet(Option<P<Expr>>),
@ -957,7 +973,7 @@ pub enum Expr_ {
///
/// For example, `Foo {x: 1, y: 2}`, or
/// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
ExprStruct(P<Path>, HirVec<Field>, Option<P<Expr>>),
ExprStruct(QPath, HirVec<Field>, Option<P<Expr>>),
/// An array literal constructed from one repeated element.
///
@ -966,22 +982,30 @@ pub enum Expr_ {
ExprRepeat(P<Expr>, P<Expr>),
}
/// The explicit Self type in a "qualified path". The actual
/// path, including the trait and the associated item, is stored
/// separately. `position` represents the index of the associated
/// item qualified with this Self type.
///
/// <HirVec<T> as a::b::Trait>::AssociatedItem
/// ^~~~~ ~~~~~~~~~~~~~~^
/// ty position = 3
///
/// <HirVec<T>>::AssociatedItem
/// ^~~~~ ^
/// ty position = 0
/// Optionally `Self`-qualified value/type path or associated extension.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct QSelf {
pub ty: P<Ty>,
pub position: usize,
pub enum QPath {
/// Path to a definition, optionally "fully-qualified" with a `Self`
/// type, if the path points to an associated item in a trait.
///
/// E.g. an unqualified path like `Clone::clone` has `None` for `Self`,
/// while `<Vec<T> as Clone>::clone` has `Some(Vec<T>)` for `Self`,
/// even though they both have the same two-segment `Clone::clone` `Path`.
Resolved(Option<P<Ty>>, P<Path>),
/// Type-related paths, e.g. `<T>::default` or `<T>::Output`.
/// Will be resolved by type-checking to an associated item.
///
/// UFCS source paths can desugar into this, with `Vec::new` turning into
/// `<Vec>::new`, and `T::X::Y::method` into `<<<T>::X>::Y>::method`,
/// the `X` and `Y` nodes each being a `TyPath(QPath::TypeRelative(..))`.
TypeRelative(P<Ty>, P<PathSegment>)
}
impl fmt::Display for QPath {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", print::qpath_to_string(self))
}
}
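The two `QPath` variants separate paths that name resolution can resolve up front from type-relative paths that are only resolved during type-checking. A minimal standalone sketch of how the two forms read when rendered; the types below are simplified stand-ins (the real variants carry `P<Ty>`, `P<Path>`, and `P<PathSegment>`).

```rust
enum QPath {
    // e.g. `Clone::clone` (no Self type) or `<Vec<T> as Clone>::clone`
    Resolved(Option<String>, Vec<String>),
    // e.g. `<T>::Output`, resolved later by type-checking
    TypeRelative(String, String),
}

fn render(q: &QPath) -> String {
    match q {
        QPath::Resolved(None, segments) => segments.join("::"),
        QPath::Resolved(Some(self_ty), segments) => {
            let (item, prefix) = segments.split_last().unwrap();
            format!("<{} as {}>::{}", self_ty, prefix.join("::"), item)
        }
        QPath::TypeRelative(self_ty, item) => format!("<{}>::{}", self_ty, item),
    }
}

fn main() {
    let unqualified = QPath::Resolved(None, vec!["Clone".into(), "clone".into()]);
    let qualified = QPath::Resolved(
        Some("Vec<T>".into()),
        vec!["Clone".into(), "clone".into()],
    );
    let relative = QPath::TypeRelative("T".into(), "Output".into());
    println!("{}", render(&unqualified)); // Clone::clone
    println!("{}", render(&qualified));   // <Vec<T> as Clone>::clone
    println!("{}", render(&relative));    // <T>::Output
}
```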
/// Hints at the original code for a `match _ { .. }`
@ -1014,6 +1038,13 @@ pub enum LoopSource {
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub struct Label {
pub span: Span,
pub name: Name,
pub loop_id: NodeId
}
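`ExprBreak` and `ExprAgain` now carry an optional `Label`, whose `loop_id` records which enclosing loop the label resolves to. In surface syntax this is ordinary labeled `break`/`continue`; a small self-contained example (hypothetical numbers, purely for illustration):

```rust
fn main() {
    let mut found = None;
    'outer: for i in 0..10u32 {
        for j in 0..10u32 {
            if i * j == 42 {
                found = Some((i, j));
                // Lowers to ExprBreak(Some(Label { name: "outer", loop_id, .. }), None),
                // with loop_id pointing at the outer `for` loop.
                break 'outer;
            }
        }
    }
    println!("{:?}", found); // Some((6, 7))
}
```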
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum CaptureClause {
CaptureByValue,
@ -1058,7 +1089,7 @@ pub enum TraitItem_ {
/// must contain a value)
ConstTraitItem(P<Ty>, Option<P<Expr>>),
/// A method with an optional body
MethodTraitItem(MethodSig, Option<P<Expr>>),
MethodTraitItem(MethodSig, Option<ExprId>),
/// An associated type with (possibly empty) bounds and optional concrete
/// type
TypeTraitItem(TyParamBounds, Option<P<Ty>>),
@ -1091,7 +1122,7 @@ pub enum ImplItemKind {
/// of the expression
Const(P<Ty>, P<Expr>),
/// A method implementation with the given signature and body
Method(MethodSig, P<Expr>),
Method(MethodSig, ExprId),
/// An associated type
Type(P<Ty>),
}
@ -1155,11 +1186,12 @@ pub enum Ty_ {
TyNever,
/// A tuple (`(A, B, C, D,...)`)
TyTup(HirVec<P<Ty>>),
/// A path (`module::module::...::Type`), optionally
/// "qualified", e.g. `<HirVec<T> as SomeTrait>::SomeType`.
/// A path to a type definition (`module::module::...::Type`), or an
/// associated type, e.g. `<Vec<T> as Trait>::Type` or `<T>::Target`.
///
/// Type parameters are stored in the Path itself
TyPath(Option<QSelf>, Path),
/// Type parameters may be stored in each `PathSegment`.
TyPath(QPath),
/// Something like `A+B`. Note that `B` must always be a path.
TyObjectSum(P<Ty>, TyParamBounds),
/// A type like `for<'a> Foo<&'a Bar>`
@ -1216,7 +1248,7 @@ pub type ExplicitSelf = Spanned<SelfKind>;
impl Arg {
pub fn to_self(&self) -> Option<ExplicitSelf> {
if let PatKind::Binding(BindByValue(mutbl), name, _) = self.pat.node {
if let PatKind::Binding(BindByValue(mutbl), _, name, _) = self.pat.node {
if name.node == keywords::SelfValue.name() {
return match self.ty.node {
TyInfer => Some(respan(self.pat.span, SelfKind::Value(mutbl))),
@ -1232,7 +1264,7 @@ impl Arg {
}
pub fn is_self(&self) -> bool {
if let PatKind::Binding(_, name, _) = self.pat.node {
if let PatKind::Binding(_, _, name, _) = self.pat.node {
name.node == keywords::SelfValue.name()
} else {
false
@ -1375,32 +1407,20 @@ pub struct Variant_ {
pub type Variant = Spanned<Variant_>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub struct PathListItem_ {
pub name: Name,
/// renamed in list, e.g. `use foo::{bar as baz};`
pub rename: Option<Name>,
pub id: NodeId,
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum UseKind {
/// One import, e.g. `use foo::bar` or `use foo::bar as baz`.
/// Also produced for each element of a list `use`, e.g.
/// `use foo::{a, b}` lowers to `use foo::a; use foo::b;`.
Single,
pub type PathListItem = Spanned<PathListItem_>;
/// Glob import, e.g. `use foo::*`.
Glob,
pub type ViewPath = Spanned<ViewPath_>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum ViewPath_ {
/// `foo::bar::baz as quux`
///
/// or just
///
/// `foo::bar::baz` (with `as baz` implicitly on the right)
ViewPathSimple(Name, Path),
/// `foo::bar::*`
ViewPathGlob(Path),
/// `foo::bar::{a,b,c}`
ViewPathList(Path, HirVec<PathListItem>),
/// Degenerate list import, e.g. `use foo::{a, b}` produces
/// an additional `use foo::{}` for performing checks such as
/// unstable feature gating. May be removed in the future.
ListStem,
}
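Per the doc comments above, a list `use` is lowered to one `Single` item per element plus a degenerate `ListStem` item for the prefix. A minimal sketch of that lowering expressed as plain data; stand-in types and strings, not the actual rustc lowering code.

```rust
#[derive(Debug)]
enum UseKind {
    Single,
    Glob,
    ListStem,
}

fn lower_use_list(prefix: &str, names: &[&str]) -> Vec<(String, UseKind)> {
    let mut items: Vec<(String, UseKind)> = names
        .iter()
        .map(|name| (format!("{}::{}", prefix, name), UseKind::Single))
        .collect();
    // The degenerate `use foo::{};` stem is kept for checks such as feature gating.
    items.push((format!("{}::{{}}", prefix), UseKind::ListStem));
    items
}

fn main() {
    for (path, kind) in lower_use_list("foo", &["a", "b"]) {
        println!("use {}; // UseKind::{:?}", path, kind);
    }
    // `use foo::*` would remain a single item with UseKind::Glob.
    println!("use foo::*; // UseKind::{:?}", UseKind::Glob);
}
```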
/// TraitRef's appear in impls.
@ -1534,15 +1554,20 @@ pub enum Item_ {
///
/// e.g. `extern crate foo` or `extern crate foo_bar as foo`
ItemExternCrate(Option<Name>),
/// A `use` or `pub use` item
ItemUse(P<ViewPath>),
/// `use foo::bar::*;` or `use foo::bar::baz as quux;`
///
/// or just
///
/// `use foo::bar::baz;` (with `as baz` implicitly on the right)
ItemUse(P<Path>, UseKind),
/// A `static` item
ItemStatic(P<Ty>, Mutability, P<Expr>),
/// A `const` item
ItemConst(P<Ty>, P<Expr>),
/// A function declaration
ItemFn(P<FnDecl>, Unsafety, Constness, Abi, Generics, P<Expr>),
ItemFn(P<FnDecl>, Unsafety, Constness, Abi, Generics, ExprId),
/// A module
ItemMod(Mod),
/// An external module

View File

@ -8,13 +8,12 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use hir::def::*;
use hir::def::Def;
use hir::def_id::DefId;
use hir::{self, PatKind};
use ty::TyCtxt;
use syntax::ast;
use syntax::codemap::Spanned;
use syntax_pos::{Span, DUMMY_SP};
use syntax_pos::Span;
use std::iter::{Enumerate, ExactSizeIterator};
@ -51,139 +50,144 @@ impl<T: ExactSizeIterator> EnumerateAndAdjustIterator for T {
}
}
pub fn pat_is_refutable(dm: &DefMap, pat: &hir::Pat) -> bool {
match pat.node {
PatKind::Lit(_) | PatKind::Range(..) | PatKind::Path(Some(..), _) => true,
PatKind::TupleStruct(..) |
PatKind::Path(..) |
PatKind::Struct(..) => {
match dm.get(&pat.id).map(|d| d.full_def()) {
Some(Def::Variant(..)) | Some(Def::VariantCtor(..)) => true,
_ => false
}
}
PatKind::Slice(..) => true,
_ => false
}
}
impl hir::Pat {
pub fn is_refutable(&self) -> bool {
match self.node {
PatKind::Lit(_) |
PatKind::Range(..) |
PatKind::Path(hir::QPath::Resolved(Some(..), _)) |
PatKind::Path(hir::QPath::TypeRelative(..)) => true,
pub fn pat_is_const(dm: &DefMap, pat: &hir::Pat) -> bool {
match pat.node {
PatKind::Path(..) => {
match dm.get(&pat.id).map(|d| d.full_def()) {
Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => true,
_ => false
}
}
_ => false
}
}
/// Call `f` on every "binding" in a pattern, e.g., on `a` in
/// `match foo() { Some(a) => (), None => () }`
pub fn pat_bindings<F>(pat: &hir::Pat, mut f: F)
where F: FnMut(hir::BindingMode, ast::NodeId, Span, &Spanned<ast::Name>),
{
pat.walk(|p| {
if let PatKind::Binding(binding_mode, ref pth, _) = p.node {
f(binding_mode, p.id, p.span, pth);
}
true
});
}
/// Checks if the pattern contains any patterns that bind something to
/// an ident, e.g. `foo`, `Foo(foo)`, or `foo @ Bar(..)`.
pub fn pat_contains_bindings(pat: &hir::Pat) -> bool {
let mut contains_bindings = false;
pat.walk(|p| {
if let PatKind::Binding(..) = p.node {
contains_bindings = true;
false // there's at least one binding, can short circuit now.
} else {
true
}
});
contains_bindings
}
/// Checks if the pattern contains any `ref` or `ref mut` bindings,
/// and if so, whether it contains mutable ones or just immutable ones.
pub fn pat_contains_ref_binding(pat: &hir::Pat) -> Option<hir::Mutability> {
let mut result = None;
pat_bindings(pat, |mode, _, _, _| {
if let hir::BindingMode::BindByRef(m) = mode {
// Pick Mutable as maximum
match result {
None | Some(hir::MutImmutable) => result = Some(m),
_ => (),
}
}
});
result
}
/// Checks if the patterns for this arm contain any `ref` or `ref mut`
/// bindings, and if so, whether it contains mutable ones or just immutable ones.
pub fn arm_contains_ref_binding(arm: &hir::Arm) -> Option<hir::Mutability> {
arm.pats.iter()
.filter_map(|pat| pat_contains_ref_binding(pat))
.max_by_key(|m| match *m {
hir::MutMutable => 1,
hir::MutImmutable => 0,
})
}
/// Checks if the pattern contains any patterns that bind something to
/// an ident or wildcard, e.g. `foo`, or `Foo(_)`, `foo @ Bar(..)`,
pub fn pat_contains_bindings_or_wild(pat: &hir::Pat) -> bool {
let mut contains_bindings = false;
pat.walk(|p| {
match p.node {
PatKind::Binding(..) | PatKind::Wild => {
contains_bindings = true;
false // there's at least one binding/wildcard, can short circuit now.
}
_ => true
}
});
contains_bindings
}
pub fn simple_name<'a>(pat: &'a hir::Pat) -> Option<ast::Name> {
match pat.node {
PatKind::Binding(hir::BindByValue(..), ref path1, None) => {
Some(path1.node)
}
_ => {
None
}
}
}
pub fn def_to_path<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefId) -> hir::Path {
hir::Path::from_name(DUMMY_SP, tcx.item_name(id))
}
/// Return the variants that must exist for the pattern to match.
pub fn necessary_variants(dm: &DefMap, pat: &hir::Pat) -> Vec<DefId> {
let mut variants = vec![];
pat.walk(|p| {
match p.node {
PatKind::TupleStruct(..) |
PatKind::Path(..) |
PatKind::Struct(..) => {
match dm.get(&p.id).map(|d| d.full_def()) {
Some(Def::Variant(id)) |
Some(Def::VariantCtor(id, ..)) => variants.push(id),
_ => ()
PatKind::Path(hir::QPath::Resolved(_, ref path)) |
PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..) |
PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => {
match path.def {
Def::Variant(..) | Def::VariantCtor(..) => true,
_ => false
}
}
_ => ()
PatKind::Slice(..) => true,
_ => false
}
true
});
variants.sort();
variants.dedup();
variants
}
pub fn is_const(&self) -> bool {
match self.node {
PatKind::Path(hir::QPath::TypeRelative(..)) => true,
PatKind::Path(hir::QPath::Resolved(_, ref path)) => {
match path.def {
Def::Const(..) | Def::AssociatedConst(..) => true,
_ => false
}
}
_ => false
}
}
/// Call `f` on every "binding" in a pattern, e.g., on `a` in
/// `match foo() { Some(a) => (), None => () }`
pub fn each_binding<F>(&self, mut f: F)
where F: FnMut(hir::BindingMode, ast::NodeId, Span, &Spanned<ast::Name>),
{
self.walk(|p| {
if let PatKind::Binding(binding_mode, _, ref pth, _) = p.node {
f(binding_mode, p.id, p.span, pth);
}
true
});
}
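`each_binding` is the walk-plus-callback idiom: `walk` visits every sub-pattern and the closure fires on each `PatKind::Binding`. A minimal standalone sketch of the same idiom on a simplified pattern tree (stand-in types and a hypothetical pattern, not the hir definitions):

```rust
enum Pat {
    Wild,
    Binding(String, Option<Box<Pat>>), // e.g. `x` or `x @ SUBPATTERN`
    TupleStruct(String, Vec<Pat>),     // e.g. `Foo(a, _)`
}

impl Pat {
    // Call `it` on every node; stop descending once it returns false.
    fn walk<F: FnMut(&Pat) -> bool>(&self, it: &mut F) {
        if !it(self) {
            return;
        }
        match self {
            Pat::Binding(_, Some(sub)) => sub.walk(it),
            Pat::TupleStruct(_, elems) => {
                for p in elems {
                    p.walk(it);
                }
            }
            _ => {}
        }
    }

    // Invoke `f` with the name of every binding in the pattern.
    fn each_binding<F: FnMut(&str)>(&self, mut f: F) {
        self.walk(&mut |p| {
            if let Pat::Binding(name, _) = p {
                f(name.as_str());
            }
            true
        });
    }
}

fn main() {
    // The pattern `Foo(a @ Ok(b), _)`
    let pat = Pat::TupleStruct(
        "Foo".into(),
        vec![
            Pat::Binding(
                "a".into(),
                Some(Box::new(Pat::TupleStruct(
                    "Ok".into(),
                    vec![Pat::Binding("b".into(), None)],
                ))),
            ),
            Pat::Wild,
        ],
    );
    let mut names = Vec::new();
    pat.each_binding(|name| names.push(name.to_owned()));
    println!("{:?}", names); // ["a", "b"]
}
```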
/// Checks if the pattern contains any patterns that bind something to
/// an ident, e.g. `foo`, `Foo(foo)`, or `foo @ Bar(..)`.
pub fn contains_bindings(&self) -> bool {
let mut contains_bindings = false;
self.walk(|p| {
if let PatKind::Binding(..) = p.node {
contains_bindings = true;
false // there's at least one binding, can short circuit now.
} else {
true
}
});
contains_bindings
}
/// Checks if the pattern contains any patterns that bind something to
/// an ident or wildcard, e.g. `foo`, `Foo(_)`, or `foo @ Bar(..)`.
pub fn contains_bindings_or_wild(&self) -> bool {
let mut contains_bindings = false;
self.walk(|p| {
match p.node {
PatKind::Binding(..) | PatKind::Wild => {
contains_bindings = true;
false // there's at least one binding/wildcard, can short circuit now.
}
_ => true
}
});
contains_bindings
}
pub fn simple_name(&self) -> Option<ast::Name> {
match self.node {
PatKind::Binding(hir::BindByValue(..), _, ref path1, None) => {
Some(path1.node)
}
_ => {
None
}
}
}
/// Return the variants that must exist for the pattern to match.
pub fn necessary_variants(&self) -> Vec<DefId> {
let mut variants = vec![];
self.walk(|p| {
match p.node {
PatKind::Path(hir::QPath::Resolved(_, ref path)) |
PatKind::TupleStruct(hir::QPath::Resolved(_, ref path), ..) |
PatKind::Struct(hir::QPath::Resolved(_, ref path), ..) => {
match path.def {
Def::Variant(id) |
Def::VariantCtor(id, ..) => variants.push(id),
_ => ()
}
}
_ => ()
}
true
});
variants.sort();
variants.dedup();
variants
}
/// Checks if the pattern contains any `ref` or `ref mut` bindings,
/// and if so, whether it contains mutable ones or just immutable ones.
pub fn contains_ref_binding(&self) -> Option<hir::Mutability> {
let mut result = None;
self.each_binding(|mode, _, _, _| {
if let hir::BindingMode::BindByRef(m) = mode {
// Pick Mutable as maximum
match result {
None | Some(hir::MutImmutable) => result = Some(m),
_ => (),
}
}
});
result
}
}
impl hir::Arm {
/// Checks if the patterns for this arm contain any `ref` or `ref mut`
/// bindings, and if so, whether it contains mutable ones or just immutable ones.
pub fn contains_ref_binding(&self) -> Option<hir::Mutability> {
self.pats.iter()
.filter_map(|pat| pat.contains_ref_binding())
.max_by_key(|m| match *m {
hir::MutMutable => 1,
hir::MutImmutable => 0,
})
}
}
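`contains_ref_binding` reduces the per-pattern results with `max_by_key`, so a single `ref mut` binding in any of the arm's patterns wins over plain `ref` bindings. A minimal standalone sketch of that selection (stand-in `Mutability`, hypothetical data):

```rust
#[derive(Copy, Clone, Debug, PartialEq)]
enum Mutability {
    MutImmutable,
    MutMutable,
}

fn most_mutable(per_pattern: &[Option<Mutability>]) -> Option<Mutability> {
    per_pattern
        .iter()
        .filter_map(|m| *m) // skip patterns with no ref bindings at all
        .max_by_key(|m| match *m {
            Mutability::MutMutable => 1,
            Mutability::MutImmutable => 0,
        })
}

fn main() {
    let arms = [
        vec![None, Some(Mutability::MutImmutable)],
        vec![Some(Mutability::MutImmutable), Some(Mutability::MutMutable)],
        vec![None, None],
    ];
    for pats in &arms {
        println!("{:?}", most_mutable(pats));
    }
    // Prints: Some(MutImmutable), Some(MutMutable), None
}
```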

View File

@ -272,7 +272,11 @@ pub fn fn_block_to_string(p: &hir::FnDecl) -> String {
}
pub fn path_to_string(p: &hir::Path) -> String {
to_string(|s| s.print_path(p, false, 0))
to_string(|s| s.print_path(p, false))
}
pub fn qpath_to_string(p: &hir::QPath) -> String {
to_string(|s| s.print_qpath(p, false))
}
pub fn name_to_string(name: ast::Name) -> String {
@ -528,11 +532,8 @@ impl<'a> State<'a> {
};
self.print_ty_fn(f.abi, f.unsafety, &f.decl, None, &generics)?;
}
hir::TyPath(None, ref path) => {
self.print_path(path, false, 0)?;
}
hir::TyPath(Some(ref qself), ref path) => {
self.print_qpath(path, qself, false)?
hir::TyPath(ref qpath) => {
self.print_qpath(qpath, false)?
}
hir::TyObjectSum(ref ty, ref bounds) => {
self.print_type(&ty)?;
@ -643,6 +644,15 @@ impl<'a> State<'a> {
}
}
pub fn print_expr_id(&mut self, expr_id: &hir::ExprId) -> io::Result<()> {
if let Some(krate) = self.krate {
let expr = &krate.exprs[expr_id];
self.print_expr(expr)
} else {
Ok(())
}
}
/// Pretty-print an item
pub fn print_item(&mut self, item: &hir::Item) -> io::Result<()> {
self.hardbreak_if_not_bol()?;
@ -668,10 +678,22 @@ impl<'a> State<'a> {
self.end()?; // end inner head-block
self.end()?; // end outer head-block
}
hir::ItemUse(ref vp) => {
hir::ItemUse(ref path, kind) => {
self.head(&visibility_qualified(&item.vis, "use"))?;
self.print_view_path(&vp)?;
word(&mut self.s, ";")?;
self.print_path(path, false)?;
match kind {
hir::UseKind::Single => {
if path.segments.last().unwrap().name != item.name {
space(&mut self.s)?;
self.word_space("as")?;
self.print_name(item.name)?;
}
word(&mut self.s, ";")?;
}
hir::UseKind::Glob => word(&mut self.s, "::*;")?,
hir::UseKind::ListStem => word(&mut self.s, "::{};")?
}
self.end()?; // end inner head-block
self.end()?; // end outer head-block
}
@ -716,7 +738,7 @@ impl<'a> State<'a> {
word(&mut self.s, " ")?;
self.end()?; // need to close a box
self.end()?; // need to close a box
self.print_expr(&body)?;
self.print_expr_id(body)?;
}
hir::ItemMod(ref _mod) => {
self.head(&visibility_qualified(&item.vis, "mod"))?;
@ -844,8 +866,8 @@ impl<'a> State<'a> {
self.ann.post(self, NodeItem(item))
}
fn print_trait_ref(&mut self, t: &hir::TraitRef) -> io::Result<()> {
self.print_path(&t.path, false, 0)
pub fn print_trait_ref(&mut self, t: &hir::TraitRef) -> io::Result<()> {
self.print_path(&t.path, false)
}
fn print_formal_lifetime_list(&mut self, lifetimes: &[hir::LifetimeDef]) -> io::Result<()> {
@ -1007,7 +1029,7 @@ impl<'a> State<'a> {
self.nbsp()?;
self.end()?; // need to close a box
self.end()?; // need to close a box
self.print_expr(body)?;
self.print_expr_id(body)?;
} else {
word(&mut self.s, ";")?;
}
@ -1052,7 +1074,7 @@ impl<'a> State<'a> {
self.nbsp()?;
self.end()?; // need to close a box
self.end()?; // need to close a box
self.print_expr(body)?;
self.print_expr_id(body)?;
}
hir::ImplItemKind::Type(ref ty) => {
self.print_associated_type(ii.name, None, Some(ty))?;
@ -1115,8 +1137,6 @@ impl<'a> State<'a> {
hir::UnsafeBlock(..) => self.word_space("unsafe")?,
hir::PushUnsafeBlock(..) => self.word_space("push_unsafe")?,
hir::PopUnsafeBlock(..) => self.word_space("pop_unsafe")?,
hir::PushUnstableBlock => self.word_space("push_unstable")?,
hir::PopUnstableBlock => self.word_space("pop_unstable")?,
hir::DefaultBlock => (),
}
self.maybe_print_comment(blk.span.lo)?;
@ -1237,11 +1257,11 @@ impl<'a> State<'a> {
}
fn print_expr_struct(&mut self,
path: &hir::Path,
qpath: &hir::QPath,
fields: &[hir::Field],
wth: &Option<P<hir::Expr>>)
-> io::Result<()> {
self.print_path(path, true, 0)?;
self.print_qpath(qpath, true)?;
word(&mut self.s, "{")?;
self.commasep_cmnt(Consistent,
&fields[..],
@ -1345,8 +1365,8 @@ impl<'a> State<'a> {
hir::ExprRepeat(ref element, ref count) => {
self.print_expr_repeat(&element, &count)?;
}
hir::ExprStruct(ref path, ref fields, ref wth) => {
self.print_expr_struct(path, &fields[..], wth)?;
hir::ExprStruct(ref qpath, ref fields, ref wth) => {
self.print_expr_struct(qpath, &fields[..], wth)?;
}
hir::ExprTup(ref exprs) => {
self.print_expr_tup(exprs)?;
@ -1421,7 +1441,7 @@ impl<'a> State<'a> {
space(&mut self.s)?;
// this is a bare expression
self.print_expr(body)?;
self.print_expr_id(body)?;
self.end()?; // need to close a box
// a box will be closed by print_expr, but we didn't want an overall
@ -1465,17 +1485,14 @@ impl<'a> State<'a> {
self.print_expr(&index)?;
word(&mut self.s, "]")?;
}
hir::ExprPath(None, ref path) => {
self.print_path(path, true, 0)?
hir::ExprPath(ref qpath) => {
self.print_qpath(qpath, true)?
}
hir::ExprPath(Some(ref qself), ref path) => {
self.print_qpath(path, qself, true)?
}
hir::ExprBreak(opt_name, ref opt_expr) => {
hir::ExprBreak(opt_label, ref opt_expr) => {
word(&mut self.s, "break")?;
space(&mut self.s)?;
if let Some(name) = opt_name {
self.print_name(name.node)?;
if let Some(label) = opt_label {
self.print_name(label.name)?;
space(&mut self.s)?;
}
if let Some(ref expr) = *opt_expr {
@ -1483,11 +1500,11 @@ impl<'a> State<'a> {
space(&mut self.s)?;
}
}
hir::ExprAgain(opt_name) => {
hir::ExprAgain(opt_label) => {
word(&mut self.s, "continue")?;
space(&mut self.s)?;
if let Some(name) = opt_name {
self.print_name(name.node)?;
if let Some(label) = opt_label {
self.print_name(label.name)?;
space(&mut self.s)?
}
}
@ -1622,13 +1639,12 @@ impl<'a> State<'a> {
fn print_path(&mut self,
path: &hir::Path,
colons_before_params: bool,
depth: usize)
colons_before_params: bool)
-> io::Result<()> {
self.maybe_print_comment(path.span.lo)?;
let mut first = !path.global;
for segment in &path.segments[..path.segments.len() - depth] {
for segment in &path.segments {
if first {
first = false
} else {
@ -1644,23 +1660,45 @@ impl<'a> State<'a> {
}
fn print_qpath(&mut self,
path: &hir::Path,
qself: &hir::QSelf,
qpath: &hir::QPath,
colons_before_params: bool)
-> io::Result<()> {
word(&mut self.s, "<")?;
self.print_type(&qself.ty)?;
if qself.position > 0 {
space(&mut self.s)?;
self.word_space("as")?;
let depth = path.segments.len() - qself.position;
self.print_path(&path, false, depth)?;
match *qpath {
hir::QPath::Resolved(None, ref path) => {
self.print_path(path, colons_before_params)
}
hir::QPath::Resolved(Some(ref qself), ref path) => {
word(&mut self.s, "<")?;
self.print_type(qself)?;
space(&mut self.s)?;
self.word_space("as")?;
let mut first = !path.global;
for segment in &path.segments[..path.segments.len() - 1] {
if first {
first = false
} else {
word(&mut self.s, "::")?
}
self.print_name(segment.name)?;
self.print_path_parameters(&segment.parameters, colons_before_params)?;
}
word(&mut self.s, ">")?;
word(&mut self.s, "::")?;
let item_segment = path.segments.last().unwrap();
self.print_name(item_segment.name)?;
self.print_path_parameters(&item_segment.parameters, colons_before_params)
}
hir::QPath::TypeRelative(ref qself, ref item_segment) => {
word(&mut self.s, "<")?;
self.print_type(qself)?;
word(&mut self.s, ">")?;
word(&mut self.s, "::")?;
self.print_name(item_segment.name)?;
self.print_path_parameters(&item_segment.parameters, colons_before_params)
}
}
word(&mut self.s, ">")?;
word(&mut self.s, "::")?;
let item_segment = path.segments.last().unwrap();
self.print_name(item_segment.name)?;
self.print_path_parameters(&item_segment.parameters, colons_before_params)
}
fn print_path_parameters(&mut self,
@ -1668,7 +1706,15 @@ impl<'a> State<'a> {
colons_before_params: bool)
-> io::Result<()> {
if parameters.is_empty() {
return Ok(());
let infer_types = match *parameters {
hir::AngleBracketedParameters(ref data) => data.infer_types,
hir::ParenthesizedParameters(_) => false
};
// FIXME(eddyb) See the comment below about infer_types.
if !(infer_types && false) {
return Ok(());
}
}
if colons_before_params {
@ -1696,6 +1742,16 @@ impl<'a> State<'a> {
comma = true;
}
// FIXME(eddyb) This would leak into error messages, e.g.:
// "non-exhaustive patterns: `Some::<..>(_)` not covered".
if data.infer_types && false {
if comma {
self.word_space(",")?
}
word(&mut self.s, "..")?;
comma = true;
}
for binding in data.bindings.iter() {
if comma {
self.word_space(",")?
@ -1733,7 +1789,7 @@ impl<'a> State<'a> {
// is that it doesn't matter
match pat.node {
PatKind::Wild => word(&mut self.s, "_")?,
PatKind::Binding(binding_mode, ref path1, ref sub) => {
PatKind::Binding(binding_mode, _, ref path1, ref sub) => {
match binding_mode {
hir::BindByRef(mutbl) => {
self.word_nbsp("ref")?;
@ -1750,8 +1806,8 @@ impl<'a> State<'a> {
self.print_pat(&p)?;
}
}
PatKind::TupleStruct(ref path, ref elts, ddpos) => {
self.print_path(path, true, 0)?;
PatKind::TupleStruct(ref qpath, ref elts, ddpos) => {
self.print_qpath(qpath, true)?;
self.popen()?;
if let Some(ddpos) = ddpos {
self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
@ -1768,14 +1824,11 @@ impl<'a> State<'a> {
}
self.pclose()?;
}
PatKind::Path(None, ref path) => {
self.print_path(path, true, 0)?;
PatKind::Path(ref qpath) => {
self.print_qpath(qpath, true)?;
}
PatKind::Path(Some(ref qself), ref path) => {
self.print_qpath(path, qself, false)?;
}
PatKind::Struct(ref path, ref fields, etc) => {
self.print_path(path, true, 0)?;
PatKind::Struct(ref qpath, ref fields, etc) => {
self.print_qpath(qpath, true)?;
self.nbsp()?;
self.word_space("{")?;
self.commasep_cmnt(Consistent,
@ -2108,7 +2161,7 @@ impl<'a> State<'a> {
}
}
&hir::WherePredicate::EqPredicate(hir::WhereEqPredicate{ref path, ref ty, ..}) => {
self.print_path(path, false, 0)?;
self.print_path(path, false)?;
space(&mut self.s)?;
self.word_space("=")?;
self.print_type(&ty)?;
@ -2119,38 +2172,6 @@ impl<'a> State<'a> {
Ok(())
}
pub fn print_view_path(&mut self, vp: &hir::ViewPath) -> io::Result<()> {
match vp.node {
hir::ViewPathSimple(name, ref path) => {
self.print_path(path, false, 0)?;
if path.segments.last().unwrap().name != name {
space(&mut self.s)?;
self.word_space("as")?;
self.print_name(name)?;
}
Ok(())
}
hir::ViewPathGlob(ref path) => {
self.print_path(path, false, 0)?;
word(&mut self.s, "::*")
}
hir::ViewPathList(ref path, ref segments) => {
if path.segments.is_empty() {
word(&mut self.s, "{")?;
} else {
self.print_path(path, false, 0)?;
word(&mut self.s, "::{")?;
}
self.commasep(Inconsistent, &segments[..], |s, w| s.print_name(w.node.name))?;
word(&mut self.s, "}")
}
}
}
pub fn print_mutability(&mut self, mutbl: hir::Mutability) -> io::Result<()> {
match mutbl {
hir::MutMutable => self.word_nbsp("mut"),
@ -2171,7 +2192,7 @@ impl<'a> State<'a> {
if let Some(eself) = input.to_self() {
self.print_explicit_self(&eself)?;
} else {
let invalid = if let PatKind::Binding(_, name, _) = input.pat.node {
let invalid = if let PatKind::Binding(_, _, name, _) = input.pat.node {
name.node == keywords::Invalid.name()
} else {
false

View File

@ -1440,8 +1440,8 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> {
}
ty_queue.push(&mut_ty.ty);
}
hir::TyPath(ref maybe_qself, ref path) => {
match self.tcx.expect_def(cur_ty.id) {
hir::TyPath(hir::QPath::Resolved(ref maybe_qself, ref path)) => {
match path.def {
Def::Enum(did) | Def::TyAlias(did) |
Def::Struct(did) | Def::Union(did) => {
let generics = self.tcx.item_generics(did);
@ -1476,15 +1476,12 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> {
};
let new_path = self.rebuild_path(rebuild_info, lifetime);
let qself = maybe_qself.as_ref().map(|qself| {
hir::QSelf {
ty: self.rebuild_arg_ty_or_output(&qself.ty, lifetime,
anon_nums, region_names),
position: qself.position
}
self.rebuild_arg_ty_or_output(qself, lifetime,
anon_nums, region_names)
});
let to = hir::Ty {
id: cur_ty.id,
node: hir::TyPath(qself, new_path),
node: hir::TyPath(hir::QPath::Resolved(qself, P(new_path))),
span: cur_ty.span
};
new_ty = self.rebuild_ty(new_ty, P(to));
@ -1609,6 +1606,7 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> {
hir::AngleBracketedParameters(hir::AngleBracketedParameterData {
lifetimes: new_lts.into(),
types: new_types,
infer_types: data.infer_types,
bindings: new_bindings,
})
}
@ -1623,6 +1621,7 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> {
hir::Path {
span: path.span,
global: path.global,
def: path.def,
segments: new_segs.into()
}
}

Some files were not shown because too many files have changed in this diff.